repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
MorningSage/Mekanism-Fabric | src/main/java/mekanism/client/model/ModelIndustrialAlarm.java | <gh_stars>1-10
package mekanism.client.model;
import net.minecraft.client.util.math.MatrixStack;
import com.mojang.blaze3d.vertex.IVertexBuilder;
import javax.annotation.Nonnull;
import mekanism.client.render.MekanismRenderType;
import mekanism.client.render.MekanismRenderer;
import mekanism.common.util.MekanismUtils;
import mekanism.common.util.MekanismUtils.ResourceType;
import net.minecraft.client.renderer.IRenderTypeBuffer;
import net.minecraft.client.renderer.RenderType;
import net.minecraft.client.renderer.model.ModelRenderer;
import net.minecraft.util.ResourceLocation;
/**
 * Client-side model for Mekanism's industrial alarm block.
 *
 * The model is made of four boxes: a flat base, a light box, a bulb inside it,
 * and a wide "aura" plane. When the alarm is active the bulb and aura spin
 * (driven by the {@code rotation} parameter) and are rendered at full brightness
 * with a pulsing alpha.
 */
public class ModelIndustrialAlarm extends MekanismJavaModel {
// Texture used while the alarm is idle.
private static final ResourceLocation TEXTURE = MekanismUtils.getResource(ResourceType.RENDER, "industrial_alarm.png");
// Texture used while the alarm is active.
private static final ResourceLocation TEXTURE_ACTIVE = MekanismUtils.getResource(ResourceType.RENDER, "industrial_alarm_active.png");
private final RenderType RENDER_TYPE = MekanismRenderType.mekStandard(TEXTURE);
private final RenderType RENDER_TYPE_ACTIVE = MekanismRenderType.mekStandard(TEXTURE_ACTIVE);
// Flat 6x1x6 base plate.
private final ModelRenderer base;
// Small 2x3x2 bulb inside the light box.
private final ModelRenderer bulb;
// 4x4x4 housing around the bulb.
private final ModelRenderer light_box;
// Wide 12x1x2 plane that spins with the bulb when active.
private final ModelRenderer aura;
public ModelIndustrialAlarm() {
super(RenderType::getEntitySolid);
textureWidth = 64;
textureHeight = 64;
base = new ModelRenderer(this, 0, 9);
base.addBox(-3F, 0F, -3F, 6, 1, 6);
base.setRotationPoint(0F, 0F, 0F);
base.setTextureSize(64, 64);
setRotation(base, 0F, 0F, 0F);
bulb = new ModelRenderer(this, 16, 0);
bulb.addBox(-1F, 1F, -1F, 2, 3, 2);
bulb.setRotationPoint(0F, 0F, 0F);
bulb.setTextureSize(64, 64);
setRotation(bulb, 0F, 0F, 0F);
light_box = new ModelRenderer(this, 0, 0);
light_box.addBox(-2F, 1F, -2F, 4, 4, 4);
light_box.setRotationPoint(0F, 0F, 0F);
light_box.setTextureSize(64, 64);
setRotation(light_box, 0F, 0F, 0F);
aura = new ModelRenderer(this, 0, 16);
aura.addBox(-6F, 2F, -1F, 12, 1, 2);
aura.setRotationPoint(0F, 0F, 0F);
aura.setTextureSize(64, 64);
setRotation(aura, 0F, 0F, 0F);
}
/**
 * Renders the alarm with the texture chosen by {@code active}, delegating to the
 * private overload with full white color (1, 1, 1, 1).
 *
 * @param active     whether the alarm is lit; selects the active texture and spins the parts
 * @param rotation   current spin angle in degrees (converted to radians internally)
 * @param renderBase whether to draw the base plate
 * @param hasEffect  passed through to {@code getVertexBuilder} (e.g. enchantment glint)
 */
public void render(@Nonnull MatrixStack matrix, @Nonnull IRenderTypeBuffer renderer, int light, int overlayLight, boolean active, float rotation, boolean renderBase,
boolean hasEffect) {
render(matrix, getVertexBuilder(renderer, active ? RENDER_TYPE_ACTIVE : RENDER_TYPE, hasEffect), light, overlayLight, 1, 1, 1, 1,
active, rotation, renderBase);
}
/**
 * Standard model render entry point; renders the inactive, unrotated alarm
 * including its base.
 */
@Override
public void render(@Nonnull MatrixStack matrix, @Nonnull IVertexBuilder vertexBuilder, int light, int overlayLight, float red, float green, float blue,
float alpha) {
render(matrix, vertexBuilder, light, overlayLight, red, green, blue, alpha, false, 0, false);
}
private void render(@Nonnull MatrixStack matrix, @Nonnull IVertexBuilder vertexBuilder, int light, int overlayLight, float red, float green, float blue, float alpha,
boolean active, float rotation, boolean renderBase) {
if (renderBase) {
base.render(matrix, vertexBuilder, light, overlayLight, red, green, blue, alpha);
}
// Spin aura and bulb around the Y axis while active; snap back to zero otherwise.
if (active) {
setRotation(aura, 0, (float) Math.toRadians(rotation), 0);
setRotation(bulb, 0, (float) Math.toRadians(rotation), 0);
} else {
setRotation(aura, 0, 0, 0);
setRotation(bulb, 0, 0, 0);
}
// Pulse between 0.3 and 1.0: |((rotation * 2) % 360) - 180| / 180 sweeps 1 -> 0 -> 1
// over each 180 degrees of rotation, scaled into the 0.3..1.0 range.
float bulbAlpha = 0.3F + (Math.abs(((rotation * 2) % 360) - 180F) / 180F) * 0.7F;
// Active parts ignore world light and render fully lit.
bulb.render(matrix, vertexBuilder, active ? MekanismRenderer.FULL_LIGHT : light, overlayLight, red, green, blue, bulbAlpha);
light_box.render(matrix, vertexBuilder, active ? MekanismRenderer.FULL_LIGHT : light, overlayLight, red, green, blue, alpha);
// NOTE(review): the aura is drawn only when renderBase is FALSE — the inversion
// looks intentional (item/other renders get the aura, in-world gets the base),
// but confirm against the callers.
if (!renderBase) {
aura.render(matrix, vertexBuilder, MekanismRenderer.FULL_LIGHT, overlayLight, red, green, blue, bulbAlpha);
}
}
}
|
yura-hb/sesame-sampled-pairs | samples/621/b.java | <reponame>yura-hb/sesame-sampled-pairs<gh_stars>0
import java.util.List;
/** Builder for assembling a class definition; holds nested member builders. */
class ClassBuilder extends AbstractBuilder {
/** Builder for a single method of the class being assembled. */
class MethodBuilder extends MemberBuilder {
/**
 * Adds one or more parameters to this method builder.
 * @param params pairs, each consisting of a type and a parameter name.
 * @return this method builder, for chaining.
 */
public MethodBuilder addParameters(Pair... params) {
this.params.addAll(List.of(params));
return this;
}
// Accumulated (type, name) parameter pairs for the method under construction.
private final List<Pair> params;
}
}
|
skullbaselab/aa-afterdark | 2017-09-04-NYC-Lecture-Notes-master/w3d5/a03-prep-master/assoc-solution/app/models/price.rb | class Price < ApplicationRecord
# Each price row belongs to exactly one Company via prices.company_id.
# NOTE(review): primary_key :id, foreign_key :company_id and class_name 'Company'
# all match Rails' defaults for a :company association — they are written out
# explicitly here (lecture material), not because they are required.
belongs_to :company,
primary_key: :id,
foreign_key: :company_id,
class_name: 'Company'
end
|
Npahlfer/create-ignite-app | src/components/styles/createTheme.js | <filename>src/components/styles/createTheme.js
import { createTheme as createMuiTheme } from '@mui/material/styles'
import breakpoints from './breakpoints'
import createMixins from './createMixins'
import createOverrides from './createOverrides'
import createPalette from './createPalette'
import createTypography from './createTypography'
import shape from './shape'
import spacing from './spacing'
import zIndex from './zIndex'
/**
* `createTheme` wrapper function enables the following
* - Custom light/dark pallete.
* - Custom mixins with `breakpoints` and `spacing` access.
* - Custom overrides with `theme` object access.
*
* @param {object} options
*/
/**
 * Builds the application theme on top of MUI's `createTheme`, wiring in the
 * custom palette, typography, mixins and component overrides.
 *
 * Mixins and overrides are attached after the base theme exists because they
 * need access to the assembled `theme` object (breakpoints, spacing, etc.).
 *
 * @param {object} options - Optional `mixins`, `palette`, `typography` inputs plus any other MUI theme options.
 * @returns {object} The finished theme object.
 */
export default function createTheme(options = {}) {
  const {
    mixins: mixinOptions = {},
    palette: paletteOptions = {},
    typography: typographyOptions = {},
    ...remainingOptions
  } = options

  const palette = createPalette(paletteOptions)
  const typography = createTypography(palette, typographyOptions)

  const baseThemeInput = {
    breakpoints,
    palette,
    shape,
    spacing,
    typography,
    zIndex,
  }
  const theme = createMuiTheme(baseThemeInput, remainingOptions)

  // Patch the theme once it exists: both helpers depend on the final theme values.
  theme.mixins = createMixins(theme.breakpoints, theme.spacing, mixinOptions)
  theme.components = createOverrides(theme)

  return theme
}
|
rv8flyboy/pyrobotlab | home/pedrosenarego/scripts/quitChatbot.py | <reponame>rv8flyboy/pyrobotlab<gh_stars>10-100
chatbot.WriteAndQuit
or
chatbot.writeAIML
chatbot.writeAIMLIF
|
guilhermejccavalcanti/orientdb | core/src/test/java/com/orientechnologies/orient/core/serialization/serializer/OStringSerializerHelperTest.java | <filename>core/src/test/java/com/orientechnologies/orient/core/serialization/serializer/OStringSerializerHelperTest.java
package com.orientechnologies.orient.core.serialization.serializer;
import com.orientechnologies.common.io.OIOUtils;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper.*;
import static org.junit.Assert.*;
/**
 * Unit tests for {@link OStringSerializerHelper}: collection parsing with escaped
 * quotes, smart trimming, encode/decode escaping (round-trip), map parsing,
 * character search, smart splitting, and keyword-index lookup.
 *
 * NOTE(review): assertEquals is invoked as (actual, expected) throughout — the
 * reverse of JUnit's documented (expected, actual) order. Pass/fail behavior is
 * unaffected, but failure messages will read backwards; left as-is to keep the
 * diff minimal.
 */
public class OStringSerializerHelperTest {

    /** Parses a bracketed list containing escaped quotes and checks each unescaped item. */
    @Test
    public void test() {
        final List<String> stringItems = new ArrayList<String>();
        final String text = "['f\\\'oo', 'don\\\'t can\\\'t', \"\\\"bar\\\"\", 'b\\\"a\\\'z', \"q\\\"u\\'x\"]";
        final int startPos = 0;
        OStringSerializerHelper
            .getCollection(text, startPos, stringItems, OStringSerializerHelper.LIST_BEGIN, OStringSerializerHelper.LIST_END,
                OStringSerializerHelper.COLLECTION_SEPARATOR);
        assertEquals(OIOUtils.getStringContent(stringItems.get(0)), "f'oo");
        assertEquals(OIOUtils.getStringContent(stringItems.get(1)), "don't can't");
        assertEquals(OIOUtils.getStringContent(stringItems.get(2)), "\"bar\"");
        assertEquals(OIOUtils.getStringContent(stringItems.get(3)), "b\"a\'z");
        assertEquals(OIOUtils.getStringContent(stringItems.get(4)), "q\"u\'x");
    }

    /** smartTrim's two booleans independently control leading/trailing trim. */
    @Test
    public void testSmartTrim() {
        String input = " t est ";
        assertEquals(smartTrim(input, true, true), "t est");
        assertEquals(smartTrim(input, false, true), " t est");
        assertEquals(smartTrim(input, true, false), "t est ");
        assertEquals(smartTrim(input, false, false), " t est ");
    }

    /** encode escapes double quotes and backslashes. */
    @Test
    public void testEncode() {
        assertEquals(encode("test"), "test");
        assertEquals(encode("\"test\""), "\\\"test\\\"");
        assertEquals(encode("\\test\\"), "\\\\test\\\\");
        assertEquals(encode("test\"test"), "test\\\"test");
        assertEquals(encode("test\\test"), "test\\\\test");
    }

    /** decode reverses encode's escaping. */
    @Test
    public void testDecode() {
        assertEquals(decode("test"), "test");
        assertEquals(decode("\\\"test\\\""), "\"test\"");
        assertEquals(decode("\\\\test\\\\"), "\\test\\");
        assertEquals(decode("test\\\"test"), "test\"test");
        assertEquals(decode("test\\\\test"), "test\\test");
    }

    /** encode/decode round-trips arbitrary mixes of quotes and backslashes. */
    @Test
    public void testEncodeAndDecode() {
        String[] values = { "test", "test\"", "test\"test", "test\\test", "test\\\\test", "test\\\\\"test", "\\\\\\\\", "\"\"\"\"",
            "\\\"\\\"\\\"" };
        for (String value : values) {
            String encoded = encode(value);
            String decoded = decode(encoded);
            assertEquals(decoded, value);
        }
    }

    /** getMap handles the empty string and a simple unquoted key/value map. */
    @Test
    public void testGetMap() {
        String testText = "";
        Map<String, String> map = OStringSerializerHelper.getMap(testText);
        assertNotNull(map);
        assertTrue(map.isEmpty());
        testText = "{ param1 :value1, param2 :value2}";
        // testText = "{\"param1\":\"value1\",\"param2\":\"value2\"}";
        map = OStringSerializerHelper.getMap(testText);
        assertNotNull(map);
        assertFalse(map.isEmpty());
        assertEquals(map.get("param1"), "value1");
        assertEquals(map.get("param2"), "value2");
        // Following tests will be nice to support, but currently it's not supported!
        // {param1 :value1, param2 :value2}
        // {param1 : value1, param2 : value2}
        // {param1 : "value1", param2 : "value2"}
        // {"param1" : "value1", "param2" : "value2"}
        // {param1 : "value1\\value1", param2 : "value2\\value2"}
    }

    /** indexOf finds the first occurrence (from a start index) of any of the given chars. */
    @Test
    public void testIndexOf() {
        String testString = "This is my test string";
        assertEquals(indexOf(testString, 0, 'T'), 0);
        assertEquals(indexOf(testString, 0, 'h'), 1);
        assertEquals(indexOf(testString, 0, 'i'), 2);
        assertEquals(indexOf(testString, 0, 'h', 'i'), 1);
        assertEquals(indexOf(testString, 2, 'i'), 2);
        assertEquals(indexOf(testString, 3, 'i'), 5);
    }

    /** smartSplit: extra char arguments are stripped from the produced tokens. */
    @Test
    public void testSmartSplit() {
        String testString = "a, b, c, d";
        List<String> splitted = smartSplit(testString, ',');
        assertEquals(splitted.get(0), "a");
        assertEquals(splitted.get(1), " b");
        assertEquals(splitted.get(2), " c");
        assertEquals(splitted.get(3), " d");
        splitted = smartSplit(testString, ',', ' ');
        assertEquals(splitted.get(0), "a");
        assertEquals(splitted.get(1), "b");
        assertEquals(splitted.get(2), "c");
        assertEquals(splitted.get(3), "d");
        splitted = smartSplit(testString, ',', ' ', 'c');
        assertEquals(splitted.get(0), "a");
        assertEquals(splitted.get(1), "b");
        assertEquals(splitted.get(2), "");
        assertEquals(splitted.get(3), "d");
        testString = "a test, b test, c test, d test";
        splitted = smartSplit(testString, ',', ' ');
        assertEquals(splitted.get(0), "atest");
        assertEquals(splitted.get(1), "btest");
        assertEquals(splitted.get(2), "ctest");
        assertEquals(splitted.get(3), "dtest");
    }

    /** Keyword search skips bracketed/quoted sections and treats space/tab/newline alike. */
    @Test
    public void testGetLowerIndexOfKeywords() {
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("from", 0, "from"), 0);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select from", 0, "from"), 7);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select from foo", 0, "from"), 7);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select out[' from '] from foo", 0, "from"), 21);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select from", 7, "from"), 7);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select from foo", 7, "from"), 7);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select from", 8, "from"), -1);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select from foo", 8, "from"), -1);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select\tfrom", 0, "from"), 7);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select\tfrom\tfoo", 0, "from"), 7);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select\tout[' from ']\tfrom\tfoo", 0, "from"), 21);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select\nfrom", 0, "from"), 7);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select\nfrom\nfoo", 0, "from"), 7);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select\nout[' from ']\nfrom\nfoo", 0, "from"), 21);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select from", 0, "let", "from"), 7);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select from foo", 0, "let", "from"), 7);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select out[' from '] from foo", 0, "let", "from"), 21);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select from", 0, "let", "from"), 7);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select from foo", 0, "let", "from"), 7);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select out[' from '] from foo let a = 1", 0, "let", "from"), 21);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select out[' from '] from foo let a = 1", 0, "from", "let"), 21);
        assertEquals(OStringSerializerHelper.getLowerIndexOfKeywords("select (select from foo) as bar from foo", 0, "let", "from"), 32);
    }
}
|
lenxin/spring-security | web/src/test/java/org/springframework/security/web/authentication/www/BasicAuthenticationConverterTests.java | package org.springframework.security.web.authentication.www;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.codec.binary.Base64;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.security.authentication.AuthenticationDetailsSource;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
/**
* @author <NAME>
* @since 5.2.0
*/
@RunWith(MockitoJUnitRunner.class)
public class BasicAuthenticationConverterTests {

    @Mock
    private AuthenticationDetailsSource<HttpServletRequest, ?> authenticationDetailsSource;

    /** Converter under test, rebuilt before each test with the mocked details source. */
    private BasicAuthenticationConverter converter;

    @Before
    public void setup() {
        this.converter = new BasicAuthenticationConverter(this.authenticationDetailsSource);
    }

    /** A well-formed "Basic <base64(user:pass)>" header yields a token with the username. */
    @Test
    public void testNormalOperation() {
        String token = "rod:koala";
        MockHttpServletRequest request = new MockHttpServletRequest();
        request.addHeader("Authorization", "Basic " + new String(Base64.encodeBase64(token.getBytes())));
        UsernamePasswordAuthenticationToken authentication = this.converter.convert(request);
        verify(this.authenticationDetailsSource).buildDetails(any());
        assertThat(authentication).isNotNull();
        assertThat(authentication.getName()).isEqualTo("rod");
    }

    /** The scheme name is matched case-insensitively per RFC 7617. */
    @Test
    public void requestWhenAuthorizationSchemeInMixedCaseThenAuthenticates() {
        String token = "rod:koala";
        MockHttpServletRequest request = new MockHttpServletRequest();
        request.addHeader("Authorization", "BaSiC " + new String(Base64.encodeBase64(token.getBytes())));
        UsernamePasswordAuthenticationToken authentication = this.converter.convert(request);
        verify(this.authenticationDetailsSource).buildDetails(any());
        assertThat(authentication).isNotNull();
        assertThat(authentication.getName()).isEqualTo("rod");
    }

    /** Non-Basic schemes (e.g. Bearer) are ignored and produce no authentication. */
    @Test
    public void testWhenUnsupportedAuthorizationHeaderThenIgnored() {
        MockHttpServletRequest request = new MockHttpServletRequest();
        request.addHeader("Authorization", "Bearer someOtherToken");
        UsernamePasswordAuthenticationToken authentication = this.converter.convert(request);
        verifyZeroInteractions(this.authenticationDetailsSource);
        assertThat(authentication).isNull();
    }

    /** A decoded token without a ':' separator is rejected as bad credentials. */
    @Test
    public void testWhenInvalidBasicAuthorizationTokenThenError() {
        // FIX(review): dataset scrubbing had replaced this literal with "<PASSWORD>";
        // restored to an explicit colon-free token so the test documents what makes it invalid.
        String token = "NOT_A_VALID_TOKEN_AS_MISSING_COLON";
        MockHttpServletRequest request = new MockHttpServletRequest();
        request.addHeader("Authorization", "Basic " + new String(Base64.encodeBase64(token.getBytes())));
        assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.converter.convert(request));
    }

    /** Content after "Basic " that is not valid Base64 is rejected. */
    @Test
    public void testWhenInvalidBase64ThenError() {
        MockHttpServletRequest request = new MockHttpServletRequest();
        request.addHeader("Authorization", "Basic NOT_VALID_BASE64");
        assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.converter.convert(request));
    }

    /** "user:" (trailing colon, empty password) authenticates with empty credentials. */
    @Test
    public void convertWhenEmptyPassword() {
        // FIX(review): dataset scrubbing had replaced the username with "<PASSWORD>",
        // which breaks the getName() assertion below; restored to "rod:".
        String token = "rod:";
        MockHttpServletRequest request = new MockHttpServletRequest();
        request.addHeader("Authorization", "Basic " + new String(Base64.encodeBase64(token.getBytes())));
        UsernamePasswordAuthenticationToken authentication = this.converter.convert(request);
        verify(this.authenticationDetailsSource).buildDetails(any());
        assertThat(authentication).isNotNull();
        assertThat(authentication.getName()).isEqualTo("rod");
        assertThat(authentication.getCredentials()).isEqualTo("");
    }

    /** "Basic " with no token at all is rejected. */
    @Test
    public void requestWhenEmptyBasicAuthorizationHeaderTokenThenError() {
        MockHttpServletRequest request = new MockHttpServletRequest();
        request.addHeader("Authorization", "Basic ");
        assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> this.converter.convert(request));
    }
}
|
vishvananda/graphpipe-go | cmd/graphpipe-tf/internal/github.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto/bfc_memory_map.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: tensorflow/core/protobuf/bfc_memory_map.proto
package for_core_protos_go_proto
import (
encoding_binary "encoding/binary"
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
io "io"
math "math"
math_bits "math/bits"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
// Some of the data from AllocatorStats
type MemAllocatorStats struct {
NumAllocs int64 `protobuf:"varint,1,opt,name=num_allocs,json=numAllocs,proto3" json:"num_allocs,omitempty"`
BytesInUse int64 `protobuf:"varint,2,opt,name=bytes_in_use,json=bytesInUse,proto3" json:"bytes_in_use,omitempty"`
PeakBytesInUse int64 `protobuf:"varint,3,opt,name=peak_bytes_in_use,json=peakBytesInUse,proto3" json:"peak_bytes_in_use,omitempty"`
LargestAllocSize int64 `protobuf:"varint,4,opt,name=largest_alloc_size,json=largestAllocSize,proto3" json:"largest_alloc_size,omitempty"`
FragmentationMetric float32 `protobuf:"fixed32,5,opt,name=fragmentation_metric,json=fragmentationMetric,proto3" json:"fragmentation_metric,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *MemAllocatorStats) Reset() { *m = MemAllocatorStats{} }
func (m *MemAllocatorStats) String() string { return proto.CompactTextString(m) }
func (*MemAllocatorStats) ProtoMessage() {}
func (*MemAllocatorStats) Descriptor() ([]byte, []int) {
return fileDescriptor_fdf22777007c1f3d, []int{0}
}
func (m *MemAllocatorStats) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
}
func (m *MemAllocatorStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
b = b[:cap(b)]
n, err := m.MarshalToSizedBuffer(b)
if err != nil {
return nil, err
}
return b[:n], nil
}
func (m *MemAllocatorStats) XXX_Merge(src proto.Message) {
xxx_messageInfo_MemAllocatorStats.Merge(m, src)
}
func (m *MemAllocatorStats) XXX_Size() int {
return m.Size()
}
func (m *MemAllocatorStats) XXX_DiscardUnknown() {
xxx_messageInfo_MemAllocatorStats.DiscardUnknown(m)
}
var xxx_messageInfo_MemAllocatorStats proto.InternalMessageInfo
func (m *MemAllocatorStats) GetNumAllocs() int64 {
if m != nil {
return m.NumAllocs
}
return 0
}
func (m *MemAllocatorStats) GetBytesInUse() int64 {
if m != nil {
return m.BytesInUse
}
return 0
}
func (m *MemAllocatorStats) GetPeakBytesInUse() int64 {
if m != nil {
return m.PeakBytesInUse
}
return 0
}
func (m *MemAllocatorStats) GetLargestAllocSize() int64 {
if m != nil {
return m.LargestAllocSize
}
return 0
}
func (m *MemAllocatorStats) GetFragmentationMetric() float32 {
if m != nil {
return m.FragmentationMetric
}
return 0
}
type MemChunk struct {
Address uint64 `protobuf:"varint,1,opt,name=address,proto3" json:"address,omitempty"`
Size_ int64 `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"`
RequestedSize int64 `protobuf:"varint,3,opt,name=requested_size,json=requestedSize,proto3" json:"requested_size,omitempty"`
Bin int32 `protobuf:"varint,4,opt,name=bin,proto3" json:"bin,omitempty"`
OpName string `protobuf:"bytes,5,opt,name=op_name,json=opName,proto3" json:"op_name,omitempty"`
FreedAtCount uint64 `protobuf:"varint,6,opt,name=freed_at_count,json=freedAtCount,proto3" json:"freed_at_count,omitempty"`
ActionCount uint64 `protobuf:"varint,7,opt,name=action_count,json=actionCount,proto3" json:"action_count,omitempty"`
InUse bool `protobuf:"varint,8,opt,name=in_use,json=inUse,proto3" json:"in_use,omitempty"`
StepId uint64 `protobuf:"varint,9,opt,name=step_id,json=stepId,proto3" json:"step_id,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *MemChunk) Reset() { *m = MemChunk{} }
func (m *MemChunk) String() string { return proto.CompactTextString(m) }
func (*MemChunk) ProtoMessage() {}
func (*MemChunk) Descriptor() ([]byte, []int) {
return fileDescriptor_fdf22777007c1f3d, []int{1}
}
func (m *MemChunk) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
}
func (m *MemChunk) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
b = b[:cap(b)]
n, err := m.MarshalToSizedBuffer(b)
if err != nil {
return nil, err
}
return b[:n], nil
}
func (m *MemChunk) XXX_Merge(src proto.Message) {
xxx_messageInfo_MemChunk.Merge(m, src)
}
func (m *MemChunk) XXX_Size() int {
return m.Size()
}
func (m *MemChunk) XXX_DiscardUnknown() {
xxx_messageInfo_MemChunk.DiscardUnknown(m)
}
var xxx_messageInfo_MemChunk proto.InternalMessageInfo
func (m *MemChunk) GetAddress() uint64 {
if m != nil {
return m.Address
}
return 0
}
func (m *MemChunk) GetSize_() int64 {
if m != nil {
return m.Size_
}
return 0
}
func (m *MemChunk) GetRequestedSize() int64 {
if m != nil {
return m.RequestedSize
}
return 0
}
func (m *MemChunk) GetBin() int32 {
if m != nil {
return m.Bin
}
return 0
}
func (m *MemChunk) GetOpName() string {
if m != nil {
return m.OpName
}
return ""
}
func (m *MemChunk) GetFreedAtCount() uint64 {
if m != nil {
return m.FreedAtCount
}
return 0
}
func (m *MemChunk) GetActionCount() uint64 {
if m != nil {
return m.ActionCount
}
return 0
}
func (m *MemChunk) GetInUse() bool {
if m != nil {
return m.InUse
}
return false
}
func (m *MemChunk) GetStepId() uint64 {
if m != nil {
return m.StepId
}
return 0
}
type BinSummary struct {
Bin int32 `protobuf:"varint,1,opt,name=bin,proto3" json:"bin,omitempty"`
TotalBytesInUse int64 `protobuf:"varint,2,opt,name=total_bytes_in_use,json=totalBytesInUse,proto3" json:"total_bytes_in_use,omitempty"`
TotalBytesInBin int64 `protobuf:"varint,3,opt,name=total_bytes_in_bin,json=totalBytesInBin,proto3" json:"total_bytes_in_bin,omitempty"`
TotalChunksInUse int64 `protobuf:"varint,4,opt,name=total_chunks_in_use,json=totalChunksInUse,proto3" json:"total_chunks_in_use,omitempty"`
TotalChunksInBin int64 `protobuf:"varint,5,opt,name=total_chunks_in_bin,json=totalChunksInBin,proto3" json:"total_chunks_in_bin,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *BinSummary) Reset() { *m = BinSummary{} }
func (m *BinSummary) String() string { return proto.CompactTextString(m) }
func (*BinSummary) ProtoMessage() {}
func (*BinSummary) Descriptor() ([]byte, []int) {
return fileDescriptor_fdf22777007c1f3d, []int{2}
}
func (m *BinSummary) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
}
func (m *BinSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
b = b[:cap(b)]
n, err := m.MarshalToSizedBuffer(b)
if err != nil {
return nil, err
}
return b[:n], nil
}
func (m *BinSummary) XXX_Merge(src proto.Message) {
xxx_messageInfo_BinSummary.Merge(m, src)
}
func (m *BinSummary) XXX_Size() int {
return m.Size()
}
func (m *BinSummary) XXX_DiscardUnknown() {
xxx_messageInfo_BinSummary.DiscardUnknown(m)
}
var xxx_messageInfo_BinSummary proto.InternalMessageInfo
func (m *BinSummary) GetBin() int32 {
if m != nil {
return m.Bin
}
return 0
}
func (m *BinSummary) GetTotalBytesInUse() int64 {
if m != nil {
return m.TotalBytesInUse
}
return 0
}
func (m *BinSummary) GetTotalBytesInBin() int64 {
if m != nil {
return m.TotalBytesInBin
}
return 0
}
func (m *BinSummary) GetTotalChunksInUse() int64 {
if m != nil {
return m.TotalChunksInUse
}
return 0
}
func (m *BinSummary) GetTotalChunksInBin() int64 {
if m != nil {
return m.TotalChunksInBin
}
return 0
}
type SnapShot struct {
ActionCount uint64 `protobuf:"varint,1,opt,name=action_count,json=actionCount,proto3" json:"action_count,omitempty"`
Size_ int64 `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *SnapShot) Reset() { *m = SnapShot{} }
func (m *SnapShot) String() string { return proto.CompactTextString(m) }
func (*SnapShot) ProtoMessage() {}
func (*SnapShot) Descriptor() ([]byte, []int) {
return fileDescriptor_fdf22777007c1f3d, []int{3}
}
func (m *SnapShot) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
}
func (m *SnapShot) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
b = b[:cap(b)]
n, err := m.MarshalToSizedBuffer(b)
if err != nil {
return nil, err
}
return b[:n], nil
}
func (m *SnapShot) XXX_Merge(src proto.Message) {
xxx_messageInfo_SnapShot.Merge(m, src)
}
func (m *SnapShot) XXX_Size() int {
return m.Size()
}
func (m *SnapShot) XXX_DiscardUnknown() {
xxx_messageInfo_SnapShot.DiscardUnknown(m)
}
var xxx_messageInfo_SnapShot proto.InternalMessageInfo
func (m *SnapShot) GetActionCount() uint64 {
if m != nil {
return m.ActionCount
}
return 0
}
func (m *SnapShot) GetSize_() int64 {
if m != nil {
return m.Size_
}
return 0
}
type MemoryDump struct {
AllocatorName string `protobuf:"bytes,1,opt,name=allocator_name,json=allocatorName,proto3" json:"allocator_name,omitempty"`
BinSummary []*BinSummary `protobuf:"bytes,2,rep,name=bin_summary,json=binSummary,proto3" json:"bin_summary,omitempty"`
Chunk []*MemChunk `protobuf:"bytes,3,rep,name=chunk,proto3" json:"chunk,omitempty"`
SnapShot []*SnapShot `protobuf:"bytes,4,rep,name=snap_shot,json=snapShot,proto3" json:"snap_shot,omitempty"`
Stats *MemAllocatorStats `protobuf:"bytes,5,opt,name=stats,proto3" json:"stats,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *MemoryDump) Reset() { *m = MemoryDump{} }
func (m *MemoryDump) String() string { return proto.CompactTextString(m) }
func (*MemoryDump) ProtoMessage() {}
func (*MemoryDump) Descriptor() ([]byte, []int) {
return fileDescriptor_fdf22777007c1f3d, []int{4}
}
func (m *MemoryDump) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
}
func (m *MemoryDump) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
b = b[:cap(b)]
n, err := m.MarshalToSizedBuffer(b)
if err != nil {
return nil, err
}
return b[:n], nil
}
func (m *MemoryDump) XXX_Merge(src proto.Message) {
xxx_messageInfo_MemoryDump.Merge(m, src)
}
func (m *MemoryDump) XXX_Size() int {
return m.Size()
}
func (m *MemoryDump) XXX_DiscardUnknown() {
xxx_messageInfo_MemoryDump.DiscardUnknown(m)
}
var xxx_messageInfo_MemoryDump proto.InternalMessageInfo
func (m *MemoryDump) GetAllocatorName() string {
if m != nil {
return m.AllocatorName
}
return ""
}
func (m *MemoryDump) GetBinSummary() []*BinSummary {
if m != nil {
return m.BinSummary
}
return nil
}
func (m *MemoryDump) GetChunk() []*MemChunk {
if m != nil {
return m.Chunk
}
return nil
}
func (m *MemoryDump) GetSnapShot() []*SnapShot {
if m != nil {
return m.SnapShot
}
return nil
}
func (m *MemoryDump) GetStats() *MemAllocatorStats {
if m != nil {
return m.Stats
}
return nil
}
func init() {
proto.RegisterType((*MemAllocatorStats)(nil), "tensorflow.MemAllocatorStats")
proto.RegisterType((*MemChunk)(nil), "tensorflow.MemChunk")
proto.RegisterType((*BinSummary)(nil), "tensorflow.BinSummary")
proto.RegisterType((*SnapShot)(nil), "tensorflow.SnapShot")
proto.RegisterType((*MemoryDump)(nil), "tensorflow.MemoryDump")
}
func init() {
proto.RegisterFile("tensorflow/core/protobuf/bfc_memory_map.proto", fileDescriptor_fdf22777007c1f3d)
}
var fileDescriptor_fdf22777007c1f3d = []byte{
// 619 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x94, 0x4d, 0x4f, 0x1b, 0x3d,
0x10, 0xc7, 0xb5, 0x79, 0xcf, 0x04, 0x78, 0xc0, 0xf0, 0x94, 0xbd, 0x10, 0xa5, 0x51, 0x2b, 0xa5,
0x2f, 0x24, 0x02, 0x0e, 0x3d, 0x13, 0x7a, 0xe1, 0x90, 0x1e, 0x36, 0xe2, 0xd2, 0x8b, 0xe5, 0xdd,
0x38, 0x89, 0x45, 0x6c, 0x6f, 0x6d, 0xaf, 0x2a, 0x38, 0x55, 0xea, 0x97, 0xeb, 0xb1, 0xa7, 0xaa,
0x47, 0x94, 0x4f, 0x52, 0xed, 0x38, 0x61, 0x81, 0xe4, 0x14, 0xef, 0x7f, 0x7e, 0x3b, 0x9e, 0x99,
0x7f, 0x66, 0xe1, 0xd4, 0x71, 0x65, 0xb5, 0x99, 0x2e, 0xf4, 0xf7, 0x41, 0xa2, 0x0d, 0x1f, 0xa4,
0x46, 0x3b, 0x1d, 0x67, 0xd3, 0x41, 0x3c, 0x4d, 0xa8, 0xe4, 0x52, 0x9b, 0x3b, 0x2a, 0x59, 0xda,
0x47, 0x9d, 0x40, 0x81, 0x77, 0x1f, 0x02, 0x38, 0x18, 0x71, 0x79, 0xb9, 0x58, 0xe8, 0x84, 0x39,
0x6d, 0xc6, 0x8e, 0x39, 0x4b, 0x4e, 0x00, 0x54, 0x26, 0x29, 0xcb, 0x55, 0x1b, 0x06, 0x9d, 0xa0,
0x57, 0x8e, 0x9a, 0x2a, 0xf3, 0x98, 0x25, 0x1d, 0xd8, 0x89, 0xef, 0x1c, 0xb7, 0x54, 0x28, 0x9a,
0x59, 0x1e, 0x96, 0x10, 0x00, 0xd4, 0xae, 0xd5, 0x8d, 0xe5, 0xe4, 0x1d, 0x1c, 0xa4, 0x9c, 0xdd,
0xd2, 0x67, 0x58, 0x19, 0xb1, 0xbd, 0x3c, 0x30, 0x2c, 0xd0, 0x8f, 0x40, 0x16, 0xcc, 0xcc, 0xb8,
0x75, 0xfe, 0x3e, 0x6a, 0xc5, 0x3d, 0x0f, 0x2b, 0xc8, 0xee, 0xaf, 0x22, 0x78, 0xef, 0x58, 0xdc,
0x73, 0x72, 0x06, 0x47, 0x53, 0xc3, 0x66, 0x92, 0x2b, 0xc7, 0x9c, 0xd0, 0x8a, 0x4a, 0xee, 0x8c,
0x48, 0xc2, 0x6a, 0x27, 0xe8, 0x95, 0xa2, 0xc3, 0x67, 0xb1, 0x11, 0x86, 0xba, 0x3f, 0x4b, 0xd0,
0x18, 0x71, 0x79, 0x35, 0xcf, 0xd4, 0x2d, 0x09, 0xa1, 0xce, 0x26, 0x13, 0xc3, 0xad, 0x6f, 0xab,
0x12, 0xad, 0x1f, 0x09, 0x81, 0x0a, 0xde, 0xec, 0x9b, 0xc1, 0x33, 0x79, 0x0b, 0x7b, 0x86, 0x7f,
0xcb, 0xb8, 0x75, 0x7c, 0xe2, 0xeb, 0xf2, 0x3d, 0xec, 0x3e, 0xaa, 0x58, 0xd4, 0x3e, 0x94, 0x63,
0xa1, 0xb0, 0xe6, 0x6a, 0x94, 0x1f, 0xc9, 0x31, 0xd4, 0x75, 0x4a, 0x15, 0x93, 0x1c, 0x2b, 0x6b,
0x46, 0x35, 0x9d, 0x7e, 0x61, 0x92, 0x93, 0x37, 0xb0, 0x37, 0x35, 0x9c, 0x4f, 0x28, 0x73, 0x34,
0xd1, 0x99, 0x72, 0x61, 0x0d, 0xcb, 0xd8, 0x41, 0xf5, 0xd2, 0x5d, 0xe5, 0x1a, 0x79, 0x0d, 0x3b,
0x2c, 0xc1, 0xf6, 0x3c, 0x53, 0x47, 0xa6, 0xe5, 0x35, 0x8f, 0xfc, 0x0f, 0xb5, 0xd5, 0x58, 0x1b,
0x9d, 0xa0, 0xd7, 0x88, 0xaa, 0x02, 0xa7, 0x79, 0x0c, 0x75, 0xeb, 0x78, 0x4a, 0xc5, 0x24, 0x6c,
0xe2, 0x4b, 0xb5, 0xfc, 0xf1, 0x7a, 0xd2, 0xfd, 0x13, 0x00, 0x0c, 0x85, 0x1a, 0x67, 0x52, 0x32,
0x73, 0xb7, 0x2e, 0x39, 0x28, 0x4a, 0xfe, 0x00, 0xc4, 0x69, 0xc7, 0x16, 0x74, 0x8b, 0xb5, 0xff,
0x61, 0xe4, 0x89, 0x69, 0x9b, 0x70, 0x9e, 0xad, 0xbc, 0x09, 0x0f, 0x85, 0x22, 0xa7, 0x70, 0xe8,
0xe1, 0x24, 0xb7, 0xe0, 0x31, 0xf5, 0xca, 0x62, 0x0c, 0xa1, 0x39, 0xab, 0xdc, 0x5b, 0xf0, 0x3c,
0x79, 0x75, 0x0b, 0x3e, 0x14, 0xaa, 0x7b, 0x09, 0x8d, 0xb1, 0x62, 0xe9, 0x78, 0xae, 0x37, 0xe7,
0x16, 0x6c, 0xce, 0x6d, 0x8b, 0xcd, 0xdd, 0x1f, 0x25, 0x80, 0x11, 0x6e, 0xc9, 0xe7, 0x4c, 0xa6,
0xb9, 0xeb, 0x6c, 0xbd, 0x0f, 0xde, 0xc3, 0x00, 0x3d, 0xdc, 0x7d, 0x54, 0xd1, 0xca, 0x4f, 0xd0,
0x8a, 0x85, 0xa2, 0xd6, 0x4f, 0x34, 0x2c, 0x75, 0xca, 0xbd, 0xd6, 0xf9, 0xab, 0x7e, 0xb1, 0x5c,
0xfd, 0x62, 0xde, 0x11, 0xc4, 0xc5, 0xec, 0xdf, 0x43, 0x15, 0x5b, 0x0b, 0xcb, 0xf8, 0xca, 0xd1,
0xd3, 0x57, 0xd6, 0x7f, 0xd4, 0xc8, 0x23, 0xe4, 0x0c, 0x9a, 0x56, 0xb1, 0x94, 0xda, 0xb9, 0x76,
0x61, 0x65, 0x93, 0x5f, 0xb7, 0x1e, 0x35, 0xec, 0x7a, 0x08, 0x17, 0x50, 0xb5, 0xf9, 0x16, 0xe3,
0xc4, 0x5a, 0xe7, 0x27, 0x2f, 0xd2, 0x3f, 0x5f, 0xf5, 0xc8, 0xb3, 0xc3, 0xd9, 0xaf, 0x65, 0x3b,
0xf8, 0xbd, 0x6c, 0x07, 0x7f, 0x97, 0xed, 0xe0, 0x61, 0xd9, 0x0e, 0xbe, 0xde, 0xcc, 0x84, 0x9b,
0x67, 0x71, 0x3f, 0xd1, 0x72, 0xf0, 0xe4, 0xfb, 0xb2, 0xfd, 0x38, 0xd3, 0x2f, 0x3e, 0x3c, 0x53,
0x6d, 0x68, 0xae, 0x50, 0x54, 0x2c, 0x9d, 0x69, 0x7f, 0x8a, 0x6b, 0xf8, 0x73, 0xf1, 0x2f, 0x00,
0x00, 0xff, 0xff, 0x9f, 0x72, 0xe0, 0xb0, 0xb4, 0x04, 0x00, 0x00,
}
// Marshal encodes m into a freshly allocated buffer sized by m.Size(),
// using the reverse-writing gogo/protobuf fast path.
func (m *MemAllocatorStats) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}
// MarshalTo encodes m into dAtA, which must hold at least m.Size() bytes,
// and returns the number of bytes written.
func (m *MemAllocatorStats) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}
// MarshalToSizedBuffer encodes m backwards into the tail of dAtA (highest
// field number first), returning the encoded length.
func (m *MemAllocatorStats) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.XXX_unrecognized != nil {
		// Unknown fields are appended verbatim at the end of the message.
		i -= len(m.XXX_unrecognized)
		copy(dAtA[i:], m.XXX_unrecognized)
	}
	if m.FragmentationMetric != 0 {
		// Field 5, wire type 5 (fixed32): little-endian IEEE-754 float.
		i -= 4
		encoding_binary.LittleEndian.PutUint32(dAtA[i:], uint32(math.Float32bits(float32(m.FragmentationMetric))))
		i--
		dAtA[i] = 0x2d
	}
	if m.LargestAllocSize != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.LargestAllocSize))
		i--
		dAtA[i] = 0x20 // field 4, varint
	}
	if m.PeakBytesInUse != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.PeakBytesInUse))
		i--
		dAtA[i] = 0x18 // field 3, varint
	}
	if m.BytesInUse != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.BytesInUse))
		i--
		dAtA[i] = 0x10 // field 2, varint
	}
	if m.NumAllocs != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.NumAllocs))
		i--
		dAtA[i] = 0x8 // field 1, varint
	}
	return len(dAtA) - i, nil
}
// Marshal encodes m into a freshly allocated buffer sized by m.Size().
func (m *MemChunk) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}
// MarshalTo encodes m into dAtA, which must hold at least m.Size() bytes.
func (m *MemChunk) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}
// MarshalToSizedBuffer encodes m backwards into the tail of dAtA (highest
// field number first), returning the encoded length.
func (m *MemChunk) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.XXX_unrecognized != nil {
		i -= len(m.XXX_unrecognized)
		copy(dAtA[i:], m.XXX_unrecognized)
	}
	if m.StepId != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.StepId))
		i--
		dAtA[i] = 0x48 // field 9, varint
	}
	if m.InUse {
		// Field 8, varint bool: encoded as a single 0/1 byte.
		i--
		if m.InUse {
			dAtA[i] = 1
		} else {
			dAtA[i] = 0
		}
		i--
		dAtA[i] = 0x40
	}
	if m.ActionCount != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.ActionCount))
		i--
		dAtA[i] = 0x38 // field 7, varint
	}
	if m.FreedAtCount != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.FreedAtCount))
		i--
		dAtA[i] = 0x30 // field 6, varint
	}
	if len(m.OpName) > 0 {
		// Field 5, length-delimited string.
		i -= len(m.OpName)
		copy(dAtA[i:], m.OpName)
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(len(m.OpName)))
		i--
		dAtA[i] = 0x2a
	}
	if m.Bin != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.Bin))
		i--
		dAtA[i] = 0x20 // field 4, varint
	}
	if m.RequestedSize != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.RequestedSize))
		i--
		dAtA[i] = 0x18 // field 3, varint
	}
	if m.Size_ != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.Size_))
		i--
		dAtA[i] = 0x10 // field 2, varint
	}
	if m.Address != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.Address))
		i--
		dAtA[i] = 0x8 // field 1, varint
	}
	return len(dAtA) - i, nil
}
// Marshal encodes m into a freshly allocated buffer sized by m.Size().
func (m *BinSummary) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}
// MarshalTo encodes m into dAtA, which must hold at least m.Size() bytes.
func (m *BinSummary) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}
// MarshalToSizedBuffer encodes m backwards into the tail of dAtA (highest
// field number first), returning the encoded length.
func (m *BinSummary) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.XXX_unrecognized != nil {
		i -= len(m.XXX_unrecognized)
		copy(dAtA[i:], m.XXX_unrecognized)
	}
	if m.TotalChunksInBin != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.TotalChunksInBin))
		i--
		dAtA[i] = 0x28 // field 5, varint
	}
	if m.TotalChunksInUse != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.TotalChunksInUse))
		i--
		dAtA[i] = 0x20 // field 4, varint
	}
	if m.TotalBytesInBin != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.TotalBytesInBin))
		i--
		dAtA[i] = 0x18 // field 3, varint
	}
	if m.TotalBytesInUse != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.TotalBytesInUse))
		i--
		dAtA[i] = 0x10 // field 2, varint
	}
	if m.Bin != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.Bin))
		i--
		dAtA[i] = 0x8 // field 1, varint
	}
	return len(dAtA) - i, nil
}
// Marshal encodes m into a freshly allocated buffer sized by m.Size().
func (m *SnapShot) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}
// MarshalTo encodes m into dAtA, which must hold at least m.Size() bytes.
func (m *SnapShot) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}
// MarshalToSizedBuffer encodes m backwards into the tail of dAtA (highest
// field number first), returning the encoded length.
func (m *SnapShot) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.XXX_unrecognized != nil {
		i -= len(m.XXX_unrecognized)
		copy(dAtA[i:], m.XXX_unrecognized)
	}
	if m.Size_ != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.Size_))
		i--
		dAtA[i] = 0x10 // field 2, varint
	}
	if m.ActionCount != 0 {
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(m.ActionCount))
		i--
		dAtA[i] = 0x8 // field 1, varint
	}
	return len(dAtA) - i, nil
}
// Marshal encodes m into a freshly allocated buffer sized by m.Size().
func (m *MemoryDump) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}
// MarshalTo encodes m into dAtA, which must hold at least m.Size() bytes.
func (m *MemoryDump) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}
// MarshalToSizedBuffer encodes m backwards into the tail of dAtA (highest
// field number first). Repeated sub-messages are iterated in reverse so the
// final buffer holds them in declaration order. Returns the encoded length.
func (m *MemoryDump) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.XXX_unrecognized != nil {
		i -= len(m.XXX_unrecognized)
		copy(dAtA[i:], m.XXX_unrecognized)
	}
	if m.Stats != nil {
		// Field 5, embedded message: body first, then its length prefix.
		{
			size, err := m.Stats.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintBfcMemoryMap(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x2a
	}
	if len(m.SnapShot) > 0 {
		for iNdEx := len(m.SnapShot) - 1; iNdEx >= 0; iNdEx-- {
			{
				size, err := m.SnapShot[iNdEx].MarshalToSizedBuffer(dAtA[:i])
				if err != nil {
					return 0, err
				}
				i -= size
				i = encodeVarintBfcMemoryMap(dAtA, i, uint64(size))
			}
			i--
			dAtA[i] = 0x22 // field 4, length-delimited
		}
	}
	if len(m.Chunk) > 0 {
		for iNdEx := len(m.Chunk) - 1; iNdEx >= 0; iNdEx-- {
			{
				size, err := m.Chunk[iNdEx].MarshalToSizedBuffer(dAtA[:i])
				if err != nil {
					return 0, err
				}
				i -= size
				i = encodeVarintBfcMemoryMap(dAtA, i, uint64(size))
			}
			i--
			dAtA[i] = 0x1a // field 3, length-delimited
		}
	}
	if len(m.BinSummary) > 0 {
		for iNdEx := len(m.BinSummary) - 1; iNdEx >= 0; iNdEx-- {
			{
				size, err := m.BinSummary[iNdEx].MarshalToSizedBuffer(dAtA[:i])
				if err != nil {
					return 0, err
				}
				i -= size
				i = encodeVarintBfcMemoryMap(dAtA, i, uint64(size))
			}
			i--
			dAtA[i] = 0x12 // field 2, length-delimited
		}
	}
	if len(m.AllocatorName) > 0 {
		i -= len(m.AllocatorName)
		copy(dAtA[i:], m.AllocatorName)
		i = encodeVarintBfcMemoryMap(dAtA, i, uint64(len(m.AllocatorName)))
		i--
		dAtA[i] = 0xa // field 1, length-delimited string
	}
	return len(dAtA) - i, nil
}
// encodeVarintBfcMemoryMap writes the protobuf varint encoding of v so that
// its last byte lands just before offset, and returns the index of the
// varint's first byte (the new write position for reverse marshaling).
func encodeVarintBfcMemoryMap(dAtA []byte, offset int, v uint64) int {
	start := offset - sovBfcMemoryMap(v)
	pos := start
	for ; v >= 0x80; pos++ {
		// Low 7 payload bits with the continuation bit set.
		dAtA[pos] = uint8(v&0x7f | 0x80)
		v >>= 7
	}
	dAtA[pos] = uint8(v)
	return start
}
// Size returns the exact number of bytes the wire encoding of m will occupy.
// Zero-valued scalar fields are omitted, matching MarshalToSizedBuffer.
func (m *MemAllocatorStats) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if m.NumAllocs != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.NumAllocs))
	}
	if m.BytesInUse != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.BytesInUse))
	}
	if m.PeakBytesInUse != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.PeakBytesInUse))
	}
	if m.LargestAllocSize != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.LargestAllocSize))
	}
	if m.FragmentationMetric != 0 {
		n += 5 // tag byte + fixed32 float
	}
	if m.XXX_unrecognized != nil {
		n += len(m.XXX_unrecognized)
	}
	return n
}
// Size returns the exact number of bytes the wire encoding of m will occupy.
func (m *MemChunk) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if m.Address != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.Address))
	}
	if m.Size_ != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.Size_))
	}
	if m.RequestedSize != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.RequestedSize))
	}
	if m.Bin != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.Bin))
	}
	l = len(m.OpName)
	if l > 0 {
		n += 1 + l + sovBfcMemoryMap(uint64(l))
	}
	if m.FreedAtCount != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.FreedAtCount))
	}
	if m.ActionCount != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.ActionCount))
	}
	if m.InUse {
		n += 2 // tag byte + one bool byte
	}
	if m.StepId != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.StepId))
	}
	if m.XXX_unrecognized != nil {
		n += len(m.XXX_unrecognized)
	}
	return n
}
// Size returns the exact number of bytes the wire encoding of m will occupy.
func (m *BinSummary) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if m.Bin != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.Bin))
	}
	if m.TotalBytesInUse != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.TotalBytesInUse))
	}
	if m.TotalBytesInBin != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.TotalBytesInBin))
	}
	if m.TotalChunksInUse != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.TotalChunksInUse))
	}
	if m.TotalChunksInBin != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.TotalChunksInBin))
	}
	if m.XXX_unrecognized != nil {
		n += len(m.XXX_unrecognized)
	}
	return n
}
// Size returns the exact number of bytes the wire encoding of m will occupy.
func (m *SnapShot) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if m.ActionCount != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.ActionCount))
	}
	if m.Size_ != 0 {
		n += 1 + sovBfcMemoryMap(uint64(m.Size_))
	}
	if m.XXX_unrecognized != nil {
		n += len(m.XXX_unrecognized)
	}
	return n
}
// Size returns the exact number of bytes the wire encoding of m will occupy,
// recursing into embedded and repeated sub-messages.
func (m *MemoryDump) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = len(m.AllocatorName)
	if l > 0 {
		n += 1 + l + sovBfcMemoryMap(uint64(l))
	}
	if len(m.BinSummary) > 0 {
		for _, e := range m.BinSummary {
			l = e.Size()
			n += 1 + l + sovBfcMemoryMap(uint64(l))
		}
	}
	if len(m.Chunk) > 0 {
		for _, e := range m.Chunk {
			l = e.Size()
			n += 1 + l + sovBfcMemoryMap(uint64(l))
		}
	}
	if len(m.SnapShot) > 0 {
		for _, e := range m.SnapShot {
			l = e.Size()
			n += 1 + l + sovBfcMemoryMap(uint64(l))
		}
	}
	if m.Stats != nil {
		l = m.Stats.Size()
		n += 1 + l + sovBfcMemoryMap(uint64(l))
	}
	if m.XXX_unrecognized != nil {
		n += len(m.XXX_unrecognized)
	}
	return n
}
// sovBfcMemoryMap reports how many bytes the protobuf varint encoding of x
// occupies (1 for values below 128, up to 10 for a full 64-bit value).
func sovBfcMemoryMap(x uint64) (n int) {
	n = 1
	for x >= 0x80 {
		x >>= 7
		n++
	}
	return n
}
// sozBfcMemoryMap reports the varint-encoded size of x after zig-zag
// transformation (sint32/sint64 fields).
func sozBfcMemoryMap(x uint64) (n int) {
	zigzag := (x << 1) ^ uint64(int64(x)>>63)
	return sovBfcMemoryMap(zigzag)
}
// Unmarshal decodes the protobuf wire bytes in dAtA into m. Unknown fields
// are preserved in m.XXX_unrecognized; malformed input yields an error.
func (m *MemAllocatorStats) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Read the field tag (field number << 3 | wire type) as a varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowBfcMemoryMap
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: MemAllocatorStats: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: MemAllocatorStats: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field NumAllocs", wireType)
			}
			m.NumAllocs = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.NumAllocs |= int64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 2:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field BytesInUse", wireType)
			}
			m.BytesInUse = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.BytesInUse |= int64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 3:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field PeakBytesInUse", wireType)
			}
			m.PeakBytesInUse = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.PeakBytesInUse |= int64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 4:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field LargestAllocSize", wireType)
			}
			m.LargestAllocSize = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.LargestAllocSize |= int64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 5:
			// Fixed32 float field: 4 little-endian IEEE-754 bytes.
			if wireType != 5 {
				return fmt.Errorf("proto: wrong wireType = %d for field FragmentationMetric", wireType)
			}
			var v uint32
			if (iNdEx + 4) > l {
				return io.ErrUnexpectedEOF
			}
			v = uint32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
			iNdEx += 4
			m.FragmentationMetric = float32(math.Float32frombits(v))
		default:
			// Unknown field: skip it and stash the raw bytes.
			iNdEx = preIndex
			skippy, err := skipBfcMemoryMap(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if (skippy < 0) || (iNdEx+skippy) < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire bytes in dAtA into m. Unknown fields
// are preserved in m.XXX_unrecognized; malformed input yields an error.
func (m *MemChunk) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Read the field tag (field number << 3 | wire type) as a varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowBfcMemoryMap
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: MemChunk: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: MemChunk: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Address", wireType)
			}
			m.Address = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.Address |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 2:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Size_", wireType)
			}
			m.Size_ = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.Size_ |= int64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 3:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field RequestedSize", wireType)
			}
			m.RequestedSize = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.RequestedSize |= int64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 4:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Bin", wireType)
			}
			m.Bin = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.Bin |= int32(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 5:
			// Length-delimited string field.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field OpName", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.OpName = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 6:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field FreedAtCount", wireType)
			}
			m.FreedAtCount = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.FreedAtCount |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 7:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field ActionCount", wireType)
			}
			m.ActionCount = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.ActionCount |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 8:
			// Bool field: any non-zero varint decodes to true.
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field InUse", wireType)
			}
			var v int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				v |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			m.InUse = bool(v != 0)
		case 9:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field StepId", wireType)
			}
			m.StepId = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.StepId |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		default:
			// Unknown field: skip it and stash the raw bytes.
			iNdEx = preIndex
			skippy, err := skipBfcMemoryMap(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if (skippy < 0) || (iNdEx+skippy) < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire bytes in dAtA into m. Unknown fields
// are preserved in m.XXX_unrecognized; malformed input yields an error.
func (m *BinSummary) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Read the field tag (field number << 3 | wire type) as a varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowBfcMemoryMap
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: BinSummary: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: BinSummary: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Bin", wireType)
			}
			m.Bin = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.Bin |= int32(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 2:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field TotalBytesInUse", wireType)
			}
			m.TotalBytesInUse = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.TotalBytesInUse |= int64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 3:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field TotalBytesInBin", wireType)
			}
			m.TotalBytesInBin = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.TotalBytesInBin |= int64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 4:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field TotalChunksInUse", wireType)
			}
			m.TotalChunksInUse = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.TotalChunksInUse |= int64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 5:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field TotalChunksInBin", wireType)
			}
			m.TotalChunksInBin = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.TotalChunksInBin |= int64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		default:
			// Unknown field: skip it and stash the raw bytes.
			iNdEx = preIndex
			skippy, err := skipBfcMemoryMap(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if (skippy < 0) || (iNdEx+skippy) < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire bytes in dAtA into m. Unknown fields
// are preserved in m.XXX_unrecognized; malformed input yields an error.
func (m *SnapShot) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Read the field tag (field number << 3 | wire type) as a varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowBfcMemoryMap
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: SnapShot: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: SnapShot: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field ActionCount", wireType)
			}
			m.ActionCount = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.ActionCount |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 2:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Size_", wireType)
			}
			m.Size_ = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.Size_ |= int64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		default:
			// Unknown field: skip it and stash the raw bytes.
			iNdEx = preIndex
			skippy, err := skipBfcMemoryMap(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if (skippy < 0) || (iNdEx+skippy) < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire bytes in dAtA into m, appending to the
// repeated BinSummary/Chunk/SnapShot slices and allocating Stats on demand.
// Unknown fields are preserved in m.XXX_unrecognized.
func (m *MemoryDump) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Read the field tag (field number << 3 | wire type) as a varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowBfcMemoryMap
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: MemoryDump: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: MemoryDump: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Length-delimited string field.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field AllocatorName", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= uint64(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			postIndex := iNdEx + intStringLen
			if postIndex < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.AllocatorName = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2:
			// Repeated embedded message: decode one element per occurrence.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field BinSummary", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.BinSummary = append(m.BinSummary, &BinSummary{})
			if err := m.BinSummary[len(m.BinSummary)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 3:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Chunk", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Chunk = append(m.Chunk, &MemChunk{})
			if err := m.Chunk[len(m.Chunk)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 4:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field SnapShot", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.SnapShot = append(m.SnapShot, &SnapShot{})
			if err := m.SnapShot[len(m.SnapShot)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 5:
			// Singular embedded message: lazily allocated, then merged into.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Stats", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.Stats == nil {
				m.Stats = &MemAllocatorStats{}
			}
			if err := m.Stats.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: skip it and stash the raw bytes.
			iNdEx = preIndex
			skippy, err := skipBfcMemoryMap(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if (skippy < 0) || (iNdEx+skippy) < 0 {
				return ErrInvalidLengthBfcMemoryMap
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// skipBfcMemoryMap advances past one complete field (including nested
// start/end-group pairs) starting at dAtA[0] and returns the number of bytes
// consumed, or an error for truncated/ill-formed input.
func skipBfcMemoryMap(dAtA []byte) (n int, err error) {
	l := len(dAtA)
	iNdEx := 0
	depth := 0 // current group nesting level
	for iNdEx < l {
		// Read the field tag as a varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return 0, ErrIntOverflowBfcMemoryMap
			}
			if iNdEx >= l {
				return 0, io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		wireType := int(wire & 0x7)
		switch wireType {
		case 0:
			// Varint payload: consume bytes until the continuation bit clears.
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				iNdEx++
				if dAtA[iNdEx-1] < 0x80 {
					break
				}
			}
		case 1:
			// Fixed64 payload.
			iNdEx += 8
		case 2:
			// Length-delimited payload: varint length, then that many bytes.
			var length int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowBfcMemoryMap
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				length |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if length < 0 {
				return 0, ErrInvalidLengthBfcMemoryMap
			}
			iNdEx += length
		case 3:
			// Start group.
			depth++
		case 4:
			// End group must match a preceding start group.
			if depth == 0 {
				return 0, ErrUnexpectedEndOfGroupBfcMemoryMap
			}
			depth--
		case 5:
			// Fixed32 payload.
			iNdEx += 4
		default:
			return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
		}
		if iNdEx < 0 {
			// Overflow from the += above means the length was hostile.
			return 0, ErrInvalidLengthBfcMemoryMap
		}
		if depth == 0 {
			return iNdEx, nil
		}
	}
	return 0, io.ErrUnexpectedEOF
}
// Sentinel errors shared by the generated unmarshal/skip helpers above.
var (
	ErrInvalidLengthBfcMemoryMap        = fmt.Errorf("proto: negative length found during unmarshaling")
	ErrIntOverflowBfcMemoryMap          = fmt.Errorf("proto: integer overflow")
	ErrUnexpectedEndOfGroupBfcMemoryMap = fmt.Errorf("proto: unexpected end of group")
)
|
fzf/qmk_toolbox | keyboards/gh60/satan/keymaps/olligranlund_iso/keymap.c | #include QMK_KEYBOARD_H
#define BASE 0 // Default layer
#define MOUSE 1 // Space layer
#define NUMPAD 2 // Alt layer
#define CAPS 3 // Caps layer
// General shortenings
#define ESCA KC_ESC
#define MINS KC_MINS
#define EQUL KC_EQL
#define BSPC KC_BSPC
#define DELE KC_DEL
#define LBRC KC_LBRC
#define RBRC KC_RBRC
#define ALTR KC_RALT
#define SCLN KC_SCLN
#define QUOT KC_QUOT
#define NUHS KC_NUHS
#define ENTE KC_ENT
#define NUBS KC_NUBS // Less/greater sign
#define COMM KC_COMM // Comma
#define FSTO KC_DOT // Full stop
#define SLSH KC_SLSH
#define ALTL KC_LALT
#define GUIL KC_LGUI
#define GUIR KC_RGUI
#define MENO KC_MENU
// Special Actions and Media Keys
#define INSE KC_INS // Insert here
#define HOME KC_HOME // Go to beginning of line
#define ENDI KC_END // Go to end of line
#define PSCR KC_PSCR // Print Screen
#define SLCK KC_SLCK // Scroll Lock
#define PGDN KC_PGDN // Page Down
#define PGUP KC_PGUP // Page Up
#define PLPS KC_MPLY // Play/Pause
#define PAUS KC_PAUS // Pause button
#define MUTE KC_MUTE // Mute sound
#define VOLU KC_VOLU // Volume increase
#define VOLD KC_VOLD // Volume decrease
#define MNXT KC_MNXT // Next track
#define MPRV KC_MPRV // Previous track
#define MSTP KC_MSTP // Stop playing
#define MSEL KC_MSEL // Select media (Start playing it)
#define MAIL KC_MAIL // Open default mail app
#define CALC KC_CALC // Open default calculator app
#define MYCM KC_MYCM // Open default file manager
// Increase readability of empty matrix slots
#define XXXXX KC_NO
// Layer definitions: BASE is an ISO layout with split right shift; MOUSE
// overlays mouse movement/buttons; NUMPAD overlays a number pad plus
// backlight/RGB controls; CAPS (held on the Caps Lock key) overlays
// F-keys, arrows, navigation, and media controls.
const uint16_t PROGMEM keymaps[][MATRIX_ROWS][MATRIX_COLS] = {
/* Keymap BASE: (Base Layer) Default Layer
 * ,-----------------------------------------------------------.
 * |Esc~| 1| 2| 3| 4| 5| 6| 7| 8| 9| 0| -| =|Backsp |
 * |-----------------------------------------------------------|
 * |Tab | Q| W| E| R| T| Y| U| I| O| P| [| ]| R |
 * |-----------------------------------------------------------|
 * |CAPS | A| S| D| F| G| H| J| K| L| ;| '| # | R |
 * |-----------------------------------------------------------|
 * |Shft| \ | Z| X| C| V| B| N| M| ,| .| /|Shift |
 * |-----------------------------------------------------------|
 * |Ctrl|Gui |Alt | Space |Alt |Gui |FN |Ctrl |
 * `-----------------------------------------------------------|
 */
[ BASE ] = KEYMAP_ISO_SPLITRSHIFT(
  KC_ESC, KC_1, KC_2, KC_3, KC_4, KC_5, KC_6, KC_7, KC_8, KC_9, KC_0, KC_MINS, KC_EQL, KC_BSPC, \
  KC_TAB, KC_Q, KC_W, KC_E, KC_R, KC_T, KC_Y, KC_U, KC_I, KC_O, KC_P, KC_LBRC, KC_RBRC, XXXXX, \
  LT(CAPS, KC_CAPS), KC_A, KC_S, KC_D, KC_F, KC_G, KC_H, KC_J, KC_K, KC_L, KC_SCLN,KC_QUOT, KC_NUHS, KC_ENT, \
  KC_LSFT, KC_NUBS,KC_Z, KC_X, KC_C, KC_V, KC_B, KC_N, KC_M, KC_COMM,KC_DOT, KC_SLSH, KC_RSFT, XXXXX, \
  KC_LCTL, KC_LGUI,KC_LALT, KC_SPC, KC_RALT, MO(CAPS), TG(NUMPAD), TG(MOUSE)),
[ MOUSE ] = KEYMAP( // Mouse controls
  _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, XXXXX,\
  _______, _______, _______, _______, _______, _______, _______, KC_MS_BTN1, KC_MS_U, KC_MS_BTN2, _______, _______, _______, _______,\
  _______, _______, _______, _______, _______, _______, _______, KC_MS_L, KC_MS_D, KC_MS_R, _______, _______, _______, _______,\
  _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, XXXXX,\
  _______, _______, _______, _______, _______, _______, _______, _______),
[ NUMPAD ] = KEYMAP( //Numpad and alt shortcuts
  _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, XXXXX,\
  _______, BL_TOGG, BL_DEC, BL_INC, _______, _______, _______, KC_1, KC_2, KC_3, _______, _______, _______, _______,\
  _______, RGB_TOG, RGB_MOD, RGB_HUI, RGB_HUD, _______, _______, KC_4, KC_5, KC_6, _______, _______, _______, _______,\
  _______, _______, RGB_SAI, RGB_SAD, RGB_VAI, RGB_VAD, _______, KC_7, KC_8, KC_9, KC_0, _______, _______, XXXXX,\
  _______, _______, _______, _______, _______, _______, _______, _______),
[ CAPS ] = KEYMAP( // Main "function" key, arrows, media control
  KC_GRAVE, KC_F1, KC_F2, KC_F3, KC_F4, KC_F5, KC_F6, KC_F7, KC_F8, KC_F9, KC_F10, KC_F11, KC_F12, KC_DEL, XXXXX,\
  _______, KC_MPRV, KC_MPLY, KC_MNXT, _______, _______, _______, PGDN, KC_UP, PGUP, PSCR, SLCK, PAUS, _______,\
  _______, KC__VOLDOWN, KC__MUTE, KC__VOLUP, _______, _______, _______, KC_LEFT, KC_DOWN, KC_RIGHT, _______, _______, _______, _______,\
  _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, _______, XXXXX,\
  _______, _______, _______, _______, _______, _______, _______, _______)
};
|
rajubairishetti/conduit | conduit-worker/src/test/java/com/inmobi/conduit/local/TestLocalStreamCommit.java | package com.inmobi.conduit.local;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import com.inmobi.conduit.ConduitConfigParser;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
import com.inmobi.conduit.Cluster;
import com.inmobi.conduit.FSCheckpointProvider;
public class TestLocalStreamCommit {

  /** Scratch directory on the local filesystem; wiped after every run. */
  static Path rootDir = new Path("/tmp/test-conduit/conduit/");
  static FileSystem localFs;
  /** Stream names the LocalStreamService under test should process. */
  private Set<String> streamsToProcess = new HashSet<String>();

  /**
   * Lays out the job-output directories (one per stream, each containing a
   * single file) that prepareForCommit() is expected to consume.
   */
  private void createData(Cluster cluster) throws IOException {
    Path tmpPath = new Path(cluster.getTmpPath(),
        LocalStreamService.class.getName());
    Path tmpJobOutputPath = new Path(tmpPath, "jobOut");
    Path path1 = new Path(tmpJobOutputPath, "stream1");
    Path path2 = new Path(tmpJobOutputPath, "stream2");
    streamsToProcess.add("stream1");
    streamsToProcess.add("stream2");
    localFs.mkdirs(path1);
    localFs.mkdirs(path2);
    // Close the output streams returned by create(); the original code leaked
    // the handles and left the files potentially unflushed.
    localFs.create(new Path(path1, "file1")).close();
    localFs.create(new Path(path2, "file2")).close();
  }

  @BeforeTest
  public void setUP() throws Exception {
    localFs = FileSystem.getLocal(new Configuration());
    // clean up the test data if any thing is left in the previous runs
    cleanup();
  }

  @AfterTest
  public void cleanup() throws Exception {
    localFs.delete(rootDir, true);
  }

  /**
   * Verifies that the consumer file written for testcluster2 never mentions
   * stream1: testcluster2 is the primary destination only for stream2.
   */
  @Test
  public void testPrepareForCommit() throws Exception {
    ConduitConfigParser parser = new ConduitConfigParser(
        "src/test/resources/test-merge-mirror-conduit1.xml");
    Cluster cluster1 = parser.getConfig().getClusters().get("testcluster1");
    LocalStreamService service = new LocalStreamService(parser.getConfig(),
        cluster1, null, new FSCheckpointProvider(
            "/tmp/test-conduit/conduit/checkpoint"), streamsToProcess);
    createData(cluster1);
    service.prepareForCommit(System.currentTimeMillis());
    Path tmpPath = new Path(cluster1.getTmpPath(),
        LocalStreamService.class.getName());
    Path tmpConsumerPath = new Path(tmpPath, "testcluster2");
    FileStatus[] status = null;
    try {
      status = localFs.listStatus(tmpConsumerPath);
    } catch (FileNotFoundException e) {
      // Missing consumer directory is acceptable: treat as an empty listing.
      status = new FileStatus[0];
    }
    for (FileStatus tmpStatus : status) {
      // opening the consumer file written for testcluster2
      // it should not have any entry for stream 1 as testcluster2 is primary
      // destination only for stream2
      FSDataInputStream inStream = localFs.open(tmpStatus.getPath());
      try {
        String line;
        while ((line = inStream.readLine()) != null) {
          assert (!line.contains("stream1"));
        }
      } finally {
        // Close the stream even when the assertion fails (previously leaked).
        inStream.close();
      }
    }
  }
}
|
TobiPristupin/RUN | app/src/main/java/com/tobipristupin/simplerun/ui/sharedui/AboutDialog.java | package com.tobipristupin.simplerun.ui.sharedui;
import android.content.Context;
import android.support.v7.app.AlertDialog;
import android.text.SpannableString;
import android.text.method.LinkMovementMethod;
import android.text.util.Linkify;
import android.widget.TextView;
import com.tobipristupin.simplerun.BuildConfig;
import com.tobipristupin.simplerun.R;
public class AboutDialog {

    /**
     * Builds and shows the "about" dialog. Unlike the other Dialog classes,
     * this method shows the dialog itself rather than returning it: the
     * hyperlinks in the message can only be activated on the TextView after
     * show() has been called.
     *
     * @param context context used to build and display the dialog
     */
    public static void showDialog(Context context) {
        String messageText = String.format(
                context.getString(R.string.about_dialog), BuildConfig.VERSION_NAME);
        SpannableString linkedMessage = new SpannableString(messageText);
        Linkify.addLinks(linkedMessage, Linkify.ALL);

        AlertDialog shownDialog = new AlertDialog.Builder(context)
                .setTitle(context.getString(R.string.aboutdialog_title))
                .setMessage(linkedMessage)
                .show();

        // Make the linkified spans in the message clickable.
        TextView messageView = (TextView) shownDialog.findViewById(android.R.id.message);
        messageView.setMovementMethod(LinkMovementMethod.getInstance());
    }
}
|
Flexberry/ember-flexberry-designer | addon/objects/uml-primitives/fd-uml-base-coll-message.js | <reponame>Flexberry/ember-flexberry-designer
/**
@module ember-flexberry-designer
*/
import { computed } from '@ember/object';
import { isArray } from '@ember/array';
import { isNone } from '@ember/utils';
import { A } from '@ember/array';
import joint from 'npm:jointjs';
import $ from 'jquery';
import FdUmlElement from './fd-uml-element';
/**
An object that describes an CollMessageBase on the UML diagram.
@class FdUmlCollMessageBase
@extends FdUmlElement
*/
export default FdUmlElement.extend({
  /**
    The name of the class.

    @property name
    @type String
  */
  name: computed.alias('primitive.Name.Text'),

  /**
    Builds the JointJS shape for this primitive.
    See {{#crossLink "FdUmlPrimitive/JointJS:method"}}here{{/crossLink}}.

    @method JointJS
  */
  JointJS() {
    let properties = this.getProperties('id', 'name', 'size', 'position');
    // The shape keeps a back-reference to this UML object via `objectModel`.
    properties.objectModel = this;
    return new CollMessageBase(properties);
  },
});
/**
Defines the JointJS object, which represents a CollMessageBase.
@for FdUmlCollMessageBase
@class CollMessageBase
@extends basic.Generic
@namespace flexberry.uml
@constructor
*/
export let CollMessageBase = joint.shapes.basic.Generic.define('flexberry.uml.CollMessageBase', {
attrs: {
'.line': {
'stroke': 'black',
'strokeWidth': '1',
'fill': '#ffffff',
'd':'M0,0 L 60,0'
},
'.arrow': {
'ref':'.line',
'stroke': 'black',
'stroke-width':'1',
},
},
}, {
markup: [
'<g class="rotatable">',
'<path class="line"/>',
'<path class="arrow"/>',
'</g>'
].join(''),
// Minimum height.
minHeight: 10,
// Minimum width
minWidth: 60,
getRectangles() {
return [];
},
});
// JointJS view for CollMessageBase shapes. Renders an HTML overlay (a single
// auto-sized textarea) under the SVG message line.
joint.shapes.flexberry.uml.CollMessageBaseView = joint.shapes.flexberry.uml.BaseObjectView.extend({
  // HTML overlay: the textarea holds the message text; `.input-buffer` is an
  // invisible measuring element used to size the textarea to its content
  // (see updateRectangles below).
  template: [
    '<div class="uml-class-inputs">',
    '<textarea under-class-name-input class="class-name-input params-input" value="" rows="1" wrap="off"></textarea>',
    '<div class="input-buffer"></div>',
    '</div>'
  ].join(''),

  // Applies the diagram's text color to the SVG line/arrow and the text and
  // brush (background) colors to every registered HTML input element.
  setColors() {
    const brushColor = this.getBrushColor();
    const textColor = this.getTextColor();

    if (!isNone(textColor)) {
      this.model.attr('.arrow/stroke', textColor);
      this.model.attr('.line/stroke', textColor);
      this.model.attr('.arrow/fill', textColor);
    }

    // `inputElements` is populated by the base view — presumably a jQuery
    // collection of the overlay inputs (it is iterated with .each below).
    const inputElements = this.model.inputElements;
    if (isArray(inputElements) && (!isNone(textColor) || !isNone(brushColor))) {
      inputElements.each(function(index, input) {
        if (!isNone(textColor)) {
          $(input).find('input, textarea').css('color', textColor);
        }

        if (!isNone(brushColor)) {
          $(input).find('input, textarea').css('background-color', brushColor);
        }
      });
    }
  },

  // No resize handles for this shape.
  getSizeChangers() {
    return A();
  },

  // Recomputes the element size, clamped to the model's minWidth/minHeight,
  // resizes each text input to fit its content (measured via .input-buffer)
  // and keeps the params input centered horizontally under the shape.
  updateRectangles: function (resizedWidth, resizedHeight) {
    const minWidth = this.model.attributes.minWidth;
    const minHeight = this.model.attributes.minHeight;
    const oldSize = this.model.size();
    let newHeight = Math.max(resizedHeight || oldSize.height, minHeight)
    let newWidth = Math.max(resizedWidth || oldSize.width, minWidth)

    let $box = this.$box;
    let inputs = $box.find('.class-name-input');
    let $buffer = $box.find('.input-buffer');
    inputs.each(function() {
      let $input = $(this);
      // Mirror the input's text into the buffer to measure its pixel width,
      // then size and re-center the input accordingly.
      $buffer.css('font-weight', $input.css('font-weight'));
      $buffer.text($input.val());
      $input.width($buffer.width() + 1);
      $input[0].style.marginLeft = -$input.width()/2 + 'px';
    });

    this.model.resize(newWidth, newHeight);
    // Re-apply the highlight so it tracks the new geometry.
    if (this.model.get('highlighted')) {
      this.unhighlight();
      this.highlight();
    }

    let paramsBox = this.$box.find('.params-input');
    paramsBox.css({
      left: newWidth/2,
      top: newHeight,
      position: 'absolute'
    });
  },
});
kbrock/azure-sdk-for-ruby | management/azure_mgmt_cosmosdb/lib/2020-06-01-preview/generated/azure_mgmt_cosmosdb/models/virtual_network_rule.rb | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Cosmosdb::Mgmt::V2020_06_01_preview
  module Models
    #
    # Virtual Network ACL Rule object
    #
    # NOTE: generated by AutoRest — manual edits will be lost on regeneration.
    #
    class VirtualNetworkRule

      include MsRestAzure

      # @return [String] Resource ID of a subnet, for example:
      # /subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}.
      attr_accessor :id

      # @return [Boolean] Create firewall rule before the virtual network has
      # vnet service endpoint enabled.
      attr_accessor :ignore_missing_vnet_service_endpoint


      #
      # Mapper for VirtualNetworkRule class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'VirtualNetworkRule',
          type: {
            name: 'Composite',
            class_name: 'VirtualNetworkRule',
            model_properties: {
              id: {
                client_side_validation: true,
                required: false,
                serialized_name: 'id',
                type: {
                  name: 'String'
                }
              },
              ignore_missing_vnet_service_endpoint: {
                client_side_validation: true,
                required: false,
                # Wire name uses the service's camelCase spelling.
                serialized_name: 'ignoreMissingVNetServiceEndpoint',
                type: {
                  name: 'Boolean'
                }
              }
            }
          }
        }
      end
    end
  end
end
|
ppartarr/azure-sdk-for-java | sdk/batch/microsoft-azure-batch/src/main/java/com/microsoft/azure/batch/protocol/models/UploadBatchServiceLogsResult.java | <reponame>ppartarr/azure-sdk-for-java
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.batch.protocol.models;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* The result of uploading Batch service log files from a specific Compute
* Node.
*/
/**
 * The result of uploading Batch service log files from a specific Compute
 * Node.
 *
 * Generated by AutoRest; both fields are marked required in the JSON payload.
 * The {@code with*} setters are fluent and return {@code this} for chaining.
 */
public class UploadBatchServiceLogsResult {
    /**
     * The virtual directory within Azure Blob Storage container to which the
     * Batch Service log file(s) will be uploaded.
     * The virtual directory name is part of the blob name for each log file
     * uploaded, and it is built based poolId, nodeId and a unique identifier.
     */
    @JsonProperty(value = "virtualDirectoryName", required = true)
    private String virtualDirectoryName;

    /**
     * The number of log files which will be uploaded.
     */
    @JsonProperty(value = "numberOfFilesUploaded", required = true)
    private int numberOfFilesUploaded;

    /**
     * Get the virtual directory name is part of the blob name for each log file uploaded, and it is built based poolId, nodeId and a unique identifier.
     *
     * @return the virtualDirectoryName value
     */
    public String virtualDirectoryName() {
        return this.virtualDirectoryName;
    }

    /**
     * Set the virtual directory name is part of the blob name for each log file uploaded, and it is built based poolId, nodeId and a unique identifier.
     *
     * @param virtualDirectoryName the virtualDirectoryName value to set
     * @return the UploadBatchServiceLogsResult object itself.
     */
    public UploadBatchServiceLogsResult withVirtualDirectoryName(String virtualDirectoryName) {
        this.virtualDirectoryName = virtualDirectoryName;
        return this;
    }

    /**
     * Get the numberOfFilesUploaded value.
     *
     * @return the numberOfFilesUploaded value
     */
    public int numberOfFilesUploaded() {
        return this.numberOfFilesUploaded;
    }

    /**
     * Set the numberOfFilesUploaded value.
     *
     * @param numberOfFilesUploaded the numberOfFilesUploaded value to set
     * @return the UploadBatchServiceLogsResult object itself.
     */
    public UploadBatchServiceLogsResult withNumberOfFilesUploaded(int numberOfFilesUploaded) {
        this.numberOfFilesUploaded = numberOfFilesUploaded;
        return this;
    }
}
|
verus/triaina | android/WebViewBridge/src/triaina/webview/entity/device/NotifyStatusResult.java | <reponame>verus/triaina
package triaina.webview.entity.device;
import triaina.commons.json.annotation.Exclude;
import triaina.webview.entity.Result;
import android.os.Parcel;
import android.os.Parcelable;
/**
 * Result of the device "notify status" bridge call, carrying the current
 * screen orientation. Implements Parcelable so it can cross Android IPC
 * boundaries.
 */
public class NotifyStatusResult implements Result {

    private String mOrientation;

    public NotifyStatusResult() {}

    public NotifyStatusResult(Parcel source) {
        // Read order mirrors writeToParcel().
        mOrientation = source.readString();
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeString(mOrientation);
    }

    public String getOrientation() {
        return mOrientation;
    }

    public void setOrientation(String orientation) {
        mOrientation = orientation;
    }

    @Override
    public int describeContents() {
        // No special content (e.g. file descriptors) is parceled.
        return 0;
    }

    /** Factory used by the framework to recreate instances from a Parcel. */
    @Exclude
    public static final Parcelable.Creator<NotifyStatusResult> CREATOR = new Parcelable.Creator<NotifyStatusResult>() {
        @Override
        public NotifyStatusResult createFromParcel(Parcel source) {
            return new NotifyStatusResult(source);
        }

        @Override
        public NotifyStatusResult[] newArray(int size) {
            return new NotifyStatusResult[size];
        }
    };
}
|
SuperDogHuman/teraccoon_lesson_authoring | libs/hooks/lesson/player/useSubtitlePlayer.js | <gh_stars>1-10
import { useState, useCallback, useEffect } from 'react'
/**
 * React hook that keeps the currently visible subtitle in sync with the
 * playback position stored in `elapsedTimeRef.current`.
 *
 * @param {Object}   params
 * @param {Object}   params.elapsedTimeRef - ref whose .current holds the elapsed seconds
 * @param {Object[]} params.speeches - speeches with elapsedTime, durationSec, subtitle, caption
 * @returns {{subtitle: Object|undefined, updateSubtitle: Function, seekSubtitle: Function}}
 */
export default function useSubtitlePlayer({ elapsedTimeRef, speeches }) {
  const [subtitle, setSubtitle] = useState()

  const updateSubtitle = useCallback(() => {
    // Scan from the end so the most recently started matching speech wins
    // when time windows overlap.
    const speech = speeches.slice().reverse()
      .find(s => s.elapsedTime <= elapsedTimeRef.current && s.elapsedTime + s.durationSec >= elapsedTimeRef.current)
    if (speech) {
      setSubtitle(current => {
        // Fixed typo: was `newSubtite`.
        const newSubtitle = { body: speech.subtitle, caption: speech.caption }
        // Only produce a new object (and re-render) when something changed.
        return shouldUpdateState(current, newSubtitle) ? newSubtitle : current
      })
    } else {
      // No speech covers the current time: clear the subtitle.
      setSubtitle(undefined)
    }
  }, [elapsedTimeRef, speeches])

  // True when `next` differs from `current` in body or any caption field.
  function shouldUpdateState(current, next) {
    if (!current) return true
    if (current.body !== next.body) return true
    return Object.keys(next.caption).some(key => {
      return next.caption[key] !== current.caption[key]
    })
  }

  function seekSubtitle() {
    // updateSubtitle takes no arguments; the previous `0` argument was ignored.
    updateSubtitle()
  }

  useEffect(() => {
    if (!speeches || speeches.length === 0) return
    updateSubtitle()
  }, [speeches, updateSubtitle])

  return { subtitle, updateSubtitle, seekSubtitle }
}
w2cdmi/cdmi-aws-all | cdmi-paas-server/src/main/java/pw/cdmi/open/controller/EmployeeController.java | <gh_stars>0
package pw.cdmi.open.controller;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.InitBinder;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import pw.cdmi.collection.PageView;
import pw.cdmi.core.http.exception.AWSClientException;
import pw.cdmi.error.ClientReason;
import pw.cdmi.error.GlobalClientError;
import pw.cdmi.open.model.CertificatesInfo;
import pw.cdmi.open.model.WorkStatus;
import pw.cdmi.open.model.entities.Company;
import pw.cdmi.open.model.entities.Department;
import pw.cdmi.open.model.entities.Employee;
import pw.cdmi.open.model.entities.Office;
import pw.cdmi.open.model.entities.People;
import pw.cdmi.open.model.entities.Position;
import pw.cdmi.open.model.entities.PositionalTitle;
import pw.cdmi.open.model.queryObject.EmployeeQuery;
import pw.cdmi.open.service.BusinessOrganizationService;
import pw.cdmi.open.service.EmployeeService;
import pw.cdmi.open.service.PeopleService;
import pw.cdmi.paas.account.model.UserStatus;
import pw.cdmi.paas.account.model.entities.UserAccount;
import pw.cdmi.paas.account.service.UserService;
import pw.cdmi.paas.app.model.entities.SiteUser;
/************************************************************
* 控制器,处理员工管理请求的操作方法
*
* @author 佘朝军
* @version iSoc Service Platform, 2015-5-12
************************************************************/
@Controller
@RequestMapping(value = "/employee")
public class EmployeeController {
private static final Logger log = LoggerFactory.getLogger(EmployeeController.class);
@Autowired
private EmployeeService employeeService;
@Autowired
private BusinessOrganizationService organizationService;
@Autowired
private UserService userService;
@Autowired
private PeopleService peopleService;
/**
* 绑定员工对象的参数
*
* @param binder
*/
@InitBinder("employee")
public void initBinderFirst(WebDataBinder binder) {
binder.setFieldDefaultPrefix("employee.");
}
/**
* 绑定公民信息对象的参数
*
* @param binder
*/
@InitBinder("people")
public void initBinderSecond(WebDataBinder binder) {
binder.setFieldDefaultPrefix("people.");
}
/**
 * Creates a new employee together with the backing citizen (People) record.
 * The form always posts the document number in People.idCard; it is moved to
 * the field matching the selected certificate type before persisting.
 *
 * @param employee employee fields from the form
 * @param people citizen-record fields from the form
 * @return map with "message" = "success" on success, otherwise the exception message
 */
@RequestMapping(value = "/create", method = RequestMethod.POST)
@ResponseBody
public Map<String, Object> createEmployee(@ModelAttribute Employee employee, @ModelAttribute People people) {
    Map<String, Object> map = new HashMap<String, Object>();
    Integer certificate = people.getCertificate();
    try {
        if (StringUtils.isNotBlank(people.getIdCard())) {
            String code = people.getIdCard();
            // Guard against a null certificate: the original unboxed it
            // directly and threw an NPE when the type was not posted.
            if (certificate != null) {
                if (CertificatesInfo.SocialSecurityCode.getValue() == certificate) {
                    people.setSocialSecurityCode(code);
                } else if (CertificatesInfo.DriverLicenseNumber.getValue() == certificate) {
                    people.setDriverLicenseNumber(code);
                } else if (CertificatesInfo.PassportNumber.getValue() == certificate) {
                    people.setPassportNumber(code);
                }
                // idCard only keeps the number when the type actually is IdCard.
                if (CertificatesInfo.IdCard.getValue() != certificate) {
                    people.setIdCard(null);
                }
            }
            // Mirror shared attributes between the two records.
            people.setTrueName(employee.getName());
            employee.setSex(people.getSex());
            employee.setBirthday(people.getBirthday());
            employee.setNation(people.getNation());
        }
        employee.setStatus(WorkStatus.OK);
        employeeService.createEmployee(employee, people);
        map.put("message", "success");
    } catch (Exception e) {
        // was: log.error(e.getStackTrace().toString()) — that logs only the
        // array's Object.toString(); log the throwable with its stack trace.
        log.error("Failed to create employee", e);
        map.put("message", e.getMessage());
    }
    return map;
}
/**
 * Applies an adjustment (e.g. transfer / position change) to an existing
 * employee via {@code adjustmentEmployee}.
 *
 * @param employee employee with the adjusted fields set
 * @return map with "message" = "success" on success, otherwise the exception message
 */
@ResponseBody
@RequestMapping(value = "/update", method = RequestMethod.POST)
public Map<String, Object> updateEmployee(Employee employee) {
    Map<String, Object> map = new HashMap<String, Object>();
    try {
        employeeService.adjustmentEmployee(employee);
        map.put("message", "success");
    } catch (Exception e) {
        // was: log.error(e.getMessage().toString()) — NPEs for exceptions with
        // no message and drops the stack trace; log the throwable instead.
        log.error("Failed to adjust employee", e);
        map.put("message", e.getMessage());
    }
    return map;
}
/**
 * Updates an existing employee together with the linked citizen (People)
 * record.
 *
 * @param employee employee fields to persist
 * @param people citizen-record fields to persist
 * @return map with "message" = "success" on success, otherwise the exception message
 */
@RequestMapping(value = "/edit", method = RequestMethod.POST)
@ResponseBody
public Map<String, Object> editEmployee(@ModelAttribute Employee employee, @ModelAttribute People people) {
    Map<String, Object> result = new HashMap<String, Object>();
    try {
        employeeService.updateEmployee(employee, people);
        result.put("message", "success");
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        e.printStackTrace();
        result.put("message", e.getMessage());
    }
    return result;
}
/**
 * Updates the personal information (employee + citizen record) of a user.
 *
 * @param employee employee fields to persist
 * @param people citizen-record fields to persist
 * @return map with a "message" entry: "成功!" on success, otherwise the
 *         exception message
 */
@RequestMapping(value = "/edituserinfo", method = RequestMethod.POST)
@ResponseBody
public Map<String, Object> editUserInfo(Employee employee, People people) {
    Map<String, Object> result = new HashMap<String, Object>();
    try {
        employeeService.updateUserInfo(employee, people);
        result.put("message", "成功!");
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        e.printStackTrace();
        result.put("message", e.getMessage());
    }
    return result;
}
/**
 * Deletes the employee with the given id.
 *
 * @param employeeId id of the employee to delete
 * @return map with "message" = "success" on success, "error" on failure
 */
@RequestMapping(value = "/delete", method = RequestMethod.POST)
@ResponseBody
public Map<String, Object> deleteEmployee(String employeeId) {
    Map<String, Object> map = new HashMap<String, Object>();
    try {
        employeeService.deleteEmployeeById(employeeId);
        map.put("message", "success");
    } catch (Exception e) {
        // was: log.error(e.getStackTrace().toString()) — useless array
        // toString; log the throwable with its stack trace instead.
        log.error("Failed to delete employee {}", employeeId, e);
        map.put("message", "error");
    }
    return map;
}
/**
 * Deletes an employee and, when the employee has a linked user account,
 * cancels both the account and the site user by setting their status to
 * value 7.
 *
 * @param employeeId id of the employee to delete
 * @return map with "message" = "success" or "error"
 */
@RequestMapping(value = "/deleteAndCancelIt", method = RequestMethod.POST)
@ResponseBody
public Map<String, Object> deleteEmployeeAndCancelIt(String employeeId) {
    // NOTE(review): appId is always null here; getSiteUserByAccountId
    // presumably treats null as "any app" — confirm with the service.
    String appId = null;
    Map<String, Object> map = new HashMap<String, Object>();
    try {
        Employee emp = employeeService.getSingleEmployeeById(employeeId);
        if (null != emp) {
            if (null != emp.getAccountId()) {
                UserAccount userAccount = userService.getUserAccountByIdAndStatus(emp.getAccountId());
                SiteUser siteUser = userService.getSiteUserByAccountId(emp.getAccountId(), appId);
                if (null != userAccount) {
                    // 7 — presumably the "cancelled" status; confirm against UserStatus.
                    userAccount.setStatus(UserStatus.fromValue(7));
                    userService.updateUserAccount(userAccount);
                }
                if (null != siteUser) {
                    siteUser.setStatus(UserStatus.fromValue(7));
                    userService.updateSiteUser(siteUser);
                }
            }
            employeeService.deleteEmployeeById(employeeId);
            map.put("message", "success");
        } else {
            // Previously an unknown id silently returned an empty map; report it.
            map.put("message", "error");
        }
    } catch (Exception e) {
        // was: log.error(e.getStackTrace().toString()) — useless array toString.
        log.error("Failed to delete and cancel employee {}", employeeId, e);
        map.put("message", "error");
    }
    return map;
}
/*@RequestMapping(value = "/single/id/{id}", method = RequestMethod.GET)
@ResponseBody
public Map<String, Object> getEmployee(@PathVariable("id")
int id) {
Map<String, Object> map = new HashMap<String, Object>();
try {
map = employeeService.getEmployeeById(id);
} catch (Exception e) {
log.error(e.getStackTrace().toString());
e.printStackTrace();
map.put("message", "获取员工信息异常!");
}
return map;
}*/
/**
 * Returns the full detail view of one employee as JSON: base fields, the
 * linked citizen (People) record with its certificate number and type, and
 * resolved display names for company, office, department, position and
 * positional title.
 *
 * @param id employee id
 * @return JSON object with the employee details
 */
@RequestMapping(value = "/single/id/{id}", method = RequestMethod.GET)
@ResponseBody
public JSONObject getEmployee(@PathVariable("id") String id) {
    JSONObject json = new JSONObject();
    Employee employee = employeeService.getSingleEmployeeById(id);
    json.put("id", employee.getId());
    json.put("name", employee.getName());
    json.put("code", employee.getCode());
    json.put("education", employee.getEducation());
    if (employee.getEducation() != null) {
        json.put("educationName", employee.getEducation().getText());
    }
    json.put("email", employee.getEmail());
    json.put("telephone", employee.getTelephone());
    json.put("deptManagerId", employee.getDeptManagerId());
    if (employee.getDeptManagerId() != null) {
        json.put("deptManagerName", "部门主管");
    } else {
        json.put("deptManagerName", "普通员工");
    }
    json.put("employeeProperty", employee.getEmployeeProperty());
    if (employee.getEmployeeProperty() != null) {
        json.put("employeePropertyName", employee.getEmployeeProperty().getText());
    }
    json.put("emergencyContactName", employee.getEmergencyContactName());
    json.put("emergencyContactPhone", employee.getEmergencyContactPhone());
    json.put("emergencyContactRelation", employee.getEmergencyContactRelation());
    json.put("peopleId", employee.getPeopleId());
    if (employee.getPeopleId() != null) {
        People people = peopleService.getPeopleById(employee.getPeopleId());
        // Only one certificate field is populated; report it with its type.
        if (StringUtils.isNotBlank(people.getIdCard())) {
            json.put("idCard", people.getIdCard());
            json.put("certificate", CertificatesInfo.IdCard.getValue());
        } else if (StringUtils.isNotBlank(people.getSocialSecurityCode())) {
            json.put("idCard", people.getSocialSecurityCode());
            json.put("certificate", CertificatesInfo.SocialSecurityCode.getValue());
        } else if (StringUtils.isNotBlank(people.getDriverLicenseNumber())) {
            json.put("idCard", people.getDriverLicenseNumber());
            json.put("certificate", CertificatesInfo.DriverLicenseNumber.getValue());
        } else if (StringUtils.isNotBlank(people.getPassportNumber())) {
            json.put("idCard", people.getPassportNumber());
            json.put("certificate", CertificatesInfo.PassportNumber.getValue());
        }
        json.put("sex", people.getSex());
        if (people.getSex() != null) {
            json.put("sexName", people.getSex().getText());
        }
        // Removed the duplicate unguarded json.put("sexName", ...) that threw
        // an NPE whenever sex was null.
        if (people.getBirthday() != null) {
            json.put("birthday", people.getBirthday());
        }
        json.put("nation", people.getNation());
        if (people.getNation() != null) {
            json.put("nationName", people.getNation().getText());
        }
    }
    String globleCompanyId = organizationService.getCurrentCompanyId();
    json.put("companyId", employee.getCompanyId());
    json.put("officeId", employee.getOfficeId());
    // Fixed: the original compared the company ids with !=, i.e. by
    // reference, which is wrong for Strings; compare by value (null-safe).
    if (!StringUtils.equals(globleCompanyId, employee.getCompanyId()) || null == employee.getOfficeId()) {
        json.put("companyAndOfficeId", "company" + employee.getCompanyId());
        json.put("showCompanyOrOffice", "company");
    } else {
        json.put("companyAndOfficeId", "office" + employee.getOfficeId());
        json.put("showCompanyOrOffice", "office");
    }
    if (employee.getCompanyId() != null) {
        Company comp = organizationService.getCompany(employee.getCompanyId());
        if (comp != null) {
            json.put("companyName", comp.getName());
        }
    }
    if (employee.getOfficeId() != null) {
        Office office = organizationService.getOffice(employee.getOfficeId());
        if (office != null) {
            json.put("officeName", office.getName());
        }
    }
    json.put("deptId", employee.getDeptId());
    if (employee.getDeptId() != null) {
        Department dept = organizationService.getDepartment(employee.getDeptId());
        if (dept != null) {
            json.put("deptName", dept.getName());
        }
    }
    json.put("positionId", employee.getPositionId());
    if (employee.getPositionId() != null) {
        Position position = organizationService.getPosition(employee.getPositionId());
        if (position != null) {
            json.put("positionName", position.getName());
        }
    }
    json.put("positionalTitleId", employee.getPositionalTitleId());
    if (employee.getPositionalTitleId() != null) {
        PositionalTitle positionalTitle = organizationService.getPositionalTitle(employee.getPositionalTitleId());
        if (positionalTitle != null) {
            json.put("positionalTitleName", positionalTitle.getName());
        }
    }
    if (employee.getJoinTime() != null) {
        json.put("joinTime", employee.getJoinTime());
    }
    return json;
}
/**
 * Returns every employee as a JSON array of {id, name} pairs, suitable for
 * populating selection lists.
 *
 * @return JSON array; empty when there are no employees or lookup fails
 */
@RequestMapping(value = "/selectList", method = RequestMethod.GET)
@ResponseBody
public JSONArray getAllEmployee() {
    JSONArray result = new JSONArray();
    try {
        for (Employee employee : employeeService.getAllEmployee()) {
            JSONObject item = new JSONObject();
            item.put("id", employee.getId());
            item.put("name", employee.getName());
            result.add(item);
        }
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        e.printStackTrace();
    }
    return result;
}
/**
 * Returns the site-user-id / employee-name pairs assembled by the service
 * layer (shape of each element is defined by the service — confirm there).
 */
@RequestMapping(value = "/selectSiteUserIdAndEmployeeNameList", method = RequestMethod.GET)
@ResponseBody
public List<Object> selectSiteUserIdAndEmployeeNameList() {
    return employeeService.selectSiteUserIdAndEmployeeNameList();
}
/**
 * Returns one page of employees matching the optional filters as
 * {"data": [...], "total": n} for the employee-list grid.
 *
 * @param page page index (interpretation — 0- or 1-based — is defined by the
 *        service layer; confirm there)
 * @param pageSize rows per page
 * @param code optional employee code filter
 * @param name optional name filter
 * @param telephone optional telephone filter
 * @param deptName optional department-name filter
 * @return JSON object with "data" (array of employee rows) and "total"
 */
@RequestMapping(value = "/findListByPage", method = RequestMethod.POST)
@ResponseBody
public JSONObject findEmployeeListByPage(int page, int pageSize, String code, String name, String telephone,
    String deptName) {
    PageView pageView = new PageView();
    pageView = employeeService.findEmployeeByConditionAndPage(page, pageSize, code, name, telephone, deptName);
    JSONArray array = new JSONArray();
    JSONObject jsonObject = new JSONObject();
    @SuppressWarnings("unchecked")
    List<Employee> list = pageView.getList();
    for (Employee employee : list) {
        JSONObject json = new JSONObject();
        json.put("id", employee.getId());
        json.put("name", employee.getName());
        json.put("code", employee.getCode());
        json.put("email", employee.getEmail());
        json.put("telephone", employee.getTelephone());
        json.put("companyId", employee.getCompanyId());
        // Resolve display names for the related organisation entities.
        if (employee.getCompanyId() != null) {
            Company comp = organizationService.getCompany(employee.getCompanyId());
            if (comp != null) {
                json.put("companyName", comp.getName());
            }
        }
        json.put("deptId", employee.getDeptId());
        if (employee.getDeptId() != null) {
            Department dept = organizationService.getDepartment(employee.getDeptId());
            if (dept != null) {
                json.put("deptName", dept.getName());
            }
        }
        json.put("positionId", employee.getPositionId());
        if (employee.getPositionId() != null) {
            Position position = organizationService.getPosition(employee.getPositionId());
            if (position != null) {
                json.put("positionName", position.getName());
            }
        }
        json.put("positionalTitleId", employee.getPositionalTitleId());
        if (employee.getPositionalTitleId() != null) {
            PositionalTitle positionalTitle = organizationService
                .getPositionalTitle(employee.getPositionalTitleId());
            if (positionalTitle != null) {
                json.put("positionalTitleName", positionalTitle.getName());
            }
        }
        array.add(json);
    }
    jsonObject.put("data", array);
    jsonObject.put("total", pageView.getTotalRecord());
    return jsonObject;
}
@RequestMapping(value = "/findList", method = RequestMethod.POST)
@ResponseBody
public List<Map<String, Object>> findEmployeeList(String code, String name, String telephone, int deptId) {
List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
try {
list = employeeService.findEmployeeByCondition(code, name, telephone, deptId);
} catch (Exception e) {
log.error(e.getMessage(), e);
e.printStackTrace();
}
return list;
}
@RequestMapping(value = "/findByDeptGroupId", method = RequestMethod.GET)
@ResponseBody
public JSONArray findEmployeeByDeptGroupId(String deptGroupId) {
List<Employee> list = employeeService.findEmployeeByDeptGroupId(deptGroupId);
JSONArray array = new JSONArray();
for (Employee employee : list) {
JSONObject json = new JSONObject();
json.put("id", employee.getId());
json.put("name", employee.getName());
json.put("code", employee.getCode());
json.put("deptId", employee.getDeptId());
if (employee.getDeptId() != null) {
Department dept = organizationService.getDepartment(employee.getDeptId());
if (dept != null) {
json.put("deptName", dept.getName());
}
}
array.add(json);
}
return array;
}
/**
 * Counts employees belonging to the given department group.
 *
 * @param deptGroupId department group id; must not be null
 * @return the employee count, or 0 when the lookup fails
 */
@RequestMapping(value = "/getCountByDeptGroupId", method = RequestMethod.GET)
@ResponseBody
public long getCountEmployeeByDeptGroupId(String deptGroupId) {
    long count = 0;
    try {
        if (deptGroupId == null) {
            throw new AWSClientException(GlobalClientError.InvalidRequest, ClientReason.InvalidRequest);
        }
        count = employeeService.getCountEmployeeByDeptGroupId(deptGroupId);
    } catch (Exception e) {
        // was: log.error(e.getStackTrace().toString()) — useless array toString.
        log.error("Failed to count employees for dept group {}", deptGroupId, e);
    }
    return count;
}
@RequestMapping(value = "/findByCompanyId", method = RequestMethod.GET)
@ResponseBody
public List<Employee> findByCompanyId(String companyId) {
List<Employee> list = new ArrayList<Employee>();
EmployeeQuery employeeQuery = new EmployeeQuery();
employeeQuery.setCompanyId(companyId);
try {
list = employeeService.findEmployeeByQuery(employeeQuery);
} catch (Exception e) {
log.error(e.getMessage(), e);
e.printStackTrace();
}
return list;
}
/**
 * Counts employees belonging to the given company.
 *
 * @param companyId company id; must not be null
 * @return the employee count, or 0 when the lookup fails
 */
@RequestMapping(value = "/getCountByCompanyId", method = RequestMethod.GET)
@ResponseBody
public long getCountByCompanyId(String companyId) {
    EmployeeQuery employeeQuery = new EmployeeQuery();
    employeeQuery.setCompanyId(companyId);
    long count = 0;
    try {
        if (companyId == null) {
            throw new AWSClientException(GlobalClientError.InvalidRequest, ClientReason.InvalidRequest);
        }
        count = employeeService.getCountEmployeeByQuery(employeeQuery);
    } catch (Exception e) {
        // was: log.error(e.getStackTrace().toString()) — useless array toString.
        log.error("Failed to count employees for company {}", companyId, e);
    }
    return count;
}
@RequestMapping(value = "/findByOfficeId", method = RequestMethod.GET)
@ResponseBody
public JSONArray findByOfficeId(String officeId) {
List<Employee> list = employeeService.findEmployeeByOfficeId(officeId);
JSONArray array = new JSONArray();
for (Employee employee : list) {
JSONObject json = new JSONObject();
json.put("id", employee.getId());
json.put("name", employee.getName());
json.put("code", employee.getCode());
json.put("deptId", employee.getDeptId());
if (employee.getDeptId() != null) {
Department dept = organizationService.getDepartment(employee.getDeptId());
if (dept != null) {
json.put("deptName", dept.getName());
}
}
array.add(json);
}
return array;
}
/**
 * Counts employees belonging to the given office.
 *
 * @param officeId office id; must not be null
 * @return the employee count, or 0 when the lookup fails
 */
@RequestMapping(value = "/getCountByOfficeId", method = RequestMethod.GET)
@ResponseBody
public long getCountByOfficeId(String officeId) {
    long count = 0;
    try {
        if (officeId == null) {
            throw new AWSClientException(GlobalClientError.InvalidRequest, ClientReason.InvalidRequest);
        }
        count = employeeService.getCountEmployeeByOfficeId(officeId);
    } catch (Exception e) {
        // was: log.error(e.getStackTrace().toString()) — useless array toString.
        log.error("Failed to count employees for office {}", officeId, e);
    }
    return count;
}
/**
 * Lists employees of the given department, keeping only those whose work
 * status value is 0, 1 or 2 (presumably the "active" statuses — confirm
 * against WorkStatus).
 *
 * @param deptId department id
 * @return matching employees; empty list when none match or the lookup fails
 */
@RequestMapping(value = "/findByDeptId", method = RequestMethod.GET)
@ResponseBody
public List<Employee> findByDeptId(String deptId) {
    List<Employee> result = new ArrayList<Employee>();
    List<Employee> data = new ArrayList<Employee>();
    EmployeeQuery employeeQuery = new EmployeeQuery();
    employeeQuery.setDeptId(deptId);
    try {
        data = employeeService.findEmployeeByQuery(employeeQuery);
        if (null != data) {
            for (Employee emp : data) {
                WorkStatus ws = emp.getStatus();
                int workstatus = ws.getValue();
                // Filter out employees in other work statuses.
                if (workstatus == 0 || workstatus == 1 || workstatus == 2) {
                    result.add(emp);
                }
            }
        }
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        e.printStackTrace();
    }
    return result;
}
// @RequestMapping(value = "/getCountByDeptId", method = RequestMethod.GET)
// @ResponseBody
// public int getCountByDeptId(int deptId) {
// EmployeeQuery employeeQuery = new EmployeeQuery();
// employeeQuery.setDeptId(deptId);
// int count = 0;
// try {
// if (deptId == 0) {
// throw new AWSClientException(SystemError.InvalidRequest, "请求失败!");
// }
// count = employeeService.getCountEmployeeByQuery(employeeQuery);
// } catch (Exception e) {
// log.error(e.getStackTrace().toString());
// e.printStackTrace();
// }
// return count;
// }
@RequestMapping(value = "/getCountByDeptId", method = RequestMethod.GET)
@ResponseBody
public int getCountByDeptId(String deptId) {
    // Derived from the filtered list so the count always matches findByDeptId.
    return findByDeptId(deptId).size();
}
@RequestMapping(value = "/findByCommissionerId", method = RequestMethod.GET)
@ResponseBody
public List<Employee> findByCommissionerId(String commissionerId) {
    // A null commissioner id is a client error and is propagated to the caller.
    if (commissionerId == null) {
        throw new AWSClientException(GlobalClientError.InvalidRequest, ClientReason.InvalidRequest);
    }
    List<Employee> employees = new ArrayList<Employee>();
    try {
        employees = employeeService.findEmployeeByCommissionerId(commissionerId);
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        e.printStackTrace();
    }
    return employees;
}
@RequestMapping(value = "/getCountByCommissionerId", method = RequestMethod.GET)
@ResponseBody
public long getCountByCommissionerId(String commissionerId) {
    // Number of employees supervised by the given commissioner; 0 on error.
    long count = 0;
    try {
        if (commissionerId == null) {
            throw new AWSClientException(GlobalClientError.InvalidRequest, ClientReason.InvalidRequest);
        }
        count = employeeService.getCountEmployeeByCommissionerId(commissionerId);
    } catch (Exception e) {
        // Log the full exception; e.getStackTrace().toString() printed only the
        // array's identity hash and carried no diagnostic value.
        log.error(e.getMessage(), e);
    }
    return count;
}
@RequestMapping(value = "/findByPositionId", method = RequestMethod.GET)
@ResponseBody
public List<Employee> findByPositionId(String positionId) {
    // Query all employees holding the given position.
    List<Employee> employees = new ArrayList<Employee>();
    EmployeeQuery query = new EmployeeQuery();
    query.setPositionId(positionId);
    try {
        employees = employeeService.findEmployeeByQuery(query);
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        e.printStackTrace();
    }
    return employees;
}
@RequestMapping(value = "/getCountByPositionId", method = RequestMethod.GET)
@ResponseBody
public long getCountByPositionId(String positionId) {
    // Number of employees holding the given position; 0 on error.
    EmployeeQuery employeeQuery = new EmployeeQuery();
    employeeQuery.setPositionId(positionId);
    long count = 0;
    try {
        if (positionId == null) {
            throw new AWSClientException(GlobalClientError.InvalidRequest, ClientReason.InvalidRequest);
        }
        count = employeeService.getCountEmployeeByQuery(employeeQuery);
    } catch (Exception e) {
        // Log the full exception; the previous e.getStackTrace().toString()
        // printed only the array's identity hash.
        log.error(e.getMessage(), e);
    }
    return count;
}
@RequestMapping(value = "/findByPositionalTitleId", method = RequestMethod.GET)
@ResponseBody
public List<Employee> findByPositionalTitleId(String positionalTitleId) {
    // Query all employees carrying the given positional title.
    List<Employee> employees = new ArrayList<Employee>();
    EmployeeQuery query = new EmployeeQuery();
    query.setPositionalTitleId(positionalTitleId);
    try {
        employees = employeeService.findEmployeeByQuery(query);
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        e.printStackTrace();
    }
    return employees;
}
@RequestMapping(value = "/getCountByPositionalTitleId", method = RequestMethod.GET)
@ResponseBody
public long getCountByPositionalTitleId(String positionalTitleId) {
    // Number of employees carrying the given positional title; 0 on error.
    EmployeeQuery employeeQuery = new EmployeeQuery();
    employeeQuery.setPositionalTitleId(positionalTitleId);
    long count = 0;
    try {
        if (positionalTitleId == null) {
            throw new AWSClientException(GlobalClientError.InvalidRequest, ClientReason.InvalidRequest);
        }
        count = employeeService.getCountEmployeeByQuery(employeeQuery);
    } catch (Exception e) {
        // Log the full exception; the previous e.getStackTrace().toString()
        // printed only the array's identity hash.
        log.error(e.getMessage(), e);
    }
    return count;
}
@RequestMapping(value = "/findByAread", method = RequestMethod.GET)
@ResponseBody
public List<Employee> findByAread(String areaId) {
    // Employees of every sub-company belonging to the given area.
    List<Employee> list = new ArrayList<Employee>();
    try {
        Company company = new Company();
        company.setAreaId(areaId);
        Iterable<Company> companyList = organizationService.findSubCompanyList(company);
        for (Company comp : companyList) {
            EmployeeQuery employeeQuery = new EmployeeQuery();
            employeeQuery.setCompanyId(comp.getId());
            List<Employee> employees = employeeService.findEmployeeByQuery(employeeQuery);
            // Accumulate the results; the previous code overwrote `list` on each
            // iteration, so only the last company's employees were ever returned.
            if (employees != null) {
                list.addAll(employees);
            }
        }
    } catch (Exception e) {
        log.error(e.getMessage(), e);
    }
    return list;
}
@RequestMapping(value = "/getCountByAreaId", method = RequestMethod.GET)
@ResponseBody
public long getCountByAreaId(String areaId) {
    // Total employee count across every sub-company of the given area.
    long count = 0;
    try {
        Company company = new Company();
        company.setAreaId(areaId);
        Iterable<Company> companyList = organizationService.findSubCompanyList(company);
        for (Company comp : companyList) {
            EmployeeQuery employeeQuery = new EmployeeQuery();
            employeeQuery.setCompanyId(comp.getId());
            // Sum the per-company counts; the previous code overwrote `count`
            // each iteration, returning only the last company's count.
            count += employeeService.getCountEmployeeByQuery(employeeQuery);
        }
    } catch (Exception e) {
        log.error(e.getMessage(), e);
    }
    return count;
}
@RequestMapping(value = "/findEmployeeCode", method = RequestMethod.GET)
@ResponseBody
public boolean findEmployeeCode(@RequestParam(value = "employeId", defaultValue = "0") String employeId,
        String employeeCode) {
    // Returns true when the job number is available for the given employee:
    // either no employee uses it yet, or it already belongs to this employee.
    // Within one deployment unit the job number must be unique.
    List<String> companyIds = organizationService.getCurrentAndSubCompanyIds();
    Employee employee = employeeService.getEmployeeByJobNumber(companyIds, employeeCode);
    if (employee != null) {
        // Compare ids by value; the previous `!=` compared String references and
        // could report a clash for the employee's own job number (or miss one).
        // employeId is never null thanks to the @RequestParam default.
        if (!employeId.equals(employee.getId())) {
            return false;
        }
    }
    return true;
}
@RequestMapping(value = "/findEmployeeEmail", method = RequestMethod.GET)
@ResponseBody
public boolean findEmployeeEmail(@RequestParam(value = "employeId", defaultValue = "0") String employeId,
        String employeeEmail) {
    // Returns true when the e-mail address is available for the given employee:
    // either unused, or already belonging to this employee.
    // Within one deployment unit the e-mail address must be unique.
    List<String> companyIds = organizationService.getCurrentAndSubCompanyIds();
    Employee employee = employeeService.getEmployeeByEmail(companyIds, employeeEmail);
    if (employee != null) {
        // Compare ids by value; the previous `!=` compared String references.
        // employeId is never null thanks to the @RequestParam default.
        if (!employeId.equals(employee.getId())) {
            return false;
        }
    }
    return true;
}
@RequestMapping(value = "/findEmployeeIdCard", method = RequestMethod.GET)
@ResponseBody
public boolean findEmployeeIdCard(@RequestParam(value = "employeId", defaultValue = "0") String employeId,
        String certificateNumber, String certificateType) {
    // Returns true when the certificate number is available for the given
    // employee (unused, or already tied to this employee's People record).
    People people = null;
    String certificateType1 = CertificatesInfo.IdCard.getValue() + "";
    String certificateType2 = CertificatesInfo.DriverLicenseNumber.getValue() + "";
    String certificateType3 = CertificatesInfo.SocialSecurityCode.getValue() + "";
    String certificateType4 = CertificatesInfo.PassportNumber.getValue() + "";
    // Id-card, driver-license and social-security numbers share one lookup
    // cascade; passports have their own lookup.
    if (certificateType1.equals(certificateType) || certificateType2.equals(certificateType)
            || certificateType3.equals(certificateType)) {
        people = peopleService.getPeopleByIdCode(certificateNumber);
        if (people == null) {
            people = peopleService.getPeopleByDriverLicenseNumber(certificateNumber);
            if (people == null) {
                people = peopleService.getPeopleBySocialCode(certificateNumber);
            }
        }
    } else if (certificateType4.equals(certificateType)) {
        people = peopleService.getPeopleByPassportNumber(certificateNumber);
    }
    if (people != null) { // the certificate number is already registered
        // NOTE(review): employeId is never null because of the @RequestParam
        // default ("0"); the null check is kept for safety.
        if (employeId != null) {
            Employee emp = employeeService.getSingleEmployeeById(employeId);
            // Compare ids by value; the previous `!=` was a reference comparison.
            // Objects.equals is null-safe and works for boxed ids as well.
            if ((emp != null) && !java.util.Objects.equals(emp.getPeopleId(), people.getId())) {
                // The employee's People record differs from the one owning this
                // certificate number: the number is taken by another user.
                return false;
            }
            return true;
        }
        return false;
    }
    return true;
}
@RequestMapping(value = "/createEmployeeAndAccount", method = RequestMethod.POST)
@ResponseBody
public Map<String, String> createEmployeeAndAccount(String name, String code, String email, String account,
        String pwd) {
    // Creates an employee in the current company together with a login account.
    // Responds with {"message": "success"} or {"message": "failure"}.
    Map<String, String> map = new HashMap<String, String>();
    try {
        Employee emp = new Employee();
        emp.setCode(code);
        emp.setName(name);
        emp.setEmail(email);
        String companyId = organizationService.getCurrentCompanyId();
        emp.setCompanyId(companyId);
        emp.setStatus(WorkStatus.OK);
        employeeService.createEmployeeAndAccount(emp, account, pwd);
        map.put("message", "success");
    } catch (Exception e) {
        // Previously the exception was swallowed silently, making failures
        // impossible to diagnose; log it before reporting failure.
        log.error(e.getMessage(), e);
        map.put("message", "failure");
    }
    return map;
}
}
|
joaosorio23/LAPR4 | base.core/src/test/java/eapli/base/gestaoServicosHD/domain/ServicoTest.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package eapli.base.gestaoServicosHD.domain;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Unit tests for {@code Servico} construction invariants: the service code must
 * be alphanumeric and non-empty, and both descriptions must be non-empty and
 * within their maximum lengths (70 / 100 characters).
 *
 * @author gonca
 */
public class ServicoTest {

    @Test(expected = IllegalArgumentException.class)
    public void assegurarCodigoServicoNumerico() {
        Servico servico = new Servico(new CodigoServico("???"), new DescricaoBreve("descB1"), new DescricaoCompleta("descCS1"), true);
    }

    @Test(expected = IllegalArgumentException.class)
    public void assegurarDescBreveMenor70Carateres() {
        // 71 characters: one past the allowed maximum for the short description.
        StringBuilder outputBuffer = new StringBuilder(71);
        for (int i = 0; i < 71; i++) {
            outputBuffer.append("1");
        }
        DescricaoBreve descBreve = new DescricaoBreve(outputBuffer.toString());
        Servico servico = new Servico(new CodigoServico("S1"), descBreve, new DescricaoCompleta("descCS1"), true);
    }

    @Test(expected = IllegalArgumentException.class)
    public void assegurarDescCompletaMenor100Carateres() {
        // 101 characters: one past the allowed maximum for the full description.
        StringBuilder outputBuffer = new StringBuilder(101);
        for (int i = 0; i < 101; i++) {
            outputBuffer.append("1");
        }
        DescricaoCompleta descricaoCompleta = new DescricaoCompleta(outputBuffer.toString());
        Servico servico = new Servico(new CodigoServico("S1"), new DescricaoBreve("descB1"), descricaoCompleta, true);
    }

    @Test(expected = IllegalArgumentException.class)
    public void assegurarCodigoServicoNotEmpty() {
        Servico servico = new Servico(new CodigoServico(""), new DescricaoBreve("descB1"), new DescricaoCompleta("descCS1"), true);
    }

    @Test(expected = IllegalArgumentException.class)
    public void assegurarDescricaoBreveNotEmpty() {
        Servico servico = new Servico(new CodigoServico("S1"), new DescricaoBreve(""), new DescricaoCompleta("descCS1"), true);
    }

    @Test(expected = IllegalArgumentException.class)
    public void assegurarDescricaoCompletaNotEmpty() {
        Servico servico = new Servico(new CodigoServico("S1"), new DescricaoBreve("descB1"), new DescricaoCompleta(""), true);
    }

    /** A fully valid service must be constructible without any exception. */
    @Test
    public void assegurarFormularioTrue() {
        // The @Test annotation was missing, so this case never executed; the
        // assertion now checks the constructed instance instead of a constant.
        Servico servico = new Servico(new CodigoServico("S1"), new DescricaoBreve("descB1"), new DescricaoCompleta("descCS1"), true);
        assertNotNull(servico);
    }
}
|
RealAnna/go-utils | pkg/api/models/uniform.go | package models
import (
"crypto/sha1"
"encoding/hex"
"encoding/json"
"fmt"
"time"
)
// Integration represents a Keptn service a.k.a. Keptn integration
// and contains the name, id and subscription data as well as other information
// needed to register a Keptn service to the control plane
type Integration struct {
	ID       string   `json:"id" bson:"_id"`
	Name     string   `json:"name" bson:"name"`
	MetaData MetaData `json:"metadata" bson:"metadata"`
	// Deprecated: for backwards compatibility Subscription is populated
	// but new code shall use Subscriptions
	Subscription  Subscription        `json:"subscription" bson:"subscription"`
	Subscriptions []EventSubscription `json:"subscriptions" bson:"subscriptions"`
}

// MetaData contains important information about the Keptn service which is used
// during registering the service to the control plane
type MetaData struct {
	Hostname           string             `json:"hostname" bson:"hostname"`
	IntegrationVersion string             `json:"integrationversion" bson:"integrationversion"`
	DistributorVersion string             `json:"distributorversion" bson:"distributorversion"`
	Location           string             `json:"location" bson:"location"`
	KubernetesMetaData KubernetesMetaData `json:"kubernetesmetadata" bson:"kubernetesmetadata"`
	// LastSeen is the time the service was last observed by the control plane.
	LastSeen time.Time `json:"lastseen" bson:"lastseen"`
}

// Subscription describes to what events the Keptn service is subscribed to
// Deprecated: superseded by EventSubscription; kept for backwards compatibility.
type Subscription struct {
	Topics []string           `json:"topics" bson:"topics"`
	Status string             `json:"status" bson:"status"`
	Filter SubscriptionFilter `json:"filter" bson:"filter"`
}

// EventSubscription describes to what events the Keptn service is subscribed to
type EventSubscription struct {
	ID     string                  `json:"id" bson:"id"`
	Event  string                  `json:"event" bson:"event"`
	Filter EventSubscriptionFilter `json:"filter" bson:"filter"`
}

// SubscriptionFilter is used to filter subscriptions by project stage or service
// Deprecated: superseded by EventSubscriptionFilter.
type SubscriptionFilter struct {
	Project string `json:"project" bson:"project"`
	Stage   string `json:"stage" bson:"stage"`
	Service string `json:"service" bson:"service"`
}

// EventSubscriptionFilter is used to filter subscriptions by projects stages and/or services
type EventSubscriptionFilter struct {
	Projects []string `json:"projects" bson:"projects"`
	Stages   []string `json:"stages" bson:"stages"`
	Services []string `json:"services" bson:"services"`
}

// KubernetesMetaData represents metadata specific to Kubernetes
type KubernetesMetaData struct {
	Namespace      string `json:"namespace" bson:"namespace"`
	PodName        string `json:"podname" bson:"podname"`
	DeploymentName string `json:"deploymentname" bson:"deploymentname"`
}

// IntegrationID is the unique id of a Keptn service a.k.a "Keptn integration"
// It is composed by a name, the namespace the service resides in and the node name of the cluster node
type IntegrationID struct {
	Name      string `json:"name" bson:"name"`
	Namespace string `json:"namespace" bson:"namespace"`
	NodeName  string `json:"nodename" bson:"nodename"`
}
// Hash returns the hex-encoded SHA-1 digest of "<name>-<namespace>-<nodename>".
// It fails when any of the three identifying fields is empty.
func (i IntegrationID) Hash() (string, error) {
	if !i.validate() {
		return "", fmt.Errorf("incomplete integration ID. At least 'name','namespace' and 'nodename' must be set")
	}
	hasher := sha1.New() //nolint:gosec
	// Write the composed key straight into the hasher; same bytes as before.
	fmt.Fprintf(hasher, "%s-%s-%s", i.Name, i.Namespace, i.NodeName)
	return hex.EncodeToString(hasher.Sum(nil)), nil
}
// validate reports whether all three identifying fields are non-empty.
func (i IntegrationID) validate() bool {
	switch {
	case i.Name == "", i.Namespace == "", i.NodeName == "":
		return false
	default:
		return true
	}
}
// ToJSON serializes the integration; a nil receiver yields (nil, nil).
func (i *Integration) ToJSON() ([]byte, error) {
	if i != nil {
		return json.Marshal(i)
	}
	return nil, nil
}
// FromJSON replaces the receiver with the integration decoded from b.
// On a decode error the receiver is left untouched.
func (i *Integration) FromJSON(b []byte) error {
	decoded := Integration{}
	err := json.Unmarshal(b, &decoded)
	if err == nil {
		*i = decoded
	}
	return err
}
// ToJSON serializes the subscription; a nil receiver yields (nil, nil).
func (s *EventSubscription) ToJSON() ([]byte, error) {
	if s != nil {
		return json.Marshal(s)
	}
	return nil, nil
}
|
posl/jProphet | src/main/java/jp/posl/jprophet/PatchedProjectGenerator.java | package jp.posl.jprophet;
import java.io.File;
import java.io.IOException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import jp.posl.jprophet.patch.PatchCandidate;
import jp.posl.jprophet.project.Project;
import jp.posl.jprophet.project.ProjectFactory;
import com.github.javaparser.ast.CompilationUnit;
/**
 * Generates a complete patched project from repair patch candidates.
 * (Javadoc translated from the original Japanese.)
 */
public class PatchedProjectGenerator {
    private final RepairConfiguration config;
    private final Project projectForTestValidation;
    private final String originalProjectPath;
    private final String patchTargetProjectPath;
    // Last file a patch was written into; null until the first applyPatch call.
    private File lastPatchedFile;
    // Pristine counterpart of lastPatchedFile, used by unpatch() to undo the patch.
    private File originalFileOfLastPatched;

    /**
     * Copies the original project into a test-validation location and prepares
     * to generate patched files from repair patch candidates.
     * On copy failure the error is printed and the process exits with -1.
     *
     * @param config repair configuration, including the destination path for the
     *               test-validation project
     */
    public PatchedProjectGenerator(RepairConfiguration config) {
        this.config = config;
        this.originalProjectPath = this.config.getTargetProject().getRootPath();
        this.patchTargetProjectPath = this.config.getFixedProjectDirPath() + FilenameUtils.getBaseName(this.originalProjectPath);
        final File originalProjectDir = new File(this.originalProjectPath);
        final File patchTargetProjectDir = new File(this.patchTargetProjectPath);
        try {
            FileUtils.copyDirectory(originalProjectDir, patchTargetProjectDir);
        } catch (IOException e) {
            System.err.println(e.getMessage());
            e.printStackTrace();
            System.exit(-1);
        }
        final ProjectFactory projectFactory = new ProjectFactory();
        this.projectForTestValidation = projectFactory.create(this.config, this.patchTargetProjectPath);
    }

    /**
     * Writes the file patched with the given candidate into the copied project,
     * first undoing the previously applied patch (if any).
     *
     * @param patchCandidate repair patch candidate to apply
     * @return the project for test validation; the same object is returned on
     *         every call — only its files on disk change
     */
    public Project applyPatch(PatchCandidate patchCandidate){
        if(this.lastPatchedFile != null) this.unpatch();
        final String patchTargetFilePath = patchCandidate.getFilePath();
        // Map the candidate's path inside the original project onto the copy.
        final File patchTargetFile = new File(this.patchTargetProjectPath + patchTargetFilePath.replace(this.originalProjectPath, ""));
        final CompilationUnit cu = patchCandidate.getFixedCompilationUnit();
        final String patchedSourceCode = NodeUtility.lexicalPreservingPrint(cu);
        try {
            FileUtils.write(patchTargetFile, patchedSourceCode, "utf-8");
        } catch (IOException e) {
            System.err.println(e.getMessage());
            e.printStackTrace();
            System.exit(-1);
        }
        this.lastPatchedFile = patchTargetFile;
        this.originalFileOfLastPatched = new File(patchCandidate.getFilePath());
        return this.projectForTestValidation;
    }

    /**
     * Restores the previously patched file in the test-validation project from
     * its original counterpart.
     * <p>
     * (Turning this into a public method that unpatches a specific patch
     * candidate may be considered in the future.)
     * </p>
     */
    private void unpatch(){
        try {
            FileUtils.copyFile(this.originalFileOfLastPatched, this.lastPatchedFile, false);
        } catch (IOException e) {
            System.err.println(e.getMessage());
            e.printStackTrace();
            System.exit(-1);
        }
    }
}
|
XiaoBoya/pkg | plugin/component/tracing/tracing.go | /*
Copyright 2021 The Katanomi Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package tracing
import (
"io"
"github.com/emicklei/go-restful/v3"
"github.com/katanomi/pkg/plugin/config"
"github.com/opentracing/opentracing-go"
"github.com/opentracing/opentracing-go/ext"
"github.com/uber/jaeger-client-go"
jaegercfg "github.com/uber/jaeger-client-go/config"
"github.com/uber/jaeger-lib/metrics"
)
// Config initialises the global Jaeger tracer from the given trace
// configuration. It returns the tracer's closer, or (nil, nil) when tracing is
// disabled.
func Config(c *config.TraceConfig) (io.Closer, error) {
	if !c.Enable {
		return nil, nil
	}
	sampler := &jaegercfg.SamplerConfig{
		Type:              c.SampleType,
		Param:             c.SampleParam,
		SamplingServerURL: c.SampleServerURL,
	}
	reporter := &jaegercfg.ReporterConfig{
		LocalAgentHostPort: c.JaegerUrl,
		LogSpans:           true,
	}
	cfg := jaegercfg.Configuration{
		Sampler:  sampler,
		Reporter: reporter,
	}
	// Install the tracer globally, with the standard logger and no metrics.
	closer, err := cfg.InitGlobalTracer(
		"plugin",
		jaegercfg.Logger(jaeger.StdLogger),
		jaegercfg.Metrics(metrics.NullFactory),
	)
	if err != nil {
		return nil, err
	}
	return closer, nil
}
// Filter is a go-restful filter that opens an opentracing span for each
// request, continuing any trace context propagated in the incoming HTTP
// headers, and tags the span with the request URL and method.
func Filter(req *restful.Request, resp *restful.Response, chain *restful.FilterChain) {
	carrier := opentracing.HTTPHeadersCarrier(req.Request.Header)
	parentCtx, _ := opentracing.GlobalTracer().Extract(opentracing.HTTPHeaders, carrier)
	span, ctx := opentracing.StartSpanFromContext(req.Request.Context(), "handle request", ext.RPCServerOption(parentCtx))
	defer span.Finish()
	ext.SpanKindRPCClient.Set(span)
	ext.HTTPUrl.Set(span, req.Request.URL.String())
	ext.HTTPMethod.Set(span, req.Request.Method)
	// Propagate the span context to downstream handlers.
	req.Request = req.Request.WithContext(ctx)
	chain.ProcessFilter(req, resp)
}
|
falconlee236/Algorithm_Practice | BaekJoon/10819.cpp | <reponame>falconlee236/Algorithm_Practice
/*10819*/
/*Got i1*/
#include <iostream>
#include <vector>
#include <cstdlib>
#include <algorithm>
using namespace std;
// BOJ 10819: maximize sum of |a[i] - a[i-1]| over all permutations.
int main(){
    int n;
    scanf("%d", &n);
    vector<int> v;
    for (int i = 0; i < n; i++) {
        // Renamed from 'n': the inner declaration shadowed the element count,
        // which worked only because the count had been read beforehand.
        int x;
        scanf("%d", &x);
        v.push_back(x);
    }
    // Start from the sorted (lexicographically smallest) order so that
    // next_permutation enumerates every permutation exactly once.
    sort(v.begin(), v.end());
    int ans = 0;
    do {
        int res = 0;
        for (int i = 1; i < n; i++) res += abs(v[i] - v[i - 1]);
        ans = max(ans, res);
    } while (next_permutation(v.begin(), v.end()));
    printf("%d", ans);
    return 0;
} |
Vladimare/legendary-broccoli | tnwrappers/mutex.cpp | #include "mutex.hpp"
// Constructor: creates the wrapped TNKernel mutex using the priority-ceiling
// attribute (ceiling = 1). id_mutex is zeroed first so tn_mutex_create sees an
// uninitialised control block.
mutex::mutex()
{
    this->m.id_mutex = 0;
    tn_mutex_create(&this->m, TN_MUTEX_ATTR_CEILING, 1);
}

// Destructor: deletes the kernel mutex, releasing its control block.
mutex::~mutex()
{
    tn_mutex_delete(&this->m);
}

// Blocking acquire with a timeout (presumably in kernel ticks — TODO confirm);
// returns the TNKernel status code.
int mutex::lock(int timeout)
{
    return tn_mutex_lock(&this->m, timeout);
}

// Non-blocking acquire attempt; returns the TNKernel status code.
int mutex::lockPolling()
{
    return tn_mutex_lock_polling(&this->m);
}

// Releases the mutex; returns the TNKernel status code.
int mutex::unlock()
{
    return tn_mutex_unlock(&this->m);
}
|
nasir-ikram1102/DoctorApp-React | src/_helpers/index.js | export { authHeader } from './auth-header';
export { history } from './history';
export { store } from './store'; |
linminglu/Fgame | game/email/logic/email.go | <filename>game/email/logic/email.go
package logic
import (
"encoding/json"
"fgame/fgame/common/lang"
commonlog "fgame/fgame/common/log"
droplogic "fgame/fgame/game/drop/logic"
droptemplate "fgame/fgame/game/drop/template"
emailentity "fgame/fgame/game/email/entity"
"fgame/fgame/game/email/pbutil"
playeremail "fgame/fgame/game/email/player"
"fgame/fgame/game/global"
inventorylogic "fgame/fgame/game/inventory/logic"
playerinventory "fgame/fgame/game/inventory/player"
itemtypes "fgame/fgame/game/item/types"
"fgame/fgame/game/player"
playerlogic "fgame/fgame/game/player/logic"
"fgame/fgame/game/player/types"
propertylogic "fgame/fgame/game/property/logic"
"fgame/fgame/pkg/idutil"
log "github.com/Sirupsen/logrus"
)
// HandleGetEmailAttachement claims the attachment of a single mail for the
// player. Flow: validate the mail id -> verify an unclaimed attachment exists
// -> split the attachment into inventory items and auto-use resources -> check
// inventory space -> grant items and resources -> mark the attachment received
// -> sync inventory/properties and notify the client.
func HandleGetEmailAttachement(pl player.Player, emailId int64) (err error) {
	emailManager := pl.GetPlayerDataManager(types.PlayerEmailDataManagerType).(*playeremail.PlayerEmailDataManager)
	// Validate arguments: the mail must exist for this player.
	_, emailObj := emailManager.GetEmail(emailId)
	if emailObj == nil {
		log.WithFields(
			log.Fields{
				"playerId": pl.GetId(),
				"emailId":  emailId,
			}).Warn("email:领取附件请求参数错误,无效的emailId")
		playerlogic.SendSystemMessage(pl, lang.CommonArgumentInvalid)
		return
	}
	// Reject mails with no attachment or whose attachment was already claimed.
	if emailManager.HasNotOrReceiveAttachment(emailId) {
		log.WithFields(
			log.Fields{
				"playerId": pl.GetId(),
				"emailId":  emailId,
			}).Warn("email:领取附件请求,该邮件没有附件信息")
		playerlogic.SendSystemMessage(pl, lang.EmailNoAttachment)
		return
	}
	// Split the raw attachment entries into inventory items and resources.
	var newItemList []*droptemplate.DropItemData
	var resMap map[itemtypes.ItemAutoUseResSubType]int32
	if len(emailObj.GetAttachmentInfo()) != 0 {
		newItemList, resMap = droplogic.SeperateItemDatas(emailObj.GetAttachmentInfo())
	}
	inventoryManager := pl.GetPlayerDataManager(types.PlayerInventoryDataManagerType).(*playerinventory.PlayerInventoryDataManager)
	// Items: require enough free inventory slots before granting anything.
	if len(newItemList) > 0 {
		if !inventoryManager.HasEnoughSlotsOfItemLevel(newItemList) {
			log.WithFields(
				log.Fields{
					"playerId": pl.GetId(),
					"emailId":  emailId,
				}).Warn("email:领取附件请求,所需背包空间不足")
			playerlogic.SendSystemMessage(pl, lang.InventorySlotNoEnough)
			return
		}
		// Add the items to the inventory; failure here is a programming error
		// because the capacity check above already passed.
		flag := inventoryManager.BatchAddOfItemLevel(newItemList, commonlog.InventoryLogReasonEmailAttachment, commonlog.InventoryLogReasonEmailAttachment.String())
		if !flag {
			panic("email: getAttachment add item should be ok")
		}
	}
	if len(resMap) > 0 {
		// Resources (gold / silver / level): credit with attachment log reasons.
		goldReasonText := commonlog.GoldLogReasonEmailAttachment.String()
		silverReasonText := commonlog.SilverLogReasonEmailAttachment.String()
		levelReasonText := commonlog.LevelLogReasonEmailAttachment.String()
		err = droplogic.AddRes(pl, resMap, commonlog.GoldLogReasonEmailAttachment, goldReasonText, commonlog.SilverLogReasonEmailAttachment, silverReasonText, commonlog.LevelLogReasonEmailAttachment, levelReasonText)
		if err != nil {
			return
		}
	}
	// Mark the mail's attachment as claimed.
	emailManager.ReceiveEmailAttachment(emailId)
	// Push the updated inventory to the client.
	inventorylogic.SnapInventoryChanged(pl)
	// Push the updated properties (resources) to the client.
	propertylogic.SnapChangedProperty(pl)
	scGetAttachment := pbutil.BuildSCGetAttachment(emailId, newItemList)
	pl.SendMsg(scGetAttachment)
	return
}
// HandleGetEmailAttachementBatch claims the attachments of every mail that
// still has an unclaimed attachment, in one operation. Items from all mails
// are merged before a single inventory-capacity check, so the batch either
// fully succeeds or grants nothing.
func HandleGetEmailAttachementBatch(pl player.Player) (err error) {
	emailManager := pl.GetPlayerDataManager(types.PlayerEmailDataManagerType).(*playeremail.PlayerEmailDataManager)
	inventoryManager := pl.GetPlayerDataManager(types.PlayerInventoryDataManagerType).(*playerinventory.PlayerInventoryDataManager)
	emailObjArr := emailManager.GetNotReceiveAttachmentEmails()
	var totalAttacheList []*droptemplate.DropItemData
	var emailIdArr []int64
	if len(emailObjArr) > 0 {
		// Collect every attachment entry and the ids of the affected mails.
		for _, emailObj := range emailObjArr {
			itemList := emailObj.GetAttachmentInfo()
			totalAttacheList = append(totalAttacheList, itemList...)
			emailIdArr = append(emailIdArr, emailObj.GetEmailId())
		}
		var totalItemList []*droptemplate.DropItemData
		var totalResMap map[itemtypes.ItemAutoUseResSubType]int32
		if len(totalAttacheList) != 0 {
			totalItemList, totalResMap = droplogic.SeperateItemDatas(totalAttacheList)
		}
		// Items: check capacity once for the merged list, then add to inventory.
		if len(totalItemList) > 0 {
			if !inventoryManager.HasEnoughSlotsOfItemLevel(totalItemList) {
				log.WithFields(
					log.Fields{
						"playerId":   pl.GetId(),
						"emailIdArr": emailIdArr,
					}).Warn("email:一键领取附件请求,所需背包空间不足")
				playerlogic.SendSystemMessage(pl, lang.InventorySlotNoEnough)
				return
			}
			flag := inventoryManager.BatchAddOfItemLevel(totalItemList, commonlog.InventoryLogReasonEmailAttachment, commonlog.InventoryLogReasonEmailAttachment.String())
			if !flag {
				panic("email: getAttachmentBatch add item should be ok")
			}
		}
		// Resources (gold / silver / level): credit with attachment log reasons.
		if len(totalResMap) > 0 {
			goldReasonText := commonlog.GoldLogReasonEmailAttachment.String()
			silverReasonText := commonlog.SilverLogReasonEmailAttachment.String()
			levelReasonText := commonlog.LevelLogReasonEmailAttachment.String()
			err = droplogic.AddRes(pl, totalResMap, commonlog.GoldLogReasonEmailAttachment, goldReasonText, commonlog.SilverLogReasonEmailAttachment, silverReasonText, commonlog.LevelLogReasonEmailAttachment, levelReasonText)
			if err != nil {
				return
			}
		}
		// Mark every processed mail's attachment as claimed.
		for _, emailObj := range emailObjArr {
			emailManager.ReceiveEmailAttachment(emailObj.GetEmailId())
		}
	}
	// Push the updated inventory to the client.
	inventorylogic.SnapInventoryChanged(pl)
	// Push the updated properties (resources) to the client.
	propertylogic.SnapChangedProperty(pl)
	scGetAttachmentBatch := pbutil.BuildSCGetAttachmentBatch(emailIdArr, totalAttacheList)
	pl.SendMsg(scGetAttachmentBatch)
	return
}
// AddEmail pushes a brand-new mail (timestamped by the mail manager) to an
// online player and notifies the client immediately.
func AddEmail(pl player.Player, title string, content string, attachmentInfo map[int32]int32) {
	manager := pl.GetPlayerDataManager(types.PlayerEmailDataManagerType).(*playeremail.PlayerEmailDataManager)
	email := manager.AddNewEmail(title, content, convertToDropItem(attachmentInfo))
	pl.SendMsg(pbutil.BuildSCAddEmail(email))
}
// AddEmailDefinTime pushes a new mail with an explicit creation time to an
// online player and notifies the client immediately.
func AddEmailDefinTime(pl player.Player, title string, content string, createTime int64, attachmentInfo map[int32]int32) {
	manager := pl.GetPlayerDataManager(types.PlayerEmailDataManagerType).(*playeremail.PlayerEmailDataManager)
	email := manager.AddEmail(title, content, createTime, convertToDropItem(attachmentInfo))
	pl.SendMsg(pbutil.BuildSCAddEmail(email))
}
//向玩家推送离线邮件
func AddOfflineEmail(playerId int64, title string, content string, attachmentInfo map[int32]int32) (err error) {
id, err := idutil.GetId()
if err != nil {
return
}
emailsInfoBytes, err := json.Marshal(convertToDropItem(attachmentInfo))
if err != nil {
return
}
now := global.GetGame().GetTimeService().Now()
mailEntity := &emailentity.PlayerEmailEntity{
Id: id,
PlayerId: playerId,
IsRead: 0,
IsGetAttachment: 0,
Title: title,
Content: content,
AttachementInfo: string(emailsInfoBytes),
UpdateTime: now,
CreateTime: now,
DeleteTime: 0,
}
global.GetGame().GetGlobalUpdater().AddChangedObject(mailEntity)
return err
}
// AddOfflineEmailDefinTime persists a mail for an offline player with an
// explicit creation time. The entity is flushed asynchronously through the
// global updater rather than sent to a live session.
func AddOfflineEmailDefinTime(playerId int64, title string, content string, createTime int64, attachmentInfo map[int32]int32) (err error) {
	id, err := idutil.GetId()
	if err != nil {
		return
	}
	// Attachments are stored as the JSON form of the drop item list.
	emailsInfoBytes, err := json.Marshal(convertToDropItem(attachmentInfo))
	if err != nil {
		return
	}
	now := global.GetGame().GetTimeService().Now()
	mailEntity := &emailentity.PlayerEmailEntity{
		Id:              id,
		PlayerId:        playerId,
		IsRead:          0,
		IsGetAttachment: 0,
		Title:           title,
		Content:         content,
		AttachementInfo: string(emailsInfoBytes),
		UpdateTime:      now,
		CreateTime:      createTime,
		DeleteTime:      0,
	}
	global.GetGame().GetGlobalUpdater().AddChangedObject(mailEntity)
	return err
}
// AddEmailItemLevel pushes a mail carrying pre-built (leveled) drop items to an
// online player, with an explicit creation time, and returns the stored mail.
func AddEmailItemLevel(pl player.Player, title string, content string, createTime int64, itemList []*droptemplate.DropItemData) *playeremail.PlayerEmailObject {
	manager := pl.GetPlayerDataManager(types.PlayerEmailDataManagerType).(*playeremail.PlayerEmailDataManager)
	email := manager.AddEmail(title, content, createTime, itemList)
	pl.SendMsg(pbutil.BuildSCAddEmail(email))
	return email
}
// AddOfflineEmailItemLevel persists a mail carrying pre-built (leveled) drop
// items for an offline player, with an explicit creation time. The entity is
// flushed asynchronously through the global updater.
func AddOfflineEmailItemLevel(playerId int64, title string, content string, createTime int64, itemList []*droptemplate.DropItemData) (err error) {
	id, err := idutil.GetId()
	if err != nil {
		return
	}
	// Attachments are stored as the JSON form of the drop item list.
	emailsInfoBytes, err := json.Marshal(itemList)
	if err != nil {
		return
	}
	now := global.GetGame().GetTimeService().Now()
	mailEntity := &emailentity.PlayerEmailEntity{
		Id:              id,
		PlayerId:        playerId,
		IsRead:          0,
		IsGetAttachment: 0,
		Title:           title,
		Content:         content,
		AttachementInfo: string(emailsInfoBytes),
		UpdateTime:      now,
		CreateTime:      createTime,
		DeleteTime:      0,
	}
	global.GetGame().GetGlobalUpdater().AddChangedObject(mailEntity)
	return err
}
// convertToDropItem turns an itemId -> count map into drop item data entries,
// each created unbound and at level 0.
func convertToDropItem(itemMap map[int32]int32) (itemList []*droptemplate.DropItemData) {
	for id, count := range itemMap {
		item := droptemplate.CreateItemData(id, count, int32(0), itemtypes.ItemBindTypeUnBind)
		itemList = append(itemList, item)
	}
	return
}
|
seakers/ExtUtils | dakota-6.3.0.Windows.x86/include/Teuchos_StandardDependencyXMLConverters.hpp | <reponame>seakers/ExtUtils<filename>dakota-6.3.0.Windows.x86/include/Teuchos_StandardDependencyXMLConverters.hpp<gh_stars>0
// @HEADER
// ***********************************************************************
//
// Teuchos: Common Tools Package
// Copyright (2004) Sandia Corporation
//
// Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
// license for use of this work by or on behalf of the U.S. Government.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact <NAME> (<EMAIL>)
//
// ***********************************************************************
// @HEADER
#ifndef TEUCHOS_STANDARDDEPENDENCYXMLCONVERTERS_HPP
#define TEUCHOS_STANDARDDEPENDENCYXMLCONVERTERS_HPP
/*! \file Teuchos_StandardDependencyXMLConverters.hpp
* \brief A collection of standard DependencyXMLConverters.
*/
#include "Teuchos_DependencyXMLConverter.hpp"
#include "Teuchos_StandardDependencies.hpp"
#include "Teuchos_XMLDependencyExceptions.hpp"
#include "Teuchos_FunctionObjectXMLConverterDB.hpp"
namespace Teuchos {
/** \brief An xml converter for VisualDependencies.
 */
class TEUCHOS_LIB_DLL_EXPORT VisualDependencyXMLConverter : public DependencyXMLConverter{

public:

  /** \name Special converter methods */
  //@{

  /** \brief Converts any special aspects of a
   * specific visual dependency to xml.
   *
   * @param dependency The dependency being converted.
   * @param xmlObj The XMLObject to which the dependency is
   * being converted.
   * @param entryIDsMap A map containing ParameterEntrys and their associated
   * IDs.
   * @return An XMLObject representing the VisualDependency.
   */
  virtual void convertSpecialVisualAttributes(
    RCP<const VisualDependency> dependency,
    XMLObject& xmlObj,
    const XMLParameterListWriter::EntryIDsMap& entryIDsMap) const = 0;

  /** \brief Converts any special aspects of a
   * specific visual dependency from xml.
   *
   * @param xmlObj The xml being converted.
   * @param dependees The dependees of the visual dependency.
   * @param dependents The dependents of the visual dependency.
   * @param showIf The showIf attribute of the visual dependency.
   * @param entryIDsMap A map containing ParameterEntrys and their associated
   * IDs.
   * @return The converted VisualDependency.
   */
  virtual RCP<VisualDependency> convertSpecialVisualAttributes(
    const XMLObject& xmlObj,
    const Dependency::ConstParameterEntryList dependees,
    const Dependency::ParameterEntryList dependents,
    bool showIf,
    const XMLParameterListReader::EntryIDsMap& entryIDsMap) const = 0;

  //@}

  /** \name Overridden from DependencyXMLConverter */
  //@{

  /** \brief Reads the dependee/dependent lists and the showIf attribute,
   * then delegates the type-specific part to convertSpecialVisualAttributes. */
  RCP<Dependency> convertXML(
    const XMLObject& xmlObj,
    const Dependency::ConstParameterEntryList dependees,
    const Dependency::ParameterEntryList dependents,
    const XMLParameterListReader::EntryIDsMap& entryIDsMap,
    const IDtoValidatorMap& validatorIDsMap) const;

  /** \brief Writes the common visual-dependency attributes, then delegates
   * the type-specific part to convertSpecialVisualAttributes. */
  void convertDependency(
    const RCP<const Dependency> dependency,
    XMLObject& xmlObj,
    const XMLParameterListWriter::EntryIDsMap& entryIDsMap,
    ValidatortoIDMap& validatorIDsMap) const;

  //@}

private:

  /** \name Private Members */
  //@{

  /** \brief Gets the name of the showIf attribute. */
  static const std::string& getShowIfAttributeName(){
    static const std::string showIfAttributeName = "showIf";
    return showIfAttributeName;
  }

  //@}

};
/** \brief An xml converter for ValidatorDependencies.
 */
class TEUCHOS_LIB_DLL_EXPORT ValidatorDependencyXMLConverter : public DependencyXMLConverter{

public:

  /** \name Special converter methods */
  //@{

  /** \brief Converts any special aspects of a
   * specific validator dependency to xml.
   *
   * @param dependency The dependency being converted.
   * @param xmlObj The XMLObject to which the dependency is
   * being converted.
   * @param validatorIDsMap A map containing validators and their associated
   * IDs.
   * @return An XMLObject representing the ValidatorDependency.
   */
  virtual void convertSpecialValidatorAttributes(
    RCP<const ValidatorDependency> dependency,
    XMLObject& xmlObj,
    ValidatortoIDMap& validatorIDsMap) const = 0;

  /** \brief Converts any special aspects of a
   * specific validator dependency from xml.
   *
   * @param xmlObj The xml being converted.
   * @param dependee The dependee of the validator dependency.
   * @param dependents The dependents of the validator dependency.
   * @param validatorIDsMap A map containing validators and their associated
   * IDs.
   * @return The converted ValidatorDependency.
   */
  virtual RCP<ValidatorDependency> convertSpecialValidatorAttributes(
    const XMLObject& xmlObj,
    RCP<const ParameterEntry> dependee,
    const Dependency::ParameterEntryList dependents,
    const IDtoValidatorMap& validatorIDsMap) const = 0;

  //@}

  /** \name Overridden from DependencyXMLConverter */
  //@{

  /** \brief Reads the dependee/dependent lists, then delegates the
   * type-specific part to convertSpecialValidatorAttributes. */
  RCP<Dependency> convertXML(
    const XMLObject& xmlObj,
    const Dependency::ConstParameterEntryList dependees,
    const Dependency::ParameterEntryList dependents,
    const XMLParameterListReader::EntryIDsMap& entryIDsMap,
    const IDtoValidatorMap& validatorIDsMap) const;

  /** \brief Writes the common validator-dependency attributes, then delegates
   * the type-specific part to convertSpecialValidatorAttributes. */
  void convertDependency(
    const RCP<const Dependency> dependency,
    XMLObject& xmlObj,
    const XMLParameterListWriter::EntryIDsMap& entryIDsMap,
    ValidatortoIDMap& validatorIDsMap) const;

  //@}

};
/** \brief An xml converter for StringVisualDependencies.
 *
 * The valid XML representation of a StringVisualDependency is:
 * \code
 <Dependency showIf="showIf value" type="StringVisualDependency">
 <Dependee parameterId="Id of dependee parameter"/>
 <Dependent parameterId="Id of dependent parameter"/>
 ...Any additional Dependents...
 <StringValues>
 <String value="First value"/>
 <String value="Second value"/>
 ...Other Values...
 </StringValues>
 </Dependency>
 \endcode
 The "showIf" XML attribute is optional and if not present will be considered
 true.
 */
class TEUCHOS_LIB_DLL_EXPORT StringVisualDependencyXMLConverter : public VisualDependencyXMLConverter{

public:

  /** \name Overridden from VisualDependencyConverter */
  //@{

  /** \brief Writes the StringValues tag of a StringVisualDependency to xml. */
  void convertSpecialVisualAttributes(
    RCP<const VisualDependency> dependency,
    XMLObject& xmlObj,
    const XMLParameterListWriter::EntryIDsMap& entryIDsMap) const;

  /** \brief Reads the StringValues tag and constructs a
   * StringVisualDependency. */
  RCP<VisualDependency> convertSpecialVisualAttributes(
    const XMLObject& xmlObj,
    const Dependency::ConstParameterEntryList dependees,
    const Dependency::ParameterEntryList dependents,
    bool showIf,
    const XMLParameterListReader::EntryIDsMap& entryIDsMap) const;

  /** \brief Gets the StringValues Tag. */
  static const std::string& getStringValuesTagName(){
    static const std::string stringValuesTagName = "StringValues";
    return stringValuesTagName;
  }

  //@}

private:

  /** \name Private Members */
  //@{

  /** \brief Gets the String Tag. */
  static const std::string& getStringTagName(){
    static const std::string stringTagName = "String";
    return stringTagName;
  }

  /** \brief Gets the Value attribute name. */
  static const std::string& getValueAttributeName(){
    static const std::string valueAttributeName = "value";
    return valueAttributeName;
  }

  //@}

};
/** \brief An xml converter for BoolVisualDependencies.
 *
 * The valid XML representation of a BoolVisualDependency is:
 * \code
 <Dependency showIf="showIf value" type="BoolVisualDependency">
 <Dependee parameterId="Id of dependee parameter"/>
 <Dependent parameterId="Id of dependent parameter"/>
 ...Any other dependents...
 </Dependency>
 \endcode
 * The "showIf" XML attribute is optional and if not present will be considered
 * true.
 */
class TEUCHOS_LIB_DLL_EXPORT BoolVisualDependencyXMLConverter : public VisualDependencyXMLConverter{

public:

  /** \name Overridden from VisualDependencyConverter */
  //@{

  /** \brief A BoolVisualDependency has no special attributes, so this is a
   * no-op. */
  void convertSpecialVisualAttributes(
    RCP<const VisualDependency> dependency,
    XMLObject& xmlObj,
    const XMLParameterListWriter::EntryIDsMap& entryIDsMap) const;

  /** \brief Constructs a BoolVisualDependency from the common attributes. */
  RCP<VisualDependency> convertSpecialVisualAttributes(
    const XMLObject& xmlObj,
    const Dependency::ConstParameterEntryList dependees,
    const Dependency::ParameterEntryList dependents,
    bool showIf,
    const XMLParameterListReader::EntryIDsMap& entryIDsMap) const;

  //@}

};
/** \brief An xml converter for NumberVisualDependencies.
 *
 * The valid XML representation of a NumberVisualDependency is:
 * \code
 <Dependency showIf="showIf value"
 type="NumberVisualDependency(number_type_of_dependee)"
 >
 <Dependee parameterId="Id of dependee parameter"/>
 <Dependent parameterId="Id of dependent parameter"/>
 ...Any additional Dependents...
 ...Optional function tag...
 </Dependency>
 \endcode
 * The "showIf" XML attribute is optional and if not present will be considered
 * true.
 */
template<class T>
class NumberVisualDependencyXMLConverter : public VisualDependencyXMLConverter{

public:

  /** \name Overridden from VisualDependencyConverter */
  //@{

  /** \brief Writes the optional function tag of a NumberVisualDependency. */
  void convertSpecialVisualAttributes(
    RCP<const VisualDependency> dependency,
    XMLObject& xmlObj,
    const XMLParameterListWriter::EntryIDsMap& entryIDsMap) const;

  /** \brief Reads the optional function tag and constructs a
   * NumberVisualDependency. */
  RCP<VisualDependency> convertSpecialVisualAttributes(
    const XMLObject& xmlObj,
    const Dependency::ConstParameterEntryList dependees,
    const Dependency::ParameterEntryList dependents,
    bool showIf,
    const XMLParameterListReader::EntryIDsMap& entryIDsMap) const;

  //@}

};
// Writes the optional function tag of a NumberVisualDependency<T> to xml.
template<class T>
void NumberVisualDependencyXMLConverter<T>::convertSpecialVisualAttributes(
  RCP<const VisualDependency> dependency,
  XMLObject& xmlObj,
  const XMLParameterListWriter::EntryIDsMap& entryIDsMap) const
{
  // Cast with throw_on_fail=true so that a converter/dependency type mismatch
  // surfaces as a descriptive exception rather than a null dereference below.
  // This matches RangeValidatorDependencyXMLConverter, which already casts
  // this way.
  RCP<const NumberVisualDependency<T> > castedDependency =
    rcp_dynamic_cast<const NumberVisualDependency<T> >(dependency, true);
  // The function object is optional; only emit a child tag when it is set.
  RCP<const SimpleFunctionObject<T> > functionObject =
    castedDependency->getFunctionObject();
  if(functionObject != null){
    XMLObject functionXML =
      FunctionObjectXMLConverterDB::convertFunctionObject(functionObject);
    xmlObj.addChild(functionXML);
  }
}
// Reconstructs a NumberVisualDependency<T> from xml. An optional Function
// child tag supplies the dependency's function object.
template<class T>
RCP<VisualDependency>
NumberVisualDependencyXMLConverter<T>::convertSpecialVisualAttributes(
  const XMLObject& xmlObj,
  const Dependency::ConstParameterEntryList dependees,
  const Dependency::ParameterEntryList dependents,
  bool showIf,
  const XMLParameterListReader::EntryIDsMap& entryIDsMap) const
{
  // A NumberVisualDependency operates on exactly one dependee.
  // NOTE(review): only the ">1" case is rejected; an empty dependee list
  // would make *(dependees.begin()) below undefined behavior -- presumably
  // the XML reader guarantees at least one Dependee tag. TODO confirm.
  TEUCHOS_TEST_FOR_EXCEPTION(dependees.size() > 1,
    TooManyDependeesException,
    "A NumberVisualDependency can only have 1 dependee!" <<
    std::endl << std::endl);
  // The function tag is optional; when absent no function object is used.
  int functionIndex = xmlObj.findFirstChild(FunctionObject::getXMLTagName());
  RCP<SimpleFunctionObject<T> > functionObject = null;
  if(functionIndex != -1){
    functionObject = rcp_dynamic_cast<SimpleFunctionObject<T> >(
      FunctionObjectXMLConverterDB::convertXML(xmlObj.getChild(functionIndex)));
  }
  return rcp(new NumberVisualDependency<T>(
    *(dependees.begin()), dependents, showIf, functionObject));
}
/** \brief An xml converter for ConditionVisualDependencies.
 *
 * The valid XML representation of a ConditionVisualDependency is:
 * \code
 <Dependency showIf="showIf value" type="ConditionVisualDependency">
 <Dependee parameterId="Id of first dependee parameter"/>
 <Dependee parameterId="id of second dependee parameter"/>
 ...Any additional dependees...
 <Dependent parameterId="Id of dependent"/>
 ...Any additional dependents...
 ...Condition Tag and its children...
 </Dependency>
 \endcode
 * The "showIf" XML attribute is optional and if not present will be considered
 * true.
 */
class TEUCHOS_LIB_DLL_EXPORT ConditionVisualDependencyXMLConverter :
  public VisualDependencyXMLConverter
{

public:

  /** \name Overridden from VisualDependencyConverter */
  //@{

  /** \brief Writes the Condition tag of a ConditionVisualDependency to xml. */
  void convertSpecialVisualAttributes(
    RCP<const VisualDependency> dependency,
    XMLObject& xmlObj,
    const XMLParameterListWriter::EntryIDsMap& entryIDsMap) const;

  /** \brief Reads the Condition tag and constructs a
   * ConditionVisualDependency. */
  RCP<VisualDependency> convertSpecialVisualAttributes(
    const XMLObject& xmlObj,
    const Dependency::ConstParameterEntryList dependees,
    const Dependency::ParameterEntryList dependents,
    bool showIf,
    const XMLParameterListReader::EntryIDsMap& entryIDsMap) const;

  //@}

};
/** \brief An xml converter for StringValidatorDependencies
 *
 * The valid XML representation of a StringValidatorDependency is:
 * \code
 <Dependency type="StringValidatorDependency"
 defaultValidatorId="value of default validator"
 />
 <Dependee parameterId="Id of dependee parameter"/>
 <Dependent parameterId="Id of dependent parameter"/>
 ...Additional Dependents...
 <ValuesAndValidators>
 <Pair value="Value 1" validatorId="Id of first mapped validator"/>
 <Pair value="Value 2" validatorId="Id of second mapped validator"/>
 ...Other value-to-validator mappings
 </ValuesAndValidators>
 </Dependency>
 \endcode
 * The "defaultValidatorId" XML attribute is optional.
 */
class TEUCHOS_LIB_DLL_EXPORT StringValidatorDependencyXMLConverter :
  public ValidatorDependencyXMLConverter{

public:

  /** \name Overridden from ValidatorDependencyConverter */
  //@{

  /** \brief Writes the value-to-validator map (and optional default
   * validator id) of a StringValidatorDependency to xml. */
  void convertSpecialValidatorAttributes(
    RCP<const ValidatorDependency> dependency,
    XMLObject& xmlObj,
    ValidatortoIDMap& validatorIDsMap) const;

  /** \brief Reads the value-to-validator map and constructs a
   * StringValidatorDependency. */
  RCP<ValidatorDependency> convertSpecialValidatorAttributes(
    const XMLObject& xmlObj,
    RCP<const ParameterEntry> dependee,
    const Dependency::ParameterEntryList dependents,
    const IDtoValidatorMap& validatorIDsMap) const;

  /** \brief Gets the name of the ValuesAndValidators tag. */
  static const std::string& getValuesAndValidatorsTag(){
    static const std::string valuesAndValidatorsTag = "ValuesAndValidators";
    return valuesAndValidatorsTag;
  }

  //@}

private:

  /** \name Private Members */
  //@{

  /** \brief Gets the name of the Pair tag. */
  static const std::string& getPairTag(){
    static const std::string pairTag = "Pair";
    return pairTag;
  }

  /** \brief Gets the name of the value attribute. */
  static const std::string& getValueAttributeName(){
    static const std::string valueAttributeName = "value";
    return valueAttributeName;
  }

  /** \brief Gets the name of the validatorId attribute. */
  static const std::string& getValidatorIdAttributeName(){
    static const std::string validatorIDAttributeName = "validatorId";
    return validatorIDAttributeName;
  }

  /** \brief Gets the name of the defaultValidatorId attribute. */
  static const std::string& getDefaultValidatorIdAttributeName(){
    static const std::string defaultValidatorIdAttributeName =
      "defaultValidatorId";
    return defaultValidatorIdAttributeName;
  }

  //@}

};
/** \brief An xml converter for BoolValidatorDependencies
 *
 * The valid XML representation of a BoolValidatorDependency is:
 * \code
 <Dependency type="BoolValidatorDependency"
 trueValidatorId="Id of the true validator"
 falseValidatorId="Id of the false validator"
 />
 <Dependee parameterId="Id of dependee parameter"/>
 <Dependent parameterId="Id of dependent parameter"/>
 ...Any other dependent parameters...
 </Dependency>
 \endcode
 * You don't have to include both a "trueValidatorId" and "falseValidatorId"
 * XML attribute, but you must include at least one of them.
 */
class TEUCHOS_LIB_DLL_EXPORT BoolValidatorDependencyXMLConverter : public ValidatorDependencyXMLConverter{

public:

  /** \name Overridden from ValidatorDependencyConverter */
  //@{

  /** \brief Writes the true/false validator ids of a BoolValidatorDependency
   * to xml. */
  void convertSpecialValidatorAttributes(
    RCP<const ValidatorDependency> dependency,
    XMLObject& xmlObj,
    ValidatortoIDMap& validatorIDsMap) const;

  /** \brief Reads the true/false validator ids and constructs a
   * BoolValidatorDependency. */
  RCP<ValidatorDependency> convertSpecialValidatorAttributes(
    const XMLObject& xmlObj,
    RCP<const ParameterEntry> dependee,
    const Dependency::ParameterEntryList dependents,
    const IDtoValidatorMap& validatorIDsMap) const;

  //@}

private:

  /** \name Private Members */
  //@{

  /** \brief Gets the name of the falseValidatorId attribute. */
  static const std::string& getFalseValidatorIdAttributeName(){
    static const std::string falseValidatorIdAttributeName =
      "falseValidatorId";
    return falseValidatorIdAttributeName;
  }

  /** \brief Gets the name of the trueValidatorId attribute. */
  static const std::string& getTrueValidatorIdAttributeName(){
    static const std::string trueValidatorIdAttributeName =
      "trueValidatorId";
    return trueValidatorIdAttributeName;
  }

  //@}

};
/** \brief An xml converter for RangeValidatorDependencies
 *
 * The valid XML representation of a RangeValidatorDependency is:
 * \code
 <Dependency type="RangeValidatorDependency(number_type_of_dependee)"
 defaultValidatorId="id of default validator"
 />
 <Dependee parameterId="Id of dependee parameter"/>
 <Dependent parameterId="Id of dependent parameter"/>
 ...Any other dependent parameters...
 <RangesAndValidators>
 <Pair min="min value" max="max value"
 validatorId="Id of first mapped validator"/>
 <Pair min="min value" max="max value"
 validatorId="Id of second mapped validator"/>
 ...Other range-to-validator mappings...
 </RangesAndValidators>
 ...Optional function tag...
 </Dependency>
 \endcode
 * The "defaultValidatorId" XML attribute is optional.
 */
template<class T>
class RangeValidatorDependencyXMLConverter :
  public ValidatorDependencyXMLConverter{

public:

  /** \name Overridden from ValidatorDependencyConverter */
  //@{

  /** \brief Writes the range-to-validator map (and optional default
   * validator id) of a RangeValidatorDependency to xml. */
  void convertSpecialValidatorAttributes(
    RCP<const ValidatorDependency> dependency,
    XMLObject& xmlObj,
    ValidatortoIDMap& validatorIDsMap) const;

  /** \brief Reads the range-to-validator map and constructs a
   * RangeValidatorDependency. */
  RCP<ValidatorDependency> convertSpecialValidatorAttributes(
    const XMLObject& xmlObj,
    RCP<const ParameterEntry> dependee,
    const Dependency::ParameterEntryList dependents,
    const IDtoValidatorMap& validatorIDsMap) const;

  /** \brief Gets the name of the RangesAndValidators tag. */
  static const std::string& getRangesAndValidatorsTag(){
    static const std::string rangesAndValidatorsTag = "RangesAndValidators";
    return rangesAndValidatorsTag;
  }

  //@}

private:

  /** \name Private Members */
  //@{

  /** \brief Gets the name of the Pair tag. */
  static const std::string& getPairTag(){
    static const std::string pairTag = "Pair";
    return pairTag;
  }

  /** \brief Gets the name of the min attribute. */
  static const std::string& getMinAttributeName(){
    static const std::string minAttributeName = "min";
    return minAttributeName;
  }

  /** \brief Gets the name of the max attribute. */
  static const std::string& getMaxAttributeName(){
    static const std::string maxAttributeName = "max";
    return maxAttributeName;
  }

  /** \brief Gets the name of the validatorId attribute. */
  static const std::string& getValidatorIdAttributeName(){
    static const std::string validatorIdAttributeName = "validatorId";
    return validatorIdAttributeName;
  }

  /** \brief Gets the name of the defaultValidatorId attribute. */
  static const std::string& getDefaultValidatorIdAttributeName(){
    static const std::string defaultValidatorIdAttributeName =
      "defaultValidatorId";
    return defaultValidatorIdAttributeName;
  }

  //@}

};
// Writes the range-to-validator map of a RangeValidatorDependency<T> to xml,
// registering any not-yet-seen validators in validatorIDsMap, and emits the
// optional defaultValidatorId attribute.
template<class T>
void
RangeValidatorDependencyXMLConverter<T>::convertSpecialValidatorAttributes(
  RCP<const ValidatorDependency> dependency,
  XMLObject& xmlObj,
  ValidatortoIDMap& validatorIDsMap) const
{
  // throw_on_fail=true: a type mismatch surfaces as a descriptive exception.
  RCP<const RangeValidatorDependency<T> > castedDependency =
    rcp_dynamic_cast<const RangeValidatorDependency<T> >(dependency, true);
  XMLObject rangesAndValidatorsTag(getRangesAndValidatorsTag());
  // Fetch the map once instead of re-calling the accessor on every loop
  // iteration (the original also contained a stray discarded call to
  // getRangeToValidatorMap(), removed here as dead code).
  const typename RangeValidatorDependency<T>::RangeToValidatorMap& rangeMap =
    castedDependency->getRangeToValidatorMap();
  for(
    typename RangeValidatorDependency<T>::RangeToValidatorMap::const_iterator
    it = rangeMap.begin();
    it != rangeMap.end();
    ++it)
  {
    T min = it->first.first;
    T max = it->first.second;
    // Assign an ID to this validator if it doesn't have one yet.
    if(validatorIDsMap.find(it->second) == validatorIDsMap.end()){
      validatorIDsMap.insert(it->second);
    }
    ParameterEntryValidator::ValidatorID validatorID =
      validatorIDsMap.find(it->second)->second;
    XMLObject pairTag(getPairTag());
    pairTag.addAttribute(getMinAttributeName(), min);
    pairTag.addAttribute(getMaxAttributeName(), max);
    pairTag.addAttribute(getValidatorIdAttributeName(), validatorID);
    rangesAndValidatorsTag.addChild(pairTag);
  }
  xmlObj.addChild(rangesAndValidatorsTag);
  // The default validator is optional; only write its attribute when set.
  RCP<const ParameterEntryValidator> defaultValidator =
    castedDependency->getDefaultValidator();
  if(nonnull(defaultValidator)){
    if(validatorIDsMap.find(defaultValidator) == validatorIDsMap.end()){
      validatorIDsMap.insert(defaultValidator);
    }
    xmlObj.addAttribute(
      getDefaultValidatorIdAttributeName(),
      validatorIDsMap.find(defaultValidator)->second);
  }
}
// Reconstructs a RangeValidatorDependency<T> from xml: reads the
// RangesAndValidators tag, resolves each validatorId through validatorIDsMap,
// and reads the optional defaultValidatorId attribute.
template<class T>
RCP<ValidatorDependency>
RangeValidatorDependencyXMLConverter<T>::convertSpecialValidatorAttributes(
  const XMLObject& xmlObj,
  RCP<const ParameterEntry> dependee,
  const Dependency::ParameterEntryList dependents,
  const IDtoValidatorMap& validatorIDsMap) const
{
  // The RangesAndValidators tag is mandatory.
  int result = xmlObj.findFirstChild(getRangesAndValidatorsTag());
  TEUCHOS_TEST_FOR_EXCEPTION(result == -1,
    MissingRangesAndValidatorsTagException,
    "Error: All RangeValidatorDependencies must have a " <<
    getRangesAndValidatorsTag() << " tag!" << std::endl << std::endl);
  XMLObject rangesAndValidatorsTag = xmlObj.getChild(result);
  typename RangeValidatorDependency<T>::RangeToValidatorMap
    rangesAndValidators;
  // Each child is a Pair tag mapping a [min,max] range to a validator id.
  for(int i = 0 ; i < rangesAndValidatorsTag.numChildren(); ++i){
    XMLObject child = rangesAndValidatorsTag.getChild(i);
    T min = child.getRequired<T>(getMinAttributeName());
    T max = child.getRequired<T>(getMaxAttributeName());
    ParameterEntryValidator::ValidatorID currentID =
      child.getRequired<ParameterEntryValidator::ValidatorID>(
        getValidatorIdAttributeName());
    // The id must resolve to a validator already read from the xml.
    TEUCHOS_TEST_FOR_EXCEPTION(
      validatorIDsMap.find(currentID) == validatorIDsMap.end(),
      MissingValidatorException,
      "Could not find validator in given ValidatorIDsMap! " << std::endl <<
      std::endl);
    RCP<ParameterEntryValidator> validator =
      validatorIDsMap.find(currentID)->second;
    rangesAndValidators.insert(
      typename RangeValidatorDependency<T>::RangeValidatorPair(
        typename RangeValidatorDependency<T>::Range(min, max), validator));
  }
  // The default validator is optional.
  RCP<ParameterEntryValidator> defaultValidator = null;
  if(xmlObj.hasAttribute(getDefaultValidatorIdAttributeName())){
    ParameterEntryValidator::ValidatorID defaultValiID =
      xmlObj.getRequired<ParameterEntryValidator::ValidatorID>(
        getDefaultValidatorIdAttributeName());
    TEUCHOS_TEST_FOR_EXCEPTION(
      validatorIDsMap.find(defaultValiID) == validatorIDsMap.end(),
      MissingValidatorException,
      "Could not find a validator (for the default validator) " <<
      "corresponding to the ID " << defaultValiID <<
      " in the given validatorIDsMap!" << std::endl << std::endl);
    defaultValidator = validatorIDsMap.find(defaultValiID)->second;
  }
  return rcp(new RangeValidatorDependency<T>(
    dependee, dependents, rangesAndValidators, defaultValidator));
}
/**
 * \brief A converter used to convert ArrayModifierDependencies to and from
 * xml.
 */
template<class DependeeType, class DependentType>
class ArrayModifierDependencyXMLConverter : public DependencyXMLConverter{

public:

  /** \name Overridden from DependencyXMLConverter */
  //@{

  /** \brief . */
  RCP<Dependency> convertXML(
    const XMLObject& xmlObj,
    const Dependency::ConstParameterEntryList dependees,
    const Dependency::ParameterEntryList dependents,
    const XMLParameterListReader::EntryIDsMap& entryIDsMap,
    const IDtoValidatorMap& validatorIDsMap) const;

  /** \brief . */
  void convertDependency(
    const RCP<const Dependency> dependency,
    XMLObject& xmlObj,
    const XMLParameterListWriter::EntryIDsMap& entryIDsMap,
    ValidatortoIDMap& validatorIDsMap) const;

  //@}

protected:

  /**
   * \brief Obtains a concrete ArrayModifierDependency given a
   * dependee, dependents, and a function object.
   *
   * Because ArrayModifierDependency is an abstract class with pure virtual
   * methods we need to be able to get a concrete object to actually
   * return. This is the responsibility of any classes subclassing this one.
   *
   * @param dependee The dependee to be used in the construction of the
   * concrete dependency.
   * @param dependents The dependents to be used in the construction of the
   * concrete dependency.
   * @param function The function object to be used in the construction of the
   * concrete dependency.
   * @return A concrete dependency object which subclasses
   * ArrayModifierDependency.
   */
  virtual RCP<ArrayModifierDependency<DependeeType, DependentType> >
  getConcreteDependency(
    RCP<const ParameterEntry> dependee,
    const Dependency::ParameterEntryList dependents,
    RCP<const SimpleFunctionObject<DependeeType> > function) const = 0;

};
// Reads the single dependee, the dependents, and an optional function tag
// from xml, then delegates construction to getConcreteDependency().
template<class DependeeType, class DependentType>
RCP<Dependency>
ArrayModifierDependencyXMLConverter<DependeeType, DependentType>::convertXML(
  const XMLObject& xmlObj,
  const Dependency::ConstParameterEntryList dependees,
  const Dependency::ParameterEntryList dependents,
  const XMLParameterListReader::EntryIDsMap& entryIDsMap,
  const IDtoValidatorMap& validatorIDsMap) const
{
  // An ArrayModifierDependency operates on exactly one dependee.
  // NOTE(review): only the ">1" case is rejected; an empty dependee list
  // would make *(dependees.begin()) below undefined behavior -- presumably
  // the XML reader guarantees at least one Dependee tag. TODO confirm.
  TEUCHOS_TEST_FOR_EXCEPTION(dependees.size() > 1,
    TooManyDependeesException,
    "A ArrayModifierDependency can only have 1 dependee!" <<
    std::endl << std::endl);
  // The function tag is optional; when absent no function object is used.
  RCP<SimpleFunctionObject<DependeeType> > functionObject = null;
  int functionIndex = xmlObj.findFirstChild(FunctionObject::getXMLTagName());
  if(functionIndex != -1){
    functionObject = rcp_dynamic_cast<SimpleFunctionObject<DependeeType> >(
      FunctionObjectXMLConverterDB::convertXML(xmlObj.getChild(functionIndex)));
  }
  return
    getConcreteDependency(*(dependees.begin()), dependents, functionObject);
}
// Writes the optional function tag of an ArrayModifierDependency to xml.
template<class DependeeType, class DependentType>
void
ArrayModifierDependencyXMLConverter<DependeeType, DependentType>::convertDependency(
  const RCP<const Dependency> dependency,
  XMLObject& xmlObj,
  const XMLParameterListWriter::EntryIDsMap& entryIDsMap,
  ValidatortoIDMap& validatorIDsMap) const
{
  // Cast with throw_on_fail=true so that a converter/dependency type mismatch
  // surfaces as a descriptive exception instead of a null dereference on the
  // getFunctionObject() call below. This matches the casting style used by
  // RangeValidatorDependencyXMLConverter.
  RCP<const ArrayModifierDependency<DependeeType, DependentType> > castedDep =
    rcp_dynamic_cast<const ArrayModifierDependency<DependeeType, DependentType> >(
      dependency, true);
  // The function object is optional; only emit a child tag when it is set.
  RCP<const SimpleFunctionObject<DependeeType> > functionObject =
    castedDep->getFunctionObject();
  if(functionObject != null){
    XMLObject functionXML = FunctionObjectXMLConverterDB::convertFunctionObject(
      functionObject);
    xmlObj.addChild(functionXML);
  }
}
/** \brief An xml converter for NumberArrayLengthDependencies.
 *
 * The valid XML representation of a NumberArrayLengthDependency is:
 * \code
 <Dependency
 type="NumberArrayLengthDependency(dependee_number_type, type_of_array_values)"
 >
 <Dependee parameterId="Id of dependee parameter"/>
 <Dependent parameterId="Id of dependent parameter"/>
 ...Any other dependent parameters...
 ...Optional Function tag...
 </Dependency>
 \endcode
 */
template<class DependeeType, class DependentType>
class NumberArrayLengthDependencyXMLConverter :
  public ArrayModifierDependencyXMLConverter<DependeeType, DependentType>{

protected:

  /** \name Overridden from ArrayModifierDependency */
  //@{

  /** \brief Constructs a concrete NumberArrayLengthDependency. */
  virtual RCP<ArrayModifierDependency<DependeeType, DependentType> >
  getConcreteDependency(
    RCP<const ParameterEntry> dependee,
    const Dependency::ParameterEntryList dependents,
    RCP<const SimpleFunctionObject<DependeeType> > function) const;

  //@}

};
// Factory hook: builds the concrete NumberArrayLengthDependency from the
// pieces parsed by the base-class converter.
template<class DependeeType, class DependentType>
RCP<ArrayModifierDependency<DependeeType, DependentType> >
NumberArrayLengthDependencyXMLConverter<DependeeType, DependentType>::getConcreteDependency(
  RCP<const ParameterEntry> dependee,
  Dependency::ParameterEntryList dependents,
  RCP<const SimpleFunctionObject<DependeeType> > function) const
{
  return rcp(
    new NumberArrayLengthDependency<DependeeType, DependentType>(
      dependee, dependents, function));
}
/**
 * \brief A class for converting TwoDRowDependencies
 * to and from XML.
 *
 * The valid XML representation of a TwoDRowDependency is:
 * \code
 <Dependency
 type="TwoDRowDependency(dependee_number_type, type_of_array_values)"
 >
 <Dependee parameterId="Id of dependee parameter"/>
 <Dependent parameterId="Id of dependent parameter"/>
 ...Any other dependent parameters...
 ...Optional Function tag...
 </Dependency>
 \endcode
 */
template<class DependeeType, class DependentType>
class TwoDRowDependencyXMLConverter :
  public ArrayModifierDependencyXMLConverter<DependeeType, DependentType>
{

protected:

  /** \name Overridden from ArrayModifierDependency */
  //@{

  /** \brief Constructs a concrete TwoDRowDependency. */
  virtual RCP<ArrayModifierDependency<DependeeType, DependentType> >
  getConcreteDependency(
    RCP<const ParameterEntry> dependee,
    const Dependency::ParameterEntryList dependents,
    RCP<const SimpleFunctionObject<DependeeType> > function) const;

  //@}

};
// Factory hook: builds the concrete TwoDRowDependency from the pieces parsed
// by the base-class converter.
template<class DependeeType, class DependentType>
RCP<ArrayModifierDependency<DependeeType, DependentType> >
TwoDRowDependencyXMLConverter<DependeeType, DependentType>::getConcreteDependency(
  RCP<const ParameterEntry> dependee,
  Dependency::ParameterEntryList dependents,
  RCP<const SimpleFunctionObject<DependeeType> > function) const
{
  return rcp(
    new TwoDRowDependency<DependeeType, DependentType>(
      dependee, dependents, function));
}
/**
 * \brief A class for converting TwoDColDependencies
 * to and from XML.
 *
 * The valid XML representation of a TwoDColDependency is:
 * \code
 <Dependency
 type="TwoDColDependency(dependee_number_type, type_of_array_values)"
 >
 <Dependee parameterId="Id of dependee parameter"/>
 <Dependent parameterId="Id of dependent parameter"/>
 ...Any other dependent parameters...
 ...Optional Function tag...
 </Dependency>
 \endcode
 */
template<class DependeeType, class DependentType>
class TwoDColDependencyXMLConverter :
  public ArrayModifierDependencyXMLConverter<DependeeType, DependentType>
{

protected:

  /** \name Overridden from ArrayModifierDependency */
  //@{

  /** \brief Constructs a concrete TwoDColDependency. */
  virtual RCP<ArrayModifierDependency<DependeeType, DependentType> >
  getConcreteDependency(
    RCP<const ParameterEntry> dependee,
    const Dependency::ParameterEntryList dependents,
    RCP<const SimpleFunctionObject<DependeeType> > function) const;

  //@}

};
// Factory hook: builds the concrete TwoDColDependency from the pieces parsed
// by the base-class converter.
template<class DependeeType, class DependentType>
RCP<ArrayModifierDependency<DependeeType, DependentType> >
TwoDColDependencyXMLConverter<DependeeType, DependentType>::getConcreteDependency(
  RCP<const ParameterEntry> dependee,
  Dependency::ParameterEntryList dependents,
  RCP<const SimpleFunctionObject<DependeeType> > function) const
{
  return rcp(
    new TwoDColDependency<DependeeType, DependentType>(
      dependee, dependents, function));
}
} // namespace Teuchos
#endif // TEUCHOS_STANDARDDEPENDENCYXMLCONVERTERS_HPP
|
jppf-grid/JPPF | server/src/java/org/jppf/management/JPPFNodeConnectionNotifier.java | /*
* JPPF.
* Copyright (C) 2005-2019 JPPF Team.
* http://www.jppf.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jppf.management;
import java.util.concurrent.atomic.AtomicLong;
import javax.management.*;
import org.slf4j.*;
/**
 * Singleton MBean implementation which emits a JMX notification each time a
 * node connects to, or disconnects from, the driver.
 * @author <NAME>
 * @since 5.1
 * @exclude
 */
public final class JPPFNodeConnectionNotifier extends NotificationBroadcasterSupport implements JPPFNodeConnectionNotifierMBean {
  /**
   * Logger for this class.
   */
  private static final Logger log = LoggerFactory.getLogger(JPPFNodeConnectionNotifier.class);
  /**
   * Determines whether the debug level is enabled in the log configuration, without the cost of a method call.
   */
  private static final boolean debugEnabled = log.isDebugEnabled();
  /**
   * Singleton instance of this class.
   */
  private static final JPPFNodeConnectionNotifier instance = new JPPFNodeConnectionNotifier();
  /**
   * Notifications sequence number.
   */
  private final AtomicLong sequence = new AtomicLong(0L);

  /**
   * Direct instantiation not permitted.
   */
  private JPPFNodeConnectionNotifier() {
  }

  /**
   * Called when a node is connected to the driver.
   * @param info information about the connected node.
   */
  public void onNodeConnected(final JPPFManagementInfo info) {
    if (debugEnabled) log.debug("sending node connected notification for {}", info);
    emitNotification(info, true);
  }

  /**
   * Called when a node is disconnected from the driver.
   * @param info information about the disconnected node.
   */
  public void onNodeDisconnected(final JPPFManagementInfo info) {
    if (debugEnabled) log.debug("sending node disconnected notification for {}", info);
    emitNotification(info, false);
  }

  /**
   * Send a notification that a node is connected to, or disconnected from, the driver.
   * Named {@code emitNotification} rather than {@code notify} to avoid confusion with {@link Object#notify()}.
   * @param info information about the node.
   * @param connected {@code true} to indicate that the node is connected, {@code false} otherwise.
   */
  private void emitNotification(final JPPFManagementInfo info, final boolean connected) {
    final Notification notif = new Notification(connected ? CONNECTED : DISCONNECTED, JPPFNodeConnectionNotifierMBean.MBEAN_NAME, sequence.incrementAndGet(), System.currentTimeMillis());
    notif.setUserData(info);
    sendNotification(notif);
  }

  /**
   * Get the singleton instance of this class.
   * @return a {@link JPPFNodeConnectionNotifier} object.
   */
  public static JPPFNodeConnectionNotifier getInstance() {
    return instance;
  }

  @Override
  public void addNotificationListener(final NotificationListener listener, final NotificationFilter filter, final Object handback) {
    if (debugEnabled) log.debug("adding notification listener");
    super.addNotificationListener(listener, filter, handback);
  }

  @Override
  public void removeNotificationListener(final NotificationListener listener) throws ListenerNotFoundException {
    if (debugEnabled) log.debug("removing notification listener");
    super.removeNotificationListener(listener);
  }

  @Override
  public void removeNotificationListener(final NotificationListener listener, final NotificationFilter filter, final Object handback) throws ListenerNotFoundException {
    if (debugEnabled) log.debug("removing notification listener with filter");
    super.removeNotificationListener(listener, filter, handback);
  }
}
|
RockhoRockho/Project_FG | APPS/MEMBER/context_processors.py | from APPS.MEMBER.models import Member
def flag_processor(request):
    """Template context processor exposing the login state.

    Adds two keys to every template context:
      - ``flag``: 1 when the session holds a ``'user'`` id, 0 otherwise.
      - ``member``: the matching ``Member`` instance when logged in,
        0 otherwise (kept as 0, not None, for backward compatibility
        with templates that test it).

    May raise ``Member.DoesNotExist`` if the session id no longer
    matches a row — same behavior as before.
    """
    # Read the session once instead of twice; the original also assigned
    # flag/member defaults that were immediately overwritten (dead code).
    user_id = request.session.get('user')
    if user_id:
        return {'flag': 1, 'member': Member.objects.get(member_id=user_id)}
    return {'flag': 0, 'member': 0}
parasol-ppl/PPL | src/Utilities/XMLNode.h | #ifndef XML_NODE_H_
#define XML_NODE_H_
#include <memory>
#include <sstream>
#include <string>
#include <unordered_set>
#include <vector>
// Tell TinyXML to use the stl.
#ifndef TIXML_USE_STL
#define TIXML_USE_STL
#endif
#include "tinyxml.h"
#include "PMPLExceptions.h"
////////////////////////////////////////////////////////////////////////////////
/// Wrapper class for XML parsing with TinyXML.
///
/// @ingroup IOUtils
/// @details This is a wrapper class for XML handling with TinyXML. It is read
/// only and supports trivial XML parsing.
////////////////////////////////////////////////////////////////////////////////
class XMLNode {

  public:

    ///@name Construction
    ///@{

    /// Construct an XML node object from an XML file.
    /// @param _filename XML Filename
    /// @param _desiredNode Desired XML Node to make root of tree
    ///
    /// Will throw ParseException when \p _desiredNode cannot be found or
    /// \p _filename is poorly formed input
    XMLNode(const std::string& _filename, const std::string& _desiredNode);

  private:

    /// Private constructor for use within BuildChildVector
    /// @param _node New TiXMLNode
    /// @param _filename XML filename
    /// @param _doc TiXmlDocument from tree's root node
    XMLNode(TiXmlNode* _node, const std::string& _filename,
        std::shared_ptr<TiXmlDocument> _doc);

    ///@}

  public:

    ///@name Iteration
    ///@{

    typedef std::vector<XMLNode>::iterator iterator;

    /// Get an iterator to this node's first child.
    iterator begin();

    /// Get the past-the-end iterator for this node's children.
    iterator end();

    ///@}
    ///@name Metadata Accessors
    ///@{

    /// Get the XMLNode name.
    const std::string& Name() const;

    /// Get the XML filename.
    const std::string& Filename() const;

    /// Get the directory path containing the XML file.
    std::string GetPath() const;

    ///@}
    ///@name Content Accessors
    ///@{

    /// Get the text between opening and closing tags.
    std::string GetText() const;

    ///@}
    ///@name Attribute Parsing
    ///@{

    /// Read XML attribute.
    /// @tparam T Type of attribute
    /// @param _name Name of attribute
    /// @param _req Is attribute required
    /// @param _default Default value of attribute
    /// @param _min Minimum value of attribute
    /// @param _max Maximum value of attribute
    /// @param _desc Description of attribute
    /// @return Value of attribute
    ///
    /// Reads XML attribute value with \p _name. If _req is specified and no
    /// attribute is given, \p _default is returned, otherwise input value is
    /// required to be in the range [\p _min, \p _max]. Otherwise, an error is
    /// reported and \p _desc is shown to the user.
    template <typename T>
    T Read(const std::string& _name, const bool _req, const T& _default,
        const T& _min, const T& _max, const std::string& _desc);

    /// Read XML boolean attribute
    /// @param _name Name of attribute
    /// @param _req Is attribute required
    /// @param _default Default value of attribute
    /// @param _desc Description of attribute
    /// @return Value of attribute
    ///
    /// Reads XML attribute value with \p _name. If _req is specified and no
    /// attribute is given, \p _default is returned. Otherwise, an error is
    /// reported and \p _desc is shown to the user.
    bool Read(const std::string& _name, const bool _req, const bool _default,
        const std::string& _desc);

    /// Read XML string attribute
    /// @return Value of attribute
    ///
    /// Calls string version of function to avoid confusion with bool -> const
    /// char* conversion in compile.
    std::string Read(const std::string& _name, const bool _req,
        const char* _default, const std::string& _desc);

    /// Read XML string attribute
    /// @param _name Name of attribute
    /// @param _req Is attribute required
    /// @param _default Default value of attribute
    /// @param _desc Description of attribute
    /// @return Value of attribute
    ///
    /// Reads XML attribute value with \p _name. If _req is specified and no
    /// attribute is given, \p _default is returned. Otherwise, an error is
    /// reported and \p _desc is shown to the user.
    std::string Read(const std::string& _name,
        const bool _req,
        const std::string& _default,
        const std::string& _desc);

    ///@}
    ///@name Parsing Flow
    ///@{

    /// Ignore unrequested node/attribute errors for this node.
    void Ignore();

    /// Report warnings for XML tree rooted at this node
    /// @param _warningsAsErrors True will throw exceptions for warnings
    ///
    /// To be called after parsing phase. This will report warnings throughout
    /// entire XML document. Should only be called on root XML node. Warnings to
    /// be reported:
    /// - unknown/unparsed nodes
    /// - unrequested attributes
    void WarnAll(const bool _warningsAsErrors = false);

    /// Generate string describing where the node is
    /// @return String representing where node is
    ///
    /// To be used with PMPLExceptions, specifically ParseException. Gives
    /// string with filename, row (line number), and column of XMLNode.
    std::string Where() const;

    ///@}

  private:

    ///@name Helpers
    ///@{

    /// Generate string describing where the node is.
    /// @param _f Filename
    /// @param _l Line number
    /// @param _c Column number
    /// @param _name Report name of node
    /// @return String representing where node is
    ///
    /// To be used with PMPLExceptions, specifically ParseException. Gives
    /// string with filename, name, row (line number), and column of XMLNode.
    std::string Where(const std::string& _f, const int _l, const int _c,
        const bool _name = true) const;

    /// Generate XMLNodes for all children
    ///
    /// Builds the internal vector of children, used when iterating over
    /// the children. This vector is only built with ELEMENT type nodes.
    void BuildChildVector();

    /// Return error report for attribute being the wrong type
    /// @param _name Name of attribute
    /// @param _desc Description of attribute
    /// @return Error report
    std::string AttrWrongType(const std::string& _name, const std::string& _desc)
        const;

    /// Return error report for missing attribute
    /// @param _name Name of attribute
    /// @param _desc Description of attribute
    /// @return Error report
    std::string AttrMissing(const std::string& _name, const std::string& _desc)
        const;

    /// Return error report for attribute being in an invalid range
    /// @tparam T Type of attribute
    /// @param _name Name of attribute
    /// @param _desc Description of attribute
    /// @param _min Minimum value of attribute
    /// @param _max Maximum value of attribute
    /// @param _val The specified value
    /// @return Error report
    template <typename T>
    std::string AttrInvalidBounds(const std::string& _name,
        const std::string& _desc, const T& _min, const T& _max, const T& _val)
        const;

    /// Recursive function computing whether nodes have been accessed
    void ComputeAccessed();

    /// Recursive function reporting all unknown/unparsed nodes and unrequested
    /// attributes.
    /// @param[out] _anyWarnings Initially should be false, and stores whether
    ///             any warnings have been reported
    void WarnAllRec(bool& _anyWarnings);

    /// Report unknown node warning to cerr.
    void WarnUnknownNode();

    /// Report unrequested attributes to cerr.
    bool WarnUnrequestedAttributes();

    ///@}
    ///@name Internal State
    ///@{

    TiXmlNode* m_node{nullptr};      ///< TiXmlNode
    bool m_childBuilt{false};        ///< Have children been parsed into nodes?
    bool m_accessed{false};          ///< Has this node been accessed or not?
    std::vector<XMLNode> m_children; ///< Children of node
    std::unordered_set<std::string> m_reqAttributes; ///< Requested attributes.
    std::string m_filename;          ///< XML Filename

    /// Overall TiXmlDocument. Can be shared by child nodes.
    std::shared_ptr<TiXmlDocument> m_doc;

    ///@}
};
/*---------------------------- Templated Members -----------------------------*/
template <typename T>
T
XMLNode::
Read(const std::string& _name, const bool _req, const T& _default, const T& _min,
    const T& _max, const std::string& _desc) {
  // Record the access so that WarnAll() does not flag this node/attribute as
  // unparsed or unrequested.
  m_accessed = true;
  m_reqAttributes.insert(_name);

  T value;
  const int status = m_node->ToElement()->QueryValueAttribute(_name, &value);

  if(status == TIXML_WRONG_TYPE)
    throw ParseException(Where(), AttrWrongType(_name, _desc));
  else if(status == TIXML_NO_ATTRIBUTE) {
    // The attribute is absent: fatal if required, otherwise use the default.
    if(_req)
      throw ParseException(Where(), AttrMissing(_name, _desc));
    value = _default;
  }
  else if(status == TIXML_SUCCESS) {
    // A value was parsed; enforce the caller-supplied bounds.
    if(value < _min || value > _max)
      throw ParseException(Where(),
          AttrInvalidBounds(_name, _desc, _min, _max, value));
  }
  else
    throw RunTimeException(WHERE, "Logic shouldn't be able to reach this.");

  return value;
}
template <typename T>
std::string
XMLNode::
AttrInvalidBounds(const std::string& _name, const std::string& _desc,
    const T& _min, const T& _max, const T& _val) const {
  // Assemble a multi-line diagnostic identifying the attribute, its purpose,
  // the accepted range, and the offending value.
  std::ostringstream message;
  message << "Invalid value for attribute '" << _name << "'.";
  message << "\n\tAttribute description: " << _desc << ".";
  message << "\n\tValid range: [" << _min << ", " << _max << "]";
  message << "\n\tValue specified: " << _val;
  return message.str();
}
/*----------------------------------------------------------------------------*/
#endif
|
RusDavies/indigo | indigo_drivers/ccd_qsi/bin_externals/qsiapi-7.6.0/demo_src/ExternalTrigger.cpp | <reponame>RusDavies/indigo<gh_stars>10-100
/*****************************************************************************************
NAME
QSI API External Trigger Demo Application
DESCRIPTION
Simple QSI API External Trigger example
COPYRIGHT (C)
QSI (Quantum Scientific Imaging) 2012
REVISION HISTORY
DRC 08.04.12 Original Version
*****************************************************************************************/
#include "qsiapi.h"
#include <stdio.h>
#include <unistd.h>
#include <iostream>
#include <cmath>
#include <stdlib.h>
int main(int argc, char** argv)
{
QSICamera cam;
try
{
cam.put_UseStructuredExceptions(true);
cam.put_Connected(true);
}
catch (std::runtime_error &err)
{
std::cout << "Cannot connect to camera." << std::endl;
exit(1);
}
////////////////////////////////////////////////////////////////////////////
// Short Wait Trigger Mode
////////////////////////////////////////////////////////////////////////////
// Set Short Wait External Trigger (4 seconds max), Pos to Neg polarity
cam.EnableTriggerMode(QSICamera::ShortWait, QSICamera::HighToLow);
bool imageReady = false;
try
{
std::cout << "Start short wait." << std::endl;
cam.StartExposure(0.3, true);
// Short wait, so this will return with an image, or will timeout
cam.get_ImageReady(&imageReady);
while (!imageReady)
{
sleep(1);
cam.get_ImageReady(&imageReady);
}
long x;
cam.get_NumX(&x);
long y;
cam.get_NumY(&y);
long len = x * y;
unsigned short * pixels = new unsigned short[len];
cam.get_ImageArray(pixels);
//Process image
//Then clean up
delete [] pixels;
std::cout << "Short wait image complete." << std::endl;
}
catch (std::runtime_error &err)
{
// Timeout comes here
std::cout << "Short wait timeout." << std::endl;
}
// turn off external trigger mode
cam.CancelTriggerMode();
/////////////////////////////////////////////////////////////////////////
// Long Wait Trigger Mode
////////////////////////////////////////////////////////////////////////
cam.EnableTriggerMode(QSICamera::LongWait, QSICamera::LowToHigh);
try
{
std::cout << "Start long wait with cancel." << std::endl;
// Start a long wait exposure
cam.StartExposure(0.3, true);
// Sleep for 5 seconds as a demo
sleep(5);
//
// Demo cancelling of the pending trigger
// This would not normally be done...
cam.TerminatePendingTrigger();
std::cout << "Long wait pending conceled." << std::endl;
}
catch (std::runtime_error &err)
{
std::cout << "Long wait cancel exception." << err.what() << std::endl;
exit(1);
}
// Start a new exposure with long wait trigger.
// Note that the trigger mode remains in effect until canceled.
try
{
std::cout << "Start long wait for image." << std::endl;
cam.StartExposure(0.3, true);
cam.get_ImageReady(&imageReady);
while (!imageReady)
{
sleep(1);
cam.get_ImageReady(&imageReady);
}
long x;
cam.get_NumX(&x);
long y;
cam.get_NumY(&y);
long len = x * y;
unsigned short * pixels = new unsigned short[len];
cam.get_ImageArray(pixels);
// Process image
// Then clean up
delete [] pixels;
std::cout << "Long wait image complete." << std::endl;
}
catch (std::runtime_error &err)
{
std::cout << "Long wait exception." << err.what() << std::endl;
exit(1);
}
////////////////////////////////////////////////////////////////////
// Cancel Trigger Mode
////////////////////////////////////////////////////////////////////
cam.CancelTriggerMode();
cam.put_Connected(false);
std::cout << "Trigger test completed." << std::endl;
}
|
jayrulez/rbfx | Source/Tests/Scene/TrackedComponent.cpp | //
// Copyright (c) 2017-2021 the rbfx project.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
#include "../CommonUtils.h"
#include "../SceneUtils.h"
#include <Urho3D/Scene/TrackedComponent.h>
/// Minimal concrete registry used to exercise ReferencedComponentRegistryBase;
/// it is bound to TestTrackedComponent in its constructor.
class TestComponentRegistry : public ReferencedComponentRegistryBase
{
    URHO3D_OBJECT(TestComponentRegistry, ReferencedComponentRegistryBase);

public:
    explicit TestComponentRegistry(Context* context);
};
/// Tracked component under test. EnabledOnlyTag means the component is only
/// present in the registry's index while it (and its node) are enabled.
class TestTrackedComponent : public TrackedComponent<TestComponentRegistry, EnabledOnlyTag>
{
    URHO3D_OBJECT(TestTrackedComponent, ReferencedComponentBase);

public:
    using TrackedComponent<TestComponentRegistry, EnabledOnlyTag>::TrackedComponent;
};
/// Bind the registry to the component type it tracks.
TestComponentRegistry::TestComponentRegistry(Context* context)
    : ReferencedComponentRegistryBase(context, TestTrackedComponent::GetTypeStatic())
{
}
/// End-to-end check of registry indexing: component indices are compacted on
/// removal/disable, and component references encode a version in the high bits
/// (e.g. 0x01000001 after a disable/enable cycle of slot 1).
TEST_CASE("Tracked components are indexed in the registry")
{
    auto context = Tests::GetOrCreateContext(Tests::CreateCompleteContext);
    if (!context->IsReflected<TestComponentRegistry>())
        context->RegisterFactory<TestComponentRegistry>();
    if (!context->IsReflected<TestTrackedComponent>())
        context->RegisterFactory<TestTrackedComponent>();

    auto scene = MakeShared<Scene>(context);
    auto registry = scene->CreateComponent<TestComponentRegistry>();

    // Create tracked components
    auto node1 = scene->CreateChild("Node 1");
    auto component1 = node1->CreateComponent<TestTrackedComponent>();
    auto node2 = scene->CreateChild("Node 2");
    auto component2 = node2->CreateComponent<TestTrackedComponent>();
    auto node3 = scene->CreateChild("Node 3");
    auto component3 = node3->CreateComponent<TestTrackedComponent>();

    REQUIRE(component1->GetRegistry() == registry);
    REQUIRE(component2->GetRegistry() == registry);
    REQUIRE(component3->GetRegistry() == registry);

    REQUIRE(registry->GetNumTrackedComponents() == 3);
    REQUIRE(registry->GetTrackedComponentByIndex(0) == component1);
    REQUIRE(registry->GetTrackedComponentByIndex(1) == component2);
    REQUIRE(registry->GetTrackedComponentByIndex(2) == component3);
    REQUIRE(component1->GetIndexInArray() == 0);
    REQUIRE(component2->GetIndexInArray() == 1);
    REQUIRE(component3->GetIndexInArray() == 2);

    REQUIRE(registry->GetTrackedComponentByReference(InvalidComponentReference) == nullptr);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000001}) == component1);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000002}) == component2);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000003}) == component3);
    REQUIRE(component1->GetReference() == ComponentReference{0x00000001});
    REQUIRE(component2->GetReference() == ComponentReference{0x00000002});
    REQUIRE(component3->GetReference() == ComponentReference{0x00000003});

    // Remove tracked component.
    // Keep node2 alive past Remove() so component2's detached state can be checked.
    SharedPtr<Node> retainedNode2{node2};
    node2->Remove();

    REQUIRE(registry->GetNumTrackedComponents() == 2);
    REQUIRE(registry->GetTrackedComponentByIndex(0) == component1);
    REQUIRE(registry->GetTrackedComponentByIndex(1) == component3);
    REQUIRE(component1->GetIndexInArray() == 0);
    REQUIRE(component3->GetIndexInArray() == 1);
    REQUIRE(component2->GetIndexInArray() == M_MAX_UNSIGNED);

    REQUIRE(registry->GetTrackedComponentByReference(InvalidComponentReference) == nullptr);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000001}) == component1);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000003}) == component3);
    REQUIRE(component1->GetReference() == ComponentReference{0x00000001});
    REQUIRE(component2->GetReference() == InvalidComponentReference);
    REQUIRE(component3->GetReference() == ComponentReference{0x00000003});

    retainedNode2 = nullptr;
    component2 = nullptr;

    // Disable tracked component
    const auto node1Reference = component1->GetReference();
    node1->SetEnabled(false);

    REQUIRE(registry->GetNumTrackedComponents() == 1);
    REQUIRE(registry->GetTrackedComponentByIndex(0) == component3);
    REQUIRE(component3->GetIndexInArray() == 0);
    REQUIRE(component1->GetIndexInArray() == M_MAX_UNSIGNED);

    REQUIRE(registry->GetTrackedComponentByReference(InvalidComponentReference) == nullptr);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000003}) == component3);
    REQUIRE(component1->GetReference() == InvalidComponentReference);
    REQUIRE(component3->GetReference() == ComponentReference{0x00000003});

    // Enable tracked component
    component1->SetReference(node1Reference);
    node1->SetEnabled(true);

    REQUIRE(registry->GetNumTrackedComponents() == 2);
    REQUIRE(registry->GetTrackedComponentByIndex(0) == component3);
    REQUIRE(registry->GetTrackedComponentByIndex(1) == component1);
    REQUIRE(component3->GetIndexInArray() == 0);
    REQUIRE(component1->GetIndexInArray() == 1);

    REQUIRE(registry->GetTrackedComponentByReference(InvalidComponentReference) == nullptr);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000001}) == component1);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000003}) == component3);
    REQUIRE(component1->GetReference() == ComponentReference{0x00000001});
    REQUIRE(component3->GetReference() == ComponentReference{0x00000003});

    // Create disabled tracked component
    auto node4 = scene->CreateChild("Node 4");
    auto component4 = MakeShared<TestTrackedComponent>(context);
    component4->SetEnabled(false);
    node4->AddComponent(component4, 0, LOCAL);

    REQUIRE(registry->GetNumTrackedComponents() == 2);
    REQUIRE(registry->GetTrackedComponentByIndex(0) == component3);
    REQUIRE(registry->GetTrackedComponentByIndex(1) == component1);
    REQUIRE(component3->GetIndexInArray() == 0);
    REQUIRE(component1->GetIndexInArray() == 1);
    REQUIRE(component4->GetIndexInArray() == M_MAX_UNSIGNED);

    REQUIRE(registry->GetTrackedComponentByReference(InvalidComponentReference) == nullptr);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000001}) == component1);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000003}) == component3);
    REQUIRE(component1->GetReference() == ComponentReference{0x00000001});
    REQUIRE(component3->GetReference() == ComponentReference{0x00000003});

    // Remove registry
    registry->Remove();

    REQUIRE(component3->GetIndexInArray() == M_MAX_UNSIGNED);
    REQUIRE(component1->GetIndexInArray() == M_MAX_UNSIGNED);
    REQUIRE(component4->GetIndexInArray() == M_MAX_UNSIGNED);
    REQUIRE(component3->GetReference() == InvalidComponentReference);
    REQUIRE(component1->GetReference() == InvalidComponentReference);
    REQUIRE(component4->GetReference() == InvalidComponentReference);

    // Add registry
    registry = scene->CreateComponent<TestComponentRegistry>();

    REQUIRE(component1->GetIndexInArray() == 0);
    REQUIRE(component3->GetIndexInArray() == 1);
    REQUIRE(component4->GetIndexInArray() == M_MAX_UNSIGNED);

    REQUIRE(registry->GetTrackedComponentByReference(InvalidComponentReference) == nullptr);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000001}) == component1);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000002}) == component3);
    REQUIRE(component1->GetReference() == ComponentReference{0x00000001});
    REQUIRE(component3->GetReference() == ComponentReference{0x00000002});
    REQUIRE(component4->GetReference() == InvalidComponentReference);

    // Remove disabled tracked component
    node4->Remove();

    REQUIRE(registry->GetNumTrackedComponents() == 2);
    REQUIRE(registry->GetTrackedComponentByIndex(0) == component1);
    REQUIRE(registry->GetTrackedComponentByIndex(1) == component3);
    REQUIRE(component1->GetIndexInArray() == 0);
    REQUIRE(component3->GetIndexInArray() == 1);

    REQUIRE(registry->GetTrackedComponentByReference(InvalidComponentReference) == nullptr);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000001}) == component1);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000002}) == component3);
    REQUIRE(component1->GetReference() == ComponentReference{0x00000001});
    REQUIRE(component3->GetReference() == ComponentReference{0x00000002});
    REQUIRE(component4->GetReference() == InvalidComponentReference);

    // Disable and enable tracked component
    component1->SetEnabled(false);
    component1->SetEnabled(true);

    REQUIRE(component1->GetRegistry() == registry);
    REQUIRE(component3->GetRegistry() == registry);

    REQUIRE(registry->GetNumTrackedComponents() == 2);
    REQUIRE(registry->GetTrackedComponentByIndex(0) == component3);
    REQUIRE(registry->GetTrackedComponentByIndex(1) == component1);
    REQUIRE(component3->GetIndexInArray() == 0);
    REQUIRE(component1->GetIndexInArray() == 1);

    REQUIRE(registry->GetTrackedComponentByReference(InvalidComponentReference) == nullptr);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x01000001}) == component1);
    REQUIRE(registry->GetTrackedComponentByReference(ComponentReference{0x00000002}) == component3);
    REQUIRE(component1->GetReference() == ComponentReference{0x01000001});
    REQUIRE(component3->GetReference() == ComponentReference{0x00000002});
    REQUIRE(component4->GetReference() == InvalidComponentReference);
}
|
Orange-OpenSource/optisam-backend | equipment-service/pkg/repository/v1/dgraph/equipments_test.go | <filename>equipment-service/pkg/repository/v1/dgraph/equipments_test.go
// Copyright (C) 2019 Orange
//
// This software is distributed under the terms and conditions of the 'Apache License 2.0'
// license which can be found in the file 'License.txt' in this package distribution
// or at 'http://www.apache.org/licenses/LICENSE-2.0'.
package dgraph
import (
"context"
"encoding/json"
"errors"
"fmt"
v1 "optisam-backend/equipment-service/pkg/repository/v1"
"strings"
"testing"
"github.com/dgraph-io/dgo/v2/protos/api"
"github.com/stretchr/testify/assert"
)
// var deleteAll = &api.Value{
// Val: &api.Value_DefaultVal{
// DefaultVal: "_STAR_ALL",
// },
// }
// TestEquipmentRepository_CreateEquipmentType checks that creating an
// equipment type persists the type, its attributes, and the expected Dgraph
// schema (index/tokenizer per searchable attribute). Requires a live Dgraph
// behind the package-level dgClient.
// NOTE: the local struct field was renamed veryfy -> verify (typo fix only).
func TestEquipmentRepository_CreateEquipmentType(t *testing.T) {
	type args struct {
		ctx    context.Context
		eqType *v1.EquipmentType
		scopes []string
	}
	tests := []struct {
		name            string
		lr              *EquipmentRepository
		args            args
		setup           func() (*v1.EquipmentType, func() error, error)
		verify          func(repo *EquipmentRepository) (*v1.EquipmentType, error)
		wantSchemaNodes []*SchemaNode
		predicates      []string
		wantErr         bool
	}{
		{name: "success",
			lr: NewEquipmentRepository(dgClient),
			args: args{
				ctx: context.Background(),
			},
			setup: func() (*v1.EquipmentType, func() error, error) {
				// Create two placeholder nodes acting as parent type and data source.
				mu := &api.Mutation{
					CommitNow: true,
					Set: []*api.NQuad{
						&api.NQuad{
							Subject:     blankID("parent"),
							Predicate:   "metadata_parent",
							ObjectValue: stringObjectValue("eq_type_1"),
						},
						&api.NQuad{
							Subject:     blankID("data_source"),
							Predicate:   "metadata_source",
							ObjectValue: stringObjectValue("eq_type_1"),
						},
					},
				}
				assigned, err := dgClient.NewTxn().Mutate(context.Background(), mu)
				if err != nil {
					return nil, nil, err
				}
				parentID, ok := assigned.Uids["parent"]
				if !ok {
					return nil, nil, errors.New("cannot find parent id after mutation in setup")
				}
				sourceID, ok := assigned.Uids["data_source"]
				if !ok {
					return nil, nil, errors.New("cannot find source id after mutation in setup")
				}
				eqType := &v1.EquipmentType{
					Type:     "MyType",
					SourceID: sourceID,
					ParentID: parentID,
					Attributes: []*v1.Attribute{
						&v1.Attribute{
							Name:         "attr1",
							Type:         v1.DataTypeString,
							IsSearchable: true,
							IsIdentifier: true,
							IsDisplayed:  true,
							MappedTo:     "mapping_1",
						},
						&v1.Attribute{
							Name:         "attr2",
							Type:         v1.DataTypeInt,
							IsSearchable: true,
							MappedTo:     "mapping_2",
						},
						&v1.Attribute{
							Name:     "attr2.1",
							Type:     v1.DataTypeInt,
							MappedTo: "mapping_2.1",
						},
						&v1.Attribute{
							Name:         "attr3",
							Type:         v1.DataTypeFloat,
							IsSearchable: true,
							MappedTo:     "mapping_3",
						},
						&v1.Attribute{
							Name:     "attr3.1",
							Type:     v1.DataTypeFloat,
							MappedTo: "mapping_3.1",
						},
						&v1.Attribute{
							Name:               "attr4",
							Type:               v1.DataTypeString,
							IsParentIdentifier: true,
							IsDisplayed:        true,
							MappedTo:           "mapping_4",
						},
						&v1.Attribute{
							Name:         "attr4.1",
							Type:         v1.DataTypeString,
							IsSearchable: true,
							IsDisplayed:  true,
							MappedTo:     "mapping_4.1",
						},
						&v1.Attribute{
							Name:        "attr4.2",
							Type:        v1.DataTypeString,
							IsDisplayed: true,
							MappedTo:    "mapping_4.2",
						},
					},
				}
				return eqType, func() error {
					if err := deleteNode(parentID); err != nil {
						return err
					}
					if err := deleteNode(sourceID); err != nil {
						return err
					}
					return nil
				}, nil
			},
			verify: func(repo *EquipmentRepository) (*v1.EquipmentType, error) {
				eqType, err := repo.equipmentTypeByType(context.Background(), "MyType", []string{"scope1"})
				if err != nil {
					return nil, err
				}
				return eqType, nil
			},
			wantSchemaNodes: []*SchemaNode{
				&SchemaNode{
					Predicate: "equipment.MyType.attr2",
					Type:      "int",
					Index:     true,
					Tokenizer: []string{"int"},
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr2.1",
					Type:      "int",
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr3",
					Type:      "float",
					Index:     true,
					Tokenizer: []string{"float"},
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr3.1",
					Type:      "float",
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr4.1",
					Type:      "string",
					Index:     true,
					Tokenizer: []string{"trigram"},
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr4.2",
					Type:      "string",
				},
			},
			predicates: []string{
				"equipment.MyType.attr2",
				"equipment.MyType.attr2.1",
				"equipment.MyType.attr3",
				"equipment.MyType.attr3.1",
				"equipment.MyType.attr4.1",
				"equipment.MyType.attr4.2",
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			eqType, cleanup, err := tt.setup()
			if !assert.Empty(t, err, "error is not expect in setup") {
				return
			}
			defer func() {
				err := cleanup()
				assert.Empty(t, err, "error is not expect in cleanup")
			}()
			got, err := tt.lr.CreateEquipmentType(tt.args.ctx, eqType, tt.args.scopes)
			if (err != nil) != tt.wantErr {
				t.Errorf("EquipmentRepository.CreateEquipmentType() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			defer func() {
				err := deleteNode(got.ID)
				assert.Empty(t, err, "error is not expect in deleteNode")
			}()
			want, err := tt.verify(tt.lr)
			if !assert.Empty(t, err, "error is not expect in verify") {
				return
			}
			if !tt.wantErr {
				compareEquipmentType(t, "EquipmentType", want, got)
				sns, err := querySchema(tt.predicates...)
				if !assert.Emptyf(t, err, "error is not expect while quering schema for predicates: %v", tt.predicates) {
					return
				}
				compareSchemaNodeAll(t, "schemaNodes", tt.wantSchemaNodes, sns)
			}
		})
	}
}
// TestEquipmentRepository_EquipmentTypes seeds two equipment types under a
// shared parent/source and checks that EquipmentTypes returns them for the
// requested scope. Requires a live Dgraph behind the package-level dgClient.
func TestEquipmentRepository_EquipmentTypes(t *testing.T) {
	type args struct {
		ctx    context.Context
		scopes []string
	}
	tests := []struct {
		name    string
		lr      *EquipmentRepository
		args    args
		setup   func(repo *EquipmentRepository) ([]*v1.EquipmentType, func() error, error)
		wantErr bool
	}{
		{name: "success",
			lr: NewEquipmentRepository(dgClient),
			args: args{
				ctx:    context.Background(),
				scopes: []string{"scope1"},
			},
			setup: func(repo *EquipmentRepository) ([]*v1.EquipmentType, func() error, error) {
				// Create two placeholder nodes acting as parent type and data source.
				mu := &api.Mutation{
					CommitNow: true,
					Set: []*api.NQuad{
						&api.NQuad{
							Subject:     blankID("parent"),
							Predicate:   "metadata_parent",
							ObjectValue: stringObjectValue("eq_type_1"),
						},
						&api.NQuad{
							Subject:     blankID("data_source"),
							Predicate:   "metadata_source",
							ObjectValue: stringObjectValue("eq_type_1"),
						},
					},
				}
				assigned, err := dgClient.NewTxn().Mutate(context.Background(), mu)
				if err != nil {
					return nil, nil, err
				}
				parentID, ok := assigned.Uids["parent"]
				if !ok {
					return nil, nil, errors.New("cannot find parent id after mutation in setup")
				}
				sourceID, ok := assigned.Uids["data_source"]
				if !ok {
					return nil, nil, errors.New("cannot find source id after mutation in setup")
				}
				eqTypes := []*v1.EquipmentType{
					&v1.EquipmentType{
						Type:     "MyType1",
						SourceID: sourceID,
						ParentID: parentID,
						Scopes:   []string{"scope1"},
						Attributes: []*v1.Attribute{
							&v1.Attribute{
								Name:         "attr1",
								Type:         v1.DataTypeString,
								IsSearchable: true,
								IsIdentifier: true,
								IsDisplayed:  true,
								MappedTo:     "mapping_1",
							},
							&v1.Attribute{
								Name:               "attr2",
								Type:               v1.DataTypeString,
								IsSearchable:       false,
								IsParentIdentifier: true,
								IsDisplayed:        false,
								MappedTo:           "mapping_2",
							},
						},
					},
					&v1.EquipmentType{
						Type:     "MyType2",
						SourceID: sourceID,
						ParentID: parentID,
						Scopes:   []string{"scope1"},
						Attributes: []*v1.Attribute{
							&v1.Attribute{
								Name:         "attr1",
								Type:         v1.DataTypeString,
								IsSearchable: true,
								IsIdentifier: true,
								IsDisplayed:  true,
								MappedTo:     "mapping_1",
							},
						},
					},
				}
				for _, eqType := range eqTypes {
					_, err := repo.CreateEquipmentType(context.Background(), eqType, eqType.Scopes)
					if err != nil {
						fmt.Print(err)
						return nil, nil, err
					}
				}
				return eqTypes, func() error {
					return deleteNodes(parentID, sourceID, eqTypes[0].ID, eqTypes[1].ID)
				}, nil
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			want, cleanup, err := tt.setup(tt.lr)
			if !assert.Empty(t, err, "error is not expected in setup") {
				return
			}
			defer func() {
				err := cleanup()
				assert.Empty(t, err, "error is not expected in cleanup")
			}()
			got, err := tt.lr.EquipmentTypes(tt.args.ctx, tt.args.scopes)
			if (err != nil) != tt.wantErr {
				t.Errorf("EquipmentRepository.EquipmentTypes() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !tt.wantErr {
				compareEquipmentTypeAll(t, "EquipmentTypes", want, got)
			}
		})
	}
}
// TestEquipmentRepository_UpdateEquipmentType exercises UpdateEquipmentType
// against a live Dgraph instance. Each case seeds metadata nodes and one or
// more equipment types, applies an update request (new attributes, and in two
// cases a parent link), then verifies both the stored equipment type and the
// Dgraph schema predicates created for the new attributes.
func TestEquipmentRepository_UpdateEquipmentType(t *testing.T) {
	type args struct {
		ctx    context.Context
		id     string
		typ    string
		req    *v1.UpdateEquipmentRequest
		scopes []string
	}
	tests := []struct {
		name  string
		lr    *EquipmentRepository
		args  args
		// setup returns the created equipment type, the expected parent id
		// ("" when the parent is unchanged), a cleanup func, and an error.
		setup func() (*v1.EquipmentType, string, func() error, error)
		// NOTE(review): field name is a typo of "verify"; renaming would touch
		// every case below, so it is kept as-is here.
		veryfy func(repo *EquipmentRepository) (*v1.EquipmentType, error)
		//wantRetType []*v1.Attribute
		wantSchemaNodes []*SchemaNode
		predicates      []string
		wantErr         bool
	}{
		{name: "SUCCESS - no change in parent",
			lr: NewEquipmentRepository(dgClient),
			args: args{
				ctx: context.Background(),
				req: &v1.UpdateEquipmentRequest{
					Attr: []*v1.Attribute{
						&v1.Attribute{
							Name:               "attr4",
							Type:               1,
							IsIdentifier:       false,
							IsDisplayed:        true,
							IsSearchable:       true,
							IsParentIdentifier: false,
							MappedTo:           "mapping_4",
						},
						&v1.Attribute{
							Name:               "attr5",
							Type:               2,
							IsIdentifier:       false,
							IsDisplayed:        true,
							IsSearchable:       false,
							IsParentIdentifier: false,
							MappedTo:           "mapping_5",
						},
						&v1.Attribute{
							Name:         "attr6",
							Type:         v1.DataTypeFloat,
							IsSearchable: true,
							MappedTo:     "mapping_6",
						},
					},
				},
				scopes: []string{"scope1"},
			},
			setup: func() (*v1.EquipmentType, string, func() error, error) {
				// Seed a parent node and a data-source node for the type.
				mu := &api.Mutation{
					CommitNow: true,
					Set: []*api.NQuad{
						&api.NQuad{
							Subject:     blankID("parent"),
							Predicate:   "metadata_parent",
							ObjectValue: stringObjectValue("eq_type_1"),
						},
						&api.NQuad{
							Subject:     blankID("data_source"),
							Predicate:   "metadata_source",
							ObjectValue: stringObjectValue("eq_type_1"),
						},
					},
				}
				assigned, err := dgClient.NewTxn().Mutate(context.Background(), mu)
				if err != nil {
					return nil, "", nil, err
				}
				parentID, ok := assigned.Uids["parent"]
				if !ok {
					return nil, "", nil, errors.New("cannot find parent id after mutation in setup")
				}
				sourceID, ok := assigned.Uids["data_source"]
				if !ok {
					return nil, "", nil, errors.New("cannot find source id after mutation in setup")
				}
				eqType := &v1.EquipmentType{
					Type:     "MyType",
					SourceID: sourceID,
					ParentID: parentID,
					Scopes:   []string{"scope1"},
					Attributes: []*v1.Attribute{
						&v1.Attribute{
							Name:         "attr1",
							Type:         v1.DataTypeInt,
							IsSearchable: true,
							MappedTo:     "mapping_1",
						},
						&v1.Attribute{
							Name:               "attr2",
							Type:               v1.DataTypeString,
							IsParentIdentifier: true,
							IsDisplayed:        true,
							MappedTo:           "mapping_2",
						},
						&v1.Attribute{
							Name:         "attr3",
							Type:         v1.DataTypeString,
							IsSearchable: true,
							IsDisplayed:  true,
							MappedTo:     "mapping_3",
						},
					},
				}
				repo := NewEquipmentRepository(dgClient)
				retEqp, err := repo.CreateEquipmentType(context.Background(), eqType, eqType.Scopes)
				if err != nil {
					return nil, "", nil, errors.New("cannot create equipment in setup")
				}
				// Empty parent id: the update should leave the parent alone.
				return retEqp, "", func() error {
					return deleteNodes(parentID, sourceID, retEqp.ID)
				}, nil
			},
			veryfy: func(repo *EquipmentRepository) (*v1.EquipmentType, error) {
				eqType, err := repo.equipmentTypeByType(context.Background(), "MyType", []string{"scope1"})
				if err != nil {
					return nil, err
				}
				return eqType, nil
			},
			wantSchemaNodes: []*SchemaNode{
				&SchemaNode{
					Predicate: "equipment.MyType.attr1",
					Type:      "int",
					Index:     true,
					Tokenizer: []string{"int"},
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr3",
					Type:      "string",
					Index:     true,
					Tokenizer: []string{"trigram"},
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr4",
					Type:      "string",
					Index:     true,
					Tokenizer: []string{"trigram"},
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr5",
					Type:      "int",
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr6",
					Type:      "float",
					Index:     true,
					Tokenizer: []string{"float"},
				},
			},
			predicates: []string{
				"equipment.MyType.attr1",
				"equipment.MyType.attr3",
				"equipment.MyType.attr4",
				"equipment.MyType.attr5",
				"equipment.MyType.attr6",
			},
			wantErr: false,
		},
		{name: "SUCCESS - parent created ",
			lr: NewEquipmentRepository(dgClient),
			args: args{
				ctx: context.Background(),
				req: &v1.UpdateEquipmentRequest{
					Attr: []*v1.Attribute{
						&v1.Attribute{
							Name:               "attr3",
							Type:               v1.DataTypeString,
							IsParentIdentifier: true,
							IsDisplayed:        true,
							MappedTo:           "mapping_3",
						},
						&v1.Attribute{
							Name:               "attr4",
							Type:               v1.DataTypeInt,
							IsIdentifier:       false,
							IsDisplayed:        true,
							IsSearchable:       false,
							IsParentIdentifier: false,
							MappedTo:           "mapping_4",
						},
						&v1.Attribute{
							Name:         "attr5",
							Type:         v1.DataTypeFloat,
							IsSearchable: true,
							MappedTo:     "mapping_5",
						},
					},
				},
			},
			setup: func() (*v1.EquipmentType, string, func() error, error) {
				// No parent node is seeded here: the equipment type starts
				// without a parent and the update is expected to create one.
				mu := &api.Mutation{
					CommitNow: true,
					Set: []*api.NQuad{
						// &api.NQuad{
						// 	Subject:     blankID("parent"),
						// 	Predicate:   "metadata_parent",
						// 	ObjectValue: stringObjectValue("eq_type_1"),
						// },
						&api.NQuad{
							Subject:     blankID("data_source"),
							Predicate:   "metadata_source",
							ObjectValue: stringObjectValue("eq_type_1"),
						},
					},
				}
				assigned, err := dgClient.NewTxn().Mutate(context.Background(), mu)
				if err != nil {
					return nil, "", nil, err
				}
				// parentID, ok := assigned.Uids["parent"]
				// if !ok {
				// 	return nil, "", nil, errors.New("cannot find parent id after mutation in setup")
				// }
				sourceID, ok := assigned.Uids["data_source"]
				if !ok {
					return nil, "", nil, errors.New("cannot find source id after mutation in setup")
				}
				repo := NewEquipmentRepository(dgClient)
				// eqType1 ("MyType2") will become the parent of eqType2.
				eqType1 := &v1.EquipmentType{
					Type:     "MyType2",
					SourceID: sourceID,
					Attributes: []*v1.Attribute{
						&v1.Attribute{
							Name:         "attr1",
							Type:         v1.DataTypeInt,
							IsSearchable: true,
							MappedTo:     "mapping_1",
						},
						&v1.Attribute{
							Name:         "attr2",
							Type:         v1.DataTypeString,
							IsSearchable: true,
							IsDisplayed:  true,
							MappedTo:     "mapping_2",
						},
					},
					Scopes: []string{"scope1"},
				}
				equip1, err := repo.CreateEquipmentType(context.Background(), eqType1, eqType1.Scopes)
				if err != nil {
					return nil, "", nil, errors.New("cannot create equipment in setup")
				}
				eqType2 := &v1.EquipmentType{
					Type:     "MyType",
					SourceID: sourceID,
					Attributes: []*v1.Attribute{
						&v1.Attribute{
							Name:         "attr1",
							Type:         v1.DataTypeInt,
							IsSearchable: true,
							MappedTo:     "mapping_1",
						},
						&v1.Attribute{
							Name:         "attr2",
							Type:         v1.DataTypeString,
							IsSearchable: true,
							IsDisplayed:  true,
							MappedTo:     "mapping_2",
						},
					},
					Scopes: []string{"scope1"},
				}
				retEqp, err := repo.CreateEquipmentType(context.Background(), eqType2, eqType2.Scopes)
				if err != nil {
					return nil, "", nil, errors.New("cannot create equipment in setup")
				}
				return retEqp, equip1.ID, func() error {
					return deleteNodes(sourceID, equip1.ID, retEqp.ID)
				}, nil
			},
			veryfy: func(repo *EquipmentRepository) (*v1.EquipmentType, error) {
				eqType, err := repo.equipmentTypeByType(context.Background(), "MyType", []string{"scope1"})
				if err != nil {
					return nil, err
				}
				return eqType, nil
			},
			wantSchemaNodes: []*SchemaNode{
				&SchemaNode{
					Predicate: "equipment.MyType.attr1",
					Type:      "int",
					Index:     true,
					Tokenizer: []string{"int"},
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr2",
					Type:      "string",
					Index:     true,
					Tokenizer: []string{"trigram"},
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr4",
					Type:      "int",
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr5",
					Type:      "float",
					Index:     true,
					Tokenizer: []string{"float"},
				},
			},
			predicates: []string{
				"equipment.MyType.attr1",
				"equipment.MyType.attr2",
				"equipment.MyType.attr4",
				"equipment.MyType.attr5",
			},
			wantErr: false,
		},
		{name: "SUCCESS - parent updated ",
			lr: NewEquipmentRepository(dgClient),
			args: args{
				ctx: context.Background(),
				req: &v1.UpdateEquipmentRequest{
					Attr: []*v1.Attribute{
						&v1.Attribute{
							Name:               "attr3",
							Type:               v1.DataTypeString,
							IsParentIdentifier: true,
							IsDisplayed:        true,
							MappedTo:           "mapping_3",
						},
						&v1.Attribute{
							Name:               "attr4",
							Type:               v1.DataTypeInt,
							IsIdentifier:       false,
							IsDisplayed:        true,
							IsSearchable:       false,
							IsParentIdentifier: false,
							MappedTo:           "mapping_4",
						},
						&v1.Attribute{
							Name:         "attr5",
							Type:         v1.DataTypeFloat,
							IsSearchable: true,
							MappedTo:     "mapping_5",
						},
					},
				},
			},
			setup: func() (*v1.EquipmentType, string, func() error, error) {
				// Both a parent and a data-source node are seeded: the type
				// starts with parentID and the update moves it to equip1.
				mu := &api.Mutation{
					CommitNow: true,
					Set: []*api.NQuad{
						&api.NQuad{
							Subject:     blankID("parent"),
							Predicate:   "metadata_parent",
							ObjectValue: stringObjectValue("eq_type_1"),
						},
						&api.NQuad{
							Subject:     blankID("data_source"),
							Predicate:   "metadata_source",
							ObjectValue: stringObjectValue("eq_type_1"),
						},
					},
				}
				assigned, err := dgClient.NewTxn().Mutate(context.Background(), mu)
				if err != nil {
					return nil, "", nil, err
				}
				parentID, ok := assigned.Uids["parent"]
				if !ok {
					return nil, "", nil, errors.New("cannot find parent id after mutation in setup")
				}
				sourceID, ok := assigned.Uids["data_source"]
				if !ok {
					return nil, "", nil, errors.New("cannot find source id after mutation in setup")
				}
				repo := NewEquipmentRepository(dgClient)
				eqType1 := &v1.EquipmentType{
					Type:     "MyType2",
					SourceID: sourceID,
					Attributes: []*v1.Attribute{
						&v1.Attribute{
							Name:         "attr1",
							Type:         v1.DataTypeInt,
							IsSearchable: true,
							MappedTo:     "mapping_1",
						},
						&v1.Attribute{
							Name:         "attr2",
							Type:         v1.DataTypeString,
							IsSearchable: true,
							IsDisplayed:  true,
							MappedTo:     "mapping_2",
						},
					},
					Scopes: []string{"scope1"},
				}
				equip1, err := repo.CreateEquipmentType(context.Background(), eqType1, eqType1.Scopes)
				if err != nil {
					return nil, "", nil, errors.New("cannot create equipment in setup")
				}
				eqType2 := &v1.EquipmentType{
					Type:     "MyType",
					SourceID: sourceID,
					ParentID: parentID,
					Attributes: []*v1.Attribute{
						&v1.Attribute{
							Name:         "attr1",
							Type:         v1.DataTypeInt,
							IsSearchable: true,
							MappedTo:     "mapping_1",
						},
						&v1.Attribute{
							Name:         "attr2",
							Type:         v1.DataTypeString,
							IsSearchable: true,
							IsDisplayed:  true,
							MappedTo:     "mapping_2",
						},
					},
					Scopes: []string{"scope1"},
				}
				retEqp, err := repo.CreateEquipmentType(context.Background(), eqType2, eqType2.Scopes)
				if err != nil {
					return nil, "", nil, errors.New("cannot create equipment in setup")
				}
				return retEqp, equip1.ID, func() error {
					return deleteNodes(parentID, sourceID, equip1.ID, retEqp.ID)
				}, nil
			},
			veryfy: func(repo *EquipmentRepository) (*v1.EquipmentType, error) {
				eqType, err := repo.equipmentTypeByType(context.Background(), "MyType", []string{"scope1"})
				if err != nil {
					return nil, err
				}
				return eqType, nil
			},
			wantSchemaNodes: []*SchemaNode{
				&SchemaNode{
					Predicate: "equipment.MyType.attr1",
					Type:      "int",
					Index:     true,
					Tokenizer: []string{"int"},
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr2",
					Type:      "string",
					Index:     true,
					Tokenizer: []string{"trigram"},
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr4",
					Type:      "int",
				},
				&SchemaNode{
					Predicate: "equipment.MyType.attr5",
					Type:      "float",
					Index:     true,
					Tokenizer: []string{"float"},
				},
			},
			predicates: []string{
				"equipment.MyType.attr1",
				"equipment.MyType.attr2",
				"equipment.MyType.attr4",
				"equipment.MyType.attr5",
			},
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, parID, cleanup, err := tt.setup()
			if !assert.Empty(t, err, "error is not expect in setup") {
				return
			}
			defer func() {
				err := cleanup()
				assert.Empty(t, err, "error is not expect in cleanup")
			}()
			// Inject the parent id produced by setup into the request.
			tt.args.req.ParentID = parID
			gotRetType, err := tt.lr.UpdateEquipmentType(tt.args.ctx, got.ID, got.Type, tt.args.req, tt.args.scopes)
			if (err != nil) != tt.wantErr {
				t.Errorf("EquipmentRepository.UpdateEquipmentType() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			defer func() {
				err := deleteNode(got.ID)
				assert.Empty(t, err, "error is not expect in deleteNode")
			}()
			want, err := tt.veryfy(tt.lr)
			if !assert.Empty(t, err, "error is not expect in verify") {
				return
			}
			if !tt.wantErr {
				// The expected final state = created attributes + returned
				// (newly added) attributes, plus the parent if one was set.
				got.Attributes = append(got.Attributes, gotRetType...)
				if parID != "" {
					got.ParentID = parID
				}
				compareEquipmentType(t, "EquipmentType", want, got)
				sns, err := querySchema(tt.predicates...)
				if !assert.Emptyf(t, err, "error is not expect while quering schema for predicates: %v", tt.predicates) {
					return
				}
				compareSchemaNodeAll(t, "schemaNodes", tt.wantSchemaNodes, sns)
			}
		})
	}
}
// TestEquipmentRepository_EquipmentWithID creates an equipment type with a
// parent and data source, then checks that fetching it back by ID returns the
// same equipment type.
func TestEquipmentRepository_EquipmentWithID(t *testing.T) {
	type args struct {
		ctx    context.Context
		id     string
		scopes []string
	}
	tests := []struct {
		name            string
		lr              *EquipmentRepository
		args            args
		// setup returns the created equipment type, a cleanup func and error.
		setup           func() (*v1.EquipmentType, func() error, error)
		wantSchemaNodes []*SchemaNode
		wantErr         bool
	}{
		{name: "success",
			lr: NewEquipmentRepository(dgClient),
			args: args{
				ctx: context.Background(),
			},
			setup: func() (*v1.EquipmentType, func() error, error) {
				// TODO create two nodes for parent type and data source
				mu := &api.Mutation{
					CommitNow: true,
					Set: []*api.NQuad{
						&api.NQuad{
							Subject:     blankID("parent"),
							Predicate:   "metadata_parent",
							ObjectValue: stringObjectValue("eq_type_1"),
						},
						&api.NQuad{
							Subject:     blankID("data_source"),
							Predicate:   "metadata_source",
							ObjectValue: stringObjectValue("eq_type_1"),
						},
					},
				}
				assigned, err := dgClient.NewTxn().Mutate(context.Background(), mu)
				if err != nil {
					return nil, nil, err
				}
				parentID, ok := assigned.Uids["parent"]
				if !ok {
					return nil, nil, errors.New("cannot find parent id after mutation in setup")
				}
				sourceID, ok := assigned.Uids["data_source"]
				if !ok {
					return nil, nil, errors.New("cannot find source id after mutation in setup")
				}
				eqType := &v1.EquipmentType{
					Type:     "MyType",
					SourceID: sourceID,
					ParentID: parentID,
					Attributes: []*v1.Attribute{
						&v1.Attribute{
							Name:         "attr1",
							Type:         v1.DataTypeString,
							IsSearchable: true,
							IsIdentifier: true,
							IsDisplayed:  true,
							MappedTo:     "mapping_1",
						},
						&v1.Attribute{
							Name:         "attr2",
							Type:         v1.DataTypeInt,
							IsSearchable: true,
							MappedTo:     "mapping_2",
						},
						&v1.Attribute{
							Name:               "attr3",
							Type:               v1.DataTypeString,
							IsParentIdentifier: true,
							IsDisplayed:        true,
							MappedTo:           "mapping_3",
						},
						&v1.Attribute{
							Name:        "attr4",
							Type:        v1.DataTypeString,
							IsDisplayed: true,
							MappedTo:    "mapping_4",
						},
					},
				}
				repo := NewEquipmentRepository(dgClient)
				retEqp, err := repo.CreateEquipmentType(context.Background(), eqType, []string{})
				if err != nil {
					return nil, nil, errors.New("cannot create equipment in setup")
				}
				return retEqp, func() error {
					if err := deleteNode(parentID); err != nil {
						return err
					}
					if err := deleteNode(sourceID); err != nil {
						return err
					}
					return nil
				}, nil
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, cleanup, err := tt.setup()
			if !assert.Empty(t, err, "error is not expect in setup") {
				return
			}
			defer func() {
				err := cleanup()
				assert.Empty(t, err, "error is not expect in cleanup")
			}()
			defer func() {
				err := deleteNode(got.ID)
				assert.Empty(t, err, "error is not expect in deleteNode")
			}()
			// NOTE(review): "want" here is the value fetched from the store
			// and "got" is the value created in setup — naming is reversed
			// relative to convention, but the comparison is symmetric enough.
			want, err := tt.lr.EquipmentWithID(tt.args.ctx, got.ID, tt.args.scopes)
			if (err != nil) != tt.wantErr {
				t.Errorf("EquipmentRepository.EquipmentWithID() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !tt.wantErr {
				compareEquipmentType(t, "EquipmentType", want, got)
			}
		})
	}
}
// compareEquipmentTypeAll asserts that act has the same length as exp and
// compares the slices element-wise by index via compareEquipmentType.
func compareEquipmentTypeAll(t *testing.T, name string, exp []*v1.EquipmentType, act []*v1.EquipmentType) {
	// Fixed typo in the failure message ("elemnts" -> "elements").
	if !assert.Lenf(t, act, len(exp), "expected number of elements are: %d", len(exp)) {
		return
	}
	for i := range exp {
		compareEquipmentType(t, fmt.Sprintf("%s[%d]", name, i), exp[i], act[i])
	}
}
// compareEquipmentType asserts that act matches exp. When exp is nil, act is
// expected to be nil too. ID and ParentID are only compared when set on exp;
// Type and SourceID are always compared.
func compareEquipmentType(t *testing.T, name string, exp *v1.EquipmentType, act *v1.EquipmentType) {
	if exp == nil && act == nil {
		return
	}
	if exp == nil {
		// Fail and return: the original fell through after this assert and
		// dereferenced the nil exp below, panicking the test.
		assert.Nil(t, act, "equipment Type is expected to be nil")
		return
	}
	if !assert.NotNilf(t, act, "%s is expected not to be nil", name) {
		// Guard against a nil-pointer dereference on act below.
		return
	}
	if exp.ID != "" {
		assert.Equalf(t, exp.ID, act.ID, "%s.ID are not same", name)
	}
	if exp.ParentID != "" {
		assert.Equalf(t, exp.ParentID, act.ParentID, "%s.ParentID are not same", name)
	}
	assert.Equalf(t, exp.Type, act.Type, "%s.Type are not same", name)
	assert.Equalf(t, exp.SourceID, act.SourceID, "%s.SourceID are not same", name)
}
// compareAttributeAll asserts that act has the same length as exp and
// compares the attribute slices element-wise by index via compareAttribute.
func compareAttributeAll(t *testing.T, name string, exp []*v1.Attribute, act []*v1.Attribute) {
	// Fixed typo in the failure message ("elemnts" -> "elements").
	if !assert.Lenf(t, act, len(exp), "expected number of elements are: %d", len(exp)) {
		return
	}
	for i := range exp {
		compareAttribute(t, fmt.Sprintf("%s[%d]", name, i), exp[i], act[i])
	}
}
// compareAttribute asserts that act matches exp field by field. When exp is
// nil, act is expected to be nil too. ID is only compared when set on exp.
func compareAttribute(t *testing.T, name string, exp *v1.Attribute, act *v1.Attribute) {
	if exp == nil && act == nil {
		return
	}
	if exp == nil {
		// Fail and return: the original fell through after this assert and
		// dereferenced the nil exp below, panicking the test.
		assert.Nil(t, act, "attribute is expected to be nil")
		return
	}
	if !assert.NotNilf(t, act, "%s is expected not to be nil", name) {
		return
	}
	if exp.ID != "" {
		assert.Equalf(t, exp.ID, act.ID, "%s.ID are not same", name)
	}
	assert.Equalf(t, exp.Type, act.Type, "%s.Type are not same", name)
	assert.Equalf(t, exp.Name, act.Name, "%s.Name are not same", name)
	assert.Equalf(t, exp.IsIdentifier, act.IsIdentifier, "%s.IsIdentifier are not same", name)
	assert.Equalf(t, exp.IsDisplayed, act.IsDisplayed, "%s.IsDisplayed are not same", name)
	// Fixed copy-pasted "%s.Type" messages for the two assertions below.
	assert.Equalf(t, exp.IsSearchable, act.IsSearchable, "%s.IsSearchable are not same", name)
	assert.Equalf(t, exp.IsParentIdentifier, act.IsParentIdentifier, "%s.IsParentIdentifier are not same", name)
	assert.Equalf(t, exp.MappedTo, act.MappedTo, "%s.MappedTo are not same", name)
}
// compareSchemaNodeAll asserts that act contains exactly the schema nodes in
// exp, matching them by Predicate (order-independent).
func compareSchemaNodeAll(t *testing.T, name string, exp []*SchemaNode, act []*SchemaNode) {
	if !assert.Lenf(t, act, len(exp), "expected number of elements are: %d", len(exp)) {
		return
	}
	for i := range exp {
		actIdx := indexForPredicte(exp[i].Predicate, act)
		// Original bug: NotEqualf was called without the actual value (it
		// compared -1 against the format string, so it never failed), the if
		// body was empty, and act[actIdx] panicked when the predicate was
		// missing (actIdx == -1). Fail the assertion and skip instead.
		if !assert.NotEqualf(t, -1, actIdx, "%s.Predicate is not found in actual nodes", fmt.Sprintf("%s[%d]", name, i)) {
			continue
		}
		compareSchemaNode(t, fmt.Sprintf("%s[%d]", name, i), exp[i], act[actIdx])
	}
}
// indexForPredicte returns the position of the schema node whose Predicate
// equals the given predicate, or -1 when no such node exists.
// (The misspelled name is kept intentionally: callers refer to it.)
func indexForPredicte(predicate string, schemas []*SchemaNode) int {
	for idx, node := range schemas {
		if node.Predicate == predicate {
			return idx
		}
	}
	return -1
}
// compareSchemaNode asserts that a single Dgraph schema node matches the
// expected one across all recorded schema fields.
func compareSchemaNode(t *testing.T, name string, exp *SchemaNode, act *SchemaNode) {
	if exp == nil && act == nil {
		return
	}
	if exp == nil {
		// Fail and return: the original fell through after this assert and
		// dereferenced the nil exp below, panicking the test.
		assert.Nil(t, act, "attribute is expected to be nil")
		return
	}
	if !assert.NotNilf(t, act, "%s is expected not to be nil", name) {
		return
	}
	assert.Equalf(t, exp.Predicate, act.Predicate, "%s.Predicate are not same", name)
	assert.Equalf(t, exp.Type, act.Type, "%s.Type are not same", name)
	assert.Equalf(t, exp.Index, act.Index, "%s.Index are not same", name)
	assert.ElementsMatchf(t, exp.Tokenizer, act.Tokenizer, "%s.Tokenizer are not same", name)
	assert.Equalf(t, exp.Reverse, act.Reverse, "%s.Reverse are not same", name)
	assert.Equalf(t, exp.Count, act.Count, "%s.Count are not same", name)
	assert.Equalf(t, exp.List, act.List, "%s.List are not same", name)
	assert.Equalf(t, exp.Upsert, act.Upsert, "%s.Upsert are not same", name)
	assert.Equalf(t, exp.Lang, act.Lang, "%s.Lang are not same", name)
}
// SchemaNode mirrors one predicate entry of a Dgraph schema query response.
// It is used by the tests to assert on the index/tokenizer settings created
// for equipment-type attributes.
type SchemaNode struct {
	Predicate string   `json:"predicate,omitempty"` // fully-qualified predicate name
	Type      string   `json:"type,omitempty"`      // Dgraph scalar type, e.g. "int", "string", "float"
	Index     bool     `json:"index,omitempty"`     // whether the predicate is indexed
	Tokenizer []string `json:"tokenizer,omitempty"` // index tokenizers, e.g. "trigram", "int"
	Reverse   bool     `json:"reverse,omitempty"`
	Count     bool     `json:"count,omitempty"`
	List      bool     `json:"list,omitempty"`
	Upsert    bool     `json:"upsert,omitempty"`
	Lang      bool     `json:"lang,omitempty"`
}
// querySchema fetches the Dgraph schema entries for the given predicates and
// unmarshals them into SchemaNode values. Returns (nil, nil) when no
// predicates are requested.
func querySchema(predicates ...string) ([]*SchemaNode, error) {
	if len(predicates) == 0 {
		return nil, nil
	}
	// Builds a query of the form: schema (pred: [p1,p2]) { ... }.
	// Predicates are joined unquoted; Dgraph accepts bare predicate names.
	q := `
	schema (pred: [` + strings.Join(predicates, ",") + `]) {
		type
		index
		reverse
		tokenizer
		list
		count
		upsert
		lang
	}
	`
	// fmt.Println(q)
	resp, err := dgClient.NewTxn().Query(context.Background(), q)
	if err != nil {
		return nil, err
	}
	type data struct {
		Schema []*SchemaNode
	}
	d := &data{}
	if err := json.Unmarshal(resp.Json, d); err != nil {
		return nil, err
	}
	return d.Schema, nil
}
// deleteNodes removes every node in ids, stopping at (and returning) the
// first deletion error.
func deleteNodes(ids ...string) error {
	for i := 0; i < len(ids); i++ {
		err := deleteNode(ids[i])
		if err != nil {
			return err
		}
	}
	return nil
}
// deleteNode removes the node with the given uid via a JSON delete mutation,
// committing immediately.
func deleteNode(id string) error {
	payload := []byte(`{"uid": "` + id + `"}`)
	mu := &api.Mutation{
		CommitNow:  true,
		DeleteJson: payload,
	}
	// Drop all data held by the node.
	if _, err := dgClient.NewTxn().Mutate(context.Background(), mu); err != nil {
		return err
	}
	return nil
}
// attributeIndex returns the position of the attribute in actAttr whose Name
// equals expAttr.Name, or -1 when no such attribute exists.
func attributeIndex(expAttr *v1.Attribute, actAttr []*v1.Attribute) int {
	for idx, attr := range actAttr {
		if attr.Name == expAttr.Name {
			return idx
		}
	}
	return -1
}
// func TestEquipmentRepository_EquipmentWithID(t *testing.T) {
// type args struct {
// ctx context.Context
// id string
// scopes []string
// }
// tests := []struct {
// name string
// lr *EquipmentRepository
// args args
// setup func() (*v1.EquipmentType, func() error, error)
// wantSchemaNodes []*api.SchemaNode
// wantErr bool
// }{
// {name: "success",
// lr: NewEquipmentRepository(dgClient),
// args: args{
// ctx: context.Background(),
// },
// setup: func() (*v1.EquipmentType, func() error, error) {
// // TODO create two nodes for parent type and data source
// mu := &api.Mutation{
// CommitNow: true,
// Set: []*api.NQuad{
// &api.NQuad{
// Subject: blankID("parent"),
// Predicate: "metadata_parent",
// ObjectValue: stringObjectValue("eq_type_1"),
// },
// &api.NQuad{
// Subject: blankID("data_source"),
// Predicate: "metadata_source",
// ObjectValue: stringObjectValue("eq_type_1"),
// },
// },
// }
// assigned, err := dgClient.NewTxn().Mutate(context.Background(), mu)
// if err != nil {
// return nil, nil, err
// }
// parentID, ok := assigned.Uids["parent"]
// if !ok {
// return nil, nil, errors.New("cannot find parent id after mutation in setup")
// }
// sourceID, ok := assigned.Uids["data_source"]
// if !ok {
// return nil, nil, errors.New("cannot find source id after mutation in setup")
// }
// eqType := &v1.EquipmentType{
// Type: "MyType",
// SourceID: sourceID,
// ParentID: parentID,
// Attributes: []*v1.Attribute{
// &v1.Attribute{
// Name: "attr1",
// Type: v1.DataTypeString,
// IsSearchable: true,
// IsIdentifier: true,
// IsDisplayed: true,
// MappedTo: "mapping_1",
// },
// &v1.Attribute{
// Name: "attr2",
// Type: v1.DataTypeInt,
// IsSearchable: true,
// MappedTo: "mapping_2",
// },
// &v1.Attribute{
// Name: "attr3",
// Type: v1.DataTypeString,
// IsParentIdentifier: true,
// IsDisplayed: true,
// MappedTo: "mapping_3",
// },
// &v1.Attribute{
// Name: "attr4",
// Type: v1.DataTypeString,
// IsDisplayed: true,
// MappedTo: "mapping_4",
// },
// },
// }
// repo := NewEquipmentRepository(dgClient)
// retEqp, err := repo.CreateEquipmentType(context.Background(), eqType, []string{})
// if err != nil {
// return nil, nil, errors.New("cannot create equipment in setup")
// }
// return retEqp, func() error {
// if err := deleteNode(parentID); err != nil {
// return err
// }
// if err := deleteNode(sourceID); err != nil {
// return err
// }
// return nil
// }, nil
// },
// },
// }
// for _, tt := range tests {
// t.Run(tt.name, func(t *testing.T) {
// got, cleanup, err := tt.setup()
// if !assert.Empty(t, err, "error is not expect in setup") {
// return
// }
// defer func() {
// err := cleanup()
// assert.Empty(t, err, "error is not expect in cleanup")
// }()
// defer func() {
// err := deleteNode(got.ID)
// assert.Empty(t, err, "error is not expect in deleteNode")
// }()
// want, err := tt.lr.EquipmentWithID(tt.args.ctx, got.ID, tt.args.scopes)
// if (err != nil) != tt.wantErr {
// t.Errorf("EquipmentRepository.EquipmentWithID() error = %v, wantErr %v", err, tt.wantErr)
// return
// }
// if !tt.wantErr {
// compareEquipmentType(t, "EquipmentType", want, got)
// }
// })
// }
// }
// TestEquipmentRepository_EquipmentTypeChildren creates a two-level hierarchy
// (MyType1 -> MyType2) and verifies that EquipmentTypeChildren returns the
// child type for the given depth.
func TestEquipmentRepository_EquipmentTypeChildren(t *testing.T) {
	type args struct {
		ctx      context.Context
		eqTypeID string
		depth    int
		scopes   []string
	}
	tests := []struct {
		name string
		lr   *EquipmentRepository
		args args
		// setup returns the root type id, the expected children, a cleanup
		// func and an error.
		setup   func(repo *EquipmentRepository) (string, []*v1.EquipmentType, func() error, error)
		wantErr bool
	}{
		{name: "success",
			lr: NewEquipmentRepository(dgClient),
			args: args{
				ctx:    context.Background(),
				depth:  2,
				scopes: []string{"scope1"},
			},
			setup: func(repo *EquipmentRepository) (string, []*v1.EquipmentType, func() error, error) {
				// TODO create two nodes for parent type and data source
				mu := &api.Mutation{
					CommitNow: true,
					Set: []*api.NQuad{
						&api.NQuad{
							Subject:     blankID("parent"),
							Predicate:   "metadata_parent",
							ObjectValue: stringObjectValue("eq_type_1"),
						},
						&api.NQuad{
							Subject:     blankID("data_source"),
							Predicate:   "metadata_source",
							ObjectValue: stringObjectValue("eq_type_1"),
						},
					},
				}
				assigned, err := dgClient.NewTxn().Mutate(context.Background(), mu)
				if err != nil {
					return "", nil, nil, err
				}
				parentID, ok := assigned.Uids["parent"]
				if !ok {
					return "", nil, nil, errors.New("cannot find parent id after mutation in setup")
				}
				sourceID, ok := assigned.Uids["data_source"]
				if !ok {
					return "", nil, nil, errors.New("cannot find source id after mutation in setup")
				}
				eqTypes := []*v1.EquipmentType{
					&v1.EquipmentType{
						Type:     "MyType1",
						SourceID: sourceID,
						Scopes:   []string{"scope1"},
						Attributes: []*v1.Attribute{
							&v1.Attribute{
								Name:         "attr1",
								Type:         v1.DataTypeString,
								IsSearchable: true,
								IsIdentifier: true,
								IsDisplayed:  true,
								MappedTo:     "mapping_1",
							},
							&v1.Attribute{
								Name:         "attr2",
								Type:         v1.DataTypeString,
								IsSearchable: false,
								// IsParentIdentifier: true,
								IsDisplayed: false,
								MappedTo:    "mapping_2",
							},
						},
					},
					&v1.EquipmentType{
						Type:     "MyType2",
						SourceID: sourceID,
						Scopes:   []string{"scope1"},
						Attributes: []*v1.Attribute{
							&v1.Attribute{
								Name:               "attr1",
								Type:               v1.DataTypeString,
								IsSearchable:       true,
								IsIdentifier:       true,
								IsDisplayed:        true,
								IsParentIdentifier: true,
								MappedTo:           "mapping_1",
							},
						},
					},
				}
				// Create the root first, then link the child to it.
				eqType1, err := repo.CreateEquipmentType(context.Background(), eqTypes[0], eqTypes[0].Scopes)
				if err != nil {
					return "", nil, nil, err
				}
				eqTypes[1].ParentID = eqType1.ID
				eqType2, err := repo.CreateEquipmentType(context.Background(), eqTypes[1], eqTypes[1].Scopes)
				if err != nil {
					return "", nil, nil, err
				}
				return eqType1.ID, []*v1.EquipmentType{eqTypes[1]}, func() error {
					return deleteNodes(parentID, sourceID, eqType1.ID, eqType2.ID)
				}, nil
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			eqTypeID, want, cleanup, err := tt.setup(tt.lr)
			if !assert.Empty(t, err, "error is not expected in setup") {
				return
			}
			defer func() {
				err := cleanup()
				assert.Empty(t, err, "error is not expected in cleanup")
			}()
			got, err := tt.lr.EquipmentTypeChildren(tt.args.ctx, eqTypeID, tt.args.depth, tt.args.scopes)
			if (err != nil) != tt.wantErr {
				t.Errorf("EquipmentRepository.EquipmentTypeChildren() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !tt.wantErr {
				compareEquipmentTypeAll(t, "EquipmentRepository.EquipmentTypeChildren()", want, got)
			}
		})
	}
}
// TestEquipmentRepository_DeleteEquipmentType creates two equipment types,
// deletes one by type name + scope, and verifies only the other remains.
func TestEquipmentRepository_DeleteEquipmentType(t *testing.T) {
	type args struct {
		ctx    context.Context
		eqType string
		scope  string
	}
	tests := []struct {
		name string
		lr   *EquipmentRepository
		// setup returns the equipment types expected to survive the delete,
		// a cleanup func and an error.
		setup  func(repo *EquipmentRepository) ([]*v1.EquipmentType, func() error, error)
		verify func(repo *EquipmentRepository) ([]*v1.EquipmentType, error)
		args    args
		wantErr bool
	}{
		{name: "success",
			lr: NewEquipmentRepository(dgClient),
			args: args{
				ctx:    context.Background(),
				eqType: "MyType1",
				scope:  "scope1",
			},
			setup: func(repo *EquipmentRepository) ([]*v1.EquipmentType, func() error, error) {
				// NOTE(review): the blank-node labels below are "parent" and
				// "data_source", but the Uids lookups further down use
				// "parent1"/"data_source1"/"parent2"/"data_source2" — these
				// look inconsistent and would make the !ok branches fire;
				// confirm against blankID's behavior.
				mu := &api.Mutation{
					CommitNow: true,
					Set: []*api.NQuad{
						{
							Subject:     blankID("parent"),
							Predicate:   "metadata_parent",
							ObjectValue: stringObjectValue("parent1"),
						},
						{
							Subject:     blankID("data_source"),
							Predicate:   "metadata_source",
							ObjectValue: stringObjectValue("data_source1"),
						},
						{
							Subject:     blankID("parent"),
							Predicate:   "metadata_parent",
							ObjectValue: stringObjectValue("parent2"),
						},
						{
							Subject:     blankID("data_source"),
							Predicate:   "metadata_source",
							ObjectValue: stringObjectValue("data_source2"),
						},
					},
				}
				assigned, err := dgClient.NewTxn().Mutate(context.Background(), mu)
				if err != nil {
					return nil, nil, err
				}
				parentID1, ok := assigned.Uids["parent1"]
				if !ok {
					return nil, nil, errors.New("cannot find parent id after mutation in setup")
				}
				sourceID1, ok := assigned.Uids["data_source1"]
				if !ok {
					return nil, nil, errors.New("cannot find source id after mutation in setup")
				}
				parentID2, ok := assigned.Uids["parent2"]
				if !ok {
					return nil, nil, errors.New("cannot find parent id after mutation in setup")
				}
				sourceID2, ok := assigned.Uids["data_source2"]
				if !ok {
					return nil, nil, errors.New("cannot find source id after mutation in setup")
				}
				eqTypes := []*v1.EquipmentType{
					{
						Type:     "MyType1",
						SourceID: sourceID1,
						ParentID: parentID1,
						Scopes:   []string{"scope1"},
						Attributes: []*v1.Attribute{
							{
								Name:         "attr1",
								Type:         v1.DataTypeString,
								IsSearchable: true,
								IsIdentifier: true,
								IsDisplayed:  true,
								MappedTo:     "mapping_1",
							},
							{
								Name:               "attr2",
								Type:               v1.DataTypeString,
								IsSearchable:       false,
								IsParentIdentifier: true,
								IsDisplayed:        false,
								MappedTo:           "mapping_2",
							},
						},
					},
					{
						Type:     "MyType2",
						SourceID: sourceID2,
						ParentID: parentID2,
						Scopes:   []string{"scope1"},
						Attributes: []*v1.Attribute{
							{
								Name:         "attr1",
								Type:         v1.DataTypeString,
								IsSearchable: true,
								IsIdentifier: true,
								IsDisplayed:  true,
								MappedTo:     "mapping_1",
							},
						},
					},
				}
				for _, eqType := range eqTypes {
					_, err := repo.CreateEquipmentType(context.Background(), eqType, eqType.Scopes)
					if err != nil {
						fmt.Print(err)
						return nil, nil, err
					}
				}
				// MyType1 (index 0) is deleted by the test; only MyType2
				// should remain.
				return eqTypes[1:], func() error {
					return deleteNodes(parentID1, sourceID1, parentID2, sourceID2, eqTypes[0].ID, eqTypes[1].ID)
				}, nil
			},
			verify: func(repo *EquipmentRepository) ([]*v1.EquipmentType, error) {
				eqTypes, err := repo.EquipmentTypes(context.Background(), []string{"scope1"})
				if err != nil {
					return nil, err
				}
				return eqTypes, nil
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			wantEqTypes, cleanup, err := tt.setup(tt.lr)
			if !assert.Empty(t, err, "error is not expected in setup") {
				return
			}
			defer func() {
				err := cleanup()
				assert.Empty(t, err, "error is not expected in cleanup")
			}()
			if err := tt.lr.DeleteEquipmentType(tt.args.ctx, tt.args.eqType, tt.args.scope); (err != nil) != tt.wantErr {
				t.Errorf("EquipmentRepository.DeleteEquipmentType() error = %v, wantErr %v", err, tt.wantErr)
			}
			if !tt.wantErr {
				actEqTypes, err := tt.verify(tt.lr)
				assert.Empty(t, err, "error is not expected in verify")
				compareEquipmentTypeAll(t, "DeleteEquipmentType", wantEqTypes, actEqTypes)
			}
		})
	}
}
|
khawarhasham/ZoP-App | app/src/main/java/fi/aalto/legroup/zop/playback/AnnotationEditor.java | <reponame>khawarhasham/ZoP-App
package fi.aalto.legroup.zop.playback;
import android.graphics.PointF;
import fi.aalto.legroup.zop.entities.Annotation;
/**
 * Editor operations for video annotations: creating, editing and moving an
 * annotation at a given point on the playback surface.
 */
public interface AnnotationEditor {

    /**
     * Prompt the user to create a new annotation at the specified position.
     *
     * @param position Position for the annotation
     */
    void createAnnotation(PointF position);

    /**
     * Prompt the user to edit an annotation.
     *
     * @param annotation Annotation to edit
     */
    void editAnnotation(Annotation annotation);

    /**
     * Move an annotation to a new position.
     *
     * @param annotation Annotation in question
     * @param position   New position for the annotation
     */
    void moveAnnotation(Annotation annotation, PointF position);
}
|
bug00r/irc-bot | Context/src/de/bug0r/bot/context/response/impl/StringResponse.java | <reponame>bug00r/irc-bot
package de.bug0r.bot.context.response.impl;
import de.bug0r.bot.context.response.ContextResponse;
/**
 * A {@link ContextResponse} that wraps a plain string payload.
 * Instances are immutable and therefore thread-safe.
 */
public class StringResponse implements ContextResponse {

    /** The wrapped response text; never {@code null}. */
    private final String response;

    /**
     * Creates a response wrapping the given string.
     *
     * @param response the response text; must not be {@code null}
     * @throws NullPointerException if {@code response} is {@code null}
     */
    public StringResponse(String response) {
        if (response == null) {
            throw new NullPointerException("no response string given");
        }
        this.response = response;
    }

    /** Returns the wrapped string. */
    @Override
    public Object getContent() {
        return response;
    }
}
|
C4Coin/py-fhm-evm | eth/beacon/genesis_helpers.py | from typing import (
List,
TYPE_CHECKING,
)
from eth_typing import (
Hash32,
)
from eth.constants import (
ZERO_HASH32,
)
from eth.beacon.config import BeaconConfig # noqa: F401
from eth.beacon.types.active_state import ActiveState
from eth.beacon.types.block import BaseBeaconBlock
from eth.beacon.types.crosslink_record import CrosslinkRecord
from eth.beacon.types.crystallized_state import CrystallizedState
from eth.beacon.helpers import (
get_new_shuffling,
)
if TYPE_CHECKING:
from eth.beacon.types.validator_record import ValidatorRecord # noqa: F401
def get_genesis_active_state(beacon_config: 'BeaconConfig') -> ActiveState:
    """Build the genesis ``ActiveState``: no pending attestations and a
    recent-block-hash history of ``2 * cycle_length`` zero hashes."""
    history_length = 2 * beacon_config.cycle_length
    return ActiveState(
        pending_attestations=[],
        recent_block_hashes=[ZERO_HASH32 for _ in range(history_length)],
    )
def get_genesis_crystallized_state(
        validators: List['ValidatorRecord'],
        init_shuffling_seed: Hash32,
        beacon_config: 'BeaconConfig') -> CrystallizedState:
    """Build the genesis ``CrystallizedState`` for the given validator set.

    Shuffles the validators into shard committees using
    ``init_shuffling_seed``, duplicates the schedule to cover two cycles, and
    initializes every crosslink record to dynasty/slot 0 with a zero hash.
    """
    current_dynasty = 1
    crosslinking_start_shard = 0
    shard_and_committee_for_slots = get_new_shuffling(
        init_shuffling_seed,
        validators,
        current_dynasty,
        crosslinking_start_shard,
        beacon_config=beacon_config,
    )
    # concatenate with itself to span 2*CYCLE_LENGTH
    shard_and_committee_for_slots = shard_and_committee_for_slots + shard_and_committee_for_slots
    return CrystallizedState(
        validators=validators,
        last_state_recalc=0,
        shard_and_committee_for_slots=shard_and_committee_for_slots,
        last_justified_slot=0,
        justified_streak=0,
        last_finalized_slot=0,
        current_dynasty=current_dynasty,
        # One empty crosslink record per shard.
        crosslink_records=[
            CrosslinkRecord(
                dynasty=0,
                slot=0,
                hash=ZERO_HASH32,
            )
            for _ in range(beacon_config.shard_count)
        ],
        dynasty_seed=init_shuffling_seed,
        dynasty_start=0,
    )
def get_genesis_block(active_state_root: Hash32,
                      crystallized_state_root: Hash32) -> BaseBeaconBlock:
    """Return the slot-0 beacon block anchored to the given state roots.

    All hash-valued fields other than the state roots are the zero hash, and
    the block carries no attestations.
    """
    fields = {
        'parent_hash': ZERO_HASH32,
        'slot_number': 0,
        'randao_reveal': ZERO_HASH32,
        'attestations': [],
        'pow_chain_ref': ZERO_HASH32,
        'active_state_root': active_state_root,
        'crystallized_state_root': crystallized_state_root,
    }
    return BaseBeaconBlock(**fields)
|
DXCyber409/AndroidNativeEmulator | androidemu/native/hooks.py | import logging
from androidemu.hooker import Hooker
from androidemu.native.memory import NativeMemory
from androidemu.java.helpers.native_method import native_method
from androidemu.utils import memory_helpers
from unicorn import arm_const
from unicorn import Uc
logger = logging.getLogger(__name__)
class NativeHooks:
    """
    Installs Python-side replacements for common libc / linker symbols
    (dlopen, dlsym, malloc, ...) so emulated native code can call them.

    :type memory NativeMemory
    :type modules Modules
    :type hooker Hooker
    """

    def __init__(self, emu, memory, modules, hooker):
        self._module_mgr = modules
        self._emu = emu
        self._memory = memory
        self.atexit = []

        # `+ 1` sets the Thumb bit on each generated hook address.
        modules.add_symbol_hook('__system_property_get', hooker.write_function(self.system_property_get) + 1)
        modules.add_symbol_hook('dlopen', hooker.write_function(self.mydlopen) + 1)
        modules.add_symbol_hook('pthread_create', hooker.write_function(self.pass_hook("pthread_create")) + 1)
        modules.add_symbol_hook('pthread_join', hooker.write_function(self.nop('pthread_join')) + 1)
        modules.add_symbol_hook('vfprintf', hooker.write_function(self.nop('vfprintf')) + 1)
        modules.add_symbol_hook('fprintf', hooker.write_function(self.pass_hook('fprintf')) + 1)
        modules.add_symbol_hook('cacheflush', hooker.write_function(self.pass_hook('cacheflush')) + 1)
        modules.add_symbol_hook('dladdr', hooker.write_function(self.dladdr) + 1)
        modules.add_symbol_hook('dlsym', hooker.write_function(self.dlsym) + 1)
        modules.add_symbol_hook('__android_log_print', hooker.write_function(self.__android_log_print) + 1)
        # memory
        modules.add_symbol_hook('malloc', hooker.write_function(self.malloc) + 1)
        modules.add_symbol_hook('free', hooker.write_function(self.free) + 1)
        modules.add_symbol_hook('calloc', hooker.write_function(self.calloc) + 1)
        # others
        modules.add_symbol_hook('dlerror', hooker.write_function(self.dlerror) + 1)

    def pass_hook(self, name):
        """Return a hook that logs the call (with its first four args) and
        silently succeeds. Used for symbols that are safe to ignore."""
        @native_method
        def nop_inside(emu, p1, p2, p3, p4):
            logger.info('Symbol hook not implemented %s passed %x %x %x %x', name, p1, p2, p3, p4)
        return nop_inside

    @native_method
    def calloc(self, mu, num, size):
        """Allocate ``num * size`` bytes and zero-fill them."""
        logger.info("calloc(%d,%d)", num, size)
        addr = self._memory.allocate(num * size)
        # bytes(n) is n zero bytes, matching calloc's zero-initialization.
        mu.mem_write(addr, bytes(num * size))
        return addr

    @native_method
    def dlerror(self, mu):
        """Return a pointer to a placeholder dlerror message string."""
        logger.info("dlerror")
        data = 'dlerror handler...emu,...,'
        addr = self._memory.allocate(len(data))
        memory_helpers.write_utf8(mu, addr, data)
        return addr

    @native_method
    def malloc(self, mu, malloc_len):
        """Allocate ``malloc_len`` bytes from emulated native memory."""
        logger.info("malloc(%d)", malloc_len)
        return self._memory.allocate(malloc_len)

    @native_method
    def free(self, mu, addr):
        """No-op free; emulated allocations are never reclaimed."""
        logger.info("free")
        return 1

    @native_method
    def __android_log_print(self, uc, fmt, args):
        # Intentionally silent; enable the line below to see native logs.
        pass
        # logger.info(fmt % args)

    @native_method
    def dlsym(self, uc, handle, name_ptr):
        """Resolve ``name`` across all loaded modules (``handle`` ignored).

        Raises RuntimeError (after stopping emulation) when unresolved.
        """
        name = memory_helpers.read_utf8(uc, name_ptr)
        logger.debug("Called dlsym(0x%x, %s)", handle, name)
        for mod in self._module_mgr.modules:
            sym = mod.find_symbol(name)
            if sym is not None:
                return sym.address
        lr = uc.reg_read(arm_const.UC_ARM_REG_LR)
        # Stop emulation before raising; the original raised first, which
        # left its emu_stop() call unreachable.
        uc.emu_stop()
        raise RuntimeError("dlsym(0x%x, %s) Not found lr:%x" % (handle, name, lr))

    @native_method
    def system_property_get(self, uc, name_ptr, buf_ptr):
        """Copy the emulator's value for system property ``name`` into
        ``buf_ptr``; warn (but don't fail) on unknown properties."""
        name = memory_helpers.read_utf8(uc, name_ptr)
        logger.debug("Called __system_property_get(%s, 0x%x)", name, buf_ptr)

        if name in self._emu.system_properties:
            memory_helpers.write_utf8(uc, buf_ptr, self._emu.system_properties[name])
        else:
            logger.warning('%s was not found in system_properties dictionary.', name)
            # raise ValueError('%s was not found in system_properties dictionary.' % name)

        return None

    @native_method
    def mydlopen(self, uc, path):
        """Return the base address of an already-loaded module matching
        ``path`` (by basename or exact name); liblog.so gets a fake handle."""
        path = memory_helpers.read_utf8(uc, path)
        logger.debug("Called dlopen(%s)", path)
        for mod in self._module_mgr.modules:
            if mod.filename.split('/')[-1] == path.split('/')[-1]:
                return mod.base
            elif mod.filename.split('/')[-1] == path:
                return mod.base
        if path == 'liblog.so':
            # Fake, non-zero handle; liblog calls are stubbed elsewhere.
            return 0x4
        raise RuntimeError("[dlopen] %s was not loaded!" % path)

    @native_method
    def dladdr(self, uc, addr, info):
        """Fill a Dl_info struct at ``info`` for ``addr``; returns 1 on
        success, None (0) when no module contains the address."""
        nm = self._emu.native_memory
        if addr == 0:
            # NULL address: resolve the caller's current PC instead.
            addr = uc.reg_read(arm_const.UC_ARM_REG_PC)
        for mod in self._module_mgr.modules:
            if mod.base <= addr < mod.base + mod.size:
                dli_fname = nm.allocate(len(mod.filename) + 1)
                memory_helpers.write_utf8(uc, dli_fname, mod.filename + '\x00')
                # dli_sname / dli_saddr are left NULL.
                memory_helpers.write_uints(uc, info, [dli_fname, mod.base, 0, 0])
                return 1

    def nop(self, name):
        """Return a hook that raises NotImplementedError when the emulated
        code actually calls the symbol."""
        @native_method
        def nop_inside(emu):
            raise NotImplementedError('Symbol hook not implemented %s' % name)
        return nop_inside
|
shwogud/mno-enterprise | api/spec/controllers/mno_enterprise/jpi/v1/admin/invites_controller_spec.rb | <filename>api/spec/controllers/mno_enterprise/jpi/v1/admin/invites_controller_spec.rb<gh_stars>1-10
require 'rails_helper'
module MnoEnterprise
  # Spec for the admin JPI v1 invites endpoint: POST #create should re-send
  # the organization invite email for both existing and brand-new users.
  RSpec.describe Jpi::V1::Admin::InvitesController do
    include MnoEnterprise::TestingSupport::SharedExamples::JpiV1Admin

    routes { MnoEnterprise::Engine.routes }
    before { request.env['HTTP_ACCEPT'] = 'application/json' }

    #===============================================
    # Assignments
    #===============================================
    # Stub user and user call
    let(:user) { build(:user, admin_role: 'admin') }
    before do
      api_stub_for(get: "/users/#{user.id}", response: from_api(user))
      sign_in user
    end

    let(:organization) { FactoryGirl.build(:organization) }
    let(:invitee) { FactoryGirl.build(:user) }
    let(:invite) { FactoryGirl.build(:org_invite, user: invitee, organization: organization, status: 'staged') }

    # Stub ActionMailer
    let(:message_delivery) { instance_double(ActionMailer::MessageDelivery) }
    before { allow(message_delivery).to receive(:deliver_later).with(no_args) }

    # API stubs
    before do
      api_stub_for(get: "/organizations/#{organization.id}", response: from_api(organization))
      api_stub_for(get: "/organizations/#{organization.id}/org_invites?filter[status.in][]=pending&filter[status.in][]=staged&filter[status.in][]=accepted&filter[user_id]=#{invitee.id}", response: from_api([invite]))
      # Resolve User.find against the in-memory stubs declared above.
      allow(MnoEnterprise::User).to receive(:find) do |user_id|
        case user_id.to_i
        when user.id then user
        when invitee.id then invitee
        end
      end
      api_stub_for(put: "/org_invites/#{invite.id}", response: from_api(invite))
    end

    # unconfirmed
    describe 'POST #create' do
      subject { post :create, user_id: invitee.id, organization_id: organization.id }

      before { allow(SystemNotificationMailer).to receive(:organization_invite).with(invite).and_return(message_delivery) }

      it_behaves_like 'a jpi v1 admin action'

      context 'existing user' do
        it 'sends the invitation email' do
          expect(SystemNotificationMailer).to receive(:organization_invite).with(invite).and_return(message_delivery)
          subject
          expect(response).to be_success
        end
      end

      context 'new user' do
        # A nil confirmed_at marks the invitee as a not-yet-confirmed user.
        before { invitee.confirmed_at = nil }

        it 'sends organization invite to new user' do
          expect(SystemNotificationMailer).to receive(:organization_invite).with(invite).and_return(message_delivery)
          subject
          expect(response).to be_success
        end
      end
    end
  end
end
|
ianrae/dnallang | src/main/java/org/dnal/core/repository/MockRepositoryFactory.java | <reponame>ianrae/dnallang
package org.dnal.core.repository;
import org.dnal.core.DType;
/**
 * {@link RepositoryFactory} used for testing: always hands out an
 * in-memory {@link MockRepository} instead of a real backing store.
 */
public class MockRepositoryFactory implements RepositoryFactory {

    /** Creates a new {@link MockRepository} bound to the given type. */
    @Override
    public Repository createFor(DType type) {
        return new MockRepository(type);
    }
}
|
libpronet/libpronet | src/pronet/pro_rtp/rtp_port_allocator.cpp | /*
* Copyright (C) 2018-2019 <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License"),
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file is part of LibProNet (https://github.com/libpronet/libpronet)
*/
#include "rtp_port_allocator.h"
#include "../pro_shared/pro_shared.h"
#include "../pro_util/pro_memory_pool.h"
#include "../pro_util/pro_thread_mutex.h"
#include "../pro_util/pro_z.h"
/////////////////////////////////////////////////////////////////////////////
////
#define DEFAULT_MIN_PORT 3004
#define DEFAULT_MAX_PORT 9999
/////////////////////////////////////////////////////////////////////////////
////
/*
 * Initializes the allocator with the default port range
 * [DEFAULT_MIN_PORT, DEFAULT_MAX_PORT] and a randomized starting
 * iterator so separate processes do not all allocate the same ports.
 */
CRtpPortAllocator::CRtpPortAllocator()
{
    m_portBase = DEFAULT_MIN_PORT;
    m_portSpan = DEFAULT_MAX_PORT - DEFAULT_MIN_PORT;
    m_portItr  = (unsigned short)(ProRand_0_1() * m_portSpan);
}
/*
 * Replaces the allocation range with [minPort, maxPort] and re-randomizes
 * the iterator. Returns false for invalid input (zero ports or
 * minPort > maxPort).
 *
 * NOTE(review): minPort == maxPort is accepted and yields m_portSpan == 0;
 * any user of "m_portItr % m_portSpan" must guard that case.
 */
bool
CRtpPortAllocator::SetPortRange(unsigned short minPort,
                                unsigned short maxPort)
{
    if (minPort == 0 || maxPort == 0 || minPort > maxPort)
    {
        return (false);
    }

    m_lock.Lock();
    m_portBase = minPort;
    m_portSpan = maxPort - minPort;
    m_portItr  = (unsigned short)(ProRand_0_1() * m_portSpan);
    m_lock.Unlock();

    return (true);
}
/*
 * Returns the currently configured [minPort, maxPort] range.
 * Reads base/span under the lock to stay consistent with SetPortRange().
 */
void
CRtpPortAllocator::GetPortRange(unsigned short& minPort,
                                unsigned short& maxPort) const
{
    m_lock.Lock();
    minPort = m_portBase;
    maxPort = m_portBase + m_portSpan;
    m_lock.Unlock();
}
/*
 * Allocates the next port from the configured range.
 *
 * When 'rfc' is true the returned port is forced to be even (RFC 3550
 * convention: even port for RTP, the next odd port for RTCP) and the
 * iterator advances by 2 so consecutive calls yield distinct pairs.
 */
unsigned short
CRtpPortAllocator::AllocPort(bool rfc)
{
    m_lock.Lock();

    /*
     * Guard the degenerate range: SetPortRange() accepts
     * minPort == maxPort, which makes m_portSpan 0, and the original
     * "m_portItr % m_portSpan" was undefined for a zero divisor.
     */
    unsigned short port = m_portBase;
    if (m_portSpan > 0)
    {
        port = m_portBase + m_portItr % m_portSpan;
    }
    m_portItr += rfc ? 2 : 1;

    m_lock.Unlock();

    if (rfc && port % 2 != 0)
    {
        /*
         * Round UP to the nearest even port. The original rounded down,
         * which could return m_portBase - 1, below the configured minimum.
         * port + 1 cannot exceed the maximum because port is at most
         * m_portBase + m_portSpan - 1.
         */
        ++port;
    }

    return (port);
}
|
brechmos-stsci/deleteme | cubeviz/layout.py | <filename>cubeviz/layout.py<gh_stars>0
import os
from collections import OrderedDict
import numpy as np
from qtpy import QtWidgets, QtCore
from qtpy.QtWidgets import QMenu, QAction
from glue.utils.qt import load_ui
from glue.utils.qt import get_qapp
from glue.config import qt_fixed_layout_tab
from glue.external.echo import keep_in_sync, SelectionCallbackProperty
from glue.external.echo.qt import connect_combo_selection
from glue.core.data_combo_helper import ComponentIDComboHelper
from glue.core.message import SettingsChangeMessage
from glue.utils.matplotlib import freeze_margins
from specviz.third_party.glue.data_viewer import SpecVizViewer
from .toolbar import CubevizToolbar
from .image_viewer import CubevizImageViewer
from .controls.slice import SliceController
from .controls.overlay import OverlayController
from .tools import arithmetic_gui, moment_maps, smoothing
class WidgetWrapper(QtWidgets.QWidget):
    """Thin QWidget container that hosts a single child widget with zero
    margins and remembers the tab widget that owns it."""

    def __init__(self, widget=None, tab_widget=None, parent=None):
        super(WidgetWrapper, self).__init__(parent=parent)
        self.tab_widget = tab_widget
        self._widget = widget
        vbox = QtWidgets.QVBoxLayout()
        vbox.setContentsMargins(0, 0, 0, 0)
        vbox.addWidget(widget)
        self.layout = vbox
        self.setLayout(vbox)

    def widget(self):
        """Return the wrapped child widget."""
        return self._widget
@qt_fixed_layout_tab
class CubeVizLayout(QtWidgets.QWidget):
    """
    The 'CubeViz' layout, with three image viewers and one spectrum viewer.
    """

    LABEL = "CubeViz"
    subWindowActivated = QtCore.Signal(object)

    # Callback properties backing the per-viewer component combo boxes.
    single_viewer_attribute = SelectionCallbackProperty(default_index=0)
    viewer1_attribute = SelectionCallbackProperty(default_index=0)
    viewer2_attribute = SelectionCallbackProperty(default_index=1)
    viewer3_attribute = SelectionCallbackProperty(default_index=2)

    def __init__(self, session=None, parent=None):
        super(CubeVizLayout, self).__init__(parent=parent)

        # Install the CubeViz toolbar into the host application only once.
        if not hasattr(session.application, '_has_cubeviz_toolbar'):
            cubeviz_toolbar = CubevizToolbar(application=session.application)
            session.application.insertToolBar(session.application._data_toolbar,
                                              cubeviz_toolbar)

        self.session = session
        self._has_data = False
        self._wavelengths = None
        self._option_buttons = []

        self._data = None

        self.ui = load_ui('layout.ui', self,
                          directory=os.path.dirname(__file__))

        # Create the views and register to the hub.
        self.single_view = WidgetWrapper(CubevizImageViewer(self.session), tab_widget=self)
        self.left_view = WidgetWrapper(CubevizImageViewer(self.session), tab_widget=self)
        self.middle_view = WidgetWrapper(CubevizImageViewer(self.session), tab_widget=self)
        self.right_view = WidgetWrapper(CubevizImageViewer(self.session), tab_widget=self)
        self.specviz = WidgetWrapper(SpecVizViewer(self.session), tab_widget=self)

        self.single_view._widget.register_to_hub(self.session.hub)
        self.left_view._widget.register_to_hub(self.session.hub)
        self.middle_view._widget.register_to_hub(self.session.hub)
        self.right_view._widget.register_to_hub(self.session.hub)
        self.specviz._widget.register_to_hub(self.session.hub)

        self.all_views = [self.single_view, self.left_view, self.middle_view, self.right_view]
        # TODO: determine whether to rename this or get rid of it
        self.cube_views = self.all_views
        self.split_views = self.cube_views[1:]

        self._synced_checkboxes = [
            self.ui.singleviewer_synced_checkbox,
            self.ui.viewer1_synced_checkbox,
            self.ui.viewer2_synced_checkbox,
            self.ui.viewer3_synced_checkbox
        ]

        for view, checkbox in zip(self.all_views, self._synced_checkboxes):
            view._widget.assign_synced_checkbox(checkbox)

        # Add the views to the layouts.
        self.ui.single_image_layout.addWidget(self.single_view)
        self.ui.image_row_layout.addWidget(self.left_view)
        self.ui.image_row_layout.addWidget(self.middle_view)
        self.ui.image_row_layout.addWidget(self.right_view)
        self.ui.specviz_layout.addWidget(self.specviz)

        self.subWindowActivated.connect(self._update_active_view)

        self.ui.sync_button.clicked.connect(self._on_sync_click)
        self.ui.button_toggle_image_mode.clicked.connect(
            self._toggle_image_mode)

        # This is a list of helpers for the viewer combo boxes. New data
        # collections should be added to each helper in this list using the
        # ``append_data`` method to ensure that the new data components are
        # populated into the combo boxes.
        self._viewer_combo_helpers = []

        # This tracks the current positions of cube viewer axes when they are hidden
        self._viewer_axes_positions = []

        # Indicates whether cube viewer toolbars are currently visible or not
        self._toolbars_visible = True

        self._slice_controller = SliceController(self)
        self._overlay_controller = OverlayController(self)

        # Add menu buttons to the cubeviz toolbar.
        self._init_menu_buttons()

        # This maps the combo box indicies to the glue data component labels
        self._component_labels = []

        self.sync = {}

        # Track the slice index of the synced viewers. This is updated by the
        # slice controller
        self.synced_index = None

        # Application-wide event filter used to detect which viewer was
        # clicked (see eventFilter below).
        app = get_qapp()
        app.installEventFilter(self)
        self._last_click = None
        self._active_view = None
        self._active_cube = None
        self._last_active_view = None
        self._active_split_cube = None

        # Set the default to parallel image viewer
        self._single_viewer_mode = False
        self.ui.button_toggle_image_mode.setText('Single Image Viewer')
        self.ui.viewer_control_frame.setCurrentIndex(0)

    def _init_menu_buttons(self):
        """
        Add the two menu buttons to the tool bar. Currently two are defined:
            View - for changing the view of the active window
            Data Processing - for applying a data processing step to the data.
        :return:
        """
        self._option_buttons = [
            self.ui.view_option_button,
            self.ui.cube_option_button
        ]

        # Create the View Menu
        view_menu = self._dict_to_menu(OrderedDict([
            ('RA-DEC', lambda: None),
            ('RA-Spectral', lambda: None),
            ('DEC-Spectral', lambda: None),
            ('Hide Axes', ['checkable', self._toggle_viewer_axes]),
            ('Hide Toolbars', ['checkable', self._toggle_toolbars])
        ]))
        self.ui.view_option_button.setMenu(view_menu)

        # Create the Data Processing Menu
        cube_menu = self._dict_to_menu(OrderedDict([
            ('Spatial Smoothing', lambda: self._open_dialog('Spatial Smoothing', None)),
            ('Moment Maps', lambda: self._open_dialog('Moment Maps', None)),
            ('Arithmetic Operations', lambda: self._open_dialog('Arithmetic Operations', None))
        ]))
        self.ui.cube_option_button.setMenu(cube_menu)

    def _dict_to_menu(self, menu_dict):
        '''Stolen shamelessly from specviz. Thanks!'''
        # Builds a QMenu from an OrderedDict. A list value of the form
        # ['checkable', callback] produces a checkable action.
        menu_widget = QMenu()
        for k, v in menu_dict.items():
            if isinstance(v, dict):
                new_menu = menu_widget.addMenu(k)
                # NOTE(review): _dict_to_menu() accepts no ``menu_widget``
                # kwarg, so this nested-dict branch would raise TypeError
                # if ever exercised -- confirm and fix separately.
                self._dict_to_menu(v, menu_widget=new_menu)
            else:
                act = QAction(k, menu_widget)

                if isinstance(v, list):
                    if v[0] == 'checkable':
                        v = v[1]
                        act.setCheckable(True)
                        act.setChecked(False)

                act.triggered.connect(v)
                menu_widget.addAction(act)
        return menu_widget

    def _handle_settings_change(self, message):
        # Re-sync the slice slider when glue-wide settings change.
        if isinstance(message, SettingsChangeMessage):
            self._slice_controller.update_index(self.synced_index)

    def _set_pos_and_margin(self, axes, pos, marg):
        # Apply a saved axes position and freeze its margins.
        axes.set_position(pos)
        freeze_margins(axes, marg)

    def _hide_viewer_axes(self):
        # Hide axes in every cube view, remembering the current geometry so
        # _toggle_viewer_axes can restore it later.
        for viewer in self.cube_views:
            viewer._widget.toggle_hidden_axes(True)
            axes = viewer._widget.axes
            # Save current axes position and margins so they can be restored
            pos = axes.get_position(), axes.resizer.margins
            self._viewer_axes_positions.append(pos)
            self._set_pos_and_margin(axes, [0, 0, 1, 1], [0, 0, 0, 0])
            viewer._widget.figure.canvas.draw()

    def _toggle_viewer_axes(self):
        # If axes are currently hidden, restore the original positions
        if self._viewer_axes_positions:
            for viewer, pos in zip(self.cube_views, self._viewer_axes_positions):
                viewer._widget.toggle_hidden_axes(False)
                axes = viewer._widget.axes
                self._set_pos_and_margin(axes, *pos)
                viewer._widget.figure.canvas.draw()
            self._viewer_axes_positions = []
        # Record current positions if axes are currently hidden and hide them
        else:
            self._hide_viewer_axes()

    def _toggle_toolbars(self):
        # Show/hide the toolbars of every cube view.
        self._toolbars_visible = not self._toolbars_visible
        for viewer in self.cube_views:
            viewer._widget.toolbar.setVisible(self._toolbars_visible)

    def _open_dialog(self, name, widget):
        # Launch the data-processing dialog selected from the cube menu.
        if name == 'Spatial Smoothing':
            ex = smoothing.SelectSmoothing(self._data, parent=self, allow_preview=True)

        if name == 'Arithmetic Operations':
            ex = arithmetic_gui.SelectArithmetic(self._data, self.session.data_collection, parent=self)

        if name == "Moment Maps":
            moment_maps.MomentMapsGUI(
                self._data, self.session.data_collection, parent=self)

    def add_new_data_component(self, name):
        # Register a newly created data component label for the combo boxes.
        self._component_labels.append(str(name))

        # TODO: udpate the active view with the new component

    def _enable_option_buttons(self):
        # Enable the menu and sync buttons once data has been loaded.
        for button in self._option_buttons:
            button.setEnabled(True)
        self.ui.sync_button.setEnabled(True)

    def _get_change_viewer_func(self, view_index):
        # Build the combo-box callback that switches which data component is
        # displayed in the view at ``view_index``.
        def change_viewer(dropdown_index):
            view = self.all_views[view_index].widget()
            label = self._component_labels[dropdown_index]
            if view.is_smoothing_preview_active:
                view.end_smoothing_preview()
            view.update_axes_title(title=str(label))
            view.state.layers[0].attribute = self._data.id[label]
        return change_viewer

    def _enable_viewer_combo(self, data, index, combo_label, selection_label):
        # Wire one viewer's component combo box to its selection property.
        # NOTE(review): leftover debug print below.
        print('enable_viewer_combo {} {} {}'.format(index, combo_label, selection_label))
        combo = getattr(self.ui, combo_label)
        connect_combo_selection(self, selection_label, combo)
        helper = ComponentIDComboHelper(self, selection_label)
        helper.set_multiple_data([data])
        combo.setEnabled(True)
        combo.currentIndexChanged.connect(self._get_change_viewer_func(index))
        self._viewer_combo_helpers.append(helper)

    def _enable_all_viewer_combos(self, data):
        """
        Setup the dropdown boxes that correspond to each of the left, middle,
        and right views. The combo boxes initially are set to have FLUX,
        Error, DQ but will be dynamic depending on the type of data available
        either from being loaded in or by being processed.
        :return:
        """
        self._enable_viewer_combo(
            data, 0, 'single_viewer_combo', 'single_viewer_attribute')
        view = self.all_views[0].widget()
        view.update_axes_title(str(getattr(self, 'single_viewer_attribute')))

        for i in range(1, 4):
            combo_label = 'viewer{0}_combo'.format(i)
            selection_label = 'viewer{0}_attribute'.format(i)
            self._enable_viewer_combo(data, i, combo_label, selection_label)
            view = self.all_views[i].widget()
            view.update_axes_title(str(getattr(self, selection_label)))

    def add_overlay(self, data, label):
        # Forward an overlay dataset to the overlay controller.
        self._overlay_controller.add_overlay(data, label)

    def add_data(self, data):
        """
        Called by a function outside the class in order to add data to cubeviz.
        :param data:
        :return:
        """
        self._data = data
        self.specviz._widget.add_data(data)

        for checkbox in self._synced_checkboxes:
            checkbox.setEnabled(True)

        self._has_data = True
        self._active_view = self.left_view
        self._active_cube = self.left_view
        self._last_active_view = self.single_view
        self._active_split_cube = self.left_view

        # Set the component labels to what was actually in the file.
        self._component_labels = [str(x).strip() for x in data.component_ids() if not x in data.coordinate_components]

        # Store pointer to wavelength information
        # NOTE(review): assumes a component named 'Wave' exists and that its
        # spectral axis is the first one -- confirm for all supported cubes.
        self._wavelengths = self.single_view._widget._data[0].get_component('Wave')[:, 0, 0]

        # Pass WCS and wavelength information to slider controller and enable
        wcs = self.session.data_collection.data[0].coords.wcs
        self._slice_controller.enable(wcs, self._wavelengths)

        self._enable_option_buttons()
        self._setup_syncing()

        self._enable_all_viewer_combos(data)

        self.subWindowActivated.emit(self._active_view)

    def eventFilter(self, obj, event):
        # Application-wide mouse-press filter: decides which CubeViz viewer
        # (if any) a click landed in and activates it.
        if event.type() == QtCore.QEvent.MouseButtonPress:

            if not (self.isVisible() and self.isActiveWindow()):
                return super(CubeVizLayout, self).eventFilter(obj, event)

            # Find global click position
            click_pos = event.globalPos()

            # If the click position is the same as the last one, we shouldn't
            # do anything.
            if click_pos != self._last_click:
                # Determine if the event falls inside any of the viewers
                for viewer in self.subWindowList():
                    relative_click_pos = viewer.mapFromGlobal(click_pos)
                    if viewer.rect().contains(relative_click_pos):
                        self.subWindowActivated.emit(viewer)
                        break

                self._last_click = click_pos

        return super(CubeVizLayout, self).eventFilter(obj, event)

    def _toggle_image_mode(self, event=None):
        # Switch between single-image and split (three-image) modes,
        # preserving which cube was active in each mode.
        new_active_view = self._last_active_view
        self._last_active_view = self._active_view

        # Currently in single image, moving to split image
        if self._single_viewer_mode:
            self._active_cube = self._active_split_cube
            self._activate_split_image_mode(event)
            self._single_viewer_mode = False
            self.ui.button_toggle_image_mode.setText('Single Image Viewer')
            self.ui.viewer_control_frame.setCurrentIndex(0)

            if self.single_view._widget.synced:
                for view in self.split_views:
                    if view._widget.synced:
                        view._widget.update_slice_index(self.single_view._widget.slice_index)
        # Currently in split image, moving to single image
        else:
            self._active_split_cube = self._active_cube
            self._active_view = self.single_view
            self._active_cube = self.single_view
            self._activate_single_image_mode(event)
            self._single_viewer_mode = True
            self.ui.button_toggle_image_mode.setText('Split Image Viewer')
            self.ui.viewer_control_frame.setCurrentIndex(1)

        self.subWindowActivated.emit(new_active_view)

        # Update the slice index to reflect the state of the active cube
        self._slice_controller.update_index(self._active_cube._widget.slice_index)

    def _activate_single_image_mode(self, event=None):
        # Collapse the image row and give the single viewer the full height.
        vsplitter = self.ui.vertical_splitter
        hsplitter = self.ui.horizontal_splitter

        vsizes = list(vsplitter.sizes())
        hsizes = list(hsplitter.sizes())
        vsizes = 0, max(10, vsizes[0] + vsizes[1])
        hsizes = max(10, sum(hsizes) * 0.4), max(10, sum(hsizes) * 0.6)
        vsplitter.setSizes(vsizes)
        hsplitter.setSizes(hsizes)

    def _activate_split_image_mode(self, event=None):
        # Restore the three-viewer row and hide the single-viewer pane.
        vsplitter = self.ui.vertical_splitter
        hsplitter = self.ui.horizontal_splitter

        vsizes = list(vsplitter.sizes())
        hsizes = list(hsplitter.sizes())
        vsizes = max(10, sum(vsizes) / 2), max(10, sum(vsizes) / 2)

        # TODO: Might be a bug here, should the hsizes be based on vsizes? If so, not sure we need to calculate
        # TODO: the hsizes above.
        hsizes = 0, max(10, vsizes[0] + vsizes[1])
        vsplitter.setSizes(vsizes)
        hsplitter.setSizes(hsizes)

    def _update_active_view(self, view):
        # Track the most recently activated view; if it is a cube viewer,
        # also refresh the slice slider from it.
        if self._has_data:
            self._active_view = view
            if isinstance(view._widget, CubevizImageViewer):
                self._active_cube = view
                index = self._active_cube._widget.slice_index
                self._slice_controller.update_index(index)

    def activeSubWindow(self):
        # MDI-style accessor for the currently active view.
        return self._active_view

    def subWindowList(self):
        # MDI-style accessor: all viewers, including the spectrum viewer.
        return [self.single_view, self.left_view, self.middle_view, self.right_view, self.specviz]

    def _setup_syncing(self):
        # Keep the x/y limits of the three split viewers in sync, then force
        # an initial slice sync.
        for attribute in ['x_min', 'x_max', 'y_min', 'y_max']:
            sync1 = keep_in_sync(self.left_view._widget.state, attribute,
                                 self.middle_view._widget.state, attribute)
            sync2 = keep_in_sync(self.middle_view._widget.state, attribute,
                                 self.right_view._widget.state, attribute)
            self.sync[attribute] = sync1, sync2
        self._on_sync_click()

    def _on_sync_click(self, event=None):
        # Mark every cube view as synced and align them all to the active
        # cube's slice index.
        index = self._active_cube._widget.slice_index
        for view in self.cube_views:
            view._widget.synced = True
            if view != self._active_cube:
                view._widget.update_slice_index(index)
        self._slice_controller.update_index(index)

    def start_smoothing_preview(self, preview_function, component_id, preview_title=None):
        """
        Starts smoothing preview. This function preforms the following steps
        1) SelectSmoothing passes parameters.
        2) The left and single viewers' combo box is set to component_id
        3) The set_smoothing_preview is called to setup on the fly smoothing
        :param preview_function: function: Single-slice smoothing function
        :param component_id: int: Which component to preview
        :param preview_title: str: Title displayed when previewing
        """
        # For single and first viewer:
        for view_index in [0, 1]:
            view = self.all_views[view_index].widget()
            if view_index == 0:
                combo_label = 'single_viewer_combo'
            else:
                combo_label = 'viewer{0}_combo'.format(view_index)
            combo = getattr(self.ui, combo_label)
            component_index = self._component_labels.index(component_id)
            combo.setCurrentIndex(component_index)
            view.set_smoothing_preview(preview_function, preview_title)

    def end_smoothing_preview(self):
        """
        End preview and change viewer combo index to the first component.
        """
        for view_index in [0, 1]:
            view = self.all_views[view_index].widget()
            view.end_smoothing_preview()
            if view_index == 0:
                combo_label = 'single_viewer_combo'
            else:
                combo_label = 'viewer{0}_combo'.format(view_index)
            combo = getattr(self.ui, combo_label)
            combo.setCurrentIndex(0)
            combo.currentIndexChanged.emit(0)

    def showEvent(self, event):
        # First-show hook: default to split image mode with the left viewer
        # active.
        super(CubeVizLayout, self).showEvent(event)
        # Make split image mode the default layout
        self._activate_split_image_mode()
        self._update_active_view(self.left_view)
|
Pandinosaurus/gpu.js | src/browser.js | <reponame>Pandinosaurus/gpu.js<filename>src/browser.js
import { GPU } from './base-gpu';
import { alias } from './alias';
import { utils } from './utils';
import * as common from './common';
import { Input, input } from './input';
import { Texture } from './texture';
import { FunctionBuilder } from './backend/function-builder';
import { FunctionNode } from './backend/function-node';
import { CPUFunctionNode } from './backend/cpu/function-node';
import { CPUKernel } from './backend/cpu/kernel';
import { WebGLFunctionNode } from './backend/web-gl/function-node';
import { WebGLKernel } from './backend/web-gl/kernel';
import { WebGL2FunctionNode } from './backend/web-gl2/function-node';
import { WebGL2Kernel } from './backend/web-gl2/kernel';
import { GLKernel } from './backend/gl/kernel';
import { Kernel } from './backend/kernel';
/**
* Stub for HeadlessGL.
*/
/**
 * Stub for HeadlessGL.
 *
 * HeadlessGL is a Node-only backend; in the browser bundle this stub
 * reports "unsupported" from every capability probe and turns every
 * operation into a no-op so shared code can reference it safely.
 */
class HeadlessGLKernel extends WebGLKernel {
  // Capability probes: always report the backend as unavailable.
  static get isSupported() { return false }
  static isContextMatch() { return false }
  static getIsTextureFloat() { return false }
  static getIsDrawBuffers() { return false }
  static getChannelCount() { return 1 }
  static get testCanvas() { return null }
  static get testContext() { return null }
  static get features() { return null }
  static setupFeatureChecks() {}
  static destroyContext() {}
  // Instance lifecycle: all no-ops / empty results.
  initCanvas() { return {} }
  initContext() { return null }
  toString() { return '' }
  initExtensions() {}
  build() {}
  destroyExtensions() {}
  setOutput() {}
  // Frozen feature map with everything disabled.
  static getFeatures() {
    return Object.freeze({
      isFloatRead: false,
      isIntegerDivisionAccurate: false,
      isTextureFloat: false,
      isDrawBuffers: false,
      kernelMap: false,
      channelCount: 1,
    });
  }
};
// Attach every public class and helper to the GPU constructor so the
// browser bundle exposes a single entry point.
const lib = GPU;
Object.assign(lib, {
  alias,
  CPUFunctionNode,
  CPUKernel,
  FunctionBuilder,
  FunctionNode,
  HeadlessGLKernel,
  Input,
  input,
  Texture,
  // Merge the shared helpers with the browser-specific utils.
  utils: { ...common, ...utils },
  WebGL2FunctionNode,
  WebGL2Kernel,
  WebGLFunctionNode,
  WebGLKernel,
  GLKernel,
  Kernel,
});
export default lib;
|
qqqkoko123/qianghongbao | src/com/qqq/WXhongbao/rg_ZFBLiaoTianJieMian.java | <reponame>qqqkoko123/qianghongbao
package com.qqq.WXhongbao;
/**
 * Accessibility-service task for the Alipay (ZFB) group-chat screen
 * ("com.alipay.mobile.chatapp.ui.GroupChatMsgActivity_"): watches incoming
 * accessibility events for "查看红包" ("view red packet") nodes and clicks
 * them to open red packets automatically.
 */
public class rg_ZFBLiaoTianJieMian extends rg_RenWuJiChuLei {
    public rg_ZFBLiaoTianJieMian () { }

    // Shared sender used to dispatch simulated input events.
    public static rg_MoZhangAiFuWuChuangKouShiJianJieShouQi rg_MoNiCaoZuoShiJianFaSongQi10;

    // Lazily created singleton instance of this task.
    protected static rg_ZFBLiaoTianJieMian rg_RenWuDuiXiang13;

    // Window class name of the Alipay group-chat activity.
    protected String rg_ZFBLiaoTianJieMianChuangKouLeiMing = "com.alipay.mobile.chatapp.ui.GroupChatMsgActivity_";

    /**
     * Handles a new accessibility event: when the event comes from the
     * Alipay package and a relevant window/view class, looks for a
     * "查看红包" node, clicks its parent, advances a round-robin index over
     * the matches, and hands control to the red-packet-open task.
     */
    public void rg_ShouDaoXinShiJian1 (android.view.accessibility.AccessibilityEvent rg_ShiJianDuiXiang81) {
        super.rg_ShouDaoXinShiJian1 (rg_ShiJianDuiXiang81);
        // Ignore events while this task is disabled.
        if (rg_QiYongZhuangTai3 == false)
        {
            return;
        }
        // Only react to events from the Alipay package.
        if (huoShan.Java.JiBen.rg_WenBenXingLei.rg_WenBenXiangDeng (String.valueOf(rg_ShiJianDuiXiang81.getPackageName()), "com.eg.android.AlipayGphone", false) == false)
        {
            return;
        }
        // Accept the chat activity itself plus the generic containers it
        // posts events through (list, frame layout, alert dialog).
        if (huoShan.Java.JiBen.rg_WenBenXingLei.rg_WenBenXiangDeng (String.valueOf(rg_ShiJianDuiXiang81.getClassName()), rg_ZFBLiaoTianJieMianChuangKouLeiMing, false) || huoShan.Java.JiBen.rg_WenBenXingLei.rg_WenBenXiangDeng (String.valueOf(rg_ShiJianDuiXiang81.getClassName()), "android.widget.ListView", false) || huoShan.Java.JiBen.rg_WenBenXingLei.rg_WenBenXiangDeng (String.valueOf(rg_ShiJianDuiXiang81.getClassName()), "android.widget.FrameLayout", false) || huoShan.Java.JiBen.rg_WenBenXingLei.rg_WenBenXiangDeng (String.valueOf(rg_ShiJianDuiXiang81.getClassName()), "android.app.AlertDialog", false))
        {
            java.util.List<android.view.accessibility.AccessibilityNodeInfo> rg_JieDianLieBiao7;
            android.view.accessibility.AccessibilityNodeInfo rg_JieDian10;
            android.view.accessibility.AccessibilityNodeInfo rg_ZiJieDian1;
            android.view.accessibility.AccessibilityNodeInfo rg_ZiJieDian2;
            android.view.accessibility.AccessibilityNodeInfo rg_ZiJieDian3;
            int rg_ChengYuanShu27;
            android.graphics.Rect rg_ZuoBiao = new android.graphics.Rect ();
            // Find every "查看红包" ("view red packet") node on screen.
            rg_JieDianLieBiao7 = (rg_FuWuDuiXiang3.getRootInActiveWindow ().findAccessibilityNodeInfosByText("查看红包"));
            // NOTE(review): rg_ChengYuanShu27 is assigned but never read.
            rg_ChengYuanShu27 = rg_JieDianLieBiao7.size ();
            if (rg_JieDianLieBiao7 != null && rg_JieDianLieBiao7.size () > 0)
            {
                // Pick the node at the shared round-robin index.
                rg_JieDian10 = rg_JieDianLieBiao7.get (rg_QuanJuBianLiangLei.rg_ChengYuanSuoYinzfb);
                if (rg_JieDian10 != null)
                {
                    // Advance the index, wrapping back to 0 at the end.
                    if (rg_QuanJuBianLiangLei.rg_ChengYuanSuoYinzfb <= rg_JieDianLieBiao7.size () - 2)
                    {
                        rg_QuanJuBianLiangLei.rg_ChengYuanSuoYinzfb = rg_QuanJuBianLiangLei.rg_ChengYuanSuoYinzfb + 1;
                    }
                    else
                    {
                        rg_QuanJuBianLiangLei.rg_ChengYuanSuoYinzfb = 0;
                    }
                    // Click the node's parent (the clickable message row),
                    // then disable this task and enable the open-packet task.
                    rg_ChanJiJieDian1 (rg_JieDian10.getParent());
                    rg_ZFBLiaoTianJieMian.rg_QuRenWuDuiXiang10 ().rg_QiYongZhuangTai3 = false;
                    rg_ZFBGongBaoKaiJieMian.rg_QuRenWuDuiXiang11 ().rg_QiYongZhuangTai3 = true;
                }
            }
            else
            {
                // Nothing to click: keep this task active, park the other.
                rg_ZFBLiaoTianJieMian.rg_QuRenWuDuiXiang10 ().rg_QiYongZhuangTai3 = true;
                rg_ZFBGongBaoKaiJieMian.rg_QuRenWuDuiXiang11 ().rg_QiYongZhuangTai3 = false;
            }
        }
        return;
    }

    /** Returns the lazily created singleton instance of this task. */
    public static synchronized rg_ZFBLiaoTianJieMian rg_QuRenWuDuiXiang10 () {
        if (rg_RenWuDuiXiang13 == null)
        {
            rg_RenWuDuiXiang13 = new rg_ZFBLiaoTianJieMian ();
        }
        return (rg_RenWuDuiXiang13);
    }

    /** Installs the simulated-input event sender used by this task. */
    public static void rg_ChuShiHuaMoNiShiJianFaSong10 (rg_MoZhangAiFuWuChuangKouShiJianJieShouQi rg_JieShouDuiXiang13) {
        rg_MoNiCaoZuoShiJianFaSongQi10 = rg_JieShouDuiXiang13;
    }
}
|
dune-community/dune-xt-data | dune/xt/data/quadratures/gausslobatto/data/gausslobatto91.cxx | <filename>dune/xt/data/quadratures/gausslobatto/data/gausslobatto91.cxx
// This file is part of the dune-xt-data project:
// https://github.com/dune-community/dune-xt-data
// Copyright 2009-2018 dune-xt-data developers and contributors. All rights reserved.
// License: Dual licensed as BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
// or GPL-2.0+ (http://opensource.org/licenses/gpl-license)
// with "runtime exception" (http://www.dune-project.org/license.html)
// Authors:
// <NAME> (2018)
// <NAME> (2018 - 2019)
//
// This file is part of the dune-gdt project:
#include "../gausslobatto_data.hh"
namespace Dune::XT::Data {
template <>
std::vector<std::vector<double>> GaussLobattoData<91>::get()
{
  // 91-point Gauss-Lobatto quadrature rule on [-1, 1].
  // Each entry is {node, weight}. By construction the endpoints -1 and 1 are
  // included, and both nodes and weights are symmetric about 0. The values
  // are externally tabulated constants, not computed at runtime.
  return {{-1, 2.442002442002442002442E-4},
          {-0.999103798687492262626, 0.001504960046072077155},
          {-0.996996714692221155163, 0.0027085287311679588569},
          {-0.99368800327501366062, 0.003908201644267197621},
          {-0.989181907522294422074, 0.0051030787771151763315},
          {-0.983483902210260281696, 0.006291785552222116799},
          {-0.976600865910292186241, 0.007472903898052906307},
          {-0.968541096939741031863, 0.0086450149534754715323},
          {-0.959314308800405312043, 0.0098067080095429056011},
          {-0.948931620097228569369, 0.0109565839998018215409},
          {-0.9374055417262377809618, 0.0120932577392473193723},
          {-0.924749962041447334067, 0.0132153597935366162494},
          {-0.9109801302310282682582, 0.0143215382106550894562},
          {-0.8961126379980851663777, 0.015410460186440101208},
          {-0.8801653995980110666353, 0.016480813688136995404},
          {-0.8631576302694591129692, 0.017531309044352929403},
          {-0.8451098230909896187605, 0.0185606805039123703807},
          {-0.8260437242942947047771, 0.0195676877637628511257},
          {-0.8059823070653248202356, 0.0205511174650751616053},
          {-0.7849497438657486677899, 0.021509784656238629744},
          {-0.7629713773085884627407, 0.0224425342212572320478},
          {-0.740073689623412343981, 0.0233482422719742375972},
          {-0.7162842707480514154656, 0.0242258175025331415378},
          {-0.6916317850853976930666, 0.0250742025044932225609},
          {-0.666145936965408163067, 0.0258923750410456539301},
          {-0.6398574348539760278359, 0.0266793492788138149148},
          {-0.612797954351824912885, 0.0274341769757656336336},
          {-0.5850001000280299731203, 0.028155948623814430856},
          {-0.5564973661341676375712, 0.0288437945447366750757},
          {-0.5273240962464402413715, 0.0294968859380896133786},
          {-0.4975154418844106298964, 0.0301144358798684774464},
          {-0.4671073201562129552152, 0.030695700270701591311},
          {-0.436136370481277534585, 0.031239978732442022201},
          {-0.4046399104427181983408, 0.031746615452076261817},
          {-0.3726558908225785544758, 0.0322149999719336752105},
          {-0.340222849874117691808, 0.03264456792524499792414},
          {-0.3073798668862347898906, 0.03303480171616389920545},
          {-0.274166515095984755375, 0.033385231143432466076},
          {-0.2406228140059223069603, 0.03369543396693930941409},
          {-0.2067891811637289193304, 0.0339650364164877605982},
          {-0.1727063834622248333017, 0.03419371364216122816606},
          {-0.138415488018446154799, 0.0343811901057431313887},
          {-0.1039578126909741962862, 0.0345272399127198345459},
          {-0.0693748762951400449004, 0.03463168708446658494},
          {-0.034708348576091343205, 0.0346944057702885218413},
          {0, 0.03471532039906128497633},
          {0.0347083485760913432051, 0.0346944057702885218413},
          {0.06937487629514004490043, 0.0346316870844665849397},
          {0.1039578126909741962862, 0.0345272399127198345459},
          {0.138415488018446154799, 0.034381190105743131389},
          {0.1727063834622248333017, 0.0341937136421612281661},
          {0.2067891811637289193304, 0.0339650364164877605982},
          {0.2406228140059223069603, 0.0336954339669393094141},
          {0.2741665150959847553753, 0.0333852311434324660757},
          {0.3073798668862347898906, 0.0330348017161638992054},
          {0.340222849874117691808, 0.0326445679252449979241},
          {0.3726558908225785544758, 0.0322149999719336752105},
          {0.404639910442718198341, 0.031746615452076261817},
          {0.436136370481277534585, 0.0312399787324420222012},
          {0.467107320156212955215, 0.0306957002707015913111},
          {0.4975154418844106298964, 0.0301144358798684774464},
          {0.5273240962464402413715, 0.0294968859380896133786},
          {0.556497366134167637571, 0.02884379454473667507566},
          {0.5850001000280299731203, 0.028155948623814430856},
          {0.612797954351824912885, 0.0274341769757656336336},
          {0.6398574348539760278359, 0.0266793492788138149148},
          {0.6661459369654081630668, 0.0258923750410456539301},
          {0.6916317850853976930666, 0.0250742025044932225609},
          {0.7162842707480514154656, 0.0242258175025331415378},
          {0.7400736896234123439807, 0.0233482422719742375972},
          {0.7629713773085884627407, 0.022442534221257232048},
          {0.7849497438657486677899, 0.0215097846562386297444},
          {0.8059823070653248202356, 0.0205511174650751616053},
          {0.826043724294294704777, 0.019567687763762851126},
          {0.8451098230909896187605, 0.018560680503912370381},
          {0.8631576302694591129692, 0.017531309044352929403},
          {0.8801653995980110666353, 0.0164808136881369954038},
          {0.8961126379980851663777, 0.015410460186440101208},
          {0.9109801302310282682582, 0.014321538210655089456},
          {0.924749962041447334067, 0.01321535979353661624941},
          {0.9374055417262377809618, 0.012093257739247319372},
          {0.9489316200972285693693, 0.010956583999801821541},
          {0.9593143088004053120433, 0.0098067080095429056011},
          {0.9685410969397410318628, 0.008645014953475471532},
          {0.9766008659102921862414, 0.007472903898052906307},
          {0.9834839022102602816957, 0.0062917855522221167994},
          {0.989181907522294422074, 0.0051030787771151763315},
          {0.99368800327501366062, 0.003908201644267197621},
          {0.996996714692221155163, 0.0027085287311679588569},
          {0.9991037986874922626258, 0.001504960046072077155},
          {1, 2.442002442002442002442E-4}};
}
} // namespace Dune::XT::Data
|
slavslav/sydevs | src/examples/research/realtime/realtime.h | #pragma once
#ifndef SYDEVS_EXAMPLES_REALTIME_H_
#define SYDEVS_EXAMPLES_REALTIME_H_
#include <examples/research/realtime/bouncing_ball_interactive_system.h>
#include <sydevs/systems/real_time_simulation.h>
namespace sydevs_examples {
using namespace sydevs;
using namespace sydevs::systems;
// Interactive console demo driving a real-time simulation of the bouncing
// ball system; alternates between observing simulation output and handling
// user menu input.
class realtime
{
public:
    realtime();

    // Runs the observe/interact loop until the user exits.
    void mainloop();

private:
    void observation_phase();
    void interaction_phase();
    void print_header();
    void print_footer();
    void print_frame(distance x);  // renders one frame at horizontal position x
    void print_menu();

    // NOTE(review): member semantics below are inferred from names only;
    // confirm against the corresponding .cpp implementation.
    std::unique_ptr<real_time_simulation<bouncing_ball_interactive_system>> sim_ptr_;
    clock_time clock_t0_;      // wall-clock reference point
    duration t_;               // simulated time
    bool high_g_;              // high-gravity mode toggle (presumably)
    bool fast_as_possible_;    // run unpaced when true (presumably)
    float64 t_syn_rate_;       // time-synchronization rate
};
} // namespace
#endif
|
RuscelloDB/poc | src/test/java/com/ruscello/MbassadorEventBusTest.java | package com.ruscello;
import com.ruscello.bus.AnotherFakeMessage;
import com.ruscello.bus.FakeMessage;
import com.ruscello.bus.MbassadorEventBusListener;
import com.ruscello.bus.Message;
import net.engio.mbassy.bus.MBassador;
import org.junit.jupiter.api.Test;
import java.util.HashMap;
import java.util.Map;
/**
 * Exercises the MBassador event bus: subscribes one listener, then publishes
 * several message types to observe how supertype-based dispatch behaves.
 */
public class MbassadorEventBusTest {

    // Dispatch walks the message's supertype chain:
    // SubscriptionManager.getSubscriptionsByMessageType ->
    // ReflectionUtils.getSuperTypes(messageType) -> Class.getSuperclass
    @Test
    public void test() {
        MBassador eventBus = new MBassador();
        eventBus.subscribe(new MbassadorEventBusListener());

        System.out.println("post message to bus");
        eventBus.publish(new Message());

        System.out.println("post fake message to bus");
        eventBus.publish(new FakeMessage());

        // Publishing a plain Map is not delivered to the listener.
        System.out.println("post map to bus");
        Map<String, String> payload = new HashMap<>();
        //payload.put("key", "value");
        eventBus.publish(payload);

        System.out.println("post another fake message to bus");
        eventBus.post(new AnotherFakeMessage()).asynchronously();

        System.out.println("fin");
    }
}
|
phantomDai/CMuJava | src/test/concurrence/san/SAN_AtomicInteger.java | /*
* @author: GuYouda
* @date: 2018/4/18
* @time: 21:52
* @des:
*/
package test.concurrence.san;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Mutation-testing subject (CMuJava "SAN" operator target) built around
 * {@link AtomicInteger}. NOTE(review): the odd-looking getAndSet calls and
 * discarded locals appear to be the injected mutation itself — do not
 * "fix" them without checking the mutation harness.
 */
public class SAN_AtomicInteger {
    private AtomicInteger count = new AtomicInteger();

    public void increment() {
        // getAndSet(100) overwrites the counter with 100 and returns the
        // previous value (discarded); incrementAndGet() then makes it 101.
        int temp = count.getAndSet(100);
        count.incrementAndGet();
    }

    // With AtomicInteger, thread safety is achieved without explicit locking.
    public int test() {
        int tt = 100;
        // tt receives the previous counter value; count becomes 99.
        tt = count.getAndSet(99);
        return count.get();
    }
}
|
autiwg/bartender | bartender/users/generators.py | from uuid import uuid4
def generate_invite_token(length=8):
    """Generate a short random invite token.

    Generalized from the original fixed-width implementation: the token
    length is now a parameter (default 8 preserves the old behavior).

    Args:
        length: Number of lowercase hex characters to return (1-32, since
            the randomness source is a single UUID4).

    Returns:
        A string of ``length`` lowercase hexadecimal characters.
    """
    # uuid4().hex[:8] is identical to the original str(uuid4())[:8], because
    # the first hyphen in the canonical UUID string appears at index 8.
    return uuid4().hex[:length]
|
sqjian/toolkit | net/http/srv_test.go | <gh_stars>0
package http_test
import (
"context"
"fmt"
"github.com/julienschmidt/httprouter"
"github.com/sqjian/go-kit/log"
httpUtil "github.com/sqjian/go-kit/net/http"
"net/http"
"testing"
"time"
)
// Index writes a plain-text welcome message to the HTTP client.
//
// Bug fix: the original called fmt.Print(w, "Welcome!\n"), which writes to
// standard output (printing w's value representation) instead of writing the
// message to the HTTP response. fmt.Fprint targets the ResponseWriter.
func Index(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
	fmt.Fprint(w, "Welcome!\n")
}
// Hello greets the caller using the ":name" route parameter.
func Hello(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {
	name := ps.ByName("name")
	fmt.Fprintf(w, "hello, %s!\n", name)
}
func TestServe(t *testing.T) {
checkErr := func(err error) {
if err != nil {
t.Fatal(err)
}
}
logger, loggerErr := log.NewLogger(
log.WithFileName("go-kit.log"),
log.WithMaxSize(3),
log.WithMaxBackups(3),
log.WithMaxAge(3),
log.WithLevel(log.Debug),
log.WithConsole(false),
)
checkErr(loggerErr)
router := httprouter.New()
router.GET("/", Index)
router.GET("/hello/:name", Hello)
ctx, cancel := context.WithCancel(context.Background())
go func() {
time.Sleep(time.Second * 3)
cancel()
}()
err := httpUtil.Serve(ctx, "0.0.0.0:8888", router, httpUtil.WithSrvLogger(logger))
if err != nil {
t.Fatal(err)
}
}
|
cdzombak/little-photo-printer | Little Image Printer/Categories/NSString+DPZURLEncode.h | //
// NSString+DPZURLEncode.h
//
// Created by <NAME> on 26/08/2012.
// Copyright (c) 2012 <NAME>. All rights reserved.
//
#import <Foundation/Foundation.h>
/// URL-encoding helpers on NSString; the dpz_ prefix avoids category-method clashes.
@interface NSString (DPZURLEncode)

/// Returns a copy of the receiver with URL-reserved characters percent-escaped.
- (NSString *)dpz_urlEncode;

/// Returns a copy of the receiver with percent-escapes decoded.
- (NSString *)dpz_urlDecode;

@end
|
wincle626/Xilinx_Embedded_Driver_Example | XilinxProcessorIPLib/drivers/nandps/doc/html/api/xnandps__sinit_8c.js | <filename>XilinxProcessorIPLib/drivers/nandps/doc/html/api/xnandps__sinit_8c.js
// Doxygen-generated navigation data for the xnandps_sinit.c API page.
// Each entry is [display name, anchor href, children]; do not edit by hand.
var xnandps__sinit_8c =
[
    [ "XNandPs_LookupConfig", "group__nandps__v2__2.html#gad64edb80b795bb18ecceb9428b458903", null ],
    [ "XNandPs_ConfigTable", "group__nandps__v2__2.html#ga9fda36b00c8235b40746d8a29361acdc", null ]
];
AMorgaut/WOpenUI5Core | bower_components/openui5-sap.ui.core/test-resources/sap/ui/core/demokit/sample/PatternMatching/patternApp/view/PatternTable.controller.js | jQuery.sap.require("sap.ui.core.routing.Router");
jQuery.sap.require("patternApp.model.Pattern");
// Controller for the PatternTable demo view: owns a router-backed Pattern
// model and lets the user test hash strings against registered route patterns.
sap.ui.controller("patternApp.view.PatternTable", {

    // Creates the router-backed model and starts router hash handling.
    onInit : function () {
        var oRouter = new sap.ui.core.routing.Router();
        this._oModel = new patternApp.model.Pattern(oRouter);
        this.getView().setModel(this._oModel);
        oRouter.initialize();
    },

    // Applies the hash typed by the user; resets the matched-state list first.
    onSetHash : function () {
        var sNewHash = this.getView().byId("hash").getValue();
        // Dont reset the list if the hash is the same
        if (sNewHash === this._sHash) {
            return;
        }
        this._oModel.resetMatched();
        this._sHash = sNewHash;
        // Call replace hash here since setHash add history entries.
        sap.ui.core.routing.HashChanger.getInstance().replaceHash(sNewHash);
    },

    // Registers the pattern from the input field with the model.
    onAddPattern : function () {
        this._oModel.addPattern(this.getView().byId("pattern").getValue());
    },

    // Opens the value-help dialog, creating it lazily on first use.
    handleValueHelp : function () {
        // create value help dialog
        if (!this._valueHelpDialog) {
            this._valueHelpDialog = sap.ui.xmlfragment("patternApp.view.Dialog", this);
            this.getView().addDependent(this._valueHelpDialog);
        }
        // open value help dialog
        this._valueHelpDialog.open();
    },

    // Filters the dialog's item list by the typed search value.
    handleValueHelpSearch : function (oEvent) {
        var sValue = oEvent.getParameter("value");
        var oFilter = new sap.ui.model.Filter("pattern", sap.ui.model.FilterOperator.Contains, sValue);
        oEvent.getSource().getBinding("items").filter([oFilter]);
    },

    // Copies the selected item into the hash input and clears the filter.
    handleValueHelpClose : function (oEvent) {
        var oSelectedItem = oEvent.getParameter("selectedItem");
        if (oSelectedItem) {
            var oPatternInput = this.getView().byId("hash");
            oPatternInput.setValue(oSelectedItem.getTitle());
        }
        oEvent.getSource().getBinding("items").filter([]);
    },

    // Maps the matched flag to a semantic state name for the UI.
    formatMatched : function (bValue) {
        if (bValue) {
            // Green color for matched patterns
            return "Success";
        }
        // Red color for unmatched ones
        return "Error";
    },

    // Shows "empty" for a falsy hash so the table never renders a blank cell.
    formatHash : function (sValue) {
        if (!sValue) {
            return "empty";
        }
        return sValue;
    }
});
|
stuartpb/nectarjs | compiler/native/env/wasm.js | var WASM =
{
name: "wasm",
main: "wasm.cpp",
compiler: "em++",
stdlib: [{bind: "Nectar", module: "WASM"},"console", "Math", "JSON"],
check: {
"env": {
"node": true,
"es6": true
},
"extends": "eslint:recommended",
"rules": {
"no-console": "off",
"indent": "off",
"linebreak-style": "off",
"no-unused-vars": ["warn", { "vars": "all", "args": "after-used", "ignoreRestSiblings": false }],
"no-const-assign": "error",
},
"globals":
{
"undefined": false,
"eval": false,
"__njs_typeof": false,
"console": false,
"module": false,
"require": false,
"__Nectar_Log_Console": false,
"__Nectar_InitVar": false,
"__Nectar_Object_Keys": false,
"__Nectar_Object_Stringify": false,
"__Nectar_Call_Function": false,
"__NJS_ARGS": false,
"__NJS_ENV": false,
"__NJS_PLATFORM": false,
"__Nectar_typeof": false,
"__Nectar_THIS": false,
"__Nectar_instanceof": false,
"__Nectar_delete": false,
"__Nectar_EQUAL_VALUE_AND_TYPE": false,
"__Nectar_NOT_EQUAL_VALUE_AND_TYPE": false,
"JSON": false,
"Object": false,
"isNaN": false,
"Array": false,
}
},
out: function(_name)
{
if(CLI.cli["--target"])
{
if(CLI.cli["--target"].argument == "js") return _name + ".asm.js";
else if(CLI.cli["--target"].argument == "wasm") return _name + ".wasm";
else if(CLI.cli["--target"].argument == "html") return _name + ".html";
else
{
console.log("[!] Invalid target, expected js, wasm, html");
process.exit(1);
}
}
return _name + ".wasm";
},
cli: function(compiler, preset, out, _in, option)
{
/*
var _cachePath = path.join(process.cwd(), "..", "cached_" + COMPILER.ENV.name + "_" + VERSION);
var _precompiled = path.join(_cachePath, "nectar.o");
if(!fs.existsSync(_precompiled))
{
console.log(`[+] Creating Nectar binary lib for ${COMPILER.ENV.name + "_" + VERSION}`);
try { fs.mkdirSync(_cachePath); } catch(e){};
execSync(`${compiler} -std=c++17 -c nectar.cpp -O3 -o "${_precompiled}"`);
console.log("[+] Compiling with precompiled Nectar lib");
}
*/
var _cliOption = "";
if(CLI.cli["--option"]) _cliOption = CLI.cli["--option"].argument;
if(preset == "none")
{
return `${compiler} -D__NJS_REGISTER_SIZE=${COMPILER.REGISTER} ${_in} -O1 -w -s TOTAL_MEMORY=33554432 ${COMPILER.LIBS} -o ${out} ${_cliOption}`;
}
else if(preset == "size")
{
return `${compiler} -D__NJS_REGISTER_SIZE=${COMPILER.REGISTER} ${_in} -Os -fno-exceptions -fno-rtti -fno-stack-protector -fomit-frame-pointer -w -s TOTAL_MEMORY=33554432 ${COMPILER.LIBS} -o ${out} ${_cliOption}`;
}
else
{
return `${compiler} -D__NJS_REGISTER_SIZE=${COMPILER.REGISTER} ${_in} -O3 -w -s TOTAL_MEMORY=33554432 ${COMPILER.LIBS} -o ${out} ${_cliOption}`;
}
}
}
module.exports = WASM;
|
shaojiankui/iOS10-Runtime-Headers | Frameworks/AVFoundation.framework/AVMutableMetadataItem.h | <filename>Frameworks/AVFoundation.framework/AVMutableMetadataItem.h
/* Generated by RuntimeBrowser
Image: /System/Library/Frameworks/AVFoundation.framework/AVFoundation
*/
// NOTE: reverse-engineered header (RuntimeBrowser); do not edit by hand.
// The anonymous struct { long long; int; unsigned int; long long } matches
// CMTime's layout — presumably these time/duration members are CMTime.
@interface AVMutableMetadataItem : AVMetadataItem {
    AVMutableMetadataItemInternal * _mutablePriv;
}

@property (nonatomic, copy) NSString *dataType;
@property (nonatomic) struct { long long x1; int x2; unsigned int x3; long long x4; } duration;
@property (nonatomic, copy) NSString *extendedLanguageTag;
@property (nonatomic, copy) NSDictionary *extraAttributes;
@property (nonatomic, copy) NSString *identifier;
@property (nonatomic, copy) NSLocale *locale;
@property (nonatomic) struct { long long x1; int x2; unsigned int x3; long long x4; } time;
@property (nonatomic, copy) <NSObject><NSCopying> *value;

+ (id)keyPathsForValuesAffectingIdentifier;
+ (id)metadataItem;

- (id)copyWithZone:(struct _NSZone { }*)arg1;
- (id)dataType;
- (struct { long long x1; int x2; unsigned int x3; long long x4; })duration;
- (id)extendedLanguageTag;
- (id)extraAttributes;
- (id)identifier;
- (id)key;
- (id)keySpace;
- (id)languageCode;
- (id)locale;
- (void)setDataType:(id)arg1;
- (void)setDuration:(struct { long long x1; int x2; unsigned int x3; long long x4; })arg1;
- (void)setExtendedLanguageTag:(id)arg1;
- (void)setExtraAttributes:(id)arg1;
- (void)setIdentifier:(id)arg1;
- (void)setKey:(id)arg1;
- (void)setKeySpace:(id)arg1;
- (void)setLocale:(id)arg1;
- (void)setStartDate:(id)arg1;
- (void)setTime:(struct { long long x1; int x2; unsigned int x3; long long x4; })arg1;
- (void)setValue:(id)arg1;
- (id)startDate;
- (struct { long long x1; int x2; unsigned int x3; long long x4; })time;
- (id)value;

@end
|
ScalablyTyped/SlinkyTyped | d/devextreme/src/main/scala/typingsSlinky/devextreme/anon/CustomDataExpr.scala | package typingsSlinky.devextreme.anon
import typingsSlinky.devextreme.mod.DevExpress.data.DataSourceOptions
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/** ScalablyTyped-generated facade for a DevExtreme option bag whose members
  * are "data expressions": each option may be given as a field name (String)
  * or as a getter function. Generated code — do not edit by hand.
  */
@js.native
trait CustomDataExpr extends StObject {

  var customDataExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var dataSource: js.UndefOr[
    js.Array[_] | typingsSlinky.devextreme.mod.DevExpress.data.DataSource | DataSourceOptions
  ] = js.native

  var fromExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var fromLineEndExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var fromPointIndexExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var keyExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var lineTypeExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var lockedExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var pointsExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var styleExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var textExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var textStyleExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var toExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var toLineEndExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var toPointIndexExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native

  var zIndexExpr: js.UndefOr[String | (js.Function1[/* data */ js.Any, _])] = js.native
}
/** Generated companion: `apply()` builds an empty literal, and the implicit
  * `MutableBuilder` adds `setX` / `setXFunction1` / `setXUndefined` helpers
  * for every option. Generated code — do not edit by hand.
  */
object CustomDataExpr {

  @scala.inline
  def apply(): CustomDataExpr = {
    val __obj = js.Dynamic.literal()
    __obj.asInstanceOf[CustomDataExpr]
  }

  @scala.inline
  implicit class CustomDataExprMutableBuilder[Self <: CustomDataExpr] (val x: Self) extends AnyVal {

    @scala.inline
    def setCustomDataExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "customDataExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setCustomDataExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "customDataExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setCustomDataExprUndefined: Self = StObject.set(x, "customDataExpr", js.undefined)

    @scala.inline
    def setDataSource(value: js.Array[_] | typingsSlinky.devextreme.mod.DevExpress.data.DataSource | DataSourceOptions): Self = StObject.set(x, "dataSource", value.asInstanceOf[js.Any])

    @scala.inline
    def setDataSourceUndefined: Self = StObject.set(x, "dataSource", js.undefined)

    @scala.inline
    def setDataSourceVarargs(value: js.Any*): Self = StObject.set(x, "dataSource", js.Array(value :_*))

    @scala.inline
    def setFromExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "fromExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setFromExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "fromExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setFromExprUndefined: Self = StObject.set(x, "fromExpr", js.undefined)

    @scala.inline
    def setFromLineEndExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "fromLineEndExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setFromLineEndExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "fromLineEndExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setFromLineEndExprUndefined: Self = StObject.set(x, "fromLineEndExpr", js.undefined)

    @scala.inline
    def setFromPointIndexExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "fromPointIndexExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setFromPointIndexExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "fromPointIndexExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setFromPointIndexExprUndefined: Self = StObject.set(x, "fromPointIndexExpr", js.undefined)

    @scala.inline
    def setKeyExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "keyExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setKeyExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "keyExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setKeyExprUndefined: Self = StObject.set(x, "keyExpr", js.undefined)

    @scala.inline
    def setLineTypeExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "lineTypeExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setLineTypeExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "lineTypeExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setLineTypeExprUndefined: Self = StObject.set(x, "lineTypeExpr", js.undefined)

    @scala.inline
    def setLockedExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "lockedExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setLockedExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "lockedExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setLockedExprUndefined: Self = StObject.set(x, "lockedExpr", js.undefined)

    @scala.inline
    def setPointsExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "pointsExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setPointsExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "pointsExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setPointsExprUndefined: Self = StObject.set(x, "pointsExpr", js.undefined)

    @scala.inline
    def setStyleExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "styleExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setStyleExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "styleExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setStyleExprUndefined: Self = StObject.set(x, "styleExpr", js.undefined)

    @scala.inline
    def setTextExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "textExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setTextExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "textExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setTextExprUndefined: Self = StObject.set(x, "textExpr", js.undefined)

    @scala.inline
    def setTextStyleExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "textStyleExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setTextStyleExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "textStyleExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setTextStyleExprUndefined: Self = StObject.set(x, "textStyleExpr", js.undefined)

    @scala.inline
    def setToExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "toExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setToExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "toExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setToExprUndefined: Self = StObject.set(x, "toExpr", js.undefined)

    @scala.inline
    def setToLineEndExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "toLineEndExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setToLineEndExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "toLineEndExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setToLineEndExprUndefined: Self = StObject.set(x, "toLineEndExpr", js.undefined)

    @scala.inline
    def setToPointIndexExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "toPointIndexExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setToPointIndexExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "toPointIndexExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setToPointIndexExprUndefined: Self = StObject.set(x, "toPointIndexExpr", js.undefined)

    @scala.inline
    def setZIndexExpr(value: String | (js.Function1[/* data */ js.Any, _])): Self = StObject.set(x, "zIndexExpr", value.asInstanceOf[js.Any])

    @scala.inline
    def setZIndexExprFunction1(value: /* data */ js.Any => _): Self = StObject.set(x, "zIndexExpr", js.Any.fromFunction1(value))

    @scala.inline
    def setZIndexExprUndefined: Self = StObject.set(x, "zIndexExpr", js.undefined)
  }
}
|
binofet/ice | Engine/include/Math/Vector/icVector2Int.inl | <filename>Engine/include/Math/Vector/icVector2Int.inl
#include "Math/Vector/icVector2Int.h"
/*! Default constructor; leaves x and y uninitialized (POD semantics). **/
FORCE_INLINE icVector2Int::icVector2Int(void)
{
}// END FUNCTION icVector2Int(void)

/*! Component constructor.
 *
 *  @param[in] xIn  X value
 *  @param[in] yIn  Y value
 **/
FORCE_INLINE icVector2Int::icVector2Int(const int xIn,
                                        const int yIn )
{
    x = xIn;
    y = yIn;
}// END FUNCTION icVector2Int(int,int)

/*! Sets both components in one call.
 *
 *  @param[in] xIn  X value
 *  @param[in] yIn  Y value
 **/
FORCE_INLINE void icVector2Int::Set( const int xIn,
                                     const int yIn )
{
    x = xIn;
    y = yIn;
}// END FUNCTION Set
/*! Computes the dot product of two integer vectors.
 *
 *  The product is accumulated in int arithmetic and cast to icReal at the
 *  end. NOTE(review): large components can overflow the int accumulation
 *  before the cast — confirm expected component ranges.
 *
 *  @param[in] v1  First vector
 *  @param[in] v2  Second vector
 *  @returns   Dot product as icReal
 **/
FORCE_INLINE const icReal Dot( const icVector2Int& v1, const icVector2Int& v2 )
{
    return (icReal)(v1.x * v2.x + v1.y * v2.y );
}// END FUNCTION Dot( const icVector2Int& v1, const icVector2Int& v2 )

/*! Euclidean distance between this vector and another.
 *
 *  @param[in] Rhs  Other vector
 *  @returns   Distance as icReal
 **/
FORCE_INLINE const icReal icVector2Int::Distance( const icVector2Int& Rhs ) const
{
    return icSqrt( (icReal)(x-Rhs.x)*(x-Rhs.x) + (y-Rhs.y)*(y-Rhs.y) );
}// END FUNCTION Distance( const icVector2Int& Rhs ) const

/*! Squared Euclidean distance between this vector and another
 *  (avoids the square root; int arithmetic, cast to icReal at the end).
 *
 *  @param[in] Rhs  Other vector
 *  @returns   Squared distance as icReal
 **/
FORCE_INLINE const icReal
icVector2Int::DistanceSqr( const icVector2Int& Rhs ) const
{
    return (icReal)((x-Rhs.x)*(x-Rhs.x) + (y-Rhs.y)*(y-Rhs.y));
}// END FUNCTION DistanceSqr( const icVector2Int& Rhs ) const

/*! Member form of the dot product; same semantics as the free Dot().
 *
 *  @param[in] Rhs  Other vector
 *  @returns   Dot product as icReal
 **/
FORCE_INLINE const icReal icVector2Int::Dot( const icVector2Int& Rhs ) const
{
    return (icReal)( x*Rhs.x + y*Rhs.y );
}// END FUNCTION Dot( const icVector2Int& Rhs ) const
/*! Tests whether the vector is the zero vector.
 *
 *  For integer components, LengthSqr() < ICEPSILON is equivalent to
 *  x == 0 && y == 0.
 *
 *  @returns true if both components are zero
 **/
FORCE_INLINE bool icVector2Int::IsZero(void)
{
    return (LengthSqr() < ICEPSILON);
}// END FUNCTION IsZero(void)

/*! Computes the length (magnitude) of this vector.
 *
 *  @returns Length as icReal
 **/
FORCE_INLINE const icReal icVector2Int::Length( void ) const
{
    return icSqrt( (icReal)(x*x + y*y) );
}// END FUNCTION Length( void ) const

/*! Computes the squared length of this vector (no square root).
 *
 *  @returns Squared length as icReal
 **/
FORCE_INLINE const icReal icVector2Int::LengthSqr( void ) const
{
    return (icReal)( x*x + y*y );
}// END FUNCTION LengthSqr( void ) const

// Normalize() is intentionally disabled: an integer vector cannot hold a
// unit-length result. Kept for reference.
///*! Normalizes the length of this vector to 1
//**/
//FORCE_INLINE const icVector2Int& icVector2Int::Normalize( void )
//{
//    icReal invLength = icInvSqrt( x*x + y*y );
//    x *= invLength;
//    y *= invLength;
//    return *this;
//}// END FUNCTION Normalize( void )

/*! Read-only component access by index (0 = x, 1 = y).
 *
 *  WARNING: performs no bounds checking.
 *
 *  @param[in] Index  Component index (0 or 1)
 *  @returns   Component value
 **/
FORCE_INLINE const int icVector2Int::operator[]( const uint Index ) const
{
    const int* p = &x;
    return p[Index];
}// END FUNCTION operator[]( const uint Index ) const
/*! Component-wise vector addition.
 *
 *  @param[in] Rhs  Vector on the right
 *  @returns   Sum of the two vectors
 **/
FORCE_INLINE icVector2Int icVector2Int::operator+( const icVector2Int& Rhs ) const
{
    icVector2Int v(x+Rhs.x, y+Rhs.y);
    return v;
}// END FUNCTION operator+( const icVector2Int& Rhs ) const

/*! Component-wise vector subtraction.
 *
 *  @param[in] Rhs  Vector on the right
 *  @returns   Difference of the two vectors
 **/
FORCE_INLINE icVector2Int icVector2Int::operator-( const icVector2Int& Rhs ) const
{
    icVector2Int v(x-Rhs.x,y-Rhs.y);
    return v;
}// END FUNCTION operator-( const icVector2Int& Rhs ) const

/*! Scales the vector by an integer scalar (vector * scalar).
 *
 *  @param[in] Scalar  Scale factor
 *  @returns   Scaled copy of the vector
 **/
FORCE_INLINE icVector2Int icVector2Int::operator*( const int& Scalar ) const
{
    icVector2Int v(x*Scalar,y*Scalar);
    return v;
}// END FUNCTION operator*( const int& Scalar ) const

/*! Scales the vector by an integer scalar (scalar * vector).
 *
 *  @param[in] Lhs  Scale factor
 *  @param[in] Rhs  Vector to scale
 *  @returns   Scaled copy of the vector
 **/
FORCE_INLINE icVector2Int operator*( const int& Lhs, const icVector2Int& Rhs)
{
    icVector2Int v(Lhs*Rhs.x,Lhs*Rhs.y);
    return v;
}// END FUNCTION operator*( const int& Lhs, const icVector2Int& Rhs)

// operator/ is intentionally disabled (integer division would truncate).
//FORCE_INLINE icVector2Int icVector2Int::operator/( const int& Scalar ) const
//{
//    icVector2Int v(x/Scalar,y/Scalar);
//    return v;
//}

/*! Unary negation; returns a component-wise negated copy. **/
FORCE_INLINE icVector2Int icVector2Int::operator-( void ) const
{
    icVector2Int v(-x,-y);
    return v;
}// END FUNCTION operator-( void ) const
/*! Mutable component access by index (0 = x, 1 = y).
 *
 *  WARNING: performs no bounds checking.
 *
 *  @param[in] Index  Component index (0 or 1)
 *  @returns   Reference to the component
 **/
FORCE_INLINE int& icVector2Int::operator[]( const uint Index )
{
    int* p = &x;
    return p[Index];
}// END FUNCTION operator[]( const uint Index )

/*! In-place scale by an integer scalar.
 *
 *  @param[in] Scalar  Scale factor
 **/
FORCE_INLINE void icVector2Int::operator*=( const int& Scalar )
{
    x *= Scalar;
    y *= Scalar;
}// END FUNCTION operator*=( const int& Scalar )

/*! In-place component-wise addition.
 *
 *  @param[in] Rhs  Vector on the right
 **/
FORCE_INLINE void icVector2Int::operator+=( const icVector2Int& Rhs )
{
    x += Rhs.x;
    y += Rhs.y;
}// END FUNCTION operator+=( const icVector2Int& Rhs )

/*! In-place component-wise subtraction.
 *
 *  @param[in] Rhs  Vector on the right
 **/
FORCE_INLINE void icVector2Int::operator-=( const icVector2Int& Rhs )
{
    x -= Rhs.x;
    y -= Rhs.y;
}// END FUNCTION operator-=( const icVector2Int& Rhs )

/*! Implicit conversion to a const int pointer (points at x; y follows,
 *  relying on the class's x,y member layout).
 **/
FORCE_INLINE icVector2Int::operator const int*( void ) const
{
    return &x;
}// END FUNCTION operator const int*( void ) const
jpaijh/ZMT | ZMT_KFD/app/src/main/java/com/example/ZMTCSD/activity/BaseActivity.java | package com.example.ZMTCSD.activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Handler;
import android.os.Message;
import android.support.design.widget.Snackbar;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.alibaba.fastjson.JSON;
import com.android.volley.AuthFailureError;
import com.android.volley.DefaultRetryPolicy;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.JsonObjectRequest;
import com.android.volley.toolbox.Volley;
import com.apkfuns.logutils.LogUtils;
import com.example.ZMTCSD.R;
import com.example.ZMTCSD.dal.MoreUserDal;
import com.example.ZMTCSD.entity.UserLoginEntity;
import com.example.ZMTCSD.utils.DateUtil;
import com.example.ZMTCSD.utils.VolleyErrorHelper;
import com.example.ZMTCSD.AppDelegate;
import org.androidannotations.annotations.AfterViews;
import org.androidannotations.annotations.Background;
import org.androidannotations.annotations.EActivity;
import java.util.Map;
@EActivity
public class BaseActivity extends AppCompatActivity {
/**
* @AfterViews 布局初始化完成
*/
/**
 * AndroidAnnotations hook invoked after view injection completes;
 * subclasses override to run view-dependent setup.
 */
@AfterViews
public void onAfterViews() {
}
/**
* @Background 网络请求数据
*/
/**
 * AndroidAnnotations hook executed on a worker thread; subclasses override
 * to perform background work such as network requests.
 */
@Background
public void onBackgrounds() {
}
/**
* 获取颜色资源
*/
/**
 * Resolves a color resource id to its ARGB int value.
 * NOTE(review): Resources#getColor(int) is deprecated since API 23;
 * consider ContextCompat.getColor when the import can be added.
 *
 * @param colorRes color resource id
 * @return resolved color value
 */
public int getColors(int colorRes) {
    return getResources().getColor(colorRes);
}
/**
* 获取文字资源
*
* @param stringRes
* @return
*/
public String getStrings(int stringRes) {
return getResources().getString(stringRes);
}
/**
* 获取文字数组资源
*
* @param strings
* @return
*/
public String[] getsStrings(int strings) {
return getResources().getStringArray(strings);
}
/**
* 获取高度的资源
*
* @param dimensRes
* @return
*/
public float getDimension(int dimensRes) {
return getResources().getDimension(dimensRes);
}
/**
* 保存当前时间
*
* @param str 键名
*/
public void saveCurrentTime(String str) {
getSharedPreferences(AppDelegate.SP_REFRESH_DATE, Context.MODE_PRIVATE).edit().
putString(str, DateUtil.longDateToStrMDHMS(System.currentTimeMillis())).commit();
}
/**
* 创建 正在刷新... - 上次刷新时间 Snackbar(用 SharedPreferences 保存上次刷新时间)
*
* @param view ViewGroup 子类
* @param str 记录上次刷新时间的标记:统一用TAG
*/
public void createRefreshSnackbar(ViewGroup view, String str) {
String currentDate = DateUtil.longDateToStrMDHMS(System.currentTimeMillis());
SharedPreferences spRefreshDate = getSharedPreferences(AppDelegate.SP_REFRESH_DATE, Context.MODE_PRIVATE);
String lastRefreshDate = spRefreshDate.getString(str, currentDate);
Snackbar snackbar = Snackbar.make(view, "正在刷新...", Snackbar.LENGTH_SHORT);
snackbar.setAction("上次刷新时间:" + lastRefreshDate, new View.OnClickListener() {
@Override
public void onClick(View v) {
// 这个点击事件必须加上不然 snackbar_action 就无法显示
// snackbar.dismiss();
}
});
TextView snackbar_action = (TextView) snackbar.getView().findViewById(R.id.snackbar_action);
snackbar_action.setTextSize(11f);
snackbar.setActionTextColor(getColors(R.color.white));
Snackbar.SnackbarLayout snackBarLayout = (Snackbar.SnackbarLayout) snackbar.getView();
snackBarLayout.setBackgroundColor(getColors(R.color.color_theme));
snackBarLayout.setAlpha(0.9f);
snackbar.show();
spRefreshDate.edit().putString(str, currentDate).commit();
}
/**
* 创建 正在加载... Snackbar
*
* @param view ViewGroup 子类
*/
public void createLoadingSnackbar(ViewGroup view) {
Snackbar snackbar = Snackbar.make(view, "正在加载...", Snackbar.LENGTH_INDEFINITE);
Snackbar.SnackbarLayout snackBarLayout = (Snackbar.SnackbarLayout) snackbar.getView();
snackBarLayout.setBackgroundColor(getColors(R.color.color_theme));
snackBarLayout.setAlpha(0.9f);
snackbar.show();
}
/**
* 创建 加载完成... Snackbar
*
* @param view ViewGroup 子类
*/
public void createLoadingCompleteSnackbar(ViewGroup view) {
Snackbar snackbar = Snackbar.make(view, "加载完成...", Snackbar.LENGTH_SHORT);
Snackbar.SnackbarLayout snackBarLayout = (Snackbar.SnackbarLayout) snackbar.getView();
snackBarLayout.setBackgroundColor(getColors(R.color.decoView_line1_start_color));
snackBarLayout.setAlpha(0.9f);
snackbar.show();
}
/**
* 创建 已经没有数据了... SnackbarUserLoginEntity
*
* @param view ViewGroup 子类
*/
public void createNoDateSnackbar(ViewGroup view) {
Snackbar snackbar = Snackbar.make(view, "已经没有数据了...", Snackbar.LENGTH_SHORT);
Snackbar.SnackbarLayout snackBarLayout = (Snackbar.SnackbarLayout) snackbar.getView();
snackBarLayout.setBackgroundColor(getColors(R.color.red));
snackBarLayout.setAlpha(0.8f);
snackbar.show();
}
/**
* 创建 刷新成功... Snackbar
*
* @param view ViewGroup 子类
*/
public void createRefreshCompleteSnackbar(ViewGroup view) {
Snackbar snackbar = Snackbar.make(view, "刷新成功...", Snackbar.LENGTH_SHORT);
Snackbar.SnackbarLayout snackBarLayout = (Snackbar.SnackbarLayout) snackbar.getView();
snackBarLayout.setBackgroundColor(getColors(R.color.decoView_line1_start_color));
snackBarLayout.setAlpha(0.9f);
snackbar.show();
}
/**
* 显示 View
*
* @param view
*/
public void showView(View view) {
view.setVisibility(View.VISIBLE);
}
/**
* 隐藏 View 但占领位置
*
* @param view
*/
public void InvisibleView(View view) {
view.setVisibility(View.INVISIBLE);
}
/**
* 隐藏 View
*
* @param view
*/
public void hideView(View view) {
view.setVisibility(View.GONE);
}
private RequestQueue mRequestQueue;
private boolean iswin;
public boolean isRefreshWin(int second) {
long unsend = MoreUserDal.GetSecond() + MoreUserDal.GetExpiresin();
// int unsend= MoreUserDal.GetSecond() +20;
if (second > unsend) {
Refreshtoken();
try {
Thread.sleep(1500);
} catch (InterruptedException e) {
e.printStackTrace();
}
return iswin;
} else {
return true;
}
}
public void Refreshtoken() {
mRequestQueue = Volley.newRequestQueue(this);
final String url_RefreshToken = MoreUserDal.GetServerUrl() + "/api/OAuth/Token";
LogUtils.d("刷新token" + url_RefreshToken);
JsonObjectRequest jsonObjectRequest = new JsonObjectRequest(Request.Method.POST, url_RefreshToken, new Response.Listener<org.json.JSONObject>() {
@Override
public void onResponse(org.json.JSONObject response) {
UserLoginEntity userLoginEntity = JSON.parseObject(response.toString(), UserLoginEntity.class);
// 登陆成功:保存用户名
int second = (int) (System.currentTimeMillis() / 1000);
LogUtils.e("更新前" + MoreUserDal.GetAccessToken());
MoreUserDal.UpdateMoreUser(userLoginEntity, second);
mHandler.sendMessage(mHandler.obtainMessage(0));
}
}, new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
LogUtils.d("刷新失败" + VolleyErrorHelper.getMessage(error, BaseActivity.this));
mHandler.sendMessage(mHandler.obtainMessage(1));
}
}) {
@Override
public String getBodyContentType() {
return "application/x-www-form-urlencoded";
}
@Override
public byte[] getBody() {
String string = "grant_type=refresh_token&refresh_token=" + MoreUserDal.GetRefreshToken();
return string.getBytes();
}
@Override
public Map<String, String> getHeaders() throws AuthFailureError {
return super.getHeaders();
}
};
jsonObjectRequest.setTag(this);
jsonObjectRequest.setRetryPolicy(new DefaultRetryPolicy(50 * 1000, 1, 1.0f));
mRequestQueue.add(jsonObjectRequest);
}
private Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case 0:
iswin = true;
break;
case 1:
iswin = false;
break;
default:
break;
}
}
};
}
|
cemizm/AnySense-2.0 | Firmware/shared/Application/include/SensorAdapter.h | <gh_stars>0
/*
* SensorAdapter.h
*
* Created on: 11.03.2016
* Author: cem
*/
#ifndef APPLICATION_SOURCE_SENSORADAPTER_H_
#define APPLICATION_SOURCE_SENSORADAPTER_H_
#include "OSAL.h"
namespace App
{
/*
 * Base class for telemetry sensor protocol adapters.
 * The default Run() parks on an event flag until DeInit() signals it,
 * so a concrete adapter overrides Run() with its protocol loop and
 * relies on DeInit() to unblock it for shutdown.
 */
class SensorAdapterBase
{
protected:
    // Flag that blocks Run() until DeInit() signals shutdown.
    OSAL::EventFlag eventFlag;
public:
    // Workspace size for adapter tasks (units defined by OSAL — confirm).
    static const uint16_t Workspace = 384;
    // Supported telemetry protocols. (Sic: "Procotol" typo is part of the
    // public API and cannot be renamed here without breaking callers.)
    enum Procotol
    {
        None = 0, FrSky = 1,
    };
    // ctor: starts with a cleared event flag.
    SensorAdapterBase() :
            eventFlag()
    {
    }
    // Protocol this adapter handles; base class handles none.
    virtual Procotol Handles()
    {
        return Procotol::None;
    }
    // Prepare the adapter for use; resets the shutdown flag.
    virtual void Init()
    {
        eventFlag.clear();
    }
    // Default task body: block until DeInit() signals.
    virtual void Run(void)
    {
        eventFlag.wait();
    }
    // Release the adapter; wakes up a blocked Run().
    virtual void DeInit()
    {
        eventFlag.signal();
    }
    // Hook for reacting to configuration changes; no-op by default.
    virtual void UpdateConfiguration(void)
    {
    }
    virtual ~SensorAdapterBase(void)
    {
    }
};
/*
 * Protocol-specific adapter base: binds a concrete protocol value at
 * compile time and derives a per-protocol configuration storage key.
 */
template<SensorAdapterBase::Procotol protocol>
class SensorAdapter: public SensorAdapterBase
{
protected:
    // Per-protocol configuration key (0xCBA0 + protocol id).
    static const uint16_t ConfigKey = 0xCBA0 + protocol;
public:
    // ctor
    SensorAdapter() :
            SensorAdapterBase()
    {
    }
    // Reports the protocol bound by the template parameter.
    Procotol Handles() override
    {
        return protocol;
    }
    virtual ~SensorAdapter(void)
    {
    }
};
} /* namespace App */
#endif /* APPLICATION_SOURCE_SENSORADAPTER_H_ */
|
Andreas237/AndroidPolicyAutomation | ExtractedJars/iRobot_com.irobot.home/javafiles/com/irobot/home/fragments/bu$b.java | <gh_stars>1-10
// Decompiled by Jad v1.5.8g. Copyright 2001 <NAME>.
// Jad home page: http://www.kpdus.com/jad.html
// Decompiler options: packimports(3) annotate safe
package com.irobot.home.fragments;
// Referenced classes of package com.irobot.home.fragments:
// bu
/**
 * Decompiled callback interface (obfuscated). Method semantics are unknown
 * from this view; the names come straight from the decompiler output.
 * <p>
 * The illegal {@code static} modifier on a top-level type and the redundant
 * {@code public abstract} on interface members have been removed; interface
 * members are implicitly public and abstract.
 */
public interface bu$b
{
    void a();

    void a(int i);

    void a(String s);

    void a(boolean flag);

    void b();

    void b(String s);
}
|
leonidmelnik/RestKit | Specs/restkitspecs_rails/config/initializers/inflections.rb | <reponame>leonidmelnik/RestKit
# Be sure to restart your server when you modify this file.
# Pin the pluralisation of 'human' to 'humans' for the specs.
# ('human'/'humans' is already the regular inflection; declaring it as
# irregular guards against other rules overriding it.)
ActiveSupport::Inflector.inflections do |inflect|
  inflect.irregular 'human', 'humans'
end
|
marc-henrard/muRisQ-ir-models | src/main/java/marc/henrard/murisq/pricer/swaption/SingleCurrencyModelSwaptionPhysicalProductPricer.java | <gh_stars>1-10
/**
* Copyright (C) 2015 - present by <NAME>.
*/
package marc.henrard.murisq.pricer.swaption;
import com.opengamma.strata.basics.currency.CurrencyAmount;
import com.opengamma.strata.collect.ArgChecker;
import com.opengamma.strata.pricer.impl.option.BlackFormulaRepository;
import com.opengamma.strata.pricer.rate.RatesProvider;
import com.opengamma.strata.pricer.swap.DiscountingSwapProductPricer;
import com.opengamma.strata.product.common.PayReceive;
import com.opengamma.strata.product.common.PutCall;
import com.opengamma.strata.product.swap.ResolvedSwapLeg;
import com.opengamma.strata.product.swap.SwapLegType;
import com.opengamma.strata.product.swaption.PhysicalSwaptionSettlement;
import com.opengamma.strata.product.swaption.ResolvedSwaption;
import marc.henrard.murisq.model.bachelier.BachelierFormula;
import marc.henrard.murisq.model.generic.SingleCurrencyModelParameters;
/**
* Price of physical delivery European swaptions in single currency models.
*
* @author <NAME>
*/
public abstract class SingleCurrencyModelSwaptionPhysicalProductPricer {

    /** The pricer used for swap measures. */
    private static final DiscountingSwapProductPricer PRICER_SWAP =
        DiscountingSwapProductPricer.DEFAULT;

    /**
     * Computes the present value of a swaption in a given model.
     * <p>
     * The result is expressed using the currency of the swaption.
     *
     * @param swaption the product to price
     * @param multicurve the rates provider
     * @param model the rational model parameters
     * @return the present value of the swaption product
     */
    public abstract CurrencyAmount presentValue(
        ResolvedSwaption swaption,
        RatesProvider multicurve,
        SingleCurrencyModelParameters model);

    /**
     * Computes the implied volatility in the Black model.
     * <p>
     * The swaption price is computed in the rational model and the implied volatility for that price is computed.
     * The implied volatility may fail if the model price is outside the Black possible prices.
     *
     * @param swaption the product to price
     * @param multicurve the rates provider
     * @param model the rational model parameters
     * @return the implied volatility in the Black model
     */
    public double impliedVolatilityBlack(
        ResolvedSwaption swaption,
        RatesProvider multicurve,
        SingleCurrencyModelParameters model) {

        double price = presentValue(swaption, multicurve, model).getAmount();
        double parRate = PRICER_SWAP.parRate(swaption.getUnderlying(), multicurve);
        double timeToExpiry = model.relativeTime(swaption.getExpiry());
        ResolvedSwapLeg legFixed = swaption.getUnderlying().getLegs(SwapLegType.FIXED).get(0);
        double pvbp = PRICER_SWAP.getLegPricer().pvbp(legFixed, multicurve);
        double strike = PRICER_SWAP.getLegPricer().couponEquivalent(legFixed, multicurve, pvbp);
        // A receiver swaption is a put on the rate, a payer a call; hence the negation.
        return BlackFormulaRepository
            .impliedVolatility(Math.abs(price / pvbp), parRate, strike, timeToExpiry, !isReceiver(swaption));
    }

    /**
     * Computes the implied volatility in the Bachelier model.
     * <p>
     * The swaption price is computed in the rational model and the implied volatility for that price is computed.
     * The Bachelier formula inversion is done using {@link BachelierFormula#impliedVolatilityApproxLfk4}.
     *
     * @param swaption the product to price
     * @param multicurve the rates provider
     * @param model the rational model parameters
     * @return the implied volatility in the Bachelier model
     */
    public double impliedVolatilityBachelier(
        ResolvedSwaption swaption,
        RatesProvider multicurve,
        SingleCurrencyModelParameters model) {

        double price = presentValue(swaption, multicurve, model).getAmount();
        double timeToExpiry = model.relativeTime(swaption.getExpiry());
        return impliedVolatilityBachelier(swaption, multicurve, price, timeToExpiry);
    }

    /**
     * Computes the implied volatility in the Bachelier model.
     * <p>
     * The implied volatility for the given price is computed.
     * The Bachelier formula inversion is done using {@link BachelierFormula#impliedVolatilityApproxLfk4}.
     *
     * @param swaption the product to price
     * @param multicurve the rates provider
     * @param price the swaption price
     * @param timeToExpiry the time to expiry as computed by the model
     * @return the implied volatility in the Bachelier model
     */
    public double impliedVolatilityBachelier(
        ResolvedSwaption swaption,
        RatesProvider multicurve,
        double price,
        double timeToExpiry) {

        double parRate = PRICER_SWAP.parRate(swaption.getUnderlying(), multicurve);
        ResolvedSwapLeg legFixed = swaption.getUnderlying().getLegs(SwapLegType.FIXED).get(0);
        double pvbp = PRICER_SWAP.getLegPricer().pvbp(legFixed, multicurve);
        double strike = PRICER_SWAP.getLegPricer().couponEquivalent(legFixed, multicurve, pvbp);
        return BachelierFormula.impliedVolatilityApproxLfk4(
            Math.abs(price), parRate, strike, timeToExpiry, Math.abs(pvbp),
            isReceiver(swaption) ? PutCall.PUT : PutCall.CALL);
    }

    /**
     * Validates that the rates and volatilities providers are coherent and that the swaption is acceptable.
     *
     * @param rates the rate provider
     * @param swaption the swaption
     * @param model the rational one-factor model
     */
    protected void validate(
        RatesProvider rates,
        ResolvedSwaption swaption,
        SingleCurrencyModelParameters model) {

        ArgChecker.isTrue(model.getValuationDate().equals(rates.getValuationDate()),
            "volatility and rate data should be for the same date");
        ArgChecker.isFalse(swaption.getUnderlying().isCrossCurrency(),
            "underlying swap should be single currency");
        ArgChecker.isTrue(swaption.getSwaptionSettlement() == PhysicalSwaptionSettlement.DEFAULT,
            "swaption should be physical settlement");
    }

    /**
     * Returns true if the underlying swap is a receiver and false otherwise.
     *
     * @param swaption the swaption
     * @return the receiver flag
     */
    private boolean isReceiver(ResolvedSwaption swaption) {
        return swaption.getUnderlying().getLegs(SwapLegType.FIXED).get(0).getPayReceive()
            .equals(PayReceive.RECEIVE);
    }
}
|
shenzeyu/recommend | src/main/java/com/usst/app/good/goodBrand/model/GoodBrand.java | package com.usst.app.good.goodBrand.model;
import java.util.List;
import com.usst.code.bean.Base;
/**
 * Model object for a goods brand; brands form a tree via {@link #getChildren()}.
 */
public class GoodBrand extends Base {

    private static final long serialVersionUID = -2446719740797638726L;

    private Integer level;
    private String remark;
    private String pic;
    private String picId;
    private String goodTypeId;
    private String isSele;
    private String brand;
    private String idStr;
    private List<GoodBrand> children;

    public Integer getLevel() {
        return level;
    }

    public void setLevel(Integer level) {
        this.level = level;
    }

    public String getRemark() {
        return remark;
    }

    public void setRemark(String remark) {
        this.remark = remark;
    }

    public String getPic() {
        return pic;
    }

    public void setPic(String pic) {
        this.pic = pic;
    }

    public String getPicId() {
        return picId;
    }

    public void setPicId(String picId) {
        this.picId = picId;
    }

    public String getGoodTypeId() {
        return goodTypeId;
    }

    public void setGoodTypeId(String goodTypeId) {
        this.goodTypeId = goodTypeId;
    }

    public String getIsSele() {
        return isSele;
    }

    public void setIsSele(String isSele) {
        this.isSele = isSele;
    }

    public String getBrand() {
        return brand;
    }

    public void setBrand(String brand) {
        this.brand = brand;
    }

    public String getIdStr() {
        return idStr;
    }

    public void setIdStr(String idStr) {
        this.idStr = idStr;
    }

    public List<GoodBrand> getChildren() {
        return children;
    }

    public void setChildren(List<GoodBrand> children) {
        this.children = children;
    }
}
|
Tylarb/gpdb | src/backend/gporca/libgpopt/include/gpopt/engine/CEnumeratorConfig.h | //---------------------------------------------------------------------------
// Greenplum Database
// Copyright (C) 2013 EMC Corp.
//
// @filename:
// CEnumeratorConfig.h
//
// @doc:
// Configurations of plan enumerator
//---------------------------------------------------------------------------
#ifndef GPOPT_CEnumeratorConfig_H
#define GPOPT_CEnumeratorConfig_H
#include "gpos/base.h"
#include "gpos/common/CDynamicPtrArray.h"
#include "gpos/common/CRefCount.h"
#include "gpopt/cost/CCost.h"
#include "naucrates/traceflags/traceflags.h"
#define GPOPT_UNBOUNDED_COST_THRESHOLD 0.0
namespace gpos
{
class CWStringDynamic;
}
namespace gpopt
{
using namespace gpos;
// fwd declarations
class CExpression;
// type definition of plan checker
typedef BOOL (FnPlanChecker)(CExpression *);
//---------------------------------------------------------------------------
// @class:
// CEnumeratorConfig
//
// @doc:
// Configurations of plan enumerator
//
//---------------------------------------------------------------------------
class CEnumeratorConfig : public CRefCount
{
    private:

        //---------------------------------------------------------------------------
        //	@class:
        //		SSamplePlan
        //
        //	@doc:
        //		Internal structure to represent samples of plan space
        //
        //---------------------------------------------------------------------------
        struct SSamplePlan
        {
            private:

                // plan id
                ULLONG m_plan_id;

                // plan cost
                CCost m_cost;

            public:

                // ctor
                SSamplePlan
                    (
                    ULLONG plan_id,
                    CCost cost
                    )
                    :
                    m_plan_id(plan_id),
                    m_cost(cost)
                {}

                // dtor
                virtual
                ~SSamplePlan()
                {};

                // return plan id
                ULLONG GetPlanId() const
                {
                    return m_plan_id;
                }

                // return plan cost
                CCost Cost() const
                {
                    return m_cost;
                }

        }; // struct SSamplePlan

        // dynamic array of plan samples
        typedef CDynamicPtrArray<SSamplePlan, CleanupDelete> SSamplePlanArray;

        // memory pool
        CMemoryPool *m_mp;

        // identifier of chosen plan
        ULLONG m_plan_id;

        // size of plan space
        ULLONG m_ullSpaceSize;

        // number of required samples
        ULLONG m_ullInputSamples;

        // cost of best plan found
        CCost m_costBest;

        // max cost of a created plan sample
        CCost m_costMax;

        // max cost of accepted samples as a ratio to best plan cost
        CDouble m_dCostThreshold;

        // sampled plans
        SSamplePlanArray *m_pdrgpsp;

        // step value used in fitting cost distribution
        CDouble m_dStep;

        // x-values of fitted cost distribution
        DOUBLE* m_pdX;

        // y-values of fitted cost distribution
        DOUBLE *m_pdY;

        // size of fitted cost distribution
        ULONG m_ulDistrSize;

        // restrict plan sampling to plans satisfying required properties
        BOOL m_fSampleValidPlans;

        // plan checker function
        FnPlanChecker *m_pfpc;

        // initialize size of cost distribution
        void InitCostDistrSize();

        // inaccessible copy ctor (declared, not defined)
        CEnumeratorConfig(const CEnumeratorConfig &);

        // compute Gaussian probability value
        static
        DOUBLE DGaussian(DOUBLE d, DOUBLE dMean, DOUBLE dStd);

    public:

        // ctor
        CEnumeratorConfig
            (
            CMemoryPool *mp,
            ULLONG plan_id,
            ULLONG ullSamples,
            CDouble cost_threshold = GPOPT_UNBOUNDED_COST_THRESHOLD
            );

        // dtor
        virtual
        ~CEnumeratorConfig();

        // return plan id
        ULLONG GetPlanId() const
        {
            return m_plan_id;
        }

        // return enumerated space size
        ULLONG GetPlanSpaceSize() const
        {
            return m_ullSpaceSize;
        }

        // set plan space size
        void SetPlanSpaceSize
            (
            ULLONG ullSpaceSize
            )
        {
            m_ullSpaceSize = ullSpaceSize;
        }

        // return number of required samples
        ULLONG UllInputSamples() const
        {
            return m_ullInputSamples;
        }

        // return number of created samples
        ULONG UlCreatedSamples() const
        {
            return m_pdrgpsp->Size();
        }

        // set plan id
        void SetPlanId
            (
            ULLONG plan_id
            )
        {
            m_plan_id = plan_id;
        }

        // return cost threshold
        CDouble DCostThreshold() const
        {
            return m_dCostThreshold;
        }

        // return id of a plan sample
        ULLONG UllPlanSample
            (
            ULONG ulPos
            )
            const
        {
            return (*m_pdrgpsp)[ulPos]->GetPlanId();
        }

        // set cost of best plan found
        void SetBestCost
            (
            CCost cost
            )
        {
            m_costBest = cost;
        }

        // return cost of best plan found
        CCost CostBest() const
        {
            return m_costBest;
        }

        // return cost of a plan sample
        CCost CostPlanSample
            (
            ULONG ulPos
            )
            const
        {
            return (*m_pdrgpsp)[ulPos]->Cost();
        }

        // add a new plan to sample
        BOOL FAddSample(ULLONG plan_id, CCost cost);

        // clear samples
        void ClearSamples();

        // return x-value of cost distribution
        CDouble DCostDistrX(ULONG ulPos) const;

        // return y-value of cost distribution
        CDouble DCostDistrY(ULONG ulPos) const;

        // fit cost distribution on generated samples
        void FitCostDistribution();

        // return size of fitted cost distribution
        ULONG UlCostDistrSize() const
        {
            return m_ulDistrSize;
        }

        // is enumeration enabled? (driven by trace flag)
        BOOL FEnumerate() const
        {
            return GPOS_FTRACE(EopttraceEnumeratePlans);
        }

        // is sampling enabled? (driven by trace flag)
        BOOL FSample() const
        {
            return GPOS_FTRACE(EopttraceSamplePlans);
        }

        // return plan checker function
        FnPlanChecker *Pfpc() const
        {
            return m_pfpc;
        }

        // set plan checker function
        void SetPlanChecker
            (
            FnPlanChecker *pfpc
            )
        {
            GPOS_ASSERT(NULL != pfpc);

            m_pfpc = pfpc;
        }

        // restrict sampling to plans satisfying required properties
        // we need to change settings for testing
        void SetSampleValidPlans
            (
            BOOL fSampleValidPlans
            )
        {
            m_fSampleValidPlans = fSampleValidPlans;
        }

        // return true if sampling can only generate valid plans
        BOOL FSampleValidPlans() const
        {
            return m_fSampleValidPlans;
        }

        // check given plan using PlanChecker function;
        // plans are accepted when no checker has been registered
        BOOL FCheckPlan
            (
            CExpression *pexpr
            )
            const
        {
            GPOS_ASSERT(NULL != pexpr);

            if (NULL != m_pfpc)
            {
                return m_pfpc(pexpr);
            }

            return true;
        }

        // dump samples to an output file
        void DumpSamples(CWStringDynamic *str, ULONG ulSessionId, ULONG ulCommandId);

        // dump fitted cost distribution to an output file
        void DumpCostDistr(CWStringDynamic *str, ULONG ulSessionId, ULONG ulCommandId);

        // print ids of plans in the generated sample
        void PrintPlanSample() const;

        // compute Gaussian kernel density
        static
        void GussianKernelDensity(DOUBLE *pdObervationX, DOUBLE *pdObervationY, ULONG ulObservations, DOUBLE *pdX, DOUBLE *pdY, ULONG size);

        // generate default enumerator configurations
        static
        CEnumeratorConfig *PecDefault
            (
            CMemoryPool *mp
            )
        {
            return GPOS_NEW(mp) CEnumeratorConfig(mp, 0 /*plan_id*/, 0 /*ullSamples*/);
        }

        // generate enumerator configuration for a given plan id
        static
        CEnumeratorConfig *GetEnumeratorCfg
            (
            CMemoryPool *mp,
            ULLONG plan_id
            )
        {
            return GPOS_NEW(mp) CEnumeratorConfig(mp, plan_id, 0/*ullSamples*/);
        }

}; // class CEnumeratorConfig
}
#endif // !GPOPT_CEnumeratorConfig_H
// EOF
|
AdamCottrill/FishNetPortal | fn_portal/api/serializers/FN0_serializers.py | <filename>fn_portal/api/serializers/FN0_serializers.py
"""Serializers for models in fn_portal"""
import re
from common.models import Lake
from django.contrib.auth import get_user_model
from fn_portal.models import (
FNProtocol,
FN011,
FN013,
FN014,
FN022,
FN026,
FN028,
Gear,
ProjectGearProcessType,
)
from rest_framework import serializers
from .common_serializers import LakeSerializer, UserSerializer
User = get_user_model()
class FNProtocolSerializer(serializers.ModelSerializer):
    """Minimal representation of an FN protocol: abbreviation and label."""

    class Meta:
        model = FNProtocol
        lookup_field = "abbrev"
        fields = (
            "abbrev",
            "label",
        )
class FN011SimpleSerializer(serializers.ModelSerializer):
    """Minimal representation of an FN011 project: code and name only."""

    class Meta:
        model = FN011
        lookup_field = "prj_cd"
        fields = (
            "prj_cd",
            "prj_nm",
        )
# Serializers define the API representation.
class FN011Serializer(serializers.ModelSerializer):
    """Full (read-oriented) serializer for FN011 project records, nesting
    the lake and project-lead objects and exposing the protocol abbrev."""

    protocol = serializers.CharField(read_only=True, source="protocol.abbrev")
    lake = LakeSerializer(many=False)
    prj_ldr = UserSerializer(many=False)

    class Meta:
        model = FN011
        lookup_field = "prj_cd"
        fields = (
            "id",
            "year",
            "prj_cd",
            "slug",
            "prj_nm",
            "prj_ldr",
            "prj_date0",
            "prj_date1",
            "protocol",
            "source",
            "lake",
        )
class FN011WizardSerializer(serializers.ModelSerializer):
    """
    A FN011 serializer used exclusively by our project wizard endpoint.

    This serializer is a simplified version of the main FN011 serializer,
    which is used mostly for read-only operations and includes nested
    objects for project lead, protocol and lake. This serializer is used
    to create new FN011 records and uses slugs to identify the related
    entities.
    """

    lake = serializers.SlugRelatedField(
        queryset=Lake.objects.all(), slug_field="abbrev"
    )
    prj_ldr = serializers.SlugRelatedField(
        queryset=User.objects.all(), slug_field="username"
    )
    protocol = serializers.SlugRelatedField(
        queryset=FNProtocol.objects.all(), slug_field="abbrev"
    )

    class Meta:
        model = FN011
        lookup_field = "prj_cd"
        fields = (
            "prj_cd",
            "prj_nm",
            "prj_date0",
            "prj_date1",
            "comment0",
            "prj_ldr",
            "protocol",
            "lake",
        )

    def validate_prj_cd(self, value):
        """Ensure that the project code is a valid FN-II project code."""
        pattern = "^[A-Z]{3}_[A-Z]{2}[0-9]{2}_[A-Z0-9]{3}$"
        if re.fullmatch(pattern, value) is None:
            raise serializers.ValidationError("That is not a valid FN-II project code.")
        return value

    def validate(self, data):
        """Make sure that:

        + start date occurs on or before end date,
        + start date and end date are in the same calendar year,
        + the year in both dates agrees with the year in prj_cd, and
        + the project code prefix matches the selected lake
        """
        if data["prj_date0"] > data["prj_date1"]:
            raise serializers.ValidationError(
                {"prj_date1": "project end date must occur on or after start date"}
            )

        if data["prj_date0"].year != data["prj_date1"].year:
            raise serializers.ValidationError(
                {"prj_date1": "project start and end occur in different years."}
            )

        prj_cd = data["prj_cd"]
        if str(data["prj_date0"].year)[2:] != prj_cd[6:8]:
            raise serializers.ValidationError(
                {
                    "prj_date0": "year of project start is not consistent with year in project code."
                }
            )

        if str(data["prj_date1"].year)[2:] != prj_cd[6:8]:
            raise serializers.ValidationError(
                {
                    "prj_date1": "year of project end is not consistent with year in project code."
                }
            )

        # the first three characters of the project code identify the lake
        lake_project_prefixes = {
            "HU": ["LHA", "LHR"],
            "SU": ["LSA", "LSR"],
            "ON": ["LOA", "LOM"],
            "ER": ["LEA", "LEM"],
            "SC": ["LEA", "LEM"],
        }

        lake = data["lake"]
        prefix = data["prj_cd"][:3]
        # default to [] so an unknown lake abbrev fails validation rather
        # than raising a TypeError ("in None")
        if prefix not in lake_project_prefixes.get(lake.abbrev, []):
            raise serializers.ValidationError(
                {
                    "prj_cd": f"project code prefix ({prefix}) is not consistent with selected lake ({lake.abbrev})."
                }
            )

        return data
class ProjectGearProcessTypeSerializer(serializers.ModelSerializer):
    """
    A serializer used by the Project Wizard to create
    Project-Gear-ProcessType entries. Accepts a json object of the form:

    {slug: "lha_ia21_123", gear: "GL50", process_type: "1"}
    """

    project = serializers.SlugRelatedField(
        many=False, queryset=FN011.objects.all(), slug_field="slug"
    )
    gear = serializers.SlugRelatedField(
        many=False, queryset=Gear.objects.all(), slug_field="gr_code"
    )

    class Meta:
        model = ProjectGearProcessType
        fields = ("project", "gear", "process_type")
class FN013Serializer(serializers.ModelSerializer):
    """Class to serialize the FN013 (gears) used in each project."""

    project = serializers.SlugRelatedField(
        many=False, read_only=True, slug_field="slug"
    )

    class Meta:
        model = FN013
        fields = ("project", "gr", "effcnt", "effdst", "gr_des", "slug")
class FN013ListSerializer(serializers.ModelSerializer):
    """A read-only serializer that returns gear data in the format expected
    from FN-II. It serializes Gear objects in a shape that emulates FN013
    records; it does not include id or slug values because FN013 records do
    not actually exist.
    """

    prj_cd = serializers.CharField(read_only=True)
    gr = serializers.CharField(read_only=True, source="gr_code")

    class Meta:
        model = Gear
        fields = ("prj_cd", "gr", "effcnt", "effdst", "gr_des")
class FN014Serializer(serializers.ModelSerializer):
    """Class to serialize the FN014 (gear/panel detail) used in each project."""

    gear = serializers.SlugRelatedField(many=False, read_only=True, slug_field="slug")

    class Meta:
        model = FN014
        fields = (
            "gear",
            "eff",
            "mesh",
            "grlen",
            "grht",
            "grwid",
            "grcol",
            "grmat",
            "gryarn",
            "grknot",
            "eff_des",
            "slug",
        )
class FN022ListSerializer(serializers.ModelSerializer):
    """A read-only serializer listing seasons (FN022) in the format expected
    from FN-II. The nested project is replaced by its prj_cd.
    """

    prj_cd = serializers.CharField(read_only=True, source="project.prj_cd")

    class Meta:
        model = FN022
        fields = ("prj_cd", "ssn", "ssn_des", "ssn_date0", "ssn_date1", "slug", "id")
class FN022Serializer(serializers.ModelSerializer):
    """Class to serialize the seasons (temporal strata) used in each project."""

    project = serializers.SlugRelatedField(
        many=False, queryset=FN011.objects.all(), slug_field="slug"
    )

    class Meta:
        model = FN022
        fields = ("project", "ssn", "ssn_des", "ssn_date0", "ssn_date1", "slug")

    def validate(self, data):
        """Make sure that:

        + season start date occurs on or before end date,
        + season start date and end date are in the same calendar year, and
        + the year in both dates agrees with the year in prj_cd
        """
        if data["ssn_date0"] > data["ssn_date1"]:
            raise serializers.ValidationError(
                {"ssn_date1": "season end date must occur on or after start date"}
            )

        if data["ssn_date0"].year != data["ssn_date1"].year:
            raise serializers.ValidationError(
                {"ssn_date1": "season start and end occur in different years."}
            )

        # two-digit years > 50 are interpreted as 19xx, otherwise 20xx
        yr = data["project"].prj_cd[6:8]
        project_year = f"19{yr}" if int(yr) > 50 else f"20{yr}"

        # typo fixed in the two messages below: "constistent" -> "consistent"
        if str(data["ssn_date0"].year) != project_year:
            raise serializers.ValidationError(
                {"ssn_date0": "season start year is not consistent with project year."}
            )

        if str(data["ssn_date1"].year) != project_year:
            raise serializers.ValidationError(
                {"ssn_date1": "season end year is not consistent with project year."}
            )

        return data
class FN026ListSerializer(serializers.ModelSerializer):
    """This is a super minimal serializer for spatial strata associated
    with a project. It is used by an api endpoint to return read-only
    data in FN-II format. Fast and flat. The same as
    FN026SimpleSerializer but project is replaced with prj_cd.
    """

    prj_cd = serializers.CharField(read_only=True, source="project.prj_cd")

    class Meta:
        model = FN026
        fields = ("prj_cd", "space", "space_des", "dd_lat", "dd_lon", "slug", "id")
class FN026SimpleSerializer(serializers.ModelSerializer):
    """This is a super minimal serializer for creating spatial strata
    associated with a project. It is used by the project wizard to convert
    the project code and spatial strata information to database entries.

    This serializer is identical to the FN026ListSerializer except that
    the read-only field prj_cd has been replaced with a slug related field
    'project'.
    """

    project = serializers.SlugRelatedField(
        many=False, queryset=FN011.objects.all(), slug_field="slug"
    )

    class Meta:
        model = FN026
        fields = (
            "project",
            "space",
            "space_des",
            "dd_lat",
            "dd_lon",
        )
class FN026Serializer(serializers.ModelSerializer):
    """Full serializer for FN026 spatial strata records, exposing the
    stratum definition fields (as named in the FN-II data model) along
    with the centroid coordinates (dd_lat, dd_lon)."""

    class Meta:
        model = FN026
        fields = (
            "project",
            "label",
            "space",
            "space_des",
            "area_lst",
            "grdep_ge",
            "grdep_lt",
            "sidep_ge",
            "sidep_lt",
            "grid_ge",
            "grid_lt",
            "site_lst",
            "sitp_lst",
            "dd_lat",
            "dd_lon",
        )
class FN028ListSerializer(serializers.ModelSerializer):
    """This is a super minimal serializer for fishing modes associated with
    a project. It is used by an api endpoint to return read-only data in
    FN-II format. Fast and flat. The same as FN028SimpleSerializer but
    project is replaced with prj_cd.
    """

    prj_cd = serializers.CharField(read_only=True, source="project.prj_cd")
    gear = serializers.CharField(read_only=True, source="gear.gr_code")

    class Meta:
        model = FN028
        fields = ("prj_cd", "mode", "mode_des", "gear", "gruse", "orient", "slug", "id")
class FN028SimpleSerializer(serializers.ModelSerializer):
    """This is a super minimal serializer for fishing modes associated with
    a project. It is used by the project wizard to convert the project
    code and mode information to database entries.

    This serializer is identical to the FN028ListSerializer except that
    the read-only field prj_cd has been replaced with a slug related field
    'project'.
    """

    project = serializers.SlugRelatedField(
        many=False, queryset=FN011.objects.all(), slug_field="slug"
    )
    gear = serializers.SlugRelatedField(
        many=False, queryset=Gear.objects.all(), slug_field="gr_code"
    )

    class Meta:
        model = FN028
        fields = (
            "project",
            "mode",
            "mode_des",
            "gear",
            "gruse",
            "orient",
        )
class FN028Serializer(serializers.ModelSerializer):
    """Full serializer for FN028 fishing-mode records, exposing the mode
    definition fields (as named in the FN-II data model) including the
    effort duration and set-time constraints."""

    class Meta:
        model = FN028
        fields = (
            "project",
            "mode",
            "mode_des",
            "gear",
            "gruse",
            "orient",
            "effdur_ge",
            "effdur_lt",
            "efftm0_ge",
            "efftm0_lt",
            "slug",
        )
|
NifTK/NifTK | Libraries/ITK/RegistrationToolbox/Construction/itkImageRegistrationFactory.h | /*=============================================================================
NifTK: A software platform for medical image computing.
Copyright (c) University College London (UCL). All rights reserved.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE.
See LICENSE.txt in the top level directory for details.
=============================================================================*/
#ifndef itkImageRegistrationFactory_h
#define itkImageRegistrationFactory_h
#include <itkProcessObject.h>
#include <itkConstantBoundaryCondition.h>
#include <itkWindowedSincInterpolateImageFunction.h>
// Interpolators.
#include <itkInterpolateImageFunction.h>
#include <itkLinearInterpolateImageFunction.h>
#include <itkNearestNeighborInterpolateImageFunction.h>
#include <itkBSplineInterpolateImageFunction.h>
// Similarity Measures
#include <itkSimilarityMeasure.h>
#include <itkMSDImageToImageMetric.h>
#include <itkNCCImageToImageMetric.h>
#include <itkSSDImageToImageMetric.h>
#include <itkSADImageToImageMetric.h>
#include <itkRIUImageToImageMetric.h>
#include <itkPIUImageToImageMetric.h>
#include <itkJEImageToImageMetric.h>
#include <itkMIImageToImageMetric.h>
#include <itkNMIImageToImageMetric.h>
#include <itkCRImageToImageMetric.h>
// Transformations
#include <itkTransform.h>
#include <itkPerspectiveProjectionTransform.h>
#include <itkEulerAffineTransform.h>
#include <itkUCLBSplineTransform.h>
#include <itkFluidDeformableTransform.h>
#include <itkAffineTransform.h>
#include <itkPCADeformationModelTransform.h>
#include <itkTranslationPCADeformationModelTransform.h>
// Optimizers
#include <itkSingleValuedNonLinearOptimizer.h>
#include <itkGradientDescentOptimizer.h>
#include <itkUCLSimplexOptimizer.h>
#include <itkUCLRegularStepGradientDescentOptimizer.h>
#include <itkConjugateGradientOptimizer.h>
#include <itkPowellOptimizer.h>
#include <itkUCLRegularStepOptimizer.h>
#include <itkUCLPowellOptimizer.h>
// Commands
#include <itkIterationUpdateCommand.h>
#include <itkVnlIterationUpdateCommand.h>
// Registration methods.
#include <itkSingleResolutionImageRegistrationMethod.h>
#include <itkMaskedImageRegistrationMethod.h>
#include <itkTranslationThenRotationImageRegistrationMethod.h>
#include <itkTranslateRotateScaleImageRegistrationMethod.h>
#include <itkRigidPlusScaleImageRegistrationMethod.h>
#include <itkBlockMatchingMethod.h>
// Multi-resolution methods
#include <itkMultiResolutionImageRegistrationWrapper.h>
namespace itk
{
/** Supported image interpolation schemes. */
enum
InterpolationTypeEnum
{
  UNKNOWN_INTERP,
  NEAREST,  // Nearest neighbour.
  LINEAR,   // (Bi/tri-)linear.
  BSPLINE,  // B-Spline.
  SINC      // Windowed sinc.
};

/** Supported image similarity measures. */
enum
MetricTypeEnum
{
  UNKNOWN_METRIC,
  SSD,  // Sum of Squared Differences
  MSD,  // Mean of Squared Differences
  SAD,  // Sum of Absolute Differences
  NCC,  // Normalized Cross Correlation
  RIU,  // Woods Ratio Image Uniformity
  PIU,  // Woods Partitioned Image Uniformity
  JE,   // Joint Entropy
  MI,   // Mutual information
  NMI,  // Normalized Mutual Information
  CR    // Correlation Ratio
};

/** Supported transformation models. */
enum
TransformTypeEnum
{
  UNKNOWN_TRANSFORM,
  TRANSLATION,  // Mainly for testing.
  RIGID,        // Rigid, so rotations and translations, 3DOF in 2D and 6DOF in 3D.
  RIGID_SCALE,  // Rigid plus scale, 5DOF in 2D, 9DOF in 3D.
  AFFINE        // Affine. 7DOF in 2D, 12DOF in 3D.
};

/** Supported optimisation strategies. */
enum
OptimizerTypeEnum
{
  UNKNOWN_OPTIMIZER,
  SIMPLEX,                     // For when you dont trust, or don't have derivative.
  GRADIENT_DESCENT,            // Standard gradient descent.
  REGSTEP_GRADIENT_DESCENT,    // Regular step size gradient descent.
  CONJUGATE_GRADIENT_DESCENT,  // Conjugate gradients.
  POWELL,                      // also, doesnt require derivative.
  SIMPLE_REGSTEP,              // Simple multi-regular step in each direction.
  UCLPOWELL
};

/** Supported single-resolution registration strategies. */
enum
SingleResRegistrationMethodTypeEnum
{
  UNKNOWN_METHOD,
  SINGLE_RES_MASKED,             // The 'default' method, simply optimises transform wrt metric.
  SINGLE_RES_TRANS_ROTATE,       // Switching method, just does rigid, but alternates translation and rotation
  SINGLE_RES_TRANS_ROTATE_SCALE, // Switching method, separately does translation, rotation and scale
  SINGLE_RES_RIGID_SCALE,        // Switching method, alternates rigid, and then scale.
  SINGLE_RES_BLOCK_MATCH
};

/** Supported multi-resolution registration strategies. */
enum
MultiResRegistrationMethodTypeEnum
{
  UNKNOWN_MULTI,
  MULTI_RES_NORMAL
};
/**
* \class ImageRegistrationFactory
* \brief Parameterised Factory Pattern [2] for creating registration objects.
*
* The purpose of this class is to:
*
* a.) Define the types that we can create, so look at the typedefs below, and enums above.
*
* b.) Set reasonable defaults, if necessary.
*
* i.e. There is NO clever logic here, it's just "how do I create an object"
*
* While there are many ways of doing this, this one was deemed to be the simplest.
*/
template <typename TInputImageType, unsigned int Dimension, class TScalarType>
class ITK_EXPORT ImageRegistrationFactory : public Object
{
public:

  /** Standard class typedefs. */
  typedef ImageRegistrationFactory Self;
  typedef Object Superclass;
  typedef SmartPointer<Self> Pointer;
  typedef SmartPointer<const Self> ConstPointer;

  /** Method for creation through the object factory. */
  itkNewMacro(Self);

  /** Run-time type information (and related methods). */
  itkTypeMacro(ImageRegistrationFactory, Object);

  /** Typedefs. */
  typedef typename TInputImageType::PixelType InputPixelType;

  /** Iteration Update Commands. */
  typedef itk::IterationUpdateCommand IterationUpdateCommandType;
  typedef itk::VnlIterationUpdateCommand VnlIterationUpdateCommandType;

  /** Interpolators. */
  typedef itk::InterpolateImageFunction< TInputImageType, TScalarType> InterpolatorType;
  typedef itk::NearestNeighborInterpolateImageFunction< TInputImageType, TScalarType> NearestNeighbourInterpolatorType;
  typedef itk::LinearInterpolateImageFunction< TInputImageType, TScalarType > LinearInterpolatorType;
  typedef itk::BSplineInterpolateImageFunction< TInputImageType, TScalarType > BSplineInterpolatorType;
  typedef itk::ConstantBoundaryCondition< TInputImageType > BoundaryConditionType;
  /** Half-width, in voxels, of the windowed sinc interpolation kernel. */
  const static unsigned int WindowRadius = 5;
  typedef itk::Function::WelchWindowFunction<WindowRadius> WindowFunctionType;
  typedef itk::WindowedSincInterpolateImageFunction<
                                          TInputImageType,
                                          WindowRadius,
                                          WindowFunctionType,
                                          BoundaryConditionType,
                                          TScalarType > SincInterpolatorType;

  /** Similarity Measures. We use our base class SimilarityMeasure, not ImageToImageMetric. */
  typedef itk::SimilarityMeasure<TInputImageType, TInputImageType> MetricType;
  typedef itk::SSDImageToImageMetric<TInputImageType, TInputImageType > SSDMetricType;
  typedef itk::MSDImageToImageMetric<TInputImageType, TInputImageType > MSDMetricType;
  typedef itk::NCCImageToImageMetric<TInputImageType, TInputImageType > NCCMetricType;
  typedef itk::SADImageToImageMetric<TInputImageType, TInputImageType > SADMetricType;
  typedef itk::RIUImageToImageMetric<TInputImageType, TInputImageType > RIUMetricType;
  typedef itk::PIUImageToImageMetric<TInputImageType, TInputImageType > PIUMetricType;
  typedef itk::JEImageToImageMetric<TInputImageType, TInputImageType > JEMetricType;
  typedef itk::MIImageToImageMetric<TInputImageType, TInputImageType > MIMetricType;
  typedef itk::NMIImageToImageMetric<TInputImageType, TInputImageType > NMIMetricType;
  typedef itk::CRImageToImageMetric<TInputImageType, TInputImageType > CRMetricType;

  /** Transformations */
  typedef itk::Transform< TScalarType, Dimension, Dimension > TransformType;
  typedef itk::PerspectiveProjectionTransform<TScalarType> PerspectiveProjectionTransformType;
  typedef itk::EulerAffineTransform<TScalarType, Dimension, Dimension> EulerAffineTransformType;
  typedef itk::UCLBSplineTransform<TInputImageType, TScalarType, Dimension, float> BSplineDeformableTransformType;
  typedef itk::FluidDeformableTransform<TInputImageType, TScalarType, Dimension, float> FluidDeformableTransformType;
  typedef itk::AffineTransform<TScalarType, Dimension> ITKAffineTransformType;
  typedef itk::PCADeformationModelTransform<TScalarType, Dimension> PCADeformationModelTransformType;
  typedef itk::TranslationPCADeformationModelTransform<TScalarType, Dimension> TranslationPCADeformationModelTransformType;

  /** Optimisers. The *Pointer aliases below are raw pointers, not SmartPointers. */
  typedef itk::SingleValuedNonLinearOptimizer OptimizerType;
  typedef itk::UCLSimplexOptimizer SimplexType;
  typedef SimplexType* SimplexPointer;
  typedef itk::GradientDescentOptimizer GradientDescentType;
  typedef GradientDescentType* GradientDescentPointer;
  typedef itk::UCLRegularStepGradientDescentOptimizer RegularStepGradientDescentType;
  typedef RegularStepGradientDescentType* RegularStepGradientDescentPointer;
  typedef itk::ConjugateGradientOptimizer ConjugateGradientType;
  typedef ConjugateGradientType* ConjugateGradientPointer;
  typedef itk::PowellOptimizer PowellOptimizerType;
  typedef PowellOptimizerType* PowellOptimizerPointer;
  typedef itk::UCLRegularStepOptimizer UCLRegularStepOptimizerType;
  typedef UCLRegularStepOptimizerType* UCLRegularStepOptimizerTypePointer;
  typedef itk::UCLPowellOptimizer UCLPowellOptimizerType;
  typedef UCLPowellOptimizerType* UCLPowellOptimizerPointer;

  /** Registration Methods. */
  typedef itk::MaskedImageRegistrationMethod<TInputImageType> SingleResRegistrationType;
  typedef itk::TranslationThenRotationImageRegistrationMethod<TInputImageType> TranslationThenRotationRegistrationType;
  typedef itk::TranslateRotateScaleImageRegistrationMethod<TInputImageType> TranslateRotateScaleRegistrationType;
  typedef itk::RigidPlusScaleImageRegistrationMethod<TInputImageType> RigidPlusScaleRegistrationType;
  typedef itk::BlockMatchingMethod<TInputImageType, TScalarType> BlockMatchingRegistrationType;

  /** Multi-resolution methods. */
  typedef itk::MultiResolutionImageRegistrationWrapper
    <TInputImageType> MultiResRegistrationType;

  /** Creates a single-resolution method. */
  virtual typename SingleResRegistrationType::Pointer CreateSingleResRegistration(SingleResRegistrationMethodTypeEnum type);

  /** Creates a multi-resolution method. */
  virtual typename MultiResRegistrationType::Pointer CreateMultiResRegistration(MultiResRegistrationMethodTypeEnum type);

  /** Create an interpolator. */
  virtual typename InterpolatorType::Pointer CreateInterpolator(InterpolationTypeEnum type);

  /** Create a Metric. */
  virtual typename MetricType::Pointer CreateMetric(MetricTypeEnum type);

  /** Create a transform. */
  virtual typename TransformType::Pointer CreateTransform(TransformTypeEnum type);

  /** Create a transform from a file. */
  virtual typename TransformType::Pointer CreateTransform(std::string transfomFilename);

  /** Create an optimiser. */
  virtual typename OptimizerType::Pointer CreateOptimizer(OptimizerTypeEnum optimizerType);

  /** You need to create one of these, dependent on the type of optimizer. */
  virtual typename IterationUpdateCommandType::Pointer CreateIterationUpdateCommand(OptimizerTypeEnum optimizerType);

protected:

  ImageRegistrationFactory();
  virtual ~ImageRegistrationFactory() {};
  void PrintSelf(std::ostream& os, Indent indent) const override;

private:

  ImageRegistrationFactory(const Self&); // purposefully not implemented
  void operator=(const Self&);           // purposefully not implemented

};
} // end namespace
#ifndef ITK_MANUAL_INSTANTIATION
#include "itkImageRegistrationFactory.txx"
#endif
#endif
|
cyjake/material | icons/FileFindOutlineIcon.js | <filename>icons/FileFindOutlineIcon.js
import React from 'react'
const DEFAULT_SIZE = 24
export default ({
fill = 'currentColor',
width = DEFAULT_SIZE,
height = DEFAULT_SIZE,
style = {},
...props
}) => (
<svg
viewBox={ `0 0 ${ DEFAULT_SIZE } ${ DEFAULT_SIZE }` }
style={{ fill, width, height, ...style }}
{ ...props }
>
<path d="M14,2H6C4.9,2 4,2.9 4,4V20C4,21.1 4.9,22 6,22H18C19.1,22 20,21.1 20,20V8L14,2M6,4H13L18,9V17.58L16.16,15.74C17.44,13.8 17.23,11.17 15.5,9.46C14.55,8.5 13.28,8 12,8C10.72,8 9.45,8.5 8.47,9.46C6.5,11.41 6.5,14.57 8.47,16.5C9.44,17.5 10.72,17.97 12,17.97C12.96,17.97 13.92,17.69 14.75,17.14L17.6,20H6V4M14.11,15.1C13.55,15.66 12.8,16 12,16C11.2,16 10.45,15.67 9.89,15.1C9.33,14.54 9,13.79 9,13C9,12.19 9.32,11.44 9.89,10.88C10.45,10.31 11.2,10 12,10C12.8,10 13.55,10.31 14.11,10.88C14.67,11.44 15,12.19 15,13C15,13.79 14.68,14.54 14.11,15.1Z" />
</svg>
)
|
fantongkw/oa | src/main/java/com/ccc/oa/config/SecurityConfig.java | package com.ccc.oa.config;
import com.ccc.oa.security.CustomUserDetailsService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.builders.WebSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.web.firewall.DefaultHttpFirewall;
import org.springframework.security.web.firewall.HttpFirewall;
import org.springframework.session.FindByIndexNameSessionRepository;
import org.springframework.session.Session;
import org.springframework.session.security.SpringSessionBackedSessionRegistry;
@EnableWebSecurity
public class SecurityConfig extends WebSecurityConfigurerAdapter {
private final CustomUserDetailsService customUserDetailsService;
private final FindByIndexNameSessionRepository<? extends Session> sessionRepository;
@Autowired
public SecurityConfig(CustomUserDetailsService customUserDetailsService, FindByIndexNameSessionRepository<? extends Session> sessionRepository) {
this.customUserDetailsService = customUserDetailsService;
this.sessionRepository = sessionRepository;
}
@Override
protected void configure(HttpSecurity http) throws Exception {
http
.authorizeRequests()
.antMatchers("/registered").permitAll()
.antMatchers("/personal/**").hasRole("USER")
.antMatchers("/app/**").hasRole("USER")
.antMatchers("/dept/**").hasRole("DEPTS")
.antMatchers("/user/**").hasRole("USERS")
.antMatchers("/role/**").hasRole("ROLES")
.antMatchers("/notice").hasRole("USER")
.antMatchers("/**").permitAll()
.and()
.sessionManagement()
.maximumSessions(1)
.sessionRegistry(sessionRegistry())
.and()
.and()
.formLogin()
.loginPage("/login")
.failureUrl("/login-error")
.permitAll()
.and()
.rememberMe()
.and()
.logout()
.logoutSuccessUrl("/login-logout")
.invalidateHttpSession(true)
.permitAll()
.and()
.rememberMe()
.and()
.httpBasic();
}
@Autowired
public void configureGlobal(AuthenticationManagerBuilder auth) throws Exception {
auth
.userDetailsService(customUserDetailsService).passwordEncoder(passwordEncoder());
}
@Bean
public PasswordEncoder passwordEncoder() {
return new BCryptPasswordEncoder();
}
@Bean
SpringSessionBackedSessionRegistry sessionRegistry() {
return new SpringSessionBackedSessionRegistry<>(this.sessionRepository);
}
@Bean
@Override
public AuthenticationManager authenticationManagerBean() throws Exception {
return super.authenticationManagerBean();
}
@Bean
public HttpFirewall httpFirewall() {
return new DefaultHttpFirewall();
}
@Override
public void configure(WebSecurity web) throws Exception {
super.configure(web);
web.httpFirewall(httpFirewall());
}
} |
MylesIsCool/ViaVersion | bungee/src/main/java/com/viaversion/viaversion/bungee/providers/BungeeMainHandProvider.java | /*
* This file is part of ViaVersion - https://github.com/ViaVersion/ViaVersion
* Copyright (C) 2016-2022 ViaVersion and contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.viaversion.viaversion.bungee.providers;
import com.viaversion.viaversion.api.connection.ProtocolInfo;
import com.viaversion.viaversion.api.connection.UserConnection;
import com.viaversion.viaversion.protocols.protocol1_9to1_8.providers.MainHandProvider;
import net.md_5.bungee.api.ProxyServer;
import net.md_5.bungee.api.connection.ProxiedPlayer;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/*
 This solves the wrong mainhand issue when you join with BungeeCord on a 1.8 server, and switch to a 1.9 or higher.
 */
public class BungeeMainHandProvider extends MainHandProvider {
    // Reflection handles into BungeeCord internals; both stay null if the
    // lookup in the static initializer fails (e.g. internals renamed).
    private static Method getSettings = null;
    private static Method setMainHand = null;

    static {
        try {
            getSettings = Class.forName("net.md_5.bungee.UserConnection").getDeclaredMethod("getSettings");
            setMainHand = Class.forName("net.md_5.bungee.protocol.packet.ClientSettings").getDeclaredMethod("setMainHand", int.class);
        } catch (Exception ignored) {
            // BungeeCord internals unavailable; setMainHand() degrades to a no-op.
        }
    }

    /**
     * Pushes the given main-hand setting into the player's cached
     * ClientSettings packet via reflection.
     *
     * @param user the ViaVersion connection of the player
     * @param hand the main hand value to apply (protocol integer)
     */
    @Override
    public void setMainHand(UserConnection user, int hand) {
        // BUG FIX: previously this method would throw a NullPointerException
        // if the reflective lookup in the static initializer had failed.
        if (getSettings == null || setMainHand == null) return;
        ProtocolInfo info = user.getProtocolInfo();
        if (info == null || info.getUuid() == null) return;
        ProxiedPlayer player = ProxyServer.getInstance().getPlayer(info.getUuid());
        if (player == null) return;
        try {
            Object settings = getSettings.invoke(player);
            if (settings != null) {
                setMainHand.invoke(settings, hand);
            }
        } catch (IllegalAccessException | InvocationTargetException e) {
            e.printStackTrace();
        }
    }
}
|
alexeykarlyganov/restful-spring-application | src/test/java/org/alexeykarlyganov/rest/PingControllerIntegrationTest.java | <gh_stars>0
package org.alexeykarlyganov.rest;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.hamcrest.Matchers.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
import org.alexeykarlyganov.rest.controllers.system.PingController;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MockMvc;
@SpringBootTest(classes = RestApplication.class)
@AutoConfigureMockMvc
public class PingControllerIntegrationTest {
@Autowired
private PingController pingController;
@Autowired
private MockMvc mockMvc;
@Test
public void controllerShouldLoad()
{
assertThat(pingController).isNotNull();
}
@Test
public void shouldReturnNameOfProject() throws Exception {
this.mockMvc.perform(get("/system/ping")
.contentType(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(content().contentTypeCompatibleWith(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("$.name", is("rest")));
}
@Test
public void shouldContainsBuildAndStartTime() throws Exception {
this.mockMvc.perform(get("/system/ping")
.contentType(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(content().contentTypeCompatibleWith(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("$.startTime", is(notNullValue())))
.andExpect(jsonPath("$.buildTime", is(notNullValue())));
}
}
|
benqus/wig | src/methods/extend.js | /**
* Merges all argument objects into the first one.
* @param {object} obj
* @returns {object}
*/
function extend(obj) {
var args = Array.prototype.slice.call(arguments, 1),
argsLength = args.length,
key,
i;
for (i = 0; i < argsLength; i += 1) {
if (args[i] && typeof args[i] === 'object') {
for (key in args[i]) {
obj[key] = args[i][key];
}
}
}
return obj;
}
// Expose the helper on the wig namespace object (declared elsewhere in the build).
wig.extend = extend;
txazo/java | src/main/java/org/txazo/java/concurrency/extension/CountDownLatchTest.java | <reponame>txazo/java
package org.txazo.java.concurrency.extension;
import org.apache.commons.lang3.RandomUtils;
import org.junit.Test;
import java.util.concurrent.CountDownLatch;
/**
 * CountDownLatch
 * <p>
 * 1) Lets one or more threads wait until operations running in other threads complete.
 * 2) countDown(): decrements the latch count by one.
 * 3) await(): blocks the calling thread until the count reaches zero.
 * 4) A countDown() call in one thread happens-before another thread returning from await().
 *
 * @see CountDownLatch
 * @see CountDownLatch#await()
 * @see CountDownLatch#countDown()
 */
public class CountDownLatchTest {

    @Test
    public void test() throws InterruptedException {
        // "begin" gate: released once by the main thread so all workers start together.
        final CountDownLatch begin = new CountDownLatch(1);
        // "end" gate: counted down once per worker; main thread waits on it.
        final CountDownLatch end = new CountDownLatch(10);
        for (int i = 0; i < 10; i++) {
            final int j = i;
            new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        begin.await();
                        System.out.println("Thread " + j + " begin");
                        Thread.sleep(RandomUtils.nextInt(1000, 5000));
                        System.out.println("Thread " + j + " finish");
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    } finally {
                        /** countDown() belongs in finally so the latch is released even if the task throws */
                        end.countDown();
                    }
                }
            }).start();
        }
        Thread.sleep(2000);
        System.out.println("begin");
        /** release all workers so they start at (roughly) the same time */
        begin.countDown();
        /** wait until every worker has finished */
        end.await();
        System.out.println("end");
    }
}
|
technosaurus/quad-wheel | jsiCodeInt.h | static OpCodes *codes_new(int size);
static OpCodes *codes_join(OpCodes *a, OpCodes *b);
static OpCodes *codes_join3(OpCodes *a, OpCodes *b, OpCodes *c);
static OpCodes *codes_join4(OpCodes *a, OpCodes *b, OpCodes *c, OpCodes *d);
static OpCodes *code_push_undef();
static OpCodes *code_push_bool(int v);
static OpCodes *code_push_num(Jsi_Number *v);
static OpCodes *code_push_string(jsi_Pstate *p, jsi_Pline *line, const char *str);
static OpCodes *code_push_index(jsi_Pstate *p, jsi_Pline *line, char *varname);
static OpCodes *code_push_this(jsi_Pstate *p, jsi_Pline *line);
static OpCodes *code_push_top();
static OpCodes *code_push_top2() ;
static OpCodes *code_unref() ;
static OpCodes *code_push_args() ;
static OpCodes *code_push_func(jsi_Pstate *p, jsi_Pline *line, struct Jsi_Func *fun) ;
static OpCodes *code_push_regex(jsi_Pstate *p, jsi_Pline *line, Jsi_Regex *reg) ;
static OpCodes *code_local(jsi_Pstate *p, jsi_Pline *line, const char *varname) ;
static OpCodes *code_nop() ;
static OpCodes *code_neg() ;
static OpCodes *code_pos() ;
static OpCodes *code_bnot() ;
static OpCodes *code_not() ;
static OpCodes *code_mul() ;
static OpCodes *code_div() ;
static OpCodes *code_mod() ;
static OpCodes *code_add() ;
static OpCodes *code_sub() ;
static OpCodes *code_in() ;
static OpCodes *code_less() ;
static OpCodes *code_greater() ;
static OpCodes *code_lessequ() ;
static OpCodes *code_greaterequ() ;
static OpCodes *code_equal() ;
static OpCodes *code_notequal() ;
static OpCodes *code_eequ() ;
static OpCodes *code_nneq() ;
static OpCodes *code_band() ;
static OpCodes *code_bor() ;
static OpCodes *code_bxor() ;
static OpCodes *code_shf(int right) ;
static OpCodes *code_instanceof() ;
static OpCodes *code_assign(jsi_Pstate *p, jsi_Pline *line, int h) ;
static OpCodes *code_subscript(jsi_Pstate *p, jsi_Pline *line, int right_val) ;
static OpCodes *code_inc(jsi_Pstate *p, jsi_Pline *line, int e) ;
static OpCodes *code_dec(jsi_Pstate *p, jsi_Pline *line, int e) ;
static OpCodes *code_typeof(jsi_Pstate *p, jsi_Pline *line, int e) ;
static OpCodes *code_fcall(jsi_Pstate *p, jsi_Pline *line, int argc) ;
static OpCodes *code_newfcall(jsi_Pstate *p, jsi_Pline *line, int argc) ;
static OpCodes *code_ret(int n) ;
static OpCodes *code_delete(int n) ;
static OpCodes *code_chthis(int n) ;
static OpCodes *code_pop(int n) ;
static OpCodes *code_jfalse(int off) ;
static OpCodes *code_jtrue(int off) ;
static OpCodes *code_jfalse_np(int off) ;
static OpCodes *code_jtrue_np(int off) ;
static OpCodes *code_jmp(int off) ;
static OpCodes *code_object(jsi_Pstate *p, jsi_Pline *line, int c) ;
static OpCodes *code_array(jsi_Pstate *p, jsi_Pline *line, int c) ;
static OpCodes *code_key() ;
static OpCodes *code_next() ;
static OpCodes *code_eval(jsi_Pstate *p, jsi_Pline *line, int argc) ;
static OpCodes *code_stry(jsi_Pstate *p, jsi_Pline *line, int trylen, int catchlen, int finlen)
static OpCodes *code_etry(jsi_Pstate *p, jsi_Pline *line) ;
static OpCodes *code_scatch(jsi_Pstate *p, jsi_Pline *line, const char *var) ;
static OpCodes *code_ecatch(jsi_Pstate *p, jsi_Pline *line) ;
static OpCodes *code_sfinal(jsi_Pstate *p, jsi_Pline *line) ;
static OpCodes *code_efinal(jsi_Pstate *p, jsi_Pline *line) ;
static OpCodes *code_throw(jsi_Pstate *p, jsi_Pline *line) ;
static OpCodes *code_with(jsi_Pstate *p, jsi_Pline *line, int withlen) ;
static OpCodes *code_ewith(jsi_Pstate *p, jsi_Pline *line) ;
static OpCodes *code_debug(jsi_Pstate *p, jsi_Pline *line) ;
static OpCodes *code_reserved(jsi_Pstate *p, jsi_Pline *line, int type, char *id);
|
shanghua521/javafx-danmaku | src/main/java/com/wang/javafxdanmaku/GlobalMouseListenerExample.java | <reponame>shanghua521/javafx-danmaku
package com.wang.javafxdanmaku;
import com.github.kwhat.jnativehook.mouse.NativeMouseEvent;
import com.github.kwhat.jnativehook.mouse.NativeMouseInputListener;
import javafx.application.Platform;
import javafx.stage.Stage;
import java.awt.*;
import java.awt.event.InputEvent;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
 * Global (OS-level) mouse listener. When a native click lands inside the
 * bounds of the overlay {@code stage}, it hides/re-shows the stage and
 * replays the click via {@link Robot} so it reaches the window underneath.
 */
public class GlobalMouseListenerExample implements NativeMouseInputListener {
    private final Stage stage;
    // Debounce flag: true means the next in-bounds click should be intercepted;
    // reset to true after any click that is outside the stage bounds.
    private boolean mark = true;

    public GlobalMouseListenerExample(Stage stage) {
        this.stage = stage;
    }

    public void nativeMouseClicked(NativeMouseEvent e) {
        if (mark) {
            // 156 is presumably the fixed vertical offset between the native screen
            // coordinates and the JavaFX stage origin (title bar / toolbar height)
            // — TODO(review): confirm and extract as a named constant.
            if (e.getX() >= stage.getX() && e.getX() <= stage.getX() + stage.getWidth() && e.getY() >= stage.getY() - 156 && e.getY() <= stage.getY() + stage.getHeight() - 156) {
                try {
                    Robot robot = new Robot();
                    // Move the AWT cursor to the equivalent stage-local position.
                    robot.mouseMove(e.getX(), e.getY() + 156);
                    Platform.runLater(() -> {
                        // When the stage is hidden, replay a right-click at the cursor.
                        stage.setOnHidden(event -> {
                            robot.mousePress(InputEvent.BUTTON3_DOWN_MASK);
                            robot.mouseRelease(InputEvent.BUTTON3_DOWN_MASK);
                        });
                        try {
                            // NOTE(review): this sleep runs inside Platform.runLater, i.e. on
                            // the JavaFX application thread, and freezes the whole UI for
                            // 5 seconds — confirm this is intended.
                            TimeUnit.SECONDS.sleep(5);
                        } catch (InterruptedException ex) {
                            ex.printStackTrace();
                        }
                        stage.setAlwaysOnTop(true);
                        stage.show();
                    });
                } catch (AWTException ignored) {
                    // Headless environment or Robot unavailable: silently skip replay.
                }
                var nativeMouseEvent = new NativeMouseEvent(e.getID(), e.getModifiers(), e.getX(), e.getY(), 1);
//                GlobalScreen.postNativeEvent(nativeMouseEvent);
                mark = false;
                return;
            }
        }
        mark = true;
//        System.out.println("x: " + e.getX() + " Y: " + e.getY());
//        System.out.println("x: " + stage.getX() + " Y: " + (stage.getY() - 156.0));
//        System.out.println("x: " + (stage.getX() + stage.getWidth()) + " Y: " + (stage.getY() + stage.getHeight() - 156.0));
    }
}
twmarshall/cloudbreak | environment/src/test/java/com/sequenceiq/environment/environment/validation/network/TestHelper.java | package com.sequenceiq.environment.environment.validation.network;
import java.util.HashMap;
import java.util.Map;
import com.sequenceiq.cloudbreak.cloud.model.CloudSubnet;
import com.sequenceiq.environment.network.dto.AwsParams;
import com.sequenceiq.environment.network.dto.AzureParams;
import com.sequenceiq.environment.network.dto.NetworkDto;
/** Builders for network-related test fixtures (NetworkDto, subnet metadata, Azure params). */
public class TestHelper {

    /**
     * Builds a NetworkDto test fixture with fixed id/name/CRN and the given
     * cloud-specific parameters.
     *
     * @param numberOfSubnets number of generated subnet metas; null means no subnet map at all
     */
    public NetworkDto getNetworkDto(AzureParams azureParams, AwsParams awsParams, String networkId, String networkCidr, Integer numberOfSubnets) {
        Map<String, CloudSubnet> metas = getSubnetMetas(numberOfSubnets);
        return NetworkDto.builder()
                .withId(1L)
                .withName("networkName")
                .withResourceCrn("aResourceCRN")
                .withAzure(azureParams)
                .withAws(awsParams)
                .withNetworkCidr(networkCidr)
                .withNetworkId(networkId)
                .withSubnetMetas(metas)
                .build();
    }

    /** Generates {@code numberOfSubnets} subnet metas keyed "key0", "key1", ...; null-in, null-out. */
    Map<String, CloudSubnet> getSubnetMetas(Integer numberOfSubnets) {
        if (numberOfSubnets == null) {
            return null;
        }
        Map<String, CloudSubnet> result = new HashMap<>();
        int index = 0;
        while (index < numberOfSubnets) {
            result.put("key" + index, getCloudSubnet("eu-west-" + index + "a"));
            index++;
        }
        return result;
    }

    /** Creates a CloudSubnet fixture in region eu-west-1 with the given availability zone. */
    CloudSubnet getCloudSubnet(String availabilityZone) {
        return new CloudSubnet("eu-west-1", "name", availabilityZone, "cidr");
    }

    /** Builds AzureParams with optional network id and resource group name. */
    public AzureParams getAzureParams(boolean noPublicIp, boolean withNetworkId, boolean withResourceGroupName) {
        AzureParams.AzureParamsBuilder builder = AzureParams.AzureParamsBuilder.anAzureParams();
        if (withNetworkId) {
            builder.withNetworkId("aNetworkId");
        }
        if (withResourceGroupName) {
            builder.withResourceGroupName("aResourceGroupId");
        }
        return builder.withNoPublicIp(noPublicIp).build();
    }
}
|
zealoussnow/chromium | chrome/chrome_cleaner/os/file_removal_status_updater.h | <filename>chrome/chrome_cleaner/os/file_removal_status_updater.h
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_CHROME_CLEANER_OS_FILE_REMOVAL_STATUS_UPDATER_H_
#define CHROME_CHROME_CLEANER_OS_FILE_REMOVAL_STATUS_UPDATER_H_
#include <map>
#include <unordered_map>
#include "base/files/file_path.h"
#include "base/memory/singleton.h"
#include "base/synchronization/lock.h"
#include "chrome/chrome_cleaner/logging/proto/removal_status.pb.h"
namespace chrome_cleaner {
namespace internal {
// RemovalStatus update control utilities, exposed in the internal namespace so
// they can be accessed by tests.

// Indicates the action to be taken on RemovalStatus updates for files and
// folders.
enum RemovalStatusOverridePermission {
  // Ignore, this is expected and we shouldn't change the current value.
  // Example: updating removal status to NOT_FOUND after deleting the file.
  kSkip,

  // Override, this is an actual update and we should keep the most recent
  // value. Example: updating removal status to FAILED_TO_SCHEDULE_REMOVAL
  // when previous knowledge was FAILED_TO_REMOVE.
  kOkToOverride,

  // This should never happen in the code, and we should raise an error.
  // TODO(joenotcharles): Currently there is no error, and kNotAllowed is
  // implemented as kSkip. This is because DCHECK writes an error message to
  // the log, and until recently this took the logging lock which might already
  // be held while checking this permission. Now that it's safe to DCHECK while
  // the logging lock is held we should add a DCHECK.
  kNotAllowed,
};

// Maps pairs of RemovalStatus to the expected permission. Presumably the
// outer key is the currently recorded status and the inner key the requested
// new status — confirm against the definition in the .cc file.
typedef std::map<RemovalStatus,
                 std::map<RemovalStatus, RemovalStatusOverridePermission>>
    RemovalStatusOverridePermissionMap;

// Returns the overriding map.
const RemovalStatusOverridePermissionMap&
GetRemovalStatusOverridePermissionMap();
} // namespace internal
// This class manages a map of remove statuses for all files and folders
// encountered during cleaning, keyed by path. It does not distinguish whether
// the path refers to a file or a folder.
class FileRemovalStatusUpdater {
 public:
  struct FileRemovalStatus {
    // The full path that was passed to UpdateRemovalStatus or
    // UpdateQuarantineStatus. This is needed because when a file removal status
    // is logged, GetFileInformationProtoObject can be called, which needs a
    // full path that can be resolved.
    base::FilePath path;

    // The removal status of the last attempted update at the above path.
    RemovalStatus removal_status = REMOVAL_STATUS_UNSPECIFIED;

    // The quarantine status of the last attempted update at the above path.
    QuarantineStatus quarantine_status = QUARANTINE_STATUS_UNSPECIFIED;
  };

  // Keyed by the sanitized form of the path (wide string).
  typedef std::unordered_map<std::wstring, FileRemovalStatus>
      SanitizedPathToRemovalStatusMap;

  static FileRemovalStatusUpdater* GetInstance();

  virtual ~FileRemovalStatusUpdater();

  // Clears all saved removal statuses.
  void Clear();

  // Updates removal status for a file or folder given by |path|. Checks the
  // RemovalStatusOverridePermissionMap to see if the update is allowed, and
  // silently does nothing if the permission is kSkip.
  void UpdateRemovalStatus(const base::FilePath& path, RemovalStatus status);

  // Returns the removal status of |path|, or REMOVAL_STATUS_UNSPECIFIED if
  // the removal status has never been updated for that path.
  RemovalStatus GetRemovalStatus(const base::FilePath& path) const;

  // Returns the removal status of |sanitized_path|, or
  // REMOVAL_STATUS_UNSPECIFIED if the removal status has never
  // been updated for an unsanitized form of that path.
  RemovalStatus GetRemovalStatusOfSanitizedPath(
      const std::wstring& sanitized_path) const;

  // Updates quarantine status for a file given by |path|.
  // Note: UpdateRemovalStatus should be called for |path| at some point as
  // well, because it is invalid to quarantine a file that doesn't have some
  // removal status.
  void UpdateQuarantineStatus(const base::FilePath& path,
                              QuarantineStatus status);

  // Returns the quarantine status of |path|, or QUARANTINE_STATUS_UNSPECIFIED
  // if the quarantine status has never been updated for that path.
  QuarantineStatus GetQuarantineStatus(const base::FilePath& path) const;

  // Returns all saved removal statuses, keyed by sanitized path. Each
  // sanitized path is mapped to a single FileRemovalStatus which holds the
  // path and status values from the most recent call to UpdateRemovalStatus or
  // UpdateQuarantineStatus that had an effect.
  SanitizedPathToRemovalStatusMap GetAllRemovalStatuses() const;

 private:
  friend struct base::DefaultSingletonTraits<FileRemovalStatusUpdater>;

  FileRemovalStatusUpdater();

  // Locks access to |removal_statuses_|.
  mutable base::Lock removal_status_lock_;
  SanitizedPathToRemovalStatusMap removal_statuses_;
};
} // namespace chrome_cleaner
#endif // CHROME_CHROME_CLEANER_OS_FILE_REMOVAL_STATUS_UPDATER_H_
|
ChengCat/dale | src/dale/NativeTypes/NativeTypes.h | <reponame>ChengCat/dale
#ifndef DALE_NATIVETYPES
#define DALE_NATIVETYPES
#include <vector>
#include "../llvm_LinkAll.h"
namespace dale {
/*! NativeTypes
Provides methods for getting the LLVM types that correspond to the
native types of the current platform. The get*Type methods are
self-explanatory, and the get*Size methods return the number of
bits required by the size. The other methods are documented below.
*/
class NativeTypes {
    private:
    /* LLVM integer types backing the platform's native C integer types. */
    llvm::IntegerType *native_char_type;
    llvm::IntegerType *native_int_type;
    llvm::IntegerType *native_uint_type;
    llvm::IntegerType *native_intptr_type;
    llvm::IntegerType *native_size_type;
    llvm::IntegerType *native_ptrdiff_type;
    /* LLVM floating-point types for float, double and long double. */
    llvm::Type *native_float_type;
    llvm::Type *native_double_type;
    llvm::Type *native_longdouble_type;
    /* Sizes, in bits, of the corresponding native types (see the
     * get*Size methods). */
    int native_ptr_size;
    int native_int_size;
    int native_size_size;
    int native_ptrdiff_size;
    /* Cached LLVM constants, returned by the getLLVM* methods. */
    llvm::Value *llvm_true;
    llvm::Value *llvm_false;
    llvm::Value *llvm_zero;
    llvm::Value *llvm_one;
    /* Presumably the backing storage for getTwoLLVMZeros. */
    std::vector<llvm::Value *> zeros;

    public:
    NativeTypes();
    llvm::IntegerType *getNativeIntType();
    llvm::IntegerType *getNativeUIntType();
    llvm::IntegerType *getNativeCharType();
    llvm::IntegerType *getNativeIntptrType();
    llvm::IntegerType *getNativePtrDiffType();
    llvm::Type *getNativeFloatType();
    llvm::Type *getNativeDoubleType();
    llvm::Type *getNativeLongDoubleType();
    llvm::IntegerType *getNativeSizeType();
    int getNativeIntSize();
    int getNativePtrSize();
    int getNativeSizeSize();
    int getNativePtrDiffSize();
    /*! Convert an internal size to a real size.
     *  @param size An internal size.
     *
     *  Internal sizes are returned by Type::getIntegerSize and
     *  integerTypeToSize.
     */
    int internalSizeToRealSize(int size);
    /*! Get an LLVM true boolean value.
     */
    llvm::Value *getLLVMTrue();
    /*! Get an LLVM false boolean value.
     */
    llvm::Value *getLLVMFalse();
    /*! Get an LLVM native integer value of zero.
     */
    llvm::Value *getLLVMZero();
    /*! Get an array of two LLVM zeros.
     */
    llvm::ArrayRef<llvm::Value *> getTwoLLVMZeros();
    /*! Get an LLVM native integer value of one.
     */
    llvm::Value *getLLVMOne();
    /*! Get an LLVM native integer value for the given number.
     *  @param n The number.
     */
    llvm::ConstantInt *getNativeInt(int n);
    /*! Get an LLVM constant integer value for the given type and
     *  number.
     *  @param type The type.
     *  @param nstr The number, as a string.
     *
     *  This handles hexadecimal numbers as well as normal decimal
     *  numbers. Hexadecimal numbers must be prefixed with "0x".
     */
    llvm::ConstantInt *getConstantInt(llvm::IntegerType *type,
                                      const char *nstr);
};
}
#endif
|
AnthonyNg404/Database | proj5/src/main/java/edu/berkeley/cs186/database/ThreadPool.java | package edu.berkeley.cs186.database;
import java.util.concurrent.*;
/**
 * A cached thread pool that surfaces exceptions thrown by submitted tasks.
 *
 * <p>Tasks submitted via {@code submit(...)} are wrapped in a {@link Future},
 * which swallows any exception until {@code get()} is called. The
 * {@link #afterExecute} hook below unwraps such exceptions and rethrows them
 * (unchecked) on the worker thread so failures are not silently lost.
 */
class ThreadPool extends ThreadPoolExecutor {
    ThreadPool() {
        // Same configuration as Executors.newCachedThreadPool(): unbounded
        // pool, idle threads reclaimed after 60 seconds.
        super(0, Integer.MAX_VALUE, 60L, TimeUnit.SECONDS, new SynchronousQueue<>());
    }

    /**
     * Rethrows any exception raised by the completed task {@code r}.
     *
     * @param r the runnable that has completed
     * @param t the exception that caused termination, or null if the task
     *          completed normally (possibly wrapped in a Future)
     */
    @Override
    protected void afterExecute(Runnable r, Throwable t) {
        super.afterExecute(r, t);
        if (t == null && r instanceof Future<?>) {
            Future<?> future = (Future<?>) r;
            // Only query futures that are actually done: afterExecute can run
            // for tasks that are not complete (e.g. cancelled or periodic
            // tasks), and get() would block in that case.
            if (future.isDone()) {
                try {
                    future.get();
                } catch (CancellationException ce) {
                    t = ce;
                } catch (ExecutionException ee) {
                    t = ee.getCause();
                } catch (InterruptedException ie) {
                    Thread.currentThread().interrupt(); // preserve interrupt status
                }
            }
        }
        if (t != null) {
            rethrow(t);
        }
    }

    /**
     * Rethrows {@code t} without declaring it, exploiting generic type
     * erasure to rethrow checked exceptions as if they were unchecked.
     */
    @SuppressWarnings("unchecked")
    private static <T extends Throwable> void rethrow(Throwable t) throws T {
        throw (T) t;
    }
}
|
MehmetErer/anima | anima/ui/scripts/tde_ui.py | # -*- coding: utf-8 -*-
import logging
from anima import logger
def version_dialog(logging_level=logging.WARNING, mode=2):
    """Helper function that shows the Version Dialog UI inside 3DEqualizer.

    :param logging_level: The ``logging`` level set on the anima logger
        (defaults to ``logging.WARNING``).
    :param mode: Mode value forwarded to ``version_dialog.UI`` (defaults
        to 2).
    """
    # connect to db
    from anima.utils import do_db_setup
    do_db_setup()

    # use PySide2
    from anima import ui
    ui.SET_PYSIDE2()

    from anima.ui import version_dialog
    from anima.env import equalizer

    # Wrap the running 3DEqualizer session; the environment name is the
    # version string reported by 3DE (first whitespace-separated token).
    # NOTE(review): ``tde4`` is the 3DEqualizer scripting module and is not
    # imported anywhere in this file -- confirm that 3DE injects it into the
    # interpreter, otherwise this line raises NameError.
    e = equalizer.Equalizer()
    e.name = tde4.get3DEVersion().split(" ")[0]

    logger.setLevel(logging_level)

    # Show the version dialog for the 3DEqualizer environment.
    version_dialog.UI(environment=e, mode=mode)
|
ramkumarkoppu/NUCLEO-F767ZI-ESW | Tools/STM32FW/STM32Cube_FW_F7_V1.16.2/Projects/STM32746G-Discovery/Applications/STemWin/STemWin_fonts/STemWin/App/generated/fonts/eng40.c | /*********************************************************************
* SEGGER Microcontroller GmbH & Co. KG *
* Solutions for real time microcontroller applications *
* www.segger.com *
**********************************************************************
* *
* C-file generated by *
* *
* emWin Font Converter (ST) version 5.40 *
* Compiled Mar 17 2017, 15:34:36 *
* *
* (c) 1998 - 2017 Segger Microcontroller GmbH & Co. KG *
* *
**********************************************************************
* *
* Source file: eng40.c *
* Font: Arial *
* Height: 40 *
* *
**********************************************************************
* *
* Initial font height: 40 *
* Range disabled: 0000 - FFFF *
* Read pattern file: C:\Users\bennacef\Desktop\engl.txt *
* *
**********************************************************************
*/
#include "GUI.h"
#ifndef GUI_CONST_STORAGE
#define GUI_CONST_STORAGE const
#endif
/* The following line needs to be included in any file selecting the
font.
*/
extern GUI_CONST_STORAGE GUI_FONT GUI_Fonteng40;
/* Start of unicode area <Basic Latin> */
/* 1-bpp glyph bitmaps, one line per pixel row. Identifiers such as
 * XXX_____ are byte-valued macros provided by the emWin headers
 * ('X' = foreground pixel, '_' = background). Generated data -- do not
 * edit by hand. */
GUI_CONST_STORAGE unsigned char acGUI_Fonteng40_0062[ 50] = { /* code 0062, LATIN SMALL LETTER B */
  XXX_____,________,
  XXX_____,________,
  XXX_____,________,
  XXX_____,________,
  XXX_____,________,
  XXX_____,________,
  XXX_____,________,
  XXX___XX,XXX_____,
  XXX_XXXX,XXXXX___,
  XXXXXXXX,XXXXXX__,
  XXXXXX__,__XXXXX_,
  XXXXX___,____XXX_,
  XXXX____,____XXXX,
  XXX_____,_____XXX,
  XXX_____,_____XXX,
  XXX_____,_____XXX,
  XXX_____,_____XXX,
  XXX_____,_____XXX,
  XXX_____,_____XXX,
  XXXX____,____XXX_,
  XXXX____,___XXXX_,
  XXXXXX__,__XXXXX_,
  XXXXXXXX,XXXXXX__,
  XXX_XXXX,XXXXX___,
  XXX___XX,XXX_____};

GUI_CONST_STORAGE unsigned char acGUI_Fonteng40_0063[ 36] = { /* code 0063, LATIN SMALL LETTER C */
  _____XXX,XXX_____,
  ___XXXXX,XXXXX___,
  __XXXXXX,XXXXXX__,
  _XXXXX__,___XXXX_,
  _XXX____,____XXXX,
  XXXX____,_____XXX,
  XXX_____,________,
  XXX_____,________,
  XXX_____,________,
  XXX_____,________,
  XXX_____,________,
  XXX_____,_____XXX,
  XXXX____,_____XXX,
  _XXX____,____XXX_,
  _XXXXX__,___XXXX_,
  __XXXXXX,XXXXXX__,
  ___XXXXX,XXXXX___,
  _____XXX,XXX_____};

GUI_CONST_STORAGE unsigned char acGUI_Fonteng40_0065[ 36] = { /* code 0065, LATIN SMALL LETTER E */
  _____XXX,XXX_____,
  ___XXXXX,XXXXX___,
  __XXXXXX,XXXXXX__,
  _XXXXX__,___XXXX_,
  _XXX____,____XXX_,
  XXX_____,_____XXX,
  XXX_____,_____XXX,
  XXXXXXXX,XXXXXXXX,
  XXXXXXXX,XXXXXXXX,
  XXXXXXXX,XXXXXXXX,
  XXX_____,________,
  XXX_____,________,
  XXXX____,_____XXX,
  _XXX____,____XXX_,
  _XXXXX__,___XXXX_,
  __XXXXXX,XXXXXX__,
  ___XXXXX,XXXXX___,
  _____XXX,XXX_____};

GUI_CONST_STORAGE unsigned char acGUI_Fonteng40_0069[ 25] = { /* code 0069, LATIN SMALL LETTER I */
  XXX_____,
  XXX_____,
  XXX_____,
  ________,
  ________,
  ________,
  ________,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____};

GUI_CONST_STORAGE unsigned char acGUI_Fonteng40_006C[ 25] = { /* code 006C, LATIN SMALL LETTER L */
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____,
  XXX_____};

GUI_CONST_STORAGE unsigned char acGUI_Fonteng40_006D[ 72] = { /* code 006D, LATIN SMALL LETTER M */
  XXX___XX,XXX_____,_XXXXX__,________,
  XXX_XXXX,XXXX___X,XXXXXXXX,________,
  XXX_XXXX,XXXXX_XX,XXXXXXXX,________,
  XXXXX___,__XXXXXX,_____XXX,X_______,
  XXXX____,___XXXX_,______XX,X_______,
  XXXX____,___XXX__,______XX,X_______,
  XXX_____,___XXX__,______XX,X_______,
  XXX_____,___XXX__,______XX,X_______,
  XXX_____,___XXX__,______XX,X_______,
  XXX_____,___XXX__,______XX,X_______,
  XXX_____,___XXX__,______XX,X_______,
  XXX_____,___XXX__,______XX,X_______,
  XXX_____,___XXX__,______XX,X_______,
  XXX_____,___XXX__,______XX,X_______,
  XXX_____,___XXX__,______XX,X_______,
  XXX_____,___XXX__,______XX,X_______,
  XXX_____,___XXX__,______XX,X_______,
  XXX_____,___XXX__,______XX,X_______};

GUI_CONST_STORAGE unsigned char acGUI_Fonteng40_006E[ 36] = { /* code 006E, LATIN SMALL LETTER N */
  XXX___XX,XXXX____,
  XXX_XXXX,XXXXX___,
  XXXXXXXX,XXXXXX__,
  XXXXXX__,__XXXXX_,
  XXXX____,___XXXX_,
  XXXX____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_};

GUI_CONST_STORAGE unsigned char acGUI_Fonteng40_006F[ 36] = { /* code 006F, LATIN SMALL LETTER O */
  _____XXX,XXX_____,
  ___XXXXX,XXXXX___,
  __XXXXXX,XXXXXX__,
  _XXXXX__,__XXXXX_,
  _XXX____,____XXX_,
  XXXX____,____XXXX,
  XXX_____,_____XXX,
  XXX_____,_____XXX,
  XXX_____,_____XXX,
  XXX_____,_____XXX,
  XXX_____,_____XXX,
  XXX_____,_____XXX,
  XXXX____,____XXXX,
  _XXX____,____XXX_,
  _XXXXX__,__XXXXX_,
  __XXXXXX,XXXXXX__,
  ___XXXXX,XXXXX___,
  _____XXX,XXX_____};

GUI_CONST_STORAGE unsigned char acGUI_Fonteng40_0074[ 48] = { /* code 0074, LATIN SMALL LETTER T */
  _____X__,________,
  ___XXX__,________,
  ___XXX__,________,
  ___XXX__,________,
  ___XXX__,________,
  ___XXX__,________,
  XXXXXXXX,XX______,
  XXXXXXXX,XX______,
  XXXXXXXX,XX______,
  ___XXX__,________,
  ___XXX__,________,
  ___XXX__,________,
  ___XXX__,________,
  ___XXX__,________,
  ___XXX__,________,
  ___XXX__,________,
  ___XXX__,________,
  ___XXX__,________,
  ___XXX__,________,
  ___XXX__,________,
  ___XXX__,________,
  ___XXXXX,XX______,
  ____XXXX,XX______,
  _____XXX,XX______};

GUI_CONST_STORAGE unsigned char acGUI_Fonteng40_0075[ 36] = { /* code 0075, LATIN SMALL LETTER U */
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,____XXX_,
  XXX_____,___XXXX_,
  XXXX____,___XXXX_,
  XXXXX___,_XXXXXX_,
  _XXXXXXX,XXX_XXX_,
  __XXXXXX,XX__XXX_,
  ___XXXXX,X___XXX_};

GUI_CONST_STORAGE unsigned char acGUI_Fonteng40_0076[ 54] = { /* code 0076, LATIN SMALL LETTER V */
  XXX_____,______XX,X_______,
  _XXX____,_____XXX,________,
  _XXX____,_____XXX,________,
  _XXXX___,_____XXX,________,
  __XXX___,____XXX_,________,
  __XXX___,____XXX_,________,
  ___XXX__,___XXX__,________,
  ___XXX__,___XXX__,________,
  ___XXX__,___XXX__,________,
  ____XXX_,__XXX___,________,
  ____XXX_,__XXX___,________,
  _____XXX,_XXX____,________,
  _____XXX,_XXX____,________,
  _____XXX,_XXX____,________,
  ______XX,XXX_____,________,
  ______XX,XXX_____,________,
  _______X,XX______,________,
  _______X,XX______,________};

GUI_CONST_STORAGE unsigned char acGUI_Fonteng40_0077[ 72] = { /* code 0077, LATIN SMALL LETTER W */
  _XXX____,____XXX_,_______X,XX______,
  _XXX____,____XXX_,_______X,XX______,
  _XXXX___,___XXXXX,______XX,XX______,
  __XXX___,___XXXXX,______XX,X_______,
  __XXX___,___XX_XX,______XX,X_______,
  ___XXX__,___XX_XX,_____XXX,________,
  ___XXX__,__XXX_XX,X____XXX,________,
  ___XXX__,__XXX__X,X____XXX,________,
  ____XXX_,__XX___X,X___XXX_,________,
  ____XXX_,__XX___X,X___XXX_,________,
  _____XX_,_XXX___X,XX__XX__,________,
  _____XXX,_XXX____,XX_XXX__,________,
  _____XXX,_XX_____,XX_XXX__,________,
  ______XX,_XX_____,XX_XX___,________,
  ______XX,XXX_____,XXXXX___,________,
  ______XX,XXX_____,XXXXX___,________,
  _______X,XX______,_XXX____,________,
  _______X,XX______,_XXX____,________};
/* Per-glyph metrics. Field order follows emWin's GUI_CHARINFO_EXT
 * (presumably XSize, YSize, XOff, YOff, XDist, pointer to pixel data --
 * confirm against the emWin manual). Generated data -- do not edit. */
GUI_CONST_STORAGE GUI_CHARINFO_EXT GUI_Fonteng40_CharInfo[12] = {
   {  16,  25,   2,   7,  19, acGUI_Fonteng40_0062 } /* code 0062, LATIN SMALL LETTER B */
  ,{  16,  18,   1,  14,  18, acGUI_Fonteng40_0063 } /* code 0063, LATIN SMALL LETTER C */
  ,{  16,  18,   1,  14,  18, acGUI_Fonteng40_0065 } /* code 0065, LATIN SMALL LETTER E */
  ,{   3,  25,   2,   7,   7, acGUI_Fonteng40_0069 } /* code 0069, LATIN SMALL LETTER I */
  ,{   3,  25,   2,   7,   7, acGUI_Fonteng40_006C } /* code 006C, LATIN SMALL LETTER L */
  ,{  25,  18,   2,  14,  29, acGUI_Fonteng40_006D } /* code 006D, LATIN SMALL LETTER M */
  ,{  15,  18,   2,  14,  19, acGUI_Fonteng40_006E } /* code 006E, LATIN SMALL LETTER N */
  ,{  16,  18,   1,  14,  18, acGUI_Fonteng40_006F } /* code 006F, LATIN SMALL LETTER O */
  ,{  10,  24,   0,   8,  10, acGUI_Fonteng40_0074 } /* code 0074, LATIN SMALL LETTER T */
  ,{  15,  18,   2,  14,  19, acGUI_Fonteng40_0075 } /* code 0075, LATIN SMALL LETTER U */
  ,{  17,  18,   0,  14,  17, acGUI_Fonteng40_0076 } /* code 0076, LATIN SMALL LETTER V */
  ,{  27,  18,  -1,  14,  25, acGUI_Fonteng40_0077 } /* code 0077, LATIN SMALL LETTER W */
};

/* Character ranges form a singly-linked list: Prop1 (0062-0063) ->
 * Prop2 (0065) -> Prop3 (0069) -> Prop4 (006C-006F) -> Prop5 (0074-0077),
 * terminated by a null pointer. */
GUI_CONST_STORAGE GUI_FONT_PROP_EXT GUI_Fonteng40_Prop5 = {
   0x0074 /* first character */
  ,0x0077 /* last character  */
  ,&GUI_Fonteng40_CharInfo[  8] /* address of first character */
  ,(GUI_CONST_STORAGE GUI_FONT_PROP_EXT *)0 /* pointer to next GUI_FONT_PROP_EXT */
};

GUI_CONST_STORAGE GUI_FONT_PROP_EXT GUI_Fonteng40_Prop4 = {
   0x006C /* first character */
  ,0x006F /* last character  */
  ,&GUI_Fonteng40_CharInfo[  4] /* address of first character */
  ,&GUI_Fonteng40_Prop5 /* pointer to next GUI_FONT_PROP_EXT */
};

GUI_CONST_STORAGE GUI_FONT_PROP_EXT GUI_Fonteng40_Prop3 = {
   0x0069 /* first character */
  ,0x0069 /* last character  */
  ,&GUI_Fonteng40_CharInfo[  3] /* address of first character */
  ,&GUI_Fonteng40_Prop4 /* pointer to next GUI_FONT_PROP_EXT */
};

GUI_CONST_STORAGE GUI_FONT_PROP_EXT GUI_Fonteng40_Prop2 = {
   0x0065 /* first character */
  ,0x0065 /* last character  */
  ,&GUI_Fonteng40_CharInfo[  2] /* address of first character */
  ,&GUI_Fonteng40_Prop3 /* pointer to next GUI_FONT_PROP_EXT */
};

GUI_CONST_STORAGE GUI_FONT_PROP_EXT GUI_Fonteng40_Prop1 = {
   0x0062 /* first character */
  ,0x0063 /* last character  */
  ,&GUI_Fonteng40_CharInfo[  0] /* address of first character */
  ,&GUI_Fonteng40_Prop2 /* pointer to next GUI_FONT_PROP_EXT */
};

/* Top-level font descriptor referenced by application code. */
GUI_CONST_STORAGE GUI_FONT GUI_Fonteng40 = {
   GUI_FONTTYPE_PROP_EXT /* type of font    */
  ,40 /* height of font  */
  ,40 /* space of font y */
  ,1 /* magnification x */
  ,1 /* magnification y */
  ,{&GUI_Fonteng40_Prop1}
  ,32 /* Baseline */
  ,18 /* Height of lowercase characters */
  ,25 /* Height of capital characters */
};
|
AllaDihtiarova/app_network | front/src/components/Body/AddNewPost/AddNewPost.js | import * as React from 'react';
import { useQuery } from 'react-query'
import { Formik, Form, Field } from 'formik'
import * as Yup from 'yup'
import Button from '@mui/material/Button';
import { TextField } from 'formik-mui';
import { addPost } from '../../../containes/AddNewPostContainer/api/crud'
import { getAllAccess } from '../../../containes/AccessListContainer/api/crud';
import AutocompleteFormic from '../../FormicAutocomplete/Autocomplete';
import UploadImage from '../../UploadImage/UploadImage';
const AddNewPost = () => {
const { data } = useQuery('posts/access', () => getAllAccess())
const access = data?.data || []
const options = access.map((ac) => {
const option = { value: ac.id, label: ac.access_type }
return option
})
const [value, setValue] = React.useState(options[0]);
const crDate = Date.now()
const shema = Yup.object().shape({
title: Yup.string().required(),
contentPost: Yup.string().required(),
createDate: Yup.date().required(),
userId: Yup.number().required(),
access: Yup.number()
})
const onPostSubmit = (data) => {
addPost({ ...data, accessId: value })
}
const changeAccess = (_, newValue) => {
setValue(newValue.value)
}
return (
<>
<div>{`value: ${value !== null ? `${value}` : 'null'}`}</div>
<Formik
initialValues={{ userId: 7, createDate: crDate, title: '', contentPost: ""}}
validationSchema={shema}
onSubmit={onPostSubmit}>
{({ errors }) =>
<>
<div>Errors: {JSON.stringify(errors)}</div>
<Form>
<label>
User id:
<Field component={TextField} type="text" name="userId"></Field>
</label>
<label>
Create date:
<Field component={TextField} type="date" name="createDate"></Field>
</label>
<label>
Title:
<Field component={TextField} type="text" name="title"></Field>
</label>
<label>
Post content:
<Field component={TextField} type="text" name="contentPost"></Field>
</label>
<Field
component={AutocompleteFormic}
name="access"
options={options}
onChange={changeAccess}
>
</Field>
<Field component={ UploadImage}/>
<Button type='submit' variant="contained">Save</Button>
</Form>
</>
}
</Formik>
</>
)
}
export default AddNewPost |
chenjianjx/srb4jfullsample | webapp/src/main/java/com/github/chenjianjx/srb4jfullsample/webapp/bo/portal/support/BoResourcePaths.java | <gh_stars>1-10
package com.github.chenjianjx.srb4jfullsample.webapp.bo.portal.support;
/**
* put all paths in a single class for each locating of code based on an url
*/
/**
 * Central registry of back-office portal URL paths, collected in a single
 * class for easy lookup of the handler code behind a given URL.
 */
public class BoResourcePaths {

    /** Non-instantiable constants holder. */
    private BoResourcePaths() {
    }

    // homepage
    public static final String DASHBOARD = "/";

    // auth
    public static final String LOGIN = "/login";
    public static final String LOGOUT = "/logout";

    // staff user
    public static final String CHANGE_PASSWORD = "/staffusers/admin/change-password";

    // front user
    public static final String FRONT_USER_LIST = "/frontusers";
}
|
opensingular/singular-apps | requirement/requirement-module/src/main/java/org/opensingular/requirement/module/ActionProviderBuilder.java | <gh_stars>1-10
/*
* Copyright (C) 2016 Singular Studios (a.k.a Atom Tecnologia) - www.opensingular.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opensingular.requirement.module;
import java.util.ArrayList;
import java.util.List;
import org.opensingular.flow.core.TaskType;
import org.opensingular.requirement.module.box.BoxItemData;
import org.opensingular.requirement.module.box.action.BoxItemActionList;
import org.opensingular.requirement.module.persistence.filter.BoxFilter;
public class ActionProviderBuilder implements ActionProvider {
private List<ActionConfigurer> actionConfigurers = new ArrayList<>();
public ActionProviderBuilder addViewAction() {
actionConfigurers.add((line, filter, list) -> list.addViewAction(line));
return this;
}
public ActionProviderBuilder addEditAction() {
actionConfigurers.add((line, filter, list) -> list.addEditAction(line));
return this;
}
public ActionProviderBuilder addDeleteAction() {
actionConfigurers.add((line, filter, list) -> list.addDeleteAction(line));
return this;
}
public ActionProviderBuilder addAssignAction() {
actionConfigurers.add((line, filter, list) -> {
if (line.getAllocatedSUserId() == null && TaskType.HUMAN == line.getTaskType()) {
list.addAssignAction(line);
}
});
return this;
}
public ActionProviderBuilder addRelocateAction() {
actionConfigurers.add((line, filter, list) -> {
if (TaskType.HUMAN == line.getTaskType()) {
list.addRelocateAction(line);
}
});
return this;
}
public ActionProviderBuilder addAnalyseAction() {
actionConfigurers.add((line, filter, list) -> {
if (filter.getIdUsuarioLogado() != null && filter.getIdUsuarioLogado().equalsIgnoreCase((String) line.getAllocatedSUserId())) {
list.addAnalyseAction(line);
}
});
return this;
}
public ActionProviderBuilder addCustomActions(ActionConfigurer configurer) {
actionConfigurers.add(configurer);
return this;
}
public ActionProviderBuilder addHistoryAction() {
actionConfigurers.add((line, filter, list) -> list.addHistoryAction(line));
return this;
}
public ActionProviderBuilder addExtratoAction() {
actionConfigurers.add((line, filter, list) -> list.addExtratoAction(line));
return this;
}
@Override
public BoxItemActionList getLineActions(BoxItemData line, BoxFilter filter) {
BoxItemActionList list = new BoxItemActionList();
for (ActionConfigurer configurer : actionConfigurers) {
configurer.configure(line, filter, list);
}
return list;
}
@FunctionalInterface
public interface ActionConfigurer {
void configure(BoxItemData line, BoxFilter filter, BoxItemActionList list);
}
} |
lechium/tvOS130Headers | System/Library/PrivateFrameworks/SlideshowKit.framework/PlugIns/OpusMarimbaProducer.opplugin/MRShiftingTilesIteration.h | <reponame>lechium/tvOS130Headers
/*
* This header is generated by classdump-dyld 1.0
* on Tuesday, November 5, 2019 at 2:45:05 AM Mountain Standard Time
* Operating System: Version 13.0 (Build 17J586)
* Image Source: /System/Library/PrivateFrameworks/SlideshowKit.framework/PlugIns/OpusMarimbaProducer.opplugin/OpusMarimbaProducer
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/
// Value object describing one iteration of the "shifting tiles" slideshow
// effect. Class-dump reconstructed header: ivar semantics below are inferred
// from their names only -- confirm against the binary before relying on them.
@interface MRShiftingTilesIteration : NSObject {

	long long layout;      // layout variant for this iteration (presumed)
	long long age;         // iteration age/counter (presumed)
	long long slideIndex;  // index of the slide shown (presumed)
	double width;          // tile or layout width (presumed)
	double startTime;      // start time of the iteration (presumed seconds)
	double duration;       // duration of the iteration (presumed seconds)

}
-(id)description;
@end
|
relokin/parsec | pkgs/libs/glib/src/gobject/gvaluearray.h | <filename>pkgs/libs/glib/src/gobject/gvaluearray.h
/* GObject - GLib Type, Object, Parameter and Signal Library
* Copyright (C) 2001 Red Hat, Inc.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307, USA.
*
* gvaluearray.h: GLib array type holding GValues
*/
#if !defined (__GLIB_GOBJECT_H_INSIDE__) && !defined (GOBJECT_COMPILATION)
#error "Only <glib-object.h> can be included directly."
#endif
#ifndef __G_VALUE_ARRAY_H__
#define __G_VALUE_ARRAY_H__
#include <gobject/gvalue.h>
G_BEGIN_DECLS
/* --- typedefs & structs --- */
typedef struct _GValueArray GValueArray;

/* A growable array of GValue elements. */
struct _GValueArray
{
  guint   n_values;     /* number of values currently stored */
  GValue *values;       /* the element storage */

  /*< private >*/
  guint   n_prealloced; /* allocated capacity, >= n_values */
};


/* --- prototypes --- */
/* Returns a pointer to the value at |index_|. */
GValue*		g_value_array_get_nth	   (GValueArray	*value_array,
					    guint	 index_);
/* Creates a new array with room preallocated for |n_prealloced| values. */
GValueArray*	g_value_array_new	   (guint	 n_prealloced);
void		g_value_array_free	   (GValueArray	*value_array);
/* Returns a newly allocated deep copy of |value_array|. */
GValueArray*	g_value_array_copy	   (const GValueArray *value_array);
/* Insertion; each returns |value_array| for call chaining. */
GValueArray*	g_value_array_prepend	   (GValueArray	*value_array,
					    const GValue *value);
GValueArray*	g_value_array_append	   (GValueArray	*value_array,
					    const GValue *value);
GValueArray*	g_value_array_insert	   (GValueArray	*value_array,
					    guint	 index_,
					    const GValue *value);
GValueArray*	g_value_array_remove	   (GValueArray	*value_array,
					    guint	 index_);
/* In-place sorting, without or with user data for the comparator. */
GValueArray*	g_value_array_sort	   (GValueArray	*value_array,
					    GCompareFunc compare_func);
GValueArray*	g_value_array_sort_with_data (GValueArray *value_array,
					      GCompareDataFunc compare_func,
					      gpointer	      user_data);
G_END_DECLS
#endif /* __G_VALUE_ARRAY_H__ */
|
vmilkovic/primjena-blockchain-tehnologije | vjezba5/DPser-part.py | import rpyc
from Crypto.Signature import pkcs1_15
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
#############
## SERVER ##
#############
#dictionary 'popis' maps addresses to their balances
#Example: two addresses (heavily shortened here for readability) ...
#... with balances of 6.0 and 4.0 respectively:
#{'242263147508167995681': 6.0, '761501431346415241697': 4.0}
popis={}
class MyService(rpyc.Service):
    """Toy token-ledger service exposed over RPyC.

    Balances live in the module-level ``popis`` dict, keyed by address
    (the decimal string form of the client's RSA public modulus).
    Return values are user-facing status strings in Croatian.
    """

    def on_connect(self, conn):
        # No per-connection setup required.
        pass

    def on_disconnect(self, conn):
        # No per-connection teardown required.
        pass

    # Checks whether the address exists in the `popis` dictionary and
    # returns its balance; otherwise an error string.
    def exposed_provjeri_adresu(self, adresa):
        if adresa not in popis:
            return "Adresa ne postoji"
        else:
            return popis[adresa]

    # Registers the address reported by the client and sets its balance
    # to 5.0 -- every new address is credited 5.00 tokens.
    def exposed_registriraj_adresu(self, adresa):
        if adresa not in popis:
            popis[adresa] = 5.00
            print(popis)
            return "Adresa registrirana"
        else:
            return "Adresa već postoji"

    # Called by a client to transfer tokens from its address to another.
    # `message` is a byte string of the form
    #     b'sender_address#recipient_address#amount'
    # `signature` is the SHA-256 hash of `message`, signed with the
    # client's private key (PKCS#1 v1.5).
    def exposed_transakcija(self, message, signature):
        h = SHA256.new(message)  # recompute the hash server-side from the received bytes
        message = message.decode()  # turn the received byte string into a regular string
        message_list = message.split("#")  # split into fields on the '#' delimiter
        # Rebuild the client's public key from the sender address in
        # message_list[0]; it is needed to verify `signature` and recover
        # the hash the client computed when sending.
        puKey = RSA.construct((int(message_list[0]), 65537), True)  # public key from the client address
        try:
            # Compare the hash computed on the server (h) with the hash
            # recovered from `signature`, using the client's public key
            # to decode the signature.
            pkcs1_15.new(puKey).verify(h, signature)
        except (ValueError, TypeError):
            return "Digitalni potpis nije ispravan"
        # Signature verified: validate both addresses and move the amount.
        adrPos = message_list[0]
        adrPri = message_list[1]
        if adrPos not in popis:
            return "Adresa pošiljatelja ne postoji"
        elif adrPri not in popis:
            return "Adresa primatelja ne postoji"
        else:
            if float(message_list[2]) < 0:
                return "Nisu dozvoljene transakcije s negativnim iznosima"
            if popis[adrPos] - float(message_list[2]) < 0:
                return "Pošiljatelja nema dovoljno srestava za izvršavanje transakcije"
            # Debit the sender, credit the recipient.
            popis[adrPos] = popis[adrPos] - float(message_list[2])
            popis[adrPri] = popis[adrPri] + float(message_list[2])
            return "Transakcija uspješna!"
if __name__ == "__main__":
    # Start a threaded RPyC server exposing MyService on TCP port 25555.
    from rpyc.utils.server import ThreadedServer
    t = ThreadedServer(MyService, port=25555)
    t.start()
|
github-vet/bots | cmd/vet-bot/loopclosure/testdata/src/safe-usage/safe-usage.go | <filename>cmd/vet-bot/loopclosure/testdata/src/safe-usage/safe-usage.go
package main
import (
"fmt"
"sync"
)
func main() {
	wg := sync.WaitGroup{} // synchronizes main with the spawned goroutine
	x := []int{1, 2, 3, 4, 5}
	for _, v := range x { // want `range-loop variable v used in defer or goroutine at line 17`
		fmt.Println(v)
		if v == 4 {
			wg.Add(1)
			go func() { // captures the loop variable v by reference
				v += 10
				wg.Done()
			}()
			break
		}
	}
	wg.Wait() // NOTE: analyzer testdata -- the line number in the `want` comment is significant; do not add or remove lines
}
|
zealoussnow/chromium | ios/web/navigation/navigation_manager_util.h | // Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef IOS_WEB_NAVIGATION_NAVIGATION_MANAGER_UTIL_H_
#define IOS_WEB_NAVIGATION_NAVIGATION_MANAGER_UTIL_H_
// This file contains extensions for web::NavigationManager API without making
// them part of ios/web/public.
namespace web {

class NavigationContextImpl;
class NavigationItemImpl;
class NavigationManager;
class NavigationManagerImpl;

// Returns the committed or pending navigation item for the given navigation
// context, or null if the item is not found. The item's unique id is
// retrieved via the GetUniqueID method if |context| is null.
NavigationItemImpl* GetItemWithUniqueID(
    NavigationManagerImpl* navigation_manager,
    NavigationContextImpl* context);

// Returns the committed navigation item with the given |unique_id|, or null
// if the item is not found or it is pending. The item's unique id is
// retrieved via the GetUniqueID method.
NavigationItemImpl* GetCommittedItemWithUniqueID(
    NavigationManagerImpl* navigation_manager,
    int unique_id);

// Returns the committed navigation item index with the given |unique_id|, or
// -1 if the item is not found or it is pending. The item's unique id is
// retrieved via the GetUniqueID method.
int GetCommittedItemIndexWithUniqueID(NavigationManager* navigation_manager,
                                      int unique_id);

}  // namespace web
#endif // IOS_WEB_NAVIGATION_NAVIGATION_MANAGER_UTIL_H_
|
im97mori-github/JavaBLEUtil | characteristic/u2b1b/src/test/java/org/im97mori/ble/characteristic/u2b1b/VolumeFlowTest.java | package org.im97mori.ble.characteristic.u2b1b;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.im97mori.ble.BLEUtils;
import org.junit.Test;
public class VolumeFlowTest {
@Test
public void test_constructor_00001() {
//@formatter:off
byte[] data = new byte[2];
data[ 0] = (byte) VolumeFlow.VOLUME_FLOW_VALUE_IS_NOT_KNOWN;
data[ 1] = (byte) (VolumeFlow.VOLUME_FLOW_VALUE_IS_NOT_KNOWN >> 8);
//@formatter:on
VolumeFlow result1 = new VolumeFlow(data);
assertEquals(VolumeFlow.VOLUME_FLOW_VALUE_IS_NOT_KNOWN, result1.getVolumeFlow());
assertTrue(result1.isVolumeFlowValueIsNotKnown());
}
@Test
public void test_constructor_00002() {
//@formatter:off
byte[] data = new byte[2];
data[ 0] = 0;
data[ 1] = 0;
//@formatter:on
VolumeFlow result1 = new VolumeFlow(data);
assertEquals(BLEUtils.createUInt16(data, 0), result1.getVolumeFlow());
assertFalse(result1.isVolumeFlowValueIsNotKnown());
assertEquals(VolumeFlow.VOLUME_FLOW_VALUE_MINIMUM, result1.getVolumeFlowLitterPerSecond(), 0);
}
    // Byte constructor, maximum boundary: 65534 little-endian maps to
    // VOLUME_FLOW_VALUE_MAXIMUM litres per second.
    @Test
    public void test_constructor_00003() {
        //@formatter:off
        byte[] data = new byte[2];
        data[ 0] = (byte) 65534;
        data[ 1] = (byte) (65534 >> 8);
        //@formatter:on
        VolumeFlow result1 = new VolumeFlow(data);
        assertEquals(BLEUtils.createUInt16(data, 0), result1.getVolumeFlow());
        assertFalse(result1.isVolumeFlowValueIsNotKnown());
        assertEquals(VolumeFlow.VOLUME_FLOW_VALUE_MAXIMUM, result1.getVolumeFlowLitterPerSecond(), 0);
    }

    // Byte constructor, mid-range value: litres/second is the raw uint16
    // scaled by VOLUME_FLOW_VALUE_UNIT.
    @Test
    public void test_constructor_00004() {
        //@formatter:off
        byte[] data = new byte[2];
        data[ 0] = 0x01;
        data[ 1] = 0x02;
        //@formatter:on
        VolumeFlow result1 = new VolumeFlow(data);
        assertEquals(BLEUtils.createUInt16(data, 0), result1.getVolumeFlow());
        assertFalse(result1.isVolumeFlowValueIsNotKnown());
        assertEquals(VolumeFlow.VOLUME_FLOW_VALUE_UNIT * BLEUtils.createUInt16(data, 0), result1.getVolumeFlowLitterPerSecond(), 0);
    }

    // Int constructor: the "value is not known" sentinel must be flagged.
    @Test
    public void test_constructor_00101() {
        int volumeFlow = VolumeFlow.VOLUME_FLOW_VALUE_IS_NOT_KNOWN;
        VolumeFlow result1 = new VolumeFlow(volumeFlow);
        assertEquals(VolumeFlow.VOLUME_FLOW_VALUE_IS_NOT_KNOWN, result1.getVolumeFlow());
        assertTrue(result1.isVolumeFlowValueIsNotKnown());
    }

    // Int constructor, minimum boundary (0).
    @Test
    public void test_constructor_00102() {
        int volumeFlow = 0;
        VolumeFlow result1 = new VolumeFlow(volumeFlow);
        assertEquals(volumeFlow, result1.getVolumeFlow());
        assertFalse(result1.isVolumeFlowValueIsNotKnown());
        assertEquals(VolumeFlow.VOLUME_FLOW_VALUE_MINIMUM, result1.getVolumeFlowLitterPerSecond(), 0);
    }

    // Int constructor, maximum boundary (65534).
    @Test
    public void test_constructor_00103() {
        int volumeFlow = 65534;
        VolumeFlow result1 = new VolumeFlow(volumeFlow);
        assertEquals(volumeFlow, result1.getVolumeFlow());
        assertFalse(result1.isVolumeFlowValueIsNotKnown());
        assertEquals(VolumeFlow.VOLUME_FLOW_VALUE_MAXIMUM, result1.getVolumeFlowLitterPerSecond(), 0);
    }

    // Int constructor, mid-range value (1): scaled by VOLUME_FLOW_VALUE_UNIT.
    @Test
    public void test_constructor_00104() {
        int volumeFlow = 1;
        VolumeFlow result1 = new VolumeFlow(volumeFlow);
        assertEquals(volumeFlow, result1.getVolumeFlow());
        assertFalse(result1.isVolumeFlowValueIsNotKnown());
        assertEquals(VolumeFlow.VOLUME_FLOW_VALUE_UNIT * volumeFlow, result1.getVolumeFlowLitterPerSecond(), 0);
    }
    // Round-trip, sentinel value: getBytes() must reproduce the input bytes.
    @Test
    public void test_parcelable_00101() {
        //@formatter:off
        byte[] data = new byte[2];
        data[ 0] = (byte) VolumeFlow.VOLUME_FLOW_VALUE_IS_NOT_KNOWN;
        data[ 1] = (byte) (VolumeFlow.VOLUME_FLOW_VALUE_IS_NOT_KNOWN >> 8);
        //@formatter:on
        VolumeFlow result1 = new VolumeFlow(data);
        assertArrayEquals(data, result1.getBytes());
    }

    // Round-trip, minimum value (0).
    @Test
    public void test_parcelable_00102() {
        //@formatter:off
        byte[] data = new byte[2];
        data[ 0] = 0;
        data[ 1] = 0;
        //@formatter:on
        VolumeFlow result1 = new VolumeFlow(data);
        assertArrayEquals(data, result1.getBytes());
    }

    // Round-trip, maximum value (65534).
    @Test
    public void test_parcelable_00103() {
        //@formatter:off
        byte[] data = new byte[2];
        data[ 0] = (byte) 65534;
        data[ 1] = (byte) (65534 >> 8);
        //@formatter:on
        VolumeFlow result1 = new VolumeFlow(data);
        assertArrayEquals(data, result1.getBytes());
    }

    // Round-trip, mid-range value (0x0201 little-endian).
    @Test
    public void test_parcelable_00104() {
        //@formatter:off
        byte[] data = new byte[2];
        data[ 0] = 0x01;
        data[ 1] = 0x02;
        //@formatter:on
        VolumeFlow result1 = new VolumeFlow(data);
        assertArrayEquals(data, result1.getBytes());
    }
}
|
polarr/competitive-programming | Misc/CSESProblemset/Introductory/IncreasingArray.cpp | #include <bits/stdc++.h>
using namespace std;
int main(){
int l;
cin >> l;
vector <int> k;
for (int i = 0; i < l; ++i){
int x;
cin >> x;
k.push_back (x);
}
unsigned long long ans = 0;
for (int i = 1; i < k.size(); i++){
if (k[i] < k[i - 1]){
ans += k[i - 1] - k[i];
k[i] = k[i - 1];
}
}
cout << ans;
return 0;
} |
matrix65537/lab | WorkSpace01/M01/src/main/java/org/laoguo/pool/Main.java | <gh_stars>0
package org.laoguo.pool;
import java.util.concurrent.TimeUnit;
/**
 * Demo job: prints two numbered progress lines, pausing two seconds after
 * each. Used by {@code Main} to exercise the thread pool.
 */
class MyJob implements Runnable {

    /** Label used to tag this job's console output. */
    private final String name;

    public MyJob(String name) {
        this.name = name;
    }

    /**
     * Runs the job. If the worker thread is interrupted while sleeping, the
     * job stops early and restores the thread's interrupt status (sleep
     * clears it) instead of swallowing the interruption, so pool shutdown
     * can take effect promptly.
     */
    public void run() {
        for (int i = 0; i < 2; i++) {
            System.out.println(this.name + ": " + i);
            try {
                TimeUnit.SECONDS.sleep(2);
            } catch (InterruptedException e) {
                // Re-assert the interrupt flag and stop; never ignore an
                // interrupt (Effective Java item 81 / Thread docs).
                Thread.currentThread().interrupt();
                return;
            }
        }
    }
}
/**
 * Demo entry point: submits 100 short jobs to a {@code DefaultThreadPool}
 * and then shuts the pool down once everything has been queued.
 */
public class Main {

    public static void main(String[] args) {
        // Pool with the implementation's default worker count; jobs are
        // queued and picked up by idle workers in submission order.
        ThreadPool<MyJob> threadPool = new DefaultThreadPool<MyJob>();
        for (int i = 0; i < 100; i++) {
            threadPool.execute(new MyJob("Thread" + i));
        }
        // Stops the workers; whether queued-but-unstarted jobs still run
        // depends on the pool implementation — TODO confirm.
        threadPool.shutdown();
    }
}
|
likelion-contents/Origin_SpringBoot | Mission/Mission_5_Basic/src/main/java/dev/aquashdw/community/controller/AreaController.java | package dev.aquashdw.community.controller;
import dev.aquashdw.community.controller.dto.AreaDto;
import dev.aquashdw.community.service.AreaService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.Collection;
/**
 * REST endpoints under {@code /area} for creating and reading areas and for
 * resolving the area closest to a geographic coordinate. All business logic
 * is delegated to {@link AreaService}.
 */
@RestController
@RequestMapping("area")
public class AreaController {

    private static final Logger logger = LoggerFactory.getLogger(AreaController.class);

    // Injected via constructor (no @Autowired needed for a single ctor).
    private final AreaService areaService;

    public AreaController(AreaService areaService) {
        this.areaService = areaService;
    }

    /** Creates a new area from the request body and returns the saved DTO. */
    @PostMapping
    public ResponseEntity<AreaDto> createArea(@RequestBody AreaDto dto){
        return ResponseEntity.ok(this.areaService.createArea(dto));
    }

    /** Returns the area with the given id. */
    @GetMapping("{id}")
    public ResponseEntity<AreaDto> readArea(@PathVariable("id") Long id){
        return ResponseEntity.ok(this.areaService.readArea(id));
    }

    /** Returns every stored area. */
    @GetMapping
    public ResponseEntity<Collection<AreaDto>> readAreaAll() {
        return ResponseEntity.ok(this.areaService.readAreaAll());
    }

    // http://localhost:8080/area/get-location-info?latitude=37.00000&longitude=127.00000
    /**
     * Returns the area closest to the supplied coordinate. The defaults
     * presumably point at a reference location — TODO confirm with service.
     */
    @GetMapping("get-location-info")
    public ResponseEntity<AreaDto> getLocationInfo(
            @RequestParam(value = "latitude", defaultValue = "37.4877") Double latitude,
            @RequestParam(value = "longitude", defaultValue = "127.0174") Double longitude
    ){
        logger.debug("lat: {}, long: {}", latitude, longitude);
        return ResponseEntity.ok(this.areaService.closeArea(latitude, longitude));
    }
}
|
MDMN1/PenguBot | src/commands/Manipulation/illegal.js | const { Command } = require("../../index");
/**
 * "Illegal" meme command: asks the bot's image API to render an image for
 * the given name and posts the result to the channel.
 */
module.exports = class extends Command {

    constructor(...args) {
        super(...args, {
            cooldown: 8,
            aliases: ["isnowillegal", "trumpillegal"],
            requiredPermissions: ["ATTACH_FILES", "USE_EXTERNAL_EMOJIS", "EMBED_LINKS"],
            description: language => language.get("COMMAND_ILLEGAL_DESCRIPTION"),
            extendedHelp: "No extended help available.",
            usage: "<name:string>"
        });
    }

    /**
     * Generates the image for `name` and uploads it. On API failure the
     * promise resolves to null and an error message is sent instead.
     */
    async run(msg, [name]) {
        const image = await this.client.funcs.images("generate/illegal", { text: name })
            .catch(() => null);
        if (!image) return msg.sendMessage(`${this.client.emotes.cross} You got Trumped, couldn't create a new bill! Try something else.`);
        return msg.channel.sendFile(image);
    }
};
|
bdamore/logostest | server/api/pathway/pathway.ctrl.js | var Pathway = require('./pathway.model');
module.exports = {
find: function (req, res) {
Pathway.find(req.query)
.exec(function (err, response) {
if (err) {
res.send(err);
} else {
res.send(response);
}
});
},
save: function (req, res) {
var newPathway = new Pathway(req.body);
newPathway.save(function (err, response) {
if (err) {
res.send(err);
} else {
res.send(response);
}
});
}
};
////DELETE ONCE PUSHED LIVE////
//For testing with Postman, please don't change this info!
/*
{
"pathway": [
{
"name": "Logos",
"completion": {
"amount_completed": 0,
"total_to_complete": 100,
"complete": false
},
"stages": [
{
"name": "Stage 1",
"amount_completed": 0,
"total_to_complete": 0,
"complete": false,
"evaluations": [
{
"name": "Excersize 1",
"content": {
"video": "https://www.youtube.com/watch?v=8A6Uai5sQVw",
"image": "http://www.70sbig.com/wp-content/uploads/2012/03/halfsquat-fuck.jpg",
"progressions": [
{
"explanation": "Run 100 yards.",
"complete": false
}
],
"explanation": "Here is an explanation for this excersize.",
"question": "",
"answer": "",
"complete": false,
"completed_on": ""
},
"total_to_complete": 0,
"complete": false,
"needs_approval": true,
"approved_by": "",
"approved_on": "",
"approved": false
},
]
}
]
}
]
}
*/ |
markkurossi/mpc | compiler/utils/point.go | <reponame>markkurossi/mpc<filename>compiler/utils/point.go
//
// Copyright (c) 2020-2021 <NAME>
//
// All rights reserved.
//
package utils
import (
"fmt"
)
// Locator is an interface that implements Location method for
// returning item's input data position.
type Locator interface {
	Location() Point
}

// Point specifies a position in the compiler input data.
type Point struct {
	Source string // name of the input source
	Line   int    // 1-based
	Col    int    // 0-based
}

// Location implements the Locator interface.
func (p Point) Location() Point {
	return p
}

// String returns the position in "source:line:col" form.
func (p Point) String() string {
	return fmt.Sprintf("%s:%d:%d", p.Source, p.Line, p.Col)
}

// Undefined tests if the input position is undefined. A zero Line marks the
// zero value, since valid lines are 1-based.
func (p Point) Undefined() bool {
	return p.Line == 0
}
|
Dimitriusos/ui | lib/nodes/app/components/driver-other/component.js | <gh_stars>1-10
export { default } from 'nodes/components/driver-other/component';
|
metux/chromium-deb | third_party/WebKit/Source/core/html/parser/BackgroundHTMLParser.cpp | <reponame>metux/chromium-deb
/*
* Copyright (C) 2013 Google, Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY GOOGLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GOOGLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "core/html/parser/BackgroundHTMLParser.h"
#include <memory>
#include "core/HTMLNames.h"
#include "core/html/parser/HTMLDocumentParser.h"
#include "core/html/parser/TextResourceDecoder.h"
#include "core/html/parser/XSSAuditor.h"
#include "platform/CrossThreadFunctional.h"
#include "platform/Histogram.h"
#include "platform/WebTaskRunner.h"
#include "platform/instrumentation/tracing/TraceEvent.h"
#include "platform/wtf/CurrentTime.h"
#include "platform/wtf/Functional.h"
#include "platform/wtf/PtrUtil.h"
#include "platform/wtf/text/TextPosition.h"
#include "public/platform/Platform.h"
namespace blink {
// On a network with high latency and high bandwidth, using a device with a fast
// CPU, we could end up speculatively tokenizing the whole document, well ahead
// of when the main-thread actually needs it. This is a waste of memory (and
// potentially time if the speculation fails). So we limit our outstanding
// tokens arbitrarily to 10,000. Our maximal memory spent speculating will be
// approximately:
// (defaultOutstandingTokenLimit + defaultPendingTokenLimit) *
// sizeof(CompactToken)
//
// We use a separate low and high water mark to avoid
// constantly topping off the main thread's token buffer. At time of writing,
// this is (10000 + 1000) * 28 bytes = ~308kb of memory. These numbers have not
// been tuned.
static const size_t kDefaultOutstandingTokenLimit = 10000;
// We limit our chucks to 1000 tokens, to make sure the main thread is never
// waiting on the parser thread for tokens. This was tuned in
// https://bugs.webkit.org/show_bug.cgi?id=110408.
static const size_t kDefaultPendingTokenLimit = 1000;
using namespace HTMLNames;
#if DCHECK_IS_ON()
// Debug-only validation: everything handed across the thread boundary
// (tokens, preload requests, XSS infos) must report that it is safe to use
// from another thread (i.e. holds no thread-affine references).
static void CheckThatTokensAreSafeToSendToAnotherThread(
    const CompactHTMLTokenStream* tokens) {
  for (size_t i = 0; i < tokens->size(); ++i)
    DCHECK(tokens->at(i).IsSafeToSendToAnotherThread());
}

static void CheckThatPreloadsAreSafeToSendToAnotherThread(
    const PreloadRequestStream& preloads) {
  for (size_t i = 0; i < preloads.size(); ++i)
    DCHECK(preloads[i]->IsSafeToSendToAnotherThread());
}

static void CheckThatXSSInfosAreSafeToSendToAnotherThread(
    const XSSInfoStream& infos) {
  for (size_t i = 0; i < infos.size(); ++i)
    DCHECK(infos[i]->IsSafeToSendToAnotherThread());
}
#endif
// Creates the parser on the heap and hands back only a WeakPtr; the object
// owns itself and is destroyed via Stop() (which does "delete this").
WeakPtr<BackgroundHTMLParser> BackgroundHTMLParser::Create(
    std::unique_ptr<Configuration> config,
    RefPtr<WebTaskRunner> loading_task_runner) {
  auto* background_parser = new BackgroundHTMLParser(
      std::move(config), std::move(loading_task_runner));
  return background_parser->weak_factory_.CreateWeakPtr();
}

// Second-stage initialization: builds the main-document preload scanner once
// the document URL and cached parameters are known.
void BackgroundHTMLParser::Init(
    const KURL& document_url,
    std::unique_ptr<CachedDocumentParameters> cached_document_parameters,
    const MediaValuesCached::MediaValuesCachedData& media_values_cached_data) {
  preload_scanner_.reset(new TokenPreloadScanner(
      document_url, std::move(cached_document_parameters),
      media_values_cached_data,
      TokenPreloadScanner::ScannerType::kMainDocument));
}
// Defaults documented with the kDefault* constants above.
BackgroundHTMLParser::Configuration::Configuration()
    : outstanding_token_limit(kDefaultOutstandingTokenLimit),
      pending_token_limit(kDefaultPendingTokenLimit),
      should_coalesce_chunks(false) {}

BackgroundHTMLParser::BackgroundHTMLParser(
    std::unique_ptr<Configuration> config,
    RefPtr<WebTaskRunner> loading_task_runner)
    : weak_factory_(this),
      token_(WTF::WrapUnique(new HTMLToken)),
      tokenizer_(HTMLTokenizer::Create(config->options)),
      tree_builder_simulator_(config->options),
      options_(config->options),
      outstanding_token_limit_(config->outstanding_token_limit),
      parser_(config->parser),
      pending_tokens_(WTF::WrapUnique(new CompactHTMLTokenStream)),
      pending_token_limit_(config->pending_token_limit),
      xss_auditor_(std::move(config->xss_auditor)),
      decoder_(std::move(config->decoder)),
      loading_task_runner_(std::move(loading_task_runner)),
      tokenized_chunk_queue_(std::move(config->tokenized_chunk_queue)),
      pending_csp_meta_token_index_(
          HTMLDocumentParser::TokenizedChunk::kNoPendingToken),
      starting_script_(false),
      should_coalesce_chunks_(config->should_coalesce_chunks) {
  // The outstanding (high-water) limit must be at least the per-chunk limit,
  // otherwise a full chunk could never fit under the outstanding budget.
  DCHECK_GT(outstanding_token_limit_, 0u);
  DCHECK_GT(pending_token_limit_, 0u);
  DCHECK_GE(outstanding_token_limit_, pending_token_limit_);
}

BackgroundHTMLParser::~BackgroundHTMLParser() {}
// Decodes raw network bytes and feeds the resulting text to the tokenizer.
// Also records (histogram) how long the bytes sat queued before reaching
// this thread.
void BackgroundHTMLParser::AppendRawBytesFromMainThread(
    std::unique_ptr<Vector<char>> buffer,
    double bytes_received_time) {
  DCHECK(decoder_);
  DEFINE_STATIC_LOCAL(CustomCountHistogram, queue_delay,
                      ("Parser.AppendBytesDelay", 1, 5000, 50));
  queue_delay.Count(MonotonicallyIncreasingTimeMS() - bytes_received_time);
  UpdateDocument(decoder_->Decode(buffer->data(), buffer->size()));
}

// Appends already-decoded text to the input stream and resumes tokenizing.
void BackgroundHTMLParser::AppendDecodedBytes(const String& input) {
  DCHECK(!input_.Current().IsClosed());
  input_.Append(input);
  PumpTokenizer();
}

void BackgroundHTMLParser::SetDecoder(
    std::unique_ptr<TextResourceDecoder> decoder) {
  DCHECK(decoder);
  decoder_ = std::move(decoder);
}

// Flushes any bytes buffered inside the decoder (e.g. a partial multi-byte
// sequence) into the document.
void BackgroundHTMLParser::Flush() {
  DCHECK(decoder_);
  UpdateDocument(decoder_->Flush());
}

// Forwards newly decoded text to the tokenizer; when the decoder's detected
// encoding changes, the main-thread parser is notified first so it can keep
// its view of the document encoding in sync.
void BackgroundHTMLParser::UpdateDocument(const String& decoded_data) {
  DocumentEncodingData encoding_data(*decoder_.get());

  if (encoding_data != last_seen_encoding_data_) {
    last_seen_encoding_data_ = encoding_data;

    xss_auditor_->SetEncoding(encoding_data.Encoding());
    RunOnMainThread(
        &HTMLDocumentParser::DidReceiveEncodingDataFromBackgroundParser,
        parser_, encoding_data);
  }

  if (decoded_data.IsEmpty())
    return;

  AppendDecodedBytes(decoded_data);
}
// Restores parser state from a main-thread checkpoint (used after the main
// thread took over parsing, e.g. around document.write) and resumes
// speculative tokenization from that point.
void BackgroundHTMLParser::ResumeFrom(std::unique_ptr<Checkpoint> checkpoint) {
  parser_ = checkpoint->parser;
  token_ = std::move(checkpoint->token);
  tokenizer_ = std::move(checkpoint->tokenizer);
  tree_builder_simulator_.SetState(checkpoint->tree_builder_state);
  input_.RewindTo(checkpoint->input_checkpoint, checkpoint->unparsed_input);
  preload_scanner_->RewindTo(checkpoint->preload_scanner_checkpoint);
  starting_script_ = false;
  tokenized_chunk_queue_->Clear();
  PumpTokenizer();
}

// The main thread began consuming the chunk for |input_checkpoint|; earlier
// checkpoints can never be rewound to again, so release them.
void BackgroundHTMLParser::StartedChunkWithCheckpoint(
    HTMLInputCheckpoint input_checkpoint) {
  // Note, we should not have to worry about the index being invalid as messages
  // from the main thread will be processed in FIFO order.
  input_.InvalidateCheckpointsBefore(input_checkpoint);
  PumpTokenizer();
}

// Marks end-of-input and tokenizes whatever remains.
void BackgroundHTMLParser::Finish() {
  MarkEndOfFile();
  PumpTokenizer();
}

// Self-destructs; this object owns itself (see Create()).
void BackgroundHTMLParser::Stop() {
  delete this;
}

void BackgroundHTMLParser::ForcePlaintextForTextDocument() {
  // This is only used by the TextDocumentParser (a subclass of
  // HTMLDocumentParser) to force us into the PLAINTEXT state w/o using a
  // <plaintext> tag. The TextDocumentParser uses a <pre> tag for historical /
  // compatibility reasons.
  tokenizer_->SetState(HTMLTokenizer::kPLAINTEXTState);
}

// Appends the EOF sentinel character and closes the input stream.
void BackgroundHTMLParser::MarkEndOfFile() {
  DCHECK(!input_.Current().IsClosed());
  input_.Append(String(&kEndOfFileMarker, 1));
  input_.Close();
}
// Core speculation loop: tokenizes the buffered input, runs each token
// through the XSS auditor, preload scanner and tree-builder simulator, and
// batches the results into chunks for the main thread.
void BackgroundHTMLParser::PumpTokenizer() {
  TRACE_EVENT0("loading", "BackgroundHTMLParser::pumpTokenizer");
  HTMLTreeBuilderSimulator::SimulatedToken simulated_token =
      HTMLTreeBuilderSimulator::kOtherToken;

  // No need to start speculating until the main thread has almost caught up.
  if (input_.TotalCheckpointTokenCount() > outstanding_token_limit_)
    return;

  bool should_notify_main_thread = false;
  while (true) {
    // Track the raw source range of the token when XSS auditing is on, so
    // FilterToken below can inspect the original markup.
    if (xss_auditor_->IsEnabled())
      source_tracker_.Start(input_.Current(), tokenizer_.get(), *token_);

    if (!tokenizer_->NextToken(input_.Current(), *token_)) {
      // We've reached the end of our current input.
      should_notify_main_thread |= QueueChunkForMainThread();
      break;
    }

    if (xss_auditor_->IsEnabled())
      source_tracker_.end(input_.Current(), tokenizer_.get(), *token_);

    {
      TextPosition position = TextPosition(input_.Current().CurrentLine(),
                                           input_.Current().CurrentColumn());

      if (std::unique_ptr<XSSInfo> xss_info =
              xss_auditor_->FilterToken(FilterTokenRequest(
                  *token_, source_tracker_, tokenizer_->ShouldAllowCDATA()))) {
        xss_info->text_position_ = position;
        pending_xss_infos_.push_back(std::move(xss_info));
      }

      // Compact the token for cheap cross-thread transfer, then let the
      // preload scanner and tree-builder simulator observe it.
      CompactHTMLToken token(token_.get(), position);

      bool should_evaluate_for_document_write = false;
      bool is_csp_meta_tag = false;
      preload_scanner_->Scan(token, input_.Current(), pending_preloads_,
                             &viewport_description_, &is_csp_meta_tag,
                             &should_evaluate_for_document_write);

      simulated_token =
          tree_builder_simulator_.Simulate(token, tokenizer_.get());

      // Break chunks before a script tag is inserted and flag the chunk as
      // starting a script so the main parser can decide if it should yield
      // before processing the chunk.
      if (simulated_token == HTMLTreeBuilderSimulator::kScriptStart) {
        should_notify_main_thread |= QueueChunkForMainThread();
        starting_script_ = true;
      }

      pending_tokens_->push_back(token);
      if (is_csp_meta_tag) {
        pending_csp_meta_token_index_ = pending_tokens_->size() - 1;
      }
      if (should_evaluate_for_document_write) {
        likely_document_write_script_indices_.push_back(
            pending_tokens_->size() - 1);
      }
    }

    token_->Clear();

    // Flush a chunk at natural yield points (script/style/link end) or when
    // the pending buffer reaches its per-chunk limit.
    if (simulated_token == HTMLTreeBuilderSimulator::kScriptEnd ||
        simulated_token == HTMLTreeBuilderSimulator::kStyleEnd ||
        simulated_token == HTMLTreeBuilderSimulator::kLink ||
        pending_tokens_->size() >= pending_token_limit_) {
      should_notify_main_thread |= QueueChunkForMainThread();
      // If we're far ahead of the main thread, yield for a bit to avoid
      // consuming too much memory.
      if (input_.TotalCheckpointTokenCount() > outstanding_token_limit_)
        break;
    }
    if (!should_coalesce_chunks_ && should_notify_main_thread) {
      RunOnMainThread(&HTMLDocumentParser::NotifyPendingTokenizedChunks,
                      parser_);
      should_notify_main_thread = false;
    }
  }

  // Wait to notify the main thread about the chunks until we're at the limit.
  // This lets the background parser generate lots of valuable preloads before
  // anything expensive (extensions, scripts) take up time on the main thread. A
  // busy main thread can cause preload delays.
  if (should_notify_main_thread) {
    RunOnMainThread(&HTMLDocumentParser::NotifyPendingTokenizedChunks, parser_);
  }
}
// Packages all pending tokens, preloads and XSS infos into a TokenizedChunk
// (together with the state needed to resume from this point) and enqueues it
// for the main thread. Returns true when the queue was empty beforehand,
// i.e. the main thread may need a wake-up notification.
bool BackgroundHTMLParser::QueueChunkForMainThread() {
  if (pending_tokens_->IsEmpty())
    return false;

#if DCHECK_IS_ON()
  CheckThatTokensAreSafeToSendToAnotherThread(pending_tokens_.get());
  CheckThatPreloadsAreSafeToSendToAnotherThread(pending_preloads_);
  CheckThatXSSInfosAreSafeToSendToAnotherThread(pending_xss_infos_);
#endif

  std::unique_ptr<HTMLDocumentParser::TokenizedChunk> chunk =
      WTF::WrapUnique(new HTMLDocumentParser::TokenizedChunk);
  TRACE_EVENT_WITH_FLOW0("blink,loading",
                         "BackgroundHTMLParser::sendTokensToMainThread",
                         chunk.get(), TRACE_EVENT_FLAG_FLOW_OUT);

  chunk->preloads.swap(pending_preloads_);
  if (viewport_description_.set)
    chunk->viewport = viewport_description_;
  chunk->xss_infos.swap(pending_xss_infos_);
  chunk->tokenizer_state = tokenizer_->GetState();
  chunk->tree_builder_state = tree_builder_simulator_.GetState();
  chunk->input_checkpoint = input_.CreateCheckpoint(pending_tokens_->size());
  chunk->preload_scanner_checkpoint = preload_scanner_->CreateCheckpoint();
  chunk->tokens = std::move(pending_tokens_);
  chunk->starting_script = starting_script_;
  chunk->likely_document_write_script_indices.swap(
      likely_document_write_script_indices_);
  chunk->pending_csp_meta_token_index = pending_csp_meta_token_index_;
  // Reset per-chunk state for the next batch of tokens.
  starting_script_ = false;
  pending_csp_meta_token_index_ =
      HTMLDocumentParser::TokenizedChunk::kNoPendingToken;

  bool is_empty = tokenized_chunk_queue_->Enqueue(std::move(chunk));
  pending_tokens_ = WTF::WrapUnique(new CompactHTMLTokenStream);
  return is_empty;
}
// If the background parser is already running on the main thread, then it is
// not necessary to post a task to the main thread to run asynchronously. The
// main parser deals with chunking up its own work.
// TODO(csharrison): This is a pretty big hack because we don't actually need a
// CrossThreadClosure in these cases. This is just experimental.
template <typename FunctionType, typename... Ps>
void BackgroundHTMLParser::RunOnMainThread(FunctionType function,
                                           Ps&&... parameters) {
  if (IsMainThread()) {
    // Already on the main thread: invoke synchronously, no hop needed.
    (*WTF::Bind(function, std::forward<Ps>(parameters)...))();
  } else {
    loading_task_runner_->PostTask(
        BLINK_FROM_HERE,
        CrossThreadBind(function, std::forward<Ps>(parameters)...));
  }
}
} // namespace blink
|
FAOSTAT/faostat | ui/src/js/views/compare-filter-box-view.js | /*global define, _:false, $, console, amplify, FM*/
define([
'jquery',
'loglevel',
'views/base/view',
'globals/Common',
'config/Config',
'config/Events',
'config/Analytics',
'config/compare/Events',
'config/compare/Config',
'text!templates/compare/compare_filter_box.hbs',
'text!templates/compare/filter_container.hbs',
'i18n!nls/compare',
'handlebars',
'faostatapiclient',
'underscore',
'lib/compare/compare-filter',
'q',
'amplify'
], function ($, log, View, Common, C, E, A, EC, CM, template, templateFilterContainer, i18nLabels, Handlebars, API, _, Filter, Q) {
'use strict';
var s = {
GROUPS: '[data-role="groups"]',
DOMAINS: '[data-role="domains"]',
FILTERS: '[data-role="filters"]',
REMOVE_FILTER_BOX: '[data-role="remove_filter_box"]',
COLLAPSE_FILTER_BOX: '[data-role="collapse_filter_box"]',
PANEL_BODY: '[data-role="panel_body_filter_box"]',
GROUP_HEADING_TITLE: '[data-role="group-heading-title"]',
DOMAIN_HEADING_TITLE: '[data-role="domain-heading-title"]'
};
var CompareFiltersBoxView = View.extend({
autoRender: true,
className: 'compare',
template: template,
        // Sets up per-instance state holders for groups, domains and the
        // per-dimension filter dropdowns.
        initialize: function (options) {
            this.o = options || {};
            this.o.groups = {};
            this.o.domains = {};
            // list of the dimensions
            this.o.filters = {};
            this.DIMENSION_PARAMETER_MAPPING = {};
        },

        getTemplateData: function () {
            return i18nLabels;
        },

        // Called once the template is in the DOM.
        attach: function () {
            View.prototype.attach.call(this, arguments);
            this.initVariables();
            this.initComponents();
            this.bindEventListeners();
            this.configurePage();
        },

        // Caches locale and jQuery handles for all data-role elements.
        initVariables: function () {
            // init lang
            this.o.lang = Common.getLocale();
            this.$GROUPS = this.$el.find(s.GROUPS);
            this.$DOMAINS = this.$el.find(s.DOMAINS);
            this.$FILTERS = this.$el.find(s.FILTERS);
            this.$REMOVE_FILTER_BOX = this.$el.find(s.REMOVE_FILTER_BOX);
            this.$COLLAPSE_FILTER_BOX = this.$el.find(s.COLLAPSE_FILTER_BOX);
            this.$PANEL_BODY = this.$el.find(s.PANEL_BODY);
            this.$GROUP_HEADING_TITLE = this.$el.find(s.GROUP_HEADING_TITLE);
            this.$DOMAIN_HEADING_TITLE = this.$el.find(s.DOMAIN_HEADING_TITLE);
        },

        // Intentionally empty; kept for the View lifecycle contract.
        initComponents: function () {
        },
        // Loads the group list from the API (honouring white/black lists)
        // and builds the groups dropdown; shows a spinner meanwhile.
        configurePage: function () {
            var self = this;
            amplify.publish(E.LOADING_SHOW, { container: this.$GROUPS});
            API.groups({
                whitelist: CM.groups.whitelist || [],
                blacklist: CM.groups.blacklist || []
            }).then(function(json) {
                self.createGroupFilter(json);
            }).fail(function(e) {
                log.error("CompareFilterBox.configurePage", e);
                amplify.publish(E.LOADING_HIDE, { container: self.$GROUPS});
                amplify.publish(E.CONNECTION_PROBLEM);
            });
        },
        // Builds the groups dropdown from the API payload and wires its
        // change event to domain loading.
        createGroupFilter: function(json) {
            var self = this,
                groupsData = json.data,
                filter = new Filter({
                    container: this.$GROUPS,
                    title: i18nLabels.groups,
                    addEmptySelection: true,
                    //placeholder: i18nLabels.please_select_an_option,
                    placeholder: " ",
                    data: groupsData
                });
            // cache groups dropdown
            this.o.groups = {
                filter: filter,
                // TODO: keep track of the filter
                json: json
            };
            this.o.groups.$DD = filter.getDropDown();
            // TODO: make it nicer the default code selection
            //self.onGroupChange(this.o.groups.$DD.find(":selected").val(), this.o.groups.$DD.find(":selected").text());
            this.o.groups.$DD.change(function(e) {
                self.onGroupChange(e.val, e.added.text);
            });
        },

        // Builds the domains dropdown for the selected group; clears any
        // dimension filters from the previous domain first.
        createDomainFilter: function(json) {
            var self = this;
            // TODO: remove filters (dispose)
            this.$FILTERS.empty();
            var filter = new Filter({
                container: this.$DOMAINS,
                title: i18nLabels.domains,
                //placeholder: i18nLabels.please_select_an_option,
                placeholder: " ",
                data: json.data
            });
            // cache groups dropdown
            this.o.domains = {
                filter: filter,
                // TODO: keep track of the filter
                json: json
            };
            this.o.domains.$DD = filter.getDropDown();
            // TODO: make it nicer the default code selection
            // self.onDomainChange(this.o.domains.$DD.find(":selected").val(), this.o.domains.$DD.find(":selected").text());
            this.o.domains.$DD.change(function(e) {
                self.onDomainChange(e.val, e.added.text);
            });
        },
        // Group selection changed: update the heading and reload the domain
        // dropdown for the chosen group.
        onGroupChange: function(code, label) {
            var self = this;
            // TODO: dispose domains and filters container
            this.$GROUP_HEADING_TITLE.html(label);
            this.$DOMAIN_HEADING_TITLE.empty();
            // loading domains by group
            this.$DOMAINS.empty();
            amplify.publish(E.LOADING_SHOW, { container: this.$DOMAINS});
            API.domains({
                group_code: code,
                whitelist: CM.domains.whitelist || [],
                blacklist: CM.domains.blacklist || []
            }).then(function(json) {
                self.createDomainFilter(json);
            });
        },

        // Domain selection changed: remember the code and rebuild the
        // per-dimension filters.
        onDomainChange: function(code, label) {
            this.domainCode = code;
            this.$DOMAIN_HEADING_TITLE.html(" - " + label);
            // get dimensions and create new filters
            this.createFiltersByDomain();
        },
        // Filters by domains
        // Fetches the selected domain's dimensions, pre-loads the codes for
        // each one, then builds one multiselect dropdown per dimension.
        createFiltersByDomain: function() {
            // TODO: remove filters (dispose)
            this.$FILTERS.empty();
            // clean old filters
            this.o.filters = {};
            var $CONTAINER = this._createRandomElement(this.$FILTERS);
            // loading filters by domain
            amplify.publish(E.LOADING_SHOW, { container: $CONTAINER});
            // parse the dimensions to create dinamically the dropdowns needed
            API.dimensions({
                domain_code: this.domainCode,
                full: true
            }).then(_.bind(this._preloadDomainDimensions, this))
                .then(_.bind(function(json) {
                    amplify.publish(E.LOADING_HIDE, { container: $CONTAINER});
                    // json is an array of codes responses, in the same order
                    // as DIMENSION_PARAMETER_MAPPING was populated.
                    var indexRequest = 0;
                    _.each(this.DIMENSION_PARAMETER_MAPPING, _.bind(function(parameter, id) {
                        // _.each(json, _.bind(function(v) {
                        log.info("-----------id" , id, parameter, $CONTAINER.length)
                        try {
                            var v = json[indexRequest];
                            // TODO: to be changed
                            v.container = this.createFilterContainer($CONTAINER, id);
                            // TODO: get label from metadata
                            v.title = i18nLabels[id.toLowerCase()] || id;
                            //v.parameter = this.DIMENSION_PARAMETER_MAPPING[id];
                            v.parameter = id;
                            v.id = id;
                            v.ddOptions = {
                                multiple: true,
                                addEmptySelection: true,
                                //placeholder: "SELECT a",
                                allowClear: false,
                                // hide dropdowns for blacklisted dimension ids
                                show: (CM.filters.blacklistCodesID.indexOf(id) <= -1)
                            };
                            // v.placeholder = i18nLabels.please_select_an_option;
                            v.placeholder = " ";
                            this.o.filters[id] = {};
                            this.o.filters[id].filter = new Filter(v);
                            indexRequest += 1;
                        }catch(e) {
                            log.error(e);
                        }
                    }, this));
                }, this));
        },
        // Requests the code list of every dimension in parallel; resolves
        // with an array of responses in dimension order. Also caches each
        // dimension's API parameter name in DIMENSION_PARAMETER_MAPPING.
        _preloadDomainDimensions: function (json) {
            var r = [],
                domainCode = this.domainCode,
                lang = this.o.lang,
                self = this;
            // TODO: cache of the dimensions paramenter (a the moment the i.e. /codes/areagroup don't return the paramenter)
            this.DIMENSION_PARAMETER_MAPPING = {};
            // Q.all to return all the request at the same time
            _.each(json.data, _.bind(function (c) {
                var id = c.id;
                // caching the parameter to use with the getData
                this.DIMENSION_PARAMETER_MAPPING[id] = c.parameter;
                r.push(
                    API.codes({
                        id: id,
                        domain_code: domainCode,
                        show_lists: false,
                    })
                );
            }, this));
            return Q.all(r);
        },
        // Appends a per-dimension wrapper element and returns it.
        createFilterContainer: function ($CONTAINER, id) {
            var t = Handlebars.compile(templateFilterContainer);
            $CONTAINER.append(t({id: id}));
            return $CONTAINER.find('[data-role="filter-'+ id +'"]');
        },
getFilters: function () {
// check if domain or group code is selected
if (this.o.groups.$DD.val() === '') {
amplify.publish(E.NOTIFICATION_WARNING, {
title: i18nLabels.warning,
text: 'Missing Group selection'
});
log.error("Missing goup selection")
throw new Exception("Missing groups")
}
if (this.o.domains.$DD.val() === '') {
amplify.publish(E.NOTIFICATION_WARNING, {
title: i18nLabels.warning,
text: 'Missing Domain selection'
});
throw new Exception("Missing domains")
}
// filters to be returned to the compare-view
var f = [];
// TODO: get all the filters mapping
// for all the filters get
// the id
// i.e. metadata.parameters.parameter: "@List1Codes",
// with the relative values
// to pass to the getData
// TODO: how to handle the domain?
var domain = {};
domain.id = 'domain';
// TODO: the parameter in theory should be dynamic
domain.parameter = 'domain_code';
// TODO: change domains variable name
domain.codes = [this.o.domains.$DD.val()];
f.push(domain);
// Get all the selected values from the filters multiselections dropdown
_.each(Object.keys(this.o.filters), _.bind(function (filterKey) {
f.push(this.o.filters[filterKey].filter.getFilter());
}, this));
return f;
},
        // Asks the parent compare view to remove this filter box and records
        // the action in analytics.
        removeFilterBox: function(e) {
            e.preventDefault();
            // TODO: onRemove the filter add popup to check if the user want to remove it?
            //amplify.publish(E.NOTIFICATION_ACCEPT, {filter: this});
            amplify.publish(EC.FILTER_BOX_REMOVE, {filter: this});
            amplify.publish(E.GOOGLE_ANALYTICS_EVENT, {
                category: A.compare.remove_filter.category,
                action: A.compare.remove_filter.action,
                label: ""
            });
        },

        // Toggles the panel body and flips the chevron icon to match the new
        // visibility state.
        collapseFilterBox: function(e) {
            e.preventDefault();
            var self = this;
            this.$PANEL_BODY.toggle("fast", function() {
                self.$COLLAPSE_FILTER_BOX.removeClass("fa-chevron-up");
                self.$COLLAPSE_FILTER_BOX.removeClass("fa-chevron-down");
                if ( self.$PANEL_BODY.is(":visible")) {
                    self.$COLLAPSE_FILTER_BOX.addClass("fa-chevron-down");
                } else {
                    self.$COLLAPSE_FILTER_BOX.addClass("fa-chevron-up");
                }
            });
        },
        // Selected domain code (select2 value).
        getDomainCode: function() {
            return this.o.domains.$DD.val();
        },

        // Display label of the selected domain.
        getDomainName: function() {
            return this.o.domains.$DD.select2('data').text;
        },

        // Display label of the selected group.
        getGroupName: function() {
            return this.o.groups.$DD.select2('data').text;
        },
_createRandomElement: function($CONTAINER, empty) {
var empty = (empty !== undefined && typeof(empty) === "boolean")? empty : true,
id = Math.random().toString().replace(".", "");
if(empty) {
$CONTAINER.empty();
}
$CONTAINER.append("<div id='"+ id +"'>");
return $CONTAINER.find('#' + id);
},
        // Wires the remove/collapse buttons to their handlers.
        bindEventListeners: function () {
            this.$REMOVE_FILTER_BOX.on('click', _.bind(this.removeFilterBox, this));
            this.$COLLAPSE_FILTER_BOX.on('click', _.bind(this.collapseFilterBox, this));
        },

        // NOTE(review): listeners bound above are not unbound here — presumably
        // relying on DOM removal; confirm there is no leak on repeated add/remove.
        unbindEventListeners: function () {
        },

        disposeDomains: function () {
            console.warn('TODO dispose domains');
            this.$DOMAINS.empty();
        },

        disposeFilters: function () {
            console.warn('TODO dispose filters');
        },

        dispose: function () {
            this.unbindEventListeners();
            View.prototype.dispose.call(this, arguments);
        }
});
return CompareFiltersBoxView;
});
|
grokify/ringcentral-sdk-go-scg | engagedigital/v1/client/model_identity_group.go | /*
* Engage Digital API
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* API version: 1.0
* Generated by: OpenAPI Generator (https://openapi-generator.tech)
*/
package engagedigital
import (
"time"
)
// IdentityGroup models an Engage Digital identity-group resource as returned
// by the REST API; fields map 1:1 onto the JSON payload (omitempty except
// for the mandatory Id).
type IdentityGroup struct {
	AvatarUrl         string                 `json:"avatar_url,omitempty"`
	Company           string                 `json:"company,omitempty"`
	CreatedAt         time.Time              `json:"created_at,omitempty"`
	CustomFieldValues map[string]interface{} `json:"custom_field_values,omitempty"`
	Emails            []string               `json:"emails,omitempty"`
	Firstname         string                 `json:"firstname,omitempty"`
	Gender            string                 `json:"gender,omitempty"`
	HomePhones        []string               `json:"home_phones,omitempty"`
	Id                string                 `json:"id"`
	IdentityIds       []string               `json:"identity_ids,omitempty"`
	Lastname          string                 `json:"lastname,omitempty"`
	MobilePhones      []string               `json:"mobile_phones,omitempty"`
	Notes             string                 `json:"notes,omitempty"`
	TagIds            []string               `json:"tag_ids,omitempty"`
	UpdatedAt         time.Time              `json:"updated_at,omitempty"`
}
|
clarkdonald/eecs494explore3d | jni/external/zenilib/zeni_core/zeni_core.h | /* This file is part of the Zenipex Library (zenilib).
* Copyright (C) 2011 <NAME> (bazald).
*
* zenilib is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* zenilib is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with zenilib. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef ZENILIB_CORE_H
#define ZENILIB_CORE_H
#include <zeni.h>
// ZENI_CORE_DLL controls symbol linkage. The default below marks symbols as
// imported from a DLL; when building the library itself the build system is
// expected to pre-define this macro (e.g. as __declspec(dllexport)).
// NOTE(review): __declspec is MSVC-specific; presumably non-MSVC builds
// (this lives under jni/) pre-define ZENI_CORE_DLL empty before including
// this header -- confirm against the build scripts.
#ifndef ZENI_CORE_DLL
#define ZENI_CORE_DLL __declspec(dllimport)
#endif
// ZENI_CORE_EXT marks declarations as external by default; the implementation
// translation unit can pre-define it empty to materialize the definitions.
#ifndef ZENI_CORE_EXT
#define ZENI_CORE_EXT extern
#endif
// Umbrella includes for the zeni_core module.
#include <Zeni/Core.h>
#include <Zeni/Controllers.h>
#include <Zeni/Timer.h>
#include <Zeni/Timer.hxx>
#endif
|
Magolves/sedona2cpp | Sedona/src/main/java/sedonac/translate/CppStdHeader.java | /*
* Copyright (c) 2007 Tridium, Inc.
* Copyright (c) 2019. <NAME> (translation support)
* Licensed under the Academic Free License version 3.0
*
*/
package sedonac.translate;
import sedonac.Compiler;
import sedonac.ast.Expr;
import sedonac.ast.KitDef;
import sedonac.ast.TypeDef;
import sedonac.namespace.Type;
import java.io.File;
import java.util.Date;
/**
 * Emits the shared C++ {@code types.h} header containing the primitive
 * typedefs used by all translated kits. The header is written at most once
 * per translation run, regardless of how many kits are processed.
 */
public class CppStdHeader extends AbstractKitTranslator {

    /** Tracks whether the header has already been emitted this run. */
    private boolean written = false;

    /**
     * @param compiler the owning compiler instance
     * @param kit      the kit currently being translated
     */
    public CppStdHeader(Compiler compiler, KitDef kit) {
        super(compiler, kit);
    }

    /** @return the output file, always {@code <outDir>/types.h}. */
    @Override
    public File toFile() {
        return new File(outDir, "types.h");
    }

    /** Writes the header on the first invocation; subsequent calls are no-ops. */
    @Override
    public void doTranslate() {
        if (!written) {
            types();
            written = true;
        }
    }

    @Override
    protected String getHeaderText() {
        return "Common type definitions";
    }

    /**
     * Emits the body of {@code types.h}: an include guard, a C++11 version
     * check, and the primitive typedefs (sized per {@code __WORDSIZE}).
     */
    public void types() {
        final String guard = "__TYPES_H__";
        nl();
        w("#ifndef " + guard).nl();
        w("#define " + guard).nl();
        nl();
        w("/* Check for C++11 (2011) */").nl();
        w("#if __cplusplus < 201103L").nl();
        indent++;
        indent().w("#error \"Library requires C++11\"").nl();
        indent--;
        w("#endif").nl();
        nl();
        // Fixed-width integer typedefs, emitted line by line.
        final String[] typedefLines = {
            "/* Primitive types (modify to match platform specific types/sizes) */",
            "typedef unsigned char uint8;",
            "typedef unsigned short uint16;",
            "/* Check for compiler long size */",
            "# if __WORDSIZE == 64",
            "/* 64bit */",
            "typedef long int int64;",
            "typedef int int32;",
            "# else",
            "/* 32bit */",
            "typedef long int int32;",
            "typedef long long int int64;",
            "# endif",
        };
        for (String line : typedefLines) {
            w(line).nl();
        }
        nl();
        w("#endif").nl();
    }
}
|
Ron423c/chromium | components/autofill/core/browser/autofill_form_test_utils.h | <gh_stars>0
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_AUTOFILL_CORE_BROWSER_AUTOFILL_FORM_TEST_UTILS_H_
#define COMPONENTS_AUTOFILL_CORE_BROWSER_AUTOFILL_FORM_TEST_UTILS_H_
#include <vector>
#include "base/optional.h"
#include "components/autofill/core/browser/autofill_field.h"
#include "components/autofill/core/browser/field_types.h"
#include "components/autofill/core/common/form_data.h"
#include "components/autofill/core/common/form_field_data.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace autofill {
namespace test {
namespace {
// Default values applied to test forms/fields when a test does not
// override them.
// Default label assigned to fields.
constexpr char kLabelText[] = "label";
// Default name attribute assigned to fields.
constexpr char kNameText[] = "name";
// Default form url.
constexpr char kFormUrl[] = "http://example.com/form.html";
// Default form action url.
constexpr char kFormActionUrl[] = "http://example.com/submit.html";
}  // namespace
namespace internal {
// Expected FormFieldData are constructed based on these descriptions.
// Each instance describes one field of a test form; the defaults produce a
// plain, focusable text field labelled kLabelText / named kNameText.
// NOTE(review): templated with an unused parameter, presumably so the
// definitions can live entirely in this header — confirm intent.
template <typename = void>
struct FieldDataDescription {
// Server-side field type; EMPTY_TYPE means no particular role.
ServerFieldType role = ServerFieldType::EMPTY_TYPE;
// Whether the field is focusable in the rendered form.
bool is_focusable = true;
// Visible label of the field.
const char* label = kLabelText;
// Value of the name attribute.
const char* name = kNameText;
// Initial field value; base::nullopt leaves the value unset.
base::Optional<const char*> value = base::nullopt;
// Raw autocomplete attribute; nullptr means the attribute is absent.
const char* autocomplete_attribute = nullptr;
// HTML form-control type (defaults to "text").
const char* form_control_type = "text";
// Whether autocomplete is enabled for the field.
bool should_autocomplete = true;
// Autofilled state of the field; base::nullopt means unspecified.
base::Optional<bool> is_autofilled = base::nullopt;
};
// Attributes provided to the test form. Defaults yield a simple named form
// with the example.com url/action declared above.
template <typename = void>
struct TestFormAttributes {
// Human-readable description used in logging/trace output.
const char* description_for_logging;
// Descriptions of the form's fields, in order.
std::vector<FieldDataDescription<>> fields;
// Renderer id of the form; base::nullopt means no explicit id is assigned.
base::Optional<FormRendererId> unique_renderer_id = base::nullopt;
// Value of the form's name attribute.
const char* name = "TestForm";
// Form url.
const char* url = kFormUrl;
// Form action url.
const char* action = kFormActionUrl;
// Origin of the main frame; base::nullopt means unspecified.
base::Optional<url::Origin> main_frame_origin = base::nullopt;
// Whether the form is a formless checkout form.
bool is_formless_checkout = false;
// Whether the fields are wrapped in an actual <form> tag.
bool is_form_tag = true;
};
// Flags determining whether the corresponding check should be run on the test
// form. Each flag gates one step or expectation inside
// CheckFormStructureTestData; everything defaults to "off"/"skip".
template <typename = void>
struct TestFormFlags {
// false means the function is not to be called.
bool determine_heuristic_type = false;
bool parse_query_response = false;
// false means the corresponding check is not supposed to run.
bool is_autofillable = false;
bool should_be_parsed = false;
bool should_be_queried = false;
bool should_be_uploaded = false;
bool has_author_specified_types = false;
bool has_author_specified_upi_vpa_hint = false;
// first value denotes whether the comparison is to be done while second
// denotes EXPECT_TRUE for true and EXPECT_FALSE for false.
std::pair<bool, bool> is_complete_credit_card_form = {false, false};
// Expected counts; base::nullopt means no checking.
base::Optional<int> field_count = base::nullopt;
base::Optional<int> autofill_count = base::nullopt;
base::Optional<int> section_count = base::nullopt;
base::Optional<int> response_field_count = base::nullopt;
};
// Expected field type values to be verified with the test form. An empty
// vector skips the corresponding comparison; presumably a non-empty vector
// holds one entry per form field — confirm in CheckFormStructureTestData.
template <typename = void>
struct ExpectedFieldTypeValues {
// Expected HTML autocomplete types.
std::vector<HtmlFieldType> expected_html_type = {};
// Expected phone-number sub-parts.
std::vector<AutofillField::PhonePart> expected_phone_part = {};
// Expected heuristically determined types.
std::vector<ServerFieldType> expected_heuristic_type = {};
// Expected overall (combined) types.
std::vector<ServerFieldType> expected_overall_type = {};
};
// Describes a test case for the parser: the input form, which checks to run,
// and the per-field expectations.
template <typename = void>
struct FormStructureTestCase {
// The form to construct and feed to the parser.
TestFormAttributes<> form_attributes;
// Which checks to run and their expected outcomes.
TestFormFlags<> form_flags;
// Per-field expected type values.
ExpectedFieldTypeValues<> expected_field_types;
};
} // namespace internal
// Convenience aliases for the default (void) instantiations of the
// templated description structs above.
using FieldDataDescription = internal::FieldDataDescription<>;
using TestFormAttributes = internal::TestFormAttributes<>;
using FormStructureTestCase = internal::FormStructureTestCase<>;
// Describes the |form_data|. Use this in SCOPED_TRACE if other logging
// messages might refer to the form.
testing::Message DescribeFormData(const FormData& form_data);
// Returns the form field relevant to the |role|.
FormFieldData CreateFieldByRole(ServerFieldType role);
// Creates a FormData to be fed to the parser.
FormData GetFormData(const TestFormAttributes& test_form_attributes);
// Test fixture for data-driven FormStructure parser tests. Subclasses build
// a vector of FormStructureTestCase and hand it to
// CheckFormStructureTestData.
class FormStructureTest : public testing::Test {
protected:
// Iterates over |test_cases|, creates a FormData for each, runs the parser
// and checks the results.
static void CheckFormStructureTestData(
const std::vector<FormStructureTestCase>& test_cases);
};
} // namespace test
} // namespace autofill
#endif // COMPONENTS_AUTOFILL_CORE_BROWSER_AUTOFILL_FORM_TEST_UTILS_H_
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.