text stringlengths 2 99k | meta dict |
|---|---|
package com.example.expressiontree;
/**
* @class CompositeDivideNode
*
* @brief A node containing left and right children. The meaning of
* this node is left / right. It plays the role of a
* "Composite" in the Composite pattern.
*/
public class CompositeDivideNode extends CompositeBinaryNode {
/** Ctor */
public CompositeDivideNode(ComponentNode left,
ComponentNode right) {
super(left, right);
}
/** Return the printable character stored in the node. */
public int item() {
return '/';
}
/**
* Define the @a accept() operation used for the Visitor pattern
* to accept the @a visitor.
*/
public void accept(Visitor visitor) {
visitor.visit(this);
}
}
| {
"pile_set_name": "Github"
} |
// Extracts the variable name from an ir2 'irval' node: yields `nm` for a
// 'var' node and [] (empty) for every other variant.
// NOTE(review): semantics inferred from the visitor pattern only — confirm
// against the ir2 AST definition.
function il2ssa_irval_var(v)
visit:ir2(irval: v) {
   once irval {
      var -> nm
    | else -> []
   }
}
// Collects the (deduplicated) names of all variables used inside an ir2
// expression `e` by iterating every embedded irval node.
function il2ssa_use(e)
collector(add, get) {
   // Walk every irval inside the expression, recording 'var' names.
   iter:ir2(irexpr: e) {
      once irval {
         var -> add(nm)
       | else -> []
      }
   };
   // unifiq presumably removes duplicates — TODO confirm.
   return unifiq(get())
}
// Collects the (deduplicated) label nodes referenced by a terminator `t`
// (e.g. branch targets), by visiting every irlabel inside it.
function il2ssa_term_labels(t)
collector(add, get) {
   iter:ir2(irterm: t) {
      once irlabel : add(node);
   };
   return unifiq(get())
}
// Collects the (deduplicated) variable names used by a terminator `t`.
// Same shape as il2ssa_use, but iterating an irterm instead of an irexpr.
function il2ssa_term_uses(t)
collector(add, get) {
   iter:ir2(irterm: t) {
      once irval {
         var -> add(nm)
       | else -> []
      }
   };
   return unifiq(get())
}
// Lowers ir2 code into the "genssa" form consumed by ssa_transform.
// Returns [consts; gencode; allocas; otyps] where:
//   consts  — hash of generated names -> constant expressions hoisted here;
//   gencode — the rewritten basic blocks ('b'(...) nodes);
//   allocas — list of alloca'd slot names (the collector's contents);
//   otyps   — hash of alloca slot name -> its declared type.
// NOTE(review): details inferred from structure — confirm against the
// genssa/ssa_transform definitions.
function il2ssa_genssa(code)
collector(add, get) {
   consts = mkhash(); otyps = mkhash();
   return [ /* A list: [consts; gencode; allocas; otyps] */
      // consts:
      consts;
      // gencode:
      visit:ir2(ircode: code) {
         // Rebuild each basic block, appending a synthetic "__EXIT" use node
         // carrying the terminator's uses, so the SSA pass sees them.
         deep irbblock {
            bblock -> {
               <tuses:tl> = t;
               'b'(name, c::[[%Sm<<(name,"__EXIT");'use'(@tuses)]], tl)
            }
         };
         // A terminator contributes its variable uses plus its branch labels.
         once irterm : forall {
            il2ssa_term_uses(node):il2ssa_term_labels(node);
         };
         once irexpr {
            // load through a named pointer -> 'load'; otherwise a plain use.
            load -> { p = il2ssa_irval_var(ptr);
                      if(p) 'load'(p) else 'use'() }
            // store: constants on the RHS are hoisted into `consts` under a
            // fresh name so genssa only ever stores named values.
          | store -> { d1 = il2ssa_irval_var(ptr);
                       e0 = il2ssa_irval_var(e);
                       e1 = if(e0) e0 else {
                          nm = gensym();
                          ohashput(consts, nm, e);
                          return nm
                       };
                       if(d1) {
                          'store'(d1,e1)
                       } else { 'use'(@il2ssa_use(node)) }
            }
            // alloca: record the slot (name and type) and seed it with
            // an undefined value.
          | alloca -> {
               add(caar(stack));
               ohashput(otyps, caar(stack), t);
               return 'store'(caar(stack),'_UNDEF')
            }
          | else -> 'use'(@il2ssa_use(node))
         };
         // Ensure every pair is named; invent a name when missing.
         deep irpair : {
            if(nm) node else [gensym(); e]
         };
      };
      // allocas:
      get();
      // otyps:
      otyps]
}
// Runs the SSA transformation: builds the genssa form, discards allocas
// whose address escapes (is used as a value anywhere), and hands the rest
// to ssa_transform for promotion to SSA registers.
// Returns [consts; nallocas; nssa; otyps].
function il2ssa_dossa(code) {
   // Prepare the genssa form
   <[consts; gencode; allocas; otyps]> = il2ssa_genssa(code);
   // Refine the list of allocas
   noalloc = mkhash();
   donotalloc(ns) = {
      iter n in ns do if n {
         ohashput(noalloc, n, true)
      }
   };
   // An alloca slot stays promotable only if it is ever accessed via
   // load/store-through-pointer; any other use of its name disqualifies it.
   visit:ir2(ircode: code) {
      once irexpr {
         | load -> []
         | store -> donotalloc([il2ssa_irval_var(e)])
         | else -> donotalloc(il2ssa_use(node))
      }
   };
   // Perform the genssa mumbo-jumbo
   nallocas = filter a in allocas as not(ohashget(noalloc, a));
   nssa = ssa_transform(gencode, nallocas);
   return [consts; nallocas; nssa; otyps]
}
// Full SSA pipeline: runs il2ssa_dossa, then rewrites the original ir2 code
// using the SSA results — inserting phi nodes, renaming variables through
// the SSA value map, dropping promoted alloca/load/store instructions, and
// substituting hoisted constants back in.
// Returns [['loops'(...)] ; rewritten-code].
// NOTE(review): behavior inferred from structure; confirm against
// ssa_transform / ssa_find_loops.
function il2ssa_fullssa(code)
{
   <[consts; nallocas; nssa; otyps]> = il2ssa_dossa(code);
   <[vmap;ngen;DT]> = nssa;
   loops = ssa_find_loops(ngen, DT);
   origtype(nm) = ohashget(otyps, nm);
   // Decode the genssa representation: fill the hashtables
   genh = mkhash();
   // Follow vmap chains to the final renamed name.
   remap0(n) = {
      do loop(x = ohashget(vmap, n), p = n) {
         if(x) return loop(ohashget(vmap, x), x)
         else return p }};
   // Remap a name: hoisted constants are substituted back inline,
   // everything else becomes a 'var' reference to its final name.
   remap(n) = {
      n0 = remap0(n);
      chk = ohashget(consts, n0);
      if(chk) chk else 'var'(n0)
   };
   // Membership tests for the promoted alloca slots.
   nah = mkhash(); iter a in nallocas do ohashput(nah, a, a);
   nahp(n) = ohashget(nah, n);
   nahv(v) = {
      match v with
         var(nm) -> nahp(nm)
       | else -> []
   };
   // Track which SSA names survived in the generated code, so stale phi
   // entries can be filtered out below.
   still = mkhash();
   stillthere(k) = ohashget(still, k);
   markpresense(k) = ohashput(still, k, k);
   // Walk the generated SSA code: record surviving names and collect the
   // phi nodes (per basic block) to be inserted into the rewritten ir2.
   visit:genssa(code: ngen) {
      deep bblock { b -> iter ops do ops(name) };
      deep oppair : fun(bb) { markpresense(name); op(bb, name) };
      deep iop {
         phi -> fun(bb, tgt) {
            nphi = [tgt;'phi'(origtype(orig), @zip(map vals do remap(vals), prevs))];
            ohashput(genh, bb,
                     ohashget(genh, bb)::[nphi]) }
       | else -> fun(bb, tgt) {[]} }};
   // Apply the decoded remapping and phi insertion:
   ret = visit:ir2(ircode: code) {
      // Prepend this block's surviving phi nodes to its instruction list.
      deep irbblock {
         bblock ->
            mk:node(c = ohashget(genh, name) :: map append [k;v] in c do
                        if (v && stillthere(k)) [[k;v]] else [])
      };
      // Remove promoted allocas and their loads/stores.
      deep irexpr {
         alloca -> if(nahp(caar(stack))) [] else node
       | load -> if(nahv(ptr)) [] else node
       | store -> if(nahv(ptr)) [] else node
       | else -> node
      };
      // Rename every variable reference through the SSA map.
      deep irval {
         var -> {
            chk = remap(nm);
            if(chk) chk else node
         }
       | else -> node
      };
   };
   return [['loops'(cadr(loops))];ret]
} | {
"pile_set_name": "Github"
} |
// Sample event data
module.exports ={
"session": {
"new": true,
"sessionId": "amzn1.echo-api.session.[unique-value-here]",
"attributes": {},
"user": {
"userId": "amzn1.ask.account.[unique-value-here]"
},
"application": {
"applicationId": "amzn1.ask.skill.[unique-value-here]"
}
},
"request": {
"locale": "en-US",
"timestamp": "2016-10-27T18:21:44Z",
"type": "IntentRequest",
"requestId": "amzn1.echo-api.request.[unique-value-here]",
"dialogState": "COMPLETED",
"intent": {
"slots": {
"toCity": {
"name": "toCity",
"confirmationStatus": "NONE",
"value": "Portland"
},
"travelDate": {
"name": "travelDate",
"confirmationStatus": "NONE",
"value": "2017-04-21"
},
"fromCity": {
"name": "fromCity",
"confirmationStatus": "NONE",
"value": "Seattle"
},
"activity": {
"name": "activity",
"confirmationStatus": "NONE",
"value": "hiking"
}
},
"name": "PlanMyTrip"
}
},
"version": "1.0",
"sessionAttributes": {},
};
| {
"pile_set_name": "Github"
} |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#import "GPBCodedInputStream_PackagePrivate.h"
#import "GPBDictionary_PackagePrivate.h"
#import "GPBMessage_PackagePrivate.h"
#import "GPBUnknownFieldSet_PackagePrivate.h"
#import "GPBUtilities_PackagePrivate.h"
#import "GPBWireFormat.h"
// Exception name, userInfo key, and NSError domain. GPBNSStringifySymbol
// makes the string literal match the symbol name exactly.
NSString *const GPBCodedInputStreamException =
    GPBNSStringifySymbol(GPBCodedInputStreamException);
NSString *const GPBCodedInputStreamUnderlyingErrorKey =
    GPBNSStringifySymbol(GPBCodedInputStreamUnderlyingErrorKey);
NSString *const GPBCodedInputStreamErrorDomain =
    GPBNSStringifySymbol(GPBCodedInputStreamErrorDomain);

// Maximum nesting depth for groups/messages before raising; kept equal to
// the Java and C++ implementations. Matching:
// https://github.com/google/protobuf/blob/master/java/core/src/main/java/com/google/protobuf/CodedInputStream.java#L62
// private static final int DEFAULT_RECURSION_LIMIT = 100;
// https://github.com/google/protobuf/blob/master/src/google/protobuf/io/coded_stream.cc#L86
// int CodedInputStream::default_recursion_limit_ = 100;
static const NSUInteger kDefaultRecursionLimit = 100;
// Builds an NSError in GPBCodedInputStreamErrorDomain with the given code
// (attaching the reason under GPBErrorReasonKey when non-empty), wraps it
// under GPBCodedInputStreamUnderlyingErrorKey, and raises it as a
// GPBCodedInputStreamException. Never returns normally.
static void RaiseException(NSInteger code, NSString *reason) {
  NSDictionary *errorInfo = nil;
  if ([reason length]) {
    errorInfo = @{ GPBErrorReasonKey: reason };
  }
  NSError *error = [NSError errorWithDomain:GPBCodedInputStreamErrorDomain
                                       code:code
                                   userInfo:errorInfo];
  NSDictionary *exceptionInfo =
      @{ GPBCodedInputStreamUnderlyingErrorKey: error };
  [[[NSException alloc] initWithName:GPBCodedInputStreamException
                              reason:reason
                            userInfo:exceptionInfo] raise];
}
// Raises if another level of message/group nesting would exceed
// kDefaultRecursionLimit. Callers increment recursionDepth after this check.
static void CheckRecursionLimit(GPBCodedInputStreamState *state) {
  if (state->recursionDepth >= kDefaultRecursionLimit) {
    RaiseException(GPBCodedInputStreamErrorRecursionDepthExceeded, nil);
  }
}
// Verifies that |size| more bytes can be consumed at the current position,
// raising via RaiseException() if the read would run past the buffer or the
// current pushed limit (in the latter case the position is first advanced
// to the limit so the caller's state is consistent).
static void CheckSize(GPBCodedInputStreamState *state, size_t size) {
  // Compare without computing bufferPos + size first: that sum can wrap
  // size_t for a bogus huge wire length, which would defeat the check.
  // bufferPos <= bufferSize is an invariant, so the subtraction is safe.
  if (size > state->bufferSize - state->bufferPos) {
    RaiseException(GPBCodedInputStreamErrorInvalidSize, nil);
  }
  size_t newSize = state->bufferPos + size;
  if (newSize > state->currentLimit) {
    // Fast forward to end of currentLimit;
    state->bufferPos = state->currentLimit;
    RaiseException(GPBCodedInputStreamErrorSubsectionLimitReached, nil);
  }
}
// Consumes and returns one byte, bounds-checked against the buffer and the
// current limit.
static int8_t ReadRawByte(GPBCodedInputStreamState *state) {
  CheckSize(state, sizeof(int8_t));
  return ((int8_t *)state->bytes)[state->bufferPos++];
}
// Consumes 4 bytes and returns them as a little-endian 32-bit value.
static int32_t ReadRawLittleEndian32(GPBCodedInputStreamState *state) {
  CheckSize(state, sizeof(int32_t));
  int32_t value = OSReadLittleInt32(state->bytes, state->bufferPos);
  state->bufferPos += sizeof(int32_t);
  return value;
}
// Consumes 8 bytes and returns them as a little-endian 64-bit value.
static int64_t ReadRawLittleEndian64(GPBCodedInputStreamState *state) {
  CheckSize(state, sizeof(int64_t));
  int64_t value = OSReadLittleInt64(state->bytes, state->bufferPos);
  state->bufferPos += sizeof(int64_t);
  return value;
}
// Decodes a base-128 varint: each byte contributes its low 7 bits, the high
// bit marks continuation. Raises GPBCodedInputStreamErrorInvalidVarInt if
// no terminating byte appears within 64 bits of payload (10 bytes).
static int64_t ReadRawVarint64(GPBCodedInputStreamState *state) {
  int32_t shift = 0;
  int64_t result = 0;
  while (shift < 64) {
    int8_t b = ReadRawByte(state);
    result |= (int64_t)(b & 0x7F) << shift;
    if ((b & 0x80) == 0) {
      return result;
    }
    shift += 7;
  }
  RaiseException(GPBCodedInputStreamErrorInvalidVarInt, @"Invalid VarInt64");
  return 0;  // Unreachable; RaiseException never returns.
}
// Decodes a full varint from the wire and truncates it to its low 32 bits
// (the wire format always consumes the complete varint).
static int32_t ReadRawVarint32(GPBCodedInputStreamState *state) {
  return (int32_t)ReadRawVarint64(state);
}
// Advances the read position by |size| bytes after bounds-checking.
static void SkipRawData(GPBCodedInputStreamState *state, size_t size) {
  CheckSize(state, size);
  state->bufferPos += size;
}
// Reads a double: 8 little-endian bytes, bits reinterpreted as a double.
double GPBCodedInputStreamReadDouble(GPBCodedInputStreamState *state) {
  return GPBConvertInt64ToDouble(ReadRawLittleEndian64(state));
}
// Reads a float: 4 little-endian bytes, bits reinterpreted as a float.
float GPBCodedInputStreamReadFloat(GPBCodedInputStreamState *state) {
  return GPBConvertInt32ToFloat(ReadRawLittleEndian32(state));
}
// Reads a uint64 field (plain varint, reinterpreted unsigned).
uint64_t GPBCodedInputStreamReadUInt64(GPBCodedInputStreamState *state) {
  return (uint64_t)ReadRawVarint64(state);
}
// Reads a uint32 field (plain varint, reinterpreted unsigned).
uint32_t GPBCodedInputStreamReadUInt32(GPBCodedInputStreamState *state) {
  return (uint32_t)ReadRawVarint32(state);
}
// Reads an int64 field (plain varint).
int64_t GPBCodedInputStreamReadInt64(GPBCodedInputStreamState *state) {
  return ReadRawVarint64(state);
}
// Reads an int32 field (plain varint, truncated to 32 bits).
int32_t GPBCodedInputStreamReadInt32(GPBCodedInputStreamState *state) {
  return ReadRawVarint32(state);
}
// Reads a fixed64 field (8 little-endian bytes, unsigned).
uint64_t GPBCodedInputStreamReadFixed64(GPBCodedInputStreamState *state) {
  return (uint64_t)ReadRawLittleEndian64(state);
}
// Reads a fixed32 field (4 little-endian bytes, unsigned).
uint32_t GPBCodedInputStreamReadFixed32(GPBCodedInputStreamState *state) {
  return (uint32_t)ReadRawLittleEndian32(state);
}
// Reads an enum field (wire-encoded as a plain varint).
int32_t GPBCodedInputStreamReadEnum(GPBCodedInputStreamState *state) {
  return ReadRawVarint32(state);
}
// Reads an sfixed32 field (4 little-endian bytes, signed).
int32_t GPBCodedInputStreamReadSFixed32(GPBCodedInputStreamState *state) {
  return ReadRawLittleEndian32(state);
}
// Reads an sfixed64 field (8 little-endian bytes, signed).
int64_t GPBCodedInputStreamReadSFixed64(GPBCodedInputStreamState *state) {
  return ReadRawLittleEndian64(state);
}
// Reads an sint32 field: varint followed by zig-zag decode.
int32_t GPBCodedInputStreamReadSInt32(GPBCodedInputStreamState *state) {
  return GPBDecodeZigZag32(ReadRawVarint32(state));
}
// Reads an sint64 field: varint followed by zig-zag decode.
int64_t GPBCodedInputStreamReadSInt64(GPBCodedInputStreamState *state) {
  return GPBDecodeZigZag64(ReadRawVarint64(state));
}
// Reads a bool field: any non-zero varint is YES.
BOOL GPBCodedInputStreamReadBool(GPBCodedInputStreamState *state) {
  return ReadRawVarint32(state) != 0;
}
// Reads the next tag varint (field number | wire type). Returns 0 at end of
// input/limit; otherwise validates the tag and caches it in state->lastTag
// for later GPBCodedInputStreamCheckLastTagWas() calls. Raises on a bad
// wire type or a zero field number.
int32_t GPBCodedInputStreamReadTag(GPBCodedInputStreamState *state) {
  if (GPBCodedInputStreamIsAtEnd(state)) {
    state->lastTag = 0;
    return 0;
  }
  state->lastTag = ReadRawVarint32(state);
  // Tags have to include a valid wireformat.
  if (!GPBWireFormatIsValidTag(state->lastTag)) {
    RaiseException(GPBCodedInputStreamErrorInvalidTag,
                   @"Invalid wireformat in tag.");
  }
  // Zero is not a valid field number.
  if (GPBWireFormatGetTagFieldNumber(state->lastTag) == 0) {
    RaiseException(GPBCodedInputStreamErrorInvalidTag,
                   @"A zero field number on the wire is invalid.");
  }
  return state->lastTag;
}
// Reads a length-delimited string field as a retained (+1) NSString,
// validating UTF-8. Raises on a corrupt length or invalid UTF-8.
NSString *GPBCodedInputStreamReadRetainedString(
    GPBCodedInputStreamState *state) {
  int32_t size = ReadRawVarint32(state);
  if (size < 0) {
    // Wire lengths are never negative; without this guard the value would
    // be converted to a huge size_t below. ReadRetainedBytes has the
    // equivalent negative-length check.
    RaiseException(GPBCodedInputStreamErrorInvalidSize, nil);
  }
  NSString *result;
  if (size == 0) {
    result = @"";
  } else {
    CheckSize(state, size);
    result = [[NSString alloc] initWithBytes:&state->bytes[state->bufferPos]
                                      length:size
                                    encoding:NSUTF8StringEncoding];
    state->bufferPos += size;
    if (!result) {
#ifdef DEBUG
      // https://developers.google.com/protocol-buffers/docs/proto#scalar
      NSLog(@"UTF-8 failure, is some field type 'string' when it should be "
            @"'bytes'?");
#endif
      RaiseException(GPBCodedInputStreamErrorInvalidUTF8, nil);
    }
  }
  return result;
}
// Reads a length-delimited bytes field into a new retained (+1) NSData.
// NOTE(review): a negative decoded length returns nil instead of raising —
// callers must handle a nil result.
NSData *GPBCodedInputStreamReadRetainedBytes(GPBCodedInputStreamState *state) {
  int32_t size = ReadRawVarint32(state);
  if (size < 0) return nil;
  CheckSize(state, size);
  NSData *result = [[NSData alloc] initWithBytes:state->bytes + state->bufferPos
                                          length:size];
  state->bufferPos += size;
  return result;
}
// Like GPBCodedInputStreamReadRetainedBytes, but wraps the stream's buffer
// directly (no copy). The returned NSData is only valid while the backing
// buffer is alive, since freeWhenDone is NO.
NSData *GPBCodedInputStreamReadRetainedBytesNoCopy(
    GPBCodedInputStreamState *state) {
  int32_t size = ReadRawVarint32(state);
  if (size < 0) return nil;
  CheckSize(state, size);
  // Cast is safe because freeWhenDone is NO.
  NSData *result = [[NSData alloc]
      initWithBytesNoCopy:(void *)(state->bytes + state->bufferPos)
                   length:size
             freeWhenDone:NO];
  state->bufferPos += size;
  return result;
}
// Installs a new section limit of |byteLimit| bytes from the current
// position, returning the previous (absolute) limit so the caller can
// restore it with GPBCodedInputStreamPopLimit(). Raises if the new limit
// would extend past the current one.
size_t GPBCodedInputStreamPushLimit(GPBCodedInputStreamState *state,
                                    size_t byteLimit) {
  byteLimit += state->bufferPos;
  size_t oldLimit = state->currentLimit;
  // The first clause detects size_t wraparound of the addition above (a
  // bogus huge length), which would otherwise install a tiny bogus limit.
  if (byteLimit < state->bufferPos || byteLimit > oldLimit) {
    RaiseException(GPBCodedInputStreamErrorInvalidSubsectionLimit, nil);
  }
  state->currentLimit = byteLimit;
  return oldLimit;
}
// Restores a limit previously returned by GPBCodedInputStreamPushLimit().
void GPBCodedInputStreamPopLimit(GPBCodedInputStreamState *state,
                                 size_t oldLimit) {
  state->currentLimit = oldLimit;
}
// Number of bytes remaining before the current section limit is reached.
size_t GPBCodedInputStreamBytesUntilLimit(GPBCodedInputStreamState *state) {
  return state->currentLimit - state->bufferPos;
}
// YES when the position has reached either the end of the buffer or the
// current pushed limit, whichever comes first.
BOOL GPBCodedInputStreamIsAtEnd(GPBCodedInputStreamState *state) {
  return (state->bufferPos == state->bufferSize) ||
         (state->bufferPos == state->currentLimit);
}
// Raises unless the most recently read tag equals |value| (used to verify
// an end-group tag or end-of-message sentinel 0).
void GPBCodedInputStreamCheckLastTagWas(GPBCodedInputStreamState *state,
                                        int32_t value) {
  if (state->lastTag != value) {
    RaiseException(GPBCodedInputStreamErrorInvalidTag, @"Unexpected tag read");
  }
}
@implementation GPBCodedInputStream

// Convenience factory; autoreleased (this file uses manual retain/release).
+ (instancetype)streamWithData:(NSData *)data {
  return [[[self alloc] initWithData:data] autorelease];
}

// Designated initializer: retains |data| and points the C-level state at its
// bytes; the initial limit is the whole buffer.
- (instancetype)initWithData:(NSData *)data {
  if ((self = [super init])) {
#ifdef DEBUG
    NSCAssert([self class] == [GPBCodedInputStream class],
              @"Subclassing of GPBCodedInputStream is not allowed.");
#endif
    buffer_ = [data retain];
    state_.bytes = (const uint8_t *)[data bytes];
    state_.bufferSize = [data length];
    state_.currentLimit = state_.bufferSize;
  }
  return self;
}

- (void)dealloc {
  [buffer_ release];
  [super dealloc];
}

// Direct access is used for speed, to avoid even internally declaring things
// read/write, etc. The warning is enabled in the project to ensure code calling
// protos can turn on -Wdirect-ivar-access without issues.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdirect-ivar-access"

// The methods below are thin Objective-C wrappers over the C-level
// GPBCodedInputStream* functions operating on state_.

- (int32_t)readTag {
  return GPBCodedInputStreamReadTag(&state_);
}

- (void)checkLastTagWas:(int32_t)value {
  GPBCodedInputStreamCheckLastTagWas(&state_, value);
}

// Skips the payload of one field identified by |tag|. Returns NO for an
// end-group tag (the caller's group is finished), YES otherwise.
- (BOOL)skipField:(int32_t)tag {
  NSAssert(GPBWireFormatIsValidTag(tag), @"Invalid tag");
  switch (GPBWireFormatGetTagWireType(tag)) {
    case GPBWireFormatVarint:
      GPBCodedInputStreamReadInt32(&state_);
      return YES;
    case GPBWireFormatFixed64:
      SkipRawData(&state_, sizeof(int64_t));
      return YES;
    case GPBWireFormatLengthDelimited:
      SkipRawData(&state_, ReadRawVarint32(&state_));
      return YES;
    case GPBWireFormatStartGroup:
      // Recursively skip the nested group, then require its matching
      // end-group tag.
      [self skipMessage];
      GPBCodedInputStreamCheckLastTagWas(
          &state_, GPBWireFormatMakeTag(GPBWireFormatGetTagFieldNumber(tag),
                                        GPBWireFormatEndGroup));
      return YES;
    case GPBWireFormatEndGroup:
      return NO;
    case GPBWireFormatFixed32:
      SkipRawData(&state_, sizeof(int32_t));
      return YES;
  }
}

// Skips fields until end of input or an end-group tag.
- (void)skipMessage {
  while (YES) {
    int32_t tag = GPBCodedInputStreamReadTag(&state_);
    if (tag == 0 || ![self skipField:tag]) {
      return;
    }
  }
}

- (BOOL)isAtEnd {
  return GPBCodedInputStreamIsAtEnd(&state_);
}

- (size_t)position {
  return state_.bufferPos;
}

- (size_t)pushLimit:(size_t)byteLimit {
  return GPBCodedInputStreamPushLimit(&state_, byteLimit);
}

- (void)popLimit:(size_t)oldLimit {
  GPBCodedInputStreamPopLimit(&state_, oldLimit);
}

- (double)readDouble {
  return GPBCodedInputStreamReadDouble(&state_);
}

- (float)readFloat {
  return GPBCodedInputStreamReadFloat(&state_);
}

- (uint64_t)readUInt64 {
  return GPBCodedInputStreamReadUInt64(&state_);
}

- (int64_t)readInt64 {
  return GPBCodedInputStreamReadInt64(&state_);
}

- (int32_t)readInt32 {
  return GPBCodedInputStreamReadInt32(&state_);
}

- (uint64_t)readFixed64 {
  return GPBCodedInputStreamReadFixed64(&state_);
}

- (uint32_t)readFixed32 {
  return GPBCodedInputStreamReadFixed32(&state_);
}

- (BOOL)readBool {
  return GPBCodedInputStreamReadBool(&state_);
}

- (NSString *)readString {
  return [GPBCodedInputStreamReadRetainedString(&state_) autorelease];
}

// Reads a group field: recursively merges into |message| until the matching
// end-group tag, tracking recursion depth against the limit.
- (void)readGroup:(int32_t)fieldNumber
              message:(GPBMessage *)message
    extensionRegistry:(GPBExtensionRegistry *)extensionRegistry {
  CheckRecursionLimit(&state_);
  ++state_.recursionDepth;
  [message mergeFromCodedInputStream:self extensionRegistry:extensionRegistry];
  GPBCodedInputStreamCheckLastTagWas(
      &state_, GPBWireFormatMakeTag(fieldNumber, GPBWireFormatEndGroup));
  --state_.recursionDepth;
}

// Same as readGroup, but for an unknown field set (no extension registry).
- (void)readUnknownGroup:(int32_t)fieldNumber
                 message:(GPBUnknownFieldSet *)message {
  CheckRecursionLimit(&state_);
  ++state_.recursionDepth;
  [message mergeFromCodedInputStream:self];
  GPBCodedInputStreamCheckLastTagWas(
      &state_, GPBWireFormatMakeTag(fieldNumber, GPBWireFormatEndGroup));
  --state_.recursionDepth;
}

// Reads a length-delimited embedded message: pushes a limit for its length,
// merges into |message|, requires the end sentinel (tag 0), pops the limit.
- (void)readMessage:(GPBMessage *)message
    extensionRegistry:(GPBExtensionRegistry *)extensionRegistry {
  CheckRecursionLimit(&state_);
  int32_t length = ReadRawVarint32(&state_);
  size_t oldLimit = GPBCodedInputStreamPushLimit(&state_, length);
  ++state_.recursionDepth;
  [message mergeFromCodedInputStream:self extensionRegistry:extensionRegistry];
  GPBCodedInputStreamCheckLastTagWas(&state_, 0);
  --state_.recursionDepth;
  GPBCodedInputStreamPopLimit(&state_, oldLimit);
}

// Reads one map entry (a length-delimited sub-message) into |mapDictionary|.
- (void)readMapEntry:(id)mapDictionary
    extensionRegistry:(GPBExtensionRegistry *)extensionRegistry
                field:(GPBFieldDescriptor *)field
        parentMessage:(GPBMessage *)parentMessage {
  CheckRecursionLimit(&state_);
  int32_t length = ReadRawVarint32(&state_);
  size_t oldLimit = GPBCodedInputStreamPushLimit(&state_, length);
  ++state_.recursionDepth;
  GPBDictionaryReadEntry(mapDictionary, self, extensionRegistry, field,
                         parentMessage);
  GPBCodedInputStreamCheckLastTagWas(&state_, 0);
  --state_.recursionDepth;
  GPBCodedInputStreamPopLimit(&state_, oldLimit);
}

- (NSData *)readBytes {
  return [GPBCodedInputStreamReadRetainedBytes(&state_) autorelease];
}

- (uint32_t)readUInt32 {
  return GPBCodedInputStreamReadUInt32(&state_);
}

- (int32_t)readEnum {
  return GPBCodedInputStreamReadEnum(&state_);
}

- (int32_t)readSFixed32 {
  return GPBCodedInputStreamReadSFixed32(&state_);
}

- (int64_t)readSFixed64 {
  return GPBCodedInputStreamReadSFixed64(&state_);
}

- (int32_t)readSInt32 {
  return GPBCodedInputStreamReadSInt32(&state_);
}

- (int64_t)readSInt64 {
  return GPBCodedInputStreamReadSInt64(&state_);
}

#pragma clang diagnostic pop

@end
| {
"pile_set_name": "Github"
} |
namespace HandyControl.Controls
{
    /// <summary>
    ///     Strategy interface for producing a Gravatar-style avatar
    ///     from a string identifier.
    /// </summary>
    public interface IGravatarGenerator
    {
        /// <summary>
        ///     Generates the avatar content for the given id.
        ///     The concrete return type is implementation-defined.
        /// </summary>
        object GetGravatar(string id);
    }
} | {
"pile_set_name": "Github"
} |
class ToolBarButtonStyle(Enum, IComparable, IFormattable, IConvertible):
    """Specifies the button style within a toolbar.

    Auto-generated IronPython stub for the .NET enum
    ``System.Windows.Forms.ToolBarButtonStyle``; bodies are placeholders.
    Values: DropDownButton (4), PushButton (1), Separator (3),
    ToggleButton (2).
    """

    def __eq__(self, *args):
        """x.__eq__(y) <==> x == y"""
        pass

    def __format__(self, *args):
        """__format__(formattable: IFormattable, format: str) -> str"""
        pass

    def __ge__(self, *args):
        pass

    def __gt__(self, *args):
        pass

    def __init__(self, *args):
        """x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
        pass

    def __le__(self, *args):
        pass

    def __lt__(self, *args):
        pass

    def __ne__(self, *args):
        pass

    def __reduce_ex__(self, *args):
        pass

    def __str__(self, *args):
        pass

    # Enum members (populated by the runtime; None only in this stub).
    DropDownButton = None   # 4: push button with a drop-down arrow
    PushButton = None       # 1: standard push button
    Separator = None        # 3: spacing separator
    ToggleButton = None     # 2: button that toggles pressed/unpressed
    value__ = None          # backing integer value of the enum instance
| {
"pile_set_name": "Github"
} |
/* Configuration space parsing helpers for virtio.
*
* The configuration is [type][len][... len bytes ...] fields.
*
* Copyright 2007 Rusty Russell, IBM Corporation.
* GPL v2 or later.
*/
#include <linux/err.h>
#include <linux/virtio.h>
#include <linux/virtio_config.h>
#include <linux/bug.h>
| {
"pile_set_name": "Github"
} |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.world;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.MapMaker;
import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.ints.Int2ReferenceLinkedOpenHashMap;
import it.unimi.dsi.fastutil.ints.Int2ReferenceMap;
import it.unimi.dsi.fastutil.ints.Int2ReferenceOpenHashMap;
import it.unimi.dsi.fastutil.ints.IntIterator;
import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
import it.unimi.dsi.fastutil.ints.IntSet;
import it.unimi.dsi.fastutil.objects.ObjectIterator;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.nbt.CompressedStreamTools;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.datafix.FixTypes;
import net.minecraft.world.DimensionType;
import net.minecraft.world.EnumDifficulty;
import net.minecraft.world.GameType;
import net.minecraft.world.MinecraftException;
import net.minecraft.world.ServerWorldEventHandler;
import net.minecraft.world.World;
import net.minecraft.world.WorldProvider;
import net.minecraft.world.WorldServer;
import net.minecraft.world.WorldSettings;
import net.minecraft.world.WorldType;
import net.minecraft.world.chunk.storage.AnvilSaveHandler;
import net.minecraft.world.storage.ISaveHandler;
import net.minecraft.world.storage.WorldInfo;
import org.spongepowered.api.GameState;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.event.SpongeEventFactory;
import org.spongepowered.api.event.world.UnloadWorldEvent;
import org.spongepowered.api.util.file.CopyFileVisitor;
import org.spongepowered.api.util.file.DeleteFileVisitor;
import org.spongepowered.api.util.file.ForwardingFileVisitor;
import org.spongepowered.api.world.DimensionTypes;
import org.spongepowered.api.world.SerializationBehaviors;
import org.spongepowered.api.world.WorldArchetype;
import org.spongepowered.api.world.storage.WorldProperties;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.bridge.server.MinecraftServerBridge;
import org.spongepowered.common.bridge.server.integrated.IntegratedServerBridge;
import org.spongepowered.common.bridge.world.DimensionTypeBridge;
import org.spongepowered.common.bridge.world.WorldBridge;
import org.spongepowered.common.bridge.world.WorldInfoBridge;
import org.spongepowered.common.bridge.world.WorldServerBridge;
import org.spongepowered.common.bridge.world.WorldServerBridge_AsyncLighting;
import org.spongepowered.common.bridge.world.WorldSettingsBridge;
import org.spongepowered.common.bridge.world.chunk.ChunkProviderServerBridge;
import org.spongepowered.common.config.SpongeConfig;
import org.spongepowered.common.config.type.GeneralConfigBase;
import org.spongepowered.common.config.type.GlobalConfig;
import org.spongepowered.common.data.util.DataUtil;
import org.spongepowered.common.event.tracking.IPhaseState;
import org.spongepowered.common.event.tracking.PhaseContext;
import org.spongepowered.common.event.tracking.PhaseTracker;
import org.spongepowered.common.event.tracking.phase.general.GeneralPhase;
import org.spongepowered.common.mixin.core.server.MinecraftServerAccessor;
import org.spongepowered.common.util.Constants;
import org.spongepowered.common.util.SpongeHooks;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.FileVisitResult;
import java.nio.file.FileVisitor;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Queue;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import javax.annotation.Nullable;
@SuppressWarnings("ConstantConditions")
public final class WorldManager {
// Legacy-world migration toggle; defaults on, disabled via -Dsponge.world.migrate_old=false.
private static final boolean MIGRATE_WORLDS = Boolean.parseBoolean(System.getProperty("sponge.world.migrate_old", "true"));
// Accepts only directories holding both a vanilla level.dat and a Sponge level_sponge.dat.
private static final DirectoryStream.Filter<Path> LEVEL_AND_SPONGE =
    entry -> Files.isDirectory(entry) && Files.exists(entry.resolve("level.dat")) && Files.exists(entry.resolve("level_sponge.dat"));
// Registries keyed by dimension-type id and by dimension id respectively.
private static final Int2ReferenceMap<DimensionType> dimensionTypeByTypeId = new Int2ReferenceOpenHashMap<>(3);
private static final Int2ReferenceMap<DimensionType> dimensionTypeByDimensionId = new Int2ReferenceOpenHashMap<>(3);
// On-disk root per dimension id.
private static final Int2ObjectMap<Path> dimensionPathByDimensionId = new Int2ObjectOpenHashMap<>(3);
// Currently loaded worlds by dimension id.
private static final Int2ObjectOpenHashMap<WorldServer> worldByDimensionId = new Int2ObjectOpenHashMap<>(3);
// World properties lookups by folder name and by world UUID.
private static final Map<String, WorldProperties> worldPropertiesByFolderName = new HashMap<>(3);
private static final Map<UUID, WorldProperties> worldPropertiesByWorldUuid = new HashMap<>(3);
private static final Int2ObjectMap<String> worldFolderByDimensionId = new Int2ObjectOpenHashMap<>();
// Bidirectional folder-name <-> UUID mapping.
private static final BiMap<String, UUID> worldUuidByFolderName = HashBiMap.create(3);
// Dimension ids already claimed (registered or observed in use).
private static final IntSet usedDimensionIds = new IntOpenHashSet();
// Weak map so unloaded worlds can be collected.
private static final Map<WorldServer, WorldServer> weakWorldByWorld = new MapMaker().weakKeys().weakValues().concurrencyLevel(1).makeMap();
// Worlds queued for unload on the next tick.
private static final Queue<WorldServer> unloadQueue = new ArrayDeque<>();
// Orders worlds by dimension id; a null second operand sorts after/before by the first's id.
private static final Comparator<WorldServer>
    WORLD_SERVER_COMPARATOR =
    (world1, world2) -> {
        final int world1DimId = ((WorldServerBridge) world1).bridge$getDimensionId();
        if (world2 == null) {
            return world1DimId;
        }
        final int world2DimId = ((WorldServerBridge) world2).bridge$getDimensionId();
        return world1DimId - world2DimId;
    };
// Guards one-time vanilla registration.
private static boolean isVanillaRegistered = false;
// Cursor for getNextFreeDimensionId's scan.
private static int lastUsedDimensionId = 0;
/**
 * Registers the three vanilla dimension types and dimensions
 * (overworld 0, nether -1, the end 1). Safe to call repeatedly;
 * only the first call performs any registration.
 */
public static void registerVanillaTypesAndDimensions() {
    if (isVanillaRegistered) {
        return;
    }
    WorldManager.registerDimensionType(0, DimensionType.OVERWORLD);
    WorldManager.registerDimensionType(-1, DimensionType.NETHER);
    WorldManager.registerDimensionType(1, DimensionType.THE_END);
    WorldManager.registerDimension(0, DimensionType.OVERWORLD);
    WorldManager.registerDimension(-1, DimensionType.NETHER);
    WorldManager.registerDimension(1, DimensionType.THE_END);
    isVanillaRegistered = true;
}
/**
 * Registers a dimension type under the next free type id (0..127).
 * Silently does nothing if no free id remains.
 *
 * @param type The dimension type, never null
 */
public static void registerDimensionType(final DimensionType type) {
    checkNotNull(type);
    final OptionalInt optNextDimensionTypeId = getNextFreeDimensionTypeId();
    optNextDimensionTypeId.ifPresent(integer -> registerDimensionType(integer, type));
}
/**
 * Registers a dimension type under an explicit type id.
 * An existing registration for the id is kept (putIfAbsent).
 *
 * @param dimensionTypeId The type id to register under
 * @param type The dimension type, never null
 */
public static void registerDimensionType(final int dimensionTypeId, final DimensionType type) {
    checkNotNull(type);
    dimensionTypeByTypeId.putIfAbsent(dimensionTypeId, type);
}
/**
 * Finds the next unused dimension-type id: one past the highest id
 * currently registered, but only while that stays below 127.
 *
 * @return The next free id, or empty when none is available
 */
private static OptionalInt getNextFreeDimensionTypeId() {
    boolean seenAny = false;
    int highest = Integer.MIN_VALUE;
    for (final IntIterator iterator = dimensionTypeByTypeId.keySet().iterator(); iterator.hasNext();) {
        final int candidate = iterator.nextInt();
        if (!seenAny || candidate > highest) {
            highest = candidate;
            seenAny = true;
        }
    }
    if (seenAny && highest < 127) {
        return OptionalInt.of(highest + 1);
    }
    return OptionalInt.empty();
}
/**
 * Scans upward from the last handed-out id for a dimension id that is
 * neither claimed nor backing a loaded world, remembers it as the new
 * scan start, and returns it.
 * Note: checkAvailable() marks occupied ids as used as a side effect.
 */
public static Integer getNextFreeDimensionId() {
    int next = lastUsedDimensionId;
    while (usedDimensionIds.contains(next) || !checkAvailable(next)) {
        next++;
    }
    return lastUsedDimensionId = next;
}
/**
 * Returns true when no world is loaded for the id. When a world IS
 * loaded, also records the id in usedDimensionIds so future scans
 * skip it without re-checking.
 */
private static boolean checkAvailable(final int dimensionId) {
    if (worldByDimensionId.containsKey(dimensionId)) {
        usedDimensionIds.add(dimensionId);
        return false;
    }
    return true;
}
/**
 * Maps a dimension id to an (already registered) dimension type.
 * Silently ignores unknown types and already-registered ids.
 * Non-negative ids are additionally marked as used.
 *
 * @param dimensionId The dimension id to register
 * @param type The dimension type, never null
 */
public static void registerDimension(final int dimensionId, final DimensionType type) {
    checkNotNull(type);
    if (!dimensionTypeByTypeId.containsValue(type)) {
        return;
    }
    final DimensionType previous = dimensionTypeByDimensionId.putIfAbsent(dimensionId, type);
    if (previous != null) {
        return;
    }
    if (dimensionId >= 0) {
        usedDimensionIds.add(dimensionId);
    }
}
/**
 * Removes the dimension-id registration.
 *
 * @param dimensionId The dimension id to unregister
 * @throws IllegalArgumentException When the id was never registered
 */
public static void unregisterDimension(final int dimensionId) {
    if (dimensionTypeByDimensionId.remove(dimensionId) == null) {
        throw new IllegalArgumentException("Failed to unregister dimension [" + dimensionId + "] as it is not registered!");
    }
}
/**
 * Registers the vanilla on-disk layout relative to the save root:
 * overworld at the root, nether in DIM-1, the end in DIM1.
 */
private static void registerVanillaDimensionPaths(final Path savePath) {
    WorldManager.registerDimensionPath(0, savePath);
    WorldManager.registerDimensionPath(-1, savePath.resolve("DIM-1"));
    WorldManager.registerDimensionPath(1, savePath.resolve("DIM1"));
}
/**
 * Records (or overwrites) the on-disk data root for a dimension id.
 *
 * @param dimensionId The dimension id
 * @param dimensionDataRoot The data directory, never null
 */
public static void registerDimensionPath(final int dimensionId, final Path dimensionDataRoot) {
    checkNotNull(dimensionDataRoot);
    dimensionPathByDimensionId.put(dimensionId, dimensionDataRoot);
}
/** Returns the registered data root for the id, or null when unknown. */
public static Path getDimensionPath(final int dimensionId) {
    return dimensionPathByDimensionId.get(dimensionId);
}
/** Looks up the dimension type registered for a dimension id. */
public static Optional<DimensionType> getDimensionType(final int dimensionId) {
    return Optional.ofNullable(dimensionTypeByDimensionId.get(dimensionId));
}
public static Optional<DimensionType> getDimensionTypeByTypeId(final int dimensionTypeId) {
return Optional.ofNullable(dimensionTypeByTypeId.get(dimensionTypeId));
}
/**
 * Finds the registered {@link DimensionType} whose provider class matches the given class.
 *
 * @param providerClass The WorldProvider implementation to look up, may not be null
 * @return The matching dimension type, or empty if none is registered for the class
 */
public static Optional<DimensionType> getDimensionType(final Class<? extends WorldProvider> providerClass) {
    checkNotNull(providerClass);
    for (final Object rawDimensionType : dimensionTypeByTypeId.values()) {
        final DimensionType candidate = (DimensionType) rawDimensionType;
        final Class<?> registeredClass = ((org.spongepowered.api.world.DimensionType) (Object) candidate).getDimensionClass();
        if (registeredClass.equals(providerClass)) {
            return Optional.of(candidate);
        }
    }
    return Optional.empty();
}
/**
 * Gets all dimension types registered by type id.
 *
 * @return A live view of the registered dimension types
 */
public static Collection<DimensionType> getDimensionTypes() {
return dimensionTypeByTypeId.values();
}
/**
 * Gets every dimension id registered against the given type.
 *
 * @param type The dimension type to match (by identity)
 * @return The matching dimension ids
 */
public static int[] getRegisteredDimensionIdsFor(final DimensionType type) {
return dimensionTypeByDimensionId.int2ReferenceEntrySet().stream()
.filter(entry -> entry.getValue() == type)
.mapToInt(Int2ReferenceMap.Entry::getIntKey)
.toArray();
}
/**
 * Gets every registered dimension id.
 *
 * @return All registered dimension ids
 */
public static int[] getRegisteredDimensionIds() {
return dimensionTypeByDimensionId.keySet().toIntArray();
}
/**
 * Gets the data folder for a dimension.
 *
 * <p>Note: the {@code dimensionType} parameter is currently unused; the lookup
 * is keyed purely on the dimension id.</p>
 *
 * @param dimensionType The dimension type (ignored)
 * @param dimensionId The dimension id
 * @return The registered folder, or null if none is known
 */
@Nullable
private static Path getWorldFolder(final DimensionType dimensionType, final int dimensionId) {
return dimensionPathByDimensionId.get(dimensionId);
}
/**
 * Checks whether a dimension id has a registered {@link DimensionType}.
 *
 * @param dimensionId The dimension id
 * @return True if the id is registered
 */
public static boolean isDimensionRegistered(final int dimensionId) {
return dimensionTypeByDimensionId.containsKey(dimensionId);
}
/**
 * Builds a copy of the dimension-id-to-type map ordered with the vanilla
 * dimensions first (0, -1, 1) followed by all remaining ids in ascending order.
 *
 * @return A new linked map with deterministic, vanilla-first iteration order
 */
private static Int2ReferenceMap<DimensionType> sortedDimensionMap() {
    final Int2ReferenceMap<DimensionType> copy = new Int2ReferenceOpenHashMap<>(dimensionTypeByDimensionId);
    final Int2ReferenceMap<DimensionType> newMap = new Int2ReferenceLinkedOpenHashMap<>();
    // Pull the vanilla dimensions out first, skipping any that are not registered.
    // Previously dimension 0 was inserted unconditionally, which could store a
    // null value and break callers that iterate the returned map.
    DimensionType removed = copy.remove(0);
    if (removed != null) {
        newMap.put(0, removed);
    }
    removed = copy.remove(-1);
    if (removed != null) {
        newMap.put(-1, removed);
    }
    removed = copy.remove(1);
    if (removed != null) {
        newMap.put(1, removed);
    }
    // Remaining ids follow in ascending numeric order.
    final int[] ids = copy.keySet().toIntArray();
    Arrays.sort(ids);
    for (final int id : ids) {
        newMap.put(id, copy.get(id));
    }
    return newMap;
}
/**
 * Gets a fast iterator over all loaded worlds keyed by dimension id.
 *
 * @return A fast entry iterator over the loaded worlds
 */
public static ObjectIterator<Int2ObjectMap.Entry<WorldServer>> worldsIterator() {
return worldByDimensionId.int2ObjectEntrySet().fastIterator();
}
/**
 * Gets all currently loaded worlds.
 *
 * @return A live view of the loaded worlds
 */
public static Collection<WorldServer> getWorlds() {
return worldByDimensionId.values();
}
/**
 * Gets the loaded world for a dimension id.
 *
 * @param dimensionId The dimension id
 * @return The loaded world, or empty if not loaded
 */
public static Optional<WorldServer> getWorldByDimensionId(final int dimensionId) {
return Optional.ofNullable(worldByDimensionId.get(dimensionId));
}
/**
 * Gets the world folder name registered for a dimension id.
 *
 * @param dimensionId The dimension id
 * @return The folder name, or empty if none is known
 */
public static Optional<String> getWorldFolderByDimensionId(final int dimensionId) {
return Optional.ofNullable(worldFolderByDimensionId.get(dimensionId));
}
/**
 * Gets the dimension ids of all currently loaded worlds.
 *
 * @return The loaded dimension ids
 */
public static int[] getLoadedWorldDimensionIds() {
return worldByDimensionId.keySet().toIntArray();
}
/**
 * Finds a loaded world by its name.
 *
 * @param worldName The name to match against each loaded world
 * @return The loaded world, or empty if no loaded world has that name
 */
public static Optional<WorldServer> getWorld(final String worldName) {
    for (final WorldServer candidate : getWorlds()) {
        final String name = ((org.spongepowered.api.world.World) candidate).getName();
        if (name.equals(worldName)) {
            return Optional.of(candidate);
        }
    }
    return Optional.empty();
}
/**
 * Caches the given properties in all lookup maps (by folder name, by world
 * unique id, and by dimension id) and marks its dimension id as used.
 *
 * @param properties The properties to register, may not be null
 */
private static void registerWorldProperties(final WorldProperties properties) {
checkNotNull(properties);
worldPropertiesByFolderName.put(properties.getWorldName(), properties);
worldPropertiesByWorldUuid.put(properties.getUniqueId(), properties);
worldUuidByFolderName.put(properties.getWorldName(), properties.getUniqueId());
// NOTE(review): assumes the dimension id has already been assigned; a null id
// could throw on unboxing below - confirm all callers set it beforehand.
final Integer dimensionId = ((WorldInfoBridge) properties).bridge$getDimensionId();
worldFolderByDimensionId.put(dimensionId, properties.getWorldName());
usedDimensionIds.add(dimensionId);
}
/**
 * Removes the given properties from all lookup maps, optionally releasing
 * its dimension id for reuse.
 *
 * @param properties The properties to unregister, may not be null
 * @param freeDimensionId Whether the dimension id should become available again
 */
public static void unregisterWorldProperties(final WorldProperties properties, final boolean freeDimensionId) {
checkNotNull(properties);
worldPropertiesByFolderName.remove(properties.getWorldName());
worldPropertiesByWorldUuid.remove(properties.getUniqueId());
worldUuidByFolderName.remove(properties.getWorldName());
final Integer dimensionId = ((WorldInfoBridge) properties).bridge$getDimensionId();
worldFolderByDimensionId.remove(dimensionId);
if (dimensionId != null && freeDimensionId) {
usedDimensionIds.remove(dimensionId.intValue());
}
}
// used by SpongeForge client
/**
 * Clears every world/dimension registry this manager holds and re-registers
 * the vanilla types and dimensions so client-side menus keep working.
 */
public static void unregisterAllWorldSettings() {
worldPropertiesByFolderName.clear();
worldPropertiesByWorldUuid.clear();
worldUuidByFolderName.clear();
worldByDimensionId.clear();
worldFolderByDimensionId.clear();
dimensionTypeByDimensionId.clear();
dimensionPathByDimensionId.clear();
usedDimensionIds.clear();
weakWorldByWorld.clear();
isVanillaRegistered = false;
// This is needed to ensure that DimensionType is usable by GuiListWorldSelection, which is only ever used when the server isn't running
registerVanillaTypesAndDimensions();
}
/**
 * Gets the cached properties for a world folder.
 *
 * @param folderName The folder name, may not be null
 * @return The properties, or empty if not cached
 */
public static Optional<WorldProperties> getWorldProperties(final String folderName) {
checkNotNull(folderName);
return Optional.ofNullable(worldPropertiesByFolderName.get(folderName));
}
/**
 * Gets all cached world properties.
 *
 * @return An unmodifiable view of all cached properties
 */
public static Collection<WorldProperties> getAllWorldProperties() {
return Collections.unmodifiableCollection(worldPropertiesByFolderName.values());
}
/**
 * Gets the cached properties for a world unique id.
 *
 * @param uuid The world unique id, may not be null
 * @return The properties, or empty if not cached
 */
public static Optional<WorldProperties> getWorldProperties(final UUID uuid) {
checkNotNull(uuid);
return Optional.ofNullable(worldPropertiesByWorldUuid.get(uuid));
}
/**
 * Gets the unique id recorded for a world folder.
 *
 * @param folderName The folder name, may not be null
 * @return The unique id, or empty if unknown
 */
public static Optional<UUID> getUuidForFolder(final String folderName) {
checkNotNull(folderName);
return Optional.ofNullable(worldUuidByFolderName.get(folderName));
}
/**
 * Gets the world folder recorded for a unique id.
 *
 * @param uuid The world unique id, may not be null
 * @return The folder name, or empty if unknown
 */
public static Optional<String> getFolderForUuid(final UUID uuid) {
checkNotNull(uuid);
return Optional.ofNullable(worldUuidByFolderName.inverse().get(uuid));
}
/**
 * Creates (or returns the existing) world properties for the given folder,
 * letting the dimension id be assigned automatically.
 *
 * @param folderName The world folder name, may not be null
 * @param archetype The archetype describing new properties, may not be null
 * @return The world properties
 */
public static WorldProperties createWorldProperties(final String folderName, final WorldArchetype archetype) {
return createWorldProperties(folderName, archetype, null);
}
/**
 * Creates (or returns already existing) {@link WorldProperties} for the given folder.
 *
 * <p>If the world is loaded or its properties are already cached, those are
 * returned unchanged. Otherwise world info is loaded from disk or freshly built
 * from the archetype, assigned a dimension id and unique id, registered, and
 * announced via a ConstructWorldPropertiesEvent.</p>
 *
 * @param folderName The world folder name, may not be null
 * @param archetype The archetype describing how to create new properties, may not be null
 * @param dimensionId An explicit dimension id to use, or null to auto-assign
 * @return The world properties
 */
@SuppressWarnings("ConstantConditions")
public static WorldProperties createWorldProperties(final String folderName, final WorldArchetype archetype, @Nullable final Integer dimensionId) {
checkNotNull(folderName);
checkNotNull(archetype);
// A loaded world wins: reuse its live properties.
final Optional<WorldServer> optWorldServer = getWorld(folderName);
if (optWorldServer.isPresent()) {
return ((org.spongepowered.api.world.World) optWorldServer.get()).getProperties();
}
// Already cached properties win next.
final Optional<WorldProperties> optWorldProperties = WorldManager.getWorldProperties(folderName);
if (optWorldProperties.isPresent()) {
return optWorldProperties.get();
}
final ISaveHandler saveHandler;
// Skip creating files on disk when the archetype says nothing should be serialized.
try (PhaseContext<?> ignore = GeneralPhase.State.SAVE_HANDLER_CREATION.createPhaseContext()
.createFiles(!archetype.getSerializationBehavior().equals(SerializationBehaviors.NONE))
.buildAndSwitch()) {
saveHandler = new AnvilSaveHandler(WorldManager.getCurrentSavesDirectory().get().toFile(), folderName, true,
((MinecraftServerAccessor) SpongeImpl.getServer()).accessor$getDataFixer());
}
WorldInfo worldInfo = saveHandler.loadWorldInfo();
if (worldInfo == null) {
worldInfo = new WorldInfo((WorldSettings) (Object) archetype, folderName);
// Don't want to randomize the seed if there is an existing save file!
if (archetype.isSeedRandomized()) {
((WorldProperties) worldInfo).setSeed(SpongeImpl.random.nextLong());
}
} else {
// DimensionType must be set before world config is created to get proper path
((WorldInfoBridge) worldInfo).bridge$setDimensionType(archetype.getDimensionType());
((WorldInfoBridge) worldInfo).bridge$createWorldConfig();
((WorldProperties) worldInfo).setGeneratorModifiers(archetype.getGeneratorModifiers());
}
setUuidOnProperties(getCurrentSavesDirectory().get(), (WorldProperties) worldInfo);
if (dimensionId != null) {
((WorldInfoBridge) worldInfo).bridge$setDimensionId(dimensionId);
} else if (((WorldInfoBridge) worldInfo).bridge$getDimensionId() == null
|| getWorldByDimensionId(((WorldInfoBridge) worldInfo).bridge$getDimensionId()).isPresent()) {
// Dimension id is unset, or already taken by a loaded world: assign a fresh one.
((WorldInfoBridge) worldInfo).bridge$setDimensionId(WorldManager.getNextFreeDimensionId());
}
((WorldProperties) worldInfo).setGeneratorType(archetype.getGeneratorType());
((WorldInfoBridge) worldInfo).bridge$getConfigAdapter().save();
registerWorldProperties((WorldProperties) worldInfo);
SpongeImpl.postEvent(SpongeEventFactory.createConstructWorldPropertiesEvent(Sponge.getCauseStackManager().getCurrentCause(), archetype,
(WorldProperties) worldInfo));
// Persist right away unless serialization is disabled entirely.
if (archetype.getSerializationBehavior() != SerializationBehaviors.NONE) {
saveHandler.saveWorldInfoWithPlayer(worldInfo, SpongeImpl.getServer().getPlayerList().getHostPlayerData());
}
return (WorldProperties) worldInfo;
}
/**
 * Saves the given properties to disk; a loaded world saves through its own save
 * handler and reloads its info, otherwise a temporary save handler is used.
 *
 * @param properties The properties to save, may not be null
 * @return Always true (the underlying save calls expose no failure signal)
 */
public static boolean saveWorldProperties(final WorldProperties properties) {
checkNotNull(properties);
final Optional<WorldServer> optWorldServer = getWorldByDimensionId(((WorldInfoBridge) properties).bridge$getDimensionId());
// If the World represented in the properties is still loaded, save the properties and have the World reload its info
if (optWorldServer.isPresent()) {
final WorldServer worldServer = optWorldServer.get();
worldServer.getSaveHandler().saveWorldInfo((WorldInfo) properties);
worldServer.getSaveHandler().loadWorldInfo();
} else {
new AnvilSaveHandler(WorldManager.getCurrentSavesDirectory().get().toFile(), properties.getWorldName(), true, ((MinecraftServerAccessor) SpongeImpl.getServer()).accessor$getDataFixer()).saveWorldInfo((WorldInfo) properties);
}
((WorldInfoBridge) properties).bridge$getConfigAdapter().save();
// No return values or exceptions so can only assume true.
return true;
}
/**
 * Drains the unload queue, unloading each queued world with config checks enabled.
 */
public static void unloadQueuedWorlds() {
    WorldServer server;
    // The poll loop already drains the queue, so no trailing clear() is needed;
    // clearing after the loop could also silently drop worlds queued concurrently
    // after the final poll without ever unloading them.
    while ((server = unloadQueue.poll()) != null) {
        unloadWorld(server, true, false);
    }
}
/**
 * Queues a world to be unloaded on the next call to {@link #unloadQueuedWorlds()}.
 *
 * @param worldServer The world to queue, may not be null
 */
public static void queueWorldToUnload(final WorldServer worldServer) {
    unloadQueue.add(checkNotNull(worldServer));
}
/**
 * Unloads the given world, firing an UnloadWorldEvent and saving the world
 * unless the server is shutting down.
 *
 * @param worldServer The world to unload, may not be null
 * @param checkConfig Whether keep-spawn-loaded config may veto the unload
 * @param isShuttingDown Whether the server is stopping (skips vetoes and saving)
 * @return True if the world was unloaded, false if vetoed or not tracked
 */
public static boolean unloadWorld(final WorldServer worldServer, final boolean checkConfig, final boolean isShuttingDown) {
checkNotNull(worldServer);
final MinecraftServer server = SpongeImpl.getServer();
// Likely leaked, don't want to drop leaked world data
if (!worldByDimensionId.containsValue(worldServer)) {
return false;
}
// Vanilla sometimes doesn't remove player entities from world first
if (!isShuttingDown) {
if (!worldServer.playerEntities.isEmpty()) {
return false;
}
// We only check config if base game wants to unload world. If mods/plugins say unload, we unload
if (checkConfig) {
if (((WorldProperties) worldServer.getWorldInfo()).doesKeepSpawnLoaded()) {
return false;
}
}
}
final SpongeConfig<GlobalConfig> globalConfigAdapter = SpongeImpl.getGlobalConfigAdapter();
try (final PhaseContext<?> ignored = GeneralPhase.State.WORLD_UNLOAD.createPhaseContext().source(worldServer)) {
ignored.buildAndSwitch();
final UnloadWorldEvent event = SpongeEventFactory.createUnloadWorldEvent(Sponge.getCauseStackManager().getCurrentCause(),
(org.spongepowered.api.world.World) worldServer);
final boolean isCancelled = SpongeImpl.postEvent(event);
// Plugins may cancel the unload, but not while the server is stopping.
if (!isShuttingDown && isCancelled) {
return false;
}
final WorldServerBridge mixinWorldServer = (WorldServerBridge) worldServer;
final int dimensionId = mixinWorldServer.bridge$getDimensionId();
SpongeImpl.getLogger().info("Unloading world [{}] ({}/{})", worldServer.getWorldInfo().getWorldName(),
((org.spongepowered.api.world.World) worldServer).getDimension().getType().getId(), dimensionId);
try {
try {
// Stop the lighting executor only when the world is going to unload - there's no point in running any more lighting tasks.
if (globalConfigAdapter.getConfig().getModules().useOptimizations() && globalConfigAdapter.getConfig().getOptimizations().useAsyncLighting()) {
((WorldServerBridge_AsyncLighting) worldServer).asyncLightingBridge$getLightingExecutor().shutdownNow();
}
} catch (Exception e) {
e.printStackTrace();
}
// Don't save if server is stopping to avoid duplicate saving.
if (!isShuttingDown) {
saveWorld(worldServer, true);
}
((WorldInfoBridge) worldServer.getWorldInfo()).bridge$getConfigAdapter().save();
} catch (Exception e) {
e.printStackTrace();
} finally {
// Always drop the world from the registries, even if saving failed.
worldByDimensionId.remove(dimensionId);
weakWorldByWorld.remove(worldServer);
((MinecraftServerBridge) server).bridge$removeWorldTickTimes(dimensionId);
reorderWorldsVanillaFirst();
}
}
return true;
}
/**
 * Saves all chunks of the given world unless its serialization behavior is NONE.
 *
 * @param worldServer The world to save
 * @param flush Whether to flush pending chunk writes afterwards
 * @throws MinecraftException If vanilla chunk saving fails
 */
public static void saveWorld(final WorldServer worldServer, final boolean flush) throws MinecraftException {
if (((WorldProperties) worldServer.getWorldInfo()).getSerializationBehavior() != SerializationBehaviors.NONE) {
worldServer.saveAllChunks(true, null);
}
if (flush) {
worldServer.flush();
}
}
/**
 * Loads (or returns the already loaded) world with the given unique id.
 *
 * @param uuid The world unique id, may not be null
 * @return The loaded world, or empty if the id is unknown or loading failed
 */
public static Optional<WorldServer> loadWorld(final UUID uuid) {
checkNotNull(uuid);
// If someone tries to load loaded world, return it
final Optional<org.spongepowered.api.world.World> optWorld = Sponge.getServer().getWorld(uuid);
if (optWorld.isPresent()) {
return Optional.of((WorldServer) optWorld.get());
}
// Check if we even know of this UUID's folder
final String worldFolder = worldUuidByFolderName.inverse().get(uuid);
// We don't know of this UUID at all.
if (worldFolder == null) {
return Optional.empty();
}
return loadWorld(worldFolder, null);
}
/**
 * Loads (or returns the already loaded) world for the given folder name.
 *
 * @param worldName The world folder name, may not be null
 * @return The loaded world, or empty if loading failed
 */
public static Optional<WorldServer> loadWorld(final String worldName) {
checkNotNull(worldName);
return loadWorld(worldName, null);
}
/**
 * Loads (or returns the already loaded) world described by the given properties.
 *
 * @param properties The properties of the world to load, may not be null
 * @return The loaded world, or empty if loading failed
 */
public static Optional<WorldServer> loadWorld(final WorldProperties properties) {
checkNotNull(properties);
return loadWorld(properties.getWorldName(), properties);
}
/**
 * Core world-loading routine: resolves or loads the world's properties,
 * assigns ids, registers the dimension and path, then constructs the world.
 *
 * @param worldName The world folder name, may not be null
 * @param properties Known properties, or null to load them from disk
 * @return The loaded world, or empty when disabled, missing, or multi-world is off
 */
private static Optional<WorldServer> loadWorld(final String worldName, @Nullable WorldProperties properties) {
checkNotNull(worldName);
final Path currentSavesDir = WorldManager.getCurrentSavesDirectory().orElseThrow(() -> new IllegalStateException("Attempt "
+ "made to load world too early!"));
final MinecraftServer server = SpongeImpl.getServer();
final Optional<WorldServer> optExistingWorldServer = getWorld(worldName);
if (optExistingWorldServer.isPresent()) {
return optExistingWorldServer;
}
if (!server.getAllowNether()) {
SpongeImpl.getLogger().error("Unable to load world [{}]. Multi-world is disabled via [allow-nether] in [server.properties].", worldName);
return Optional.empty();
}
// If the world has a world properties but there is no serialization, then we won't have a world directory.
// However, the WorldProperties might be null because we just weren't supplied them. If we weren't supplied
// them, then we try to get the properties from storage, which will mean that something is serialized and
// therefore the serialization behavior will not be NONE.
final boolean filesShouldExist = properties == null || properties.getSerializationBehavior() != SerializationBehaviors.NONE;
final Path worldFolder = currentSavesDir.resolve(worldName);
if (!Files.isDirectory(worldFolder) && filesShouldExist) {
SpongeImpl.getLogger().error("Unable to load world [{}]. We cannot find its folder under [{}].", worldFolder, currentSavesDir);
return Optional.empty();
}
final ISaveHandler saveHandler;
// Don't attempt to create files if the world properties exists and indicates nothing should be saved.
try (PhaseContext<?> ignore = GeneralPhase.State.SAVE_HANDLER_CREATION.createPhaseContext()
.createFiles(filesShouldExist)
.buildAndSwitch()) {
saveHandler = new AnvilSaveHandler(currentSavesDir.toFile(), worldName, true,
((MinecraftServerAccessor) SpongeImpl.getServer()).accessor$getDataFixer());
}
// We weren't given a properties, see if one is cached on the file system
if (properties == null) {
properties = (WorldProperties) saveHandler.loadWorldInfo();
// We tried :'(
if (properties == null) {
SpongeImpl.getLogger().error("Unable to load world [{}]. No world properties was found!", worldName);
return Optional.empty();
}
}
// Ensure a dimension id is assigned before registration.
Integer dimensionId = ((WorldInfoBridge) properties).bridge$getDimensionId();
if (dimensionId == null) {
dimensionId = getNextFreeDimensionId();
((WorldInfoBridge) properties).bridge$setDimensionId(dimensionId);
}
setUuidOnProperties(getCurrentSavesDirectory().get(), properties);
registerWorldProperties(properties);
final WorldInfo worldInfo = (WorldInfo) properties;
((WorldInfoBridge) worldInfo).bridge$createWorldConfig();
// check if enabled
if (!((WorldProperties) worldInfo).isEnabled()) {
SpongeImpl.getLogger().error("Unable to load world [{}] ({}/{}). It is disabled.", properties.getWorldName(), properties.getDimensionType().getId(), dimensionId);
return Optional.empty();
}
registerDimension(dimensionId, (DimensionType) (Object) properties.getDimensionType());
registerDimensionPath(dimensionId, worldFolder);
SpongeImpl.getLogger().info("Loading world [{}] ({}/{})", properties.getWorldName(), properties.getDimensionType().getId(), dimensionId);
final WorldServer worldServer = createWorldFromProperties(dimensionId, saveHandler, (WorldInfo) properties, new WorldSettings((WorldInfo)
properties));
// Set the worlds on the Minecraft server
reorderWorldsVanillaFirst();
return Optional.of(worldServer);
}
/**
 * Loads every registered dimension's world at server start, honoring multi-world
 * settings, per-world enabled/load-on-startup flags, and duplicate unique-id checks.
 *
 * @param defaultSeed Seed used when settings must be built on a dedicated server
 * @param defaultWorldType World type used when settings must be built
 * @param generatorOptions Generator options applied to freshly created world info
 */
public static void loadAllWorlds(final long defaultSeed, final WorldType defaultWorldType, final String generatorOptions) {
final MinecraftServer server = SpongeImpl.getServer();
final Path currentSavesDir = getCurrentSavesDirectory().get();
try {
// Symlink needs special handling
if (Files.isSymbolicLink(currentSavesDir)) {
final Path actualPathLink = Files.readSymbolicLink(currentSavesDir);
if (Files.notExists(actualPathLink)) {
Files.createDirectories(actualPathLink);
} else if (!Files.isDirectory(actualPathLink)) {
throw new IOException("Saves directory [" + currentSavesDir + "] symlink to [" + actualPathLink + "] is not a directory!");
}
} else {
Files.createDirectories(currentSavesDir);
}
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
WorldManager.registerVanillaDimensionPaths(currentSavesDir);
if (MIGRATE_WORLDS) {
WorldMigrator.migrateWorldsTo(currentSavesDir);
} else {
SpongeImpl.getLogger().info("World migration is disabled. Old worlds will not be migrated...");
}
registerExistingSpongeDimensions(currentSavesDir);
// Iterate vanilla-first (0, -1, 1) then remaining dimensions ascending.
for (final Int2ReferenceMap.Entry<DimensionType> entry: sortedDimensionMap().int2ReferenceEntrySet()) {
final int dimensionId = entry.getIntKey();
final DimensionType dimensionType = entry.getValue();
final org.spongepowered.api.world.DimensionType apiDimensionType = (org.spongepowered.api.world.DimensionType) (Object) dimensionType;
// Skip all worlds besides dimension 0 if multi-world is disabled
if (dimensionId != 0 && !server.getAllowNether()) {
continue;
}
// Skip already loaded worlds by plugins
if (getWorldByDimensionId(dimensionId).isPresent()) {
continue;
}
// Step 1 - Grab the world's data folder
final Path worldFolder = getWorldFolder(dimensionType, dimensionId);
if (worldFolder == null) {
SpongeImpl.getLogger().error("An attempt was made to load a world in dimension [{}] ({}) that has no registered world folder!",
apiDimensionType.getId(), dimensionId);
continue;
}
final String worldFolderName = worldFolder.getFileName().toString();
// Step 2 - See if we are allowed to load it
if (dimensionId != 0) {
final SpongeConfig<? extends GeneralConfigBase> spongeConfig = SpongeHooks.getConfigAdapter(((DimensionTypeBridge)(Object) dimensionType).bridge$getConfigPath(), worldFolderName);
if (!spongeConfig.getConfig().getWorld().isWorldEnabled()) {
SpongeImpl.getLogger().warn("World [{}] ({}/{}) is disabled. World will not be loaded...", worldFolder,
apiDimensionType.getId(), dimensionId);
continue;
}
}
// Step 3 - Get our world information from disk
final ISaveHandler saveHandler;
if (dimensionId == 0) {
saveHandler = server.getActiveAnvilConverter().getSaveLoader(server.getFolderName(), true);
} else {
saveHandler = new AnvilSaveHandler(WorldManager.getCurrentSavesDirectory().get().toFile(), worldFolderName, true, ((MinecraftServerAccessor) SpongeImpl.getServer()).accessor$getDataFixer());
}
WorldInfo worldInfo = saveHandler.loadWorldInfo();
final WorldSettings worldSettings;
// If this is integrated server, we need to use the WorldSettings from the client's Single Player menu to construct the worlds
if (server instanceof IntegratedServerBridge) {
worldSettings = ((IntegratedServerBridge) server).bridge$getSettings();
// If this is overworld and a new save, the WorldInfo has already been made but we want to still fire the construct event.
if (dimensionId == 0 && ((IntegratedServerBridge) server).bridge$isNewSave()) {
SpongeImpl.postEvent(SpongeEventFactory.createConstructWorldPropertiesEvent(Sponge.getCauseStackManager().getCurrentCause(), (WorldArchetype)
(Object) worldSettings, (WorldProperties) worldInfo));
}
} else {
// WorldSettings will be null here on dedicated server so we need to build one
worldSettings = new WorldSettings(defaultSeed, server.getGameType(), server.canStructuresSpawn(), server.isHardcore(),
defaultWorldType);
}
if (worldInfo == null) {
// Step 4 - At this point, we have either have the WorldInfo or we have none. If we have none, we'll use the settings built above to
// create the WorldInfo
worldInfo = createWorldInfoFromSettings(currentSavesDir, apiDimensionType,
dimensionId, worldFolderName, worldSettings, generatorOptions);
} else {
// create config
((WorldInfoBridge) worldInfo).bridge$setDimensionType(apiDimensionType);
((WorldInfoBridge) worldInfo).bridge$createWorldConfig();
((WorldProperties) worldInfo).setGenerateSpawnOnLoad(((DimensionTypeBridge) (Object) dimensionType).bridge$shouldGenerateSpawnOnLoad());
if (((WorldInfoBridge) worldInfo).bridge$getDimensionId() == null) {
((WorldInfoBridge) worldInfo).bridge$setDimensionId(dimensionId);
}
}
// Safety check to ensure we'll get a unique id no matter what
UUID uniqueId = ((WorldProperties) worldInfo).getUniqueId();
if (uniqueId == null) {
setUuidOnProperties(dimensionId == 0 ? currentSavesDir.getParent() : currentSavesDir, (WorldProperties) worldInfo);
uniqueId = ((WorldProperties) worldInfo).getUniqueId();
}
// Check if this world's unique id has already been registered
final String previousWorldForUUID = worldUuidByFolderName.inverse().get(uniqueId);
if (previousWorldForUUID != null) {
SpongeImpl.getLogger().error("UUID [{}] has already been registered by world [{}] but is attempting to be registered by world [{}]."
+ " This means worlds have been copied outside of Sponge. Skipping world load...", uniqueId, previousWorldForUUID, worldInfo.getWorldName());
continue;
}
// Keep the LevelName in the LevelInfo up to date with the directory name
if (!worldInfo.getWorldName().equals(worldFolderName)) {
worldInfo.setWorldName(worldFolderName);
}
// Step 5 - Load server resource pack from dimension 0
if (dimensionId == 0) {
((MinecraftServerAccessor) server).accessor$setResourcePackFromWorld(worldFolderName, saveHandler);
}
// Step 6 - Cache the WorldProperties we've made so we don't load from disk later.
registerWorldProperties((WorldProperties) worldInfo);
if (dimensionId != 0 && !((WorldProperties) worldInfo).loadOnStartup()) {
SpongeImpl.getLogger().warn("World [{}] ({}/{}) is set to not load on startup. To load it later, enable "
+ "[load-on-startup] in config or use a plugin.", worldInfo.getWorldName(), apiDimensionType.getId(), dimensionId);
continue;
}
// Step 7 - Finally, we can create the world and tell it to load
final WorldServer worldServer = createWorldFromProperties(dimensionId, saveHandler, worldInfo, worldSettings);
SpongeImpl.getLogger().info("Loading world [{}] ({}/{})", ((org.spongepowered.api.world.World) worldServer).getName(),
apiDimensionType.getId(), dimensionId);
}
// Set the worlds on the Minecraft server
reorderWorldsVanillaFirst();
}
/**
 * Builds a fresh {@link WorldInfo} from world settings, wires in dimension type,
 * spawn-on-load behavior, unique id and dimension id, and fires the construct event.
 *
 * @param currentSaveRoot The saves root (parent is used for dimension 0)
 * @param dimensionType The dimension type for the new world
 * @param dimensionId The dimension id to assign
 * @param worldFolderName The world's folder (and level) name
 * @param worldSettings The settings to build the info from
 * @param generatorOptions Generator options to apply to the settings
 * @return The newly created world info
 */
private static WorldInfo createWorldInfoFromSettings(final Path currentSaveRoot, final org.spongepowered.api.world.DimensionType dimensionType, final int
dimensionId, final String worldFolderName, final WorldSettings worldSettings, final String generatorOptions) {
worldSettings.setGeneratorOptions(generatorOptions);
((WorldSettingsBridge) (Object) worldSettings).bridge$setDimensionType(dimensionType);
((WorldSettingsBridge)(Object) worldSettings).bridge$setGenerateSpawnOnLoad(((DimensionTypeBridge) dimensionType).bridge$shouldGenerateSpawnOnLoad());
final WorldInfo worldInfo = new WorldInfo(worldSettings, worldFolderName);
setUuidOnProperties(dimensionId == 0 ? currentSaveRoot.getParent() : currentSaveRoot, (WorldProperties) worldInfo);
((WorldInfoBridge) worldInfo).bridge$setDimensionId(dimensionId);
SpongeImpl.postEvent(SpongeEventFactory.createConstructWorldPropertiesEvent(Sponge.getCauseStackManager().getCurrentCause(),
(WorldArchetype) (Object) worldSettings, (WorldProperties) worldInfo));
return worldInfo;
}
/**
 * Constructs, registers and initializes a {@link WorldServer} for the given info,
 * firing a LoadWorldEvent and optionally preparing the spawn area.
 *
 * @param dimensionId The dimension id of the new world
 * @param saveHandler The save handler backing the world
 * @param worldInfo The world info to construct from
 * @param worldSettings Non-null only for newly generated info; triggers spawn initialization
 * @return The constructed world server
 */
@SuppressWarnings("ConstantConditions")
private static WorldServer createWorldFromProperties(
final int dimensionId, final ISaveHandler saveHandler, final WorldInfo worldInfo, @Nullable final WorldSettings
worldSettings) {
final MinecraftServer server = SpongeImpl.getServer();
final WorldServer worldServer = new WorldServer(server, saveHandler, worldInfo, dimensionId, server.profiler);
worldByDimensionId.put(dimensionId, worldServer);
weakWorldByWorld.put(worldServer, worldServer);
WorldManager.reorderWorldsVanillaFirst();
((MinecraftServerBridge) server).bridge$putWorldTickTimes(dimensionId, new long[100]);
worldServer.init();
worldServer.addEventListener(new ServerWorldEventHandler(server, worldServer));
// This code changes from Mojang's to account for per-world API-set GameModes.
if (!server.isSinglePlayer() && worldServer.getWorldInfo().getGameType() == GameType.NOT_SET) {
worldServer.getWorldInfo().setGameType(server.getGameType());
}
// Force chunk requests while spawn generation runs, then restore in finally.
((ChunkProviderServerBridge) worldServer.getChunkProvider()).bridge$setForceChunkRequests(true);
try {
SpongeImpl.postEvent(SpongeEventFactory.createLoadWorldEvent(Sponge.getCauseStackManager().getCurrentCause(),
(org.spongepowered.api.world.World) worldServer));
// WorldSettings is only non-null here if this is a newly generated WorldInfo and therefore we need to initialize to calculate spawn.
if (worldSettings != null) {
worldServer.initialize(worldSettings);
}
if (((DimensionTypeBridge) ((org.spongepowered.api.world.World) worldServer).getDimension().getType()).bridge$shouldLoadSpawn()) {
((MinecraftServerBridge) server).bridge$prepareSpawnArea(worldServer);
}
// While we try to prevent mods from changing a worlds' WorldInfo, we aren't always
// successful. We re-do the fake world check to catch any changes made to WorldInfo
// that would make it invalid
((WorldBridge) worldServer).bridge$clearFakeCheck();
return worldServer;
} finally {
((ChunkProviderServerBridge) worldServer.getChunkProvider()).bridge$setForceChunkRequests(false);
}
}
/**
 * Creates the given directory (and parents) unless creation is vetoed by the
 * current phase or the owning world's serialization behavior is NONE.
 *
 * @param dir The directory to create
 * @return The result of {@code dir.mkdirs()}, or false when vetoed
 */
@SuppressWarnings({"rawtypes", "unchecked"})
public static boolean mkdirsIfSaveable(File dir) {
// Respect the current phase: some phases forbid creating world directories.
if (PhaseTracker.getInstance().getSidedThread() == Thread.currentThread()) {
IPhaseState state = PhaseTracker.getInstance().getCurrentState();
if (!state.shouldCreateWorldDirectories(PhaseTracker.getInstance().getCurrentContext())) {
return false;
}
}
Path path = dir.toPath();
Optional<Path> savesDirOpt = getCurrentSavesDirectory();
if (!savesDirOpt.isPresent()) {
return dir.mkdirs();
}
Path savesDir = savesDirOpt.get();
// Only paths strictly inside the saves directory are subject to serialization checks.
if (path.startsWith(savesDir) && !path.equals(savesDir)) {
Path worldName = savesDir.relativize(path).getName(0);
WorldProperties props;
if (worldPropertiesByFolderName.containsKey(worldName.toString())) {
props = worldPropertiesByFolderName.get(worldName.toString());
} else if (worldPropertiesByWorldUuid.size() == 1) {
// Happens while creating the WorldServer object for the main
// world: getDefaultWorld() doesn't work until afterwards
props = worldPropertiesByWorldUuid.values().iterator().next();
} else {
Optional<WorldProperties> overworld = Sponge.getServer().getDefaultWorld();
if (!overworld.isPresent()) {
return dir.mkdirs();
}
props = overworld.get();
}
// A world that must never be serialized gets no directories either.
if (props.getSerializationBehavior() == SerializationBehaviors.NONE) {
return false;
}
}
return dir.mkdirs();
}
/**
 * Internal use only - Namely for SpongeForge.
 *
 * <p>Registers an externally constructed world with this manager without any of
 * the usual validation, and allocates its tick-time tracking array.</p>
 *
 * @param dimensionId The world instance dimension id
 * @param worldServer The world server
 */
public static void forceAddWorld(final int dimensionId, final WorldServer worldServer) {
worldByDimensionId.put(dimensionId, worldServer);
weakWorldByWorld.put(worldServer, worldServer);
((MinecraftServerBridge) SpongeImpl.getServer()).bridge$putWorldTickTimes(dimensionId, new long[100]);
}
/**
 * Rebuilds the server's world array so the vanilla dimensions (0, -1, 1) come
 * first, followed by all remaining loaded worlds sorted by the world comparator.
 */
public static void reorderWorldsVanillaFirst() {
    final List<WorldServer> ordered = new LinkedList<>();
    final List<Integer> vanillaIds = new ArrayList<>();
    // Vanilla dimensions are placed up front in the fixed order 0, -1, 1.
    for (final int id : new int[] {0, -1, 1}) {
        final WorldServer vanillaWorld = worldByDimensionId.get(id);
        if (vanillaWorld != null) {
            vanillaIds.add(id);
            ordered.add(vanillaWorld);
        }
    }
    // Everything else follows, sorted, with the vanilla worlds filtered out.
    final List<WorldServer> remaining = new ArrayList<>(worldByDimensionId.values());
    remaining.removeIf(world -> vanillaIds.contains(((WorldServerBridge) world).bridge$getDimensionId()));
    remaining.sort(WORLD_SERVER_COMPARATOR);
    ordered.addAll(remaining);
    SpongeImpl.getServer().worlds = ordered.toArray(new WorldServer[0]);
}
/**
 * Parses a {@link UUID} from disk from other known plugin platforms and sets it on the
 * {@link WorldProperties}. Currently only Bukkit is supported.
 *
 * <p>If the properties already carry a valid unique id it is kept; otherwise a
 * Bukkit {@code uid.dat} is read if present, falling back to a random UUID.</p>
 *
 * @param savesRoot The directory containing the world folder
 * @param properties The properties to assign the unique id to, may not be null
 */
private static void setUuidOnProperties(final Path savesRoot, final WorldProperties properties) {
checkNotNull(properties);
UUID uuid;
if (properties.getUniqueId() == null || properties.getUniqueId().equals(Constants.World.INVALID_WORLD_UUID)) {
// Check if Bukkit's uid.dat file is here and use it
final Path uidPath = savesRoot.resolve(properties.getWorldName()).resolve("uid.dat");
if (Files.notExists(uidPath)) {
uuid = UUID.randomUUID();
} else {
try(final DataInputStream dis = new DataInputStream(Files.newInputStream(uidPath))) {
// uid.dat stores the UUID as two raw longs: most then least significant bits.
uuid = new UUID(dis.readLong(), dis.readLong());
} catch (IOException e) {
SpongeImpl.getLogger().error("World folder [{}] has an existing Bukkit unique identifier for it but we encountered issues parsing "
+ "the file. We will have to use a new unique id. Please report this to Sponge ASAP.", properties.getWorldName(), e);
uuid = UUID.randomUUID();
}
}
} else {
uuid = properties.getUniqueId();
}
((WorldInfoBridge) properties).bridge$setUniqueId(uuid);
}
/**
 * Handles registering existing Sponge dimensions that are not the root dimension
 * (known as overworld).
 *
 * <p>Scans the save root for world folders carrying a {@code level_sponge.dat},
 * validates the stored Sponge data (dimension id, unique id, dimension type) and
 * registers the dimension id, its type and its on-disk path. Vanilla dimensions
 * (0, -1, 1) and malformed entries are skipped with a logged diagnostic.</p>
 *
 * @param rootPath The root of the current save
 */
private static void registerExistingSpongeDimensions(final Path rootPath) {
    try (final DirectoryStream<Path> stream = Files.newDirectoryStream(rootPath, LEVEL_AND_SPONGE)) {
        for (final Path worldPath : stream) {
            final Path spongeLevelPath = worldPath.resolve("level_sponge.dat");
            final String worldFolderName = worldPath.getFileName().toString();
            final NBTTagCompound compound;
            try {
                compound = CompressedStreamTools.readCompressed(Files.newInputStream(spongeLevelPath));
            } catch (IOException e) {
                SpongeImpl.getLogger().error("Failed loading Sponge data for World [{}]. Report to Sponge ASAP.", worldFolderName, e);
                continue;
            }
            NBTTagCompound spongeDataCompound = compound.getCompoundTag(Constants.Sponge.SPONGE_DATA);
            if (!compound.hasKey(Constants.Sponge.SPONGE_DATA)) {
                SpongeImpl.getLogger()
                    .error("World [{}] has Sponge related data in the form of [level-sponge.dat] but the structure is not proper."
                            + " Generally, the data is within a [{}] tag but it is not for this world. Report to Sponge ASAP.",
                        worldFolderName, Constants.Sponge.SPONGE_DATA);
                continue;
            }
            if (!spongeDataCompound.hasKey(Constants.Sponge.World.DIMENSION_ID)) {
                SpongeImpl.getLogger().error("World [{}] has no dimension id. Report this to Sponge ASAP.", worldFolderName);
                continue;
            }
            spongeDataCompound = DataUtil.spongeDataFixer.process(FixTypes.LEVEL, spongeDataCompound);
            final int dimensionId = spongeDataCompound.getInteger(Constants.Sponge.World.DIMENSION_ID);
            // We do not handle Vanilla dimensions, skip them
            if (dimensionId == 0 || dimensionId == -1 || dimensionId == 1) {
                continue;
            }
            if (dimensionTypeByDimensionId.containsKey(dimensionId)) {
                SpongeImpl.getLogger().warn("World [{}] ({}) is attempting to be registered as an " +
                    "existing dimension but it's dimension id has already been registered for folder " +
                    "[{}]. This means the world has been copied outside of Sponge. This is not a " +
                    "supported configuration.", worldFolderName, dimensionId, worldFolderByDimensionId
                    .get(dimensionId));
                continue;
            }
            if (!spongeDataCompound.hasUniqueId(Constants.UUID)) {
                SpongeImpl.getLogger().error("World [{}] ({}) has no valid unique identifier. Report this to Sponge ASAP.", worldFolderName, dimensionId);
                continue;
            }
            // Fall back to the overworld type when none was saved.
            String dimensionTypeId = "overworld";
            if (spongeDataCompound.hasKey(Constants.Sponge.World.DIMENSION_TYPE)) {
                dimensionTypeId = spongeDataCompound.getString(Constants.Sponge.World.DIMENSION_TYPE);
            } else {
                SpongeImpl.getLogger().warn("World [{}] ({}) has no specified dimension type. Defaulting to [{}]...", worldFolderName,
                    dimensionId, DimensionTypes.OVERWORLD.getName());
            }
            dimensionTypeId = fixDimensionTypeId(dimensionTypeId);
            final org.spongepowered.api.world.DimensionType dimensionType
                = Sponge.getRegistry().getType(org.spongepowered.api.world.DimensionType.class, dimensionTypeId).orElse(null);
            if (dimensionType == null) {
                SpongeImpl.getLogger().warn("World [{}] ({}) has specified dimension type that is not registered. Skipping...", worldFolderName, dimensionId);
                continue;
            }
            spongeDataCompound.setString(Constants.Sponge.World.DIMENSION_TYPE, dimensionTypeId);
            worldFolderByDimensionId.put(dimensionId, worldFolderName);
            registerDimensionPath(dimensionId, rootPath.resolve(worldFolderName));
            registerDimension(dimensionId, (DimensionType)(Object) dimensionType);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
// Checks if the saved dimension type contains a modid and if not, attempts to locate one
public static String fixDimensionTypeId(final String name) {
    // Since we now store the modid, we need to support older save files that only
    // include the id without a modid. A namespaced id needs no fixing.
    if (name.contains(":")) {
        return name;
    }
    // Match the bare id against every registered dimension type's id with its
    // namespace stripped; the first match wins.
    for (final org.spongepowered.api.world.DimensionType candidate
            : Sponge.getRegistry().getAllOf(org.spongepowered.api.world.DimensionType.class)) {
        final String candidateId = candidate.getId();
        final String bareId = candidateId.substring(candidateId.lastIndexOf(":") + 1);
        if (bareId.equals(name)) {
            // Note: We don't update the NBT here but instead fix it on next
            // world save in case there are 2 types using same name.
            return candidateId;
        }
    }
    return name;
}
/**
 * Asynchronously copies a registered world into a new folder named {@code copyName}.
 *
 * <p>If the source world is currently loaded it is saved first and world saving is
 * suspended for the duration of the copy so the task sees a consistent on-disk state.</p>
 *
 * @param worldProperties The properties of the world to copy (must be registered)
 * @param copyName The destination world/folder name (must not be registered)
 * @return A future yielding the new world's properties, or empty if the copy failed
 */
public static CompletableFuture<Optional<WorldProperties>> copyWorld(final WorldProperties worldProperties, final String copyName) {
    checkArgument(worldPropertiesByFolderName.containsKey(worldProperties.getWorldName()), "World properties not registered!");
    checkArgument(!worldPropertiesByFolderName.containsKey(copyName), "Destination world name already is registered!");
    final WorldInfo info = (WorldInfo) worldProperties;
    // Loaded worlds are flushed to disk and saving is disabled while copying.
    final WorldServer worldServer = worldByDimensionId.get(((WorldInfoBridge) info).bridge$getDimensionId().intValue());
    if (worldServer != null) {
        try {
            saveWorld(worldServer, true);
        } catch (MinecraftException e) {
            throw new RuntimeException(e);
        }
        ((MinecraftServerBridge) SpongeImpl.getServer()).bridge$setSaveEnabled(false);
    }
    final CompletableFuture<Optional<WorldProperties>> future = SpongeImpl.getScheduler().submitAsyncTask(new CopyWorldTask(info, copyName));
    if (worldServer != null) { // World was loaded
        // Re-enable saving once the async copy completes.
        future.thenRun(() -> ((MinecraftServerBridge) SpongeImpl.getServer()).bridge$setSaveEnabled(true));
    }
    return future;
}
/**
 * Renames an unloaded world by moving its folder and re-registering its properties
 * under the new name, preserving the original dimension id and UUID.
 *
 * @param worldProperties The properties of the (unloaded) world to rename
 * @param newName The new world/folder name
 * @return The re-registered properties, or empty if the destination folder exists
 *         or the move failed
 */
public static Optional<WorldProperties> renameWorld(final WorldProperties worldProperties, final String newName) {
    checkNotNull(worldProperties);
    checkNotNull(newName);
    checkState(!worldByDimensionId.containsKey(((WorldInfoBridge) worldProperties).bridge$getDimensionId()), "World is still loaded!");
    final Path oldWorldFolder = getCurrentSavesDirectory().get().resolve(worldProperties.getWorldName());
    final Path newWorldFolder = oldWorldFolder.resolveSibling(newName);
    if (Files.exists(newWorldFolder)) {
        return Optional.empty();
    }
    try {
        Files.move(oldWorldFolder, newWorldFolder);
    } catch (IOException e) {
        SpongeImpl.getLogger().error("Failed to move world folder " + worldProperties.getWorldName(), e);
        return Optional.empty();
    }
    unregisterWorldProperties(worldProperties, false);
    final WorldInfo info = new WorldInfo((WorldInfo) worldProperties);
    info.setWorldName(newName);
    // As we are moving a world, we want to move the dimension ID and UUID with the world to ensure
    // plugins and Sponge do not break.
    ((WorldInfoBridge) info).bridge$setUniqueId(worldProperties.getUniqueId());
    if (((WorldInfoBridge) worldProperties).bridge$getDimensionId() != null) {
        ((WorldInfoBridge) info).bridge$setDimensionId(((WorldInfoBridge) worldProperties).bridge$getDimensionId());
    }
    ((WorldInfoBridge) info).bridge$createWorldConfig();
    // Persist the renamed level data into the moved folder before re-registering.
    new AnvilSaveHandler(WorldManager.getCurrentSavesDirectory().get().toFile(), newName, true, ((MinecraftServerAccessor) SpongeImpl.getServer()).accessor$getDataFixer())
            .saveWorldInfo(info);
    registerWorldProperties((WorldProperties) info);
    return Optional.of((WorldProperties) info);
}
/**
 * Asynchronously deletes an unloaded, registered world's folder from disk and
 * unregisters its properties.
 *
 * @param worldProperties The properties of the world to delete
 * @return A future yielding {@code true} on success, {@code false} on I/O failure
 */
public static CompletableFuture<Boolean> deleteWorld(final WorldProperties worldProperties) {
    checkNotNull(worldProperties);
    checkArgument(worldPropertiesByWorldUuid.containsKey(worldProperties.getUniqueId()), "World properties not registered!");
    checkState(!worldByDimensionId.containsKey(((WorldInfoBridge) worldProperties).bridge$getDimensionId()), "World not unloaded!");
    return SpongeImpl.getScheduler().submitAsyncTask(new DeleteWorldTask(worldProperties));
}
/**
 * Called when the server wants to update the difficulty on all worlds.
 *
 * If the world has a difficulty set via external means (command, plugin, mod) then we honor that difficulty always.
 */
public static void updateServerDifficulty() {
    final EnumDifficulty fallbackDifficulty = SpongeImpl.getServer().getDifficulty();
    for (final WorldServer world : getWorlds()) {
        final WorldInfo info = world.getWorldInfo();
        EnumDifficulty difficulty = fallbackDifficulty;
        // A custom difficulty (set by command/plugin/mod) always wins over the server value.
        if (((WorldInfoBridge) info).bridge$hasCustomDifficulty()) {
            difficulty = info.getDifficulty();
        }
        adjustWorldForDifficulty(world, difficulty, false);
    }
}
/**
 * Applies a difficulty to a world and adjusts which spawn types it allows.
 *
 * <p>Hardcore worlds are forced to HARD. When {@code isCustom} the difficulty is
 * written directly to the world info; otherwise it is force-set only if the world
 * has no custom difficulty of its own.</p>
 *
 * @param worldServer The world to adjust
 * @param difficulty The difficulty to apply
 * @param isCustom Whether the difficulty was set explicitly (command, plugin, mod)
 */
public static void adjustWorldForDifficulty(final WorldServer worldServer, EnumDifficulty difficulty, final boolean isCustom) {
    final MinecraftServer server = SpongeImpl.getServer();
    if (worldServer.getWorldInfo().isHardcoreModeEnabled()) {
        difficulty = EnumDifficulty.HARD;
        worldServer.setAllowedSpawnTypes(true, true);
    } else if (server.isSinglePlayer()) {
        // Consistency fix: reuse the 'server' local fetched above instead of
        // calling SpongeImpl.getServer() a second time.
        worldServer.setAllowedSpawnTypes(worldServer.getDifficulty() != EnumDifficulty.PEACEFUL, true);
    } else {
        worldServer.setAllowedSpawnTypes(server.allowSpawnMonsters(), server.getCanSpawnAnimals());
    }
    if (isCustom) {
        worldServer.getWorldInfo().setDifficulty(difficulty);
    } else if (!((WorldInfoBridge) worldServer.getWorldInfo()).bridge$hasCustomDifficulty()) {
        ((WorldInfoBridge) worldServer.getWorldInfo()).bridge$forceSetDifficulty(difficulty);
    }
}
/**
 * Async task that copies a world folder on disk and registers fresh properties
 * (new dimension id, new UUID, new config) for the copy.
 */
private static class CopyWorldTask implements Callable<Optional<WorldProperties>> {
    private final WorldInfo oldInfo; // info of the world being copied
    private final String newName;    // destination folder/world name
    CopyWorldTask(final WorldInfo info, final String newName) {
        this.oldInfo = info;
        this.newName = newName;
    }
    @Override
    public Optional<WorldProperties> call() throws Exception {
        Path oldWorldFolder = getCurrentSavesDirectory().get().resolve(this.oldInfo.getWorldName());
        final Path newWorldFolder = getCurrentSavesDirectory().get().resolve(this.newName);
        // Never overwrite an existing destination folder.
        if (Files.exists(newWorldFolder)) {
            return Optional.empty();
        }
        FileVisitor<Path> visitor = new CopyFileVisitor(newWorldFolder);
        if (((WorldInfoBridge) this.oldInfo).bridge$getDimensionId() == 0) {
            // Copying the root world: walk the whole saves directory but skip any
            // nested world folder (a non-root subdirectory with its own level.dat).
            oldWorldFolder = getCurrentSavesDirectory().get();
            visitor = new ForwardingFileVisitor<Path>(visitor) {
                private boolean root = true; // true only for the very first directory visited
                @Override
                public FileVisitResult preVisitDirectory(final Path dir, final BasicFileAttributes attrs) throws IOException {
                    if (!this.root && Files.exists(dir.resolve("level.dat"))) {
                        return FileVisitResult.SKIP_SUBTREE;
                    }
                    this.root = false;
                    return super.preVisitDirectory(dir, attrs);
                }
            };
        }
        // Copy the world folder
        Files.walkFileTree(oldWorldFolder, visitor);
        // Give the copy its own identity so it can coexist with the source world.
        final WorldInfo info = new WorldInfo(this.oldInfo);
        info.setWorldName(this.newName);
        ((WorldInfoBridge) info).bridge$setDimensionId(WorldManager.getNextFreeDimensionId());
        ((WorldInfoBridge) info).bridge$setUniqueId(UUID.randomUUID());
        ((WorldInfoBridge) info).bridge$createWorldConfig();
        new AnvilSaveHandler(WorldManager.getCurrentSavesDirectory().get().toFile(), this.newName, true, ((MinecraftServerAccessor) SpongeImpl.getServer()).accessor$getDataFixer())
                .saveWorldInfo(info);
        registerWorldProperties((WorldProperties) info);
        return Optional.of((WorldProperties) info);
    }
}
private static class DeleteWorldTask implements Callable<Boolean> {
private final WorldProperties props;
DeleteWorldTask(final WorldProperties props) {
this.props = props;
}
@Override
public Boolean call() {
final Path worldFolder = getCurrentSavesDirectory().get().resolve(this.props.getWorldName());
if (!Files.exists(worldFolder)) {
unregisterWorldProperties(this.props, true);
return true;
}
try {
Files.walkFileTree(worldFolder, DeleteFileVisitor.INSTANCE);
unregisterWorldProperties(this.props, true);
return true;
} catch (IOException e) {
e.printStackTrace();
return false;
}
}
}
/**
 * Notifies a client of a dimension registration. Intentionally a no-op in the
 * common implementation; platform-specific code is expected to provide behavior.
 *
 * @param playerMP The player to notify
 * @param provider The dimension's world provider
 */
public static void sendDimensionRegistration(final EntityPlayerMP playerMP, final WorldProvider provider) {
    // Do nothing in Common
}
/**
 * Loads the set of in-use dimension ids from the Forge-format dimension data map.
 *
 * @param compound The saved data map, or {@code null} when none exists yet (in which
 *                 case all currently registered non-negative dimension ids are marked used)
 */
public static void loadDimensionDataMap(@Nullable final NBTTagCompound compound) {
    usedDimensionIds.clear();
    lastUsedDimensionId = 0;
    if (compound == null) {
        // No saved map: seed the used-id set from what is registered right now.
        for (IntIterator iterator = dimensionTypeByDimensionId.keySet().iterator(); iterator.hasNext();) {
            final int dimensionId = iterator.nextInt();
            if (dimensionId >= 0) {
                usedDimensionIds.add(dimensionId);
            }
        }
    } else {
        for (final int id : compound.getIntArray(Constants.Forge.USED_DIMENSION_IDS)) {
            usedDimensionIds.add(id);
        }
        // legacy data (load but don't save): a bitset packed into an int array
        // where bit j of element i marks dimension id (i * 32 + j) as used.
        final int[] intArray = compound.getIntArray(Constants.Legacy.LEGACY_DIMENSION_ARRAY);
        for (int i = 0; i < intArray.length; i++) {
            final int data = intArray[i];
            if (data == 0) continue;
            for (int j = 0; j < Integer.SIZE; j++) {
                if ((data & (1 << j)) != 0) usedDimensionIds.add(i * Integer.SIZE + j);
            }
        }
    }
}
/**
 * Serializes the set of in-use dimension ids into a Forge-format data map.
 * Note that legacy bitset data is intentionally not written back.
 *
 * @return A compound containing the used dimension ids
 */
public static NBTTagCompound saveDimensionDataMap() {
    final NBTTagCompound dataMap = new NBTTagCompound();
    dataMap.setIntArray(Constants.Forge.USED_DIMENSION_IDS, usedDimensionIds.toIntArray());
    return dataMap;
}
/**
 * Resolves the current saves directory.
 *
 * <p>Prefers the loaded overworld's save handler directory; before any world is
 * loaded (but once the server is at least about to start) falls back to the anvil
 * root plus the configured folder name.</p>
 *
 * @return The saves directory, or empty if it cannot be determined yet
 */
public static Optional<Path> getCurrentSavesDirectory() {
    final Optional<WorldServer> optWorldServer = getWorldByDimensionId(0);
    if (optWorldServer.isPresent()) {
        return Optional.of(optWorldServer.get().getSaveHandler().getWorldDirectory().toPath());
    } else if (SpongeImpl.getGame().getState().ordinal() >= GameState.SERVER_ABOUT_TO_START.ordinal()) {
        final MinecraftServer server = SpongeImpl.getServer();
        return Optional.of(((MinecraftServerAccessor) server).accessor$getAnvilFile().toPath().resolve(server.getFolderName()));
    }
    return Optional.empty();
}
/**
 * @return The internal world-to-world weak map (exposed directly, not a copy)
 */
public static Map<WorldServer, WorldServer> getWeakWorldMap() {
    return weakWorldByWorld;
}
/**
 * Maps a world to the dimension id that should be sent to the client: the fixed
 * vanilla ids for the three vanilla dimension types, otherwise the world's own id.
 *
 * @param player The receiving player (unused in the common implementation)
 * @param world The world whose id is being resolved
 * @return The client-facing dimension id
 */
public static int getClientDimensionId(final EntityPlayerMP player, final World world) {
    switch (world.provider.getDimensionType()) {
        case OVERWORLD:
            return 0;
        case NETHER:
            return -1;
        case THE_END:
            return 1;
        default:
            return ((WorldServerBridge) world).bridge$getDimensionId();
    }
}
/**
 * @param world The world to check
 * @return {@code true} if the world is tracked by this manager
 */
public static boolean isKnownWorld(final WorldServer world) {
    return weakWorldByWorld.containsKey(world);
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2002 - 2003
* NetGroup, Politecnico di Torino (Italy)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the Politecnico di Torino nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef __CONFIG_PARAMS_H__
#define __CONFIG_PARAMS_H__
//
// Parameters set from the configuration file.
//
// Note: MAX_LINE also sizes the address/port buffers in struct active_pars below.
#define MAX_LINE 2048		/* Maximum chars allowed for the host list (in passive mode) */
#define MAX_HOST_LIST 64000	/* Size of the buffer holding the allowed-hosts list */
#define MAX_ACTIVE_LIST 10	/* Maximum number of active-mode connection entries */
// One active-mode connection target.
struct active_pars
{
	char address[MAX_LINE + 1];	// keeps the network address (either numeric or literal) of the active client
	char port[MAX_LINE + 1];	// keeps the network port to bind to
	int ai_family;			// address family to use
};
extern char hostlist[MAX_HOST_LIST + 1];	//!< Keeps the list of the hosts that are allowed to connect to this server
extern struct active_pars activelist[MAX_ACTIVE_LIST];	//!< Keeps the list of the hosts (host, port) on which I want to connect to (active mode)
extern int nullAuthAllowed;	//!< '1' if we permit NULL authentication, '0' otherwise
extern char loadfile[MAX_LINE + 1];	//!< Name of the file from which we have to load the configuration
#endif
| {
"pile_set_name": "Github"
} |
package ai.labs.resources.impl.botmanagement.mongo;
import ai.labs.models.BotTriggerConfiguration;
import ai.labs.resources.rest.botmanagement.IBotTriggerStore;
import ai.labs.serialization.IDocumentBuilder;
import ai.labs.serialization.IJsonSerialization;
import ai.labs.utilities.RuntimeUtilities;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.Indexes;
import lombok.extern.slf4j.Slf4j;
import org.bson.Document;
import javax.inject.Inject;
import java.io.IOException;
import static ai.labs.persistence.IResourceStore.ResourceAlreadyExistsException;
import static ai.labs.persistence.IResourceStore.ResourceNotFoundException;
import static ai.labs.persistence.IResourceStore.ResourceStoreException;
/**
* @author ginccc
*/
/**
 * MongoDB-backed store for {@code BotTriggerConfiguration}s, keyed by the unique
 * {@code intent} field. Public methods validate arguments and delegate to an
 * inner resource store that performs the raw collection operations.
 */
@Slf4j
public class BotTriggerStore implements IBotTriggerStore {
    private static final String COLLECTION_BOT_TRIGGERS = "bottriggers";
    private static final String INTENT_FIELD = "intent";
    private final MongoCollection<Document> collection;
    private final IDocumentBuilder documentBuilder;
    private final IJsonSerialization jsonSerialization;
    private BotTriggerResourceStore botTriggerStore; // delegate for the raw CRUD operations
    @Inject
    public BotTriggerStore(MongoDatabase database,
                           IJsonSerialization jsonSerialization,
                           IDocumentBuilder documentBuilder) {
        this.jsonSerialization = jsonSerialization;
        RuntimeUtilities.checkNotNull(database, "database");
        this.collection = database.getCollection(COLLECTION_BOT_TRIGGERS);
        this.documentBuilder = documentBuilder;
        this.botTriggerStore = new BotTriggerResourceStore();
        // Enforce one trigger per intent at the database level.
        collection.createIndex(Indexes.ascending(INTENT_FIELD), new IndexOptions().unique(true));
    }
    /**
     * Reads the trigger configuration stored for an intent.
     *
     * @throws ResourceNotFoundException if no trigger exists for the intent
     * @throws ResourceStoreException on (de)serialization failure
     */
    @Override
    public BotTriggerConfiguration readBotTrigger(String intent)
            throws ResourceNotFoundException, ResourceStoreException {
        RuntimeUtilities.checkNotNull(intent, INTENT_FIELD);
        return botTriggerStore.readBotTrigger(intent);
    }
    /**
     * Replaces the trigger configuration stored for an intent.
     */
    @Override
    public void updateBotTrigger(String intent, BotTriggerConfiguration botTriggerConfiguration)
            throws ResourceStoreException {
        RuntimeUtilities.checkNotNull(intent, INTENT_FIELD);
        RuntimeUtilities.checkNotNull(botTriggerConfiguration, "botTriggerConfiguration");
        botTriggerStore.updateBotTrigger(intent, botTriggerConfiguration);
    }
    /**
     * Creates a new trigger configuration.
     *
     * @throws ResourceAlreadyExistsException if a trigger for the intent already exists
     */
    @Override
    public void createBotTrigger(BotTriggerConfiguration botTriggerConfiguration)
            throws ResourceAlreadyExistsException, ResourceStoreException {
        RuntimeUtilities.checkNotNull(botTriggerConfiguration, "botTriggerConfiguration");
        botTriggerStore.createBotTrigger(botTriggerConfiguration);
    }
    /**
     * Deletes the trigger configuration for an intent; silently does nothing if absent.
     */
    @Override
    public void deleteBotTrigger(String intent) {
        RuntimeUtilities.checkNotNull(intent, INTENT_FIELD);
        botTriggerStore.deleteBotTrigger(intent);
    }
    /** Raw MongoDB operations for trigger documents. */
    private class BotTriggerResourceStore {
        BotTriggerConfiguration readBotTrigger(String intent)
                throws ResourceStoreException, ResourceNotFoundException {
            Document filter = new Document();
            filter.put(INTENT_FIELD, intent);
            try {
                Document document = collection.find(filter).first();
                if (document != null) {
                    return documentBuilder.build(document, BotTriggerConfiguration.class);
                } else {
                    String message = "BotTriggerConfiguration with intent=%s does not exist";
                    message = String.format(message, intent);
                    throw new ResourceNotFoundException(message);
                }
            } catch (IOException e) {
                throw new ResourceStoreException(e.getLocalizedMessage(), e);
            }
        }
        void updateBotTrigger(String intent, BotTriggerConfiguration botTriggerConfiguration)
                throws ResourceStoreException {
            Document document = createDocument(botTriggerConfiguration);
            collection.replaceOne(new Document(INTENT_FIELD, intent), document);
        }
        void createBotTrigger(BotTriggerConfiguration botTriggerConfiguration)
                throws ResourceStoreException, ResourceAlreadyExistsException {
            // Pre-check for duplicates to raise a descriptive error before the
            // unique index would reject the insert.
            if (collection.find(new Document(INTENT_FIELD, botTriggerConfiguration.getIntent())).first() != null) {
                String message = "BotTriggerConfiguration with intent=%s already exists";
                message = String.format(message, botTriggerConfiguration.getIntent());
                throw new ResourceAlreadyExistsException(message);
            }
            collection.insertOne(createDocument(botTriggerConfiguration));
        }
        void deleteBotTrigger(String intent) {
            collection.deleteOne(new Document(INTENT_FIELD, intent));
        }
        // Converts the configuration to a BSON Document via a JSON round-trip.
        private Document createDocument(BotTriggerConfiguration botTriggerConfiguration)
                throws ResourceStoreException {
            try {
                return jsonSerialization.deserialize(jsonSerialization.serialize(botTriggerConfiguration),
                        Document.class);
            } catch (IOException e) {
                throw new ResourceStoreException(e.getLocalizedMessage(), e);
            }
        }
    }
}
| {
"pile_set_name": "Github"
} |
"""
```
one_draw(m::AbstractDSGEModel, data::Matrix{Float64}; use_chand_recursion::Bool = true,
    verbose::Symbol = :low)
```
Finds and returns one valid draw from parameter distribution, along with its log likelihood and log posterior.
"""
function one_draw(m::AbstractDSGEModel, data::Matrix{Float64};
                  use_chand_recursion::Bool = true, verbose::Symbol = :low)
    success = false
    draw = vec(rand(m.parameters, 1))
    draw_loglh = draw_logpost = 0.0
    while !success
        try
            update!(m, draw)
            draw_loglh = likelihood(m, data, catch_errors = true,
                                    use_chand_recursion = use_chand_recursion,
                                    verbose = verbose)
            draw_logpost = prior(m)
            # Treat a NaN likelihood like -Inf. The old `draw_loglh === NaN` was a
            # bit-pattern comparison and could miss non-canonical NaNs; `isnan` is
            # the reliable test. `||` also short-circuits, unlike the old `|`.
            if draw_loglh == -Inf || isnan(draw_loglh)
                draw_loglh = draw_logpost = -Inf
            end
        catch err
            if isa(err, ParamBoundsError)
                # Draw fell outside the parameter bounds: reject it.
                draw_loglh = draw_logpost = -Inf
            elseif isa(err, PosDefException) || isa(err, SingularException) ||
                   isa(err, LinearAlgebra.LAPACKException) || isa(err, DSGE.SteadyStateConvergenceError)
                # Numerical failure while evaluating the model: reject the draw
                # instead of killing the run.
                draw_loglh = draw_logpost = -Inf
            else
                # Preserve the original backtrace (the old `throw(err)` discarded it).
                rethrow(err)
            end
        end
        # Resample until we land on a draw with a finite likelihood.
        if isinf(draw_loglh)
            draw = vec(rand(m.parameters, 1))
        else
            success = true
        end
    end
    return vector_reshape(draw, draw_loglh, draw_logpost)
end
"""
```
initial_draw!(m::AbstractDSGEModel, data::Matrix{Float64}, c::ParticleCloud)
initial_draw!(m::AbstractDSGEModel, data::Matrix{Float64}, c::Cloud)
```
Draw from a general starting distribution (set by default to be from the prior) to
initialize the SMC algorithm. Modifies the particle cloud in place: particle values,
log-likelihoods, log-posteriors, old log-likelihoods, and weights are all set.
"""
function initial_draw!(m::AbstractModel, data::Matrix{Float64},
                       c::Union{Cloud, ParticleCloud};
                       parallel::Bool = false, use_chand_recursion::Bool = true,
                       verbose::Symbol = :low)
    # NOTE(review): the annotation is AbstractModel while the docstring and the
    # sibling functions use AbstractDSGEModel — confirm which is intended.
    n_parts = length(c)
    # ================== Define closure on one_draw function ==================
    # Ship the model, data, and options to every worker so the closure below can
    # run remotely under @distributed.
    sendto(workers(), m = m)
    sendto(workers(), data = data)
    sendto(workers(), verbose = verbose)
    sendto(workers(), use_chand_recursion = use_chand_recursion)
    # Defined both locally and on the workers so the serial path works too.
    one_draw_closure() = one_draw(m, data; use_chand_recursion = use_chand_recursion,
                                  verbose = verbose)
    @everywhere one_draw_closure() = one_draw(m, data;
                                              use_chand_recursion = use_chand_recursion,
                                              verbose = verbose)
    # =========================================================================
    # For each particle, finds valid parameter draw and returns likelihood & posterior
    draws, loglh, logpost = if parallel
        @sync @distributed (vector_reduce) for i in 1:n_parts
            one_draw_closure()
        end
    else
        vector_reduce([one_draw_closure() for i in 1:n_parts]...)
    end
    update_draws!(c, draws)
    update_loglh!(c, vec(loglh))
    update_logpost!(c, vec(logpost))
    update_old_loglh!(c, zeros(n_parts))
    # Need to call `set_weights` as opposed to `update_weights`
    # since update_weights will multiply and 0*anything = 0
    set_weights!(c, ones(n_parts))
end
"""
```
function draw_likelihood(m, data, draw_vec; verbose::Symbol = :low)
```
Computes likelihood of a particular parameter draw; returns loglh and logpost.
"""
function draw_likelihood(m::AbstractDSGEModel, data::Matrix{Float64},
                         draw_vec::Vector{Float64}; verbose::Symbol = :low)
    # Load the draw into the model, then evaluate the data likelihood and the
    # prior density at those parameter values (in that order).
    update!(m, draw_vec)
    return scalar_reshape(likelihood(m, data, verbose = verbose), prior(m))
end
"""
```
initialize_likelihoods!(m::AbstractDSGEModel, data::Matrix{Float64},
    c::Union{Cloud, ParticleCloud};
    parallel::Bool = false, verbose::Symbol = :low)
```
This function is made for transfering the log-likelihood values saved in the
Cloud from a previous estimation to each particle's respective old_loglh
field, and for evaluating/saving the likelihood and posterior at the new data, which
here is just the argument, data.
"""
function initialize_likelihoods!(m::AbstractDSGEModel, data::Matrix{Float64},
                                 c::Union{Cloud, ParticleCloud};
                                 parallel::Bool = false, verbose::Symbol = :low)
    n_parts = length(c)
    # The two cloud types store particle values with opposite orientations;
    # normalize to an (n_parts x n_params) matrix.
    draws = (typeof(c) <: Cloud) ? get_vals(c; transpose = false) : Matrix{Float64}(get_vals(c)')
    # Retire log-likelihood values from the old estimation to the field old_loglh
    update_old_loglh!(c, get_loglh(c))
    # ============== Define closure on draw_likelihood function ===============
    # Ship the model and data to every worker so the closure can run remotely.
    sendto(workers(), m = m)
    sendto(workers(), data = data)
    sendto(workers(), verbose = verbose)
    draw_likelihood_closure(draw::Vector{Float64}) = draw_likelihood(m, data, draw;
                                                                    verbose = verbose)
    @everywhere draw_likelihood_closure(draw::Vector{Float64}) = draw_likelihood(m, data,
                                                                    draw; verbose = verbose)
    # =========================================================================
    # TODO: handle when the likelihood with new data cannot be evaluated (returns -Inf),
    # even if the likelihood was not -Inf prior to incorporating new data
    loglh, logpost = if parallel
        @sync @distributed (scalar_reduce) for i in 1:n_parts
            draw_likelihood_closure(draws[i, :])
        end
    else
        scalar_reduce([draw_likelihood_closure(draws[i, :]) for i in 1:n_parts]...)
    end
    update_loglh!(c, loglh)
    update_logpost!(c, logpost)
end
"""
```
function initialize_cloud_settings!(m::AbstractDSGEModel, cloud::ParticleCloud;
    tempered_update::Bool = false)
```
Initializes stage index, number of Φ stages, c, resamples, acceptance, and sampling time.
"""
function initialize_cloud_settings!(m::AbstractDSGEModel,
                                    cloud::Union{ParticleCloud,Cloud};
                                    tempered_update::Bool = false)
    if tempered_update
        # Continue from the previous estimation: keep only the final ESS value.
        cloud.ESS = [cloud.ESS[end]]
    else
        # Fresh estimation: the initial ESS is the full particle count.
        cloud.ESS[1] = get_setting(m, :n_particles)
    end
    cloud.stage_index = 1
    cloud.n_Φ = get_setting(m, :n_Φ)
    cloud.resamples = 0
    cloud.c = get_setting(m, :step_size_smc)
    cloud.accept = get_setting(m, :target_accept)
    cloud.total_sampling_time = 0.
    cloud.tempering_schedule = zeros(1)
end
| {
"pile_set_name": "Github"
} |
// DIE's signature file
// Detect It Easy signature: identifies executables packed with "IMPostor Pack".
init("packer","IMPostor Pack");
// Entry point called by the DIE engine; the flags select which fields appear
// in the result string.
function detect(bShowType,bShowVersion,bShowOptions)
{
	// Compare the bytes at the PE entry point against the packer's known
	// stub pattern ('.' is a wildcard nibble).
	if(PE.compareEP("BE........83C601FFE600000000....000000000000000000......00..02....00100000000200"))
	{
		sVersion="1.0";
		bDetected=1;
	}
	return result(bShowType,bShowVersion,bShowOptions);
}
| {
"pile_set_name": "Github"
} |
# fscrypt (per-file encryption): selecting this pulls in the crypto algorithm
# implementations and keyring support it depends on.
config FS_ENCRYPTION
	tristate "FS Encryption (Per-file encryption)"
	select CRYPTO
	select CRYPTO_AES
	select CRYPTO_CBC
	select CRYPTO_ECB
	select CRYPTO_XTS
	select CRYPTO_CTS
	select CRYPTO_CTR
	select CRYPTO_SHA256
	select KEYS
	help
	  Enable encryption of files and directories.  This
	  feature is similar to ecryptfs, but it is more memory
	  efficient since it avoids caching the encrypted and
	  decrypted pages in the page cache.
| {
"pile_set_name": "Github"
} |
/* Helpers for managing scan queues
*
* See copyright notice in main.c
*/
#include <linux/gfp.h>
#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/ieee80211.h>
#include <net/cfg80211.h>
#include "hermes.h"
#include "orinoco.h"
#include "main.h"
#include "scan.h"
/* Firmware reports signal levels as unsigned bytes; these constants define
 * the valid range and the offset used to convert that scale to dBm. */
#define ZERO_DBM_OFFSET 0x95
#define MAX_SIGNAL_LEVEL 0x8A
#define MIN_SIGNAL_LEVEL 0x2F
/* Clamp to the valid range, then shift so ZERO_DBM_OFFSET maps to 0 dBm
 * (real-world levels come out negative). */
#define SIGNAL_TO_DBM(x) \
	(clamp_t(s32, (x), MIN_SIGNAL_LEVEL, MAX_SIGNAL_LEVEL) \
	- ZERO_DBM_OFFSET)
#define SIGNAL_TO_MBM(x) (SIGNAL_TO_DBM(x) * 100)	/* cfg80211 expects mBm */
/* Build an 802.11 Supported Rates IE from Symbol firmware scan data.
 *
 * The firmware reports at most 5 little-endian 16-bit rate values, with a
 * zero entry terminating the list early.  Writes the element into @buf and
 * returns the number of bytes written (2-byte header + rates).
 */
static int symbol_build_supp_rates(u8 *buf, const __le16 *rates)
{
	int count = 0;
	u8 rate;

	buf[0] = WLAN_EID_SUPP_RATES;
	while (count < 5) {
		/* Only the low byte is kept (u8 assignment). */
		rate = le16_to_cpu(rates[count]);
		/* NULL terminated */
		if (rate == 0x0)
			break;
		buf[2 + count] = rate;
		count++;
	}
	buf[1] = count;

	return count + 2;
}
/* Build 802.11 Supported Rates (and, if needed, Extended Supported Rates)
 * IEs from Prism firmware scan data.
 *
 * The firmware reports up to 10 zero-terminated rates.  The first 8 go into
 * the Supported Rates element; any remainder goes into an Extended Supported
 * Rates element placed directly after it.  Returns the total number of bytes
 * written to @buf (at most 14).
 */
static int prism_build_supp_rates(u8 *buf, const u8 *rates)
{
	int i;
	int len;

	buf[0] = WLAN_EID_SUPP_RATES;
	for (i = 0; i < 8; i++) {
		/* NULL terminated */
		if (rates[i] == 0x0)
			break;
		buf[i + 2] = rates[i];
	}
	buf[1] = i;
	len = i + 2;

	/* We might still have another 2 rates, which need to go in
	 * extended supported rates */
	if (i == 8 && rates[i] > 0) {
		buf[10] = WLAN_EID_EXT_SUPP_RATES;
		for (; i < 10; i++) {
			/* NULL terminated */
			if (rates[i] == 0x0)
				break;
			/* Rate data starts after the 2-byte extended IE header
			 * at buf[10..11]; the previous "i + 2" offset clobbered
			 * that header. */
			buf[i + 4] = rates[i];
		}
		buf[11] = i - 8;
		len = i + 4;
	}

	/* Previously "i + 4" was returned whenever i == 8, counting an extended
	 * IE even when none was written; len tracks only bytes actually set. */
	return len;
}
/* Translate one firmware hostscan atom into a cfg80211 BSS entry.
 *
 * Rebuilds the SSID and rate IEs from the firmware's fixed-layout report and
 * hands them to cfg80211.  Atoms on channels unknown to the wiphy are dropped.
 */
static void orinoco_add_hostscan_result(struct orinoco_private *priv,
					const union hermes_scan_info *bss)
{
	struct wiphy *wiphy = priv_to_wiphy(priv);
	struct ieee80211_channel *channel;
	struct cfg80211_bss *cbss;
	u8 *ie;
	/* Worst case: SSID IE (2 + 32) + Supported Rates (2 + 8) +
	 * Extended Supported Rates (2 + 2) = 48 bytes.  The previous 46-byte
	 * buffer was 2 bytes short of a full SSID plus 10 Prism rates. */
	u8 ie_buf[48];
	u64 timestamp;
	s32 signal;
	u16 capability;
	u16 beacon_interval;
	int ie_len;
	int freq;
	int len;

	/* NOTE(review): len comes straight from the firmware; the a.essid
	 * field is assumed to hold at most 32 bytes — confirm and clamp if
	 * the hardware can report more. */
	len = le16_to_cpu(bss->a.essid_len);

	/* Reconstruct SSID and bitrate IEs to pass up */
	ie_buf[0] = WLAN_EID_SSID;
	ie_buf[1] = len;
	memcpy(&ie_buf[2], bss->a.essid, len);

	ie = ie_buf + len + 2;
	ie_len = ie_buf[1] + 2;
	switch (priv->firmware_type) {
	case FIRMWARE_TYPE_SYMBOL:
		ie_len += symbol_build_supp_rates(ie, bss->s.rates);
		break;
	case FIRMWARE_TYPE_INTERSIL:
		ie_len += prism_build_supp_rates(ie, bss->p.rates);
		break;
	case FIRMWARE_TYPE_AGERE:
	default:
		break;
	}

	freq = ieee80211_dsss_chan_to_freq(le16_to_cpu(bss->a.channel));
	channel = ieee80211_get_channel(wiphy, freq);
	if (!channel) {
		printk(KERN_DEBUG "Invalid channel designation %04X(%04X)",
		       bss->a.channel, freq);
		return;	/* Then ignore it for now */
	}

	timestamp = 0;
	capability = le16_to_cpu(bss->a.capabilities);
	beacon_interval = le16_to_cpu(bss->a.beacon_interv);
	signal = SIGNAL_TO_MBM(le16_to_cpu(bss->a.level));

	cbss = cfg80211_inform_bss(wiphy, channel, bss->a.bssid, timestamp,
				   capability, beacon_interval, ie_buf, ie_len,
				   signal, GFP_KERNEL);
	cfg80211_put_bss(cbss);
}
/* Translate one Agere extended-scan result into a cfg80211 BSS entry.
 *
 * @bss: the firmware report, followed by the raw beacon IEs in bss->data
 * @len: total length of @bss including the trailing IEs
 */
void orinoco_add_extscan_result(struct orinoco_private *priv,
				struct agere_ext_scan_info *bss,
				size_t len)
{
	struct wiphy *wiphy = priv_to_wiphy(priv);
	struct ieee80211_channel *channel;
	struct cfg80211_bss *cbss;
	const u8 *ie;
	u64 timestamp;
	s32 signal;
	u16 capability;
	u16 beacon_interval;
	size_t ie_len;
	int chan, freq;

	ie_len = len - sizeof(*bss);
	/* The DS Params IE carries the channel number; fall back to 0 when
	 * the frame does not include one. */
	ie = cfg80211_find_ie(WLAN_EID_DS_PARAMS, bss->data, ie_len);
	chan = ie ? ie[2] : 0;
	freq = ieee80211_dsss_chan_to_freq(chan);
	channel = ieee80211_get_channel(wiphy, freq);
	if (!channel) {
		/* Mirror the hostscan path: drop results on channels the wiphy
		 * does not know about instead of handing a NULL channel to
		 * cfg80211_inform_bss(). */
		printk(KERN_DEBUG "Invalid channel designation %04X(%04X)",
		       chan, freq);
		return;
	}

	timestamp = le64_to_cpu(bss->timestamp);
	capability = le16_to_cpu(bss->capabilities);
	beacon_interval = le16_to_cpu(bss->beacon_interval);
	ie = bss->data;
	signal = SIGNAL_TO_MBM(bss->level);

	cbss = cfg80211_inform_bss(wiphy, channel, bss->bssid, timestamp,
				   capability, beacon_interval, ie, ie_len,
				   signal, GFP_KERNEL);
	cfg80211_put_bss(cbss);
}
/* Parse a buffer of firmware hostscan results and report each entry to
 * cfg80211, then complete any outstanding scan request.
 *
 * The per-entry ("atom") size and the data offset depend on the firmware
 * flavor; malformed buffers abort the scan.
 */
void orinoco_add_hostscan_results(struct orinoco_private *priv,
				  unsigned char *buf,
				  size_t len)
{
	int offset;		/* In the scan data */
	size_t atom_len;
	bool abort = false;

	switch (priv->firmware_type) {
	case FIRMWARE_TYPE_AGERE:
		atom_len = sizeof(struct agere_scan_apinfo);
		offset = 0;
		break;
	case FIRMWARE_TYPE_SYMBOL:
		/* Lack of documentation necessitates this hack.
		 * Different firmwares have 68 or 76 byte long atoms.
		 * We try modulo first. If the length divides by both,
		 * we check what would be the channel in the second
		 * frame for a 68-byte atom. 76-byte atoms have 0 there.
		 * Valid channel cannot be 0. */
		if (len % 76)
			atom_len = 68;
		else if (len % 68)
			atom_len = 76;
		else if (len >= 1292 && buf[68] == 0)
			atom_len = 76;
		else
			atom_len = 68;
		offset = 0;
		break;
	case FIRMWARE_TYPE_INTERSIL:
		offset = 4;
		if (priv->has_hostscan) {
			/* Hostscan firmware prefixes the buffer with the atom size. */
			atom_len = le16_to_cpup((__le16 *)buf);
			/* Sanity check for atom_len */
			if (atom_len < sizeof(struct prism2_scan_apinfo)) {
				printk(KERN_ERR "%s: Invalid atom_len in scan "
				       "data: %zu\n", priv->ndev->name,
				       atom_len);
				abort = true;
				goto scan_abort;
			}
		} else
			atom_len = offsetof(struct prism2_scan_apinfo, atim);
		break;
	default:
		abort = true;
		goto scan_abort;
	}

	/* Check that we got a whole number of atoms */
	if ((len - offset) % atom_len) {
		printk(KERN_ERR "%s: Unexpected scan data length %zu, "
		       "atom_len %zu, offset %d\n", priv->ndev->name, len,
		       atom_len, offset);
		abort = true;
		goto scan_abort;
	}

	/* Process the entries one by one */
	for (; offset + atom_len <= len; offset += atom_len) {
		union hermes_scan_info *atom;

		atom = (union hermes_scan_info *) (buf + offset);
		orinoco_add_hostscan_result(priv, atom);
	}

scan_abort:
	/* Complete the pending scan request whether or not parsing succeeded. */
	if (priv->scan_request) {
		cfg80211_scan_done(priv->scan_request, abort);
		priv->scan_request = NULL;
	}
}
/* Report scan completion (or abort) to cfg80211 and drop our reference to
 * the outstanding scan request, if any. */
void orinoco_scan_done(struct orinoco_private *priv, bool abort)
{
	if (priv->scan_request) {
		cfg80211_scan_done(priv->scan_request, abort);
		priv->scan_request = NULL;
	}
}
| {
"pile_set_name": "Github"
} |
# encoding: utf-8
module Adhearsion
module Rayo
module Component
class Record < ComponentNode
register :record, :record
# Media directions a recording may capture.
VALID_DIRECTIONS = [:duplex, :send, :recv].freeze
# @return [String] the codec to use for recording
attribute :format
# @return [Integer] Controls how long the recognizer should wait after the end of the prompt for the caller to speak before sending a Recorder event.
attribute :initial_timeout, Integer
# @return [Integer] Controls the length of a period of silence after callers have spoken to conclude they finished.
attribute :final_timeout, Integer
# @return [Integer] Indicates the maximum duration for the recording.
attribute :max_duration, Integer
# @return [true, false] Indicates whether record will be preceded with a beep.
attribute :start_beep, Boolean
# @return [true, false] Indicates whether record will be followed by a beep.
attribute :stop_beep, Boolean
# @return [true, false] Whether subsequent record will start in PAUSE mode.
attribute :start_paused, Boolean
# @return [Symbol] the direction of media to be recorded.
attribute :direction, Symbol
def direction=(direction)
if direction && !VALID_DIRECTIONS.include?(direction.to_sym)
raise ArgumentError, "Invalid Direction (#{direction}), use: #{VALID_DIRECTIONS*' '}"
end
super
end
# @return [true, false] wether to mix audio down or not
attribute :mix, Boolean
def rayo_attributes
{
'format' => format,
'initial-timeout' => initial_timeout,
'final-timeout' => final_timeout,
'max-duration' => max_duration,
'start-beep' => start_beep,
'stop-beep' => stop_beep,
'start-paused' => start_paused,
'direction' => direction,
'mix' => mix
}
end
state_machine :state do
event :paused do
transition :executing => :paused
end
event :resumed do
transition :paused => :executing
end
end
# Pauses a running Record
#
# @return [Command::Record::Pause] an Rayo pause message for the current Record
#
# @example
# record_obj.pause_action.to_xml
#
# returns:
# <pause xmlns="urn:xmpp:rayo:record:1"/>
def pause_action
Pause.new :component_id => component_id, :target_call_id => target_call_id
end
##
# Sends an Rayo pause message for the current Record
#
def pause!
raise InvalidActionError, "Cannot pause a Record that is not executing" unless executing?
pause_action.tap do |action|
result = write_action action
paused! if result
end
end
##
# Create an Rayo resume message for the current Record
#
# @return [Command::Record::Resume] an Rayo resume message
#
# @example
# record_obj.resume_action.to_xml
#
# returns:
# <resume xmlns="urn:xmpp:rayo:record:1"/>
def resume_action
Resume.new :component_id => component_id, :target_call_id => target_call_id
end
##
# Sends an Rayo resume message for the current Record
#
def resume!
raise InvalidActionError, "Cannot resume a Record that is not paused." unless paused?
resume_action.tap do |action|
result = write_action action
resumed! if result
end
end
##
# Directly returns the recording for the component
# @return [Adhearsion::Rayo::Component::Record::Recording] The recording object
#
def recording
complete_event.recording
end
##
# Directly returns the recording URI for the component
# @return [String] The recording URI
#
def recording_uri
recording.uri
end
class Pause < CommandNode # :nodoc:
register :pause, :record
end
class Resume < CommandNode # :nodoc:
register :resume, :record
end
class Recording < Event
register :recording, :record_complete
attribute :uri
attribute :duration, Integer
attribute :size, Integer
end
class Complete
class MaxDuration < Event::Complete::Reason
register :'max-duration', :record_complete
end
class InitialTimeout < Event::Complete::Reason
register :'initial-timeout', :record_complete
end
class FinalTimeout < Event::Complete::Reason
register :'final-timeout', :record_complete
end
end
end
end
end
end
| {
"pile_set_name": "Github"
} |
'use strict';
var dbm;
var type;
var seed;
/**
* We receive the dbmigrate dependency from dbmigrate initially.
* This enables us to not have to rely on NODE_PATH.
*/
exports.setup = function (options, seedLink) {
dbm = options.dbmigrate;
type = dbm.dataType;
seed = seedLink;
};
exports.up = function (db) {
return Promise.all([
db.runSql(`update community set reference_id = 'dos' where community_name = 'U.S. Department of State Student Internship Program (Unpaid)'`),
db.runSql(`update community set reference_id = 'vsfs' where community_name = 'Virtual Student Federal Service (VSFS)'`),
]);
};
exports.down = function (db) {
return null;
};
exports._meta = {
'version': 1,
};
| {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.5.0_11) on Mon Jan 19 00:08:30 CET 2009 -->
<TITLE>
GeoSchedulerApp
</TITLE>
<META NAME="keywords" CONTENT="it.eng.spagobi.engines.geo.application.GeoSchedulerApp class">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
parent.document.title="GeoSchedulerApp";
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/GeoSchedulerApp.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV CLASS
<A HREF="../../../../../../it/eng/spagobi/engines/geo/application/GeoSchedulerApp.DumbJob.html" title="class in it.eng.spagobi.engines.geo.application"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?it/eng/spagobi/engines/geo/application/GeoSchedulerApp.html" target="_top"><B>FRAMES</B></A>
<A HREF="GeoSchedulerApp.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: <A HREF="#nested_class_summary">NESTED</A> | FIELD | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | <A HREF="#constructor_detail">CONSTR</A> | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<!-- ======== START OF CLASS DATA ======== -->
<H2>
<FONT SIZE="-1">
it.eng.spagobi.engines.geo.application</FONT>
<BR>
Class GeoSchedulerApp</H2>
<PRE>
java.lang.Object
<IMG SRC="../../../../../../resources/inherit.gif" ALT="extended by "><B>it.eng.spagobi.engines.geo.application.GeoSchedulerApp</B>
</PRE>
<HR>
<DL>
<DT><PRE>public class <B>GeoSchedulerApp</B><DT>extends java.lang.Object</DL>
</PRE>
<P>
The Class GeoSchedulerApp.
<P>
<P>
<DL>
<DT><B>Author:</B></DT>
<DD>Andrea Gioia (andrea.gioia@eng.it)</DD>
</DL>
<HR>
<P>
<!-- ======== NESTED CLASS SUMMARY ======== -->
<A NAME="nested_class_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Nested Class Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../../it/eng/spagobi/engines/geo/application/GeoSchedulerApp.DumbJob.html" title="class in it.eng.spagobi.engines.geo.application">GeoSchedulerApp.DumbJob</A></B></CODE>
<BR>
The Class DumbJob.</TD>
</TR>
</TABLE>
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<A NAME="constructor_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Constructor Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><B><A HREF="../../../../../../it/eng/spagobi/engines/geo/application/GeoSchedulerApp.html#GeoSchedulerApp()">GeoSchedulerApp</A></B>()</CODE>
<BR>
</TD>
</TR>
</TABLE>
<!-- ========== METHOD SUMMARY =========== -->
<A NAME="method_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Method Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static void</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../../it/eng/spagobi/engines/geo/application/GeoSchedulerApp.html#main(java.lang.String[])">main</A></B>(java.lang.String[] args)</CODE>
<BR>
The main method.</TD>
</TR>
</TABLE>
<A NAME="methods_inherited_from_class_java.lang.Object"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left"><B>Methods inherited from class java.lang.Object</B></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</CODE></TD>
</TR>
</TABLE>
<P>
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<A NAME="constructor_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Constructor Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="GeoSchedulerApp()"><!-- --></A><H3>
GeoSchedulerApp</H3>
<PRE>
public <B>GeoSchedulerApp</B>()</PRE>
<DL>
</DL>
<!-- ============ METHOD DETAIL ========== -->
<A NAME="method_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Method Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="main(java.lang.String[])"><!-- --></A><H3>
main</H3>
<PRE>
public static void <B>main</B>(java.lang.String[] args)
throws SchedulerException</PRE>
<DL>
<DD>The main method.
<P>
<DD><DL>
<DT><B>Parameters:</B><DD><CODE>args</CODE> - the arguments
<DT><B>Throws:</B>
<DD><CODE>SchedulerException</CODE> - the scheduler exception</DL>
</DD>
</DL>
<!-- ========= END OF CLASS DATA ========= -->
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/GeoSchedulerApp.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV CLASS
<A HREF="../../../../../../it/eng/spagobi/engines/geo/application/GeoSchedulerApp.DumbJob.html" title="class in it.eng.spagobi.engines.geo.application"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?it/eng/spagobi/engines/geo/application/GeoSchedulerApp.html" target="_top"><B>FRAMES</B></A>
<A HREF="GeoSchedulerApp.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: <A HREF="#nested_class_summary">NESTED</A> | FIELD | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | <A HREF="#constructor_detail">CONSTR</A> | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
</BODY>
</HTML>
| {
"pile_set_name": "Github"
} |
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
/*
* This file is part of the LibreOffice project.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
#include "precompiled_sb.hxx"
/* vim:set shiftwidth=4 softtabstop=4 expandtab: */
| {
"pile_set_name": "Github"
} |
<?php
/**
* Base HTTP transport
*
* @package Requests
* @subpackage Transport
*/
/**
 * Base HTTP transport
 *
 * Contract that every concrete transport backend must implement. A
 * transport performs the actual network I/O for one or many requests and
 * reports (via {@see Requests_Transport::test()}) whether it is usable in
 * the current PHP environment.
 *
 * @package Requests
 * @subpackage Transport
 */
interface Requests_Transport {
	/**
	 * Perform a request
	 *
	 * @param string $url URL to request
	 * @param array $headers Associative array of request headers
	 * @param string|array $data Data to send either as the POST body, or as parameters in the URL for a GET/HEAD
	 * @param array $options Request options, see {@see Requests::response()} for documentation
	 * @return string Raw HTTP result
	 */
	public function request($url, $headers = array(), $data = array(), $options = array());

	/**
	 * Send multiple requests simultaneously
	 *
	 * @param array $requests Request data (array of 'url', 'headers', 'data', 'options') as per {@see Requests_Transport::request}
	 * @param array $options Global options, see {@see Requests::response()} for documentation
	 * @return array Array of Requests_Response objects (may contain Requests_Exception or string responses as well)
	 */
	public function request_multiple($requests, $options);

	/**
	 * Self-test whether the transport can be used
	 *
	 * Called statically so availability can be checked before instantiation.
	 *
	 * @return bool
	 */
	public static function test();
}
"pile_set_name": "Github"
} |
/*
* RHQ Management Platform
* Copyright (C) 2005-2013 Red Hat, Inc.
* All rights reserved.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation version 2 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
*/
package org.rhq.enterprise.server.scheduler.jobs;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.rhq.core.domain.resource.ResourceType;
import org.rhq.enterprise.server.inventory.InventoryManagerLocal;
import org.rhq.enterprise.server.util.LookupUtil;
/**
 * Quartz job that permanently removes resource types which have previously
 * been marked as deleted, once the inventory manager reports them ready for
 * final removal. Logs per-type and whole-pass timing at debug level.
 */
public class PurgeResourceTypesJob extends AbstractStatefulJob {
    private static final Log LOG = LogFactory.getLog(PurgeResourceTypesJob.class);

    /** Accumulates how many types were purged and how long the pass took. */
    private static class DeletionStats {
        int numDeleted;
        long deletionTime;

        @Override
        public String toString() {
            return PurgeResourceTypesJob.class.getSimpleName() + ": deleted " + numDeleted + " in " + deletionTime + " ms";
        }
    }

    @Override
    public void executeJobCode(JobExecutionContext context) throws JobExecutionException {
        InventoryManagerLocal inventoryMgr = LookupUtil.getInventoryManager();
        DeletionStats stats = new DeletionStats();

        long passStart = System.currentTimeMillis();
        for (ResourceType doomedType : inventoryMgr.getDeletedTypes()) {
            // Skip types that are deleted but still referenced elsewhere.
            if (!inventoryMgr.isReadyForPermanentRemoval(doomedType)) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug(doomedType + " has been deleted but is not yet ready for permanent removal.");
                }
                continue;
            }

            if (LOG.isDebugEnabled()) {
                LOG.debug("Permanently removing " + doomedType);
            }
            long removalStart = System.currentTimeMillis();
            inventoryMgr.purgeDeletedResourceType(doomedType);
            long removalEnd = System.currentTimeMillis();
            stats.numDeleted++;
            if (LOG.isDebugEnabled()) {
                LOG.debug("Deleted " + doomedType + " in " + (removalEnd - removalStart) + " ms");
            }
        }
        stats.deletionTime = System.currentTimeMillis() - passStart;
        LOG.debug(stats);
    }
}
| {
"pile_set_name": "Github"
} |
//
// Written by Martin Steinegger
//
// Wrapper for KSW2 aligner.
// Local banded nucleotide aligner
//
#include <Parameters.h>
#include <NucleotideMatrix.h>
#include "StripedSmithWaterman.h"
#include "Util.h"
#include "SubstitutionMatrix.h"
#include "Debug.h"
// Wrapper around the KSW2 aligner performing local, banded alignment of a
// nucleotide query against target sequences (see file header).
class BandedNucleotideAligner {
public:

    // subMat: nucleotide substitution matrix used for scoring.
    // maxSequenceLength: upper bound on sequence length — presumably used to
    //   size the internal buffers below; TODO confirm in the implementation.
    // gapo/gape: gap penalties (names suggest open/extend — confirm).
    // zdrop: Z-drop threshold forwarded to the aligner (confirm).
    BandedNucleotideAligner(BaseMatrix *subMat, size_t maxSequenceLength, int gapo, int gape, int zdrop);

    ~BandedNucleotideAligner();

    // Set the query sequence for subsequent align() calls.
    void initQuery(Sequence *q);

    // Align the current query against targetSeqObj around the given diagonal.
    // reverse selects the reverse-complement query; backtrace receives the
    // alignment backtrace and aaIds the number of identities.
    s_align align(Sequence * targetSeqObj, int diagonal, bool reverse,
                  std::string & backtrace, int & aaIds, EvalueComputation * evaluer, bool wrappedScoring=false);

private:
    SubstitutionMatrix::FastMatrix fastMatrix;
    uint8_t * targetSeqRev;           // reversed target buffer
    int targetSeqRevDataLen;          // allocated length of targetSeqRev
    uint8_t * querySeq;
    uint8_t * querySeqRev;            // reversed query buffer
    int querySeqRevDataLen;           // allocated length of querySeqRev
    uint8_t * queryRevCompSeq;        // reverse-complement query (numeric)
    char * queryRevCompCharSeq;       // reverse-complement query (characters)
    uint8_t * queryRevCompSeqRev;
    Sequence * querySeqObj;           // current query set by initQuery()
    int8_t * mat;                     // flat scoring matrix passed to KSW2
    NucleotideMatrix * subMat;
    //    uint32_t * cigar;
    int gapo;
    int gape;
    int zdrop;
};
| {
"pile_set_name": "Github"
} |
package io.ebeaninternal.server.deploy;
/**
 * Collects (List/Set/Map) of elements.
 * <p>
 * Implementations accumulate elements one at a time — plain elements for
 * list/set style collections, key/value pairs for maps — and then expose the
 * populated container via {@link #collection()}.
 */
public interface ElementCollector {

  /**
   * Add an element.
   */
  void addElement(Object element);

  /**
   * Add an element keyed by the given key (map-style collection).
   */
  void addKeyValue(Object key, Object element);

  /**
   * Return the populated collection/map.
   */
  Object collection();
}
| {
"pile_set_name": "Github"
} |
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package gensupport is an internal implementation detail used by code
// generated by the google-api-go-generator tool.
//
// This package may be modified at any time without regard for backwards
// compatibility. It should not be used directly by API users.
package gensupport
| {
"pile_set_name": "Github"
} |
//
// WindowsConsoleChannel.cpp
//
// Library: Foundation
// Package: Logging
// Module: WindowsConsoleChannel
//
// Copyright (c) 2007, Applied Informatics Software Engineering GmbH.
// and Contributors.
//
// SPDX-License-Identifier: BSL-1.0
//
#include "Poco/WindowsConsoleChannel.h"
#include "Poco/Message.h"
#include "Poco/UnicodeConverter.h"
#include "Poco/String.h"
#include "Poco/Exception.h"
namespace Poco {
// Acquire the process's standard output handle and detect redirection.
WindowsConsoleChannel::WindowsConsoleChannel():
	_isFile(false),
	_hConsole(INVALID_HANDLE_VALUE)
{
	_hConsole = GetStdHandle(STD_OUTPUT_HANDLE);
	// check whether the console has been redirected:
	// GetConsoleMode() returns 0 for handles that are not real consoles
	// (e.g. when stdout goes to a file or pipe)
	DWORD mode;
	_isFile = (GetConsoleMode(_hConsole, &mode) == 0);
}
// No explicit cleanup is performed here.
WindowsConsoleChannel::~WindowsConsoleChannel()
{
}
// Write the message text, followed by CRLF, to standard output. A real
// console receives UTF-16 via WriteConsoleW(); redirected output gets the
// raw bytes via WriteFile().
void WindowsConsoleChannel::log(const Message& msg)
{
	std::string text(msg.getText());
	text.append("\r\n");

	DWORD written;
	if (!_isFile)
	{
		std::wstring utext;
		UnicodeConverter::toUTF16(text, utext);
		WriteConsoleW(_hConsole, utext.data(), static_cast<DWORD>(utext.size()), &written, NULL);
	}
	else
	{
		WriteFile(_hConsole, text.data(), static_cast<DWORD>(text.size()), &written, NULL);
	}
}
// Acquire stdout, detect redirection, and set up the per-priority colors.
WindowsColorConsoleChannel::WindowsColorConsoleChannel():
	_enableColors(true),
	_isFile(false),
	_hConsole(INVALID_HANDLE_VALUE)
{
	_hConsole = GetStdHandle(STD_OUTPUT_HANDLE);
	// check whether the console has been redirected:
	// GetConsoleMode() returns 0 for handles that are not real consoles
	DWORD mode;
	_isFile = (GetConsoleMode(_hConsole, &mode) == 0);
	initColors();
}
// No explicit cleanup is performed here.
WindowsColorConsoleChannel::~WindowsColorConsoleChannel()
{
}
void WindowsColorConsoleChannel::log(const Message& msg)
{
std::string text = msg.getText();
text += "\r\n";
if (_enableColors && !_isFile)
{
WORD attr = _colors[0];
attr &= 0xFFF0;
attr |= _colors[msg.getPriority()];
SetConsoleTextAttribute(_hConsole, attr);
}
if (_isFile)
{
DWORD written;
WriteFile(_hConsole, text.data(), static_cast<DWORD>(text.size()), &written, NULL);
}
else
{
std::wstring utext;
UnicodeConverter::toUTF16(text, utext);
DWORD written;
WriteConsoleW(_hConsole, utext.data(), static_cast<DWORD>(utext.size()), &written, NULL);
}
if (_enableColors && !_isFile)
{
SetConsoleTextAttribute(_hConsole, _colors[0]);
}
}
// Set a channel property. Supported names are "enableColors" (boolean,
// "true"/"false" compared case-insensitively) and one "<level>Color"
// property per message priority; any other name is forwarded to
// Channel::setProperty(). Color values are parsed by parseColor() and may
// therefore throw InvalidArgumentException.
void WindowsColorConsoleChannel::setProperty(const std::string& name, const std::string& value)
{
	if (name == "enableColors")
	{
		_enableColors = icompare(value, "true") == 0;
	}
	else if (name == "traceColor")
	{
		_colors[Message::PRIO_TRACE] = parseColor(value);
	}
	else if (name == "debugColor")
	{
		_colors[Message::PRIO_DEBUG] = parseColor(value);
	}
	else if (name == "informationColor")
	{
		_colors[Message::PRIO_INFORMATION] = parseColor(value);
	}
	else if (name == "noticeColor")
	{
		_colors[Message::PRIO_NOTICE] = parseColor(value);
	}
	else if (name == "warningColor")
	{
		_colors[Message::PRIO_WARNING] = parseColor(value);
	}
	else if (name == "errorColor")
	{
		_colors[Message::PRIO_ERROR] = parseColor(value);
	}
	else if (name == "criticalColor")
	{
		_colors[Message::PRIO_CRITICAL] = parseColor(value);
	}
	else if (name == "fatalColor")
	{
		_colors[Message::PRIO_FATAL] = parseColor(value);
	}
	else
	{
		Channel::setProperty(name, value);
	}
}
// Get a channel property. Mirrors setProperty(): "enableColors" returns
// "true"/"false", each "<level>Color" property returns the color name via
// formatColor(), and any other name is forwarded to Channel::getProperty().
std::string WindowsColorConsoleChannel::getProperty(const std::string& name) const
{
	if (name == "enableColors")
	{
		return _enableColors ? "true" : "false";
	}
	else if (name == "traceColor")
	{
		return formatColor(_colors[Message::PRIO_TRACE]);
	}
	else if (name == "debugColor")
	{
		return formatColor(_colors[Message::PRIO_DEBUG]);
	}
	else if (name == "informationColor")
	{
		return formatColor(_colors[Message::PRIO_INFORMATION]);
	}
	else if (name == "noticeColor")
	{
		return formatColor(_colors[Message::PRIO_NOTICE]);
	}
	else if (name == "warningColor")
	{
		return formatColor(_colors[Message::PRIO_WARNING]);
	}
	else if (name == "errorColor")
	{
		return formatColor(_colors[Message::PRIO_ERROR]);
	}
	else if (name == "criticalColor")
	{
		return formatColor(_colors[Message::PRIO_CRITICAL]);
	}
	else if (name == "fatalColor")
	{
		return formatColor(_colors[Message::PRIO_FATAL]);
	}
	else
	{
		return Channel::getProperty(name);
	}
}
// Translate a case-insensitive color name into a console character
// attribute. "default" yields the channel's current default attribute
// (_colors[0]); an unknown name throws InvalidArgumentException.
WORD WindowsColorConsoleChannel::parseColor(const std::string& color) const
{
	if (icompare(color, "default") == 0)
		return _colors[0];
	else if (icompare(color, "black") == 0)
		return CC_BLACK;
	else if (icompare(color, "red") == 0)
		return CC_RED;
	else if (icompare(color, "green") == 0)
		return CC_GREEN;
	else if (icompare(color, "brown") == 0)
		return CC_BROWN;
	else if (icompare(color, "blue") == 0)
		return CC_BLUE;
	else if (icompare(color, "magenta") == 0)
		return CC_MAGENTA;
	else if (icompare(color, "cyan") == 0)
		return CC_CYAN;
	else if (icompare(color, "gray") == 0)
		return CC_GRAY;
	else if (icompare(color, "darkGray") == 0)
		return CC_DARKGRAY;
	else if (icompare(color, "lightRed") == 0)
		return CC_LIGHTRED;
	else if (icompare(color, "lightGreen") == 0)
		return CC_LIGHTGREEN;
	else if (icompare(color, "yellow") == 0)
		return CC_YELLOW;
	else if (icompare(color, "lightBlue") == 0)
		return CC_LIGHTBLUE;
	else if (icompare(color, "lightMagenta") == 0)
		return CC_LIGHTMAGENTA;
	else if (icompare(color, "lightCyan") == 0)
		return CC_LIGHTCYAN;
	else if (icompare(color, "white") == 0)
		return CC_WHITE;
	else throw InvalidArgumentException("Invalid color value", color);
}
// Inverse of parseColor(): map a console attribute back to its color name.
// Attributes that do not correspond to a known CC_* constant (including a
// "default" taken from the console at startup) yield "invalid".
std::string WindowsColorConsoleChannel::formatColor(WORD color) const
{
	switch (color)
	{
	case CC_BLACK:        return "black";
	case CC_RED:          return "red";
	case CC_GREEN:        return "green";
	case CC_BROWN:        return "brown";
	case CC_BLUE:         return "blue";
	case CC_MAGENTA:      return "magenta";
	case CC_CYAN:         return "cyan";
	case CC_GRAY:         return "gray";
	case CC_DARKGRAY:     return "darkGray";
	case CC_LIGHTRED:     return "lightRed";
	case CC_LIGHTGREEN:   return "lightGreen";
	case CC_YELLOW:       return "yellow";
	case CC_LIGHTBLUE:    return "lightBlue";
	case CC_LIGHTMAGENTA: return "lightMagenta";
	case CC_LIGHTCYAN:    return "lightCyan";
	case CC_WHITE:        return "white";
	default:              return "invalid";
	}
}
// Initialize the color table: slot 0 holds the "default" attribute (the
// console's current attributes, or white when redirected), the remaining
// slots hold the per-priority defaults.
void WindowsColorConsoleChannel::initColors()
{
	if (_isFile)
	{
		_colors[0] = CC_WHITE;
	}
	else
	{
		CONSOLE_SCREEN_BUFFER_INFO csbi;
		GetConsoleScreenBufferInfo(_hConsole, &csbi);
		_colors[0] = csbi.wAttributes;
	}

	_colors[Message::PRIO_FATAL]       = CC_LIGHTRED;
	_colors[Message::PRIO_CRITICAL]    = CC_LIGHTRED;
	_colors[Message::PRIO_ERROR]       = CC_LIGHTRED;
	_colors[Message::PRIO_WARNING]     = CC_YELLOW;
	_colors[Message::PRIO_NOTICE]      = _colors[0];
	_colors[Message::PRIO_INFORMATION] = _colors[0];
	_colors[Message::PRIO_DEBUG]       = CC_GRAY;
	_colors[Message::PRIO_TRACE]       = CC_GRAY;
}
} // namespace Poco
| {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_144) on Wed Sep 06 08:23:27 PDT 2017 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>BeanAttributesConfigurator (Java(TM) EE 8 Specification APIs)</title>
<meta name="date" content="2017-09-06">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="BeanAttributesConfigurator (Java(TM) EE 8 Specification APIs)";
}
}
catch(err) {
}
//-->
var methods = {"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6};
var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]};
var altColor = "altColor";
var rowColor = "rowColor";
var tableTab = "tableTab";
var activeTableTab = "activeTableTab";
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/BeanAttributesConfigurator.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../../javax/enterprise/inject/spi/configurator/AnnotatedTypeConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" target="_top">Frames</a></li>
<li><a href="BeanAttributesConfigurator.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">javax.enterprise.inject.spi.configurator</div>
<h2 title="Interface BeanAttributesConfigurator" class="title">Interface BeanAttributesConfigurator<T></h2>
</div>
<div class="contentContainer">
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt><span class="paramLabel">Type Parameters:</span></dt>
<dd><code>T</code> - the class of the bean instance</dd>
</dl>
<hr>
<br>
<pre>public interface <span class="typeNameLabel">BeanAttributesConfigurator<T></span></pre>
<div class="block">This API is a helper to configure a new <a href="../../../../../javax/enterprise/inject/spi/BeanAttributes.html" title="interface in javax.enterprise.inject.spi"><code>BeanAttributes</code></a> instance.
The CDI container must provide an implementation of this interface.
This configurator is not thread safe and shall not be used concurrently.</div>
<dl>
<dt><span class="simpleTagLabel">Since:</span></dt>
<dd>2.0</dd>
<dt><span class="simpleTagLabel">Author:</span></dt>
<dd>Antoine Sabot-Durand</dd>
<dt><span class="seeLabel">See Also:</span></dt>
<dd><a href="../../../../../javax/enterprise/inject/spi/ProcessBeanAttributes.html#configureBeanAttributes--"><code>ProcessBeanAttributes.configureBeanAttributes()</code></a></dd>
</dl>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd"> </span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd"> </span></span><span id="t3" class="tableTab"><span><a href="javascript:show(4);">Abstract Methods</a></span><span class="tabEnd"> </span></span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr id="i0" class="altColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#addQualifier-java.lang.annotation.Annotation-">addQualifier</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a> qualifier)</code>
<div class="block">Add a qualifier to the configured bean</div>
</td>
</tr>
<tr id="i1" class="rowColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#addQualifiers-java.lang.annotation.Annotation...-">addQualifiers</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>... qualifiers)</code>
<div class="block">Add qualifiers to the bean.</div>
</td>
</tr>
<tr id="i2" class="altColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#addQualifiers-java.util.Set-">addQualifiers</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a><<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>> qualifiers)</code>
<div class="block">Add qualifiers to the bean.</div>
</td>
</tr>
<tr id="i3" class="rowColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#addStereotype-java.lang.Class-">addStereotype</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a><? extends <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>> stereotype)</code>
<div class="block">Add a stereotype to the configured bean</div>
</td>
</tr>
<tr id="i4" class="altColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#addStereotypes-java.util.Set-">addStereotypes</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a><<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a><? extends <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>>> stereotypes)</code>
<div class="block">Add stereotypes to the configured bean</div>
</td>
</tr>
<tr id="i5" class="rowColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#addTransitiveTypeClosure-java.lang.reflect.Type-">addTransitiveTypeClosure</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Type.html?is-external=true" title="class or interface in java.lang.reflect">Type</a> type)</code>
<div class="block">Adds an unrestricted set of bean types for the given type as if it represented a bean class of a managed bean.</div>
</td>
</tr>
<tr id="i6" class="altColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#addType-java.lang.reflect.Type-">addType</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Type.html?is-external=true" title="class or interface in java.lang.reflect">Type</a> type)</code>
<div class="block">Add a type to the bean types</div>
</td>
</tr>
<tr id="i7" class="rowColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#addType-javax.enterprise.util.TypeLiteral-">addType</a></span>(<a href="../../../../../javax/enterprise/util/TypeLiteral.html" title="class in javax.enterprise.util">TypeLiteral</a><?> typeLiteral)</code>
<div class="block">Add a type to the bean types</div>
</td>
</tr>
<tr id="i8" class="altColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#addTypes-java.util.Set-">addTypes</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a><<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Type.html?is-external=true" title="class or interface in java.lang.reflect">Type</a>> types)</code>
<div class="block">Add types to the bean types</div>
</td>
</tr>
<tr id="i9" class="rowColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#addTypes-java.lang.reflect.Type...-">addTypes</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Type.html?is-external=true" title="class or interface in java.lang.reflect">Type</a>... types)</code>
<div class="block">Add types to the bean types</div>
</td>
</tr>
<tr id="i10" class="altColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#alternative-boolean-">alternative</a></span>(boolean value)</code>
<div class="block">Change the alternative status of the configured bean.</div>
</td>
</tr>
<tr id="i11" class="rowColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#name-java.lang.String-">name</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name)</code>
<div class="block">Set the name of the configured bean</div>
</td>
</tr>
<tr id="i12" class="altColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#qualifiers-java.lang.annotation.Annotation...-">qualifiers</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>... qualifiers)</code>
<div class="block">Replace all qualifiers.</div>
</td>
</tr>
<tr id="i13" class="rowColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#qualifiers-java.util.Set-">qualifiers</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a><<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>> qualifiers)</code>
<div class="block">Replace all qualifiers.</div>
</td>
</tr>
<tr id="i14" class="altColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#scope-java.lang.Class-">scope</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a><? extends <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>> scope)</code>
<div class="block">Replace Bean scope</div>
</td>
</tr>
<tr id="i15" class="rowColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#stereotypes-java.util.Set-">stereotypes</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a><<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a><? extends <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>>> stereotypes)</code>
<div class="block">Replace stereotypes on the configured bean</div>
</td>
</tr>
<tr id="i16" class="altColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#types-java.util.Set-">types</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a><<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Type.html?is-external=true" title="class or interface in java.lang.reflect">Type</a>> types)</code>
<div class="block">Replace bean types</div>
</td>
</tr>
<tr id="i17" class="rowColor">
<td class="colFirst"><code><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>></code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html#types-java.lang.reflect.Type...-">types</a></span>(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Type.html?is-external=true" title="class or interface in java.lang.reflect">Type</a>... types)</code>
<div class="block">Replace bean types</div>
</td>
</tr>
</table>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method.detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="addType-java.lang.reflect.Type-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addType</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> addType(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Type.html?is-external=true" title="class or interface in java.lang.reflect">Type</a> type)</pre>
<div class="block">Add a type to the bean types</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>type</code> - the type to add</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="addType-javax.enterprise.util.TypeLiteral-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addType</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> addType(<a href="../../../../../javax/enterprise/util/TypeLiteral.html" title="class in javax.enterprise.util">TypeLiteral</a><?> typeLiteral)</pre>
<div class="block">Add a type to the bean types</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>typeLiteral</code> - the type to add</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="addTypes-java.lang.reflect.Type...-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addTypes</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> addTypes(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Type.html?is-external=true" title="class or interface in java.lang.reflect">Type</a>... types)</pre>
<div class="block">Add types to the bean types</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>types</code> - types to add</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="addTypes-java.util.Set-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addTypes</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> addTypes(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a><<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Type.html?is-external=true" title="class or interface in java.lang.reflect">Type</a>> types)</pre>
<div class="block">Add types to the bean types</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>types</code> - types to add</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="addTransitiveTypeClosure-java.lang.reflect.Type-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addTransitiveTypeClosure</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> addTransitiveTypeClosure(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Type.html?is-external=true" title="class or interface in java.lang.reflect">Type</a> type)</pre>
<div class="block">Adds an unrestricted set of bean types for the given type as if it represented a bean class of a managed bean.
Illegal bean types are omitted.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>type</code> - to build the closure from</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="types-java.lang.reflect.Type...-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>types</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> types(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Type.html?is-external=true" title="class or interface in java.lang.reflect">Type</a>... types)</pre>
<div class="block">Replace bean types</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>types</code> - the types of the configured bean</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="types-java.util.Set-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>types</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> types(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a><<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Type.html?is-external=true" title="class or interface in java.lang.reflect">Type</a>> types)</pre>
<div class="block">Replace bean types</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>types</code> - the types of the configured bean</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="scope-java.lang.Class-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>scope</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> scope(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a><? extends <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>> scope)</pre>
<div class="block">Replace Bean scope</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>scope</code> - new scope for the configured bean</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="addQualifier-java.lang.annotation.Annotation-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addQualifier</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> addQualifier(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a> qualifier)</pre>
<div class="block">Add a qualifier to the configured bean</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>qualifier</code> - qualifier to add</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="addQualifiers-java.lang.annotation.Annotation...-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addQualifiers</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> addQualifiers(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>... qualifiers)</pre>
<div class="block">Add qualifiers to the bean.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>qualifiers</code> - qualifiers to add</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="addQualifiers-java.util.Set-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addQualifiers</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> addQualifiers(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a><<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>> qualifiers)</pre>
<div class="block">Add qualifiers to the bean.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>qualifiers</code> - qualifiers to add</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="qualifiers-java.lang.annotation.Annotation...-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>qualifiers</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> qualifiers(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>... qualifiers)</pre>
<div class="block">Replace all qualifiers.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>qualifiers</code> - qualifiers for the build bean</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="qualifiers-java.util.Set-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>qualifiers</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> qualifiers(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a><<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>> qualifiers)</pre>
<div class="block">Replace all qualifiers.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>qualifiers</code> - for the configured bean</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="addStereotype-java.lang.Class-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addStereotype</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> addStereotype(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a><? extends <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>> stereotype)</pre>
<div class="block">Add a stereotype to the configured bean</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>stereotype</code> - stereotype to add</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="addStereotypes-java.util.Set-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>addStereotypes</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> addStereotypes(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a><<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a><? extends <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>>> stereotypes)</pre>
<div class="block">Add stereotypes to the configured bean</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>stereotypes</code> - stereotypes to add</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="stereotypes-java.util.Set-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>stereotypes</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> stereotypes(<a href="http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a><<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a><? extends <a href="http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true" title="class or interface in java.lang.annotation">Annotation</a>>> stereotypes)</pre>
<div class="block">Replace stereotypes on the configured bean</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>stereotypes</code> - for the configured bean</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="name-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>name</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> name(<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name)</pre>
<div class="block">Set the name of the configured bean</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>name</code> - name for the configured bean</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
<a name="alternative-boolean-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>alternative</h4>
<pre><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator">BeanAttributesConfigurator</a><<a href="../../../../../javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" title="type parameter in BeanAttributesConfigurator">T</a>> alternative(boolean value)</pre>
<div class="block">Change the alternative status of the configured bean.
By default the configured bean is not an alternative.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>value</code> - value for alternative property</dd>
<dt><span class="returnLabel">Returns:</span></dt>
<dd>self</dd>
</dl>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/BeanAttributesConfigurator.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../../javax/enterprise/inject/spi/configurator/AnnotatedTypeConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../../javax/enterprise/inject/spi/configurator/BeanConfigurator.html" title="interface in javax.enterprise.inject.spi.configurator"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?javax/enterprise/inject/spi/configurator/BeanAttributesConfigurator.html" target="_top">Frames</a></li>
<li><a href="BeanAttributesConfigurator.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 1996-2017, <a href="http://www.oracle.com">Oracle</a> and/or its affiliates. All Rights Reserved. Use is subject to <a href="../../../../../doc-files/speclicense.html" target="_top">license terms</a>.</small></p>
</body>
</html>
| {
"pile_set_name": "Github"
} |
* Fork the repository
* Clone it on your PC
* `npm install` or `yarn install`
* Make changes, commit them, and open a pull request (PR)
### Notes
* Please don't use jQuery or jQuery-based plugins, since there are many pure Vue alternatives.
This project uses [vue-cli 3](https://github.com/vuejs/vue-cli).
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef UI_OAK_OAK_EXPORT_H_
#define UI_OAK_OAK_EXPORT_H_
#pragma once

// Defines OAK_EXPORT so that functionality implemented by the oak module
// can be exported to consumers.
//
// In a component (shared-library) build, symbols must be explicitly
// exported from the DLL/shared object that implements them and imported
// everywhere else; in a static build the macro expands to nothing.

#if defined(COMPONENT_BUILD)
#if defined(WIN32)

#if defined(OAK_IMPLEMENTATION)
#define OAK_EXPORT __declspec(dllexport)
#else
#define OAK_EXPORT __declspec(dllimport)
#endif  // defined(OAK_IMPLEMENTATION)

#else  // defined(WIN32)
#define OAK_EXPORT __attribute__((visibility("default")))
#endif

#else  // defined(COMPONENT_BUILD)
#define OAK_EXPORT
#endif

#endif  // UI_OAK_OAK_EXPORT_H_
| {
"pile_set_name": "Github"
} |
/*
DO NOT EDIT THIS FILE!
it has been automatically created by scripts/dev/credits from
the information found in the various ext/.../CREDITS and
sapi/.../CREDITS files
if you want to change an entry you have to edit the appropriate
CREDITS file instead
*/
/* Auto-generated credits data: one CREDIT_LINE("Module", "Authors") entry
   per bundled extension/driver, kept in alphabetical order by module name.
   Edit the per-extension CREDITS files instead of this list (see header). */
CREDIT_LINE("BC Math", "Andi Gutmans");
CREDIT_LINE("Bzip2", "Sterling Hughes");
CREDIT_LINE("Calendar", "Shane Caraveo, Colin Viebrock, Hartmut Holzgraefe, Wez Furlong");
CREDIT_LINE("COM and .Net", "Wez Furlong");
CREDIT_LINE("ctype", "Hartmut Holzgraefe");
CREDIT_LINE("cURL", "Sterling Hughes");
CREDIT_LINE("Date/Time Support", "Derick Rethans");
CREDIT_LINE("DB-LIB (MS SQL, Sybase)", "Wez Furlong, Frank M. Kromann, Adam Baratz");
CREDIT_LINE("DBA", "Sascha Schumann, Marcus Boerger");
CREDIT_LINE("DOM", "Christian Stocker, Rob Richards, Marcus Boerger");
CREDIT_LINE("enchant", "Pierre-Alain Joye, Ilia Alshanetsky");
CREDIT_LINE("EXIF", "Rasmus Lerdorf, Marcus Boerger");
CREDIT_LINE("FFI", "Dmitry Stogov");
CREDIT_LINE("fileinfo", "Ilia Alshanetsky, Pierre Alain Joye, Scott MacVicar, Derick Rethans, Anatol Belski");
CREDIT_LINE("Firebird driver for PDO", "Ard Biesheuvel");
CREDIT_LINE("FTP", "Stefan Esser, Andrew Skalski");
CREDIT_LINE("GD imaging", "Rasmus Lerdorf, Stig Bakken, Jim Winstead, Jouni Ahto, Ilia Alshanetsky, Pierre-Alain Joye, Marcus Boerger, Mark Randall");
CREDIT_LINE("GetText", "Alex Plotnick");
CREDIT_LINE("GNU GMP support", "Stanislav Malyshev");
CREDIT_LINE("Iconv", "Rui Hirokawa, Stig Bakken, Moriyoshi Koizumi");
CREDIT_LINE("IMAP", "Rex Logan, Mark Musone, Brian Wang, Kaj-Michael Lang, Antoni Pamies Olive, Rasmus Lerdorf, Andrew Skalski, Chuck Hagenbuch, Daniel R Kalowsky");
CREDIT_LINE("Input Filter", "Rasmus Lerdorf, Derick Rethans, Pierre-Alain Joye, Ilia Alshanetsky");
CREDIT_LINE("Internationalization", "Ed Batutis, Vladimir Iordanov, Dmitry Lakhtyuk, Stanislav Malyshev, Vadim Savchuk, Kirti Velankar");
CREDIT_LINE("JSON", "Jakub Zelenka, Omar Kilani, Scott MacVicar");
CREDIT_LINE("LDAP", "Amitay Isaacs, Eric Warnke, Rasmus Lerdorf, Gerrit Thomson, Stig Venaas");
CREDIT_LINE("LIBXML", "Christian Stocker, Rob Richards, Marcus Boerger, Wez Furlong, Shane Caraveo");
CREDIT_LINE("Multibyte String Functions", "Tsukada Takuya, Rui Hirokawa");
CREDIT_LINE("MySQL driver for PDO", "George Schlossnagle, Wez Furlong, Ilia Alshanetsky, Johannes Schlueter");
CREDIT_LINE("MySQLi", "Zak Greant, Georg Richter, Andrey Hristov, Ulf Wendel");
CREDIT_LINE("MySQLnd", "Andrey Hristov, Ulf Wendel, Georg Richter, Johannes Schlüter");
CREDIT_LINE("OCI8", "Stig Bakken, Thies C. Arntzen, Andy Sautins, David Benson, Maxim Maletsky, Harald Radi, Antony Dovgal, Andi Gutmans, Wez Furlong, Christopher Jones, Oracle Corporation");
CREDIT_LINE("ODBC driver for PDO", "Wez Furlong");
CREDIT_LINE("ODBC", "Stig Bakken, Andreas Karajannis, Frank M. Kromann, Daniel R. Kalowsky");
CREDIT_LINE("Opcache", "Andi Gutmans, Zeev Suraski, Stanislav Malyshev, Dmitry Stogov, Xinchen Hui");
CREDIT_LINE("OpenSSL", "Stig Venaas, Wez Furlong, Sascha Kettler, Scott MacVicar, Eliot Lear");
CREDIT_LINE("Oracle (OCI) driver for PDO", "Wez Furlong");
CREDIT_LINE("pcntl", "Jason Greene, Arnaud Le Blanc");
CREDIT_LINE("Perl Compatible Regexps", "Andrei Zmievski");
CREDIT_LINE("PHP Archive", "Gregory Beaver, Marcus Boerger");
CREDIT_LINE("PHP Data Objects", "Wez Furlong, Marcus Boerger, Sterling Hughes, George Schlossnagle, Ilia Alshanetsky");
CREDIT_LINE("PHP hash", "Sara Golemon, Rasmus Lerdorf, Stefan Esser, Michael Wallner, Scott MacVicar");
CREDIT_LINE("Posix", "Kristian Koehntopp");
CREDIT_LINE("PostgreSQL driver for PDO", "Edin Kadribasic, Ilia Alshanetsky");
CREDIT_LINE("PostgreSQL", "Jouni Ahto, Zeev Suraski, Yasuo Ohgaki, Chris Kings-Lynne");
CREDIT_LINE("Pspell", "Vlad Krupin");
CREDIT_LINE("Readline", "Thies C. Arntzen");
CREDIT_LINE("Reflection", "Marcus Boerger, Timm Friebe, George Schlossnagle, Andrei Zmievski, Johannes Schlueter");
CREDIT_LINE("Sessions", "Sascha Schumann, Andrei Zmievski");
CREDIT_LINE("Shared Memory Operations", "Slava Poliakov, Ilia Alshanetsky");
CREDIT_LINE("SimpleXML", "Sterling Hughes, Marcus Boerger, Rob Richards");
CREDIT_LINE("SNMP", "Rasmus Lerdorf, Harrie Hazewinkel, Mike Jackson, Steven Lawrance, Johann Hanne, Boris Lytochkin");
CREDIT_LINE("SOAP", "Brad Lafountain, Shane Caraveo, Dmitry Stogov");
CREDIT_LINE("Sockets", "Chris Vandomelen, Sterling Hughes, Daniel Beulshausen, Jason Greene");
CREDIT_LINE("Sodium", "Frank Denis");
CREDIT_LINE("SPL", "Marcus Boerger, Etienne Kneuss");
CREDIT_LINE("SQLite 3.x driver for PDO", "Wez Furlong");
CREDIT_LINE("SQLite3", "Scott MacVicar, Ilia Alshanetsky, Brad Dewar");
CREDIT_LINE("System V Message based IPC", "Wez Furlong");
CREDIT_LINE("System V Semaphores", "Tom May");
CREDIT_LINE("System V Shared Memory", "Christian Cartus");
CREDIT_LINE("tidy", "John Coggeshall, Ilia Alshanetsky");
CREDIT_LINE("tokenizer", "Andrei Zmievski, Johannes Schlueter");
CREDIT_LINE("XML", "Stig Bakken, Thies C. Arntzen, Sterling Hughes");
CREDIT_LINE("XMLReader", "Rob Richards");
CREDIT_LINE("XMLWriter", "Rob Richards, Pierre-Alain Joye");
CREDIT_LINE("XSL", "Christian Stocker, Rob Richards");
CREDIT_LINE("Zip", "Pierre-Alain Joye, Remi Collet");
CREDIT_LINE("Zlib", "Rasmus Lerdorf, Stefan Roehrich, Zeev Suraski, Jade Nicoletti, Michael Wallner");
| {
"pile_set_name": "Github"
} |
#
# The MIT License
# Copyright (c) 2015 Estonian Information System Authority (RIA), Population Register Centre (VRK)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Base class for requests that are associated with processing.
class RequestWithProcessing < Request
  belongs_to :request_processing, :autosave => true, :inverse_of => :requests

  # Approves the processing that the request with the given id belongs to.
  def self.approve(request_id)
    get_request_with_processing(request_id).request_processing.approve()
  end

  # Declines the processing that the request with the given id belongs to.
  def self.decline(request_id)
    get_request_with_processing(request_id).request_processing.decline()
  end

  # Validates this request and attaches it to its processing, creating a
  # new processing when no matching one exists yet, then persists the record.
  # Raises (via save!) if the record cannot be saved.
  def register()
    verify_origin()
    verify_request()

    processing = find_processing
    if processing.nil?
      processing = new_processing
    end

    processing.add_request(self)
    Rails.logger.info("Processing: #{processing.status}")
    save!
  end

  # Finds the processing for this request.
  # Abstract: must be implemented by subclasses.
  # NOTE: was `throw "..."`, which in Ruby raises UncaughtThrowError
  # (throw/catch is control flow, not error signalling); NotImplementedError
  # is the conventional way to mark an abstract method.
  def find_processing
    raise NotImplementedError, "This method must be reimplemented in a subclass"
  end

  # Creates new instance of processing of the correct class for this request.
  # Abstract: must be implemented by subclasses.
  def new_processing
    raise NotImplementedError, "This method must be reimplemented in a subclass"
  end

  # A request can be revoked only while its processing is still waiting
  # and the request originated from the central server.
  def can_revoke?
    request_processing.status.eql?(RequestProcessing::WAITING) &&
        origin.eql?(Request::CENTER)
  end

  # Returns the id of the request that revokes this one.
  # Abstract: must be implemented by subclasses.
  def get_revoking_request_id
    raise NotImplementedError, "This method must be reimplemented in a subclass"
  end

  private

  # Loads the request with the given id and verifies that it is a request
  # type that carries a processing. Raises with a descriptive message
  # otherwise.
  def self.get_request_with_processing(request_id)
    request = Request.find(request_id)
    unless request
      raise "No request with id '#{request_id}' found"
    end
    unless request.respond_to?(:request_processing)
      raise "Request with id '#{request_id}' does not have processing"
    end
    return request
  end
end
| {
"pile_set_name": "Github"
} |
# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import logging
from synapse.api.constants import PresenceState
from synapse.api.errors import Codes, StoreError, SynapseError
from synapse.api.filtering import DEFAULT_FILTER_COLLECTION, FilterCollection
from synapse.events.utils import (
format_event_for_client_v2_without_room_id,
format_event_raw,
)
from synapse.handlers.presence import format_user_presence_state
from synapse.handlers.sync import SyncConfig
from synapse.http.servlet import RestServlet, parse_boolean, parse_integer, parse_string
from synapse.types import StreamToken
from synapse.util import json_decoder
from ._base import client_patterns, set_timeline_upper_limit
logger = logging.getLogger(__name__)
class SyncRestServlet(RestServlet):
    """
    GET parameters::
        timeout(int): How long to wait for new events in milliseconds.
        since(batch_token): Batch token when asking for incremental deltas.
        set_presence(str): What state the device presence should be set to.
            default is "online".
        filter(filter_id): A filter to apply to the events returned.

    Response JSON::
        {
          "next_batch": // batch token for the next /sync
          "presence": // presence data for the user.
          "rooms": {
            "join": { // Joined rooms being updated.
              "${room_id}": { // Id of the room being updated
                "event_map": // Map of EventID -> event JSON.
                "timeline": { // The recent events in the room if gap is "true"
                  "limited": // Was the per-room event limit exceeded?
                             // otherwise the next events in the room.
                  "events": [] // list of EventIDs in the "event_map".
                  "prev_batch": // back token for getting previous events.
                }
                "state": {"events": []} // list of EventIDs updating the
                // current state to be what it should
                // be at the end of the batch.
                "ephemeral": {"events": []} // list of event objects
              }
            },
            "invite": {}, // Invited rooms being updated.
            "leave": {} // Archived rooms being updated.
          }
        }
    """

    PATTERNS = client_patterns("/sync$")
    # Presence states a client is allowed to request via ?set_presence=.
    ALLOWED_PRESENCE = {"online", "offline", "unavailable"}

    def __init__(self, hs):
        # hs is the HomeServer object; pull out the handlers/stores we need.
        super().__init__()
        self.hs = hs
        self.auth = hs.get_auth()
        self.sync_handler = hs.get_sync_handler()
        self.clock = hs.get_clock()
        self.filtering = hs.get_filtering()
        self.presence_handler = hs.get_presence_handler()
        self._server_notices_sender = hs.get_server_notices_sender()
        self._event_serializer = hs.get_event_client_serializer()

    async def on_GET(self, request):
        """Handle a /sync request: parse the query parameters, wait (up to
        `timeout` ms) for new events for the authenticated user, and return
        the encoded sync response as a (status, JSON-dict) pair."""
        if b"from" in request.args:
            # /events used to use 'from', but /sync uses 'since'.
            # Lets be helpful and whine if we see a 'from'.
            raise SynapseError(
                400, "'from' is not a valid query parameter. Did you mean 'since'?"
            )

        requester = await self.auth.get_user_by_req(request, allow_guest=True)
        user = requester.user
        device_id = requester.device_id

        timeout = parse_integer(request, "timeout", default=0)
        since = parse_string(request, "since")
        set_presence = parse_string(
            request,
            "set_presence",
            default="online",
            allowed_values=self.ALLOWED_PRESENCE,
        )
        filter_id = parse_string(request, "filter", default=None)
        full_state = parse_boolean(request, "full_state", default=False)

        logger.debug(
            "/sync: user=%r, timeout=%r, since=%r, "
            "set_presence=%r, filter_id=%r, device_id=%r",
            user,
            timeout,
            since,
            set_presence,
            filter_id,
            device_id,
        )

        # Key used by the sync handler to deduplicate identical concurrent
        # sync requests from the same device.
        request_key = (user, timeout, since, filter_id, full_state, device_id)

        if filter_id is None:
            filter_collection = DEFAULT_FILTER_COLLECTION
        elif filter_id.startswith("{"):
            # The "filter" parameter carries an inline JSON filter definition
            # rather than the id of a previously-stored filter.
            try:
                filter_object = json_decoder.decode(filter_id)
                set_timeline_upper_limit(
                    filter_object, self.hs.config.filter_timeline_limit
                )
            except Exception:
                raise SynapseError(400, "Invalid filter JSON")
            self.filtering.check_valid_filter(filter_object)
            filter_collection = FilterCollection(filter_object)
        else:
            # Look up a stored filter by id.
            try:
                filter_collection = await self.filtering.get_user_filter(
                    user.localpart, filter_id
                )
            except StoreError as err:
                if err.code != 404:
                    raise
                # fix up the description and errcode to be more useful
                raise SynapseError(400, "No such filter", errcode=Codes.INVALID_PARAM)

        sync_config = SyncConfig(
            user=user,
            filter_collection=filter_collection,
            is_guest=requester.is_guest,
            request_key=request_key,
            device_id=device_id,
        )

        if since is not None:
            since_token = StreamToken.from_string(since)
        else:
            since_token = None

        # send any outstanding server notices to the user.
        await self._server_notices_sender.on_user_syncing(user.to_string())

        affect_presence = set_presence != PresenceState.OFFLINE

        if affect_presence:
            await self.presence_handler.set_state(
                user, {"presence": set_presence}, True
            )

        # Mark the user as syncing for the duration of the (long-poll) wait.
        context = await self.presence_handler.user_syncing(
            user.to_string(), affect_presence=affect_presence
        )
        with context:
            sync_result = await self.sync_handler.wait_for_sync_for_user(
                sync_config,
                since_token=since_token,
                timeout=timeout,
                full_state=full_state,
            )

        # the client may have disconnected by now; don't bother to serialize the
        # response if so.
        if request._disconnected:
            logger.info("Client has disconnected; not serializing response.")
            return 200, {}

        time_now = self.clock.time_msec()
        response_content = await self.encode_response(
            time_now, sync_result, requester.access_token_id, filter_collection
        )

        logger.debug("Event formatting complete")
        return 200, response_content

    async def encode_response(self, time_now, sync_result, access_token_id, filter):
        """Turn a sync result into the JSON-serialisable /sync response body,
        formatting events according to the filter's requested event format."""
        logger.debug("Formatting events in sync response")
        if filter.event_format == "client":
            event_formatter = format_event_for_client_v2_without_room_id
        elif filter.event_format == "federation":
            event_formatter = format_event_raw
        else:
            raise Exception("Unknown event format %s" % (filter.event_format,))

        joined = await self.encode_joined(
            sync_result.joined,
            time_now,
            access_token_id,
            filter.event_fields,
            event_formatter,
        )

        invited = await self.encode_invited(
            sync_result.invited, time_now, access_token_id, event_formatter
        )

        archived = await self.encode_archived(
            sync_result.archived,
            time_now,
            access_token_id,
            filter.event_fields,
            event_formatter,
        )

        logger.debug("building sync response dict")
        return {
            "account_data": {"events": sync_result.account_data},
            "to_device": {"events": sync_result.to_device},
            "device_lists": {
                "changed": list(sync_result.device_lists.changed),
                "left": list(sync_result.device_lists.left),
            },
            "presence": SyncRestServlet.encode_presence(sync_result.presence, time_now),
            "rooms": {"join": joined, "invite": invited, "leave": archived},
            "groups": {
                "join": sync_result.groups.join,
                "invite": sync_result.groups.invite,
                "leave": sync_result.groups.leave,
            },
            "device_one_time_keys_count": sync_result.device_one_time_keys_count,
            "next_batch": sync_result.next_batch.to_string(),
        }

    @staticmethod
    def encode_presence(events, time_now):
        """Encode a list of presence events as the "presence" section of the
        sync response (a dict with one "events" list of m.presence events)."""
        return {
            "events": [
                {
                    "type": "m.presence",
                    "sender": event.user_id,
                    "content": format_user_presence_state(
                        event, time_now, include_user_id=False
                    ),
                }
                for event in events
            ]
        }

    async def encode_joined(
        self, rooms, time_now, token_id, event_fields, event_formatter
    ):
        """
        Encode the joined rooms in a sync result

        Args:
            rooms(list[synapse.handlers.sync.JoinedSyncResult]): list of sync
                results for rooms this user is joined to
            time_now(int): current time - used as a baseline for age
                calculations
            token_id(int): ID of the user's auth token - used for namespacing
                of transaction IDs
            event_fields(list<str>): List of event fields to include. If empty,
                all fields will be returned.
            event_formatter (func[dict]): function to convert from federation format
                to client format
        Returns:
            dict[str, dict[str, object]]: the joined rooms list, in our
                response format
        """
        joined = {}
        for room in rooms:
            joined[room.room_id] = await self.encode_room(
                room,
                time_now,
                token_id,
                joined=True,
                only_fields=event_fields,
                event_formatter=event_formatter,
            )

        return joined

    async def encode_invited(self, rooms, time_now, token_id, event_formatter):
        """
        Encode the invited rooms in a sync result

        Args:
            rooms(list[synapse.handlers.sync.InvitedSyncResult]): list of
                sync results for rooms this user is joined to
            time_now(int): current time - used as a baseline for age
                calculations
            token_id(int): ID of the user's auth token - used for namespacing
                of transaction IDs
            event_formatter (func[dict]): function to convert from federation format
                to client format
        Returns:
            dict[str, dict[str, object]]: the invited rooms list, in our
                response format
        """
        invited = {}
        for room in rooms:
            invite = await self._event_serializer.serialize_event(
                room.invite,
                time_now,
                token_id=token_id,
                event_format=event_formatter,
                is_invite=True,
            )
            # The invite itself is appended to the stripped "invite_room_state"
            # so clients get both in a single list.
            unsigned = dict(invite.get("unsigned", {}))
            invite["unsigned"] = unsigned
            invited_state = list(unsigned.pop("invite_room_state", []))
            invited_state.append(invite)
            invited[room.room_id] = {"invite_state": {"events": invited_state}}

        return invited

    async def encode_archived(
        self, rooms, time_now, token_id, event_fields, event_formatter
    ):
        """
        Encode the archived rooms in a sync result

        Args:
            rooms (list[synapse.handlers.sync.ArchivedSyncResult]): list of
                sync results for rooms this user is joined to
            time_now(int): current time - used as a baseline for age
                calculations
            token_id(int): ID of the user's auth token - used for namespacing
                of transaction IDs
            event_fields(list<str>): List of event fields to include. If empty,
                all fields will be returned.
            event_formatter (func[dict]): function to convert from federation format
                to client format
        Returns:
            dict[str, dict[str, object]]: The invited rooms list, in our
                response format
        """
        joined = {}
        for room in rooms:
            joined[room.room_id] = await self.encode_room(
                room,
                time_now,
                token_id,
                joined=False,
                only_fields=event_fields,
                event_formatter=event_formatter,
            )

        return joined

    async def encode_room(
        self, room, time_now, token_id, joined, only_fields, event_formatter
    ):
        """
        Args:
            room (JoinedSyncResult|ArchivedSyncResult): sync result for a
                single room
            time_now (int): current time - used as a baseline for age
                calculations
            token_id (int): ID of the user's auth token - used for namespacing
                of transaction IDs
            joined (bool): True if the user is joined to this room - will mean
                we handle ephemeral events
            only_fields(list<str>): Optional. The list of event fields to include.
            event_formatter (func[dict]): function to convert from federation format
                to client format
        Returns:
            dict[str, object]: the room, encoded in our response format
        """

        def serialize(events):
            # Returns an awaitable; callers await the result.
            return self._event_serializer.serialize_events(
                events,
                time_now=time_now,
                # We don't bundle "live" events, as otherwise clients
                # will end up double counting annotations.
                bundle_aggregations=False,
                token_id=token_id,
                event_format=event_formatter,
                only_event_fields=only_fields,
            )

        state_dict = room.state
        timeline_events = room.timeline.events

        state_events = state_dict.values()

        for event in itertools.chain(state_events, timeline_events):
            # We've had bug reports that events were coming down under the
            # wrong room.
            if event.room_id != room.room_id:
                logger.warning(
                    "Event %r is under room %r instead of %r",
                    event.event_id,
                    room.room_id,
                    event.room_id,
                )

        serialized_state = await serialize(state_events)
        serialized_timeline = await serialize(timeline_events)

        account_data = room.account_data

        result = {
            "timeline": {
                "events": serialized_timeline,
                "prev_batch": room.timeline.prev_batch.to_string(),
                "limited": room.timeline.limited,
            },
            "state": {"events": serialized_state},
            "account_data": {"events": account_data},
        }

        if joined:
            # Ephemeral events and notification counts only make sense for
            # rooms the user is currently joined to.
            ephemeral_events = room.ephemeral
            result["ephemeral"] = {"events": ephemeral_events}
            result["unread_notifications"] = room.unread_notifications
            result["summary"] = room.summary
            result["org.matrix.msc2654.unread_count"] = room.unread_count

        return result
def register_servlets(hs, http_server):
    """Instantiate the sync servlet and attach it to the HTTP server."""
    servlet = SyncRestServlet(hs)
    servlet.register(http_server)
| {
"pile_set_name": "Github"
} |
/* -*-c++-*- */
/* osgEarth - Dynamic map generation toolkit for OpenSceneGraph
* Copyright 2020 Pelican Mapping
* http://osgearth.org
*
* osgEarth is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>
*/
#ifndef OSGEARTH_FEATURES_FEATURE_IMAGE_LAYER
#define OSGEARTH_FEATURES_FEATURE_IMAGE_LAYER 1
#include <osgEarth/ImageLayer>
#include <osgEarth/LayerReference>
#include <osgEarth/FeatureSource>
#include <osgEarth/StyleSheet>
namespace osgEarth
{
namespace Util
{
class OSGEARTH_EXPORT FeatureImageRenderer
{
public:
bool render(
const TileKey& key,
Session* session,
const StyleSheet* styles,
osg::Image* target,
ProgressCallback* progress) const;
protected:
virtual bool renderFeaturesForStyle(
Session* session,
const Style& style,
const FeatureList& features,
const GeoExtent& imageExtent,
osg::Image* out_image ) const = 0;
osg::ref_ptr<FeatureFilterChain> _filterChain;
private:
bool queryAndRenderFeaturesForStyle(
Session* session,
const Style& style,
const Query& query,
const GeoExtent& imageExtent,
osg::Image* out_image,
ProgressCallback* progress) const;
void getFeatures(
Session* session,
const Query& query,
const GeoExtent& imageExtent,
FeatureList& features,
ProgressCallback* progress) const;
};
}
    /**
     * Rasterizes feature data into an image layer.
     */
    class OSGEARTH_EXPORT FeatureImageLayer : public osgEarth::ImageLayer,
                                              public osgEarth::Util::FeatureImageRenderer
    {
    public: // serialization
        //! Serializable options for this layer.
        class OSGEARTH_EXPORT Options : public ImageLayer::Options {
        public:
            META_LayerOptions(osgEarth, Options, ImageLayer::Options);
            OE_OPTION_LAYER(FeatureSource, featureSource);  // source of feature data
            OE_OPTION_VECTOR(ConfigOptions, filters);       // feature filter configurations
            OE_OPTION_LAYER(StyleSheet, styleSheet);        // styling for the features
            OE_OPTION(double, gamma);                       // gamma correction value
            virtual Config getConfig() const;
        private:
            void fromConfig( const Config& conf );
        };

    public:
        META_Layer(osgEarth, FeatureImageLayer, Options, osgEarth::ImageLayer, FeatureImage);

        //! Sets the feature source to read feature data from.
        void setFeatureSource(FeatureSource* source);
        inline FeatureSource* getFeatureSource() const;

        //! Style sheet to use to render feature data
        void setStyleSheet(StyleSheet* styles);
        inline StyleSheet* getStyleSheet() const;

    public: // FeatureImageRenderer

        //! Implements the FeatureImageRenderer hook: draws one style's
        //! features into out_image.
        virtual bool renderFeaturesForStyle(
            Session* session,
            const Style& style,
            const FeatureList& features,
            const GeoExtent& imageExtent,
            osg::Image* out_image ) const;

    public: // ImageLayer

        // Opens the layer and returns a status
        virtual Status openImplementation();

        //! Creates the raster for the given tile key.
        virtual GeoImage createImageImplementation(const TileKey& key, ProgressCallback* progress) const;

    protected: // Layer

        // Called by Map when it adds this layer
        virtual void addedToMap(const class Map*);

        // Called by Map when it removes this layer
        virtual void removedFromMap(const class Map*);

        // post-ctor initialization
        virtual void init();

    protected:

        virtual ~FeatureImageLayer() { }

    private:
        osg::ref_ptr<Session> _session;                    // rendering session state
        osg::ref_ptr<const FeatureProfile> _featureProfile; // profile of the feature source
        optional<double> _gamma;                           // cached gamma option

        //! Refreshes _session after map/feature-source changes.
        void updateSession();

        //! Internal overload taking extra build data.
        bool renderFeaturesForStyle(
            Session* session,
            const Style& style,
            const FeatureList& features,
            osg::Referenced* buildData,
            const GeoExtent& imageExtent,
            osg::Image* image ) const;

        osg::Image* allocateImage() const;

        bool preProcess(osg::Image* image) const;

        bool postProcess(osg::Image* image) const;
    };
// template/inline impls .................................................
    // Inline accessors: unwrap the layer references held in options().
    FeatureSource* FeatureImageLayer::getFeatureSource() const { return options().featureSource().getLayer(); }
    StyleSheet* FeatureImageLayer::getStyleSheet() const { return options().styleSheet().getLayer(); }
} // namespace osgEarth
OSGEARTH_SPECIALIZE_CONFIG(osgEarth::FeatureImageLayer::Options);
#endif // OSGEARTH_FEATURES_FEATURE_IMAGE_LAYER
| {
"pile_set_name": "Github"
} |
#
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
import service
import lex
@service.api
def put(request, name):
    """Service API handler: publish a new version of the named Lex bot.

    Delegates to lex.create_bot_version(name) and returns its status
    in the response body. (lex module semantics not visible here.)
    """
    return {
        'status' : lex.create_bot_version(name)
    }
@service.api
def delete(request, name, version):
    """Service API handler: delete the given version of the named Lex bot.

    Delegates to lex.delete_bot(name, version) and returns its status
    in the response body. (lex module semantics not visible here.)
    """
    return {
        'status' : lex.delete_bot(name, version)
    }
| {
"pile_set_name": "Github"
} |
---
description: "Automatically generated file. DO NOT MODIFY"
---
```javascript
const options = {
authProvider,
};
const client = Client.init(options);
let res = await client.api('/print/operations/{id}')
.version('beta')
.get();
``` | {
"pile_set_name": "Github"
} |
package restful
// Copyright 2013 Ernest Micklei. All rights reserved.
// Use of this source code is governed by a license
// that can be found in the LICENSE file.
import (
"bufio"
"errors"
"net"
"net/http"
)
// DefaultResponseMimeType is DEPRECATED, use DefaultResponseContentType(mime)
var DefaultResponseMimeType string

// PrettyPrintResponses controls the indentation feature of XML and JSON serialization
var PrettyPrintResponses = true

// Response is a wrapper on the actual http ResponseWriter
// It provides several convenience methods to prepare and write response content.
type Response struct {
	http.ResponseWriter
	requestAccept string // mime-type what the Http Request says it wants to receive
	routeProduces []string // mime-types what the Route says it can produce
	statusCode    int // HTTP status code that has been written explicitly (if zero then net/http has written 200)
	contentLength int // number of bytes written for the response body
	prettyPrint   bool // controls the indentation feature of XML and JSON serialization. It is initialized using var PrettyPrintResponses.
	err           error // err property is kept when WriteError is called
	hijacker      http.Hijacker // if underlying ResponseWriter supports it; nil otherwise
}

// NewResponse creates a new response based on a http ResponseWriter.
// The hijacker field stays nil when the writer does not implement
// http.Hijacker; Hijack() then returns an error instead of panicking.
func NewResponse(httpWriter http.ResponseWriter) *Response {
	hijacker, _ := httpWriter.(http.Hijacker)
	return &Response{ResponseWriter: httpWriter, routeProduces: []string{}, statusCode: http.StatusOK, prettyPrint: PrettyPrintResponses, hijacker: hijacker}
}
// DefaultResponseContentType set a default.
// If Accept header matching fails, fall back to this type.
// Valid values are restful.MIME_JSON and restful.MIME_XML
// Example:
// 	restful.DefaultResponseContentType(restful.MIME_JSON)
func DefaultResponseContentType(mime string) {
	DefaultResponseMimeType = mime
}

// InternalServerError writes the StatusInternalServerError header.
// DEPRECATED, use WriteErrorString(http.StatusInternalServerError,reason)
// Note: value receiver, so the returned Response is a copy.
func (r Response) InternalServerError() Response {
	r.WriteHeader(http.StatusInternalServerError)
	return r
}

// Hijack implements the http.Hijacker interface. This expands
// the Response to fulfill http.Hijacker if the underlying
// http.ResponseWriter supports it.
// Returns an error when the underlying writer is not a Hijacker.
func (r *Response) Hijack() (net.Conn, *bufio.ReadWriter, error) {
	if r.hijacker == nil {
		return nil, nil, errors.New("http.Hijacker not implemented by underlying http.ResponseWriter")
	}
	return r.hijacker.Hijack()
}

// PrettyPrint changes whether this response must produce pretty (line-by-line, indented) JSON or XML output.
func (r *Response) PrettyPrint(bePretty bool) {
	r.prettyPrint = bePretty
}

// AddHeader is a shortcut for .Header().Add(header,value)
// The header map is shared with the underlying writer, so adding via the
// value receiver still affects the real response.
func (r Response) AddHeader(header string, value string) Response {
	r.Header().Add(header, value)
	return r
}

// SetRequestAccepts tells the response what Mime-type(s) the HTTP request said it wants to accept. Exposed for testing.
func (r *Response) SetRequestAccepts(mime string) {
	r.requestAccept = mime
}
// EntityWriter returns the registered EntityWriter that the entity (requested resource)
// can write according to what the request wants (Accept) and what the Route can produce or what the restful defaults say.
// If called before WriteEntity and WriteHeader then a false return value can be used to write a 406: Not Acceptable.
func (r *Response) EntityWriter() (EntityReaderWriter, bool) {
	// First pass: exact match between the client's accepted media types
	// (in preference order) and what the route can produce.
	sorted := sortedMimes(r.requestAccept)
	for _, eachAccept := range sorted {
		for _, eachProduce := range r.routeProduces {
			if eachProduce == eachAccept.media {
				if w, ok := entityAccessRegistry.accessorAt(eachAccept.media); ok {
					return w, true
				}
			}
		}
		// A wildcard accept matches the first producible type that has a
		// registered accessor.
		if eachAccept.media == "*/*" {
			for _, each := range r.routeProduces {
				if w, ok := entityAccessRegistry.accessorAt(each); ok {
					return w, true
				}
			}
		}
	}
	// No match above; try the raw Accept value directly (this also covers
	// the case where requestAccept is empty).
	writer, ok := entityAccessRegistry.accessorAt(r.requestAccept)
	if !ok {
		// if not registered then fallback to the defaults (if set)
		if DefaultResponseMimeType == MIME_JSON {
			return entityAccessRegistry.accessorAt(MIME_JSON)
		}
		if DefaultResponseMimeType == MIME_XML {
			return entityAccessRegistry.accessorAt(MIME_XML)
		}
		// Fallback to whatever the route says it can produce.
		// https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
		for _, each := range r.routeProduces {
			if w, ok := entityAccessRegistry.accessorAt(each); ok {
				return w, true
			}
		}
		if trace {
			traceLogger.Printf("no registered EntityReaderWriter found for %s", r.requestAccept)
		}
	}
	return writer, ok
}
// WriteEntity calls WriteHeaderAndEntity with Http Status OK (200)
func (r *Response) WriteEntity(value interface{}) error {
	return r.WriteHeaderAndEntity(http.StatusOK, value)
}

// WriteHeaderAndEntity marshals the value using the representation denoted by the Accept Header and the registered EntityWriters.
// If no Accept header is specified (or */*) then respond with the Content-Type as specified by the first in the Route.Produces.
// If an Accept header is specified then respond with the Content-Type as specified by the first in the Route.Produces that is matched with the Accept header.
// If the value is nil then no response is send except for the Http status. You may want to call WriteHeader(http.StatusNotFound) instead.
// If there is no writer available that can represent the value in the requested MIME type then Http Status NotAcceptable is written.
// Current implementation ignores any q-parameters in the Accept Header.
// Returns an error if the value could not be written on the response.
func (r *Response) WriteHeaderAndEntity(status int, value interface{}) error {
	writer, ok := r.EntityWriter()
	if !ok {
		// No writer can produce an acceptable representation: 406.
		r.WriteHeader(http.StatusNotAcceptable)
		return nil
	}
	return writer.Write(r, status, value)
}
// The convenience writers below bypass Accept-header negotiation and the
// registered EntityReaderWriters entirely.

// WriteAsXml is a convenience method for writing a value in xml (requires Xml tags on the value)
// It uses the standard encoding/xml package for marshalling the value ; not using a registered EntityReaderWriter.
func (r *Response) WriteAsXml(value interface{}) error {
	return writeXML(r, http.StatusOK, MIME_XML, value)
}

// WriteHeaderAndXml is a convenience method for writing a status and value in xml (requires Xml tags on the value)
// It uses the standard encoding/xml package for marshalling the value ; not using a registered EntityReaderWriter.
func (r *Response) WriteHeaderAndXml(status int, value interface{}) error {
	return writeXML(r, status, MIME_XML, value)
}

// WriteAsJson is a convenience method for writing a value in json.
// It uses the standard encoding/json package for marshalling the value ; not using a registered EntityReaderWriter.
func (r *Response) WriteAsJson(value interface{}) error {
	return writeJSON(r, http.StatusOK, MIME_JSON, value)
}

// WriteJson is a convenience method for writing a value in Json with a given Content-Type.
// It uses the standard encoding/json package for marshalling the value ; not using a registered EntityReaderWriter.
func (r *Response) WriteJson(value interface{}, contentType string) error {
	return writeJSON(r, http.StatusOK, contentType, value)
}

// WriteHeaderAndJson is a convenience method for writing the status and a value in Json with a given Content-Type.
// It uses the standard encoding/json package for marshalling the value ; not using a registered EntityReaderWriter.
func (r *Response) WriteHeaderAndJson(status int, value interface{}, contentType string) error {
	return writeJSON(r, status, contentType, value)
}
// WriteError write the http status and the error string on the response. err can be nil.
// Note: the given err (not any write failure) is returned so callers can
// conveniently propagate it, e.g. "return resp.WriteError(status, err)".
func (r *Response) WriteError(httpStatus int, err error) error {
	r.err = err
	if err == nil {
		r.WriteErrorString(httpStatus, "")
	} else {
		r.WriteErrorString(httpStatus, err.Error())
	}
	return err
}

// WriteServiceError is a convenience method for a responding with a status and a ServiceError
func (r *Response) WriteServiceError(httpStatus int, err ServiceError) error {
	r.err = err
	return r.WriteHeaderAndEntity(httpStatus, err)
}

// WriteErrorString is a convenience method for an error status with the actual error
func (r *Response) WriteErrorString(httpStatus int, errorReason string) error {
	if r.err == nil {
		// if not called from WriteError
		r.err = errors.New(errorReason)
	}
	r.WriteHeader(httpStatus)
	if _, err := r.Write([]byte(errorReason)); err != nil {
		return err
	}
	return nil
}
// Flush implements http.Flusher interface, which sends any buffered data to the client.
// It is a no-op (apart from an optional trace message) when the underlying
// ResponseWriter does not support flushing.
func (r *Response) Flush() {
	if f, ok := r.ResponseWriter.(http.Flusher); ok {
		f.Flush()
	} else if trace {
		traceLogger.Printf("ResponseWriter %v doesn't support Flush", r)
	}
}

// WriteHeader is overridden to remember the Status Code that has been written.
// Changes to the Header of the response have no effect after this.
func (r *Response) WriteHeader(httpStatus int) {
	r.statusCode = httpStatus
	r.ResponseWriter.WriteHeader(httpStatus)
}
// StatusCode reports the HTTP status code recorded by WriteHeader.
// When no explicit status has been written yet, net/http will have sent
// 200 OK on the first body write, so StatusOK is reported instead.
func (r Response) StatusCode() int {
	if r.statusCode == 0 {
		// WriteHeader was never called; net/http defaults to 200 OK.
		return http.StatusOK
	}
	return r.statusCode
}
// Write writes the data to the connection as part of an HTTP reply.
// Write is part of http.ResponseWriter interface.
// It also accumulates the number of bytes written for ContentLength().
func (r *Response) Write(bytes []byte) (int, error) {
	written, err := r.ResponseWriter.Write(bytes)
	r.contentLength += written
	return written, err
}

// ContentLength returns the number of bytes written for the response content.
// Note that this value is only correct if all data is written through the Response using its Write* methods.
// Data written directly using the underlying http.ResponseWriter is not accounted for.
func (r Response) ContentLength() int {
	return r.contentLength
}

// CloseNotify is part of http.CloseNotifier interface
// NOTE: the type assertion below is unchecked, so this panics when the
// underlying ResponseWriter does not implement http.CloseNotifier.
func (r Response) CloseNotify() <-chan bool {
	return r.ResponseWriter.(http.CloseNotifier).CloseNotify()
}

// Error returns the err created by WriteError
func (r Response) Error() error {
	return r.err
}
| {
"pile_set_name": "Github"
} |
v1/read/aggregate-collation.json
v1/read/aggregate.json
v1/read/aggregate-out.json
v1/read/count-collation.json
v1/read/count-empty.json
v1/read/count.json
v1/read/distinct-collation.json
v1/read/distinct.json
v1/read/find-collation.json
v1/read/find.json
v1/write/bulkWrite-arrayFilters.json
v1/write/bulkWrite-collation.json
v1/write/bulkWrite.json
v1/write/deleteMany-collation.json
v1/write/deleteMany.json
v1/write/deleteOne-collation.json
v1/write/deleteOne.json
v1/write/findOneAndDelete-collation.json
v1/write/findOneAndDelete.json
v1/write/findOneAndReplace-collation.json
v1/write/findOneAndReplace.json
v1/write/findOneAndReplace-upsert.json
v1/write/findOneAndUpdate-arrayFilters.json
v1/write/findOneAndUpdate-collation.json
v1/write/findOneAndUpdate.json
v1/write/insertMany.json
v1/write/insertOne.json
v1/write/replaceOne-collation.json
v1/write/replaceOne.json
v1/write/updateMany-arrayFilters.json
v1/write/updateMany-collation.json
v1/write/updateMany.json
v1/write/updateOne-arrayFilters.json
v1/write/updateOne-collation.json
v1/write/updateOne.json
v2/aggregate-merge.json
v2/aggregate-out-readConcern.json
v2/bulkWrite-arrayFilters.json
v2/bulkWrite-delete-hint-clientError.json
v2/bulkWrite-delete-hint.json
v2/bulkWrite-delete-hint-serverError.json
v2/bulkWrite-update-hint-clientError.json
v2/bulkWrite-update-hint.json
v2/bulkWrite-update-hint-serverError.json
v2/db-aggregate.json
v2/deleteMany-hint-clientError.json
v2/deleteMany-hint.json
v2/deleteMany-hint-serverError.json
v2/deleteOne-hint-clientError.json
v2/deleteOne-hint.json
v2/deleteOne-hint-serverError.json
v2/findOneAndDelete-hint-clientError.json
v2/findOneAndDelete-hint.json
v2/findOneAndDelete-hint-serverError.json
v2/findOneAndReplace-hint-clientError.json
v2/findOneAndReplace-hint.json
v2/findOneAndReplace-hint-serverError.json
v2/findOneAndUpdate-hint-clientError.json
v2/findOneAndUpdate-hint.json
v2/findOneAndUpdate-hint-serverError.json
v2/replaceOne-hint.json
v2/unacknowledged-bulkWrite-delete-hint-clientError.json
v2/unacknowledged-bulkWrite-delete-hint.json
v2/unacknowledged-bulkWrite-update-hint-clientError.json
v2/unacknowledged-bulkWrite-update-hint.json
v2/unacknowledged-deleteMany-hint-clientError.json
v2/unacknowledged-deleteMany-hint.json
v2/unacknowledged-deleteOne-hint-clientError.json
v2/unacknowledged-deleteOne-hint.json
v2/unacknowledged-findOneAndDelete-hint-clientError.json
v2/unacknowledged-findOneAndDelete-hint.json
v2/unacknowledged-findOneAndReplace-hint-clientError.json
v2/unacknowledged-findOneAndReplace-hint.json
v2/unacknowledged-findOneAndUpdate-hint-clientError.json
v2/unacknowledged-findOneAndUpdate-hint.json
v2/unacknowledged-replaceOne-hint-clientError.json
v2/unacknowledged-replaceOne-hint.json
v2/unacknowledged-updateMany-hint-clientError.json
v2/unacknowledged-updateMany-hint.json
v2/unacknowledged-updateOne-hint-clientError.json
v2/unacknowledged-updateOne-hint.json
v2/updateMany-hint-clientError.json
v2/updateMany-hint.json
v2/updateMany-hint-serverError.json
v2/updateOne-hint-clientError.json
v2/updateOne-hint.json
v2/updateOne-hint-serverError.json
v2/find-allowdiskuse.json
v2/updateWithPipelines.json | {
"pile_set_name": "Github"
} |
import warnings
from io import BytesIO
from pytest import mark
from translate.convert import test_convert
from translate.storage import po, xliff
from translate.tools import pretranslate
class TestPretranslate:
    """Tests for translate.tools.pretranslate merging of PO/XLIFF stores."""

    # Minimal XLIFF 1.1 document; "%s" is substituted with one or more
    # <trans-unit> elements by individual tests.
    xliff_skeleton = '''<?xml version="1.0" encoding="utf-8"?>
<xliff version="1.1" xmlns="urn:oasis:names:tc:xliff:document:1.1">
<file original="doc.txt" source-language="en-US">
<body>
%s
</body>
</file>
</xliff>'''
    def setup_method(self, method):
        # Reset warning filters so one test's warning state cannot leak
        # into the next.
        warnings.resetwarnings()

    def teardown_method(self, method):
        warnings.resetwarnings()

    def pretranslatepo(self, input_source, template_source=None):
        """helper that converts strings to po source without requiring files"""
        input_file = BytesIO(input_source.encode())
        if template_source:
            template_file = BytesIO(template_source.encode())
        else:
            template_file = None
        output_file = BytesIO()
        pretranslate.pretranslate_file(input_file, output_file, template_file)
        output_file.seek(0)
        return po.pofile(output_file.read())

    def pretranslatexliff(self, input_source, template_source=None):
        """helper that converts bytes to an XLIFF store without requiring files"""
        input_file = BytesIO(input_source)
        if template_source:
            template_file = BytesIO(template_source)
        else:
            template_file = None
        output_file = BytesIO()
        pretranslate.pretranslate_file(input_file, output_file, template_file)
        output_file.seek(0)
        return xliff.xlifffile(output_file.read())

    def singleunit(self, pofile):
        """
        checks that the pofile contains a single non-header unit, and
        returns it
        """
        # A store with a header holds two units; the real unit is the second.
        if len(pofile.units) == 2 and pofile.units[0].isheader():
            print(pofile.units[1])
            return pofile.units[1]
        else:
            print(pofile.units[0])
            return pofile.units[0]
    def test_pretranslatepo_blank(self):
        """
        checks that the pretranslatepo function is working for a simple file
        initialisation
        """
        # With no template the unit must pass through unchanged.
        input_source = '''#: simple.label%ssimple.accesskey\nmsgid "A &hard coded newline.\\n"\nmsgstr ""\n''' % po.lsep
        newpo = self.pretranslatepo(input_source)
        assert str(self.singleunit(newpo)) == input_source

    def test_merging_simple(self):
        """checks that the pretranslatepo function is working for a simple merge"""
        input_source = '''#: simple.label%ssimple.accesskey\nmsgid "A &hard coded newline.\\n"\nmsgstr ""\n''' % po.lsep
        template_source = '''#: simple.label%ssimple.accesskey\nmsgid "A &hard coded newline.\\n"\nmsgstr "&Hart gekoeerde nuwe lyne\\n"\n''' % po.lsep
        newpo = self.pretranslatepo(input_source, template_source)
        assert str(self.singleunit(newpo)) == template_source

    def test_merging_messages_marked_fuzzy(self):
        """test that when we merge PO files with a fuzzy message that it remains fuzzy"""
        input_source = '''#: simple.label%ssimple.accesskey\nmsgid "A &hard coded newline.\\n"\nmsgstr ""\n''' % po.lsep
        template_source = '''#: simple.label%ssimple.accesskey\n#, fuzzy\nmsgid "A &hard coded newline.\\n"\nmsgstr "&Hart gekoeerde nuwe lyne\\n"\n''' % po.lsep
        newpo = self.pretranslatepo(input_source, template_source)
        assert str(self.singleunit(newpo)) == template_source
    def test_merging_plurals_with_fuzzy_matching(self):
        """test that when we merge PO files with a fuzzy message that it remains fuzzy"""
        input_source = r'''#: file.cpp:2
msgid "%d manual"
msgid_plural "%d manuals"
msgstr[0] ""
msgstr[1] ""
'''
        template_source = r'''#: file.cpp:3
#, fuzzy
msgid "%d manual"
msgid_plural "%d manuals"
msgstr[0] "%d handleiding."
msgstr[1] "%d handleidings."
'''
        # The #: comment and msgid's are different between the pot and the po
        poexpected = r'''#: file.cpp:2
#, fuzzy
msgid "%d manual"
msgid_plural "%d manuals"
msgstr[0] "%d handleiding."
msgstr[1] "%d handleidings."
'''
        newpo = self.pretranslatepo(input_source, template_source)
        assert str(self.singleunit(newpo)) == poexpected

    @mark.xfail(reason="Not Implemented")
    def test_merging_msgid_change(self):
        """
        tests that if the msgid changes but the location stays the same that
        we merge
        """
        # Expected to fuzzy-match on the identical location comments even
        # though the msgid text changed; currently not implemented (xfail).
        input_source = '''#: simple.label\n#: simple.accesskey\nmsgid "Its &hard coding a newline.\\n"\nmsgstr ""\n'''
        template_source = '''#: simple.label\n#: simple.accesskey\nmsgid "A &hard coded newline.\\n"\nmsgstr "&Hart gekoeerde nuwe lyne\\n"\n'''
        poexpected = '''#: simple.label\n#: simple.accesskey\n#, fuzzy\nmsgid "Its &hard coding a newline.\\n"\nmsgstr "&Hart gekoeerde nuwe lyne\\n"\n'''
        newpo = self.pretranslatepo(input_source, template_source)
        print(bytes(newpo))
        assert bytes(newpo).decode('utf-8') == poexpected
    def test_merging_location_change(self):
        """
        tests that if the location changes but the msgid stays the same that
        we merge
        """
        input_source = '''#: new_simple.label%snew_simple.accesskey\nmsgid "A &hard coded newline.\\n"\nmsgstr ""\n''' % po.lsep
        template_source = '''#: simple.label%ssimple.accesskey\nmsgid "A &hard coded newline.\\n"\nmsgstr "&Hart gekoeerde nuwe lyne\\n"\n''' % po.lsep
        poexpected = '''#: new_simple.label%snew_simple.accesskey\nmsgid "A &hard coded newline.\\n"\nmsgstr "&Hart gekoeerde nuwe lyne\\n"\n''' % po.lsep
        newpo = self.pretranslatepo(input_source, template_source)
        print(bytes(newpo))
        assert bytes(newpo).decode('utf-8') == poexpected

    def test_merging_location_and_whitespace_change(self):
        """
        test that even if the location changes that if the msgid only has
        whitespace changes we can still merge
        """
        # Whitespace-only differences must still produce a fuzzy match.
        input_source = '''#: singlespace.label%ssinglespace.accesskey\nmsgid "&We have spaces"\nmsgstr ""\n''' % po.lsep
        template_source = '''#: doublespace.label%sdoublespace.accesskey\nmsgid "&We have spaces"\nmsgstr "&One het spasies"\n''' % po.lsep
        poexpected = '''#: singlespace.label%ssinglespace.accesskey\n#, fuzzy\nmsgid "&We have spaces"\nmsgstr "&One het spasies"\n''' % po.lsep
        newpo = self.pretranslatepo(input_source, template_source)
        print(bytes(newpo))
        assert bytes(newpo).decode('utf-8') == poexpected

    @mark.xfail(reason="Not Implemented")
    def test_merging_accelerator_changes(self):
        """
        test that a change in the accelerator localtion still allows
        merging
        """
        input_source = '''#: someline.c\nmsgid "A&bout"\nmsgstr ""\n'''
        template_source = '''#: someline.c\nmsgid "&About"\nmsgstr "&Info"\n'''
        poexpected = '''#: someline.c\nmsgid "A&bout"\nmsgstr "&Info"\n'''
        newpo = self.pretranslatepo(input_source, template_source)
        print(bytes(newpo))
        assert bytes(newpo).decode('utf-8') == poexpected

    @mark.xfail(reason="Not Implemented")
    def test_lines_cut_differently(self):
        """
        Checks that the correct formatting is preserved when pot an po lines
        differ.
        """
        input_source = '''#: simple.label\nmsgid "Line split "\n"differently"\nmsgstr ""\n'''
        template_source = '''#: simple.label\nmsgid "Line"\n" split differently"\nmsgstr "Lyne verskillend gesny"\n'''
        newpo = self.pretranslatepo(input_source, template_source)
        newpounit = self.singleunit(newpo)
        assert str(newpounit) == template_source
    def test_merging_automatic_comments_dont_duplicate(self):
        """ensure that we can merge #. comments correctly"""
        input_source = '''#. Row 35\nmsgid "&About"\nmsgstr ""\n'''
        template_source = '''#. Row 35\nmsgid "&About"\nmsgstr "&Info"\n'''
        newpo = self.pretranslatepo(input_source, template_source)
        newpounit = self.singleunit(newpo)
        assert str(newpounit) == template_source

    def test_merging_automatic_comments_new_overides_old(self):
        """ensure that new #. comments override the old comments"""
        input_source = '''#. new comment\n#: someline.c\nmsgid "&About"\nmsgstr ""\n'''
        template_source = '''#. old comment\n#: someline.c\nmsgid "&About"\nmsgstr "&Info"\n'''
        poexpected = '''#. new comment\n#: someline.c\nmsgid "&About"\nmsgstr "&Info"\n'''
        newpo = self.pretranslatepo(input_source, template_source)
        newpounit = self.singleunit(newpo)
        assert str(newpounit) == poexpected

    def test_merging_comments_with_blank_comment_lines(self):
        """
        test that when we merge a comment that has a blank line we keep the
        blank line
        """
        input_source = '''#: someline.c\nmsgid "About"\nmsgstr ""\n'''
        template_source = '''# comment1\n#\n# comment2\n#: someline.c\nmsgid "About"\nmsgstr "Omtrent"\n'''
        poexpected = template_source
        newpo = self.pretranslatepo(input_source, template_source)
        newpounit = self.singleunit(newpo)
        assert str(newpounit) == poexpected

    def test_empty_commentlines(self):
        # Translator comments containing "empty" decorated lines ("# -")
        # must survive the merge intact.
        input_source = '''#: paneSecurity.title
msgid "Security"
msgstr ""
'''
        template_source = '''# - Contributor(s):
# -
# - Alternatively, the
# -
#: paneSecurity.title
msgid "Security"
msgstr "Sekuriteit"
'''
        poexpected = template_source
        newpo = self.pretranslatepo(input_source, template_source)
        newpounit = self.singleunit(newpo)
        print("expected")
        print(poexpected)
        print("got:")
        print(str(newpounit))
        assert str(newpounit) == poexpected
    def test_merging_msgidcomments(self):
        """ensure that we can merge msgidcomments messages"""
        # "_: ..." KDE-style msgid comments must not break matching.
        input_source = r'''#: window.width
msgid ""
"_: Do not translate this.\n"
"36em"
msgstr ""
'''
        template_source = r'''#: window.width
msgid ""
"_: Do not translate this.\n"
"36em"
msgstr "36em"
'''
        newpo = self.pretranslatepo(input_source, template_source)
        newpounit = self.singleunit(newpo)
        assert str(newpounit) == template_source

    def test_merging_plurals(self):
        """ensure that we can merge plural messages"""
        # Note the template carries three plural forms; all are kept.
        input_source = '''msgid "One"\nmsgid_plural "Two"\nmsgstr[0] ""\nmsgstr[1] ""\n'''
        template_source = '''msgid "One"\nmsgid_plural "Two"\nmsgstr[0] "Een"\nmsgstr[1] "Twee"\nmsgstr[2] "Drie"\n'''
        newpo = self.pretranslatepo(input_source, template_source)
        print(newpo)
        newpounit = self.singleunit(newpo)
        assert str(newpounit) == template_source

    def test_merging_resurect_obsolete_messages(self):
        """
        check that we can reuse old obsolete messages if the message comes
        back
        """
        # "#~" entries are obsolete; their translation should be revived.
        input_source = '''#: resurect.c\nmsgid "&About"\nmsgstr ""\n'''
        template_source = '''#~ msgid "&About"\n#~ msgstr "&Omtrent"\n'''
        expected = '''#: resurect.c\nmsgid "&About"\nmsgstr "&Omtrent"\n'''
        newpo = self.pretranslatepo(input_source, template_source)
        print(bytes(newpo))
        assert bytes(newpo).decode('utf-8') == expected
    def test_merging_comments(self):
        """Test that we can merge comments correctly"""
        input_source = '''#. Don't do it!\n#: file.py:1\nmsgid "One"\nmsgstr ""\n'''
        template_source = '''#. Don't do it!\n#: file.py:2\nmsgid "One"\nmsgstr "Een"\n'''
        poexpected = '''#. Don't do it!\n#: file.py:1\nmsgid "One"\nmsgstr "Een"\n'''
        newpo = self.pretranslatepo(input_source, template_source)
        print(newpo)
        newpounit = self.singleunit(newpo)
        assert str(newpounit) == poexpected

    def test_merging_typecomments(self):
        """Test that we can merge with typecomments"""
        # Exact match: the c-format flag from the input must be kept.
        input_source = '''#: file.c:1\n#, c-format\nmsgid "%d pipes"\nmsgstr ""\n'''
        template_source = '''#: file.c:2\nmsgid "%d pipes"\nmsgstr "%d pype"\n'''
        poexpected = '''#: file.c:1\n#, c-format\nmsgid "%d pipes"\nmsgstr "%d pype"\n'''
        newpo = self.pretranslatepo(input_source, template_source)
        newpounit = self.singleunit(newpo)
        print(newpounit)
        assert str(newpounit) == poexpected
        # Fuzzy match: both fuzzy and c-format flags must be present.
        input_source = '''#: file.c:1\n#, c-format\nmsgid "%d computers"\nmsgstr ""\n'''
        template_source = '''#: file.c:2\n#, c-format\nmsgid "%s computers "\nmsgstr "%s-rekenaars"\n'''
        poexpected = '''#: file.c:1\n#, fuzzy, c-format\nmsgid "%d computers"\nmsgstr "%s-rekenaars"\n'''
        newpo = self.pretranslatepo(input_source, template_source)
        newpounit = self.singleunit(newpo)
        assert newpounit.isfuzzy()
        assert newpounit.hastypecomment("c-format")

    def test_xliff_states(self):
        """Test correct maintenance of XLIFF states."""
        xlf_template = self.xliff_skeleton % (
            '''<trans-unit id="1" xml:space="preserve">
<source> File 1 </source>
</trans-unit>''')
        xlf_old = self.xliff_skeleton % (
            '''<trans-unit id="1" xml:space="preserve" approved="yes">
<source> File 1 </source>
<target> Lêer 1 </target>
</trans-unit>''')
        template = xliff.xlifffile.parsestring(xlf_template)
        old = xliff.xlifffile.parsestring(xlf_old)
        # Serialize files
        new = self.pretranslatexliff(bytes(template), bytes(old))
        print(bytes(old))
        print('---')
        print(bytes(new))
        # The approved state from the old file must carry over.
        assert new.units[0].isapproved()
        # Layout might have changed, so we won't compare the serialised
        # versions
class TestPretranslateCommand(test_convert.TestConvertCommand, TestPretranslate):
    """Tests running actual pretranslate commands on files"""

    # Tells the TestConvertCommand base class which tool module to run.
    convertmodule = pretranslate

    def test_help(self, capsys):
        """tests getting help"""
        # Each help_check asserts the option appears in --help output and
        # returns the remaining unchecked options; last=True ends the chain.
        options = test_convert.TestConvertCommand.test_help(self, capsys)
        options = self.help_check(options, "-t TEMPLATE, --template=TEMPLATE")
        options = self.help_check(options, "--tm")
        options = self.help_check(options, "-s MIN_SIMILARITY, --similarity=MIN_SIMILARITY")
        options = self.help_check(options, "--nofuzzymatching", last=True)
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2017 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package aggregator
import (
"sync"
"time"
schema "github.com/m3db/m3/src/aggregator/generated/proto/flush"
"github.com/m3db/m3/src/x/clock"
xsync "github.com/m3db/m3/src/x/sync"
"github.com/m3db/m3/src/x/watch"
"github.com/uber-go/tally"
"go.uber.org/zap"
)
// standardFollowerFlusherMetrics holds counters emitted by the follower-side
// flusher for standard (and, reused, timed) metric types.
type standardFollowerFlusherMetrics struct {
	shardNotFound        tally.Counter
	resolutionNotFound   tally.Counter
	kvUpdates            tally.Counter
	flushWindowsNotEnded tally.Counter
}

// newStandardFlusherMetrics creates the counters under the given scope.
func newStandardFlusherMetrics(scope tally.Scope) standardFollowerFlusherMetrics {
	return standardFollowerFlusherMetrics{
		shardNotFound:        scope.Counter("shard-not-found"),
		resolutionNotFound:   scope.Counter("resolution-not-found"),
		kvUpdates:            scope.Counter("kv-updates"),
		flushWindowsNotEnded: scope.Counter("flush-windows-not-ended"),
	}
}
// forwardedFollowerFlusherMetrics holds counters emitted by the follower-side
// flusher for forwarded metric types, which additionally track problems with
// the forwarded-times metadata.
type forwardedFollowerFlusherMetrics struct {
	shardNotFound             tally.Counter
	resolutionNotFound        tally.Counter
	nilForwardedTimes         tally.Counter
	numForwardedTimesNotFound tally.Counter
	kvUpdates                 tally.Counter
	flushWindowsNotEnded      tally.Counter
}

// newForwardedFlusherMetrics creates the counters under the given scope.
func newForwardedFlusherMetrics(scope tally.Scope) forwardedFollowerFlusherMetrics {
	return forwardedFollowerFlusherMetrics{
		shardNotFound:             scope.Counter("shard-not-found"),
		resolutionNotFound:        scope.Counter("resolution-not-found"),
		nilForwardedTimes:         scope.Counter("nil-forwarded-times"),
		numForwardedTimesNotFound: scope.Counter("num-forwarded-times-not-found"),
		kvUpdates:                 scope.Counter("kv-updates"),
		flushWindowsNotEnded:      scope.Counter("flush-windows-not-ended"),
	}
}
// followerFlushManagerMetrics aggregates all follower flush manager counters,
// with per-flusher-type sub-metrics distinguished by the "flusher-type" tag.
type followerFlushManagerMetrics struct {
	watchCreateErrors tally.Counter
	kvUpdateFlush     tally.Counter
	forcedFlush       tally.Counter
	notCampaigning    tally.Counter
	standard          standardFollowerFlusherMetrics
	forwarded         forwardedFollowerFlusherMetrics
	timed             standardFollowerFlusherMetrics
}

// newFollowerFlushManagerMetrics builds the metrics, tagging each flusher
// type's sub-scope so counters with the same name stay distinguishable.
func newFollowerFlushManagerMetrics(scope tally.Scope) followerFlushManagerMetrics {
	standardScope := scope.Tagged(map[string]string{"flusher-type": "standard"})
	forwardedScope := scope.Tagged(map[string]string{"flusher-type": "forwarded"})
	timedScope := scope.Tagged(map[string]string{"flusher-type": "timed"})
	return followerFlushManagerMetrics{
		watchCreateErrors: scope.Counter("watch-create-errors"),
		kvUpdateFlush:     scope.Counter("kv-update-flush"),
		forcedFlush:       scope.Counter("forced-flush"),
		notCampaigning:    scope.Counter("not-campaigning"),
		standard:          newStandardFlusherMetrics(standardScope),
		forwarded:         newForwardedFlusherMetrics(forwardedScope),
		timed:             newStandardFlusherMetrics(timedScope),
	}
}
// followerFlushManager implements the follower role of flush management: it
// flushes based on the flush times the leader persists in kv, falling back to
// forced flushes when no kv update arrives for too long.
//
// The embedded RWMutex guards all mutable state below; the embedded WaitGroup
// tracks the background watch goroutine (Add in Open, Done in watchFlushTimes,
// Wait in Close).
type followerFlushManager struct {
	sync.RWMutex
	sync.WaitGroup
	nowFn                 clock.NowFn
	checkEvery            time.Duration // wait returned by Prepare when no flush is needed
	workers               xsync.WorkerPool
	placementManager      PlacementManager
	electionManager       ElectionManager
	flushTimesManager     FlushTimesManager
	maxBufferSize         time.Duration // max data age buffered before entering forced-flush mode
	forcedFlushWindowSize time.Duration // step size of each forced flush
	logger                *zap.Logger
	scope                 tally.Scope
	doneCh                <-chan struct{} // closed by the owner to stop the watch goroutine
	received              *schema.ShardSetFlushTimes // latest flush times delivered by the kv watch
	processed             *schema.ShardSetFlushTimes // flush times last consumed by Prepare
	flushTimesState       flushTimesState
	flushMode             followerFlushMode
	lastFlushed           time.Time
	openedAt              time.Time // set in Open; used by CanLead's window checks
	flushTask             *followerFlushTask // reused task returned by Prepare
	sleepFn               sleepFn // throttles watch re-creation on errors
	metrics               followerFlushManagerMetrics
}
// newFollowerFlushManager creates a flush manager for an instance in the
// follower role. The returned manager does nothing until Open is called;
// closing doneCh stops its background flush-times watch.
func newFollowerFlushManager(
	doneCh <-chan struct{},
	opts FlushManagerOptions,
) roleBasedFlushManager {
	nowFn := opts.ClockOptions().NowFn()
	instrumentOpts := opts.InstrumentOptions()
	scope := instrumentOpts.MetricsScope()
	mgr := &followerFlushManager{
		nowFn:                 nowFn,
		checkEvery:            opts.CheckEvery(),
		workers:               opts.WorkerPool(),
		placementManager:      opts.PlacementManager(),
		electionManager:       opts.ElectionManager(),
		flushTimesManager:     opts.FlushTimesManager(),
		maxBufferSize:         opts.MaxBufferSize(),
		forcedFlushWindowSize: opts.ForcedFlushWindowSize(),
		logger:                instrumentOpts.Logger(),
		scope:                 scope,
		doneCh:                doneCh,
		flushTimesState:       flushTimesUninitialized,
		flushMode:             unknownFollowerFlush,
		lastFlushed:           nowFn(),
		sleepFn:               time.Sleep,
		metrics:               newFollowerFlushManagerMetrics(scope),
	}
	// The task keeps a back-pointer to the manager and is reused by Prepare.
	mgr.flushTask = &followerFlushTask{mgr: mgr}
	return mgr
}
// Open records the open time (used by CanLead to decide which flush windows
// must have ended) and starts the goroutine that watches kv for flush-times
// updates. The WaitGroup add is matched by Done in watchFlushTimes and waited
// on in Close.
func (mgr *followerFlushManager) Open() {
	mgr.Lock()
	defer mgr.Unlock()
	mgr.openedAt = mgr.nowFn()
	mgr.Add(1)
	go mgr.watchFlushTimes()
}
// NB(xichen): no actions needed for initializing the follower flush manager;
// the follower flushes based on the flush times received from kv (see Prepare).
func (mgr *followerFlushManager) Init([]*flushBucket) {}
// Prepare decides whether the follower should flush now, returning the task
// to run plus a wait duration (checkEvery when no flush is needed, 0 when a
// flush should run immediately).
func (mgr *followerFlushManager) Prepare(buckets []*flushBucket) (flushTask, time.Duration) {
	// NB(xichen): a flush is triggered in the following scenarios:
	// * The flush times persisted in kv have been updated since last flush, or
	// * Sufficient time (a.k.a. maxBufferSize) has elapsed since last flush.
	mgr.Lock()
	defer mgr.Unlock()
	var (
		now                = mgr.nowFn()
		needsFlush         bool
		flushersByInterval []flushersGroup
	)
	if mgr.flushTimesState == flushTimesUpdated {
		// Consume the kv update: flush exactly up to the times the leader recorded.
		mgr.flushTimesState = flushTimesProcessed
		mgr.processed = mgr.received
		mgr.flushMode = kvUpdateFollowerFlush
		flushersByInterval = mgr.flushersFromKVUpdateWithLock(buckets)
		needsFlush = true
		mgr.metrics.kvUpdateFlush.Inc(1)
	} else {
		durationSinceLastFlush := now.Sub(mgr.lastFlushed)
		// If the follower has accumulated enough data since last flush without receiving a KV
		// update, we enter the forced flush mode.
		if mgr.flushMode != forcedFollowerFlush && durationSinceLastFlush >= mgr.maxBufferSize {
			mgr.flushMode = forcedFollowerFlush
		}
		// In forced mode the follower flushes the oldest accumulated data, one
		// forcedFlushWindowSize step at a time, keeping at most maxBufferSize
		// worth of data buffered.
		if mgr.flushMode == forcedFollowerFlush && durationSinceLastFlush >= mgr.forcedFlushWindowSize {
			flushBeforeNanos := now.Add(-mgr.maxBufferSize).UnixNano()
			flushersByInterval = mgr.flushersFromForcedFlush(buckets, flushBeforeNanos)
			needsFlush = true
			mgr.metrics.forcedFlush.Inc(1)
		}
	}
	if !needsFlush {
		return nil, mgr.checkEvery
	}
	mgr.lastFlushed = now
	mgr.flushTask.flushersByInterval = flushersByInterval
	return mgr.flushTask, 0
}
// NB(xichen): The follower flush manager flushes data based on the flush times
// stored in kv and does not need to take extra actions when a new bucket is added.
func (mgr *followerFlushManager) OnBucketAdded(int, *flushBucket) {}
// NB(xichen): The follower flush manager flushes data based on the flush times
// stored in kv and does not need to take extra actions when a new flusher is added.
func (mgr *followerFlushManager) OnFlusherAdded(
	bucketIdx int,
	bucket *flushBucket,
	flusher flushingMetricList,
) {
}
// CanLead reports whether this follower may take over leadership. It may
// lead if and only if all the following conditions are met:
// * The instance is campaigning.
// * All the aggregation windows since the flush manager was opened have ended.
func (mgr *followerFlushManager) CanLead() bool {
	mgr.RLock()
	defer mgr.RUnlock()
	if !mgr.electionManager.IsCampaigning() {
		mgr.metrics.notCampaigning.Inc(1)
		return false
	}
	// No kv flush times have been processed yet, so flush progress cannot be
	// assessed.
	if mgr.processed == nil {
		return false
	}
	for _, shardFlushTimes := range mgr.processed.ByShard {
		// If the shard is tombstoned, there is no need to examine its flush times.
		if shardFlushTimes.Tombstoned {
			continue
		}
		// Check that for standard metrics, all the open windows containing the process
		// start time are closed, meaning the standard metrics that didn't make it to
		// this process have been flushed successfully downstream.
		if !mgr.canLead(shardFlushTimes.StandardByResolution, mgr.metrics.standard) {
			return false
		}
		if !mgr.canLead(shardFlushTimes.TimedByResolution, mgr.metrics.timed) {
			return false
		}
		// Check that the forwarded metrics have been flushed past the process start
		// time, meaning the forwarded metrics that didn't make it to this process have
		// been flushed successfully downstream.
		for windowNanos, fbr := range shardFlushTimes.ForwardedByResolution {
			if fbr == nil {
				mgr.metrics.forwarded.nilForwardedTimes.Inc(1)
				return false
			}
			// Since the timestamps of the forwarded metrics are aligned to the resolution
			// boundaries, we simply need to make sure that all forwarded metrics in or before
			// the window containing the process start time have been flushed to assert that
			// the process can safely take over leadership.
			windowSize := time.Duration(windowNanos)
			waitTillFlushedTime := mgr.openedAt.Truncate(windowSize)
			// When openedAt falls exactly on a window boundary, the window to
			// wait for is the previous one.
			if waitTillFlushedTime.Equal(mgr.openedAt) {
				waitTillFlushedTime = waitTillFlushedTime.Add(-windowSize)
			}
			for _, lastFlushedNanos := range fbr.ByNumForwardedTimes {
				if lastFlushedNanos <= waitTillFlushedTime.UnixNano() {
					mgr.metrics.forwarded.flushWindowsNotEnded.Inc(1)
					return false
				}
			}
		}
	}
	return true
}
// canLead reports whether every window in flushTimes that was open at the
// manager's open time has since been flushed past its end.
func (mgr *followerFlushManager) canLead(
	flushTimes map[int64]int64,
	metrics standardFollowerFlusherMetrics,
) bool {
	for windowNanos, lastFlushedNanos := range flushTimes {
		window := time.Duration(windowNanos)
		// End of the window containing the manager's open time.
		windowEnd := mgr.openedAt.Truncate(window)
		if windowEnd.Before(mgr.openedAt) {
			windowEnd = windowEnd.Add(window)
		}
		if lastFlushedNanos >= windowEnd.UnixNano() {
			continue
		}
		metrics.flushWindowsNotEnded.Inc(1)
		return false
	}
	return true
}
// Close blocks until the flush-times watch goroutine started in Open has
// exited (the goroutine returns once doneCh is closed).
func (mgr *followerFlushManager) Close() { mgr.Wait() }
// flushersFromKVUpdateWithLock builds per-interval flush targets from the
// most recently received kv flush times. Standard and timed buckets share
// the same lookup logic and differ only in where their flush times live and
// which metrics/logger they use. Callers must hold mgr's lock.
func (mgr *followerFlushManager) flushersFromKVUpdateWithLock(buckets []*flushBucket) []flushersGroup {
	flushersByInterval := make([]flushersGroup, len(buckets))
	for i, bucket := range buckets {
		bucketID := bucket.bucketID
		flushersByInterval[i].interval = bucket.interval
		flushersByInterval[i].duration = bucket.duration
		switch bucketID.listType {
		case standardMetricListType:
			flushersByInterval[i].flushers = mgr.standardFlushersFromKVUpdateWithLock(
				bucketID.standard.resolution,
				bucket.flushers,
				getStandardFlushTimesByResolutionFn,
				mgr.metrics.standard,
				mgr.logger.With(zap.String("flusherType", "standard")),
			)
		case forwardedMetricListType:
			flushersByInterval[i].flushers = mgr.forwardedFlushersFromKVUpdateWithLock(bucketID.forwarded, bucket.flushers)
		case timedMetricListType:
			flushersByInterval[i].flushers = mgr.standardFlushersFromKVUpdateWithLock(
				bucketID.timed.resolution,
				bucket.flushers,
				getTimedFlushTimesByResolutionFn,
				mgr.metrics.timed,
				mgr.logger.With(zap.String("flusherType", "timed")),
			)
		default:
			// An unknown list type is a programming error.
			panic("should never get here")
		}
	}
	return flushersByInterval
}
// standardFlushersFromKVUpdateWithLock pairs each standard/timed flusher with
// the last-flushed time recorded in kv for the flusher's shard and
// resolution. Flushers whose shard or resolution is absent from the received
// flush times are skipped with a warning rather than flushed. Callers must
// hold mgr's lock.
func (mgr *followerFlushManager) standardFlushersFromKVUpdateWithLock(
	resolution time.Duration,
	flushers []flushingMetricList,
	getFlushTimesByResolutionFn getFlushTimesByResolutionFn,
	metrics standardFollowerFlusherMetrics,
	logger *zap.Logger,
) []flusherWithTime {
	var (
		flushersWithTime = make([]flusherWithTime, 0, defaultInitialFlushCapacity)
	)
	for _, flusher := range flushers {
		shard := flusher.Shard()
		shardFlushTimes, exists := mgr.received.ByShard[shard]
		if !exists {
			metrics.shardNotFound.Inc(1)
			logger.Warn("shard not found in flush times",
				zap.Uint32("shard", shard),
			)
			continue
		}
		flushTimes := getFlushTimesByResolutionFn(shardFlushTimes)
		lastFlushedAtNanos, exists := flushTimes[int64(resolution)]
		if !exists {
			metrics.resolutionNotFound.Inc(1)
			logger.Warn("resolution not found in flush times",
				zap.Uint32("shard", shard),
				zap.Stringer("resolution", resolution),
			)
			continue
		}
		// Flush everything up to the time the leader last flushed.
		newFlushTarget := flusherWithTime{
			flusher:          flusher,
			flushBeforeNanos: lastFlushedAtNanos,
		}
		flushersWithTime = append(flushersWithTime, newFlushTarget)
		metrics.kvUpdates.Inc(1)
	}
	return flushersWithTime
}
// forwardedFlushersFromKVUpdateWithLock pairs each forwarded-metric flusher
// with the last-flushed time recorded in kv for the flusher's shard,
// resolution, and numForwardedTimes. Flushers whose flush times cannot be
// fully resolved are skipped with a warning. Callers must hold mgr's lock.
func (mgr *followerFlushManager) forwardedFlushersFromKVUpdateWithLock(
	listID forwardedMetricListID,
	flushers []flushingMetricList,
) []flusherWithTime {
	var (
		resolution        = listID.resolution
		numForwardedTimes = listID.numForwardedTimes
		flushersWithTime  = make([]flusherWithTime, 0, defaultInitialFlushCapacity)
	)
	for _, flusher := range flushers {
		shard := flusher.Shard()
		shardFlushTimes, exists := mgr.received.ByShard[shard]
		if !exists {
			mgr.metrics.forwarded.shardNotFound.Inc(1)
			mgr.logger.Warn("shard not found in flush times",
				zap.String("flusherType", "forwarded"),
				zap.Uint32("shard", shard),
			)
			continue
		}
		flushTimesForResolution, exists := shardFlushTimes.ForwardedByResolution[int64(resolution)]
		if !exists {
			mgr.metrics.forwarded.resolutionNotFound.Inc(1)
			mgr.logger.Warn("resolution not found in flush times",
				zap.String("flusherType", "forwarded"),
				zap.Uint32("shard", shard),
				zap.Stringer("resolution", resolution),
			)
			continue
		}
		if flushTimesForResolution == nil {
			mgr.metrics.forwarded.nilForwardedTimes.Inc(1)
			mgr.logger.Warn("nil flush times",
				zap.String("flusherType", "forwarded"),
				zap.Uint32("shard", shard),
				zap.Stringer("resolution", resolution),
			)
			continue
		}
		lastFlushedAtNanos, exists := flushTimesForResolution.ByNumForwardedTimes[int32(numForwardedTimes)]
		if !exists {
			mgr.metrics.forwarded.numForwardedTimesNotFound.Inc(1)
			mgr.logger.Warn("numForwardedTimes not found in flush times",
				zap.String("flusherType", "forwarded"),
				zap.Uint32("shard", shard),
				zap.Stringer("resolution", resolution),
				zap.Int("numForwardedTimes", numForwardedTimes),
			)
			continue
		}
		// Flush everything up to the time the leader last flushed.
		newFlushTarget := flusherWithTime{
			flusher:          flusher,
			flushBeforeNanos: lastFlushedAtNanos,
		}
		flushersWithTime = append(flushersWithTime, newFlushTarget)
		mgr.metrics.forwarded.kvUpdates.Inc(1)
	}
	return flushersWithTime
}
// flushersFromForcedFlush builds per-interval flush targets for a forced
// flush, applying the same cutoff time to every flusher in every bucket.
func (mgr *followerFlushManager) flushersFromForcedFlush(
	buckets []*flushBucket,
	flushBeforeNanos int64,
) []flushersGroup {
	groups := make([]flushersGroup, len(buckets))
	for i, bucket := range buckets {
		group := flushersGroup{
			interval: bucket.interval,
			duration: bucket.duration,
			flushers: make([]flusherWithTime, 0, defaultInitialFlushCapacity),
		}
		for _, flusher := range bucket.flushers {
			group.flushers = append(group.flushers, flusherWithTime{
				flusher:          flusher,
				flushBeforeNanos: flushBeforeNanos,
			})
		}
		groups[i] = group
	}
	return groups
}
// watchFlushTimes runs in a background goroutine (started by Open) until
// doneCh is closed. It (re-)establishes the flush-times watch, retrying once
// per throttlePeriod on creation errors, and records each received update
// under lock for Prepare to consume.
func (mgr *followerFlushManager) watchFlushTimes() {
	defer mgr.Done()
	var (
		throttlePeriod  = time.Second
		flushTimesWatch watch.Watch
		err             error
	)
	for {
		if flushTimesWatch == nil {
			flushTimesWatch, err = mgr.flushTimesManager.Watch()
			if err != nil {
				mgr.metrics.watchCreateErrors.Inc(1)
				mgr.sleepFn(throttlePeriod)
				continue
			}
		}
		select {
		case <-flushTimesWatch.C():
			mgr.Lock()
			mgr.received = flushTimesWatch.Get().(*schema.ShardSetFlushTimes)
			mgr.flushTimesState = flushTimesUpdated
			mgr.Unlock()
		case <-mgr.doneCh:
			return
		}
	}
}
// followerFlushTask carries the flush targets computed by Prepare; the same
// task instance is reused across flushes (see followerFlushManager.flushTask).
type followerFlushTask struct {
	mgr                *followerFlushManager
	flushersByInterval []flushersGroup
}
// Run discards each flusher's data before its cutoff time, fanning the work
// out to the manager's worker pool one interval group at a time and recording
// how long each group takes.
func (t *followerFlushTask) Run() {
	var (
		mgr       = t.mgr
		wgWorkers sync.WaitGroup
	)
	for _, group := range t.flushersByInterval {
		start := mgr.nowFn()
		for _, flusherWithTime := range group.flushers {
			// Shadow the loop variable so each closure captures its own copy.
			flusherWithTime := flusherWithTime
			wgWorkers.Add(1)
			mgr.workers.Go(func() {
				flusherWithTime.flusher.DiscardBefore(flusherWithTime.flushBeforeNanos)
				wgWorkers.Done()
			})
		}
		// Wait for the whole group before timing it and moving on.
		wgWorkers.Wait()
		group.duration.Record(mgr.nowFn().Sub(start))
	}
}
// flushTimesState tracks the lifecycle of the flush times received from kv.
type flushTimesState int

const (
	// No flush times have been received from kv yet.
	flushTimesUninitialized flushTimesState = iota
	// New flush times were received but not yet consumed by Prepare.
	flushTimesUpdated
	// The received flush times have been consumed (copied to processed).
	flushTimesProcessed
)

// followerFlushMode describes what is driving the follower's flushes.
type followerFlushMode int

const (
	unknownFollowerFlush followerFlushMode = iota
	// Flushing because updated flush times arrived from kv.
	kvUpdateFollowerFlush
	// Flushing because maxBufferSize elapsed without a kv update.
	forcedFollowerFlush
)

// flusherWithTime pairs a flusher with the cutoff (Unix nanos) before which
// its data should be discarded (see followerFlushTask.Run).
type flusherWithTime struct {
	flusher          flushingMetricList
	flushBeforeNanos int64
}

// flushersGroup is the set of flush targets sharing a flush interval, plus
// the timer used to record how long the group takes to flush.
type flushersGroup struct {
	interval time.Duration
	duration tally.Timer
	flushers []flusherWithTime
}
| {
"pile_set_name": "Github"
} |
# Build the optional FreeType font-support library only when FreeType was
# detected at configure time (the ENABLE_FT automake conditional).
if ENABLE_FT
# Public header, installed under $(includedir)/agg2.
aggincludedir = $(includedir)/agg2
agginclude_HEADERS = agg_font_freetype.h
lib_LTLIBRARIES = libaggfontfreetype.la
# Library version and FreeType link flags are substituted by configure.
libaggfontfreetype_la_LDFLAGS = -version-info @AGG_LIB_VERSION@ @FREETYPE_LIBS@
libaggfontfreetype_la_SOURCES = agg_font_freetype.cpp
libaggfontfreetype_la_CXXFLAGS = -I$(top_srcdir)/include @FREETYPE_CFLAGS@
endif
| {
"pile_set_name": "Github"
} |
/*************************************************************
*
* MathJax/localization/sk/TeX.js
*
* Copyright (c) 2009-2018 The MathJax Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// Slovak ("sk") translations of the TeX input processor's messages. Keys are
// MathJax message identifiers; %1, %2, ... are positional substitution
// arguments filled in at display time. NOTE(review): this string table
// appears to be machine-managed localization data — avoid hand-editing the
// translated strings.
MathJax.Localization.addTranslation("sk","TeX",{
  version: "2.7.5",
  isLoaded: true,
  strings: {
    ExtraOpenMissingClose: "Prebyto\u010Dn\u00E1 otv\u00E1raj\u00FAca alebo uzavieracia z\u00E1tvorka",
    ExtraCloseMissingOpen: "Prebyto\u010Dn\u00E1 uzavieracia alebo otv\u00E1raj\u00FAca z\u00E1tvorka",
    MissingLeftExtraRight: "Ch\u00FDbaj\u00FAci \\left alebo prebyto\u010Dn\u00FD \\right",
    MissingScript: "Ch\u00FDba argument horn\u00E9ho alebo doln\u00E9ho indexu",
    ExtraLeftMissingRight: "Prebyto\u010Dn\u00FD \\left alebo ch\u00FDbaj\u00FAci \\right",
    Misplaced: "Chybne umiestnen\u00FD %1",
    MissingOpenForSub: "Ch\u00FDbaj\u00FAca otv\u00E1racia zlo\u017Een\u00E1 z\u00E1tvorka pre doln\u00FD index",
    MissingOpenForSup: "Ch\u00FDbaj\u00FAca otv\u00E1racia zlo\u017Een\u00E1 z\u00E1tvorka pre horn\u00FD index",
    AmbiguousUseOf: "Nejednozna\u010Dn\u00E9 pou\u017Eitie %1",
    EnvBadEnd: "\\begin{%1} bolo uzavret\u00E9 \\end{%2}",
    EnvMissingEnd: "Ch\u00FDbaj\u00FAci \\end{%1}",
    MissingBoxFor: "Ch\u00FDba box pre %1",
    MissingCloseBrace: "Ch\u00FDba uzavieracia z\u00E1tvorka",
    UndefinedControlSequence: "Nedefinovan\u00E1 riadiaca sekvencia %1",
    DoubleExponent: "Dvojit\u00FD exponent: pre jednozna\u010Dnos\u0165 pou\u017Eite zlo\u017Een\u00E9 z\u00E1tvorky",
    DoubleSubscripts: "Dvojit\u00FD doln\u00FD index: pre jednozna\u010Dnos\u0165 pou\u017Eite zlo\u017Een\u00E9 z\u00E1tvorky",
    DoubleExponentPrime: "Symbol \u010Diarky sp\u00F4sobil dvojit\u00FD exponent: pre jednozna\u010Dnos\u0165 pou\u017Eite zlo\u017Een\u00E9 z\u00E1tvorky",
    CantUseHash1: "V matematickom re\u017Eime nem\u00F4\u017Eete pou\u017Ei\u0165 znak \u201E#\u201C pre parametre makier",
    MisplacedMiddle: "%1 mus\u00ED by\u0165 medzi \\left a \\right",
    MisplacedLimits: "%1 je povolen\u00E9 len pri oper\u00E1toroch",
    MisplacedMoveRoot: "%1 sa m\u00F4\u017Ee vyskytn\u00FA\u0165 len v koreni",
    MultipleCommand: "Viacn\u00E1sobn\u00FD %1",
    IntegerArg: "Argument pre %1 mus\u00ED by\u0165 cel\u00E9 \u010D\u00EDslo",
    NotMathMLToken: "%1 nie je primit\u00EDvny element",
    InvalidMathMLAttr: "Neplatn\u00FD atrib\u00FAt MathML: %1",
    UnknownAttrForElement: "%1 nie je zn\u00E1mym atrib\u00FAtom pre %2",
    MaxMacroSub1: "Prekro\u010Den\u00FD maxim\u00E1lny po\u010Det substit\u00FAci\u00ED makra MathJaxu; nejde o rekurz\u00EDvne volanie makra?",
    MaxMacroSub2: "Prekro\u010Den\u00FD maxim\u00E1lny po\u010Det substit\u00FAci\u00ED MathJaxu; nejde o rekurz\u00EDvne LaTeXov\u00E9 prostredie?",
    MissingArgFor: "Ch\u00FDba argument pre %1",
    ExtraAlignTab: "Prebyto\u010Dn\u00FD vyrovn\u00E1vac\u00ED tabul\u00E1tor v texte \\cases",
    BracketMustBeDimension: "Z\u00E1tvorkov\u00FD argument pre %1 mus\u00ED by\u0165 rozmer",
    InvalidEnv: "Neplatn\u00E1 premenn\u00E1 prostredia \u201E%1\u201C",
    UnknownEnv: "Nezn\u00E1me prostredie \u201E%1\u201C",
    ExtraCloseLooking: "Prebyto\u010Dn\u00E1 uzavieracia z\u00E1tvorka, zatia\u013E \u010Do bolo o\u010Dak\u00E1van\u00E9 %1",
    MissingCloseBracket: "Pri argumente pre %1 nebola n\u00E1jden\u00E1 uzavieracia \u201E]\u201C",
    MissingOrUnrecognizedDelim: "Ch\u00FDbaj\u00FAci alebo nerozpoznan\u00FD odde\u013Eova\u010D pre %1",
    MissingDimOrUnits: "Ch\u00FDbaj\u00FAci rozmer alebo jeho jednotka pre %1",
    TokenNotFoundForCommand: "Nen\u00E1jden\u00E9 %1 pre %2",
    MathNotTerminated: "V textovom boxe nie je ukon\u010Den\u00E1 matematika",
    IllegalMacroParam: "Neplatn\u00FD odkaz na parameter makra",
    MaxBufferSize: "Prekro\u010Den\u00E1 ve\u013Ekos\u0165 internej pam\u00E4te MathJaxu; nejde o rekurz\u00EDvne volanie makra?",
    CommandNotAllowedInEnv: "V prostred\u00ED %2 nie je povolen\u00FD %1",
    MultipleLabel: "Viacn\u00E1sobn\u00E1 defin\u00EDcia ozna\u010Denia %1",
    CommandAtTheBeginingOfLine: "%1 mus\u00ED by\u0165 umiestnen\u00E9 na za\u010Diatku riadku",
    IllegalAlign: "Pri %1 uveden\u00E9 neplatn\u00E9 zarovnanie",
    BadMathStyleFor: "Chybn\u00FD \u0161t\u00FDl matematiky pri %1",
    PositiveIntegerArg: "Argument %1 mus\u00ED by\u0165 kladn\u00E9 cel\u00E9 \u010D\u00EDslo",
    ErroneousNestingEq: "Chybn\u00E9 zanorovanie \u0161trukt\u00FAry rovn\u00EDc",
    MultlineRowsOneCol: "Riadky v prostred\u00ED %1 musia ma\u0165 pr\u00E1ve jeden st\u013Apec",
    MultipleBBoxProperty: "Pri %2 je %1 uveden\u00E9 dvakr\u00E1t",
    InvalidBBoxProperty: "\u201E%1\u201C nevyzer\u00E1 ako farba, rozmer paddingu alebo \u0161t\u00FDl",
    ExtraEndMissingBegin: "Prebato\u010Dn\u00FD %1 alebo ch\u00FDbaj\u00FAci \\begingroup",
    GlobalNotFollowedBy: "Za %1 ch\u00FDba \\let, \\def alebo \\newcommand",
    UndefinedColorModel: "Farebn\u00FD model \u201E%1\u201C nie je definovan\u00FD",
    ModelArg1: "Farebn\u00E9 hodnoty modelu %1 vy\u017Eaduj\u00FA tri \u010D\u00EDsla",
    InvalidDecimalNumber: "Neplatn\u00E9 desatinn\u00E9 \u010D\u00EDslo",
    ModelArg2: "Farebn\u00E9 hodnoty modelu %1 musia le\u017Ea\u0165 medzi %2 a %3",
    InvalidNumber: "Neplatn\u00E9 \u010D\u00EDslo",
    NewextarrowArg1: "Prv\u00FDm argumentom %1 mus\u00ED by\u0165 n\u00E1zov riadiacej sekvencie",
    NewextarrowArg2: "Druh\u00FDm argumentom %1 musia by\u0165 dve cel\u00E9 \u010D\u00EDsla oddelen\u00E9 \u010Diarkou",
    NewextarrowArg3: "Tret\u00EDm argumentom %1 mus\u00ED by\u0165 \u010D\u00EDslo znaku Unicode",
    NoClosingChar: "Nen\u00E1jden\u00FD uzavierac\u00ED %1",
    IllegalControlSequenceName: "Neplatn\u00FD n\u00E1zov riadiacej sekvencie pre %1",
    IllegalParamNumber: "Pre %1 uveden\u00FD neplatn\u00FD po\u010Det parametrov",
    MissingCS: "Za %1 mus\u00ED by\u0165 riadiaca sekvencia",
    CantUseHash2: "Chybn\u00E9 pou\u017Eitie # v \u0161abl\u00F3ne pre %1",
    SequentialParam: "Parametre pre %1 musia by\u0165 \u010D\u00EDslovan\u00E9 postupne",
    MissingReplacementString: "V defin\u00EDcii %1 ch\u00FDba nahradzuj\u00FAci re\u0165azec",
    MismatchUseDef: "Pou\u017Eitie %1 nezodpoved\u00E1 jeho defin\u00EDcii",
    RunawayArgument: "Zbl\u00FAdil\u00FD argument pre %1?",
    NoClosingDelim: "Nepodarilo sa n\u00E1js\u0165 ukon\u010Dovac\u00ED znak pre %1"
  }
});
// Signal to the loader that this localization file has finished loading.
MathJax.Ajax.loadComplete("[MathJax]/localization/sk/TeX.js");
| {
"pile_set_name": "Github"
} |
package model
import (
"encoding/json"
"fmt"
)
// DependencyNetworker provides answers to questions about the
// dependencies of a task and is available while generating
// migrations.
//
// Implementations should be mutable and thread-safe.
//
// The DependencyNetworker interface definition is in the model
// package because it has no external dependencies, and placing it in
// other packages would lead to cycles. The default implementation
// is in the top level package.
type DependencyNetworker interface {
	// Add inserts a list of dependencies for a given item. If the
	// slice of dependencies is empty, Add is a noop. Furthermore,
	// the Add method provides no validation, and will do nothing
	// to prevent cycles or broken dependencies.
	Add(string, []string)
	// Resolve, returns all of the dependencies for the specified task.
	Resolve(string) []string
	// All returns a list of all tasks that have registered
	// dependencies.
	All() []string
	// Network returns the dependency graph for all registered
	// tasks as a mapping of task IDs to the IDs of its
	// dependencies.
	Network() map[string][]string
	// Validate returns errors if there are either dependencies
	// specified that do not have tasks available *or* if there
	// are dependency cycles.
	Validate() error
	// AddGroup and GetGroup set and return the lists of tasks
	// that belong to a specific task group. Unlike the per-task
	// dependency methods above, these operate on named groups of
	// tasks rather than individual task dependencies.
	AddGroup(string, []string)
	GetGroup(string) []string
	// For introspection and convenience, DependencyNetworker
	// composes implementations of common interfaces.
	fmt.Stringer
	json.Marshaler
}
| {
"pile_set_name": "Github"
} |
// Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
package test
import (
"encoding/json"
"fmt"
"testing"
"github.com/stretchr/testify/assert"
)
// AssertJsonsEqual asserts that two JSON documents are semantically equal
// (ignores white-space and attribute order). Unmarshalling errors are
// reported along with both inputs.
// NOTE(review): when the documents are merely unequal (no error), the final
// assert.True fails without printing the documents — callers get no diff.
func AssertJsonsEqual(t *testing.T, expectedJson []byte, actualJson []byte) {
	result, err := areJsonsEqual(expectedJson, actualJson)
	if err != nil {
		t.Errorf("json comparison failed. EXPECTED json: %s\n\n ACTUAL json: %s\n\n . Error details: %s",
			string(expectedJson), string(actualJson), err)
	}
	assert.True(t, result)
}
func areJsonsEqual(expectedJson []byte, actualJson []byte) (bool, error) {
expectedMap, err := unmarshalToMap(expectedJson)
if err != nil {
return false, fmt.Errorf("failed to unmarshal to map. details: %s", err)
}
actualMap, err := unmarshalToMap(actualJson)
if err != nil {
return false, fmt.Errorf("failed to unmarshal to map. details: %s", err)
}
return areMapsEqual(expectedMap, actualMap), nil
}
func unmarshalToMap(inputJson []byte) (map[string]interface{}, error) {
var m map[string]interface{}
if err := json.Unmarshal(inputJson, &m); err != nil {
return nil, err
}
return m, nil
}
func areMapsEqual(map1 map[string]interface{}, map2 map[string]interface{}) bool {
if map1 == nil || map2 == nil {
return false
}
if len(map1) != len(map2) {
return false
}
for k, v1 := range map1 {
v2, found := map2[k]
if !found {
return false
}
switch v1 := v1.(type) {
case []interface{}:
// compare lists
if !areListsEqual(v1, v2.([]interface{})) {
return false
}
case map[string]interface{}:
// compare maps
if !areMapsEqual(v1, v2.(map[string]interface{})) {
return false
}
default:
if v1 != v2 {
return false
}
}
}
return true
}
func areListsEqual(list1 []interface{}, list2 []interface{}) bool {
if list1 == nil || list2 == nil {
return false
}
if len(list1) != len(list2) {
return false
}
for index := range list1 {
v1 := list1[index]
v2 := list2[index]
switch v1 := v1.(type) {
case []interface{}:
// compare lists
if !areListsEqual(v1, v2.([]interface{})) {
return false
}
case map[string]interface{}:
// compare maps
if !areMapsEqual(v1, v2.(map[string]interface{})) {
return false
}
default:
if v1 != v2 {
return false
}
}
}
return true
}
| {
"pile_set_name": "Github"
} |
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/class-gpuVector.R
\docType{class}
\name{igpuVector-class}
\alias{igpuVector-class}
\title{igpuVector Class}
\description{
An integer vector in the S4 \code{gpuVector}
representation.
}
\section{Slots}{
\describe{
\item{\code{address}:}{An integer vector object}
}
}
\seealso{
\code{\link{gpuVector-class}}
}
\author{
Charles Determan Jr.
}
| {
"pile_set_name": "Github"
} |
import React from 'react'
import t from 'prop-types'
import styled from 'styled-components'
import { Footer } from 'ui'
/**
 * Checkout footer: renders its children inside the shared <Footer>, laid out
 * in a flex row.
 *
 * @param {object} props
 * @param {React.ReactNode} props.children - content placed in the footer row (required).
 * @param {string} [props.justifyContent] - CSS justify-content value; the
 *   styled wrapper falls back to 'flex-end' when omitted.
 */
function FooterCheckout ({ children, justifyContent }) {
  return (
    <Footer>
      <FooterContent justifyContent={justifyContent}>
        {children}
      </FooterContent>
    </Footer>
  )
}
FooterCheckout.propTypes = {
  children: t.node.isRequired,
  justifyContent: t.string
}
// Flex wrapper whose horizontal alignment is driven by the justifyContent prop.
const FooterContent = styled.div`
  display: flex;
  justify-content: ${({ justifyContent }) => justifyContent || 'flex-end'};
`
export default FooterCheckout
| {
"pile_set_name": "Github"
} |
<?php
namespace Oro\Bundle\WorkflowBundle\Tests\Unit\Entity;
use Oro\Bundle\WorkflowBundle\Entity\WorkflowDefinition;
use Oro\Bundle\WorkflowBundle\Entity\WorkflowRestriction;
use Oro\Bundle\WorkflowBundle\Entity\WorkflowStep;
use Symfony\Component\PropertyAccess\PropertyAccess;
/**
 * Unit tests for the WorkflowRestriction entity: property accessors and the
 * import() copy behavior.
 */
class WorkflowRestrictionTest extends \PHPUnit\Framework\TestCase
{
    /**
     * Verifies each property is readable/writable via the property accessor.
     *
     * @dataProvider propertiesDataProvider
     *
     * @param string $property
     * @param mixed $value
     */
    public function testSettersAndGetters($property, $value)
    {
        $restriction = new WorkflowRestriction();
        $accessor = PropertyAccess::createPropertyAccessor();
        $accessor->setValue($restriction, $property, $value);
        $this->assertEquals($value, $accessor->getValue($restriction, $property));
    }
    /**
     * Property name / sample value pairs for testSettersAndGetters.
     *
     * @return array
     */
    public function propertiesDataProvider()
    {
        return [
            ['attribute', 'test'],
            ['step', new WorkflowStep()],
            ['definition', new WorkflowDefinition()],
            ['entityClass', 'TestEntity'],
            ['field', 'test'],
            ['mode', 'full'],
            ['values', []],
        ];
    }
    /**
     * Verifies import() copies all fields from another restriction while
     * keeping the target's own definition, and that both ends produce the
     * same hash key afterwards.
     */
    public function testImport()
    {
        // Source restriction whose step belongs to the target's definition.
        $restriction = new WorkflowRestriction();
        $step = new WorkflowStep();
        $step->setName('step');
        $definition = new WorkflowDefinition();
        $definition->addStep($step);
        $restriction->setAttribute('attribute');
        $restriction->setStep($step);
        $restriction->setEntityClass('TestEntity');
        $restriction->setField('test');
        $restriction->setMode('allow');
        $restriction->setValues(['1']);
        // Target restriction: import() returns the target itself.
        $newRestriction = new WorkflowRestriction();
        $newRestriction->setDefinition($definition);
        $this->assertEquals($newRestriction, $newRestriction->import($restriction));
        $this->assertEquals('attribute', $newRestriction->getAttribute());
        $this->assertEquals($step, $newRestriction->getStep());
        $this->assertEquals('TestEntity', $newRestriction->getEntityClass());
        $this->assertEquals('test', $newRestriction->getField());
        $this->assertEquals('allow', $newRestriction->getMode());
        $this->assertEquals(['1'], $newRestriction->getValues());
        $this->assertEquals($restriction->getHashKey(), $newRestriction->getHashKey());
    }
}
| {
"pile_set_name": "Github"
} |
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef NET_CERT_TEST_KEYCHAIN_SEARCH_LIST_MAC_H_
#define NET_CERT_TEST_KEYCHAIN_SEARCH_LIST_MAC_H_
#include <memory>
#include <CoreServices/CoreServices.h>
#include <Security/Security.h>
#include "base/mac/scoped_cftyperef.h"
#include "net/base/net_export.h"
namespace net {
// Test-only replacement for the macOS keychain search list. While an instance
// exists (created via Create()), callers that check HasInstance()/GetInstance()
// can use the keychains registered through AddKeychain() instead of the
// system search list. Singleton-like: at most one instance at a time.
class NET_EXPORT TestKeychainSearchList {
public:
~TestKeychainSearchList();
// Creates a TestKeychainSearchList, which will be used by HasInstance and
// GetInstance.
// Only one TestKeychainSearchList object may exist at a time, returns nullptr
// if one exists already.
static std::unique_ptr<TestKeychainSearchList> Create();
// Returns true if a TestKeychainSearchList currently exists.
static bool HasInstance();
// Returns the current TestKeychainSearchList instance, if any.
static TestKeychainSearchList* GetInstance();
// Copies the test keychain search list into |keychain_search_list|.
OSStatus CopySearchList(CFArrayRef* keychain_search_list) const;
// Adds |keychain| to the end of the test keychain search list.
void AddKeychain(SecKeychainRef keychain);
private:
// Private: instances must be obtained through Create().
TestKeychainSearchList();
base::ScopedCFTypeRef<CFMutableArrayRef> scoped_keychain_search_list;
};
} // namespace net
#endif // NET_CERT_TEST_KEYCHAIN_SEARCH_LIST_MAC_H_
| {
"pile_set_name": "Github"
} |
// Django/Jinja2 syntax definition for Prism.js <http://prismjs.com> syntax highlighter.
// Mostly it works OK but can paint code incorrectly on complex html/template tag combinations.
// Shared grammar for template constructs ({{ ... }} expressions and {% ... %} tags),
// injected below into markup, tags, and (when present) script/style blocks.
var _django_template = {
	'property': {
		// Matches an entire {{...}} or {%...%} span; the inner patterns below
		// tokenize its contents.
		pattern: /(?:{{|{%)[\s\S]*?(?:%}|}})/g,
		greedy: true,
		inside: {
			'string': {
				pattern: /("|')(?:\\.|(?!\1)[^\\\r\n])*\1/,
				greedy: true
			},
			'keyword': /\b(?:\||load|verbatim|widthratio|ssi|firstof|for|url|ifchanged|csrf_token|lorem|ifnotequal|autoescape|now|templatetag|debug|cycle|ifequal|regroup|comment|filter|endfilter|if|spaceless|with|extends|block|include|else|empty|endif|endfor|as|endblock|endautoescape|endverbatim|trans|endtrans|[Tt]rue|[Ff]alse|[Nn]one|in|is|static|macro|endmacro|call|endcall|set|endset|raw|endraw)\b/,
			'operator' : /[-+=]=?|!=|\*\*?=?|\/\/?=?|<[<=>]?|>[=>]?|[&|^~]|\b(?:or|and|not)\b/,
			'function': /\b(?:_|abs|add|addslashes|attr|batch|callable|capfirst|capitalize|center|count|cut|d|date|default|default_if_none|defined|dictsort|dictsortreversed|divisibleby|e|equalto|escape|escaped|escapejs|even|filesizeformat|first|float|floatformat|force_escape|forceescape|format|get_digit|groupby|indent|int|iriencode|iterable|join|last|length|length_is|linebreaks|linebreaksbr|linenumbers|list|ljust|lower|make_list|map|mapping|number|odd|phone2numeric|pluralize|pprint|random|reject|rejectattr|removetags|replace|reverse|rjust|round|safe|safeseq|sameas|select|selectattr|sequence|slice|slugify|sort|string|stringformat|striptags|sum|time|timesince|timeuntil|title|trim|truncate|truncatechars|truncatechars_html|truncatewords|truncatewords_html|undefined|unordered_list|upper|urlencode|urlize|urlizetrunc|wordcount|wordwrap|xmlattr|yesno)\b/,
			'important': /\b-?\d+(?:\.\d+)?\b/,
			'variable': /\b\w+?\b/,
			'punctuation' : /[[\];(),.:]/
		}
	}
};
// Base the django grammar on markup, adding HTML and {# ... #} template comments.
Prism.languages.django = Prism.languages.extend('markup', {'comment': /(?:<!--|{#)[\s\S]*?(?:#}|-->)/});
// Updated html tag pattern to allow template tags inside html tags
Prism.languages.django.tag.pattern = /<\/?(?!\d)[^\s>\/=$<]+(?:\s+[^\s>\/=]+(?:=(?:("|')(?:\\[\s\S]|(?!\1)[^\\])*\1|[^>=]+))?)*\s*\/?>/i;
Prism.languages.insertBefore('django', 'entity', _django_template);
Prism.languages.insertBefore('inside', 'tag', _django_template, Prism.languages.django.tag);
if (Prism.languages.javascript) {
	// Combine js code and template tags painting inside <script> blocks
	Prism.languages.insertBefore('inside', 'string', _django_template, Prism.languages.django.script);
	Prism.languages.django.script.inside.string.inside = _django_template;
}
if (Prism.languages.css) {
	// Combine css code and template tags painting inside <style> blocks
	Prism.languages.insertBefore('inside', 'atrule', {'tag': _django_template.property}, Prism.languages.django.style);
	Prism.languages.django.style.inside.string.inside = _django_template;
}
// Add an Jinja2 alias
Prism.languages.jinja2 = Prism.languages.django;
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper
PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<!-- MyBatis mapper backing GoodsDao. -->
<mapper namespace="com.ly.train.flower.sample.web.dao.GoodsDao">
<!-- Returns the ids of goods recommended to the given customer. -->
<select id="findGoodsIdForRecommend" parameterType="int" resultType="java.lang.Integer">
select goodsId from goods_recommend where customerId = #{customerId}
</select>
</mapper>
| {
"pile_set_name": "Github"
} |
<?php
/**
* Copyright since 2007 PrestaShop SA and Contributors
* PrestaShop is an International Registered Trademark & Property of PrestaShop SA
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.md.
* It is also available through the world-wide-web at this URL:
* https://opensource.org/licenses/OSL-3.0
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@prestashop.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade PrestaShop to newer
* versions in the future. If you wish to customize PrestaShop for your
* needs please refer to https://devdocs.prestashop.com/ for more information.
*
* @author PrestaShop SA and Contributors <contact@prestashop.com>
* @copyright Since 2007 PrestaShop SA and Contributors
* @license https://opensource.org/licenses/OSL-3.0 Open Software License (OSL 3.0)
*/
namespace PrestaShop\PrestaShop\Adapter\Requirement;
use ConfigurationTest;
use Symfony\Component\Translation\TranslatorInterface;
/**
 * Check system requirements of a PrestaShop website.
 *
 * Runs the legacy ConfigurationTest checks (the required ones and, outside of
 * host mode, the optional ones) and maps every failing check to a translated,
 * human-readable error message.
 */
class CheckRequirements
{
    /**
     * @var TranslatorInterface Translator used to localize the error messages.
     */
    private $translator;

    /**
     * @param TranslatorInterface $translator
     */
    public function __construct(TranslatorInterface $translator)
    {
        $this->translator = $translator;
    }

    /**
     * Returns a summary of all system requirements.
     *
     * The returned array always contains:
     *  - 'failRequired'  bool   true if at least one required test failed
     *  - 'testsErrors'   array  check name => translated error message
     *  - 'testsRequired' array  raw results of the required tests
     * and, when not running in host mode (_PS_HOST_MODE_ undefined), also:
     *  - 'failOptional'  bool   true if at least one optional test failed
     *  - 'testsOptional' array  raw results of the optional tests
     *
     * @return array
     */
    public function getSummary()
    {
        $paramsRequiredResults = ConfigurationTest::check(ConfigurationTest::getDefaultTests());
        // In host mode the hosting environment is managed for the merchant,
        // so the optional tests are skipped entirely.
        $isHostMode = defined('_PS_HOST_MODE_');
        $paramsOptionalResults = !$isHostMode ? ConfigurationTest::check(ConfigurationTest::getDefaultTestsOp()) : [];
        $failRequired = in_array('fail', $paramsRequiredResults);
        $testsErrors = $this->getErrorMessages();
        // When the "files" check failed, append the list of missing files to
        // its error message so the user knows exactly what to restore.
        if ($failRequired && 'ok' !== $paramsRequiredResults['files']) {
            $tmp = ConfigurationTest::test_files(true);
            if (is_array($tmp) && count($tmp)) {
                $testsErrors['files'] = $testsErrors['files'] . '<br/>(' . implode(', ', $tmp) . ')';
            }
        }
        // Make sure every required check has at least a fallback description.
        $testsErrors = $this->fillMissingDescriptions($testsErrors, $paramsRequiredResults);
        $results = [
            'failRequired' => $failRequired,
            'testsErrors' => $testsErrors,
            'testsRequired' => $paramsRequiredResults,
        ];
        if (!$isHostMode) {
            $results = array_merge($results, [
                'testsErrors' => $this->fillMissingDescriptions($testsErrors, $paramsOptionalResults),
                'failOptional' => in_array('fail', $paramsOptionalResults),
                'testsOptional' => $paramsOptionalResults,
            ]);
        }
        return $results;
    }

    /**
     * Maps each known check name to a translated, actionable error message.
     *
     * @return array check name => translated error message
     */
    private function getErrorMessages()
    {
        return [
            'phpversion' => $this->translator->trans('Update your PHP version.', [], 'Admin.Advparameters.Notification'),
            'upload' => $this->translator->trans('Configure your server to allow file uploads.', [], 'Admin.Advparameters.Notification'),
            'system' => $this->translator->trans('Configure your server to allow the creation of directories and files with write permissions.', [], 'Admin.Advparameters.Notification'),
            'curl' => $this->translator->trans('Enable the CURL extension on your server.', [], 'Admin.Advparameters.Notification'),
            'dom' => $this->translator->trans('Enable the DOM extension on your server.', [], 'Admin.Advparameters.Notification'),
            'fileinfo' => $this->translator->trans('Enable the Fileinfo extension on your server.', [], 'Admin.Advparameters.Notification'),
            'gd' => $this->translator->trans('Enable the GD library on your server.', [], 'Admin.Advparameters.Notification'),
            'json' => $this->translator->trans('Enable the JSON extension on your server.', [], 'Admin.Advparameters.Notification'),
            'mbstring' => $this->translator->trans('Enable the Mbstring extension on your server.', [], 'Admin.Advparameters.Notification'),
            'openssl' => $this->translator->trans('Enable the OpenSSL extension on your server.', [], 'Admin.Advparameters.Notification'),
            'pdo_mysql' => $this->translator->trans('Enable the PDO Mysql extension on your server.', [], 'Admin.Advparameters.Notification'),
            'simplexml' => $this->translator->trans('Enable the XML extension on your server.', [], 'Admin.Advparameters.Notification'),
            'zip' => $this->translator->trans('Enable the ZIP extension on your server.', [], 'Admin.Advparameters.Notification'),
            'mysql_support' => $this->translator->trans('Enable the MySQL support on your server.', [], 'Admin.Advparameters.Notification'),
            'config_dir' => $this->translator->trans('Set write permissions for the "config" folder.', [], 'Admin.Advparameters.Notification'),
            'cache_dir' => $this->translator->trans('Set write permissions for the "cache" folder.', [], 'Admin.Advparameters.Notification'),
            'sitemap' => $this->translator->trans('Set write permissions for the "sitemap.xml" file.', [], 'Admin.Advparameters.Notification'),
            'img_dir' => $this->translator->trans('Set write permissions for the "img" folder and subfolders.', [], 'Admin.Advparameters.Notification'),
            'log_dir' => $this->translator->trans('Set write permissions for the "log" folder and subfolders.', [], 'Admin.Advparameters.Notification'),
            'mails_dir' => $this->translator->trans('Set write permissions for the "mails" folder and subfolders.', [], 'Admin.Advparameters.Notification'),
            'module_dir' => $this->translator->trans('Set write permissions for the "modules" folder and subfolders.', [], 'Admin.Advparameters.Notification'),
            'theme_cache_dir' => $this->translator->trans('Set write permissions for the "themes/%s/cache/" folder and subfolders, recursively.', ['%s' => _THEME_NAME_], 'Admin.Advparameters.Notification'),
            'theme_lang_dir' => $this->translator->trans('Set write permissions for the "themes/%s/lang/" folder and subfolders, recursively.', ['%s' => _THEME_NAME_], 'Admin.Advparameters.Notification'),
            'theme_pdf_lang_dir' => $this->translator->trans('Set write permissions for the "themes/%s/pdf/lang/" folder and subfolders, recursively.', ['%s' => _THEME_NAME_], 'Admin.Advparameters.Notification'),
            'config_sf2_dir' => $this->translator->trans('Set write permissions for the "app/config/" folder and subfolders, recursively.', [], 'Admin.Advparameters.Notification'),
            'translations_sf2' => $this->translator->trans('Set write permissions for the "app/Resources/translations/" folder and subfolders, recursively.', [], 'Admin.Advparameters.Notification'),
            'translations_dir' => $this->translator->trans('Set write permissions for the "translations" folder and subfolders.', [], 'Admin.Advparameters.Notification'),
            'customizable_products_dir' => $this->translator->trans('Set write permissions for the "upload" folder and subfolders.', [], 'Admin.Advparameters.Notification'),
            'virtual_products_dir' => $this->translator->trans('Set write permissions for the "download" folder and subfolders.', [], 'Admin.Advparameters.Notification'),
            'fopen' => $this->translator->trans('Allow the PHP fopen() function on your server.', [], 'Admin.Advparameters.Notification'),
            'gz' => $this->translator->trans('Enable GZIP compression on your server.', [], 'Admin.Advparameters.Notification'),
            'files' => $this->translator->trans('Some PrestaShop files are missing from your server.', [], 'Admin.Advparameters.Notification'),
            'new_phpversion' => $this->translator->trans('You are using PHP %s version. Soon, the latest PHP version supported by PrestaShop will be PHP 5.6. To make sure you’re ready for the future, we recommend you to upgrade to PHP 5.6 now!', ['%s' => PHP_VERSION], 'Admin.Advparameters.Notification'),
            'apache_mod_rewrite' => $this->translator->trans('Enable the Apache mod_rewrite module', [], 'Admin.Advparameters.Notification'),
        ];
    }

    /**
     * Add default message on missing check descriptions.
     *
     * Any check present in $checks but absent from $errorMessages gets a
     * generic "(missing description)" placeholder so the UI never shows an
     * empty entry.
     *
     * @param array $errorMessages check name => translated error message
     * @param array $checks check name => check result
     *
     * @return array Error messages with fallback for missing entries
     */
    private function fillMissingDescriptions($errorMessages, $checks)
    {
        foreach (array_keys(array_diff_key($checks, $errorMessages)) as $key) {
            $errorMessages[$key] = $this->translator->trans('%key% (missing description)', ['%key%' => $key], 'Admin.Advparameters.Feature');
        }
        return $errorMessages;
    }
}
| {
"pile_set_name": "Github"
} |
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Constants that were deprecated or moved to enums in the FreeBSD headers. Keep
// them here for backwards compatibility.
package unix
const (
	// Interface flags (IFF_*).
	IFF_SMART = 0x20

	// Interface types (IFT_*).
	IFT_1822                          = 0x2
	IFT_A12MPPSWITCH                  = 0x82
	IFT_AAL2                          = 0xbb
	IFT_AAL5                          = 0x31
	IFT_ADSL                          = 0x5e
	IFT_AFLANE8023                    = 0x3b
	IFT_AFLANE8025                    = 0x3c
	IFT_ARAP                          = 0x58
	IFT_ARCNET                        = 0x23
	IFT_ARCNETPLUS                    = 0x24
	IFT_ASYNC                         = 0x54
	IFT_ATM                           = 0x25
	IFT_ATMDXI                        = 0x69
	IFT_ATMFUNI                       = 0x6a
	IFT_ATMIMA                        = 0x6b
	IFT_ATMLOGICAL                    = 0x50
	IFT_ATMRADIO                      = 0xbd
	IFT_ATMSUBINTERFACE               = 0x86
	IFT_ATMVCIENDPT                   = 0xc2
	IFT_ATMVIRTUAL                    = 0x95
	IFT_BGPPOLICYACCOUNTING           = 0xa2
	IFT_BSC                           = 0x53
	IFT_CCTEMUL                       = 0x3d
	IFT_CEPT                          = 0x13
	IFT_CES                           = 0x85
	IFT_CHANNEL                       = 0x46
	IFT_CNR                           = 0x55
	IFT_COFFEE                        = 0x84
	IFT_COMPOSITELINK                 = 0x9b
	IFT_DCN                           = 0x8d
	IFT_DIGITALPOWERLINE              = 0x8a
	IFT_DIGITALWRAPPEROVERHEADCHANNEL = 0xba
	IFT_DLSW                          = 0x4a
	IFT_DOCSCABLEDOWNSTREAM           = 0x80
	IFT_DOCSCABLEMACLAYER             = 0x7f
	IFT_DOCSCABLEUPSTREAM             = 0x81
	IFT_DS0                           = 0x51
	IFT_DS0BUNDLE                     = 0x52
	IFT_DS1FDL                        = 0xaa
	IFT_DS3                           = 0x1e
	IFT_DTM                           = 0x8c
	IFT_DVBASILN                      = 0xac
	IFT_DVBASIOUT                     = 0xad
	IFT_DVBRCCDOWNSTREAM              = 0x93
	IFT_DVBRCCMACLAYER                = 0x92
	IFT_DVBRCCUPSTREAM                = 0x94
	IFT_ENC                           = 0xf4
	IFT_EON                           = 0x19
	IFT_EPLRS                         = 0x57
	IFT_ESCON                         = 0x49
	IFT_ETHER                         = 0x6
	IFT_FAITH                         = 0xf2
	IFT_FAST                          = 0x7d
	IFT_FASTETHER                     = 0x3e
	IFT_FASTETHERFX                   = 0x45
	IFT_FDDI                          = 0xf
	IFT_FIBRECHANNEL                  = 0x38
	IFT_FRAMERELAYINTERCONNECT        = 0x3a
	IFT_FRAMERELAYMPI                 = 0x5c
	IFT_FRDLCIENDPT                   = 0xc1
	IFT_FRELAY                        = 0x20
	IFT_FRELAYDCE                     = 0x2c
	IFT_FRF16MFRBUNDLE                = 0xa3
	IFT_FRFORWARD                     = 0x9e
	IFT_G703AT2MB                     = 0x43
	IFT_G703AT64K                     = 0x42
	IFT_GIF                           = 0xf0
	IFT_GIGABITETHERNET               = 0x75
	IFT_GR303IDT                      = 0xb2
	IFT_GR303RDT                      = 0xb1
	IFT_H323GATEKEEPER                = 0xa4
	IFT_H323PROXY                     = 0xa5
	IFT_HDH1822                       = 0x3
	IFT_HDLC                          = 0x76
	IFT_HDSL2                         = 0xa8
	IFT_HIPERLAN2                     = 0xb7
	IFT_HIPPI                         = 0x2f
	IFT_HIPPIINTERFACE                = 0x39
	IFT_HOSTPAD                       = 0x5a
	IFT_HSSI                          = 0x2e
	IFT_HY                            = 0xe
	IFT_IBM370PARCHAN                 = 0x48
	IFT_IDSL                          = 0x9a
	IFT_IEEE80211                     = 0x47
	IFT_IEEE80212                     = 0x37
	IFT_IEEE8023ADLAG                 = 0xa1
	IFT_IFGSN                         = 0x91
	IFT_IMT                           = 0xbe
	IFT_INTERLEAVE                    = 0x7c
	IFT_IP                            = 0x7e
	IFT_IPFORWARD                     = 0x8e
	IFT_IPOVERATM                     = 0x72
	IFT_IPOVERCDLC                    = 0x6d
	IFT_IPOVERCLAW                    = 0x6e
	IFT_IPSWITCH                      = 0x4e
	IFT_IPXIP                         = 0xf9
	IFT_ISDN                          = 0x3f
	IFT_ISDNBASIC                     = 0x14
	IFT_ISDNPRIMARY                   = 0x15
	IFT_ISDNS                         = 0x4b
	IFT_ISDNU                         = 0x4c
	IFT_ISO88022LLC                   = 0x29
	IFT_ISO88023                      = 0x7
	IFT_ISO88024                      = 0x8
	IFT_ISO88025                      = 0x9
	IFT_ISO88025CRFPINT               = 0x62
	IFT_ISO88025DTR                   = 0x56
	IFT_ISO88025FIBER                 = 0x73
	IFT_ISO88026                      = 0xa
	IFT_ISUP                          = 0xb3
	IFT_L3IPXVLAN                     = 0x89
	IFT_LAPB                          = 0x10
	IFT_LAPD                          = 0x4d
	IFT_LAPF                          = 0x77
	IFT_LOCALTALK                     = 0x2a
	IFT_LOOP                          = 0x18
	IFT_MEDIAMAILOVERIP               = 0x8b
	IFT_MFSIGLINK                     = 0xa7
	IFT_MIOX25                        = 0x26
	IFT_MODEM                         = 0x30
	IFT_MPC                           = 0x71
	IFT_MPLS                          = 0xa6
	IFT_MPLSTUNNEL                    = 0x96
	IFT_MSDSL                         = 0x8f
	IFT_MVL                           = 0xbf
	IFT_MYRINET                       = 0x63
	IFT_NFAS                          = 0xaf
	IFT_NSIP                          = 0x1b
	IFT_OPTICALCHANNEL                = 0xc3
	IFT_OPTICALTRANSPORT              = 0xc4
	IFT_OTHER                         = 0x1
	IFT_P10                           = 0xc
	IFT_P80                           = 0xd
	IFT_PARA                          = 0x22
	IFT_PFLOG                         = 0xf6
	IFT_PFSYNC                        = 0xf7
	IFT_PLC                           = 0xae
	IFT_POS                           = 0xab
	IFT_PPPMULTILINKBUNDLE            = 0x6c
	IFT_PROPBWAP2MP                   = 0xb8
	IFT_PROPCNLS                      = 0x59
	IFT_PROPDOCSWIRELESSDOWNSTREAM    = 0xb5
	IFT_PROPDOCSWIRELESSMACLAYER      = 0xb4
	IFT_PROPDOCSWIRELESSUPSTREAM      = 0xb6
	IFT_PROPMUX                       = 0x36
	IFT_PROPWIRELESSP2P               = 0x9d
	IFT_PTPSERIAL                     = 0x16
	IFT_PVC                           = 0xf1
	IFT_QLLC                          = 0x44
	IFT_RADIOMAC                      = 0xbc
	IFT_RADSL                         = 0x5f
	IFT_REACHDSL                      = 0xc0
	IFT_RFC1483                       = 0x9f
	IFT_RS232                         = 0x21
	IFT_RSRB                          = 0x4f
	IFT_SDLC                          = 0x11
	IFT_SDSL                          = 0x60
	IFT_SHDSL                         = 0xa9
	IFT_SIP                           = 0x1f
	IFT_SLIP                          = 0x1c
	IFT_SMDSDXI                       = 0x2b
	IFT_SMDSICIP                      = 0x34
	IFT_SONET                         = 0x27
	IFT_SONETOVERHEADCHANNEL          = 0xb9
	IFT_SONETPATH                     = 0x32
	IFT_SONETVT                       = 0x33
	IFT_SRP                           = 0x97
	IFT_SS7SIGLINK                    = 0x9c
	IFT_STACKTOSTACK                  = 0x6f
	IFT_STARLAN                       = 0xb
	IFT_STF                           = 0xd7
	IFT_T1                            = 0x12
	IFT_TDLC                          = 0x74
	IFT_TERMPAD                       = 0x5b
	IFT_TR008                         = 0xb0
	IFT_TRANSPHDLC                    = 0x7b
	IFT_TUNNEL                        = 0x83
	IFT_ULTRA                         = 0x1d
	IFT_USB                           = 0xa0
	IFT_V11                           = 0x40
	IFT_V35                           = 0x2d
	IFT_V36                           = 0x41
	IFT_V37                           = 0x78
	IFT_VDSL                          = 0x61
	IFT_VIRTUALIPADDRESS              = 0x70
	IFT_VOICEEM                       = 0x64
	IFT_VOICEENCAP                    = 0x67
	IFT_VOICEFXO                      = 0x65
	IFT_VOICEFXS                      = 0x66
	IFT_VOICEOVERATM                  = 0x98
	IFT_VOICEOVERFRAMERELAY           = 0x99
	IFT_VOICEOVERIP                   = 0x68
	IFT_X213                          = 0x5d
	IFT_X25                           = 0x5
	IFT_X25DDN                        = 0x4
	IFT_X25HUNTGROUP                  = 0x7a
	IFT_X25MLP                        = 0x79
	IFT_X25PLE                        = 0x28
	IFT_XETHER                        = 0x1a

	// IP-level constants.
	IPPROTO_MAXID = 0x34
	IPV6_FAITH    = 0x1d
	IP_FAITH      = 0x16

	// mmap(2) flags.
	MAP_NORESERVE = 0x40
	MAP_RENAME    = 0x20

	// Routing constants.
	NET_RT_MAXID  = 0x6
	RTF_PRCLONING = 0x10000
	RTM_OLDADD    = 0x9
	RTM_OLDDEL    = 0xa

	// Socket ioctl request numbers.
	SIOCADDRT       = 0x8030720a
	SIOCALIFADDR    = 0x8118691b
	SIOCDELRT       = 0x8030720b
	SIOCDLIFADDR    = 0x8118691d
	SIOCGLIFADDR    = 0xc118691c
	SIOCGLIFPHYADDR = 0xc118694b
	SIOCSLIFPHYADDR = 0x8118694a
)
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_THUMBNAILS_THUMBNAIL_TAB_HELPER_H_
#define CHROME_BROWSER_THUMBNAILS_THUMBNAIL_TAB_HELPER_H_
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "chrome/browser/thumbnails/thumbnailing_context.h"
#include "content/public/browser/notification_observer.h"
#include "content/public/browser/notification_registrar.h"
#include "content/public/browser/readback_types.h"
#include "content/public/browser/web_contents_observer.h"
#include "content/public/browser/web_contents_user_data.h"
namespace content {
class RenderViewHost;
class RenderWidgetHost;
}
// Per-tab helper that generates thumbnails of its WebContents and passes
// them to the thumbnail service. Capture is driven by notification and
// WebContentsObserver callbacks (render view lifetime, load start/stop and
// widget visibility changes).
class ThumbnailTabHelper
    : public content::NotificationObserver,
      public content::WebContentsObserver,
      public content::WebContentsUserData<ThumbnailTabHelper> {
 public:
  ~ThumbnailTabHelper() override;

 private:
  // Created exclusively through WebContentsUserData (hence the friend
  // declaration below).
  explicit ThumbnailTabHelper(content::WebContents* contents);
  friend class content::WebContentsUserData<ThumbnailTabHelper>;

  // content::NotificationObserver overrides.
  void Observe(int type,
               const content::NotificationSource& source,
               const content::NotificationDetails& details) override;

  // content::WebContentsObserver overrides.
  void RenderViewDeleted(content::RenderViewHost* render_view_host) override;
  void DidStartLoading() override;
  void NavigationStopped() override;

  // Update the thumbnail of the given tab contents if necessary.
  void UpdateThumbnailIfNecessary();

  // Initiate asynchronous generation of a thumbnail from the web contents.
  void AsyncProcessThumbnail(
      scoped_refptr<thumbnails::ThumbnailService> thumbnail_service);

  // Create a thumbnail from the web contents bitmap.
  void ProcessCapturedBitmap(
      scoped_refptr<thumbnails::ThumbnailingAlgorithm> algorithm,
      const SkBitmap& bitmap,
      content::ReadbackResponse response);

  // Pass the thumbnail to the thumbnail service.
  void UpdateThumbnail(
      const thumbnails::ThumbnailingContext& context,
      const SkBitmap& thumbnail);

  // Clean up after thumbnail generation has ended.
  void CleanUpFromThumbnailGeneration();

  // Called when a render view host was created for a WebContents.
  void RenderViewHostCreated(content::RenderViewHost* renderer);

  // Indicates that the given widget has changed its visibility.
  void WidgetHidden(content::RenderWidgetHost* widget);

  content::NotificationRegistrar registrar_;

  // Context of the thumbnail capture currently in flight; presumably reset
  // between captures -- TODO(review): confirm lifetime against the .cc file.
  scoped_refptr<thumbnails::ThumbnailingContext> thumbnailing_context_;

  // Whether the current page load was interrupted; NOTE(review): looks like
  // this is set from NavigationStopped() -- verify in the implementation.
  bool load_interrupted_;

  base::WeakPtrFactory<ThumbnailTabHelper> weak_factory_;

  DISALLOW_COPY_AND_ASSIGN(ThumbnailTabHelper);
};
| {
"pile_set_name": "Github"
} |
namespace EasyLicense.Lib.License
{
/// <summary>
/// Identifies the kind of license that was issued.
/// Note: member order is significant (enum values are positional).
/// </summary>
public enum LicenseType
{
	/// <summary>
	/// No type specified
	/// </summary>
	None,

	/// <summary>
	/// For trial use
	/// </summary>
	Trial,

	/// <summary>
	/// Standard license
	/// </summary>
	Standard,

	/// <summary>
	/// For personal use
	/// </summary>
	Personal,

	/// <summary>
	/// Floating license
	/// </summary>
	Floating,

	/// <summary>
	/// Subscription based license
	/// </summary>
	Subscription
}
} | {
"pile_set_name": "Github"
} |
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.ivyservice.ivyresolve.strategy;
import com.google.common.collect.Lists;
import org.gradle.api.internal.artifacts.metadata.ModuleVersionMetaData;
import java.util.*;
/**
 * A {@link VersionMatcher} that delegates every query to the first
 * registered matcher able to handle the given selector. Matchers are
 * consulted in registration order.
 */
public class ChainVersionMatcher implements VersionMatcher {
    /** Delegate matchers, consulted in the order they were added. */
    private final List<VersionMatcher> delegates = Lists.newArrayList();

    /**
     * Appends a matcher to the end of the chain.
     *
     * @param matcher the matcher to register
     */
    public void add(VersionMatcher matcher) {
        delegates.add(matcher);
    }

    /**
     * Unsupported on the chain itself; callers are expected to query the
     * individual matchers instead.
     */
    public boolean canHandle(String selector) {
        // not expected to be called
        throw new UnsupportedOperationException("canHandle");
    }

    public boolean isDynamic(String selector) {
        return firstMatcherFor(selector).isDynamic(selector);
    }

    public boolean needModuleMetadata(String selector) {
        return firstMatcherFor(selector).needModuleMetadata(selector);
    }

    public boolean accept(String selector, String candidate) {
        return firstMatcherFor(selector).accept(selector, candidate);
    }

    public boolean accept(String selector, ModuleVersionMetaData candidate) {
        return firstMatcherFor(selector).accept(selector, candidate);
    }

    public int compare(String selector, String candidate) {
        return firstMatcherFor(selector).compare(selector, candidate);
    }

    /**
     * Returns the first registered matcher that can handle {@code selector}.
     *
     * @throws IllegalArgumentException if no registered matcher can handle it
     */
    private VersionMatcher firstMatcherFor(String selector) {
        for (int i = 0; i < delegates.size(); i++) {
            VersionMatcher candidate = delegates.get(i);
            if (candidate.canHandle(selector)) {
                return candidate;
            }
        }
        throw new IllegalArgumentException("Invalid version selector: " + selector);
    }
}
| {
"pile_set_name": "Github"
} |
/*
* NOTE: This copyright does *not* cover user programs that use HQ
* program services by normal system calls through the application
* program interfaces provided as part of the Hyperic Plug-in Development
* Kit or the Hyperic Client Development Kit - this is merely considered
* normal use of the program, and does *not* fall under the heading of
* "derived work".
*
* Copyright (C) [2004, 2005, 2006], Hyperic, Inc.
* This file is part of HQ.
*
* HQ is free software; you can redistribute it and/or modify
* it under the terms version 2 of the GNU General Public License as
* published by the Free Software Foundation. This program is distributed
* in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA.
*/
package org.hyperic.util.pager;
import java.io.Serializable;
/**
 * Holder of integer constants identifying the attribute a paged query
 * should sort on. This class is never instantiated; it only exposes
 * constants.
 */
public class SortAttribute implements Serializable {

    /** Prevents instantiation; this class is a constants holder only. */
    private SortAttribute () {}

    /** Sort by the query's default attribute. */
    public static final int DEFAULT = 0;

    // Generic attributes
    public static final int NAME = 1;
    public static final int CTIME = 2;

    // Authz sort attributes - specifies which column to sort on;
    // for example, for 'subject_name', sort on column #3
    public static final int ROLE_NAME = 1;
    public static final int RESGROUP_NAME = 2;
    public static final int RESTYPE_NAME = 4;
    public static final int RESOURCE_NAME = 5;
    public static final int OPERATION_NAME = 6;
    public static final int ROLE_MEMBER_CNT= 17;

    public static final int SUBJECT_NAME = 3;
    public static final int FIRST_NAME = 7;
    public static final int LAST_NAME = 8;

    // Event sort attributes
    public static final int EVENT_LOG_CTIME = 1;

    // Control sort attributes
    public static final int CONTROL_ACTION = 9;
    public static final int CONTROL_STATUS = 10;
    public static final int CONTROL_STARTED = 11;
    public static final int CONTROL_ELAPSED = 12;
    public static final int CONTROL_DATESCHEDULED = 13;
    public static final int CONTROL_DESCRIPTION = 14;
    public static final int CONTROL_NEXTFIRE = 15;
    public static final int CONTROL_ENTITYNAME = 16;

    // NOTE: values below share the authz numbering space (21+).
    public static final int OWNER_NAME = 21;
    public static final int SERVICE_NAME = 22;
    public static final int SERVICE_TYPE = 23;

    // Response-time sort attributes
    public static final int RT_NAME = 24;
    public static final int RT_LOW = 25;
    public static final int RT_AVG = 26;
    public static final int RT_PEAK = 27;
}
| {
"pile_set_name": "Github"
} |
React;
function f(){
React;
}
React.Children.map(x, y, z);
| {
"pile_set_name": "Github"
} |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2008-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package griffon.annotations.core;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Marker annotation: the annotated element must not be null.
 * <p>
 * Annotated fields must not be null after construction has completed.
 * <p>
 * When this annotation is applied to a method it applies to the method return value.
 * <p>
 * The annotation declares no elements and is retained at runtime.
 *
 * @since 3.0.0
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface Nonnull {
}
| {
"pile_set_name": "Github"
} |
//
// EventUnsubscriptionViaAnonymousDelegateIssue.cs
//
// Author:
// Mike Krüger <mkrueger@xamarin.com>
//
// Copyright (c) 2013 Xamarin Inc. (http://xamarin.com)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.Linq;
using ICSharpCode.NRefactory.PatternMatching;
using ICSharpCode.NRefactory.Refactoring;
using ICSharpCode.NRefactory.Semantics;
namespace ICSharpCode.NRefactory.CSharp.Refactoring
{
[IssueDescription("Event unsubscription via anonymous delegate",
                  Description="Event unsubscription via anonymous delegate is useless",
                  Category = IssueCategories.CodeQualityIssues,
                  Severity = Severity.Warning,
                  AnalysisDisableKeyword = "EventUnsubscriptionViaAnonymousDelegate")]
public class EventUnsubscriptionViaAnonymousDelegateIssue : GatherVisitorCodeIssueProvider
{
	protected override IGatherVisitor CreateVisitor(BaseRefactoringContext context)
	{
		return new GatherVisitor(context);
	}

	class GatherVisitor : GatherVisitorBase<EventUnsubscriptionViaAnonymousDelegateIssue>
	{
		public GatherVisitor (BaseRefactoringContext ctx) : base (ctx)
		{
		}

		public override void VisitAssignmentExpression(AssignmentExpression assignmentExpression)
		{
			base.VisitAssignmentExpression(assignmentExpression);
			// Only "-=" can possibly be an event unsubscription.
			if (assignmentExpression.Operator != AssignmentOperatorType.Subtract)
				return;
			// The right-hand side must be an anonymous function: removing a
			// freshly created delegate can never match the subscribed one.
			var handler = assignmentExpression.Right;
			bool handlerIsAnonymous = handler is AnonymousMethodExpression || handler is LambdaExpression;
			if (!handlerIsAnonymous)
				return;
			// The left-hand side must resolve to an event member.
			var memberResult = ctx.Resolve(assignmentExpression.Left) as MemberResolveResult;
			if (memberResult == null || memberResult.Member.SymbolKind != ICSharpCode.NRefactory.TypeSystem.SymbolKind.Event)
				return;
			AddIssue(new CodeIssue(
				assignmentExpression.OperatorToken,
				ctx.TranslateString("Event unsubscription via anonymous delegate is useless")
			));
		}
	}
}
}
| {
"pile_set_name": "Github"
} |
/*-------------------------------------------------------------------------------------------------------------------*\
| Copyright (C) 2014-15 PayPal |
| |
| Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance |
| with the License. |
| |
| You may obtain a copy of the License at |
| |
| http://www.apache.org/licenses/LICENSE-2.0 |
| |
| Unless required by applicable law or agreed to in writing, software distributed under the License is distributed |
| on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for |
| the specific language governing permissions and limitations under the License. |
\*-------------------------------------------------------------------------------------------------------------------*/
package com.paypal.selion.testcomponents;
import java.util.Map;
import com.paypal.selion.testcomponents.BasicPageImpl;
import com.paypal.selion.platform.html.Button;
import com.paypal.selion.platform.html.Link;
import com.paypal.selion.platform.html.Container;
import com.paypal.selion.platform.html.ParentTraits;
public class TestInitializeElementsPage extends BasicPageImpl {
private HeaderContainer headerContainer;
private Button preLoginButton;
/*
* SeLion GUI Html Object Standard
*
* The naming conventions followed in SeLion for html elements are as follows,
*
* <alias>{corresponding class name of the html element in SeLion}
* where - <alias> will be the object name with its first letter in lowercase.
*
*
* <alias>Button For Button html element.
* <alias>CheckBox For Check Box html element.
* <alias>DatePicker For Date Picker html element.
* <alias>Form For Form html element.
* <alias>Image For Image html element.
* <alias>Label For Label html element.
* <alias>Link For Link html element.
* <alias>RadioButton For Radio Button html element.
* <alias>List For Select List html element.
* <alias>Table For Table html element.
* <alias>TextField For Text Field html element.
* <alias>Container For Container html element.
*
*/
private static String CLASS_NAME = "TestInitializeElementsPage";
private static String PAGE_DOMAIN = "paypal";
/**
* Creates a new TestInitializeElementsPage object
*/
public TestInitializeElementsPage() {
super();
super.initPage(PAGE_DOMAIN, CLASS_NAME);
}
/**
* Creates a new TestInitializeElementsPage object
* @param siteLocale
* The Country locale for the site you are accessing
*/
public TestInitializeElementsPage(String siteLocale) {
super();
super.initPage(PAGE_DOMAIN, CLASS_NAME, siteLocale);
}
public TestInitializeElementsPage getPage() {
return this;
}
/**
* Used to get the HeaderContainer.
*
* @return headerContainer
*/
public HeaderContainer getHeaderContainer() {
HeaderContainer element = this.headerContainer;
if (element == null) {
this.headerContainer = new HeaderContainer(this.getObjectMap().get("headerContainer"),
"headerContainer", this, this.getObjectContainerMap().get("headerContainer"));
}
return this.headerContainer;
}
/**
* Used to get HeaderContainer at specified index.
* @return headerContainer at index
*/
public HeaderContainer getHeaderContainer(int index) {
getHeaderContainer().setIndex(index);
return headerContainer;
}
/**
* Used to get preLoginButton in the page TestInitializeElementsPage
*
* @return preLoginButton
*/
public Button getPreLoginButton() {
Button element = this.preLoginButton;
if (element == null) {
this.preLoginButton = new Button(this.getObjectMap().get("preLoginButton"), "preLoginButton", this);
}
return this.preLoginButton;
}
/**
* Used to click preLoginButton in the page TestInitializeElementsPage and check that resulting page contains
* expected item.
*/
public void clickPreLoginButton(Object... expected) {
getPreLoginButton().click(expected);
}
/**
* Used to click preLoginButton in the page TestInitializeElementsPage
*/
public void clickPreLoginButton() {
getPreLoginButton().click();
}
/**
* Used to get the value of preLoginButton in the page TestInitializeElementsPage.
* @return text in preLoginButton
*/
public String getPreLoginButtonValue() {
return getPreLoginButton().getText();
}
public class HeaderContainer extends Container {
private Link someLink;
/**
* HeaderContainer Construction method <br>
* <br>
* <b>Usage:</b>
*
* <pre>
* private TestInitializeElementsPage pageInstance = new TestInitializeElementsPage();
* private TestInitializeElementsPage.HeaderContainer headerContainer = pageInstance.new HeaderContainer("//span[@id='containerLocator']");
* </pre>
*
* @param locator
* the element locator
*/
public HeaderContainer(String locator) {
super(locator);
}
/**
* Use this constructor to override default controlName for logging purposes. Default controlName would be the
* element locator.
*
* @param locator
* the element locator
* @param controlName
* the control name used for logging
*/
public HeaderContainer(String locator, String controlName) {
super(locator, controlName);
}
/**
* Use this constructor to override default controlName and assign a parent
*
* @param locator
* A String that represents the means to locate this element (could be id/name/xpath/css locator).
* @param controlName
* the control name used for logging.
* @param parent
* A {@link ParentTraits} object that represents the parent element for this element.
*
*/
public HeaderContainer(String locator, String controlName, ParentTraits parent) {
super(locator, controlName, parent);
}
public HeaderContainer(String locator, String controlName, ParentTraits parent,
Map<String, String> containerElements) {
super(locator, controlName, parent, containerElements);
}
private HeaderContainer getContainer() {
if (!isInitialized()) {
getObjectMap();
}
return this;
}
/**
*
* Used to get someLink in headerContainer
*
* @return someLink
*/
public Link getSomeLink() {
Link containerElement = getContainer().someLink;
if (containerElement == null) {
getContainer().someLink = new Link(this.containerElements.get("someLink"), "someLink", this);
}
return getContainer().someLink;
}
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!--Generated by crowdin.com-->
<resources></resources>
| {
"pile_set_name": "Github"
} |
table_create Tags TABLE_PAT_KEY ShortText
table_create Memos TABLE_HASH_KEY ShortText
column_create Memos tag COLUMN_SCALAR Tags
column_create Memos user COLUMN_SCALAR ShortText
column_create Memos priority COLUMN_SCALAR Int64
load --table Memos
[
{"_key": "Groonga1", "tag": "Groonga", "user": "user1", "priority": 10},
{"_key": "Groonga2", "tag": "Groonga", "user": "user1", "priority": 20},
{"_key": "Groonga3", "tag": "Groonga", "user": "user1", "priority": 60},
{"_key": "Groonga4", "tag": "Groonga", "user": "user2", "priority": 61},
{"_key": "Groonga5", "tag": "Groonga", "user": "user2", "priority": 24},
{"_key": "Groonga6", "tag": "Groonga", "user": "user2", "priority": 8},
{"_key": "Mroonga1", "tag": "Mroonga", "user": "user3", "priority": 7},
{"_key": "Mroonga2", "tag": "Mroonga", "user": "user3", "priority": 3},
{"_key": "Mroonga3", "tag": "Mroonga", "user": "user3", "priority": -4},
{"_key": "Mroonga4", "tag": "Mroonga", "user": "user4", "priority": -1},
{"_key": "Mroonga5", "tag": "Mroonga", "user": "user4", "priority": -2},
{"_key": "Mroonga6", "tag": "Mroonga", "user": "user4", "priority": -3}
]
select Memos \
--limit 0 \
--drilldowns[tag].keys tag,user \
--drilldowns[tag].calc_types AVG \
--drilldowns[tag].calc_target priority \
--drilldowns[tag].output_columns _value.tag,_value.user,_avg
| {
"pile_set_name": "Github"
} |
-- Data module returning a table with a single (currently empty) entry keyed
-- "trident_long". Its meaning is not evident from this file alone;
-- presumably consumed by code that looks the key up - verify at the
-- require() call sites.
return {
	trident_long = {},
}
| {
"pile_set_name": "Github"
} |
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef SERVICES_DEVICE_USB_USB_SERVICE_WIN_H_
#define SERVICES_DEVICE_USB_USB_SERVICE_WIN_H_
#include "services/device/usb/usb_service.h"
#include <list>
#include <unordered_map>
#include "base/containers/flat_map.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/scoped_observer.h"
#include "base/sequenced_task_runner.h"
#include "base/strings/string16.h"
#include "device/base/device_monitor_win.h"
#include "services/device/usb/usb_device_win.h"
namespace device {
// Windows implementation of UsbService.  Watches for device arrival /
// removal via DeviceMonitorWin and maintains a map of enumerated USB
// devices keyed by their device path.  Blocking Win32 enumeration work
// is delegated to a helper living on |blocking_task_runner_|.
class UsbServiceWin final : public DeviceMonitorWin::Observer,
                            public UsbService {
 public:
  UsbServiceWin();
  ~UsbServiceWin() override;
 private:
  class BlockingTaskRunnerHelper;
  // device::UsbService implementation
  void GetDevices(GetDevicesCallback callback) override;
  // device::DeviceMonitorWin::Observer implementation
  void OnDeviceAdded(const GUID& class_guid,
                     const base::string16& device_path) override;
  void OnDeviceRemoved(const GUID& class_guid,
                       const base::string16& device_path) override;
  // Methods called by BlockingThreadHelper
  void HelperStarted();
  void CreateDeviceObject(
      const base::string16& device_path,
      const base::string16& hub_path,
      const base::flat_map<int, UsbDeviceWin::FunctionInfo>& functions,
      uint32_t bus_number,
      uint32_t port_number,
      const base::string16& driver_name);
  void UpdateFunction(const base::string16& device_path,
                      int interface_number,
                      const UsbDeviceWin::FunctionInfo& function_info);
  // Invoked once a device has finished (or failed) initialisation.
  void DeviceReady(scoped_refptr<UsbDeviceWin> device, bool success);
  // True once the helper has started and the initial enumeration pass
  // has fully completed.
  bool enumeration_ready() {
    return helper_started_ && first_enumeration_countdown_ == 0;
  }
  // Enumeration callbacks are queued until an enumeration completes.
  bool helper_started_ = false;
  // Number of devices still pending from the first enumeration pass.
  uint32_t first_enumeration_countdown_ = 0;
  std::list<GetDevicesCallback> enumeration_callbacks_;
  scoped_refptr<base::SequencedTaskRunner> blocking_task_runner_;
  std::unique_ptr<BlockingTaskRunnerHelper, base::OnTaskRunnerDeleter> helper_;
  std::unordered_map<base::string16, scoped_refptr<UsbDeviceWin>>
      devices_by_path_;
  ScopedObserver<DeviceMonitorWin, DeviceMonitorWin::Observer> device_observer_;
  base::WeakPtrFactory<UsbServiceWin> weak_factory_{this};
  DISALLOW_COPY_AND_ASSIGN(UsbServiceWin);
};
} // namespace device
#endif // SERVICES_DEVICE_USB_USB_SERVICE_WIN_H_
| {
"pile_set_name": "Github"
} |
#ifndef __WINBOND_WB35REG_F_H
#define __WINBOND_WB35REG_F_H
#include "wbhal.h"
/*
* ====================================
* Interface function declare
* ====================================
*/
/* Register-layer setup/teardown */
unsigned char Wb35Reg_initial(struct hw_data *hw_data);
void Uxx_power_on_procedure(struct hw_data *hw_data);
void Uxx_power_off_procedure(struct hw_data *hw_data);
void Uxx_ReadEthernetAddress(struct hw_data *hw_data);
void Dxx_initial(struct hw_data *hw_data);
void Mxx_initial(struct hw_data *hw_data);
/* RF synthesizer / baseband configuration */
void RFSynthesizer_initial(struct hw_data *hw_data);
void RFSynthesizer_SwitchingChannel(struct hw_data *hw_data, struct chan_info channel);
void BBProcessor_initial(struct hw_data *hw_data);
void BBProcessor_RateChanging(struct hw_data *hw_data, u8 rate);
/* Per-chip transmit-power setters; each returns the applied index */
u8 RFSynthesizer_SetPowerIndex(struct hw_data *hw_data, u8 power_index);
u8 RFSynthesizer_SetMaxim2828_24Power(struct hw_data *, u8 index);
u8 RFSynthesizer_SetMaxim2828_50Power(struct hw_data *, u8 index);
u8 RFSynthesizer_SetMaxim2827_24Power(struct hw_data *, u8 index);
u8 RFSynthesizer_SetMaxim2827_50Power(struct hw_data *, u8 index);
u8 RFSynthesizer_SetMaxim2825Power(struct hw_data *, u8 index);
u8 RFSynthesizer_SetAiroha2230Power(struct hw_data *, u8 index);
u8 RFSynthesizer_SetAiroha7230Power(struct hw_data *, u8 index);
u8 RFSynthesizer_SetWinbond242Power(struct hw_data *, u8 index);
void GetTxVgaFromEEPROM(struct hw_data *hw_data);
void EEPROMTxVgaAdjust(struct hw_data *hw_data);
/* Write RF control data through register 0x0864 */
#define RFWriteControlData(_A, _V) Wb35Reg_Write(_A, 0x0864, _V)
void Wb35Reg_destroy(struct hw_data *hw_data);
/* Asynchronous and synchronous register access primitives */
unsigned char Wb35Reg_Read(struct hw_data *hw_data, u16 register_no, u32 *register_value);
unsigned char Wb35Reg_ReadSync(struct hw_data *hw_data, u16 register_no, u32 *register_value);
unsigned char Wb35Reg_Write(struct hw_data *hw_data, u16 register_no, u32 register_value);
unsigned char Wb35Reg_WriteSync(struct hw_data *hw_data, u16 register_no, u32 register_value);
unsigned char Wb35Reg_WriteWithCallbackValue(struct hw_data *hw_data,
						u16 register_no,
						u32 register_value,
						s8 *value,
						s8 len);
unsigned char Wb35Reg_BurstWrite(struct hw_data *hw_data,
					u16 register_no,
					u32 *register_data,
					u8 number_of_data,
					u8 flag);
/* Endpoint-0 "virtual machine" driving queued register operations */
void Wb35Reg_EP0VM(struct hw_data *hw_data);
void Wb35Reg_EP0VM_start(struct hw_data *hw_data);
void Wb35Reg_EP0VM_complete(struct urb *urb);
/* Misc helpers: bit reversal, multicast hashing, CRC, calibration */
u32 BitReverse(u32 data, u32 data_length);
void CardGetMulticastBit(u8 address[MAC_ADDR_LENGTH], u8 *byte, u8 *value);
u32 CardComputeCrc(u8 *buffer, u32 length);
void Wb35Reg_phy_calibration(struct hw_data *hw_data);
void Wb35Reg_Update(struct hw_data *hw_data, u16 register_no, u32 register_value);
unsigned char adjust_TXVGA_for_iq_mag(struct hw_data *hw_data);
#endif
| {
"pile_set_name": "Github"
} |
/*
DataFlash Remote(via MAVLink) logging
*/
#include "DataFlash_MAVLink.h"
#if DATAFLASH_MAVLINK_SUPPORT
#include "LogStructure.h"
#define REMOTE_LOG_DEBUGGING 0
#if REMOTE_LOG_DEBUGGING
#include <stdio.h>
# define Debug(fmt, args ...) do {printf("%s:%d: " fmt "\n", __FUNCTION__, __LINE__, ## args); hal.scheduler->delay(1); } while(0)
#else
# define Debug(fmt, args ...)
#endif
#include <GCS_MAVLink/GCS.h>
extern const AP_HAL::HAL& hal;
// initialisation: create the semaphore guarding the block structures
// and allocate the pool of transmit blocks, halving the requested
// count until malloc succeeds (or the count drops below 8).
void DataFlash_MAVLink::Init()
{
    semaphore = hal.util->new_semaphore();
    if (semaphore == nullptr) {
        AP_HAL::panic("Failed to create DataFlash_MAVLink semaphore");
        return;
    }
    DataFlash_Backend::Init();
    _blocks = nullptr;
    while (_blockcount >= 8) { // 8 is a *magic* number
        _blocks = (struct dm_block *) malloc(_blockcount * sizeof(_blocks[0]));
        if (_blocks != nullptr) {
            break;
        }
        // allocation failed; try again with half as many blocks
        _blockcount /= 2;
    }
    if (_blocks == nullptr) {
        // could not allocate even the minimum pool; stay uninitialised
        return;
    }
    free_all_blocks();
    stats_init();
    _initialised = true;
    _logging_started = true; // in actual fact, we throw away
                             // everything until a client connects.
                             // This stops calls to start_new_log from
                             // the vehicles
}
// Logging is considered failed whenever no client is draining our blocks.
bool DataFlash_MAVLink::logging_failed() const
{
    if (_sending_to_client) {
        return false;
    }
    return true;
}
// Number of payload bytes that can currently be buffered: one full
// MAVLink data-block payload per free block, plus the space remaining
// in the block currently being filled.
uint32_t DataFlash_MAVLink::bufferspace_available() {
    // use the MAVLink payload-size constant (as the rest of this file
    // does) rather than a magic 200, so the two cannot drift apart
    return (_blockcount_free * MAVLINK_MSG_REMOTE_LOG_DATA_BLOCK_FIELD_DATA_LEN +
            remaining_space_in_current_block());
}
// Bytes of payload capacity left in the block currently being filled.
// Returns the full payload size when no bytes have been written yet.
uint8_t DataFlash_MAVLink::remaining_space_in_current_block() {
    // note that _current_block *could* be NULL ATM.
    return (MAVLINK_MSG_REMOTE_LOG_DATA_BLOCK_FIELD_DATA_LEN - _latest_block_len);
}
// Append `block` to the tail of `queue`, preserving FIFO order.
// The caller is responsible for block->next being nullptr.
void DataFlash_MAVLink::enqueue_block(dm_block_queue_t &queue, struct dm_block *block)
{
    struct dm_block *tail = queue.youngest;
    if (tail == nullptr) {
        // queue was empty; block becomes the head as well
        queue.oldest = block;
    } else {
        tail->next = block;
    }
    queue.youngest = block;
}
// Search `queue` for the block with sequence number `seqno`.  If found,
// unlink it (fixing up both head and tail pointers) and return it with
// its `next` pointer cleared; otherwise return nullptr.
struct DataFlash_MAVLink::dm_block *DataFlash_MAVLink::dequeue_seqno(DataFlash_MAVLink::dm_block_queue_t &queue, uint32_t seqno)
{
    struct dm_block *prev = nullptr;
    for (struct dm_block *block=queue.oldest; block != nullptr; block=block->next) {
        if (block->seqno == seqno) {
            if (prev == nullptr) {
                // match is the head of the queue
                if (queue.youngest == queue.oldest) {
                    // ... and the only entry: queue becomes empty
                    queue.oldest = nullptr;
                    queue.youngest = nullptr;
                } else {
                    queue.oldest = block->next;
                }
            } else {
                // match is mid-queue or the tail
                if (queue.youngest == block) {
                    // removing the tail; prev becomes the new tail
                    queue.youngest = prev;
                }
                prev->next = block->next;
            }
            block->next = nullptr;
            return block;
        }
        prev = block;
    }
    return nullptr;
}
// Remove the block with `seqno` from `queue` (if present) and push it
// onto the free stack.  Returns true iff a block was freed.
bool DataFlash_MAVLink::free_seqno_from_queue(uint32_t seqno, dm_block_queue_t &queue)
{
    struct dm_block *found = dequeue_seqno(queue, seqno);
    if (found == nullptr) {
        return false;
    }
    // push onto the free stack and account for it
    found->next = _blocks_free;
    _blocks_free = found;
    _blockcount_free++; // comment me out to expose a bug!
    return true;
}
// Writes are only accepted while a client is listening for our blocks.
bool DataFlash_MAVLink::WritesOK() const
{
    return _sending_to_client;
}
/* Write a block of data at current offset */
// DM_write: 70734 events, 0 overruns, 167806us elapsed, 2us avg, min 1us max 34us 0.620us rms
// Copies `size` bytes from pBuffer into the transmit-block pool,
// spilling full blocks onto the pending queue.  Returns false (and
// counts a drop) if the semaphore is busy or there is not enough
// buffer space for the whole write.
// NOTE(review): `is_critical` is not consulted in this body — confirm
// whether prioritisation is handled by a caller.
bool DataFlash_MAVLink::_WritePrioritisedBlock(const void *pBuffer, uint16_t size, bool is_critical)
{
    if (!semaphore->take_nonblocking()) {
        dropped++;
        return false;
    }
    if (! WriteBlockCheckStartupMessages()) {
        semaphore->give();
        return false;
    }
    if (bufferspace_available() < size) {
        if (_startup_messagewriter->finished()) {
            // do not count the startup packets as being dropped...
            dropped++;
        }
        semaphore->give();
        return false;
    }
    uint16_t copied = 0;
    while (copied < size) {
        if (_current_block == nullptr) {
            _current_block = next_block();
            if (_current_block == nullptr) {
                // should not happen - there's a sanity check above
                internal_error();
                semaphore->give();
                return false;
            }
        }
        // copy as much as fits in the current block
        uint16_t remaining_to_copy = size - copied;
        uint16_t _curr_remaining = remaining_space_in_current_block();
        uint16_t to_copy = (remaining_to_copy > _curr_remaining) ? _curr_remaining : remaining_to_copy;
        memcpy(&(_current_block->buf[_latest_block_len]), &((const uint8_t *)pBuffer)[copied], to_copy);
        copied += to_copy;
        _latest_block_len += to_copy;
        if (_latest_block_len == MAVLINK_MSG_REMOTE_LOG_DATA_BLOCK_FIELD_DATA_LEN) {
            //block full, mark it to be sent:
            enqueue_block(_blocks_pending, _current_block);
            _current_block = next_block();
        }
    }
    semaphore->give();
    return true;
}
//Get a free block
struct DataFlash_MAVLink::dm_block *DataFlash_MAVLink::next_block()
{
DataFlash_MAVLink::dm_block *ret = _blocks_free;
if (ret != nullptr) {
_blocks_free = ret->next;
_blockcount_free--;
ret->seqno = _next_seq_num++;
ret->last_sent = 0;
ret->next = nullptr;
_latest_block_len = 0;
}
return ret;
}
void DataFlash_MAVLink::free_all_blocks()
{
_blocks_free = nullptr;
_current_block = nullptr;
_blocks_pending.sent_count = 0;
_blocks_pending.oldest = _blocks_pending.youngest = nullptr;
_blocks_retry.sent_count = 0;
_blocks_retry.oldest = _blocks_retry.youngest = nullptr;
_blocks_sent.sent_count = 0;
_blocks_sent.oldest = _blocks_sent.youngest = nullptr;
// add blocks to the free stack:
for(uint8_t i=0; i < _blockcount; i++) {
_blocks[i].next = _blocks_free;
_blocks_free = &_blocks[i];
// this value doesn't really matter, but it stops valgrind
// complaining when acking blocks (we check seqno before
// state). Also, when we receive ACKs we check seqno, and we
// want to ack the *real* block zero!
_blocks[i].seqno = 9876543;
}
_blockcount_free = _blockcount;
_latest_block_len = 0;
}
// Stop streaming to the client, remembering when we last heard from it.
void DataFlash_MAVLink::stop_logging()
{
    if (!_sending_to_client) {
        return;
    }
    _sending_to_client = false;
    _last_response_time = AP_HAL::millis();
}
// Handle an ACK from the client.  Two special seqno values carry
// control meaning: MAV_REMOTE_LOG_DATA_BLOCK_STOP stops logging, and
// MAV_REMOTE_LOG_DATA_BLOCK_START (re)starts a session, latching the
// sender as our target.  Any other value acknowledges that block,
// which is then freed from the sent or retry queue.
void DataFlash_MAVLink::handle_ack(mavlink_channel_t chan,
                                   mavlink_message_t* msg,
                                   uint32_t seqno)
{
    if (!_initialised) {
        return;
    }
    if(seqno == MAV_REMOTE_LOG_DATA_BLOCK_STOP) {
        Debug("Received stop-logging packet");
        stop_logging();
        return;
    }
    if(seqno == MAV_REMOTE_LOG_DATA_BLOCK_START) {
        if (!_sending_to_client) {
            Debug("Starting New Log");
            free_all_blocks();
            // _current_block = next_block();
            // if (_current_block == nullptr) {
            //     Debug("No free blocks?!!!\n");
            //     return;
            // }
            stats_init();
            _sending_to_client = true;
            // remember who asked for the log data; all blocks go to them
            _target_system_id = msg->sysid;
            _target_component_id = msg->compid;
            _chan = chan;
            _next_seq_num = 0;
            start_new_log_reset_variables();
            _last_response_time = AP_HAL::millis();
            Debug("Target: (%u/%u)", _target_system_id, _target_component_id);
        }
        return;
    }
    // check SENT blocks (VERY likely to be first on the list):
    if (free_seqno_from_queue(seqno, _blocks_sent)) {
        // celebrate
        _last_response_time = AP_HAL::millis();
    } else if(free_seqno_from_queue(seqno, _blocks_retry)) {
        // party
        _last_response_time = AP_HAL::millis();
    } else {
        // probably acked already and put on the free list.
    }
}
// Entry point for REMOTE_LOG_BLOCK_STATUS messages: a status of zero
// is a NACK (retransmit request); anything else is an ACK.  The
// message is dropped if the block structures are busy.
void DataFlash_MAVLink::remote_log_block_status_msg(mavlink_channel_t chan,
                                                    mavlink_message_t* msg)
{
    mavlink_remote_log_block_status_t packet;
    mavlink_msg_remote_log_block_status_decode(msg, &packet);
    if (!semaphore->take_nonblocking()) {
        return;
    }
    if (packet.status != 0) {
        handle_ack(chan, msg, packet.seqno);
    } else {
        handle_retry(packet.seqno);
    }
    semaphore->give();
}
// The client asked for `seqno` again: move that block from the sent
// queue to the retry queue, if we still hold it.
void DataFlash_MAVLink::handle_retry(uint32_t seqno)
{
    if (!_initialised || !_sending_to_client) {
        return;
    }
    struct dm_block *block = dequeue_seqno(_blocks_sent, seqno);
    if (block == nullptr) {
        // already freed or never existed; nothing to do
        return;
    }
    _last_response_time = AP_HAL::millis();
    enqueue_block(_blocks_retry, block);
}
// Record an internal consistency failure both locally (for our stats
// log message) and in the backend.
void DataFlash_MAVLink::internal_error() {
    internal_errors++;
    DataFlash_Backend::internal_error();
}
// Reset *all* statistics - including the cumulative counters that
// stats_reset() deliberately leaves alone.
void DataFlash_MAVLink::stats_init() {
    stats.resends = 0;
    internal_errors = 0;
    dropped = 0;
    stats_reset();
}
// Zero the per-interval queue-depth statistics sampled by
// stats_collect().  Minima start at the type's maximum value (via
// unsigned wrap of -1) so the first sample always updates them.
void DataFlash_MAVLink::stats_reset() {
    stats.state_free = 0;
    stats.state_free_min = -1; // unsigned wrap
    stats.state_free_max = 0;
    stats.state_pending = 0;
    stats.state_pending_min = -1; // unsigned wrap
    stats.state_pending_max = 0;
    stats.state_retry = 0;
    stats.state_retry_min = -1; // unsigned wrap
    stats.state_retry_max = 0;
    stats.state_sent = 0;
    stats.state_sent_min = -1; // unsigned wrap
    stats.state_sent_max = 0;
    stats.collection_count = 0;
}
// Emit a DF_MAV_STATS log message summarising the interval's queue
// depths (min/max/avg) and cumulative error counters.  No-op until at
// least one stats sample has been collected.
// NOTE(review): retry-queue statistics are collected but not written
// here - confirm the log structure has no fields for them.
void DataFlash_MAVLink::Log_Write_DF_MAV(DataFlash_MAVLink &df)
{
    if (df.stats.collection_count == 0) {
        return;
    }
    struct log_DF_MAV_Stats pkt = {
        LOG_PACKET_HEADER_INIT(LOG_DF_MAV_STATS),
        timestamp         : AP_HAL::millis(),
        seqno             : df._next_seq_num-1,
        dropped           : df.dropped,
        retries           : df._blocks_retry.sent_count,
        resends           : df.stats.resends,
        internal_errors   : df.internal_errors,
        state_free_avg    : (uint8_t)(df.stats.state_free/df.stats.collection_count),
        state_free_min    : df.stats.state_free_min,
        state_free_max    : df.stats.state_free_max,
        state_pending_avg : (uint8_t)(df.stats.state_pending/df.stats.collection_count),
        state_pending_min : df.stats.state_pending_min,
        state_pending_max : df.stats.state_pending_max,
        state_sent_avg    : (uint8_t)(df.stats.state_sent/df.stats.collection_count),
        state_sent_min    : df.stats.state_sent_min,
        state_sent_max    : df.stats.state_sent_max,
    };
    WriteBlock(&pkt,sizeof(pkt));
}
// Log (and, when debugging, print) the interval statistics, then reset
// them ready for the next interval.  Called at 1Hz from periodic_1Hz().
void DataFlash_MAVLink::stats_log()
{
    if (!_initialised || !_logging_started) {
        return;
    }
    if (stats.collection_count == 0) {
        // nothing sampled since the last reset
        return;
    }
    Log_Write_DF_MAV(*this);
#if REMOTE_LOG_DEBUGGING
    printf("D:%d Retry:%d Resent:%d E:%d SF:%d/%d/%d SP:%d/%d/%d SS:%d/%d/%d SR:%d/%d/%d\n",
           dropped,
           _blocks_retry.sent_count,
           stats.resends,
           internal_errors,
           stats.state_free_min,
           stats.state_free_max,
           stats.state_free/stats.collection_count,
           stats.state_pending_min,
           stats.state_pending_max,
           stats.state_pending/stats.collection_count,
           stats.state_sent_min,
           stats.state_sent_max,
           stats.state_sent/stats.collection_count,
           stats.state_retry_min,
           stats.state_retry_max,
           stats.state_retry/stats.collection_count
        );
#endif
    stats_reset();
}
// Count the entries in a singly-linked stack of blocks (nullptr == 0).
uint8_t DataFlash_MAVLink::stack_size(struct dm_block *stack)
{
    uint8_t count = 0;
    struct dm_block *cursor = stack;
    while (cursor != nullptr) {
        count++;
        cursor = cursor->next;
    }
    return count;
}
// Number of blocks currently in `queue`.
// NOTE(review): the queue is passed by value (copying the small queue
// struct, not its blocks) - presumably intentional; verify against the
// header declaration before changing.
uint8_t DataFlash_MAVLink::queue_size(dm_block_queue_t queue)
{
    return stack_size(queue.oldest);
}
// Sample the depth of each queue/stack under the semaphore, then fold
// the samples into running sums and min/max values for the interval.
// Also sanity-checks the free-stack length against _blockcount_free.
void DataFlash_MAVLink::stats_collect()
{
    if (!_initialised || !_logging_started) {
        return;
    }
    if (!semaphore->take_nonblocking()) {
        // busy; skip this sample rather than block
        return;
    }
    uint8_t pending = queue_size(_blocks_pending);
    uint8_t sent = queue_size(_blocks_sent);
    uint8_t retry = queue_size(_blocks_retry);
    uint8_t sfree = stack_size(_blocks_free);
    if (sfree != _blockcount_free) {
        // accounting mismatch between the counter and the actual stack
        internal_error();
    }
    semaphore->give();
    stats.state_pending += pending;
    stats.state_sent += sent;
    stats.state_free += sfree;
    stats.state_retry += retry;
    // track per-interval extremes for each queue:
    if (pending < stats.state_pending_min) {
        stats.state_pending_min = pending;
    }
    if (pending > stats.state_pending_max) {
        stats.state_pending_max = pending;
    }
    if (retry < stats.state_retry_min) {
        stats.state_retry_min = retry;
    }
    if (retry > stats.state_retry_max) {
        stats.state_retry_max = retry;
    }
    if (sent < stats.state_sent_min) {
        stats.state_sent_min = sent;
    }
    if (sent > stats.state_sent_max) {
        stats.state_sent_max = sent;
    }
    if (sfree < stats.state_free_min) {
        stats.state_free_min = sfree;
    }
    if (sfree > stats.state_free_max) {
        stats.state_free_max = sfree;
    }
    stats.collection_count++;
}
/* while we "successfully" send log blocks from a queue, move them to
 * the sent list. DO NOT call this for blocks already sent!
 */
// Returns true when the queue has been fully drained, false when we
// stopped early (per-call budget exhausted or a send failed).
// NOTE(review): the post-increment comparison lets slightly more than
// _max_blocks_per_send_blocks blocks go out per call - confirm intended.
bool DataFlash_MAVLink::send_log_blocks_from_queue(dm_block_queue_t &queue)
{
    uint8_t sent_count = 0;
    while (queue.oldest != nullptr) {
        if (sent_count++ > _max_blocks_per_send_blocks) {
            return false;
        }
        if (! send_log_block(*queue.oldest)) {
            return false;
        }
        queue.sent_count++;
        // move the just-sent block onto the sent queue to await its ACK
        struct DataFlash_MAVLink::dm_block *tmp = dequeue_seqno(queue,queue.oldest->seqno);
        if (tmp != nullptr) { // should never be nullptr
            enqueue_block(_blocks_sent, tmp);
        } else {
            internal_error();
        }
    }
    return true;
}
// Send queued blocks to the client: retries first, then never-sent
// pending blocks.  Stops as soon as a queue cannot be fully drained.
void DataFlash_MAVLink::push_log_blocks()
{
    if (!_initialised || !_logging_started || !_sending_to_client) {
        return;
    }
    DataFlash_Backend::WriteMoreStartupMessages();
    if (!semaphore->take_nonblocking()) {
        return;
    }
    bool drained = send_log_blocks_from_queue(_blocks_retry);
    if (drained) {
        send_log_blocks_from_queue(_blocks_pending);
    }
    semaphore->give();
}
// Resend blocks that were sent but never ACKed, oldest-first, rate
// limited to blocks not sent within the last 100ms.
// NOTE(review): the semaphore is taken/released once per outer-loop
// iteration while the inner loop walks the whole sent queue - confirm
// the count_to_send budget is behaving as intended.
void DataFlash_MAVLink::do_resends(uint32_t now)
{
    if (!_initialised || !_logging_started ||!_sending_to_client) {
        return;
    }
    uint8_t count_to_send = 5;
    if (_blockcount < count_to_send) {
        count_to_send = _blockcount;
    }
    uint32_t oldest = now - 100; // 100 milliseconds before resend.  Hmm.
    while (count_to_send-- > 0) {
        if (!semaphore->take_nonblocking()) {
            return;
        }
        for (struct dm_block *block=_blocks_sent.oldest; block != nullptr; block=block->next) {
            // only want to send blocks every now-and-then:
            if (block->last_sent < oldest) {
                if (! send_log_block(*block)) {
                    // failed to send the block; try again later....
                    semaphore->give();
                    return;
                }
                stats.resends++;
            }
        }
        semaphore->give();
    }
}
// NOTE: any functions called from these periodic functions MUST
// handle locking of the blocks structures by taking the semaphore
// appropriately!
// 10Hz tick: retransmit un-ACKed blocks and sample queue-depth stats.
void DataFlash_MAVLink::periodic_10Hz(const uint32_t now)
{
    do_resends(now);
    stats_collect();
}
// 1Hz tick: detect a client that has stopped ACKing (10s of sends with
// no response) and otherwise emit the periodic statistics log.
void DataFlash_MAVLink::periodic_1Hz(const uint32_t now)
{
    const bool client_timed_out =
        _sending_to_client &&
        (_last_response_time + 10000 < _last_send_time);
    if (client_timed_out) {
        // other end appears to have timed out!
        Debug("Client timed out");
        _sending_to_client = false;
        return;
    }
    stats_log();
}
// Full-rate tick: push any queued blocks out over the MAVLink channel.
void DataFlash_MAVLink::periodic_fullrate(uint32_t now)
{
    push_log_blocks();
}
//TODO: handle full txspace properly
// Pack `block` into a REMOTE_LOG_DATA_BLOCK message and hand it to the
// MAVLink channel.  Returns false when the packet could not be sent
// (not initialised, insufficient txspace, or simulated loss in SITL).
// On success the block's last_sent timestamp is updated.
bool DataFlash_MAVLink::send_log_block(struct dm_block &block)
{
    mavlink_channel_t chan = mavlink_channel_t(_chan - MAVLINK_COMM_0);
    if (!_initialised) {
        return false;
    }
    if (!HAVE_PAYLOAD_SPACE(chan, REMOTE_LOG_DATA_BLOCK)) {
        return false;
    }
    if (comm_get_txspace(chan) < 500) {
        // leave headroom for other traffic on the link
        return false;
    }
#if CONFIG_HAL_BOARD == HAL_BOARD_SITL
    // simulate ~10% packet loss under SITL.  Bug fix: rand() returns a
    // non-negative int, so the previous check "rand() < 0.1" only
    // dropped a packet when rand() returned exactly 0 - essentially
    // never, defeating the loss simulation.
    if ((rand() % 10) == 0) {
        return false;
    }
#endif
#if DF_MAVLINK_DISABLE_INTERRUPTS
    irqstate_t istate = irqsave();
#endif
    // DM_packing: 267039 events, 0 overruns, 8440834us elapsed, 31us avg, min 31us max 32us 0.488us rms
    hal.util->perf_begin(_perf_packing);
    mavlink_message_t msg;
    mavlink_status_t *chan_status = mavlink_get_channel_status(chan);
    // use our own sequence-number stream for log packets so normal
    // telemetry sequence numbers on the channel are not perturbed
    uint8_t saved_seq = chan_status->current_tx_seq;
    chan_status->current_tx_seq = mavlink_seq++;
    // Debug("Sending block (%d)", block.seqno);
    mavlink_msg_remote_log_data_block_pack(mavlink_system.sysid,
                                           MAV_COMP_ID_LOG,
                                           &msg,
                                           _target_system_id,
                                           _target_component_id,
                                           block.seqno,
                                           block.buf);
    hal.util->perf_end(_perf_packing);
#if DF_MAVLINK_DISABLE_INTERRUPTS
    irqrestore(istate);
#endif
    block.last_sent = AP_HAL::millis();
    chan_status->current_tx_seq = saved_seq;
    // _last_send_time is set even if we fail to send the packet; if
    // the txspace is repeatedly chockas we should not add to the
    // problem and stop attempting to log
    _last_send_time = AP_HAL::millis();
    _mavlink_resend_uart(chan, &msg);
    return true;
}
#endif
| {
"pile_set_name": "Github"
} |
var _ = require('underscore'),
parse = require('../parse'),
$ = require('../static'),
updateDOM = parse.update,
evaluate = parse.evaluate,
utils = require('../utils'),
domEach = utils.domEach,
encode = utils.encode,
slice = Array.prototype.slice;
// Normalise `elem` into a flat array of DOM nodes: null/undefined
// become [], cheerio objects are unwrapped, arrays are flattened
// recursively, and strings are parsed into nodes.
var makeDomArray = function(elem) {
  if (elem == null) return [];
  if (elem.cheerio) return elem.toArray();
  if (_.isArray(elem)) return _.flatten(elem.map(makeDomArray));
  if (_.isString(elem)) return evaluate(elem);
  return [elem];
};
// Factory for the insertion methods (append/prepend).  `concatenator`
// receives (newNodes, existingChildren) and decides their combined order.
var _insert = function(concatenator) {
  return function() {
    var elems = slice.call(arguments),
        dom = makeDomArray(elems);
    if (_.isFunction(elems[0])) {
      // jQuery-style function argument: called per element with the
      // index and current inner HTML; its return value is parsed and
      // inserted for that element.
      return this.each(function(i, el) {
        dom = makeDomArray(elems[0].call(el, i, this.html()));
        updateDOM(concatenator(dom, el.children || (el.children = [])), el);
      });
    } else {
      return domEach(this, function(i, el) {
        updateDOM(concatenator(dom, el.children || (el.children = [])), el);
      });
    }
  };
};
// $(...).append(content): new nodes go after the existing children.
var append = exports.append = _insert(function(dom, children) {
  return [].concat(children, dom);
});
// $(...).prepend(content): new nodes go before the existing children.
var prepend = exports.prepend = _insert(function(dom, children) {
  return [].concat(dom, children);
});
// Insert content after each selected element, splicing the new nodes
// into the parent's children list and re-linking sibling pointers.
var after = exports.after = function() {
  var elems = slice.call(arguments),
      dom = makeDomArray(elems);
  domEach(this, function(i, el) {
    var parent = el.parent || el.root,
        siblings = parent.children,
        index = siblings.indexOf(el);
    // If not found, move on
    if (!~index) return;
    if (_.isFunction(elems[0])) {
      // function argument: computed per element
      dom = makeDomArray(elems[0].call(el, i));
    }
    // Add element after `this` element
    siblings.splice.apply(siblings, [++index, 0].concat(dom));
    // Update next, prev, and parent pointers
    updateDOM(siblings, parent);
  });
  return this;
};
// Insert content before each selected element; mirror image of after().
var before = exports.before = function() {
  var elems = slice.call(arguments),
      dom = makeDomArray(elems);
  domEach(this, function(i, el) {
    var parent = el.parent || el.root,
        siblings = parent.children,
        index = siblings.indexOf(el);
    // If not found, move on
    if (!~index) return;
    if (_.isFunction(elems[0])) {
      // function argument: computed per element
      dom = makeDomArray(elems[0].call(el, i));
    }
    // Add element before `el` element
    siblings.splice.apply(siblings, [index, 0].concat(dom));
    // Update next, prev, and parent pointers
    updateDOM(siblings, parent);
  });
  return this;
};
/*
  remove([selector])

  Detach each selected element (optionally narrowed by `selector`)
  from its parent, re-linking the remaining siblings.
*/
var remove = exports.remove = function(selector) {
  // optionally narrow the set of elements to remove
  var elems = selector ? this.filter(selector) : this;
  domEach(elems, function(i, el) {
    var parent = el.parent || el.root;
    var siblings = parent.children;
    var index = siblings.indexOf(el);
    if (index < 0) return;
    siblings.splice(index, 1);
    // Update next, prev, and parent pointers
    updateDOM(siblings, parent);
  });
  return this;
};
// Replace each selected element with `content` (nodes, string, or a
// per-element function), detaching the old element completely.
var replaceWith = exports.replaceWith = function(content) {
  var dom = makeDomArray(content);
  domEach(this, function(i, el) {
    var parent = el.parent || el.root,
        siblings = parent.children,
        index;
    if (_.isFunction(content)) {
      dom = makeDomArray(content.call(el, i));
    }
    // In the case that `dom` contains nodes that already exist in other
    // structures, ensure those nodes are properly removed.
    updateDOM(dom, null);
    index = siblings.indexOf(el);
    // Completely remove old element
    siblings.splice.apply(siblings, [index, 1].concat(dom));
    el.parent = el.prev = el.next = null;
    updateDOM(siblings, parent);
  });
  return this;
};
// Remove all child nodes from each selected element.
var empty = exports.empty = function() {
  domEach(this, function(i, el) {
    // drop every child; the nodes are simply orphaned
    el.children = [];
  });
  return this;
};
/**
 * Set/Get the HTML.  With no argument, serialises the first selected
 * element's children; with a string or cheerio object, replaces every
 * selected element's children with the parsed content.
 */
var html = exports.html = function(str) {
  if (str === undefined) {
    // getter: only the first element is consulted, per jQuery semantics
    if (!this[0] || !this[0].children) return null;
    return $.html(this[0].children);
  }
  // setter: parse once up front
  // NOTE(review): the same parsed node array is assigned to every
  // selected element - presumably fine for single-element selections;
  // verify behaviour for multi-element sets.
  str = str.cheerio ? str.toArray() : evaluate(str);
  domEach(this, function(i, el) {
    el.children = str;
    updateDOM(el.children, el);
  });
  return this;
};
// Serialise the whole selection to HTML via the static helper.
var toString = exports.toString = function() {
  return $.html(this);
};
// Get or set the text content of the selection.  As a setter, every
// selected element's children are replaced by a single encoded text node.
var text = exports.text = function(str) {
  // If `str` is undefined, act as a "getter"
  if (str === undefined) {
    return $.text(this);
  } else if (_.isFunction(str)) {
    // Function support
    return this.each(function(i, el) {
      return this.text(str.call(el, i, this.text()));
    });
  }
  // a single shared text node used for every selected element
  var elem = {
    data: encode(str),
    type: 'text',
    parent: null,
    prev: null,
    next: null,
    children: []
  };
  // Append text node to each selected elements
  domEach(this, function(i, el) {
    // NOTE(review): `elem` is assigned directly (not wrapped in an
    // array) - assumes updateDOM accepts a bare node; confirm against
    // parse.update.
    el.children = elem;
    updateDOM(el.children, el);
  });
  return this;
};
// Deep-clone the selection by round-tripping through HTML.
var clone = exports.clone = function() {
  // Turn it into HTML, then recreate it,
  // Seems to be the easiest way to reconnect everything correctly
  return this._make($.html(this));
};
| {
"pile_set_name": "Github"
} |
// Ordering test: two atomic flags coordinate the main task and a
// `begin` task so the child observes the *updated* string.
config const n = 11;            // settable via --n; not used below
var s0 = "kiss kiss";
var l0: atomic int;             // child -> main: "child has started"
var l1: atomic int;             // main -> child: "s0 has been updated"
begin {
  l0.write(1);                  // signal the main task we are running
  l1.waitFor(1);                // wait until main has rewritten s0
  writeln(s0);                  // prints "bang bang"
}
writeln(s0);                    // prints the original "kiss kiss"
l0.waitFor(1);                  // wait for the child to start
s0 = "bang bang";               // safe: child is parked on l1
l1.write(1);                    // release the child
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates.
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package software.amazon.kinesis.multilang;
import java.util.concurrent.ExecutorService;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import software.amazon.kinesis.coordinator.KinesisClientLibConfiguration;
import software.amazon.kinesis.multilang.config.MultiLangDaemonConfiguration;
import software.amazon.kinesis.processor.ShardRecordProcessorFactory;
import software.amazon.kinesis.processor.ShardRecordProcessor;
/**
 * Creates {@link MultiLangShardRecordProcessor}'s, each of which drives a
 * child process running the user-supplied command.
 */
@Slf4j
public class MultiLangRecordProcessorFactory implements ShardRecordProcessorFactory {
    /** Splits the configured command line on runs of one or more spaces. */
    // fixed typo in the constant name (DELIMETER -> DELIMITER); the
    // constant is private, so this is not an API change
    private static final String COMMAND_DELIMITER_REGEX = " +";
    private final String command;
    private final String[] commandArray;
    private final ObjectMapper objectMapper;
    private final ExecutorService executorService;
    private final MultiLangDaemonConfiguration configuration;
    /**
     * @param command The command that will do processing for this factory's record processors.
     * @param executorService An executor service to use while processing inputs and outputs of the child process.
     * @param configuration Daemon configuration handed to each record processor.
     */
    public MultiLangRecordProcessorFactory(String command, ExecutorService executorService,
            MultiLangDaemonConfiguration configuration) {
        this(command, executorService, new ObjectMapper(), configuration);
    }
    /**
     * @param command The command that will do processing for this factory's record processors.
     * @param executorService An executor service to use while processing inputs and outputs of the child process.
     * @param objectMapper An object mapper used to convert messages to json to be written to the child process.
     * @param configuration Daemon configuration handed to each record processor.
     */
    public MultiLangRecordProcessorFactory(String command, ExecutorService executorService, ObjectMapper objectMapper,
            MultiLangDaemonConfiguration configuration) {
        this.command = command;
        this.commandArray = command.split(COMMAND_DELIMITER_REGEX);
        this.executorService = executorService;
        this.objectMapper = objectMapper;
        this.configuration = configuration;
    }
    @Override
    public ShardRecordProcessor shardRecordProcessor() {
        log.debug("Creating new record processor for client executable: {}", command);
        /*
         * Giving ProcessBuilder the command as an array of Strings allows users to specify command line arguments.
         */
        return new MultiLangShardRecordProcessor(new ProcessBuilder(commandArray), executorService, this.objectMapper,
                this.configuration);
    }
    /** Visible for testing: the tokenised command line. */
    String[] getCommandArray() {
        return commandArray;
    }
}
| {
"pile_set_name": "Github"
} |
package geektime.spring.springbucks.waiter.controller;
import geektime.spring.springbucks.waiter.controller.request.NewOrderRequest;
import geektime.spring.springbucks.waiter.model.Coffee;
import geektime.spring.springbucks.waiter.model.CoffeeOrder;
import geektime.spring.springbucks.waiter.service.CoffeeOrderService;
import geektime.spring.springbucks.waiter.service.CoffeeService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.servlet.ModelAndView;
import javax.validation.Valid;
import java.util.List;
@Controller
@RequestMapping("/order")
@Slf4j
public class CoffeeOrderController {
    @Autowired
    private CoffeeOrderService orderService;
    @Autowired
    private CoffeeService coffeeService;
    /** Return the order with the given id as JSON. */
    @GetMapping("/{id}")
    @ResponseBody
    public CoffeeOrder getOrder(@PathVariable("id") Long id) {
        return orderService.get(id);
    }
    /** Create an order from a JSON request body; responds 201 with the new order. */
    @PostMapping(path = "/", consumes = MediaType.APPLICATION_JSON_VALUE,
            produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
    @ResponseBody
    @ResponseStatus(HttpStatus.CREATED)
    public CoffeeOrder create(@RequestBody NewOrderRequest newOrder) {
        log.info("Receive new Order {}", newOrder);
        Coffee[] coffeeList = coffeeService.getCoffeeByName(newOrder.getItems())
                .toArray(new Coffee[] {});
        return orderService.createOrder(newOrder.getCustomer(), coffeeList);
    }
    // Exposed to every view rendered by this controller as model
    // attribute "coffeeList" (used by the order form).
    @ModelAttribute
    public List<Coffee> coffeeList() {
        return coffeeService.getAllCoffee();
    }
    /** Render the HTML order-creation form. */
    @GetMapping(path = "/")
    public ModelAndView showCreateForm() {
        return new ModelAndView("create-order-form");
    }
    /**
     * Handle the HTML form submission.  On validation failure the form is
     * re-rendered with the binding errors; on success we redirect to the
     * new order's detail page (post/redirect/get).
     */
    @PostMapping(path = "/", consumes = MediaType.APPLICATION_FORM_URLENCODED_VALUE)
    public String createOrder(@Valid NewOrderRequest newOrder,
                              BindingResult result, ModelMap map) {
        if (result.hasErrors()) {
            log.warn("Binding Result: {}", result);
            map.addAttribute("message", result.toString());
            return "create-order-form";
        }
        log.info("Receive new Order {}", newOrder);
        Coffee[] coffeeList = coffeeService.getCoffeeByName(newOrder.getItems())
                .toArray(new Coffee[] {});
        CoffeeOrder order = orderService.createOrder(newOrder.getCustomer(), coffeeList);
        return "redirect:/order/" + order.getId();
    }
}
| {
"pile_set_name": "Github"
} |
# send
[![NPM Version][npm-image]][npm-url]
[![NPM Downloads][downloads-image]][downloads-url]
[![Linux Build][travis-image]][travis-url]
[![Windows Build][appveyor-image]][appveyor-url]
[![Test Coverage][coveralls-image]][coveralls-url]
[![Gratipay][gratipay-image]][gratipay-url]
Send is a library for streaming files from the file system as a http response
supporting partial responses (Ranges), conditional-GET negotiation (If-Match,
If-Unmodified-Since, If-None-Match, If-Modified-Since), high test coverage,
and granular events which may be leveraged to take appropriate actions in your
application or framework.
Looking to serve up entire folders mapped to URLs? Try [serve-static](https://www.npmjs.org/package/serve-static).
## Installation
This is a [Node.js](https://nodejs.org/en/) module available through the
[npm registry](https://www.npmjs.com/). Installation is done using the
[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally):
```bash
$ npm install send
```
## API
<!-- eslint-disable no-unused-vars -->
```js
var send = require('send')
```
### send(req, path, [options])
Create a new `SendStream` for the given path to send to a `res`. The `req` is
the Node.js HTTP request and the `path` is a urlencoded path to send (urlencoded,
not the actual file-system path).
#### Options
##### acceptRanges
Enable or disable accepting ranged requests, defaults to true.
Disabling this will not send `Accept-Ranges` and ignore the contents
of the `Range` request header.
##### cacheControl
Enable or disable setting `Cache-Control` response header, defaults to
true. Disabling this will ignore the `immutable` and `maxAge` options.
##### dotfiles
Set how "dotfiles" are treated when encountered. A dotfile is a file
or directory that begins with a dot ("."). Note this check is done on
the path itself without checking if the path actually exists on the
disk. If `root` is specified, only the dotfiles above the root are
checked (i.e. the root itself can be within a dotfile when set
to "deny").
- `'allow'` No special treatment for dotfiles.
- `'deny'` Send a 403 for any request for a dotfile.
- `'ignore'` Pretend like the dotfile does not exist and 404.
The default value is _similar_ to `'ignore'`, with the exception that
this default will not ignore the files within a directory that begins
with a dot, for backward-compatibility.
##### end
Byte offset at which the stream ends, defaults to the length of the file
minus 1. The end is inclusive in the stream, meaning `end: 3` will include
the 4th byte in the stream.
##### etag
Enable or disable etag generation, defaults to true.
##### extensions
If a given file doesn't exist, try appending one of the given extensions,
in the given order. By default, this is disabled (set to `false`). An
example value that will serve extension-less HTML files: `['html', 'htm']`.
This is skipped if the requested file already has an extension.
##### immutable
Enable or disable the `immutable` directive in the `Cache-Control` response
header, defaults to `false`. If set to `true`, the `maxAge` option should
also be specified to enable caching. The `immutable` directive will prevent
supported clients from making conditional requests during the life of the
`maxAge` option to check if the file has changed.
##### index
By default send supports "index.html" files, to disable this
set `false` or to supply a new index pass a string or an array
in preferred order.
##### lastModified
Enable or disable `Last-Modified` header, defaults to true. Uses the file
system's last modified value.
##### maxAge
Provide a max-age in milliseconds for http caching, defaults to 0.
This can also be a string accepted by the
[ms](https://www.npmjs.org/package/ms#readme) module.
##### root
Serve files relative to `path`.
##### start
Byte offset at which the stream starts, defaults to 0. The start is inclusive,
meaning `start: 2` will include the 3rd byte in the stream.
#### Events
The `SendStream` is an event emitter and will emit the following events:
- `error` an error occurred `(err)`
- `directory` a directory was requested `(res, path)`
- `file` a file was requested `(path, stat)`
- `headers` the headers are about to be set on a file `(res, path, stat)`
- `stream` file streaming has started `(stream)`
- `end` streaming has completed
#### .pipe
The `pipe` method is used to pipe the response into the Node.js HTTP response
object, typically `send(req, path, options).pipe(res)`.
### .mime
The `mime` export is the global instance of the
[`mime` npm module](https://www.npmjs.com/package/mime).
This is used to configure the MIME types that are associated with file extensions
as well as other options for how to resolve the MIME type of a file (like the
default type to use for an unknown file extension).
## Error-handling
By default when no `error` listeners are present an automatic response will be
made, otherwise you have full control over the response, aka you may show a 5xx
page etc.
## Caching
It does _not_ perform internal caching, you should use a reverse proxy cache
such as Varnish for this, or those fancy things called CDNs. If your
application is small enough that it would benefit from single-node memory
caching, it's small enough that it does not need caching at all ;).
## Debugging
To enable `debug()` instrumentation output export __DEBUG__:
```
$ DEBUG=send node app
```
## Running tests
```
$ npm install
$ npm test
```
## Examples
### Small example
```js
var http = require('http')
var parseUrl = require('parseurl')
var send = require('send')
var server = http.createServer(function onRequest (req, res) {
send(req, parseUrl(req).pathname).pipe(res)
})
server.listen(3000)
```
### Custom file types
```js
var http = require('http')
var parseUrl = require('parseurl')
var send = require('send')
// Default unknown types to text/plain
send.mime.default_type = 'text/plain'
// Add a custom type
send.mime.define({
'application/x-my-type': ['x-mt', 'x-mtt']
})
var server = http.createServer(function onRequest (req, res) {
send(req, parseUrl(req).pathname).pipe(res)
})
server.listen(3000)
```
### Custom directory index view
This is an example of serving up a structure of directories with a
custom function to render a listing of a directory.
```js
var http = require('http')
var fs = require('fs')
var parseUrl = require('parseurl')
var send = require('send')
// Transfer arbitrary files from within /www/example.com/public/*
// with a custom handler for directory listing
var server = http.createServer(function onRequest (req, res) {
send(req, parseUrl(req).pathname, {index: false, root: '/www/example.com/public'})
.once('directory', directory)
.pipe(res)
})
server.listen(3000)
// Custom directory handler
function directory (res, path) {
var stream = this
// redirect to trailing slash for consistent url
if (!stream.hasTrailingSlash()) {
return stream.redirect(path)
}
// get directory list
fs.readdir(path, function onReaddir (err, list) {
if (err) return stream.error(err)
// render an index for the directory
res.setHeader('Content-Type', 'text/plain; charset=UTF-8')
res.end(list.join('\n') + '\n')
})
}
```
### Serving from a root directory with custom error-handling
```js
var http = require('http')
var parseUrl = require('parseurl')
var send = require('send')
var server = http.createServer(function onRequest (req, res) {
// your custom error-handling logic:
function error (err) {
res.statusCode = err.status || 500
res.end(err.message)
}
// your custom headers
function headers (res, path, stat) {
// serve all files for download
res.setHeader('Content-Disposition', 'attachment')
}
// your custom directory handling logic:
function redirect () {
res.statusCode = 301
res.setHeader('Location', req.url + '/')
res.end('Redirecting to ' + req.url + '/')
}
// transfer arbitrary files from within
// /www/example.com/public/*
send(req, parseUrl(req).pathname, {root: '/www/example.com/public'})
.on('error', error)
.on('directory', redirect)
.on('headers', headers)
.pipe(res)
})
server.listen(3000)
```
## License
[MIT](LICENSE)
[npm-image]: https://img.shields.io/npm/v/send.svg
[npm-url]: https://npmjs.org/package/send
[travis-image]: https://img.shields.io/travis/pillarjs/send/master.svg?label=linux
[travis-url]: https://travis-ci.org/pillarjs/send
[appveyor-image]: https://img.shields.io/appveyor/ci/dougwilson/send/master.svg?label=windows
[appveyor-url]: https://ci.appveyor.com/project/dougwilson/send
[coveralls-image]: https://img.shields.io/coveralls/pillarjs/send/master.svg
[coveralls-url]: https://coveralls.io/r/pillarjs/send?branch=master
[downloads-image]: https://img.shields.io/npm/dm/send.svg
[downloads-url]: https://npmjs.org/package/send
[gratipay-image]: https://img.shields.io/gratipay/dougwilson.svg
[gratipay-url]: https://www.gratipay.com/dougwilson/
| {
"pile_set_name": "Github"
} |
好奇心原文链接:[W 酒店如何选择落脚城市?_文化_好奇心日报-吴亭蓉](https://www.qdaily.com/articles/1117.html)
WebArchive归档链接:[W 酒店如何选择落脚城市?_文化_好奇心日报-吴亭蓉](http://web.archive.org/web/20160430190017/http://www.qdaily.com:80/articles/1117.html)
 | {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright (C) 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- Search text-field background: the "selected" drawable when the view is
     focused, the default drawable otherwise (first matching item wins). -->
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_focused="true"
android:drawable="@drawable/abc_textfield_search_selected_holo_light" />
<item android:drawable="@drawable/abc_textfield_search_default_holo_light" />
</selector>
| {
"pile_set_name": "Github"
} |
/// Grid system
//
// Generate semantic grid columns with these mixins.
// Centers the container and applies half-gutter horizontal padding.
// In non-flex (float) mode, clears floated children via clearfix.
@mixin make-container($gutter: $grid-gutter-width) {
margin-left: auto;
margin-right: auto;
padding-left: ($gutter / 2);
padding-right: ($gutter / 2);
@if not $enable-flex {
@include clearfix();
}
}
// For each breakpoint, define the maximum width of the container in a media query
// $max-widths: map of breakpoint name => container max-width; each entry is
// emitted inside the matching min-width media query.
@mixin make-container-max-widths($max-widths: $container-max-widths) {
@each $breakpoint, $container-max-width in $max-widths {
@include media-breakpoint-up($breakpoint) {
max-width: $container-max-width;
}
}
}
// Row: a wrapping flex container in flex mode, a clearfixed float container
// otherwise. Negative half-gutter margins cancel the columns' edge padding.
@mixin make-row($gutter: $grid-gutter-width) {
@if $enable-flex {
display: flex;
flex-wrap: wrap;
} @else {
@include clearfix();
}
margin-left: ($gutter / -2);
margin-right: ($gutter / -2);
}
// Base column box: half-gutter padding; floats left in non-flex mode.
// min-height keeps empty columns from collapsing to zero height.
@mixin make-col($gutter: $grid-gutter-width) {
position: relative;
@if not $enable-flex {
float: left;
}
min-height: 1px;
padding-left: ($gutter / 2);
padding-right: ($gutter / 2);
}
// Sizes a column to $size of $columns (flex-basis in flex mode, width otherwise).
@mixin make-col-span($size, $columns: $grid-columns) {
@if $enable-flex {
flex: 0 0 percentage($size / $columns);
} @else {
width: percentage($size / $columns);
}
}
// Pushes a column right by $size columns' worth of left margin.
@mixin make-col-offset($size, $columns: $grid-columns) {
margin-left: percentage($size / $columns);
}
// Visually moves a column right via relative `left`; `auto` when $size is 0.
@mixin make-col-push($size, $columns: $grid-columns) {
left: if($size > 0, percentage($size / $columns), auto);
}
// Visually moves a column left via relative `right`; `auto` when $size is 0.
@mixin make-col-pull($size, $columns: $grid-columns) {
right: if($size > 0, percentage($size / $columns), auto);
}
// Dispatches to the push/pull/offset mixin selected by the $type keyword.
@mixin make-col-modifier($type, $size, $columns) {
// Work around the lack of dynamic mixin @include support (https://github.com/sass/sass/issues/626)
@if $type == push {
@include make-col-push($size, $columns);
} @else if $type == pull {
@include make-col-pull($size, $columns);
} @else if $type == offset {
@include make-col-offset($size, $columns);
}
}
| {
"pile_set_name": "Github"
} |
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See LICENSE in the project root for license information.
import { args } from './State';
import { getConfig } from './index';
const ENVIRONMENT_VARIABLE_PREFIX: string = 'GCB_';
/**
 * Resolves a build-config value by name, checking sources in priority order:
 * 1. command-line args (lower-cased name), 2. the `GCB_<NAME>` environment
 * variable, 3. the caller-supplied defaultValue, 4. the loaded build config.
 *
 * NOTE(review): the caller's defaultValue deliberately outranks the config
 * file here — confirm this ordering is intended, since config-file values are
 * ignored whenever a defaultValue is passed.
 */
export function getConfigValue(name: string, defaultValue?: string | boolean): string | boolean {
// Try to get config value from environment variable.
const envVariable: string = ENVIRONMENT_VARIABLE_PREFIX + name.toUpperCase();
const envValue: string | undefined = process.env[envVariable];
const argsValue: string | boolean = args[name.toLowerCase()];
// getConfig can be undefined during the first few calls to this function because the build config is initialized
// before the getConfig function is defined. In those cases, a defaultValue is provided.
const configValue: string | boolean = ((getConfig ? getConfig() : {}) || {})[name];
return _firstDefinedValue(argsValue, envValue, defaultValue, configValue);
}
export function getFlagValue(name: string, defaultValue?: boolean): boolean {
const configValue: string | boolean = getConfigValue(name, defaultValue);
return configValue === 'true' || configValue === true;
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function _firstDefinedValue(...values: (string | boolean | undefined)[]): any {
  // First argument that is not `undefined`; `undefined` if none qualifies.
  return values.find((value: string | boolean | undefined) => value !== undefined);
}
| {
"pile_set_name": "Github"
} |
@HD VN:1.4 SO:queryname
@SQ SN:ref1 LN:10000000045
@SQ SN:ref2 LN:10000000040
of1 99 ref1 10000000010 30 23M = 10000000008 21 AAGTCGGCAGCGTCAGATGTGTA ??????????????????????? MQ:i:30 MC:Z:23M
of1 147 ref1 10000000008 30 23M = 10000000010 -21 CTGTCTCTTATACACATCTCCTT ??????????????????????? MQ:i:30 MC:Z:23M
r001 83 ref1 10000000037 30 9M = 10000000007 -39 CAGCGCCAT * MQ:i:30 MC:Z:8M4I4M1D3M
r001 163 ref1 10000000007 30 8M4I4M1D3M = 10000000037 39 TTAGATAAAGAGGATACTG * XX:B:S,12561,2,20,112 YY:i:100 MQ:i:30 MC:Z:9M
r002 0 ref1 10000000009 30 1S2I6M1P1I1P1I4M2I * 0 0 AAAAGATAAGGGATAAA * XA:Z:abc XB:i:-10
r003 33 ref1 10000000009 30 5H6M = 10000000029 25 AGCTAA * MQ:i:30 MC:Z:6H5M
r003 17 ref1 10000000029 30 6H5M = 10000000009 -25 TAGGC * MQ:i:30 MC:Z:5H6M
r004 0 ref1 10000000016 30 6M14N1I5M * 0 0 ATAGCTCTCAGC *
r007 9 ref1 10000000009 30 5H6M = 10000000009 0 AGCTAA * MC:Z:*
r007 5 ref1 10000000009 30 * = 10000000009 0 GGGGGG * MQ:i:30 MC:Z:5H6M
u1 4 * 0 30 * * 0 0 TAATTGGGTCTTCAGAGCACCTA ???????????????????????
x1 0 ref2 10000000001 30 20M * 0 0 AGGTTTTATAAAACAAATAA *
x2 0 ref2 10000000002 30 21M * 0 0 GGTTTTATAAAACAAATAATT ?????????????????????
x3 0 ref2 10000000006 30 9M4I13M * 0 0 TTATAAAACAAATAATTAAGTCTACA ??????????????????????????
x4 0 ref2 10000000010 30 25M * 0 0 CAAATAATTAAGTCTACAGAGCAAC ?????????????????????????
x5 0 ref2 10000000012 30 24M * 0 0 AATAATTAAGTCTACAGAGCAACT ????????????????????????
x6 0 ref2 10000000014 30 23M * 0 0 TAATTAAGTCTACAGAGCAACTA ???????????????????????
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!-- This file was written by the internal XML-Handler of Y-Files.-->
<!DOCTYPE graphml SYSTEM "http://www.graphdrawing.org/dtds/graphml.dtd">
<graphml>
<graph id="G">
<node id="n0"/>
<node id="n1"/>
<node id="n2"/>
<node id="n3"/>
<node id="n4"/>
<node id="n5"/>
<node id="n6"/>
<node id="n7"/>
<node id="n8"/>
<node id="n9"/>
<node id="n10"/>
<node id="n11"/>
<node id="n12"/>
<node id="n13"/>
<node id="n14"/>
<node id="n15"/>
<node id="n16"/>
<node id="n17"/>
<node id="n18"/>
<node id="n19"/>
<node id="n20"/>
<node id="n21"/>
<node id="n22"/>
<node id="n23"/>
<node id="n24"/>
<node id="n25"/>
<node id="n26"/>
<node id="n27"/>
<node id="n28"/>
<node id="n29"/>
<node id="n30"/>
<node id="n31"/>
<node id="n32"/>
<node id="n33"/>
<node id="n34"/>
<node id="n35"/>
<node id="n36"/>
<node id="n37"/>
<node id="n38"/>
<node id="n39"/>
<node id="n40"/>
<node id="n41"/>
<node id="n42"/>
<node id="n43"/>
<node id="n44"/>
<node id="n45"/>
<node id="n46"/>
<node id="n47"/>
<node id="n48"/>
<node id="n49"/>
<node id="n50"/>
<node id="n51"/>
<node id="n52"/>
<edge id="e0" source="n0" target="n1"/>
<edge id="e1" source="n1" target="n4"/>
<edge id="e2" source="n5" target="n1"/>
<edge id="e3" source="n6" target="n5"/>
<edge id="e4" source="n50" target="n2"/>
<edge id="e5" source="n50" target="n6"/>
<edge id="e6" source="n50" target="n18"/>
<edge id="e7" source="n50" target="n22"/>
<edge id="e8" source="n50" target="n25"/>
<edge id="e9" source="n2" target="n3"/>
<edge id="e10" source="n3" target="n10"/>
<edge id="e11" source="n10" target="n11"/>
<edge id="e12" source="n16" target="n0"/>
<edge id="e13" source="n16" target="n8"/>
<edge id="e14" source="n16" target="n10"/>
<edge id="e15" source="n16" target="n19"/>
<edge id="e16" source="n16" target="n20"/>
<edge id="e17" source="n16" target="n21"/>
<edge id="e18" source="n8" target="n14"/>
<edge id="e19" source="n14" target="n23"/>
<edge id="e20" source="n18" target="n14"/>
<edge id="e21" source="n40" target="n10"/>
<edge id="e22" source="n40" target="n17"/>
<edge id="e23" source="n40" target="n18"/>
<edge id="e24" source="n40" target="n19"/>
<edge id="e25" source="n40" target="n21"/>
<edge id="e26" source="n40" target="n26"/>
<edge id="e27" source="n17" target="n1"/>
<edge id="e28" source="n22" target="n17"/>
<edge id="e29" source="n19" target="n35"/>
<edge id="e30" source="n24" target="n3"/>
<edge id="e31" source="n24" target="n5"/>
<edge id="e32" source="n24" target="n19"/>
<edge id="e33" source="n24" target="n20"/>
<edge id="e34" source="n24" target="n25"/>
<edge id="e35" source="n24" target="n26"/>
<edge id="e36" source="n20" target="n39"/>
<edge id="e37" source="n29" target="n20"/>
<edge id="e38" source="n29" target="n21"/>
<edge id="e39" source="n29" target="n26"/>
<edge id="e40" source="n29" target="n27"/>
<edge id="e41" source="n29" target="n30"/>
<edge id="e42" source="n21" target="n43"/>
<edge id="e43" source="n26" target="n41"/>
<edge id="e44" source="n27" target="n1"/>
<edge id="e45" source="n31" target="n27"/>
<edge id="e46" source="n31" target="n30"/>
<edge id="e47" source="n30" target="n14"/>
<edge id="e48" source="n51" target="n7"/>
<edge id="e49" source="n51" target="n31"/>
<edge id="e50" source="n7" target="n0"/>
<edge id="e51" source="n7" target="n8"/>
<edge id="e52" source="n9" target="n7"/>
<edge id="e53" source="n52" target="n9"/>
<edge id="e54" source="n52" target="n31"/>
<edge id="e55" source="n32" target="n29"/>
<edge id="e56" source="n47" target="n15"/>
<edge id="e57" source="n47" target="n32"/>
<edge id="e58" source="n47" target="n33"/>
<edge id="e59" source="n47" target="n42"/>
<edge id="e60" source="n15" target="n16"/>
<edge id="e61" source="n33" target="n24"/>
<edge id="e62" source="n42" target="n40"/>
<edge id="e63" source="n34" target="n29"/>
<edge id="e64" source="n46" target="n16"/>
<edge id="e65" source="n46" target="n24"/>
<edge id="e66" source="n46" target="n34"/>
<edge id="e67" source="n46" target="n44"/>
<edge id="e68" source="n44" target="n40"/>
<edge id="e69" source="n36" target="n29"/>
<edge id="e70" source="n45" target="n24"/>
<edge id="e71" source="n45" target="n28"/>
<edge id="e72" source="n45" target="n36"/>
<edge id="e73" source="n45" target="n40"/>
<edge id="e74" source="n28" target="n16"/>
<edge id="e75" source="n25" target="n14"/>
<edge id="e76" source="n12" target="n13"/>
<edge id="e77" source="n49" target="n12"/>
<edge id="e78" source="n37" target="n38"/>
<edge id="e79" source="n4" target="n13"/>
<edge id="e80" source="n4" target="n37"/>
<edge id="e81" source="n4" target="n48"/>
</graph>
</graphml> | {
"pile_set_name": "Github"
} |
<?php
return [
/*
|--------------------------------------------------------------------------
| Default Broadcaster
|--------------------------------------------------------------------------
|
| This option controls the default broadcaster that will be used by the
| framework when an event needs to be broadcast. You may set this to
| any of the connections defined in the "connections" array below.
|
| Supported: "pusher", "redis", "log", "null"
|
*/
'default' => env('BROADCAST_DRIVER', 'null'),
/*
|--------------------------------------------------------------------------
| Broadcast Connections
|--------------------------------------------------------------------------
|
| Here you may define all of the broadcast connections that will be used
| to broadcast events to other systems or over websockets. Samples of
| each available type of connection are provided inside this array.
|
*/
'connections' => [
// Pusher driver: credentials are read from environment variables.
'pusher' => [
'driver' => 'pusher',
'key' => env('PUSHER_APP_KEY'),
'secret' => env('PUSHER_APP_SECRET'),
'app_id' => env('PUSHER_APP_ID'),
'options' => [
'cluster' => env('PUSHER_APP_CLUSTER'),
'encrypted' => true,
],
],
// Redis driver: publishes over the "default" redis connection.
'redis' => [
'driver' => 'redis',
'connection' => 'default',
],
// Log driver: no external service involved.
'log' => [
'driver' => 'log',
],
// Null driver: broadcasting disabled.
'null' => [
'driver' => 'null',
],
],
];
| {
"pile_set_name": "Github"
} |
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using KitchenPC.Data;
using KitchenPC.Data.DTO;
using log4net;
using NHibernate;
using NHibernate.Persister.Entity;
namespace KitchenPC.DB.Provisioning
{
public class DatabaseExporter : IDisposable, IProvisioner
{
readonly IStatelessSession session;
public static ILog Log = LogManager.GetLogger(typeof (DatabaseExporter));
public DatabaseExporter(IStatelessSession session)
{
this.session = session;
}
IEnumerable<D> ImportTableData<T, D>(Func<IDataReader, D> action) where T : new()
{
using (var cmd = session.Connection.CreateCommand())
{
var persister = session.GetSessionImplementation().GetEntityPersister(null, new T()) as ILockable;
if (persister == null) throw new NullReferenceException();
cmd.CommandType = CommandType.TableDirect;
cmd.CommandText = persister.RootTableName;
using (var reader = cmd.ExecuteReader())
{
while (reader.Read())
{
yield return action(reader);
}
}
}
}
public IngredientForms[] IngredientForms()
{
var list = ImportTableData<Models.IngredientForms, IngredientForms>(r => new IngredientForms
{
IngredientFormId = (Guid) r["IngredientFormId"],
IngredientId = (Guid) r["IngredientId"],
ConvMultiplier = (short) r["ConvMultiplier"],
FormAmount = (float) r["FormAmount"],
UnitType = Unit.Parse<Units>(r["UnitType"]),
UnitName = r["UnitName"] as String,
FormUnit = Unit.Parse<Units>(r["FormUnit"]),
FormDisplayName = r["FormDisplayName"] as String
}).ToArray();
Log.DebugFormat("Read {0} row(s) from IngredientForms.", list.Count());
return list;
}
public IngredientMetadata[] IngredientMetadata()
{
var list = ImportTableData<Models.IngredientMetadata, IngredientMetadata>(r => new IngredientMetadata
{
IngredientMetadataId = (Guid) r["IngredientMetadataId"],
IngredientId = (Guid) r["IngredientId"],
HasMeat = r["HasMeat"] as Boolean?,
CarbsPerUnit = r["CarbsPerUnit"] as Single?,
HasRedMeat = r["HasRedMeat"] as Boolean?,
SugarPerUnit = r["SugarPerUnit"] as Single?,
HasPork = r["HasPork"] as Boolean?,
FatPerUnit = r["FatPerUnit"] as Single?,
SodiumPerUnit = r["SodiumPerUnit"] as Single?,
CaloriesPerUnit = r["CaloriesPerUnit"] as Single?,
Spicy = (Int16) r["Spicy"],
Sweet = (Int16) r["Sweet"],
HasGluten = r["HasGluten"] as Boolean?,
HasAnimal = r["HasAnimal"] as Boolean?,
}).ToArray();
Log.DebugFormat("Read {0} row(s) from IngredientMetadata.", list.Count());
return list;
}
public Data.DTO.Ingredients[] Ingredients()
{
var list = ImportTableData<Models.Ingredients, Data.DTO.Ingredients>(r => new Data.DTO.Ingredients
{
IngredientId = (Guid) r["IngredientId"],
UsdaId = r["UsdaId"] as String,
FoodGroup = r["FoodGroup"] as String,
DisplayName = r["DisplayName"] as String,
ManufacturerName = r["ManufacturerName"] as String,
ConversionType = Unit.Parse<UnitType>(r["ConversionType"]),
UnitName = r["UnitName"] as String,
UsdaDesc = r["UsdaDesc"] as String,
UnitWeight = (Int32) r["UnitWeight"]
}).ToArray();
Log.DebugFormat("Read {0} row(s) from Ingredients.", list.Count());
return list;
}
public NlpAnomalousIngredients[] NlpAnomalousIngredients()
{
var list = ImportTableData<Models.NlpAnomalousIngredients, NlpAnomalousIngredients>(r => new NlpAnomalousIngredients
{
AnomalousIngredientId = (Guid) r["AnomalousIngredientId"],
Name = r["Name"] as String,
IngredientId = (Guid) r["IngredientId"],
WeightFormId = r["WeightFormId"] as Guid?,
VolumeFormId = r["VolumeFormId"] as Guid?,
UnitFormId = r["UnitFormId"] as Guid?
}).ToArray();
Log.DebugFormat("Read {0} row(s) from NlpAnomalousIngredients.", list.Count());
return list;
}
public NlpDefaultPairings[] NlpDefaultPairings()
{
var list = ImportTableData<Models.NlpDefaultPairings, NlpDefaultPairings>(r => new NlpDefaultPairings
{
DefaultPairingId = (Guid) r["DefaultPairingId"],
IngredientId = (Guid) r["IngredientId"],
WeightFormId = r["WeightFormId"] as Guid?,
VolumeFormId = r["VolumeFormId"] as Guid?,
UnitFormId = r["UnitFormId"] as Guid?
}).ToArray();
Log.DebugFormat("Read {0} row(s) from NlpDefaultPairings.", list.Count());
return list;
}
public NlpFormSynonyms[] NlpFormSynonyms()
{
var list = ImportTableData<Models.NlpFormSynonyms, NlpFormSynonyms>(r => new NlpFormSynonyms
{
FormSynonymId = (Guid) r["FormSynonymId"],
IngredientId = (Guid) r["IngredientId"],
FormId = (Guid) r["FormId"],
Name = r["Name"] as String
}).ToArray();
Log.DebugFormat("Read {0} row(s) from NlpFormSynonyms.", list.Count());
return list;
}
public NlpIngredientSynonyms[] NlpIngredientSynonyms()
{
var list = ImportTableData<Models.NlpIngredientSynonyms, NlpIngredientSynonyms>(r => new NlpIngredientSynonyms
{
IngredientSynonymId = (Guid) r["IngredientSynonymId"],
IngredientId = (Guid) r["IngredientId"],
Alias = r["Alias"] as String,
Prepnote = r["Prepnote"] as String
}).ToArray();
Log.DebugFormat("Read {0} row(s) from NlpIngredientSynonyms.", list.Count());
return list;
}
public NlpPrepNotes[] NlpPrepNotes()
{
var list = ImportTableData<Models.NlpPrepNotes, NlpPrepNotes>(r => new NlpPrepNotes
{
Name = r["Name"] as String
}).ToArray();
Log.DebugFormat("Read {0} row(s) from NlpPrepNotes.", list.Count());
return list;
}
public NlpUnitSynonyms[] NlpUnitSynonyms()
{
var list = ImportTableData<Models.NlpUnitSynonyms, NlpUnitSynonyms>(r => new NlpUnitSynonyms
{
UnitSynonymId = (Guid) r["UnitSynonymId"],
IngredientId = (Guid) r["IngredientId"],
FormId = (Guid) r["FormId"],
Name = r["Name"] as String
}).ToArray();
Log.DebugFormat("Read {0} row(s) from NlpUnitSynonyms.", list.Count());
return list;
}
public List<Data.DTO.Recipes> Recipes()
{
var list = ImportTableData<Models.Recipes, Data.DTO.Recipes>(r => new Data.DTO.Recipes
{
RecipeId = (Guid) r["RecipeId"],
CookTime = r["CookTime"] as Int16?,
Steps = r["Steps"] as String,
PrepTime = r["PrepTime"] as Int16?,
Rating = (Int16) r["Rating"],
Description = r["Description"] as String,
Title = r["Title"] as String,
Hidden = (bool) r["Hidden"],
Credit = r["Credit"] as String,
CreditUrl = r["CreditUrl"] as String,
DateEntered = (DateTime) r["DateEntered"],
ServingSize = (Int16) r["ServingSize"],
ImageUrl = r["ImageUrl"] as String
}).ToList();
Log.DebugFormat("Read {0} row(s) from Recipes.", list.Count());
return list;
}
public List<RecipeMetadata> RecipeMetadata()
{
var list = ImportTableData<Models.RecipeMetadata, RecipeMetadata>(r => new RecipeMetadata
{
RecipeMetadataId = (Guid) r["RecipeMetadataId"],
RecipeId = (Guid) r["RecipeId"],
PhotoRes = (Int32) r["PhotoRes"],
Commonality = (Single) r["Commonality"],
UsdaMatch = (bool) r["UsdaMatch"],
MealBreakfast = (bool) r["MealBreakfast"],
MealLunch = (bool) r["MealLunch"],
MealDinner = (bool) r["MealDinner"],
MealDessert = (bool) r["MealDessert"],
DietNomeat = (bool) r["DietNomeat"],
DietGlutenFree = (bool) r["DietGlutenFree"],
DietNoRedMeat = (bool) r["DietNoRedMeat"],
DietNoAnimals = (bool) r["DietNoAnimals"],
DietNoPork = (bool) r["DietNoPork"],
NutritionTotalfat = (Int16) r["NutritionTotalfat"],
NutritionTotalSodium = (Int16) r["NutritionTotalSodium"],
NutritionLowSodium = (bool) r["NutritionLowSodium"],
NutritionLowSugar = (bool) r["NutritionLowSugar"],
NutritionLowCalorie = (bool) r["NutritionLowCalorie"],
NutritionTotalSugar = (Int16) r["NutritionTotalSugar"],
NutritionTotalCalories = (Int16) r["NutritionTotalCalories"],
NutritionLowFat = (bool) r["NutritionLowFat"],
NutritionLowCarb = (bool) r["NutritionLowCarb"],
NutritionTotalCarbs = (Int16) r["NutritionTotalCarbs"],
SkillQuick = (bool) r["SkillQuick"],
SkillEasy = (bool) r["SkillEasy"],
SkillCommon = (bool) r["SkillCommon"],
TasteMildToSpicy = (Int16) r["TasteMildToSpicy"],
TasteSavoryToSweet = (Int16) r["TasteSavoryToSweet"]
}).ToList();
Log.DebugFormat("Read {0} row(s) from RecipeMetadata.", list.Count());
return list;
}
public List<RecipeIngredients> RecipeIngredients()
{
var list = ImportTableData<Models.RecipeIngredients, RecipeIngredients>(r => new RecipeIngredients
{
RecipeIngredientId = (Guid) r["RecipeIngredientId"],
RecipeId = (Guid) r["RecipeId"],
IngredientId = (Guid) r["IngredientId"],
IngredientFormId = r["IngredientFormId"] as Guid?,
Unit = Unit.Parse<Units>(r["Unit"]),
QtyLow = r["QtyLow"] as Single?,
DisplayOrder = (Int16) r["DisplayOrder"],
PrepNote = r["PrepNote"] as String,
Qty = r["Qty"] as Single?,
Section = r["Section"] as String
}).ToList();
Log.DebugFormat("Read {0} row(s) from RecipeIngredients.", list.Count());
return list;
}
public List<Favorites> Favorites()
{
var list = ImportTableData<Models.Favorites, Favorites>(r => new Favorites
{
FavoriteId = (Guid) r["FavoriteId"],
UserId = (Guid) r["UserId"],
RecipeId = (Guid) r["RecipeId"],
MenuId = r["MenuId"] as Guid?
}).ToList();
Log.DebugFormat("Read {0} row(s) from Favorites.", list.Count());
return list;
}
public List<Data.DTO.Menus> Menus()
{
var list = ImportTableData<Models.Menus, Data.DTO.Menus>(r => new Data.DTO.Menus
{
MenuId = (Guid) r["MenuId"],
UserId = (Guid) r["UserId"],
Title = r["Title"] as String,
CreatedDate = (DateTime) r["CreatedDate"]
}).ToList();
Log.DebugFormat("Read {0} row(s) from Menus.", list.Count());
return list;
}
public List<QueuedRecipes> QueuedRecipes()
{
var list = ImportTableData<Models.QueuedRecipes, QueuedRecipes>(r => new QueuedRecipes
{
QueueId = (Guid) r["QueueId"],
UserId = (Guid) r["UserId"],
RecipeId = (Guid) r["RecipeId"],
QueuedDate = (DateTime) r["QueuedDate"]
}).ToList();
Log.DebugFormat("Read {0} row(s) from QueuedRecipes.", list.Count());
return list;
}
public List<RecipeRatings> RecipeRatings()
{
var list = ImportTableData<Models.RecipeRatings, RecipeRatings>(r => new RecipeRatings
{
RatingId = (Guid) r["RatingId"],
UserId = (Guid) r["UserId"],
RecipeId = (Guid) r["RecipeId"],
Rating = (Int16) r["Rating"]
}).ToList();
Log.DebugFormat("Read {0} row(s) from RecipeRatings.", list.Count());
return list;
}
public List<Data.DTO.ShoppingLists> ShoppingLists()
{
var list = ImportTableData<Models.ShoppingLists, Data.DTO.ShoppingLists>(r => new Data.DTO.ShoppingLists
{
ShoppingListId = (Guid) r["ShoppingListId"],
UserId = (Guid) r["UserId"],
Title = r["Title"] as String
}).ToList();
Log.DebugFormat("Read {0} row(s) from ShoppingLists.", list.Count());
return list;
}
public List<ShoppingListItems> ShoppingListItems()
{
var list = ImportTableData<Models.ShoppingListItems, ShoppingListItems>(r => new ShoppingListItems
{
ItemId = (Guid) r["ItemId"],
Raw = r["Raw"] as String,
Qty = r["Qty"] as Single?,
Unit = Unit.ParseNullable<Units>(r["Unit"]),
UserId = (Guid) r["UserId"],
IngredientId = r["IngredientId"] as Guid?,
RecipeId = r["RecipeId"] as Guid?,
ShoppingListId = r["ShoppingListId"] as Guid?,
CrossedOut = (bool) r["CrossedOut"]
}).ToList();
Log.DebugFormat("Read {0} row(s) from ShoppingListItems.", list.Count());
return list;
}
/// <summary>Releases the underlying database session.</summary>
public void Dispose()
{
    session.Dispose();
}
}
} | {
"pile_set_name": "Github"
} |
g1 1 . A . . PASS . GT:DP:RE:GQ:AVR 0/0:5:0.000:31.0:0.22 0/0:5:0.000:31.0:0.38
g1 8 . A . . PASS . GT:DP:RE:GQ:AVR 0/0:7:0.000:31.0:0.23 0/0:7:0.000:31.0:0.37
g1 11 . A . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:2:1.002:36.0:A,2,1.002:0.24 0/0:2:1.002:36.0:A,2,1.002:0.36:a1000.0
g1 54 . T . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:2:1.002:36.0:T,2,1.002:0.25 0/0:2:1.002:36.0:T,2,1.002:0.35:a1000.0
g1 65 . C . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:3:1.504:37.0:C,3,1.504:0.26 0/0:3:1.504:37.0:C,3,1.504:0.34:a1000.0
g1 76 . G . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:5:2.506:41.0:G,5,2.506:0.27 0/0:5:2.506:41.0:G,5,2.506:0.33:a1000.0
g1 87 . A . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:5:2.506:43.0:A,5,2.506:0.28 0/0:5:2.506:43.0:A,5,2.506:0.32:a1000.0
g1 98 . C . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:5:2.506:41.0:C,5,2.506:0.29 0/0:5:2.506:41.0:C,5,2.506:0.31:a1000.0
g1 109 . T . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:5:2.506:43.0:T,5,2.506:0.3 0/0:5:2.506:43.0:T,5,2.506:0.3:a1000.0
g1 110 . G . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:5:2.506:41.0:G,5,2.506:0.31 0/0:5:2.506:41.0:G,5,2.506:0.29:a1000.0
g1 115 . G . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:4:2.005:20.0:C,2,1.002,T,2,1.002:0.32 0/0:4:2.005:20.0:C,2,1.002,T,2,1.002:0.28:a1000.0
g1 125 . T . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:4:2.005:41.0:T,4,2.005:0.33 0/0:4:2.005:41.0:T,4,2.005:0.27:a1000.0
g1 135 . C . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:3:1.504:37.0:C,3,1.504:0.34 0/0:3:1.504:37.0:C,3,1.504:0.26:a1000.0
g1 145 . A . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:1:0.501:33.0:A,1,0.501:0.35 0/0:1:0.501:33.0:A,1,0.501:0.25:a1000.0
g1 155 . G . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:1:0.501:31.0:G,1,0.501:0.36 0/0:1:0.501:31.0:G,1,0.501:0.24:a1000.0
g1 165 . C . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:1:0.501:31.0:C,1,0.501:0.37 0/0:1:0.501:31.0:C,1,0.501:0.23:a1000.0
g1 175 . T . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:1:0.501:34.0:T,1,0.501:0.38 0/0:1:0.501:34.0:T,1,0.501:0.22:a1000.0
g1 185 . A . . a1000.0 . GT:DP:RE:GQ:RS:AVR:FT 0/0:1:0.501:33.0:A,1,0.501:0.39 0/0:1:0.501:33.0:A,1,0.501:0.21:a1000.0
g1 195 . G . . PASS . GT:DP:RE:GQ:AVR 0/0:7:0.000:48.0:0.4 0/0:7:0.000:48.0:0.2
g1 205 . G . . PASS . GT:DP:RE:GQ:AVR 0/0:7:0.000:48.0:0.41 0/0:7:0.000:48.0:0.19
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE score-partwise PUBLIC "-//Recordare//DTD MusicXML 1.0 Partwise//EN"
"http://www.musicxml.org/dtds/partwise.dtd">
<score-partwise>
<identification>
<miscellaneous>
<miscellaneous-field name="description">Some grace notes and after-graces
(indicated by steal-time-previous and steal-time-following).</miscellaneous-field>
</miscellaneous>
</identification>
<part-list>
<score-part id="P1">
<part-name>MusicXML Part</part-name>
</score-part>
</part-list>
<!--=========================================================-->
<part id="P1">
<measure number="25">
<attributes>
<divisions>32</divisions>
<key>
<fifths>0</fifths>
<mode>major</mode>
</key>
<time>
<beats>4</beats>
<beat-type>4</beat-type>
</time>
<clef>
<sign>G</sign>
<line>2</line>
</clef>
</attributes>
<note>
<pitch>
<step>E</step>
<octave>5</octave>
</pitch>
<duration>64</duration>
<voice>1</voice>
<type>half</type>
<staff>1</staff>
</note>
<note>
<grace steal-time-previous="20"/>
<pitch>
<step>G</step>
<octave>5</octave>
</pitch>
<voice>1</voice>
<type>16th</type>
<staff>1</staff>
</note>
<note>
<grace steal-time-following="20"/>
<pitch>
<step>A</step>
<octave>5</octave>
</pitch>
<voice>1</voice>
<type>16th</type>
<staff>1</staff>
</note>
<note>
<grace/>
<pitch>
<step>A</step>
<octave>5</octave>
</pitch>
<voice>1</voice>
<type>16th</type>
<staff>1</staff>
</note>
<note>
<pitch>
<step>E</step>
<octave>5</octave>
</pitch>
<duration>64</duration>
<voice>1</voice>
<type>half</type>
<staff>1</staff>
</note>
<note>
<grace/>
<pitch>
<step>G</step>
<octave>5</octave>
</pitch>
<voice>1</voice>
<type>16th</type>
<staff>1</staff>
<beam number="1">begin</beam>
<beam number="2">begin</beam>
</note>
<note>
<grace/>
<pitch>
<step>A</step>
<octave>5</octave>
</pitch>
<voice>1</voice>
<type>16th</type>
<staff>1</staff>
<beam number="1">end</beam>
<beam number="2">end</beam>
</note>
</measure>
</part>
<!--=========================================================-->
</score-partwise>
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2015-2016 K Team. All Rights Reserved.
package org.kframework.utils;
import java.util.stream.Collectors;
/**
 * {@link BitSet} implementation backed by four words. Faster than using the JDK {@link java.util.BitSet}.
 *
 * <p>Capacity is fixed at 256 bits (4 x 64). Bit {@code i} is stored in word {@code i / 64}
 * at position {@code i % 64}: {@code word0} holds bits 0-63, {@code word1} bits 64-127,
 * {@code word2} bits 128-191 and {@code word3} bits 192-255.
 */
public class FourWordBitSet implements BitSet<FourWordBitSet> {

    /** All 64 bits set; shifted left to build "every bit from position i upwards" masks. */
    private static final long WORD_MASK = 0xffffffffffffffffL;

    private long word0; // bits 0..63
    private long word1; // bits 64..127
    private long word2; // bits 128..191
    private long word3; // bits 192..255

    /** Creates a bit set whose four backing words are initialized to the given values. */
    public FourWordBitSet(long word0, long word1, long word2, long word3) {
        this.word0 = word0;
        this.word1 = word1;
        this.word2 = word2;
        this.word3 = word3;
    }

    /** Creates an empty bit set (all 256 bits clear). */
    public FourWordBitSet() {
        this(0, 0, 0, 0);
    }

    /** Destructive union: sets every bit in this set that is set in {@code bitSet}. */
    @Override
    public void or(FourWordBitSet bitSet) {
        word0 |= bitSet.word0;
        word1 |= bitSet.word1;
        word2 |= bitSet.word2;
        word3 |= bitSet.word3;
    }

    /** Destructive intersection: clears every bit in this set that is clear in {@code bitSet}. */
    @Override
    public void and(FourWordBitSet bitSet) {
        word0 &= bitSet.word0;
        word1 &= bitSet.word1;
        word2 &= bitSet.word2;
        word3 &= bitSet.word3;
    }

    /** Returns true if this set and {@code bitSet} have at least one set bit in common. */
    @Override
    public boolean intersects(FourWordBitSet bitSet) {
        return (word0 & bitSet.word0) != 0
            || (word1 & bitSet.word1) != 0
            || (word2 & bitSet.word2) != 0
            || (word3 & bitSet.word3) != 0;
    }

    /** Returns true if every bit set in this set is also set in {@code bitSet}. */
    @Override
    public boolean subset(FourWordBitSet bitSet) {
        return word0 == (word0 & bitSet.word0)
            && word1 == (word1 & bitSet.word1)
            && word2 == (word2 & bitSet.word2)
            && word3 == (word3 & bitSet.word3);
    }

    /** Returns the value of bit {@code i} (0 <= i < 256). */
    @Override
    public boolean get(int i) {
        assert i < size();
        // Walk the words, reducing i by 64 each time until it indexes the owning word.
        if (i < Long.SIZE) {
            return (word0 & 1L << i) != 0;
        }
        i -= Long.SIZE;
        if (i < Long.SIZE) {
            return (word1 & 1L << i) != 0;
        }
        i -= Long.SIZE;
        if (i < Long.SIZE) {
            return (word2 & 1L << i) != 0;
        }
        i -= Long.SIZE;
        return (word3 & 1L << i) != 0;
    }

    /** Sets bit {@code i} to true. */
    @Override
    public void set(int i) {
        assert i < size();
        if (i < Long.SIZE) {
            word0 |= 1L << i;
            return;
        }
        i -= Long.SIZE;
        if (i < Long.SIZE) {
            word1 |= 1L << i;
            return;
        }
        i -= Long.SIZE;
        if (i < Long.SIZE) {
            word2 |= 1L << i;
            return;
        }
        i -= Long.SIZE;
        word3 |= 1L << i;
    }

    /** Sets bit {@code i} to false. */
    @Override
    public void clear(int i) {
        assert i < size();
        if (i < Long.SIZE) {
            word0 &= ~(1L << i);
            return;
        }
        i -= Long.SIZE;
        if (i < Long.SIZE) {
            word1 &= ~(1L << i);
            return;
        }
        i -= Long.SIZE;
        if (i < Long.SIZE) {
            word2 &= ~(1L << i);
            return;
        }
        i -= Long.SIZE;
        word3 &= ~(1L << i);
    }

    /**
     * Returns the index of the first set bit at position {@code i} or above,
     * or -1 if there is no such bit.
     */
    @Override
    public int nextSetBit(int i) {
        assert i <= size();
        if (i == size()) {
            return -1;
        }
        if (i < Long.SIZE) {
            // Mask off bits below i and take the lowest remaining set bit.
            long maskedWord = word0 & (WORD_MASK << i);
            if (maskedWord != 0) {
                return Long.numberOfTrailingZeros(maskedWord);
            }
            // Nothing at or after i in word0; continue from the start of word1.
            i = Long.SIZE;
        }
        i -= Long.SIZE;
        if (i < Long.SIZE) {
            long maskedWord = word1 & (WORD_MASK << i);
            if (maskedWord != 0) {
                return Long.numberOfTrailingZeros(maskedWord) + Long.SIZE;
            }
            i = Long.SIZE;
        }
        i -= Long.SIZE;
        if (i < Long.SIZE) {
            long maskedWord = word2 & (WORD_MASK << i);
            if (maskedWord != 0) {
                return Long.numberOfTrailingZeros(maskedWord) + 2 * Long.SIZE;
            }
            i = Long.SIZE;
        }
        i -= Long.SIZE;
        long maskedWord = word3 & (WORD_MASK << i);
        return maskedWord == 0 ? -1 : Long.numberOfTrailingZeros(maskedWord) + 3 * Long.SIZE;
    }

    /** Returns true if no bit is set. */
    @Override
    public boolean isEmpty() {
        return word0 == 0
            && word1 == 0
            && word2 == 0
            && word3 == 0;
    }

    /**
     * NOTE(review): unlike {@link java.util.BitSet#length()}, this returns the fixed
     * capacity (256) rather than (highest set bit + 1). The commented-out line below is
     * the logical-length computation inherited from the one-word variant. Confirm callers
     * rely on the current behavior before changing it.
     */
    @Override
    public int length() {
        //return Long.SIZE - Long.numberOfLeadingZeros(word);
        return size();
    }

    /** Fixed capacity in bits: 4 * 64 = 256. */
    @Override
    public int size() {
        return 4 * Long.SIZE;
    }

    /** Number of set bits across all four words. */
    @Override
    public int cardinality() {
        return Long.bitCount(word0) + Long.bitCount(word1) + Long.bitCount(word2) + Long.bitCount(word3);
    }

    /** Clears all bits. */
    @Override
    public void clear() {
        word0 = 0;
        word1 = 0;
        word2 = 0;
        word3 = 0;
    }

    /** Returns an independent copy holding the same bit values. */
    @Override
    public FourWordBitSet clone() {
        return new FourWordBitSet(word0, word1, word2, word3);
    }

    /**
     * Renders the indices of the set bits, e.g. "{ 1, 5, 200}".
     * stream() presumably comes from the BitSet super-interface — not visible here.
     */
    @Override
    public String toString() {
        return "{ " + stream().mapToObj(i -> Integer.toString(i)).collect(Collectors.joining(", ")) + "}";
    }
}
| {
"pile_set_name": "Github"
} |
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/snapshot/snapshot-data.h"
#include "src/common/assert-scope.h"
#include "src/snapshot/serializer.h"
#ifdef V8_SNAPSHOT_COMPRESSION
#include "src/snapshot/snapshot-compression.h"
#endif
namespace v8 {
namespace internal {
// Allocates a fresh backing buffer of |size| bytes and takes ownership of it.
// Must not be called when this object already owns a buffer.
void SerializedData::AllocateData(uint32_t size) {
  DCHECK(!owns_data_);
  data_ = NewArray<byte>(size);
  size_ = size;
  owns_data_ = true;
}
// static
constexpr uint32_t SerializedData::kMagicNumber;
// Builds the serialized snapshot blob with layout:
//   [header | reservation chunk sizes | padding | payload]
// The payload is aligned to pointer size so it can be consumed in place.
SnapshotData::SnapshotData(const Serializer* serializer) {
  DisallowGarbageCollection no_gc;
  std::vector<Reservation> reservations = serializer->EncodeReservations();
  const std::vector<byte>* payload = serializer->Payload();

  // Calculate sizes.
  uint32_t reservation_size =
      static_cast<uint32_t>(reservations.size()) * kUInt32Size;
  uint32_t payload_offset = kHeaderSize + reservation_size;
  uint32_t padded_payload_offset = POINTER_SIZE_ALIGN(payload_offset);
  uint32_t size =
      padded_payload_offset + static_cast<uint32_t>(payload->size());

  // Allocate backing store and create result data.
  AllocateData(size);

  // Zero out pre-payload data. Part of that is only used for padding.
  memset(data_, 0, padded_payload_offset);

  // Set header values.
  SetMagicNumber();
  SetHeaderValue(kNumReservationsOffset, static_cast<int>(reservations.size()));
  SetHeaderValue(kPayloadLengthOffset, static_cast<int>(payload->size()));

  // Copy reservation chunk sizes.
  CopyBytes(data_ + kHeaderSize, reinterpret_cast<byte*>(reservations.data()),
            reservation_size);

  // Copy serialized data.
  CopyBytes(data_ + padded_payload_offset, payload->data(),
            static_cast<size_t>(payload->size()));
}
// Decodes the reservation entries stored immediately after the header.
// The entry count comes from the header; entries are copied out verbatim.
std::vector<SerializedData::Reservation> SnapshotData::Reservations() const {
  uint32_t size = GetHeaderValue(kNumReservationsOffset);
  std::vector<SerializedData::Reservation> reservations(size);
  memcpy(reservations.data(), data_ + kHeaderSize,
         size * sizeof(SerializedData::Reservation));
  return reservations;
}
// Returns a view of the payload region (pointer-size aligned, following the
// header and reservation data). The view aliases |data_|; no copy is made.
Vector<const byte> SnapshotData::Payload() const {
  uint32_t reservations_size =
      GetHeaderValue(kNumReservationsOffset) * kUInt32Size;
  uint32_t padded_payload_offset =
      POINTER_SIZE_ALIGN(kHeaderSize + reservations_size);
  const byte* payload = data_ + padded_payload_offset;
  uint32_t length = GetHeaderValue(kPayloadLengthOffset);
  // The payload must extend exactly to the end of the backing buffer.
  DCHECK_EQ(data_ + size_, payload + length);
  return Vector<const byte>(payload, length);
}
} // namespace internal
} // namespace v8
| {
"pile_set_name": "Github"
} |
## Version 1.5 (2020-01-07)
Changes:
- Dropped support for Go 1.9 and lower (#823, #829, #886, #1016, #1017)
- Improve buffer handling (#890)
- Document potentially insecure TLS configs (#901)
- Use a double-buffering scheme to prevent data races (#943)
- Pass uint64 values without converting them to string (#838, #955)
- Update collations and make utf8mb4 default (#877, #1054)
- Make NullTime compatible with sql.NullTime in Go 1.13+ (#995)
- Removed CloudSQL support (#993, #1007)
- Add Go Module support (#1003)
New Features:
- Implement support of optional TLS (#900)
- Check connection liveness (#934, #964, #997, #1048, #1051, #1052)
- Implement Connector Interface (#941, #958, #1020, #1035)
Bugfixes:
- Mark connections as bad on error during ping (#875)
- Mark connections as bad on error during dial (#867)
- Fix connection leak caused by rapid context cancellation (#1024)
- Mark connections as bad on error during Conn.Prepare (#1030)
## Version 1.4.1 (2018-11-14)
Bugfixes:
- Fix TIME format for binary columns (#818)
- Fix handling of empty auth plugin names (#835)
- Fix caching_sha2_password with empty password (#826)
- Fix canceled context broke mysqlConn (#862)
- Fix OldAuthSwitchRequest support (#870)
- Fix Auth Response packet for cleartext password (#887)
## Version 1.4 (2018-06-03)
Changes:
- Documentation fixes (#530, #535, #567)
- Refactoring (#575, #579, #580, #581, #603, #615, #704)
- Cache column names (#444)
- Sort the DSN parameters in DSNs generated from a config (#637)
- Allow native password authentication by default (#644)
- Use the default port if it is missing in the DSN (#668)
- Removed the `strict` mode (#676)
- Do not query `max_allowed_packet` by default (#680)
- Dropped support Go 1.6 and lower (#696)
- Updated `ConvertValue()` to match the database/sql/driver implementation (#760)
- Document the usage of `0000-00-00T00:00:00` as the time.Time zero value (#783)
- Improved the compatibility of the authentication system (#807)
New Features:
- Multi-Results support (#537)
- `rejectReadOnly` DSN option (#604)
- `context.Context` support (#608, #612, #627, #761)
- Transaction isolation level support (#619, #744)
- Read-Only transactions support (#618, #634)
- `NewConfig` function which initializes a config with default values (#679)
- Implemented the `ColumnType` interfaces (#667, #724)
- Support for custom string types in `ConvertValue` (#623)
- Implemented `NamedValueChecker`, improving support for uint64 with high bit set (#690, #709, #710)
- `caching_sha2_password` authentication plugin support (#794, #800, #801, #802)
- Implemented `driver.SessionResetter` (#779)
- `sha256_password` authentication plugin support (#808)
Bugfixes:
- Use the DSN hostname as TLS default ServerName if `tls=true` (#564, #718)
- Fixed LOAD LOCAL DATA INFILE for empty files (#590)
- Removed columns definition cache since it sometimes cached invalid data (#592)
- Don't mutate registered TLS configs (#600)
- Make RegisterTLSConfig concurrency-safe (#613)
- Handle missing auth data in the handshake packet correctly (#646)
- Do not retry queries when data was written to avoid data corruption (#302, #736)
- Cache the connection pointer for error handling before invalidating it (#678)
- Fixed imports for appengine/cloudsql (#700)
- Fix sending STMT_LONG_DATA for 0 byte data (#734)
- Set correct capacity for []bytes read from length-encoded strings (#766)
- Make RegisterDial concurrency-safe (#773)
## Version 1.3 (2016-12-01)
Changes:
- Go 1.1 is no longer supported
- Use decimals fields in MySQL to format time types (#249)
- Buffer optimizations (#269)
- TLS ServerName defaults to the host (#283)
- Refactoring (#400, #410, #437)
- Adjusted documentation for second generation CloudSQL (#485)
- Documented DSN system var quoting rules (#502)
- Made statement.Close() calls idempotent to avoid errors in Go 1.6+ (#512)
New Features:
- Enable microsecond resolution on TIME, DATETIME and TIMESTAMP (#249)
- Support for returning table alias on Columns() (#289, #359, #382)
- Placeholder interpolation, can be activated with the DSN parameter `interpolateParams=true` (#309, #318, #490)
- Support for uint64 parameters with high bit set (#332, #345)
- Cleartext authentication plugin support (#327)
- Exported ParseDSN function and the Config struct (#403, #419, #429)
- Read / Write timeouts (#401)
- Support for JSON field type (#414)
- Support for multi-statements and multi-results (#411, #431)
- DSN parameter to set the driver-side max_allowed_packet value manually (#489)
- Native password authentication plugin support (#494, #524)
Bugfixes:
- Fixed handling of queries without columns and rows (#255)
- Fixed a panic when SetKeepAlive() failed (#298)
- Handle ERR packets while reading rows (#321)
- Fixed reading NULL length-encoded integers in MySQL 5.6+ (#349)
- Fixed absolute paths support in LOAD LOCAL DATA INFILE (#356)
- Actually zero out bytes in handshake response (#378)
- Fixed race condition in registering LOAD DATA INFILE handler (#383)
- Fixed tests with MySQL 5.7.9+ (#380)
- QueryUnescape TLS config names (#397)
- Fixed "broken pipe" error by writing to closed socket (#390)
- Fixed LOAD LOCAL DATA INFILE buffering (#424)
- Fixed parsing of floats into float64 when placeholders are used (#434)
- Fixed DSN tests with Go 1.7+ (#459)
- Handle ERR packets while waiting for EOF (#473)
- Invalidate connection on error while discarding additional results (#513)
- Allow terminating packets of length 0 (#516)
## Version 1.2 (2014-06-03)
Changes:
- We switched back to a "rolling release". `go get` installs the current master branch again
- Version v1 of the driver will not be maintained anymore. Go 1.0 is no longer supported by this driver
- Exported errors to allow easy checking from application code
- Enabled TCP Keepalives on TCP connections
- Optimized INFILE handling (better buffer size calculation, lazy init, ...)
- The DSN parser also checks for a missing separating slash
- Faster binary date / datetime to string formatting
- Also exported the MySQLWarning type
- mysqlConn.Close returns the first error encountered instead of ignoring all errors
- writePacket() automatically writes the packet size to the header
- readPacket() uses an iterative approach instead of the recursive approach to merge split packets
New Features:
- `RegisterDial` allows the usage of a custom dial function to establish the network connection
- Setting the connection collation is possible with the `collation` DSN parameter. This parameter should be preferred over the `charset` parameter
- Logging of critical errors is configurable with `SetLogger`
- Google CloudSQL support
Bugfixes:
- Allow more than 32 parameters in prepared statements
- Various old_password fixes
- Fixed TestConcurrent test to pass Go's race detection
- Fixed appendLengthEncodedInteger for large numbers
- Renamed readLengthEnodedString to readLengthEncodedString and skipLengthEnodedString to skipLengthEncodedString (fixed typo)
## Version 1.1 (2013-11-02)
Changes:
- Go-MySQL-Driver now requires Go 1.1
- Connections now use the collation `utf8_general_ci` by default. Adding `&charset=UTF8` to the DSN should not be necessary anymore
- Made closing rows and connections error tolerant. This allows for example deferring rows.Close() without checking for errors
- `[]byte(nil)` is now treated as a NULL value. Before, it was treated like an empty string / `[]byte("")`
- DSN parameter values must now be url.QueryEscape'ed. This allows text values to contain special characters, such as '&'.
- Use the IO buffer also for writing. This results in zero allocations (by the driver) for most queries
- Optimized the buffer for reading
- stmt.Query now caches column metadata
- New Logo
- Changed the copyright header to include all contributors
- Improved the LOAD INFILE documentation
- The driver struct is now exported to make the driver directly accessible
- Refactored the driver tests
- Added more benchmarks and moved all to a separate file
- Other small refactoring
New Features:
- Added *old_passwords* support: Required in some cases, but must be enabled by adding `allowOldPasswords=true` to the DSN since it is insecure
- Added a `clientFoundRows` parameter: Return the number of matching rows instead of the number of rows changed on UPDATEs
- Added TLS/SSL support: Use a TLS/SSL encrypted connection to the server. Custom TLS configs can be registered and used
Bugfixes:
- Fixed MySQL 4.1 support: MySQL 4.1 sends packets with lengths which differ from the specification
- Convert to DB timezone when inserting `time.Time`
- Split packets (more than 16MB) are now merged correctly
- Fixed false positive `io.EOF` errors when the data was fully read
- Avoid panics on reuse of closed connections
- Fixed empty string producing false nil values
- Fixed sign byte for positive TIME fields
## Version 1.0 (2013-05-14)
Initial Release
| {
"pile_set_name": "Github"
} |
namespace NHibernate.AdoNet.Util
{
    /// <summary>
    /// Produces a formatted version of a source string.
    /// </summary>
    public interface IFormatter
    {
        /// <summary>Returns the formatted form of <paramref name="source"/>.</summary>
        string Format(string source);
    }
}
"pile_set_name": "Github"
} |
/*
(c) 2014-2015 Glen Joseph Fernandes
<glenjofe -at- gmail.com>
Distributed under the Boost Software
License, Version 1.0.
http://boost.org/LICENSE_1_0.txt
*/
#ifndef BOOST_ALIGN_DETAIL_MAX_ALIGN_HPP
#define BOOST_ALIGN_DETAIL_MAX_ALIGN_HPP

#include <boost/align/detail/max_size.hpp>
#include <boost/align/alignment_of.hpp>

namespace boost {
namespace alignment {
namespace detail {

/* Integral constant equal to the larger of the two types'
   alignment requirements (delegates to max_size). */
template<class A, class B>
struct max_align
    : max_size<alignment_of<A>::value, alignment_of<B>::value> { };

} /* .detail */
} /* .alignment */
} /* .boost */

#endif
| {
"pile_set_name": "Github"
} |
Monkey HTTP Server
==================
Copyright 2001-2014 Monkey Software LLC
This product includes software developed at
Monkey Software LLC (http://monkey.io).
note:
-----
Starting from May 8th 2014, the source code on this directory
is Licensed under the Apache License v2.0 and Copyrighted to
Monkey Software LLC.
| {
"pile_set_name": "Github"
} |
- content_for :title do
= t('admin2.manage_transactions.show_transaction')
- pr = @transactions_presenter
- transaction = pr.transaction
- listing_title = transaction.listing_title || t('admin.communities.transactions.not_available')
- link = link_to_unless(transaction.listing.deleted, listing_title, listing_path(transaction.listing_id)) || listing_title
- transaction_title = t('admin2.manage_transactions.transaction_for', link: link, transaction_id: transaction.id).html_safe
.content-card-header
= link_to t('admin2.manage_transactions.back_to_list'), admin2_transactions_reviews_manage_transactions_path(direction: :desc, sort: :last_activity), class: %i[content-card-header-title]
.content-card-section-container
%section.content-card-section
%h2
= transaction_title
= render 'header', pr: pr
= render 'status', transaction: transaction
- if pr.show_next_step? || pr.shipping? || pr.pickup?
= render 'next_steps', pr: pr, transaction: transaction, listing_title: listing_title
%section.transaction-payment-breakdown
.payment-breakdown-wrapper
%h2= t('admin2.manage_transactions.payment_breakdown')
= render 'buyer_pays', pr: pr, transaction: transaction
= render 'seller_receives', pr: pr, transaction: transaction
= render 'marketplace_receives', pr: pr
%section.transaction-conversation
.conversation-wrapper
%h2= t('admin2.manage_transactions.timeline_and_conversation', count: transaction.conversation&.messages&.size.to_i)
= render partial: 'message', collection: pr.messages_and_actions, as: :message_or_action, locals: { starter: pr.buyer }
- content_for :popup_layout do
= render 'mark_as_completed_popup', transaction: transaction
= render 'dispute_popup', transaction: transaction
= render 'mark_as_refunded_popup', transaction: transaction
= render 'dismiss_and_payout_popup', transaction: transaction
| {
"pile_set_name": "Github"
} |
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x",
"filename" : "3.png"
},
{
"idiom" : "universal",
"scale" : "2x",
"filename" : "3@2x.png"
},
{
"idiom" : "universal",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
} | {
"pile_set_name": "Github"
} |
// Copyright 2016 Canonical Ltd.
// Licensed under the LGPLv3, see LICENCE file for details.
// Package ansiterm provides a Writer that writes out the ANSI escape
// codes for color and styles.
package ansiterm
| {
"pile_set_name": "Github"
} |
/*
Package egoscale is a mapping for the Exoscale API (https://community.exoscale.com/api/compute/).
Requests and Responses
To build a request, construct the adequate struct. This library expects a pointer for efficiency reasons only. The response is a struct corresponding to the data at stake. E.g. DeployVirtualMachine gives a VirtualMachine, as a pointer as well to avoid big copies.
Then everything within the struct is not a pointer. Find below some examples of how egoscale may be used. If anything feels odd or unclear, please let us know: https://github.com/exoscale/egoscale/issues
req := &egoscale.DeployVirtualMachine{
Size: 10,
ServiceOfferingID: egoscale.MustParseUUID("..."),
TemplateID: egoscale.MustParseUUID("..."),
	ZoneID: egoscale.MustParseUUID("..."),
}
fmt.Println("Deployment started")
resp, err := cs.Request(req)
if err != nil {
panic(err)
}
vm := resp.(*egoscale.VirtualMachine)
fmt.Printf("Virtual Machine ID: %s\n", vm.ID)
This example deploys a virtual machine while controlling the job status as it goes. It enables a finer control over errors, e.g. HTTP timeout, and eventually a way to kill it off (from the client side).
req := &egoscale.DeployVirtualMachine{
Size: 10,
ServiceOfferingID: egoscale.MustParseUUID("..."),
TemplateID: egoscale.MustParseUUID("..."),
ZoneID: egoscale.MustParseUUID("..."),
}
vm := &egoscale.VirtualMachine{}
fmt.Println("Deployment started")
cs.AsyncRequest(req, func(jobResult *egoscale.AsyncJobResult, err error) bool {
if err != nil {
// any kind of error
panic(err)
}
// Keep waiting
if jobResult.JobStatus == egoscale.Pending {
fmt.Println("wait...")
return true
}
// Unmarshal the response into the response struct
if err := jobResult.Response(vm); err != nil {
// JSON unmarshaling error
panic(err)
}
// Stop waiting
return false
})
fmt.Printf("Virtual Machine ID: %s\n", vm.ID)
Debugging and traces
As this library is mostly an HTTP client, you can reuse all the existing tools around it.
cs := egoscale.NewClient("https://api.exoscale.com/compute", "EXO...", "...")
// sets a logger on stderr
cs.Logger = log.New(os.Stderr, "prefix", log.LstdFlags)
// activates the HTTP traces
cs.TraceOn()
Nota bene: when running the tests or the egoscale library via another tool, e.g. the exo cli, the environment variable EXOSCALE_TRACE=prefix does the above configuration for you. As a developer using egoscale as a library, you'll find it more convenient to plug your favorite io.Writer as it's a Logger.
APIs
All the available APIs on the server and provided by the API Discovery plugin.
cs := egoscale.NewClient("https://api.exoscale.com/compute", "EXO...", "...")
resp, err := cs.Request(&egoscale.ListAPIs{})
if err != nil {
panic(err)
}
for _, api := range resp.(*egoscale.ListAPIsResponse).API {
fmt.Printf("%s %s\n", api.Name, api.Description)
}
// Output:
// listNetworks Lists all available networks
// ...
Security Groups
Security Groups provide a way to isolate traffic to VMs. Rules are added via the two Authorization commands.
resp, err := cs.Request(&egoscale.CreateSecurityGroup{
Name: "Load balancer",
Description: "Open HTTP/HTTPS ports from the outside world",
})
securityGroup := resp.(*egoscale.SecurityGroup)
resp, err = cs.Request(&egoscale.AuthorizeSecurityGroupIngress{
Description: "SSH traffic",
SecurityGroupID: securityGroup.ID,
CidrList: []CIDR{
*egoscale.MustParseCIDR("0.0.0.0/0"),
*egoscale.MustParseCIDR("::/0"),
},
Protocol: "tcp",
StartPort: 22,
EndPort: 22,
})
// The modified SecurityGroup is returned
securityGroup := resp.(*egoscale.SecurityGroup)
// ...
err = client.BooleanRequest(&egoscale.DeleteSecurityGroup{
ID: securityGroup.ID,
})
// ...
Security Group also implement the generic List, Get and Delete interfaces (Listable and Deletable).
// List all Security Groups
sgs, _ := cs.List(&egoscale.SecurityGroup{})
for _, s := range sgs {
sg := s.(egoscale.SecurityGroup)
// ...
}
// Get a Security Group
sgQuery := &egoscale.SecurityGroup{Name: "Load balancer"}
	resp, err := cs.Get(sgQuery)
	if err != nil {
		...
	}
sg := resp.(*egoscale.SecurityGroup)
if err := cs.Delete(sg); err != nil {
...
}
// The SecurityGroup has been deleted
See: https://community.exoscale.com/documentation/compute/security-groups/
Zones
A Zone corresponds to a Data Center. You may list them. Zone implements the Listable interface, which let you perform a list in two different ways. The first exposes the underlying request while the second one hide them and you only manipulate the structs of your interest.
// Using ListZones request
req := &egoscale.ListZones{}
resp, err := client.Request(req)
if err != nil {
panic(err)
}
for _, zone := range resp.(*egoscale.ListZonesResponse) {
...
}
// Using client.List
zone := &egoscale.Zone{}
zones, err := client.List(zone)
if err != nil {
panic(err)
}
for _, z := range zones {
zone := z.(egoscale.Zone)
...
}
Elastic IPs
An Elastic IP is a way to attach an IP address to many Virtual Machines. The API side of the story configures the external environment, like the routing. Some work is required within the machine to properly configure the interfaces.
See: https://community.exoscale.com/documentation/compute/eip/
*/
package egoscale
| {
"pile_set_name": "Github"
} |
pattern: ^%{POSTFIX_PIPE}$
data: "95ECE24E0: to=<tom@example.com>, relay=dovecot, delay=0.12, delays=0.03/0/0/0.08, dsn=5.4.6, status=bounced (mail forwarding loop for tom@example.com)"
results:
postfix_queueid: 95ECE24E0
postfix_keyvalue_data: to=<tom@example.com>, relay=dovecot, delay=0.12, delays=0.03/0/0/0.08, dsn=5.4.6
postfix_status: bounced
postfix_pipe_response: mail forwarding loop for tom@example.com
| {
"pile_set_name": "Github"
} |
package testing
import (
"fmt"
"net/http"
"testing"
"github.com/gophercloud/gophercloud/openstack/identity/v3/domains"
th "github.com/gophercloud/gophercloud/testhelper"
"github.com/gophercloud/gophercloud/testhelper/client"
)
// ListOutput provides a single page of Domain results.
const ListOutput = `
{
"links": {
"next": null,
"previous": null,
"self": "http://example.com/identity/v3/domains"
},
"domains": [
{
"enabled": true,
"id": "2844b2a08be147a08ef58317d6471f1f",
"links": {
"self": "http://example.com/identity/v3/domains/2844b2a08be147a08ef58317d6471f1f"
},
"name": "domain one",
"description": "some description"
},
{
"enabled": true,
"id": "9fe1d3",
"links": {
"self": "https://example.com/identity/v3/domains/9fe1d3"
},
"name": "domain two"
}
]
}
`
// GetOutput provides a Get result.
const GetOutput = `
{
"domain": {
"enabled": true,
"id": "9fe1d3",
"links": {
"self": "https://example.com/identity/v3/domains/9fe1d3"
},
"name": "domain two"
}
}
`
// CreateRequest provides the input to a Create request.
const CreateRequest = `
{
"domain": {
"name": "domain two"
}
}
`
// UpdateRequest provides the input to as Update request.
const UpdateRequest = `
{
"domain": {
"description": "Staging Domain"
}
}
`
// UpdateOutput provides an update result.
const UpdateOutput = `
{
"domain": {
"enabled": true,
"id": "9fe1d3",
"links": {
"self": "https://example.com/identity/v3/domains/9fe1d3"
},
"name": "domain two",
"description": "Staging Domain"
}
}
`
// FirstDomain is the first domain in the List request.
var FirstDomain = domains.Domain{
Enabled: true,
ID: "2844b2a08be147a08ef58317d6471f1f",
Links: map[string]interface{}{
"self": "http://example.com/identity/v3/domains/2844b2a08be147a08ef58317d6471f1f",
},
Name: "domain one",
Description: "some description",
}
// SecondDomain is the second domain in the List request.
var SecondDomain = domains.Domain{
Enabled: true,
ID: "9fe1d3",
Links: map[string]interface{}{
"self": "https://example.com/identity/v3/domains/9fe1d3",
},
Name: "domain two",
}
// SecondDomainUpdated is how SecondDomain should look after an Update.
var SecondDomainUpdated = domains.Domain{
Enabled: true,
ID: "9fe1d3",
Links: map[string]interface{}{
"self": "https://example.com/identity/v3/domains/9fe1d3",
},
Name: "domain two",
Description: "Staging Domain",
}
// ExpectedDomainsSlice is the slice of domains expected to be returned from ListOutput.
var ExpectedDomainsSlice = []domains.Domain{FirstDomain, SecondDomain}
// HandleListDomainsSuccessfully creates an HTTP handler at `/domains` on the
// test handler mux that responds with a list of two domains.
func HandleListDomainsSuccessfully(t *testing.T) {
	th.Mux.HandleFunc("/domains", func(w http.ResponseWriter, r *http.Request) {
		th.TestMethod(t, r, "GET")
		th.TestHeader(t, r, "Accept", "application/json")
		th.TestHeader(t, r, "X-Auth-Token", client.TokenID)

		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusOK)
		// Fprint, not Fprintf: ListOutput is a payload, not a format string.
		fmt.Fprint(w, ListOutput)
	})
}
// HandleGetDomainSuccessfully creates an HTTP handler at `/domains/9fe1d3` on
// the test handler mux that responds with a single domain.
func HandleGetDomainSuccessfully(t *testing.T) {
	th.Mux.HandleFunc("/domains/9fe1d3", func(w http.ResponseWriter, r *http.Request) {
		th.TestMethod(t, r, "GET")
		th.TestHeader(t, r, "Accept", "application/json")
		th.TestHeader(t, r, "X-Auth-Token", client.TokenID)

		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusOK)
		// Fprint, not Fprintf: GetOutput is a payload, not a format string.
		fmt.Fprint(w, GetOutput)
	})
}
// HandleCreateDomainSuccessfully creates an HTTP handler at `/domains` on the
// test handler mux that tests domain creation.
func HandleCreateDomainSuccessfully(t *testing.T) {
	th.Mux.HandleFunc("/domains", func(w http.ResponseWriter, r *http.Request) {
		th.TestMethod(t, r, "POST")
		th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
		th.TestJSONRequest(t, r, CreateRequest)

		w.WriteHeader(http.StatusCreated)
		// Fprint, not Fprintf: GetOutput is data, not a format string.
		fmt.Fprint(w, GetOutput)
	})
}
// HandleDeleteDomainSuccessfully creates an HTTP handler at `/domains/9fe1d3`
// on the test handler mux that tests domain deletion. The handler asserts the
// method and auth token, then replies 204 No Content with an empty body.
func HandleDeleteDomainSuccessfully(t *testing.T) {
	th.Mux.HandleFunc("/domains/9fe1d3", func(w http.ResponseWriter, r *http.Request) {
		th.TestMethod(t, r, "DELETE")
		th.TestHeader(t, r, "X-Auth-Token", client.TokenID)

		w.WriteHeader(http.StatusNoContent)
	})
}
// HandleUpdateDomainSuccessfully creates an HTTP handler at `/domains/9fe1d3`
// on the test handler mux that tests domain update.
func HandleUpdateDomainSuccessfully(t *testing.T) {
	th.Mux.HandleFunc("/domains/9fe1d3", func(w http.ResponseWriter, r *http.Request) {
		th.TestMethod(t, r, "PATCH")
		th.TestHeader(t, r, "X-Auth-Token", client.TokenID)
		th.TestJSONRequest(t, r, UpdateRequest)

		w.WriteHeader(http.StatusOK)
		// Fprint, not Fprintf: UpdateOutput is data, not a format string.
		fmt.Fprint(w, UpdateOutput)
	})
}
| {
"pile_set_name": "Github"
} |
/**
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 2011-2013 ForgeRock AS. All Rights Reserved
*
* The contents of this file are subject to the terms
* of the Common Development and Distribution License
* (the License). You may not use this file except in
* compliance with the License.
*
* You can obtain a copy of the License at
* http://forgerock.org/license/CDDLv1.0.html
* See the License for the specific language governing
* permission and limitations under the License.
*
* When distributing Covered Code, include this CDDL
* Header Notice in each file and include the License file
* at http://forgerock.org/license/CDDLv1.0.html
* If applicable, add the following below the CDDL Header,
* with the fields enclosed by brackets [] replaced by
* your own identifying information:
* "Portions Copyrighted [year] [name of copyright owner]"
*
*/
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.1-661
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.01.21 at 10:40:04 AM PST
//
package com.sun.identity.entitlement.xacml3.core;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
 * <p>Java class for AssociatedAdviceType complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="AssociatedAdviceType">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element ref="{urn:oasis:names:tc:xacml:3.0:core:schema:wd-17}Advice" maxOccurs="unbounded"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 * <p>JAXB-generated container for a sequence of one or more Advice elements.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "AssociatedAdviceType", propOrder = {
    "advice"
})
public class AssociatedAdvice implements XACMLRootElement {

    // Live, lazily-initialized list backing getAdvice(); never null after
    // the first accessor call.
    @XmlElement(name = "Advice", required = true)
    protected List<Advice> advice;

    /**
     * Gets the value of the advice property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the advice property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getAdvice().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Advice }
     *
     *
     */
    public List<Advice> getAdvice() {
        if (advice == null) {
            advice = new ArrayList<Advice>();
        }
        return this.advice;
    }

    /**
     * Default toXML Method to Marshal Object into XML.
     * @return String - Marshaled Results into XML String.
     */
    public String toXML() {
        StringBuilder stringBuilder = new StringBuilder();
        // NOTE(review): nothing is ever appended to stringBuilder, so this
        // method always returns the empty string -- marshaling appears to be
        // unimplemented here. Confirm whether callers marshal via JAXB
        // elsewhere before relying on this method.
        // Return Marshaled Data.
        return stringBuilder.toString();
    }
}
| {
"pile_set_name": "Github"
} |
# -*- coding: utf-8 -*-
"""
AudioDataset
"""
import codecs
import os
import torch
import torchtext
from onmt.inputters.dataset_base import DatasetBase, PAD_WORD, BOS_WORD, \
EOS_WORD
class AudioDataset(DatasetBase):
    """ Dataset for data_type=='audio'

        Build `Example` objects, `Field` objects, and filter_pred function
        from audio corpus.

        Args:
            fields (dict): a dictionary of `torchtext.data.Field`.
            src_examples_iter (dict iter): preprocessed source example
                dictionary iterator.
            tgt_examples_iter (dict iter): preprocessed target example
                dictionary iterator.
            num_src_feats (int): number of source side features.
            num_tgt_feats (int): number of target side features.
            tgt_seq_length (int): maximum target sequence length.
            sample_rate (int): sample rate.
            window_size (float): window size for spectrogram in seconds.
            window_stride (float): window stride for spectrogram in seconds.
            window (str): window type for spectrogram generation.
            normalize_audio (bool): subtract spectrogram by mean and divide
                by std or not.
            use_filter_pred (bool): use a custom filter predicate to filter
                out examples?
    """

    def __init__(self, fields, src_examples_iter, tgt_examples_iter,
                 num_src_feats=0, num_tgt_feats=0,
                 tgt_seq_length=0, sample_rate=0,
                 window_size=0.0, window_stride=0.0, window=None,
                 normalize_audio=True, use_filter_pred=True):
        self.data_type = 'audio'

        self.sample_rate = sample_rate
        self.window_size = window_size
        self.window_stride = window_stride
        self.window = window
        self.normalize_audio = normalize_audio

        self.n_src_feats = num_src_feats
        self.n_tgt_feats = num_tgt_feats

        if tgt_examples_iter is not None:
            examples_iter = (self._join_dicts(src, tgt) for src, tgt in
                             zip(src_examples_iter, tgt_examples_iter))
        else:
            examples_iter = src_examples_iter

        # Peek at the first to see which fields are used.
        ex, examples_iter = self._peek(examples_iter)
        keys = ex.keys()

        out_fields = [(k, fields[k]) if k in fields else (k, None)
                      for k in keys]
        example_values = ([ex[k] for k in keys] for ex in examples_iter)
        out_examples = (self._construct_example_fromlist(
            ex_values, out_fields)
            for ex_values in example_values)
        # If out_examples is a generator, we need to save the filter_pred
        # function in serialization too, which would cause a problem when
        # `torch.save()`. Thus we materialize it as a list.
        out_examples = list(out_examples)

        def filter_pred(example):
            """ Keep examples whose target length is within bounds. """
            if tgt_examples_iter is not None:
                return 0 < len(example.tgt) <= tgt_seq_length
            else:
                return True

        filter_pred = filter_pred if use_filter_pred else lambda x: True

        super(AudioDataset, self).__init__(
            out_examples, out_fields, filter_pred
        )

    def sort_key(self, ex):
        """ Sort using duration time of the sound spectrogram. """
        return ex.src.size(1)

    @staticmethod
    def make_audio_examples_nfeats_tpl(path, audio_dir,
                                       sample_rate, window_size,
                                       window_stride, window,
                                       normalize_audio, truncate=None):
        """
        Args:
            path (str): location of a src file containing audio paths.
            audio_dir (str): location of source audio files.
            sample_rate (int): sample_rate.
            window_size (float) : window size for spectrogram in seconds.
            window_stride (float): window stride for spectrogram in seconds.
            window (str): window type for spectrogram generation.
            normalize_audio (bool): subtract spectrogram by mean and divide
                by std or not.
            truncate (int): maximum audio length (0 or None for unlimited).

        Returns:
            (example_dict iterator, num_feats) tuple
        """
        examples_iter = AudioDataset.read_audio_file(
            path, audio_dir, "src", sample_rate,
            window_size, window_stride, window,
            normalize_audio, truncate)
        num_feats = 0  # Source side(audio) has no features.

        return (examples_iter, num_feats)

    @staticmethod
    def read_audio_file(path, src_dir, side, sample_rate, window_size,
                        window_stride, window, normalize_audio,
                        truncate=None):
        """
        Args:
            path (str): location of a src file containing audio paths.
            src_dir (str): location of source audio files.
            side (str): 'src' or 'tgt'.
            sample_rate (int): expected sample_rate; asserted against the
                rate actually read from each file.
            window_size (float) : window size for spectrogram in seconds.
            window_stride (float): window stride for spectrogram in seconds.
            window (str): window type for spectrogram generation.
            normalize_audio (bool): subtract spectrogram by mean and divide
                by std or not.
            truncate (int): maximum audio length (0 or None for unlimited).

        Yields:
            a dictionary containing audio data for each line.
        """
        assert (src_dir is not None) and os.path.exists(src_dir),\
            "src_dir must be a valid directory if data_type is audio"

        # Heavy optional dependencies are imported lazily so that the rest
        # of the module works without them.
        import torchaudio
        import librosa
        import numpy as np

        with codecs.open(path, "r", "utf-8") as corpus_file:
            index = 0
            for line in corpus_file:
                audio_path = os.path.join(src_dir, line.strip())
                if not os.path.exists(audio_path):
                    # Fall back to treating the (stripped) line itself as a
                    # full path.  BUGFIX: the raw `line` kept its trailing
                    # newline, so the existence assert below always failed.
                    audio_path = line.strip()

                assert os.path.exists(audio_path), \
                    'audio path %s not found' % (line.strip())

                # BUGFIX: the loaded rate previously shadowed the
                # `sample_rate` parameter, turning the assert below into a
                # tautology (`sample_rate == sample_rate`).
                sound, sample_rate_ = torchaudio.load(audio_path)
                if truncate and truncate > 0:
                    if sound.size(0) > truncate:
                        continue

                assert sample_rate_ == sample_rate, \
                    'Sample rate of %s != -sample_rate (%d vs %d)' \
                    % (audio_path, sample_rate_, sample_rate)

                sound = sound.numpy()
                if len(sound.shape) > 1:
                    if sound.shape[1] == 1:
                        sound = sound.squeeze()
                    else:
                        sound = sound.mean(axis=1)  # average multiple channels

                n_fft = int(sample_rate * window_size)
                win_length = n_fft
                hop_length = int(sample_rate * window_stride)
                # STFT
                d = librosa.stft(sound, n_fft=n_fft, hop_length=hop_length,
                                 win_length=win_length, window=window)
                spect, _ = librosa.magphase(d)
                spect = np.log1p(spect)
                spect = torch.FloatTensor(spect)
                if normalize_audio:
                    mean = spect.mean()
                    std = spect.std()
                    spect.add_(-mean)
                    spect.div_(std)
                example_dict = {side: spect,
                                side + '_path': line.strip(),
                                'indices': index}
                index += 1

                yield example_dict

    @staticmethod
    def get_fields(n_src_features, n_tgt_features):
        """
        Args:
            n_src_features: the number of source features to
                create `torchtext.data.Field` for.
            n_tgt_features: the number of target features to
                create `torchtext.data.Field` for.

        Returns:
            A dictionary whose keys are strings and whose values
            are the corresponding Field objects.
        """
        fields = {}

        def make_audio(data, vocab):
            """ Batch spectrograms into one zero-padded (B,1,nfft,T) tensor. """
            nfft = data[0].size(0)
            t = max([t.size(1) for t in data])
            sounds = torch.zeros(len(data), 1, nfft, t)
            for i, spect in enumerate(data):
                sounds[i, :, :, 0:spect.size(1)] = spect
            return sounds

        fields["src"] = torchtext.data.Field(
            use_vocab=False, dtype=torch.float,
            postprocessing=make_audio, sequential=False)

        for j in range(n_src_features):
            fields["src_feat_" + str(j)] = \
                torchtext.data.Field(pad_token=PAD_WORD)

        fields["tgt"] = torchtext.data.Field(
            init_token=BOS_WORD, eos_token=EOS_WORD,
            pad_token=PAD_WORD)

        for j in range(n_tgt_features):
            fields["tgt_feat_" + str(j)] = \
                torchtext.data.Field(init_token=BOS_WORD, eos_token=EOS_WORD,
                                     pad_token=PAD_WORD)

        def make_src(data, vocab):
            """ Build the one-hot source copy-map alignment tensor. """
            src_size = max([t.size(0) for t in data])
            src_vocab_size = max([t.max() for t in data]) + 1
            alignment = torch.zeros(src_size, len(data), src_vocab_size)
            for i, sent in enumerate(data):
                for j, t in enumerate(sent):
                    alignment[j, i, t] = 1
            return alignment

        fields["src_map"] = torchtext.data.Field(
            use_vocab=False, dtype=torch.float,
            postprocessing=make_src, sequential=False)

        def make_tgt(data, vocab):
            """ Zero-pad target alignment vectors into a single tensor. """
            tgt_size = max([t.size(0) for t in data])
            alignment = torch.zeros(tgt_size, len(data)).long()
            for i, sent in enumerate(data):
                alignment[:sent.size(0), i] = sent
            return alignment

        fields["alignment"] = torchtext.data.Field(
            use_vocab=False, dtype=torch.long,
            postprocessing=make_tgt, sequential=False)

        fields["indices"] = torchtext.data.Field(
            use_vocab=False, dtype=torch.long,
            sequential=False)

        return fields

    @staticmethod
    def get_num_features(corpus_file, side):
        """
        For audio corpus, source side is in form of audio, thus
        no feature; while target side is in form of text, thus
        we can extract its text features.

        Args:
            corpus_file (str): file path to get the features.
            side (str): 'src' or 'tgt'.

        Returns:
            number of features on `side`.
        """
        if side == 'src':
            num_feats = 0
        else:
            with codecs.open(corpus_file, "r", "utf-8") as cf:
                f_line = cf.readline().strip().split()
                _, _, num_feats = AudioDataset.extract_text_features(f_line)

        return num_feats
| {
"pile_set_name": "Github"
} |
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.igormaznitsa</groupId>
<artifactId>jbbp-main-pom</artifactId>
<version>2.0.2</version>
<packaging>pom</packaging>
<modules>
<module>jbbp</module>
</modules>
<url>https://github.com/raydac/java-binary-block-parser</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<timestamp>${maven.build.timestamp}</timestamp>
<maven.build.timestamp.format>yyyyMMddHHmm</maven.build.timestamp.format>
<mvn.version>3.0</mvn.version>
<meta.version>1.1.2</meta.version>
<jbbp.version>2.0.2</jbbp.version>
<jbbp.plugin.version>${jbbp.version}</jbbp.plugin.version>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<junit5.version>5.5.1</junit5.version>
<jmh.version>1.21</jmh.version>
</properties>
<issueManagement>
<system>GitHub Issues</system>
<url>https://github.com/raydac/java-binary-block-parser/issues</url>
</issueManagement>
<inceptionYear>2014</inceptionYear>
<developers>
<developer>
<id>raydac</id>
<name>Igor Maznitsa</name>
<email>rrg4400@gmail.com</email>
<url>http://www.igormaznitsa.com</url>
<timezone>+3</timezone>
<roles>
<role>developer</role>
</roles>
</developer>
</developers>
<prerequisites>
<maven>3.0</maven>
</prerequisites>
<scm>
<url>https://github.com/raydac/java-binary-block-parser</url>
<connection>scm:git:git://github.com/raydac/java-binary-block-parser.git</connection>
<developerConnection>scm:git:git@github.com:raydac/java-binary-block-parser.git</developerConnection>
</scm>
<organization>
<name>Igor Maznitsa</name>
<url>http://www.igormaznitsa.com</url>
</organization>
<licenses>
<license>
<name>The Apache Software License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit5.version}</version>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit5.version}</version>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<version>${junit5.version}</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.6</version>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<profile>
<id>plugins</id>
<modules>
<module>jbbp-plugins</module>
</modules>
</profile>
<profile>
<id>bundle</id>
<activation>
<file>
<exists>${basedir}/src/assemble</exists>
</file>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
<execution>
<id>make-bundle</id>
<phase>install</phase>
<goals>
<goal>single</goal>
</goals>
<configuration>
<descriptors>
<descriptor>src/assemble/bundle.xml</descriptor>
</descriptors>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>properties-maven-plugin</artifactId>
<version>1.0.0</version>
</plugin>
<plugin>
<groupId>org.apache.maven</groupId>
<artifactId>maven-clean-plugin</artifactId>
<version>3.1.0</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>animal-sniffer-maven-plugin</artifactId>
<version>1.18</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>3.1.1</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>1.6</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>3.1.1</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.1.0</version>
</plugin>
<plugin>
<groupId>com.igormaznitsa</groupId>
<artifactId>uber-pom</artifactId>
<version>1.0.3</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.1.2</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>3.0.0-M3</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<compilerArgument>-Xlint:all</compilerArgument>
<verbose>false</verbose>
<showDeprecation>true</showDeprecation>
<showWarnings>true</showWarnings>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<forkCount>1</forkCount>
<reuseForks>false</reuseForks>
<useFile>false</useFile>
<trimStackTrace>false</trimStackTrace>
<systemPropertyVariables>
<jbbp.target.folder>${project.build.directory}${file.separator}tmpFolders</jbbp.target.folder>
<maven.jbbp.plugin.version>${project.version}</maven.jbbp.plugin.version>
</systemPropertyVariables>
</configuration>
<dependencies>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit5.version}</version>
</dependency>
</dependencies>
</plugin>
</plugins>
</build>
</project>
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2008 The DragonFly Project. All rights reserved.
*
* This code is derived from software contributed to The DragonFly Project
* by Matthew Dillon <dillon@backplane.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. Neither the name of The DragonFly Project nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific, prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
/*
* vknet [-C] [-b local-bridge] [-B remote-bridge] [-r delay[:retries]]
* local-spec [user@]remote[:remote-spec]
* vknet -S [-b local-bridge] local-spec (server mode)
*
* Connect a SOCK_SEQPACKET socket or TUN device on the local host with
* a SOCK_SEQPACKET socket or TUN device on the remote host through a SSH
* connection. When a TUN device is specified it may be optionally bridged.
*
* This program expects packetized reads and writes on the local and remote
* sides and will re-block them over the SSH stream.
*/
#include "vknet.h"
/* Forward declarations for the helpers defined below. */
static void vknet_blastaway(ioinfo_t ios, ioinfo_t iod);
static void *vknet_stream(void *arg);
static void vknet_connect(ioinfo_t ios,
			const char *localSide, const char *localBridge);
static pid_t vknet_execssh(int fdin, int fdout, int compressOpt,
			const char *remoteSide, const char *remoteBridge);
static void usage(void);

/* Held while the pump threads are created; gates their startup (see
 * vknet_blastaway() / vknet_stream()). */
static pthread_mutex_t MasterLock;
/*
 * Entry point: parse options, connect the local side, reach the remote
 * side over ssh (or use stdin/stdout in -S server mode), then pump
 * packets until one side dies.  With -r, tear down and retry.
 */
int
main(int ac, char **av)
{
	int compressOpt = 0;		/* -C: ssh compression */
	int remoteOpt = 0;		/* -S: server mode */
	const char *localBridge = NULL;
	const char *remoteBridge = NULL;
	const char *localSide;
	const char *remoteSide;
	char *ptr;
	int c;
	int retriesOpt = -1;		/* -1: unlimited retries */
	int timeoutOpt = -1;		/* -1: no retry at all */
	pid_t sshpid = -1;
	pid_t p;
	struct ioinfo ios;		/* local side descriptors */
	struct ioinfo iod;		/* remote (ssh/stdio) descriptors */

	while ((c = getopt(ac, av, "b:B:r:CS")) != -1) {
		switch (c) {
		case 'b':
			localBridge = optarg;
			break;
		case 'B':
			remoteBridge = optarg;
			break;
		case 'r':
			/* -r delay[:retries] */
			timeoutOpt = strtol(optarg, &ptr, 0);
			if (ptr && *ptr == ':')
				retriesOpt = strtol(ptr + 1, NULL, 0);
			break;
		case 'S':
			remoteOpt = 1;
			break;
		case 'C':
			compressOpt = 1;
			break;
		default:
			usage();
		}
	}
	av += optind;
	ac -= optind;

	/*
	 * Local and remote arguments.
	 */
	if (remoteOpt) {
		if (ac != 1)
			usage();
		localSide = av[0];
		remoteSide = NULL;
	} else {
		if (ac != 2)
			usage();
		localSide = av[0];
		remoteSide = av[1];
	}

	pthread_mutex_init(&MasterLock, NULL);
retry:
	/*
	 * Setup connections
	 */
	vknet_connect(&ios, localSide, localBridge);
	if (remoteOpt) {
		/* Server mode: the transport is our own stdin/stdout. */
		iod.fdin = 0;
		iod.fdout = 1;
	} else {
		int fds[2];

		if (pipe(fds) < 0) {
			perror("pipe");
			exit(1);
		}
		/*
		 * NOTE(review): fds[1] is handed to the child for both its
		 * stdin and stdout while we keep fds[0] for both directions.
		 * This relies on pipe() being full-duplex as on
		 * DragonFly/BSD -- confirm before porting elsewhere.
		 */
		sshpid = vknet_execssh(fds[1], fds[1], compressOpt,
				       remoteSide, remoteBridge);
		close(fds[1]);
		iod.fdin = fds[0];
		iod.fdout = fds[0];
	}

	/*
	 * Blast away, timeout/retry on failure
	 */
	vknet_blastaway(&ios, &iod);

	/*
	 * Terminate child process
	 */
	if (sshpid > 0) {
		if (kill(sshpid, SIGTERM) != 0)
			perror("kill");
		/* Reap the child; restart waitpid() on EINTR only. */
		while ((p = waitpid(sshpid, NULL, 0)) != sshpid) {
			if (p < 0 && errno != EINTR)
				break;
		}
		sshpid = -1;
	}

	/*
	 * Handle timeout/retries
	 */
	if (timeoutOpt >= 0 && retriesOpt != 0) {
		printf("timeout %d retries %d\n", timeoutOpt, retriesOpt);
		if (timeoutOpt > 0)
			sleep(timeoutOpt);
		if (retriesOpt > 0)
			--retriesOpt;
		goto retry;
	}
	exit(0);
}
/*
 * Start one pump thread per direction and wait for both to exit.
 * MasterLock is held across thread creation so neither thread begins
 * running (vknet_stream blocks on the lock first) until both streaminfo
 * structures -- including the cross-linked "other" pointers -- are
 * fully initialized.
 */
static void
vknet_blastaway(ioinfo_t ios, ioinfo_t iod)
{
	struct streaminfo stream1;
	struct streaminfo stream2;

	pthread_mutex_lock(&MasterLock);
	stream1.fdin = ios->fdin;
	stream1.fdout = iod->fdout;
	stream1.flags = REBLOCK_OUT;	/* local -> remote: add framing */
	stream1.other = &stream2;
	stream2.fdin = iod->fdin;
	stream2.fdout = ios->fdout;
	stream2.flags = REBLOCK_IN;	/* remote -> local: strip framing */
	stream2.other = &stream1;
	pthread_create(&stream1.thread, NULL, vknet_stream, &stream1);
	pthread_create(&stream2.thread, NULL, vknet_stream, &stream2);
	pthread_mutex_unlock(&MasterLock);
	pthread_join(stream1.thread, NULL);
	pthread_join(stream2.thread, NULL);
}
/*
 * Transfer packets between two descriptors.  Depending on stream->flags
 * the input is either framed (REBLOCK_IN: a blkhead header followed by
 * payload, read over the byte stream) or packetized (one read == one
 * packet), and the output is either framed (REBLOCK_OUT) or raw.
 * On any error/EOF both descriptors are closed and the peer thread is
 * cancelled so the partner direction shuts down too.
 */
static
void *
vknet_stream(void *arg)
{
	streaminfo_t stream = arg;
	struct blkhead head;
	u_int8_t *pkt;
	int bytes;
	int n;
	int r;

	/*
	 * Synchronize with master thread, then loop
	 */
	pthread_mutex_lock(&MasterLock);
	pthread_mutex_unlock(&MasterLock);

	pkt = malloc(MAXPKT);
	for (;;) {
		/*
		 * Input side
		 */
		if (stream->flags & REBLOCK_IN) {
			/*
			 * Read the little-endian header (looping over short
			 * reads), validate magic and length, then read
			 * exactly head.bytes of payload.
			 */
			bytes = sizeof(head);
			for (n = 0; n < bytes; n += r) {
				r = read(stream->fdin, (char *)&head + n,
					 bytes - n);
				if (r <= 0)
					break;
			}
			if (n != bytes)
				break;
			if (le32toh(head.magic) != MAGIC)
				break;
			bytes = le32toh(head.bytes);
			if (bytes <= 0 || bytes > MAXPKT)
				break;
			for (n = 0; n < bytes; n += r) {
				r = read(stream->fdin, pkt + n, bytes - n);
				if (r <= 0)
					break;
			}
			if (n != bytes)
				break;
		} else {
			/* Packetized source: one read per packet. */
			bytes = read(stream->fdin, pkt, MAXPKT);
			if (bytes <= 0)
				break;
		}

		/*
		 * Output side
		 */
		if (stream->flags & REBLOCK_OUT) {
			/* Prepend framing header for the stream transport. */
			head.magic = htole32(MAGIC);
			head.bytes = htole32(bytes);
			if (write(stream->fdout, &head, sizeof(head)) != sizeof(head))
				break;
			if (write(stream->fdout, pkt, bytes) != bytes)
				break;
		} else {
			if (write(stream->fdout, pkt, bytes) != bytes)
				break;
		}
	}
	free(pkt);
	close(stream->fdin);
	close(stream->fdout);
	pthread_cancel(stream->other->thread);
	pthread_exit(NULL);
}
/*
 * vknet_connect() - Connect to local side, optionally find or bridge the tap
 *		     interface.
 *
 * localSide may be "auto" (probe /dev/tapN), "tapN", an arbitrary device
 * path, or -- as a last resort -- a unix-domain SOCK_SEQPACKET socket path.
 * On success io->fdin/io->fdout are both set to the opened descriptor.
 */
static void
vknet_connect(ioinfo_t io, const char *localSide, const char *localBridge)
{
	struct ifreq ifr;
	struct ifaliasreq ifra;
	char *buf = NULL;
	int tap_fd;
	int tap_unit;
	int i;
	int s;
	int flags;

	tap_unit = -1;
	tap_fd = -1;
	if (strcmp(localSide, "auto") == 0) {
		/* Probe /dev/tap0, /dev/tap1, ... until one opens or
		 * the device nodes run out (ENOENT). */
		for (i = 0; ; ++i) {
			asprintf(&buf, "/dev/tap%d", i);
			tap_fd = open(buf, O_RDWR | O_NONBLOCK);
			free(buf);
			if (tap_fd >= 0 || errno == ENOENT) {
				tap_unit = i;
				break;
			}
		}
	} else if (strncmp(localSide, "tap", 3) == 0) {
		asprintf(&buf, "/dev/%s", localSide);
		tap_fd = open(buf, O_RDWR | O_NONBLOCK);
		tap_unit = strtol(localSide + 3, NULL, 10);
		free(buf);
	} else if ((tap_fd = open(localSide, O_RDWR | O_NONBLOCK)) >= 0) {
		/* Arbitrary device path: trailing digits give the unit. */
		const char *ptr = localSide + strlen(localSide);
		while (ptr > localSide && ptr[-1] >= '0' && ptr[-1] <= '9')
			--ptr;
		tap_unit = strtol(ptr, NULL, 10);
	} else {
		/* Fall back to a unix-domain seqpacket socket. */
		struct sockaddr_un sunx;
		int len;

		snprintf(sunx.sun_path, sizeof(sunx.sun_path), "%s", localSide);
		len = offsetof(struct sockaddr_un,
			       sun_path[strlen(sunx.sun_path)]);
		++len;	/* include nul */
		sunx.sun_family = AF_UNIX;
		sunx.sun_len = len;
		tap_fd = socket(AF_UNIX, SOCK_SEQPACKET, 0);
		if (tap_fd >= 0) {
			if (connect(tap_fd, (void *)&sunx, len) < 0) {
				close(tap_fd);
				tap_fd = -1;
			}
		}
	}
	if (tap_fd < 0) {
		err(1, "Unable to connect to %s", localSide);
		/* NOT REACHED */
	}
	fcntl(tap_fd, F_SETFL, 0);	/* back to blocking mode */
	io->fdin = tap_fd;
	io->fdout = tap_fd;

	/*
	 * If this isn't a TAP device we are done.
	 */
	if (tap_unit < 0)
		return;

	/*
	 * Bring up the TAP interface
	 */
	bzero(&ifr, sizeof(ifr));
	bzero(&ifra, sizeof(ifra));
	snprintf(ifr.ifr_name, sizeof(ifr.ifr_name), "tap%d", tap_unit);
	snprintf(ifra.ifra_name, sizeof(ifra.ifra_name), "tap%d", tap_unit);
	s = socket(AF_INET, SOCK_DGRAM, 0);

#if 0
	/*
	 * Set the interface address if in Secure mode.
	 */
	if (SecureOpt) {
		struct sockaddr_in *in;

		in = (void *)&ifra.ifra_addr;
		in->sin_family = AF_INET;
		in->sin_len = sizeof(ifra.ifra_addr);
		in->sin_addr = NetAddress;
		in = (void *)&ifra.ifra_mask;
		in->sin_family = AF_INET;
		in->sin_len = sizeof(ifra.ifra_mask);
		in->sin_addr = NetMask;
		if (ioctl(s, SIOCAIFADDR, &ifra) < 0) {
			perror("Unable to set address on tap interface");
			exit(1);
		}
	}
#endif

	/*
	 * Turn up the interface
	 */
	flags = IFF_UP;
	if (ioctl(s, SIOCGIFFLAGS, &ifr) >= 0) {
		bzero(&ifr, sizeof(ifr));
		snprintf(ifr.ifr_name, sizeof(ifr.ifr_name), "tap%d", tap_unit);
		ifr.ifr_flags |= flags & 0xFFFF;
		ifr.ifr_flagshigh |= flags >> 16;
		if (ioctl(s, SIOCSIFFLAGS, &ifr) < 0) {
			perror("Unable to set IFF_UP on tap interface");
			exit(1);
		}
	}

	/*
	 * If a bridge was specified associate the tap interface with the
	 * bridge.
	 */
	if (localBridge) {
		struct ifbreq ifbr;
		struct ifdrv ifd;

		/*
		 * Create the bridge if necessary.
		 */
		bzero(&ifr, sizeof(ifr));
		snprintf(ifr.ifr_name, sizeof(ifr.ifr_name), "%s", localBridge);
		if (ioctl(s, SIOCIFCREATE, &ifr) < 0) {
			if (errno != EEXIST) {
				perror("Unable to create bridge interface");
				exit(1);
			}
		}

		/*
		 * Add the tap interface to the bridge
		 */
		bzero(&ifbr, sizeof(ifbr));
		snprintf(ifbr.ifbr_ifsname, sizeof(ifbr.ifbr_ifsname),
			 "tap%d", tap_unit);
		bzero(&ifd, sizeof(ifd));
		snprintf(ifd.ifd_name, sizeof(ifd.ifd_name), "%s", localBridge);
		ifd.ifd_cmd = BRDGADD;
		ifd.ifd_len = sizeof(ifbr);
		ifd.ifd_data = &ifbr;
		if (ioctl(s, SIOCSDRVSPEC, &ifd) < 0) {
			if (errno != EEXIST) {
				perror("Unable to add tap ifc to bridge!");
				exit(1);
			}
		}
	}
	close(s);
}
/*
 * Connect to the remote machine with ssh and set up a stream.
 * Forks; the parent returns the child's pid, the child wires fdin/fdout
 * to stdin/stdout and execs "ssh ... exec vknet -S ..." on the remote.
 * remoteSide is "[user@]host[:remote-spec]"; the remote spec defaults
 * to /var/run/vknet.
 */
static pid_t
vknet_execssh(int fdin, int fdout, int compressOpt,
	      const char *remoteSide, const char *remoteBridge)
{
	char *remoteHost;
	char *remotePath;
	const char *av[24];
	int ac;
	pid_t pid;

	/*
	 * Fork / parent returns.
	 */
	if ((pid = fork()) > 0)
		return pid;
	if (pid < 0) {
		perror("fork");
		exit(1);
	}

	/*
	 * Setup stdin, stdout
	 */
	assert(fdin > 2);
	assert(fdout > 2);
	dup2(fdin, 0);
	dup2(fdout, 1);
	close(fdin);
	close(fdout);

	/*
	 * Set up arguments.  (The strdup() allocations below are never
	 * freed; that is fine because execv() replaces the image.)
	 */
	remoteHost = strdup(remoteSide);
	if ((remotePath = strchr(remoteHost, ':')) != NULL) {
		*remotePath++ = 0;
	} else {
		remotePath = strdup("/var/run/vknet");
	}
	ac = 0;
	av[ac++] = "ssh";
	if (compressOpt)
		av[ac++] = "-C";
	av[ac++] = "-x";
	av[ac++] = "-T";
	av[ac++] = "-e";
	av[ac++] = "none";
	av[ac++] = remoteHost;
	av[ac++] = "exec";
	av[ac++] = "vknet";
	av[ac++] = "-S";
	if (remoteBridge) {
		av[ac++] = "-b";
		av[ac++] = remoteBridge;
	}
	av[ac++] = remotePath;
	av[ac++] = NULL;
	execv("/usr/bin/ssh", (void *)av);
	exit(1);	/* only reached if execv() fails */
}
/*
 * Misc: print the usage synopsis on stderr and exit non-zero.
 */
static
void
usage(void)
{
	fprintf(stderr,
		"vknet [-C] [-b local-bridge] [-B remote-bridge] [-r delay[:retries]]\n"
		"      local-spec [user@]remote[:remote-spec]\n"
		"vknet -S [-b local-bridge] local-spec\n"
	);
	exit(1);
}
| {
"pile_set_name": "Github"
} |
#ifndef OLD_FILM_H_
#define OLD_FILM_H_

#include "video_settings.h"
#include "bitmap.h"
#include "video_aux.h"

/*
 * Absolute-difference / absolute-value helpers.
 *
 * BUGFIX: arguments are now fully parenthesized.  The previous
 * definitions expanded expression arguments incorrectly, e.g.
 * ABS(a - b) became ((a - b > 0) ? a - b : -a - b), negating only 'a'.
 * Note each argument may still be evaluated twice.
 */
#define ABSDIFF(x, y) (((x) > (y)) ? (x) - (y) : (y) - (x))
#define ABS(x) (((x) > 0) ? (x) : -(x))

// Arrays to store input image data
// NOTE(review): 'static' arrays defined in a header give every including
// translation unit its own private copy -- confirm this header is included
// from a single source file before relying on these as shared buffers.
static unsigned char R_in[MAX_HEIGHT][MAX_WIDTH];
static unsigned char G_in[MAX_HEIGHT][MAX_WIDTH];
static unsigned char B_in[MAX_HEIGHT][MAX_WIDTH];

// Arrays to store output image data
static unsigned char R_out[MAX_HEIGHT][MAX_WIDTH];
static unsigned char G_out[MAX_HEIGHT][MAX_WIDTH];
static unsigned char B_out[MAX_HEIGHT][MAX_WIDTH];

// A pixel paired with its edge-detection flag.
typedef struct {
	rgb_pixel pix;
	unsigned char edge;
} rgb_edge;

void video_2d_filter_linebuffer(rgb_pixel in_pix[MAX_HEIGHT][MAX_WIDTH],
		rgb_pixel out_pix[MAX_HEIGHT][MAX_WIDTH]);

#endif
| {
"pile_set_name": "Github"
} |
// mksyscall.pl -l32 syscall_linux.go syscall_linux_386.go
// MACHINE GENERATED BY THE COMMAND ABOVE; DO NOT EDIT
// +build 386,linux
package unix
import (
"syscall"
"unsafe"
)
var _ syscall.Errno
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Linkat creates a hard link for oldpath (relative to olddirfd) at newpath
// (relative to newdirfd) via SYS_LINKAT; see linkat(2).
func Linkat(olddirfd int, oldpath string, newdirfd int, newpath string, flags int) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(oldpath)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(newpath)
	if err != nil {
		return
	}
	_, _, e1 := Syscall6(SYS_LINKAT, uintptr(olddirfd), uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)), uintptr(flags), 0)
	use(unsafe.Pointer(_p0))
	use(unsafe.Pointer(_p1))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// openat is the raw, unexported wrapper around SYS_OPENAT: it opens path
// relative to dirfd and returns the new file descriptor.
func openat(dirfd int, path string, flags int, mode uint32) (fd int, err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	r0, _, e1 := Syscall6(SYS_OPENAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(flags), uintptr(mode), 0, 0)
	use(unsafe.Pointer(_p0))
	fd = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func ppoll(fds *PollFd, nfds int, timeout *Timespec, sigmask *Sigset_t) (n int, err error) {
r0, _, e1 := Syscall6(SYS_PPOLL, uintptr(unsafe.Pointer(fds)), uintptr(nfds), uintptr(unsafe.Pointer(timeout)), uintptr(unsafe.Pointer(sigmask)), 0, 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Readlinkat wraps readlinkat(2): read the target of the symlink at
// path (relative to dirfd) into buf, returning the number of bytes
// written in n.  When buf is empty, &_zero is passed instead of a
// pointer into the slice so the kernel still receives a valid address
// with length 0.
func Readlinkat(dirfd int, path string, buf []byte) (n int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 unsafe.Pointer
if len(buf) > 0 {
_p1 = unsafe.Pointer(&buf[0])
} else {
_p1 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_READLINKAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(buf)), 0, 0)
use(unsafe.Pointer(_p0))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Symlinkat(oldpath string, newdirfd int, newpath string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(oldpath)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(newpath)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_SYMLINKAT, uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)))
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Unlinkat(dirfd int, path string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UNLINKAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(flags))
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func utimes(path string, times *[2]Timeval) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UTIMES, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(times)), 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func utimensat(dirfd int, path string, times *[2]Timespec, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_UTIMENSAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(times)), uintptr(flags), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func futimesat(dirfd int, path *byte, times *[2]Timeval) (err error) {
_, _, e1 := Syscall(SYS_FUTIMESAT, uintptr(dirfd), uintptr(unsafe.Pointer(path)), uintptr(unsafe.Pointer(times)))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Getcwd wraps getcwd(2): fill buf with the current working directory
// and return the byte count in n.  An empty buf is represented by a
// pointer to _zero so the syscall always gets a valid address.
func Getcwd(buf []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_GETCWD, uintptr(_p0), uintptr(len(buf)), 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// wait4 is the low-level wait4(2) wrapper: wait for a state change in
// pid, storing the raw status in *wstatus and resource usage in
// *rusage, and returning the waited-for pid in wpid.  Lowercase:
// callers in this package wrap it with higher-level Wait semantics.
func wait4(pid int, wstatus *_C_int, options int, rusage *Rusage) (wpid int, err error) {
r0, _, e1 := Syscall6(SYS_WAIT4, uintptr(pid), uintptr(unsafe.Pointer(wstatus)), uintptr(options), uintptr(unsafe.Pointer(rusage)), 0, 0)
wpid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func ptrace(request int, pid int, addr uintptr, data uintptr) (err error) {
_, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func reboot(magic1 uint, magic2 uint, cmd int, arg string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(arg)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_REBOOT, uintptr(magic1), uintptr(magic2), uintptr(cmd), uintptr(unsafe.Pointer(_p0)), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// mount wraps mount(2).  The three Go strings (source, target, fstype)
// are each converted to NUL-terminated byte pointers; data is passed
// through as a raw *byte since its interpretation is filesystem-specific
// and may be nil.
func mount(source string, target string, fstype string, flags uintptr, data *byte) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(source)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(target)
if err != nil {
return
}
var _p2 *byte
_p2, err = BytePtrFromString(fstype)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_MOUNT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(unsafe.Pointer(_p2)), uintptr(flags), uintptr(unsafe.Pointer(data)), 0)
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
use(unsafe.Pointer(_p2))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Acct(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_ACCT, uintptr(unsafe.Pointer(_p0)), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Adjtimex(buf *Timex) (state int, err error) {
r0, _, e1 := Syscall(SYS_ADJTIMEX, uintptr(unsafe.Pointer(buf)), 0, 0)
state = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Chdir(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_CHDIR, uintptr(unsafe.Pointer(_p0)), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Chroot(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_CHROOT, uintptr(unsafe.Pointer(_p0)), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func ClockGettime(clockid int32, time *Timespec) (err error) {
_, _, e1 := Syscall(SYS_CLOCK_GETTIME, uintptr(clockid), uintptr(unsafe.Pointer(time)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Close(fd int) (err error) {
_, _, e1 := Syscall(SYS_CLOSE, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Dup(oldfd int) (fd int, err error) {
r0, _, e1 := Syscall(SYS_DUP, uintptr(oldfd), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Dup3(oldfd int, newfd int, flags int) (err error) {
_, _, e1 := Syscall(SYS_DUP3, uintptr(oldfd), uintptr(newfd), uintptr(flags))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func EpollCreate(size int) (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_EPOLL_CREATE, uintptr(size), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func EpollCreate1(flag int) (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_EPOLL_CREATE1, uintptr(flag), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func EpollCtl(epfd int, op int, fd int, event *EpollEvent) (err error) {
_, _, e1 := RawSyscall6(SYS_EPOLL_CTL, uintptr(epfd), uintptr(op), uintptr(fd), uintptr(unsafe.Pointer(event)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Exit terminates the entire process via SYS_EXIT_GROUP (all threads),
// not SYS_EXIT (single thread; see exitThread below).  It does not
// return, so the syscall's result is ignored.
func Exit(code int) {
Syscall(SYS_EXIT_GROUP, uintptr(code), 0, 0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Faccessat(dirfd int, path string, mode uint32, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_FACCESSAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(flags), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Fallocate wraps fallocate(2).  On 386 each int64 argument is split
// into two 32-bit syscall arguments (low word, then high word via
// >>32), which is why off and len each occupy two slots of Syscall6.
func Fallocate(fd int, mode uint32, off int64, len int64) (err error) {
_, _, e1 := Syscall6(SYS_FALLOCATE, uintptr(fd), uintptr(mode), uintptr(off), uintptr(off>>32), uintptr(len), uintptr(len>>32))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchdir(fd int) (err error) {
_, _, e1 := Syscall(SYS_FCHDIR, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchmod(fd int, mode uint32) (err error) {
_, _, e1 := Syscall(SYS_FCHMOD, uintptr(fd), uintptr(mode), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchmodat(dirfd int, path string, mode uint32, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_FCHMODAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(flags), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchownat(dirfd int, path string, uid int, gid int, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_FCHOWNAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid), uintptr(flags), 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func fcntl(fd int, cmd int, arg int) (val int, err error) {
r0, _, e1 := Syscall(SYS_FCNTL, uintptr(fd), uintptr(cmd), uintptr(arg))
val = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fdatasync(fd int) (err error) {
_, _, e1 := Syscall(SYS_FDATASYNC, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Flock(fd int, how int) (err error) {
_, _, e1 := Syscall(SYS_FLOCK, uintptr(fd), uintptr(how), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fsync(fd int) (err error) {
_, _, e1 := Syscall(SYS_FSYNC, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Getdents reads directory entries into buf.  Despite the name it
// invokes SYS_GETDENTS64, so buf is filled with 64-bit dirent records.
// An empty buf is represented by &_zero.
func Getdents(fd int, buf []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_GETDENTS64, uintptr(fd), uintptr(_p0), uintptr(len(buf)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getpgid(pid int) (pgid int, err error) {
r0, _, e1 := RawSyscall(SYS_GETPGID, uintptr(pid), 0, 0)
pgid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getpid() (pid int) {
r0, _, _ := RawSyscall(SYS_GETPID, 0, 0, 0)
pid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getppid() (ppid int) {
r0, _, _ := RawSyscall(SYS_GETPPID, 0, 0, 0)
ppid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getpriority(which int, who int) (prio int, err error) {
r0, _, e1 := Syscall(SYS_GETPRIORITY, uintptr(which), uintptr(who), 0)
prio = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getrusage(who int, rusage *Rusage) (err error) {
_, _, e1 := RawSyscall(SYS_GETRUSAGE, uintptr(who), uintptr(unsafe.Pointer(rusage)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Gettid() (tid int) {
r0, _, _ := RawSyscall(SYS_GETTID, 0, 0, 0)
tid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getxattr(path string, attr string, dest []byte) (sz int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(attr)
if err != nil {
return
}
var _p2 unsafe.Pointer
if len(dest) > 0 {
_p2 = unsafe.Pointer(&dest[0])
} else {
_p2 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_GETXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(dest)), 0, 0)
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
sz = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func InotifyAddWatch(fd int, pathname string, mask uint32) (watchdesc int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(pathname)
if err != nil {
return
}
r0, _, e1 := Syscall(SYS_INOTIFY_ADD_WATCH, uintptr(fd), uintptr(unsafe.Pointer(_p0)), uintptr(mask))
use(unsafe.Pointer(_p0))
watchdesc = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func InotifyInit1(flags int) (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_INOTIFY_INIT1, uintptr(flags), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func InotifyRmWatch(fd int, watchdesc uint32) (success int, err error) {
r0, _, e1 := RawSyscall(SYS_INOTIFY_RM_WATCH, uintptr(fd), uintptr(watchdesc), 0)
success = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Kill(pid int, sig syscall.Signal) (err error) {
_, _, e1 := RawSyscall(SYS_KILL, uintptr(pid), uintptr(sig), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Klogctl(typ int, buf []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_SYSLOG, uintptr(typ), uintptr(_p0), uintptr(len(buf)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Listxattr(path string, dest []byte) (sz int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 unsafe.Pointer
if len(dest) > 0 {
_p1 = unsafe.Pointer(&dest[0])
} else {
_p1 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_LISTXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(dest)))
use(unsafe.Pointer(_p0))
sz = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mkdirat(dirfd int, path string, mode uint32) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_MKDIRAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode))
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mknodat(dirfd int, path string, mode uint32, dev int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_MKNODAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(dev), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Nanosleep(time *Timespec, leftover *Timespec) (err error) {
_, _, e1 := Syscall(SYS_NANOSLEEP, uintptr(unsafe.Pointer(time)), uintptr(unsafe.Pointer(leftover)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func PivotRoot(newroot string, putold string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(newroot)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(putold)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_PIVOT_ROOT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0)
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func prlimit(pid int, resource int, old *Rlimit, newlimit *Rlimit) (err error) {
_, _, e1 := RawSyscall6(SYS_PRLIMIT64, uintptr(pid), uintptr(resource), uintptr(unsafe.Pointer(old)), uintptr(unsafe.Pointer(newlimit)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Prctl(option int, arg2 uintptr, arg3 uintptr, arg4 uintptr, arg5 uintptr) (err error) {
_, _, e1 := Syscall6(SYS_PRCTL, uintptr(option), uintptr(arg2), uintptr(arg3), uintptr(arg4), uintptr(arg5), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func read(fd int, p []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(_p0), uintptr(len(p)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Removexattr(path string, attr string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(attr)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_REMOVEXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0)
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Renameat(olddirfd int, oldpath string, newdirfd int, newpath string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(oldpath)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(newpath)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_RENAMEAT, uintptr(olddirfd), uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)), 0, 0)
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setdomainname(p []byte) (err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_SETDOMAINNAME, uintptr(_p0), uintptr(len(p)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Sethostname(p []byte) (err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_SETHOSTNAME, uintptr(_p0), uintptr(len(p)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setpgid(pid int, pgid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETPGID, uintptr(pid), uintptr(pgid), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setsid() (pid int, err error) {
r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0)
pid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Settimeofday(tv *Timeval) (err error) {
_, _, e1 := RawSyscall(SYS_SETTIMEOFDAY, uintptr(unsafe.Pointer(tv)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setns(fd int, nstype int) (err error) {
_, _, e1 := Syscall(SYS_SETNS, uintptr(fd), uintptr(nstype), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setpriority(which int, who int, prio int) (err error) {
_, _, e1 := Syscall(SYS_SETPRIORITY, uintptr(which), uintptr(who), uintptr(prio))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setxattr(path string, attr string, data []byte, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(attr)
if err != nil {
return
}
var _p2 unsafe.Pointer
if len(data) > 0 {
_p2 = unsafe.Pointer(&data[0])
} else {
_p2 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall6(SYS_SETXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(data)), uintptr(flags), 0)
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Sync() {
Syscall(SYS_SYNC, 0, 0, 0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Sysinfo(info *Sysinfo_t) (err error) {
_, _, e1 := RawSyscall(SYS_SYSINFO, uintptr(unsafe.Pointer(info)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Tee wraps tee(2), duplicating up to len bytes between the two pipe
// fds.  On 386 the 64-bit byte count comes back split across two
// 32-bit registers; r1 holds the high word and r0 the low word, so n
// is reassembled as (r1<<32)|r0.
func Tee(rfd int, wfd int, len int, flags int) (n int64, err error) {
r0, r1, e1 := Syscall6(SYS_TEE, uintptr(rfd), uintptr(wfd), uintptr(len), uintptr(flags), 0, 0)
n = int64(int64(r1)<<32 | int64(r0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Tgkill(tgid int, tid int, sig syscall.Signal) (err error) {
_, _, e1 := RawSyscall(SYS_TGKILL, uintptr(tgid), uintptr(tid), uintptr(sig))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Times(tms *Tms) (ticks uintptr, err error) {
r0, _, e1 := RawSyscall(SYS_TIMES, uintptr(unsafe.Pointer(tms)), 0, 0)
ticks = uintptr(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Umask(mask int) (oldmask int) {
r0, _, _ := RawSyscall(SYS_UMASK, uintptr(mask), 0, 0)
oldmask = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Uname(buf *Utsname) (err error) {
_, _, e1 := RawSyscall(SYS_UNAME, uintptr(unsafe.Pointer(buf)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Unmount(target string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(target)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UMOUNT2, uintptr(unsafe.Pointer(_p0)), uintptr(flags), 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Unshare(flags int) (err error) {
_, _, e1 := Syscall(SYS_UNSHARE, uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Ustat(dev int, ubuf *Ustat_t) (err error) {
_, _, e1 := Syscall(SYS_USTAT, uintptr(dev), uintptr(unsafe.Pointer(ubuf)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func write(fd int, p []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_WRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func exitThread(code int) (err error) {
_, _, e1 := Syscall(SYS_EXIT, uintptr(code), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func readlen(fd int, p *byte, np int) (n int, err error) {
r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(unsafe.Pointer(p)), uintptr(np))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func writelen(fd int, p *byte, np int) (n int, err error) {
r0, _, e1 := Syscall(SYS_WRITE, uintptr(fd), uintptr(unsafe.Pointer(p)), uintptr(np))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func munmap(addr uintptr, length uintptr) (err error) {
_, _, e1 := Syscall(SYS_MUNMAP, uintptr(addr), uintptr(length), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Madvise(b []byte, advice int) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MADVISE, uintptr(_p0), uintptr(len(b)), uintptr(advice))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mprotect(b []byte, prot int) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MPROTECT, uintptr(_p0), uintptr(len(b)), uintptr(prot))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mlock(b []byte) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MLOCK, uintptr(_p0), uintptr(len(b)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Munlock(b []byte) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MUNLOCK, uintptr(_p0), uintptr(len(b)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mlockall(flags int) (err error) {
_, _, e1 := Syscall(SYS_MLOCKALL, uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Munlockall() (err error) {
_, _, e1 := Syscall(SYS_MUNLOCKALL, 0, 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func pipe(p *[2]_C_int) (err error) {
_, _, e1 := RawSyscall(SYS_PIPE, uintptr(unsafe.Pointer(p)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func pipe2(p *[2]_C_int, flags int) (err error) {
_, _, e1 := RawSyscall(SYS_PIPE2, uintptr(unsafe.Pointer(p)), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Dup2(oldfd int, newfd int) (err error) {
_, _, e1 := Syscall(SYS_DUP2, uintptr(oldfd), uintptr(newfd), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fadvise(fd int, offset int64, length int64, advice int) (err error) {
_, _, e1 := Syscall6(SYS_FADVISE64_64, uintptr(fd), uintptr(offset), uintptr(offset>>32), uintptr(length), uintptr(length>>32), uintptr(advice))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchown(fd int, uid int, gid int) (err error) {
_, _, e1 := Syscall(SYS_FCHOWN32, uintptr(fd), uintptr(uid), uintptr(gid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fstat(fd int, stat *Stat_t) (err error) {
_, _, e1 := Syscall(SYS_FSTAT64, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Ftruncate wraps ftruncate64(2) on 386: the 64-bit length is passed
// as two 32-bit halves (low word, then high word via >>32).
func Ftruncate(fd int, length int64) (err error) {
_, _, e1 := Syscall(SYS_FTRUNCATE64, uintptr(fd), uintptr(length), uintptr(length>>32))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getegid() (egid int) {
r0, _, _ := RawSyscall(SYS_GETEGID32, 0, 0, 0)
egid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Geteuid() (euid int) {
r0, _, _ := RawSyscall(SYS_GETEUID32, 0, 0, 0)
euid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getgid() (gid int) {
r0, _, _ := RawSyscall(SYS_GETGID32, 0, 0, 0)
gid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getuid() (uid int) {
r0, _, _ := RawSyscall(SYS_GETUID32, 0, 0, 0)
uid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func InotifyInit() (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_INOTIFY_INIT, 0, 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Ioperm(from int, num int, on int) (err error) {
_, _, e1 := Syscall(SYS_IOPERM, uintptr(from), uintptr(num), uintptr(on))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Iopl(level int) (err error) {
_, _, e1 := Syscall(SYS_IOPL, uintptr(level), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Lchown(path string, uid int, gid int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_LCHOWN32, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid))
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Lstat(path string, stat *Stat_t) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_LSTAT64, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Pread wraps pread64(2): read len(p) bytes from fd at the given file
// offset without moving the file position.  On 386 the 64-bit offset is
// split into low/high 32-bit halves for the syscall; an empty p is
// represented by &_zero.
func Pread(fd int, p []byte, offset int64) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_PREAD64, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Pwrite(fd int, p []byte, offset int64) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_PWRITE64, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) {
r0, _, e1 := Syscall6(SYS_SENDFILE64, uintptr(outfd), uintptr(infd), uintptr(unsafe.Pointer(offset)), uintptr(count), 0, 0)
written = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setfsgid(gid int) (err error) {
_, _, e1 := Syscall(SYS_SETFSGID32, uintptr(gid), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setfsuid(uid int) (err error) {
_, _, e1 := Syscall(SYS_SETFSUID32, uintptr(uid), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setregid(rgid int, egid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETREGID32, uintptr(rgid), uintptr(egid), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Setresgid sets the real, effective, and saved group IDs of the calling
// process (setresgid32 syscall).
func Setresgid(rgid int, egid int, sgid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETRESGID32, uintptr(rgid), uintptr(egid), uintptr(sgid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Setresuid sets the real, effective, and saved user IDs of the calling
// process (setresuid32 syscall).
func Setresuid(ruid int, euid int, suid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETRESUID32, uintptr(ruid), uintptr(euid), uintptr(suid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Setreuid sets the real and effective user IDs of the calling process
// (setreuid32 syscall).
func Setreuid(ruid int, euid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETREUID32, uintptr(ruid), uintptr(euid), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Splice moves up to len bytes between two file descriptors (at least one of
// which must refer to a pipe) without copying through user space. roff/woff
// may be nil to use the descriptors' current offsets.
func Splice(rfd int, roff *int64, wfd int, woff *int64, len int, flags int) (n int, err error) {
r0, _, e1 := Syscall6(SYS_SPLICE, uintptr(rfd), uintptr(unsafe.Pointer(roff)), uintptr(wfd), uintptr(unsafe.Pointer(woff)), uintptr(len), uintptr(flags))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Stat retrieves file metadata for path into stat via the stat64 syscall,
// following symbolic links.
func Stat(path string, stat *Stat_t) (err error) {
var _p0 *byte
// Convert to a NUL-terminated byte pointer; fails if path contains a NUL.
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_STAT64, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0)
// Keeps _p0 referenced so its allocation outlives the syscall.
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// SyncFileRange flushes a byte range [off, off+n) of fd to disk per flags
// (sync_file_range syscall). Both 64-bit arguments are split into two
// 32-bit words (low, high) for this 32-bit ABI.
func SyncFileRange(fd int, off int64, n int64, flags int) (err error) {
_, _, e1 := Syscall6(SYS_SYNC_FILE_RANGE, uintptr(fd), uintptr(off), uintptr(off>>32), uintptr(n), uintptr(n>>32), uintptr(flags))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Truncate resizes the file named by path to the given length via the
// truncate64 syscall. The 64-bit length is split into two 32-bit words
// (low, high) for this 32-bit ABI.
func Truncate(path string, length int64) (err error) {
var _p0 *byte
// Convert to a NUL-terminated byte pointer; fails if path contains a NUL.
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_TRUNCATE64, uintptr(unsafe.Pointer(_p0)), uintptr(length), uintptr(length>>32))
// Keeps _p0 referenced so its allocation outlives the syscall.
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// getgroups fills list with up to n supplementary group IDs of the calling
// process and returns the actual count (getgroups32 syscall).
func getgroups(n int, list *_Gid_t) (nn int, err error) {
r0, _, e1 := RawSyscall(SYS_GETGROUPS32, uintptr(n), uintptr(unsafe.Pointer(list)), 0)
nn = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// setgroups sets the supplementary group IDs of the calling process from
// the first n entries of list (setgroups32 syscall).
func setgroups(n int, list *_Gid_t) (err error) {
_, _, e1 := RawSyscall(SYS_SETGROUPS32, uintptr(n), uintptr(unsafe.Pointer(list)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Select waits until one of the descriptors in the r/w/e sets becomes ready
// or timeout expires (the Linux _newselect syscall); returns the number of
// ready descriptors. Any of the set/timeout pointers may be nil.
func Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error) {
r0, _, e1 := Syscall6(SYS__NEWSELECT, uintptr(nfd), uintptr(unsafe.Pointer(r)), uintptr(unsafe.Pointer(w)), uintptr(unsafe.Pointer(e)), uintptr(unsafe.Pointer(timeout)), 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// mmap2 maps length bytes at pageOffset (in units defined by the mmap2 ABI,
// not bytes) of fd into memory and returns the mapped address.
func mmap2(addr uintptr, length uintptr, prot int, flags int, fd int, pageOffset uintptr) (xaddr uintptr, err error) {
r0, _, e1 := Syscall6(SYS_MMAP2, uintptr(addr), uintptr(length), uintptr(prot), uintptr(flags), uintptr(fd), uintptr(pageOffset))
xaddr = uintptr(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// EpollWait blocks for up to msec milliseconds waiting for events on the
// epoll instance epfd, filling the events slice; returns the number of
// events delivered.
func EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error) {
var _p0 unsafe.Pointer
if len(events) > 0 {
_p0 = unsafe.Pointer(&events[0])
} else {
// Empty slice: pass a valid dummy address so the kernel never sees nil.
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_EPOLL_WAIT, uintptr(epfd), uintptr(_p0), uintptr(len(events)), uintptr(msec), 0, 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Pause suspends the calling thread until a signal is delivered
// (pause syscall); it always returns an error (conventionally EINTR).
func Pause() (err error) {
_, _, e1 := Syscall(SYS_PAUSE, 0, 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// getrlimit reads the 32-bit resource limit for resource into rlim
// (legacy getrlimit syscall with rlimit32 layout).
func getrlimit(resource int, rlim *rlimit32) (err error) {
_, _, e1 := RawSyscall(SYS_GETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// setrlimit sets the 32-bit resource limit for resource from rlim
// (legacy setrlimit syscall with rlimit32 layout).
func setrlimit(resource int, rlim *rlimit32) (err error) {
_, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Gettimeofday stores the current wall-clock time into tv
// (gettimeofday syscall; the timezone argument is passed as NULL).
func Gettimeofday(tv *Timeval) (err error) {
_, _, e1 := RawSyscall(SYS_GETTIMEOFDAY, uintptr(unsafe.Pointer(tv)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Time returns the current time in seconds since the Unix epoch and, if t
// is non-nil, also stores it in *t (time syscall).
func Time(t *Time_t) (tt Time_t, err error) {
r0, _, e1 := RawSyscall(SYS_TIME, uintptr(unsafe.Pointer(t)), 0, 0)
tt = Time_t(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// Utime sets the access and modification times of the file named by path
// from buf (utime syscall).
func Utime(path string, buf *Utimbuf) (err error) {
var _p0 *byte
// Convert to a NUL-terminated byte pointer; fails if path contains a NUL.
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UTIME, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(buf)), 0)
// Keeps _p0 referenced so its allocation outlives the syscall.
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// poll waits up to timeout milliseconds for events on the nfds descriptors
// described by fds; returns the number of descriptors with events
// (poll syscall).
func poll(fds *PollFd, nfds int, timeout int) (n int, err error) {
r0, _, e1 := Syscall(SYS_POLL, uintptr(unsafe.Pointer(fds)), uintptr(nfds), uintptr(timeout))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
| {
"pile_set_name": "Github"
} |
// Entry point for the non-standard `String.escapeHTML` core-js feature:
// the first require installs the polyfill module (side effect), then the
// helper is re-exported from the shared core namespace object.
require('../../modules/core.string.escape-html');
module.exports = require('../../modules/_core').String.escapeHTML;
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.xream.x7.sqli.repository.config.datasource;
import io.xream.x7.base.util.ExceptionUtil;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.Signature;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.core.Ordered;
/**
* @Author Sim
*/
@Aspect
public class DataSourceAspect implements Ordered {

    /**
     * Highest precedence (0) so the datasource is switched to READ before
     * other advice (e.g. transaction advice) runs inside the same chain.
     */
    @Override
    public int getOrder() {
        return 0;
    }

    /**
     * Matches any method annotated with {@link ReadOnly}.
     *
     * Fixed: the original pointcut expression contained an unbalanced extra
     * closing parenthesis ("...ReadOnly))"), which is a malformed AspectJ
     * expression.
     */
    @Pointcut("@annotation(io.xream.x7.sqli.repository.config.datasource.ReadOnly)")
    public void cut() {
    }

    /**
     * Routes the intercepted call to the READ datasource for its duration,
     * always restoring the previous routing afterwards.
     *
     * @param proceedingJoinPoint the intercepted invocation
     * @param readOnly            the annotation instance on the target method
     * @return the target method's return value, or null for void methods
     */
    @Around("cut() && @annotation(readOnly) ")
    public Object around(ProceedingJoinPoint proceedingJoinPoint, ReadOnly readOnly) {
        Signature signature = proceedingJoinPoint.getSignature();
        MethodSignature ms = ((MethodSignature) signature);
        try {
            DataSourceContextHolder.set(DataSourceType.READ);
            if (ms.getReturnType() == void.class) {
                proceedingJoinPoint.proceed();
                return null;
            } else {
                return proceedingJoinPoint.proceed();
            }
        } catch (Throwable e) {
            // NOTE(review): wrapping into RuntimeException keeps only the
            // message and drops the original stack trace/type — confirm this
            // is intentional before changing.
            throw new RuntimeException(ExceptionUtil.getMessage(e));
        } finally {
            // Always clear the routing so the thread does not leak READ mode.
            DataSourceContextHolder.remove();
        }
    }
}
| {
"pile_set_name": "Github"
} |
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Declares the AddDebugDirectoryEntryTransform. This find or creates a debug
// directory entry of the specified type. It is intended to be used by other
// transforms.
//
// After the transform has completed the 'offset' and 'block' members functions
// point to the found or created debug directory entry with the type as
// specified in the transform constructor.
#ifndef SYZYGY_PE_TRANSFORMS_ADD_DEBUG_DIRECTORY_ENTRY_TRANSFORM_H_
#define SYZYGY_PE_TRANSFORMS_ADD_DEBUG_DIRECTORY_ENTRY_TRANSFORM_H_
#include <windows.h>
#include "base/files/file_path.h"
#include "syzygy/block_graph/transforms/named_transform.h"
namespace pe {
namespace transforms {
using block_graph::BlockGraph;
using block_graph::TransformPolicyInterface;
using block_graph::transforms::NamedBlockGraphTransformImpl;
// A PE BlockGraph transform for adding/updating the a debug directory entry
// of a given type.
// A PE BlockGraph transform for adding/updating the a debug directory entry
// of a given type.
class AddDebugDirectoryEntryTransform
    : public NamedBlockGraphTransformImpl<AddDebugDirectoryEntryTransform> {
 public:
  // Configures this transform.
  //
  // @param type the type of the debug directory entry to search for.
  // @param always_add if this is true a new debug directory entry will always
  //     be created, otherwise a new one will be created only if none already
  //     exists.
  AddDebugDirectoryEntryTransform(DWORD type, bool always_add)
      : type_(type), always_add_(always_add), added_(false), block_(NULL),
        offset_(-1) {
  }

  // Adds or finds the debug data directory of the given type.
  //
  // @param policy The policy object restricting how the transform is applied.
  // @param block_graph The block graph to transform.
  // @param dos_header_block The DOS header block of the block graph.
  // @returns true on success, false otherwise.
  virtual bool TransformBlockGraph(
      const TransformPolicyInterface* policy,
      BlockGraph* block_graph,
      BlockGraph::Block* dos_header_block) override;

  // Returns true if a new debug directory entry was created.
  bool added() const { return added_; }

  // Access the block containing the found or created debug directory entry.
  // Only meaningful after TransformBlockGraph() has returned true.
  //
  // @returns the block housing the debug directory entry.
  BlockGraph::Block* block() const { return block_; }

  // Access the offset of the found or created debug directory entry.
  // Only meaningful after TransformBlockGraph() has returned true.
  //
  // @returns the offset into the block of the debug directory entry.
  BlockGraph::Offset offset() const { return offset_; }

  // The transform name.
  static const char kTransformName[];

 private:
  // The type of the debug directory entry to find or add.
  DWORD type_;
  // If this is true a new debug directory entry will always be added, even if
  // there exists another one.
  bool always_add_;

  // These member variables hold state after the transform has been applied.
  // Indicates if a new directory entry was added.
  bool added_;
  // Stores the block housing the debug data directory entries.
  BlockGraph::Block* block_;
  // Stores the offset into the block of the found or created debug data
  // directory entry. -1 until the transform has run.
  BlockGraph::Offset offset_;

  // Non-copyable: the transform carries per-run result state.
  DISALLOW_COPY_AND_ASSIGN(AddDebugDirectoryEntryTransform);
};
} // namespace transforms
} // namespace pe
#endif // SYZYGY_PE_TRANSFORMS_ADD_DEBUG_DIRECTORY_ENTRY_TRANSFORM_H_
| {
"pile_set_name": "Github"
} |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
# NOTE(review): `Mix.Config` is deprecated in newer Elixir releases in favor
# of `import Config` — confirm the project's minimum Elixir version before
# migrating.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
#     config :drop_test, key: :value
#
# And access this configuration in your application as:
#
#     Application.get_env(:drop_test, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env}.exs"
| {
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.