| code (string, 3 to 1.01M chars) | repo_name (string, 5 to 116 chars) | path (string, 3 to 311 chars) | language (30 classes) | license (15 classes) | size (int64, 3 to 1.01M) |
|---|---|---|---|---|---|
/*
Copyright 2015, 2016 OpenMarket Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
'use strict';
import React from 'react';
import ReactDOM from 'react-dom';
import PropTypes from 'prop-types';
import { _t } from '../../../languageHandler';
const DIV_ID = 'mx_recaptcha';
/**
* A pure UI component which displays a captcha form.
*/
module.exports = React.createClass({
displayName: 'CaptchaForm',
propTypes: {
sitePublicKey: PropTypes.string,
// called with the captcha response
onCaptchaResponse: PropTypes.func,
},
getDefaultProps: function() {
return {
onCaptchaResponse: () => {},
};
},
getInitialState: function() {
return {
errorText: null,
};
},
componentWillMount: function() {
this._captchaWidgetId = null;
},
componentDidMount: function() {
// Just putting a script tag into the returned jsx doesn't work, annoyingly,
// so we do this instead.
if (global.grecaptcha) {
// already loaded
this._onCaptchaLoaded();
} else {
console.log("Loading recaptcha script...");
window.mx_on_recaptcha_loaded = () => {this._onCaptchaLoaded();};
const protocol = global.location.protocol;
if (protocol === "file:") {
const warning = document.createElement('div');
// XXX: fix hardcoded app URL. Better solutions include:
// * jumping straight to a hosted captcha page (but we don't support that yet)
// * embedding the captcha in an iframe (if that works)
// * using a better captcha lib
ReactDOM.render(_t(
"Robot check is currently unavailable on desktop - please use a <a>web browser</a>",
{},
{ 'a': (sub) => { return <a href='https://riot.im/app'>{ sub }</a>; }}), warning);
this.refs.recaptchaContainer.appendChild(warning);
} else {
const scriptTag = document.createElement('script');
scriptTag.setAttribute(
'src', protocol+"//www.google.com/recaptcha/api.js?onload=mx_on_recaptcha_loaded&render=explicit",
);
this.refs.recaptchaContainer.appendChild(scriptTag);
}
}
},
componentWillUnmount: function() {
this._resetRecaptcha();
},
_renderRecaptcha: function(divId) {
if (!global.grecaptcha) {
console.error("grecaptcha not loaded!");
throw new Error("Recaptcha did not load successfully");
}
const publicKey = this.props.sitePublicKey;
if (!publicKey) {
console.error("No public key for recaptcha!");
throw new Error(
"This server has not supplied enough information for Recaptcha "
+ "authentication");
}
console.log("Rendering to %s", divId);
this._captchaWidgetId = global.grecaptcha.render(divId, {
sitekey: publicKey,
callback: this.props.onCaptchaResponse,
});
},
_resetRecaptcha: function() {
if (this._captchaWidgetId !== null) {
global.grecaptcha.reset(this._captchaWidgetId);
}
},
_onCaptchaLoaded: function() {
console.log("Loaded recaptcha script.");
try {
this._renderRecaptcha(DIV_ID);
} catch (e) {
this.setState({
errorText: e.toString(),
});
}
},
render: function() {
let error = null;
if (this.state.errorText) {
error = (
<div className="error">
{ this.state.errorText }
</div>
);
}
return (
<div ref="recaptchaContainer">
{ _t("This Home Server would like to make sure you are not a robot") }
<br />
<div id={DIV_ID}></div>
{ error }
</div>
);
},
});
|
aperezdc/matrix-react-sdk
|
src/components/views/login/CaptchaForm.js
|
JavaScript
|
apache-2.0
| 4,640
|
filter.js
=========
A very basic jQuery filter plugin. Show/hide elements based on selected tags.
### Usage
```javascript
$(element).filter({
nav: '[data-filter]' // selector for filter nav (optional)
});
```
Set the tag to filter with `data-filter`, eg:
```html
<a data-filter="tag">Tag</a>
```
Filterable elements should have their tags comma separated in `data-filter-tags`, eg:
```html
<div data-filter-tags="tag1,tag2,tag3">This is a block</div>
```
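Putting the pieces together, a minimal sketch (tag values and class names here are illustrative, with the plugin initialised on the filterable elements as in the Usage snippet above):
```html
<!-- filter nav: clicking a link selects its tag -->
<a data-filter="red">Red</a>
<a data-filter="blue">Blue</a>

<!-- blocks stay visible only while they carry every selected tag -->
<div class="block" data-filter-tags="red">Red things</div>
<div class="block" data-filter-tags="red,blue">Red and blue things</div>

<script>
  // initialise the plugin on the filterable elements
  $('.block').filter({ nav: '[data-filter]' });
</script>
```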
### Demo
[JSFiddle](http://jsfiddle.net/tommaitland/twU4b/)
### Support
I haven't done much testing on this script, and it could probably be more flexible. Currently an element must match every selected tag to be shown; this could be loosened to match any selected tag instead.
Suggestions/bugs welcome through [Issues](https://github.com/tommaitland/filter.js/issues)
|
tommaitland/filter
|
README.md
|
Markdown
|
apache-2.0
| 810
|
// Copyright 2017 The CrunchyCrypt Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "crunchy/internal/keys/macing_key.h"
#include <utility>
#include "absl/memory/memory.h"
#include "crunchy/internal/algs/random/crypto_rand.h"
#include "crunchy/internal/port/port.h"
namespace crunchy {
namespace {
class MacingKeyImpl : public MacingKey {
public:
explicit MacingKeyImpl(std::unique_ptr<MacInterface> macer)
: macer_(std::move(macer)) {}
StatusOr<std::string> Sign(absl::string_view message) const override {
return macer_->Sign(message);
}
Status Verify(absl::string_view message,
absl::string_view signature) const override {
return macer_->Verify(message, signature);
}
private:
std::unique_ptr<MacInterface> macer_;
};
class MacingKeyFactoryImpl : public MacingKeyFactory {
public:
explicit MacingKeyFactoryImpl(const MacFactory& factory)
: factory_(factory) {}
KeyData CreateRandomKeyData() const override {
KeyData key_data;
key_data.set_private_key(RandString(factory_.GetKeyLength()));
return key_data;
}
StatusOr<std::unique_ptr<MacingKey>> MakeKey(
const KeyData& key_data) const override {
if (key_data.private_key().empty()) {
return InvalidArgumentErrorBuilder(CRUNCHY_LOC).LogInfo()
<< "key_data.private_key() is empty";
}
auto status_or_crypter = factory_.Make(key_data.private_key());
if (!status_or_crypter.ok()) {
return status_or_crypter.status();
}
return {absl::make_unique<MacingKeyImpl>(
std::move(status_or_crypter.ValueOrDie()))};
}
private:
const MacFactory& factory_;
};
} // namespace
std::unique_ptr<MacingKeyFactory> MakeFactory(const MacFactory& factory) {
return {absl::make_unique<MacingKeyFactoryImpl>(factory)};
}
} // namespace crunchy
|
google/crunchy
|
crunchy/internal/keys/macing_key.cc
|
C++
|
apache-2.0
| 2,356
|
"""
Copyright 2013 Shine Wang
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import urllib
import re
from HTMLParser import HTMLParser
from courseClasses import Course, Lecture, Tutorial, Reserve
class CustomHTMLParser(HTMLParser):
"""this class reads a HTML stream, then parses out the "data" fields"""
def __init__(self, webData):
HTMLParser.__init__(self)
self.webData = webData
def handle_data(self, data):
"""takes out the data"""
self.webData.append(data.strip())
class WebParser:
""""A WebParser is created for each and every course,
to parse the corresponding web page"""
requestURL = "http://www.adm.uwaterloo.ca/cgi-bin/" \
"cgiwrap/infocour/salook.pl"
def __init__(self):
self.webData = []
self.index = -1
self.session = None
self.thisCourse = None
def run(self, courseString, sessionString):
"""this is the method that the main class can call
if successful, returns the Course class
if not, returns an error message"""
self.session = self.parseSession(sessionString)
if self.session is None:
return "SessionNameWrongError"
courseString = map(lambda x: x.upper(), courseString.split())
try:
self.thisCourse = Course(self.session, courseString[0],
courseString[1])
except:
return "CourseNameWrongError"
if self.getWebData(self.thisCourse):
return "WebPageError"
elif self.parseWebData():
return "CourseNotFoundError"
else:
self.processCourseInfo()
self.postProcess(self.thisCourse)
return self.thisCourse
def parseSession(self, sessionString):
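# session code = "1" + last two digits of the year + a term digit
# (9 = fall, 1 = winter, 5 = spring), e.g. Fall 2013 -> "1139"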
try:
ret = "1"
ret += sessionString.split()[1][-2:] # last 2 digits of year
tempMap = (("fall", "9"), ("winter", "1"), ("spring", "5"))
for season in tempMap:
if season[0] in sessionString.lower():
ret += season[1]
return ret
except:
return None
def getWebData(self, course):
"""submits a POST query, initializes HTMLParser"""
try:
params = urllib.urlencode({"sess": course.session,
"subject": course.subject,
"cournum": course.catalogNumber})
page = urllib.urlopen(WebParser.requestURL, params)
parser = CustomHTMLParser(self.webData)
# we use .replace() because HTMLParser ignores "&nbsp;",
# which would screw up our table
parser.feed(page.read().replace("&nbsp;", " "))
except:
return "WebPageError"
def parseWebData(self):
"""We try to find the beginning of the desired table"""
# now, we find the start index and pass that on along
# with the webData
for i in xrange(len(self.webData)-3):
if self.webData[i] == self.thisCourse.subject \
and self.webData[i+2] == self.thisCourse.catalogNumber:
self.index = i
break
if self.index == -1: # course not found in the page
return "CourseNotFound"
def processCourseInfo(self):
"""now, we do the heavy-duty processing of the data table"""
# sets basic attrs of thisCourse
self.thisCourse.units = self.webData[self.index+4]
self.thisCourse.title = self.webData[self.index+6]
while self.webData[self.index] != "Instructor":
self.index += 1
# processing row-by-row
while not self.endOfRow(self.webData[self.index]):
if self.webData[self.index] != "":
self.processSlot()
self.index += 1
if self.index == len(self.webData):
return
def processSlot(self):
"""we check to see if this is the BEGINNING of a valid row"""
if (self.webData[self.index+1][:3].upper() == "LEC"
or self.webData[self.index+1][:3].upper() == "LAB") \
and "ONLINE" not in self.webData[self.index+2]:
# we don't want online classes!
# processing a lecture row
lec = Lecture()
if self.processClass(lec, self.index, self.webData):
return
self.thisCourse.lectures.append(lec)
elif self.webData[self.index+1][:3].upper() == "TUT":
# processing a tutorial row
tut = Tutorial()
if self.processClass(tut, self.index, self.webData):
return
self.thisCourse.tutorials.append(tut)
elif self.webData[self.index][:7].upper() == "RESERVE":
# processing a reserve row
res = Reserve()
self.processReserve(res, self.index, self.webData)
if self.thisCourse.lectures:
self.thisCourse.lectures[-1].reserves.append(res)
# note: we leave out the TST (exam?) times for now
def processReserve(self, res, index, webData):
"""processing reservations for certain types of students"""
res.name = webData[index][9:]
# we remove the "only" suffix (which is annoyingly pointless)
if "only" in res.name:
res.name = res.name[:-5]
# also, the "students" suffx
if "students" in res.name or "Students" in res.name:
res.name = res.name[:-9]
# now, we merge the match list
while not webData[index].isdigit():
index += 1
# retrieving enrollment numbers
res.enrlCap = int(webData[index])
res.enrlTotal = int(webData[index+1])
def processClass(self, lec, index, webData):
"""we process a typical lecture or tutorial row"""
attr1 = ["classNumber", "compSec", "campusLocation"]
for i in xrange(len(attr1)):
setattr(lec, attr1[i], webData[index+i].strip())
index += 6
attr2 = ["enrlCap", "enrlTotal", "waitCap", "waitTotal"]
for i in xrange(len(attr2)):
setattr(lec, attr2[i], int(webData[index+i]))
index += 4
# parsing the "Times Days/Date" field
match = re.search(r"([:\d]+)-([:\d]+)(\w+)", webData[index])
if not match:
# we return an error message in the "TBA" case
return "NoTimeError"
attr3 = ["startTime", "endTime", "days"]
for i in xrange(len(attr3)):
setattr(lec, attr3[i], match.group(i+1).strip())
index += 1
if len(webData[index].split()) == 2:
# sometimes, no building, room, and instructor will be given
# this is mostly for Laurier courses
lec.building, lec.room = webData[index].split()
lec.instructor = webData[index+1].strip()
def endOfRow(self, data):
"""returns true if the current data-cell is the last cell
of this course; else - false"""
# the last cell is of the form: ##/##-##/## or
# "Information last updated
if re.search(r"\d+/\d+-\d+/\d+", data) or \
"Information last updated" in data:
return True
else:
return False
def postProcess(self, course):
"""this function will convert the class times to minutes-past-
the-previous-midnight, and converts the days to numbers.
Also, some reservation-postprocessing"""
map(lambda x: x.calcMiscSeats(), course.lectures)
for lec in course.lectures:
lec.courseID = course.subject + " " + course.catalogNumber
for tut in course.tutorials:
tut.courseID = course.subject + " " + course.catalogNumber
for slot in course.lectures + course.tutorials:
# first, we convert time to 24hr time
# earliest start time for a class is 8:30am
# night classes start at/before 7:00pm
if 1 <= int(slot.startTime.split(":")[0]) <= 7:
slot.startTime, slot.endTime = \
map(lambda x: "{}:{}".format(str(int(x.split(":")[0])
+ 12), x[-2:]), [slot.startTime,
slot.endTime])
elif int(slot.startTime.split(":")[0]) > int(
slot.endTime.split(":")[0]):
# e.g. 12:00 to 1:00
slot.endTime = "{}:{}".format(str(int(
slot.endTime.split(":")[0])+12), slot.endTime[-2:])
# now, we write to slot.sTime, slot.eTime
# (minutes-past-midnight...)
slot.sTime, slot.eTime = map(lambda x: int(x[:2]) * 60 +
int(x[-2:]),
[slot.startTime, slot.endTime])
# we write to slot.ndays, where ndays is a string of numbers,
# 0->4
if "M" in slot.days:
slot.ndays += "0"
i = slot.days.find("T")
if i != -1 and (i == len(slot.days) - 1 or
slot.days[i+1] != 'h'):
# basically, if not Th (for Thursday)
slot.ndays += "1"
# now, for the rest of the days...
for i in [("W", "2"), ("Th", "3"), ("F", "4")]:
if i[0] in slot.days:
slot.ndays += i[1]
# we make a small adjustment to campusLocation,
# removing whitespace
slot.campusLocation = slot.campusLocation.split()[0]
# we make the prof name "first last" instead of
# "last,first middle"
if slot.instructor != "":
s = slot.instructor.split(" ")
for i in s:
if "," in i:
# we want the 2 words connected by the ","
slot.instructor = " ".join(reversed(list(
i.split(","))))
|
shinexwang/Classy
|
Main/webParser.py
|
Python
|
apache-2.0
| 10,545
|
#include "common/redis/codec_impl.h"
#include <cstdint>
#include <string>
#include <vector>
#include "common/common/assert.h"
#include "common/common/fmt.h"
#include "common/common/utility.h"
namespace Envoy {
namespace Redis {
std::string RespValue::toString() const {
switch (type_) {
case RespType::Array: {
std::string ret = "[";
for (uint64_t i = 0; i < asArray().size(); i++) {
ret += asArray()[i].toString();
if (i != asArray().size() - 1) {
ret += ", ";
}
}
return ret + "]";
}
case RespType::SimpleString:
case RespType::BulkString:
case RespType::Error:
return fmt::format("\"{}\"", asString());
case RespType::Null:
return "null";
case RespType::Integer:
return std::to_string(asInteger());
}
NOT_REACHED;
}
std::vector<RespValue>& RespValue::asArray() {
ASSERT(type_ == RespType::Array);
return array_;
}
const std::vector<RespValue>& RespValue::asArray() const {
ASSERT(type_ == RespType::Array);
return array_;
}
std::string& RespValue::asString() {
ASSERT(type_ == RespType::BulkString || type_ == RespType::Error ||
type_ == RespType::SimpleString);
return string_;
}
const std::string& RespValue::asString() const {
ASSERT(type_ == RespType::BulkString || type_ == RespType::Error ||
type_ == RespType::SimpleString);
return string_;
}
int64_t& RespValue::asInteger() {
ASSERT(type_ == RespType::Integer);
return integer_;
}
int64_t RespValue::asInteger() const {
ASSERT(type_ == RespType::Integer);
return integer_;
}
void RespValue::cleanup() {
// Need to manually delete because of the union.
switch (type_) {
case RespType::Array: {
array_.~vector<RespValue>();
break;
}
case RespType::SimpleString:
case RespType::BulkString:
case RespType::Error: {
string_.~basic_string<char>();
break;
}
case RespType::Null:
case RespType::Integer: {
break;
}
}
}
void RespValue::type(RespType type) {
cleanup();
// Need to use placement new because of the union.
type_ = type;
switch (type) {
case RespType::Array: {
new (&array_) std::vector<RespValue>();
break;
}
case RespType::SimpleString:
case RespType::BulkString:
case RespType::Error: {
new (&string_) std::string();
break;
}
case RespType::Null:
case RespType::Integer: {
break;
}
}
}
void DecoderImpl::decode(Buffer::Instance& data) {
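// Gather the buffer's raw slices, parse each one in order, then drain
// everything that was consumed.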
uint64_t num_slices = data.getRawSlices(nullptr, 0);
Buffer::RawSlice slices[num_slices];
data.getRawSlices(slices, num_slices);
for (const Buffer::RawSlice& slice : slices) {
parseSlice(slice);
}
data.drain(data.length());
}
void DecoderImpl::parseSlice(const Buffer::RawSlice& slice) {
const char* buffer = reinterpret_cast<const char*>(slice.mem_);
uint64_t remaining = slice.len_;
while (remaining || state_ == State::ValueComplete) {
ENVOY_LOG(trace, "parse slice: {} remaining", remaining);
switch (state_) {
case State::ValueRootStart: {
ENVOY_LOG(trace, "parse slice: ValueRootStart");
pending_value_root_.reset(new RespValue());
pending_value_stack_.push_front({pending_value_root_.get(), 0});
state_ = State::ValueStart;
break;
}
case State::ValueStart: {
ENVOY_LOG(trace, "parse slice: ValueStart: {}", buffer[0]);
pending_integer_.reset();
switch (buffer[0]) {
case '*': {
state_ = State::IntegerStart;
pending_value_stack_.front().value_->type(RespType::Array);
break;
}
case '$': {
state_ = State::IntegerStart;
pending_value_stack_.front().value_->type(RespType::BulkString);
break;
}
case '-': {
state_ = State::SimpleString;
pending_value_stack_.front().value_->type(RespType::Error);
break;
}
case '+': {
state_ = State::SimpleString;
pending_value_stack_.front().value_->type(RespType::SimpleString);
break;
}
case ':': {
state_ = State::IntegerStart;
pending_value_stack_.front().value_->type(RespType::Integer);
break;
}
default: { throw ProtocolError("invalid value type"); }
}
remaining--;
buffer++;
break;
}
case State::IntegerStart: {
ENVOY_LOG(trace, "parse slice: IntegerStart: {}", buffer[0]);
if (buffer[0] == '-') {
pending_integer_.negative_ = true;
remaining--;
buffer++;
}
state_ = State::Integer;
break;
}
case State::Integer: {
ENVOY_LOG(trace, "parse slice: Integer: {}", buffer[0]);
char c = buffer[0];
if (buffer[0] == '\r') {
state_ = State::IntegerLF;
} else {
if (c < '0' || c > '9') {
throw ProtocolError("invalid integer character");
} else {
pending_integer_.integer_ = (pending_integer_.integer_ * 10) + (c - '0');
}
}
remaining--;
buffer++;
break;
}
case State::IntegerLF: {
if (buffer[0] != '\n') {
throw ProtocolError("expected new line");
}
ENVOY_LOG(trace, "parse slice: IntegerLF: {}", pending_integer_.integer_);
remaining--;
buffer++;
PendingValue& current_value = pending_value_stack_.front();
if (current_value.value_->type() == RespType::Array) {
if (pending_integer_.negative_) {
// Null array. Convert to null.
current_value.value_->type(RespType::Null);
state_ = State::ValueComplete;
} else if (pending_integer_.integer_ == 0) {
state_ = State::ValueComplete;
} else {
std::vector<RespValue> values(pending_integer_.integer_);
current_value.value_->asArray().swap(values);
pending_value_stack_.push_front({&current_value.value_->asArray()[0], 0});
state_ = State::ValueStart;
}
} else if (current_value.value_->type() == RespType::Integer) {
if (pending_integer_.integer_ == 0 || !pending_integer_.negative_) {
current_value.value_->asInteger() = pending_integer_.integer_;
} else {
// By subtracting 1 (and later correcting) we ensure that we remain within the int64_t
// range to allow a valid static_cast. This is an issue when we have a value of -2^63,
// which cannot be represented as 2^63 in the intermediate int64_t.
current_value.value_->asInteger() =
static_cast<int64_t>(pending_integer_.integer_ - 1) * -1 - 1;
}
state_ = State::ValueComplete;
} else {
ASSERT(current_value.value_->type() == RespType::BulkString);
if (!pending_integer_.negative_) {
// TODO(mattklein123): reserve and define max length since we don't stream currently.
state_ = State::BulkStringBody;
} else {
// Null bulk string. Switch type to null and move to value complete.
current_value.value_->type(RespType::Null);
state_ = State::ValueComplete;
}
}
break;
}
case State::BulkStringBody: {
ASSERT(!pending_integer_.negative_);
uint64_t length_to_copy =
std::min(static_cast<uint64_t>(pending_integer_.integer_), remaining);
pending_value_stack_.front().value_->asString().append(buffer, length_to_copy);
pending_integer_.integer_ -= length_to_copy;
remaining -= length_to_copy;
buffer += length_to_copy;
if (pending_integer_.integer_ == 0) {
ENVOY_LOG(trace, "parse slice: BulkStringBody complete: {}",
pending_value_stack_.front().value_->asString());
state_ = State::CR;
}
break;
}
case State::CR: {
ENVOY_LOG(trace, "parse slice: CR");
if (buffer[0] != '\r') {
throw ProtocolError("expected carriage return");
}
remaining--;
buffer++;
state_ = State::LF;
break;
}
case State::LF: {
ENVOY_LOG(trace, "parse slice: LF");
if (buffer[0] != '\n') {
throw ProtocolError("expected new line");
}
remaining--;
buffer++;
state_ = State::ValueComplete;
break;
}
case State::SimpleString: {
ENVOY_LOG(trace, "parse slice: SimpleString: {}", buffer[0]);
if (buffer[0] == '\r') {
state_ = State::LF;
} else {
pending_value_stack_.front().value_->asString().push_back(buffer[0]);
}
remaining--;
buffer++;
break;
}
case State::ValueComplete: {
ENVOY_LOG(trace, "parse slice: ValueComplete");
ASSERT(!pending_value_stack_.empty());
pending_value_stack_.pop_front();
if (pending_value_stack_.empty()) {
callbacks_.onRespValue(std::move(pending_value_root_));
state_ = State::ValueRootStart;
} else {
PendingValue& current_value = pending_value_stack_.front();
ASSERT(current_value.value_->type() == RespType::Array);
if (current_value.current_array_element_ < current_value.value_->asArray().size() - 1) {
current_value.current_array_element_++;
pending_value_stack_.push_front(
{&current_value.value_->asArray()[current_value.current_array_element_], 0});
state_ = State::ValueStart;
}
}
break;
}
}
}
}
void EncoderImpl::encode(const RespValue& value, Buffer::Instance& out) {
switch (value.type()) {
case RespType::Array: {
encodeArray(value.asArray(), out);
break;
}
case RespType::SimpleString: {
encodeSimpleString(value.asString(), out);
break;
}
case RespType::BulkString: {
encodeBulkString(value.asString(), out);
break;
}
case RespType::Error: {
encodeError(value.asString(), out);
break;
}
case RespType::Null: {
out.add("$-1\r\n", 5);
break;
}
case RespType::Integer:
encodeInteger(value.asInteger(), out);
break;
}
}
void EncoderImpl::encodeArray(const std::vector<RespValue>& array, Buffer::Instance& out) {
char buffer[32];
char* current = buffer;
*current++ = '*';
current += StringUtil::itoa(current, 31, array.size());
*current++ = '\r';
*current++ = '\n';
out.add(buffer, current - buffer);
for (const RespValue& value : array) {
encode(value, out);
}
}
void EncoderImpl::encodeBulkString(const std::string& string, Buffer::Instance& out) {
char buffer[32];
char* current = buffer;
*current++ = '$';
current += StringUtil::itoa(current, 31, string.size());
*current++ = '\r';
*current++ = '\n';
out.add(buffer, current - buffer);
out.add(string);
out.add("\r\n", 2);
}
void EncoderImpl::encodeError(const std::string& string, Buffer::Instance& out) {
out.add("-", 1);
out.add(string);
out.add("\r\n", 2);
}
void EncoderImpl::encodeInteger(int64_t integer, Buffer::Instance& out) {
char buffer[32];
char* current = buffer;
*current++ = ':';
if (integer >= 0) {
current += StringUtil::itoa(current, 31, integer);
} else {
*current++ = '-';
// By adding 1 (and later correcting) we ensure that we remain within the int64_t
// range prior to the static_cast. This is an issue when we have a value of -2^63,
// which cannot be represented as 2^63 in the intermediate int64_t.
current += StringUtil::itoa(current, 30, static_cast<uint64_t>((integer + 1) * -1) + 1ULL);
}
*current++ = '\r';
*current++ = '\n';
out.add(buffer, current - buffer);
}
void EncoderImpl::encodeSimpleString(const std::string& string, Buffer::Instance& out) {
out.add("+", 1);
out.add(string);
out.add("\r\n", 2);
}
} // namespace Redis
} // namespace Envoy
|
craffert0/envoy
|
source/common/redis/codec_impl.cc
|
C++
|
apache-2.0
| 11,688
|
/**
* Automatically generated file. DO NOT MODIFY
*/
package wealk.android.jewels.test;
public final class BuildConfig {
public static final boolean DEBUG = Boolean.parseBoolean("true");
public static final String APPLICATION_ID = "wealk.android.jewels.test";
public static final String BUILD_TYPE = "debug";
public static final String FLAVOR = "";
public static final int VERSION_CODE = -1;
public static final String VERSION_NAME = "";
}
|
xfmysql/jewels
|
app/build/generated/source/buildConfig/androidTest/debug/wealk/android/jewels/test/BuildConfig.java
|
Java
|
apache-2.0
| 455
|
package org.jetbrains.plugins.scala
package annotator
package template
import org.jetbrains.plugins.scala.annotator.element.ScTemplateDefinitionAnnotator
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTemplateDefinition
/**
* Pavel Fatin
*/
class NeedsToBeAbstractTest extends AnnotatorTestBase[ScTemplateDefinition] {
def testFine(): Unit = {
assertNothing(messages("class C"))
assertNothing(messages("class C {}"))
assertNothing(messages("trait T"))
assertNothing(messages("trait T {}"))
assertNothing(messages("abstract class C"))
assertNothing(messages("abstract class C {}"))
assertNothing(messages("abstract class C { def f }"))
assertNothing(messages("trait T { def f }"))
}
def testSkipNew(): Unit = {
assertNothing(messages("trait T { def f }; new Object with T"))
}
def testSkipObject(): Unit = {
assertNothing(messages("trait T { def f }; object O extends T"))
}
def testUndefinedMember(): Unit = {
val message = this.message("Class", "C", "f: Unit", "Holder.C")
assertMatches(messages("class C { def f }")) {
case Error("C", `message`) :: Nil =>
}
}
def testUndefinedInheritedMember(): Unit = {
val message = this.message("Class", "C", "f: Unit", "Holder.T")
assertMatches(messages("trait T { def f }; class C extends T")) {
case Error("C", `message`) :: Nil =>
}
assertMatches(messages("trait T { def f }; class C extends T {}")) {
case Error("C", `message`) :: Nil =>
}
}
def testNeedsToBeAbstractPlaceDiffer(): Unit = {
val message = this.message("Class", "C", "b: Unit", "Holder.B")
val reversedMessage = this.message("Class", "C", "a: Unit", "Holder.A")
assertMatches(messages("trait A { def a }; trait B { def b }; class C extends A with B {}")) {
case Error("C", `message`) :: Nil =>
case Error("C", `reversedMessage`) :: Nil =>
}
}
def testObjectOverrideDef(): Unit = {
assertMatches(messages("trait A { def a }; class D extends A { object a };")) {
case Nil =>
}
}
override protected def annotate(element: ScTemplateDefinition)
(implicit holder: ScalaAnnotationHolder): Unit =
ScTemplateDefinitionAnnotator.annotateNeedsToBeAbstract(element)
private def message(params: String*) =
ScalaBundle.message("member.implementation.required", params: _*)
}
|
JetBrains/intellij-scala
|
scala/scala-impl/test/org/jetbrains/plugins/scala/annotator/template/NeedsToBeAbstractTest.scala
|
Scala
|
apache-2.0
| 2,413
|
<?php
$strgroup = array(
// exclude i, l, I, 1, 0, O
"sa" => array(
"alias" => "Small Alphabet",
"weight" => 8,
"data" => "abcdefghijkmnopqrstuvwxyz"),
"la" => array(
"alias" => "Large Alphabet",
"weight" => 8,
"data" => "ABCDEFGHJKLMNPQRSTUVWXYZ"),
"num" => array(
"alias" => "Number",
"weight" => 4,
"data" => "2345679"),
"m1" => array(
"alias" => "Marks",
"weight" => 3,
"data" => "!/=+:#,@$-%._"),
);
function make_passwd($sg, $len) {
global $strgroup;
$passwd = "";
for($i = 0; $i < $len; $i++) {
$sg_type = decide_type($sg);
$d = $strgroup[$sg_type]["data"];
$passwd .= decide_char($d);
}
return $passwd;
}
function decide_char($str) {
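// pick one character of $str uniformly at random
// (note: rand() is not a cryptographically secure generator)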
$l = strlen($str);
$r = rand(0, $l - 1);
return $str[$r];
}
function decide_type($sg) {
global $strgroup;
$sg_sz = count($sg);
if($sg_sz == 1) {
return $sg[0];
}
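// weighted choice via rejection sampling: draw a group at random and accept
// it with probability (weight + 1) / 10, otherwise retry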
do {
$sg_idx = rand(0, $sg_sz - 1);
$sg_type = $sg[$sg_idx];
$weight = $strgroup[$sg_type]["weight"];
$t = rand(1, 10);
$t = 10 - $t;
} while($t > $weight);
return $sg_type;
}
$pwdlen = isset($_GET['len']) ? $_GET['len'] : 8;
$make_count = isset($_GET['cnt']) ? $_GET['cnt'] : 3;
if(!isset($_GET['sg']) || !is_array($_GET['sg'])) {
$sg[] = "sa";
} else {
$sg = $_GET['sg'];
}
?>
<html>
<head>
<title>make password</title>
<style type="text/css">
<!--
body {
font-family: monospace;
}
-->
</style>
</head>
<body>
<form action="" method="GET">
<h2>Choose Character Groups</h2>
<table border="1">
<?php
foreach($strgroup as $k => $group) {
printf("<tr>"
.'<td><input type="checkbox" name="sg[]" value="%s" %s></td>'
."<th>%s</th><td>%s</td></tr>\n",
$k, (in_array($k, $sg) ? 'checked="1"' : ''),
$group["alias"], $group["data"]);
}
?>
</table>
How long should each password be? ... <input type="text" name="len" value="<?php echo htmlspecialchars($pwdlen); ?>"><br>
How many passwords should be made? ... <input type="text" name="cnt" value="<?php echo htmlspecialchars($make_count); ?>"><br>
<input type="submit" value="make password">
</form>
<h2>Passwords</h2>
<?php
for($i = 0; $i < $make_count; $i++) {
echo make_passwd($sg, $pwdlen);
echo "<br>\n";
}
?>
</body>
</html>
|
msakamoto-sf/webtoys
|
phptoys/password.php
|
PHP
|
apache-2.0
| 2,238
|
package org.lapanen.stealth.si.process.core;
import java.util.Arrays;
import com.google.common.base.Optional;
public class ProcessRunResultImpl implements ProcessRunResult {
private final Optional<Integer> exitCode;
private final boolean completed;
private final byte[] output;
private final byte[] error;
private final Throwable throwable;
private ProcessRunResultImpl(final Optional<Integer> exitCode, final boolean completed, final byte[] output, final byte[] error, final Throwable throwable) {
this.exitCode = exitCode;
this.completed = completed;
this.output = output;
this.error = error;
this.throwable = throwable;
}
public static ProcessRunResult normalExecution(final int exitCode, byte[] output, byte[] error) {
return new ProcessRunResultImpl(Optional.of(exitCode), true, output, error, null);
}
public static ProcessRunResult abortedExecution() {
return new ProcessRunResultImpl(Optional.<Integer> absent(), false, null, null, null);
}
public static ProcessRunResult exceptionAbortedExecution(final Throwable throwable) {
return new ProcessRunResultImpl(Optional.<Integer>absent(), false, null, null, throwable);
}
@Override
public boolean completed() {
return completed;
}
@Override
public Optional<Integer> getExitCode() {
return exitCode;
}
@Override
public Optional<byte[]> getError() {
return Optional.fromNullable(error);
}
@Override
public Optional<byte[]> getOutput() {
return Optional.fromNullable(output);
}
@Override
public Optional<Throwable> getThrowable() {
return Optional.fromNullable(throwable);
}
@Override
public String toString() {
return "ProcessRunResultImpl{" +
"exitCode=" + exitCode +
", completed=" + completed +
", output=" + output +
", error=" + error +
", throwable=" + throwable +
'}';
}
}
|
lapanen/stealth
|
stealth-si-process/src/main/java/org/lapanen/stealth/si/process/core/ProcessRunResultImpl.java
|
Java
|
apache-2.0
| 2,055
|
package add_docker_metadata
import (
"fmt"
"strings"
"github.com/elastic/beats/libbeat/beat"
"github.com/elastic/beats/libbeat/common"
"github.com/elastic/beats/libbeat/common/cfgwarn"
"github.com/elastic/beats/libbeat/logp"
"github.com/elastic/beats/libbeat/processors"
"github.com/elastic/beats/libbeat/processors/actions"
)
func init() {
processors.RegisterPlugin("add_docker_metadata", newDockerMetadataProcessor)
}
type addDockerMetadata struct {
watcher Watcher
fields []string
sourceProcessor processors.Processor
}
func newDockerMetadataProcessor(cfg *common.Config) (processors.Processor, error) {
return buildDockerMetadataProcessor(cfg, NewWatcher)
}
func buildDockerMetadataProcessor(cfg *common.Config, watcherConstructor WatcherConstructor) (processors.Processor, error) {
cfgwarn.Beta("The add_docker_metadata processor is beta")
config := defaultConfig()
err := cfg.Unpack(&config)
if err != nil {
return nil, fmt.Errorf("fail to unpack the add_docker_metadata configuration: %s", err)
}
watcher, err := watcherConstructor(config.Host, config.TLS)
if err != nil {
return nil, err
}
if err = watcher.Start(); err != nil {
return nil, err
}
// Use extract_field processor to get container id from source file path
var sourceProcessor processors.Processor
if config.MatchSource {
var procConf, _ = common.NewConfigFrom(map[string]interface{}{
"field": "source",
"separator": "/",
"index": config.SourceIndex,
"target": "docker.container.id",
})
sourceProcessor, err = actions.NewExtractField(procConf)
if err != nil {
return nil, err
}
// Ensure `docker.container.id` is matched:
config.Fields = append(config.Fields, "docker.container.id")
}
return &addDockerMetadata{
watcher: watcher,
fields: config.Fields,
sourceProcessor: sourceProcessor,
}, nil
}
func (d *addDockerMetadata) Run(event *beat.Event) (*beat.Event, error) {
var cid string
var err error
// Process source field
if d.sourceProcessor != nil {
if event.Fields["source"] != nil {
event, err = d.sourceProcessor.Run(event)
if err != nil {
logp.Debug("docker", "Error while extracting container ID from source path: %v", err)
return event, nil
}
}
}
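// Look for the container ID in each of the configured fields; if several
// fields hold a string value, the last one wins.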
for _, field := range d.fields {
value, err := event.GetValue(field)
if err != nil {
continue
}
if strValue, ok := value.(string); ok {
cid = strValue
}
}
if cid == "" {
return event, nil
}
container := d.watcher.Container(cid)
if container != nil {
meta := common.MapStr{}
metaIface, ok := event.Fields["docker"]
if ok {
meta = metaIface.(common.MapStr)
}
if len(container.Labels) > 0 {
labels := common.MapStr{}
for k, v := range container.Labels {
labels.Put(k, v)
}
meta.Put("container.labels", labels)
}
meta.Put("container.id", container.ID)
meta.Put("container.image", container.Image)
meta.Put("container.name", container.Name)
event.Fields["docker"] = meta
} else {
logp.Debug("docker", "Container not found: %s", cid)
}
return event, nil
}
func (d *addDockerMetadata) String() string {
return "add_docker_metadata=[fields=" + strings.Join(d.fields, ", ") + "]"
}
|
roncohen/apm-server
|
vendor/github.com/elastic/beats/libbeat/processors/add_docker_metadata/add_docker_metadata.go
|
Go
|
apache-2.0
| 3,233
|
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Top-level presubmit script for V8.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
import sys
def _V8PresubmitChecks(input_api, output_api):
"""Runs the V8 presubmit checks."""
import sys
sys.path.append(input_api.os_path.join(
input_api.PresubmitLocalPath(), 'tools'))
from presubmit import CppLintProcessor
from presubmit import SourceProcessor
from presubmit import CheckGeneratedRuntimeTests
results = []
if not CppLintProcessor().Run(input_api.PresubmitLocalPath()):
results.append(output_api.PresubmitError("C++ lint check failed"))
if not SourceProcessor().Run(input_api.PresubmitLocalPath()):
results.append(output_api.PresubmitError(
"Copyright header, trailing whitespaces and two empty lines " \
"between declarations check failed"))
if not CheckGeneratedRuntimeTests(input_api.PresubmitLocalPath()):
results.append(output_api.PresubmitError(
"Generated runtime tests check failed"))
return results
def _CheckUnwantedDependencies(input_api, output_api):
"""Runs checkdeps on #include statements added in this
change. Breaking - rules is an error, breaking ! rules is a
warning.
"""
# We need to wait until we have an input_api object and use this
# roundabout construct to import checkdeps because this file is
# eval-ed and thus doesn't have __file__.
original_sys_path = sys.path
try:
sys.path = sys.path + [input_api.os_path.join(
input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
import checkdeps
from cpp_checker import CppChecker
from rules import Rule
finally:
# Restore sys.path to what it was before.
sys.path = original_sys_path
added_includes = []
for f in input_api.AffectedFiles():
if not CppChecker.IsCppFile(f.LocalPath()):
continue
changed_lines = [line for line_num, line in f.ChangedContents()]
added_includes.append([f.LocalPath(), changed_lines])
deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())
error_descriptions = []
warning_descriptions = []
for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
added_includes):
description_with_path = '%s\n %s' % (path, rule_description)
if rule_type == Rule.DISALLOW:
error_descriptions.append(description_with_path)
else:
warning_descriptions.append(description_with_path)
results = []
if error_descriptions:
results.append(output_api.PresubmitError(
'You added one or more #includes that violate checkdeps rules.',
error_descriptions))
if warning_descriptions:
results.append(output_api.PresubmitPromptOrNotify(
'You added one or more #includes of files that are temporarily\n'
'allowed but being removed. Can you avoid introducing the\n'
'#include? See relevant DEPS file(s) for details and contacts.',
warning_descriptions))
return results
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
results = []
results.extend(input_api.canned_checks.CheckOwners(
input_api, output_api, source_file_filter=None))
results.extend(_V8PresubmitChecks(input_api, output_api))
results.extend(_CheckUnwantedDependencies(input_api, output_api))
return results
def _SkipTreeCheck(input_api, output_api):
"""Check the env var whether we want to skip tree check.
Only skip if src/version.cc has been updated."""
src_version = 'src/version.cc'
FilterFile = lambda file: file.LocalPath() == src_version
if not input_api.AffectedSourceFiles(FilterFile):
return False
return input_api.environ.get('PRESUBMIT_TREE_CHECK') == 'skip'
def _CheckChangeLogFlag(input_api, output_api):
"""Checks usage of LOG= flag in the commit message."""
results = []
if input_api.change.BUG and not 'LOG' in input_api.change.tags:
results.append(output_api.PresubmitError(
'An issue reference (BUG=) requires a change log flag (LOG=). '
'Use LOG=Y for including this commit message in the change log. '
'Use LOG=N or leave blank otherwise.'))
return results
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
results.extend(_CheckChangeLogFlag(input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
results.extend(_CheckChangeLogFlag(input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasDescription(
input_api, output_api))
if not _SkipTreeCheck(input_api, output_api):
results.extend(input_api.canned_checks.CheckTreeIsOpen(
input_api, output_api,
json_url='http://v8-status.appspot.com/current?format=json'))
return results
def GetPreferredTryMasters(project, change):
return {
'tryserver.v8': {
'v8_linux_rel': set(['defaulttests']),
'v8_linux_dbg': set(['defaulttests']),
'v8_linux_nosnap_rel': set(['defaulttests']),
'v8_linux_nosnap_dbg': set(['defaulttests']),
'v8_linux64_rel': set(['defaulttests']),
'v8_linux_arm_dbg': set(['defaulttests']),
'v8_linux_arm64_rel': set(['defaulttests']),
'v8_linux_layout_dbg': set(['defaulttests']),
'v8_mac_rel': set(['defaulttests']),
'v8_win_rel': set(['defaulttests']),
},
}
|
nextsmsversion/macchina.io
|
platform/JS/V8/v8-3.28.4/PRESUBMIT.py
|
Python
|
apache-2.0
| 7,096
|
# R-Line
An HTML5 promotional campaign for FAW-Volkswagen
|
Kevin-1993/R-Line
|
README.md
|
Markdown
|
apache-2.0
| 33
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.shuffle
import org.apache.spark.annotation.DeveloperApi
/**
* An opaque handle to a shuffle, used by a ShuffleManager to pass information about it to tasks.
* That is, an opaque shuffle handle used by the ShuffleManager to pass task information.
* @param shuffleId ID of the shuffle
*/
@DeveloperApi
abstract class ShuffleHandle(val shuffleId: Int) extends Serializable {}
|
tophua/spark1.52
|
core/src/main/scala/org/apache/spark/shuffle/ShuffleHandle.scala
|
Scala
|
apache-2.0
| 1,195
|
import './scss/root-theme.scss';
|
mschile/terra-core
|
packages/terra-legacy-theme/src/index.js
|
JavaScript
|
apache-2.0
| 33
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
7z x -y windows_package.7z
$env:MXNET_LIBRARY_PATH=join-path $pwd.Path windows_package\lib\libmxnet.dll
$env:PYTHONPATH=join-path $pwd.Path windows_package\python
$env:MXNET_STORAGE_FALLBACK_LOG_VERBOSE=0
$env:MXNET_HOME=[io.path]::combine($PSScriptRoot, 'mxnet_home')
c:\Anaconda3\envs\py3\Scripts\pip install -r tests\requirements.txt
c:\Anaconda3\envs\py3\python.exe -m nose -v --with-timer --timer-ok 1 --timer-warning 15 --timer-filter warning,error --with-xunit --xunit-file nosetests_unittest.xml tests\python\unittest
if (! $?) { Throw ("Error running unittest") }
c:\Anaconda3\envs\py3\python.exe -m nose -v --with-timer --timer-ok 1 --timer-warning 15 --timer-filter warning,error --with-xunit --xunit-file nosetests_operator.xml tests\python\gpu\test_operator_gpu.py
if (! $?) { Throw ("Error running tests") }
c:\Anaconda3\envs\py3\python.exe -m nose -v --with-timer --timer-ok 1 --timer-warning 15 --timer-filter warning,error --with-xunit --xunit-file nosetests_forward.xml tests\python\gpu\test_forward.py
if (! $?) { Throw ("Error running tests") }
c:\Anaconda3\envs\py3\python.exe -m nose -v --with-timer --timer-ok 1 --timer-warning 15 --timer-filter warning,error --with-xunit --xunit-file nosetests_train.xml tests\python\train
if (! $?) { Throw ("Error running tests") }
|
ptrendx/mxnet
|
ci/windows/test_py3_gpu.ps1
|
PowerShell
|
apache-2.0
| 2,080
|
package com.pungwe.cms.core.theme.services;
import com.pungwe.cms.core.theme.ThemeConfig;
import java.net.URL;
import java.util.Collection;
import java.util.Set;
/**
* Created by ian on 29/01/2016.
*/
public interface ThemeConfigService<E extends ThemeConfig> {
/**
* Registers a theme into the theme registry
*
* @param entryPoint the main theme class
* @param themeLocation the jar file in which the theme is located
*/
E registerTheme(Class<?> entryPoint, URL themeLocation);
/**
* Removes themes from the registry. This is generally used to remove themes that are no longer on the
* class path
* @param themes the names of the themes to be removed
*
*/
void removeThemes(String... themes);
/**
* Removes themes from the registry. This is generally used to remove themes that are no longer on the
* class path
* @param themes the names of the themes to be removed
*
*/
void removeThemes(Collection<String> themes);
/**
* Enable / disable a theme
* @param themeName the name of the theme to be enabled / disabled
* @param enabled true if the theme is to be enabled, false otherwise
*/
void setThemeEnabled(String themeName, boolean enabled);
/**
* Check if a theme with the given name is enabled
* @param theme the name of the theme to be checked
* @return true if it's enabled, false otherwise
*/
boolean isEnabled(String theme);
/**
* Return a list of all the installed themes
* @return a list of themes found in the registry
*/
Set<E> listAllThemes();
/**
* Return a list of all enabled themes
* @return a list of enabled themes
*/
Set<E> listEnabledThemes();
/**
* Get the current default theme
* @return The ThemeConfig for the current default theme
*/
E getDefaultTheme();
/**
* Get the current default admin theme (useful if different themes are being used)
* @return The ThemeConfig for the current default admin theme
*/
E getDefaultAdminTheme();
E getTheme(String name);
void setDefaultAdminTheme(String theme);
void setDefaultTheme(String theme);
void setInstalled(String name, boolean b);
}
|
thunderbird/pungwecms
|
core/src/main/java/com/pungwe/cms/core/theme/services/ThemeConfigService.java
|
Java
|
apache-2.0
| 2,111
|
package com.huanggang.dragphotoview;
import android.content.Context;
import android.util.AttributeSet;
import android.view.MotionEvent;
import uk.co.senab.photoview.PhotoView;
import uk.co.senab.photoview.PhotoViewAttacher;
/**
* A PhotoView that can be dragged with a single finger.
*
* @author HuanggGang
*/
public class DragPhotoView extends PhotoView {
private IDragView zoomParentView;
public DragPhotoView(Context context, AttributeSet attr, int defStyle) {
super(context, attr, defStyle);
}
public DragPhotoView(Context context, AttributeSet attr) {
this(context, attr, 0);
}
public DragPhotoView(Context context) {
this(context, null);
}
@Override
public boolean dispatchTouchEvent(MotionEvent event) {
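// While the image is at its base scale (not zoomed), offer the event to the
// parent drag view first; it may consume the gesture.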
if (null != zoomParentView && getScale() == 1) {
boolean result = zoomParentView.dispatchEvent(event);
if (result) {
return result;
}
}
return super.dispatchTouchEvent(event);
}
/**
* Sets the layout that zooms along with the drag gesture.
*
* @param zoomLayout the layout that changes in response to the drag
*/
public void setZoomParentView(IDragView zoomLayout) {
if (null == zoomParentView && null != zoomLayout) {
zoomParentView = zoomLayout;
}
}
/**
* Compatibility override so PhotoView's native single-tap listener can still be set.
*/
@Override
public void setOnViewTapListener(PhotoViewAttacher.OnViewTapListener listener) {
super.setOnViewTapListener(listener);
}
}
|
HuangGangHust/DragPhotoView
|
dragphotoview-library/src/main/java/com/huanggang/dragphotoview/DragPhotoView.java
|
Java
|
apache-2.0
| 1,539
|
package com.hexonxons.leprawatch.util;
import android.content.Context;
import android.graphics.Paint;
import android.graphics.Typeface;
import android.support.v4.util.LruCache;
import android.text.TextPaint;
import android.text.style.MetricAffectingSpan;
/**
* Style a {@link Spannable} with a custom {@link Typeface}.
*
* @author Tristan Waddington
*/
public class TypefaceSpan extends MetricAffectingSpan
{
/**
* An <code>LruCache</code> for previously loaded typefaces.
*/
private static LruCache<String, Typeface> sTypefaceCache = new LruCache<String, Typeface>(12);
private Typeface mTypeface;
/**
* Load the {@link Typeface} and apply to a {@link Spannable}.
*/
public TypefaceSpan(Context context, String typefaceName)
{
mTypeface = sTypefaceCache.get(typefaceName);
if (mTypeface == null)
{
mTypeface = Typeface.createFromAsset(context.getApplicationContext().getAssets(), String.format("fonts/%s", typefaceName));
// Cache the loaded Typeface
sTypefaceCache.put(typefaceName, mTypeface);
}
}
@Override
public void updateMeasureState(TextPaint p)
{
p.setTypeface(mTypeface);
// Note: This flag is required for proper typeface rendering
p.setFlags(p.getFlags() | Paint.SUBPIXEL_TEXT_FLAG);
}
@Override
public void updateDrawState(TextPaint tp)
{
tp.setTypeface(mTypeface);
// Note: This flag is required for proper typeface rendering
tp.setFlags(tp.getFlags() | Paint.SUBPIXEL_TEXT_FLAG);
}
}
|
hexonxons/LepraWatch
|
LepraWatch/src/com/hexonxons/leprawatch/util/TypefaceSpan.java
|
Java
|
apache-2.0
| 1,623
|
//
// Copyright 2011 Kuali Foundation, Inc. Licensed under the
// Educational Community License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may
// obtain a copy of the License at
//
// http://www.opensource.org/licenses/ecl2.php
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an "AS IS"
// BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
// or implied. See the License for the specific language governing
// permissions and limitations under the License.
//
package org.kuali.continuity.plan.domain;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.Embedded;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.PreUpdate;
import javax.persistence.Table;
import javax.persistence.Transient;
import org.hibernate.annotations.NotFound;
import org.hibernate.annotations.NotFoundAction;
import org.hibernate.annotations.Parameter;
import org.hibernate.annotations.Type;
import org.hibernate.annotations.TypeDef;
import org.hibernate.annotations.Where;
import org.kuali.continuity.admin.domain.Dependency;
import org.kuali.continuity.domain.BaseDomainObject;
import org.kuali.continuity.domain.CriticalityLevelEnum;
import org.kuali.continuity.domain.SortByType;
import org.kuali.continuity.domain.ext.StringValuedEnumType;
import org.kuali.continuity.plan.domain.ext.Consequences;
import org.kuali.continuity.plan.domain.ext.PeakMonths;
@Entity
@Table(name="critical_functions")
@TypeDef(
name="stringValueEnum",
typeClass=StringValuedEnumType.class)
@NamedQuery(name="CriticalFunction.list",
query="SELECT dObj FROM CriticalFunction dObj WHERE dObj.plan.id = :ownerId")
@SuppressWarnings("serial")
public class CriticalFunction extends BaseDomainObject {
private ContinuityPlan plan;
private int priority;
private CriticalityLevelEnum criticalityLevelEnum;
// description
private String description;
private String performingUnits;
private String responsiblePersons;
// documents
private List<CriticalFunctionDocument> documents;
// dependencies
private Set<Dependency> dependencies;
// how to cope
private CopingMethod copingMethod = new CopingMethod();
// action items
private List<CriticalFunctionActionItem> actionItems;
// used for mapping only
private Consequences consequencesExt = new Consequences();
private PeakMonths peakMonthsExt = new PeakMonths();
public final static String CF_INSTRUCTION = "Instruction";
public enum SortBy implements SortByType {
id, name, priority;
public String getSortByValue() {return this.name();}
}
public CriticalFunction() {
super();
}
public CriticalFunction(Integer id) {
super(id);
}
@Id
@Column(name="cid")
@GeneratedValue(strategy=GenerationType.AUTO)
public Integer getId() {
return super.getId();
}
public String getName() {
return super.getName();
}
public void setName(String name) {
super.setName(name);
}
@ManyToOne(fetch=FetchType.LAZY, optional=false)
@JoinColumn(name="pid", nullable=false, updatable=false)
public ContinuityPlan getPlan() {
return this.plan;
}
public void setPlan(ContinuityPlan plan) {
this.plan = plan;
}
public int getPriority() {
return this.priority;
}
public void setPriority(int priority) {
this.priority = priority;
}
@Column(name="CriticalityLevel", insertable=false)
@Type(
type="stringValueEnum",
parameters={
@Parameter(
name="enum",
value="org.kuali.continuity.domain.CriticalityLevelEnum"
)})
@NotFound(action=NotFoundAction.IGNORE)
public CriticalityLevelEnum getCriticalityLevelEnum() {
return this.criticalityLevelEnum;
}
public void setCriticalityLevelEnum(CriticalityLevelEnum criticalityLevelEnum) {
this.criticalityLevelEnum = criticalityLevelEnum;
}
@Column(name="description", insertable=false)
public String getDescription() {
return this.description;
}
public void setDescription(String description) {
this.description = description;
}
@Column(name="unit_performs", insertable=false)
public String getPerformingUnits() {
return this.performingUnits;
}
public void setPerformingUnits(String performingUnits) {
this.performingUnits = performingUnits;
}
@Column(name="responsible_person", insertable=false)
public String getResponsiblePersons() {
return this.responsiblePersons;
}
public void setResponsiblePersons(String responsiblePersons) {
this.responsiblePersons = responsiblePersons;
}
@Transient
public Set<Integer> getPeakMonths() {
if (this.getPeakMonthsExt() == null) return null;
return this.getPeakMonthsExt().getPeakMonths();
}
public void setPeakMonths(Set<Integer> peakMonths) {
if (this.getPeakMonthsExt() == null)
this.setPeakMonthsExt(new PeakMonths());
this.getPeakMonthsExt().setPeakMonths(peakMonths);
}
@Transient
public String getPeaksExplanation() {
if (this.getPeakMonthsExt() == null) return null;
return this.getPeakMonthsExt().getPeaksExplanation();
}
public void setPeaksExplanation(String peaksExplanation) {
if (this.getPeakMonthsExt() == null)
this.setPeakMonthsExt(new PeakMonths());
this.getPeakMonthsExt().setPeaksExplanation(peaksExplanation);
}
@OneToMany(fetch=FetchType.LAZY)
@JoinColumn(name="cid", insertable=false, updatable=false)
public List<CriticalFunctionDocument> getDocuments() {
return this.documents;
}
public void setDocuments(List<CriticalFunctionDocument> documents) {
this.documents = documents;
}
@OneToMany(fetch=FetchType.LAZY)
@JoinTable(name="cf_dependency",
joinColumns=@JoinColumn(name="cid"),
inverseJoinColumns=@JoinColumn(name="DependencyID"))
public Set<Dependency> getDependencies() {
return this.dependencies;
}
public void setDependencies(Set<Dependency> dependencies) {
this.dependencies = dependencies;
}
public void addDependency(Dependency dependency) {
Set<Dependency> dSet = this.getDependencies();
if (dSet == null) {
dSet = new HashSet<Dependency>();
this.setDependencies(dSet);
}
dSet.add(dependency);
}
public void removeDependency(Dependency dependency) {
Set<Dependency> dSet = this.getDependencies();
dSet.remove(dependency);
}
@Transient
public Map<Consequence.Type, Consequence> getConsequences() {
if (this.getConsequencesExt() == null) return null;
return this.getConsequencesExt().getConsequences();
}
public void setConsequences(Map<Consequence.Type, Consequence> consequences) {
if (this.getConsequencesExt() == null)
this.setConsequencesExt(new Consequences());
this.getConsequencesExt().setConsequences(consequences);
}
@Embedded
public CopingMethod getCopingMethod() {
return this.copingMethod;
}
public void setCopingMethod(CopingMethod copingMethod) {
this.copingMethod = copingMethod;
}
@OneToMany(fetch=FetchType.LAZY)
@JoinColumn(name="cid", insertable=false, updatable=false)
@Where(clause="disabled != 'Y'")
public List<CriticalFunctionActionItem> getActionItems() {
return this.actionItems;
}
public void setActionItems(List<CriticalFunctionActionItem> actionItems) {
this.actionItems = actionItems;
}
@Embedded
private Consequences getConsequencesExt() {
return this.consequencesExt;
}
private void setConsequencesExt(Consequences consequencesExt) {
this.consequencesExt = consequencesExt;
}
@Embedded
private PeakMonths getPeakMonthsExt() {
return this.peakMonthsExt;
}
private void setPeakMonthsExt(PeakMonths peakMonthsExt) {
this.peakMonthsExt = peakMonthsExt;
}
@PreUpdate
protected void preUpdate() {
if (this.criticalityLevelEnum == null)
this.criticalityLevelEnum = CriticalityLevelEnum.LEVEL0;
}
}
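// A minimal usage sketch (illustrative, not part of the original source);
// it assumes an already-persisted ContinuityPlan named `plan` and shows how
// the @Transient peak-month accessors delegate to the embedded PeakMonths
// extension object:
//
//   CriticalFunction cf = new CriticalFunction();
//   cf.setName("Payroll processing");
//   cf.setPlan(plan);
//   cf.setPeakMonths(new HashSet<>(Arrays.asList(1, 12)));
//   cf.setPeaksExplanation("Year-end close");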
|
Ariah-Group/Continuity
|
src/main/java/org/kuali/continuity/plan/domain/CriticalFunction.java
|
Java
|
apache-2.0
| 8,154
|
<div class="row">
{% for speaker in site.speakers %}
{% if speaker.narrator %}
<div class="col-sm-6">
<div class="narrator">
<div class="row">
<div class="col-sm-3">
<img src='/assets/img/{{ speaker.image }}' width="100%"/>
</div>
<div class="col-sm-9">
<div class="narrator-name">{{ speaker.name }}</div>
<div class="narrator-blurb">{% capture x %}
{{ speaker.description }}
{% endcapture %}{{ x | markdownify }}
</div>
<div class="narrator-meta">{% if speaker.github %}<a href='https://github.com/{{ speaker.github }}'>Github </a> · {% endif %}{% if speaker.twitter %}<a href='https://twitter.com/{{ speaker.twitter }}'>Twitter </a> · {% endif %}{% if speaker.website %}<a href='{{ speaker.website }}'>Website </a>{% endif %}</div>
</div>
</div>
</div>
</div>
{% endif %}
{% endfor %}
</div>
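<!--
  Hypothetical example of the data this include expects (not from the
  original repo). Each entry in site.speakers needs `name`, `image`, and
  `description`; `narrator: true` opts the speaker into this grid, and
  `github`/`twitter`/`website` are optional link fields:

  speakers:
    - name: Jane Doe
      narrator: true
      image: jane.jpg
      description: "Compiler engineer and *occasional* MC."
      github: janedoe
      website: https://example.com
-->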
|
hackference/hackference-2016
|
_includes/narrators.html
|
HTML
|
apache-2.0
| 878
|
package edu.umt.csci427.canary;
/**
 * Constants used when requesting data from the abstract factory.
 */
public class DeviceMonitor {
public static final String PulseOximeter = "PULSE_OX";
}
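// Hypothetical usage (not from the original project): the constant is
// presumably a key handed to a monitor factory, e.g.
//
//   Monitor m = MonitorFactory.create(DeviceMonitor.PulseOximeter);
//
// `Monitor` and `MonitorFactory` are assumed names for illustration.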
|
csci427/canary
|
app/src/main/java/edu/umt/csci427/canary/DeviceMonitor.java
|
Java
|
apache-2.0
| 174
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600j.v3
import uk.gov.hmrc.ct.accounts.frsse2008.retriever.Frsse2008AccountsBoxRetriever
import uk.gov.hmrc.ct.box.retriever.FilingAttributesBoxValueRetriever
import uk.gov.hmrc.ct.ct600.v3.retriever.CT600BoxRetriever
import uk.gov.hmrc.ct.ct600j.v3.retriever.CT600JBoxRetriever
trait TaxAvoidanceBoxRetrieverForTest extends CT600BoxRetriever with CT600JBoxRetriever with Frsse2008AccountsBoxRetriever with FilingAttributesBoxValueRetriever
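// The one-liner above stacks four box retrievers into a single test fixture
// via Scala mixin composition. A minimal sketch of the same pattern, with
// illustrative trait names that are not from this repo:
//
//   trait UserBoxRetriever { def user: String }
//   trait DateBoxRetriever { def date: String }
//   trait CombinedRetrieverForTest extends UserBoxRetriever with DateBoxRetriever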
|
hmrc/ct-calculations
|
src/test/scala/uk/gov/hmrc/ct/ct600j/v3/TaxAvoidanceBoxRetrieverForTest.scala
|
Scala
|
apache-2.0
| 1,073
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Promise from 'bluebird';
import BaseAdapter from './BaseAdapter';
import Config from 'config.json';
let baseUrl = Config.baseURL;
let urls = {
getDatabases: 'metastore/databases',
setDatabases: 'metastore/databases/current',
getCubes: 'metastore/cubes',
query: 'queryapi/queries', // POST on this to execute, GET to fetch all
getTables: 'metastore/nativetables',
getSavedQueries: 'queryapi/savedqueries',
parameters: 'queryapi/savedqueries/parameters',
saveQuery: 'queryapi/savedqueries', // POST to save, PUT to update, {id} for GET
runSavedQuery: 'queryapi/savedqueries'
};
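// Illustrative only (not part of the original adapter): each endpoint above
// composes with `baseUrl` and a session id into a full request URL, e.g.
//
//   let url = baseUrl + urls.getCubes + '?sessionid=' + secretToken;
//   // => "https://lens.example.com/metastore/cubes?sessionid=abc123"
//
// The host name shown is a made-up example; baseUrl comes from config.json.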
let AdhocQueryAdapter = {
getDatabases (secretToken) {
let url = baseUrl + urls.getDatabases;
return BaseAdapter.get(url + '?sessionid=' + secretToken);
},
setDatabase (secretToken, database) {
let url = baseUrl + urls.setDatabases;
return BaseAdapter.put(url + '?sessionid=' + secretToken, database, {
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json'
}
});
},
getCubes (secretToken) {
let url = baseUrl + urls.getCubes;
let postURL = "?";
if (Config.cubes_type) {
postURL += "type=" + Config.cubes_type + "&"
}
postURL += "sessionid=" + secretToken;
return BaseAdapter.get(url + postURL);
},
getCubeDetails (secretToken, cubeName) {
let url = baseUrl + urls.getCubes + '/' + cubeName;
return BaseAdapter.get(url + '?sessionid=' + secretToken);
},
executeQuery (secretToken, query, queryName) {
let url = baseUrl + urls.query;
let formData = new FormData();
formData.append('sessionid', secretToken);
formData.append('query', query);
formData.append('operation', 'EXECUTE');
formData.append('conf',
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?><conf></conf>');
if (queryName) formData.append('queryName', queryName);
return BaseAdapter.post(url, formData, {
headers: {
'Accept': 'application/json'
}
});
},
saveQuery (secretToken, user, query, options) {
let queryToBeSaved = {
savedQuery: {
name: options.name || '',
query: query,
description: options.description || '',
parameters: options.parameters || []
}
};
let url = baseUrl + urls.saveQuery + '?sessionid=' + secretToken;
return BaseAdapter.post(url, JSON.stringify(queryToBeSaved), {
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json'
}
});
},
updateSavedQuery (secretToken, user, query, options, id) {
let url = baseUrl + urls.saveQuery + '/' + id;
let queryToBeSaved = {
savedQuery: {
owner: user,
name: options.name || '',
query: query,
description: options.description || '',
parameters: options.parameters || []
}
};
return BaseAdapter.put(url, JSON.stringify(queryToBeSaved), {
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json'
}
});
},
getQuery (secretToken, handle) {
let url = baseUrl + urls.query + '/' + handle;
return BaseAdapter.get(url + '?sessionid=' + secretToken);
},
getQueries (secretToken, email, options) {
    // `sessionid` and `user` are passed directly in the URL below.
    let state;
    if (options && options.state) {
      state = options.state.toUpperCase();
    }
let handlesUrl = baseUrl + urls.query + '?sessionid=' + secretToken + '&user=' +
email;
if (state) handlesUrl += '&state=' + state;
return BaseAdapter.get(handlesUrl)
.then(function (queryHandles) {
// FIXME limiting to 10 for now
// let handles = queryHandles.slice(0, 10);
return Promise.all(queryHandles.map((q) => {
let queryUrl = baseUrl + urls.query + '/' + q.queryHandle.handleId +
'?sessionid=' + secretToken + '&queryHandle=' + q.queryHandle.handleId;
return BaseAdapter.get(queryUrl);
}));
});
},
getQueryResult (secretToken, handle, queryMode) {
// on page refresh, the store won't have queryMode so fetch query
// this is needed as we won't know in which mode the query was fired
    if (!queryMode) {
      // Return the chained promise; without `return` the caller would
      // receive undefined instead of the eventual result.
      return this.getQuery(secretToken, handle).then((query) => {
        queryMode = query.isPersistent ? 'PERSISTENT' : 'INMEMORY';
        return this._inMemoryOrPersistent(secretToken, handle, queryMode);
      });
} else {
return this._inMemoryOrPersistent(secretToken, handle, queryMode);
}
},
_inMemoryOrPersistent (secretToken, handle, queryMode) {
return queryMode === 'PERSISTENT' ?
this.getDownloadURL(secretToken, handle) :
this.getInMemoryResults(secretToken, handle);
},
getTables (secretToken, database) {
let url = baseUrl + urls.getTables;
return BaseAdapter.get(url + '?sessionid=' + secretToken + '&dbName=' + database);
},
getTableDetails (secretToken, tableName, database) {
let url = baseUrl + urls.getTables + '/' + database + '.' + tableName;
return BaseAdapter.get(url + '?sessionid=' + secretToken);
},
cancelQuery (secretToken, handle) {
let url = baseUrl + urls.query + '/' + handle + '?sessionid=' + secretToken;
return BaseAdapter.delete(url);
},
getDownloadURL (secretToken, handle) {
let downloadURL = baseUrl + urls.query + '/' + handle +
'/httpresultset?sessionid=' + secretToken;
return Promise.resolve(downloadURL);
},
getSavedQueryById (secretToken, id) {
let url = baseUrl + urls.saveQuery + '/' + id;
return BaseAdapter.get(url + '?sessionid=' + secretToken);
},
getInMemoryResults (secretToken, handle) {
let resultUrl = baseUrl + urls.query + '/' + handle + '/resultset';
let results = BaseAdapter.get(resultUrl + '?sessionid=' + secretToken);
let metaUrl = baseUrl + urls.query + '/' + handle + '/resultsetmetadata';
let meta = BaseAdapter.get(metaUrl + '?sessionid=' + secretToken);
return Promise.all([results, meta]);
},
getSavedQueries (secretToken, user, options = {}) {
let url = baseUrl + urls.getSavedQueries;
return BaseAdapter.get(
url + '?user=' + user + '&sessionid=' + secretToken + '&start=' +
(options.offset || 0) + '&count=' + (options.pageSize || 10)
);
},
getParams (secretToken, query) {
let url = baseUrl + urls.parameters + '?sessionid=' + secretToken;
let formData = new FormData();
formData.append('query', query);
return BaseAdapter.post(url, formData);
},
runSavedQuery (secretToken, id, params) {
let queryParamString = BaseAdapter.jsonToQueryParams(params);
let url = baseUrl + urls.runSavedQuery + '/' + id + queryParamString;
let formData = new FormData();
formData.append('sessionid', secretToken);
formData.append('conf',
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?><conf></conf>');
return BaseAdapter.post(url, formData);
}
};
export default AdhocQueryAdapter;
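// A usage sketch (not from the original source); `secretToken` is assumed to
// come from a prior session login, and the shape of the execute response is
// an assumption based on how getQueries reads `q.queryHandle.handleId`:
//
//   AdhocQueryAdapter.executeQuery(secretToken, 'cube select ...', 'demo')
//     .then((q) => AdhocQueryAdapter.getQuery(secretToken, q.queryHandle.handleId))
//     .then((query) => console.log(query.status));
//
// `query.status` is likewise illustrative.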
|
RaghavendraSingh/lens
|
lens-ui/app/adapters/AdhocQueryAdapter.js
|
JavaScript
|
apache-2.0
| 7,880
|
// Copyright 2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "database/src/android/disconnection_android.h"
#include <assert.h>
#include <jni.h>
#include "app/src/future_manager.h"
#include "app/src/util_android.h"
#include "database/src/android/database_android.h"
#include "database/src/android/util_android.h"
#include "database/src/common/database_reference.h"
#include "database/src/include/firebase/database/disconnection.h"
namespace firebase {
namespace database {
namespace internal {
// clang-format off
#define ON_DISCONNECT_METHODS(X) \
X(SetValue, "setValue", \
"(Ljava/lang/Object;)Lcom/google/android/gms/tasks/Task;"), \
X(SetValueAndStringPriority, "setValue", \
"(Ljava/lang/Object;Ljava/lang/String;)" \
"Lcom/google/android/gms/tasks/Task;"), \
X(SetValueAndDoublePriority, "setValue", \
"(Ljava/lang/Object;D)Lcom/google/android/gms/tasks/Task;"), \
X(UpdateChildren, "updateChildren", \
"(Ljava/util/Map;)Lcom/google/android/gms/tasks/Task;"), \
X(RemoveValue, "removeValue", "()Lcom/google/android/gms/tasks/Task;"), \
X(Cancel, "cancel", "()Lcom/google/android/gms/tasks/Task;")
// clang-format on
METHOD_LOOKUP_DECLARATION(on_disconnect, ON_DISCONNECT_METHODS)
METHOD_LOOKUP_DEFINITION(on_disconnect,
PROGUARD_KEEP_CLASS
"com/google/firebase/database/OnDisconnect",
ON_DISCONNECT_METHODS)
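// Illustrative note (not from the original source): the X-macro list above is
// expanded twice -- once by METHOD_LOOKUP_DECLARATION into an enum of method
// ids (on_disconnect::kSetValue, on_disconnect::kCancel, ...) and once by
// METHOD_LOOKUP_DEFINITION to cache each method's JNI id on the OnDisconnect
// class. Conceptually, each X(Name, "javaName", "signature") row yields
// roughly:
//
//   // enum entry:   on_disconnect::kName
//   // cached id:    env->GetMethodID(cls, "javaName", "signature")
//
// The exact generated code depends on the macros in app/src/util_android.h.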
enum DisconnectionHandlerFn {
kDisconnectionHandlerFnCancel = 0,
kDisconnectionHandlerFnRemoveValue,
kDisconnectionHandlerFnSetValue,
kDisconnectionHandlerFnSetValueAndPriority,
kDisconnectionHandlerFnUpdateChildren,
kDisconnectionHandlerFnCount,
};
DisconnectionHandlerInternal::DisconnectionHandlerInternal(DatabaseInternal* db,
jobject obj)
: db_(db) {
obj_ = db_->GetApp()->GetJNIEnv()->NewGlobalRef(obj);
db_->future_manager().AllocFutureApi(this, kDisconnectionHandlerFnCount);
}
DisconnectionHandlerInternal::~DisconnectionHandlerInternal() {
if (obj_ != nullptr) {
db_->GetApp()->GetJNIEnv()->DeleteGlobalRef(obj_);
obj_ = nullptr;
}
db_->future_manager().ReleaseFutureApi(this);
}
bool DisconnectionHandlerInternal::Initialize(App* app) {
JNIEnv* env = app->GetJNIEnv();
jobject activity = app->activity();
return on_disconnect::CacheMethodIds(env, activity);
}
void DisconnectionHandlerInternal::Terminate(App* app) {
JNIEnv* env = app->GetJNIEnv();
on_disconnect::ReleaseClass(env);
util::CheckAndClearJniExceptions(env);
}
namespace {
struct FutureCallbackData {
FutureCallbackData(SafeFutureHandle<void> handle_,
ReferenceCountedFutureImpl* impl_, DatabaseInternal* db_)
: handle(handle_), impl(impl_), db(db_) {}
SafeFutureHandle<void> handle;
ReferenceCountedFutureImpl* impl;
DatabaseInternal* db;
};
void FutureCallback(JNIEnv* env, jobject result, util::FutureResult result_code,
const char* status_message, void* callback_data) {
int status = 0; // TODO(140207379): populate with proper status code
FutureCallbackData* data =
reinterpret_cast<FutureCallbackData*>(callback_data);
if (data != nullptr) {
data->impl->Complete(
data->handle,
data->db->ErrorFromResultAndErrorCode(result_code, status),
status_message);
delete data;
}
}
} // namespace
Future<void> DisconnectionHandlerInternal::Cancel() {
SafeFutureHandle<void> handle =
future()->SafeAlloc<void>(kDisconnectionHandlerFnCancel);
JNIEnv* env = db_->GetApp()->GetJNIEnv();
jobject task = env->CallObjectMethod(
obj_, on_disconnect::GetMethodId(on_disconnect::kCancel));
util::RegisterCallbackOnTask(
env, task, FutureCallback,
// FutureCallback will delete the newed FutureCallbackData.
reinterpret_cast<void*>(new FutureCallbackData(handle, future(), db_)),
kApiIdentifier);
util::CheckAndClearJniExceptions(env);
env->DeleteLocalRef(task);
return MakeFuture(future(), handle);
}
Future<void> DisconnectionHandlerInternal::CancelLastResult() {
return static_cast<const Future<void>&>(
future()->LastResult(kDisconnectionHandlerFnCancel));
}
Future<void> DisconnectionHandlerInternal::RemoveValue() {
SafeFutureHandle<void> handle =
future()->SafeAlloc<void>(kDisconnectionHandlerFnRemoveValue);
JNIEnv* env = db_->GetApp()->GetJNIEnv();
jobject task = env->CallObjectMethod(
obj_, on_disconnect::GetMethodId(on_disconnect::kRemoveValue));
util::RegisterCallbackOnTask(
env, task, FutureCallback,
// FutureCallback will delete the newed FutureCallbackData.
reinterpret_cast<void*>(new FutureCallbackData(handle, future(), db_)),
kApiIdentifier);
  util::CheckAndClearJniExceptions(env);
  env->DeleteLocalRef(task);  // Release the local ref, as Cancel() and SetValue() do.
return MakeFuture(future(), handle);
}
Future<void> DisconnectionHandlerInternal::RemoveValueLastResult() {
return static_cast<const Future<void>&>(
future()->LastResult(kDisconnectionHandlerFnRemoveValue));
}
Future<void> DisconnectionHandlerInternal::SetValue(Variant value) {
SafeFutureHandle<void> handle =
future()->SafeAlloc<void>(kDisconnectionHandlerFnSetValue);
if (SetValueAndPriorityLastResult().status() == kFutureStatusPending) {
future()->Complete(handle, kErrorConflictingOperationInProgress,
kErrorMsgConflictSetValue);
} else {
JNIEnv* env = db_->GetApp()->GetJNIEnv();
jobject value_obj = internal::VariantToJavaObject(env, value);
jobject task = env->CallObjectMethod(
obj_, on_disconnect::GetMethodId(on_disconnect::kSetValue), value_obj);
util::RegisterCallbackOnTask(
env, task, FutureCallback,
// FutureCallback will delete the newed FutureCallbackData.
reinterpret_cast<void*>(new FutureCallbackData(handle, future(), db_)),
kApiIdentifier);
util::CheckAndClearJniExceptions(env);
env->DeleteLocalRef(task);
if (value_obj) env->DeleteLocalRef(value_obj);
}
return MakeFuture(future(), handle);
}
Future<void> DisconnectionHandlerInternal::SetValueLastResult() {
return static_cast<const Future<void>&>(
future()->LastResult(kDisconnectionHandlerFnSetValue));
}
Future<void> DisconnectionHandlerInternal::SetValueAndPriority(
Variant value, Variant priority) {
SafeFutureHandle<void> handle =
future()->SafeAlloc<void>(kDisconnectionHandlerFnSetValueAndPriority);
if (SetValueLastResult().status() == kFutureStatusPending) {
future()->Complete(handle, kErrorConflictingOperationInProgress,
kErrorMsgConflictSetValue);
} else if (!IsValidPriority(priority)) {
future()->Complete(handle, kErrorInvalidVariantType,
kErrorMsgInvalidVariantForPriority);
} else {
JNIEnv* env = db_->GetApp()->GetJNIEnv();
jobject value_obj = internal::VariantToJavaObject(env, value);
jobject task;
if (priority.is_string()) {
jobject priority_obj = internal::VariantToJavaObject(env, priority);
task = env->CallObjectMethod(
obj_,
on_disconnect::GetMethodId(on_disconnect::kSetValueAndStringPriority),
value_obj, priority_obj);
env->DeleteLocalRef(priority_obj);
} else {
task = env->CallObjectMethod(
obj_,
on_disconnect::GetMethodId(on_disconnect::kSetValueAndDoublePriority),
value_obj, priority.AsDouble().double_value());
}
util::CheckAndClearJniExceptions(env);
util::RegisterCallbackOnTask(
env, task, FutureCallback,
// FutureCallback will delete the newed FutureCallbackData.
reinterpret_cast<void*>(new FutureCallbackData(handle, future(), db_)),
kApiIdentifier);
env->DeleteLocalRef(task);
if (value_obj) env->DeleteLocalRef(value_obj);
}
return MakeFuture(future(), handle);
}
Future<void> DisconnectionHandlerInternal::SetValueAndPriorityLastResult() {
return static_cast<const Future<void>&>(
future()->LastResult(kDisconnectionHandlerFnSetValueAndPriority));
}
Future<void> DisconnectionHandlerInternal::UpdateChildren(Variant values) {
SafeFutureHandle<void> handle =
future()->SafeAlloc<void>(kDisconnectionHandlerFnUpdateChildren);
if (!values.is_map()) {
future()->Complete(handle, kErrorInvalidVariantType,
kErrorMsgInvalidVariantForUpdateChildren);
} else {
JNIEnv* env = db_->GetApp()->GetJNIEnv();
jobject values_obj = internal::VariantToJavaObject(env, values);
jobject task = env->CallObjectMethod(
obj_, on_disconnect::GetMethodId(on_disconnect::kUpdateChildren),
values_obj);
util::CheckAndClearJniExceptions(env);
util::RegisterCallbackOnTask(
env, task, FutureCallback,
// FutureCallback will delete the newed FutureCallbackData.
reinterpret_cast<void*>(new FutureCallbackData(handle, future(), db_)),
kApiIdentifier);
env->DeleteLocalRef(task);
if (values_obj) env->DeleteLocalRef(values_obj);
}
return MakeFuture(future(), handle);
}
Future<void> DisconnectionHandlerInternal::UpdateChildrenLastResult() {
return static_cast<const Future<void>&>(
future()->LastResult(kDisconnectionHandlerFnUpdateChildren));
}
ReferenceCountedFutureImpl* DisconnectionHandlerInternal::future() {
return db_->future_manager().GetFutureApi(this);
}
} // namespace internal
} // namespace database
} // namespace firebase
|
firebase/firebase-cpp-sdk
|
database/src/android/disconnection_android.cc
|
C++
|
apache-2.0
| 10,329
|
package cdn
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)
// SetFileCacheExpiredConfig invokes the cdn.SetFileCacheExpiredConfig API synchronously
// api document: https://help.aliyun.com/api/cdn/setfilecacheexpiredconfig.html
func (client *Client) SetFileCacheExpiredConfig(request *SetFileCacheExpiredConfigRequest) (response *SetFileCacheExpiredConfigResponse, err error) {
response = CreateSetFileCacheExpiredConfigResponse()
err = client.DoAction(request, response)
return
}
// SetFileCacheExpiredConfigWithChan invokes the cdn.SetFileCacheExpiredConfig API asynchronously
// api document: https://help.aliyun.com/api/cdn/setfilecacheexpiredconfig.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) SetFileCacheExpiredConfigWithChan(request *SetFileCacheExpiredConfigRequest) (<-chan *SetFileCacheExpiredConfigResponse, <-chan error) {
responseChan := make(chan *SetFileCacheExpiredConfigResponse, 1)
errChan := make(chan error, 1)
err := client.AddAsyncTask(func() {
defer close(responseChan)
defer close(errChan)
response, err := client.SetFileCacheExpiredConfig(request)
if err != nil {
errChan <- err
} else {
responseChan <- response
}
})
if err != nil {
errChan <- err
close(responseChan)
close(errChan)
}
return responseChan, errChan
}
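// Illustrative usage of the channel-based variant above (not part of the
// generated SDK; client and request construction are elided):
//
//	respChan, errChan := client.SetFileCacheExpiredConfigWithChan(request)
//	select {
//	case resp := <-respChan:
//		fmt.Println(resp.RequestId)
//	case err := <-errChan:
//		log.Println(err)
//	}
//
// Exactly one of the two channels receives a value before both are closed.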
// SetFileCacheExpiredConfigWithCallback invokes the cdn.SetFileCacheExpiredConfig API asynchronously
// api document: https://help.aliyun.com/api/cdn/setfilecacheexpiredconfig.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) SetFileCacheExpiredConfigWithCallback(request *SetFileCacheExpiredConfigRequest, callback func(response *SetFileCacheExpiredConfigResponse, err error)) <-chan int {
result := make(chan int, 1)
err := client.AddAsyncTask(func() {
var response *SetFileCacheExpiredConfigResponse
var err error
defer close(result)
response, err = client.SetFileCacheExpiredConfig(request)
callback(response, err)
result <- 1
})
if err != nil {
defer close(result)
callback(nil, err)
result <- 0
}
return result
}
// SetFileCacheExpiredConfigRequest is the request struct for api SetFileCacheExpiredConfig
type SetFileCacheExpiredConfigRequest struct {
*requests.RpcRequest
OwnerId requests.Integer `position:"Query" name:"OwnerId"`
SecurityToken string `position:"Query" name:"SecurityToken"`
DomainName string `position:"Query" name:"DomainName"`
CacheContent string `position:"Query" name:"CacheContent"`
TTL string `position:"Query" name:"TTL"`
Weight string `position:"Query" name:"Weight"`
}
// SetFileCacheExpiredConfigResponse is the response struct for api SetFileCacheExpiredConfig
type SetFileCacheExpiredConfigResponse struct {
*responses.BaseResponse
RequestId string `json:"RequestId" xml:"RequestId"`
}
// CreateSetFileCacheExpiredConfigRequest creates a request to invoke SetFileCacheExpiredConfig API
func CreateSetFileCacheExpiredConfigRequest() (request *SetFileCacheExpiredConfigRequest) {
request = &SetFileCacheExpiredConfigRequest{
RpcRequest: &requests.RpcRequest{},
}
request.InitWithApiInfo("Cdn", "2014-11-11", "SetFileCacheExpiredConfig", "", "")
return
}
// CreateSetFileCacheExpiredConfigResponse creates a response to parse from SetFileCacheExpiredConfig response
func CreateSetFileCacheExpiredConfigResponse() (response *SetFileCacheExpiredConfigResponse) {
response = &SetFileCacheExpiredConfigResponse{
BaseResponse: &responses.BaseResponse{},
}
return
}
|
xiaozhu36/terraform-provider
|
vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/cdn/set_file_cache_expired_config.go
|
Go
|
apache-2.0
| 4,375
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.lang.injection;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.NotNullLazyKey;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.FileViewProvider;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiLanguageInjectionHost;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.util.List;
public abstract class InjectedLanguageManager {
public static InjectedLanguageManager getInstance(Project project) {
return INSTANCE_CACHE.getValue(project);
}
protected static final NotNullLazyKey<InjectedLanguageManager, Project> INSTANCE_CACHE = ServiceManager.createLazyKey(InjectedLanguageManager.class);
public static final Key<Boolean> FRANKENSTEIN_INJECTION = Key.create("FRANKENSTEIN_INJECTION");
public abstract PsiLanguageInjectionHost getInjectionHost(@NotNull FileViewProvider provider);
@Nullable
public abstract PsiLanguageInjectionHost getInjectionHost(@NotNull PsiElement element);
@NotNull
public abstract TextRange injectedToHost(@NotNull PsiElement injectedContext, @NotNull TextRange injectedTextRange);
public abstract int injectedToHost(@NotNull PsiElement injectedContext, int injectedOffset);
/**
* @deprecated use {@link MultiHostInjector#MULTIHOST_INJECTOR_EP_NAME extension point} for production and
* {@link #registerMultiHostInjector(MultiHostInjector, Disposable)} for tests
*/
@Deprecated
public abstract void registerMultiHostInjector(@NotNull MultiHostInjector injector);
@TestOnly
public abstract void registerMultiHostInjector(@NotNull MultiHostInjector injector, @NotNull Disposable parentDisposable);
public abstract String getUnescapedText(@NotNull PsiElement injectedNode);
@NotNull
public abstract List<TextRange> intersectWithAllEditableFragments(@NotNull PsiFile injectedPsi, @NotNull TextRange rangeToEdit);
public abstract boolean isInjectedFragment(@NotNull PsiFile file);
/**
* Finds PSI element in injected fragment (if any) at the given offset in the host file.<p/>
* E.g. if you injected XML {@code "<xxx/>"} into Java string literal {@code "String s = "<xxx/>";"} and the caret is at {@code "xxx"} then
* this method will return XmlToken(XML_TAG_START) with the text {@code "xxx"}.<br/>
* Invocation of this method on uncommitted {@code hostFile} can lead to unexpected results, including throwing an exception!
*/
@Nullable
public abstract PsiElement findInjectedElementAt(@NotNull PsiFile hostFile, int hostDocumentOffset);
@Nullable
public abstract List<Pair<PsiElement, TextRange>> getInjectedPsiFiles(@NotNull PsiElement host);
public abstract void dropFileCaches(@NotNull PsiFile file);
public abstract PsiFile getTopLevelFile(@NotNull PsiElement element);
@NotNull
public abstract List<DocumentWindow> getCachedInjectedDocumentsInRange(@NotNull PsiFile hostPsiFile, @NotNull TextRange range);
public abstract void startRunInjectorsInRange(@NotNull Document hostDocument,
@NotNull TextRange range,
boolean synchronously);
public abstract void enumerate(@NotNull PsiElement host, @NotNull PsiLanguageInjectionHost.InjectedPsiVisitor visitor);
public abstract void enumerateEx(@NotNull PsiElement host, @NotNull PsiFile containingFile, boolean probeUp, @NotNull PsiLanguageInjectionHost.InjectedPsiVisitor visitor);
/**
* @return the ranges in this document window that correspond to prefix/suffix injected text fragments and thus can't be edited and are not visible in the editor.
*/
@NotNull
public abstract List<TextRange> getNonEditableFragments(@NotNull DocumentWindow window);
/**
* This method can be invoked on an uncommitted document, before performing commit and using other methods here
* (which don't work for uncommitted document).
*/
public abstract boolean mightHaveInjectedFragmentAtOffset(@NotNull Document hostDocument, int hostOffset);
@NotNull
public abstract DocumentWindow freezeWindow(@NotNull DocumentWindow document);
}
|
ThiagoGarciaAlves/intellij-community
|
platform/core-api/src/com/intellij/lang/injection/InjectedLanguageManager.java
|
Java
|
apache-2.0
| 5,097
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Converts the serialized examples to TFRecords for putting into a model."""
# TODO(alanesuhr): Factor out what should be in a lib and what should be in a
# binary.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import json
import os
import random
from absl import app
from absl import flags
import apache_beam as beam
from language.xsp.data_preprocessing.nl_to_sql_example import NLToSQLExample
from language.xsp.model.model_config import load_config
import tensorflow.compat.v1 as tf
FLAGS = flags.FLAGS
flags.DEFINE_string('examples_dir', '',
'The directory containing the examples.')
flags.DEFINE_list('filenames', None,
'The list of files to process containing NLToSQLExamples.')
flags.DEFINE_string('config', '', 'The path to a model config file.')
flags.DEFINE_string('tf_examples_dir', '',
'The location to put the Tensorflow examples.')
flags.DEFINE_string('output_vocab', '',
'The location of the output vocabulary.')
flags.DEFINE_bool('permute', False, 'Whether to permute the train schemas.')
flags.DEFINE_bool('generate_output', False,
'Whether to generate output sequences.')
flags.DEFINE_integer(
'num_spider_repeats', 7,
'The number of times to permute the Spider data tables (for train only).')
BEG_TOK = '[CLS]'
SEP_TOK = '[SEP]'
TAB_TOK = '[TAB]'
UNK_TOK = '[UNK]'
GENERATE_TYPE = 1
COPY_TYPE = 2
COL_TYPE_TO_TOK = {
'text': '[STR_COL]',
'number': '[NUM_COL]',
'others': '[OTH_COL]',
'time': '[TIME_COL]',
'boolean': '[BOOL_COL]',
}
class InputToken(
collections.namedtuple('InputToken', [
'wordpiece', 'index', 'copy_mask', 'segment_id',
'indicates_foreign_key', 'aligned'
])):
pass
class OutputAction(
collections.namedtuple('OutputAction', ['wordpiece', 'action_id', 'type'])):
pass
def add_context(key):
"""Adds context features required by the model."""
features = dict()
features['language'] = tf.train.Feature(
bytes_list=tf.train.BytesList(value=[b'en']))
features['region'] = tf.train.Feature(
bytes_list=tf.train.BytesList(value=[b'US_eng']))
features['type'] = tf.train.Feature(int64_list=tf.train.Int64List(value=[1]))
features['weight'] = tf.train.Feature(
float_list=tf.train.FloatList(value=[1.0]))
features['tag'] = tf.train.Feature(
bytes_list=tf.train.BytesList(value=[b'all']))
features['key'] = tf.train.Feature(
bytes_list=tf.train.BytesList(value=[key.encode('utf8')]))
return features
class ConvertToSequenceExampleDoFn(beam.DoFn):
"""DoFn for converting from NLToSQLExample to a TFRecord."""
def __init__(self, model_config, generate_output, permute, num_repeats,
*unused_args, **unused_kwargs):
self.model_config = model_config
self.input_vocabulary = None
self.output_vocabulary = None
self.permute = permute
self.num_repeats = num_repeats
if not self.permute and self.num_repeats > 1:
raise ValueError('Not permuting but num_repeats = ' +
str(self.num_repeats))
# This cache maps from a proto representing a schema to its string
# equivalent
# (NOTE: this assumes there's no randomness in the order of the tables,
# cols, etc.)
self.table_cache = dict()
self.generate_output = generate_output
def non_parallel_process(self, example):
# Load cache
if not self.input_vocabulary:
with tf.gfile.Open(
self.model_config.data_options.bert_vocab_path) as infile:
self.input_vocabulary = [
line.rstrip('\n') for line in infile.readlines()
]
if not self.output_vocabulary:
with tf.gfile.Open(FLAGS.output_vocab) as infile:
self.output_vocabulary = [
line.replace('\n', '', 1) for line in infile.readlines()
]
results = list()
for _ in range(self.num_repeats):
# Convert the input to an indexed sequence
input_conversion = self._convert_input_to_indexed_sequence(
example.model_input, random_permutation=self.permute)
if input_conversion is None:
return None
      # input_tokens stores the raw wordpieces, their vocabulary indices, and
      # whether each one is copiable.
# The maps store tuples of table or column entities paired with their head
# index in input_tokens
input_tokens, table_index_map, column_index_map, base_idx = input_conversion
# Convert the output to an indexed sequence
output_actions = list()
if self.generate_output:
output_actions = self._convert_output_to_indexed_sequence(
example, table_index_map, column_index_map, base_idx)
if output_actions is None:
return None
raw_input_wordpieces = [
input_token.wordpiece for input_token in input_tokens
]
for action in output_actions:
if action.type == COPY_TYPE:
# Copy actions should only either
# 1. Copy from the input (i.e., before SEP)
# 2. Copy TAB or COL tokens
assert input_tokens[
action.action_id].index == self.input_vocabulary.index(
TAB_TOK) or input_tokens[action.action_id].index in [
self.input_vocabulary.index(col_tok)
for col_tok in COL_TYPE_TO_TOK.values()
] or action.action_id < raw_input_wordpieces.index(
SEP_TOK
), 'Unexpected copying action: %r with proto:\n%r' % (
input_tokens[action.action_id], example)
assert input_tokens[action.action_id].copy_mask == 1, (
'Copied, but copy mask is 0: %s at '
'index %d; copied action was %s') % (
input_tokens[action.action_id], action.action_id, action)
# Actually create the TF Example
results.append(
self._convert_to_sequence_example(
input_tokens, output_actions,
example.model_input.original_utterance).SerializeToString())
return results
def process(self, example):
results = self.non_parallel_process(example)
if results is not None:
for result in results:
yield result
def _convert_input_to_sequence_example(self, input_tokens, features):
features['source_wordpieces'] = tf.train.FeatureList(feature=[
tf.train.Feature(
int64_list=tf.train.Int64List(value=[input_token.index]))
for input_token in input_tokens
])
features['copiable_input'] = tf.train.FeatureList(feature=[
tf.train.Feature(
int64_list=tf.train.Int64List(value=[input_token.copy_mask]))
for input_token in input_tokens
])
copy_features = list()
foreign_key_features = list()
for input_token in input_tokens:
copy_features.append(
tf.train.Feature(
bytes_list=tf.train.BytesList(
value=[input_token.wordpiece.encode('utf8')])))
foreign_key_features.append(
tf.train.Feature(
int64_list=tf.train.Int64List(
value=[input_token.indicates_foreign_key])))
features['copy_strings'] = tf.train.FeatureList(feature=copy_features)
features['segment_ids'] = tf.train.FeatureList(feature=[
tf.train.Feature(
int64_list=tf.train.Int64List(value=[input_token.segment_id]))
for input_token in input_tokens
])
features['indicates_foreign_key'] = tf.train.FeatureList(
feature=foreign_key_features)
features['utterance_schema_alignment'] = tf.train.FeatureList(feature=[
tf.train.Feature(
int64_list=tf.train.Int64List(value=[input_token.aligned]))
for input_token in input_tokens
])
def _convert_output_to_sequence_example(self, output_actions, features):
features['target_action_ids'] = tf.train.FeatureList(feature=[
tf.train.Feature(
int64_list=tf.train.Int64List(value=[action.action_id]))
for action in output_actions
])
features['target_action_types'] = tf.train.FeatureList(feature=[
tf.train.Feature(int64_list=tf.train.Int64List(value=[action.type]))
for action in output_actions
])
def _convert_to_sequence_example(self, input_tokens, output_actions,
utterance):
features = collections.OrderedDict()
self._convert_input_to_sequence_example(input_tokens, features)
self._convert_output_to_sequence_example(output_actions, features)
context_features = add_context(utterance)
return tf.train.SequenceExample(
context=tf.train.Features(feature=context_features),
feature_lists=tf.train.FeatureLists(feature_list=features))
def _get_vocab_index_or_unk(self, token, is_input=True):
    # Note: membership checks against the vocabulary may emit a 'Unicode
    # equals warning' if the token is a unicode-only token.
if is_input:
if token in self.input_vocabulary:
return self.input_vocabulary.index(token)
return self.input_vocabulary.index(UNK_TOK)
if token in self.output_vocabulary:
# Add 3 to this because there are 3 placeholder tokens in the output
# vocabulary that will be used during train (PAD, BEG, and END).
return self.output_vocabulary.index(token) + 3
    print('Could not find token %r in output vocabulary.' % token)
    return None  # Callers treat None as "token not found".
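  # Worked example of the offset scheme above (not in the original file):
  # with output_vocabulary == ['select', 'from'] and ids 0..2 reserved for
  # the PAD/BEG/END placeholders, 'select' maps to action id 3 and 'from'
  # to action id 4. An unknown output token returns None, which
  # _convert_output_to_indexed_sequence treats as a conversion failure.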
def _convert_input_to_indexed_sequence(self, model_input, random_permutation):
# Everything is tokenized, but need to combine the utterance with the
# schema.
converted_wordpiece_tokens = list()
for wordpiece in model_input.utterance_wordpieces:
converted_wordpiece_tokens.append(
InputToken(('##' if '##' in wordpiece.wordpiece else '') +
model_input.original_utterance[
wordpiece.span_start_index:wordpiece.span_end_index],
self._get_vocab_index_or_unk(wordpiece.wordpiece), 1, 0, 0,
int(wordpiece.matches_to_schema)))
tokens = [
InputToken(BEG_TOK, self.input_vocabulary.index(BEG_TOK), 0, 0, 0, 0)
] + converted_wordpiece_tokens + [
InputToken(SEP_TOK, self.input_vocabulary.index(SEP_TOK), 0, 0, 0, 0)
]
table_index_map = list()
column_index_map = list()
# Add the table tokens
# Look it up in the cache
string_serial = ','.join([str(table) for table in model_input.tables])
if string_serial in self.table_cache and not random_permutation:
tokens_suffix, table_index_map, column_index_map = self.table_cache[
string_serial]
else:
# The input tokens contain the string to copy, rather than the wordpiece
# that's being embedded.
tokens_suffix = list()
order = list(range(len(model_input.tables)))
if random_permutation:
random.shuffle(order)
for table_segment_idx, table_idx in enumerate(order):
table = model_input.tables[table_idx]
table_index_map.append((len(tokens_suffix), table))
table_wordpieces_tokens = list()
for wordpiece in table.table_name_wordpieces:
table_wordpieces_tokens.append(
InputToken('', self._get_vocab_index_or_unk(wordpiece.wordpiece),
0, table_segment_idx + 1, 0,
int(table.matches_to_utterance)))
tokens_suffix.extend([
InputToken(
table.original_table_name, self.input_vocabulary.index(TAB_TOK),
1, table_segment_idx + 1, 0, int(table.matches_to_utterance))
] + table_wordpieces_tokens)
col_order = list(range(len(table.table_columns)))
if random_permutation:
random.shuffle(col_order)
# Add the column tokens for this table
for col_idx in col_order:
column = table.table_columns[col_idx]
column_index_map.append((len(tokens_suffix), column))
column_wordpiece_tokens = list()
for wordpiece in column.column_name_wordpieces:
column_wordpiece_tokens.append(
InputToken('',
self._get_vocab_index_or_unk(wordpiece.wordpiece), 0,
table_segment_idx + 1, int(column.is_foreign_key),
int(column.matches_to_utterance)))
tokens_suffix.extend([
InputToken(
column.original_column_name,
self.input_vocabulary.index(COL_TYPE_TO_TOK[
column.column_type]), 1, table_segment_idx + 1,
int(column.is_foreign_key), int(column.matches_to_utterance))
] + column_wordpiece_tokens)
# Update cache
if not random_permutation:
self.table_cache[string_serial] = (tokens_suffix, table_index_map,
column_index_map)
base_idx = len(tokens)
tokens.extend(tokens_suffix)
# If there are too many tokens, return None.
if len(tokens) > self.model_config.data_options.max_num_tokens:
return None
return tokens, table_index_map, column_index_map, base_idx
def _convert_output_to_indexed_sequence(self, example, table_index_map,
column_index_map, base_idx):
action_sequence = list()
gold_query = example.gold_sql_query
if len(
gold_query.actions) > self.model_config.data_options.max_decode_length:
return None
for action in gold_query.actions:
if action.symbol:
action_sequence.append(
OutputAction(action.symbol,
self._get_vocab_index_or_unk(action.symbol, False),
GENERATE_TYPE))
elif action.entity_copy:
found = False
if action.entity_copy.copied_table:
# Copied a table.
table = action.entity_copy.copied_table
for index, entity in table_index_map:
if entity.original_table_name == table.original_table_name:
action_sequence.append(
OutputAction(table.original_table_name, index + base_idx,
COPY_TYPE))
found = True
break
else:
# Copied a column.
column = action.entity_copy.copied_column
for index, entity in column_index_map:
if entity.original_column_name == column.original_column_name and entity.table_name == column.table_name:
action_sequence.append(
OutputAction(column.original_column_name, index + base_idx,
COPY_TYPE))
found = True
break
if not found:
return None
elif action.utterance_copy:
copy_wordpiece = action.utterance_copy
action_sequence.append(
OutputAction(copy_wordpiece.wordpiece,
copy_wordpiece.tokenized_index + 1, COPY_TYPE))
if None in [action.action_id for action in action_sequence]:
return None
return action_sequence
def creation_wrapper(process_dataset_fn):
"""Wrapper for creating the TFRecords files."""
# Create the tf examples directory.
if not tf.gfile.IsDirectory(FLAGS.tf_examples_dir):
print('Creating TFExamples directory at ' + FLAGS.tf_examples_dir)
tf.gfile.MkDir(FLAGS.tf_examples_dir)
# Get the model config.
model_config = load_config(FLAGS.config)
for filename in FLAGS.filenames:
if not filename:
continue
input_path = os.path.join(FLAGS.examples_dir, filename)
output_path = os.path.join(
FLAGS.tf_examples_dir,
filename.split('/')[-1].split('.')[0] + '.tfrecords')
permute = 'spider_train' in output_path and FLAGS.permute
num_repeats = FLAGS.num_spider_repeats if permute else 1
print('Processing %s. Permute: %r with %d repetitions' %
(filename, permute, num_repeats))
print('Writing to ' + output_path)
process_dataset_fn(input_path, model_config, permute, num_repeats,
output_path)
def process_dataset(input_path, model_config, permute, num_repeats,
output_path):
"""Function that processes a dataset without multiprocessing."""
fn = ConvertToSequenceExampleDoFn(
model_config,
FLAGS.generate_output,
permute=permute,
num_repeats=num_repeats)
with tf.gfile.Open(input_path) as infile:
examples = [NLToSQLExample().from_json(json.loads(line)) for line in infile]
with tf.python_io.TFRecordWriter(output_path) as writer:
num_examples_written = 0
total_examples = 0
for example in examples:
total_examples += 1
converteds = fn.non_parallel_process(example)
if converteds:
num_examples_written += 1
for converted in converteds:
writer.write(converted)
print('Wrote to %d / %d to %s' %
(num_examples_written, total_examples, output_path))
def main(unused_argv):
creation_wrapper(process_dataset)
if __name__ == '__main__':
app.run(main)
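# Example invocation (paths are placeholders, not from the original repo):
#
#   python convert_to_tfrecords.py \
#     --examples_dir=/tmp/examples \
#     --filenames=spider_train.json \
#     --config=/tmp/model_config.json \
#     --tf_examples_dir=/tmp/tf_examples \
#     --output_vocab=/tmp/output_vocab.txt \
#     --permute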
|
google-research/language
|
language/xsp/data_preprocessing/convert_to_tfrecords.py
|
Python
|
apache-2.0
| 17,885
|
<?php
/**
* Created by JetBrains PhpStorm.
* User: Rafael
* Date: 06/10/13
* Time: 17:39
* To change this template use File | Settings | File Templates.
*/
class LocationTableSeeder extends Seeder {
public function run() {
DB::table('location')->delete();
Location::create(array(
'name' => 'Serviço de Hematologia e Hemoterapia',
'hours' => 'Segunda a sexta-feira, das 7h às 12h30',
'address' => 'Rua Antonio Sais, 425',
'city_id' => City::where('name', 'São José dos Campos')->first()->id,
'zip' => '12210-040',
));
}
}
|
rafaelmaza/blood-demo
|
api/app/database/seeds/LocationTableSeeder.php
|
PHP
|
apache-2.0
| 666
|
package com.mesosphere.sdk.scheduler.multi;
import org.apache.mesos.Protos;
import org.apache.mesos.Protos.Offer;
import org.apache.mesos.Protos.TaskState;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import com.mesosphere.sdk.scheduler.AbstractScheduler;
import com.mesosphere.sdk.offer.CommonIdUtils;
import com.mesosphere.sdk.offer.Constants;
import com.mesosphere.sdk.offer.OfferRecommendation;
import com.mesosphere.sdk.offer.ReserveOfferRecommendation;
import com.mesosphere.sdk.scheduler.MesosEventClient.OfferResponse;
import com.mesosphere.sdk.scheduler.MesosEventClient.ClientStatusResponse;
import com.mesosphere.sdk.scheduler.MesosEventClient.TaskStatusResponse;
import com.mesosphere.sdk.scheduler.SchedulerConfig;
import com.mesosphere.sdk.scheduler.uninstall.DeregisterStep;
import com.mesosphere.sdk.specification.ServiceSpec;
import com.mesosphere.sdk.state.StateStore;
import com.mesosphere.sdk.testutils.TestConstants;
import static org.mockito.Mockito.*;
import java.time.Duration;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
/**
* Tests for {@link MultiServiceEventClient}
*/
public class MultiServiceEventClientTest {
private static final Answer<OfferResponse> CONSUME_FIRST_OFFER = new Answer<OfferResponse>() {
@Override
public OfferResponse answer(InvocationOnMock invocation) throws Throwable {
List<Offer> offers = getOffersArgument(invocation);
if (offers.isEmpty()) {
return OfferResponse.processed(Collections.emptyList());
}
return OfferResponse.processed(Collections.singletonList(
new ReserveOfferRecommendation(offers.get(0), getUnreservedCpus(3))));
}
};
private static final Answer<OfferResponse> CONSUME_LAST_OFFER = new Answer<OfferResponse>() {
@Override
public OfferResponse answer(InvocationOnMock invocation) throws Throwable {
List<Offer> offers = getOffersArgument(invocation);
if (offers.isEmpty()) {
return OfferResponse.processed(Collections.emptyList());
}
return OfferResponse.processed(Collections.singletonList(
new ReserveOfferRecommendation(offers.get(offers.size() - 1), getUnreservedCpus(5))));
}
};
private static final Answer<OfferResponse> NO_CHANGES = new Answer<OfferResponse>() {
@Override
public OfferResponse answer(InvocationOnMock invocation) throws Throwable {
return OfferResponse.processed(Collections.emptyList());
}
};
private static final Answer<OfferResponse> OFFER_NOT_READY = new Answer<OfferResponse>() {
@Override
public OfferResponse answer(InvocationOnMock invocation) throws Throwable {
return OfferResponse.notReady(Collections.emptyList());
}
};
@Mock private AbstractScheduler mockClient1;
@Mock private AbstractScheduler mockClient2;
@Mock private AbstractScheduler mockClient3;
@Mock private AbstractScheduler mockClient4;
@Mock private AbstractScheduler mockClient5;
@Mock private AbstractScheduler mockClient6;
@Mock private AbstractScheduler mockClient7;
@Mock private AbstractScheduler mockClient8;
@Mock private AbstractScheduler mockClient9;
@Mock private ServiceSpec mockServiceSpec1;
@Mock private ServiceSpec mockServiceSpec2;
@Mock private ServiceSpec mockServiceSpec3;
@Mock private ServiceSpec mockServiceSpec4;
@Mock private ServiceSpec mockServiceSpec5;
@Mock private ServiceSpec mockServiceSpec6;
@Mock private ServiceSpec mockServiceSpec7;
@Mock private ServiceSpec mockServiceSpec8;
@Mock private ServiceSpec mockServiceSpec9;
@Mock private SchedulerConfig mockSchedulerConfig;
@Mock private StateStore mockStateStore;
@Mock private MultiServiceManager mockMultiServiceManager;
@Mock private OfferDiscipline mockOfferDiscipline;
@Mock private MultiServiceEventClient.UninstallCallback mockUninstallCallback;
@Mock private DeregisterStep mockDeregisterStep;
private MultiServiceEventClient client;
@Before
public void beforeEach() {
MockitoAnnotations.initMocks(this);
when(mockClient1.getServiceSpec()).thenReturn(mockServiceSpec1);
when(mockClient2.getServiceSpec()).thenReturn(mockServiceSpec2);
when(mockClient3.getServiceSpec()).thenReturn(mockServiceSpec3);
when(mockClient4.getServiceSpec()).thenReturn(mockServiceSpec4);
when(mockClient5.getServiceSpec()).thenReturn(mockServiceSpec5);
when(mockClient6.getServiceSpec()).thenReturn(mockServiceSpec6);
when(mockClient7.getServiceSpec()).thenReturn(mockServiceSpec7);
when(mockClient8.getServiceSpec()).thenReturn(mockServiceSpec8);
when(mockClient9.getServiceSpec()).thenReturn(mockServiceSpec9);
when(mockServiceSpec1.getName()).thenReturn("1");
when(mockServiceSpec2.getName()).thenReturn("2");
when(mockServiceSpec3.getName()).thenReturn("3");
when(mockServiceSpec4.getName()).thenReturn("4");
when(mockServiceSpec5.getName()).thenReturn("5");
when(mockServiceSpec6.getName()).thenReturn("6");
when(mockServiceSpec7.getName()).thenReturn("7");
when(mockServiceSpec8.getName()).thenReturn("8");
when(mockServiceSpec9.getName()).thenReturn("9");
when(mockSchedulerConfig.getMultiServiceRemovalTimeout()).thenReturn(Duration.ZERO);
when(mockMultiServiceManager.getService("1")).thenReturn(Optional.of(mockClient1));
when(mockMultiServiceManager.getService("2")).thenReturn(Optional.of(mockClient2));
when(mockMultiServiceManager.getService("3")).thenReturn(Optional.of(mockClient3));
when(mockMultiServiceManager.getService("4")).thenReturn(Optional.of(mockClient4));
when(mockMultiServiceManager.getService("5")).thenReturn(Optional.of(mockClient5));
when(mockMultiServiceManager.getService("6")).thenReturn(Optional.of(mockClient6));
when(mockMultiServiceManager.getService("7")).thenReturn(Optional.of(mockClient7));
when(mockMultiServiceManager.getService("8")).thenReturn(Optional.of(mockClient8));
when(mockMultiServiceManager.getService("9")).thenReturn(Optional.of(mockClient9));
when(mockOfferDiscipline.updateServiceStatus(any(), any())).thenReturn(true);
client = buildClient(false);
}
@Test
public void offerNoClientsUninstalling() {
// Rebuild client in uninstall mode:
client = buildClient(true);
Assert.assertEquals(ClientStatusResponse.readyToRemove(), client.getClientStatus());
}
@Test
public void offerNoClientsDeclineLong() {
// No offers
Assert.assertEquals(ClientStatusResponse.idle(), client.getClientStatus());
OfferResponse response = client.offers(Collections.emptyList());
Assert.assertEquals(OfferResponse.Result.PROCESSED, response.result);
Assert.assertTrue(response.recommendations.isEmpty());
// Some offers
Assert.assertEquals(ClientStatusResponse.idle(), client.getClientStatus());
response = client.offers(Arrays.asList(getOffer(1), getOffer(2), getOffer(3)));
Assert.assertEquals(OfferResponse.Result.PROCESSED, response.result);
Assert.assertTrue(response.recommendations.isEmpty());
}
@Test
public void clientRemoval() throws Exception {
when(mockMultiServiceManager.sharedLockAndGetServices()).thenReturn(Collections.singleton(mockClient1));
when(mockClient1.getStateStore()).thenReturn(mockStateStore);
// client is done, expect uninstall trigger:
when(mockClient1.getClientStatus()).thenReturn(ClientStatusResponse.readyToUninstall());
Assert.assertEquals(ClientStatusResponse.idle(), client.getClientStatus());
verify(mockOfferDiscipline).updateServices(Collections.singleton("1"));
verify(mockOfferDiscipline).updateServiceStatus("1", ClientStatusResponse.readyToUninstall());
verify(mockMultiServiceManager).uninstallServices(Collections.singletonList("1"));
Assert.assertEquals(OfferResponse.Result.PROCESSED, client.offers(Collections.emptyList()).result);
verifyZeroInteractions(mockStateStore);
verifyZeroInteractions(mockUninstallCallback);
// client is uninstalled, expect removal:
when(mockClient1.getClientStatus()).thenReturn(ClientStatusResponse.readyToRemove());
Assert.assertEquals(ClientStatusResponse.idle(), client.getClientStatus());
verify(mockOfferDiscipline, times(2)).updateServices(Collections.singleton("1"));
verify(mockOfferDiscipline).updateServiceStatus("1", ClientStatusResponse.readyToRemove());
verify(mockStateStore).deleteAllDataIfNamespaced();
verify(mockMultiServiceManager).removeServices(Collections.singletonList("1"));
verify(mockUninstallCallback).uninstalled("1");
Assert.assertEquals(OfferResponse.Result.PROCESSED, client.offers(Collections.emptyList()).result);
Assert.assertEquals(ClientStatusResponse.idle(), client.getClientStatus());
        // now behave like the client is removed; we should still report idle since the scheduler isn't being uninstalled:
when(mockMultiServiceManager.sharedLockAndGetServices()).thenReturn(Collections.emptyList());
Assert.assertEquals(OfferResponse.Result.PROCESSED, client.offers(Collections.emptyList()).result);
Assert.assertEquals(ClientStatusResponse.idle(), client.getClientStatus());
}
@Test
public void clientRemovalDuringUninstall() throws Exception {
// Rebuild client in uninstall mode:
client = buildClient(true);
when(mockMultiServiceManager.sharedLockAndGetServices()).thenReturn(Collections.singleton(mockClient1));
when(mockClient1.getStateStore()).thenReturn(mockStateStore);
// client is done, expect uninstall trigger:
when(mockClient1.getClientStatus()).thenReturn(ClientStatusResponse.readyToUninstall());
Assert.assertEquals(ClientStatusResponse.idle(), client.getClientStatus());
verify(mockOfferDiscipline).updateServices(Collections.singleton("1"));
verify(mockOfferDiscipline).updateServiceStatus("1", ClientStatusResponse.readyToUninstall());
verify(mockMultiServiceManager).uninstallServices(Collections.singletonList("1"));
Assert.assertEquals(OfferResponse.Result.PROCESSED, client.offers(Collections.emptyList()).result);
verifyZeroInteractions(mockStateStore);
verifyZeroInteractions(mockUninstallCallback);
// client is uninstalled, expect removal:
when(mockClient1.getClientStatus()).thenReturn(ClientStatusResponse.readyToRemove());
Assert.assertEquals(ClientStatusResponse.idle(), client.getClientStatus());
verify(mockOfferDiscipline, times(2)).updateServices(Collections.singleton("1"));
verify(mockOfferDiscipline).updateServiceStatus("1", ClientStatusResponse.readyToRemove());
verify(mockStateStore).deleteAllDataIfNamespaced();
verify(mockMultiServiceManager).removeServices(Collections.singletonList("1"));
verify(mockUninstallCallback).uninstalled("1");
Assert.assertEquals(OfferResponse.Result.PROCESSED, client.offers(Collections.emptyList()).result);
// IDLE until the getServices call returns an empty collection, below:
Assert.assertEquals(ClientStatusResponse.idle(), client.getClientStatus());
// now behave like client is removed, we should now be READY_TO_REMOVE since scheduler is being uninstalled:
when(mockMultiServiceManager.sharedLockAndGetServices()).thenReturn(Collections.emptyList());
Assert.assertEquals(ClientStatusResponse.readyToRemove(), client.getClientStatus());
}
@Test
public void finishedAndUninstalled() throws Exception {
Collection<AbstractScheduler> services = Arrays.asList(mockClient1, mockClient2, mockClient3, mockClient4);
when(mockMultiServiceManager.sharedLockAndGetServices()).thenReturn(services);
// 1,3: Finished uninstall, remove.
// 2,4: Finished normal run, switch to uninstall.
when(mockClient1.getStateStore()).thenReturn(mockStateStore);
when(mockClient1.getClientStatus()).thenReturn(ClientStatusResponse.readyToRemove());
when(mockClient2.getStateStore()).thenReturn(mockStateStore);
when(mockClient2.getClientStatus()).thenReturn(ClientStatusResponse.readyToUninstall());
when(mockClient3.getStateStore()).thenReturn(mockStateStore);
when(mockClient3.getClientStatus()).thenReturn(ClientStatusResponse.readyToRemove());
when(mockClient4.getStateStore()).thenReturn(mockStateStore);
when(mockClient4.getClientStatus()).thenReturn(ClientStatusResponse.readyToUninstall());
Assert.assertEquals(ClientStatusResponse.idle(), client.getClientStatus());
// As uninstalled clients are removed, data is cleared and upstream is notified via callback:
verify(mockOfferDiscipline).updateServices(new HashSet<>(Arrays.asList("1", "2", "3", "4")));
verify(mockOfferDiscipline).updateServiceStatus("1", ClientStatusResponse.readyToRemove());
verify(mockOfferDiscipline).updateServiceStatus("2", ClientStatusResponse.readyToUninstall());
verify(mockOfferDiscipline).updateServiceStatus("3", ClientStatusResponse.readyToRemove());
verify(mockOfferDiscipline).updateServiceStatus("4", ClientStatusResponse.readyToUninstall());
verify(mockStateStore, times(2)).deleteAllDataIfNamespaced();
verify(mockMultiServiceManager).removeServices(Arrays.asList("1", "3"));
verify(mockUninstallCallback).uninstalled("1");
verify(mockUninstallCallback).uninstalled("3");
// Uninstall triggered for finished clients:
verify(mockMultiServiceManager).uninstallServices(Arrays.asList("2", "4"));
}
@Test
public void emptyOffersHitAllServices() throws Exception {
Collection<AbstractScheduler> services =
Arrays.asList(mockClient1, mockClient2, mockClient3, mockClient4, mockClient5);
when(mockMultiServiceManager.sharedLockAndGetServices()).thenReturn(services);
when(mockClient1.getClientStatus()).thenReturn(ClientStatusResponse.launching(true));
when(mockClient2.getClientStatus()).thenReturn(ClientStatusResponse.footprint(false));
when(mockClient3.getClientStatus()).thenReturn(ClientStatusResponse.idle());
when(mockClient4.getClientStatus()).thenReturn(ClientStatusResponse.readyToUninstall());
when(mockClient5.getClientStatus()).thenReturn(ClientStatusResponse.readyToRemove());
when(mockClient5.getStateStore()).thenReturn(mockStateStore);
Assert.assertEquals(ClientStatusResponse.footprint(true), client.getClientStatus());
verify(mockOfferDiscipline).updateServices(new HashSet<>(Arrays.asList("1", "2", "3", "4", "5")));
verify(mockOfferDiscipline).updateServiceStatus("1", ClientStatusResponse.launching(true));
verify(mockOfferDiscipline).updateServiceStatus("2", ClientStatusResponse.footprint(false));
verify(mockOfferDiscipline).updateServiceStatus("3", ClientStatusResponse.idle());
verify(mockOfferDiscipline).updateServiceStatus("4", ClientStatusResponse.readyToUninstall());
verify(mockOfferDiscipline).updateServiceStatus("5", ClientStatusResponse.readyToRemove());
when(mockClient1.offers(any())).then(CONSUME_FIRST_OFFER);
when(mockClient2.offers(any())).then(CONSUME_LAST_OFFER);
        // Empty offers: all active (non-idle, non-uninstalled) clients should still be pinged
OfferResponse response = client.offers(Collections.emptyList());
Assert.assertEquals(OfferResponse.Result.PROCESSED, response.result);
Assert.assertTrue(response.recommendations.isEmpty());
verify(mockClient1).offers(Collections.emptyList());
verify(mockClient2).offers(Collections.emptyList());
// 3 not hit due to idle status:
verify(mockClient3, never()).offers(Collections.emptyList());
// 4 not hit due to readyToUninstall status:
verify(mockClient4, never()).offers(Collections.emptyList());
verify(mockMultiServiceManager).uninstallServices(Collections.singletonList("4"));
        // 5 not hit due to readyToRemove status:
        verify(mockClient5, never()).offers(Collections.emptyList());
verify(mockMultiServiceManager).removeServices(Collections.singletonList("5"));
verify(mockStateStore).deleteAllDataIfNamespaced();
}
@Test
public void offerPruning() {
// Client 1,4,7: consumes the first offer
// Client 2,5,8: consumes the last offer
// Client 3,6,9: no change to offers
when(mockClient1.offers(any())).then(CONSUME_FIRST_OFFER);
when(mockClient1.getClientStatus()).thenReturn(ClientStatusResponse.launching(false));
when(mockClient2.offers(any())).then(CONSUME_LAST_OFFER);
when(mockClient2.getClientStatus()).thenReturn(ClientStatusResponse.footprint(false));
when(mockClient3.offers(any())).then(NO_CHANGES);
when(mockClient3.getClientStatus()).thenReturn(ClientStatusResponse.launching(false));
when(mockClient4.offers(any())).then(CONSUME_FIRST_OFFER);
when(mockClient4.getClientStatus()).thenReturn(ClientStatusResponse.footprint(false));
when(mockClient5.offers(any())).then(CONSUME_LAST_OFFER);
when(mockClient5.getClientStatus()).thenReturn(ClientStatusResponse.launching(true));
when(mockClient6.offers(any())).then(NO_CHANGES);
when(mockClient6.getClientStatus()).thenReturn(ClientStatusResponse.footprint(false));
when(mockClient7.offers(any())).then(CONSUME_FIRST_OFFER);
when(mockClient7.getClientStatus()).thenReturn(ClientStatusResponse.launching(false));
when(mockClient8.offers(any())).then(CONSUME_LAST_OFFER);
when(mockClient8.getClientStatus()).thenReturn(ClientStatusResponse.footprint(false));
when(mockClient9.offers(any())).then(NO_CHANGES);
when(mockClient9.getClientStatus()).thenReturn(ClientStatusResponse.launching(false));
when(mockMultiServiceManager.sharedLockAndGetServices()).thenReturn(Arrays.asList(
mockClient1, mockClient2, mockClient3,
mockClient4, mockClient5, mockClient6,
mockClient7, mockClient8, mockClient9));
Assert.assertEquals(ClientStatusResponse.footprint(true), client.getClientStatus());
// Seven offers: Only the middle offer is left at the end.
Protos.Offer middleOffer = getOffer(4);
Collection<Protos.Offer> offers = Arrays.asList(
getOffer(1), getOffer(2), getOffer(3),
middleOffer,
getOffer(5), getOffer(6), getOffer(7));
OfferResponse response = client.offers(offers);
Assert.assertEquals(OfferResponse.Result.PROCESSED, response.result);
Set<Integer> expectedConsumedOffers = new HashSet<>(Arrays.asList(1, 2, 3, 5, 6, 7));
Assert.assertEquals(expectedConsumedOffers.size(), response.recommendations.size());
for (OfferRecommendation rec : response.recommendations) {
Assert.assertTrue(rec.getOfferId().getValue(),
expectedConsumedOffers.contains(Integer.parseInt(rec.getOfferId().getValue())));
}
// Verify that offers are consumed in the order we would expect:
verify(mockClient1).offers(Arrays.asList(
getOffer(1), getOffer(2), getOffer(3), middleOffer, getOffer(5), getOffer(6), getOffer(7)));
verify(mockClient2).offers(Arrays.asList(
getOffer(2), getOffer(3), middleOffer, getOffer(5), getOffer(6), getOffer(7))); // 1 ate first
verify(mockClient3).offers(Arrays.asList(
getOffer(2), getOffer(3), middleOffer, getOffer(5), getOffer(6))); // 2 ate last
verify(mockClient4).offers(Arrays.asList(
getOffer(2), getOffer(3), middleOffer, getOffer(5), getOffer(6))); // no change by 3
verify(mockClient5).offers(Arrays.asList(
getOffer(3), middleOffer, getOffer(5), getOffer(6))); // 4 ate first
verify(mockClient6).offers(Arrays.asList(
getOffer(3), middleOffer, getOffer(5))); // 5 ate last
verify(mockClient7).offers(Arrays.asList(
getOffer(3), middleOffer, getOffer(5))); // no change by 6
verify(mockClient8).offers(Arrays.asList(
middleOffer, getOffer(5))); // 7 ate first
verify(mockClient9).offers(Arrays.asList(
middleOffer)); // 8 ate last
}
@Test
public void offerSomeClientsNotReady() {
when(mockClient1.getClientStatus()).thenReturn(ClientStatusResponse.launching(false));
when(mockClient2.getClientStatus()).thenReturn(ClientStatusResponse.launching(false));
when(mockClient3.getClientStatus()).thenReturn(ClientStatusResponse.idle());
when(mockMultiServiceManager.sharedLockAndGetServices()).thenReturn(Arrays.asList(
mockClient1, mockClient2, mockClient3));
Assert.assertEquals(ClientStatusResponse.launching(false), client.getClientStatus());
// One client: Not ready
when(mockClient1.offers(any())).then(NO_CHANGES);
when(mockClient2.offers(any())).then(OFFER_NOT_READY);
// Empty offers: All running clients should have been pinged regardless
OfferResponse response = client.offers(Collections.emptyList());
        Assert.assertEquals(OfferResponse.Result.NOT_READY, response.result);
Assert.assertTrue(response.recommendations.isEmpty());
verify(mockClient1).offers(Collections.emptyList());
verify(mockClient2).offers(Collections.emptyList());
// Client 3 is skipped due to being IDLE:
verify(mockClient3, never()).offers(Collections.emptyList());
// Three offers: All running clients should have been pinged with the same offers.
List<Protos.Offer> offers = Arrays.asList(getOffer(1), getOffer(2), getOffer(3));
response = client.offers(offers);
Assert.assertEquals(OfferResponse.Result.NOT_READY, response.result);
Assert.assertTrue(response.recommendations.isEmpty());
verify(mockClient1).offers(offers);
verify(mockClient2).offers(offers);
        // Client 3 is skipped due to being IDLE:
        verify(mockClient3, never()).offers(offers);
}
@Test
public void clientStatusOneServiceWithNewWork() throws Exception {
Collection<AbstractScheduler> services = Arrays.asList(mockClient1, mockClient2, mockClient3);
when(mockMultiServiceManager.sharedLockAndGetServices()).thenReturn(services);
when(mockClient1.offers(any())).then(NO_CHANGES);
when(mockClient1.getClientStatus()).thenReturn(ClientStatusResponse.launching(false));
when(mockClient2.offers(any())).then(NO_CHANGES);
when(mockClient2.getClientStatus()).thenReturn(ClientStatusResponse.launching(true));
when(mockClient3.offers(any())).then(NO_CHANGES);
when(mockClient3.getClientStatus()).thenReturn(ClientStatusResponse.idle());
// Empty offers: All clients should have been pinged regardless
OfferResponse response = client.offers(Collections.emptyList());
Assert.assertEquals(OfferResponse.Result.PROCESSED, response.result);
Assert.assertTrue(response.recommendations.isEmpty());
Assert.assertEquals(ClientStatusResponse.launching(true), client.getClientStatus());
}
@Test
public void clientStatusAllServicesLaunching() throws Exception {
Collection<AbstractScheduler> services = Arrays.asList(mockClient1, mockClient2, mockClient3);
when(mockMultiServiceManager.sharedLockAndGetServices()).thenReturn(services);
when(mockClient1.offers(any())).then(NO_CHANGES);
when(mockClient1.getClientStatus()).thenReturn(ClientStatusResponse.launching(false));
when(mockClient2.offers(any())).then(NO_CHANGES);
when(mockClient2.getClientStatus()).thenReturn(ClientStatusResponse.launching(false));
when(mockClient3.offers(any())).then(NO_CHANGES);
when(mockClient3.getClientStatus()).thenReturn(ClientStatusResponse.launching(false));
// Empty offers: All clients should have been pinged regardless
OfferResponse response = client.offers(Collections.emptyList());
Assert.assertEquals(OfferResponse.Result.PROCESSED, response.result);
Assert.assertTrue(response.recommendations.isEmpty());
Assert.assertEquals(ClientStatusResponse.launching(false), client.getClientStatus());
}
@Test
public void clientStatusAllServicesIdle() throws Exception {
Collection<AbstractScheduler> services = Arrays.asList(mockClient1, mockClient2, mockClient3);
when(mockMultiServiceManager.sharedLockAndGetServices()).thenReturn(services);
when(mockClient1.offers(any())).then(NO_CHANGES);
when(mockClient1.getClientStatus()).thenReturn(ClientStatusResponse.idle());
when(mockClient2.offers(any())).then(NO_CHANGES);
when(mockClient2.getClientStatus()).thenReturn(ClientStatusResponse.idle());
when(mockClient3.offers(any())).then(NO_CHANGES);
when(mockClient3.getClientStatus()).thenReturn(ClientStatusResponse.idle());
// Empty offers: All clients should have been pinged regardless
OfferResponse response = client.offers(Collections.emptyList());
Assert.assertEquals(OfferResponse.Result.PROCESSED, response.result);
Assert.assertTrue(response.recommendations.isEmpty());
Assert.assertEquals(ClientStatusResponse.idle(), client.getClientStatus());
}
@Test
public void clientStatusOneServiceFootprint() throws Exception {
Collection<AbstractScheduler> services = Arrays.asList(mockClient1, mockClient2, mockClient3);
when(mockMultiServiceManager.sharedLockAndGetServices()).thenReturn(services);
when(mockClient1.offers(any())).then(NO_CHANGES);
when(mockClient1.getClientStatus()).thenReturn(ClientStatusResponse.launching(true));
when(mockClient2.offers(any())).then(NO_CHANGES);
when(mockClient2.getClientStatus()).thenReturn(ClientStatusResponse.footprint(false));
when(mockClient3.offers(any())).then(NO_CHANGES);
when(mockClient3.getClientStatus()).thenReturn(ClientStatusResponse.launching(false));
// Empty offers: All clients should have been pinged regardless
OfferResponse response = client.offers(Collections.emptyList());
Assert.assertEquals(OfferResponse.Result.PROCESSED, response.result);
Assert.assertTrue(response.recommendations.isEmpty());
Assert.assertEquals(ClientStatusResponse.footprint(true), client.getClientStatus());
}
@Test
public void taskStatusClientNotFound() {
Protos.TaskStatus status = buildStatus("2");
when(mockMultiServiceManager.getMatchingService(status)).thenReturn(Optional.empty());
when(mockMultiServiceManager.getServiceSanitized(any())).thenReturn(Optional.empty());
Assert.assertEquals(TaskStatusResponse.Result.UNKNOWN_TASK, client.taskStatus(status).result);
verify(mockMultiServiceManager, times(1)).getMatchingService(status);
}
@Test
public void taskStatusDefaultServiceProcessed() {
Protos.TaskStatus status = buildStatus("3");
when(mockClient3.taskStatus(any())).thenReturn(TaskStatusResponse.processed());
when(mockMultiServiceManager.getMatchingService(status)).thenReturn(Optional.empty());
when(mockMultiServiceManager.getServiceSanitized(any())).thenReturn(Optional.empty());
when(mockMultiServiceManager.getServiceSanitized(TestConstants.SERVICE_NAME))
.thenReturn(Optional.of(mockClient3));
Assert.assertEquals(TaskStatusResponse.Result.PROCESSED, client.taskStatus(status).result);
}
@Test
public void taskStatusUnknown() {
// Client 2: unknown task
when(mockClient2.taskStatus(any())).thenReturn(TaskStatusResponse.unknownTask());
Protos.TaskStatus status = buildStatus("2");
when(mockMultiServiceManager.getMatchingService(status)).thenReturn(Optional.of(mockClient2));
Assert.assertEquals(TaskStatusResponse.Result.UNKNOWN_TASK, client.taskStatus(status).result);
verify(mockClient2, times(1)).taskStatus(status);
}
@Test
public void taskStatusProcessed() {
// Client 3: status processed
when(mockClient3.taskStatus(any())).thenReturn(TaskStatusResponse.processed());
Protos.TaskStatus status = buildStatus("3");
when(mockMultiServiceManager.getMatchingService(status)).thenReturn(Optional.of(mockClient3));
Assert.assertEquals(TaskStatusResponse.Result.PROCESSED, client.taskStatus(status).result);
verify(mockClient3, times(1)).taskStatus(status);
}
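    // Builds the client under test. When uninstalling=true, a deregister step is
    // supplied, which puts the client into scheduler-wide uninstall mode.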
private MultiServiceEventClient buildClient(boolean uninstalling) {
return new MultiServiceEventClient(
TestConstants.SERVICE_NAME,
mockSchedulerConfig,
mockMultiServiceManager,
mockOfferDiscipline,
Collections.emptyList(),
mockUninstallCallback,
uninstalling ? Optional.of(mockDeregisterStep) : Optional.empty());
}
@SuppressWarnings("deprecation")
private static Protos.Resource getUnreservedCpus(double cpus) {
Protos.Resource.Builder resBuilder = Protos.Resource.newBuilder()
.setName("cpus")
.setType(Protos.Value.Type.SCALAR)
.setRole(Constants.ANY_ROLE);
resBuilder.getScalarBuilder().setValue(cpus);
return resBuilder.build();
}
private static Protos.TaskStatus buildStatus(String clientName) {
return Protos.TaskStatus.newBuilder()
.setTaskId(CommonIdUtils.toTaskId(clientName, "foo"))
.setState(TaskState.TASK_FINISHED)
.build();
}
private static Protos.Offer getOffer(int id) {
return Protos.Offer.newBuilder()
.setId(Protos.OfferID.newBuilder().setValue(Integer.toString(id)))
.setFrameworkId(Protos.FrameworkID.newBuilder().setValue("test-framework-id").build())
.setSlaveId(Protos.SlaveID.newBuilder().setValue("test-slave-id").build())
.setHostname("test-hostname")
.build();
}
@SuppressWarnings("unchecked")
private static List<Protos.Offer> getOffersArgument(InvocationOnMock invocation) {
return (List<Protos.Offer>) invocation.getArguments()[0];
}
}
|
mesosphere/dcos-commons
|
sdk/scheduler/src/test/java/com/mesosphere/sdk/scheduler/multi/MultiServiceEventClientTest.java
|
Java
|
apache-2.0
| 31,298
|
// This file is part of BenchExec, a framework for reliable benchmarking:
// https://github.com/sosy-lab/benchexec
//
// SPDX-FileCopyrightText: 2019-2020 Dirk Beyer <https://www.sosy-lab.org>
//
// SPDX-License-Identifier: Apache-2.0
const path = require("path");
module.exports = {
webpack: function override(config, webpackEnv) {
const isEnvDevelopment = webpackEnv === "development";
// main configuration of output files:
// two bundles, one for our code, one for dependencies
config.output.filename = "bundle.min.js";
config.output.chunkFilename = "[name].min.js";
config.optimization.runtimeChunk = false;
config.optimization.splitChunks = {
chunks: "all",
cacheGroups: {
vendors: {
chunks: "all",
name: "vendors",
test: /(node_modules|src\/data\/dependencies\.json)/,
},
},
};
// same for CSS files
const cssConfig = config.plugins.find(
(p) => p.constructor.name === "MiniCssExtractPlugin",
);
if (cssConfig) {
cssConfig.options.filename = "bundle.min.css";
cssConfig.options.chunkFilename = "[name].min.css";
}
    // Don't extract license comments; we bundle them separately
config.optimization.minimizer.find(
(m) => m.constructor.name === "TerserPlugin",
).options.extractComments = false;
// Make vendor bundle change less often even if our own code changes.
config.optimization.occurrenceOrder = false;
// For consistency with previous configs
delete config.output.jsonpFunction;
if (isEnvDevelopment) {
// Make @data resolve to our dummy data
const dataPath = process.env.DATA || "src/data/data.json";
config.resolve.alias["@data"] = path.resolve(__dirname, dataPath);
}
return config;
},
};
|
dbeyer/benchexec
|
benchexec/tablegenerator/react-table/config-overrides.js
|
JavaScript
|
apache-2.0
| 1,815
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_75) on Sat May 16 22:22:32 CEST 2015 -->
<title>NodeTool.UpgradeSSTable</title>
<meta name="date" content="2015-05-16">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="NodeTool.UpgradeSSTable";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../org/apache/cassandra/tools/NodeTool.TruncateHints.html" title="class in org.apache.cassandra.tools"><span class="strong">Prev Class</span></a></li>
<li><a href="../../../../org/apache/cassandra/tools/NodeTool.Version.html" title="class in org.apache.cassandra.tools"><span class="strong">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/apache/cassandra/tools/NodeTool.UpgradeSSTable.html" target="_top">Frames</a></li>
<li><a href="NodeTool.UpgradeSSTable.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li><a href="#constructor_summary">Constr</a> | </li>
<li><a href="#method_summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor_detail">Constr</a> | </li>
<li><a href="#method_detail">Method</a></li>
</ul>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">org.apache.cassandra.tools</div>
<h2 title="Class NodeTool.UpgradeSSTable" class="title">Class NodeTool.UpgradeSSTable</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>java.lang.Object</li>
<li>
<ul class="inheritance">
<li><a href="../../../../org/apache/cassandra/tools/NodeTool.NodeToolCmd.html" title="class in org.apache.cassandra.tools">org.apache.cassandra.tools.NodeTool.NodeToolCmd</a></li>
<li>
<ul class="inheritance">
<li>org.apache.cassandra.tools.NodeTool.UpgradeSSTable</li>
</ul>
</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Implemented Interfaces:</dt>
<dd>java.lang.Runnable</dd>
</dl>
<dl>
<dt>Enclosing class:</dt>
<dd><a href="../../../../org/apache/cassandra/tools/NodeTool.html" title="class in org.apache.cassandra.tools">NodeTool</a></dd>
</dl>
<hr>
<br>
<pre>@Command(name="upgradesstables",
description="Rewrite sstables (for the requested tables) that are not on the current version (thus upgrading them to said current version)")
public static class <span class="strong">NodeTool.UpgradeSSTable</span>
extends <a href="../../../../org/apache/cassandra/tools/NodeTool.NodeToolCmd.html" title="class in org.apache.cassandra.tools">NodeTool.NodeToolCmd</a></pre>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor_summary">
<!-- -->
</a>
<h3>Constructor Summary</h3>
<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
<caption><span>Constructors</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Constructor and Description</th>
</tr>
<tr class="altColor">
<td class="colOne"><code><strong><a href="../../../../org/apache/cassandra/tools/NodeTool.UpgradeSSTable.html#NodeTool.UpgradeSSTable()">NodeTool.UpgradeSSTable</a></strong>()</code> </td>
</tr>
</table>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method_summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span>Methods</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><code><strong><a href="../../../../org/apache/cassandra/tools/NodeTool.UpgradeSSTable.html#execute(org.apache.cassandra.tools.NodeProbe)">execute</a></strong>(<a href="../../../../org/apache/cassandra/tools/NodeProbe.html" title="class in org.apache.cassandra.tools">NodeProbe</a> probe)</code> </td>
</tr>
</table>
<ul class="blockList">
<li class="blockList"><a name="methods_inherited_from_class_org.apache.cassandra.tools.NodeTool.NodeToolCmd">
<!-- -->
</a>
<h3>Methods inherited from class org.apache.cassandra.tools.<a href="../../../../org/apache/cassandra/tools/NodeTool.NodeToolCmd.html" title="class in org.apache.cassandra.tools">NodeTool.NodeToolCmd</a></h3>
<code><a href="../../../../org/apache/cassandra/tools/NodeTool.NodeToolCmd.html#parseOptionalColumnFamilies(java.util.List)">parseOptionalColumnFamilies</a>, <a href="../../../../org/apache/cassandra/tools/NodeTool.NodeToolCmd.html#parseOptionalKeyspace(java.util.List,%20org.apache.cassandra.tools.NodeProbe)">parseOptionalKeyspace</a>, <a href="../../../../org/apache/cassandra/tools/NodeTool.NodeToolCmd.html#parseOptionalKeyspace(java.util.List,%20org.apache.cassandra.tools.NodeProbe,%20boolean)">parseOptionalKeyspace</a>, <a href="../../../../org/apache/cassandra/tools/NodeTool.NodeToolCmd.html#run()">run</a></code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods_inherited_from_class_java.lang.Object">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Object</h3>
<code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor_detail">
<!-- -->
</a>
<h3>Constructor Detail</h3>
<a name="NodeTool.UpgradeSSTable()">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>NodeTool.UpgradeSSTable</h4>
<pre>public NodeTool.UpgradeSSTable()</pre>
</li>
</ul>
</li>
</ul>
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method_detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="execute(org.apache.cassandra.tools.NodeProbe)">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>execute</h4>
<pre>public void execute(<a href="../../../../org/apache/cassandra/tools/NodeProbe.html" title="class in org.apache.cassandra.tools">NodeProbe</a> probe)</pre>
<dl>
<dt><strong>Specified by:</strong></dt>
<dd><code><a href="../../../../org/apache/cassandra/tools/NodeTool.NodeToolCmd.html#execute(org.apache.cassandra.tools.NodeProbe)">execute</a></code> in class <code><a href="../../../../org/apache/cassandra/tools/NodeTool.NodeToolCmd.html" title="class in org.apache.cassandra.tools">NodeTool.NodeToolCmd</a></code></dd>
</dl>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../org/apache/cassandra/tools/NodeTool.TruncateHints.html" title="class in org.apache.cassandra.tools"><span class="strong">Prev Class</span></a></li>
<li><a href="../../../../org/apache/cassandra/tools/NodeTool.Version.html" title="class in org.apache.cassandra.tools"><span class="strong">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/apache/cassandra/tools/NodeTool.UpgradeSSTable.html" target="_top">Frames</a></li>
<li><a href="NodeTool.UpgradeSSTable.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li><a href="#constructor_summary">Constr</a> | </li>
<li><a href="#method_summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor_detail">Constr</a> | </li>
<li><a href="#method_detail">Method</a></li>
</ul>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
|
sayanh/ViewMaintenanceCassandra
|
doc/org/apache/cassandra/tools/NodeTool.UpgradeSSTable.html
|
HTML
|
apache-2.0
| 10,847
|
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from polyaxon import settings
from polyaxon.proxies.schemas.base import clean_config
from polyaxon.proxies.schemas.buffering import get_buffering_config
from polyaxon.proxies.schemas.charset import get_charset_config
from polyaxon.proxies.schemas.error_page import get_error_page_config
from polyaxon.proxies.schemas.gzip import get_gzip_config
from polyaxon.proxies.schemas.listen import get_listen_config
from polyaxon.proxies.schemas.locations import get_streams_locations_config
from polyaxon.proxies.schemas.logging import get_logging_config
from polyaxon.proxies.schemas.streams.gunicorn import (
get_gunicorn_config,
get_k8s_auth_config,
)
from polyaxon.proxies.schemas.streams.k8s import get_k8s_root_location_config
from polyaxon.proxies.schemas.timeout import get_timeout_config
def get_base_config():
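    """Assemble the base nginx config for the streams service.

    The listen directive comes first, followed by the shared schema
    fragments; clean_config() merges and cleans the combined list.
    """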
config = [
get_listen_config(
is_proxy=False, port=settings.PROXIES_CONFIG.streams_target_port
)
]
config += [
get_logging_config(),
get_gzip_config(),
get_charset_config(),
get_buffering_config(),
get_timeout_config(),
get_gunicorn_config(),
get_k8s_auth_config(),
get_error_page_config(),
get_streams_locations_config(),
get_k8s_root_location_config(),
]
return clean_config(config)
|
polyaxon/polyaxon
|
core/polyaxon/proxies/schemas/streams/base.py
|
Python
|
apache-2.0
| 1,940
|
<!-- BEGIN BREADCRUMBS -->
<div class="row breadcrumbs margin-bottom-40">
<div class="container">
<div class="col-md-4 col-sm-4">
<h1><?php echo ucwords($title); ?></h1>
</div>
<div class="col-md-8 col-sm-8">
<ul class="pull-right breadcrumb">
<!--<li><a href="index.html">Home</a></li>
<li><a href="">Pages</a></li>
<li class="active">About Us</li>
-->
<?php
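                        // $breadcrumbs is expected as label => URL pairs, e.g. (illustrative):
                        //   array('Home' => '/', 'Pages' => '/pages', 'About Us' => '')
                        // The last entry is rendered as plain text instead of a link.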
$i = 0;
foreach($breadcrumbs as $key => $val)
{
$i++;
echo ($i < count($breadcrumbs)) ? '<li><a href="'. $val .'">'. $key .'</a></li>' : '<li><span>'. $key .'</span></li>';
}
?>
</ul>
</div>
</div>
</div>
<!-- END BREADCRUMBS -->
|
kobe8bird/gan
|
application/views/breadcrumbs.php
|
PHP
|
apache-2.0
| 682
|
# Muraltia marlothii Chod SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Polygalaceae/Muraltia/Muraltia marlothii/README.md
|
Markdown
|
apache-2.0
| 173
|
# Crepis modocensis Greene SPECIES
#### Status
ACCEPTED
#### According to
Integrated Taxonomic Information System
#### Published in
Erythea 3:48. 1895
#### Original name
null
### Remarks
null
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Crepis/Crepis modocensis/README.md
|
Markdown
|
apache-2.0
| 196
|
# Strychnos spireana Dop SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Gentianales/Loganiaceae/Strychnos/Strychnos spireana/README.md
|
Markdown
|
apache-2.0
| 172
|
# Hieracium chondrillifolium subsp. chiarugianum Chiarugi SUBSPECIES
#### Status
ACCEPTED
#### According to
Euro+Med Plantbase
#### Published in
null
#### Original name
null
### Remarks
null
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Hieracium/Hieracium chondrillifolium/Hieracium chondrillifolium chiarugianum/README.md
|
Markdown
|
apache-2.0
| 195
|
# Acacia polyacantha subsp. polyacantha Willd. SUBSPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Acacia/Acacia polyacantha/Acacia polyacantha polyacantha/README.md
|
Markdown
|
apache-2.0
| 205
|
# Agaricus macmurphyi Murrill SPECIES
#### Status
ACCEPTED
#### According to
Index Fungorum
#### Published in
null
#### Original name
Agaricus macmurphyi Murrill
### Remarks
null
|
mdoering/backbone
|
life/Fungi/Basidiomycota/Agaricomycetes/Agaricales/Agaricaceae/Agaricus/Agaricus macmurphyi/README.md
|
Markdown
|
apache-2.0
| 183
|
# Tarenna cameronii (C.T.White) S.J.Ali & Robbr. SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Gentianales/Rubiaceae/Triflorensia/Triflorensia cameronii/ Syn. Tarenna cameronii/README.md
|
Markdown
|
apache-2.0
| 203
|
# Vitis labrusca f. alba (Prince) Fernald FORM
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Vitales/Vitaceae/Vitis/Vitis labrusca/Vitis labrusca alba/README.md
|
Markdown
|
apache-2.0
| 186
|
# Asterotrichum griseum Bonord. SPECIES
#### Status
ACCEPTED
#### According to
Index Fungorum
#### Published in
null
#### Original name
Asterotrichum griseum Bonord.
### Remarks
null
|
mdoering/backbone
|
life/Fungi/Basidiomycota/Agaricomycetes/Agaricales/Lyophyllaceae/Asterotrichum/Asterotrichum griseum/README.md
|
Markdown
|
apache-2.0
| 187
|
/*
* Copyright (C) 2015 Karumi.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.karumi.dexter;
import android.app.Activity;
import android.content.Context;
import com.karumi.dexter.listener.multi.MultiplePermissionsListener;
import com.karumi.dexter.listener.single.PermissionListener;
import java.util.Arrays;
import java.util.Collection;
/**
 * Class to simplify the management of Android runtime permissions.
* Dexter needs to be initialized before checking for a permission using {@link
* #initialize(Context)}
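 *
 * <p>A minimal usage sketch (the {@code myPermissionListener} implementation is assumed):
 * <pre>{@code
 * Dexter.initialize(getApplicationContext());
 * Dexter.checkPermission(myPermissionListener, Manifest.permission.CAMERA);
 * }</pre>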
*/
public final class Dexter {
private static DexterInstance instance;
/**
* Initializes the library
*
* @param context Context used by Dexter. Use your {@link android.app.Application} to make sure
* the instance is not cleaned up during your app lifetime
*/
public static void initialize(Context context) {
if (instance == null) {
AndroidPermissionService androidPermissionService = new AndroidPermissionService();
IntentProvider intentProvider = new IntentProvider();
instance = new DexterInstance(context, androidPermissionService, intentProvider);
}
}
/**
* Checks the permission and notifies the listener of its state.
* It is important to note that permissions still have to be declared in the manifest.
* Calling this method will result in an exception if {@link #isRequestOngoing()} returns true.
* All listener methods are called on the same thread that fired the permission request.
*
* @param listener The class that will be reported when the state of the permission is ready
* @param permission One of the values found in {@link android.Manifest.permission}
*/
public static void checkPermissionOnSameThread(PermissionListener listener, String permission) {
checkInstanceNotNull();
instance.checkPermission(listener, permission, ThreadFactory.makeSameThread());
}
/**
* Checks the permission and notifies the listener of its state.
* It is important to note that permissions still have to be declared in the manifest.
* Calling this method will result in an exception if {@link #isRequestOngoing()} returns true.
* All listener methods are called on the main thread that fired the permission request.
*
* @param listener The class that will be reported when the state of the permission is ready
* @param permission One of the values found in {@link android.Manifest.permission}
*/
public static void checkPermission(PermissionListener listener, String permission) {
checkInstanceNotNull();
instance.checkPermission(listener, permission, ThreadFactory.makeMainThread());
}
/**
* Checks the permissions and notifies the listener of its state.
* It is important to note that permissions still have to be declared in the manifest.
* Calling this method will result in an exception if {@link #isRequestOngoing()} returns true.
* All listener methods are called on the same thread that fired the permission request.
*
   * @param listener The class that will be reported when the state of the permissions is ready
* @param permissions Array of values found in {@link android.Manifest.permission}
*/
public static void checkPermissionsOnSameThread(MultiplePermissionsListener listener,
String... permissions) {
checkInstanceNotNull();
instance.checkPermissions(listener, Arrays.asList(permissions),
ThreadFactory.makeSameThread());
}
/**
* Checks the permissions and notifies the listener of its state.
* It is important to note that permissions still have to be declared in the manifest.
* Calling this method will result in an exception if {@link #isRequestOngoing()} returns true.
* All listener methods are called on the main thread that fired the permission request.
*
   * @param listener The class that will be reported when the state of the permissions is ready
* @param permissions Array of values found in {@link android.Manifest.permission}
*/
public static void checkPermissions(MultiplePermissionsListener listener, String... permissions) {
checkInstanceNotNull();
instance.checkPermissions(listener, Arrays.asList(permissions),
ThreadFactory.makeMainThread());
}
/**
* Checks the permissions and notifies the listener of its state
* It is important to note that permissions still have to be declared in the manifest
*
   * @param listener The class that will be reported when the state of the permissions is ready
* @param permissions Collection of values found in {@link android.Manifest.permission}
*/
public static void checkPermissions(MultiplePermissionsListener listener,
Collection<String> permissions) {
checkInstanceNotNull();
instance.checkPermissions(listener, permissions, ThreadFactory.makeMainThread());
}
/**
   * Checks if there is any permission request still ongoing.
   * If so, the state of permissions must not be checked until the request is resolved,
   * or an exception will be thrown.
*/
public static boolean isRequestOngoing() {
checkInstanceNotNull();
return instance.isRequestOngoing();
}
/**
   * Requests pending permissions if any permissions were lost. This method can be used to
* recover the Dexter state during a configuration change, for example when the device is
* rotated.
*/
public static void continuePendingRequestsIfPossible(MultiplePermissionsListener listener) {
checkInstanceNotNull();
instance.continuePendingRequestsIfPossible(listener, ThreadFactory.makeMainThread());
}
/**
   * Requests a pending permission if one was lost. This method can be used to
* recover the Dexter state during a configuration change, for example when the device is
* rotated.
*/
public static void continuePendingRequestIfPossible(PermissionListener listener) {
checkInstanceNotNull();
instance.continuePendingRequestIfPossible(listener, ThreadFactory.makeMainThread());
}
private static void checkInstanceNotNull() {
if (instance == null) {
throw new NullPointerException("context == null \n Must call \"initialize\" on Dexter");
}
}
/**
* Method called whenever the DexterActivity has been created or recreated and is ready to be
* used.
*/
static void onActivityReady(Activity activity) {
instance.onActivityReady(activity);
}
/**
   * Method called when all the permissions have been requested from the user
*
* @param grantedPermissions Collection with all the permissions the user has granted. Contains
* values from {@link android.Manifest.permission}
* @param deniedPermissions Collection with all the permissions the user has denied. Contains
* values from {@link android.Manifest.permission}
*/
static void onPermissionsRequested(Collection<String> grantedPermissions,
Collection<String> deniedPermissions) {
instance.onPermissionRequestGranted(grantedPermissions);
instance.onPermissionRequestDenied(deniedPermissions);
}
}
|
longtaoge/Dexter
|
dexter/src/main/java/com/karumi/dexter/Dexter.java
|
Java
|
apache-2.0
| 7,529
|
using System;
namespace InsertIt.Exceptions
{
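    /// <summary>
    /// Base type for container errors associated with a specific <see cref="Type"/>.
    /// An illustrative (hypothetical) subclass:
    /// <code>
    /// public class ItemNotRegisteredException : ClassException
    /// {
    ///     public ItemNotRegisteredException(Type type) : base(type) { }
    /// }
    /// </code>
    /// </summary>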
public abstract class ClassException : Exception
{
public readonly Type Type;
protected ClassException(Type type)
{
Type = type;
}
}
}
|
MNie/InsertIt
|
src/InsertIt/Exceptions/ClassException.cs
|
C#
|
apache-2.0
| 242
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_65) on Wed Sep 03 20:05:58 PDT 2014 -->
<meta http-equiv="Content-Type" content="text/html" charset="UTF-8">
<title>Uses of Class org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder (HBase 0.98.6-hadoop2 API)</title>
<meta name="date" content="2014-09-03">
<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder (HBase 0.98.6-hadoop2 API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/protobuf/generated/class-use/FilterProtos.ValueFilter.Builder.html" target="_top">Frames</a></li>
<li><a href="FilterProtos.ValueFilter.Builder.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder" class="title">Uses of Class<br>org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.apache.hadoop.hbase.protobuf.generated">org.apache.hadoop.hbase.protobuf.generated</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.apache.hadoop.hbase.protobuf.generated">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a> in <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/package-summary.html">org.apache.hadoop.hbase.protobuf.generated</a></h3>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/package-summary.html">org.apache.hadoop.hbase.protobuf.generated</a> that return <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.Builder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html#clear()">clear</a></strong>()</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.Builder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html#clearCompareFilter()">clearCompareFilter</a></strong>()</code>
<div class="block"><code>required .CompareFilter compare_filter = 1;</code></div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.Builder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html#clone()">clone</a></strong>()</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.Builder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html#mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)">mergeCompareFilter</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.CompareFilter.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.CompareFilter</a> value)</code>
<div class="block"><code>required .CompareFilter compare_filter = 1;</code></div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.Builder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html#mergeFrom(com.google.protobuf.CodedInputStream,%20com.google.protobuf.ExtensionRegistryLite)">mergeFrom</a></strong>(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.Builder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html#mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter)">mergeFrom</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter</a> other)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.Builder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html#mergeFrom(com.google.protobuf.Message)">mergeFrom</a></strong>(com.google.protobuf.Message other)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.html#newBuilder()">newBuilder</a></strong>()</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.html#newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter)">newBuilder</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter</a> prototype)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.html#newBuilderForType()">newBuilderForType</a></strong>()</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>protected <a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.html#newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent)">newBuilderForType</a></strong>(com.google.protobuf.GeneratedMessage.BuilderParent parent)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.Builder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html#setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder)">setCompareFilter</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.CompareFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.CompareFilter.Builder</a> builderForValue)</code>
<div class="block"><code>required .CompareFilter compare_filter = 1;</code></div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.Builder.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html#setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)">setCompareFilter</a></strong>(<a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.CompareFilter.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.CompareFilter</a> value)</code>
<div class="block"><code>required .CompareFilter compare_filter = 1;</code></div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">FilterProtos.ValueFilter.Builder</a></code></td>
<td class="colLast"><span class="strong">FilterProtos.ValueFilter.</span><code><strong><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.html#toBuilder()">toBuilder</a></strong>()</code> </td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../org/apache/hadoop/hbase/protobuf/generated/FilterProtos.ValueFilter.Builder.html" title="class in org.apache.hadoop.hbase.protobuf.generated">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/protobuf/generated/class-use/FilterProtos.ValueFilter.Builder.html" target="_top">Frames</a></li>
<li><a href="FilterProtos.ValueFilter.Builder.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2014 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
</body>
</html>
|
lshain/hbase-0.98.6-hadoop2
|
docs/devapidocs/org/apache/hadoop/hbase/protobuf/generated/class-use/FilterProtos.ValueFilter.Builder.html
|
HTML
|
apache-2.0
| 16,490
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace FhirProfilePublisher.Engine
{
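    /// <summary>
    /// Options controlling the publisher's generated output: page titles,
    /// header/footer text, templates, and which resources are listed.
    /// (Summary inferred from the property names below.)
    /// </summary>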
public class OutputOptions
{
public string HeaderText { get; set; }
public string PageTitleSuffix { get; set; }
public string FooterText { get; set; }
public string IndexPageHtml { get; set; }
public string PageTemplate { get; set; }
public bool ShowEverythingOnOnePage { get; set; } = true;
public bool ShowResourcesInW5Group { get; set; }
public ResourceMaturity[] ListOnlyResourcesWithMaturity { get; set; }
}
}
|
endeavourhealth/FHIR-Tools
|
FhirProfilePublisher/FhirProfilePublisher.Engine/Model/OutputConfiguration.cs
|
C#
|
apache-2.0
| 654
|
<div align="center">
<a href="https://github.com/webpack/webpack">
<img width="200" height="200" src="https://webpack.js.org/assets/icon-square-big.svg">
</a>
</div>
[![npm][npm]][npm-url]
[![node][node]][node-url]
[![deps][deps]][deps-url]
[![tests][tests]][tests-url]
[![cover][cover]][cover-url]
[![chat][chat]][chat-url]
[![size][size]][size-url]
# terser-webpack-plugin
This plugin uses [terser](https://github.com/terser-js/terser) to minify your JavaScript.
## Getting Started
To begin, you'll need to install `terser-webpack-plugin`:
```console
$ npm install terser-webpack-plugin --save-dev
```
Then add the plugin to your `webpack` config. For example:
**webpack.config.js**
```js
const TerserPlugin = require('terser-webpack-plugin');
module.exports = {
optimization: {
minimize: true,
minimizer: [new TerserPlugin()],
},
};
```
And run `webpack` via your preferred method.
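For example, with a local installation you can invoke it through `npx`:
```console
$ npx webpack
```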
## Options
### `test`
Type: `String|RegExp|Array<String|RegExp>`
Default: `/\.m?js(\?.*)?$/i`
Test to match files against.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
test: /\.js(\?.*)?$/i,
}),
],
},
};
```
### `include`
Type: `String|RegExp|Array<String|RegExp>`
Default: `undefined`
Files to include.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
include: /\/includes/,
}),
],
},
};
```
### `exclude`
Type: `String|RegExp|Array<String|RegExp>`
Default: `undefined`
Files to exclude.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
exclude: /\/excludes/,
}),
],
},
};
```
### `cache`
> ⚠ Ignored in webpack 5! Please use https://webpack.js.org/configuration/other-options/#cache.
Type: `Boolean|String`
Default: `true`
Enable file caching.
Default path to cache directory: `node_modules/.cache/terser-webpack-plugin`.
> ℹ️ If you use your own `minify` function, please read the `minify` section to make sure cache invalidation works correctly.
#### `Boolean`
Enable/disable file caching.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
cache: true,
}),
],
},
};
```
#### `String`
Enable file caching and set the path to the cache directory.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
cache: 'path/to/cache',
}),
],
},
};
```
### `cacheKeys`
> ⚠ Ignored in webpack 5! Please use https://webpack.js.org/configuration/other-options/#cache.
Type: `Function<(defaultCacheKeys, file) -> Object>`
Default: `defaultCacheKeys => defaultCacheKeys`
Allows you to override the default cache keys.
Default cache keys:
```js
({
terser: require('terser/package.json').version, // terser version
'terser-webpack-plugin': require('../package.json').version, // plugin version
'terser-webpack-plugin-options': this.options, // plugin options
path: compiler.outputPath ? `${compiler.outputPath}/${file}` : file, // asset path
hash: crypto.createHash('md4').update(input).digest('hex'), // source file hash
});
```
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
cache: true,
cacheKeys: (defaultCacheKeys, file) => {
defaultCacheKeys.myCacheKey = 'myCacheKeyValue';
return defaultCacheKeys;
},
}),
],
},
};
```
### `parallel`
Type: `Boolean|Number`
Default: `true`
Use multi-process parallel running to improve the build speed.
Default number of concurrent runs: `os.cpus().length - 1`.
> ℹ️ Parallelization can speed up your build significantly and is therefore **highly recommended**.
> ⚠️ If you use **Circle CI** or any other environment that doesn't report the real number of available CPUs, you need to set the number of CPUs explicitly to avoid `Error: Call retries were exceeded` (see [#143](https://github.com/webpack-contrib/terser-webpack-plugin/issues/143), [#202](https://github.com/webpack-contrib/terser-webpack-plugin/issues/202)).
#### `Boolean`
Enable/disable multi-process parallel running.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
parallel: true,
}),
],
},
};
```
#### `Number`
Enable multi-process parallel running and set the number of concurrent runs.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
parallel: 4,
}),
],
},
};
```
### `sourceMap`
Type: `Boolean`
Default: `false` (see below for details on the `devtool` value and the `SourceMapDevToolPlugin` plugin)
**Works only with `source-map`, `inline-source-map`, `hidden-source-map` and `nosources-source-map` values for the [`devtool`](https://webpack.js.org/configuration/devtool/) option.**
Why?
- `eval` wraps modules in `eval("string")` and the minimizer does not handle strings.
- `cheap` has no column information, so the minimizer generates only a single line, which leaves only a single mapping.
The plugin respects the [`devtool`](https://webpack.js.org/configuration/devtool/) value and uses the `SourceMapDevToolPlugin` plugin.
Using a supported `devtool` value enables source map generation.
Using `SourceMapDevToolPlugin` with the `columns` option enabled also enables source map generation.
Use source maps to map error message locations to modules (this slows down the compilation).
If you use your own `minify` function, please read the `minify` section for handling source maps correctly.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
sourceMap: true,
}),
],
},
};
```
### `minify`
Type: `Function`
Default: `undefined`
Allows you to override the default minify function.
By default, the plugin uses the [terser](https://github.com/terser-js/terser) package.
Useful for using and testing unpublished versions or forks.
> ⚠️ **Always use `require` inside `minify` function when `parallel` option enabled**.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
minify: (file, sourceMap) => {
const extractedComments = [];
              // Custom logic for extracting comments
const { error, map, code, warnings } = require('uglify-module') // Or require('./path/to/uglify-module')
.minify(file, {
/* Your options for minification */
});
return { error, map, code, warnings, extractedComments };
},
}),
],
},
};
```
### `terserOptions`
Type: `Object`
Default: [default](https://github.com/terser-js/terser#minify-options)
Terser minify [options](https://github.com/terser-js/terser#minify-options).
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
terserOptions: {
ecma: undefined,
warnings: false,
parse: {},
compress: {},
mangle: true, // Note `mangle.properties` is `false` by default.
module: false,
output: null,
toplevel: false,
nameCache: null,
ie8: false,
keep_classnames: undefined,
keep_fnames: false,
safari10: false,
},
}),
],
},
};
```
### `extractComments`
Type: `Boolean|String|RegExp|Function<(node, comment) -> Boolean|Object>|Object`
Default: `true`
Whether comments shall be extracted to a separate file (see [details](https://github.com/webpack/webpack/commit/71933e979e51c533b432658d5e37917f9e71595a)).
By default, only comments matching the `/^\**!|@preserve|@license|@cc_on/i` regexp condition are extracted; the remaining comments are removed.
If the original file is named `foo.js`, the comments will be stored in `foo.js.LICENSE.txt`.
The `terserOptions.output.comments` option specifies whether a comment will be preserved, i.e. it is possible to preserve some comments (e.g. annotations) while extracting others, or even to preserve comments that have been extracted.
#### `Boolean`
Enable/disable extracting comments.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
extractComments: true,
}),
],
},
};
```
#### `String`
Extract `all` comments, or only `some` of them (those matching the `/^\**!|@preserve|@license|@cc_on/i` RegExp).
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
extractComments: 'all',
}),
],
},
};
```
#### `RegExp`
All comments that match the given expression will be extracted to a separate file.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
extractComments: /@extract/i,
}),
],
},
};
```
#### `Function<(node, comment) -> Boolean>`
All comments for which the given function returns `true` will be extracted to a separate file.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
extractComments: (astNode, comment) => {
if (/@extract/i.test(comment.value)) {
return true;
}
return false;
},
}),
],
},
};
```
#### `Object`
Allows you to customize the condition for extracting comments, as well as the extracted file name and the banner.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
extractComments: {
condition: /^\**!|@preserve|@license|@cc_on/i,
filename: (fileData) => {
// The "fileData" argument contains object with "filename", "basename", "query" and "hash"
return `${fileData.filename}.LICENSE.txt${fileData.query}`;
},
banner: (licenseFile) => {
return `License information can be found in ${licenseFile}`;
},
},
}),
],
},
};
```
##### `condition`
Type: `Boolean|String|RegExp|Function<(node, comment) -> Boolean|Object>`
The condition that determines which comments are extracted.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
extractComments: {
condition: 'some',
filename: (fileData) => {
// The "fileData" argument contains object with "filename", "basename", "query" and "hash"
return `${fileData.filename}.LICENSE.txt${fileData.query}`;
},
banner: (licenseFile) => {
return `License information can be found in ${licenseFile}`;
},
},
}),
],
},
};
```
##### `filename`
Type: `String|Function<(string) -> String>`
Default: `[file].LICENSE.txt[query]`
Available placeholders: `[file]`, `[query]` and `[filebase]` (`[base]` for webpack 5).
The file where the extracted comments will be stored.
Default is to append the suffix `.LICENSE.txt` to the original filename.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
extractComments: {
condition: /^\**!|@preserve|@license|@cc_on/i,
filename: 'extracted-comments.js',
banner: (licenseFile) => {
return `License information can be found in ${licenseFile}`;
},
},
}),
],
},
};
```
##### `banner`
Type: `Boolean|String|Function<(string) -> String>`
Default: `/*! For license information please see ${commentsFile} */`
The banner text that points to the extracted file and will be added on top of the original file.
Can be `false` (no banner), a `String`, or a `Function<(string) -> String>` that will be called with the filename where extracted comments have been stored.
It will be wrapped in a comment.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
extractComments: {
condition: true,
filename: (fileData) => {
// The "fileData" argument contains object with "filename", "basename", "query" and "hash"
return `${fileData.filename}.LICENSE.txt${fileData.query}`;
},
banner: (commentsFile) => {
return `My custom banner about license information ${commentsFile}`;
},
},
}),
],
},
};
```
### `warningsFilter`
Type: `Function<(warning, file, source) -> Boolean>`
Default: `() => true`
Allows you to filter [terser](https://github.com/terser-js/terser) warnings.
Return `true` to keep the warning, a falsy value (`false`/`null`/`undefined`) otherwise.
> ⚠️ The `source` argument will contain `undefined` if you don't use source maps.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
warningsFilter: (warning, file, source) => {
if (/Dropping unreachable code/i.test(warning)) {
return true;
}
if (/file\.js/i.test(file)) {
return true;
}
if (/source\.js/i.test(source)) {
return true;
}
return false;
},
}),
],
},
};
```
## Examples
### Preserve Comments
Extract all legal comments (i.e. those matching `/^\**!|@preserve|@license|@cc_on/i`) to a separate file, and keep `/@license/i` comments in the minified output.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
terserOptions: {
output: {
comments: /@license/i,
},
},
extractComments: true,
}),
],
},
};
```
### Remove Comments
To build without any comments in the output, use this config:
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
terserOptions: {
output: {
comments: false,
},
},
extractComments: false,
}),
],
},
};
```
### Custom Minify Function
Override the default minify function: use `uglify-js` for minification.
**webpack.config.js**
```js
module.exports = {
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
        // Uncomment the lines below to handle cache invalidation correctly
// cache: true,
// cacheKeys: (defaultCacheKeys) => {
// delete defaultCacheKeys.terser;
//
// return Object.assign(
// {},
// defaultCacheKeys,
// { 'uglify-js': require('uglify-js/package.json').version },
// );
// },
minify: (file, sourceMap) => {
// https://github.com/mishoo/UglifyJS2#minify-options
const uglifyJsOptions = {
/* your `uglify-js` package options */
};
if (sourceMap) {
uglifyJsOptions.sourceMap = {
content: sourceMap,
};
}
return require('uglify-js').minify(file, uglifyJsOptions);
},
}),
],
},
};
```
## Contributing
Please take a moment to read our contributing guidelines if you haven't yet done so.
[CONTRIBUTING](./.github/CONTRIBUTING.md)
## License
[MIT](./LICENSE)
[npm]: https://img.shields.io/npm/v/terser-webpack-plugin.svg
[npm-url]: https://npmjs.com/package/terser-webpack-plugin
[node]: https://img.shields.io/node/v/terser-webpack-plugin.svg
[node-url]: https://nodejs.org
[deps]: https://david-dm.org/webpack-contrib/terser-webpack-plugin.svg
[deps-url]: https://david-dm.org/webpack-contrib/terser-webpack-plugin
[tests]: https://github.com/webpack-contrib/terser-webpack-plugin/workflows/terser-webpack-plugin/badge.svg
[tests-url]: https://github.com/webpack-contrib/terser-webpack-plugin/actions
[cover]: https://codecov.io/gh/webpack-contrib/terser-webpack-plugin/branch/master/graph/badge.svg
[cover-url]: https://codecov.io/gh/webpack-contrib/terser-webpack-plugin
[chat]: https://img.shields.io/badge/gitter-webpack%2Fwebpack-brightgreen.svg
[chat-url]: https://gitter.im/webpack/webpack
[size]: https://packagephobia.now.sh/badge?p=terser-webpack-plugin
[size-url]: https://packagephobia.now.sh/result?p=terser-webpack-plugin
|
cloudfoundry-community/asp.net5-buildpack
|
fixtures/node_apps/angular_dotnet/ClientApp/node_modules/terser-webpack-plugin/README.md
|
Markdown
|
apache-2.0
| 16,763
|
/*
* $Id$
* This file is a part of the Arakhne Foundation Classes, http://www.arakhne.org/afc
*
* Copyright (c) 2000-2012 Stephane GALLAND.
* Copyright (c) 2005-10, Multiagent Team, Laboratoire Systemes et Transports,
* Universite de Technologie de Belfort-Montbeliard.
* Copyright (c) 2013-2020 The original authors, and other authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.arakhne.afc.vmutil.json;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@SuppressWarnings("all")
public class JsonBufferTest {
private JsonBuffer buffer;
@BeforeEach
public void setUp() {
this.buffer = new JsonBuffer();
}
@AfterEach
public void tearDown() {
this.buffer = null;
}
@Test
public void add_String() {
this.buffer.add("xyz", "myvalue"); //$NON-NLS-1$ //$NON-NLS-2$
assertEquals("{\n\t\"xyz\": \"myvalue\"\n}", this.buffer.toString()); //$NON-NLS-1$
}
@Test
public void add_Integer() {
this.buffer.add("xyz", 234); //$NON-NLS-1$
assertEquals("{\n\t\"xyz\": 234\n}", this.buffer.toString()); //$NON-NLS-1$
}
@Test
public void add_Double() {
this.buffer.add("xyz", 234.56); //$NON-NLS-1$
assertEquals("{\n\t\"xyz\": "+ Double.toString(234.56) + "\n}", this.buffer.toString()); //$NON-NLS-1$ //$NON-NLS-2$
}
@Test
public void add_Boolean() {
this.buffer.add("xyz", true); //$NON-NLS-1$
assertEquals("{\n\t\"xyz\": true\n}", this.buffer.toString()); //$NON-NLS-1$
}
@Test
public void add_Object() {
Object obj = new Object();
this.buffer.add("xyz", obj); //$NON-NLS-1$
assertEquals("{\n\t\"xyz\": \"" + obj.toString() + "\"\n}", this.buffer.toString()); //$NON-NLS-1$ //$NON-NLS-2$
}
@Test
public void add_Iterable() {
Object obj = new Object();
List<Object> col = Arrays.asList("myvalue", 123, 456.78, obj, true); //$NON-NLS-1$
this.buffer.add("xyz", col); //$NON-NLS-1$
assertEquals("{\n\t\"xyz\": [\n\t\t" //$NON-NLS-1$
+ "\"myvalue\",\n\t\t123,\n\t\t" //$NON-NLS-1$
+ Double.toString(456.78)
+ ",\n\t\t\"" + obj.toString() //$NON-NLS-1$
+ "\",\n\t\ttrue\n\t]\n}", this.buffer.toString()); //$NON-NLS-1$
}
@Test
public void add_Map() {
Object obj = new Object();
Map<Object, Object> map = new TreeMap<>(); // Use a TreeMap for sorting the keys
map.put("k1", "myvalue"); //$NON-NLS-1$ //$NON-NLS-2$
map.put("k2", 123); //$NON-NLS-1$
map.put("k3", 456.78); //$NON-NLS-1$
map.put("k4", obj); //$NON-NLS-1$
map.put("k5", true); //$NON-NLS-1$
this.buffer.add("xyz", map); //$NON-NLS-1$
assertEquals("{\n\t\"xyz\": {\n\t\t" //$NON-NLS-1$
+ "\"k1\": \"myvalue\",\n\t\t\"k2\": 123,\n\t\t\"k3\": " //$NON-NLS-1$
+ Double.toString(456.78)
+ ",\n\t\t\"k4\": \"" + obj.toString() //$NON-NLS-1$
+ "\",\n\t\t\"k5\": true\n\t}\n}", this.buffer.toString()); //$NON-NLS-1$
}
@Test
public void add_JsonBuffer() {
JsonableObject obj = (buffer) -> {
buffer.add("abc", 345.6); //$NON-NLS-1$
buffer.add("def", "myvalue"); //$NON-NLS-1$ //$NON-NLS-2$
};
this.buffer.add("xyz", obj); //$NON-NLS-1$
assertEquals("{\n\t\"xyz\": {\n\t\t\"abc\": 345.6,\n\t\t\"def\": \"myvalue\"\n\t}\n}", this.buffer.toString()); //$NON-NLS-1$
}
@Test
public void add_JsonableObject_01() {
Object obj = new Object();
List<Object> col = Arrays.asList("myvalue", 123, 456.78, obj, true); //$NON-NLS-1$
JsonBuffer subbuffer = new JsonBuffer();
subbuffer.add("abc", col); //$NON-NLS-1$
this.buffer.add("xyz", subbuffer); //$NON-NLS-1$
assertEquals("{\n\t\"xyz\": {\n\t\t\"abc\": [\n\t\t\t" //$NON-NLS-1$
+ "\"myvalue\",\n\t\t\t123,\n\t\t\t" //$NON-NLS-1$
+ Double.toString(456.78)
+ ",\n\t\t\t\"" + obj.toString() //$NON-NLS-1$
+ "\",\n\t\t\ttrue\n\t\t]\n\t}\n}", this.buffer.toString()); //$NON-NLS-1$
}
@Test
public void add_JsonBuffer_02() {
Object obj = new Object();
Map<Object, Object> map = new TreeMap<>(); // Use a TreeMap for sorting the keys
map.put("k1", "myvalue"); //$NON-NLS-1$ //$NON-NLS-2$
map.put("k2", 123); //$NON-NLS-1$
map.put("k3", 456.78); //$NON-NLS-1$
map.put("k4", obj); //$NON-NLS-1$
map.put("k5", true); //$NON-NLS-1$
JsonBuffer subbuffer = new JsonBuffer();
subbuffer.add("abc", map); //$NON-NLS-1$
this.buffer.add("xyz", subbuffer); //$NON-NLS-1$
assertEquals("{\n\t\"xyz\": {\n\t\t\"abc\": {\n\t\t\t" //$NON-NLS-1$
+ "\"k1\": \"myvalue\",\n\t\t\t\"k2\": 123,\n\t\t\t\"k3\": " //$NON-NLS-1$
+ Double.toString(456.78)
+ ",\n\t\t\t\"k4\": \"" + obj.toString() //$NON-NLS-1$
+ "\",\n\t\t\t\"k5\": true\n\t\t}\n\t}\n}", this.buffer.toString()); //$NON-NLS-1$
}
@Test
public void toString_Static_01() {
String actual = JsonBuffer.toString("k1", "myvalue"); //$NON-NLS-1$ //$NON-NLS-2$
assertEquals("{\n\t" //$NON-NLS-1$
+ "\"k1\": \"myvalue\"\n}", actual); //$NON-NLS-1$
}
@Test
public void toString_Static_02() {
Object obj = new Object();
String actual = JsonBuffer.toString(
"k1", "myvalue", //$NON-NLS-1$ //$NON-NLS-2$
"k2", 123, //$NON-NLS-1$
"k3", 456.78, //$NON-NLS-1$
"k4", obj, //$NON-NLS-1$
"k5", true); //$NON-NLS-1$
assertEquals("{\n\t" //$NON-NLS-1$
+ "\"k1\": \"myvalue\",\n\t\"k2\": 123,\n\t\"k3\": " //$NON-NLS-1$
+ Double.toString(456.78)
+ ",\n\t\"k4\": \"" + obj.toString() //$NON-NLS-1$
+ "\",\n\t\"k5\": true\n}", actual); //$NON-NLS-1$
}
}
|
gallandarakhneorg/afc
|
core/vmutils/src/test/java/org/arakhne/afc/vmutil/json/JsonBufferTest.java
|
Java
|
apache-2.0
| 6,089
|
/**
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
var path = require('path');
var util = require('../cordova/util');
var platforms = require('./platformsConfig.json');
var events = require('weexpack-common').events;
// Avoid loading the same platform projects more than once (identified by path)
var cachedApis = {};
// getPlatformApi() should be the only method of instantiating the
// PlatformProject classes for now.
function getPlatformApi(platform, platformRootDir) {
// if platformRootDir is not specified, try to detect it first
if (!platformRootDir) {
var projectRootDir = util.isCordova();
platformRootDir = projectRootDir && path.join(projectRootDir, 'platforms', platform);
}
if (!platformRootDir) {
// If platformRootDir is still undefined, then we're probably not inside a cordova project
throw new Error('Current location is not a weexpack project');
}
// CB-11174 Resolve symlinks first before working with root directory
platformRootDir = util.convertToRealPathSafe(platformRootDir);
var cached = cachedApis[platformRootDir];
if (cached && cached.platform == platform) return cached;
if (!platforms[platform]) throw new Error('Unknown platform ' + platform);
var PlatformApi;
//WEEX_HOOK
// try {
// // First we need to find whether platform exposes its' API via js module
// // If it does, then we require and instantiate it.
// var platformApiModule = path.join(platformRootDir, 'cordova', 'Api.js');
// PlatformApi = require(platformApiModule);
// } catch (err) {
// // Check if platform already compatible w/ PlatformApi and show deprecation warning
// if (err && err.code === 'MODULE_NOT_FOUND' && platforms[platform].apiCompatibleSince) {
// events.emit('warn', ' Using this version of weexpack with older version of weexpack-' + platform +
// ' is being deprecated. Consider upgrading to weexpack-' + platform + '@' +
// platforms[platform].apiCompatibleSince + ' or newer.');
// } else {
// events.emit('warn', 'Error loading weexpack-'+platform);
// }
//
// PlatformApi = require('./PlatformApiPoly');
// }
var platformPath = platform + '_' + 'pack';
var platformApiPath = path.join(__dirname, platformPath, 'Api.js');
PlatformApi = require(platformApiPath);
var platformApi = new PlatformApi(platform, platformRootDir, events);
cachedApis[platformRootDir] = platformApi;
return platformApi;
}
function getRealPlatformApi(platform, platformRootDir) {
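// Note: unlike getPlatformApi(), result caching is effectively disabled here;
// 'cached' is never assigned, so a fresh API instance is created on every call.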
var cached; //= cachedApis[__dirname];
if (cached && cached.platform == platform) return cached;
if (!platforms[platform]) throw new Error('Unknown platform ' + platform);
var PlatformApi;
try {
// First we need to find whether the platform exposes its API via a js module.
// If it does, then we require and instantiate it.
var platformPath = platform + '_' + 'pack';
var platformApiPath = path.join(__dirname, platformPath, 'Api.js');
PlatformApi = require(platformApiPath);
} catch (err) {
// Check if platform already compatible w/ PlatformApi and show deprecation warning
if (err && err.code === 'MODULE_NOT_FOUND' && platforms[platform].apiCompatibleSince) {
events.emit('warn', ' Using this version of weexpack with older version of weexpack-' + platform +
' is being deprecated. Consider upgrading to weexpack-' + platform + '@' +
platforms[platform].apiCompatibleSince + ' or newer.');
} else {
events.emit('warn', 'Error loading weexpack-'+platform);
}
PlatformApi = require('./PlatformApiPoly');
}
var platformApi = new PlatformApi(platform, platformRootDir, events);
// cachedApis[__dirname] = platformApi;
return platformApi;
}
module.exports = platforms;
// We don't want these methods to be enumerable on the platforms object, because we expect enumerable properties of the
// platforms object to be platforms.
Object.defineProperties(module.exports, {
'getPlatformApi': {value: getPlatformApi, configurable: true, writable: true},
'getRealPlatformApi':{value: getRealPlatformApi, configurable: true, writable: true}
});
|
weexext/ucar-weex-core
|
tools/weex-tools/lib/src/platforms/platforms.js
|
JavaScript
|
apache-2.0
| 5,032
|
# Syzygium glenum Craven SPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Myrtales/Myrtaceae/Syzygium/Syzygium glenum/README.md
|
Markdown
|
apache-2.0
| 180
|
# Hypocrea fomitopsis P.G. Liu & Yoshim. Doi SPECIES
#### Status
ACCEPTED
#### According to
Index Fungorum
#### Published in
in Liu, Doi, Wang & Wang, Mycosystema 19(3): 324 (2000)
#### Original name
Hypocrea fomitopsis P.G. Liu & Yoshim. Doi
### Remarks
null
|
mdoering/backbone
|
life/Fungi/Ascomycota/Sordariomycetes/Hypocreales/Hypocreaceae/Hypocrea/Hypocrea fomitopsis/README.md
|
Markdown
|
apache-2.0
| 264
|
# Aniptodera longispora K.D. Hyde, 1990 SPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
Aniptodera longispora K.D. Hyde, 1990
### Remarks
null
|
mdoering/backbone
|
life/Fungi/Ascomycota/Sordariomycetes/Microascales/Halosphaeriaceae/Aniptodera/Aniptodera longispora/README.md
|
Markdown
|
apache-2.0
| 228
|
# Leptorhaphis atomaria (Ach.) Szatala SPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
Lichen atomarius Ach.
### Remarks
null
|
mdoering/backbone
|
life/Fungi/Ascomycota/Dothideomycetes/Pleosporales/Naetrocymbaceae/Leptorhaphis/Leptorhaphis atomaria/README.md
|
Markdown
|
apache-2.0
| 211
|
# Octopleura epibaterium SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Myrtales/Melastomataceae/Octopleura/Octopleura epibaterium/README.md
|
Markdown
|
apache-2.0
| 172
|
<?php
require '../App.php';
App::Header(['check_authin'=>App::USERAUTH_ISLOGGED, 'page_title'=>'Messages']);
?>
<div class="navi-list">
<div class="container">
<div class="col-md-8 link">
<ul>
<li class="on"><a href="/shopifly"><span class="glyphicon glyphicon-th" aria-hidden="true"></span> DASHBOARD</a></li>
<li><a href="/shoppe/myitems"><span class="glyphicon glyphicon-briefcase" aria-hidden="true"></span> MY ITEMS</a></li>
<li><a href="/shoppe/wishlist"><span class="glyphicon glyphicon-copy" aria-hidden="true"></span> WISHLIST</a></li>
</ul>
</div>
<div class="col-md-4 sell">
</div>
</div>
</div>
<div class="caty-index">
<div class="container">
<div class="col-md-3 caty-left">
<div class="caty-list" id="caty-lister">
<ul>
<li><span class="glyphicon glyphicon-th-large"></span> Navigation</li>
<li class="on"><span class="glyphicon glyphicon-envelope"></span> Messages</li>
<li><span class="glyphicon glyphicon-globe"></span> Notifiers</li>
<li><span class="glyphicon glyphicon-equalizer"></span> Products</li>
<li><span class="glyphicon glyphicon-cog"></span> Settings</li>
</ul>
</div>
</div>
<div class="col-md-9 caty-right">
<div class="caty-menu">
NOTHING TO DISPLAY
</div>
</div>
</div>
</div>
</div>
<?php App::Footer(['html_struct'=>1]); ?>
|
icefangz/Shopifly
|
messages/index.php
|
PHP
|
apache-2.0
| 1,375
|
package eu.marcocattaneo.rememberhere.presentation.ui;
import android.annotation.TargetApi;
import android.content.Context;
import android.net.Uri;
import android.os.Build;
import android.util.AttributeSet;
import android.view.ViewTreeObserver;
import android.widget.ImageView;
import com.squareup.picasso.Picasso;
public class CoverImageView extends ImageView {
public CoverImageView(Context context) {
super(context);
init();
}
public CoverImageView(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
public CoverImageView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init();
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public CoverImageView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
init();
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
private void init() {
}
public void setCover(final String url, final boolean fade) {
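// Defer loading until the view has been measured, so Picasso can resize
// the image to the view's final dimensions. Note: the 'fade' parameter is
// currently unused.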
ViewTreeObserver vto = getViewTreeObserver();
vto.addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
public boolean onPreDraw() {
getViewTreeObserver().removeOnPreDrawListener(this);
Uri uri = Uri.parse(url);
Picasso.with(getContext()).load(uri).resize(getMeasuredWidth(), getMeasuredHeight()).centerCrop().into(CoverImageView.this);
return true;
}
});
}
}
|
mcatta/RememberHere
|
app/src/main/java/eu/marcocattaneo/rememberhere/presentation/ui/CoverImageView.java
|
Java
|
apache-2.0
| 1,708
|
using NUnit.Framework;
using System;
namespace Reactor.Core.Test
{
[TestFixture]
[Timeout(30000)]
public class TimeoutTest
{
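// These tests exercise Timeout(firstTimeout, itemTimeoutSelector, other):
// 'first' bounds the wait for the first element, 'item' bounds each
// subsequent element, and on timeout the sequence switches to the
// fallback Flux.Just(100), as the assertions below show.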
[Test]
public void Timeout_Normal()
{
var first = new DirectProcessor<int>();
var item = new DirectProcessor<int>();
var source = new DirectProcessor<int>();
var ts = source.Timeout(first, v => item, Flux.Just(100))
.Test();
source.OnNext(1);
first.OnNext(1);
source.OnNext(2, 3, 4);
source.OnComplete();
ts.AssertResult(1, 2, 3, 4);
Assert.IsFalse(first.HasSubscribers, "first has subscribers?!");
Assert.IsFalse(item.HasSubscribers, "item has subscribers?!");
Assert.IsFalse(source.HasSubscribers, "source has subscribers?!");
}
[Test]
public void Timeout_FirstTimesOut()
{
var first = new DirectProcessor<int>();
var item = new DirectProcessor<int>();
var source = new DirectProcessor<int>();
var ts = source.Timeout(first, v => item, Flux.Just(100))
.Test();
first.OnNext(1);
ts.AssertResult(100);
Assert.IsFalse(first.HasSubscribers, "first has subscribers?!");
Assert.IsFalse(item.HasSubscribers, "item has subscribers?!");
Assert.IsFalse(source.HasSubscribers, "source has subscribers?!");
}
[Test]
public void Timeout_SecondTimesOut()
{
var first = new DirectProcessor<int>();
var item = new DirectProcessor<int>();
var source = new DirectProcessor<int>();
var ts = source.Timeout(first, v => item, Flux.Just(100))
.Test();
source.OnNext(1);
item.OnNext(1);
ts.AssertResult(1, 100);
Assert.IsFalse(first.HasSubscribers, "first has subscribers?!");
Assert.IsFalse(item.HasSubscribers, "item has subscribers?!");
Assert.IsFalse(source.HasSubscribers, "source has subscribers?!");
}
[Test]
public void Timeout_Conditional()
{
var first = new DirectProcessor<int>();
var item = new DirectProcessor<int>();
var source = new DirectProcessor<int>();
var ts = source.Timeout(first, v => item, Flux.Just(100))
.Filter(v => true)
.Test();
source.OnNext(1);
first.OnNext(1);
source.OnNext(2, 3, 4);
source.OnComplete();
ts.AssertResult(1, 2, 3, 4);
Assert.IsFalse(first.HasSubscribers, "first has subscribers?!");
Assert.IsFalse(item.HasSubscribers, "item has subscribers?!");
Assert.IsFalse(source.HasSubscribers, "source has subscribers?!");
}
[Test]
public void Timeout_Conditional_FirstTimesOut()
{
var first = new DirectProcessor<int>();
var item = new DirectProcessor<int>();
var source = new DirectProcessor<int>();
var ts = source.Timeout(first, v => item, Flux.Just(100))
.Filter(v => true)
.Test();
first.OnNext(1);
ts.AssertResult(100);
Assert.IsFalse(first.HasSubscribers, "first has subscribers?!");
Assert.IsFalse(item.HasSubscribers, "item has subscribers?!");
Assert.IsFalse(source.HasSubscribers, "source has subscribers?!");
}
[Test]
public void Timeout_Conditional_SecondTimesOut()
{
var first = new DirectProcessor<int>();
var item = new DirectProcessor<int>();
var source = new DirectProcessor<int>();
var ts = source.Timeout(first, v => item, Flux.Just(100))
.Filter(v => true)
.Test();
source.OnNext(1);
item.OnNext(1);
ts.AssertResult(1, 100);
Assert.IsFalse(first.HasSubscribers, "first has subscribers?!");
Assert.IsFalse(item.HasSubscribers, "item has subscribers?!");
Assert.IsFalse(source.HasSubscribers, "source has subscribers?!");
}
}
}
|
reactor/reactor-core-dotnet
|
Reactor.Core.Test/TimeoutTest.cs
|
C#
|
apache-2.0
| 4,330
|
'use strict';
describe('Main view End-2-End', function() {
var brickTable
beforeEach(function() {
browser.get("http://cfalguiere.github.io/Presentations/2014/DemoBrickInventory/");
brickTable = element.all(by.repeater('brick in bricksList'))
})
describe('When Main view appears', function() {
it('should show all the bricks', function() {
expect(brickTable.count()).toBe(76)
})
});
});
|
cfalguiere/Protractor-Firefox-Headless-Docker
|
testproject/test/e2e/e2e-test.js
|
JavaScript
|
apache-2.0
| 445
|
/*
* Copyright 2009 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jboss.netty.handler.codec.http;
import java.nio.charset.Charset;
/**
* Abstract HttpData implementation
*
* @author <a href="http://www.jboss.org/netty/">The Netty Project</a>
* @author Andy Taylor (andy.taylor@jboss.org)
* @author <a href="http://gleamynode.net/">Trustin Lee</a>
* @author <a href="http://openr66.free.fr/">Frederic Bregier</a>
*
*/
public abstract class AbstractHttpData implements HttpData {
protected final String name;
protected long definedSize = 0;
protected long size = 0;
protected Charset charset = HttpCodecUtil.DEFAULT_CHARSET;
protected boolean completed = false;
public AbstractHttpData(String name, Charset charset, long size)
throws NullPointerException, IllegalArgumentException {
if (name == null) {
throw new NullPointerException("name");
}
name = name.trim();
if (name.length() == 0) {
throw new IllegalArgumentException("empty name");
}
for (int i = 0; i < name.length(); i ++) {
char c = name.charAt(i);
if (c > 127) {
throw new IllegalArgumentException(
"name contains non-ascii character: " + name);
}
// Check prohibited characters.
switch (c) {
case '=':
case ',':
case ';':
case ' ':
case '\t':
case '\r':
case '\n':
case '\f':
case 0x0b: // Vertical tab
throw new IllegalArgumentException(
"name contains one of the following prohibited characters: " +
"=,; \\t\\r\\n\\v\\f: " + name);
}
}
this.name = name;
if (charset != null) {
setCharset(charset);
}
definedSize = size;
}
@Override
public String getName() {
return name;
}
@Override
public boolean isCompleted() {
return completed;
}
@Override
public Charset getCharset() {
return charset;
}
@Override
public void setCharset(Charset charset) {
if (charset == null) {
throw new NullPointerException("charset");
}
this.charset = charset;
}
@Override
public long length() {
return size;
}
}
|
aperepel/netty
|
src/main/java/org/jboss/netty/handler/codec/http/AbstractHttpData.java
|
Java
|
apache-2.0
| 3,014
|
## Android WebView File Upload
This project demonstrates how to upload files from an H5 page. Server-side code is also provided, so interested readers can deploy it and test the upload feature.
If you are only interested in the client side, you only need to look at the WebView file-selection part.
[Server-side code download](https://github.com/chiclaim/android_mvvm_server); the client can upload files with the following URL:
http://`your ip address`:`port`/AndroidMvvmServer/upload, e.g. http://192.168.1.200:8080/AndroidMvvmServer/upload
### Tested Devices
| Manufacturer | Model | OS Version |
| -------- | -----: | :----: |
| Samsung | Galaxy S4 | Android 5.0.1 |
| Xiaomi | Redmi 4 | Android 6.0.1 |
| Huawei | Honor 7i | Android 6.0.1 |
| Huawei | Honor V8 | Android 8.0 |
| Huawei | Mate 20 | Android 9.0 |
On one Huawei Honor device (Android 5.0), file selection looks like this:

On Android 4.4, if you use the system default file chooser and pick a file through the `Gallery` instead of the `Files` option, the selected file name may not be resolved correctly, as shown below:

### Default File Choosers Across Manufacturers
On a Redmi phone running Android 6.0 (left) and a Huawei Honor 7i running Android 6.0 (right), WebView file selection looks like this:

### Custom WebView File Chooser
Different OEMs ship WebView file choosers that differ in both styling and the number of buttons.
If you need a consistent file-selection UI, you have to implement a custom WebView file chooser, as shown below:

### Caveats
1. If, while using this project or building on top of it, tapping the `Choose File` button in the H5 page stops responding, the likely cause is a `ValueCallback` that was never reset; see the sketch after this list.
2. This project focuses on H5 file selection; adapt the permission handling and storage directories to your own situation.
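The following is a minimal, hypothetical sketch of the pattern behind caveat 1 (class and field names are illustrative, not this project's actual code): always deliver a result to the pending `ValueCallback`, even when the user cancels, and then reset it so the next request can be served.
```java
import android.app.Activity;
import android.content.Intent;
import android.net.Uri;
import android.webkit.ValueCallback;
import android.webkit.WebChromeClient;
import android.webkit.WebView;

public class UploadChromeClient extends WebChromeClient {

    private static final int REQUEST_FILE_CHOOSER = 1;
    private final Activity activity;
    private ValueCallback<Uri[]> filePathCallback;

    public UploadChromeClient(Activity activity) {
        this.activity = activity;
    }

    @Override // Called on API 21+ when the H5 page requests a file
    public boolean onShowFileChooser(WebView webView, ValueCallback<Uri[]> callback,
                                     FileChooserParams params) {
        filePathCallback = callback;
        activity.startActivityForResult(params.createIntent(), REQUEST_FILE_CHOOSER);
        return true;
    }

    // Forward Activity#onActivityResult() here.
    public void onFileChooserResult(int requestCode, int resultCode, Intent data) {
        if (requestCode != REQUEST_FILE_CHOOSER || filePathCallback == null) {
            return;
        }
        // Deliver a value even on cancel (parseResult returns null then),
        // and reset the field; otherwise later taps on "Choose File" are ignored.
        filePathCallback.onReceiveValue(FileChooserParams.parseResult(resultCode, data));
        filePathCallback = null;
    }
}
```
Older WebViews (pre-API 21) use the hidden `openFileChooser` callbacks instead; the same reset rule applies.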
[APK demo download](https://github.com/chiclaim/android-webview-upload-file/blob/master/assets/demo.apk)
### References
1. [chromium-webview-samples](https://github.com/GoogleChrome/chromium-webview-samples)
2. [file-upload-in-webview](http://stackoverflow.com/questions/5907369/file-upload-in-webview)
|
chiclaim/android-sample
|
cross-platform-h5/android-h5-file-upload/README.md
|
Markdown
|
apache-2.0
| 2,743
|
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Threading;
namespace Google.Cloud.AspNetCore.DataProtection.Storage
{
/// <summary>
/// Simple means of executing retry with exponential backoff and proportional jitter,
/// retrying on any GoogleApiException.
/// </summary>
internal sealed class RetryHandler
{
private readonly int _maxAttempts;
private readonly TimeSpan _initialBackoff;
private readonly double _backoffMultiplier;
private readonly TimeSpan _maxBackoff;
private readonly object _lock = new object();
private readonly Random _random = new Random(); // Used to add jitter to the backoff.
internal RetryHandler(int maxAttempts, TimeSpan initialBackoff, double backoffMultiplier, TimeSpan maxBackoff)
{
_maxAttempts = maxAttempts;
_initialBackoff = initialBackoff;
_backoffMultiplier = backoffMultiplier;
_maxBackoff = maxBackoff;
}
/// <summary>
/// Applies simple retry until the given function returns successfully (or a non-GoogleApiException is thrown).
/// </summary>
internal T ExecuteWithRetry<T>(Func<T> func)
{
int attempt = 0;
TimeSpan nextBackoff = _initialBackoff;
while (true)
{
try
{
return func();
}
catch (GoogleApiException) when (attempt < _maxAttempts)
{
attempt++;
int millisecondsToSleep;
lock (_lock)
{
int nextBackoffMillis = (int)nextBackoff.TotalMilliseconds;
// Apply jitter to the backoff, but only within the range of 50%-100% of the "theoretical" backoff.
millisecondsToSleep = nextBackoffMillis / 2 + _random.Next(nextBackoffMillis / 2);
}
Thread.Sleep(millisecondsToSleep);
nextBackoff = TimeSpan.FromSeconds(nextBackoff.TotalSeconds * _backoffMultiplier);
if (nextBackoff > _maxBackoff)
{
nextBackoff = _maxBackoff;
}
}
}
}
}
}
|
GoogleCloudPlatform/google-cloud-dotnet-powerpack
|
Google.Cloud.AspNetCore/Google.Cloud.AspNetCore.DataProtection.Storage/RetryHandler.cs
|
C#
|
apache-2.0
| 2,901
|
# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
import netaddr
from neutron_lib import exceptions
from oslo_log import log as logging
from oslo_policy import policy as oslo_policy
from oslo_utils import excutils
import six
import webob.exc
from neutron._i18n import _, _LE, _LI
from neutron.api import api_common
from neutron.api.v2 import attributes
from neutron.api.v2 import resource as wsgi_resource
from neutron.callbacks import events
from neutron.callbacks import registry
from neutron.common import constants as n_const
from neutron.common import exceptions as n_exc
from neutron.common import rpc as n_rpc
from neutron.db import api as db_api
from neutron import policy
from neutron import quota
from neutron.quota import resource_registry
LOG = logging.getLogger(__name__)
FAULT_MAP = {exceptions.NotFound: webob.exc.HTTPNotFound,
exceptions.Conflict: webob.exc.HTTPConflict,
exceptions.InUse: webob.exc.HTTPConflict,
exceptions.BadRequest: webob.exc.HTTPBadRequest,
exceptions.ServiceUnavailable: webob.exc.HTTPServiceUnavailable,
exceptions.NotAuthorized: webob.exc.HTTPForbidden,
netaddr.AddrFormatError: webob.exc.HTTPBadRequest,
oslo_policy.PolicyNotAuthorized: webob.exc.HTTPForbidden
}
class Controller(object):
LIST = 'list'
SHOW = 'show'
CREATE = 'create'
UPDATE = 'update'
DELETE = 'delete'
@property
def plugin(self):
return self._plugin
@property
def resource(self):
return self._resource
@property
def attr_info(self):
return self._attr_info
@property
def member_actions(self):
return self._member_actions
def __init__(self, plugin, collection, resource, attr_info,
allow_bulk=False, member_actions=None, parent=None,
allow_pagination=False, allow_sorting=False):
if member_actions is None:
member_actions = []
self._plugin = plugin
self._collection = collection.replace('-', '_')
self._resource = resource.replace('-', '_')
self._attr_info = attr_info
self._allow_bulk = allow_bulk
self._allow_pagination = allow_pagination
self._allow_sorting = allow_sorting
self._native_bulk = self._is_native_bulk_supported()
self._native_pagination = self._is_native_pagination_supported()
self._native_sorting = self._is_native_sorting_supported()
self._policy_attrs = [name for (name, info) in self._attr_info.items()
if info.get('required_by_policy')]
self._notifier = n_rpc.get_notifier('network')
self._member_actions = member_actions
self._primary_key = self._get_primary_key()
if self._allow_pagination and self._native_pagination:
# Native pagination needs native sorting support
if not self._native_sorting:
raise exceptions.Invalid(
_("Native pagination depend on native sorting")
)
if not self._allow_sorting:
LOG.info(_LI("Allow sorting is enabled because native "
"pagination requires native sorting"))
self._allow_sorting = True
self.parent = parent
if parent:
self._parent_id_name = '%s_id' % parent['member_name']
parent_part = '_%s' % parent['member_name']
else:
self._parent_id_name = None
parent_part = ''
self._plugin_handlers = {
self.LIST: 'get%s_%s' % (parent_part, self._collection),
self.SHOW: 'get%s_%s' % (parent_part, self._resource)
}
for action in [self.CREATE, self.UPDATE, self.DELETE]:
self._plugin_handlers[action] = '%s%s_%s' % (action, parent_part,
self._resource)
def _get_primary_key(self, default_primary_key='id'):
for key, value in six.iteritems(self._attr_info):
if value.get('primary_key', False):
return key
return default_primary_key
def _is_native_bulk_supported(self):
native_bulk_attr_name = ("_%s__native_bulk_support"
% self._plugin.__class__.__name__)
return getattr(self._plugin, native_bulk_attr_name, False)
def _is_native_pagination_supported(self):
return api_common.is_native_pagination_supported(self._plugin)
def _is_native_sorting_supported(self):
return api_common.is_native_sorting_supported(self._plugin)
def _exclude_attributes_by_policy(self, context, data):
"""Identifies attributes to exclude according to authZ policies.
Return a list of attribute names which should be stripped from the
response returned to the user because the user is not authorized
to see them.
"""
attributes_to_exclude = []
for attr_name in data.keys():
attr_data = self._attr_info.get(attr_name)
if attr_data and attr_data['is_visible']:
if policy.check(
context,
'%s:%s' % (self._plugin_handlers[self.SHOW], attr_name),
data,
might_not_exist=True,
pluralized=self._collection):
# this attribute is visible, check next one
continue
# if the code reaches this point then either the policy check
# failed or the attribute was not visible in the first place
attributes_to_exclude.append(attr_name)
return attributes_to_exclude
def _view(self, context, data, fields_to_strip=None):
"""Build a view of an API resource.
:param context: the neutron context
:param data: the object for which a view is being created
:param fields_to_strip: attributes to remove from the view
:returns: a view of the object which includes only attributes
visible according to API resource declaration and authZ policies.
"""
fields_to_strip = ((fields_to_strip or []) +
self._exclude_attributes_by_policy(context, data))
return self._filter_attributes(context, data, fields_to_strip)
def _filter_attributes(self, context, data, fields_to_strip=None):
if not fields_to_strip:
return data
return dict(item for item in six.iteritems(data)
if (item[0] not in fields_to_strip))
def _do_field_list(self, original_fields):
fields_to_add = None
# don't do anything if fields were not specified in the request
if original_fields:
fields_to_add = [attr for attr in self._policy_attrs
if attr not in original_fields]
original_fields.extend(self._policy_attrs)
return original_fields, fields_to_add
def __getattr__(self, name):
if name in self._member_actions:
@db_api.retry_db_errors
def _handle_action(request, id, **kwargs):
arg_list = [request.context, id]
# Ensure policy engine is initialized
policy.init()
# Fetch the resource and verify if the user can access it
try:
parent_id = kwargs.get(self._parent_id_name)
resource = self._item(request,
id,
do_authz=True,
field_list=None,
parent_id=parent_id)
except oslo_policy.PolicyNotAuthorized:
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
body = kwargs.pop('body', None)
# Explicit comparison with None to distinguish from {}
if body is not None:
arg_list.append(body)
# It is ok to raise a 403 because accessibility to the
# object was checked earlier in this method
policy.enforce(request.context,
name,
resource,
pluralized=self._collection)
ret_value = getattr(self._plugin, name)(*arg_list, **kwargs)
# It is simply impossible to predict whether one of these
# actions alters resource usage. For instance a tenant port
# is created when a router interface is added. Therefore it is
# important to mark as dirty resources whose counters have
# been altered by this operation
resource_registry.set_resources_dirty(request.context)
return ret_value
return _handle_action
else:
raise AttributeError()
def _get_pagination_helper(self, request):
if self._allow_pagination and self._native_pagination:
return api_common.PaginationNativeHelper(request,
self._primary_key)
elif self._allow_pagination:
return api_common.PaginationEmulatedHelper(request,
self._primary_key)
return api_common.NoPaginationHelper(request, self._primary_key)
def _get_sorting_helper(self, request):
if self._allow_sorting and self._native_sorting:
return api_common.SortingNativeHelper(request, self._attr_info)
elif self._allow_sorting:
return api_common.SortingEmulatedHelper(request, self._attr_info)
return api_common.NoSortingHelper(request, self._attr_info)
def _items(self, request, do_authz=False, parent_id=None):
"""Retrieves and formats a list of elements of the requested entity."""
# NOTE(salvatore-orlando): The following ensures that fields which
# are needed for authZ policy validation are not stripped away by the
# plugin before returning.
original_fields, fields_to_add = self._do_field_list(
api_common.list_args(request, 'fields'))
filters = api_common.get_filters(request, self._attr_info,
['fields', 'sort_key', 'sort_dir',
'limit', 'marker', 'page_reverse'])
kwargs = {'filters': filters,
'fields': original_fields}
sorting_helper = self._get_sorting_helper(request)
pagination_helper = self._get_pagination_helper(request)
sorting_helper.update_args(kwargs)
sorting_helper.update_fields(original_fields, fields_to_add)
pagination_helper.update_args(kwargs)
pagination_helper.update_fields(original_fields, fields_to_add)
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj_getter = getattr(self._plugin, self._plugin_handlers[self.LIST])
obj_list = obj_getter(request.context, **kwargs)
obj_list = sorting_helper.sort(obj_list)
obj_list = pagination_helper.paginate(obj_list)
# Check authz
if do_authz:
# FIXME(salvatore-orlando): obj_getter might return references to
# other resources. Must check authZ on them too.
# Omit items from list that should not be visible
obj_list = [obj for obj in obj_list
if policy.check(request.context,
self._plugin_handlers[self.SHOW],
obj,
plugin=self._plugin,
pluralized=self._collection)]
# Use the first element in the list for discriminating which attributes
# should be filtered out because of authZ policies
# fields_to_add contains a list of attributes added for request policy
# checks but that were not required by the user. They should be
# therefore stripped
fields_to_strip = fields_to_add or []
if obj_list:
fields_to_strip += self._exclude_attributes_by_policy(
request.context, obj_list[0])
collection = {self._collection:
[self._filter_attributes(
request.context, obj,
fields_to_strip=fields_to_strip)
for obj in obj_list]}
pagination_links = pagination_helper.get_links(obj_list)
if pagination_links:
collection[self._collection + "_links"] = pagination_links
# Synchronize usage trackers, if needed
resource_registry.resync_resource(
request.context, self._resource, request.context.tenant_id)
return collection
def _item(self, request, id, do_authz=False, field_list=None,
parent_id=None):
"""Retrieves and formats a single element of the requested entity."""
kwargs = {'fields': field_list}
action = self._plugin_handlers[self.SHOW]
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj_getter = getattr(self._plugin, action)
obj = obj_getter(request.context, id, **kwargs)
# Check authz
# FIXME(salvatore-orlando): obj_getter might return references to
# other resources. Must check authZ on them too.
if do_authz:
policy.enforce(request.context,
action,
obj,
pluralized=self._collection)
return obj
@db_api.retry_db_errors
def index(self, request, **kwargs):
"""Returns a list of the requested entity."""
parent_id = kwargs.get(self._parent_id_name)
# Ensure policy engine is initialized
policy.init()
return self._items(request, True, parent_id)
@db_api.retry_db_errors
def show(self, request, id, **kwargs):
"""Returns detailed information about the requested entity."""
try:
# NOTE(salvatore-orlando): The following ensures that fields
# which are needed for authZ policy validation are not stripped
# away by the plugin before returning.
field_list, added_fields = self._do_field_list(
api_common.list_args(request, "fields"))
parent_id = kwargs.get(self._parent_id_name)
# Ensure policy engine is initialized
policy.init()
return {self._resource:
self._view(request.context,
self._item(request,
id,
do_authz=True,
field_list=field_list,
parent_id=parent_id),
fields_to_strip=added_fields)}
except oslo_policy.PolicyNotAuthorized:
# To avoid giving away information, pretend that it
# doesn't exist
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
def _emulate_bulk_create(self, obj_creator, request, body, parent_id=None):
objs = []
try:
for item in body[self._collection]:
kwargs = {self._resource: item}
if parent_id:
kwargs[self._parent_id_name] = parent_id
fields_to_strip = self._exclude_attributes_by_policy(
request.context, item)
objs.append(self._filter_attributes(
request.context,
obj_creator(request.context, **kwargs),
fields_to_strip=fields_to_strip))
return objs
# Note(salvatore-orlando): broad catch as in theory a plugin
# could raise any kind of exception
except Exception:
with excutils.save_and_reraise_exception():
for obj in objs:
obj_deleter = getattr(self._plugin,
self._plugin_handlers[self.DELETE])
try:
kwargs = ({self._parent_id_name: parent_id}
if parent_id else {})
obj_deleter(request.context, obj['id'], **kwargs)
except Exception:
# broad catch as our only purpose is to log the
# exception
LOG.exception(_LE("Unable to undo add for "
"%(resource)s %(id)s"),
{'resource': self._resource,
'id': obj['id']})
# TODO(salvatore-orlando): The object being processed when the
# plugin raised might have been created or not in the db.
# We need a way for ensuring that if it has been created,
# it is then deleted
def create(self, request, body=None, **kwargs):
self._notifier.info(request.context,
self._resource + '.create.start',
body)
return self._create(request, body, **kwargs)
@db_api.retry_db_errors
def _create(self, request, body, **kwargs):
"""Creates a new instance of the requested entity."""
parent_id = kwargs.get(self._parent_id_name)
body = Controller.prepare_request_body(request.context,
body, True,
self._resource, self._attr_info,
allow_bulk=self._allow_bulk)
action = self._plugin_handlers[self.CREATE]
# Check authz
if self._collection in body:
# Have to account for bulk create
items = body[self._collection]
else:
items = [body]
# Ensure policy engine is initialized
policy.init()
# Store requested resource amounts grouping them by tenant
# This won't work with multiple resources. However because of the
# current structure of this controller there will hardly be more than
# one resource for which reservations are being made
request_deltas = collections.defaultdict(int)
for item in items:
self._validate_network_tenant_ownership(request,
item[self._resource])
policy.enforce(request.context,
action,
item[self._resource],
pluralized=self._collection)
if 'tenant_id' not in item[self._resource]:
# no tenant_id - no quota check
continue
tenant_id = item[self._resource]['tenant_id']
request_deltas[tenant_id] += 1
# Quota enforcement
reservations = []
try:
for (tenant, delta) in request_deltas.items():
reservation = quota.QUOTAS.make_reservation(
request.context,
tenant,
{self._resource: delta},
self._plugin)
reservations.append(reservation)
except n_exc.QuotaResourceUnknown as e:
# We don't want to quota this resource
LOG.debug(e)
def notify(create_result):
# Ensure usage trackers for all resources affected by this API
# operation are marked as dirty
with request.context.session.begin():
# Commit the reservation(s)
for reservation in reservations:
quota.QUOTAS.commit_reservation(
request.context, reservation.reservation_id)
resource_registry.set_resources_dirty(request.context)
notifier_method = self._resource + '.create.end'
self._notifier.info(request.context,
notifier_method,
create_result)
registry.notify(self._resource, events.BEFORE_RESPONSE, self,
context=request.context, data=create_result,
method_name=notifier_method,
collection=self._collection,
action=action, original={})
return create_result
def do_create(body, bulk=False, emulated=False):
kwargs = {self._parent_id_name: parent_id} if parent_id else {}
if bulk and not emulated:
obj_creator = getattr(self._plugin, "%s_bulk" % action)
else:
obj_creator = getattr(self._plugin, action)
try:
if emulated:
return self._emulate_bulk_create(obj_creator, request,
body, parent_id)
else:
if self._collection in body:
# This is weird but fixing it requires changes to the
# plugin interface
kwargs.update({self._collection: body})
else:
kwargs.update({self._resource: body})
return obj_creator(request.context, **kwargs)
except Exception:
# In case of failure the plugin will always raise an
# exception. Cancel the reservation
with excutils.save_and_reraise_exception():
for reservation in reservations:
quota.QUOTAS.cancel_reservation(
request.context, reservation.reservation_id)
if self._collection in body and self._native_bulk:
# plugin does atomic bulk create operations
objs = do_create(body, bulk=True)
# Use first element of list to discriminate attributes which
# should be removed because of authZ policies
fields_to_strip = self._exclude_attributes_by_policy(
request.context, objs[0])
return notify({self._collection: [self._filter_attributes(
request.context, obj, fields_to_strip=fields_to_strip)
for obj in objs]})
else:
if self._collection in body:
# Emulate atomic bulk behavior
objs = do_create(body, bulk=True, emulated=True)
return notify({self._collection: objs})
else:
obj = do_create(body)
return notify({self._resource: self._view(request.context,
obj)})
def delete(self, request, id, **kwargs):
"""Deletes the specified entity."""
if request.body:
msg = _('Request body is not supported in DELETE.')
raise webob.exc.HTTPBadRequest(msg)
self._notifier.info(request.context,
self._resource + '.delete.start',
{self._resource + '_id': id})
return self._delete(request, id, **kwargs)
@db_api.retry_db_errors
def _delete(self, request, id, **kwargs):
action = self._plugin_handlers[self.DELETE]
# Check authz
policy.init()
parent_id = kwargs.get(self._parent_id_name)
obj = self._item(request, id, parent_id=parent_id)
try:
policy.enforce(request.context,
action,
obj,
pluralized=self._collection)
except oslo_policy.PolicyNotAuthorized:
# To avoid giving away information, pretend that it
# doesn't exist
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
obj_deleter = getattr(self._plugin, action)
obj_deleter(request.context, id, **kwargs)
# A delete operation usually alters resource usage, so mark affected
# usage trackers as dirty
resource_registry.set_resources_dirty(request.context)
notifier_method = self._resource + '.delete.end'
result = {self._resource: self._view(request.context, obj)}
notifier_payload = {self._resource + '_id': id}
notifier_payload.update(result)
self._notifier.info(request.context,
notifier_method,
notifier_payload)
registry.notify(self._resource, events.BEFORE_RESPONSE, self,
context=request.context, data=result,
method_name=notifier_method, action=action,
original={})
def update(self, request, id, body=None, **kwargs):
"""Updates the specified entity's attributes."""
try:
payload = body.copy()
except AttributeError:
msg = _("Invalid format: %s") % request.body
raise exceptions.BadRequest(resource='body', msg=msg)
payload['id'] = id
self._notifier.info(request.context,
self._resource + '.update.start',
payload)
return self._update(request, id, body, **kwargs)
@db_api.retry_db_errors
def _update(self, request, id, body, **kwargs):
body = Controller.prepare_request_body(request.context,
body, False,
self._resource, self._attr_info,
allow_bulk=self._allow_bulk)
action = self._plugin_handlers[self.UPDATE]
# Load object to check authz
# but pass only attributes in the original body and required
# by the policy engine to the policy 'brain'
field_list = [name for (name, value) in six.iteritems(self._attr_info)
if (value.get('required_by_policy') or
value.get('primary_key') or
'default' not in value)]
# Ensure policy engine is initialized
policy.init()
parent_id = kwargs.get(self._parent_id_name)
orig_obj = self._item(request, id, field_list=field_list,
parent_id=parent_id)
orig_object_copy = copy.copy(orig_obj)
orig_obj.update(body[self._resource])
# Make a list of attributes to be updated to inform the policy engine
# which attributes are set explicitly so that it can distinguish them
# from the ones that are set to their default values.
orig_obj[n_const.ATTRIBUTES_TO_UPDATE] = body[self._resource].keys()
try:
policy.enforce(request.context,
action,
orig_obj,
pluralized=self._collection)
except oslo_policy.PolicyNotAuthorized:
with excutils.save_and_reraise_exception() as ctxt:
# If a tenant is modifying its own object, it's safe to return
# a 403. Otherwise, pretend that it doesn't exist to avoid
# giving away information.
orig_obj_tenant_id = orig_obj.get("tenant_id")
if (request.context.tenant_id != orig_obj_tenant_id or
orig_obj_tenant_id is None):
ctxt.reraise = False
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
obj_updater = getattr(self._plugin, action)
kwargs = {self._resource: body}
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj = obj_updater(request.context, id, **kwargs)
# Usually an update operation does not alter resource usage, but as
# there might be side effects it might be worth checking for changes
# in resource usage here as well (e.g: a tenant port is created when a
# router interface is added)
resource_registry.set_resources_dirty(request.context)
result = {self._resource: self._view(request.context, obj)}
notifier_method = self._resource + '.update.end'
self._notifier.info(request.context, notifier_method, result)
registry.notify(self._resource, events.BEFORE_RESPONSE, self,
context=request.context, data=result,
method_name=notifier_method, action=action,
original=orig_object_copy)
return result
@staticmethod
def prepare_request_body(context, body, is_create, resource, attr_info,
allow_bulk=False):
"""Verifies required attributes are in request body.
Also checking that an attribute is only specified if it is allowed
for the given operation (create/update).
Attribute with default values are considered to be optional.
body argument must be the deserialized body.
"""
collection = resource + "s"
if not body:
raise webob.exc.HTTPBadRequest(_("Resource body required"))
LOG.debug("Request body: %(body)s", {'body': body})
try:
if collection in body:
if not allow_bulk:
raise webob.exc.HTTPBadRequest(_("Bulk operation "
"not supported"))
if not body[collection]:
raise webob.exc.HTTPBadRequest(_("Resources required"))
bulk_body = [
Controller.prepare_request_body(
context, item if resource in item
else {resource: item}, is_create, resource, attr_info,
allow_bulk) for item in body[collection]
]
return {collection: bulk_body}
res_dict = body.get(resource)
except (AttributeError, TypeError):
msg = _("Body contains invalid data")
raise webob.exc.HTTPBadRequest(msg)
if res_dict is None:
msg = _("Unable to find '%s' in request body") % resource
raise webob.exc.HTTPBadRequest(msg)
attributes.populate_tenant_id(context, res_dict, attr_info, is_create)
attributes.verify_attributes(res_dict, attr_info)
if is_create: # POST
attributes.fill_default_value(attr_info, res_dict,
webob.exc.HTTPBadRequest)
else: # PUT
for attr, attr_vals in six.iteritems(attr_info):
if attr in res_dict and not attr_vals['allow_put']:
msg = _("Cannot update read-only attribute %s") % attr
raise webob.exc.HTTPBadRequest(msg)
attributes.convert_value(attr_info, res_dict, webob.exc.HTTPBadRequest)
return body
def _validate_network_tenant_ownership(self, request, resource_item):
# TODO(salvatore-orlando): consider whether this check can be folded
# in the policy engine
if (request.context.is_admin or request.context.is_advsvc or
self._resource not in ('port', 'subnet')):
return
network = self._plugin.get_network(
request.context,
resource_item['network_id'])
# do not perform the check on shared networks
if network.get('shared'):
return
network_owner = network['tenant_id']
if network_owner != resource_item['tenant_id']:
# NOTE(kevinbenton): we raise a 404 to hide the existence of the
# network from the tenant since they don't have access to it.
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
def create_resource(collection, resource, plugin, params, allow_bulk=False,
member_actions=None, parent=None, allow_pagination=False,
allow_sorting=False):
controller = Controller(plugin, collection, resource, params, allow_bulk,
member_actions=member_actions, parent=parent,
allow_pagination=allow_pagination,
allow_sorting=allow_sorting)
return wsgi_resource.Resource(controller, FAULT_MAP)
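# A minimal usage sketch (names below are illustrative; the plugin object and
# attribute map are assumed to be defined elsewhere, e.g. by an extension):
#
#   resource = create_resource('widgets', 'widget', plugin,
#                              WIDGET_ATTRIBUTE_MAP, allow_bulk=True,
#                              allow_pagination=True, allow_sorting=True)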
|
igor-toga/local-snat
|
neutron/api/v2/base.py
|
Python
|
apache-2.0
| 33,385
|
/*******************************************************************************
* Copyright 2014 United States Government as represented by the
* Administrator of the National Aeronautics and Space Administration.
* All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package gov.nasa.ensemble.core.plan.resources.profile.operations;
import gov.nasa.ensemble.common.operation.AbstractEnsembleUndoableOperation;
import gov.nasa.ensemble.core.jscience.DataPoint;
import gov.nasa.ensemble.core.jscience.Profile;
import gov.nasa.ensemble.core.jscience.util.ProfileUtil;
import gov.nasa.ensemble.emf.transaction.TransactionUtils;
import org.eclipse.emf.common.util.EList;
public class AddProfileDataPointOperation extends AbstractEnsembleUndoableOperation {
private Profile profile;
private DataPoint dataPoint;
public AddProfileDataPointOperation(Profile profile, DataPoint dp) {
super("add data point");
this.profile = profile;
this.dataPoint = dp;
}
@Override
@SuppressWarnings("unchecked")
protected void execute() throws Throwable {
final EList<DataPoint> dataPoints = profile.getDataPoints();
TransactionUtils.writing(dataPoints, new Runnable() {
@Override
public void run() {
ProfileUtil.insertNewDataPoint(dataPoint, dataPoints);
}
});
}
@Override
@SuppressWarnings("unchecked")
protected void undo() throws Throwable {
final EList<DataPoint> dataPoints = profile.getDataPoints();
TransactionUtils.writing(dataPoints, new Runnable() {
@Override
public void run() {
dataPoints.remove(dataPoint);
}
});
}
@Override
public String toString() {
return "add profile data point "+ this.dataPoint;
}
@Override
protected void dispose(UndoableState state) {
this.profile = null;
this.dataPoint = null;
}
}
|
nasa/OpenSPIFe
|
gov.nasa.ensemble.core.plan.resources/src/gov/nasa/ensemble/core/plan/resources/profile/operations/AddProfileDataPointOperation.java
|
Java
|
apache-2.0
| 2,392
|
<svg class="fingerboard"></svg>
|
wildbits/fingerboard
|
www/templates/FingerBoard.html
|
HTML
|
apache-2.0
| 32
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.executionengine.mapReduceLayer;
import com.google.common.collect.Lists;
import org.apache.hadoop.conf.Configuration;
import org.apache.pig.LoadFunc;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLoad;
import org.apache.pig.builtin.PigStorage;
import org.apache.pig.test.PigStorageWithStatistics;
import org.apache.pig.test.TestJobControlCompiler;
import org.junit.Assert;
import org.junit.Test;
public class TestInputSizeReducerEstimator {
private static final Configuration CONF = new Configuration();
@Test
public void testGetInputSizeFromFs() throws Exception {
long size = 2L * 1024 * 1024 * 1024;
Assert.assertEquals(size, InputSizeReducerEstimator.getTotalInputFileSize(
CONF, Lists.newArrayList(createPOLoadWithSize(size, new PigStorage())),
new org.apache.hadoop.mapreduce.Job(CONF)));
Assert.assertEquals(size, InputSizeReducerEstimator.getTotalInputFileSize(
CONF,
Lists.newArrayList(createPOLoadWithSize(size, new PigStorageWithStatistics())),
new org.apache.hadoop.mapreduce.Job(CONF)));
Assert.assertEquals(size * 2, InputSizeReducerEstimator.getTotalInputFileSize(
CONF,
Lists.newArrayList(
createPOLoadWithSize(size, new PigStorage()),
createPOLoadWithSize(size, new PigStorageWithStatistics())),
new org.apache.hadoop.mapreduce.Job(CONF)));
}
@Test
public void testGetInputSizeFromLoader() throws Exception {
long size = 2L * 1024 * 1024 * 1024;
Assert.assertEquals(size, InputSizeReducerEstimator.getInputSizeFromLoader(
createPOLoadWithSize(size, new PigStorageWithStatistics()),
new org.apache.hadoop.mapreduce.Job(CONF)));
}
private static POLoad createPOLoadWithSize(long size, LoadFunc loadFunc) throws Exception {
return TestJobControlCompiler.createPOLoadWithSize(size, loadFunc);
}
}
|
bsmedberg/pig
|
test/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/TestInputSizeReducerEstimator.java
|
Java
|
apache-2.0
| 2,904
|
//
// PayTool.h
// DingYouMing
//
// Created by ceyu on 2017/3/8.
// Copyright © 2017年 吴宏佳. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "PayParamObj.h"
@protocol PayToolDelegate <NSObject>
@optional
-(void)payResultWithErrorCode:(NSInteger)errorCode withErrorInfo:(NSString *)errorInfo withObject:(id)object;
-(void)payBeginWithObject:(id)object;
@end
@interface PayTool : NSObject
/** Delegate */
@property (weak, nonatomic) id<PayToolDelegate> delegate;
@property (strong, nonatomic) id object;
+(instancetype)sharePayTool;
-(void)application:(UIApplication *)app openURL:(NSURL *)url options:(NSDictionary *)options;
-(void)aliPayAction:(PayParamObj*)param;
-(void)wePayAction:(PayParamObj*)param;
/// Payment confirmation result
-(void)payVerifyResults;
@end
|
Sherlock-HJ/HJ_UI_Tools
|
HJ_UI_Tools/HJ_UI_Tools/Myself/Pay/Tool/PayTool.h
|
C
|
apache-2.0
| 790
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module SimpleCrud1
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
end
end
|
rodrigovilar/Ruby-SimpleCrud1
|
config/application.rb
|
Ruby
|
apache-2.0
| 1,118
|
import logging
import os
import json
import shutil
import sys
import datetime
import csv, math
from tld import get_tld
from collections import OrderedDict
from utils import Util
from components.data.data import Data
from components.iana.iana_transform import IanaTransform
from components.nc.network_context import NetworkContext
from multiprocessing import Process
import pandas as pd
import time
class OA(object):
def __init__(self,date,limit=500,logger=None):
self._initialize_members(date,limit,logger)
def _initialize_members(self,date,limit,logger):
# get logger if exists. if not, create new instance.
self._logger = logging.getLogger('OA.DNS') if logger else Util.get_logger('OA.DNS',create_file=False)
# initialize required parameters.
        self._script_path = os.path.dirname(os.path.abspath(__file__))
self._date = date
self._table_name = "dns"
self._dns_results = []
self._limit = limit
self._data_path = None
self._ipynb_path = None
self._ingest_summary_path = None
self._dns_scores = []
self._dns_scores_headers = []
self._results_delimiter = '\t'
self._details_limit = 250
# get app configuration.
self._spot_conf = Util.get_spot_conf()
# get scores fields conf
conf_file = "{0}/dns_conf.json".format(self._scrtip_path)
self._conf = json.loads(open (conf_file).read(),object_pairs_hook=OrderedDict)
# initialize data engine
self._db = self._spot_conf.get('conf', 'DBNAME').replace("'", "").replace('"', '')
self._engine = Data(self._db,self._table_name ,self._logger)
def start(self):
####################
start = time.time()
####################
self._create_folder_structure()
self._add_ipynb()
self._get_dns_results()
self._add_tld_column()
self._add_reputation()
self._add_hh_and_severity()
self._add_iana()
self._add_network_context()
self._create_dns_scores_csv()
self._get_oa_details()
self._ingest_summary()
##################
end = time.time()
print(end - start)
##################
def _create_folder_structure(self):
# create date folder structure if it does not exist.
self._logger.info("Creating folder structure for OA (data and ipynb)")
self._data_path,self._ingest_summary_path,self._ipynb_path = Util.create_oa_folders("dns",self._date)
def _add_ipynb(self):
if os.path.isdir(self._ipynb_path):
self._logger.info("Adding edge investigation IPython Notebook")
shutil.copy("{0}/ipynb_templates/Edge_Investigation_master.ipynb".format(self._scrtip_path),"{0}/Edge_Investigation.ipynb".format(self._ipynb_path))
self._logger.info("Adding threat investigation IPython Notebook")
shutil.copy("{0}/ipynb_templates/Threat_Investigation_master.ipynb".format(self._scrtip_path),"{0}/Threat_Investigation.ipynb".format(self._ipynb_path))
else:
self._logger.error("There was a problem adding the IPython Notebooks, please check the directory exists.")
def _get_dns_results(self):
self._logger.info("Getting {0} Machine Learning Results from HDFS".format(self._date))
dns_results = "{0}/dns_results.csv".format(self._data_path)
# get hdfs path from conf file.
HUSER = self._spot_conf.get('conf', 'HUSER').replace("'", "").replace('"', '')
hdfs_path = "{0}/dns/scored_results/{1}/scores/dns_results.csv".format(HUSER,self._date)
# get results file from hdfs.
get_command = Util.get_ml_results_form_hdfs(hdfs_path,self._data_path)
self._logger.info("{0}".format(get_command))
# validate files exists
if os.path.isfile(dns_results):
# read number of results based in the limit specified.
self._logger.info("Reading {0} dns results file: {1}".format(self._date,dns_results))
self._dns_results = Util.read_results(dns_results,self._limit,self._results_delimiter)[:]
            if len(self._dns_results) == 0: self._logger.error("There are no DNS results.");sys.exit(1)
else:
self._logger.error("There was an error getting ML results from HDFS")
sys.exit(1)
# add headers.
self._logger.info("Adding headers")
self._dns_scores_headers = [ str(key) for (key,value) in self._conf['dns_score_fields'].items() ]
# add dns content.
self._dns_scores = [ conn[:] for conn in self._dns_results][:]
def _move_time_stamp(self,dns_data):
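        # Move the timestamp value (index 1) of every row to the end so that
        # frame_time becomes the last column of the generated scores CSV.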
for dns in dns_data:
time_stamp = dns[1]
dns.remove(time_stamp)
dns.append(time_stamp)
return dns_data
def _create_dns_scores_csv(self):
dns_scores_csv = "{0}/dns_scores.csv".format(self._data_path)
dns_scores_final = self._move_time_stamp(self._dns_scores)
dns_scores_final.insert(0,self._dns_scores_headers)
Util.create_csv_file(dns_scores_csv,dns_scores_final)
# create bk file
dns_scores_bu_csv = "{0}/dns_scores_bu.csv".format(self._data_path)
Util.create_csv_file(dns_scores_bu_csv,dns_scores_final)
def _add_tld_column(self):
qry_name_col = self._conf['dns_results_fields']['dns_qry_name']
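        # Append the top-level domain of each query name; names without a
        # scheme are prefixed with "http://" first so that tld parsing works.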
self._dns_scores = [conn + [ get_tld("http://" + str(conn[qry_name_col]), fail_silently=True) if "http://" not in str(conn[qry_name_col]) else get_tld(str(conn[qry_name_col]), fail_silently=True)] for conn in self._dns_scores ]
def _add_reputation(self):
# read configuration.
reputation_conf_file = "{0}/components/reputation/reputation_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
self._logger.info("Reading reputation configuration file: {0}".format(reputation_conf_file))
rep_conf = json.loads(open(reputation_conf_file).read())
# initialize reputation services.
self._rep_services = []
self._logger.info("Initializing reputation services.")
for service in rep_conf:
config = rep_conf[service]
module = __import__("components.reputation.{0}.{0}".format(service), fromlist=['Reputation'])
self._rep_services.append(module.Reputation(config,self._logger))
# get columns for reputation.
rep_cols = {}
indexes = [ int(value) for key, value in self._conf["add_reputation"].items()]
self._logger.info("Getting columns to add reputation based on config file: dns_conf.json".format())
for index in indexes:
col_list = []
for conn in self._dns_scores:
col_list.append(conn[index])
rep_cols[index] = list(set(col_list))
# get reputation per column.
self._logger.info("Getting reputation for each service in config")
rep_services_results = []
if self._rep_services :
for key,value in rep_cols.items():
rep_services_results = [ rep_service.check(None,value) for rep_service in self._rep_services]
rep_results = {}
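                # Merge the per-service maps, joining values returned by
                # different services for the same key with "::".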
for result in rep_services_results:
rep_results = {k: "{0}::{1}".format(rep_results.get(k, ""), result.get(k, "")).strip('::') for k in set(rep_results) | set(result)}
self._dns_scores = [ conn + [ rep_results[conn[key]] ] for conn in self._dns_scores ]
else:
self._dns_scores = [ conn + [""] for conn in self._dns_scores ]
def _add_hh_and_severity(self):
# add hh value and sev columns.
dns_date_index = self._conf["dns_results_fields"]["frame_time"]
self._dns_scores = [conn + [ filter(None,conn[dns_date_index].split(" "))[3].split(":")[0]] + [0] + [0] for conn in self._dns_scores ]
def _add_iana(self):
iana_conf_file = "{0}/components/iana/iana_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if os.path.isfile(iana_conf_file):
iana_config = json.loads(open(iana_conf_file).read())
dns_iana = IanaTransform(iana_config["IANA"])
dns_qry_class_index = self._conf["dns_results_fields"]["dns_qry_class"]
dns_qry_type_index = self._conf["dns_results_fields"]["dns_qry_type"]
dns_qry_rcode_index = self._conf["dns_results_fields"]["dns_qry_rcode"]
self._dns_scores = [ conn + [ dns_iana.get_name(conn[dns_qry_class_index],"dns_qry_class")] + [dns_iana.get_name(conn[dns_qry_type_index],"dns_qry_type")] + [ dns_iana.get_name(conn[dns_qry_rcode_index],"dns_qry_rcode") ] for conn in self._dns_scores ]
else:
self._dns_scores = [ conn + ["","",""] for conn in self._dns_scores ]
def _add_network_context(self):
nc_conf_file = "{0}/components/nc/nc_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if os.path.isfile(nc_conf_file):
nc_conf = json.loads(open(nc_conf_file).read())["NC"]
dns_nc = NetworkContext(nc_conf,self._logger)
ip_dst_index = self._conf["dns_results_fields"]["ip_dst"]
self._dns_scores = [ conn + [dns_nc.get_nc(conn[ip_dst_index])] for conn in self._dns_scores ]
else:
self._dns_scores = [ conn + [""] for conn in self._dns_scores ]
def _get_oa_details(self):
self._logger.info("Getting OA DNS suspicious details/chord diagram")
# start suspicious connects details process.
p_sp = Process(target=self._get_suspicious_details)
p_sp.start()
# start chord diagram process.
p_dn = Process(target=self._get_dns_dendrogram)
p_dn.start()
p_sp.join()
p_dn.join()
def _get_suspicious_details(self):
iana_conf_file = "{0}/components/iana/iana_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if os.path.isfile(iana_conf_file):
iana_config = json.loads(open(iana_conf_file).read())
dns_iana = IanaTransform(iana_config["IANA"])
for conn in self._dns_scores:
# get data to query
date=conn[self._conf["dns_score_fields"]["frame_time"]].split(" ")
date = filter(None,date)
if len(date) == 5:
year=date[2]
month=datetime.datetime.strptime(date[0], '%b').strftime('%m')
day=date[1]
hh=conn[self._conf["dns_score_fields"]["hh"]]
dns_qry_name = conn[self._conf["dns_score_fields"]["dns_qry_name"]]
self._get_dns_details(dns_qry_name,year,month,day,hh,dns_iana)
def _get_dns_details(self,dns_qry_name,year,month,day,hh,dns_iana):
limit = self._details_limit
edge_file ="{0}/edge-{1}_{2}_00.csv".format(self._data_path,dns_qry_name.replace("/","-"),hh)
edge_tmp ="{0}/edge-{1}_{2}_00.tmp".format(self._data_path,dns_qry_name.replace("/","-"),hh)
if not os.path.isfile(edge_file):
dns_qry = ("SELECT frame_time,frame_len,ip_dst,ip_src,dns_qry_name,dns_qry_class,dns_qry_type,dns_qry_rcode,dns_a FROM {0}.{1} WHERE y={2} AND m={3} AND d={4} AND dns_qry_name LIKE '%{5}%' AND h={6} LIMIT {7};").format(self._db,self._table_name,year,month,day,dns_qry_name,hh,limit)
# execute query
try:
self._engine.query(dns_qry,edge_tmp)
except:
self._logger.error("ERROR. Edge file couldn't be created for {0}, skipping this step".format(dns_qry_name))
else:
# add IANA to results.
if dns_iana:
update_rows = []
self._logger.info("Adding IANA translation to details results")
with open(edge_tmp) as dns_details_csv:
rows = csv.reader(dns_details_csv, delimiter=',', quotechar='|')
try:
next(rows)
update_rows = [[conn[0]] + [conn[1]] + [conn[2]] + [conn[3]] + [conn[4]] + [dns_iana.get_name(conn[5],"dns_qry_class")] + [dns_iana.get_name(conn[6],"dns_qry_type")] + [dns_iana.get_name(conn[7],"dns_qry_rcode")] + [conn[8]] for conn in rows]
update_rows = filter(None, update_rows)
header = [ "frame_time", "frame_len", "ip_dst","ip_src","dns_qry_name","dns_qry_class_name","dns_qry_type_name","dns_qry_rcode_name","dns_a" ]
update_rows.insert(0,header)
except IndexError:
pass
else:
self._logger.info("WARNING: NO IANA configured.")
# create edge file.
self._logger.info("Creating edge file:{0}".format(edge_file))
with open(edge_file,'wb') as dns_details_edge:
writer = csv.writer(dns_details_edge, quoting=csv.QUOTE_ALL)
if update_rows:
writer.writerows(update_rows)
else:
shutil.copy(edge_tmp,edge_file)
os.remove(edge_tmp)
def _get_dns_dendrogram(self):
limit = self._details_limit
for conn in self._dns_scores:
date=conn[self._conf["dns_score_fields"]["frame_time"]].split(" ")
date = filter(None,date)
if len(date) == 5:
year=date[2]
month=datetime.datetime.strptime(date[0], '%b').strftime('%m')
day=date[1]
ip_dst=conn[self._conf["dns_score_fields"]["ip_dst"]]
self._get_dendro(self._db,self._table_name,ip_dst,year,month,day, limit)
def _get_dendro(self,db,table,ip_dst,year,month,day,limit):
dendro_file = "{0}/dendro-{1}.csv".format(self._data_path,ip_dst)
if not os.path.isfile(dendro_file):
dndro_qry = ("SELECT dns_a, dns_qry_name, ip_dst FROM (SELECT susp.ip_dst, susp.dns_qry_name, susp.dns_a FROM {0}.{1} as susp WHERE susp.y={2} AND susp.m={3} AND susp.d={4} AND susp.ip_dst='{5}' LIMIT {6}) AS tmp GROUP BY dns_a, dns_qry_name, ip_dst").format(db,table,year,month,day,ip_dst,limit)
# execute query
self._engine.query(dndro_qry,dendro_file)
def _ingest_summary(self):
# get date parameters.
yr = self._date[:4]
mn = self._date[4:6]
dy = self._date[6:]
self._logger.info("Getting ingest summary data for the day")
ingest_summary_cols = ["date","total"]
result_rows = []
df_filtered = pd.DataFrame()
ingest_summary_file = "{0}/is_{1}{2}.csv".format(self._ingest_summary_path,yr,mn)
ingest_summary_tmp = "{0}.tmp".format(ingest_summary_file)
if os.path.isfile(ingest_summary_file):
df = pd.read_csv(ingest_summary_file, delimiter=',')
#discards previous rows from the same date
df_filtered = df[df['date'].str.contains("{0}-{1}-{2}".format(yr, mn, dy)) == False]
else:
df = pd.DataFrame()
# get ingest summary.
ingest_summary_qry = ("SELECT frame_time, COUNT(*) as total "
" FROM {0}.{1}"
" WHERE y={2} AND m={3} AND d={4} "
" AND unix_tstamp IS NOT NULL AND frame_time IS NOT NULL"
" AND frame_len IS NOT NULL AND dns_qry_name IS NOT NULL"
" AND ip_src IS NOT NULL "
" AND (dns_qry_class IS NOT NULL AND dns_qry_type IS NOT NULL AND dns_qry_rcode IS NOT NULL ) "
" GROUP BY frame_time;")
ingest_summary_qry = ingest_summary_qry.format(self._db,self._table_name, yr, mn, dy)
results_file = "{0}/results_{1}.csv".format(self._ingest_summary_path,self._date)
self._engine.query(ingest_summary_qry,output_file=results_file,delimiter=",")
if os.path.isfile(results_file):
df_results = pd.read_csv(results_file, delimiter=',')
# Forms a new dataframe splitting the minutes from the time column
df_new = pd.DataFrame([["{0}-{1}-{2} {3}:{4}".format(yr, mn, dy,val['frame_time'].split(" ")[3].split(":")[0].zfill(2),val['frame_time'].split(" ")[3].split(":")[1].zfill(2)), int(val['total']) if not math.isnan(val['total']) else 0 ] for key,val in df_results.iterrows()],columns = ingest_summary_cols)
#Groups the data by minute
sf = df_new.groupby(by=['date'])['total'].sum()
df_per_min = pd.DataFrame({'date':sf.index, 'total':sf.values})
df_final = df_filtered.append(df_per_min, ignore_index=True)
df_final.to_csv(ingest_summary_tmp,sep=',', index=False)
os.remove(results_file)
os.rename(ingest_summary_tmp,ingest_summary_file)
else:
self._logger.info("No data found for the ingest summary")
|
kpeiruza/incubator-spot
|
spot-oa/oa/dns/dns_oa.py
|
Python
|
apache-2.0
| 17,663
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.3.1"/>
<title>MagickCore: _ContributionInfo Struct Reference</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<script type="text/javascript" src="navtree.js"></script>
<script type="text/javascript">
$(document).ready(initResizable);
$(window).load(resizeHeight);
</script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td style="padding-left: 0.5em;">
<div id="projectname">MagickCore
 <span id="projectnumber">7.0.0</span>
</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.3.1 -->
</div><!-- top -->
<div id="side-nav" class="ui-resizable side-nav-resizable">
<div id="nav-tree">
<div id="nav-tree-contents">
<div id="nav-sync" class="sync"></div>
</div>
</div>
<div id="splitbar" style="-moz-user-select:none;"
class="ui-resizable-handle">
</div>
</div>
<script type="text/javascript">
$(document).ready(function(){initNavTree('struct__ContributionInfo.html','');});
</script>
<div id="doc-content">
<div class="header">
<div class="summary">
<a href="struct__ContributionInfo.html#pub-attribs">Data Fields</a> </div>
<div class="headertitle">
<div class="title">_ContributionInfo Struct Reference</div> </div>
</div><!--header-->
<div class="contents">
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pub-attribs"></a>
Data Fields</h2></td></tr>
<tr class="memitem:afec96b061670d70186de9df85c92082d"><td class="memItemLeft" align="right" valign="top">double </td><td class="memItemRight" valign="bottom"><a class="el" href="struct__ContributionInfo.html#afec96b061670d70186de9df85c92082d">weight</a></td></tr>
<tr class="separator:afec96b061670d70186de9df85c92082d"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:afa8e02f1deb47a307a5b011821a073b3"><td class="memItemLeft" align="right" valign="top">ssize_t </td><td class="memItemRight" valign="bottom"><a class="el" href="struct__ContributionInfo.html#afa8e02f1deb47a307a5b011821a073b3">pixel</a></td></tr>
<tr class="separator:afa8e02f1deb47a307a5b011821a073b3"><td class="memSeparator" colspan="2"> </td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock">
<p>Definition at line <a class="el" href="resize_8c_source.html#l02283">2283</a> of file <a class="el" href="resize_8c_source.html">resize.c</a>.</p>
</div><h2 class="groupheader">Field Documentation</h2>
<a class="anchor" id="afa8e02f1deb47a307a5b011821a073b3"></a>
<div class="memitem">
<div class="memproto">
<table class="memname">
<tr>
<td class="memname">ssize_t _ContributionInfo::pixel</td>
</tr>
</table>
</div><div class="memdoc">
<p>Definition at line <a class="el" href="resize_8c_source.html#l02289">2289</a> of file <a class="el" href="resize_8c_source.html">resize.c</a>.</p>
</div>
</div>
<a class="anchor" id="afec96b061670d70186de9df85c92082d"></a>
<div class="memitem">
<div class="memproto">
<table class="memname">
<tr>
<td class="memname">double _ContributionInfo::weight</td>
</tr>
</table>
</div><div class="memdoc">
<p>Definition at line <a class="el" href="resize_8c_source.html#l02286">2286</a> of file <a class="el" href="resize_8c_source.html">resize.c</a>.</p>
</div>
</div>
<hr/>The documentation for this struct was generated from the following file:<ul>
<li><a class="el" href="resize_8c_source.html">resize.c</a></li>
</ul>
</div><!-- contents -->
</div><!-- doc-content -->
<!-- start footer part -->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
<ul>
<li class="navelem"><a class="el" href="struct__ContributionInfo.html">_ContributionInfo</a></li>
<li class="footer">Generated by
<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.3.1 </li>
</ul>
</div>
</body>
</html>
|
svn2github/ImageMagick
|
www/api/MagickCore/struct__ContributionInfo.html
|
HTML
|
apache-2.0
| 4,772
|
from migrate.versioning import api
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
from app import db
import os.path
db.create_all()
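# Create the migration repository if it does not exist yet; otherwise just put
# the existing database under version control at the repository's current version.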
if not os.path.exists(SQLALCHEMY_MIGRATE_REPO):
api.create(SQLALCHEMY_MIGRATE_REPO, 'database repository')
api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
else:
api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, api.version(SQLALCHEMY_MIGRATE_REPO))
|
edfungus/Music-Server
|
app/db_create.py
|
Python
|
apache-2.0
| 466
|
/*
* @license Apache-2.0
*
* Copyright (c) 2019 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// TypeScript Version: 2.0
/**
* Tests if two arguments are the same value.
*
* ## Notes
*
* - The function differs from the `===` operator in that the function treats `-0` and `+0` as distinct and `NaNs` as the same.
*
* @param a - first input value
* @param b - second input value
* @returns boolean indicating whether two arguments are the same value
*
* @example
* var bool = isSameValue( true, true );
* // returns true
*
* @example
* var bool = isSameValue( 3.14, 3.14 );
* // returns true
*
* @example
* var bool = isSameValue( {}, {} );
* // returns false
*
* @example
* var bool = isSameValue( -0.0, -0.0 );
* // returns true
*
* @example
* var bool = isSameValue( -0.0, 0.0 );
* // returns false
*
* @example
* var bool = isSameValue( NaN, NaN );
* // returns true
*
* @example
* var bool = isSameValue( [], [] );
* // returns false
*/
declare function isSameValue( a: any, b: any ): boolean;
// EXPORTS //
export = isSameValue;
|
stdlib-js/stdlib
|
lib/node_modules/@stdlib/assert/is-same-value/docs/types/index.d.ts
|
TypeScript
|
apache-2.0
| 1,562
|
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2017-2022 the original author or authors.
*/
package org.assertj.vavr.api;
import io.vavr.control.Try;
import org.assertj.core.error.BasicErrorMessageFactory;
/**
* Build error message when an {@link Try}
* should contain a specific value.
*
* @author Grzegorz Piwowarek
*/
class TryShouldContain extends BasicErrorMessageFactory {
private static final String EXPECTING_TO_CONTAIN = "%nExpecting:%n <%s>%nto contain:%n <%s>%nbut did not.";
private static final String EXPECTING_TO_CONTAIN_SAME = "%nExpecting:%n <%s>%nto contain the instance (i.e. compared with ==):%n <%s>%nbut did not.";
private TryShouldContain(String message, Object actual, Object expected) {
super(message, actual, expected);
}
private TryShouldContain(Object expected) {
super("%nExpecting Try to contain:%n <%s>%nbut was empty.", expected);
}
/**
* Indicates that the provided {@link io.vavr.control.Try} does not contain the provided argument.
*
* @param <VALUE> the type of the value contained in the {@link Try}.
* @param vTry the {@link Try} which contains a value.
* @param expectedValue the value we expect to be in the provided {@link Try}.
* @return a error message factory
*/
static <VALUE> TryShouldContain shouldContain(Try<VALUE> vTry, VALUE expectedValue) {
return vTry.isSuccess() ?
new TryShouldContain(EXPECTING_TO_CONTAIN, vTry, expectedValue) :
shouldContain(expectedValue);
}
/**
* Indicates that the provided {@link io.vavr.control.Try} does not contain the provided argument (judging by reference
* equality).
*
* @param <VALUE> the type of the value contained in the {@link Try}.
* @param vTry the {@link Try} which contains a value.
* @param expectedValue the value we expect to be in the provided {@link Try}.
* @return a error message factory
*/
static <VALUE> TryShouldContain shouldContainSame(Try<VALUE> vTry, VALUE expectedValue) {
return vTry.isSuccess() ?
new TryShouldContain(EXPECTING_TO_CONTAIN_SAME, vTry, expectedValue) :
shouldContain(expectedValue);
}
/**
* Indicates that an {@link io.vavr.control.Try} is empty so it doesn't contain the expected value.
*
* @param expectedValue the value we expect to be in an {@link Try}.
* @return a error message factory.
*/
static TryShouldContain shouldContain(Object expectedValue) {
return new TryShouldContain(expectedValue);
}
}
|
assertj/assertj-vavr
|
src/main/java/org/assertj/vavr/api/TryShouldContain.java
|
Java
|
apache-2.0
| 3,137
|
package actors.supervised
import akka.actor.{ActorRef, ActorSystem, Props}
import akka.pattern.BackoffOpts
import scala.concurrent.duration.FiniteDuration
case class RestartOnStop(minBackoff: FiniteDuration, maxBackoff: FiniteDuration) {
def actorOf(props: Props, name: String)(implicit system: ActorSystem): ActorRef = {
val onStopOptions = BackoffOpts.onStop(
childProps = props,
childName = name,
minBackoff = minBackoff,
maxBackoff = maxBackoff,
randomFactor = 0
)
system.actorOf(Props(classOf[RestartOnStopActor], onStopOptions), s"$name-supervisor")
}
}
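// A minimal usage sketch, assuming an implicit ActorSystem is in scope and
// scala.concurrent.duration._ is imported (actor class and timings are
// illustrative, not part of this file):
//
//   val supervised: ActorRef = RestartOnStop(1.second, 1.minute)
//     .actorOf(Props(classOf[MyActor]), "my-actor")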
|
UKHomeOffice/drt-scalajs-spa-exploration
|
server/src/main/scala/actors/supervised/RestartOnStop.scala
|
Scala
|
apache-2.0
| 610
|
# Solanum tuberosum var. helenanum Alef. VARIETY
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Solanales/Solanaceae/Solanum/Solanum tuberosum/ Syn. Solanum tuberosum helenanum/README.md
|
Markdown
|
apache-2.0
| 195
|
// Generated from /POI/java/org/apache/poi/hssf/record/chart/ChartTitleFormatRecord.java
#include <org/apache/poi/hssf/record/chart/ChartTitleFormatRecord.hpp>
#include <java/lang/ArrayStoreException.hpp>
#include <java/lang/NullPointerException.hpp>
#include <java/lang/String.hpp>
#include <java/lang/StringBuffer.hpp>
#include <org/apache/poi/hssf/record/RecordInputStream.hpp>
#include <org/apache/poi/hssf/record/chart/ChartTitleFormatRecord_CTFormat.hpp>
#include <org/apache/poi/util/LittleEndianOutput.hpp>
#include <Array.hpp>
#include <ObjectArray.hpp>
#include <SubArray.hpp>
template<typename ComponentType, typename... Bases> struct SubArray;
namespace poi
{
namespace hssf
{
namespace record
{
namespace chart
{
typedef ::SubArray< ::poi::hssf::record::chart::ChartTitleFormatRecord_CTFormat, ::java::lang::ObjectArray > ChartTitleFormatRecord_CTFormatArray;
} // chart
} // record
} // hssf
} // poi
template<typename T>
static T* npc(T* t)
{
if(!t) throw new ::java::lang::NullPointerException();
return t;
}
poi::hssf::record::chart::ChartTitleFormatRecord::ChartTitleFormatRecord(const ::default_init_tag&)
: super(*static_cast< ::default_init_tag* >(0))
{
clinit();
}
poi::hssf::record::chart::ChartTitleFormatRecord::ChartTitleFormatRecord(::poi::hssf::record::RecordInputStream* in)
: ChartTitleFormatRecord(*static_cast< ::default_init_tag* >(0))
{
ctor(in);
}
constexpr int16_t poi::hssf::record::chart::ChartTitleFormatRecord::sid;
void poi::hssf::record::chart::ChartTitleFormatRecord::ctor(::poi::hssf::record::RecordInputStream* in)
{
super::ctor();
auto nRecs = npc(in)->readUShort();
_formats = new ChartTitleFormatRecord_CTFormatArray(nRecs);
for (auto i = int32_t(0); i < nRecs; i++) {
_formats->set(i, new ChartTitleFormatRecord_CTFormat(in));
}
}
void poi::hssf::record::chart::ChartTitleFormatRecord::serialize(::poi::util::LittleEndianOutput* out)
{
npc(out)->writeShort(npc(_formats)->length);
for (auto i = int32_t(0); i < npc(_formats)->length; i++) {
npc((*_formats)[i])->serialize(out);
}
}
int32_t poi::hssf::record::chart::ChartTitleFormatRecord::getDataSize()
{
return int32_t(2) + ChartTitleFormatRecord_CTFormat::ENCODED_SIZE * npc(_formats)->length;
}
int16_t poi::hssf::record::chart::ChartTitleFormatRecord::getSid()
{
return sid;
}
int32_t poi::hssf::record::chart::ChartTitleFormatRecord::getFormatCount()
{
return npc(_formats)->length;
}
void poi::hssf::record::chart::ChartTitleFormatRecord::modifyFormatRun(int16_t oldPos, int16_t newLen)
{
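    // Walk the format runs: once the run starting at oldPos is found, every
    // subsequent run's offset is shifted by (newLen - original length of that
    // run), keeping the later runs contiguous with the modified one.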
auto shift = int32_t(0);
for (auto i = int32_t(0); i < npc(_formats)->length; i++) {
auto ctf = (*_formats)[i];
if(shift != 0) {
npc(ctf)->setOffset(npc(ctf)->getOffset() + shift);
} else if(oldPos == npc(ctf)->getOffset() && i < npc(_formats)->length - int32_t(1)) {
auto nextCTF = (*_formats)[i + int32_t(1)];
shift = newLen - (npc(nextCTF)->getOffset() - npc(ctf)->getOffset());
}
}
}
java::lang::String* poi::hssf::record::chart::ChartTitleFormatRecord::toString()
{
auto buffer = new ::java::lang::StringBuffer();
npc(buffer)->append(u"[CHARTTITLEFORMAT]\n"_j);
npc(npc(npc(buffer)->append(u" .format_runs = "_j))->append(npc(_formats)->length))->append(u"\n"_j);
for (auto i = int32_t(0); i < npc(_formats)->length; i++) {
auto ctf = (*_formats)[i];
npc(npc(buffer)->append(u" .char_offset= "_j))->append(npc(ctf)->getOffset());
npc(npc(buffer)->append(u",.fontidx= "_j))->append(npc(ctf)->getFontIndex());
npc(buffer)->append(u"\n"_j);
}
npc(buffer)->append(u"[/CHARTTITLEFORMAT]\n"_j);
return npc(buffer)->toString();
}
extern java::lang::Class *class_(const char16_t *c, int n);
java::lang::Class* poi::hssf::record::chart::ChartTitleFormatRecord::class_()
{
static ::java::lang::Class* c = ::class_(u"org.apache.poi.hssf.record.chart.ChartTitleFormatRecord", 55);
return c;
}
int32_t poi::hssf::record::chart::ChartTitleFormatRecord::serialize(int32_t offset, ::int8_tArray* data)
{
return super::serialize(offset, data);
}
int8_tArray* poi::hssf::record::chart::ChartTitleFormatRecord::serialize()
{
return super::serialize();
}
java::lang::Class* poi::hssf::record::chart::ChartTitleFormatRecord::getClass0()
{
return class_();
}
|
pebble2015/cpoi
|
src/org/apache/poi/hssf/record/chart/ChartTitleFormatRecord.cpp
|
C++
|
apache-2.0
| 4,488
|
package com.imethod.module.client.rest;
import com.imethod.core.json.JSONObj;
import com.imethod.core.util.StringTools;
import org.springframework.web.client.HttpClientErrorException;
/**
* time : 15/10/26.
* auth :
* desc :
* tips :
* 1.
*/
public class ApiException extends RuntimeException {
private RestResult restResult;
    private static String ERROR_UNDEFINED = "Invalid request data";
public RestResult getRestResult() {
return restResult;
}
public void setRestResult(RestResult restResult) {
this.restResult = restResult;
}
public ApiException(Exception e) {
super(e);
if (e instanceof HttpClientErrorException) {
HttpClientErrorException errorException = (HttpClientErrorException) e;
String str = errorException.getResponseBodyAsString();
            // The HTTP status code is retrieved here but not currently used.
            errorException.getStatusCode();
restResult = new RestResult();
JSONObj.fromObject(str).copyPropertiesTo(restResult);
}
}
public ApiException(String errorCode) {
super(errorCode);
}
public ApiException() {
super(ERROR_UNDEFINED);
}
public String getMessage() {
if (restResult != null && !StringTools.isEmpty(restResult.getMessage())) {
return restResult.getMessage();
}
return super.getMessage();
}
}
|
bqxu/JMethod
|
module/client/src/main/java/com/imethod/module/client/rest/ApiException.java
|
Java
|
apache-2.0
| 1,402
|
Dragonputer
===========
Character manager and utilities for Dungeons and Dragons.
|
glompix/Dragonputer
|
README.md
|
Markdown
|
apache-2.0
| 83
|
module.exports = (req, res) => res.json(req.query)
|
zeit/now-cli
|
packages/now-cli/test/dev/fixtures/test-zero-config-rewrite/api/echo/[id].js
|
JavaScript
|
apache-2.0
| 51
|
<?php
// Copyright 2017 DAIMTO ([Linda Lawton](https://twitter.com/LindaLawtonDK)) : [www.daimto.com](http://www.daimto.com/)
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by DAIMTO-Google-apis-Sample-generator 1.0.0
// Template File Name: ServiceAccount.tt
// Build date: 2017-10-08
// PHP generator version: 1.0.0
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
// About
//
// Unofficial sample for the Cloud User Accounts vm_beta API for PHP.
// This sample is designed to be used with the Google PHP client library. (https://github.com/google/google-api-php-client)
//
// API Description: Creates and manages users and groups for accessing Google Compute Engine virtual machines.
// API Documentation Link https://cloud.google.com/compute/docs/access/user-accounts/api/latest/
//
// Discovery Doc https://www.googleapis.com/discovery/v1/apis/clouduseraccounts/vm_beta/rest
//
//------------------------------------------------------------------------------
// Installation
//
// The preferred method is via https://getcomposer.org. Follow the installation instructions https://getcomposer.org/doc/00-intro.md
// if you do not already have composer installed.
//
// Once composer is installed, execute the following command in your project root to install this library:
//
// composer require google/apiclient:^2.0
//
//------------------------------------------------------------------------------
require_once __DIR__ . '/vendor/autoload.php';
require_once __DIR__ . '/Oauth2Authentication.php';
// Start a session to persist credentials.
session_start();
// Handle authorization flow from the server.
if (! isset($_GET['code'])) {
$client = buildClient();
$auth_url = $client->createAuthUrl();
header('Location: ' . filter_var($auth_url, FILTER_SANITIZE_URL));
} else {
$client = buildClient();
    $client->authenticate($_GET['code']); // Exchange the authentication code for a refresh token and access token.
    // Add access token and refresh token to session.
$_SESSION['access_token'] = $client->getAccessToken();
$_SESSION['refresh_token'] = $client->getRefreshToken();
//Redirect back to main script
$redirect_uri = str_replace("oauth2callback.php",$_SESSION['mainScript'],$client->getRedirectUri());
header('Location: ' . filter_var($redirect_uri, FILTER_SANITIZE_URL));
}
?>
|
LindaLawton/Google-APIs-PHP-Samples
|
Samples/Cloud User Accounts API/vm_beta/oauth2callback.php
|
PHP
|
apache-2.0
| 3,126
|
package br.com.safeguard.interfaces;
import br.com.caelum.stella.validation.Validator;
/**
 * Interface that must be implemented by an (Enum) of validators (ParametroTipo)
 * so it can be used as an (Enum) that returns instances of the
 * (br.com.caelum.stella.validation.Validator) class.
 *
 *
 * @see
 * <ul>
 * <li>Implementation example</li>
* </ul>
* <p>
* {@code TIPO_DO_PADRAO(new Class<? extends Validator>)) }<br><br>
* {@code private Validator<String> tipo; }<br><br>
* {@code private Patterns(Validator<String> tipo) }<br>
* {@code this.tipo = tipo; }<br><br>
* {@code @Override }<br>
 * {@code public Validator<String> getType() }<br>
* {@code return tipo; }<br>
* </p>
*
* @author Gilmar Carlos
*
*/
public interface BaseParam {
Validator<String> getType();
}
|
gilmardeveloper/java-validator-safeguard
|
src/main/java/br/com/safeguard/interfaces/BaseParam.java
|
Java
|
apache-2.0
| 887
|
## Configuration overriding
Some of the configuration properties in `KAP_HOME/conf/` can be overridden through the KAP GUI. Configuration overriding has two scopes: project level and cube level. The priority order is: cube-level configurations > project-level configurations > configuration files.
### Project level configuration overriding
At the Project Management page, open the edit page for one of the projects; there the user can add configuration properties, which override the property values in the configuration files, as the figure below shows:

The configuration of kylin.query.force-limit can be overridden at project level.
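For example, a project-level override is just a key/value entry (the value below is illustrative):
```properties
kylin.query.force-limit=1000
```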
### Cube level configuration overriding
At the `Configuration Overwrites` step of cube design, the user can rewrite property values to override those set at project level and in the configuration files, as the figure below shows: 
The configuration of kylin.query.disable-cube-noagg-sql can be overridden at Cube level.
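For example (the value below is illustrative):
```properties
kylin.query.disable-cube-noagg-sql=true
```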
### Overriding properties in kylin.properties
*kylin.hbase.default.compression.codec*, default is none, other valid values include snappy, lzo, gzip, lz4
*kylin.storage.hbase.region-cut-gb*, default is 5
*kylin.storage.hbase.hfile-size-gb*, default is 2
*kylin.storage.hbase.min-region-count*, default is 1
*kylin.storage.hbase.max-region-count*, default is 500
*kylin.job.sampling-percentage*, default is 100
*kylin.engine.mr.reduce-input-mb*, default is 500
*kylin.engine.mr.max-reducer-number*, default is 500
*kylin.engine.mr.mapper-input-rows*, default is 1000000
*kylin.cube.algorithm*, default is auto, other valid values include inmem, layer
*kylin.cube.algorithm.layer-or-inmem-threshold*, default is 8
*kylin.cube.aggrgroup.max-combination*, default is 4096
> Note: **DO NOT** set this parameter over 10000. Otherwise it will consume a lot of CPU and memory when calculating the dimension combinations, which may make the browser unstable or even crash KAP. If there are many dimensions in the Cube, it is recommended to set up multiple Aggregation Groups (AGG).
*kylin.table.snapshot.max-mb*, default is 300
### Overriding properties in kylin_hive_conf.xml
KAP allows overriding properties in kylin_hive_conf.xml through the KAP GUI. Override the original values using the following key/value format:
kylin.hive.config.override.*key* = *value*
**Attention: it's necessary to prefix the property name with *kylin.hive.config.override*.**
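For example, to override the standard Hive setting `hive.exec.compress.output` (used here purely for illustration):
```properties
kylin.hive.config.override.hive.exec.compress.output=true
```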
### Overriding properties in kylin_job_conf.xml and kylin_job_conf_inmem.xml
KAP allows overriding kylin_job_conf.xml and kylin_job_conf_inmem.xml through the KAP GUI. Override the original values using the following key/value format:
kylin.job.mr.config.override.*key* = *value*
**Attention: it's necessary to prefix the property name with *kylin.job.mr.config.override***, as the figure below shows:

The red rectangle marks the prefix, while the blue rectangle marks the property name; a "." concatenates the two.
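For example, to route build jobs to a specific YARN queue (the queue name is illustrative):
```properties
kylin.job.mr.config.override.mapreduce.job.queuename=kap_queue
```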
|
tttMelody/KAP-manual
|
en/config/config_override.en.md
|
Markdown
|
apache-2.0
| 3,091
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.android.toolchain;
import com.facebook.buck.core.filesystems.AbsPath;
import com.facebook.buck.core.toolchain.ComparableToolchain;
import com.facebook.buck.core.util.immutables.BuckStyleValue;
import java.nio.file.Path;
/** Part of Android toolchain that provides access to Android SDK */
@BuckStyleValue
public interface AndroidSdkLocation extends ComparableToolchain {
String DEFAULT_NAME = "android-sdk-location";
Path getSdkRootPath();
@Override
default String getName() {
return DEFAULT_NAME;
}
static AndroidSdkLocation of(Path sdkRootPath) {
return ImmutableAndroidSdkLocation.ofImpl(sdkRootPath);
}
static AndroidSdkLocation of(AbsPath sdkRootPath) {
return of(sdkRootPath.getPath());
}
}
|
JoelMarcey/buck
|
src/com/facebook/buck/android/toolchain/AndroidSdkLocation.java
|
Java
|
apache-2.0
| 1,377
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.runtime.instructions.cp;
import java.util.HashMap;
import org.apache.sysml.lops.Lop;
import org.apache.sysml.parser.ParameterizedBuiltinFunctionExpression;
import org.apache.sysml.parser.Statement;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.controlprogram.caching.CacheableData;
import org.apache.sysml.runtime.controlprogram.caching.FrameObject;
import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
import org.apache.sysml.runtime.functionobjects.ParameterizedBuiltin;
import org.apache.sysml.runtime.functionobjects.ValueFunction;
import org.apache.sysml.runtime.instructions.InstructionUtils;
import org.apache.sysml.runtime.instructions.mr.GroupedAggregateInstruction;
import org.apache.sysml.runtime.matrix.JobReturn;
import org.apache.sysml.runtime.matrix.data.FrameBlock;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.matrix.operators.Operator;
import org.apache.sysml.runtime.matrix.operators.SimpleOperator;
import org.apache.sysml.runtime.transform.DataTransform;
import org.apache.sysml.runtime.transform.TfUtils;
import org.apache.sysml.runtime.transform.decode.Decoder;
import org.apache.sysml.runtime.transform.decode.DecoderFactory;
import org.apache.sysml.runtime.transform.encode.Encoder;
import org.apache.sysml.runtime.transform.encode.EncoderFactory;
import org.apache.sysml.runtime.transform.meta.TfMetaUtils;
import org.apache.sysml.runtime.util.DataConverter;
public class ParameterizedBuiltinCPInstruction extends ComputationCPInstruction
{
private static final int TOSTRING_MAXROWS = 100;
private static final int TOSTRING_MAXCOLS = 100;
private static final int TOSTRING_DECIMAL = 3;
private static final boolean TOSTRING_SPARSE = false;
private static final String TOSTRING_SEPARATOR = " ";
private static final String TOSTRING_LINESEPARATOR = "\n";
private int arity;
protected HashMap<String,String> params;
public ParameterizedBuiltinCPInstruction(Operator op, HashMap<String,String> paramsMap, CPOperand out, String opcode, String istr )
{
super(op, null, null, out, opcode, istr);
_cptype = CPINSTRUCTION_TYPE.ParameterizedBuiltin;
params = paramsMap;
}
public int getArity() {
return arity;
}
public HashMap<String,String> getParameterMap() {
return params;
}
public String getParam(String key) {
return getParameterMap().get(key);
}
public static HashMap<String, String> constructParameterMap(String[] params) {
// process all elements in "params" except first(opcode) and last(output)
HashMap<String,String> paramMap = new HashMap<String,String>();
// all parameters are of form <name=value>
String[] parts;
for ( int i=1; i <= params.length-2; i++ ) {
parts = params[i].split(Lop.NAME_VALUE_SEPARATOR);
paramMap.put(parts[0], parts[1]);
}
return paramMap;
}
public static ParameterizedBuiltinCPInstruction parseInstruction ( String str )
throws DMLRuntimeException
{
String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
// first part is always the opcode
String opcode = parts[0];
// last part is always the output
CPOperand out = new CPOperand( parts[parts.length-1] );
// process remaining parts and build a hash map
HashMap<String,String> paramsMap = constructParameterMap(parts);
// determine the appropriate value function
ValueFunction func = null;
if ( opcode.equalsIgnoreCase("cdf") ) {
if ( paramsMap.get("dist") == null )
throw new DMLRuntimeException("Invalid distribution: " + str);
func = ParameterizedBuiltin.getParameterizedBuiltinFnObject(opcode, paramsMap.get("dist"));
// Determine appropriate Function Object based on opcode
return new ParameterizedBuiltinCPInstruction(new SimpleOperator(func), paramsMap, out, opcode, str);
}
else if ( opcode.equalsIgnoreCase("invcdf") ) {
if ( paramsMap.get("dist") == null )
throw new DMLRuntimeException("Invalid distribution: " + str);
func = ParameterizedBuiltin.getParameterizedBuiltinFnObject(opcode, paramsMap.get("dist"));
// Determine appropriate Function Object based on opcode
return new ParameterizedBuiltinCPInstruction(new SimpleOperator(func), paramsMap, out, opcode, str);
}
else if ( opcode.equalsIgnoreCase("groupedagg")) {
// check for mandatory arguments
String fnStr = paramsMap.get("fn");
if ( fnStr == null )
throw new DMLRuntimeException("Function parameter is missing in groupedAggregate.");
if ( fnStr.equalsIgnoreCase("centralmoment") ) {
if ( paramsMap.get("order") == null )
throw new DMLRuntimeException("Mandatory \"order\" must be specified when fn=\"centralmoment\" in groupedAggregate.");
}
Operator op = GroupedAggregateInstruction.parseGroupedAggOperator(fnStr, paramsMap.get("order"));
return new ParameterizedBuiltinCPInstruction(op, paramsMap, out, opcode, str);
}
else if( opcode.equalsIgnoreCase("rmempty")
|| opcode.equalsIgnoreCase("replace")
|| opcode.equalsIgnoreCase("rexpand") )
{
func = ParameterizedBuiltin.getParameterizedBuiltinFnObject(opcode);
return new ParameterizedBuiltinCPInstruction(new SimpleOperator(func), paramsMap, out, opcode, str);
}
else if ( opcode.equals("transform")
|| opcode.equals("transformapply")
|| opcode.equals("transformdecode")
|| opcode.equals("transformmeta"))
{
return new ParameterizedBuiltinCPInstruction(null, paramsMap, out, opcode, str);
}
else if ( opcode.equals("toString"))
{
return new ParameterizedBuiltinCPInstruction(null, paramsMap, out, opcode, str);
}
else {
throw new DMLRuntimeException("Unknown opcode (" + opcode + ") for ParameterizedBuiltin Instruction.");
}
}
@Override
public void processInstruction(ExecutionContext ec)
throws DMLRuntimeException
{
String opcode = getOpcode();
ScalarObject sores = null;
if ( opcode.equalsIgnoreCase("cdf")) {
SimpleOperator op = (SimpleOperator) _optr;
double result = op.fn.execute(params);
sores = new DoubleObject(result);
ec.setScalarOutput(output.getName(), sores);
}
else if ( opcode.equalsIgnoreCase("invcdf")) {
SimpleOperator op = (SimpleOperator) _optr;
double result = op.fn.execute(params);
sores = new DoubleObject(result);
ec.setScalarOutput(output.getName(), sores);
}
else if ( opcode.equalsIgnoreCase("groupedagg") ) {
// acquire locks
MatrixBlock target = ec.getMatrixInput(params.get(Statement.GAGG_TARGET));
MatrixBlock groups = ec.getMatrixInput(params.get(Statement.GAGG_GROUPS));
MatrixBlock weights= null;
if ( params.get(Statement.GAGG_WEIGHTS) != null )
weights = ec.getMatrixInput(params.get(Statement.GAGG_WEIGHTS));
int ngroups = -1;
if ( params.get(Statement.GAGG_NUM_GROUPS) != null) {
ngroups = (int) Double.parseDouble(params.get(Statement.GAGG_NUM_GROUPS));
}
// compute the result
int k = Integer.parseInt(params.get("k")); //num threads
MatrixBlock soresBlock = groups.groupedAggOperations(target, weights, new MatrixBlock(), ngroups, _optr, k);
ec.setMatrixOutput(output.getName(), soresBlock);
// release locks
target = groups = weights = null;
ec.releaseMatrixInput(params.get(Statement.GAGG_TARGET));
ec.releaseMatrixInput(params.get(Statement.GAGG_GROUPS));
if ( params.get(Statement.GAGG_WEIGHTS) != null )
ec.releaseMatrixInput(params.get(Statement.GAGG_WEIGHTS));
}
else if ( opcode.equalsIgnoreCase("rmempty") ) {
// acquire locks
MatrixBlock target = ec.getMatrixInput(params.get("target"));
MatrixBlock select = params.containsKey("select")? ec.getMatrixInput(params.get("select")):null;
// compute the result
String margin = params.get("margin");
MatrixBlock soresBlock = null;
if( margin.equals("rows") )
soresBlock = target.removeEmptyOperations(new MatrixBlock(), true, select);
else if( margin.equals("cols") )
soresBlock = target.removeEmptyOperations(new MatrixBlock(), false, select);
else
throw new DMLRuntimeException("Unspupported margin identifier '"+margin+"'.");
//release locks
ec.setMatrixOutput(output.getName(), soresBlock);
ec.releaseMatrixInput(params.get("target"));
if (params.containsKey("select"))
ec.releaseMatrixInput(params.get("select"));
}
else if ( opcode.equalsIgnoreCase("replace") ) {
// acquire locks
MatrixBlock target = ec.getMatrixInput(params.get("target"));
// compute the result
double pattern = Double.parseDouble( params.get("pattern") );
double replacement = Double.parseDouble( params.get("replacement") );
MatrixBlock ret = (MatrixBlock) target.replaceOperations(new MatrixBlock(), pattern, replacement);
//release locks
ec.setMatrixOutput(output.getName(), ret);
ec.releaseMatrixInput(params.get("target"));
}
else if ( opcode.equalsIgnoreCase("rexpand") ) {
// acquire locks
MatrixBlock target = ec.getMatrixInput(params.get("target"));
// compute the result
double maxVal = Double.parseDouble( params.get("max") );
boolean dirVal = params.get("dir").equals("rows");
boolean cast = Boolean.parseBoolean(params.get("cast"));
boolean ignore = Boolean.parseBoolean(params.get("ignore"));
int numThreads = Integer.parseInt(params.get("k"));
MatrixBlock ret = (MatrixBlock) target.rexpandOperations(
new MatrixBlock(), maxVal, dirVal, cast, ignore, numThreads);
//release locks
ec.setMatrixOutput(output.getName(), ret);
ec.releaseMatrixInput(params.get("target"));
}
else if ( opcode.equalsIgnoreCase("transform")) {
FrameObject fo = ec.getFrameObject(params.get("target"));
MatrixObject out = ec.getMatrixObject(output.getName());
try {
JobReturn jt = DataTransform.cpDataTransform(this, new FrameObject[] { fo } , new MatrixObject[] {out} );
out.updateMatrixCharacteristics(jt.getMatrixCharacteristics(0));
} catch (Exception e) {
throw new DMLRuntimeException(e);
}
}
else if ( opcode.equalsIgnoreCase("transformapply")) {
//acquire locks
FrameBlock data = ec.getFrameInput(params.get("target"));
FrameBlock meta = ec.getFrameInput(params.get("meta"));
String[] colNames = data.getColumnNames();
//compute transformapply
Encoder encoder = EncoderFactory.createEncoder(params.get("spec"), colNames, data.getNumColumns(), meta);
MatrixBlock mbout = encoder.apply(data, new MatrixBlock(data.getNumRows(), data.getNumColumns(), false));
//release locks
ec.setMatrixOutput(output.getName(), mbout);
ec.releaseFrameInput(params.get("target"));
ec.releaseFrameInput(params.get("meta"));
}
else if ( opcode.equalsIgnoreCase("transformdecode")) {
//acquire locks
MatrixBlock data = ec.getMatrixInput(params.get("target"));
FrameBlock meta = ec.getFrameInput(params.get("meta"));
String[] colnames = meta.getColumnNames();
//compute transformdecode
Decoder decoder = DecoderFactory.createDecoder(getParameterMap().get("spec"), colnames, null, meta);
FrameBlock fbout = decoder.decode(data, new FrameBlock(decoder.getSchema()));
//release locks
ec.setFrameOutput(output.getName(), fbout);
ec.releaseMatrixInput(params.get("target"));
ec.releaseFrameInput(params.get("meta"));
}
else if ( opcode.equalsIgnoreCase("transformmeta")) {
//get input spec and path
String spec = getParameterMap().get("spec");
String path = getParameterMap().get(ParameterizedBuiltinFunctionExpression.TF_FN_PARAM_MTD);
String delim = getParameterMap().containsKey("sep") ? getParameterMap().get("sep") : TfUtils.TXMTD_SEP;
//execute transform meta data read
FrameBlock meta = null;
try {
meta = TfMetaUtils.readTransformMetaDataFromFile(spec, path, delim);
}
catch(Exception ex) {
throw new DMLRuntimeException(ex);
}
//release locks
ec.setFrameOutput(output.getName(), meta);
}
else if ( opcode.equalsIgnoreCase("toString")) {
//handle input parameters
int rows = (getParam("rows")!=null) ? Integer.parseInt(getParam("rows")) : TOSTRING_MAXROWS;
int cols = (getParam("cols") != null) ? Integer.parseInt(getParam("cols")) : TOSTRING_MAXCOLS;
int decimal = (getParam("decimal") != null) ? Integer.parseInt(getParam("decimal")) : TOSTRING_DECIMAL;
boolean sparse = (getParam("sparse") != null) ? Boolean.parseBoolean(getParam("sparse")) : TOSTRING_SPARSE;
String separator = (getParam("sep") != null) ? getParam("sep") : TOSTRING_SEPARATOR;
String lineseparator = (getParam("linesep") != null) ? getParam("linesep") : TOSTRING_LINESEPARATOR;
//get input matrix/frame and convert to string
CacheableData<?> data = ec.getCacheableData(getParam("target"));
String out = null;
if( data instanceof MatrixObject ) {
MatrixBlock matrix = (MatrixBlock) data.acquireRead();
out = DataConverter.toString(matrix, sparse, separator, lineseparator, rows, cols, decimal);
}
else if( data instanceof FrameObject ) {
FrameBlock frame = (FrameBlock) data.acquireRead();
out = DataConverter.toString(frame, sparse, separator, lineseparator, rows, cols, decimal);
}
else {
throw new DMLRuntimeException("toString only converts matrix or frames to string");
}
ec.releaseCacheableData(getParam("target"));
ec.setScalarOutput(output.getName(), new StringObject(out));
}
else {
throw new DMLRuntimeException("Unknown opcode : " + opcode);
}
}
}
|
iyounus/incubator-systemml
|
src/main/java/org/apache/sysml/runtime/instructions/cp/ParameterizedBuiltinCPInstruction.java
|
Java
|
apache-2.0
| 14,376
|
=begin
SimpleCSV
Copyright 2014 Austen Higgins-Cassidy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require 'csv'
class SimpleCSV < SimpleOutput::SimpleOutputPlugin
def initialize(file_template)
super()
@filename = file_template
@series_next = 0
end
def options_callback(options)
if options.has_key?('xlabel')
@metadata[@current_name]['xlabel'] = options['xlabel']
end
if options.has_key?('ylabel')
@metadata[@current_name]['ylabel'] = options['ylabel']
end
end
def check_title(name, options)
if options.has_key?('series')
@metadata[name]['series_titles'] << options['series']
else
@metadata[name]['series_titles'] << "#{@metadata[name]['ylabel']}#{@series_next}"
@series_next += 1
end
end
def set_x_callback(data, name, options)
check_title(name, options)
end
def new_data_callback(name)
name = translate_name(name)
@metadata[name] = {'xlabel' => "#{name}_x", 'ylabel' => "#{name}_y", 'series_titles' => []}
end
def append_callback(x,y,name,options)
if !@metadata.has_key?(name)
new_data_callback(name)
end
end
def save()
data = self.get_data_as_xy()
data.each do |set_name, series|
CSV.open("#{@filename}_#{set_name}.csv", "wb") do |csv|
xlabel = @metadata[set_name]['xlabel']
series.each_with_index do |values, i|
values[0].unshift(xlabel)
csv << values[0]
ylabel = @metadata[set_name]['series_titles'].empty? ? @metadata[set_name]['ylabel'] : @metadata[set_name]['series_titles'][i]
values[1].unshift(ylabel)
csv << values[1]
end
end
end
end
end
|
Plasmarobo/simpleoutput
|
lib/simplecsv.rb
|
Ruby
|
apache-2.0
| 2,281
|
#!/bin/bash
set -eux
set -o pipefail
source $(dirname $0)/test_functions.bash
test_formats="tar raw qcow2"
if [ -z "$(which qemu-img)" ]; then
echo "Warning: No qemu-img binary found, cowardly refusing to run tests."
exit 0
fi
for format in '' $test_formats; do
build_test_image $format
echo "Test passed for output formats '$format'."
done
combined_format=$(echo $test_formats | tr ' ' ',')
build_test_image $combined_format
echo "Test passed for output format '$combined_format'."
|
rdo-management/diskimage-builder
|
tests/image_output_formats.bash
|
Shell
|
apache-2.0
| 504
|
package net.paslavsky.msm.client.activity;
import net.paslavsky.msm.client.domain.AudioFile;
import java.util.Collection;
/**
* Music Sync Manager play list
*
* @author Andrey Paslavsky
* @version 1.0
*/
public interface Playlist {
AudioFile getInfo(String id);
Collection<AudioFile> getPlaylist();
AudioFile getCurrent();
AudioFile getPrevious();
AudioFile getNext();
}
|
paslavsky/music-sync-manager
|
msm-web-ui/src/main/java/net/paslavsky/msm/client/activity/Playlist.java
|
Java
|
apache-2.0
| 403
|
package com.mars.test.dropwizard;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
public class QueueManager {
private final Queue queue;
private final Counter pendingJobs;
private final Timer responsesTimer;
public QueueManager(MetricRegistry metrics, String name) {
this.queue = new ConcurrentLinkedQueue();
this.pendingJobs = metrics.counter(MetricRegistry.name(QueueManager.class, "pending-jobs"));
this.responsesTimer = metrics.timer(MetricRegistry.name(QueueManager.class, "responses"));
metrics.register(MetricRegistry.name(QueueManager.class, name, "size"),
new Gauge<Integer>() {
@Override
public Integer getValue() {
return queue.size();
}
});
}
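// Hedged usage sketch (the registry and queue names below are illustrative,
// not defined in this file):
// MetricRegistry metrics = new MetricRegistry();
// QueueManager manager = new QueueManager(metrics, "jobs");
// manager.addJob("job-1"); // increments the pending-jobs counter and times the enqueue
// Object job = manager.takeJob(); // decrements the counter and polls the queue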
public void addJob(String job) {
final Timer.Context context = responsesTimer.time();
pendingJobs.inc();
queue.add(job);
context.stop();
}
public Object takeJob() {
pendingJobs.dec();
return queue.poll();
}
public Long getCount() {
return pendingJobs.getCount();
}
}
|
marsyang1/test-java
|
src/test/java/com/mars/test/dropwizard/QueueManager.java
|
Java
|
apache-2.0
| 1,357
|
/*
* This project is licensed under the Apache License, Version 2.0
* if the following condition is met:
* (otherwise it cannot be used by anyone but the author, Kevin, only)
*
* The original KommonLee project is owned by Lee, Seong Hyun (Kevin).
*
* -What does it mean to you?
* Nothing, unless you want to take the ownership of
* "the original project" (not yours or forked & modified one).
* You are free to use it for both non-commercial and commercial projects
* and free to modify it as the Apache License allows.
*
* -So why is this condition necessary?
* It is only to protect the original project (See the case of Java).
*
*
* Copyright 2009 Lee, Seong Hyun (Kevin)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This package contains commonly used utilities.
*/
package org.elixirian.kommonlee.util;
|
Kevin-Lee/kommonlee-core
|
src/main/java/org/elixirian/kommonlee/util/package-info.java
|
Java
|
apache-2.0
| 1,350
|
package ru.job4j.loop;
/**
*Class for building a chessboard pattern.
*@author ifedorenko
*@since 14.08.2017
*@version 1
*/
public class Board {
/**
* Main method of the program.
*
* @param width number of columns in the board
* @param height number of rows in the board
* @return result the resulting board as a string
*/
public String paint(int width, int height) {
StringBuilder builder = new StringBuilder();
String result;
for (int i = 0; i < height; i++) {
for (int j = 0; j < width; j++) {
if ((i + j) % 2 == 0) {
builder.append("x");
} else {
builder.append(" ");
}
}
builder.append(System.getProperty("line.separator"));
}
result = builder.toString();
return result;
}
}
|
fr3anthe/ifedorenko
|
1.1-Base/src/main/java/ru/job4j/loop/Board.java
|
Java
|
apache-2.0
| 865
|
/**
*
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2011 Eric Haddad Koenig
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.all.client.view.music;
import java.awt.Color;
import java.util.HashMap;
import java.util.Map;
import com.all.appControl.control.ViewEngine;
import com.all.client.util.TrackRepository;
import com.all.client.view.components.TableStyle;
import com.all.core.common.view.SynthColors;
import com.all.core.model.Model;
import com.all.shared.model.Track;
public class DescriptionTableStyle implements TableStyle {
private boolean showCheckboxes = true;
private boolean hideCheckboxes = false;
private static final String FONT11_GRAY170_170_170 = "Font11Gray170_170_170";
private static final String FONT11_PURPLE120_40_140 = "Font11Purple120_40_140";
private static final String FONT11_GRAY77_77_77 = "Font11Gray77_77_77";
private static final String PLAIN_PREFIX = "plain";
private static final String BOLD_PREFIX = "bold";
private Map<Track, Integer> tracks;
private boolean isLocalLibrary;
private ViewEngine viewEngine;
public DescriptionTableStyle() {
}
@Override
public Color getEvenRowColor() {
return SynthColors.CLEAR_GRAY245_245_245;
}
@Override
public Color getOddRowColor() {
return SynthColors.WHITE255_255_255;
}
@Override
public Color getSelectedRowColor() {
return SynthColors.BLUE175_205_225;
}
@Override
public Color getSelectedSeparatorColor() {
return SynthColors.WHITE255_255_255;
}
public int getIndexForTrack(Track t1) {
if (tracks == null) {
return 0;
}
Integer index = tracks.get(t1);
return index == null ? -1 : index;
}
public void setTracks(Iterable<Track> tracks) {
Map<Track, Integer> mapTrack = new HashMap<Track, Integer>();
int i = 0;
for (Track track : tracks) {
mapTrack.put(track, i);
i++;
}
this.tracks = mapTrack;
}
public boolean isTrackAvailable(Track track) {
if (viewEngine == null) {
return true;
}
TrackRepository trackRepository = viewEngine.get(Model.TRACK_REPOSITORY);
return trackRepository == null ? false : trackRepository.isLocallyAvailable(track.getHashcode());
}
public boolean isRemoteTrackAvailable(Track track) {
if (viewEngine == null) {
return false;
}
TrackRepository trackRepository = viewEngine.get(Model.TRACK_REPOSITORY);
return trackRepository == null ? false : trackRepository.isRemotelyAvailable(track.getHashcode());
}
public boolean isTrackInMyLibrary(Track track) {
if (viewEngine == null) {
return true;
}
TrackRepository trackRepository = viewEngine.get(Model.TRACK_REPOSITORY);
return trackRepository == null ? false : trackRepository.getFile(track.getHashcode()) != null;
}
@Override
public Color getGridColor() {
return SynthColors.GRAY150_150_150;
}
public String getAppropiateColorForTrack(Track track, boolean isSelected) {
String name = isSelected ? BOLD_PREFIX : PLAIN_PREFIX;
if (isTrackAvailable(track)) {
return name + FONT11_GRAY77_77_77;
}
if (isRemoteTrackAvailable(track) && !isLocalLibrary) {
return name + FONT11_PURPLE120_40_140;
}
return name + FONT11_GRAY170_170_170;
}
public void setLocalLibrary(boolean b) {
this.isLocalLibrary = b;
}
public boolean getShowCheckboxes() {
return showCheckboxes;
}
public boolean getHideCheckboxes() {
return hideCheckboxes;
}
public void setViewEngine(ViewEngine viewEngine) {
this.viewEngine = viewEngine;
}
}
|
josdem/client
|
client-view/src/main/java/com/all/client/view/music/DescriptionTableStyle.java
|
Java
|
apache-2.0
| 15,004
|
'use strict';
exports.__esModule = true;
exports.default = {
el: {
colorpicker: {
confirm: '確認',
clear: '清空'
},
datepicker: {
now: '現在',
today: '今天',
cancel: '取消',
clear: '清空',
confirm: '確認',
selectDate: '選擇日期',
selectTime: '選擇時間',
startDate: '開始日期',
startTime: '開始時間',
endDate: '結束日期',
endTime: '結束時間',
year: '年',
month1: '1 月',
month2: '2 月',
month3: '3 月',
month4: '4 月',
month5: '5 月',
month6: '6 月',
month7: '7 月',
month8: '8 月',
month9: '9 月',
month10: '10 月',
month11: '11 月',
month12: '12 月',
// week: '周次',
weeks: {
sun: '日',
mon: '一',
tue: '二',
wed: '三',
thu: '四',
fri: '五',
sat: '六'
},
months: {
jan: '一月',
feb: '二月',
mar: '三月',
apr: '四月',
may: '五月',
jun: '六月',
jul: '七月',
aug: '八月',
sep: '九月',
oct: '十月',
nov: '十一月',
dec: '十二月'
}
},
select: {
loading: '加載中',
noMatch: '無匹配資料',
noData: '無資料',
placeholder: '請選擇'
},
cascader: {
noMatch: '無匹配資料',
loading: '加載中',
placeholder: '請選擇'
},
pagination: {
goto: '前往',
pagesize: '項/頁',
total: '共 {total} 項',
pageClassifier: '頁'
},
messagebox: {
title: '提示',
confirm: '確定',
cancel: '取消',
error: '輸入的資料不符規定!'
},
upload: {
deleteTip: '按delete鍵可刪除',
delete: '刪除',
preview: '查看圖片',
continue: '繼續上傳'
},
table: {
emptyText: '暫無資料',
confirmFilter: '篩選',
resetFilter: '重置',
clearFilter: '全部',
sumText: 'Sum' // to be translated
},
tree: {
emptyText: '暫無資料'
},
transfer: {
noMatch: '無匹配資料',
noData: '無資料',
titles: ['List 1', 'List 2'], // to be translated
filterPlaceholder: 'Enter keyword', // to be translated
noCheckedFormat: '{total} items', // to be translated
hasCheckedFormat: '{checked}/{total} checked' // to be translated
}
}
};
|
zengguoyu/sample
|
sample-site/src/main/webapp/assets/static/element-ui/lib/locale/lang/zh-TW.js
|
JavaScript
|
apache-2.0
| 2,528
|
<?php if($type == NUMBER_TYPE_FOUR):
foreach($list as $item):
$price = isset($item['confirm_price']) && $item['confirm_price'] ? $item['confirm_price'] : $item['buffer_price'];
?>
<li id="number_<?php echo $item['number']?>">
<p class="relative">
<span class="jpnumb"><em>靓</em><?php echo $item['number'];?></span>
<span class="tipcon" style="display: none;">价格:<?php echo $price;?>皮蛋</span>
</p>
<p>
<span class="caishen"><?php echo $item['short_desc'];?></span>
</p>
<p>
<a class="numb-btn mt3" title="购买" href="javascript:void(0);" onclick="buyNumber('<?php echo $item['number'];?>','',1)">购买</a>
<a class="numb-btn mt3" title="赠送" href="javascript:void(0)" onclick="_sendNumber('<?php echo $item['number'];?>');">赠送</a>
</p>
</li>
<?php
endforeach;
else:
foreach($list as $item):
$price = isset($item['confirm_price']) && $item['confirm_price'] ? $item['confirm_price'] : $item['buffer_price'];
?>
<li id="number_<?php echo $item['number']?>">
<p><span class="sixnumb"><em>靓</em> <?php echo $item['number']?></span></p>
<p>价格:<?php echo $price;?></p>
<p>
<a class="numb-btn mt3" title="购买" href="javascript:void(0);" onclick="buyNumber('<?php echo $item['number'];?>','',1)">购买</a>
<a class="numb-btn mt3" title="赠送" href="javascript:void(0)" onclick="_sendNumber('<?php echo $item['number'];?>');">赠送</a>
</p>
</li>
<?php
endforeach;
endif;
?>
|
klklmoon/pipishow
|
applications/show/views/shop/numberTemplate.php
|
PHP
|
apache-2.0
| 1,547
|
body{
}
.demo-container{
padding: 0px 0px;
}
.tab-pane{
padding: 25px 0px;
background-color: transparent;
}
.nav-tabs>li.active>a, .nav-tabs>li.active>a:focus, .nav-tabs>li.active>a:hover{
background-color: #e4e5e6;
font-weight: bold;
}
|
TempSquad/TEMP2016-site-client
|
demo/demo.css
|
CSS
|
apache-2.0
| 265
|
(function () {
'use strict';
angular
.module('hz.dashboard', [
'hz.dashboard.launch-instance'
])
.config(config);
config.$inject = ['$provide', '$windowProvider'];
function config($provide, $windowProvider) {
var path = $windowProvider.$get().STATIC_URL + 'dashboard/';
$provide.constant('dashboardBasePath', path);
}
})();
|
xinwu/horizon
|
openstack_dashboard/static/dashboard/dashboard.module.js
|
JavaScript
|
apache-2.0
| 364
|
package com.fengyunweather.app.util;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* Created by Kevin on 2015/10/26.
*/
public class HttpUtil {
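// Note: HttpCallbackListener is assumed to be defined elsewhere in this app,
// with roughly this shape (inferred from the calls below, not part of this file):
// public interface HttpCallbackListener {
// void onFinish(String response);
// void onError(Exception e);
// }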
public static void sendHttpRequest(final String address,final HttpCallbackListener listener){
new Thread(new Runnable() {
@Override
public void run() {
HttpURLConnection connection=null;
try{
URL url=new URL(address);
connection=(HttpURLConnection)url.openConnection();
connection.setRequestMethod("GET");
connection.setConnectTimeout(8000);
connection.setReadTimeout(8000);
InputStream in=connection.getInputStream();
BufferedReader reader=new BufferedReader(new InputStreamReader(in));
StringBuilder response=new StringBuilder();
String line;
while ((line=reader.readLine())!=null){
response.append(line);
}
if(listener!=null){
// invoke the onFinish callback
listener.onFinish(response.toString());
}
}catch (Exception e){
if(listener!=null){
// invoke the onError callback
listener.onError(e);
}
}finally {
if(connection!=null){
connection.disconnect();
}
}
}
}).start();
}
}
|
Kevindroid/fengyunweather
|
app/src/main/java/com/fengyunweather/app/util/HttpUtil.java
|
Java
|
apache-2.0
| 1,742
|
/*
* Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <s2n.h>
#include "error/s2n_errno.h"
#include "tls/s2n_client_extensions.h"
#include "tls/s2n_cipher_suites.h"
#include "tls/s2n_connection.h"
#include "tls/s2n_config.h"
#include "tls/s2n_tls.h"
#include "stuffer/s2n_stuffer.h"
#include "utils/s2n_safety.h"
int s2n_client_cert_verify_recv(struct s2n_connection *conn)
{
struct s2n_stuffer *in = &conn->handshake.io;
s2n_hash_algorithm chosen_hash_alg = S2N_HASH_MD5_SHA1;
s2n_signature_algorithm chosen_signature_alg = S2N_SIGNATURE_RSA;
if(conn->actual_protocol_version == S2N_TLS12){
int pairs_available = 1;
/* Make sure the client is actually using one of the {sig,hash} pairs that we sent in the ClientCertificateRequest */
GUARD(s2n_choose_preferred_signature_hash_pair(in, pairs_available, &chosen_hash_alg, &chosen_signature_alg));
}
uint16_t signature_size;
struct s2n_blob signature;
GUARD(s2n_stuffer_read_uint16(in, &signature_size));
signature.size = signature_size;
signature.data = s2n_stuffer_raw_read(in, signature.size);
notnull_check(signature.data);
struct s2n_hash_state hash_state;
GUARD(s2n_handshake_get_hash_state(conn, chosen_hash_alg, &hash_state));
switch (chosen_signature_alg) {
/* s2n currently only supports RSA Signatures */
case S2N_SIGNATURE_RSA:
GUARD(s2n_pkey_verify(&conn->secure.client_public_key, &hash_state, &signature));
break;
default:
S2N_ERROR(S2N_ERR_INVALID_SIGNATURE_ALGORITHM);
}
/* Client certificate has been verified. Minimize required handshake hash algs */
GUARD(s2n_conn_update_required_handshake_hashes(conn));
return 0;
}
int s2n_client_cert_verify_send(struct s2n_connection *conn)
{
struct s2n_stuffer *out = &conn->handshake.io;
s2n_hash_algorithm chosen_hash_alg = S2N_HASH_MD5_SHA1;
s2n_signature_algorithm chosen_signature_alg = S2N_SIGNATURE_RSA;
if(conn->actual_protocol_version == S2N_TLS12){
chosen_hash_alg = conn->secure.client_cert_hash_algorithm;
chosen_signature_alg = conn->secure.client_cert_sig_alg;
GUARD(s2n_stuffer_write_uint8(out, (uint8_t) chosen_hash_alg));
GUARD(s2n_stuffer_write_uint8(out, (uint8_t) chosen_signature_alg));
}
struct s2n_hash_state hash_state;
GUARD(s2n_handshake_get_hash_state(conn, chosen_hash_alg, &hash_state));
struct s2n_blob signature;
switch (chosen_signature_alg) {
/* s2n currently only supports RSA Signatures */
case S2N_SIGNATURE_RSA:
signature.size = s2n_rsa_private_encrypted_size(&conn->config->cert_and_key_pairs->private_key.key.rsa_key);
GUARD(s2n_stuffer_write_uint16(out, signature.size));
signature.data = s2n_stuffer_raw_write(out, signature.size);
notnull_check(signature.data);
GUARD(s2n_pkey_sign(&conn->config->cert_and_key_pairs->private_key, &hash_state, &signature));
break;
default:
S2N_ERROR(S2N_ERR_INVALID_SIGNATURE_ALGORITHM);
}
/* Client certificate has been verified. Minimize required handshake hash algs */
GUARD(s2n_conn_update_required_handshake_hashes(conn));
return 0;
}
|
jldodds/s2n
|
tls/s2n_client_cert_verify.c
|
C
|
apache-2.0
| 3,766
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ComputeTimeCursor
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-pubsublite
# [START pubsublite_v1_generated_TopicStatsService_ComputeTimeCursor_async]
from google.cloud import pubsublite_v1
async def sample_compute_time_cursor():
# Create a client
client = pubsublite_v1.TopicStatsServiceAsyncClient()
# Initialize request argument(s)
request = pubsublite_v1.ComputeTimeCursorRequest(
topic="topic_value",
partition=986,
)
# Make the request
response = await client.compute_time_cursor(request=request)
# Handle the response
print(response)
# [END pubsublite_v1_generated_TopicStatsService_ComputeTimeCursor_async]
|
googleapis/python-pubsublite
|
samples/generated_samples/pubsublite_v1_generated_topic_stats_service_compute_time_cursor_async.py
|
Python
|
apache-2.0
| 1,540
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.globalaccelerator.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/globalaccelerator-2018-08-08/WithdrawByoipCidr"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class WithdrawByoipCidrResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* Information about the address pool.
* </p>
*/
private ByoipCidr byoipCidr;
/**
* <p>
* Information about the address pool.
* </p>
*
* @param byoipCidr
* Information about the address pool.
*/
public void setByoipCidr(ByoipCidr byoipCidr) {
this.byoipCidr = byoipCidr;
}
/**
* <p>
* Information about the address pool.
* </p>
*
* @return Information about the address pool.
*/
public ByoipCidr getByoipCidr() {
return this.byoipCidr;
}
/**
* <p>
* Information about the address pool.
* </p>
*
* @param byoipCidr
* Information about the address pool.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public WithdrawByoipCidrResult withByoipCidr(ByoipCidr byoipCidr) {
setByoipCidr(byoipCidr);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getByoipCidr() != null)
sb.append("ByoipCidr: ").append(getByoipCidr());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof WithdrawByoipCidrResult == false)
return false;
WithdrawByoipCidrResult other = (WithdrawByoipCidrResult) obj;
if (other.getByoipCidr() == null ^ this.getByoipCidr() == null)
return false;
if (other.getByoipCidr() != null && other.getByoipCidr().equals(this.getByoipCidr()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getByoipCidr() == null) ? 0 : getByoipCidr().hashCode());
return hashCode;
}
@Override
public WithdrawByoipCidrResult clone() {
try {
return (WithdrawByoipCidrResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
|
aws/aws-sdk-java
|
aws-java-sdk-globalaccelerator/src/main/java/com/amazonaws/services/globalaccelerator/model/WithdrawByoipCidrResult.java
|
Java
|
apache-2.0
| 3,780
|
/*
* Copyright 2009 Phil Burk, Mobileer Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jsyn.unitgen;
import com.jsyn.data.FloatSample;
import com.jsyn.data.SegmentedEnvelope;
import com.jsyn.data.ShortSample;
import com.jsyn.ports.UnitOutputPort;
/**
* This reader can play any SequentialData and will interpolate between adjacent values. It can play
* both {@link SegmentedEnvelope envelopes} and {@link FloatSample samples}.
*
* <pre><code>
// Queue an envelope to the dataQueue port.
ampEnv.dataQueue.queue(ampEnvelope);
</code></pre>
*
* @author Phil Burk (C) 2009 Mobileer Inc
* @see FloatSample
* @see ShortSample
* @see SegmentedEnvelope
*/
public class VariableRateMonoReader extends VariableRateDataReader {
private double phase; // ranges from 0.0 to 1.0
private double baseIncrement;
private double source;
private double current;
private double target;
private boolean starved;
private boolean ranout;
public VariableRateMonoReader() {
super();
addPort(output = new UnitOutputPort("Output"));
starved = true;
baseIncrement = 1.0;
}
@Override
public void generate(int start, int limit) {
double[] amplitudes = amplitude.getValues();
double[] rates = rate.getValues();
double[] outputs = output.getValues();
for (int i = start; i < limit; i++) {
// Decrement phase and advance through queued data until phase back
// in range.
if (phase >= 1.0) {
while (phase >= 1.0) {
source = target;
phase -= 1.0;
baseIncrement = advanceToNextFrame();
}
} else if ((i == 0) && (starved || !dataQueue.isTargetValid())) {
// A starved condition can only be cured at the beginning of a
// block.
source = target = current;
phase = 0.0;
baseIncrement = advanceToNextFrame();
}
// Interpolate along line segment.
current = ((target - source) * phase) + source;
outputs[i] = current * amplitudes[i];
double phaseIncrement = baseIncrement * rates[i];
phase += limitPhaseIncrement(phaseIncrement);
}
if (ranout) {
ranout = false;
if (dataQueue.testAndClearAutoStop()) {
autoStop();
}
}
}
public double limitPhaseIncrement(double phaseIncrement) {
return phaseIncrement;
}
private double advanceToNextFrame() {
// Fire callbacks before getting next value because we already got the
// target value previously.
dataQueue.firePendingCallbacks();
if (dataQueue.hasMore()) {
starved = false;
target = dataQueue.readNextMonoDouble(getFramePeriod());
// calculate phase increment;
return getFramePeriod() * dataQueue.getNormalizedRate();
} else {
starved = true;
ranout = true;
phase = 0.0;
return 0.0;
}
}
}
|
philburk/jsyn
|
src/main/java/com/jsyn/unitgen/VariableRateMonoReader.java
|
Java
|
apache-2.0
| 3,696
|
import unittest
from biothings_explorer.registry import Registry
from biothings_explorer.user_query_dispatcher import SingleEdgeQueryDispatcher
from .utils import get_apis
reg = Registry()
class TestSingleHopQuery(unittest.TestCase):
def test_disease2protein(self):
"""Test gene-protein"""
seqd = SingleEdgeQueryDispatcher(
output_cls="Protein",
input_cls="Disease",
input_id="DOID",
pred="related_to",
output_id="PR",
values="DOID:12143",
)
seqd.query()
self.assertTrue("PR:000007572" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["PR:000007572"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2genomicentity(self):
"""Test gene-protein"""
seqd = SingleEdgeQueryDispatcher(
output_cls="GenomicEntity",
input_cls="Disease",
pred="related_to",
input_id="DOID",
output_id="SO",
values="DOID:12143",
)
seqd.query()
self.assertTrue("SO:0000999" in seqd.G)
self.assertTrue("SO:0001853" in seqd.G)
def test_disease2chemicalsubstance(self):
"""Test gene-genomic entity"""
seqd = SingleEdgeQueryDispatcher(
output_cls="ChemicalSubstance",
input_cls="Disease",
input_id="DOID",
values="DOID:12143",
output_id="CHEBI",
)
seqd.query()
self.assertTrue("CHEBI:65349" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["CHEBI:65349"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2gene(self):
"""Test gene-gene"""
seqd = SingleEdgeQueryDispatcher(
output_cls="Gene", input_cls="Disease", input_id="DOID", values="DOID:12143"
)
seqd.query()
self.assertTrue("DHDDS" in seqd.G)
self.assertTrue("RPL3" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["DHDDS"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2anatomy(self):
"""Test gene-anatomy"""
seqd = SingleEdgeQueryDispatcher(
output_cls="AnatomicalEntity",
input_cls="Disease",
input_id="DOID",
output_id="UBERON",
values="DOID:12143",
)
seqd.query()
self.assertTrue("UBERON:0007023" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["UBERON:0007023"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2ma(self):
"""Test gene-molecular_activity"""
seqd = SingleEdgeQueryDispatcher(
output_cls="MolecularActivity",
input_cls="Disease",
input_id="DOID",
output_id="GO",
values="DOID:12143",
)
seqd.query()
self.assertTrue("GO:0004935" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["GO:0004935"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2bp(self):
"""Test gene-biological_process"""
seqd = SingleEdgeQueryDispatcher(
output_cls="BiologicalProcess",
input_cls="Disease",
input_id="DOID",
values="DOID:12143",
output_id="GO",
)
seqd.query()
self.assertTrue("GO:0007605" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["GO:0007605"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2cc(self):
"""Test gene-cellular_component"""
seqd = SingleEdgeQueryDispatcher(
output_cls="CellularComponent",
input_cls="Disease",
input_id="DOID",
output_id="GO",
values="DOID:0001816",
)
seqd.query()
self.assertTrue("GO:0030017" in seqd.G)
edges = seqd.G["DOID:DOID:0001816"]["GO:0030017"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2cell(self):
"""Test gene-cell"""
seqd = SingleEdgeQueryDispatcher(
output_cls="Cell",
input_cls="Disease",
input_id="DOID",
output_id="CL",
values="DOID:12143",
)
seqd.query()
self.assertTrue("CL:0000731" in seqd.G)
def test_disease2disease(self):
"""Test gene-disease"""
seqd = SingleEdgeQueryDispatcher(
output_cls="Disease",
input_cls="Disease",
input_id="DOID",
output_id="DOID",
values="DOID:12143",
)
seqd.query()
self.assertTrue("DOID:225" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["DOID:225"]
self.assertTrue("CORD Disease API" in get_apis(edges))
|
biothings/biothings_explorer
|
tests/test_apis/test_corddisease.py
|
Python
|
apache-2.0
| 4,836
|