repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
goldsborough/algs4 | stacks-queues/java/src/LinkedListQueue.java | /**
* Created by petergoldsborough on 08/15/15.
*/
public class LinkedListQueue<T>
{
    /**
     * Appends an item to the back of the queue.
     *
     * Fix: the original unconditionally dereferenced {@code _last}, which is
     * null for an empty queue, so the very first enqueue threw a
     * NullPointerException. An empty queue must make the new node both head
     * and tail.
     *
     * @param item the item to append
     */
    public void enqueue(T item)
    {
        Node node = new Node(item);
        if (_last == null)
        {
            // Queue was empty: the new node is both head and tail.
            _first = node;
        }
        else
        {
            _last.next = node;
        }
        _last = node;
        ++_size;
    }

    /**
     * Removes and returns the item at the front of the queue.
     *
     * Fix: the original dereferenced {@code _first} without a null check
     * (NPE on an empty queue) and never reset {@code _last}, leaving it
     * pointing at a removed node once the queue drained, which corrupted
     * the next enqueue.
     *
     * @return the item that was at the front of the queue
     * @throws java.util.NoSuchElementException if the queue is empty
     */
    public T dequeue()
    {
        if (_first == null)
        {
            throw new java.util.NoSuchElementException("dequeue on empty queue");
        }
        T item = _first.item;
        _first = _first.next;
        if (_first == null)
        {
            // Queue drained: drop the stale tail reference so the node can be GC'd.
            _last = null;
        }
        --_size;
        return item;
    }

    /** @return true iff the queue contains no items */
    public boolean isEmpty()
    {
        return _size == 0;
    }

    /** @return the number of items currently in the queue */
    public int size()
    {
        return _size;
    }

    /** Singly-linked node holding one queued item. */
    private class Node
    {
        Node(T i)
        {
            item = i;
        }

        T item;
        Node next = null;
    };

    private Node _first = null; // head: next item to dequeue
    private Node _last = null;  // tail: most recently enqueued item
    private int _size = 0;      // number of items in the queue
}
|
FlashYoshi/neio | examples/0.3/thesis/src/main/java/be/ugent/thesis/parsing/Thesis.java | <filename>examples/0.3/thesis/src/main/java/be/ugent/thesis/parsing/Thesis.java
// Generated from Thesis.g4 by ANTLR 4.5.1
package be.ugent.thesis.parsing;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class Thesis extends Lexer {
static { RuntimeMetaData.checkVersion("4.5.1", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
HEADER=1, DOCUMENT_TYPE=2, COMMENT=3, MULTILINE_COMMENT=4, WORD=5, WORDS=6,
WS=7, NEWLINE=8, HASH=9;
public static String[] modeNames = {
"DEFAULT_MODE"
};
public static final String[] ruleNames = {
"HEADER", "DOCUMENT_TYPE", "COMMENT", "MULTILINE_COMMENT", "WORD", "WORDS",
"WS", "NEWLINE", "HASH"
};
private static final String[] _LITERAL_NAMES = {
null, null, null, null, null, null, null, null, null, "'#'"
};
private static final String[] _SYMBOLIC_NAMES = {
null, "HEADER", "DOCUMENT_TYPE", "COMMENT", "MULTILINE_COMMENT", "WORD",
"WORDS", "WS", "NEWLINE", "HASH"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
public Thesis(CharStream input) {
super(input);
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
@Override
public String getGrammarFileName() { return "Thesis.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public String[] getModeNames() { return modeNames; }
@Override
public ATN getATN() { return _ATN; }
public static final String _serializedATN =
"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\13c\b\1\4\2\t\2\4"+
"\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\3\2\3\2"+
"\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3("+
"\n\3\3\4\3\4\3\4\3\4\7\4.\n\4\f\4\16\4\61\13\4\3\4\3\4\3\5\3\5\3\5\3\5"+
"\7\59\n\5\f\5\16\5<\13\5\3\5\3\5\3\5\3\5\3\5\3\6\6\6D\n\6\r\6\16\6E\3"+
"\7\7\7I\n\7\f\7\16\7L\13\7\3\7\3\7\7\7P\n\7\f\7\16\7S\13\7\6\7U\n\7\r"+
"\7\16\7V\3\b\3\b\3\t\5\t\\\n\t\3\t\3\t\3\t\3\t\3\n\3\n\3:\2\13\3\3\5\4"+
"\7\5\t\6\13\7\r\b\17\t\21\n\23\13\3\2\5\4\2\f\f\17\17\6\2\13\f\17\17\""+
"\"%%\4\2\13\13\"\"j\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2"+
"\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\3\25\3"+
"\2\2\2\5\'\3\2\2\2\7)\3\2\2\2\t\64\3\2\2\2\13C\3\2\2\2\rT\3\2\2\2\17X"+
"\3\2\2\2\21[\3\2\2\2\23a\3\2\2\2\25\26\7]\2\2\26\27\5\5\3\2\27\30\7_\2"+
"\2\30\4\3\2\2\2\31\32\7F\2\2\32\33\7q\2\2\33\34\7e\2\2\34\35\7w\2\2\35"+
"\36\7o\2\2\36\37\7g\2\2\37 \7p\2\2 (\7v\2\2!\"\7U\2\2\"#\7n\2\2#$\7k\2"+
"\2$%\7f\2\2%&\7g\2\2&(\7u\2\2\'\31\3\2\2\2\'!\3\2\2\2(\6\3\2\2\2)*\7\61"+
"\2\2*+\7\61\2\2+/\3\2\2\2,.\n\2\2\2-,\3\2\2\2.\61\3\2\2\2/-\3\2\2\2/\60"+
"\3\2\2\2\60\62\3\2\2\2\61/\3\2\2\2\62\63\b\4\2\2\63\b\3\2\2\2\64\65\7"+
"\61\2\2\65\66\7,\2\2\66:\3\2\2\2\679\13\2\2\28\67\3\2\2\29<\3\2\2\2:;"+
"\3\2\2\2:8\3\2\2\2;=\3\2\2\2<:\3\2\2\2=>\7,\2\2>?\7\61\2\2?@\3\2\2\2@"+
"A\b\5\2\2A\n\3\2\2\2BD\n\3\2\2CB\3\2\2\2DE\3\2\2\2EC\3\2\2\2EF\3\2\2\2"+
"F\f\3\2\2\2GI\5\17\b\2HG\3\2\2\2IL\3\2\2\2JH\3\2\2\2JK\3\2\2\2KM\3\2\2"+
"\2LJ\3\2\2\2MQ\5\13\6\2NP\5\17\b\2ON\3\2\2\2PS\3\2\2\2QO\3\2\2\2QR\3\2"+
"\2\2RU\3\2\2\2SQ\3\2\2\2TJ\3\2\2\2UV\3\2\2\2VT\3\2\2\2VW\3\2\2\2W\16\3"+
"\2\2\2XY\t\4\2\2Y\20\3\2\2\2Z\\\7\17\2\2[Z\3\2\2\2[\\\3\2\2\2\\]\3\2\2"+
"\2]^\7\f\2\2^_\3\2\2\2_`\b\t\2\2`\22\3\2\2\2ab\7%\2\2b\24\3\2\2\2\13\2"+
"\'/:EJQV[\3\b\2\2";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
} |
Biyorne/learningcpp | castle-crawl/src/game-coordinator.cpp | // This is an open source non-commercial project. Dear PVS-Studio, please check it.
// PVS-Studio Static Code Analyzer for C, C++ and C#: http://www.viva64.com
//
// game-coordinator.cpp
//
#include "game-coordinator.hpp"
#include "util.hpp"
#include <algorithm>
#include <filesystem>
#include <iostream>
#include <memory>
#include <sstream>
#include <SFML/Graphics.hpp>
namespace castlecrawl
{
// Default-constructs every subsystem, then wires them all together through
// m_context.  NOTE: the initializer list relies on declaration order in the
// class: m_random must be declared before m_soundPlayer/m_animationPlayer
// (both take it by reference), and every subsystem must precede m_context,
// which captures references to all of them -- confirm the header keeps that
// order if members are ever reorganized.
GameCoordinator::GameCoordinator()
    : m_window()
    , m_maps()
    , m_media()
    , m_board()
    , m_layout()
    , m_game()
    , m_config()
    , m_stateMachine()
    , m_popupManager()
    , m_random()
    , m_soundPlayer(m_random)
    , m_animationPlayer(m_random)
    , m_context(
          m_game,
          m_maps,
          m_board,
          m_config,
          m_layout,
          m_media,
          m_stateMachine,
          m_popupManager,
          m_random,
          m_soundPlayer,
          m_animationPlayer)
{}
// Resets game state, validates the media directory, opens the window, and
// loads audio/graphics/maps.  The ordering below is significant and must not
// be rearranged.
void GameCoordinator::initializeSubsystems(const GameConfig & config)
{
    m_game.reset();
    m_config = config;

    // Fail fast if the media directory is missing or not a directory.
    M_CHECK_SS(std::filesystem::exists(m_config.media_dir_path), m_config.media_dir_path);
    M_CHECK_SS(std::filesystem::is_directory(m_config.media_dir_path), m_config.media_dir_path);

    // this can change m_config and m_layout so call this right after m_config is set
    openWindow();

    m_soundPlayer.setMediaPath((m_config.media_dir_path / "sfx").string());
    m_soundPlayer.volume(75.0f);

    m_animationPlayer.setMediaPath((m_config.media_dir_path / "anim").string());

    m_media.load(m_config, m_layout, m_soundPlayer);

    // map loading depends only on m_random, so passing the full context here is safe TODO
    m_maps.load(m_context);

    // Start in level-1-first-room with the player at map cell (5,3).
    m_context.switchToMap({ { 0, 0 }, "level-1-first-room", { 5, 3 } });

    m_stateMachine.setChangePending(State::Splash);
}
// (Re)creates the render window from m_config, then writes the actual window
// metrics back into m_config and recomputes the layout.  May therefore modify
// both m_config and m_layout as a side effect.
void GameCoordinator::openWindow()
{
    // Close any previously opened window before creating a new one.
    m_window.close();

    const auto style{ (m_config.is_fullscreen) ? sf::Style::Fullscreen : sf::Style::Default };

    m_window.create(m_config.video_mode, m_config.game_name, style);
    m_window.setFramerateLimit(m_config.frame_rate_limit);
    m_window.setKeyRepeatEnabled(false);

    // verify the window size is what was specified/expected,
    // otherwise all the size/positions calculations will be wrong
    const sf::Vector2u windowExpectedSize{ m_config.video_mode.width,
                                           m_config.video_mode.height };

    const sf::Vector2u windowActualSize{ m_window.getSize() };

    std::cout << "Game Window: " << windowExpectedSize << " at "
              << m_config.video_mode.bitsPerPixel << "bits per pixel and a "
              << m_config.frame_rate_limit << " fps limit." << std::endl;

    M_CHECK_SS(
        m_window.isOpen(),
        "Failed to make and open the graphics window.  (sf::RenderWindow::isOpen() == false)");

    if (windowActualSize != windowExpectedSize)
    {
        // Not fatal: later calculations use the actual size captured below.
        std::cout << "Failed to create a window at " << windowExpectedSize
                  << ", but strangely, a window did open at " << windowActualSize
                  << ".  So...meh." << std::endl;
    }

    // Record what we actually got so all later layout math uses real values.
    m_config.video_mode.width = windowActualSize.x;
    m_config.video_mode.height = windowActualSize.y;

    // NOTE(review): getSettings().depthBits is the depth-buffer bit count, not
    // color bits per pixel -- confirm overwriting bitsPerPixel with it is intended.
    m_config.video_mode.bitsPerPixel = m_window.getSettings().depthBits;

    m_layout.setupWindow(m_config);

    std::cout << "Game Window Cells: width_ratio=" << m_config.map_cell_size_ratio
              << ", pixels=" << m_layout.mapCellDimm()
              << ", grid=" << (m_layout.windowSize() / m_layout.mapCellSize()) << std::endl;
}
void GameCoordinator::run(const GameConfig & config)
{
initializeSubsystems(config);
sf::Clock frameClock;
while (m_window.isOpen() && !m_game.isGameOver())
{
handleEvents();
update(frameClock.restart().asSeconds());
draw();
m_stateMachine.changeIfPending(m_context);
}
}
// Drains the SFML event queue, handling window-close directly and forwarding
// everything else to the active game state.
void GameCoordinator::handleEvents()
{
    sf::Event event;
    while (m_window.isOpen() && !m_game.isGameOver() && m_window.pollEvent(event))
    {
        if (sf::Event::Closed == event.type)
        {
            // Player closed the window: shut down and request the Quit state.
            std::cout << "Player closed the window.  Quitting." << std::endl;
            m_window.close();
            m_stateMachine.setChangePending(State::Quit);
            return;
        }

        // All other events are handled by whatever state is active.
        m_stateMachine.state().handleEvent(m_context, event);
    }
}
// Advances the active state's simulation by the given frame time, in seconds.
void GameCoordinator::update(const float frameTimeSec)
{
    m_stateMachine.state().update(m_context, frameTimeSec);
}
void GameCoordinator::draw()
{
m_window.clear();
m_stateMachine.state().draw(m_context, m_window, sf::RenderStates());
m_window.display();
}
} // namespace castlecrawl
|
enfoTek/tomato.linksys.e2000.nvram-mod | tools-src/gnu/gcc/gcc/testsuite/gcc.c-torture/compile/930325-1.c | <gh_stars>100-1000
typedef unsigned uint;
/* GCC compile-torture test case (930325-1).  The pre-C99 implicit-int
 * return types are intentional -- do not "modernize" the declarations.
 *
 * g: copy c uints from just below s to just below d, walking both pointers
 * backwards (a memmove-style reverse copy). */
inline
g (uint *s, uint *d, uint c)
{
  while (c != 0)
    {
      *--d = *--s;
      c--;
    }
}
/* f: skip leading zero words of p1; return 1 if all c words were zero,
 * otherwise reverse-copy the remaining words into p2 via g().
 * NOTE(review): the non-zero path falls off the end without returning a
 * value -- harmless here, since this file exists only to exercise the
 * compiler, not to run. */
f (uint *p1, uint c, uint *p2)
{
  while (c > 0 && *p1 == 0)
    {
      p1++;
      c--;
    }
  if (c == 0)
    return 1;
  g (p2, p1, c);
}
|
JamesCao2048/BlizzardData | Corpus/aspectj/4050.java | public @interface Demo {
Class[] myValues() default { };
}
|
rrjha/dosp | xinu/lib/strncat.c | <reponame>rrjha/dosp
/* strncat.c - strncat */
/*------------------------------------------------------------------------
* strncat - Concatenate s2 onto the end of s1. Sufficient space must
 *			exist at the end of s1; at most n characters are
* copied
*------------------------------------------------------------------------
*/
/* Appends at most n characters of s2 to the end of s1 and NUL-terminates
 * the result.  Returns s1.  The caller must ensure s1 has enough room. */
char *strncat(
        char *s1,       /* destination string (must have sufficient space) */
        const char *s2, /* source string appended onto s1 */
        int n           /* maximum number of characters to copy from s2 */
        )
{
    char *os1;          /* original s1, returned to the caller */

    os1 = s1;
    /* Advance s1 past its terminating NUL, then back up onto it so the
     * copy below overwrites the terminator. */
    while (*s1++)
        ;
    --s1;
    /* Copy characters (including s2's NUL, which ends the loop).  Once n
     * characters have been copied, back up over the extra character and
     * terminate the string explicitly. */
    while ((*s1++ = *s2++))
        if (--n < 0)
        {
            *--s1 = '\0';
            break;
        }
    return (os1);
}
|
jeffpuzzo/jp-rosa-react-form-wizard | node_modules/@patternfly/react-table/node_modules/@patternfly/react-icons/dist/js/icons/telegram-icon.js | "use strict"
exports.__esModule = true;
exports.TelegramIconConfig = {
name: 'TelegramIcon',
height: 512,
width: 496,
svgPath: 'M248 8C111 8 0 119 0 256s111 248 248 248 248-111 248-248S385 8 248 8zm121.8 169.9l-40.7 191.8c-3 13.6-11.1 16.9-22.4 10.5l-62-45.7-29.9 28.8c-3.3 3.3-6.1 6.1-12.5 6.1l4.4-63.1 114.9-103.8c5-4.4-1.1-6.9-7.7-2.5l-142 89.4-61.2-19.1c-13.3-4.2-13.6-13.3 2.8-19.7l239.1-92.2c11.1-4 20.8 2.7 17.2 19.5z',
yOffset: 0,
xOffset: 0,
};
exports.TelegramIcon = require('../createIcon').createIcon(exports.TelegramIconConfig);
exports["default"] = exports.TelegramIcon; |
ShadowEditor/ShadowEditor | web/test/WebWorldWind/src/util/GoToAnimator.js | /*
* Copyright 2003-2006, 2009, 2017, United States Government, as represented by the Administrator of the
* National Aeronautics and Space Administration. All rights reserved.
*
* The NASAWorldWind/WebWorldWind platform is licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @exports GoToAnimator
*/
import Location from '../geom/Location';
import Position from '../geom/Position';
import Vec3 from '../geom/Vec3';
/**
 * Constructs a GoTo animator.
 * @alias GoToAnimator
 * @constructor
 * @classdesc Incrementally and smoothly moves a {@link Navigator} to a specified position.
 * Drive it with {@link GoToAnimator#goTo}; interrupt it with {@link GoToAnimator#cancel}.
 * @param {WorldWindow} worldWindow The WorldWindow in which to perform the animation.
 */
function GoToAnimator(worldWindow) {
    /**
     * The WorldWindow associated with this animator.
     * @type {WorldWindow}
     * @readonly
     */
    this.wwd = worldWindow;

    /**
     * The frequency in milliseconds at which to animate the position change.
     * @type {Number}
     * @default 20
     */
    this.animationFrequency = 20;

    /**
     * The animation's duration, in milliseconds. When the distance is short, less than twice the viewport
     * size, the travel time is reduced proportionally to the distance to travel. It therefore takes less
     * time to move shorter distances.
     * @type {Number}
     * @default 3000
     */
    this.travelTime = 3000;

    /**
     * Indicates whether the current or most recent animation has been cancelled. Use the cancel() function
     * to cancel an animation.
     * @type {Boolean}
     * @default false
     * @readonly
     */
    this.cancelled = false;
}
/**
 * Stops the current animation.  The timer callback in goTo() checks this
 * flag and stops rescheduling itself; the completion callback (if one was
 * supplied to goTo) is still invoked.
 */
GoToAnimator.prototype.cancel = function () {
    this.cancelled = true;
};
/**
 * Moves the navigator to a specified location or position.
 * @param {Location | Position} position The location or position to move the navigator to. If this
 * argument contains an "altitude" property, as {@link Position} does, the end point of the navigation is
 * at the specified altitude. Otherwise the end point is at the current altitude of the navigator.
 * NOTE(review): an altitude of exactly 0 is falsy and is therefore treated as absent -- confirm intended.
 * @param {Function} completionCallback If not null or undefined, specifies a function to call when the
 * animation completes. The completion callback is called with a single argument, this animator.
 * NOTE(review): when start and target positions are identical this method returns early WITHOUT
 * invoking the completion callback -- callers should not rely on it always firing.
 */
GoToAnimator.prototype.goTo = function (position, completionCallback) {
    this.completionCallback = completionCallback;

    // Reset the cancellation flag.
    this.cancelled = false;

    // Capture the target position and determine its altitude.
    this.targetPosition = new Position(position.latitude, position.longitude,
        position.altitude || this.wwd.navigator.range);

    // Capture the start position and start time.
    this.startPosition = new Position(
        this.wwd.navigator.lookAtLocation.latitude,
        this.wwd.navigator.lookAtLocation.longitude,
        this.wwd.navigator.range);
    this.startTime = Date.now();

    // Determination of the pan and range velocities requires the distance to be travelled.
    var animationDuration = this.travelTime,
        panDistance = Location.greatCircleDistance(this.startPosition, this.targetPosition),
        rangeDistance;

    // Determine how high we need to go to give the user context. The max altitude computed is approximately
    // that needed to fit the start and end positions in the same viewport assuming a 45 degree field of view.
    var pA = this.wwd.globe.computePointFromLocation(
        this.startPosition.latitude, this.startPosition.longitude, new Vec3(0, 0, 0)),
        pB = this.wwd.globe.computePointFromLocation(
            this.targetPosition.latitude, this.targetPosition.longitude, new Vec3(0, 0, 0));
    this.maxAltitude = pA.distanceTo(pB);

    // Determine an approximate viewport size in radians in order to determine whether we actually change
    // the range as we pan to the new location. We don't want to change the range if the distance between
    // the start and target positions is small relative to the current viewport.
    var viewportSize = this.wwd.pixelSizeAtDistance(this.startPosition.altitude)
        * this.wwd.canvas.clientWidth / this.wwd.globe.equatorialRadius;
    if (panDistance <= 2 * viewportSize) {
        // Start and target positions are close, so don't back out.
        this.maxAltitude = this.startPosition.altitude;
    }

    // We need to capture the time the max altitude is reached in order to begin decreasing the range
    // midway through the animation. If we're already above the max altitude, then that time is now since
    // we don't back out if the current altitude is above the computed max altitude.
    this.maxAltitudeReachedTime = this.maxAltitude <= this.wwd.navigator.range ? Date.now() : null;

    // Compute the total range to travel since we need that to compute the range velocity.
    // Note that the range velocity and pan velocity are computed so that the respective animations, which
    // operate independently, finish at the same time.
    if (this.maxAltitude > this.startPosition.altitude) {
        // Up to the max altitude, then back down to the target altitude.
        rangeDistance = Math.max(0, this.maxAltitude - this.startPosition.altitude);
        rangeDistance += Math.abs(this.targetPosition.altitude - this.maxAltitude);
    } else {
        rangeDistance = Math.abs(this.targetPosition.altitude - this.startPosition.altitude);
    }

    // Determine which distance governs the animation duration.
    var animationDistance = Math.max(panDistance, rangeDistance / this.wwd.globe.equatorialRadius);
    if (animationDistance === 0) {
        return; // current and target positions are the same
    }

    if (animationDistance < 2 * viewportSize) {
        // Start and target positions are close, so reduce the travel time based on the
        // distance to travel relative to the viewport size.
        animationDuration = Math.min(animationDistance / viewportSize * this.travelTime, this.travelTime);
    }

    // Don't let the animation duration go to 0.
    animationDuration = Math.max(1, animationDuration);

    // Determine the pan velocity, in radians per millisecond.
    this.panVelocity = panDistance / animationDuration;

    // Determine the range velocity, in meters per millisecond.
    this.rangeVelocity = rangeDistance / animationDuration; // meters per millisecond

    // Set up the animation timer.  The callback reschedules itself until the
    // animation completes or is cancelled, then fires the completion callback.
    var thisAnimator = this;
    var timerCallback = function () {
        if (thisAnimator.cancelled) {
            if (thisAnimator.completionCallback) {
                thisAnimator.completionCallback(thisAnimator);
            }
            return;
        }

        if (thisAnimator.update()) {
            setTimeout(timerCallback, thisAnimator.animationFrequency);
        } else if (thisAnimator.completionCallback) {
            thisAnimator.completionCallback(thisAnimator);
        }
    };
    setTimeout(timerCallback, this.animationFrequency); // invoke it the first time
};
// Intentionally not documented.  Timer callback: advances both the range and
// the pan animations by one step, redraws, and reports whether either
// animation still has work to do.
GoToAnimator.prototype.update = function () {
    var navigator = this.wwd.navigator;
    var currentPosition = new Position(
        navigator.lookAtLocation.latitude,
        navigator.lookAtLocation.longitude,
        navigator.range);

    // Both animators must run each tick; neither call may be short-circuited.
    var rangeActive = this.updateRange(currentPosition);
    var panActive = this.updateLocation(currentPosition);

    this.wwd.redraw();

    return panActive || rangeActive;
};
// Intentionally not documented.  Animates the range (camera distance): first
// climbs toward this.maxAltitude, then descends/ascends toward the target
// altitude.  Returns true while the range animation should continue.
GoToAnimator.prototype.updateRange = function (currentPosition) {
    // This function animates the range.
    var continueAnimation = false,
        nextRange, elapsedTime;

    // If we haven't reached the maximum altitude, then step-wise increase it. Otherwise step-wise change
    // the range towards the target altitude.
    if (!this.maxAltitudeReachedTime) {
        elapsedTime = Date.now() - this.startTime;
        nextRange = Math.min(this.startPosition.altitude + this.rangeVelocity * elapsedTime, this.maxAltitude);
        // We're done climbing once we get within 1 meter of the max altitude.
        if (Math.abs(this.wwd.navigator.range - nextRange) < 1) {
            this.maxAltitudeReachedTime = Date.now();
        }
        this.wwd.navigator.range = nextRange;
        continueAnimation = true;
    } else {
        elapsedTime = Date.now() - this.maxAltitudeReachedTime;
        if (this.maxAltitude > this.targetPosition.altitude) {
            // Descending from the peak toward the target; clamp at the target.
            nextRange = this.maxAltitude - this.rangeVelocity * elapsedTime;
            nextRange = Math.max(nextRange, this.targetPosition.altitude);
        } else {
            // Ascending from the peak toward the target; clamp at the target.
            nextRange = this.maxAltitude + this.rangeVelocity * elapsedTime;
            nextRange = Math.min(nextRange, this.targetPosition.altitude);
        }
        this.wwd.navigator.range = nextRange;

        // We're done once we get within 1 meter of the desired range.
        continueAnimation = Math.abs(this.wwd.navigator.range - this.targetPosition.altitude) > 1;
    }

    return continueAnimation;
};
// Intentionally not documented.  Animates the pan: moves the look-at location
// one step along the great circle toward the target.  Returns true while the
// pan animation should continue.
GoToAnimator.prototype.updateLocation = function (currentPosition) {
    // This function animates the pan to the desired location.
    var elapsedTime = Date.now() - this.startTime,
        distanceTravelled = Location.greatCircleDistance(this.startPosition, currentPosition),
        distanceRemaining = Location.greatCircleDistance(currentPosition, this.targetPosition),
        azimuthToTarget = Location.greatCircleAzimuth(currentPosition, this.targetPosition),
        // Step far enough to catch up to where the elapsed time says we should
        // be, but never past the target.
        distanceForNow = this.panVelocity * elapsedTime,
        nextDistance = Math.min(distanceForNow - distanceTravelled, distanceRemaining),
        nextLocation = Location.greatCircleLocation(currentPosition, azimuthToTarget, nextDistance,
            new Location(0, 0)),
        locationReached = false;

    this.wwd.navigator.lookAtLocation.latitude = nextLocation.latitude;
    this.wwd.navigator.lookAtLocation.longitude = nextLocation.longitude;

    // We're done if we're within a meter of the desired location.
    // (Distances here are angular; 1 / equatorialRadius radians ~ 1 meter.)
    if (nextDistance < 1 / this.wwd.globe.equatorialRadius) {
        locationReached = true;
    }

    return !locationReached;
};
export default GoToAnimator;
|
patte/styleguide | src/components/Overlay/index.js | export { default as Overlay } from './Overlay'
export { OverlayToolbar } from './OverlayToolbar'
export { default as OverlayBody } from './OverlayBody'
|
extremenelson/sirius | lucida/questionanswering/OpenEphyra/src/info/ephyra/nlp/OpenNLP.java | package info.ephyra.nlp;
import info.ephyra.util.RegexConverter;
import info.ephyra.util.StringUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import opennlp.tools.coref.LinkerMode;
import opennlp.tools.coref.mention.DefaultParse;
import opennlp.tools.coref.mention.Mention;
import opennlp.tools.lang.english.PosTagger;
import opennlp.tools.lang.english.SentenceDetector;
import opennlp.tools.lang.english.Tokenizer;
import opennlp.tools.lang.english.TreebankChunker;
import opennlp.tools.lang.english.TreebankLinker;
import opennlp.tools.lang.english.TreebankParser;
import opennlp.tools.parser.Parse;
import opennlp.tools.parser.ParserME;
import opennlp.tools.postag.POSDictionary;
/**
* <p>This class provides a common interface to the
* <a href="http://opennlp.sourceforge.net/">OpenNLP</a> toolkit.</p>
*
* <p>It supports the following natural language processing tools:
* <ul>
* <li>Sentence detection</li>
* <li>Tokenization/untokenization</li>
* <li>Part of speech (POS) tagging</li>
* <li>Chunking</li>
* <li>Full parsing</li>
* <li>Coreference resolution</li>
* </ul>
* </p>
*
* @author <NAME>
* @version 2006-05-20
*/
public class OpenNLP {
/** Pattern for abundant blanks. More specific rules come first. T.b.c. */
private static final Pattern ABUNDANT_BLANKS = Pattern.compile("(" +
"\\d (st|nd|rd)\\b" + "|" + // 1 st -> 1st
"[A-Z] \\$" + "|" + // US $ -> US$
"\\d , \\d\\d\\d\\D" + "|" + // 1 , 000 -> 1,000
"\\d (\\.|:) \\d" + "|" + // 1 . 99 -> 1.99
"\\B(\\$|€|¢|£|¥|¤) \\d" + "|" + // $ 100 -> $100
"\\d (\\$|€|¢|£|¥|¤)" + "|" + // 100 $ -> 100$
" (-|/) " + "|" + // one - third -> one-third
"(\\(|\\[|\\{) " + "|" + // ( ... ) -> (... )
" (\\.|,|:|\\)|\\]|\\})" + ")"); // Prof . -> Prof.
/** Sentence detector from the OpenNLP project. */
private static SentenceDetector sentenceDetector;
/** Tokenizer from the OpenNLP project. */
private static Tokenizer tokenizer;
/** Part of speech tagger from the OpenNLP project. */
private static PosTagger tagger;
/** Chunker from the OpenNLP project. */
private static TreebankChunker chunker;
/** Full parser from the OpenNLP project. */
private static ParserME parser;
/** Linker from the OpenNLP project. */
private static TreebankLinker linker;
/**
 * Initializes the shared sentence detector from the given model file.
 *
 * @param model path to the sentence detector model file
 * @return true iff the sentence detector was created successfully
 */
public static boolean createSentenceDetector(String model) {
    try {
        sentenceDetector = new SentenceDetector(model);
        return true;
    } catch (IOException e) {
        return false;
    }
}
/**
 * Initializes the shared tokenizer from the given model file.
 *
 * @param model path to the tokenizer model file
 * @return true iff the tokenizer was created successfully
 */
public static boolean createTokenizer(String model) {
    try {
        tokenizer = new Tokenizer(model);
        return true;
    } catch (IOException e) {
        return false;
    }
}
/**
 * Initializes the shared part-of-speech tagger from a model file and a
 * case-sensitive tag dictionary.
 *
 * @param model path to the POS tagger model file
 * @param tagdict path to the case-sensitive tag dictionary
 * @return true, iff the POS tagger was created successfully
 */
public static boolean createPosTagger(String model, String tagdict) {
    try {
        // create POS tagger, use case sensitive tag dictionary
        tagger = new PosTagger(model, new POSDictionary(tagdict, true));
    } catch (IOException e) {
        return false;
    }
    return true;
}
/**
 * Initializes the shared chunker from the given model file.
 *
 * @param model path to the chunker model file
 * @return true iff the chunker was created successfully
 */
public static boolean createChunker(String model) {
    try {
        chunker = new TreebankChunker(model);
        return true;
    } catch (IOException e) {
        return false;
    }
}
/**
 * Initializes the shared full parser from a directory containing models.
 *
 * @param dir model directory
 * @return true, iff the parser was created successfully
 */
public static boolean createParser(String dir) {
    try {
        // create parser, use default beamSize and advancePercentage
        parser = TreebankParser.getParser(dir);
    } catch (IOException e) {
        return false;
    }
    return true;
}
/**
 * Initializes the shared coreference linker from a directory containing
 * models.  Must be called before {@link #link(Parse[])}.
 *
 * @param dir model directory
 * @return true, iff the linker was created successfully
 */
public static boolean createLinker(String dir) {
    try {
        // create linker that works on unannotated text (TEST mode)
        linker = new TreebankLinker(dir, LinkerMode.TEST);
    } catch (IOException e) {
        return false;
    }
    return true;
}
/**
 * Splits a text into sentences.
 *
 * @param text sequence of sentences
 * @return array of sentences in the text, or <code>null</code> if the
 *         sentence detector has not been initialized
 */
public static String[] sentDetect(String text) {
    if (sentenceDetector == null) {
        return null;
    }
    return sentenceDetector.sentDetect(text);
}
/**
 * A model-based tokenizer used to prepare a sentence for POS tagging.
 *
 * @param text text to tokenize
 * @return array of tokens, or <code>null</code> if the tokenizer has not
 *         been initialized
 */
public static String[] tokenize(String text) {
    if (tokenizer == null) {
        return null;
    }
    return tokenizer.tokenize(text);
}
/**
 * Applies the model-based tokenizer and concatenates the tokens with
 * spaces.
 *
 * @param text text to tokenize
 * @return string of space-delimited tokens or <code>null</code>, if the
 *         tokenizer is not initialized
 */
public static String tokenizeWithSpaces(String text) {
    String[] tokens = tokenize(text);
    // tokenize() returns null when the tokenizer has not been created yet.
    return (tokens != null) ? StringUtils.concatWithSpaces(tokens) : null;
}
/**
 * <p>Untokenizes a text by removing abundant blanks.</p>
 *
 * <p>Note that it is not guaranteed that this method exactly reverts the
 * effect of <code>tokenize()</code>.</p>
 *
 * @param text text to untokenize
 * @return text without abundant blanks
 */
public static String untokenize(String text) {
    Matcher m = ABUNDANT_BLANKS.matcher(text);
    while (m.find()) {
        // Collapse the blanks in each matched span.  String.replace() is
        // equivalent to the original split-and-concatenate loop but avoids
        // quadratic string building.  The matcher keeps scanning the
        // CharSequence it was created on; only m.group(0) is used below,
        // so reassigning 'text' here is safe (as in the original).
        String match = m.group(0);
        text = text.replace(match, match.replace(" ", ""));
    }
    return text;
}
/**
 * <p>Untokenizes a text by mapping it to a string that contains the
 * original text as a subsequence.</p>
 *
 * <p>Note that it is not guaranteed that this method exactly reverts the
 * effect of <code>tokenize()</code>.</p>
 *
 * @param text text to untokenize
 * @param original string that contains the original text as a subsequence
 * @return subsequence of the original string or the input text, iff there
 *         is no such subsequence
 */
public static String untokenize(String text, String original) {
    // try with boundary matchers
    // (escape the text, then let each space match any run of whitespace)
    String regex = RegexConverter.strToRegexWithBounds(text);
    regex = regex.replace(" ", "\\s*+");
    Matcher m = Pattern.compile(regex).matcher(original);
    if (m.find()) return m.group(0);
    // try without boundary matchers
    regex = RegexConverter.strToRegex(text);
    regex = regex.replace(" ", "\\s*+");
    m = Pattern.compile(regex).matcher(original);
    if (m.find()) return m.group(0);
    // untokenization failed: fall back to the tokenized input unchanged
    return text;
}
/**
 * Assigns POS tags to a sentence of space-delimited tokens.
 *
 * @param sentence sentence to be annotated with POS tags
 * @return tagged sentence, or <code>null</code> if the tagger has not been
 *         initialized
 */
public static String tagPos(String sentence) {
    if (tagger == null) {
        return null;
    }
    return tagger.tag(sentence);
}
/**
 * Assigns POS tags to an array of tokens that form a sentence.
 *
 * @param sentence array of tokens to be annotated with POS tags
 * @return array of POS tags, or <code>null</code> if the tagger has not
 *         been initialized
 */
public static String[] tagPos(String[] sentence) {
    if (tagger == null) {
        return null;
    }
    return tagger.tag(sentence);
}
/**
 * Assigns chunk tags to an array of tokens and POS tags.
 *
 * @param tokens array of tokens
 * @param pos array of corresponding POS tags
 * @return array of chunk tags, or <code>null</code> if the chunker has not
 *         been initialized
 */
public static String[] tagChunks(String[] tokens, String[] pos) {
    if (chunker == null) {
        return null;
    }
    return chunker.chunk(tokens, pos);
}
/**
 * Performs a full parsing on a sentence of space-delimited tokens.
 *
 * @param sentence the sentence
 * @return parse of the sentence or <code>null</code>, if the parser is not
 *         initialized or the sentence is empty
 */
public static Parse parse(String sentence) {
    return (parser != null && sentence.length() > 0)
        // only get first parse (that is most likely to be correct)
        ? TreebankParser.parseLine(sentence, parser, 1)[0]
        : null;
}
/**
 * Identifies coreferences in an array of full parses of sentences.
 *
 * <p>NOTE(review): the collected mentions are currently discarded (see the
 * commented-out code at the end), so the only lasting effect is the
 * insertion of synthetic "NML" constituents for mentions that lacked one.
 * Also assumes {@link #createLinker(String)} was called first; otherwise
 * the linker reference is null and this throws a NullPointerException.</p>
 *
 * @param parses array of full parses of sentences
 */
public static void link(Parse[] parses) {
    int sentenceNumber = 0;
    List<Mention> document = new ArrayList<Mention>();
    for (Parse parse : parses) {
        DefaultParse dp = new DefaultParse(parse, sentenceNumber);
        Mention[] extents = linker.getMentionFinder().getMentions(dp);
        //construct new parses for mentions which do not have constituents
        for (int i = 0; i < extents.length; i++)
            if (extents[i].getParse() == null) {
                // create a minimal "NML" node covering the mention's span
                Parse snp = new Parse(parse.getText(), extents[i].getSpan(),
                    "NML", 1.0);
                parse.insert(snp);
                extents[i].setParse(new DefaultParse(snp,sentenceNumber));
            }
        document.addAll(Arrays.asList(extents));
        sentenceNumber++;
    }
    if (document.size() > 0) {
        // Mention[] ms = document.toArray(new Mention[document.size()]);
        // DiscourseEntity[] entities = linker.getEntities(ms);
        // TODO return results in an appropriate data structure
    }
}
/** Function words that must not be absorbed into a larger noun phrase by
 *  {@link #joinNounPhrases(String[], String[])}: an NP-PP-NP sequence is
 *  only merged when its connecting word is NOT in this set. */
private static HashSet<String> unJoinablePrepositions = new HashSet<String>();
static {
    unJoinablePrepositions.add("that");
    unJoinablePrepositions.add("than");
    unJoinablePrepositions.add("which");
    unJoinablePrepositions.add("whose");
    unJoinablePrepositions.add("if");
    unJoinablePrepositions.add("such");
    unJoinablePrepositions.add("whether");
    unJoinablePrepositions.add("when");
    unJoinablePrepositions.add("where");
    unJoinablePrepositions.add("who");
}
public static String[] joinNounPhrases(String[] tokens, String[] chunkTags) {
if (chunkTags.length < 2) return chunkTags;
String[] newChunkTags = new String[chunkTags.length];
newChunkTags[0] = chunkTags[0];
for (int t = 1; t < chunkTags.length; t++) {
if ("B-NP".equals(chunkTags[t]) && ("B-NP".equals(chunkTags[t - 1]) || "I-NP".equals(chunkTags[t - 1]))) {
newChunkTags[t] = "I-NP";
} else if ((t != 1) && "B-NP".equals(chunkTags[t]) && "B-PP".equals(chunkTags[t - 1]) && !unJoinablePrepositions.contains(tokens[t-1]) && ("B-NP".equals(chunkTags[t - 2]) || "I-NP".equals(chunkTags[t - 2]))) {
newChunkTags[t - 1] = "I-NP";
newChunkTags[t] = "I-NP";
} else newChunkTags[t] = chunkTags[t];
}
return newChunkTags;
}
}
|
JensDerKrueger/ImageVis3D | ImageVis3D/UI/ImageVis3D_I3M.cpp | <gh_stars>0
/*
For more information, please see: http://software.sci.utah.edu
The MIT License
Copyright (c) 2008 Scientific Computing and Imaging Institute,
University of Utah.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/
//! File : ImageVis3D_I3M.cpp
//! Author : <NAME>
//! DFKI & MMCI, Saarbruecken
//! SCI Institute, University of Utah
//! Date : July 2009
//
//! Copyright (C) 2009 DFKI, MMCI, SCI Institute
#include "ImageVis3D.h"
#include "../Tuvok/Basics/SysTools.h"
#include "../Tuvok/Controller/Controller.h"
#include "../Tuvok/LuaScripting/TuvokSpecific/LuaTuvokTypes.h"
#include "../Tuvok/LuaScripting/TuvokSpecific/LuaDatasetProxy.h"
#include <QtWidgets/QFileDialog>
#include <QtCore/QSettings>
#include <QtWidgets/QMessageBox>
#include "PleaseWait.h"
#include "Renderer/RenderMesh.h" // we only need this include for proper down cast from RenderMesh to Mesh
#include "MobileGeoConverter.h"
#include "../IO/ZipFile.h"
using namespace tuvok;
using namespace std;
std::wstring MainWindow::ConvertTF(const std::wstring& strSource1DTFilename,
                                   const std::wstring& strTargetDir,
                                   const std::wstring& strTargetFullFilename,
                                   PleaseWaitDialog& pleaseWait) {
  pleaseWait.SetText("Converting transfer function, please wait ...");

  // Derive "<volume name>.i3m.1dt" inside the target directory.
  const std::wstring targetName = SysTools::ChangeExt(
    SysTools::GetFilename(strTargetFullFilename),L"i3m.1dt"
  );
  const std::wstring strTarget1DTFilename = strTargetDir+L"/"+targetName;
  MESSAGE("Converting transferfunction to %s", SysTools::toNarrow(strTarget1DTFilename).c_str());

  TransferFunction1D tfIn(strSource1DTFilename);
  if (tfIn.GetSize() > 256) {
    // Resample down to an 8bit (256 entry) table.
    tfIn.Resample(256);
  } else {
    // Already 8bit or smaller: pad (or truncate) to exactly 256 entries.
    tfIn.FillOrTruncate(256);
  }

  // Empty string signals failure to the caller.
  if (!tfIn.Save(strTarget1DTFilename)) return L"";

  MESSAGE("Saved 8bit transferfunction to %s",SysTools::toNarrow(strTarget1DTFilename).c_str());
  return strTarget1DTFilename;
}
// Exports a UVF dataset to the mobile I3M format inside strTargetDir.
// Picks the smallest LOD whose every dimension is >= 128 (falls back to the
// finest LOD if none qualifies). Returns the target filename, or an empty
// string on failure / non-UVF input.
std::wstring MainWindow::ConvertDataToI3M(LuaClassInstance currentDataset,
                                          const std::wstring& strTargetDir,
                                          PleaseWaitDialog& pleaseWait,
                                          bool bOverrideExisting) {
  shared_ptr<LuaScripting> ss = m_MasterController.LuaScript();
  if (ss->cexecRet<LuaDatasetProxy::DatasetType>(
        currentDataset.fqName() + ".getDSType") != LuaDatasetProxy::UVF) {
    T_ERROR("MainWindow::ConvertDataToI3M can only accept UVF datasets.");
    return L"";
  }
  std::wstring dsFilename = ss->cexecRet<wstring>(currentDataset.fqName() + ".fullpath");
  pleaseWait.SetText(QString("Converting:") + QString::fromStdWString(dsFilename));
  // UVF to I3M
  // first, find the smallest LOD with every dimension
  // larger or equal to 128 (if possible)
  int iLODLevel = static_cast<int>(
      ss->cexecRet<uint64_t>(currentDataset.fqName() + ".getLODLevelCount")) - 1;
  // Walk from coarsest (highest index) towards finest until large enough.
  for (;iLODLevel>0;iLODLevel--) {
    UINTVECTOR3 vLODSize = UINTVECTOR3(ss->cexecRet<UINT64VECTOR3>(
        currentDataset.fqName() + ".getDomainSize",
        static_cast<size_t>(iLODLevel), size_t(0)));
    if (vLODSize.x >= 128 &&
        vLODSize.y >= 128 &&
        vLODSize.z >= 128) break;
  }
  wstring filenameOnly = SysTools::GetFilename(dsFilename);
  wstring strTargetFilename = strTargetDir+L"/"+
                              SysTools::ChangeExt(filenameOnly,L"i3m");
  // Avoid clobbering an existing export unless explicitly requested.
  if (!bOverrideExisting && SysTools::FileExists(strTargetFilename)) {
    strTargetFilename = SysTools::FindNextSequenceName(
                          strTargetFilename
                        );
  }
  if (!ss->cexecRet<bool>("tuvok.io.exportDataset",
                          currentDataset,
                          static_cast<uint64_t>(iLODLevel),
                          strTargetFilename,
                          m_strTempDir))
  {
    return L"";
  }
  return strTargetFilename;
}
namespace {
  // Merges all meshes of the given primitive type (triangles or lines) into
  // a single G3D geometry; returns nullptr when no mesh of that type exists.
  template<typename T>
  shared_ptr<G3D::GeometrySoA> mergeMeshType(Mesh::EMeshType meshType, const vector<shared_ptr<T>>& meshes)
  {
    // Find first mesh with given primitive type.
    size_t m = 0;
    for (; m < meshes.size(); m++)
      if (meshes[m]->GetMeshType() == meshType)
        break;
    if (m >= meshes.size())
      return nullptr; // Primitive type not found.
    MobileGeoConverter mgc;
    // NOTE(review): ConvertToNative appears to hand back a freshly allocated
    // color array through 'colors' (presumably taken by reference) -- confirm
    // its signature before relying on this.
    float * colors = nullptr;
    shared_ptr<G3D::GeometrySoA> g3d = mgc.ConvertToNative(*meshes[m], colors, true);
    // Now the color array is handled by the g3d instance, which is a copy of
    // the first mesh. The color array will be deleted with the g3d instance.
    colors = nullptr;
    // Merge all other meshes into the first one.
    for (size_t i = m+1; i < meshes.size(); i++)
    {
      if (meshes[m]->GetMeshType() != meshes[i]->GetMeshType())
        continue;
      shared_ptr<const G3D::GeometrySoA> geo = mgc.ConvertToNative(*meshes[i], colors);
      // NOTE(review): %d with size_t arguments -- consider %zu or int casts.
      if (!G3D::merge(g3d.get(), geo.get())) {
        T_ERROR("Could not merge mesh %d with mesh %d.", i, m);
      }
      // Clean up color data that we might have created.
      if (colors != nullptr) {
        delete[] colors;
        colors = nullptr;
      }
      // The instance geo will be deleted by shared_ptr d'tor.
      // Do not call G3D::clean on it because it directly references the mesh's data.
    }
    return g3d;
  }
}
// Packages the active UVF dataset for mobile devices: converts the volume to
// I3M plus an 8bit transfer function, zips them into the chosen server
// folder, and exports the currently rendered meshes as zipped G3D files.
// Fix: the original fell through after a failed volume/TF conversion and
// tried to zip nonexistent temp files; both failure paths now return early
// (the second one also removes the already converted temp volume).
void MainWindow::TransferToI3M() {
  if (!m_pActiveRenderWin) return;

  shared_ptr<LuaScripting> ss(m_MasterController.LuaScript());
  LuaClassInstance ds = m_pActiveRenderWin->GetRendererDataset();
  if (ss->cexecRet<LuaDatasetProxy::DatasetType>(
      ds.fqName() + ".getDSType") == LuaDatasetProxy::UVF) {
    QSettings settings;
    QString strLastDir = settings.value("Folders/I3MServer",
                                        ".").toString();
    QString directoryName =
      QFileDialog::getExistingDirectory(this, "Select Dataset Server folder.",
                                        strLastDir);
    if (directoryName.isEmpty()) return;
    wstring strTargetDir = directoryName.toStdWString();
    settings.setValue("Folders/I3MServer", directoryName);

    PleaseWaitDialog pleaseWait(this);
    pleaseWait.SetText("Preparing data ...");
    pleaseWait.AttachLabel(&m_MasterController);

    // Convert into the temp dir first; files are zipped into the target dir
    // below and the temp copies removed afterwards.
    wstring tempVolume = ConvertDataToI3M(ds, m_strTempDir, pleaseWait, true);
    if (tempVolume == L"") {
      QMessageBox errorMessage;
      errorMessage.setText("Unable to convert the dataset "
                           "into the given directory.");
      errorMessage.setIcon(QMessageBox::Critical);
      errorMessage.exec();
      T_ERROR("Unable to convert the dataset "
              "into the given directory.");
      // Without a converted volume there is nothing to package.
      return;
    }

    wstring strTemp1DTFilename = m_strTempDir + L"i3mexport.1dt";
    m_1DTransferFunction->SaveToFile(QString::fromStdWString(strTemp1DTFilename));
    wstring tempTF = ConvertTF(strTemp1DTFilename, m_strTempDir,
                               ss->cexecRet<wstring>(ds.fqName() + ".fullpath"),
                               pleaseWait);
    SysTools::RemoveFile(strTemp1DTFilename);
    if (tempTF == L"") {
      QMessageBox errorMessage;
      errorMessage.setText("Unable to convert the transferfunction "
                           "into the given directory.");
      errorMessage.setIcon(QMessageBox::Critical);
      errorMessage.exec();
      T_ERROR("Unable to convert the transferfunction "
              "into the given directory.");
      // Bail out and clean up the already converted temp volume.
      SysTools::RemoveFile(tempVolume);
      return;
    }

    wstring filenameOnly = SysTools::RemoveExt(SysTools::GetFilename(
                             ss->cexecRet<wstring>(ds.fqName() + ".fullpath")));

    // zip volume and TF together
    ZipFile i3m;
    i3m.openZip(SysTools::toNarrow(strTargetDir + L"/" + filenameOnly + L".i3m.zip"));
    i3m.copyFileToZip(SysTools::toNarrow(tempVolume), SysTools::toNarrow(filenameOnly + L".i3m"));
    i3m.copyFileToZip(SysTools::toNarrow(tempTF), SysTools::toNarrow(filenameOnly + L".1dt"));
    i3m.close();

    // remove temp files
    SysTools::RemoveFile(tempVolume);
    SysTools::RemoveFile(tempTF);

    pleaseWait.SetText("Exporting Meshes ...");
#if 0
    // Old version:
    // Export the mesh as stored on disk.
    const std::vector<std::shared_ptr<Mesh>> meshes =
      ss->cexecRet<std::vector<std::shared_ptr<Mesh>>>(
          ds.fqName() + ".getMeshes");
#else
    // Changed by Alex:
    // Export the visible rendered mesh with updated colors as we see it.
    // The export will bake the colors into the exported mesh anyway.
    const std::vector<shared_ptr<RenderMesh>> meshes = m_pActiveRenderWin->GetRendererMeshes();
#endif
    shared_ptr<G3D::GeometrySoA> triangles = mergeMeshType(Mesh::MT_TRIANGLES, meshes);
    shared_ptr<G3D::GeometrySoA> lines = mergeMeshType(Mesh::MT_LINES, meshes);

    // write to temp files
    if (triangles != nullptr) {
      G3D::write(SysTools::toNarrow(m_strTempDir + L"/" + filenameOnly + L".triangles.g3d"), triangles.get());
      G3D::clean(triangles.get());
    }
    if (lines != nullptr) {
      G3D::write(SysTools::toNarrow(m_strTempDir + L"/" + filenameOnly + L".lines.g3d"), lines.get());
      G3D::clean(lines.get());
    }

    // zip the files
    ZipFile g3d;
    g3d.openZip(SysTools::toNarrow(strTargetDir + L"/" + filenameOnly + L".g3d.zip"));
    g3d.copyFileToZip(SysTools::toNarrow(m_strTempDir + L"/" + filenameOnly + L".triangles.g3d"), SysTools::toNarrow(filenameOnly + L".triangles.g3d"));
    g3d.copyFileToZip(SysTools::toNarrow(m_strTempDir + L"/" + filenameOnly + L".lines.g3d"), SysTools::toNarrow(filenameOnly + L".lines.g3d"));
    g3d.close();

    // remove temp files
    SysTools::RemoveFile(m_strTempDir + L"/" + filenameOnly + L".triangles.g3d");
    SysTools::RemoveFile(m_strTempDir + L"/" + filenameOnly + L".lines.g3d");
  } else {
    QMessageBox errorMessage;
    errorMessage.setText("ImageVis3D Mobile Device Transfer only supported for UVF datasets.");
    errorMessage.setIcon(QMessageBox::Critical);
    errorMessage.exec();
    T_ERROR("ImageVis3D Mobile Device Transfer only supported for UVF datasets.");
  }
}
|
hskang9/fabric-chaincode-evm | vendor/github.com/hyperledger/fabric-sdk-go/pkg/fabsdk/defpkgsuite.go | /*
Copyright SecureKey Technologies Inc. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package fabsdk
import (
"github.com/hyperledger/fabric-sdk-go/api/apilogging"
"github.com/hyperledger/fabric-sdk-go/def/factory/defclient"
"github.com/hyperledger/fabric-sdk-go/def/factory/defcore"
"github.com/hyperledger/fabric-sdk-go/def/factory/defsvc"
apisdk "github.com/hyperledger/fabric-sdk-go/pkg/fabsdk/api"
"github.com/hyperledger/fabric-sdk-go/pkg/logging/modlog"
)
// defPkgSuite supplies the SDK's stock factory implementation for every
// pluggable package area.
type defPkgSuite struct{}

// Core returns the default core provider factory.
func (ps *defPkgSuite) Core() (apisdk.CoreProviderFactory, error) {
	return defcore.NewProviderFactory(), nil
}

// Service returns the default service provider factory.
func (ps *defPkgSuite) Service() (apisdk.ServiceProviderFactory, error) {
	return defsvc.NewProviderFactory(), nil
}

// Context returns the default organization client factory.
func (ps *defPkgSuite) Context() (apisdk.OrgClientFactory, error) {
	return defclient.NewOrgClientFactory(), nil
}

// Session returns the default session client factory.
func (ps *defPkgSuite) Session() (apisdk.SessionClientFactory, error) {
	return defclient.NewSessionClientFactory(), nil
}

// Logger returns the default logging provider.
func (ps *defPkgSuite) Logger() (apilogging.LoggerProvider, error) {
	return modlog.LoggerProvider(), nil
}
|
ebase-projects/ebase-boot | ebase-boot-project/ebase-boot-core/ebase-boot-core-security/src/main/java/me/dwliu/framework/core/security/crypto/CustomPasswordEncoderFactories.java | /*
* Copyright 2002-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.dwliu.framework.core.security.crypto;
import org.springframework.security.crypto.argon2.Argon2PasswordEncoder;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.DelegatingPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.crypto.password.Pbkdf2PasswordEncoder;
import org.springframework.security.crypto.scrypt.SCryptPasswordEncoder;
import java.util.HashMap;
import java.util.Map;
/**
* Used for creating {@link PasswordEncoder} instances
*
* @author <NAME>
* @since 5.0
*/
public class CustomPasswordEncoderFactories {

	/**
	 * Builds a {@link DelegatingPasswordEncoder} that encodes new passwords
	 * with bcrypt while still matching hashes produced by any of the other
	 * registered encoders (selected via the {@code {id}} prefix of a stored
	 * hash).
	 *
	 * @return the delegating encoder with bcrypt as the default scheme
	 */
	public static PasswordEncoder createDelegatingPasswordEncoder() {
		String defaultId = "bcrypt";
		Map<String, PasswordEncoder> idToEncoder = new HashMap<>();
		idToEncoder.put(defaultId, new BCryptPasswordEncoder());
		idToEncoder.put("ldap", new org.springframework.security.crypto.password.LdapShaPasswordEncoder());
		idToEncoder.put("MD4", new org.springframework.security.crypto.password.Md4PasswordEncoder());
		// Unsalted MD5, kept only for legacy projects that stored bare MD5 hashes.
		idToEncoder.put("md5", new Md5PasswordEncoder());
		idToEncoder.put("MD5", new org.springframework.security.crypto.password.MessageDigestPasswordEncoder("MD5"));
		// Local copy of Spring's no-op encoder.
		idToEncoder.put("noop", NoOpPasswordEncoder.getInstance());
		idToEncoder.put("pbkdf2", new Pbkdf2PasswordEncoder());
		idToEncoder.put("scrypt", new SCryptPasswordEncoder());
		idToEncoder.put("SHA-1", new org.springframework.security.crypto.password.MessageDigestPasswordEncoder("SHA-1"));
		idToEncoder.put("SHA-256", new org.springframework.security.crypto.password.MessageDigestPasswordEncoder("SHA-256"));
		idToEncoder.put("sha256", new org.springframework.security.crypto.password.StandardPasswordEncoder());
		idToEncoder.put("argon2", new Argon2PasswordEncoder());
		return new DelegatingPasswordEncoder(defaultId, idToEncoder);
	}

	/** Static factory holder; not instantiable. */
	private CustomPasswordEncoderFactories() {
	}
}
|
Lex-DRL/renderdoc-py-stubs | _pycharm_skeletons/renderdoc/GLFixedVertexProcessing.py | <reponame>Lex-DRL/renderdoc-py-stubs
# encoding: utf-8
# module renderdoc
# from P:\1-Scripts\_Python\Py-Autocomplete\renderdoc.pyd
# by generator 1.146
# no doc
# imports
import enum as __enum
from .SwigPyObject import SwigPyObject
class GLFixedVertexProcessing(SwigPyObject):
    """ Describes the setup for fixed vertex processing operations. """
    # NOTE: auto-generated SWIG/PyCharm skeleton stub -- real signatures are
    # unknown to the generator, so every method takes (*args, **kwargs) and
    # every body is a placeholder `pass`.
    def __eq__(self, *args, **kwargs): # real signature unknown
        """ Return self==value. """
        pass

    def __ge__(self, *args, **kwargs): # real signature unknown
        """ Return self>=value. """
        pass

    def __gt__(self, *args, **kwargs): # real signature unknown
        """ Return self>value. """
        pass

    def __hash__(self, *args, **kwargs): # real signature unknown
        """ Return hash(self). """
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    def __le__(self, *args, **kwargs): # real signature unknown
        """ Return self<=value. """
        pass

    def __lt__(self, *args, **kwargs): # real signature unknown
        """ Return self<value. """
        pass

    @staticmethod # known case of __new__
    def __new__(*args, **kwargs): # real signature unknown
        """ Create and return a new object.  See help(type) for accurate signature. """
        pass

    def __ne__(self, *args, **kwargs): # real signature unknown
        """ Return self!=value. """
        pass

    # The attributes below are SWIG-backed properties; the bare strings that
    # follow some of them are the generator's documentation placeholders.
    clipNegativeOneToOne = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """
    ``True`` if the clip-space Z goes from ``-1`` to ``1``.
    ``False`` if the clip-space Z goes from ``0`` to ``1``.
    """

    clipOriginLowerLeft = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """
    ``True`` if the clipping origin should be in the lower left.
    ``False`` if it's in the upper left.
    """

    clipPlanes = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """A list of ``bool`` determining which user clipping planes are enabled."""

    defaultInnerLevel = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """A list of ``float`` giving the default inner level of tessellation."""

    defaultOuterLevel = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """A list of ``float`` giving the default outer level of tessellation."""

    discard = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """``True`` if primitives should be discarded during rasterization."""

    this = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default

    thisown = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default

    __dict__ = None # (!) real value is ''
|
becm/mpt-base | mptcore/event/notify_dispatch.c | /*!
* init/fini notifier.
*/
#include <stdlib.h>
#include <inttypes.h>
#include "../mptio/notify.h"
#include "output.h"
#include "event.h"
/* Event sink bound to the notifier: forwards events to the dispatch
 * controller and, per the finalize convention, tears it down when called
 * with ev == 0. */
static int dispatchEvent(void *arg, MPT_STRUCT(event) *ev)
{
	MPT_STRUCT(dispatch) *disp = arg;
	
	if (!ev) {
		mpt_dispatch_fini(disp);
		free(disp);
		return 0;
	}
	/* process reply notification */
	if (ev->id) {
		if (ev->msg) {
			/* fix: the '%' conversion before PRIxPTR was missing, so the
			 * id argument was never formatted into the log message */
			mpt_log(0, "mpt_dispatch_emit", MPT_LOG(Error), "%s: 0x%" PRIxPTR,
			        MPT_tr("unprocessed reply id"), ev->id);
		}
		return disp->_def ? MPT_EVENTFLAG(Default) : 0;
	}
	/* trigger default event */
	if (!ev->msg) {
		ev = 0;
	}
	return mpt_dispatch_emit(disp, ev);
}
/*!
 * \ingroup mptEvent
 * \brief new notify dispatcher
 * 
 * Create a dispatch controller and register it with the
 * notification descriptor, replacing (and finalizing) any
 * previously installed controller.
 * Dispatch controller adheres to finalize convention.
 * 
 * \param no notification descriptor
 * 
 * \return event dispatch instance
 */
extern MPT_STRUCT(dispatch) *mpt_notify_dispatch(MPT_STRUCT(notify) *no)
{
	MPT_STRUCT(dispatch) *disp = malloc(sizeof(*disp));
	
	if (!disp) {
		return 0;
	}
	/* finalize and detach a previously registered controller */
	if (no->_disp.cmd) {
		no->_disp.cmd(no->_disp.arg, 0);
		no->_disp.cmd = 0;
	}
	mpt_dispatch_init(disp);
	
	no->_disp.cmd = dispatchEvent;
	no->_disp.arg = disp;
	
	return disp;
}
|
Dbevan/SunderingShadows | d/darkwood/camps/mon/elguard.c | #include <std.h>
#include "../elfisland1.h"
inherit MONSTER;
/* Configure the elven guard NPC: identity, class/level, stats, gear
 * and combat behaviour. Runs once when the monster object is cloned. */
void create()
{
    ::create();
    set_name("guard");
    set_id(({ "elf", "guard" }));
    set_short("Elven guard");
    set_long(
        "This elven guard is dedicated to protecting villagers and the" +
        " paths leading to the elven village of Synoria."
    );
    set_class("fighter");
    set_guild_level("fighter", 15);
    set_mlevel("fighter", 15);
    set_race("elf");
    set_body_type("humanoid");
    set_gender("male");
    set_hd(15, 8);
    set_alignment(7);
    set_max_hp(145);
    set_hp(145);
    set_overall_ac(2);
    set_languages(({ "elven" }));
    command("speak elven");
    set_size(2);
    set_exp(2500);
    /* dual wield: one blade per hand */
    set_wielding_limbs(({ "right hand", "left hand" }));
    new(WEAP + "longsword")->move(TO);
    command("wield sword in left hand");
    new(WEAP + "shortsword")->move(TO);
    command("wield sword in right hand");
    set_property("full attacks", 1);
    set_stats("strength", 16);
    set_stats("constitution", 16);
    set_stats("intelligence", 15);
    set_stats("wisdom", 13);
    set_stats("charisma", 14);
    set_stats("dexterity", 18);
    /* 45% chance per round to trigger one of the combat funcs below
     * (rush listed twice, so it fires twice as often as flash) */
    set_func_chance(45);
    set_funcs(({ "flashit", "rushit", "rushit" }));
    add_search_path("/cmds/fighter");
    set("aggressive", "kill_evil");
    remove_property("swarm");
    set_monster_feats(({
        "parry",
        "powerattack",
        "shatter",
        "sunder",
        "rush"
    }));
}
/* Attack raiders on sight when they enter the guard's environment. */
void init()
{
    ::init();
    if ((string)TP->query_name() == "raider") {
        command("say %^BOLD%^%^GREEN%^Leave our village!%^RESET%^");
        command("kill raider");
    }
}

/* Combat func: use the fighter "rush" command against the target. */
void rushit(object targ)
{
    TO->force_me("rush " + targ->query_name());
}

/* Combat func: use the fighter "flash" command against the target. */
void flashit(object targ)
{
    TO->force_me("flash " + targ->query_name());
}
/* Aggression hook (set via set("aggressive", "kill_evil")): attack any
 * player whose (possibly shapeshifted/altered) race is on the evil list.
 * NOTE(review): the race chain could be a member_array() lookup on an
 * array of races -- confirm driver support before refactoring. */
void kill_evil()
{
    string race;
    object shape;
    race = TP->query_race();
    /* shapeshift/alter override the apparent race */
    if (objectp(shape = TP->query_property("shapeshifted"))) {
        race = (string)shape->query_shape_race();
    }
    if (objectp(shape = TP->query_property("altered"))) {
        race = (string)shape->query_shape_race();
    }
    if (race == "drow" || race == "half-drow" || race == "goblin" || race == "hobgoblin" ||
        race == "orc" || race == "half-orc" || race == "ogre" || race == "half-ogre" ||
        race == "kobold" || race == "ogre-mage" || race == "bugbear" || race == "wemic" ||
        race == "gnoll" ||
        race == "troll" ||
        race == "ratkin" ||
        race == "wererat") {
        force_me("say %^BOLD%^%^GREEN%^Be gone of this village you evil scum!%^RESET%^");
        force_me("kill " + TP->query_name());
        command("parry");
        return;
    }
}
|
LoongPenguin/Linux_210 | kernel/drivers/s390/scsi/zfcp_dbf.h | /*
* This file is part of the zfcp device driver for
* FCP adapters for IBM System z9 and zSeries.
*
* Copyright IBM Corp. 2008, 2009
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#ifndef ZFCP_DBF_H
#define ZFCP_DBF_H
#include <scsi/fc/fc_fcp.h>
#include "zfcp_ext.h"
#include "zfcp_fsf.h"
#include "zfcp_def.h"
#define ZFCP_DBF_TAG_SIZE 4
#define ZFCP_DBF_ID_SIZE 7
#define ZFCP_DBF_INVALID_LUN 0xFFFFFFFFFFFFFFFFull
/* carrier record for oversized trace payloads, emitted in chunks */
struct zfcp_dbf_dump {
	u8 tag[ZFCP_DBF_TAG_SIZE];
	u32 total_size; /* size of total dump data */
	u32 offset; /* how much data has been dumped already */
	u32 size; /* how much data comes with this record */
	u8 data[]; /* dump data */
} __attribute__ ((packed));
/* state counters of the error recovery thread */
struct zfcp_dbf_rec_record_thread {
	u32 total;
	u32 ready;
	u32 running;
};

/* identity and state of the object targeted by recovery */
struct zfcp_dbf_rec_record_target {
	u64 ref;
	u32 status;
	u32 d_id;
	u64 wwpn;
	u64 fcp_lun;
	u32 erp_count;
};

/* why and against what a recovery action was triggered */
struct zfcp_dbf_rec_record_trigger {
	u8 want;
	u8 need;
	u32 as;
	u32 ps;
	u32 us;
	u64 ref;
	u64 action;
	u64 wwpn;
	u64 fcp_lun;
};

/* progress of an individual recovery action */
struct zfcp_dbf_rec_record_action {
	u32 status;
	u32 step;
	u64 action;
	u64 fsf_req;
};

/* one recovery trace record; 'id' selects the union member (see enum) */
struct zfcp_dbf_rec_record {
	u8 id;
	char id2[7];
	union {
		struct zfcp_dbf_rec_record_action action;
		struct zfcp_dbf_rec_record_thread thread;
		struct zfcp_dbf_rec_record_target target;
		struct zfcp_dbf_rec_record_trigger trigger;
	} u;
};

/* discriminator values for zfcp_dbf_rec_record.id */
enum {
	ZFCP_REC_DBF_ID_ACTION,
	ZFCP_REC_DBF_ID_THREAD,
	ZFCP_REC_DBF_ID_TARGET,
	ZFCP_REC_DBF_ID_TRIGGER,
};
/* trace data for a completed FSF request/response pair */
struct zfcp_dbf_hba_record_response {
	u32 fsf_command;
	u64 fsf_reqid;
	u32 fsf_seqno;
	u64 fsf_issued;
	u32 fsf_prot_status;
	u32 fsf_status;
	u8 fsf_prot_status_qual[FSF_PROT_STATUS_QUAL_SIZE];
	u8 fsf_status_qual[FSF_STATUS_QUALIFIER_SIZE];
	u32 fsf_req_status;
	u8 sbal_first;
	u8 sbal_last;
	u8 sbal_response;
	u8 pool;
	u64 erp_action;
	/* command-specific detail, selected by fsf_command */
	union {
		struct {
			u64 cmnd;
			u64 serial;
		} fcp;
		struct {
			u64 wwpn;
			u32 d_id;
			u32 port_handle;
		} port;
		struct {
			u64 wwpn;
			u64 fcp_lun;
			u32 port_handle;
			u32 lun_handle;
		} unit;
		struct {
			u32 d_id;
		} els;
	} u;
} __attribute__ ((packed));

/* trace data for an unsolicited status notification from the adapter */
struct zfcp_dbf_hba_record_status {
	u8 failed;
	u32 status_type;
	u32 status_subtype;
	struct fsf_queue_designator
	 queue_designator;
	u32 payload_size;
#define ZFCP_DBF_UNSOL_PAYLOAD 80
#define ZFCP_DBF_UNSOL_PAYLOAD_SENSE_DATA_AVAIL 32
#define ZFCP_DBF_UNSOL_PAYLOAD_BIT_ERROR_THRESHOLD 56
#define ZFCP_DBF_UNSOL_PAYLOAD_FEATURE_UPDATE_ALERT 2 * sizeof(u32)
	u8 payload[ZFCP_DBF_UNSOL_PAYLOAD];
} __attribute__ ((packed));

/* trace data for a QDIO error condition */
struct zfcp_dbf_hba_record_qdio {
	u32 qdio_error;
	u8 sbal_index;
	u8 sbal_count;
} __attribute__ ((packed));

/* one HBA trace record; 'tag' selects the union member */
struct zfcp_dbf_hba_record {
	u8 tag[ZFCP_DBF_TAG_SIZE];
	u8 tag2[ZFCP_DBF_TAG_SIZE];
	union {
		struct zfcp_dbf_hba_record_response response;
		struct zfcp_dbf_hba_record_status status;
		struct zfcp_dbf_hba_record_qdio qdio;
		struct fsf_bit_error_payload berr;
	} u;
} __attribute__ ((packed));
/* header fields of an outgoing Generic Services (CT) request */
struct zfcp_dbf_san_record_ct_request {
	u16 cmd_req_code;
	u8 revision;
	u8 gs_type;
	u8 gs_subtype;
	u8 options;
	u16 max_res_size;
	u32 len;
	u32 d_id;
} __attribute__ ((packed));

/* header fields of an incoming Generic Services (CT) response */
struct zfcp_dbf_san_record_ct_response {
	u16 cmd_rsp_code;
	u8 revision;
	u8 reason_code;
	u8 expl;
	u8 vendor_unique;
	u16 max_res_size;
	u32 len;
} __attribute__ ((packed));

/* destination of an extended link service (ELS) frame */
struct zfcp_dbf_san_record_els {
	u32 d_id;
} __attribute__ ((packed));

/* one SAN trace record; 'tag' selects the union member */
struct zfcp_dbf_san_record {
	u8 tag[ZFCP_DBF_TAG_SIZE];
	u64 fsf_reqid;
	u32 fsf_seqno;
	union {
		struct zfcp_dbf_san_record_ct_request ct_req;
		struct zfcp_dbf_san_record_ct_response ct_resp;
		struct zfcp_dbf_san_record_els els;
	} u;
} __attribute__ ((packed));

#define ZFCP_DBF_SAN_MAX_PAYLOAD 1024
/* trace data for a SCSI command and its FCP response */
struct zfcp_dbf_scsi_record {
	u8 tag[ZFCP_DBF_TAG_SIZE];
	u8 tag2[ZFCP_DBF_TAG_SIZE];
	u32 scsi_id;
	u32 scsi_lun;
	u32 scsi_result;
	u64 scsi_cmnd;
	u64 scsi_serial;
#define ZFCP_DBF_SCSI_OPCODE 16
	u8 scsi_opcode[ZFCP_DBF_SCSI_OPCODE];
	u8 scsi_retries;
	u8 scsi_allowed;
	u64 fsf_reqid;
	u32 fsf_seqno;
	u64 fsf_issued;
	u64 old_fsf_reqid; /* request superseded by an abort, if any */
	u8 rsp_validity;
	u8 rsp_scsi_status;
	u32 rsp_resid;
	u8 rsp_code;
#define ZFCP_DBF_SCSI_FCP_SNS_INFO 16
#define ZFCP_DBF_SCSI_MAX_FCP_SNS_INFO 256
	u32 sns_info_len;
	u8 sns_info[ZFCP_DBF_SCSI_FCP_SNS_INFO];
} __attribute__ ((packed));

/* per-adapter debug state: one trace area (with lock and scratch record
 * buffer) for each of the four trace categories */
struct zfcp_dbf {
	debug_info_t *rec;
	debug_info_t *hba;
	debug_info_t *san;
	debug_info_t *scsi;
	spinlock_t rec_lock;
	spinlock_t hba_lock;
	spinlock_t san_lock;
	spinlock_t scsi_lock;
	struct zfcp_dbf_rec_record rec_buf;
	struct zfcp_dbf_hba_record hba_buf;
	struct zfcp_dbf_san_record san_buf;
	struct zfcp_dbf_scsi_record scsi_buf;
	struct zfcp_adapter *adapter;
};
/* record an FSF response trace entry iff 'level' passes the hba area's
 * configured trace level (smaller value = more important record) */
static inline
void zfcp_dbf_hba_fsf_resp(const char *tag2, int level,
			   struct zfcp_fsf_req *req, struct zfcp_dbf *dbf)
{
	if (level <= dbf->hba->level)
		_zfcp_dbf_hba_fsf_response(tag2, level, req, dbf);
}

/**
 * zfcp_dbf_hba_fsf_response - trace event for request completion
 * @fsf_req: request that has been completed
 *
 * Classifies the completion (protocol error, FSF error, open, qtcb log,
 * normal) and traces it with a matching tag and verbosity level.
 */
static inline void zfcp_dbf_hba_fsf_response(struct zfcp_fsf_req *req)
{
	struct zfcp_dbf *dbf = req->adapter->dbf;
	struct fsf_qtcb *qtcb = req->qtcb;

	if ((qtcb->prefix.prot_status != FSF_PROT_GOOD) &&
	    (qtcb->prefix.prot_status != FSF_PROT_FSF_STATUS_PRESENTED)) {
		zfcp_dbf_hba_fsf_resp("perr", 1, req, dbf);

	} else if (qtcb->header.fsf_status != FSF_GOOD) {
		zfcp_dbf_hba_fsf_resp("ferr", 1, req, dbf);

	} else if ((req->fsf_command == FSF_QTCB_OPEN_PORT_WITH_DID) ||
		   (req->fsf_command == FSF_QTCB_OPEN_LUN)) {
		zfcp_dbf_hba_fsf_resp("open", 4, req, dbf);

	} else if (qtcb->header.log_length) {
		zfcp_dbf_hba_fsf_resp("qtcb", 5, req, dbf);

	} else {
		zfcp_dbf_hba_fsf_resp("norm", 6, req, dbf);
	}
}
/**
 * zfcp_dbf_hba_fsf_unsol - trace event for an unsolicited status buffer
 * @tag: tag indicating which kind of unsolicited status has been received
 * @dbf: reference to dbf structure
 * @status_buffer: buffer containing payload of unsolicited status
 */
static inline
void zfcp_dbf_hba_fsf_unsol(const char *tag, struct zfcp_dbf *dbf,
			    struct fsf_status_read_buffer *buf)
{
	/* unsolicited status is always traced at level 2 */
	int level = 2;
	if (level <= dbf->hba->level)
		_zfcp_dbf_hba_fsf_unsol(tag, level, dbf, buf);
}

/* record a SCSI trace entry iff 'level' passes the scsi area's
 * configured trace level */
static inline
void zfcp_dbf_scsi(const char *tag, const char *tag2, int level,
		   struct zfcp_dbf *dbf, struct scsi_cmnd *scmd,
		   struct zfcp_fsf_req *req, unsigned long old_id)
{
	if (level <= dbf->scsi->level)
		_zfcp_dbf_scsi(tag, tag2, level, dbf, scmd, req, old_id);
}
/**
 * zfcp_dbf_scsi_result - trace event for SCSI command completion
 * @dbf: adapter dbf trace
 * @scmd: SCSI command pointer
 * @req: FSF request used to issue SCSI command
 *
 * Errors are traced at level 3, retried commands at 4, and ordinary
 * completions at 6 (i.e. only with verbose trace settings).
 */
static inline
void zfcp_dbf_scsi_result(struct zfcp_dbf *dbf, struct scsi_cmnd *scmd,
			  struct zfcp_fsf_req *req)
{
	if (scmd->result != 0)
		zfcp_dbf_scsi("rslt", "erro", 3, dbf, scmd, req, 0);
	else if (scmd->retries > 0)
		zfcp_dbf_scsi("rslt", "retr", 4, dbf, scmd, req, 0);
	else
		zfcp_dbf_scsi("rslt", "norm", 6, dbf, scmd, req, 0);
}

/**
 * zfcp_dbf_scsi_fail_send - trace event for failure to send SCSI command
 * @dbf: adapter dbf trace
 * @scmd: SCSI command pointer
 */
static inline
void zfcp_dbf_scsi_fail_send(struct zfcp_dbf *dbf, struct scsi_cmnd *scmd)
{
	/* no FSF request exists for a command that could not be sent */
	zfcp_dbf_scsi("rslt", "fail", 4, dbf, scmd, NULL, 0);
}

/**
 * zfcp_dbf_scsi_abort - trace event for SCSI command abort
 * @tag: tag indicating success or failure of abort operation
 * @adapter: adapter thas has been used to issue SCSI command to be aborted
 * @scmd: SCSI command to be aborted
 * @new_req: request containing abort (might be NULL)
 * @old_id: identifier of request containg SCSI command to be aborted
 */
static inline
void zfcp_dbf_scsi_abort(const char *tag, struct zfcp_dbf *dbf,
			 struct scsi_cmnd *scmd, struct zfcp_fsf_req *new_req,
			 unsigned long old_id)
{
	zfcp_dbf_scsi("abrt", tag, 1, dbf, scmd, new_req, old_id);
}

/**
 * zfcp_dbf_scsi_devreset - trace event for Logical Unit or Target Reset
 * @tag: tag indicating success or failure of reset operation
 * @flag: indicates type of reset (Target Reset, Logical Unit Reset)
 * @unit: unit that needs reset
 * @scsi_cmnd: SCSI command which caused this error recovery
 */
static inline
void zfcp_dbf_scsi_devreset(const char *tag, u8 flag, struct zfcp_unit *unit,
			    struct scsi_cmnd *scsi_cmnd)
{
	zfcp_dbf_scsi(flag == FCP_TMF_TGT_RESET ? "trst" : "lrst", tag, 1,
		      unit->port->adapter->dbf, scsi_cmnd, NULL, 0);
}
#endif /* ZFCP_DBF_H */
|
goodmind/FlowDefinitelyTyped | flow-types/types/p-waterfall_vx.x.x/flow_v0.25.x-/p-waterfall.js | declare module "p-waterfall" {
declare module.exports: typeof pWaterfall;
declare type Task<T, R> = (previousValue: T) => R | PromiseLike<R>;
declare type InitialTask<R> = () => R | PromiseLike<R>;
declare function pWaterfall<R>(tasks: [InitialTask<R>]): Promise<R>;
declare function pWaterfall<T1, R>(
tasks: [InitialTask<T1>, Task<T1, R>]
): Promise<R>;
declare function pWaterfall<T1, T2, R>(
tasks: [InitialTask<T1>, Task<T1, T2>, Task<T2, R>]
): Promise<R>;
declare function pWaterfall<T1, T2, T3, R>(
tasks: [InitialTask<T1>, Task<T1, T2>, Task<T2, T3>, Task<T3, R>]
): Promise<R>;
declare function pWaterfall<T1, T2, T3, T4, R>(
tasks: [
InitialTask<T1>,
Task<T1, T2>,
Task<T2, T3>,
Task<T3, T4>,
Task<T4, R>
]
): Promise<R>;
declare function pWaterfall<T1, T2, T3, T4, T5, R>(
tasks: [
InitialTask<T1>,
Task<T1, T2>,
Task<T2, T3>,
Task<T3, T4>,
Task<T4, T5>,
Task<T5, R>
]
): Promise<R>;
declare function pWaterfall<T1, T2, T3, T4, T5, T6, R>(
tasks: [
InitialTask<T1>,
Task<T1, T2>,
Task<T2, T3>,
Task<T3, T4>,
Task<T4, T5>,
Task<T5, T6>,
Task<T6, R>
]
): Promise<R>;
declare function pWaterfall<T1, T2, T3, T4, T5, T6, T7, R>(
tasks: [
InitialTask<T1>,
Task<T1, T2>,
Task<T2, T3>,
Task<T3, T4>,
Task<T4, T5>,
Task<T5, T6>,
Task<T6, T7>,
Task<T7, R>
]
): Promise<R>;
declare function pWaterfall<T1, R>(
tasks: [Task<T1, R>],
initialValue: T1
): Promise<R>;
declare function pWaterfall<T1, T2, R>(
tasks: [Task<T1, T2>, Task<T2, R>],
initialValue: T1
): Promise<R>;
declare function pWaterfall<T1, T2, T3, R>(
tasks: [Task<T1, T2>, Task<T2, T3>, Task<T3, R>],
initialValue: T1
): Promise<R>;
declare function pWaterfall<T1, T2, T3, T4, R>(
tasks: [Task<T1, T2>, Task<T2, T3>, Task<T3, T4>, Task<T4, R>],
initialValue: T1
): Promise<R>;
declare function pWaterfall<T1, T2, T3, T4, T5, R>(
tasks: [
Task<T1, T2>,
Task<T2, T3>,
Task<T3, T4>,
Task<T4, T5>,
Task<T5, R>
],
initialValue: T1
): Promise<R>;
declare function pWaterfall<T1, T2, T3, T4, T5, T6, R>(
tasks: [
Task<T1, T2>,
Task<T2, T3>,
Task<T3, T4>,
Task<T4, T5>,
Task<T5, T6>,
Task<T6, R>
],
initialValue: T1
): Promise<R>;
declare function pWaterfall<T1, T2, T3, T4, T5, T6, T7, R>(
tasks: [
Task<T1, T2>,
Task<T2, T3>,
Task<T3, T4>,
Task<T4, T5>,
Task<T5, T6>,
Task<T6, T7>,
Task<T7, R>
],
initialValue: T1
): Promise<R>;
declare function pWaterfall<T1, T2, T3, T4, T5, T6, T7, T8, R>(
tasks: [
Task<T1, T2>,
Task<T2, T3>,
Task<T3, T4>,
Task<T4, T5>,
Task<T5, T6>,
Task<T6, T7>,
Task<T7, T8>,
Task<T8, R>
],
initialValue: T1
): Promise<R>;
declare function pWaterfall(
tasks: Iterable<Task<any, any>>,
initialValue?: any
): Promise<any>;
}
|
googleapis/googleapis-gen | google/storage/v1/google-cloud-storage-v1-java/proto-google-cloud-storage-v1-java/src/main/java/com/google/storage/v1/CommonRequestParamsOrBuilder.java | <gh_stars>1-10
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/storage/v1/storage.proto
package com.google.storage.v1;
/**
 * Read-only accessor view of a {@code google.storage.v1.CommonRequestParams}
 * message, implemented by both the generated message class and its builder.
 * Generated by the protocol buffer compiler; do not edit by hand.
 */
public interface CommonRequestParamsOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.storage.v1.CommonRequestParams)
    com.google.protobuf.MessageOrBuilder {
  /**
   * <pre>
   * Required. Required when using buckets with Requestor Pays feature enabled.
   * </pre>
   *
   * <code>string user_project = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The userProject.
   */
  java.lang.String getUserProject();
  /**
   * <pre>
   * Required. Required when using buckets with Requestor Pays feature enabled.
   * </pre>
   *
   * <code>string user_project = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The bytes for userProject.
   */
  com.google.protobuf.ByteString
      getUserProjectBytes();
  /**
   * <pre>
   * Lets you enforce per-user quotas from a server-side application even in
   * cases when the user's IP address is unknown. This can occur, for example,
   * with applications that run cron jobs on App Engine on a user's behalf.
   * You can choose any arbitrary string that uniquely identifies a user, but it
   * is limited to 40 characters.
   * Overrides user_ip if both are provided.
   * </pre>
   *
   * <code>string quota_user = 2;</code>
   * @return The quotaUser.
   */
  java.lang.String getQuotaUser();
  /**
   * <pre>
   * Lets you enforce per-user quotas from a server-side application even in
   * cases when the user's IP address is unknown. This can occur, for example,
   * with applications that run cron jobs on App Engine on a user's behalf.
   * You can choose any arbitrary string that uniquely identifies a user, but it
   * is limited to 40 characters.
   * Overrides user_ip if both are provided.
   * </pre>
   *
   * <code>string quota_user = 2;</code>
   * @return The bytes for quotaUser.
   */
  com.google.protobuf.ByteString
      getQuotaUserBytes();
  /**
   * <pre>
   * Subset of fields to include in the response.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask fields = 4;</code>
   * @return Whether the fields field is set.
   */
  boolean hasFields();
  /**
   * <pre>
   * Subset of fields to include in the response.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask fields = 4;</code>
   * @return The fields.
   */
  com.google.protobuf.FieldMask getFields();
  /**
   * <pre>
   * Subset of fields to include in the response.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask fields = 4;</code>
   */
  com.google.protobuf.FieldMaskOrBuilder getFieldsOrBuilder();
}
|
bowring/ET_Redux | src/main/java/org/earthtime/Tripoli/rawDataFiles/templates/Thermo/ValenciaWashStateElementII_RawDataTemplate.java | /*
* ValenciaWashStateElementII_RawDataTemplate
*
* Copyright 2006-2018 <NAME>, CIRDLES.org, and Earth-Time.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.Tripoli.rawDataFiles.templates.Thermo;
import java.io.Serializable;
import java.util.TimeZone;
import org.earthtime.Tripoli.dataModels.inputParametersModels.AbstractAcquisitionModel;
import org.earthtime.Tripoli.dataModels.inputParametersModels.SingleCollectorAcquisition;
import org.earthtime.Tripoli.massSpecSetups.singleCollector.ThermoFinnigan.WashStateElementIISetupUPbMeth1;
import org.earthtime.Tripoli.rawDataFiles.templates.AbstractRawDataFileTemplate;
import org.earthtime.dataDictionaries.FileTypeEnum;
/**
 * Raw-data-file template for Element II runs set up in the Valencia style at
 * the Washington State lab. A serializable singleton, like the sibling
 * templates: it fixes the parser markers, block geometry, standard IDs and
 * mass-spec setup for this file format.
 */
public final class ValenciaWashStateElementII_RawDataTemplate extends AbstractRawDataFileTemplate implements //
        Comparable<AbstractRawDataFileTemplate>,
        Serializable {

    // Upper-case 'L' suffix: the original lower-case 'l' is easily misread as '1'.
    private static final long serialVersionUID = 81527961461216203L;

    // Eagerly-created singleton; final since it is never reassigned.
    private static final ValenciaWashStateElementII_RawDataTemplate instance = new ValenciaWashStateElementII_RawDataTemplate();

    private ValenciaWashStateElementII_RawDataTemplate() {
        super();
        this.NAME = "Valencia Wash State Element II";
        this.aboutInfo = "analysis runs setup by Valencia";
        this.fileType = FileTypeEnum.txt;
        // Text markers the parser uses to locate the data section and each block.
        this.startOfFirstLine = "Trace for Mass:";
        this.startOfDataSectionFirstLine = "Time";
        this.startOfEachBlockFirstLine = "Time";
        this.blockStartOffset = 6;
        this.blockSize = 300;
        // Fraction names recognized as standards for this template.
        this.standardIDs = new String[]//
                {"Plesovice", "Peixe", "91500", "FC1"};
        this.timeZone = TimeZone.getTimeZone("PST");
        this.defaultParsingOfFractionsBehavior = 0;
        this.massSpecSetup = WashStateElementIISetupUPbMeth1.getInstance();
    }

    /**
     * @return the shared singleton instance of this template
     */
    public static ValenciaWashStateElementII_RawDataTemplate getInstance() {
        return instance;
    }

    /**
     * Creates, stores and returns a fresh single-collector acquisition model
     * for this template.
     *
     * @return the newly created acquisition model
     */
    @Override
    public AbstractAcquisitionModel makeNewAcquisitionModel() {
        this.acquisitionModel = new SingleCollectorAcquisition();
        return acquisitionModel;
    }
}
|
Arun-Singh-Chauhan-09/Supply-demand-forecasting | exploredata/visualization.py | <filename>exploredata/visualization.py
from order import ExploreOrder
import matplotlib.pyplot as plt
import seaborn as sns
sns.set(color_codes=True)
from utility.datafilepath import g_singletonDataFilePath
from weather import ExploreWeather
from traffic import ExploreTraffic
import numpy as np
import math
from poi import ExplorePoi
class visualizeData(ExploreOrder, ExploreWeather, ExploreTraffic, ExplorePoi):
    """Visualization helpers for the training data.

    Combines the loaders mixed in from the explore* classes and renders
    simple matplotlib/seaborn bar charts of the supply/demand gap, the
    weather and (optionally) the traffic data.
    """

    def __init__(self):
        # Only the gap and weather frames are loaded eagerly; the traffic
        # frame and the gap-by-timeslot dict can be re-enabled on demand.
        ExploreOrder.__init__(self)
        self.gapdf = self.load_gapdf(g_singletonDataFilePath.getTrainDir())
        # self.gap_time_dict = self.gapdf.groupby('time_slotid')['gap'].sum().to_dict()
        self.weathdf = self.load_weatherdf(g_singletonDataFilePath.getTrainDir())
        # self.trafficdf = self.load_trafficdf(g_singletonDataFilePath.getTrainDir())
        return

    def disp_gap_bytimeiid(self):
        """Bar chart of the mean gap per in-day time slot (time_id)."""
        gaps_mean = self.gapdf.groupby('time_id')['gap'].mean()
        gaps_mean.plot(kind='bar')
        plt.ylabel('Mean of gap')
        # Title fixed from 'Timeslot/Correlation' for consistency with the
        # '<dimension>/Gap Correlation' titles of the sibling methods.
        plt.title('Timeslot/Gap Correlation')
        return

    def disp_gap_bydistrict(self):
        """Bar chart of the mean gap per start district.

        Note: unlike disp_gap_bydate(), this does not call plt.show();
        the caller decides when to render.
        """
        gaps_mean = self.gapdf.groupby('start_district_id')['gap'].mean()
        gaps_mean.plot(kind='bar')
        plt.ylabel('Mean of gap')
        plt.title('District/Gap Correlation')
        return

    def disp_gap_bydate(self):
        """Bar chart of the mean gap per calendar date; shows the figure."""
        gaps_mean = self.gapdf.groupby('time_date')['gap'].mean()
        gaps_mean.plot(kind='bar')
        plt.ylabel('Mean of gap')
        plt.title('Date/Gap Correlation')
        plt.show()
        return

    def find_gap_by_timeslot(self, timeslot):
        """Return the total gap recorded for ``timeslot``, or 0 when unknown.

        Depends on ``self.gap_time_dict``, whose construction is currently
        commented out in __init__.  The AttributeError raised in that case
        is deliberately swallowed (returning 0) to match the previous
        bare-except behaviour, while no longer hiding unrelated errors.
        """
        try:
            return self.gap_time_dict[timeslot]
        except (KeyError, AttributeError):
            return 0

    def show_weather_bydate(self):
        """Per date, bar-chart the weather code of each time slot; show all.

        Also annotates the weather frame with the gap per slot via
        find_gap_by_timeslot().  Subplots are laid out on a near-square
        grid sized to the number of distinct dates.
        """
        self.weathdf['gap'] = self.weathdf['time_slotid'].apply(self.find_gap_by_timeslot)
        by_date = self.weathdf.groupby('time_date')
        # Near-square grid: ceil(sqrt(n)) rows and columns.
        col_len = row_len = math.ceil(math.sqrt(len(by_date)))
        count = 1
        for name, group in by_date:
            ax = plt.subplot(row_len, col_len, count)
            ax.bar(group['time_id'], group['weather'], width=1)
            ax.set_title(name)
            count = count + 1
        plt.show()
        return

    def run(self):
        """Entry point: currently renders only the weather-by-date charts."""
        self.show_weather_bydate()
        return
# Manual entry point: build the visualizer (loads the training data) and
# render the default charts.
if __name__ == "__main__":
    obj= visualizeData()
    obj.run()
tzulberti/entrenamiento-arqueria | alembic/versions/032_razon_gasto.py | <filename>alembic/versions/032_razon_gasto.py<gh_stars>0
"""razon gasto
Revision ID: 032
Revises: 031
Create Date: 2014-10-18 07:39:43.893448
"""
# revision identifiers, used by Alembic.
revision = '032'
down_revision = '031'
import inspect
import imp
import os
from alembic import op
def upgrade():
    # Alembic version scripts are not importable as a normal package, so the
    # shared helpers in ../utils.py (relative to this migration file) are
    # located via the inspect module and loaded dynamically with
    # imp.load_source().
    utils_path = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
                              '..',
                              'utils.py')
    utils = imp.load_source('', utils_path)
    # Create the categoric lookup table 'razon_gasto' ("expense reason"),
    # pre-populated with the default reasons (values are in Spanish).
    utils.create_categoric_table('razon_gasto',
                                 ['Sueldo',
                                  'Alquiler',
                                  'Compra de materiales de arqueria',
                                  'Otras cosas'])
def downgrade():
    # Reverse of upgrade(): drop the 'razon_gasto' lookup table again.
    op.drop_table('razon_gasto')
|
manasys/grouper | grouper-misc/grouper-ui-dojo/dojo/dojox/atom/io/model.js | <filename>grouper-misc/grouper-ui-dojo/dojo/dojox/atom/io/model.js
//>>built
define("dojox/atom/io/model",["dojo/_base/kernel","dojo/_base/declare","dojo/_base/lang","dojo/date/stamp","dojox/xml/parser"],function(_1,_2,_3,_4,_5){
var _6={};
_1.setObject("dojox.atom.io.model",_6);
_6._Constants={"ATOM_URI":"http://www.w3.org/2005/Atom","ATOM_NS":"http://www.w3.org/2005/Atom","PURL_NS":"http://purl.org/atom/app#","APP_NS":"http://www.w3.org/2007/app"};
_6._actions={"link":function(_7,_8){
if(_7.links===null){
_7.links=[];
}
var _9=new _6.Link();
_9.buildFromDom(_8);
_7.links.push(_9);
},"author":function(_a,_b){
if(_a.authors===null){
_a.authors=[];
}
var _c=new _6.Person("author");
_c.buildFromDom(_b);
_a.authors.push(_c);
},"contributor":function(_d,_e){
if(_d.contributors===null){
_d.contributors=[];
}
var _f=new _6.Person("contributor");
_f.buildFromDom(_e);
_d.contributors.push(_f);
},"category":function(obj,_10){
if(obj.categories===null){
obj.categories=[];
}
var cat=new _6.Category();
cat.buildFromDom(_10);
obj.categories.push(cat);
},"icon":function(obj,_11){
obj.icon=_5.textContent(_11);
},"id":function(obj,_12){
obj.id=_5.textContent(_12);
},"rights":function(obj,_13){
obj.rights=_5.textContent(_13);
},"subtitle":function(obj,_14){
var cnt=new _6.Content("subtitle");
cnt.buildFromDom(_14);
obj.subtitle=cnt;
},"title":function(obj,_15){
var cnt=new _6.Content("title");
cnt.buildFromDom(_15);
obj.title=cnt;
},"updated":function(obj,_16){
obj.updated=_6.util.createDate(_16);
},"issued":function(obj,_17){
obj.issued=_6.util.createDate(_17);
},"modified":function(obj,_18){
obj.modified=_6.util.createDate(_18);
},"published":function(obj,_19){
obj.published=_6.util.createDate(_19);
},"entry":function(obj,_1a){
if(obj.entries===null){
obj.entries=[];
}
var _1b=obj.createEntry?obj.createEntry():new _6.Entry();
_1b.buildFromDom(_1a);
obj.entries.push(_1b);
},"content":function(obj,_1c){
var cnt=new _6.Content("content");
cnt.buildFromDom(_1c);
obj.content=cnt;
},"summary":function(obj,_1d){
var _1e=new _6.Content("summary");
_1e.buildFromDom(_1d);
obj.summary=_1e;
},"name":function(obj,_1f){
obj.name=_5.textContent(_1f);
},"email":function(obj,_20){
obj.email=_5.textContent(_20);
},"uri":function(obj,_21){
obj.uri=_5.textContent(_21);
},"generator":function(obj,_22){
obj.generator=new _6.Generator();
obj.generator.buildFromDom(_22);
}};
// Shared helpers for the atom model classes.
_6.util = {
	// Parse the text content of `node` as an ISO-8601 date; null when empty.
	createDate: function(node){
		var dateText = _5.textContent(node);
		if(dateText){
			return _4.fromISOString(_3.trim(dateText));
		}
		return null;
	},
	// Escape the five HTML-significant characters as entity references.
	// NOTE(review): this copy of the file had the replacement strings
	// entity-decoded (e.g. replacing '&' with '&', and '"' with an unclosed
	// '"""'), which made both functions broken/no-ops; the real entity
	// references are restored here.
	escapeHtml: function(str){
		return str.replace(/&/gm, "&amp;").replace(/</gm, "&lt;").replace(/>/gm, "&gt;").replace(/"/gm, "&quot;").replace(/'/gm, "&#39;");
	},
	unEscapeHtml: function(str){
		return str.replace(/&lt;/gm, "<").replace(/&gt;/gm, ">").replace(/&quot;/gm, "\"").replace(/&#39;/gm, "'").replace(/&amp;/gm, "&");
	},
	// Local (prefix-free) node name of a DOM node, or null when unavailable.
	getNodename: function(node){
		var name = null;
		if(node !== null){
			name = node.localName ? node.localName : node.nodeName;
			if(name !== null){
				var idx = name.indexOf(":");
				if(idx !== -1){
					name = name.substring(idx + 1, name.length);
				}
			}
		}
		return name;
	}
};
_6.Node=_2(null,{constructor:function(_28,_29,_2a,_2b,_2c){
this.name_space=_28;
this.name=_29;
this.attributes=[];
if(_2a){
this.attributes=_2a;
}
this.content=[];
this.rawNodes=[];
this.textContent=null;
if(_2b){
this.content.push(_2b);
}
this.shortNs=_2c;
this._objName="Node";
this.nodeType="Node";
},buildFromDom:function(_2d){
this._saveAttributes(_2d);
this.name_space=_2d.namespaceURI;
this.shortNs=_2d.prefix;
this.name=_6.util.getNodename(_2d);
for(var x=0;x<_2d.childNodes.length;x++){
var c=_2d.childNodes[x];
if(_6.util.getNodename(c)!="#text"){
this.rawNodes.push(c);
var n=new _6.Node();
n.buildFromDom(c,true);
this.content.push(n);
}else{
this.content.push(c.nodeValue);
}
}
this.textContent=_5.textContent(_2d);
},_saveAttributes:function(_2e){
if(!this.attributes){
this.attributes=[];
}
var _2f=function(_30){
var _31=_30.attributes;
if(_31===null){
return false;
}
return (_31.length!==0);
};
if(_2f(_2e)&&this._getAttributeNames){
var _32=this._getAttributeNames(_2e);
if(_32&&_32.length>0){
for(var x in _32){
var _33=_2e.getAttribute(_32[x]);
if(_33){
this.attributes[_32[x]]=_33;
}
}
}
}
},addAttribute:function(_34,_35){
this.attributes[_34]=_35;
},getAttribute:function(_36){
return this.attributes[_36];
},_getAttributeNames:function(_37){
var _38=[];
for(var i=0;i<_37.attributes.length;i++){
_38.push(_37.attributes[i].nodeName);
}
return _38;
},toString:function(){
var xml=[];
var x;
var _39=(this.shortNs?this.shortNs+":":"")+this.name;
var _3a=(this.name=="#cdata-section");
if(_3a){
xml.push("<![CDATA[");
xml.push(this.textContent);
xml.push("]]>");
}else{
xml.push("<");
xml.push(_39);
if(this.name_space){
xml.push(" xmlns='"+this.name_space+"'");
}
if(this.attributes){
for(x in this.attributes){
xml.push(" "+x+"='"+this.attributes[x]+"'");
}
}
if(this.content){
xml.push(">");
for(x in this.content){
xml.push(this.content[x]);
}
xml.push("</"+_39+">\n");
}else{
xml.push("/>\n");
}
}
return xml.join("");
},addContent:function(_3b){
this.content.push(_3b);
}});
_6.AtomItem=_2(_6.Node,{constructor:function(_3c){
this.ATOM_URI=_6._Constants.ATOM_URI;
this.links=null;
this.authors=null;
this.categories=null;
this.contributors=null;
this.icon=this.id=this.logo=this.xmlBase=this.rights=null;
this.subtitle=this.title=null;
this.updated=this.published=null;
this.issued=this.modified=null;
this.content=null;
this.extensions=null;
this.entries=null;
this.name_spaces={};
this._objName="AtomItem";
this.nodeType="AtomItem";
},_getAttributeNames:function(){
return null;
},_accepts:{},accept:function(tag){
return Boolean(this._accepts[tag]);
},_postBuild:function(){
},buildFromDom:function(_3d){
var i,c,n;
for(i=0;i<_3d.attributes.length;i++){
c=_3d.attributes.item(i);
n=_6.util.getNodename(c);
if(c.prefix=="xmlns"&&c.prefix!=n){
this.addNamespace(c.nodeValue,n);
}
}
c=_3d.childNodes;
for(i=0;i<c.length;i++){
if(c[i].nodeType==1){
var _3e=_6.util.getNodename(c[i]);
if(!_3e){
continue;
}
if(c[i].namespaceURI!=_6._Constants.ATOM_NS&&_3e!="#text"){
if(!this.extensions){
this.extensions=[];
}
var _3f=new _6.Node();
_3f.buildFromDom(c[i]);
this.extensions.push(_3f);
}
if(!this.accept(_3e.toLowerCase())){
continue;
}
var fn=_6._actions[_3e];
if(fn){
fn(this,c[i]);
}
}
}
this._saveAttributes(_3d);
if(this._postBuild){
this._postBuild();
}
},addNamespace:function(_40,_41){
if(_40&&_41){
this.name_spaces[_41]=_40;
}
},addAuthor:function(_42,_43,uri){
if(!this.authors){
this.authors=[];
}
this.authors.push(new _6.Person("author",_42,_43,uri));
},addContributor:function(_44,_45,uri){
if(!this.contributors){
this.contributors=[];
}
this.contributors.push(new _6.Person("contributor",_44,_45,uri));
},addLink:function(_46,rel,_47,_48,_49){
if(!this.links){
this.links=[];
}
this.links.push(new _6.Link(_46,rel,_47,_48,_49));
},removeLink:function(_4a,rel){
if(!this.links||!_3.isArray(this.links)){
return;
}
var _4b=0;
for(var i=0;i<this.links.length;i++){
if((!_4a||this.links[i].href===_4a)&&(!rel||this.links[i].rel===rel)){
this.links.splice(i,1);
_4b++;
}
}
return _4b;
},removeBasicLinks:function(){
if(!this.links){
return;
}
var _4c=0;
for(var i=0;i<this.links.length;i++){
if(!this.links[i].rel){
this.links.splice(i,1);
_4c++;
i--;
}
}
return _4c;
},addCategory:function(_4d,_4e,_4f){
if(!this.categories){
this.categories=[];
}
this.categories.push(new _6.Category(_4d,_4e,_4f));
},getCategories:function(_50){
if(!_50){
return this.categories;
}
var arr=[];
for(var x in this.categories){
if(this.categories[x].scheme===_50){
arr.push(this.categories[x]);
}
}
return arr;
},removeCategories:function(_51,_52){
if(!this.categories){
return;
}
var _53=0;
for(var i=0;i<this.categories.length;i++){
if((!_51||this.categories[i].scheme===_51)&&(!_52||this.categories[i].term===_52)){
this.categories.splice(i,1);
_53++;
i--;
}
}
return _53;
},setTitle:function(str,_54){
if(!str){
return;
}
this.title=new _6.Content("title");
this.title.value=str;
if(_54){
this.title.type=_54;
}
},addExtension:function(_55,_56,_57,_58,_59){
if(!this.extensions){
this.extensions=[];
}
this.extensions.push(new _6.Node(_55,_56,_57,_58,_59||"ns"+this.extensions.length));
},getExtensions:function(_5a,_5b){
var arr=[];
if(!this.extensions){
return arr;
}
for(var x in this.extensions){
if((this.extensions[x].name_space===_5a||this.extensions[x].shortNs===_5a)&&(!_5b||this.extensions[x].name===_5b)){
arr.push(this.extensions[x]);
}
}
return arr;
},removeExtensions:function(_5c,_5d){
if(!this.extensions){
return;
}
for(var i=0;i<this.extensions.length;i++){
if((this.extensions[i].name_space==_5c||this.extensions[i].shortNs===_5c)&&this.extensions[i].name===_5d){
this.extensions.splice(i,1);
i--;
}
}
},destroy:function(){
this.links=null;
this.authors=null;
this.categories=null;
this.contributors=null;
this.icon=this.id=this.logo=this.xmlBase=this.rights=null;
this.subtitle=this.title=null;
this.updated=this.published=null;
this.issued=this.modified=null;
this.content=null;
this.extensions=null;
this.entries=null;
}});
// Model of an Atom <category> element: only the scheme/term/label attributes.
_6.Category=_2(_6.Node,{constructor:function(_5e,_5f,_60){
this.scheme=_5e;
this.term=_5f;
this.label=_60;
this._objName="Category";
this.nodeType="Category";
// No post-processing is needed after buildFromDom().
},_postBuild:function(){
},_getAttributeNames:function(){
return ["label","scheme","term"];
// Serializes to a self-closing <category .../> tag, emitting only the
// attributes that are set.
},toString:function(){
var s=[];
s.push("<category ");
if(this.label){
s.push(" label=\""+this.label+"\" ");
}
if(this.scheme){
s.push(" scheme=\""+this.scheme+"\" ");
}
if(this.term){
s.push(" term=\""+this.term+"\" ");
}
s.push("/>\n");
return s.join("");
// Populates label/scheme/term from the DOM node's attributes.
},buildFromDom:function(_61){
this._saveAttributes(_61);
this.label=this.attributes.label;
this.scheme=this.attributes.scheme;
this.term=this.attributes.term;
if(this._postBuild){
this._postBuild();
}
}});
_6.Content=_2(_6.Node,{constructor:function(_62,_63,src,_64,_65){
this.tagName=_62;
this.value=_63;
this.src=src;
this.type=_64;
this.xmlLang=_65;
this.HTML="html";
this.TEXT="text";
this.XHTML="xhtml";
this.XML="xml";
this._useTextContent="true";
this.nodeType="Content";
},_getAttributeNames:function(){
return ["type","src"];
},_postBuild:function(){
},buildFromDom:function(_66){
var _67=_66.getAttribute("type");
if(_67){
_67=_67.toLowerCase();
if(_67=="xml"||"text/xml"){
_67=this.XML;
}
}else{
_67="text";
}
if(_67===this.XML){
if(_66.firstChild){
var i;
this.value="";
for(i=0;i<_66.childNodes.length;i++){
var c=_66.childNodes[i];
if(c){
this.value+=_5.innerXML(c);
}
}
}
}else{
if(_66.innerHTML){
this.value=_66.innerHTML;
}else{
this.value=_5.textContent(_66);
}
}
this._saveAttributes(_66);
if(this.attributes){
this.type=this.attributes.type;
this.scheme=this.attributes.scheme;
this.term=this.attributes.term;
}
if(!this.type){
this.type="text";
}
var _68=this.type.toLowerCase();
if(_68==="html"||_68==="text/html"||_68==="xhtml"||_68==="text/xhtml"){
this.value=this.value?_6.util.unEscapeHtml(this.value):"";
}
if(this._postBuild){
this._postBuild();
}
},toString:function(){
var s=[];
s.push("<"+this.tagName+" ");
if(!this.type){
this.type="text";
}
if(this.type){
s.push(" type=\""+this.type+"\" ");
}
if(this.xmlLang){
s.push(" xml:lang=\""+this.xmlLang+"\" ");
}
if(this.xmlBase){
s.push(" xml:base=\""+this.xmlBase+"\" ");
}
if(this.type.toLowerCase()==this.HTML){
s.push(">"+_6.util.escapeHtml(this.value)+"</"+this.tagName+">\n");
}else{
s.push(">"+this.value+"</"+this.tagName+">\n");
}
var ret=s.join("");
return ret;
}});
_6.Link=_2(_6.Node,{constructor:function(_69,rel,_6a,_6b,_6c){
this.href=_69;
this.hrefLang=_6a;
this.rel=rel;
this.title=_6b;
this.type=_6c;
this.nodeType="Link";
},_getAttributeNames:function(){
return ["href","jrefLang","rel","title","type"];
},_postBuild:function(){
},buildFromDom:function(_6d){
this._saveAttributes(_6d);
this.href=this.attributes.href;
this.hrefLang=this.attributes.hreflang;
this.rel=this.attributes.rel;
this.title=this.attributes.title;
this.type=this.attributes.type;
if(this._postBuild){
this._postBuild();
}
},toString:function(){
var s=[];
s.push("<link ");
if(this.href){
s.push(" href=\""+this.href+"\" ");
}
if(this.hrefLang){
s.push(" hrefLang=\""+this.hrefLang+"\" ");
}
if(this.rel){
s.push(" rel=\""+this.rel+"\" ");
}
if(this.title){
s.push(" title=\""+this.title+"\" ");
}
if(this.type){
s.push(" type = \""+this.type+"\" ");
}
s.push("/>\n");
return s.join("");
}});
_6.Person=_2(_6.Node,{constructor:function(_6e,_6f,_70,uri){
this.author="author";
this.contributor="contributor";
if(!_6e){
_6e=this.author;
}
this.personType=_6e;
this.name=_6f||"";
this.email=_70||"";
this.uri=uri||"";
this._objName="Person";
this.nodeType="Person";
},_getAttributeNames:function(){
return null;
},_postBuild:function(){
},accept:function(tag){
return Boolean(this._accepts[tag]);
},buildFromDom:function(_71){
var c=_71.childNodes;
for(var i=0;i<c.length;i++){
var _72=_6.util.getNodename(c[i]);
if(!_72){
continue;
}
if(c[i].namespaceURI!=_6._Constants.ATOM_NS&&_72!="#text"){
if(!this.extensions){
this.extensions=[];
}
var _73=new _6.Node();
_73.buildFromDom(c[i]);
this.extensions.push(_73);
}
if(!this.accept(_72.toLowerCase())){
continue;
}
var fn=_6._actions[_72];
if(fn){
fn(this,c[i]);
}
}
this._saveAttributes(_71);
if(this._postBuild){
this._postBuild();
}
},_accepts:{"name":true,"uri":true,"email":true},toString:function(){
var s=[];
s.push("<"+this.personType+">\n");
if(this.name){
s.push("\t<name>"+this.name+"</name>\n");
}
if(this.email){
s.push("\t<email>"+this.email+"</email>\n");
}
if(this.uri){
s.push("\t<uri>"+this.uri+"</uri>\n");
}
s.push("</"+this.personType+">\n");
return s.join("");
}});
// Model of an Atom <generator> element: uri/version attributes plus the
// element's text value.
_6.Generator=_2(_6.Node,{constructor:function(uri,_74,_75){
this.uri=uri;
this.version=_74;
this.value=_75;
},_postBuild:function(){
// Reads the text content as the value and uri/version from the attributes.
},buildFromDom:function(_76){
this.value=_5.textContent(_76);
this._saveAttributes(_76);
this.uri=this.attributes.uri;
this.version=this.attributes.version;
if(this._postBuild){
this._postBuild();
}
// Serializes to <generator uri=... version=...>value</generator>, emitting
// only the attributes that are set.
},toString:function(){
var s=[];
s.push("<generator ");
if(this.uri){
s.push(" uri=\""+this.uri+"\" ");
}
if(this.version){
s.push(" version=\""+this.version+"\" ");
}
s.push(">"+this.value+"</generator>\n");
var ret=s.join("");
return ret;
}});
_6.Entry=_2(_6.AtomItem,{constructor:function(id){
this.id=id;
this._objName="Entry";
this.feedUrl=null;
},_getAttributeNames:function(){
return null;
},_accepts:{"author":true,"content":true,"category":true,"contributor":true,"created":true,"id":true,"link":true,"published":true,"rights":true,"summary":true,"title":true,"updated":true,"xmlbase":true,"issued":true,"modified":true},toString:function(_77){
var s=[];
var i;
if(_77){
s.push("<?xml version='1.0' encoding='UTF-8'?>");
s.push("<entry xmlns='"+_6._Constants.ATOM_URI+"'");
}else{
s.push("<entry");
}
if(this.xmlBase){
s.push(" xml:base=\""+this.xmlBase+"\" ");
}
for(i in this.name_spaces){
s.push(" xmlns:"+i+"=\""+this.name_spaces[i]+"\"");
}
s.push(">\n");
s.push("<id>"+(this.id?this.id:"")+"</id>\n");
if(this.issued&&!this.published){
this.published=this.issued;
}
if(this.published){
s.push("<published>"+_4.toISOString(this.published)+"</published>\n");
}
if(this.created){
s.push("<created>"+_4.toISOString(this.created)+"</created>\n");
}
if(this.issued){
s.push("<issued>"+_4.toISOString(this.issued)+"</issued>\n");
}
if(this.modified){
s.push("<modified>"+_4.toISOString(this.modified)+"</modified>\n");
}
if(this.modified&&!this.updated){
this.updated=this.modified;
}
if(this.updated){
s.push("<updated>"+_4.toISOString(this.updated)+"</updated>\n");
}
if(this.rights){
s.push("<rights>"+this.rights+"</rights>\n");
}
if(this.title){
s.push(this.title.toString());
}
if(this.summary){
s.push(this.summary.toString());
}
var _78=[this.authors,this.categories,this.links,this.contributors,this.extensions];
for(var x in _78){
if(_78[x]){
for(var y in _78[x]){
s.push(_78[x][y]);
}
}
}
if(this.content){
s.push(this.content.toString());
}
s.push("</entry>\n");
return s.join("");
},getEditHref:function(){
if(this.links===null||this.links.length===0){
return null;
}
for(var x in this.links){
if(this.links[x].rel&&this.links[x].rel=="edit"){
return this.links[x].href;
}
}
return null;
},setEditHref:function(url){
if(this.links===null){
this.links=[];
}
for(var x in this.links){
if(this.links[x].rel&&this.links[x].rel=="edit"){
this.links[x].href=url;
return;
}
}
this.addLink(url,"edit");
}});
_6.Feed=_2(_6.AtomItem,{_accepts:{"author":true,"content":true,"category":true,"contributor":true,"created":true,"id":true,"link":true,"published":true,"rights":true,"summary":true,"title":true,"updated":true,"xmlbase":true,"entry":true,"logo":true,"issued":true,"modified":true,"icon":true,"subtitle":true},addEntry:function(_79){
if(!_79.id){
throw new Error("The entry object must be assigned an ID attribute.");
}
if(!this.entries){
this.entries=[];
}
_79.feedUrl=this.getSelfHref();
this.entries.push(_79);
},getFirstEntry:function(){
if(!this.entries||this.entries.length===0){
return null;
}
return this.entries[0];
},getEntry:function(_7a){
if(!this.entries){
return null;
}
for(var x in this.entries){
if(this.entries[x].id==_7a){
return this.entries[x];
}
}
return null;
},removeEntry:function(_7b){
if(!this.entries){
return;
}
var _7c=0;
for(var i=0;i<this.entries.length;i++){
if(this.entries[i]===_7b){
this.entries.splice(i,1);
_7c++;
}
}
return _7c;
},setEntries:function(_7d){
for(var x in _7d){
this.addEntry(_7d[x]);
}
},toString:function(){
var s=[];
var i;
s.push("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n");
s.push("<feed xmlns=\""+_6._Constants.ATOM_URI+"\"");
if(this.xmlBase){
s.push(" xml:base=\""+this.xmlBase+"\"");
}
for(i in this.name_spaces){
s.push(" xmlns:"+i+"=\""+this.name_spaces[i]+"\"");
}
s.push(">\n");
s.push("<id>"+(this.id?this.id:"")+"</id>\n");
if(this.title){
s.push(this.title);
}
if(this.copyright&&!this.rights){
this.rights=this.copyright;
}
if(this.rights){
s.push("<rights>"+this.rights+"</rights>\n");
}
if(this.issued){
s.push("<issued>"+_4.toISOString(this.issued)+"</issued>\n");
}
if(this.modified){
s.push("<modified>"+_4.toISOString(this.modified)+"</modified>\n");
}
if(this.modified&&!this.updated){
this.updated=this.modified;
}
if(this.updated){
s.push("<updated>"+_4.toISOString(this.updated)+"</updated>\n");
}
if(this.published){
s.push("<published>"+_4.toISOString(this.published)+"</published>\n");
}
if(this.icon){
s.push("<icon>"+this.icon+"</icon>\n");
}
if(this.language){
s.push("<language>"+this.language+"</language>\n");
}
if(this.logo){
s.push("<logo>"+this.logo+"</logo>\n");
}
if(this.subtitle){
s.push(this.subtitle.toString());
}
if(this.tagline){
s.push(this.tagline.toString());
}
var _7e=[this.alternateLinks,this.authors,this.categories,this.contributors,this.otherLinks,this.extensions,this.entries];
for(i in _7e){
if(_7e[i]){
for(var x in _7e[i]){
s.push(_7e[i][x]);
}
}
}
s.push("</feed>");
return s.join("");
},createEntry:function(){
var _7f=new _6.Entry();
_7f.feedUrl=this.getSelfHref();
return _7f;
},getSelfHref:function(){
if(this.links===null||this.links.length===0){
return null;
}
for(var x in this.links){
if(this.links[x].rel&&this.links[x].rel=="self"){
return this.links[x].href;
}
}
return null;
}});
_6.Service=_2(_6.AtomItem,{constructor:function(_80){
this.href=_80;
},buildFromDom:function(_81){
var i;
this.workspaces=[];
if(_81.tagName!="service"){
return;
}
if(_81.namespaceURI!=_6._Constants.PURL_NS&&_81.namespaceURI!=_6._Constants.APP_NS){
return;
}
var ns=_81.namespaceURI;
this.name_space=_81.namespaceURI;
var _82;
if(typeof (_81.getElementsByTagNameNS)!="undefined"){
_82=_81.getElementsByTagNameNS(ns,"workspace");
}else{
_82=[];
var _83=_81.getElementsByTagName("workspace");
for(i=0;i<_83.length;i++){
if(_83[i].namespaceURI==ns){
_82.push(_83[i]);
}
}
}
if(_82&&_82.length>0){
var _84=0;
var _85;
for(i=0;i<_82.length;i++){
_85=(typeof (_82.item)==="undefined"?_82[i]:_82.item(i));
var _86=new _6.Workspace();
_86.buildFromDom(_85);
this.workspaces[_84++]=_86;
}
}
},getCollection:function(url){
for(var i=0;i<this.workspaces.length;i++){
var _87=this.workspaces[i].collections;
for(var j=0;j<_87.length;j++){
if(_87[j].href==url){
return _87;
}
}
}
return null;
}});
_6.Workspace=_2(_6.AtomItem,{constructor:function(_88){
this.title=_88;
this.collections=[];
},buildFromDom:function(_89){
var _8a=_6.util.getNodename(_89);
if(_8a!="workspace"){
return;
}
var c=_89.childNodes;
var len=0;
for(var i=0;i<c.length;i++){
var _8b=c[i];
if(_8b.nodeType===1){
_8a=_6.util.getNodename(_8b);
if(_8b.namespaceURI==_6._Constants.PURL_NS||_8b.namespaceURI==_6._Constants.APP_NS){
if(_8a==="collection"){
var _8c=new _6.Collection();
_8c.buildFromDom(_8b);
this.collections[len++]=_8c;
}
}else{
if(_8b.namespaceURI===_6._Constants.ATOM_NS){
if(_8a==="title"){
this.title=_5.textContent(_8b);
}
}
}
}
}
}});
_6.Collection=_2(_6.AtomItem,{constructor:function(_8d,_8e){
this.href=_8d;
this.title=_8e;
this.attributes=[];
this.features=[];
this.children=[];
this.memberType=null;
this.id=null;
},buildFromDom:function(_8f){
this.href=_8f.getAttribute("href");
var c=_8f.childNodes;
for(var i=0;i<c.length;i++){
var _90=c[i];
if(_90.nodeType===1){
var _91=_6.util.getNodename(_90);
if(_90.namespaceURI==_6._Constants.PURL_NS||_90.namespaceURI==_6._Constants.APP_NS){
if(_91==="member-type"){
this.memberType=_5.textContent(_90);
}else{
if(_91=="feature"){
if(_90.getAttribute("id")){
this.features.push(_90.getAttribute("id"));
}
}else{
var _92=new _6.Node();
_92.buildFromDom(_90);
this.children.push(_92);
}
}
}else{
if(_90.namespaceURI===_6._Constants.ATOM_NS){
if(_91==="id"){
this.id=_5.textContent(_90);
}else{
if(_91==="title"){
this.title=_5.textContent(_90);
}
}
}
}
}
}
}});
return _6;
});
|
cbeer/web | app/controllers/users_controller.rb | <filename>app/controllers/users_controller.rb
# Copyright 2013 Square Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Controller for working with {User Users}.
class UsersController < ApplicationController
  # Signup (#create) must be reachable without a session; every other action
  # inherits the application-wide login requirement.
  skip_before_filter :login_required, only: :create
  before_filter :must_be_unauthenticated, only: :create
  before_filter :find_user, only: :show
  respond_to :html, only: [:show, :create]
  respond_to :json, only: :index

  # Returns up to 10 users whose usernames start with `query`, ordered by
  # username, as JSON. Supports infinite scrolling via the `last` parameter
  # (username of the last record already rendered). When `project_id` is
  # given, each result is annotated with that user's membership status.
  #
  # Routes
  # ------
  #
  # * `GET /users.json`
  def index
    return respond_with([]) if params[:query].blank?
    @users = User.prefix(params[:query]).limit(10).order('username ASC')
    last = params[:last].present? ? User.find_by_username(params[:last]) : nil
    @users = @users.where(infinite_scroll_clause('username', 'ASC', last, 'username')) if last
    project = params[:project_id].present? ? Project.find_from_slug!(params[:project_id]) : nil
    respond_with decorate(@users, project)
  end

  # Displays information about a user.
  #
  # Routes
  # ------
  #
  # * `GET /users/:id`
  #
  # Path Parameters
  # ---------------
  #
  # |      |                      |
  # |:-----|:---------------------|
  # | `id` | The User's username. |
  def show
  end

  # Creates a new User account. For password authenticating installs only. The
  # `email_address` and `password_confirmation` virtual fields must be
  # specified.
  #
  # If the signup is successful, takes the user to the next URL stored in the
  # params; or, if none is set, the root URL.
  #
  # Routes
  # ------
  #
  # * `POST /users`
  #
  # Request Parameters
  #
  # |        |                                            |
  # |:-------|:-------------------------------------------|
  # | `next` | The URL to go to after signup is complete. |
  #
  # Body Parameters
  # ---------------
  #
  # |        |                          |
  # |:-------|:-------------------------|
  # | `user` | The new User parameters. |
  def create
    unless Squash::Configuration.authentication.registration_enabled?
      return redirect_to(login_url, alert: t('controllers.users.create.disabled'))
    end
    @user = User.create(user_params)
    respond_with @user do |format|
      format.html do
        if @user.valid?
          log_in_user @user
          flash[:success] = t('controllers.users.create.success', name: @user.first_name || @user.username)
          # NOTE(review): `next` is not validated as an internal path —
          # confirm whether this is an open-redirect exposure.
          redirect_to (params[:next].presence || root_url)
        else
          # Re-render the combined login/signup page with validation errors.
          render 'sessions/new'
        end
      end
    end
  end if Squash::Configuration.authentication.strategy == 'password'
  # ^ #create is only defined at all when password auth is configured.

  private

  # Loads the user addressed by the :id path parameter (a username); 404s
  # via the RecordNotFound exception when no such user exists.
  def find_user
    @user = User.find_by_username!(params[:id])
  end

  # Serializes users for the JSON index response, adding an `is_member` flag
  # when scoped to a project (nil when no project was given).
  def decorate(users, project=nil)
    users.map do |user|
      user.as_json.merge(is_member: project ? user.memberships.where(project_id: project.id).exists? : nil)
    end
  end

  # Strong-parameters whitelist for signup.
  def user_params
    params.require(:user).permit(:username, :password, :password_confirmation,
                                 :email_address, :first_name, :last_name)
  end
end
|
rs-ravi2/Codes | GeeksforGeeks/Outputs/27.cpp | <reponame>rs-ravi2/Codes
#include <bits/stdc++.h>
using namespace std;
// Allocates an int and assigns it to the LOCAL copy of `a`. The pointer is
// passed by value, so the caller's pointer is NOT modified: the allocation
// leaks and the caller still holds an uninitialized pointer. (This is the
// deliberate point of this teaching example — do not "fix" it.)
void fun(int *a)
{
    a = (int*)malloc(sizeof(int));
}
int main()
{
    int *p;       // uninitialized pointer
    fun(p);       // has no effect on p: fun() only changes its own copy
    *p = 6;       // undefined behaviour: dereferences an uninitialized pointer
    cout << *p;
    return 0;
}
/* Because `p` is passed by value, the allocation inside fun() never reaches
main(), so `*p = 6` dereferences an uninitialized pointer. That is undefined
behaviour; in practice this program typically crashes with a segmentation
fault. */
Rhinob1/data-integration | dataintegration-model/dataintegration-model-management-provider/src/main/java/com/youngdatafan/portal/model/management/outinterfacemodel/service/OutinterfaceModelService.java | <reponame>Rhinob1/data-integration<gh_stars>0
package com.youngdatafan.portal.model.management.outinterfacemodel.service;
import com.youngdatafan.dataintegration.core.model.Result;
import com.youngdatafan.portal.model.management.common.entity.ModelDTO;
import com.youngdatafan.portal.model.management.outinterfacemodel.dto.*;
import com.youngdatafan.portal.model.management.outinterfacemodel.vo.AddOutinterfaceModelVO;
import com.youngdatafan.portal.model.management.outinterfacemodel.vo.OutinterfacePreviewDataVO;
import com.youngdatafan.portal.model.management.outinterfacemodel.vo.UpdateOutinterfaceModelVO;
import com.github.pagehelper.PageInfo;
import java.util.List;
import java.util.Map;
/**
* <p>****************************************************************************</p>
* <ul style="margin:15px;">
* <li>Description : description</li>
* <li>Version : 1.0</li>
* <li>Creation : 2020/2/19 10:58 AM</li>
* <li>Author : ksice_xt</li>
* </ul>
* <p>****************************************************************************</p>
*/
public interface OutinterfaceModelService {

    /**
     * Lists all models, grouped by type and group, that the user is authorized to see.
     *
     * @param userId id of the requesting user
     * @return the type/group model listing for that user
     */
    List<OutinterfaceModelTypeAndGroupListDTO> selectModelAndGroupList(String userId);

    /**
     * Looks up a single interface model by its name.
     *
     * @param userId    id of the requesting user
     * @param modelName name of the model
     * @return the matching model
     */
    OutinterfaceModelDTO selectModelByModelName(String userId, String modelName);

    /**
     * Queries the data source backing the given interface model.
     *
     * @param userId                id of the requesting user
     * @param outinterfaceModelName name of the interface model
     * @return the data source wrapped in a service {@link Result}
     */
    Result<OutinterfaceModelDatasourceDTO, Object> selectDatasource(String userId, String outinterfaceModelName);

    /**
     * Queries the metadata (field definitions) of the given business model.
     *
     * @param userId                id of the requesting user
     * @param outinterfaceModelName name of the interface model
     * @return the model's metadata entries
     */
    Result<List<OutinterfaceModelMetaDataDTO>, Object> selectOutinterfaceModelMetaData(String userId, String outinterfaceModelName);

    /**
     * Queries the data source of a model, additionally filtered by mode type.
     */
    Result<OutinterfaceModelDatasourceDTO, Object> selectDatasourceByModelName(String userId, String outinterfaceModelName, String modeType);

    /**
     * Creates a new business model.
     *
     * @param addOutinterfaceModelVO creation payload
     * @return outcome of the insert
     */
    Result insert(String userId, AddOutinterfaceModelVO addOutinterfaceModelVO);

    /**
     * Deletes a business model by name.
     *
     * @param modelName name of the model to delete
     * @return whether the deletion succeeded
     */
    Result<Boolean, Object> delete(String userId, String modelName);

    /**
     * Updates an existing business model.
     *
     * @param updateOutinterfaceModelVO update payload
     * @return whether the update succeeded
     */
    Result<Boolean, Object> update(String userId, UpdateOutinterfaceModelVO updateOutinterfaceModelVO);

    /**
     * Pages through all business models visible to the user.
     *
     * @param userId         user id
     * @param modelName      model-name filter
     * @param groupName      group-name filter
     * @param basicModelName backing basic-model (data source) name filter
     * @param curPage        page number
     * @param pageSize       rows per page
     * @return one page of matching models
     */
    PageInfo<OutinterfaceModelDTO> selectAll(String userId, String modelName, String groupName, String basicModelName, Integer curPage, Integer pageSize);

    /** Lists the basic-model groups available to the user. */
    List<OutinterfaceModelGroupDTO> getBasicModelGroup(String userId);

    /** Lists the basic models inside the given group. */
    List<OutinterfaceModelNameAndCnameDTO> getBasicModel(String userId, String groupName);

    /** Returns the columns (fields) of the given basic model. */
    List<OutinterfaceBasicModelAndMetaDataDTO> getBasicModelColumns(String userId, String basicModel);

    /** Returns all model groups for the user. */
    List<OutinterfaceGroupDTO> getAllGroup(String userId);

    /**
     * Batch download: collects the export data for the named models.
     *
     * @param modelNames names of the models to export
     */
    List<BatchDownloadOutinterfaceModelDTO> batchDownloadExcel(String userId, List<String> modelNames);

    /** Checks whether a model with the given name already exists in the given group. */
    Result<Boolean, Object> queryOutinterfaceModelExists(String userId, String outinterfaceModelName, String outinterfaceModelGroup);

    /** Returns the model together with its metadata. */
    ModelDTO selectModelAndMetaData(String userId, String modelName);

    /**
     * Runs a data preview for the model and returns the resulting rows.
     * (Method name keeps the original "Priview" spelling for compatibility.)
     */
    Result<List<Map<String, Object>>, Object> testPriview(String userId, OutinterfacePreviewDataVO outinterfacePreviewDataVO);
}
|
Meridian59Kor/Meridian59 | module/stats/stats.h | // Meridian 59, Copyright 1994-2012 <NAME> and <NAME>.
// All rights reserved.
//
// This software is distributed under a license that is described in
// the LICENSE file that accompanies it.
//
// Meridian is a registered trademark.
/*
* stats.h: Main header file for character stat adjustment DLL
*/
#ifndef _STATS_H
#define _STATS_H

#include "statsrc.h"

#define MODULE_ID 103 // Unique module id #

// Private notification message sent when the active tab changes.
#define BK_TABCHANGED (WM_USER + 101)

/* New game state */
enum {GAME_PICKCHAR = 100, };

/* Structures from server giving character information */
typedef struct {
   ID id;       // Object id of character
   char *name;  // String name
   BYTE flags;  // Extra info about character
} Character;

void GoToGame(ID character);
void AbortCharDialogs(void);

#define NUM_CHAR_STATS 6          // stat sliders on the character page
#define NUM_CHAR_SCHOOLS 7        // school choices on the character page
#define STAT_POINTS_INITIAL 50 // Initial # of available stat points

void MakeStats(int *stats_in, int *levels_in);
void initStatsFromServer(int *stats_in, int *levels_in);
void CharTabPageCommand(HWND hwnd, int id, HWND hwndCtl, UINT codeNotify);
Bool VerifySettings(void);
void CharInfoValid(void);
void CharInfoInvalid(void);
BOOL CALLBACK CharStatsDialogProc(HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam);
void CharStatsGetChoices(int *buf);
void CharSchoolsGetChoices(int *buf);
int CharStatsGetPoints(void);

// Sending messages to server
extern client_message msg_table[];
extern ClientInfo *cinfo; // Holds data passed from main client
extern Bool exiting;      // True when module is exiting and should be unloaded
extern HINSTANCE hInst;   // module handle

/* messages to server */
// Sends the chosen stat and school values to the server in one message.
#define SendNewCharInfo(stat1, stat2, stat3, stat4, stat5, stat6, school1, school2, school3, school4, school5, school6, school7) \
   ToServer(BP_CHANGED_STATS, msg_table, stat1, stat2, stat3, stat4, stat5, stat6, school1, school2, school3, school4, school5, school6, school7)

#endif /* #ifndef _STATS_H */
|
Juicestus/Computer-Science | CS-1-Labs/Pr0301.java | <filename>CS-1-Labs/Pr0301.java
/**
 * Pr0301: simple guessing game. The user enters a number between 1 and 100
 * and wins when it is strictly greater than a freshly drawn random number.
 */
public class Pr0301 {

    /**
     * Returns true when {@code n} beats (is strictly greater than) a random
     * number drawn from [1, 100] by the course helper library.
     */
    public static boolean verify(int n) {
        return n > MyLibrary.getRandNum(1, 100);
    }

    public static void main(String[] args) {
        int n = MyLibrary.getInt("Give me an integer between 1 and 100");
        if (verify(n)) {
            System.out.println("You win");
        } else {
            System.out.println("You lose"); // fixed typo: "loose" -> "lose"
        }
    }
}
|
xiaohesongww/HESProject | huiErShi-iOS/Modular/Schedule/Controller/RepeatTipViViewController.h | <filename>huiErShi-iOS/Modular/Schedule/Controller/RepeatTipViViewController.h
//
// RepeatTipViViewController.h
// huiErShi-iOS
//
// Copyright © 2017年 yangsen. All rights reserved.
//
#import "MainBaseViewController.h"
// Presumably presents repeat-interval options/tips for a schedule entry
// (inferred from the name and module; TODO confirm against callers).
@interface RepeatTipViViewController : MainBaseViewController
@end
|
spunkmars/ProFTPD-Admin | src/proftpd/ftpadmin/lib/model_common.py | #coding=utf-8
from proftpd.ftpadmin.lib.common import initlog
#logger2=initlog()
def get_model_all_field_objects(model=None):
    """Return the field objects of ``model`` (the first element of each
    tuple from ``model._meta.get_fields_with_model()``), or an empty list
    when no model is given."""
    if model is None:
        return []
    return [field for field, _owner in model._meta.get_fields_with_model()]
def get_model_relate_field(model=None):
    """Map the name of every foreign-key field of ``model`` to the related
    parent model class. Fields exposing a ``related`` attribute are treated
    as foreign keys."""
    return {
        field.name: field.related.parent_model
        for field in get_model_all_field_objects(model=model)
        if hasattr(field, 'related')
    }
def get_model_valid_fields(model=None, invalid_fields=None):
    """Return the editable, non-unique fields of ``model``.

    Each entry is a dict with keys ``name`` (field name), ``init`` (the
    field's default value, or None when it has none) and ``obj`` (the field
    object itself). Field names listed in ``invalid_fields`` are skipped.

    Note: the default for ``invalid_fields`` was changed from a shared
    mutable ``[]`` to ``None`` (same observable behavior, safer idiom).
    """
    if invalid_fields is None:
        invalid_fields = []
    valid_fields = []
    for field in get_model_all_field_objects(model=model):
        # Skip fields that must not be modified or must stay unique.
        if not field.editable or field.unique:
            continue
        field_init = field.default if field.has_default() else None
        # The original ``len(single_field) >= 3`` guard was always true
        # (the dict always carries exactly these three keys), so only the
        # invalid_fields filter remains.
        if field.name not in invalid_fields:
            valid_fields.append({'name': field.name, 'init': field_init, 'obj': field})
    return valid_fields
def mult_save(model=None, mult_ids=None, save_args=None):
    """Bulk-update and save model instances.

    ``mult_ids`` is a comma-separated string of primary keys; every matching
    row has the attributes in ``save_args`` applied and is then saved. For
    foreign-key fields the raw value is treated as the pk of the related
    model and resolved to an instance first.

    Note: the default for ``save_args`` was changed from a shared mutable
    ``{}`` to ``None`` (same observable behavior, safer idiom).
    """
    if save_args is None:
        save_args = {}
    if model is not None and mult_ids is not None:
        # Names of foreign-key fields -> related model class.
        relate_field = get_model_relate_field(model=model)
        modify_items = model.objects.filter(pk__in=mult_ids.split(','))
        for obj_item in modify_items:
            for arg_name in save_args.keys():
                if arg_name in relate_field.keys():
                    # Foreign key: resolve the pk to the related instance.
                    related_instance = relate_field[arg_name].objects.get(pk=save_args[arg_name])
                    setattr(obj_item, arg_name, related_instance)
                else:
                    setattr(obj_item, arg_name, save_args[arg_name])
            obj_item.save()
|
Carlososuna11/codewars-handbook | python/kata/6-kyu/Consecutive strings/main.py | import codewars_test as test
from solution import longest_consec
def testing(actual, expected):
    # Thin wrapper around the codewars assertion helper, so each case below
    # stays a one-liner.
    test.assert_equals(actual, expected)
# Fixed test cases for longest_consec: a normal case, k == 1, an empty
# array, and out-of-range k values (negative, larger than the array, zero),
# all of which must yield "".
test.describe("longest_consec")
test.it("Basic tests")
testing(longest_consec(["zone", "abigail", "theta", "form", "libe", "zas"], 2), "abigailtheta")
testing(longest_consec(["ejjjjmmtthh", "zxxuueeg", "aanlljrrrxx", "dqqqaaabbb", "oocccffuucccjjjkkkjyyyeehh"], 1), "oocccffuucccjjjkkkjyyyeehh")
testing(longest_consec([], 3), "")
testing(longest_consec(["itvayloxrp","wkppqsztdkmvcuwvereiupccauycnjutlv","vweqilsfytihvrzlaodfixoyxvyuyvgpck"], 2), "wkppqsztdkmvcuwvereiupccauycnjutlvvweqilsfytihvrzlaodfixoyxvyuyvgpck")
testing(longest_consec(["wlwsasphmxx","owiaxujylentrklctozmymu","wpgozvxxiu"], 2), "wlwsasphmxxowiaxujylentrklctozmymu")
testing(longest_consec(["zone", "abigail", "theta", "form", "libe", "zas"], -2), "")
testing(longest_consec(["it","wkppv","ixoyx", "3452", "zzzzzzzzzzzz"], 3), "ixoyx3452zzzzzzzzzzzz")
testing(longest_consec(["it","wkppv","ixoyx", "3452", "zzzzzzzzzzzz"], 15), "")
testing(longest_consec(["it","wkppv","ixoyx", "3452", "zzzzzzzzzzzz"], 0), "")
EvenOldridge/NVTabular | tests/integration/test_notebooks.py | <filename>tests/integration/test_notebooks.py<gh_stars>0
#
# Copyright (c) 2020, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import itertools
import json
import os
import shutil
import subprocess
import sys
from os.path import dirname, realpath
import pytest
TEST_PATH = dirname(dirname(realpath(__file__)))
DATA_START = os.environ.get("DATASET_DIR", "/raid/data")
def test_criteo_notebook(tmpdir):
    """Integration test: runs the criteo example notebook end-to-end against
    the shared parquet dataset under DATA_START (requires GPU 0 + data)."""
    input_path = os.path.join(DATA_START, "criteo/crit_int_pq")
    output_path = os.path.join(DATA_START, "criteo/crit_test")
    os.environ["PARTS_PER_CHUNK"] = "1"

    _run_notebook(
        tmpdir,
        os.path.join(dirname(TEST_PATH), "examples", "criteo-example.ipynb"),
        input_path,
        output_path,
        # disable rmm.reinitialize, seems to be causing issues
        transform=lambda line: line.replace("rmm.reinitialize(", "# rmm.reinitialize("),
        gpu_id=0,
        batch_size=100000,
    )
def test_criteohugectr_notebook(tmpdir):
    """Integration test: runs the HugeCTR criteo notebook on two GPUs
    against the shared parquet dataset under DATA_START."""
    input_path = os.path.join(DATA_START, "criteo/crit_int_pq")
    output_path = os.path.join(DATA_START, "criteo/crit_test")
    os.environ["PARTS_PER_CHUNK"] = "1"

    _run_notebook(
        tmpdir,
        os.path.join(dirname(TEST_PATH), "examples", "hugectr", "criteo-hugectr.ipynb"),
        input_path,
        output_path,
        # disable rmm.reinitialize, seems to be causing issues
        transform=lambda line: line.replace("rmm.reinitialize(", "# rmm.reinitialize("),
        gpu_id="0,1",
        batch_size=100000,
    )
def test_optimize_criteo(tmpdir):
    """Integration test: runs the criteo dataset-optimization notebook
    (raw input -> optimized output) on GPU 2."""
    input_path = os.path.join(DATA_START, "criteo/crit_orig")
    output_path = os.path.join(DATA_START, "criteo/crit_test_opt")

    notebook_path = os.path.join(dirname(TEST_PATH), "examples", "optimize_criteo.ipynb")
    _run_notebook(tmpdir, notebook_path, input_path, output_path, gpu_id=2)
def test_rossman_example(tmpdir):
    """Integration test: runs the Rossmann preprocessing notebook followed by
    the training notebook. clean_up=False keeps the preproc output in place
    so the second notebook can consume it. Skipped without tensorflow."""
    pytest.importorskip("tensorflow")
    data_path = os.path.join(DATA_START, "rossman/data")
    input_path = os.path.join(DATA_START, "rossman/input")
    output_path = os.path.join(DATA_START, "rossman/output")

    notebookpre_path = os.path.join(
        dirname(TEST_PATH), "examples", "rossmann-store-sales-preproc.ipynb"
    )
    _run_notebook(tmpdir, notebookpre_path, data_path, input_path, gpu_id=1, clean_up=False)

    notebookex_path = os.path.join(
        dirname(TEST_PATH), "examples", "rossmann-store-sales-example.ipynb"
    )
    _run_notebook(tmpdir, notebookex_path, input_path, output_path, gpu_id=1)
def test_gpu_benchmark(tmpdir):
    """Integration test: runs the GPU benchmark notebook on the outbrains
    dataset under DATA_START."""
    input_path = os.path.join(DATA_START, "outbrains/input")
    output_path = os.path.join(DATA_START, "outbrains/output")

    notebook_path = os.path.join(dirname(TEST_PATH), "examples", "gpu_benchmark.ipynb")
    _run_notebook(tmpdir, notebook_path, input_path, output_path, gpu_id=0, batch_size=100000)
def _run_notebook(
tmpdir,
notebook_path,
input_path,
output_path,
batch_size=None,
gpu_id=0,
clean_up=True,
transform=None,
):
os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu_id)
if not os.path.exists(input_path):
os.makedirs(input_path)
if not os.path.exists(output_path):
os.makedirs(output_path)
if batch_size:
os.environ["BATCH_SIZE"] = str(batch_size)
os.environ["INPUT_DATA_DIR"] = input_path
os.environ["OUTPUT_DATA_DIR"] = output_path
# read in the notebook as JSON, and extract a python script from it
notebook = json.load(open(notebook_path))
source_cells = [cell["source"] for cell in notebook["cells"] if cell["cell_type"] == "code"]
lines = [
transform(line.rstrip()) if transform else line
for line in itertools.chain(*source_cells)
if not (line.startswith("%") or line.startswith("!"))
]
# save the script to a file, and run with the current python executable
# we're doing this in a subprocess to avoid some issues using 'exec'
# that were causing a segfault with globals of the exec'ed function going
# out of scope
script_path = os.path.join(tmpdir, "notebook.py")
with open(script_path, "w") as script:
script.write("\n".join(lines))
subprocess.check_output([sys.executable, script_path])
# clear out products
if clean_up:
shutil.rmtree(output_path)
|
MingkeVan/java-interview | src/main/java/com/fanmk/jvm/RuntimeConstantPoolOOM.java | package com.fanmk.jvm;
import java.util.ArrayList;
import java.util.List;
/**
* @program: com.fanmk.interview
* @author: fanmk
* @email: <EMAIL>
* @decription:
* @date: 2020/3/2 23:12
*/
/**
 * Deliberately exhausts memory: interns an unbounded stream of distinct
 * strings while also keeping every one reachable through the list, so the
 * loop never terminates and the JVM eventually aborts with an OutOfMemoryError.
 * This is a teaching demo of runtime-constant-pool / string-pool growth —
 * run it only with a bounded heap.
 */
public class RuntimeConstantPoolOOM {

    public static void main(String[] args) {
        List<String> list = new ArrayList<>();
        int i = 0;
        // Infinite by design: each iteration interns a new unique string and
        // the list reference prevents any of them from being collected.
        while (true) {
            list.add(String.valueOf(i++).intern());
        }
    }
}
|
Wuwenxu/code-camp-java | code-camp-java-base/src/main/java/com/wuwenxu/codecamp/base/arithmetic/jzoffer/GetNumberOfK.java | <gh_stars>1-10
package com.wuwenxu.codecamp.base.arithmetic.jzoffer;
/**
* 数字在排序数组中出现的次数
*
* 统计一个数字在排序数组中出现的次数。
* 2.排序数组->数组元素有序->二分查找到这个数字,
* 然后依次上前向后查找出最前一个和最后一个相同元素(因为数组是排序数组,是有序的,这个数字在该数组中连续出现)
*/
/**
 * Counts how many times a value occurs in a sorted array: binary-search any
 * occurrence, then expand outwards over the run of equal elements.
 */
public class GetNumberOfK {

    /**
     * @param array sorted (ascending) array; may be null or empty
     * @param k     value to count
     * @return number of occurrences of {@code k} in {@code array}
     */
    public int GetNumberOfK(int [] array , int k) {
        // Bug fix: the original tested array.length BEFORE the null check
        // (array.length==0 || array==null), throwing NullPointerException
        // for null input instead of returning 0.
        if (array == null || array.length == 0) {
            return 0;
        }
        int low = 0, high = array.length - 1;
        int index = -1;
        // Binary search for any index holding k.
        while (low <= high) {
            int mid = (low + high) >>> 1; // unsigned shift avoids int overflow
            if (array[mid] == k) {
                index = mid;
                break;
            } else if (array[mid] > k) {
                high = mid - 1;
            } else {
                low = mid + 1;
            }
        }
        if (index == -1) {
            return 0; // k is not present
        }
        // Expand left and right over the contiguous run of k's.
        low = index - 1;
        high = index + 1;
        while (low >= 0 && array[low] == k) {
            low--;
        }
        while (high < array.length && array[high] == k) {
            high++;
        }
        return high - low - 1;
    }
}
|
Capstone-Team-34/Slicer4J | benchmarks/JacksonDatabind_3b/src/test/java/com/fasterxml/jackson/databind/creators/TestCreators3.java | package com.fasterxml.jackson.databind.creators;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.*;
public class TestCreators3 extends BaseMapTest
{
    // Bean with two constructors: only the first is annotated @JsonCreator
    // and must be the one Jackson selects; the second throws so an
    // incorrect selection fails loudly.
    static class MultiCtor
    {
        protected String _a, _b;

        @JsonCreator
        MultiCtor(@JsonProperty("a") String a, @JsonProperty("b") String b) {
            _a = a;
            _b = b;
        }
        MultiCtor(String a, String b, Object c) {
            throw new RuntimeException("Wrong factory!");
        }
    }

    /*
    /**********************************************************
    /* Test methods
    /**********************************************************
     */

    private final ObjectMapper MAPPER = new ObjectMapper();

    // Regression test for issue #421: the annotated creator must win over
    // the wider non-annotated constructor.
    public void testMulitCtor421() throws Exception
    {
        MultiCtor bean = MAPPER.readValue(aposToQuotes("{'a':'123','b':'foo'}"), MultiCtor.class);
        assertNotNull(bean);
        assertEquals("123", bean._a);
        assertEquals("foo", bean._b);
    }
}
|
yrong/nextcloud-token-auth | apps/theming/l10n/it.js | <filename>apps/theming/l10n/it.js<gh_stars>0
// Italian (it) translation catalogue for the "theming" app, registered with
// Nextcloud's client-side localisation layer. The trailing string is the
// plural-forms rule. Likely generated from the .po files — TODO confirm
// before editing by hand.
OC.L10N.register(
    "theming",
    {
    "Saved" : "Salvato",
    "Admin" : "Amministratore",
    "a safe home for all your data" : "una casa sicura per i tuoi dati",
    "The given name is too long" : "Questo nome è troppo lungo",
    "The given web address is too long" : "Questo indirizzo web è troppo lungo",
    "The given slogan is too long" : "Questo slogan è troppo lungo",
    "The given color is invalid" : "Questo colore non è valido",
    "No file uploaded" : "Nessun file caricato",
    "Unsupported image type" : "Tipo di immagine non supportato",
    "You are already using a custom theme" : "Stai già usando un tema personalizzato",
    "Theming" : "Tema",
    "Name" : "Nome",
    "reset to default" : "ripristina valori predefiniti",
    "Web address" : "Indirizzo web",
    "Web address https://…" : "Indirizzo web https://…",
    "Slogan" : "Slogan",
    "Color" : "Colore",
    "Logo" : "Logo",
    "Upload new logo" : "Carica nuovo logo",
    "Login image" : "Immagine di accesso",
    "Upload new login background" : "Carica nuovo sfondo di accesso",
    "Log in image" : "Immagine di accesso"
},
"nplurals=2; plural=(n != 1);");
|
honstar/jackrabbit-oak | oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/SuggestHelper.java | <filename>oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/SuggestHelper.java<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.lucene.util;
import java.io.IOException;
import java.io.Reader;
import java.util.Collections;
import java.util.List;
import org.apache.jackrabbit.oak.plugins.index.lucene.FieldNames;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.suggest.DocumentDictionary;
import org.apache.lucene.search.suggest.Lookup;
import org.apache.lucene.search.suggest.analyzing.FreeTextSuggester;
import org.apache.lucene.util.Version;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Helper class for getting suggest results for a given term, calling a {@link org.apache.lucene.search.suggest.Lookup}
* implementation under the hood.
*/
public class SuggestHelper {

    private static final Logger log = LoggerFactory.getLogger(SuggestHelper.class);

    // Analyzer built around the project's CRTokenizer; used to tokenize the
    // suggest field content for the free-text suggester.
    private static final Analyzer analyzer = new Analyzer() {
        @Override
        protected Analyzer.TokenStreamComponents createComponents(String fieldName, Reader reader) {
            return new Analyzer.TokenStreamComponents(new CRTokenizer(Version.LUCENE_47, reader));
        }
    };

    // NOTE(review): one static suggester is shared across all callers (and
    // therefore across indexes/threads) — confirm that concurrent build()
    // and lookup() calls are acceptable here.
    private static final Lookup suggester = new FreeTextSuggester(analyzer);

    /**
     * Rebuilds the shared suggester from the suggest field of the given
     * index reader. Build failures are logged at debug level and swallowed
     * so suggester problems never interrupt the caller.
     */
    public static void updateSuggester(IndexReader reader) throws IOException {
        // Earlier size-based heuristic for deciding when to rebuild, kept
        // disabled for reference:
//        Terms terms = MultiFields.getTerms(reader, FieldNames.SUGGEST);
//        long size = terms.size() * 2;
//        if (size < 0) {
//            size = terms.getDocCount() / 3;
//        }
//        long count = suggester.getCount();
//        if (size > count) {
        try {
            suggester.build(new DocumentDictionary(reader, FieldNames.SUGGEST, FieldNames.PATH_DEPTH));
        } catch (RuntimeException e) {
            log.debug("could not update the suggester", e);
        }
//        }
    }

    /**
     * Looks up at most 10 suggestions for the query's text. Returns an empty
     * list while the suggester has not been built yet; wraps any lookup
     * failure in a RuntimeException carrying the query for context.
     */
    public static List<Lookup.LookupResult> getSuggestions(SuggestQuery suggestQuery) {
        try {
            long count = suggester.getCount();
            if (count > 0) {
                return suggester.lookup(suggestQuery.getText(), false, 10);
            } else {
                return Collections.emptyList();
            }
        } catch (Exception e) {
            throw new RuntimeException("could not handle Suggest query " + suggestQuery, e);
        }
    }

    /**
     * Parses a native suggest query string of the form "term=<text>[&...]"
     * into a SuggestQuery. Returns null when no "term" parameter is present;
     * throws RuntimeException on malformed parameters (a parameter must be
     * exactly "key=value").
     */
    public static SuggestQuery getSuggestQuery(String suggestQueryString) {
        try {
            String text = null;
            for (String param : suggestQueryString.split("&")) {
                String[] keyValuePair = param.split("=");
                if (keyValuePair.length != 2 || keyValuePair[0] == null || keyValuePair[1] == null) {
                    throw new RuntimeException("Unparsable native Lucene Suggest query: " + suggestQueryString);
                } else {
                    if ("term".equals(keyValuePair[0])) {
                        text = keyValuePair[1];
                    }
                }
            }
            if (text != null) {
                return new SuggestQuery(text);
            } else {
                return null;
            }
        } catch (Exception e) {
            throw new RuntimeException("could not build SuggestQuery " + suggestQueryString, e);
        }
    }

    /** Immutable value object carrying the text of a suggest query. */
    public static class SuggestQuery {
        private final String text;

        public SuggestQuery(String text) {
            this.text = text;
        }

        public String getText() {
            return text;
        }

        @Override
        public String toString() {
            return "SuggestQuery{" +
                    "text='" + text + '\'' +
                    '}';
        }
    }
}
|
KOLANICH/octargs | include/octargs/internal/switch_argument_impl.hpp | <reponame>KOLANICH/octargs<filename>include/octargs/internal/switch_argument_impl.hpp
#ifndef OCTARGS_SWITCH_ARGUMENT_IMPL_HPP_
#define OCTARGS_SWITCH_ARGUMENT_IMPL_HPP_
#include "argument_base_impl.hpp"
namespace oct
{
namespace args
{
namespace internal
{
// Implementation class for "switch" arguments: options assignable by name
// (constructed with FLAG_IS_ASSIGNABLE_BY_NAME on the common base).
template <typename char_T, typename values_storage_T>
class basic_switch_argument_impl
    : public basic_argument_base_impl<basic_switch_argument_impl<char_T, values_storage_T>, char_T, values_storage_T>
{
public:
    using char_type = char_T;
    using values_storage_type = values_storage_T;

    using base_type = basic_argument_base_impl<basic_switch_argument_impl<char_type, values_storage_type>, char_type,
        values_storage_type>;

    using string_type = std::basic_string<char_type>;
    using string_vector_type = std::vector<string_type>;

    using parser_data_weak_ptr_type = typename base_type::parser_data_weak_ptr_type;

    // names: all aliases (e.g. short and long form) under which this switch
    // is registered with the parser.
    explicit basic_switch_argument_impl(parser_data_weak_ptr_type parser_data_ptr, const string_vector_type& names)
        : base_type(parser_data_ptr, base_type::FLAG_IS_ASSIGNABLE_BY_NAME, names)
    {
        // noop
    }

    // The three setters below simply re-expose the base-class occurrence
    // count configuration (min/max/unlimited) on this concrete type.
    void set_min_count(std::size_t count)
    {
        base_type::set_min_count(count);
    }

    void set_max_count(std::size_t count)
    {
        base_type::set_max_count(count);
    }

    void set_max_count_unlimited()
    {
        base_type::set_max_count_unlimited();
    }
};
} // namespace internal
} // namespace args
} // namespace oct
#endif // OCTARGS_SWITCH_ARGUMENT_IMPL_HPP_
|
pramodbiligiri/datahub | datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java | package com.linkedin.datahub.upgrade.restorebackup.backupreader;
import com.linkedin.datahub.upgrade.UpgradeContext;
import javax.annotation.Nonnull;
/**
* Base interface for BackupReader used for creating the BackupIterator to retrieve EbeanAspectV2 object to be
* ingested back into GMS
*/
public interface BackupReader {
  /** @return a human-readable name identifying this reader implementation. */
  String getName();

  /**
   * Creates an iterator over the backed-up EbeanAspectV2 records for the
   * given upgrade run.
   *
   * @param context context of the in-progress upgrade (carries its arguments)
   * @return never-null iterator over the backup's aspects
   */
  @Nonnull
  EbeanAspectBackupIterator getBackupIterator(UpgradeContext context);
}
|
ndau/ndaujs | test/api/ServiceDiscovery-test.js | /* ----- ---- --- -- -
* Copyright 2020 The Axiom Foundation. All Rights Reserved.
*
* Licensed under the Apache License 2.0 (the "License"). You may not use
* this file except in compliance with the License. You can obtain a copy
* in the file LICENSE in the source distribution or at
* https://www.apache.org/licenses/LICENSE-2.0.txt
* - -- --- ---- -----
*/
import ServiceDiscovery from '../../src/api/ServiceDiscovery'
import MockHelper from './helpers/MockHelper'
import { expect } from 'chai'
describe('ServiceDiscovery', () => {
  // Each test stubs the discovery endpoint through MockHelper, which pins
  // responses to testnet hosts; the assertions check the discovered URLs.
  it('getBlockchainServiceNodeURL should return something back', async () => {
    MockHelper.mockServiceDiscovery()

    const serverUrl = await ServiceDiscovery.getBlockchainServiceNode()
    // its testnet because that is what we pull in within MockHelper
    expect(serverUrl.includes('api.ndau.tech:31300')).to.be.true
  })

  it('getRecoverServiceNodeURL should return something back', async () => {
    MockHelper.mockServiceDiscovery()

    const serverUrl = await ServiceDiscovery.getRecoveryServiceNode()
    // its testnet because that is what we pull in within MockHelper
    expect(serverUrl.includes('recovery.ndau.tech')).to.be.true
  })
})
frankgh/cassandra | test/unit/org/apache/cassandra/cql3/validation/operations/AutoSnapshotTest.java | <reponame>frankgh/cassandra
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.validation.operations;
import java.time.Instant;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import com.google.common.util.concurrent.Uninterruptibles;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.config.DurationSpec;
import org.apache.cassandra.cql3.CQLTester;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.service.snapshot.TableSnapshot;
import org.assertj.core.api.Condition;
import static org.apache.cassandra.db.ColumnFamilyStore.SNAPSHOT_DROP_PREFIX;
import static org.assertj.core.api.Assertions.assertThat;
@RunWith(Parameterized.class)
public class AutoSnapshotTest extends CQLTester
{
    /** TTL applied to auto snapshots in the TTL-enabled parameterizations. */
    static int TTL_SECS = 1;

    // Global config saved in beforeClass and restored in afterClass so other
    // test classes see the original auto-snapshot settings.
    public static Boolean enabledBefore;
    public static DurationSpec ttlBefore;

    @BeforeClass
    public static void beforeClass()
    {
        enabledBefore = DatabaseDescriptor.isAutoSnapshot();
        ttlBefore = DatabaseDescriptor.getAutoSnapshotTtl();
    }

    @AfterClass
    public static void afterClass()
    {
        DatabaseDescriptor.setAutoSnapshot(enabledBefore);
        DatabaseDescriptor.setAutoSnapshotTtl(ttlBefore);
    }

    // Dynamic parameters used during tests
    @Parameterized.Parameter(0)
    public Boolean autoSnapshotEnabled;

    @Parameterized.Parameter(1)
    public DurationSpec autoSnapshotTTl;

    @Before
    public void beforeTest() throws Throwable
    {
        super.beforeTest();
        // Make sure we're testing the correct parameterized settings
        DatabaseDescriptor.setAutoSnapshot(autoSnapshotEnabled);
        DatabaseDescriptor.setAutoSnapshotTtl(autoSnapshotTTl);
    }

    // Test for all values of [auto_snapshot=[true,false], ttl=[1s, null]
    @Parameterized.Parameters( name = "enabled={0},ttl={1}" )
    public static Collection options() {
        return Arrays.asList(new Object[][] {
            { true, DurationSpec.inSeconds(TTL_SECS) },
            { false, DurationSpec.inSeconds(TTL_SECS) },
            { true, null },
            { false, null },
        });
    }

    /**
     * TRUNCATE must create a "truncated" auto snapshot (with TTL when
     * configured). Fix: the original body was a copy-paste of the DROP test —
     * it executed DROP TABLE and checked SNAPSHOT_DROP_PREFIX, leaving the
     * truncate path untested. (Method name keeps its original spelling.)
     */
    @Test
    public void testAutoSnapshotOnTrucate() throws Throwable
    {
        createTable("CREATE TABLE %s (a int, b int, c int, PRIMARY KEY(a, b))");
        // Check there are no snapshots
        ColumnFamilyStore tableDir = getCurrentColumnFamilyStore();
        assertThat(tableDir.listSnapshots()).isEmpty();

        execute("INSERT INTO %s (a, b, c) VALUES (?, ?, ?)", 0, 0, 0);
        execute("INSERT INTO %s (a, b, c) VALUES (?, ?, ?)", 0, 1, 1);
        flush();

        execute("TRUNCATE %s");
        verifyAutoSnapshot(ColumnFamilyStore.SNAPSHOT_TRUNCATE_PREFIX, tableDir);
    }

    /** DROP TABLE must create a "dropped" auto snapshot (with TTL when configured). */
    @Test
    public void testAutoSnapshotOnDrop() throws Throwable
    {
        createTable("CREATE TABLE %s (a int, b int, c int, PRIMARY KEY(a, b))");
        // Check there are no snapshots
        ColumnFamilyStore tableDir = getCurrentColumnFamilyStore();
        assertThat(tableDir.listSnapshots()).isEmpty();

        execute("INSERT INTO %s (a, b, c) VALUES (?, ?, ?)", 0, 0, 0);
        execute("INSERT INTO %s (a, b, c) VALUES (?, ?, ?)", 0, 1, 1);
        flush();

        execute("DROP TABLE %s");
        verifyAutoSnapshot(SNAPSHOT_DROP_PREFIX, tableDir);
    }

    /**
     * Verify that:
     * - A snapshot is created when auto_snapshot = true.
     * - TTL is added to the snapshot when auto_snapshot_ttl != null
     */
    private void verifyAutoSnapshot(String snapshotPrefix, ColumnFamilyStore tableDir)
    {
        Map<String, TableSnapshot> snapshots = tableDir.listSnapshots();
        if (autoSnapshotEnabled)
        {
            assertThat(snapshots).hasSize(1);
            assertThat(snapshots).hasKeySatisfying(new Condition<>(k -> k.startsWith(snapshotPrefix), "is auto snapshot"));
            TableSnapshot snapshot = snapshots.values().iterator().next();
            assertThat(snapshot.getTableName()).isEqualTo(currentTable());
            if (autoSnapshotTTl == null)
            {
                // check that the snapshot has NO TTL
                assertThat(snapshot.isExpiring()).isFalse();
            }
            else
            {
                // check that snapshot has TTL and is expired after 1 second
                assertThat(snapshot.isExpiring()).isTrue();
                Uninterruptibles.sleepUninterruptibly(TTL_SECS, TimeUnit.SECONDS);
                assertThat(snapshot.isExpired(Instant.now())).isTrue();
            }
        }
        else
        {
            // No snapshot should be created when auto_snapshot = false
            assertThat(snapshots).isEmpty();
        }
    }
}
|
mautematico/lamassu-server | new-lamassu-admin/src/pages/Notifications/components/SingleFieldEditableNumber.js | <gh_stars>1-10
import { Form, Formik } from 'formik'
import React, { useContext } from 'react'
import * as Yup from 'yup'
import NotificationsCtx from '../NotificationsContext'
import Header from './EditHeader'
import EditableNumber from './EditableNumber'
// Formik-backed editor for a single numeric notification setting.
// Renders an edit header (edit / save / cancel controls) above an
// EditableNumber input; the current value is read from, and saved through,
// NotificationsContext under `data[name]` / `save(section, ...)`.
const SingleFieldEditableNumber = ({
  title,
  label,
  width = 80,
  name,
  section,
  className
}) => {
  const {
    save,
    data,
    currency,
    isEditing,
    isDisabled,
    setEditing
  } = useContext(NotificationsCtx)

  // The field must be a non-negative integer; schema.cast() coerces the raw
  // string input back to a number before it is persisted in onSubmit.
  const schema = Yup.object().shape({
    [name]: Yup.number()
      .integer()
      .min(0)
      .required()
  })

  return (
    <Formik
      enableReinitialize
      initialValues={{ [name]: (data && data[name]) ?? '' }}
      validationSchema={schema}
      onSubmit={it => save(section, schema.cast(it))}
      onReset={() => {
        // Cancelling the form closes this field's editor.
        setEditing(name, false)
      }}>
      <Form className={className}>
        <Header
          title={title}
          editing={isEditing(name)}
          disabled={isDisabled(name)}
          setEditing={it => setEditing(name, it)}
        />
        <EditableNumber
          label={label}
          name={name}
          editing={isEditing(name)}
          width={width}
          displayValue={x => (x === '' ? '-' : x)}
          decoration={currency}
        />
      </Form>
    </Formik>
  )
}

export default SingleFieldEditableNumber
|
HuttonICS/germinate | src/jhi/germinate/client/widget/element/ScatterChartSelection.java | <reponame>HuttonICS/germinate
/*
* Copyright 2017 Information and Computational Sciences,
* The James Hutton Institute.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jhi.germinate.client.widget.element;
import com.google.gwt.core.client.*;
import com.google.gwt.uibinder.client.*;
import com.google.gwt.user.client.ui.*;
import org.gwtbootstrap3.client.ui.*;
import java.util.*;
import jhi.germinate.client.i18n.*;
import jhi.germinate.client.widget.listbox.*;
import jhi.germinate.shared.datastructure.database.*;
/**
 * Selection panel for scatter ("plot-by-plot") charts: lets the user pick the
 * two objects to plot against each other (phenotypes for trials data,
 * compounds for compound data), an optional accessions {@link Group} and a
 * point-coloring strategy.
 *
 * @author <NAME>
 */
public class ScatterChartSelection<T extends DatabaseObject> extends Composite
{
    interface ScatterChartSelectionUiBinder extends UiBinder<HTMLPanel, ScatterChartSelection>
    {
    }

    private static ScatterChartSelectionUiBinder ourUiBinder = GWT.create(ScatterChartSelectionUiBinder.class);

    @UiField
    FormLabel firstObjectLabel;
    // provided = true: the concrete list box type depends on the
    // ExperimentType, so it is instantiated in the constructor rather than by
    // UiBinder — and it MUST be set before createAndBindUi() runs.
    @UiField(provided = true)
    GerminateValueListBox firstObject;
    @UiField
    FormLabel secondObjectLabel;
    @UiField(provided = true)
    GerminateValueListBox secondObject;
    @UiField
    GroupListBox group;
    @UiField
    StringListBox color;

    /**
     * Builds the selection UI for the given experiment type.
     *
     * @param type    decides which list box flavor and labels are used
     * @param objects selectable objects; the first entry becomes the initial
     *                selection of both boxes (assumed non-empty)
     * @param groups  selectable groups; may be null (treated as empty)
     */
    public ScatterChartSelection(ExperimentType type, List<T> objects, List<Group> groups)
    {
        if (groups == null)
            groups = new ArrayList<>();
        // NOTE(review): neither switch has a default branch; an ExperimentType
        // other than trials/compound would leave the provided fields null and
        // NPE inside createAndBindUi(). Confirm only these two types reach
        // this widget. (compound being the last case, the missing break is
        // harmless here.)
        switch (type)
        {
            case trials:
                firstObject = new PhenotypeListBox();
                secondObject = new PhenotypeListBox();
                break;
            case compound:
                firstObject = new CompoundListBox();
                secondObject = new CompoundListBox();
        }
        initWidget(ourUiBinder.createAndBindUi(this));
        switch (type)
        {
            case trials:
                firstObjectLabel.setText(Text.LANG.phenotypeFirstPhenotype());
                secondObjectLabel.setText(Text.LANG.phenotypeSecondPhenotype());
                break;
            case compound:
                firstObjectLabel.setText(Text.LANG.compoundFirstCompound());
                secondObjectLabel.setText(Text.LANG.compoundSecondCompound());
        }
        // Pre-select the first object in both boxes without firing events.
        firstObject.setValue(objects.get(0), false);
        firstObject.setAcceptableValues(objects);
        secondObject.setValue(objects.get(0), false);
        secondObject.setAcceptableValues(objects);
        group.setAcceptableValues(groups);
        // Coloring options: compound data has no "treatment" dimension.
        String[] coloringValues;
        switch (type)
        {
            case trials:
                coloringValues = new String[]{Text.LANG.trialsPByPColorByNone(), Text.LANG.trialsPByPColorByDataset(), Text.LANG.trialsPByPColorByYear(), Text.LANG.trialsPByPColorByTreatment()};
                break;
            case compound:
            default:
                coloringValues = new String[]{Text.LANG.trialsPByPColorByNone(), Text.LANG.trialsPByPColorByDataset(), Text.LANG.trialsPByPColorByYear()};
                break;
        }
        color.setValue(coloringValues[0], false);
        color.setAcceptableValues(coloringValues);
    }

    /** @return the objects selected for the x dimension */
    public List<T> getFirstObject()
    {
        return firstObject.getSelections();
    }

    /** @return the objects selected for the y dimension */
    public List<T> getSecondObject()
    {
        return secondObject.getSelections();
    }

    /** @return the selected accessions group (may be null if none chosen) */
    public Group getGroup()
    {
        return group.getSelection();
    }

    /** @return the selected coloring strategy (one of the localized labels) */
    public String getColor()
    {
        return color.getSelection();
    }
} |
Lavish883/Loki-Stream | node_modules/@babel/types/scripts/generators/asserts.js | version https://git-lfs.github.com/spec/v1
oid sha256:fcb76b02e0a4d5e50f27bcaa0e12729c2108e89ab7af803fd0468907ecb6184f
size 1458
|
dpaiton/DeepSparseCoding | tf1x/modules/sae_module.py | <filename>tf1x/modules/sae_module.py
import tensorflow as tf
from DeepSparseCoding.tf1x.utils.trainable_variable_dict import TrainableVariableDict
import DeepSparseCoding.tf1x.utils.entropy_functions as ef
from DeepSparseCoding.tf1x.modules.ae_module import AeModule
from DeepSparseCoding.tf1x.modules.activations import sigmoid
class SaeModule(AeModule):
  def __init__(self, data_tensor, layer_types, enc_channels, dec_channels, patch_size,
    conv_strides, sparse_mult, w_decay_mult, w_norm_mult, target_act, act_funcs, dropout,
    tie_dec_weights, w_init_type, variable_scope="sae"):
    """
    Sparse autoencoder (Andrew Ng's 2011 Stanford CS294A lecture notes):
    an AeModule whose total loss adds a KL-divergence sparsity penalty that
    pushes each latent unit's batch-averaged activation towards target_act.

    Inputs:
      data_tensor: input batch tensor
      layer_types: per-layer type specifiers, forwarded to AeModule
      enc_channels [list of ints] the number of output channels per encoder layer
        Last entry is the number of latent units
      dec_channels [list of ints] the number of output channels per decoder layer
        Last entry must be the number of input pixels for FC layers and channels for CONV layers
      patch_size: number of (y, x) inputs for convolutional patches
      conv_strides: list of strides for convolution [batch, y, x, channels]
      sparse_mult: multiplier for the KL sparsity penalty (see compute_sparse_loss)
      w_decay_mult: weight decay multiplier
      w_norm_mult: tradeoff multiplier for weight norm loss (asks weight norm to == 1)
      target_act: target mean activation per latent unit (rho in the KL penalty)
      act_funcs: activation functions
      dropout: specifies the keep probability or None
      tie_dec_weights: forwarded to AeModule
        # NOTE(review): presumably ties decoder weights to encoder weights — confirm in AeModule
      w_init_type: weight initialization specifier, forwarded to AeModule
        # NOTE(review): the original docstring described this as a bool that l2-normalizes
        # w_init over [0] (enc) / [-1] (dec); confirm against AeModule
      variable_scope - specifies the variable_scope for the module
    Outputs:
      dictionary
    """
    self.sparse_mult = sparse_mult
    self.target_act = target_act
    super(SaeModule, self).__init__(data_tensor, layer_types, enc_channels, dec_channels,
      patch_size, conv_strides, w_decay_mult, w_norm_mult, act_funcs, dropout, tie_dec_weights,
      w_init_type, variable_scope)

  def compute_sparse_loss(self, a_in):
    """Return the KL-divergence sparsity penalty on activations a_in, scaled
    by self.sparse_mult.

    The mean activity per unit is computed over all axes except the last, then
    compared to self.target_act via the Bernoulli KL divergence, summed over
    units.
    """
    with tf.compat.v1.variable_scope("unsupervised"):
      # Reduce over every axis except the last (units) to get per-unit averages.
      reduc_dims = tuple(range(len(a_in.get_shape().as_list()) - 1))
      avg_act = tf.reduce_mean(input_tensor=a_in, axis=reduc_dims, name="batch_avg_activity")
      # Two terms of KL(target || avg) for a Bernoulli variable; safe_log
      # guards against log(0).
      p_dist = self.target_act * tf.subtract(ef.safe_log(self.target_act),
        ef.safe_log(avg_act), name="kl_p")
      q_dist = (1-self.target_act) * tf.subtract(ef.safe_log(1-self.target_act),
        ef.safe_log(1-avg_act), name="kl_q")
      kl_divergence = tf.reduce_sum(input_tensor=tf.add(p_dist, q_dist), name="kld")
      sparse_loss = tf.multiply(self.sparse_mult, kl_divergence, name="sparse_loss")
      return sparse_loss

  def compute_total_loss(self):
    """Populate self.loss_dict (recon, sparsity, weight decay, weight norm)
    and set self.total_loss to their sum."""
    with tf.compat.v1.variable_scope("loss") as scope:
      self.loss_dict = {"recon_loss":self.compute_recon_loss(self.reconstruction),
        "sparse_loss":self.compute_sparse_loss(self.a),
        "weight_decay_loss":self.compute_weight_decay_loss(),
        "weight_norm_loss":self.compute_weight_norm_loss()}
      self.total_loss = tf.add_n([loss for loss in self.loss_dict.values()], name="total_loss")
|
witchlove/hib_immo | app/models/contact.js | <reponame>witchlove/hib_immo<filename>app/models/contact.js
import DS from 'ember-data';

// `contact` model: one question submitted through the site's contact form.
// BUG FIX: Ember Data's built-in attribute transforms are lowercase
// ('string', 'number', ...); DS.attr('String') refers to a transform that is
// never registered. Also added the missing ember-data import — this file is
// an ES module (it uses `export default`), so DS is not a global here.
var Contact = DS.Model.extend(
  {
    subject: DS.attr('string'),
    question: DS.attr('string'),
    from: DS.attr('string'),
    name: DS.attr('string'),
    firstName: DS.attr('string'),
    phone: DS.attr('string'),
    mail: DS.attr('string')
  }
);

export default Contact;
derrykid/Line-MessageAPI-Poker | sample-spring-boot-echo/src/main/java/Game/Player.java | package Game;
import Card.Card;
import Poker.Analyzer.Classification;

import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
/**
 * A participant in a poker hand: identity, seat position, chip stack, hole
 * cards, the classification of the best hand, and per-round betting state.
 *
 * <p>Note: "Statue" in several member names is a historical typo for
 * "status"; the names are kept for API compatibility with existing callers.
 */
public class Player {

    /** Player is still in the hand and has not acted this round. */
    public static final int ALIVE = 0;
    /** Player has checked this round. */
    public static final int CHECK = 1;
    /** Player has folded and is out of the hand. */
    public static final int FOLD = 2;

    private String userID;
    /*
     * position starts with 0
     * 0 - small blind
     * 1 - big blind, etc
     * */
    private int position;
    private int chip;
    private Set<Card> playerCards;
    private Classification handClassification;
    private String userName;
    private int playerStatue;
    // Chips the player has committed to the pot in the current betting round.
    private int chipOnTheTable;

    /**
     * Creates a player with an empty (sorted) hand, ALIVE status and the
     * default starting stack of 1000 chips.
     */
    public Player(String userID, String userName) {
        this.userID = userID;
        this.playerCards = new TreeSet<>();
        this.userName = userName;
        this.playerStatue = ALIVE;
        this.chip = 1000;
        this.chipOnTheTable = 0;
    }

    /** @return chips committed to the pot this round */
    public int getChipOnTheTable() {
        return this.chipOnTheTable;
    }

    private void addChipOnTheTable(int moneyBet) {
        this.chipOnTheTable = this.chipOnTheTable + moneyBet;
    }

    /** Resets the per-round committed amount (call when a betting round ends). */
    public void clearChipOnTheTable() {
        this.chipOnTheTable = 0;
    }

    public void setCheck() {
        this.playerStatue = CHECK;
    }

    public void foldHand() {
        this.playerStatue = FOLD;
    }

    /** @return one of {@link #ALIVE}, {@link #CHECK}, {@link #FOLD} */
    public int getPlayerStatue() {
        return this.playerStatue;
    }

    /**
     * Equality is based on identity and hand state (userID, position, chip,
     * playerCards, handClassification, userName). Transient round state
     * (playerStatue, chipOnTheTable) is deliberately excluded, matching the
     * original implementation.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Player player = (Player) o;
        return position == player.position
            && chip == player.chip
            && Objects.equals(userID, player.userID)
            && Objects.equals(playerCards, player.playerCards)
            && Objects.equals(handClassification, player.handClassification)
            && Objects.equals(userName, player.userName);
    }

    /** Consistent with {@link #equals(Object)}: hashes the same field set. */
    @Override
    public int hashCode() {
        return Objects.hash(userID, position, chip, playerCards, handClassification, userName);
    }

    public String getUserName() {
        return this.userName;
    }

    public void setHandClassification(Classification classification) {
        this.handClassification = classification;
    }

    public Classification getHandClassification() {
        return this.handClassification;
    }

    /** Adds one hole/board card to this player's (sorted) card set. */
    public void addPlayerCards(Card card) {
        playerCards.add(card);
    }

    public Set<Card> getPlayerCards() {
        return this.playerCards;
    }

    @Override
    public String toString() {
        return "Player{" +
                "userID='" + userID + '\'' +
                ", position=" + position +
                ", chip=" + chip +
                ", playerCards=" + playerCards +
                '}';
    }

    public String getUserID() {
        return userID;
    }

    public int getPosition() {
        return position;
    }

    public void setPosition(int position) {
        this.position = position;
    }

    public int getChip() {
        return chip;
    }

    public void setChip(int chip) {
        this.chip = chip;
    }

    /**
     * Commits {@code chip} chips to the pot and deducts them from the stack.
     * NOTE(review): no check that the player can cover the bet — the stack can
     * go negative; confirm callers enforce the limit.
     */
    public void bet(int chip) {
        addChipOnTheTable(chip);
        this.chip = this.chip - chip;
    }

    /** Awards {@code pot} chips to this player (e.g. on winning the hand). */
    public void addChip(int pot){
        this.chip = this.chip + pot;
    }
}
|
Lens-Platform/Platform | src/libraries/core/core-utilities/httperr.go | package core_delay
import (
"bytes"
"encoding/json"
"net/http"
)
// HandleError checks if the error is not nil, writes it to the output
// with the specified status code, and returns true. If error is nil it returns false.
func HandleError(w http.ResponseWriter, err error, statusCode int) bool {
if err == nil {
return false
}
http.Error(w, string(err.Error()), statusCode)
return true
}
func JSONResponse(w http.ResponseWriter, r *http.Request, result interface{}) {
body, err := json.Marshal(result)
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.Header().Set("X-Content-Type-Options", "nosniff")
w.WriteHeader(http.StatusOK)
w.Write(prettyJSON(body))
}
// JSONResponseCode marshals result and writes it as indented JSON with the
// given HTTP status code. A marshal failure is answered with a bare 500.
func JSONResponseCode(w http.ResponseWriter, r *http.Request, result interface{}, responseCode int) {
	payload, err := json.Marshal(result)
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		return
	}
	w.Header().Set("X-Content-Type-Options", "nosniff")
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	w.WriteHeader(responseCode)
	w.Write(prettyJSON(payload))
}
// ErrorResponse writes an error payload {"code": ..., "message": ...} as
// indented JSON. NOTE(review): the HTTP status line is always 200 OK — the
// supplied code appears only in the body. This reproduces the original
// behaviour exactly; confirm it is intentional before switching to
// WriteHeader(code).
func ErrorResponse(w http.ResponseWriter, r *http.Request, message string, code int) {
	payload := struct {
		Code    int    `json:"code"`
		Message string `json:"message"`
	}{
		Code:    code,
		Message: message,
	}

	encoded, err := json.Marshal(payload)
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	w.Header().Set("X-Content-Type-Options", "nosniff")
	w.WriteHeader(http.StatusOK)
	w.Write(prettyJSON(encoded))
}
// prettyJSON re-indents raw JSON with a one-space indent. If b is not valid
// JSON, the input is returned unchanged — previously json.Indent's error was
// ignored and a partially indented buffer could be emitted. Valid input
// (everything produced by json.Marshal above) behaves exactly as before.
func prettyJSON(b []byte) []byte {
	var out bytes.Buffer
	if err := json.Indent(&out, b, "", " "); err != nil {
		return b
	}
	return out.Bytes()
}
|
Vizzuality/trase | spec/support/contexts/api/v3/brazil/brazil_soy_flow_quants.rb | <reponame>Vizzuality/trase<filename>spec/support/contexts/api/v3/brazil/brazil_soy_flow_quants.rb
# Flow-quant fixtures for the brazil soy flows: each table entry attaches one
# quant value to one of the flows defined by the included contexts. The table
# keys become the let! fixture names; send() resolves the flow/quant lets
# lazily inside each example, exactly as the original hand-written blocks did.
shared_context 'api v3 brazil soy flow quants' do
  include_context 'api v3 brazil soy flows'
  include_context 'api v3 quants'

  {
    api_v3_brazil_soy_flow1_volume: [:api_v3_brazil_soy_flow1, :api_v3_volume, 10],
    api_v3_brazil_soy_flow2_volume: [:api_v3_brazil_soy_flow2, :api_v3_volume, 15],
    api_v3_brazil_soy_flow3_volume: [:api_v3_brazil_soy_flow3, :api_v3_volume, 20],
    api_v3_brazil_soy_flow4_volume: [:api_v3_brazil_soy_flow4, :api_v3_volume, 25],
    api_v3_brazil_soy_flow5_volume: [:api_v3_brazil_soy_flow5, :api_v3_volume, 30],
    api_v3_brazil_soy_flow6_volume: [:api_v3_brazil_soy_flow6, :api_v3_volume, 0],
    api_v3_brazil_soy_flow1_deforestation_v2: [:api_v3_brazil_soy_flow1, :api_v3_deforestation_v2, 20],
    api_v3_brazil_soy_flow2_deforestation_v2: [:api_v3_brazil_soy_flow2, :api_v3_deforestation_v2, 25],
    api_v3_brazil_soy_flow1_potential_soy_deforestation_v2: [:api_v3_brazil_soy_flow1, :api_v3_potential_soy_deforestation_v2, 30],
    api_v3_brazil_soy_flow2_potential_soy_deforestation_v2: [:api_v3_brazil_soy_flow2, :api_v3_potential_soy_deforestation_v2, 35],
    api_v3_brazil_soy_flow1_agrosatelite_soy_defor_: [:api_v3_brazil_soy_flow1, :api_v3_agrosatelite_soy_defor_, 40],
    api_v3_brazil_soy_flow2_agrosatelite_soy_defor_: [:api_v3_brazil_soy_flow2, :api_v3_agrosatelite_soy_defor_, 45],
    api_v3_brazil_soy_flow1_land_use: [:api_v3_brazil_soy_flow1, :api_v3_land_use, 50],
    api_v3_brazil_soy_flow2_land_use: [:api_v3_brazil_soy_flow2, :api_v3_land_use, 55],
    api_v3_brazil_soy_flow1_biodiversity: [:api_v3_brazil_soy_flow1, :api_v3_biodiversity, 60],
    api_v3_brazil_soy_flow2_biodiversity: [:api_v3_brazil_soy_flow2, :api_v3_biodiversity, 65],
    api_v3_brazil_soy_flow1_ghg_: [:api_v3_brazil_soy_flow1, :api_v3_ghg_, 70],
    api_v3_brazil_soy_flow2_ghg_: [:api_v3_brazil_soy_flow2, :api_v3_ghg_, 75]
  }.each do |fixture_name, (flow_name, quant_name, value)|
    let!(fixture_name) do
      FactoryBot.create(
        :api_v3_flow_quant,
        flow: send(flow_name),
        quant: send(quant_name),
        value: value
      )
    end
  end
end
|
hao-wang/Montage | js-test-suite/testsuite/cf1a0efae1078faee510f7bda78d4902.js | wasPostTestScriptParsed = true;
if (this.jsTestIsAsync) {
if (this.wasFinishJSTestCalled)
finishJSTest();
} else
finishJSTest();
|
Killswitchz/ggrc-core | src/ggrc/assets/javascripts/components/assessment/inline.js | /*!
Copyright (C) 2017 Google Inc.
Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
*/
(function (can, GGRC) {
  'use strict';

  // Shell template of the component; the per-type editor templates live in
  // the same inline/ folder, one mustache file per `type` value.
  var tpl = can.view(GGRC.mustache_path +
    '/components/assessment/inline/inline.mustache');
  var innerTplFolder = GGRC.mustache_path + '/components/assessment/inline';

  // Resolves a (possibly computed) `type` to the path of its inner template.
  function getTemplateByType(type) {
    type = can.Mustache.resolve(type);
    return innerTplFolder + '/' + type + '.mustache';
  }

  /**
   * <assessment-inline-edit>: click-to-edit control for a single assessment
   * attribute. The committed value lives in `value`; the editor works on a
   * copy in `context.value` so a cancel can discard changes. The host element
   * may attach a `can-before-edit` hook whose promise must resolve before the
   * editor opens.
   */
  GGRC.Components('assessmentInlineEdit', {
    tag: 'assessment-inline-edit',
    template: tpl,
    viewModel: {
      define: {
        // Set as an HTML attribute on the host; disables entering edit mode.
        readonly: {
          type: 'htmlbool',
          value: false
        },
        isEditable: {
          get: function () {
            return !(this.attr('readonly'));
          }
        }
      },
      titleText: '',
      type: '', // selects the inner template (e.g. text, dropdown, person)
      value: null, // committed value
      options: [],
      isSaving: false,
      isEdit: false, // true while the editor is open
      context: {
        value: null, // working copy edited by the inner template
        options: null
      },
      _EV_BEFORE_EDIT: 'before-edit', // before entering the edit mode
      // Person-type editors: store the picked person on the working copy.
      setPerson: function (scope, el, ev) {
        this.attr('context.value', ev.selectedItem.serialize());
      },
      unsetPerson: function (scope, el, ev) {
        ev.preventDefault();
        this.attr('context.value', undefined);
      },
      /**
       * Enter the edit mode if editing is allowed (i.e. the readonly option is
       * not set).
       *
       * If the readonly option is enabled, do not do anything. The same if the
       * beforeEdit handler is not defined, or if the promise it returns is not
       * resolved.
       *
       * @param {can.Map} scope - the scope object itself (this)
       * @param {jQuery.Element} $el - the DOM element that triggered the event
       * @param {jQuery.Event} event - the event object
       */
      enableEdit: function (scope, $el, event) {
        var confirmation;
        var onBeforeEdit = this.$rootEl.attr('can-' + scope._EV_BEFORE_EDIT);
        event.preventDefault();
        event.stopPropagation();
        if (this.attr('readonly')) {
          return;
        }
        if (!onBeforeEdit) {
          this.attr('isEdit', true);
          return;
        }
        confirmation = this.$rootEl.triggerHandler({
          type: this._EV_BEFORE_EDIT
        });
        confirmation.done(function () {
          this.attr('isEdit', true);
        }.bind(this)); // and do nothing if no confirmation by the user
      },
      // Close the editor and restore the committed value into the copy.
      onCancel: function (scope) {
        var value = scope.attr('value');
        scope.attr('isEdit', false);
        scope.attr('context.value', value);
      },
      // Commit the working copy. Whitespace-only strings are treated as empty.
      // NOTE(review): isSaving is set here but never cleared in this file —
      // presumably reset by whatever observes `value`/`isSaving`; confirm.
      onSave: function () {
        var oldValue = this.attr('value');
        var value = this.attr('context.value');
        this.attr('isEdit', false);
        // In case value is String and consists only of spaces - do nothing
        if (typeof value === 'string' && !value.trim()) {
          this.attr('context.value', '');
          value = null;
        }
        if (oldValue === value) {
          return;
        }
        this.attr('value', value);
        this.attr('isSaving', true);
      }
    },
    init: function () {
      // Seed the working copy with the initial committed value.
      var viewModel = this.viewModel;
      var value = viewModel.attr('value');
      viewModel.attr('context.value', value);
    },
    events: {
      inserted: function (el) {
        this.viewModel.attr('$rootEl', $(el));
      },
      // A mousedown anywhere outside the component cancels an open editor.
      '{window} mousedown': function (el, ev) {
        var viewModel = this.viewModel;
        var isInside = GGRC.Utils.events.isInnerClick(this.element, ev.target);
        if (!isInside && viewModel.attr('isEdit')) {
          viewModel.onCancel(viewModel);
        }
      }
    },
    helpers: {
      renderInnerTemplateByType: function (type, options) {
        return can.view.render(getTemplateByType(type), options.context);
      }
    }
  });
})(window.can, window.GGRC);
|
shettyh/azure-cosmosdb-java | commons/src/main/java/com/microsoft/azure/cosmosdb/RetryOptions.java | <reponame>shettyh/azure-cosmosdb-java<filename>commons/src/main/java/com/microsoft/azure/cosmosdb/RetryOptions.java
/*
* The MIT License (MIT)
* Copyright (c) 2018 Microsoft Corporation
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.microsoft.azure.cosmosdb;
/**
* Encapsulates retry options in the Azure Cosmos DB database service.
*/
/**
 * Encapsulates retry options in the Azure Cosmos DB database service.
 *
 * <p>Two knobs are exposed: how many times a request throttled with HTTP 429
 * is retried, and a ceiling on the cumulative time spent waiting between
 * those retries.
 */
public class RetryOptions {
    // Defaults: up to 9 retries (10 attempts total) and a 30 second wait budget.
    private int maxRetryAttemptsOnThrottledRequests = 9;
    private int maxRetryWaitTimeInSeconds = 30;

    /**
     * Creates a new instance with the default values (9 retries, 30 seconds).
     */
    public RetryOptions() {
    }

    /**
     * Returns the maximum number of retries performed when the service
     * rate-limits the client (HTTP 429).
     *
     * @return the maximum number of retries.
     */
    public int getMaxRetryAttemptsOnThrottledRequests() {
        return maxRetryAttemptsOnThrottledRequests;
    }

    /**
     * Sets the maximum number of retries performed when the service
     * rate-limits the client (HTTP 429, Too Many Requests). The SDK waits for
     * the interval indicated by the service before reissuing the request, up
     * to this many times. Default: 9 (i.e. at most 10 attempts in total).
     *
     * @param maxRetryAttemptsOnThrottledRequests the max number of retry attempts on failed requests due to a
     *                                            throttle error; must be >= 0.
     */
    public void setMaxRetryAttemptsOnThrottledRequests(int maxRetryAttemptsOnThrottledRequests) {
        boolean valid = maxRetryAttemptsOnThrottledRequests >= 0;
        if (!valid) {
            throw new IllegalArgumentException("maxRetryAttemptsOnThrottledRequests value must be a positive integer.");
        }
        this.maxRetryAttemptsOnThrottledRequests = maxRetryAttemptsOnThrottledRequests;
    }

    /**
     * Returns the maximum cumulative retry wait time in seconds.
     *
     * @return the maximum retry time in seconds.
     */
    public int getMaxRetryWaitTimeInSeconds() {
        return maxRetryWaitTimeInSeconds;
    }

    /**
     * Sets the maximum cumulative retry wait time in seconds. When the total
     * time spent honouring Retry-After responses exceeds this budget, the SDK
     * stops retrying and surfaces the error. Default: 30 seconds.
     *
     * @param maxRetryWaitTimeInSeconds the maximum number of seconds a request will be retried;
     *                                  must be between 0 and Integer.MAX_VALUE / 1000 (the value is
     *                                  later converted to milliseconds without overflow).
     */
    public void setMaxRetryWaitTimeInSeconds(int maxRetryWaitTimeInSeconds) {
        int maxAllowed = Integer.MAX_VALUE / 1000;
        if (maxRetryWaitTimeInSeconds < 0 || maxRetryWaitTimeInSeconds > maxAllowed) {
            throw new IllegalArgumentException(
                    "value must be a positive integer between the range of 0 to " + maxAllowed);
        }
        this.maxRetryWaitTimeInSeconds = maxRetryWaitTimeInSeconds;
    }

    @Override
    public String toString() {
        return String.format(
                "RetryOptions{maxRetryAttemptsOnThrottledRequests=%d, maxRetryWaitTimeInSeconds=%d}",
                maxRetryAttemptsOnThrottledRequests, maxRetryWaitTimeInSeconds);
    }
}
|
drwicked/ketosis-js | examples/host_proxy/index.js | var server = require('../../')
var request = require('request')
var proxy = server()
proxy.listen('test.local.com:3000')
proxy.header(function($){
request.get('http://test2.local.com:5000/', function(error, response, body){
if(error) throw error;
$.send('\nPort 3000 reached\n\nResponse from http://test2.local.com/:\n')
$.end(body)
});
})
var app = server()
app.listen('test.local.com:5000')
app.host('test2.local.com:5000')
app.get('/', function($){
$.end('Hello world!')
})
|
SandraBSofiaH/Final-UMldoclet | src/plantuml-asl/src/net/sourceforge/plantuml/project/draw/ResourceDraw.java | <reponame>SandraBSofiaH/Final-UMldoclet<filename>src/plantuml-asl/src/net/sourceforge/plantuml/project/draw/ResourceDraw.java
/* ========================================================================
* PlantUML : a free UML diagram generator
* ========================================================================
*
* (C) Copyright 2009-2020, <NAME>
*
* Project Info: https://plantuml.com
*
* If you like this project or if you find it useful, you can support us at:
*
* https://plantuml.com/patreon (only 1$ per month!)
* https://plantuml.com/paypal
*
* This file is part of PlantUML.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* Original Author: <NAME>
*/
package net.sourceforge.plantuml.project.draw;
import net.sourceforge.plantuml.SpriteContainerEmpty;
import net.sourceforge.plantuml.cucadiagram.Display;
import net.sourceforge.plantuml.graphic.FontConfiguration;
import net.sourceforge.plantuml.graphic.HorizontalAlignment;
import net.sourceforge.plantuml.graphic.TextBlock;
import net.sourceforge.plantuml.graphic.UDrawable;
import net.sourceforge.plantuml.project.GanttDiagram;
import net.sourceforge.plantuml.project.core.Resource;
import net.sourceforge.plantuml.project.time.Day;
import net.sourceforge.plantuml.project.timescale.TimeScale;
import net.sourceforge.plantuml.ugraphic.UFont;
import net.sourceforge.plantuml.ugraphic.UGraphic;
import net.sourceforge.plantuml.ugraphic.ULine;
import net.sourceforge.plantuml.ugraphic.UTranslate;
import net.sourceforge.plantuml.ugraphic.color.HColor;
import net.sourceforge.plantuml.ugraphic.color.HColorUtils;
/**
 * Draws the per-resource load row of a Gantt chart: the resource name, a
 * horizontal baseline, and the accumulated load figure for each time-scale
 * segment (rendered in red when the load exceeds 100% per day).
 */
public class ResourceDraw implements UDrawable {

    private final Resource res;
    private final TimeScale timeScale;
    // Vertical offset of this row within the diagram (exposed via getY()).
    private final double y;
    // Inclusive day range this row covers.
    private final Day min;
    private final Day max;
    private final GanttDiagram gantt;

    public ResourceDraw(GanttDiagram gantt, Resource res, TimeScale timeScale, double y, Day min, Day max) {
        this.res = res;
        this.timeScale = timeScale;
        this.y = y;
        this.min = min;
        this.max = max;
        this.gantt = gantt;
    }

    /**
     * Renders the row: name, baseline, then one load figure per segment.
     * Load is summed day by day and flushed whenever the time scale reports a
     * "breaking" day.
     * NOTE(review): isBreaking presumably marks the end of a time-scale cell
     * (e.g. a week boundary) — confirm against the TimeScale implementations.
     */
    public void drawU(UGraphic ug) {
        final TextBlock title = Display.getWithNewlines(res.getName()).create(getFontConfiguration(13),
                HorizontalAlignment.LEFT, new SpriteContainerEmpty());
        title.drawU(ug);
        // Baseline spanning the whole [min, max] range, just under the title.
        final ULine line = ULine.hline(timeScale.getEndingPosition(max) - timeScale.getStartingPosition(min));
        ug.apply(HColorUtils.BLACK).apply(UTranslate.dy(title.calculateDimension(ug.getStringBounder()).getHeight()))
                .draw(line);
        // Accumulators for the current segment: -1 means "no segment started".
        double startingPosition = -1;
        int totalLoad = 0;
        int totalLimit = 0; // 100 per day, so the "over" check is load > days * 100
        for (Day i = min; i.compareTo(max) <= 0; i = i.increment()) {
            final boolean isBreaking = timeScale.isBreaking(i);
            totalLoad += gantt.getLoadForResource(res, i);
            totalLimit += 100;
            if (isBreaking) {
                if (totalLoad > 0) {
                    // Overloaded segments are highlighted in red.
                    final boolean over = totalLoad > totalLimit;
                    final FontConfiguration fontConfiguration = getFontConfiguration(9,
                            over ? HColorUtils.RED : HColorUtils.BLACK);
                    final TextBlock value = Display.getWithNewlines("" + totalLoad).create(fontConfiguration,
                            HorizontalAlignment.LEFT, new SpriteContainerEmpty());
                    if (startingPosition == -1)
                        startingPosition = timeScale.getStartingPosition(i);
                    final double endingPosition = timeScale.getEndingPosition(i);
                    // Center the figure horizontally over the segment's span.
                    final double start = (startingPosition + endingPosition) / 2
                            - value.calculateDimension(ug.getStringBounder()).getWidth() / 2;
                    value.drawU(ug.apply(new UTranslate(start, 16)));
                }
                // Segment flushed: reset the accumulators.
                startingPosition = -1;
                totalLoad = 0;
                totalLimit = 0;
            } else {
                if (startingPosition == -1)
                    startingPosition = timeScale.getStartingPosition(i);
            }
        }
    }

    private FontConfiguration getFontConfiguration(int size) {
        return getFontConfiguration(size, HColorUtils.BLACK);
    }

    private FontConfiguration getFontConfiguration(int size, HColor color) {
        final UFont font = UFont.serif(size);
        return new FontConfiguration(font, color, color, false);
    }

    /** Fixed row height (title line + load line). */
    public double getHeight() {
        return 16 * 2;
    }

    public final double getY() {
        return y;
    }
}
|
miotech/KUN | kun-data-quality/data-quality-operators/src/test/java/com/miotech/kun/workflow/operator/MockValidationResultFactory.java | package com.miotech.kun.workflow.operator;
import com.google.common.collect.ImmutableList;
import com.miotech.kun.commons.utils.DateTimeUtils;
import com.miotech.kun.commons.utils.IdGenerator;
import com.miotech.kun.dataquality.core.assertion.ComparisonOperator;
import com.miotech.kun.dataquality.core.expectation.AssertionResult;
import com.miotech.kun.dataquality.core.expectation.ValidationResult;
/**
 * Test helper that builds canned {@link ValidationResult} instances with a
 * fresh expectation id, no execution result, a zero failing count and a
 * single canned assertion result.
 */
public class MockValidationResultFactory {

    private MockValidationResultFactory() {
        // static factory methods only
    }

    /** Canned assertion: field "n" EQUALS 0, with all values set to "0". */
    private static AssertionResult cannedAssertionResult() {
        return AssertionResult.newBuilder()
                .withField("n")
                .withComparisonOperator(ComparisonOperator.EQUALS)
                .withOperator(ComparisonOperator.EQUALS.getSymbol())
                .withExpectedType("NUMBER")
                .withExpectedValue("0")
                .withOriginalValue("0")
                .withBenchmarkValue("0")
                .build();
    }

    /**
     * Builds a validation result with the given pass/fail flag.
     */
    public static ValidationResult create(boolean passed) {
        return ValidationResult.newBuilder()
                .withExpectationId(IdGenerator.getInstance().nextId())
                .withExecutionResult(null)
                .withPassed(passed)
                .withContinuousFailingCount(0)
                .withAssertionResults(ImmutableList.of(cannedAssertionResult()))
                .withUpdateTime(DateTimeUtils.now())
                .build();
    }

    /**
     * Builds a passing validation result.
     */
    public static ValidationResult create() {
        return create(true);
    }
}
|
Shinji1987/interactiver | app/controllers/messages_controller.rb | class MessagesController < ApplicationController
before_action :find_chat, :only => [:new, :create]
before_action :authenticate_user!

# Shows the message thread of @chat and prepares a blank message.
# The chat partner (@user) is resolved differently depending on the page the
# visitor navigated from (request.referer).
def new
  if request.referer&.include?("/messages")
    # Coming from a messages page: fetch the chat's participant id pair and
    # pick the one that is not the current user.
    @user_record_double = ChatUser.where(:chat_id => @chat.id).pluck(:created_user_id, :invited_user_id)
    @user_record = @user_record_double.slice(0)  # first row: [created_user_id, invited_user_id]
    @user_arr = @user_record.select do |x|
      x != current_user.id
    end
    @user = User.find_by(:id => @user_arr)
  elsif request.referer&.include?("/users")
    # Coming from a user profile: the partner id is carried in params[:format].
    # NOTE(review): using :format to transport an id looks fragile - confirm the routes.
    @user = User.find_by(:id => params[:format])
  elsif request.referer&.include?("/chats")
    # Coming from the chat list: same participant lookup as the first branch,
    # just without keeping the intermediate instance variables.
    chat_user = ChatUser.where(:chat_id => @chat.id).pluck(:created_user_id, :invited_user_id).slice(0)
    chat_user_record = chat_user.select do |x|
      x != current_user.id
    end
    @user = User.find_by(:id => chat_user_record)
  else
    @user = User.find_by(:id => params[:format])
  end
  @messages = Message.where(:chat_id => @chat.id)
  @message = Message.new
end

# Persists a new message in @chat addressed to the other participant.
# On validation failure it re-sends the previously stored message content
# (when params[:content] is non-empty) and redirects back with a notice.
def create
  # Resolve the chat partner the same way as the first branch of #new.
  @user_record_double = ChatUser.where(:chat_id => @chat.id).pluck(:created_user_id, :invited_user_id)
  @user_record = @user_record_double.slice(0)
  @user_arr = @user_record.select do |x|
    x != current_user.id
  end
  @user = User.find_by(:id => @user_arr)
  @message = @chat.messages.new(message_params)
  if @message.save
    respond_to do |format|
      format.html{redirect_to new_chat_message_path(@chat.id)}
      format.json
    end
  else
    # Save failed. If some content was submitted, copy the content of an
    # earlier message between the same pair into a new message and save that.
    # NOTE(review): this resends old content on failure - confirm intended.
    if (params[:content]) != ""
      @message = Message.find_by(:sent_user_id => current_user.id, :received_user_id => @user.id)
      @new_message = @chat.messages.new(message_params)
      if @message != nil
        @new_message.content = @message.content
        @new_message.save
      end
    end
    flash[:notice] = "メッセージを入力、又は画像を添付してください"
    @messages = @chat.messages.includes(:user)
    redirect_to new_chat_message_path(@chat.id)
  end
end

private

# Strong parameters for a message, stamped with sender and receiver ids.
def message_params
  params.permit(:content, :message_image).merge(:sent_user_id => current_user.id, :received_user_id => @user.id)
end

# Loads the chat addressed by the route into @chat (404s when missing).
def find_chat
  @chat = Chat.find(params[:chat_id])
end
end |
riveign/zenoffice-frontend | node_modules/recharts/demo/component/LineChart.js | import React from 'react';
import CustomLineDot from './CustomLineDot';
import { changeNumberOfData } from './utils';
import { ResponsiveContainer, LineChart, Line, XAxis, YAxis, ReferenceLine,
ReferenceDot, Tooltip, CartesianGrid, Legend, Brush } from 'recharts';
// Fixture data for the demo charts below.

// Basic numeric series: one uv/pv/amt triple per page.
const data = [
  { name: 'Page A', uv: 400, pv: 2400, amt: 2400 },
  { name: 'Page B', uv: 300, pv: 4567, amt: 2400 },
  { name: 'Page C', uv: 280, pv: 1398, amt: 2400 },
  { name: 'Page D', uv: 200, pv: 9800, amt: 2400 },
  { name: 'Page E', uv: 278, pv: 3908, amt: 2400 },
  { name: 'Page F', uv: 189, pv: 4800, amt: 2400 },
];

// Categorical series (weather per day) used by the discrete-values chart.
const data01 = [
  { day: '05-01', weather: 'sunny' },
  { day: '05-02', weather: 'sunny' },
  { day: '05-03', weather: 'cloudy' },
  { day: '05-04', weather: 'rain' },
  { day: '05-05', weather: 'rain' },
  { day: '05-06', weather: 'cloudy' },
  { day: '05-07', weather: 'cloudy' },
  { day: '05-08', weather: 'sunny' },
  { day: '05-09', weather: 'sunny' },
];

// Longer numeric series (7 points) used with the fixed-domain y-axis demo.
const data02 = [
  { name: 'Page A', uv: 300, pv: 2600, amt: 3400 },
  { name: 'Page B', uv: 400, pv: 4367, amt: 6400 },
  { name: 'Page C', uv: 300, pv: 1398, amt: 2400 },
  { name: 'Page D', uv: 200, pv: 9800, amt: 2400 },
  { name: 'Page E', uv: 278, pv: 3908, amt: 2400 },
  { name: 'Page F', uv: 189, pv: 4800, amt: 2400 },
  { name: 'Page G', uv: 189, pv: 4800, amt: 2400 },
];

// Initial component state.
// NOTE(review): "initila" is a typo for "initial", but the name is referenced
// by the component below, so it is kept unchanged.
const initilaState = {
  data,
  data01,
  data02,
};
const renderSpecialDot = (props) => {
const { cx, cy, stroke, key } = props;
if (cx === +cx && cy === +cy) {
return <path d={`M${cx - 2},${cy - 2}h4v4h-4Z`} fill={stroke} key={key}/>;
}
return null;
};
const renderLabel = (props) => {
const { x, y, textAnchor, key, value } = props;
if (x === +x && y === +y) {
return <text x={x} y={y} dy={-10} textAnchor={textAnchor} key={key}>{value}</text>
}
return null;
};
// Demo gallery of recharts LineChart configurations.
// NOTE(review): uses the legacy React.createClass API (pre React 16).
export default React.createClass({
  displayName: 'LineChartDemo',

  getInitialState() {
    return initilaState;
  },

  // Regenerates every dataset with a new random length/values.
  // NOTE(review): `_` (lodash) is used here but never imported in this file -
  // presumably available as a global; verify before reuse.
  handleChangeData() {
    this.setState(() => _.mapValues(initilaState, changeNumberOfData));
  },

  render() {
    const { data, data01, data02 } = this.state;

    return (
      <div className='line-charts'>
        <a
          href="javascript: void(0);"
          className="btn update"
          onClick={this.handleChangeData}
        >
          change data
        </a>
        <br/>

        <p>A simple LineChart with fixed domain y-axis</p>
        <div className='line-chart-wrapper'>
          <LineChart width={400} height={400} data={data02} margin={{ top: 20, right: 40, bottom: 20, left: 20 }} syncId="test">
            <CartesianGrid stroke='#f5f5f5' />
            <Legend/>
            <XAxis/>
            <YAxis domain={[0, 350]} allowDataOverflow={true} />
            <Tooltip />
            <Line type='monotone' dataKey='uv' stroke='#ff7300' dot={renderSpecialDot} label={renderLabel}/>
            <Brush dataKey="name" height={30} />
          </LineChart>
        </div>

        <p>A simple LineChart with customized line dot</p>
        <div className='line-chart-wrapper'>
          <LineChart width={400} height={400} data={data} margin={{ top: 20, right: 20, bottom: 20, left: 20 }} syncId="test">
            <CartesianGrid stroke='#f5f5f5'/>
            <Legend />
            <XAxis />
            <YAxis domain={[0, 500]}/>
            <Tooltip />
            <Line type='monotone' dataKey='uv' dot={<CustomLineDot/>} stroke='#ff7300'/>
          </LineChart>
        </div>

        <p>LineChart with two y-axes</p>
        <div className='line-chart-wrapper' style={{ padding: 40 }}>
          <LineChart width={400} height={400} data={data}
            margin={{top: 10, bottom: 10, left: 30, right: 30}}>
            <XAxis dataKey='name'/>
            <Tooltip/>
            <CartesianGrid stroke='#f5f5f5'/>
            <Line type='monotone' dataKey='uv' stroke='#ff7300' yAxisId={0} activeDot={{fill: '#ff7300', stroke: 'none'}}/>
            <Line type='monotone' dataKey='pv' stroke='#387908' yAxisId={1} activeDot={{fill: '#387908', stroke: 'none', r: 6}}/>
          </LineChart>
        </div>

        <p>LineChart with three y-axes</p>
        <div className='line-chart-wrapper' style={{ margin: 40 }}>
          <LineChart width={600} height={400} data={data}>
            <YAxis type='number' yAxisId={0} ticks={[0, 250]}/>
            <YAxis type='number' orientation='right' yAxisId={1}/>
            <YAxis type='number' orientation='right' yAxisId={2}/>
            <XAxis dataKey='name'/>
            <Tooltip/>
            <CartesianGrid stroke='#f5f5f5'/>
            <Line dataKey='uv' stroke='#ff7300' strokeWidth={2} yAxisId={0}/>
            <Line dataKey='pv' stroke='#387908' strokeWidth={2} yAxisId={1}/>
            <Line dataKey='amt' stroke='#38abc8' strokeWidth={2} yAxisId={2}/>
          </LineChart>
        </div>

        <p>LineChart when data change</p>
        <a
          href="javascript:void(0)"
          className="btn"
          onClick={() => { this.setState({ data: this.state.data === data ? data02 : data }); }}
        >
          change data
        </a>
        <div className="line-chart-wrapper">
          <LineChart
            width={400}
            height={400}
            data={this.state.data}
            margin={{ top: 20, right: 20, bottom: 20, left: 20 }}
          >
            <CartesianGrid stroke="#f5f5f5" />
            <Legend />
            <XAxis />
            <YAxis domain={[0, 500]} />
            <Line type="monotone" dataKey="uv" dot={<CustomLineDot/>} stroke="#ff7300" />
          </LineChart>
        </div>

        <p>LineChart of vertical layout</p>
        <div className='line-chart-wrapper' style={{ margin: 40 }}>
          <LineChart width={400} height={400} data={data} layout='vertical'
            margin={{top: 5, right: 20, left: 20, bottom: 5}}>
            <YAxis type='category' dataKey='name'/>
            <XAxis type='number' xAxisId={0} orientation='top'/>
            <XAxis type='number' xAxisId={1} orientation='bottom'/>
            <CartesianGrid stroke='#f5f5f5'/>
            <Line dataKey='uv' type="monotone" stroke='#ff7300' strokeWidth={2} xAxisId={0} />
            <Line dataKey='pv' type="monotone" stroke='#387908' strokeWidth={2} xAxisId={1} />
          </LineChart>
        </div>

        <p>LineChart of discrete values</p>
        <div className="line-chart-wrapper">
          <LineChart
            width={400} height={400} data={data01}
            margin={{ top: 20, right: 20, bottom: 20, left: 20 }}>
            <XAxis dataKey="day" />
            <YAxis type="category" />
            <Tooltip />
            <Line type="stepAfter" dataKey="weather" stroke="#ff7300" />
          </LineChart>
        </div>
      </div>
    );
  }
});
|
jsonshen/MyBatisX | generator/src/main/java/org/shenjia/mybatis/generator/plugins/ExtendableModelPlugin.java | <reponame>jsonshen/MyBatisX<filename>generator/src/main/java/org/shenjia/mybatis/generator/plugins/ExtendableModelPlugin.java
/**
* Copyright 2015-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.shenjia.mybatis.generator.plugins;
import java.io.File;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import org.mybatis.generator.api.GeneratedJavaFile;
import org.mybatis.generator.api.IntrospectedTable;
import org.mybatis.generator.api.PluginAdapter;
import org.mybatis.generator.api.dom.java.FullyQualifiedJavaType;
import org.mybatis.generator.api.dom.java.JavaVisibility;
import org.mybatis.generator.api.dom.java.Method;
import org.mybatis.generator.api.dom.java.Parameter;
import org.mybatis.generator.api.dom.java.TopLevelClass;
import org.mybatis.generator.internal.util.StringUtility;
/**
*
* @author json
*
*/
/**
 * MyBatis Generator plugin that turns each generated model class into an
 * abstract base class (renamed with an {@code Abstract} prefix and moved into
 * an {@code .internal} sub-package) and emits a concrete, user-extendable
 * subclass under the original name. The concrete subclass is only generated
 * when its file does not yet exist on disk, so manual edits survive
 * regeneration.
 */
public class ExtendableModelPlugin extends PluginAdapter {

    /** Concrete subclasses to emit in addition to the regular generated files. */
    private List<GeneratedJavaFile> extendableModels = new ArrayList<GeneratedJavaFile>();

    /** Whether to add a fluent {@code build()} method to the abstract base class. */
    private boolean generateBuildMethod = true;

    /** Whether to add a {@code serialVersionUID} field to the concrete subclass. */
    private boolean generateSerialVersionUID = true;

    /**
     * Reads the optional {@code generateBuildMethod} and
     * {@code generateSerialVersionUID} plugin properties.
     *
     * @param warnings collector for validation warnings (unused)
     * @return always {@code true}; this plugin has no hard requirements
     */
    public boolean validate(List<String> warnings) {
        if (StringUtility.stringHasValue(properties.getProperty("generateBuildMethod"))) {
            generateBuildMethod = Boolean.parseBoolean(properties.getProperty("generateBuildMethod"));
        }
        if (StringUtility.stringHasValue(properties.getProperty("generateSerialVersionUID"))) {
            generateSerialVersionUID = Boolean.parseBoolean(properties.getProperty("generateSerialVersionUID"));
        }
        return true;
    }

    @Override
    public List<GeneratedJavaFile> contextGenerateAdditionalJavaFiles() {
        return extendableModels;
    }

    /**
     * Rewrites the generated model class into an abstract parent and queues a
     * concrete subclass carrying the original name for generation.
     *
     * @param oldModelClass the model class MBG generated for the table
     * @param introspectedTable the table being processed
     * @return always {@code true} so generation of the (renamed) class proceeds
     */
    @Override
    public boolean modelBaseRecordClassGenerated(TopLevelClass oldModelClass, IntrospectedTable introspectedTable) {
        // The concrete subclass keeps the original fully qualified name; the
        // generated class is mutated below (via reflection) into its abstract
        // parent, so capture the name first.
        TopLevelClass newModelClass = new TopLevelClass(oldModelClass.getType().getFullyQualifiedName());

        oldModelClass.setAbstract(true);
        FullyQualifiedJavaType oldModelType = oldModelClass.getType();
        Class<?> clazz = oldModelType.getClass();
        try {
            // FullyQualifiedJavaType exposes no setters, so its name parts are
            // rewritten via reflection. Order matters: packageName must be set
            // before it is re-read when composing the qualified name.
            Field pn = clazz.getDeclaredField("packageName");
            pn.setAccessible(true);
            String packageName = oldModelType.getPackageName() + ".internal";
            pn.set(oldModelType, packageName);

            // Prefix the short name with "Abstract".
            Field bsn = clazz.getDeclaredField("baseShortName");
            bsn.setAccessible(true);
            String shortName = "Abstract" + oldModelType.getShortName();
            bsn.set(oldModelType, shortName);

            // Recompose the qualified name from the already-updated package.
            Field bqn = clazz.getDeclaredField("baseQualifiedName");
            bqn.setAccessible(true);
            String fullName = oldModelType.getPackageName() + "." + shortName;
            bqn.set(oldModelType, fullName);

            if (generateBuildMethod) {
                // Self-returning build() so subclass setters can chain. The
                // type parameter is smuggled through the method name / return
                // type strings because the MBG DOM API cannot declare a
                // method-level generic directly ("MPG API BUG" workaround).
                Method buildMethod = new Method("T build");
                buildMethod.setVisibility(JavaVisibility.PUBLIC);
                buildMethod.setReturnType(new FullyQualifiedJavaType("<T extends " + shortName + ">"));
                buildMethod.addBodyLine("return (T)this;");
                buildMethod.addAnnotation("@SuppressWarnings(\"unchecked\")");
                oldModelClass.addMethod(buildMethod);
                context.getCommentGenerator().addGeneralMethodComment(buildMethod, introspectedTable);
            }

            // Mirror every constructor of the abstract parent on the subclass,
            // delegating to super(...) with the same parameters.
            List<Method> oldModelMethods = oldModelClass.getMethods();
            for (Method oldModelMethod : oldModelMethods) {
                if (!oldModelMethod.isConstructor()) {
                    continue;
                }
                Method newModelMethod = new Method(oldModelMethod.getName());
                newModelMethod.setConstructor(true);
                newModelMethod.setVisibility(JavaVisibility.PUBLIC);
                StringBuilder bodyBuf = new StringBuilder("super(");
                for (Parameter p : oldModelMethod.getParameters()) {
                    newModelMethod.addParameter(p);
                    bodyBuf.append(p.getName()).append(", ");
                }
                int bodyLen = bodyBuf.length();
                if (bodyLen > 6) {
                    // At least one parameter was appended: strip trailing ", ".
                    bodyBuf.delete(bodyLen - 2, bodyLen);
                }
                bodyBuf.append(");");
                newModelMethod.addBodyLine(bodyBuf.toString());
                newModelClass.addMethod(newModelMethod);
                // Rename the parent's constructor to match its new class name.
                oldModelMethod.setName("Abstract" + oldModelMethod.getName());
            }
        } catch (Exception e) {
            // NOTE(review): a reflection failure only prints and continues,
            // leaving a half-rewritten model; consider failing the build.
            e.printStackTrace();
        }

        if (generateSerialVersionUID) {
            newModelClass.addField(buildSerialVersionUID(introspectedTable));
        }
        newModelClass.setVisibility(JavaVisibility.PUBLIC);
        newModelClass.setSuperClass(oldModelType);
        newModelClass.addImportedType(oldModelType);

        // Only generate the concrete subclass when its file does not exist,
        // so user customizations are preserved across regenerations.
        String targetProject = context.getJavaModelGeneratorConfiguration().getTargetProject();
        if (!checkExists(targetProject, newModelClass)) {
            GeneratedJavaFile modelJavaFile = new GeneratedJavaFile(newModelClass, targetProject, context.getJavaFormatter());
            extendableModels.add(modelJavaFile);
        } else {
            System.out.println("---- JAVA FILE EXISTS, IGNORED. --:" + newModelClass.getType().getFullyQualifiedName());
        }
        return true;
    }

    /**
     * Builds a {@code private static final long serialVersionUID = 1L} field.
     *
     * @param it the table being processed (for comment generation)
     * @return the field to add to the concrete subclass
     */
    private org.mybatis.generator.api.dom.java.Field buildSerialVersionUID(IntrospectedTable it) {
        org.mybatis.generator.api.dom.java.Field field = new org.mybatis.generator.api.dom.java.Field(
                "serialVersionUID", new FullyQualifiedJavaType("long"));
        field.setFinal(true);
        field.setStatic(true);
        field.setVisibility(JavaVisibility.PRIVATE);
        field.setInitializationString("1L");
        context.getCommentGenerator().addFieldComment(field, it);
        return field;
    }

    /**
     * Checks whether the concrete subclass source file already exists under a
     * Maven layout ({@code <user.dir>/pom.xml} present).
     *
     * NOTE(review): when no pom.xml exists the path stays at {@code user.dir},
     * which is an existing directory, so this returns {@code true} and the
     * subclass is never generated - confirm that is intended.
     *
     * @param targetProject the configured model target project directory
     * @param newModelClass the class whose file is looked up
     * @return {@code true} if the file (or fallback directory) exists
     */
    private boolean checkExists(String targetProject, TopLevelClass newModelClass) {
        String fullName = newModelClass.getType().getFullyQualifiedNameWithoutTypeParameters();
        File pomFile = new File(System.getProperty("user.dir") + "/pom.xml");
        String filePath = System.getProperty("user.dir") + "/";
        if (pomFile.exists()) {
            filePath += targetProject + "/" + fullName.replaceAll("\\.", "/") + ".java";
        }
        File javaFile = new File(filePath);
        return javaFile.exists();
    }
}
|
tharindusathis/sourcecodes-of-CodeReadingTheOpenSourcePerspective | XFree86-3.3/xc/programs/x11perf/do_dots.c | <filename>XFree86-3.3/xc/programs/x11perf/do_dots.c
/* $XConsortium: do_dots.c,v 2.5 94/02/06 20:17:34 rws Exp $ */
/*****************************************************************************
Copyright 1988, 1989 by Digital Equipment Corporation, Maynard, Massachusetts.
All Rights Reserved
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Digital not be
used in advertising or publicity pertaining to distribution of the
software without specific, written prior permission.
DIGITAL DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL
DIGITAL BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
SOFTWARE.
******************************************************************************/
#include "x11perf.h"
static XPoint  *points;   /* dot coordinates; allocated in InitDots, freed in EndDots */
static GC      pgc;       /* current GC; toggled between fg and bg by DoDots */
/*
 * Allocate and lay out p->objects dots column-major on a 2-pixel grid,
 * MAXROWS dots per column, and select the foreground GC.
 * Returns the rep count unchanged.
 */
int InitDots(xp, p, reps)
    XParms  xp;
    Parms   p;
    int     reps;
{
    int i, col, row;

    pgc = xp->fggc;

    points = (XPoint *) malloc(p->objects * sizeof(XPoint));
    for (i = 0; i != p->objects; i++) {
        col = i / MAXROWS;
        row = i % MAXROWS;
        points[i].x = 2 * col;
        points[i].y = 2 * row;
    }
    return reps;
}
/*
 * Draw the precomputed dot array once per rep, alternating between the
 * background and foreground GCs so each rep visibly changes the window.
 */
void DoDots(xp, p, reps)
    XParms  xp;
    Parms   p;
    int     reps;
{
    int rep;

    for (rep = 0; rep != reps; rep++) {
        XDrawPoints(xp->d, xp->w, pgc, points, p->objects, CoordModeOrigin);
        /* toggle fg <-> bg for the next rep */
        pgc = (pgc == xp->bggc) ? xp->fggc : xp->bggc;
    }
}
void EndDots(xp, p)
XParms xp;
Parms p;
{
free(points);
}
|
paser4se/odyssey-reactive-messaging | reactive-messaging/src/main/java/com/amadeus/middleware/odyssey/reactive/messaging/core/impl/PublisherInvoker.java | <filename>reactive-messaging/src/main/java/com/amadeus/middleware/odyssey/reactive/messaging/core/impl/PublisherInvoker.java
package com.amadeus.middleware.odyssey.reactive.messaging.core.impl;
import org.reactivestreams.Publisher;
import com.amadeus.middleware.odyssey.reactive.messaging.core.Message;
/**
 * Invoker for a message-processing function that produces a reactive stream.
 *
 * @param <T> the payload type of the messages emitted by the publisher
 */
public interface PublisherInvoker<T> {

    /**
     * Invokes the underlying function and returns its stream of messages.
     *
     * @return the publisher produced by the invocation
     * @throws FunctionInvocationException if the underlying invocation fails
     */
    Publisher<Message<T>> invoke() throws FunctionInvocationException;

    /**
     * Prepares this invoker for use; expected to be called before
     * {@link #invoke()} (exact contract defined by implementations).
     */
    void initialize();
}
|
tlhhup/scala-learn | scala-chapter-2/src/test/scala/org/tlh/scala/test/examples/StackSpec.scala | package org.tlh.scala.test.examples
import java.util
import java.util.EmptyStackException
import org.scalatest.FlatSpec
/**
* <br>
* Created by <NAME> on 6/27/2019
* <p>
*/
/**
 * FlatSpec examples exercising [[java.util.Stack]].
 */
class StackSpec extends FlatSpec {

  // FlatSpec test names read: subject + verb (should, must, or can) + sentence
  "A Stack" should "pop values in last-in-first-out order" in {
    val stack = new util.Stack[Int]
    stack.push(1)
    stack.push(2)
    assert(stack.pop() === 2)
    assert(stack.pop() === 1)
  }

  // `it` refers back to the previous subject ("A Stack")
  it should "throw EmptyStackException if an empty stack is popped" in {
    val emptyStack = new util.Stack[String]
    assertThrows[EmptyStackException] {
      emptyStack.pop()
    }
  }
}
|
petascalr/algohut | src/main/java/com/specflare/algohut/trees/SuffixTree.java | <reponame>petascalr/algohut
package com.specflare.algohut.trees;
/**
* References:
* - https://www.sanfoundry.com/java-program-implement-suffix-tree/
* - https://brenden.github.io/ukkonen-animation/
* - https://www.geeksforgeeks.org/ukkonens-suffix-tree-construction-part-1/
*/
/**
 * A suffix-tree node. Nodes are addressed by integer index elsewhere; this
 * object only stores the node's suffix link.
 */
class Node {
    public int suffix_node;        // index of the suffix-link target, -1 when unset
    public static int Count = 1;   // total nodes allocated so far; node 0 is the root

    public Node() {
        suffix_node = -1;
    }
}
/**
 * Ukkonen suffix-tree construction over the text in {@code T[0..N]}.
 * Edges are stored in an open-addressed hash table keyed by
 * (start node, first character); nodes carry only suffix links.
 */
class SuffixTree {
    private static final int MAX_LENGTH = 1000;       // maximum text length supported
    private static final int HASH_TABLE_SIZE = 2179;  // prime, sized for ~2*MAX_LENGTH edges
    public char[] T = new char[MAX_LENGTH];           // the text being indexed
    public int N;                                     // index of the last valid character in T
    private Edge[] Edges;                             // open-addressed edge hash table
    private Node[] Nodes;                             // node table, indexed by node id
    public Suffix active;                             // the current active point

    /**
     * The active point: the suffix T[first_char_index..last_char_index]
     * hanging off {@code origin_node}.
     */
    class Suffix {
        public int origin_node;
        public int first_char_index;
        public int last_char_index;

        public Suffix(int node, int start, int stop) {
            origin_node = node;
            first_char_index = start;
            last_char_index = stop;
        }

        // NOTE(review): identical to Explicit(); an implicit suffix should be
        // the negation (first_char_index <= last_char_index). Unused in this
        // file, so the bug is latent.
        public boolean Implicit() {
            return first_char_index > last_char_index;
        }

        // An explicit suffix ends exactly at a node (empty character span).
        public boolean Explicit() {
            return first_char_index > last_char_index;
        }

        /**
         * Function Canonize()
         * A suffix in the tree is denoted by a Suffix structure
         * that denotes its last character. The canonical
         * representation of a suffix for this algorithm requires
         * that the origin_node be the closest node to the end
         * of the tree. To force this to be true, we have to
         * slide down every edge in our current path until we
         * reach the final node.
         **/
        public void Canonize() {
            if (!Explicit()) {
                Edge edge = Find(origin_node, T[first_char_index]);
                int edge_span = edge.last_char_index - edge.first_char_index;
                // Walk down whole edges while the remaining span covers them.
                while (edge_span <= (last_char_index - first_char_index)) {
                    first_char_index = first_char_index + edge_span + 1;
                    origin_node = edge.end_node;
                    if (first_char_index <= last_char_index) {
                        edge = Find(edge.end_node, T[first_char_index]);
                        edge_span = edge.last_char_index - edge.first_char_index;
                    }
                }
            }
        }
    }

    /**
     * An edge labelled with T[first_char_index..last_char_index], running from
     * {@code start_node} to {@code end_node}. A default-constructed edge
     * (start_node == -1) marks an empty hash-table slot.
     */
    class Edge {
        public int first_char_index;
        public int last_char_index;
        public int end_node;
        public int start_node;

        public Edge() {
            start_node = -1;
        }

        public Edge(int init_first, int init_last, int parent_node) {
            first_char_index = init_first;
            last_char_index = init_last;
            start_node = parent_node;
            end_node = Node.Count++;   // allocate a fresh end node
        }

        /**
         * function Insert ()
         * A given edge gets a copy of itself inserted into the table
         * with this function. It uses a linear probe technique, which
         * means in the case of a collision, we just step forward through
         * the table until we find the first unused slot.
         **/
        public void Insert() {
            int i = Hash(start_node, T[first_char_index]);
            while (Edges[i].start_node != -1)
                i = ++i % HASH_TABLE_SIZE;   // linear probe: i becomes (i + 1) % size
            Edges[i] = this;
        }

        /**
         * function SplitEdge ()
         * This function is called
         * to split an edge at the point defined by the Suffix argument.
         * Returns the id of the newly created interior node.
         **/
        public int SplitEdge(Suffix s) {
            Remove();
            Edge new_edge = new Edge(first_char_index,
                    first_char_index + s.last_char_index - s.first_char_index,
                    s.origin_node);
            new_edge.Insert();
            Nodes[new_edge.end_node].suffix_node = s.origin_node;
            // This edge becomes the lower half, hanging off the new node.
            first_char_index += s.last_char_index - s.first_char_index + 1;
            start_node = new_edge.end_node;
            Insert();
            return new_edge.end_node;
        }

        /**
         * function Remove ()
         * This function is called to remove an edge from the hash table.
         * Because the table uses linear probing, later entries in the probe
         * chain may need to be moved back to fill the freed slot.
         **/
        public void Remove() {
            int i = Hash(start_node, T[first_char_index]);
            while (Edges[i].start_node != start_node || Edges[i].first_char_index != first_char_index)
                i = ++i % HASH_TABLE_SIZE;   // probe to the entry being removed
            for (; ; ) {
                Edges[i].start_node = -1;    // free the slot at i
                int j = i;
                // Scan forward for an entry whose home slot is displaced by
                // the hole at j; the three checks handle probe-chain
                // wrap-around cases (standard open-addressing deletion).
                for (; ; ) {
                    i = ++i % HASH_TABLE_SIZE;
                    if (Edges[i].start_node == -1)
                        return;
                    int r = Hash(Edges[i].start_node, T[Edges[i].first_char_index]);
                    if (i >= r && r > j)
                        continue;
                    if (r > j && j > i)
                        continue;
                    if (j > i && i >= r)
                        continue;
                    break;
                }
                Edges[j] = Edges[i];         // move the displaced entry back
            }
        }
    }

    /**
     * Constructor: pre-allocates the edge hash table and node array and
     * starts with an empty active suffix at the root (node 0).
     */
    public SuffixTree() {
        Edges = new Edge[HASH_TABLE_SIZE];
        for (int i = 0; i < HASH_TABLE_SIZE; i++)
            Edges[i] = new Edge();
        Nodes = new Node[MAX_LENGTH * 2];
        for (int i = 0; i < MAX_LENGTH * 2; i++)
            Nodes[i] = new Node();
        active = new Suffix(0, 0, -1);
    }

    /**
     * Function Find() - find the edge leaving {@code node} whose label starts
     * with character {@code c}; returns an empty slot (start_node == -1) when
     * no such edge exists.
     **/
    public Edge Find(int node, int c) {
        int i = Hash(node, c);
        for (; ; ) {
            if (Edges[i].start_node == node) {
                if (c == T[Edges[i].first_char_index]) {
                    return Edges[i];
                }
            }
            if (Edges[i].start_node == -1) {
                return Edges[i];
            }
            i = ++i % HASH_TABLE_SIZE;   // linear probe
        }
    }

    /**
     * Function Hash() - edges are inserted into the hash table using this
     * hashing function over (start node, first character).
     **/
    public static int Hash(int node, int c) {
        return ((node << 8) + c) % HASH_TABLE_SIZE;
    }

    /**
     * Function AddPrefix() - called repetitively, once for each of the
     * prefixes of the input string; extends the tree with the character at
     * {@code last_char_index} (one Ukkonen phase).
     **/
    public void AddPrefix(Suffix active, int last_char_index) {
        int parent_node;
        int last_parent_node = -1;
        for (; ; ) {
            Edge edge;
            parent_node = active.origin_node;
            if (active.Explicit()) {
                // Active point sits on a node: stop if the edge already exists.
                edge = Find(active.origin_node, T[last_char_index]);
                if (edge.start_node != -1)
                    break;
            } else {
                // Active point sits inside an edge: stop if the next label
                // character already matches, otherwise split the edge.
                edge = Find(active.origin_node, T[active.first_char_index]);
                int span = active.last_char_index - active.first_char_index;
                if (T[edge.first_char_index + span + 1] == T[last_char_index])
                    break;
                parent_node = edge.SplitEdge(active);
            }
            // Add a new leaf edge for the current character.
            Edge new_edge = new Edge(last_char_index, N, parent_node);
            new_edge.Insert();
            if (last_parent_node > 0)
                Nodes[last_parent_node].suffix_node = parent_node;
            last_parent_node = parent_node;
            // Move the active point to the next-shorter suffix.
            if (active.origin_node == 0)
                active.first_char_index++;
            else
                active.origin_node = Nodes[active.origin_node].suffix_node;
            active.Canonize();
        }
        if (last_parent_node > 0)
            Nodes[last_parent_node].suffix_node = parent_node;
        active.last_char_index++;   // the endpoint advances for the next phase
        active.Canonize();
    }

    /**
     * Function to print all contents and details of the suffix tree,
     * one hash-table entry per line.
     **/
    public void dump_edges(int current_n) {
        System.out.println(" Start End Suf First Last String\n");
        for (int j = 0; j < HASH_TABLE_SIZE; j++) {
            Edge s = Edges[j];
            if (s.start_node == -1)
                continue;
            System.out.printf("%5d %5d %3d %5d %6d ", s.start_node, s.end_node, Nodes[s.end_node].suffix_node, s.first_char_index, s.last_char_index);
            // Leaf edges notionally run to the end of the text; clamp the
            // printed label at the characters added so far.
            int top;
            if (current_n > s.last_char_index) {
                top = s.last_char_index;
            } else {
                top = current_n;
            }
            for (int l = s.first_char_index; l <= top; l++)
                System.out.print(T[l]);
            System.out.println();
        }
    }
} |
andreaTP/kanela | agent/src/main/java/kanela/agent/api/instrumentation/bridge/BridgeClassVisitorWrapper.java | <filename>agent/src/main/java/kanela/agent/api/instrumentation/bridge/BridgeClassVisitorWrapper.java
/*
* =========================================================================================
* Copyright © 2013-2018 the kamon project <http://kamon.io/>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
* =========================================================================================
*/
package kanela.agent.api.instrumentation.bridge;
import kanela.agent.util.asm.ClassWriterFlags;
import lombok.EqualsAndHashCode;
import lombok.Value;
import net.bytebuddy.asm.AsmVisitorWrapper;
import net.bytebuddy.description.field.FieldDescription;
import net.bytebuddy.description.field.FieldList;
import net.bytebuddy.description.method.MethodList;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.implementation.Implementation;
import net.bytebuddy.jar.asm.ClassReader;
import net.bytebuddy.jar.asm.ClassVisitor;
import net.bytebuddy.pool.TypePool;
/**
 * Byte Buddy {@link AsmVisitorWrapper} that applies a {@link BridgeClassVisitor}
 * for the given {@link BridgeDescription} to the instrumented type.
 * Immutable; instances are created via the Lombok-generated static factory
 * {@code of(bridge, typeDescription, classLoader)}.
 */
@Value(staticConstructor = "of")
@EqualsAndHashCode(callSuper = false)
public class BridgeClassVisitorWrapper extends AsmVisitorWrapper.AbstractBase {

    BridgeDescription bridge;
    TypeDescription typeDescription;
    ClassLoader classLoader;

    /** Adds the writer flags resolved for the instrumented type (e.g. frame computation). */
    @Override
    public int mergeWriter(int flags) {
        return flags | ClassWriterFlags.resolve(typeDescription, classLoader);
    }

    /** Forces expanded frames so the visitor sees fully materialized stack map frames. */
    @Override
    public int mergeReader(int flags) {
        return flags | ClassReader.EXPAND_FRAMES;
    }

    /**
     * Wraps the given visitor with a {@link BridgeClassVisitor} configured for
     * this wrapper's bridge description and the instrumented type's internal name.
     */
    @Override
    public ClassVisitor wrap(TypeDescription instrumentedType,
                             ClassVisitor classVisitor,
                             Implementation.Context implementationContext,
                             TypePool typePool,
                             FieldList<FieldDescription.InDefinedShape> fields,
                             MethodList<?> methods,
                             int writerFlags,
                             int readerFlags) {
        return BridgeClassVisitor.from(bridge, instrumentedType.getInternalName(), classVisitor);
    }
}
|
gandulf/DsaTab | DsaTab/src/main/java/com/dsatab/data/enums/TalentType.java | <gh_stars>10-100
package com.dsatab.data.enums;
public enum TalentType {
Anderthalbhänder(
"Anderthalbhänder",
TalentGroupType.Nahkampf
, -2),
Armbrust(
"Armbrust",
TalentGroupType.Fernkampf
, -5),
Bastardstäbe(
"Bastardstäbe",
TalentGroupType.Nahkampf
, -2),
Belagerungswaffen(
"Belagerungswaffen",
TalentGroupType.Fernkampf
),
Blasrohr(
"Blasrohr",
TalentGroupType.Fernkampf
, -5),
Bogen(
"Bogen",
TalentGroupType.Fernkampf
, -3),
Diskus(
"Diskus",
TalentGroupType.Fernkampf
, -3),
Dolche(
"Dolche",
TalentGroupType.Nahkampf
, -1),
Fechtwaffen(
"Fechtwaffen",
TalentGroupType.Nahkampf
, -1),
Hiebwaffen(
"Hiebwaffen",
TalentGroupType.Nahkampf
, -4),
Infanteriewaffen(
"Infanteriewaffen",
TalentGroupType.Nahkampf
, -3),
Kettenstäbe(
"Kettenstäbe",
TalentGroupType.Nahkampf
, -1),
Kettenwaffen(
"Kettenwaffen",
TalentGroupType.Nahkampf
, -3),
Lanzenreiten(
"Lanzenreiten",
TalentGroupType.Fernkampf
),
Peitsche(
"Peitsche",
TalentGroupType.Nahkampf
, -1),
Raufen(
"Raufen",
TalentGroupType.Nahkampf
, 0),
Ringen(
"Ringen",
TalentGroupType.Nahkampf
, 0),
Säbel(
"Säbel",
TalentGroupType.Nahkampf
, -2),
Schleuder(
"Schleuder",
TalentGroupType.Fernkampf
, -2),
Schwerter(
"Schwerter",
TalentGroupType.Nahkampf
, -2),
Speere(
"Speere",
TalentGroupType.Nahkampf
, -3),
Stäbe(
"Stäbe",
TalentGroupType.Nahkampf
, -2),
Wurfbeile(
"Wurfbeile",
TalentGroupType.Fernkampf
, -2),
Wurfmesser(
"Wurfmesser",
TalentGroupType.Fernkampf
, -3),
Wurfspeere(
"Wurfspeere",
TalentGroupType.Fernkampf
, -2),
Zweihandflegel(
"Zweihandflegel",
TalentGroupType.Nahkampf
, -3),
Zweihandhiebwaffen(
"Zweihandhiebwaffen",
TalentGroupType.Nahkampf
, -3),
Zweihandschwertersäbel(
"Zweihandschwerter/-säbel",
TalentGroupType.Nahkampf
, -2),
Akrobatik(
"Akrobatik",
TalentGroupType.Körperlich
),
Athletik(
"Athletik",
TalentGroupType.Körperlich
),
Fliegen(
"Fliegen",
TalentGroupType.Körperlich
),
Gaukeleien(
"Gaukeleien",
TalentGroupType.Körperlich
),
Klettern(
"Klettern",
TalentGroupType.Körperlich
),
Körperbeherrschung(
"Körperbeherrschung",
TalentGroupType.Körperlich
),
Reiten(
"Reiten",
TalentGroupType.Körperlich
),
Schleichen(
"Schleichen",
TalentGroupType.Körperlich
),
Schwimmen(
"Schwimmen",
TalentGroupType.Körperlich
),
Selbstbeherrschung(
"Selbstbeherrschung",
TalentGroupType.Körperlich
),
SichVerstecken(
"Sich verstecken",
TalentGroupType.Körperlich
),
Singen(
"Singen",
TalentGroupType.Körperlich
),
Sinnenschärfe(
"Sinnenschärfe",
TalentGroupType.Körperlich
),
Skifahren(
"Skifahren",
TalentGroupType.Körperlich
),
StimmenImitieren(
"Stimmen imitieren",
TalentGroupType.Körperlich
),
Tanzen(
"Tanzen",
TalentGroupType.Körperlich
),
Taschendiebstahl(
"Taschendiebstahl",
TalentGroupType.Körperlich
),
Zechen(
"Zechen",
TalentGroupType.Körperlich
),
Betören(
"Betören",
TalentGroupType.Gesellschaft
),
Etikette(
"Etikette",
TalentGroupType.Gesellschaft
),
Gassenwissen(
"Gassenwissen",
TalentGroupType.Gesellschaft
),
Lehren(
"Lehren",
TalentGroupType.Gesellschaft
),
Menschenkenntnis(
"Menschenkenntnis",
TalentGroupType.Gesellschaft
),
Schauspielerei(
"Schauspielerei",
TalentGroupType.Gesellschaft
),
SchriftlicherAusdruck(
"Schriftlicher Ausdruck",
TalentGroupType.Gesellschaft
),
SichVerkleiden(
"Sich verkleiden",
TalentGroupType.Gesellschaft
),
Überreden(
"Überreden",
TalentGroupType.Gesellschaft
),
Überzeugen(
"Überzeugen",
TalentGroupType.Gesellschaft
),
Galanterie(
"Galanterie",
TalentGroupType.Gesellschaft
),
Fährtensuchen(
"Fährtensuchen",
TalentGroupType.Natur
),
FallenStellen(
"Fallen stellen",
TalentGroupType.Natur
),
FesselnEntfesseln(
"Fesseln/Entfesseln",
TalentGroupType.Natur
),
FischenAngeln(
"Fischen/Angeln",
TalentGroupType.Natur
),
Orientierung(
"Orientierung",
TalentGroupType.Natur
),
Wettervorhersage(
"Wettervorhersage",
TalentGroupType.Natur
),
Seefischerei(
"Seefischerei",
TalentGroupType.Natur
),
Wildnisleben(
"Wildnisleben",
TalentGroupType.Natur
),
LesenSchreiben(
"Lesen/Schreiben",
TalentGroupType.Schriften
),
LesenSchreibenAltesAlaani(
"Lesen/Schreiben Altes Alaani",
TalentGroupType.Schriften
),
LesenSchreibenAltesAmulashtra(
"Lesen/Schreiben Altes Amulashtra",
TalentGroupType.Schriften
),
LesenSchreibenAmulashtra(
"Lesen/Schreiben Amulashtra",
TalentGroupType.Schriften
),
LesenSchreibenAngram(
"Lesen/Schreiben Angram",
TalentGroupType.Schriften
),
LesenSchreibenArkanil(
"Lesen/Schreiben Arkanil",
TalentGroupType.Schriften
),
LesenSchreibenAsdharia(
"Lesen/Schreiben Asdharia",
TalentGroupType.Schriften
),
LesenSchreibenChrmk(
"Lesen/Schreiben Chrmk",
TalentGroupType.Schriften
),
LesenSchreibenChuchas(
"Lesen/Schreiben Chuchas",
TalentGroupType.Schriften
),
LesenSchreibenDrakhardZinken(
"Lesen/Schreiben Drakhard-Zinken",
TalentGroupType.Schriften
),
LesenSchreibenDraknedGlyphen(
"Lesen/Schreiben Drakned-Glyphen",
TalentGroupType.Schriften
),
LesenSchreibenGeheiligteGlyphenVonUnau(
"Lesen/Schreiben Geheiligte Glyphen von Unau",
TalentGroupType.Schriften
),
LesenSchreibenGimarilGlyphen(
"Lesen/Schreiben Gimaril-Glyphen",
TalentGroupType.Schriften
),
LesenSchreibenGjalskisch(
"Lesen/Schreiben Gjalskisch",
TalentGroupType.Schriften
),
LesenSchreibenHjaldingscheRunen(
"Lesen/Schreiben Hjaldingsche Runen",
TalentGroupType.Schriften
),
LesenSchreibenAltImperialeZeichen(
"Lesen/Schreiben (Alt-)Imperiale Zeichen",
TalentGroupType.Schriften
),
LesenSchreibenIsdira(
"Lesen/Schreiben Isdira",
TalentGroupType.Schriften
),
LesenSchreibenIsdiraAsdharia(
"Lesen/Schreiben Isdira/Asdharia",
TalentGroupType.Schriften
),
LesenSchreibenAltesKemi(
"Lesen/Schreiben Altes Kemi",
TalentGroupType.Schriften
),
LesenSchreibenKuslikerZeichen(
"Lesen/Schreiben Kusliker Zeichen",
TalentGroupType.Schriften
),
LesenSchreibenNanduria(
"Lesen/Schreiben Nanduria",
TalentGroupType.Schriften
),
LesenSchreibenRogolan(
"Lesen/Schreiben Rogolan",
TalentGroupType.Schriften
),
LesenSchreibenTrollischeRaumbilderschrift(
"Lesen/Schreiben Trollische Raumbilderschrift",
TalentGroupType.Schriften
),
LesenSchreibenTulamidya(
"Lesen/Schreiben Tulamidya",
TalentGroupType.Schriften
),
LesenSchreibenUrtulamidya(
"Lesen/Schreiben Urtulamidya",
TalentGroupType.Schriften
),
LesenSchreibenZhayad(
"Lesen/Schreiben Zhayad",
TalentGroupType.Schriften
),
LesenSchreibenMahrischeGlyphen(
"Lesen/Schreiben Mahrische Glyphen",
TalentGroupType.Schriften
),
LesenSchreibenWudu(
"Lesen/Schreiben Wudu",
TalentGroupType.Schriften
),
SprachenKennen(
"Sprachen kennen",
TalentGroupType.Sprachen
),
SprachenKennenGarethi(
"Sprachen kennen Garethi",
TalentGroupType.Sprachen
),
SprachenKennenBosparano(
"Sprachen kennen Bosparano",
TalentGroupType.Sprachen
),
SprachenKennenAltImperialAureliani(
"Sprachen kennen Alt-Imperial/Aureliani",
TalentGroupType.Sprachen
),
SprachenKennenZyklopäisch(
"Sprachen kennen Zyklopäisch",
TalentGroupType.Sprachen
),
SprachenKennenTulamidya(
"Sprachen kennen Tulamidya",
TalentGroupType.Sprachen
),
SprachenKennenUrtulamidya(
"Sprachen kennen Urtulamidya",
TalentGroupType.Sprachen
),
SprachenKennenZelemja(
"Sprachen kennen Zelemja",
TalentGroupType.Sprachen
),
SprachenKennenAltesKemi(
"Sprachen kennen Altes Kemi",
TalentGroupType.Sprachen
),
SprachenKennenAlaani(
"Sprachen kennen Alaani",
TalentGroupType.Sprachen
),
SprachenKennenZhulchammaqra(
"Sprachen kennen Zhulchammaqra",
TalentGroupType.Sprachen
),
SprachenKennenFerkina(
"Sprachen kennen Ferkina",
TalentGroupType.Sprachen
),
SprachenKennenRuuz(
"Sprachen kennen Ruuz",
TalentGroupType.Sprachen
),
SprachenKennenRabensprache(
"Sprachen kennen Rabensprache",
TalentGroupType.Sprachen
),
SprachenKennenNujuka(
"Sprachen kennen Nujuka",
TalentGroupType.Sprachen
),
SprachenKennenMohisch(
"Sprachen kennen Mohisch",
TalentGroupType.Sprachen
),
SprachenKennenThorwalsch(
"Sprachen kennen Thorwalsch",
TalentGroupType.Sprachen
),
SprachenKennenHjaldingsch(
"Sprachen kennen Hjaldingsch",
TalentGroupType.Sprachen
),
SprachenKennenIsdira(
"Sprachen kennen Isdira",
TalentGroupType.Sprachen
),
SprachenKennenAsdharia(
"Sprachen kennen Asdharia",
TalentGroupType.Sprachen
),
SprachenKennenRogolan(
"Sprachen kennen Rogolan",
TalentGroupType.Sprachen
),
SprachenKennenAngram(
"Sprachen kennen Angram",
TalentGroupType.Sprachen
),
SprachenKennenOloghaijan(
"Sprachen kennen Ologhaijan",
TalentGroupType.Sprachen
),
SprachenKennenOloarkh(
"Sprachen kennen Oloarkh",
TalentGroupType.Sprachen
),
SprachenKennenGoblinisch(
"Sprachen kennen Goblinisch",
TalentGroupType.Sprachen
),
SprachenKennenTrollisch(
"Sprachen kennen Trollisch",
TalentGroupType.Sprachen
),
SprachenKennenRssahh(
"Sprachen kennen Rssahh",
TalentGroupType.Sprachen
),
SprachenKennenGrolmisch(
"Sprachen kennen Grolmisch",
TalentGroupType.Sprachen
),
SprachenKennenKoboldisch(
"Sprachen kennen Koboldisch",
TalentGroupType.Sprachen
),
SprachenKennenDrachisch(
"Sprachen kennen Drachisch",
TalentGroupType.Sprachen
),
SprachenKennenZhayad(
"Sprachen kennen Zhayad",
TalentGroupType.Sprachen
),
SprachenKennenAtak(
"Sprachen kennen Atak",
TalentGroupType.Sprachen
),
SprachenKennenFüchsisch(
"Sprachen kennen Füchsisch",
TalentGroupType.Sprachen
),
SprachenKennenMahrisch(
"Sprachen kennen Mahrisch",
TalentGroupType.Sprachen
),
SprachenKennenRissoal(
"Sprachen kennen Rissoal",
TalentGroupType.Sprachen
),
SprachenKennenMolochisch(
"Sprachen kennen Molochisch",
TalentGroupType.Sprachen
),
SprachenKennenNeckergesang(
"Sprachen kennen Neckergesang",
TalentGroupType.Sprachen
),
SprachenKennenZLit(
"Sprachen kennen Z'Lit",
TalentGroupType.Sprachen
),
SprachenKennenWudu(
"Sprachen kennen Wudu",
TalentGroupType.Sprachen
),
Anatomie(
"Anatomie",
TalentGroupType.Wissen
),
Baukunst(
"Baukunst",
TalentGroupType.Wissen
),
BrettKartenspiel(
"Brett-/Kartenspiel",
TalentGroupType.Wissen
),
Geografie(
"Geografie",
TalentGroupType.Wissen
),
Geschichtswissen(
"Geschichtswissen",
TalentGroupType.Wissen
),
Gesteinskunde(
"Gesteinskunde",
TalentGroupType.Wissen
),
GötterUndKulte(
"Götter und Kulte",
TalentGroupType.Wissen
),
Heraldik(
"Heraldik",
TalentGroupType.Wissen
),
Hüttenkunde(
"Hüttenkunde",
TalentGroupType.Wissen
),
Schiffbau(
"Schiffbau",
TalentGroupType.Wissen
),
Kriegskunst(
"Kriegskunst",
TalentGroupType.Wissen
),
Kryptographie(
"Kryptographie",
TalentGroupType.Wissen
),
Magiekunde(
"Magiekunde",
TalentGroupType.Wissen
),
Mechanik(
"Mechanik",
TalentGroupType.Wissen
),
Pflanzenkunde(
"Pflanzenkunde",
TalentGroupType.Wissen
),
Philosophie(
"Philosophie",
TalentGroupType.Wissen
),
Rechnen(
"Rechnen",
TalentGroupType.Wissen
),
Rechtskunde(
"Rechtskunde",
TalentGroupType.Wissen
),
SagenUndLegenden(
"Sagen und Legenden",
TalentGroupType.Wissen
),
Schätzen(
"Schätzen",
TalentGroupType.Wissen
),
Sprachenkunde(
"Sprachenkunde",
TalentGroupType.Wissen
),
Staatskunst(
"Staatskunst",
TalentGroupType.Wissen
),
Sternkunde(
"Sternkunde",
TalentGroupType.Wissen
),
Tierkunde(
"Tierkunde",
TalentGroupType.Wissen
),
Abrichten(
"Abrichten",
TalentGroupType.Handwerk
),
Ackerbau(
"Ackerbau",
TalentGroupType.Handwerk
),
Alchimie(
"Alchimie",
TalentGroupType.Handwerk
),
Bergbau(
"Bergbau",
TalentGroupType.Handwerk
),
Bogenbau(
"Bogenbau",
TalentGroupType.Handwerk
),
BooteFahren(
"<NAME>",
TalentGroupType.Handwerk
),
Brauer(
"Brauer",
TalentGroupType.Handwerk
),
Drucker(
"Drucker",
TalentGroupType.Handwerk
),
FahrzeugLenken(
"Fahrzeug lenken",
TalentGroupType.Handwerk
),
Falschspiel(
"Falschspiel",
TalentGroupType.Handwerk
),
Feinmechanik(
"Feinmechanik",
TalentGroupType.Handwerk
),
Feuersteinbearbeitung(
"Feuersteinbearbeitung",
TalentGroupType.Handwerk
),
Fleischer(
"Fleischer",
TalentGroupType.Handwerk
),
GerberKürschner(
"Gerber/Kürschner",
TalentGroupType.Handwerk
),
Glaskunst(
"Glaskunst",
TalentGroupType.Handwerk
),
Grobschmied(
"Grobschmied",
TalentGroupType.Handwerk
),
Handel(
"Handel",
TalentGroupType.Handwerk
),
Hauswirtschaft(
"Hauswirtschaft",
TalentGroupType.Handwerk
),
HeilkundeGift(
"Heilkunde: Gift",
TalentGroupType.Handwerk
),
HeilkundeKrankheiten(
"Heilkunde: Krankheiten",
TalentGroupType.Handwerk
),
HeilkundeSeele(
"Heilkunde: Seele",
TalentGroupType.Handwerk
),
HeilkundeWunden(
"Heilkunde: Wunden",
TalentGroupType.Handwerk
),
Kartographie(
"Kartographie",
TalentGroupType.Handwerk
),
HundeschlittenFahren(
"Hundeschlitten fahren",
TalentGroupType.Handwerk
),
EisseglerFahren(
"Eissegler fahren",
TalentGroupType.Handwerk
),
Kapellmeister(
"Kapellmeister",
TalentGroupType.Handwerk
),
Steuermann(
"Steuermann",
TalentGroupType.Handwerk
),
Holzbearbeitung(
"Holzbearbeitung",
TalentGroupType.Handwerk
),
Instrumentenbauer(
"Instrumentenbauer",
TalentGroupType.Handwerk
),
Kartografie(
"Kartografie",
TalentGroupType.Handwerk
),
Kochen(
"Kochen",
TalentGroupType.Handwerk
),
Kristallzucht(
"Kristallzucht",
TalentGroupType.Handwerk
),
Lederarbeiten(
"Lederarbeiten",
TalentGroupType.Handwerk
),
MalenZeichnen(
"Malen/Zeichnen",
TalentGroupType.Handwerk
),
Maurer(
"Maurer",
TalentGroupType.Handwerk
),
Metallguss(
"Metallguss",
TalentGroupType.Handwerk
),
Musizieren(
"Musizieren",
TalentGroupType.Handwerk
),
SchlösserKnacken(
"<NAME>",
TalentGroupType.Handwerk
),
SchnapsBrennen(
"<NAME>",
TalentGroupType.Handwerk
),
Schneidern(
"Schneidern",
TalentGroupType.Handwerk
),
Seefahrt(
"Seefahrt",
TalentGroupType.Handwerk
),
Seiler(
"Seiler",
TalentGroupType.Handwerk
),
Steinmetz(
"Steinmetz",
TalentGroupType.Handwerk
),
SteinschneiderJuwelier(
"Steinschneider/Juwelier",
TalentGroupType.Handwerk
),
Stellmacher(
"Stellmacher",
TalentGroupType.Handwerk
),
StoffeFärben(
"<NAME>",
TalentGroupType.Handwerk
),
Tätowieren(
"Tätowieren",
TalentGroupType.Handwerk
),
Töpfern(
"Töpfern",
TalentGroupType.Handwerk
),
Viehzucht(
"Viehzucht",
TalentGroupType.Handwerk
),
Webkunst(
"Webkunst",
TalentGroupType.Handwerk
),
Winzer(
"Winzer",
TalentGroupType.Handwerk
),
Zimmermann(
"Zimmermann",
TalentGroupType.Handwerk
),
Gefahreninstinkt(
"Gefahreninstinkt",
TalentGroupType.Gaben
),
Zwergennase(
"Zwergennase",
TalentGroupType.Gaben
),
GeisterRufen(
"<NAME>ufen",
TalentGroupType.Gaben
),
GeisterBannen(
"<NAME>en",
TalentGroupType.Gaben
),
GeisterBinden(
"Geister binden",
TalentGroupType.Gaben
),
GeisterAufnehmen(
"Geister aufnehmen",
TalentGroupType.Gaben
),
PirschUndAnsitzjagd(
"Pirsch- und Ansitzjagd",
TalentGroupType.Meta
),
NahrungSammeln(
"Nahrung sammeln",
TalentGroupType.Meta
),
Kräutersuchen(
"Kräutersuchen",
TalentGroupType.Meta
),
WacheHalten(
"Wache halten",
TalentGroupType.Meta
),
Ritualkenntnis(
"Ritualkenntnis",
TalentGroupType.Gaben
),
RitualkenntnisAchazSchamane(
"Ritualkenntnis: Achaz-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisAlchimist(
"Ritualkenntnis: Alchimist",
TalentGroupType.Gaben
),
RitualkenntnisAlhanisch(
"Ritualkenntnis: Alhanisch",
TalentGroupType.Gaben
),
RitualkenntnisDerwisch(
"Ritualkenntnis: Derwisch",
TalentGroupType.Gaben
),
RitualkenntnisDruide(
"Ritualkenntnis: Druide",
TalentGroupType.Gaben
),
RitualkenntnisDruidischGeodisch(
"Ritualkenntnis: Druidisch-Geodisch",
TalentGroupType.Gaben
),
RitualkenntnisDurroDûn(
"Ritualkenntnis: Durro-Dûn",
TalentGroupType.Gaben
),
RitualkenntnisFerkinaSchamane(
"Ritualkenntnis: Ferkina-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisGjalskerSchamane(
"Ritualkenntnis: Gjalsker-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisGoblinSchamanin(
"Ritualkenntnis: Goblin-Schamanin",
TalentGroupType.Gaben
),
RitualkenntnisGeode(
"Ritualkenntnis: Geode",
TalentGroupType.Gaben
),
RitualkenntnisGildenmagie(
"Ritualkenntnis: Gildenmagie",
TalentGroupType.Gaben
),
RitualkenntnisGüldenländisch(
"Ritualkenntnis: Güldenländisch",
TalentGroupType.Gaben
),
RitualkenntnisGrolmisch(
"Ritualkenntnis: Grolmisch",
TalentGroupType.Gaben
),
RitualkenntnisHexe(
"Ritualkenntnis: Hexe",
TalentGroupType.Gaben
),
RitualkenntnisKophtanisch(
"Ritualkenntnis: Kophtanisch",
TalentGroupType.Gaben
),
RitualkenntnisKristallomantie(
"Ritualkenntnis: Kristallomantie",
TalentGroupType.Gaben
),
RitualkenntnisMudramulisch(
"Ritualkenntnis: Mudramulisch",
TalentGroupType.Gaben
),
RitualkenntnisNivesenSchamane(
"Ritualkenntnis: Nivesen-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisOrkSchamane(
"Ritualkenntnis: Ork-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisRunenzauberei(
"Ritualkenntnis: Runenzauberei",
TalentGroupType.Gaben
),
RitualkenntnisSatuarisch(
"Ritualkenntnis: Satuarisch",
TalentGroupType.Gaben
),
RitualkenntnisScharlatan(
"Ritualkenntnis: Scharlatan",
TalentGroupType.Gaben
),
RitualkenntnisTapasuul(
"Ritualkenntnis: Tapasuul",
TalentGroupType.Gaben
),
RitualkenntnisTrollzackerSchamane(
"Ritualkenntnis: Trollzacker-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisWaldmenschenSchamane(
"Ritualkenntnis: Waldmenschen-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisWaldmenschenSchamaneUtulus(
"Ritualkenntnis: Waldmenschen-Schamane (Utulus)",
TalentGroupType.Gaben
),
RitualkenntnisWaldmenschenSchamaneTocamuyac(
"Ritualkenntnis: Waldmenschen-Schamane (Tocamuyac)",
TalentGroupType.Gaben
),
RitualkenntnisZaubertänzer(
"Ritualkenntnis: Zaubertänzer",
TalentGroupType.Gaben
),
RitualkenntnisZaubertänzerHazaqi(
"Ritualkenntnis: Zaubertänzer (Hazaqi)",
TalentGroupType.Gaben
),
RitualkenntnisZaubertänzerMajuna(
"Ritualkenntnis: Zaubertänzer (Majuna)",
TalentGroupType.Gaben
),
RitualkenntnisZaubertänzernovadischeSharisad(
"Ritualkenntnis: Zaubertänzer (novadische Sharisad)",
TalentGroupType.Gaben
),
RitualkenntnisZaubertänzertulamidischeSharisad(
"Ritualkenntnis: Zaubertänzer (tulamidische Sharisad)",
TalentGroupType.Gaben
),
RitualkenntnisZibilja(
"Ritualkenntnis: Zibilja",
TalentGroupType.Gaben
),
RitualkenntnisLeonir(
"Ritualkenntnis: Leonir",
TalentGroupType.Gaben
),
Liturgiekenntnis(
"Liturgiekenntnis",
TalentGroupType.Gaben
),
LiturgiekenntnisAngrosch(
"Liturgiekenntnis (Angrosch)",
TalentGroupType.Gaben
),
LiturgiekenntnisAves(
"Liturgiekenntnis (Aves)",
TalentGroupType.Gaben
),
LiturgiekenntnisBoron(
"Liturgiekenntnis (Boron)",
TalentGroupType.Gaben
),
LiturgiekenntnisEfferd(
"Liturgiekenntnis (Efferd)",
TalentGroupType.Gaben
),
LiturgiekenntnisFirun(
"Liturgiekenntnis (Firun)",
TalentGroupType.Gaben
),
LiturgiekenntnisGravesh(
"Liturgiekenntnis (Gravesh)",
TalentGroupType.Gaben
),
LiturgiekenntnisHRanga(
"Liturgiekenntnis (H'Ranga)",
TalentGroupType.Gaben
),
LiturgiekenntnisHSzint(
"Liturgiekenntnis (H'Szint)",
TalentGroupType.Gaben
),
LiturgiekenntnisHesinde(
"Liturgiekenntnis (Hesinde)",
TalentGroupType.Gaben
),
LiturgiekenntnisHimmelswölfe(
"Liturgiekenntnis (Himmelswölfe)",
TalentGroupType.Gaben
),
LiturgiekenntnisIfirn(
"Liturgiekenntnis (Ifirn)",
TalentGroupType.Gaben
),
LiturgiekenntnisIngerimm(
"Liturgiekenntnis (Ingerimm)",
TalentGroupType.Gaben
),
LiturgiekenntnisKamaluq(
"Liturgiekenntnis (Kamaluq)",
TalentGroupType.Gaben
),
LiturgiekenntnisKor(
"Liturgiekenntnis (Kor)",
TalentGroupType.Gaben
),
LiturgiekenntnisNandus(
"Liturgiekenntnis (Nandus)",
TalentGroupType.Gaben
),
LiturgiekenntnisNamenloser(
"Liturgiekenntnis (Namenloser)",
TalentGroupType.Gaben
),
LiturgiekenntnisPeraine(
"Liturgiekenntnis (Peraine)",
TalentGroupType.Gaben
),
LiturgiekenntnisPhex(
"Liturgiekenntnis (Phex)",
TalentGroupType.Gaben
),
LiturgiekenntnisPraios(
"Liturgiekenntnis (Praios)",
TalentGroupType.Gaben
),
LiturgiekenntnisRahja(
"Liturgiekenntnis (Rahja)",
TalentGroupType.Gaben
),
LiturgiekenntnisRondra(
"Liturgiekenntnis (Rondra)",
TalentGroupType.Gaben
),
LiturgiekenntnisSwafnir(
"Liturgiekenntnis (Swafnir)",
TalentGroupType.Gaben
),
LiturgiekenntnisTairach(
"Liturgiekenntnis (Tairach)",
TalentGroupType.Gaben
),
LiturgiekenntnisTravia(
"Liturgiekenntnis (Travia)",
TalentGroupType.Gaben
),
LiturgiekenntnisTsa(
"Liturgiekenntnis (Tsa)",
TalentGroupType.Gaben
),
LiturgiekenntnisZsahh(
"Liturgiekenntnis (Zsahh)",
TalentGroupType.Gaben
),
Prophezeien(
"Prophezeien",
TalentGroupType.Gaben
),
Geräuschhexerei(
"Geräuschhexerei",
TalentGroupType.Gaben
),
Magiegespür(
"Magiegespür",
TalentGroupType.Gaben
),
Tierempathiespeziell(
"Tierempathie (speziell)",
TalentGroupType.Gaben
),
Tierempathiealle(
"Tierempathie (alle)",
TalentGroupType.Gaben
),
Empathie(
"Empathie",
TalentGroupType.Gaben
),
Immanspiel(
"Immanspiel",
TalentGroupType.Körperlich
);
private static final String DEPRECATED_WACHE_NAME = "Wache";
private static final String DEPRECATED_KRÄUTERSUCHE_NAME1 = "Kräutersuchen";
private static final String DEPRECATED_KRÄUTERSUCHE_NAME2 = "<NAME>";
private static final String DEPRECATED_KRÄUTERSUCHE_NAME3 = "Kräutersuche";
private static final String DEPRECATED_PIRSCH_ANSITZ_JAGD = "PirschAnsitzJagd ";
private TalentGroupType groupType;
private Integer be;
private String xmlName;
TalentType(String name, TalentGroupType type) {
this(name, type, null);
}
TalentType(String name, TalentGroupType type, Integer be) {
this.be = be;
this.xmlName = name;
this.groupType = type;
}
public String xmlName() {
if (xmlName != null)
return xmlName;
else
return name();
}
public TalentGroupType type() {
return groupType;
}
public Integer getBe() {
return be;
}
public static TalentType byValue(String type) {
if (DEPRECATED_KRÄUTERSUCHE_NAME1.equalsIgnoreCase(type)
|| DEPRECATED_KRÄUTERSUCHE_NAME2.equalsIgnoreCase(type)
|| DEPRECATED_KRÄUTERSUCHE_NAME3.equalsIgnoreCase(type)) {
return TalentType.Kräutersuchen;
} else if (DEPRECATED_WACHE_NAME.equalsIgnoreCase(type)) {
return TalentType.WacheHalten;
} else if (DEPRECATED_PIRSCH_ANSITZ_JAGD.equalsIgnoreCase(type)) {
return TalentType.PirschUndAnsitzjagd;
} else {
return TalentType.valueOf(type);
}
}
public static TalentType byXmlName(String code) {
if (code == null)
return null;
for (TalentType attr : TalentType.values()) {
if (attr.xmlName().equals(code)) {
return attr;
}
}
return null;
}
} |
UmmMinecraft/AgriCraft | src/main/java/com/infinityraider/agricraft/api/v1/crop/IAgriHarvestable.java | package com.infinityraider.agricraft.api.v1.crop;
import java.util.function.Consumer;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import net.minecraft.entity.LivingEntity;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ActionResultType;
/**
* Interface for harvestable objects.
*/
public interface IAgriHarvestable {
/**
* Determines if the object can currently be harvested or not.
*
* @param entity the entity wishing to harvest
* @return if the object may be harvested.
*/
boolean canBeHarvested(@Nullable LivingEntity entity);
/**
* Harvests the object.
*
* @param consumer a consumer that accepts the items that were harvested.
* @param entity the entity which harvests the crop, may be null if it is harvested by
* automation.
* @return if the harvest was successful.
*/
@Nonnull
ActionResultType harvest(@Nonnull Consumer<ItemStack> consumer, @Nullable LivingEntity entity);
}
|
nasa/gunns | aspects/fluid/source/GunnsFluidEvaporation.hh | #ifndef GunnsFluidEvaporation_EXISTS
#define GunnsFluidEvaporation_EXISTS
/**
@file
@brief GUNNS Fluid Evaporation Link declarations
@defgroup TSM_GUNNS_FLUID_SOURCE_EVAPORATION Evaporation Model
@ingroup TSM_GUNNS_FLUID
@copyright Copyright 2019 United States Government as represented by the Administrator of the
National Aeronautics and Space Administration. All Rights Reserved.
@details
PURPOSE:
- (Classes for the GUNNS Evaporation link model.)
REFERENCE:
- (https://ntrs.nasa.gov/archive/nasa/casi.ntrs.nasa.gov/20110011190.pdf, "Venting of a
Water/Inhibited Propylene Glycol Mixture in a Vacuum Environment - Characterization and
Representative Test Results", Ungar & Erickson, NASA/JSC)
- (https://www.engineeringtoolbox.com/evaporation-water-surface-d_690.html "Evaporation from Water
Surfaces")
ASSUMPTIONS AND LIMITATIONS:
- (The liquid pool has equal temperature as the air.)
- (Heat of phase change is small enough that it can be neglected.)
- (From Reference: Ungar & Erickson, mixture changes to the liquid pool due to evaporation of a
single species from its solution has no effect on evaporation rate.)
- (Effect of bulk gas velocity around the liquid pool, which normally increases evaporation rate,
is ignored. The evaporation coefficient can be tuned to account for a particular velocity.)
- (Does not model boiling.)
- (Liquid pool displacement of the gas volume is neglected.)
LIBRARY DEPENDENCY:
- ((GunnsFluidEvaporation.o))
PROGRAMMERS:
- ((<NAME>) (CACI) (Initial) (December 2017))
@{
*/
#include "core/GunnsFluidLink.hh"
#include "software/SimCompatibility/TsSimCompatibility.hh"
// Forward declarations of pointer types.
class GunnsFluidAccum;
////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief GUNNS Evaporation Model Configuration Data
///
/// @details The sole purpose of this class is to provide a data structure for the GUNNS Fluid
/// Evaporation link model configuration data.
////////////////////////////////////////////////////////////////////////////////////////////////////
class GunnsFluidEvaporationConfigData : public GunnsFluidLinkConfigData
{
public:
FluidProperties::FluidType mGasType; /**< (--) trick_chkpnt_io(**) Gas type that the liquid phase changes into. */
double mEvaporationCoeff; /**< (1/s/kPa) trick_chkpnt_io(**) Evaporation rate coefficient. */
double mPoolMassExponent; /**< (--) trick_chkpnt_io(**) Exponent on pool mass contribution to evaporation rate. */
GunnsFluidAccum* mLiquidPoolAccum; /**< (--) trick_chkpnt_io(**) Pointer to the liquid pool accumulator. */
/// @brief Default constructs this Leak configuration data.
GunnsFluidEvaporationConfigData(
const std::string& name = "",
GunnsNodeList* nodes = 0,
const FluidProperties::FluidType gasType = FluidProperties::NO_FLUID,
const double evaporationCoeff = 0.0,
const double poolMassExponent = 0.0,
GunnsFluidAccum* liquidPoolAccum = 0);
/// @brief Copy constructs this Leak configuration data.
GunnsFluidEvaporationConfigData(const GunnsFluidEvaporationConfigData& that);
/// @brief Default destructs this Leak configuration data.
virtual ~GunnsFluidEvaporationConfigData();
private:
////////////////////////////////////////////////////////////////////////////////////////////
/// @details Assignment operator unavailable since declared private and not implemented.
////////////////////////////////////////////////////////////////////////////////////////////
GunnsFluidEvaporationConfigData& operator =(const GunnsFluidEvaporationConfigData&);
};
////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief GUNNS Evaporation Model Input Data
///
/// @details The sole purpose of this class is to provide a data structure for the GUNNS Fluid
/// Evaporation link model input data.
////////////////////////////////////////////////////////////////////////////////////////////////////
class GunnsFluidEvaporationInputData : public GunnsFluidLinkInputData
{
public:
double mGasTotalPressure; /**< (kPa) Initial gas-side total pressure. */
double mVaporPotential; /**< (kPa) Initial gas-side vapor potential. */
double mEvaporationRate; /**< (kg/s) Initial evaporation mass rate. */
/// @brief Default constructs this Leak input data.
GunnsFluidEvaporationInputData(const bool malfBlockageFlag = false,
const double malfBlockageValue = 0.0,
const double gasTotalPressure = 0.0,
const double vaporPotential = 0.0,
const double evaporationRate = 0.0);
/// @brief Copy constructs this Leak input data.
GunnsFluidEvaporationInputData(const GunnsFluidEvaporationInputData& that);
/// @brief Default destructs this Leak input data.
virtual ~GunnsFluidEvaporationInputData();
private:
////////////////////////////////////////////////////////////////////////////////////////////
/// @details Assignment operator unavailable since declared private and not implemented.
////////////////////////////////////////////////////////////////////////////////////////////
GunnsFluidEvaporationInputData& operator =(const GunnsFluidEvaporationInputData&);
};
////////////////////////////////////////////////////////////////////////////////////////////////////
/// @brief GUNNS Evaporation Link
///
/// @details This link models simple evaporation of a liquid pool into a gas node. This link
/// interfaces with the liquid pool mass via pointer to a GunnsFluidAccum link on the
/// liquid pool node.
///
/// The liquid pool can either be in the same network or a different network than the gas
/// volume. This allows you the option of leaking from a separate liquid network into a
/// gas network, or combining them as desired.
///
/// The liquid can be a mixture, such as Propylene Glycol & Water (GUNNS_PG40, etc.) In
/// such cases, this models evaporation of one specific constituent out of that mixture
/// (i.e. the water). However since GUNNS mixed liquid types have fixed mixtures that
/// can't change, we can't model removing only water from the PG mix. We remove an
/// adjusted mass of the PG mix from the liquid node that would contain the same water
/// mass as the mass of water vapor added to the gas side, so that we conserve the total
/// water mass. The limitation is that the mass of the other liquid constituents are not
/// conserved. See the list of assumptions and limitations above.
///
/// @note This link redefines the meaning of some base class terms:
/// - mPower is the heat of evaporation
////////////////////////////////////////////////////////////////////////////////////////////////////
class GunnsFluidEvaporation : public GunnsFluidLink
{
TS_MAKE_SIM_COMPATIBLE(GunnsFluidEvaporation);
public:
/// @brief Default constructs this Leak.
GunnsFluidEvaporation();
/// @brief Default destructs this Leak.
virtual ~GunnsFluidEvaporation();
/// @brief Initializes this Leak with configuration and input data.
void initialize(const GunnsFluidEvaporationConfigData& configData,
const GunnsFluidEvaporationInputData& inputData,
std::vector<GunnsBasicLink*>& networkLinks,
const int port0,
const int port1);
/// @brief Computes the contributions to the network solution.
virtual void step(const double dt);
/// @brief Computes the flows through the link.
virtual void computeFlows(const double dt);
/// @brief Transports the flows through the link.
virtual void transportFlows(const double dt);
protected:
FluidProperties::FluidType mGasType; /**< *o (--) trick_chkpnt_io(**) Gas type that the liquid phase changes into. */
double mEvaporationCoeff; /**< (1/s/kPa) trick_chkpnt_io(**) Evaporation rate coefficient. */
double mPoolMassExponent; /**< (--) trick_chkpnt_io(**) Exponent on pool mass contribution to evaporation rate. */
GunnsFluidAccum* mLiquidPoolAccum; /**< ** (--) trick_chkpnt_io(**) Pointer to the liquid pool accumulator. */
double mGasTotalPressure; /**< (kPa) Gas-side total pressure. */
double mGasMWeight; /**< ** (1/mol) trick_chkpnt_io(**) Constant molecular weight of the gas type. */
PolyFluid* mEvaporationFluid; /**< *o (--) trick_chkpnt_io(**) Evaporated fluid input to the gas node. */
double mLiquidPoolMass; /**< (kg) trick_chkpnt_io(**) Current mass of the liquid pool. */
static const double mSpringCoeff1; /**< ** (--) trick_chkpnt_io(**) Constant for liquid pool accumulator spring coefficient 1. */
/// @brief Validates the initialization of this link.
void validate(const GunnsFluidEvaporationConfigData& configData,
const GunnsFluidEvaporationInputData& inputData) const;
/// @brief Virtual method for derived links to perform their restart functions.
virtual void restartModel();
/// @brief Checks for valid implementation-specific port node assignment.
virtual bool checkSpecificPortRules(const int port, const int node) const;
/// @brief Updates gas-side pressures.
void updateVaporPotential();
/// @brief Updates liquid side parameters and evaporation rate.
void updateVaporRate(const double dt);
/// @brief Updates heat of evaporation.
void updatePower();
private:
/// @details Define the number of ports this link class has. All objects of the same link
/// class always have the same number of ports. We use an enum rather than a
/// static const int so that we can reuse the NPORTS name and allow each class to
/// define its own value.
enum {NPORTS = 2};
////////////////////////////////////////////////////////////////////////////////////////////
/// @details Copy constructor unavailable since declared private and not implemented.
////////////////////////////////////////////////////////////////////////////////////////////
GunnsFluidEvaporation(const GunnsFluidEvaporation&);
////////////////////////////////////////////////////////////////////////////////////////////
/// @details Assignment operator unavailable since declared private and not implemented.
////////////////////////////////////////////////////////////////////////////////////////////
GunnsFluidEvaporation& operator =(const GunnsFluidEvaporation&);
};
/// @}
#endif
|
zealoussnow/chromium | chrome/browser/ui/webui/bookmarks/bookmarks_browsertest.h | // Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_UI_WEBUI_BOOKMARKS_BOOKMARKS_BROWSERTEST_H_
#define CHROME_BROWSER_UI_WEBUI_BOOKMARKS_BOOKMARKS_BROWSERTEST_H_
#include "chrome/test/base/web_ui_browser_test.h"
class BookmarksBrowserTest : public WebUIBrowserTest {
public:
BookmarksBrowserTest();
BookmarksBrowserTest(const BookmarksBrowserTest&) = delete;
BookmarksBrowserTest& operator=(const BookmarksBrowserTest&) = delete;
~BookmarksBrowserTest() override;
void SetupExtensionAPITest();
void SetupExtensionAPIEditDisabledTest();
};
#endif // CHROME_BROWSER_UI_WEBUI_BOOKMARKS_BOOKMARKS_BROWSERTEST_H_
|
discentem/azure-sdk-for-go | sdk/resourcemanager/storagesync/armstoragesync/zz_generated_workflows_client.go | <filename>sdk/resourcemanager/storagesync/armstoragesync/zz_generated_workflows_client.go<gh_stars>1-10
//go:build go1.16
// +build go1.16
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
package armstoragesync
import (
"context"
"errors"
"fmt"
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/azcore/arm"
armruntime "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/runtime"
"github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
"github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
"net/http"
"net/url"
"strings"
)
// WorkflowsClient contains the methods for the Workflows group.
// Don't use this type directly, use NewWorkflowsClient() instead.
type WorkflowsClient struct {
ep string
pl runtime.Pipeline
subscriptionID string
}
// NewWorkflowsClient creates a new instance of WorkflowsClient with the specified values.
func NewWorkflowsClient(subscriptionID string, credential azcore.TokenCredential, options *arm.ClientOptions) *WorkflowsClient {
cp := arm.ClientOptions{}
if options != nil {
cp = *options
}
if len(cp.Host) == 0 {
cp.Host = arm.AzurePublicCloud
}
return &WorkflowsClient{subscriptionID: subscriptionID, ep: string(cp.Host), pl: armruntime.NewPipeline(module, version, credential, &cp)}
}
// Abort - Abort the given workflow.
// If the operation fails it returns the *StorageSyncError error type.
func (client *WorkflowsClient) Abort(ctx context.Context, resourceGroupName string, storageSyncServiceName string, workflowID string, options *WorkflowsAbortOptions) (WorkflowsAbortResponse, error) {
req, err := client.abortCreateRequest(ctx, resourceGroupName, storageSyncServiceName, workflowID, options)
if err != nil {
return WorkflowsAbortResponse{}, err
}
resp, err := client.pl.Do(req)
if err != nil {
return WorkflowsAbortResponse{}, err
}
if !runtime.HasStatusCode(resp, http.StatusOK) {
return WorkflowsAbortResponse{}, client.abortHandleError(resp)
}
return client.abortHandleResponse(resp)
}
// abortCreateRequest creates the Abort request.
func (client *WorkflowsClient) abortCreateRequest(ctx context.Context, resourceGroupName string, storageSyncServiceName string, workflowID string, options *WorkflowsAbortOptions) (*policy.Request, error) {
urlPath := "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/workflows/{workflowId}/abort"
if client.subscriptionID == "" {
return nil, errors.New("parameter client.subscriptionID cannot be empty")
}
urlPath = strings.ReplaceAll(urlPath, "{subscriptionId}", url.PathEscape(client.subscriptionID))
if resourceGroupName == "" {
return nil, errors.New("parameter resourceGroupName cannot be empty")
}
urlPath = strings.ReplaceAll(urlPath, "{resourceGroupName}", url.PathEscape(resourceGroupName))
if storageSyncServiceName == "" {
return nil, errors.New("parameter storageSyncServiceName cannot be empty")
}
urlPath = strings.ReplaceAll(urlPath, "{storageSyncServiceName}", url.PathEscape(storageSyncServiceName))
if workflowID == "" {
return nil, errors.New("parameter workflowID cannot be empty")
}
urlPath = strings.ReplaceAll(urlPath, "{workflowId}", url.PathEscape(workflowID))
req, err := runtime.NewRequest(ctx, http.MethodPost, runtime.JoinPaths(client.ep, urlPath))
if err != nil {
return nil, err
}
reqQP := req.Raw().URL.Query()
reqQP.Set("api-version", "2020-09-01")
req.Raw().URL.RawQuery = reqQP.Encode()
req.Raw().Header.Set("Accept", "application/json")
return req, nil
}
// abortHandleResponse handles the Abort response.
func (client *WorkflowsClient) abortHandleResponse(resp *http.Response) (WorkflowsAbortResponse, error) {
result := WorkflowsAbortResponse{RawResponse: resp}
if val := resp.Header.Get("x-ms-request-id"); val != "" {
result.XMSRequestID = &val
}
if val := resp.Header.Get("x-ms-correlation-request-id"); val != "" {
result.XMSCorrelationRequestID = &val
}
return result, nil
}
// abortHandleError handles the Abort error response.
func (client *WorkflowsClient) abortHandleError(resp *http.Response) error {
body, err := runtime.Payload(resp)
if err != nil {
return runtime.NewResponseError(err, resp)
}
errType := StorageSyncError{raw: string(body)}
if err := runtime.UnmarshalAsJSON(resp, &errType); err != nil {
return runtime.NewResponseError(fmt.Errorf("%s\n%s", string(body), err), resp)
}
return runtime.NewResponseError(&errType, resp)
}
// Get - Get Workflows resource
// If the operation fails it returns the *StorageSyncError error type.
func (client *WorkflowsClient) Get(ctx context.Context, resourceGroupName string, storageSyncServiceName string, workflowID string, options *WorkflowsGetOptions) (WorkflowsGetResponse, error) {
req, err := client.getCreateRequest(ctx, resourceGroupName, storageSyncServiceName, workflowID, options)
if err != nil {
return WorkflowsGetResponse{}, err
}
resp, err := client.pl.Do(req)
if err != nil {
return WorkflowsGetResponse{}, err
}
if !runtime.HasStatusCode(resp, http.StatusOK) {
return WorkflowsGetResponse{}, client.getHandleError(resp)
}
return client.getHandleResponse(resp)
}
// getCreateRequest creates the Get request.
func (client *WorkflowsClient) getCreateRequest(ctx context.Context, resourceGroupName string, storageSyncServiceName string, workflowID string, options *WorkflowsGetOptions) (*policy.Request, error) {
	const pathTemplate = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/workflows/{workflowId}"
	// Every path parameter is mandatory; reject empty values before escaping.
	if client.subscriptionID == "" {
		return nil, errors.New("parameter client.subscriptionID cannot be empty")
	}
	if resourceGroupName == "" {
		return nil, errors.New("parameter resourceGroupName cannot be empty")
	}
	if storageSyncServiceName == "" {
		return nil, errors.New("parameter storageSyncServiceName cannot be empty")
	}
	if workflowID == "" {
		return nil, errors.New("parameter workflowID cannot be empty")
	}
	endpointPath := strings.ReplaceAll(pathTemplate, "{subscriptionId}", url.PathEscape(client.subscriptionID))
	endpointPath = strings.ReplaceAll(endpointPath, "{resourceGroupName}", url.PathEscape(resourceGroupName))
	endpointPath = strings.ReplaceAll(endpointPath, "{storageSyncServiceName}", url.PathEscape(storageSyncServiceName))
	endpointPath = strings.ReplaceAll(endpointPath, "{workflowId}", url.PathEscape(workflowID))
	req, err := runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.ep, endpointPath))
	if err != nil {
		return nil, err
	}
	query := req.Raw().URL.Query()
	query.Set("api-version", "2020-09-01")
	req.Raw().URL.RawQuery = query.Encode()
	req.Raw().Header.Set("Accept", "application/json")
	return req, nil
}
// getHandleResponse handles the Get response.
func (client *WorkflowsClient) getHandleResponse(resp *http.Response) (WorkflowsGetResponse, error) {
	result := WorkflowsGetResponse{RawResponse: resp}
	// Propagate the service tracing identifiers when they were returned.
	if requestID := resp.Header.Get("x-ms-request-id"); requestID != "" {
		result.XMSRequestID = &requestID
	}
	if correlationID := resp.Header.Get("x-ms-correlation-request-id"); correlationID != "" {
		result.XMSCorrelationRequestID = &correlationID
	}
	// Deserialize the body into the Workflow payload.
	if err := runtime.UnmarshalAsJSON(resp, &result.Workflow); err != nil {
		return WorkflowsGetResponse{}, runtime.NewResponseError(err, resp)
	}
	return result, nil
}
// getHandleError handles the Get error response.
func (client *WorkflowsClient) getHandleError(resp *http.Response) error {
	payload, err := runtime.Payload(resp)
	if err != nil {
		return runtime.NewResponseError(err, resp)
	}
	raw := string(payload)
	errType := StorageSyncError{raw: raw}
	// Keep the raw body in the error even when it fails to parse as JSON.
	if unmarshalErr := runtime.UnmarshalAsJSON(resp, &errType); unmarshalErr != nil {
		return runtime.NewResponseError(fmt.Errorf("%s\n%s", raw, unmarshalErr), resp)
	}
	return runtime.NewResponseError(&errType, resp)
}
// ListByStorageSyncService - Get a Workflow List
// If the operation fails it returns the *StorageSyncError error type.
func (client *WorkflowsClient) ListByStorageSyncService(ctx context.Context, resourceGroupName string, storageSyncServiceName string, options *WorkflowsListByStorageSyncServiceOptions) (WorkflowsListByStorageSyncServiceResponse, error) {
	request, err := client.listByStorageSyncServiceCreateRequest(ctx, resourceGroupName, storageSyncServiceName, options)
	if err != nil {
		return WorkflowsListByStorageSyncServiceResponse{}, err
	}
	response, err := client.pl.Do(request)
	if err != nil {
		return WorkflowsListByStorageSyncServiceResponse{}, err
	}
	// Anything other than 200 OK is treated as a service error.
	if !runtime.HasStatusCode(response, http.StatusOK) {
		return WorkflowsListByStorageSyncServiceResponse{}, client.listByStorageSyncServiceHandleError(response)
	}
	return client.listByStorageSyncServiceHandleResponse(response)
}
// listByStorageSyncServiceCreateRequest creates the ListByStorageSyncService request.
func (client *WorkflowsClient) listByStorageSyncServiceCreateRequest(ctx context.Context, resourceGroupName string, storageSyncServiceName string, options *WorkflowsListByStorageSyncServiceOptions) (*policy.Request, error) {
	const pathTemplate = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageSync/storageSyncServices/{storageSyncServiceName}/workflows"
	// Every path parameter is mandatory; reject empty values before escaping.
	if client.subscriptionID == "" {
		return nil, errors.New("parameter client.subscriptionID cannot be empty")
	}
	if resourceGroupName == "" {
		return nil, errors.New("parameter resourceGroupName cannot be empty")
	}
	if storageSyncServiceName == "" {
		return nil, errors.New("parameter storageSyncServiceName cannot be empty")
	}
	endpointPath := strings.ReplaceAll(pathTemplate, "{subscriptionId}", url.PathEscape(client.subscriptionID))
	endpointPath = strings.ReplaceAll(endpointPath, "{resourceGroupName}", url.PathEscape(resourceGroupName))
	endpointPath = strings.ReplaceAll(endpointPath, "{storageSyncServiceName}", url.PathEscape(storageSyncServiceName))
	req, err := runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.ep, endpointPath))
	if err != nil {
		return nil, err
	}
	query := req.Raw().URL.Query()
	query.Set("api-version", "2020-09-01")
	req.Raw().URL.RawQuery = query.Encode()
	req.Raw().Header.Set("Accept", "application/json")
	return req, nil
}
// listByStorageSyncServiceHandleResponse handles the ListByStorageSyncService response.
func (client *WorkflowsClient) listByStorageSyncServiceHandleResponse(resp *http.Response) (WorkflowsListByStorageSyncServiceResponse, error) {
	result := WorkflowsListByStorageSyncServiceResponse{RawResponse: resp}
	// Propagate the service tracing identifiers when they were returned.
	if requestID := resp.Header.Get("x-ms-request-id"); requestID != "" {
		result.XMSRequestID = &requestID
	}
	if correlationID := resp.Header.Get("x-ms-correlation-request-id"); correlationID != "" {
		result.XMSCorrelationRequestID = &correlationID
	}
	// Deserialize the body into the WorkflowArray payload.
	if err := runtime.UnmarshalAsJSON(resp, &result.WorkflowArray); err != nil {
		return WorkflowsListByStorageSyncServiceResponse{}, runtime.NewResponseError(err, resp)
	}
	return result, nil
}
// listByStorageSyncServiceHandleError handles the ListByStorageSyncService error response.
func (client *WorkflowsClient) listByStorageSyncServiceHandleError(resp *http.Response) error {
	payload, err := runtime.Payload(resp)
	if err != nil {
		return runtime.NewResponseError(err, resp)
	}
	raw := string(payload)
	errType := StorageSyncError{raw: raw}
	// Keep the raw body in the error even when it fails to parse as JSON.
	if unmarshalErr := runtime.UnmarshalAsJSON(resp, &errType); unmarshalErr != nil {
		return runtime.NewResponseError(fmt.Errorf("%s\n%s", raw, unmarshalErr), resp)
	}
	return runtime.NewResponseError(&errType, resp)
}
|
viewdy/phantomjs | src/qt/qtwebkit/Source/WebCore/platform/network/blackberry/NetworkJob.cpp | /*
* Copyright (C) 2009, 2010, 2011, 2012, 2013 Research In Motion Limited. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "config.h"
#include "NetworkJob.h"
#include "AuthenticationChallengeManager.h"
#include "Chrome.h"
#include "ChromeClient.h"
#include "CookieManager.h"
#include "CredentialBackingStore.h"
#include "CredentialStorage.h"
#include "Frame.h"
#include "FrameLoaderClientBlackBerry.h"
#include "HTTPParsers.h"
#include "KURL.h"
#include "MIMESniffing.h"
#include "MIMETypeRegistry.h"
#include "NetworkManager.h"
#include "Page.h"
#include "RSSFilterStream.h"
#include "ResourceHandleClient.h"
#include "ResourceHandleInternal.h"
#include "ResourceRequest.h"
#include <BlackBerryPlatformLog.h>
#include <BlackBerryPlatformSettings.h>
#include <LocalizeResource.h>
#include <network/MultipartStream.h>
#include <network/NetworkStreamFactory.h>
using BlackBerry::Platform::NetworkRequest;
namespace WebCore {
// Upper bound on how many redirects a single load may follow before the job
// fails the load with StatusTooManyRedirects (checked in handleNotifyClose()).
static const int s_redirectMaximum = 10;
// An HTTP status in [100, 200) is informational; no final response yet.
inline static bool isInfo(int statusCode)
{
    return statusCode >= 100 && statusCode <= 199;
}
// True for 3xx statuses that actually redirect; 304 Not Modified is excluded
// because it is a cache-validation result, not a new location.
inline static bool isRedirect(int statusCode)
{
    if (statusCode < 300 || statusCode > 399)
        return false;
    return statusCode != 304;
}
// 401 Unauthorized: the response demands (new) credentials.
inline static bool isUnauthorized(int statusCode)
{
    return 401 == statusCode;
}
// Headers that may legitimately appear multiple times in a response; repeated
// occurrences are combined with ", " instead of overwritten (see
// handleNotifyHeaderReceived). Entries must be lower-case, as they are
// compared against an already lower-cased key.
static const char* const appendableHeaders[] = {"access-control-allow-origin", "allow",
    "set-cookie", "set-cookie2", "vary", "via", "warning"};
static bool isAppendableHeader(const String& key)
{
// Non-standard header fields are conventionally marked by prefixing the field name with X-.
if (key.startsWith("x-"))
return true;
for (size_t i = 0; i < sizeof(appendableHeaders) /sizeof(char*); i++)
if (key == appendableHeaders[i])
return true;
return false;
}
// Constructs a job in its inert initial state; all real wiring (URL, handle,
// frame, stream) happens in initialize(), which is always called immediately
// after construction. The FrameDestructionObserver base starts with no frame.
NetworkJob::NetworkJob()
    : FrameDestructionObserver(0)
    , m_playerId(0)
    , m_deleteJobTimer(this, &NetworkJob::fireDeleteJobTimer)
    , m_streamFactory(0)
    , m_isFile(false)
    , m_isFTP(false)
    , m_isFTPDir(true)
#ifndef NDEBUG
    , m_isRunning(true) // Always started immediately after creation.
#endif
    , m_cancelled(false)
    , m_statusReceived(false)
    , m_dataReceived(false)
    , m_responseSent(false)
    , m_callingClient(false)
    , m_needsRetryAsFTPDirectory(false)
    , m_isOverrideContentType(false)
    , m_newJobWithCredentialsStarted(false)
    , m_isHeadMethod(false)
    , m_extendedStatusCode(0)
    , m_redirectCount(0)
    , m_deferredData(*this)
    , m_deferLoadingCount(0)
    , m_isAuthenticationChallenging(false)
{
}
// Ensure any authentication challenge still pending for this job is dismissed
// before the job is destroyed.
NetworkJob::~NetworkJob()
{
    if (!m_isAuthenticationChallenging)
        return;
    AuthenticationChallengeManager::instance()->cancelAuthenticationChallenge(this);
}
// One-time setup after construction: records identifiers, wires the job to its
// ResourceHandle and Frame, honours content-type / save-name overrides carried
// on the platform request, and wraps the platform stream (interposing an RSS
// filter for main-frame and subframe loads).
void NetworkJob::initialize(int playerId,
    const String& pageGroupName,
    const KURL& url,
    const BlackBerry::Platform::NetworkRequest& request,
    PassRefPtr<ResourceHandle> handle,
    BlackBerry::Platform::NetworkStreamFactory* streamFactory,
    Frame* frame,
    int deferLoadingCount,
    int redirectCount)
{
    BLACKBERRY_ASSERT(handle);
    BLACKBERRY_ASSERT(frame);
    m_playerId = playerId;
    m_pageGroupName = pageGroupName;
    m_response.setURL(url);
    // "file" and "local" URLs are served from the file system; "ftp" gets the
    // directory-listing handling further down the pipeline.
    m_isFile = url.protocolIs("file") || url.protocolIs("local");
    m_isFTP = url.protocolIs("ftp");
    m_handle = handle;
    m_streamFactory = streamFactory;
    if (frame && frame->loader()->pageDismissalEventBeingDispatched() != FrameLoader::NoDismissal) {
        // In the case the frame will be detached soon, we still need to ping the server, but it is
        // no longer safe to reference the Frame object.
        // See http://trac.webkit.org/changeset/65910 and https://bugs.webkit.org/show_bug.cgi?id=30457.
        // m_frame would be set to zero.
        observeFrame(0);
    } else
        observeFrame(frame);
    m_redirectCount = redirectCount;
    m_deferLoadingCount = deferLoadingCount;
    // HEAD responses legitimately have no body; remembered so an empty body is
    // not treated as a failure later.
    m_isHeadMethod = m_handle->firstRequest().httpMethod().upper() == "HEAD";
    // We don't need to explicitly call notifyHeaderReceived, as the Content-Type
    // will ultimately get parsed when sendResponseIfNeeded gets called.
    if (!request.getOverrideContentType().empty()) {
        m_contentType = String(request.getOverrideContentType());
        m_isOverrideContentType = true;
    }
    if (!request.getSuggestedSaveName().empty())
        m_contentDisposition = "filename=" + String(request.getSuggestedSaveName());
    BlackBerry::Platform::FilterStream* wrappedStream = m_streamFactory->createNetworkStream(request, m_playerId);
    ASSERT(wrappedStream);
    // Top-level document loads get an RSS filter interposed so feeds can be
    // rendered, unless the embedder forced a specific content type.
    BlackBerry::Platform::NetworkRequest::TargetType targetType = request.getTargetType();
    if ((targetType == BlackBerry::Platform::NetworkRequest::TargetIsMainFrame
        || targetType == BlackBerry::Platform::NetworkRequest::TargetIsSubframe)
        && !m_isOverrideContentType) {
        RSSFilterStream* filter = new RSSFilterStream();
        filter->setWrappedStream(wrappedStream);
        wrappedStream = filter;
    }
    setWrappedStream(wrappedStream);
}
// Marks the job cancelled (later notifications become no-ops) and aborts the
// underlying platform stream; returns the stream layer's status code.
int NetworkJob::cancelJob()
{
    m_cancelled = true;
    return streamCancel();
}
// Adjusts the nested defer-loading count; when it drops back to "not
// deferring", schedules delivery of any notifications queued while deferred.
void NetworkJob::updateDeferLoadingCount(int delta)
{
    m_deferLoadingCount += delta;
    ASSERT(m_deferLoadingCount >= 0);
    if (isDeferringLoading())
        return;
    // There might already be a timer set to call this, but it's safe to schedule it again.
    m_deferredData.scheduleProcessDeferredData();
}
// Entry point for the status line; queues it while loading is deferred,
// otherwise handles it immediately.
void NetworkJob::notifyStatusReceived(int status, const BlackBerry::Platform::String& message)
{
    if (!shouldDeferLoading()) {
        handleNotifyStatusReceived(status, message);
        return;
    }
    m_deferredData.deferOpen(status, message);
}
// Records the (possibly non-HTTP) status for this load.
void NetworkJob::handleNotifyStatusReceived(int status, const String& message)
{
    // Ignore messages that arrive out of order or after cancellation.
    if (m_responseSent || m_cancelled)
        return;
    // 1xx responses are provisional; wait for the final status.
    if (isInfo(status))
        return;
    m_statusReceived = true;
    m_extendedStatusCode = status;
    // Map non-HTTP codes onto generic HTTP ones: 0 means "succeeded without an
    // HTTP status", negative values are platform errors.
    int httpStatus = status;
    if (!status)
        httpStatus = 200;
    else if (status < 0)
        httpStatus = 404;
    m_response.setHTTPStatusCode(httpStatus);
    m_response.setHTTPStatusText(message);
    if (isUnauthorized(m_extendedStatusCode))
        purgeCredentials();
}
// Processes the full set of response headers from the platform layer.
// Set-Cookie headers are applied to the cookie jar immediately, even while
// loading is deferred, so that any request created during deferral carries the
// new cookies (this especially matters for Set-Cookie on a 401 response: the
// auth dialog defers loading, but the follow-up request needs the cookie).
void NetworkJob::notifyHeadersReceived(const BlackBerry::Platform::NetworkRequest::HeaderList& headers)
{
    bool cookiesEnabled = m_frame && m_frame->loader() && m_frame->loader()->client()
        && static_cast<FrameLoaderClientBlackBerry*>(m_frame->loader()->client())->cookiesEnabled();
    BlackBerry::Platform::NetworkRequest::HeaderList::const_iterator endIt = headers.end();
    for (BlackBerry::Platform::NetworkRequest::HeaderList::const_iterator it = headers.begin(); it != endIt; ++it) {
        // This is safe because handleSetCookieHeader only updates the cookie
        // jar; it never calls back into the loader.
        String keyString(it->first);
        if (cookiesEnabled && equalIgnoringCase(keyString, "set-cookie"))
            handleSetCookieHeader(it->second);
        if (shouldDeferLoading())
            m_deferredData.deferHeaderReceived(it->first, it->second);
        else {
            String valueString;
            if (equalIgnoringCase(keyString, "Location")) {
                // Location, like all headers, is supposed to be Latin-1. But some sites (wikipedia) send it in UTF-8.
                // All byte strings that are valid UTF-8 are also valid Latin-1 (although outside ASCII, the meaning will
                // differ), but the reverse isn't true. So try UTF-8 first and fall back to Latin-1 if it's invalid.
                // (High Latin-1 should be url-encoded anyway.)
                //
                // FIXME: maybe we should do this with other headers?
                // Skip it for now - we don't want to rewrite random bytes unless we're sure. (Definitely don't want to
                // rewrite cookies, for instance.) Needs more investigation.
                //
                // BUGFIX: the previous code assigned the raw value twice, so the
                // UTF-8 decode described above never actually happened.
                valueString = String::fromUTF8(it->second.c_str());
                if (valueString.isNull())
                    valueString = it->second;
            } else
                valueString = it->second;
            handleNotifyHeaderReceived(keyString, valueString);
        }
    }
}
// Entry point for a header of one multipart part; queues it while loading is
// deferred, otherwise handles it immediately.
void NetworkJob::notifyMultipartHeaderReceived(const char* key, const char* value)
{
    if (!shouldDeferLoading()) {
        handleNotifyMultipartHeaderReceived(key, value);
        return;
    }
    m_deferredData.deferMultipartHeaderReceived(key, value);
}
// Maps the platform's authentication notification onto WebCore protection
// space types and reacts to the result:
//   success -> store the credentials that worked;
//   failure -> purge stored credentials and re-request fresh ones;
//   retry   -> keep/update credentials and re-send the request.
void NetworkJob::notifyAuthReceived(NetworkRequest::AuthType authType, NetworkRequest::AuthProtocol authProtocol, NetworkRequest::AuthScheme authScheme, const char* realm, AuthResult result)
{
    // Translate (authType, authProtocol) into a ProtectionSpaceServerType.
    ProtectionSpaceServerType serverType;
    switch (authType) {
    case NetworkRequest::AuthTypeHost:
        switch (authProtocol) {
        case NetworkRequest::AuthProtocolHTTP:
            serverType = ProtectionSpaceServerHTTP;
            break;
        case NetworkRequest::AuthProtocolHTTPS:
            serverType = ProtectionSpaceServerHTTPS;
            break;
        case NetworkRequest::AuthProtocolFTP:
            serverType = ProtectionSpaceServerFTP;
            break;
        case NetworkRequest::AuthProtocolFTPS:
            serverType = ProtectionSpaceServerFTPS;
            break;
        default:
            ASSERT_NOT_REACHED();
            return;
        }
        break;
    case NetworkRequest::AuthTypeProxy:
        switch (authProtocol) {
        case NetworkRequest::AuthProtocolHTTP:
            serverType = ProtectionSpaceProxyHTTP;
            break;
        case NetworkRequest::AuthProtocolHTTPS:
            serverType = ProtectionSpaceProxyHTTPS;
            break;
        case NetworkRequest::AuthProtocolFTP:
        case NetworkRequest::AuthProtocolFTPS:
            // No separate FTPS proxy space exists; both map to the FTP proxy.
            serverType = ProtectionSpaceProxyFTP;
            break;
        default:
            ASSERT_NOT_REACHED();
            return;
        }
        break;
    default:
        ASSERT_NOT_REACHED();
        return;
    }
    // Translate the platform auth scheme into WebCore's enumeration.
    ProtectionSpaceAuthenticationScheme scheme;
    switch (authScheme) {
    case NetworkRequest::AuthSchemeDefault:
        scheme = ProtectionSpaceAuthenticationSchemeDefault;
        break;
    case NetworkRequest::AuthSchemeHTTPBasic:
        scheme = ProtectionSpaceAuthenticationSchemeHTTPBasic;
        break;
    case NetworkRequest::AuthSchemeHTTPDigest:
        scheme = ProtectionSpaceAuthenticationSchemeHTTPDigest;
        break;
    case NetworkRequest::AuthSchemeNegotiate:
        scheme = ProtectionSpaceAuthenticationSchemeNegotiate;
        break;
    case NetworkRequest::AuthSchemeNTLM:
        scheme = ProtectionSpaceAuthenticationSchemeNTLM;
        break;
    default:
        ASSERT_NOT_REACHED();
        return;
    }
    // On success, update stored credentials if necessary
    // On failure, purge credentials and send new request
    // On retry, update stored credentials if necessary and send new request
    if (result == AuthResultFailure)
        purgeCredentials();
    else {
        // Update the credentials that will be stored to match the scheme that was actually used
        AuthenticationChallenge& challenge = authType == NetworkRequest::AuthTypeProxy ? m_handle->getInternal()->m_proxyWebChallenge : m_handle->getInternal()->m_hostWebChallenge;
        if (challenge.hasCredentials()) {
            const ProtectionSpace& oldSpace = challenge.protectionSpace();
            if (oldSpace.authenticationScheme() != scheme && oldSpace.serverType() == serverType) {
                ProtectionSpace newSpace(oldSpace.host(), oldSpace.port(), oldSpace.serverType(), oldSpace.realm(), scheme);
                updateCurrentWebChallenge(AuthenticationChallenge(newSpace, challenge.proposedCredential(), challenge.previousFailureCount(), challenge.failureResponse(), challenge.error()));
            }
        }
        storeCredentials();
    }
    if (result != AuthResultSuccess) {
        switch (sendRequestWithCredentials(serverType, scheme, realm, result != AuthResultRetry)) {
        case SendRequestSucceeded:
            m_newJobWithCredentialsStarted = true;
            break;
        case SendRequestCancelled:
            streamFailedToGetCredentials(authType, authProtocol, authScheme);
            // fall through
        case SendRequestWaiting:
            m_newJobWithCredentialsStarted = false;
            break;
        }
    }
}
// Entry point for a single already-converted header; queues it while loading
// is deferred, otherwise handles it immediately.
void NetworkJob::notifyStringHeaderReceived(const String& key, const String& value)
{
    if (!shouldDeferLoading()) {
        handleNotifyHeaderReceived(key, value);
        return;
    }
    m_deferredData.deferHeaderReceived(key, value);
}
// Records one response header: caches Content-Type / Content-Disposition for
// later response synthesis, routes the BlackBerry FTP status header to
// handleFTPHeader, and folds repeated appendable headers into one value.
void NetworkJob::handleNotifyHeaderReceived(const String& key, const String& value)
{
    // Ignore headers that arrive out of order or after cancellation.
    if (!m_statusReceived || m_responseSent || m_cancelled)
        return;
    const String lowerKey = key.lower();
    if (lowerKey == "content-type")
        m_contentType = value.lower();
    else if (lowerKey == "content-disposition")
        m_contentDisposition = value;
    else if (equalIgnoringCase(key, BlackBerry::Platform::NetworkRequest::HEADER_BLACKBERRY_FTP))
        handleFTPHeader(value);
    // Repeated occurrences of appendable headers are combined with ", ";
    // everything else simply replaces the previous value.
    bool alreadyPresent = m_response.httpHeaderFields().contains(key.utf8().data());
    if (alreadyPresent && isAppendableHeader(lowerKey))
        m_response.setHTTPHeaderField(key, m_response.httpHeaderField(key) + ", " + value);
    else
        m_response.setHTTPHeaderField(key, value);
}
// Accumulates headers for the current multipart part into m_multipartResponse,
// lazily seeding it from the original response minus the headers a multipart
// part is allowed to replace.
void NetworkJob::handleNotifyMultipartHeaderReceived(const String& key, const String& value)
{
    if (!m_multipartResponse) {
        // Create a new response based on the original set of headers + the
        // replacement headers. We only replace the same few headers that gecko
        // does. See netwerk/streamconv/converters/nsMultiMixedConv.cpp.
        m_multipartResponse = adoptPtr(new ResourceResponse);
        m_multipartResponse->setURL(m_response.url());
        // The list of BlackBerry::Platform::replaceHeaders that we do not copy from the original
        // response when generating a response.
        const WebCore::HTTPHeaderMap& map = m_response.httpHeaderFields();
        for (WebCore::HTTPHeaderMap::const_iterator it = map.begin(); it != map.end(); ++it) {
            // Copy each original header unless it is in the null-terminated
            // replaceHeaders list (those may be overridden per part).
            bool needsCopyfromOriginalResponse = true;
            int replaceHeadersIndex = 0;
            while (BlackBerry::Platform::MultipartStream::replaceHeaders[replaceHeadersIndex]) {
                if (it->key.lower() == BlackBerry::Platform::MultipartStream::replaceHeaders[replaceHeadersIndex]) {
                    needsCopyfromOriginalResponse = false;
                    break;
                }
                replaceHeadersIndex++;
            }
            if (needsCopyfromOriginalResponse)
                m_multipartResponse->setHTTPHeaderField(it->key, it->value);
        }
        m_multipartResponse->setIsMultipartPayload(true);
    }
    if (key.lower() == "content-type") {
        // Derive the part's MIME type and charset from its own Content-Type.
        String contentType = value.lower();
        m_multipartResponse->setMimeType(extractMIMETypeFromMediaType(contentType));
        m_multipartResponse->setTextEncodingName(extractCharsetFromMediaType(contentType));
    }
    m_multipartResponse->setHTTPHeaderField(key, value);
}
// Applies a Set-Cookie header value to the cookie jar, honouring the
// "only from main document domain" cookie policy.
void NetworkJob::handleSetCookieHeader(const String& value)
{
    KURL url = m_response.url();
    CookieManager& manager = cookieManager();
    // Under the main-document-only policy, refuse cookies from third-party
    // responses unless the site already has cookies for this URL.
    bool restrictedToMainDocument = manager.cookiePolicy() == CookieStorageAcceptPolicyOnlyFromMainDocumentDomain;
    if (restrictedToMainDocument
        && m_handle->firstRequest().firstPartyForCookies() != url
        && manager.getCookie(url, WithHttpOnlyCookies).isEmpty())
        return;
    manager.setCookies(url, value);
}
// Entry point for a chunk of body bytes; queues it while loading is deferred,
// otherwise handles it immediately.
void NetworkJob::notifyDataReceivedPlain(const char* buf, size_t len)
{
    if (!shouldDeferLoading()) {
        handleNotifyDataReceived(buf, len);
        return;
    }
    m_deferredData.deferDataReceived(buf, len);
}
// Delivers a chunk of body bytes to the client. On the first chunk, sniffs the
// MIME type (unless the embedder overrode the content type, or a local file's
// extension already maps to a MIME type). Ensures the (possibly synthesized)
// response is sent to the client before any data.
void NetworkJob::handleNotifyDataReceived(const char* buf, size_t len)
{
    // Check for messages out of order or after cancel.
    if ((!m_isFile && !m_statusReceived) || m_cancelled)
        return;
    if (!buf || !len)
        return;
    // The loadFile API sets the override content type,
    // this will always be used as the content type and should not be overridden.
    if (!m_dataReceived && !m_isOverrideContentType) {
        bool shouldSniff = true;
        // Don't bother sniffing the content type of a file that
        // is on a file system if it has a MIME mappable file extension.
        // The file extension is likely to be correct.
        if (m_isFile) {
            String urlFilename = m_response.url().lastPathComponent();
            size_t pos = urlFilename.reverseFind('.');
            if (pos != notFound) {
                String extension = urlFilename.substring(pos + 1);
                String mimeType = MIMETypeRegistry::getMIMETypeForExtension(extension);
                if (!mimeType.isEmpty())
                    shouldSniff = false;
            }
        }
        if (shouldSniff) {
            // Sniff only the prefix the sniffer needs; the result (if any)
            // takes precedence over the Content-Type header later.
            MIMESniffer sniffer = MIMESniffer(m_contentType.latin1().data(), MIMETypeRegistry::isSupportedImageResourceMIMEType(m_contentType));
            if (const char* type = sniffer.sniff(buf, std::min(len, sniffer.dataSize())))
                m_sniffedMimeType = String(type);
        }
    }
    m_dataReceived = true;
    // Protect against reentrancy.
    updateDeferLoadingCount(1);
    if (shouldSendClientData()) {
        sendResponseIfNeeded();
        sendMultipartResponseIfNeeded();
        if (isClientAvailable()) {
            RecursionGuard guard(m_callingClient);
            m_handle->client()->didReceiveData(m_handle.get(), buf, len, len);
        }
    }
    updateDeferLoadingCount(-1);
}
// Entry point for upload progress; queues it while loading is deferred,
// otherwise handles it immediately.
void NetworkJob::notifyDataSent(unsigned long long bytesSent, unsigned long long totalBytesToBeSent)
{
    if (!shouldDeferLoading()) {
        handleNotifyDataSent(bytesSent, totalBytesToBeSent);
        return;
    }
    m_deferredData.deferDataSent(bytesSent, totalBytesToBeSent);
}
// Forwards upload progress to the client, guarding against reentrancy.
void NetworkJob::handleNotifyDataSent(unsigned long long bytesSent, unsigned long long totalBytesToBeSent)
{
    if (m_cancelled)
        return;
    // Bump the defer count so a reentrant notification is queued, not nested.
    updateDeferLoadingCount(1);
    if (isClientAvailable()) {
        RecursionGuard guard(m_callingClient);
        m_handle->client()->didSendData(m_handle.get(), bytesSent, totalBytesToBeSent);
    }
    updateDeferLoadingCount(-1);
}
// Entry point for stream close; queues it while loading is deferred,
// otherwise handles it immediately.
void NetworkJob::notifyClose(int status)
{
    if (!shouldDeferLoading()) {
        handleNotifyClose(status);
        return;
    }
    m_deferredData.deferClose(status);
}
// Final notification for this job: decides whether the load finished or
// failed, informs the client accordingly (unless a follow-up job took over —
// FTP-directory retry, redirect, or re-auth), and schedules self-deletion.
void NetworkJob::handleNotifyClose(int status)
{
#ifndef NDEBUG
    m_isRunning = false;
#endif
    if (!m_cancelled) {
        if (!m_statusReceived) {
            // Connection failed before sending notifyStatusReceived: use generic NetworkError.
            notifyStatusReceived(BlackBerry::Platform::FilterStream::StatusNetworkError, BlackBerry::Platform::String::emptyString());
        }
        // shouldReleaseClientResource() may hand the load off to a new job
        // (redirect / retry / credentials); only finish here if it did not.
        if (shouldReleaseClientResource()) {
            if (isRedirect(m_extendedStatusCode) && (m_redirectCount >= s_redirectMaximum))
                m_extendedStatusCode = BlackBerry::Platform::FilterStream::StatusTooManyRedirects;
            sendResponseIfNeeded();
            if (isClientAvailable()) {
                if (isError(status))
                    m_extendedStatusCode = status;
                RecursionGuard guard(m_callingClient);
                if (shouldNotifyClientFailed()) {
                    // Negative codes are platform errors, positive ones HTTP errors.
                    String domain = m_extendedStatusCode < 0 ? ResourceError::platformErrorDomain : ResourceError::httpErrorDomain;
                    ResourceError error(domain, m_extendedStatusCode, m_response.url().string(), m_response.httpStatusText());
                    m_handle->client()->didFail(m_handle.get(), error);
                } else
                    m_handle->client()->didFinishLoading(m_handle.get(), 0);
            }
        }
    }
    // Whoever called notifyClose still have a reference to the job, so
    // schedule the deletion with a timer.
    m_deleteJobTimer.startOneShot(0);
    // Detach from the ResourceHandle in any case.
    m_handle = 0;
    m_multipartResponse = nullptr;
}
// Returns false when another job is taking over the load (FTP-directory
// retry, redirect, pending credentialed retry, or an open auth dialog).
// Note: retryAsFTPDirectory() and handleRedirect() have side effects — they
// start the follow-up job — so evaluation order matters.
bool NetworkJob::shouldReleaseClientResource()
{
    if (m_needsRetryAsFTPDirectory && retryAsFTPDirectory())
        return false;
    if (isRedirect(m_extendedStatusCode) && handleRedirect())
        return false;
    if (m_newJobWithCredentialsStarted)
        return false;
    if (m_isAuthenticationChallenging)
        return false;
    return true;
}
// Decides whether the client should see this load as a failure.
bool NetworkJob::shouldNotifyClientFailed() const
{
    ResourceRequest request = m_handle->firstRequest();
    // Forced downloads never surface failures to the client.
    if (request.forceDownload())
        return false;
    // Negative codes are platform-level errors and always count as failure.
    if (m_extendedStatusCode < 0)
        return true;
    // HTTP errors fail the load only when no body was delivered; HEAD requests
    // and XHRs still get the response reported instead.
    return isError(m_extendedStatusCode) && !m_dataReceived && !m_isHeadMethod && request.targetType() != ResourceRequest::TargetIsXHR;
}
bool NetworkJob::retryAsFTPDirectory()
{
m_needsRetryAsFTPDirectory = false;
ASSERT(m_handle);
ResourceRequest newRequest = m_handle->firstRequest();
KURL url = newRequest.url();
url.setPath(url.path() + "/");
newRequest.setURL(url);
newRequest.setMustHandleInternally(true);
// Update the UI.
handleNotifyHeaderReceived("Location", url.string());
return startNewJobWithRequest(newRequest);
}
// Cancels this job and restarts the load with |newRequest| on a fresh
// NetworkJob, transferring ownership of the ResourceHandle. Gives the client a
// chance to veto or rewrite the request via willSendRequest first. Returns
// true when the follow-up job was started successfully.
bool NetworkJob::startNewJobWithRequest(ResourceRequest& newRequest, bool increaseRedirectCount, bool rereadCookies)
{
    // m_frame can be null if this is a PingLoader job (See NetworkJob::initialize).
    // In this case we don't start new request.
    if (!m_frame)
        return false;
    if (isClientAvailable()) {
        RecursionGuard guard(m_callingClient);
        m_handle->client()->willSendRequest(m_handle.get(), newRequest, m_response);
        // m_cancelled can become true if the url fails the policy check.
        // newRequest can be cleared when the redirect is rejected.
        if (m_cancelled || newRequest.isEmpty())
            return false;
    }
    // Pass the ownership of the ResourceHandle to the new NetworkJob.
    RefPtr<ResourceHandle> handle = m_handle;
    cancelJob();
    int status = NetworkManager::instance()->startJob(m_playerId,
        m_pageGroupName,
        handle,
        newRequest,
        m_streamFactory,
        m_frame,
        m_deferLoadingCount,
        increaseRedirectCount ? m_redirectCount + 1 : m_redirectCount,
        rereadCookies);
    return status == BlackBerry::Platform::FilterStream::StatusSuccess;
}
// Follows an HTTP redirect by starting a new job at the Location URL.
// Returns false (so the current response is delivered as-is) when the
// redirect cannot or should not be followed: missing/invalid Location,
// redirect limit reached, or a redirect into a data: URL.
bool NetworkJob::handleRedirect()
{
    ASSERT(m_handle);
    if (!m_handle || m_redirectCount >= s_redirectMaximum)
        return false;
    String location = m_response.httpHeaderField("Location");
    if (location.isNull())
        return false;
    // Location may be relative; resolve it against the current response URL.
    KURL newURL(m_response.url(), location);
    if (!newURL.isValid())
        return false;
    if (newURL.protocolIsData()) {
        // Redirecting into a data: URL is disallowed.
        m_extendedStatusCode = BlackBerry::Platform::FilterStream::StatusInvalidRedirectToData;
        return false;
    }
    ResourceRequest newRequest = m_handle->firstRequest();
    newRequest.setURL(newURL);
    newRequest.setMustHandleInternally(true);
    // Redirected non-GET/HEAD requests are downgraded to a body-less GET.
    String method = newRequest.httpMethod().upper();
    if (method != "GET" && method != "HEAD") {
        newRequest.setHTTPMethod("GET");
        newRequest.setHTTPBody(0);
        newRequest.clearHTTPContentLength();
        newRequest.clearHTTPContentType();
    }
    // If this request is challenged, store the credentials now (if they are null this will do nothing)
    storeCredentials();
    // Do not send existing credentials with the new request.
    m_handle->getInternal()->m_currentWebChallenge.nullify();
    m_handle->getInternal()->m_proxyWebChallenge.nullify();
    m_handle->getInternal()->m_hostWebChallenge.nullify();
    return startNewJobWithRequest(newRequest, /* increaseRedirectCount */ true, /* rereadCookies */ true);
}
// Finalizes the ResourceResponse (MIME type, charset, content length,
// suggested filename) and delivers it to the client exactly once.
void NetworkJob::sendResponseIfNeeded()
{
    if (m_responseSent)
        return;
    m_responseSent = true;
    if (shouldNotifyClientFailed())
        return;
    String urlFilename;
    if (!m_response.url().protocolIsData())
        urlFilename = m_response.url().lastPathComponent();
    // Get the MIME type that was set by the content sniffer
    // if there's no custom sniffer header, try to set it from the Content-Type header
    // if this fails, guess it from extension.
    String mimeType = m_sniffedMimeType;
    if (m_isFTP && m_isFTPDir)
        mimeType = "application/x-ftp-directory";
    else if (mimeType.isNull())
        mimeType = extractMIMETypeFromMediaType(m_contentType);
    if (mimeType.isNull())
        mimeType = MIMETypeRegistry::getMIMETypeForPath(urlFilename);
    if (!m_dataReceived && mimeType == "application/octet-stream") {
        // For empty content, if can't guess its mimetype from filename, we manually
        // set the mimetype to "text/plain" in case it goes to download.
        mimeType = "text/plain";
    }
    m_response.setMimeType(mimeType);
    // Set encoding from Content-Type header.
    m_response.setTextEncodingName(extractCharsetFromMediaType(m_contentType));
    // Set content length from header.
    String contentLength = m_response.httpHeaderField("Content-Length");
    if (!contentLength.isNull())
        m_response.setExpectedContentLength(contentLength.toInt64());
    // Suggested filename: Content-Disposition wins; otherwise derive one from
    // the URL and/or the MIME type's preferred extension.
    String suggestedFilename = filenameFromHTTPContentDisposition(m_contentDisposition);
    if (suggestedFilename.isEmpty()) {
        // Check and see if an extension already exists.
        String mimeExtension = MIMETypeRegistry::getPreferredExtensionForMIMEType(mimeType);
        if (urlFilename.isEmpty()) {
            if (mimeExtension.isEmpty()) // No extension found for the mimeType.
                suggestedFilename = String(BlackBerry::Platform::LocalizeResource::getString(BlackBerry::Platform::FILENAME_UNTITLED));
            else
                suggestedFilename = String(BlackBerry::Platform::LocalizeResource::getString(BlackBerry::Platform::FILENAME_UNTITLED)) + "." + mimeExtension;
        } else {
            if (urlFilename.reverseFind('.') == notFound && !mimeExtension.isEmpty())
                suggestedFilename = urlFilename + '.' + mimeExtension;
            else
                suggestedFilename = urlFilename;
        }
    }
    m_response.setSuggestedFilename(suggestedFilename);
    if (isClientAvailable()) {
        RecursionGuard guard(m_callingClient);
        m_handle->client()->didReceiveResponse(m_handle.get(), m_response);
    }
}
// Delivers the pending multipart-part response to the client (if any) and
// clears it so the next part builds a fresh one.
void NetworkJob::sendMultipartResponseIfNeeded()
{
    if (!m_multipartResponse || !isClientAvailable())
        return;
    m_handle->client()->didReceiveResponse(m_handle.get(), *m_multipartResponse);
    m_multipartResponse = nullptr;
}
bool NetworkJob::handleFTPHeader(const String& header)
{
size_t spacePos = header.find(' ');
if (spacePos == notFound)
return false;
String statusCode = header.left(spacePos);
switch (statusCode.toInt()) {
case 213:
m_isFTPDir = false;
break;
case 530:
purgeCredentials();
if (m_response.url().protocolIs("ftps"))
sendRequestWithCredentials(ProtectionSpaceServerFTPS, ProtectionSpaceAuthenticationSchemeDefault, "ftp");
else
sendRequestWithCredentials(ProtectionSpaceServerFTP, ProtectionSpaceAuthenticationSchemeDefault, "ftp");
break;
case 230:
storeCredentials();
break;
case 550:
// The user might have entered an URL which point to a directory but forgot type '/',
// e.g., ftp://ftp.trolltech.com/qt/source where 'source' is a directory. We need to
// added '/' and try again.
if (m_handle && !m_handle->firstRequest().url().path().endsWith("/"))
m_needsRetryAsFTPDirectory = true;
break;
}
return true;
}
// Attempts to (re)send the current request with credentials for the given
// protection space. type/scheme/realm identify the challenge; when
// requireCredentials is false the request is sent with an empty credential.
// Credential sources are tried in order: the in-memory CredentialStorage
// (plus the persistent store when BLACKBERRY_CREDENTIAL_PERSIST is enabled),
// the username/password from the proxy settings or the request itself, and
// finally a dialog shown to the user — in which case SendRequestWaiting is
// returned and the job resumes in notifyChallengeResult().
// IMPORTANT: if a new source of credentials is added to this method, be sure
// to handle it in purgeCredentials() as well!
NetworkJob::SendRequestResult NetworkJob::sendRequestWithCredentials(ProtectionSpaceServerType type, ProtectionSpaceAuthenticationScheme scheme, const String& realm, bool requireCredentials)
{
    ASSERT(m_handle);
    if (!m_handle)
        return SendRequestCancelled;

    KURL newURL = m_response.url();
    if (!newURL.isValid())
        return SendRequestCancelled;

    // Work out which host:port the protection space applies to. For proxy
    // challenges this is the proxy address from the platform settings, not
    // the response URL.
    String host;
    int port;
    BlackBerry::Platform::ProxyInfo proxyInfo;
    if (type == ProtectionSpaceProxyHTTP || type == ProtectionSpaceProxyHTTPS) {
        proxyInfo = BlackBerry::Platform::Settings::instance()->proxyInfo(newURL.string());
        ASSERT(!proxyInfo.address.empty());
        if (proxyInfo.address.empty()) {
            // Fall back to the response url if there's no proxy
            // FIXME: is this the best way to handle this?
            host = m_response.url().host();
            port = m_response.url().port();
        } else {
            // proxyInfo returns host:port, without a protocol. KURL can't parse this, so stick http
            // on the front.
            // (We could split into host and port by hand, but that gets hard to parse with IPv6 urls,
            // so better to reuse KURL's parsing.)
            StringBuilder proxyAddress;
            if (type == ProtectionSpaceProxyHTTP)
                proxyAddress.append("http://");
            else
                proxyAddress.append("https://");
            proxyAddress.append(proxyInfo.address);
            KURL proxyURL(KURL(), proxyAddress.toString());
            host = proxyURL.host();
            port = proxyURL.port();
        }
    } else {
        host = m_response.url().host();
        port = m_response.url().port();
    }
    ProtectionSpace protectionSpace(host, port, type, realm, scheme);

    // We've got the scheme and realm. Now we need a username and password.
    Credential credential;
    if (!requireCredentials) {
        // Don't overwrite any existing credentials with the empty credential
        updateCurrentWebChallenge(AuthenticationChallenge(protectionSpace, credential, 0, m_response, ResourceError()), /* allowOverwrite */ false);
    } else if (!(credential = CredentialStorage::get(protectionSpace)).isEmpty()
#if ENABLE(BLACKBERRY_CREDENTIAL_PERSIST)
        || !(credential = CredentialStorage::getFromPersistentStorage(protectionSpace)).isEmpty()
#endif
    ) {
        // First search the CredentialStorage and Persistent Credential Storage
        AuthenticationChallenge challenge(protectionSpace, credential, 0, m_response, ResourceError());
        challenge.setStored(true);
        updateCurrentWebChallenge(challenge);
    } else {
        ASSERT(credential.isEmpty());
        if (m_handle->firstRequest().targetType() == ResourceRequest::TargetIsFavicon) {
            // The favicon loading is triggered after the main resource has been loaded
            // and parsed, so if we cancel the authentication challenge when loading the main
            // resource, we should also cancel loading the favicon when it starts to
            // load. If not we will receive another challenge which may confuse the user.
            return SendRequestCancelled;
        }

        // CredentialStore is empty. Ask the user via dialog.
        String username;
        String password;
        if (!proxyInfo.address.empty()) {
            username = proxyInfo.username;
            // FIXED: this assignment had been replaced by a broken placeholder;
            // the proxy password must accompany the proxy username.
            password = proxyInfo.password;
        } else {
            username = m_handle->getInternal()->m_user;
            password = m_handle->getInternal()->m_pass;
        }

        // Before asking the user for credentials, we check if the URL contains them.
        if (username.isEmpty() && password.isEmpty()) {
            if (m_handle->firstRequest().targetType() != ResourceRequest::TargetIsMainFrame && BlackBerry::Platform::Settings::instance()->isChromeProcess())
                return SendRequestCancelled;
            if (!m_frame || !m_frame->page())
                return SendRequestCancelled;

            // DO overwrite any existing credentials with the empty credential
            updateCurrentWebChallenge(AuthenticationChallenge(protectionSpace, credential, 0, m_response, ResourceError()));
            m_isAuthenticationChallenging = true;
            updateDeferLoadingCount(1);
            AuthenticationChallengeManager::instance()->authenticationChallenge(newURL, protectionSpace,
                Credential(), this, m_frame->page()->chrome().client()->platformPageClient());
            return SendRequestWaiting;
        }

        credential = Credential(username, password, CredentialPersistenceForSession);
        updateCurrentWebChallenge(AuthenticationChallenge(protectionSpace, credential, 0, m_response, ResourceError()));
    }

    notifyChallengeResult(newURL, protectionSpace, AuthenticationChallengeSuccess, credential);
    return m_newJobWithCredentialsStarted ? SendRequestSucceeded : SendRequestCancelled;
}
// Persists the credentials from both the host and the proxy challenge cached
// on the handle (no-ops for challenges that are null or already stored).
void NetworkJob::storeCredentials()
{
    if (!m_handle)
        return;
    storeCredentials(m_handle->getInternal()->m_hostWebChallenge);
    storeCredentials(m_handle->getInternal()->m_proxyWebChallenge);
}
// Saves the credential from a successfully answered challenge into the
// in-memory CredentialStorage and, for proxy challenges, mirrors it into the
// platform proxy settings as well.
void NetworkJob::storeCredentials(AuthenticationChallenge& challenge)
{
    if (challenge.isNull())
        return;
    if (challenge.isStored())
        return;
    // Obviously we can't have successfully authenticated with empty credentials. (To store empty
    // credentials, use purgeCredentials.)
    // FIXME: We should assert here, but there is one path (when the credentials are read from the
    // proxy config entirely in the platform layer) where storeCredentials is called with an empty
    // challenge. The credentials should be passed back from the platform layer for storage in this
    // case - see PR 287791.
    if (challenge.proposedCredential().user().isEmpty() || challenge.proposedCredential().password().isEmpty())
        return;
    CredentialStorage::set(challenge.proposedCredential(), challenge.protectionSpace(), m_response.url());
    challenge.setStored(true);
    if (challenge.protectionSpace().serverType() == ProtectionSpaceProxyHTTP || challenge.protectionSpace().serverType() == ProtectionSpaceProxyHTTPS) {
        // The platform stores proxy credentials keyed by a "host:port" address.
        StringBuilder proxyAddress;
        proxyAddress.append(challenge.protectionSpace().host());
        proxyAddress.append(":");
        proxyAddress.appendNumber(challenge.protectionSpace().port());
        BlackBerry::Platform::ProxyInfo proxyInfo;
        proxyInfo.address = proxyAddress.toString();
        proxyInfo.username = challenge.proposedCredential().user();
        proxyInfo.password = challenge.proposedCredential().password();
        BlackBerry::Platform::Settings::instance()->storeProxyCredentials(proxyInfo);
        if (m_frame && m_frame->page())
            m_frame->page()->chrome().client()->platformPageClient()->syncProxyCredential(challenge.proposedCredential());
    }
}
// Forgets the credentials associated with both the host and the proxy
// challenge and then clears all cached challenge objects on the handle.
void NetworkJob::purgeCredentials()
{
    if (!m_handle)
        return;
    purgeCredentials(m_handle->getInternal()->m_hostWebChallenge);
    purgeCredentials(m_handle->getInternal()->m_proxyWebChallenge);
    m_handle->getInternal()->m_currentWebChallenge.nullify();
    m_handle->getInternal()->m_proxyWebChallenge.nullify();
    m_handle->getInternal()->m_hostWebChallenge.nullify();
}
// Removes the (failed) credential carried by the challenge from every place
// it could have come from: platform proxy settings, the request's own
// user/pass, the in-memory CredentialStorage, and (when enabled) the
// persistent credential store.
void NetworkJob::purgeCredentials(AuthenticationChallenge& challenge)
{
    if (challenge.isNull())
        return;
    const String& purgeUsername = challenge.proposedCredential().user();
    const String& purgePassword = challenge.proposedCredential().password();
    // Since this credential didn't work, remove it from all sources which would return it
    // IMPORTANT: every source that is checked for a password in sendRequestWithCredentials should
    // be handled here!
    if (challenge.protectionSpace().serverType() == ProtectionSpaceProxyHTTP || challenge.protectionSpace().serverType() == ProtectionSpaceProxyHTTPS) {
        BlackBerry::Platform::ProxyInfo proxyInfo = BlackBerry::Platform::Settings::instance()->proxyInfo(m_handle->firstRequest().url().string());
        if (!proxyInfo.address.empty() && purgeUsername == proxyInfo.username.c_str() && purgePassword == proxyInfo.password.c_str()) {
            proxyInfo.username.clear();
            proxyInfo.password.clear();
            BlackBerry::Platform::Settings::instance()->storeProxyCredentials(proxyInfo);
        }
    } else if (m_handle->getInternal()->m_user == purgeUsername && m_handle->getInternal()->m_pass == purgePassword) {
        m_handle->getInternal()->m_user = "";
        m_handle->getInternal()->m_pass = "";
    }
    // Do not compare credential objects with == here, since we don't care about the persistence.
    const Credential& storedCredential = CredentialStorage::get(challenge.protectionSpace());
    if (storedCredential.user() == purgeUsername && storedCredential.password() == purgePassword) {
        CredentialStorage::remove(challenge.protectionSpace());
        challenge.setStored(false);
    }
#if ENABLE(BLACKBERRY_CREDENTIAL_PERSIST)
    const Credential& persistedCredential = credentialBackingStore().getLogin(challenge.protectionSpace());
    if (persistedCredential.user() == purgeUsername && persistedCredential.password() == purgePassword)
        credentialBackingStore().removeLogin(challenge.protectionSpace(), purgeUsername);
#endif
}
// Decides whether response data should be forwarded to the client. Data is
// suppressed while a retry as an FTP directory listing is pending, and for
// redirect responses that carry a Location header (those are followed, not
// rendered).
bool NetworkJob::shouldSendClientData() const
{
    if (m_needsRetryAsFTPDirectory)
        return false;
    if (isRedirect(m_extendedStatusCode) && m_response.httpHeaderFields().contains("Location"))
        return false;
    return true;
}
// Timer callback that asks the NetworkManager to delete this job.
void NetworkJob::fireDeleteJobTimer(Timer<NetworkJob>*)
{
    NetworkManager::instance()->deleteJob(this);
}
// Invoked when an authentication challenge has been answered. On success,
// caches the accepted credential and restarts the request with it; on
// failure, reports to the platform stream that no credentials were obtained.
void NetworkJob::notifyChallengeResult(const KURL& url, const ProtectionSpace& protectionSpace, AuthenticationChallengeResult result, const Credential& credential)
{
    ASSERT(url.isValid());
    ASSERT(url == m_response.url());
    ASSERT(!protectionSpace.host().isEmpty());
    if (m_isAuthenticationChallenging) {
        m_isAuthenticationChallenging = false;
        // NOTE(review): the current job is cancelled even on success,
        // presumably because a fresh job is started below with the credential
        // attached — confirm against startNewJobWithRequest.
        if (result == AuthenticationChallengeSuccess)
            cancelJob();
        updateDeferLoadingCount(-1);
    }
    if (result != AuthenticationChallengeSuccess) {
        // Translate the WebCore protection space into platform auth enums and
        // report the failure to the underlying stream.
        NetworkRequest::AuthType authType;
        NetworkRequest::AuthProtocol authProtocol;
        NetworkRequest::AuthScheme authScheme;
        protectionSpaceToPlatformAuth(protectionSpace, authType, authProtocol, authScheme);
        streamFailedToGetCredentials(authType, authProtocol, authScheme);
        return;
    }
    // Remember the accepted credential (without clobbering existing ones) and
    // reissue the request internally.
    updateCurrentWebChallenge(AuthenticationChallenge(protectionSpace, credential, 0, m_response, ResourceError()), /* allowOverwrite */ false);
    ResourceRequest newRequest = m_handle->firstRequest();
    newRequest.setURL(url);
    newRequest.setMustHandleInternally(true);
    m_newJobWithCredentialsStarted = startNewJobWithRequest(newRequest, /* increaseRedirectCount */ false, /* rereadCookies */ true);
}
// FrameDestructionObserver callback: cancel the job (unless already
// cancelled) before the frame reference goes away.
void NetworkJob::frameDestroyed()
{
    if (m_frame && !m_cancelled)
        cancelJob();
    FrameDestructionObserver::frameDestroyed();
}
// Cancels the job when the page owning this job's frame is being detached.
void NetworkJob::willDetachPage()
{
    if (m_frame && !m_cancelled)
        cancelJob();
}
// Caches the challenge on the handle: always in the generic "current" slot,
// and additionally in the proxy- or host-specific slot depending on the
// protection space. A slot that already holds credentials is only replaced
// when allowOverwrite is true.
void NetworkJob::updateCurrentWebChallenge(const AuthenticationChallenge& challenge, bool allowOverwrite)
{
    if (allowOverwrite || !m_handle->getInternal()->m_currentWebChallenge.hasCredentials())
        m_handle->getInternal()->m_currentWebChallenge = challenge;
    if (challenge.protectionSpace().serverType() == ProtectionSpaceProxyHTTP || challenge.protectionSpace().serverType() == ProtectionSpaceProxyHTTPS) {
        if (allowOverwrite || !m_handle->getInternal()->m_proxyWebChallenge.hasCredentials())
            m_handle->getInternal()->m_proxyWebChallenge = challenge;
    } else {
        if (allowOverwrite || !m_handle->getInternal()->m_hostWebChallenge.hasCredentials())
            m_handle->getInternal()->m_hostWebChallenge = challenge;
    }
}
// Returns the MIME type of the current response.
const BlackBerry::Platform::String NetworkJob::mimeType() const
{
    return m_response.mimeType();
}
} // namespace WebCore
|
rudylee/expo | ios/Pods/Headers/Public/Protobuf/GPBRuntimeTypes.h | ../../../Protobuf/objectivec/GPBRuntimeTypes.h |
LudoCrypt/Backrooms-1.7.10 | src/main/java/net/ludocrypt/backrooms/block/entity/SpawnwallBlockEntity.java | package net.ludocrypt.backrooms.block.entity;
import java.util.UUID;
import net.minecraft.block.entity.BlockEntity;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.nbt.IntArrayTag;
import net.minecraft.nbt.Tag;
public class SpawnwallBlockEntity extends BlockEntity {
    // Player associated with this spawn wall; null until setPlayer() is
    // called or the entity is restored from NBT.
    private PlayerEntity player;

    /**
     * Restores the associated player from NBT. Previously this dereferenced
     * the "player" tag and the world unconditionally, which threw a
     * NullPointerException when the tag was absent or the entity was loaded
     * before its world reference was set.
     */
    public void fromTag(CompoundTag tag) {
        super.fromTag(tag);
        Tag playerTag = tag.get("player");
        if (playerTag != null && world != null) {
            this.player = world.getPlayerByUuid(toUuid(playerTag));
        }
    }

    /**
     * Persists the associated player's UUID. Previously this NPE'd on save
     * when no player had been assigned yet; now the entry is simply omitted.
     */
    public void toTag(CompoundTag tag) {
        super.toTag(tag);
        if (player != null) {
            tag.put("player", fromUuid(player.getUuid()));
        }
    }

    /** @return the player associated with this spawn wall, or null if none. */
    public PlayerEntity getPlayer() {
        return this.player;
    }

    /** Associates a player with this spawn wall and marks the entity dirty for saving. */
    public void setPlayer(PlayerEntity player) {
        this.player = player;
        this.markDirty();
    }

    /** Encodes a UUID as a four-int NBT array tag. */
    public static IntArrayTag fromUuid(UUID uuid) {
        return new IntArrayTag(toIntArray(uuid));
    }

    /**
     * Decodes a UUID from an int-array NBT tag.
     *
     * @throws IllegalArgumentException if the tag is not an int-array tag or
     *         the array does not have exactly 4 elements.
     */
    public static UUID toUuid(Tag tag) {
        // Single reference instance for the int-array type id (the original
        // allocated it twice on the error path).
        IntArrayTag reference = new IntArrayTag(new int[] {});
        if (tag.getType() != reference.getType()) {
            throw new IllegalArgumentException("Expected UUID-Tag to be of type " + reference.getType() + ", but found " + tag.getType() + ".");
        }
        int[] is = ((IntArrayTag) tag).getIntArray();
        if (is.length != 4) {
            throw new IllegalArgumentException("Expected UUID-Array to be of length 4, but found " + is.length + ".");
        }
        return toUuid(is);
    }

    /** Reassembles a UUID from four ints: [mostHi, mostLo, leastHi, leastLo]. */
    public static UUID toUuid(int[] array) {
        return new UUID((long) array[0] << 32 | (long) array[1] & 4294967295L, (long) array[2] << 32 | (long) array[3] & 4294967295L);
    }

    /** Splits a UUID into four ints: [mostHi, mostLo, leastHi, leastLo]. */
    public static int[] toIntArray(UUID uuid) {
        long l = uuid.getMostSignificantBits();
        long m = uuid.getLeastSignificantBits();
        return toIntArray(l, m);
    }

    /** Splits the two 64-bit halves of a UUID into four 32-bit ints. */
    public static int[] toIntArray(long uuidMost, long uuidLeast) {
        return new int[] { (int) (uuidMost >> 32), (int) uuidMost, (int) (uuidLeast >> 32), (int) uuidLeast };
    }
}
|
ABTSoftware/SciChart.NativeScript.Examples | src/platforms/ios/Pods/SciChart/SciChart.framework/Headers/SCIAxisStyle.h | <filename>src/platforms/ios/Pods/SciChart/SciChart.framework/Headers/SCIAxisStyle.h
//
// SCIAxisStyle.h
// SciChart
//
// Created by Admin on 20.10.15.
// Copyright © 2015 SciChart Ltd. All rights reserved.
//
/** \addtogroup Themes
* @{
*/
#import <Foundation/Foundation.h>
#import "SCIPen2D.h"
#import "SCICallbackBlock.h"
#import "SCIStyle.h"
@protocol SCIPen2DProtocol;
@protocol SCIBrush2DProtocol;
@class SCITextFormattingStyle;
#pragma mark SCIAxisLabelClippingMode enum
/**
 * @typedef SCIAxisLabelClippingMode
 * @brief A list of axis label clipping modes.
 * @field SCIAxisLabelClipping_None labels will not be clipped
 * @field SCIAxisLabelClipping_Axis labels will be clipped to the axis area
 * @field SCIAxisLabelClipping_Surface labels will be clipped to the whole chart surface (axes and charts area)
 */
typedef NS_ENUM(NSUInteger, SCIAxisLabelClippingMode) {
/** labels will not be clipped */
SCIAxisLabelClipping_None,
/** labels will be clipped to the axis area */
SCIAxisLabelClipping_Axis,
/** labels will be clipped to the whole chart surface (axes and charts area) */
SCIAxisLabelClipping_Surface
};
#pragma mark - SCIAxisStyle class
/**
 * @abstract SCIAxisStyle class
 * @discussion Contains properties for axis theming and customization
 * @see SCIStyleProtocol
 * @see SCIAxis2DProtocol
 */
@interface SCIAxisStyle : NSObject <SCIStyleProtocol, NSCopying>
#pragma mark Minor ticks setting
/**
 * @abstract Defines if minor ticks will be drawn on axis.
 */
@property (nonatomic) BOOL drawMinorTicks;
/**
 * @abstract Defines minor ticks thickness and color.
 * @code
 * axis.style.minorTickBrush = SCIPenSolid(colorCode: 0xFFFFFFFF, width: 0.5)
 * @endcode
 * @see SCIPen2DProtocol
 */
@property (nonatomic, strong) id<SCIPen2DProtocol> minorTickBrush;
/**
 * @abstract Defines minor ticks line length
 */
@property (nonatomic) float minorTickSize;
#pragma mark Major ticks setting
/**
 * @abstract Defines if major ticks will be drawn on axis.
 */
@property (nonatomic) BOOL drawMajorTicks;
/**
 * @abstract Defines major ticks thickness and color.
 * @code
 * axis.style.majorTickBrush = SCIPenSolid(colorCode: 0xFFFFFFFF, width: 0.5)
 * @endcode
 * @see SCIPen2DProtocol
 */
@property (nonatomic, strong) id<SCIPen2DProtocol> majorTickBrush;
/**
 * @abstract Defines major ticks line length.
 */
@property (nonatomic) float majorTickSize;
#pragma mark Minor grid lines setting
/**
 * @abstract Defines if minor grid lines will be drawn on chart surface.
 */
@property (nonatomic) BOOL drawMinorGridLines;
/**
 * @abstract Defines minor grid lines thickness and color that are drawn on chart surface.
 * @code
 * axis.style.minorGridLineBrush = SCIPenSolid(colorCode: 0xFFFFFFFF, width: 0.5)
 * @endcode
 * @see SCIPen2DProtocol
 */
@property (nonatomic, strong) id<SCIPen2DProtocol> minorGridLineBrush;
#pragma mark Major Grid lines setting
/**
 * @abstract Defines if major grid lines will be drawn on chart surface.
 */
@property (nonatomic) BOOL drawMajorGridLines;
/**
 * @abstract Defines major grid lines thickness and color that are drawn on chart surface
 * @code
 * axis.style.majorGridLineBrush = SCIPenSolid(colorCode: 0xFFFFFFFF, width: 0.5)
 * @endcode
 * @see SCIPen2DProtocol
 */
@property (nonatomic, strong) id<SCIPen2DProtocol> majorGridLineBrush;
#pragma mark Major bands setting
/**
 * @abstract Defines if grid bands will be drawn on chart surface.
 * @discussion Bands are drawn between major grid lines.
 */
@property (nonatomic) BOOL drawMajorBands;
/**
 * @abstract Defines grid bands color that are drawn on chart surface.
 * @code
 * axis.style.gridBandBrush = SCIBrushSolid(colorCode: 0xE1232120)
 * @endcode
 * @discussion Bands are drawn between major grid lines.
 * @see SCIBrush2DProtocol
 */
@property (nonatomic, strong) id<SCIBrush2DProtocol> gridBandBrush;
#pragma mark Axis labels setting
/**
 * @abstract Defines if axis labels will be created.
 */
@property (nonatomic) BOOL drawLabels;
/**
 * @abstract Defines axis labels style.
 * @discussion Contains properties for customization of labels font, color, alignment etc.
 * @see SCITextFormattingStyle
 */
@property (nonatomic, strong) SCITextFormattingStyle * labelStyle;
/**
 * @abstract Defines axis Title label style.
 * @discussion Contains properties for customization of labels font, color, alignment etc.
 * @see SCITextFormattingStyle
 */
@property (nonatomic, strong) SCITextFormattingStyle * axisTitleLabelStyle;
/**
 * @abstract Defines axis labels clipping mode.
 * @discussion Labels can be clipped to axis bounds or chart surface bounds.
 * @see SCIAxisLabelClippingMode
 */
@property (nonatomic) SCIAxisLabelClippingMode labelClipping;
/**
 * @abstract Defines if axis should move labels instead of clipping.
 * @discussion Axis will attempt to lay out labels that are about to get clipped inside the clipping area.
 * @discussion Clipping area is defined with the "labelClipping" property
 */
@property (nonatomic) BOOL moveLabelsToClippingArea;
/**
 * @abstract Defines distance from axis panel border to axis labels. Spacing is horizontal for vertical axes and vertical for horizontal axes.
 */
@property (nonatomic) float labelSpacing;
#pragma mark Axis size
/**
 * @abstract Defines recommended size of axis panel. Width for vertical axes and height for horizontal axis.
 * @discussion Actual axis size can be different if it is not possible to layout axes with recommended size
 */
@property (nonatomic) float recommendedSize;
/**
 * @abstract Internal usage.
 * @discussion Sets visibility for major and minor grid lines, and for ticks, depending on whether the axis is primary or not.
 */
- (void)setGridLinesAndTicksWithPrimaryAxisOption:(BOOL)isPrimaryAxis;
@end
/** @} */
|
targeter21/drools | drools-model/drools-mvel-parser/src/main/javacc-support/org/drools/mvel/parser/GeneratedMvelParserBase.java | <reponame>targeter21/drools
/*
* Copyright (C) 2007-2010 <NAME>.
* Copyright (C) 2011, 2013-2016 The JavaParser Team.
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* This file is part of JavaParser.
*
* JavaParser can be used either under the terms of
* a) the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* b) the terms of the Apache License
*
* You should have received a copy of both licenses in LICENCE.LGPL and
* LICENCE.APACHE. Please refer to those files for details.
*
* JavaParser is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* Modified by Red Hat, Inc.
*/
package org.drools.mvel.parser;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.TreeSet;
import com.github.javaparser.JavaToken;
import com.github.javaparser.Problem;
import com.github.javaparser.TokenRange;
import com.github.javaparser.ast.ArrayCreationLevel;
import com.github.javaparser.ast.Node;
import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.body.Parameter;
import com.github.javaparser.ast.comments.CommentsCollection;
import com.github.javaparser.ast.expr.AnnotationExpr;
import com.github.javaparser.ast.expr.ArrayCreationExpr;
import com.github.javaparser.ast.expr.ArrayInitializerExpr;
import com.github.javaparser.ast.expr.CastExpr;
import com.github.javaparser.ast.expr.EnclosedExpr;
import com.github.javaparser.ast.expr.Expression;
import com.github.javaparser.ast.expr.LambdaExpr;
import com.github.javaparser.ast.expr.NameExpr;
import com.github.javaparser.ast.expr.SimpleName;
import com.github.javaparser.ast.stmt.Statement;
import com.github.javaparser.ast.type.ArrayType;
import com.github.javaparser.ast.type.Type;
import com.github.javaparser.ast.type.UnknownType;
import com.github.javaparser.utils.Pair;
import static com.github.javaparser.GeneratedJavaParserConstants.EOF;
import static com.github.javaparser.ast.type.ArrayType.unwrapArrayTypes;
import static com.github.javaparser.ast.type.ArrayType.wrapInArrayTypes;
import static com.github.javaparser.utils.Utils.assertNotNull;
/**
* Base class for {@link GeneratedJavaParser}
*/
abstract class GeneratedMvelParserBase {
//// Interface with the generated code
abstract GeneratedMvelParserTokenManager getTokenSource();
abstract void ReInit(Provider provider);
/* Returns the JavaParser specific token type of the last matched token */
abstract JavaToken token();
abstract Token getNextToken();
////
/* The problems encountered while parsing */
List<Problem> problems = new ArrayList<>();
/* Configuration flag whether we store tokens and tokenranges */
boolean storeTokens;
    /* Resets the parser for reuse, gaining a little performance over creating a new one */
    void reset(Provider provider) {
        ReInit(provider);
        // Start with a fresh problem list and token-source state.
        problems = new ArrayList<>();
        getTokenSource().reset();
    }
    /**
     * Return the list of JavaParser specific tokens that have been encountered while parsing code using this parser.
     * Note: token storing is controlled by {@link #setStoreTokens(boolean)}.
     *
     * @return a list of tokens
     */
    public List<JavaToken> getTokens() {
        return getTokenSource().getTokens();
    }
    /* The collection of comments encountered while parsing */
    CommentsCollection getCommentsCollection() {
        return getTokenSource().getCommentsCollection();
    }
    /* Reports a problem to the user, anchored to the last matched token */
    void addProblem(String message) {
        // TODO tokenRange only takes the final token. Need all the tokens.
        problems.add(new Problem(message, tokenRange(), null));
    }
    /* Returns a tokenRange that spans the last matched token,
       or null when token storing is disabled */
    TokenRange tokenRange() {
        if (storeTokens) {
            return new TokenRange(token(), token());
        }
        return null;
    }
    /**
     * Return a TokenRange spanning from begin to end,
     * or null when token storing is disabled.
     */
    TokenRange range(JavaToken begin, JavaToken end) {
        if (storeTokens) {
            return new TokenRange(begin, end);
        }
        return null;
    }
    /**
     * Return a TokenRange spanning from begin's first token to end,
     * or null when token storing is disabled.
     */
    TokenRange range(Node begin, JavaToken end) {
        if (storeTokens) {
            return new TokenRange(begin.getTokenRange().get().getBegin(), end);
        }
        return null;
    }
    /**
     * Return a TokenRange spanning from begin to end's last token,
     * or null when token storing is disabled.
     */
    TokenRange range(JavaToken begin, Node end) {
        if (storeTokens) {
            return new TokenRange(begin, end.getTokenRange().get().getEnd());
        }
        return null;
    }
    /**
     * Return a TokenRange spanning from begin's first token to end's last token,
     * or null when token storing is disabled.
     */
    TokenRange range(Node begin, Node end) {
        if (storeTokens) {
            return new TokenRange(begin.getTokenRange().get().getBegin(), end.getTokenRange().get().getEnd());
        }
        return null;
    }
    /**
     * @return secondChoice if firstChoice is JavaToken.UNKNOWN, otherwise firstChoice;
     * null when token storing is disabled.
     */
    JavaToken orIfInvalid(JavaToken firstChoice, JavaToken secondChoice) {
        if (storeTokens) {
            assertNotNull(firstChoice);
            assertNotNull(secondChoice);
            // Also falls back to firstChoice when secondChoice itself is invalid.
            if (firstChoice.valid() || secondChoice.invalid()) {
                return firstChoice;
            }
            return secondChoice;
        }
        return null;
    }
    /**
     * @return the begin-token of secondChoice if firstChoice is JavaToken.UNKNOWN, otherwise firstChoice;
     * null when token storing is disabled.
     */
    JavaToken orIfInvalid(JavaToken firstChoice, Node secondChoice) {
        if (storeTokens) {
            return orIfInvalid(firstChoice, secondChoice.getTokenRange().get().getBegin());
        }
        return null;
    }
    /* Sets the kind of the last matched token to newKind.
       The cast is safe because this parser's token manager produces the
       drools-specific JavaToken subtype. */
    void setTokenKind(int newKind) {
        org.drools.mvel.parser.JavaToken token = (org.drools.mvel.parser.JavaToken)token();
        token.setKind(newKind);
    }
    /* Makes the parser (and its token source) keep a list of tokens */
    void setStoreTokens(boolean storeTokens) {
        this.storeTokens = storeTokens;
        getTokenSource().setStoreTokens(storeTokens);
    }
    /* Called from within a catch block to skip forward to a known token,
       and report the occurred exception as a problem. Returns the range of
       the skipped region, or null when no position information is available. */
    TokenRange recover(int recoveryTokenType, ParseException p) {
        JavaToken begin = null;
        if (p.currentToken != null) {
            begin = token();
        }
        // Consume tokens until the recovery token (or end of input) is reached.
        Token t;
        do {
            t = getNextToken();
        } while (t.kind != recoveryTokenType && t.kind != EOF);
        JavaToken end = token();
        TokenRange tokenRange = null;
        if (begin != null && end != null) {
            tokenRange = range(begin, end);
        }
        problems.add(new Problem(makeMessageForParseException(p), tokenRange, p));
        return tokenRange;
    }
    /**
     * Quickly create a new, empty NodeList.
     */
    <T extends Node> NodeList<T> emptyList() {
        return new NodeList<>();
    }
    /**
     * Add obj to list and return it. Create a new list if list is null.
     */
    <T extends Node> NodeList<T> add(NodeList<T> list, T obj) {
        if (list == null) {
            list = new NodeList<>();
        }
        list.add(obj);
        return list;
    }
/**
* Add obj to list
*/
<T> List<T> add(List<T> list, T obj) {
if (list == null) {
list = new LinkedList<>();
}
list.add(obj);
return list;
}
    /**
     * Propagate expansion of the range on the right to the parent. This is necessary when the right border of the child
     * is determining the right border of the parent (i.e., the child is the last element of the parent). In this case
     * when we "enlarge" the child we should enlarge also the parent.
     */
    private void propagateRangeGrowthOnRight(Node node, Node endNode) {
        if (storeTokens) {
            node.getParentNode().ifPresent(nodeParent -> {
                // Recurse upward only while this node ends exactly where its parent ends.
                boolean isChildOnTheRightBorderOfParent = node.getTokenRange().get().getEnd().equals(nodeParent.getTokenRange().get().getEnd());
                if (isChildOnTheRightBorderOfParent) {
                    propagateRangeGrowthOnRight(nodeParent, endNode);
                }
            });
            node.setTokenRange(range(node, endNode));
        }
    }
    /**
     * Workaround for the rather complex ambiguity that lambdas create:
     * converts the already-parsed expression on the left of {@code ->} into a
     * proper LambdaExpr with the given body.
     */
    Expression generateLambda(Expression ret, Statement lambdaBody) {
        if (ret instanceof EnclosedExpr) {
            // "(x) -> ..." : single parenthesized parameter.
            Expression inner = ((EnclosedExpr) ret).getInner();
            SimpleName id = ((NameExpr) inner).getName();
            NodeList<Parameter> params = add(new NodeList<>(), new Parameter(ret.getTokenRange().orElse(null), new NodeList<>(), new NodeList<>(), new UnknownType(), false, new NodeList<>(), id));
            ret = new LambdaExpr(range(ret, lambdaBody), params, lambdaBody, true);
        } else if (ret instanceof NameExpr) {
            // "x -> ..." : single bare parameter.
            SimpleName id = ((NameExpr) ret).getName();
            NodeList<Parameter> params = add(new NodeList<>(), new Parameter(ret.getTokenRange().orElse(null), new NodeList<>(), new NodeList<>(), new UnknownType(), false, new NodeList<>(), id));
            ret = new LambdaExpr(range(ret, lambdaBody), params, lambdaBody, false);
        } else if (ret instanceof LambdaExpr) {
            // Parameter list was already recognized as a lambda; just attach the body.
            ((LambdaExpr) ret).setBody(lambdaBody);
            propagateRangeGrowthOnRight(ret, lambdaBody);
        } else if (ret instanceof CastExpr) {
            // "(Type) x -> ..." : recurse into the cast's operand.
            CastExpr castExpr = (CastExpr) ret;
            Expression inner = generateLambda(castExpr.getExpression(), lambdaBody);
            castExpr.setExpression(inner);
        } else {
            addProblem("Failed to parse lambda expression! Please create an issue at https://github.com/javaparser/javaparser/issues");
        }
        return ret;
    }
    /**
     * Throws together an ArrayCreationExpr from a lot of pieces: one
     * ArrayCreationLevel is built per bracket pair, pairing each dimension
     * expression with its annotations.
     */
    ArrayCreationExpr juggleArrayCreation(TokenRange range, List<TokenRange> levelRanges, Type type, NodeList<Expression> dimensions, List<NodeList<AnnotationExpr>> arrayAnnotations, ArrayInitializerExpr arrayInitializerExpr) {
        NodeList<ArrayCreationLevel> levels = new NodeList<>();
        for (int i = 0; i < arrayAnnotations.size(); i++) {
            levels.add(new ArrayCreationLevel(levelRanges.get(i), dimensions.get(i), arrayAnnotations.get(i)));
        }
        return new ArrayCreationExpr(range, type, levels, arrayInitializerExpr);
    }
    /**
     * Throws together a Type, taking care of all the array brackets: the
     * partial type is unwrapped into element type + existing brackets, then
     * re-wrapped with the additional brackets appended.
     */
    Type juggleArrayType(Type partialType, List<ArrayType.ArrayBracketPair> additionalBrackets) {
        Pair<Type, List<ArrayType.ArrayBracketPair>> partialParts = unwrapArrayTypes(partialType);
        Type elementType = partialParts.a;
        List<ArrayType.ArrayBracketPair> leftMostBrackets = partialParts.b;
        return wrapInArrayTypes(elementType, leftMostBrackets, additionalBrackets).clone();
    }
/**
* This is the code from ParseException.initialise, modified to be more horizontal.
*/
private String makeMessageForParseException(ParseException exception) {
final StringBuilder sb = new StringBuilder("Parse error. Found ");
final StringBuilder expected = new StringBuilder();
int maxExpectedTokenSequenceLength = 0;
TreeSet<String> sortedOptions = new TreeSet<>();
for (int i = 0; i < exception.expectedTokenSequences.length; i++) {
if (maxExpectedTokenSequenceLength < exception.expectedTokenSequences[i].length) {
maxExpectedTokenSequenceLength = exception.expectedTokenSequences[i].length;
}
for (int j = 0; j < exception.expectedTokenSequences[i].length; j++) {
sortedOptions.add(exception.tokenImage[exception.expectedTokenSequences[i][j]]);
}
}
for (String option : sortedOptions) {
expected.append(" ").append(option);
}
sb.append("");
Token token = exception.currentToken.next;
for (int i = 0; i < maxExpectedTokenSequenceLength; i++) {
String tokenText = token.image;
String escapedTokenText = ParseException.add_escapes(tokenText);
if (i != 0) {
sb.append(" ");
}
if (token.kind == 0) {
sb.append(exception.tokenImage[0]);
break;
}
escapedTokenText = "\"" + escapedTokenText + "\"";
String image = exception.tokenImage[token.kind];
if (image.equals(escapedTokenText)) {
sb.append(image);
} else {
sb.append(" ")
.append(escapedTokenText)
.append(" ")
.append(image);
}
token = token.next;
}
if (exception.expectedTokenSequences.length != 0) {
int numExpectedTokens = exception.expectedTokenSequences.length;
sb.append(", expected")
.append(numExpectedTokens == 1 ? "" : " one of ")
.append(expected.toString());
}
return sb.toString();
}
}
|
PhilShishov/Software-University | JS Essentials/Homeworks/03.Arrays-and-Matrices_Exercise/P04.js | <filename>JS Essentials/Homeworks/03.Arrays-and-Matrices_Exercise/P04.js
// Rotates an array to the right and prints the result. The last element of
// `input` is the rotation count; the remaining elements are the values to
// rotate (in place, one pop/unshift per step). Output is space-separated.
function solve(input) {
    const rotation = Number(input.pop());
    const steps = input.length ? rotation % input.length : 0;
    for (let step = 0; step < steps; step++) {
        input.unshift(input.pop());
    }
    console.log(input.join(" "));
}

solve(['1', '2', '3', '4', '2']);
jackodsteel/RFToolsUtility | src/main/java/mcjty/rftoolsutility/modules/logic/client/GuiRedstoneInformation.java | package mcjty.rftoolsutility.modules.logic.client;
import com.mojang.blaze3d.matrix.MatrixStack;
import mcjty.lib.gui.GenericGuiContainer;
import mcjty.lib.gui.Window;
import mcjty.lib.gui.layout.HorizontalAlignment;
import mcjty.lib.gui.widgets.*;
import mcjty.lib.tileentity.GenericTileEntity;
import mcjty.rftoolsbase.RFToolsBase;
import mcjty.rftoolsutility.RFToolsUtility;
import mcjty.rftoolsutility.modules.logic.LogicBlockModule;
import mcjty.rftoolsutility.modules.logic.items.RedstoneInformationContainer;
import mcjty.rftoolsutility.modules.logic.items.RedstoneInformationItem;
import mcjty.rftoolsutility.modules.logic.network.PacketRemoveChannel;
import mcjty.rftoolsutility.modules.logic.network.PacketSetRedstone;
import mcjty.rftoolsutility.setup.RFToolsUtilityMessages;
import net.minecraft.client.gui.ScreenManager;
import net.minecraft.entity.player.PlayerInventory;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.text.ITextComponent;
import org.apache.commons.lang3.tuple.Pair;
import javax.annotation.Nonnull;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static mcjty.lib.gui.widgets.Widgets.*;
public class GuiRedstoneInformation extends GenericGuiContainer<GenericTileEntity, RedstoneInformationContainer> {
private static final ResourceLocation iconLocation = new ResourceLocation(RFToolsUtility.MODID, "textures/gui/redstone_information.png");
private static final ResourceLocation guiElements = new ResourceLocation(RFToolsBase.MODID, "textures/gui/guielements.png");
public static final int WIDTH = 200;
public static final int HEIGHT = 190;
private WidgetList list;
    public GuiRedstoneInformation(RedstoneInformationContainer container, PlayerInventory inventory) {
        // No backing tile entity: this screen is opened from an item, so null
        // is passed to the generic container screen.
        super(null, container, inventory, RedstoneInformationItem.MANUAL);
        imageWidth = WIDTH;
        imageHeight = HEIGHT;
    }
    // Screen factory matching ScreenManager's expected signature; the text
    // component argument is required by the factory interface but unused here.
    @Nonnull
    public static GuiRedstoneInformation createRedstoneInformationGui(RedstoneInformationContainer container, PlayerInventory inventory, ITextComponent textComponent) {
        return new GuiRedstoneInformation(container, inventory);
    }
    // Registers this screen as the GUI for the redstone information container type.
    public static void register() {
        ScreenManager.register(LogicBlockModule.CONTAINER_REDSTONE_INFORMATION.get(), GuiRedstoneInformation::createRedstoneInformationGui);
    }
@Override
public void init() {
super.init();
list = list(5, 5, 180, 180).name("list").propagateEventsToChildren(true);
Slider slider = slider(185, 5, 10, 180).scrollableName("list");
Panel toplevel = positional().background(iconLocation)
.children(list, slider);
toplevel.bounds(leftPos, topPos, imageWidth, imageHeight);
window = new Window(this, toplevel);
fillList();
}
private void removeChannel(int channel) {
RFToolsUtilityMessages.INSTANCE.sendToServer(new PacketRemoveChannel(channel));
}
private void setRedstone(int channel, String newChoice) {
RFToolsUtilityMessages.INSTANCE.sendToServer(new PacketSetRedstone(channel, "1".equals(newChoice) ? 15 : 0));
}
private boolean isDirty() {
Map<Integer, Pair<String, Integer>> data = menu.getChannelData();
if (data == null) {
return true;
}
if (data.size() != list.getChildCount()) {
return true;
} else {
for (int i = 0; i < list.getChildCount(); i++) {
Panel panel = list.getChild(i);
Integer channel = (Integer) panel.getUserObject();
if (!data.containsKey(channel)) {
return true;
}
}
}
return false;
}
private void updateList() {
if (isDirty()) {
fillList();
}
Map<Integer, Pair<String, Integer>> data = menu.getChannelData();
for (int i = 0 ; i < list.getChildCount() ; i++) {
Panel panel = list.getChild(i);
Integer channel = (Integer)panel.getUserObject();
Pair<String, Integer> pair = data.get(channel);
if (pair != null) {
Label name = panel.findChild("name");
ImageChoiceLabel choice = panel.findChild("choice");
Label value = panel.findChild("value");
if (pair.getLeft().isEmpty()) {
name.text(String.valueOf(channel));
} else {
name.text(channel + " (" + pair.getLeft() + ")");
}
choice.setCurrentChoice(pair.getRight() > 0 ? "1" : "0");
value.text(Integer.toString(pair.getRight()));
}
}
}
@Override
protected void drawWindow(MatrixStack matrixStack) {
updateList();
super.drawWindow(matrixStack);
}
private void fillList() {
list.removeChildren();
Map<Integer, Pair<String, Integer>> data = menu.getChannelData();
if (data == null) {
return;
}
Set<Integer> channels = data.keySet();
List<Integer> sortedChannels = channels.stream().sorted().collect(Collectors.toList());
for (Integer channel : sortedChannels) {
Panel panel = horizontal().desiredHeight(18).userObject(channel);
ImageChoiceLabel choice = new ImageChoiceLabel()
.name("choice")
.desiredWidth(16)
.desiredHeight(16)
.choice("0", "Redstone off", guiElements, 16, 0)
.choice("1", "Redstone on", guiElements, 32, 0)
.event(newChoice -> setRedstone(channel, newChoice));
Label valueLabel = label("0").name("value").desiredWidth(30).horizontalAlignment(HorizontalAlignment.ALIGN_LEFT);
panel.children(label(String.valueOf(channel)).name("name").desiredWidth(60).horizontalAlignment(HorizontalAlignment.ALIGN_LEFT), choice, valueLabel, button("Remove").event(() -> removeChannel(channel)));
list.children(panel);
}
}
}
|
wycivil08/blendocv | source/blender/modifiers/intern/MOD_uvproject.c | /*
* $Id: MOD_uvproject.c 40372 2011-09-19 19:55:59Z dfelinto $
*
* ***** BEGIN GPL LICENSE BLOCK *****
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The Original Code is Copyright (C) 2005 by the Blender Foundation.
* All rights reserved.
*
* Contributor(s): <NAME>
* <NAME>,
* <NAME>,
* <NAME>,
* <NAME>
*
* ***** END GPL LICENSE BLOCK *****
*
*/
/** \file blender/modifiers/intern/MOD_uvproject.c
* \ingroup modifiers
*/
/* UV Project modifier: Generates UVs projected from an object */
#include "DNA_meshdata_types.h"
#include "DNA_camera_types.h"
#include "DNA_object_types.h"
#include "BLI_math.h"
#include "BLI_string.h"
#include "BLI_uvproject.h"
#include "BLI_utildefines.h"
#include "BKE_DerivedMesh.h"
#include "MOD_modifiertypes.h"
#include "MOD_util.h"
#include "MEM_guardedalloc.h"
#include "depsgraph_private.h"
/* Fill a freshly added UV Project modifier with its default settings:
 * no projector objects, no image, default flags and unit aspect/scale. */
static void initData(ModifierData *md)
{
	UVProjectModifierData *umd = (UVProjectModifierData *)md;
	int i = MOD_UVPROJECT_MAXPROJECTORS;

	/* clear all projector slots */
	while(i--)
		umd->projectors[i] = NULL;

	umd->image = NULL;
	umd->flags = 0;
	umd->num_projectors = 1;
	umd->aspectx = 1.0f;
	umd->aspecty = 1.0f;
	umd->scalex = 1.0f;
	umd->scaley = 1.0f;
}
/* Copy every user-visible setting of a UV Project modifier from 'md'
 * into 'target' (used when duplicating objects/modifiers). */
static void copyData(ModifierData *md, ModifierData *target)
{
	const UVProjectModifierData *source = (const UVProjectModifierData *)md;
	UVProjectModifierData *dest = (UVProjectModifierData *)target;
	int i;

	for(i = 0; i < MOD_UVPROJECT_MAXPROJECTORS; i++) {
		dest->projectors[i] = source->projectors[i];
	}

	dest->image = source->image;
	dest->flags = source->flags;
	dest->num_projectors = source->num_projectors;
	dest->aspectx = source->aspectx;
	dest->aspecty = source->aspecty;
	dest->scalex = source->scalex;
	dest->scaley = source->scaley;
	BLI_strncpy(dest->uvlayer_name, source->uvlayer_name, sizeof(dest->uvlayer_name));
}
/* This modifier only needs UV coordinates (MTFace layer) on the mesh. */
static CustomDataMask requiredDataMask(Object *UNUSED(ob), ModifierData *UNUSED(md))
{
	return CD_MASK_MTFACE;
}
/* Visit every object slot referenced by this modifier: all projector
 * slots, including empty ones (the walker receives the slot address). */
static void foreachObjectLink(ModifierData *md, Object *ob,
			ObjectWalkFunc walk, void *userData)
{
	UVProjectModifierData *umd = (UVProjectModifierData *)md;
	int i;

	for(i = 0; i < MOD_UVPROJECT_MAXPROJECTORS; i++) {
		walk(userData, ob, &umd->projectors[i]);
	}
}
/* Visit every datablock referenced by this modifier: the optional image
 * plus all projector objects (delegated to foreachObjectLink). */
static void foreachIDLink(ModifierData *md, Object *ob,
			IDWalkFunc walk, void *userData)
{
	UVProjectModifierData *umd = (UVProjectModifierData *)md;

	walk(userData, ob, (ID **)&umd->image);

	foreachObjectLink(md, ob, (ObjectWalkFunc)walk, userData);
}
/* Register dependency-graph relations: the modified object depends on
 * each assigned projector object (its transform drives the projection). */
static void updateDepgraph(ModifierData *md, DagForest *forest,
			struct Scene *UNUSED(scene),
			Object *UNUSED(ob),
			DagNode *obNode)
{
	UVProjectModifierData *umd = (UVProjectModifierData *)md;
	int i;

	for(i = 0; i < umd->num_projectors; i++) {
		Object *projector = umd->projectors[i];

		if(projector == NULL)
			continue;

		dag_add_relation(forest, dag_get_node(forest, projector), obNode,
				DAG_RL_DATA_DATA | DAG_RL_OB_DATA, "UV Project Modifier");
	}
}
/* Per-projector working data computed once before projecting faces.
 * When 'uci' is non-NULL (panorama camera) projection is done through
 * project_from_camera() instead of 'projmat'. */
typedef struct Projector {
	Object *ob;				/* object this projector is derived from */
	float projmat[4][4];	/* projection matrix */
	float normal[3];		/* projector normal in world space */
	void *uci;				/* optional uv-project info (panorama projection) */
} Projector;
/* Core of the UV Project modifier.
 *
 * Builds a projection matrix (or panorama projection info) for every
 * assigned projector object, transforms the mesh's vertex coordinates to
 * world space and writes the projected coordinates into the active (or
 * named) UV layer. With multiple projectors each face uses the projector
 * whose normal best matches the face normal. When the "override image"
 * flag is off, only faces already using 'umd->image' are touched.
 *
 * Returns 'dm' unchanged when no projector is assigned or the mesh has
 * no UV layers; otherwise returns 'dm' with the UV layer rewritten
 * (a referenced layer is duplicated first so the original is untouched).
 *
 * BUGFIX: the quad test before writing uv[3] used 'if(mf->v3)' in two
 * places; quads are flagged by a non-zero mf->v4 (compare the matrix
 * projection branches, which already test v4). For triangles the old code
 * wrote uv[3] from coords[mf->v4] == coords[0]. Both guards now test v4.
 */
static DerivedMesh *uvprojectModifier_do(UVProjectModifierData *umd,
					 Object *ob, DerivedMesh *dm)
{
	float (*coords)[3], (*co)[3];
	MTFace *tface;
	int i, numVerts, numFaces;
	Image *image = umd->image;
	MFace *mface, *mf;
	int override_image = ((umd->flags & MOD_UVPROJECT_OVERRIDEIMAGE) != 0);
	Projector projectors[MOD_UVPROJECT_MAXPROJECTORS];
	int num_projectors = 0;
	float aspect;
	char uvname[32];
	/* treat zero aspect/scale values as 1.0 to avoid division by zero */
	float aspx= umd->aspectx ? umd->aspectx : 1.0f;
	float aspy= umd->aspecty ? umd->aspecty : 1.0f;
	float scax= umd->scalex ? umd->scalex : 1.0f;
	float scay= umd->scaley ? umd->scaley : 1.0f;
	int free_uci= 0;

	aspect = aspx / aspy;

	/* collect only the projector slots that have an object assigned */
	for(i = 0; i < umd->num_projectors; ++i)
		if(umd->projectors[i])
			projectors[num_projectors++].ob = umd->projectors[i];

	if(num_projectors == 0) return dm;

	/* make sure there are UV layers available */
	if(!CustomData_has_layer(&dm->faceData, CD_MTFACE)) return dm;

	/* make sure we're using an existing layer */
	validate_layer_name(&dm->faceData, CD_MTFACE, umd->uvlayer_name, uvname);

	/* calculate a projection matrix and normal for each projector */
	for(i = 0; i < num_projectors; ++i) {
		float tmpmat[4][4];
		float offsetmat[4][4];
		Camera *cam = NULL;

		/* calculate projection matrix (world -> projector space) */
		invert_m4_m4(projectors[i].projmat, projectors[i].ob->obmat);

		projectors[i].uci= NULL;

		if(projectors[i].ob->type == OB_CAMERA) {
			cam = (Camera *)projectors[i].ob->data;
			if(cam->flag & CAM_PANORAMA) {
				/* panorama cameras project through the uvproject utility
				 * instead of a plain matrix */
				projectors[i].uci= project_camera_info(projectors[i].ob, NULL, aspx, aspy);
				project_camera_info_scale(projectors[i].uci, scax, scay);
				free_uci= 1;
			}
			else {
				float scale= (cam->type == CAM_PERSP) ? cam->clipsta * 32.0f / cam->lens : cam->ortho_scale;
				float xmax, xmin, ymax, ymin;

				/* fit the frustum to the dominant aspect axis */
				if(aspect > 1.0f) {
					xmax = 0.5f * scale;
					ymax = xmax / aspect;
				} else {
					ymax = 0.5f * scale;
					xmax = ymax * aspect;
				}
				xmin = -xmax;
				ymin = -ymax;

				/* scale the matrix */
				xmin *= scax;
				xmax *= scax;
				ymin *= scay;
				ymax *= scay;

				if(cam->type == CAM_PERSP) {
					float perspmat[4][4];
					perspective_m4( perspmat,xmin, xmax, ymin, ymax, cam->clipsta, cam->clipend);
					mul_m4_m4m4(tmpmat, projectors[i].projmat, perspmat);
				} else { /* if(cam->type == CAM_ORTHO) */
					float orthomat[4][4];
					orthographic_m4( orthomat,xmin, xmax, ymin, ymax, cam->clipsta, cam->clipend);
					mul_m4_m4m4(tmpmat, projectors[i].projmat, orthomat);
				}
			}
		} else {
			copy_m4_m4(tmpmat, projectors[i].projmat);
		}

		/* remap projected [-0.5, 0.5] range into [0, 1] UV space */
		unit_m4(offsetmat);
		mul_mat3_m4_fl(offsetmat, 0.5);
		offsetmat[3][0] = offsetmat[3][1] = offsetmat[3][2] = 0.5;

		/* compensate camera shift, corrected for non-square pixel aspect */
		if (cam) {
			if (aspx == aspy) {
				offsetmat[3][0] -= cam->shiftx;
				offsetmat[3][1] -= cam->shifty;
			} else if (aspx < aspy) {
				offsetmat[3][0] -=(cam->shiftx * aspy/aspx);
				offsetmat[3][1] -= cam->shifty;
			} else {
				offsetmat[3][0] -= cam->shiftx;
				offsetmat[3][1] -=(cam->shifty * aspx/aspy);
			}
		}

		mul_m4_m4m4(projectors[i].projmat, tmpmat, offsetmat);

		/* calculate worldspace projector normal (for best projector test) */
		projectors[i].normal[0] = 0;
		projectors[i].normal[1] = 0;
		projectors[i].normal[2] = 1;
		mul_mat3_m4_v3(projectors[i].ob->obmat, projectors[i].normal);
	}

	/* make sure we are not modifying the original UV layer */
	tface = CustomData_duplicate_referenced_layer_named(&dm->faceData,
			CD_MTFACE, uvname);

	numVerts = dm->getNumVerts(dm);

	coords = MEM_callocN(sizeof(*coords) * numVerts,
			"uvprojectModifier_do coords");
	dm->getVertCos(dm, coords);

	/* convert coords to world space */
	for(i = 0, co = coords; i < numVerts; ++i, ++co)
		mul_m4_v3(ob->obmat, *co);

	/* if only one projector, project coords to UVs */
	if(num_projectors == 1 && projectors[0].uci==NULL)
		for(i = 0, co = coords; i < numVerts; ++i, ++co)
			mul_project_m4_v3(projectors[0].projmat, *co);

	mface = dm->getFaceArray(dm);
	numFaces = dm->getNumFaces(dm);

	/* apply coords as UVs, and apply image if tfaces are new */
	for(i = 0, mf = mface; i < numFaces; ++i, ++mf, ++tface) {
		if(override_image || !image || tface->tpage == image) {
			if(num_projectors == 1) {
				if(projectors[0].uci) {
					project_from_camera(tface->uv[0], coords[mf->v1], projectors[0].uci);
					project_from_camera(tface->uv[1], coords[mf->v2], projectors[0].uci);
					project_from_camera(tface->uv[2], coords[mf->v3], projectors[0].uci);
					/* was 'if(mf->v3)': only quads (non-zero v4) have a
					 * fourth corner to project */
					if(mf->v4)
						project_from_camera(tface->uv[3], coords[mf->v4], projectors[0].uci);
				}
				else {
					/* apply transformed coords as UVs */
					tface->uv[0][0] = coords[mf->v1][0];
					tface->uv[0][1] = coords[mf->v1][1];
					tface->uv[1][0] = coords[mf->v2][0];
					tface->uv[1][1] = coords[mf->v2][1];
					tface->uv[2][0] = coords[mf->v3][0];
					tface->uv[2][1] = coords[mf->v3][1];
					if(mf->v4) {
						tface->uv[3][0] = coords[mf->v4][0];
						tface->uv[3][1] = coords[mf->v4][1];
					}
				}
			} else {
				/* multiple projectors, select the closest to face normal
				 * direction
				 */
				float co1[3], co2[3], co3[3], co4[3];
				float face_no[3];
				int j;
				Projector *best_projector;
				float best_dot;

				copy_v3_v3(co1, coords[mf->v1]);
				copy_v3_v3(co2, coords[mf->v2]);
				copy_v3_v3(co3, coords[mf->v3]);

				/* get the untransformed face normal */
				if(mf->v4) {
					copy_v3_v3(co4, coords[mf->v4]);
					normal_quad_v3(face_no, co1, co2, co3, co4);
				} else {
					normal_tri_v3(face_no, co1, co2, co3);
				}

				/* find the projector which the face points at most directly
				 * (projector normal with largest dot product is best)
				 */
				best_dot = dot_v3v3(projectors[0].normal, face_no);
				best_projector = &projectors[0];

				for(j = 1; j < num_projectors; ++j) {
					float tmp_dot = dot_v3v3(projectors[j].normal,
							face_no);
					if(tmp_dot > best_dot) {
						best_dot = tmp_dot;
						best_projector = &projectors[j];
					}
				}

				if(best_projector->uci) {
					project_from_camera(tface->uv[0], coords[mf->v1], best_projector->uci);
					project_from_camera(tface->uv[1], coords[mf->v2], best_projector->uci);
					project_from_camera(tface->uv[2], coords[mf->v3], best_projector->uci);
					/* was 'if(mf->v3)': see note above */
					if(mf->v4)
						project_from_camera(tface->uv[3], coords[mf->v4], best_projector->uci);
				}
				else {
					mul_project_m4_v3(best_projector->projmat, co1);
					mul_project_m4_v3(best_projector->projmat, co2);
					mul_project_m4_v3(best_projector->projmat, co3);
					if(mf->v4)
						mul_project_m4_v3(best_projector->projmat, co4);

					/* apply transformed coords as UVs */
					tface->uv[0][0] = co1[0];
					tface->uv[0][1] = co1[1];
					tface->uv[1][0] = co2[0];
					tface->uv[1][1] = co2[1];
					tface->uv[2][0] = co3[0];
					tface->uv[2][1] = co3[1];
					if(mf->v4) {
						tface->uv[3][0] = co4[0];
						tface->uv[3][1] = co4[1];
					}
				}
			}
		}

		if(override_image) {
			tface->tpage = image;
		}
	}

	MEM_freeN(coords);

	if(free_uci) {
		int j;
		for(j = 0; j < num_projectors; ++j) {
			if(projectors[j].uci) {
				MEM_freeN(projectors[j].uci);
			}
		}
	}
	return dm;
}
/* Object-mode entry point; all real work happens in uvprojectModifier_do(). */
static DerivedMesh *applyModifier(ModifierData *md, Object *ob,
			DerivedMesh *derivedData,
			int UNUSED(useRenderParams),
			int UNUSED(isFinalCalc))
{
	return uvprojectModifier_do((UVProjectModifierData *)md, ob, derivedData);
}
/* Edit-mode entry point: identical behaviour to the object-mode path. */
static DerivedMesh *applyModifierEM(ModifierData *md, Object *ob,
			struct EditMesh *UNUSED(editData),
			DerivedMesh *derivedData)
{
	return applyModifier(md, ob, derivedData, 0, 1);
}
/* Registration table for the UV Project modifier. Entries are positional;
 * NULL means the callback is unused for this modifier type. */
ModifierTypeInfo modifierType_UVProject = {
	/* name */              "UVProject",
	/* structName */        "UVProjectModifierData",
	/* structSize */        sizeof(UVProjectModifierData),
	/* type */              eModifierTypeType_Nonconstructive,
	/* flags */             eModifierTypeFlag_AcceptsMesh
							| eModifierTypeFlag_SupportsMapping
							| eModifierTypeFlag_SupportsEditmode
							| eModifierTypeFlag_EnableInEditmode,

	/* copyData */          copyData,
	/* deformVerts */       NULL,
	/* deformMatrices */    NULL,
	/* deformVertsEM */     NULL,
	/* deformMatricesEM */  NULL,
	/* applyModifier */     applyModifier,
	/* applyModifierEM */   applyModifierEM,
	/* initData */          initData,
	/* requiredDataMask */  requiredDataMask,
	/* freeData */          NULL,
	/* isDisabled */        NULL,
	/* updateDepgraph */    updateDepgraph,
	/* dependsOnTime */     NULL,
	/* dependsOnNormals */  NULL,
	/* foreachObjectLink */ foreachObjectLink,
	/* foreachIDLink */     foreachIDLink,
	/* foreachTexLink */    NULL,
};
|
hlystovea/BBBS | project/api/migrations/0018_auto_20210611_2323.py | # Generated by Django 3.2.3 on 2021-06-11 16:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0017_video_tags'),
]
operations = [
migrations.AlterField(
model_name='movie',
name='tags',
field=models.ManyToManyField(related_name='movies', to='api.Tag', verbose_name='Теги'),
),
migrations.AlterField(
model_name='video',
name='tags',
field=models.ManyToManyField(related_name='videos', to='api.Tag', verbose_name='Теги'),
),
]
|
renquanbo/location-engine-anchor-service | src/main/java/com/breadcrumbdata/anchor_service/dataobject/Level.java | <filename>src/main/java/com/breadcrumbdata/anchor_service/dataobject/Level.java<gh_stars>0
package com.breadcrumbdata.anchor_service.dataobject;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
/**
 * JPA entity representing one floor ("level") of a site: a display name,
 * its physical dimensions and the offset of its origin.
 *
 * Field names double as the persistence mapping and are therefore kept
 * exactly as declared; the id is database-generated.
 */
@Entity
public class Level {

    @Id
    @GeneratedValue
    private Integer id;

    private String name;
    private Double width;
    private Double height;
    private Double xOffset;
    private Double yOffset;

    /** @return database-generated primary key (null until persisted) */
    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    /** @return human-readable level name */
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Double getWidth() {
        return width;
    }

    public void setWidth(Double width) {
        this.width = width;
    }

    public Double getHeight() {
        return height;
    }

    public void setHeight(Double height) {
        this.height = height;
    }

    public Double getxOffset() {
        return xOffset;
    }

    public void setxOffset(Double xOffset) {
        this.xOffset = xOffset;
    }

    public Double getyOffset() {
        return yOffset;
    }

    public void setyOffset(Double yOffset) {
        this.yOffset = yOffset;
    }

    /** Same output shape as before, e.g. {@code Level{id=1, name='Lobby'}}. */
    @Override
    public String toString() {
        return String.format("Level{id=%d, name='%s'}", id, name);
    }
}
|
weimingtom/X-moe | Unpacker/crass/cui/ADX/vgmstream-r526/src/meta/xbox_wvs.c | #include "meta.h"
#include "../util.h"
/* WVS
WVS (found in Metal Arms - Glitch in the System)
*/
/* Parse a .wvs file (Xbox WVS, found in Metal Arms - Glitch in the System)
 * and build a VGMSTREAM for it. Returns NULL on any parse failure. */
VGMSTREAM * init_vgmstream_xbox_wvs(STREAMFILE *streamFile) {
    VGMSTREAM * vgmstream = NULL;
    char filename[260];
    int loop_flag=0;
    int channel_count;
    int i;

    /* check extension, case insensitive */
    streamFile->get_name(streamFile,filename,sizeof(filename));
    if (strcasecmp("wvs",filename_extension(filename))) goto fail;

    /* Header sanity check: stream size at 0x00, 0x4400 at 0x08, 0x69 at 0x0C.
     * NOTE(review): because the comparisons are joined with '&&', the file is
     * only rejected when ALL THREE fields mismatch; '||' looks intended here.
     * Confirm against known-good WVS samples before changing. */
    if((read_16bitLE(0x0C,streamFile)!=0x69) &&
       (read_16bitLE(0x08,streamFile)!=0x4400) &&
       (read_32bitLE(0x0,streamFile)!=get_streamfile_size(streamFile)+0x20))
        goto fail;

    /* Loop seems to be set if offset(0x0A) == 0x472C */
    loop_flag = (read_16bitLE(0x0A,streamFile)==0x472C);

    /* channel count read from the header at 0x0E (known files are stereo) */
    channel_count=read_16bitLE(0x0E,streamFile);

    /* build the VGMSTREAM */
    vgmstream = allocate_vgmstream(channel_count,loop_flag);
    if (!vgmstream) goto fail;

    /* fill in the vital statistics */
    /* sample rate at 0x10; Xbox ADPCM: 36-byte frames decode to 64 samples */
    vgmstream->channels = channel_count;
    vgmstream->sample_rate = read_32bitLE(0x10,streamFile);
    vgmstream->coding_type = coding_XBOX;
    vgmstream->num_samples = read_32bitLE(0,streamFile) / 36 * 64 / vgmstream->channels;
    vgmstream->layout_type = layout_none;
    vgmstream->meta_type = meta_XBOX_WVS;

    /* when looping, the whole stream loops */
    if(loop_flag) {
        vgmstream->loop_start_sample=0;
        vgmstream->loop_end_sample=vgmstream->num_samples;
    }

    /* open the file for reading by each channel; audio data starts at 0x20 */
    {
        for (i=0;i<channel_count;i++) {
            vgmstream->ch[i].streamfile = streamFile->open(streamFile,filename,36);
            vgmstream->ch[i].offset = 0x20;

            if (!vgmstream->ch[i].streamfile) goto fail;
        }
    }

    return vgmstream;

    /* clean up anything we may have opened */
fail:
    if (vgmstream) close_vgmstream(vgmstream);
    return NULL;
}
|
lnrCoder/Algorithm | src/com/liang/leetcode/L326.java | <filename>src/com/liang/leetcode/L326.java
package com.liang.leetcode;
/**
 * LeetCode 326: power of three.
 *
 * Determines whether an integer is a power of three without using loops
 * or recursion.
 */
public class L326 {
    public static void main(String[] args) {
        L326 l = new L326();
        boolean result = l.isPowerOfThree(27);
        System.out.println(result);
    }

    /**
     * Returns true iff {@code n} is a power of three (3^0 == 1 counts).
     *
     * 3^19 = 1162261467 is the largest power of three that fits in an int,
     * and since 3 is prime, a positive n divides it exactly when n itself
     * is a power of three. This replaces the original
     * {@code Math.log10(n) / Math.log10(3)} test, which relies on
     * floating-point rounding landing exactly on an integer and can
     * misclassify inputs; integer arithmetic is exact. Non-positive
     * inputs (0, negatives) return false.
     *
     * @param n value to test
     * @return true if n is a power of three
     */
    public boolean isPowerOfThree(int n) {
        return n > 0 && 1162261467 % n == 0;
    }
}
|
JackSSS/Catch | server.js | <filename>server.js
// Express entry point: wires up MongoDB, CORS, static assets and the API routes.
var express = require('express');
var app = express();
var mongoose = require('mongoose');

// Use the hosted database when deployed; fall back to a local dev DB.
mongoose.connect(process.env.MONGOLAB_URI || "mongodb://localhost/catch_dev");
// Surface connection problems explicitly instead of letting them die as
// unhandled 'error' events deep inside mongoose.
mongoose.connection.on('error', function(err) {
  console.error('MongoDB connection error: ' + err);
});

var catchRouter = require(__dirname + '/routes/catch_routes');
var contactsRouter = require(__dirname + '/routes/contacts_routes');
var authRouter = require(__dirname + '/routes/auth_routes');

// NOTE(review): a hard-coded fallback secret means tokens signed when
// APP_SECRET is unset in production are forgeable -- consider failing fast.
process.env.APP_SECRET = process.env.APP_SECRET || 'suchmysterynoonewilleverknow';

// Permissive CORS so browser/mobile clients on other origins can call the API.
app.use(function(req, res, next) {
  res.header('Access-Control-Allow-Origin', '*');
  res.header('Access-Control-Allow-Headers',
    'Origin, X-Requested-With, Content-Type, Accept, Authorization, token');
  res.header('Access-Control-Allow-Methods', 'GET, POST, PUT, PATCH, DELETE, OPTIONS');
  next();
});

// Static front-end, then the API routers (all mounted under /api).
app.use(express.static(__dirname + '/www'));
app.use('/api', catchRouter);
app.use('/api', contactsRouter);
app.use('/api', authRouter);

var port = process.env.PORT || 3000;
app.listen(port, function() {
  console.log('server up on port: ' + port);
});
|
ZetBrush/VidGen | MultiPick/src/main/java/com/luminous/pick/MainActivity.java | <filename>MultiPick/src/main/java/com/luminous/pick/MainActivity.java<gh_stars>0
package com.luminous.pick;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.os.*;
import android.os.Process;
import android.support.v7.widget.DefaultItemAnimator;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.StaggeredGridLayoutManager;
import android.util.Log;
import android.view.View;
import android.view.Window;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.nostra13.universalimageloader.cache.memory.impl.WeakMemoryCache;
import com.nostra13.universalimageloader.core.DisplayImageOptions;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.nostra13.universalimageloader.core.ImageLoaderConfiguration;
import com.nostra13.universalimageloader.core.assist.ImageScaleType;
/**
 * Entry activity of the multi-pick flow.
 *
 * Lets the user pick multiple gallery images (via the custom
 * ACTION_MULTIPLE_PICK activity), shows low-res previews in a horizontal
 * RecyclerView, and on "go" re-encodes full-size copies into
 * {@code <external storage>/req_images} before launching the video-generator
 * activity with that directory path.
 */
public class MainActivity extends Activity {

    // Big preview mirroring the first fully visible list item.
    private ImageView currentImage;
    private RecyclerView recyclerView;
    private Button btnGalleryPickMul;
    private Button next;
    private MyRecyclerViewAdapter myRecyclerViewAdapter;
    private StaggeredGridLayoutManager staggeredGridLayoutManager;
    private RecyclerView.ItemAnimator itemAnimator;
    // Bitmaps currently shown in the list (placeholders until decoded).
    private ArrayList<Bitmap> arrayList = new ArrayList<>();
    // Scratch list filled by the background decode task.
    private ArrayList<Bitmap> arr1 = new ArrayList<Bitmap>();
    private ImageLoader imageLoader;
    // Reused position buffers for the staggered layout manager queries.
    private int[] firstItemPos;
    private int[] lastItemPos;
    // Absolute paths returned by the picker activity.
    private String[] all_path;
    // Intent for the video-generator activity, fired after export finishes.
    private Intent intent = null;
    private ProgressDialog pd;
    // Paths of every picked image, in pick order.
    private LinkedList<String> pathlist;
    // Number of items already in the list before the latest pick (restored
    // from preferences in onActivityResult, saved in onPause).
    private int arrayLength = 0;
    private SharedPreferences sharedPreferences;
    private static final String root = Environment.getExternalStorageDirectory().toString();
    // Export directory consumed by the video-generator activity.
    private File myDir = new File(root + "/req_images");

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.main);
        initImageLoader();
        init();
    }

    /** Configures the global Universal Image Loader instance. */
    private void initImageLoader() {
        DisplayImageOptions defaultOptions = new DisplayImageOptions.Builder()
                .cacheOnDisc().imageScaleType(ImageScaleType.EXACTLY_STRETCHED)
                .bitmapConfig(Bitmap.Config.RGB_565).build();
        ImageLoaderConfiguration.Builder builder = new ImageLoaderConfiguration.Builder(
                this).defaultDisplayImageOptions(defaultOptions).memoryCache(
                new WeakMemoryCache());
        ImageLoaderConfiguration config = builder.build();
        imageLoader = ImageLoader.getInstance();
        imageLoader.init(config);
    }

    /** Finds views, wires up the list, the pick button, the go button and scroll sync. */
    private void init() {
        sharedPreferences = getPreferences(MODE_PRIVATE);
        pathlist = new LinkedList<>();
        TextView txt = (TextView) findViewById(R.id.selected_count);
        txt.setVisibility(View.GONE);
        recyclerView = (RecyclerView) findViewById(R.id.rec_test);
        currentImage = (ImageView) findViewById(R.id.image_id);
        btnGalleryPickMul = (Button) findViewById(R.id.btnGalleryPickMul);
        myRecyclerViewAdapter = new MyRecyclerViewAdapter(arrayList, currentImage, btnGalleryPickMul);
        staggeredGridLayoutManager = new StaggeredGridLayoutManager(1, StaggeredGridLayoutManager.HORIZONTAL); // staggered grid
        itemAnimator = new DefaultItemAnimator();
        recyclerView.setAdapter(myRecyclerViewAdapter);
        recyclerView.setLayoutManager(staggeredGridLayoutManager);
        recyclerView.setItemAnimator(itemAnimator);
        // Launch the custom multi-pick gallery; result arrives in onActivityResult.
        btnGalleryPickMul.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent i = new Intent(Action.ACTION_MULTIPLE_PICK);
                startActivityForResult(i, 200);
            }
        });
        next = (Button) findViewById(R.id.go_button);
        // "Go": export full-size copies, then hand the folder to the video app.
        next.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (arrayList.size() > 0) {
                    myDir.mkdirs();
                    SaveToMemary saveToMemary = new SaveToMemary();
                    saveToMemary.execute(pathlist);
                    // Launched from SaveToMemary.onPostExecute once export is done.
                    intent = new Intent("android.intent.action.videogen");
                    intent.putExtra("myimagespath", myDir.toString());
                    //startActivity(intent);
                    //finish();
                } else {
                    Toast.makeText(getApplicationContext(), "you have no image", Toast.LENGTH_SHORT).show();
                }
                //foo(getApplicationContext());
            }
        });
        // Keep the big preview in sync with the first fully visible item.
        // NOTE(review): setOnScrollListener is deprecated in recent support
        // libraries in favour of addOnScrollListener -- confirm library version.
        recyclerView.setOnScrollListener(new RecyclerView.OnScrollListener() {
            @Override
            public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
                super.onScrollStateChanged(recyclerView, newState);
                if (myRecyclerViewAdapter.getItemCount() > 0) {
                    firstItemPos = staggeredGridLayoutManager.findFirstCompletelyVisibleItemPositions(firstItemPos);
                    lastItemPos = staggeredGridLayoutManager.findLastVisibleItemPositions(lastItemPos);
                    if (arrayList.size() > 0) {
                        currentImage.setImageBitmap(arrayList.get(firstItemPos[0]));
                    }
                }
            }
        });
    }

    /**
     * Receives the picked paths, appends a grey placeholder bitmap per image,
     * then decodes real thumbnails on a background task.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == 200 && resultCode == Activity.RESULT_OK) {
            all_path = data.getStringArrayExtra("all_path");
            if (all_path.length > 0) {
                arrayLength = sharedPreferences.getInt("length", 0);
                //System.gc();
                // Shared placeholder shown until the thumbnail is decoded.
                Bitmap bm = Bitmap.createBitmap(10, 10, Bitmap.Config.RGB_565);
                bm.eraseColor(Color.LTGRAY);
                for (int i = 0; i < all_path.length; i++) {
                    pathlist.add(all_path[i]);
                    arrayList.add(bm);
                }
                //System.gc();
                DownloadFilesTask dtt = new DownloadFilesTask();
                dtt.execute(pathlist);
                //Toast.makeText(getApplicationContext(), "" + all_path.length, Toast.LENGTH_SHORT).show();
            }
            //btnGalleryPickMul.setVisibility(View.GONE);
            next.setVisibility(View.VISIBLE);
        }
    }

    /**
     * Background task decoding 300x300 centre-cropped, orientation-corrected
     * thumbnails for every picked path, replacing the grey placeholders one
     * by one as they become available.
     */
    private class DownloadFilesTask extends AsyncTask<LinkedList<String>, Integer, ArrayList<Bitmap>> {
        protected ArrayList<Bitmap> doInBackground(LinkedList<String>... path) {
            if (path[0].size() > 0) {
                for (int i = 0; i < path[0].size(); i++) {
                    Bitmap bitmap = null;
                    try {
                        bitmap = Utils.currectlyOrientation(path[0].get(i), 300, 300);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    bitmap = Utils.scaleCenterCrop(bitmap, 300, 300);
                    arr1.add(bitmap);
                    publishProgress(i);
                    Log.d("path[0] length" + i, "" + path[0].size());
                }
            }
            return arr1;
        }

        protected void onProgressUpdate(Integer... progress) {
            // First decoded thumbnail also becomes the big preview.
            if (progress[0] == 0) {
                currentImage.setImageBitmap(arr1.get(0));
            }
            // Replace the placeholder after any previously kept items.
            arrayList.set(arrayLength + progress[0], arr1.get(progress[0]));
            myRecyclerViewAdapter.notifyDataSetChanged();
        }

        protected void onPostExecute(ArrayList<Bitmap> result) {
            arr1.removeAll(arr1);
            //setBadge(getApplicationContext(),0);
            Toast.makeText(getApplicationContext(), "done", Toast.LENGTH_SHORT).show();
        }
    }

    /**
     * Background task exporting 700x700 PNG copies of every picked image into
     * {@code myDir} (clearing it first), updating a progress dialog and the
     * launcher badge, then starting the video-generator activity.
     */
    private class SaveToMemary extends AsyncTask<LinkedList<String>, Integer, Void> {
        protected Void doInBackground(LinkedList<String>... path) {
            android.os.Process.setThreadPriority(Process.THREAD_PRIORITY_MORE_FAVORABLE);
            Log.d("arralist 1 ", " " + arrayList.size());
            for (int i = 0; i < path[0].size(); i++) {
                // Zero-padded names keep the frame order for the video generator.
                String fname = "image_" + String.format("%03d", i) + ".png";
                try {
                    File file = new File(myDir, fname);
                    Bitmap bitmap = null;
                    if (file.exists())
                        file.delete();
                    bitmap = Utils.currectlyOrientation(path[0].get(i), 700, 700);
                    bitmap = Utils.scaleCenterCrop(bitmap, 700, 700);
                    FileOutputStream out = new FileOutputStream(file);
                    bitmap.compress(Bitmap.CompressFormat.PNG, 50, out);
                    setBadge(getApplicationContext(), i);
                    out.flush();
                    out.close();
                    bitmap.recycle();
                } catch (Exception e) {
                    e.printStackTrace();
                    Toast.makeText(getApplicationContext(), "Error while SaveToMemory", Toast.LENGTH_SHORT).show();
                }
                publishProgress(i);
            }
            return null;
        }

        protected void onProgressUpdate(Integer... progress) {
            // Refresh the dialog text every third image to limit UI churn.
            if (progress[0] % 3 == 0) {
                if (pd != null)
                    pd.setMessage("Doing.. " + ((int) (((float) progress[0] / arrayList.size()) * 100)) + "%");
                // Toast.makeText(getApplicationContext(), (((float) progress[0] / arrayList.size()) * 100) + "%", Toast.LENGTH_SHORT).show();
            }
            Log.d("importing images", "image" + progress[0]);
        }

        protected void onPostExecute(Void result) {
            Toast.makeText(getApplicationContext(), "done", Toast.LENGTH_SHORT).show();
            if (pd != null) {
                // Reset UI state and hand off to the video-generator activity.
                pd.dismiss();
                arr1.removeAll(arr1);
                arrayList.removeAll(arrayList);
                myRecyclerViewAdapter.notifyDataSetChanged();
                currentImage.setImageBitmap(null);
                btnGalleryPickMul.setVisibility(View.VISIBLE);
                setBadge(getApplicationContext(), 0);
                startActivity(intent);
                overridePendingTransition(R.transition.fade_in, R.transition.fade_out);
            }
        }

        @Override
        protected void onPreExecute() {
            super.onPreExecute();
            // Wipe any previous export before writing the new sequence.
            File dir = new File(Environment.getExternalStorageDirectory() + "/req_images");
            if (dir.isDirectory()) {
                String[] children = dir.list();
                for (int i = 0; i < children.length; i++) {
                    new File(dir, children[i]).delete();
                }
            }
            pd = new ProgressDialog(MainActivity.this);
            pd.setTitle("Processing...");
            pd.setMessage("Please wait.");
            pd.setCancelable(false);
            pd.setIndeterminate(true);
            pd.show();
        }
    }

    @Override
    protected void onPause() {
        // Persist how many items are in the list so a later pick appends
        // after them (see arrayLength in onActivityResult).
        SharedPreferences.Editor ed = sharedPreferences.edit();
        ed.putInt("length", arrayList.size());
        ed.apply();
        super.onPause();
    }

    /**
     * Broadcasts the (non-standard, launcher-specific) badge-count update
     * intent so supporting launchers show {@code count} on the app icon.
     */
    public static void setBadge(Context context, int count) {
        String launcherClassName = getLauncherClassName(context);
        if (launcherClassName == null) {
            return;
        }
        Intent intent = new Intent("android.intent.action.BADGE_COUNT_UPDATE");
        intent.putExtra("badge_count", count);
        intent.putExtra("badge_count_package_name", context.getPackageName());
        intent.putExtra("badge_count_class_name", launcherClassName);
        context.sendBroadcast(intent);
    }

    /**
     * Returns this app's launcher activity class name (needed by the badge
     * broadcast), or null if none is declared.
     */
    public static String getLauncherClassName(Context context) {
        PackageManager pm = context.getPackageManager();
        Intent intent = new Intent(Intent.ACTION_MAIN);
        intent.addCategory(Intent.CATEGORY_LAUNCHER);
        List<ResolveInfo> resolveInfos = pm.queryIntentActivities(intent, 0);
        for (ResolveInfo resolveInfo : resolveInfos) {
            String pkgName = resolveInfo.activityInfo.applicationInfo.packageName;
            if (pkgName.equalsIgnoreCase(context.getPackageName())) {
                String className = resolveInfo.activityInfo.name;
                return className;
            }
        }
        return null;
    }

    /** Experimental Sony-launcher badge broadcast (currently unused in the flow). */
    public void foo(Context context) {
        Intent intent = new Intent();
        intent.setAction("com.sonyericsson.home.action.UPDATE_BADGE");
        intent.putExtra("com.sonyericsson.home.intent.extra.badge.ACTIVITY_NAME", "com.luminous.pick.MainActivity");
        intent.putExtra("com.sonyericsson.home.intent.extra.badge.SHOW_MESSAGE", true);
        intent.putExtra("com.sonyericsson.home.intent.extra.badge.MESSAGE", "99");
        intent.putExtra("com.sonyericsson.home.intent.extra.badge.PACKAGE_NAME", "com.luminous.pick");
        sendBroadcast(intent);
        Toast.makeText(getApplicationContext(), "badge", Toast.LENGTH_SHORT).show();
    }
}
|
Zimpler/ironfan-pantry | cookbooks/redis/attributes/default.rb | <gh_stars>0
#
# Locations -- where the cookbook places config, logs, dataset and pid file
#
default[:redis][:conf_dir] = "/etc/redis"
default[:redis][:log_dir] = "/var/log/redis"
default[:redis][:data_dir] = "/var/lib/redis"
default[:redis][:home_dir] = "/var/lib/redis"
default[:redis][:pid_file] = "/var/run/redis.pid"
default[:redis][:db_basename] = "dump.rdb"

# Service account; uid/gid are pinned so installs are reproducible across nodes.
default[:redis ][:user] = 'redis'
default[:users ]['redis'][:uid] = 335
default[:groups]['redis'][:gid] = 335

#
# Server -- bind address and port (0.0.0.0 listens on all interfaces)
#
default[:redis][:server][:addr] = "0.0.0.0"
default[:redis][:server][:port] = "6379"

#
# Tunables
#
# Client idle timeout in seconds.
default[:redis][:server][:timeout] = "300"
default[:redis][:glueoutputbuf] = "yes"
# RDB snapshot triggers as [seconds, changes] pairs, mirroring redis.conf
# "save" lines: e.g. save after 900s if >= 1 key changed.
default[:redis][:saves] = [["900", "1"], ["300", "10"], ["60", "10000"]]

# NOTE: these node[...] checks run when the attribute file is loaded, so the
# slave/shareobjects flags must be set at a higher precedence level (role,
# environment, wrapper) for the conditional defaults below to take effect.
default[:redis][:slave] = "no"
if (node[:redis][:slave] == "yes")
  # TODO: replace with discovery
  default[:redis][:master_server] = "redis-master." + domain
  default[:redis][:master_port] = "6379"
end

default[:redis][:shareobjects] = "no"
if (node[:redis][:shareobjects] == "yes")
  default[:redis][:shareobjectspoolsize] = 1024
end
|
tsingdao-Tp/Indigo | core/indigo-core/common/base_cpp/bitoutworker.h | <reponame>tsingdao-Tp/Indigo<filename>core/indigo-core/common/base_cpp/bitoutworker.h
/****************************************************************************
* Copyright (C) from 2009 to Present EPAM Systems.
*
* This file is part of Indigo toolkit.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************/
#ifndef __bitoutworker_h__
#define __bitoutworker_h__
#include "base_c/defs.h"
namespace indigo
{
    class Output;

    // Bit-level writer: accumulates codes of a configurable width into a
    // dword buffer and flushes them to the underlying Output stream.
    class BitOutWorker
    {
    public:
        // StartBits: initial code width in bits; NewOut: destination stream.
        BitOutWorker(int StartBits, Output& NewOut);
        // Appends one code of _bits bits to the stream; returns success.
        bool writeBits(int Code);
        // Flushes any partially filled buffer to the output.
        void close(void);
        ~BitOutWorker(void);

    private:
        int _bits; /* Code size */
        int _bitBufferCount;   // number of bits currently held in _bitBuffer
        dword _bitBuffer;      // pending bits not yet written to _output
        Output& _output;
        // Declared private (copying a worker bound to a stream is forbidden).
        BitOutWorker(const BitOutWorker&);
    };
} // namespace indigo
#endif /* __bitoutworker_h__ */
/* END OF 'BITOUTWORKER.H' FILE */
|
RugeroCarmelo/EBM_tool | src/main/java/EBM_tool/OWL2Prefuse/graph/NodeColorAction.java | package EBM_tool.OWL2Prefuse.graph;
import EBM_tool.OWL2Prefuse.OWL2Prefuse.Constants;
import prefuse.Visualization;
import prefuse.action.assignment.ColorAction;
import prefuse.visual.VisualItem;
/**
* This class is a specific ColorAction for the nodes in the graph.
* <p/>
* Project OWL2Prefuse <br/>
* NodeColorAction.java created 3 januari 2007, 13:37
* <p/>
 * Copyright © 2006 <NAME>
* @author <a href="mailto:<EMAIL>"><NAME> adapted by Tomas</a>
* @version $$Revision:$$, $$Date:$$
*/
public class NodeColorAction extends ColorAction
{
/**
* Creates a new instance of NodeColorAction.
* @param p_group The data group for which this ColorAction provides the colors.
* @param p_vis A reference to the visualization processed by this Action.
*/
public NodeColorAction(String p_group, Visualization p_vis)
{
super(p_group, VisualItem.FILLCOLOR);
m_vis = p_vis;
}
/**
* This method returns the color of the given VisualItem.
* @param p_item The node for which the color needs to be retreived.
* @return The color of the given node.
*/
public int getColor(VisualItem p_item)
{
int retval = Constants.NODE_DEFAULT_COLOR;
if (m_vis.isInGroup(p_item, Visualization.SEARCH_ITEMS)) retval = Constants.NODE_COLOR_SEARCH;
else if (p_item.isHighlighted()) retval = Constants.NODE_COLOR_HIGHLIGHTED;
else if (p_item.isFixed()) retval = Constants.NODE_COLOR_SELECTED;
else if (p_item.canGetString("type"))
{
if (p_item.getString("type") != null)
{
if (p_item.getString("type").contains("class")) retval = Constants.NODE_COLOR_CLASS;
else if (p_item.getString("type").contains("individual")) retval = Constants.NODE_COLOR_INDIVIDUAL;
else if(p_item.getString("type").contains("rule")) retval = Constants.NODE_COLOR_HAS_RULE;
}
}
return retval;
}
} |
idc9/mvmm_sim | mvmm_sim/simulation/gif_utils.py | # from imageio import mimsave, imread
from glob import glob
from natsort import natsorted
import os
from PIL import Image, ImageDraw # , ImageFont
import gif
def make_gif_from_dir(gif_fpath, img_dir, frame_length=0.5, font_size=20,
                      delete_images=False):
    """
    Makes a gif from the .png images stored in a directory.

    Parameters
    ----------
    gif_fpath: str
        Where to save the resulting gif.
    img_dir: str
        Directory containing the .png frames.
    frame_length: float
        Passed to PIL's ``save`` as ``duration``.
        NOTE(review): PIL interprets ``duration`` in milliseconds, so 0.5
        is likely far too short -- confirm the intended units.
    font_size: int or None
        If not None, the frame's file name is drawn onto the image.
        The actual font-size code is commented out below, so the value
        itself is currently unused.
    delete_images: bool
        If True, each source image is deleted after being loaded.

    Raises
    ------
    ValueError
        If ``img_dir`` contains no .png files.
    """
    # Natural sort so e.g. frame_2 comes before frame_10.
    img_names = natsorted([os.path.basename(f)
                           for f in glob('{}/*.png'.format(img_dir))])

    if not img_names:
        # The original code failed later with an opaque IndexError on
        # images[0]; fail early with a clear message instead.
        raise ValueError('no .png images found in {}'.format(img_dir))

    images = []
    for img_name in img_names:
        img_path = os.path.join(img_dir, img_name)
        img = Image.open(img_path)  # load image

        if font_size is not None:
            # fnt = ImageFont.truetype('/Library/Fonts/Arial.ttf', font_size)
            # Use the file stem (underscores -> spaces) as the overlay label.
            txt = img_name.split('.')[0].replace('_', ' ')
            d = ImageDraw.Draw(img)
            # d.text((10, 10), txt, font=fnt, fill=(255, 0, 0))
            d.text((10, 10), txt, fill=(255, 0, 0))

        images.append(img)

        if delete_images:
            os.remove(img_path)

    # Write the first frame and append the remaining ones as gif frames.
    images[0].save(gif_fpath,
                   save_all=True,
                   append_images=images[1:],
                   duration=frame_length,
                   loop=1)
def make_gif(frame_iter, fpath, duration=100):
    """
    Save an iterable of ``gif``-package frames as an animated gif.

    Parameters
    ----------
    frame_iter: iterable
        Iterable of frames, e.g. the output of get_frame_iter().
    fpath: str
        Where to save the gif.
    duration: int
        Per-frame duration passed through to ``gif.save``.

    Example
    -------
    def plot_func(power):
        plt.figure(figsize=(5, 5))
        xvals = np.arange(5)
        yvals = xvals ** power
        plt.plot(xvals, yvals, marker='.')

    def arg_iter():
        for p in range(5):
            yield {'power': p}
    """
    # Materialize the iterator; gif.save needs a concrete sequence.
    frames = list(frame_iter)
    gif.save(frames, fpath, duration=duration)
def get_frame_iter(plot_func, kwarg_iter):
    """Yield one gif frame per kwargs dict, rendered via plot_func(**kwargs)."""
    @gif.frame
    def render(kwargs):
        plot_func(**kwargs)

    for kwargs in kwarg_iter:
        yield render(kwargs)
|
Yeeyao/leetcode-go | array/59_test.go | package array
import (
"testing"
)
// TestPro checks both spiral-matrix implementations against the n=3 case.
func TestPro(t *testing.T) {
	cases := []struct {
		name string
		fn   func(int) [][]int
	}{
		{"59. Spiral Matrix II", solution},
		{"59. Spiral Matrix II2", solution2},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			want := [][]int{{1, 2, 3}, {8, 9, 4}, {7, 6, 5}}
			if got := tc.fn(3); !IntSliceEqual(got, want) {
				t.Errorf("got: %v, want: %v", got, want)
			}
		})
	}
}
/*
Given n, generate an n x n spiral matrix containing 1 .. n^2.

The hard part is not producing the numbers but deciding where each element
goes. Work out the coordinates of the boundary cells, then process the
matrix from the outside in: each completed pass around the border shrinks
the remaining square by 2.

k is the next value to place, i counts completed border passes, and j
drives the coordinate changes along each side. Each side of the border
needs its own index bounds, and values are written in increasing order as
we walk around.

[Same idea, but the reference code is verbose](https://leetcode.com/problems/spiral-matrix-ii/discuss/22309/Simple-C%2B%2B-solution(with-explaination))
*/
// solution fills an n x n matrix with 1..n*n in clockwise spiral order by
// walking the four sides of each concentric border ring, outermost first.
func solution(n int) [][]int {
	retArr := make([][]int, n)
	for i := range retArr {
		retArr[i] = make([]int, n)
	}
	// i: number of completed border rings; k: next value to place.
	i, k := 0, 1
	for k <= n*n {
		j := i
		// Top row of the ring: row i fixed, column increases.
		for j < n-i {
			retArr[i][j] = k
			j++
			k++
		}
		// Right column: column n-i-1 fixed, row increases, skipping the
		// corner already written by the top row.
		j = i + 1
		for j < n-i {
			retArr[j][n-i-1] = k
			j++
			k++
		}
		// Bottom row: row n-i-1 fixed, column decreases, skipping the
		// corner already written by the right column.
		j = n - i - 2
		for j > i {
			retArr[n-i-1][j] = k
			j--
			k++
		}
		// Left column: column i fixed, row decreases, stopping before the
		// ring's top-left corner (written by the top row).
		j = n - i - 1
		for j > i {
			retArr[j][i] = k
			j--
			k++
		}
		i++
	}
	return retArr
}
/*
Same idea as solution above, but with a single, simpler walk loop.
*/
// solution2 fills the spiral with a single walk: move in the current
// direction until the next cell is already filled, then rotate clockwise.
func solution2(n int) [][]int {
	retArr := make([][]int, n)
	for i := range retArr {
		retArr[i] = make([]int, n)
	}
	// (di, dj) is the direction vector; clockwise rotation is di, dj = dj, -di.
	i, j, di, dj := 0, 0, 0, 1
	k := 1
	for k <= n*n {
		retArr[i][j] = k
		// Peek at the next cell. Go's % can yield a negative value here, so
		// it is flipped positive to stay a valid index. (The exact wrapped
		// cell does not matter: it is only tested for being non-zero, i.e.
		// already visited, which triggers the turn.)
		cx := (i + di) % n
		if cx < 0 {
			cx = -cx
		}
		cy := (j + dj) % n
		if cy < 0 {
			cy = -cy
		}
		// Turn clockwise when the next cell was filled already.
		if retArr[cx][cy] != 0 {
			di, dj = dj, -di
		}
		i += di
		j += dj
		k++
	}
	return retArr
}
// IntSliceEqual reports whether a and b hold the same values.
// Fix: the original indexed b[i][j] without checking len(b[i]), so ragged
// inputs (rows of different lengths) caused an index-out-of-range panic.
func IntSliceEqual(a, b [][]int) bool {
	if len(a) != len(b) {
		return false
	}
	// Distinguish nil from an empty, non-nil slice (as the original did).
	if (a == nil) != (b == nil) {
		return false
	}
	for i := range a {
		if len(a[i]) != len(b[i]) {
			return false
		}
		for j := range a[i] {
			if a[i][j] != b[i][j] {
				return false
			}
		}
	}
	return true
}
|
tristantarrant/protostream | core/src/test/java/org/infinispan/protostream/ProtobufParserTest.java | <gh_stars>0
package org.infinispan.protostream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import org.infinispan.protostream.descriptors.Descriptor;
import org.infinispan.protostream.descriptors.EnumDescriptor;
import org.infinispan.protostream.descriptors.FieldDescriptor;
import org.infinispan.protostream.descriptors.GenericDescriptor;
import org.infinispan.protostream.domain.Address;
import org.infinispan.protostream.domain.User;
import org.infinispan.protostream.impl.Log;
import org.infinispan.protostream.test.AbstractProtoStreamTest;
import org.junit.Test;
/**
* Test the Parser/TagHandler mechanism.
*
* @author <EMAIL>
*/
public class ProtobufParserTest extends AbstractProtoStreamTest {

   private static final Log log = Log.LogFactory.getLog(ProtobufParserTest.class);

   /**
    * Serializes a sample User as a WrappedMessage, then parses the bytes with
    * two chained TagHandlers: the outer one decodes the WrappedMessage
    * envelope, the inner one receives the events of the unwrapped payload.
    */
   @Test
   public void testTagHandler() throws Exception {
      ImmutableSerializationContext ctx = createContext();

      User user = new User();
      user.setId(1);
      user.setName("John");
      user.setSurname("Batman");
      user.setGender(User.Gender.MALE);
      user.setAccountIds(new HashSet<>(Arrays.asList(1, 3)));
      user.setAddresses(Arrays.asList(new Address("Old Street", "XYZ42", -12), new Address("Bond Street", "W23", 2)));

      byte[] userBytes = ProtobufUtil.toWrappedByteArray(ctx, user);

      Descriptor wrapperDescriptor = ctx.getMessageDescriptor(WrappedMessage.PROTOBUF_TYPE_NAME);

      // Inner handler: just logs the parse events of the unwrapped message.
      TagHandler messageHandler = new TagHandler() {
         @Override
         public void onStart(GenericDescriptor descriptor) {
            log.debugf("\tonStart %s", descriptor);
         }

         @Override
         public void onTag(int fieldNumber, FieldDescriptor fieldDescriptor, Object tagValue) {
            log.debugf("\tonTag %d %s %s", fieldNumber, fieldDescriptor != null ? fieldDescriptor.getFullName() : null, tagValue);
         }

         @Override
         public void onStartNested(int fieldNumber, FieldDescriptor fieldDescriptor) {
            log.debugf("\tonStartNested %d %s", fieldNumber, fieldDescriptor != null ? fieldDescriptor.getFullName() : null);
         }

         @Override
         public void onEndNested(int fieldNumber, FieldDescriptor fieldDescriptor) {
            log.debugf("\tonEndNested %d %s", fieldNumber, fieldDescriptor != null ? fieldDescriptor.getFullName() : null);
         }

         @Override
         public void onEnd() {
            log.debug("\tonEnd");
         }
      };

      // Outer handler: collects the WrappedMessage fields and, on onEnd,
      // re-dispatches the wrapped payload to messageHandler.
      TagHandler wrapperHandler = new TagHandler() {

         private Integer typeId;
         private String typeName;
         private byte[] wrappedMessage;
         private Integer wrappedEnum;

         private GenericDescriptor getDescriptor() {
            return typeId != null ? ctx.getDescriptorByTypeId(typeId) : ctx.getDescriptorByName(typeName);
         }

         @Override
         public void onStart(GenericDescriptor descriptor) {
            // FIX: descriptor was concatenated into the format string
            // ("onStart %s" + descriptor) instead of being passed as a
            // format argument, so "%s" was printed literally.
            log.debugf("onStart %s", descriptor);
         }

         @Override
         public void onTag(int fieldNumber, FieldDescriptor fieldDescriptor, Object tagValue) {
            log.debugf("onTag %d %s %s", fieldNumber, fieldDescriptor != null ? fieldDescriptor.getFullName() : null, tagValue);

            if (fieldDescriptor == null) {
               // ignore unknown fields
               return;
            }

            switch (fieldNumber) {
               case WrappedMessage.WRAPPED_DESCRIPTOR_TYPE_ID:
                  typeId = (Integer) tagValue;
                  break;
               case WrappedMessage.WRAPPED_DESCRIPTOR_FULL_NAME:
                  typeName = (String) tagValue;
                  break;
               case WrappedMessage.WRAPPED_MESSAGE:
                  wrappedMessage = (byte[]) tagValue;
                  break;
               case WrappedMessage.WRAPPED_ENUM:
                  wrappedEnum = (Integer) tagValue;
                  break;
               // Scalar wrappers are forwarded immediately as a one-tag message.
               case WrappedMessage.WRAPPED_DOUBLE:
               case WrappedMessage.WRAPPED_FLOAT:
               case WrappedMessage.WRAPPED_INT64:
               case WrappedMessage.WRAPPED_UINT64:
               case WrappedMessage.WRAPPED_INT32:
               case WrappedMessage.WRAPPED_FIXED64:
               case WrappedMessage.WRAPPED_FIXED32:
               case WrappedMessage.WRAPPED_BOOL:
               case WrappedMessage.WRAPPED_STRING:
               case WrappedMessage.WRAPPED_BYTES:
               case WrappedMessage.WRAPPED_UINT32:
               case WrappedMessage.WRAPPED_SFIXED32:
               case WrappedMessage.WRAPPED_SFIXED64:
               case WrappedMessage.WRAPPED_SINT32:
               case WrappedMessage.WRAPPED_SINT64:
                  messageHandler.onStart(null);
                  messageHandler.onTag(fieldNumber, fieldDescriptor, tagValue);
                  messageHandler.onEnd();
                  break;
            }
         }

         @Override
         public void onStartNested(int fieldNumber, FieldDescriptor fieldDescriptor) {
            log.debugf("onStartNested %d %s", fieldNumber, fieldDescriptor != null ? fieldDescriptor.getFullName() : null);
         }

         @Override
         public void onEndNested(int fieldNumber, FieldDescriptor fieldDescriptor) {
            log.debugf("onEndNested %d %s", fieldNumber, fieldDescriptor != null ? fieldDescriptor.getFullName() : null);
         }

         @Override
         public void onEnd() {
            // Envelope fully read: dispatch the wrapped enum or message.
            if (wrappedEnum != null) {
               EnumDescriptor enumDescriptor = (EnumDescriptor) getDescriptor();
               String enumConstantName = enumDescriptor.findValueByNumber(wrappedEnum).getName();
               FieldDescriptor fd = wrapperDescriptor.findFieldByNumber(WrappedMessage.WRAPPED_ENUM);
               messageHandler.onStart(enumDescriptor);
               messageHandler.onTag(WrappedMessage.WRAPPED_ENUM, fd, enumConstantName);
               messageHandler.onEnd();
            } else if (wrappedMessage != null) {
               try {
                  Descriptor messageDescriptor = (Descriptor) getDescriptor();
                  ProtobufParser.INSTANCE.parse(messageHandler, messageDescriptor, wrappedMessage);
               } catch (IOException e) {
                  throw new RuntimeException(e);
               }
            }
            log.debug("onEnd");
         }
      };

      ProtobufParser.INSTANCE.parse(wrapperHandler, wrapperDescriptor, userBytes);
   }
}
|
PaulsBecks/Blockchain-Logging-Framework | src/main/java/blf/blockchains/ethereum/reader/EthereumDataReader.java | package blf.blockchains.ethereum.reader;
import blf.core.exceptions.ExceptionHandler;
import blf.core.readers.DataReader;
import java.util.stream.Stream;
/**
* EthereumSources
*/
public class EthereumDataReader extends DataReader<EthereumClient, EthereumBlock, EthereumTransaction, EthereumLogEntry> {

    /** Streams the transactions of the current block; empty when no block is set. */
    public Stream<EthereumTransaction> transactionStream() {
        return this.currentBlock == null ? Stream.empty() : this.currentBlock.transactionStream();
    }

    /**
     * Streams log entries: those of the current transaction when one is set,
     * otherwise all log entries of every transaction in the current block
     * (empty when no block is set either).
     */
    public Stream<EthereumLogEntry> logEntryStream() {
        if (this.currentTransaction == null) {
            return this.currentBlock == null
                ? Stream.empty()
                : this.currentBlock.transactionStream().flatMap(EthereumTransaction::logStream);
        } else {
            return this.currentTransaction.logStream();
        }
    }

    /**
     * Connects to an Ethereum node via websocket; a no-op (after reporting an
     * error) when a client already exists.
     * NOTE(review): unlike connectIpc below this method carries no @Override
     * and passes no Throwable to handleException -- confirm whether the
     * asymmetry is intentional.
     */
    public void connect(String url) {
        if (this.client != null) {
            ExceptionHandler.getInstance().handleException("Already connected to Ethereum node.");
            return;
        }

        this.client = Web3jClient.connectWebsocket(url);
    }

    /**
     * Connects via IPC socket path; reports an error and returns if already
     * connected.
     * NOTE(review): a fresh NullPointerException is passed as the "cause" for
     * an already-connected condition, which is misleading -- an
     * IllegalStateException would describe it better; verify what
     * handleException expects before changing.
     */
    @Override
    public void connectIpc(String path) {
        if (this.client != null) {
            ExceptionHandler.getInstance().handleException("Already connected to Ethereum node.", new NullPointerException());
            return;
        }

        this.client = Web3jClient.connectIpc(path);
    }

    /** Closes the underlying client if one was ever created. */
    @Override
    public void close() {
        if (this.client != null) {
            this.client.close();
        }
    }
}
|
sfarrens/cosmostat | src/cxx/mga/libmga3d/IM3D_DCT.h | /******************************************************************************
** Copyright (C) 2008 by CEA
*******************************************************************************
**
** UNIT
**
** Version: 1.0
**
** Author: <NAME>
**
** Date: 24th Oct. 2008
**
** File: IM3D_DCT.h
**
** Modification history:
**
******************************************************************************
**
** DESCRIPTION 3D Block DCT class
** -----------
**
******************************************************************************/
#ifndef _IM3D_DCT_H_
#define _IM3D_DCT_H_
#include "IM_IO.h"
#include "IM_Obj.h"
#include "IM3D_Block.h"
#include "MGA_Inc.h"
/***********************************************************************/
// 3D block DCT: splits a cube into (optionally overlapping) blocks via
// Block3D and applies a DCT per block; includes noise calibration,
// thresholding/wiener/fdr filtering and IO helpers on the transform.
class IM3D_DCT
{
    // Base elements
    int NxCube, NyCube, NzCube; // input image size
    int BlockSize;
    bool InitClass; // True if the CLASS has been initialised
    Block3D B3D;    // block extraction/insertion engine
    bool AllocClass;    // true once the work buffers below are allocated
    double ***Block3;   // per-block 3D work buffer
    double *Block1;     // per-block 1D work buffer
    double TabSigma; // Noise level for a given Block size
    // dct parameters
    int n1d;
    int* dct_ip;
    double* dct_w;
    bool lapped;        // use lapped (overlapped-window) DCT
    int skip_order;
    //Methods
    void reset();       // restore the default (unallocated) state
    void alloc();       // allocate Block3/Block1 work buffers
    void dealloc();     // release the work buffers
    void transform_one_block(fltarray & Block, fltarray & BlockTrans);
    void transform_one_block(fltarray & Block);     // in-place variant
    void recons_one_block(fltarray & Block, fltarray & BlockTrans);
    void recons_one_block(fltarray & Block);        // in-place variant
    // Block manipulation (thin forwards to the Block3D engine)
    void get_block_cube(int Bi, int Bj, int Bk, fltarray &Cube, fltarray &CubeBlock, Bool Weight=False)
    {B3D.get_block_cube(Bi,Bj,Bk,Cube,CubeBlock,Weight);}
    void put_block_cube(int Bi, int Bj, int Bk, fltarray &Cube, fltarray &CubeBlock)
    {B3D.put_block_cube(Bi,Bj,Bk,Cube,CubeBlock);}
    void add_block_cube(int Bi, int Bj, int Bk, fltarray &Cube, fltarray &CubeBlock)
    {B3D.add_block_cube(Bi,Bj,Bk,Cube,CubeBlock);}
    public:
    IM3D_DCT();
    ~IM3D_DCT();
    // Parameters
    Bool BlockOverlap; // If True, Overlapped blocks are used.
    type_border Border; // Border used in the 3D a trous WT
    // initialize the class for given cube sizes, block size
    void init(int Nx, int Ny, int Nz, int _BlockSize, Bool _BlockOverlap);
    inline void set_lapped(bool l) {lapped=l;}
    // Apply the block dct transform and store the result in TabBand
    void transform(fltarray & Cube, fltarray & TabBand);
    // Reconstruct a cube from its block dct transform
    void recons(fltarray &TabBand, fltarray &Cube);
    // Properties of the blocks
    inline int nbr_block_nx() { return B3D.nbr_block_nx();}
    inline int nbr_block_ny() { return B3D.nbr_block_ny();}
    inline int nbr_block_nz() { return B3D.nbr_block_nz();}
    inline int nbr_block() { return B3D.nbr_block();}
    // Statistic and information tools
    void normalize_self(fltarray TabBand, bool inverse=false);
    void noise_calibration(fltarray &TabBand, char* Outname);
    void extract_stat(fltarray &TabBand, char* Outname);
    // Filtering methods
    void threshold(fltarray &TabBand, float SigmaNoise, float NSigma, filter_type FilterType=FT_HARD);
    void wiener(fltarray &TabBand, float noise_lvl, int LocalBS);
    void fdr(fltarray &TabBand, float Alpha, float SigmaNoise);
    void set_skip_order(int i) { skip_order = i;}
    // IO tools
    void write(char *Name, fltarray & TabBand, bool Normalize);
    void read(char *Name, fltarray & TabBand, bool *NormalizeInv);
    void temp(fltarray &TabBand);
};
/***********************************************************************/
#endif
|
stbly/gemp-swccg-public | gemp-swccg-logic/src/main/java/com/gempukku/swccgo/logic/effects/PutCardsFromHandOnBottomOfReserveDeckEffect.java | package com.gempukku.swccgo.logic.effects;
import com.gempukku.swccgo.common.Filterable;
import com.gempukku.swccgo.common.Zone;
import com.gempukku.swccgo.logic.timing.Action;
/**
* An effect to put cards from hand on bottom of Reserve Deck.
*/
public class PutCardsFromHandOnBottomOfReserveDeckEffect extends PutCardsFromHandInCardPileEffect {

    /**
     * Creates an effect that causes the player to put all cards from hand on bottom of Reserve Deck.
     * @param action the action performing this effect
     * @param playerId the player
     */
    public PutCardsFromHandOnBottomOfReserveDeckEffect(Action action, String playerId) {
        // trailing true presumably selects "bottom of pile" -- matches the
        // class name; confirm against PutCardsFromHandInCardPileEffect.
        super(action, playerId, Zone.RESERVE_DECK, true);
    }

    /**
     * Creates an effect that causes the player to put cards from hand on bottom of Reserve Deck.
     * @param action the action performing this effect
     * @param playerId the player
     * @param minimum the minimum number of cards to put on card pile
     * @param maximum the maximum number of cards to put on card pile
     */
    public PutCardsFromHandOnBottomOfReserveDeckEffect(Action action, String playerId, int minimum, int maximum) {
        // playerId is passed twice: once as the acting player and once as the
        // owner of the destination pile.
        super(action, playerId, minimum, maximum, Zone.RESERVE_DECK, playerId, true);
    }

    /**
     * Creates an effect that causes the player to put cards accepted by the specified filter from hand on bottom of Reserve Deck.
     * @param action the action performing this effect
     * @param playerId the player
     * @param minimum the minimum number of cards to put on card pile
     * @param maximum the maximum number of cards to put on card pile
     * @param filters the filter
     * @param hidden true if cards are not revealed, otherwise false
     */
    public PutCardsFromHandOnBottomOfReserveDeckEffect(Action action, String playerId, int minimum, int maximum, Filterable filters, boolean hidden) {
        super(action, playerId, minimum, maximum, Zone.RESERVE_DECK, playerId, true, filters, hidden);
    }
}
|
IkwhanChang/bbb-file-manager | node_modules/tessel/test/badserver/test.js | <filename>node_modules/tessel/test/badserver/test.js
// This test fails if any error is thrown.
var async = require('async');
var assert = require('assert');
var builds = require('../../src/builds');

// TAP plan line: this script emits 5 "ok" results.
console.log('1..5');

assert(builds.utils.buildsPath == 'https://builds.tessel.io/');

// Returns an async.series task that points the builds module at an
// unreachable/bogus server and verifies checkBuildList does not throw.
function checkBogusServer (path) {
  return function (next) {
    builds.utils.buildsPath = path;
    builds.checkBuildList('2014-05-31', function (builds) {
      // noop, success if nothing is thrown
      console.log('ok');
      next();
    });
  }
}

// run this first
async.series([
  function (next) {
    // NOTE(review): the callback parameter `builds` shadows the module-level
    // `builds` require above -- intentional here, but easy to misread.
    builds.checkBuildList('current', function (builds) {
      // noop, success if nothing is thrown
      console.log(builds ? 'ok' : 'not ok', '- builds list for "current" must exist.');
      next();
    });
  },
  checkBogusServer('http://example.com/'),
  checkBogusServer('https://example.com/'),
  checkBogusServer('http://fake.example.com/'),
  checkBogusServer('https://fake.example.com/'),
]);
|
fcrimins/YTMusicUploader | Documentation/html/d5/d88/class_y_t_music_uploader_1_1_providers_1_1_request_models_1_1_browseendpointcontextmusicconfig6.js | var class_y_t_music_uploader_1_1_providers_1_1_request_models_1_1_browseendpointcontextmusicconfig6 =
[
[ "pageType", "d5/d88/class_y_t_music_uploader_1_1_providers_1_1_request_models_1_1_browseendpointcontextmusicconfig6.html#ad50707aca2c88ac8a8df658f21e93a95", null ]
]; |
HawxChen/barrelfishOS | usr/monitor/iref.c | /**
* \file
* \brief IREF allocation/management
*/
/*
* Copyright (c) 2007, 2008, 2010, ETH Zurich.
* All rights reserved.
*
* This file is distributed under the terms in the attached LICENSE file.
* If you do not find this file, copies can be found by writing to:
* ETH Zurich D-INFK, Haldeneggsteig 4, CH-8092 Zurich. Attn: Systems Group.
*/
#include "monitor.h"
/* Per-iref bookkeeping: the binding of the server that registered the
 * service, plus the server-chosen service id. */
struct iref_service {
    struct monitor_binding *binding;
    uintptr_t service_id;
};

#define MAX_IREF_PERCORE 256

/* Per-core iref table; a NULL binding marks a free slot. */
static struct iref_service iref_table[MAX_IREF_PERCORE];
/**
* \brief Allocate a new iref
*
* Associate it with the server's connection and service id.
*/
errval_t iref_alloc(struct monitor_binding *binding, uintptr_t service_id,
                    iref_t *iref)
{
    assert(binding != NULL);

    /* scan the per-core table for an unused slot */
    for (iref_t slot = 0; slot < MAX_IREF_PERCORE; slot++) {
        if (iref_table[slot].binding != NULL) {
            continue;
        }
        iref_table[slot].binding = binding;
        iref_table[slot].service_id = service_id;
        // XXX: avoid zero being a valid iref
        *iref = MAX_IREF_PERCORE * my_core_id + slot + 1;
        return SYS_ERR_OK;
    }

    /* table full */
    return MON_ERR_IREF_ALLOC;
}
/**
* \brief Return core_id
*
* The core_id is stored in the iref itself.
*/
errval_t iref_get_core_id(iref_t iref, coreid_t *core_id)
{
    /* irefs are allocated as MAX_IREF_PERCORE * core + slot + 1 (see
     * iref_alloc), so integer division recovers the owning core.
     * NOTE(review): iref == 0 is never handed out, but it is not rejected
     * here either -- confirm callers cannot pass 0. */
    *core_id = (iref - 1) / MAX_IREF_PERCORE;
    return SYS_ERR_OK;
}
/**
* \brief Return conn
*/
errval_t iref_get_binding(iref_t iref, struct monitor_binding **binding)
{
    /* refuse irefs that belong to another core */
    if ((iref - 1) / MAX_IREF_PERCORE != my_core_id) {
        return MON_ERR_INVALID_CORE_ID;
    }

    struct monitor_binding *b = iref_table[(iref - 1) % MAX_IREF_PERCORE].binding;
    if (b == NULL) {
        /* free slot: no service was registered under this iref */
        return MON_ERR_INVALID_IREF;
    }

    *binding = b;
    return SYS_ERR_OK;
}
/**
* \brief Return service_id
*/
errval_t iref_get_service_id(iref_t iref, uintptr_t *service_id)
{
    /* only irefs owned by this core can be resolved locally */
    if ((iref - 1) / MAX_IREF_PERCORE != my_core_id) {
        return MON_ERR_INVALID_CORE_ID;
    }

    iref_t slot = (iref - 1) % MAX_IREF_PERCORE;
    *service_id = iref_table[slot].service_id;
    return SYS_ERR_OK;
}
|
AlexeySKiselev/unirand | test/distributions.seed.test.js | /**
* Tests for distributions with seed
* As PRNG was tested, seeded generator generates uniform distribution [0, 1)
* Distributions also tested for PRNG without seed
* Created by <NAME>
*/
// Import Mocha tool for tests
let chai = require('chai'),
expect = chai.expect,
{describe, it} = require('mocha'),
prng = require('../lib/prng/prngProxy').default;
chai.should();
// Asserts that two sampled distributions have the same length and agree
// element-wise to within the given accuracy.
const compareDistributions = (distA, distB, accuracy = 0.000001) => {
    expect(distA.length).to.be.equal(distB.length);
    distA.forEach((value, index) => {
        expect(value).to.be.closeTo(distB[index], accuracy);
    });
};
describe('Random distributions with seed', () => {
describe('Uniform distribution (a = 1, b = 4)', () => {
const Uniform = require('../lib/methods/uniform');
it('should return same value each time', () => {
const uniform = new Uniform(1, 4);
prng.seed('first uniform seed test');
const uniformFirst = uniform.random();
for(let i = 0; i < 1000; i += 1) {
expect(uniform.random()).to.be.closeTo(uniformFirst, 0.000001);
}
prng.seed('second uniform seed test');
const uniformSecond = uniform.random();
for(let i = 0; i < 1000; i += 1) {
expect(uniform.random()).to.be.closeTo(uniformSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const uniform = new Uniform(1, 4);
prng.seed('first uniform seed test');
const uniformFirst = uniform.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(uniform.distribution(10000), uniformFirst);
}
prng.seed('second uniform seed test');
const uniformSecond = uniform.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(uniform.distribution(10000), uniformSecond);
}
done();
});
});
describe('Normal distribution (mu = 1, sigma = 2)', () => {
const Normal = require('../lib/methods/normal');
it('should return same value each time', () => {
const normal = new Normal(1, 2);
prng.seed('first normal seed test');
const normalFirst = normal.random();
for(let i = 0; i < 1000; i += 1) {
expect(normal.random()).to.be.closeTo(normalFirst, 0.000001);
}
prng.seed('second normal seed test');
const normalSecond = normal.random();
for(let i = 0; i < 1000; i += 1) {
expect(normal.random()).to.be.closeTo(normalSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const normal = new Normal(1, 2);
prng.seed('first normal seed test');
const normalFirst = normal.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(normal.distribution(10000), normalFirst);
}
prng.seed('second normal seed test');
const normalSecond = normal.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(normal.distribution(10000), normalSecond);
}
done();
});
});
describe('Bernoulli distribution (p = 0.6)', () => {
const Bernoulli = require('../lib/methods/bernoulli');
it('should return same value each time', () => {
const bernoulli = new Bernoulli(0.6);
prng.seed('first bernoulli seed test');
const bernoulliFirst = bernoulli.random();
for(let i = 0; i < 1000; i += 1) {
expect(bernoulli.random()).to.be.closeTo(bernoulliFirst, 0.000001);
}
prng.seed('second bernoulli seed test');
const bernoulliSecond = bernoulli.random();
for(let i = 0; i < 1000; i += 1) {
expect(bernoulli.random()).to.be.closeTo(bernoulliSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const bernoulli = new Bernoulli(0.6);
prng.seed('first bernoulli seed test');
const bernoulliFirst = bernoulli.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(bernoulli.distribution(10000), bernoulliFirst);
}
prng.seed('second bernoulli seed test');
const bernoulliSecond = bernoulli.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(bernoulli.distribution(10000), bernoulliSecond);
}
done();
});
});
describe('Beta distribution (alpha = 2, beta = 5)', () => {
const Beta = require('../lib/methods/beta');
it('should return same value each time', () => {
const beta = new Beta(2, 5);
prng.seed('first beta seed test');
const betaFirst = beta.random();
for(let i = 0; i < 1000; i += 1) {
expect(beta.random()).to.be.closeTo(betaFirst, 0.000001);
}
prng.seed('second beta seed test');
const betaSecond = beta.random();
for(let i = 0; i < 1000; i += 1) {
expect(beta.random()).to.be.closeTo(betaSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const beta = new Beta(2, 5);
prng.seed('first beta seed test');
const betaFirst = beta.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(beta.distribution(10000), betaFirst);
}
prng.seed('second beta seed test');
const betaSecond = beta.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(beta.distribution(10000), betaSecond);
}
done();
});
});
describe('Beta Prime distribution (alpha = 2, beta = 3)', () => {
const BetaPrime = require('../lib/methods/betaprime');
it('should return same value each time', () => {
const betaprime = new BetaPrime(2, 3);
prng.seed('first betaprime seed test');
const betaprimeFirst = betaprime.random();
for(let i = 0; i < 1000; i += 1) {
expect(betaprime.random()).to.be.closeTo(betaprimeFirst, 0.000001);
}
prng.seed('second betaprime seed test');
const betaprimeSecond = betaprime.random();
for(let i = 0; i < 1000; i += 1) {
expect(betaprime.random()).to.be.closeTo(betaprimeSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const betaprime = new BetaPrime(2, 3);
prng.seed('first betaprime seed test');
const betaprimeFirst = betaprime.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(betaprime.distribution(10000), betaprimeFirst);
}
prng.seed('second betaprime seed test');
const betaprimeSecond = betaprime.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(betaprime.distribution(10000), betaprimeSecond);
}
done();
});
});
describe('Binomial distribution (p = 0.7, n = 20)', () => {
const Binomial = require('../lib/methods/binomial');
it('should return same value each time', () => {
const binomial = new Binomial(20, 0.7);
prng.seed('first binomial seed test');
const binomialFirst = binomial.random();
for(let i = 0; i < 1000; i += 1) {
expect(binomial.random()).to.be.closeTo(binomialFirst, 0.000001);
}
prng.seed('second binomial seed test');
const binomialSecond = binomial.random();
for(let i = 0; i < 1000; i += 1) {
expect(binomial.random()).to.be.closeTo(binomialSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const binomial = new Binomial(20, 0.7);
prng.seed('first binomial seed test');
const binomialFirst = binomial.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(binomial.distribution(10000), binomialFirst);
}
prng.seed('second binomial seed test');
const binomialSecond = binomial.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(binomial.distribution(10000), binomialSecond);
}
done();
});
});
describe('Cauchy distribution (x = 1, gamma = 1)', () => {
const Cauchy = require('../lib/methods/cauchy');
it('should return same value each time', () => {
const cauchy = new Cauchy(1, 1);
prng.seed('first cauchy seed test');
const cauchyFirst = cauchy.random();
for(let i = 0; i < 1000; i += 1) {
expect(cauchy.random()).to.be.closeTo(cauchyFirst, 0.000001);
}
prng.seed('second cauchy seed test');
const cauchySecond = cauchy.random();
for(let i = 0; i < 1000; i += 1) {
expect(cauchy.random()).to.be.closeTo(cauchySecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const cauchy = new Cauchy(1, 1);
prng.seed('first cauchy seed test');
const cauchyFirst = cauchy.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(cauchy.distribution(10000), cauchyFirst);
}
prng.seed('second cauchy seed test');
const cauchySecond = cauchy.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(cauchy.distribution(10000), cauchySecond);
}
done();
});
});
describe('Chi distribution (k = 2)', () => {
const Chi = require('../lib/methods/chi');
it('should return same value each time', () => {
const chi = new Chi(2);
prng.seed('first chi seed test');
const chiFirst = chi.random();
for(let i = 0; i < 1000; i += 1) {
expect(chi.random()).to.be.closeTo(chiFirst, 0.000001);
}
prng.seed('second chi seed test');
const chiSecond = chi.random();
for(let i = 0; i < 1000; i += 1) {
expect(chi.random()).to.be.closeTo(chiSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const chi = new Chi(2);
prng.seed('first chi seed test');
const chiFirst = chi.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(chi.distribution(10000), chiFirst);
}
prng.seed('second chi seed test');
const chiSecond = chi.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(chi.distribution(10000), chiSecond);
}
done();
});
});
describe('Chi Square distribution (k = 2)', () => {
const ChiSquare = require('../lib/methods/chisquare');
it('should return same value each time', () => {
const chisquare = new ChiSquare(2);
prng.seed('first chisquare seed test');
const chisquareFirst = chisquare.random();
for(let i = 0; i < 1000; i += 1) {
expect(chisquare.random()).to.be.closeTo(chisquareFirst, 0.000001);
}
prng.seed('second chisquare seed test');
const chisquareSecond = chisquare.random();
for(let i = 0; i < 1000; i += 1) {
expect(chisquare.random()).to.be.closeTo(chisquareSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const chisquare = new ChiSquare(2);
prng.seed('first chisquare seed test');
const chisquareFirst = chisquare.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(chisquare.distribution(10000), chisquareFirst);
}
prng.seed('second chisquare seed test');
const chisquareSecond = chisquare.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(chisquare.distribution(10000), chisquareSecond);
}
done();
});
});
describe('Compertz distribution (nu = 0.7, b = 2)', () => {
const Compertz = require('../lib/methods/compertz');
it('should return same value each time', () => {
const compertz = new Compertz(0.7, 2);
prng.seed('first compertz seed test');
const compertzFirst = compertz.random();
for(let i = 0; i < 1000; i += 1) {
expect(compertz.random()).to.be.closeTo(compertzFirst, 0.000001);
}
prng.seed('second compertz seed test');
const compertzSecond = compertz.random();
for(let i = 0; i < 1000; i += 1) {
expect(compertz.random()).to.be.closeTo(compertzSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const compertz = new Compertz(0.7, 2);
prng.seed('first compertz seed test');
const compertzFirst = compertz.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(compertz.distribution(10000), compertzFirst);
}
prng.seed('second compertz seed test');
const compertzSecond = compertz.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(compertz.distribution(10000), compertzSecond);
}
done();
});
});
describe('Delaporte distribution (alpha = 1, beta = 2, lambda = 3)', () => {
const Delaporte = require('../lib/methods/delaporte');
it('should return same value each time', () => {
const delaporte = new Delaporte(1, 2, 3);
prng.seed('first delaporte seed test');
const delaporteFirst = delaporte.random();
for(let i = 0; i < 1000; i += 1) {
expect(delaporte.random()).to.be.closeTo(delaporteFirst, 0.000001);
}
prng.seed('second delaporte seed test');
const delaporteSecond = delaporte.random();
for(let i = 0; i < 1000; i += 1) {
expect(delaporte.random()).to.be.closeTo(delaporteSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const delaporte = new Delaporte(1, 2, 3);
prng.seed('first delaporte seed test');
const delaporteFirst = delaporte.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(delaporte.distribution(10000), delaporteFirst);
}
prng.seed('second delaporte seed test');
const delaporteSecond = delaporte.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(delaporte.distribution(10000), delaporteSecond);
}
done();
});
});
describe('Fatigue distribution (alpha = 1, beta = 2)', () => {
const Fatigue = require('../lib/methods/fatigue');
it('should return same value each time', () => {
const fatigue = new Fatigue(1, 2);
prng.seed('first fatigue seed test');
const fatigueFirst = fatigue.random();
for(let i = 0; i < 1000; i += 1) {
expect(fatigue.random()).to.be.closeTo(fatigueFirst, 0.000001);
}
prng.seed('second fatigue seed test');
const fatigueSecond = fatigue.random();
for(let i = 0; i < 1000; i += 1) {
expect(fatigue.random()).to.be.closeTo(fatigueSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const fatigue = new Fatigue(1, 2);
prng.seed('first fatigue seed test');
const fatigueFirst = fatigue.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(fatigue.distribution(10000), fatigueFirst);
}
prng.seed('second fatigue seed test');
const fatigueSecond = fatigue.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(fatigue.distribution(10000), fatigueSecond);
}
done();
});
});
describe('Erlang distribution (k = 2, mu = 2)', () => {
const Erlang = require('../lib/methods/erlang');
it('should return same value each time', () => {
const erlang = new Erlang(2, 2);
prng.seed('first erlang seed test');
const erlangFirst = erlang.random();
for(let i = 0; i < 1000; i += 1) {
expect(erlang.random()).to.be.closeTo(erlangFirst, 0.000001);
}
prng.seed('second erlang seed test');
const erlangSecond = erlang.random();
for(let i = 0; i < 1000; i += 1) {
expect(erlang.random()).to.be.closeTo(erlangSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const erlang = new Erlang(2, 2);
prng.seed('first erlang seed test');
const erlangFirst = erlang.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(erlang.distribution(10000), erlangFirst);
}
prng.seed('second erlang seed test');
const erlangSecond = erlang.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(erlang.distribution(10000), erlangSecond);
}
done();
});
});
describe('Gamma distribution (alpha = 2, beta = 0.5)', () => {
const Gamma = require('../lib/methods/gamma');
it('should return same value each time', () => {
const gamma = new Gamma(2, 0.5);
prng.seed('first gamma seed test');
const gammaFirst = gamma.random();
for(let i = 0; i < 1000; i += 1) {
expect(gamma.random()).to.be.closeTo(gammaFirst, 0.000001);
}
prng.seed('second gamma seed test');
const gammaSecond = gamma.random();
for(let i = 0; i < 1000; i += 1) {
expect(gamma.random()).to.be.closeTo(gammaSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const gamma = new Gamma(2, 0.5);
prng.seed('first gamma seed test');
const gammaFirst = gamma.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(gamma.distribution(10000), gammaFirst);
}
prng.seed('second gamma seed test');
const gammaSecond = gamma.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(gamma.distribution(10000), gammaSecond);
}
done();
});
});
describe('Geometric distribution (p = 0.6)', () => {
const Geometric = require('../lib/methods/geometric');
it('should return same value each time', () => {
const geometric = new Geometric(0.6);
prng.seed('first geometric seed test');
const geometricFirst = geometric.random();
for(let i = 0; i < 1000; i += 1) {
expect(geometric.random()).to.be.closeTo(geometricFirst, 0.000001);
}
prng.seed('second geometric seed test');
const geometricSecond = geometric.random();
for(let i = 0; i < 1000; i += 1) {
expect(geometric.random()).to.be.closeTo(geometricSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const geometric = new Geometric(0.6);
prng.seed('first geometric seed test');
const geometricFirst = geometric.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(geometric.distribution(10000), geometricFirst);
}
prng.seed('second geometric seed test');
const geometricSecond = geometric.distribution(10000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(geometric.distribution(10000), geometricSecond);
}
done();
});
});
describe('Negative Binomial distribution (r = 3, p = 0.6)', () => {
const NegativeBinomial = require('../lib/methods/negativebinomial');
it('should return same value each time', () => {
const negativebinomial = new NegativeBinomial(3, 0.6);
prng.seed('first negativebinomial seed test');
const negativebinomialFirst = negativebinomial.random();
for(let i = 0; i < 1000; i += 1) {
expect(negativebinomial.random()).to.be.closeTo(negativebinomialFirst, 0.000001);
}
prng.seed('second negativebinomial seed test');
const negativebinomialSecond = negativebinomial.random();
for(let i = 0; i < 1000; i += 1) {
expect(negativebinomial.random()).to.be.closeTo(negativebinomialSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const negativebinomial = new NegativeBinomial(3, 0.6);
prng.seed('first negativebinomial seed test');
const negativebinomialFirst = negativebinomial.distribution(2000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(negativebinomial.distribution(2000), negativebinomialFirst);
}
prng.seed('second negativebinomial seed test');
const negativebinomialSecond = negativebinomial.distribution(2000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(negativebinomial.distribution(2000), negativebinomialSecond);
}
done();
});
});
describe('Poisson distribution (lambda = 4)', () => {
const Poisson = require('../lib/methods/poisson');
it('should return same value each time', () => {
const poisson = new Poisson(4);
prng.seed('first poisson seed test');
const poissonFirst = poisson.random();
for(let i = 0; i < 1000; i += 1) {
expect(poisson.random()).to.be.closeTo(poissonFirst, 0.000001);
}
prng.seed('second poisson seed test');
const poissonSecond = poisson.random();
for(let i = 0; i < 1000; i += 1) {
expect(poisson.random()).to.be.closeTo(poissonSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const poisson = new Poisson(4);
prng.seed('first poisson seed test');
const poissonFirst = poisson.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(poisson.distribution(5000), poissonFirst);
}
prng.seed('second poisson seed test');
const poissonSecond = poisson.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(poisson.distribution(5000), poissonSecond);
}
done();
});
});
describe('Exponential distribution (lambda = 1)', () => {
const Exponential = require('../lib/methods/exponential');
it('should return same value each time', () => {
const exponential = new Exponential(1);
prng.seed('first exponential seed test');
const exponentialFirst = exponential.random();
for(let i = 0; i < 1000; i += 1) {
expect(exponential.random()).to.be.closeTo(exponentialFirst, 0.000001);
}
prng.seed('second exponential seed test');
const exponentialSecond = exponential.random();
for(let i = 0; i < 1000; i += 1) {
expect(exponential.random()).to.be.closeTo(exponentialSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const exponential = new Exponential(1);
prng.seed('first exponential seed test');
const exponentialFirst = exponential.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(exponential.distribution(5000), exponentialFirst);
}
prng.seed('second exponential seed test');
const exponentialSecond = exponential.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(exponential.distribution(5000), exponentialSecond);
}
done();
});
});
describe('Extreme Value (Gumbel-type) distribution (mu = 0, sigma = 1)', () => {
const Extremevalue = require('../lib/methods/extremevalue');
it('should return same value each time', () => {
const extremevalue = new Extremevalue(0, 1);
prng.seed('first extremevalue seed test');
const extremevalueFirst = extremevalue.random();
for(let i = 0; i < 1000; i += 1) {
expect(extremevalue.random()).to.be.closeTo(extremevalueFirst, 0.000001);
}
prng.seed('second extremevalue seed test');
const extremevalueSecond = extremevalue.random();
for(let i = 0; i < 1000; i += 1) {
expect(extremevalue.random()).to.be.closeTo(extremevalueSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const extremevalue = new Extremevalue(0, 1);
prng.seed('first extremevalue seed test');
const extremevalueFirst = extremevalue.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(extremevalue.distribution(5000), extremevalueFirst);
}
prng.seed('second extremevalue seed test');
const extremevalueSecond = extremevalue.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(extremevalue.distribution(5000), extremevalueSecond);
}
done();
});
});
describe('Laplace distribution (mu = 0, b = 2)', () => {
const Laplace = require('../lib/methods/laplace');
it('should return same value each time', () => {
const laplace = new Laplace(0, 2);
prng.seed('first laplace seed test');
const laplaceFirst = laplace.random();
for(let i = 0; i < 1000; i += 1) {
expect(laplace.random()).to.be.closeTo(laplaceFirst, 0.000001);
}
prng.seed('second laplace seed test');
const laplaceSecond = laplace.random();
for(let i = 0; i < 1000; i += 1) {
expect(laplace.random()).to.be.closeTo(laplaceSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const laplace = new Laplace(0, 2);
prng.seed('first laplace seed test');
const laplaceFirst = laplace.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(laplace.distribution(5000), laplaceFirst);
}
prng.seed('second laplace seed test');
const laplaceSecond = laplace.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(laplace.distribution(5000), laplaceSecond);
}
done();
});
});
describe('Logistic distribution (mu = 5, s = 2)', () => {
const Logistic = require('../lib/methods/logistic');
it('should return same value each time', () => {
const logistic = new Logistic(5, 2);
prng.seed('first logistic seed test');
const logisticFirst = logistic.random();
for(let i = 0; i < 1000; i += 1) {
expect(logistic.random()).to.be.closeTo(logisticFirst, 0.000001);
}
prng.seed('second logistic seed test');
const logisticSecond = logistic.random();
for(let i = 0; i < 1000; i += 1) {
expect(logistic.random()).to.be.closeTo(logisticSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const logistic = new Logistic(5, 2);
prng.seed('first logistic seed test');
const logisticFirst = logistic.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(logistic.distribution(5000), logisticFirst);
}
prng.seed('second logistic seed test');
const logisticSecond = logistic.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(logistic.distribution(5000), logisticSecond);
}
done();
});
});
describe('Lognormal distribution (mu = 0, sigma = 1)', () => {
const Lognormal = require('../lib/methods/lognormal');
it('should return same value each time', () => {
const lognormal = new Lognormal(0, 1);
prng.seed('first lognormal seed test');
const lognormalFirst = lognormal.random();
for(let i = 0; i < 1000; i += 1) {
expect(lognormal.random()).to.be.closeTo(lognormalFirst, 0.000001);
}
prng.seed('second lognormal seed test');
const lognormalSecond = lognormal.random();
for(let i = 0; i < 1000; i += 1) {
expect(lognormal.random()).to.be.closeTo(lognormalSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const lognormal = new Lognormal(0, 1);
prng.seed('first lognormal seed test');
const lognormalFirst = lognormal.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(lognormal.distribution(5000), lognormalFirst);
}
prng.seed('second lognormal seed test');
const lognormalSecond = lognormal.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(lognormal.distribution(5000), lognormalSecond);
}
done();
});
});
describe('Pareto distribution (xm = 1, alpha = 4)', () => {
const Pareto = require('../lib/methods/pareto');
it('should return same value each time', () => {
const pareto = new Pareto(1, 4);
prng.seed('first pareto seed test');
const paretoFirst = pareto.random();
for(let i = 0; i < 1000; i += 1) {
expect(pareto.random()).to.be.closeTo(paretoFirst, 0.000001);
}
prng.seed('second pareto seed test');
const paretoSecond = pareto.random();
for(let i = 0; i < 1000; i += 1) {
expect(pareto.random()).to.be.closeTo(paretoSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const pareto = new Pareto(1, 4);
prng.seed('first pareto seed test');
const paretoFirst = pareto.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(pareto.distribution(5000), paretoFirst);
}
prng.seed('second pareto seed test');
const paretoSecond = pareto.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(pareto.distribution(5000), paretoSecond);
}
done();
});
});
describe('Rayleigh distribution (sigma = 1)', () => {
const Rayleigh = require('../lib/methods/rayleigh');
it('should return same value each time', () => {
const rayleigh = new Rayleigh(1);
prng.seed('first rayleigh seed test');
const rayleighFirst = rayleigh.random();
for(let i = 0; i < 1000; i += 1) {
expect(rayleigh.random()).to.be.closeTo(rayleighFirst, 0.000001);
}
prng.seed('second rayleigh seed test');
const rayleighSecond = rayleigh.random();
for(let i = 0; i < 1000; i += 1) {
expect(rayleigh.random()).to.be.closeTo(rayleighSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const rayleigh = new Rayleigh(1);
prng.seed('first rayleigh seed test');
const rayleighFirst = rayleigh.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(rayleigh.distribution(5000), rayleighFirst);
}
prng.seed('second rayleigh seed test');
const rayleighSecond = rayleigh.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(rayleigh.distribution(5000), rayleighSecond);
}
done();
});
});
describe('Student\'s t-distribution (v = 6)', () => {
const Student = require('../lib/methods/student');
it('should return same value each time', () => {
const student = new Student(6);
prng.seed('first student seed test');
const studentFirst = student.random();
for(let i = 0; i < 1000; i += 1) {
expect(student.random()).to.be.closeTo(studentFirst, 0.000001);
}
prng.seed('second student seed test');
const studentSecond = student.random();
for(let i = 0; i < 1000; i += 1) {
expect(student.random()).to.be.closeTo(studentSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const student = new Student(6);
prng.seed('first student seed test');
const studentFirst = student.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(student.distribution(5000), studentFirst);
}
prng.seed('second student seed test');
const studentSecond = student.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(student.distribution(5000), studentSecond);
}
done();
});
});
describe('Triangular distribution (a = 1, b = 3, c = 2)', () => {
const Triangular = require('../lib/methods/triangular');
it('should return same value each time', () => {
const triangular = new Triangular(1, 3, 2);
prng.seed('first triangular seed test');
const triangularFirst = triangular.random();
for(let i = 0; i < 1000; i += 1) {
expect(triangular.random()).to.be.closeTo(triangularFirst, 0.000001);
}
prng.seed('second triangular seed test');
const triangularSecond = triangular.random();
for(let i = 0; i < 1000; i += 1) {
expect(triangular.random()).to.be.closeTo(triangularSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const triangular = new Triangular(1, 3, 2);
prng.seed('first triangular seed test');
const triangularFirst = triangular.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(triangular.distribution(5000), triangularFirst);
}
prng.seed('second triangular seed test');
const triangularSecond = triangular.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(triangular.distribution(5000), triangularSecond);
}
done();
});
});
describe('Weibull distribution (lambda = 1, k = 1.5)', () => {
const Weibull = require('../lib/methods/weibull');
it('should return same value each time', () => {
const weibull = new Weibull(1.5, 1);
prng.seed('first weibull seed test');
const weibullFirst = weibull.random();
for(let i = 0; i < 1000; i += 1) {
expect(weibull.random()).to.be.closeTo(weibullFirst, 0.000001);
}
prng.seed('second weibull seed test');
const weibullSecond = weibull.random();
for(let i = 0; i < 1000; i += 1) {
expect(weibull.random()).to.be.closeTo(weibullSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const weibull = new Weibull(1.5, 1);
prng.seed('first weibull seed test');
const weibullFirst = weibull.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(weibull.distribution(5000), weibullFirst);
}
prng.seed('second weibull seed test');
const weibullSecond = weibull.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(weibull.distribution(5000), weibullSecond);
}
done();
});
});
describe('Bates distribution (n = 10, a = 0, b = 1)', () => {
const Bates = require('../lib/methods/bates');
it('should return same value each time', () => {
const bates = new Bates(10, 0, 1);
prng.seed('first bates seed test');
const batesFirst = bates.random();
for(let i = 0; i < 1000; i += 1) {
expect(bates.random()).to.be.closeTo(batesFirst, 0.000001);
}
prng.seed('second bates seed test');
const batesSecond = bates.random();
for(let i = 0; i < 1000; i += 1) {
expect(bates.random()).to.be.closeTo(batesSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const bates = new Bates(10, 0, 1);
prng.seed('first bates seed test');
const batesFirst = bates.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(bates.distribution(5000), batesFirst);
}
prng.seed('second bates seed test');
const batesSecond = bates.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(bates.distribution(5000), batesSecond);
}
done();
});
});
describe('Irwin-Hall distribution (n = 8)', () => {
const Irwinhall = require('../lib/methods/irwinhall');
it('should return same value each time', () => {
const irwinhall = new Irwinhall(8);
prng.seed('first irwinhall seed test');
const irwinhallFirst = irwinhall.random();
for(let i = 0; i < 1000; i += 1) {
expect(irwinhall.random()).to.be.closeTo(irwinhallFirst, 0.000001);
}
prng.seed('second irwinhall seed test');
const irwinhallSecond = irwinhall.random();
for(let i = 0; i < 1000; i += 1) {
expect(irwinhall.random()).to.be.closeTo(irwinhallSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const irwinhall = new Irwinhall(8);
prng.seed('first irwinhall seed test');
const irwinhallFirst = irwinhall.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(irwinhall.distribution(5000), irwinhallFirst);
}
prng.seed('second irwinhall seed test');
const irwinhallSecond = irwinhall.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(irwinhall.distribution(5000), irwinhallSecond);
}
done();
});
});
describe('Zipf distribution (alpha = 0.5, shape = 20)', () => {
const Zipf = require('../lib/methods/zipf');
it('should return same value each time', () => {
const zipf = new Zipf(0.5, 20);
prng.seed('first zipf seed test');
const zipfFirst = zipf.random();
for (let i = 0; i < 1000; i += 1) {
expect(zipf.random()).to.be.closeTo(zipfFirst, 0.000001);
}
prng.seed('second zipf seed test');
const zipfSecond = zipf.random();
for(let i = 0; i < 1000; i += 1) {
expect(zipf.random()).to.be.closeTo(zipfSecond, 0.000001);
}
});
it('should return same distribution each time', function(done) {
this.timeout(480000);
const zipf = new Zipf(0.5, 20);
prng.seed('first zipf seed test');
const zipfFirst = zipf.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(zipf.distribution(5000), zipfFirst);
}
prng.seed('second zipf seed test');
const zipfSecond = zipf.distribution(5000);
for(let i = 0; i < 10; i += 1) {
compareDistributions(zipf.distribution(5000), zipfSecond);
}
done();
});
});
});
|
robotan0921/mruby_on_tinet-tecs | asp3/tecsgen/cdelib/tcontrol.rb | # -*- coding: utf-8 -*-
=begin
TECSCDE - TECS Component Diagram Editor
Copyright (C) 2014-2015 by TOPPERS Project
The above copyright holders grant permission gratis to use,
duplicate, modify, or redistribute (hereafter called use) this
software (including the one made by modifying this software),
provided that the following four conditions (1) through (4) are
satisfied.
(1) When this software is used in the form of source code, the above
copyright notice, this use conditions, and the disclaimer shown
below must be retained in the source code without modification.
(2) When this software is redistributed in the forms usable for the
development of other software, such as in library form, the above
copyright notice, this use conditions, and the disclaimer shown
below must be shown without modification in the document provided
with the redistributed software, such as the user manual.
(3) When this software is redistributed in the forms unusable for the
development of other software, such as the case when the software
is embedded in a piece of equipment, either of the following two
conditions must be satisfied:
(a) The above copyright notice, this use conditions, and the
disclaimer shown below must be shown without modification in
the document provided with the redistributed software, such as
the user manual.
(b) How the software is to be redistributed must be reported to the
TOPPERS Project according to the procedure described
separately.
(4) The above copyright holders and the TOPPERS Project are exempt
from responsibility for any type of damage directly or indirectly
caused from the use of this software and are indemnified by any
users or end users of this software from any and all causes of
action whatsoever.
THIS SOFTWARE IS PROVIDED "AS IS." THE ABOVE COPYRIGHT HOLDERS AND
THE TOPPERS PROJECT DISCLAIM ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, ITS APPLICABILITY TO A PARTICULAR
PURPOSE. IN NO EVENT SHALL THE ABOVE COPYRIGHT HOLDERS AND THE
TOPPERS PROJECT BE LIABLE FOR ANY TYPE OF DAMAGE DIRECTLY OR
INDIRECTLY CAUSED FROM THE USE OF THIS SOFTWARE.
$Id: tcontrol.rb 2640 2017-06-03 11:27:12Z okuma-top $
=end
module TECSCDE
=begin
Structure of Palette Window
+-- @window -----------------------------+
|+-- @box ------------------------------+|
||+- mode (@mode_frame) ---------------+||
|||+-@mode_vbox-----------------------+|||
|||| Pointer (@pointer_button) ||||
|||+----------------------------------+|||
|||| New Cell (@new_button) ||||
|||+----------------------------------+|||
||+- celltypes (@celltype_frame)-------+||
|||+- ClltypeTreeView&ScrolledWindow--+|||
|||| name | region ||||
|||+---------+------------------------+|||
|||| | ||||
|||| | ||||
|||+---------+------------------------+|||
||+- cell properties (@mode_celltype) -+||
|||+- AttrTreeView&ScrolledWindow-----+|||
|||| name | type | value ||||
|||+---------+---------+--------------+|||
|||| | | ||||
|||| | | ||||
|||+---------+---------+--------------+|||
|+--------------------------------------+|
+----------------------------------------+
=end
UNSELECTED_STR = "(unselected)"
class Control
#@window:: Gtk::Window
#@model::Model
#@view::View
#@mode::Symbol: :NEW_CELL, :POINTER
#@cport_joining::TmCPort # :SM_JOINING starting cell
#@celltypeTreeView::CelltypeTreeView
#@attrTreeView::AttrTreeView
#@prev_time::Integer: event time (milli second)
ModeList = [ :MODE_NONE, :MODE_NEW_CELL, :MODE_POINTER ]
SubmodeList = [ :SM_NONE, :SM_JOINING, :SM_SURROUNDING_CELLS,
:SM_MOVING_CELL_BAR, :SM_MOVING_CPORT, :SM_MOVING_EPORT, :SM_MOVING_CELL_EDGE,
:SM_EDIT_CELL_NAME ]
def initialize model
@nest = -1
@model = model
@hilite_objs = Hilite_objs.new
@mode = :MODE_NONE
@sub_mode = :SM_NONE
@cport_joining = nil
@prev_time = 0
create_new_operation_window
add_celltype_list
@hilite_objs.set_attrTreeView @attrTreeView, @cell_name_entry, @cell_region_entry, @cell_frame
@hilite_objs.update_attrTreeView
@last_xm = @last_ym = 0
end
#----- operations for palette -----#
def on_save
flush_print "save\n"; @model.save( @model.get_file_editing )
end
def on_export
fname = @model.get_file_editing.sub( /\.[Cc][Dd][Ee]\Z/, ".pdf" )
if ! ( fname =~ /\.pdf\Z/ )
fname += ".pdf"
end
flush_print "export to #{fname}\n"
@view.export( fname )
end
def on_pointer
flush_print "mode: pointer\n"
@mode = :MODE_POINTER
end
def on_new_cell
@mode = :MODE_NEW_CELL
flush_print "mode: new\n"
end
def on_undo
@model.undo
@hilite_objs.reset
update
end
def on_redo
@model.redo
@hilite_objs.reset
update
end
def on_quit
flush_print "quit\n";Gtk.main_quit
end
def on_cell_name_entry_active entry
@b_cell_renaming = true
@hilite_objs.change_cell_name entry.text
@b_cell_renaming = false
update
end
def on_cell_name_entry_focus_out entry
# to avoid nested message box dialog in error case
if ! @b_cell_renaming
@hilite_objs.change_cell_name entry.text
update
end
end
def on_cell_region_entry_active entry
# @b_cell_renaming = true
# @hilite_objs.change_cell_name entry.text
# @b_cell_renaming = false
# update
end
def on_cell_region_entry_focus_out entry
# to avoid nested message box dialog in error case
# if ! @b_cell_renaming
# @hilite_objs.change_cell_name entry.text
# update
# end
end
def set_attrOperationWidgets window, ctv, attrTreeView, cell_name_entry, cell_region_entry, cell_frame
@window = window
@celltypeTreeView = ctv
@attrTreeView, @cell_name_entry, @cell_region_entry, @cell_frame = attrTreeView, cell_name_entry, cell_region_entry, cell_frame
@hilite_objs.set_attrTreeView @attrTreeView, @cell_name_entry, @cell_region_etnry, @cell_frame
end
#----- palette -----#
def create_new_operation_window
@palette = TECSCDE::Palette.new self
# @palette.get_entry_cell_name
# @palette.get_attrTreeView
end
#----- end of palette operations -----#
def set_view view
@view = view
@attrTreeView.set_view view
# keep controlWindow above mainWindow
@window.set_transient_for( @view.get_window )
@window.window.set_group @view.get_window.window
@window.window.raise
@palette.set_view view
end
#----- canvas events action -----#
#=== mouse pressed on canvas
#button::Integer: mouse button number
#state::GdkModifierType: modifier key state
#time::Integer: milli second
#click_count::Integer: 1=single click, 2=double click
def pressed_on_canvas xm, ym, state, button, time, click_count
# p "button=#{button} state=#{state} time=#{time} sub_mode=#{@sub_mode}"
if @sub_mode == :SM_EDIT_CELL_NAME
name = @view.end_edit_name
# p "end_edit_name name=#{name}"
@hilite_objs.change_cell_name name
@sub_mode = :SM_NONE
end
if button == 1
object = find_near xm, ym
if object.kind_of?( TECSModel::TmCell ) && click_count == 2
if object.is_editable?
# p "begin_edit_name"
@view.begin_edit_name object, time
@hilite_objs.reset( object )
@sub_mode = :SM_EDIT_CELL_NAME
end
elsif object.kind_of?( TECSModel::TmCell ) ||
# if object.kind_of?( TECSModel::TmCell ) ||
object.kind_of?( TECSModel::TmJoinBar )
@sub_mode = :SM_MOVING_CELL_BAR
# p "FOUND Cell or Bar"
if state.shift_mask?
@hilite_objs.add( object )
elsif state.control_mask?
@hilite_objs.add_del( object )
elsif ! @hilite_objs.include? object
@hilite_objs.reset( object )
end
@view.draw_hilite_objects @hilite_objs
elsif object.kind_of? TECSModel::TmCPort
# p "FOUND TmCPort"
if state.shift_mask?
@sub_mode = :SM_MOVING_CPORT
@hilite_objs.add object
elsif state.control_mask?
@sub_mode = :SM_MOVING_CPORT
@hilite_objs.reset( object )
elsif object.get_join == nil
@sub_mode = :SM_JOINING
@hilite_objs.reset
@cport_joining = object
@view.set_cursor CURSOR_JOINING
else
TECSCDE::message_box( <<EOT, :OK )
Call port has already been joined.
Delete existing join before creating new join.
If you want to hilited port, click with pressing shift key.
EOT
end
elsif object.kind_of? TECSModel::TmEPort then
if state.shift_mask?
@sub_mode = :SM_MOVING_EPORT
@hilite_objs.add object
elsif state.control_mask?
@sub_mode = :SM_MOVING_EPORT
@hilite_objs.add_del( object )
else
# p "FOUND TmEPort"
@sub_mode = :SM_MOVING_EPORT
@hilite_objs.reset object
end
else
# p "NOT FOUND"
if @mode == :MODE_NEW_CELL then
ctn, nsp = @celltypeTreeView.selected
if ctn then
cell = @model.new_cell( xm, ym, ctn, nsp )
@model.set_undo_point
end
@hilite_objs.reset cell
else
@hilite_objs.reset
end
end
@last_xm, @last_ym = xm, ym
end # button
@prev_time = time
end
#=== mouse moved on canvas
# Snaps the pointer delta to the model's alignment grid, then acts on the
# current sub mode: drags the hilited objects, or just updates the cursor
# while joining / idle.
#xm,ym::canvas coordinates of the pointer
#state::modifier state (unused here)
def motion_on_canvas xm, ym, state
  # Snap the delta since the last event to the alignment grid.  The
  # un-snapped remainder is folded back into @last_xm/@last_ym so small
  # movements accumulate instead of being lost.
  alignment = TECSModel.get_alignment
  steps, remainder = (xm - @last_xm).divmod alignment
  dx = alignment * steps
  @last_xm = xm - remainder
  steps, remainder = (ym - @last_ym).divmod alignment
  dy = alignment * steps
  @last_ym = ym - remainder

  case @sub_mode
  when :SM_MOVING_CELL_BAR
    @hilite_objs.each do |cell_bar|
      cell_bar.move( dx, dy )
    end
    @view.refresh_canvas
    @view.draw_hilite_objects @hilite_objs
  when :SM_MOVING_CPORT, :SM_MOVING_EPORT
    @hilite_objs.each do |port|
      port.move( dx, dy )
    end
    update
    @view.refresh_canvas
    @view.draw_hilite_objects @hilite_objs
  when :SM_JOINING
    # show the "join OK" cursor only over an entry port whose signature
    # matches the call port being joined
    target = find_near xm, ym
    if target.kind_of?( TECSModel::TmEPort ) &&
       target.get_signature == @cport_joining.get_signature
      @view.set_cursor CURSOR_JOIN_OK
    end
  when :SM_NONE
    target = find_near xm, ym
    if target.kind_of? TECSModel::TmCPort
      @view.set_cursor CURSOR_PORT
    else
      @view.set_cursor CURSOR_NORMAL
    end
  end
end
#=== mouse released on canvas
# Finalizes the operation started by the preceding press/motion:
# records an undo point after a move, creates a join when a joining drag
# was released over a compatible entry port, and resets the sub mode —
# unless the inline cell-name editor is active, which must keep its state.
#xm,ym::canvas coordinates of the release
#state::modifier state (unused)
#button::released button number (unused)
def released_on_canvas xm, ym, state, button
  case @sub_mode
  when :SM_MOVING_CELL_BAR
    @model.set_undo_point
  when :SM_MOVING_CPORT, :SM_MOVING_EPORT
    @model.set_undo_point
  when :SM_JOINING
    object = find_near xm, ym
    # a join is only created on an entry port with a matching signature
    if object.kind_of? TECSModel::TmEPort then
      if object.get_signature == @cport_joining.get_signature
        # the created join is tracked by the model; the local that the
        # original assigned here was never used
        @model.new_join( @cport_joining, object )
        @model.set_undo_point
      end
    end
  end
  @view.set_cursor CURSOR_NORMAL
  if @sub_mode != :SM_EDIT_CELL_NAME
    update
    @sub_mode = :SM_NONE
  end
end
#=== key pressed on canvas
# Keyboard shortcuts operating on the hilited (selected) objects:
# delete, insert port, and arrow-key nudging by one alignment step.
# Ignored entirely while the inline cell-name editor is active.
#keyval::Integer: key code delivered by the toolkit
#state::modifier state (shift/control masks)
def key_pressed keyval, state
  # keys belong to the entry widget while a cell name is being edited
  if @sub_mode == :SM_EDIT_CELL_NAME
    return
  end
  case keyval
  when 0xff # delete key — NOTE(review): GDK_KEY_Delete is 0xffff; this looks
            # like a masked low byte, but that cannot be confirmed from here
    # delete every selected object, dispatching on its kind
    @hilite_objs.each{ |object|
      if object.kind_of? TECSModel::TmJoinBar
        object.get_join.delete
      elsif object.kind_of? TECSModel::TmCell
        object.delete
      elsif object.kind_of? TECSModel::TmPort
        object.delete_hilited
      end
    }
    @hilite_objs.reset
  when 0x63 # Insert — presumably the low byte of GDK_KEY_Insert (0xff63); confirm
    # insert a port before/after the selected one (shift picks "before")
    @hilite_objs.each{ |object|
      if object.kind_of? TECSModel::TmPort
        object.insert( state.shift_mask? ? :before : :after )
      end
    }
  when 0x51, 0x52, 0x53, 0x54
    # arrow keys: nudge the selection by one alignment step
    case keyval
    when 0x51 # left arrow
      x_inc = - TECSModel.get_alignment; y_inc = 0
    when 0x52 # up arrow
      x_inc = 0.0; y_inc = - TECSModel.get_alignment
    when 0x53 # right arrow
      x_inc = TECSModel.get_alignment; y_inc = 0
    when 0x54 # down arrow
      x_inc = 0.0; y_inc = TECSModel.get_alignment
    end
    @hilite_objs.each{ |obj|
      obj.move( x_inc, y_inc )
    }
  when 0x50 # home   — not implemented
  when 0x57 # end    — not implemented
  when 0x55 # PageUp — not implemented
  when 0x56 # PageDown — not implemented
  else
    # printf "key_pressed: keyval=%02x\n", keyval
  end
  if @sub_mode != :SM_EDIT_CELL_NAME
    update
  end
  # NOTE(review): an undo point is recorded even when the key changed
  # nothing — presumably harmless; confirm against the undo stack's behavior
  @model.set_undo_point
end
#=== find_near object
# Returns the object nearest to (xm, ym).  Ports and cell bodies take
# priority over join bars; when neither is hit, the nearest join bar is
# returned, or nil when there are no joins at all.
#xm,ym::canvas coordinates
#RETURN::TmPort | TmCell | TmJoinBar | nil
def find_near xm, ym
  # ports and cells first: the first hit wins
  @model.get_cell_list.each{ |cell|
    port = cell.get_near_port( xm, ym )
    return port if port != nil
    return cell if cell.is_near?( xm, ym )
  }
  # otherwise pick the join bar with the smallest distance
  # (Float::INFINITY replaces the original magic sentinel 999999999)
  min_dist = Float::INFINITY
  min_bar = nil
  @model.get_join_list.each{ |join|
    bar, dist = join.get_near_bar( xm, ym )
    if dist < min_dist
      min_dist = dist
      min_bar = bar
    end
  }
  min_bar
end
#Control#get_hilite_objs
# Accessor for the container of currently hilited (selected) objects.
#return::Hilite_objs
def get_hilite_objs
  @hilite_objs
end
#=== populate the celltype tree view from the model
# No-op when the model has no celltype list yet.
def add_celltype_list
  celltype_list = @model.get_celltype_list
  return unless celltype_list
  celltype_list.each do |celltype|
    @celltypeTreeView.add celltype
  end
end
#Control#update
# Refresh the attribute pane for the current selection, then repaint
# the canvas (in that order, so the canvas reflects any attr changes).
def update
  @hilite_objs.update_attrTreeView
  @view.paint_canvas
end
end # class Control
#== CelltypeTreeView: presents the list of available celltypes
# formerly this class was a sub-class of Gtk::TreeView;
# currently it wraps (has) a Gtk::TreeView and delegates to it.
class CelltypeTreeView
  # column indices in the backing list store
  COL_NAME = 0
  COL_NSPATH = 1

  #=== initialize
  #treeView::Gtk::TreeView: the widget to drive
  def initialize treeView
    @treeView = treeView

    # backing model: one String column per displayed field
    list_store = Gtk::ListStore.new(String, String)
    @treeView.set_model(list_store)

    text_renderer = Gtk::CellRendererText.new

    name_column = Gtk::TreeViewColumn.new("name", text_renderer, :text => COL_NAME)
    @treeView.append_column(name_column)

    nspath_column = Gtk::TreeViewColumn.new("namespace", text_renderer, :text => COL_NSPATH)
    @treeView.append_column(nspath_column)

    # keep the list sorted by celltype name
    list_store.set_sort_column_id( COL_NAME )
  end

  #=== append one celltype row (name + owning namespace path)
  def add celltype
    row = @treeView.model.append
    row[ COL_NAME ] = celltype.get_name
    row[ COL_NSPATH ] = celltype.get_owner.get_namespace_path.to_s
  end

  #=== currently selected [name, namespace_path], or [nil, nil] when nothing is selected
  def selected
    row = @treeView.selection.selected
    row ? [ row[ COL_NAME ], row[ COL_NSPATH ] ] : [ nil, nil ]
  end

  # not implemented — rows are only ever cleared wholesale via #clear
  def delete item
  end

  def clear
    @treeView.model.clear
  end

  #=== CelltypeTreeView#get_treeView
  #RETURN::Gtk::TreeView
  def get_treeView
    @treeView
  end
end # class CelltypeTreeView
#== AttrTreeView: shows / edits the attribute list of the selected cell
# formerly this class is sub-class of Gtk::TreeView
# currently this class has Gtk::TreeView
# Columns: attribute name, C type string, and an editable value column
# backed by Gtk::CellRendererCombo (a fixed choice list when the attribute
# declares one, a free-text entry otherwise).
class AttrTreeView # < Gtk::TreeView
  #@choice_list::{name=>ListStore}
  #@cell::TmCell
  #@ct_attr_list::{ String(attr_name) => String(initializer) }
  #@view::MainView
  #@treeView::Gtk::TreeView

  # column indices in the backing ListStore
  COL_NAME = 0
  COL_TYPE = 1
  COL_VALUE = 2

  #=== initialize
  #tv::Gtk::TreeView: the widget this object drives
  def initialize tv
    @treeView = tv
    # NOTE(review): single-entry placeholder model for the combo renderer;
    # it is replaced per-row by @choice_list in the cell data func below
    combo_list = Gtk::ListStore.new(String)
    iter = combo_list.append
    iter[0] = "a0"
=begin
    combo_list = Gtk::ListStore.new(String, String, String)
    iter = combo_list.append
    iter[0] = "a0"
    iter[1] = "b0"
    iter[2] = "c0"
    iter = combo_list.append
    iter[0] = "a1"
    iter[1] = "b1"
    iter[2] = "c1"
    combo_list2 = Gtk::ListStore.new(String, String, String)
    iter = combo_list2.append
    iter[0] = "A0"
    iter[1] = "B0"
    iter[2] = "C0"
    iter = combo_list2.append
    iter[0] = "A1"
    iter[1] = "B1"
    iter[2] = "C1"
=end
    # create data model
    liststore = Gtk::ListStore.new(String, String, String)
    # set data model to tree view(self)
    @treeView.set_model(liststore)
    # create renderer for text
    renderer = Gtk::CellRendererText.new
    #----- set column information -----#
    # ATTRIBUTE column: red when the value is missing, blue when read-only
    # (NOTE(review): these data funcs dereference @cell — presumably set_cell
    # is always called before the first draw; confirm)
    col = Gtk::TreeViewColumn.new("attribute", renderer, :text => COL_NAME)
    col.set_cell_data_func(renderer) { |col, renderer, model, iter|
      if iter[ COL_VALUE ] == nil || iter[ COL_VALUE ] == ""
        renderer.foreground = "red"
      elsif @cell.is_editable?
        renderer.foreground = "black"
      else
        renderer.foreground = "blue"
      end
    }
    @treeView.append_column(col)
    # TYPE column: blue when the cell is read-only
    col = Gtk::TreeViewColumn.new("type", renderer, :text => COL_TYPE)
    col.set_cell_data_func(renderer) { |col, renderer, model, iter|
      if @cell.is_editable?
        renderer.foreground = "black"
      else
        renderer.foreground = "blue"
      end
    }
    @treeView.append_column(col)
    # VALUE column: combo renderer; per-row choice list or free entry
    renderer = Gtk::CellRendererCombo.new
    renderer.text_column = 0
    renderer.model = combo_list
    col = Gtk::TreeViewColumn.new("value", renderer, :text => COL_VALUE )
    col.set_cell_data_func(renderer) { |col, renderer, model, iter|
      # p "iter[0]=#{iter[0]}"
      # orange = value not explicitly set on the cell (celltype default shown)
      if @cell.get_attr_list[iter[ COL_NAME ].to_sym] == nil
        renderer.foreground = "orange"
      elsif @cell.is_editable?
        renderer.foreground = "black"
      else
        renderer.foreground = "blue"
      end
      if @cell.is_editable?
        renderer.editable = true
      else
        renderer.editable = false
      end
      # attributes with a declared choice list get a fixed combo,
      # everything else gets a free-text entry
      if @choice_list[ iter[0] ]
        renderer.model = @choice_list[ iter[0] ]
        renderer.has_entry = false
        renderer.text_column = 0
      else
        renderer.model = nil
        renderer.text_column = 0
        renderer.has_entry = true
      end
=begin
      # if iter[2] && iter[2] != ""
      if iter[1] == "ID"
        renderer.model = combo_list
        renderer.has_entry = false
        renderer.text_column = 0
      elsif iter[1] == "SIZE"
        renderer.model = combo_list2
        renderer.has_entry = false
        renderer.text_column = 1
      elsif iter[1] == "PRI"
        renderer.model = combo_list
        renderer.has_entry = false
        renderer.text_column = 2
      else
        renderer.model = nil
        renderer.text_column = 0
        renderer.has_entry = true
      end
=end
    }
    # commit an edited value back to the model cell
    renderer.signal_connect('edited') { |w, path, new_text|
      # new_text can be wrong if 'text_column' is changed in each row
      # after selection is changed, before sending signal, many rows are redrawn
      # p "new_text='#{new_text}'"
      if (iter = @treeView.model.get_iter(path))
        if new_text == nil || new_text == ""
          # an emptied value falls back to the celltype initializer for
          # display, and is stored as nil (= "unset") on the cell
          if @ct_attr_list[ iter[ COL_NAME ] ]
            iter[ COL_VALUE ] = @ct_attr_list[ iter[ COL_NAME ] ]
          else
            iter[ COL_VALUE ] = new_text
          end
          if new_text == ""
            new_text = nil
          end
        else
          iter[ COL_VALUE ] = new_text
        end
        @cell.set_attr( iter[ COL_NAME ].to_sym, new_text )
        @cell.get_model.set_undo_point
        @view.paint_canvas
      end
    }
    @treeView.append_column(col)
  end

  #=== AttrTreeView#set_cell
  # Rebuild the view for a newly selected cell: one row per celltype
  # attribute, pre-filled with the cell's value or the declared initializer.
  #cell::TmCell
  def set_cell cell
    clear
    @cell = cell
    @choice_list = {}
    @ct_attr_list = {}
    cell_attr_list = cell.get_attr_list
    ct = @cell.get_celltype
    if ct
      #----- register attributes and initializer to tree view model -----#
      ct.get_attribute_list.each{ |attr|
        iter = @treeView.model.append
        name = attr.get_name.to_s
        if attr.get_initializer
          @ct_attr_list[ name ] = attr.get_initializer.to_CDL_str
        end
        iter[ COL_NAME ] = name
        iter[ COL_TYPE ] = "#{attr.get_type.get_type_str}#{attr.get_type.get_type_str_post}"
        # prefer the value set on the cell, fall back to the initializer
        if cell_attr_list[ name.to_sym ]
          iter[ COL_VALUE ] = cell_attr_list[name.to_sym]
        elsif attr.get_initializer
          iter[ COL_VALUE ] = attr.get_initializer.to_CDL_str
        else
          # iter[ COL_VALUE ] = " "
        end
        #----- choice list model -----#
        if attr.get_choice_list
          @choice_list[ name ] = Gtk::ListStore.new(String)
          attr.get_choice_list.each{ |choice|
            iter = @choice_list[ name ].append
            iter[0] = CDLString.remove_dquote( choice.val )
          }
        end
      }
    end
  end

  def clear
    @treeView.model.clear
  end

  #=== AttrTreeView#set_view
  #view::MainView
  def set_view view
    @view = view
  end

  #=== AttrTreeView#get_treeView
  #RETURN::Gtk::TreeView
  def get_treeView
    @treeView
  end
end # class AttrTreeView
#== manage hilited (selected) objects
# Holds the current selection ([TmCell|TmJoinBar|TmPort]) and mirrors it
# into the cell-property widgets and the attribute tree view.
class Hilite_objs
  #@hilite_objs::[TmCell|TmJoinBar|TmPort]

  def initialize
    @hilite_objs = []
  end

  #=== Hilite_objs#add
  # add obj to the selection (duplicates are collapsed)
  def add obj
    reset_if_ncessary obj
    @hilite_objs << obj
    @hilite_objs.uniq!
    update_attrTreeView
  end

  #=== Hilite_objs#add_del
  # add if not include, delete if include
  def add_del obj
    reset_if_ncessary obj
    if @hilite_objs.include? obj
      @hilite_objs.delete obj
    else
      @hilite_objs << obj
    end
    update_attrTreeView
  end

  #=== Hilite_objs#reset
  # clear the selection; when obj is given it becomes the sole selection
  def reset obj = nil
    @hilite_objs = []
    if obj
      @hilite_objs << obj
    end
    update_attrTreeView
  end

  #=== Hilite_objs#reset_if_ncessary  (name kept as-is for callers; sic "necessary")
  # Port and ( Cell or Bar ) cannot be hilited simultaneously.
  # Ports belonging to different Cells cannot be hilited simultaneously.
  #obj::TmCell | TmBar | TmPort: new object to be hilited
  def reset_if_ncessary obj
    if @hilite_objs.length > 0
      if @hilite_objs[0].kind_of? TECSModel::TmPort
        if obj.kind_of? TECSModel::TmPort
          if obj.get_owner_cell != @hilite_objs[0].get_owner_cell
            reset
          end
        else
          reset
        end
      else
        if obj.kind_of? TECSModel::TmPort
          reset
        end
      end
    end
  end

  #=== Hilite_objs#each
  # yield each selected object to the given block
  def each &pr
    # was `proc = Proc.new` relying on implicit block capture, which is
    # deprecated and removed in Ruby 3; an explicit block param is equivalent
    @hilite_objs.each{ |obj|
      pr.call obj
    }
  end

  def empty?
    @hilite_objs.empty?
  end

  def include? object
    @hilite_objs.include? object
  end

  #=== Hilite_objs#set_attrTreeView
  # wire up the widgets that mirror the selection
  def set_attrTreeView treeview, name_entry, region_entry, frame
    @cell_property_frame = frame
    @cell_name_entry = name_entry
    @cell_region_entry = region_entry
    @attrTreeView = treeview
  end

  #=== Hilite_objs#change_cell_name
  # rename the selected cell (only when exactly one cell is selected)
  def change_cell_name name
    if @hilite_objs.length == 1 && @hilite_objs[0].kind_of?( TECSModel::TmCell )
      @hilite_objs[0].change_name name.to_sym
      @hilite_objs[0].get_model.set_undo_point
    end
  end

  #=== Hilite_objs#cell_plugin_dialog
  # open the plugin dialog for the selected cell (single-cell selection only)
  def cell_plugin_dialog
    if @hilite_objs.length == 1 && @hilite_objs[0].kind_of?( TECSModel::TmCell )
      dialog = CellPluginDialog.new @hilite_objs[0]
      dialog.run
    end
  end

  #=== Hilite_objs#update_attrTreeView
  # refresh the property widgets: they show the cell's data only when the
  # selection contains exactly one cell, otherwise "(unselected)"
  def update_attrTreeView
    cell = nil
    n_cell = 0
    each{ |obj|
      if obj.kind_of? TECSModel::TmCell
        cell = obj
        n_cell += 1
      end
    }
    if n_cell == 1
      @cell_name_entry.text = cell.get_name.to_s
      @cell_region_entry.text = cell.get_region.get_namespace_path.to_s
      # this doesn't work! I don't know how to change the color of Entry text
      if cell.is_editable?
        @cell_name_entry.modify_fg Gtk::STATE_NORMAL, Gdk::Color.parse( "black" )
        @cell_region_entry.modify_fg Gtk::STATE_NORMAL, Gdk::Color.parse( "black" )
        @cell_property_frame.set_label "cell property"
      else
        @cell_name_entry.modify_fg Gtk::STATE_NORMAL, Gdk::Color.parse( "blue" )
        @cell_region_entry.modify_fg Gtk::STATE_NORMAL, Gdk::Color.parse( "blue" )
        @cell_property_frame.set_label "cell property (read only)"
      end
      @cell_name_entry.set_editable cell.is_editable?
      @cell_region_entry.set_editable cell.is_editable?
      @attrTreeView.set_cell cell
    else
      @cell_name_entry.text = "(unselected)"
      @cell_name_entry.set_editable false
      # fix: the original repeated the two @cell_name_entry lines above,
      # leaving the region entry showing a stale value when deselecting
      @cell_region_entry.text = "(unselected)"
      @cell_region_entry.set_editable false
      @cell_property_frame.set_label "cell property (unselected)"
      @attrTreeView.clear
    end
  end
end # class hilite_objs
end
|
Faulik/ruby_kaiter | db/migrate/20151025182727_create_sprints.rb | class CreateSprints < ActiveRecord::Migration
# Creates the sprints table: a titled date range plus a state column.
def change
  create_table :sprints do |table|
    table.string :title, limit: 45
    table.date   :started_at
    table.date   :finished_at
    table.string :state

    table.timestamps null: false
  end
end
end
|
helloximeier/LoveFlyHome | LoveFlyHome/PublicClass/Constant.h | <filename>LoveFlyHome/PublicClass/Constant.h
//
//  Constant.h   (header comment previously said "SearchBarView.h" — corrected)
//  LoveFlyHome
//
//  Created by Lefeng on 16/5/26.
//  Copyright © 2016年 Lefeng. All rights reserved.
//
// NOTE(review): the include guard is "meituan_Constant_h" inside a
// LoveFlyHome project — likely copied from another codebase; harmless,
// but worth renaming for consistency.
#ifndef meituan_Constant_h
#define meituan_Constant_h
// Umeng (友盟) analytics AppKey — value redacted in this copy.
#define UMAPPKEY @"<KEY>"
#endif
|
arubdesu/zentral | zentral/contrib/monolith/migrations/0044_auto_20201130_0824.py | # Generated by Django 2.2.17 on 2020-11-30 08:24
from datetime import datetime

from django.db import migrations, models
from django.utils import timezone
def set_default_manifest_timestamps(apps, schema_editor):
    """Backfill created_at/updated_at on Manifest rows that predate the columns.

    Uses a timezone-aware "now" so the stored value is valid under
    USE_TZ=True; the original ``datetime.utcnow()`` produced a naive
    datetime, which triggers a RuntimeWarning when saved to an aware
    DateTimeField.
    """
    now = timezone.now()
    Manifest = apps.get_model("monolith", "Manifest")
    for manifest in Manifest.objects.all():
        manifest.created_at = now
        # updated_at has auto_now=True, so save() refreshes it anyway;
        # the explicit assignment keeps the intent obvious.
        manifest.updated_at = now
        manifest.save()
class Migration(migrations.Migration):
    """Add version/timestamp bookkeeping to monolith Manifests.

    The timestamp columns are first added as nullable so existing rows can
    be backfilled, then tightened to non-null auto_now/auto_now_add fields.
    """

    dependencies = [
        ('monolith', '0043_auto_20201129_1839'),
    ]

    operations = [
        # 1) add the new columns, nullable so the backfill can run
        migrations.AddField(
            model_name='manifest',
            name='created_at',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AddField(
            model_name='manifest',
            name='updated_at',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.AddField(
            model_name='manifest',
            name='version',
            field=models.PositiveIntegerField(default=1),
        ),
        # 2) backfill existing rows; the no-op reverse keeps the migration
        #    reversible (the fields are dropped on reverse anyway)
        migrations.RunPython(set_default_manifest_timestamps,
                             migrations.RunPython.noop),
        # 3) drop the null=True escape hatch
        migrations.AlterField(
            model_name='manifest',
            name='created_at',
            field=models.DateTimeField(auto_now_add=True),
        ),
        migrations.AlterField(
            model_name='manifest',
            name='updated_at',
            field=models.DateTimeField(auto_now=True),
        ),
    ]
|
wh14274493/raft | server/src/main/java/cn/ttplatform/wh/data/log/LogFileMetadataRegion.java | package cn.ttplatform.wh.data.log;
import cn.ttplatform.wh.data.support.MetadataRegion;
import lombok.extern.slf4j.Slf4j;
import java.io.File;
import static cn.ttplatform.wh.data.FileConstant.LOG_FILE_HEADER_SPACE_POSITION;
import static cn.ttplatform.wh.data.FileConstant.LOG_FILE_HEADER_SPACE_SIZE;
/**
 * Metadata region holding the header of a log file. The recorded file size
 * is cached in memory so reads never touch the underlying region, and a
 * write is skipped when the value is unchanged.
 *
 * @author <NAME>
 * @date 2021/7/1 14:12
 */
@Slf4j
public class LogFileMetadataRegion {

    /** Offset of the file-size field, relative to the start of this region. */
    private static final int RELATIVE_LOG_FILE_SIZE_FIELD_POSITION = 0;

    private final MetadataRegion region;

    /** In-memory copy of the file size currently recorded in the region. */
    private long fileSize;

    /** Opens the default log-file header region of {@code file}. */
    public LogFileMetadataRegion(File file) {
        this(file, LOG_FILE_HEADER_SPACE_POSITION, LOG_FILE_HEADER_SPACE_SIZE);
    }

    /**
     * Opens a header region of {@code regionSize} bytes at {@code position}
     * and loads the currently recorded file size into the cache.
     */
    public LogFileMetadataRegion(File file, long position, long regionSize) {
        this.region = new MetadataRegion(file, position, regionSize);
        this.fileSize = this.region.readLong(RELATIVE_LOG_FILE_SIZE_FIELD_POSITION);
    }

    /** Resets the recorded file size to zero. */
    public void clear() {
        recordFileSize(0L);
    }

    /**
     * Records {@code size} as the log file size; the underlying write only
     * happens when the value actually changed.
     */
    public void recordFileSize(long size) {
        if (size == fileSize) {
            return; // unchanged — skip the redundant write
        }
        region.writeLong(RELATIVE_LOG_FILE_SIZE_FIELD_POSITION, size);
        fileSize = size;
    }

    /** @return the cached file size */
    public long getFileSize() {
        return fileSize;
    }

    /** Flushes pending metadata writes to stable storage. */
    public void force() {
        region.force();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.