text
stringlengths 2
1.04M
| meta
dict |
|---|---|
module Middleman
class Emberman
VERSION = "0.1.4"
end
end
|
{
"content_hash": "3b8406282b814172a34f90d198128683",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 21,
"avg_line_length": 13.2,
"alnum_prop": 0.6666666666666666,
"repo_name": "minusfive/emberman",
"id": "e345d51c5221b42d64d71d6d73265cbce5b59eef",
"size": "66",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/emberman/version.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "2556"
}
],
"symlink_target": ""
}
|
using base::Time;
// IPC::Logging is allocated as a singleton, so we don't need any kind of
// special retention program.
DISABLE_RUNNABLE_METHOD_REFCOUNT(IPC::Logging);
namespace IPC {
const int kLogSendDelayMs = 100;
// We use a pointer to the function table to avoid any linker dependencies on
// all the traits used as IPC message parameters.
LogFunctionMap* Logging::log_function_map_;
Logging::Logging()
: enabled_(false),
enabled_on_stderr_(false),
queue_invoke_later_pending_(false),
sender_(NULL),
main_thread_(MessageLoop::current()),
consumer_(NULL) {
#if defined(OS_WIN)
// getenv triggers an unsafe warning. Simply check how big of a buffer
// would be needed to fetch the value to see if the enviornment variable is
// set.
size_t requiredSize = 0;
getenv_s(&requiredSize, NULL, 0, "CHROME_IPC_LOGGING");
bool logging_env_var_set = (requiredSize != 0);
#else // !defined(OS_WIN)
bool logging_env_var_set = (getenv("CHROME_IPC_LOGGING") != NULL);
#endif //defined(OS_WIN)
if (logging_env_var_set) {
enabled_ = true;
enabled_on_stderr_ = true;
}
}
Logging::~Logging() {
}
Logging* Logging::GetInstance() {
return Singleton<Logging>::get();
}
void Logging::SetConsumer(Consumer* consumer) {
consumer_ = consumer;
}
void Logging::Enable() {
enabled_ = true;
}
void Logging::Disable() {
enabled_ = false;
}
void Logging::OnSendLogs() {
queue_invoke_later_pending_ = false;
if (!sender_)
return;
Message* msg = new Message(
MSG_ROUTING_CONTROL, IPC_LOGGING_ID, Message::PRIORITY_NORMAL);
WriteParam(msg, queued_logs_);
queued_logs_.clear();
sender_->Send(msg);
}
void Logging::SetIPCSender(IPC::Message::Sender* sender) {
sender_ = sender;
}
void Logging::OnReceivedLoggingMessage(const Message& message) {
std::vector<LogData> data;
void* iter = NULL;
if (!ReadParam(&message, &iter, &data))
return;
for (size_t i = 0; i < data.size(); ++i) {
Log(data[i]);
}
}
void Logging::OnSendMessage(Message* message, const std::string& channel_id) {
if (!Enabled())
return;
if (message->is_reply()) {
LogData* data = message->sync_log_data();
if (!data)
return;
// This is actually the delayed reply to a sync message. Create a string
// of the output parameters, add it to the LogData that was earlier stashed
// with the reply, and log the result.
data->channel = channel_id;
GenerateLogData("", *message, data);
Log(*data);
delete data;
message->set_sync_log_data(NULL);
} else {
// If the time has already been set (i.e. by ChannelProxy), keep that time
// instead as it's more accurate.
if (!message->sent_time())
message->set_sent_time(Time::Now().ToInternalValue());
}
}
void Logging::OnPreDispatchMessage(const Message& message) {
message.set_received_time(Time::Now().ToInternalValue());
}
void Logging::OnPostDispatchMessage(const Message& message,
const std::string& channel_id) {
if (!Enabled() ||
!message.sent_time() ||
!message.received_time() ||
message.dont_log())
return;
LogData data;
GenerateLogData(channel_id, message, &data);
if (MessageLoop::current() == main_thread_) {
Log(data);
} else {
main_thread_->PostTask(FROM_HERE, NewRunnableMethod(
this, &Logging::Log, data));
}
}
void Logging::GetMessageText(uint32 type, std::string* name,
const Message* message,
std::string* params) {
if (!log_function_map_)
return;
LogFunctionMap::iterator it = log_function_map_->find(type);
if (it == log_function_map_->end()) {
if (name) {
*name = "[UNKNOWN MSG ";
*name += base::IntToString(type);
*name += " ]";
}
return;
}
(*it->second)(name, message, params);
}
void Logging::Log(const LogData& data) {
if (consumer_) {
// We're in the browser process.
consumer_->Log(data);
} else {
// We're in the renderer or plugin processes.
if (sender_) {
queued_logs_.push_back(data);
if (!queue_invoke_later_pending_) {
queue_invoke_later_pending_ = true;
MessageLoop::current()->PostDelayedTask(FROM_HERE, NewRunnableMethod(
this, &Logging::OnSendLogs), kLogSendDelayMs);
}
}
}
if (enabled_on_stderr_) {
std::string message_name;
if (data.message_name.empty()) {
message_name = StringPrintf("[unknown type %d]", data.type);
} else {
message_name = data.message_name;
}
fprintf(stderr, "ipc %s %d %s %s %s\n",
data.channel.c_str(),
data.routing_id,
data.flags.c_str(),
message_name.c_str(),
data.params.c_str());
}
}
void GenerateLogData(const std::string& channel, const Message& message,
LogData* data) {
if (message.is_reply()) {
// "data" should already be filled in.
std::string params;
Logging::GetMessageText(data->type, NULL, &message, ¶ms);
if (!data->params.empty() && !params.empty())
data->params += ", ";
data->flags += " DR";
data->params += params;
} else {
std::string flags;
if (message.is_sync())
flags = "S";
if (message.is_reply())
flags += "R";
if (message.is_reply_error())
flags += "E";
std::string params, message_name;
Logging::GetMessageText(message.type(), &message_name, &message, ¶ms);
data->channel = channel;
data->routing_id = message.routing_id();
data->type = message.type();
data->flags = flags;
data->sent = message.sent_time();
data->receive = message.received_time();
data->dispatch = Time::Now().ToInternalValue();
data->params = params;
data->message_name = message_name;
}
}
}
#endif // IPC_MESSAGE_LOG_ENABLED
|
{
"content_hash": "7aad10bb94b6c8dcb3d0b3c83d5cd5a0",
"timestamp": "",
"source": "github",
"line_count": 223,
"max_line_length": 79,
"avg_line_length": 26.390134529147982,
"alnum_prop": 0.616142735768904,
"repo_name": "Crystalnix/house-of-life-chromium",
"id": "138e6acce735d80cda6d819aef7a91c77c2f2a8a",
"size": "6596",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ipc/ipc_logging.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "3418"
},
{
"name": "C",
"bytes": "88445923"
},
{
"name": "C#",
"bytes": "73756"
},
{
"name": "C++",
"bytes": "77228136"
},
{
"name": "Emacs Lisp",
"bytes": "6648"
},
{
"name": "F#",
"bytes": "381"
},
{
"name": "Go",
"bytes": "3744"
},
{
"name": "Java",
"bytes": "11354"
},
{
"name": "JavaScript",
"bytes": "6191433"
},
{
"name": "Objective-C",
"bytes": "4023654"
},
{
"name": "PHP",
"bytes": "97796"
},
{
"name": "Perl",
"bytes": "92217"
},
{
"name": "Python",
"bytes": "5604932"
},
{
"name": "Ruby",
"bytes": "937"
},
{
"name": "Shell",
"bytes": "1234672"
},
{
"name": "Tcl",
"bytes": "200213"
}
],
"symlink_target": ""
}
|
@interface AppDelegate ()
@end
@implementation AppDelegate
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
// Override point for customization after application launch.
return YES;
}
- (void)applicationWillResignActive:(UIApplication *)application {
// Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
// Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
}
- (void)applicationDidEnterBackground:(UIApplication *)application {
// Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
// If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}
- (void)applicationWillEnterForeground:(UIApplication *)application {
// Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
}
- (void)applicationDidBecomeActive:(UIApplication *)application {
// Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}
- (void)applicationWillTerminate:(UIApplication *)application {
// Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}
@end
|
{
"content_hash": "cc4b3ffb09647adbd32099d341607a78",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 281,
"avg_line_length": 53.48571428571429,
"alnum_prop": 0.7863247863247863,
"repo_name": "shaojiankui/RandomCodeView",
"id": "a99a0f09215bd93941d03765919aacd949f7c4ab",
"size": "2040",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "SFVerificationCodeView-Demo/AppDelegate.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "17063"
}
],
"symlink_target": ""
}
|
export class LazySet {
/*
This class is optimized for a very
particular use case.
We often have lots of subscribers on
a stream. We get an array from the
backend, because it's JSON.
Often the only operation we need
on subscribers is to get the length,
which is plenty cheap as an array.
Making an array from a set is cheap
for one stream, but it's expensive
for all N streams at page load.
Once somebody does an operation
where sets are useful, such
as has/add/delete, we convert it over
to a set for a one-time cost.
*/
constructor(vals) {
this.arr = vals;
this.set = undefined;
}
keys() {
if (this.set !== undefined) {
return this.set.keys();
}
return this.arr.values();
}
_make_set() {
if (this.set !== undefined) {
return;
}
this.set = new Set(this.arr);
this.arr = undefined;
}
get size() {
if (this.set !== undefined) {
return this.set.size;
}
return this.arr.length;
}
map(f) {
return Array.from(this.keys(), f);
}
has(v) {
this._make_set();
const val = this._clean(v);
return this.set.has(val);
}
add(v) {
this._make_set();
const val = this._clean(v);
this.set.add(val);
}
delete(v) {
this._make_set();
const val = this._clean(v);
return this.set.delete(val);
}
_clean(v) {
if (typeof v !== "number") {
blueslip.error("not a number");
return parseInt(v, 10);
}
return v;
}
}
|
{
"content_hash": "148fa2be42764e370056f1a0397d6ee7",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 45,
"avg_line_length": 21.703703703703702,
"alnum_prop": 0.5011376564277589,
"repo_name": "brainwane/zulip",
"id": "4b765f1dc3d2e33f9cbe8b47c1cea486ad23a9d6",
"size": "1758",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "static/js/lazy_set.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "423578"
},
{
"name": "Emacs Lisp",
"bytes": "158"
},
{
"name": "HTML",
"bytes": "647926"
},
{
"name": "JavaScript",
"bytes": "2886792"
},
{
"name": "Pascal",
"bytes": "1113"
},
{
"name": "Perl",
"bytes": "398747"
},
{
"name": "Puppet",
"bytes": "90558"
},
{
"name": "Python",
"bytes": "6000548"
},
{
"name": "Ruby",
"bytes": "249744"
},
{
"name": "Shell",
"bytes": "110849"
},
{
"name": "TypeScript",
"bytes": "9543"
}
],
"symlink_target": ""
}
|
import { combineReducers } from 'redux';
// import reducers here
import profile from './modules/profile'
export default combineReducers({
// combine reducers here, which are imported
profile
});
|
{
"content_hash": "1e17efc5df505e8bc96dc610cd8e8bf6",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 46,
"avg_line_length": 25.75,
"alnum_prop": 0.7233009708737864,
"repo_name": "manikandan-ramar/react-redux-boilerplate",
"id": "81308d0056001ca4b0bc36aec2fa99b441a2a922",
"size": "206",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/redux/reducer.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1023"
},
{
"name": "HTML",
"bytes": "270"
},
{
"name": "JavaScript",
"bytes": "10193"
}
],
"symlink_target": ""
}
|
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.android_webview.test;
import android.app.Activity;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.FrameLayout;
import org.chromium.android_webview.AwContentsClient;
import org.chromium.base.test.util.CallbackHelper;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
* This class is a AwContentsClient for full screen video test.
*/
public class FullScreenVideoTestAwContentsClient extends TestAwContentsClient {
public static final long WAITING_SECONDS = 20L;
private CallbackHelper mOnShowCustomViewCallbackHelper = new CallbackHelper();
private CallbackHelper mOnHideCustomViewCallbackHelper = new CallbackHelper();
private CallbackHelper mOnUnhandledKeyUpEventCallbackHelper = new CallbackHelper();
private Runnable mOnHideCustomViewRunnable;
private final Activity mActivity;
private final boolean mAllowHardwareAcceleration;
private View mCustomView;
private AwContentsClient.CustomViewCallback mExitCallback;
public FullScreenVideoTestAwContentsClient(Activity activity,
boolean allowHardwareAcceleration) {
mActivity = activity;
mAllowHardwareAcceleration = allowHardwareAcceleration;
}
@Override
public void onShowCustomView(View view, AwContentsClient.CustomViewCallback callback) {
mCustomView = view;
if (!mAllowHardwareAcceleration) {
// The hardware emulation in the testing infrastructure is not perfect, and this is
// required to work-around some of the limitations.
mCustomView.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
}
mExitCallback = callback;
mActivity.getWindow().setFlags(
WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
mActivity.getWindow().addContentView(view,
new FrameLayout.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT,
Gravity.CENTER));
mOnShowCustomViewCallbackHelper.notifyCalled();
}
/**
* Sets a task that will be run when {@link #onHideCustomView()} is invoked.
*/
public void setOnHideCustomViewRunnable(Runnable runnable) {
mOnHideCustomViewRunnable = runnable;
}
@Override
public void onHideCustomView() {
mActivity.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
mOnHideCustomViewCallbackHelper.notifyCalled();
if (mOnHideCustomViewRunnable != null) {
mOnHideCustomViewRunnable.run();
}
}
public AwContentsClient.CustomViewCallback getExitCallback() {
return mExitCallback;
}
@Override
public void onUnhandledKeyEvent(KeyEvent event) {
if (event.getAction() == KeyEvent.ACTION_UP) {
mOnUnhandledKeyUpEventCallbackHelper.notifyCalled();
}
}
public boolean wasOnUnhandledKeyUpEventCalled() {
return mOnUnhandledKeyUpEventCallbackHelper.getCallCount() > 0;
}
public View getCustomView() {
return mCustomView;
}
public boolean wasCustomViewShownCalled() {
return mOnShowCustomViewCallbackHelper.getCallCount() > 0;
}
public void waitForCustomViewShown() throws TimeoutException {
mOnShowCustomViewCallbackHelper.waitForCallback(0, 1, WAITING_SECONDS, TimeUnit.SECONDS);
}
public void waitForCustomViewHidden() throws TimeoutException {
mOnHideCustomViewCallbackHelper.waitForCallback(0, 1, WAITING_SECONDS, TimeUnit.SECONDS);
}
}
|
{
"content_hash": "c3056a24155ea9f3922ef0b373833ca5",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 97,
"avg_line_length": 36.3394495412844,
"alnum_prop": 0.7190103509214845,
"repo_name": "scheib/chromium",
"id": "c7fd841fe4d7b5e4ba0310ffd1562bc63ed35fae",
"size": "3961",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "android_webview/javatests/src/org/chromium/android_webview/test/FullScreenVideoTestAwContentsClient.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using IronArc.HardwareDefinitionGenerator.Models;
using Newtonsoft.Json;
namespace IronArc.HardwareDefinitionGenerator
{
public static class Generator
{
public static string GenerateHardwareDefinition(string version)
{
var hardwareTypes = GetHardwareTypes();
IEnumerable<HardwareDevice> devices =
hardwareTypes.Select(t => (HardwareDevice)Activator.CreateInstance(t, new Guid())).ToList();
IEnumerable<Models.HardwareDevice> deviceDefinitions =
devices.Select(d => d.Definition);
var definition = new Models.HardwareDefinition(version, deviceDefinitions.ToList());
foreach (HardwareDevice device in devices)
{
device.Dispose();
}
return JsonConvert.SerializeObject(definition);
}
internal static HardwareCall ParseHardwareCall(string callDescription)
{
int argumentsStart = callDescription.IndexOf('(');
var parameters = callDescription
.Substring(argumentsStart)
.TrimStart('(')
.TrimEnd(')')
.Split(',')
.Select(param => param.Trim().Split(' '))
.Select(param => new HardwareCallParameter(
param[1],
MapHardwareCallTypeToCixType(param[0].TrimEnd('*'), param[0].Count(c => c == '*'))
))
.ToList();
var typeAndName = callDescription
.Substring(0, argumentsStart)
.Split(' ');
var returnType = typeAndName[0];
var deviceAndCallName = typeAndName[2].Split(new[] {"::"}, StringSplitOptions.None);
var deviceName = deviceAndCallName[0];
var callName = deviceAndCallName[1];
return new HardwareCall(MapHardwareCallTypeToCixType(returnType, 0),
callName,
parameters);
}
private static HardwareCallDataType MapHardwareCallTypeToCixType(string typeName, int pointerLevel) =>
typeName == "ptr"
? DefaultDataTypes.Pointer
: new HardwareCallDataType(MapHardwareCallTypeNameToCixTypeName(typeName), pointerLevel);
private static string MapHardwareCallTypeNameToCixTypeName(string typeName)
{
switch (typeName)
{
case "uint8": return "byte";
case "uint16": return "ushort";
case "uint32": return "uint";
case "uint64": return "ulong";
case "int8": return "sbyte";
case "int16": return "short";
case "int32": return "int";
case "int64": return "long";
case "ptr": return "void";
default: return typeName;
}
}
private static IEnumerable<Type> GetHardwareTypes()
{
// https://stackoverflow.com/a/949285/2709212
var ironArc = Assembly.GetExecutingAssembly();
var ironArcTypes = ironArc.GetTypes();
const string namespaceName = "IronArc.Hardware";
return ironArcTypes.Where(t => t.Namespace == namespaceName)
.Where(t => !t.IsInterface);
}
}
}
|
{
"content_hash": "bc9808f49d024a131a18463f7e1aec8c",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 110,
"avg_line_length": 37.58064516129032,
"alnum_prop": 0.5699570815450644,
"repo_name": "Celarix/IronArc",
"id": "1e547deb569d91a1fae12518cd8d473133a01578",
"size": "3497",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "IronArc/IronArc/HardwareDefinitionGenerator/Generator.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "109659"
}
],
"symlink_target": ""
}
|
module Makefile where
import Text.PrettyPrint
import Data.String.Utils
import Prelude hiding(all)
($\$) t = ((t <> text "\n") $$)
tab = text "\t"
all = text "all"
bench = text "bench"
clean = text "clean"
rm args = (text "rm -rf" <+> hsep args)
phony = text ".PHONY"
cc args = (text "$(CC)" <+> hsep args)
flags = text "$(FLAGS)"
benchFlags = text "$(BENCH_FLAGS)"
target = text "$(TARGET)"
inc = text "$(INC)"
lib = text "$(LIB)"
deps = text "$(DEPS)"
defs = text "$(DEFINES)"
dSYM = text ".dSYM"
i = (text "-I" <+>)
o = (text "-o" <+>)
parent = text ".."
generateMakefile :: [String] ->
String -> String -> String -> String -> String -> String -> Doc
generateMakefile classFiles progName compiler ccFlags incPath defines libs =
decl "CC" [compiler]
$$
decl "TARGET" [progName]
$$
decl "INC" [incPath]
$$
decl "LIB" [libs]
$$
decl "FLAGS" [ccFlags]
$$
decl "BENCH_FLAGS" [replace "-ggdb" "-O3" ccFlags]
$$
decl "DEFINES" [defines]
$$
decl "DEPS" ("shared.c" : classFiles)
$\$
rule all target
empty
$\$
rule target deps
(cc [flags, i inc, i parent, deps, lib, lib, defs, o target])
$\$
rule bench deps
(cc [benchFlags, i inc, i parent, deps, lib, lib, defs, o target])
$\$
rule clean empty
(rm [target, target <> dSYM])
$\$
rule phony (all <+> bench <+> clean)
empty
where
decl var rhs = text var <> equals <> hsep (map text rhs)
rule target deps cmd
| isEmpty cmd = target <> colon <+> deps
| otherwise = target <> colon <+> deps $$
tab <> cmd
|
{
"content_hash": "8aa6b74a53bc34095dd7c44fc96953d3",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 76,
"avg_line_length": 24.80597014925373,
"alnum_prop": 0.5451263537906137,
"repo_name": "parapluu/encore",
"id": "a6c3569dbea7ba03d48c47f25a64c2f6761604aa",
"size": "1663",
"binary": false,
"copies": "2",
"ref": "refs/heads/development",
"path": "src/front/Makefile.hs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "464743"
},
{
"name": "C++",
"bytes": "1247187"
},
{
"name": "DTrace",
"bytes": "6612"
},
{
"name": "Emacs Lisp",
"bytes": "42575"
},
{
"name": "Haskell",
"bytes": "649301"
},
{
"name": "Lua",
"bytes": "11873"
},
{
"name": "Makefile",
"bytes": "4849"
},
{
"name": "Shell",
"bytes": "20815"
},
{
"name": "YASnippet",
"bytes": "4393"
}
],
"symlink_target": ""
}
|
require 'addressable/uri'
require 'google/api_client/reference'
require 'uuidtools'
module Google
class APIClient
##
# Helper class to contain a response to an individual batched call.
#
# @api private
class BatchedCallResponse
# @return [String] UUID of the call
attr_reader :call_id
# @return [Fixnum] HTTP status code
attr_accessor :status
# @return [Hash] HTTP response headers
attr_accessor :headers
# @return [String] HTTP response body
attr_accessor :body
##
# Initialize the call response
#
# @param [String] call_id
# UUID of the original call
# @param [Fixnum] status
# HTTP status
# @param [Hash] headers
# HTTP response headers
# @param [#read, #to_str] body
# Response body
def initialize(call_id, status = nil, headers = nil, body = nil)
@call_id, @status, @headers, @body = call_id, status, headers, body
end
end
# Wraps multiple API calls into a single over-the-wire HTTP request.
#
# @example
#
# client = Google::APIClient.new
# urlshortener = client.discovered_api('urlshortener')
# batch = Google::APIClient::BatchRequest.new do |result|
# puts result.data
# end
#
# batch.add(:api_method => urlshortener.url.insert, :body_object => { 'longUrl' => 'http://example.com/foo' })
# batch.add(:api_method => urlshortener.url.insert, :body_object => { 'longUrl' => 'http://example.com/bar' })
#
# client.execute(batch)
#
class BatchRequest < Request
BATCH_BOUNDARY = "-----------RubyApiBatchRequest".freeze
# @api private
# @return [Array<(String,Google::APIClient::Request,Proc)] List of API calls in the batch
attr_reader :calls
##
# Creates a new batch request.
#
# @param [Hash] options
# Set of options for this request.
# @param [Proc] block
# Callback for every call's response. Won't be called if a call defined
# a callback of its own.
#
# @return [Google::APIClient::BatchRequest]
# The constructed object.
#
# @yield [Google::APIClient::Result]
# block to be called when result ready
def initialize(options = {}, &block)
@calls = []
@global_callback = block if block_given?
@last_auto_id = 0
# TODO(sgomes): Use SecureRandom.uuid, drop UUIDTools when we drop 1.8
@base_id = UUIDTools::UUID.random_create.to_s
options[:uri] ||= 'https://www.googleapis.com/batch'
options[:http_method] ||= 'POST'
super options
end
##
# Add a new call to the batch request.
# Each call must have its own call ID; if not provided, one will
# automatically be generated, avoiding collisions. If duplicate call IDs
# are provided, an error will be thrown.
#
# @param [Hash, Google::APIClient::Request] call
# the call to be added.
# @param [String] call_id
# the ID to be used for this call. Must be unique
# @param [Proc] block
# callback for this call's response.
#
# @return [Google::APIClient::BatchRequest]
# the BatchRequest, for chaining
#
# @yield [Google::APIClient::Result]
# block to be called when result ready
def add(call, call_id = nil, &block)
unless call.kind_of?(Google::APIClient::Reference)
call = Google::APIClient::Reference.new(call)
end
call_id ||= new_id
if @calls.assoc(call_id)
raise BatchError,
'A call with this ID already exists: %s' % call_id
end
callback = block_given? ? block : @global_callback
@calls << [call_id, call, callback]
return self
end
##
# Processes the HTTP response to the batch request, issuing callbacks.
#
# @api private
#
# @param [Faraday::Response] response
# the HTTP response.
def process_http_response(response)
content_type = find_header('Content-Type', response.headers)
m = /.*boundary=(.+)/.match(content_type)
if m
boundary = m[1]
parts = response.body.split(/--#{Regexp.escape(boundary)}/)
parts = parts[1...-1]
parts.each do |part|
call_response = deserialize_call_response(part)
_, call, callback = @calls.assoc(call_response.call_id)
result = Google::APIClient::Result.new(call, call_response)
callback.call(result) if callback
end
end
Google::APIClient::Result.new(self, response)
end
##
# Return the request body for the BatchRequest's HTTP request.
#
# @api private
#
# @return [String]
# the request body.
def to_http_request
if @calls.nil? || @calls.empty?
raise BatchError, 'Cannot make an empty batch request'
end
parts = @calls.map {|(call_id, call, callback)| serialize_call(call_id, call)}
build_multipart(parts, 'multipart/mixed', BATCH_BOUNDARY)
super
end
protected
##
# Helper method to find a header from its name, regardless of case.
#
# @api private
#
# @param [String] name
# the name of the header to find.
# @param [Hash] headers
# the hash of headers and their values.
#
# @return [String]
# the value of the desired header.
def find_header(name, headers)
_, header = headers.detect do |h, v|
h.downcase == name.downcase
end
return header
end
##
# Create a new call ID. Uses an auto-incrementing, conflict-avoiding ID.
#
# @api private
#
# @return [String]
# the new, unique ID.
def new_id
@last_auto_id += 1
while @calls.assoc(@last_auto_id)
@last_auto_id += 1
end
return @last_auto_id.to_s
end
##
# Convert a Content-ID header value to an id. Presumes the Content-ID
# header conforms to the format that id_to_header() returns.
#
# @api private
#
# @param [String] header
# Content-ID header value.
#
# @return [String]
# The extracted ID value.
def header_to_id(header)
if !header.start_with?('<') || !header.end_with?('>') ||
!header.include?('+')
raise BatchError, 'Invalid value for Content-ID: "%s"' % header
end
base, call_id = header[1...-1].split('+')
return Addressable::URI.unencode(call_id)
end
##
# Auxiliary method to split the headers from the body in an HTTP response.
#
# @api private
#
# @param [String] response
# the response to parse.
#
# @return [Array<Hash>, String]
# the headers and the body, separately.
def split_headers_and_body(response)
headers = {}
payload = response.lstrip
while payload
line, payload = payload.split("\n", 2)
line.sub!(/\s+\z/, '')
break if line.empty?
match = /\A([^:]+):\s*/.match(line)
if match
headers[match[1]] = match.post_match
else
raise BatchError, 'Invalid header line in response: %s' % line
end
end
return headers, payload
end
##
# Convert a single batched response into a BatchedCallResponse object.
#
# @api private
#
# @param [String] call_response
# the request to deserialize.
#
# @return [Google::APIClient::BatchedCallResponse]
# the parsed and converted response.
def deserialize_call_response(call_response)
outer_headers, outer_body = split_headers_and_body(call_response)
status_line, payload = outer_body.split("\n", 2)
protocol, status, reason = status_line.split(' ', 3)
headers, body = split_headers_and_body(payload)
content_id = find_header('Content-ID', outer_headers)
call_id = header_to_id(content_id)
return BatchedCallResponse.new(call_id, status.to_i, headers, body)
end
##
# Serialize a single batched call for assembling the multipart message
#
# @api private
#
# @param [Google::APIClient::Request] call
# the call to serialize.
#
# @return [Faraday::UploadIO]
# the serialized request
def serialize_call(call_id, call)
method, uri, headers, body = call.to_http_request
request = "#{method.to_s.upcase} #{Addressable::URI.parse(uri).request_uri} HTTP/1.1"
headers.each do |header, value|
request << "\r\n%s: %s" % [header, value]
end
if body
# TODO - CompositeIO if body is a stream
request << "\r\n\r\n"
if body.respond_to?(:read)
request << body.read
else
request << body.to_s
end
end
Faraday::UploadIO.new(StringIO.new(request), 'application/http', 'ruby-api-request', 'Content-ID' => id_to_header(call_id))
end
##
# Convert an id to a Content-ID header value.
#
# @api private
#
# @param [String] call_id
# identifier of individual call.
#
# @return [String]
# A Content-ID header with the call_id encoded into it. A UUID is
# prepended to the value because Content-ID headers are supposed to be
# universally unique.
def id_to_header(call_id)
return '<%s+%s>' % [@base_id, Addressable::URI.encode(call_id)]
end
end
end
end
|
{
"content_hash": "f4932246410aff3560dce170f86ae3d4",
"timestamp": "",
"source": "github",
"line_count": 312,
"max_line_length": 131,
"avg_line_length": 31.73076923076923,
"alnum_prop": 0.5648484848484848,
"repo_name": "backupify/google-api-ruby-client",
"id": "7358baaec5201f26bdc2aa56ccd8f8aed0e489c3",
"size": "10477",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/google/api_client/batch.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ruby",
"bytes": "289439"
}
],
"symlink_target": ""
}
|
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/root"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical">
<android.support.v7.widget.Toolbar
android:id="@+id/toolbar"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
<ScrollView
android:layout_width="match_parent"
android:layout_height="wrap_content">
<LinearLayout
android:id="@+id/app_detail_main"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="vertical">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:padding="@dimen/dimen_8_cardview_margin">
<ImageView
android:id="@+id/icon"
android:layout_width="@dimen/dimen_48_icon_click_area"
android:layout_height="@dimen/dimen_48_icon_click_area"
android:layout_marginRight="@dimen/dimen_8_cardview_margin" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="vertical">
<TextView
android:id="@+id/app_name"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:textColor="@color/google_blue_500"
android:textSize="@dimen/text_size_20_appbar" />
<TextView
android:id="@+id/package_name"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:textSize="@dimen/text_size_16_subtitle" />
</LinearLayout>
</LinearLayout>
</LinearLayout>
</ScrollView>
</LinearLayout>
|
{
"content_hash": "8843dd3db69283584b4b32520f035477",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 82,
"avg_line_length": 38.46551724137931,
"alnum_prop": 0.5437023756163155,
"repo_name": "binkery/allinone",
"id": "f410a72e7c35b9c6afac39cf209a9e9e88a67ba8",
"size": "2231",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "allinone/app/src/main/res/layout/demo_pm_application_detail.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "517831"
},
{
"name": "Kotlin",
"bytes": "4875"
}
],
"symlink_target": ""
}
|
/**
* A link to a certain page, an anchor tag
*/
import styled from 'styled-components';
const A = styled.a`
color: #666;
&:hover {
text-decoration: none;
color: #777;
}
`;
export default A;
|
{
"content_hash": "054e5f4ef3188b8fddd84a33af731b26",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 42,
"avg_line_length": 13.1875,
"alnum_prop": 0.6066350710900474,
"repo_name": "kevinnorris/bookTrading",
"id": "bff54ba4e70d2220c42bb368b4e2dd04d1723656",
"size": "211",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/components/A/index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "1787"
},
{
"name": "HTML",
"bytes": "9659"
},
{
"name": "JavaScript",
"bytes": "225726"
}
],
"symlink_target": ""
}
|
const fetch = require(`./fetch`)
const normalize = require(`./normalize`)
const typePrefix = `wordpress__`
const refactoredEntityTypes = {
post: `${typePrefix}POST`,
page: `${typePrefix}PAGE`,
tag: `${typePrefix}TAG`,
category: `${typePrefix}CATEGORY`,
}
/* If true, will output many console logs. */
let _verbose
let _siteURL
let _useACF = true
let _hostingWPCOM
let _auth
let _perPage
exports.sourceNodes = async (
{ boundActionCreators, getNode, store, cache },
{
baseUrl,
protocol,
hostingWPCOM,
useACF = true,
auth = {},
verboseOutput,
perPage = 100,
searchAndReplaceContentUrls = {},
}
) => {
const { createNode } = boundActionCreators
_verbose = verboseOutput
_siteURL = `${protocol}://${baseUrl}`
_useACF = useACF
_hostingWPCOM = hostingWPCOM
_auth = auth
_perPage = perPage
let entities = await fetch({
baseUrl,
_verbose,
_siteURL,
_useACF,
_hostingWPCOM,
_auth,
_perPage,
typePrefix,
refactoredEntityTypes,
})
// Normalize data & create nodes
// Remove ACF key if it's not an object
entities = normalize.normalizeACF(entities)
// Creates entities from object collections of entities
entities = normalize.normalizeEntities(entities)
// Standardizes ids & cleans keys
entities = normalize.standardizeKeys(entities)
// Converts to use only GMT dates
entities = normalize.standardizeDates(entities)
// Lifts all "rendered" fields to top-level.
entities = normalize.liftRenderedField(entities)
// Exclude entities of unknown shape
entities = normalize.excludeUnknownEntities(entities)
// Creates Gatsby IDs for each entity
entities = normalize.createGatsbyIds(entities)
// Creates links between authors and user entities
entities = normalize.mapAuthorsToUsers(entities)
// Creates links between posts and tags/categories.
entities = normalize.mapPostsToTagsCategories(entities)
// Creates links between tags/categories and taxonomies.
entities = normalize.mapTagsCategoriesToTaxonomies(entities)
// Creates links from entities to media nodes
entities = normalize.mapEntitiesToMedia(entities)
// Downloads media files and removes "sizes" data as useless in Gatsby context.
entities = await normalize.downloadMediaFiles({
entities,
store,
cache,
createNode,
_auth,
})
// Search and replace Content Urls
entities = normalize.searchReplaceContentUrls({
entities,
searchAndReplaceContentUrls,
})
// creates nodes for each entry
normalize.createNodesFromEntities({ entities, createNode })
return
}
|
{
"content_hash": "8f855a565069d75ade2f8aa86599da9f",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 81,
"avg_line_length": 24.38317757009346,
"alnum_prop": 0.7121502491376006,
"repo_name": "danielfarrell/gatsby",
"id": "2d8baebbd254151d7489d1d3b9a8dd0b29a1a840",
"size": "2609",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "packages/gatsby-source-wordpress/src/gatsby-node.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "53694"
},
{
"name": "HTML",
"bytes": "118459"
},
{
"name": "JavaScript",
"bytes": "977896"
},
{
"name": "Shell",
"bytes": "834"
}
],
"symlink_target": ""
}
|
<b>Small example project using HapiJS (NodeJS back-end) and AngularJS (front-end) </b>
<br/>
<i>Presentation slides</i>
<a target="_blank" href="http://slides.com/danielaborges/angularjs-lisbonbigapps">http://slides.com/danielaborges/angularjs-lisbonbigapps</a>
<b>Installation Steps</b>
<ol>
<li>Instal <a target="_blank" href="http://nodejs.org/">NodeJS</a></li>
<li>Run npm install in the project folder</li>
<li>Go to http://localhost:3000</li>
<li>'Rock' with AngularJS & HapiJS!</li>
</ol>
|
{
"content_hash": "ef81ad0846d3c250f96cdae67c96f411",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 141,
"avg_line_length": 33.86666666666667,
"alnum_prop": 0.7007874015748031,
"repo_name": "sericaia/angular-hapi-lisbonbigapps-tutorial",
"id": "4574c08d76dbafe227b194f064abcbc489351fff",
"size": "508",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "366"
},
{
"name": "JavaScript",
"bytes": "11786"
}
],
"symlink_target": ""
}
|
#include <string>
#include <vector>
#include <openssl/err.h>
#include <openssl/ssl.h>
#if !defined(OPENSSL_WINDOWS)
bool Client(const std::vector<std::string> &args);
bool Server(const std::vector<std::string> &args);
#endif
bool DoPKCS12(const std::vector<std::string> &args);
bool Speed(const std::vector<std::string> &args);
static void usage(const char *name) {
printf("Usage: %s [speed|client|server|pkcs12]\n", name);
}
int main(int argc, char **argv) {
std::string tool;
if (argc >= 2) {
tool = argv[1];
}
SSL_library_init();
std::vector<std::string> args;
for (int i = 2; i < argc; i++) {
args.push_back(argv[i]);
}
if (tool == "speed") {
return !Speed(args);
#if !defined(OPENSSL_WINDOWS)
} else if (tool == "s_client" || tool == "client") {
return !Client(args);
} else if (tool == "s_server" || tool == "server") {
return !Server(args);
#endif
} else if (tool == "pkcs12") {
return !DoPKCS12(args);
} else {
usage(argv[0]);
return 1;
}
}
|
{
"content_hash": "d3d39bceff967e7ed7111c8f4cd741f7",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 59,
"avg_line_length": 21.229166666666668,
"alnum_prop": 0.6084396467124632,
"repo_name": "mxOBS/deb-pkg_trusty_chromium-browser",
"id": "a57cd166be0a24ef9c7d6b8ba206cf39cb088934",
"size": "1788",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/boringssl/src/tool/tool.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "8402"
},
{
"name": "Assembly",
"bytes": "230130"
},
{
"name": "Batchfile",
"bytes": "34966"
},
{
"name": "C",
"bytes": "12435900"
},
{
"name": "C++",
"bytes": "264378706"
},
{
"name": "CMake",
"bytes": "27829"
},
{
"name": "CSS",
"bytes": "795726"
},
{
"name": "Dart",
"bytes": "74976"
},
{
"name": "Emacs Lisp",
"bytes": "2360"
},
{
"name": "Go",
"bytes": "31783"
},
{
"name": "Groff",
"bytes": "5283"
},
{
"name": "HTML",
"bytes": "19491230"
},
{
"name": "Java",
"bytes": "7637875"
},
{
"name": "JavaScript",
"bytes": "12723911"
},
{
"name": "LLVM",
"bytes": "1169"
},
{
"name": "Logos",
"bytes": "6893"
},
{
"name": "Lua",
"bytes": "14392"
},
{
"name": "Makefile",
"bytes": "208315"
},
{
"name": "Objective-C",
"bytes": "1460032"
},
{
"name": "Objective-C++",
"bytes": "7760068"
},
{
"name": "PLpgSQL",
"bytes": "175360"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "427212"
},
{
"name": "Python",
"bytes": "11447382"
},
{
"name": "Ragel in Ruby Host",
"bytes": "104846"
},
{
"name": "Scheme",
"bytes": "10604"
},
{
"name": "Shell",
"bytes": "1208350"
},
{
"name": "Standard ML",
"bytes": "4965"
},
{
"name": "nesC",
"bytes": "18335"
}
],
"symlink_target": ""
}
|
<?php
require_once 'helper/rst_dummy_directives.php';
/**
* Test suite for class.
*
* @package Document
* @subpackage Tests
*/
class ezcDocumentRstXhtmlBodyVisitorTests extends ezcTestCase
{
protected static $testDocuments = null;
public static function suite()
{
return new PHPUnit_Framework_TestSuite( __CLASS__ );
}
public static function getTestDocuments()
{
if ( self::$testDocuments === null )
{
// Get a list of all test files from the respektive folder
$testFiles = glob( dirname( __FILE__ ) . '/files/rst/xhtml_body/s_*.txt' );
// Create array with the test file and the expected result file
foreach ( $testFiles as $file )
{
self::$testDocuments[] = array(
$file,
substr( $file, 0, -3 ) . 'html'
);
}
}
return self::$testDocuments;
return array_slice( self::$testDocuments, 3, 1 );
}
/**
* @dataProvider getTestDocuments
*/
public function testParseRstFile( $from, $to )
{
if ( !is_file( $to ) )
{
$this->markTestSkipped( "Comparision file '$to' not yet defined." );
}
$document = new ezcDocumentRst();
$document->options->errorReporting = E_PARSE | E_ERROR | E_WARNING;
$document->options->xhtmlVisitor = 'ezcDocumentRstXhtmlBodyVisitor';
$document->registerDirective( 'my_custom_directive', 'ezcDocumentTestDummyXhtmlDirective' );
$document->registerDirective( 'user', 'ezcDocumentTestDummyXhtmlDirective' );
$document->registerDirective( 'book', 'ezcDocumentTestDummyXhtmlDirective' );
$document->registerDirective( 'function', 'ezcDocumentTestDummyXhtmlDirective' );
$document->registerDirective( 'replace', 'ezcDocumentTestDummyXhtmlDirective' );
$document->loadFile( $from );
$html = $document->getAsXhtml();
$xml = $html->save();
// Store test file, to have something to compare on failure
$tempDir = $this->createTempDir( 'html_body_' ) . '/';
file_put_contents( $tempDir . basename( $to ), $xml );
$this->assertEquals(
file_get_contents( $to ),
$xml,
'Document not visited as expected.'
);
// Remove tempdir, when nothing failed.
$this->removeTempDir();
}
public function testDocumentHeaderLevel()
{
$from = dirname( __FILE__ ) . '/files/rst/xhtml_body/s_002_titles.txt';
$to = dirname( __FILE__ ) . '/files/rst/xhtml_body/s_002_titles_header_level.html';
$document = new ezcDocumentRst();
$document->options->errorReporting = E_PARSE | E_ERROR | E_WARNING;
$document->options->xhtmlVisitor = 'ezcDocumentRstXhtmlBodyVisitor';
$document->options->xhtmlVisitorOptions->headerLevel = 3;
$document->registerDirective( 'my_custom_directive', 'ezcDocumentTestDummyXhtmlDirective' );
$document->registerDirective( 'user', 'ezcDocumentTestDummyXhtmlDirective' );
$document->registerDirective( 'book', 'ezcDocumentTestDummyXhtmlDirective' );
$document->registerDirective( 'function', 'ezcDocumentTestDummyXhtmlDirective' );
$document->registerDirective( 'replace', 'ezcDocumentTestDummyXhtmlDirective' );
$document->loadFile( $from );
$html = $document->getAsXhtml();
$html->options->xmlHeader = true;
$xml = $html->save();
// Store test file, to have something to compare on failure
$tempDir = $this->createTempDir( 'html_' ) . '/';
file_put_contents( $tempDir . basename( $to ), $xml );
$this->assertEquals(
file_get_contents( $to ),
$xml,
'Document not visited as expected.'
);
// Remove tempdir, when nothing failed.
$this->removeTempDir();
}
}
?>
|
{
"content_hash": "edd863f67318adc0e6e308e30b45a760",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 100,
"avg_line_length": 33.771186440677965,
"alnum_prop": 0.5959849435382685,
"repo_name": "gewthen/ezcomponents",
"id": "c8ea7344228c2b0461e01b72990caa76e9e6a605",
"size": "4217",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Document/tests/document_rst_visitor_xhtml_body_tests.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import _ from 'underscore'
import { Component, OnInit, Inject } from '@angular/core'
import { UserService } from '../../upgrade/ajs-upgraded-providers'
import { Feed, Service, FeedService } from '@ngageoint/mage.web-core-lib/feed'
import { StateService } from '@uirouter/angular'
import { MatDialog } from '@angular/material/dialog'
import { forkJoin } from 'rxjs'
import { AdminFeedDeleteComponent } from './admin-feed/admin-feed-delete/admin-feed-delete.component'
import { AdminServiceDeleteComponent } from './admin-service/admin-service-delete/admin-service-delete.component'
import { AdminBreadcrumb } from '../admin-breadcrumb/admin-breadcrumb.model'
@Component({
selector: 'admin-feeds',
templateUrl: './admin-feeds.component.html',
styleUrls: ['./admin-feeds.component.scss']
})
export class AdminFeedsComponent implements OnInit {
breadcrumbs: AdminBreadcrumb[] = [{
title: 'Feeds',
icon: 'rss_feed'
}]
services: Service[] = []
private _services: Service[] = []
private _feeds: Feed[] = []
feeds: Feed[] = []
feedSearch = ''
serviceSearch = ''
feedPage = 0
servicePage = 0
itemsPerPage = 10
hasServiceDeletePermission: boolean
hasFeedCreatePermission: boolean
hasFeedEditPermission: boolean
hasFeedDeletePermission: boolean
constructor(
private feedService: FeedService,
private stateService: StateService,
public dialog: MatDialog,
@Inject(UserService) userService: { myself: { role: {permissions: Array<string>}}}
) {
this.hasServiceDeletePermission = _.contains(userService.myself.role.permissions, 'FEEDS_CREATE_SERVICE')
this.hasFeedCreatePermission = _.contains(userService.myself.role.permissions, 'FEEDS_CREATE_FEED')
this.hasFeedEditPermission = _.contains(userService.myself.role.permissions, 'FEEDS_CREATE_FEED')
this.hasFeedDeletePermission = _.contains(userService.myself.role.permissions, 'FEEDS_CREATE_FEED')
}
ngOnInit(): void {
forkJoin([
this.feedService.fetchServices(),
this.feedService.fetchAllFeeds()
]).subscribe(result => {
this._services = result[0].sort(this.sortByTitle)
this.services = this._services.slice()
this._feeds = result[1].sort(this.sortByTitle)
this.feeds = this._feeds.slice()
})
}
onFeedSearchChange(): void {
this.feedPage = 0
this.updateFilteredFeeds()
}
onServiceSearchChange(): void {
this.servicePage = 0
this.updateFilteredServices()
}
clearFeedSearch(): void {
this.feedPage = 0
this.feedSearch = ''
this.feeds = this._feeds.slice()
}
clearServiceSearch(): void {
this.servicePage = 0
this.serviceSearch = ''
this.services = this._services.slice()
}
updateFilteredFeeds(): void {
this.feeds = this._feeds.filter(this.filterByTitleAndSummary(this.feedSearch))
}
updateFilteredServices(): void {
this.services = this._services.filter(this.filterByTitleAndSummary(this.serviceSearch))
}
goToService(service: Service): void {
this.stateService.go('admin.service', { serviceId: service.id })
}
goToFeed(feed: Feed): void {
this.stateService.go('admin.feed', { feedId: feed.id })
}
newFeed(): void {
this.stateService.go('admin.feedCreate')
}
editFeed(feed: Feed): void {
// TODO edit feed, and edit service
}
deleteService($event: MouseEvent, service: Service): void {
$event.stopPropagation()
this.dialog.open(AdminServiceDeleteComponent, {
data: service,
autoFocus: false,
disableClose: true
}).afterClosed().subscribe(result => {
if (result === true) {
this.feedService.deleteService(service).subscribe(() => {
this.services = this.services.filter(s => s.id !== service.id)
this._feeds = this._feeds.filter(feed => feed.service === service.id)
this.updateFilteredFeeds()
this.updateFilteredServices()
});
}
});
}
deleteFeed($event: MouseEvent, feed: Feed): void {
$event.stopPropagation()
this.dialog.open(AdminFeedDeleteComponent, {
data: feed,
autoFocus: false,
disableClose: true
}).afterClosed().subscribe(result => {
if (result === true) {
this.feedService.deleteFeed(feed).subscribe(() => {
this._feeds = this._feeds.filter(f => f.id !== feed.id)
this.updateFilteredFeeds()
});
}
});
}
private sortByTitle(a: {title: string}, b: {title: string}): number {
return a.title < b.title ? -1 : 1
}
private filterByTitleAndSummary(text: string): (item: {title: string, summary?: string | null}) => boolean {
return (item: { title: string, summary?: string | null }): boolean => {
const textLowerCase = text.toLowerCase()
const title = item.title.toLowerCase()
const summary = item.summary ? item.summary.toLowerCase() : ''
return title.indexOf(textLowerCase) !== -1 || summary.indexOf(textLowerCase) !== -1
}
}
}
|
{
"content_hash": "fd5ef413a79c861c1ee01851ff95ecd5",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 113,
"avg_line_length": 30.857142857142858,
"alnum_prop": 0.6680756843800322,
"repo_name": "ngageoint/mage-server",
"id": "993e7c1979d870f810b5d0a5097a23694dcd28f5",
"size": "4968",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web-app/src/app/admin/admin-feeds/admin-feeds.component.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "78"
},
{
"name": "Dockerfile",
"bytes": "1774"
},
{
"name": "HTML",
"bytes": "470278"
},
{
"name": "JavaScript",
"bytes": "1355729"
},
{
"name": "Pug",
"bytes": "11329"
},
{
"name": "SCSS",
"bytes": "134684"
},
{
"name": "Shell",
"bytes": "5082"
},
{
"name": "TypeScript",
"bytes": "1958186"
}
],
"symlink_target": ""
}
|
namespace LinuxPackages.Web.Mvc.Controllers
{
using System.Threading.Tasks;
using System.Web;
using System.Web.Mvc;
using Common.Utilities;
using Data.Models;
using Microsoft.AspNet.Identity;
using Microsoft.AspNet.Identity.Owin;
using Ninject;
using ViewModels.Profile;
public enum EditProfileResultType
{
UpdateProfileSuccess,
ChangePasswordSuccess,
}
[Authorize]
public partial class ProfileController : BaseController
{
private const string XsrfKey = "XsrfId";
private ApplicationSignInManager signInManager;
private ApplicationUserManager userManager;
public ProfileController()
{
}
public ProfileController(ApplicationUserManager userManager, ApplicationSignInManager signInManager)
{
this.UserManager = userManager;
this.SignInManager = signInManager;
}
public ApplicationSignInManager SignInManager
{
get
{
return this.signInManager ?? this.HttpContext.GetOwinContext().Get<ApplicationSignInManager>();
}
private set
{
this.signInManager = value;
}
}
public ApplicationUserManager UserManager
{
get
{
return this.userManager ?? this.HttpContext.GetOwinContext().GetUserManager<ApplicationUserManager>();
}
private set
{
this.userManager = value;
}
}
public ActionResult Index(EditProfileResultType? message)
{
this.ViewBag.StatusMessage = string.Empty;
if (message == EditProfileResultType.ChangePasswordSuccess)
{
this.ViewBag.StatusMessage = "Your password has been changed.";
}
else if (message == EditProfileResultType.UpdateProfileSuccess)
{
this.ViewBag.StatusMessage = "Your profile has been updated.";
}
ProfileViewModel userProfile = this.Mapper.Map<ProfileViewModel>(this.UserProfile);
return this.View(userProfile);
}
[HttpGet]
public ActionResult ChangePassword()
{
return this.View();
}
[HttpPost]
[ValidateAntiForgeryToken]
public async Task<ActionResult> ChangePassword(ChangePasswordViewModel model)
{
if (!this.ModelState.IsValid)
{
return this.View(model);
}
IdentityResult result = await this.UserManager.ChangePasswordAsync(this.User.Identity.GetUserId(), model.OldPassword, model.NewPassword);
if (result.Succeeded)
{
User user = await this.UserManager.FindByIdAsync(this.User.Identity.GetUserId());
if (user != null)
{
await this.SignInManager.SignInAsync(user, isPersistent: false, rememberBrowser: false);
}
return this.RedirectToAction("Index", new { Message = EditProfileResultType.ChangePasswordSuccess });
}
this.AddErrors(result);
return this.View(model);
}
[HttpGet]
public ActionResult ChangeAvatar()
{
return this.View();
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult ChangeAvatar(ChangeAvatarViewModel model)
{
if (!this.ModelState.IsValid)
{
return this.View(model);
}
Avatar newAvatar = this.Users.CreateAvatar(
model.Contents.FileName,
StreamHelper.ReadFully(model.Contents.InputStream, model.Contents.ContentLength),
this.UserProfile);
return this.RedirectToAction("Index", "Profile");
}
protected override void Dispose(bool disposing)
{
if (disposing && this.userManager != null)
{
this.userManager.Dispose();
this.userManager = null;
}
base.Dispose(disposing);
}
private void AddErrors(IdentityResult result)
{
foreach (var error in result.Errors)
{
this.ModelState.AddModelError(string.Empty, error);
}
}
}
}
|
{
"content_hash": "a944b0fe51832814d8863f0917bc9613",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 149,
"avg_line_length": 29.142857142857142,
"alnum_prop": 0.5677361853832442,
"repo_name": "arnaudoff/LinuxPackages",
"id": "e374a5176ec56f585e75142438585bab93147170",
"size": "4490",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Source/Web/LinuxPackages.Web.Mvc/Controllers/ProfileController.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "3000"
},
{
"name": "C#",
"bytes": "332189"
},
{
"name": "CSS",
"bytes": "15665"
},
{
"name": "HTML",
"bytes": "2822"
},
{
"name": "JavaScript",
"bytes": "328539"
}
],
"symlink_target": ""
}
|
#!bash
#
# bash/zsh completion support for core Git.
#
# Copyright (C) 2006,2007 Shawn O. Pearce <spearce@spearce.org>
# Conceptually based on gitcompletion (http://gitweb.hawaga.org.uk/).
# Distributed under the GNU General Public License, version 2.0.
#
# The contained completion routines provide support for completing:
#
# *) local and remote branch names
# *) local and remote tag names
# *) .git/remotes file names
# *) git 'subcommands'
# *) tree paths within 'ref:path/to/file' expressions
# *) common --long-options
#
# To use these routines:
#
# 1) Copy this file to somewhere (e.g. ~/.git-completion.sh).
# 2) Add the following line to your .bashrc/.zshrc:
# source ~/.git-completion.sh
# 3) Consider changing your PS1 to also show the current branch,
# see git-prompt.sh for details.
case "$COMP_WORDBREAKS" in
*:*) : great ;;
*) COMP_WORDBREAKS="$COMP_WORDBREAKS:"
esac
# __gitdir accepts 0 or 1 arguments (i.e., location)
# returns location of .git repo
__gitdir ()
{
# Note: this function is duplicated in git-prompt.sh
# When updating it, make sure you update the other one to match.
if [ -z "${1-}" ]; then
if [ -n "${__git_dir-}" ]; then
echo "$__git_dir"
elif [ -n "${GIT_DIR-}" ]; then
test -d "${GIT_DIR-}" || return 1
echo "$GIT_DIR"
elif [ -d .git ]; then
echo .git
else
git rev-parse --git-dir 2>/dev/null
fi
elif [ -d "$1/.git" ]; then
echo "$1/.git"
else
echo "$1"
fi
}
__gitcomp_1 ()
{
local c IFS=$' \t\n'
for c in $1; do
c="$c$2"
case $c in
--*=*|*.) ;;
*) c="$c " ;;
esac
printf '%s\n' "$c"
done
}
# The following function is based on code from:
#
# bash_completion - programmable completion functions for bash 3.2+
#
# Copyright © 2006-2008, Ian Macdonald <ian@caliban.org>
# © 2009-2010, Bash Completion Maintainers
# <bash-completion-devel@lists.alioth.debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# The latest version of this software can be obtained here:
#
# http://bash-completion.alioth.debian.org/
#
# RELEASE: 2.x
# This function can be used to access a tokenized list of words
# on the command line:
#
# __git_reassemble_comp_words_by_ref '=:'
# if test "${words_[cword_-1]}" = -w
# then
# ...
# fi
#
# The argument should be a collection of characters from the list of
# word completion separators (COMP_WORDBREAKS) to treat as ordinary
# characters.
#
# This is roughly equivalent to going back in time and setting
# COMP_WORDBREAKS to exclude those characters. The intent is to
# make option types like --date=<type> and <rev>:<path> easy to
# recognize by treating each shell word as a single token.
#
# It is best not to set COMP_WORDBREAKS directly because the value is
# shared with other completion scripts. By the time the completion
# function gets called, COMP_WORDS has already been populated so local
# changes to COMP_WORDBREAKS have no effect.
#
# Output: words_, cword_, cur_.
__git_reassemble_comp_words_by_ref()
{
local exclude i j first
# Which word separators to exclude?
exclude="${1//[^$COMP_WORDBREAKS]}"
cword_=$COMP_CWORD
if [ -z "$exclude" ]; then
words_=("${COMP_WORDS[@]}")
return
fi
# List of word completion separators has shrunk;
# re-assemble words to complete.
for ((i=0, j=0; i < ${#COMP_WORDS[@]}; i++, j++)); do
# Append each nonempty word consisting of just
# word separator characters to the current word.
first=t
while
[ $i -gt 0 ] &&
[ -n "${COMP_WORDS[$i]}" ] &&
# word consists of excluded word separators
[ "${COMP_WORDS[$i]//[^$exclude]}" = "${COMP_WORDS[$i]}" ]
do
# Attach to the previous token,
# unless the previous token is the command name.
if [ $j -ge 2 ] && [ -n "$first" ]; then
((j--))
fi
first=
words_[$j]=${words_[j]}${COMP_WORDS[i]}
if [ $i = $COMP_CWORD ]; then
cword_=$j
fi
if (($i < ${#COMP_WORDS[@]} - 1)); then
((i++))
else
# Done.
return
fi
done
words_[$j]=${words_[j]}${COMP_WORDS[i]}
if [ $i = $COMP_CWORD ]; then
cword_=$j
fi
done
}
if ! type _get_comp_words_by_ref >/dev/null 2>&1; then
_get_comp_words_by_ref ()
{
local exclude cur_ words_ cword_
if [ "$1" = "-n" ]; then
exclude=$2
shift 2
fi
__git_reassemble_comp_words_by_ref "$exclude"
cur_=${words_[cword_]}
while [ $# -gt 0 ]; do
case "$1" in
cur)
cur=$cur_
;;
prev)
prev=${words_[$cword_-1]}
;;
words)
words=("${words_[@]}")
;;
cword)
cword=$cword_
;;
esac
shift
done
}
fi
# Generates completion reply with compgen, appending a space to possible
# completion words, if necessary.
# It accepts 1 to 4 arguments:
# 1: List of possible completion words.
# 2: A prefix to be added to each possible completion word (optional).
# 3: Generate possible completion matches for this word (optional).
# 4: A suffix to be appended to each possible completion word (optional).
__gitcomp ()
{
local cur_="${3-$cur}"
case "$cur_" in
--*=)
COMPREPLY=()
;;
*)
local IFS=$'\n'
COMPREPLY=($(compgen -P "${2-}" \
-W "$(__gitcomp_1 "${1-}" "${4-}")" \
-- "$cur_"))
;;
esac
}
# Generates completion reply with compgen from newline-separated possible
# completion words by appending a space to all of them.
# It accepts 1 to 4 arguments:
# 1: List of possible completion words, separated by a single newline.
# 2: A prefix to be added to each possible completion word (optional).
# 3: Generate possible completion matches for this word (optional).
# 4: A suffix to be appended to each possible completion word instead of
# the default space (optional). If specified but empty, nothing is
# appended.
__gitcomp_nl ()
{
local IFS=$'\n'
COMPREPLY=($(compgen -P "${2-}" -S "${4- }" -W "$1" -- "${3-$cur}"))
}
__git_heads ()
{
local dir="$(__gitdir)"
if [ -d "$dir" ]; then
git --git-dir="$dir" for-each-ref --format='%(refname:short)' \
refs/heads
return
fi
}
__git_tags ()
{
local dir="$(__gitdir)"
if [ -d "$dir" ]; then
git --git-dir="$dir" for-each-ref --format='%(refname:short)' \
refs/tags
return
fi
}
# __git_refs accepts 0, 1 (to pass to __gitdir), or 2 arguments
# presence of 2nd argument means use the guess heuristic employed
# by checkout for tracking branches
__git_refs ()
{
local i hash dir="$(__gitdir "${1-}")" track="${2-}"
local format refs
if [ -d "$dir" ]; then
case "$cur" in
refs|refs/*)
format="refname"
refs="${cur%/*}"
track=""
;;
*)
for i in HEAD FETCH_HEAD ORIG_HEAD MERGE_HEAD; do
if [ -e "$dir/$i" ]; then echo $i; fi
done
format="refname:short"
refs="refs/tags refs/heads refs/remotes"
;;
esac
git --git-dir="$dir" for-each-ref --format="%($format)" \
$refs
if [ -n "$track" ]; then
# employ the heuristic used by git checkout
# Try to find a remote branch that matches the completion word
# but only output if the branch name is unique
local ref entry
git --git-dir="$dir" for-each-ref --shell --format="ref=%(refname:short)" \
"refs/remotes/" | \
while read -r entry; do
eval "$entry"
ref="${ref#*/}"
if [[ "$ref" == "$cur"* ]]; then
echo "$ref"
fi
done | sort | uniq -u
fi
return
fi
case "$cur" in
refs|refs/*)
git ls-remote "$dir" "$cur*" 2>/dev/null | \
while read -r hash i; do
case "$i" in
*^{}) ;;
*) echo "$i" ;;
esac
done
;;
*)
git ls-remote "$dir" HEAD ORIG_HEAD 'refs/tags/*' 'refs/heads/*' 'refs/remotes/*' 2>/dev/null | \
while read -r hash i; do
case "$i" in
*^{}) ;;
refs/*) echo "${i#refs/*/}" ;;
*) echo "$i" ;;
esac
done
;;
esac
}
# __git_refs2 requires 1 argument (to pass to __git_refs)
__git_refs2 ()
{
local i
for i in $(__git_refs "$1"); do
echo "$i:$i"
done
}
# __git_refs_remotes requires 1 argument (to pass to ls-remote)
__git_refs_remotes ()
{
local i hash
git ls-remote "$1" 'refs/heads/*' 2>/dev/null | \
while read -r hash i; do
echo "$i:refs/remotes/$1/${i#refs/heads/}"
done
}
__git_remotes ()
{
local i IFS=$'\n' d="$(__gitdir)"
test -d "$d/remotes" && ls -1 "$d/remotes"
for i in $(git --git-dir="$d" config --get-regexp 'remote\..*\.url' 2>/dev/null); do
i="${i#remote.}"
echo "${i/.url*/}"
done
}
__git_list_merge_strategies ()
{
git merge -s help 2>&1 |
sed -n -e '/[Aa]vailable strategies are: /,/^$/{
s/\.$//
s/.*://
s/^[ ]*//
s/[ ]*$//
p
}'
}
__git_merge_strategies=
# 'git merge -s help' (and thus detection of the merge strategy
# list) fails, unfortunately, if run outside of any git working
# tree. __git_merge_strategies is set to the empty string in
# that case, and the detection will be repeated the next time it
# is needed.
__git_compute_merge_strategies ()
{
test -n "$__git_merge_strategies" ||
__git_merge_strategies=$(__git_list_merge_strategies)
}
__git_complete_revlist_file ()
{
local pfx ls ref cur_="$cur"
case "$cur_" in
*..?*:*)
return
;;
?*:*)
ref="${cur_%%:*}"
cur_="${cur_#*:}"
case "$cur_" in
?*/*)
pfx="${cur_%/*}"
cur_="${cur_##*/}"
ls="$ref:$pfx"
pfx="$pfx/"
;;
*)
ls="$ref"
;;
esac
case "$COMP_WORDBREAKS" in
*:*) : great ;;
*) pfx="$ref:$pfx" ;;
esac
__gitcomp_nl "$(git --git-dir="$(__gitdir)" ls-tree "$ls" 2>/dev/null \
| sed '/^100... blob /{
s,^.* ,,
s,$, ,
}
/^120000 blob /{
s,^.* ,,
s,$, ,
}
/^040000 tree /{
s,^.* ,,
s,$,/,
}
s/^.* //')" \
"$pfx" "$cur_" ""
;;
*...*)
pfx="${cur_%...*}..."
cur_="${cur_#*...}"
__gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
;;
*..*)
pfx="${cur_%..*}.."
cur_="${cur_#*..}"
__gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
;;
*)
__gitcomp_nl "$(__git_refs)"
;;
esac
}
__git_complete_file ()
{
__git_complete_revlist_file
}
__git_complete_revlist ()
{
__git_complete_revlist_file
}
__git_complete_remote_or_refspec ()
{
local cur_="$cur" cmd="${words[1]}"
local i c=2 remote="" pfx="" lhs=1 no_complete_refspec=0
if [ "$cmd" = "remote" ]; then
((c++))
fi
while [ $c -lt $cword ]; do
i="${words[c]}"
case "$i" in
--mirror) [ "$cmd" = "push" ] && no_complete_refspec=1 ;;
--all)
case "$cmd" in
push) no_complete_refspec=1 ;;
fetch)
COMPREPLY=()
return
;;
*) ;;
esac
;;
-*) ;;
*) remote="$i"; break ;;
esac
((c++))
done
if [ -z "$remote" ]; then
__gitcomp_nl "$(__git_remotes)"
return
fi
if [ $no_complete_refspec = 1 ]; then
COMPREPLY=()
return
fi
[ "$remote" = "." ] && remote=
case "$cur_" in
*:*)
case "$COMP_WORDBREAKS" in
*:*) : great ;;
*) pfx="${cur_%%:*}:" ;;
esac
cur_="${cur_#*:}"
lhs=0
;;
+*)
pfx="+"
cur_="${cur_#+}"
;;
esac
case "$cmd" in
fetch)
if [ $lhs = 1 ]; then
__gitcomp_nl "$(__git_refs2 "$remote")" "$pfx" "$cur_"
else
__gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
fi
;;
pull|remote)
if [ $lhs = 1 ]; then
__gitcomp_nl "$(__git_refs "$remote")" "$pfx" "$cur_"
else
__gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
fi
;;
push)
if [ $lhs = 1 ]; then
__gitcomp_nl "$(__git_refs)" "$pfx" "$cur_"
else
__gitcomp_nl "$(__git_refs "$remote")" "$pfx" "$cur_"
fi
;;
esac
}
__git_complete_strategy ()
{
__git_compute_merge_strategies
case "$prev" in
-s|--strategy)
__gitcomp "$__git_merge_strategies"
return 0
esac
case "$cur" in
--strategy=*)
__gitcomp "$__git_merge_strategies" "" "${cur##--strategy=}"
return 0
;;
esac
return 1
}
__git_list_all_commands ()
{
local i IFS=" "$'\n'
for i in $(git help -a|egrep '^ [a-zA-Z0-9]')
do
case $i in
*--*) : helper pattern;;
*) echo $i;;
esac
done
}
__git_all_commands=
__git_compute_all_commands ()
{
test -n "$__git_all_commands" ||
__git_all_commands=$(__git_list_all_commands)
}
__git_list_porcelain_commands ()
{
local i IFS=" "$'\n'
__git_compute_all_commands
for i in $__git_all_commands
do
case $i in
*--*) : helper pattern;;
applymbox) : ask gittus;;
applypatch) : ask gittus;;
archimport) : import;;
cat-file) : plumbing;;
check-attr) : plumbing;;
check-ignore) : plumbing;;
check-ref-format) : plumbing;;
checkout-index) : plumbing;;
commit-tree) : plumbing;;
count-objects) : infrequent;;
credential-cache) : credentials helper;;
credential-store) : credentials helper;;
cvsexportcommit) : export;;
cvsimport) : import;;
cvsserver) : daemon;;
daemon) : daemon;;
diff-files) : plumbing;;
diff-index) : plumbing;;
diff-tree) : plumbing;;
fast-import) : import;;
fast-export) : export;;
fsck-objects) : plumbing;;
fetch-pack) : plumbing;;
fmt-merge-msg) : plumbing;;
for-each-ref) : plumbing;;
hash-object) : plumbing;;
http-*) : transport;;
index-pack) : plumbing;;
init-db) : deprecated;;
local-fetch) : plumbing;;
lost-found) : infrequent;;
ls-files) : plumbing;;
ls-remote) : plumbing;;
ls-tree) : plumbing;;
mailinfo) : plumbing;;
mailsplit) : plumbing;;
merge-*) : plumbing;;
mktree) : plumbing;;
mktag) : plumbing;;
pack-objects) : plumbing;;
pack-redundant) : plumbing;;
pack-refs) : plumbing;;
parse-remote) : plumbing;;
patch-id) : plumbing;;
peek-remote) : plumbing;;
prune) : plumbing;;
prune-packed) : plumbing;;
quiltimport) : import;;
read-tree) : plumbing;;
receive-pack) : plumbing;;
remote-*) : transport;;
repo-config) : deprecated;;
rerere) : plumbing;;
rev-list) : plumbing;;
rev-parse) : plumbing;;
runstatus) : plumbing;;
sh-setup) : internal;;
shell) : daemon;;
show-ref) : plumbing;;
send-pack) : plumbing;;
show-index) : plumbing;;
ssh-*) : transport;;
stripspace) : plumbing;;
symbolic-ref) : plumbing;;
tar-tree) : deprecated;;
unpack-file) : plumbing;;
unpack-objects) : plumbing;;
update-index) : plumbing;;
update-ref) : plumbing;;
update-server-info) : daemon;;
upload-archive) : plumbing;;
upload-pack) : plumbing;;
write-tree) : plumbing;;
var) : infrequent;;
verify-pack) : infrequent;;
verify-tag) : plumbing;;
*) echo $i;;
esac
done
}
__git_porcelain_commands=
__git_compute_porcelain_commands ()
{
__git_compute_all_commands
test -n "$__git_porcelain_commands" ||
__git_porcelain_commands=$(__git_list_porcelain_commands)
}
__git_pretty_aliases ()
{
local i IFS=$'\n'
for i in $(git --git-dir="$(__gitdir)" config --get-regexp "pretty\..*" 2>/dev/null); do
case "$i" in
pretty.*)
i="${i#pretty.}"
echo "${i/ */}"
;;
esac
done
}
__git_aliases ()
{
local i IFS=$'\n'
for i in $(git --git-dir="$(__gitdir)" config --get-regexp "alias\..*" 2>/dev/null); do
case "$i" in
alias.*)
i="${i#alias.}"
echo "${i/ */}"
;;
esac
done
}
# __git_aliased_command requires 1 argument
__git_aliased_command ()
{
local word cmdline=$(git --git-dir="$(__gitdir)" \
config --get "alias.$1")
for word in $cmdline; do
case "$word" in
\!gitk|gitk)
echo "gitk"
return
;;
\!*) : shell command alias ;;
-*) : option ;;
*=*) : setting env ;;
git) : git itself ;;
*)
echo "$word"
return
esac
done
}
# __git_find_on_cmdline requires 1 argument
__git_find_on_cmdline ()
{
local word subcommand c=1
while [ $c -lt $cword ]; do
word="${words[c]}"
for subcommand in $1; do
if [ "$subcommand" = "$word" ]; then
echo "$subcommand"
return
fi
done
((c++))
done
}
__git_has_doubledash ()
{
local c=1
while [ $c -lt $cword ]; do
if [ "--" = "${words[c]}" ]; then
return 0
fi
((c++))
done
return 1
}
__git_whitespacelist="nowarn warn error error-all fix"
_git_am ()
{
local dir="$(__gitdir)"
if [ -d "$dir"/rebase-apply ]; then
__gitcomp "--skip --continue --resolved --abort"
return
fi
case "$cur" in
--whitespace=*)
__gitcomp "$__git_whitespacelist" "" "${cur##--whitespace=}"
return
;;
--*)
__gitcomp "
--3way --committer-date-is-author-date --ignore-date
--ignore-whitespace --ignore-space-change
--interactive --keep --no-utf8 --signoff --utf8
--whitespace= --scissors
"
return
esac
COMPREPLY=()
}
_git_apply ()
{
case "$cur" in
--whitespace=*)
__gitcomp "$__git_whitespacelist" "" "${cur##--whitespace=}"
return
;;
--*)
__gitcomp "
--stat --numstat --summary --check --index
--cached --index-info --reverse --reject --unidiff-zero
--apply --no-add --exclude=
--ignore-whitespace --ignore-space-change
--whitespace= --inaccurate-eof --verbose
"
return
esac
COMPREPLY=()
}
_git_add ()
{
__git_has_doubledash && return
case "$cur" in
--*)
__gitcomp "
--interactive --refresh --patch --update --dry-run
--ignore-errors --intent-to-add
"
return
esac
COMPREPLY=()
}
_git_archive ()
{
case "$cur" in
--format=*)
__gitcomp "$(git archive --list)" "" "${cur##--format=}"
return
;;
--remote=*)
__gitcomp_nl "$(__git_remotes)" "" "${cur##--remote=}"
return
;;
--*)
__gitcomp "
--format= --list --verbose
--prefix= --remote= --exec=
"
return
;;
esac
__git_complete_file
}
_git_bisect ()
{
__git_has_doubledash && return
local subcommands="start bad good skip reset visualize replay log run"
local subcommand="$(__git_find_on_cmdline "$subcommands")"
if [ -z "$subcommand" ]; then
if [ -f "$(__gitdir)"/BISECT_START ]; then
__gitcomp "$subcommands"
else
__gitcomp "replay start"
fi
return
fi
case "$subcommand" in
bad|good|reset|skip|start)
__gitcomp_nl "$(__git_refs)"
;;
*)
COMPREPLY=()
;;
esac
}
_git_branch ()
{
local i c=1 only_local_ref="n" has_r="n"
while [ $c -lt $cword ]; do
i="${words[c]}"
case "$i" in
-d|-m) only_local_ref="y" ;;
-r) has_r="y" ;;
esac
((c++))
done
case "$cur" in
--set-upstream-to=*)
__gitcomp "$(__git_refs)" "" "${cur##--set-upstream-to=}"
;;
--*)
__gitcomp "
--color --no-color --verbose --abbrev= --no-abbrev
--track --no-track --contains --merged --no-merged
--set-upstream-to= --edit-description --list
--unset-upstream
"
;;
*)
if [ $only_local_ref = "y" -a $has_r = "n" ]; then
__gitcomp_nl "$(__git_heads)"
else
__gitcomp_nl "$(__git_refs)"
fi
;;
esac
}
_git_bundle ()
{
local cmd="${words[2]}"
case "$cword" in
2)
__gitcomp "create list-heads verify unbundle"
;;
3)
# looking for a file
;;
*)
case "$cmd" in
create)
__git_complete_revlist
;;
esac
;;
esac
}
_git_checkout ()
{
__git_has_doubledash && return
case "$cur" in
--conflict=*)
__gitcomp "diff3 merge" "" "${cur##--conflict=}"
;;
--*)
__gitcomp "
--quiet --ours --theirs --track --no-track --merge
--conflict= --orphan --patch
"
;;
*)
# check if --track, --no-track, or --no-guess was specified
# if so, disable DWIM mode
local flags="--track --no-track --no-guess" track=1
if [ -n "$(__git_find_on_cmdline "$flags")" ]; then
track=''
fi
__gitcomp_nl "$(__git_refs '' $track)"
;;
esac
}
_git_cherry ()
{
__gitcomp "$(__git_refs)"
}
_git_cherry_pick ()
{
case "$cur" in
--*)
__gitcomp "--edit --no-commit"
;;
*)
__gitcomp_nl "$(__git_refs)"
;;
esac
}
_git_clean ()
{
__git_has_doubledash && return
case "$cur" in
--*)
__gitcomp "--dry-run --quiet"
return
;;
esac
COMPREPLY=()
}
_git_clone ()
{
case "$cur" in
--*)
__gitcomp "
--local
--no-hardlinks
--shared
--reference
--quiet
--no-checkout
--bare
--mirror
--origin
--upload-pack
--template=
--depth
--single-branch
--branch
"
return
;;
esac
COMPREPLY=()
}
_git_commit ()
{
__git_has_doubledash && return
case "$prev" in
-c|-C)
__gitcomp_nl "$(__git_refs)" "" "${cur}"
return
;;
esac
case "$cur" in
--cleanup=*)
__gitcomp "default strip verbatim whitespace
" "" "${cur##--cleanup=}"
return
;;
--reuse-message=*|--reedit-message=*|\
--fixup=*|--squash=*)
__gitcomp_nl "$(__git_refs)" "" "${cur#*=}"
return
;;
--untracked-files=*)
__gitcomp "all no normal" "" "${cur##--untracked-files=}"
return
;;
--*)
__gitcomp "
--all --author= --signoff --verify --no-verify
--edit --no-edit
--amend --include --only --interactive
--dry-run --reuse-message= --reedit-message=
--reset-author --file= --message= --template=
--cleanup= --untracked-files --untracked-files=
--verbose --quiet --fixup= --squash=
"
return
esac
COMPREPLY=()
}
_git_describe ()
{
case "$cur" in
--*)
__gitcomp "
--all --tags --contains --abbrev= --candidates=
--exact-match --debug --long --match --always
"
return
esac
__gitcomp_nl "$(__git_refs)"
}
__git_diff_common_options="--stat --numstat --shortstat --summary
--patch-with-stat --name-only --name-status --color
--no-color --color-words --no-renames --check
--full-index --binary --abbrev --diff-filter=
--find-copies-harder
--text --ignore-space-at-eol --ignore-space-change
--ignore-all-space --exit-code --quiet --ext-diff
--no-ext-diff
--no-prefix --src-prefix= --dst-prefix=
--inter-hunk-context=
--patience
--raw
--dirstat --dirstat= --dirstat-by-file
--dirstat-by-file= --cumulative
"
_git_diff ()
{
__git_has_doubledash && return
case "$cur" in
--*)
__gitcomp "--cached --staged --pickaxe-all --pickaxe-regex
--base --ours --theirs --no-index
$__git_diff_common_options
"
return
;;
esac
__git_complete_revlist_file
}
__git_mergetools_common="diffuse ecmerge emerge kdiff3 meld opendiff
tkdiff vimdiff gvimdiff xxdiff araxis p4merge bc3 codecompare
"
_git_difftool ()
{
__git_has_doubledash && return
case "$cur" in
--tool=*)
__gitcomp "$__git_mergetools_common kompare" "" "${cur##--tool=}"
return
;;
--*)
__gitcomp "--cached --staged --pickaxe-all --pickaxe-regex
--base --ours --theirs
--no-renames --diff-filter= --find-copies-harder
--relative --ignore-submodules
--tool="
return
;;
esac
__git_complete_file
}
__git_fetch_options="
--quiet --verbose --append --upload-pack --force --keep --depth=
--tags --no-tags --all --prune --dry-run
"
_git_fetch ()
{
case "$cur" in
--*)
__gitcomp "$__git_fetch_options"
return
;;
esac
__git_complete_remote_or_refspec
}
__git_format_patch_options="
--stdout --attach --no-attach --thread --thread= --output-directory
--numbered --start-number --numbered-files --keep-subject --signoff
--signature --no-signature --in-reply-to= --cc= --full-index --binary
--not --all --cover-letter --no-prefix --src-prefix= --dst-prefix=
--inline --suffix= --ignore-if-in-upstream --subject-prefix=
"
_git_format_patch ()
{
case "$cur" in
--thread=*)
__gitcomp "
deep shallow
" "" "${cur##--thread=}"
return
;;
--*)
__gitcomp "$__git_format_patch_options"
return
;;
esac
__git_complete_revlist
}
_git_fsck ()
{
case "$cur" in
--*)
__gitcomp "
--tags --root --unreachable --cache --no-reflogs --full
--strict --verbose --lost-found
"
return
;;
esac
COMPREPLY=()
}
_git_gc ()
{
case "$cur" in
--*)
__gitcomp "--prune --aggressive"
return
;;
esac
COMPREPLY=()
}
_git_gitk ()
{
_gitk
}
__git_match_ctag() {
awk "/^${1////\\/}/ { print \$1 }" "$2"
}
_git_grep ()
{
__git_has_doubledash && return
case "$cur" in
--*)
__gitcomp "
--cached
--text --ignore-case --word-regexp --invert-match
--full-name --line-number
--extended-regexp --basic-regexp --fixed-strings
--perl-regexp
--files-with-matches --name-only
--files-without-match
--max-depth
--count
--and --or --not --all-match
"
return
;;
esac
case "$cword,$prev" in
2,*|*,-*)
if test -r tags; then
__gitcomp_nl "$(__git_match_ctag "$cur" tags)"
return
fi
;;
esac
__gitcomp_nl "$(__git_refs)"
}
_git_help ()
{
case "$cur" in
--*)
__gitcomp "--all --info --man --web"
return
;;
esac
__git_compute_all_commands
__gitcomp "$__git_all_commands $(__git_aliases)
attributes cli core-tutorial cvs-migration
diffcore gitk glossary hooks ignore modules
namespaces repository-layout tutorial tutorial-2
workflows
"
}
_git_init ()
{
case "$cur" in
--shared=*)
__gitcomp "
false true umask group all world everybody
" "" "${cur##--shared=}"
return
;;
--*)
__gitcomp "--quiet --bare --template= --shared --shared="
return
;;
esac
COMPREPLY=()
}
_git_ls_files ()
{
__git_has_doubledash && return
case "$cur" in
--*)
__gitcomp "--cached --deleted --modified --others --ignored
--stage --directory --no-empty-directory --unmerged
--killed --exclude= --exclude-from=
--exclude-per-directory= --exclude-standard
--error-unmatch --with-tree= --full-name
--abbrev --ignored --exclude-per-directory
"
return
;;
esac
COMPREPLY=()
}
_git_ls_remote ()
{
__gitcomp_nl "$(__git_remotes)"
}
_git_ls_tree ()
{
__git_complete_file
}
# Options that go well for log, shortlog and gitk
__git_log_common_options="
--not --all
--branches --tags --remotes
--first-parent --merges --no-merges
--max-count=
--max-age= --since= --after=
--min-age= --until= --before=
--min-parents= --max-parents=
--no-min-parents --no-max-parents
"
# Options that go well for log and gitk (not shortlog)
__git_log_gitk_options="
--dense --sparse --full-history
--simplify-merges --simplify-by-decoration
--left-right --notes --no-notes
"
# Options that go well for log and shortlog (not gitk)
__git_log_shortlog_options="
--author= --committer= --grep=
--all-match
"
__git_log_pretty_formats="oneline short medium full fuller email raw format:"
__git_log_date_formats="relative iso8601 rfc2822 short local default raw"
_git_log ()
{
__git_has_doubledash && return
local g="$(git rev-parse --git-dir 2>/dev/null)"
local merge=""
if [ -f "$g/MERGE_HEAD" ]; then
merge="--merge"
fi
case "$cur" in
--pretty=*|--format=*)
__gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases)
" "" "${cur#*=}"
return
;;
--date=*)
__gitcomp "$__git_log_date_formats" "" "${cur##--date=}"
return
;;
--decorate=*)
__gitcomp "long short" "" "${cur##--decorate=}"
return
;;
--*)
__gitcomp "
$__git_log_common_options
$__git_log_shortlog_options
$__git_log_gitk_options
--root --topo-order --date-order --reverse
--follow --full-diff
--abbrev-commit --abbrev=
--relative-date --date=
--pretty= --format= --oneline
--cherry-pick
--graph
--decorate --decorate=
--walk-reflogs
--parents --children
$merge
$__git_diff_common_options
--pickaxe-all --pickaxe-regex
"
return
;;
esac
__git_complete_revlist
}
__git_merge_options="
--no-commit --no-stat --log --no-log --squash --strategy
--commit --stat --no-squash --ff --no-ff --ff-only --edit --no-edit
"
_git_merge ()
{
__git_complete_strategy && return
case "$cur" in
--*)
__gitcomp "$__git_merge_options"
return
esac
__gitcomp_nl "$(__git_refs)"
}
_git_mergetool ()
{
case "$cur" in
--tool=*)
__gitcomp "$__git_mergetools_common tortoisemerge" "" "${cur##--tool=}"
return
;;
--*)
__gitcomp "--tool="
return
;;
esac
COMPREPLY=()
}
_git_merge_base ()
{
__gitcomp_nl "$(__git_refs)"
}
_git_mv ()
{
case "$cur" in
--*)
__gitcomp "--dry-run"
return
;;
esac
COMPREPLY=()
}
_git_name_rev ()
{
__gitcomp "--tags --all --stdin"
}
_git_notes ()
{
local subcommands='add append copy edit list prune remove show'
local subcommand="$(__git_find_on_cmdline "$subcommands")"
case "$subcommand,$cur" in
,--*)
__gitcomp '--ref'
;;
,*)
case "$prev" in
--ref)
__gitcomp_nl "$(__git_refs)"
;;
*)
__gitcomp "$subcommands --ref"
;;
esac
;;
add,--reuse-message=*|append,--reuse-message=*|\
add,--reedit-message=*|append,--reedit-message=*)
__gitcomp_nl "$(__git_refs)" "" "${cur#*=}"
;;
add,--*|append,--*)
__gitcomp '--file= --message= --reedit-message=
--reuse-message='
;;
copy,--*)
__gitcomp '--stdin'
;;
prune,--*)
__gitcomp '--dry-run --verbose'
;;
prune,*)
;;
*)
case "$prev" in
-m|-F)
;;
*)
__gitcomp_nl "$(__git_refs)"
;;
esac
;;
esac
}
_git_pull ()
{
__git_complete_strategy && return
case "$cur" in
--*)
__gitcomp "
--rebase --no-rebase
$__git_merge_options
$__git_fetch_options
"
return
;;
esac
__git_complete_remote_or_refspec
}
_git_push ()
{
case "$prev" in
--repo)
__gitcomp_nl "$(__git_remotes)"
return
esac
case "$cur" in
--repo=*)
__gitcomp_nl "$(__git_remotes)" "" "${cur##--repo=}"
return
;;
--*)
__gitcomp "
--all --mirror --tags --dry-run --force --verbose
--receive-pack= --repo= --set-upstream
"
return
;;
esac
__git_complete_remote_or_refspec
}
_git_rebase ()
{
local dir="$(__gitdir)"
if [ -d "$dir"/rebase-apply ] || [ -d "$dir"/rebase-merge ]; then
__gitcomp "--continue --skip --abort"
return
fi
__git_complete_strategy && return
case "$cur" in
--whitespace=*)
__gitcomp "$__git_whitespacelist" "" "${cur##--whitespace=}"
return
;;
--*)
__gitcomp "
--onto --merge --strategy --interactive
--preserve-merges --stat --no-stat
--committer-date-is-author-date --ignore-date
--ignore-whitespace --whitespace=
--autosquash
"
return
esac
__gitcomp_nl "$(__git_refs)"
}
_git_reflog ()
{
local subcommands="show delete expire"
local subcommand="$(__git_find_on_cmdline "$subcommands")"
if [ -z "$subcommand" ]; then
__gitcomp "$subcommands"
else
__gitcomp_nl "$(__git_refs)"
fi
}
__git_send_email_confirm_options="always never auto cc compose"
__git_send_email_suppresscc_options="author self cc bodycc sob cccmd body all"
_git_send_email ()
{
case "$cur" in
--confirm=*)
__gitcomp "
$__git_send_email_confirm_options
" "" "${cur##--confirm=}"
return
;;
--suppress-cc=*)
__gitcomp "
$__git_send_email_suppresscc_options
" "" "${cur##--suppress-cc=}"
return
;;
--smtp-encryption=*)
__gitcomp "ssl tls" "" "${cur##--smtp-encryption=}"
return
;;
--thread=*)
__gitcomp "
deep shallow
" "" "${cur##--thread=}"
return
;;
--*)
__gitcomp "--annotate --bcc --cc --cc-cmd --chain-reply-to
--compose --confirm= --dry-run --envelope-sender
--from --identity
--in-reply-to --no-chain-reply-to --no-signed-off-by-cc
--no-suppress-from --no-thread --quiet
--signed-off-by-cc --smtp-pass --smtp-server
--smtp-server-port --smtp-encryption= --smtp-user
--subject --suppress-cc= --suppress-from --thread --to
--validate --no-validate
$__git_format_patch_options"
return
;;
esac
__git_complete_revlist
}
_git_stage ()
{
_git_add
}
__git_config_get_set_variables ()
{
local prevword word config_file= c=$cword
while [ $c -gt 1 ]; do
word="${words[c]}"
case "$word" in
--global|--system|--file=*)
config_file="$word"
break
;;
-f|--file)
config_file="$word $prevword"
break
;;
esac
prevword=$word
c=$((--c))
done
git --git-dir="$(__gitdir)" config $config_file --list 2>/dev/null |
while read -r line
do
case "$line" in
*.*=*)
echo "${line/=*/}"
;;
esac
done
}
_git_config ()
{
case "$prev" in
branch.*.remote)
__gitcomp_nl "$(__git_remotes)"
return
;;
branch.*.merge)
__gitcomp_nl "$(__git_refs)"
return
;;
remote.*.fetch)
local remote="${prev#remote.}"
remote="${remote%.fetch}"
if [ -z "$cur" ]; then
COMPREPLY=("refs/heads/")
return
fi
__gitcomp_nl "$(__git_refs_remotes "$remote")"
return
;;
remote.*.push)
local remote="${prev#remote.}"
remote="${remote%.push}"
__gitcomp_nl "$(git --git-dir="$(__gitdir)" \
for-each-ref --format='%(refname):%(refname)' \
refs/heads)"
return
;;
pull.twohead|pull.octopus)
__git_compute_merge_strategies
__gitcomp "$__git_merge_strategies"
return
;;
color.branch|color.diff|color.interactive|\
color.showbranch|color.status|color.ui)
__gitcomp "always never auto"
return
;;
color.pager)
__gitcomp "false true"
return
;;
color.*.*)
__gitcomp "
normal black red green yellow blue magenta cyan white
bold dim ul blink reverse
"
return
;;
help.format)
__gitcomp "man info web html"
return
;;
log.date)
__gitcomp "$__git_log_date_formats"
return
;;
sendemail.aliasesfiletype)
__gitcomp "mutt mailrc pine elm gnus"
return
;;
sendemail.confirm)
__gitcomp "$__git_send_email_confirm_options"
return
;;
sendemail.suppresscc)
__gitcomp "$__git_send_email_suppresscc_options"
return
;;
--get|--get-all|--unset|--unset-all)
__gitcomp_nl "$(__git_config_get_set_variables)"
return
;;
*.*)
COMPREPLY=()
return
;;
esac
case "$cur" in
--*)
__gitcomp "
--global --system --file=
--list --replace-all
--get --get-all --get-regexp
--add --unset --unset-all
--remove-section --rename-section
"
return
;;
branch.*.*)
local pfx="${cur%.*}." cur_="${cur##*.}"
__gitcomp "remote merge mergeoptions rebase" "$pfx" "$cur_"
return
;;
branch.*)
local pfx="${cur%.*}." cur_="${cur#*.}"
__gitcomp_nl "$(__git_heads)" "$pfx" "$cur_" "."
return
;;
guitool.*.*)
local pfx="${cur%.*}." cur_="${cur##*.}"
__gitcomp "
argprompt cmd confirm needsfile noconsole norescan
prompt revprompt revunmerged title
" "$pfx" "$cur_"
return
;;
difftool.*.*)
local pfx="${cur%.*}." cur_="${cur##*.}"
__gitcomp "cmd path" "$pfx" "$cur_"
return
;;
man.*.*)
local pfx="${cur%.*}." cur_="${cur##*.}"
__gitcomp "cmd path" "$pfx" "$cur_"
return
;;
mergetool.*.*)
local pfx="${cur%.*}." cur_="${cur##*.}"
__gitcomp "cmd path trustExitCode" "$pfx" "$cur_"
return
;;
pager.*)
local pfx="${cur%.*}." cur_="${cur#*.}"
__git_compute_all_commands
__gitcomp_nl "$__git_all_commands" "$pfx" "$cur_"
return
;;
remote.*.*)
local pfx="${cur%.*}." cur_="${cur##*.}"
__gitcomp "
url proxy fetch push mirror skipDefaultUpdate
receivepack uploadpack tagopt pushurl
" "$pfx" "$cur_"
return
;;
remote.*)
local pfx="${cur%.*}." cur_="${cur#*.}"
__gitcomp_nl "$(__git_remotes)" "$pfx" "$cur_" "."
return
;;
url.*.*)
local pfx="${cur%.*}." cur_="${cur##*.}"
__gitcomp "insteadOf pushInsteadOf" "$pfx" "$cur_"
return
;;
esac
__gitcomp "
add.ignoreErrors
advice.commitBeforeMerge
advice.detachedHead
advice.implicitIdentity
advice.pushNonFastForward
advice.resolveConflict
advice.statusHints
alias.
am.keepcr
apply.ignorewhitespace
apply.whitespace
branch.autosetupmerge
branch.autosetuprebase
browser.
clean.requireForce
color.branch
color.branch.current
color.branch.local
color.branch.plain
color.branch.remote
color.decorate.HEAD
color.decorate.branch
color.decorate.remoteBranch
color.decorate.stash
color.decorate.tag
color.diff
color.diff.commit
color.diff.frag
color.diff.func
color.diff.meta
color.diff.new
color.diff.old
color.diff.plain
color.diff.whitespace
color.grep
color.grep.context
color.grep.filename
color.grep.function
color.grep.linenumber
color.grep.match
color.grep.selected
color.grep.separator
color.interactive
color.interactive.error
color.interactive.header
color.interactive.help
color.interactive.prompt
color.pager
color.showbranch
color.status
color.status.added
color.status.changed
color.status.header
color.status.nobranch
color.status.untracked
color.status.updated
color.ui
commit.status
commit.template
core.abbrev
core.askpass
core.attributesfile
core.autocrlf
core.bare
core.bigFileThreshold
core.compression
core.createObject
core.deltaBaseCacheLimit
core.editor
core.eol
core.excludesfile
core.fileMode
core.fsyncobjectfiles
core.gitProxy
core.ignoreCygwinFSTricks
core.ignoreStat
core.ignorecase
core.logAllRefUpdates
core.loosecompression
core.notesRef
core.packedGitLimit
core.packedGitWindowSize
core.pager
core.preferSymlinkRefs
core.preloadindex
core.quotepath
core.repositoryFormatVersion
core.safecrlf
core.sharedRepository
core.sparseCheckout
core.symlinks
core.trustctime
core.warnAmbiguousRefs
core.whitespace
core.worktree
diff.autorefreshindex
diff.statGraphWidth
diff.external
diff.ignoreSubmodules
diff.mnemonicprefix
diff.noprefix
diff.renameLimit
diff.renames
diff.suppressBlankEmpty
diff.tool
diff.wordRegex
difftool.
difftool.prompt
fetch.recurseSubmodules
fetch.unpackLimit
format.attach
format.cc
format.headers
format.numbered
format.pretty
format.signature
format.signoff
format.subjectprefix
format.suffix
format.thread
format.to
gc.
gc.aggressiveWindow
gc.auto
gc.autopacklimit
gc.packrefs
gc.pruneexpire
gc.reflogexpire
gc.reflogexpireunreachable
gc.rerereresolved
gc.rerereunresolved
gitcvs.allbinary
gitcvs.commitmsgannotation
gitcvs.dbTableNamePrefix
gitcvs.dbdriver
gitcvs.dbname
gitcvs.dbpass
gitcvs.dbuser
gitcvs.enabled
gitcvs.logfile
gitcvs.usecrlfattr
guitool.
gui.blamehistoryctx
gui.commitmsgwidth
gui.copyblamethreshold
gui.diffcontext
gui.encoding
gui.fastcopyblame
gui.matchtrackingbranch
gui.newbranchtemplate
gui.pruneduringfetch
gui.spellingdictionary
gui.trustmtime
help.autocorrect
help.browser
help.format
http.lowSpeedLimit
http.lowSpeedTime
http.maxRequests
http.minSessions
http.noEPSV
http.postBuffer
http.proxy
http.sslCAInfo
http.sslCAPath
http.sslCert
http.sslCertPasswordProtected
http.sslKey
http.sslVerify
http.useragent
i18n.commitEncoding
i18n.logOutputEncoding
imap.authMethod
imap.folder
imap.host
imap.pass
imap.port
imap.preformattedHTML
imap.sslverify
imap.tunnel
imap.user
init.templatedir
instaweb.browser
instaweb.httpd
instaweb.local
instaweb.modulepath
instaweb.port
interactive.singlekey
log.date
log.decorate
log.showroot
mailmap.file
man.
man.viewer
merge.
merge.conflictstyle
merge.log
merge.renameLimit
merge.renormalize
merge.stat
merge.tool
merge.verbosity
mergetool.
mergetool.keepBackup
mergetool.keepTemporaries
mergetool.prompt
notes.displayRef
notes.rewrite.
notes.rewrite.amend
notes.rewrite.rebase
notes.rewriteMode
notes.rewriteRef
pack.compression
pack.deltaCacheLimit
pack.deltaCacheSize
pack.depth
pack.indexVersion
pack.packSizeLimit
pack.threads
pack.window
pack.windowMemory
pager.
pretty.
pull.octopus
pull.twohead
push.default
rebase.autosquash
rebase.stat
receive.autogc
receive.denyCurrentBranch
receive.denyDeleteCurrent
receive.denyDeletes
receive.denyNonFastForwards
receive.fsckObjects
receive.unpackLimit
receive.updateserverinfo
remotes.
repack.usedeltabaseoffset
rerere.autoupdate
rerere.enabled
sendemail.
sendemail.aliasesfile
sendemail.aliasfiletype
sendemail.bcc
sendemail.cc
sendemail.cccmd
sendemail.chainreplyto
sendemail.confirm
sendemail.envelopesender
sendemail.from
sendemail.identity
sendemail.multiedit
sendemail.signedoffbycc
sendemail.smtpdomain
sendemail.smtpencryption
sendemail.smtppass
sendemail.smtpserver
sendemail.smtpserveroption
sendemail.smtpserverport
sendemail.smtpuser
sendemail.suppresscc
sendemail.suppressfrom
sendemail.thread
sendemail.to
sendemail.validate
showbranch.default
status.relativePaths
status.showUntrackedFiles
status.submodulesummary
submodule.
tar.umask
transfer.unpackLimit
url.
user.email
user.name
user.signingkey
web.browser
branch. remote.
"
}
_git_remote ()
{
local subcommands="add rename remove set-head set-branches set-url show prune update"
local subcommand="$(__git_find_on_cmdline "$subcommands")"
if [ -z "$subcommand" ]; then
__gitcomp "$subcommands"
return
fi
case "$subcommand" in
rename|remove|set-url|show|prune)
__gitcomp_nl "$(__git_remotes)"
;;
set-head|set-branches)
__git_complete_remote_or_refspec
;;
update)
local i c='' IFS=$'\n'
for i in $(git --git-dir="$(__gitdir)" config --get-regexp "remotes\..*" 2>/dev/null); do
i="${i#remotes.}"
c="$c ${i/ */}"
done
__gitcomp "$c"
;;
*)
COMPREPLY=()
;;
esac
}
_git_replace ()
{
__gitcomp_nl "$(__git_refs)"
}
_git_reset ()
{
__git_has_doubledash && return
case "$cur" in
--*)
__gitcomp "--merge --mixed --hard --soft --patch"
return
;;
esac
__gitcomp_nl "$(__git_refs)"
}
_git_revert ()
{
case "$cur" in
--*)
__gitcomp "--edit --mainline --no-edit --no-commit --signoff"
return
;;
esac
__gitcomp_nl "$(__git_refs)"
}
_git_rm ()
{
__git_has_doubledash && return
case "$cur" in
--*)
__gitcomp "--cached --dry-run --ignore-unmatch --quiet"
return
;;
esac
COMPREPLY=()
}
_git_shortlog ()
{
__git_has_doubledash && return
case "$cur" in
--*)
__gitcomp "
$__git_log_common_options
$__git_log_shortlog_options
--numbered --summary
"
return
;;
esac
__git_complete_revlist
}
_git_show ()
{
__git_has_doubledash && return
case "$cur" in
--pretty=*|--format=*)
__gitcomp "$__git_log_pretty_formats $(__git_pretty_aliases)
" "" "${cur#*=}"
return
;;
--*)
__gitcomp "--pretty= --format= --abbrev-commit --oneline
$__git_diff_common_options
"
return
;;
esac
__git_complete_file
}
_git_show_branch ()
{
case "$cur" in
--*)
__gitcomp "
--all --remotes --topo-order --current --more=
--list --independent --merge-base --no-name
--color --no-color
--sha1-name --sparse --topics --reflog
"
return
;;
esac
__git_complete_revlist
}
_git_stash ()
{
local save_opts='--keep-index --no-keep-index --quiet --patch'
local subcommands='save list show apply clear drop pop create branch'
local subcommand="$(__git_find_on_cmdline "$subcommands")"
if [ -z "$subcommand" ]; then
case "$cur" in
--*)
__gitcomp "$save_opts"
;;
*)
if [ -z "$(__git_find_on_cmdline "$save_opts")" ]; then
__gitcomp "$subcommands"
else
COMPREPLY=()
fi
;;
esac
else
case "$subcommand,$cur" in
save,--*)
__gitcomp "$save_opts"
;;
apply,--*|pop,--*)
__gitcomp "--index --quiet"
;;
show,--*|drop,--*|branch,--*)
COMPREPLY=()
;;
show,*|apply,*|drop,*|pop,*|branch,*)
__gitcomp_nl "$(git --git-dir="$(__gitdir)" stash list \
| sed -n -e 's/:.*//p')"
;;
*)
COMPREPLY=()
;;
esac
fi
}
_git_submodule ()
{
__git_has_doubledash && return
local subcommands="add status init update summary foreach sync"
if [ -z "$(__git_find_on_cmdline "$subcommands")" ]; then
case "$cur" in
--*)
__gitcomp "--quiet --cached"
;;
*)
__gitcomp "$subcommands"
;;
esac
return
fi
}
_git_svn ()
{
local subcommands="
init fetch clone rebase dcommit log find-rev
set-tree commit-diff info create-ignore propget
proplist show-ignore show-externals branch tag blame
migrate mkdirs reset gc
"
local subcommand="$(__git_find_on_cmdline "$subcommands")"
if [ -z "$subcommand" ]; then
__gitcomp "$subcommands"
else
local remote_opts="--username= --config-dir= --no-auth-cache"
local fc_opts="
--follow-parent --authors-file= --repack=
--no-metadata --use-svm-props --use-svnsync-props
--log-window-size= --no-checkout --quiet
--repack-flags --use-log-author --localtime
--ignore-paths= $remote_opts
"
local init_opts="
--template= --shared= --trunk= --tags=
--branches= --stdlayout --minimize-url
--no-metadata --use-svm-props --use-svnsync-props
--rewrite-root= --prefix= --use-log-author
--add-author-from $remote_opts
"
local cmt_opts="
--edit --rmdir --find-copies-harder --copy-similarity=
"
case "$subcommand,$cur" in
fetch,--*)
__gitcomp "--revision= --fetch-all $fc_opts"
;;
clone,--*)
__gitcomp "--revision= $fc_opts $init_opts"
;;
init,--*)
__gitcomp "$init_opts"
;;
dcommit,--*)
__gitcomp "
--merge --strategy= --verbose --dry-run
--fetch-all --no-rebase --commit-url
--revision --interactive $cmt_opts $fc_opts
"
;;
set-tree,--*)
__gitcomp "--stdin $cmt_opts $fc_opts"
;;
create-ignore,--*|propget,--*|proplist,--*|show-ignore,--*|\
show-externals,--*|mkdirs,--*)
__gitcomp "--revision="
;;
log,--*)
__gitcomp "
--limit= --revision= --verbose --incremental
--oneline --show-commit --non-recursive
--authors-file= --color
"
;;
rebase,--*)
__gitcomp "
--merge --verbose --strategy= --local
--fetch-all --dry-run $fc_opts
"
;;
commit-diff,--*)
__gitcomp "--message= --file= --revision= $cmt_opts"
;;
info,--*)
__gitcomp "--url"
;;
branch,--*)
__gitcomp "--dry-run --message --tag"
;;
tag,--*)
__gitcomp "--dry-run --message"
;;
blame,--*)
__gitcomp "--git-format"
;;
migrate,--*)
__gitcomp "
--config-dir= --ignore-paths= --minimize
--no-auth-cache --username=
"
;;
reset,--*)
__gitcomp "--revision= --parent"
;;
*)
COMPREPLY=()
;;
esac
fi
}
_git_tag ()
{
local i c=1 f=0
while [ $c -lt $cword ]; do
i="${words[c]}"
case "$i" in
-d|-v)
__gitcomp_nl "$(__git_tags)"
return
;;
-f)
f=1
;;
esac
((c++))
done
case "$prev" in
-m|-F)
COMPREPLY=()
;;
-*|tag)
if [ $f = 1 ]; then
__gitcomp_nl "$(__git_tags)"
else
COMPREPLY=()
fi
;;
*)
__gitcomp_nl "$(__git_refs)"
;;
esac
}
_git_whatchanged ()
{
_git_log
}
__git_main ()
{
local i c=1 command __git_dir
while [ $c -lt $cword ]; do
i="${words[c]}"
case "$i" in
--git-dir=*) __git_dir="${i#--git-dir=}" ;;
--bare) __git_dir="." ;;
--help) command="help"; break ;;
-c) c=$((++c)) ;;
-*) ;;
*) command="$i"; break ;;
esac
((c++))
done
if [ -z "$command" ]; then
case "$cur" in
--*) __gitcomp "
--paginate
--no-pager
--git-dir=
--bare
--version
--exec-path
--exec-path=
--html-path
--info-path
--work-tree=
--namespace=
--no-replace-objects
--help
"
;;
*) __git_compute_porcelain_commands
__gitcomp "$__git_porcelain_commands $(__git_aliases)" ;;
esac
return
fi
local completion_func="_git_${command//-/_}"
declare -f $completion_func >/dev/null && $completion_func && return
local expansion=$(__git_aliased_command "$command")
if [ -n "$expansion" ]; then
completion_func="_git_${expansion//-/_}"
declare -f $completion_func >/dev/null && $completion_func
fi
}
__gitk_main ()
{
__git_has_doubledash && return
local g="$(__gitdir)"
local merge=""
if [ -f "$g/MERGE_HEAD" ]; then
merge="--merge"
fi
case "$cur" in
--*)
__gitcomp "
$__git_log_common_options
$__git_log_gitk_options
$merge
"
return
;;
esac
__git_complete_revlist
}
if [[ -n ${ZSH_VERSION-} ]]; then
echo "WARNING: this script is deprecated, please see git-completion.zsh" 1>&2
autoload -U +X compinit && compinit
__gitcomp ()
{
emulate -L zsh
local cur_="${3-$cur}"
case "$cur_" in
--*=)
;;
*)
local c IFS=$' \t\n'
local -a array
for c in ${=1}; do
c="$c${4-}"
case $c in
--*=*|*.) ;;
*) c="$c " ;;
esac
array[$#array+1]="$c"
done
compset -P '*[=:]'
compadd -Q -S '' -p "${2-}" -a -- array && _ret=0
;;
esac
}
__gitcomp_nl ()
{
emulate -L zsh
local IFS=$'\n'
compset -P '*[=:]'
compadd -Q -S "${4- }" -p "${2-}" -- ${=1} && _ret=0
}
__git_zsh_helper ()
{
emulate -L ksh
local cur cword prev
cur=${words[CURRENT-1]}
prev=${words[CURRENT-2]}
let cword=CURRENT-1
__${service}_main
}
_git ()
{
emulate -L zsh
local _ret=1
__git_zsh_helper
let _ret && _default -S '' && _ret=0
return _ret
}
compdef _git git gitk
return
fi
__git_func_wrap ()
{
local cur words cword prev
_get_comp_words_by_ref -n =: cur words cword prev
$1
}
# Setup completion for certain functions defined above by setting common
# variables and workarounds.
# This is NOT a public function; use at your own risk.
__git_complete ()
{
local wrapper="__git_wrap${2}"
eval "$wrapper () { __git_func_wrap $2 ; }"
complete -o bashdefault -o default -o nospace -F $wrapper $1 2>/dev/null \
|| complete -o default -o nospace -F $wrapper $1
}
# wrapper for backwards compatibility
_git ()
{
__git_wrap__git_main
}
# wrapper for backwards compatibility
_gitk ()
{
__git_wrap__gitk_main
}
__git_complete git __git_main
__git_complete gitk __gitk_main
# The following are necessary only for Cygwin, and only are needed
# when the user has tab-completed the executable name and consequently
# included the '.exe' suffix.
#
if [ Cygwin = "$(uname -o 2>/dev/null)" ]; then
__git_complete git.exe __git_main
fi
|
{
"content_hash": "15a1f335e1ebc3c98f9171c5a9020a0a",
"timestamp": "",
"source": "github",
"line_count": 2514,
"max_line_length": 99,
"avg_line_length": 19.77645186953063,
"alnum_prop": 0.5944124864234281,
"repo_name": "marpaia/chef-osx",
"id": "8c49ce25443c531fb11243d8faf19010a209e970",
"size": "49720",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cookbooks/dotfiles/files/default/git-completion.bash",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "24"
},
{
"name": "Perl",
"bytes": "185936"
},
{
"name": "Python",
"bytes": "183929"
},
{
"name": "Ruby",
"bytes": "70055"
},
{
"name": "Shell",
"bytes": "216720"
},
{
"name": "VimL",
"bytes": "14722"
}
],
"symlink_target": ""
}
|
#import "CCRotatingSprite.h"
@implementation CCRotatingSprite
@synthesize secondsPerRotation;
- (id) init
{
self = [super init];
if (!self) return NULL;
self.secondsPerRotation = 2;
NSLog(@"------------------>");
return self;
}
- (void) setSecondsPerRotation:(float)spr
{
secondsPerRotation = spr;
// Stop rotating
//s[self stopAllActions];
// Rotate with the new speed
[self runAction:[CCRepeatForever actionWithAction:[CCRotateBy actionWithDuration:spr angle:360]]];
}
@end
|
{
"content_hash": "b7b6e12a5f7e8b50926de81a3d44f6df",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 102,
"avg_line_length": 17.387096774193548,
"alnum_prop": 0.6419294990723562,
"repo_name": "wulin9005/cocosbuilder",
"id": "00136de3f4564403a8f737abcf6dc27862ba8e9c",
"size": "1701",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "CocosPlayer/CocosPlayer/CCRotatingSprite.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3448936"
},
{
"name": "C++",
"bytes": "1864410"
},
{
"name": "CSS",
"bytes": "3025"
},
{
"name": "D",
"bytes": "12324"
},
{
"name": "JavaScript",
"bytes": "1551468"
},
{
"name": "M",
"bytes": "333590"
},
{
"name": "Matlab",
"bytes": "1875"
},
{
"name": "Objective-C",
"bytes": "9046916"
},
{
"name": "Perl",
"bytes": "12283"
},
{
"name": "Python",
"bytes": "153781"
},
{
"name": "Ruby",
"bytes": "11574"
},
{
"name": "Shell",
"bytes": "10647"
}
],
"symlink_target": ""
}
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Medical Care Responsive Bootstrap themes | webthemez</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="description" content="" />
<meta name="author" content="http://webthemez.com" />
<!-- HTML5 shim, for IE6-8 support of HTML5 elements -->
<!--[if lt IE 9]>
<script src="http://html5shim.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
</head>
<body>
<div id="wrapper">
<!-- start header -->
<header>
<section class="contactInfo">
<div class="container">
<div class="col-md-12">
<div class="contact-area">
<ul>
<li><i class="fa fa-phone-square"></i>812-123-456</li>
<li><i class="fa fa-envelope-o"></i>info@mdeicalcare.com</li>
</ul>
</div>
</div>
</div>
</section>
<div class="navbar navbar-default navbar-static-top">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href=""><img src="components/img/logo.png" alt="logo"/></a>
</div>
<div class="navbar-collapse collapse ">
<ul class="nav navbar-nav">
<li><a href="" ng-click="goToHome()">Home</a></li>
<li><a href="" ng-click="goToAddPatient()">Add Patient</a></li>
<li><a href="" ng-click="goToAddDoctor()">Add Doctor</a></li>
<li><a href="" ng-click="goToUnadmittedPatients()">Unadmitted Patients</a></li>
<li><a href="" ng-click="goToCreateAccount()">Create an account</a></li>
</ul>
</div>
</div>
</div>
</header><!-- end header -->
<section id="inner-headline">
<div class="container">
<div class="row">
<div class="col-lg-12">
<h2 class="pageTitle">Examination Form</h2>
</div>
</div>
</div>
</section>
<section id="content">
<div class="container">
<div class="row">
<div class="col-md-6">
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do tempor.</p>
<div class="alert alert-success hidden" id="contactSuccess">
<strong>Success!</strong> Your message has been sent to us.
</div>
<div class="alert alert-error hidden" id="contactError">
<strong>Error!</strong> There was an error sending your message.
</div>
<div class="contact-form">
<form id="contact-form" role="form" name="examForm" novalidate="novalidate">
<div class="form-group has-feedback">
<label for="getDoc">Doctor</label>
<select class="form-control" id="getDoc" ng-model="currentDoctor" ng-change="change(currentDoctor)" data-ng-options="doctor as doctor.firstName + ' ' + doctor.lastName for doctor in doctorModel" value="doctor.lastName" >
<option value="">Select Account</option>
</select>
<i class="fa fa-navicon form-control-feedback"></i>
</div>
<div class="form-group has-feedback">
<label for="dischargeDate">Date</label>
<input type="text" class="form-control" id="dischargeDate" name="dischargeDate" placeholder="" uib-datepicker-popup="{{format}}" ng-model="date" is-open="popup1.opened" datepicker-options="dateOptions" ng-required="true" close-text="Close" alt-input-formats="altInputFormats">
<i class="fa fa-user form-control-feedback"></i>
<button type="button" class="btn btn-default" ng-click="open1()"><i class="glyphicon glyphicon-calendar"></i></button>
</div>
<div class="form-group has-feedback">
<label for="diagnosis">Time (format: HH:mm)</label>
<input type="text" class="form-control" id="diagnosis" name="diagnosis" placeholder="" ng-model="time">
<i class="fa fa-navicon form-control-feedback"></i>
</div>
<input type="submit" value="Submit" class="btn btn-default" ng-click="addExamination(date, time)">
</form>
<br>
<br>
<div class="alert alert-success" ng-show="addedPatient == true">
<strong>Success!</strong> The examination has been sucessfully programmed.
</div>
<div class="alert alert-warning" ng-show="invalidPatientForm == true">
<strong>Warning!</strong> Please complete all fields.
</div>
</div>
</div>
</div>
</div>
</section>
<footer>
<div class="container">
<div class="row">
<div class="col-md-3">
<div class="widget">
<h5 class="widgetheading">Our Contact</h5>
<address>
<strong>Target company Inc</strong><br>
JC Main Road, Near Silnile tower<br>
Pin-21542 NewYork US.</address>
<p>
<i class="icon-phone"></i> (123) 456-789 - 1255-12584 <br>
<i class="icon-envelope-alt"></i> email@domainname.com
</p>
</div>
</div>
<div class="col-md-3">
<div class="widget">
<h5 class="widgetheading">Quick Links</h5>
<ul class="link-list">
<li><a href="#">Latest Events</a></li>
<li><a href="#">Terms and conditions</a></li>
<li><a href="#">Privacy policy</a></li>
<li><a href="#">Career</a></li>
<li><a href="#">Contact us</a></li>
</ul>
</div>
</div>
<div class="col-md-3">
<div class="widget">
<h5 class="widgetheading">Latest posts</h5>
<ul class="link-list">
<li><a href="#">Lorem ipsum dolor sit amet, consectetur adipiscing elit.</a></li>
<li><a href="#">Pellentesque et pulvinar enim. Quisque at tempor ligula</a></li>
<li><a href="#">Natus error sit voluptatem accusantium doloremque</a></li>
</ul>
</div>
</div>
<div class="col-md-3">
<div class="widget">
<h5 class="widgetheading">Recent News</h5>
<ul class="link-list">
<li><a href="#">Lorem ipsum dolor sit amet, consectetur adipiscing elit.</a></li>
<li><a href="#">Pellentesque et pulvinar enim. Quisque at tempor ligula</a></li>
<li><a href="#">Natus error sit voluptatem accusantium doloremque</a></li>
</ul>
</div>
</div>
</div>
</div>
<div id="sub-footer">
<div class="container">
<div class="row">
<div class="col-lg-6">
<div class="copyright">
<p>
<span>© Target 2014 All right reserved. By </span><a href="http://webthemez.com" target="_blank">WebThemez</a>
</p>
</div>
</div>
<div class="col-lg-6">
<ul class="social-network">
<li><a href="#" data-placement="top" title="Facebook"><i class="fa fa-facebook"></i></a></li>
<li><a href="#" data-placement="top" title="Twitter"><i class="fa fa-twitter"></i></a></li>
<li><a href="#" data-placement="top" title="Linkedin"><i class="fa fa-linkedin"></i></a></li>
<li><a href="#" data-placement="top" title="Pinterest"><i class="fa fa-pinterest"></i></a></li>
<li><a href="#" data-placement="top" title="Google plus"><i class="fa fa-google-plus"></i></a></li>
</ul>
</div>
</div>
</div>
</div>
</footer>
</div>
<a href="#" class="scrollup"><i class="fa fa-angle-up active"></i></a>
<!-- javascript
================================================== -->
<!-- Placed at the end of the document so the pages load faster -->
<script src="js/jquery.js"></script>
<script src="js/jquery.easing.1.3.js"></script>
<script src="js/bootstrap.min.js"></script>
<script src="js/jquery.fancybox.pack.js"></script>
<script src="js/jquery.fancybox-media.js"></script>
<script src="js/portfolio/jquery.quicksand.js"></script>
<script src="js/portfolio/setting.js"></script>
<script src="js/jquery.flexslider.js"></script>
<script src="js/animate.js"></script>
<script src="js/custom.js"></script>
<script src="js/validate.js"></script>
</body>
</html>
|
{
"content_hash": "dfe4b8c2186a4447c93bea627e308788",
"timestamp": "",
"source": "github",
"line_count": 202,
"max_line_length": 308,
"avg_line_length": 51.633663366336634,
"alnum_prop": 0.45186960690316397,
"repo_name": "GeaninaAt/hms-fe",
"id": "3e88f35df91ca6b9d8babf310b2a87545b4cc051",
"size": "10430",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/addExamination/addExamination.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "57576"
},
{
"name": "HTML",
"bytes": "197362"
},
{
"name": "JavaScript",
"bytes": "220044"
}
],
"symlink_target": ""
}
|
<?xml version="1.0" encoding="utf-8"?>
<android.support.v7.widget.CardView xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:card_view="http://schemas.android.com/apk/res-auto"
android:id="@+id/cardView_aqi"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:clickable="true"
android:foreground="?android:attr/selectableItemBackground"
card_view:cardBackgroundColor="@color/windowBackground"
card_view:cardCornerRadius="4dp"
card_view:cardElevation="3dp"
card_view:cardUseCompatPadding="true">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_margin="15dp"
android:orientation="vertical">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginLeft="15dp"
android:layout_marginTop="15dp"
android:text="@string/airquality"
android:textColor="@color/titleColor"
android:textSize="20sp" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="center"
android:orientation="horizontal"
android:padding="10dp">
<LinearLayout
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="3"
android:gravity="center"
android:orientation="vertical">
<TextView
android:id="@+id/quality"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="center"
android:textColor="@color/textColor"
android:textSize="38sp" />
<TextView
android:id="@+id/suggestion"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:gravity="center"
android:textColor="@color/textColor"
android:textSize="15sp" />
</LinearLayout>
<LinearLayout
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:gravity="center"
android:orientation="vertical">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="center"
android:orientation="horizontal">
<TextView
android:layout_width="45dp"
android:layout_height="wrap_content"
android:text=" AQI:"
android:textColor="@color/titleColor"
android:textSize="10sp" />
<TextView
android:id="@+id/aqi_text"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:textColor="@color/textColor"
android:textSize="10sp" />
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:gravity="center"
android:orientation="horizontal">
<TextView
android:layout_width="45dp"
android:layout_height="wrap_content"
android:text="PM2.5:"
android:textColor="@color/titleColor"
android:textSize="10sp" />
<TextView
android:id="@+id/pm25_text"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:textColor="@color/textColor"
android:textSize="10sp" />
</LinearLayout>
</LinearLayout>
</LinearLayout>
</LinearLayout>
</android.support.v7.widget.CardView>
|
{
"content_hash": "ff906d0e292d27aaac220caf79a6b5a4",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 94,
"avg_line_length": 37.45967741935484,
"alnum_prop": 0.5134553283100107,
"repo_name": "LiuShengchieh/hengweather",
"id": "a8a4f68d8c6b262e324ee7eb8c043c0f4b4646ea",
"size": "4649",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/res/layout/aqi.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "84852"
}
],
"symlink_target": ""
}
|
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
{
"content_hash": "7fe1d0fe623e3e219ce4c5ba3a13c4a9",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "25b50c3aef5fc7ef7016aeae5c0aaa5fdadc5648",
"size": "217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Liliopsida/Poales/Juncaceae/Juncus/Juncus alpinoarticulatus/Juncus alpinoarticulatus fuscencens/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
package org.jetbrains.plugins.scala.lang.psi.stubs.index
import com.intellij.psi.PsiClass
import com.intellij.psi.stubs.StubIndexKey
class ScClassNameInPackageIndex extends ScStringStubIndexExtension[PsiClass] {
override def getKey: StubIndexKey[String, PsiClass] =
ScalaIndexKeys.CLASS_NAME_IN_PACKAGE_KEY
}
|
{
"content_hash": "cbd227f099024793e437fe46bb7b06e3",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 78,
"avg_line_length": 35.22222222222222,
"alnum_prop": 0.8264984227129337,
"repo_name": "JetBrains/intellij-scala",
"id": "6195cbce1241685d580eef8f970682b17c4328ec",
"size": "317",
"binary": false,
"copies": "1",
"ref": "refs/heads/idea223.x",
"path": "scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/stubs/index/ScClassNameInPackageIndex.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "106688"
},
{
"name": "Java",
"bytes": "1165562"
},
{
"name": "Lex",
"bytes": "45405"
},
{
"name": "Scala",
"bytes": "18656869"
}
],
"symlink_target": ""
}
|
package com.juntosalimos.client.application.userprofile;
import com.google.gwt.event.shared.GwtEvent.Type;
import com.google.inject.Inject;
import com.google.web.bindery.event.shared.EventBus;
import com.gwtplatform.mvp.client.HasUiHandlers;
import com.gwtplatform.mvp.client.Presenter;
import com.gwtplatform.mvp.client.View;
import com.gwtplatform.mvp.client.annotations.ContentSlot;
import com.gwtplatform.mvp.client.annotations.NameToken;
import com.gwtplatform.mvp.client.annotations.ProxyStandard;
import com.gwtplatform.mvp.client.proxy.PlaceRequest;
import com.gwtplatform.mvp.client.proxy.ProxyPlace;
import com.gwtplatform.mvp.client.proxy.RevealContentHandler;
import com.juntosalimos.client.application.ApplicationPresenter;
import com.juntosalimos.client.place.NameTokens;
public class UserProfilePresenter extends
Presenter<UserProfilePresenter.MyView, UserProfilePresenter.MyProxy>
implements UserProfileUiHandlers {
interface MyView extends View, HasUiHandlers<UserProfileUiHandlers> {
void setUserInfo();
void setIdeaLine();
void setImpactLine();
}
@ContentSlot
public static final Type<RevealContentHandler<?>> SLOT_UserProfile = new Type<RevealContentHandler<?>>();
@NameToken(NameTokens.user)
@ProxyStandard
public interface MyProxy extends ProxyPlace<UserProfilePresenter> {
}
@Inject
public UserProfilePresenter(EventBus eventBus, MyView view, MyProxy proxy) {
super(eventBus, view, proxy, ApplicationPresenter.SLOT_SetMainContent);
getView().setUiHandlers(this);
}
@Override
public void prepareFromRequest(PlaceRequest request) {
super.prepareFromRequest(request);
}
@Override
public void onBind(){
super.onBind();
getView().setUserInfo();
getView().setIdeaLine();
getView().setIdeaLine();
getView().setIdeaLine();
getView().setIdeaLine();
getView().setImpactLine();
getView().setImpactLine();
getView().setImpactLine();
getView().setImpactLine();
}
}
|
{
"content_hash": "c438adf1e3fa50effeb33aeed74e6a47",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 106,
"avg_line_length": 30.793650793650794,
"alnum_prop": 0.795360824742268,
"repo_name": "manubotija/junsal",
"id": "736219b8d028ae80b93dbd8c0ab1acc21e6790dc",
"size": "1940",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/juntosalimos/client/application/userprofile/UserProfilePresenter.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "124447"
},
{
"name": "Java",
"bytes": "175658"
}
],
"symlink_target": ""
}
|
\newcommand{\NWtarget}[2]{#2}
\newcommand{\NWlink}[2]{#2}
\newcommand{\NWtxtMacroDefBy}{Fragment defined by}
\newcommand{\NWtxtMacroRefIn}{Fragment referenced in}
\newcommand{\NWtxtMacroNoRef}{Fragment never referenced}
\newcommand{\NWtxtDefBy}{Defined by}
\newcommand{\NWtxtRefIn}{Referenced in}
\newcommand{\NWtxtNoRef}{Not referenced}
\newcommand{\NWtxtFileDefBy}{File defined by}
\newcommand{\NWtxtIdentsUsed}{Uses:}
\newcommand{\NWtxtIdentsNotUsed}{Never used}
\newcommand{\NWtxtIdentsDefed}{Defines:}
\newcommand{\NWsep}{${\diamond}$}
\newcommand{\NWnotglobal}{(not defined globally)}
\newcommand{\NWuseHyperlinks}{}
\documentclass{article}
\usepackage{hyperref}
\usepackage{graphicx}
\renewcommand{\NWtarget}[2]{\hypertarget{#1}{#2}}
\renewcommand{\NWlink}[2]{\hyperlink{#1}{#2}}
\title{Slot Machine}
\author{Soham, Ke, Jagdeep}
\begin{document}
\maketitle
\section{Specification}
Users gamble with Money(Credits) which Casino provides them. Users input the bet amount and then they hit the Spin button to spin the three bars displaying the symbols.
The program comes up with a random sequence of predecided symbols.
Calculating the amount that a user wins in one spin, it adds that to total credits.
The gambling further continues until the user wants to quite or he/she runs out of coins.
\section{Analysis/Design}
Gambler will be provided with 200 credit(coins) initially, and will be given the option to adjust their bet. They can either bet 5 credits, 10 credits, 20 credits, MAX bet (50 credits). Bet amount will be deducted from their total credits as they hit the Spin button.\\
As the user hits the spin button, 3 symbols will be displayed. The sequence of symbols will thus determine the amount(credit) the user earns in that round. \\
There would be total of 10 symbols that can be used to display the sequence.\\
Determining the number of winnig credit.
A User can any time quite to walk away with the credits he/she earned or the program will be terminated as the total credit becomes less then minimum bet.\\ \\
Managing the Program:
\begin{itemize}
\item Initializing the credits to 200 and bet to 0.
\item take input from user to (via buttons) to set their bet until user hits Spin button.
\item Spin button generates a sequence of 3 symbols and display.
\item Read the sequence to determine the credits user won.
\item Change the credits accordingly.
\item Repeat until user hits the quite (withdraw) button.
\end{itemize}
\section{Implementation}
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap1}\raggedright\small
\NWtarget{nuweb2a}{} \verb@"sm.cpp"@\nobreak\ {\footnotesize {2a}}$\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it Include files}\nobreak\ {\footnotesize \NWlink{nuweb12b}{12b}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it Constants}\nobreak\ {\footnotesize \NWlink{nuweb13a}{13a}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it classes}\nobreak\ {\footnotesize \NWlink{nuweb13b}{13b}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it string fill}\nobreak\ {\footnotesize \NWlink{nuweb3}{3}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it Select image from the data base and put the result into the box}\nobreak\ {\footnotesize \NWlink{nuweb5}{5}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it Calculate the bet compare with spin result and give out the credits earned}\nobreak\ {\footnotesize \NWlink{nuweb6}{6}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it Function callback}\nobreak\ {\footnotesize \NWlink{nuweb7c}{7c}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@int main()@\\
\mbox{}\verb@{@\\
\mbox{}\verb@ srand(time(0));@\\
\mbox{}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it Create main window}\nobreak\ {\footnotesize \NWlink{nuweb2b}{2b}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it Create Widgets for input and output and button}\nobreak\ {\footnotesize \NWlink{nuweb4}{4}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it Register a callback function to be called when button is pressed}\nobreak\ {\footnotesize \NWlink{nuweb7a}{7a}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it Show window with its controls}\nobreak\ {\footnotesize \NWlink{nuweb7b}{7b}}$\,\rangle$}\verb@@\\
\mbox{}\verb@return(Fl::run());@\\
\mbox{}\verb@ @\\
\mbox{}\verb@}@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
% Definitons of functions go here; each in their own macro
Creating main window
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap2}\raggedright\small
\NWtarget{nuweb2b}{} $\langle\,${\it Create main window}\nobreak\ {\footnotesize {2b}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@Fl_Window* w = new Fl_Window(width,height);@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb2a}{2a}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
This function is the generator of 3 digits that decide the sequence and display of symbols. It Generates 3 digit unique sequence every time its called and it is used inside the spin function.
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap3}\raggedright\small
\NWtarget{nuweb3}{} $\langle\,${\it string fill}\nobreak\ {\footnotesize {3}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@string fill ()@\\
\mbox{}\verb@{@\\
\mbox{}\verb@ int f;@\\
\mbox{}\verb@ ostringstream ostr; @\\
\mbox{}\verb@ @\\
\mbox{}\verb@ f=rand()%1000;@\\
\mbox{}\verb@ ostr << setw(3) << setfill('0') << f;@\\
\mbox{}\verb@ string sequence=ostr.str();@\\
\mbox{}\verb@ @\\
\mbox{}\verb@ return sequence;@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb2a}{2a}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
Need to pass in XY coordinate (upper left corner) of box, and width, height, and label.
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap4}\raggedright\small
\NWtarget{nuweb4}{} $\langle\,${\it Create Widgets for input and output and button}\nobreak\ {\footnotesize {4}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@Widgets widgets;@\\
\mbox{}\verb@widgets.credit=200;@\\
\mbox{}\verb@widgets.main_bet=0;@\\
\mbox{}\verb@@\\
\mbox{}\verb@w->begin();@\\
\mbox{}\verb@widgets.credits = new Fl_Output(100,75,150,30,"CREDITS:");@\\
\mbox{}\verb@ ostringstream ostr; @\\
\mbox{}\verb@ ostr << widgets.credit;@\\
\mbox{}\verb@ widgets.credits->value(ostr.str().c_str());@\\
\mbox{}\verb@widgets.bet = new Fl_Output(35,350,100,25,"Bet");@\\
\mbox{}\verb@widgets.credit_earned = new Fl_Output(250,350,200,25,"Credits Earned");@\\
\mbox{}\verb@@\\
\mbox{}\verb@widgets.img[0] = new Fl_JPEG_Image("f0.jpg");@\\
\mbox{}\verb@widgets.img[1] = new Fl_JPEG_Image("f1.jpg");@\\
\mbox{}\verb@widgets.img[2] = new Fl_JPEG_Image("f2.jpg");@\\
\mbox{}\verb@widgets.img[3] = new Fl_JPEG_Image("f3.jpg");@\\
\mbox{}\verb@widgets.img[4] = new Fl_JPEG_Image("f4.jpg");@\\
\mbox{}\verb@widgets.img[5] = new Fl_JPEG_Image("f5.jpg");@\\
\mbox{}\verb@widgets.img[6] = new Fl_JPEG_Image("f6.jpg");@\\
\mbox{}\verb@widgets.img[7] = new Fl_JPEG_Image("f7.jpg");@\\
\mbox{}\verb@widgets.img[8] = new Fl_JPEG_Image("f8.jpg");@\\
\mbox{}\verb@widgets.img[9] = new Fl_JPEG_Image("f9.jpg");@\\
\mbox{}\verb@widgets.img[10] = new Fl_JPEG_Image("tfp.jpg");@\\
\mbox{}\verb@widgets.img[11] = new Fl_JPEG_Image("roc.jpg");@\\
\mbox{}\verb@@\\
\mbox{}\verb@widgets.box1 = new Fl_Box(5,125,150,200);@\\
\mbox{}\verb@widgets.box2 = new Fl_Box(158,125,150,200);@\\
\mbox{}\verb@widgets.box3 = new Fl_Box(311,125,150,200); @\\
\mbox{}\verb@@\\
\mbox{}\verb@Fl_Button* button_withdraw = new Fl_Button(480,450,160,30,"Withdraw");@\\
\mbox{}\verb@Fl_Button* button_spin = new Fl_Button(470,350,130,30,"Spin");@\\
\mbox{}\verb@Fl_Button* button_bet_again = new Fl_Button(470,300,130,30,"Bet Again");@\\
\mbox{}\verb@Fl_Button* button1 = new Fl_Button(170,400,100,30,"5 Credits");@\\
\mbox{}\verb@Fl_Button* button2 = new Fl_Button(270,400,100,30,"10 Credits");@\\
\mbox{}\verb@Fl_Button* button3 = new Fl_Button(370,400,100,30,"20 Credits");@\\
\mbox{}\verb@Fl_Button* buttonreset = new Fl_Button(50,400,100,30,"Reset");@\\
\mbox{}\verb@Fl_Button* buttonmax = new Fl_Button(490,400,100,30,"Bet Max");@\\
\mbox{}\verb@@\\
\mbox{}\verb@w->end();@\\
\mbox{}\verb@@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb2a}{2a}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
This function is used to convert a string digist into integer digits and then display corresponding images. The conversion could be done by atoi function, but as these small functions were used in the framework of this program, initially, atoi failed to fit in the program.
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap5}\raggedright\small
\NWtarget{nuweb5}{} $\langle\,${\it Select image from the data base and put the result into the box}\nobreak\ {\footnotesize {5}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@int select_image(char e)@\\
\mbox{}\verb@{ @\\
\mbox{}\verb@int d;@\\
\mbox{}\verb@ if (e == '0') {d=0;return d;}@\\
\mbox{}\verb@else if (e == '1') {d=1;return d;}@\\
\mbox{}\verb@else if (e == '2') {d=2;return d;}@\\
\mbox{}\verb@else if (e == '3') {d=3;return d;}@\\
\mbox{}\verb@else if (e == '4') {d=4;return d;}@\\
\mbox{}\verb@else if (e == '5') {d=5;return d;}@\\
\mbox{}\verb@else if (e == '6') {d=6;return d;}@\\
\mbox{}\verb@else if (e == '7') {d=7;return d;}@\\
\mbox{}\verb@else if (e == '8') {d=8;return d;}@\\
\mbox{}\verb@else {d=9;return d;}@\\
\mbox{}\verb@@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@\\
\mbox{}\verb@void out_image(void* v, string s)@\\
\mbox{}\verb@{@\\
\mbox{}\verb@Widgets* w = static_cast<Widgets*>(v);@\\
\mbox{}\verb@@\\
\mbox{}\verb@int a=select_image(s[0]);@\\
\mbox{}\verb@int b=select_image(s[1]);@\\
\mbox{}\verb@int c=select_image(s[2]);@\\
\mbox{}\verb@w->box1->image(w->img[a]);@\\
\mbox{}\verb@w->box1->redraw();@\\
\mbox{}\verb@w->box2->image(w->img[b]);@\\
\mbox{}\verb@w->box2->redraw();@\\
\mbox{}\verb@w->box3->image(w->img[c]);@\\
\mbox{}\verb@w->box3->redraw();@\\
\mbox{}\verb@cout << "M here in image";@\\
\mbox{}\verb@@\\
\mbox{}\verb@w->box1->parent()->redraw();@\\
\mbox{}\verb@w->box2->parent()->redraw();@\\
\mbox{}\verb@w->box3->parent()->redraw();@\\
\mbox{}\verb@@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb2a}{2a}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
This function determines the credit earned per round considerring the bet for that round.
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap6}\raggedright\small
\NWtarget{nuweb6}{} $\langle\,${\it Calculate the bet compare with spin result and give out the credits earned}\nobreak\ {\footnotesize {6}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@int credit_earned(int c, int bet, string s)@\\
\mbox{}\verb@{@\\
\mbox{}\verb@char e[4];@\\
\mbox{}\verb@double p;@\\
\mbox{}\verb@strcpy(e,s.c_str());@\\
\mbox{}\verb@ @\\
\mbox{}\verb@s=s.c_str();@\\
\mbox{}\verb@@\\
\mbox{}\verb@if (bet==5){p=0.25;}@\\
\mbox{}\verb@else if (bet==10){p=0.50;}@\\
\mbox{}\verb@else if (bet==25){p=0.75;}@\\
\mbox{}\verb@else if (bet==50){p=1;}@\\
\mbox{}\verb@ @\\
\mbox{}\verb@if(s == "111") {c += p*100000000;}@\\
\mbox{}\verb@else if(s == "222") {c += p*10000000;}@\\
\mbox{}\verb@else if(s == "333") {c += p*5000000;}@\\
\mbox{}\verb@else if(s == "444") {c += p*1000000;}@\\
\mbox{}\verb@else if(s == "000") {c /= 3;}@\\
\mbox{}\verb@else if(e[0] == e[1] and e[1] == e[2]) {c += p*100000;}@\\
\mbox{}\verb@else if((e[0] == '1' and e[1] == '1') or (e[0] == '1' and e[2] == '1') or (e[2] == '1' and e[1] == '1')) {c += 100;}@\\
\mbox{}\verb@else if((e[0] == '2' and e[1] == '2') or (e[0] == '2' and e[2] == '2') or (e[2] == '2' and e[1] == '2')) {c += 80;}@\\
\mbox{}\verb@else if((e[0] == '3' and e[1] == '3') or (e[0] == '3' and e[2] == '3') or (e[2] == '3' and e[1] == '3')) {c += 75;}@\\
\mbox{}\verb@else if((e[0] == '4' and e[1] == '4') or (e[0] == '4' and e[2] == '4') or (e[2] == '4' and e[1] == '4')) {c += 50;}@\\
\mbox{}\verb@else if((e[0] == '0' and e[1] == '0') or (e[0] == '0' and e[2] == '0') or (e[2] == '0' and e[1] == '0')) {c -= bet;}@\\
\mbox{}\verb@else if(e[0] == e[2] or e[0] == e[1] or e[1] == e[2]) {c += 45;}@\\
\mbox{}\verb@else if(e[0] == '1' or e[1] == '1' or e[2] == '1') {c += 25;}@\\
\mbox{}\verb@else if(e[0] == '2' or e[1] == '2' or e[2] == '2') {c += 20;}@\\
\mbox{}\verb@else if(e[0] == '3' or e[1] == '3' or e[2] == '3') {c += 15;}@\\
\mbox{}\verb@else if(e[0] == '4' or e[1] == '4' or e[2] == '4') {c += 10;}@\\
\mbox{}\verb@else {}@\\
\mbox{}\verb@ @\\
\mbox{}\verb@ @\\
\mbox{}\verb@return c;@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb2a}{2a}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap7}\raggedright\small
\NWtarget{nuweb7a}{} $\langle\,${\it Register a callback function to be called when button is pressed}\nobreak\ {\footnotesize {7a}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@button1->callback(bet_update_1,&widgets);@\\
\mbox{}\verb@button2->callback(bet_update_2,&widgets);@\\
\mbox{}\verb@button3->callback(bet_update_3,&widgets);@\\
\mbox{}\verb@buttonmax->callback(bet_update_max,&widgets);@\\
\mbox{}\verb@buttonreset->callback(bet_update_reset,&widgets);@\\
\mbox{}\verb@button_bet_again->callback(bet_again,&widgets);@\\
\mbox{}\verb@button_spin->callback(spin,&widgets);@\\
\mbox{}\verb@button_withdraw->callback(withdraw,&widgets);@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb2a}{2a}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap8}\raggedright\small
\NWtarget{nuweb7b}{} $\langle\,${\it Show window with its controls}\nobreak\ {\footnotesize {7b}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@w->show();@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb2a}{2a}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
These are the callback functions in this program. Clicking button triggers certain codes and updates certain variables, changes displays,etc.
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap9}\raggedright\small
\NWtarget{nuweb7c}{} $\langle\,${\it Function callback}\nobreak\ {\footnotesize {7c}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it call back function for 5 credit bet button}\nobreak\ {\footnotesize \NWlink{nuweb8a}{8a}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it call back function for 10 credit bet button}\nobreak\ {\footnotesize \NWlink{nuweb8b}{8b}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it call back function for 20 credit bet button}\nobreak\ {\footnotesize \NWlink{nuweb8c}{8c}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it call back function for max credit bet button}\nobreak\ {\footnotesize \NWlink{nuweb9a}{9a}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it call back function for reset bet button}\nobreak\ {\footnotesize \NWlink{nuweb9b}{9b}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it call back function for bet again button}\nobreak\ {\footnotesize \NWlink{nuweb10}{10}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it call back function for spin button}\nobreak\ {\footnotesize \NWlink{nuweb11}{11}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@\hbox{$\langle\,${\it call back function for withdraw button}\nobreak\ {\footnotesize \NWlink{nuweb12a}{12a}}$\,\rangle$}\verb@@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb2a}{2a}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
call back function for 5 credit bet button. It updates the bet to 5 credits.
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap10}\raggedright\small
\NWtarget{nuweb8a}{} $\langle\,${\it call back function for 5 credit bet button}\nobreak\ {\footnotesize {8a}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@void bet_update_1(Fl_Widget* o, void* v)@\\
\mbox{}\verb@{@\\
\mbox{}\verb@Widgets* w = static_cast<Widgets*>(v);@\\
\mbox{}\verb@w->main_bet=5;@\\
\mbox{}\verb@ ostringstream ostr; @\\
\mbox{}\verb@ ostr << w->main_bet;@\\
\mbox{}\verb@ @\\
\mbox{}\verb@w->bet->value(ostr.str().c_str());@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb7c}{7c}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
call back function for 10 credit bet button. It updates the bet to 10 credits.
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap11}\raggedright\small
\NWtarget{nuweb8b}{} $\langle\,${\it call back function for 10 credit bet button}\nobreak\ {\footnotesize {8b}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@void bet_update_2(Fl_Widget* o, void* v)@\\
\mbox{}\verb@{@\\
\mbox{}\verb@Widgets* w = static_cast<Widgets*>(v);@\\
\mbox{}\verb@w->main_bet=10;@\\
\mbox{}\verb@ ostringstream ostr; @\\
\mbox{}\verb@ ostr << w->main_bet;@\\
\mbox{}\verb@@\\
\mbox{}\verb@w->bet->value(ostr.str().c_str());@\\
\mbox{}\verb@@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb7c}{7c}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
call back function for 20 credit bet button. It updates the bet to 20 credits.
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap12}\raggedright\small
\NWtarget{nuweb8c}{} $\langle\,${\it call back function for 20 credit bet button}\nobreak\ {\footnotesize {8c}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@void bet_update_3(Fl_Widget* o, void* v)@\\
\mbox{}\verb@{@\\
\mbox{}\verb@Widgets* w = static_cast<Widgets*>(v);@\\
\mbox{}\verb@w->main_bet=20;@\\
\mbox{}\verb@ ostringstream ostr; @\\
\mbox{}\verb@ ostr << w->main_bet;@\\
\mbox{}\verb@ @\\
\mbox{}\verb@w->bet->value(ostr.str().c_str());@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb7c}{7c}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
call back function for max credit bet button. It updates the bet to 50 credits.
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap13}\raggedright\small
\NWtarget{nuweb9a}{} $\langle\,${\it call back function for max credit bet button}\nobreak\ {\footnotesize {9a}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@void bet_update_max(Fl_Widget* o, void* v)@\\
\mbox{}\verb@{@\\
\mbox{}\verb@Widgets* w = static_cast<Widgets*>(v);@\\
\mbox{}\verb@w->main_bet=50;@\\
\mbox{}\verb@ ostringstream ostr; @\\
\mbox{}\verb@ ostr << w->main_bet;@\\
\mbox{}\verb@ @\\
\mbox{}\verb@w->bet->value(ostr.str().c_str());@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb7c}{7c}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
call back function for reseting the bet.
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap14}\raggedright\small
\NWtarget{nuweb9b}{} $\langle\,${\it call back function for reset bet button}\nobreak\ {\footnotesize {9b}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@void bet_update_reset(Fl_Widget* o, void* v)@\\
\mbox{}\verb@{@\\
\mbox{}\verb@Widgets* w = static_cast<Widgets*>(v);@\\
\mbox{}\verb@w->main_bet=0;@\\
\mbox{}\verb@ ostringstream ostr; @\\
\mbox{}\verb@ ostr << w->main_bet;@\\
\mbox{}\verb@ @\\
\mbox{}\verb@w->bet->value(ostr.str().c_str());@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb7c}{7c}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
call back function for bet again button. It updates the credit earned output to blank and thus giving an idea of starting a new round. It shows spin button and hides itself.
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap15}\raggedright\small
\NWtarget{nuweb10}{} $\langle\,${\it call back function for bet again button}\nobreak\ {\footnotesize {10}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@void bet_again(Fl_Widget* o, void* v)@\\
\mbox{}\verb@{@\\
\mbox{}\verb@Widgets* w = static_cast<Widgets*>(v);@\\
\mbox{}\verb@w->credit_earned->value("");@\\
\mbox{}\verb@@\\
\mbox{}\verb@o->parent()->child(7)->show();@\\
\mbox{}\verb@o->parent()->child(8)->hide();@\\
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb7c}{7c}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
call back function for spin button. The main button which generates a sequence of 3 digit ( a string), displays the credit earned in that round by reading the 3 digit string and deciding the appropriate credit earned. It also generates the images corresponding to three digits. \\ At the end, it hides itself and shows bet again button, thus restricting users to hit the spin button again without reseting the credit earned and the bet.\\ This button also terminates the program to show that user ran out of credits when credits become lower than bet.
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap16}\raggedright\small
\NWtarget{nuweb11}{} $\langle\,${\it call back function for spin button}\nobreak\ {\footnotesize {11}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@void spin(Fl_Widget* o, void* v)@\\
\mbox{}\verb@{@\\
\mbox{}\verb@Widgets* w = static_cast<Widgets*>(v);@\\
\mbox{}\verb@@\\
\mbox{}\verb@if(w->credit < 50)@\\
\mbox{}\verb@{@\\
\mbox{}\verb@ for(int i=0;i<14;i++)@\\
\mbox{}\verb@ {@\\
\mbox{}\verb@ o->parent()->child(i)->hide();@\\
\mbox{}\verb@ }@\\
\mbox{}\verb@ @\\
\mbox{}\verb@ o->parent()->child(4)->show();@\\
\mbox{}\verb@ w->box2->image(w->img[11]);@\\
\mbox{}\verb@ w->box2->redraw();@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@\\
\mbox{}\verb@else@\\
\mbox{}\verb@{@\\
\mbox{}\verb@double difference;@\\
\mbox{}\verb@@\\
\mbox{}\verb@cout << "M here in spin";@\\
\mbox{}\verb@string sequence;@\\
\mbox{}\verb@sequence=fill();@\\
\mbox{}\verb@out_image(v,sequence);@\\
\mbox{}\verb@w->credit=w->credit-w->main_bet;@\\
\mbox{}\verb@double creditearned=w->credit;@\\
\mbox{}\verb@w->credit=credit_earned(w->credit,w->main_bet,sequence);@\\
\mbox{}\verb@difference=fabs(w->credit-creditearned);@\\
\mbox{}\verb@@\\
\mbox{}\verb@ ostringstream ostr; @\\
\mbox{}\verb@ ostr << difference;@\\
\mbox{}\verb@ @\\
\mbox{}\verb@ ostringstream ostr1; @\\
\mbox{}\verb@ ostr1 << w->credit;@\\
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@w->credit_earned->value(ostr.str().c_str());@\\
\mbox{}\verb@w->credits->value(ostr1.str().c_str());@\\
\mbox{}\verb@@\\
\mbox{}\verb@o->parent()->child(8)->show();@\\
\mbox{}\verb@o->parent()->child(7)->hide();@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb7c}{7c}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
call back function for withdraw button. It hides all the buttons and outputs of the window and display an image thanking the users for playing.
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap17}\raggedright\small
\NWtarget{nuweb12a}{} $\langle\,${\it call back function for withdraw button}\nobreak\ {\footnotesize {12a}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@void withdraw(Fl_Widget* o, void* v)@\\
\mbox{}\verb@{@\\
\mbox{}\verb@Widgets* w = static_cast<Widgets*>(v);@\\
\mbox{}\verb@for(int i=0;i<14;i++)@\\
\mbox{}\verb@{@\\
\mbox{}\verb@o->parent()->child(i)->hide();@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@\\
\mbox{}\verb@o->parent()->child(5)->show();@\\
\mbox{}\verb@w->box3->image(w->img[10]);@\\
\mbox{}\verb@w->box3->redraw();@\\
\mbox{}\verb@}@\\
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb7c}{7c}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
These are the include files needed for library function calls
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap18}\raggedright\small
\NWtarget{nuweb12b}{} $\langle\,${\it Include files}\nobreak\ {\footnotesize {12b}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@#include <iostream>@\\
\mbox{}\verb@#include<vector>@\\
\mbox{}\verb@#include<cmath>@\\
\mbox{}\verb@#include<cstdlib>@\\
\mbox{}\verb@#include<time.h>@\\
\mbox{}\verb@#include<sstream>@\\
\mbox{}\verb@#include<iomanip>@\\
\mbox{}\verb@#include<fl/fl.h>@\\
\mbox{}\verb@#include<fl/fl_window.h>@\\
\mbox{}\verb@#include<fl/fl_button.h>@\\
\mbox{}\verb@#include<fl/fl_output.h>@\\
\mbox{}\verb@#include<fl/fl_jpeg_image.h>@\\
\mbox{}\verb@#include<fl/fl_box.h>@\\
\mbox{}\verb@using namespace std;@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb2a}{2a}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
These are the values that will not change during program execution
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap19}\raggedright\small
\NWtarget{nuweb13a}{} $\langle\,${\it Constants}\nobreak\ {\footnotesize {13a}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@const int total_symbol = 9;@\\
\mbox{}\verb@const int symbol=3;@\\
\mbox{}\verb@const int width = 640;@\\
\mbox{}\verb@const int height = 480;@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb2a}{2a}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
Classes
\begin{flushleft} \small
\begin{minipage}{\linewidth}\label{scrap20}\raggedright\small
\NWtarget{nuweb13b}{} $\langle\,${\it classes}\nobreak\ {\footnotesize {13b}}$\,\rangle\equiv$
\vspace{-1ex}
\begin{list}{}{} \item
\mbox{}\verb@@\\
\mbox{}\verb@@\\
\mbox{}\verb@struct Widgets@\\
\mbox{}\verb@{@\\
\mbox{}\verb@Fl_Output* out;@\\
\mbox{}\verb@Fl_Output* credits;@\\
\mbox{}\verb@Fl_Output* credit_earned;@\\
\mbox{}\verb@Fl_Output* bet;@\\
\mbox{}\verb@Fl_Box* box1;@\\
\mbox{}\verb@Fl_Box* box2;@\\
\mbox{}\verb@Fl_Box* box3;@\\
\mbox{}\verb@Fl_JPEG_Image* img[12];@\\
\mbox{}\verb@int main_bet;@\\
\mbox{}\verb@int credit;@\\
\mbox{}\verb@};@\\
\mbox{}\verb@@\\
\mbox{}\verb@@{\NWsep}
\end{list}
\vspace{-1.5ex}
\footnotesize
\begin{list}{}{\setlength{\itemsep}{-\parsep}\setlength{\itemindent}{-\leftmargin}}
\item \NWtxtMacroRefIn\ \NWlink{nuweb2a}{2a}.
\item{}
\end{list}
\end{minipage}\vspace{4ex}
\end{flushleft}
\section{Test}
\includegraphics[scale=0.5]{lab.jpg}
\end{document}
|
{
"content_hash": "b543b4e390f45cec3b799a1f5438c03b",
"timestamp": "",
"source": "github",
"line_count": 749,
"max_line_length": 551,
"avg_line_length": 40.82510013351135,
"alnum_prop": 0.6349663156517757,
"repo_name": "sjatakia/Beeyer",
"id": "71e8265bbf655f6e4a1981c64590b1afc9448d24",
"size": "30578",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pic/zip/Project.tex",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3452"
}
],
"symlink_target": ""
}
|
<!DOCTYPE html>
<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>
<script src="../../resources/bluetooth/bluetooth-helpers.js"></script>
<script>
'use strict';
promise_test(() => {
return setBluetoothFakeAdapter('ServicesDiscoveredAfterReconnectionAdapter')
.then(() => requestDeviceWithKeyDown({
filters: [{services: ['heart_rate']}],
optionalServices: ['battery_service']}))
.then(device => device.gatt.connect())
.then(gattServer => {
let promise = assert_promise_rejects_with_message(
gattServer.getPrimaryService('battery_service'),
new DOMException('GATT Server disconnected while retrieving services.',
'NetworkError'));
gattServer.disconnect();
return gattServer.connect().then(() => promise);
});
}, 'disconnect() and connect() called during a getPrimaryService call that ' +
'fails. Reject with NetworkError.');
</script>
|
{
"content_hash": "8de97827c5cebba372b6be2cf59ce077",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 79,
"avg_line_length": 43.04347826086956,
"alnum_prop": 0.6676767676767676,
"repo_name": "Samsung/ChromiumGStreamerBackend",
"id": "468eabf97678ce968f93d3d6750f6c7d11c7da27",
"size": "990",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "third_party/WebKit/LayoutTests/bluetooth/getPrimaryService/reconnect-during-error.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import { List } from 'immutable';
import { createAction, handleActions } from 'redux-actions';
import { makeTypedFactory, TypedRecord} from 'typed-immutable-record';
import { IAppStateRecord } from '../../../stores';
import { DATA_PREFIX } from './reduxPrefix';
const ALL_COMMENT_IDS_DATA = [...DATA_PREFIX, 'allCommentIds'];
type ILoadAllCommentIdsCompletePayload = List<number>;
export const loadAllCommentIdsComplete = createAction<ILoadAllCommentIdsCompletePayload>(
'search/LOAD_ALL_COMMENT_IDS_COMPLETE',
);
export const resetCommentIds = createAction<void>(
'search/RESET_ALL_COMMENT_IDS',
);
export interface IAllCommentIDsState {
ids: List<string>;
}
export interface IAllCommentIDsStateRecord extends TypedRecord<IAllCommentIDsStateRecord>, IAllCommentIDsState {}
const StateFactory = makeTypedFactory<IAllCommentIDsState, IAllCommentIDsStateRecord>({
ids: List<string>(),
});
export const allCommentIdsReducer = handleActions<
IAllCommentIDsStateRecord,
void | // resetCommentIds
ILoadAllCommentIdsCompletePayload // loadAllCommentIdsComplete
>({
[resetCommentIds.toString()]: () => StateFactory(),
[loadAllCommentIdsComplete.toString()]: (state, { payload }: { payload: ILoadAllCommentIdsCompletePayload }) => (
state.set('ids', payload)
),
},
StateFactory(),
);
export function getAllCommentIds(state: IAppStateRecord): List<number> {
return state.getIn([...ALL_COMMENT_IDS_DATA, 'ids']);
}
|
{
"content_hash": "c8a650d7c552b7c21f8bdda498d60b39",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 117,
"avg_line_length": 31.58695652173913,
"alnum_prop": 0.7501720578114246,
"repo_name": "Instrument/conversationai-moderator",
"id": "d91deccae0821d9e3d42ead2932c19de042a42fa",
"size": "2010",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/frontend-web/src/app/scenes/Search/store/searchResults.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "9333"
},
{
"name": "HTML",
"bytes": "1575"
},
{
"name": "JavaScript",
"bytes": "20224"
},
{
"name": "Shell",
"bytes": "4482"
},
{
"name": "TypeScript",
"bytes": "1569151"
}
],
"symlink_target": ""
}
|
'use strict';
const biz = require('emag.biz');
const _ = require('underscore');
const conf = require('../settings');
const utils = require('speedt-utils').utils;
const amq = require('emag.lib').amq;
exports.indexUI = function(req, res, next){
biz.notice.findAll(function (err, docs){
if(err) return next(err);
res.render('notice/index', {
conf: conf,
data: {
list_notice: docs,
session_user: req.session.user,
nav_choose: ',04,0401,'
}
});
});
};
exports.addUI = function(req, res, next){
res.render('notice/add', {
conf: conf,
data: {
session_user: req.session.user,
nav_choose: ',04,0401,'
}
});
};
exports.add = function(req, res, next){
var query = req.body;
query.user_id = req.session.userId;
biz.notice.saveNew(query, function (err, status){
if(err) return next(err);
res.send({});
});
};
exports.editUI = function(req, res, next){
var id = req.query.id;
biz.notice.getById(id)
.then(doc => {
if(!doc) return next(new Error('Not Found'));
res.render('notice/edit', {
conf: conf,
data: {
notice: doc,
session_user: req.session.user,
nav_choose: ',04,0401,'
}
});
})
.catch(next);
};
exports.edit = function(req, res, next){
var query = req.body;
biz.notice.editInfo(query, function (err, status){
if(err) return next(err);
res.send({});
});
};
exports.del = function(req, res, next){
var query = req.body;
biz.notice.del(query.id, function (err, status){
if(err) return next(err);
res.send({});
});
};
(() => {
function p1(notice_id, docs){
return new Promise((resolve, reject) => {
if(0 === docs.length) return reject(new Error('前置机未启动'));
biz.notice.getById(notice_id)
.then(p2.bind(null, docs))
.then(() => resolve())
.catch(reject);
});
}
function p2(frontends, doc){
return new Promise((resolve, reject) => {
if(!doc) return reject(new Error('Not Found'));
delete doc.user_id;
delete doc.last_time;
var _data = ['ALL', JSON.stringify([1008, , _.now(), doc])];
for(let i of frontends){
amq.send('/queue/back.send.v3.'+ i, { priority: 8 }, _data, (err, code) => { /* */ });
}
resolve();
});
}
exports.send = function(req, res, next){
var query = req.body;
biz.frontend.findAll()
.then(p1.bind(null, query.id))
.then(() => res.send({}))
.catch(next);
};
})();
|
{
"content_hash": "178e45b3e45707a32f5312e4d206c6fc",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 95,
"avg_line_length": 20.601626016260163,
"alnum_prop": 0.5572217837411207,
"repo_name": "speedt/g2",
"id": "30326091d9f1645bc8ca6ab473c4c55f9c33387f",
"size": "2631",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "logon/src/manage/controllers/notice.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "477"
},
{
"name": "HTML",
"bytes": "111849"
},
{
"name": "JavaScript",
"bytes": "111631"
},
{
"name": "Lua",
"bytes": "7947"
},
{
"name": "Nginx",
"bytes": "5753"
},
{
"name": "Shell",
"bytes": "4933"
}
],
"symlink_target": ""
}
|
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace MokkosuPad.ViewModels
{
// 参考: https://consoleredirect.codeplex.com/
public class ConsoleRedirectWriter : StringWriter
{
TextWriter consoleTextWriter;
public Action<String> OnWrite;
public ConsoleRedirectWriter()
{
consoleTextWriter = Console.Out;
this.OnWrite += delegate(string text) { consoleTextWriter.Write(text); };
Console.SetOut(this);
}
public void Release()
{
Console.SetOut(consoleTextWriter);
}
private void WriteGeneric<T>(T value) { if (OnWrite != null) OnWrite(value.ToString()); }
public override void Write(char value) { WriteGeneric<char>(value); }
public override void Write(string value) { WriteGeneric<string>(value); }
public override void Write(bool value) { WriteGeneric<bool>(value); }
public override void Write(int value) { WriteGeneric<int>(value); }
public override void Write(double value) { WriteGeneric<double>(value); }
public override void Write(long value) { WriteGeneric<long>(value); }
private void WriteLineGeneric<T>(T value) { if (OnWrite != null) OnWrite(value.ToString() + "\n"); }
public override void WriteLine(char value) { WriteLineGeneric<char>(value); }
public override void WriteLine(string value) { WriteLineGeneric<string>(value); }
public override void WriteLine(bool value) { WriteLineGeneric<bool>(value); }
public override void WriteLine(int value) { WriteLineGeneric<int>(value); }
public override void WriteLine(double value) { WriteLineGeneric<double>(value); }
public override void WriteLine(long value) { WriteLineGeneric<long>(value); }
public override void Write(char[] buffer, int index, int count)
{
base.Write(buffer, index, count);
char[] buffer2 = new char[count]; //Ensures large buffers are not a problem
for (int i = 0; i < count; i++) buffer2[i] = buffer[index + i];
WriteGeneric<char[]>(buffer2);
}
public override void WriteLine(char[] buffer, int index, int count)
{
base.Write(buffer, index, count);
char[] buffer2 = new char[count]; //Ensures large buffers are not a problem
for (int i = 0; i < count; i++) buffer2[i] = buffer[index + i];
WriteLineGeneric<char[]>(buffer2);
}
}
}
|
{
"content_hash": "6c390353be76116c4a8615de869c45ba",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 108,
"avg_line_length": 38.776119402985074,
"alnum_prop": 0.6351039260969977,
"repo_name": "lambdataro/Mokkosu",
"id": "c6e04134ee68235b34df3ea5171d981f86624d38",
"size": "2604",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "VS2013/MokkosuPad/ViewModels/ConsoleRedirectWriter.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "421817"
},
{
"name": "CSS",
"bytes": "885"
}
],
"symlink_target": ""
}
|
package hudson.plugins.analysis.core;
/**
* Global settings for all static code analysis plug-ins.
*
* @author Ulli Hafner
*/
public interface Settings {
/**
* Returns whether the logger should be quite. If the logger is quite then the output is not shown in the
* console log.
*
* @return on <code>true</code> no logging statements are written to the console log
*/
Boolean getQuietMode();
/**
* Returns whether a build should be failed if the parsed input file is invalid or corrupted.
*
* @return on <code>true</code> the build will be failed, on <code>false</code> an error message is reported
*/
Boolean getFailOnCorrupt();
}
|
{
"content_hash": "537b39760d672f584e01d06c343c03e6",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 112,
"avg_line_length": 30.347826086956523,
"alnum_prop": 0.669054441260745,
"repo_name": "amuniz/analysis-core-plugin",
"id": "98cde9f3eab3cdd2670271648d10e9fcffbc1449",
"size": "698",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/main/java/hudson/plugins/analysis/core/Settings.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "62"
},
{
"name": "C#",
"bytes": "15197"
},
{
"name": "HTML",
"bytes": "47124"
},
{
"name": "Java",
"bytes": "745435"
},
{
"name": "Shell",
"bytes": "685"
}
],
"symlink_target": ""
}
|
//------------------------------------------------------------------------------
// <auto-generated>
// 此代码由工具生成。
// 运行时版本:4.0.30319.42000
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </auto-generated>
//------------------------------------------------------------------------------
namespace Idefav.CodeGenerator.Properties {
using System;
/// <summary>
/// 一个强类型的资源类,用于查找本地化的字符串等。
/// </summary>
// 此类是由 StronglyTypedResourceBuilder
// 类通过类似于 ResGen 或 Visual Studio 的工具自动生成的。
// 若要添加或移除成员,请编辑 .ResX 文件,然后重新运行 ResGen
// (以 /str 作为命令选项),或重新生成 VS 项目。
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
internal class Resources {
private static global::System.Resources.ResourceManager resourceMan;
private static global::System.Globalization.CultureInfo resourceCulture;
[global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
internal Resources() {
}
/// <summary>
/// 返回此类使用的缓存的 ResourceManager 实例。
/// </summary>
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Resources.ResourceManager ResourceManager {
get {
if (object.ReferenceEquals(resourceMan, null)) {
global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("Idefav.CodeGenerator.Properties.Resources", typeof(Resources).Assembly);
resourceMan = temp;
}
return resourceMan;
}
}
/// <summary>
/// 使用此强类型资源类,为所有资源查找
/// 重写当前线程的 CurrentUICulture 属性。
/// </summary>
[global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
internal static global::System.Globalization.CultureInfo Culture {
get {
return resourceCulture;
}
set {
resourceCulture = value;
}
}
}
}
|
{
"content_hash": "990ffa0544e6c21898b8842980c9fae1",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 186,
"avg_line_length": 40.15873015873016,
"alnum_prop": 0.5754940711462451,
"repo_name": "idefav/CodeGenerator",
"id": "0ab25af5a8290ced0282cc73768c8de1da3b2387",
"size": "2880",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Idefav.CodeGenerator/Properties/Resources.Designer.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "179283"
}
],
"symlink_target": ""
}
|
ACCEPTED
#### According to
Index Fungorum
#### Published in
Monograph of Cercospora 53 (1954)
#### Original name
Cercospora ilicis-opacae Chupp
### Remarks
null
|
{
"content_hash": "2750c28d54001ed96ca647b2546dda59",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 33,
"avg_line_length": 12.615384615384615,
"alnum_prop": 0.7317073170731707,
"repo_name": "mdoering/backbone",
"id": "fbdeb104d4f1605f995380d67abe7e0c3358fb64",
"size": "218",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Ascomycota/Dothideomycetes/Capnodiales/Mycosphaerellaceae/Cercospora/Cercospora ilicis-opacae/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><!--Google Tag Manager--><script>(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start':
new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0],
j=d.createElement(s),dl=l!='dataLayer'?'%26l='+l:'';j.async=true;j.src=
'https://www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f);
})(window,document,'script','dataLayer','GTM-MWRD6S');</script><!--End Google Tag Manager-->
<meta content="IE=Edge" http-equiv="X-UA-Compatible"/>
<meta content="text/html; charset=utf-8" http-equiv="Content-Type"/>
<title>twilio.rest.studio.v1.flow.execution.execution_step package — twilio-python 7.15.3 documentation</title>
<link href="../../../_static/alabaster.css" rel="stylesheet" type="text/css"/>
<link href="../../../_static/pygments.css" rel="stylesheet" type="text/css"/>
<script data-url_root="../../../" id="documentation_options" src="../../../_static/documentation_options.js" type="text/javascript"></script>
<script src="../../../_static/jquery.js" type="text/javascript"></script>
<script src="../../../_static/underscore.js" type="text/javascript"></script>
<script src="../../../_static/doctools.js" type="text/javascript"></script>
<script async="async" src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-AMS-MML_HTMLorMML" type="text/javascript"></script>
<link href="../../../genindex.html" rel="index" title="Index"/>
<link href="../../../search.html" rel="search" title="Search"/>
<link href="twilio.rest.studio.v2.html" rel="next" title="twilio.rest.studio.v2 package"/>
<link href="twilio.rest.studio.v1.flow.execution.html" rel="prev" title="twilio.rest.studio.v1.flow.execution package"/>
<link href="../../../_static/custom.css" rel="stylesheet" type="text/css"/>
<meta content="width=device-width, initial-scale=0.9, maximum-scale=0.9" name="viewport"/>
</head><body><!--Google Tag Manager (noscript)--><noscript><iframe height="0" src="https://www.googletagmanager.com/ns.html?id=GTM-MWRD6S" style="display:none;visibility:hidden" width="0"></iframe></noscript><!--End Google Tag Manager (noscript)-->
<div class="document">
<div class="documentwrapper">
<div class="bodywrapper">
<div class="body" role="main">
<div class="section" id="twilio-rest-studio-v1-flow-execution-execution-step-package">
<h1>twilio.rest.studio.v1.flow.execution.execution_step package<a class="headerlink" href="#twilio-rest-studio-v1-flow-execution-execution-step-package" title="Permalink to this headline">¶</a></h1>
<div class="section" id="submodules">
<h2>Submodules<a class="headerlink" href="#submodules" title="Permalink to this headline">¶</a></h2>
</div>
<div class="section" id="module-twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context">
<span id="twilio-rest-studio-v1-flow-execution-execution-step-execution-step-context-module"></span><h2>twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context module<a class="headerlink" href="#module-twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context" title="Permalink to this headline">¶</a></h2>
<p>This code was generated by
/ _ _ _| _ _</p>
<blockquote>
<div><div class="line-block">
<div class="line">(_)/(_)(_|/| <a href="#system-message-1"><span class="problematic" id="problematic-1">|</span></a>(/_ v1.0.0
/ /</div>
</div>
</div></blockquote>
<dl class="class">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextContext">
<em class="property">class </em><code class="descclassname">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.</code><code class="descname">ExecutionStepContextContext</code><span class="sig-paren">(</span><em>version</em>, <em>flow_sid</em>, <em>execution_sid</em>, <em>step_sid</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step/execution_step_context.html#ExecutionStepContextContext"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextContext" title="Permalink to this definition">¶</a></dt>
<dd><p>Bases: <a class="reference internal" href="twilio.base.html#twilio.base.instance_context.InstanceContext" title="twilio.base.instance_context.InstanceContext"><code class="xref py py-class docutils literal notranslate"><span class="pre">twilio.base.instance_context.InstanceContext</span></code></a></p>
<dl class="method">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextContext.fetch">
<code class="descname">fetch</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step/execution_step_context.html#ExecutionStepContextContext.fetch"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextContext.fetch" title="Permalink to this definition">¶</a></dt>
<dd><p>Fetch the ExecutionStepContextInstance</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The fetched ExecutionStepContextInstance</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance" title="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance</a></td>
</tr>
</tbody>
</table>
</dd></dl>
</dd></dl>
<dl class="class">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance">
<em class="property">class </em><code class="descclassname">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.</code><code class="descname">ExecutionStepContextInstance</code><span class="sig-paren">(</span><em>version</em>, <em>payload</em>, <em>flow_sid</em>, <em>execution_sid</em>, <em>step_sid</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step/execution_step_context.html#ExecutionStepContextInstance"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance" title="Permalink to this definition">¶</a></dt>
<dd><p>Bases: <a class="reference internal" href="twilio.base.html#twilio.base.instance_resource.InstanceResource" title="twilio.base.instance_resource.InstanceResource"><code class="xref py py-class docutils literal notranslate"><span class="pre">twilio.base.instance_resource.InstanceResource</span></code></a></p>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.account_sid">
<code class="descname">account_sid</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.account_sid" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The SID of the Account that created the resource</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.context">
<code class="descname">context</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.context" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The current state of the flow</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.11)">dict</a></td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.execution_sid">
<code class="descname">execution_sid</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.execution_sid" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The SID of the Execution</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.fetch">
<code class="descname">fetch</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step/execution_step_context.html#ExecutionStepContextInstance.fetch"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.fetch" title="Permalink to this definition">¶</a></dt>
<dd><p>Fetch the ExecutionStepContextInstance</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The fetched ExecutionStepContextInstance</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance" title="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance</a></td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.flow_sid">
<code class="descname">flow_sid</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.flow_sid" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The SID of the Flow</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.step_sid">
<code class="descname">step_sid</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.step_sid" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">Step SID</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.url">
<code class="descname">url</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance.url" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The absolute URL of the resource</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
</dd></dl>
<dl class="class">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextList">
<em class="property">class </em><code class="descclassname">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.</code><code class="descname">ExecutionStepContextList</code><span class="sig-paren">(</span><em>version</em>, <em>flow_sid</em>, <em>execution_sid</em>, <em>step_sid</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step/execution_step_context.html#ExecutionStepContextList"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextList" title="Permalink to this definition">¶</a></dt>
<dd><p>Bases: <a class="reference internal" href="twilio.base.html#twilio.base.list_resource.ListResource" title="twilio.base.list_resource.ListResource"><code class="xref py py-class docutils literal notranslate"><span class="pre">twilio.base.list_resource.ListResource</span></code></a></p>
<dl class="method">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextList.get">
<code class="descname">get</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step/execution_step_context.html#ExecutionStepContextList.get"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextList.get" title="Permalink to this definition">¶</a></dt>
<dd><p>Constructs a ExecutionStepContextContext</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextContext</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextContext" title="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextContext">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextContext</a></td>
</tr>
</tbody>
</table>
</dd></dl>
</dd></dl>
<dl class="class">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextPage">
<em class="property">class </em><code class="descclassname">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.</code><code class="descname">ExecutionStepContextPage</code><span class="sig-paren">(</span><em>version</em>, <em>response</em>, <em>solution</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step/execution_step_context.html#ExecutionStepContextPage"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextPage" title="Permalink to this definition">¶</a></dt>
<dd><p>Bases: <a class="reference internal" href="twilio.base.html#twilio.base.page.Page" title="twilio.base.page.Page"><code class="xref py py-class docutils literal notranslate"><span class="pre">twilio.base.page.Page</span></code></a></p>
<dl class="method">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextPage.get_instance">
<code class="descname">get_instance</code><span class="sig-paren">(</span><em>payload</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step/execution_step_context.html#ExecutionStepContextPage.get_instance"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextPage.get_instance" title="Permalink to this definition">¶</a></dt>
<dd><p>Build an instance of ExecutionStepContextInstance</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><strong>payload</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.11)"><em>dict</em></a>) – Payload response from the API</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance" title="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextInstance</a></td>
</tr>
</tbody>
</table>
</dd></dl>
</dd></dl>
</div>
<div class="section" id="module-twilio.rest.studio.v1.flow.execution.execution_step">
<span id="module-contents"></span><h2>Module contents<a class="headerlink" href="#module-twilio.rest.studio.v1.flow.execution.execution_step" title="Permalink to this headline">¶</a></h2>
<p>This code was generated by
/ _ _ _| _ _</p>
<blockquote>
<div><div class="line-block">
<div class="line">(_)/(_)(_|/| <a href="#system-message-2"><span class="problematic" id="problematic-2">|</span></a>(/_ v1.0.0
/ /</div>
</div>
</div></blockquote>
<dl class="class">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepContext">
<em class="property">class </em><code class="descclassname">twilio.rest.studio.v1.flow.execution.execution_step.</code><code class="descname">ExecutionStepContext</code><span class="sig-paren">(</span><em>version</em>, <em>flow_sid</em>, <em>execution_sid</em>, <em>sid</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step.html#ExecutionStepContext"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepContext" title="Permalink to this definition">¶</a></dt>
<dd><p>Bases: <a class="reference internal" href="twilio.base.html#twilio.base.instance_context.InstanceContext" title="twilio.base.instance_context.InstanceContext"><code class="xref py py-class docutils literal notranslate"><span class="pre">twilio.base.instance_context.InstanceContext</span></code></a></p>
<dl class="method">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepContext.fetch">
<code class="descname">fetch</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step.html#ExecutionStepContext.fetch"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepContext.fetch" title="Permalink to this definition">¶</a></dt>
<dd><p>Fetch the ExecutionStepInstance</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The fetched ExecutionStepInstance</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance" title="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance">twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance</a></td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepContext.step_context">
<code class="descname">step_context</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepContext.step_context" title="Permalink to this definition">¶</a></dt>
<dd><p>Access the step_context</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextList</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextList" title="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextList">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextList</a></td>
</tr>
</tbody>
</table>
</dd></dl>
</dd></dl>
<dl class="class">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance">
<em class="property">class </em><code class="descclassname">twilio.rest.studio.v1.flow.execution.execution_step.</code><code class="descname">ExecutionStepInstance</code><span class="sig-paren">(</span><em>version</em>, <em>payload</em>, <em>flow_sid</em>, <em>execution_sid</em>, <em>sid=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step.html#ExecutionStepInstance"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance" title="Permalink to this definition">¶</a></dt>
<dd><p>Bases: <a class="reference internal" href="twilio.base.html#twilio.base.instance_resource.InstanceResource" title="twilio.base.instance_resource.InstanceResource"><code class="xref py py-class docutils literal notranslate"><span class="pre">twilio.base.instance_resource.InstanceResource</span></code></a></p>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.account_sid">
<code class="descname">account_sid</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.account_sid" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The SID of the Account that created the resource</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.context">
<code class="descname">context</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.context" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The current state of the flow</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.11)">dict</a></td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.date_created">
<code class="descname">date_created</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.date_created" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The ISO 8601 date and time in GMT when the resource was created</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">datetime</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.date_updated">
<code class="descname">date_updated</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.date_updated" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The ISO 8601 date and time in GMT when the resource was last updated</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">datetime</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.execution_sid">
<code class="descname">execution_sid</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.execution_sid" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The SID of the Execution</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.fetch">
<code class="descname">fetch</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step.html#ExecutionStepInstance.fetch"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.fetch" title="Permalink to this definition">¶</a></dt>
<dd><p>Fetch the ExecutionStepInstance</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The fetched ExecutionStepInstance</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance" title="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance">twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance</a></td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.flow_sid">
<code class="descname">flow_sid</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.flow_sid" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The SID of the Flow</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.links">
<code class="descname">links</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.links" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The URLs of related resources</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.name">
<code class="descname">name</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.name" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The event that caused the Flow to transition to the Step</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.sid">
<code class="descname">sid</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.sid" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The unique string that identifies the resource</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.step_context">
<code class="descname">step_context</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.step_context" title="Permalink to this definition">¶</a></dt>
<dd><p>Access the step_context</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextList</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextList" title="twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextList">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context.ExecutionStepContextList</a></td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.transitioned_from">
<code class="descname">transitioned_from</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.transitioned_from" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The Widget that preceded the Widget for the Step</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.transitioned_to">
<code class="descname">transitioned_to</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.transitioned_to" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The Widget that will follow the Widget for the Step</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="attribute">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.url">
<code class="descname">url</code><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance.url" title="Permalink to this definition">¶</a></dt>
<dd><table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body">The absolute URL of the resource</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">unicode</td>
</tr>
</tbody>
</table>
</dd></dl>
</dd></dl>
<dl class="class">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepList">
<em class="property">class </em><code class="descclassname">twilio.rest.studio.v1.flow.execution.execution_step.</code><code class="descname">ExecutionStepList</code><span class="sig-paren">(</span><em>version</em>, <em>flow_sid</em>, <em>execution_sid</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step.html#ExecutionStepList"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepList" title="Permalink to this definition">¶</a></dt>
<dd><p>Bases: <a class="reference internal" href="twilio.base.html#twilio.base.list_resource.ListResource" title="twilio.base.list_resource.ListResource"><code class="xref py py-class docutils literal notranslate"><span class="pre">twilio.base.list_resource.ListResource</span></code></a></p>
<dl class="method">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepList.get">
<code class="descname">get</code><span class="sig-paren">(</span><em>sid</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step.html#ExecutionStepList.get"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepList.get" title="Permalink to this definition">¶</a></dt>
<dd><p>Constructs a ExecutionStepContext</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><strong>sid</strong> – The unique string that identifies the resource</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body">twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepContext</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepContext" title="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepContext">twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepContext</a></td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepList.get_page">
<code class="descname">get_page</code><span class="sig-paren">(</span><em>target_url</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step.html#ExecutionStepList.get_page"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepList.get_page" title="Permalink to this definition">¶</a></dt>
<dd><p>Retrieve a specific page of ExecutionStepInstance records from the API.
Request is executed immediately</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><strong>target_url</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.11)"><em>str</em></a>) – API-generated URL for the requested results page</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body">Page of ExecutionStepInstance</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepPage" title="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepPage">twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepPage</a></td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepList.list">
<code class="descname">list</code><span class="sig-paren">(</span><em>limit=None</em>, <em>page_size=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step.html#ExecutionStepList.list"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepList.list" title="Permalink to this definition">¶</a></dt>
<dd><p>Lists ExecutionStepInstance records from the API as a list.
Unlike stream(), this operation is eager and will load <cite>limit</cite> records into
memory before returning.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>limit</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.11)"><em>int</em></a>) – Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit</li>
<li><strong>page_size</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.11)"><em>int</em></a>) – Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">Generator that will yield up to limit results</p>
</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last"><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.11)">list</a>[<a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance" title="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance">twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance</a>]</p>
</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepList.page">
<code class="descname">page</code><span class="sig-paren">(</span><em>page_token=<object object></em>, <em>page_number=<object object></em>, <em>page_size=<object object></em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step.html#ExecutionStepList.page"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepList.page" title="Permalink to this definition">¶</a></dt>
<dd><p>Retrieve a single page of ExecutionStepInstance records from the API.
Request is executed immediately</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>page_token</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#str" title="(in Python v3.11)"><em>str</em></a>) – PageToken provided by the API</li>
<li><strong>page_number</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.11)"><em>int</em></a>) – Page Number, this value is simply for client state</li>
<li><strong>page_size</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.11)"><em>int</em></a>) – Number of records to return, defaults to 50</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">Page of ExecutionStepInstance</p>
</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last"><a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepPage" title="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepPage">twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepPage</a></p>
</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepList.stream">
<code class="descname">stream</code><span class="sig-paren">(</span><em>limit=None</em>, <em>page_size=None</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step.html#ExecutionStepList.stream"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepList.stream" title="Permalink to this definition">¶</a></dt>
<dd><p>Streams ExecutionStepInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>limit</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.11)"><em>int</em></a>) – Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit</li>
<li><strong>page_size</strong> (<a class="reference external" href="https://docs.python.org/3/library/functions.html#int" title="(in Python v3.11)"><em>int</em></a>) – Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">Generator that will yield up to limit results</p>
</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last"><a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#list" title="(in Python v3.11)">list</a>[<a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance" title="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance">twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance</a>]</p>
</td>
</tr>
</tbody>
</table>
</dd></dl>
</dd></dl>
<dl class="class">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepPage">
<em class="property">class </em><code class="descclassname">twilio.rest.studio.v1.flow.execution.execution_step.</code><code class="descname">ExecutionStepPage</code><span class="sig-paren">(</span><em>version</em>, <em>response</em>, <em>solution</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step.html#ExecutionStepPage"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepPage" title="Permalink to this definition">¶</a></dt>
<dd><p>Bases: <a class="reference internal" href="twilio.base.html#twilio.base.page.Page" title="twilio.base.page.Page"><code class="xref py py-class docutils literal notranslate"><span class="pre">twilio.base.page.Page</span></code></a></p>
<dl class="method">
<dt id="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepPage.get_instance">
<code class="descname">get_instance</code><span class="sig-paren">(</span><em>payload</em><span class="sig-paren">)</span><a class="reference internal" href="../../../_modules/twilio/rest/studio/v1/flow/execution/execution_step.html#ExecutionStepPage.get_instance"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepPage.get_instance" title="Permalink to this definition">¶</a></dt>
<dd><p>Build an instance of ExecutionStepInstance</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><strong>payload</strong> (<a class="reference external" href="https://docs.python.org/3/library/stdtypes.html#dict" title="(in Python v3.11)"><em>dict</em></a>) – Payload response from the API</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body">twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference internal" href="#twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance" title="twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance">twilio.rest.studio.v1.flow.execution.execution_step.ExecutionStepInstance</a></td>
</tr>
</tbody>
</table>
</dd></dl>
</dd></dl>
</div>
</div>
</div>
</div>
</div>
<div aria-label="main navigation" class="sphinxsidebar" role="navigation">
<div class="sphinxsidebarwrapper"><h3><a href="https://www.twilio.com/docs/libraries/python">About twilio-python</a></h3>
<p>
A Python module for communicating with the Twilio API and generating <a href="http://www.twilio.com/docs/api/twiml/">TwiML</a>.
</p>
<h3>Useful Links</h3>
<ul>
<li><a href="https://www.twilio.com/docs/libraries/python">Twilio's Python Helper Library Docs</a></li>
<li><a href="https://pypi.org/project/twilio/">twilio @ PyPI</a></li>
<li><a href="https://github.com/twilio/twilio-python">twilio-python @ GitHub</a></li>
</ul>
<h3><a href="../../../index.html">Table of Contents</a></h3>
<ul>
<li><a class="reference internal" href="#">twilio.rest.studio.v1.flow.execution.execution_step package</a><ul>
<li><a class="reference internal" href="#submodules">Submodules</a></li>
<li><a class="reference internal" href="#module-twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context">twilio.rest.studio.v1.flow.execution.execution_step.execution_step_context module</a></li>
<li><a class="reference internal" href="#module-twilio.rest.studio.v1.flow.execution.execution_step">Module contents</a></li>
</ul>
</li>
</ul>
<div class="relations">
<h3>Related Topics</h3>
<ul>
<li><a href="../../../index.html">Documentation overview</a><ul>
<li><a href="modules.html">twilio</a><ul>
<li><a href="twilio.html">twilio package</a><ul>
<li><a href="twilio.rest.html">twilio.rest package</a><ul>
<li><a href="twilio.rest.studio.html">twilio.rest.studio package</a><ul>
<li><a href="twilio.rest.studio.v1.html">twilio.rest.studio.v1 package</a><ul>
<li><a href="twilio.rest.studio.v1.flow.html">twilio.rest.studio.v1.flow package</a><ul>
<li><a href="twilio.rest.studio.v1.flow.execution.html">twilio.rest.studio.v1.flow.execution package</a><ul>
<li>Previous: <a href="twilio.rest.studio.v1.flow.execution.html" title="previous chapter">twilio.rest.studio.v1.flow.execution package</a></li>
<li>Next: <a href="twilio.rest.studio.v2.html" title="next chapter">twilio.rest.studio.v2 package</a></li>
</ul></li>
</ul></li>
</ul></li>
</ul></li>
</ul></li>
</ul></li>
</ul></li>
</ul></li>
</ul>
</div>
<div aria-label="source link" role="note">
<h3>This Page</h3>
<ul class="this-page-menu">
<li><a href="../../../_sources/docs/source/_rst/twilio.rest.studio.v1.flow.execution.execution_step.rst.txt" rel="nofollow">Show Source</a></li>
</ul>
</div>
<div id="searchbox" role="search" style="display: none">
<h3>Quick search</h3>
<div class="searchformwrapper">
<form action="../../../search.html" class="search" method="get">
<input name="q" type="text"/>
<input type="submit" value="Go"/>
<input name="check_keywords" type="hidden" value="yes"/>
<input name="area" type="hidden" value="default"/>
</form>
</div>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
</div>
</div>
<div class="clearer"></div>
</div>
<div class="footer">
©2019, Twilio.
|
Powered by <a href="http://sphinx-doc.org/">Sphinx 1.8.0</a>
& <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.12</a>
|
<a href="../../../_sources/docs/source/_rst/twilio.rest.studio.v1.flow.execution.execution_step.rst.txt" rel="nofollow">Page source</a>
</div>
</body>
</html>
|
{
"content_hash": "109a2a561fe95f8bc18affe3be2038dc",
"timestamp": "",
"source": "github",
"line_count": 688,
"max_line_length": 752,
"avg_line_length": 77.34156976744185,
"alnum_prop": 0.7362951269474357,
"repo_name": "twilio/twilio-python",
"id": "dbb0c777a7bf8c762c3801b1f09f11cde0ec2690",
"size": "53281",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "docs/build/html/docs/source/_rst/twilio.rest.studio.v1.flow.execution.execution_step.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "234"
},
{
"name": "Makefile",
"bytes": "2157"
},
{
"name": "Python",
"bytes": "11241545"
}
],
"symlink_target": ""
}
|
package tv.floe.metronome.classification.logisticregression.iterativereduce;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.mahout.math.Matrix;
//import com.cloudera.knittingboar.sgd.GradientBuffer;
import com.cloudera.iterativereduce.Updateable;
public class ParameterVectorUpdatable implements
Updateable<ParameterVector> {
ParameterVector param_msg = null;
public ParameterVectorUpdatable() {}
public ParameterVectorUpdatable(ParameterVector g) {
this.param_msg = g;
}
@Override
public void fromBytes(ByteBuffer b) {
b.rewind();
// System.out.println( " > ParameterVectorGradient::fromBytes > b: " +
// b.array().length + ", remaining: " + b.remaining() );
try {
this.param_msg = new ParameterVector();
this.param_msg.Deserialize(b.array());
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
@Override
public ParameterVector get() {
// TODO Auto-generated method stub
return this.param_msg;
}
@Override
public void set(ParameterVector t) {
// TODO Auto-generated method stub
this.param_msg = t;
}
@Override
public ByteBuffer toBytes() {
// TODO Auto-generated method stub
byte[] bytes = null;
try {
bytes = this.param_msg.Serialize();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// ByteBuffer buf = ByteBuffer.allocate(bytes.length);
// buf.put(bytes);
ByteBuffer buf = ByteBuffer.wrap(bytes);
return buf;
}
@Override
public void fromString(String s) {
// TODO Auto-generated method stub
}
/*
@Override
public int getGlobalBatchNumber() {
// TODO Auto-generated method stub
return 0;
}
@Override
public int getGlobalIterationNumber() {
// TODO Auto-generated method stub
return 0;
}
@Override
public void setIterationState(int arg0, int arg1) {
// TODO Auto-generated method stub
}
*/
}
|
{
"content_hash": "f1b9787a647981e33e6283302d63a3a1",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 76,
"avg_line_length": 21.31958762886598,
"alnum_prop": 0.65715667311412,
"repo_name": "jpatanooga/Metronome",
"id": "eaf359d00452798a07ab3801db94fbbcc6065ad1",
"size": "2870",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/java/tv/floe/metronome/classification/logisticregression/iterativereduce/ParameterVectorUpdatable.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1036172"
},
{
"name": "Python",
"bytes": "2359"
},
{
"name": "R",
"bytes": "6576"
},
{
"name": "Shell",
"bytes": "206"
}
],
"symlink_target": ""
}
|
const webpack = require('webpack');
const conf = require('./gulp.conf');
const path = require('path');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const ExtractTextPlugin = require('extract-text-webpack-plugin');
const pkg = require('../package.json');
const autoprefixer = require('autoprefixer');
module.exports = {
module: {
preLoaders: [
{
test: /\.js$/,
exclude: /node_modules/,
loader: 'eslint'
}
],
loaders: [
{
test: /.json$/,
loaders: [
'json'
]
},
{
test: /\.css$/,
loaders: ExtractTextPlugin.extract({
fallbackLoader: 'style',
loader: 'css?minimize!!postcss'
})
},
{
test: /\.js$/,
exclude: /node_modules/,
loaders: [
'babel'
]
},
{
test: /.vue$/,
loaders: [
'vue'
]
}
]
},
plugins: [
new webpack.optimize.OccurrenceOrderPlugin(),
new webpack.NoErrorsPlugin(),
new HtmlWebpackPlugin({
template: conf.path.src('index.html')
}),
new webpack.DefinePlugin({
'process.env.NODE_ENV': '"production"'
}),
new webpack.optimize.UglifyJsPlugin({
compress: {unused: true, dead_code: true, warnings: false} // eslint-disable-line camelcase
}),
new ExtractTextPlugin('index-[contenthash].css'),
new webpack.optimize.CommonsChunkPlugin({name: 'vendor'})
],
postcss: () => [autoprefixer],
output: {
path: path.join(process.cwd(), conf.paths.dist),
filename: '[name]-[hash].js'
},
entry: {
app: `./${conf.path.src('index')}`,
vendor: Object.keys(pkg.dependencies)
}
};
|
{
"content_hash": "7ae1d939c383f3b8c0c1a86952d08111",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 97,
"avg_line_length": 23.643835616438356,
"alnum_prop": 0.5457705677867902,
"repo_name": "PizzaTime-Exia/PizzaTime-Dashboard",
"id": "3eb42021494deda86f4e502bf89327d475a4f470",
"size": "1726",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conf/webpack-dist.conf.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1617"
},
{
"name": "HTML",
"bytes": "971"
},
{
"name": "JavaScript",
"bytes": "15655"
},
{
"name": "Vue",
"bytes": "15507"
}
],
"symlink_target": ""
}
|
ACCEPTED
#### According to
NUB Generator [autonym]
#### Published in
null
#### Original name
null
### Remarks
null
|
{
"content_hash": "1e3bf8a46f42d952c0f5cae62cb597e7",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 23,
"avg_line_length": 9.076923076923077,
"alnum_prop": 0.6779661016949152,
"repo_name": "mdoering/backbone",
"id": "2ef1be845d7d457d06d2ba920f222e0474d25c4a",
"size": "179",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Pteridophyta/Polypodiopsida/Polypodiales/Polypodiaceae/Grammitis/Grammitis taxifolia/Polypodium taxifolium taxifolium/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
const Node = require('./node')
class Alpha extends Node {
constructor(val) {
super()
this.value = val
}
accept(visitor) {
this.value = visitor.visit(this.value)
}
eval(context) {
if (this.value.eval) {
return new Alpha(this.value.eval(context))
}
return this
}
genCSS(context, output) {
output.add('alpha(opacity=')
if (this.value.genCSS) {
this.value.genCSS(context, output)
} else {
output.add(this.value)
}
output.add(')')
}
}
Alpha.prototype.type = 'Alpha'
module.exports = Alpha
|
{
"content_hash": "bac48c301e6b943c3d06caf7068d150b",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 48,
"avg_line_length": 16.285714285714285,
"alnum_prop": 0.6,
"repo_name": "arusanov/less.js",
"id": "463b0450b9e42a75ab64cf3a721edb91188a8ca6",
"size": "570",
"binary": false,
"copies": "1",
"ref": "refs/heads/lesson",
"path": "lib/less/tree/alpha.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "159749"
},
{
"name": "HTML",
"bytes": "649"
},
{
"name": "JavaScript",
"bytes": "326626"
}
],
"symlink_target": ""
}
|
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var game_info_1 = require('./game-info');
var core_1 = require('@angular/core');
var GameController = (function () {
function GameController() {
this.game = new game_info_1.GameInfo();
}
GameController.prototype.ngOnInit = function () {
var _this = this;
setInterval(function () { return _this.nextTick(); }, 50);
};
GameController.prototype.nextTick = function () {
this.game.miniTicks--;
if (this.game.miniTicks == 0) {
this.game.miniTicks = this.game.miniTicksToTick;
var speedSortedHeroes = this.game.getHeroesByGreatestSpeed();
var speedSortedEnemies = this.game.getEnemiesByGreatestSpeed();
var currentHero = void 0;
var currentEnemy = void 0;
currentHero = speedSortedHeroes.shift();
currentEnemy = speedSortedEnemies.shift();
while (currentHero || currentEnemy) {
if (!currentEnemy || (currentHero && currentHero.hero.speedPriority >= currentEnemy.enemy.speedPriority)) {
currentHero.nextTick(this.game);
currentHero = speedSortedHeroes.shift();
}
else if (!currentHero || (currentEnemy && currentEnemy.enemy.speedPriority > currentHero.hero.speedPriority)) {
currentEnemy.nextTick(this.game);
currentEnemy = speedSortedEnemies.shift();
}
}
for (var _i = 0, _a = this.game.heroes; _i < _a.length; _i++) {
var x = _a[_i];
x.checkIfDead(this.game);
}
for (var _b = 0, _c = this.game.enemies; _b < _c.length; _b++) {
var x = _c[_b];
x.checkIfDead(this.game);
}
}
};
GameController = __decorate([
core_1.Component({
moduleId: module.id,
selector: 'game',
template: "\n\t\t<game-info [game]=\"game\"></game-info>\n\t",
styles: ["\n\t\t:host {\n\t\t\tdisplay: block;\n\t\t\theight: 100%;\n\t\t}\n\t"]
}),
__metadata('design:paramtypes', [])
], GameController);
return GameController;
}());
exports.GameController = GameController;
var game = new GameController();
//# sourceMappingURL=game-controller.js.map
|
{
"content_hash": "77815f8993766af138bc2a8803e1c9d2",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 150,
"avg_line_length": 48.328125,
"alnum_prop": 0.5651471063692208,
"repo_name": "Redd500/www",
"id": "1d51413bc55a12cbe3aa58a3a2b55799b30e403a",
"size": "3093",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/game-controller.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "274"
},
{
"name": "HTML",
"bytes": "1077"
},
{
"name": "JavaScript",
"bytes": "12007"
},
{
"name": "TypeScript",
"bytes": "54426"
}
],
"symlink_target": ""
}
|
.. _`tbreportdemo`:
Demo of Python failure reports with pytest
==================================================
Here is a nice run of several tens of failures
and how ``pytest`` presents things (unfortunately
not showing the nice colors here in the HTML that you
get on the terminal - we are working on that)::
assertion $ pytest failure_demo.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y
rootdir: $REGENDOC_TMPDIR/assertion, inifile:
collected 42 items
failure_demo.py FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF [100%]
================================= FAILURES =================================
____________________________ test_generative[0] ____________________________
param1 = 3, param2 = 6
def test_generative(param1, param2):
> assert param1 * 2 < param2
E assert (3 * 2) < 6
failure_demo.py:16: AssertionError
_________________________ TestFailing.test_simple __________________________
self = <failure_demo.TestFailing object at 0xdeadbeef>
def test_simple(self):
def f():
return 42
def g():
return 43
> assert f() == g()
E assert 42 == 43
E + where 42 = <function TestFailing.test_simple.<locals>.f at 0xdeadbeef>()
E + and 43 = <function TestFailing.test_simple.<locals>.g at 0xdeadbeef>()
failure_demo.py:29: AssertionError
____________________ TestFailing.test_simple_multiline _____________________
self = <failure_demo.TestFailing object at 0xdeadbeef>
def test_simple_multiline(self):
otherfunc_multi(
42,
> 6*9)
failure_demo.py:34:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
a = 42, b = 54
def otherfunc_multi(a,b):
> assert (a ==
b)
E assert 42 == 54
failure_demo.py:12: AssertionError
___________________________ TestFailing.test_not ___________________________
self = <failure_demo.TestFailing object at 0xdeadbeef>
def test_not(self):
def f():
return 42
> assert not f()
E assert not 42
E + where 42 = <function TestFailing.test_not.<locals>.f at 0xdeadbeef>()
failure_demo.py:39: AssertionError
_________________ TestSpecialisedExplanations.test_eq_text _________________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_eq_text(self):
> assert 'spam' == 'eggs'
E AssertionError: assert 'spam' == 'eggs'
E - spam
E + eggs
failure_demo.py:43: AssertionError
_____________ TestSpecialisedExplanations.test_eq_similar_text _____________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_eq_similar_text(self):
> assert 'foo 1 bar' == 'foo 2 bar'
E AssertionError: assert 'foo 1 bar' == 'foo 2 bar'
E - foo 1 bar
E ? ^
E + foo 2 bar
E ? ^
failure_demo.py:46: AssertionError
____________ TestSpecialisedExplanations.test_eq_multiline_text ____________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_eq_multiline_text(self):
> assert 'foo\nspam\nbar' == 'foo\neggs\nbar'
E AssertionError: assert 'foo\nspam\nbar' == 'foo\neggs\nbar'
E foo
E - spam
E + eggs
E bar
failure_demo.py:49: AssertionError
______________ TestSpecialisedExplanations.test_eq_long_text _______________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_eq_long_text(self):
a = '1'*100 + 'a' + '2'*100
b = '1'*100 + 'b' + '2'*100
> assert a == b
E AssertionError: assert '111111111111...2222222222222' == '1111111111111...2222222222222'
E Skipping 90 identical leading characters in diff, use -v to show
E Skipping 91 identical trailing characters in diff, use -v to show
E - 1111111111a222222222
E ? ^
E + 1111111111b222222222
E ? ^
failure_demo.py:54: AssertionError
_________ TestSpecialisedExplanations.test_eq_long_text_multiline __________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_eq_long_text_multiline(self):
a = '1\n'*100 + 'a' + '2\n'*100
b = '1\n'*100 + 'b' + '2\n'*100
> assert a == b
E AssertionError: assert '1\n1\n1\n1\n...n2\n2\n2\n2\n' == '1\n1\n1\n1\n1...n2\n2\n2\n2\n'
E Skipping 190 identical leading characters in diff, use -v to show
E Skipping 191 identical trailing characters in diff, use -v to show
E 1
E 1
E 1
E 1
E 1...
E
E ...Full output truncated (7 lines hidden), use '-vv' to show
failure_demo.py:59: AssertionError
_________________ TestSpecialisedExplanations.test_eq_list _________________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_eq_list(self):
> assert [0, 1, 2] == [0, 1, 3]
E assert [0, 1, 2] == [0, 1, 3]
E At index 2 diff: 2 != 3
E Use -v to get the full diff
failure_demo.py:62: AssertionError
______________ TestSpecialisedExplanations.test_eq_list_long _______________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_eq_list_long(self):
a = [0]*100 + [1] + [3]*100
b = [0]*100 + [2] + [3]*100
> assert a == b
E assert [0, 0, 0, 0, 0, 0, ...] == [0, 0, 0, 0, 0, 0, ...]
E At index 100 diff: 1 != 2
E Use -v to get the full diff
failure_demo.py:67: AssertionError
_________________ TestSpecialisedExplanations.test_eq_dict _________________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_eq_dict(self):
> assert {'a': 0, 'b': 1, 'c': 0} == {'a': 0, 'b': 2, 'd': 0}
E AssertionError: assert {'a': 0, 'b': 1, 'c': 0} == {'a': 0, 'b': 2, 'd': 0}
E Omitting 1 identical items, use -vv to show
E Differing items:
E {'b': 1} != {'b': 2}
E Left contains more items:
E {'c': 0}
E Right contains more items:
E {'d': 0}...
E
E ...Full output truncated (2 lines hidden), use '-vv' to show
failure_demo.py:70: AssertionError
_________________ TestSpecialisedExplanations.test_eq_set __________________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_eq_set(self):
> assert set([0, 10, 11, 12]) == set([0, 20, 21])
E AssertionError: assert {0, 10, 11, 12} == {0, 20, 21}
E Extra items in the left set:
E 10
E 11
E 12
E Extra items in the right set:
E 20
E 21...
E
E ...Full output truncated (2 lines hidden), use '-vv' to show
failure_demo.py:73: AssertionError
_____________ TestSpecialisedExplanations.test_eq_longer_list ______________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_eq_longer_list(self):
> assert [1,2] == [1,2,3]
E assert [1, 2] == [1, 2, 3]
E Right contains more items, first extra item: 3
E Use -v to get the full diff
failure_demo.py:76: AssertionError
_________________ TestSpecialisedExplanations.test_in_list _________________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_in_list(self):
> assert 1 in [0, 2, 3, 4, 5]
E assert 1 in [0, 2, 3, 4, 5]
failure_demo.py:79: AssertionError
__________ TestSpecialisedExplanations.test_not_in_text_multiline __________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_not_in_text_multiline(self):
text = 'some multiline\ntext\nwhich\nincludes foo\nand a\ntail'
> assert 'foo' not in text
E AssertionError: assert 'foo' not in 'some multiline\ntext\nw...ncludes foo\nand a\ntail'
E 'foo' is contained here:
E some multiline
E text
E which
E includes foo
E ? +++
E and a...
E
E ...Full output truncated (2 lines hidden), use '-vv' to show
failure_demo.py:83: AssertionError
___________ TestSpecialisedExplanations.test_not_in_text_single ____________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_not_in_text_single(self):
text = 'single foo line'
> assert 'foo' not in text
E AssertionError: assert 'foo' not in 'single foo line'
E 'foo' is contained here:
E single foo line
E ? +++
failure_demo.py:87: AssertionError
_________ TestSpecialisedExplanations.test_not_in_text_single_long _________
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_not_in_text_single_long(self):
text = 'head ' * 50 + 'foo ' + 'tail ' * 20
> assert 'foo' not in text
E AssertionError: assert 'foo' not in 'head head head head hea...ail tail tail tail tail '
E 'foo' is contained here:
E head head foo tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail
E ? +++
failure_demo.py:91: AssertionError
______ TestSpecialisedExplanations.test_not_in_text_single_long_term _______
self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
def test_not_in_text_single_long_term(self):
text = 'head ' * 50 + 'f'*70 + 'tail ' * 20
> assert 'f'*70 not in text
E AssertionError: assert 'fffffffffff...ffffffffffff' not in 'head head he...l tail tail '
E 'ffffffffffffffffff...fffffffffffffffffff' is contained here:
E head head fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffftail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail
E ? ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
failure_demo.py:95: AssertionError
______________________________ test_attribute ______________________________
def test_attribute():
class Foo(object):
b = 1
i = Foo()
> assert i.b == 2
E assert 1 == 2
E + where 1 = <failure_demo.test_attribute.<locals>.Foo object at 0xdeadbeef>.b
failure_demo.py:102: AssertionError
_________________________ test_attribute_instance __________________________
def test_attribute_instance():
class Foo(object):
b = 1
> assert Foo().b == 2
E AssertionError: assert 1 == 2
E + where 1 = <failure_demo.test_attribute_instance.<locals>.Foo object at 0xdeadbeef>.b
E + where <failure_demo.test_attribute_instance.<locals>.Foo object at 0xdeadbeef> = <class 'failure_demo.test_attribute_instance.<locals>.Foo'>()
failure_demo.py:108: AssertionError
__________________________ test_attribute_failure __________________________
def test_attribute_failure():
class Foo(object):
def _get_b(self):
raise Exception('Failed to get attrib')
b = property(_get_b)
i = Foo()
> assert i.b == 2
failure_demo.py:117:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <failure_demo.test_attribute_failure.<locals>.Foo object at 0xdeadbeef>
def _get_b(self):
> raise Exception('Failed to get attrib')
E Exception: Failed to get attrib
failure_demo.py:114: Exception
_________________________ test_attribute_multiple __________________________
def test_attribute_multiple():
class Foo(object):
b = 1
class Bar(object):
b = 2
> assert Foo().b == Bar().b
E AssertionError: assert 1 == 2
E + where 1 = <failure_demo.test_attribute_multiple.<locals>.Foo object at 0xdeadbeef>.b
E + where <failure_demo.test_attribute_multiple.<locals>.Foo object at 0xdeadbeef> = <class 'failure_demo.test_attribute_multiple.<locals>.Foo'>()
E + and 2 = <failure_demo.test_attribute_multiple.<locals>.Bar object at 0xdeadbeef>.b
E + where <failure_demo.test_attribute_multiple.<locals>.Bar object at 0xdeadbeef> = <class 'failure_demo.test_attribute_multiple.<locals>.Bar'>()
failure_demo.py:125: AssertionError
__________________________ TestRaises.test_raises __________________________
self = <failure_demo.TestRaises object at 0xdeadbeef>
def test_raises(self):
s = 'qwe'
> raises(TypeError, "int(s)")
failure_demo.py:134:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
> int(s)
E ValueError: invalid literal for int() with base 10: 'qwe'
<0-codegen $PYTHON_PREFIX/lib/python3.5/site-packages/_pytest/python_api.py:595>:1: ValueError
______________________ TestRaises.test_raises_doesnt _______________________
self = <failure_demo.TestRaises object at 0xdeadbeef>
def test_raises_doesnt(self):
> raises(IOError, "int('3')")
E Failed: DID NOT RAISE <class 'OSError'>
failure_demo.py:137: Failed
__________________________ TestRaises.test_raise ___________________________
self = <failure_demo.TestRaises object at 0xdeadbeef>
def test_raise(self):
> raise ValueError("demo error")
E ValueError: demo error
failure_demo.py:140: ValueError
________________________ TestRaises.test_tupleerror ________________________
self = <failure_demo.TestRaises object at 0xdeadbeef>
def test_tupleerror(self):
> a,b = [1]
E ValueError: not enough values to unpack (expected 2, got 1)
failure_demo.py:143: ValueError
______ TestRaises.test_reinterpret_fails_with_print_for_the_fun_of_it ______
self = <failure_demo.TestRaises object at 0xdeadbeef>
def test_reinterpret_fails_with_print_for_the_fun_of_it(self):
l = [1,2,3]
print ("l is %r" % l)
> a,b = l.pop()
E TypeError: 'int' object is not iterable
failure_demo.py:148: TypeError
--------------------------- Captured stdout call ---------------------------
l is [1, 2, 3]
________________________ TestRaises.test_some_error ________________________
self = <failure_demo.TestRaises object at 0xdeadbeef>
def test_some_error(self):
> if namenotexi:
E NameError: name 'namenotexi' is not defined
failure_demo.py:151: NameError
____________________ test_dynamic_compile_shows_nicely _____________________
def test_dynamic_compile_shows_nicely():
import imp
import sys
src = 'def foo():\n assert 1 == 0\n'
name = 'abc-123'
module = imp.new_module(name)
code = _pytest._code.compile(src, name, 'exec')
py.builtin.exec_(code, module.__dict__)
sys.modules[name] = module
> module.foo()
failure_demo.py:168:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
def foo():
> assert 1 == 0
E AssertionError
<2-codegen 'abc-123' $REGENDOC_TMPDIR/assertion/failure_demo.py:165>:2: AssertionError
____________________ TestMoreErrors.test_complex_error _____________________
self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
def test_complex_error(self):
def f():
return 44
def g():
return 43
> somefunc(f(), g())
failure_demo.py:178:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
failure_demo.py:9: in somefunc
otherfunc(x,y)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
a = 44, b = 43
def otherfunc(a,b):
> assert a==b
E assert 44 == 43
failure_demo.py:6: AssertionError
___________________ TestMoreErrors.test_z1_unpack_error ____________________
self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
def test_z1_unpack_error(self):
l = []
> a,b = l
E ValueError: not enough values to unpack (expected 2, got 0)
failure_demo.py:182: ValueError
____________________ TestMoreErrors.test_z2_type_error _____________________
self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
def test_z2_type_error(self):
l = 3
> a,b = l
E TypeError: 'int' object is not iterable
failure_demo.py:186: TypeError
______________________ TestMoreErrors.test_startswith ______________________
self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
def test_startswith(self):
s = "123"
g = "456"
> assert s.startswith(g)
E AssertionError: assert False
E + where False = <built-in method startswith of str object at 0xdeadbeef>('456')
E + where <built-in method startswith of str object at 0xdeadbeef> = '123'.startswith
failure_demo.py:191: AssertionError
__________________ TestMoreErrors.test_startswith_nested ___________________
self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
def test_startswith_nested(self):
def f():
return "123"
def g():
return "456"
> assert f().startswith(g())
E AssertionError: assert False
E + where False = <built-in method startswith of str object at 0xdeadbeef>('456')
E + where <built-in method startswith of str object at 0xdeadbeef> = '123'.startswith
E + where '123' = <function TestMoreErrors.test_startswith_nested.<locals>.f at 0xdeadbeef>()
E + and '456' = <function TestMoreErrors.test_startswith_nested.<locals>.g at 0xdeadbeef>()
failure_demo.py:198: AssertionError
_____________________ TestMoreErrors.test_global_func ______________________
self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
def test_global_func(self):
> assert isinstance(globf(42), float)
E assert False
E + where False = isinstance(43, float)
E + where 43 = globf(42)
failure_demo.py:201: AssertionError
_______________________ TestMoreErrors.test_instance _______________________
self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
def test_instance(self):
self.x = 6*7
> assert self.x != 42
E assert 42 != 42
E + where 42 = <failure_demo.TestMoreErrors object at 0xdeadbeef>.x
failure_demo.py:205: AssertionError
_______________________ TestMoreErrors.test_compare ________________________
self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
def test_compare(self):
> assert globf(10) < 5
E assert 11 < 5
E + where 11 = globf(10)
failure_demo.py:208: AssertionError
_____________________ TestMoreErrors.test_try_finally ______________________
self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
def test_try_finally(self):
x = 1
try:
> assert x == 0
E assert 1 == 0
failure_demo.py:213: AssertionError
___________________ TestCustomAssertMsg.test_single_line ___________________
self = <failure_demo.TestCustomAssertMsg object at 0xdeadbeef>
def test_single_line(self):
class A(object):
a = 1
b = 2
> assert A.a == b, "A.a appears not to be b"
E AssertionError: A.a appears not to be b
E assert 1 == 2
E + where 1 = <class 'failure_demo.TestCustomAssertMsg.test_single_line.<locals>.A'>.a
failure_demo.py:224: AssertionError
____________________ TestCustomAssertMsg.test_multiline ____________________
self = <failure_demo.TestCustomAssertMsg object at 0xdeadbeef>
def test_multiline(self):
class A(object):
a = 1
b = 2
> assert A.a == b, "A.a appears not to be b\n" \
"or does not appear to be b\none of those"
E AssertionError: A.a appears not to be b
E or does not appear to be b
E one of those
E assert 1 == 2
E + where 1 = <class 'failure_demo.TestCustomAssertMsg.test_multiline.<locals>.A'>.a
failure_demo.py:230: AssertionError
___________________ TestCustomAssertMsg.test_custom_repr ___________________
self = <failure_demo.TestCustomAssertMsg object at 0xdeadbeef>
def test_custom_repr(self):
class JSON(object):
a = 1
def __repr__(self):
return "This is JSON\n{\n 'foo': 'bar'\n}"
a = JSON()
b = 2
> assert a.a == b, a
E AssertionError: This is JSON
E {
E 'foo': 'bar'
E }
E assert 1 == 2
E + where 1 = This is JSON\n{\n 'foo': 'bar'\n}.a
failure_demo.py:240: AssertionError
============================= warnings summary =============================
None
Metafunc.addcall is deprecated and scheduled to be removed in pytest 4.0.
Please use Metafunc.parametrize instead.
-- Docs: http://doc.pytest.org/en/latest/warnings.html
================== 42 failed, 1 warnings in 0.12 seconds ===================
|
{
"content_hash": "a39b59fb6b437c9c6f2b691f6966f075",
"timestamp": "",
"source": "github",
"line_count": 605,
"max_line_length": 196,
"avg_line_length": 38.315702479338846,
"alnum_prop": 0.5014451490444761,
"repo_name": "tareqalayan/pytest",
"id": "55626b257b2a6b195921fbb90cc3590ae8cd94c6",
"size": "23182",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/en/example/reportingdemo.rst",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "568"
},
{
"name": "Python",
"bytes": "1554146"
}
],
"symlink_target": ""
}
|
class MenuSettings : public Screen
{
private:
std::vector<std::string> m_controlTexts;
public:
MenuSettings(ScreenBuffer *screenBuffer);
private:
virtual void handleInput();
virtual void update();
virtual void fillScreenBuffer();
int getCenterPosX(const std::string Text) const;
};
#endif // !MENUSETTINGS_H
|
{
"content_hash": "97af58dd085ed71804fc86ffa13db11d",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 49,
"avg_line_length": 16.15,
"alnum_prop": 0.739938080495356,
"repo_name": "TmCrafz/QuadrisSFML",
"id": "c2aed55647f98791810d9a1c27f6a3483954a2b4",
"size": "472",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MenuSettings.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "49467"
},
{
"name": "Makefile",
"bytes": "1837"
}
],
"symlink_target": ""
}
|
function Controller() {
require("alloy/controllers/BaseController").apply(this, Array.prototype.slice.call(arguments));
arguments[0] ? arguments[0]["__parentSymbol"] : null;
arguments[0] ? arguments[0]["$model"] : null;
var $ = this;
var exports = {};
$.__views.container = Ti.UI.createView({
width: Ti.UI.SIZE,
height: Ti.UI.SIZE,
id: "container"
});
$.__views.container && $.addTopLevelView($.__views.container);
exports.destroy = function() {};
_.extend($, $.__views);
var message = Alloy.createWidget("be.k0suke.progresshud", "widget", {
message: "--- response ----------\n\nuser/query execute"
});
$.container.add(message.getView());
message.on("click", function() {
$.container.remove(message.getView());
});
var users = Alloy.createCollection("Users");
users.fetch({
data: {
page: 1,
per_page: 10
},
success: function(collection) {
message.trigger("add", {
message: "success: "
});
collection.each(function(user, index) {
message.trigger("add", {
message: " " + index + ": " + user.get("id")
});
});
},
error: function(collection, response) {
message.trigger("add", {
message: "error: " + response
});
}
});
_.extend($, exports);
}
var Alloy = require("alloy"), Backbone = Alloy.Backbone, _ = Alloy._;
module.exports = Controller;
|
{
"content_hash": "c4da30f2894cc089d5192e5ad127d4e4",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 99,
"avg_line_length": 32.30612244897959,
"alnum_prop": 0.5173720783322805,
"repo_name": "k0sukey/alloy.adapter.acs",
"id": "c0a9a8d7872ff0404ce8f529009e2a0e421b92dd",
"size": "1583",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Resources/alloy/controllers/users/query.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "180720"
},
{
"name": "Python",
"bytes": "4668"
}
],
"symlink_target": ""
}
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.13"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>Neodroid: /home/heider/Projects/Neodroid/Unity/Examples/Assets/droid/Runtime/Interfaces/IHasRegister.cs File Reference</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<script type="text/javascript" src="navtreedata.js"></script>
<script type="text/javascript" src="navtree.js"></script>
<script type="text/javascript">
$(document).ready(initResizable);
</script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectlogo"><img alt="Logo" src="neodroidcropped124.png"/></td>
<td id="projectalign" style="padding-left: 0.5em;">
<div id="projectname">Neodroid
 <span id="projectnumber">0.2.0</span>
</div>
<div id="projectbrief">Machine Learning Environment Prototyping Tool</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.13 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<script type="text/javascript" src="menudata.js"></script>
<script type="text/javascript" src="menu.js"></script>
<script type="text/javascript">
$(function() {
initMenu('',true,false,'search.php','Search');
$(document).ready(function() { init_search(); });
});
</script>
<div id="main-nav"></div>
</div><!-- top -->
<div id="side-nav" class="ui-resizable side-nav-resizable">
<div id="nav-tree">
<div id="nav-tree-contents">
<div id="nav-sync" class="sync"></div>
</div>
</div>
<div id="splitbar" style="-moz-user-select:none;"
class="ui-resizable-handle">
</div>
</div>
<script type="text/javascript">
$(document).ready(function(){initNavTree('_i_has_register_8cs.html','');});
</script>
<div id="doc-content">
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div class="header">
<div class="summary">
<a href="#nested-classes">Classes</a> |
<a href="#namespaces">Namespaces</a> </div>
<div class="headertitle">
<div class="title">IHasRegister.cs File Reference</div> </div>
</div><!--header-->
<div class="contents">
<p><a href="_i_has_register_8cs_source.html">Go to the source code of this file.</a></p>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="nested-classes"></a>
Classes</h2></td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">interface  </td><td class="memItemRight" valign="bottom"><a class="el" href="interfacedroid_1_1_runtime_1_1_interfaces_1_1_i_registerable.html">droid.Runtime.Interfaces.IRegisterable</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">interface  </td><td class="memItemRight" valign="bottom"><a class="el" href="interfacedroid_1_1_runtime_1_1_interfaces_1_1_i_has_register.html">droid.Runtime.Interfaces.IHasRegister< in in T ></a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="namespaces"></a>
Namespaces</h2></td></tr>
<tr class="memitem:namespacedroid_1_1_runtime_1_1_interfaces"><td class="memItemLeft" align="right" valign="top">namespace  </td><td class="memItemRight" valign="bottom"><a class="el" href="namespacedroid_1_1_runtime_1_1_interfaces.html">droid.Runtime.Interfaces</a></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2"> </td></tr>
</table>
</div><!-- contents -->
</div><!-- doc-content -->
<!-- start footer part -->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
<ul>
<li class="navelem"><a class="el" href="dir_4f6666a8f2ab10bc970eb7559668f031.html">Runtime</a></li><li class="navelem"><a class="el" href="dir_72e30cb13ec7f0b6aa475169ab72c9f9.html">Interfaces</a></li><li class="navelem"><a class="el" href="_i_has_register_8cs.html">IHasRegister.cs</a></li>
<li class="footer">Generated by
<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.13 </li>
</ul>
</div>
</body>
</html>
|
{
"content_hash": "b91116b60fb3bb79205a86666edfd009",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 295,
"avg_line_length": 47.03361344537815,
"alnum_prop": 0.6853671609790959,
"repo_name": "sintefneodroid/droid",
"id": "bda30cab56ee26c7c6292d395002f039b2075958",
"size": "5597",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/cvs/_i_has_register_8cs.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "1475508"
},
{
"name": "HLSL",
"bytes": "3678"
},
{
"name": "ShaderLab",
"bytes": "157664"
},
{
"name": "Shell",
"bytes": "204"
}
],
"symlink_target": ""
}
|
<?php
namespace creators\metroui\grid;
use Yii;
use yii\helpers\Html;
class CheckboxColumn extends \yii\grid\CheckboxColumn
{
protected function renderHeaderCellContent()
{
$name = rtrim($this->name, '[]') . '_all';
$id = $this->grid->options['id'];
$options = json_encode([
'name' => $this->name,
'multiple' => $this->multiple,
'checkAll' => $name,
], JSON_UNESCAPED_SLASHES | JSON_UNESCAPED_UNICODE);
$this->grid->getView()->registerJs("jQuery('#$id').yiiGridView('setSelectionColumn', $options);");
if ($this->header !== null || !$this->multiple) {
return parent::renderHeaderCellContent();
} else {
return '<div class="input-control checkbox"><label>'.
Html::checkBox($name, false, ['class' => 'select-on-check-all']).
'<span class="check"></span></label></div>';
}
}
protected function renderDataCellContent($model, $key, $index)
{
if ($this->checkboxOptions instanceof Closure) {
$options = call_user_func($this->checkboxOptions, $model, $key, $index, $this);
} else {
$options = $this->checkboxOptions;
if (!isset($options['value'])) {
$options['value'] = is_array($key) ? json_encode($key, JSON_UNESCAPED_SLASHES | JSON_UNESCAPED_UNICODE) : $key;
}
}
return '<div class="input-control checkbox"><label>'.
Html::checkbox($this->name, !empty($options['checked']), $options).
'<span class="check"></span></label></div>';
}
}
|
{
"content_hash": "9c344e528b5c9f9beaae564c36afaf02",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 127,
"avg_line_length": 36.68888888888889,
"alnum_prop": 0.5487583282858873,
"repo_name": "creators/yii2-metroui",
"id": "78542cb0e4fc77a0a98647eb47185c8b4904e35f",
"size": "1651",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CheckboxColumn.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "PHP",
"bytes": "41953"
}
],
"symlink_target": ""
}
|
Date: Tue, 10 Dec 1996 20:46:05 GMT
Server: NCSA/1.4.2
Content-type: text/html
Last-modified: Sun, 01 Sep 1996 22:38:46 GMT
Content-length: 5230
<html>
<head>
<TITLE>Marc Langheinrich's Resume</TITLE>
<BASE HREF="http://www.cs.washington.edu/homes/marclang/">
<LINK REV="made" HREF="mailto:marclang@u.washington.edu">
</head>
<body>
<address>Marc Langheinrich's Resume</address>
<hr></a>
<center><h1>Marc M. Langheinrich</h1></center>
<p align=right>
5210 Woodlawn Ave. N.<BR>
Seattle, WA 98103<BR>
(206) 632-6637</p>
<hr>
<h2>Education</h2>
<ul>
<li><strong>1991 - present</strong><br>
<!WA0><!WA0><a href = "http://www.uni-bielefeld.de">
Universität Bielefeld, Germany</a><br>
Graduate Student (Diplom) in
<!WA1><!WA1><a href="http://www.techfak.uni-bielefeld.de/techfak/studiengang.html">
<em>Computer Science in the Natural Sciences</em></a><br>
<!WA2><!WA2><a href="http://www.techfak.uni-bielefeld.de/techfak/techfakengl.html">
Faculty of Technology</a><br>
Expected Graduation: Spring 1997<p>
<li><strong>Summer 1995 - Summer 1996</strong><br>
<!WA3><!WA3><a href = "http://www.u.washington.edu">
The University of Washington</a><br>
Visiting graduate student under the Fulbright program.<br>
Department of <!WA4><!WA4><a href="http://www.cs.washington.edu">
<em>Computer Science</em></a>.<br>
<br>
</ul><hr>
<h2>Objective</h2>
<ul>
<i>To obtain a position in the field of Computer Science and Information
Systems -- specifically, a career involving utilization of intelligent agents
in modern computer applications.</i></ul><p>
<hr>
<h2>Experience</h2>
<ul>
<li>
<i>Research Assistant</i><br>
<strong>Department of Computer Science</strong>, University of
Washington, USA<br>
Summer 1996<br>
Development of Web Agent software for the research project
"<!WA5><!WA5><a
href="http://www.cs.washington.edu/research/ahoy/">Ahoy! The
Homepage Finder</a>", an agent architecture for finding
personal homepages on the World Wide Web. Program Size 10,000+
lines. Programming Language: Perl5 (OOP) and C. <p>
<li>
<i>Application Programmer and WebMaster</i><br>
<strong>University Library</strong>, University of Bielefeld,
Germany<br>
1994-1995<br>
Programmed several library applications for student use
(DOS/Windows, C/C++).
Created and maintained <!WA6><!WA6><a
href="http://www.ub.uni-bielefeld.de/e-home.htm">
Library web server</a>, including hardware/platform selection,
software evaluation and network installation within the
University Net.<p>
<li>
<i>Supervisor of System Group, HighTension Project</i><br>
<strong>Department of Computer Science</strong>, University of
Bielefeld, Germany<br>
1993-1995<br>
Design and implementation of a knowledge based therapy module as
part of the <!WA7><!WA7><a
href="http://www.TechFak.Uni-Bielefeld.DE/techfak/ags/wbski/hypercon/">
HYPERCON diagnosis system</a>. Coordinator of systems programmer
group.<p>
<li>
<i>Tutor Computer Science Classes</i><br>
<strong>Department of Computer Science</strong>, University of
Bielefeld, Germany<br>
1992-1993<br>
Teaching assistant for technical cs and theory classes.<p>
<li>
<i>Freelance Database Programmer</i><br>
1989-1995<br>
Programmed several medium and large database applications in
Paradox, FoxPro and Access for various customers like Health Care
provider and Moving companies.<p>
<li>
<i>Personal Client Assistant</i><br>
<strong>Rasmussen GmbH</strong>, Maintal, Germany<br>
Seasonal, 1987-1991<br>
Responsible for JIT-production and delivery of vital car-parts to
all major german car companies (Mercedes/Benz, BMW, Porsche, VW).
Daily trading volume averaging $10,000.<p>
</ul>
<hr>
<h2>Computer Skills</h2>
Proficient in the following areas:
<p>
<ul>
<li><i>Internet Tools</i> -- World-Wide Web, Gopher,
WAIS. HTTP-Server (httpd, Apache, Website) on Unix, DOS,
Windows (3.1 + 95) and NT systems. TCP/IP network programming.
<li><i>Programming Languages</i> -- C/C++, Pascal, LISP, Perl5
(object oriented), Java, JavaScript, (HTML/HTTP)<br>
<li><i>Operating systems</i> -- DOS, Windows3.1/NT/95, Unix, Mac OS
<li><i>Database Applications</i> -- Microsoft Access and FoxPro,
Borland Paradox and Paradox for Windows (including programming
experience in all databases)<br>
<li><i>Word Processing</i> --
Microsoft Word, WordPerfect, Ami Pro and the LaTeX Document
preparation system<br>
</ul><p>
<hr>
<h2>Language Skills</h2>
<ul><li><em>Native speaker</em> of <strong>German</strong>.
<li><em>Fluent</em> in <strong>English</strong>.
<li><em>Able to communicate</em> in <strong>French</strong> and
<strong>Japanese</strong>.
</ul>
<hr>
<h2>Scholarships</h2>
<ul>
<li> 1995, Fulbright Scholar at the University of Washington,
Seattle, USA<br>
<li> 1990, Nominated for a 'Studienstiftung des deutschen Volkes'
scholarship
</ul>
<p>
<hr>
<address>
<strong>Marc Langheinrich</strong><br>
Scores (GRE/TOEFL) and references available upon request.<br>
<!WA8><!WA8><a href = http://www.cs.washington.edu>
The University of Washington, Department of Computer Science</a><br>
Email: <em><!WA9><!WA9><a href="mail:marclang@cs.washington.edu">marclang@cs.washington.edu</a><br>
WWW: <em><!WA10><!WA10><a href="http://www.cs.washington.edu/homes/marclang/">
http://www.cs.washington.edu/homes/marclang/</a></em>
</address>
</body></html>
|
{
"content_hash": "cc53c9b639eccdcf4f935dee8cec53e7",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 100,
"avg_line_length": 37.48299319727891,
"alnum_prop": 0.7018148820326678,
"repo_name": "ML-SWAT/Web2KnowledgeBase",
"id": "90deba9b46dfccabbf24162b2e06fd96be6c0d9b",
"size": "5510",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webkb/other/washington/http:^^www.cs.washington.edu^homes^marclang^resume.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Groff",
"bytes": "641"
},
{
"name": "HTML",
"bytes": "34381871"
},
{
"name": "Perl",
"bytes": "14786"
},
{
"name": "Perl6",
"bytes": "18697"
},
{
"name": "Python",
"bytes": "10084"
}
],
"symlink_target": ""
}
|
package org.codinjutsu.tools.jenkins.view.action.results;
import com.google.common.base.MoreObjects;
import com.intellij.execution.PsiLocation;
import com.intellij.execution.testframework.sm.runner.GeneralTestEventsProcessor;
import com.intellij.execution.testframework.sm.runner.events.*;
import com.intellij.openapi.project.Project;
import com.intellij.psi.JavaPsiFacade;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.search.GlobalSearchScope;
import com.offbytwo.jenkins.model.TestCase;
import com.offbytwo.jenkins.model.TestResult;
import com.offbytwo.jenkins.model.TestSuites;
import jetbrains.buildServer.messages.serviceMessages.TestFailed;
import org.codinjutsu.tools.jenkins.exception.JenkinsPluginRuntimeException;
import org.codinjutsu.tools.jenkins.logic.RequestManager;
import org.codinjutsu.tools.jenkins.model.Job;
import org.codinjutsu.tools.jenkins.view.BrowserPanel;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
class JobTestResultsHandler {
private static final String CLASS_METHOD_SEPARATOR = ":::";
private Job job;
private final Project project;
private final GeneralTestEventsProcessor testEventsProcessor;
JobTestResultsHandler(Job job, Project project, GeneralTestEventsProcessor testEventsProcessor) {
this.job = job;
this.project = project;
this.testEventsProcessor = testEventsProcessor;
testEventsProcessor.setLocator((protocol, path, project1, scope) -> {
String[] parts = path.split(CLASS_METHOD_SEPARATOR);
String className = parts.length != 0 ? parts[0] : null;
String method = parts.length > 1 ? parts[1] : null;
PsiClass clazz = find(className, project1);
PsiElement element = clazz == null || method == null
? clazz
: Arrays.stream(clazz.getMethods())
.filter(m -> method.equals(m.getName()))
.findFirst()
.map(PsiElement.class::cast)
.orElse(clazz);
return clazz == null ? Collections.emptyList() : Collections.singletonList(new PsiLocation<>(element));
});
}
void handle() {
final BrowserPanel browserPanel = BrowserPanel.getInstance(project);
try {
List<TestResult> testResults = RequestManager.getInstance(project).loadTestResultsFor(job);
testResults.forEach(this::handleTestResult);
} catch (JenkinsPluginRuntimeException e) {
browserPanel.notifyErrorJenkinsToolWindow(e.getMessage());
}
testEventsProcessor.onFinishTesting();
}
private void handleTestResult(TestResult testResult) {
if (testResult.getSuites() != null) {
testResult.getSuites().forEach(this::handleTestSuites);
}
}
private void handleTestSuites(TestSuites testSuites) {
testEventsProcessor.onSuiteStarted(new TestSuiteStartedEvent(testSuites.getName(), "file://" + testSuites.getName()));
testSuites.getCases().forEach(this::handleTestCase);
testEventsProcessor.onSuiteFinished(new TestSuiteFinishedEvent(testSuites.getName()));
}
private void handleTestCase(TestCase testCase) {
testEventsProcessor.onTestStarted(new TestStartedEvent(testCase.getName(), "file://" + testCase.getClassName() + CLASS_METHOD_SEPARATOR + testCase.getName()));
if (testCase.isSkipped()) {
testEventsProcessor.onTestIgnored(new TestIgnoredEvent(testCase.getName(), MoreObjects.firstNonNull(testCase.getErrorDetails(), ""), testCase.getErrorStackTrace()));
} else if (testCase.getErrorDetails() != null) {
testEventsProcessor.onTestFailure(new TestFailedEvent(new MyTestFailed(testCase), true));
}
testEventsProcessor.onTestFinished(new TestFinishedEvent(testCase.getName(), (long) testCase.getDuration()));
}
private static PsiClass find(String fqClassname, Project project) {
return JavaPsiFacade.getInstance(project).findClass(fqClassname, GlobalSearchScope.allScope(project));
}
private static class MyTestFailed extends TestFailed {
private String stacktrace;
MyTestFailed(TestCase c) {
super(c.getName(), c.getErrorDetails());
this.stacktrace = c.getErrorStackTrace();
}
@Override
public String getStacktrace() {
return stacktrace;
}
}
}
|
{
"content_hash": "212c7b66999479cffb36b79687175950",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 177,
"avg_line_length": 42.60377358490566,
"alnum_prop": 0.6992914083259522,
"repo_name": "dboissier/jenkins-control-plugin",
"id": "c713b161bec0390bedda316e07c33fac8bb48374",
"size": "4516",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/org/codinjutsu/tools/jenkins/view/action/results/JobTestResultsHandler.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "338322"
}
],
"symlink_target": ""
}
|
package main
import (
"github.com/shaoshing/gotest"
"testing"
)
func TestDiagnose(t *testing.T) {
assert.Test = t
assert.True(diagnose())
}
|
{
"content_hash": "d53810bd95159f4b5e67a4d8d1ba7f0d",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 33,
"avg_line_length": 13.272727272727273,
"alnum_prop": 0.6986301369863014,
"repo_name": "roperzh/ogg",
"id": "8ae44bc07e0f76b5b36d4f0b03cdbd0e03c0dc63",
"size": "146",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Godeps/_workspace/src/github.com/shaoshing/train/cmd/diagnose_test.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "31402"
},
{
"name": "Go",
"bytes": "7770"
},
{
"name": "JavaScript",
"bytes": "53208"
},
{
"name": "Ruby",
"bytes": "67"
}
],
"symlink_target": ""
}
|
@import ObjectiveC.runtime;
#pragma mark- Handle
@implementation UIView (Handle)
- (UIImage *)rw_snapshot {
UIGraphicsBeginImageContextWithOptions(self.frame.size, YES, [[UIScreen mainScreen] scale]);
[self drawViewHierarchyInRect:self.bounds afterScreenUpdates:NO];
UIImage *snapshot = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return snapshot;
}
@end
@implementation UIView (Utils)
- (void)rw_eachSubview:(void (^)(UIView *subview))block {
NSParameterAssert(block != nil);
[self.subviews enumerateObjectsUsingBlock:^(UIView *subview, NSUInteger idx, BOOL *stop) {
block(subview);
}];
}
- (void)rw_removeAllSubviews {
[self.subviews makeObjectsPerformSelector:@selector(removeFromSuperview)];
}
- (UIViewController *)rw_viewController {
for (UIView* next = [self superview]; next; next = next.superview) {
UIResponder *nextResponder = [next nextResponder];
if ([nextResponder isKindOfClass:[UIViewController class]]) {
return (UIViewController *)nextResponder;
}
}
return nil;
}
@end
#pragma mark- Gesture
static const void *GestureRecognizerHandler = &GestureRecognizerHandler;
@interface UIView (RWGesturePrivate)
@property (nonatomic, copy) void (^handler)(UITapGestureRecognizer *tap);
@end
@implementation UIView (Gesture)
#pragma mark- 限制view连续点击
- (void)limitViewUserInteractionEnabled {
[self limitViewUserInteractionEnabledForCustomerTimeInterval:0.5];
}
- (void)limitViewUserInteractionEnabledForCustomerTimeInterval:(NSTimeInterval)ti {
self.userInteractionEnabled = NO;
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(ti * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
self.userInteractionEnabled = YES;
});
}
#pragma mark- 添加 tap 手势
- (UITapGestureRecognizer *)rw_tapWithTarget:(id)target action:(SEL)aSelector {
NSAssert([target respondsToSelector:aSelector], @"selector & target 必须存在");
UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:target action:aSelector];
[self addGestureRecognizer:tap];
return tap;
}
- (void)rw_tapWithHandler:(void (^)(UITapGestureRecognizer *tap))handler {
NSAssert(handler, @"handler 不能为nil");
self.handler = handler;
[self rw_tapWithTarget:self action:@selector(handlerTapAction:)];
}
#pragma mark- handlerAction
- (void)handlerTapAction:(UITapGestureRecognizer *)sender {
self.handler(sender);
}
#pragma mark- setter && getter
- (void)setHandler:(void (^)(UITapGestureRecognizer *tap))handler {
objc_setAssociatedObject(self, GestureRecognizerHandler, handler, OBJC_ASSOCIATION_COPY_NONATOMIC);
}
- (void (^)(UITapGestureRecognizer *tap))handler {
return objc_getAssociatedObject(self, GestureRecognizerHandler);
}
@end
|
{
"content_hash": "a0e5709835298b31d1d1d010cd73fbc1",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 112,
"avg_line_length": 28.11881188118812,
"alnum_prop": 0.7295774647887324,
"repo_name": "RangeChiong/RWKit",
"id": "d933e4bdce281b319fcda2b60a98d85c8813a08f",
"size": "3029",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "RWKit/Category/UIView+Utils.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "658531"
},
{
"name": "Ruby",
"bytes": "1153"
},
{
"name": "Shell",
"bytes": "8290"
}
],
"symlink_target": ""
}
|
using System;
using System.Linq;
using System.Windows;
using TranslatorApk.Logic.OrganisationItems;
using TranslatorApk.Logic.Utils;
using TranslatorApk.Resources.Localizations;
using TranslatorApk.Windows;
namespace TranslatorApk
{
public partial class App
{
private static readonly NLog.Logger Logger = NLog.LogManager.GetCurrentClassLogger();
protected override void OnStartup(StartupEventArgs e)
{
AppDomain.CurrentDomain.UnhandledException += (sender, args) =>
{
var ex = args.ExceptionObject is Exception exception ? exception : new Exception("Domain exception");
Logger.Fatal(ex);
GlobalVariables.BugSnagClient.Notify(ex);
Clipboard.SetText("Message: " + (args.ExceptionObject as Exception)?.FlattenToString());
MessageBox.Show(StringResources.UnhandledExceptionOccured);
};
DispatcherUnhandledException += (sender, args) =>
{
Logger.Error(args.Exception);
GlobalVariables.BugSnagClient.Notify(args.Exception);
Clipboard.SetText(args.Exception.ToString());
MessageBox.Show(string.Format(StringResources.ExceptionOccured, args.Exception.FlattenToString()));
#if !DEBUG
args.Handled = true;
#endif
};
if (e.Args.FirstOrDefault() == "update")
{
new DownloadWindow().Show();
return;
}
CommonUtils.LoadSettings();
#if !DEBUG
if (!GlobalVariables.Portable)
#endif
CommonUtils.CheckForUpdate();
if (string.IsNullOrEmpty(GlobalVariables.AppSettings.ApktoolVersion))
{
Logger.Error("Apktool not found");
MessBox.ShowDial(StringResources.ApktoolNotFound);
}
WindowManager.ActivateWindow<MainWindow>();
}
protected override void OnExit(ExitEventArgs e)
{
NotificationService.Instance.Dispose();
GlobalVariables.AppSettings.SourceDictionaries = GlobalVariables.SourceDictionaries.ToList();
CommonUtils.UpdateSettingsApiKeys();
}
}
}
|
{
"content_hash": "8d6a5d8b4bb4977eaa340972ab1b780f",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 117,
"avg_line_length": 32.371428571428574,
"alnum_prop": 0.6160635481023831,
"repo_name": "And42/TranslatorApk",
"id": "f028fa5bed7fc2af55db6553cf8eaa024796ba70",
"size": "2268",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "App/App.xaml.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "432660"
},
{
"name": "PowerShell",
"bytes": "3849"
}
],
"symlink_target": ""
}
|
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:th="http://www.thymeleaf.org"
xmlns:sec="http://www.thymeleaf.org/extras/spring-security">
<head th:include="@{_fragment/_headTag} :: headTag">
</head>
<body>
<div th:remove="tag" th:include="@{_fragment/_apolo-template} :: apolo-template"></div>
<div id="page-header">
<!-- RIBBON -->
<div id="ribbon">
<ol class="breadcrumb">
<li th:text="#{user.group.view.title}">
</li>
</ol>
</div>
<!-- END RIBBON -->
</div>
<div id="page-content">
<section class="content">
<div class="col-sm-12">
<div th:remove="tag" th:include="@{user-group/_form} :: user-group-form"></div>
<div class="btn-group" role="group">
<a sec:authorize="@apoloSecurity.hasPermission('USER_EDIT')"
th:href='@{|/web/${#authentication.systemUser.tenant.url}/user-group/edit/${userGroup.id}|}' class="btn btn-default">
<i class="fa fa-pencil-square-o"></i>
<span th:text="#{common.edit}" />
</a>
<a href='#' class="btn btn-default back" >
<i class="fa fa-backward"></i>
<span th:text="#{common.back}" />
</a>
</div>
</div>
</section>
</div>
</body>
</html>
|
{
"content_hash": "6b0f5abd954d83b1ec70a45f775efe4e",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 136,
"avg_line_length": 33.53658536585366,
"alnum_prop": 0.5127272727272727,
"repo_name": "macielbombonato/apolo",
"id": "0f221df4116f3a347e3c1a064d98c864b7648739",
"size": "1375",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apolo-web/src/main/webapp/WEB-INF/views/user-group/view.html",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "169055"
},
{
"name": "HTML",
"bytes": "241672"
},
{
"name": "Java",
"bytes": "269170"
},
{
"name": "JavaScript",
"bytes": "133322"
},
{
"name": "Shell",
"bytes": "5351"
}
],
"symlink_target": ""
}
|
#include "server_base.h"
#include <cassert>
#include <array>
#include <iostream>
#include "debug.h"
using namespace ngtcp2;
extern Config config;
Buffer::Buffer(const uint8_t *data, size_t datalen)
: buf{data, data + datalen}, begin(buf.data()), tail(begin + datalen) {}
Buffer::Buffer(size_t datalen) : buf(datalen), begin(buf.data()), tail(begin) {}
static ngtcp2_conn *get_conn(ngtcp2_crypto_conn_ref *conn_ref) {
auto h = static_cast<HandlerBase *>(conn_ref->user_data);
return h->conn();
}
HandlerBase::HandlerBase() : conn_ref_{get_conn, this}, conn_(nullptr) {
ngtcp2_connection_close_error_default(&last_error_);
}
HandlerBase::~HandlerBase() {
if (conn_) {
ngtcp2_conn_del(conn_);
}
}
ngtcp2_conn *HandlerBase::conn() const { return conn_; }
ngtcp2_crypto_conn_ref *HandlerBase::conn_ref() { return &conn_ref_; }
|
{
"content_hash": "a6caa589def4596e32b89d57ff6993f0",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 80,
"avg_line_length": 24.314285714285713,
"alnum_prop": 0.6803760282021152,
"repo_name": "ngtcp2/ngtcp2",
"id": "aea86bd9284ffeac07acd7476060b69732d0e301",
"size": "1990",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "examples/server_base.cc",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2110067"
},
{
"name": "C++",
"bytes": "2009"
},
{
"name": "CMake",
"bytes": "47185"
},
{
"name": "Dockerfile",
"bytes": "3855"
},
{
"name": "M4",
"bytes": "50317"
},
{
"name": "Makefile",
"bytes": "21158"
},
{
"name": "Shell",
"bytes": "5735"
}
],
"symlink_target": ""
}
|
from __future__ import print_function, division, absolute_import
from numba import struct, jit, double
import numpy as np
record_type = struct([('x', double), ('y', double)])
record_dtype = record_type.get_dtype()
a = np.array([(1.0, 2.0), (3.0, 4.0)], dtype=record_dtype)
@jit(argtypes=[record_type[:]])
def hypot(data):
# return types of numpy functions are inferred
result = np.empty_like(data, dtype=np.float64)
# notice access to structure elements 'x' and 'y' via attribute access
# You can also index by field name or field index:
# data[i].x == data[i]['x'] == data[i][0]
for i in range(data.shape[0]):
result[i] = np.sqrt(data[i].x * data[i].x + data[i].y * data[i].y)
return result
print(hypot(a))
# Notice inferred return type
print(hypot.signature)
# Notice native sqrt calls and for.body direct access to memory...
#print(hypot.lfunc)
|
{
"content_hash": "9027e6ff15c918fc6d3a749ffd81308c",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 74,
"avg_line_length": 34.46153846153846,
"alnum_prop": 0.6618303571428571,
"repo_name": "shiquanwang/numba",
"id": "f10d38556186c26b3bad8c5ff9b1889be1f6f7c4",
"size": "920",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/structures.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "62017"
},
{
"name": "C++",
"bytes": "2247"
},
{
"name": "Python",
"bytes": "1713467"
}
],
"symlink_target": ""
}
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}
module PactCLISpec(spec) where
import Test.Hspec
import qualified Data.Aeson as A
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import Data.Either(isRight)
import qualified Data.Yaml as Y
import Data.Text(Text)
import System.FilePath
import Pact.ApiReq
import Pact.Types.Command
import Pact.Types.API(SubmitBatch)
import Pact.Types.SigData(SigData)
spec :: Spec
spec = do
partialSigTests
-- note, generated with `pact -g`
key1, key2 :: FilePath
key1 = "tests" </> "add-sig" </> "key1.yaml"
key2 = "tests" </> "add-sig" </> "key2.yaml"
unsignedFile :: FilePath
unsignedFile = "tests" </> "add-sig" </> "unsigned.yaml"
unsignedFile2 :: FilePath
unsignedFile2 = "tests" </> "add-sig" </> "unsigned2.yaml"
partialSigTests :: Spec
partialSigTests =
describe "partial sigs" $ do
it "validates and combines two partial sigs" $ do
unsigned <- BS.readFile unsignedFile
sig1 <- Y.decodeEither' <$> addSigsReq [key1] True unsigned
sig2 <- Y.decodeEither' <$> addSigsReq [key2] True unsigned
sig1 `shouldSatisfy` isRight
sig2 `shouldSatisfy` isRight
let sig1' = either (error "impossible") id sig1
sig2' = either (error "impossible") id sig2
-- Works normally for local
command <- A.eitherDecode @(Command Text) . BSL.fromStrict <$> combineSigDatas [sig1', sig2'] True
command `shouldSatisfy` isRight
-- Works as submitBatch
commandBatch <- A.eitherDecode @SubmitBatch . BSL.fromStrict <$> combineSigDatas [sig1', sig2'] False
commandBatch `shouldSatisfy` isRight
it "validates when command portion is missing as well:" $ do
unsigned <- BS.readFile unsignedFile2
sig1 <- Y.decodeEither' @(SigData Text) <$> addSigsReq [key1] True unsigned
sig2 <- Y.decodeEither' @(SigData Text) <$> addSigsReq [key2] True unsigned
sig1 `shouldSatisfy` isRight
sig2 `shouldSatisfy` isRight
it "does not validate on missing signatures" $ do
unsigned <- BS.readFile unsignedFile
sig1 <- Y.decodeEither' <$> addSigsReq [key1] True unsigned
sig1 `shouldSatisfy` isRight
let sig1' = either (error "impossible") id sig1
-- Works normally for local
combineSigDatas [sig1'] True `shouldThrow` anyException
|
{
"content_hash": "bfcc98ddc9d515aba54f949ce9877918",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 107,
"avg_line_length": 36.6875,
"alnum_prop": 0.6959114139693356,
"repo_name": "kadena-io/pact",
"id": "621a4f95aa10a75a69077e1056b780ec1b2237b5",
"size": "2348",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/PactCLISpec.hs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "78184"
},
{
"name": "HTML",
"bytes": "2327"
},
{
"name": "Haskell",
"bytes": "1870778"
},
{
"name": "JavaScript",
"bytes": "12586"
},
{
"name": "Nix",
"bytes": "7894"
},
{
"name": "Shell",
"bytes": "6584"
}
],
"symlink_target": ""
}
|
using namespace json_spirit;
using namespace std;
Value getconnectioncount(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 0)
throw runtime_error(
"getconnectioncount\n"
"Returns the number of connections to other nodes.");
LOCK(cs_vNodes);
return (int)vNodes.size();
}
static void CopyNodeStats(std::vector<CNodeStats>& vstats)
{
vstats.clear();
LOCK(cs_vNodes);
vstats.reserve(vNodes.size());
BOOST_FOREACH(CNode* pnode, vNodes) {
CNodeStats stats;
pnode->copyStats(stats);
vstats.push_back(stats);
}
}
Value getpeerinfo(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 0)
throw runtime_error(
"getpeerinfo\n"
"Returns data about each connected network node.");
vector<CNodeStats> vstats;
CopyNodeStats(vstats);
Array ret;
BOOST_FOREACH(const CNodeStats& stats, vstats) {
Object obj;
obj.push_back(Pair("addr", stats.addrName));
obj.push_back(Pair("services", strprintf("%08"PRIx64, stats.nServices)));
obj.push_back(Pair("lastsend", (boost::int64_t)stats.nLastSend));
obj.push_back(Pair("lastrecv", (boost::int64_t)stats.nLastRecv));
obj.push_back(Pair("conntime", (boost::int64_t)stats.nTimeConnected));
obj.push_back(Pair("version", stats.nVersion));
obj.push_back(Pair("subver", stats.strSubVer));
obj.push_back(Pair("inbound", stats.fInbound));
obj.push_back(Pair("startingheight", stats.nStartingHeight));
obj.push_back(Pair("banscore", stats.nMisbehavior));
ret.push_back(obj);
}
return ret;
}
// summercoinv2: send alert.
// There is a known deadlock situation with ThreadMessageHandler
// ThreadMessageHandler: holds cs_vSend and acquiring cs_main in SendMessages()
// ThreadRPCServer: holds cs_main and acquiring cs_vSend in alert.RelayTo()/PushMessage()/BeginMessage()
Value sendalert(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 6)
throw runtime_error(
"sendalert <message> <privatekey> <minver> <maxver> <priority> <id> [cancelupto]\n"
"<message> is the alert text message\n"
"<privatekey> is hex string of alert master private key\n"
"<minver> is the minimum applicable internal client version\n"
"<maxver> is the maximum applicable internal client version\n"
"<priority> is integer priority number\n"
"<id> is the alert id\n"
"[cancelupto] cancels all alert id's up to this number\n"
"Returns true or false.");
CAlert alert;
CKey key;
alert.strStatusBar = params[0].get_str();
alert.nMinVer = params[2].get_int();
alert.nMaxVer = params[3].get_int();
alert.nPriority = params[4].get_int();
alert.nID = params[5].get_int();
if (params.size() > 6)
alert.nCancel = params[6].get_int();
alert.nVersion = PROTOCOL_VERSION;
alert.nRelayUntil = GetAdjustedTime() + 365*24*60*60;
alert.nExpiration = GetAdjustedTime() + 365*24*60*60;
CDataStream sMsg(SER_NETWORK, PROTOCOL_VERSION);
sMsg << (CUnsignedAlert)alert;
alert.vchMsg = vector<unsigned char>(sMsg.begin(), sMsg.end());
vector<unsigned char> vchPrivKey = ParseHex(params[1].get_str());
key.SetPrivKey(CPrivKey(vchPrivKey.begin(), vchPrivKey.end())); // if key is not correct openssl may crash
if (!key.Sign(Hash(alert.vchMsg.begin(), alert.vchMsg.end()), alert.vchSig))
throw runtime_error(
"Unable to sign alert, check private key?\n");
if(!alert.ProcessAlert())
throw runtime_error(
"Failed to process alert.\n");
// Relay alert
{
LOCK(cs_vNodes);
BOOST_FOREACH(CNode* pnode, vNodes)
alert.RelayTo(pnode);
}
Object result;
result.push_back(Pair("strStatusBar", alert.strStatusBar));
result.push_back(Pair("nVersion", alert.nVersion));
result.push_back(Pair("nMinVer", alert.nMinVer));
result.push_back(Pair("nMaxVer", alert.nMaxVer));
result.push_back(Pair("nPriority", alert.nPriority));
result.push_back(Pair("nID", alert.nID));
if (alert.nCancel > 0)
result.push_back(Pair("nCancel", alert.nCancel));
return result;
}
|
{
"content_hash": "3476f7d8d8d4f31c026278c378b4e620",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 110,
"avg_line_length": 35.79338842975206,
"alnum_prop": 0.6386515816208728,
"repo_name": "sherlockcoin/navbetatest2",
"id": "01d712ab24f645bce52b7f6a15e057f3fc015dcd",
"size": "4640",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/rpcnet.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "51312"
},
{
"name": "C",
"bytes": "3002632"
},
{
"name": "C++",
"bytes": "2662202"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "12684"
},
{
"name": "HTML",
"bytes": "50620"
},
{
"name": "Makefile",
"bytes": "12720"
},
{
"name": "NSIS",
"bytes": "5914"
},
{
"name": "Objective-C",
"bytes": "858"
},
{
"name": "Objective-C++",
"bytes": "3537"
},
{
"name": "Python",
"bytes": "41580"
},
{
"name": "QMake",
"bytes": "15205"
},
{
"name": "Shell",
"bytes": "8509"
}
],
"symlink_target": ""
}
|
module.exports.ALLOWED_TEST_CONFIG = `{% fa fa-exclamation-triangle red %} **Note:** Some configuration values are readonly and cannot be changed via test configuration. The following configuration values **can be changed** via per test configuration:
- \`animationDistanceThreshold\`
- \`baseUrl\`
- \`browser\` **note:** filters whether the tests or a suite of tests runs depending on the current browser
- \`defaultCommandTimeout\`
- \`execTimeout\`
- \`env\` **note:** Provided environment variables will be merged with current environment variables.
- \`includeShadowDom\`
- \`requestTimeout\`
- \`responseTimeout\`
- \`retries\`
- \`scrollBehavior\`
- \`viewportHeight\`
- \`viewportWidth\`
- \`waitForAnimations\`
`
module.exports.CHROMIUM_DOWNLOAD = `### Download specific Chrome version
The Chrome browser is evergreen - meaning it will automatically update itself, sometimes causing a breaking change in your automated tests. We host [chromium.cypress.io](https://chromium.cypress.io) with links to download a specific released version of Chrome (dev, Canary and stable) for every platform.
`
module.exports.CYPRESS_ENV_VAR_WARNING = `{% note warning "Difference between OS-level and Cypress environment variables" %}
In Cypress, "environment variables" are variables that are accessible via \`Cypress.env\`. These are not the same as OS-level environment variables. However, [it is possible to set Cypress environment variables from OS-level environment variables](/guides/guides/environment-variables.html#Option-3-CYPRESS).
{% endnote %}
`
module.exports.ERRORS_ANATOMY = `1. **Error name**: This is the type of the error (e.g. AssertionError, CypressError)
1. **Error message**: This generally tells you what went wrong. It can vary in length. Some are short like in the example, while some are long, and may tell you exactly how to fix the error.
1. **Learn more:** Some error messages contain a Learn more link that will take you to relevant Cypress documentation.
1. **Code frame file**: This is usually the top line of the stack trace and it shows the file, line number, and column number that is highlighted in the code frame below. Clicking on this link will open the file in your [preferred file opener](https://on.cypress.io/IDE-integration#File-Opener-Preference) and highlight the line and column in editors that support it.
1. **Code frame**: This shows a snippet of code where the failure occurred, with the relevant line and column highlighted.
1. **View stack trace**: Clicking this toggles the visibility of the stack trace. Stack traces vary in length. Clicking on a blue file path will open the file in your [preferred file opener](https://on.cypress.io/IDE-integration#File-Opener-Preference).
1. **Print to console button**: Click this to print the full error to your DevTools console. This will usually allow you to click on lines in the stack trace and open files in your DevTools.
{% imgTag /img/guides/command-failure-error.png "example command failure error" %}
`
module.exports.LINUX_DEPENDENCIES = [
'#### Ubuntu/Debian',
'',
'```shell',
'apt-get install libgtk2.0-0 libgtk-3-0 libgbm-dev libnotify-dev libgconf-2-4 libnss3 libxss1 libasound2 libxtst6 xauth xvfb',
'```',
'',
'#### CentOS',
'',
'```shell',
'yum install -y xorg-x11-server-Xvfb gtk2-devel gtk3-devel libnotify-devel GConf2 nss libXScrnSaver alsa-lib',
'```',
].join('\n')
module.exports.NETWORK_STUBBING_WARNING = `{% note warning %}
⚠️ \`cy.route()\` and \`cy.server()\` only support intercepting XMLHttpRequests. Requests using the Fetch API and other types of network requests like page loads and \`<script>\` tags will not be intercepted by \`cy.route()\` and \`cy.server()\`.
**To support requests using the Fetch API you can use one of the solutions below:**
- Use [\`cy.intercept()\`](/api/commands/intercept) which supports requests using the Fetch API and other types of network requests like page loads. See [\`cy.intercept()\`](/api/commands/intercept).
- Polyfill \`window.fetch\` to spy on and stub requests using \`cy.route()\` and \`cy.server()\` by enabling [\`experimentalFetchPolyfill\`](https://on.cypress.io/experimental). See {% issue 95 %} for more details and temporary workarounds.
{% endnote %}
`
module.exports.THEN_SHOULD_DIFFERENCE = [
"### What's the difference between `.then()` and `.should()`/`.and()`?",
'',
'Using `.then()` allows you to use the yielded subject in a callback function and should be used when you need to manipulate some values or do some actions.',
'',
'When using a callback function with `.should()` or `.and()`, on the other hand, there is special logic to rerun the callback function until no assertions throw within it. You should be careful of side affects in a `.should()` or `.and()` callback function that you would not want performed multiple times.',
].join('\n')
module.exports.VPN_ALLOWED_LIST = `To send the data and results of your tests to [Cypress Cloud](https://on.cypress.io/cloud-introduction), Cypress needs free access to some URLs.
If you are running the tests from within a restrictive VPN you will need to allow some URLs so that Cypress can have effective communication with Cypress Cloud.
**The URLs are the following:**
- \`https://api.cypress.io\` - **Cypress API**
- \`https://assets.cypress.io\` - **Asset CDN** (Org logos, icons, videos, screenshots, etc.)
- \`https://authenticate.cypress.io\` - **Authentication API**
- \`https://cloud.cypress.io\` - **Cypress Cloud**
- \`https://docs.cypress.io\` - **Cypress documentation**
- \`https://download.cypress.io\` - **CDN download of Cypress binary**
- \`https://on.cypress.io\` - **URL shortener for link redirects**
`
module.exports.XHR_STUBBING_DEPRECATED = [
'{% note warning %}',
'⚠️ **`cy.server()` and `cy.route()` are deprecated in Cypress 6.0.0**. In a future release, support for `cy.server()` and `cy.route()` will be removed. Consider using [`cy.intercept()`](/api/commands/intercept) instead. See our guide on [Migrating `cy.route()` to `cy.intercept()`](/guides/references/migration-guide#Migrating-cy-route-to-cy-intercept)',
'{% endnote %}',
].join('\n')
module.exports.CODE_RUNS_IN_NODE = [
'{% note warning %}',
'⚠️ This code is part of the [plugins file](/guides/core-concepts/writing-and-organizing-tests#Plugin-files) and thus executes in the Node environment. You cannot call `Cypress` or `cy` commands in this file, but you do have the direct access to the file system and the rest of the operating system.',
'{% endnote %}',
].join('\n')
|
{
"content_hash": "55342c3db8fcc9ba793316e1a5776932",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 368,
"avg_line_length": 67.64948453608247,
"alnum_prop": 0.7293508076805851,
"repo_name": "cypress-io/cypress-documentation",
"id": "959ab1bbaf73fb5ced3eea3a3bdb1140338707ce",
"size": "6574",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/partials.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7417"
},
{
"name": "JavaScript",
"bytes": "115450"
},
{
"name": "Shell",
"bytes": "58"
},
{
"name": "Vue",
"bytes": "94470"
}
],
"symlink_target": ""
}
|
from thrift.Thrift import *
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface:
def submitTopology(self, name, uploadedJarLocation, jsonConf, topology):
"""
Parameters:
- name
- uploadedJarLocation
- jsonConf
- topology
"""
pass
def submitTopologyWithOpts(self, name, uploadedJarLocation, jsonConf, topology, options):
"""
Parameters:
- name
- uploadedJarLocation
- jsonConf
- topology
- options
"""
pass
def killTopology(self, name):
"""
Parameters:
- name
"""
pass
def killTopologyWithOpts(self, name, options):
"""
Parameters:
- name
- options
"""
pass
def activate(self, name):
"""
Parameters:
- name
"""
pass
def deactivate(self, name):
"""
Parameters:
- name
"""
pass
def rebalance(self, name, options):
"""
Parameters:
- name
- options
"""
pass
def uploadNewCredentials(self, name, creds):
"""
Parameters:
- name
- creds
"""
pass
def beginFileUpload(self, ):
pass
def uploadChunk(self, location, chunk):
"""
Parameters:
- location
- chunk
"""
pass
def finishFileUpload(self, location):
"""
Parameters:
- location
"""
pass
def beginFileDownload(self, file):
"""
Parameters:
- file
"""
pass
def downloadChunk(self, id):
"""
Parameters:
- id
"""
pass
def getNimbusConf(self, ):
pass
def getClusterInfo(self, ):
pass
def getTopologyInfo(self, id):
"""
Parameters:
- id
"""
pass
def getTopologyConf(self, id):
"""
Parameters:
- id
"""
pass
def getTopology(self, id):
"""
Parameters:
- id
"""
pass
def getUserTopology(self, id):
"""
Parameters:
- id
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def submitTopology(self, name, uploadedJarLocation, jsonConf, topology):
"""
Parameters:
- name
- uploadedJarLocation
- jsonConf
- topology
"""
self.send_submitTopology(name, uploadedJarLocation, jsonConf, topology)
self.recv_submitTopology()
def send_submitTopology(self, name, uploadedJarLocation, jsonConf, topology):
self._oprot.writeMessageBegin('submitTopology', TMessageType.CALL, self._seqid)
args = submitTopology_args()
args.name = name
args.uploadedJarLocation = uploadedJarLocation
args.jsonConf = jsonConf
args.topology = topology
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_submitTopology(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = submitTopology_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.ite is not None:
raise result.ite
if result.aze is not None:
raise result.aze
return
def submitTopologyWithOpts(self, name, uploadedJarLocation, jsonConf, topology, options):
"""
Parameters:
- name
- uploadedJarLocation
- jsonConf
- topology
- options
"""
self.send_submitTopologyWithOpts(name, uploadedJarLocation, jsonConf, topology, options)
self.recv_submitTopologyWithOpts()
def send_submitTopologyWithOpts(self, name, uploadedJarLocation, jsonConf, topology, options):
self._oprot.writeMessageBegin('submitTopologyWithOpts', TMessageType.CALL, self._seqid)
args = submitTopologyWithOpts_args()
args.name = name
args.uploadedJarLocation = uploadedJarLocation
args.jsonConf = jsonConf
args.topology = topology
args.options = options
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_submitTopologyWithOpts(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = submitTopologyWithOpts_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.ite is not None:
raise result.ite
if result.aze is not None:
raise result.aze
return
def killTopology(self, name):
"""
Parameters:
- name
"""
self.send_killTopology(name)
self.recv_killTopology()
def send_killTopology(self, name):
self._oprot.writeMessageBegin('killTopology', TMessageType.CALL, self._seqid)
args = killTopology_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_killTopology(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = killTopology_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
return
def killTopologyWithOpts(self, name, options):
"""
Parameters:
- name
- options
"""
self.send_killTopologyWithOpts(name, options)
self.recv_killTopologyWithOpts()
def send_killTopologyWithOpts(self, name, options):
self._oprot.writeMessageBegin('killTopologyWithOpts', TMessageType.CALL, self._seqid)
args = killTopologyWithOpts_args()
args.name = name
args.options = options
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_killTopologyWithOpts(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = killTopologyWithOpts_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
return
def activate(self, name):
"""
Parameters:
- name
"""
self.send_activate(name)
self.recv_activate()
def send_activate(self, name):
self._oprot.writeMessageBegin('activate', TMessageType.CALL, self._seqid)
args = activate_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_activate(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = activate_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
return
def deactivate(self, name):
"""
Parameters:
- name
"""
self.send_deactivate(name)
self.recv_deactivate()
def send_deactivate(self, name):
self._oprot.writeMessageBegin('deactivate', TMessageType.CALL, self._seqid)
args = deactivate_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_deactivate(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = deactivate_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
return
def rebalance(self, name, options):
"""
Parameters:
- name
- options
"""
self.send_rebalance(name, options)
self.recv_rebalance()
def send_rebalance(self, name, options):
self._oprot.writeMessageBegin('rebalance', TMessageType.CALL, self._seqid)
args = rebalance_args()
args.name = name
args.options = options
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_rebalance(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = rebalance_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.ite is not None:
raise result.ite
if result.aze is not None:
raise result.aze
return
def uploadNewCredentials(self, name, creds):
"""
Parameters:
- name
- creds
"""
self.send_uploadNewCredentials(name, creds)
self.recv_uploadNewCredentials()
def send_uploadNewCredentials(self, name, creds):
self._oprot.writeMessageBegin('uploadNewCredentials', TMessageType.CALL, self._seqid)
args = uploadNewCredentials_args()
args.name = name
args.creds = creds
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_uploadNewCredentials(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = uploadNewCredentials_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.ite is not None:
raise result.ite
if result.aze is not None:
raise result.aze
return
def beginFileUpload(self, ):
self.send_beginFileUpload()
return self.recv_beginFileUpload()
def send_beginFileUpload(self, ):
self._oprot.writeMessageBegin('beginFileUpload', TMessageType.CALL, self._seqid)
args = beginFileUpload_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_beginFileUpload(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = beginFileUpload_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "beginFileUpload failed: unknown result");
def uploadChunk(self, location, chunk):
"""
Parameters:
- location
- chunk
"""
self.send_uploadChunk(location, chunk)
self.recv_uploadChunk()
def send_uploadChunk(self, location, chunk):
self._oprot.writeMessageBegin('uploadChunk', TMessageType.CALL, self._seqid)
args = uploadChunk_args()
args.location = location
args.chunk = chunk
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_uploadChunk(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = uploadChunk_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.aze is not None:
raise result.aze
return
def finishFileUpload(self, location):
"""
Parameters:
- location
"""
self.send_finishFileUpload(location)
self.recv_finishFileUpload()
def send_finishFileUpload(self, location):
self._oprot.writeMessageBegin('finishFileUpload', TMessageType.CALL, self._seqid)
args = finishFileUpload_args()
args.location = location
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_finishFileUpload(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = finishFileUpload_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.aze is not None:
raise result.aze
return
def beginFileDownload(self, file):
"""
Parameters:
- file
"""
self.send_beginFileDownload(file)
return self.recv_beginFileDownload()
def send_beginFileDownload(self, file):
self._oprot.writeMessageBegin('beginFileDownload', TMessageType.CALL, self._seqid)
args = beginFileDownload_args()
args.file = file
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_beginFileDownload(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = beginFileDownload_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "beginFileDownload failed: unknown result");
def downloadChunk(self, id):
"""
Parameters:
- id
"""
self.send_downloadChunk(id)
return self.recv_downloadChunk()
def send_downloadChunk(self, id):
self._oprot.writeMessageBegin('downloadChunk', TMessageType.CALL, self._seqid)
args = downloadChunk_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_downloadChunk(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = downloadChunk_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "downloadChunk failed: unknown result");
def getNimbusConf(self, ):
self.send_getNimbusConf()
return self.recv_getNimbusConf()
def send_getNimbusConf(self, ):
self._oprot.writeMessageBegin('getNimbusConf', TMessageType.CALL, self._seqid)
args = getNimbusConf_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getNimbusConf(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getNimbusConf_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getNimbusConf failed: unknown result");
def getClusterInfo(self, ):
self.send_getClusterInfo()
return self.recv_getClusterInfo()
def send_getClusterInfo(self, ):
self._oprot.writeMessageBegin('getClusterInfo', TMessageType.CALL, self._seqid)
args = getClusterInfo_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getClusterInfo(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getClusterInfo_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getClusterInfo failed: unknown result");
def getTopologyInfo(self, id):
"""
Parameters:
- id
"""
self.send_getTopologyInfo(id)
return self.recv_getTopologyInfo()
def send_getTopologyInfo(self, id):
self._oprot.writeMessageBegin('getTopologyInfo', TMessageType.CALL, self._seqid)
args = getTopologyInfo_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getTopologyInfo(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getTopologyInfo_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopologyInfo failed: unknown result");
def getTopologyConf(self, id):
"""
Parameters:
- id
"""
self.send_getTopologyConf(id)
return self.recv_getTopologyConf()
def send_getTopologyConf(self, id):
self._oprot.writeMessageBegin('getTopologyConf', TMessageType.CALL, self._seqid)
args = getTopologyConf_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getTopologyConf(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getTopologyConf_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopologyConf failed: unknown result");
def getTopology(self, id):
"""
Parameters:
- id
"""
self.send_getTopology(id)
return self.recv_getTopology()
def send_getTopology(self, id):
self._oprot.writeMessageBegin('getTopology', TMessageType.CALL, self._seqid)
args = getTopology_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getTopology(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getTopology_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopology failed: unknown result");
def getUserTopology(self, id):
"""
Parameters:
- id
"""
self.send_getUserTopology(id)
return self.recv_getUserTopology()
def send_getUserTopology(self, id):
self._oprot.writeMessageBegin('getUserTopology', TMessageType.CALL, self._seqid)
args = getUserTopology_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getUserTopology(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getUserTopology_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getUserTopology failed: unknown result");
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["submitTopology"] = Processor.process_submitTopology
self._processMap["submitTopologyWithOpts"] = Processor.process_submitTopologyWithOpts
self._processMap["killTopology"] = Processor.process_killTopology
self._processMap["killTopologyWithOpts"] = Processor.process_killTopologyWithOpts
self._processMap["activate"] = Processor.process_activate
self._processMap["deactivate"] = Processor.process_deactivate
self._processMap["rebalance"] = Processor.process_rebalance
self._processMap["uploadNewCredentials"] = Processor.process_uploadNewCredentials
self._processMap["beginFileUpload"] = Processor.process_beginFileUpload
self._processMap["uploadChunk"] = Processor.process_uploadChunk
self._processMap["finishFileUpload"] = Processor.process_finishFileUpload
self._processMap["beginFileDownload"] = Processor.process_beginFileDownload
self._processMap["downloadChunk"] = Processor.process_downloadChunk
self._processMap["getNimbusConf"] = Processor.process_getNimbusConf
self._processMap["getClusterInfo"] = Processor.process_getClusterInfo
self._processMap["getTopologyInfo"] = Processor.process_getTopologyInfo
self._processMap["getTopologyConf"] = Processor.process_getTopologyConf
self._processMap["getTopology"] = Processor.process_getTopology
self._processMap["getUserTopology"] = Processor.process_getUserTopology
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_submitTopology(self, seqid, iprot, oprot):
args = submitTopology_args()
args.read(iprot)
iprot.readMessageEnd()
result = submitTopology_result()
try:
self._handler.submitTopology(args.name, args.uploadedJarLocation, args.jsonConf, args.topology)
except AlreadyAliveException, e:
result.e = e
except InvalidTopologyException, ite:
result.ite = ite
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("submitTopology", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_submitTopologyWithOpts(self, seqid, iprot, oprot):
args = submitTopologyWithOpts_args()
args.read(iprot)
iprot.readMessageEnd()
result = submitTopologyWithOpts_result()
try:
self._handler.submitTopologyWithOpts(args.name, args.uploadedJarLocation, args.jsonConf, args.topology, args.options)
except AlreadyAliveException, e:
result.e = e
except InvalidTopologyException, ite:
result.ite = ite
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("submitTopologyWithOpts", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_killTopology(self, seqid, iprot, oprot):
args = killTopology_args()
args.read(iprot)
iprot.readMessageEnd()
result = killTopology_result()
try:
self._handler.killTopology(args.name)
except NotAliveException, e:
result.e = e
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("killTopology", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_killTopologyWithOpts(self, seqid, iprot, oprot):
args = killTopologyWithOpts_args()
args.read(iprot)
iprot.readMessageEnd()
result = killTopologyWithOpts_result()
try:
self._handler.killTopologyWithOpts(args.name, args.options)
except NotAliveException, e:
result.e = e
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("killTopologyWithOpts", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_activate(self, seqid, iprot, oprot):
args = activate_args()
args.read(iprot)
iprot.readMessageEnd()
result = activate_result()
try:
self._handler.activate(args.name)
except NotAliveException, e:
result.e = e
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("activate", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_deactivate(self, seqid, iprot, oprot):
args = deactivate_args()
args.read(iprot)
iprot.readMessageEnd()
result = deactivate_result()
try:
self._handler.deactivate(args.name)
except NotAliveException, e:
result.e = e
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("deactivate", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_rebalance(self, seqid, iprot, oprot):
args = rebalance_args()
args.read(iprot)
iprot.readMessageEnd()
result = rebalance_result()
try:
self._handler.rebalance(args.name, args.options)
except NotAliveException, e:
result.e = e
except InvalidTopologyException, ite:
result.ite = ite
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("rebalance", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_uploadNewCredentials(self, seqid, iprot, oprot):
args = uploadNewCredentials_args()
args.read(iprot)
iprot.readMessageEnd()
result = uploadNewCredentials_result()
try:
self._handler.uploadNewCredentials(args.name, args.creds)
except NotAliveException, e:
result.e = e
except InvalidTopologyException, ite:
result.ite = ite
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("uploadNewCredentials", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_beginFileUpload(self, seqid, iprot, oprot):
args = beginFileUpload_args()
args.read(iprot)
iprot.readMessageEnd()
result = beginFileUpload_result()
try:
result.success = self._handler.beginFileUpload()
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("beginFileUpload", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_uploadChunk(self, seqid, iprot, oprot):
args = uploadChunk_args()
args.read(iprot)
iprot.readMessageEnd()
result = uploadChunk_result()
try:
self._handler.uploadChunk(args.location, args.chunk)
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("uploadChunk", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_finishFileUpload(self, seqid, iprot, oprot):
args = finishFileUpload_args()
args.read(iprot)
iprot.readMessageEnd()
result = finishFileUpload_result()
try:
self._handler.finishFileUpload(args.location)
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("finishFileUpload", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_beginFileDownload(self, seqid, iprot, oprot):
args = beginFileDownload_args()
args.read(iprot)
iprot.readMessageEnd()
result = beginFileDownload_result()
try:
result.success = self._handler.beginFileDownload(args.file)
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("beginFileDownload", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_downloadChunk(self, seqid, iprot, oprot):
args = downloadChunk_args()
args.read(iprot)
iprot.readMessageEnd()
result = downloadChunk_result()
try:
result.success = self._handler.downloadChunk(args.id)
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("downloadChunk", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getNimbusConf(self, seqid, iprot, oprot):
args = getNimbusConf_args()
args.read(iprot)
iprot.readMessageEnd()
result = getNimbusConf_result()
try:
result.success = self._handler.getNimbusConf()
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("getNimbusConf", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getClusterInfo(self, seqid, iprot, oprot):
args = getClusterInfo_args()
args.read(iprot)
iprot.readMessageEnd()
result = getClusterInfo_result()
try:
result.success = self._handler.getClusterInfo()
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("getClusterInfo", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getTopologyInfo(self, seqid, iprot, oprot):
args = getTopologyInfo_args()
args.read(iprot)
iprot.readMessageEnd()
result = getTopologyInfo_result()
try:
result.success = self._handler.getTopologyInfo(args.id)
except NotAliveException, e:
result.e = e
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("getTopologyInfo", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getTopologyConf(self, seqid, iprot, oprot):
args = getTopologyConf_args()
args.read(iprot)
iprot.readMessageEnd()
result = getTopologyConf_result()
try:
result.success = self._handler.getTopologyConf(args.id)
except NotAliveException, e:
result.e = e
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("getTopologyConf", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getTopology(self, seqid, iprot, oprot):
args = getTopology_args()
args.read(iprot)
iprot.readMessageEnd()
result = getTopology_result()
try:
result.success = self._handler.getTopology(args.id)
except NotAliveException, e:
result.e = e
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("getTopology", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getUserTopology(self, seqid, iprot, oprot):
args = getUserTopology_args()
args.read(iprot)
iprot.readMessageEnd()
result = getUserTopology_result()
try:
result.success = self._handler.getUserTopology(args.id)
except NotAliveException, e:
result.e = e
except AuthorizationException, aze:
result.aze = aze
oprot.writeMessageBegin("getUserTopology", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class submitTopology_args:
"""
Attributes:
- name
- uploadedJarLocation
- jsonConf
- topology
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRING, 'uploadedJarLocation', None, None, ), # 2
(3, TType.STRING, 'jsonConf', None, None, ), # 3
(4, TType.STRUCT, 'topology', (StormTopology, StormTopology.thrift_spec), None, ), # 4
)
def __hash__(self):
return 0 + hash(self.name) + hash(self.uploadedJarLocation) + hash(self.jsonConf) + hash(self.topology)
def __init__(self, name=None, uploadedJarLocation=None, jsonConf=None, topology=None,):
self.name = name
self.uploadedJarLocation = uploadedJarLocation
self.jsonConf = jsonConf
self.topology = topology
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.uploadedJarLocation = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.jsonConf = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.topology = StormTopology()
self.topology.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('submitTopology_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8'))
oprot.writeFieldEnd()
if self.uploadedJarLocation is not None:
oprot.writeFieldBegin('uploadedJarLocation', TType.STRING, 2)
oprot.writeString(self.uploadedJarLocation.encode('utf-8'))
oprot.writeFieldEnd()
if self.jsonConf is not None:
oprot.writeFieldBegin('jsonConf', TType.STRING, 3)
oprot.writeString(self.jsonConf.encode('utf-8'))
oprot.writeFieldEnd()
if self.topology is not None:
oprot.writeFieldBegin('topology', TType.STRUCT, 4)
self.topology.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class submitTopology_result:
"""
Attributes:
- e
- ite
- aze
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (AlreadyAliveException, AlreadyAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'ite', (InvalidTopologyException, InvalidTopologyException.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 3
)
def __hash__(self):
return 0 + hash(self.e) + hash(self.ite) + hash(self.aze)
def __init__(self, e=None, ite=None, aze=None,):
self.e = e
self.ite = ite
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = AlreadyAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.ite = InvalidTopologyException()
self.ite.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('submitTopology_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.ite is not None:
oprot.writeFieldBegin('ite', TType.STRUCT, 2)
self.ite.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 3)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class submitTopologyWithOpts_args:
"""
Attributes:
- name
- uploadedJarLocation
- jsonConf
- topology
- options
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRING, 'uploadedJarLocation', None, None, ), # 2
(3, TType.STRING, 'jsonConf', None, None, ), # 3
(4, TType.STRUCT, 'topology', (StormTopology, StormTopology.thrift_spec), None, ), # 4
(5, TType.STRUCT, 'options', (SubmitOptions, SubmitOptions.thrift_spec), None, ), # 5
)
def __hash__(self):
return 0 + hash(self.name) + hash(self.uploadedJarLocation) + hash(self.jsonConf) + hash(self.topology) + hash(self.options)
def __init__(self, name=None, uploadedJarLocation=None, jsonConf=None, topology=None, options=None,):
self.name = name
self.uploadedJarLocation = uploadedJarLocation
self.jsonConf = jsonConf
self.topology = topology
self.options = options
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.uploadedJarLocation = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.jsonConf = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.topology = StormTopology()
self.topology.read(iprot)
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRUCT:
self.options = SubmitOptions()
self.options.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('submitTopologyWithOpts_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8'))
oprot.writeFieldEnd()
if self.uploadedJarLocation is not None:
oprot.writeFieldBegin('uploadedJarLocation', TType.STRING, 2)
oprot.writeString(self.uploadedJarLocation.encode('utf-8'))
oprot.writeFieldEnd()
if self.jsonConf is not None:
oprot.writeFieldBegin('jsonConf', TType.STRING, 3)
oprot.writeString(self.jsonConf.encode('utf-8'))
oprot.writeFieldEnd()
if self.topology is not None:
oprot.writeFieldBegin('topology', TType.STRUCT, 4)
self.topology.write(oprot)
oprot.writeFieldEnd()
if self.options is not None:
oprot.writeFieldBegin('options', TType.STRUCT, 5)
self.options.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class submitTopologyWithOpts_result:
"""
Attributes:
- e
- ite
- aze
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (AlreadyAliveException, AlreadyAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'ite', (InvalidTopologyException, InvalidTopologyException.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 3
)
def __hash__(self):
return 0 + hash(self.e) + hash(self.ite) + hash(self.aze)
def __init__(self, e=None, ite=None, aze=None,):
self.e = e
self.ite = ite
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = AlreadyAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.ite = InvalidTopologyException()
self.ite.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('submitTopologyWithOpts_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.ite is not None:
oprot.writeFieldBegin('ite', TType.STRUCT, 2)
self.ite.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 3)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class killTopology_args:
"""
Attributes:
- name
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.name)
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('killTopology_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class killTopology_result:
"""
Attributes:
- e
- aze
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.e) + hash(self.aze)
def __init__(self, e=None, aze=None,):
self.e = e
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('killTopology_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class killTopologyWithOpts_args:
"""
Attributes:
- name
- options
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRUCT, 'options', (KillOptions, KillOptions.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.name) + hash(self.options)
def __init__(self, name=None, options=None,):
self.name = name
self.options = options
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.options = KillOptions()
self.options.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('killTopologyWithOpts_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8'))
oprot.writeFieldEnd()
if self.options is not None:
oprot.writeFieldBegin('options', TType.STRUCT, 2)
self.options.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class killTopologyWithOpts_result:
"""
Attributes:
- e
- aze
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.e) + hash(self.aze)
def __init__(self, e=None, aze=None,):
self.e = e
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('killTopologyWithOpts_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class activate_args:
"""
Attributes:
- name
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.name)
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('activate_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class activate_result:
"""
Attributes:
- e
- aze
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.e) + hash(self.aze)
def __init__(self, e=None, aze=None,):
self.e = e
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('activate_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class deactivate_args:
"""
Attributes:
- name
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.name)
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('deactivate_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class deactivate_result:
"""
Attributes:
- e
- aze
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.e) + hash(self.aze)
def __init__(self, e=None, aze=None,):
self.e = e
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('deactivate_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class rebalance_args:
"""
Attributes:
- name
- options
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRUCT, 'options', (RebalanceOptions, RebalanceOptions.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.name) + hash(self.options)
def __init__(self, name=None, options=None,):
self.name = name
self.options = options
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.options = RebalanceOptions()
self.options.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('rebalance_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8'))
oprot.writeFieldEnd()
if self.options is not None:
oprot.writeFieldBegin('options', TType.STRUCT, 2)
self.options.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class rebalance_result:
"""
Attributes:
- e
- ite
- aze
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'ite', (InvalidTopologyException, InvalidTopologyException.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 3
)
def __hash__(self):
return 0 + hash(self.e) + hash(self.ite) + hash(self.aze)
def __init__(self, e=None, ite=None, aze=None,):
self.e = e
self.ite = ite
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.ite = InvalidTopologyException()
self.ite.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('rebalance_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.ite is not None:
oprot.writeFieldBegin('ite', TType.STRUCT, 2)
self.ite.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 3)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class uploadNewCredentials_args:
"""
Attributes:
- name
- creds
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRUCT, 'creds', (Credentials, Credentials.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.name) + hash(self.creds)
def __init__(self, name=None, creds=None,):
self.name = name
self.creds = creds
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.creds = Credentials()
self.creds.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('uploadNewCredentials_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8'))
oprot.writeFieldEnd()
if self.creds is not None:
oprot.writeFieldBegin('creds', TType.STRUCT, 2)
self.creds.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class uploadNewCredentials_result:
"""
Attributes:
- e
- ite
- aze
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'ite', (InvalidTopologyException, InvalidTopologyException.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 3
)
def __hash__(self):
return 0 + hash(self.e) + hash(self.ite) + hash(self.aze)
def __init__(self, e=None, ite=None, aze=None,):
self.e = e
self.ite = ite
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.ite = InvalidTopologyException()
self.ite.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('uploadNewCredentials_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.ite is not None:
oprot.writeFieldBegin('ite', TType.STRUCT, 2)
self.ite.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 3)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class beginFileUpload_args:
thrift_spec = (
)
def __hash__(self):
return 0
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('beginFileUpload_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class beginFileUpload_result:
"""
Attributes:
- success
- aze
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
(1, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 1
)
def __hash__(self):
return 0 + hash(self.success) + hash(self.aze)
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('beginFileUpload_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8'))
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class uploadChunk_args:
"""
Attributes:
- location
- chunk
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'location', None, None, ), # 1
(2, TType.STRING, 'chunk', None, None, ), # 2
)
def __hash__(self):
return 0 + hash(self.location) + hash(self.chunk)
def __init__(self, location=None, chunk=None,):
self.location = location
self.chunk = chunk
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.location = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.chunk = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('uploadChunk_args')
if self.location is not None:
oprot.writeFieldBegin('location', TType.STRING, 1)
oprot.writeString(self.location.encode('utf-8'))
oprot.writeFieldEnd()
if self.chunk is not None:
oprot.writeFieldBegin('chunk', TType.STRING, 2)
oprot.writeString(self.chunk)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class uploadChunk_result:
"""
Attributes:
- aze
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 1
)
def __hash__(self):
return 0 + hash(self.aze)
def __init__(self, aze=None,):
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('uploadChunk_result')
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class finishFileUpload_args:
"""
Attributes:
- location
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'location', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.location)
def __init__(self, location=None,):
self.location = location
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.location = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('finishFileUpload_args')
if self.location is not None:
oprot.writeFieldBegin('location', TType.STRING, 1)
oprot.writeString(self.location.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class finishFileUpload_result:
"""
Attributes:
- aze
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 1
)
def __hash__(self):
return 0 + hash(self.aze)
def __init__(self, aze=None,):
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('finishFileUpload_result')
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class beginFileDownload_args:
"""
Attributes:
- file
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'file', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.file)
def __init__(self, file=None,):
self.file = file
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.file = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('beginFileDownload_args')
if self.file is not None:
oprot.writeFieldBegin('file', TType.STRING, 1)
oprot.writeString(self.file.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class beginFileDownload_result:
"""
Attributes:
- success
- aze
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
(1, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 1
)
def __hash__(self):
return 0 + hash(self.success) + hash(self.aze)
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('beginFileDownload_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8'))
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class downloadChunk_args:
"""
Attributes:
- id
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'id', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.id)
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('downloadChunk_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class downloadChunk_result:
"""
Attributes:
- success
- aze
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
(1, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 1
)
def __hash__(self):
return 0 + hash(self.success) + hash(self.aze)
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('downloadChunk_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getNimbusConf_args:
thrift_spec = (
)
def __hash__(self):
return 0
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getNimbusConf_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getNimbusConf_result:
"""
Attributes:
- success
- aze
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
(1, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 1
)
def __hash__(self):
return 0 + hash(self.success) + hash(self.aze)
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getNimbusConf_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8'))
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getClusterInfo_args:
thrift_spec = (
)
def __hash__(self):
return 0
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getClusterInfo_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getClusterInfo_result:
"""
Attributes:
- success
- aze
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (ClusterSummary, ClusterSummary.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 1
)
def __hash__(self):
return 0 + hash(self.success) + hash(self.aze)
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ClusterSummary()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getClusterInfo_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getTopologyInfo_args:
"""
Attributes:
- id
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'id', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.id)
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getTopologyInfo_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getTopologyInfo_result:
"""
Attributes:
- success
- e
- aze
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TopologyInfo, TopologyInfo.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.success) + hash(self.e) + hash(self.aze)
def __init__(self, success=None, e=None, aze=None,):
self.success = success
self.e = e
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TopologyInfo()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getTopologyInfo_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getTopologyConf_args:
"""
Attributes:
- id
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'id', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.id)
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getTopologyConf_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getTopologyConf_result:
"""
Attributes:
- success
- e
- aze
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.success) + hash(self.e) + hash(self.aze)
def __init__(self, success=None, e=None, aze=None,):
self.success = success
self.e = e
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getTopologyConf_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8'))
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getTopology_args:
"""
Attributes:
- id
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'id', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.id)
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getTopology_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getTopology_result:
"""
Attributes:
- success
- e
- aze
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (StormTopology, StormTopology.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.success) + hash(self.e) + hash(self.aze)
def __init__(self, success=None, e=None, aze=None,):
self.success = success
self.e = e
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = StormTopology()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getTopology_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getUserTopology_args:
"""
Attributes:
- id
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'id', None, None, ), # 1
)
def __hash__(self):
return 0 + hash(self.id)
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getUserTopology_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getUserTopology_result:
"""
Attributes:
- success
- e
- aze
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (StormTopology, StormTopology.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'aze', (AuthorizationException, AuthorizationException.thrift_spec), None, ), # 2
)
def __hash__(self):
return 0 + hash(self.success) + hash(self.e) + hash(self.aze)
def __init__(self, success=None, e=None, aze=None,):
self.success = success
self.e = e
self.aze = aze
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = StormTopology()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getUserTopology_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
|
{
"content_hash": "3b639577da39af05d6cb40a94ac4540c",
"timestamp": "",
"source": "github",
"line_count": 3927,
"max_line_length": 188,
"avg_line_length": 30.370766488413548,
"alnum_prop": 0.635478677913236,
"repo_name": "Aloomaio/incubator-storm",
"id": "a2ee892bd3810c99804bc6fa5cc5c7bc2cff8c45",
"size": "120164",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "storm-core/src/py/storm/Nimbus.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "52821"
},
{
"name": "C++",
"bytes": "1650"
},
{
"name": "CSS",
"bytes": "63637"
},
{
"name": "Clojure",
"bytes": "894325"
},
{
"name": "Fancy",
"bytes": "6234"
},
{
"name": "Java",
"bytes": "2976712"
},
{
"name": "JavaScript",
"bytes": "83688"
},
{
"name": "Python",
"bytes": "357373"
},
{
"name": "Ruby",
"bytes": "19946"
},
{
"name": "Shell",
"bytes": "11945"
},
{
"name": "Thrift",
"bytes": "9946"
}
],
"symlink_target": ""
}
|
package edu.gemini.p2checker.rules.gmos;
import edu.gemini.p2checker.api.*;
import edu.gemini.p2checker.rules.altair.AltairRule;
import edu.gemini.p2checker.rules.general.GeneralRule;
import edu.gemini.p2checker.util.AbstractConfigRule;
import edu.gemini.p2checker.util.MdfConfigRule;
import edu.gemini.p2checker.util.NoPOffsetWithSlitRule;
import edu.gemini.p2checker.util.SequenceRule;
import edu.gemini.pot.sp.ISPObservation;
import edu.gemini.pot.sp.ISPProgramNode;
import edu.gemini.pot.sp.SPComponentType;
import edu.gemini.skycalc.Offset;
import edu.gemini.spModel.config2.Config;
import edu.gemini.spModel.config2.ConfigSequence;
import edu.gemini.spModel.config2.ItemKey;
import edu.gemini.spModel.core.*;
import edu.gemini.spModel.data.YesNoType;
import edu.gemini.spModel.gemini.gmos.GmosCommonType.*;
import edu.gemini.spModel.gemini.gmos.GmosNorthType.DisperserNorth;
import edu.gemini.spModel.gemini.gmos.GmosNorthType.FPUnitNorth;
import edu.gemini.spModel.gemini.gmos.GmosNorthType.FilterNorth;
import edu.gemini.spModel.gemini.gmos.GmosSouthType.DisperserSouth;
import edu.gemini.spModel.gemini.gmos.GmosSouthType.FPUnitSouth;
import edu.gemini.spModel.gemini.gmos.GmosSouthType.FilterSouth;
import edu.gemini.spModel.gemini.gmos.InstGmosCommon;
import edu.gemini.spModel.gemini.gmos.InstGmosNorth;
import edu.gemini.spModel.gemini.gmos.InstGmosSouth;
import edu.gemini.spModel.gemini.obscomp.SPSiteQuality;
import edu.gemini.spModel.gemini.obscomp.SPSiteQuality.ImageQuality;
import edu.gemini.spModel.gemini.obscomp.SPSiteQuality.SkyBackground;
import edu.gemini.spModel.obsclass.ObsClass;
import edu.gemini.spModel.obscomp.InstConstants;
import edu.gemini.spModel.obscomp.SPInstObsComp;
import edu.gemini.spModel.target.offset.OffsetPos;
import scala.Option;
import scala.runtime.AbstractFunction2;
import java.beans.PropertyDescriptor;
import java.util.*;
import static edu.gemini.spModel.gemini.gmos.GmosCommonType.DetectorManufacturer.HAMAMATSU;
/**
* GMOS Rule set
*/
public final class GmosRule implements IRule {
private static final String PREFIX = "GmosRule_";
private static Collection<IConfigRule> GMOS_RULES = new ArrayList<>();
//keys to access sequence elements
private static final ItemKey FPU_KEY = new ItemKey("instrument:fpu");
private static final ItemKey FPU_MODE_KEY = new ItemKey("instrument:fpuMode");
private static final ItemKey DISPERSER_KEY = new ItemKey("instrument:disperser");
private static final ItemKey FILTER_KEY = new ItemKey("instrument:filter");
private static final ItemKey DETECTOR_KEY = new ItemKey("instrument:detectorManufacturer");
private static final ItemKey GAIN_KEY = new ItemKey("instrument:gainChoice");
private static final ItemKey CCD_X_BINNING_KEY = new ItemKey("instrument:ccdXBinning");
private static final ItemKey CCD_Y_BINNING_KEY = new ItemKey("instrument:ccdYBinning");
/**
* either a filter or a grating should be defined whatever the fpu
* WARN if (disperser == 'Mirror' && filter == 'none'), \
*/
private static IConfigRule DISPERSER_AND_MIRROR_RULE = new AbstractConfigRule() {
private static final String MESSAGE = "Mirror must be used with a filter, or select a grating " +
"and a filter is optional";
public Problem check(Config config, int step, ObservationElements elems, Object state) {
final Disperser disperser = getDisperser(config);
final Filter filter = getFilter(config);
if (disperser == null || filter == null) return null;
if (disperser.isMirror() && filter.isNone()) {
return new Problem(WARNING, PREFIX + "DISPERSER_AND_MIRROR_RULE",
MESSAGE, SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
return null;
}
};
/**
* Acquisition
* Shouldn't contain grating
* WARN if (class == 'Acquisition) and (disperser != 'Mirror'),
*/
private static IConfigRule ACQUISITION_RULE = new AbstractConfigRule() {
private static final String MESSAGE = "Acquisition observation should not contain grating";
public Problem check(Config config, int step, ObservationElements elems, Object state) {
final ObsClass obsClass = SequenceRule.getObsClass(config);
if (obsClass != ObsClass.ACQ && obsClass != ObsClass.ACQ_CAL) {
return null;
}
final Disperser disperser = getDisperser(config);
if (!disperser.isMirror()) {
return new Problem(WARNING, PREFIX + "ACQUISITION_RULE", MESSAGE,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
return null;
}
};
private static IConfigMatcher SCIENCE_DAYCAL_MATCHER = (config, step, elems) -> {
ObsClass obsClass = SequenceRule.getObsClass(config);
return obsClass == ObsClass.SCIENCE || obsClass == ObsClass.DAY_CAL;
};
private static IConfigMatcher IMAGING_MATCHER = (config, step, elems) -> {
if (!SequenceRule.SCIENCE_MATCHER.matches(config, step, elems))
return false;
return getDisperser(config).isMirror() && getFPU(config, elems).isImaging();
};
private static IConfigMatcher SPECTROSCOPY_MATCHER = (config, step, elems) -> {
if (!SequenceRule.SCIENCE_MATCHER.matches(config, step, elems))
return false;
if (!isSpecFPUnselected(config, elems)) return false;
final Disperser disperser = getDisperser(config);
return !disperser.isMirror();
};
private static IConfigMatcher N_S_SPECTROSCOPY_MATCHER = (config, step, elems) -> {
if (!SPECTROSCOPY_MATCHER.matches(config, step, elems))
return false;
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
return inst != null && inst.getUseNS() == UseNS.TRUE;
};
private static IConfigMatcher N_S_SPECTROSCOPY_SCIENCE_DAYCAL__MATCHER = (config, step, elems) -> {
if (!SCIENCE_DAYCAL_MATCHER.matches(config, step, elems)) {
return false;
}
if (!isSpecFPUnselected(config, elems)) {
return false;
}
final Disperser disperser = getDisperser(config);
if (disperser.isMirror()) {
return false;
}
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
return inst != null && inst.getUseNS() == UseNS.TRUE;
};
private static class ScienceRule implements IConfigRule {
interface IScienceChecker {
boolean check(Config config, ObservationElements elems);
// The error or warning message
String getMessage();
// A unique id used to allow the user to ignore selected messages
String getId();
}
private IConfigMatcher _matcher;
private IScienceChecker _checker;
private Problem.Type _type;
public ScienceRule(IScienceChecker checker, IConfigMatcher validator) {
this(checker, validator, WARNING);
}
public ScienceRule(IScienceChecker checker, IConfigMatcher validator, Problem.Type type) {
_checker = checker;
_matcher = validator;
_type = type;
}
public Problem check(Config config, int step, ObservationElements elems, Object state) {
if (_checker.check(config, elems)) {
return new Problem(_type, _checker.getId(), _checker.getMessage(),
SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
return null;
}
public IConfigMatcher getMatcher() {
return _matcher;
}
}
/**
* REL-298: IF (GMOS-N && detector == E2V && amps != 6) then ERROR "The E2V detectors must use 6 amp mode."
* This is for GMOS-N only.
* REL-1194: Also for GMOS-S now, but with different default and rule.
*/
private static IConfigRule CHECK_3_AMP_MODE = new AbstractConfigRule() {
private static final String GMOS_NORTH_MESSAGE = "The E2V detectors must use 6 amp mode";
private static final String GMOS_SOUTH_MESSAGE = "The E2V detectors must use 3 amp mode";
public Problem check(Config config, int step, ObservationElements elems, Object state) {
SPInstObsComp inst = elems.getInstrument();
if (inst instanceof InstGmosNorth) {
// Get the detector manufacturer.
Object tmp = SequenceRule.getInstrumentItem(config, InstGmosNorth.DETECTOR_MANUFACTURER_PROP);
if (tmp == null) return null;
final DetectorManufacturer man = (DetectorManufacturer) tmp;
// Get the amp count.
tmp = SequenceRule.getInstrumentItem(config, InstGmosNorth.AMP_COUNT_PROP);
if (tmp == null) return null;
final AmpCount cnt = (AmpCount) tmp;
if (man == DetectorManufacturer.E2V && cnt != AmpCount.SIX) {
return new Problem(Problem.Type.ERROR, PREFIX + "CHECK_3_AMP_MODE", GMOS_NORTH_MESSAGE,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
} else if (inst instanceof InstGmosSouth) {
// Get the detector manufacturer.
Object tmp = SequenceRule.getInstrumentItem(config, InstGmosSouth.DETECTOR_MANUFACTURER_PROP);
if (tmp == null) return null;
final DetectorManufacturer man = (DetectorManufacturer) tmp;
// Get the amp count.
tmp = SequenceRule.getInstrumentItem(config, InstGmosSouth.AMP_COUNT_PROP);
if (tmp == null) return null;
final AmpCount cnt = (AmpCount) tmp;
if (man == DetectorManufacturer.E2V && cnt != AmpCount.THREE) {
return new Problem(Problem.Type.ERROR, PREFIX + "CHECK_6_AMP_MODE", GMOS_SOUTH_MESSAGE,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
}
return null;
}
};
/**
* Error if we try to use an amp count not supported by the CCD.
*/
private static IConfigRule BAD_AMP_COUNT_RULE = new AbstractConfigRule() {
private static final String MESSAGE = "Amp count %s is not compatible with the %s CCD";
public Problem check(Config config, int step, ObservationElements elems, Object state) {
// Get the detector manufacturer.
Object tmp = SequenceRule.getInstrumentItem(config, InstGmosNorth.DETECTOR_MANUFACTURER_PROP);
if (tmp == null) {
tmp = SequenceRule.getInstrumentItem(config, InstGmosSouth.DETECTOR_MANUFACTURER_PROP);
}
if (tmp == null) return null;
final DetectorManufacturer man = (DetectorManufacturer) tmp;
// Get the amp count.
tmp = SequenceRule.getInstrumentItem(config, InstGmosSouth.AMP_COUNT_PROP);
if (tmp == null) {
tmp = SequenceRule.getInstrumentItem(config, InstGmosNorth.AMP_COUNT_PROP);
}
if (tmp == null) return null;
final AmpCount cnt = (AmpCount) tmp;
// Verify that the count is supported by the CCD. Would rather
// turn this around and ask the manufacturer what counts it
// supports. The issue is that the amp counts are defined in
// subclasses.
if (!cnt.getSupportedBy().contains(man)) {
String message = String.format(MESSAGE, cnt.displayValue(), man.displayValue());
return new Problem(Problem.Type.ERROR, PREFIX + "BAD_AMP_COUNT_RULE", message, elems.getSeqComponentNode());
}
return null;
}
};
/**
* WARN if (gain != 'low' && !HAMAMATSU)
*/
private static ScienceRule GAIN_SCIENCE_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "Low gain is recommended for science observations";
public boolean check(Config config, ObservationElements elems) {
DetectorManufacturer det =
(DetectorManufacturer) SequenceRule.getInstrumentItem(config, InstGmosSouth.DETECTOR_MANUFACTURER_PROP);
if (det == null) {
det = (DetectorManufacturer) SequenceRule.getInstrumentItem(config, InstGmosNorth.DETECTOR_MANUFACTURER_PROP);
}
if (det == DetectorManufacturer.HAMAMATSU) {
return false;
}
AmpGain gain =
(AmpGain) SequenceRule.getInstrumentItem(config, InstGmosSouth.AMP_GAIN_CHOICE_PROP);
if (gain == null)
gain = (AmpGain) SequenceRule.getInstrumentItem(config, InstGmosNorth.AMP_GAIN_CHOICE_PROP);
return gain != AmpGain.LOW;
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "GAIN_SCIENCE_RULE";
}
}
, SequenceRule.SCIENCE_MATCHER
);
/**
* WARN if (read == 'fast' && !HAMAMATSU)
*/
private static ScienceRule READMODE_SCIENCE_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "Slow read-out is recommended for science observations";
public boolean check(Config config, ObservationElements elems) {
DetectorManufacturer det =
(DetectorManufacturer) SequenceRule.getInstrumentItem(config, InstGmosSouth.DETECTOR_MANUFACTURER_PROP);
if (det == null) {
det = (DetectorManufacturer) SequenceRule.getInstrumentItem(config, InstGmosNorth.DETECTOR_MANUFACTURER_PROP);
}
if (det == DetectorManufacturer.HAMAMATSU) {
return false;
}
AmpReadMode readMode =
(AmpReadMode) SequenceRule.getInstrumentItem(config, InstGmosSouth.AMP_READ_MODE_PROP);
if (readMode == null)
readMode = (AmpReadMode) SequenceRule.getInstrumentItem(config, InstGmosNorth.AMP_READ_MODE_PROP);
return readMode == AmpReadMode.FAST;
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "READMODE_SCIENCE_RULE";
}
}
, SequenceRule.SCIENCE_MATCHER
);
/**
* WARNING if configured with slow-read and high-gain.
*/
private static IConfigRule GAIN_READMODE_RULE = new AbstractConfigRule() {
private static final String MESSAGE = "Slow readout and high gain is not recommended.";
public Problem check(Config config, int step, ObservationElements elems, Object state) {
AmpReadMode readMode =
(AmpReadMode) SequenceRule.getInstrumentItem(config, InstGmosSouth.AMP_READ_MODE_PROP);
if (readMode == null) {
readMode = (AmpReadMode) SequenceRule.getInstrumentItem(config, InstGmosNorth.AMP_READ_MODE_PROP);
}
AmpGain gain =
(AmpGain) SequenceRule.getInstrumentItem(config, InstGmosSouth.AMP_GAIN_CHOICE_PROP);
if (gain == null) {
gain = (AmpGain) SequenceRule.getInstrumentItem(config, InstGmosNorth.AMP_GAIN_CHOICE_PROP);
}
if ((readMode == AmpReadMode.SLOW) && (gain == AmpGain.HIGH)) {
return new Problem(WARNING, PREFIX + "GAIN_READMODE_RULE", MESSAGE,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
return null;
}
};
/**
* For imaging observations, it is required to use 1x1, 2x2, or 4x4 binning.
* For MOS pre-imaging, only 1x1 or 2x2 are allowe.
*/
private static IConfigRule BINNING_RULE = new IConfigRule() {
private static final String IMAGING_MSG = "For imaging, binning is limited to 1x1, 2x2 or 4x4.";
private static final String PREIMAGING_MSG = "For MOS pre-imaging, binning is limited to 1x1 or 2x2.";
public Problem check(Config config, int step, ObservationElements elems, Object state) {
// Collect the x and y binning values.
PropertyDescriptor xProp = InstGmosCommon.CCD_X_BIN_PROP;
Binning x = (Binning) SequenceRule.getInstrumentItem(config, xProp);
PropertyDescriptor yProp = InstGmosCommon.CCD_Y_BIN_PROP;
Binning y = (Binning) SequenceRule.getInstrumentItem(config, yProp);
// Determine whether this is preimaging.
PropertyDescriptor preProp = InstGmosCommon.IS_MOS_PREIMAGING_PROP;
YesNoType pre;
pre = (YesNoType) SequenceRule.getInstrumentItem(config, preProp);
boolean isPreimaging = pre.toBoolean();
// If they don't match, we must warn.
if (x != y) {
String msg = isPreimaging ? PREIMAGING_MSG : IMAGING_MSG;
String id = isPreimaging ? PREFIX + "PREIMAGING_MSG" : PREFIX + "IMAGING_MSG";
ISPProgramNode node;
node = SequenceRule.getInstrumentOrSequenceNode(step, elems, config);
return new Problem(ERROR, id, msg, node);
}
// Even if they match, make sure that we aren't preimaging with
// binning 4x4.
if ((x == Binning.FOUR) && isPreimaging) {
ISPProgramNode node;
node = SequenceRule.getInstrumentOrSequenceNode(step, elems, config);
return new Problem(ERROR, PREFIX + "PREIMAGING_MSG", PREIMAGING_MSG, node);
}
return null;
}
// We only want to do this check for imaging observations.
public IConfigMatcher getMatcher() {
return IMAGING_MATCHER;
}
};
private static IConfigRule GMOS_S_DTA_X_RULE = new IConfigRule() {
private static final String Y1 = "The GMOS-S Hamamatsu allowed DTA X range is -4 to +6 for Ybin=1";
private static final String Y2_4 = "The GMOS-S Hamamatsu allowed DTA X range is -2 to +6 for Ybin=2 or 4";
public Problem check(Config config, int step, ObservationElements elems, Object state) {
final Binning y = (Binning) SequenceRule.getInstrumentItem(config, InstGmosCommon.CCD_Y_BIN_PROP);
final DTAX dtaX = (DTAX) SequenceRule.getInstrumentItem(config, InstGmosCommon.DTAX_OFFSET_PROP);
final ISPProgramNode node = SequenceRule.getInstrumentOrSequenceNode(step, elems, config);
if ((y == Binning.ONE) && (dtaX.intValue() < -4)) {
return new Problem(ERROR, PREFIX + "Y1", Y1, node);
} else if ((y != Binning.ONE) && (dtaX.intValue() < -2)) {
return new Problem(ERROR, PREFIX + "Y2_4", Y2_4, node);
} else {
return null;
}
}
public IConfigMatcher getMatcher() {
return (config, step, elems) -> {
final InstGmosCommon<?,?,?,?> gmos = (InstGmosCommon) elems.getInstrument();
return gmos.getSite().equals(Site.SET_GS) && (gmos.getDetectorManufacturer() == HAMAMATSU);
};
}
};
/**
* State used to check for the SPATIAL_DITHER_IMAGING_RULE. If a sequence
* contains two or more observes at distinct positions, then there should
* not be a warning. Otherwise, a warning is generated. This only applies
* for science imaging.
*/
private static final class GmosSpatialDitherState {
private static final String MESSAGE = "Imaging observations usually benefit from spatial dithers, consider including an offset iterator";
double p;
double q;
boolean foundTwoDifferentPositions;
boolean ruleInEffect; // if false, the imaging rule was never applied
// REL-389
boolean foundMultipleExposurePerPerFilterChange;
boolean foundExpTimeGreaterThan300s;
Filter filter;
int exposureCount;
// Adds a warning to the problem list if necessary -- if doing science
// imaging and there are not two or more observes in different
// positions
void addWarning(IP2Problems probs, ObservationElements elems) {
if (!ruleInEffect) return;
if (foundTwoDifferentPositions) return;
// REL-389: add conditions: && ( >1 exposure per filter || exptime > 300s )
if (foundMultipleExposurePerPerFilterChange || foundExpTimeGreaterThan300s) {
probs.addWarning(PREFIX + "GmosSpatialDitherState", MESSAGE, elems.getSeqComponentNode());
}
}
}
/**
* WARN if (no spatial dithers). This rule will never directly generate
* a warning because the entire sequence must be examined to determine
* whether a warning is necessary. It updates the GmosSpatialDitherState
* so that the GmosRule can check whether to add a warning after the
* entire sequence has been iterated.
*/
private static IConfigRule SPATIAL_DITHER_IMAGING_RULE = new IConfigRule() {
public Problem check(Config config, int step, ObservationElements elems, Object state) {
GmosSpatialDitherState gsds = (GmosSpatialDitherState) state;
// REL-389
if (!gsds.foundExpTimeGreaterThan300s) {
Double expTime = SequenceRule.getExposureTime(config);
if (expTime != null && expTime > 300) {
gsds.foundExpTimeGreaterThan300s = true;
}
}
if (!gsds.foundMultipleExposurePerPerFilterChange) {
final Filter filter = getFilter(config);
if (filter != gsds.filter) {
gsds.filter = filter;
gsds.exposureCount = 0;
}
final String obsType = SequenceRule.getObserveType(config);
if (InstConstants.SCIENCE_OBSERVE_TYPE.equals(obsType)) {
Integer repeatCount = SequenceRule.getStepCount(config);
if (repeatCount != null && repeatCount > 1) {
gsds.exposureCount += repeatCount;
} else {
gsds.exposureCount++;
}
}
if (gsds.exposureCount > 1) {
gsds.foundMultipleExposurePerPerFilterChange = true;
}
}
if (gsds.foundTwoDifferentPositions) {
return null; // already know the
}
gsds.ruleInEffect = true;
final Option<Double> pOpt = SequenceRule.getPOffset(config);
final Option<Double> qOpt = SequenceRule.getQOffset(config);
final double p = pOpt.isDefined() ? pOpt.get() : 0.0;
final double q = qOpt.isDefined() ? qOpt.get() : 0.0;
if (step == 0) {
gsds.p = p;
gsds.q = q;
return null;
}
if (!Offset.areEqual(p, gsds.p) || !Offset.areEqual(q, gsds.q)) {
gsds.foundTwoDifferentPositions = true;
}
return null;
}
public IConfigMatcher getMatcher() {
return IMAGING_MATCHER;
}
};
/**
* WARN if (ccd bin = 1,1) && (IQ != 20) && !Altair
*/
private static ScienceRule CCD_BIN_AND_IQ_IMAGING_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "1x1 binning is usually only necessary in IQ=20";
public boolean check(Config config, ObservationElements elems) {
for (SPSiteQuality sq : elems.getSiteQuality()) {
boolean hasAOComp = elems.hasAltair();
final Binning binningX =
(Binning) SequenceRule.getInstrumentItem(config, InstGmosCommon.CCD_X_BIN_PROP);
final Binning binningY =
(Binning) SequenceRule.getInstrumentItem(config, InstGmosCommon.CCD_Y_BIN_PROP);
return binningX == Binning.ONE &&
binningY == Binning.ONE &&
sq.getImageQuality() != ImageQuality.PERCENT_20 &&
!hasAOComp;
}
return false;
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "CCD_BIN_AND_IQ_IMAGING_RULE";
}
}
, IMAGING_MATCHER
);
/**
* WARN if (ccd bin != 1,1) && Altair
*/
private static ScienceRule CCD_BIN_AND_ALTAIR_IMAGING_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "Altair observations should use 1x1 binning.";
public boolean check(Config config, ObservationElements elems) {
boolean hasAOComp = elems.hasAltair();
final Binning binningX =
(Binning) SequenceRule.getInstrumentItem(config, InstGmosCommon.CCD_X_BIN_PROP);
final Binning binningY =
(Binning) SequenceRule.getInstrumentItem(config, InstGmosCommon.CCD_Y_BIN_PROP);
return (binningX != Binning.ONE ||
binningY != Binning.ONE) &&
hasAOComp;
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "CCD_BIN_AND_ALTAIR_IMAGING_RULE";
}
}
, IMAGING_MATCHER
);
/**
* WARN if (disperser != 'Mirror) && (Built In fpu == selected) && (fpu == 'None')
*/
private static ScienceRule GRATING_NO_SLIT_SCIENCE_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "A grating is defined but not a slit, mask or IFU";
public boolean check(Config config, ObservationElements elems) {
return !getDisperser(config).isMirror() && getFPU(config, elems).isImaging();
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "GRATING_NO_SLIT_SCIENCE_RULE";
}
}
, SequenceRule.SCIENCE_MATCHER
);
/**
* REL-388: OT Phase-2 check for old GMOS-N B600 grating
*/
private static ScienceRule B600_G5303_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "The B600_G5303 grating has been superseded by the B600_G5307.";
public boolean check(Config config, ObservationElements elems) {
final Disperser disperser = getDisperser(config);
return disperser == DisperserNorth.B600_G5303;
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "B600_G5303_RULE";
}
}
, SequenceRule.SCIENCE_MATCHER
);
/**
* Spectroscopic element in fpu without grating
*/
private static ScienceRule SPECTROSCOPIC_ELEMENT_IN_FPU_SCIENCE_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "A slit, mask or IFU is defined, but no grating is selected";
public boolean check(Config config, ObservationElements elems) {
final Disperser disperser = getDisperser(config);
return disperser != null && isSpecFPUnselected(config, elems) && disperser.isMirror();
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "SPECTROSCOPIC_ELEMENT_IN_FPU_SCIENCE_RULE";
}
}
, SequenceRule.SCIENCE_MATCHER
);
/**
* nod and shuffle without slit or grating
*/
private static ScienceRule N_S_NO_SLIT_OR_GRATING_SCIENCE_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "Nod and Shuffle science observations require a " +
"grating and a spectroscopic element in the fpu";
public boolean check(Config config, ObservationElements elems) {
final Disperser disperser = getDisperser(config);
if (disperser == null) return false; //can't check
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
if (inst == null) return false; //can't check
final UseNS useNs = inst.getUseNS();
return useNs == UseNS.TRUE && disperser.isMirror() && !isSpecFPUnselected(config, elems);
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "N_S_NO_SLIT_OR_GRATING_SCIENCE_RULE";
}
}
, SequenceRule.SCIENCE_MATCHER
);
/**
* WARN if (exposure > 3600) under spectroscopy mode
* <p/>
* This is implemented differently since it needs to get the exposure time and depending on where
* this value is defined the result is reported either in the static component or in the sequence node
*/
private static IConfigRule EXP_SPECTROSCOPIC_RULE = new IConfigRule() {
private static final String MESSAGE = "It is usually best to keep spectroscopic exposure " +
"times less than one hour";
public Problem check(Config config, int step, ObservationElements elems, Object state) {
Double expTime = SequenceRule.getExposureTime(config);
if (expTime != null && expTime > 3600) {
return new Problem(WARNING, PREFIX + "EXP_SPECTROSCOPIC_RULE", MESSAGE,
SequenceRule.getInstrumentOrSequenceNode(step, elems, config));
}
return null;
}
public IConfigMatcher getMatcher() {
return SPECTROSCOPY_MATCHER;
}
};
/**
* WARN if (Built in FPU == selected) && (fpu == 'IFU Left Slit (blue)')
*/
private static ScienceRule IFU_LEFT_SLIT_SPECTROSCOPIC_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "In IFU one slit mode it is recommended to use the red slit";
public boolean check(Config config, ObservationElements elems) {
final FPUnit fpu = getFPU(config, elems);
return fpu == FPUnitNorth.IFU_2 ||
fpu == FPUnitSouth.IFU_2;
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "IFU_LEFT_SLIT_SPECTROSCOPIC_RULE";
}
}
, SPECTROSCOPY_MATCHER
);
/**
* WARN if (Built in FPU == selected) && (fpu == 'IFU 2 Slits') && (Filter == 'none')
*/
private static ScienceRule IFU_2_SLIT_AND_FILTER_SPECTROSCOPIC_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "In IFU 2-slit mode, it is recommended to use a " +
"filter to prevent spectral overlap";
public boolean check(Config config, ObservationElements elems) {
final Filter filter = getFilter(config);
if (!filter.isNone()) return false;
final FPUnit fpu = getFPU(config, elems);
return fpu == FPUnitNorth.IFU_1 ||
fpu == FPUnitSouth.IFU_1;
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "IFU_2_SLIT_AND_FILTER_SPECTROSCOPIC_RULE";
}
}
, SPECTROSCOPY_MATCHER
);
/**
* WARN if (wavelength < 450) || (wavelength > 900)
*/
private static ScienceRule WAVELENGTH_SPECTROSCOPIC_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "The central wavelength is likely too blue or too red";
public boolean check(Config config, ObservationElements elems) {
Double wavelength =
(Double) SequenceRule.getInstrumentItem(config, InstGmosCommon.DISPERSER_LAMBDA_PROP);
return wavelength != null && (wavelength < 450 || wavelength > 900);
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "WAVELENGTH_SPECTROSCOPIC_RULE";
}
}
, SPECTROSCOPY_MATCHER
);
/**
* Companion class for the {@link edu.gemini.p2checker.rules.gmos.GmosRule#DISPERSER_WAVELENGTH_SPECTROSCOPIC_RULE)}
*/
private static class DisperserWavelengthChecker implements ScienceRule.IScienceChecker {
private static final String MESSAGE = "For the selected central wavelength and disperser it is recommended to " +
"use a blocking filter to avoid second order overlap";
private static Map<Disperser, Double> DISPERSER_LIMITS_MAP = new HashMap<>();
static {
//north dispersers
DISPERSER_LIMITS_MAP.put(DisperserNorth.R400_G5305, 710.0);
DISPERSER_LIMITS_MAP.put(DisperserNorth.R831_G5302, 815.0);
DISPERSER_LIMITS_MAP.put(DisperserNorth.R600_G5304, 775.0);
//south dispersers
DISPERSER_LIMITS_MAP.put(DisperserSouth.R400_G5325, 710.0);
DISPERSER_LIMITS_MAP.put(DisperserSouth.R831_G5322, 815.0);
DISPERSER_LIMITS_MAP.put(DisperserSouth.R600_G5324, 775.0);
}
private static DisperserWavelengthChecker _instance = new DisperserWavelengthChecker();
public static DisperserWavelengthChecker getInstance() {
return _instance;
}
public boolean check(Config config, ObservationElements elems) {
final Filter filter = getFilter(config);
if (!filter.isNone()) return false;
final Disperser disperser = getDisperser(config);
if (disperser == null) return false;
//the following 2 dispersers generate a warning no matter what wavelength
if (disperser == DisperserNorth.R150_G5306 || disperser == DisperserSouth.R150_G5326)
return true;
final Double limitWavelength = DISPERSER_LIMITS_MAP.get(disperser);
if (limitWavelength == null) return false;
final Double centralWavelength = (Double) SequenceRule.getInstrumentItem(config, InstGmosCommon.DISPERSER_LAMBDA_PROP);
return (centralWavelength != null && centralWavelength > limitWavelength);
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "DisperserWavelengthChecker";
}
}
/**
* WARN if (filter == 'none') && (((disperser == 'R400_G5305') && (central wavelength > 710)) \
* || ((disperser == 'R831_G5302') && (central wavelength > 815)) \
* || ((disperser == 'R600_G5304') && (central wavelength > 775)) \
* || (disperser == 'R150_G5306'))
*/
private static ScienceRule DISPERSER_WAVELENGTH_SPECTROSCOPIC_RULE =
new ScienceRule(
DisperserWavelengthChecker.getInstance(),
SPECTROSCOPY_MATCHER
);
/**
* check correct fpu for N&S
*/
private static ScienceRule N_S_FPU_SPECTROSCOPIC_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE_SOUTH = "For Nod and Shuffle, either a Nod and Shuffle slit, " +
"or a Custom mask or Nod and Shuffle IFU option must be selected";
private static final String MESSAGE_NORTH = "For Nod and Shuffle, either a Nod and Shuffle slit " +
"or a Custom mask must be selected";
private String _message = MESSAGE_NORTH;
public boolean check(Config config, ObservationElements elems) {
if (elems.getInstrument() instanceof InstGmosSouth) {
_message = MESSAGE_SOUTH;
} else {
_message = MESSAGE_NORTH;
}
final FPUnit fpu = getFPU(config, elems);
return !(fpu.isNS() || fpu == FPUnitNorth.CUSTOM_MASK || fpu == FPUnitSouth.CUSTOM_MASK);
}
public String getMessage() {
return _message;
}
public String getId() {
return PREFIX + "N_S_FPU_SPECTROSCOPIC_RULE";
}
}
, N_S_SPECTROSCOPY_MATCHER
);
/**
* ERROR if (nod_distance == 0)
* Comments below from K.Roth on SCT-203
* The nod distance for Nod and Shuffle must not be equal to zero or otherwise the telescope is not nodding
* when the data are being taken. This is set in the nod and shuffle tab of the GMOS static component.
* The offset distance is just the difference between the two offset positions.
* I believe that for all science data the offset distance has to be non-zero, and I actually think
* this should produce an ERROR instead of just a WARNING. This should not produce an ERROR or a WARNING
* if the observation is a Daytime Calibration since DARKS do not nod when they are taken
* <p/>
* Bryan on SCT-203: I define nod_distance=sqrt((p2-p1)^2 + (q2-q1)^2)
*/
private static ScienceRule NOD_DISTANCE_N_S_SPECTROSCOPY_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "For Nod and Shuffle a nod distance must be set";
public boolean check(Config config, ObservationElements elems) {
InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
//Bryan: I define nod_distance=sqrt((p2-p1)^2 + (q2-q1)^2)
if (inst.getPosList().size() < 2)
return true; //there is not enough offsets, so there is no nod-distance
final Iterator<OffsetPos> it = inst.getPosList().iterator();
OffsetPos current = it.next();
while (it.hasNext()) {
OffsetPos pos = it.next();
//if the nod distance is zero, issue an error
if (Double.compare(getSquareNodDistance(current, pos), 0.0) == 0) {
return true;
}
current = pos;
}
return false;
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "NOD_DISTANCE_N_S_SPECTROSCOPY_RULE";
}
}
, N_S_SPECTROSCOPY_MATCHER,
ERROR
);
/**
* ERROR if (shuffle_distance == 0),
* (Comments below from K.Roth on SCT 203)
* The shuffle distance is indeed set also in the nod and shuffle tab of the GMOS static component.
* This is set in either the Offset(arcsec) or Offset(detector rows) fields since they are tied
* together and editing one of them causes the other to change value accordingly.
* Similarly to above, this must be set to something other than zero or else the detector
* is not shuffling when the observation is being taken, and I actually think this should also
* produce an ERROR state and not just a WARNING. Unlike the nod distance, a shuffle distance equal
* to zero should also produce an ERROR in a daytime calibration since you must shuffle the darks
* as well as the science.
* <p/>
* Last comment forces the use of the N_S_SPECTROSCOPY_SCIENCE_DAYCAL__MATCHER matcher,
* which is basically the same as the N_S_SPECTROSCOPY_MATCHER but matches for Daytime calibrations
* as well.
*/
private static ScienceRule SHUFFLE_DISTANCE_N_S_SPECTROSCOPY_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "For Nod and Shuffle a shuffle distance must be set";
public boolean check(Config config, ObservationElements elems) {
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
if (inst == null) return false;
int shuffle_distance = inst.getNsDetectorRows();
return shuffle_distance == 0;
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "SHUFFLE_DISTANCE_N_S_SPECTROSCOPY_RULE";
}
},
N_S_SPECTROSCOPY_SCIENCE_DAYCAL__MATCHER,
ERROR
);
/**
* WARN if (N&S_cycles == 0)
*/
private static ScienceRule N_S_CYCLES_N_S_SPECTROSCOPY_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "For Nod and Shuffle > 0 cycles must be set";
public boolean check(Config config, ObservationElements elems) {
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
return inst != null && inst.getNsNumCycles() == 0;
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "N_S_CYCLES_N_S_SPECTROSCOPY_RULE";
}
},
N_S_SPECTROSCOPY_MATCHER
);
/**
* WARN if (Electronic_Offseting == selected) && (Nod_Distance > 2)
*/
private static ScienceRule EOFFSET_N_S_SPECTROSCOPY_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "To use electronic offsetting the Nod Distance must be <= 2";
private static final int MAX_NOD_DISTANCE = 2;
public boolean check(Config config, ObservationElements elems) {
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
if (inst == null) return false;
if (!inst.isUseElectronicOffsetting()) return false;
final Iterator<OffsetPos> it = inst.getPosList().iterator();
if (inst.getPosList().size() < 2)
return false; //there is not enough offsets, so there is no nod-distance
OffsetPos current = it.next();
while (it.hasNext()) {
final OffsetPos pos = it.next();
//if the nod distance is greater than MAX_NOD_DISTANCE, issue an error
//notice we get the square of the nod distance
if (getSquareNodDistance(current, pos) > MAX_NOD_DISTANCE * MAX_NOD_DISTANCE) {
return true;
}
current = pos;
}
return false;
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "EOFFSET_N_S_SPECTROSCOPY_RULE";
}
}
, N_S_SPECTROSCOPY_MATCHER
);
/**
* WARN if (Built in fpu == selected) && (fpu == N&S slit) \
* && (shuffle_distance != 1536)
*/
private static IConfigRule NS_SLIT_SPECTROSCOPY_RULE = new IConfigRule() {
private static final String MESSAGE = "For long slit Nod and Shuffle the shuffle distance must be %d";
// This is a bit poor but it returns null if there is no need to issue
// a warning. Returns a formatted message otherwise.
private String getWarningMessage(Config config, ObservationElements elems) {
final FPUnit fpu = getFPU(config, elems);
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
int shuffle_distance = inst.getNsDetectorRows();
if (fpu.isNSslit()) {
final DetectorManufacturer dm = getDetectorManufacturer(config);
if (dm == null) return null;
//int rows = InstGmosCommon.calculateDefaultDetectorRows(dm, 1);
final int rows = dm.shuffleOffsetPixels();
if (shuffle_distance != rows) {
return String.format(MESSAGE, rows);
}
}
return null;
}
public Problem check(Config config, int step, ObservationElements elems, Object state) {
final String msg = getWarningMessage(config, elems);
if (msg != null) {
return new Problem(WARNING, PREFIX + "NS_SLIT_SPECTROSCOPY_RULE", msg,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
return null;
}
public IConfigMatcher getMatcher() {
return N_S_SPECTROSCOPY_MATCHER;
}
};
/**
* WARN if (shuffle_distance % ccd_y_binning != 0
*/
private static ScienceRule Y_BINNING_AND_SHUFFLE_DISTANCE_SPECTROSCOPY_RULE = new ScienceRule(
new ScienceRule.IScienceChecker() {
private static final String MESSAGE = "The shuffle distance must be a multiple of the CCD Y binning";
public boolean check(Config config, ObservationElements elems) {
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
if (inst == null) return false;
final int shuffle_distance = inst.getNsDetectorRows();
final Binning binningY =
(Binning) SequenceRule.getInstrumentItem(config, InstGmosCommon.CCD_Y_BIN_PROP);
return binningY != null && (shuffle_distance % binningY.getValue() != 0);
}
public String getMessage() {
return MESSAGE;
}
public String getId() {
return PREFIX + "Y_BINNING_AND_SHUFFLE_DISTANCE_SPECTROSCOPY_RULE";
}
}
, N_S_SPECTROSCOPY_MATCHER, ERROR
);
/**
* REQUIREMENTS: There must be an OT phase-2 check which gives a warning when individual GMOS exposure times are
* longer than 60 minutes for the eev/e2v detectors or 45 minutes for the Hamamatsu detectors.
* <p/>
* CONTEXT: There are recommended maximum exposure times for the GMOS instruments due to excessive contamination of
* the image due to cosmic rays. We request a (yellow) warning appear when the requested exposure time exceeds this
* limit. This refers not to the total exposure time in the observation but to the exposure time of the individual
* exposures.
* <p/>
* Update: REL-176, 21nov2011
* GMOS-S EEV: 60 min
* GMOS-N EEV: 40 min
* GMOS-N Hamamatsu: 20 min
*/
private static IConfigRule MAX_EXPOSURE_TIME_RULE = new AbstractConfigRule() {
private static final String msg = "Exposure time exceeds recommended maximum for the GMOS instruments due to excessive contamination of the image due to cosmic rays";
@Override
public Problem check(Config config, int step, ObservationElements elems, Object state) {
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
final DetectorManufacturer detectorManufacturer = getDetectorManufacturer(config);
final double maxExp;
if (detectorManufacturer == DetectorManufacturer.E2V) {
maxExp = 60.0 * 60;
} else if (detectorManufacturer == DetectorManufacturer.E2V) {
maxExp = 40.0 * 60;
} else if (detectorManufacturer == DetectorManufacturer.HAMAMATSU) {
maxExp = 20.0 * 60;
} else {
return null;
}
final Double expTime = getExposureTime(inst, config);
if (expTime != null && expTime > maxExp) {
return new Problem(WARNING, PREFIX + "MAX_EXPOSURE_TIME_RULE", msg,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
return null;
}
};
/**
* REL-387:
* REQUIREMENT: The OT must generate an error if any exposure time (whether in the GMOS-N/S static
* component, in a GMOS-N/S iterator, or in a dark or Flat/Arc Observe) is a non-integer.
* <p/>
* CONTEXT: The GMOS DC does not support fractional exposure times. In practice if someone enters a
* fractional exposure time the DC rounds down to the nearest integer, but currently the OT does not
* give any warning that this will happen so it confuses people. Also, if someone enters an exposure
* time less than 1 sec the GMOS DC rounds down to 0 sec but it still opens and closes the shutter.
* There is no way to calibrate such data since we don't really know how long the shutter was open for.
*/
private static IConfigRule INTEGER_EXPOSURE_TIME_RULE = new AbstractConfigRule() {
private static final String msg = "The GMOS DC does not support fractional exposure times";
@Override
public Problem check(Config config, int step, ObservationElements elems, Object state) {
InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
Double expTime = getExposureTime(inst, config);
if (expTime != null && expTime.doubleValue() != (int) expTime.doubleValue()) {
return new Problem(ERROR, PREFIX + "INTEGER_EXPOSURE_TIME_RULE", msg,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
return null;
}
};
private static IConfigRule NON_ZERO_EXPOSURE_TIME_RULE = new AbstractConfigRule() {
private static final String msg = "Exposure time must be greater than 0";
@Override
public Problem check(Config config, int step, ObservationElements elems, Object state) {
if (!InstConstants.BIAS_OBSERVE_TYPE.equals(config.getItemValue(GeneralRule.OBSTYPE_KEY))) {
InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
Double expTime = getExposureTime(inst, config);
if( expTime != null && expTime.doubleValue() <= 0.0 ) {
return new Problem(ERROR, PREFIX + "NON_ZERO_EXPOSURE_TIME_RULE", msg,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
}
return null;
}
};
private static class MdfMaskNameRule extends AbstractConfigRule {
private final Problem.Type problemType;
public MdfMaskNameRule(Problem.Type type) {
this.problemType = type;
}
@Override
public Problem check(Config config, int step, ObservationElements elems, Object state) {
final SPInstObsComp spInstObsComp = elems.getInstrument();
if (spInstObsComp.getType() == InstGmosNorth.SP_TYPE || spInstObsComp.getType() == InstGmosSouth.SP_TYPE) {
final Option<Problem> problemOption = MdfConfigRule.checkMaskName(FPUnitMode.CUSTOM_MASK, config, step, elems, state);
if (problemOption.isDefined() && problemOption.get().getType() == problemType)
return problemOption.get();
else
return null;
}
else
return null;
}
}
/**
* ERROR: If GMOS-S is used with E2V after semester 2014A.
* This can happen when PIs copy parts of old programs into new ones. PIs will not be able to change the detector
* settings themselves and will have to have a staff member help them. This is not ideal, but since this is only
* a transitional problem this seems to be the simplest solution.
* (Note that very soon we will have to apply this rule to GMOS-N, too.)
*/
private static final Semester SEMESTER_2014A = new Semester(2014, Semester.Half.A);
private static IConfigRule POST_2014A_GMOS_S_WITH_E2V = new AbstractConfigRule() {
private static final String msg = "Starting with 2014B GMOS-S must use the Hamamatsu CCDs. Please create a new observation or ask your contact scientist to update the CCD settings.";
private Option<Semester> semester(final ObservationElements elems) {
return Optional.ofNullable(elems) // null safe access chain
.map(ObservationElements::getObservationNode)
.map(ISPObservation::getProgramID)
.map(RichSpProgramId$.MODULE$::apply)
.map(RichSpProgramId::semester) // result of this is a Scala Option
.orElse(Option.empty()); // turn the Java None result into a Scala None
}
@Override public Problem check(Config config, int step, ObservationElements elems, Object state) {
// apply this rule to GMOS-S
final SPInstObsComp instrument = elems.getInstrument();
if (instrument.getType() == InstGmosSouth.SP_TYPE) {
// apply only if semester is known
final Option<Semester> semester = semester(elems);
if (semester.isDefined()) {
// apply it to observations for 2016A or later
if (semester.get().compareTo(SEMESTER_2014A) > 0) {
// apply if E2V is selected
final DetectorManufacturer ccd = ((InstGmosSouth) instrument).getDetectorManufacturer();
if (ccd == DetectorManufacturer.E2V) {
return new Problem(ERROR, PREFIX + "POST_2014A_GMOS_S_WITH_E2V_RULE", msg, SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
}
}
}
return null;
}
};
private static final class MultiKey {
final Filter filter;
final SkyBackground sb;
private MultiKey(Filter filter, SkyBackground sb) {
this.filter = filter;
this.sb = sb;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final MultiKey multiKey = (MultiKey) o;
if (filter != multiKey.filter)
return false;
return sb == multiKey.sb;
}
@Override
public int hashCode() {
int result = (filter != null ? filter.hashCode() : 0);
result = 31 * result + (sb != null ? sb.hashCode() : 0);
return result;
}
}
private static final Map<MultiKey, Double> E2V_EXPOSURE_LIMITS = new HashMap<>();
private static final Map<MultiKey, Double> HAM_EXPOSURE_LIMITS = new HashMap<>();
private static Double getLimit(final DetectorManufacturer dm, final Filter filter, final AmpGain gain, final SkyBackground sb, final Binning binning) {
// Currently we only apply these rules for AmpGain.LOW. Note that the amp gain was originally part of
// the MultiKey but as long as only one value is relevant it seems overkill to have it there.
if (gain != AmpGain.LOW) return Double.MAX_VALUE;
// check if we have a max value defined
final MultiKey key = new MultiKey(filter, sb);
final Double storedLimit = getLimits(dm).get(key);
if (storedLimit == null) return Double.MAX_VALUE;
// if so return it, taking binning into account
switch (binning) {
case ONE: return storedLimit;
case TWO: return storedLimit / 4.0;
case FOUR: return storedLimit / 16.0;
default: throw new Error();
}
}
private static Map<MultiKey, Double> getLimits(final DetectorManufacturer dm) {
switch (dm) {
case E2V: return E2V_EXPOSURE_LIMITS;
case HAMAMATSU: return HAM_EXPOSURE_LIMITS;
default: throw new Error();
}
}
static {
// ==================
//HAMAMATSU GMOS-N
// GMOS-N g-band (Hamamatsu Blue CCD) 1x1 binning (unbinned)
// BG20: 4.35 hours (longer than the maximum exposure time due to cosmic rays)
// BG50: 1.83 hours (longer than the maximum exposure time due to cosmic rays)
// BG80: 32.5 minutes
// BGAny: 4.5 minutes
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.g_G0301, SkyBackground.PERCENT_20), 4.35 * 60 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.g_G0301, SkyBackground.PERCENT_50), 1.83 * 60 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.g_G0301, SkyBackground.PERCENT_80), 32.5 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.g_G0301, SkyBackground.ANY), 4.5 * 60);
// GMOS-N r-band (Hamamatsu Red CCD) 1x1 binning (unbinned)
// BG20: 1.83 hours (longer than the maximum exposure time due to cosmic rays)
// BG50: 1.02 hours (longer than the maximum exposure time due to cosmic rays)
// BG80: 25 minutes
// BGAny: 4.3 minutes
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.r_G0303, SkyBackground.PERCENT_20), 1.83 * 60 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.r_G0303, SkyBackground.PERCENT_50), 1.02 * 60 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.r_G0303, SkyBackground.PERCENT_80), 25.0 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.r_G0303, SkyBackground.ANY), 4.3 * 60);
// GMOS-N i-band (Hamamatsu Red CCD) 1x1 binning (unbinned)
// BG20: 1.05 hours (longer than the maximum exposure time due to cosmic rays)
// BG50: 41 minutes (may end up being longer than the maximum exposure time due to cosmic rays)
// BG80: 22.3 minutes
// BGAny: 5.5 minutes
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.i_G0302, SkyBackground.PERCENT_20), 1.05 * 60 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.i_G0302, SkyBackground.PERCENT_50), 41.0 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.i_G0302, SkyBackground.PERCENT_80), 22.3 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.i_G0302, SkyBackground.ANY), 5.5 * 60);
// GMOS-N z-band (Hamamatsu Red CCD) 1x1 binning (unbinned)
// BG20: 12.3 minutes
// BG50: 12 minutes
// BG80: 11.5 minutes
// BGAny: 8.7 minutes
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.z_G0304, SkyBackground.PERCENT_20), 12.3 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.z_G0304, SkyBackground.PERCENT_50), 12.0 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.z_G0304, SkyBackground.PERCENT_80), 11.5 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.z_G0304, SkyBackground.ANY), 8.7 * 60);
// GMOS-N Z-band (Hamamatsu Red CCD) 1x1 binning (unbinned)
// BG20: 35 minutes (may end up being longer than the maximum exposure time due to cosmic rays)
// BG50: 31.1 minutes (may end up being longer than the maximum exposure time due to cosmic rays)
// BG80: 25.8 minutes
// BGAny: 13.3 minutes
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Z_G0322, SkyBackground.PERCENT_20), 35.0 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Z_G0322, SkyBackground.PERCENT_50), 31.1 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Z_G0322, SkyBackground.PERCENT_80), 25.8 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Z_G0322, SkyBackground.ANY), 13.3 * 60);
// GMOS-N Y-band (Hamamatsu Red CCD) 1x1 binning (unbinned)
// BG20: 52.6 minutes (may end up being longer than the maximum exposure time due to cosmic rays)
// BG50: 52.7 minutes (clearly there is something not quite right with the ITC...)
// BG80: 52.8 minutes (clearly there is something not quite right with the ITC...)
// BGAny: 52.8 minutes (I suggest we call all of these 53 minutes, we already know there is an approximation with the ITC because it has no dependence of background counts on sky background in the nearIR)
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Y_G0323, SkyBackground.PERCENT_20), 52.6 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Y_G0323, SkyBackground.PERCENT_50), 52.7 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Y_G0323, SkyBackground.PERCENT_80), 52.8 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Y_G0323, SkyBackground.ANY), 52.8 * 60);
//HAMAMATSU GMOS-S
// GMOS-S g-band (Hamamatsu Blue CCD) 1x1 binning (unbinned)
// BG20: 4.35 hours (longer than the maximum exposure time due to cosmic rays)
// BG50: 1.83 hours (longer than the maximum exposure time due to cosmic rays)
// BG80: 32.5 minutes
// BGAny: 4.5 minutes
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.g_G0325, SkyBackground.PERCENT_20), 4.35 * 60 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.g_G0325, SkyBackground.PERCENT_50), 1.83 * 60 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.g_G0325, SkyBackground.PERCENT_80), 32.5 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.g_G0325, SkyBackground.ANY), 4.5 * 60);
// GMOS-S r-band (Hamamatsu Red CCD) 1x1 binning (unbinned)
// BG20: 1.83 hours (longer than the maximum exposure time due to cosmic rays)
// BG50: 1.02 hours (longer than the maximum exposure time due to cosmic rays)
// BG80: 25 minutes
// BGAny: 4.3 minutes
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.r_G0326, SkyBackground.PERCENT_20), 1.83 * 60 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.r_G0326, SkyBackground.PERCENT_50), 1.02 * 60 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.r_G0326, SkyBackground.PERCENT_80), 25.0 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.r_G0326, SkyBackground.ANY), 4.3 * 60);
// GMOS-S i-band (Hamamatsu Red CCD) 1x1 binning (unbinned)
// BG20: 1.05 hours (longer than the maximum exposure time due to cosmic rays)
// BG50: 41 minutes (may end up being longer than the maximum exposure time due to cosmic rays)
// BG80: 22.3 minutes
// BGAny: 5.5 minutes
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.i_G0327, SkyBackground.PERCENT_20), 1.05 * 60 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.i_G0327, SkyBackground.PERCENT_50), 41.0 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.i_G0327, SkyBackground.PERCENT_80), 22.3 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.i_G0327, SkyBackground.ANY), 5.5 * 60);
// GMOS-S z-band (Hamamatsu Red CCD) 1x1 binning (unbinned)
// BG20: 12.3 minutes
// BG50: 12 minutes
// BG80: 11.5 minutes
// BGAny: 8.7 minutes
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.z_G0328, SkyBackground.PERCENT_20), 12.3 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.z_G0328, SkyBackground.PERCENT_50), 12.0 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.z_G0328, SkyBackground.PERCENT_80), 11.5 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.z_G0328, SkyBackground.ANY), 8.7 * 60);
// GMOS-S Z-band (Hamamatsu Red CCD) 1x1 binning (unbinned)
// BG20: 35 minutes (may end up being longer than the maximum exposure time due to cosmic rays)
// BG50: 31.1 minutes (may end up being longer than the maximum exposure time due to cosmic rays)
// BG80: 25.8 minutes
// BGAny: 13.3 minutes
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.Z_G0343, SkyBackground.PERCENT_20), 35.0 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.Z_G0343, SkyBackground.PERCENT_50), 31.1 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.Z_G0343, SkyBackground.PERCENT_80), 25.8 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.Z_G0343, SkyBackground.ANY), 13.3 * 60);
// GMOS-S Y-band (Hamamatsu Red CCD) 1x1 binning (unbinned)
// BG20: 52.6 minutes (may end up being longer than the maximum exposure time due to cosmic rays)
// BG50: 52.7 minutes (clearly there is something not quite right with the ITC...)
// BG80: 52.8 minutes (clearly there is something not quite right with the ITC...)
// BGAny: 52.8 minutes (I suggest we call all of these 53 minutes, we already know there is an approximation with the ITC because it has no dependence of background counts on sky background in the nearIR)
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.Y_G0344, SkyBackground.PERCENT_20), 52.6 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.Y_G0344, SkyBackground.PERCENT_50), 52.7 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.Y_G0344, SkyBackground.PERCENT_80), 52.8 * 60);
HAM_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.Y_G0344, SkyBackground.ANY), 52.8 * 60);
// ==================
//E2V GMOS-N
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.g_G0301, SkyBackground.PERCENT_20), 4.35 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.g_G0301, SkyBackground.PERCENT_50), 1.83 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.g_G0301, SkyBackground.PERCENT_80), 32.5 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.g_G0301, SkyBackground.ANY), 4.5 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.r_G0303, SkyBackground.PERCENT_20), 1.83 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.r_G0303, SkyBackground.PERCENT_50), 1.02 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.r_G0303, SkyBackground.PERCENT_80), 25.0 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.r_G0303, SkyBackground.ANY), 4.3 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.i_G0302, SkyBackground.PERCENT_20), 1.05 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.i_G0302, SkyBackground.PERCENT_50), 41.0 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.i_G0302, SkyBackground.PERCENT_80), 22.3 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.i_G0302, SkyBackground.ANY), 5.5 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.z_G0304, SkyBackground.PERCENT_20), 12.3 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.z_G0304, SkyBackground.PERCENT_50), 12.0 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.z_G0304, SkyBackground.PERCENT_80), 11.5 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.z_G0304, SkyBackground.ANY), 8.7 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Z_G0322, SkyBackground.PERCENT_20), 35.0 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Z_G0322, SkyBackground.PERCENT_50), 31.1 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Z_G0322, SkyBackground.PERCENT_80), 25.8 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Z_G0322, SkyBackground.ANY), 13.3 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Y_G0323, SkyBackground.PERCENT_20), 52.6 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Y_G0323, SkyBackground.PERCENT_50), 52.7 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Y_G0323, SkyBackground.PERCENT_80), 52.8 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterNorth.Y_G0323, SkyBackground.ANY), 52.8 * 60);
//E2V GMOS-S
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.u_G0332, SkyBackground.PERCENT_20), 48.6 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.u_G0332, SkyBackground.PERCENT_50), 21.1 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.u_G0332, SkyBackground.PERCENT_80), 5.78 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.u_G0332, SkyBackground.ANY), 45.0 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.g_G0325, SkyBackground.PERCENT_20), 4.35 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.g_G0325, SkyBackground.PERCENT_50), 1.83 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.g_G0325, SkyBackground.PERCENT_80), 32.5 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.g_G0325, SkyBackground.ANY), 4.5 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.r_G0326, SkyBackground.PERCENT_20), 2.56 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.r_G0326, SkyBackground.PERCENT_50), 1.43 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.r_G0326, SkyBackground.PERCENT_80), 34.0 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.r_G0326, SkyBackground.ANY), 5.9 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.i_G0327, SkyBackground.PERCENT_20), 2.13 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.i_G0327, SkyBackground.PERCENT_50), 1.37 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.i_G0327, SkyBackground.PERCENT_80), 43.6 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.i_G0327, SkyBackground.ANY), 10.4 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.z_G0328, SkyBackground.PERCENT_20), 1.06 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.z_G0328, SkyBackground.PERCENT_50), 1.0 * 60 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.z_G0328, SkyBackground.PERCENT_80), 55.0 * 60);
E2V_EXPOSURE_LIMITS.put(new MultiKey(FilterSouth.z_G0328, SkyBackground.ANY), 35.8 * 60);
}
private static IConfigRule FILTER_MAX_EXPOSURE_TIME_RULE = new IConfigRule() {
private static final String warnMsg = "The exposure time will cause the background to exceed 50% full well for the configuration and conditions";
private static final String errMsg = "The exposure time may cause the background to saturate for the configuration and conditions";
@Override
public Problem check(Config config, int step, ObservationElements elems, Object state) {
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
final DetectorManufacturer dm = getDetectorManufacturer(config);
final Filter filter = getFilter(config);
final Binning xBinning = getXBinning(config);//for imaging, binning is 1x1, 2x2 or 4x4
for (SPSiteQuality sq : elems.getSiteQuality()) {
final SkyBackground sb = sq.getSkyBackground();
final AmpGain gain = getGain(config);
final Double expTime = getExposureTime(inst, config);
final Double maxExpTime = getLimit(dm, filter, gain, sb, xBinning);
if (expTime != null && expTime > maxExpTime) {
return new Problem(ERROR, PREFIX + "E_FILTER_MAX_EXPOSURE_TIME_RULE", errMsg,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
} else if (expTime != null && expTime > maxExpTime / 2.0) {
return new Problem(WARNING, PREFIX + "W_FILTER_MAX_EXPOSURE_TIME_RULE", warnMsg,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
}
return null;
}
@Override
public IConfigMatcher getMatcher() {
return IMAGING_MATCHER;
}
};
/**
* Auxiliary methods
*/
// Works around a bug (?) in InstGmosCommon.getSysConfig where the FPU
// parameter isn't added unless using a "builtin" FPU option. Seems like
// CUSTOM should be set in this case.
private static FPUnit getFPU(Config config, ObservationElements elems) {
final FPUnitMode mode = (FPUnitMode) SequenceRule.getItem(config, FPUnitMode.class, FPU_MODE_KEY);
final FPUnit fpu;
switch (mode) {
case BUILTIN:
fpu = (FPUnit) SequenceRule.getItem(config, FPUnit.class, FPU_KEY);
break;
case CUSTOM_MASK:
// Okay custom mask "FPU" but *which* one. :-/
final SPComponentType type = elems.getInstrumentNode().getType();
fpu = InstGmosNorth.SP_TYPE.equals(type) ? FPUnitNorth.CUSTOM_MASK
: FPUnitSouth.CUSTOM_MASK;
break;
default:
final String msg = String.format("New unaccounted for FPUnitMode type: %s", mode.displayValue());
LOG.severe(msg);
throw new RuntimeException(msg);
}
return fpu;
}
private static Disperser getDisperser(Config config) {
return (Disperser) SequenceRule.getItem(config, Disperser.class, DISPERSER_KEY);
}
private static Filter getFilter(Config config) {
return (Filter) SequenceRule.getItem(config, Filter.class, FILTER_KEY);
}
public static DetectorManufacturer getDetectorManufacturer(Config config) {
return (DetectorManufacturer) SequenceRule.getItem(config, DetectorManufacturer.class, DETECTOR_KEY);
}
private static AmpGain getGain(Config config) {
return (AmpGain) SequenceRule.getItem(config, AmpGain.class, GAIN_KEY);
}
private static Binning getXBinning(Config config) {
return (Binning) SequenceRule.getItem(config, Binning.class, CCD_X_BINNING_KEY);
}
private static Binning getYBinning(Config config) {
return (Binning) SequenceRule.getItem(config, Binning.class, CCD_Y_BINNING_KEY);
}
private static Double getExposureTime(InstGmosCommon<?,?,?,?> inst, Config config) {
// REL-196. If there are no observes, there will be no observe exposure time.
final Double obsExp = SequenceRule.getExposureTime(config);
if (obsExp == null) return null;
if (inst.getUseNS() == UseNS.TRUE) {
return obsExp * inst.getNsNumCycles() * inst.getPosList().size() + (inst.isUseElectronicOffsetting() ? 11 : 25) * inst.getNsNumCycles();
} else {
return obsExp;
}
}
private static boolean isSpecFPUnselected(Config config, ObservationElements elems) {
final FPUnitMode fpuMode = (FPUnitMode) SequenceRule.getInstrumentItem(config, InstGmosCommon.FPU_MODE_PROP);
if (fpuMode == FPUnitMode.CUSTOM_MASK) return true;
if (fpuMode != FPUnitMode.BUILTIN) return false;
return !getFPU(config, elems).isImaging();
}
// From Bryan on SCT-203: I define nod_distance=sqrt((p2-p1)^2 + (q2-q1)^2)
// Since I don't play with square roots, just return the square nod distance.
private static double getSquareNodDistance(OffsetPos pos1, OffsetPos pos2) {
if (pos1 == null || pos2 == null) return 0;
double diffX = pos1.getXaxis() - pos2.getXaxis();
double diffY = pos1.getYaxis() - pos2.getYaxis();
return diffX * diffX + diffY * diffY;
}
private static IConfigRule MAX_ROI_RULE = new IConfigRule() {
private static final String errMsgE2V = "E2V CCDs support up to 4 custom ROIs";
private static final String errMsgHamamatsu = "Hamamatsu CCDs support up to 5 custom ROIs";
@Override
public Problem check(final Config config, final int step, final ObservationElements elems, final Object state) {
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
DetectorManufacturer dm = getDetectorManufacturer(config);
if (dm == null) {
dm = inst.getDetectorManufacturer();
}
final CustomROIList customROIList = inst.getCustomROIs();
if (customROIList != null && customROIList.size() > dm.getMaxROIs()) {
final String prefix, msg;
switch (dm) {
case HAMAMATSU:
prefix = PREFIX + "HAMAMATSU_MAX_ROI_RULE";
msg = errMsgHamamatsu;
break;
case E2V:
prefix = PREFIX + "E2V_MAX_ROI_RULE";
msg = errMsgE2V;
break;
default:
throw new IllegalArgumentException("unknown detector");
}
return new Problem(ERROR, prefix, msg, SequenceRule.getInstrumentOrSequenceNode(step, elems));
} else {
return null;
}
}
@Override
public IConfigMatcher getMatcher() {
return IConfigMatcher.ALWAYS;
}
};
private static IConfigRule CUSTOM_ROI_NOT_DECLARED_RULE = new IConfigRule() {
//private static final String warnMsg = "Custom ROIs are declared but not used in any step";
private static final String errMsg = "Custom ROIs are not declared but are used in a step";
@Override
public Problem check(Config config, int step, ObservationElements elems, Object state) {
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
BuiltinROI roi = (BuiltinROI) SequenceRule.getInstrumentItem(config, InstGmosCommon.BUILTIN_ROI_PROP);
if (roi == null) roi = inst.getBuiltinROI();
if (roi.equals(BuiltinROI.CUSTOM) && inst.getCustomROIs().isEmpty()) {
return new Problem(ERROR, PREFIX + "CUSTOM_ROI_NOT_DECLARED_RULE", errMsg,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
return null;
}
@Override
public IConfigMatcher getMatcher() {
return IConfigMatcher.ALWAYS;
}
};
private static IConfigRule ROI_OVERLAP_RULE = new IConfigRule() {
private static final String errMsg = "The custom ROIs must not overlap";
@Override
public Problem check(final Config config, final int step, final ObservationElements elems, final Object state) {
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
final DetectorManufacturer dm = inst.getDetectorManufacturer();
final boolean overlaps;
switch (dm) {
case E2V:
overlaps = inst.getCustomROIs().rowOverlap();
break;
case HAMAMATSU:
overlaps = inst.getCustomROIs().pixelOverlap();
break;
default:
throw new IllegalArgumentException("unknown detector");
}
if (overlaps) {
return new Problem(ERROR, PREFIX + "ROI_OVERLAP_RULE", errMsg,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
} else {
return null;
}
}
@Override
public IConfigMatcher getMatcher() {
return IConfigMatcher.ALWAYS;
}
};
/**
* REL-2057: It is possible to invalidate custom ROIs by changing the detector. This rule detects ROIs that
* have been invalidated by doing so.
*/
private static IConfigRule ROI_INVALID_RULE = new IConfigRule() {
private static final String errMsg = "One or several custom ROIs are invalid";
@Override
public Problem check(Config config, int step, ObservationElements elems, Object state) {
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
if (!inst.validateCustomROIs()) {
return new Problem(ERROR, PREFIX + "ROI_INVALID_RULE", errMsg,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
} else {
return null;
}
}
@Override
public IConfigMatcher getMatcher() {
return IConfigMatcher.ALWAYS;
}
};
/**
* REL-1811: Warn if there are P-offsets for a slit spectroscopy observation.
* Warn for FPUs = (*arcsec or Custom Mask).
*/
private static IConfigRule NO_P_OFFSETS_WITH_SLIT_SPECTROSCOPY_RULE = new NoPOffsetWithSlitRule(
PREFIX,
new AbstractFunction2<Config, ObservationElements, Boolean>() {
public Boolean apply(Config config, ObservationElements elems) {
return isCustomMask(config) || isSlitMask(config, elems);
}
private boolean isCustomMask(final Config config) {
final FPUnitMode fpuMode =
(FPUnitMode) SequenceRule.getInstrumentItem(config, InstGmosCommon.FPU_MODE_PROP);
return fpuMode == FPUnitMode.CUSTOM_MASK;
}
private boolean isSlitMask(final Config config, final ObservationElements elems) {
final FPUnit fpu = getFPU(config, elems);
return fpu.isSpectroscopic() || fpu.isNSslit();
}
}
);
private static IRule UNUSED_CUSTOM_ROI_RULE = new IRule() {
private static final String warnMsg = "Custom ROIs are declared but not used in any step";
private IConfigRule rule = new AbstractConfigRule() {
@Override
public Problem check(Config config, int step, ObservationElements elems, Object state) {
final InstGmosCommon<?,?,?,?> inst = (InstGmosCommon) elems.getInstrument();
BuiltinROI roi = (BuiltinROI) SequenceRule.getInstrumentItem(config, InstGmosCommon.BUILTIN_ROI_PROP);
if (roi == null) roi = inst.getBuiltinROI();
if (!roi.equals(BuiltinROI.CUSTOM) && !inst.getCustomROIs().isEmpty()) {
return new Problem(WARNING, PREFIX + "CUSTOM_ROI_NOT_DECLARED", warnMsg,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
return null;
}
};
@Override
public IP2Problems check(ObservationElements elements) {
final List<Problem> probs = new ArrayList<>();
// Walk through ever config in the sequence, checking each rule. If
// a rule matches, remove it from the set so it won't be reported twice.
int step = 0;
ConfigSequence seq = elements.getSequence();
for (Iterator<Config> it = seq.iterator(); it.hasNext(); ++step) {
final Config config = it.next();
Problem prob = rule.check(config, step, elements, null);
if (prob != null) {
probs.add(prob);
}
}
IP2Problems problems = new P2Problems();
if (probs.size() == step) {
problems.append(probs.get(probs.size() - 1));
}
return problems;
}
};
/**
* REL-1249: Warn if IFU observations have a spatial binning.
* This rules fires for any IFU observations that do not use the mirror and has a y binning != 1.
*/
private static IConfigRule IFU_NO_SPATIAL_BINNING_RULE = new IConfigRule() {
private static final String errMsg = "IFU observations generally should not be binned in the spatial direction (y)";
@Override
public Problem check(Config config, int step, ObservationElements elems, Object state) {
if (getFPU(config, elems).isIFU() && !getDisperser(config).isMirror() && getYBinning(config) != Binning.ONE) {
return new Problem(WARNING, PREFIX + "IFU_NO_SPATIAL_BINNING_RULE", errMsg,
SequenceRule.getInstrumentOrSequenceNode(step, elems));
}
return null;
}
@Override
public IConfigMatcher getMatcher() {
return IConfigMatcher.ALWAYS;
}
};
/**
* Register all the GMOS rules to apply
*/
static {
// GMOS_RULES.add(SequenceRule.DUMP_CONFIG_RULE);
GMOS_RULES.add(DISPERSER_AND_MIRROR_RULE);
GMOS_RULES.add(BAD_AMP_COUNT_RULE);
GMOS_RULES.add(CHECK_3_AMP_MODE);
GMOS_RULES.add(ACQUISITION_RULE);
// GMOS_RULES.add(AMPLIFIER_SCIENCE_RULE);
GMOS_RULES.add(GAIN_SCIENCE_RULE);
GMOS_RULES.add(READMODE_SCIENCE_RULE);
GMOS_RULES.add(GAIN_READMODE_RULE);
GMOS_RULES.add(BINNING_RULE);
GMOS_RULES.add(GMOS_S_DTA_X_RULE);
GMOS_RULES.add(SPATIAL_DITHER_IMAGING_RULE);
GMOS_RULES.add(CCD_BIN_AND_IQ_IMAGING_RULE);
GMOS_RULES.add(CCD_BIN_AND_ALTAIR_IMAGING_RULE);
GMOS_RULES.add(GRATING_NO_SLIT_SCIENCE_RULE);
GMOS_RULES.add(B600_G5303_RULE);
GMOS_RULES.add(SPECTROSCOPIC_ELEMENT_IN_FPU_SCIENCE_RULE);
GMOS_RULES.add(N_S_NO_SLIT_OR_GRATING_SCIENCE_RULE);
GMOS_RULES.add(EXP_SPECTROSCOPIC_RULE);
GMOS_RULES.add(IFU_LEFT_SLIT_SPECTROSCOPIC_RULE);
GMOS_RULES.add(IFU_2_SLIT_AND_FILTER_SPECTROSCOPIC_RULE);
GMOS_RULES.add(WAVELENGTH_SPECTROSCOPIC_RULE);
GMOS_RULES.add(DISPERSER_WAVELENGTH_SPECTROSCOPIC_RULE);
GMOS_RULES.add(N_S_FPU_SPECTROSCOPIC_RULE);
GMOS_RULES.add(NOD_DISTANCE_N_S_SPECTROSCOPY_RULE);
GMOS_RULES.add(SHUFFLE_DISTANCE_N_S_SPECTROSCOPY_RULE);
GMOS_RULES.add(N_S_CYCLES_N_S_SPECTROSCOPY_RULE);
GMOS_RULES.add(EOFFSET_N_S_SPECTROSCOPY_RULE);
GMOS_RULES.add(NS_SLIT_SPECTROSCOPY_RULE);
GMOS_RULES.add(Y_BINNING_AND_SHUFFLE_DISTANCE_SPECTROSCOPY_RULE);
GMOS_RULES.add(MAX_EXPOSURE_TIME_RULE);
GMOS_RULES.add(FILTER_MAX_EXPOSURE_TIME_RULE);
GMOS_RULES.add(INTEGER_EXPOSURE_TIME_RULE);
GMOS_RULES.add(NON_ZERO_EXPOSURE_TIME_RULE);
GMOS_RULES.add(MAX_ROI_RULE);
GMOS_RULES.add(ROI_OVERLAP_RULE);
GMOS_RULES.add(ROI_INVALID_RULE);
GMOS_RULES.add(CUSTOM_ROI_NOT_DECLARED_RULE);
GMOS_RULES.add(IFU_NO_SPATIAL_BINNING_RULE);
GMOS_RULES.add(NO_P_OFFSETS_WITH_SLIT_SPECTROSCOPY_RULE);
GMOS_RULES.add(new MdfMaskNameRule(Problem.Type.ERROR));
GMOS_RULES.add(new MdfMaskNameRule(Problem.Type.WARNING));
GMOS_RULES.add(POST_2014A_GMOS_S_WITH_E2V);
}
public IP2Problems check(ObservationElements elems) {
GmosSpatialDitherState state = new GmosSpatialDitherState();
IP2Problems probs = (new CompositeRule(
new IRule[]{new GmosOiwfsStarRule(),
new SequenceRule(GMOS_RULES, state),
AltairRule.INSTANCE, // Altair checks (See REL-386)
UNUSED_CUSTOM_ROI_RULE,
},
CompositeRule.Type.all
)).check(elems);
state.addWarning(probs, elems);
final GmosOffsetIteratorRule goir = new GmosOffsetIteratorRule();
probs.append(goir.check(elems));
return probs;
}
}
|
{
"content_hash": "71f920687fe5696b3ca0fb960a8be476",
"timestamp": "",
"source": "github",
"line_count": 1919,
"max_line_length": 204,
"avg_line_length": 46.580510682647216,
"alnum_prop": 0.6064796169508211,
"repo_name": "fnussber/ocs",
"id": "35e5cd7c46232b7a89c36c487a658ce598710009",
"size": "89388",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "bundle/edu.gemini.p2checker/src/main/java/edu/gemini/p2checker/rules/gmos/GmosRule.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "7919"
},
{
"name": "HTML",
"bytes": "491513"
},
{
"name": "Java",
"bytes": "14373815"
},
{
"name": "JavaScript",
"bytes": "7962"
},
{
"name": "Scala",
"bytes": "5075673"
},
{
"name": "Shell",
"bytes": "4989"
},
{
"name": "Tcl",
"bytes": "2841"
}
],
"symlink_target": ""
}
|
package main
import (
"github.com/go-martini/martini"
"github.com/guh/guh-libgo"
"github.com/martini-contrib/render"
)
// DefineVendorEndPoints defines all routes related to vendors
func DefineVendorEndPoints(m *martini.ClassicMartini, config guh.Config) {
// Lists all available vendors
m.Get("/api/v1/vendors.json", func(r render.Render) {
vendor := guh.NewVendor(config)
vendors, err := vendor.All()
if err != nil {
r.JSON(500, err)
} else {
r.JSON(200, vendors)
}
})
// Gets one specific vendor identified by its ID
m.Get("/api/v1/vendors/:id.json", func(r render.Render, params martini.Params) {
device := guh.NewVendor(config)
foundVendor, err := device.Find(params["id"])
if err != nil {
r.JSON(500, err)
} else {
if foundVendor == "" {
r.JSON(404, make(map[string]interface{}))
} else {
r.JSON(200, foundVendor)
}
}
})
// Gets all available device classes of a specific vendor identified by his ID
m.Get("/api/v1/vendors/:vendor_id/device_classes.json", func(r render.Render, params martini.Params) {
deviceClassService := guh.NewDeviceClassService(config)
deviceClasses, err := deviceClassService.AllByVendor(params["vendor_id"])
if err != nil {
r.JSON(500, err)
} else {
r.JSON(200, deviceClasses)
}
})
}
|
{
"content_hash": "ecdfbc6f25e935fd9d3fafd3ff14e1c3",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 103,
"avg_line_length": 24.074074074074073,
"alnum_prop": 0.676923076923077,
"repo_name": "guh/guh-webserver",
"id": "f4d222efc399c621479b359ad3afc1b2232671ae",
"size": "3347",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendors.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "36857"
},
{
"name": "Shell",
"bytes": "5389"
}
],
"symlink_target": ""
}
|
package com.sksamuel.scapegoat.inspections
import com.sksamuel.scapegoat.PluginRunner
import org.scalatest.{ FreeSpec, Matchers, OneInstancePerTest }
/** @author Stephen Samuel */
class RedundantFinalModifierOnVarTest
extends FreeSpec
with Matchers
with PluginRunner
with OneInstancePerTest {
override val inspections = Seq(new RedundantFinalModifierOnVar)
"RedundantFinalModifierOnVar" - {
"should report warning" - {
"when object has final var" in {
val code = """object Test { final var foo = {} } """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 1
}
"when final class has final var" in {
val code = """final class Test { final var foo = {} } """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 1
}
"when case class has final var" in {
val code = """case class Test() { final var foo = {} } """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 1
}
"when case object has final var" in {
val code = """case object Test { final var foo = {} } """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 1
}
"when implicit object has final var" in {
val code = """object A { implicit object B { final var foo = {} } } """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 1
}
}
"should not report warning" - {
"when non final class has final var" in {
val code = """class Test { final var foo = {} } """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"when trait has final var" in {
val code = """trait Test { final var foo = {} } """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"when abstract class has final var" in {
val code = """abstract class Test { final var foo = {} } """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"when implicit class has final var" in {
val code = """object A { implicit class B(str:String) { final var foo = {} } }"""
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"on val fields" in {
val code = """object A { val b = true }"""
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"on non final var fields" in {
val code = """object A { var b = true }"""
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
}
}
}
|
{
"content_hash": "6046b27eff69e457dbab404d4b55acde",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 90,
"avg_line_length": 36.58441558441559,
"alnum_prop": 0.6187433439829606,
"repo_name": "jasonchaffee/scalac-scapegoat-plugin",
"id": "7511f97fee4aa7d5fe032a2299dd9c88daae1a99",
"size": "2817",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/test/scala/com/sksamuel/scapegoat/inspections/RedundantFinalModifierOnVarTest.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "665"
},
{
"name": "Scala",
"bytes": "366763"
}
],
"symlink_target": ""
}
|
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
{
"content_hash": "24c621379cc43d7d410a30c0892762a4",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "5bd52f8be4ec1c076afd8457099f3f52f76d6a53",
"size": "170",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Lathyrus/Lathyrus lens/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
''' Provide properties for various visual attrributes.
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import base64
from io import BytesIO
import re
# External imports
import PIL.Image
from six import string_types
# Bokeh imports
from .. import enums
from .auto import Auto
from .bases import Property
from .container import Seq, Tuple
from .datetime import Datetime, TimeDelta
from .either import Either
from .enum import Enum
from .numeric import Int, Float
from .regex import Regex
from .primitive import String
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'DashPattern',
'FontSize',
'HatchPatternType',
'Image',
'MinMaxBounds',
'MarkerType',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
class DashPattern(Either):
''' Accept line dash specifications.
Express patterns that describe line dashes. ``DashPattern`` values
can be specified in a variety of ways:
* An enum: "solid", "dashed", "dotted", "dotdash", "dashdot"
* a tuple or list of integers in the `HTML5 Canvas dash specification style`_.
Note that if the list of integers has an odd number of elements, then
it is duplicated, and that duplicated list becomes the new dash list.
To indicate that dashing is turned off (solid lines), specify the empty
list [].
.. _HTML5 Canvas dash specification style: http://www.w3.org/html/wg/drafts/2dcontext/html5_canvas/#dash-list
'''
_dash_patterns = {
"solid": [],
"dashed": [6],
"dotted": [2,4],
"dotdash": [2,4,6,4],
"dashdot": [6,4,2,4],
}
def __init__(self, default=[], help=None):
types = Enum(enums.DashPattern), Regex(r"^(\d+(\s+\d+)*)?$"), Seq(Int)
super(DashPattern, self).__init__(*types, default=default, help=help)
def __str__(self):
return self.__class__.__name__
def transform(self, value):
value = super(DashPattern, self).transform(value)
if isinstance(value, string_types):
try:
return self._dash_patterns[value]
except KeyError:
return [int(x) for x in value.split()]
else:
return value
def _sphinx_type(self):
return self._sphinx_prop_link()
class FontSize(String):
_font_size_re = re.compile(r"^[0-9]+(.[0-9]+)?(%|em|ex|ch|ic|rem|vw|vh|vi|vb|vmin|vmax|cm|mm|q|in|pc|pt|px)$", re.I)
def validate(self, value, detail=True):
super(FontSize, self).validate(value, detail)
if isinstance(value, string_types):
if len(value) == 0:
msg = "" if not detail else "empty string is not a valid font size value"
raise ValueError(msg)
elif self._font_size_re.match(value) is None:
msg = "" if not detail else "%r is not a valid font size value" % value
raise ValueError(msg)
class HatchPatternType(Either):
''' Accept built-in fill hatching specifications.
Accepts either "long" names, e.g. "horizontal-wave" or the single letter
abbreviations, e.g. "v"
'''
def __init__(self, default=[], help=None):
types = Enum(enums.HatchPattern), Enum(enums.HatchPatternAbbreviation)
super(HatchPatternType, self).__init__(*types, default=default, help=help)
def __str__(self):
return self.__class__.__name__
def _sphinx_type(self):
return self._sphinx_prop_link()
class Image(Property):
''' Accept image file types, e.g PNG, JPEG, TIFF, etc.
This property can be configured with:
* A string filename to be loaded with ``PIL.Image.open``
* An RGB(A) NumPy array, will be converted to PNG
* A ``PIL.Image.Image`` object
In all cases, the image data is serialized as a Base64 encoded string.
'''
def validate(self, value, detail=True):
import numpy as np
valid = False
if value is None or isinstance(value, (string_types, PIL.Image.Image)):
valid = True
if isinstance(value, np.ndarray):
valid = value.dtype == "uint8" and len(value.shape) == 3 and value.shape[2] in (3, 4)
if not valid:
msg = "" if not detail else "invalid value: %r; allowed values are string filenames, PIL.Image.Image instances, or RGB(A) NumPy arrays" % value
raise ValueError(msg)
def transform(self, value):
if value is None:
return None
import numpy as np
if isinstance(value, np.ndarray):
value = PIL.Image.fromarray(value)
if isinstance(value, string_types):
value = PIL.Image.open(value)
if isinstance(value, PIL.Image.Image):
out = BytesIO()
fmt = value.format or "PNG"
value.save(out, fmt)
return "data:image/%s;base64," % fmt.lower() + base64.b64encode(out.getvalue()).decode('ascii')
raise ValueError("Could not transform %r" % value)
class MinMaxBounds(Either):
''' Accept (min, max) bounds tuples for use with Ranges.
Bounds are provided as a tuple of ``(min, max)`` so regardless of whether your range is
increasing or decreasing, the first item should be the minimum value of the range and the
second item should be the maximum. Setting min > max will result in a ``ValueError``.
Setting bounds to None will allow your plot to pan/zoom as far as you want. If you only
want to constrain one end of the plot, you can set min or max to
``None`` e.g. ``DataRange1d(bounds=(None, 12))`` '''
def __init__(self, accept_datetime=False, default='auto', help=None):
if accept_datetime:
types = (
Auto,
Tuple(Float, Float),
Tuple(TimeDelta, TimeDelta),
Tuple(Datetime, Datetime),
)
else:
types = (
Auto,
Tuple(Float, Float),
Tuple(TimeDelta, TimeDelta),
)
super(MinMaxBounds, self).__init__(*types, default=default, help=help)
def validate(self, value, detail=True):
super(MinMaxBounds, self).validate(value, detail)
if value is None:
pass
elif value[0] is None or value[1] is None:
pass
elif value[0] >= value[1]:
msg = "" if not detail else "Invalid bounds: maximum smaller than minimum. Correct usage: bounds=(min, max)"
raise ValueError(msg)
return True
def _sphinx_type(self):
return self._sphinx_prop_link()
class MarkerType(Enum):
'''
'''
def __init__(self, **kw):
super(MarkerType, self).__init__(enums.MarkerType, **kw)
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
|
{
"content_hash": "cf7a6546888935a4f57e6402e650c914",
"timestamp": "",
"source": "github",
"line_count": 244,
"max_line_length": 155,
"avg_line_length": 32.49590163934426,
"alnum_prop": 0.5247824441922059,
"repo_name": "timsnyder/bokeh",
"id": "553f6621e57cddc99f4aafb0820ec6bb8ba32187",
"size": "8260",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bokeh/core/property/visual.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1442"
},
{
"name": "CSS",
"bytes": "24877"
},
{
"name": "Dockerfile",
"bytes": "4099"
},
{
"name": "HTML",
"bytes": "54062"
},
{
"name": "JavaScript",
"bytes": "27797"
},
{
"name": "Makefile",
"bytes": "886"
},
{
"name": "PowerShell",
"bytes": "713"
},
{
"name": "Python",
"bytes": "3827067"
},
{
"name": "Roff",
"bytes": "495"
},
{
"name": "Shell",
"bytes": "9953"
},
{
"name": "TypeScript",
"bytes": "2145262"
}
],
"symlink_target": ""
}
|
<menu xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
tools:context="com.android.clase2.MainActivity" >
<item
android:id="@+id/action_settings"
android:orderInCategory="100"
android:title="@string/action_settings"
app:showAsAction="never"/>
</menu>
|
{
"content_hash": "20e80120cc2ffdde2937e8b316c3d21e",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 64,
"avg_line_length": 34.083333333333336,
"alnum_prop": 0.6723716381418093,
"repo_name": "sonico999/CursoAndroid",
"id": "beeff4db7c1cf1fb8cc374bdfe5f8bbb8a29eec7",
"size": "409",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Clase2/res/menu/main.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1"
},
{
"name": "C++",
"bytes": "1"
},
{
"name": "Java",
"bytes": "18316"
}
],
"symlink_target": ""
}
|
module Barometer
module Query
module Converter
class FromCoordinatesToNoaaStationId
def self.from
[:coordinates]
end
def initialize(query)
@query = query
end
def call
return unless can_convert?
station_id = Service::NoaaStation.fetch(@query)
@query.add_conversion(:noaa_station_id, station_id)
end
private
def can_convert?
!!@query.get_conversion(*self.class.from)
end
end
end
end
end
Barometer::Query::Converter.register(:noaa_station_id, Barometer::Query::Converter::FromCoordinatesToNoaaStationId)
|
{
"content_hash": "cb8513ab1cdccf57e4cbdb29cbced57f",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 115,
"avg_line_length": 22.1,
"alnum_prop": 0.6093514328808446,
"repo_name": "attack/barometer",
"id": "5f2588fa0176e4d7caf6877f26277f8427946640",
"size": "663",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/barometer/query/converters/from_coordinates_to_noaa_station_id.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "217733"
}
],
"symlink_target": ""
}
|
import ConfigParser
from pprint import pprint
import re
import sys
import time
import urllib
def get_api_key():
config = ConfigParser.RawConfigParser()
config.read('setting.ini')
if config.has_option('main', 'api_key'):
api_key = config.get('main', 'api_key')
if api_key != '<YOUR REBRICKABLE API KEY HERE>':
return api_key
print >> sys.stderr, 'You need to write your Rebrickable API Key in setting.ini'
sys.exit(-1)
def urlopen(url):
# "IOError: [Errno socket error] [Errno -2] Name or service not known"
# IOError('socket error', gaierror(-2, 'Name or service not known'))
# ^ errno ^ strerror
retry = 0
while True:
try:
return urllib.urlopen(url)
except IOError as e:
if e.errno == 'socket error' and retry < 3:
retry = retry + 1
time.sleep(retry)
continue
raise e
html_entity_pattern = re.compile('&(?:amp;)?#([0-9]{2,3});')
def _decode_html_entity_chr(match):
return chr(int(match.group(1)))
def decode_html_entity(s):
s = html_entity_pattern.sub(_decode_html_entity_chr, s)
s = s.replace('&amp;', '&')
s = s.replace('&', '&')
s = s.replace('"', '"') # for " and &quot;
# both &amp; and & exist in rebrickable database.
return s
def to_ascii_safe(s):
return ''.join([c for c in s if ord(c) < 0x80])
line_width = 80
indent_spaces = 2
further_indent_depth = 2
def line_wrap(s, indent_depth):
ret = ''
first = True
while s:
depth = indent_depth if first else (indent_depth + further_indent_depth)
width = line_width - indent_spaces * depth
indent = ' ' * indent_spaces * depth
if len(s) <= width:
ret = ret + '%s%s' % (indent, s)
break
idx = s[:width].rfind(' ')
if idx > 0:
ret = ret + '%s%s\n' % (indent, s[:idx])
s = s[idx + 1:]
else:
idx = s.find(' ', width)
if idx == -1:
ret = ret + '%s%s' % (indent, s)
break
else:
ret = ret + '%s%s\n' % (indent, s[:width])
s = s[idx + 1:]
first = False
return ret
|
{
"content_hash": "0758d5a3c4778b61ee63f0b05ca0455b",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 81,
"avg_line_length": 26.253333333333334,
"alnum_prop": 0.6160487557135602,
"repo_name": "maczniak/brickstats",
"id": "0eef18e582ec5c88caeaafbe02f1fb5fc6802486",
"size": "2016",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "util.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "4877"
},
{
"name": "Python",
"bytes": "204569"
}
],
"symlink_target": ""
}
|
package org.camunda.bpm.spring.boot.example.webapp.delegate;
import org.camunda.bpm.engine.delegate.DelegateExecution;
import org.camunda.bpm.engine.delegate.JavaDelegate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
@Component
public class SayHelloDelegate implements JavaDelegate {
private static final Logger LOGGER = LoggerFactory.getLogger(SayHelloDelegate.class);
@Override
public void execute(DelegateExecution execution) throws Exception {
LOGGER.info("hello {}", execution);
}
}
|
{
"content_hash": "e9e6444a16983a390aec7376d496c0c8",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 87,
"avg_line_length": 28.35,
"alnum_prop": 0.8059964726631393,
"repo_name": "ingorichtsmeier/camunda-bpm-examples",
"id": "d6828e1073fc18979f0e17addc0914919d8ce31d",
"size": "1374",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spring-boot-starter/example-webapp/src/main/java/org/camunda/bpm/spring/boot/example/webapp/delegate/SayHelloDelegate.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2383"
},
{
"name": "FreeMarker",
"bytes": "355"
},
{
"name": "Groovy",
"bytes": "276"
},
{
"name": "HTML",
"bytes": "40079"
},
{
"name": "Java",
"bytes": "434911"
},
{
"name": "JavaScript",
"bytes": "1374527"
},
{
"name": "XQuery",
"bytes": "452"
},
{
"name": "XSLT",
"bytes": "739"
}
],
"symlink_target": ""
}
|
package com.sixsq.slipstream.statemachine;
public class DoneState extends OrchestratorsSynchronizedState {
public DoneState(ExtrinsicState extrinsicState) {
super(extrinsicState);
}
@Override
public States getState() {
return States.Done;
}
public boolean isFinal() {
return true;
}
}
|
{
"content_hash": "26f31fd8cf09d35eaa11aa1452888fef",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 63,
"avg_line_length": 16.476190476190474,
"alnum_prop": 0.6647398843930635,
"repo_name": "slipstream/SlipStreamServer",
"id": "13913e84761c8d6d45951d4cc45a438d373e2c3e",
"size": "1142",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jar-persistence/src/main/java/com/sixsq/slipstream/statemachine/DoneState.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "106"
},
{
"name": "Clojure",
"bytes": "1890161"
},
{
"name": "HTML",
"bytes": "149740"
},
{
"name": "Java",
"bytes": "1476155"
},
{
"name": "Makefile",
"bytes": "6625"
},
{
"name": "PLpgSQL",
"bytes": "2571"
},
{
"name": "Python",
"bytes": "41223"
},
{
"name": "Shell",
"bytes": "9608"
},
{
"name": "XSLT",
"bytes": "4699"
}
],
"symlink_target": ""
}
|
<?xml version="1.0" encoding="utf-8"?>
<android.support.design.widget.CoordinatorLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
xmlns:app="http://schemas.android.com/apk/res-auto"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:fitsSystemWindows="true"
tools:context="com.lincanbin.carbonforum.ReplyActivity">
<android.support.design.widget.AppBarLayout
android:id="@+id/appbar"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:theme="@style/AppTheme.AppBarOverlay">
<android.support.v7.widget.Toolbar
android:id="@+id/toolbar"
android:layout_width="match_parent"
android:layout_height="?attr/actionBarSize"
android:background="?attr/colorPrimary"
app:popupTheme="@style/AppTheme.PopupOverlay"
app:layout_scrollFlags="scroll|enterAlways"
android:gravity="end">
<ImageButton
android:src="@drawable/ic_send_24dp"
android:layout_width="48dp"
android:layout_height="48dp"
android:layout_gravity="end"
android:layout_marginRight="8dp"
android:layout_marginEnd="8dp"
android:contentDescription="@string/title_activity_reply"
android:background="?selectableItemBackground"
android:id="@+id/reply_button" />
</android.support.v7.widget.Toolbar>
</android.support.design.widget.AppBarLayout>
<android.support.v4.widget.NestedScrollView
android:id="@+id/reply_scroll_view"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:fillViewport="true"
android:scrollbars="none"
app:layout_behavior="@string/appbar_scrolling_view_behavior"
app:behavior_overlapTop="64dp">
<LinearLayout
android:id="@+id/reply_form"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:paddingTop="@dimen/activity_vertical_margin"
android:gravity="center_horizontal"
android:orientation="vertical"
tools:context="com.lincanbin.carbonforum.ReplyActivity">
<android.support.design.widget.TextInputLayout
android:layout_width="match_parent"
android:layout_height="fill_parent">
<EditText android:id="@+id/content"
android:layout_width="match_parent"
android:layout_height="fill_parent"
android:layout_margin="@dimen/activity_vertical_margin"
android:hint="@string/reply_content"
android:gravity="top|start"
android:inputType="textMultiLine"
android:singleLine="false" />
</android.support.design.widget.TextInputLayout>
</LinearLayout>
</android.support.v4.widget.NestedScrollView>
</android.support.design.widget.CoordinatorLayout>
|
{
"content_hash": "887bd0b9a84f47c44c3222a1e427e490",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 75,
"avg_line_length": 48.93846153846154,
"alnum_prop": 0.6274756365922666,
"repo_name": "harryplus/Android-Carbon-Forum-master",
"id": "5ebf8ccf0b4d4c2d587e6717c4e6ad4122d3ad5d",
"size": "3181",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/src/main/res/layout/activity_reply.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "211288"
}
],
"symlink_target": ""
}
|
<project name="dcm4chee-hl7" default="jar">
<property file="build.properties"/>
<property file="../build.properties" />
<property name="version" value="2.14.7"/>
<property name="javac.debug" value="on"/>
<property name="javac.deprecation" value="off"/>
<property name="javac.optimize" value="on"/>
<property name="javac.source" value="1.5"/>
<property name="javac.target" value="1.5"/>
<!-- Override with your dcm4che-1.4.x source dist location -->
<property name="dcm4che14.home"
value="${user.home}/dcm4che14/build/dcm4che-1.4.26"/>
<property name="dcm4che.jar" value="${dcm4che14.home}/lib/dcm4che.jar"/>
<!-- Override with your dcm4chee-ejb dist location -->
<property name="dcm4chee-ejb.home" value="../dcm4jboss-ejb"/>
<property name="dcm4chee-ejb.target" value="${dcm4chee-ejb.home}/target"/>
<!-- Override with your dcm4chee-sar dist location -->
<property name="dcm4chee-sar.home" value="../dcm4jboss-sar" />
<property name="dcm4chee.jar" value="${dcm4chee-sar.home}/target/lib/dcm4chee.jar" />
<property name="dcm4che-audit.jar" value="${dcm4chee-sar.home}/lib/dcm4che-audit-2.0.19.jar" />
<!-- Override with your JBoss/Web server bundle dist location -->
<property name="jboss.home" value="${user.home}/jboss-4.2.3.GA"/>
<property name="jboss.lib" value="${jboss.home}/lib"/>
<property name="jboss.server.lib" value="${jboss.home}/server/default/lib"/>
<property name="src.java" value="${basedir}/src/java"/>
<property name="lib" value="${basedir}/lib"/>
<property name="target" value="${basedir}/target"/>
<property name="target.classes" value="${target}/classes"/>
<property name="target.lib" value="${target}/lib"/>
<path id="class.path">
<pathelement location="${dcm4che.jar}"/>
<pathelement location="${dcm4chee.jar}"/>
<pathelement location="${lib}/xhl7.jar"/>
<pathelement location="${dcm4chee-ejb.target}/common/lib/dcm4chee-ejb-client.jar"/>
<pathelement location="${dcm4chee-ejb.target}/hsql/lib/dcm4chee-jdbc-hsql.jar"/>
<pathelement location="${dcm4che-audit.jar}"/>
<fileset dir="${jboss.lib}">
<include name="*.jar"/>
</fileset>
<fileset dir="${jboss.server.lib}">
<include name="*.jar"/>
</fileset>
</path>
<target name="init">
<tstamp>
<format property="TODAY" pattern="yyyy-MM-dd"/>
</tstamp>
</target>
<target name="compile" depends="init">
<mkdir dir="${target.classes}"/>
<javac
destdir="${target.classes}"
classpathref="class.path"
debug="${javac.debug}"
deprecation="${javac.deprecation}"
optimize="${javac.optimize}"
source="${javac.source}"
target="${javac.target}"
>
<src path="${src.java}"/>
<include name="org/dcm4chex/**"/>
</javac>
</target>
<target name="jar" depends="compile">
<mkdir dir="${target.lib}"/>
<jar jarfile="${target.lib}/dcm4chee-hl7.jar">
<manifest>
<attribute name="Implementation-Title" value="dcm4chee-hl7"/>
<attribute name="Implementation-Version" value="${version} ${TODAY}"/>
<attribute name="Implementation-Vendor" value="Tiani Medgraph AG"/>
</manifest>
<fileset dir="${target.classes}"/>
</jar>
</target>
<target name="clean">
<delete dir="${target}"/>
</target>
</project>
|
{
"content_hash": "0a3317c5198469292d0a150365468e14",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 96,
"avg_line_length": 35.053763440860216,
"alnum_prop": 0.652760736196319,
"repo_name": "medicayun/medicayundicom",
"id": "9f95245e622397fbe438feeca30ec5b086cd3019",
"size": "3260",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dcm4jboss-all/tags/DCM4CHEE_2_14_7/dcm4jboss-hl7/build.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
/* Fluent Bit
* ==========
* Copyright (C) 2015-2022 The Fluent Bit Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FLB_IN_EXEC_WASI_H
#define FLB_IN_EXEC_WASI_H
#include <fluent-bit/flb_info.h>
#include <fluent-bit/flb_input.h>
#include <fluent-bit/flb_config.h>
#include <fluent-bit/flb_input.h>
#include <fluent-bit/flb_utils.h>
#include <fluent-bit/flb_parser.h>
#include <fluent-bit/wasm/flb_wasm.h>
#include <msgpack.h>
#define DEFAULT_BUF_SIZE "4096"
#define DEFAULT_INTERVAL_SEC "1"
#define DEFAULT_INTERVAL_NSEC "0"
struct flb_exec_wasi {
flb_sds_t wasi_path;
struct mk_list *accessible_dir_list; /* list of directories to be
* accesible from WASM */
flb_sds_t parser_name;
struct flb_parser *parser;
char *buf;
size_t buf_size;
struct flb_input_instance *ins;
struct flb_wasm *wasm;
int oneshot;
flb_pipefd_t ch_manager[2];
int interval_sec;
int interval_nsec;
};
#endif /* FLB_IN_EXEC_WASI_H */
|
{
"content_hash": "516471e3a820d1fdef837ffcb9b3531e",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 77,
"avg_line_length": 31.365384615384617,
"alnum_prop": 0.6689147762109136,
"repo_name": "nokute78/fluent-bit",
"id": "d6d67262075081e2f66c3cd67186fa72d8f6a04c",
"size": "1631",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "plugins/in_exec_wasi/in_exec_wasi.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "34215"
},
{
"name": "Awk",
"bytes": "1967"
},
{
"name": "Batchfile",
"bytes": "34565"
},
{
"name": "BitBake",
"bytes": "3218"
},
{
"name": "C",
"bytes": "46242088"
},
{
"name": "C++",
"bytes": "3014145"
},
{
"name": "CMake",
"bytes": "798817"
},
{
"name": "CSS",
"bytes": "33350"
},
{
"name": "Dockerfile",
"bytes": "32271"
},
{
"name": "Emacs Lisp",
"bytes": "305"
},
{
"name": "GDB",
"bytes": "199"
},
{
"name": "Go",
"bytes": "35683"
},
{
"name": "HTML",
"bytes": "284625"
},
{
"name": "Java",
"bytes": "12989"
},
{
"name": "JavaScript",
"bytes": "54874"
},
{
"name": "Lex",
"bytes": "11348"
},
{
"name": "Lua",
"bytes": "487918"
},
{
"name": "M4",
"bytes": "541844"
},
{
"name": "Makefile",
"bytes": "754913"
},
{
"name": "Meson",
"bytes": "772"
},
{
"name": "NASL",
"bytes": "678"
},
{
"name": "PHP",
"bytes": "149"
},
{
"name": "Perl",
"bytes": "184697"
},
{
"name": "PowerShell",
"bytes": "1989"
},
{
"name": "Python",
"bytes": "726457"
},
{
"name": "Roff",
"bytes": "212750"
},
{
"name": "Ruby",
"bytes": "77427"
},
{
"name": "Shell",
"bytes": "1984425"
},
{
"name": "TypeScript",
"bytes": "72168"
},
{
"name": "WebAssembly",
"bytes": "28005"
},
{
"name": "XSLT",
"bytes": "415"
},
{
"name": "Yacc",
"bytes": "17385"
},
{
"name": "jq",
"bytes": "1065"
},
{
"name": "sed",
"bytes": "588"
}
],
"symlink_target": ""
}
|
<?php
namespace App\Form\Processor;
use App\Form\DocumentTemplateForm;
use App\Repository\DocumentTemplateRepository;
use Symfony\Component\Form\FormFactoryInterface;
use Symfony\Component\Security\Core\Authentication\Token\Storage\TokenStorageInterface;
class DocumentTemplateFormProcessor extends DefaultFormProcessor
{
public function __construct(
DocumentTemplateRepository $repository,
FormFactoryInterface $formFactory,
TokenStorageInterface $tokenStorage
) {
parent::__construct(DocumentTemplateForm::class, $repository, $formFactory, $tokenStorage);
}
}
|
{
"content_hash": "5abdc480e02f2da9b365fdf2044acac5",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 99,
"avg_line_length": 30.5,
"alnum_prop": 0.7836065573770492,
"repo_name": "disider/invox",
"id": "6e5e28430efe3b9290d89d963fc8a45319e396c2",
"size": "803",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Form/Processor/DocumentTemplateFormProcessor.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1620694"
},
{
"name": "Gherkin",
"bytes": "372957"
},
{
"name": "HTML",
"bytes": "8151016"
},
{
"name": "JavaScript",
"bytes": "8662911"
},
{
"name": "Makefile",
"bytes": "266"
},
{
"name": "PHP",
"bytes": "1018100"
},
{
"name": "Ruby",
"bytes": "322"
}
],
"symlink_target": ""
}
|
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.in28minutes.example.mockito</groupId>
<artifactId>in28minutes-mockito-example-business</artifactId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<packaging>jar</packaging>
<artifactId>in28minutes-mockito-example-business-impl</artifactId>
<name>${project.artifactId}</name>
<description>Business Implementation.</description>
<dependencies>
<dependency>
<groupId>com.in28minutes.example.mockito</groupId>
<artifactId>in28minutes-mockito-example-model</artifactId>
</dependency>
<dependency>
<groupId>com.in28minutes.example.mockito</groupId>
<artifactId>in28minutes-mockito-example-data-api</artifactId>
</dependency>
<dependency>
<groupId>com.in28minutes.example.mockito</groupId>
<artifactId>in28minutes-mockito-example-business-api</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context-support</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
</dependency>
<!-- Test Specific Dependencies -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</dependency>
</dependencies>
</project>
|
{
"content_hash": "ebcacb122cd8cf10627661582a94ec9f",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 104,
"avg_line_length": 34.721311475409834,
"alnum_prop": 0.6798866855524079,
"repo_name": "in28minutes/MockitoIn28Minutes",
"id": "f2e0f7b42bd3d0c6cea653e471fdf6c620166e1d",
"size": "2118",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "business/impl/pom.xml",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "36951"
}
],
"symlink_target": ""
}
|
<?php // 0.0.1
function testException($testCase, $expectedMessage, $codeBlock) {
try {
$codeBlock();
$exceptionThrown = FALSE;
} catch (Exception $e) {
$testCase->assertEquals($expectedMessage, $e->getMessage());
$exceptionThrown = TRUE;
}
if (!$exceptionThrown) {
$trace = debug_backtrace();
$function = $trace[1]['function'];
throw new Exception("testException failed: Exception should have been raised in $function.");
}
}
?>
|
{
"content_hash": "64fcd0f8dc82bcbd24c091b9650a63ae",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 95,
"avg_line_length": 26.352941176470587,
"alnum_prop": 0.6785714285714286,
"repo_name": "khoomeister/php-web-recipes",
"id": "ab24079418a3024189ba13d330efa3a934bb9088",
"size": "448",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/testException.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CoffeeScript",
"bytes": "876"
},
{
"name": "JavaScript",
"bytes": "242602"
},
{
"name": "PHP",
"bytes": "1703988"
}
],
"symlink_target": ""
}
|
Enigma::Application.configure do
# Settings specified here will take precedence over those in config/environment.rb
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true
# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment
config.action_controller.allow_forgery_protection = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Use SQL instead of Active Record's schema dumper when creating the test database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Print deprecation notices to the stderr
config.active_support.deprecation = :stderr
config.action_mailer.default_url_options = { :host => 'localhost:3000' }
end
|
{
"content_hash": "a32b56ad61f8241a531c294b3fe96d62",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 85,
"avg_line_length": 44.02777777777778,
"alnum_prop": 0.7621451104100946,
"repo_name": "CultivateHQ/ballpark",
"id": "3c28900ff2e1855c5477e6c40053aec1bcaf94cd",
"size": "1585",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/environments/test.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13115"
},
{
"name": "Elixir",
"bytes": "414"
},
{
"name": "JavaScript",
"bytes": "4912"
},
{
"name": "Ruby",
"bytes": "49344"
}
],
"symlink_target": ""
}
|
package weibo4j.examples.account;
import weibo4j.Account;
import weibo4j.examples.oauth2.Log;
import weibo4j.model.User;
import weibo4j.model.WeiboException;
public class EndSession {
public static void main(String[] args) {
String access_token = args[0];
Account am = new Account(access_token);
try {
User user = am.endSession();
Log.logInfo(user.toString());
} catch (WeiboException e) {
e.printStackTrace();
}
}
}
|
{
"content_hash": "8fef77c09227ce00c2a02e1be534f86c",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 41,
"avg_line_length": 21.136363636363637,
"alnum_prop": 0.6817204301075269,
"repo_name": "seagrape/kekoa",
"id": "99121cb2628b7a43c13d4ffdf7d6724b51e5399e",
"size": "465",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "kekoa-common/src/test/java/weibo4j/examples/account/EndSession.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "16891"
}
],
"symlink_target": ""
}
|
https://www.tropo.com/docs/webapi/conference.htm
TPConference has not been documented yet. The class comment should describe the purpose of the class, its collaborations and its variables.
Instance Variables:
id <ProtoObject | PseudoContext>
mute <ProtoObject | PseudoContext>
name <ProtoObject | PseudoContext>
playTones <ProtoObject | PseudoContext>
required <ProtoObject | PseudoContext>
terminator <ProtoObject | PseudoContext>
|
{
"content_hash": "602aa5e7280706788100b93adfc15fc2",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 139,
"avg_line_length": 39.90909090909091,
"alnum_prop": 0.8041002277904328,
"repo_name": "pdebruic/tropo-smalltalk",
"id": "684c3208d6dada6eb6c6ac2ce693972198f0e6c8",
"size": "439",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tropo.package/TPConference.class/README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Smalltalk",
"bytes": "43165"
}
],
"symlink_target": ""
}
|
namespace pdalboost {} namespace boost = pdalboost; namespace pdalboost {
namespace interprocess {
namespace ipcdetail {
class posix_condition;
class posix_mutex
{
posix_mutex(const posix_mutex &);
posix_mutex &operator=(const posix_mutex &);
public:
posix_mutex();
~posix_mutex();
void lock();
bool try_lock();
bool timed_lock(const pdalboost::posix_time::ptime &abs_time);
void unlock();
friend class posix_condition;
private:
pthread_mutex_t m_mut;
};
inline posix_mutex::posix_mutex()
{
mutexattr_wrapper mut_attr;
mutex_initializer mut(m_mut, mut_attr);
mut.release();
}
inline posix_mutex::~posix_mutex()
{
int res = pthread_mutex_destroy(&m_mut);
BOOST_ASSERT(res == 0);(void)res;
}
inline void posix_mutex::lock()
{
if (pthread_mutex_lock(&m_mut) != 0)
throw lock_exception();
}
inline bool posix_mutex::try_lock()
{
int res = pthread_mutex_trylock(&m_mut);
if (!(res == 0 || res == EBUSY))
throw lock_exception();
return res == 0;
}
inline bool posix_mutex::timed_lock(const pdalboost::posix_time::ptime &abs_time)
{
if(abs_time == pdalboost::posix_time::pos_infin){
this->lock();
return true;
}
#ifdef BOOST_INTERPROCESS_POSIX_TIMEOUTS
timespec ts = ptime_to_timespec(abs_time);
int res = pthread_mutex_timedlock(&m_mut, &ts);
if (res != 0 && res != ETIMEDOUT)
throw lock_exception();
return res == 0;
#else //BOOST_INTERPROCESS_POSIX_TIMEOUTS
//Obtain current count and target time
pdalboost::posix_time::ptime now = microsec_clock::universal_time();
spin_wait swait;
do{
if(this->try_lock()){
break;
}
now = microsec_clock::universal_time();
if(now >= abs_time){
return false;
}
// relinquish current time slice
swait.yield();
}while (true);
return true;
#endif //BOOST_INTERPROCESS_POSIX_TIMEOUTS
}
inline void posix_mutex::unlock()
{
int res = 0;
res = pthread_mutex_unlock(&m_mut);
(void)res;
BOOST_ASSERT(res == 0);
}
} //namespace ipcdetail {
} //namespace interprocess {
} //namespace pdalboost {} namespace boost = pdalboost; namespace pdalboost {
#include <boost/interprocess/detail/config_end.hpp>
#endif //#ifndef BOOST_INTERPROCESS_DETAIL_POSIX_MUTEX_HPP
|
{
"content_hash": "ddddd88fb2699575eb8c4019801f0dd4",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 81,
"avg_line_length": 22.085714285714285,
"alnum_prop": 0.64381198792583,
"repo_name": "verma/PDAL",
"id": "763fc7c616fd04e392926c06d53204678356550e",
"size": "4285",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "boost/boost/interprocess/sync/posix/mutex.hpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "755744"
},
{
"name": "C#",
"bytes": "51165"
},
{
"name": "C++",
"bytes": "58234219"
},
{
"name": "CSS",
"bytes": "65128"
},
{
"name": "JavaScript",
"bytes": "81726"
},
{
"name": "Lasso",
"bytes": "1053782"
},
{
"name": "Perl",
"bytes": "4925"
},
{
"name": "Python",
"bytes": "12600"
},
{
"name": "Shell",
"bytes": "40033"
},
{
"name": "XSLT",
"bytes": "7284"
}
],
"symlink_target": ""
}
|
package example.web.filter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.PreMatching;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
@Provider
@PreMatching
public class HttpMethodOverrideFilter implements ContainerRequestFilter {
private static Logger log = LoggerFactory.getLogger(HttpMethodOverrideFilter.class);
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
String methodOverride = requestContext.getHeaderString("X-Http-Method-Override");
log.info("HttpMethodOverrideFilter:" + methodOverride);
if (methodOverride != null) {
requestContext.setMethod(methodOverride);
}
}
}
|
{
"content_hash": "7a42247f9a1a464f745fb71c91125ba2",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 89,
"avg_line_length": 28.4,
"alnum_prop": 0.7676056338028169,
"repo_name": "naotsugu/example-jersey-grizzly",
"id": "585a4e8e57bc89913297d33989956d126d4527d4",
"size": "852",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/example/web/filter/HttpMethodOverrideFilter.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "46"
},
{
"name": "HTML",
"bytes": "6584"
},
{
"name": "Java",
"bytes": "15456"
}
],
"symlink_target": ""
}
|
.pmis-logo {
margin-right: 5px;
}
table#list tbody tr td:last-child {
text-align: center;
}
table#list tbody tr td:last-child button:last-child {
margin-left: 5px;
}
table#form tbody tr td:last-child, table.table-form tbody tr td:last-child {
width: 10%;
text-align: center;
vertical-align: middle;
}
table#form tbody tr td:last-child i:last-child, table.table-form tbody tr td:last-child i:last-child {
margin-left: 5px;
}
table#form tbody tr td:last-child i:hover, table.table-form tbody tr td:last-child i:hover {
cursor: pointer;
}
#employee-sub-menu li a:hover {
background-color: #1e282c;
border-left-color: #3c8dbc;
color: #ffffff;
}
#employee-sub-menu li a {
color: #b8c7ce;
}
#employee-sub-menu li.active a {
background-color: #1e282c;
}
.with-top-margin {
margin-top: 25px;
}
.qa-indent {
text-indent: 20px;
}
.page-header-normal {
border-bottom: 1px solid #eee;
margin-top: 10px!important;
margin-right: 0!important;
margin-bottom: 20px!important;
margin-left: 0!important;
}
.maintenance-content {
position: relative;
background: #fff;
border: 1px solid #f4f4f4;
padding: 20px;
margin: 10px 25px;
}
.import-status .panel-body {
background-color: #282828;
font-size: 12px;
}
.import-status .panel-body .output {
height: 250px;
font-family: 'Lucida Console';
overflow: auto;
}
.import-status .panel-body .success-response {
display: block;
color: #17b53c;
}
.import-status .panel-body .info-response {
display: block;
color: #1ec9c3;
}
.import-status .panel-body .error-response {
display: block;
color: #ef2f2f;
}
#list-view tbody {
font-size: 12px;
}
.leave-generation-notification {
vertical-align: top!important;
margin-left: 3px;
}
.profile-picture-list {
width: 72px;
}
|
{
"content_hash": "f8178c6e317e9abcf26e5b0e216fae63",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 102,
"avg_line_length": 16.78301886792453,
"alnum_prop": 0.6902754356379989,
"repo_name": "sly14flores/webpmis",
"id": "bfed1b3df1fff7e4cf400791fa895e347437aea7",
"size": "1779",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "css/style.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "947810"
},
{
"name": "HTML",
"bytes": "3693909"
},
{
"name": "JavaScript",
"bytes": "2476192"
},
{
"name": "PHP",
"bytes": "615709"
},
{
"name": "Python",
"bytes": "32324"
}
],
"symlink_target": ""
}
|
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE36_Absolute_Path_Traversal__char_file_fopen_61b.cpp
Label Definition File: CWE36_Absolute_Path_Traversal.label.xml
Template File: sources-sink-61b.tmpl.cpp
*/
/*
* @description
* CWE: 36 Absolute Path Traversal
* BadSource: file Read input from a file
* GoodSource: Full path and file name
* Sinks: fopen
* BadSink : Open the file named in data using fopen()
* Flow Variant: 61 Data flow: data returned from one function to another in different source files
*
* */
#include "std_testcase.h"
#ifndef _WIN32
#include <wchar.h>
#endif
#ifdef _WIN32
#define FILENAME "C:\\temp\\file.txt"
#else
#define FILENAME "/tmp/file.txt"
#endif
#ifdef _WIN32
#define FOPEN fopen
#else
#define FOPEN fopen
#endif
namespace CWE36_Absolute_Path_Traversal__char_file_fopen_61
{
#ifndef OMITBAD
char * badSource(char * data)
{
{
/* Read input from a file */
size_t dataLen = strlen(data);
FILE * pFile;
/* if there is room in data, attempt to read the input from a file */
if (FILENAME_MAX-dataLen > 1)
{
pFile = fopen(FILENAME, "r");
if (pFile != NULL)
{
/* POTENTIAL FLAW: Read data from a file */
if (fgets(data+dataLen, (int)(FILENAME_MAX-dataLen), pFile) == NULL)
{
printLine("fgets() failed");
/* Restore NUL terminator if fgets fails */
data[dataLen] = '\0';
}
fclose(pFile);
}
}
}
return data;
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B() uses the GoodSource with the BadSink */
char * goodG2BSource(char * data)
{
#ifdef _WIN32
/* FIX: Use a fixed, full path and file name */
strcat(data, "c:\\temp\\file.txt");
#else
/* FIX: Use a fixed, full path and file name */
strcat(data, "/tmp/file.txt");
#endif
return data;
}
#endif /* OMITGOOD */
} /* close namespace */
|
{
"content_hash": "8aeba7a9ce42ffd8d62fb3cbccba0c25",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 99,
"avg_line_length": 24.658823529411766,
"alnum_prop": 0.575381679389313,
"repo_name": "JianpingZeng/xcc",
"id": "9b76ba7f99825b23094f7d65416e393c2bc58253",
"size": "2096",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "xcc/test/juliet/testcases/CWE36_Absolute_Path_Traversal/s02/CWE36_Absolute_Path_Traversal__char_file_fopen_61b.cpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
ACCEPTED
#### According to
Euro+Med Plantbase
#### Published in
null
#### Original name
null
### Remarks
null
|
{
"content_hash": "5a2ae1b39517736be2a0522dc06518a2",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 18,
"avg_line_length": 8.692307692307692,
"alnum_prop": 0.6814159292035398,
"repo_name": "mdoering/backbone",
"id": "a6dddd6bd0dc8171104113ad15176742cb70b292",
"size": "169",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Gentianales/Apocynaceae/Rhazya/Rhazya greissii/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/content_main"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
app:layout_behavior="@string/appbar_scrolling_view_behavior"
tools:context="com.example.dsm2001.myorders.MainActivity"
tools:showIn="@layout/app_bar_main">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Hello World!" />
</RelativeLayout>
|
{
"content_hash": "35c175225c8d3925d3fcb0f6c3c243c9",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 74,
"avg_line_length": 45.2,
"alnum_prop": 0.7234513274336283,
"repo_name": "simeonpp/top-offers",
"id": "c97b0c35e916601c62c9d96c4569886f0a2fa6cc",
"size": "904",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "AndroidClient/MyOrders/app/src/main/res/layout/content_main.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "221342"
},
{
"name": "JavaScript",
"bytes": "27427"
}
],
"symlink_target": ""
}
|
<!DOCTYPE html>
<html>
<head>
<title>Multi-row selection</title>
<link rel="stylesheet" href="../../../codebase/webix.css" type="text/css" charset="utf-8">
<script src="../../../codebase/webix.js" type="text/javascript" charset="utf-8"></script>
<link rel="stylesheet" type="text/css" href="../common/samples.css">
<script src="../common/testdata.js" type="text/javascript" charset="utf-8"></script>
</head>
<body>
<div class='header_comment'>Multi-row selection </div>
<div class='sample_comment'>Click on any row to select it. </div>
<div class='sample_comment'>Use ctrl-click and shift-click to select multiple rows.</div>
<div id="testA"></div>
<hr>
<div id="testB" class='sample_comment'></div>
<script type="text/javascript" charset="utf-8">
webix.ready(function(){
grid = webix.ui({
container:"testA",
view:"datatable",
columns:[
{ id:"rank", header:"", css:"rank", width:50},
{ id:"title", header:"Film title",width:200},
{ id:"year", header:"Released" , width:80},
{ id:"votes", header:"Votes", width:100}
],
select:"row",
autoheight:true,
autowidth:true,
multiselect:true,
on:{
onSelectChange:function(){
var text = "Selected: "+grid.getSelectedId(true).join();
document.getElementById('testB').innerHTML = text;
}
},
data:small_film_set
});
});
</script>
</body>
</html>
|
{
"content_hash": "077d6f3536f61fe9a27e6c33ebe7b639",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 92,
"avg_line_length": 30.782608695652176,
"alnum_prop": 0.6179378531073446,
"repo_name": "lastuniverse/explosion",
"id": "c95377e17cb8c0243cae5f08f8ca7962bc288063",
"size": "1416",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "samples/express/public/samples/15_datatable/05_selection/05_multi_row_selection.html",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "40778"
},
{
"name": "Shell",
"bytes": "26"
}
],
"symlink_target": ""
}
|
<?php
// autoload_real.php @generated by Composer
class ComposerAutoloaderInit57490ad1dcfe79a8596ecec07c4ed5e2
{
private static $loader;
public static function loadClassLoader($class)
{
if ('Composer\Autoload\ClassLoader' === $class) {
require __DIR__ . '/ClassLoader.php';
}
}
public static function getLoader()
{
if (null !== self::$loader) {
return self::$loader;
}
spl_autoload_register(array('ComposerAutoloaderInit57490ad1dcfe79a8596ecec07c4ed5e2', 'loadClassLoader'), true, true);
self::$loader = $loader = new \Composer\Autoload\ClassLoader();
spl_autoload_unregister(array('ComposerAutoloaderInit57490ad1dcfe79a8596ecec07c4ed5e2', 'loadClassLoader'));
$map = require __DIR__ . '/autoload_namespaces.php';
foreach ($map as $namespace => $path) {
$loader->set($namespace, $path);
}
$map = require __DIR__ . '/autoload_psr4.php';
foreach ($map as $namespace => $path) {
$loader->setPsr4($namespace, $path);
}
$classMap = require __DIR__ . '/autoload_classmap.php';
if ($classMap) {
$loader->addClassMap($classMap);
}
$loader->register(true);
$includeFiles = require __DIR__ . '/autoload_files.php';
foreach ($includeFiles as $fileIdentifier => $file) {
composerRequire57490ad1dcfe79a8596ecec07c4ed5e2($fileIdentifier, $file);
}
return $loader;
}
}
function composerRequire57490ad1dcfe79a8596ecec07c4ed5e2($fileIdentifier, $file)
{
if (empty($GLOBALS['__composer_autoload_files'][$fileIdentifier])) {
require $file;
$GLOBALS['__composer_autoload_files'][$fileIdentifier] = true;
}
}
|
{
"content_hash": "baf5d2b11028c261614e952e0e5ee5e0",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 126,
"avg_line_length": 30.389830508474578,
"alnum_prop": 0.6129392080312326,
"repo_name": "jimmyfu135/monkeyblog",
"id": "df166af7100a6afb7a44ce6f2746855ad774506c",
"size": "1793",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendor/composer/autoload_real.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1541"
},
{
"name": "CSS",
"bytes": "2728"
},
{
"name": "PHP",
"bytes": "134726"
}
],
"symlink_target": ""
}
|
@interface LeftViewController ()
@property (strong, nonatomic) NSArray *titlesArray;
@end
@implementation LeftViewController
- (id)init {
self = [super initWithStyle:UITableViewStylePlain];
if (self) {
self.titlesArray = @[@"assgin",
@"request",
@"review"];
self.view.backgroundColor = [UIColor clearColor];
[self.tableView registerClass:[LeftViewCell class] forCellReuseIdentifier:@"cell"];
self.tableView.separatorStyle = UITableViewCellSeparatorStyleNone;
self.tableView.contentInset = UIEdgeInsetsMake(44.0, 0.0, 44.0, 0.0);
self.tableView.showsVerticalScrollIndicator = NO;
self.tableView.backgroundColor = [UIColor clearColor];
}
return self;
}
- (BOOL)prefersStatusBarHidden {
return YES;
}
- (UIStatusBarStyle)preferredStatusBarStyle {
return UIStatusBarStyleDefault;
}
- (UIStatusBarAnimation)preferredStatusBarUpdateAnimation {
return UIStatusBarAnimationFade;
}
#pragma mark - UITableViewDataSource
- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView {
return 1;
}
- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
return self.titlesArray.count;
}
- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath {
LeftViewCell *cell = [tableView dequeueReusableCellWithIdentifier:@"cell"];
cell.textLabel.text = self.titlesArray[indexPath.row];
cell.separatorView.hidden = (indexPath.row <= 3 || indexPath.row == self.titlesArray.count-1);
return cell;
}
#pragma mark - UITableViewDelegate
- (CGFloat)tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPath *)indexPath {
return 70;
}
- (CGFloat)tableView:(UITableView *)tableView heightForHeaderInSection:(NSInteger)section {
return 70;
}
- (UIView *)tableView:(UITableView *)tableView viewForHeaderInSection:(NSInteger)section {
UIView *view = [UIView new];
view.backgroundColor = [UIColor clearColor];
return view;
}
- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath {
WTRootViewController *basicVC = (WTRootViewController *)self.sideMenuController;
if (_delegate) {
[self.delegate leftViewSelectedIndex:indexPath];
}
[basicVC hideLeftViewAnimated:nil];
// if ([basicVC isLeftViewAlwaysVisibleForCurrentOrientation])
//
// if (indexPath.row == 0) {
// if ([mainViewController isLeftViewAlwaysVisibleForCurrentOrientation]) {
// [mainViewController showRightViewAnimated:YES completionHandler:nil];
// }
// else {
// [mainViewController hideLeftViewAnimated:YES completionHandler:^(void) {
// [mainViewController showRightViewAnimated:YES completionHandler:nil];
// }];
// }
// }
// else if (indexPath.row == 2) {
// UINavigationController *navigationController = (UINavigationController *)mainViewController.rootViewController;
// UIViewController *viewController;
//
// if ([navigationController.viewControllers.firstObject isKindOfClass:[ViewController class]]) {
// viewController = [OtherViewController new];
// }
// else {
// viewController = [ViewController new];
// }
//
// [navigationController setViewControllers:@[viewController]];
//
// // Rarely you can get some visual bugs when you change view hierarchy and toggle side views in the same iteration
// // You can use delay to avoid this and probably other unexpected visual bugs
// [mainViewController hideLeftViewAnimated:YES delay:0.0 completionHandler:nil];
// }
// else {
// UIViewController *viewController = [UIViewController new];
// viewController.view.backgroundColor = [UIColor whiteColor];
// viewController.title = self.titlesArray[indexPath.row];
//
// UINavigationController *navigationController = (UINavigationController *)mainViewController.rootViewController;
// [navigationController pushViewController:viewController animated:YES];
//
// [mainViewController hideLeftViewAnimated:YES completionHandler:nil];
// }
}
@end
|
{
"content_hash": "c66576aee6220ad13e118d2190bef9a2",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 123,
"avg_line_length": 34.91056910569106,
"alnum_prop": 0.6977177456916628,
"repo_name": "Sunday4/Capvision.Fun",
"id": "3c0361a6d3224f38e490adf7e5d5dced473811ca",
"size": "4495",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wetan-oc/LeftViewController.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "230263"
},
{
"name": "Ruby",
"bytes": "1326"
}
],
"symlink_target": ""
}
|
using System;
using NUnit.Framework;
using StructureMap.Configuration.DSL;
using StructureMap.Testing.Widget3;
namespace StructureMap.Testing.Configuration.DSL
{
[TestFixture]
public class InterceptorTesting : Registry
{
#region Setup/Teardown
[SetUp]
public void SetUp()
{
_lastService = null;
recorder = new ContextRecorder();
_container = new Container(r =>
{
r.ForRequestedType<ContextRecorder>().TheDefault.IsThis(recorder);
r.ForRequestedType<IService>().AddInstances(x =>
{
x.OfConcreteType<ColorService>()
.OnCreation(s => _lastService = s)
.WithName("Intercepted")
.WithCtorArg("color").EqualTo("Red");
x.OfConcreteType<ColorService>()
.OnCreation((c, s) => c.GetInstance<ContextRecorder>().WasTouched = true)
.WithName("InterceptedWithContext")
.WithCtorArg("color").EqualTo("Red");
x.OfConcreteType<ColorService>()
.WithName("NotIntercepted")
.WithCtorArg("color").EqualTo("Blue");
x.Object(new ColorService("Yellow"))
.WithName("Yellow")
.OnCreation<ColorService>(s => _lastService = s);
x.ConstructedBy(() => new ColorService("Purple")).WithName("Purple")
.EnrichWith<IService>(s => new DecoratorService(s));
x.ConstructedBy(() => new ColorService("Purple")).WithName("DecoratedWithContext")
.EnrichWith<IService>((c, s) =>
{
c.GetInstance<ContextRecorder>().WasTouched = true;
return new DecoratorService(s);
});
x.OfConcreteType<ColorService>().WithName("Decorated").EnrichWith<IService>(
s => new DecoratorService(s))
.WithCtorArg("color").EqualTo("Orange");
x.Object(new ColorService("Yellow")).WithName("Bad")
.OnCreation<ColorService>(obj => { throw new ApplicationException("Bad!"); });
});
});
}
#endregion
private ColorService _lastService;
private IContainer _container;
private ContextRecorder recorder;
[Test]
public void call_the_build_context_with_enrich()
{
_container.GetInstance<IService>("DecoratedWithContext");
recorder.WasTouched.ShouldBeTrue();
}
[Test]
public void call_the_build_context_with_startup()
{
_container.GetInstance<IService>("InterceptedWithContext");
recorder.WasTouched.ShouldBeTrue();
}
[Test]
public void DecorateAConstructedService()
{
var service = _container.GetInstance<IService>("Purple");
var decoratorService = (DecoratorService) service;
var innerService = (ColorService) decoratorService.Inner;
Assert.AreEqual("Purple", innerService.Color);
}
[Test]
public void DecorateInline()
{
var service = _container.GetInstance<IService>("Decorated");
var decoratorService = (DecoratorService) service;
var innerService = (ColorService) decoratorService.Inner;
Assert.AreEqual("Orange", innerService.Color);
}
[Test]
public void OnCreationWithAConstructedService()
{
Assert.IsNull(_lastService);
var interceptedService = _container.GetInstance<IService>("Yellow");
Assert.AreSame(_lastService, interceptedService);
}
[Test]
public void RegisterAnOnCreationMethodForAnInstance()
{
// "Intercepted" should get intercepted and stored as _lastService.
// "NotIntercepted" should not.
Assert.IsNull(_lastService);
_container.GetInstance<IService>("NotIntercepted");
Assert.IsNull(_lastService);
var interceptedService = _container.GetInstance<IService>("Intercepted");
Assert.AreSame(_lastService, interceptedService);
}
[Test]
public void TrapFailureInInterceptor()
{
try
{
_container.GetInstance<IService>("Bad");
Assert.Fail("Should have thrown an error");
}
catch (StructureMapException e)
{
Assert.AreEqual(270, e.ErrorCode);
}
}
}
public class DecoratorService : IService
{
private readonly IService _inner;
public DecoratorService(IService inner)
{
_inner = inner;
}
public IService Inner { get { return _inner; } }
public void DoSomething()
{
throw new NotImplementedException();
}
}
public class ContextRecorder
{
public bool WasTouched { get; set; }
}
}
|
{
"content_hash": "94515812ee392d5d959e4cfda2cfb61e",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 102,
"avg_line_length": 33.54601226993865,
"alnum_prop": 0.5221287490855889,
"repo_name": "mtscout6/structuremap",
"id": "bb3889c79baae22cac1f85b0f9d4466210ca96c0",
"size": "5468",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Source/StructureMap.Testing/Configuration/DSL/InterceptorTesting.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
Polling for changes (repeatedly asking server if there are any new changes)
introduces high load on a GitLab instance, because it usually requires
executing at least a few SQL queries. This makes scaling large GitLab
instances (like GitLab.com) very difficult so we do not allow adding new
features that require polling and hit the database.
Instead you should use polling mechanism with ETag caching in Redis.
## How to use it
1. Add the path of the endpoint which you want to poll to
`Gitlab::EtagCaching::Middleware`.
1. Implement cache invalidation for the path of your endpoint using
`Gitlab::EtagCaching::Store`. Whenever a resource changes you
have to invalidate the ETag for the path that depends on this
resource.
1. Check that the mechanism works:
- requests should return status code 304
- there should be no SQL queries logged in `log/development.log`
## How it works
Cache Miss:

Cache Hit:

1. Whenever a resource changes we generate a random value and store it in
Redis.
1. When a client makes a request we set the `ETag` response header to the value
from Redis.
1. The client caches the response (client-side caching) and sends the ETag as
the `If-None-Match` header with every subsequent request for the same
resource.
1. If the `If-None-Match` header matches the current value in Redis we know
that the resource did not change so we can send 304 response immediately,
without querying the database at all. The client's browser will use the
cached response.
1. If the `If-None-Match` header does not match the current value in Redis
we have to generate a new response, because the resource changed.
Do not use query parameters (for example `?scope=all`) for endpoints where you
want to enable ETag caching. The middleware takes into account only the request
path and ignores query parameters. All parameters should be included in the
request path. By doing this we avoid query parameter ordering problems and make
route matching easier.
For more information see:
- [`Poll-Interval` header](fe_guide/performance.md#realtime-components)
- [RFC 7232](https://tools.ietf.org/html/rfc7232)
- [ETag proposal](https://gitlab.com/gitlab-org/gitlab-ce/issues/26926)
|
{
"content_hash": "8230e0920c2143134c9e7060db70c747",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 79,
"avg_line_length": 41.67272727272727,
"alnum_prop": 0.7691972076788831,
"repo_name": "iiet/iiet-git",
"id": "76bb5ae78196af1236e9493c9ca4a527e239a9c7",
"size": "2321",
"binary": false,
"copies": "1",
"ref": "refs/heads/release",
"path": "doc/development/polling.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "694819"
},
{
"name": "Clojure",
"bytes": "79"
},
{
"name": "Dockerfile",
"bytes": "1907"
},
{
"name": "HTML",
"bytes": "1386003"
},
{
"name": "JavaScript",
"bytes": "4784137"
},
{
"name": "Ruby",
"bytes": "21288676"
},
{
"name": "Shell",
"bytes": "47962"
},
{
"name": "Vue",
"bytes": "1163492"
}
],
"symlink_target": ""
}
|
package com.owncloud.android.lib.resources.files;
import java.io.File;
import com.owncloud.android.lib.common.utils.Log_OC;
public class FileUtils {
public static final String PATH_SEPARATOR = "/";
public static String getParentPath(String remotePath) {
String parentPath = new File(remotePath).getParent();
parentPath = parentPath.endsWith(PATH_SEPARATOR) ? parentPath : parentPath + PATH_SEPARATOR;
return parentPath;
}
/**
* Validate the fileName to detect if contains any forbidden character: / , \ , < , > , : , " , | , ? , *
* @param fileName
* @return
*/
public static boolean isValidName(String fileName) {
boolean result = true;
Log_OC.d("FileUtils", "fileName =======" + fileName);
if (fileName.contains(PATH_SEPARATOR) ||
fileName.contains("\\") || fileName.contains("<") || fileName.contains(">") ||
fileName.contains(":") || fileName.contains("\"") || fileName.contains("|") ||
fileName.contains("?") || fileName.contains("*")) {
result = false;
}
return result;
}
/**
* Validate the path to detect if contains any forbidden character: \ , < , > , : , " , | , ? , *
* @param path
* @return
*/
public static boolean isValidPath(String path) {
boolean result = true;
Log_OC.d("FileUtils", "path ....... " + path);
if (path.contains("\\") || path.contains("<") || path.contains(">") ||
path.contains(":") || path.contains("\"") || path.contains("|") ||
path.contains("?") || path.contains("*")) {
result = false;
}
return result;
}
}
|
{
"content_hash": "9863d3913f6ac884c49d5f828aa7406c",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 106,
"avg_line_length": 27.571428571428573,
"alnum_prop": 0.6198186528497409,
"repo_name": "cloudcopy/owncloud-android-library",
"id": "796e0e82029052c518b6e349dea0e0a4ccd7260a",
"size": "2771",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/com/owncloud/android/lib/resources/files/FileUtils.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "422659"
},
{
"name": "Shell",
"bytes": "430"
}
],
"symlink_target": ""
}
|
<!---
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-->
# Apache Hadoop Changelog
## Release 0.15.3 - 2008-01-21
### INCOMPATIBLE CHANGES:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
### NEW FEATURES:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
### IMPROVEMENTS:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
### BUG FIXES:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
| [HADOOP-2574](https://issues.apache.org/jira/browse/HADOOP-2574) | bugs in mapred tutorial | Major | documentation | Doug Cutting | Arun C Murthy |
| [HADOOP-2570](https://issues.apache.org/jira/browse/HADOOP-2570) | streaming jobs fail after HADOOP-2227 | Blocker | . | Lohit Vijayarenu | Amareshwari Sriramadasu |
| [HADOOP-2562](https://issues.apache.org/jira/browse/HADOOP-2562) | globPaths does not support {ab,cd} as it claims to | Blocker | fs | Hairong Kuang | Hairong Kuang |
| [HADOOP-2540](https://issues.apache.org/jira/browse/HADOOP-2540) | Empty blocks make fsck report corrupt, even when it isn't | Blocker | . | Allen Wittenauer | dhruba borthakur |
| [HADOOP-650](https://issues.apache.org/jira/browse/HADOOP-650) | bin/hadoop dfs -lsr / is broken | Minor | . | Mahadev konar | Mahadev konar |
### TESTS:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
### SUB-TASKS:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
### OTHER:
| JIRA | Summary | Priority | Component | Reporter | Contributor |
|:---- |:---- | :--- |:---- |:---- |:---- |
|
{
"content_hash": "2e8099fd710f03dc41186bf3898d286b",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 181,
"avg_line_length": 37.98529411764706,
"alnum_prop": 0.6329849012775842,
"repo_name": "aliyun-beta/aliyun-oss-hadoop-fs",
"id": "9c15db6c64f77c765c90c53e8922add158260b0c",
"size": "2584",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hadoop-common-project/hadoop-common/src/site/markdown/release/0.15.3/CHANGES.0.15.3.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "69807"
},
{
"name": "C",
"bytes": "1462272"
},
{
"name": "C++",
"bytes": "1741989"
},
{
"name": "CMake",
"bytes": "54069"
},
{
"name": "CSS",
"bytes": "53242"
},
{
"name": "HTML",
"bytes": "155443"
},
{
"name": "Java",
"bytes": "55877607"
},
{
"name": "JavaScript",
"bytes": "50167"
},
{
"name": "Protocol Buffer",
"bytes": "251100"
},
{
"name": "Python",
"bytes": "3444"
},
{
"name": "Shell",
"bytes": "284392"
},
{
"name": "TeX",
"bytes": "19322"
},
{
"name": "XSLT",
"bytes": "15460"
}
],
"symlink_target": ""
}
|
- Setup
- [Goals](docs/01-setup/01-goals.md)
- [Requirements](docs/01-setup/02-requirements.md)
- [Getting Started](docs/01-setup/03-getting-started.md)
- Core Dependencies
- [First-party](docs/02-dependencies/01-first-party.md)
- [react-wildcat](packages/react-wildcat/README.md)
- [react-wildcat-handoff](packages/react-wildcat-handoff/README.md)
- [react-wildcat-prefetch](packages/react-wildcat-prefetch/README.md)
- [react-wildcat-test-runners](packages/react-wildcat-test-runners/README.md)
- Environments
- [Development](docs/03-environments/01-development.md)
- [Production](docs/03-environments/02-production.md)
- [Test](docs/03-environments/03-test.md)
- CLI Tools
- [wildcat](docs/04-cli-tools/01-wildcat.md)
- [wildcat-static-server](docs/04-cli-tools/02-wildcat-static-server.md)
- [wildcat-babel](docs/04-cli-tools/03-wildcat-babel.md)
- [wildcat-karma-runner](docs/04-cli-tools/04-wildcat-karma-runner.md)
- [wildcat-protractor-runner](docs/04-cli-tools/05-wildcat-protractor-runner.md)
- Advanced Usage
- [Lazy Loading](docs/05-advanced-usage/01-lazy-loading.md)
- [Domain Resolution](docs/05-advanced-usage/02-domain-resolution.md)
- [Route Resolution](docs/05-advanced-usage/03-route-resolution.md)
- Reference
- [Example Project](docs/06-reference/01-example-project.md)
|
{
"content_hash": "651254dd90a8cc094f314e03651ec8fe",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 85,
"avg_line_length": 44.903225806451616,
"alnum_prop": 0.7097701149425287,
"repo_name": "nfl/react-wildcat",
"id": "c2dfcdc3ce884652a89aa580aaa1e2fc331d918c",
"size": "1403",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SUMMARY.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "386"
},
{
"name": "JavaScript",
"bytes": "88470"
},
{
"name": "Makefile",
"bytes": "1137"
},
{
"name": "Shell",
"bytes": "2856"
}
],
"symlink_target": ""
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Activities;
namespace Core.Workflow
{
public static class 工作流扩展
{
/// <summary>
/// 将SynchronizationContext属性设为SynchronousSynchronizationContext类型实例之后,以使工作流变为在当前线程内同步运行。
/// 注意:如果使用了Delay活动,仍然会变成异步,所以如果需要延迟的话应该自定义活动并执行Thread.Sleep(...)使线程休眠。
/// 注意:未测试对并行活动的兼容性。
/// </summary>
/// <param name="o"></param>
public static void SetToSynchronous(this WorkflowApplication o)
{
o.SynchronizationContext = new SynchronousSynchronizationContext();
}
}
}
|
{
"content_hash": "9ee9a9c1e4e83592dd241054f6c24184",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 97,
"avg_line_length": 29.652173913043477,
"alnum_prop": 0.6700879765395894,
"repo_name": "manasheep/Core3",
"id": "344db28fd53eb3dc273ed62d9df6942f5cf12268",
"size": "876",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Core.Workflow/工作流扩展.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "1672309"
},
{
"name": "JavaScript",
"bytes": "428"
}
],
"symlink_target": ""
}
|
"""
Created on Fri Feb 29 14:33:17 2015
@author: Open Risk
Purpose: Testing concentration metrics library
"""
import unittest
import numpy as np
import pandas as pd
import concentrationMetrics as cm
ERROR_MARGIN = 1e-10
class TestConcentrationLib(unittest.TestCase):
""" Testing all indexes with uniform vectors of n = 0, 1, 1.000.000
"""
def shortDescription(self):
doc = self._testMethodDoc
return doc
def test_compare_atkinson_with_R(self):
""" Atkinson Index: Comparison with R version in ineq package
Results
Atkinson a=0.5: 0.1796591
Atkinson a=1: 0.3518251
"""
myIndex = cm.Index()
x = np.array([541, 1463, 2445, 3438, 4437, 5401, 6392, 8304, 11904, 22261])
ERROR_MARGIN = 1e-5
print(self.shortDescription())
self.assertTrue(abs(myIndex.atkinson(x, 0.5) - 0.1796591) < ERROR_MARGIN)
self.assertTrue(abs(myIndex.atkinson(x, 1.0) - 0.3518251) < ERROR_MARGIN)
def test_atkinson_uniform(self):
myIndex = cm.Index()
vector = np.ones(1000000)
print(self.shortDescription())
self.assertTrue(abs(myIndex.atkinson(vector, 0.5) - 0.0) < ERROR_MARGIN)
self.assertTrue(abs(myIndex.atkinson(vector, 1) - 0.0) < ERROR_MARGIN)
self.assertTrue(abs(myIndex.atkinson(vector, 2) - 0.0) < ERROR_MARGIN)
def test_atkinson(self):
myIndex = cm.Index()
vector = np.zeros(1000)
vector[0] = 1
print(self.shortDescription())
self.assertTrue(abs(myIndex.atkinson(vector, 2) - 1.0) < ERROR_MARGIN)
def test_cr(self):
"""
Testing Concentration Ratio
"""
myIndex = cm.Index()
n = 1000
vector = np.ones(n)
print(self.shortDescription())
self.assertTrue(abs(myIndex.cr(vector, 1) - 1.0 / n) < ERROR_MARGIN)
def test_bp(self):
"""
Testing Berger-Parker
"""
myIndex = cm.Index()
n = 1000
vector = np.ones(n)
print(self.shortDescription())
self.assertTrue(abs(myIndex.cr(vector, 1) - myIndex.berger_parker(vector)) < ERROR_MARGIN)
def test_hhi(self):
"""
Testing Herfindahl-Hirschman
"""
myIndex = cm.Index()
vector = np.ones(10)
print(self.shortDescription())
self.assertTrue(abs(myIndex.hhi(vector) - 0.0) < ERROR_MARGIN)
def test_gini(self):
"""
Testing Gini
"""
myIndex = cm.Index()
vector = np.ones(10)
print(self.shortDescription())
self.assertTrue(abs(myIndex.gini(vector) - 0.0) < ERROR_MARGIN)
def test_shannon(self):
"""
Testing Shannon
"""
myIndex = cm.Index()
vector = np.ones(10)
print(self.shortDescription())
self.assertTrue(abs(myIndex.shannon(vector, normalized=True) - 0.0) < ERROR_MARGIN)
def test_hk(self):
"""
Testing Hannah-Kay
"""
myIndex = cm.Index()
n = 10
vector = np.ones(n)
print(self.shortDescription())
self.assertTrue(abs(myIndex.hk(vector, 1) - 1.0 / n) < ERROR_MARGIN)
self.assertTrue(abs(myIndex.hk(vector, 3) - 1.0 / n) < ERROR_MARGIN)
def test_hti(self):
"""
Testing Hall-Tideman Index (compare with Gini)
"""
myIndex = cm.Index()
n = 100
vector = np.random.normal(1000, 1, n)
self.assertTrue(abs(1 - 1 / (n * myIndex.hti(vector)) - myIndex.gini(vector)) < ERROR_MARGIN)
class TestConfidenceIntervals(unittest.TestCase):
""" Test confidence interval functionality
"""
def shortDescription(self):
doc = self._testMethodDoc
return doc
def test_ci_api(self):
"""
Testing that all implemented indexes have monotonic confidence intervals
"""
myIndex = cm.Index()
portfolio = np.random.normal(loc=10, scale=1, size=100)
methods = [['cr', 5], ['berger_parker'], ['hhi'], ['hk', 3],
['hoover'], ['gini'], ['shannon'], ['atkinson', 1.5], ['gei', 3],
['theil'], ['kolm', 2]]
print(self.shortDescription())
for method in methods:
if len(method) > 1:
lower_bound, value, upper_bound = myIndex.compute(portfolio, method[1], index=method[0], ci=0.95,
samples=1000)
else:
lower_bound, value, upper_bound = myIndex.compute(portfolio, index=method[0], ci=0.95, samples=1000)
self.assertTrue(lower_bound <= value)
self.assertTrue(upper_bound >= value)
class TestEllisonGlaeser(unittest.TestCase):
""" Test Ellison Glaeser index (multi-dimensional)
"""
def shortDescription(self):
doc = self._testMethodDoc
return doc
def test_eg_single_industry(self):
"""
Testing EG Single Industry
"""
myIndex = cm.Index()
# Number of observations
N = 10
# Number of areas
Na = 10
# Number of industries
Ni = 1
# uniform exposure
exposure = np.ones(N)
# single industry
industry = np.zeros(N, dtype=np.int)
# uniform area distribution
area = np.arange(0, N)
# create dataframe
d = {'Exposure': exposure, 'Area': area, 'Industry': industry}
data = pd.DataFrame(data=d)
print(self.shortDescription())
self.assertTrue(abs(myIndex.ellison_glaeser(data, Na, Ni)[0] - 0.0) < ERROR_MARGIN)
def test_eg_five_uniform(self):
"""
Testing EG Five Uniform Industries
"""
myIndex = cm.Index()
# Number of observations
N = 50
# Number of areas
Na = 10
# Number of industries
Ni = 5
# uniform exposure
exposure = np.ones(N)
# single industry
industry = np.array(list(np.arange(0, Ni)) * Na)
# uniform area distribution
y_list = [[j for i in range(Ni)] for j in range(Na)]
area = np.array([y for x in y_list for y in x])
# create dataframe
d = {'Exposure': exposure, 'Area': area, 'Industry': industry}
data = pd.DataFrame(data=d)
results = myIndex.ellison_glaeser(data, Na, Ni)
print(self.shortDescription())
for i in range(Ni):
self.assertTrue(abs(results[i] - 0.0) < ERROR_MARGIN)
def test_eg_max_concentration(self):
"""
Testing EG Maximum Concentration
"""
myIndex = cm.Index()
# Number of observations
N = 400000
# Number of areas
Na = 200000
# Number of industries
Ni = 2
# uniform exposure
exposure = np.zeros(N)
exposure[0:199999] = 1
exposure[399999] = 1
exposure[399998] = 0.0001
# two industries
area = np.array(list(np.arange(0, Na)) * Ni)
# uniform area distribution
y_list = [[j for i in range(Na)] for j in range(Ni)]
industry = np.array([y for x in y_list for y in x])
# create dataframe
d = {'Exposure': exposure, 'Area': area, 'Industry': industry}
data = pd.DataFrame(data=d)
print(self.shortDescription())
self.assertTrue(abs(myIndex.ellison_glaeser(data, Na, Ni)[0] - 0.0) < ERROR_MARGIN)
if __name__ == "__main__":
unittest.main()
|
{
"content_hash": "dce11c65a1e174b52f233f6cafef00b0",
"timestamp": "",
"source": "github",
"line_count": 255,
"max_line_length": 116,
"avg_line_length": 29.498039215686273,
"alnum_prop": 0.5610210050518479,
"repo_name": "open-risk/concentration_library",
"id": "99d8999902d35f5daf7e72cd898eb2965bd34af1",
"size": "8231",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "127"
},
{
"name": "Python",
"bytes": "22955"
}
],
"symlink_target": ""
}
|
Copyright (c) 2014 RetailMeNot, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
|
{
"content_hash": "d2081d6c2ece2182098ea28dbde7a74a",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 77,
"avg_line_length": 55.8421052631579,
"alnum_prop": 0.8049010367577757,
"repo_name": "RetailMeNot/ember-heisenberg",
"id": "08967d64a04fae1ed409e1ca4ba651f757a96127",
"size": "1061",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "LICENSE.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1495849"
}
],
"symlink_target": ""
}
|
require 'sinatra'
reference_temperature = ReferenceTemperature.new(273.15 + 25)
before '/temperature' do
unless session['is_authenticated']
halt 401, 'log in first'
end
end
# https://github.com/conoyes/hippeis#temperature
get '/temperature' do
{
'current' => Temperature.celsius_offset + 10 + ::Random.new.rand(10),
'current_celsius' => 10 + ::Random.new.rand(10),
'reference' => reference_temperature.getTemperature,
'reference_celsius' => reference_temperature.getTemperatureInCelsius
}.to_json
end
post '/temperature' do
body = JSON.parse(request.body.read)
reference_temperature.setTemperature = body['reference']
end
|
{
"content_hash": "0e64409f2fcfecf1ddba2e7ee548336f",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 73,
"avg_line_length": 27.375,
"alnum_prop": 0.7214611872146118,
"repo_name": "conoyes/hippeis",
"id": "62b4d30aaf017544b2af730f1e207721d61ecf1f",
"size": "1234",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/api_temperature.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ruby",
"bytes": "9580"
}
],
"symlink_target": ""
}
|
package com.semmle.js.ast.jsdoc;
import com.semmle.js.ast.SourceLocation;
import java.util.List;
/** A JSDoc block tag. */
public class JSDocTag extends JSDocElement {
private final String title;
private final String description;
private final String name;
private final JSDocTypeExpression type;
private final List<String> errors;
public JSDocTag(
SourceLocation loc,
String title,
String description,
String name,
JSDocTypeExpression type,
List<String> errors) {
super(loc);
this.title = title;
this.description = description;
this.name = name;
this.type = type;
this.errors = errors;
}
@Override
public void accept(Visitor v) {
v.visit(this);
}
/** The type of this tag; e.g., <code>"param"</code> for a <code>@param</code> tag. */
public String getTitle() {
return title;
}
/** Does this tag have a description? */
public boolean hasDescription() {
return description != null;
}
/** The description of this tag; may be null. */
public String getDescription() {
return description;
}
/** Does this tag specify a name? */
public boolean hasName() {
return name != null;
}
/** The name this tag refers to; null except for <code>@param</code> tags. */
public String getName() {
return name;
}
/** The type expression this tag specifies; may be null. */
public JSDocTypeExpression getType() {
return type;
}
/** Does this tag specify a type expression? */
public boolean hasType() {
return type != null;
}
/** Errors encountered while parsing this tag. */
public List<String> getErrors() {
return errors;
}
}
|
{
"content_hash": "b16cbb709cc7e694b5a744d39eba7d70",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 88,
"avg_line_length": 23.068493150684933,
"alnum_prop": 0.6526128266033254,
"repo_name": "github/codeql",
"id": "1f65f8af8754c2e30ab43c4268db12811a9bf31d",
"size": "1684",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "javascript/extractor/src/com/semmle/js/ast/jsdoc/JSDocTag.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP.NET",
"bytes": "3739"
},
{
"name": "Batchfile",
"bytes": "3534"
},
{
"name": "C",
"bytes": "410440"
},
{
"name": "C#",
"bytes": "21146000"
},
{
"name": "C++",
"bytes": "1352639"
},
{
"name": "CMake",
"bytes": "1809"
},
{
"name": "CodeQL",
"bytes": "32583145"
},
{
"name": "Dockerfile",
"bytes": "496"
},
{
"name": "EJS",
"bytes": "1478"
},
{
"name": "Emacs Lisp",
"bytes": "3445"
},
{
"name": "Go",
"bytes": "697562"
},
{
"name": "HTML",
"bytes": "58008"
},
{
"name": "Handlebars",
"bytes": "1000"
},
{
"name": "Java",
"bytes": "5417683"
},
{
"name": "JavaScript",
"bytes": "2432320"
},
{
"name": "Kotlin",
"bytes": "12163740"
},
{
"name": "Lua",
"bytes": "13113"
},
{
"name": "Makefile",
"bytes": "8631"
},
{
"name": "Mustache",
"bytes": "17025"
},
{
"name": "Nunjucks",
"bytes": "923"
},
{
"name": "Perl",
"bytes": "1941"
},
{
"name": "PowerShell",
"bytes": "1295"
},
{
"name": "Python",
"bytes": "1649035"
},
{
"name": "RAML",
"bytes": "2825"
},
{
"name": "Ruby",
"bytes": "299268"
},
{
"name": "Rust",
"bytes": "234024"
},
{
"name": "Shell",
"bytes": "23973"
},
{
"name": "Smalltalk",
"bytes": "23"
},
{
"name": "Starlark",
"bytes": "27062"
},
{
"name": "Swift",
"bytes": "204309"
},
{
"name": "Thrift",
"bytes": "3020"
},
{
"name": "TypeScript",
"bytes": "219623"
},
{
"name": "Vim Script",
"bytes": "1949"
},
{
"name": "Vue",
"bytes": "2881"
}
],
"symlink_target": ""
}
|
<?xml version="1.0" encoding="UTF-8"?>
<faces-config>
<!-- The 'demoBean' below is only used as an example on how to use the debug toolbar from Java.
It is not required to use the Debug Toolbar -->
<managed-bean id="demoBean">
<managed-bean-name>demoBean</managed-bean-name>
<managed-bean-class>eu.linqed.debugtoolbar.DemoClass</managed-bean-class>
<managed-bean-scope>view</managed-bean-scope>
</managed-bean>
<!--AUTOGEN-START-BUILDER: Automatically generated by IBM Domino Designer. Do not modify.-->
<!--AUTOGEN-END-BUILDER: End of automatically generated section-->
</faces-config>
|
{
"content_hash": "2679b9f479325dafc98d2de57be5949b",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 97,
"avg_line_length": 50.916666666666664,
"alnum_prop": 0.7201309328968903,
"repo_name": "OpenNTF/DebugToolbar",
"id": "caf37c829657b2af8e331323e39425d7618f1157",
"size": "611",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "org.openntf.debugtoolbar.exampledb.odp/WebContent/WEB-INF/faces-config.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "348458"
},
{
"name": "JavaScript",
"bytes": "1707"
},
{
"name": "XPages",
"bytes": "7023"
}
],
"symlink_target": ""
}
|
@interface AppDelegate : UIResponder <UIApplicationDelegate>
@property (strong, nonatomic) UIWindow *window;
@end
|
{
"content_hash": "622a3de7efd5c6e3b1eb38649175d121",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 60,
"avg_line_length": 23.2,
"alnum_prop": 0.7931034482758621,
"repo_name": "martingit/NexaHomeControl",
"id": "14c3c80d1d9d9b8e0be1861471df75906045b289",
"size": "287",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "NexaHomeControl/AppDelegate.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "192594"
}
],
"symlink_target": ""
}
|
describe('Kitchen Sink', function() {
it('.should() - assert that <title> is correct', function() {
// https://on.cypress.io/visit
cy.visit('https://evanghelic.ro');
// Here we've made our first assertion using a '.should()' command.
// An assertion is comprised of a chainer, subject, and optional value.
// https://on.cypress.io/should
// https://on.cypress.io/and
// https://on.cypress.io/title
cy.title().should('include', 'Biserica Cluj');
// ↲ ↲ ↲
// subject chainer value
});
});
|
{
"content_hash": "15ae6535337aeea5ed598f597010c2c3",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 75,
"avg_line_length": 33.705882352941174,
"alnum_prop": 0.5724258289703316,
"repo_name": "radubrehar/evanghelic.ro",
"id": "7617c8ec64081f2a204263cb8f80a84c635187dc",
"size": "916",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cypress/integration/example_spec.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "150001"
},
{
"name": "JavaScript",
"bytes": "69970"
}
],
"symlink_target": ""
}
|
var addSetEntry = require('./_addSetEntry'),
arrayReduce = require('./_arrayReduce'),
setToArray = require('./_setToArray');
/**
* Creates a clone of `set`.
*
* @private
* @param {Object} set The set to clone.
* @returns {Object} Returns the cloned set.
*/
function cloneSet(set) {
return arrayReduce(setToArray(set), addSetEntry, new set.constructor);
}
module.exports = cloneSet;
|
{
"content_hash": "3de5a934d1a232aa69681ce3cfa1e696",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 72,
"avg_line_length": 26,
"alnum_prop": 0.6490384615384616,
"repo_name": "Moccine/global-service-plus.com",
"id": "df0ef38367d05455a14827d761e90fad5401e6f2",
"size": "416",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web/libariries/bootstrap/node_modules/grunt-jscs/node_modules/lodash/_cloneSet.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1629941"
},
{
"name": "HTML",
"bytes": "967450"
},
{
"name": "JavaScript",
"bytes": "390886"
},
{
"name": "Makefile",
"bytes": "404"
},
{
"name": "PHP",
"bytes": "233061"
}
],
"symlink_target": ""
}
|
package at.ac.tuwien.cg.gesture.data;
import java.util.ArrayList;
public class MeasuringPointStorage {
private ArrayList<Object> objectQueue;
private int measurementID = 0;
public MeasuringPointStorage(){
objectQueue = new ArrayList<Object>();
}
public void setMeasurementID(int id){
this.measurementID=id;
}
public int getMeasurementID(){
return measurementID;
}
public synchronized Object getObject(int i){
return objectQueue.get(i);
}
public synchronized void removeObject(int i){
if(objectQueue.size()>0)
objectQueue.remove(i);
}
public synchronized void addObject(Object o){
objectQueue.add(o);
}
public synchronized void removeAll(){
objectQueue.removeAll(objectQueue);
}
public synchronized int getSize(){
return objectQueue.size();
}
}
|
{
"content_hash": "d7e02ea9dcf1b78206e48061b0bebb05",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 46,
"avg_line_length": 19.627906976744185,
"alnum_prop": 0.6966824644549763,
"repo_name": "iSchluff/Wii-Gesture",
"id": "1287d5c7a3dceb00f936e044e8fcc6f854fbf094",
"size": "844",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "GestureDroidServer/src/at/ac/tuwien/cg/gesture/data/MeasuringPointStorage.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "469907"
},
{
"name": "Matlab",
"bytes": "6272"
}
],
"symlink_target": ""
}
|
layout: page
title: Palace Cube Corporation Award Ceremony
date: 2016-05-24
author: Elizabeth Orozco
tags: weekly links, java
status: published
summary: Aliquam erat volutpat. Cras at suscipit.
banner: images/banner/people.jpg
booking:
startDate: 12/12/2019
endDate: 12/16/2019
ctyhocn: GRRHSHX
groupCode: PCCAC
published: true
---
Nulla rhoncus egestas congue. Morbi lorem augue, tempor ac rhoncus at, ornare eu eros. In hac habitasse platea dictumst. Quisque ornare condimentum efficitur. Integer hendrerit erat eu scelerisque varius. Pellentesque fermentum vehicula nisl, a sagittis lacus finibus id. Cras molestie vitae metus nec congue. Suspendisse posuere justo est, eget cursus lectus lacinia vitae. Proin feugiat arcu vitae arcu tincidunt vehicula. Curabitur iaculis velit congue nunc iaculis cursus. Mauris eu interdum sem. Sed bibendum, risus in consectetur facilisis, velit est pulvinar lorem, vel aliquet dolor nisi in mauris. Sed sagittis velit ut velit dignissim, a varius nunc elementum.
1 Morbi eu orci vitae dolor condimentum vehicula.
Vestibulum a commodo nulla. Phasellus nec odio vitae nibh sagittis ullamcorper eget a sem. Nulla vitae sollicitudin magna. Phasellus sed sem justo. Praesent rutrum dolor vel leo tincidunt, et tincidunt ipsum cursus. Suspendisse dictum ultricies felis vel dapibus. Maecenas id mauris hendrerit, porttitor magna sed, tempor lectus. Cras sed vulputate metus, quis cursus ligula. Integer finibus erat eu libero tempor rutrum. Morbi sodales tellus vel sem cursus, ac eleifend elit dignissim. Etiam nec convallis tellus, in congue mauris.
|
{
"content_hash": "0c4091b1257b51ecfaee164f5c1509d9",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 670,
"avg_line_length": 79.8,
"alnum_prop": 0.8114035087719298,
"repo_name": "KlishGroup/prose-pogs",
"id": "304e2a0185e32168567e560da6dd31391fed6b07",
"size": "1600",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "pogs/G/GRRHSHX/PCCAC/index.md",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
<?php
session_start();
error_reporting(5);
if(!isset($_SESSION['user']))
{
header("LOCATION:../login.php");
}
else
{
include("../include/conn.php");
if(isset($_GET['form_purpose']))
{
if($_GET['form_purpose']=='passchange')
{
if($_GET['new_status']=='inactive')
{
foreach($_POST['selected'] as $value => $temp)
{
$sql_change_status="UPDATE `files_info` SET `file_status`='inactive' WHERE `file_code`='".$value."'";
mysql_query($sql_change_status);
}
}
else if($_GET['new_status']=='active')
{
foreach($_POST['selected'] as $value => $temp)
{
$sql_change_status="UPDATE `files_info` SET `file_status`='active' WHERE `file_code`='".$value."'";
mysql_query($sql_change_status);
}
}
}
if($_GET['form_purpose']=='delete')
{
foreach($_POST['selected'] as $value => $temp)
{
$sql_delete_file="
UPDATE files_info SET `file_status` = 'suspended' WHERE `file_code` = '".$value."'";
$file_remove='../uploads/'.$value;
unlink($file_remove);
mysql_query($sql_delete_file);
}
}
}
if($_GET['action']=='status_change')
{
if(isset($_GET['id']))
{
if($_GET['current_status']=='inactive')
$new_status='active';
else
$new_status='inactive';
$temp_id=$_GET['id'];
$sql_change_status="UPDATE `files_info` SET `file_status`='".$new_status."' WHERE `file_code`='".$temp_id."'";
mysql_query($sql_change_status);
}
}
$sortby='file_upload_date';
$order='ASC';
if(isset($_GET['action']))
{
switch($_GET['action'])
{
case 'sortbyname':
$sortby='file_name';
break;
case 'sortbysize':
$sortby='file_size';
break;
case 'sortbydate':
$sortby='file_upload_date';
break;
case 'sortbytime':
$sortby='file_upload_time';
break;
case 'sortbydownloads':
$sortby='file_downloads_count';
break;
case 'sortbystatus':
$sortby='file_status';
break;
default:
$sortby='file_upload_date';
break;
}
}
if(isset($_GET['order']))
{
switch($_GET['order'])
{
case 'descending':
$order='DESC';
break;
default:
$order='ASC';
break;
}
}
$sql_files="SELECT * from files_info WHERE `file_upload_by`='".$_SESSION['user']['u_id']."' AND `file_status`!='suspended' ORDER BY `".$sortby."` ".$order."";
if($result_files=mysql_query($sql_files))
{
while($temp=mysql_fetch_array($result_files))
{
$details_files[]=$temp;
}
}
else
{
echo "database error";
die();
}
}
?>
<!doctype html>
<!--[if IE 7 ]> <html lang="en" class="no-js ie7"> <![endif]-->
<!--[if IE 8 ]> <html lang="en" class="no-js ie8"> <![endif]-->
<!--[if IE 9 ]> <html lang="en" class="no-js ie9"> <![endif]-->
<!--[if (gt IE 9)|!(IE)]><!--> <html lang="en" class="no-js"> <!--<![endif]-->
<head>
<meta charset="UTF-8">
<!--[if IE 8 ]><meta http-equiv="X-UA-Compatible" content="IE=7"><![endif]-->
<title>User Dashboard</title>
<meta name="description" content="">
<meta name="author" content="">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<!-- CSS Styles -->
<link rel="stylesheet" href="css/style.css">
<link rel="stylesheet" href="css/jquery.tipsy.css">
<link rel="stylesheet" href="css/jquery.wysiwyg.css">
<script src="js/libs/modernizr-1.7.min.js"></script>
<script src="../js/ajax.js"></script>
</head>
<body>
<!-- Aside Block -->
<section role="navigation">
<!-- Header with logo and headline -->
<header>
<h1>....Share with the world</h1>
</header>
<!-- User Info -->
<section id="user-info">
<img src="img/sample_user.png" alt="Sample User Avatar">
<div>
<a href="user.php" title="Username"><?php echo $_SESSION['user']['u_id'] ?></a>
<ul>
<li><a class="button-link" href="../index.php" title="Back to home" rel="tooltip">Back to home</a></li>
<li><a class="button-link" href="../include/logout.php" title="Logout" rel="tooltip">logout</a></li>
</ul>
</div>
</section>
<!-- /User Info -->
<!-- Main Navigation -->
<nav id="main-nav">
<ul>
<li ><a href="index.php" title="" class="dashboard no-submenu">File Manager</a></li> <!-- Use class .no-submenu to open link instead of a sub menu-->
<!-- Use class .current to open submenu on page load -->
<li class="current">
<a href="user.php" title="" class="projects">User Settings</a>
</li>
</ul>
</nav>
<!-- /Main Navigation -->
</section>
<!-- /Aside Block -->
<!-- Main Content -->
<section role="main" >
<!-- Widget Container -->
<section id="widgets-container">
<!-- Widget Box -->
<div class="widget increase" id="new-visitors">
<p align="center"><strong>
<?php
$count=0;
foreach($details_files as $value => $temp)
{
if(file_exists("../uploads/".$temp['file_code']))
$count=$count+1;
}
echo $count;
?></strong> Total files under your account</p>
</div>
<div class="widget decrease" id="new-visitors">
<p align="center"><strong>
<?php
$count=0;
foreach($details_files as $value => $temp)
{
$count=$count+$temp['file_downloads_count'];
}
echo $count;
?>
</strong> Total downloads of your files</p>
</div>
<!-- /Widget Box -->
</section>
<!-- /Widget Container -->
<!-- Full Content Block -->
<!-- Note that only 1st article need clearfix class for clearing -->
<article class="full-block clearfix">
<!-- Article Header -->
<header>
<h2>Account Settings</h2>
<!-- Article Header Tab Navigation -->
</header>
<!-- /Article Header -->
<!-- Article Content -->
<section>
<form>
<!-- Inputs -->
<!-- Use class .small, .medium or .large for predefined size -->
<fieldset>
<legend>Change Password</legend>
<dl>
<dt>
<label>Current Password</label>
</dt>
<dd>
<input type="password" class="medium" name="current_password" id="current_password">
</dd>
<dt>
<label>New Password</label>
</dt>
<dd>
<input type="password" class="medium" name="new_password" id="new_password">
</dd>
<dt>
<label>Confirm Password</label>
</dt>
<dd>
<input type="password" class="medium" name="confirm_password" id="confirm_password">
</dd>
</dl>
</fieldset>
<button type="button" onClick="send_data('change_pass_status')">Change Password</button>
<!-- Notification -->
<div class="notification attention" id="change_pass_status" style="display:none;"> </div>
<!-- /Notification -->
</form>
<form>
<!-- Inputs -->
<!-- Use class .small, .medium or .large for predefined size -->
<fieldset>
<legend>Change Email</legend>
<dl>
<dt>
<label>Current Password</label>
</dt>
<dd>
<input type="password" class="medium" name="current_password_email" id="current_password_email">
</dd>
<dt>
<label>New Email</label>
</dt>
<dd>
<input type="text" class="medium" name="new_email" id="new_email">
</dd>
</dl>
</fieldset>
<button type="button" onClick="send_data('change_email_status')">Change Email</button>
<!-- Notification -->
<div class="notification attention" id="change_email_status" style="display:none;"> </div>
<!-- /Notification -->
</form>
<br/>
<form>
<!-- Inputs -->
<!-- Use class .small, .medium or .large for predefined size -->
<fieldset>
<legend>Delete Account</legend>
<dl>
<dt>
<label>Current Password</label>
</dt>
<dd>
<input type="password" class="medium" name="current_password_delete" id="current_password_delete">
<button type="button" onClick="send_data('account_delete_status')">DELETE ACCOUNT</button>
</dd>
</dl>
</fieldset>
<!-- /Notification -->
</form>
<div class="notification attention" id="account_delete_status" style="display:none;"> </div>
<!-- /Side Tab Content #sidetab2 -->
</section>
<!-- /Article Content -->
</article>
<!-- /Full Content Block -->
</section>
<!-- /Main Content -->
<!-- JS Libs at the end for faster loading -->
</body>
</html>
|
{
"content_hash": "a0287cc91643e358051d1ab056c6c09a",
"timestamp": "",
"source": "github",
"line_count": 328,
"max_line_length": 159,
"avg_line_length": 26.28048780487805,
"alnum_prop": 0.5418793503480278,
"repo_name": "balramkverma/Simple-File-Hosting-Website-in-PHP-with-Admin-Panel",
"id": "a96b9a827092bdb7dc5bd5b2195e6e4d928df441",
"size": "8620",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "account/user.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "98449"
},
{
"name": "JavaScript",
"bytes": "220562"
},
{
"name": "PHP",
"bytes": "139566"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.