repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
wthie/nevow | examples/customform/customform.py | #################################################################################
# Example of using patterns to change the appearance of a webform.
#from twisted.application import internet, service
#from twisted.web import static
from zope.interface import implements
from nevow import rend
from nevow import url
from nevow import loaders
from nevow import tags as T
from formless import annotate
from formless import webform
#################################################################################
# This beasty defines how I want the form to look. It's a table (eek!).
# webform looks for patterns to use when rendering parts of the form and fills
# slots with key information.
#
# Key patterns are:
# freeform-form -- the form itself, mostly just the structure
# argument -- the pattern to use for arguments when nothing better
# is found
# argument!!fo -- the pattern to use for the 'fo' argument
#
# Inside the patterns the following slots are filled:
# freeform-form:
# form-action -- action attribute, where the form will be posted
# form-id -- id of the form
# form-name -- name of the form
# form-label -- form label, extracted from the docstring
# form-description -- description, also extracted from the docstring
# form-error -- "global" error
# form-arguments -- insertion point for the arguments' HTML
# argument:
# label -- label
# input -- the form element (input, textarea, etc)
# error -- error message (if any)
# description -- description of argument
#
# Note that you do not have to provide slots for all of the above. For
# instance, you may not want to display the descriptions.
#
# Chances are that this block of text would be in a disk template or
# perhaps defined using stan in a taglib module.
FORM_LAYOUT = loaders.xmlstr(
"""<?xml version="1.0"?>
<form xmlns:n="http://nevow.com/ns/nevow/0.1" n:pattern="freeform-form">
<!-- Replace/fill the form attributes -->
<n:attr name="action"><n:slot name="form-action"/></n:attr>
<n:attr name="id"><n:slot name="form-id"/></n:attr>
<n:attr name="name"><n:slot name="form-name"/></n:attr>
<!-- General form information -->
<p><strong><n:slot name="form-label"/></strong></p>
<p><em><n:slot name="form-description"/></em></p>
<p><strong><em><n:slot name="form-error"/></em></strong></p>
<!-- Start of the form layout table -->
<table style="background: #eee; border: 1px solid #bbb; padding: 1em;" >
<!-- Mark location arguments will be added -->
<n:slot name="form-arguments"/>
<!-- General argument layout pattern -->
<n:invisible n:pattern="argument" n:render="remove">
<tr>
<th><n:slot name="label"/>:</th>
<td><n:slot name="input"/><span class="freeform-error"><n:slot name="error"/></span></td>
</tr>
<tr>
<th></th>
<td><n:slot name="description"/></td>
</tr>
</n:invisible>
<!-- Argument layout, just for fum -->
<n:invisible n:pattern="argument!!fo" n:render="remove">
<tr>
<th><n:slot name="label"/>:</th>
<td>
<textarea cols="40" rows="5"><n:attr name="id"><n:slot name="id"/></n:attr><n:attr name="name"><n:slot name="name"/></n:attr><n:slot name="value"/></textarea>
<span class="freeform-error"><n:slot name="error"/></span></td>
</tr>
<tr>
<th></th>
<td><n:slot name="description"/></td>
</tr>
</n:invisible>
<!-- Button row -->
<tr>
<td colspan="2">
<n:slot name="form-button"/>
</td>
</tr>
</table>
</form>
""").load()
#################################################################################
# ISomething and Page are just something to test the form rendering on.
class ISomething(annotate.TypedInterface):
    # TypedInterface describing one autocallable method; formless renders one
    # form per autocallable, with a field per annotated argument below.
    def doSomething(
        # NOTE: formless interface methods take no `self`; each keyword
        # default is a formless type annotation that becomes a form field.
        ctx = annotate.Context(),
        fee = annotate.String(required=True, description="Wee!"),
        fi = annotate.Integer(description="Tra-la-la"),
        fo = annotate.Text(),
        fum = annotate.String(),
        ):
        # The docstring below is runtime data: formless extracts the form's
        # label (first line) and description (rest) from it — do not edit
        # casually (see the "form-label"/"form-description" slots above).
        """Do Something Really Exciting
        Normally you would put a useful description of the interface here but,
        since the inteface is useless anyway, I cannot think of anything
        useful to say about it. Although ... did I mention it is useless?"""
    # Mark the method as callable from a rendered form.
    doSomething = annotate.autocallable(doSomething)
class Root(rend.Page):
    """Render a custom and normal form for an ISomething.
    """
    # Declare that this page provides ISomething, so webform can render it.
    implements(ISomething)
    addSlash = True
    # Serve webform's default stylesheet at the child URL /webform_css.
    child_webform_css = webform.defaultCSS

    def render_normalForm(self, ctx, data):
        # Default webform layout (no custom patterns).
        return webform.renderForms()

    def render_customForm(self, ctx, data):
        # Same form, but rendered through the FORM_LAYOUT patterns above.
        return webform.renderForms()[FORM_LAYOUT]

    def doSomething(self, ctx, **kwargs):
        # Called by formless when the posted form validates successfully.
        # (Python 2 print statement — this example predates Python 3.)
        print '***** doSomething called with:', kwargs

    # Page body: both the custom and the default rendering of the same form.
    docFactory = loaders.stan(
        T.html[
            T.head[
                T.title['Example :: Custom Form Layout'],
                T.link(rel='stylesheet', type='text/css', href=url.here.child("webform_css")),
            ],
            T.body[
                T.h1['Custom'],
                render_customForm,
                T.h1['Default'],
                render_normalForm,
            ]
        ]
    )
#application = service.Application('hellostan')
#webServer = internet.TCPServer(8080, appserver.NevowSite(Root()))
#webServer.setServiceParent(application)
|
Alfio00/EasterToken | src/components/Link/component.js | <reponame>Alfio00/EasterToken
import React from "react"
import Link from "next/link";
const LinkComponent = ({children,to, ...rest}) => {
return <>
<Link href={to}>
{children}
</Link>
</>
}
export default LinkComponent
|
binodthapachhetry/JustInTimeAdaptiveIntervention | wearwocketslib/src/main/java/edu/neu/android/wearwocketslib/receivers/SystemBroadcastReceiver.java | <reponame>binodthapachhetry/JustInTimeAdaptiveIntervention
package edu.neu.android.wearwocketslib.receivers;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.support.v4.content.WakefulBroadcastReceiver;
import java.util.Date;
import edu.neu.android.wearwocketslib.core.repeatedwakefulservice.AlwaysOnService;
import edu.neu.android.wearwocketslib.core.repeatedwakefulservice.WearableWakefulService;
import edu.neu.android.wearwocketslib.utils.log.Logger;
/**
 * Wakeful receiver for system broadcasts that (re)starts the library's two
 * background services: the repeated wakeful service ({@link WearableWakefulService})
 * and the always-on service ({@link AlwaysOnService}).
 */
public class SystemBroadcastReceiver extends WakefulBroadcastReceiver {

    private static final String TAG = "SystemBroadcastReceiver";

    private Context mContext;
    private Logger logger = null;

    @Override
    public void onReceive(Context context, Intent intent) {
        mContext = context;
        logger = new Logger(TAG);
        // Fixed log-message typo: "stating" -> "starting".
        logger.i("starting minute service using intent", mContext);
        startRepeatedWakefulService();
        logger.i("starting always on service using intent", mContext);
        Intent alwaysOnServiceIntent = new Intent(mContext, AlwaysOnService.class);
        mContext.startService(alwaysOnServiceIntent);
    }

    /**
     * Starts {@link WearableWakefulService} via a wakeful intent, unless it
     * reports itself as already running.
     */
    private void startRepeatedWakefulService() {
        Intent wakefulService = new Intent(mContext, WearableWakefulService.class);
        if (!WearableWakefulService.isRunning()) {
            logger.i("Starting service @ " + new Date().toString(), mContext);
            startWakefulService(mContext, wakefulService);
        } else {
            logger.i("Wakeful service is running, no need to start", mContext);
        }
    }
}
|
huige233/polymer-core | src/main/java/com/nmmoc7/polymercore/client/utils/schematic/control/MoveX.java | <gh_stars>0
package com.nmmoc7.polymercore.client.utils.schematic.control;
import com.google.common.collect.Lists;
import com.nmmoc7.polymercore.client.resources.Icons;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.text.TranslationTextComponent;
import java.util.Collections;
import java.util.List;
/**
 * Schematic control that shifts the current anchor position along the X axis.
 */
public class MoveX extends MoveAbstract {

    /** Tooltip lines shown for this control. */
    private final List<ITextComponent> tooltip = Lists.newArrayList(
        new TranslationTextComponent("gui.polymer.locator.control.move_x.description_1"),
        new TranslationTextComponent("gui.polymer.locator.control.move_common.description_2"),
        new TranslationTextComponent("gui.polymer.locator.control.move_common.description_3")
    );

    public MoveX() {
        super(Icons.MOVE_X);
    }

    @Override
    public ITextComponent getName() {
        return new TranslationTextComponent("gui.polymer.locator.control.move_x.title");
    }

    @Override
    public List<ITextComponent> getDescription() {
        // Hand out a read-only view so callers cannot mutate the tooltip.
        return Collections.unmodifiableList(tooltip);
    }

    @Override
    public BlockPos doMove(BlockPos current, int move) {
        // Offset only along X; Y and Z stay unchanged.
        return current.add(move, 0, 0);
    }
}
|
Q2035/Spring-Framework | spring-sourcecode/src/main/java/top/hellooooo/sourcecode/charpter05/circle/CircleApplication.java | package top.hellooooo.sourcecode.charpter05.circle;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
 * Demo entry point: bootstraps a ClassPathXmlApplicationContext from
 * circle.xml and prints the "circleA" bean (circular-dependency example).
 *
 * NOTE(review): the class is spelled CirCleApplication while the file path
 * says CircleApplication.java — confirm which spelling is intended.
 */
public class CirCleApplication {
    public static void main(String[] args) {
        ApplicationContext context = new ClassPathXmlApplicationContext("circle.xml");
        Object beanA = context.getBean("circleA");
        System.out.println(beanA);
    }
}
|
otto-ringhofer/vaactor | example/src/main/scala/org/vaadin/addons/vaactor/example/Example.scala | package org.vaadin.addons.vaactor.example
import javax.servlet.annotation.WebServlet
import ExampleObject.globalCnt
import org.vaadin.addons.vaactor._
import com.vaadin.flow.component.button.Button
import com.vaadin.flow.component.html.Label
import com.vaadin.flow.component.orderedlayout.VerticalLayout
import com.vaadin.flow.component.page.Push
import com.vaadin.flow.router.Route
import com.vaadin.flow.server.VaadinServletConfiguration
import com.vaadin.flow.shared.communication.PushMode
import com.vaadin.flow.shared.ui.Transport
import com.vaadin.flow.theme.Theme
import com.vaadin.flow.theme.lumo.Lumo
import akka.actor.Actor.Receive
import akka.actor.{ Actor, Props }
/**
* @author <NAME>
*/
/** Holds one counter shared by every session and UI of the servlet. */
object ExampleObject {
  // backing field, guarded by this object's monitor
  private[this] var counter = 0

  /** Thread-safe read of the global counter. */
  def globalCnt: Int = this.synchronized { counter }

  /** Thread-safe write of the global counter. */
  def globalCnt_=(value: Int): Unit = this.synchronized { counter = value }
}
@WebServlet(urlPatterns = Array("/*"), asyncSupported = true)
@VaadinServletConfiguration(productionMode = false)
class ExampleServlet extends VaactorSessionServlet {
  // One ExampleSessionActor is created per Vaadin session.
  override val sessionProps: Props = Props(classOf[ExampleSessionActor])
}
@Route("")
@Theme(value = classOf[Lumo], variant = Lumo.DARK)
@Push(value = PushMode.AUTOMATIC, transport = Transport.WEBSOCKET)
class ExampleUI extends VerticalLayout with Vaactor.HasActor with Vaactor.HasSession {
  // counter local to this UI
  var uiCnt = 0

  // shows the latest reply received from the session actor
  val stateDisplay = new Label()

  setMargin(true)
  setSpacing(true)
  add(new Label("Vaactor Example"))
  add(
    new Button("Click Me", { _ =>
      uiCnt += 1
      // tell the session actor about the click; its reply lands in receive
      session ! s"Thanks for clicking! (uiCnt:$uiCnt)"
    })
  )
  add(stateDisplay)
  add(
    // This button is its own actor: it asks for the session state and
    // renders the reply into its caption.
    new Button("Show Counts") with Vaactor.HasActor {
      addClickListener(_ => session ! VaactorSession.RequestSessionState)
      override def receive: Receive = {
        case state: Int => setText(s"Show Counts - uiCnt is $uiCnt, sessionCnt is $state, globalCnt is $globalCnt")
      }
    }
  )

  // Messages addressed to this UI's actor (string replies from the session
  // actor); each one bumps the global counter and updates the label.
  override def receive: Receive = {
    case hello: String =>
      globalCnt += 1
      stateDisplay.setText(s"$hello (globalCnt:$globalCnt)")
  }
}
class ExampleSessionActor extends Actor with VaactorSession[Int] {
  // state is session counter
  override val initialSessionState = 0

  // Increment the per-session counter and echo the message back to the
  // sending UI, annotated with the new count.
  override val sessionBehaviour: Receive = {
    case msg: String =>
      sessionState += 1
      sender ! s"$msg (sessionCnt:$sessionState)"
  }
}
|
GinesOrtiz/seriedb | src/app/components/common/console/console.component.js | <gh_stars>0
import controller from './console.controller';
import template from './console.html';
import './console.scss';
export default {
controller,
controllerAs: 'vm',
template
}; |
davilinfo/icbct21 | spam/node_modules/@liskhq/lisk-api-client/dist-node/resources/dapps.js | <reponame>davilinfo/icbct21
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const api_method_1 = require("../api_method");
const api_resource_1 = require("../api_resource");
const constants_1 = require("../constants");
// API resource wrapper for the node's /dapps endpoint.
// NOTE: this is compiled TypeScript output (see the source map reference
// below) — changes belong in the .ts source, not here.
class DappsResource extends api_resource_1.APIResource {
    constructor(apiClient) {
        super(apiClient);
        this.path = '/dapps';
        // GET /dapps; bound so `this.path` resolves against this instance.
        this.get = api_method_1.apiMethod({
            method: constants_1.GET,
        }).bind(this);
    }
}
exports.DappsResource = DappsResource;
//# sourceMappingURL=dapps.js.map |
hello-chenchen/design-patterns | src/main/java/com/cc/designpatterns/proxypattern/ProxyComponent.java | package com.cc.designpatterns.proxypattern;
/**
 * Proxy that lazily creates the real ConcreateComponent on first use and
 * prefixes its result with "Proxy: ".
 */
public class ProxyComponent implements Component {
    // Real subject; created lazily on the first operate() call.
    // NOTE(review): this lazy initialization is not thread-safe — confirm
    // the proxy is only ever used from a single thread.
    private ConcreateComponent concreateComponent = null;

    /** Lazily create the real component, delegate, and prefix the result. */
    @Override
    public String operate() {
        if(null == concreateComponent) {
            concreateComponent = new ConcreateComponent();
        }
        return "Proxy: " + concreateComponent.operate();
    }
} |
2868463718/spring-framework-5.2.0.RELEASE | spring-zy-web/src/main/java/zy/blue7/controller/TestController.java | <reponame>2868463718/spring-framework-5.2.0.RELEASE<filename>spring-zy-web/src/main/java/zy/blue7/controller/TestController.java
package zy.blue7.controller;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
/**
 * Smoke-test controller: maps "index" and returns a literal response body.
 *
 * @author blue7
 * @create 2021/3/8 14:13
 */
@Controller
public class TestController {

    // @ResponseBody writes the return value straight to the HTTP response
    // (no view resolution).
    @ResponseBody
    @RequestMapping("index")
    public String index() {
        System.out.println("you are dog!");
        return "you are dog!";
    }
}
|
gdubicki/yamlpath | tests/test_wrappers_consoleprinter.py | import pytest
from types import SimpleNamespace
from ruamel.yaml.comments import CommentedMap, CommentedSeq, CommentedSet, TaggedScalar
from ruamel.yaml.scalarstring import PlainScalarString, FoldedScalarString
from yamlpath.enums import PathSegmentTypes
from yamlpath.wrappers import NodeCoords, ConsolePrinter
from yamlpath import YAMLPath
class Test_wrappers_ConsolePrinter():
    """Exercise ConsolePrinter's output channels (info/verbose/debug/warning/
    error/critical) against the verbose/quiet/debug flag combinations,
    capturing stdout/stderr with pytest's capsys fixture."""

    def test_info_noisy(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=False, debug=False)
        logger = ConsolePrinter(args)
        logger.info("Test")
        console = capsys.readouterr()
        assert console.out == "Test\n"

    def test_info_quiet(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=True, debug=False)
        logger = ConsolePrinter(args)
        logger.info("Test")
        console = capsys.readouterr()
        assert not console.out

    def test_verbose_off(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=False, debug=False)
        logger = ConsolePrinter(args)
        logger.verbose("Test")
        console = capsys.readouterr()
        assert not console.out

    def test_verbose_noisy(self, capsys):
        args = SimpleNamespace(verbose=True, quiet=False, debug=False)
        logger = ConsolePrinter(args)
        logger.verbose("Test")
        console = capsys.readouterr()
        assert console.out == "Test\n"

    def test_verbose_quiet(self, capsys):
        # quiet wins over verbose
        args = SimpleNamespace(verbose=True, quiet=True, debug=False)
        logger = ConsolePrinter(args)
        logger.verbose("Test")
        console = capsys.readouterr()
        assert not console.out

    def test_debug_off(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=False, debug=False)
        logger = ConsolePrinter(args)
        logger.debug("Test")
        console = capsys.readouterr()
        assert not console.out

    def test_debug_noisy(self, capsys):
        # Covers debug() rendering of anchored scalars, lists, dicts, tagged
        # nodes, NodeCoords, and folded scalars.
        args = SimpleNamespace(verbose=False, quiet=False, debug=True)
        logger = ConsolePrinter(args)
        anchoredkey = PlainScalarString("TestKey", anchor="KeyAnchor")
        anchoredval = PlainScalarString("TestVal", anchor="Anchor")
        foldedstr = "123456789 123456789 123456789"
        foldedstrfolds = [10, 20]
        foldedval = FoldedScalarString(foldedstr)
        foldedval.fold_pos = foldedstrfolds

        logger.debug(anchoredval)
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG: (&Anchor)TestVal",
        ]) + "\n" == console.out

        logger.debug(["test", anchoredval])
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG: [0]test<class 'str'>",
            "DEBUG: [1](&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
        ]) + "\n" == console.out

        logger.debug({"ichi": 1, anchoredkey: anchoredval})
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG: [ichi]1<class 'int'>",
            "DEBUG: [TestKey](&KeyAnchor,&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
        ]) + "\n" == console.out

        logger.debug({"ichi": 1, anchoredkey: "non-anchored value"})
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG: [ichi]1<class 'int'>",
            "DEBUG: [TestKey](&KeyAnchor,_)non-anchored value<class 'str'>",
        ]) + "\n" == console.out

        logger.debug({"ichi": 1, "non-anchored-key": anchoredval})
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG: [ichi]1<class 'int'>",
            "DEBUG: [non-anchored-key](_,&Anchor)TestVal<class 'ruamel.yaml.scalarstring.PlainScalarString'>",
        ]) + "\n" == console.out

        tagged_value = "value"
        tagged_value_node = TaggedScalar(tagged_value, tag="!tag")
        tagged_sequence = CommentedSeq(["a", "b"])
        tagged_sequence.yaml_set_tag("!raz")
        selfref_value = "self_referring"
        selfref_value_node = TaggedScalar(selfref_value, tag="!self_referring")

        logger.debug(
            "test_wrappers_consoleprinter:",
            prefix="test_debug_noisy: ",
            header="--- HEADER ---",
            footer="=== FOOTER ===",
            data_header="+++ DATA HEADER +++",
            data_footer="::: DATA FOOTER :::",
            data=CommentedMap({
                "key": "value",
                "tagged": tagged_value_node,
                tagged_value_node: "untagged value",
                selfref_value_node: selfref_value_node,
                "array": ["ichi", "ni", "san"],
                "tagged_array": tagged_sequence,
                "aoh": [{"id": 1},{"id": 2},{"id": 3}],
                "aoa": [[True, True], [True, False], [False, True]],
                "dod": {"a": {"b": {"c": "d"}}},
                "set": CommentedSet(["one", "two"]),
            })
        )
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG: test_debug_noisy: --- HEADER ---",
            "DEBUG: test_debug_noisy: test_wrappers_consoleprinter:",
            "DEBUG: test_debug_noisy: +++ DATA HEADER +++",
            "DEBUG: test_debug_noisy: [key]value<class 'str'>",
            "DEBUG: test_debug_noisy: [tagged]<_,!tag>value<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
            "DEBUG: test_debug_noisy: [value]<!tag,_>untagged value<class 'str'>",
            "DEBUG: test_debug_noisy: [self_referring]<!self_referring,!self_referring>self_referring<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
            "DEBUG: test_debug_noisy: [array][0]ichi<class 'str'>",
            "DEBUG: test_debug_noisy: [array][1]ni<class 'str'>",
            "DEBUG: test_debug_noisy: [array][2]san<class 'str'>",
            "DEBUG: test_debug_noisy: [tagged_array]<_,!raz>[0]a<class 'str'>",
            "DEBUG: test_debug_noisy: [tagged_array]<_,!raz>[1]b<class 'str'>",
            "DEBUG: test_debug_noisy: [aoh][0][id]1<class 'int'>",
            "DEBUG: test_debug_noisy: [aoh][1][id]2<class 'int'>",
            "DEBUG: test_debug_noisy: [aoh][2][id]3<class 'int'>",
            "DEBUG: test_debug_noisy: [aoa][0][0]True<class 'bool'>",
            "DEBUG: test_debug_noisy: [aoa][0][1]True<class 'bool'>",
            "DEBUG: test_debug_noisy: [aoa][1][0]True<class 'bool'>",
            "DEBUG: test_debug_noisy: [aoa][1][1]False<class 'bool'>",
            "DEBUG: test_debug_noisy: [aoa][2][0]False<class 'bool'>",
            "DEBUG: test_debug_noisy: [aoa][2][1]True<class 'bool'>",
            "DEBUG: test_debug_noisy: [dod][a][b][c]d<class 'str'>",
            "DEBUG: test_debug_noisy: [set]{one}<class 'str'>",
            "DEBUG: test_debug_noisy: [set]{two}<class 'str'>",
            "DEBUG: test_debug_noisy: ::: DATA FOOTER :::",
            "DEBUG: test_debug_noisy: === FOOTER ===",
        ]) + "\n" == console.out

        logger.debug(tagged_value_node)
        console = capsys.readouterr()
        # NOTE(review): this assert (and the next two like it) is missing
        # "== console.out" — it only checks that the joined string is truthy,
        # so the expected output is never actually compared.  Confirm the
        # expected text before completing the comparison.
        assert "\n".join([
            "DEBUG: <!tag>value<class 'ruamel.yaml.comments.TaggedScalar'>(<class 'str'>)",
        ])

        logger.debug(tagged_sequence)
        console = capsys.readouterr()
        # NOTE(review): incomplete assert — see note above.
        assert "\n".join([
            "DEBUG: [tagged_array]<!raz>[0]a<class 'str'>",
            "DEBUG: [tagged_array]<!raz>[1]b<class 'str'>",
        ])

        nc = NodeCoords(
            "value",
            dict(key="value"),
            "key",
            YAMLPath("doc_root.key"),
            [   (dict(doc_root=dict(key="value")), "doc_root"),
                (dict(key="value"), "key")],
            (PathSegmentTypes.KEY, "key")
        )
        logger.debug(
            "A node coordinate:", prefix="test_debug_noisy: ", data=nc)
        console = capsys.readouterr()
        assert "\n".join([
            "DEBUG: test_debug_noisy: A node coordinate:",
            "DEBUG: test_debug_noisy: (path)doc_root.key",
            "DEBUG: test_debug_noisy: (segment)[0]PathSegmentTypes.KEY<enum 'PathSegmentTypes'>",
            "DEBUG: test_debug_noisy: (segment)[1]key<class 'str'>",
            "DEBUG: test_debug_noisy: (node)value",
            "DEBUG: test_debug_noisy: (parent)[key]value<class 'str'>",
            "DEBUG: test_debug_noisy: (parentref)key",
            "DEBUG: test_debug_noisy: (ancestry)[0][0][doc_root][key]value<class 'str'>",
            "DEBUG: test_debug_noisy: (ancestry)[0][1]doc_root<class 'str'>",
            "DEBUG: test_debug_noisy: (ancestry)[1][0][key]value<class 'str'>",
            "DEBUG: test_debug_noisy: (ancestry)[1][1]key<class 'str'>",
        ]) + "\n" == console.out

        logger.debug(foldedval)
        console = capsys.readouterr()
        # NOTE(review): incomplete assert — see note above.
        assert "\n".join([
            "DEBUG: {}<class 'ruamel.yaml.scalarstring.FoldedScalarString'>,folded@{}".format(foldedstr, foldedstrfolds)
        ])

    def test_debug_quiet(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=True, debug=True)
        logger = ConsolePrinter(args)
        logger.debug("Test")
        console = capsys.readouterr()
        assert not console.out

    def test_warning_noisy(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=False, debug=False)
        logger = ConsolePrinter(args)
        logger.warning("Test")
        console = capsys.readouterr()
        assert console.out == "WARNING: Test\n"

    def test_warning_quiet(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=True, debug=False)
        logger = ConsolePrinter(args)
        logger.warning("Test")
        console = capsys.readouterr()
        assert not console.out

    def test_error_noisy_nonexit(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=False, debug=False)
        logger = ConsolePrinter(args)
        logger.error("Test")
        console = capsys.readouterr()
        assert console.err == "ERROR: Test\n"

    def test_error_quiet_nonexit(self, capsys):
        # errors print to stderr even when quiet
        args = SimpleNamespace(verbose=False, quiet=True, debug=False)
        logger = ConsolePrinter(args)
        logger.error("Test")
        console = capsys.readouterr()
        assert console.err == "ERROR: Test\n"

    def test_error_noisy_exit(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=False, debug=False)
        logger = ConsolePrinter(args)
        # passing an exit code makes error() terminate the process
        with pytest.raises(SystemExit):
            logger.error("Test", 27)
        console = capsys.readouterr()
        assert console.err == "ERROR: Test\n"

    def test_error_quiet_exit(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=True, debug=False)
        logger = ConsolePrinter(args)
        with pytest.raises(SystemExit):
            logger.error("Test", 27)
        console = capsys.readouterr()
        assert console.err == "ERROR: Test\n"

    def test_critical_noisy(self, capsys):
        # critical() always exits
        args = SimpleNamespace(verbose=False, quiet=False, debug=False)
        logger = ConsolePrinter(args)
        with pytest.raises(SystemExit):
            logger.critical("Test")
        console = capsys.readouterr()
        assert console.err == "CRITICAL: Test\n"

    def test_critical_quiet(self, capsys):
        args = SimpleNamespace(verbose=False, quiet=True, debug=False)
        logger = ConsolePrinter(args)
        with pytest.raises(SystemExit):
            logger.critical("Test")
        console = capsys.readouterr()
        assert console.err == "CRITICAL: Test\n"
|
xSHAD0Wx/tiksrv | src/config/src/config.cpp | #include "config/config.hpp"
#include <array>
#include <string>
namespace po = boost::program_options;
namespace {

// String names of the CLI options, indexed by config_key.
// NOTE(review): this order must match the config_key enum declared in
// config/config.hpp — verify they stay in sync when options are added.
constexpr std::array config_string_keys {"port", "backlog", "plugins",
                                         "config", "help", "version"};

} // namespace

namespace ts::config {

// Builds the boost::program_options description with every supported option
// and its default value.
config::config() : desc_ {"Allowed Options"} {
    desc_.add_options()(
        "port,p",
        po::value<std::uint16_t>()->default_value(defaults::listen_port),
        "The server listening port");
    desc_.add_options()(
        "backlog,b",
        po::value<std::uint32_t>()->default_value(defaults::liten_backlog),
        "The maximum number in the listening queue");
    desc_.add_options()(
        "plugins",
        po::value<std::string>()->default_value(defaults::plugins_path),
        "The path from which to load the application plugins");
    desc_.add_options()(
        "config,c",
        po::value<std::string>()->default_value(defaults::config_file),
        "The configuration file location");
    desc_.add_options()("help,h", "Show help message");
    desc_.add_options()("version,V", "Show app version");
}

// True when the option identified by `key` is present, by delegating to the
// string-keyed overload.
auto config::has(config_key key) const -> bool {
    return has(::config_string_keys[static_cast<std::uint32_t>(key)]);
}

// Access the parsed value for `key` via its string name.
auto config::operator[](config_key key) const -> const po::variable_value & {
    return operator[](::config_string_keys[static_cast<std::uint32_t>(key)]);
}

} // namespace ts::config
|
YB970902/BattlecityTeam1 | BattleCity/Observer.h | #pragma once
class GameEntity;

// Observer side of a publish/subscribe pair: implementations receive
// notifications about a GameEntity, discriminated by subject and event tags.
// NOTE(review): eSubjectTag/eEventTag are assumed to be enums declared in a
// globally included header — confirm.
class Observer
{
public:
    virtual ~Observer() = default;

    // Called by the subject when eventTag occurs on obj.
    virtual void OnNotify(GameEntity* obj, eSubjectTag subjectTag, eEventTag eventTag) = 0;
};
|
unfoldingWord-dev/tx-job-handler | all_obs_helps.py | #!/usr/bin/env python3
#
# Copyright (c) 2021 unfoldingWord
# http://creativecommons.org/licenses/MIT/
# See LICENSE file for details.
#
# TX WEBHOOK
#
# NOTE: This module name and function name are defined by the rq package and our own tx-enqueue-job package
# This code adapted by RJH June 2018 from tx-manager/client_webhook/ClientWebhook/process_webhook
# NOTE: rq_settings.py is executed at program start-up, reads some environment variables, and sets queue name, etc.
# job() function (at bottom here) is executed by rq package when there is an available entry in the named queue.
# Python imports
from typing import Dict, Tuple, Any, Optional, Type
import os
import sys
import traceback
import tempfile
from glob import glob
# Local imports
from rq_settings import prefix, debug_mode_flag
from general_tools.file_utils import unzip, remove_tree
from general_tools.url_utils import download_file
from app_settings.app_settings import AppSettings
from converters.converter import Converter
from door43_tools.subjects import SUBJECT_ALIASES
from door43_tools.subjects import ALIGNED_BIBLE, BIBLE, OPEN_BIBLE_STORIES, OBS_STUDY_NOTES, OBS_STUDY_QUESTIONS, \
OBS_TRANSLATION_NOTES, TRANSLATION_ACADEMY, TRANSLATION_WORDS, TRANSLATION_QUESTIONS, TSV_STUDY_NOTES, \
TSV_STUDY_QUESTIONS, TSV_TRANSLATION_NOTES, OBS_TRANSLATION_QUESTIONS
from converters.pdf.bible_pdf_converter import BiblePdfConverter
from converters.pdf.obs_pdf_converter import ObsPdfConverter
from converters.pdf.obs_sn_pdf_converter import ObsSnPdfConverter
from converters.pdf.obs_sq_pdf_converter import ObsSqPdfConverter
from converters.pdf.obs_tn_pdf_converter import ObsTnPdfConverter
from converters.pdf.obs_tq_pdf_converter import ObsTqPdfConverter
from converters.pdf.sn_pdf_converter import SnPdfConverter
from converters.pdf.sq_pdf_converter import SqPdfConverter
from converters.pdf.ta_pdf_converter import TaPdfConverter
from converters.pdf.tn_pdf_converter import TnPdfConverter
from converters.pdf.tq_pdf_converter import TqPdfConverter
from converters.pdf.tw_pdf_converter import TwPdfConverter
from door43_tools.dcs_api import DcsApi
sys.setrecursionlimit(1500) # Default is 1,000—beautifulSoup hits this limit with UST
# Columns are: 1/ converter name 2/ converter 3/ input formats 4/ resource types 5/ output format
CONVERTER_TABLE = (
(BIBLE, BiblePdfConverter, ('usfm'), SUBJECT_ALIASES[BIBLE] + SUBJECT_ALIASES[ALIGNED_BIBLE], 'pdf'),
(OPEN_BIBLE_STORIES, ObsPdfConverter, ('md','markdown','txt','text'), SUBJECT_ALIASES[OPEN_BIBLE_STORIES], 'pdf'),
(OBS_STUDY_NOTES, ObsSnPdfConverter, ('md', 'markdown', 'txt', 'text'), SUBJECT_ALIASES[OBS_STUDY_NOTES], 'pdf'),
(OBS_STUDY_QUESTIONS, ObsSqPdfConverter, ('md','markdown','txt','text'), SUBJECT_ALIASES[OBS_STUDY_QUESTIONS], 'pdf'),
(OBS_TRANSLATION_NOTES, ObsTnPdfConverter, ('md','markdown','txt','text'), SUBJECT_ALIASES[OBS_TRANSLATION_NOTES], 'pdf'),
(OBS_TRANSLATION_QUESTIONS, ObsTqPdfConverter, ('md','markdown','txt','text'), SUBJECT_ALIASES[OBS_TRANSLATION_QUESTIONS], 'pdf'),
(TRANSLATION_ACADEMY, TaPdfConverter, ('md','markdown','txt','text'), SUBJECT_ALIASES[TRANSLATION_ACADEMY], 'pdf'),
(TSV_STUDY_NOTES, SnPdfConverter, ('tsv'), SUBJECT_ALIASES[TSV_STUDY_NOTES], 'pdf'),
(TSV_STUDY_QUESTIONS, SqPdfConverter, ('tsv'), SUBJECT_ALIASES[TSV_STUDY_QUESTIONS], 'pdf'),
(TSV_TRANSLATION_NOTES, TnPdfConverter, ('tsv'), SUBJECT_ALIASES[TSV_TRANSLATION_NOTES], 'pdf'),
(TRANSLATION_QUESTIONS, TqPdfConverter, ('md','markdown','txt','text'), SUBJECT_ALIASES[TRANSLATION_QUESTIONS], 'pdf'),
(TRANSLATION_WORDS, TwPdfConverter, ('md','markdown','txt','text'), SUBJECT_ALIASES[TRANSLATION_WORDS], 'pdf'),
)
AppSettings()
if prefix not in ('', 'dev-'):
AppSettings.logger.critical(f"Unexpected prefix: '{prefix}' — expected '' or 'dev-'")
def get_converter_module(entry) -> Tuple[Optional[str],Any]:
    """Look up the converter whose name matches the entry's subject.

    Returns a (converter_name, converter_class) pair from CONVERTER_TABLE,
    or (None, None) when no row matches.
    """
    subject = entry['subject']
    for row in CONVERTER_TABLE:
        name, cls = row[0], row[1]
        if name == subject:
            return name, cls
    # No matching converter found
    return None, None
# end if get_converter_module function
def do_converting(param_dict, source_dir:str, converter_name:str, converter_class:Type[Converter]) -> None:
    """
    Instantiate the requested converter, run it over the downloaded source
    files, and record the conversion results in param_dict.

    :param dict param_dict: Will be updated for build log!
    :param str source_dir: Directory of the download source files
    :param str converter_name: Name of the converter
    :param class converter_class: Class of the converter
    Updates param_dict as a side-effect.
    """
    AppSettings.logger.debug(f"do_converting( {len(param_dict)} fields, {source_dir}, {converter_name}, {converter_class} )")
    # Strip the CDN domain so only the object key is passed for upload.
    if 'cdn.door43.org/' in param_dict['output']:
        cdn_file_key = param_dict['output'].split('cdn.door43.org/')[1] # Get the last part
    else:
        cdn_file_key = param_dict['output']
    converter = converter_class(param_dict['resource_type'],
                                source_dir=source_dir,
                                source_url=param_dict['source'],
                                cdn_file_key=cdn_file_key, # Key for uploading
                                identifier=param_dict['identifier'],
                                options={'debug_mode_flag': debug_mode_flag})
    convert_result_dict = converter.run()
    converter.close() # do cleanup after run
    # Copy the converter's outcome into the caller's build-log dict.
    param_dict['converter_success'] = convert_result_dict['success']
    param_dict['converter_info'] = convert_result_dict['info']
    param_dict['converter_warnings'] = convert_result_dict['warnings']
    param_dict['converter_errors'] = convert_result_dict['errors']
    param_dict['status'] = 'converted'
# end of do_converting function
def download_source_file(source_url, destination_folder):
    """
    Downloads the specified source file
    and unzips it if necessary.

    :param str source_url: The URL of the file to download
    :param str destination_folder: The directory where the downloaded file should be unzipped
    :return: None
    """
    AppSettings.logger.debug(f"download_source_file( {source_url}, {destination_folder} )")
    # Target path: destination folder + last path component of the URL.
    source_filepath = os.path.join(destination_folder, source_url.rpartition(os.path.sep)[2])
    AppSettings.logger.debug(f"source_filepath: {source_filepath}")
    try:
        AppSettings.logger.info(f"Downloading {source_url} …")
        # if the file already exists, remove it, we want a fresh copy
        if os.path.isfile(source_filepath):
            os.remove(source_filepath)
        download_file(source_url, source_filepath)
    finally:
        AppSettings.logger.debug("Downloading finished.")
    if source_url.lower().endswith('.zip'):
        try:
            AppSettings.logger.debug(f"Unzipping {source_filepath} …")
            # TODO: This is unsafe if the zipfile comes from an untrusted source
            unzip(source_filepath, destination_folder)
        finally:
            AppSettings.logger.debug("Unzipping finished.")
        # clean up the downloaded zip file
        if os.path.isfile(source_filepath):
            os.remove(source_filepath)
    # Log the (possibly truncated) directory listing for debugging.
    str_filelist = str(os.listdir(destination_folder))
    str_filelist_adjusted = str_filelist if len(str_filelist)<1500 \
        else f'{str_filelist[:1000]} …… {str_filelist[-500:]}'
    AppSettings.logger.debug(f"Destination folder '{destination_folder}' now has: {str_filelist_adjusted}")
    # NOTE(review): this joins the folder with a *stringified directory
    # listing*, not a filename — the result is not a valid path.  The
    # docstring also says ":return: None" yet a value is returned; confirm
    # whether any caller uses this return value and what it should be.
    return os.path.join(destination_folder, str_filelist_adjusted)
#end of download_source_file function
def process_obs_helps(pj_prefix, lang=None, subject=None):
    """
    Batch-generates OBS-helps output for catalog entries matching the filters.

    Queries the DCS catalog, downloads each matching repo zip into a temp
    folder, runs the matching converter, and uploads the result.

    :param str pj_prefix: prefix used only in the startup log message
    :param lang: language filter; NOTE(review): unconditionally overwritten with 'fr' below -- confirm intended
    :param subject: subject filter list; defaults to [OPEN_BIBLE_STORIES] when falsy
    """
    AppSettings.logger.info(f"PROCESSING {pj_prefix+' ' if pj_prefix else ''}obs_helps: {subject} {lang}")
    tempfile.tempdir = '/tmp'
    if not subject:
        # subject = [OPEN_BIBLE_STORIES, OBS_TRANSLATION_QUESTIONS, OBS_TRANSLATION_NOTES, OBS_STUDY_QUESTIONS, OBS_STUDY_NOTES]
        # subject = [OBS_TRANSLATION_NOTES, OBS_TRANSLATION_QUESTIONS]
        # subject = [OBS_TRANSLATION_NOTES]
        # subject = [OBS_STUDY_NOTES, OPEN_BIBLE_STORIES]
        subject = [OPEN_BIBLE_STORIES]
        # subject = [OBS_TRANSLATION_QUESTIONS]
    stage = 'latest'
    owner = ['unfoldingWord']
    regenerate = 'all'
    # NOTE(review): clobbers the caller-supplied lang argument -- confirm intended
    lang = 'fr'
    api = DcsApi(dcs_domain="https://git.door43.org", debug=True)
    response = api.query_catalog(subjects=subject, owners=owner, langs=lang, stage=stage, order='desc')
    if 'ok' not in response or 'data' not in response or not len(response['data']):
        AppSettings.logger.error(f'No entries for {subject}')
        exit(1)
    print("TO BE GENERATED:")
    for entry in response['data']:
        print(f" {entry['lang_code']} :: {entry['subject']} :: {entry['owner']} :: {entry['repo']}")
    for entry in response['data']:
        AppSettings.logger.debug(f"entry: {entry}")
        # Setup a temp folder to use
        # Move everything down one directory level for simple delete
        outdir = os.path.join(entry['owner'], entry['lang_code'], entry['repo'].split('_')[1])
        base_temp_dir_name = os.path.join('/tmp', 'working', entry['subject'], outdir)
        output_dir = os.path.join(base_temp_dir_name, 'Output')
        pdfs = glob(os.path.join(output_dir, '*.pdf'))
        # Interactive skip/regenerate prompt when output already exists
        if len(pdfs) > 0 and regenerate != 'all':
            if regenerate == 'none':
                continue
            reply = str(input(f"{pdfs[0]} exists. Generate PDF anyway? " + ' (y/N/all/none): ')).lower().strip()
            if not reply or reply[0] != 'y' and reply != 'all':
                if reply == 'none':
                    regenerate = 'none'
                continue
            if reply == 'all':
                regenerate = 'all'
        AppSettings.logger.debug(f"base_temp_dir_name = {base_temp_dir_name}")
        if entry['subject'] == OPEN_BIBLE_STORIES:
            # NOTE(review): recomputes outdir/base_temp_dir_name with identical
            # expressions as above -- appears redundant; confirm before removing.
            outdir = os.path.join(entry['owner'], entry['lang_code'], entry['repo'].split('_')[1])
            base_temp_dir_name = os.path.join('/tmp', 'working', entry['subject'], outdir)
        try:
            os.makedirs(base_temp_dir_name)
        except Exception as e:
            # Folder already existing is treated as non-fatal; contents are logged
            AppSettings.logger.critical(f"SetupTempFolder threw an exception: {e}: {traceback.format_exc()}")
            AppSettings.logger.critical(f"Oh, folder {base_temp_dir_name} already existed!")
            AppSettings.logger.info(f"It contained {os.listdir(base_temp_dir_name)}")
        # Download and unzip the specified source file
        AppSettings.logger.debug(f"Getting source file from {entry['zipball_url']} …")
        download_source_file(entry['zipball_url'], base_temp_dir_name)
        # Find correct source folder
        source_folder_path = os.path.join(base_temp_dir_name, entry['repo'])
        converter_name, converter = get_converter_module(entry)
        AppSettings.logger.info(f"Got converter = {converter_name}")
        build_log_dict = {
            'resource_type': entry['subject'],
            'identifier': f"{entry['owner']}--{entry['repo']}--{entry['branch_or_tag_name']}",
            'output': os.path.join('/tmp', 'working', entry['subject'], outdir, f'{entry["repo"]}-{entry["branch_or_tag_name"]}.zip'),
            'source': entry['zipball_url']
        }
        if converter:
            build_log_dict['status'] = 'converting'
            build_log_dict['message'] = 'tX job converting…'
            build_log_dict['convert_module'] = converter_name
            do_converting(build_log_dict, source_folder_path, converter_name, converter)
        else:
            error_message = f"No converter was found to convert {entry['subject']}"
            AppSettings.logger.error(error_message)
            build_log_dict['convert_module'] = 'NO CONVERTER'
            build_log_dict['converter_success'] = 'false'
            build_log_dict['converter_info'] = []
            build_log_dict['converter_warnings'] = []
            build_log_dict['converter_errors'] = [error_message]
        build_log_dict['status'] = 'finished'
        # NOTE(review): 'prefix' and 'debug_mode_flag' are not defined in this
        # scope -- presumably module-level globals, otherwise this raises
        # NameError. TODO confirm; 'pj_prefix' may have been intended.
        if prefix and debug_mode_flag:
            AppSettings.logger.debug(f"Temp folder '{base_temp_dir_name}' has been left on disk for debugging!")
        else:
            remove_tree(base_temp_dir_name) # cleanup
        str_build_log = str(build_log_dict)
        # Truncate very long build logs before logging
        str_build_log_adjusted = str_build_log if len(str_build_log)<1500 \
                            else f'{str_build_log[:1000]} …… {str_build_log[-500:]}'
        upload_and_update(entry, output_dir)
        AppSettings.logger.info(f"Finished: {str_build_log_adjusted}")
#end of process_obs_helps
if __name__ == '__main__':
    # CLI entry point: any positional args are forwarded as lang/subject.
    process_obs_helps("dev", *sys.argv[1:])
|
TheBator/szoftlab4 | SuchTowers/app/src/main/java/hu/bme/aut/suchtowers/GameView.java | <filename>SuchTowers/app/src/main/java/hu/bme/aut/suchtowers/GameView.java
package hu.bme.aut.suchtowers;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.os.Build;
import android.util.AttributeSet;
import android.view.SurfaceView;
import android.view.ViewTreeObserver;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import hu.bme.aut.suchtowers.model.Enemy;
import hu.bme.aut.suchtowers.model.Game;
import hu.bme.aut.suchtowers.model.GameObserver;
import hu.bme.aut.suchtowers.model.Obstacle;
import hu.bme.aut.suchtowers.model.Projectile;
import hu.bme.aut.suchtowers.model.Tower;
import hu.bme.aut.suchtowers.view.GameDrawable;
import hu.bme.aut.suchtowers.view.GraphicEnemy;
import hu.bme.aut.suchtowers.view.GraphicObstacle;
import hu.bme.aut.suchtowers.view.GraphicProjectile;
import hu.bme.aut.suchtowers.view.GraphicTower;
/**
 * Surface-based game view. Renders the current game state and mirrors model
 * events (via {@link GameObserver} callbacks) into a sorted list of
 * {@link GameDrawable} objects that are painted on each draw pass.
 */
public class GameView extends SurfaceView implements GameObserver, Serializable {

    // Objects rendered each frame; every access is guarded by synchronized(drawables).
    private final List<GameDrawable> drawables = new ArrayList<GameDrawable>();
    private transient Game game;
    private int magic;          // current magic amount shown in the HUD
    private String msg = "";    // end-of-game message ("" while the game is running)
    private GameActivity activity;
    private Paint p = new Paint();

    /**
     * Deferred initialization of the graphic elements until the View's
     * dimensions can be queried.
     */
    private Runnable initRunnable;

    public GameView(Context context) {
        super(context);
        init(null, 0);
    }

    public GameView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(attrs, 0);
    }

    public GameView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(attrs, defStyle);
    }

    /** Wires the game model and hosting activity into this view. */
    public void setGame(Game game, GameActivity activity) {
        // this.measure(MeasureSpec.EXACTLY, MeasureSpec.EXACTLY);
        this.game = game;
        magic = game.getMagic();
        this.activity = activity;
    }

    /** Registers the deferred-init callback run once layout dimensions are known. */
    public void setInit(Runnable init) {
        initRunnable = init;
    }

    @Override
    protected void onMeasure(int width, int height) {
        // NOTE(review): the parameters are MeasureSpec-encoded values, not raw
        // pixel sizes; they are ignored here and getWidth()/getHeight() is used.
        super.onMeasure(width, height);
        Game.updateViewDimensions(getWidth(), getHeight());
    }

/*    public void onContinue(Game game, GameActivity activity) {
        this.game = game;
        this.activity = activity;
        magic = game.getMagic();
    }
*/
    private void init(AttributeSet attrs, int defStyle) {
        setWillNotDraw(false);
        final ViewTreeObserver vto = getViewTreeObserver();
        // Run initRunnable exactly once, after the first global layout, when
        // the view's dimensions are finally available.
        vto.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
            boolean wasCalled = false;
            @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
            @Override
            public void onGlobalLayout() {
                if (!wasCalled) {
                    Game.updateViewDimensions(getWidth(), getHeight());
                    initRunnable.run();
                    wasCalled = true;
                }
                if (vto.isAlive())
                    vto.removeOnGlobalLayoutListener(this);
            }
        });
    }

    @Override
    protected void onDraw(Canvas canvas) {
        // NOTE(review): locking the SurfaceHolder inside onDraw is unusual for a
        // SurfaceView, and lockCanvas() may return null -- TODO confirm threading.
        canvas = getHolder().lockCanvas();
        if (!isInEditMode()) {
            super.onDraw(canvas);
            synchronized (drawables) {
                for (GameDrawable d : drawables) {
                    d.draw(canvas);
                }
            }
        }
        else {
            // Layout-editor preview: just show the background image
            Bitmap bp = BitmapFactory.decodeResource(getResources(), R.drawable.background);
            canvas.drawBitmap(bp, 0, 0, p);
        }
        // HUD: magic counter (top right) and center end-of-game message
        p.setColor(Color.WHITE);
        p.setTextSize(40);
        p.setTextAlign(Paint.Align.RIGHT);
        canvas.drawText(getResources().getText(R.string.magic) + ": " + magic, getWidth() - 5, 40, p);
        p.setTextSize(80);
        p.setTextAlign(Paint.Align.CENTER);
        canvas.drawText(msg, getWidth() / 2, (getHeight() - 80) / 2, p);
        getHolder().unlockCanvasAndPost(canvas);
    }

    /** Requests a redraw from a non-UI thread. */
    @Override
    public void drawAll() {
        postInvalidate();
    }

    /** Adds the graphic counterpart of a newly spawned enemy. */
    @Override
    public void enemyAdded(Enemy e) {
        synchronized (drawables) {
            drawables.add(new GraphicEnemy(e, getResources()));
        }
    }

    /** Shows the "lost" message and notifies the activity. */
    @Override
    public void gameLost() {
        msg = getResources().getString(R.string.game_lost);
        drawAll();
        activity.gameEnded();
    }

    /** Shows the "won" message and notifies the activity. */
    @Override
    public void gameWon() {
        msg = getResources().getString(R.string.game_won);
        drawAll();
        activity.gameEnded();
    }

    @Override
    public void projectileAdded(Projectile p) {
        synchronized (drawables) {
            drawables.add(new GraphicProjectile(p, getResources()));
        }
    }

    // Removal relies on GraphicProjectile equality being based on the wrapped model object.
    @Override
    public void projectileExploded(Projectile p) {
        synchronized (drawables) {
            drawables.remove(new GraphicProjectile(p, getResources()));
        }
    }

    @Override
    public void magicChanged(int amount) {
        magic = amount;
    }

    @Override
    public void enemyDied(Enemy e) {
        synchronized (drawables) {
            drawables.remove(new GraphicEnemy(e, getResources()));
        }
    }

    // Marks the graphic tower matching t as enchanted (gem shown).
    @Override
    public void towerEnchanted(Tower t) {
        synchronized (drawables) {
            GraphicTower gt = (GraphicTower) drawables.get(drawables.indexOf(new GraphicTower(t, getResources())));
            gt.setGem();
        }
    }

    /**
     * Adds a gem to an obstacle that is already in the draw list.
     */
    public void obstacleEnchanted(Obstacle o) {
        synchronized (drawables) {
            GraphicObstacle go = (GraphicObstacle) drawables.get(drawables.indexOf(new GraphicObstacle(o, getResources())));
            go.setGem();
        }
    }

    /**
     * Adds a tower to the objects to be drawn.
     */
    @Override
    public void towerAdded(Tower t) {
        synchronized (drawables) {
            drawables.add(new GraphicTower(t, getResources()));
            Collections.sort(drawables, Collections.reverseOrder());
        }
    }

    /**
     * Adds an obstacle to the objects to be drawn.
     */
    @Override
    public void obstacleAdded(Obstacle o) {
        synchronized (drawables) {
            drawables.add(new GraphicObstacle(o, getResources()));
            Collections.sort(drawables, Collections.reverseOrder());
        }
    }

    /** Adds an arbitrary drawable, keeping the draw list sorted. */
    public void addDrawable(GameDrawable d) {
        synchronized (drawables) {
            drawables.add(d);
            Collections.sort(drawables, Collections.reverseOrder());
        }
    }
}
|
Zenrer/p1xt-guides | evidence/tier-0/javascript/ryan_macdonald_intro_javascript_problems/is_substring.js | function isSubstring(searchString, subString) {
return searchString.split(' ').includes(subString)
}
// Quick manual checks (expected output: true, then false)
console.log(isSubstring("time to program", "time"))
console.log(isSubstring("Jump for joy", "joys")) |
twitter-zuiwanyuan/finatra | inject/inject-core/src/test/scala/com/twitter/inject/Test.scala | package com.twitter.inject
import com.twitter.util.{Await, Future}
import java.util.TimeZone
import org.apache.commons.io.IOUtils
import org.joda.time.{DateTimeZone, Duration}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpec}
@RunWith(classOf[JUnitRunner])
/**
 * Base class for WordSpec-style tests: pins the JVM and Joda default time
 * zones to UTC on construction and provides small assertion/IO helpers.
 */
abstract class Test
  extends WordSpec
  with BeforeAndAfterAll
  with BeforeAndAfterEach
  with Matchers
  with Logging {

  /* Constructor */
  // Make date/time-dependent assertions deterministic across machines.
  setUtcTimeZone()

  /* Overrides */
  override protected def afterAll() = {
    super.afterAll()
    // Shut the shared pool down so the JVM can exit promptly after the suite.
    pool.executor.shutdown()
  }

  /* Protected */
  // Lazily created so suites that never use it pay no start-up cost.
  protected lazy val pool = PoolUtils.newUnboundedPool("Test " + getClass.getSimpleName)

  /** Sets both the Joda and java.util default time zones to UTC. */
  protected def setUtcTimeZone() = {
    DateTimeZone.setDefault(DateTimeZone.UTC)
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"))
  }

  @deprecated("Use com.twitter.inject.Mockito#reset", "since 2-22-2015")
  protected def resetMocks(mocks: AnyRef*) {
    for (mock <- mocks) {
      trace("Resetting " + mock)
      org.mockito.Mockito.reset(mock)
    }
  }

  /** Reads a classpath resource fully into a String. */
  protected def resourceAsString(resource: String) = {
    IOUtils.toString(
      getClass.getResourceAsStream(resource))
  }

  /** Blocks the current thread for the given Joda duration. */
  protected def sleep(duration: Duration, verbose: Boolean = false) {
    if (verbose) {
      println("Starting sleep for " + duration)
    }

    Thread.sleep(duration.getMillis)

    if (verbose) {
      println("Finished sleep for " + duration)
    }
  }

  /** Awaits both futures and asserts their results are equal. */
  protected def assertFuture[A](result: Future[A], expected: Future[A]) {
    val resultVal = Await.result(result)
    val expectedVal = Await.result(expected)
    resultVal should equal(expectedVal)
  }

  /** Awaits the future and asserts it equals the given plain value. */
  protected def assertFutureValue[A](result: Future[A], expected: A) {
    val resultVal = Await.result(result)
    val expectedVal = Await.result(Future.value(expected))
    resultVal should equal(expectedVal)
  }

  /**
   * Asserts the future fails with (a subtype of) T and returns the caught
   * exception for further inspection; fails the test otherwise.
   */
  protected def assertFailedFuture[T <: Throwable : Manifest](result: Future[_]): T = {
    try {
      Await.result(result)
      fail("Expected exception " + manifest[T].runtimeClass + " never thrown")
    } catch {
      case e: Throwable =>
        if (manifest[T].runtimeClass.isAssignableFrom(e.getClass))
          e.asInstanceOf[T]
        else
          fail("Expected exception " + manifest[T].runtimeClass + " but caught " + e)
    }
  }

  /** UTF-8 bytes of the given string. */
  protected def bytes(str: String) = {
    str.getBytes("UTF-8")
  }
}
|
colinRawlings/osparc-simcore | services/api-server/src/simcore_service_api_server/core/redoc.py | <filename>services/api-server/src/simcore_service_api_server/core/redoc.py
from typing import Callable, Dict
from fastapi import FastAPI
from fastapi.applications import HTMLResponse, Request
from fastapi.openapi.docs import get_redoc_html
# TODO: move all these static resources away from the server!
# Static assets referenced by the generated ReDoc page:
FAVICON = "https://osparc.io/resource/osparc/favicon.png"
LOGO = "https://raw.githubusercontent.com/ITISFoundation/osparc-manual/b809d93619512eb60c827b7e769c6145758378d0/_media/osparc-logo.svg"
PYTHON_CODE_SAMPLES_BASE_URL = "https://raw.githubusercontent.com/ITISFoundation/osparc-simcore-python-client/master/code_samples"
def compose_long_description(description: str) -> str:
    """Builds the markdown long-description shown on the API docs page.

    The given *description* is emphasized and followed by a short section
    pointing at the Python client library.
    """
    sections = (
        f"**{description}**",
        "## Python Library",
        "- Check the [documentation](https://itisfoundation.github.io/osparc-simcore-python-client)",
        "- Quick install: ``pip install git+https://github.com/ITISFoundation/osparc-simcore-python-client.git``",
    )
    return "\n".join(sections) + "\n"
def add_vendor_extensions(openapi_schema: Dict):
    """Attaches ReDoc vendor extensions to *openapi_schema*, in place.

    SEE https://github.com/Redocly/redoc/blob/master/docs/redoc-vendor-extensions.md
    """
    x_logo = {
        "url": LOGO,
        "altText": "osparc-simcore logo",
    }
    openapi_schema["info"]["x-logo"] = x_logo

    #
    # TODO: load code samples add if function is contained in sample
    # TODO: See if openapi-cli does this already
    # TODO: check that all url are available before exposing
    # openapi_schema["paths"][f"/{api_vtag}/meta"]["get"]["x-code-samples"] = [
    #     {
    #         "lang": "python",
    #         "source": {"$ref": f"{PYTHON_CODE_SAMPLES_BASE_URL}/meta/get.py"},
    #     },
    # ]
def create_redoc_handler(app: FastAPI) -> Callable:
    """Builds an async request handler serving the ReDoc page for *app*'s OpenAPI spec."""

    async def _redoc_html(_req: Request) -> HTMLResponse:
        # Title suffix distinguishes the redoc tab from swagger-ui in the browser.
        return get_redoc_html(
            openapi_url=app.openapi_url,
            title=app.title + " - redoc",
            redoc_favicon_url=FAVICON,
        )

    return _redoc_html
|
linksdl/futuretec-project-self_driving_cars_projects | introduction-for-point-cloud_open3d/demo/deer_demo.py | <reponame>linksdl/futuretec-project-self_driving_cars_projects
"""
# !/usr/bin/env python
# -*- coding: utf-8 -*-
@Time : 2022/3/6 21:53
@Author : <EMAIL>
@ProjectName : introduction-for-point-cloud-open3d_v1
@File : deer_demo.py
"""
import open3d as o3d

# Load the point cloud from disk, print its summary, and show it in
# Open3D's interactive viewer (blocks until the window is closed).
house = o3d.io.read_point_cloud("deer.ply")
print(house)
o3d.visualization.draw_geometries([house])
|
petrov9/chart-fx | chartfx-math/src/main/java/de/gsi/math/functions/Function1D.java | package de.gsi.math.functions;
import java.security.InvalidParameterException;
import de.gsi.dataset.DataSet;
import de.gsi.dataset.spi.DefaultErrorDataSet;
/**
 * generic one-dimensional function interface
 *
 * @author rstein
 */
public interface Function1D extends Function {

    /**
     * Samples the function at nsamples uniformly spaced points starting at xmin
     * with step (xmax - xmin) / nsamples; xmax itself is not sampled.
     *
     * @param xmin min. x range
     * @param xmax max x range
     * @param nsamples number of sample points
     * @return DataSet representation of the function
     * @throws InvalidParameterException if xmin &gt; xmax or nsamples &lt;= 0
     */
    default DataSet getDataSetEstimate(final double xmin, final double xmax, final int nsamples) {
        if (xmin > xmax || nsamples <= 0) {
            // BUG FIX: the message previously reported a wrong, misspelt type
            // name ("AbstractFunciton1D") that does not exist in this package.
            throw new InvalidParameterException("Function1D::getDataSetEstimate(" + xmin + "," + xmax + ","
                    + nsamples + ") - " + "invalid range");
        }
        final double[] xValues = new double[nsamples];
        final double step = (xmax - xmin) / nsamples;
        for (int i = 0; i < nsamples; i++) {
            xValues[i] = xmin + i * step;
        }
        return getDataSetEstimate(xValues);
    }

    /**
     * Evaluates the function at each given x and wraps the result (with zero
     * symmetric errors) into a DataSet.
     *
     * @param xValues X coordinate for which the function should be evaluated
     * @return DataSet representation of the function
     */
    default DataSet getDataSetEstimate(final double[] xValues) {
        return new DefaultErrorDataSet(getName(), xValues, getValues(xValues), new double[xValues.length],
                new double[xValues.length], xValues.length, true);
    }

    /**
     * Evaluates the function at a single point.
     *
     * @param x the abscissa
     * @return f(x)
     */
    double getValue(final double x);

    /**
     * Vectorised form of {@link #getValue(double)}.
     *
     * @param x array of abscissae; must not be null
     * @return array of f(x) values of the same length as x
     * @throws IllegalArgumentException if x is null
     */
    default double[] getValues(final double[] x) {
        if (x == null) {
            throw new IllegalArgumentException("x array argument is null");
        }
        final double[] y = new double[x.length];
        for (int i = 0; i < x.length; i++) {
            y[i] = getValue(x[i]);
        }
        return y;
    }
}
|
FxmlesXD/PVZ-Glitch | pvzclass/Classes/CardSlot.cpp | #include "..\PVZ.h"
PVZ::CardSlot::CardSlot(int address)
{
BaseAddress = Memory::ReadMemory<int>(address + 0x144);
}
// Returns a newly allocated SeedCard wrapper for the given slot, or NULL for
// an out-of-range index. The caller owns the returned object.
PVZ::CardSlot::SeedCard* PVZ::CardSlot::GetCard(int index)
{
	// Guard clause: only slots 0..9 exist.
	if (index < 0 || index >= 10)
		return NULL;

	// Card records start at offset 0x24 and are 0x50 bytes apart.
	return new SeedCard(BaseAddress + 0x24 + index * 0x50);
}
|
deeptexas-ai/test | Common/script/src/csv_row.cpp | /**
* \file csv_row.cpp
* \brief csv脚本文件行数据类函数实现
*/
#include "pch.h"
#include "csv_row.h"
#include "csv_adapter.h"
namespace dtscript
{
	/**
	 * \brief Constructor: initializes all members to an empty, unloaded state.
	 */
	CsvRow::CsvRow(void)
	{
		m_pAdapter = NULL;
		m_pData = NULL;
		m_nSize = 0;
		m_szData = NULL;
	}
	/**
	 * \brief Destructor: releases the variant array and the owned copy of the
	 *        row text allocated by LoadData().
	 */
	CsvRow::~CsvRow(void)
	{
		if (NULL != m_pData)
		{
			delete [] m_pData;
			m_pData = NULL;
		}
		if (NULL != m_szData)
		{
			delete [] m_szData;
			m_szData = NULL;
		}
		m_nSize = 0;
		m_pAdapter = NULL;
	}
	/**
	 * \brief Loads one row of csv data, splitting it into variables in place.
	 * \param pAdapter csv file adapter (used later for name-based column lookup)
	 * \param szData raw text of the row
	 * \param cSeparator field separator character
	 * \param bAllowSeries whether consecutive separators are allowed; e.g. for ",,,"
	 *        true yields 3 empty fields, false collapses them into 1
	 * \return true on success, false otherwise
	 */
	bool CsvRow::LoadData(CsvAdapter *pAdapter, const char *szData, char cSeparator /*= ','*/, bool bAllowSeries /*= true*/)
	{
		const size_t VAR_MAX_PER_ROW = 1024;	// maximum number of variables in one row

		if (NULL == szData)
		{
			return false;
		}

		size_t nDataLen = strlen(szData);
		m_szData = new char[nDataLen + 1];	// '\0'
		memcpy(m_szData, szData, nDataLen);
		m_szData[nDataLen] = 0;

		bool bFlag = false;		// inside-quotes flag
		bool bNewVar = true;	// new-variable-starting flag
		size_t nVarOffset[VAR_MAX_PER_ROW] = { 0 };	// start offset of each variable

		// Parse the data: separators are overwritten with '\0' so the row text
		// is split into several variables in place. Quoted sections may contain
		// '\r' or '\n', so a line break inside quotes does not end the row.
		size_t nRowLen = 0;					// number of characters parsed so far
		char c = m_szData[nRowLen++];		// character currently being parsed
		while ( c != 0 )
		{
			// opening quote
			if (!bFlag && c == '\"')
			{
				bFlag = true;
			}
			// closing quote
			else if (bFlag && c == '\"')
			{
				bFlag = false;
			}

			// a line break outside quotes means the row is complete; stop
			if ( (c == '\r' || c == '\n') && !bFlag )
			{
				break;
			}

			// separator found
			if (c == cSeparator)
			{
				// replace the separator with a string terminator
				m_szData[nRowLen - 1] = 0;

				// consecutive separators allowed: each gap is its own variable
				if (bAllowSeries)
				{
					// First parse of a variable: we are at a leading "," or right
					// after finishing one (",,"); record the empty string as a
					// new variable.
					if (bNewVar)
					{
						nVarOffset[m_nSize++] = nRowLen - 1;
					}
					// Some characters of the variable were already consumed: the
					// variable is finished, re-arm the new-variable flag.
					else
					{
						bNewVar = true;
					}
				}
				// Consecutive separators not allowed: the following separators
				// have all been turned into terminators already; no new start
				// offset is recorded in nVarOffset.
				else
				{
					bNewVar = true;
				}
			}
			// not a separator
			else
			{
				// first character of a variable
				if (bNewVar)
				{
					// record where the variable starts
					nVarOffset[m_nSize++] = nRowLen - 1;

					// following characters belong to this variable
					bNewVar = false;
				}
			}

			// fetch the next character
			c = m_szData[nRowLen++];

			// stop once the variable count reaches the maximum
			if (m_nSize >= VAR_MAX_PER_ROW)
			{
				break;
			}
		}

		if (m_nSize == 0)
		{
			return false;
		}

		// build the variant array
		m_pData = new CsvVariant[m_nSize];
		if (NULL == m_pData)
		{
			return false;
		}

		// fill in each variant
		for (size_t i = 0; i < m_nSize; ++i)
		{
			m_pData[i].m_Type = CsvVariant::VT_VALUE;

			// m_szData has been split into several strings by the separator
			// rewriting above; index by the recorded start offset
			m_pData[i].m_Value.szValue = &m_szData[nVarOffset[i]];
		}

		m_pAdapter = pAdapter;
		return true;
	}
	/**
	 * \brief Gets the number of data columns in this row.
	 * \return number of columns
	 */
	size_t CsvRow::Size()
	{
		return m_nSize;
	}
	/**
	 * \brief Gets the data of the specified column (delegates to GetData).
	 * \param nIndex column index
	 * \return data of that column
	 */
	CsvVariant & CsvRow::operator [] (size_t nIndex)
	{
		return GetData(nIndex);
	}
	/**
	 * \brief Gets the data for the given field name (delegates to GetData).
	 * \param szName field name
	 * \return data of the named field
	 */
	CsvVariant & CsvRow::operator [] (const char *szName)
	{
		return GetData(szName);
	}
	/**
	 * \brief Gets the data of the specified column.
	 * \param nIndex column index
	 * \return data of that column, or the shared empty variant when out of range
	 */
	CsvVariant & CsvRow::GetData(size_t nIndex)
	{
		if (nIndex >= m_nSize)
		{
			return CsvVariant::s_EmptyVar;
		}
		return m_pData[nIndex];
	}
	/**
	 * \brief Gets the data for the given field name via the adapter's
	 *        name-to-index lookup.
	 * \param szName field name
	 * \return data of the named field, or the shared empty variant when the
	 *         adapter is missing or the name is unknown
	 */
	CsvVariant & CsvRow::GetData(const char *szName)
	{
		if (NULL == m_pAdapter)
		{
			return CsvVariant::s_EmptyVar;
		}

		int32 nIndex = m_pAdapter->FindPropName(szName);
		if (nIndex < 0)
		{
			return CsvVariant::s_EmptyVar;
		}
		return GetData(nIndex);
	}
}
|
StratifyLabs/LvglAPI | library/include/lvgl/Bar.hpp | #ifndef LVGLAPI_LVGL_BAR_HPP
#define LVGLAPI_LVGL_BAR_HPP
#include "ObjectAccess.hpp"
namespace lvgl {
// Value range with a current value; defaults to [0, 100] with value 0.
// API_AF generates the minimum/maximum/value accessors and fluent setters.
class Range {
public:
  Range() = default;
  Range(s16 minimum, s16 maximum) : m_minimum(minimum), m_maximum(maximum) {}

  // Parses "<value>/<maximum>"; minimum stays 0, and missing fields fall
  // back to value=0 and maximum=100.
  static Range from_string(const var::StringView value) {
    // min is zero, string is expressed as <value>/<maximum>
    const auto tokens = value.split("/");
    return Range()
      .set_value(tokens.count() > 0 ? tokens.at(0).to_integer() : 0)
      .set_maximum(tokens.count() > 1 ? tokens.at(1).to_integer() : 100);
  }

  // Widens the range as needed to contain `value` (defined out of line).
  Range & expand_to_include(s16 value);

private:
  API_AF(Range, s16, minimum, 0);
  API_AF(Range, s16, maximum, 100);
  API_AF(Range, s16, value, 0);
};
// CRTP mixin exposing the lvgl bar API on a derived widget type.
// Setters return Derived& so calls can be chained fluently.
template <class Derived> class BarAccess : public ObjectAccess<Derived> {
public:
  BarAccess() = default;
  explicit BarAccess(u32 type) : ObjectAccess<Derived>(type) {}
  explicit BarAccess(const char *name) : ObjectAccess<Derived>(name) {}

  // Applies the range's [minimum, maximum] to the underlying lvgl bar.
  Derived &set_range(const Range &value) {
    Object::api()->bar_set_range(Object::object(), value.minimum(), value.maximum());
    return static_cast<Derived &>(*this);
  }

  // Sets the start value (used by range-mode bars), optionally animated.
  Derived &set_start_value(s16 start_value, IsAnimate is_animate = IsAnimate::yes) {
    Object::api()->bar_set_start_value(
      Object::object(), start_value, lv_anim_enable_t(is_animate));
    return static_cast<Derived &>(*this);
  }

  // Sets the current value, optionally animated.
  Derived &set_value(s16 value, IsAnimate is_animate = IsAnimate::yes) {
    Object::api()->bar_set_value(
      Object::object(), value, static_cast<lv_anim_enable_t>(is_animate));
    return static_cast<Derived &>(*this);
  }

  // Reads the bar's current min/max back into a Range (value field unset).
  API_NO_DISCARD Range get_range() const {
    return Range()
      .set_minimum(Object::api()->bar_get_min_value(Object::object()))
      .set_maximum(Object::api()->bar_get_max_value(Object::object()));
  }

  // Sets range and value together; the range is only re-applied when it
  // actually differs from the bar's current one.
  Derived &set_value(const Range &value, IsAnimate is_animate = IsAnimate::yes) {
    const auto range = get_range();
    if (value.minimum() != range.minimum() || value.maximum() != range.maximum()) {
      set_range(value);
    }
    Object::api()->bar_set_value(
      Object::object(), value.value(), static_cast<lv_anim_enable_t>(is_animate));
    return static_cast<Derived &>(*this);
  }

  API_NO_DISCARD s16 get_value() const { return Object::api()->bar_get_value(Object::object()); }
};
// Concrete lvgl bar widget built on the BarAccess CRTP mixin.
class Bar : public BarAccess<Bar> {
public:
  explicit Bar(const char *name = "");
  // Wraps an existing lvgl object handle.
  explicit Bar(lv_obj_t *object) { m_object = object; }

  LVGL_OBJECT_ACCESS_GET_CLASS(bar_class)

  // Mirrors the LV_BAR_MODE_* constants.
  enum class Mode {
    normal = LV_BAR_MODE_NORMAL,
    symmetrical = LV_BAR_MODE_SYMMETRICAL,
    range = LV_BAR_MODE_RANGE
  };

  Bar &set_mode(Mode value);
};
} // namespace lvgl
namespace printer {
Printer &operator<<(Printer &printer, const lvgl::Range &range);
}
#endif // LVGLAPI_LVGL_BAR_HPP
|
SlipFil/DeleteBG | node_modules/@react-spectrum/radio/dist/module.js | <reponame>SlipFil/DeleteBG
import "./main.css";
import {useStyleProps as $4hD9r$useStyleProps, useFocusableRef as $4hD9r$useFocusableRef, classNames as $4hD9r$classNames, useDOMRef as $4hD9r$useDOMRef} from "@react-spectrum/utils";
import {FocusRing as $4hD9r$FocusRing} from "@react-aria/focus";
import $4hD9r$react, {useRef as $4hD9r$useRef, forwardRef as $4hD9r$forwardRef, useContext as $4hD9r$useContext} from "react";
import {useHover as $4hD9r$useHover} from "@react-aria/interactions";
import {useRadio as $4hD9r$useRadio, useRadioGroup as $4hD9r$useRadioGroup} from "@react-aria/radio";
import {Label as $4hD9r$Label} from "@react-spectrum/label";
import {useFormProps as $4hD9r$useFormProps} from "@react-spectrum/form";
import {useProviderProps as $4hD9r$useProviderProps} from "@react-spectrum/provider";
import {useRadioGroupState as $4hD9r$useRadioGroupState} from "@react-stately/radio";
// Parcel helper: unwrap an ES-module default export when interoperating with CJS.
function $parcel$interopDefault(a) {
    return a && a.__esModule ? a.default : a;
}
// Parcel helper: define a live (getter/setter-backed) re-exported binding
// named `n` on module-exports object `e`.
function $parcel$export(e, n, v, s) {
    Object.defineProperty(e, n, {get: v, set: s, enumerable: true, configurable: true});
}
// Generated bundler boilerplate: live exports for the Radio component and the
// hashed class names of its CSS module. Do not edit by hand.
var $337c5cb580c4e6ba$exports = {};
$parcel$export($337c5cb580c4e6ba$exports, "Radio", () => $337c5cb580c4e6ba$export$d7b12c4107be0d61);
var $d1246d70e6c52742$exports = {};
$parcel$export($d1246d70e6c52742$exports, "spectrum-Radio", () => $d1246d70e6c52742$export$26ebd4dbfef068c5, (v) => $d1246d70e6c52742$export$26ebd4dbfef068c5 = v);
$parcel$export($d1246d70e6c52742$exports, "spectrum-Radio-input", () => $d1246d70e6c52742$export$c0ce6f9a6a2de1cc, (v) => $d1246d70e6c52742$export$c0ce6f9a6a2de1cc = v);
$parcel$export($d1246d70e6c52742$exports, "spectrum-Radio-button", () => $d1246d70e6c52742$export$bf9d5e902c33bf05, (v) => $d1246d70e6c52742$export$bf9d5e902c33bf05 = v);
$parcel$export($d1246d70e6c52742$exports, "focus-ring", () => $d1246d70e6c52742$export$f39a09f249340e2a, (v) => $d1246d70e6c52742$export$f39a09f249340e2a = v);
$parcel$export($d1246d70e6c52742$exports, "spectrum-Radio-label", () => $d1246d70e6c52742$export$b0f8395a9667922d, (v) => $d1246d70e6c52742$export$b0f8395a9667922d = v);
$parcel$export($d1246d70e6c52742$exports, "spectrum-Radio--labelBelow", () => $d1246d70e6c52742$export$9443e7082698950d, (v) => $d1246d70e6c52742$export$9443e7082698950d = v);
$parcel$export($d1246d70e6c52742$exports, "is-hovered", () => $d1246d70e6c52742$export$b8813cd5d7824ce7, (v) => $d1246d70e6c52742$export$b8813cd5d7824ce7 = v);
$parcel$export($d1246d70e6c52742$exports, "spectrum-Radio--quiet", () => $d1246d70e6c52742$export$6db66b55a897f36d, (v) => $d1246d70e6c52742$export$6db66b55a897f36d = v);
$parcel$export($d1246d70e6c52742$exports, "is-invalid", () => $d1246d70e6c52742$export$14a30de2866685fa, (v) => $d1246d70e6c52742$export$14a30de2866685fa = v);
var $d1246d70e6c52742$export$26ebd4dbfef068c5;
var $d1246d70e6c52742$export$c0ce6f9a6a2de1cc;
var $d1246d70e6c52742$export$bf9d5e902c33bf05;
var $d1246d70e6c52742$export$f39a09f249340e2a;
var $d1246d70e6c52742$export$b0f8395a9667922d;
var $d1246d70e6c52742$export$9443e7082698950d;
var $d1246d70e6c52742$export$b8813cd5d7824ce7;
var $d1246d70e6c52742$export$6db66b55a897f36d;
var $d1246d70e6c52742$export$14a30de2866685fa;
$d1246d70e6c52742$export$26ebd4dbfef068c5 = "spectrum-Radio_bf46c2";
$d1246d70e6c52742$export$c0ce6f9a6a2de1cc = "spectrum-Radio-input_bf46c2";
$d1246d70e6c52742$export$bf9d5e902c33bf05 = "spectrum-Radio-button_bf46c2";
$d1246d70e6c52742$export$f39a09f249340e2a = "focus-ring_bf46c2";
$d1246d70e6c52742$export$b0f8395a9667922d = "spectrum-Radio-label_bf46c2";
$d1246d70e6c52742$export$9443e7082698950d = "spectrum-Radio--labelBelow_bf46c2";
$d1246d70e6c52742$export$b8813cd5d7824ce7 = "is-hovered_bf46c2";
$d1246d70e6c52742$export$6db66b55a897f36d = "spectrum-Radio--quiet_bf46c2";
$d1246d70e6c52742$export$14a30de2866685fa = "is-invalid_bf46c2";
// React context carrying the enclosing RadioGroup's state/props down to each Radio.
const $f394bff9e10389a0$export$b118023277d4a5c3 = $4hD9r$react.createContext(null);
// Hook: read the nearest RadioGroup context value (null when outside a group).
function $f394bff9e10389a0$export$b054eba74077a826() {
    return $4hD9r$useContext($f394bff9e10389a0$export$b118023277d4a5c3);
}
// Compiled (JSX-lowered) Radio component body; receives a forwarded focusable ref.
function $337c5cb580c4e6ba$var$Radio(props, ref) {
    let { isDisabled: isDisabled , children: children , autoFocus: autoFocus , ...otherProps } = props;
    let { styleProps: styleProps } = $4hD9r$useStyleProps(otherProps);
    // Hover tracking is disabled together with the radio itself.
    let { hoverProps: hoverProps , isHovered: isHovered } = $4hD9r$useHover({
        isDisabled: isDisabled
    });
    let inputRef = $4hD9r$useRef(null);
    let domRef = $4hD9r$useFocusableRef(ref, inputRef);
    // Group-level state (selected value, validation, emphasis) comes from context.
    let radioGroupProps = $f394bff9e10389a0$export$b054eba74077a826();
    let { isEmphasized: isEmphasized , validationState: validationState , state: state } = radioGroupProps;
    let { inputProps: inputProps } = $4hD9r$useRadio({
        ...props,
        ...radioGroupProps,
        isDisabled: isDisabled
    }, state, inputRef);
    // <label> wraps the hidden input, the visual button span, and the optional label text.
    return(/*#__PURE__*/ $4hD9r$react.createElement("label", {
        ...styleProps,
        ...hoverProps,
        ref: domRef,
        className: $4hD9r$classNames((/*@__PURE__*/$parcel$interopDefault($d1246d70e6c52742$exports)), 'spectrum-Radio', {
            // Removing. Pending design feedback.
            // 'spectrum-Radio--labelBelow': labelPosition === 'bottom',
            'spectrum-Radio--quiet': !isEmphasized,
            'is-disabled': isDisabled,
            'is-invalid': validationState === 'invalid',
            'is-hovered': isHovered
        }, styleProps.className)
    }, /*#__PURE__*/ $4hD9r$react.createElement($4hD9r$FocusRing, {
        focusRingClass: $4hD9r$classNames((/*@__PURE__*/$parcel$interopDefault($d1246d70e6c52742$exports)), 'focus-ring'),
        autoFocus: autoFocus
    }, /*#__PURE__*/ $4hD9r$react.createElement("input", {
        ...inputProps,
        ref: inputRef,
        className: $4hD9r$classNames((/*@__PURE__*/$parcel$interopDefault($d1246d70e6c52742$exports)), 'spectrum-Radio-input')
    })), /*#__PURE__*/ $4hD9r$react.createElement("span", {
        className: $4hD9r$classNames((/*@__PURE__*/$parcel$interopDefault($d1246d70e6c52742$exports)), 'spectrum-Radio-button')
    }), children && /*#__PURE__*/ $4hD9r$react.createElement("span", {
        className: $4hD9r$classNames((/*@__PURE__*/$parcel$interopDefault($d1246d70e6c52742$exports)), 'spectrum-Radio-label')
    }, children)));
}
/**
 * Radio buttons allow users to select a single option from a list of mutually exclusive options.
 * All possible options are exposed up front for users to compare.
 */ const $337c5cb580c4e6ba$export$d7b12c4107be0d61 = /*#__PURE__*/ $4hD9r$forwardRef($337c5cb580c4e6ba$var$Radio);
// Generated bundler boilerplate: live exports for RadioGroup and the hashed
// class names of the shared field/form CSS module. Do not edit by hand.
var $ac9b96486fc4badf$exports = {};
$parcel$export($ac9b96486fc4badf$exports, "RadioGroup", () => $ac9b96486fc4badf$export$a98f0dcb43a68a25);
var $cd231e464c886c86$exports = {};
$parcel$export($cd231e464c886c86$exports, "spectrum-FieldLabel", () => $cd231e464c886c86$export$493d18e796ae054, (v) => $cd231e464c886c86$export$493d18e796ae054 = v);
$parcel$export($cd231e464c886c86$exports, "spectrum-FieldLabel--positionSide", () => $cd231e464c886c86$export$9e6f19dc21f22f2e, (v) => $cd231e464c886c86$export$9e6f19dc21f22f2e = v);
$parcel$export($cd231e464c886c86$exports, "spectrum-FieldLabel-requiredIcon", () => $cd231e464c886c86$export$7c47642c7d46f3c9, (v) => $cd231e464c886c86$export$7c47642c7d46f3c9 = v);
$parcel$export($cd231e464c886c86$exports, "spectrum-FieldLabel--alignEnd", () => $cd231e464c886c86$export$885efcc08143a987, (v) => $cd231e464c886c86$export$885efcc08143a987 = v);
$parcel$export($cd231e464c886c86$exports, "spectrum-Field", () => $cd231e464c886c86$export$f6d480ae1e56eba0, (v) => $cd231e464c886c86$export$f6d480ae1e56eba0 = v);
$parcel$export($cd231e464c886c86$exports, "spectrum-Field--positionTop", () => $cd231e464c886c86$export$a4ea780a9064d7f9, (v) => $cd231e464c886c86$export$a4ea780a9064d7f9 = v);
$parcel$export($cd231e464c886c86$exports, "spectrum-Field-field", () => $cd231e464c886c86$export$3ab8a3dc7f0563d, (v) => $cd231e464c886c86$export$3ab8a3dc7f0563d = v);
$parcel$export($cd231e464c886c86$exports, "spectrum-Field-field--multiline", () => $cd231e464c886c86$export$f58a1e966a92ba5e, (v) => $cd231e464c886c86$export$f58a1e966a92ba5e = v);
$parcel$export($cd231e464c886c86$exports, "spectrum-Field--positionSide", () => $cd231e464c886c86$export$2963225c91179589, (v) => $cd231e464c886c86$export$2963225c91179589 = v);
$parcel$export($cd231e464c886c86$exports, "spectrum-Field-wrapper", () => $cd231e464c886c86$export$127df0b7290eb8ef, (v) => $cd231e464c886c86$export$127df0b7290eb8ef = v);
$parcel$export($cd231e464c886c86$exports, "spectrum-Form", () => $cd231e464c886c86$export$94d2bcc94afabd89, (v) => $cd231e464c886c86$export$94d2bcc94afabd89 = v);
$parcel$export($cd231e464c886c86$exports, "spectrum-Form--positionSide", () => $cd231e464c886c86$export$7f44db659563c8f4, (v) => $cd231e464c886c86$export$7f44db659563c8f4 = v);
$parcel$export($cd231e464c886c86$exports, "spectrum-Form--positionTop", () => $cd231e464c886c86$export$11b8bc81fe551057, (v) => $cd231e464c886c86$export$11b8bc81fe551057 = v);
$parcel$export($cd231e464c886c86$exports, "spectrum-Form-itemLabel", () => $cd231e464c886c86$export$51ec27881a1e3928, (v) => $cd231e464c886c86$export$51ec27881a1e3928 = v);
$parcel$export($cd231e464c886c86$exports, "is-disabled", () => $cd231e464c886c86$export$d35bc1e505d1ebbf, (v) => $cd231e464c886c86$export$d35bc1e505d1ebbf = v);
var $cd231e464c886c86$export$493d18e796ae054;
var $cd231e464c886c86$export$9e6f19dc21f22f2e;
var $cd231e464c886c86$export$7c47642c7d46f3c9;
var $cd231e464c886c86$export$885efcc08143a987;
var $cd231e464c886c86$export$f6d480ae1e56eba0;
var $cd231e464c886c86$export$a4ea780a9064d7f9;
var $cd231e464c886c86$export$3ab8a3dc7f0563d;
var $cd231e464c886c86$export$f58a1e966a92ba5e;
var $cd231e464c886c86$export$2963225c91179589;
var $cd231e464c886c86$export$127df0b7290eb8ef;
var $cd231e464c886c86$export$94d2bcc94afabd89;
var $cd231e464c886c86$export$7f44db659563c8f4;
var $cd231e464c886c86$export$11b8bc81fe551057;
var $cd231e464c886c86$export$51ec27881a1e3928;
var $cd231e464c886c86$export$d35bc1e505d1ebbf;
$cd231e464c886c86$export$493d18e796ae054 = "spectrum-FieldLabel_d2db1f";
$cd231e464c886c86$export$9e6f19dc21f22f2e = "spectrum-FieldLabel--positionSide_d2db1f";
$cd231e464c886c86$export$7c47642c7d46f3c9 = "spectrum-FieldLabel-requiredIcon_d2db1f";
$cd231e464c886c86$export$885efcc08143a987 = "spectrum-FieldLabel--alignEnd_d2db1f";
$cd231e464c886c86$export$f6d480ae1e56eba0 = "spectrum-Field_d2db1f";
$cd231e464c886c86$export$a4ea780a9064d7f9 = "spectrum-Field--positionTop_d2db1f";
$cd231e464c886c86$export$3ab8a3dc7f0563d = "spectrum-Field-field_d2db1f";
$cd231e464c886c86$export$f58a1e966a92ba5e = "spectrum-Field-field--multiline_d2db1f";
$cd231e464c886c86$export$2963225c91179589 = "spectrum-Field--positionSide_d2db1f";
$cd231e464c886c86$export$127df0b7290eb8ef = "spectrum-Field-wrapper_d2db1f";
$cd231e464c886c86$export$94d2bcc94afabd89 = "spectrum-Form_d2db1f";
$cd231e464c886c86$export$7f44db659563c8f4 = "spectrum-Form--positionSide_d2db1f";
$cd231e464c886c86$export$11b8bc81fe551057 = "spectrum-Form--positionTop_d2db1f";
$cd231e464c886c86$export$51ec27881a1e3928 = "spectrum-Form-itemLabel_d2db1f";
$cd231e464c886c86$export$d35bc1e505d1ebbf = "is-disabled_d2db1f";
var $4cb2b26710154c56$exports = {};
$parcel$export($4cb2b26710154c56$exports, "spectrum-FieldGroup", () => $4cb2b26710154c56$export$75eb16cbfc18279c, (v) => $4cb2b26710154c56$export$75eb16cbfc18279c = v);
$parcel$export($4cb2b26710154c56$exports, "spectrum-FieldGroup--positionSide", () => $4cb2b26710154c56$export$859b606bd9f45f81, (v) => $4cb2b26710154c56$export$859b606bd9f45f81 = v);
$parcel$export($4cb2b26710154c56$exports, "spectrum-FieldGroup-group", () => $4cb2b26710154c56$export$984c7bbbfc7ddffc, (v) => $4cb2b26710154c56$export$984c7bbbfc7ddffc = v);
$parcel$export($4cb2b26710154c56$exports, "spectrum-FieldGroup-group--horizontal", () => $4cb2b26710154c56$export$1ad8646bcba21c0e, (v) => $4cb2b26710154c56$export$1ad8646bcba21c0e = v);
var $4cb2b26710154c56$export$75eb16cbfc18279c;
var $4cb2b26710154c56$export$859b606bd9f45f81;
var $4cb2b26710154c56$export$984c7bbbfc7ddffc;
var $4cb2b26710154c56$export$1ad8646bcba21c0e;
$4cb2b26710154c56$export$75eb16cbfc18279c = "spectrum-FieldGroup_41d350";
$4cb2b26710154c56$export$859b606bd9f45f81 = "spectrum-FieldGroup--positionSide_41d350";
$4cb2b26710154c56$export$984c7bbbfc7ddffc = "spectrum-FieldGroup-group_41d350";
$4cb2b26710154c56$export$1ad8646bcba21c0e = "spectrum-FieldGroup-group--horizontal_41d350";
// Parcel-bundled render function for a Spectrum RadioGroup (mangled names are
// bundler-generated; $4hD9r$* are externals resolved elsewhere in the bundle).
// Combines provider/form defaults, React-Aria radio-group behavior, and
// Spectrum FieldGroup markup, then exposes group state to child radios via context.
function $ac9b96486fc4badf$var$RadioGroup(props, ref) {
    // Merge in defaults from an enclosing Provider and <Form>, if any.
    props = $4hD9r$useProviderProps(props);
    props = $4hD9r$useFormProps(props);
    let { isEmphasized: isEmphasized , isRequired: isRequired , necessityIndicator: necessityIndicator , label: label , labelPosition: labelPosition = 'top' , labelAlign: labelAlign , validationState: validationState , children: children , orientation: orientation = 'vertical' , ...otherProps } = props;
    let domRef = $4hD9r$useDOMRef(ref);
    let { styleProps: styleProps } = $4hD9r$useStyleProps(otherProps);
    // Shared selection state consumed both here and by child <Radio> elements.
    let state = $4hD9r$useRadioGroupState(props);
    let { radioGroupProps: radioGroupProps , labelProps: labelProps } = $4hD9r$useRadioGroup(props, state);
    return(/*#__PURE__*/ $4hD9r$react.createElement("div", {
        ...styleProps,
        ...radioGroupProps,
        className: $4hD9r$classNames((/*@__PURE__*/$parcel$interopDefault($4cb2b26710154c56$exports)), 'spectrum-FieldGroup', {
            'spectrum-FieldGroup--positionSide': labelPosition === 'side'
        }, // This is so radio works inside a <Form>
        $4hD9r$classNames((/*@__PURE__*/$parcel$interopDefault($cd231e464c886c86$exports)), 'spectrum-Field'), styleProps.className),
        ref: domRef
    }, label && /*#__PURE__*/ $4hD9r$react.createElement($4hD9r$Label, {
        ...labelProps,
        elementType: "span",
        labelPosition: labelPosition,
        labelAlign: labelAlign,
        isRequired: isRequired,
        necessityIndicator: necessityIndicator
    }, label), /*#__PURE__*/ $4hD9r$react.createElement("div", {
        className: $4hD9r$classNames((/*@__PURE__*/$parcel$interopDefault($4cb2b26710154c56$exports)), 'spectrum-FieldGroup-group', {
            'spectrum-FieldGroup-group--horizontal': orientation === 'horizontal'
        })
    }, /*#__PURE__*/ $4hD9r$react.createElement($f394bff9e10389a0$export$b118023277d4a5c3.Provider, {
        value: {
            isEmphasized: isEmphasized,
            validationState: validationState,
            state: state
        }
    }, children))));
}
/**
* Radio groups allow users to select a single option from a list of mutually exclusive options.
* All possible options are exposed up front for users to compare.
*/ const $ac9b96486fc4badf$export$a98f0dcb43a68a25 = /*#__PURE__*/ $4hD9r$react.forwardRef($ac9b96486fc4badf$var$RadioGroup);
export {$337c5cb580c4e6ba$export$d7b12c4107be0d61 as Radio, $ac9b96486fc4badf$export$a98f0dcb43a68a25 as RadioGroup};
//# sourceMappingURL=module.js.map
|
dingxing123/game-server | game-engine/src/main/java/com/jzy/game/engine/mq/IMQScript.java | package com.jzy.game.engine.mq;
import com.jzy.game.engine.script.IScript;
/**
 * Script hook for processing MQ (message queue) messages.
 *
 * @author JiangZhiYong
 * @QQ 359135103
 * 2017-07-28 10:39:14
 */
public interface IMQScript extends IScript {

	/**
	 * Invoked when an MQ message is received. The default implementation is a
	 * no-op so scripts only override it when they care about messages.
	 *
	 * @author JiangZhiYong
	 * @QQ 359135103
	 * 2017-07-28 10:39:59
	 * @param msg the raw message payload
	 */
	default void onMessage(String msg){

	}
}
|
strategicallynicole/superscary | src/components/Heroes/threedee.js | <reponame>strategicallynicole/superscary
import React, { useCallback, useEffect, useState, useRef } from 'react'
import ReactDOM from 'react-dom'
import { useSpring, animated as a } from 'react-spring'
import './threedeestyles.css'
import styled from 'styled-components'
import Nav from './Nav'
import ThreeDShapes from './threedeeshapes.js'
// import useScroll from './useScroll'
/**
* useScroll React custom hook
* Usage:
* const { scrollX, scrollY, scrollDirection } = useScroll();
*/
// NOTE(review): `isBrowser` appears unused in this module — useWindowSize
// performs its own `typeof window` check. Candidate for removal; confirm
// there are no other references before deleting.
const isBrowser = typeof window !== 'undefined'
/**
 * React hook that tracks the browser window size.
 *
 * Returns `{ width, height }` in pixels; both are `undefined` when `window`
 * is not available (e.g. during server-side rendering). Subscribes to the
 * `resize` event on mount and unsubscribes on unmount.
 */
function useWindowSize() {
  const isClient = typeof window === 'object'

  function getSize() {
    return {
      width: isClient ? window.innerWidth : undefined,
      height: isClient ? window.innerHeight : undefined,
    }
  }

  const [windowSize, setWindowSize] = useState(getSize)

  useEffect(() => {
    if (!isClient) {
      // Nothing to subscribe to during SSR. An effect must return either a
      // cleanup function or undefined — the original returned `false`, which
      // makes React emit a runtime warning ("An effect function must not
      // return anything besides a function...").
      return undefined
    }

    function handleResize() {
      setWindowSize(getSize())
    }

    window.addEventListener('resize', handleResize)
    return () => window.removeEventListener('resize', handleResize)
  }, []) // Empty array ensures that effect is only run on mount and unmount

  return windowSize
}
// Full-viewport overlay that hosts the floating "track" labels while the hero
// animation plays. (CSS is inside template literals; whitespace is cosmetic.)
const LabelWrapper = styled.div`
  max-width: 80vw;
  height: 100vh;
  width: 100%;
  display: flex;
  align-items: center;
  position: absolute;
  top: 0;
  will-change: opacity;
`

// 16:9 positioning canvas (padding-bottom: 56.25%) the tracks are placed on.
const LabelContainer = styled.div`
  width: 100%;
  height: 0;
  padding-bottom: 56.25%;
  position: relative;
  will-change: opacity;
`

// A single animated timeline "track" chip; positioned absolutely by inline styles.
const Track = styled(a.div)`
  position: absolute;
  width: 18.4%;
  height: 0;
  padding-bottom: 2.5%;
  display: flex;
  align-items: center;
  will-change: transform;
`

// Icon + truncated name rendered inside a Track.
const TrackName = styled(a.div)`
  font-weight: 600;
  color: white;
  padding: 0 5% 0 3%;
  position: absolute;
  top: 50%;
  transform: translateY(-50%);
  font-size: 0.8vw;
  display: flex;
  align-items: center;
  span {
    width: calc(100% - 1.5vw);
    white-space: nowrap;
    overflow: hidden;
    text-overflow: ellipsis;
    padding-right: 5%;
  }
  svg {
    width: 1.25vw;
    min-width: 1.25vw;
    height: auto;
    margin-right: 0.3vw;
    opacity: 0.5;
  }
`

// Colored, fade-in background layer behind each Track.
const Trackground = styled(a.div)`
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  border-radius: 0.25rem;
  will-change: opacity;
`
/**
 * Scroll-driven hero section: as the user scrolls through the first viewport
 * height, a headline shrinks into a mock video-editor app, floating track
 * labels fly into place, and a seeker bar animates across a fake timeline.
 * Scroll progress is captured as `pcFromTop` (0..1) and mirrored into the
 * react-spring value `st`, which drives all the interpolated transforms.
 */
const Hero = () => {
  const windowSize = useWindowSize()
  // Fraction of one viewport height scrolled, clamped to [0, 1].
  const [pcFromTop, setPcFromTop] = useState(0)
  // Which timeline video is "active": null before animation, -1/0/1/2 during.
  const [playingVideo, setPlayingVideo] = useState(null)
  const vid1Ref = useRef(null)

  // Restart and play the first video clip from the beginning.
  const setVideo = () => {
    setPlayingVideo(1)
    vid1Ref.current.currentTime = 0
    vid1Ref.current.play()
  }

  // `st` is the springed copy of scroll progress; `set` pushes new targets.
  const [{ st }, set] = useSpring(() => ({ st: 0 }))

  // Once scroll passes 95%, the seeker animates from 17.5% to 100% over 12s.
  const { timelineProgress } = useSpring({
    timelineProgress: pcFromTop >= 0.95 ? 100 : 17.5,
    config: { duration: pcFromTop >= 0.95 ? 12000 : undefined },
    onFrame: (f) => {
      // Switch the active video based on seeker position.
      // NOTE(review): `playingVideo` is read from this render's closure, so
      // onFrame may see a stale value between renders — confirm this is
      // acceptable (the guards make extra setState calls mostly harmless).
      if (f.timelineProgress >= 21.5 && f.timelineProgress < 25 && playingVideo !== 0) {
        setPlayingVideo(0)
      } else if (f.timelineProgress >= 25 && f.timelineProgress < 50 && playingVideo !== 1) {
        setVideo()
      } else if (f.timelineProgress >= 50) {
        setPlayingVideo(2)
      } else if (f.timelineProgress < 21.5) {
        setPlayingVideo(-1)
      }
    },
  })

  // Per-track fly-in transforms: at st=0 each track is far off-canvas and
  // rotated; at st=1 it settles at its inline left/top position.
  const interpLabel1 = st.interpolate(
    (o) => `
      translate(${(1 - o) * -160}%, ${(1 - o) * -600}%)
      rotate(${(1 - o) * -50}deg)
    `,
  )
  const interpLabel2 = st.interpolate(
    (o) => `
      translate(${(1 - o) * -100}%, ${(1 - o) * 600}%)
      rotate(${(1 - o) * -16}deg)
    `,
  )
  const interpLabel3 = st.interpolate(
    (o) => `
      translate(${(1 - o) * 150}%, ${(1 - o) * -400}%)
      rotate(${(1 - o) * 30}deg)
    `,
  )
  const interpLabel4 = st.interpolate(
    (o) => `
      translate(${(1 - o) * 0}%, ${(1 - o) * -1000}%)
      rotate(${(1 - o) * -60}deg)
    `,
  )

  // Past 40% scroll the headline turns white and the video backdrop fades in.
  const interpDetailsColor = st.interpolate((o) => (o < 0.4 ? undefined : '#fff'))
  const interpBackdropOpacity = st.interpolate((o) => (o < 0.4 ? 0 : 1))

  // Track scroll position of <main>, clamped to one viewport height.
  // NOTE(review): `windowSize.height` is used but missing from the dependency
  // array — a resize will keep using the old height until pcFromTop changes.
  const onScroll = useCallback(
    (e) => {
      const localPcFromTop = e.target.scrollTop / windowSize.height
      if (localPcFromTop < 1) {
        set({ st: localPcFromTop })
        setPcFromTop(localPcFromTop)
      } else if (pcFromTop !== 1) {
        set({ st: 1 })
        setPcFromTop(1)
      }
    },
    [pcFromTop],
  )

  return (
    <main className="main" onScroll={onScroll}>
      <Nav hide={pcFromTop !== 0} />
      <div className="heroContainer">
        <div className="stickyContainer">
          <LabelWrapper style={{ opacity: pcFromTop < 0.92 ? 0 : (pcFromTop - 0.92) * 20 * 1 }}>
            <LabelContainer>
              <Track
                style={{
                  left: '24.8%',
                  top: '74.6%',
                  transform: interpLabel1,
                }}>
                <Trackground style={{ background: '#5a4cdb', opacity: pcFromTop < 0.94 ? 0 : (pcFromTop - 0.95) * 30 * 1 }} />
                <TrackName>
                  <svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
                    <path
                      d="M4 6.5V5C4 4.44772 4.44772 4 5 4H12M20 6.5V5C20 4.44772 19.5523 4 19 4H12M12 4V20M12 20H9.5M12 20H14.5"
                      stroke="white"
                      strokeWidth="2"
                      strokeLinecap="round"
                      strokeLinejoin="round"
                    />
                  </svg>
                  <span>Make your business one to watch</span>
                </TrackName>
              </Track>
              <Track
                style={{
                  left: '43.2%',
                  top: '80%',
                  transform: interpLabel2,
                }}>
                <Trackground style={{ background: '#468BE8', opacity: pcFromTop < 0.94 ? 0 : (pcFromTop - 0.95) * 30 * 1 }} />
                <TrackName>
                  <svg width="19" height="19" viewBox="0 0 19 19" fill="none" xmlns="http://www.w3.org/2000/svg">
                    <g>
                      <path
                        d="M9.25 16.75C13.3921 16.75 16.75 13.3921 16.75 9.25C16.75 5.10786 13.3921 1.75 9.25 1.75C5.10786 1.75 1.75 5.10786 1.75 9.25C1.75 13.3921 5.10786 16.75 9.25 16.75Z"
                        stroke="white"
                        strokeWidth="1.5"
                        strokeLinecap="round"
                        strokeLinejoin="round"
                      />
                      <path
                        d="M7.75 6.25L12.25 9.25L7.75 12.25V6.25Z"
                        stroke="white"
                        strokeWidth="1.5"
                        strokeLinecap="round"
                        strokeLinejoin="bevel"
                      />
                    </g>
                  </svg>
                  video.mp4
                </TrackName>
              </Track>
              <Track
                style={{
                  left: '61.6%',
                  top: '85.3%',
                  transform: interpLabel3,
                }}>
                <Trackground style={{ background: '#A260D4', opacity: pcFromTop < 0.94 ? 0 : (pcFromTop - 0.95) * 30 * 1 }} />
                <TrackName>
                  <svg width="19" height="19" viewBox="0 0 19 19" fill="none" xmlns="http://www.w3.org/2000/svg">
                    <g>
                      <path
                        d="M9.25 16.75C13.3921 16.75 16.75 13.3921 16.75 9.25C16.75 5.10786 13.3921 1.75 9.25 1.75C5.10786 1.75 1.75 5.10786 1.75 9.25C1.75 13.3921 5.10786 16.75 9.25 16.75Z"
                        stroke="white"
                        strokeWidth="1.5"
                        strokeLinecap="round"
                        strokeLinejoin="round"
                      />
                      <path
                        d="M7.75 6.25L12.25 9.25L7.75 12.25V6.25Z"
                        stroke="white"
                        strokeWidth="1.5"
                        strokeLinecap="round"
                        strokeLinejoin="bevel"
                      />
                    </g>
                  </svg>
                  stock_footage.mp4
                </TrackName>
              </Track>
              <Track
                style={{
                  left: '80%',
                  top: '74.6%',
                  transform: interpLabel4,
                }}>
                <Trackground style={{ background: '#5a4cdb', opacity: pcFromTop < 0.94 ? 0 : (pcFromTop - 0.95) * 30 * 1 }} />
                <TrackName>
                  <svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
                    <path
                      d="M4 6.5V5C4 4.44772 4.44772 4 5 4H12M20 6.5V5C20 4.44772 19.5523 4 19 4H12M12 4V20M12 20H9.5M12 20H14.5"
                      stroke="white"
                      strokeWidth="2"
                      strokeLinecap="round"
                      strokeLinejoin="round"
                    />
                  </svg>
                  Try for free
                </TrackName>
              </Track>
              <a.div
                className="seekerWrapper"
                style={{
                  opacity: pcFromTop >= 1 ? 1 : 0,
                  transition: 'opacity 100ms',
                  willChange: 'opacity, transform',
                  transform: timelineProgress.interpolate((o) => `translateX(${o}%)`),
                }}>
                <a.div className="seeker" style={{ opacity: timelineProgress.interpolate((o) => (o > 25 ? 1 : (o - 20) * 0.2)) }} />
              </a.div>
            </LabelContainer>
          </LabelWrapper>
          <a.div className="shapesContainer">
            <ThreeDShapes spring={st} />
          </a.div>
          <div className="heroDetailsContainer">
            <div className="detailsContainer">
              <a.div
                className={`headlineContainer${playingVideo >= 0 ? ' hidden' : ''}`}
                style={{
                  transform: st.interpolate(
                    (o) =>
                      `translateY(-${0 + pcFromTop * 7}%) translateX(${0 + pcFromTop * 19}%) scale(${
                        pcFromTop < 0.95 ? 1 - pcFromTop * 0.135 : 0.865 - (pcFromTop - 0.95) * 20 * 0.05
                      })`,
                    // translateY(-7%) translateX(19%) scale(0.86)
                  ),
                }}>
                <a.div
                  className="videoBackdrop"
                  style={{
                    opacity: interpBackdropOpacity,
                    transform: st.interpolate((o) => `translate(-50%, -50%) scale(${1.2 - o * 0.2})`),
                    background:
                      pcFromTop >= 1
                        ? timelineProgress.interpolate((o) => (o < 50 ? '#5a4cdb' : o < 75 ? '#63ADF2' : '#A260D4'))
                        : '#5A4CDB',
                  }}>
                  <a.video
                    className={playingVideo === 1 ? 'playing' : playingVideo < 1 ? 'before' : 'after'}
                    ref={vid1Ref}
                    src="video_1.mp4"
                    type="video/mp4"
                    muted
                  />
                </a.div>
                <a.h2
                  className="headline"
                  style={{
                    color: interpDetailsColor,
                    transform: st.interpolate((o) => `scale(${1 - o * 0.25})`),
                  }}>
                  <span style={{ transition: 'color 500ms, opacity 300ms, transform 300ms' }}>Make your </span>
                  <a.b
                    style={{
                      transition: 'color 500ms, opacity 300ms 75ms, transform 300ms 75ms',
                      color: interpDetailsColor,
                    }}>
                    business
                  </a.b>
                  <span style={{ transition: 'color 500ms, opacity 300ms 150ms, transform 300ms 150ms' }}> one to watch</span>
                </a.h2>
              </a.div>
              <div className={`extraDetails ${pcFromTop < 0.05 ? '' : 'hidden'}`}>
                <h1 className="subheadline">Tell stories worth sharing with Clipchamp’s free browser-based video editor.</h1>
                <div className="buttonContainer">
                  <button className="button primary">Try for free</button>
                  <button className="button secondary">
                    <svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
                      <path
                        d="M12 22C17.5228 22 22 17.5228 22 12C22 6.47715 17.5228 2 12 2C6.47715 2 2 6.47715 2 12C2 17.5228 6.47715 22 12 22Z"
                        stroke="currentColor"
                        strokeWidth="2"
                        strokeLinecap="round"
                        strokeLinejoin="round"
                      />
                      <path d="M10 8L16 12L10 16V8Z" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" />
                    </svg>
                    Play demo
                  </button>
                </div>
              </div>
            </div>
          </div>
        </div>
        <div className="appWrapper">
          <div className="appContainer">
            <div className="dummyApp">
              <a.div className="seekerWrapper" style={{ transform: timelineProgress.interpolate((o) => `translateX(${o}%)`) }}>
                <a.div className="seeker" style={{ opacity: timelineProgress.interpolate((o) => (o > 25 ? 1 : (o - 20) * 0.2)) }} />
              </a.div>
            </div>
          </div>
        </div>
        <div
          style={{
            width: '200vw',
            background: '#EAEAFA',
            height: '50vh',
            marginLeft: '-50vw',
          }}
        />
      </div>
    </main>
  )
}
// NOTE(review): `App` is defined but never used — the module's default export
// is `Hero` itself, and `App` is not exported. Apparent dead code; confirm no
// external references before removing.
class App extends React.Component {
  render() {
    return <Hero />
  }
}
export default Hero;
|
Parametric/chef | chef-server-api/app/controllers/clients.rb | <filename>chef-server-api/app/controllers/clients.rb
#
# Author:: <NAME> (<<EMAIL>>)
# Author:: <NAME> (<<EMAIL>>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/api_client'
# REST controller for Chef API clients — the actors (nodes, knife users,
# the validator) that authenticate against the server with a key pair.
# Records are Chef::ApiClient objects persisted in CouchDB (cdb_* methods).
class Clients < Application

  provides :json

  # Every action requires a validly signed request.
  before :authenticate_every
  # Listing, updating and deleting clients is admin-only.
  before :is_admin, :only => [ :index, :update, :destroy ]
  # The validator client may also create clients (used during node bootstrap).
  before :is_admin_or_validator, :only => [ :create ]
  # A node may fetch its own client record; admins may fetch any.
  before :admin_or_requesting_node, :only => [ :show ]

  # GET /clients
  # Responds with a hash of client name => client resource URL.
  def index
    @list = Chef::ApiClient.cdb_list(true)
    display(@list.inject({}) { |result, element| result[element.name] = absolute_url(:client, :id => element.name); result })
  end

  # GET /clients/:id
  # Responds with the full client object, or 404 if it does not exist.
  def show
    begin
      @client = Chef::ApiClient.cdb_load(params[:id])
    rescue Chef::Exceptions::CouchDBNotFound => e
      raise NotFound, "Cannot load client #{params[:id]}"
    end
    #display({ :name => @client.name, :admin => @client.admin, :public_key => @client.public_key })
    display @client
  end

  # POST /clients
  # Creates a client and responds 201 with its URL and freshly generated
  # private key (the only time the private key is ever returned).
  def create
    exists = true
    # A signed request body arrives pre-parsed as :inflated_object; fall back
    # to its fields when the flat params are absent.
    if params.has_key?(:inflated_object)
      params[:name] ||= params[:inflated_object].name
      params[:admin] ||= params[:inflated_object].admin
    end

    # We can only create clients if we're the admin or the validator.
    # But only allow creating admin clients if we're already an admin.
    if params[:admin] == true && @auth_user.admin != true
      raise Forbidden, "You are not allowed to take this action."
    end

    # Existence probe: a failed load (CouchDBNotFound) means the name is free.
    begin
      Chef::ApiClient.cdb_load(params[:name])
    rescue Chef::Exceptions::CouchDBNotFound
      exists = false
    end
    raise Conflict, "Client already exists" if exists

    @client = Chef::ApiClient.new
    @client.name(params[:name])
    @client.admin(params[:admin]) if params[:admin]
    @client.create_keys
    @client.cdb_save

    self.status = 201
    headers['Location'] = absolute_url(:client, @client.name)
    display({ :uri => absolute_url(:client, @client.name), :private_key => @client.private_key })
  end

  # PUT /clients/:id
  # Updates the admin flag and/or regenerates the key pair. When
  # :private_key is true, the new private key is included in the response.
  def update
    if params.has_key?(:inflated_object)
      params[:private_key] ||= params[:inflated_object].private_key
      params[:admin] ||= params[:inflated_object].admin
    end

    begin
      @client = Chef::ApiClient.cdb_load(params[:id])
    rescue Chef::Exceptions::CouchDBNotFound => e
      raise NotFound, "Cannot load client #{params[:id]}"
    end

    # `unless nil?` (rather than `if`) so admin can be explicitly set to false.
    @client.admin(params[:admin]) unless params[:admin].nil?

    results = { :name => @client.name, :admin => @client.admin }

    if params[:private_key] == true
      @client.create_keys
      results[:private_key] = @client.private_key
    end

    @client.cdb_save

    display(results)
  end

  # DELETE /clients/:id
  # Destroys the client and responds with its name, or 404 if absent.
  def destroy
    begin
      @client = Chef::ApiClient.cdb_load(params[:id])
    rescue Chef::Exceptions::CouchDBNotFound => e
      raise NotFound, "Cannot load client #{params[:id]}"
    end
    @client.cdb_destroy
    display({ :name => @client.name })
  end

end
|
davidzchen/tajo | tajo-storage/tajo-storage-hdfs/src/main/java/org/apache/tajo/storage/text/CSVLineDeserializer.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tajo.storage.text;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufProcessor;
import org.apache.tajo.catalog.Column;
import org.apache.tajo.catalog.Schema;
import org.apache.tajo.catalog.TableMeta;
import org.apache.tajo.datum.Datum;
import org.apache.tajo.datum.NullDatum;
import org.apache.tajo.plan.util.PlannerUtil;
import org.apache.tajo.storage.FieldSerializerDeserializer;
import org.apache.tajo.storage.Tuple;
import java.io.IOException;
import java.util.Arrays;
/**
 * Deserializes one delimited text line into a {@link Tuple}, materializing
 * only the projected columns. Fields are located by scanning the line buffer
 * with a {@link ByteBufProcessor} that matches the configured delimiter.
 */
public class CSVLineDeserializer extends TextLineDeserializer {
  // Byte-level delimiter matcher; chosen in init() based on delimiter length.
  private ByteBufProcessor processor;
  // Converts a raw field byte range into a Datum for the target column type.
  private FieldSerializerDeserializer fieldSerDer;
  // Byte sequence that represents SQL NULL in this table's text format.
  private ByteBuf nullChars;
  // Extra bytes beyond the first that a multi-byte delimiter occupies;
  // subtracted from the raw span to get the true field length.
  private int delimiterCompensation;
  // Schema indexes of the projected columns, in projection order.
  private int [] targetColumnIndexes;

  public CSVLineDeserializer(Schema schema, TableMeta meta, Column [] projected) {
    super(schema, meta);
    targetColumnIndexes = PlannerUtil.getTargetIds(schema, projected);
  }

  /**
   * Prepares the delimiter processor, null marker, and field serde from the
   * table meta. Safe to call again; any previous nullChars buffer is released.
   */
  @Override
  public void init() {
    byte[] delimiter = CSVLineSerDe.getFieldDelimiter(meta);
    if (delimiter.length == 1) {
      this.processor = new FieldSplitProcessor(delimiter[0]);
    } else {
      this.processor = new MultiBytesFieldSplitProcessor(delimiter);
    }
    this.delimiterCompensation = delimiter.length - 1;

    if (nullChars != null) {
      nullChars.release();
    }
    nullChars = TextLineSerDe.getNullChars(meta);
    fieldSerDer = new TextFieldSerializerDeserializer(meta);
    fieldSerDer.init(schema);
  }

  /**
   * Parses {@code lineBuf} into {@code output}, filling one slot per
   * projected column. Columns missing from a short row are set to NULL, and
   * a field that fails to deserialize is also stored as NULL rather than
   * failing the whole row.
   *
   * @param lineBuf one text row (without the line terminator)
   * @param output  tuple sized to the projection
   */
  public void deserialize(final ByteBuf lineBuf, Tuple output) throws IOException, TextLineParsingError {
    // Nothing to do for an empty line or an empty projection.
    if (lineBuf == null || targetColumnIndexes == null || targetColumnIndexes.length == 0) {
      return;
    }

    int[] projection = targetColumnIndexes;
    final int rowLength = lineBuf.readableBytes();
    int start = 0, fieldLength = 0, end = 0;

    // Projection
    int currentTarget = 0;  // next slot to fill in the output tuple
    int currentIndex = 0;   // schema index of the field being scanned

    // Scan field by field: forEachByte returns the position where the
    // processor matched the delimiter, or -1 when no delimiter remains
    // (i.e. this is the last field of the row).
    while (end != -1) {
      end = lineBuf.forEachByte(start, rowLength - start, processor);

      if (end < 0) {
        fieldLength = rowLength - start;
      } else {
        // NOTE(review): for multi-byte delimiters, `end` appears to point at
        // the delimiter's last byte (hence the compensation subtraction) —
        // confirm against MultiBytesFieldSplitProcessor.
        fieldLength = end - start - delimiterCompensation;
      }

      // Only deserialize fields that are part of the projection.
      if (projection.length > currentTarget && currentIndex == projection[currentTarget]) {
        lineBuf.setIndex(start, start + fieldLength);

        try {
          Datum datum = fieldSerDer.deserialize(currentIndex, lineBuf, nullChars);
          output.put(currentTarget, datum);
        } catch (Exception e) {
          // Malformed field: degrade to NULL instead of aborting the row.
          output.put(currentTarget, NullDatum.get());
        }
        currentTarget++;
      }

      // All projected columns filled — skip the rest of the line.
      if (projection.length == currentTarget) {
        break;
      }

      start = end + 1;
      currentIndex++;
    }

    /* If a text row is less than table schema size, tuple should set to NullDatum */
    if (projection.length > currentTarget) {
      for (; currentTarget < projection.length; currentTarget++) {
        output.put(currentTarget, NullDatum.get());
      }
    }
  }

  /** Releases the null-marker buffer; idempotent. */
  @Override
  public void release() {
    if (nullChars != null) {
      nullChars.release();
      nullChars = null;
    }
  }
}
|
thomasrobertz/web-tutorials | spring/spring-certification-spilca/lesson-13/src/main/java/com/laurentiuspilca/springsecurity/configuration/ProjectConfiguration.java | package com.laurentiuspilca.springsecurity.configuration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.password.NoOpPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.provisioning.InMemoryUserDetailsManager;
/**
 * Spring Security demo configuration: two in-memory users and a rule that
 * restricts {@code /hello} to the ADMIN role while permitting everything else.
 *
 * NOTE(review): passwords appear as sanitized {@code <PASSWORD>} placeholders
 * from the dataset scrub — restore real demo values before running.
 */
@Configuration
public class ProjectConfiguration extends WebSecurityConfigurerAdapter {

    /**
     * Plain-text password "encoder" — stores and compares passwords as-is.
     * Acceptable only for demos/lessons; never use in production.
     */
    @Bean
    public PasswordEncoder passwordEncoder() {
        return NoOpPasswordEncoder.getInstance();
    }

    /**
     * In-memory user store with two users: Bill (ROLE_ADMIN + "write"
     * authority) and John (ROLE_MANAGER + "read" authority).
     */
    @Bean
    public UserDetailsService userDetailService() {
        var manager = new InMemoryUserDetailsManager();

        UserDetails exampleUser = User.withUsername("Bill")
                .password("<PASSWORD>")
                .roles("ADMIN")
                .authorities("write")
                .build();

        UserDetails anotherUser = User.withUsername("John")
                .password("<PASSWORD>")
                .roles("MANAGER")
                .authorities("read")
                .build();

        manager.createUser(exampleUser);
        manager.createUser(anotherUser);

        return manager;
    }

    /**
     * Authorization rules: /hello requires ROLE_ADMIN (via SpEL access
     * expression); all other requests are permitted.
     *
     * NOTE(review): calling .roles(...) after .authorities(...) — the builder
     * uses whichever is set last; the lingering 403 mentioned below may stem
     * from the granted authorities not containing ROLE_ADMIN. Verify.
     */
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // We still get 403 will revisit later
        http.authorizeRequests()
                .antMatchers("/hello")
                .access("hasAnyRole('ADMIN')")
                .anyRequest().permitAll();
    }
}
|
pritishd/PKD_Tools | DevDocs/html/navtreeindex2.js | <filename>DevDocs/html/navtreeindex2.js<gh_stars>1-10
// Auto-generated Doxygen navigation index (part 2): maps documentation page
// anchors to their positions in the navigation tree. Do not edit by hand.
// NOTE(review): some keys (e.g. "functions.html", several namespace pages)
// appear more than once; in a JS object literal the last occurrence wins.
var NAVTREEINDEX2 =
{
"class_p_k_d___tools_1_1lib_py_side_1_1_q_progress_dialog.html#aca95d57730094dfd3bac927b02a4f82f":[1,0,0,2,8,3],
"class_p_k_d___tools_1_1lib_py_side_1_1_q_progress_dialog.html#afc2900db3f9920143d9d3425c6f9f524":[1,0,0,2,8,4],
"class_p_k_d___tools_1_1lib_py_side_1_1_q_question_box.html":[1,0,0,2,9],
"class_p_k_d___tools_1_1lib_py_side_1_1_q_question_box.html#aee76044a56c51ad22d346285aa7a5587":[1,0,0,2,9,0],
"class_p_k_d___tools_1_1lib_py_side_1_1_q_warning_box.html":[1,0,0,2,10],
"class_p_k_d___tools_1_1lib_py_side_1_1_q_warning_box.html#a893970f98535997443eaf58d2c80ae9b":[1,0,0,2,10,0],
"class_p_k_d___tools_1_1lib_py_side_1_1_vertical_tab_bar.html":[1,0,0,2,11],
"class_p_k_d___tools_1_1lib_py_side_1_1_vertical_tab_bar.html#a2222dfb20cecb83c716e124e3ff16c95":[1,0,0,2,11,0],
"class_p_k_d___tools_1_1lib_py_side_1_1_vertical_tab_bar.html#ac44a95d180c7695267fd9afb9ee6087f":[1,0,0,2,11,2],
"class_p_k_d___tools_1_1lib_py_side_1_1_vertical_tab_bar.html#acda7f6ad53144c1a3ebc31b46a132a0f":[1,0,0,2,11,1],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_batch_test.html":[1,0,0,3,0],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_batch_test.html#a35201490f61d42fbfd3b77c67f60b3f0":[1,0,0,3,0,3],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_batch_test.html#a56d8986582b661fa4c0d1437c256b29b":[1,0,0,3,0,2],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_batch_test.html#a93027bc9127cbeec26b39e837bd4020f":[1,0,0,3,0,1],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_batch_test.html#abbd0c69842f9b97b1b45983e5a2e47e8":[1,0,0,3,0,5],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_batch_test.html#ae65ad25ca1958ecf3883c0821384a5f2":[1,0,0,3,0,0],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_batch_test.html#af129a644a9933612557c9f702c9fd2fc":[1,0,0,3,0,6],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_batch_test.html#af4689ccdb5feffe7a8d952ab5ad932be":[1,0,0,3,0,4],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_droid.html":[1,0,0,3,1],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_droid.html#a0773f325afdabbb05e30fff7ce38ea55":[1,0,0,3,1,0],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_unit_test_case.html":[1,0,0,3,2],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_unit_test_case.html#a21b5d4bdd300c2e4dc42b003f3598bec":[1,0,0,3,2,5],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_unit_test_case.html#a480ff0f37d47d5f6d6ee3082ffdbc08d":[1,0,0,3,2,1],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_unit_test_case.html#a777b3c53b5ca6e8fcad1f55e7d59db1e":[1,0,0,3,2,0],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_unit_test_case.html#a9564015d757b1f310e03f8ac92235d48":[1,0,0,3,2,4],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_unit_test_case.html#aabf8b268b7c231ad7e8126c101ba3e38":[1,0,0,3,2,3],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_unit_test_case.html#aabfc2027fd42bce98004a447934e46df":[1,0,0,3,2,2],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_unit_test_case.html#ae92b698a6a371f4298cb5db665939df7":[1,0,0,3,2,7],
"class_p_k_d___tools_1_1lib_unit_tests_1_1_unit_test_case.html#afbbd25109600026733467f2dd50a6615":[1,0,0,3,2,6],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html":[1,0,0,4,0],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#a098a9aa864d328d862f927cbfdac5204":[1,0,0,4,0,0],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#a1c48e5800ef9b21e38c259c659d330d6":[1,0,0,4,0,6],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#a2d483cb698c7b53db528fc5ed88a5f5f":[1,0,0,4,0,11],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#a40b97a049416f36741eaa3fdff6a9981":[1,0,0,4,0,9],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#a46296298289e84b90fc0942364a46a12":[1,0,0,4,0,8],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#a638c53bc3c3fc5f06b8973d45bc7f397":[1,0,0,4,0,3],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#a6e7e904921e1080103f3cfbf65f6bc37":[1,0,0,4,0,13],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#a7f6aac28b6b1313dbe40b813755a479b":[1,0,0,4,0,5],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#a996c516f9dd0a7d5822f8d7fcc42f8bd":[1,0,0,4,0,10],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#a9df9bbab65b455eb89920a0d9ff6bd47":[1,0,0,4,0,1],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#aaeb4eef783f63ed5b0278582d0d548c6":[1,0,0,4,0,2],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#ab9e08752c7f17bba7fd7d0fb81cf970b":[1,0,0,4,0,7],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#ae2953dea8830993c741a1c08d6bbd3bd":[1,0,0,4,0,12],
"class_p_k_d___tools_1_1lib_vector_1_1vector.html#aed435a753ff154752a11eb8b6449a439":[1,0,0,4,0,4],
"class_p_k_d___tools_1_1lib_weights_1_1_blend_shape_weights.html":[1,0,0,5,0],
"class_p_k_d___tools_1_1lib_weights_1_1_blends_weight_manager.html":[1,0,0,5,1],
"class_p_k_d___tools_1_1lib_weights_1_1_cluster_weight_manager.html":[1,0,0,5,2],
"class_p_k_d___tools_1_1lib_weights_1_1_cluster_weights.html":[1,0,0,5,3],
"class_p_k_d___tools_1_1lib_weights_1_1_multi_weights.html":[1,0,0,5,4],
"class_p_k_d___tools_1_1lib_weights_1_1_multi_weights.html#a13ddcdaa3fe0e0dd48d125e4f5592c4c":[1,0,0,5,4,2],
"class_p_k_d___tools_1_1lib_weights_1_1_multi_weights.html#a2fbceaa416f4d1f87d35c961f0ae5ade":[1,0,0,5,4,0],
"class_p_k_d___tools_1_1lib_weights_1_1_multi_weights.html#ad7574024390c1027b860a5ac8ff41df1":[1,0,0,5,4,1],
"class_p_k_d___tools_1_1lib_weights_1_1_multi_weights.html#ae5abd3a7344116d101858d27fad85839":[1,0,0,5,4,3],
"class_p_k_d___tools_1_1lib_weights_1_1_skin_weight_manager.html":[1,0,0,5,5],
"class_p_k_d___tools_1_1lib_weights_1_1_skin_weights.html":[1,0,0,5,6],
"class_p_k_d___tools_1_1lib_weights_1_1_skin_weights.html#a6d98705199f4a06672fcfc605331074c":[1,0,0,5,6,1],
"class_p_k_d___tools_1_1lib_weights_1_1_skin_weights.html#a9a3e83b4c34a29e1ee2c7589e42016fe":[1,0,0,5,6,0],
"class_p_k_d___tools_1_1lib_weights_1_1_weight_manager.html":[1,0,0,5,7],
"class_p_k_d___tools_1_1lib_weights_1_1_weight_manager.html#a0e5c92c48fbbd4cfe2b250e95742d8ff":[1,0,0,5,7,2],
"class_p_k_d___tools_1_1lib_weights_1_1_weight_manager.html#a121c9f55489fa17e416bca2442dcb3e5":[1,0,0,5,7,0],
"class_p_k_d___tools_1_1lib_weights_1_1_weight_manager.html#a21ab6664289c35ee662719a1b59e20cd":[1,0,0,5,7,8],
"class_p_k_d___tools_1_1lib_weights_1_1_weight_manager.html#a70b068f5cb543e96af0e6e2603347865":[1,0,0,5,7,4],
"class_p_k_d___tools_1_1lib_weights_1_1_weight_manager.html#a7741b0ded56e78e656096cf149c77df2":[1,0,0,5,7,1],
"class_p_k_d___tools_1_1lib_weights_1_1_weight_manager.html#aba7b3b707f00670b8e770a10bca948de":[1,0,0,5,7,6],
"class_p_k_d___tools_1_1lib_weights_1_1_weight_manager.html#abda0d854f7b56b82e74d614b7df3bdeb":[1,0,0,5,7,7],
"class_p_k_d___tools_1_1lib_weights_1_1_weight_manager.html#acb5ddfa7636502d428a9fdce53727a7e":[1,0,0,5,7,3],
"class_p_k_d___tools_1_1lib_weights_1_1_weight_manager.html#ae7f812afa3beaae3f410811fc26e451d":[1,0,0,5,7,9],
"class_p_k_d___tools_1_1lib_weights_1_1_weight_manager.html#aef369ba61afc51f83e1a5fae30678fa6":[1,0,0,5,7,5],
"class_p_k_d___tools_1_1lib_weights_1_1_weights.html":[1,0,0,5,8],
"class_p_k_d___tools_1_1lib_weights_1_1_weights.html#a0c620c9cb00b752c8fa132aa99dfdd2e":[1,0,0,5,8,0],
"class_p_k_d___tools_1_1lib_weights_1_1_weights.html#a33533319062acb468711288dd7febb0d":[1,0,0,5,8,5],
"class_p_k_d___tools_1_1lib_weights_1_1_weights.html#a50cbcc4cf0c4b6e0585f462a57be2f65":[1,0,0,5,8,4],
"class_p_k_d___tools_1_1lib_weights_1_1_weights.html#a6b42d6ecd6d8bdc6444ccc700648b325":[1,0,0,5,8,1],
"class_p_k_d___tools_1_1lib_weights_1_1_weights.html#a7aef1027fec8cad876d4bf7134591d12":[1,0,0,5,8,9],
"class_p_k_d___tools_1_1lib_weights_1_1_weights.html#a7c02013e4975c6cbbd6bcfa018e78a97":[1,0,0,5,8,8],
"class_p_k_d___tools_1_1lib_weights_1_1_weights.html#a8594d2e58a5b244b0f60aa3e37e2c640":[1,0,0,5,8,7],
"class_p_k_d___tools_1_1lib_weights_1_1_weights.html#ab488d4d73af9d8490f2e8a48887818bd":[1,0,0,5,8,3],
"class_p_k_d___tools_1_1lib_weights_1_1_weights.html#acc11a6f2551d45a505ed458228e8d072":[1,0,0,5,8,6],
"class_p_k_d___tools_1_1lib_weights_1_1_weights.html#af06e93c0322719e3457a8c040147f0c1":[1,0,0,5,8,2],
"classes.html":[1,1],
"functions.html":[1,3,0],
"functions.html":[1,3,0,0],
"functions_a.html":[1,3,0,1],
"functions_b.html":[1,3,0,2],
"functions_c.html":[1,3,0,3],
"functions_d.html":[1,3,0,4],
"functions_e.html":[1,3,0,5],
"functions_f.html":[1,3,0,6],
"functions_func.html":[1,3,1,0],
"functions_func.html":[1,3,1],
"functions_func_a.html":[1,3,1,1],
"functions_func_b.html":[1,3,1,2],
"functions_func_c.html":[1,3,1,3],
"functions_func_d.html":[1,3,1,4],
"functions_func_e.html":[1,3,1,5],
"functions_func_f.html":[1,3,1,6],
"functions_func_g.html":[1,3,1,7],
"functions_func_h.html":[1,3,1,8],
"functions_func_i.html":[1,3,1,9],
"functions_func_j.html":[1,3,1,10],
"functions_func_l.html":[1,3,1,11],
"functions_func_m.html":[1,3,1,12],
"functions_func_n.html":[1,3,1,13],
"functions_func_o.html":[1,3,1,14],
"functions_func_p.html":[1,3,1,15],
"functions_func_r.html":[1,3,1,16],
"functions_func_s.html":[1,3,1,17],
"functions_func_t.html":[1,3,1,18],
"functions_func_u.html":[1,3,1,19],
"functions_func_v.html":[1,3,1,20],
"functions_func_w.html":[1,3,1,21],
"functions_g.html":[1,3,0,7],
"functions_h.html":[1,3,0,8],
"functions_i.html":[1,3,0,9],
"functions_j.html":[1,3,0,10],
"functions_l.html":[1,3,0,11],
"functions_m.html":[1,3,0,12],
"functions_n.html":[1,3,0,13],
"functions_o.html":[1,3,0,14],
"functions_p.html":[1,3,0,15],
"functions_prop.html":[1,3,3],
"functions_r.html":[1,3,0,16],
"functions_s.html":[1,3,0,17],
"functions_t.html":[1,3,0,18],
"functions_u.html":[1,3,0,19],
"functions_v.html":[1,3,0,20],
"functions_vars.html":[1,3,2],
"functions_w.html":[1,3,0,21],
"functions_x.html":[1,3,0,22],
"hierarchy.html":[1,2],
"index.html":[],
"namespace_p_k_d___tools.html":[1,0,0],
"namespace_p_k_d___tools.html":[0,0,0],
"namespace_p_k_d___tools_1_1_rigging.html":[0,0,0,11],
"namespace_p_k_d___tools_1_1_rigging.html":[1,0,0,6],
"namespace_p_k_d___tools_1_1_rigging_1_1core.html":[0,0,0,11,0],
"namespace_p_k_d___tools_1_1_rigging_1_1core.html":[1,0,0,6,0],
"namespace_p_k_d___tools_1_1_rigging_1_1limb.html":[1,0,0,6,1],
"namespace_p_k_d___tools_1_1_rigging_1_1limb.html":[0,0,0,11,1],
"namespace_p_k_d___tools_1_1_rigging_1_1parts.html":[1,0,0,6,2],
"namespace_p_k_d___tools_1_1_rigging_1_1parts.html":[0,0,0,11,2],
"namespace_p_k_d___tools_1_1_rigging_1_1spine.html":[0,0,0,11,3],
"namespace_p_k_d___tools_1_1_rigging_1_1spine.html":[1,0,0,6,3],
"namespace_p_k_d___tools_1_1_rigging_1_1utils.html":[0,0,0,11,4],
"namespace_p_k_d___tools_1_1lib_crypto.html":[0,0,0,0],
"namespace_p_k_d___tools_1_1lib_file.html":[0,0,0,1],
"namespace_p_k_d___tools_1_1lib_g_u_i.html":[0,0,0,3],
"namespace_p_k_d___tools_1_1lib_g_u_i.html":[1,0,0,1],
"namespace_p_k_d___tools_1_1lib_geo.html":[1,0,0,0],
"namespace_p_k_d___tools_1_1lib_geo.html":[0,0,0,2],
"namespace_p_k_d___tools_1_1lib_math.html":[0,0,0,4],
"namespace_p_k_d___tools_1_1lib_py_side.html":[0,0,0,5],
"namespace_p_k_d___tools_1_1lib_py_side.html":[1,0,0,2],
"namespace_p_k_d___tools_1_1lib_unit_tests.html":[1,0,0,3],
"namespace_p_k_d___tools_1_1lib_unit_tests.html":[0,0,0,6],
"namespace_p_k_d___tools_1_1lib_utilities.html":[0,0,0,7],
"namespace_p_k_d___tools_1_1lib_vector.html":[0,0,0,8],
"namespace_p_k_d___tools_1_1lib_vector.html":[1,0,0,4],
"namespace_p_k_d___tools_1_1lib_weights.html":[1,0,0,5],
"namespace_p_k_d___tools_1_1lib_weights.html":[0,0,0,9],
"namespace_p_k_d___tools_1_1lib_xml.html":[0,0,0,10],
"namespacemembers.html":[0,1,0],
"namespacemembers_func.html":[0,1,1],
"namespacemembers_vars.html":[0,1,2],
"namespaces.html":[0,0],
"pages.html":[]
};
|
adamrvfisher/TechnicalAnalysisLibrary | IndexListMaker.py | <reponame>adamrvfisher/TechnicalAnalysisLibrary
# -*- coding: utf-8 -*-
"""
@author: <NAME> - https://www.linkedin.com/in/adamrvfisher/
"""

# Formatting tool: prefix index names with '^' so they can be added to
# SymbolList for scraping/data requisition.

# Import modules
import pandas as pd

# Read the tab-separated listing; only the first column (the ticker) is kept.
data = pd.read_csv('Indicies.txt', sep="\t", header=None,
                   names=['Ticker', 'EverythingElse'])
data = data.drop('EverythingElse', axis=1)

# Vectorized string concatenation — no temporary 'Carrot' column needed.
data['IndexTicker'] = '^' + data['Ticker']

# Skip the first (header-like) row, then drop the raw ticker column.
data = data[1:]
data = data.drop('Ticker', axis=1)

# Re-label rows 0..n-1 and rebuild the frame on that index.
data['Index'] = range(len(data))
dataframe = pd.DataFrame(data, index=data['Index'])
# NOTE(review): this drops a second leading row ([1:] was already applied
# above); preserved as-is for output compatibility — confirm it is intentional.
dataframe = dataframe[1:].drop('Index', axis=1)

# Display
print(dataframe)
# Save to file
dataframe.to_csv('IndexListo', sep=',')
|
hadjri/evergreen | model/generate.go | <gh_stars>0
package model
import (
"context"
"github.com/evergreen-ci/evergreen"
"github.com/evergreen-ci/evergreen/model/build"
"github.com/evergreen-ci/evergreen/model/patch"
"github.com/evergreen-ci/evergreen/model/task"
"github.com/evergreen-ci/utility"
"github.com/mongodb/grip"
"github.com/mongodb/grip/message"
"github.com/pkg/errors"
"go.mongodb.org/mongo-driver/mongo"
yaml "gopkg.in/yaml.v2"
)
const (
	// Upper bounds on what a single generate.tasks invocation may produce;
	// enforced by validateMaxTasksAndVariants.
	maxGeneratedBuildVariants = 200
	maxGeneratedTasks         = 25000
)
// GeneratedProject is a subset of the Project type, and is generated from the
// JSON from a `generate.tasks` command.
type GeneratedProject struct {
	BuildVariants []parserBV                 `yaml:"buildvariants"`
	Tasks         []parserTask               `yaml:"tasks"`
	Functions     map[string]*YAMLCommandSet `yaml:"functions"`
	TaskGroups    []parserTaskGroup          `yaml:"task_groups"`

	// TaskID is the ID of the generator task that produced this project.
	TaskID string
}
// MergeGeneratedProjects takes a slice of generated projects and returns a single, deduplicated project.
// Buildvariants that only add tasks are merged; full redefinitions of a variant,
// task, function, or task group are collected as errors.
func MergeGeneratedProjects(projects []GeneratedProject) (*GeneratedProject, error) {
	catcher := grip.NewBasicCatcher()

	bvs := map[string]*parserBV{}
	tasks := map[string]*parserTask{}
	functions := map[string]*YAMLCommandSet{}
	taskGroups := map[string]*parserTaskGroup{}

	for _, p := range projects {
		for i, bv := range p.BuildVariants {
			if len(bv.Tasks) == 0 {
				// A variant with no tasks is a full (re)definition; a duplicate
				// is an error rather than something to merge.
				if _, ok := bvs[bv.Name]; ok {
					catcher.Errorf("found duplicate buildvariant (%s)", bv.Name)
				} else {
					bvs[bv.Name] = &p.BuildVariants[i]
				}
				continue
			}
			// FIX: the previous code appended the new variant's tasks into the
			// existing map entry but then unconditionally overwrote the map
			// value with the new variant, discarding the merged task lists.
			// Keep the existing entry when one is present.
			if existing, ok := bvs[bv.Name]; ok {
				existing.Tasks = append(existing.Tasks, bv.Tasks...)
				existing.DisplayTasks = append(existing.DisplayTasks, bv.DisplayTasks...)
			} else {
				bvs[bv.Name] = &p.BuildVariants[i]
			}
		}
		for i, t := range p.Tasks {
			if _, ok := tasks[t.Name]; ok {
				catcher.Errorf("found duplicate task (%s)", t.Name)
			} else {
				tasks[t.Name] = &p.Tasks[i]
			}
		}
		for f, val := range p.Functions {
			if _, ok := functions[f]; ok {
				catcher.Errorf("found duplicate function (%s)", f)
			}
			functions[f] = val
		}
		for i, tg := range p.TaskGroups {
			if _, ok := taskGroups[tg.Name]; ok {
				catcher.Errorf("found duplicate task group (%s)", tg.Name)
			} else {
				taskGroups[tg.Name] = &p.TaskGroups[i]
			}
		}
	}

	g := &GeneratedProject{}
	for i := range bvs {
		g.BuildVariants = append(g.BuildVariants, *bvs[i])
	}
	for i := range tasks {
		g.Tasks = append(g.Tasks, *tasks[i])
	}
	g.Functions = functions
	for i := range taskGroups {
		g.TaskGroups = append(g.TaskGroups, *taskGroups[i])
	}
	return g, catcher.Resolve()
}
// ParseProjectFromJSONString returns a GeneratedProject from a JSON string. We use the
// YAML parser instead of the JSON parser because the JSON parser will not
// properly unmarshal into a struct with multiple fields as options, like the YAMLCommandSet.
func ParseProjectFromJSONString(data string) (GeneratedProject, error) {
	g := GeneratedProject{}
	dataAsJSON := []byte(data)
	if err := yaml.Unmarshal(dataAsJSON, &g); err != nil {
		return g, errors.Wrap(err, "error unmarshaling into GeneratedTasks")
	}
	return g, nil
}
// ParseProjectFromJSON returns a GeneratedProject parsed from raw JSON bytes.
// The YAML parser is used instead of the JSON parser because the JSON parser
// will not properly unmarshal into a struct with multiple fields as options,
// like the YAMLCommandSet.
func ParseProjectFromJSON(data []byte) (GeneratedProject, error) {
	var parsed GeneratedProject
	err := yaml.Unmarshal(data, &parsed)
	if err != nil {
		err = errors.Wrap(err, "error unmarshaling into GeneratedTasks")
	}
	return parsed, err
}
// NewVersion adds the buildvariants, tasks, and functions from a generated
// project config to a project, returning the updated project, parser project,
// and version.
func (g *GeneratedProject) NewVersion(p *Project, pp *ParserProject, v *Version) (*Project, *ParserProject, *Version, error) {
	// Cache project data in maps for quick lookup
	cachedProject := cacheProjectData(p)

	// Validate generated project against original project.
	if err := g.validateGeneratedProject(p, cachedProject); err != nil {
		// Return version in this error case for handleError, which checks for a race. We only need to do this in cases where there is a validation check.
		return nil, pp, v, errors.Wrap(err, "generated project is invalid")
	}

	// Splice the generated content into the parser project (falling back to
	// the version's raw YAML config for legacy versions).
	newPP, err := g.addGeneratedProjectToConfig(pp, v.Config, cachedProject)
	if err != nil {
		return nil, nil, nil, errors.Wrap(err, "error creating config from generated config")
	}
	newPP.Id = v.Id
	p, err = TranslateProject(newPP)
	if err != nil {
		return nil, nil, nil, errors.Wrap(err, "error translating project")
	}
	return p, newPP, v, nil
}
// Save persists the generated project: it re-checks that no concurrent
// generator already finished for this task, upserts the updated parser
// project, and then creates the new builds and tasks.
func (g *GeneratedProject) Save(ctx context.Context, p *Project, pp *ParserProject, v *Version, t *task.Task) error {
	// Get task again, to exit early if another generator finished early.
	t, err := task.FindOneId(g.TaskID)
	if err != nil {
		return errors.Wrapf(err, "error finding task %s", g.TaskID)
	}
	if t == nil {
		return errors.Errorf("unable to find task %s", g.TaskID)
	}
	if t.GeneratedTasks {
		// Another generator already marked generation complete; surface
		// mongo.ErrNoDocuments so callers can treat this as a benign race.
		grip.Debug(message.Fields{
			"message": "skipping attempting to update parser project because another generator marked the task complete",
			"task":    t.Id,
			"version": t.Version,
		})
		return mongo.ErrNoDocuments
	}
	if err := updateParserProject(v, pp); err != nil {
		return errors.WithStack(err)
	}
	if err := g.saveNewBuildsAndTasks(ctx, v, p, t); err != nil {
		return errors.Wrap(err, "error savings new builds and tasks")
	}
	return nil
}
// updateParserProject upserts pp with the next config number, derived from the
// larger of the parser project's and the version's current numbers (the
// version's number only wins for legacy versions with no parser project).
func updateParserProject(v *Version, pp *ParserProject) error {
	base := pp.ConfigUpdateNumber
	if v.ConfigUpdateNumber > base {
		base = v.ConfigUpdateNumber
	}
	if err := pp.UpsertWithConfigNumber(base + 1); err != nil {
		return errors.Wrapf(err, "error upserting parser project '%s'", pp.Id)
	}
	return nil
}
// cacheProjectData indexes the project's buildvariants, tasks, and functions
// in maps for O(1) lookup during validation and merging.
func cacheProjectData(p *Project) projectMaps {
	cachedProject := projectMaps{
		buildVariants: map[string]struct{}{},
		tasks:         map[string]*ProjectTask{},
		functions:     map[string]*YAMLCommandSet{},
	}
	// use a set because we never need to look up buildvariants
	for _, bv := range p.BuildVariants {
		cachedProject.buildVariants[bv.Name] = struct{}{}
	}
	// FIX: index by position instead of taking the address of the range
	// variable — `&t` aliased the single loop variable, so every map entry
	// ended up pointing at the last task in the slice.
	for i := range p.Tasks {
		cachedProject.tasks[p.Tasks[i].Name] = &p.Tasks[i]
	}
	// functions is already a map, cache it anyway for convenience
	cachedProject.functions = p.Functions
	return cachedProject
}
// saveNewBuildsAndTasks saves new builds and tasks to the db.
func (g *GeneratedProject) saveNewBuildsAndTasks(ctx context.Context, v *Version, p *Project, t *task.Task) error {
	// inherit priority from the parent task
	for i, projBv := range p.BuildVariants {
		for j := range projBv.Tasks {
			p.BuildVariants[i].Tasks[j].Priority = t.Priority
		}
	}
	// Only consider batchtime for mainline builds. We should always respect activate if it is set.
	batchTimeInfo := g.findTasksAndVariantsWithSpecificActivations(v.Requester)
	// Collect every (variant, task) pair the generated config introduces.
	newTVPairs := TaskVariantPairs{}
	for _, bv := range g.BuildVariants {
		newTVPairs = appendTasks(newTVPairs, bv, p)
	}
	var err error
	// Pull in tasks the generated tasks depend on; a failure here is logged
	// as a warning rather than aborting generation.
	newTVPairs.ExecTasks, err = IncludeDependencies(p, newTVPairs.ExecTasks, v.Requester)
	grip.Warning(message.WrapError(err, message.Fields{
		"message": "error including dependencies for generator",
		"task":    g.TaskID,
	}))
	// group into new builds and new tasks for existing builds
	builds, err := build.Find(build.ByVersion(v.Id).WithFields(build.IdKey, build.BuildVariantKey))
	if err != nil {
		return errors.Wrap(err, "problem finding builds for version")
	}
	buildSet := map[string]struct{}{}
	for _, b := range builds {
		buildSet[b.BuildVariant] = struct{}{}
	}
	// Split the pairs: tasks for variants that already have a build get added
	// to that build; the rest require brand-new builds.
	newTVPairsForExistingVariants := TaskVariantPairs{}
	newTVPairsForNewVariants := TaskVariantPairs{}
	for _, execTask := range newTVPairs.ExecTasks {
		if _, ok := buildSet[execTask.Variant]; ok {
			newTVPairsForExistingVariants.ExecTasks = append(newTVPairsForExistingVariants.ExecTasks, execTask)
		} else {
			newTVPairsForNewVariants.ExecTasks = append(newTVPairsForNewVariants.ExecTasks, execTask)
		}
	}
	for _, dispTask := range newTVPairs.DisplayTasks {
		if _, ok := buildSet[dispTask.Variant]; ok {
			newTVPairsForExistingVariants.DisplayTasks = append(newTVPairsForExistingVariants.DisplayTasks, dispTask)
		} else {
			newTVPairsForNewVariants.DisplayTasks = append(newTVPairsForNewVariants.DisplayTasks, dispTask)
		}
	}
	// This will only be populated for patches, not mainline commits.
	var syncAtEndOpts patch.SyncAtEndOptions
	if patchDoc, _ := patch.FindOne(patch.ByVersion(v.Id)); patchDoc != nil {
		if err = patchDoc.AddSyncVariantsTasks(newTVPairs.TVPairsToVariantTasks()); err != nil {
			return errors.Wrap(err, "could not update sync variants and tasks")
		}
		syncAtEndOpts = patchDoc.SyncAtEndOpts
	}
	projectRef, err := FindOneProjectRef(p.Identifier)
	if err != nil {
		return errors.Wrap(err, "unable to find project ref")
	}
	if projectRef == nil {
		return errors.Errorf("project '%s' not found", p.Identifier)
	}
	tasksInExistingBuilds, err := addNewTasks(ctx, batchTimeInfo, v, p, newTVPairsForExistingVariants, syncAtEndOpts, projectRef.Identifier, g.TaskID)
	if err != nil {
		return errors.Wrap(err, "errors adding new tasks")
	}
	_, tasksInNewBuilds, err := addNewBuilds(ctx, batchTimeInfo, v, p, newTVPairsForNewVariants, syncAtEndOpts, projectRef, g.TaskID)
	if err != nil {
		return errors.Wrap(err, "errors adding new builds")
	}
	// Tasks that depended on the generator must now also wait on the tasks it generated.
	if err = addDependencies(t, append(tasksInExistingBuilds, tasksInNewBuilds...)); err != nil {
		return errors.Wrap(err, "error adding dependencies")
	}
	return nil
}
// specificActivationInfo tracks which generated variants and tasks control
// their own activation (batchtime, cron, or an explicit activate setting) and
// therefore should not be activated immediately.
type specificActivationInfo struct {
	tasks    map[string][]string // tasks by variant that have batchtime or activate specified
	variants []string            // variants that have batchtime or activate specified
}
// newSpecificActivationInfo returns an empty, ready-to-use accumulator with
// both collections initialized.
func newSpecificActivationInfo() specificActivationInfo {
	var info specificActivationInfo
	info.tasks = map[string][]string{}
	info.variants = []string{}
	return info
}
// variantHasSpecificActivation reports whether the named variant controls its
// own activation.
func (b *specificActivationInfo) variantHasSpecificActivation(variant string) bool {
	return utility.StringSliceContains(b.variants, variant)
}
// getTasks returns the task names with specific activation recorded for
// variant (nil when none were recorded).
func (b *specificActivationInfo) getTasks(variant string) []string {
	return b.tasks[variant]
}
// hasTasks reports whether any variant has tasks with specific activation.
func (b *specificActivationInfo) hasTasks() bool {
	return len(b.tasks) > 0
}
// given some list of tasks, returns the tasks that don't have batchtime
// (i.e. no specific activation recorded for them on this variant).
// NOTE(review): relies on the first return value of
// utility.StringSliceSymmetricDifference being the elements of taskNames not
// present in b.tasks[variant] — confirm against the utility package.
func (b *specificActivationInfo) tasksWithoutSpecificActivation(taskNames []string, variant string) []string {
	tasksWithoutSpecificActivation, _ := utility.StringSliceSymmetricDifference(taskNames, b.tasks[variant])
	return tasksWithoutSpecificActivation
}
// findTasksAndVariantsWithSpecificActivations collects the generated variants
// and tasks that control their own activation. Batchtime/cron settings only
// count for requesters where batchtime applies; an explicit activate setting
// always counts.
func (g *GeneratedProject) findTasksAndVariantsWithSpecificActivations(requester string) specificActivationInfo {
	info := newSpecificActivationInfo()
	considerBatchtime := evergreen.ShouldConsiderBatchtime(requester)
	for _, bv := range g.BuildVariants {
		if (considerBatchtime && (bv.BatchTime != nil || bv.CronBatchTime != "")) || bv.Activate != nil {
			info.variants = append(info.variants, bv.name())
		}
		// Tasks may carry their own activation settings regardless of the
		// variant-level setting.
		specialTasks := []string{}
		for _, bvt := range bv.Tasks {
			if (considerBatchtime && (bvt.BatchTime != nil || bvt.CronBatchTime != "")) || bvt.Activate != nil {
				specialTasks = append(specialTasks, bvt.Name)
			}
		}
		if len(specialTasks) > 0 {
			info.tasks[bv.name()] = specialTasks
		}
	}
	return info
}
// addDependencies updates tasks that depend on t so that they also depend on
// the newly generated tasks, for both dependency statuses.
func addDependencies(t *task.Task, newTaskIds []string) error {
	for _, status := range []string{evergreen.TaskSucceeded, task.AllStatuses} {
		if err := t.UpdateDependsOn(status, newTaskIds); err != nil {
			return errors.Wrapf(err, "can't update tasks depending on '%s'", t.Id)
		}
	}
	return nil
}
// appendTasks expands the buildvariant's tasks into (variant, task) pairs,
// flattening any task groups into their member tasks, and appends display
// task pairs as well.
func appendTasks(pairs TaskVariantPairs, bv parserBV, p *Project) TaskVariantPairs {
	groupsByName := make(map[string]TaskGroup, len(p.TaskGroups))
	for _, tg := range p.TaskGroups {
		groupsByName[tg.Name] = tg
	}
	for _, bvt := range bv.Tasks {
		tg, isGroup := groupsByName[bvt.Name]
		if !isGroup {
			pairs.ExecTasks = append(pairs.ExecTasks, TVPair{bv.Name, bvt.Name})
			continue
		}
		// A task group contributes one pair per member task.
		for _, groupTask := range tg.Tasks {
			pairs.ExecTasks = append(pairs.ExecTasks, TVPair{bv.Name, groupTask})
		}
	}
	for _, dt := range bv.DisplayTasks {
		pairs.DisplayTasks = append(pairs.DisplayTasks, TVPair{bv.Name, dt.Name})
	}
	return pairs
}
// addGeneratedProjectToConfig takes a ParserProject and a YML config and returns a new one with the GeneratedProject included.
// support for YML config will be degraded.
func (g *GeneratedProject) addGeneratedProjectToConfig(intermediateProject *ParserProject, config string, cachedProject projectMaps) (*ParserProject, error) {
	var err error
	// Legacy fallback: when no parser project exists, build one from the raw
	// YAML config string.
	if intermediateProject == nil {
		intermediateProject, err = createIntermediateProject([]byte(config))
		if err != nil {
			return nil, errors.Wrapf(err, "error creating intermediate project")
		}
	}
	// Append buildvariants, tasks, and functions to the config.
	intermediateProject.TaskGroups = append(intermediateProject.TaskGroups, g.TaskGroups...)
	intermediateProject.Tasks = append(intermediateProject.Tasks, g.Tasks...)
	for key, val := range g.Functions {
		intermediateProject.Functions[key] = val
	}
	for _, bv := range g.BuildVariants {
		// If the buildvariant already exists, append tasks to it.
		if _, ok := cachedProject.buildVariants[bv.Name]; ok {
			for i, intermediateProjectBV := range intermediateProject.BuildVariants {
				if intermediateProjectBV.Name == bv.Name {
					intermediateProject.BuildVariants[i].Tasks = append(intermediateProject.BuildVariants[i].Tasks, bv.Tasks...)
					for _, dt := range bv.DisplayTasks {
						// check if the display task already exists, and if it does add the exec tasks to the existing display task
						foundExisting := false
						for j, intermediateProjectDT := range intermediateProjectBV.DisplayTasks {
							if intermediateProjectDT.Name == dt.Name {
								foundExisting = true
								// avoid adding duplicates
								_, execTasksToAdd := utility.StringSliceSymmetricDifference(intermediateProjectDT.ExecutionTasks, dt.ExecutionTasks)
								intermediateProject.BuildVariants[i].DisplayTasks[j].ExecutionTasks = append(
									intermediateProject.BuildVariants[i].DisplayTasks[j].ExecutionTasks, execTasksToAdd...)
								break
							}
						}
						if !foundExisting {
							intermediateProject.BuildVariants[i].DisplayTasks = append(intermediateProject.BuildVariants[i].DisplayTasks, dt)
						}
					}
				}
			}
		} else {
			// If the buildvariant does not exist, create it.
			intermediateProject.BuildVariants = append(intermediateProject.BuildVariants, bv)
		}
	}
	return intermediateProject, nil
}
// projectMaps is a struct of maps of project fields, which allows efficient comparisons of generated projects to projects.
type projectMaps struct {
	buildVariants map[string]struct{}        // buildvariant names (membership only)
	tasks         map[string]*ProjectTask    // project tasks by name
	functions     map[string]*YAMLCommandSet // project functions by name
}
// validateMaxTasksAndVariants records an error on catcher when the generated
// project exceeds the configured buildvariant or task caps.
func (g *GeneratedProject) validateMaxTasksAndVariants(catcher grip.Catcher) {
	if numBVs := len(g.BuildVariants); numBVs > maxGeneratedBuildVariants {
		catcher.Add(errors.Errorf("it is illegal to generate more than %d buildvariants", maxGeneratedBuildVariants))
	}
	if numTasks := len(g.Tasks); numTasks > maxGeneratedTasks {
		catcher.Add(errors.Errorf("it is illegal to generate more than %d tasks", maxGeneratedTasks))
	}
}
// validateNoRedefine validates that buildvariants, tasks, or functions are not
// redefined, except to add a task to a buildvariant.
func (g *GeneratedProject) validateNoRedefine(cachedProject projectMaps, catcher grip.Catcher) {
	for _, bv := range g.BuildVariants {
		_, exists := cachedProject.buildVariants[bv.Name]
		// An existing variant may only be extended with tasks; any other
		// populated field is a redefinition.
		if exists && isNonZeroBV(bv) {
			catcher.Add(errors.Errorf("cannot redefine buildvariants in 'generate.tasks' (%s), except to add tasks", bv.Name))
		}
	}
	for _, t := range g.Tasks {
		if _, exists := cachedProject.tasks[t.Name]; exists {
			catcher.Add(errors.Errorf("cannot redefine tasks in 'generate.tasks' (%s)", t.Name))
		}
	}
	for name := range g.Functions {
		if _, exists := cachedProject.functions[name]; exists {
			catcher.Add(errors.Errorf("cannot redefine functions in 'generate.tasks' (%s)", name))
		}
	}
}
// isNonZeroBV reports whether the buildvariant sets any field beyond its name
// and task list, i.e. whether it attempts to redefine an existing variant.
func isNonZeroBV(bv parserBV) bool {
	return bv.DisplayName != "" ||
		len(bv.Expansions) > 0 ||
		len(bv.Modules) > 0 ||
		bv.Disabled ||
		len(bv.Tags) > 0 ||
		bv.Push ||
		bv.BatchTime != nil ||
		bv.Stepback != nil ||
		len(bv.RunOn) > 0
}
// validateNoRecursiveGenerateTasks validates that no 'generate.tasks' calls another 'generate.tasks'.
func (g *GeneratedProject) validateNoRecursiveGenerateTasks(cachedProject projectMaps, catcher grip.Catcher) {
	// Generated tasks may not themselves run generate.tasks.
	for _, t := range g.Tasks {
		for _, cmd := range t.Commands {
			if cmd.Command == evergreen.GenerateTasksCommandName {
				catcher.Add(errors.New("cannot define 'generate.tasks' from a 'generate.tasks' block"))
			}
		}
	}
	// Neither may generated functions.
	for _, f := range g.Functions {
		for _, cmd := range f.List() {
			if cmd.Command == evergreen.GenerateTasksCommandName {
				catcher.Add(errors.New("cannot define 'generate.tasks' from a 'generate.tasks' block"))
			}
		}
	}
	// Generated variants may not reference pre-existing tasks that run
	// generate.tasks, directly or via a function.
	for _, bv := range g.BuildVariants {
		for _, t := range bv.Tasks {
			if projectTask, ok := cachedProject.tasks[t.Name]; ok {
				validateCommands(projectTask, cachedProject, t, catcher)
			}
		}
	}
}
// validateCommands records an error on catcher if projectTask (referenced by
// the generated buildvariant task pvt) runs 'generate.tasks' itself, or does
// so indirectly through one of the project's functions.
func validateCommands(projectTask *ProjectTask, cachedProject projectMaps, pvt parserBVTaskUnit, catcher grip.Catcher) {
	for _, cmd := range projectTask.Commands {
		if cmd.Command == evergreen.GenerateTasksCommandName {
			catcher.Add(errors.Errorf("cannot assign a task that calls 'generate.tasks' from a 'generate.tasks' block (%s)", pvt.Name))
		}
		if cmd.Function != "" {
			// Follow a function reference and inspect its commands too.
			if functionCmds, ok := cachedProject.functions[cmd.Function]; ok {
				for _, functionCmd := range functionCmds.List() {
					if functionCmd.Command == evergreen.GenerateTasksCommandName {
						catcher.Add(errors.Errorf("cannot assign a task that calls 'generate.tasks' from a 'generate.tasks' block (%s)", cmd.Function))
					}
				}
			}
		}
	}
}
// validateGeneratedProject runs all validations against the generated project
// and returns the accumulated errors, if any.
// NOTE(review): p is currently unused here — the cached maps carry all the
// project data the validations need.
func (g *GeneratedProject) validateGeneratedProject(p *Project, cachedProject projectMaps) error {
	catcher := grip.NewBasicCatcher()
	g.validateMaxTasksAndVariants(catcher)
	g.validateNoRedefine(cachedProject, catcher)
	g.validateNoRecursiveGenerateTasks(cachedProject, catcher)
	return errors.WithStack(catcher.Resolve())
}
|
icsysky/LRS | ui/src/redux/modules/statements.js | import { Map, fromJS } from 'immutable';
import { handleActions } from 'redux-actions';
import { createSelector } from 'reselect';
// Action type for replacing the current statement query.
// NOTE(review): 'statments' is misspelled inside the action-type string; it is
// part of the runtime contract (action comparisons), so it is left unchanged.
const SET_STATEMENT_QUERY = 'learninglocker/statments/SET_STATEMENT_QUERY';
/*
 * Reducers
 */
// Maps action types to state transitions: SET_STATEMENT_QUERY stores the new
// query on the immutable state under the 'query' key.
const handler = handleActions({
  [SET_STATEMENT_QUERY]: (state, action) => {
    const { query } = action;
    return state.set('query', query);
  }
});
// Default state: an empty statement query.
const initialState = fromJS({
  query: {}
});
// Root reducer for this module. Plain-JS state (e.g. from rehydration) is
// coerced to an Immutable Map before being handed to the action handler.
export default function reducer(state = initialState, action = {}) {
  const immutableState = Map.isMap(state) ? state : fromJS(state);
  return handler(immutableState, action);
}
/*
* Actions
*/
// Action creator: build a SET_STATEMENT_QUERY action carrying the new query.
export const updateStatementQuery = (query) => {
  return { type: SET_STATEMENT_QUERY, query };
};
// Public action creators exported by this module.
export const actions = {
  updateStatementQuery
};
/*
 * Selectors
 */
// Reads the current statement query from the root state, defaulting to an
// empty Map when none is set.
// NOTE(review): the input selector returns the whole root state, so reselect
// memoizes on root-state identity; it then reads state.statements — confirm
// that slice is always present before this selector runs.
export const statementQuerySelector = createSelector(
  [state => state],
  state => state.statements.get('query', new Map())
);

// No sagas for this module (kept for the module registration contract).
export const sagas = [];
|
mrkara/casper-sdk | src/main/java/com/syntifi/casper/sdk/model/clvalue/cltype/CLTypeByteArray.java | <gh_stars>0
package com.syntifi.casper.sdk.model.clvalue.cltype;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
/**
 * CLType for {@link AbstractCLType#BYTE_ARRAY}
 *
 * @author <NAME>
 * @author <NAME>
 * @see AbstractCLType
 * @since 0.0.1
 */
@Getter
@EqualsAndHashCode(callSuper = false, of = { "typeName", "length" })
public class CLTypeByteArray extends AbstractCLType {

    // Fixed type discriminator for this CLType; final, so no setter is generated.
    private final String typeName = AbstractCLType.BYTE_ARRAY;

    // Declared length (in bytes) of the byte-array value.
    // NOTE(review): the JSON property name is the BYTE_ARRAY constant's value —
    // confirm the serialized field name matches the Casper JSON schema.
    @Setter
    @JsonProperty(AbstractCLType.BYTE_ARRAY)
    private int length;
}
|
asaaki/ArangoDB | arangod/Utils/AhuacatlGuard.h | <gh_stars>10-100
////////////////////////////////////////////////////////////////////////////////
/// @brief resource holder for AQL queries with auto-free functionality
///
/// @file
///
/// DISCLAIMER
///
/// Copyright 2014 ArangoDB GmbH, Cologne, Germany
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author <NAME>
/// @author Copyright 2014, ArangoDB GmbH, Cologne, Germany
/// @author Copyright 2012-2013, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////
#ifndef ARANGODB_UTILS_AHUACATL_GUARD_H
#define ARANGODB_UTILS_AHUACATL_GUARD_H 1
#include "Basics/Common.h"
#include "Ahuacatl/ahuacatl-context.h"
#include "BasicsC/json.h"
#include "BasicsC/logging.h"
#include "VocBase/vocbase.h"
#include "Cluster/ServerState.h"
namespace triagens {
namespace arango {
// -----------------------------------------------------------------------------
// --SECTION-- class AhuacatlGuard
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @brief scope guard for a TRI_aql_context_t*
////////////////////////////////////////////////////////////////////////////////
class AhuacatlGuard {
  public:

    // Create the guard: allocates an AQL context for the given query.
    // On allocation failure the guard stays invalid (valid() == false)
    // and a debug message is logged; callers must check valid().
    AhuacatlGuard (TRI_vocbase_t* vocbase,
                   const string& query,
                   TRI_json_t* userOptions)
      : _context(0) {
      bool const coordinator = ServerState::instance()->isCoordinator();
      _context = TRI_CreateContextAql(vocbase, query.c_str(), query.size(), coordinator, userOptions);

      if (_context == 0) {
        LOG_DEBUG("failed to create context for query '%s'", query.c_str());
      }
    }

    // Destroy the guard, releasing the context if it is still held.
    ~AhuacatlGuard () {
      this->free();
    }

    // Release the context early. Safe to call repeatedly; subsequent
    // calls are no-ops once the context has been freed.
    void free () {
      if (_context != 0) {
        TRI_FreeContextAql(_context);
        _context = 0;
      }
    }

    // Access the underlying context (0 when invalid or already freed).
    inline TRI_aql_context_t* ptr () const {
      return _context;
    }

    // Whether the context was successfully created and is still held.
    inline bool valid () const {
      return _context != 0;
    }

  private:

    // Owned AQL context; 0 when not held.
    TRI_aql_context_t* _context;
};
}
}
#endif
// -----------------------------------------------------------------------------
// --SECTION-- END-OF-FILE
// -----------------------------------------------------------------------------
// Local Variables:
// mode: outline-minor
// outline-regexp: "/// @brief\\|/// {@inheritDoc}\\|/// @page\\|// --SECTION--\\|/// @\\}"
// End:
|
kjthegod/chromium | chrome/browser/sync/glue/sync_backend_host_mock.h | <reponame>kjthegod/chromium
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_SYNC_GLUE_SYNC_BACKEND_HOST_MOCK_H_
#define CHROME_BROWSER_SYNC_GLUE_SYNC_BACKEND_HOST_MOCK_H_
#include <string>
#include "base/callback.h"
#include "base/compiler_specific.h"
#include "chrome/browser/sync/glue/sync_backend_host.h"
#include "sync/internal_api/public/util/weak_handle.h"
namespace browser_sync {
// A mock of the SyncBackendHost.
//
// This class implements the bare minimum required for the ProfileSyncService to
// get through initialization. It often returns NULL pointers or nonesense
// values; it is not intended to be used in tests that depend on SyncBackendHost
// behavior.
class SyncBackendHostMock : public SyncBackendHost {
 public:
  SyncBackendHostMock();
  ~SyncBackendHostMock() override;

  // --- Backend lifecycle ---
  void Initialize(
      sync_driver::SyncFrontend* frontend,
      scoped_ptr<base::Thread> sync_thread,
      const syncer::WeakHandle<syncer::JsEventHandler>& event_handler,
      const GURL& service_url,
      const syncer::SyncCredentials& credentials,
      bool delete_sync_data_folder,
      scoped_ptr<syncer::SyncManagerFactory> sync_manager_factory,
      scoped_ptr<syncer::UnrecoverableErrorHandler> unrecoverable_error_handler,
      syncer::ReportUnrecoverableErrorFunction
          report_unrecoverable_error_function,
      syncer::NetworkResources* network_resources) override;
  void UpdateCredentials(const syncer::SyncCredentials& credentials) override;
  void StartSyncingWithServer() override;

  // --- Encryption / passphrase handling ---
  void SetEncryptionPassphrase(const std::string& passphrase,
                               bool is_explicit) override;
  bool SetDecryptionPassphrase(const std::string& passphrase) override;

  // --- Shutdown ---
  void StopSyncingForShutdown() override;
  scoped_ptr<base::Thread> Shutdown(syncer::ShutdownReason reason) override;
  void UnregisterInvalidationIds() override;

  // --- Data type configuration and (de)activation ---
  void ConfigureDataTypes(
      syncer::ConfigureReason reason,
      const DataTypeConfigStateMap& config_state_map,
      const base::Callback<void(syncer::ModelTypeSet, syncer::ModelTypeSet)>&
          ready_task,
      const base::Callback<void()>& retry_callback) override;
  void EnableEncryptEverything() override;
  void ActivateDataType(
      syncer::ModelType type,
      syncer::ModelSafeGroup group,
      sync_driver::ChangeProcessor* change_processor) override;
  void DeactivateDataType(syncer::ModelType type) override;

  // --- Status / introspection accessors ---
  syncer::UserShare* GetUserShare() const override;
  scoped_ptr<syncer::SyncContextProxy> GetSyncContextProxy() override;
  Status GetDetailedStatus() override;
  syncer::sessions::SyncSessionSnapshot GetLastSessionSnapshot() const override;
  bool HasUnsyncedItems() const override;
  bool IsNigoriEnabled() const override;
  syncer::PassphraseType GetPassphraseType() const override;
  base::Time GetExplicitPassphraseTime() const override;
  bool IsCryptographerReady(
      const syncer::BaseTransaction* trans) const override;
  void GetModelSafeRoutingInfo(
      syncer::ModelSafeRoutingInfo* out) const override;
  void FlushDirectory() const override;

  // --- Debug / protocol event forwarding ---
  void RequestBufferedProtocolEventsAndEnableForwarding() override;
  void DisableProtocolEventForwarding() override;
  void EnableDirectoryTypeDebugInfoForwarding() override;
  void DisableDirectoryTypeDebugInfoForwarding() override;
  void GetAllNodesForTypes(
      syncer::ModelTypeSet types,
      base::Callback<void(const std::vector<syncer::ModelType>& type,
                          ScopedVector<base::ListValue>)> callback) override;
  base::MessageLoop* GetSyncLoopForTesting() override;

  // Test hook: when true, the mock reports the initial download as failing.
  void set_fail_initial_download(bool should_fail);

 private:
  // Set via set_fail_initial_download(); consumed during initialization
  // (presumably — confirm in the corresponding .cc file).
  bool fail_initial_download_;
};
} // namespace browser_sync
#endif // CHROME_BROWSER_SYNC_GLUE_SYNC_BACKEND_HOST_MOCK_H_
|
hectorj2f/hocl_compilerV2 | src/fr/inria/hocl/core/hocli/ReactionRule.java | <reponame>hectorj2f/hocl_compilerV2
package fr.inria.hocl.core.hocli;
import java.io.*;
import java.util.Comparator;
/**
* This class represents a reaction rule.
*
*/
abstract public class ReactionRule implements Atom, Serializable {

  private static final long serialVersionUID = -4561593662483581853L;

  /**
   * Persistence of the rule across reactions: a ONE_SHOT rule is consumed
   * together with its reactives (see {@link #getReactives()}), an N_SHOT
   * rule remains available.
   */
  public enum Shot {
    N_SHOT, ONE_SHOT
  }

  /**
   * Behavioural classification of a rule; drives the ordering implemented
   * by {@link CmpSmallSolution}.
   */
  public enum Trope {
    REDUCER, OPTIMIZER, EXPANSER, UNKNOWN;
  }

  /** Rule name; also defines rule equality (see {@link #equals(Atom)}). */
  private final String name;

  /** One-shot or n-shot behaviour of this rule. */
  private final Shot shot;

  /** Trope of this rule; UNKNOWN until changed via {@link #setTrope(Trope)}. */
  private Trope trope;

  /** Currently considered atoms. */
  protected Permutation permutation;

  /**
   * Creates a reaction rule with trope UNKNOWN.
   *
   * @param n name of the rule
   * @param sh one-shot / n-shot behaviour
   */
  public ReactionRule( String n, Shot sh ) {
    name = n;
    shot = sh;
    trope = Trope.UNKNOWN;
    // FIXME: permutation is initialised by the sub-class
  }

  /**
   * Compute the result of the reaction
   *
   * @return return the result of the reaction
   */
  abstract protected Molecule computeResult();

  /**
   * @return the name of the reaction rule
   */
  public String getName() {
    return name;
  }

  /**
   * @return the "shot" type of the rule
   */
  public Shot getShot() {
    return shot;
  }

  /**
   * Abstract Factory DP: ReactionRule is a ConcreteFactory for Permutation
   *
   * @param atomIterators iterators over candidate atoms
   * @param moleculeIterators iterators over candidate molecules
   * @return a new permutation without repetition over the given iterators
   */
  public Permutation newPermutation( AtomIterator[] atomIterators,
      MoleculeIterator[] moleculeIterators ) {
    return new PermutationNoRepeat( atomIterators, moleculeIterators, this );
  }

  /**
   * @param trope the new trope of this rule
   */
  public void setTrope( Trope trope ) {
    this.trope = trope;
  }

  /**
   * This method will be implemented in the classes that extend ReactionRule.
   */
  public abstract ReactionRule clone();

  /**
   * Orders rules by trope: REDUCER first, then OPTIMIZER, then UNKNOWN,
   * then EXPANSER; rules of equal trope compare as equal.
   */
  public static class CmpSmallSolution implements Comparator<ReactionRule> {
    public int compare( ReactionRule r1, ReactionRule r2 ) {
      int res;
      Trope t1 = r1.trope;
      Trope t2 = r2.trope;
      switch( t1 ) {
      case REDUCER:
        if( t2 == Trope.REDUCER ) {
          res = 0;
        } else {
          res = -1;
        }
        break;
      case OPTIMIZER:
        if( t2 == Trope.OPTIMIZER ) {
          res = 0;
        } else if( t2 == Trope.REDUCER ) {
          res = 1;
        } else {
          res = -1;
        }
        break;
      case UNKNOWN:
        if( t2 == Trope.UNKNOWN ) {
          res = 0;
        } else if( t2 == Trope.EXPANSER ) {
          res = -1;
        } else {
          res = 1;
        }
        break;
      case EXPANSER:
        if( t2 == Trope.EXPANSER ) {
          res = 0;
        } else {
          res = 1;
        }
        break;
      default:
        res = 0; // will never happen, but the compiler will complain if absent
      }
      return res;
    }
  } // class CmpSmallSolution

  /**
   * Name-based equality against another atom.
   *
   * NOTE(review): this overloads equality on {@code Atom} and does NOT
   * override {@code Object.equals(Object)}; collections therefore use
   * identity semantics for this class — confirm that this is intended.
   *
   * @param atom the atom to compare with
   * @return true iff {@code atom} is a ReactionRule with the same name
   */
  public boolean equals( final Atom atom ) {
    return atom instanceof ReactionRule
        && name.equals( ( ( ReactionRule ) atom ).name );
  }

  public String toString() {
    return name;
  }

  /**
   * @return the required minimal number of atoms (reaction rule included) of
   *         the solution that contains this rule such that a reaction may be
   *         possible
   */
  public int getMinAtoms() {
    return permutation.size() + 1; // variables of molecule can match nothing
  }

  /**
   * @return some statistics about the rule : number of permutations checked and
   *         number of reactions
   */
  public String getStat() {
    // StringBuilder replaces the previous "new String(...)" + repeated
    // String concatenation (quadratic churn); output is byte-identical.
    StringBuilder s = new StringBuilder( "\n" );
    s.append( this ).append( " statistics:\n" );
    s.append( " - number of tests: " )
        .append( Hocli.debug.getNbCheckedPermutations( this ) ).append( "\n" );
    s.append( " - number of reactions: " )
        .append( Hocli.debug.getNbReactions( this ) ).append( "\n" );
    return s.toString();
  }

  /**
   * Set the solution where this rule is, to find reactives
   *
   * @param solution the containing solution
   */
  public void setSolution( Solution solution ) {
    permutation.setSolution( solution );
  }

  /**
   * Search for the next reactives for this rule.
   *
   * @return the result of the search
   */
  public boolean nextReaction() {
    return permutation.nextMatch();
  }

  /**
   * @return the reactives assuming that they exist; a ONE_SHOT rule is added
   *         to its own reactives so that it gets consumed by the reaction
   */
  public Molecule getReactives() {
    Molecule reactives = permutation.getReactives();
    if( shot == Shot.ONE_SHOT && !reactives.contains( this ) ) {
      reactives.add( this );
    }
    return reactives;
  }
} // class ReactionRule
|
luisriverag/verus-desktop | routes/api/erc20/rfox/migration.js | const ethers = require('ethers');
const RFOX_UTILITY_ABI = require('./abi');
const RFOX_UTILITY_CONTRACT = "0xD82F7e3956d3FF391C927Cd7d0A7A57C360DF5b9"
module.exports = (api) => {
api.erc20.rfox = {}
api.setGet('/erc20/rfox/estimate_gas_claim_account_balances', async (req, res, next) => {
try {
res.send(JSON.stringify({
msg: 'success',
result: await api.erc20.rfox.estimateGasClaimAccountBalances()
}));
} catch (e) {
res.send(JSON.stringify({
msg: 'error',
result: e.message
}));
}
});
api.setGet('/erc20/rfox/get_account_balances', async (req, res, next) => {
try {
res.send(JSON.stringify({
msg: 'success',
result: await api.erc20.rfox.getAccountBalances()
}));
} catch (e) {
res.send(JSON.stringify({
msg: 'error',
result: e.message
}));
}
});
api.setPost('/erc20/rfox/claim_account_balances', async (req, res, next) => {
try {
res.send(JSON.stringify({
msg: 'success',
result: await api.erc20.rfox.claimAccountBalances()
}));
} catch (e) {
res.send(JSON.stringify({
msg: 'error',
result: e.message
}));
}
});
/**
* Estimates the gas required to claim the account balance of an RFOX account
*/
api.erc20.rfox.estimateGasClaimAccountBalances = async () => {
const contractId = '0xa1d6Df714F91DeBF4e0802A542E13067f31b8262'
if (api.erc20.contracts[contractId] == null) {
throw new Error(
`No interface to connect to estimateGasClaimAccountBalances`
);
} else if (api.erc20.wallet == null) {
throw new Error(
"No ERC20 wallet authenticated to use for estimateGasClaimAccountBalances"
);
}
const web3Provider = api.erc20.contracts[contractId]
const contract = web3Provider.interface.getContract(RFOX_UTILITY_CONTRACT, RFOX_UTILITY_ABI)
const signer = new ethers.VoidSigner(api.erc20.wallet.address, web3Provider.interface.DefaultProvider)
const uncompressedPubKey = api.erc20.wallet.pub
const x = Buffer.from(uncompressedPubKey.slice(4, 68), 'hex')
const y = Buffer.from(uncompressedPubKey.slice(68), 'hex')
return (await contract
.connect(signer)
.estimateGas.withdrawBalance(x, y)).mul(
await web3Provider.interface.DefaultProvider.getGasPrice()
);
}
/**
* Claims claimable account balance of a RedFOX account
*/
api.erc20.rfox.claimAccountBalances = async () => {
const contractId = '0xa1d6Df714F91DeBF4e0802A542E13067f31b8262'
if (api.erc20.contracts[contractId] == null) {
throw new Error(
`No interface to connect to estimateGasClaimAccountBalances`
);
} else if (api.erc20.wallet == null) {
throw new Error(
"No ERC20 wallet authenticated to use for estimateGasClaimAccountBalances"
);
}
const web3Provider = api.erc20.contracts[contractId]
const contract = web3Provider.interface.getContract(RFOX_UTILITY_CONTRACT, RFOX_UTILITY_ABI)
const signableContract = contract.connect(
new ethers.Wallet(
ethers.utils.hexlify(api.erc20.wallet.signer.signingKey.privateKey),
web3Provider.interface.DefaultProvider
)
);
const uncompressedPubKey = api.erc20.wallet.pub
const x = Buffer.from(uncompressedPubKey.slice(4, 68), 'hex')
const y = Buffer.from(uncompressedPubKey.slice(68), 'hex')
return await signableContract.withdrawBalance(x, y)
}
/**
* Gets total account balance of a RedFOX account
*/
api.erc20.rfox.getAccountBalances = async () => {
const contractId = '0xa1d6Df714F91DeBF4e0802A542E13067f31b8262'
if (api.erc20.contracts[contractId] == null) {
throw new Error(
`No interface to connect to getAccountBalances`
);
} else if (api.erc20.wallet == null) {
throw new Error(
"No ERC20 wallet authenticated to use for getAccountBalances"
);
}
const web3Provider = api.erc20.contracts[contractId]
const contract = web3Provider.interface.getContract(
RFOX_UTILITY_CONTRACT,
RFOX_UTILITY_ABI
);
const uncompressedPubKey = api.erc20.wallet.pub
const x = Buffer.from(uncompressedPubKey.slice(4, 68), 'hex')
const y = Buffer.from(uncompressedPubKey.slice(68), 'hex')
return await contract.totalAccountBalance(x, y)
}
return api;
}; |
reismannnr2/BCDice | lib/bcdice/game_system/cthulhu7th/rollable.rb | <reponame>reismannnr2/BCDice
module BCDice
  module GameSystem
    class Cthulhu7th < Base
      module Rollable
        private

        # Rolls the ones digit of a 1D100.
        # A die face of 10 counts as 0, so the result lies in 0..9.
        #
        # @return [Integer]
        def roll_ones_d10
          face = @randomizer.roll_once(10)
          face == 10 ? 0 : face
        end

        # Rolls 1D100 with bonus/penalty dice.
        # A non-negative +bonus+ keeps the lowest candidate (bonus dice);
        # a negative +bonus+ keeps the highest (penalty dice).
        #
        # @param bonus [Integer] number of bonus dice; negative means penalty dice
        # @return [Array<(Integer, Array<Integer>)>] chosen roll and all candidates
        def roll_with_bonus(bonus)
          # All tens dice are rolled first, then one shared ones die —
          # this preserves the original randomizer call order.
          tens_rolls = Array.new(bonus.abs + 1) { @randomizer.roll_tens_d10 }
          ones = roll_ones_d10

          candidates = tens_rolls.map do |tens|
            total = tens + ones
            total.zero? ? 100 : total
          end

          chosen = bonus >= 0 ? candidates.min : candidates.max
          [chosen, candidates]
        end
      end
    end
  end
end
|
moayyadfaris/susanoo | config/sms.js | const { BaseConfig } = require('backend-core')
const logger = require('../util/logger')
/**
 * Twilio SMS configuration.
 *
 * Reads the required environment variables on construction via
 * BaseConfig's `set`, each paired with a joi schema.
 * NOTE(review): `this.set` presumably validates the value against the
 * schema and throws on failure — confirm in BaseConfig.
 */
class SMSConfig extends BaseConfig {
  constructor () {
    super()
    // Twilio API credentials.
    this.twilioAuthToken = this.set('TWILIO_AUTH_TOKEN', this.joi.string().min(5).max(100).required())
    this.twilioAccountSid = this.set('TWILIO_ACCOUNT_SID', this.joi.string().min(5).max(100).required())
    // Default sender phone number.
    this.from = this.set('PHONE_FROM', this.joi.string().min(7).max(100).required())
  }

  // No async work needed; just log that configuration is ready.
  async init () {
    logger.debug(`${this.constructor.name}: Initialization finish...`)
  }
}

// Exported as a singleton, matching the other config modules.
module.exports = new SMSConfig()
|
MPOscar/sneakers-api | src/infra/repositories/layout/heading.js | const EntityNotFound = require('src/infra/errors/EntityNotFoundError')
const { LayoutHeading } = require('src/domain/layout')
// Convert a heading domain object to the persisted layout shape:
// the domain's `imgUrl` is stored as `headingImgUrl`.
const mapHeading = (headingDomain) => {
  const mapped = LayoutHeading(headingDomain)
  mapped.headingImgUrl = headingDomain.imgUrl
  return mapped
}
// Convert a stored layout row back to a heading domain object,
// restoring `imgUrl` from the persisted `headingImgUrl` field.
const unmapHeading = (dbModel) => {
  const domain = Object.create(dbModel)
  domain.imgUrl = dbModel.headingImgUrl
  return LayoutHeading(domain)
}
module.exports = (database) => {
const model = database.models.layouts
const updateHeading = async (page, heading) => {
let layoutDb = await model.findOne({ where: { page: page } })
if (!layoutDb) {
throw new EntityNotFound()
}
await layoutDb.updateAttributes(mapHeading(heading))
return heading
}
const getHeading = async (page) => {
let layoutDB = await model.findOne({ where: { page } })
if (!layoutDB) {
throw new EntityNotFound()
}
return unmapHeading(layoutDB)
}
return {
updateHeading,
getHeading
}
} |
dmitigr/cefeika | testo/testo.hpp | <reponame>dmitigr/cefeika
// -*- C++ -*-
// Copyright (C) 2021 <NAME>
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
//
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
//
// 1. The origin of this software must not be misrepresented; you must not
// claim that you wrote the original software. If you use this software
// in a product, an acknowledgment in the product documentation would be
// appreciated but is not required.
// 2. Altered source versions must be plainly marked as such, and must not be
// misrepresented as being the original software.
// 3. This notice may not be removed or altered from any source distribution.
//
// <NAME>
// <EMAIL>
#ifndef DMITIGR_TESTO_TESTO_HPP
#define DMITIGR_TESTO_TESTO_HPP
#include "version.hpp"
#include "../error/assert.hpp"
#include <chrono>
#include <iostream>
#include <stdexcept>
#include <string_view>
#ifndef ASSERT
#define ASSERT(a) DMITIGR_ASSERT(a)
#endif
namespace dmitigr::testo {
/// @returns `true` if calling `f` throws an instance of `E` (or a subclass);
/// any other exception type is swallowed and yields `false`.
template<class E, typename F>
bool is_throw_works(F&& f) noexcept
{
  bool thrown{};
  try {
    f();
  } catch (const E&) {
    thrown = true;
  } catch (...) {
    // Exceptions of any other type do not count.
  }
  return thrown;
}
/// Pretty-prints `e.what()`.
///
/// Streams `test_name` directly rather than `test_name.data()`:
/// a `std::string_view` is not guaranteed to be null-terminated, so
/// handing `.data()` to `operator<<(const char*)` could read past the
/// end of the viewed buffer.
inline void report_failure(const std::string_view test_name, const std::exception& e)
{
  std::cerr << "Test \"" << test_name << "\" failed (std::exception catched): " << e.what() << std::endl;
}
/// @overload
///
/// Streams `test_name` directly rather than `test_name.data()`:
/// a `std::string_view` is not guaranteed to be null-terminated, so
/// handing `.data()` to `operator<<(const char*)` could read past the
/// end of the viewed buffer.
inline void report_failure(const std::string_view test_name)
{
  std::cerr << "Test \"" << test_name << "\" failed (unknown exception catched)" << std::endl;
}
/// @returns The wall-clock duration of a single call of `f`,
/// cast to duration type `D` (milliseconds by default).
template<typename D = std::chrono::milliseconds, typename F>
auto time(F&& f)
{
  const auto started = std::chrono::high_resolution_clock::now();
  f();
  const auto stopped = std::chrono::high_resolution_clock::now();
  return std::chrono::duration_cast<D>(stopped - started);
}
} // namespace dmitigr::testo
#endif // DMITIGR_TESTO_TESTO_HPP
|
gitoneman/react-soc | node_modules/react-datagrid/node_modules/ustring/node_modules/i-s/src/arguments.js | <reponame>gitoneman/react-soc
'use strict'
var objectToString = Object.prototype.toString
module.exports = function(value){
return objectToString.apply(value) === '[object Arguments]' || !!value.callee
} |
zzgchina888/msdn-code-gallery-microsoft | Microsoft Office Developer Documentation Team/SharePoint 2013 Office Web Widgets - Experimental Demo/[C#]-SharePoint 2013 Office Web Widgets - Experimental Demo/C#/OfficeWebWidgetsDemoWeb/Scripts/Utils.js | // Function to retrieve a query string value.
// For production purposes you may want to use
// a library to handle the query string.
// Returns the raw value of `paramToRetrieve` from the current page URL's
// query string, or undefined when the parameter (or the whole query
// string) is absent.
// Fixes: previously crashed (`.split` on undefined) for URLs without a
// "?", and carried a dead, unused `strParams` variable.
// NOTE: values are returned un-decoded, as before; for production use a
// query-string library (and decodeURIComponent).
function getQueryStringParameter(paramToRetrieve) {
    var urlParts = document.URL.split("?");
    if (urlParts.length < 2) {
        return undefined;
    }
    var params = urlParts[1].split("&");
    for (var i = 0; i < params.length; i = i + 1) {
        var singleParam = params[i].split("=");
        if (singleParam[0] == paramToRetrieve)
            return singleParam[1];
    }
}
kasthor/react-cbridge | src/ts-proto/sgn/sync/v1/tx_pb.js | <filename>src/ts-proto/sgn/sync/v1/tx_pb.js
// source: sgn/sync/v1/tx.proto
/**
* @fileoverview
* @enhanceable
* @suppress {missingRequire} reports error on implicit type usages.
* @suppress {messageConventions} JS Compiler reports an error if a variable or
* field starts with 'MSG_' and isn't a translatable message.
* @public
*/
// GENERATED CODE -- DO NOT EDIT!
/* eslint-disable */
// @ts-nocheck
var jspb = require('google-protobuf');
var goog = jspb;
var global = (function() {
if (this) { return this; }
if (typeof window !== 'undefined') { return window; }
if (typeof global !== 'undefined') { return global; }
if (typeof self !== 'undefined') { return self; }
return Function('return this')();
}.call(null));
var gogoproto_gogo_pb = require('../../../gogoproto/gogo_pb.js');
goog.object.extend(proto, gogoproto_gogo_pb);
var sgn_sync_v1_sync_pb = require('../../../sgn/sync/v1/sync_pb.js');
goog.object.extend(proto, sgn_sync_v1_sync_pb);
goog.exportSymbol('proto.sgn.sync.v1.MsgProposeUpdates', null, global);
goog.exportSymbol('proto.sgn.sync.v1.MsgVoteUpdates', null, global);
goog.exportSymbol('proto.sgn.sync.v1.ProposeUpdate', null, global);
goog.exportSymbol('proto.sgn.sync.v1.VoteUpdate', null, global);
/**
* Generated by JsPbCodeGenerator.
* @param {Array=} opt_data Optional initial data array, typically from a
* server response, or constructed directly in Javascript. The array is used
* in place and becomes part of the constructed object. It is not cloned.
* If no data is provided, the constructed object will be empty, but still
* valid.
* @extends {jspb.Message}
* @constructor
*/
proto.sgn.sync.v1.ProposeUpdate = function(opt_data) {
jspb.Message.initialize(this, opt_data, 0, -1, null, null);
};
goog.inherits(proto.sgn.sync.v1.ProposeUpdate, jspb.Message);
if (goog.DEBUG && !COMPILED) {
/**
* @public
* @override
*/
proto.sgn.sync.v1.ProposeUpdate.displayName = 'proto.sgn.sync.v1.ProposeUpdate';
}
/**
* Generated by JsPbCodeGenerator.
* @param {Array=} opt_data Optional initial data array, typically from a
* server response, or constructed directly in Javascript. The array is used
* in place and becomes part of the constructed object. It is not cloned.
* If no data is provided, the constructed object will be empty, but still
* valid.
* @extends {jspb.Message}
* @constructor
*/
proto.sgn.sync.v1.MsgProposeUpdates = function(opt_data) {
jspb.Message.initialize(this, opt_data, 0, -1, proto.sgn.sync.v1.MsgProposeUpdates.repeatedFields_, null);
};
goog.inherits(proto.sgn.sync.v1.MsgProposeUpdates, jspb.Message);
if (goog.DEBUG && !COMPILED) {
/**
* @public
* @override
*/
proto.sgn.sync.v1.MsgProposeUpdates.displayName = 'proto.sgn.sync.v1.MsgProposeUpdates';
}
/**
* Generated by JsPbCodeGenerator.
* @param {Array=} opt_data Optional initial data array, typically from a
* server response, or constructed directly in Javascript. The array is used
* in place and becomes part of the constructed object. It is not cloned.
* If no data is provided, the constructed object will be empty, but still
* valid.
* @extends {jspb.Message}
* @constructor
*/
proto.sgn.sync.v1.VoteUpdate = function(opt_data) {
jspb.Message.initialize(this, opt_data, 0, -1, null, null);
};
goog.inherits(proto.sgn.sync.v1.VoteUpdate, jspb.Message);
if (goog.DEBUG && !COMPILED) {
/**
* @public
* @override
*/
proto.sgn.sync.v1.VoteUpdate.displayName = 'proto.sgn.sync.v1.VoteUpdate';
}
/**
* Generated by JsPbCodeGenerator.
* @param {Array=} opt_data Optional initial data array, typically from a
* server response, or constructed directly in Javascript. The array is used
* in place and becomes part of the constructed object. It is not cloned.
* If no data is provided, the constructed object will be empty, but still
* valid.
* @extends {jspb.Message}
* @constructor
*/
proto.sgn.sync.v1.MsgVoteUpdates = function(opt_data) {
jspb.Message.initialize(this, opt_data, 0, -1, proto.sgn.sync.v1.MsgVoteUpdates.repeatedFields_, null);
};
goog.inherits(proto.sgn.sync.v1.MsgVoteUpdates, jspb.Message);
if (goog.DEBUG && !COMPILED) {
/**
* @public
* @override
*/
proto.sgn.sync.v1.MsgVoteUpdates.displayName = 'proto.sgn.sync.v1.MsgVoteUpdates';
}
if (jspb.Message.GENERATE_TO_OBJECT) {
/**
* Creates an object representation of this proto.
* Field names that are reserved in JavaScript and will be renamed to pb_name.
* Optional fields that are not set will be set to undefined.
* To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
* For the list of reserved names please see:
* net/proto2/compiler/js/internal/generator.cc#kKeyword.
* @param {boolean=} opt_includeInstance Deprecated. whether to include the
* JSPB instance for transitional soy proto support:
* http://goto/soy-param-migration
* @return {!Object}
*/
proto.sgn.sync.v1.ProposeUpdate.prototype.toObject = function(opt_includeInstance) {
return proto.sgn.sync.v1.ProposeUpdate.toObject(opt_includeInstance, this);
};
/**
* Static version of the {@see toObject} method.
* @param {boolean|undefined} includeInstance Deprecated. Whether to include
* the JSPB instance for transitional soy proto support:
* http://goto/soy-param-migration
* @param {!proto.sgn.sync.v1.ProposeUpdate} msg The msg instance to transform.
* @return {!Object}
* @suppress {unusedLocalVariables} f is only used for nested messages
*/
proto.sgn.sync.v1.ProposeUpdate.toObject = function(includeInstance, msg) {
var f, obj = {
type: jspb.Message.getFieldWithDefault(msg, 1, 0),
data: msg.getData_asB64(),
chainId: jspb.Message.getFieldWithDefault(msg, 3, 0),
chainBlock: jspb.Message.getFieldWithDefault(msg, 4, 0)
};
if (includeInstance) {
obj.$jspbMessageInstance = msg;
}
return obj;
};
}
/**
* Deserializes binary data (in protobuf wire format).
* @param {jspb.ByteSource} bytes The bytes to deserialize.
* @return {!proto.sgn.sync.v1.ProposeUpdate}
*/
proto.sgn.sync.v1.ProposeUpdate.deserializeBinary = function(bytes) {
var reader = new jspb.BinaryReader(bytes);
var msg = new proto.sgn.sync.v1.ProposeUpdate;
return proto.sgn.sync.v1.ProposeUpdate.deserializeBinaryFromReader(msg, reader);
};
/**
* Deserializes binary data (in protobuf wire format) from the
* given reader into the given message object.
* @param {!proto.sgn.sync.v1.ProposeUpdate} msg The message object to deserialize into.
* @param {!jspb.BinaryReader} reader The BinaryReader to use.
* @return {!proto.sgn.sync.v1.ProposeUpdate}
*/
proto.sgn.sync.v1.ProposeUpdate.deserializeBinaryFromReader = function(msg, reader) {
while (reader.nextField()) {
if (reader.isEndGroup()) {
break;
}
var field = reader.getFieldNumber();
switch (field) {
case 1:
var value = /** @type {!proto.sgn.sync.v1.DataType} */ (reader.readEnum());
msg.setType(value);
break;
case 2:
var value = /** @type {!Uint8Array} */ (reader.readBytes());
msg.setData(value);
break;
case 3:
var value = /** @type {number} */ (reader.readUint64());
msg.setChainId(value);
break;
case 4:
var value = /** @type {number} */ (reader.readUint64());
msg.setChainBlock(value);
break;
default:
reader.skipField();
break;
}
}
return msg;
};
/**
* Serializes the message to binary data (in protobuf wire format).
* @return {!Uint8Array}
*/
proto.sgn.sync.v1.ProposeUpdate.prototype.serializeBinary = function() {
var writer = new jspb.BinaryWriter();
proto.sgn.sync.v1.ProposeUpdate.serializeBinaryToWriter(this, writer);
return writer.getResultBuffer();
};
/**
* Serializes the given message to binary data (in protobuf wire
* format), writing to the given BinaryWriter.
* @param {!proto.sgn.sync.v1.ProposeUpdate} message
* @param {!jspb.BinaryWriter} writer
* @suppress {unusedLocalVariables} f is only used for nested messages
*/
proto.sgn.sync.v1.ProposeUpdate.serializeBinaryToWriter = function(message, writer) {
var f = undefined;
f = message.getType();
if (f !== 0.0) {
writer.writeEnum(
1,
f
);
}
f = message.getData_asU8();
if (f.length > 0) {
writer.writeBytes(
2,
f
);
}
f = message.getChainId();
if (f !== 0) {
writer.writeUint64(
3,
f
);
}
f = message.getChainBlock();
if (f !== 0) {
writer.writeUint64(
4,
f
);
}
};
/**
* optional DataType type = 1;
* @return {!proto.sgn.sync.v1.DataType}
*/
proto.sgn.sync.v1.ProposeUpdate.prototype.getType = function() {
return /** @type {!proto.sgn.sync.v1.DataType} */ (jspb.Message.getFieldWithDefault(this, 1, 0));
};
/**
* @param {!proto.sgn.sync.v1.DataType} value
* @return {!proto.sgn.sync.v1.ProposeUpdate} returns this
*/
proto.sgn.sync.v1.ProposeUpdate.prototype.setType = function(value) {
return jspb.Message.setProto3EnumField(this, 1, value);
};
/**
* optional bytes data = 2;
* @return {!(string|Uint8Array)}
*/
proto.sgn.sync.v1.ProposeUpdate.prototype.getData = function() {
return /** @type {!(string|Uint8Array)} */ (jspb.Message.getFieldWithDefault(this, 2, ""));
};
/**
* optional bytes data = 2;
* This is a type-conversion wrapper around `getData()`
* @return {string}
*/
proto.sgn.sync.v1.ProposeUpdate.prototype.getData_asB64 = function() {
return /** @type {string} */ (jspb.Message.bytesAsB64(
this.getData()));
};
/**
* optional bytes data = 2;
* Note that Uint8Array is not supported on all browsers.
* @see http://caniuse.com/Uint8Array
* This is a type-conversion wrapper around `getData()`
* @return {!Uint8Array}
*/
proto.sgn.sync.v1.ProposeUpdate.prototype.getData_asU8 = function() {
return /** @type {!Uint8Array} */ (jspb.Message.bytesAsU8(
this.getData()));
};
/**
* @param {!(string|Uint8Array)} value
* @return {!proto.sgn.sync.v1.ProposeUpdate} returns this
*/
proto.sgn.sync.v1.ProposeUpdate.prototype.setData = function(value) {
return jspb.Message.setProto3BytesField(this, 2, value);
};
/**
* optional uint64 chain_id = 3;
* @return {number}
*/
proto.sgn.sync.v1.ProposeUpdate.prototype.getChainId = function() {
return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 3, 0));
};
/**
* @param {number} value
* @return {!proto.sgn.sync.v1.ProposeUpdate} returns this
*/
proto.sgn.sync.v1.ProposeUpdate.prototype.setChainId = function(value) {
return jspb.Message.setProto3IntField(this, 3, value);
};
/**
* optional uint64 chain_block = 4;
* @return {number}
*/
proto.sgn.sync.v1.ProposeUpdate.prototype.getChainBlock = function() {
return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 4, 0));
};
/**
* @param {number} value
* @return {!proto.sgn.sync.v1.ProposeUpdate} returns this
*/
proto.sgn.sync.v1.ProposeUpdate.prototype.setChainBlock = function(value) {
return jspb.Message.setProto3IntField(this, 4, value);
};
/**
* List of repeated fields within this message type.
* @private {!Array<number>}
* @const
*/
proto.sgn.sync.v1.MsgProposeUpdates.repeatedFields_ = [1];
if (jspb.Message.GENERATE_TO_OBJECT) {
/**
* Creates an object representation of this proto.
* Field names that are reserved in JavaScript and will be renamed to pb_name.
* Optional fields that are not set will be set to undefined.
* To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
* For the list of reserved names please see:
* net/proto2/compiler/js/internal/generator.cc#kKeyword.
* @param {boolean=} opt_includeInstance Deprecated. whether to include the
* JSPB instance for transitional soy proto support:
* http://goto/soy-param-migration
* @return {!Object}
*/
proto.sgn.sync.v1.MsgProposeUpdates.prototype.toObject = function(opt_includeInstance) {
return proto.sgn.sync.v1.MsgProposeUpdates.toObject(opt_includeInstance, this);
};
/**
* Static version of the {@see toObject} method.
* @param {boolean|undefined} includeInstance Deprecated. Whether to include
* the JSPB instance for transitional soy proto support:
* http://goto/soy-param-migration
* @param {!proto.sgn.sync.v1.MsgProposeUpdates} msg The msg instance to transform.
* @return {!Object}
* @suppress {unusedLocalVariables} f is only used for nested messages
*/
proto.sgn.sync.v1.MsgProposeUpdates.toObject = function(includeInstance, msg) {
var f, obj = {
updatesList: jspb.Message.toObjectList(msg.getUpdatesList(),
proto.sgn.sync.v1.ProposeUpdate.toObject, includeInstance),
sender: jspb.Message.getFieldWithDefault(msg, 2, "")
};
if (includeInstance) {
obj.$jspbMessageInstance = msg;
}
return obj;
};
}
/**
* Deserializes binary data (in protobuf wire format).
* @param {jspb.ByteSource} bytes The bytes to deserialize.
* @return {!proto.sgn.sync.v1.MsgProposeUpdates}
*/
proto.sgn.sync.v1.MsgProposeUpdates.deserializeBinary = function(bytes) {
var reader = new jspb.BinaryReader(bytes);
var msg = new proto.sgn.sync.v1.MsgProposeUpdates;
return proto.sgn.sync.v1.MsgProposeUpdates.deserializeBinaryFromReader(msg, reader);
};
/**
* Deserializes binary data (in protobuf wire format) from the
* given reader into the given message object.
* @param {!proto.sgn.sync.v1.MsgProposeUpdates} msg The message object to deserialize into.
* @param {!jspb.BinaryReader} reader The BinaryReader to use.
* @return {!proto.sgn.sync.v1.MsgProposeUpdates}
*/
proto.sgn.sync.v1.MsgProposeUpdates.deserializeBinaryFromReader = function(msg, reader) {
while (reader.nextField()) {
if (reader.isEndGroup()) {
break;
}
var field = reader.getFieldNumber();
switch (field) {
case 1:
var value = new proto.sgn.sync.v1.ProposeUpdate;
reader.readMessage(value,proto.sgn.sync.v1.ProposeUpdate.deserializeBinaryFromReader);
msg.addUpdates(value);
break;
case 2:
var value = /** @type {string} */ (reader.readString());
msg.setSender(value);
break;
default:
reader.skipField();
break;
}
}
return msg;
};
/**
* Serializes the message to binary data (in protobuf wire format).
* @return {!Uint8Array}
*/
proto.sgn.sync.v1.MsgProposeUpdates.prototype.serializeBinary = function() {
var writer = new jspb.BinaryWriter();
proto.sgn.sync.v1.MsgProposeUpdates.serializeBinaryToWriter(this, writer);
return writer.getResultBuffer();
};
/**
* Serializes the given message to binary data (in protobuf wire
* format), writing to the given BinaryWriter.
* @param {!proto.sgn.sync.v1.MsgProposeUpdates} message
* @param {!jspb.BinaryWriter} writer
* @suppress {unusedLocalVariables} f is only used for nested messages
*/
proto.sgn.sync.v1.MsgProposeUpdates.serializeBinaryToWriter = function(message, writer) {
var f = undefined;
f = message.getUpdatesList();
if (f.length > 0) {
writer.writeRepeatedMessage(
1,
f,
proto.sgn.sync.v1.ProposeUpdate.serializeBinaryToWriter
);
}
f = message.getSender();
if (f.length > 0) {
writer.writeString(
2,
f
);
}
};
/**
* repeated ProposeUpdate updates = 1;
* @return {!Array<!proto.sgn.sync.v1.ProposeUpdate>}
*/
proto.sgn.sync.v1.MsgProposeUpdates.prototype.getUpdatesList = function() {
return /** @type{!Array<!proto.sgn.sync.v1.ProposeUpdate>} */ (
jspb.Message.getRepeatedWrapperField(this, proto.sgn.sync.v1.ProposeUpdate, 1));
};
/**
* @param {!Array<!proto.sgn.sync.v1.ProposeUpdate>} value
* @return {!proto.sgn.sync.v1.MsgProposeUpdates} returns this
*/
proto.sgn.sync.v1.MsgProposeUpdates.prototype.setUpdatesList = function(value) {
return jspb.Message.setRepeatedWrapperField(this, 1, value);
};
/**
* @param {!proto.sgn.sync.v1.ProposeUpdate=} opt_value
* @param {number=} opt_index
* @return {!proto.sgn.sync.v1.ProposeUpdate}
*/
proto.sgn.sync.v1.MsgProposeUpdates.prototype.addUpdates = function(opt_value, opt_index) {
return jspb.Message.addToRepeatedWrapperField(this, 1, opt_value, proto.sgn.sync.v1.ProposeUpdate, opt_index);
};
/**
* Clears the list making it empty but non-null.
* @return {!proto.sgn.sync.v1.MsgProposeUpdates} returns this
*/
proto.sgn.sync.v1.MsgProposeUpdates.prototype.clearUpdatesList = function() {
return this.setUpdatesList([]);
};
/**
* optional string sender = 2;
* @return {string}
*/
proto.sgn.sync.v1.MsgProposeUpdates.prototype.getSender = function() {
return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, ""));
};
/**
* @param {string} value
* @return {!proto.sgn.sync.v1.MsgProposeUpdates} returns this
*/
proto.sgn.sync.v1.MsgProposeUpdates.prototype.setSender = function(value) {
return jspb.Message.setProto3StringField(this, 2, value);
};
if (jspb.Message.GENERATE_TO_OBJECT) {
/**
* Creates an object representation of this proto.
* Field names that are reserved in JavaScript and will be renamed to pb_name.
* Optional fields that are not set will be set to undefined.
* To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
* For the list of reserved names please see:
* net/proto2/compiler/js/internal/generator.cc#kKeyword.
* @param {boolean=} opt_includeInstance Deprecated. whether to include the
* JSPB instance for transitional soy proto support:
* http://goto/soy-param-migration
* @return {!Object}
*/
proto.sgn.sync.v1.VoteUpdate.prototype.toObject = function(opt_includeInstance) {
return proto.sgn.sync.v1.VoteUpdate.toObject(opt_includeInstance, this);
};
/**
* Static version of the {@see toObject} method.
* @param {boolean|undefined} includeInstance Deprecated. Whether to include
* the JSPB instance for transitional soy proto support:
* http://goto/soy-param-migration
* @param {!proto.sgn.sync.v1.VoteUpdate} msg The msg instance to transform.
* @return {!Object}
* @suppress {unusedLocalVariables} f is only used for nested messages
*/
proto.sgn.sync.v1.VoteUpdate.toObject = function(includeInstance, msg) {
var f, obj = {
id: jspb.Message.getFieldWithDefault(msg, 1, 0),
option: jspb.Message.getFieldWithDefault(msg, 2, 0)
};
if (includeInstance) {
obj.$jspbMessageInstance = msg;
}
return obj;
};
}
/**
* Deserializes binary data (in protobuf wire format).
* @param {jspb.ByteSource} bytes The bytes to deserialize.
* @return {!proto.sgn.sync.v1.VoteUpdate}
*/
proto.sgn.sync.v1.VoteUpdate.deserializeBinary = function(bytes) {
var reader = new jspb.BinaryReader(bytes);
var msg = new proto.sgn.sync.v1.VoteUpdate;
return proto.sgn.sync.v1.VoteUpdate.deserializeBinaryFromReader(msg, reader);
};
/**
* Deserializes binary data (in protobuf wire format) from the
* given reader into the given message object.
* @param {!proto.sgn.sync.v1.VoteUpdate} msg The message object to deserialize into.
* @param {!jspb.BinaryReader} reader The BinaryReader to use.
* @return {!proto.sgn.sync.v1.VoteUpdate}
*/
proto.sgn.sync.v1.VoteUpdate.deserializeBinaryFromReader = function(msg, reader) {
while (reader.nextField()) {
if (reader.isEndGroup()) {
break;
}
var field = reader.getFieldNumber();
switch (field) {
case 1:
var value = /** @type {number} */ (reader.readUint64());
msg.setId(value);
break;
case 2:
var value = /** @type {!proto.sgn.sync.v1.VoteOption} */ (reader.readEnum());
msg.setOption(value);
break;
default:
reader.skipField();
break;
}
}
return msg;
};
/**
* Serializes the message to binary data (in protobuf wire format).
* @return {!Uint8Array}
*/
proto.sgn.sync.v1.VoteUpdate.prototype.serializeBinary = function() {
var writer = new jspb.BinaryWriter();
proto.sgn.sync.v1.VoteUpdate.serializeBinaryToWriter(this, writer);
return writer.getResultBuffer();
};
/**
* Serializes the given message to binary data (in protobuf wire
* format), writing to the given BinaryWriter.
* @param {!proto.sgn.sync.v1.VoteUpdate} message
* @param {!jspb.BinaryWriter} writer
* @suppress {unusedLocalVariables} f is only used for nested messages
*/
proto.sgn.sync.v1.VoteUpdate.serializeBinaryToWriter = function(message, writer) {
var f = undefined;
f = message.getId();
if (f !== 0) {
writer.writeUint64(
1,
f
);
}
f = message.getOption();
if (f !== 0.0) {
writer.writeEnum(
2,
f
);
}
};
/**
* optional uint64 id = 1;
* @return {number}
*/
proto.sgn.sync.v1.VoteUpdate.prototype.getId = function() {
return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 1, 0));
};
/**
* @param {number} value
* @return {!proto.sgn.sync.v1.VoteUpdate} returns this
*/
proto.sgn.sync.v1.VoteUpdate.prototype.setId = function(value) {
return jspb.Message.setProto3IntField(this, 1, value);
};
/**
* optional VoteOption option = 2;
* @return {!proto.sgn.sync.v1.VoteOption}
*/
proto.sgn.sync.v1.VoteUpdate.prototype.getOption = function() {
return /** @type {!proto.sgn.sync.v1.VoteOption} */ (jspb.Message.getFieldWithDefault(this, 2, 0));
};
/**
* @param {!proto.sgn.sync.v1.VoteOption} value
* @return {!proto.sgn.sync.v1.VoteUpdate} returns this
*/
proto.sgn.sync.v1.VoteUpdate.prototype.setOption = function(value) {
return jspb.Message.setProto3EnumField(this, 2, value);
};
/**
* List of repeated fields within this message type.
* @private {!Array<number>}
* @const
*/
proto.sgn.sync.v1.MsgVoteUpdates.repeatedFields_ = [1];
if (jspb.Message.GENERATE_TO_OBJECT) {
/**
* Creates an object representation of this proto.
* Field names that are reserved in JavaScript and will be renamed to pb_name.
* Optional fields that are not set will be set to undefined.
* To access a reserved field use, foo.pb_<name>, eg, foo.pb_default.
* For the list of reserved names please see:
* net/proto2/compiler/js/internal/generator.cc#kKeyword.
* @param {boolean=} opt_includeInstance Deprecated. whether to include the
* JSPB instance for transitional soy proto support:
* http://goto/soy-param-migration
* @return {!Object}
*/
proto.sgn.sync.v1.MsgVoteUpdates.prototype.toObject = function(opt_includeInstance) {
return proto.sgn.sync.v1.MsgVoteUpdates.toObject(opt_includeInstance, this);
};
/**
* Static version of the {@see toObject} method.
* @param {boolean|undefined} includeInstance Deprecated. Whether to include
* the JSPB instance for transitional soy proto support:
* http://goto/soy-param-migration
* @param {!proto.sgn.sync.v1.MsgVoteUpdates} msg The msg instance to transform.
* @return {!Object}
* @suppress {unusedLocalVariables} f is only used for nested messages
*/
proto.sgn.sync.v1.MsgVoteUpdates.toObject = function(includeInstance, msg) {
var f, obj = {
votesList: jspb.Message.toObjectList(msg.getVotesList(),
proto.sgn.sync.v1.VoteUpdate.toObject, includeInstance),
sender: jspb.Message.getFieldWithDefault(msg, 2, "")
};
if (includeInstance) {
obj.$jspbMessageInstance = msg;
}
return obj;
};
}
/**
* Deserializes binary data (in protobuf wire format).
* @param {jspb.ByteSource} bytes The bytes to deserialize.
* @return {!proto.sgn.sync.v1.MsgVoteUpdates}
*/
proto.sgn.sync.v1.MsgVoteUpdates.deserializeBinary = function(bytes) {
var reader = new jspb.BinaryReader(bytes);
var msg = new proto.sgn.sync.v1.MsgVoteUpdates;
return proto.sgn.sync.v1.MsgVoteUpdates.deserializeBinaryFromReader(msg, reader);
};
/**
* Deserializes binary data (in protobuf wire format) from the
* given reader into the given message object.
* @param {!proto.sgn.sync.v1.MsgVoteUpdates} msg The message object to deserialize into.
* @param {!jspb.BinaryReader} reader The BinaryReader to use.
* @return {!proto.sgn.sync.v1.MsgVoteUpdates}
*/
proto.sgn.sync.v1.MsgVoteUpdates.deserializeBinaryFromReader = function(msg, reader) {
while (reader.nextField()) {
if (reader.isEndGroup()) {
break;
}
var field = reader.getFieldNumber();
switch (field) {
case 1:
var value = new proto.sgn.sync.v1.VoteUpdate;
reader.readMessage(value,proto.sgn.sync.v1.VoteUpdate.deserializeBinaryFromReader);
msg.addVotes(value);
break;
case 2:
var value = /** @type {string} */ (reader.readString());
msg.setSender(value);
break;
default:
reader.skipField();
break;
}
}
return msg;
};
/**
* Serializes the message to binary data (in protobuf wire format).
* @return {!Uint8Array}
*/
proto.sgn.sync.v1.MsgVoteUpdates.prototype.serializeBinary = function() {
var writer = new jspb.BinaryWriter();
proto.sgn.sync.v1.MsgVoteUpdates.serializeBinaryToWriter(this, writer);
return writer.getResultBuffer();
};
/**
* Serializes the given message to binary data (in protobuf wire
* format), writing to the given BinaryWriter.
* @param {!proto.sgn.sync.v1.MsgVoteUpdates} message
* @param {!jspb.BinaryWriter} writer
* @suppress {unusedLocalVariables} f is only used for nested messages
*/
proto.sgn.sync.v1.MsgVoteUpdates.serializeBinaryToWriter = function(message, writer) {
var f = undefined;
f = message.getVotesList();
if (f.length > 0) {
writer.writeRepeatedMessage(
1,
f,
proto.sgn.sync.v1.VoteUpdate.serializeBinaryToWriter
);
}
f = message.getSender();
if (f.length > 0) {
writer.writeString(
2,
f
);
}
};
/**
* repeated VoteUpdate votes = 1;
* @return {!Array<!proto.sgn.sync.v1.VoteUpdate>}
*/
proto.sgn.sync.v1.MsgVoteUpdates.prototype.getVotesList = function() {
return /** @type{!Array<!proto.sgn.sync.v1.VoteUpdate>} */ (
jspb.Message.getRepeatedWrapperField(this, proto.sgn.sync.v1.VoteUpdate, 1));
};
/**
* @param {!Array<!proto.sgn.sync.v1.VoteUpdate>} value
* @return {!proto.sgn.sync.v1.MsgVoteUpdates} returns this
*/
proto.sgn.sync.v1.MsgVoteUpdates.prototype.setVotesList = function(value) {
return jspb.Message.setRepeatedWrapperField(this, 1, value);
};
/**
* @param {!proto.sgn.sync.v1.VoteUpdate=} opt_value
* @param {number=} opt_index
* @return {!proto.sgn.sync.v1.VoteUpdate}
*/
proto.sgn.sync.v1.MsgVoteUpdates.prototype.addVotes = function(opt_value, opt_index) {
return jspb.Message.addToRepeatedWrapperField(this, 1, opt_value, proto.sgn.sync.v1.VoteUpdate, opt_index);
};
/**
* Clears the list making it empty but non-null.
* @return {!proto.sgn.sync.v1.MsgVoteUpdates} returns this
*/
proto.sgn.sync.v1.MsgVoteUpdates.prototype.clearVotesList = function() {
return this.setVotesList([]);
};
/**
* optional string sender = 2;
* @return {string}
*/
proto.sgn.sync.v1.MsgVoteUpdates.prototype.getSender = function() {
return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, ""));
};
/**
* @param {string} value
* @return {!proto.sgn.sync.v1.MsgVoteUpdates} returns this
*/
proto.sgn.sync.v1.MsgVoteUpdates.prototype.setSender = function(value) {
return jspb.Message.setProto3StringField(this, 2, value);
};
goog.object.extend(exports, proto.sgn.sync.v1);
|
zsilbi/cphalcon | ext/phalcon/acl/adapter/abstractadapter.zep.h | <gh_stars>1-10
extern zend_class_entry *phalcon_acl_adapter_abstractadapter_ce;
ZEPHIR_INIT_CLASS(Phalcon_Acl_Adapter_AbstractAdapter);
PHP_METHOD(Phalcon_Acl_Adapter_AbstractAdapter, getActiveAccess);
PHP_METHOD(Phalcon_Acl_Adapter_AbstractAdapter, getActiveRole);
PHP_METHOD(Phalcon_Acl_Adapter_AbstractAdapter, getActiveComponent);
PHP_METHOD(Phalcon_Acl_Adapter_AbstractAdapter, getDefaultAction);
PHP_METHOD(Phalcon_Acl_Adapter_AbstractAdapter, getEventsManager);
PHP_METHOD(Phalcon_Acl_Adapter_AbstractAdapter, setDefaultAction);
PHP_METHOD(Phalcon_Acl_Adapter_AbstractAdapter, setEventsManager);
#if PHP_VERSION_ID >= 70200
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_getactiveaccess, 0, 0, IS_STRING, 0)
#else
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_getactiveaccess, 0, 0, IS_STRING, NULL, 0)
#endif
ZEND_END_ARG_INFO()
#if PHP_VERSION_ID >= 70200
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_getactiverole, 0, 0, IS_STRING, 0)
#else
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_getactiverole, 0, 0, IS_STRING, NULL, 0)
#endif
ZEND_END_ARG_INFO()
#if PHP_VERSION_ID >= 70200
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_getactivecomponent, 0, 0, IS_STRING, 0)
#else
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_getactivecomponent, 0, 0, IS_STRING, NULL, 0)
#endif
ZEND_END_ARG_INFO()
#if PHP_VERSION_ID >= 70200
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_getdefaultaction, 0, 0, IS_LONG, 0)
#else
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_getdefaultaction, 0, 0, IS_LONG, NULL, 0)
#endif
ZEND_END_ARG_INFO()
#if PHP_VERSION_ID >= 70200
ZEND_BEGIN_ARG_WITH_RETURN_OBJ_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_geteventsmanager, 0, 0, Phalcon\\Events\\ManagerInterface, 0)
#else
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_geteventsmanager, 0, 0, IS_OBJECT, "Phalcon\\Events\\ManagerInterface", 0)
#endif
ZEND_END_ARG_INFO()
#if PHP_VERSION_ID >= 70100
#if PHP_VERSION_ID >= 70200
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_setdefaultaction, 0, 1, IS_VOID, 0)
#else
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_setdefaultaction, 0, 1, IS_VOID, NULL, 0)
#endif
#else
ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_setdefaultaction, 0, 0, 1)
#define arginfo_phalcon_acl_adapter_abstractadapter_setdefaultaction NULL
#endif
#if PHP_VERSION_ID >= 70200
ZEND_ARG_TYPE_INFO(0, defaultAccess, IS_LONG, 0)
#else
ZEND_ARG_INFO(0, defaultAccess)
#endif
ZEND_END_ARG_INFO()
#if PHP_VERSION_ID >= 70100
#if PHP_VERSION_ID >= 70200
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_seteventsmanager, 0, 1, IS_VOID, 0)
#else
ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_seteventsmanager, 0, 1, IS_VOID, NULL, 0)
#endif
#else
ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_acl_adapter_abstractadapter_seteventsmanager, 0, 0, 1)
#define arginfo_phalcon_acl_adapter_abstractadapter_seteventsmanager NULL
#endif
ZEND_ARG_OBJ_INFO(0, eventsManager, Phalcon\\Events\\ManagerInterface, 0)
ZEND_END_ARG_INFO()
ZEPHIR_INIT_FUNCS(phalcon_acl_adapter_abstractadapter_method_entry) {
PHP_ME(Phalcon_Acl_Adapter_AbstractAdapter, getActiveAccess, arginfo_phalcon_acl_adapter_abstractadapter_getactiveaccess, ZEND_ACC_PUBLIC)
PHP_ME(Phalcon_Acl_Adapter_AbstractAdapter, getActiveRole, arginfo_phalcon_acl_adapter_abstractadapter_getactiverole, ZEND_ACC_PUBLIC)
PHP_ME(Phalcon_Acl_Adapter_AbstractAdapter, getActiveComponent, arginfo_phalcon_acl_adapter_abstractadapter_getactivecomponent, ZEND_ACC_PUBLIC)
PHP_ME(Phalcon_Acl_Adapter_AbstractAdapter, getDefaultAction, arginfo_phalcon_acl_adapter_abstractadapter_getdefaultaction, ZEND_ACC_PUBLIC)
PHP_ME(Phalcon_Acl_Adapter_AbstractAdapter, getEventsManager, arginfo_phalcon_acl_adapter_abstractadapter_geteventsmanager, ZEND_ACC_PUBLIC)
PHP_ME(Phalcon_Acl_Adapter_AbstractAdapter, setDefaultAction, arginfo_phalcon_acl_adapter_abstractadapter_setdefaultaction, ZEND_ACC_PUBLIC)
PHP_ME(Phalcon_Acl_Adapter_AbstractAdapter, setEventsManager, arginfo_phalcon_acl_adapter_abstractadapter_seteventsmanager, ZEND_ACC_PUBLIC)
PHP_FE_END
};
|
jmarranz/jnieasy | src/com/innowhere/jnieasy/core/impl/enhancer/model/signat/NativeFieldMethodSignatureEnhancerImpl.java | /*
* NativeFieldMethodSignatureEnhancerImpl.java
*
* Created on 5 de septiembre de 2005, 12:32
*
* To change this template, choose Tools | Options and locate the template under
* the Source Creation and Management node. Right-click the template and choose
* Open. You can then make changes to the template in the Source Editor.
*/
package com.innowhere.jnieasy.core.impl.enhancer.model.signat;
import com.innowhere.jnieasy.core.impl.common.classdesc.model.JavaClassAsNativeDirectFieldCallbackImpl;
import com.innowhere.jnieasy.core.impl.common.signat.model.NativeFieldMethodSignatureImpl;
import com.innowhere.jnieasy.core.impl.enhancer.NativeEnhancerImpl;
import javassist.CtField;
import javassist.CtMember;
/**
*
* @author jmarranz
*/
public abstract class NativeFieldMethodSignatureEnhancerImpl extends NativeBehaviorSignatureEnhancerImpl
{
    /**
     * Creates a new enhancer bound to the given field-method signature.
     *
     * @param signature the native field-method signature being enhanced
     * @param enhancer  the owning enhancer driving the process
     */
    public NativeFieldMethodSignatureEnhancerImpl(NativeFieldMethodSignatureImpl signature,NativeEnhancerImpl enhancer)
    {
        super(signature,enhancer);
    }

    /** Returns the inherited signature narrowed to its field-method subtype. */
    public NativeFieldMethodSignatureImpl getNativeFieldMethodSignature()
    {
        return (NativeFieldMethodSignatureImpl)this.signature;
    }

    /**
     * Builds the unique class name of the native direct callback for the
     * given member, which must be a {@link CtField}.
     */
    public String formNativeDirectCallbackUniqueClassName(CtMember behavior)
    {
        final CtField field = (CtField)behavior;
        final String ownerClassName = field.getDeclaringClass().getName();
        return getNativeFieldMethodSignature()
            .formNativeDirectCallbackUniqueClassName(ownerClassName,field.getName());
    }
}
|
fqueiro/econtools | econtools/geo/krig.py | import numpy as np
from scipy.spatial.distance import pdist, cdist
from scipy.optimize import minimize
import scipy.linalg as la
import matplotlib.pyplot as plt
from econtools.metrics.locallinear import llr
def kriging_weights(X, y, X0, model_name='exp', mle_args=None):
    """
    Calculate Simple Kriging weights for monitor readings (`X`, `y`) at
    locations `X0` using variogram model `model_name`.

    Args
    ----
    X (array, dim M x 2) - Monitor locations
    y (array, len M) - Spatial variable values
    X0 (array, dim N x 2) - Interpolation targets

    Kwargs
    ------
    model_name (str) - Class of model to use for variogram estimation. Valid
        values are
            - 'exp' (Default)
            - 'gauss'
            - 'spherical'
        See documentation for details.
    mle_args (dict) - Pass following to MLE estimation routine:
        `param0` (iterable) - Initial values for `model_name`
        `method` (str) - Optimization method (see `scipy.optimize.minimize`)

    Returns
    -------
    weights (array, dim N x M) - Kriging weights
    """
    # `mle_args=dict()` in the signature would be a shared mutable default;
    # use the standard None sentinel instead (backward compatible).
    if mle_args is None:
        mle_args = dict()
    model = model_factory(model_name)
    mle, D = variogram_mle(X, y, model, mle_args)
    param_est = mle['x']

    # Construct K (monitor covariances w/ Lagrange multiplier)
    M = len(X)                      # Number of monitors
    K = np.ones((M + 1, M + 1))     # Extra row/col for Lagrange multiplier
    K[:M, :M] = model(D, param_est)
    K[-1, -1] = 0                   # For Lagrange

    # Construct k (monitor-target covariance(s), w/ Lagrange multiplier)
    N = len(X0)                     # Number of interpolation targets
    k = np.ones((M + 1, N))
    dist_to_x0 = cdist(X, X0)
    k[:-1, :] = model(dist_to_x0, param_est)    # Assign around Lagrange

    # Solve K @ w = k directly rather than forming `la.inv(K)` explicitly:
    # cheaper and numerically more stable for ill-conditioned covariances.
    weights = la.solve(K, k)[:-1].T             # Drop Lagrange row
    return weights
def check_variogram(X, y, maxd=None, scat=False,
                    npreg_args=None,
                    model_name='exp',
                    mle_args=None,
                    ):
    """
    Visual diagnostic for the fitted variogram.

    Scatter p = (h_ij, (y_i - y_j) ^ 2).
    Kernel regression of p.
    MLE of variogram model to fit p.

    Returns the same tuple as `llr_gamma(..., ret_raw=True)`:
    (xG, dist, sqdiff, est_stats).
    """
    # None-sentinel defaults (mutable `dict()` defaults are shared state).
    npreg_args = dict() if npreg_args is None else npreg_args
    mle_args = dict() if mle_args is None else mle_args

    # Estimate the MLE
    model = model_factory(model_name)
    mle, __ = variogram_mle(X, y, model, mle_args)
    est = mle['x']
    print(mle)

    # Get empirical variogram data. Bug fix: `npreg_args` used to be accepted
    # but never forwarded to the kernel regression; pass it through now.
    xG, h, sqdiff, est_stats = llr_gamma(X, y, maxd=maxd, plot=False,
                                         ret_raw=True, **npreg_args)

    # Plot everything
    x0 = xG[:, 0]           # Kernel reg x's
    llr_fit = xG[:, 1]      # Kernel reg y's (renamed from `llr`, which
                            # shadowed the imported `llr` function)
    fig, ax = plt.subplots()
    if scat:
        ax.scatter(h, sqdiff)          # Scatter actual (h, sqdiffs)
    ax.plot(x0, llr_fit, '-og')        # Plot kernel reg
    fullx = np.linspace(0, x0.max(), 100)
    g_model = est[0] - model(fullx, est)   # Model fit y's
    ax.plot(fullx, g_model, '-b')          # Plot model fit
    plt.show()

    return xG, h, sqdiff, est_stats
# MLE Driver
def variogram_mle(X, y, model, mle_args):
    """Driver for the variogram MLE.

    Minimizes `_likelihood` over the model parameters, starting from
    `mle_args['param0']` with optimizer `mle_args['method']` (default BFGS).

    Returns (scipy OptimizeResult, pairwise-distance matrix D).
    """
    D = cdist(X, X)
    resid = y - y.mean()        # Demeaned observations
    n_obs = len(y)

    # Unpack optimizer options
    start = mle_args.get('param0')
    opt_method = mle_args.get('method', 'BFGS')

    result = minimize(_likelihood, start, args=(D, resid, n_obs, model),
                      method=opt_method)
    return result, D
def _likelihood(ca, D, y_demeaned, n, model):
R = model(D, ca)
R_inv = la.inv(R)
L = np.log(la.det(R)) + y_demeaned.dot(R_inv).dot(y_demeaned)
return L
# Kernel reg of empirical data
def llr_gamma(X, y, maxd=None, scat=False, plot=False, ret_raw=False,
              **npregargs):
    """Estimate the empirical variogram using local linear regression.

    Returns (xG, est_stats), or (xG, dist, sqdiff, est_stats) if `ret_raw`.
    """
    # Raw (distance, squared-difference) pairs, then smooth with kernel reg.
    dist, sqdiff = empirical_gamma(X, y, maxd=maxd)
    xG, est_stats = llr(sqdiff, dist, **npregargs)

    if plot:
        fig, ax = plt.subplots()
        if scat:
            ax.scatter(dist, sqdiff)       # Optionally overlay raw pairs
        ax.plot(xG[:, 0], xG[:, 1], '-og')
        plt.show()

    return (xG, dist, sqdiff, est_stats) if ret_raw else (xG, est_stats)
# Empirical (distance, squared difference)
def empirical_gamma(X, y, maxd=None):
    """
    Raw (distance, squared difference) for every pair in X.

    args
    ----
    X (array) - N x 2 array with arbitrary (x, y) coordinates.
    y (iterable, array-like) - Variable at locations in `X`.

    kwargs
    ------
    maxd (float) - Any distance pairs beyond `maxd` are dropped.

    Returns
    ------
    dist (array) - Flattened upper-triangular distance matrix of rows in
        `X`. See `scipy.spatial.distance.pdist`.
    sqdiff (array) - Accompanying squared difference in y,
        i.e. (y[i] - y[j]) ** 2.
    """
    # Pairwise distances between locations (condensed upper-triangular form).
    dist = pdist(X)
    # (y_i - y_j)^2 for the same (i, j) ordering: `pdist` on the values as an
    # N x 1 array with the squared-euclidean metric replaces the former
    # O(N^2) pure-Python double loop with a single C-level pass.
    y_col = np.asarray(y, dtype=float).reshape(-1, 1)
    sqdiff = pdist(y_col, metric='sqeuclidean')
    if maxd:
        keep = dist < maxd
        sqdiff = sqdiff[keep]
        dist = dist[keep]
    return dist, sqdiff
def get_flat_matrix_idx(i, j, n):
    """
    Convert (i, j) indices (i < j) of an n x n matrix to the index of the
    flattened upper-triangular vector (the ordering `pdist` uses).
    """
    # Number of condensed entries contributed by rows 0..i-1, then the
    # offset of column j within row i.
    rows_before = n * i - i * (i + 1) // 2
    return rows_before + (j - i - 1)
# Variogram Models
def model_factory(model_name):
    """Return the covariance model function for `model_name`.

    Valid names are 'exp', 'gauss', and 'spherical' (the set advertised by
    `kriging_weights`). Previously only 'exp' was wired up even though
    `gauss` and `spherical` are implemented below; they now dispatch too.

    Raises NotImplementedError (kept for backward compatibility) for any
    other name.
    """
    models = {
        'exp': gamma_exp,
        'gauss': gauss,
        'spherical': spherical,
    }
    try:
        return models[model_name]
    except KeyError:
        raise NotImplementedError("Unknown variogram model: %s" % model_name)
def gamma_exp(h, s):
    """Exponential covariance model: sill * exp(-h / range).

    `s` unpacks as (sill, range).
    """
    sill, rng = s
    return sill * np.exp(-h / rng)
def exp_nug(h, s):
    """Exponential variogram with nugget, rescaled as 1 - gamma(h) / sill.

    `s` unpacks as (sill, range, nugget).
    """
    sill, rng, nugget = s
    gamma = nugget + (sill - nugget) * (1 - np.exp(-h / rng))
    return 1 - gamma / sill
def spherical(h, s):
    """Spherical covariance model with sill `sigma2` and range `a`.

    sigma2 * (1 - 1.5*(h/a) + 0.5*(h/a)^3) for h <= a, and 0 beyond the
    range. Previously `g[h > a] = 0` raised TypeError for scalar `h`; the
    `np.where` form handles scalars and arrays alike (array results are
    unchanged).
    """
    sigma2, a = s
    h = np.asarray(h, dtype=float)
    r = h / a
    g = sigma2 * (1 - 1.5 * r + .5 * r ** 3)
    return np.where(h > a, 0.0, g)
def gauss(h, s):
    """Gaussian covariance model: sill * exp(-(h / range)^2).

    `s` unpacks as (sill, range).
    """
    sill, rng = s
    return sill * np.exp(-np.square(h / rng))
if __name__ == "__main__":
    # Library-only module: no CLI behavior is defined.
    pass
|
camrun91/apollos-apps | packages/apollos-server-core/src/linking/index.js | /* eslint-disable import/prefer-default-export */
import ApollosConfig from '@apollosproject/config';
// Default redirect target for visitors without the app installed: the
// platform's app-store listing from the universal-links config.
const defaultCreateRedirectLink = ({ platform }) =>
  platform === 'android'
    ? ApollosConfig.UNIVERSAL_LINKS.PLAY_STORE_LINK
    : ApollosConfig.UNIVERSAL_LINKS.APP_STORE_LINK;
export function setupUniversalLinks({
app,
assetLinks = {},
appleAppSiteAssociation = {},
// Rather than redirecting to the app store, clients can override this function
// to redirect to their content on the web.
createRedirectLink = defaultCreateRedirectLink,
}) {
const {
APPLE_TEAM_ID,
APPLE_APP_ID,
GOOGLE_APP_ID,
GOOGLE_KEYSTORE_SHA256,
} = ApollosConfig.UNIVERSAL_LINKS;
app.get('/.well-known/apple-app-site-association', (req, res) => {
res.setHeader('Content-Type', 'application/json');
res.send(
JSON.stringify({
applinks: {
apps: [],
details: [
{
appID: [APPLE_TEAM_ID, APPLE_APP_ID].join('.'),
paths: ['/app-link/*'],
},
],
},
...appleAppSiteAssociation,
})
);
});
app.get('/.well-known/assetlinks.json', (req, res) => {
res.setHeader('Content-Type', 'application/json');
res.send(
JSON.stringify([
{
relation: ['delegate_permission/common.handle_all_urls'],
target: {
namespace: 'android_app',
package_name: GOOGLE_APP_ID,
sha256_cert_fingerprints: [GOOGLE_KEYSTORE_SHA256],
},
...assetLinks,
},
])
);
});
app.get('/app-link/*', async (req, res) => {
if (/Android/.test(req.headers['user-agent'])) {
const link = await createRedirectLink({ platform: 'android', req });
res.redirect(link);
} else {
const link = await createRedirectLink({ platform: 'ios', req });
res.redirect(link);
}
});
}
// Build an app link of the given `type` ('universal' | 'deep') for `route`
// ('content' | 'nav'), validating arguments before assembling the URL.
export const generateAppLink = (
  type = 'universal',
  route = 'nav',
  args = { screen: 'home' }
) => {
  const TYPES = ['universal', 'deep'];
  const ROUTES = ['content', 'nav'];
  const SCREENS = ['home', 'read', 'watch', 'pray', 'connect'];

  // Guard clauses, kept in the original order so callers see the same errors.
  if (!TYPES.includes(type))
    throw new Error(`Must select link type from ${TYPES}`);
  if (!ROUTES.includes(route))
    throw new Error(`Must select link route from ${ROUTES}`);
  if (route === 'content' && !args.contentID)
    throw new Error('Must pass args.contentID with content route link');
  if (route === 'nav' && !SCREENS.includes(args.screen))
    throw new Error(`Must select screen from ${SCREENS} with nav route link`);

  // path is arbitrary and is handled app side
  // this just provides structure so the app can expect the same thing every time
  const prefix =
    type === 'universal'
      ? `${ApollosConfig.APP.UNIVERSAL_LINK_HOST}/app-link/`
      : `${ApollosConfig.APP.DEEP_LINK_HOST}://app-link/`;
  const tail = route === 'content' ? args.contentID : args.screen;
  return `${prefix}${route}/${tail}`;
};
|
knopflerfish/knopflerfish.org | osgi/bundles/logcommands/src/org/knopflerfish/bundle/logcommands/LogConfigCommandGroup.java | <reponame>knopflerfish/knopflerfish.org<filename>osgi/bundles/logcommands/src/org/knopflerfish/bundle/logcommands/LogConfigCommandGroup.java
/*
* Copyright (c) 2003-2013, KNOPFLERFISH project
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* - Neither the name of the KNOPFLERFISH project nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.knopflerfish.bundle.logcommands;
import java.io.PrintWriter;
import java.io.Reader;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.osgi.framework.Bundle;
import org.knopflerfish.service.console.CommandGroupAdapter;
import org.knopflerfish.service.console.Session;
import org.knopflerfish.service.console.Util;
import org.knopflerfish.service.log.LogConfig;
import org.knopflerfish.service.log.LogUtil;
/**
* Command group for console configuration of the Log.
*
* @author <NAME>, Knopflerfish Project
*/
public class LogConfigCommandGroup
extends CommandGroupAdapter
{
/** Creates the "logconfig" console command group. */
LogConfigCommandGroup()
{
    super("logconfig", "Configuration commands for the log.");
}
//
// Set memory size command
//
public final static String USAGE_MEMORY = "[-c] [<int>]";
public final static String[] HELP_MEMORY =
new String[] { "Number of log entries to keep in memory.",
"The no argument version prints the current setting.",
"<int> The new number of log entries to keep.",
"-c Clear the in memory log.", };
/**
 * Console command: show, change, or clear the in-memory log buffer.
 *
 * @return 0 on success, 1 if the LogConfig service is unavailable,
 *         2 if the size argument is not a valid integer.
 */
public int cmdMemory(final Dictionary<String, ?> opts,
                     final Reader in,
                     final PrintWriter out,
                     final Session session)
{
    final LogConfig logConfig = LogCommands.logConfigTracker.getService();
    if (logConfig == null) {
        out.println("Unable to get a LogConfigService");
        return 1;
    }

    final boolean doClear = opts.get("-c") != null;
    final String sizeArg = (String) opts.get("int");
    int requestedSize = -1;
    if (sizeArg != null) {
        try {
            requestedSize = Integer.parseInt(sizeArg);
        } catch (final NumberFormatException nfe) {
            out.println("Can not set log memory size (" + nfe + ").");
            return 2;
        }
    }

    final int currentSize = logConfig.getMemorySize();
    if (doClear) {
        // Shrinking to a single entry discards the buffered log records;
        // then restore the requested (or previous) capacity.
        logConfig.setMemorySize(1);
        logConfig.setMemorySize(requestedSize > -1 ? requestedSize : currentSize);
    } else if (requestedSize > -1) {
        logConfig.setMemorySize(requestedSize);
    } else {
        // No argument: report the current setting.
        out.println(" log memory size: " + currentSize);
    }
    return 0;
}
//
// Set level command
//
public final static String USAGE_SETLEVEL = "<level> [<bundle>] ...";
public final static String[] HELP_SETLEVEL =
new String[] {
"Set log level",
"<level> The new log level (one of error,warning,info,debug or default)",
"<bundle> The bundle(s) that the new level applies to. If no bundles are",
" given the default level is changed. The bundle may be given as",
" the bundle id, the file location of the bundle or the bundle's",
" short-name. If the bundle's short-name is given then the default",
" configuration for all bundles with the given short-name will be set.",
" This means that if wanting to set the configuration of a specific",
" bundle the bundle id or the bundle location has to be given. ", };
/**
 * Console command: change the log level, either the default level
 * (no bundle arguments) or the level for a set of bundles.
 *
 * @param opts parsed command options; keys "level" and "bundle"
 * @param in command session input (unused)
 * @param out destination for command output
 * @param session command session (unused)
 * @return 0 on success, 1 when the LogConfig service is missing or
 *         the level name is unknown
 */
public int cmdSetlevel(final Dictionary<String, ?> opts,
final Reader in,
final PrintWriter out,
final Session session)
{
// Nothing can be changed without the configuration service.
final LogConfig cfg = LogCommands.logConfigTracker.getService();
if (null == cfg) {
out.println("Unable to get a LogConfigService");
return 1;
}
final String levelName = (String) opts.get("level");
final int newLevel = LogUtil.toLevel(levelName.trim(), -1);
if (-1 == newLevel) {
out.println("Unknown level: " + levelName);
return 1;
}
final String[] bundleArgs = (String[]) opts.get("bundle");
if (null == bundleArgs) {
// No bundles given: change the default filter level.
cfg.setFilter(newLevel);
} else {
setValidBundles(cfg, bundleArgs, newLevel);
}
// Persist the change.
cfg.commit();
return 0;
}
/**
 * Apply the given log level to each bundle selector that can be
 * resolved to a filter key.
 *
 * Selectors that parse as a number are treated as bundle ids and are
 * translated to the bundle's symbolic name (or its location when no
 * symbolic name is present); unknown ids are skipped. Non-numeric
 * selectors are used as filter keys as-is.
 *
 * @param configuration the log configuration to update
 * @param givenBundles bundle selectors (id, short-name or location)
 * @param level the log level to apply
 */
private void setValidBundles(LogConfig configuration,
String[] givenBundles,
int level)
{
String location = null;
for (int i = givenBundles.length - 1; i >= 0; i--) {
location = givenBundles[i].trim();
try {
final long id = Long.parseLong(location);
final Bundle bundle = LogCommands.bc.getBundle(id);
if (null != bundle) {
location = Util.symbolicName(bundle);
if (null == location || 0 == location.length()) {
// Bundle without symbolic name: key on its location.
location = bundle.getLocation();
}
} else {
// Numeric selector but no such bundle installed: skip it.
location = null;
}
} catch (final NumberFormatException nfe) {
// Not a bundle id; use the selector string itself as key.
}
if (location != null && location.length() > 0) {
configuration.setFilter(location, level);
}
}
}
//
// Show level command
//
// Usage pattern and help lines for the "showlevel" console command
// (implemented by cmdShowlevel below).
public final static String USAGE_SHOWLEVEL = "[<bundle>] ...";
public final static String[] HELP_SHOWLEVEL =
new String[] {
"Show current log levels for bundles.",
"When called without an argument, all bundles with a log level configuration",
"will be listed followed by all configurations currently not matching a",
"bundle.",
"<bundle> Show level for the specified bundles only. The bundle",
" may be given as the bundle id, bundle's short-name,",
" bundles symbolic name or the bundle location. If the bundle",
" uses the default log level its line will end with the text \"(default)\".", };
/**
 * Console command: print the default log level, the effective level
 * of each (selected) bundle and, when called without arguments, also
 * the configured levels that match no installed bundle.
 *
 * @param opts parsed command options; key "bundle"
 * @param in command session input (unused)
 * @param out destination for command output
 * @param session command session (unused)
 * @return 0 on success, 1 if the LogConfig service is unavailable
 */
public int cmdShowlevel(Dictionary<String, ?> opts,
Reader in,
PrintWriter out,
Session session)
{
// Get log configuration service
final LogConfig configuration =
LogCommands.logConfigTracker.getService();
if (configuration == null) {
out.println("Unable to get a LogConfigService");
return 1;
}
final Bundle[] bundles = LogCommands.bc.getBundles();
String[] selections = (String[]) opts.get("bundle");
final boolean showAll = null == selections;
if (showAll) {
// No explicit selection: use every key that has a filter entry.
final HashMap<String, Integer> filters = configuration.getFilters();
selections =
filters.keySet().toArray(new String[filters.size()]);
}
// Print the default filter level.
out.println(" * " + LogUtil.fromLevel(configuration.getFilter(), 8)
+ "(default)");
final Set<String> matchedSelectors = new HashSet<String>();
// NOTE(review): Util.selectBundles appears to filter the bundle
// array against the selectors and record which selectors matched
// — confirm against Util's implementation.
Util.selectBundles(bundles, selections, matchedSelectors);
Util.sortBundlesId(bundles);
printBundleLogLevels(configuration, bundles, out);
if (showAll) {
// Also list configured keys that matched no installed bundle.
printConfiguredLogLevels(configuration, selections, matchedSelectors, out);
}
return 0;
}
/**
 * Print one line per (non-null) bundle with its id, effective log
 * level and short name; bundles without an own configuration show the
 * default level and are marked with " (default)".
 */
private void printBundleLogLevels(final LogConfig configuration,
final Bundle[] bundles,
final PrintWriter out)
{
for (final Bundle b : bundles) {
if (b == null) {
continue;
}
final String shortName = Util.shortName(b);
final int configured = getLevel(configuration, b);
final boolean usesDefault = configured < 0;
final int effective = usesDefault ? configuration.getFilter() : configured;
final String suffix = usesDefault ? " (default)" : "";
out.println(Util.showId(b) + " " + LogUtil.fromLevel(effective, 8)
+ shortName + suffix);
}
}
/**
 * Print, in alphabetical order, the configured log levels for
 * selectors that did not match any installed bundle.
 *
 * @param configuration the log configuration to read levels from
 * @param selections all selector strings that were considered
 * @param weedOut selectors already matched by an installed bundle
 * @param out destination for command output
 */
private void printConfiguredLogLevels(final LogConfig configuration,
final String[] selections,
final Set<String> weedOut,
final PrintWriter out)
{
// Collect the distinct, non-empty selectors that no installed
// bundle matched; a TreeSet gives the sorted output order.
final SortedSet<String> unmatched = new TreeSet<String>();
for (final String sel : selections) {
if (sel == null || sel.length() == 0 || weedOut.contains(sel)) {
continue;
}
unmatched.add(sel);
}
for (final String sel : unmatched) {
out.println(" - "
+ LogUtil
.fromLevel(getLevel(configuration, sel, ""), 8)
+ getFullName(sel) + " (Bundle not yet installed)");
}
}
/**
 * Resolve the configured log level of a bundle by trying, in order,
 * its location, its symbolic name (or Bundle-Name header) and its
 * bundle id as filter keys.
 *
 * @return the configured level, or -1 when the bundle has no own
 *         configuration (i.e. the default level applies)
 */
private int getLevel(final LogConfig configuration, final Bundle bundle)
{
final HashMap<String, Integer> filters = configuration.getFilters();
Integer level;
level = filters.get(bundle.getLocation());
if (level == null) {
String l = Util.symbolicName(bundle);
if (l == null) {
// No symbolic name; fall back to the Bundle-Name header.
l = bundle.getHeaders("").get("Bundle-Name");
}
if (l != null) {
level = filters.get(l);
}
}
// Finally try with the bundle id as key.
if (level == null) {
level = filters.get(String.valueOf(bundle.getBundleId()));
}
return (level != null) ? level.intValue() : -1;
}
/**
 * Look up the configured log level for a name, trying the full name
 * first, then the short name, and finally falling back to the
 * default filter level.
 */
private int getLevel(final LogConfig configuration,
final String full_name,
final String short_name)
{
final HashMap<String, Integer> filters = configuration.getFilters();
Integer found = filters.get(full_name);
if (null == found) {
found = filters.get(short_name);
}
if (null == found) {
// Neither key configured: the default level applies.
return configuration.getFilter();
}
return found.intValue();
}
/**
 * Return the bundle name padded with spaces to the fixed column
 * width used by the "showlevel" output.
 */
private String getFullName(String bundle)
{
final StringBuffer padded = new StringBuffer(bundle);
return fillName(padded, 30);
}
/**
 * Pad the buffer with trailing spaces up to the given length and
 * return it as a string; buffers already at least that long are
 * returned unchanged.
 */
private String fillName(StringBuffer sb, int length)
{
for (int pad = length - sb.length(); pad > 0; pad--) {
sb.append(' ');
}
return sb.toString();
}
//
// Set out command
//
// Usage pattern and help lines for the "out" console command
// (implemented by cmdOut below).
public final static String USAGE_OUT = "[-on | -off]";
public final static String[] HELP_OUT =
new String[] {
"Configures logging to standard out",
"-on Turns on writing of log entries to standard out.",
"-off Turns off writing of log entries to standard out.", };
/**
 * Console command: enable/disable copying of log entries to standard
 * out, or print the current setting when no option is given.
 *
 * Unlike cmdFile this command never calls commit(), so the change is
 * not persisted (which is what the warning below tells the user).
 *
 * @param opts parsed command options; keys "-on" and "-off"
 * @param in command session input (unused)
 * @param out destination for command output
 * @param session command session (unused)
 * @return 0 on success, 1 if the LogConfig service is unavailable
 */
public int cmdOut(Dictionary<String, ?> opts, Reader in, PrintWriter out, Session session)
{
// Get log configuration service
final LogConfig configuration =
LogCommands.logConfigTracker.getService();
if (configuration == null) {
out.println("Unable to get a LogConfigService");
return 1;
}
if (!configuration.isDefaultConfig()) {
// Message fix: previously read "This command is no persistent."
out.println(" This command is not persistent. (No valid configuration has been received)");
}
boolean optionFound = false;
// System.out logging on/off
if (opts.get("-on") != null) {
optionFound = true;
configuration.setOut(true);
} else if (opts.get("-off") != null) {
optionFound = true;
configuration.setOut(false);
}
// Show current config
if (!optionFound) {
final boolean isOn = configuration.getOut();
out.println(" Logging to standard out is " + (isOn ? "on" : "off") + ".");
}
return 0;
}
//
// Set file command
//
// Usage pattern and help lines for the "file" console command
// (implemented by cmdFile below).
public final static String USAGE_FILE =
"[-on | -off] [-size #size#] [-gen #gen#] [-flush | -noflush]";
public final static String[] HELP_FILE =
new String[] {
"Configures the file logging (the no argument version prints the current settings)",
"-on Turns on writing of log entries to file.",
"-off Turns off writing of log entries to file.",
"-size #size# Set the maximum size of one log file (characters).",
"-gen #gen# Set the number of log file generations that are kept.",
"-flush Turns on log file flushing after each log entry.",
"-noflush Turns off log file flushing after each log entry.", };
/**
 * Console command: configure logging to file, or print the current
 * file-log settings when called without options.
 *
 * Invalid numeric values for -size/-gen are reported but do not abort
 * the command; any recognized option causes the configuration to be
 * committed (persisted).
 *
 * @param opts parsed command options; keys "-on", "-off", "-flush",
 *             "-noflush", "-size" and "-gen"
 * @param in command session input (unused)
 * @param out destination for command output
 * @param session command session (unused)
 * @return 0 on success, 1 when the LogConfig service or a writable
 *         filesystem is unavailable
 */
public int cmdFile(final Dictionary<String, ?> opts,
final Reader in,
final PrintWriter out,
final Session session)
{
// Get log configuration service
final LogConfig configuration =
LogCommands.logConfigTracker.getService();
if (configuration == null) {
out.println("Unable to get a LogConfigService");
return 1;
}
// File logging needs somewhere to write to.
if (configuration.getDir() == null) {
out.println(" This command is disabled; "
+ "writable filesystem not available.");
return 1;
}
boolean optionFound = false;
// File logging on/off
if (opts.get("-on") != null) {
optionFound = true;
configuration.setFile(true);
} else if (opts.get("-off") != null) {
optionFound = true;
configuration.setFile(false);
}
// Flush
if (opts.get("-flush") != null) {
optionFound = true;
configuration.setFlush(true);
} else if (opts.get("-noflush") != null) {
optionFound = true;
configuration.setFlush(false);
}
// Log size
String value = (String) opts.get("-size");
if (value != null) {
optionFound = true;
try {
configuration.setFileSize(Integer.parseInt(value));
} catch (final NumberFormatException nfe1) {
out.println("Cannot set log size (" + nfe1 + ").");
}
}
// Log generations
value = (String) opts.get("-gen");
if (value != null) {
optionFound = true;
try {
configuration.setMaxGen(Integer.parseInt(value));
} catch (final NumberFormatException nfe2) {
out.println("Cannot set generation count (" + nfe2 + ").");
}
}
if (optionFound) {
// Create persistent CM-config
configuration.commit();
} else {
// Show current config
final boolean isOn = configuration.getFile();
out.println(" file logging is " + (isOn ? "on" : "off") + ".");
out.println(" file size: " + configuration.getFileSize());
out.println(" generations: " + configuration.getMaxGen());
out.println(" flush: " + configuration.getFlush());
out.println(" log location: " + configuration.getDir());
}
return 0;
}
//
// Set timestamp pattern
//
// Usage pattern and help lines for the "timestamp" console command
// (implemented by cmdTimestamp below).
public final static String USAGE_TIMESTAMP = "[<pattern>]";
public final static String[] HELP_TIMESTAMP =
new String[] {
"Configures the timestamp format used by the Knopflerfish log service",
"The no argument version prints the current pattern.",
// Typo fix in the help text: was "Timstamp".
"<pattern> Timestamp pattern as defined by java.text.SimpleDateFormat.", };
/**
 * Console command: set or show the timestamp pattern used when
 * formatting log entries.
 *
 * The new pattern is validated indirectly: it is applied and then
 * read back; if the configuration rejected it the old pattern is
 * reported and the command fails.
 *
 * @param opts parsed command options; key "pattern"
 * @param in command session input (unused)
 * @param out destination for command output
 * @param session command session (unused)
 * @return 0 on success, 1 on missing service or invalid pattern
 */
public int cmdTimestamp(Dictionary<String, ?> opts,
Reader in,
PrintWriter out,
Session session)
{
// Get log configuration service
final LogConfig configuration =
LogCommands.logConfigTracker.getService();
if (configuration == null) {
out.println("Unable to get a LogConfigService");
return 1;
}
final String pattern = (String) opts.get("pattern");
if (null != pattern) {
configuration.setTimestampPattern(pattern);
// Only commit when the configuration actually accepted the
// pattern (read-back equals what we set).
if (pattern.equals(configuration.getTimestampPattern())) {
configuration.commit();
} else {
out.println("Invalid timestamp pattern, '" + pattern + "', using '"
+ configuration.getTimestampPattern() + "'.");
return 1;
}
} else {
out.println(" time stamp pattern: '"
+ configuration.getTimestampPattern() + "'.");
}
return 0;
}
}
|
lottie-c/spl_tests_new | src/java/cz/cuni/mff/spl/evaluator/statistics/package-info.java | /**
* <p>
* Provides classes used for statistical evaluation of SPL comparisons.
* <p>
* Classes encapsulating simple statistical functions in Apache Commons Math library
* (such as standard deviation, mean, median, minimum and maximum for double array).
* <p>
 * Classes representing measurement samples and their statistical data.
* <p>
* Statistical value checker used for validating measurement and comparison properties.
*/
package cz.cuni.mff.spl.evaluator.statistics; |
codegeekgao/framework | Spring-Framerwork/src/main/java/com/codegeek/aop/day1/LogAspect.java | <reponame>codegeekgao/framework<gh_stars>0
package com.codegeek.aop.day1;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.*;
import org.springframework.stereotype.Component;
import java.util.Arrays;
// NOTE(review): @Aspect is commented out, so the advice methods below
// are not applied even though the class is registered as a component.
//@Aspect
@Component
public class LogAspect {
/**
 * Pointcut definition; the execution expression selects the join
 * points (any method in an "aop" package) that advice can bind to.
 */
@Pointcut(value = "execution(* *..aop..*(..))")
public void logPoint() {
}
/**
 * Before advice: runs before the matched method executes.
 */
@Before(value = "execution(public * com.codegeek.aop.day1.CalculatorImpl.*(..))")
public static void logStart(JoinPoint joinPoint) {
Object[] args = joinPoint.getArgs();
// Target object the pointcut actually selected and runs against.
System.out.println(joinPoint.getTarget().getClass());
// System.out.println(joinPoint.getStaticPart()); prints the detailed pointcut expression
System.out.println("LogAspect-普通通知方法@before:" + joinPoint.getSignature().getName() + "日志开始了....方法参数:" + Arrays.asList(args));
}
/**
 * After-returning advice: runs after the matched method returns.
 * @param result the value returned by the advised method
 */
@AfterReturning(value = "execution(public * com.codegeek.aop.day1.CalculatorImpl.*(..))", returning = "result")
public static void logRun(Object result) {
System.out.println("LogAspect-普通通知@AfterReturning" + "运行结果为:" + result);
}
/**
 * After-throwing advice: runs when the matched method throws.
 */
@AfterThrowing(value = "execution(public * com.codegeek.aop.day1.Calculator.*(..))", throwing = "e")
public static void logException(Exception e) {
System.out.println("LogAspect-普通通知@AfterThrowing出异常啦:" + e);
}
/**
 * After (finally) advice: runs once the join point has finished,
 * whether it returned normally or threw.
 */
@After(value = "execution(public * com.codegeek.aop.day1.Calculator.*(..))")
public void logEnd() {
System.out.println("LogAspect-普通通知@After日志结束了");
}
/**
 * Around advice: wraps the join point, logging before, after,
 * on exception and in the finally step (mirroring the four plain
 * advice kinds above).
 */
@Around(value = "logPoint()")
public Object logAround(ProceedingJoinPoint proceedingJoinPoint) {
Object proceed = null;
try {
// @Before
System.out.println("环绕前通知.....当前执行的方法:" + proceedingJoinPoint.getSignature().getName());
proceed = proceedingJoinPoint.proceed(proceedingJoinPoint.getArgs());
// @AfterReturn
System.out.println("环绕后通知.....");
} catch (Throwable throwable) {
// @AfterThrowing
System.out.println("环绕异常通知.......");
// If the exception were only caught here, the plain advices
// would never see it and would treat the call as having
// completed normally — so re-throw it wrapped.
throw new RuntimeException(throwable);
} finally {
// @After
System.out.println("环绕结束通知.....");
}
return proceed;
}
}
|
steven-zhc/hummingbird-framework | hummingbird-runtime/src/main/java/com/hczhang/hummingbird/eventlog/SimpleEventLog.java | package com.hczhang.hummingbird.eventlog;
import com.hczhang.hummingbird.event.Event;
import com.hczhang.hummingbird.model.AggregateRoot;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A very simple event log implementation: it only writes the event to
 * the debug log and does not persist anything.
 *
 * Created by steven on 1/5/15.
 */
public class SimpleEventLog extends AbstractEventLog {
// Class-wide logger used to emit the event records.
private static Logger logger = LoggerFactory.getLogger(SimpleEventLog.class);
/**
 * Record the event by logging it at debug level; the aggregate
 * root is ignored by this implementation.
 */
@Override
public void recordEvent(Event event, AggregateRoot root) {
logger.debug("Fired a event: {}.", event.toString());
}
}
|
rwcoding/goback | internal/acl/api_role_query.go | <gh_stars>0
package acl
import (
"github.com/rwcoding/goback/models"
"github.com/rwcoding/goback/pkg/api"
"github.com/rwcoding/goback/pkg/boot"
)
// roleQueryRequest carries the validated input of the role-query API
// together with the request context.
type roleQueryRequest struct {
ctx *boot.Context
RoleId uint32 `validate:"required,numeric,min=1" json:"role_id"`
}
// permission is one assignable permission row as returned to the
// client (Gid links it to a permission group).
type permission struct {
Id uint32 `json:"id"`
Gid uint32 `json:"gid"`
Name string `json:"name"`
Permission string `json:"permission"`
}
// group is one permission-group row as returned to the client.
type group struct {
Id uint32 `json:"id"`
Name string `json:"name"`
}
// havePermission marks one permission already granted to the role.
type havePermission struct {
Permission string `json:"permission"`
}
// roleQueryResponse is the full payload of the role-query API: the
// role itself, all groups and permissions, and the subset of
// permissions the role already has.
type roleQueryResponse struct {
RoleId uint32 `json:"role_id"`
RoleName string `json:"role_name"`
Groups []group `json:"groups"`
Permissions []permission `json:"permissions"`
PermissionsHave []havePermission `json:"permissions_have"`
}
// NewApiRoleQuery builds the logic handler for the role-query API.
func NewApiRoleQuery(ctx *boot.Context) boot.Logic {
return &roleQueryRequest{ctx: ctx}
}
// Run executes the role-query API: it loads the role, all permission
// groups (plus a synthetic catch-all group with id 0), every known
// permission, and the permissions already granted to the role, and
// returns them all in a single response.
func (req *roleQueryRequest) Run() *api.Response {
// The role must exist, otherwise the request is rejected.
var target models.Role
if err := db().Take(&target, req.RoleId).Error; err != nil {
return api.NewErrorResponse("无效的角色")
}
// All permission groups in display order, followed by the
// catch-all entry for permissions without a group.
var groupRows []group
db().Model(&models.PermissionGroup{}).Order("ord").Find(&groupRows)
groupRows = append(groupRows, group{Id: 0, Name: "未定义"})
// Every permission known to the system.
var permissionRows []permission
db().Model(&models.Permission{}).Find(&permissionRows)
// The permissions currently granted to this role.
var granted []havePermission
db().Model(&models.RolePermission{}).Select("permission").Where("role_id=?", req.RoleId).Find(&granted)
resp := roleQueryResponse{
RoleId: target.Id,
RoleName: target.Name,
Groups: groupRows,
Permissions: permissionRows,
PermissionsHave: granted,
}
return api.NewDataResponse(&resp)
}
|
somkiet073/ci-demos-dashbord | assets/DevExtreme/Demos/WidgetsGallery/Demos/ContextMenu/Basics/jQuery/data.js | <gh_stars>0
// Item hierarchy for the ContextMenu demo; the "Share" item expands
// into a submenu with the individual share targets.
var contextMenuItems = [
{
text: 'Share',
items: [
{ text: 'Facebook' },
{ text: 'Twitter' }]
},
{ text: 'Download' },
{ text: 'Comment' },
{ text: 'Favorite' }
];
problemfighter/pfspring-feature-test-modules | rest-web/src/main/java/com/problemfighter/pfspring/webtestmodule/model/entity/Semester.java | <filename>rest-web/src/main/java/com/problemfighter/pfspring/webtestmodule/model/entity/Semester.java
package com.problemfighter.pfspring.webtestmodule.model.entity;
import com.problemfighter.pfspring.webtestmodule.model.common.EntityCommon;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.OneToMany;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
/**
 * JPA entity describing one academic semester and the registrations
 * that belong to it.
 */
@Entity
public class Semester extends EntityCommon {
// Display name of the semester.
public String name;
// Free-form description; mapped to a TEXT column.
@Column(columnDefinition = "TEXT")
public String description;
// First day of the semester.
public Date start;
// Last day of the semester.
// NOTE(review): "end" is an SQL reserved word in some databases —
// confirm the generated column name is valid for the target DB.
public Date end;
// Registrations made for this semester; fetched lazily.
@OneToMany(mappedBy = "semester", fetch = FetchType.LAZY)
public Set<Registration> registrations = new HashSet<>();
}
|
Bobinette/papernet | google/bolt/repository.go | <gh_stars>1-10
package bolt
import (
"encoding/binary"
"encoding/json"
"github.com/boltdb/bolt"
"github.com/bobinette/papernet/google"
)
// Name of the bolt bucket holding google users.
var googleBucket = []byte("google")

// UserRepository persists google.User records in a bolt bucket; each
// user is stored twice, keyed by numeric id and by Google account id.
type UserRepository struct {
driver *Driver
}
// NewUserRepository wraps the given bolt driver in a UserRepository.
func NewUserRepository(driver *Driver) *UserRepository {
return &UserRepository{
driver: driver,
}
}
// GetByID loads the user stored under the 8-byte numeric id key.
// A missing record yields a zero google.User and a nil error.
func (r *UserRepository) GetByID(id int) (google.User, error) {
var found google.User
view := func(tx *bolt.Tx) error {
raw := tx.Bucket(googleBucket).Get(itob(id))
if raw == nil {
// Not stored: leave `found` as the zero value.
return nil
}
return json.Unmarshal(raw, &found)
}
if err := r.driver.store.View(view); err != nil {
return google.User{}, err
}
return found, nil
}
// GetByGoogleID loads the user stored under the Google account id
// key. A missing record yields a zero google.User and a nil error.
func (r *UserRepository) GetByGoogleID(googleID string) (google.User, error) {
var found google.User
view := func(tx *bolt.Tx) error {
raw := tx.Bucket(googleBucket).Get([]byte(googleID))
if raw == nil {
// Not stored: leave `found` as the zero value.
return nil
}
return json.Unmarshal(raw, &found)
}
if err := r.driver.store.View(view); err != nil {
return google.User{}, err
}
return found, nil
}
// Upsert stores the user under both its Google id and its numeric id
// so that either GetByID or GetByGoogleID can find it.
//
// The whole operation runs in one bolt Update (managed) transaction;
// returning a non-nil error from the closure makes bolt roll the
// transaction back automatically. The previous version called
// tx.Rollback() manually on the second Put failure, which is not
// allowed inside a managed transaction and made bolt panic with
// "managed tx rollback not allowed".
func (r *UserRepository) Upsert(user google.User) error {
return r.driver.store.Update(func(tx *bolt.Tx) error {
bucket := tx.Bucket(googleBucket)
data, err := json.Marshal(user)
if err != nil {
return err
}
if err := bucket.Put([]byte(user.GoogleID), data); err != nil {
return err
}
return bucket.Put(itob(user.ID), data)
})
}
// itob encodes v as an 8-byte big-endian bucket key.
func itob(v int) []byte {
var key [8]byte
binary.BigEndian.PutUint64(key[:], uint64(v))
return key[:]
}
// btoi decodes an 8-byte big-endian bucket key back to an int.
func btoi(b []byte) int {
u := binary.BigEndian.Uint64(b)
return int(u)
}
|
DEShawResearch/fs123 | core123/ut/ut_intuitive_compare.cpp | <gh_stars>10-100
// This file was retrieved from:
// https://github.com/ITHare/util/blob/master/src/type/type.h
// On Aug 6, 2021. The commit is: 6604970e2bf7b83355ca6aef107fc05bca4238dc
// IT HAS BEEN MODIFIED
// Lines modified in the original checkin are marked by the comment: //JKS
/*
Copyright (c) 2018, ITHare.com
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
//#include "../src/type/type.h" // JKS
//#include "lest.hpp" // JKS
// using namespace ithare::util::type; // JKS
#include "core123/intuitive_compare.hpp" // JKS
#include "core123/lest.hpp" // JKS
using core123::comparable;
// With the built-in operators these mixed-signedness comparisons
// compile (with warnings) but give counter-intuitive results, because
// the usual arithmetic conversions turn the signed operand unsigned.
#ifdef WARNINGS_EXPECTED
static_assert(-1>1U);
static_assert( short(-1) < (unsigned short)(1) );//stands (if sizeof(short)<sizeof(int))!
static_assert( int64_t(-1) < 1U );
static_assert( -1 > uint64_t(1) );
static_assert( -1 == unsigned(-1) );
#endif
// Compile-time checks that intuitive::lt/le/gt/ge/eq/ne give the
// mathematically expected answers for the same mixed comparisons.
static_assert( intuitive::lt(-1,1U) );
static_assert( intuitive::lt(short(-1),(unsigned short)(1)) );
static_assert( intuitive::lt(int64_t(-1), 1U) );
static_assert( intuitive::lt(-1,uint64_t(1)) );
static_assert( intuitive::le(-1,1U) );
static_assert( intuitive::le(short(-1),(unsigned short)(1)) );
static_assert( intuitive::le(int64_t(-1), 1U) );
static_assert( intuitive::le(-1,uint64_t(1)) );
static_assert( !intuitive::lt(1U,-1) );
static_assert( !intuitive::lt((unsigned short)(1),short(-1)) );
static_assert( !intuitive::lt( 1U,int64_t(-1)) );
static_assert( !intuitive::lt(uint64_t(1),-1) );
static_assert( !intuitive::le(1U,-1) );
static_assert( !intuitive::le((unsigned short)(1),short(-1)) );
static_assert( !intuitive::le( 1U,int64_t(-1)) );
static_assert( !intuitive::le(uint64_t(1),-1) );
static_assert( !intuitive::gt(-1,1U) );
static_assert( !intuitive::gt(short(-1),(unsigned short)(1)) );
static_assert( !intuitive::gt(int64_t(-1), 1U) );
static_assert( !intuitive::gt(-1,uint64_t(1)) );
static_assert( intuitive::gt(1U,-1) );
static_assert( intuitive::gt((unsigned short)(1),short(-1)) );
static_assert( intuitive::gt( 1U,int64_t(-1)) );
static_assert( intuitive::gt(uint64_t(1),-1) );
static_assert( intuitive::ge(1U,-1) );
static_assert( intuitive::ge((unsigned short)(1),short(-1)) );
static_assert( intuitive::ge( 1U,int64_t(-1)) );
static_assert( intuitive::ge(uint64_t(1),-1) );
static_assert( intuitive::eq(-1,long(-1)) );
static_assert( !intuitive::eq(-1,unsigned(-1)) );
static_assert( !intuitive::ne(-1,(long long)(-1)) );
static_assert( intuitive::ne(-1,unsigned(-1)) );
// Runtime (lest) test specification mirroring the compile-time checks
// above, plus the comparable<> wrapper tests added for core123.
const lest::test specification[] = {
CASE("built-in") {
EXPECT(-1<1);
#ifdef WARNINGS_EXPECTED
EXPECT(-1>1U);
EXPECT( short(-1) < (unsigned short)(1) );//stands (if sizeof(short)<sizeof(int))!
EXPECT( int64_t(-1) < 1U );
EXPECT( -1 > uint64_t(1) );
EXPECT( -1 == unsigned(-1) );
#endif
},
CASE("lt") {
EXPECT( intuitive::lt(-1,1U) );
EXPECT( intuitive::lt(short(-1),(unsigned short)(1)) );
EXPECT( intuitive::lt(int64_t(-1), 1U) );
EXPECT( intuitive::lt(-1,uint64_t(1)) );
EXPECT( intuitive::le(-1,1U) );
},
CASE("gt") {
EXPECT( intuitive::gt(1U,-1) );
EXPECT( intuitive::gt((unsigned short)(1),short(-1)) );
EXPECT( intuitive::gt( 1U,int64_t(-1)) );
EXPECT( intuitive::gt(uint64_t(1),-1) );
EXPECT( intuitive::ge(1U,-1) );
},
CASE("eq") {
EXPECT( !intuitive::eq(-1,unsigned(-1)) );
EXPECT( intuitive::ne(-1,unsigned(-1)) );
},
// <ADDED BY JKS>
CASE("operator<") {
EXPECT( comparable(-1) < comparable(1U) );
EXPECT( comparable(short(-1)) < comparable((unsigned short)(1)) );
EXPECT( comparable(int64_t(-1)) < comparable(1U) );
EXPECT( comparable(-1) < comparable(uint64_t(1)) );
EXPECT( comparable(-1) <= comparable(1U) );
},
CASE("operator>") {
EXPECT( comparable(1U) > comparable(-1) );
EXPECT( comparable((unsigned short)(1)) > comparable(short(-1)) );
EXPECT( comparable( 1U) > comparable(int64_t(-1)) );
EXPECT( comparable(uint64_t(1)) > comparable(-1) );
EXPECT( comparable(1U) >= comparable(-1) );
},
CASE("operator==") {
#ifndef __ICC
// icc can't andle EXPECT( (parenthesized) ) I have no idea why???
EXPECT( !(comparable(-1) == comparable(unsigned(-1))) );
#endif
EXPECT( comparable(-1) != comparable(unsigned(-1)) );
},
// </ADDED BY JKS>
};
// Helper macros that stream an expression together with its value.
#define OUT_SIZEOF(t) "sizeof(" #t ")=" << sizeof(t)
#define OUT_CMP(cond) #cond ": " << ((cond)?"true":"false")
// Print a few illustrative comparisons, then run the lest test
// specification; its result becomes the process exit code.
int main(int argc, char** argv) {
std::cout << OUT_SIZEOF(short) << " " << OUT_SIZEOF(int) << " " << OUT_SIZEOF(int64_t) << std::endl;
#ifdef WARNINGS_EXPECTED
std::cout << OUT_CMP( int(-1) < unsigned(1) ) << std::endl;
std::cout << OUT_CMP( -1 < 1U ) << std::endl;
std::cout << OUT_CMP( short(-1) < (unsigned short)(1) ) << std::endl;
std::cout << OUT_CMP( int64_t(-1) < unsigned(1) ) << std::endl;
std::cout << OUT_CMP( int(-1) < uint64_t(1) ) << std::endl;
#endif
std::cout << OUT_CMP( intuitive::lt(int(-1),unsigned(1)) ) << std::endl;
std::cout << OUT_CMP( intuitive::lt(-1,1U) ) << std::endl;
std::cout << OUT_CMP( intuitive::lt(short(-1), (unsigned short)(1)) ) << std::endl;
std::cout << OUT_CMP( intuitive::lt(int64_t(-1), unsigned(1)) ) << std::endl;
std::cout << OUT_CMP( intuitive::lt(int(-1), uint64_t(1)) ) << std::endl;
std::cout << OUT_CMP( intuitive::gt(unsigned(1),int(-1)) ) << std::endl;
std::cout << OUT_CMP( intuitive::gt(1U,-1) ) << std::endl;
std::cout << OUT_CMP( intuitive::gt((unsigned short)(1),short(-1)) ) << std::endl;
std::cout << OUT_CMP( intuitive::gt(unsigned(1),int64_t(-1)) ) << std::endl;
std::cout << OUT_CMP( intuitive::gt(uint64_t(1),int(-1)) ) << std::endl;
return lest::run(specification,argc,argv);
}
|
ToothlessTheNightFury/CreditCardVerification | Project 4/src/project4/Panel.java | package project4;
/**
 * Callback implemented by UI panels that need to refresh themselves
 * when the application settings change.
 */
public interface Panel {
// Invoked after the settings have been modified.
void onSettingsUpdated();
}
|
daher-alfawares/xr.desktop | sdk/physx/2.8.3/TrainingPrograms/Programs/Shared_Source/Terrain/tga.cpp | /*-----------------------------------------------------------
This is a very simple TGA lib. It will only load and save
uncompressed images in greyscale, RGB or RGBA mode.
If you want a more complete lib I suggest you take
a look at Paul Groves' TGA loader. Paul's home page is at
http://paulyg.virtualave.net
Just a little bit about the TGA file format.
Header - 12 fields
id unsigned char
colour map type unsigned char
image type unsigned char
1 - colour map image
2 - RGB(A) uncompressed
3 - greyscale uncompressed
9 - greyscale RLE (compressed)
10 - RGB(A) RLE (compressed)
colour map first entry short int
colour map length short int
map entry size short int
horizontal origin short int
vertical origin short int
width short int
height short int
pixel depth unsigned char
8 - greyscale
24 - RGB
32 - RGBA
image descriptor unsigned char
From all these fields, we only care about the image type,
to check if the image is uncompressed and not color indexed,
the width and height, and the pixel depth.
	You may use this library for whatever you want. This library is
	provided as is, meaning that I won't take any responsibility for
	any damages that you may incur from its usage.
<NAME> <EMAIL>
-------------------------------------------------------------*/
#include <GL/glut.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "tga.h"
#include "MediaPath.h"
// Counter appended to the base filename by tgaSaveSeries(); it is
// incremented after every save so a series gets distinct file names.
static int savedImages=0;
// load the image header fields. We only keep those that matter!
// (type, width, height, pixel depth); all other header fields are
// read and discarded so that the file position ends up at the start
// of the pixel data.
void tgaLoadHeader(FILE *file, tgaInfo *info) {
unsigned char cGarbage;
short int iGarbage;
fread(&cGarbage, sizeof(unsigned char), 1, file);
fread(&cGarbage, sizeof(unsigned char), 1, file);
// type must be 2 or 3
fread(&info->type, sizeof(unsigned char), 1, file);
fread(&iGarbage, sizeof(short int), 1, file);
fread(&iGarbage, sizeof(short int), 1, file);
fread(&cGarbage, sizeof(unsigned char), 1, file);
fread(&iGarbage, sizeof(short int), 1, file);
fread(&iGarbage, sizeof(short int), 1, file);
fread(&info->width, sizeof(short int), 1, file);
fread(&info->height, sizeof(short int), 1, file);
fread(&info->pixelDepth, sizeof(unsigned char), 1, file);
#ifdef __CELLOS_LV2__
// TGA stores width/height little-endian; byte-swap them when
// building for the big-endian PS3 toolchain.
info->width = ((info->width & 0xff) << 8) | ((info->width >> 8) & 0xff);
info->height = ((info->height & 0xff) << 8) | ((info->height >> 8) & 0xff);
#endif
fread(&cGarbage, sizeof(unsigned char), 1, file);
}
// loads the image pixels. You shouldn't call this function
// directly. Reads width*height*components bytes into the already
// allocated info->imageData and, for 24/32-bit images, swaps the
// stored BGR(A) byte order to RGB(A) in place.
void tgaLoadImageData(FILE *file, tgaInfo *info) {
int mode,total,i;
unsigned char aux;
// mode equal the number of components for each pixel
mode = info->pixelDepth / 8;
// total is the number of bytes we'll have to read
total = info->height * info->width * mode;
fread(info->imageData,sizeof(unsigned char),total,file);
// mode=3 or 4 implies that the image is RGB(A). However TGA
// stores it as BGR(A) so we'll have to swap R and B.
if (mode >= 3)
for (i=0; i < total; i+= mode) {
aux = info->imageData[i];
info->imageData[i] = info->imageData[i+2];
info->imageData[i+2] = aux;
}
}
// this is the function to call when we want to load an image.
//
// Always returns an allocated tgaInfo (or NULL only when that small
// allocation itself fails); check info->status for TGA_OK or one of
// the TGA_ERROR_* codes. Release the result with tgaDestroy().
tgaInfo * tgaLoad(char *filename) {
FILE *file;
tgaInfo *info;
int mode,total;
// allocate memory for the info struct and check!
info = (tgaInfo *)malloc(sizeof(tgaInfo));
if (info == NULL)
return(NULL);
// Start with no pixel data so tgaDestroy() is safe even when one
// of the error paths below returns early. (Previously imageData
// was left uninitialized on those paths, and destroying a failed
// load freed a garbage pointer.)
info->imageData = NULL;
// open the file for reading (binary mode)
char fname[1024];
file = fopen(FindMediaFile(filename, fname), "rb");
if (file == NULL) {
info->status = TGA_ERROR_FILE_OPEN;
return(info);
}
// load the header
tgaLoadHeader(file,info);
// check for errors when loading the header
if (ferror(file)) {
info->status = TGA_ERROR_READING_FILE;
fclose(file);
return(info);
}
// check if the image is color indexed
if (info->type == 1) {
info->status = TGA_ERROR_INDEXED_COLOR;
fclose(file);
return(info);
}
// check for other types (compressed images)
if ((info->type != 2) && (info->type !=3)) {
info->status = TGA_ERROR_COMPRESSED_FILE;
fclose(file);
return(info);
}
// mode equals the number of image components
mode = info->pixelDepth / 8;
// total is the number of bytes to read
total = info->height * info->width * mode;
// allocate memory for image pixels
info->imageData = (unsigned char *)malloc(sizeof(unsigned char) *
total);
// check to make sure we have the memory required
if (info->imageData == NULL) {
info->status = TGA_ERROR_MEMORY;
fclose(file);
return(info);
}
// finally load the image pixels
tgaLoadImageData(file,info);
// check for errors when reading the pixels
if (ferror(file)) {
info->status = TGA_ERROR_READING_FILE;
fclose(file);
return(info);
}
fclose(file);
info->status = TGA_OK;
return(info);
}
// converts RGB to greyscale
// Converts a 24/32-bit image in place to an 8-bit greyscale one
// using the classic luma weights (0.30 R + 0.59 G + 0.11 B); any
// alpha channel is discarded. No-op if the image is already
// greyscale; on allocation failure the image is left unchanged.
void tgaRGBtoGreyscale(tgaInfo *info) {
int mode,i,j;
unsigned char *newImageData;
// if the image is already greyscale do nothing
if (info->pixelDepth == 8)
return;
// compute the number of actual components
mode = info->pixelDepth / 8;
// allocate an array for the new image data
newImageData = (unsigned char *)malloc(sizeof(unsigned char) *
info->height * info->width);
if (newImageData == NULL) {
return;
}
// convert pixels: greyscale = o.30 * R + 0.59 * G + 0.11 * B
for (i = 0,j = 0; j < info->width * info->height; i +=mode, j++)
newImageData[j] = (unsigned char)(0.30 * info->imageData[i] +
0.59 * info->imageData[i+1] +
0.11 * info->imageData[i+2]);
//free old image data
free(info->imageData);
// reassign pixelDepth and type according to the new image type
info->pixelDepth = 8;
info->type = 3;
// reassing imageData to the new array.
info->imageData = newImageData;
}
// takes a screen shot and saves it to a TGA image.
// Reads a w*h RGBA block from the current GL read buffer starting at
// (x,y) and saves it as the next file in the series; the pixel
// buffer is freed by tgaSave(). Returns a TGA_* status code.
int tgaGrabScreenSeries(char *filename, int x,int y, int w, int h) {
unsigned char *imageData;
// allocate memory for the pixels
imageData = (unsigned char *)malloc(sizeof(unsigned char) * w * h * 4);
// read the pixels from the frame buffer
glReadPixels(x,y,w,h,GL_RGBA,GL_UNSIGNED_BYTE, (GLvoid *)imageData);
// save the image
return(tgaSaveSeries(filename,w,h,32,imageData));
}
// saves an array of pixels as a TGA image.
//
// Writes an uncompressed greyscale (8-bit) or RGB(A) (24/32-bit)
// TGA. NOTE: this function takes ownership of imageData — it
// converts the buffer to BGR(A) in place and free()s it before
// returning. NOTE(review): on fopen failure the buffer is NOT freed,
// so that error path leaks — confirm whether callers rely on
// retrying with the same buffer.
int tgaSave(char *filename,
short int width,
short int height,
unsigned char pixelDepth,
unsigned char *imageData) {

unsigned char cGarbage = 0, type,mode,aux;
short int iGarbage = 0;
int i;
FILE *file;
// open file and check for errors
file = fopen(filename, "wb");
if (file == NULL) {
return(TGA_ERROR_FILE_OPEN);
}
// compute image type: 2 for RGB(A), 3 for greyscale
mode = pixelDepth / 8;
if ((pixelDepth == 24) || (pixelDepth == 32))
type = 2;
else
type = 3;
// write the header
fwrite(&cGarbage, sizeof(unsigned char), 1, file);
fwrite(&cGarbage, sizeof(unsigned char), 1, file);
fwrite(&type, sizeof(unsigned char), 1, file);
fwrite(&iGarbage, sizeof(short int), 1, file);
fwrite(&iGarbage, sizeof(short int), 1, file);
fwrite(&cGarbage, sizeof(unsigned char), 1, file);
fwrite(&iGarbage, sizeof(short int), 1, file);
fwrite(&iGarbage, sizeof(short int), 1, file);
fwrite(&width, sizeof(short int), 1, file);
fwrite(&height, sizeof(short int), 1, file);
fwrite(&pixelDepth, sizeof(unsigned char), 1, file);
fwrite(&cGarbage, sizeof(unsigned char), 1, file);
// convert the image data from RGB(a) to BGR(A)
if (mode >= 3)
for (i=0; i < width * height * mode ; i+= mode) {
aux = imageData[i];
imageData[i] = imageData[i+2];
imageData[i+2] = aux;
}
// save the image data
fwrite(imageData, sizeof(unsigned char), width * height * mode, file);
fclose(file);
// release the memory
free(imageData);
return(TGA_OK);
}
// saves a series of files with names "filenameX.tga"
int tgaSaveSeries(char *filename,
short int width,
short int height,
unsigned char pixelDepth,
unsigned char *imageData) {
char *newFilename;
int status;
// compute the new filename by adding the series number and the extension
newFilename = (char *)malloc(sizeof(char) * strlen(filename)+8);
sprintf(newFilename,"%s%d.tga",filename,savedImages);
// save the image
status = tgaSave(newFilename,width,height,pixelDepth,imageData);
//increase the counter
savedImages++;
return(status);
}
// releases the memory used for the image
// Frees both the pixel buffer and the tgaInfo record itself.
// Accepts NULL, in which case nothing happens.
void tgaDestroy(tgaInfo *info) {
    if (info == NULL)
        return;
    free(info->imageData);
    free(info);
}
|
rjw57/tiw-computer | emulator/src/mame/includes/taitosj.h | // license:BSD-3-Clause
// copyright-holders:<NAME>
#include "machine/taitosjsec.h"
#include "machine/input_merger.h"
#include "sound/dac.h"
#include "sound/discrete.h"
#include "sound/ay8910.h"
#include "screen.h"
// Driver state for the Taito SJ system: bundles the shared memory regions,
// emulated devices and per-frame video/collision bookkeeping used by the
// driver and video code.
class taitosj_state : public driver_device
{
public:
taitosj_state(const machine_config &mconfig, device_type type, const char *tag)
: driver_device(mconfig, type, tag),
m_videoram_1(*this, "videoram_1"),
m_videoram_2(*this, "videoram_2"),
m_videoram_3(*this, "videoram_3"),
m_spriteram(*this, "spriteram"),
m_paletteram(*this, "paletteram"),
m_characterram(*this, "characterram"),
m_scroll(*this, "scroll"),
m_colscrolly(*this, "colscrolly"),
m_gfxpointer(*this, "gfxpointer"),
m_colorbank(*this, "colorbank"),
m_video_mode(*this, "video_mode"),
m_video_priority(*this, "video_priority"),
m_collision_reg(*this, "collision_reg"),
m_kikstart_scrollram(*this, "kikstart_scroll"),
m_maincpu(*this, "maincpu"),
m_audiocpu(*this, "audiocpu"),
m_mcu(*this, "bmcu"),
m_soundnmi(*this, "soundnmi"),
m_soundnmi2(*this, "soundnmi2"),
m_dac(*this, "dac"),
m_dacvol(*this, "dacvol"),
m_ay1(*this, "ay1"),
m_ay2(*this, "ay2"),
m_ay3(*this, "ay3"),
m_ay4(*this, "ay4"),
m_gfxdecode(*this, "gfxdecode"),
m_screen(*this, "screen"),
m_palette(*this, "palette") { }
// --- shared memory regions (bound by tag in the constructor above) ---
required_shared_ptr<uint8_t> m_videoram_1;
required_shared_ptr<uint8_t> m_videoram_2;
required_shared_ptr<uint8_t> m_videoram_3;
required_shared_ptr<uint8_t> m_spriteram;
required_shared_ptr<uint8_t> m_paletteram;
required_shared_ptr<uint8_t> m_characterram;
required_shared_ptr<uint8_t> m_scroll;
required_shared_ptr<uint8_t> m_colscrolly;
required_shared_ptr<uint8_t> m_gfxpointer;
required_shared_ptr<uint8_t> m_colorbank;
required_shared_ptr<uint8_t> m_video_mode;
required_shared_ptr<uint8_t> m_video_priority;
required_shared_ptr<uint8_t> m_collision_reg;
// optional: only the kikstart hardware variant maps this region
optional_shared_ptr<uint8_t> m_kikstart_scrollram;
// --- emulated devices ---
required_device<cpu_device> m_maincpu;
required_device<cpu_device> m_audiocpu;
// optional: only present on the MCU-protected board variants
optional_device<taito_sj_security_mcu_device> m_mcu;
required_device<input_merger_device> m_soundnmi;
required_device<input_merger_device> m_soundnmi2;
required_device<dac_8bit_r2r_device> m_dac;
required_device<discrete_device> m_dacvol;
required_device<ay8910_device> m_ay1;
required_device<ay8910_device> m_ay2;
required_device<ay8910_device> m_ay3;
required_device<ay8910_device> m_ay4;
required_device<gfxdecode_device> m_gfxdecode;
required_device<screen_device> m_screen;
required_device<palette_device> m_palette;
// pointer-to-member type for the per-game layer copy routine
// (taitosj_copy_layer / kikstart_copy_layer below)
typedef void (taitosj_state::*copy_layer_func_t)(bitmap_ind16 &,
const rectangle &, int, int *, rectangle *);
// --- runtime state ---
uint8_t m_input_port_4_f0;
uint8_t m_kikstart_gears[2];
uint8_t m_spacecr_prot_value;
uint8_t m_protection_value;
uint32_t m_address;
uint8_t m_soundlatch_data;
bool m_soundlatch_flag; // 74ls74 1/2 @ GAME BOARD IC42
bool m_sound_semaphore2; // 74ls74 2/2 @ GAME BOARD IC42
// --- video working bitmaps (layers and collision detection scratch) ---
bitmap_ind16 m_layer_bitmap[3];
bitmap_ind16 m_sprite_sprite_collbitmap1;
bitmap_ind16 m_sprite_sprite_collbitmap2;
bitmap_ind16 m_sprite_layer_collbitmap1;
bitmap_ind16 m_sprite_layer_collbitmap2[3];
int m_draw_order[32][4];
// --- memory-mapped handlers and timer callbacks ---
DECLARE_WRITE8_MEMBER(soundlatch_w);
DECLARE_WRITE8_MEMBER(sound_semaphore2_w);
TIMER_CALLBACK_MEMBER(soundlatch_w_cb);
TIMER_CALLBACK_MEMBER(soundlatch_clear7_w_cb);
TIMER_CALLBACK_MEMBER(sound_semaphore2_w_cb);
TIMER_CALLBACK_MEMBER(sound_semaphore2_clear_w_cb);
DECLARE_READ8_MEMBER(soundlatch_r);
DECLARE_WRITE8_MEMBER(soundlatch_clear7_w);
DECLARE_READ8_MEMBER(soundlatch_flags_r);
DECLARE_WRITE8_MEMBER(sound_semaphore2_clear_w);
DECLARE_WRITE8_MEMBER(taitosj_bankswitch_w);
DECLARE_READ8_MEMBER(taitosj_fake_data_r);
DECLARE_WRITE8_MEMBER(taitosj_fake_data_w);
DECLARE_READ8_MEMBER(taitosj_fake_status_r);
DECLARE_READ8_MEMBER(mcu_mem_r);
DECLARE_WRITE8_MEMBER(mcu_mem_w);
DECLARE_WRITE_LINE_MEMBER(mcu_intrq_w);
DECLARE_WRITE_LINE_MEMBER(mcu_busrq_w);
DECLARE_READ8_MEMBER(spacecr_prot_r);
DECLARE_WRITE8_MEMBER(alpine_protection_w);
DECLARE_WRITE8_MEMBER(alpinea_bankswitch_w);
DECLARE_READ8_MEMBER(alpine_port_2_r);
DECLARE_READ8_MEMBER(taitosj_gfxrom_r);
DECLARE_WRITE8_MEMBER(taitosj_characterram_w);
DECLARE_WRITE8_MEMBER(junglhbr_characterram_w);
DECLARE_WRITE8_MEMBER(taitosj_collision_reg_clear_w);
DECLARE_CUSTOM_INPUT_MEMBER(input_port_4_f0_r);
DECLARE_CUSTOM_INPUT_MEMBER(kikstart_gear_r);
DECLARE_WRITE8_MEMBER(taitosj_sndnmi_msk_w);
DECLARE_WRITE8_MEMBER(input_port_4_f0_w);
DECLARE_WRITE8_MEMBER(taitosj_dacvol_w);
// --- per-game driver init entry points ---
DECLARE_DRIVER_INIT(alpinea);
DECLARE_DRIVER_INIT(alpine);
DECLARE_DRIVER_INIT(taitosj);
DECLARE_DRIVER_INIT(junglhbr);
DECLARE_DRIVER_INIT(spacecr);
virtual void machine_start() override;
virtual void machine_reset() override;
virtual void video_start() override;
uint32_t screen_update_taitosj(screen_device &screen, bitmap_ind16 &bitmap, const rectangle &cliprect);
uint32_t screen_update_kikstart(screen_device &screen, bitmap_ind16 &bitmap, const rectangle &cliprect);
// --- video/collision helpers (implemented in the video source file) ---
void init_common();
void reset_common();
void set_pens();
void compute_draw_order();
inline int get_sprite_xy(uint8_t which, uint8_t* sx, uint8_t* sy);
inline gfx_element *get_sprite_gfx_element(uint8_t which);
void check_sprite_sprite_collision();
void calculate_sprite_areas(int *sprites_on, rectangle *sprite_areas);
int check_sprite_layer_bitpattern(int which, rectangle *sprite_areas);
void check_sprite_layer_collision(int *sprites_on, rectangle *sprite_areas);
void draw_layers();
void draw_sprites(bitmap_ind16 &bitmap);
void check_collision(int *sprites_on, rectangle *sprite_areas);
int check_sprite_sprite_bitpattern(int sx1, int sy1, int which1,int sx2, int sy2, int which2);
void taitosj_copy_layer(bitmap_ind16 &bitmap, const rectangle &cliprect,int which, int *sprites_on, rectangle *sprite_areas);
void kikstart_copy_layer(bitmap_ind16 &bitmap, const rectangle &cliprect,int which, int *sprites_on, rectangle *sprite_areas);
void copy_layer(bitmap_ind16 &bitmap, const rectangle &cliprect,copy_layer_func_t copy_layer_func, int which, int *sprites_on, rectangle *sprite_areas);
void copy_layers(bitmap_ind16 &bitmap, const rectangle &cliprect,copy_layer_func_t copy_layer_func, int *sprites_on, rectangle *sprite_areas);
int video_update_common(bitmap_ind16 &bitmap, const rectangle &cliprect, copy_layer_func_t copy_layer_func);
// --- machine configurations and address maps ---
void mcu(machine_config &config);
void nomcu(machine_config &config);
void kikstart(machine_config &config);
void kikstart_main_map(address_map &map);
void taitosj_audio_map(address_map &map);
void taitosj_main_mcu_map(address_map &map);
void taitosj_main_nomcu_map(address_map &map);
};
|
cosbi-research/qsp-cc | middleend/src/eu/cosbi/qspcc/tree/listeners/StatementsWalker.java | package eu.cosbi.qspcc.tree.listeners;
import java.util.Deque;
import eu.cosbi.qspcc.ast.AAST;
import eu.cosbi.qspcc.ast.AASTNode;
import eu.cosbi.qspcc.ast.NodeType;
import eu.cosbi.qspcc.ast.attrs.NodeAttr;
import eu.cosbi.qspcc.exceptions.GException;
import eu.cosbi.qspcc.expressions.type.TypeDefinition.BType;
/**
 * Re-walks every statement that was flagged for re-parsing once all function
 * parameter types are known, resolving the remaining unknown types
 * (including referenced global variables).
 *
 * @author tomasoni
 */
public class StatementsWalker extends FrontEndWalker {
    // Statement currently being re-parsed; null while scanning for the next one.
    private AASTNode curStatement;

    public StatementsWalker(AAST mainAAST) {
        super(mainAAST);
    }

    @Override
    public void onEnter(AAST aast, AASTNode node) throws GException {
        if (curStatement != null) {
            // Inside a statement being re-parsed: forward every node.
            super.onEnter(aast, node);
            return;
        }
        if (!node.hasAttr(NodeAttr.REPARSE_STATEMENT))
            return; // not re-parsing, and this node does not start a re-parse
        // Found the next statement whose types need resolving.
        curStatement = node;
        super.onEnter(aast, node);
    }

    @Override
    public void onExit(AAST aast, AASTNode node, Deque<Object> results) throws GException {
        boolean statementComplete = node.hasAttr(NodeAttr.REPARSE_STATEMENT)
                && node.equals(curStatement);
        if (statementComplete) {
            // The whole statement has been walked: finish it and reset.
            super.onExit(aast, node, results);
            checkAndMarkAsResolved(node);
            curStatement = null;
        } else if (curStatement != null) {
            super.onExit(aast, node, results);
        }
    }

    /**
     * Drops the re-parse flag only when no node in the subtree is still of
     * UNKNOWN type, so an unresolved statement can be re-queued later.
     */
    private void checkAndMarkAsResolved(AASTNode node) {
        if (!allNodesSet(true, node))
            return;
        node.removeAttr(NodeAttr.REPARSE_STATEMENT);
        AASTNode enclosingFunction = node.parent(NodeType.FUNCTION);
        if (enclosingFunction == null)
            enclosingFunction = new AASTNode(null, null, null);
        node.attr(NodeAttr.STATEMENT_RESOLVED, enclosingFunction);
    }

    /**
     * Recursively checks that this node and its whole subtree carry a
     * resolved (non-UNKNOWN) expression type.
     */
    private boolean allNodesSet(boolean allset, AASTNode node) {
        boolean ok = allset
                && (node.expr() == null || !node.expr().equals(BType.UNKNOWN));
        for (AASTNode child : node.childs()) {
            if (!ok)
                return false;
            ok = allNodesSet(ok, child);
        }
        return ok;
    }

    @Override
    public void onWalkCompleted(AAST ref) throws GException {
        // intentionally empty: no work needed at end of walk
    }

    @Override
    public void onWalkStarted(AAST ref) throws GException {
        // intentionally empty: no work needed at start of walk
    }
}
|
moogacs/aws-sdk-go-v2 | service/pi/doc.go | // Code generated by smithy-go-codegen DO NOT EDIT.
// Package pi provides the API client, operations, and parameter types for AWS
// Performance Insights.
//
// Amazon RDS Performance Insights Amazon RDS Performance Insights enables you to
// monitor and explore different dimensions of database load based on data captured
// from a running DB instance. The guide provides detailed information about
// Performance Insights data types, parameters and errors. When Performance
// Insights is enabled, the Amazon RDS Performance Insights API provides visibility
// into the performance of your DB instance. Amazon CloudWatch provides the
// authoritative source for AWS service-vended monitoring metrics. Performance
// Insights offers a domain-specific view of DB load. DB load is measured as
// Average Active Sessions. Performance Insights provides the data to API consumers
// as a two-dimensional time-series dataset. The time dimension provides DB load
// data for each time point in the queried time range. Each time point decomposes
// overall load in relation to the requested dimensions, measured at that time
// point. Examples include SQL, Wait event, User, and Host.
//
// * To learn more about
// Performance Insights and Amazon Aurora DB instances, go to the Amazon Aurora
// User Guide
// (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/USER_PerfInsights.html).
//
// *
// To learn more about Performance Insights and Amazon RDS DB instances, go to the
// Amazon RDS User Guide
// (https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_PerfInsights.html).
package pi
|
briefjudofox/expo | packages/html-elements/build/primitives/Text.js | <filename>packages/html-elements/build/primitives/Text.js
import { Text as NativeText } from 'react-native';
// The HTML-elements Text primitive is React Native's Text component,
// re-exported unchanged as the default export.
export default NativeText;
//# sourceMappingURL=Text.js.map |
HiltonRoscoe/exchangerxml | src/com/cladonia/xml/xdiff/XParser.java | /**
* XDiff -- A part of Niagara Project
* Author: <NAME>
*
* Copyright (c) Computer Sciences Department,
* University of Wisconsin -- Madison
* All Rights Reserved._
*
* Permission to use, copy, modify and distribute this software and
* its documentation is hereby granted, provided that both the copyright
* notice and this permission notice appear in all copies of the software,
* derivative works or modified versions, and any portions thereof, and
* that both notices appear in supporting documentation._
*
* THE AUTHOR AND THE COMPUTER SCIENCES DEPARTMENT OF THE UNIVERSITY OF
* WISCONSIN - MADISON ALLOW FREE USE OF THIS SOFTWARE IN ITS "AS IS"
* CONDITION, AND THEY DISCLAIM ANY LIABILITY OF ANY KIND FOR ANY DAMAGES
* WHATSOEVER RESULTING FROM THE USE OF THIS SOFTWARE._
*
* This software was developed with support by DARPA through Rome Research
* Laboratory Contract No.F30602-97-2-0247.
*
* Please report bugs or send your comments to <EMAIL>
*/
package com.cladonia.xml.xdiff;
import java.net.URL;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.ext.LexicalHandler;
import com.cladonia.xml.XMLUtilities;
/**
* <code>XParser</code> parses an input XML document and constructs an
* <code>XTree</code>
*/
class XParser extends DefaultHandler implements LexicalHandler
{
private static final String _PARSER_NAME = "org.apache.xerces.parsers.SAXParser";
private static boolean _setValidation = false;
private static boolean _setNameSpaces = true;
private static boolean _setSchemaSupport = true;
private static boolean _setSchemaFullSupport = false;
private static boolean _setNameSpacePrefixes = true;
// initial capacity of the parse stacks below; note they are NOT grown,
// so documents nested deeper than this would overflow -- TODO confirm
private static int _STACK_SIZE = 100;
private XMLReader _parser;
private XTree _xtree;
// Three parallel stacks indexed by _stackTop, one frame per open element:
// the element's node id, its last-seen child (left sibling) id, and the
// running hash value accumulated for its subtree.
private int _idStack[], _lsidStack[]; // id and left sibling
private long _valueStack[];
private int _stackTop, _currentNodeID;
// true between startElement and the first nested element/endElement;
// distinguishes pure text content from mixed content
private boolean _readElement;
// accumulates character data between structural SAX events
private StringBuffer _elementBuffer;
/**
 * Constructor. Creates the Xerces SAX reader, registers this object as
 * content, error and lexical handler, and initializes the parse stacks.
 */
public XParser() throws Exception
{
XHash.initialize();
try
{
_parser = (XMLReader)Class.forName(_PARSER_NAME).newInstance();
_parser.setFeature("http://xml.org/sax/features/validation", _setValidation);
_parser.setFeature("http://xml.org/sax/features/namespaces", _setNameSpaces);
_parser.setFeature("http://apache.org/xml/features/validation/schema", _setSchemaSupport);
_parser.setFeature("http://apache.org/xml/features/validation/schema-full-checking", _setSchemaFullSupport);
_parser.setFeature("http://xml.org/sax/features/namespace-prefixes", _setNameSpacePrefixes);
_parser.setContentHandler(this);
_parser.setErrorHandler(this);
_parser.setProperty("http://xml.org/sax/properties/lexical-handler", this);
}
catch (Exception e)
{
System.err.println(e.getMessage());
throw new Exception(e);
//System.exit(1);
}
_idStack = new int[_STACK_SIZE];
_lsidStack = new int[_STACK_SIZE];
_valueStack = new long[_STACK_SIZE];
_stackTop = 0;
_currentNodeID = XTree.NULL_NODE;
_elementBuffer = new StringBuffer();
}
/**
 * Parse an XML document
 * @param uri input XML document
 * @return the created XTree
 */
public XTree parse(String uri) throws Exception
{
_xtree = new XTree();
_idStack[_stackTop] = XTree.NULL_NODE;
_lsidStack[_stackTop] = XTree.NULL_NODE;
try
{
URL url = new URL(uri);
// pre-process the input to escape bare ampersands before SAX parsing
XMLUtilities.XMLReader reader = XMLUtilities.replaceAmp(url);
InputSource source = new InputSource(reader);
_parser.parse(source);
//_parser.parse(uri);
}
catch (Exception e)
{
System.err.println(e.getMessage());
throw new Exception(e);
//System.exit(1);
}
return _xtree;
}
// Document handler methods
public void startElement(String uri, String local, String raw,
Attributes attrs)
{
// if text is mixed with elements: flush any text accumulated before
// this child as a text node of the parent
if (_elementBuffer.length() > 0)
{
String text = _elementBuffer.toString().trim();
if (text.length() > 0)
{
long value = XHash.hash(text);
int tid = _xtree.addText(_idStack[_stackTop], _lsidStack[_stackTop], text, value);
_lsidStack[_stackTop] = tid;
_currentNodeID = tid;
_valueStack[_stackTop] += value;
}
}
//int eid = _xtree.addElement(_idStack[_stackTop],
// _lsidStack[_stackTop], local);
// namespace-qualify the element name (localName + namespace URI) so
// that equal names in different namespaces hash differently
String localURI = "";
if (uri != null && !uri.equals(""))
{
localURI = local+uri;
}
else
{
localURI = local;
}
int eid = _xtree.addElement(_idStack[_stackTop],
_lsidStack[_stackTop], raw, localURI );
// Update last sibling info.
_lsidStack[_stackTop] = eid;
// Push a new stack frame for this element
_stackTop++;
_idStack[_stackTop] = eid;
_currentNodeID = eid;
_lsidStack[_stackTop] = XTree.NULL_NODE;
//_valueStack[_stackTop] = XHash.hash(local);
//_valueStack[_stackTop] = XHash.hash(raw);
_valueStack[_stackTop] = XHash.hash(uri+local);
// check for root
// if (_stackTop == 1)
// {
// _xtree.setRootHash(XHash.hash(uri+local));
// }
// Take care of attributes: each attribute hash combines the squared
// name and value hashes so it is order-independent within the element
if ((attrs != null) && (attrs.getLength() > 0))
{
for (int i = 0; i < attrs.getLength(); i++)
{
String name = attrs.getQName(i);
String value = attrs.getValue(i);
//long namehash = XHash.hash(name);
String attrNameURI = "";
if (attrs.getURI(i) != null && !attrs.getURI(i).equals(""))
{
attrNameURI = attrs.getLocalName(i)+attrs.getURI(i);
}
else
{
attrNameURI = attrs.getLocalName(i);
}
long namehash = XHash.hash(attrNameURI);
long valuehash = XHash.hash(value);
long attrhash = namehash * namehash +
valuehash * valuehash;
int aid = _xtree.addAttribute(eid, _lsidStack[_stackTop], name, value, namehash, attrhash, attrNameURI);
_lsidStack[_stackTop] = aid;
// NOTE(review): aid + 1 assumes the attribute's value node is
// allocated immediately after the attribute node -- verify
// against XTree.addAttribute
_currentNodeID = aid + 1;
_valueStack[_stackTop] += attrhash * attrhash;
}
}
_readElement = true;
_elementBuffer = new StringBuffer();
}
public void characters(char ch[], int start, int length)
{
// buffer character data; it is turned into text nodes on the next
// structural event (startElement/endElement)
_elementBuffer.append(ch, start, length);
}
public void endElement(String uri, String local, String raw)
{
if (_readElement)
{
// no nested element was seen: the buffer is this element's
// (possibly empty) text content
if (_elementBuffer.length() > 0)
{
String text = _elementBuffer.toString();
long value = XHash.hash(text);
_currentNodeID =
_xtree.addText(_idStack[_stackTop],
_lsidStack[_stackTop],
text, value);
_valueStack[_stackTop] += value;
}
else // an empty element
{
_currentNodeID =
_xtree.addText(_idStack[_stackTop],
_lsidStack[_stackTop],
"", 0);
}
_readElement = false;
}
else
{
if (_elementBuffer.length() > 0)
{
String text = _elementBuffer.toString().trim();
// More text nodes before end of the element.
if (text.length() > 0)
{
long value = XHash.hash(text);
_currentNodeID =
_xtree.addText(_idStack[_stackTop],
_lsidStack[_stackTop],
text, value);
_valueStack[_stackTop] += value;
}
}
}
_elementBuffer = new StringBuffer();
// record this element's accumulated subtree hash, then fold its square
// into the parent's running hash (order-independent combination)
_xtree.addHashValue(_idStack[_stackTop],_valueStack[_stackTop]);
_valueStack[_stackTop-1] += _valueStack[_stackTop] * _valueStack[_stackTop];
_lsidStack[_stackTop-1] = _idStack[_stackTop];
// Pop
_stackTop--;
}
// End of document handler methods
// Lexical handler methods.
public void startCDATA()
{
// The text node id should be the one next to the current
// node id.
// NOTE(review): assumes the CDATA text node will be allocated as the
// next node id -- verify against XTree.addText ordering
int textid = _currentNodeID + 1;
String text = _elementBuffer.toString();
_xtree.addCDATA(textid, text.length());
}
public void endCDATA()
{
// records the end offset of the CDATA section within the buffered text
int textid = _currentNodeID + 1;
String text = _elementBuffer.toString();
_xtree.addCDATA(textid, text.length());
}
// Following functions are not implemented.
public void comment(char[] ch, int start, int length)
{
}
public void startDTD(String name, String publicId, String systemId)
{
}
public void endDTD()
{
}
public void startEntity(String name)
{
}
public void endEntity(String name)
{
}
// End of lexical handler methods.
}
|
wangjianxiandev/HappyCode | app/src/main/java/com/redant/codeland/scratchgame/fragment/LevelFragment.java | package com.redant.codeland.scratchgame.fragment;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.SimpleAdapter;
import android.widget.Toast;
import com.redant.codeland.MyButton;
import com.redant.codeland.ParallelViewHelper;
import com.redant.codeland.R;
import com.redant.codeland.entity.LevelInfo;
import com.redant.codeland.scratchgame.ScratchJrActivity;
import com.redant.codeland.ui.AnimalKindActivity;
import com.redant.codeland.ui.BaseLevelActivity;
import com.redant.codeland.ui.BoxBlocklyActivity;
import com.redant.codeland.ui.CelebrityBlocklyActivity;
import com.redant.codeland.ui.CodingLearningActivity;
import com.redant.codeland.ui.EnglishBlocklyActivity;
import com.redant.codeland.ui.PacmanActivity;
import com.redant.codeland.ui.PoetryBlocklyActivity;
import com.redant.codeland.ui.SanzijingBlocklyActivity;
import com.redant.codeland.ui.TadpoleBlocklyActivity;
import com.redant.codeland.ui.TreasureBlocklyActivity;
import com.redant.codeland.ui.TurtleActivity;
import com.yatoooon.screenadaptation.ScreenAdapterTools;
import org.litepal.crud.DataSupport;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static android.content.Context.MODE_PRIVATE;
// Fragment showing the level-selection grid for one game module: reads the
// module's level list from assets, shows locked/unlocked levels with star
// ratings from the database, and launches the matching activity on tap.
public class LevelFragment extends Fragment implements AdapterView.OnItemClickListener {
// private int englishChapterNumber=0;
// Dynamic int arrays (List<int> cannot hold primitives)
private ArrayList<Integer> pictureIdArray;
private ArrayList<String> levelNameArray;
private ArrayList<Integer> ratingNumArray;
private GridView gridView;
private List<Map<String,Object>> dataList;
private SimpleAdapter simpleAdapter;
private
int clickedLevel; // level selected in the current module; assigned on click
private int unlockLevel; // highest unlocked level of the current module; read from SharedPreferences, used when saving
private int maxLevel=0; // total levels in the current module; accumulated while reading the level file, used when saving
private String model; // which module is active; received from the previous activity's intent and forwarded onward
private String modelUnlockLevel; // preference key: module's highest unlocked level (module 1: 0-1 stars do not count as unlocked, 2-3 do; module 2: 0 stars does not count, 1-3 do)
private String modelMaxLevel; // preference key: module's total level count
private String modelUrl; // asset path of the current module's level list (opened via getAssets())
private String modelRating; // database key for this module's ratings
private Class nextClass; // activity launched when an unlocked level is tapped
// private MyButton button_back_level;
private ImageView imageView_background;
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
View view=inflater.inflate(R.layout.hub_level,container,false);
getTransitiveData();
return view;
}
// Reads the module name passed in via the fragment arguments.
public void getTransitiveData(){
model=(String)getArguments().getString("model");
}
@Override
public void onStart() {
super.onStart();
imageView_background=getActivity().findViewById(R.id.background_chapter);
}
@Override
public void onResume() {
super.onResume();
// rebuild the grid every time we return, so new ratings/unlocks show up
refreshEnglishChapterActivity();
}
// Builds the adapter rows: one map per level with its icon, label and the
// three star images derived from the rating (-1 = locked).
private List<Map<String,Object>> getData(){
for(int counter=0;counter<pictureIdArray.size();counter++){
Map<String,Object> map=new HashMap<>();
map.put("Image",pictureIdArray.get(counter));
map.put("Text",levelNameArray.get(counter));
if(ratingNumArray.get(counter)==0){
map.put("FirstRate",R.mipmap.star_black);
map.put("SecondRate",R.mipmap.star_black);
map.put("ThirdRate",R.mipmap.star_black);
}else if(ratingNumArray.get(counter)==1){
map.put("FirstRate",R.mipmap.star_light);
map.put("SecondRate",R.mipmap.star_black);
map.put("ThirdRate",R.mipmap.star_black);
}else if(ratingNumArray.get(counter)==2){
map.put("FirstRate",R.mipmap.star_light);
map.put("SecondRate",R.mipmap.star_light);
map.put("ThirdRate",R.mipmap.star_black);
}else if(ratingNumArray.get(counter)==3){
map.put("FirstRate",R.mipmap.star_light);
map.put("SecondRate",R.mipmap.star_light);
map.put("ThirdRate",R.mipmap.star_light);
}else if(ratingNumArray.get(counter)==-1){
map.put("FirstRate",R.mipmap.star_nothing);
map.put("SecondRate",R.mipmap.star_nothing);
map.put("ThirdRate",R.mipmap.star_nothing);
}
dataList.add(map);
}
return dataList;
}
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
// level value the user tapped (grid positions are 0-based)
clickedLevel=position+1;
if(clickedLevel<=unlockLevel)
{
SharedPreferences.Editor editor=getActivity().getSharedPreferences("AllLevel",MODE_PRIVATE).edit();
editor.putInt("clickedLevel",clickedLevel);
editor.putInt(modelUnlockLevel,unlockLevel);
editor.commit();
Intent intent=new Intent(getActivity(),nextClass);
// Projects created in the Scratch-game tutorial are single-use: they
// are not saved to the project area and disappear on exit
intent.putExtra("scratchGameSingleUse","yes");
// Added by Peng Jiahan: used to load the tutorial of the matching module
intent.putExtra("model",model);
startActivity(intent);
}
else {
// code the release build should use - begin
Toast.makeText(getActivity(),getString(R.string.over_level_tip)+unlockLevel+getString(R.string.over_level_tip_end),Toast.LENGTH_SHORT).show();
}
}
// 2018-4-14: the max level count should match the number of lines in the
// level content file; tying it to the number of websites was too contrived
private void initModel(String model){
if(model.equals("scratchGuideBlock")){
// the user picked "guide" of module 3 ("Scratch drawing")
imageView_background.setBackgroundResource(R.mipmap.background_module3);
modelUrl= "level_txt/scratch_game_guide_block.txt";
modelUnlockLevel="scratchGameGuideBlockUnlockLevel";
modelMaxLevel="scratchGameGuideBlockMaxLevel";
modelRating="scratchGameGuideBlock";
nextClass=ScratchJrActivity.class;
}
SharedPreferences sharedPreferences=getActivity().getSharedPreferences("AllLevel",MODE_PRIVATE);
unlockLevel=sharedPreferences.getInt(modelUnlockLevel,1);
}
// Rebuilds the level grid: counts levels from the asset file, loads the
// per-level ratings from the database and wires up the adapter.
private void refreshEnglishChapterActivity(){
initModel(model);
// derive the selected chapter's max level count from the line count of
// the module's level file
try{
InputStream is = getActivity().getAssets().open(modelUrl);
BufferedReader br=new BufferedReader(new InputStreamReader(is));
String line="";
if(maxLevel==0){
while ((line=br.readLine())!=null) {
maxLevel++;
}
SharedPreferences.Editor editor=getActivity().getSharedPreferences("AllLevel",MODE_PRIVATE).edit();
editor.putInt(modelMaxLevel,maxLevel);
editor.commit();
}
br.close();
is.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch(IOException e){
e.printStackTrace();
}
pictureIdArray=new ArrayList<Integer>();
levelNameArray=new ArrayList<String>();
ratingNumArray=new ArrayList<Integer>();
List<LevelInfo> ratings= DataSupport.where("model = ?",modelRating).find(LevelInfo.class);
// populate the dynamic arrays
for(int i=0;i<maxLevel;i++){
if(i<unlockLevel)
{
pictureIdArray.add(R.mipmap.unlock_button_pic);
levelNameArray.add((i+1)+"");
if(!ratings.isEmpty() && ratings.size()>i){
ratingNumArray.add(ratings.get(i).getRating());
}else{
ratingNumArray.add(0);
}
}
else{
pictureIdArray.add(R.mipmap.lock_button_pic);
levelNameArray.add("");
ratingNumArray.add(-1);
}
}
gridView = (GridView) getActivity().findViewById(R.id.gridView_bae_level);
dataList = new ArrayList<>();
String[] key=new String[]{"Image","Text","FirstRate","SecondRate","ThirdRate"};
int[] value=new int[]{R.id.button_gridview_item,R.id.textview_gridview_item,R.id.item_first_image,R.id.item_second_image,R.id.item_third_image};
// screen-adaptation hook: getView is overridden
simpleAdapter = new SimpleAdapter(getActivity(), getData(),R.layout.gridview_item,key,value)
{
@Override
public View getView(int position, View convertView, ViewGroup parent) {
if(convertView==null){
View view=super.getView(position, convertView, parent);
// screen-adaptation hook: getView is overridden
ScreenAdapterTools.getInstance().loadView((ViewGroup) view);
return view;
}
return super.getView(position, convertView, parent);
}
}
;
gridView.setAdapter(simpleAdapter);
gridView.setOnItemClickListener(this);
}
}
|
pi0/node | session2/controlflow/asynchronous.js | <reponame>pi0/node
// Simulated async transform: logs the argument and, one second later,
// hands twice its value to the callback.
function async(arg, callback) {
  console.log('do something with \'' + arg + '\', return 1 sec later');
  setTimeout(function () {
    callback(arg * 2);
  }, 1000);
}

// Invoked once after every item has produced its result.
function final() {
  console.log('Done', results);
}

var items = [1, 2, 3, 4, 5, 6];
var results = [];

// Fan out the async calls and count completions; the last callback to
// arrive triggers final().
var pending = items.length;
items.forEach(function (item) {
  async(item, function (doubled) {
    results.push(doubled);
    pending -= 1;
    if (pending === 0) {
      final();
    }
  });
});
mewbak/ferret | pkg/stdlib/arrays/union_distinct_test.go | package arrays_test
import (
"context"
"github.com/MontFerret/ferret/pkg/runtime/values"
"github.com/MontFerret/ferret/pkg/stdlib/arrays"
. "github.com/smartystreets/goconvey/convey"
"testing"
)
// TestUnionDistinct verifies that UnionDistinct concatenates all input
// arrays (ints and strings alike) while dropping duplicate values and
// preserving first-seen order.
func TestUnionDistinct(t *testing.T) {
Convey("Should union all arrays with unique values", t, func() {
// arr2 repeats 2 and 4 from arr1; arr4 repeats "b" and "d" from arr3,
// so the expected result contains each value exactly once.
arr1 := values.NewArrayWith(
values.NewInt(1),
values.NewInt(2),
values.NewInt(3),
values.NewInt(4),
)
arr2 := values.NewArrayWith(
values.NewInt(5),
values.NewInt(2),
values.NewInt(6),
values.NewInt(4),
)
arr3 := values.NewArrayWith(
values.NewString("a"),
values.NewString("b"),
values.NewString("c"),
values.NewString("d"),
)
arr4 := values.NewArrayWith(
values.NewString("e"),
values.NewString("b"),
values.NewString("f"),
values.NewString("d"),
)
out, err := arrays.UnionDistinct(
context.Background(),
arr1,
arr2,
arr3,
arr4,
)
So(err, ShouldBeNil)
So(out.String(), ShouldEqual, `[1,2,3,4,5,6,"a","b","c","d","e","f"]`)
})
}
|
visit-dav/vis | src/common/plugin/PluginManager.h | // Copyright (c) Lawrence Livermore National Security, LLC and other VisIt
// Project developers. See the top-level LICENSE file for dates and other
// details. No copyright assignment is required to contribute to VisIt.
// ************************************************************************* //
// PluginManager.h //
// ************************************************************************* //
#ifndef PLUGIN_MANAGER_H
#define PLUGIN_MANAGER_H
#include <plugin_exports.h>
#include <vectortypes.h>
#include <map>
#include <utility>
class PluginBroadcaster;
// ****************************************************************************
// Class: PluginManager
//
// Purpose:
// The plugin manager. It provides an abstraction for all plugin
// managers. The information is broken up into several classes since
// portions of it are only relevant to particular components within visit.
// There is the general information which all the components are interested
// in, then portions for the gui, viewer, cli, engine, and mdserver.
//
// Programmer: <NAME>
// Creation: August 20, 2002
//
// Modifications:
// <NAME>, Fri Feb 28 12:28:50 PST 2003
// Renamed some methods and data members to make their function and
// usage more correct and obvious. Added support for loading plugins
// on demand. Made PluginLoaded be private and added PluginAvailable,
// which can attempt to load a plugin on demand before checking to see
// if it is loaded.
//
// <NAME>, Tue Jun 17 19:08:21 PDT 2003
// Added GetEnabledIndex.
//
// <NAME>, Wed Nov 5 13:48:58 PST 2003
// Added ability to check if a plugin is enabled (by id).
//
// <NAME>, Mon Feb 7 18:55:26 PST 2005
// Added ability to check if any errors were generated when
// the plugins were initialized. Right now it is only used
// for detecting incompatible versions. Also, added ability
// for PluginSymbol to not print an error if requested to be
// silent about such things.
//
// <NAME>, Wed Nov 22 16:26:54 PST 2006
// I added an argument to SetPluginDir.
//
// <NAME>, Mon Aug 6 13:36:16 PDT 2007
// Added LoadSinglePluginNow. Changed LoadSinglePlugin to return bool
//
// <NAME>, Fri Oct 5 13:10:38 PDT 2007
// Changed argument for ReadPluginDir.
//
// <NAME>, Thu Jan 17 15:12:09 EST 2008
// Made SetPluginDir be a public function so we can change the directory if
// we need to.
//
// <NAME>, Thu Apr 23 11:36:20 PDT 2009
// I added Simulation to the plugin categories, which will be a superset
// of Engine.
//
// <NAME>, Wed Jun 17 10:10:20 PDT 2009
// I added a callback for ReadPluginInfo so we can enable an optimization
// that lets non-rank 0 processes not call it in parallel, saving a lot of
// file system accesses.
//
// ****************************************************************************
class PLUGIN_API PluginManager
{
public:
    // Component category a plugin belongs to.  Simulation is treated as a
    // superset of Engine (see 2009-04-23 note above).
    enum PluginCategory
    {
        no_category,
        GUI,
        Viewer,
        Engine,
        MDServer,
        Scripting,
        Simulation
    };
public:
    virtual ~PluginManager();

    // Enable/disable a plugin by id and query whether it is enabled.
    void DisablePlugin(const std::string&);
    void EnablePlugin(const std::string&);
    bool PluginEnabled(const std::string&);

    // Load every enabled plugin immediately, or defer loading until a plugin
    // is first requested (see PluginAvailable).
    virtual void LoadPluginsNow();
    virtual void LoadPluginsOnDemand();
    // Load a single plugin by id right away; returns false on failure.
    virtual bool LoadSinglePluginNow(const std::string&);
    virtual void ReloadPlugins();
    virtual void UnloadPlugins();

    // Existence/availability queries.  PluginAvailable may attempt an
    // on-demand load before checking whether the plugin is loaded.
    bool PluginExists(const std::string&);
    bool PluginAvailable(const std::string&);
    std::string GetPluginName(const std::string&);
    std::string GetPluginVersion(const std::string&);

    // Accessors over all known plugins (id <-> index mappings).
    int GetNAllPlugins() const;
    std::string GetAllID(const int) const;
    int GetAllIndex(const std::string &) const;
    int GetAllIndexFromName(const std::string &) const;

    // Accessors over the enabled subset only.
    int GetNEnabledPlugins() const;
    std::string GetEnabledID(const int) const;
    int GetEnabledIndex(const std::string &);

    // Errors accumulated while plugins were initialized (e.g. incompatible
    // versions); empty when initialization was clean.
    std::string GetPluginInitializationErrors();

    // Set the plugin directory; a null dir selects the default location.
    void SetPluginDir(const char *dir = 0);
protected:
    PluginManager(const std::string&);

    void ObtainPluginInfo(bool, PluginBroadcaster *);
    // Reads plugin metadata from disk.  Per the 2009-06-17 note above, a
    // broadcaster callback lets non-rank-0 processes skip this in parallel.
    void ReadPluginInfo();
    virtual void BroadcastGeneralInfo(PluginBroadcaster *);
    void ReadPluginDir(std::vector<
                           std::vector<
                               std::pair<std::string,
                                         std::string> > > &);
    void GetPluginList(std::vector<
                           std::pair<std::string,
                                     std::string> >&);
    bool IsGeneralPlugin(const std::string &) const;
    bool PluginLoaded(const std::string&);

    // Low-level dynamic-library helpers operating on the currently open
    // plugin (openPlugin / handle).
    void PluginOpen(const std::string &pluginFile);
    // Resolve a symbol; when ne is true, failure is silent (no error print).
    void *PluginSymbol(const std::string &symbol, bool ne = false);
    char *PluginError() const;
    void PluginClose();

    virtual bool LoadSinglePlugin(int i);

    // Category-specific hooks: the general/common ones are mandatory, the
    // per-component ones default to no-ops and are overridden as needed.
    virtual bool LoadGeneralPluginInfo() = 0;
    virtual void LoadGUIPluginInfo() { }
    virtual void LoadViewerPluginInfo() { }
    virtual void LoadMDServerPluginInfo() { }
    virtual void LoadEnginePluginInfo() { }
    virtual void LoadScriptingPluginInfo() { }
    virtual void FreeCommonPluginInfo() = 0;
    virtual void FreeGUIPluginInfo() { }
    virtual void FreeViewerPluginInfo() { }
    virtual void FreeMDServerPluginInfo() { }
    virtual void FreeEnginePluginInfo() { }
    virtual void FreeScriptingPluginInfo() { }

    // Search path and state of the currently open dynamic library.
    std::vector<std::string> pluginDirs;
    std::string openPlugin;
    void *handle;
    char *pluginError;

    int category;          // value from PluginCategory for this manager
    bool parallel;
    std::string managerName;
    bool loadOnDemand;     // true when LoadPluginsOnDemand was selected

    // arrays containing all plugins
    std::vector<std::string> ids;
    std::vector<std::string> names;
    std::vector<std::string> versions;
    std::vector<std::string> libfiles;
    std::vector<bool> enabled;
    // maps from id->allindex and id->loadedindex
    std::map<std::string, int> allindexmap;
    std::map<std::string, int> loadedindexmap;
    // arrays containing enabled plugins
    std::vector<void*> loadedhandles;
    std::vector<std::string> loadedids;
    // Has the skip message been issued.
    std::map<int,int> issuedMessage;
    // accumulated plugin initialization errors
    std::string pluginInitErrors;
};
#endif
|
owenmcateer/Motus | src/week_116/main.js | <gh_stars>100-1000
/**
* Motus: The arista
* https://owenmcateer.github.io/Motus-Art
*/
// Canvas/animation configuration.
const canvasSize = 540; // square canvas edge length in pixels
const lines = 18;       // number of animated lines
let timer = 0;          // animation phase, cycles from 0 to 1
let boundary;           // vertical spacing between lines (computed in draw)

// Setup: p5.js entry point, runs once before the first draw tick.
function setup() {
  createCanvas(canvasSize, canvasSize);
  pixelDensity(2); // render at 2x density for crisp output on hi-dpi screens
}
// Draw tick: p5.js frame callback.  Paints the shadow quad, computes the
// per-line geometry for this frame, then renders the dim vertical guide
// lines beneath the bright folded lines.
function draw() {
  background(10);
  strokeJoin(MITER);
  strokeCap(PROJECT);

  // Shadow
  fill(46);
  noStroke();
  quad(10, 0, width + 10, height, width, 0, 10, 0);
  noFill();

  // Work in a slightly taller area than the canvas so lines can scroll
  // smoothly in and out of view.
  const areaHeight = height + (height / (lines + 2));
  boundary = areaHeight / lines;

  // Calculate moving lines
  const lineData = [];
  for (let i = 1; i <= lines; i++) {
    // Left points: evenly spaced rows, shifted down by the timer phase.
    const leftY = i * (areaHeight / lines) + (timer * boundary) - boundary;
    // Mid points: fold position along the diagonal, eased per line index.
    const easeSine = easeInSine(i / lines + (timer / lines), 0, 1, 1);
    const midX = easeSine * areaHeight;
    const midY = midX - 10;
    // Right points
    const rightY = i * (areaHeight / lines) + (timer * boundary) - boundary;
    const rightX = areaHeight;
    // Add to line array
    lineData.push({
      leftX: 0,
      leftY,
      midX,
      midY,
      rightX,
      rightY,
    });
  }

  // Render vertical lines first (thin, dim, behind the main strokes).
  noFill();
  stroke(80);
  strokeWeight(1.5);
  lineData.forEach((line) => {
    beginShape();
    vertex(width + 50, -50);
    vertex(line.midX, line.midY);
    vertex(line.midX, height + 50);
    endShape();
  });

  // Render horizontal lines on top (thick, bright).
  noFill();
  stroke(239);
  strokeWeight(6);
  lineData.forEach((line) => {
    beginShape();
    vertex(line.leftX, line.leftY);
    vertex(line.midX, line.midY);
    vertex(line.rightX, line.rightY);
    endShape();
  });

  // Timer: advance the 0..1 phase and wrap for a seamless loop.
  timer += 0.005;
  if (timer >= 1) {
    timer = 0;
  }
}
// Easing helpers
/**
 * Sinusoidal ease-in (Penner-style signature).
 * @param {number} t current time
 * @param {number} b start value
 * @param {number} c total change in value
 * @param {number} d duration
 * @returns {number} eased value, b at t=0 rising to b + c at t=d
 */
function easeInSine(t, b, c, d) {
  const phase = (t / d) * (Math.PI / 2);
  return b + c * (1 - Math.cos(phase));
}
|
Shashi-rk/azure-sdk-for-java | sdk/mysql/azure-resourcemanager-mysql/src/main/java/com/azure/resourcemanager/mysql/implementation/ServerAdministratorResourceImpl.java | <filename>sdk/mysql/azure-resourcemanager-mysql/src/main/java/com/azure/resourcemanager/mysql/implementation/ServerAdministratorResourceImpl.java
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.mysql.implementation;
import com.azure.resourcemanager.mysql.fluent.models.ServerAdministratorResourceInner;
import com.azure.resourcemanager.mysql.models.AdministratorType;
import com.azure.resourcemanager.mysql.models.ServerAdministratorResource;
import java.util.UUID;
/**
 * Implementation of {@link ServerAdministratorResource} that delegates every
 * property accessor to a wrapped {@link ServerAdministratorResourceInner}
 * (auto-generated client-model adapter).
 */
public final class ServerAdministratorResourceImpl implements ServerAdministratorResource {

    /** Inner (wire-level) model backing this resource. */
    private ServerAdministratorResourceInner innerObject;

    /** Service manager handle; exposed only through {@link #manager()}. */
    private final com.azure.resourcemanager.mysql.MySqlManager serviceManager;

    ServerAdministratorResourceImpl(
        ServerAdministratorResourceInner innerObject, com.azure.resourcemanager.mysql.MySqlManager serviceManager) {
        this.innerObject = innerObject;
        this.serviceManager = serviceManager;
    }

    // All accessors below simply forward to the inner model.

    public String id() {
        return this.innerModel().id();
    }

    public String name() {
        return this.innerModel().name();
    }

    public String type() {
        return this.innerModel().type();
    }

    public AdministratorType administratorType() {
        return this.innerModel().administratorType();
    }

    public String login() {
        return this.innerModel().login();
    }

    public UUID sid() {
        return this.innerModel().sid();
    }

    public UUID tenantId() {
        return this.innerModel().tenantId();
    }

    /** @return the wrapped inner model. */
    public ServerAdministratorResourceInner innerModel() {
        return this.innerObject;
    }

    private com.azure.resourcemanager.mysql.MySqlManager manager() {
        return this.serviceManager;
    }
}
|
Sheldan/abstracto | abstracto-application/abstracto-modules/moderation/moderation-impl/src/test/java/dev/sheldan/abstracto/moderation/job/WarnDecayJobTest.java | <gh_stars>1-10
package dev.sheldan.abstracto.moderation.job;
import dev.sheldan.abstracto.core.models.database.AServer;
import dev.sheldan.abstracto.core.service.FeatureFlagService;
import dev.sheldan.abstracto.core.service.management.ServerManagementService;
import dev.sheldan.abstracto.moderation.config.feature.WarningDecayFeatureConfig;
import dev.sheldan.abstracto.moderation.service.WarnService;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.quartz.JobExecutionException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.mockito.Mockito.*;
/**
 * Unit tests for WarnDecayJob: warning decay must run exactly once per
 * server for which the warning-decay feature flag is enabled, and never
 * for servers where it is disabled.
 */
@RunWith(MockitoJUnitRunner.class)
public class WarnDecayJobTest {

    @InjectMocks
    private WarnDecayJob testUnit;

    @Mock
    private ServerManagementService serverManagementService;

    @Mock
    private FeatureFlagService featureFlagService;

    @Mock
    private WarningDecayFeatureConfig warningDecayFeatureConfig;

    @Mock
    private WarnService warnService;

    @Mock
    private AServer firstServer;

    @Mock
    private AServer secondServer;

    private static final Long SERVER_ID = 1L;
    private static final Long SERVER_ID_2 = 2L;

    /** No servers at all: neither the feature check nor the decay may run. */
    @Test
    public void executeJobForNoServers() throws JobExecutionException {
        when(serverManagementService.getAllServers()).thenReturn(Collections.emptyList());
        testUnit.executeInternal(null);
        verify(featureFlagService, times(0)).isFeatureEnabled(eq(warningDecayFeatureConfig), any(AServer.class));
        verify(warnService, times(0)).decayWarningsForServer(any(AServer.class));
    }

    /** Single server with the feature enabled: decay runs once for it. */
    @Test
    public void executeJobForAEnabledServer() throws JobExecutionException {
        when(firstServer.getId()).thenReturn(SERVER_ID);
        when(serverManagementService.getAllServers()).thenReturn(Arrays.asList(firstServer));
        when(featureFlagService.isFeatureEnabled(warningDecayFeatureConfig, firstServer)).thenReturn(true);
        testUnit.executeInternal(null);
        verify(warnService, times(1)).decayWarningsForServer(eq(firstServer));
    }

    /** Single server with the feature disabled: decay must not run. */
    @Test
    public void executeJobForADisabledServer() throws JobExecutionException {
        when(serverManagementService.getAllServers()).thenReturn(Arrays.asList(firstServer));
        when(featureFlagService.isFeatureEnabled(warningDecayFeatureConfig, firstServer)).thenReturn(false);
        testUnit.executeInternal(null);
        verify(warnService, times(0)).decayWarningsForServer(eq(firstServer));
    }

    /** Enabled + disabled mix: only the enabled server is decayed. */
    @Test
    public void executeJobForMixedServers() throws JobExecutionException {
        when(firstServer.getId()).thenReturn(SERVER_ID);
        when(serverManagementService.getAllServers()).thenReturn(Arrays.asList(firstServer, secondServer));
        when(featureFlagService.isFeatureEnabled(warningDecayFeatureConfig, firstServer)).thenReturn(true);
        when(featureFlagService.isFeatureEnabled(warningDecayFeatureConfig, secondServer)).thenReturn(false);
        testUnit.executeInternal(null);
        verify(warnService, times(1)).decayWarningsForServer(eq(firstServer));
    }

    /** Two enabled servers: decay runs once per server, in listing order. */
    @Test
    public void executeJobForMultipleEnabledServers() throws JobExecutionException {
        when(firstServer.getId()).thenReturn(SERVER_ID);
        when(secondServer.getId()).thenReturn(SERVER_ID_2);
        when(serverManagementService.getAllServers()).thenReturn(Arrays.asList(firstServer, secondServer));
        when(featureFlagService.isFeatureEnabled(warningDecayFeatureConfig, firstServer)).thenReturn(true);
        when(featureFlagService.isFeatureEnabled(warningDecayFeatureConfig, secondServer)).thenReturn(true);
        testUnit.executeInternal(null);
        ArgumentCaptor<AServer> serverCaptor = ArgumentCaptor.forClass(AServer.class);
        verify(warnService, times(2)).decayWarningsForServer(serverCaptor.capture());
        List<AServer> capturedServers = serverCaptor.getAllValues();
        Assert.assertEquals(2, capturedServers.size());
        Assert.assertEquals(firstServer, capturedServers.get(0));
        Assert.assertEquals(secondServer, capturedServers.get(1));
    }
}
|
duonglvtnaist/Multi-ROMix-Scrypt-Accelerator | Software/CPU/myscrypt/build/cmake-3.12.3/Tests/MFC/mfc1/stdafx.h | <filename>Software/CPU/myscrypt/build/cmake-3.12.3/Tests/MFC/mfc1/stdafx.h
// stdafx.h : include file for standard system include files,
// or project specific include files that are used frequently,
// but are changed infrequently
#pragma once
#ifndef VC_EXTRALEAN
# define VC_EXTRALEAN // Exclude rarely-used stuff from Windows headers
#endif
// See http://msdn.microsoft.com/en-us/library/6sehtctf.aspx for more info
// on WINVER and _WIN32_WINNT
// Modify the following defines if you have to target a platform prior to the
// ones specified below.
// Refer to MSDN for the latest info on corresponding values for different
// platforms.
#ifndef WINVER // Allow use of features specific to Windows 95 and Windows NT 4
// or later.
# if _MSC_VER < 1600
# define WINVER \
0x0400 // Change this to the appropriate value to target Windows 98 and
// Windows 2000 or later.
# else
# define WINVER 0x0501 // Target Windows XP and later with VS 10 and later
# endif
#endif
#ifndef _WIN32_WINNT // Allow use of features specific to Windows NT 4 or
// later.
# if _MSC_VER < 1600
# define _WIN32_WINNT \
0x0400 // Change this to the appropriate value to target Windows 98 and
// Windows 2000 or later.
# else
# define _WIN32_WINNT \
0x0501 // Target Windows XP and later with VS 10 and later
# endif
#endif
#ifndef _WIN32_WINDOWS // Allow use of features specific to Windows 98 or
// later.
# if _MSC_VER < 1600
# define _WIN32_WINDOWS \
0x0410 // Change this to the appropriate value to target Windows Me or
// later.
# endif
#endif
#ifndef _WIN32_IE // Allow use of features specific to IE 4.0 or later.
# if _MSC_VER < 1600
# define _WIN32_IE \
0x0400 // Change this to the appropriate value to target IE 5.0 or later.
# endif
#endif
#define _ATL_CSTRING_EXPLICIT_CONSTRUCTORS // some CString constructors will be
// explicit
// turns off MFC's hiding of some common and often safely ignored warning
// messages
#define _AFX_ALL_WARNINGS
#include <afxdisp.h> // MFC Automation classes
#include <afxext.h> // MFC extensions
#include <afxwin.h> // MFC core and standard components
#include <afxdtctl.h> // MFC support for Internet Explorer 4 Common Controls
#ifndef _AFX_NO_AFXCMN_SUPPORT
# include <afxcmn.h> // MFC support for Windows Common Controls
#endif // _AFX_NO_AFXCMN_SUPPORT
|
cpreh/spacegameengine | plugins/opengl/src/occlusion_query/config.cpp | <gh_stars>1-10
// Copyright <NAME> 2006 - 2019.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include <sge/opengl/common.hpp>
#include <sge/opengl/occlusion_query/config.hpp>
// Value-object constructor: captures the GL occlusion-query entry points and
// the GLenum tokens reported for the current context.
sge::opengl::occlusion_query::config::config(
    gl_gen_queries _gen_queries,
    gl_delete_queries _delete_queries,
    gl_begin_query _begin_query,
    gl_end_query _end_query,
    gl_get_query_object_iv _get_query_object_iv,
    gl_get_query_object_uiv _get_query_object_uiv,
    GLenum const _samples_target,
    GLenum const _query_result_available,
    GLenum const _query_result)
    : gen_queries_(_gen_queries),
      delete_queries_(_delete_queries),
      begin_query_(_begin_query),
      end_query_(_end_query),
      get_query_object_iv_(_get_query_object_iv),
      get_query_object_uiv_(_get_query_object_uiv),
      samples_target_(_samples_target),
      query_result_available_(_query_result_available),
      query_result_(_query_result)
{
}

// Accessors: each returns the stored function pointer or token unchanged.

sge::opengl::occlusion_query::config::gl_gen_queries
sge::opengl::occlusion_query::config::gen_queries() const
{
  return gen_queries_;
}

sge::opengl::occlusion_query::config::gl_delete_queries
sge::opengl::occlusion_query::config::delete_queries() const
{
  return delete_queries_;
}

sge::opengl::occlusion_query::config::gl_begin_query
sge::opengl::occlusion_query::config::begin_query() const
{
  return begin_query_;
}

sge::opengl::occlusion_query::config::gl_end_query
sge::opengl::occlusion_query::config::end_query() const
{
  return end_query_;
}

sge::opengl::occlusion_query::config::gl_get_query_object_iv
sge::opengl::occlusion_query::config::get_query_object_iv() const
{
  return get_query_object_iv_;
}

sge::opengl::occlusion_query::config::gl_get_query_object_uiv
sge::opengl::occlusion_query::config::get_query_object_uiv() const
{
  return get_query_object_uiv_;
}

GLenum sge::opengl::occlusion_query::config::samples_target() const { return samples_target_; }

GLenum sge::opengl::occlusion_query::config::query_result_available() const
{
  return query_result_available_;
}

GLenum sge::opengl::occlusion_query::config::query_result() const { return query_result_; }
|
georgedeath/TAsK | src/DominationByPathCostBase.h | #ifndef DOMINATION_BY_PATH_COST_BASE
#define DOMINATION_BY_PATH_COST_BASE
#include "UsedTypes.h"
class BiObjLabelContainer;
class BiObjLabel;
/** \brief Base class for path cost domination rule.

    Defines the interface for derived classes; the default implementation
    performs only plain label-set maintenance and the usual dominance check
    (the best-known-cost hooks are no-ops).
*/
class DominationByPathCostBase {
  public:
    DominationByPathCostBase();
    virtual ~DominationByPathCostBase();

    /** Adds label to labels at nodeIndex.  destIndex and odIndex are unused
        here but available to derived classes that track per-destination or
        per-OD state.
    */
    virtual void addLabelToLabelSet(BiObjLabelContainer& labels, int nodeIndex,
                BiObjLabel* label, int destIndex, int odIndex);

    /** Doesn't do anything in the base implementation.
    */
    virtual void resetBestKnownPathCost();

    /** Performs usual dominance check.
        @return true if a label built from timeLowerBound and tollLowerBound is
        not dominated by any label in the destination node, false otherwise.
    */
    virtual bool isWorth(const BiObjLabelContainer& labels, int destIndex,
                FPType timeLowerBound, TollType tollLowerBound, int odIndex) const;

    /** Doesn't do anything in the base implementation.
    */
    virtual void updateBestKnown(const BiObjLabelContainer& labels, int destIndex,
                int odIndex);
};
#endif |
theeksha101/problem_solving | Hackrnk_problems/MyFirstCode.py | n = int(input("Enter to test weird or not weird: "))
def classify(n):
    """Return "Weird" or "Not Weird" for the classic HackerRank rules.

    Weird when n is odd, or when n is even and lies in [6, 20];
    Not Weird for every other even value (n < 6 or n > 20).
    """
    if n % 2 == 1:
        return "Weird"
    # Even from here on: weird only inside the inclusive 6..20 band.
    if 6 <= n <= 20:
        return "Weird"
    return "Not Weird"


if __name__ == "__main__":
    # `n` is read from stdin at the top of the script (kept there so the
    # prompt text is unchanged); only print when run directly.
    print(classify(n))
mhus-info/mhus-mongo | mongo-morphia/src/main/java/dev/morphia/logging/SilentLogger.java | <reponame>mhus-info/mhus-mongo<filename>mongo-morphia/src/main/java/dev/morphia/logging/SilentLogger.java
/**
* Copyright (C) 2019 <NAME> (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.morphia.logging;
/**
 * Silent logger; it doesn't do anything!  Every log method is a no-op and
 * every is*Enabled() probe returns false, so callers can cheaply skip
 * message construction.
 */
public class SilentLogger implements Logger {
    @Override
    public void debug(final String msg) {}

    @Override
    public void debug(final String format, final Object... arg) {}

    @Override
    public void debug(final String msg, final Throwable t) {}

    @Override
    public void error(final String msg) {}

    @Override
    public void error(final String format, final Object... arg) {}

    @Override
    public void error(final String msg, final Throwable t) {}

    @Override
    public void info(final String msg) {}

    @Override
    public void info(final String format, final Object... arg) {}

    @Override
    public void info(final String msg, final Throwable t) {}

    // Every level reports "disabled" so call sites can short-circuit.

    @Override
    public boolean isDebugEnabled() {
        return false;
    }

    @Override
    public boolean isErrorEnabled() {
        return false;
    }

    @Override
    public boolean isInfoEnabled() {
        return false;
    }

    @Override
    public boolean isTraceEnabled() {
        return false;
    }

    @Override
    public boolean isWarningEnabled() {
        return false;
    }

    @Override
    public void trace(final String msg) {}

    @Override
    public void trace(final String format, final Object... arg) {}

    @Override
    public void trace(final String msg, final Throwable t) {}

    @Override
    public void warning(final String msg) {}

    @Override
    public void warning(final String format, final Object... arg) {}

    @Override
    public void warning(final String msg, final Throwable t) {}
}
github-clonner/youtrack-mobile | src/components/multiline-input/multiline-input.js | /* @flow */
import {TextInput, Platform} from 'react-native';
import React, {Component} from 'react';
// Height cap used when the caller does not override maxInputHeight.
const MAX_DEFAULT_HEIGHT = 200;
const DEFAULT_FONT_SIZE = 16;
// Extra pixels added to the measured content height to avoid clipping.
const SPARE_SPACE = 16;

type Props = {
  maxInputHeight: number,
  style: any
};

type State = {
  inputHeight: ?number
};

/**
 * Multiline TextInput that grows with its content.  The height follows the
 * measured content size (plus SPARE_SPACE) and is capped at
 * props.maxInputHeight when that value is positive.
 */
export default class MultilineInput extends Component<Props, State> {
  input: TextInput;

  static defaultProps = {
    maxInputHeight: MAX_DEFAULT_HEIGHT,
    returnKeyType: Platform.OS === 'ios' ? 'default' : 'none'
  }

  constructor(props: Props) {
    super(props);
    this.state = {
      inputHeight: null, // null until the first content-size measurement
    };
  }

  // Forward focus requests to the wrapped TextInput.
  focus() {
    this.input.focus();
  }

  // Recompute the rendered height whenever the content size changes.
  onContentSizeChange = (event: Object) => {
    const {maxInputHeight} = this.props;
    let newHeight = event.nativeEvent.contentSize.height + SPARE_SPACE;
    if (maxInputHeight > 0) {
      newHeight = newHeight > maxInputHeight ? maxInputHeight : newHeight;
    }
    this.setState({inputHeight: newHeight});
  }

  // Keep a handle on the native input (ref callback may fire with null).
  inputRef = (instance: ?TextInput) => {
    if (instance) {
      this.input = instance;
    }
  };

  render() {
    // eslint-disable-next-line no-unused-vars
    const {style, maxInputHeight, ...rest} = this.props;

    return (
      <TextInput
        {...rest}
        ref={this.inputRef}
        multiline={true}
        onContentSizeChange={this.onContentSizeChange}
        style={[{fontSize: DEFAULT_FONT_SIZE}, style, {height: this.state.inputHeight}]}
      />
    );
  }
}
|
spb14/slaq | packages/web-app/src/store/state.js | import { readLocalToken } from './helpers'
// prepare: restore any previously persisted token from local storage and
// start with an empty users list.
const token = readLocalToken()
const users = []

// expose product: the initial store state object.
export default {
  token,
  users
}
|
FeatureToggleStudy/halfpipe | mapper/mapper.go | package mapper
import (
"github.com/spf13/afero"
"github.com/springernature/halfpipe/manifest"
)
// Mapper transforms a manifest into an updated manifest.
type Mapper interface {
	Apply(original manifest.Manifest) (updated manifest.Manifest)
}

// mapper chains a fixed sequence of Mappers.
type mapper struct {
	mappers []Mapper
}

// Apply runs each contained Mapper in order, feeding the output of one into
// the next, and returns the final manifest.
func (m mapper) Apply(original manifest.Manifest) (updated manifest.Manifest) {
	updated = original
	for _, mm := range m.mappers {
		updated = mm.Apply(updated)
	}
	return updated
}

// New returns the default Mapper chain (notifications, then docker-compose
// mapping backed by the real OS filesystem).
func New() Mapper {
	return mapper{
		mappers: []Mapper{
			NewNotificationsMapper(),
			NewDockerComposeMapper(afero.Afero{Fs: afero.NewOsFs()}),
		},
	}
}
|
teerapongt/marketplace-sample-apps | Play-Along/Freshworks-Huddle/August-2020/Freshteam-new-domains/Onboard-Employee/server/lib/index.js | 'use strict';
const got = require('got');
// Dispatch an HTTP request via the matching `got` helper (got.get, got.post,
// ...): `options.method` names the helper and the options object is passed
// through unchanged.
const request = async (url, options) => {
  const response = await got[options.method](url, options);
  return response;
};

// NOTE(review): bare `exports = {...}` is the Freshworks serverless (FDK)
// convention; in plain Node.js this would not export anything — confirm the
// target runtime before reusing this pattern elsewhere.
exports = {
  request,
};
|
zcswl7961/unit-common | util-leecode/src/main/java/com/zcswl/leecode/用Rand7实现Rand10.java | <gh_stars>1-10
package com.zcswl.leecode;
/**
 * https://leetcode-cn.com/problems/implement-rand10-using-rand7/
 *
 * Given rand7(), which produces uniform random integers in [1, 7], implement
 * rand10() producing uniform random integers in [1, 10].
 *
 * Key identity: (randX() - 1) * Y + randY() generates uniform random values
 * in [1, X * Y].
 *
 * @author zhoucg
 * @date 2021-05-07 13:54
 */
public class 用Rand7实现Rand10 {
    // Intentionally empty: placeholder for the solution.
}
|
fabianklonsdorf/ixhh | node_modules/@carbon/icons/umd/shuffle/24.js | <reponame>fabianklonsdorf/ixhh
// UMD wrapper (CommonJS / AMD / browser global `Shuffle24`) around the
// generated descriptor for the 24px "shuffle" icon consumed by
// @carbon/icons renderers.
(function (global, factory) {
  typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
  typeof define === 'function' && define.amd ? define(factory) :
  (global.Shuffle24 = factory());
}(this, (function () { 'use strict';

  // Icon descriptor: a virtual-DOM-like tree (elem / attrs / content).
  var _24 = {
    elem: 'svg',
    attrs: {
      xmlns: 'http://www.w3.org/2000/svg',
      viewBox: '0 0 32 32',
      width: 24,
      height: 24,
    },
    content: [
      {
        elem: 'path',
        attrs: {
          d:
            'M22.59 19.41L26.17 23h-6.62l-4.37-7 4.37-7h6.62l-3.58 3.59L24 14l6-6-6-6-1.41 1.41L26.17 7h-6.62a2 2 0 0 0-1.69.94L14 14.11l-3.86-6.17A2 2 0 0 0 8.45 7H2v2h6.45l4.37 7-4.37 7H2v2h6.45a2 2 0 0 0 1.69-.94L14 17.89l3.86 6.17a2 2 0 0 0 1.69.94h6.62l-3.58 3.59L24 30l6-6-6-6z',
        },
      },
    ],
    name: 'shuffle',
    size: 24,
  };

  return _24;
})));
|
aliahsan07/safe-development | tests/test262/8.12/8.12.3/S8.12.3_A2.js | <reponame>aliahsan07/safe-development
// Test262 fixture for ECMA-262 8.12.3 ([[Get]]): reading a property that was
// never assigned must yield undefined for both dot and bracket access, so
// each __result must match its __expect of false.
var __obj = {
};
{
  var __result1 = __obj.propFoo !== undefined;
  var __expect1 = false;
}
{
  var __result2 = __obj['propFoo'] !== undefined;
  var __expect2 = false;
}
guillaume-plantevin/VeeSeeVSTRack | plugins/community/repos/ImpromptuModular/src/IMWidgets.hpp | //***********************************************************************************************
//Impromptu Modular: Modules for VCV Rack by <NAME>
//
//Based on code from Valley Rack Free by <NAME>
//See ./LICENSE.txt for all licenses
//***********************************************************************************************
#ifndef IM_WIDGETS_HPP
#define IM_WIDGETS_HPP
#include "rack.hpp"
#include "window.hpp"
using namespace rack;
// Dynamic SVGScrew
// General Dynamic Screw creation: place a TWidget screw at pos and hand it
// the shared mode pointer that drives the dynamic-appearance switching.
template <class TWidget>
TWidget* createDynamicScrew(Vec pos, int* mode) {
  TWidget *dynScrew = createWidget<TWidget>(pos);
  dynScrew->mode = mode;
  return dynScrew;
}

// Small circle drawn at a given angle (screw-slot decoration; see draw()
// in the implementation file).
struct ScrewCircle : TransparentWidget {
  float angle = 0.0f;
  float radius = 2.0f;
  ScrewCircle(float _angle);
  void draw(NVGcontext *vg) override;
};

struct DynamicSVGScrew : FramebufferWidget {
  int* mode;    // externally owned; selects primary vs alternate appearance
  int oldMode;  // last observed *mode, compared in step() to detect changes
  // for random rotated screw used in primary mode
  SVGWidget *sw;
  TransformWidget *tw;
  ScrewCircle *sc;
  // for fixed svg screw used in alternate mode
  SVGWidget* swAlt;

  DynamicSVGScrew();
  void addSVGalt(std::shared_ptr<SVG> svg);
  void step() override;
};
// Dynamic SVGPanel

struct PanelBorderWidget_Impromptu : TransparentWidget { // from SVGPanel.cpp
  int** expWidth = nullptr; // optional expansion width supplied by the owner
  void draw(NVGcontext *vg) override;
};

// Like SVGPanel (in app.hpp and SVGPanel.cpp) but with a dynamically
// assignable panel picked from `panels`.
struct DynamicSVGPanel : FramebufferWidget {
  int* mode;   // externally owned; selects which panel is visible
  int oldMode; // last observed *mode, compared in step()
  int* expWidth;
  std::vector<std::shared_ptr<SVG>> panels;
  SVGWidget* visiblePanel;
  PanelBorderWidget_Impromptu* border;

  DynamicSVGPanel();
  void addPanel(std::shared_ptr<SVG> svg);
  // NOTE(review): presumably re-adds the last panel — confirm in the .cpp.
  void dupPanel();
  void step() override;
};
// ******** Dynamic Ports ********

// General Dynamic Port creation: instantiate an input or output port and
// hand it the shared mode pointer used for dynamic appearance switching.
template <class TDynamicPort>
TDynamicPort* createDynamicPort(Vec pos, Port::PortType type, Module *module, int portId,
                int* mode) {
  TDynamicPort *dynPort = type == Port::INPUT ?
    createInput<TDynamicPort>(pos, module, portId) :
    createOutput<TDynamicPort>(pos, module, portId);
  dynPort->mode = mode;
  return dynPort;
}

// Same as createDynamicPort but interprets pos as the port's center.
template <class TDynamicPort>
TDynamicPort* createDynamicPortCentered(Vec pos, Port::PortType type, Module *module, int portId,
                int* mode) {
  TDynamicPort *dynPort = type == Port::INPUT ?
    createInput<TDynamicPort>(pos, module, portId) :
    createOutput<TDynamicPort>(pos, module, portId);
  dynPort->mode = mode;
  dynPort->box.pos = dynPort->box.pos.minus(dynPort->box.size.div(2));// centering
  return dynPort;
}

// Dynamic SVGPort (see SVGPort in app.hpp and SVGPort.cpp)
struct DynamicSVGPort : SVGPort {
  int* mode;   // externally owned; selects the active frame
  int oldMode; // last observed *mode, compared in step()
  std::vector<std::shared_ptr<SVG>> frames;

  DynamicSVGPort();
  void addFrame(std::shared_ptr<SVG> svg);
  void step() override;
};
// ******** Dynamic Params ********

// General Dynamic Param creation: instantiate a param widget and hand it the
// shared mode pointer used for dynamic appearance switching.
template <class TDynamicParam>
TDynamicParam* createDynamicParam(Vec pos, Module *module, int paramId, float minValue, float maxValue, float defaultValue,
                int* mode) {
  TDynamicParam *dynParam = createParam<TDynamicParam>(pos, module, paramId, minValue, maxValue, defaultValue);
  dynParam->mode = mode;
  return dynParam;
}

// Same as createDynamicParam but interprets pos as the widget's center.
template <class TDynamicParam>
TDynamicParam* createDynamicParamCentered(Vec pos, Module *module, int paramId, float minValue, float maxValue, float defaultValue,
                int* mode) {
  TDynamicParam *dynParam = createParam<TDynamicParam>(pos, module, paramId, minValue, maxValue, defaultValue);
  dynParam->mode = mode;
  dynParam->box.pos = dynParam->box.pos.minus(dynParam->box.size.div(2));// centering
  return dynParam;
}

// Dynamic SVGSwitch (see SVGSwitch in app.hpp and SVGSwitch.cpp)
struct DynamicSVGSwitch : SVGSwitch {
  int* mode;   // externally owned; selects the active frame set
  int oldMode; // last observed *mode, compared in step()
  std::vector<std::shared_ptr<SVG>> framesAll;

  DynamicSVGSwitch();
  void addFrameAll(std::shared_ptr<SVG> svg);
  void step() override;
};

// Dynamic SVGKnob (see SVGKnob in app.hpp and SVGKnob.cpp)
struct DynamicSVGKnob : SVGKnob {
  int* mode;
  int oldMode;
  std::vector<std::shared_ptr<SVG>> framesAll;
  SVGWidget* effect; // optional overlay SVG, installed via addEffect()

  DynamicSVGKnob();
  void addFrameAll(std::shared_ptr<SVG> svg);
  void addEffect(std::shared_ptr<SVG> svg);// do this last
  void step() override;
};

// General Dynamic Param creation version two with float* instead of one int*
template <class TDynamicParam>
TDynamicParam* createDynamicParam2(Vec pos, Module *module, int paramId, float minValue, float maxValue, float defaultValue,
                float* wider, float* paramReadRequest) {
  TDynamicParam *dynParam = createParam<TDynamicParam>(pos, module, paramId, minValue, maxValue, defaultValue);
  dynParam->wider = wider;
  dynParam->paramReadRequest = paramReadRequest;
  return dynParam;
}

// Dynamic Tactile pad (see Knob in app.hpp and Knob.cpp, and see SVGSlider in SVGSlider.cpp and app.hpp)
struct DynamicIMTactile : ParamWidget, FramebufferWidget {
  float* wider;// > 0.5f = true (float used as a boolean flag)
  // NOTE(review): presumably signals a pending external parameter read —
  // confirm against the implementation file.
  float* paramReadRequest;
  float oldWider; // last observed *wider, for change detection
  float dragY;
  float dragValue;
  bool snap;

  // Pad geometry in pixels; wide mode spans two pads plus the gap.
  static const int padWidth = 45;
  static const int padHeight = 200;
  static const int padInterSpace = 18;
  static const int padWidthWide = padWidth * 2 + padInterSpace;

  DynamicIMTactile();
  void step() override;
  void onDragStart(EventDragStart &e) override;
  void onDragMove(EventDragMove &e) override;
  void onMouseDown(EventMouseDown &e) override;
};
#endif
|
Rappsilber-Laboratory/xiFDR | src/main/java/org/rappsilber/fdr/OfflineFDR.java | <filename>src/main/java/org/rappsilber/fdr/OfflineFDR.java
/*
* Copyright 2015 <NAME> <<EMAIL> at staff<EMAIL>>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rappsilber.fdr;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.RandomAccess;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.rappsilber.data.csv.CSVRandomAccess;
import org.rappsilber.fdr.calculation.CheckValid;
import org.rappsilber.fdr.calculation.FDR;
import org.rappsilber.fdr.calculation.FDRImplement;
import org.rappsilber.fdr.calculation.ValidityCheckImplement;
import org.rappsilber.fdr.entities.DirectionalPeptidePair;
import org.rappsilber.fdr.entities.PSM;
import org.rappsilber.fdr.entities.Peptide;
import org.rappsilber.fdr.entities.PeptidePair;
import org.rappsilber.fdr.entities.Protein;
import org.rappsilber.fdr.entities.ProteinGroupDirectionalLink;
import org.rappsilber.fdr.entities.ProteinGroupDirectionalPair;
import org.rappsilber.fdr.entities.ProteinGroupLink;
import org.rappsilber.fdr.entities.ProteinGroupPair;
import org.rappsilber.fdr.entities.Site;
import org.rappsilber.fdr.entities.ProteinGroup;
import org.rappsilber.fdr.filter.PSMFilter;
import org.rappsilber.fdr.filter.SingleSubScoreFilter;
import org.rappsilber.fdr.result.FDRResult;
import org.rappsilber.fdr.result.FDRResultLevel;
import org.rappsilber.fdr.result.SubGroupFdrInfo;
import org.rappsilber.fdr.entities.AbstractFDRElement;
import org.rappsilber.fdr.entities.FDRSelfAdd;
import org.rappsilber.fdr.filter.DeltaScorePercentFilter;
import org.rappsilber.fdr.utils.CalculateWriteUpdate;
import org.rappsilber.fdr.utils.HashedArrayList;
import org.rappsilber.fdr.utils.MaximisingStatus;
import org.rappsilber.fdr.utils.MaximizeLevelInfo;
import org.rappsilber.fdr.utils.MaximizeLevelInfoInteger;
import org.rappsilber.fdr.utils.MaximizingUpdate;
import org.rappsilber.fdr.utils.MiscUtils;
import org.rappsilber.utils.AutoIncrementValueMap;
import org.rappsilber.utils.CountOccurence;
import org.rappsilber.utils.DoubleArrayList;
import org.rappsilber.utils.RArrayUtils;
import org.rappsilber.utils.NullOutputStream;
import org.rappsilber.utils.SelfAddHashSet;
import org.rappsilber.utils.UpdateableInteger;
import org.rappsilber.utils.Version;
import org.rappsilber.utils.statistic.StreamingStatsEstimator;
import rappsilber.utils.MyArrayUtils;
/**
*
* @author lfischer
*/
public abstract class OfflineFDR {
public static enum Normalisation {
None, FDR_Based, Auto_Score, Decoy_Scores, All_Scores
}
    /**
     * store all psm
     */
    protected SelfAddHashSet<PSM> allPSMs = new SelfAddHashSet<PSM>();
    /**
     * psms that passed some form of prefilter
     */
    private ArrayList<PSM> prefilteredPSMs = null;
    /**
     * store all peptides (the field holds Peptide instances; a previous
     * comment said "peptide pairs")
     */
    SelfAddHashSet<Peptide> allPeptides = new SelfAddHashSet<Peptide>();
    /**
     * store all Proteins
     */
    protected SelfAddHashSet<Protein> allProteins = new SelfAddHashSet<Protein>();
    /**
     * turn peptide-sequences into unique integer ids
     */
    AutoIncrementValueMap<String> m_pepIDs = new AutoIncrementValueMap<String>();
    /**
     * turn protein-accession into unique integer ids
     */
    AutoIncrementValueMap<String> m_protIDs = new AutoIncrementValueMap<String>();
    /**
     * size of the decoy independent protein pairs in terms of psms
     */
    HashMap<Integer, UpdateableInteger> protpairToSize = new HashMap<>();
    /**
     * id of a protein pair independent of target or decoy
     */
    HashMap<String, Integer> protpairToID = new HashMap<>();
    // directionality flags now live in FDRSettings (see the is*/set*_directional accessors)
    // private boolean psm_directional = false;
    // private boolean peptides_directional = false;
    // private boolean links_directional = false;
    // private boolean ppi_directional = false;
    protected int m_maximum_summed_peplength = Integer.MAX_VALUE;
    // validity check and FDR calculation strategy used by the calculate* methods
    private CheckValid check = new ValidityCheckImplement(0, 2);
    private FDR calc = new FDRImplement(check);
    /**
     * is a higher score better than a lower score?
     */
    protected boolean PSMScoreHighBetter = true;
    /**
     * the version of xiFDR to be reported
     */
    private static Version xiFDRVersion;
    // minimum peptide support per protein-group / link / protein-pair — presumably
    // result filters; TODO confirm where these are enforced
    private int minPepPerProteinGroup = 1;
    private int minPepPerProteinGroupLink = 1;
    private int minPepPerProteinGroupPair = 1;
    // database sizes handed to the FDR calculation (see calc.fdr calls);
    // defaults are effectively "unknown / very large"
    private double targetPepDBSize = 999999999;
    private double decoyPepDBSize = 999999999;
    private double targetProtDBSize = 999999999;
    private double decoyProtDBSize = 999999999;
    private double targetLinkDBSize = 999999999;
    private double decoyLinkDBSize = 999999999;
    // FDR ranges as {from, to, step} — iterated in calculateWriteFDR
    private double[] psmFDRSetting;
    private double[] peptidePairFDRSetting;
    private double[] ProteinGroupFDRSetting;
    private double[] linkFDRSetting;
    private double[] ppiFDRSetting;
    private double safetyFactorSetting;
    private boolean ignoreGroupsSetting;
    // output options for the CSV writers
    private boolean csvSummaryOnly = false;
    private boolean singleSummary = false;
    private PrintWriter singleSummaryOut;
    private String csvOutDirSetting;
    private String csvOutBaseSetting;
    public static final Integer MINIMUM_POSSIBLE_DECOY = 1;
    public static final Integer MINIMUM_POSSIBLE_RESULT = 1;
    // cached counts; null means "not calculated yet"
    private Integer m_linearPSMCount = null;
    private Integer m_XLPSMCount = null;
    private Integer m_linearPepCount = null;
    private Integer m_XLPepCount = null;
    public int m_minPepLength = 0;
    // digits used when formatting FDR values into output file names
    public int commandlineFDRDigits = 2;
    /**
     * do we have PSMs with crosslinker-stubs. Basically do we have a ms2
     * cleavable crosslinker search.
     */
    private boolean stubsFound = false;
    /**
     * We normalize psm-scores by median and MAD - but to go around some quirks
     * of our score propagation scores then get shifted so that the lowest score
     * is around one.
     */
    private double psmNormOffset = 0;
    /**
     * indicates, whether the psms went through a score normalisation
     */
    private Normalisation isNormalized = Normalisation.None;
    /**
     * how many decoys does a fdr group need to have to be reported as result
     */
    private Integer minTDChance = 0;
    /**
     * I filter the cross-linker names through this hashmap, ensuring I have
     * only one string instance per cross-linker. That way comparison of
     * cross-linker can be reduced to A = B instead of A.equals(B)
     */
    HashMap<String, String> foundCrossLinker = new HashMap<>();
    /**
     * I filter the run names through this hashmap, ensuring I have only one
     * string instance per run. That way comparison of runs can be reduced to A
     * = B instead of A.equals(B)
     */
    HashMap<String, String> foundRuns = new HashMap<>();
    // run name -> index into runs (both maintained by registerRun)
    HashMap<String, Integer> runToInt = new HashMap<>();
    ArrayList<String> runs = new ArrayList<>();
    // locale / number formatting used for output
    private Locale outputlocale = Locale.getDefault();
    private NumberFormat numberFormat = NumberFormat.getNumberInstance(outputlocale);
    // set to true to abort a running maximisation/boosting loop
    private boolean stopMaximizing = false;
    // additional columns forwarded into the CSV output
    private ArrayList<String> extraColumns = new ArrayList<>();
    /**
     * group between links by both proteins beeing observed with self-links
     */
    private boolean groupLinksByHasInternal = false;
    /**
     * group between PPIs by both proteins beeing observed with self-links
     */
    private boolean groupPPIByHasInternal = false;
    /**
     * group between PeptidePairs by both proteins beeing observed with
     * self-links
     */
    private boolean groupPepPairByHasInternal = false;
    /**
     * group psms-by runs
     */
    private boolean groupPSMsByRun = false;
    // currently active FDR settings; most accessors below delegate here
    private FDRSettings settings;
/**
* @return the uniquePSMs
*/
public boolean filterUniquePSMs() {
return settings.filterToUniquePSM();
}
/**
* @param uniquePSMs the uniquePSMs to set
*/
public void setFilterUniquePSMs(boolean uniquePSMs) {
this.settings.setFilterToUniquePSM(uniquePSMs);
}
// protected HashMap<FDRLevel,HashMap<Integer,SubGroupFdrInfo>> GroupedFDRs = new HashMap<FDRLevel, HashMap<Integer, SubGroupFdrInfo>>();
/**
* An enum providing constants for the FDR-Levels
*/
public static enum FDRLevel {
PSM("PSMs"), PEPTIDE_PAIR("Peptidepairs"), PROTEINGROUP("Proteins"), PROTEINGROUPLINK("Residue Pairs"), PROTEINGROUPPAIR("Proteinpairs");
String m_shortname;
private FDRLevel() {
}
private FDRLevel(String shortName) {
m_shortname = shortName;
}
@Override
public String toString() {
return m_shortname == null ? super.toString() : m_shortname;
}
}
    // Default constructor; peptide length groups keep whatever PeptidePair currently uses.
    public OfflineFDR() {
    }
    // Constructor that also installs the (static) peptide length grouping used by PeptidePair.
    public OfflineFDR(int[] peptideLengthGroups) {
        PeptidePair.setLenghtGroup(peptideLengthGroups);
    }
public void normalizePSMs(Normalisation how) {
switch (how) {
case FDR_Based:
normalizePSMsToFDR();
break;
case Auto_Score:
normalizePSMs();
break;
case All_Scores:
normalisePSMsAll();
break;
case Decoy_Scores:
normalisePSMsByDecoy();
break;
}
}
public boolean canDecoyNormlise() {
int decoy = 0;
int count = allPSMs.size();
for (PSM p : allPSMs) {
if (p.isDecoy()) {
decoy++;
}
if (decoy > count / 5 && decoy > 1000) {
return true;
}
}
return false;
}
public void normalizePSMs() {
if (canDecoyNormlise()) {
normalisePSMsByDecoy();
this.setNormalised(Normalisation.Decoy_Scores);
return;
}
this.setNormalised(Normalisation.All_Scores);
normalisePSMsAll();
}
    /**
     * Normalise this dataset and another one with the same strategy so their
     * scores live on a comparable scale. Auto_Score resolves to Decoy_Scores
     * only when both datasets have enough decoys, otherwise to All_Scores.
     * For the shift-based strategies both datasets end up on the larger of
     * the two normalisation offsets.
     *
     * @param newData the other dataset to co-normalise with this one
     * @param how the requested normalisation strategy
     */
    public void coNormalizePSMs(OfflineFDR newData, Normalisation how) {
        if (how == Normalisation.Auto_Score) {
            if (canDecoyNormlise() && newData.canDecoyNormlise()) {
                how = Normalisation.Decoy_Scores;
            } else {
                how = Normalisation.All_Scores;
            }
        }
        // only (re)normalise datasets that are not already on the wanted scale
        if (this.isNormalized() != how) {
            this.normalizePSMs(how);
        }
        if (newData.isNormalized() != how) {
            newData.normalizePSMs(how);
        }
        // if we shift by score we need to adapt the offset
        if (how == Normalisation.Decoy_Scores || how == Normalisation.All_Scores) {
            double shift = 0;
            OfflineFDR toShift = this;
            // move the dataset with the smaller offset up to the other one
            if (newData.psmNormOffset < psmNormOffset) {
                toShift = newData;
                shift = psmNormOffset - newData.psmNormOffset;
                newData.psmNormOffset = psmNormOffset;
            } else {
                shift = newData.psmNormOffset - psmNormOffset;
                psmNormOffset = newData.psmNormOffset;
            }
            for (PSM psm : toShift.getAllPSMs()) {
                psm.setScore(psm.getScore() + shift);
            }
        }
    }
public String getDistributions(String what) {
StreamingStatsEstimator sse = new StreamingStatsEstimator(0.001);
what = what.toLowerCase();
if (what.contentEquals("decoy")) {
for (PSM p : allPSMs) {
if (p.isDecoy()) {
sse.addValue(p.getScore());
}
}
} else if (what.contentEquals("tt")) {
for (PSM p : allPSMs) {
if (!p.isDecoy()) {
sse.addValue(p.getScore());
}
}
} else if (what.contentEquals("td")) {
for (PSM p : allPSMs) {
if (p.isTD()) {
sse.addValue(p.getScore());
}
}
} else if (what.contentEquals("dd")) {
for (PSM p : allPSMs) {
if (p.isDD()) {
sse.addValue(p.getScore());
}
}
} else {
for (PSM p : allPSMs) {
sse.addValue(p.getScore());
}
}
return sse.dumpCSV();
}
    /**
     * Normalizes scores by FDR. PSMs are ranked by score (best first), a
     * running (TD-DD)/TT FDR is recorded at every target-decoy hit as an
     * anchor point (kept monotonically non-decreasing), and every PSM is
     * linked to its surrounding anchors for linear interpolation of its
     * estimated FDR. The estimated FDR is finally mapped onto a new score of
     * 10 * (1 - FDR).
     */
    public void normalizePSMsToFDR() {
        // get all psms
        ArrayList<PSM> scorePSMs = new ArrayList<>(allPSMs);
        // sort descending by score (best first)
        scorePSMs.sort(new Comparator<PSM>() {
            @Override
            public int compare(PSM o1, PSM o2) {
                return Double.compare(o2.getScore(), o1.getScore());
            }
        });
        int size = scorePSMs.size();
        // anchor FDR values and the list positions they were observed at
        ArrayList<Double> fdrs = new ArrayList<>();
        ArrayList<Integer> positions = new ArrayList<>();
        // only accept unique PSMs
        HashSet<String> keys = new HashSet<>(scorePSMs.size() / 2);
        double FDR = 0;
        fdrs.add(0d);
        positions.add(0);
        PSM psm = scorePSMs.get(0);
        double tt = (psm.isTT() ? 1 : 0);
        double td = (psm.isTD() ? 1 : 0);
        double dd = (psm.isDD() ? 1 : 0);
        int lastPos = 0;
        for (int p = 1; p < size; p++) {
            psm = scorePSMs.get(p);
            // only considere the first encounter of a peptide-pair linksite and charge state combination
            if (!keys.contains(psm.getNonDirectionalUnifyingKey())) {
                keys.add(psm.getNonDirectionalUnifyingKey());
                if (psm.isTT()) {
                    tt++;
                }
                if (psm.isTD()) {
                    td++;
                    // we found a td
                    if (td < dd) {
                        FDR = 0;
                    } else {
                        FDR = (td - dd) / tt;
                    }
                    // is it lower then a previous FDRs -> drop those anchors to keep the list monotone
                    while (FDR < fdrs.get(fdrs.size() - 1)) {
                        positions.remove(fdrs.size() - 1);
                        fdrs.remove(fdrs.size() - 1);
                    }
                    psm.setFDR(FDR);
                    positions.add(p);
                    fdrs.add(FDR);
                    lastPos = p;
                }
                if (psm.isDD()) {
                    dd++;
                }
            }
        }
        // close the anchor list with the final FDR when the last TD was not the last PSM
        if (lastPos < scorePSMs.size() - 1) {
            if (td < dd) {
                FDR = 0;
            } else {
                FDR = (td - dd) / tt;
            }
            while (FDR < fdrs.get(fdrs.size() - 1)) {
                positions.remove(fdrs.size() - 1);
                fdrs.remove(fdrs.size() - 1);
            }
            positions.add(scorePSMs.size() - 1);
            fdrs.add(FDR);
        }
        // walk the list once more and attach the bracketing TD anchors to every PSM
        int lowerFDRentry = 0;
        int higherFDRentry = 1;
        int lowerFDRpos = positions.get(0);
        int higherFDRpos = positions.get(1);
        PSM l = scorePSMs.get(0);
        l.setFDR(0);
        PSM h = scorePSMs.get(higherFDRpos);
        for (int p = 0; p < scorePSMs.size(); p++) {
            if (p == higherFDRpos) {
                lowerFDRpos = higherFDRpos;
                if (p < scorePSMs.size() - 1) {
                    higherFDRentry++;
                    higherFDRpos = positions.get(higherFDRentry);
                    l = h;
                    h = scorePSMs.get(higherFDRpos);
                }
            }
            PSM e = scorePSMs.get(p);
            e.setLowerFDRTD(l);
            e.setHigherFDRTD(h);
        }
        // compute all new scores first, then assign — getEstimatedFDR presumably
        // interpolates between the anchors set above; TODO confirm in PSM
        double[] newScores = new double[scorePSMs.size()];
        for (int p = 0; p < size; p++) {
            PSM e = scorePSMs.get(p);
            // turn the FDR into a score
            newScores[p] = 10 * (1 - e.getEstimatedFDR());
        }
        for (int p = 0; p < size; p++) {
            scorePSMs.get(p).setScore(newScores[p]);
        }
        setNormalised(Normalisation.FDR_Based);
    }
    /**
     * Normalise all PSM scores using only the decoy score distribution:
     * scores are centred on the decoy mode and scaled by the decoy MAD
     * (computed around the mode), then shifted so the smallest overall score
     * lands at a positive offset.
     */
    public void normalisePSMsByDecoy() {
        if (allPSMs.size() == 0) {
            Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, "Supposedly no PSMs here");
        }
        double minScore = Double.MAX_VALUE;
        StreamingStatsEstimator sse = new StreamingStatsEstimator(0.001);
        for (PSM p : allPSMs) {
            if (p.isDecoy()) {
                sse.addValue(p.getScore());
            }
            // track the overall (target and decoy) minimum for the offset below
            if (minScore > p.getScore()) {
                minScore = p.getScore();
            }
        }
        double mode = sse.getModeEstimation();
        // calculate MAD but as deviation from the mode
        double mad = sse.getMADEstimation(mode);
        // xifdr is not particular keen on negative scores - so we define an offset
        double offset = -(minScore - mode) / mad;
        psmNormOffset = offset;
        for (PSM p : allPSMs) {
            p.setScore((p.getScore() - mode) / mad + offset);
        }
        setNormalised(Normalisation.Decoy_Scores);
    }
    /**
     * Normalise all PSM scores using the full score distribution (targets and
     * decoys): scores are centred on the mode and scaled by the MAD computed
     * around the mode, then shifted so the smallest score lands at a positive
     * offset.
     */
    public void normalisePSMsAll() {
        if (allPSMs.size() == 0) {
            Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, "Supposedly no PSMs here");
        }
        StreamingStatsEstimator sse = new StreamingStatsEstimator(0.001);
        for (PSM p : allPSMs) {
            sse.addValue(p.getScore());
        }
        double mode = sse.getModeEstimation();
        // calculate MAD but as deviation from the mode
        double mad = sse.getMADEstimation(mode);
        // xifdr is not particular keen on negative scores - so we define an offset
        double offset = -(sse.getMin() - mode) / mad;
        psmNormOffset = offset;
        for (PSM p : allPSMs) {
            p.setScore((p.getScore() - mode) / mad + offset);
        }
        setNormalised(Normalisation.All_Scores);
    }
/**
* add a list of independently normalised psms to the current list of psms
*
* @param psms list of psms
* @param offset the offset applied to this list
*/
public void addNormalisedPsmList(SelfAddHashSet<PSM> psms, double offset) {
double offsetdiff = psmNormOffset - offset;
//make sure we shift the normalised median to the same place for both lists
if (offsetdiff < 0) {
for (PSM p : allPSMs) {
p.setScore(p.getScore() - offsetdiff);
}
this.psmNormOffset -= offsetdiff;
for (PSM p : psms) {
p.setRun(registerRun(p.getRun()));
if (p.getCrosslinker() != null) {
registerCrossLinker(p.getCrosslinker(), p);
}
allPSMs.add(p);
}
} else if (offsetdiff > 0) {
for (PSM p : psms) {
p.setScore(p.getScore() + offsetdiff);
p.setRun(registerRun(p.getRun()));
if (p.getCrosslinker() != null) {
registerCrossLinker(p.getCrosslinker(), p);
}
allPSMs.add(p);
}
}
// allPSMs.addAll(psms);
}
    /**
     * Normalise this and another FDR dataset onto a common scale (see
     * coNormalizePSMs) and then merge the other dataset's PSMs into this one.
     *
     * @param other the dataset whose psms get added to this one
     * @param how the normalisation strategy to use for both datasets
     */
    public void normaliseAndAddPsmList(OfflineFDR other, Normalisation how) {
        coNormalizePSMs(other, how);
        allPSMs.addAll(other.allPSMs);
    }
    /**
     * Merge all PSMs of another dataset into this one without any
     * renormalisation.
     *
     * @param other dataset to merge in
     */
    public void add(OfflineFDR other) {
        allPSMs.addAll(other.allPSMs);
    }
    /**
     * Write a per-group summary table for one FDR level: input counts, TT/TD/DD
     * counts before and after the FDR cut-off, the FDR values bracketing the
     * target FDR, and the final (filtered) result sizes. One column per FDR
     * sub-group, one row per statistic.
     *
     * @param summaryOut writer receiving the table
     * @param pepheader caption printed above the table
     * @param level the FDR level to summarise
     * @param seperator column separator
     */
    protected void levelSummary(PrintWriter summaryOut, String pepheader, FDRResultLevel level, String seperator) {
        double target_fdr = level.getTargetFDR();
        summaryOut.println("\n\"" + pepheader + "\"");
        summaryOut.print("\"Group\"");
        // sorted group ids define the column order for all following rows
        ArrayList<String> fdrGroups = new ArrayList<String>(level.getGroupIDs());
        java.util.Collections.sort(fdrGroups);
        for (String fg : fdrGroups) {
            summaryOut.print(seperator + "\"" + fg + "\"");
        }
        summaryOut.print("\n\"Input\"");
        for (String fg : fdrGroups) {
            summaryOut.print(seperator + (((SubGroupFdrInfo) level.getGroup(fg)).inputCount));
        }
        summaryOut.print("\n\"TT\"");
        for (String fg : fdrGroups) {
            summaryOut.print(seperator + ((SubGroupFdrInfo) level.getGroup(fg)).TT);
        }
        summaryOut.print("\n\"TD\"");
        for (String fg : fdrGroups) {
            summaryOut.print(seperator + ((SubGroupFdrInfo) level.getGroup(fg)).TD);
        }
        summaryOut.print("\n\"DD\"");
        for (String fg : fdrGroups) {
            summaryOut.print(seperator + ((SubGroupFdrInfo) level.getGroup(fg)).DD);
        }
        summaryOut.print("\n\"passing fdr (" + target_fdr + ")\"");
        for (String fg : fdrGroups) {
            summaryOut.print(seperator + ((SubGroupFdrInfo) level.getGroup(fg)).results.size());
        }
        // TT/TD/DD of the entries that passed the cut-off
        summaryOut.print("\n\"TT\"");
        for (String fg : fdrGroups) {
            summaryOut.print(seperator + ((SubGroupFdrInfo) level.getGroup(fg)).resultTT);
        }
        summaryOut.print("\n\"TD\"");
        for (String fg : fdrGroups) {
            summaryOut.print(seperator + ((SubGroupFdrInfo) level.getGroup(fg)).resultTD);
        }
        summaryOut.print("\n\"DD\"");
        for (String fg : fdrGroups) {
            summaryOut.print(seperator + ((SubGroupFdrInfo) level.getGroup(fg)).resultDD);
        }
        summaryOut.print("\n\"last fdr > " + target_fdr + "\"");
        for (String fg : fdrGroups) {
            summaryOut.print(seperator + ((SubGroupFdrInfo) level.getGroup(fg)).firstPassingFDR);
        }
        summaryOut.print("\n\"higher fdr (> " + target_fdr + ")\"");
        for (String fg : fdrGroups) {
            summaryOut.print(seperator + ((SubGroupFdrInfo) level.getGroup(fg)).higherFDR);
        }
        summaryOut.print("\n\"lower fdr (<= " + target_fdr + ")\"");
        for (String fg : fdrGroups) {
            summaryOut.print(seperator + ((SubGroupFdrInfo) level.getGroup(fg)).lowerFDR);
        }
        summaryOut.print("\nfinal");
        for (String fg : fdrGroups) {
            summaryOut.print(seperator + ((SubGroupFdrInfo) level.getGroup(fg)).filteredResult.size());
        }
        summaryOut.print("\n");
    }
/**
* @return the peptides_directional
*/
public boolean isPeptides_directional() {
return settings.isPeptidePairDirectional();
}
/**
* @param peptides_directional the peptides_directional to set
*/
public void setPeptides_directional(boolean peptides_directional) {
settings.setPeptidePairDirectional(peptides_directional);
}
/**
* @return the links_directional
*/
public boolean isLinks_directional() {
return settings.isLinkDirectional();
}
/**
* @param links_directional the links_directional to set
*/
public void setLinks_directional(boolean links_directional) {
settings.setLinkDirectional(links_directional);
}
/**
* @return the ppi_directional
*/
public boolean isPpi_directional() {
return settings.isPPIDirectional();
}
/**
* @param ppi_directional the ppi_directional to set
*/
public void setPpi_directional(boolean ppi_directional) {
settings.setPPIDirectional(ppi_directional);
}
/**
* @return the psm_directional
*/
public boolean isPsm_directional() {
return settings.isPSMDirectional();
}
/**
* @param psm_directional the psm_directional to set
*/
public void setPsm_directional(boolean psm_directional) {
this.settings.setPSMDirectional(psm_directional);
}
    /**
     * @return the long form of the xiFDR version string
     */
    // NOTE(review): throws NullPointerException when xiFDRVersion was never
    // initialised — confirm it is set before this is called
    public static String getLongVersionString() {
        return xiFDRVersion.toLongString();
    }
    /**
     * Install the (static) peptide length grouping used by PeptidePair.
     *
     * @param peptideLengthGroups boundaries of the peptide length groups
     */
    public void setLengthGroups(int[] peptideLengthGroups) {
        PeptidePair.setLenghtGroup(peptideLengthGroups);
    }
    /**
     * adds a psm to the list; scores get folded up to peptide pairs, links,
     * protein pairs and proteins later. Convenience overload without
     * crosslinker, run and scan (they default to "").
     *
     * @param psmID unique id of the match
     * @param pepSeq1 sequence of the first peptide
     * @param pepSeq2 sequence of the second peptide
     * @param peplen1 length of the first peptide
     * @param peplen2 length of the second peptide
     * @param site1 link site in the first peptide
     * @param site2 link site in the second peptide
     * @param isDecoy1 is the first peptide a decoy
     * @param isDecoy2 is the second peptide a decoy
     * @param charge precursor charge state
     * @param score score of the match
     * @param accession1 accession of the first protein
     * @param description1 description of the first protein
     * @param accession2 accession of the second protein
     * @param description2 description of the second protein
     * @param pepPosition1 position of the first peptide in its protein
     * @param pepPosition2 position of the second peptide in its protein
     * @param peptide1score score contribution of the first peptide
     * @param peptide2score score contribution of the second peptide
     * @param isSpecialCase negative grouping tag
     * @return the registered PSM
     */
    public PSM addMatch(String psmID, String pepSeq1, String pepSeq2, int peplen1, int peplen2, int site1, int site2, boolean isDecoy1, boolean isDecoy2, int charge, double score, String accession1, String description1, String accession2, String description2, int pepPosition1, int pepPosition2, double peptide1score, double peptide2score, String isSpecialCase) {
        return addMatch(psmID, pepSeq1, pepSeq2, peplen1, peplen2, site1, site2, isDecoy1, isDecoy2, charge, score, accession1, description1, accession2, description2, pepPosition1, pepPosition2, peptide1score, peptide2score, isSpecialCase, "", "", "");
    }
    /**
     * adds a psm to the list; peptide sequences and protein accessions are
     * first mapped to unique numeric ids before delegating to the full
     * addMatch variant. Parameters as in the String-based overload, plus
     * crosslinker, run and scan.
     */
    public PSM addMatch(String psmID, String pepSeq1, String pepSeq2, int peplen1, int peplen2, int site1, int site2, boolean isDecoy1, boolean isDecoy2, int charge, double score, String accession1, String description1, String accession2, String description2, int pepPosition1, int pepPosition2, double peptide1score, double peptide2score, String isSpecialCase, String crosslinker, String run, String scan) {
        long pepid1 = m_pepIDs.toIntValue(pepSeq1);
        long pepid2 = m_pepIDs.toIntValue(pepSeq2);
        long protid1 = m_protIDs.toIntValue(accession1);
        long protid2 = m_protIDs.toIntValue(accession2);
        //return addMatch(pepSeq2, pepSeq1, accession1, accession2, protid1, description2, isDecoy1, pepid1, pepPosition1, peplen1, protid2, isDecoy2, pepid2, pepPosition2, peplen2, psmID, site1, site2, charge, score, scoreRatio, isSpecialCase);
        return addMatch(psmID, pepid1, pepid2, pepSeq1, pepSeq2, peplen1, peplen2, site1, site2, isDecoy1, isDecoy2, charge, score, protid1, accession1, description1, protid2, accession2, description2, pepPosition1, pepPosition2, "", "", peptide1score, peptide2score, isSpecialCase, crosslinker, run, scan);
    }
    /**
     * adds a psm to the list; scores get folded up to peptide pairs, links,
     * protein pairs and proteins later. The peptide pair is stored in a
     * canonical orientation (decided by decoy-unaware protein comparison,
     * then peptide comparison, then link site) so the same pair always ends
     * up with the same representation regardless of input order.
     *
     * @param psmID unique id of the match
     * @param peptide1 first peptide
     * @param peptide2 second peptide
     * @param peplen1 length of the first peptide
     * @param peplen2 length of the second peptide
     * @param site1 link site in the first peptide
     * @param site2 link site in the second peptide
     * @param charge precursor charge state
     * @param score score of the match
     * @param proteinId1 protein of the first peptide
     * @param proteinId2 protein of the second peptide
     * @param pepPosition1 position of the first peptide in its protein
     * @param pepPosition2 position of the second peptide in its protein
     * @param peptide1Score score contribution of the first peptide
     * @param peptide2Score score contribution of the second peptide
     * @param isSpecialCase negative grouping tag
     * @param crosslinker cross-linker name ("" when unknown)
     * @param run run name
     * @param Scan scan id
     * @return a registered PSM that is supported by the given match
     */
    public PSM addMatch(String psmID, Peptide peptide1, Peptide peptide2, int peplen1, int peplen2, int site1, int site2, int charge, double score, Protein proteinId1, Protein proteinId2, int pepPosition1, int pepPosition2, double peptide1Score, double peptide2Score, String isSpecialCase, String crosslinker, String run, String Scan) {
        Peptide npepid1;
        Peptide npepid2;
        int npeplen1;
        int npeplen2;
        int nsite1;
        int nsite2;
        Protein nproteinId1;
        Protein nproteinId2;
        int npepPosition1;
        int npepPosition2;
        int protcomp = proteinId1.compareDecoyUnAware(proteinId2);
        int pepcomp = peptide1.compare(peptide2);
        int sitecomp = (site1 - site2);
        //double nScoreRatio = scoreRation;
        double npeptide1Score;
        double npeptide2Score;
        // keep the input orientation when (protein, peptide, site) already compares "first < second"; otherwise swap
        if (protcomp < 0 || (protcomp == 0 && pepcomp < 0) || (protcomp == 0 && pepcomp == 0 && site1 < site2)) {
            npepid1 = peptide1;
            npepid2 = peptide2;
            npeplen1 = peplen1;
            npeplen2 = peplen2;
            nsite1 = site1;
            nsite2 = site2;
            nproteinId1 = proteinId1;
            nproteinId2 = proteinId2;
            npepPosition1 = pepPosition1;
            npepPosition2 = pepPosition2;
            npeptide1Score = peptide1Score;
            npeptide2Score = peptide2Score;
        } else {
            npepid1 = peptide2;
            npepid2 = peptide1;
            npeplen1 = peplen2;
            npeplen2 = peplen1;
            nsite1 = site2;
            nsite2 = site1;
            nproteinId1 = proteinId2;
            nproteinId2 = proteinId1;
            npepPosition1 = pepPosition2;
            npepPosition2 = pepPosition1;
            npeptide1Score = peptide2Score;
            npeptide2Score = peptide1Score;
        }
        // map "lower is better" scores onto the internal "higher is better" scale
        if (!PSMScoreHighBetter) {
            score = 10 - (10 * score);
        }
        PSM psm = new PSM(psmID, npepid1, npepid2, (byte) nsite1, (byte) nsite2, nproteinId1.isDecoy(), nproteinId2.isDecoy(), (byte) charge, score, npeptide1Score, npeptide2Score);
        psm.setNegativeGrouping(isSpecialCase);
        psm.setRun(registerRun(run));
        if (crosslinker == null) {
            crosslinker = "";
        }
        registerCrossLinker(crosslinker, psm);
        psm.setScan(Scan);
        // register() returns the already-known instance when this PSM was seen before
        PSM regpsm = getAllPSMs().register(psm);
        return regpsm;
    }
protected void registerCrossLinker(String crosslinker, PSM psm) {
String c = foundCrossLinker.get(crosslinker);
if (c == null) {
psm.setCrosslinker(crosslinker);
foundCrossLinker.put(crosslinker, crosslinker);
} else {
psm.setCrosslinker(c);
}
}
protected String registerRun(String run) {
// ensure we have just a single instance of a string for each cross-linker and run
// speeds up comparisons later
String r = foundRuns.get(run);
if (r == null) {
r = run;
foundRuns.put(run, run);
runToInt.put(run, runs.size());
runs.add(run);
}
return r;
}
/**
* returns if a calculateWrite will only do a single calculation or a range
* of FDR calculations
*
* @return
*/
public boolean singleCalculation() {
return getPsmFDRSetting()[0] == getPsmFDRSetting()[1]
&& getPeptidePairFDRSetting()[0] == getPeptidePairFDRSetting()[1]
&& getProteinGroupFDRSetting()[0] == getProteinGroupFDRSetting()[1]
&& getLinkFDRSetting()[0] == getLinkFDRSetting()[1]
&& getPpiFDRSetting()[0] == getPpiFDRSetting()[1];
}
    /**
     * Convenience overload using the configured default number of digits
     * (commandlineFDRDigits) for formatting FDR values into file names.
     *
     * @param path output directory
     * @param baseName base name for the produced files
     * @param seperator CSV separator
     * @param settings fdr settings to apply
     * @param update callback for progress/stop/error reporting
     * @return the last computed FDRResult
     * @throws FileNotFoundException if the output files cannot be created
     */
    public FDRResult calculateWriteFDR(String path, String baseName, String seperator, FDRSettings settings, CalculateWriteUpdate update) throws FileNotFoundException {
        return calculateWriteFDR(path, baseName, seperator, commandlineFDRDigits, settings, update);
    }
    /**
     * Run an FDR calculation (and write the result files) for every
     * combination in the configured FDR ranges (psm, peptide pair, protein
     * group, link, ppi). Range values are scaled by 1e6 and iterated as
     * (rounded) integers to avoid floating-point drift in the loop bounds. A
     * combination is skipped when its summary file already exists. When
     * settings.doOptimize() is set, the boosting/maximisation path is used
     * instead of a plain calculation. Progress, stop requests and errors are
     * reported through the update callback.
     *
     * @param path output directory
     * @param baseName base name for all produced files
     * @param seperator CSV separator
     * @param minDigits minimum digits used when formatting FDR values into file names
     * @param settings fdr settings to apply
     * @param update callback for progress/stop/error reporting
     * @return the last computed FDRResult (null when stopped before any calculation)
     * @throws FileNotFoundException if the output files cannot be created
     */
    public FDRResult calculateWriteFDR(String path, String baseName, String seperator, int minDigits, FDRSettings settings, final CalculateWriteUpdate update) throws FileNotFoundException {
        FDRResult result = null;
        setSettings(settings);
        Logger.getLogger(this.getClass().getName()).log(Level.INFO, "PATH: " + path);
        Logger.getLogger(this.getClass().getName()).log(Level.INFO, "BaseName: " + baseName);
        Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Seperator: " + seperator);
        // collect all range values to derive one common number format for file names
        DoubleArrayList allvalues = new DoubleArrayList();
        allvalues.addAll(getPsmFDRSetting());
        allvalues.addAll(getPeptidePairFDRSetting());
        allvalues.addAll(getProteinGroupFDRSetting());
        allvalues.addAll(getLinkFDRSetting());
        allvalues.addAll(getPpiFDRSetting());
        String format = MiscUtils.formatStringForPrettyPrintingRelatedValues(allvalues.toDoubleArray(), minDigits);
        // iterate every combination; each setting is {from, to, step}, scaled by 1e6
        for (double psmfdr = Math.round(getPsmFDRSetting()[0] * 1000000); psmfdr <= Math.round(getPsmFDRSetting()[1] * 1000000); psmfdr += Math.round(getPsmFDRSetting()[2] * 1000000)) {
            for (double pepfdr = Math.round(getPeptidePairFDRSetting()[0] * 1000000); pepfdr <= Math.round(getPeptidePairFDRSetting()[1] * 1000000); pepfdr += Math.round(getPeptidePairFDRSetting()[2] * 1000000)) {
                for (double pgfdr = Math.round(getProteinGroupFDRSetting()[0] * 1000000); pgfdr <= Math.round(getProteinGroupFDRSetting()[1] * 1000000); pgfdr += Math.round(getProteinGroupFDRSetting()[2] * 1000000)) {
                    for (double pglfdr = Math.round(getLinkFDRSetting()[0] * 1000000); pglfdr <= Math.round(getLinkFDRSetting()[1] * 1000000); pglfdr += Math.round(getLinkFDRSetting()[2] * 1000000)) {
                        for (double pgpfdr = Math.round(getPpiFDRSetting()[0] * 1000000); pgpfdr <= Math.round(getPpiFDRSetting()[1] * 1000000); pgpfdr += Math.round(getPpiFDRSetting()[2] * 1000000)) {
                            if (update.stopped()) {
                                return result;
                            }
                            update.setCurrent(psmfdr / 1000000, pepfdr / 1000000, pgfdr / 1000000, pglfdr / 1000000, pgpfdr / 1000000);
                            String fdr_basename;
                            // single combination: keep the plain base name; ranges: encode the FDRs into the name
                            if (getPsmFDRSetting()[0] == getPsmFDRSetting()[1]
                                    && getPeptidePairFDRSetting()[0] == getPeptidePairFDRSetting()[1]
                                    && getProteinGroupFDRSetting()[0] == getProteinGroupFDRSetting()[1]
                                    && getLinkFDRSetting()[0] == getLinkFDRSetting()[1]
                                    && getPpiFDRSetting()[0] == getPpiFDRSetting()[1]) {
                                fdr_basename = baseName;
                            } else {
                                fdr_basename = baseName + "_" + String.format(format, psmfdr / 1000000.0) + "_"
                                        + String.format(format, pepfdr / 1000000.0) + "_"
                                        + String.format(format, pgfdr / 1000000.0) + "_"
                                        + String.format(format, pglfdr / 1000000.0) + "_"
                                        + String.format(format, pgpfdr / 1000000.0) + "_"
                                        + String.format(format, getSafetyFactorSetting()) + "_"
                                        + isIgnoreGroupsSetting();
                            }
                            String testfile = path + "/" + fdr_basename + "_summary.";
                            Logger.getLogger(this.getClass().getName()).log(Level.INFO, "next " + fdr_basename);
                            // skip combinations whose summary (csv or txt) already exists
                            if (!(new File(testfile + "csv").exists() || new File(testfile + "txt").exists())) {
                                Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Calculating:\n"
                                        + "\npsmFDR: " + String.format(format, psmfdr / 1000000.0)
                                        + "\nPeptidePairFDR: " + String.format(format, pepfdr / 1000000.0)
                                        + "\nProteinGroupFDR: " + String.format(format, pgfdr / 1000000.0)
                                        + "\nProteinGroupLinkFDR: " + String.format(format, pglfdr / 1000000.0)
                                        + "\nProteinGroupPairFDR: " + String.format(format, pgpfdr / 1000000.0)
                                        + "\nReport-Factor: " + String.format(format, getSafetyFactorSetting())
                                        + "\nIgnore Groups: " + isIgnoreGroupsSetting());
                                // snapshot the settings with the FDRs of this combination
                                FDRSettingsImpl s = new FDRSettingsImpl();
                                s.setAll(settings);
                                s.BoostingSteps = 4;
                                s.PSMFDR = psmfdr / 1000000;
                                s.PeptidePairFDR = pepfdr / 1000000;
                                s.ProteinGroupFDR = pgfdr / 1000000;
                                s.ProteinGroupLinkFDR = pglfdr / 1000000;
                                s.ProteinGroupPairFDR = pgpfdr / 1000000;
                                if (settings.doOptimize() == null) {
                                    result = this.calculateFDR(s, true);
                                } else {
                                    // boosting: maximise the requested level, forwarding progress to the update callback
                                    MaximisingStatus m = this.maximise(s, settings.doOptimize(), settings.filterToUniquePSM(), new MaximizingUpdate() {
                                        @Override
                                        public void setStatus(MaximisingStatus state) {
                                            update.setStatus(state);
                                        }
                                        @Override
                                        public void setStatusText(String text) {
                                            update.setStatusText(text);
                                            System.err.println(text);
                                        }
                                        @Override
                                        public void reportError(String text, Exception ex) {
                                            Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, text, ex);
                                            update.reportError(text, ex);
                                            return;
                                        }
                                    });
                                    result = m.result;
                                }
                                Logger.getLogger(this.getClass().getName()).log(Level.INFO, "PATH: " + path);
                                Logger.getLogger(this.getClass().getName()).log(Level.INFO, "fdr_basename: " + fdr_basename);
                                Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Seperator: " + seperator);
                                Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Result-summary:" + summaryString(result));
                                writeFiles(path, fdr_basename, seperator, result);
                            } else {
                                Logger.getLogger(this.getClass().getName()).log(Level.INFO, fdr_basename + "skipped");
                            }
                        }
                    }
                }
            }
        }
        update.setComplete();
        return result;
    }
    /**
     * Apply all configured PSM-level pre-filters (minimum fragments, peptide
     * coverage, delta score, protein ambiguity, peptide length, unique-PSM
     * collapsing, consecutive-peptide removal, crosslinker-stub grouping) and
     * then run the PSM-level FDR calculation. The outcome is stored in
     * result.psmFDR; excluded FDR sub-groups are noted in
     * result.excludedGroups.
     *
     * @param setElementFDR forwarded to the FDR calculation
     * @param ignoreGroups calculate one global FDR instead of per-group FDRs
     * @param result receives input list and psm-level FDR result
     * @param settings the active FDR settings
     */
    public void calculatePSMFDR(boolean setElementFDR, boolean ignoreGroups, FDRResult result, FDRSettings settings) {
        boolean groupByHasInternal = false;
        //boolean groupByCharge4plus = true;
        FDRResultLevel<PSM> GroupedFDRs = new FDRResultLevel<PSM>();
        GroupedFDRs.isDirectional = false;
        FDRResultLevel<PSM> GroupedFDRsS = new FDRResultLevel<PSM>();
        GroupedFDRsS.isDirectional = false;
        reset();
        result.uniquePSMs = settings.filterToUniquePSM();
        protpairToID = new HashMap<>();
        protpairToSize = new HashMap<>();
        Collection<PSM> inputPSM;
        ArrayList<PSM> allPSM = null;
        // start from the prefiltered list when one exists, otherwise from all psms
        if (getPrefilteredPSMs() == null) {
            allPSM = new ArrayList<>(getAllPSMs());
        } else {
            allPSM = new ArrayList<>(getPrefilteredPSMs());
        }
        // sub-score based pre-filters
        if (settings.getMinPeptideFragmentsFilter() > 0) {
            PSMFilter f = new SingleSubScoreFilter("MinFragments", settings.getMinPeptideFragmentsFilter(), true);
            allPSM = f.filter(allPSM);
        }
        if (settings.getMinPeptideCoverageFilter() > 0) {
            PSMFilter f = new SingleSubScoreFilter("minPepCoverage", settings.getMinPeptideCoverageFilter(), true);
            allPSM = f.filter(allPSM);
        }
        if (settings.getMinDeltaScoreFilter() > 0) {
            PSMFilter f = new DeltaScorePercentFilter(settings.getMinDeltaScoreFilter());
            allPSM = f.filter(allPSM);
        }
        if (settings.combineScoreAndDelta()) {
            for (PSM p : allPSM) {
                p.setScore((p.getOriginalScore() + p.getDeltaScore()) / 2);
            }
        }
        // keep only psms with positive score that pass ambiguity/length limits;
        // one-residue "X" peptides and NOPEPTIDE (linear matches) bypass the length check
        if (settings.getMaxProteinAmbiguity() > 0 && settings.getMinPeptideLength() == 0) {
            inputPSM = new ArrayList<PSM>(allPSM.size());
            for (PSM p : allPSM) {
                if (p.getScore() > 0 && p.getPeptide1().getProteins().size() <= settings.getMaxProteinAmbiguity()
                        && p.getPeptide2().getProteins().size() <= settings.getMaxProteinAmbiguity()) {
                    inputPSM.add(p);
                }
            }
        } else if (settings.getMaxProteinAmbiguity() > 0 && settings.getMinPeptideLength() > 0) {
            inputPSM = new ArrayList<PSM>(allPSM.size());
            for (PSM p : allPSM) {
                Peptide pep1 = p.getPeptide1();
                Peptide pep2 = p.getPeptide2();
                if (p.getScore() > 0 && pep1.getProteins().size() <= settings.getMaxProteinAmbiguity()
                        && pep2.getProteins().size() <= settings.getMaxProteinAmbiguity()
                        && (pep1 == Peptide.NOPEPTIDE || pep1.length() >= settings.getMinPeptideLength() || (pep1.length() == 1 && pep1.getSequence().startsWith("X")))
                        && (pep2 == Peptide.NOPEPTIDE || pep2.length() >= settings.getMinPeptideLength() || (pep2.length() == 1 && pep2.getSequence().startsWith("X")))) {
                    inputPSM.add(p);
                }
            }
        } else if (settings.getMinPeptideLength() > 0) {
            inputPSM = new ArrayList<PSM>(allPSM.size());
            for (PSM p : allPSM) {
                if (p.getScore() > 0) {
                    Peptide pep1 = p.getPeptide1();
                    Peptide pep2 = p.getPeptide2();
                    if ((pep1 == Peptide.NOPEPTIDE || pep1.length() >= settings.getMinPeptideLength() || (pep1.length() == 1 && pep1.getSequence().startsWith("X")))
                            && (pep2 == Peptide.NOPEPTIDE || pep2.length() >= settings.getMinPeptideLength() || (pep2.length() == 1 && pep2.getSequence().startsWith("X")))) {
                        inputPSM.add(p);
                    }
                }
            }
        } else {
            inputPSM = new ArrayList<PSM>(allPSM.size());
            for (PSM p : allPSM) {
                if (p.getScore() > 0) {
                    inputPSM.add(p);
                }
            }
        }
        //        if (filterUnique) {
        //            SelfAddHashSet<PSM> uniquePSM = new SelfAddHashSet<PSM>();
        //            for (PSM psm : inputPSM)
        //                uniquePSM.add(new UniquePSM(psm));
        //            inputPSM = new ArrayList<PSM>(uniquePSM);
        //        }
        // filter to unique PSMs
        if (settings.filterToUniquePSM()) {
            HashMap<String, PSM> uniquePSM = filterPSMToUnique(inputPSM);
            inputPSM = new ArrayList<PSM>(uniquePSM.values());
        }
        if (settings.filterConsecutivePeptides()) {
            ArrayList<PSM> nonconsecutives = new ArrayList<>();
            for (PSM psm : inputPSM) {
                if (!psm.isConsecutive()) {
                    nonconsecutives.add(psm);
                }
            }
            inputPSM = nonconsecutives;
        }
        //        for (PSM pp : inputPSM) {
        //            pp.setFDRGroup(pp.getFDRGroup()+" z"+pp.getCharge());
        //        }
        result.input = inputPSM;
        result.minPeptideLength = settings.getMinPeptideLength();
        result.maximumProteinAmbiguity = settings.getMaxProteinAmbiguity();
        result.maximumLinkAmbiguity = settings.getMaxLinkAmbiguity();
        // group psms by how well crosslinker stubs support them — or clear that grouping again
        if (settings.getGroupByCrosslinkerStubs()) {
            for (PSM p : inputPSM) {
                int gr = 0;
                if ((Double) p.getOtherInfo("PeptidesWithStubs") > 1) {
                    gr = 1;
                }
                if ((Double) p.getOtherInfo("PeptidesWithDoublets") > 0) {
                    gr++;
                }
                if (gr == 0) {
                    p.addNegativeGrouping("no stub support");
                } else if (gr > 1) {
                    p.addPositiveGrouping("high stub support");
                }
            }
        } else {
            for (PSM p : inputPSM) {
                if (p.getNegativeGrouping() != null) {
                    p.getNegativeGrouping().remove("no stub support");
                    if (p.getNegativeGrouping().size() == 0) {
                        p.setNegativeGrouping(null);
                    }
                }
                if (p.getPositiveGrouping() != null) {
                    p.getPositiveGrouping().remove("high stub support");
                    if (p.getPositiveGrouping().size() == 0) {
                        p.setPositiveGrouping(null);
                    }
                }
            }
        }
        this.calc.fdr(settings.getPSMFDR(), settings, inputPSM, GroupedFDRs, targetPepDBSize, decoyPepDBSize, 1, ignoreGroups, setElementFDR, settings.psmLocalFDR(), settings.isGroupByPSMCount());
        // dead branch: groupByHasInternal is hard-coded false above
        if (groupByHasInternal) {
            HashSet<ProteinGroup> internal = new HashSet<>();
            for (PSM pp : GroupedFDRs) {
                if (pp.isInternal()) {
                    internal.add(pp.getProteinGroup1());
                    internal.add(pp.getProteinGroup1().decoyComplement());
                    internal.add(pp.getProteinGroup2());
                    internal.add(pp.getProteinGroup2().decoyComplement());
                }
            }
            for (PSM pp : inputPSM) {
                if (pp.isBetween()
                        && internal.contains(pp.getProteinGroup1())
                        && internal.contains(pp.getProteinGroup2())) {
                    pp.setFDRGroup(pp.getFDRGroup() + " int_support");
                }
            }
            // NOTE(review): this second pass writes into GroupedFDRs but then
            // replaces it with the never-filled GroupedFDRsS — looks like the
            // second fdr() call was meant to target GroupedFDRsS; confirm if
            // this branch is ever revived
            calc.fdr(settings.getPSMFDR(), settings, inputPSM, GroupedFDRs, targetPepDBSize, decoyPepDBSize, 1, ignoreGroups, setElementFDR, settings.psmLocalFDR(), settings.isGroupByPSMCount());
            GroupedFDRs = GroupedFDRsS;
        }
        // record sub-groups that failed the validity check
        for (SubGroupFdrInfo rl : GroupedFDRs.getGroups()) {
            if (rl.didntPassCheck != null) {
                result.excludedGroups.add("PSM -> " + rl.fdrGroup + "(" + rl.didntPassCheck + ")");
            }
        }
        result.psmFDR = GroupedFDRs;
        //        return GroupedFDRs;
    }
/**
 * Collapse PSMs that describe the same match (peptide sequences, link sites
 * and precursor charge) down to a single representative — the one with the
 * highest score. Lower-scoring duplicates are attached to the representative
 * via {@code PSM.represents(...)}.
 * <p>
 * When PSMs are not directional ({@link #isPsm_directional()} returns false)
 * the swapped peptide order counts as the same match, so the inverted key is
 * probed as well before a new entry is created.
 *
 * @param inputPSM all PSMs to de-duplicate
 * @return map from identity key to the representative PSM
 */
protected HashMap<String, PSM> filterPSMToUnique(Collection<PSM> inputPSM) {
    HashMap<String, PSM> uniquePSM = new HashMap<String, PSM>();
    for (PSM candidate : inputPSM) {
        String forwardKey = uniquePSMKey(candidate, false);
        PSM known = uniquePSM.get(forwardKey);
        if (known != null) {
            electRepresentative(uniquePSM, forwardKey, known, candidate);
        } else if (isPsm_directional()) {
            // directional: peptide order matters, the forward key is all we check
            uniquePSM.put(forwardKey, candidate);
        } else {
            // non-directional: the swapped peptide order denotes the same match
            String invertedKey = uniquePSMKey(candidate, true);
            known = uniquePSM.get(invertedKey);
            if (known == null) {
                // first sighting of this match; file it under the inverted key
                uniquePSM.put(invertedKey, candidate);
            } else {
                electRepresentative(uniquePSM, invertedKey, known, candidate);
            }
        }
    }
    return uniquePSM;
}

/**
 * Build the identity key for a PSM: sequences, link sites and charge joined
 * by a separator. With {@code inverted} the two peptides (and their link
 * sites) swap places.
 */
private String uniquePSMKey(PSM psm, boolean inverted) {
    if (inverted) {
        return psm.getPeptide2().getSequence() + "_!xl!_"
                + psm.getPeptide1().getSequence() + "_!xl!_"
                + psm.getPeptideLinkSite2() + "_!xl!_"
                + psm.getPeptideLinkSite1() + "_!xl!_"
                + psm.getCharge();
    }
    return psm.getPeptide1().getSequence() + "_!xl!_"
            + psm.getPeptide2().getSequence() + "_!xl!_"
            + psm.getPeptideLinkSite1() + "_!xl!_"
            + psm.getPeptideLinkSite2() + "_!xl!_"
            + psm.getCharge();
}

/**
 * Keep the higher-scoring of {@code stored} and {@code candidate} under
 * {@code key}; the loser is registered with the winner via represents().
 */
private void electRepresentative(HashMap<String, PSM> uniquePSM, String key, PSM stored, PSM candidate) {
    if (stored.getScore() < candidate.getScore()) {
        // the new psm has the higher score -> it becomes the representative
        uniquePSM.put(key, candidate);
        candidate.represents(stored);
    } else {
        // lower (or equal) score: represented by the stored one
        stored.represents(candidate);
    }
}
//public void calculatePeptidePairFDR(double fdr, double safetyFactor, boolean ignoreGroups, boolean setElementFDR, FDRResult result, boolean directional) {
/**
 * Aggregate the FDR-filtered PSMs into peptide pairs and run the peptide-pair
 * level FDR on them.
 * <p>
 * Any previously computed downstream results held in {@code result}
 * (peptide pairs, links, protein groups, protein group pairs) are cleared
 * first, and back-references from PSMs/peptide pairs/links are reset, so the
 * level can be recalculated cleanly.
 *
 * @param setElementFDR whether the per-element FDR values should be assigned
 * @param result        holds the PSM-level input and receives the peptide-pair output
 * @param settings      FDR thresholds and grouping options
 * @param ignoreGroups  if true, FDR subgrouping is ignored
 */
public void calculatePeptidePairFDR(boolean setElementFDR, FDRResult result, FDRSettings settings, boolean ignoreGroups) {
    // reset back-references from the PSM level so stale links from a
    // previous run cannot survive
    if (result.psmFDR != null) {
        for (PSM pp : result.psmFDR) {
            pp.setFdrPeptidePair(null);
            pp.setFdrProteinGroup(null);
        }
    }
    // clear all downstream levels that depend on the peptide pairs
    if (result.peptidePairFDR != null) {
        for (PeptidePair pp : result.peptidePairFDR) {
            pp.setFdrLink(null);
            pp.setFdrProteinGroup(null);
        }
        result.peptidePairFDR.clear();
    }
    if (result.proteinGroupLinkFDR != null) {
        for (ProteinGroupLink l : result.proteinGroupLinkFDR) {
            l.setFdrPPI(null);
        }
        result.proteinGroupLinkFDR.clear();
    }
    if (result.proteinGroupFDR != null) {
        result.proteinGroupFDR.clear();
    }
    if (result.proteinGroupPairFDR != null) {
        result.proteinGroupPairFDR.clear();
    }
    FDRResultLevel<PSM> psms = result.psmFDR;
    FDRResultLevel<PeptidePair> GroupedFDRs = new FDRResultLevel<PeptidePair>();
    GroupedFDRs.isDirectional = settings.isPeptidePairDirectional();
    // SelfAddHashSet merges PSMs that map to the same peptide pair
    SelfAddHashSet<PeptidePair> psmPeps = new SelfAddHashSet<PeptidePair>();
    if (!GroupedFDRs.isDirectional) {
        for (PSM psm : psms.filteredResults()) {
            PeptidePair pp = psmPeps.register(psm.getPeptidePair());
        }
    } else {
        // directional: wrap each PSM so that peptide order is preserved
        for (PSM psm : psms.filteredResults()) {
            DirectionalPeptidePair dpp = new DirectionalPeptidePair(psm);
            psmPeps.register(dpp);
        }
    }
    if (groupPepPairByHasInternal) {
        // collect all protein groups (and their decoy complements) that are
        // seen in at least one self (internal) peptide pair ...
        HashSet<ProteinGroup> internal = new HashSet<>();
        for (PeptidePair pp : psmPeps) {
            if (pp.isInternal()) {
                internal.add(pp.getProteinGroup1());
                internal.add(pp.getProteinGroup1().decoyComplement());
                internal.add(pp.getProteinGroup2());
                internal.add(pp.getProteinGroup2().decoyComplement());
            }
        }
        // ... and move between-pairs whose both proteins have such internal
        // support into a dedicated FDR subgroup
        for (PeptidePair pp : psmPeps) {
            if (pp.isBetween()
                    && internal.contains(pp.getProteinGroup1())
                    && internal.contains(pp.getProteinGroup2())) {
                pp.setFDRGroup(pp.getFDRGroup() + " int_support");
            }
        }
    }
    this.calc.fdr(settings.getPeptidePairFDR(), settings, psmPeps, GroupedFDRs, targetPepDBSize, decoyPepDBSize, 1, ignoreGroups, setElementFDR, settings.peppairLocalFDR(), settings.isGroupByPSMCount());
    // record subgroups that failed the validity checks for later reporting
    for (SubGroupFdrInfo rl : GroupedFDRs.getGroups()) {
        if (rl.didntPassCheck != null) {
            result.excludedGroups.add("PeptidePair -> " + rl.fdrGroup + "(" + rl.didntPassCheck + ")");
        }
    }
    result.peptidePairFDR = GroupedFDRs;
}
/**
 * Derive protein groups from the FDR-filtered peptide pairs and run the
 * protein-group level FDR on them.
 * <p>
 * Each non-placeholder peptide of every surviving peptide pair contributes
 * its protein group; with a positive {@code getMaxProteinAmbiguity()} only
 * groups of at most that size are considered. If fewer than 10 groups remain
 * (and an actual FDR cut is requested) the calculation is skipped and an
 * empty result level is stored.
 *
 * @param ignoreGroups  if true, FDR subgrouping is ignored
 * @param setElementFDR whether the per-element FDR values should be assigned
 * @param settings      FDR thresholds and ambiguity/pep-count options
 * @param result        holds the peptide-pair input and receives the protein-group output
 */
public void calculateProteinGroupFDR(boolean ignoreGroups, boolean setElementFDR, FDRSettings settings, FDRResult result) {
    FDRResultLevel<PeptidePair> peps = result.peptidePairFDR;
    FDRResultLevel<ProteinGroup> GroupedFDRs = new FDRResultLevel<ProteinGroup>();
    // protein groups carry no direction
    GroupedFDRs.isDirectional = false;
    // protFDRGroupsInput = new HashMap<Integer, Integer>();
    // nextFdrProteinGroup = new HashMap<Integer, Double>();
    // countFdrProteinGroup = new HashMap<Integer, Integer>();
    SelfAddHashSet<ProteinGroup> pepProteinGroups = new SelfAddHashSet<ProteinGroup>();
    // joinSubFDRInfos(GroupedFDRs, true);
    // SubGroupFdrInfo<PeptidePair> joined = peps.get(-1);
    CountOccurence<String> fdrgroups = new CountOccurence<String>();
    int maxAmbiguity = settings.getMaxProteinAmbiguity();
    if (maxAmbiguity == 0) {
        // 0 means unlimited ambiguity: take every protein group
        for (PeptidePair pp : peps.filteredResults()) {
            Peptide p = pp.getPeptide1();
            if (p != Peptide.NOPEPTIDE) {
                ProteinGroup pg = p.getProteinGroup();
                pg.addPeptidePair(pp);
                pepProteinGroups.register(pg);
            }
            p = pp.getPeptide2();
            if (p != Peptide.NOPEPTIDE) {
                ProteinGroup pg = p.getProteinGroup();
                pg.addPeptidePair(pp);
                pepProteinGroups.register(pg);
            }
        }
    } else {
        // only accept protein groups up to the configured ambiguity (size)
        for (PeptidePair pp : peps.filteredResults()) {
            Peptide p = pp.getPeptide1();
            if (p != Peptide.NOPEPTIDE) {
                ProteinGroup pg = p.getProteinGroup();
                if (pg.size() <= maxAmbiguity) {
                    pg.addPeptidePair(pp);
                    pepProteinGroups.register(pg);
                }
            }
            p = pp.getPeptide2();
            if (p != Peptide.NOPEPTIDE) {
                ProteinGroup pg = p.getProteinGroup();
                if (pg.size() <= maxAmbiguity) {
                    pg.addPeptidePair(pp);
                    pepProteinGroups.register(pg);
                }
            }
        }
    }
    // double tCountMod = targetProtDBSize;
    // tCountMod = -0.5 - Math.sqrt(1 + 8 * tCountMod) / 2;
    // double dCountMod = decoyProtDBSize;
    // dCountMod = dCountMod / tCountMod;
    // NOTE(review): fdrgroups is tallied here but never read afterwards —
    // presumably a leftover from debugging/statistics; confirm before removing
    for (ProteinGroup pg : pepProteinGroups) {
        fdrgroups.add(pg.getFDRGroup());
    }
    // too few groups for a meaningful FDR estimate -> return an empty level
    if (pepProteinGroups.size() < 10 && settings.getProteinGroupFDR() < 1) {
        result.proteinGroupFDR = GroupedFDRs;
        return;
    }
    // optionally require a minimum number of supporting peptide pairs per group
    if (settings.getMinProteinPepCount() > 1) {
        filterListByPeptideSuport(pepProteinGroups, settings.getMinProteinPepCount());
    }
    //Logger.getLogger(this.getClass().getName()).log(Level.INFO, "ProteinGroup fdr " + pepProteinGroups.size() + " Groups as Input.");
    this.calc.fdr(settings.getProteinGroupFDR(), settings, pepProteinGroups, GroupedFDRs, targetProtDBSize, decoyProtDBSize, settings.getMinProteinPepCount(), ignoreGroups, setElementFDR, settings.protLocalFDR(), false);
    // record subgroups that failed the validity checks for later reporting
    for (SubGroupFdrInfo rl : GroupedFDRs.getGroups()) {
        if (rl.didntPassCheck != null) {
            result.excludedGroups.add("ProteinGroup -> " + rl.fdrGroup + "(" + rl.didntPassCheck + ")");
        }
    }
    result.proteinGroupFDR = GroupedFDRs;
    // fdrProteinGroups = fdr(fdr, safetyFactor, pepProteinGroups, nextFdrProteinGroup, protFDRGroupsInput, countFdrProteinGroup, tCountMod, dCountMod, minPepCount, ignoreGroups, setElementFDR);
}
/**
 * Derive residue-pair links from the FDR-filtered peptide pairs and run the
 * link-level FDR on them.
 * <p>
 * Depending on {@code settings.isLinkDirectional()} either directional or
 * undirected links are built; linear and non-covalent peptide pairs are
 * excluded, and with a positive {@code getMaxLinkAmbiguity()} only links up
 * to that ambiguity are considered. If {@code groupLinksByHasInternal} is
 * set, between-links whose both protein groups also have internal support
 * are moved into a dedicated subgroup and the FDR is recomputed.
 *
 * @param ignoreGroups  if true, FDR subgrouping is ignored
 * @param setElementFDR whether the per-element FDR values should be assigned
 * @param settings      FDR thresholds and ambiguity options
 * @param result        holds the peptide-pair input and receives the link output
 */
public void calculateLinkFDR(boolean ignoreGroups, boolean setElementFDR, FDRSettings settings, FDRResult result) {
    // topN is fixed to 0 here, so the rescoring block below is currently inert
    int topN = 0;
    // clear downstream levels that depend on the links
    if (result.proteinGroupLinkFDR != null) {
        for (ProteinGroupLink l : result.proteinGroupLinkFDR) {
            l.setFdrPPI(null);
        }
        result.proteinGroupLinkFDR.clear();
    }
    if (result.proteinGroupPairFDR != null) {
        result.proteinGroupPairFDR.clear();
    }
    // linkFDRGroupsInput = new HashMap<Integer, Integer>();
    // nextFdrLink = new HashMap<Integer, Double>();
    // countFdrLink = new HashMap<Integer, Integer>();
    FDRResultLevel<ProteinGroupLink> GroupedFDRs = new FDRResultLevel<ProteinGroupLink>();
    GroupedFDRs.isDirectional = settings.isLinkDirectional();
    SelfAddHashSet<ProteinGroupLink> pepLinks = new SelfAddHashSet<ProteinGroupLink>();
    int maxAmbiguity = settings.getMaxLinkAmbiguity();
    if (settings.isLinkDirectional()) {
        if (maxAmbiguity == 0) {
            // unlimited ambiguity, directional links
            for (PeptidePair pp : result.peptidePairFDR.filteredResults()) {
                if (!(pp.isLinear() || pp.isNonCovalent()) || pp.isLoop()) {
                    ProteinGroupDirectionalLink dl = new ProteinGroupDirectionalLink(pp);
                    pp.setFdrLink(pepLinks.register(dl));
                }
            }
        } else {
            // directional links limited to the configured ambiguity
            for (PeptidePair pp : result.peptidePairFDR.filteredResults()) {
                if (!(pp.isLinear() || pp.isNonCovalent()) || pp.isLoop()) {
                    ProteinGroupDirectionalLink dl = new ProteinGroupDirectionalLink(pp);
                    if (dl.getAmbiguity() <= maxAmbiguity) {
                        pp.setFdrLink(pepLinks.register(dl));
                    }
                }
            }
        }
    } else {
        if (maxAmbiguity == 0) {
            // NOTE(review): unlike the three sibling branches this one does
            // not include "|| pp.isLoop()" in the filter — confirm whether
            // loop links are intentionally dropped for undirected, unlimited
            // ambiguity, or whether this is an oversight
            for (PeptidePair pp : result.peptidePairFDR.filteredResults()) {
                if (!(pp.isLinear() || pp.isNonCovalent())) {
                    pp.setFdrLink(pepLinks.register(pp.getLink()));
                }
            }
        } else {
            // undirected links limited to the configured ambiguity
            for (PeptidePair pp : result.peptidePairFDR.filteredResults()) {
                if (!(pp.isLinear() || pp.isNonCovalent()) || pp.isLoop()) {
                    ProteinGroupLink l = pp.getLink();
                    if (l.getAmbiguity() <= maxAmbiguity) {
                        pp.setFdrLink(pepLinks.register(pp.getLink()));
                    }
                }
            }
        }
    }
    // dead code while topN == 0 (see above)
    if (topN > 0) {
        for (ProteinGroupLink l : pepLinks) {
            l.setScore(l.getScore(topN));
        }
    }
    this.calc.fdr(settings.getProteinGroupLinkFDR(), settings, pepLinks, GroupedFDRs, targetLinkDBSize, decoyLinkDBSize, settings.getMinLinkPepCount(), ignoreGroups, setElementFDR, settings.linkLocalFDR(), settings.isGroupByPSMCount());
    if (groupLinksByHasInternal) {
        // collect protein groups (and decoy complements) that appear in
        // internal links of the first FDR pass ...
        HashSet<ProteinGroup> internal = new HashSet<>();
        for (ProteinGroupLink pgl : GroupedFDRs) {
            if (pgl.isInternal()) {
                internal.add(pgl.getProteinGroup1());
                internal.add(pgl.getProteinGroup1().decoyComplement());
                internal.add(pgl.getProteinGroup2());
                internal.add(pgl.getProteinGroup2().decoyComplement());
            }
        }
        // ... re-group between-links with internal support on both sides ...
        for (ProteinGroupLink pgl : pepLinks) {
            if (pgl.isBetween()
                    && internal.contains(pgl.getProteinGroup1())
                    && internal.contains(pgl.getProteinGroup2())) {
                pgl.setFDRGroup(pgl.getFDRGroup() + " int_support");
            }
        }
        // ... and redo the FDR with the new grouping
        FDRResultLevel<ProteinGroupLink> GroupedFDRs2 = new FDRResultLevel<ProteinGroupLink>();
        GroupedFDRs2.isDirectional = settings.isLinkDirectional();
        this.calc.fdr(settings.getProteinGroupLinkFDR(), settings, pepLinks, GroupedFDRs2, targetLinkDBSize, decoyLinkDBSize, settings.getMinLinkPepCount(), ignoreGroups, setElementFDR, settings.linkLocalFDR(), settings.isGroupByPSMCount());
        GroupedFDRs = GroupedFDRs2;
    }
    // record subgroups that failed the validity checks for later reporting
    for (SubGroupFdrInfo rl : GroupedFDRs.getGroups()) {
        if (rl.didntPassCheck != null) {
            result.excludedGroups.add("ResiduePair -> " + rl.fdrGroup + "(" + rl.didntPassCheck + ")");
        }
    }
    result.proteinGroupLinkFDR = GroupedFDRs;
    // fdrProtainGroupLinks = fdr(fdr, safetyFactor, pepLinks, nextFdrLink, linkFDRGroupsInput, countFdrLink, targetPepDBSize, decoyPepDBSize, minPepCount, ignoreGroups, setElementFDR);
}
/**
 * Remove from the given collection every element that is supported by fewer
 * than {@code min} peptide pairs.
 * <p>
 * Removal candidates are collected first and dropped in one pass so the
 * collection is never mutated while being iterated.
 *
 * @param list elements to filter (mutated in place)
 * @param min  minimal number of supporting peptide pairs required to stay
 */
protected void filterListByPeptideSuport(Collection<? extends AbstractFDRElement> list, int min) {
    ArrayList<AbstractFDRElement> unsupported = new ArrayList<>();
    for (AbstractFDRElement candidate : list) {
        if (candidate.getPeptidePairs().size() < min) {
            unsupported.add(candidate);
        }
    }
    list.removeAll(unsupported);
}
/**
 * Derive protein-group pairs (PPIs) from the FDR-filtered links and run the
 * protein-group-pair level FDR on them.
 * <p>
 * Depending on {@code settings.isPPIDirectional()} directional or undirected
 * pairs are built; with a positive {@code getMaxProteinAmbiguity()} only
 * links below that ambiguity contribute. If {@code groupPPIByHasInternal} is
 * set, between-pairs whose both protein groups also have internal support
 * are moved into a dedicated FDR subgroup before the calculation.
 *
 * @param ignoreGroups  if true, FDR subgrouping is ignored
 * @param setElementFDR whether the per-element FDR values should be assigned
 * @param settings      FDR thresholds and ambiguity/pep-count options
 * @param result        holds the link input and receives the PPI output
 */
public void calculateProteinGroupPairFDR(boolean ignoreGroups, boolean setElementFDR, FDRSettings settings, FDRResult result) {
    // groupByMinPepSupport is currently unused in the visible code path
    int groupByMinPepSupport = 0;
    boolean directional = settings.isPPIDirectional();
    int maxAmbiguity = settings.getMaxProteinAmbiguity();
    int minPepCount = settings.getMinPPIPepCount();
    // topN is fixed to 0 here, so the rescoring block below is currently inert
    int topN = 0;
    FDRResultLevel<ProteinGroupPair> GroupedFDRs = new FDRResultLevel<ProteinGroupPair>();
    GroupedFDRs.isDirectional = directional;
    SelfAddHashSet<ProteinGroupPair> linkPPIs = new SelfAddHashSet<ProteinGroupPair>();
    if (maxAmbiguity == 0) {
        // 0 means unlimited ambiguity: every link contributes a pair
        if (directional) {
            for (ProteinGroupLink l : result.proteinGroupLinkFDR.filteredResults()) {
                ProteinGroupDirectionalPair dpp = new ProteinGroupDirectionalPair(l);
                l.setFdrPPI(linkPPIs.register(dpp));
            }
        } else {
            for (ProteinGroupLink l : result.proteinGroupLinkFDR.filteredResults()) {
                l.setFdrPPI(linkPPIs.register(l.getProteinGroupPair()));
            }
        }
    } else {
        // NOTE(review): unlike the unlimited branches above, these
        // ambiguity-limited branches register the pair but never call
        // l.setFdrPPI(...) — confirm whether the missing back-reference is
        // intentional
        if (directional) {
            for (ProteinGroupLink l : result.proteinGroupLinkFDR.filteredResults()) {
                if (l.getProteins().size() - 1 <= maxAmbiguity) {
                    ProteinGroupDirectionalPair dpp = new ProteinGroupDirectionalPair(l);
                    linkPPIs.register(dpp);
                }
            }
        } else {
            for (ProteinGroupLink l : result.proteinGroupLinkFDR.filteredResults()) {
                if (l.getProteins().size() - 1 <= maxAmbiguity) {
                    linkPPIs.register(l.getProteinGroupPair());
                }
            }
        }
    }
    if (groupPPIByHasInternal) {
        // collect protein groups (and decoy complements) seen in internal pairs ...
        HashSet<ProteinGroup> internal = new HashSet<>();
        for (ProteinGroupPair ppi : linkPPIs) {
            if (ppi.isInternal()) {
                internal.add(ppi.getProtein1());
                internal.add(ppi.getProtein1().decoyComplement());
                internal.add(ppi.getProtein2());
                internal.add(ppi.getProtein2().decoyComplement());
            }
        }
        // ... and re-group between-pairs supported on both sides
        for (ProteinGroupPair ppi : linkPPIs) {
            if (ppi.isBetween()
                    && internal.contains(ppi.getProtein1())
                    && internal.contains(ppi.getProtein2())) {
                ppi.setFDRGroup(ppi.getFDRGroup() + " int_support");
            }
        }
    }
    // dead code while topN == 0 (see above)
    if (topN > 0) {
        for (ProteinGroupPair ppi : linkPPIs) {
            ppi.setScore(ppi.getScore(topN));
        }
    }
    this.calc.fdr(settings.getProteinGroupPairFDR(), settings, linkPPIs, GroupedFDRs, targetProtDBSize, decoyProtDBSize, minPepCount, ignoreGroups, setElementFDR, settings.ppiLocalFDR(), false);
    // record subgroups that failed the validity checks for later reporting
    for (SubGroupFdrInfo rl : GroupedFDRs.getGroups()) {
        if (rl.didntPassCheck != null) {
            result.excludedGroups.add("ProteinGroupPair -> " + rl.fdrGroup + "(" + rl.didntPassCheck + ")");
        }
    }
    result.proteinGroupPairFDR = GroupedFDRs;
}
/**
 * Restrict the link level to links that support a protein-group pair which
 * passed the PPI-level FDR; all other links are dropped.
 *
 * @param result result container whose link level is filtered in place
 */
public void filterFDRLinksByFDRProteinGroupPairs(FDRResult result) {
    // gather every link backing a surviving protein-group pair
    HashedArrayList<ProteinGroupLink> supported = new HashedArrayList<ProteinGroupLink>();
    for (ProteinGroupPair pair : result.proteinGroupPairFDR.filteredResults()) {
        supported.addAll(pair.getLinks());
    }
    // discard all links without such support
    result.proteinGroupLinkFDR.retainAll(supported);
}
/**
 * Restrict the peptide-pair level to pairs that are backed by a link which
 * passed the link-level FDR. Linear and non-covalent peptide pairs carry no
 * link and are always retained.
 *
 * @param result result container whose peptide-pair level is filtered in place
 */
public void filterFDRPeptidePairsByFDRProteinGroupLinks(FDRResult result) {
    long lastReport = System.currentTimeMillis();
    HashedArrayList<PeptidePair> retained = new HashedArrayList<PeptidePair>();
    int total = result.proteinGroupLinkFDR.size();
    int processed = 0;
    for (ProteinGroupLink link : result.proteinGroupLinkFDR.filteredResults()) {
        processed++;
        // emit progress at most every 5 seconds (condition checked every 10000 links)
        if (processed % 10000 == 0 && System.currentTimeMillis() - lastReport > 5000) {
            System.err.println((processed * 100f / total) + "% filterFDRPeptidePairsByFDRProteinGroupLinks");
            lastReport = System.currentTimeMillis();
        }
        retained.addAll(link.getPeptidePairs());
    }
    // linear and non-covalent pairs are kept regardless of link support
    for (PeptidePair pair : result.peptidePairFDR) {
        if (pair.isLinear() || pair.isNonCovalent()) {
            retained.add(pair);
        }
    }
    result.peptidePairFDR.retainAll(retained);
}
/**
 * Restrict the peptide-pair level to pairs claimed by a protein group that
 * passed the protein-group level FDR.
 *
 * @param result result container whose peptide-pair level is filtered in place
 */
public void filterFDRPeptidePairsByFDRProteinGroups(FDRResult result) {
    // collect all peptide pairs attached to a surviving protein group
    HashedArrayList<PeptidePair> supported = new HashedArrayList<PeptidePair>();
    for (ProteinGroup group : result.proteinGroupFDR.filteredResults()) {
        supported.addAll(group.getPeptidePairs());
    }
    result.peptidePairFDR.retainAll(supported);
}
/**
 * Restrict the protein-group level to groups that are referenced by at least
 * one peptide pair which passed the peptide-pair level FDR.
 *
 * @param result result container whose protein-group level is filtered in place
 */
public void filterFDRProteinGroupsByFDRPeptidePairs(FDRResult result) {
    // both protein groups of every surviving pair stay in
    HashedArrayList<ProteinGroup> referenced = new HashedArrayList<ProteinGroup>();
    for (PeptidePair pair : result.peptidePairFDR.filteredResults()) {
        referenced.add(pair.getProteinGroup1());
        referenced.add(pair.getProteinGroup2());
    }
    result.proteinGroupFDR.retainAll(referenced);
}
/**
 * Restrict the PSM level to PSMs that support a peptide pair which passed
 * the peptide-pair level FDR.
 *
 * @param result result container whose PSM level is filtered in place
 */
public void filterFDRPSMByFDRPeptidePairs(FDRResult result) {
    // every PSM behind a surviving peptide pair stays in
    HashedArrayList<PSM> supporting = new HashedArrayList<PSM>();
    for (PeptidePair pair : result.peptidePairFDR.filteredResults()) {
        supporting.addAll(pair.getAllPSMs());
    }
    result.psmFDR.retainAll(supporting);
}
/**
 * Install new FDR settings, storing a defensive copy so later changes to the
 * caller's settings object do not affect this instance.
 *
 * @param settings settings to copy and use from now on
 */
public void setSettings(FDRSettings settings) {
    this.settings = new FDRSettingsImpl(settings);
}
/**
 * Run the full cascading FDR calculation: PSM -> peptide pair -> protein
 * group -> residue-pair link -> protein-group pair, followed by the reverse
 * filtering passes so every level only keeps entries supported by the level
 * above it.
 * <p>
 * Fix: the warning issued for excluded groups read "groups where ignored";
 * corrected to "were".
 *
 * @param settings      FDR thresholds and options for all levels
 * @param setElementFDR whether per-element FDR values should be assigned
 * @return the populated result container with all filtered levels
 */
public FDRResult calculateFDR(FDRSettings settings, boolean setElementFDR) {
    FDRResult result = new FDRResult();
    this.settings = settings;
    boolean ignoreGroups = this.ignoreGroupsSetting;
    result.reportFactor = settings.getReportFactor();
    reset();
    result.excludedGroups = new ArrayList<>();
    // forward pass: compute the FDR level by level
    Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Input PSM :" + getAllPSMs().size() + "\n calculation psm-fdr");
    calculatePSMFDR(setElementFDR, ignoreGroups, result, settings);
    Logger.getLogger(this.getClass().getName()).log(Level.INFO, "fdr PSM :" + result.psmFDR.getResultCount() + "\n calculation peptidepair-fdr");
    calculatePeptidePairFDR(setElementFDR, result, settings, ignoreGroups);
    Logger.getLogger(this.getClass().getName()).log(Level.INFO, "fdr peptide-pairs :" + result.peptidePairFDR.getResultCount() + "\n calculation protein-group-fdr");
    calculateProteinGroupFDR(ignoreGroups, setElementFDR, settings, result);
    Logger.getLogger(this.getClass().getName()).log(Level.INFO, "fdr protein groups :" + result.proteinGroupFDR.getResultCount() + "\n filtering peptide pairs by protein groups");
    // peptide pairs must be backed by surviving protein groups before links are built
    filterFDRPeptidePairsByFDRProteinGroups(result);
    Logger.getLogger(this.getClass().getName()).log(Level.INFO, "fdr peptide-pairs :" + result.peptidePairFDR.getResultCount() + "\n calculation link-fdr");
    calculateLinkFDR(ignoreGroups, setElementFDR, settings, result);
    Logger.getLogger(this.getClass().getName()).log(Level.INFO, "fdr links :" + result.proteinGroupLinkFDR.getResultCount() + "\n calculation protein-group-pair-fdr");
    calculateProteinGroupPairFDR(ignoreGroups, setElementFDR, settings, result);
    // backward pass: propagate the top-level cut back down the hierarchy
    Logger.getLogger(this.getClass().getName()).log(Level.INFO, "fdr protein-group-pairs :" + result.proteinGroupPairFDR.getResultCount() + "\n filtering links by protein-group-pairs");
    filterFDRLinksByFDRProteinGroupPairs(result);
    Logger.getLogger(this.getClass().getName()).log(Level.INFO, "fdr links :" + result.proteinGroupLinkFDR.getResultCount() + "\n filtering peptide pairs by links");
    filterFDRPeptidePairsByFDRProteinGroupLinks(result);
    Logger.getLogger(this.getClass().getName()).log(Level.INFO, "fdr peptide-pairs :" + result.peptidePairFDR.getResultCount() + "\n filtering psm by peptide pairs");
    filterFDRPSMByFDRPeptidePairs(result);
    Logger.getLogger(this.getClass().getName()).log(Level.INFO, "fdr psms :" + result.psmFDR.getResultCount() + "\n filtering ProteinGroups by peptide pairs");
    filterFDRProteinGroupsByFDRPeptidePairs(result);
    Logger.getLogger(this.getClass().getName()).log(Level.INFO, "fdr protein groups :" + result.proteinGroupFDR.getResultCount());
    if (!result.excludedGroups.isEmpty()) {
        if (settings.ignoreValidityChecks()) {
            // validity checks ignored: groups stayed in, so only warn
            Logger.getLogger(this.getClass().getName()).log(Level.WARNING, "For some subgroups the FDR calculation is likely unreliable:\n" + MyArrayUtils.toString(result.excludedGroups, ";\n"));
        } else {
            Logger.getLogger(this.getClass().getName()).log(Level.WARNING, "Some FDR groups were ignored as being unreliable:\n" + MyArrayUtils.toString(result.excludedGroups, ";\n"));
        }
    }
    return result;
}
/**
 * Render a one-line summary of the result counts on every FDR level.
 * <p>
 * Uses a {@link StringBuilder} instead of the original {@code StringBuffer}:
 * the buffer is a method-local that never escapes, so the synchronization of
 * StringBuffer is pure overhead. Output is unchanged.
 *
 * @param result the result container to summarize
 * @return e.g. {@code "Input PSMs:...; FDR PSM:...; FDR PeptidePairs:...; ..."}
 */
public String summaryString(FDRResult result) {
    StringBuilder sb = new StringBuilder();
    sb.append("Input PSMs:");
    sb.append(getAllPSMs().size());
    sb.append("; FDR PSM:");
    sb.append(result.psmFDR.getResultCount());
    sb.append("; FDR PeptidePairs:");
    sb.append(result.peptidePairFDR.getResultCount());
    sb.append("; FDR ProteinGroups:");
    sb.append(result.proteinGroupFDR.getResultCount());
    sb.append("; FDR Links:");
    sb.append(result.proteinGroupLinkFDR.getResultCount());
    sb.append("; FDR PPIs:");
    sb.append(result.proteinGroupPairFDR.getResultCount());
    return sb.toString();
}
/**
 * Return the PSMs used as input for the FDR calculation.
 * Currently an alias for {@link #getAllPSMs()}.
 *
 * @return all input PSMs
 */
public Collection<PSM> getInputPSMs() {
    return getAllPSMs();
}
/**
 * Write all result files, deriving the file extension from the separator:
 * comma or semicolon separated output goes into {@code .csv} files, anything
 * else into {@code .tsv} files.
 *
 * @param path      output directory
 * @param baseName  prefix for all generated file names
 * @param seperator column separator for the output files
 * @param result    the FDR result to write
 * @throws FileNotFoundException if an output file cannot be created
 */
public void writeFiles(String path, String baseName, String seperator, FDRResult result) throws FileNotFoundException {
    String fileextension = (seperator.equals(",") || seperator.equals(";")) ? ".csv" : ".tsv";
    writeFiles(path, baseName, fileextension, seperator, result);
}
public void writeFiles(String path, String baseName, String fileextension, String seperator, FDRResult result) throws FileNotFoundException {
CSVRandomAccess csvFormater = new CSVRandomAccess(seperator.charAt(0), '"');
csvFormater.setLocale(outputlocale);
File folder = new File(path);
int n = 0;
if (!folder.exists()) {
folder.mkdirs();
} else if (!folder.isDirectory()) {
while (folder.exists() && folder.isDirectory()) {
folder = new File(path + "_" + (++n));
}
path = folder.getAbsolutePath();
}
String extension = "_xiFDR" + getXiFDRVersion() + fileextension;
CountOccurence<String> fdrPSMGroupCounts = new CountOccurence<String>();
String outNameNAPS = path + "/" + baseName + "_NAPS_PSM" + extension;
PrintWriter psmNAPSOut = null;
String outNameLinear = path + "/" + baseName + "_Linear_PSM" + extension;
PrintWriter psmLinearOut = null;
String outName = path + "/" + baseName + "_CSM" + extension;
PrintWriter psmOut = null;
if (!csvSummaryOnly) {
Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Write CSM-results to " + outName);
psmOut = new PrintWriter(outName);
String header = csvFormater.valuesToString(getPSMHeader());
psmOut.println(header);
psmLinearOut = new PrintWriter(outNameLinear);
psmLinearOut.println(header);
psmNAPSOut = new PrintWriter(outNameNAPS);
psmNAPSOut.println(header);
} else {
psmOut = NullOutputStream.NULLPRINTWRITER;
psmLinearOut = NullOutputStream.NULLPRINTWRITER;
psmNAPSOut = NullOutputStream.NULLPRINTWRITER;
}
ArrayList<PSM> psms = new ArrayList<PSM>(result.psmFDR.getResultCount());
for (SubGroupFdrInfo g : result.psmFDR.getGroups()) {
psms.addAll(g.filteredResult);
}
java.util.Collections.sort(psms, new Comparator<PSM>() {
public int compare(PSM o1, PSM o2) {
return Double.compare(o2.getScore(), o1.getScore());
}
});
// if (!isPSMScoreHighBetter())
// java.util.Collections.reverse(fdrPSM);
int psmCount = 0;
int psmNonCovTT = 0;
int psmNonCovTD = 0;
int psmNonCovDD = 0;
int psmInternalTT = 0;
int psmInternalTD = 0;
int psmInternalDD = 0;
int psmBetweenTT = 0;
int psmBetweenTD = 0;
int psmBetweenDD = 0;
int psmLinearT = 0;
int psmLinearD = 0;
for (PSM pp : psms) {
fdrPSMGroupCounts.add(pp.getFDRGroup());
String line = csvFormater.valuesToString(getPSMOutputLine(pp));
if (!csvSummaryOnly) {
if (pp.isLinear()) {
psmLinearOut.println(line);
} else if (pp.isNonCovalent()) {
psmNAPSOut.println(line);
} else {
psmOut.println(line);
}
}
if (pp.isLinear()) {
if (pp.isTT()) {
psmLinearT++;
} else if (pp.isTD()) {
psmLinearD++;
}
} else if (pp.isNonCovalent()) {
if (pp.isTT()) {
psmNonCovTT++;
} else if (pp.isTD()) {
psmNonCovTD++;
} else {
psmNonCovDD++;
}
} else if (pp.isInternal()) {
if (pp.isTT()) {
psmInternalTT++;
} else if (pp.isTD()) {
psmInternalTD++;
} else {
psmInternalDD++;
}
} else {
if (pp.isTT()) {
psmBetweenTT++;
} else if (pp.isTD()) {
psmBetweenTD++;
} else {
psmBetweenDD++;
}
}
psmCount++;
}
if (!csvSummaryOnly) {
psmOut.flush();
psmOut.close();
psmLinearOut.flush();
psmLinearOut.close();
psmNAPSOut.flush();
psmNAPSOut.close();
// if we had no linears we just delete the linear file again
if (psmLinearT+psmLinearD == 0) {
new File(outNameLinear).delete();
}
// same for NAPS
if (psmNonCovTT+psmNonCovTD+psmNonCovDD == 0) {
new File(outNameNAPS).delete();
}
}
ArrayList<PeptidePair> peps = new ArrayList<PeptidePair>(result.peptidePairFDR.getResultCount());
for (SubGroupFdrInfo g : result.peptidePairFDR.getGroups()) {
peps.addAll(g.filteredResult);
}
java.util.Collections.sort(peps, new Comparator<PeptidePair>() {
public int compare(PeptidePair o1, PeptidePair o2) {
return Double.compare(o2.getScore(), o1.getScore());
}
});
CountOccurence<String> fdrPepPairGroupCounts = new CountOccurence<String>();
int pepInternalTT = 0;
int pepInternalTD = 0;
int pepInternalDD = 0;
int pepNonCovTT = 0;
int pepNonCovTD = 0;
int pepNonCovDD = 0;
int pepBetweenTT = 0;
int pepBetweenTD = 0;
int pepBetweenDD = 0;
int pepCount = 0;
int pepLinearT = 0;
int pepLinearD = 0;
PrintWriter pepsOut = null;
PrintWriter pepsLinearOut = null;
PrintWriter pepsNonCovOut = null;
if (!csvSummaryOnly) {
outName = path + "/" + baseName + "_PeptidePairs" + extension;
Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Write peptide pairs results to " + outName);
pepsOut = new PrintWriter(outName);
String xlPepsHeader = csvFormater.valuesToString(getXLPepsHeader());
pepsOut.println(xlPepsHeader);
if (psmLinearT+psmLinearD > 0 ) {
pepsLinearOut = new PrintWriter(path + "/" + baseName + "_Linear_Peptides" + extension);
Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Write linear peptide results to " + pepsLinearOut);
String linearPepsHeader = csvFormater.valuesToString(getLinearPepsHeader());
pepsLinearOut.println(linearPepsHeader);
}
if (psmNonCovTT+psmNonCovTD+psmNonCovDD > 0 ) {
Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Write non covalent peptide pair results to " + outName);
pepsNonCovOut = new PrintWriter(outName);
pepsOut.println(xlPepsHeader);
}
} else {
pepsOut = NullOutputStream.NULLPRINTWRITER;
pepsLinearOut = pepsOut;
}
for (PeptidePair pp : peps) {
fdrPepPairGroupCounts.add(pp.getFDRGroup());
if (pp.isLinear() && !pp.isLoop()) {
if (!csvSummaryOnly) {
String line = csvFormater.valuesToString(getLinearPepeptideOutputLine(pp));
pepsLinearOut.println(line);
}
if (pp.isTT()) {
pepLinearT++;
} else if (pp.isTD()) {
pepLinearD++;
}
} else {
if (!csvSummaryOnly) {
String line = csvFormater.valuesToString(getXlPepeptideOutputLine(pp));
if (pp.isNonCovalent()) {
pepsNonCovOut.println(line);
} else {
pepsOut.println(line);
}
}
if (pp.isNonCovalent()) {
if (pp.isTT()) {
pepNonCovTT++;
} else if (pp.isTD()) {
pepNonCovTD++;
} else {
pepNonCovDD++;
}
} else if (pp.isInternal()) {
if (pp.isTT()) {
pepInternalTT++;
} else if (pp.isTD()) {
pepInternalTD++;
} else {
pepInternalDD++;
}
} else {
if (pp.isTT()) {
pepBetweenTT++;
} else if (pp.isTD()) {
pepBetweenTD++;
} else {
pepBetweenDD++;
}
}
}
pepCount++;
}
if (!csvSummaryOnly) {
pepsOut.flush();
pepsOut.close();
if (pepsLinearOut != null) {
pepsLinearOut.flush();
pepsLinearOut.close();
}
if (pepsNonCovOut != null) {
pepsNonCovOut.flush();
pepsNonCovOut.close();
}
}
CountOccurence<String> fdrLinkGroupCounts = new CountOccurence<String>();
ArrayList<ProteinGroupLink> links = new ArrayList<ProteinGroupLink>(result.proteinGroupLinkFDR.getResultCount());
for (SubGroupFdrInfo g : result.proteinGroupLinkFDR.getGroups()) {
links.addAll(g.filteredResult);
}
java.util.Collections.sort(links, new Comparator<ProteinGroupLink>() {
public int compare(ProteinGroupLink o1, ProteinGroupLink o2) {
return Double.compare(o2.getScore(), o1.getScore());
}
});
// if (!isPSMScoreHighBetter())
// java.util.Collections.reverse(fdrProtainGroupLinks);
int linkInternalTT = 0;
int linkInternalTD = 0;
int linkInternalDD = 0;
int linkBetweenTT = 0;
int linkBetweenTD = 0;
int linkBetweenDD = 0;
int linkCount = 0;
PrintWriter linksOut = null;
if (!csvSummaryOnly) {
linksOut = new PrintWriter(path + "/" + baseName + "_Links" + extension);
String header = csvFormater.valuesToString(getLinkOutputHeader());
linksOut.println(header);
} else {
linksOut = NullOutputStream.NULLPRINTWRITER;
}
// write out a table of all links
for (ProteinGroupLink l : links) {
fdrLinkGroupCounts.add(l.getFDRGroup());
if (!csvSummaryOnly) {
String line = csvFormater.valuesToString(getLinkOutputLine(l));
linksOut.println(line);
}
if (l.isInternal) {
if (l.isTT()) {
linkInternalTT++;
} else if (l.isTD()) {
linkInternalTD++;
} else {
linkInternalDD++;
}
} else {
if (l.isTT()) {
linkBetweenTT++;
} else if (l.isTD()) {
linkBetweenTD++;
} else {
linkBetweenDD++;
}
}
linkCount++;
}
if (!csvSummaryOnly) {
linksOut.flush();
linksOut.close();
}
CountOccurence<String> fdrPPIGroupCounts = new CountOccurence<String>();
ArrayList<ProteinGroupPair> ppis = new ArrayList<ProteinGroupPair>(result.proteinGroupPairFDR.getResultCount());
for (SubGroupFdrInfo g : result.proteinGroupPairFDR.getGroups()) {
ppis.addAll(g.filteredResult);
}
java.util.Collections.sort(ppis, new Comparator<ProteinGroupPair>() {
public int compare(ProteinGroupPair o1, ProteinGroupPair o2) {
return Double.compare(o2.getScore(), o1.getScore());
}
});
// if (!isPSMScoreHighBetter())
// java.util.Collections.reverse(fdrProtainGroupPair);
int ppiInternalTT = 0;
int ppiInternalTD = 0;
int ppiInternalDD = 0;
int ppiBetweenTT = 0;
int ppiBetweenTD = 0;
int ppiBetweenDD = 0;
int ppiCount = 0;
// write out a table of all proteinpairs
PrintWriter ppiOut = null;
if (!csvSummaryOnly) {
ppiOut = new PrintWriter(path + "/" + baseName + "_ppi" + extension);
String header = csvFormater.valuesToString(getPPIOutputHeader());
ppiOut.println(header);
} else {
ppiOut = NullOutputStream.NULLPRINTWRITER;
}
for (ProteinGroupPair pgp : ppis) {
fdrPPIGroupCounts.add(pgp.getFDRGroup());
if (!csvSummaryOnly) {
String line = csvFormater.valuesToString(getPPIOutputLine(pgp));
ppiOut.println(line);
}
if (pgp.isInternal()) {
if (pgp.isTT()) {
ppiInternalTT++;
} else if (pgp.isTD()) {
ppiInternalTD++;
} else {
ppiInternalDD++;
}
} else {
if (pgp.isTT()) {
ppiBetweenTT++;
} else if (pgp.isTD()) {
ppiBetweenTD++;
} else {
ppiBetweenDD++;
}
}
ppiCount++;
}
if (!csvSummaryOnly) {
ppiOut.flush();
ppiOut.close();
}
CountOccurence<String> fdrProteinGroupCounts = new CountOccurence<String>();
ArrayList<ProteinGroup> pgs = new ArrayList<ProteinGroup>(result.proteinGroupFDR.getResultCount());
for (SubGroupFdrInfo g : result.proteinGroupFDR.getGroups()) {
pgs.addAll(g.filteredResult);
}
java.util.Collections.sort(pgs, new Comparator<ProteinGroup>() {
@Override
public int compare(ProteinGroup o1, ProteinGroup o2) {
return Double.compare(o2.getScore(), o1.getScore());
}
});
// if (!isPSMScoreHighBetter())
// java.util.Collections.reverse(fdrProteinGroups);
int proteinGroupT = 0;
int proteinGroupD = 0;
int proteinCount = 0;
// write out a table of all proteinpairs
PrintWriter pgOut = null;
if (!csvSummaryOnly) {
pgOut = new PrintWriter(path + "/" + baseName + "_proteingroups" + extension);
pgOut.println(csvFormater.valuesToString(getProteinGroupOutputHeader()));
} else {
pgOut = NullOutputStream.NULLPRINTWRITER;
}
for (ProteinGroup pg : pgs) {
fdrProteinGroupCounts.add(pg.getFDRGroup());
if (!csvSummaryOnly) {
pgOut.println(csvFormater.valuesToString(getProteinGroupOutput(pg)));
}
if (pg.isDecoy()) {
proteinGroupD++;
} else {
proteinGroupT++;
}
proteinCount++;
}
if (!csvSummaryOnly) {
pgOut.flush();
pgOut.close();
}
// if (!isPSMScoreHighBetter())
// java.util.Collections.reverse(fdrProteinGroups);
// write out a table of all proteinpairs
PrintWriter summaryOut = null;
if (singleSummary) {
if (singleSummaryOut == null) {
singleSummaryOut = new PrintWriter(path + "/" + baseName + "_summary" + extension);
}
summaryOut = singleSummaryOut;
summaryOut.println("SummaryFile:" + baseName + "_summary" + extension);
} else {
summaryOut = new PrintWriter(path + "/" + baseName + "_summary" + extension);
}
summaryOut.println("xiFDR Version:" + seperator + OfflineFDR.xiFDRVersion);
summaryOut.println("Source:" + seperator + csvFormater.quoteValue(getSource()));
summaryOut.println(",\"Target FDRs:\"" + seperator + "Minimum supporting peptides" + seperator + "Directional");
summaryOut.println("psm" + seperator + " " + result.psmFDR.getTargetFDR() + seperator + seperator + isPsm_directional());
summaryOut.println("\"peptide pair\"" + seperator + " " + result.peptidePairFDR.getTargetFDR() + seperator + seperator + isPeptides_directional());
summaryOut.println("\"protein group\"" + seperator + " " + result.proteinGroupFDR.getTargetFDR() + seperator + getMinPepPerProteinGroup());
summaryOut.println("Link" + seperator + " " + result.proteinGroupLinkFDR.getTargetFDR() + seperator + getMinPepPerProteinGroupLink() + seperator + this.isLinks_directional());
summaryOut.println("\"Protein Group Pair\"" + seperator + " " + result.proteinGroupPairFDR.getTargetFDR() + seperator + getMinPepPerProteinGroupPair() + seperator + this.isPpi_directional());
summaryOut.println("\n\"max next level fdr factor (report-factor):\"" + seperator + result.reportFactor);
summaryOut.println("\"minimum peptide length\"" + seperator + "" + (m_minPepLength <= 1 ? "unlimited" : m_minPepLength));
if (result.uniquePSMs) {
summaryOut.println("\"unique PSMs\"");
}
if (settings.getMinPeptideCoverageFilter() > 0) {
summaryOut.println("\"minimum peptide coverage\"" + seperator + settings.getMinPeptideCoverageFilter());
} else {
summaryOut.println();
}
if (settings.getMinDeltaScoreFilter() > 0) {
summaryOut.println("\"delta/score >\"" + seperator + settings.getMinDeltaScoreFilter());
} else {
summaryOut.println();
}
summaryOut.println("\n\"Accepted ambiguity:\"");
summaryOut.println("\"Links for one peptide pair\"" + seperator + "" + (settings.getMaxLinkAmbiguity() == 0 ? "unlimited" : settings.getMaxLinkAmbiguity()));
summaryOut.println("\"Protein pairs for one peptide pair\"" + seperator + "" + (settings.getMaxProteinAmbiguity() == 0 ? "unlimited" : settings.getMaxProteinAmbiguity()));
summaryOut.println();
if (ignoreGroupsSetting) {
summaryOut.println("\"Groups Where Ignored \"");
} else {
summaryOut.println("\"Length-Group:\",\"" + RArrayUtils.toString(PeptidePair.getLenghtGroup(), seperator) + "\"");
}
summaryOut.println();
if (settings.doOptimize() != null) {
summaryOut.println("\"Boost\",\"" + settings.doOptimize().m_shortname + "\"," + (settings.getBoostBetween() ? "Between" : "") + ", steps:," + settings.getBoostingSteps());
summaryOut.println("\"Boost Include\",\""
+ (settings.boostDeltaScore() ? "delta score;" : "")
+ (settings.boostMinFragments() ? "Min Fragments;" : "")
+ (settings.boostPepCoverage() ? "min Coverage;" : "")
+ (settings.boostPSMs() ? "PSMs;" : "")
+ (settings.boostPeptidePairs() ? "Peptide Pairs;" : "")
+ (settings.boostProteins() ? "Protein Groups;" : "")
+ (settings.boostLinks() ? "Residue Pairs;" : "")
+ "\""
);
}
summaryOut.println();
// summaryOut.println("Input PSMs" +seperator + "fdr PSM" +seperator + "fdr peptide pairs" +seperator + "fdr links" +seperator + "fdr ppi");
summaryOut.println("\"class\"" + seperator + "\"all\"" + seperator + "\"Self TT\""
+ seperator + "\"Self TD\"" + seperator + "\"Self DD\""
+ seperator + "\"Between TT\"" + seperator + "\"Between TD\"" + seperator + "\"Between DD\""
+ seperator + "\"Non-Covalent TT\"" + seperator + "\"Non-Covalent TD\"" + seperator + "\"Non-Covalent DD\""
+ seperator + "\"Linear T\"" + seperator + "\"Linear D\"");
summaryOut.println("\"Input PSMs\"" + seperator + "" + getAllPSMs().size());
summaryOut.println("\"fdr CSMs\"" + seperator + psmCount + seperator + psmInternalTT + seperator + psmInternalTD + seperator + psmInternalDD
+ seperator + psmBetweenTT + seperator + psmBetweenTD + seperator + psmBetweenDD
+ seperator + psmNonCovTT + seperator + psmNonCovTD + seperator + psmNonCovDD
+ seperator + psmLinearT + seperator + psmLinearD);
summaryOut.println("\"fdr Peptide Pairs\"" + seperator + pepCount + seperator + pepInternalTT + seperator + pepInternalTD + seperator + pepInternalDD
+ seperator + pepBetweenTT + seperator + pepBetweenTD + seperator + pepBetweenDD
+ seperator + pepNonCovTT + seperator + pepNonCovTD + seperator + pepNonCovDD
+ seperator + pepLinearT + seperator + pepLinearD);
summaryOut.println("\"fdr Link\"" + seperator + linkCount + seperator + linkInternalTT + seperator + linkInternalTD + seperator + linkInternalDD
+ seperator + linkBetweenTT + seperator + linkBetweenTD + seperator + linkBetweenDD);
summaryOut.println("\"fdr Protein Group Pairs\"" + seperator + ppiCount + seperator + ppiInternalTT + seperator + ppiInternalTD + seperator + ppiInternalDD
+ seperator + ppiBetweenTT + seperator + ppiBetweenTD + seperator + ppiBetweenDD);
summaryOut.println("\"fdr Protein Groups\"" + seperator + proteinCount + seperator + seperator + seperator
+ seperator + seperator + seperator
+ seperator + seperator + seperator
+ seperator + proteinGroupT + seperator + proteinGroupD);
// summaryOut.println("\n\"linear fdr psm\"" + seperator + "" + (psmLinearT + psmLinearD) + "\n\"linear fdr peptide pairs\"" + seperator + "" + (pepLinearT + pepLinearD) + "\n\nfdr protein groups" + seperator + "" + fdrProteinGroups.size());
String header = "Petide Spectrum Matches detailed summary";
// HashMap<Integer,String> groups = new HashMap<Integer,String>();
// for (String k : result.psmFDR.getGroupIDs()) {
// groups.put(k,PSM.getFDRGroupName(k));
// }
FDRResultLevel level = result.psmFDR;
levelSummary(summaryOut, header, level, seperator);
header = "Petide Pairs detailed summary";
// groups.clear();
// for (Integer k : result.peptidePairFDR.getGroupIDs()) {
// groups.put(k,PeptidePair.getFDRGroupName(k));
// }
level = result.peptidePairFDR;
levelSummary(summaryOut, header, level, seperator);
header = "Protein groups detailed summary";
// groups.clear();
// for (Integer k : result.proteinGroupFDR.getGroupIDs()) {
// groups.put(k,ProteinGroup.getFDRGroupName(k));
// }
level = result.proteinGroupFDR;
levelSummary(summaryOut, header, level, seperator);
header = "Protein group links detailed summary";
level = result.proteinGroupLinkFDR;
levelSummary(summaryOut, header, level, seperator);
header = "Protein group pairs detailed summary";
level = result.proteinGroupPairFDR;
levelSummary(summaryOut, header, level, seperator);
summaryOut.flush();
if (!singleSummary) {
summaryOut.close();
}
}
/**
 * Maps each accepted peptide pair onto the protein groups of its peptides and
 * registers those groups in {@code pepProteinGroups}.
 * <p>
 * If a maximum protein ambiguity is configured, any peptide pair where one of
 * its peptides maps to more proteins than allowed is skipped entirely.
 *
 * @param forwardPeps      peptide pairs that passed the previous FDR level
 * @param pepProteinGroups set collecting (and deduplicating) the protein groups
 */
protected void peptidePairsToProteinGroups(ArrayList<PeptidePair> forwardPeps, SelfAddHashSet<ProteinGroup> pepProteinGroups) {
    // do we care about ambiguity
    if (settings.getMaxProteinAmbiguity() > 0) {
        // yes we do
        // PeptidePairs to Protein groups
        peploop:
        for (PeptidePair pp : forwardPeps) {
            ArrayList<ProteinGroup> groups = new ArrayList<ProteinGroup>(2);
            for (Peptide p : pp.getPeptides()) {
                ProteinGroup pg = p.getProteinGroup();
                if (pg.proteinCount() > settings.getMaxProteinAmbiguity()) {
                    // NOTE(review): if a later peptide is too ambiguous, an earlier
                    // peptide's group has already received addPeptidePair() below
                    // (though it is never registered) — confirm this is intended
                    continue peploop;
                }
                pg.addPeptidePair(pp);
                groups.add(pg);
            }
            // only register the groups once the whole pair passed the ambiguity check
            for (ProteinGroup pg : groups) {
                pepProteinGroups.register(pg);
            }
        }
    } else {
        // no ambiguity limit: report everything
        for (PeptidePair pp : forwardPeps) {
            for (Peptide p : pp.getPeptides()) {
                ProteinGroup pg = pepProteinGroups.register(p.getProteinGroup());
                pg.addPeptidePair(pp);
            }
        }
    }
}
// protected ArrayList<PeptidePair> proteinGroupFDR(double ProteinGroupFDR, SelfAddHashSet<ProteinGroup> pepProteinGroups, double safetyFactor, HashSet<ProteinGroup> fdrProteinGroupHS, ArrayList<PeptidePair> forwardPeps, int minPepCount, boolean ignoreGroups) {
// // filter out the peptidepairs based on proteins
//// if (ProteinGroupFDR < 1) {
//
// // we are talking about a purly linear view of things
// // but I want to reuse the same function for fdr
// // therefore I need to change the database size, in a way, that
// // the fdr calculation is normalized for linear databses
// double tCountMod = targetProtDBSize;
// tCountMod = -0.5 - Math.sqrt(1 + 8 * tCountMod) / 2;
// double dCountMod = decoyProtDBSize;
// dCountMod = dCountMod / tCountMod;
//
//
// Logger.getLogger(this.getClass().getName()).log(Level.INFO, "ProteinGroup fdr " + pepProteinGroups.size() + " Groups as Input.");
//
// fdrProteinGroups = fdr(ProteinGroupFDR, safetyFactor, pepProteinGroups, nextFdrProteinGroup, protFDRGroupsInput, countFdrProteinGroup, tCountMod, dCountMod, minPepCount, ignoreGroups, true);
//
// fdrProteinGroupHS.addAll(fdrProteinGroups);
//
// Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Filter peptidepairs by " + fdrProteinGroupHS.size() + " proteingroups.");
// ArrayList<PeptidePair> pgtopep = new ArrayList<PeptidePair>();
// //filter back to peptidepairs
// if (ProteinGroupFDR < 1) {
// for (PeptidePair pp : forwardPeps) {
// boolean found = true;
// for (Peptide p : pp.getPeptides()) {
// if (!fdrProteinGroupHS.contains(p.getProteinGroup())) {
// found = false;
// break;
// }
// }
// if (found) {
// pgtopep.add(pp);
// }
// }
//
// forwardPeps = pgtopep;
// }
// Logger.getLogger(this.getClass().getName()).log(Level.INFO, forwardPeps.size() + " peptidepairs made the ProteinGroup fdr cutoff.");
//// } else {
//// fdrProteinGroups= new ArrayList<ProteinGroup>(pepProteinGroups);
//// }
// return forwardPeps;
// }
// protected void PeptidePairsToGroupLinks(ArrayList<PeptidePair> forwardPeps, SelfAddHashSet<ProteinGroupLink> pepProteinGroupLinks, ArrayList<PeptidePair> linearPepPairs) {
// // link level fdr
// // turn peptide pairs to links
// // peptide pairs to links
// if (m_maximumLinkAmbiguity > 0) {
// for (PeptidePair pp : forwardPeps) {
// if (!pp.isLinear()) {
// ProteinGroupLink pgl = pp.getLink();
// if (pgl.getAmbiguity() <= m_maximumLinkAmbiguity) {
// pepProteinGroupLinks.register(pgl);
// }
// } else {
// linearPepPairs.add(pp);
// }
// }
// } else {
// for (PeptidePair pp : forwardPeps) {
// if (!pp.isLinear()) {
// pepProteinGroupLinks.register(pp.getLink());
// } else {
// linearPepPairs.add(pp);
// }
// }
// }
// }
/**
 * @return the maximum allowed link ambiguity (delegates to {@code settings})
 */
public int getMaximumLinkAmbiguity() {
    return settings.getMaxLinkAmbiguity();
}
/**
 * @param m_maximumLinkAmbiguity the maximum allowed link ambiguity
 *                               (forwarded to {@code settings})
 */
public void setMaximumLinkAmbiguity(int m_maximumLinkAmbiguity) {
    settings.setMaxLinkAmbiguity(m_maximumLinkAmbiguity);
}
/**
 * @return the minimum peptide length required for a PSM to be considered
 */
public int getMinimumPeptideLength() {
    return m_minPepLength;
}
/**
 * @param minimumPeptideLength the minimum peptide length required for a PSM
 *                             to be considered
 */
public void setMinimumPeptideLength(int minimumPeptideLength) {
    this.m_minPepLength = minimumPeptideLength;
}
/**
 * @return the maximum allowed protein ambiguity (delegates to {@code settings})
 */
public int getMaximumProteinAmbiguity() {
    return settings.getMaxProteinAmbiguity();
}
/**
 * @param m_maximumProteinAmbiguity the maximum allowed protein ambiguity
 *                                  (forwarded to {@code settings})
 */
public void setMaximumProteinAmbiguity(int m_maximumProteinAmbiguity) {
    this.settings.setMaxProteinAmbiguity(m_maximumProteinAmbiguity);
}
// private <T extends FDRSelfAdd<T>> HashedArrayList<T> fdr(double fdr, double safetyfactor, Collection<T> c, HashMap<Integer, Double> nextFDR, HashMap<Integer, Integer> inputCounts, HashMap<Integer, Integer> resultCounts, double tCount, double dCount, int minPepCount, boolean ignoreGroups,boolean setElementFDR) {
// HashMap<Integer, ArrayList<T>> groupedList = new HashMap<Integer, ArrayList<T>>(4);
// HashMap<Integer, UpdateableInteger> gTT = new HashMap<Integer, UpdateableInteger>(8);
// HashMap<Integer, UpdateableInteger> gTD = new HashMap<Integer, UpdateableInteger>(8);
// HashMap<Integer, UpdateableInteger> gDD = new HashMap<Integer, UpdateableInteger>(8);
//
// HashedArrayList<T> ret = new HashedArrayList<T>(c.size());
//
// if (c.isEmpty()) {
// return ret;
// }
//
// if (fdr == 1) {
// fdr = 1000;
// safetyfactor = 1000;
// }
//
//
// // split the data up into fdr-groups
// for (T e : c) {
// if (e.getPeptidePairCount() >= minPepCount) {
// Integer fdrgroup = e.getFDRGroup();
// if (ignoreGroups) {
// fdrgroup = -1;
// } else {
// fdrgroup = e.getFDRGroup();
// }
// UpdateableInteger cTT;
// UpdateableInteger cTD;
// UpdateableInteger cDD;
// ArrayList<T> gl = groupedList.get(fdrgroup);
// if (gl == null) {
// gl = new ArrayList<T>();
// groupedList.put(fdrgroup, gl);
// if (e.isTT()) {
// gTT.put(fdrgroup, new UpdateableInteger(1));
// gTD.put(fdrgroup, new UpdateableInteger(0));
// gDD.put(fdrgroup, new UpdateableInteger(0));
// } else if (e.isTD()) {
// gTT.put(fdrgroup, new UpdateableInteger(0));
// gTD.put(fdrgroup, new UpdateableInteger(1));
// gDD.put(fdrgroup, new UpdateableInteger(0));
// } else {
// gTT.put(fdrgroup, new UpdateableInteger(0));
// gTD.put(fdrgroup, new UpdateableInteger(0));
// gDD.put(fdrgroup, new UpdateableInteger(1));
// }
// } else {
// if (e.isTT()) {
// gTT.get(fdrgroup).value++;
// } else if (e.isTD()) {
// gTD.get(fdrgroup).value++;
// } else {
// gDD.get(fdrgroup).value++;
// }
// }
// gl.add(e);
// }
// }
//
// for (Integer fdrgroup : groupedList.keySet()) {
// int TT = gTT.get(fdrgroup).value;
// int TD = gTD.get(fdrgroup).value;
// int DD = gDD.get(fdrgroup).value;
// ArrayList<T> group = groupedList.get(fdrgroup);
// inputCounts.put(fdrgroup, group.size());
// ArrayList<T> groupResult = new ArrayList<T>();
// double prevFDR = subFDR(TD, DD, TT, group, fdr, safetyfactor, groupResult, tCount, dCount,setElementFDR);
// nextFDR.put(fdrgroup, prevFDR);
// ret.addAll(groupResult);
// resultCounts.put(fdrgroup, groupResult.size());
//
// }
//
//
// if (ret.size() == 0 && c.size() > 100) {
// // we didn't get any results through. try a non grouped
// int TT = 0;
// int TD = 0;
// int DD = 0;
// for (Integer fdrgroup : groupedList.keySet()) {
// TT += gTT.get(fdrgroup).value;
// TD += gTD.get(fdrgroup).value;
// DD += gDD.get(fdrgroup).value;
// }
// ArrayList<T> all = new ArrayList<T>(c);
// inputCounts.put(-1, c.size());
// nextFDR.put(-1, subFDR(TD, DD, TT, all, fdr, safetyfactor, ret, tCount, dCount,setElementFDR));
// resultCounts.put(-1, ret.size());
// }
//
//
// return ret;
// }
// private <T> SubGroupFdrInfo<T> joinSubFDRInfos(HashMap<Integer, SubGroupFdrInfo<T>> groupInfos, boolean store) {
// SubGroupFdrInfo<T> joined = groupInfos.get(-1);
// if (joined == null) {
// joined = new SubGroupFdrInfo();
// joined.results = new HashedArrayList<T>();
//
// for (SubGroupFdrInfo sgi : groupInfos.values()) {
// joined.DD+=sgi.DD;
// joined.TD+=sgi.TD;
// joined.TT+=sgi.TT;
// joined.higherFDR+=sgi.higherFDR;
// joined.lowerFDR+=sgi.lowerFDR;
// joined.resultCount+=sgi.resultCount;
// joined.inputCount+=sgi.inputCount;
// joined.saftyfactor+=sgi.saftyfactor;
// joined.targteFDR+=sgi.targteFDR;
// joined.results.addAll(sgi.results);
// joined.filteredResult.addAll(sgi.filteredResult);
// }
// int groupCount = groupInfos.size();
// joined.higherFDR/=groupCount;
// joined.lowerFDR/=groupCount;
// joined.saftyfactor/=groupCount;
// joined.targteFDR/=groupCount;
// if (store)
// groupInfos.put(-1, joined);
// }
//
// return joined;
//
// }
/**
 * Computes a per-element "linked support" metric from how often each link
 * site occurs in the list and how well-connected its partner sites are.
 * The result is written back via {@code setLinkedSupport()} and is a sum of
 * two normalized terms (own-site support and partner-site support).
 *
 * @param <T>  the FDR element type
 * @param list elements (links/PSMs) whose linked support should be defined
 */
private <T extends AbstractFDRElement<T>> void defineConnectedness(Collection<T> list) {
    double maxSupport = 0;
    SelfAddHashSet<Site> supports;
    supports = new SelfAddHashSet<Site>();
    // count how often each site was found
    for (T e : list) {
        Site s1 = supports.register(e.getLinkSite1());
        if (s1.getConnectedness() > maxSupport) {
            maxSupport = s1.getConnectedness();
        }
        Site s2 = e.getLinkSite2();
        if (s2 != null) {
            s2 = supports.register(e.getLinkSite2());
            if (s2.getConnectedness() > maxSupport) {
                maxSupport = s2.getConnectedness();
            }
        }
    }
    // how connected are the second sites of a link
    double maxLinkedSupport = 0;
    SelfAddHashSet<Site> linkedSupports = new SelfAddHashSet<Site>();
    linkedSupports.selfAdd = false;
    for (T e : list) {
        Site s1 = supports.get(e.getLinkSite1());
        Site s2 = supports.get(e.getLinkSite2());
        Site ls1 = e.getLinkSite1();
        ls1.setConnectedness(0);
        ls1 = linkedSupports.register(ls1);
        if (s2 != null) {
            Site ls2 = e.getLinkSite2();
            ls2.setConnectedness(0);
            ls2 = linkedSupports.register(ls2);
            double lsc1 = ls1.getConnectedness() + s2.getConnectedness();
            if (maxLinkedSupport < lsc1) {
                maxLinkedSupport = lsc1;
            }
            double lsc2 = ls2.getConnectedness() + s1.getConnectedness();
            if (maxLinkedSupport < lsc2) {
                maxLinkedSupport = lsc2;
            }
            ls1.setConnectedness(lsc1);
            // BUG FIX: this previously wrote lsc2 into ls1 again (overwriting
            // lsc1); lsc2 is derived from ls2 and belongs to ls2
            ls2.setConnectedness(lsc2);
        }
    }
    // now we know the connectedness of each link site and how connected the
    // linked link-sites are - turn that into a metric for each element
    for (T e : list) {
        Site s1 = supports.get(e.getLinkSite1());
        double support = s1.getConnectedness();
        double linkedSupport = linkedSupports.get(s1).getConnectedness();
        // BUG FIX: previously this fetched getLinkSite1() again, which is never
        // null and so double-counted site 1; the second site is getLinkSite2()
        Site s2 = e.getLinkSite2();
        if (s2 != null) {
            s2 = supports.get(s2);
            support += s2.getConnectedness();
            linkedSupport += linkedSupports.get(s2).getConnectedness();
        }
        e.setLinkedSupport(support / (maxSupport) + linkedSupport / (2 * maxLinkedSupport));
    }
}
/**
 * test whether a result for a subgroup should be considered valid
 *
 * @param <T>        the FDR element type
 * @param info       per-subgroup target/decoy counts and the target FDR
 * @param factor     required resolution factor; larger values demand a better
 *                   separation between TD and DD counts
 * @param minTDCount minimum number of decoys the target FDR must be able to
 *                   represent given the observed TT count
 * @return null pass; otherwise reason
 */
public <T extends AbstractFDRElement<T>> String checkValid(SubGroupFdrInfo<T> info, double factor, int minTDCount) {
    // make sure we have enough targets that we could theoretically see this number of TD
    if (info.resultTT * info.targteFDR < (double) minTDCount) {
        return "not enough TT";
    }
    // more DD than TD (or more DD than TT below 100% FDR) means the decoy model is off
    if ((info.targteFDR < 1 && info.resultTT < info.resultDD) || info.resultTD < info.resultDD) {
        return "to many DD";
    }
    // DD/TD ratio too close to 1: TD and DD are not separable at this factor
    // (small epsilons guard against division by zero)
    if ((info.resultDD + 0.00001) / (info.resultTD + 0.0001) > 1 - factor) {
        return "resolution to bad";
    }
    // expected TD count too small relative to the requested resolution
    if (info.resultTT * info.targteFDR < factor * 10) {
        return "resolution to bad";
    }
    return null;
}
/**
 * Whether a higher PSM score indicates a better match.
 *
 * @return the PSMScoreHighBetter flag
 */
public boolean isPSMScoreHighBetter() {
    return this.PSMScoreHighBetter;
}
/**
 * Sets whether a higher PSM score indicates a better match.
 *
 * @param PSMScoreHighBetter the PSMScoreHighBetter flag to set
 */
public void setPSMScoreHighBetter(boolean PSMScoreHighBetter) {
    this.PSMScoreHighBetter = PSMScoreHighBetter;
}
/**
 * @return the target peptide database size (targetPepDBSize)
 */
public double getTargetDBSize() {
    return this.targetPepDBSize;
}
/**
 * @param targetDBSize the target peptide database size (targetPepDBSize) to set
 */
public void setTargetDBSize(double targetDBSize) {
    this.targetPepDBSize = targetDBSize;
}
/**
 * @return the decoy peptide database size (decoyPepDBSize)
 */
public double getDecoyDBSize() {
    return this.decoyPepDBSize;
}
/**
 * @param decoyDBSize the decoy peptide database size (decoyPepDBSize) to set
 */
public void setDecoyDBSize(double decoyDBSize) {
    this.decoyPepDBSize = decoyDBSize;
}
/**
 * @return the targetProtDBSize
 */
public double getTargetProtDBSize() {
    return this.targetProtDBSize;
}
/**
 * @param targetProtDBSize the targetProtDBSize to set
 */
public void setTargetProtDBSize(double targetProtDBSize) {
    this.targetProtDBSize = targetProtDBSize;
}
/**
 * @return the decoyProtDBSize
 */
public double getDecoyProtDBSize() {
    return this.decoyProtDBSize;
}
/**
 * @param targetLinkDBSize the targetLinkDBSize to set
 */
public void setTargetLinkDBSize(double targetLinkDBSize) {
    this.targetLinkDBSize = targetLinkDBSize;
}
/**
 * @param decoyLinkDBSize the decoyLinkDBSize to set
 */
public void setDecoyLinkDBSize(double decoyLinkDBSize) {
    this.decoyLinkDBSize = decoyLinkDBSize;
}
/**
 * @return the decoyLinkDBSize
 */
public double getDecoyLinkDBSize() {
    return decoyLinkDBSize;
}
/**
 * @return the targetLinkDBSize
 */
public double getTargetLinkDBSize() {
    return targetLinkDBSize;
}
/**
 * @param decoyProtDBSize the decoyProtDBSize to set
 */
public void setDecoyProtDBSize(double decoyProtDBSize) {
    this.decoyProtDBSize = decoyProtDBSize;
}
/**
 * @return the minimum number of peptides required per protein group
 */
public int getMinPepPerProteinGroup() {
    return this.minPepPerProteinGroup;
}
/**
 * @param minPepPerProteinGroup the minimum number of peptides required per
 *                              protein group
 */
public void setMinPepPerProteinGroup(int minPepPerProteinGroup) {
    this.minPepPerProteinGroup = minPepPerProteinGroup;
}
/**
 * @return the minimum number of peptides required per protein group link
 */
public int getMinPepPerProteinGroupLink() {
    return this.minPepPerProteinGroupLink;
}
/**
 * @param minPepPerProteinGroupLink the minimum number of peptides required per
 *                                  protein group link
 */
public void setMinPepPerProteinGroupLink(int minPepPerProteinGroupLink) {
    this.minPepPerProteinGroupLink = minPepPerProteinGroupLink;
}
/**
 * @return the minimum number of peptides required per protein group pair
 */
public int getMinPepPerProteinGroupPair() {
    return this.minPepPerProteinGroupPair;
}
/**
 * @param minPepPerProteinGroupPair the minimum number of peptides required per
 *                                  protein group pair
 */
public void setMinPepPerProteinGroupPair(int minPepPerProteinGroupPair) {
    this.minPepPerProteinGroupPair = minPepPerProteinGroupPair;
}
/**
 * @return the fully qualified name of the concrete (runtime) class
 */
public String execClass() {
    return getClass().getName();
}
/**
 * Prints the command line (short option list plus argument descriptions)
 * to standard output.
 */
public void printUsage() {
    String commandLine = "java " + execClass() + " " + argList();
    System.out.println(commandLine);
    System.out.println(argDescription());
}
/**
 * Short, space-separated list of all supported command-line options,
 * suitable for a one-line usage string.
 * <p>
 * BUG FIX: several options were missing their trailing space, so the usage
 * line ran adjacent options together; "--boostbetween" was advertised but
 * parseArgs() accepts "--boost-between".
 *
 * @return the option list
 */
public String argList() {
    return "--lenghtgroups=A,B,C "
            + "--psmfdr=X "
            + "--pepfdr=X "
            + "--proteinfdr=X "
            + "--linkfdr=X "
            + "--ppifdr=X "
            + "--reportfactor=X "
            + "--maxProteinAmbiguity=X "
            + "--maxLinkAmbiguity=X "
            + "--minPeptidesPerLink=X "
            + "--minPeptidesPerProtein=X "
            + "--minPeptidesPerPPI=X "
            + "--minPeptideLength=X "
            + "--ignoregroups "
            + "--csvOutDir=X "
            + "--csvBaseName=X "
            + "--csvSummaryOnly "
            + "--singleSummary "
            + "--uniquePSMs= "
            + "--outputlocale= "
            + "--boost= "
            + "--single-step-boost "
            + "--filter-consecutives "
            + "--boost-between ";
}
/**
 * Human-readable description of every supported command-line option.
 * <p>
 * BUG FIX: corrected numerous typos in the user-facing help text
 * ("don;t", "actuall", "sinlge", "filtr", "writen", "combinationswill",
 * "bossted", "more then") and a missing newline after the csvBaseName
 * description that ran two options together.
 *
 * @return the multi-line help text
 */
public String argDescription() {
    return "--lengthgroups=A,B,C how to group peptides by length\n"
            + "--psmfdr=X the psm-fdr\n"
            + "                     can either be a single value or \n"
            + "                     a range (min,max,stepsize)\n"
            + "--pepfdr=X the peptide pair fdr\n"
            + "                     can either be a single value or \n"
            + "                     a range (min,max,stepsize)\n"
            + "--proteinfdr=X the protein group fdr\n"
            + "                     can either be a single value or \n"
            + "                     a range (min,max,stepsize)\n"
            + "--linkfdr=X residue pair fdr\n"
            + "                     can either be a single value or \n"
            + "                     a range (min,max,stepsize)\n"
            + "--ppifdr=X protein pair fdr\n"
            + "                     can either be a single value or \n"
            + "                     a range (min,max,stepsize)\n"
            + "--reportfactor=X is ignored\n"
            + "--maxProteinAmbiguity=X any peptide that can be in more \n"
            + "                     than this number of proteins will\n"
            + "                     be ignored\n"
            + "--maxLinkAmbiguity=X links that have more than this \n"
            + "                     number of possible residue\n"
            + "                     combinations will be ignored\n"
            + "--minPeptidesPerLink=X only links that have at least this \n"
            + "                     number of unique peptide pairs \n"
            + "                     supporting them will be considered\n"
            + "--minPeptidesPerProtein=X Only proteins that have at least\n"
            + "                     this number of unique peptide pairs\n"
            + "                     supporting them will be considered\n"
            + "--minPeptidesPerPPI=X only protein pairs that have at \n"
            + "                     least this number of unique peptide\n"
            + "                     pairs supporting them will be \n"
            + "                     considered\n"
            + "--minPeptideLength=X only accept psms where both peptides\n"
            + "                     have at least this many residues\n"
            + "--ignoregroups don't do any grouping during FDR\n"
            + "                     calculation\n"
            + "--csvOutDir=X where to write the output files\n"
            + "--csvBaseName=X each file will be prepended with \n"
            + "                     this name\n"
            + "--csvSummaryOnly don't write the actual results but\n"
            + "                     only the summary\n"
            + "--singleSummary if fdrs were given in ranges all\n"
            + "                     summary files will be written into a\n"
            + "                     single file\n"
            + "--uniquePSMs=X filter PSMs to unique PSMs \n"
            + "                     options are true,false,1,0\n"
            + "--outputlocale= numbers in csv-files are written \n"
            + "                     according to this locale\n"
            + "--boost=(pep|link|prot) boost results on the given level\n"
            + "--boost-between when boosting try to maximize betweens\n"
            + "--single-step-boost if certain columns are found these are\n"
            + "                     used for boosting as well. By default they\n"
            + "                     get used in a second round of boosting\n"
            + "                     after everything is boosted together\n"
            + "--filter-consecutives ignore any peptide pair that could\n"
            + "                     be consecutive in a protein sequence\n";
}
/**
 * Resolves a locale given by name and makes it the output locale.
 * An exact match on {@code Locale.toString()} wins immediately; otherwise the
 * last available locale whose display name, country, script or language
 * matches (case-insensitively) is used.
 *
 * @param locale the locale name to look up (case-insensitive)
 * @return true if a matching locale was found and applied, false otherwise
 */
public boolean setOutputLocale(String locale) {
    String wanted = locale.toLowerCase();
    Locale fallback = null;
    for (Locale candidate : Locale.getAvailableLocales()) {
        if (candidate.toString().toLowerCase().contentEquals(wanted)) {
            // exact ID match - use it right away
            setOutputLocale(candidate);
            return true;
        }
        boolean matches = candidate.getDisplayName().toLowerCase().contentEquals(wanted)
                || candidate.getCountry().toLowerCase().contentEquals(wanted)
                || candidate.getDisplayScript().toLowerCase().contentEquals(wanted)
                || candidate.getDisplayLanguage().toLowerCase().contentEquals(wanted);
        if (matches) {
            // remember the (last) partial match as a fallback
            fallback = candidate;
        }
    }
    if (fallback == null) {
        return false;
    }
    setOutputLocale(fallback);
    return true;
}
/**
 * Applies the given locale to number formatting of the csv output.
 * Grouping separators are disabled so numbers stay machine-parsable.
 * <p>
 * BUG FIX: previously the number format was looked up twice
 * ({@code getInstance} immediately overwritten by {@code getNumberInstance});
 * the redundant first lookup was removed.
 *
 * @param locale the locale used to format numbers in the output
 */
public void setOutputLocale(Locale locale) {
    this.outputlocale = locale;
    this.numberFormat = NumberFormat.getNumberInstance(locale);
    DecimalFormat fformat = (DecimalFormat) numberFormat;
    fformat.setGroupingUsed(false);
}
/**
 * Formats a double according to the configured output locale.
 *
 * @param d the value to format
 * @return the locale-formatted string
 */
protected String d2s(double d) {
    return this.numberFormat.format(d);
}
/**
 * Formats an integer according to the configured output locale.
 *
 * @param i the value to format
 * @return the locale-formatted string
 */
protected String i2s(int i) {
    return this.numberFormat.format(i);
}
/**
 * @return the psmFDRSetting as {from, to, step}
 */
public double[] getPsmFDRSetting() {
    return this.psmFDRSetting;
}
/**
 * Configures the PSM-FDR range to scan.
 *
 * @param from lowest FDR
 * @param to   highest FDR
 * @param step increment between the two
 */
public void setPsmFDRSetting(double from, double to, double step) {
    this.psmFDRSetting = new double[]{from, to, step};
}
/**
 * @return the peptidePairFDRSetting as {from, to, step}
 */
public double[] getPeptidePairFDRSetting() {
    return this.peptidePairFDRSetting;
}
/**
 * Configures the peptide-pair FDR range to scan.
 *
 * @param from lowest FDR
 * @param to   highest FDR
 * @param step increment between the two
 */
public void setPeptidePairFDRSetting(double from, double to, double step) {
    this.peptidePairFDRSetting = new double[]{from, to, step};
}
/**
 * @return the ProteinGroupFDRSetting as {from, to, step}
 */
public double[] getProteinGroupFDRSetting() {
    return this.ProteinGroupFDRSetting;
}
/**
 * Configures the protein-group FDR range to scan.
 *
 * @param from lowest FDR
 * @param to   highest FDR
 * @param step increment between the two
 */
public void setProteinGroupFDRSetting(double from, double to, double step) {
    this.ProteinGroupFDRSetting = new double[]{from, to, step};
}
/**
 * @return the linkFDRSetting as {from, to, step}
 */
public double[] getLinkFDRSetting() {
    return this.linkFDRSetting;
}
/**
 * Configures the residue-pair (link) FDR range to scan.
 * (Cleanup: removed a stray duplicated semicolon.)
 *
 * @param from lowest FDR
 * @param to   highest FDR
 * @param step increment between the two
 */
public void setLinkFDRSetting(double from, double to, double step) {
    this.linkFDRSetting = new double[]{from, to, step};
}
/**
 * @return the ppiFDRSetting as {from, to, step}
 */
public double[] getPpiFDRSetting() {
    return this.ppiFDRSetting;
}
/**
 * Configures the protein-pair (PPI) FDR range to scan.
 *
 * @param from lowest FDR
 * @param to   highest FDR
 * @param step increment between the two
 */
public void setPpiFDRSetting(double from, double to, double step) {
    this.ppiFDRSetting = new double[]{from, to, step};
}
/**
 * @return the safetyFactorSetting
 */
public double getSafetyFactorSetting() {
    return this.safetyFactorSetting;
}
/**
 * @param safetyFactorSetting the safetyFactorSetting to set
 */
public void setSafetyFactorSetting(double safetyFactorSetting) {
    this.safetyFactorSetting = safetyFactorSetting;
}
/**
 * @return whether FDR sub-grouping is disabled
 */
public boolean isIgnoreGroupsSetting() {
    return this.ignoreGroupsSetting;
}
/**
 * @param ignoreGroupsSetting true to disable FDR sub-grouping
 */
public void setIgnoreGroupsSetting(boolean ignoreGroupsSetting) {
    this.ignoreGroupsSetting = ignoreGroupsSetting;
}
/**
 * @param summaryOnly when writing output files only write the summary file
 */
public void setCSVSummaryOnly(boolean summaryOnly) {
    this.csvSummaryOnly = summaryOnly;
}
/**
 * @return true if only the summary file is written when writing output files
 */
public boolean getCSVSummaryOnly() {
    return this.csvSummaryOnly;
}
/**
 * @param singleSummary when FDRs are scanned as ranges, write all summaries
 *                      into a single file
 */
public void setCSVSingleSummary(boolean singleSummary) {
    this.singleSummary = singleSummary;
}
/**
 * @return the csvOutDirSetting (directory for csv output)
 */
public String getCsvOutDirSetting() {
    return this.csvOutDirSetting;
}
/**
 * @param csvOutDirSetting the directory for csv output
 */
public void setCsvOutDirSetting(String csvOutDirSetting) {
    this.csvOutDirSetting = csvOutDirSetting;
}
/**
 * @return the csvOutBaseSetting (base name prefix for csv output files)
 */
public String getCsvOutBaseSetting() {
    return this.csvOutBaseSetting;
}
/**
 * @param csvOutBaseSetting the base name prefix for csv output files
 */
public void setCsvOutBaseSetting(String csvOutBaseSetting) {
    this.csvOutBaseSetting = csvOutBaseSetting;
}
/**
 * Parses the command-line arguments, applying recognised options to this
 * object and the given settings; unrecognised arguments are returned.
 * <p>
 * BUG FIXES:
 * <ul>
 * <li>the FDR digit counting used {@code String.replace} with a regex, which
 *     replaces literally and therefore never matched - now uses
 *     {@code replaceAll} (see {@link #fdrDigitsNeeded});</li>
 * <li>{@code "-C"} was compared against the lower-cased argument and could
 *     never match;</li>
 * <li>{@code --boostbetween} (the spelling advertised by argList()) is now
 *     accepted alongside {@code --boost-between}.</li>
 * </ul>
 * The five identical FDR-range parsers were deduplicated into helpers.
 *
 * @param argv     the raw command-line arguments
 * @param settings the settings object to configure
 * @return all arguments that were not recognised
 */
public String[] parseArgs(String[] argv, FDRSettings settings) {
    ArrayList<String> unknown = new ArrayList<String>();
    int[] lengthgroups = new int[]{4};
    double[] psmFDR = new double[]{1, 1, 1};
    double[] pepFDR = new double[]{1, 1, 1};
    double[] protFDR = new double[]{1, 1, 1};
    double[] linkFDR = new double[]{1, 1, 1};
    double[] ppiFDR = new double[]{1, 1, 1};
    int maxLinkAmbiguity = 0;
    int maxProteinGroupAmbiguity = 0;
    int minPepPerLink = 1;
    int minPepPerProtein = 1;
    int minPepPerPPI = 1;
    int minPepLength = 6;
    boolean ignoreGroups = false;
    boolean csvsummaryonly = false;
    boolean csvsinglesummary = false;
    String csvdir = null;
    String csvBase = null;
    int fdrDigits = commandlineFDRDigits;
    FDRLevel maximizeWhat = null;
    settings.doOptimize(null);
    for (String arg : argv) {
        if (arg.startsWith("--lenghtgroups=") || arg.startsWith("--lengthgroups=")) {
            String[] slen = arg.substring(arg.indexOf("=") + 1).trim().split(",");
            lengthgroups = new int[slen.length];
            for (int i = 0; i < slen.length; i++) {
                lengthgroups[i] = Integer.parseInt(slen[i]);
            }
        } else if (arg.toLowerCase().startsWith("--boost=")) {
            String what = arg.substring("--boost=".length()).trim().toLowerCase();
            // accept both singular and plural forms
            if (what.endsWith("s")) {
                what = what.substring(0, what.length() - 1);
            }
            if (what.contentEquals("pep")
                    || what.contentEquals("peptidepair")
                    || what.contentEquals("peppair")) {
                maximizeWhat = FDRLevel.PEPTIDE_PAIR;
            } else if (what.contentEquals("link")
                    || what.contentEquals("residuepair")) {
                maximizeWhat = FDRLevel.PROTEINGROUPLINK;
            } else if (what.contentEquals("prot")
                    || what.contentEquals("protein")
                    || what.contentEquals("proteinpair")
                    || what.contentEquals("ppi")) {
                maximizeWhat = FDRLevel.PROTEINGROUPPAIR;
            }
            settings.doOptimize(maximizeWhat);
        } else if (arg.toLowerCase().equals("--boost-between") || arg.toLowerCase().equals("--boostbetween")) {
            settings.setBoostBetween(true);
        } else if (arg.toLowerCase().equals("--single-step-boost")) {
            settings.twoStepOptimization(false);
        } else if (arg.toLowerCase().equals("--filter-consecutives") || arg.equals("-C")) {
            // BUG FIX: "-C" was previously compared against arg.toLowerCase()
            settings.setFilterConsecutivePeptides(true);
        } else if (arg.startsWith("--reportfactor=")) {
            // accepted for backwards compatibility; value is ignored
        } else if (arg.startsWith("--psmfdr=")) {
            String value = arg.substring(arg.indexOf("=") + 1).trim();
            psmFDR = parseFDRRange(value);
            fdrDigits = Math.max(fdrDigits, fdrDigitsNeeded(value));
        } else if (arg.startsWith("--pepfdr=")) {
            String value = arg.substring(arg.indexOf("=") + 1).trim();
            pepFDR = parseFDRRange(value);
            fdrDigits = Math.max(fdrDigits, fdrDigitsNeeded(value));
        } else if (arg.startsWith("--proteinfdr=")) {
            String value = arg.substring(arg.indexOf("=") + 1).trim();
            protFDR = parseFDRRange(value);
            fdrDigits = Math.max(fdrDigits, fdrDigitsNeeded(value));
        } else if (arg.startsWith("--linkfdr=")) {
            String value = arg.substring(arg.indexOf("=") + 1).trim();
            linkFDR = parseFDRRange(value);
            fdrDigits = Math.max(fdrDigits, fdrDigitsNeeded(value));
        } else if (arg.startsWith("--ppifdr=")) {
            String value = arg.substring(arg.indexOf("=") + 1).trim();
            ppiFDR = parseFDRRange(value);
            fdrDigits = Math.max(fdrDigits, fdrDigitsNeeded(value));
        } else if (arg.startsWith("--maxProteinAmbiguity=")) {
            String spsm = arg.substring(arg.indexOf("=") + 1).trim();
            maxProteinGroupAmbiguity = Integer.parseInt(spsm);
        } else if (arg.startsWith("--maxLinkAmbiguity=")) {
            String spsm = arg.substring(arg.indexOf("=") + 1).trim();
            maxLinkAmbiguity = Integer.parseInt(spsm);
        } else if (arg.startsWith("--minPeptidesPerLink=")) {
            String spsm = arg.substring(arg.indexOf("=") + 1).trim();
            minPepPerLink = Integer.parseInt(spsm);
        } else if (arg.startsWith("--minPeptidesPerProtein=")) {
            String spsm = arg.substring(arg.indexOf("=") + 1).trim();
            minPepPerProtein = Integer.parseInt(spsm);
        } else if (arg.startsWith("--minPeptidesPerPPI=")) {
            String spsm = arg.substring(arg.indexOf("=") + 1).trim();
            minPepPerPPI = Integer.parseInt(spsm);
        } else if (arg.startsWith("--minPeptideLength=")) {
            String spsm = arg.substring(arg.indexOf("=") + 1).trim();
            minPepLength = Integer.parseInt(spsm);
        } else if (arg.equals("--ignoregroups")) {
            ignoreGroups = true;
        } else if (arg.startsWith("--csvOutDir=")) {
            csvdir = arg.substring(arg.indexOf("=") + 1);
            if (csvBase == null) {
                csvBase = "FDR";
            }
        } else if (arg.startsWith("--csvBaseName=")) {
            csvBase = arg.substring(arg.indexOf("=") + 1);
            if (csvdir == null) {
                csvdir = ".";
            }
        } else if (arg.equals("--csvSummaryOnly")) {
            csvsummaryonly = true;
        } else if (arg.equals("--singleSummary")) {
            csvsummaryonly = true;
            csvsinglesummary = true;
        } else if (arg.startsWith("--outputlocale=")) {
            String l = arg.substring(arg.indexOf("=") + 1).trim();
            setOutputLocale(l);
        } else if (arg.startsWith("--uniquePSMs=")) {
            String bool = arg.substring(arg.indexOf("=") + 1).trim();
            boolean filter = bool.matches("(?i)^(T|1(\\.0*)?|-1(\\.0*)?|TRUE|Y|YES|\\+)$");
            settings.setFilterToUniquePSM(filter);
        } else {
            unknown.add(arg);
        }
    }
    this.setLengthGroups(lengthgroups);
    commandlineFDRDigits = fdrDigits;
    setPsmFDRSetting(psmFDR[0], psmFDR[1], psmFDR[2]);
    settings.setPSMFDR(psmFDR[0]);
    setPeptidePairFDRSetting(pepFDR[0], pepFDR[1], pepFDR[2]);
    settings.setPeptidePairFDR(pepFDR[0]);
    setProteinGroupFDRSetting(protFDR[0], protFDR[1], protFDR[2]);
    settings.setProteinGroupFDR(protFDR[0]);
    setLinkFDRSetting(linkFDR[0], linkFDR[1], linkFDR[2]);
    settings.setProteinGroupLinkFDR(linkFDR[0]);
    setPpiFDRSetting(ppiFDR[0], ppiFDR[1], ppiFDR[2]);
    settings.setProteinGroupPairFDR(ppiFDR[0]);
    //setSafetyFactorSetting(reportfactor);
    //settings.setReportFactor(reportfactor);
    setIgnoreGroupsSetting(ignoreGroups);
    setCSVSummaryOnly(csvsummaryonly);
    setCSVSingleSummary(csvsinglesummary);
    setCsvOutBaseSetting(csvBase);
    setCsvOutDirSetting(csvdir);
    settings.setMaxLinkAmbiguity(maxLinkAmbiguity);
    settings.setMaxProteinAmbiguity(maxProteinGroupAmbiguity);
    setMinPepPerProteinGroup(minPepPerProtein);
    settings.setMinProteinPepCount(minPepPerProtein);
    setMinPepPerProteinGroupLink(minPepPerLink);
    settings.setMinLinkPepCount(minPepPerLink);
    setMinPepPerProteinGroupPair(minPepPerPPI);
    settings.setMinPPIPepCount(minPepPerPPI);
    setMinimumPeptideLength(minPepLength);
    settings.setMinPeptideLength(minPepLength);
    String[] ret = new String[unknown.size()];
    ret = unknown.toArray(ret);
    return ret;
}

/**
 * Parses an FDR command-line value - either a single percentage "X" or a
 * range "from,to,step" (all in percent) - into fractional {from, to, step}.
 * A single value yields {X/100, X/100, 1}.
 *
 * @param value the text after the "=" of an --...fdr= option
 * @return {from, to, step} as fractions
 */
private static double[] parseFDRRange(String value) {
    String[] parts = value.split(",");
    if (parts.length == 1) {
        double v = Double.parseDouble(value) / 100;
        return new double[]{v, v, 1};
    }
    return new double[]{
        Double.parseDouble(parts[0]) / 100,
        Double.parseDouble(parts[1]) / 100,
        Double.parseDouble(parts[2]) / 100};
}

/**
 * Number of decimal digits needed to print the FDR values given on the
 * command line (+2 because percentages are converted to fractions).
 * <p>
 * BUG FIX: the original used {@code String.replace("^[^\\.]*", "")}, which
 * replaces the pattern <em>literally</em> and never matched; the intended
 * behavior (strip everything before the decimal point) needs
 * {@code replaceAll}.
 *
 * @param value the raw option value ("X" or "from,to,step")
 * @return the maximum digit count over all parts
 */
private static int fdrDigitsNeeded(String value) {
    int digits = 0;
    for (String part : value.split(",")) {
        int d = part.trim().replaceAll("^[^\\.]*", "").trim().length() + 2;
        if (d > digits) {
            digits = d;
        }
    }
    return digits;
}
/**
 * Clears all state left over from a previous FDR calculation so the next
 * run starts fresh: global ID counters, cached counts, per-PSM FDR data,
 * and per-protein/per-peptide FDR results. Finally re-applies (or drops)
 * run-based FDR grouping according to {@code groupPSMsByRun}.
 */
protected void reset() {
    // restart the global ID counters
    PeptidePair.PEPTIDEPAIRCOUNT = 0;
    ProteinGroup.PROTEINGROUPCOUNT = 0;
    ProteinGroupPair.PROTEINGROUPPAIRCOUNT = 0;
    ProteinGroupLink.LINKCOUNT = 0;
    // invalidate cached linear/cross-linked counts
    m_linearPepCount = null;
    m_XLPepCount = null;
    m_linearPSMCount = null;
    m_XLPSMCount = null;
    for (PSM match : getAllPSMs()) {
        match.setFDRGroup();
        match.getAdditionalFDRGroups().remove(match.getRun());
        match.getAdditionalFDRGroups().remove("" + match.getScan());
        match.setFDR(Double.MAX_VALUE);
        match.setFdrPeptidePair(null);
        match.reset();
    }
    for (Protein prot : allProteins) {
        prot.resetFDR();
    }
    for (Peptide pept : allPeptides) {
        pept.resetFDR();
    }
    // re-establish run-based grouping if requested; groupPSMsByRun is
    // constant during the loop, so testing it per PSM is equivalent to
    // branching once around two loops
    for (PSM match : getAllPSMs()) {
        if (groupPSMsByRun) {
            match.getAdditionalFDRGroups().add(match.getRun());
        } else {
            match.getAdditionalFDRGroups().remove(match.getRun());
        }
    }
}
/**
 * Description of where the underlying data came from — implemented by
 * subclasses (presumably a file name or database identifier; confirm
 * against the concrete implementations).
 *
 * @return source description for this data set
 */
public abstract String getSource();
/**
 * Serialises a single PSM as one separator-joined line.
 *
 * @param psm       the PSM to serialise
 * @param seperator field separator (e.g. "," or "\t")
 * @return the joined CSV line
 */
protected String getPSMOutputLine(PSM psm, String seperator) {
    ArrayList<String> fields = getPSMOutputLine(psm);
    return RArrayUtils.toString(fields, seperator);
}
/**
 * Builds the CSV fields for a single PSM. The column order must match
 * {@link #getPSMHeader()}, including the dynamically discovered
 * extra-info columns and the optional original-score column emitted for
 * normalised data.
 *
 * @param pp the PSM to serialise
 * @return one CSV field per header column
 */
protected ArrayList<String> getPSMOutputLine(PSM pp) {
    // walk up the FDR chain; any level may be absent (null)
    PeptidePair pep = pp.getFdrPeptidePair();
    ProteinGroupLink l = pep != null ? pep.getFdrLink() : null;
    ProteinGroupPair ppi = l != null ? l.getFdrPPI() : null;
    Peptide pep1 = pp.getPeptide1();
    Peptide pep2 = pp.getPeptide2();
    int pepLink1 = pp.getPeptideLinkSite1();
    int pepLink2 = pp.getPeptideLinkSite2();
    int pepLength1 = pp.getPeptideLength1();
    int pepLength2 = pp.getPeptideLength2();
    String pepSeq1 = getPeptideSequence(pep1);
    String pepSeq2 = getPeptideSequence(pep2);
    // accessions/descriptions/positions are ";"-joined over all protein
    // positions of each peptide; the builders are reused for both peptides
    StringBuilder sbaccessions = new StringBuilder();
    StringBuilder sbdescriptions = new StringBuilder();
    StringBuilder sbPositions = new StringBuilder();
    StringBuilder sbProtLink = new StringBuilder();
    peptidePositionsToPSMOutString(pep1.getPositions(), sbaccessions, sbdescriptions, sbPositions, sbProtLink, pepLink1);
    String accessions1 = sbaccessions.toString();
    String descriptions1 = sbdescriptions.toString();
    String positons1 = sbPositions.toString();
    String proteinLinkPositons1 = pepLink1 > 0 ? sbProtLink.toString() : "";
    sbaccessions.setLength(0);
    sbdescriptions.setLength(0);
    sbPositions.setLength(0);
    sbProtLink.setLength(0);
    peptidePositionsToPSMOutString(pep2.getPositions(), sbaccessions, sbdescriptions, sbPositions, sbProtLink, pepLink2);
    String accessions2 = sbaccessions.toString();
    String descriptions2 = sbdescriptions.toString();
    String positons2 = sbPositions.toString();
    String proteinLinkPositons2 = pepLink2 > 0 ? sbProtLink.toString() : "";
    String run = pp.getRun();
    String scan = pp.getScan();
    if (run == null) {
        run = "";
    }
    if (scan == null) {
        scan = "";
    }
    ArrayList<String> ret = new ArrayList<>(37);
    ret.add(pp.getPsmID());
    ret.add(run);
    ret.add(scan);
    ret.add(pp.getPeakListName() == null ? "" : pp.getPeakListName());
    ret.add(pp.getFileScanIndex() == null ? "" : i2s(pp.getFileScanIndex()));
    ret.add(accessions1);
    ret.add(descriptions1);
    ret.add(Boolean.toString(pep1.isDecoy()));
    ret.add(accessions2);
    ret.add(descriptions2);
    ret.add(Boolean.toString(pep2.isDecoy()));
    ret.add(pepSeq1);
    ret.add(pepSeq2);
    ret.add(positons1);
    ret.add(positons2);
    // zero length means "unknown" and is written as an empty field
    ret.add((pepLength1 == 0 ? "" : i2s(pepLength1)));
    ret.add((pepLength2 == 0 ? "" : i2s(pepLength2)));
    ret.add(i2s(pepLink1));
    ret.add(i2s(pepLink2));
    ret.add(proteinLinkPositons1);
    ret.add(proteinLinkPositons2);
    ret.add(i2s(pp.getCharge()));
    ret.add(pp.getCrosslinker());
    ret.add(Double.isNaN(pp.getCrosslinkerModMass()) ? "" : d2s(pp.getCrosslinkerModMass()));
    // dynamic extra-info columns, in the same order as the header
    for (String name : extraColumns) {
        Object v = pp.getOtherInfo(name);
        if (v == null) {
            ret.add("");
        } else if (v instanceof Number) {
            ret.add(d2s(((Number) v).doubleValue()));
        } else if (v.getClass().isArray()) {
            // if we have an array - try to expand that
            // NOTE(review): primitive arrays (e.g. double[]) have a primitive
            // component type, fall into the Object[] branch and would fail the
            // cast — assumes only boxed arrays occur here; confirm upstream.
            if (Number.class.isAssignableFrom(v.getClass().getComponentType())) {
                // numeric ones we just convert to double values
                StringBuilder sb = new StringBuilder();
                for (Number n : (Number[])v) {
                    sb.append(";").append(d2s(n.doubleValue()));
                }
                if (sb.length()>0)
                    ret.add(sb.substring(1));
                else
                    ret.add("");
            } else {
                // everything else we filter through Object.toString()
                StringBuilder sb = new StringBuilder();
                for (Object n : (Object[])v) {
                    sb.append(";").append(n.toString());
                }
                if (sb.length()>0)
                    ret.add(sb.substring(1));
                else
                    ret.add("");
            }
        } else {
            ret.add(v.toString());
        }
    }
    // for normalised scores the pre-normalisation score gets its own column
    if (isNormalized() != Normalisation.None) {
        ret.add(d2s(pp.getOriginalScore()));
    }
    ret.add(d2s(pp.getScore()));
    ret.add(Boolean.toString(pp.isDecoy()));
    ret.add(Boolean.toString(pp.isTT()));
    ret.add(Boolean.toString(pp.isTD()));
    ret.add(Boolean.toString(pp.isDD()));
    ret.add(pp.getFDRGroup());
    ret.add(d2s(pp.getFDR()));
    ret.add(d2s(pp.getEstimatedFDR()));
    ret.add(d2s(pp.getPEP()));
    ret.add("");
    ret.add((pep == null ? "" : d2s(pep.getFDR())));
    ret.add((pp.getFdrProteinGroup1() == null ? "" : d2s(pp.getFdrProteinGroup1().getFDR())));
    ret.add((pp.getFdrProteinGroup2() == null ? "" : d2s(pp.getFdrProteinGroup2().getFDR())));
    ret.add((l == null ? "" : d2s(l.getFDR())));
    ret.add((ppi == null ? "" : d2s(ppi.getFDR())));
    ret.add((pep == null ? "" : i2s(pep.getPeptidePairID())));
    ret.add((l == null ? "" : i2s(l.getLinkID())));
    ret.add((ppi == null ? "" : i2s(ppi.getProteinGroupPairID())));
    ret.add(pp.getInfo());
    return ret;
}
/**
 * Appends, for every protein position of a peptide, the protein accession,
 * description, peptide start position and resulting protein-level link site
 * ({@code position + peplink - 1}) to the given builders, ";"-separated.
 * Decoy proteins are marked with a "decoy:" accession prefix and a "decoy"
 * description. The builders are expected to be empty on entry.
 *
 * @param positions      protein to set of peptide start positions
 * @param sbaccessions   receives the ";"-joined accessions
 * @param sbdescriptions receives the ";"-joined descriptions
 * @param sbPositions    receives the ";"-joined peptide positions
 * @param sbProtLink     receives the ";"-joined protein link sites
 * @param peplink        link site within the peptide (1-based)
 */
protected void peptidePositionsToPSMOutString(HashMap<Protein, HashSet<Integer>> positions, StringBuilder sbaccessions, StringBuilder sbdescriptions, StringBuilder sbPositions, StringBuilder sbProtLink, int peplink) {
    boolean appended = false;
    for (Protein p : positions.keySet()) {
        for (int i : positions.get(p)) {
            sbaccessions.append(";").append((p.isDecoy() ? "decoy:" : "") + p.getAccession());
            sbdescriptions.append(";").append(p.isDecoy() ? "decoy" : p.getDescription());
            sbPositions.append(";").append(i2s(i));
            sbProtLink.append(";").append(i2s(i + peplink - 1));
            appended = true;
        }
    }
    // strip the leading ";" — but only if anything was written; the
    // unconditional deleteCharAt(0) previously threw
    // StringIndexOutOfBoundsException for an empty positions map
    if (appended) {
        sbaccessions.deleteCharAt(0);
        sbdescriptions.deleteCharAt(0);
        sbPositions.deleteCharAt(0);
        sbProtLink.deleteCharAt(0);
    }
}
/**
 * The PSM CSV header as a single separator-joined line.
 * (The unused generic type parameter of the original declaration was
 * removed — it was never referenced.)
 *
 * @param seperator field separator
 * @return the joined header line
 */
protected String getPSMHeader(String seperator) {
    return RArrayUtils.toString(getPSMHeader(), seperator);
}
/**
 * Builds the CSV header for PSM output. As a side effect (re)initialises
 * {@link #extraColumns} with the alphabetically sorted extra-info column
 * names so that data rows produced afterwards align with this header.
 * (The unused generic type parameter of the original declaration was
 * removed — it was never referenced.)
 *
 * @return the header column names
 */
protected ArrayList<String> getPSMHeader() {
    ArrayList<String> ret = new ArrayList<String>(RArrayUtils.toCollection(new String[]{"PSMID", "run", "scan", "PeakListFileName", "ScanId", "Protein1", "Description1",
        "Decoy1", "Protein2", "Description2", "Decoy2",
        "PepSeq1", "PepSeq2", "PepPos1", "PepPos2",
        "PeptideLength1", "PeptideLength2", "LinkPos1", "LinkPos2",
        "ProteinLinkPos1", "ProteinLinkPos2", "Charge", "Crosslinker", "CrosslinkerModMass"}));
    if (allPSMs.size() > 0 && PSM.getOtherInfoNames().length > 0) {
        // NOTE(review): MyArrayUtils here vs RArrayUtils elsewhere — verify
        // whether these helpers are interchangeable before unifying.
        extraColumns = new ArrayList<>(MyArrayUtils.toCollection(PSM.getOtherInfoNames()));
        // sort alphabetically so the extra-column order is deterministic
        extraColumns.sort(String::compareTo);
        ret.addAll(extraColumns);
    } else {
        extraColumns = new ArrayList<>(0);
    }
    if (isNormalized() != Normalisation.None) {
        // NOTE: "Orignal" typo kept on purpose — downstream consumers may
        // match on this exact header text
        ret.add("Orignal Score");
    }
    ret.addAll(RArrayUtils.toCollection(new String[]{"Score",
        "isDecoy", "isTT", "isTD", "isDD", "fdrGroup", "fdr", "ifdr", "PEP",
        "", "PeptidePairFDR", "Protein1FDR", "Protein2FDR",
        "LinkFDR", "PPIFDR",
        "peptide pair id", "link id", "ppi id", "info"}));
    return ret;
}
/**
 * Header columns for the cross-linked peptide-pair CSV output, followed
 * by one score column per run.
 *
 * @return the header column names
 */
protected ArrayList<String> getXLPepsHeader() {
    String[] fixed = {
        "PeptidePairID",
        "PSMIDs",
        "Protein1",
        "Description1",
        "Decoy1",
        "Protein2",
        "Description2",
        "Decoy2",
        "Peptide1",
        "Peptide2",
        "Start1",
        "Start2",
        "FromSite",
        "ToSite",
        "FromProteinSite",
        "ToProteinSite",
        "psmID",
        "Crosslinker",
        "Score",
        "isDecoy",
        "isTT",
        "isTD",
        "isDD",
        "fdrGroup",
        "fdr",
        "ifdr",
        "PEP",
        "",
        "Protein1FDR",
        "Protein2FDR",
        "LinkFDR",
        "PPIFDR",
        "",
        "link id",
        "ppi id"};
    ArrayList<String> header = new ArrayList<String>();
    for (String column : fixed) {
        header.add(column);
    }
    // one trailing column per known run
    for (String runName : runs) {
        header.add(runName);
    }
    return header;
}
/** Joins the cross-linked peptide-pair header columns with the given separator. */
protected String getXLPepsHeader(String seperator) {
    ArrayList<String> columns = getXLPepsHeader();
    return RArrayUtils.toString(columns, seperator);
}
/** Joins the linear peptide header columns with the given separator. */
protected String getLinearPepsHeader(String seperator) {
    ArrayList<String> columns = getLinearPepsHeader();
    return RArrayUtils.toString(columns, seperator);
}
/**
 * Header columns for the linear (non-cross-linked) peptide CSV output,
 * followed by one score column per run.
 *
 * @return the header column names
 */
protected ArrayList<String> getLinearPepsHeader() {
    String[] fixed = {
        "PeptidePairID",
        "PSMIDs",
        "Protein",
        "Description",
        "Decoy",
        "Peptide",
        "psmID",
        "Score",
        "isDecoy",
        "fdrGroup",
        "fdr",
        "ifdr",
        "PEP",
        "",
        "ProteinFDR"};
    ArrayList<String> header = new ArrayList<>();
    for (String column : fixed) {
        header.add(column);
    }
    // one trailing column per known run
    for (String runName : runs) {
        header.add(runName);
    }
    return header;
}
/** Serialises a linear peptide pair as one separator-joined line. */
protected String getLinearPepeptideOutputLine(PeptidePair pp, String seperator) {
    ArrayList<String> fields = getLinearPepeptideOutputLine(pp);
    return RArrayUtils.toString(fields, seperator);
}
/**
 * Builds the CSV fields for a linear (non-cross-linked) peptide pair.
 * Column order must match {@link #getLinearPepsHeader()}, including the
 * per-run score columns at the end.
 *
 * @param pp the peptide pair to serialise
 * @return one CSV field per header column
 */
protected ArrayList<String> getLinearPepeptideOutputLine(PeptidePair pp) {
    String[] psmids = pp.getPSMids();
    ProteinGroup pg1 = pp.getPeptide1().getProteinGroup();
    ArrayList<String> ret = new ArrayList<String>();
    // "ID" + "PSMIDs" + "Protein" + "Decoy" + "Peptide" + "psmID" + "Score" + "isDecoy" + "fdrGroup" + "fdr" + + "ProteinFDR");
    ret.add(i2s(pp.getPeptidePairID()));
    ret.add(RArrayUtils.toString(psmids, ";"));
    ret.add(pg1.accessions());
    ret.add(pg1.descriptions());
    ret.add(Boolean.toString(pp.getPeptide1().isDecoy()));
    ret.add(getPeptideSequence(pp.getPeptide1()));
    ret.add(pp.getTopPSMIDs());
    ret.add(d2s(pp.getScore()));
    ret.add(Boolean.toString(pp.isDecoy()));
    ret.add(pp.getFDRGroup());
    ret.add(d2s(pp.getFDR()));
    ret.add(d2s(pp.getEstimatedFDR()));
    ret.add(d2s(pp.getPEP()));
    ret.add("");
    ret.add((pp.getFdrProteinGroup1() == null ? "" : d2s(pp.getFdrProteinGroup1().getFDR())));
    // collect the best PSM score per run over all represented PSMs
    HashSet<String> linkRuns = new HashSet<>();
    HashMap<String, Double> runScore = new HashMap<>();
    for (PSM psm : pp.getAllPSMs()) {
        for (PSM upsm : psm.getRepresented()) {
            psmToRun(upsm, linkRuns, runScore);
        }
    }
    // one column per known run; empty where this pair was not seen in that run
    for (String r : runs) {
        Double d = runScore.get(r);
        if (d == null) {
            ret.add("");
        } else {
            ret.add(d2s(d));
        }
    }
    return ret;
}
/** Serialises a cross-linked peptide pair as one separator-joined line. */
protected String getXlPepeptideOutputLine(PeptidePair pp, String seperator) {
    ArrayList<String> fields = getXlPepeptideOutputLine(pp);
    return RArrayUtils.toString(fields, seperator);
}
/**
 * Builds the CSV fields for a cross-linked peptide pair. Column order
 * must match {@link #getXLPepsHeader()}, including the per-run score
 * columns at the end. Link-site related fields are emitted empty when
 * either peptide is {@code Peptide.NOPEPTIDE}.
 *
 * @param pp the peptide pair to serialise
 * @return one CSV field per header column
 */
protected ArrayList<String> getXlPepeptideOutputLine(PeptidePair pp) {
    ProteinGroupLink l = pp.getFdrLink();
    ProteinGroupPair ppi = l == null ? null : l.getFdrPPI();
    String[] psmids = pp.getPSMids();
    ProteinGroup pg1 = pp.getPeptide1().getProteinGroup();
    ProteinGroup pg2 = pp.getPeptide2().getProteinGroup();
    ArrayList<String> ret = new ArrayList<String>();
    StringBuilder sbaccessions = new StringBuilder();
    StringBuilder sbdescriptions = new StringBuilder();
    StringBuilder sbPositions = new StringBuilder();
    StringBuilder sbProtLink = new StringBuilder();
    peptidePositionsToPSMOutString(pp.getPeptide1().getPositions(), sbaccessions, sbdescriptions, sbPositions, sbProtLink, pp.getPeptideLinkSite1());
    String accessions1 = sbaccessions.toString();
    String descriptions1 = sbdescriptions.toString();
    String positons1 = sbPositions.toString();
    String proteinLinkPositons1 = pp.getPeptideLinkSite1() > 0 ? sbProtLink.toString() : "";
    sbaccessions.setLength(0);
    sbdescriptions.setLength(0);
    sbPositions.setLength(0);
    sbProtLink.setLength(0);
    peptidePositionsToPSMOutString(pp.getPeptide2().getPositions(), sbaccessions, sbdescriptions, sbPositions, sbProtLink, pp.getPeptideLinkSite2());
    String accessions2 = sbaccessions.toString();
    String descriptions2 = sbdescriptions.toString();
    String positons2 = sbPositions.toString();
    String proteinLinkPositons2 = pp.getPeptideLinkSite2() > 0 ? sbProtLink.toString() : "";
    // hoisted: previously this condition was repeated for six fields below
    boolean missingPeptide = pp.getPeptide2() == Peptide.NOPEPTIDE || pp.getPeptide1() == Peptide.NOPEPTIDE;
    ret.add(i2s(pp.getPeptidePairID()));
    ret.add(RArrayUtils.toString(psmids, ";"));
    ret.add(accessions1);
    ret.add(descriptions1);
    ret.add("" + pp.getPeptide1().isDecoy());
    ret.add(accessions2);
    ret.add(descriptions2);
    ret.add("" + pp.getPeptide2().isDecoy());
    ret.add(getPeptideSequence(pp.getPeptide1()));
    ret.add(getPeptideSequence(pp.getPeptide2()));
    ret.add(positons1);
    ret.add(missingPeptide ? "" : positons2);
    ret.add(missingPeptide ? "" : i2s(pp.getPeptideLinkSite1()));
    ret.add(missingPeptide ? "" : i2s(pp.getPeptideLinkSite2()));
    ret.add(missingPeptide ? "" : proteinLinkPositons1);
    ret.add(missingPeptide ? "" : proteinLinkPositons2);
    ret.add(pp.getTopPSMIDs());
    ret.add(pp.getCrosslinker());
    ret.add(d2s(pp.getScore()));
    ret.add("" + pp.isDecoy());
    ret.add("" + pp.isTT());
    ret.add("" + pp.isTD());
    ret.add("" + pp.isDD());
    ret.add(pp.getFDRGroup());
    ret.add(d2s(pp.getFDR()));
    ret.add(d2s(pp.getEstimatedFDR()));
    ret.add(d2s(pp.getPEP()));
    ret.add("");
    ret.add((pp.getFdrProteinGroup1() == null ? "" : d2s(pp.getFdrProteinGroup1().getFDR())));
    ret.add((pp.getFdrProteinGroup2() == null ? "" : d2s(pp.getFdrProteinGroup2().getFDR())));
    ret.add(l == null ? "" : d2s(l.getFDR()));
    ret.add(ppi == null ? "" : d2s(ppi.getFDR()));
    ret.add("");
    ret.add(l == null ? "" : i2s(l.getLinkID()));
    ret.add(ppi == null ? "" : i2s(ppi.getProteinGroupPairID()));
    // collect the best PSM score per run over all represented PSMs
    HashSet<String> linkRuns = new HashSet<>();
    HashMap<String, Double> runScore = new HashMap<>();
    for (PSM psm : pp.getAllPSMs()) {
        for (PSM upsm : psm.getRepresented()) {
            psmToRun(upsm, linkRuns, runScore);
        }
    }
    // one column per known run; empty where this pair was not seen in that run
    for (String r : runs) {
        Double d = runScore.get(r);
        if (d == null) {
            ret.add("");
        } else {
            ret.add(d2s(d));
        }
    }
    return ret;
}
/** Serialises a protein-group link as one separator-joined line. */
protected String getLinkOutputLine(ProteinGroupLink l, String seperator) {
    ArrayList<String> fields = getLinkOutputLine(l);
    return RArrayUtils.toString(fields, seperator);
}
/**
 * Builds the CSV fields for a residue-pair (protein-group link). Column
 * order must match {@link #getLinkOutputHeader()}, including the per-run
 * score columns. Also reports the best (smallest) peptide-pair and PSM
 * FDR supporting the link.
 *
 * @param l the link to serialise
 * @return one CSV field per header column
 */
protected ArrayList<String> getLinkOutputLine(ProteinGroupLink l) {
    int[] pepids = l.getPeptidePairIDs();
    String[] psmids = l.getPSMIDs();
    HashSet<String> linkRuns = new HashSet<>();
    HashMap<String, Double> runScore = new HashMap<>();
    ProteinGroupPair ppi = l.getFdrPPI();
    double top_pepfdr = Double.MAX_VALUE;
    double top_psmfdr = Double.MAX_VALUE;
    HashSet<String> xl = new HashSet<String>();
    for (PeptidePair pp : l.getPeptidePairs()) {
        if (pp.getFDR() < top_pepfdr) {
            top_pepfdr = pp.getFDR();
        }
        xl.add(pp.getCrosslinker());
        for (PSM psm : pp.getTopPSMs()) {
            if (psm.getFDR() < top_psmfdr) {
                top_psmfdr = psm.getFDR();
            }
        }
        // best PSM score per run over all represented PSMs
        for (PSM psm : pp.getAllPSMs()) {
            for (PSM upsm : psm.getRepresented()) {
                psmToRun(upsm, linkRuns, runScore);
            }
        }
    }
    ProteinGroup pg1 = l.getProteinGroup1();
    ProteinGroup pg2 = l.getProteinGroup2();
    ArrayList<String> ret = new ArrayList<String>();
    ret.add("" + i2s(l.getLinkID()));
    ret.add(RArrayUtils.toString(pepids, ";", numberFormat));
    ret.add(RArrayUtils.toString(psmids, ";"));
    ret.add(l.site1Accessions());
    ret.add(l.site1Descriptions());
    ret.add("" + pg1.isDecoy());
    ret.add(l.site2Accessions());
    ret.add(l.site2Descriptions());
    ret.add("" + pg2.isDecoy());
    ret.add(RArrayUtils.toString(l.site1Sites(), ";", numberFormat));
    ret.add(RArrayUtils.toString(l.site2Sites(), ";", numberFormat));
    ret.add(RArrayUtils.toString(xl, ";"));
    ret.add(d2s(l.getScore()));
    ret.add("" + l.isDecoy());
    ret.add("" + l.isTT());
    ret.add("" + l.isTD());
    ret.add("" + l.isDD());
    ret.add(i2s(psmids.length));
    ret.add(i2s(pepids.length));
    ret.add(l.getFDRGroup());
    ret.add(d2s(l.getFDR()));
    ret.add(d2s(l.getEstimatedFDR()));
    ret.add(d2s(l.getPEP()));
    for (String r : runs) {
        Double d = runScore.get(r);
        if (d == null) {
            ret.add("");
        } else {
            ret.add(d2s(d));
        }
    }
    ret.add("");
    ret.add(d2s(l.getProteinGroup1().getFDR()));
    ret.add(d2s(l.getProteinGroup2().getFDR()));
    ret.add(d2s(ppi.getFDR()));
    ret.add("");
    ret.add(d2s(top_pepfdr));
    ret.add(d2s(top_psmfdr));
    ret.add("");
    // fix: the PPI id is an integer id — format with i2s as done for the
    // same value in getXlPepeptideOutputLine, not with the double formatter
    ret.add(i2s(ppi.getProteinGroupPairID()));
    return ret;
}
/**
 * Records the run of the given PSM in {@code linkRuns} and keeps, per run,
 * the highest PSM score seen so far in {@code runScore}.
 *
 * @param psm      the PSM whose run/score are folded in
 * @param linkRuns accumulates the set of run names
 * @param runScore accumulates the best score per run
 */
private void psmToRun(PSM psm, HashSet<String> linkRuns, HashMap<String, Double> runScore) {
    String runName = psm.getRun();
    linkRuns.add(runName);
    Double best = runScore.get(runName);
    if (best == null || best < psm.getScore()) {
        runScore.put(runName, psm.getScore());
    }
}
/** Joins the link header columns with the given separator. */
protected String getLinkOutputHeader(String seperator) {
    ArrayList<String> columns = getLinkOutputHeader();
    return RArrayUtils.toString(columns, seperator);
}
/**
 * Header columns for the residue-pair (link) CSV output: fixed columns,
 * then one score column per run, then trailing summary/FDR columns.
 * NOTE: the exact spellings "fromSite" and "Croslinkers" are preserved —
 * downstream consumers may match on this header text.
 *
 * @return the header column names
 */
protected ArrayList<String> getLinkOutputHeader() {
    String[] fixed = {
        "LinkID",
        "PeptidePairIDs",
        "PSMIDs",
        "Protein1",
        "Description1",
        "Decoy1",
        "Protein2",
        "Description2",
        "Decoy2",
        "fromSite",
        "ToSite",
        "Croslinkers",
        "Score",
        "isDecoy",
        "isTT",
        "isTD",
        "isDD",
        "count PSMs",
        "count peptide pairs",
        "fdrGroup",
        "fdr",
        "ifdr",
        "PEP"};
    String[] trailing = {
        "",
        "Protein1FDR",
        "Protein2FDR",
        "PPIFDR",
        "",
        "top pep fdr",
        "top psm fdr",
        "",
        "PPI id"};
    ArrayList<String> header = new ArrayList<String>();
    for (String column : fixed) {
        header.add(column);
    }
    for (String runName : runs) {
        header.add(runName);
    }
    for (String column : trailing) {
        header.add(column);
    }
    return header;
}
/** Joins the PPI header columns with the given separator. */
protected String getPPIOutputHeader(String seperator) {
    ArrayList<String> columns = getPPIOutputHeader();
    return RArrayUtils.toString(columns, seperator);
}
/**
 * Header columns for the protein-protein-interaction (PPI) CSV output.
 *
 * @return the header column names
 */
protected ArrayList<String> getPPIOutputHeader() {
    String[] columns = {
        "ProteinGroupPairID",
        "LinkIDs",
        "PeptidePairIDs",
        "PSMIDs",
        "Protein1",
        "Descriptions1",
        "isDecoy1",
        "Protein2",
        "Description2",
        "isDecoy2",
        "Crosslinker",
        "Score",
        "isDecoy",
        "isTT",
        "isTD",
        "isDD",
        "count PSMs",
        "count peptide pairs",
        "count links",
        "fdrGroup",
        "fdr",
        "ifdr",
        "PEP",
        "",
        "top link fdr",
        "top peptide fdr",
        "top psm_fdr"};
    ArrayList<String> header = new ArrayList<String>();
    for (String column : columns) {
        header.add(column);
    }
    return header;
}
/** Serialises a protein-group pair as one separator-joined line. */
protected String getPPIOutputLine(ProteinGroupPair pgp, String seperator) {
    ArrayList<String> fields = getPPIOutputLine(pgp);
    return RArrayUtils.toString(fields, seperator);
}
/**
 * Builds the CSV fields for a protein-group pair (PPI). Column order must
 * match {@link #getPPIOutputHeader()}. Also reports the best (smallest)
 * link, peptide-pair and PSM FDR supporting the pair.
 *
 * @param pgp the protein-group pair to serialise
 * @return one CSV field per header column
 */
protected ArrayList<String> getPPIOutputLine(ProteinGroupPair pgp) {
    int[] pepids = pgp.getPeptidePairIDs();
    String[] psmids = pgp.getPSMIDs();
    int[] linkids = pgp.getLinkIDs();
    // track the minimum FDR seen on each supporting level
    double top_linkfdr = Double.MAX_VALUE;
    double top_pepfdr = Double.MAX_VALUE;
    double top_psmfdr = Double.MAX_VALUE;
    HashSet<String> xl = new HashSet<>();
    for (ProteinGroupLink l : pgp.getLinks()) {
        if (l.getFDR() < top_linkfdr) {
            top_linkfdr = l.getFDR();
        }
        for (PeptidePair pp : l.getPeptidePairs()) {
            if (pp.getFDR() < top_pepfdr) {
                top_pepfdr = pp.getFDR();
            }
            for (PSM psm : pp.getTopPSMs()) {
                if (psm.getFDR() < top_psmfdr) {
                    top_psmfdr = psm.getFDR();
                }
            }
            xl.add(pp.getCrosslinker());
        }
    }
    ArrayList<String> ret = new ArrayList<String>();
    ret.add("" + pgp.getProteinGroupPairID());
    ret.add(RArrayUtils.toString(linkids, ";", numberFormat));
    ret.add(RArrayUtils.toString(pepids, ";", numberFormat));
    ret.add(RArrayUtils.toString(psmids, ";"));
    ret.add(pgp.getProtein1().accessions());
    ret.add(pgp.getProtein1().descriptions());
    ret.add("" + pgp.getProtein1().isDecoy());
    ret.add(pgp.getProtein2().accessions());
    ret.add(pgp.getProtein2().descriptions());
    ret.add("" + pgp.getProtein2().isDecoy());
    ret.add(RArrayUtils.toString(xl, ";"));
    ret.add(d2s(pgp.getScore()));
    ret.add("" + pgp.isDecoy());
    ret.add("" + pgp.isTT());
    ret.add("" + pgp.isTD());
    ret.add("" + pgp.isDD());
    ret.add(i2s(psmids.length));
    ret.add(i2s(pepids.length));
    ret.add(i2s(linkids.length));
    ret.add(pgp.getFDRGroup());
    ret.add(d2s(pgp.getFDR()));
    ret.add(d2s(pgp.getEstimatedFDR()));
    ret.add(d2s(pgp.getPEP()));
    ret.add("");
    ret.add(d2s(top_linkfdr));
    ret.add(d2s(top_pepfdr));
    ret.add(d2s(top_psmfdr));
    return ret;
}
/**
 * Header columns for the protein-group CSV output, followed by one score
 * column per run.
 *
 * @return the header column names
 */
protected ArrayList<String> getProteinGroupOutputHeader() {
    String[] fixed = {
        "ProteinGroupID",
        "ProteinGroup",
        "Descriptions",
        "Sequence",
        "Crosslinker",
        "Score",
        "isDecoy",
        "isTT",
        "isTD",
        "isDD",
        "PSM IDs",
        "fdrGroup",
        "fdr",
        "ifdr",
        "PEP"};
    ArrayList<String> header = new ArrayList<String>();
    for (String column : fixed) {
        header.add(column);
    }
    for (String runName : runs) {
        header.add(runName);
    }
    return header;
}
/**
 * Builds the CSV fields for a protein group. Column order must match
 * {@link #getProteinGroupOutputHeader()}, including the per-run score
 * columns at the end.
 * <p>
 * Fix: all floating-point fields are now formatted through {@code d2s}
 * so the {@code --outputlocale=} setting is honoured — previously the
 * FDR/PEP columns and the per-run scores used default {@code toString}
 * formatting, unlike every other output method in this class. An unused
 * local list of peptide-pair ids was removed.
 *
 * @param pg the protein group to serialise
 * @return one CSV field per header column
 */
protected ArrayList<String> getProteinGroupOutput(ProteinGroup pg) {
    ArrayList<String> psmids = new ArrayList<>();
    HashSet<String> linkRuns = new HashSet<>();
    HashMap<String, Double> runScore = new HashMap<>();
    // track the minimum supporting peptide-pair and PSM FDR
    double top_pepfdr = Double.MAX_VALUE;
    double top_psmfdr = Double.MAX_VALUE;
    HashSet<String> crosslinker = new HashSet<>();
    for (PeptidePair pp : pg.getPeptidePairs()) {
        crosslinker.add(pp.getCrosslinker());
        if (pp.getFDR() < top_pepfdr) {
            top_pepfdr = pp.getFDR();
        }
        for (PSM psm : pp.getTopPSMs()) {
            if (psm.getFDR() < top_psmfdr) {
                top_psmfdr = psm.getFDR();
            }
        }
        for (PSM psm : pp.getAllPSMs()) {
            for (PSM upsm : psm.getRepresented()) {
                psmToRun(upsm, linkRuns, runScore);
                psmids.add(upsm.getPsmID());
            }
        }
    }
    ArrayList<String> sequences = new ArrayList<>();
    for (Protein p : pg.getProteins()) {
        sequences.add(p.getSequence());
    }
    ArrayList<String> ret = new ArrayList<String>();
    ret.add(pg.ids());
    ret.add(pg.accessions());
    ret.add(pg.descriptions());
    ret.add(RArrayUtils.toString(sequences, ";"));
    ret.add(RArrayUtils.toString(crosslinker, ";"));
    ret.add(d2s(pg.getScore()));
    ret.add(pg.isDecoy() + "");
    ret.add(pg.isTT() + "");
    ret.add(pg.isTD() + "");
    ret.add(pg.isDD() + "");
    ret.add(RArrayUtils.toString(psmids, ";"));
    ret.add(pg.getFDRGroup());
    ret.add(d2s(pg.getFDR()));
    ret.add(d2s(pg.getEstimatedFDR()));
    ret.add(d2s(pg.getPEP()));
    for (String r : runs) {
        Double d = runScore.get(r);
        if (d == null) {
            ret.add("");
        } else {
            ret.add(d2s(d));
        }
    }
    return ret;
}
/**
 * Sequence used for peptide output — a thin hook around
 * {@code Peptide.getSequence()} (presumably so subclasses can override
 * it, e.g. to include modifications; confirm in subclasses).
 */
protected String getPeptideSequence(Peptide p) {
    String sequence = p.getSequence();
    return sequence;
}
/**
 * Convenience overload of addMatch for callers that have no protein
 * sequences available; delegates with empty sequence strings.
 */
public PSM addMatch(String psmID, String run, String scan, Long pepid1, Long pepid2, String pepSeq1, String pepSeq2, int peplen1, int peplen2, int site1, int site2, boolean isDecoy1, boolean isDecoy2, int charge, double score, Long protid1, String accession1, String description1, Long protid2, String accession2, String description2, int pepPosition1, int pepPosition2, double peptide1score, double peptide2score, String isSpecialCase, String crosslinker) {
    // no protein sequences known in this overload
    String noSequence = "";
    return addMatch(psmID, run, scan, pepid1, pepid2, pepSeq1, pepSeq2, peplen1, peplen2, site1, site2, isDecoy1, isDecoy2, charge, score, protid1, accession1, description1, protid2, accession2, description2, pepPosition1, pepPosition2, noSequence, noSequence, peptide1score, peptide2score, isSpecialCase, crosslinker);
}
/**
 * Overload of addMatch that reorders run/scan into the trailing-parameter
 * form of the main implementation and delegates directly.
 */
public PSM addMatch(String psmID, String run, String scan, Long pepid1, Long pepid2, String pepSeq1, String pepSeq2, int peplen1, int peplen2, int site1, int site2, boolean isDecoy1, boolean isDecoy2, int charge, double score, Long protid1, String accession1, String description1, Long protid2, String accession2, String description2, int pepPosition1, int pepPosition2, String Protein1Sequence, String Protein2Sequence, double peptide1score, double peptide2score, String isSpecialCase, String crosslinker) {
    return addMatch(psmID, pepid1, pepid2, pepSeq1, pepSeq2, peplen1, peplen2, site1, site2, isDecoy1, isDecoy2, charge, score, protid1, accession1, description1, protid2, accession2, description2, pepPosition1, pepPosition2, Protein1Sequence, Protein2Sequence, peptide1score, peptide2score, isSpecialCase, crosslinker, run, scan);
}
/**
 * Registers a single match: classifies it as linear/internal/between,
 * registers both proteins and peptides in the global registries
 * (deduplicating via {@code register}), and delegates to the final
 * addMatch overload to create the PSM.
 *
 * @return the created (or deduplicated) PSM
 */
public PSM addMatch(String psmID, Long pepid1, Long pepid2, String pepSeq1, String pepSeq2, int peplen1, int peplen2, int site1, int site2, boolean isDecoy1, boolean isDecoy2, int charge, double score, Long protid1, String accession1, String description1, Long protid2, String accession2, String description2, int pepPosition1, int pepPosition2, String Protein1Sequence, String Protein2Sequence, double peptide1score, double peptide2score, String isSpecialCase, String crosslinker, String run, String Scan) {
    // linear match: either peptide sequence is missing
    boolean linear = pepSeq2 == null || pepSeq2.isEmpty() || pepSeq1 == null || pepSeq1.isEmpty();
    // internal: both peptides come from the same protein, also matching a
    // "REV_"-prefixed decoy accession against its target counterpart
    boolean internal = (!linear) && (accession1.contentEquals(accession2) || ("REV_" + accession1).contentEquals(accession2) || accession1.contentEquals("REV_" + accession2));
    boolean between = !(linear || internal);
    Protein p1 = new Protein(protid1, accession1, description1, isDecoy1, linear, internal, between);
    if (Protein1Sequence != null && !Protein1Sequence.isEmpty()) {
        p1.setSequence(Protein1Sequence);
    }
    // register() deduplicates against previously seen proteins/peptides
    p1 = allProteins.register(p1);
    Peptide pep1 = allPeptides.register(new Peptide(pepid1, pepSeq1, isDecoy1, p1, pepPosition1, peplen1));
    Protein p2;
    Peptide pep2;
    if (linear) {
        // placeholders mark the missing second side of a linear match
        p2 = Protein.NOPROTEIN;
        pep2 = Peptide.NOPEPTIDE;
    } else {
        p2 = new Protein(protid2, accession2, description2, isDecoy2, false, internal, between);
        if (Protein2Sequence != null && !Protein2Sequence.isEmpty()) {
            p2.setSequence(Protein2Sequence);
        }
        p2 = allProteins.register(p2);
        pep2 = allPeptides.register(new Peptide(pepid2, pepSeq2, isDecoy2, p2, pepPosition2, peplen2));
    }
    PSM psm = addMatch(psmID, pep1, pep2, peplen1, peplen2, site1, site2, charge, score, p1, p2, pepPosition1, pepPosition2, peptide1score, peptide2score, isSpecialCase, crosslinker, run, Scan);
    return psm;
}
/**
 * All PSMs currently known to this FDR calculation.
 *
 * @return the allPSMs
 */
public SelfAddHashSet<PSM> getAllPSMs() {
    return allPSMs;
}
/**
 * Replaces the full set of PSMs used by this FDR calculation.
 *
 * @param allPSMs the allPSMs to set
 */
public void setAllPSMs(SelfAddHashSet<PSM> allPSMs) {
    this.allPSMs = allPSMs;
}
/**
 * indicates, whether the psms went through a score normalisation
 *
 * @return the isNormalized normalisation mode (Normalisation.None if no
 *         normalisation was applied)
 */
public Normalisation isNormalized() {
    return isNormalized;
}
/**
 * indicates, whether the psms went through a score normalisation
 *
 * @param isNormalized the normalisation mode to record
 */
public void setNormalised(Normalisation isNormalized) {
    this.isNormalized = isNormalized;
}
/**
 * We normalize psm-scores by median and MAD - but to go around some quirks
 * of our score propagation scores then get shifted so that the lowest score
 * is around one.
 *
 * @return the psmNormOffset shift applied to normalised PSM scores
 */
public double getPsmNormalizationOffset() {
    return psmNormOffset;
}
/**
 * We normalize psm-scores by median and MAD - but to go around some quirks
 * of our score propagation scores then get shifted so that the lowest score
 * is around one.
 *
 * @param psmNormOffset the shift to apply to normalised PSM scores
 */
public void setPsmNormalizationOffset(double psmNormOffset) {
    this.psmNormOffset = psmNormOffset;
}
/**
 * the version of xiFDR to be reported
 * Lazily parsed from the embedded xifdrproject.properties on first access.
 * NOTE(review): the lazy initialisation is not synchronised — harmless if
 * parsing is idempotent, but confirm if this can be hit from multiple
 * threads.
 *
 * @return the xiFDRVersion
 */
public static Version getXiFDRVersion() {
    if (xiFDRVersion == null) {
        xiFDRVersion = Version.parseEmbededVersion("xifdrproject.properties", "xifdr.version");
    }
    return xiFDRVersion;
}
/**
 * the version of xiFDR to be reported
 *
 * @param aXiFDRVersion the xiFDRVersion to set (overrides the embedded one)
 */
public static void setXiFDRVersion(Version aXiFDRVersion) {
    xiFDRVersion = aXiFDRVersion;
}
/**
 * how many decoys does a fdr group need to have to be reported as result
 *
 * @return the minDecoys (stored internally as minTDChance)
 */
public Integer getMinDecoys() {
    return minTDChance;
}
/**
 * how many decoys does a fdr group need to have to be reported as result
 * Also rebuilds the validity check used by the FDR calculator so the new
 * threshold takes effect immediately.
 *
 * @param minDecoys the minDecoys to set
 */
public void setMinDecoys(Integer minDecoys) {
    check = new ValidityCheckImplement(0, minDecoys);
    calc.setValidityCheck(check);
    this.minTDChance = minDecoys;
}
/**
 * Boosts the FDR settings towards the given level, taking the one-/two-step
 * choice from the settings themselves.
 */
public MaximisingStatus maximise(FDRSettings fdrSettings, OfflineFDR.FDRLevel level, final boolean between, final MaximizingUpdate stateUpdate) {
    boolean twoStep = fdrSettings.twoStepOptimization();
    return maximise(fdrSettings, level, between, stateUpdate, twoStep);
}
/**
 * Boosts the FDR settings towards the given level. In two-step mode —
 * enabled only when both a filter boost (delta score / fragments /
 * coverage) and an FDR-level boost are requested — the FDR levels are
 * optimised first with the filter boosts disabled; the resulting FDR
 * values are then frozen and a second pass optimises the filters.
 * Otherwise a single optimisation pass is run.
 *
 * @param fdrSettings base settings (not modified; copied per pass)
 * @param level       the FDR level to maximise results for
 * @param between     restrict to between-protein matches
 * @param stateUpdate progress callback
 * @param twoStep     whether to attempt the two-pass optimisation
 * @return the status/result of the (final) optimisation pass
 */
public MaximisingStatus maximise(FDRSettings fdrSettings, OfflineFDR.FDRLevel level, final boolean between, final MaximizingUpdate stateUpdate, boolean twoStep) {
    if (twoStep
            && (fdrSettings.boostDeltaScore() || fdrSettings.boostMinFragments() || fdrSettings.boostPepCoverage())
            && (fdrSettings.boostPSMs() || fdrSettings.boostPeptidePairs() || fdrSettings.boostProteins() || fdrSettings.boostLinks())) {
        // pass 1: optimise the FDR levels with all filter boosts turned off
        FDRSettingsImpl step = new FDRSettingsImpl();
        step.setAll(fdrSettings);
        step.boostDeltaScore(false);
        step.boostMinFragments(false);
        step.boostPepCoverage(false);
        step.boostPeptideStubs(false);
        step.boostPeptideDoublets(false);
        MaximisingStatus res = maximiseInner(step, level, between, stateUpdate);
        // freeze the FDR values found in pass 1 ...
        step.setPSMFDR(res.showPSMFDR);
        step.boostPSMs(false);
        step.setPeptidePairFDR(res.showPepFDR);
        step.boostPeptidePairs(false);
        step.setProteinGroupFDR(res.showProtFDR);
        step.boostProteins(false);
        step.setProteinGroupLinkFDR(res.showLinkFDR);
        step.boostLinks(false);
        // ... and re-enable the originally requested filter boosts for pass 2
        step.boostDeltaScore(fdrSettings.boostDeltaScore());
        step.boostMinFragments(fdrSettings.boostMinFragments());
        step.boostPepCoverage(fdrSettings.boostPepCoverage());
        step.boostPeptideStubs(fdrSettings.boostPeptideStubs());
        step.boostPeptideDoublets(fdrSettings.boostPeptideDoublets());
        return maximiseInner(step, level, between, stateUpdate);
    } else {
        return maximiseInner(fdrSettings, level, between, stateUpdate);
    }
}
/**
 * Scans combinations of pre-filters (delta score, peptide coverage, fragment/stub/doublet
 * minima) and per-level FDR cut-offs, searching for the combination that maximises the
 * number of results at the requested {@code level} (optionally preferring between-protein
 * matches). The scan is repeated with progressively refined ranges until no improvement
 * was seen for several rounds, then the best settings are applied and returned.
 *
 * @param fdrSettings the settings to start from (copied; never modified)
 * @param level       the FDR level whose result count is maximised
 * @param between     if true, prefer maximising between-protein matches
 * @param stateUpdate callback that receives progress and the final result
 * @return the best settings/counts found, or null on error or when the
 *         coverage pre-filter leaves fewer than 50 PSMs
 */
public MaximisingStatus maximiseInner(FDRSettings fdrSettings, OfflineFDR.FDRLevel level, final boolean between, final MaximizingUpdate stateUpdate) {
    // work on a private copy so the caller's settings are never modified
    final FDRSettingsImpl settings = new FDRSettingsImpl();
    settings.setAll(fdrSettings);
    // how many rounds without improvement before we give up
    int maxCountDown = 4;
    try {
        int steps = settings.getBoostingSteps();
        StringBuffer sb = new StringBuffer();
        double maxDelta = 1;
        double maxPeptideCoverage = 1;
        // get some settings, that are constant for all calculations
        boolean ignoreGroups = this.ignoreGroupsSetting;
        // if minpepcoverage is 0 and we boost on it we can probably safely start stricter
        if (settings.getMinPeptideCoverageFilter() == 0 && settings.boostPepCoverage()) {
            maxPeptideCoverage = 0.8;
        }
        // also for delta score we can probably safely start stricter
        if (settings.boostDeltaScore()) {
            // there is probably no point in being more restrictive than 0.7 (even that is overkill)
            if (settings.getMinDeltaScoreFilter() < 0.6) {
                maxDelta = 0.7;
            }
        }
        // one MaximizeLevelInfo per boostable dimension; integer-valued filters are
        // encoded as (offset - value) so every dimension scans as a descending range
        final MaximizeLevelInfo pepStubs = new MaximizeLevelInfoInteger(2 - settings.getMinPeptideStubFilter(), settings.boostPeptideStubs(), Math.min(steps, 3));
        final MaximizeLevelInfo pepDoublets = new MaximizeLevelInfoInteger(2 - settings.getMinPeptideDoubletFilter(), settings.boostPeptideDoublets(), Math.min(steps, 3));
        final MaximizeLevelInfo absPepCover = new MaximizeLevelInfoInteger(10 - settings.getMinPeptideFragmentsFilter(), settings.boostMinFragments(), steps);
        final MaximizeLevelInfo deltaScore = new MaximizeLevelInfo(1 - settings.getMinDeltaScoreFilter(), 1 - maxDelta, settings.boostDeltaScore(), steps);
        final MaximizeLevelInfo pepCoverage = new MaximizeLevelInfo(1 - settings.getMinPeptideCoverageFilter(), 1 - maxPeptideCoverage, settings.boostPepCoverage(), steps);
        final MaximizeLevelInfo psmFDRInfo = new MaximizeLevelInfo(settings.getPSMFDR(), settings.boostPSMs(), steps);
        final MaximizeLevelInfo pepFDRInfo = new MaximizeLevelInfo(settings.getPeptidePairFDR(), settings.boostPeptidePairs() && level.compareTo(OfflineFDR.FDRLevel.PEPTIDE_PAIR) > 0, steps);
        final MaximizeLevelInfo protFDRInfo = new MaximizeLevelInfo(settings.getProteinGroupFDR(), settings.boostProteins() && level.compareTo(OfflineFDR.FDRLevel.PROTEINGROUP) > 0, steps);
        final MaximizeLevelInfo linkFDRInfo = new MaximizeLevelInfo(settings.getProteinGroupLinkFDR(), settings.boostLinks() && level.compareTo(OfflineFDR.FDRLevel.PROTEINGROUPLINK) > 0, steps);
        final MaximizeLevelInfo ppiFDRInfo = new MaximizeLevelInfo(settings.getProteinGroupPairFDR(), false, steps);
        final ArrayList<MaximizeLevelInfo> subScoreFilter = new ArrayList<>();
        // the dimension whose result count we are actually maximising
        MaximizeLevelInfo targetInfoFirst;
        switch (level) {
            case PSM:
                targetInfoFirst = psmFDRInfo;
                break;
            case PEPTIDE_PAIR:
                targetInfoFirst = pepFDRInfo;
                break;
            case PROTEINGROUP:
                targetInfoFirst = protFDRInfo;
                break;
            case PROTEINGROUPLINK:
                targetInfoFirst = linkFDRInfo;
                break;
            case PROTEINGROUPPAIR:
                targetInfoFirst = ppiFDRInfo;
                break;
            default:
                targetInfoFirst = null;
                break;
        }
        final MaximizeLevelInfo targetInfo = targetInfoFirst;
        boolean optimizing = true;
        int maxCount = 0;
        int maxCountBetween = 0;
        int countDown = maxCountDown;
        int optimizingRound = 1;
        while (optimizing) {
            int lastMaxCount = maxCount;
            Logger.getLogger(this.getClass().getName()).log(Level.INFO, "Round " + optimizingRound++);
            Logger.getLogger(this.getClass().getName()).log(Level.INFO, "{0}{1}{2}{3}{4}{5}{6}{7}{8}Steps : {9}{10}", new Object[]{
                pepStubs.boost ? "stubs from : " + (2 - pepStubs.fromFDR) + " to " + (2 - pepStubs.toFDR) + "\n" : "",
                pepDoublets.boost ? "doublets from : " + (2 - pepDoublets.fromFDR) + " to " + (2 - pepDoublets.toFDR) + "\n" : "",
                deltaScore.boost ? "deltaFilter from : " + (1 - deltaScore.fromFDR) + " to " + (1 - deltaScore.toFDR) + "\n" : "",
                pepCoverage.boost ? "PepCoverage from : " + (1 - pepCoverage.fromFDR) + " to " + (1 - pepCoverage.toFDR) + "\n" : "",
                absPepCover.boost ? "Min Pep Frags from : " + (10 - absPepCover.fromFDR) + " to " + (10 - absPepCover.toFDR) + "\n" : "",
                psmFDRInfo.boost ? "PSM fdr from : " + psmFDRInfo.fromFDR + " to " + psmFDRInfo.toFDR + "\n" : "",
                pepFDRInfo.boost ? "Peptide pair fdr from " + pepFDRInfo.fromFDR + " to " + pepFDRInfo.toFDR + "\n" : "",
                protFDRInfo.boost ? "Protein-groupfdr from " + protFDRInfo.fromFDR + " to " + protFDRInfo.toFDR + "\n" : "",
                linkFDRInfo.boost ? "linkfdr from " + linkFDRInfo.fromFDR + " to " + linkFDRInfo.toFDR + "\n" : "",
                psmFDRInfo.steps + psmFDRInfo.stepChange, ""});
            FDRResult result = new FDRResult();
            // initialise subscore filter
            if (subScoreFilter.size() > 0) {
                for (int c = 0; c < subScoreFilter.size(); c++) {
                    subScoreFilter.get(c).firstStep();
                }
            }
            // find the combination with the maximum number of matches at the target level
            pepDoubletLoop:
            for (pepDoublets.firstStep(); pepDoublets.doThisStep(); pepDoublets.nextStep()) {
                settings.setMinPeptideDoubletFilter(2 - (int) Math.round(pepDoublets.getCurrentFDR()));
                pepStubsLoop:
                for (pepStubs.firstStep(); pepStubs.doThisStep(); pepStubs.nextStep()) {
                    settings.setMinPeptideStubFilter(2 - (int) Math.round(pepStubs.getCurrentFDR()));
                    deltaScoreloop:
                    for (deltaScore.firstStep(); deltaScore.doThisStep(); deltaScore.nextStep()) {
                        settings.setMinDeltaScoreFilter(1 - deltaScore.getCurrentFDR());
                        pepFragsloop:
                        for (absPepCover.firstStep(); absPepCover.doThisStep(); absPepCover.nextStep()) {
                            settings.setMinPeptideFragmentsFilter(10 - (int) Math.round(absPepCover.getCurrentFDR()));
                            pepCoverageloop:
                            for (pepCoverage.firstStep(); pepCoverage.doThisStep(); pepCoverage.nextStep()) {
                                settings.setMinPeptideCoverageFilter(1 - pepCoverage.getCurrentFDR());
                                psmloop:
                                for (psmFDRInfo.firstStep(); psmFDRInfo.doThisStep(); psmFDRInfo.nextStep()) {
                                    settings.setPSMFDR(psmFDRInfo.getCurrentFDR());
                                    this.calculatePSMFDR(true, ignoreGroups, result, settings);
                                    psmFDRInfo.setCountsPrefilter(result.psmFDR);
                                    if (stopMaximizing) {
                                        break deltaScoreloop;
                                    }
                                    // if we don't get PSMs - stop looking at later stages
                                    if (result.psmFDR.getResultCount() == 0) {
                                        break psmloop;
                                    }
                                    peploop:
                                    for (pepFDRInfo.firstStep(); pepFDRInfo.doThisStep(); pepFDRInfo.nextStep()) {
                                        protloop:
                                        for (protFDRInfo.firstStep(); protFDRInfo.doThisStep(); protFDRInfo.nextStep()) {
                                            // calculate peptide level fdr
                                            settings.setPeptidePairFDR(pepFDRInfo.getCurrentFDR());
                                            this.calculatePeptidePairFDR(true, result, settings, ignoreGroups);
                                            // if we don't get peptide pairs - stop looking at later stages
                                            if (result.peptidePairFDR.getResultCount() == 0) {
                                                break peploop;
                                            }
                                            pepFDRInfo.setCountsPrefilter(result.peptidePairFDR);
                                            if (stopMaximizing) {
                                                break deltaScoreloop;
                                            }
                                            // calculate protein level fdr
                                            settings.setProteinGroupFDR(protFDRInfo.getCurrentFDR());
                                            this.calculateProteinGroupFDR(ignoreGroups, true, settings, result);
                                            if (result.proteinGroupFDR.getResultCount() == 0) {
                                                break protloop;
                                            }
                                            protFDRInfo.setCountsPrefilter(result.proteinGroupFDR);
                                            // cut down the peptides by proteins
                                            this.filterFDRPeptidePairsByFDRProteinGroups(result);
                                            if (stopMaximizing) {
                                                break deltaScoreloop;
                                            }
                                            linkloop:
                                            for (linkFDRInfo.firstStep(); linkFDRInfo.doThisStep(); linkFDRInfo.nextStep()) {
                                                // calculate links
                                                settings.setProteinGroupLinkFDR(linkFDRInfo.getCurrentFDR());
                                                this.calculateLinkFDR(ignoreGroups, true, settings, result);
                                                linkFDRInfo.setCountsPrefilter(result.proteinGroupLinkFDR);
                                                if (result.proteinGroupLinkFDR.getResultCount() == 0) {
                                                    break linkloop;
                                                }
                                                if (stopMaximizing) {
                                                    break deltaScoreloop;
                                                }
                                                settings.setProteinGroupPairFDR(ppiFDRInfo.getCurrentFDR());
                                                this.calculateProteinGroupPairFDR(ignoreGroups, true, settings, result);
                                                if (result.proteinGroupPairFDR.getResultCount() == 0) {
                                                    break linkloop;
                                                }
                                                // now we need to filter down to the required level
                                                this.filterFDRLinksByFDRProteinGroupPairs(result);
                                                this.filterFDRPeptidePairsByFDRProteinGroupLinks(result);
                                                this.filterFDRProteinGroupsByFDRPeptidePairs(result);
                                                // how many matches do we now have?
                                                pepCoverage.setCounts(result.psmFDR);
                                                absPepCover.setCounts(result.psmFDR);
                                                deltaScore.setCounts(result.psmFDR);
                                                psmFDRInfo.setCounts(result.psmFDR);
                                                pepFDRInfo.setCounts(result.peptidePairFDR);
                                                protFDRInfo.setCounts(result.proteinGroupFDR);
                                                linkFDRInfo.setCounts(result.proteinGroupLinkFDR);
                                                ppiFDRInfo.setCounts(result.proteinGroupPairFDR);
                                                int count = targetInfo.count;
                                                int countBetween = targetInfo.countBetween;
                                                if (count == 0 && maxCount <= targetInfo.countPreFilter) {
                                                    count = targetInfo.countPreFilter;
                                                }
                                                if (countBetween == 0 && maxCountBetween <= targetInfo.countBetweenPreFilter) {
                                                    countBetween = targetInfo.countBetweenPreFilter;
                                                }
                                                // is it a new best?
                                                if (((between && (countBetween > maxCountBetween || (countBetween == maxCountBetween && count > maxCount)))
                                                        || (!between && count > maxCount))) {
                                                    maxCount = count;
                                                    maxCountBetween = countBetween;
                                                    // record the current cut-offs as the new optimum on every
                                                    // dimension. BUGFIX: previously protFDRInfo was recorded
                                                    // twice while ppiFDRInfo, pepStubs and pepDoublets were
                                                    // never recorded (their maximumFDR values are used when
                                                    // the final settings are assembled below, and the
                                                    // equal-best branch updates the same full set).
                                                    pepStubs.setNewMaxFDR();
                                                    pepDoublets.setNewMaxFDR();
                                                    deltaScore.setNewMaxFDR();
                                                    absPepCover.setNewMaxFDR();
                                                    pepCoverage.setNewMaxFDR();
                                                    psmFDRInfo.setNewMaxFDR();
                                                    pepFDRInfo.setNewMaxFDR();
                                                    protFDRInfo.setNewMaxFDR();
                                                    linkFDRInfo.setNewMaxFDR();
                                                    ppiFDRInfo.setNewMaxFDR();
                                                    // record that we found a new top
                                                    String message = "link count, " + linkFDRInfo.count + "(" + linkFDRInfo.countBetween + " between), Protein Pairs, " + ppiFDRInfo.count + "(" + ppiFDRInfo.countBetween + " between)";
                                                    if (linkFDRInfo.boost) {
                                                        message = "link fdr, " + (linkFDRInfo.getCurrentFDR()) + ", " + message;
                                                    }
                                                    if (protFDRInfo.boost) {
                                                        message = "prot fdr, " + (protFDRInfo.getCurrentFDR()) + ", " + message;
                                                    }
                                                    if (pepFDRInfo.boost) {
                                                        message = "pep fdr, " + (pepFDRInfo.getCurrentFDR()) + ", " + message;
                                                    }
                                                    if (psmFDRInfo.boost) {
                                                        message = "psm fdr, " + (psmFDRInfo.getCurrentFDR()) + ", " + message;
                                                    }
                                                    if (pepCoverage.boost) {
                                                        message = "PepCoverage, " + (1 - pepCoverage.getCurrentFDR()) + ", " + message;
                                                    }
                                                    if (absPepCover.boost) {
                                                        message = "min Fragmenst, " + (10 - absPepCover.getCurrentFDR()) + ", " + message;
                                                    }
                                                    if (deltaScore.boost) {
                                                        message = "deltascore, " + (1 - deltaScore.getCurrentFDR()) + ", " + message;
                                                    }
                                                    if (pepDoublets.boost) {
                                                        message = "min peptides with doublets, " + (2 - pepDoublets.getCurrentFDR()) + ", " + message;
                                                    }
                                                    if (pepStubs.boost) {
                                                        message = "min peptides with Stubs, " + (2 - pepStubs.getCurrentFDR()) + ", " + message;
                                                    }
                                                    sb.append(message + "\n");
                                                    Logger.getLogger(this.getClass().getName()).log(Level.INFO, sb.toString());
                                                    // forward the values to the gui
                                                    final double showDelta = (1 - deltaScore.getCurrentFDR());
                                                    final double showPepCoverage = (1 - pepCoverage.getCurrentFDR());
                                                    final int showMinFrags = (int) Math.round((10 - absPepCover.getCurrentFDR()));
                                                    final int showMinStubs = (int) Math.round((2 - pepStubs.getCurrentFDR()));
                                                    final int showMinDoublets = (int) Math.round((2 - pepDoublets.getCurrentFDR()));
                                                    final double showPSMFDR = psmFDRInfo.getCurrentFDR();
                                                    final double showPepFDR = pepFDRInfo.getCurrentFDR();
                                                    final double showProtFDR = protFDRInfo.getCurrentFDR();
                                                    final double showLinkFDR = linkFDRInfo.getCurrentFDR();
                                                    final double showPSMCount = psmFDRInfo.count;
                                                    final double showPepCount = pepFDRInfo.count;
                                                    final double showProtCount = protFDRInfo.countLinear;
                                                    final double showLinkCount = linkFDRInfo.count;
                                                    final double showPPICount = ppiFDRInfo.count;
                                                    final double showLinkCountBetween = linkFDRInfo.countBetween;
                                                    final double showPPICountBetween = ppiFDRInfo.countBetween;
                                                    final double showLinkCountBetweenPreFilter = linkFDRInfo.countBetweenPreFilter;
                                                    final double showLinkCountPreFilter = linkFDRInfo.countPreFilter;
                                                    final MaximisingStatus state = new MaximisingStatus();
                                                    state.showDelta = showDelta;
                                                    state.showMinFrags = showMinFrags;
                                                    state.showMinStubs = showMinStubs;
                                                    state.showMinDoublets = showMinDoublets;
                                                    state.showPepCoverage = showPepCoverage;
                                                    state.showPSMFDR = showPSMFDR;
                                                    state.showPepFDR = showPepFDR;
                                                    state.showProtFDR = showProtFDR;
                                                    state.showLinkFDR = showLinkFDR;
                                                    state.showPSMCount = showPSMCount + "";
                                                    state.showPepCount = showPepCount + "";
                                                    state.showProtCount = showProtCount + "";
                                                    state.showLinkCount = showLinkCount + (showLinkCount == 0 ? "(before PPI:" + showLinkCountPreFilter + ")" : "");
                                                    state.showLinkCountBetween = showLinkCountBetween + (showLinkCountBetween == 0 ? "(before PPI:" + showLinkCountBetweenPreFilter + ")" : "");
                                                    state.showPPICount = showPPICount + "";
                                                    state.showPPICountBetween = showPPICountBetween + "";
                                                    state.resultCount = maxCount;
                                                    state.resultCountBetween = maxCountBetween;
                                                    stateUpdate.setStatus(state);
                                                } else if (count == maxCount || (between && countBetween == maxCountBetween)) {
                                                    // an equally good combination - widen the recorded optimum
                                                    deltaScore.setEqualFDR();
                                                    pepStubs.setEqualFDR();
                                                    pepDoublets.setEqualFDR();
                                                    absPepCover.setEqualFDR();
                                                    pepCoverage.setEqualFDR();
                                                    psmFDRInfo.setEqualFDR();
                                                    pepFDRInfo.setEqualFDR();
                                                    protFDRInfo.setEqualFDR();
                                                    linkFDRInfo.setEqualFDR();
                                                    ppiFDRInfo.setEqualFDR();
                                                }
                                                if (stopMaximizing) {
                                                    break deltaScoreloop;
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            stateUpdate.setStatusText("Max Round: " + optimizingRound + " - " + maxCount + " matches");
            // no improvement for the last few rounds?
            if ((maxCount == lastMaxCount && --countDown == 0) || stopMaximizing) {
                optimizing = false;
                Logger.getLogger(this.getClass().getName()).log(Level.INFO, sb.toString());
                // translate the scanned optimum back into concrete filter settings
                settings.setMinDeltaScoreFilter(1 - deltaScore.maximumFDR);
                settings.setMinPeptideCoverageFilter(1 - pepCoverage.maximumFDR);
                settings.setMinPeptideFragmentsFilter(10 - (int) Math.round(absPepCover.maximumFDR));
                settings.setMinPeptideStubFilter(2 - (int) Math.round(pepStubs.maximumFDR));
                settings.setMinPeptideDoubletFilter(2 - (int) Math.round(pepDoublets.maximumFDR));
                settings.setPSMFDR(psmFDRInfo.maximumFDR);
                settings.setPeptidePairFDR(pepFDRInfo.maximumFDR);
                settings.setProteinGroupFDR(protFDRInfo.maximumFDR);
                settings.setProteinGroupLinkFDR(linkFDRInfo.maximumFDR);
                settings.setProteinGroupPairFDR(ppiFDRInfo.maximumFDR);
                FDRResult ret = this.calculateFDR(settings, true);
                MaximisingStatus res = new MaximisingStatus();
                res.showDelta = (1 - deltaScore.maximumFDR);
                res.showMinFrags = (10 - (int) Math.round(absPepCover.maximumFDR));
                res.showMinStubs = (2 - (int) Math.round(pepStubs.maximumFDR));
                res.showMinDoublets = (2 - (int) Math.round(pepDoublets.maximumFDR));
                res.showPepCoverage = (1 - pepCoverage.maximumFDR);
                res.showPSMFDR = psmFDRInfo.maximumFDR;
                res.showPepFDR = pepFDRInfo.maximumFDR;
                res.showProtFDR = protFDRInfo.maximumFDR;
                res.showLinkFDR = linkFDRInfo.maximumFDR;
                res.result = ret;
                res.resultCount = maxCount;
                res.resultCountBetween = maxCountBetween;
                if (pepCoverage.maximumFDR < 1) {
                    // remember the coverage pre-filtered PSM set for later runs
                    PSMFilter f = new SingleSubScoreFilter("minPepCoverage", 1 - pepCoverage.maximumFDR, true);
                    setPrefilteredPSMs(f.filter(getAllPSMs()));
                    if (getPrefilteredPSMs().size() < 50) {
                        // NOTE(review): leaves the while loop and makes the
                        // method return null - presumably intentional when the
                        // pre-filter leaves too few PSMs to be meaningful
                        break;
                    }
                } else {
                    setPrefilteredPSMs(null);
                }
                stopMaximizing = false;
                stateUpdate.setStatus(res);
                return res;
            } else {
                if (maxCount > lastMaxCount) {
                    // yes we improved
                    countDown = maxCountDown;
                } else {
                    stateUpdate.setStatusText("Max Link Round: " + optimizingRound + " - " + maxCount + " links - count down: " + countDown);
                }
                // see if we can make the resolution finer - can we get a better result?
                boolean startLow = false;
                int stepChange = 0;
                if (countDown == 2) {
                    startLow = true;
                    stepChange = 1;
                }
                if (countDown == 1) {
                    stepChange = 1;
                }
                deltaScore.calcNextFDRRange(startLow, stepChange);
                absPepCover.calcNextFDRRange(startLow, stepChange);
                pepStubs.calcNextFDRRange(startLow, stepChange);
                pepDoublets.calcNextFDRRange(startLow, stepChange);
                pepCoverage.calcNextFDRRange(startLow, stepChange);
                psmFDRInfo.calcNextFDRRange(startLow, stepChange);
                pepFDRInfo.calcNextFDRRange(startLow, stepChange);
                linkFDRInfo.calcNextFDRRange(startLow, stepChange);
                protFDRInfo.calcNextFDRRange(startLow, stepChange);
                ppiFDRInfo.calcNextFDRRange(startLow, stepChange);
            }
        }
    } catch (Exception ex) {
        Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, "Error maximizing links", ex);
        stateUpdate.reportError("Error maximizing", ex);
    }
    return null;
}
/**
 * Determines the largest input-level FDR seen across the given sub-groups,
 * where a group's input FDR is estimated as (TD - DD) / TT.
 *
 * @param c the sub-group infos to scan
 * @return the highest per-group input FDR, or 0 for an empty collection
 */
protected <T extends FDRSelfAdd> double getHighestSubGroupInputFDR(Collection<SubGroupFdrInfo<T>> c) {
    double highest = 0;
    for (SubGroupFdrInfo info : c) {
        // explicit comparison (not Math.max) so a NaN estimate is ignored
        double groupInputFDR = (info.TD - info.DD) / (double) info.TT;
        if (highest < groupInputFDR) {
            highest = groupInputFDR;
        }
    }
    return highest;
}
/**
 * The number format used by this instance.
 *
 * @return the numberFormat
 */
public NumberFormat getNumberFormat() {
    return this.numberFormat;
}
/**
 * Sets the number format used by this instance.
 *
 * @param numberFormat the numberFormat to set
 */
public void setNumberFormat(NumberFormat numberFormat) {
    this.numberFormat = numberFormat;
}
/**
 * Requests that a running maximisation (see maximiseInner) stops at its next
 * stopMaximizing check.
 */
public void stopMaximizing() {
    this.stopMaximizing = true;
}
/** Hook for releasing resources; currently there is nothing to clean up. */
public void cleanup() {
}
/**
 * Group between links by whether both proteins were also observed with
 * self-links.
 *
 * @return the setGroupLinksByHasInternal
 */
public boolean groupLinksByHasInternal() {
    return this.groupLinksByHasInternal;
}
/**
 * Group between links by whether both proteins were also observed with
 * self-links.
 *
 * @param groupLinksByHasInternal the setGroupLinksByHasInternal to set
 */
public void setGroupLinksByHasInternal(boolean groupLinksByHasInternal) {
    this.groupLinksByHasInternal = groupLinksByHasInternal;
}
/**
 * Group between PPIs by whether both proteins were also observed with
 * self-links.
 *
 * @return the groupPPIByHasInternal
 */
public boolean groupPPIByHasInternal() {
    return this.groupPPIByHasInternal;
}
/**
 * Group between PPIs by whether both proteins were also observed with
 * self-links.
 *
 * @param groupPPIByHasInternal the groupPPIByHasInternal to set
 */
public void setGroupPPIByHasInternal(boolean groupPPIByHasInternal) {
    this.groupPPIByHasInternal = groupPPIByHasInternal;
}
/**
 * Group between peptide pairs by whether both proteins were also observed
 * with self-links.
 *
 * @return the groupPepPairByHasInternal
 */
public boolean groupPepPairByHasInternal() {
    return this.groupPepPairByHasInternal;
}
/**
 * Group between peptide pairs by whether both proteins were also observed
 * with self-links.
 *
 * @param groupPepPairByHasInternal the groupPepPairByHasInternal to set
 */
public void setGroupPepPairByHasInternal(boolean groupPepPairByHasInternal) {
    this.groupPepPairByHasInternal = groupPepPairByHasInternal;
}
/**
 * Whether PSMs should additionally be grouped by the run they were observed
 * in. (The previous javadoc was a copy-paste from the peptide-pair grouping
 * setter and did not describe this method.)
 *
 * @param groupByrun group PSMs by run
 */
public void setGroupPSMsByRun(boolean groupByrun) {
    this.groupPSMsByRun = groupByrun;
}
/**
 * Whether PSMs are additionally grouped by the run they were observed in.
 * (The previous javadoc was a copy-paste from the peptide-pair grouping
 * methods and did not describe this method.)
 *
 * @return true if PSMs are grouped by run
 */
public boolean groupPSMsByRun() {
    return this.groupPSMsByRun;
}
/**
 * Do we have PSMs with crosslinker-stubs - i.e. was this an MS2-cleavable
 * crosslinker search?
 *
 * @return the stubsFound flag
 */
public boolean stubsFound() {
    return this.stubsFound;
}
/**
 * Do we have PSMs with crosslinker-stubs - i.e. was this an MS2-cleavable
 * crosslinker search?
 *
 * @param stubsFound the stubsFound to set
 */
public void stubsFound(boolean stubsFound) {
    this.stubsFound = stubsFound;
}
/**
 * PSMs that passed some form of prefilter.
 *
 * @return the prefilteredPSMs (may be null if no prefilter was applied)
 */
public ArrayList<PSM> getPrefilteredPSMs() {
    return this.prefilteredPSMs;
}
/**
 * PSMs that passed some form of prefilter.
 *
 * @param prefilteredPSMs the prefilteredPSMs to set (null clears the prefilter)
 */
public void setPrefilteredPSMs(ArrayList<PSM> prefilteredPSMs) {
    this.prefilteredPSMs = prefilteredPSMs;
}
}
|
docusmart/BlackLab | util/src/main/java/nl/inl/blacklab/index/ZipHandleManager.java | package nl.inl.blacklab.index;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipFile;
import nl.inl.blacklab.exceptions.BlackLabRuntimeException;
/**
 * Manages opened zip files.
 *
 * Opening large zip files takes time, so it's more efficient to keep zip files
 * open for a while in case we'll access the same zip file again. Of course, we
 * should eventually close them to free up resources as well.
 */
public class ZipHandleManager {

    /** Do we want to keep zip files open at all? */
    private static boolean keepZipsOpen = true;

    /** Maximum age of an open zip file, in seconds. */
    private static int maxOpenZipAgeSec = 24 * 3600;

    /** Maximum number of zip files to keep open. */
    private static int maxOpenZipFiles = 10;

    /** Zip files opened by DocIndexerBase indexers. Should be closed eventually. */
    private static Map<File, ZipHandle> openZips = new LinkedHashMap<>();

    /** An open zip file together with the time it was last requested. */
    static class ZipHandle implements Comparable<ZipHandle> {

        /** File this zip was opened from; also its key in openZips. */
        public File key;

        /** The opened zip file. */
        public ZipFile zipFile;

        /** Timestamp (ms) of the most recent openZip() request for this file. */
        public long lastUsed;

        public ZipHandle(File key, ZipFile file) {
            this.key = key;
            this.zipFile = file;
            this.lastUsed = System.currentTimeMillis();
        }

        /** Orders handles from least recently to most recently used. */
        @Override
        public int compareTo(ZipHandle o) {
            return Long.compare(lastUsed, o.lastUsed);
        }

        public void markUsed() {
            lastUsed = System.currentTimeMillis();
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((key == null) ? 0 : key.hashCode());
            result = prime * result + (int) (lastUsed ^ (lastUsed >>> 32));
            result = prime * result + ((zipFile == null) ? 0 : zipFile.hashCode());
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            ZipHandle other = (ZipHandle) obj;
            if (key == null) {
                if (other.key != null)
                    return false;
            } else if (!key.equals(other.key))
                return false;
            if (lastUsed != other.lastUsed)
                return false;
            if (zipFile == null) {
                if (other.zipFile != null)
                    return false;
            } else if (!zipFile.equals(other.zipFile))
                return false;
            return true;
        }

        /** @return milliseconds since this handle was last used */
        public long timeSinceLastUsed() {
            return System.currentTimeMillis() - lastUsed;
        }

        public void close() {
            try {
                zipFile.close();
            } catch (IOException e) {
                throw BlackLabRuntimeException.wrap(e);
            }
        }
    }

    /** Closes all cached zip files and empties the cache. */
    public static void closeAllZips() {
        synchronized (openZips) {
            Iterator<ZipHandle> it = openZips.values().iterator();
            while (it.hasNext()) {
                it.next().close(); // close zip file
                it.remove();
            }
        }
    }

    /**
     * Evicts cached zips that are too old, then - if still over the limit -
     * the least recently used ones. Called from openZip() while holding the
     * openZips monitor.
     */
    private synchronized static void removeEntriesIfRequired() {
        List<ZipHandle> handles = new ArrayList<>(openZips.values());

        // Remove any entries that haven't been used for a long time.
        // BUGFIX: maxOpenZipAgeSec is in seconds but timeSinceLastUsed()
        // returns milliseconds; convert before comparing.
        for (ZipHandle zh : handles) {
            if (zh.timeSinceLastUsed() > maxOpenZipAgeSec * 1000L) {
                openZips.remove(zh.key);
                zh.close();
            }
        }

        // If too many zips are open, close ones that haven't been used the
        // longest. BUGFIX: iterate the list sorted by last use; the map
        // itself is in insertion order, not usage order.
        if (openZips.size() > maxOpenZipFiles) {
            handles.sort(Comparator.naturalOrder()); // least recently used first
            Iterator<ZipHandle> it = handles.iterator();
            while (openZips.size() > maxOpenZipFiles && it.hasNext()) {
                ZipHandle zh = it.next();
                // Skip handles already evicted by the age check above.
                if (openZips.remove(zh.key) != null) {
                    zh.close();
                }
            }
        }
    }

    /**
     * Opens a zip file, or returns an already-open cached handle for it.
     *
     * @param zipFile the zip file to open
     * @return the open ZipFile (cached unless keepZipsOpen is false)
     * @throws IOException if the zip file cannot be opened
     */
    public static ZipFile openZip(File zipFile) throws IOException {
        if (!keepZipsOpen)
            return new ZipFile(zipFile);
        synchronized (openZips) {
            ZipHandle z = openZips.get(zipFile);
            if (z == null) {
                z = new ZipHandle(zipFile, new ZipFile(zipFile));
                openZips.put(zipFile, z);
            }
            z.markUsed();
            removeEntriesIfRequired();
            return z.zipFile;
        }
    }

    public static void setKeepZipsOpen(boolean keepZipsOpen) {
        ZipHandleManager.keepZipsOpen = keepZipsOpen;
    }

    public static void setMaxOpenZipAgeSec(int maxOpenZipAgeSec) {
        ZipHandleManager.maxOpenZipAgeSec = maxOpenZipAgeSec;
    }

    public static void setMaxOpen(int maxOpenZipFiles) {
        ZipHandleManager.maxOpenZipFiles = maxOpenZipFiles;
    }
}
|
rocious/omaha | base/command_line_validator.cc | // Copyright 2008-2009 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========================================================================
#include "omaha/base/command_line_validator.h"
#include <atlbase.h>
#include "omaha/base/command_line_parser.h"
#include "omaha/base/debug.h"
#include "omaha/base/error.h"
#include "omaha/base/logging.h"
#include "omaha/base/utils.h"
namespace omaha {
// Scenario names generated by CreateScenarioFromCmdLine are numbered from 1.
CommandLineValidator::CommandLineValidator() : scenario_sequence_number_(0) {
}
// Clear() deletes the heap-allocated ScenarioParameter objects owned by the
// scenario map.
CommandLineValidator::~CommandLineValidator() {
  Clear();
}
// Registers an empty scenario under |scenario_name|.
// Fails with E_INVALIDARG if the name is already registered.
HRESULT CommandLineValidator::CreateScenario(const CString& scenario_name) {
  MapScenariosConstIter it = scenarios_.find(scenario_name);
  if (it != scenarios_.end()) {
    return E_INVALIDARG;
  }
  scenarios_[scenario_name] = ScenarioParameters();
  return S_OK;
}
// TODO(Omaha): Instead of creating the scenario in the map then populating it,
// which requires these methods to know about the map, verify the scenario
// exists, etc. - why not build the scenario then add it to the map? That seems
// more straightforward.

// Appends a required switch (with its argument count) to an existing scenario.
// The scenario map owns the new ScenarioParameter; it is freed in Clear().
HRESULT CommandLineValidator::AddScenarioParameter(
    const CString& scenario_name,
    const CString& switch_name,
    int num_required_parameters) {
  MapScenariosIter iter = scenarios_.find(scenario_name);
  if (iter == scenarios_.end()) {
    return E_INVALIDARG;
  }
  iter->second.required.push_back(
      new ScenarioParameter(switch_name, num_required_parameters));
  return S_OK;
}
// Appends an optional switch (with its argument count) to an existing
// scenario. The scenario map owns the new ScenarioParameter.
HRESULT CommandLineValidator::AddOptionalScenarioParameter(
    const CString& scenario_name,
    const CString& switch_name,
    int num_required_parameters) {
  MapScenariosIter iter = scenarios_.find(scenario_name);
  if (iter == scenarios_.end()) {
    return E_INVALIDARG;
  }
  iter->second.optional.push_back(
      new ScenarioParameter(switch_name, num_required_parameters));
  return S_OK;
}
// Parses |command_line| and registers a scenario that mirrors its switches
// (and optional switches) under a generated, unique name. On success the
// generated name is returned through |scenario_name|.
HRESULT CommandLineValidator::CreateScenarioFromCmdLine(
    const CString& command_line,
    CString* scenario_name) {
  ASSERT1(scenario_name);

  CommandLineParser parser;
  HRESULT hr = parser.ParseFromString(command_line);
  if (FAILED(hr)) {
    return hr;
  }

  // Generate a unique scenario name.
  CString scenario_name_str;
  do {
    ++scenario_sequence_number_;
    scenario_name_str.Format(_T("scenario_%d"), scenario_sequence_number_);
  } while (scenarios_.find(scenario_name_str) != scenarios_.end());

  // The name is unique by construction, but check the result anyway for
  // consistency with the other calls in this method. (It was previously
  // ignored.)
  hr = CreateScenario(scenario_name_str);
  if (FAILED(hr)) {
    return hr;
  }

  // Mirror every required switch of the parsed command line.
  int switch_count = parser.GetSwitchCount();
  for (int idx_switch = 0; idx_switch < switch_count; ++idx_switch) {
    CString switch_name;
    hr = parser.GetSwitchNameAtIndex(idx_switch, &switch_name);
    if (FAILED(hr)) {
      return hr;
    }
    int arg_count = 0;
    hr = parser.GetSwitchArgumentCount(switch_name, &arg_count);
    if (FAILED(hr)) {
      return hr;
    }
    hr = AddScenarioParameter(scenario_name_str, switch_name, arg_count);
    if (FAILED(hr)) {
      return hr;
    }
  }

  // Mirror every optional switch of the parsed command line.
  switch_count = parser.GetOptionalSwitchCount();
  for (int idx_switch = 0; idx_switch < switch_count; ++idx_switch) {
    CString switch_name;
    hr = parser.GetOptionalSwitchNameAtIndex(idx_switch, &switch_name);
    if (FAILED(hr)) {
      return hr;
    }
    int arg_count = 0;
    hr = parser.GetOptionalSwitchArgumentCount(switch_name, &arg_count);
    if (FAILED(hr)) {
      return hr;
    }
    hr = AddOptionalScenarioParameter(scenario_name_str,
                                      switch_name,
                                      arg_count);
    if (FAILED(hr)) {
      return hr;
    }
  }

  *scenario_name = scenario_name_str;
  return S_OK;
}
// Tries each registered scenario against the parsed command line and returns
// the name of the first one that matches. A scenario is only attempted when
// the parsed switch count can possibly fit its required/optional counts.
HRESULT CommandLineValidator::Validate(
    const CommandLineParser& command_line_parser,
    CString* scenario_name) const {
  const int parser_switch_count = command_line_parser.GetSwitchCount();
  for (MapScenariosConstIter it = scenarios_.begin();
       it != scenarios_.end();
       ++it) {
    // Make sure we have a match for the number of switches in this scenario.
    const int required_count = static_cast<int>(it->second.required.size());
    const int optional_count = static_cast<int>(it->second.optional.size());
    if (parser_switch_count < required_count ||
        parser_switch_count > required_count + optional_count) {
      continue;
    }
    if (DoesScenarioMatch(command_line_parser, it->second)) {
      *scenario_name = it->first;
      return S_OK;
    }
  }
  return GOOGLEUPDATE_COMMANDLINE_E_NO_SCENARIO_HANDLER_MATCHED;
}
// Returns true when the parsed command line matches |scenario_parameters|:
// every required switch must be present with exactly the declared number of
// arguments, and every parsed switch beyond the required ones must be matched
// by an optional scenario parameter (also with the declared argument count).
bool CommandLineValidator::DoesScenarioMatch(
    const CommandLineParser& command_line_parser,
    const ScenarioParameters& scenario_parameters) const {
  // Make sure that each switch matches with the right number of arguments.
  ScenarioParameterVectorConstIter parameter_iter;
  for (parameter_iter = scenario_parameters.required.begin();
       parameter_iter != scenario_parameters.required.end();
       ++parameter_iter) {
    CString current_switch_name = (*parameter_iter)->switch_name_;
    // This would probably allow duplicate switches (i.e. /c /c) in a command
    // line.
    if (!command_line_parser.HasSwitch(current_switch_name)) {
      return false;
    }
    int arg_count = 0;
    HRESULT hr = command_line_parser.GetSwitchArgumentCount(current_switch_name,
                                                            &arg_count);
    if (FAILED(hr)) {
      return false;
    }
    int switch_arg_count = (*parameter_iter)->num_required_parameters_;
    if (arg_count != switch_arg_count) {
      return false;
    }
  }
  // Number of parsed switches not yet accounted for by required parameters;
  // each matching optional parameter claims one of them.
  int parser_optional_switch_count = command_line_parser.GetSwitchCount() -
                                     scenario_parameters.required.size();
  for (parameter_iter = scenario_parameters.optional.begin();
       parser_optional_switch_count != 0 &&
       parameter_iter != scenario_parameters.optional.end();
       ++parameter_iter) {
    CString current_switch_name = (*parameter_iter)->switch_name_;
    // This would probably allow duplicate optional switches (i.e. /oem /oem) in
    // a command line.
    if (!command_line_parser.HasSwitch(current_switch_name)) {
      continue;
    }
    int arg_count = 0;
    HRESULT hr = command_line_parser.GetSwitchArgumentCount(current_switch_name,
                                                            &arg_count);
    if (FAILED(hr)) {
      return false;
    }
    int switch_arg_count = (*parameter_iter)->num_required_parameters_;
    if (arg_count != switch_arg_count) {
      return false;
    }
    --parser_optional_switch_count;
  }
  // Match only if every extra parsed switch was claimed by an optional
  // parameter.
  return parser_optional_switch_count == 0;
}
void CommandLineValidator::Clear() {
MapScenariosIter scenarios_iter;
for (scenarios_iter = scenarios_.begin();
scenarios_iter != scenarios_.end();
++scenarios_iter) {
ScenarioParameterVectorIter param_iter;
for (param_iter = (*scenarios_iter).second.required.begin();
param_iter != (*scenarios_iter).second.required.end();
++param_iter) {
delete *param_iter;
}
for (param_iter = (*scenarios_iter).second.optional.begin();
param_iter != (*scenarios_iter).second.optional.end();
++param_iter) {
delete *param_iter;
}
}
scenarios_.clear();
}
} // namespace omaha
|
guidotack/gecode | contribs/qecode/qsolver_unblockable.hh | /**** , [ qsolverunblockable.hh ],
Copyright (c) 2008 Universite d'Orleans - <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*************************************************************************/
#ifndef __QECODE_QSOLVER_TRICK__
#define __QECODE_QSOLVER_TRICK__
#include "QCSPPlusUnblockable.hh"
#include "QCOPPlus.hh"
#include <iostream>
#include <cstdlib>
#include "gecode/minimodel.hh"
#include "gecode/search.hh"
#include "Strategy.hh"
#include "qecode.hh"
using namespace Gecode;
/** Unblockable QCSP+ Solver.
 * This class is the search engine for unblockable QCSP+ defined with the special QcspUnblockable class.
 */
class QECODE_VTABLE_EXPORT QSolverUnblockable {
private:
    int n;                 // Problem size -- presumably the number of quantifier scopes; TODO confirm.
    QcspUnblockable* sp;   // Problem being solved (pointer held, apparently not owned -- confirm).
    int* nbRanges;         // Internal scratch storage used during search.
    // Recursive search over scope |scope| given the |assignments| made so far.
    // |nodes| is incremented with each search-tree node encountered.
    Strategy rSolve(QcspUnblockable* qs,int scope,vector<int> assignments,unsigned long int& nodes);
public:
    /** Public constructor.
    @param sp The problem to solve
    */
    QECODE_EXPORT QSolverUnblockable(QcspUnblockable* sp);
    /** Solves the problem and returns a corresponding winning strategy.
    @param nodes A reference that is increased by the number of nodes encountered in the search tree.
    */
    QECODE_EXPORT Strategy solve(unsigned long int& nodes);
};
/** Unblockable QCSP+ Solver.
* This class is the search engine for unblockable QCSP+ defined with the general Qcop class.
*/
class QECODE_VTABLE_EXPORT QSolverUnblockable2 {
private:
    int n;           // Problem size -- presumably the number of quantifier scopes; TODO confirm.
    Qcop* sp;        // Problem being solved (pointer held, apparently not owned -- confirm).
    int* nbRanges;   // Internal scratch storage used during search.
    // Recursive search over scope |scope| given the |assignments| made so far.
    // |nodes| is incremented with each search-tree node encountered.
    Strategy rSolve(Qcop* qs,int scope,vector<int> assignments,unsigned long int& nodes);
public:
    /** Public constructor.
    @param sp The problem to solve
    */
    QECODE_EXPORT QSolverUnblockable2(Qcop* sp);
    /** Solves the problem and returns a corresponding winning strategy.
    WARNING : Defined optimization conditions and aggregates are NOT taken into account.
    @param nodes A reference that is increased by the number of nodes encountered in the search tree.
    */
    QECODE_EXPORT Strategy solve(unsigned long int& nodes);
};
#endif
|
l3onardo-oliv3ira/signer4j | src/gui/com/github/signer4j/gui/alert/ExpiredPasswordAlert.java | <reponame>l3onardo-oliv3ira/signer4j
package com.github.signer4j.gui.alert;
import javax.swing.JDialog;
import javax.swing.JOptionPane;
import com.github.signer4j.gui.utils.Images;
import com.github.signer4j.imp.Config;
/**
 * Modal, always-on-top informational alert shown when the password of the
 * user's cryptographic device (token/smartcard) has expired.
 *
 * <p>{@link #display()} blocks until the user dismisses the dialog and
 * returns {@code true} when the single "ENTENDI" option was chosen.</p>
 */
public final class ExpiredPasswordAlert {

  // User-facing message (pt-BR). Fixed typo: "dispostivo" -> "dispositivo".
  private static final String MESSAGE_FORMAT = "A senha do seu dispositivo expirou e deve ser renovada!\n\n"
      + "O PjeOffice não renova a sua senha! A renovação deve ser feita utilizando o software\n"
      + "fornecido pelo fabricante do seu dispositivo.";

  private static final String[] OPTIONS = {"ENTENDI"};

  /**
   * Shows the alert and blocks until it is dismissed.
   *
   * @return {@code true} if the user acknowledged via the "ENTENDI" button
   */
  public static boolean display() {
    return new ExpiredPasswordAlert().show();
  }

  private final JOptionPane jop;

  private ExpiredPasswordAlert() {
    jop = new JOptionPane(
        MESSAGE_FORMAT,
        JOptionPane.INFORMATION_MESSAGE,
        // DEFAULT_OPTION is the correct optionType constant here; the
        // previous OK_OPTION is a *return value* constant. The buttons are
        // supplied by the custom OPTIONS array either way.
        JOptionPane.DEFAULT_OPTION,
        Images.LOCK.asIcon(),
        OPTIONS,
        OPTIONS[0]
    );
  }

  // Builds and shows the modal dialog; setVisible blocks until dismissed.
  private boolean show() {
    JDialog dialog = jop.createDialog("Senha expirada");
    dialog.setAlwaysOnTop(true);
    dialog.setModal(true);
    dialog.setIconImage(Config.getIcon());
    dialog.setVisible(true);
    dialog.dispose();
    Object selectedValue = jop.getValue();
    return OPTIONS[0].equals(selectedValue);
  }
}
|
leechwin/algorithm | leetcode_20_valid-parentheses/Solution.java | <gh_stars>0
class Solution {
    /**
     * Determines whether {@code s} consists of well-nested, correctly
     * matched brackets: (), [], {}.
     *
     * @param s input string of bracket characters
     * @return {@code true} when every opener is closed by the matching
     *         closer in LIFO order and no closer appears without its opener
     */
    public boolean isValid(String s) {
        // Map each opening bracket to its required closing bracket.
        HashMap<Character, Character> closerFor = new HashMap<Character, Character>();
        closerFor.put('(', ')');
        closerFor.put('[', ']');
        closerFor.put('{', '}');
        Stack<Character> openers = new Stack<Character>();
        for (int i = 0; i < s.length(); i++) {
            char ch = s.charAt(i);
            if (closerFor.containsKey(ch)) {
                // Opening bracket: remember it until its closer arrives.
                openers.push(ch);
            } else if (closerFor.containsValue(ch)) {
                // Closing bracket: it must match the most recent opener.
                if (openers.isEmpty() || closerFor.get(openers.peek()) != ch) {
                    return false;
                }
                openers.pop();
            }
        }
        // Valid only if no opener was left unclosed.
        return openers.isEmpty();
    }
}
|
mxl1n/CodeGen | data/transcoder_evaluation_gfg/cpp/MINIMUM_PRODUCT_SUBSET_ARRAY.cpp | // Copyright (c) 2019-present, Facebook, Inc.
// All rights reserved.
//
// This source code is licensed under the license found in the
// LICENSE file in the root directory of this source tree.
//
#include <iostream>
#include <cstdlib>
#include <string>
#include <vector>
#include <fstream>
#include <iomanip>
#include <bits/stdc++.h>
using namespace std;
// Minimum product over all non-empty subsets of a[0..n-1].
// Single scan collects: the count of zeros and negatives, the negative
// closest to zero, the smallest positive, and the product of all non-zero
// elements; the answer is then assembled from those facts.
int f_gold(int a[], int n) {
    if (n == 1) {
        return a[0];
    }
    int largest_negative = INT_MIN;   // negative value closest to zero
    int smallest_positive = INT_MAX;
    int negatives = 0;
    int zeros = 0;
    int product = 1;                  // product of every non-zero element
    for (int i = 0; i < n; ++i) {
        int v = a[i];
        if (v == 0) {
            ++zeros;
            continue;
        }
        if (v < 0) {
            ++negatives;
            largest_negative = std::max(largest_negative, v);
        } else {
            smallest_positive = std::min(smallest_positive, v);
        }
        product *= v;
    }
    // All zeros, or only positives mixed with at least one zero:
    // zero is the minimum achievable product.
    if (zeros == n || (negatives == 0 && zeros > 0)) {
        return 0;
    }
    // Only positives: best choice is the single smallest one.
    if (negatives == 0) {
        return smallest_positive;
    }
    // Even number of negatives: drop the one closest to zero so the
    // product becomes negative and as small as possible.
    if (negatives % 2 == 0) {
        product /= largest_negative;
    }
    return product;
}
//TOFILL
// Test driver: runs f_filled (the candidate implementation spliced in at the
// //TOFILL marker above) against the reference f_gold on the fixture inputs
// below and prints "#Results: <passes>, <total>".
// NOTE(review): "&param0" appears entity-mangled to a pilcrow in the archived
// copy of this file; restored here to the only form that compiles.
int main() {
    int n_success = 0;
    // Fixture arrays and their effective lengths (param1[i] elements of
    // param0[i] are passed to the functions).
    vector<vector<int>> param0 {{3,6,7,8,8,9,12,12,12,13,15,15,15,16,18,18,18,19,20,21,22,22,23,28,29,30,30,33,33,35,35,36,40,43,58,63,73,78,82,83,84,87,89,89,92,94},{18,-6,-8,98,66,-86,24,6,58,74,82},{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},{97,79,93,41,76,34,94,57,63,98,52,62,96,7,63,44,55,43,36,66,35,14,24,40,26,16,67,19,31,86,64,93,85,86,66,24,73,86,45,99,25,98,38,57},{-58,-48,-46,-36,0,18},{1,1,1,0,0,0,1,0,1,0,0,1,1,0,1,0,0,0,0,0,1,0,0,1,1,0,1,1,0,0,1,0,1,0,0,1,0},{1,3,5,15,18,19,21,23,29,29,33,33,34,37,39,43,43,68,73,74,75,84,87,88,89,90,93},{74,70,-36,16,10,60,-82,96,-30,58,56,-54,-14,94,10,-82,-80,-40,-72,-68,8,38,-50,-76,34,2,-66,-30,26},{0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1},{74,74,8,74,85,41,31,3,84,46,73,39,64,72,28,83,98,27,64,7,95,37,10,38,77,32,69,72,62,96,5,81,34,96,80,25,38}};
    vector<int> param1 {23,10,25,30,3,36,18,15,20,33};
    for(int i = 0; i < param0.size(); ++i)
    {
        if(f_filled(&param0[i].front(),param1[i]) == f_gold(&param0[i].front(),param1[i]))
        {
            n_success+=1;
        }
    }
    cout << "#Results:" << " " << n_success << ", " << param0.size();
    return 0;
}
psavery/ParaView | Plugins/pvNVIDIAIndeX/include/nv/index/ievent_tracing.h | /******************************************************************************
* Copyright 2019 NVIDIA Corporation. All rights reserved.
*****************************************************************************/
/// \file
/// \brief API for reading tracing events.
///
#ifndef NVIDIA_INDEX_ITRACE_EVENTS_H
#define NVIDIA_INDEX_ITRACE_EVENTS_H
#include <mi/base/interface_declare.h>
#include <mi/dice.h>
#include <mi/base/types.h>
namespace nv {
namespace index {
enum ITrace_event_type
{
NONE = 0,
VALUE_U64,
VALUE_S64,
VALUE_F64,
VALUE_DURATION,
NEW_NS,
CLEAR_NS
};
class ITrace_event_iterator
: public mi::base::Interface_declare<0x55d30b9a,0x4927,0x4150,0x90,0x63,0xd6,0xd6,0x8a,0x40,0xbf,0xf0>
{
public:
virtual void next() = 0;
virtual bool begin() = 0;
virtual bool end() = 0;
virtual const char* get_namespace() const = 0;
virtual mi::Sint32 get_namespace_id() const = 0;
virtual const char* get_name() const = 0;
virtual mi::Uint64 get_timestamp() const = 0;
virtual mi::Uint32 get_host_id() const = 0;
virtual ITrace_event_type get_type() const = 0;
virtual void get_value(mi::Uint64& value) const = 0;
virtual void get_value(mi::Sint64& value) const = 0;
virtual void get_value(mi::Float64& value) const = 0;
};
class ITrace_collection
: public mi::base::Interface_declare<0xe05ed2f7,0x71da,0x4211,0x87,0x1b,0x46,0xfd,0x3f,0xb8,0x8a,0xbd>
{
public:
virtual mi::Size get_collection_id() const = 0;
virtual mi::Uint64 get_collection_timestamp() const = 0;
virtual mi::Uint32 get_local_host_id() const = 0;
virtual mi::Size get_nb_traces() const = 0;
virtual ITrace_event_iterator* get_trace_iterator() const = 0;
};
class ITrace_collection_handler
: public mi::base::Interface_declare<0xb9b64b1b,0x8349,0x4099,0xbe,0x2b,0x86,0x5f,0xca,0x04,0xe0,0x92>
{
public:
virtual void handle(ITrace_collection* collection) = 0;
};
class IEvent_tracing
: public mi::base::Interface_declare<0x5c604cf0,0xb4ac,0x4c19,0xb8,0xf8,0x2d,0x1c,0xeb,0xa6,0x9c,0xff>
{
public:
/// Add a notification handler for the availability of trace collection.
///
/// \param[in] handler Pointer to the notification handler.
virtual void add_collection_handler(ITrace_collection_handler* handler) = 0;
/// Get a specific collection id.
///
/// \param[in] collection_id The id of the collection. It currently maps to the frame id.
/// \param[in] raw Provide raw trace events (triggers collection from all hosts).
///
/// \return Returns a trace collection which could be empty.
virtual ITrace_collection* get_collection(mi::Uint32 collection_id, bool raw = false) = 0;
virtual void record(const char* ns, mi::Sint32 ns_id, const char* name, mi::Uint64 value) = 0;
virtual void record(const char* ns, mi::Sint32 ns_id, const char* name, mi::Sint64 value) = 0;
virtual void record(const char* ns, mi::Sint32 ns_id, const char* name, mi::Float64 value) = 0;
virtual void record_time_duration(const char* ns,
mi::Sint32 ns_id,
const char* name,
mi::Uint64 start_timestamp,
mi::Uint64 duration) = 0;
virtual void record_time_interval(const char* ns,
mi::Sint32 ns_id,
const char* name,
mi::Uint64 start_timestamp,
mi::Uint64 end_timestamp) = 0;
};
} // namespace index
} // namespace nv
#endif // NVIDIA_INDEX_ITRACE_EVENTS_H
|
bizo/aws-tools | emr/elastic-mapreduce-ruby-20131216/ec2_client_wrapper.rb | <filename>emr/elastic-mapreduce-ruby-20131216/ec2_client_wrapper.rb
#
# Copyright 2011-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
require 'credentials'
require 'aws/ec2'
require 'aws/core/response'
# Thin wrapper around the AWS::EC2 client used by the elastic-mapreduce CLI.
# Builds the SDK configuration from the parsed global command-line options,
# validates that credentials are present, retries transient failures with
# exponential backoff, and surfaces SDK error responses as RuntimeError.
class Ec2ClientWrapper
  attr_accessor :commands, :logger, :options

  # Exceptions treated as transient and therefore worth retrying.
  RETRIABLE_EXCEPTIONS = [AWS::Core::Client::NetworkError, AWS::EC2::Errors::ServerError]

  # commands: parsed CLI command object; its global_options supply region,
  # credentials and flags. logger must respond to #trace.
  def initialize(commands, logger)
    @commands = commands
    @logger = logger
    @options = commands.global_options
    @config = AWS.config(getConfig)
    @client = AWS::EC2.new(:config => @config).client
    # Number of retry attempts for calls wrapped in call_with_retry.
    @retries = 3
  end

  # Assembles the AWS SDK configuration hash from @options.
  # Raises RuntimeError when either credential is missing.
  def getConfig
    endpoint = EmrConfig.ec2_endpoint(@options[:region], @options[:suffix])
    region = @options[:region]
    uri = URI.parse(endpoint)
    config={
      :ec2_endpoint => uri.host,
      :ec2_port => uri.port,
      :use_ssl => true,
      :ssl_ca_file => File.join(File.dirname(__FILE__), "cacert.pem"),
      :ssl_ca_path => File.dirname(__FILE__),
      # NOTE(review): peer verification is disabled despite shipping a CA
      # bundle above -- confirm whether this is intentional.
      :ssl_verify_peer => false,
      :ec2_region => region,
      :verbose => (@options[:verbose] != nil),
      :secret_access_key => @options[:aws_secret_key],
      :access_key_id => @options[:aws_access_id],
      :http_read_timeout => 60.0,
      :http_wire_trace => @options[:trace] || false,
      # NOTE(review): storing the :force flag under an :options key looks
      # accidental -- verify the SDK actually consumes this entry.
      :options => @options[:force] || false
    }
    error_if_nil(config[:access_key_id], "Missing access-key")
    error_if_nil(config[:secret_access_key], "Missing secret-key")
    config
  end

  # Raises RuntimeError with +message+ when +value+ is nil.
  def error_if_nil(value, message)
    if value == nil then
      raise RuntimeError, message
    end
  end

  # True when the SDK response hash carries an 'Error' entry.
  def is_error_response(response)
    response != nil && response.key?('Error')
  end

  # Passes the response through, raising RuntimeError on an error payload.
  def raise_on_error(response)
    if is_error_response(response) then
      raise RuntimeError, response["Error"].inspect
    end
    return response
  end

  # Invokes +block+, retrying RETRIABLE_EXCEPTIONS up to +retry_count+ times
  # with exponential backoff (the delay doubles on each recursive attempt).
  # Any other exception is re-raised as a RuntimeError with the same message.
  def call_with_retry(retry_count, backoff=5, &block)
    begin
      result = block.call
    rescue *RETRIABLE_EXCEPTIONS => e
      warn e.message
      if retry_count > 0
        warn "Retrying in #{backoff} seconds..."
        sleep backoff
        result = call_with_retry(retry_count-1, backoff*2, &block)
      else
        raise e
      end
    rescue Exception => ex
      raise RuntimeError, ex.message
    end
    result
  end

  # Allocates a new Elastic IP address; retried, raises on error response.
  def allocate_address()
    logger.trace "AllocateAddress()"
    result = call_with_retry(@retries) {
      @client.allocate_address()
    }
    logger.trace result.inspect
    return raise_on_error(result)
  end

  # Associates an existing Elastic IP with an instance; retried, raises on
  # error response.
  def associate_address(instance_id, public_ip)
    logger.trace "AssociateAddress('InstanceId' => #{instance_id.inspect}, 'PublicIp' => #{public_ip.inspect})"
    result = call_with_retry(@retries) {
      @client.associate_address(:instance_id => instance_id.to_s, :public_ip => public_ip.to_s)
    }
    logger.trace result.inspect
    return raise_on_error(result)
  end

  #TODO: Add stubs for all other Ec2 WS operations here, see http://s3.amazonaws.com/ec2-downloads/2010-11-15.ec2.wsdl
end
|
lurnid/OpenRefine | main/src/com/google/refine/importers/tree/ImportRecord.java | /*******************************************************************************
* Copyright (C) 2018, OpenRefine contributors
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
package com.google.refine.importers.tree;
import java.util.ArrayList;
import java.util.List;
import com.google.refine.model.Cell;
/**
* A record describes a data element in a tree-structure
*
*/
/**
 * A record describes a data element in a tree-structure.
 *
 * Holds the spreadsheet rows produced for one tree record; rows are stored
 * sparsely at (potentially large) row indices via {@link BasedList}.
 */
public class ImportRecord {
    public List<List<Cell>> rows = new BasedList<List<Cell>>();

    /**
     * A List implementation to match the characteristics needed by the
     * import process. It's optimized for a relatively small number of
     * contiguous records at a potentially large offset from zero.
     * <p>
     * I suspect it's usually only a single row, but we support more, just
     * not as efficiently. Depending on the behavior of the ColumnGroups
     * this may not be necessary at all, but I don't fully understand what it
     * does, so we'll just put this hack in place for now.
     * <p>
     * Implementation: the backing ArrayList stores element {@code i} at
     * physical position {@code i - offset}; {@code offset} starts at
     * Integer.MAX_VALUE (meaning "empty") and is lowered lazily by
     * {@link #rebase(int)} as smaller indices are written.
     *
     * @param <T>
     */
    class BasedList<T> extends ArrayList<T> {
        private static final long serialVersionUID = 1L;
        // Logical index of physical slot 0; MAX_VALUE means nothing stored yet.
        int offset = Integer.MAX_VALUE;

        // Stores element at logical index, growing/shifting the backing list
        // as needed. Note: unlike ArrayList.set, this never throws for
        // out-of-range indices.
        public T set(int index, T element) {
            rebase(index);
            extend(index);
            return super.set(index - offset, element);
        }

        // Returns null (rather than throwing) for any index outside the
        // currently populated window.
        public T get(int index) {
            if (offset == Integer.MAX_VALUE || index - offset > size() - 1) {
                return null;
            }
            return super.get(index - offset);
        }

        // Lowers offset so that `index` becomes addressable, shifting any
        // existing elements up to their new physical positions.
        private void rebase(final int index) {
            if (index < offset) {
                if (offset < Integer.MAX_VALUE) {
                    int new_offset = Math.max(0, index - 10); // Leave some extra room
                    int delta = offset - new_offset;
                    // Ensure room at top
                    for (int i = 0; i < delta; i++) {
                        add(null);
                    }
                    // Shuffle up
                    for (int i = size(); i > delta; i --) {
                        set(i,get(i-delta));
                    } // Null unused entries
                    for (int i = 0; i < delta; i++) {
                        set(i,null);
                    }
                    offset = new_offset;
                } else {
                    // First write: anchor the window at this index.
                    offset = index;
                }
            }
        }

        // Pads the backing list with nulls until `index` is addressable.
        private void extend(final int index) {
            int i = index - offset;
            while (i >= size()) {
                add(null);
            }
        }
    }
}
|
14ms/Minecraft-Disclosed-Source-Modifications | Skizzle/net/minecraft/potion/PotionHelper.java | /*
* Decompiled with CFR 0.150.
*
* Could not load the following classes:
* com.google.common.collect.Lists
* com.google.common.collect.Maps
*/
package net.minecraft.potion;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import net.minecraft.potion.Potion;
import net.minecraft.potion.PotionEffect;
import optifine.Config;
import optifine.CustomColors;
/**
 * Decompiled (CFR) and partially obfuscated Minecraft potion-brewing helper.
 * Potion metadata is a 15-bit flag word; the String constants below are
 * rules in a small mini-language over those bits ("+n"/"-n" set/clear bit n,
 * "&" sequences terms, "!" negates, etc. -- grammar inferred from the
 * parsers below; confirm against MCP mappings before relying on details).
 * Field/method names beginning with "field_"/"func_" kept their obfuscated
 * identifiers.
 */
public class PotionHelper {
    public static final String field_77924_a = null;
    public static final String sugarEffect;
    public static final String ghastTearEffect = "+0-1-2-3&4-4+13";
    public static final String spiderEyeEffect;
    public static final String fermentedSpiderEyeEffect;
    public static final String speckledMelonEffect;
    public static final String blazePowderEffect;
    public static final String magmaCreamEffect;
    public static final String redstoneEffect;
    public static final String glowstoneEffect;
    public static final String gunpowderEffect;
    public static final String goldenCarrotEffect;
    public static final String field_151423_m;
    public static final String field_179538_n;
    // potion id -> requirement expression (which bits must be set/clear).
    private static final Map field_179539_o;
    // potion id -> amplifier expression.
    private static final Map field_179540_p;
    // damage value -> cached liquid color (see func_77915_a).
    private static final Map field_77925_n;
    private static final String[] potionPrefixes;
    static {
        field_179539_o = Maps.newHashMap();
        field_179540_p = Maps.newHashMap();
        field_179539_o.put(Potion.regeneration.getId(), "0 & !1 & !2 & !3 & 0+6");
        sugarEffect = "-0+1-2-3&4-4+13";
        field_179539_o.put(Potion.moveSpeed.getId(), "!0 & 1 & !2 & !3 & 1+6");
        magmaCreamEffect = "+0+1-2-3&4-4+13";
        field_179539_o.put(Potion.fireResistance.getId(), "0 & 1 & !2 & !3 & 0+6");
        speckledMelonEffect = "+0-1+2-3&4-4+13";
        field_179539_o.put(Potion.heal.getId(), "0 & !1 & 2 & !3");
        spiderEyeEffect = "-0-1+2-3&4-4+13";
        field_179539_o.put(Potion.poison.getId(), "!0 & !1 & 2 & !3 & 2+6");
        fermentedSpiderEyeEffect = "-0+3-4+13";
        field_179539_o.put(Potion.weakness.getId(), "!0 & !1 & !2 & 3 & 3+6");
        field_179539_o.put(Potion.harm.getId(), "!0 & !1 & 2 & 3");
        field_179539_o.put(Potion.moveSlowdown.getId(), "!0 & 1 & !2 & 3 & 3+6");
        blazePowderEffect = "+0-1-2+3&4-4+13";
        field_179539_o.put(Potion.damageBoost.getId(), "0 & !1 & !2 & 3 & 3+6");
        goldenCarrotEffect = "-0+1+2-3+13&4-4";
        field_179539_o.put(Potion.nightVision.getId(), "!0 & 1 & 2 & !3 & 2+6");
        field_179539_o.put(Potion.invisibility.getId(), "!0 & 1 & 2 & 3 & 2+6");
        field_151423_m = "+0-1+2+3+13&4-4";
        field_179539_o.put(Potion.waterBreathing.getId(), "0 & !1 & 2 & 3 & 2+6");
        field_179538_n = "+0+1-2+3&4-4+13";
        field_179539_o.put(Potion.jump.getId(), "0 & 1 & !2 & 3");
        glowstoneEffect = "+5-6-7";
        field_179540_p.put(Potion.moveSpeed.getId(), "5");
        field_179540_p.put(Potion.digSpeed.getId(), "5");
        field_179540_p.put(Potion.damageBoost.getId(), "5");
        field_179540_p.put(Potion.regeneration.getId(), "5");
        field_179540_p.put(Potion.harm.getId(), "5");
        field_179540_p.put(Potion.heal.getId(), "5");
        field_179540_p.put(Potion.resistance.getId(), "5");
        field_179540_p.put(Potion.poison.getId(), "5");
        field_179540_p.put(Potion.jump.getId(), "5");
        redstoneEffect = "-5+6-7";
        gunpowderEffect = "+14&13-13";
        field_77925_n = Maps.newHashMap();
        potionPrefixes = new String[]{"potion.prefix.mundane", "potion.prefix.uninteresting", "potion.prefix.bland", "potion.prefix.clear", "potion.prefix.milky", "potion.prefix.diffuse", "potion.prefix.artless", "potion.prefix.thin", "potion.prefix.awkward", "potion.prefix.flat", "potion.prefix.bulky", "potion.prefix.bungling", "potion.prefix.buttered", "potion.prefix.smooth", "potion.prefix.suave", "potion.prefix.debonair", "potion.prefix.thick", "potion.prefix.elegant", "potion.prefix.fancy", "potion.prefix.charming", "potion.prefix.dashing", "potion.prefix.refined", "potion.prefix.cordial", "potion.prefix.sparkling", "potion.prefix.potent", "potion.prefix.foul", "potion.prefix.odorless", "potion.prefix.rank", "potion.prefix.harsh", "potion.prefix.acrid", "potion.prefix.gross", "potion.prefix.stinky"};
    }

    // True when bit p_77914_1_ is set in p_77914_0_.
    public static boolean checkFlag(int p_77914_0_, int p_77914_1_) {
        return (p_77914_0_ & 1 << p_77914_1_) != 0;
    }

    // 1 when the bit is set, else 0.
    private static int isFlagSet(int p_77910_0_, int p_77910_1_) {
        return PotionHelper.checkFlag(p_77910_0_, p_77910_1_) ? 1 : 0;
    }

    // 1 when the bit is clear, else 0.
    private static int isFlagUnset(int p_77916_0_, int p_77916_1_) {
        return PotionHelper.checkFlag(p_77916_0_, p_77916_1_) ? 0 : 1;
    }

    // Packs bits 5,4,3,2,1 into a 5-bit index used to pick a name prefix.
    public static int func_77909_a(int p_77909_0_) {
        return PotionHelper.func_77908_a(p_77909_0_, 5, 4, 3, 2, 1);
    }

    // Averages the liquid colors of all visible effects (weighted by
    // amplifier+1); falls back to the default water-bottle color 3694022.
    public static int calcPotionLiquidColor(Collection p_77911_0_) {
        int var1 = 3694022;
        if (p_77911_0_ != null && !p_77911_0_.isEmpty()) {
            float var2 = 0.0f;
            float var3 = 0.0f;
            float var4 = 0.0f;
            float var5 = 0.0f;
            for (PotionEffect var7 : p_77911_0_) {
                if (!var7.func_180154_f()) continue;
                int var8 = Potion.potionTypes[var7.getPotionID()].getLiquidColor();
                if (Config.isCustomColors()) {
                    var8 = CustomColors.getPotionColor(var7.getPotionID(), var8);
                }
                for (int var9 = 0; var9 <= var7.getAmplifier(); ++var9) {
                    var2 += (float)(var8 >> 16 & 0xFF) / 255.0f;
                    var3 += (float)(var8 >> 8 & 0xFF) / 255.0f;
                    var4 += (float)(var8 >> 0 & 0xFF) / 255.0f;
                    var5 += 1.0f;
                }
            }
            if (var5 == 0.0f) {
                return 0;
            }
            var2 = var2 / var5 * 255.0f;
            var3 = var3 / var5 * 255.0f;
            var4 = var4 / var5 * 255.0f;
            return (int)var2 << 16 | (int)var3 << 8 | (int)var4;
        }
        if (Config.isCustomColors()) {
            var1 = CustomColors.getPotionColor(0, var1);
        }
        return var1;
    }

    // True when every effect in the collection is ambient.
    public static boolean func_82817_b(Collection potionEffects) {
        for (PotionEffect var2 : potionEffects) {
            if (var2.getIsAmbient()) continue;
            return false;
        }
        return true;
    }

    // Liquid color for a damage value, memoized in field_77925_n unless
    // bypassCache is set.
    public static int func_77915_a(int dataValue, boolean bypassCache) {
        if (!bypassCache) {
            if (field_77925_n.containsKey(dataValue)) {
                return (Integer)field_77925_n.get(dataValue);
            }
            int var2 = PotionHelper.calcPotionLiquidColor(PotionHelper.getPotionEffects(dataValue, false));
            field_77925_n.put(dataValue, var2);
            return var2;
        }
        return PotionHelper.calcPotionLiquidColor(PotionHelper.getPotionEffects(dataValue, true));
    }

    // Unlocalized name prefix ("potion.prefix.*") for a damage value.
    public static String func_77905_c(int p_77905_0_) {
        int var1 = PotionHelper.func_77909_a(p_77905_0_);
        return potionPrefixes[var1];
    }

    // Evaluates one term of a rule expression against flag word p_77904_6_.
    // p_77904_0_: negated ("!"), p_77904_1_: has multiplier ("*"),
    // p_77904_2_: subtracted ("-"), p_77904_3_: comparison op (0 "=", 1 ">",
    // 2 "<", -1 none).
    private static int func_77904_a(boolean p_77904_0_, boolean p_77904_1_, boolean p_77904_2_, int p_77904_3_, int p_77904_4_, int p_77904_5_, int p_77904_6_) {
        int var7 = 0;
        if (p_77904_0_) {
            var7 = PotionHelper.isFlagUnset(p_77904_6_, p_77904_4_);
        } else if (p_77904_3_ != -1) {
            if (p_77904_3_ == 0 && PotionHelper.countSetFlags(p_77904_6_) == p_77904_4_) {
                var7 = 1;
            } else if (p_77904_3_ == 1 && PotionHelper.countSetFlags(p_77904_6_) > p_77904_4_) {
                var7 = 1;
            } else if (p_77904_3_ == 2 && PotionHelper.countSetFlags(p_77904_6_) < p_77904_4_) {
                var7 = 1;
            }
        } else {
            var7 = PotionHelper.isFlagSet(p_77904_6_, p_77904_4_);
        }
        if (p_77904_1_) {
            var7 *= p_77904_5_;
        }
        if (p_77904_2_) {
            var7 = -var7;
        }
        return var7;
    }

    // Population count via the Kernighan bit-clearing loop.
    private static int countSetFlags(int p_77907_0_) {
        int var1 = 0;
        while (p_77907_0_ > 0) {
            p_77907_0_ &= p_77907_0_ - 1;
            ++var1;
        }
        return var1;
    }

    // Recursive-descent evaluator for the rule mini-language over the
    // substring [p_77912_1_, p_77912_2_): "|" = alternative (first positive
    // wins), "&" = conjunction (both positive, max taken), then a sum of
    // signed/negated terms handled inline below.
    private static int parsePotionEffects(String p_77912_0_, int p_77912_1_, int p_77912_2_, int p_77912_3_) {
        if (p_77912_1_ < p_77912_0_.length() && p_77912_2_ >= 0 && p_77912_1_ < p_77912_2_) {
            int var4 = p_77912_0_.indexOf(124, p_77912_1_);
            if (var4 >= 0 && var4 < p_77912_2_) {
                int var5 = PotionHelper.parsePotionEffects(p_77912_0_, p_77912_1_, var4 - 1, p_77912_3_);
                if (var5 > 0) {
                    return var5;
                }
                int var17 = PotionHelper.parsePotionEffects(p_77912_0_, var4 + 1, p_77912_2_, p_77912_3_);
                return var17 > 0 ? var17 : 0;
            }
            int var5 = p_77912_0_.indexOf(38, p_77912_1_);
            if (var5 >= 0 && var5 < p_77912_2_) {
                int var17 = PotionHelper.parsePotionEffects(p_77912_0_, p_77912_1_, var5 - 1, p_77912_3_);
                if (var17 <= 0) {
                    return 0;
                }
                int var18 = PotionHelper.parsePotionEffects(p_77912_0_, var5 + 1, p_77912_2_, p_77912_3_);
                return var18 <= 0 ? 0 : (var17 > var18 ? var17 : var18);
            }
            // Term accumulator state: var6 "*" seen, var7 multiplier parsed,
            // var8 digits pending, var9 "!", var10 "-", var11 comparison op.
            boolean var6 = false;
            boolean var7 = false;
            boolean var8 = false;
            boolean var9 = false;
            boolean var10 = false;
            int var11 = -1;
            int var12 = 0;
            int var13 = 0;
            int var14 = 0;
            for (int var15 = p_77912_1_; var15 < p_77912_2_; ++var15) {
                char var16 = p_77912_0_.charAt(var15);
                if (var16 >= '0' && var16 <= '9') {
                    if (var6) {
                        var13 = var16 - 48;
                        var7 = true;
                        continue;
                    }
                    var12 *= 10;
                    var12 += var16 - 48;
                    var8 = true;
                    continue;
                }
                if (var16 == '*') {
                    var6 = true;
                    continue;
                }
                if (var16 == '!') {
                    if (var8) {
                        var14 += PotionHelper.func_77904_a(var9, var7, var10, var11, var12, var13, p_77912_3_);
                        var9 = false;
                        var10 = false;
                        var6 = false;
                        var7 = false;
                        var8 = false;
                        var13 = 0;
                        var12 = 0;
                        var11 = -1;
                    }
                    var9 = true;
                    continue;
                }
                if (var16 == '-') {
                    if (var8) {
                        var14 += PotionHelper.func_77904_a(var9, var7, var10, var11, var12, var13, p_77912_3_);
                        var9 = false;
                        var10 = false;
                        var6 = false;
                        var7 = false;
                        var8 = false;
                        var13 = 0;
                        var12 = 0;
                        var11 = -1;
                    }
                    var10 = true;
                    continue;
                }
                if (var16 != '=' && var16 != '<' && var16 != '>') {
                    if (var16 != '+' || !var8) continue;
                    var14 += PotionHelper.func_77904_a(var9, var7, var10, var11, var12, var13, p_77912_3_);
                    var9 = false;
                    var10 = false;
                    var6 = false;
                    var7 = false;
                    var8 = false;
                    var13 = 0;
                    var12 = 0;
                    var11 = -1;
                    continue;
                }
                if (var8) {
                    var14 += PotionHelper.func_77904_a(var9, var7, var10, var11, var12, var13, p_77912_3_);
                    var9 = false;
                    var10 = false;
                    var6 = false;
                    var7 = false;
                    var8 = false;
                    var13 = 0;
                    var12 = 0;
                    var11 = -1;
                }
                if (var16 == '=') {
                    var11 = 0;
                    continue;
                }
                if (var16 == '<') {
                    var11 = 2;
                    continue;
                }
                if (var16 != '>') continue;
                var11 = 1;
            }
            if (var8) {
                var14 += PotionHelper.func_77904_a(var9, var7, var10, var11, var12, var13, p_77912_3_);
            }
            return var14;
        }
        return 0;
    }

    // Builds the list of PotionEffects encoded by a damage value: for each
    // potion whose requirement rule evaluates positive, the result feeds the
    // duration formula below; bit 0x4000 marks a splash potion (shorter
    // duration, flagged on the effect).
    public static List getPotionEffects(int p_77917_0_, boolean p_77917_1_) {
        ArrayList var2 = null;
        for (Potion var6 : Potion.potionTypes) {
            int var8;
            String var7;
            if (var6 == null || var6.isUsable() && !p_77917_1_ || (var7 = (String)field_179539_o.get(var6.getId())) == null || (var8 = PotionHelper.parsePotionEffects(var7, 0, var7.length(), p_77917_0_)) <= 0) continue;
            int var9 = 0;
            String var10 = (String)field_179540_p.get(var6.getId());
            if (var10 != null && (var9 = PotionHelper.parsePotionEffects(var10, 0, var10.length(), p_77917_0_)) < 0) {
                var9 = 0;
            }
            if (var6.isInstant()) {
                var8 = 1;
            } else {
                // Base duration in ticks, halved per amplifier level, scaled
                // by the potion's effectiveness; 0x4000 = splash -> ~75%.
                var8 = 1200 * (var8 * 3 + (var8 - 1) * 2);
                var8 >>= var9;
                var8 = (int)Math.round((double)var8 * var6.getEffectiveness());
                if ((p_77917_0_ & 0x4000) != 0) {
                    var8 = (int)Math.round((double)var8 * 0.75 + 0.5);
                }
            }
            if (var2 == null) {
                var2 = Lists.newArrayList();
            }
            PotionEffect var11 = new PotionEffect(var6.getId(), var8, var9);
            if ((p_77917_0_ & 0x4000) != 0) {
                var11.setSplashPotion(true);
            }
            var2.add(var11);
        }
        return var2;
    }

    // Applies one brewing bit operation: clear ("-"), toggle ("!"), set
    // (default), or test-only ("&" -> returns 0 when the bit is clear).
    private static int brewBitOperations(int p_77906_0_, int p_77906_1_, boolean p_77906_2_, boolean p_77906_3_, boolean p_77906_4_) {
        if (p_77906_4_) {
            if (!PotionHelper.checkFlag(p_77906_0_, p_77906_1_)) {
                return 0;
            }
        } else {
            p_77906_0_ = p_77906_2_ ? (p_77906_0_ &= ~(1 << p_77906_1_)) : (p_77906_3_ ? ((p_77906_0_ & 1 << p_77906_1_) == 0 ? (p_77906_0_ |= 1 << p_77906_1_) : (p_77906_0_ &= ~(1 << p_77906_1_))) : (p_77906_0_ |= 1 << p_77906_1_));
        }
        return p_77906_0_;
    }

    // Applies an ingredient's bit-operation string to a damage value and
    // masks the result to 15 bits.
    public static int applyIngredient(int p_77913_0_, String p_77913_1_) {
        int var2 = 0;
        int var3 = p_77913_1_.length();
        boolean var4 = false;
        boolean var5 = false;
        boolean var6 = false;
        boolean var7 = false;
        int var8 = 0;
        for (int var9 = var2; var9 < var3; ++var9) {
            char var10 = p_77913_1_.charAt(var9);
            if (var10 >= '0' && var10 <= '9') {
                var8 *= 10;
                var8 += var10 - 48;
                var4 = true;
                continue;
            }
            if (var10 == '!') {
                if (var4) {
                    p_77913_0_ = PotionHelper.brewBitOperations(p_77913_0_, var8, var6, var5, var7);
                    var7 = false;
                    var5 = false;
                    var6 = false;
                    var4 = false;
                    var8 = 0;
                }
                var5 = true;
                continue;
            }
            if (var10 == '-') {
                if (var4) {
                    p_77913_0_ = PotionHelper.brewBitOperations(p_77913_0_, var8, var6, var5, var7);
                    var7 = false;
                    var5 = false;
                    var6 = false;
                    var4 = false;
                    var8 = 0;
                }
                var6 = true;
                continue;
            }
            if (var10 == '+') {
                if (!var4) continue;
                p_77913_0_ = PotionHelper.brewBitOperations(p_77913_0_, var8, var6, var5, var7);
                var7 = false;
                var5 = false;
                var6 = false;
                var4 = false;
                var8 = 0;
                continue;
            }
            if (var10 != '&') continue;
            if (var4) {
                p_77913_0_ = PotionHelper.brewBitOperations(p_77913_0_, var8, var6, var5, var7);
                var7 = false;
                var5 = false;
                var6 = false;
                var4 = false;
                var8 = 0;
            }
            var7 = true;
        }
        if (var4) {
            p_77913_0_ = PotionHelper.brewBitOperations(p_77913_0_, var8, var6, var5, var7);
        }
        return p_77913_0_ & 0x7FFF;
    }

    // Packs five selected bits of the flag word into a 5-bit value
    // (first argument bit becomes the most significant).
    public static int func_77908_a(int p_77908_0_, int p_77908_1_, int p_77908_2_, int p_77908_3_, int p_77908_4_, int p_77908_5_) {
        return (PotionHelper.checkFlag(p_77908_0_, p_77908_1_) ? 16 : 0) | (PotionHelper.checkFlag(p_77908_0_, p_77908_2_) ? 8 : 0) | (PotionHelper.checkFlag(p_77908_0_, p_77908_3_) ? 4 : 0) | (PotionHelper.checkFlag(p_77908_0_, p_77908_4_) ? 2 : 0) | (PotionHelper.checkFlag(p_77908_0_, p_77908_5_) ? 1 : 0);
    }

    // Invalidates the memoized damage-value -> color map.
    public static void clearPotionColorCache() {
        field_77925_n.clear();
    }
}
|
gregory-chekler/api | src/api/views.py | from django.shortcuts import render
from django.http import JsonResponse
from database.utils.json_response_wrapper import Json
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_GET, require_POST
from database.utils.create_factory import CreateFactory
from database.utils.database_reader import DatabaseReader
from database.models import *
from database.utils.common import get_request_contents, rename_filter_args
import requests
import os
import uuid
FACTORY = CreateFactory("Data Creator")
FETCH = DatabaseReader("Database Reader")
def ping(request):
    """Liveness probe.

    Ignores the incoming ``request`` entirely and answers with an empty
    ``Json`` payload, so monitoring can poll it to confirm the server is up.
    """
    return Json()
@csrf_exempt
def startup_data(request, cid=None, subdomain=None):
    """Aggregate everything the front-end needs on first load for one community.

    The community is selected either by ``cid`` (primary key) or ``subdomain``;
    community-scoped queries filter on the corresponding lookup. Non-GET
    requests answer ``Json(None)``.
    """
    args = get_request_contents(request)
    if cid:
        args['community__id'] = cid
    if subdomain:
        args['community__subdomain'] = subdomain
    if request.method == 'GET':
        page = {}
        errors = {}
        pages, errors['pages'] = FETCH.all(Page, args)
        # NOTE(review): indexing [0] raises IndexError when a community is
        # missing any of these three pages -- confirm all communities have them.
        page['homePage'] = pages.filter(name = 'Home')[0]
        page['aboutUsPage'] = pages.filter(name = 'AboutUs')[0]
        page['donatePage'] = pages.filter(name = 'Donate')[0]
        # teamsPage = teams_stats() will just load this in if i need to when you visit that page the first time
        page['events'], errors['events'] = FETCH.all(Event, args)
        page['actions'], errors['actions'] = FETCH.all(Action, args)
        page['serviceProviders'], errors['serviceProviders'] = FETCH.all(Vendor, args)
        page['testimonials'], errors['testimonials'] = FETCH.all(Testimonial, args)
        page['communityData'], errors['communityData'] = FETCH.all(Data, args)
        page['community'], errors['community'] = FETCH.one(Community, args)
        # The remaining collections are global, so drop the community filters.
        args.pop('community__id', None)
        args.pop('community__subdomain', None)
        page['menus'], errors['menus'] = FETCH.all(Menu, args)
        page['policies'], errors['policies'] = FETCH.all(Policy, args)
        page['rsvps'], errors['rsvps'] = FETCH.all(EventAttendee, args)
        page['communities'], errors['communities'] = FETCH.all(Community, args)
        page['tagCols'], errors['tagCols'] = FETCH.all(TagCollection, args)
        return Json(page, errors)
    return Json(None)
@csrf_exempt
def actions(request):
    """List non-deleted actions (GET) or create a new Action (POST).

    POST normalizes form-encoded values first: comma-separated tag/vendor id
    lists become int lists (or are dropped when empty), stringified ints and
    booleans are converted, and an uploaded image is stored as a Media record
    referenced by pk.
    """
    args = get_request_contents(request)
    if request.method == 'GET':
        args['is_deleted'] = False
        actions, errors = FETCH.all(Action, args)
        return Json(actions, errors)
    elif request.method == 'POST':
        # Comma-separated id lists arrive as strings from form posts.
        if 'tags' in args and isinstance(args['tags'], str):
            if args['tags'] != '':
                args['tags'] = [int(k) for k in args['tags'].split(',')]
            else:
                del args['tags']
        if 'vendors' in args and isinstance(args['vendors'], str):
            if args['vendors'] != '':
                args['vendors'] = [int(k) for k in args['vendors'].split(',') if k != '']
            else:
                del args['vendors']
        if 'community' in args and isinstance(args['community'], str):
            args['community'] = int(args['community'])
        if 'is_global' in args and isinstance(args['is_global'], str):
            args['is_global'] = args['is_global'] == 'true'
        if 'image' in args and args['image']:
            img = args['image']
            media, errors = FACTORY.create(Media, {'file': img, 'media_type': "Image"})
            if errors:
                print(errors)
            if media:
                media.save()
                args['image'] = media.pk
            else:
                # Fix: previously read media.pk unconditionally, which raised
                # AttributeError when Media creation failed (media is None).
                # Mirrors the handling in the sibling `action` view.
                del args['image']
        action, errors = FACTORY.create(Action, args)
        return Json(action, errors)
    return Json(None)
@csrf_exempt
def action(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) the Action with this id.

    POST normalizes form-encoded values (comma-separated id lists, stringified
    ints/booleans) and stores an optional uploaded image as a Media record.
    """
    args = get_request_contents(request)
    args['id'] = int(id)
    if request.method == 'GET':
        action, errors = FETCH.one(Action, args)
        return Json(action, errors, use_full_json=True)
    elif request.method == 'POST':
        # Comma-separated id lists arrive as strings from form posts.
        if 'tags' in args and isinstance(args['tags'], str):
            args['tags'] = [int(k) for k in args['tags'].split(',')]
        if 'vendors' in args and isinstance(args['vendors'], str):
            args['vendors'] = [int(k) for k in args['vendors'].split(',') if k != '']
        if 'community' in args and isinstance(args['community'], str):
            args['community'] = int(args['community'])
        if 'is_global' in args and isinstance(args['is_global'], str):
            args['is_global'] = args['is_global'] == 'true'
        # Fix: was args['is_global'], which raised KeyError whenever the
        # payload did not include the key at all.
        if args.get('is_global'):
            args['community'] = None  # a global action belongs to no community
        if 'image' in args and args['image']:
            img = args['image']
            media, errors = FACTORY.create(Media, {'file': img})
            if media:
                media.save()
            if errors:
                print(errors)
                del args['image']
            else:
                args['image'] = media.pk
        action, errors = FACTORY.update(Action, args)
        return Json(action, errors, use_full_json=True)
    elif request.method == 'DELETE':
        items_deleted, errors = FETCH.delete(Action, args)
        return Json(items_deleted, errors)
    return Json(None)
@csrf_exempt
def action_copy(request, id):
    """Duplicate the Action with this id, appending ' Copy' to its title."""
    params = get_request_contents(request)
    params['id'] = int(id)
    original, errors = FETCH.one(Action, params)
    if not original:
        return Json()
    original.pk = None  # clearing the pk makes save() insert a new row
    original.title = str(original.title) + ' Copy'
    original.save()
    return Json(original, errors)
@csrf_exempt
def action_testimonials(request, id):
    """List the testimonials of one action (GET) or attach a new one (POST)."""
    payload = get_request_contents(request)
    payload['action__id'] = id
    if payload and request.method == 'POST':
        created, errs = FACTORY.create(Testimonial, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Testimonial, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def action_properties(request):
    """List ActionProperty records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(ActionProperty, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(ActionProperty, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def action_property(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one ActionProperty."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(ActionProperty, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(ActionProperty, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(ActionProperty, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def billing_statements(request):
    """List BillingStatement records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(BillingStatement, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(BillingStatement, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def billing_statement(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one BillingStatement."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(BillingStatement, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(BillingStatement, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(BillingStatement, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def communities(request):
    """List Community records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Community, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Community, payload)
        return Json(found, errs)
    return Json(None)  # DELETE intentionally not supported here
@csrf_exempt
def community(request, cid=None, subdomain=None):
    """Fetch (GET), update (POST) or delete (DELETE) a single Community,
    addressed either by id (`cid`) or by `subdomain`."""
    payload = get_request_contents(request)
    if cid:
        payload['id'] = cid
    if subdomain:
        payload['subdomain'] = subdomain
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Community, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        # TODO: create pages for this community, etc
        updated, errs = FACTORY.update(Community, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Community, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def community_actions(request, cid=None, subdomain=None):
    """List one community's non-deleted actions (GET) or create one (POST)."""
    payload = get_request_contents(request)
    if cid:
        payload['community__id'] = cid
    if subdomain:
        payload['community__subdomain'] = subdomain
    if request.method == 'POST':
        created, errs = FACTORY.create(Action, payload)
        return Json(created, errs)
    if request.method == 'GET':
        payload['is_deleted'] = False  # hide soft-deleted actions
        found, errs = FETCH.all(Action, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def community_members(request, cid=None, subdomain=None):
    """List one community's user profiles (GET) or create a profile (POST)."""
    payload = get_request_contents(request)
    if cid:
        payload['id'] = cid
    if subdomain:
        payload['subdomain'] = subdomain
    if request.method == 'GET':
        found, errs = FETCH.one(Community, payload)
        if found:
            return Json(found.userprofile_set.all(), errs)
        # community not found: fall through to Json(None)
    elif request.method == 'POST':
        created, errs = FACTORY.create(UserProfile, payload)
        return Json(created, errs)
    return Json(None)
@csrf_exempt
def community_impact(request, cid=None, subdomain=None):
    """List one community's impact graphs (GET) or create a graph (POST)."""
    payload = get_request_contents(request)
    if cid:
        payload['community__id'] = cid
    if subdomain:
        payload['community__subdomain'] = subdomain
    if request.method == 'POST':
        created, errs = FACTORY.create(Graph, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Graph, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def community_pages(request, cid=None, subdomain=None):
    """List one community's pages in full JSON (GET) or create a page (POST)."""
    payload = get_request_contents(request)
    if cid:
        payload['community__id'] = cid
    if subdomain:
        payload['community__subdomain'] = subdomain
    if request.method == 'POST':
        created, errs = FACTORY.create(Page, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Page, payload)
        return Json(found, errs, use_full_json=True)
    return Json(None)  # unsupported method
@csrf_exempt
def community_events(request, cid=None, subdomain=None):
    """List one community's non-deleted events (GET) or create one (POST)."""
    payload = get_request_contents(request)
    if cid:
        payload['community__id'] = cid
    if subdomain:
        payload['community__subdomain'] = subdomain
    if request.method == 'POST':
        created, errs = FACTORY.create(Event, payload)
        return Json(created, errs)
    if request.method == 'GET':
        payload['is_deleted'] = False  # hide soft-deleted events
        found, errs = FETCH.all(Event, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def community_households(request, cid=None, subdomain=None):
    """GET: list profiles for one community; POST: create a RealEstateUnit."""
    payload = get_request_contents(request)
    if cid:
        payload['id'] = cid
    if subdomain:
        payload['subdomain'] = subdomain
    if request.method == 'GET':
        found, errs = FETCH.one(Community, payload)
        if found:
            # NOTE(review): returns userprofile_set, same as community_members —
            # looks like it may have been meant to return the community's
            # real-estate units; confirm intent.
            return Json(found.userprofile_set.all(), errs)
    elif request.method == 'POST':
        created, errs = FACTORY.create(RealEstateUnit, payload)
        return Json(created, errs)
    return Json(None)
@csrf_exempt
def community_teams(request, cid=None, subdomain=None):
    """List one community's non-deleted teams (GET) or create a team (POST)."""
    args = get_request_contents(request)
    if cid:
        args['community__id'] = cid
    if subdomain:
        args['community__subdomain'] = subdomain
    if request.method == 'GET':
        args['is_deleted'] = False
        community, errors = FETCH.all(Team, args)
        return Json(community, errors)
    elif request.method == 'POST':
        # Fix: was FACTORY.create(Community, args) — a copy/paste slip that
        # created a Community record instead of a Team.
        community_teams, errors = FACTORY.create(Team, args)
        return Json(community_teams, errors)
    return Json(None)
@csrf_exempt
def community_data(request, cid=None, subdomain=None):
    """List one community's Data records (GET) or create a Data record (POST)."""
    args = get_request_contents(request)
    if cid:
        args['community__id'] = cid
    if subdomain:
        args['community__subdomain'] = subdomain
    if request.method == 'GET':
        community, errors = FETCH.all(Data, args)
        return Json(community, errors)
    elif request.method == 'POST':
        # Fix: was FACTORY.create(Community, args) — a copy/paste slip that
        # created a Community record instead of a Data record.
        community_data, errors = FACTORY.create(Data, args)
        return Json(community_data, errors)
    return Json(None)
@csrf_exempt
def community_vendors(request, cid=None, subdomain=None):
    """GET: list the non-deleted vendors of one community."""
    payload = get_request_contents(request)
    if cid:
        payload['id'] = cid
    if subdomain:
        payload['subdomain'] = subdomain
    if request.method == 'GET':
        found, errs = FETCH.one(Community, payload)
        if found:
            return Json(found.vendor_set.filter(is_deleted=False), errs)
    return Json(None)
@csrf_exempt
def community_testimonials(request, cid=None, subdomain=None):
    """List one community's non-deleted testimonials (GET) or create one (POST)."""
    args = get_request_contents(request)
    if cid:
        args['community__id'] = cid
    if subdomain:
        # NOTE(review): filter key differs from the cid branch — presumably
        # testimonials relate to a community through their action; confirm.
        args['action__community__subdomain'] = subdomain
    if request.method == 'GET':
        args['is_deleted'] = False
        community, errors = FETCH.all(Testimonial, args)
        return Json(community, errors)
    elif request.method == 'POST':
        # Fix: was FACTORY.create(Community, args) — a copy/paste slip that
        # created a Community record instead of a Testimonial.
        community_testimonial, errors = FACTORY.create(Testimonial, args)
        return Json(community_testimonial, errors)
    return Json(None)
@csrf_exempt
def community_admins(request):
    """List CommunityAdminGroup records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(CommunityAdminGroup, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(CommunityAdminGroup, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def community_admins_by_id_or_subdomain(request, cid=None, subdomain=None):
    """List one community's admin groups (GET) or create one (POST)."""
    payload = get_request_contents(request)
    if cid:
        payload['community__id'] = cid
    if subdomain:
        payload['community__subdomain'] = subdomain
    if request.method == 'POST':
        created, errs = FACTORY.create(CommunityAdminGroup, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(CommunityAdminGroup, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def community_admin_group(request, id):
    """Fetch (GET, full JSON), update (POST) or delete (DELETE) one
    CommunityAdminGroup."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(CommunityAdminGroup, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(CommunityAdminGroup, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(CommunityAdminGroup, payload)
        return Json(found, errs, use_full_json=True)
    return Json(None)  # unsupported method
def community_profile_full(request, cid):
    """GET: one community's full profile, plus all its users and graphs and
    the first three of its testimonials, events and actions."""
    payload = get_request_contents(request)
    payload['id'] = cid
    if request.method == 'GET':
        found, errs = FETCH.one(Community, payload)
        if found:
            profile = found.full_json()
            profile['users'] = [u.simple_json() for u in found.userprofile_set.all()]
            profile['testimonials'] = [t.simple_json() for t in found.testimonial_set.all()[:3]]
            profile['events'] = [e.simple_json() for e in found.event_set.all()[:3]]
            profile['actions'] = [a.simple_json() for a in found.action_set.all()[:3]]
            profile['graphs'] = [g.simple_json() for g in found.graph_set.all()]
            return Json(profile, errs, use_full_json=True)
    return Json(None)
@csrf_exempt
def email_categories(request):
    """List EmailCategory records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(EmailCategory, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(EmailCategory, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def email_category(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one EmailCategory."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(EmailCategory, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(EmailCategory, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(EmailCategory, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def events(request):
    """List Event records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Event, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Event, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def event(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Event."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Event, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Event, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Event, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def event_attendees(request):
    """List EventAttendee records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(EventAttendee, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(EventAttendee, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def event_attendee(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one EventAttendee."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(EventAttendee, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(EventAttendee, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(EventAttendee, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def goals(request):
    """List Goal records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Goal, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Goal, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def goal(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Goal."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Goal, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Goal, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Goal, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def graphs(request):
    """List Graph records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Graph, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Graph, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def graph(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Graph."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Graph, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Graph, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Graph, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def households(request):
    """List RealEstateUnit records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(RealEstateUnit, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(RealEstateUnit, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def household(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one RealEstateUnit."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(RealEstateUnit, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(RealEstateUnit, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(RealEstateUnit, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def locations(request):
    """List Location records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Location, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Location, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def location(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Location."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Location, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Location, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Location, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def media(request):
    """List Media records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Media, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Media, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def media_by_id(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Media record by id."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Media, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Media, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Media, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def media_with_slug(request, slug):
    """Fetch (GET), update (POST) or delete (DELETE) one Media record by name."""
    payload = get_request_contents(request)
    payload['name'] = slug  # the slug addresses the record by its name field
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Media, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Media, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Media, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def menus(request):
    """List Menu records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Menu, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Menu, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def menu(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Menu."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Menu, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Menu, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Menu, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def pages(request):
    """List Page records (GET) or create a new one (POST).

    GET serializes with the full JSON representation when exactly one page
    matches the filter, otherwise with the default representation.
    """
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Page, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Page, payload)
        if len(found) == 1:
            return Json(found, errs, use_full_json=True)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def page(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Page."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Page, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Page, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Page, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def page_sections(request):
    """List PageSection records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(PageSection, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(PageSection, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def page_section(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one PageSection."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(PageSection, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(PageSection, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(PageSection, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def permissions(request):
    """List Permission records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Permission, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Permission, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def permission(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Permission."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Permission, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Permission, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Permission, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def policies(request):
    """List Policy records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Policy, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Policy, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def policy(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Policy."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Policy, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Policy, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Policy, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def roles(request):
    """List Role records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Role, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Role, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def role(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Role."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Role, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Role, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Role, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def services(request):
    """List Service records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Service, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Service, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def service(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Service."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Service, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Service, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Service, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def sliders(request):
    """List Slider records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Slider, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Slider, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def slider(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Slider."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Slider, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Slider, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Slider, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def slider_images(request):
    """List SliderImage records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(SliderImage, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(SliderImage, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def slider_image(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one SliderImage."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(SliderImage, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(SliderImage, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(SliderImage, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def data(request):
    """List Data records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Data, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Data, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def data_by_id(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) the Data record with this id."""
    args = get_request_contents(request)
    args['id'] = id
    if request.method == 'GET':
        # Fix: was FETCH.all — every other by-id view fetches a single record,
        # and .all on a unique-id filter returned a list instead of one object.
        data, errors = FETCH.one(Data, args)
        return Json(data, errors)
    elif request.method == 'POST':
        data, errors = FACTORY.update(Data, args)
        return Json(data, errors)
    elif request.method == 'DELETE':
        items_deleted, errors = FETCH.delete(Data, args)
        return Json(items_deleted, errors)
    return Json(None)
@csrf_exempt
def subscribers(request):
    """List Subscriber records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Subscriber, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Subscriber, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def subscriber(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Subscriber by id."""
    args = get_request_contents(request)
    args['id'] = id
    if request.method == 'GET':
        # Fix: was FETCH.all — every other by-id view fetches a single record,
        # and .all on a unique-id filter returned a list instead of one object.
        subscriber, errors = FETCH.one(Subscriber, args)
        return Json(subscriber, errors)
    elif request.method == 'POST':
        subscriber, errors = FACTORY.update(Subscriber, args)
        return Json(subscriber, errors)
    elif request.method == 'DELETE':
        items_deleted, errors = FETCH.delete(Subscriber, args)
        return Json(items_deleted, errors)
    return Json(None)
@csrf_exempt
def subscriber_email_preferences(request):
    """List SubscriberEmailPreference records (GET) or create one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(SubscriberEmailPreference, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(SubscriberEmailPreference, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def subscriber_email_preference(request, id):
    """Fetch (GET) or update (POST) one SubscriberEmailPreference.

    DELETE is intentionally not handled here.
    """
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'POST':
        updated, errs = FACTORY.update(SubscriberEmailPreference, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(SubscriberEmailPreference, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
# @csrf_exempt  # NOTE(review): decorator commented out — confirm this is deliberate.
def tags(request):
    """List Tag records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(Tag, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(Tag, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def tag(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Tag."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(Tag, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(Tag, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(Tag, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def tag_collections(request):
    """List TagCollection records (GET) or create a new one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'POST':
        created, errs = FACTORY.create(TagCollection, payload)
        return Json(created, errs)
    if request.method == 'GET':
        found, errs = FETCH.all(TagCollection, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def tag_collection(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one TagCollection."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'DELETE':
        removed, errs = FETCH.delete(TagCollection, payload)
        return Json(removed, errs)
    if request.method == 'POST':
        updated, errs = FACTORY.update(TagCollection, payload)
        return Json(updated, errs)
    if request.method == 'GET':
        found, errs = FETCH.one(TagCollection, payload)
        return Json(found, errs)
    return Json(None)  # unsupported method
@csrf_exempt
def teams(request):
    """List all teams (GET) or create a new team (POST)."""
    payload = get_request_contents(request)
    if request.method == 'GET':
        items, errs = FETCH.all(Team, payload)
        return Json(items, errs)
    if request.method == 'POST':
        item, errs = FACTORY.create(Team, payload)
        return Json(item, errs)
    return Json(None)
@csrf_exempt
def team(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) the Team with this id."""
    payload = get_request_contents(request)
    payload['id'] = id
    method = request.method
    if method == 'GET':
        item, errs = FETCH.one(Team, payload)
        return Json(item, errs)
    if method == 'POST':
        item, errs = FACTORY.update(Team, payload)
        return Json(item, errs)
    if method == 'DELETE':
        deleted, errs = FETCH.delete(Team, payload)
        return Json(deleted, errs)
    return Json(None)
@csrf_exempt
def team_member(request, team_id, member_id):
    # Remove a user from a team (DELETE only); any other method falls
    # through to the empty JSON response at the bottom.
    if request.method == 'DELETE':
        # NOTE(review): `errors` from the team lookup is immediately
        # overwritten by the member lookup, so team-fetch errors are lost.
        team, errors = FETCH.get_one(Team, {'id': team_id})
        team_member, errors = FETCH.get_one(UserProfile, {'id': member_id})
        if(team and team_member):
            # Delete the join-table rows linking this user to this team.
            teamMember = TeamMember.objects.filter(team=team,user=team_member)
            teamMember.delete()
            # team.members being deprecated. Does this call get used?
            print("views/team_member")
            #team.members.remove(team_member)
            #team.save()
        return Json(team, errors)
    return Json(None)
@csrf_exempt
def team_stats(request, id):
    """Return household/action statistics for a single team (GET only)."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'GET':
        team, errs = FETCH.one(Team, payload)
        if team:
            stats = {"households": 0, "actions": 0, "actions_completed": 0, "actions_todo": 0}
            stats["team"] = team.simple_json()
            # Team.members is deprecated; membership lives in TeamMember.
            memberships = TeamMember.objects.filter(team=team)
            stats["members"] = memberships.count()
            for membership in memberships:
                profile = membership.user
                stats["households"] += len(profile.real_estate_units.all())
                rels = profile.useractionrel_set.all()
                stats["actions"] += len(rels)
                stats["actions_completed"] += len(rels.filter(status="DONE"))
                stats["actions_todo"] += len(rels.filter(status="TODO"))
            return Json(stats, errs)
    return Json(None)
@csrf_exempt
def teams_stats(request):
    """Return household/action statistics for every team (GET only).

    Fix: the per-member loop read ``user`` without ever assigning it
    (compare ``team_stats``, which does ``user = m.user``), so any team
    with members raised ``NameError`` at runtime.
    """
    args = get_request_contents(request)
    if request.method == 'GET':
        teams, errors = FETCH.all(Team, args)
        ans = []
        for team in teams:
            res = {"households": 0, "actions": 0, "actions_completed": 0, "actions_todo": 0}
            res["team"] = team.simple_json()
            # Team.members is deprecated; membership lives in TeamMember.
            teamMembers = TeamMember.objects.filter(team=team)
            res["members"] = teamMembers.count()
            for m in teamMembers:
                user = m.user  # was missing: previously an unbound name
                res["households"] += len(user.real_estate_units.all())
                actions = user.useractionrel_set.all()
                res["actions"] += len(actions)
                res["actions_completed"] += len(actions.filter(**{"status": "DONE"}))
                res["actions_todo"] += len(actions.filter(**{"status": "TODO"}))
            ans.append(res)
        return Json(ans, errors, do_not_serialize=True)
    return Json(None)
@csrf_exempt
def communities_stats(request):
    """Return engagement statistics for every community (GET only).

    Fix: non-GET requests previously fell off the end of the function and
    returned Python ``None`` (an invalid view response); they now return
    ``Json(None)`` like every sibling view.
    """
    args = get_request_contents(request)
    if request.method == 'GET':
        communities, errors = FETCH.all(Community, args)
        ans = []
        for community in communities:
            res = {"households_engaged": 0, "actions_completed": 0, "users_engaged": 0}
            res["community"] = community.simple_json()
            users, errors = FETCH.all(UserProfile, {"communities": community.id})
            res["users_engaged"] = len(users)
            # Pull the goal counters from the community's full JSON so the
            # numbers match the graphs (see community_stats).
            communityData = community.full_json()
            communityGoal = communityData["goal"]
            res["households_engaged"] = communityGoal["attained_number_of_households"]
            res["actions_completed"] = communityGoal["attained_number_of_actions"]
            ans.append(res)
        return Json(ans, errors, do_not_serialize=True)
    return Json(None)
@csrf_exempt
def community_stats(request, cid):
    """Return engagement statistics for one community (GET only)."""
    payload = get_request_contents(request)
    payload["id"] = cid
    if request.method == 'GET':
        community, errs = FETCH.one(Community, payload)
        if community:
            stats = {"households_engaged": 0, "actions_completed": 0, "users_engaged": 0}
            stats["community"] = community.simple_json()
            members, errs = FETCH.all(UserProfile, {"communities": community.id})
            stats["users_engaged"] = len(members)
            # Goal counters come from the community's full JSON so they
            # agree with the graphs.
            goal = community.full_json()["goal"]
            stats["households_engaged"] = goal["attained_number_of_households"]
            stats["actions_completed"] = goal["attained_number_of_actions"]
            return Json(stats, errs, do_not_serialize=True)
    return Json(None)
@csrf_exempt
def testimonials(request):
    """List all testimonials (GET) or create one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'GET':
        items, errs = FETCH.all(Testimonial, payload)
        return Json(items, errs)
    if request.method == 'POST':
        item, errs = FACTORY.create(Testimonial, payload)
        return Json(item, errs)
    return Json(None)
@csrf_exempt
def testimonial(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Testimonial."""
    payload = get_request_contents(request)
    payload['id'] = id
    method = request.method
    if method == 'GET':
        item, errs = FETCH.one(Testimonial, payload)
        return Json(item, errs)
    if method == 'POST':
        item, errs = FACTORY.update(Testimonial, payload)
        return Json(item, errs)
    if method == 'DELETE':
        deleted, errs = FETCH.delete(Testimonial, payload)
        return Json(deleted, errs)
    return Json(None)
@csrf_exempt
def users(request):
    """List all user profiles (GET) or create one (POST)."""
    payload = get_request_contents(request)
    if request.method == 'GET':
        profiles, errs = FETCH.all(UserProfile, payload)
        return Json(profiles, errs)
    if request.method == 'POST':
        profile, errs = FACTORY.create(UserProfile, payload)
        return Json(profile, errs)
    return Json(None)
@csrf_exempt
def user(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one UserProfile by id."""
    payload = get_request_contents(request)
    payload['id'] = id
    method = request.method
    if method == 'GET':
        profile, errs = FETCH.one(UserProfile, payload)
        return Json(profile, errs)
    if method == 'POST':
        profile, errs = FACTORY.update(UserProfile, payload)
        return Json(profile, errs)
    if method == 'DELETE':
        deleted, errs = FETCH.delete(UserProfile, payload)
        return Json(deleted, errs)
    return Json(None)
@csrf_exempt
def user_by_email(request, email):
    """Fetch (GET), update (POST) or delete (DELETE) a UserProfile by email."""
    payload = get_request_contents(request)
    payload['email'] = email
    method = request.method
    if method == 'GET':
        profile, errs = FETCH.one(UserProfile, payload)
        return Json(profile, errs)
    if method == 'POST':
        profile, errs = FACTORY.update(UserProfile, payload)
        return Json(profile, errs)
    if method == 'DELETE':
        deleted, errs = FETCH.delete(UserProfile, payload)
        return Json(deleted, errs)
    return Json(None)
@csrf_exempt
def user_households(request, id):
    """List a user's households (GET) or register a new household (POST)."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'GET':
        matches, errs = FETCH.all(UserProfile, payload,
                                  many_to_many_fields_to_prefetch=['real_estate_units'])
        if matches:
            return Json(matches.first().real_estate_units.all(), errs)
    elif request.method == 'POST':
        unit, errs = FACTORY.create(RealEstateUnit, payload)
        return Json(unit, errs)
    return Json(None)
@csrf_exempt
def user_households_by_email(request, email):
    """List a user's households by email (GET) or register one (POST)."""
    # TODO: not working yet
    payload = get_request_contents(request)
    payload['email'] = email
    if request.method == 'GET':
        matches, errs = FETCH.all(UserProfile, payload,
                                  many_to_many_fields_to_prefetch=['real_estate_units'])
        if matches:
            return Json(matches.first().real_estate_units.all(), errs)
    elif request.method == 'POST':
        unit, errs = FACTORY.create(RealEstateUnit, payload)
        return Json(unit, errs)
    return Json(None)
@csrf_exempt
def user_household_actions(request, id, hid):
    """List (GET) or create (POST) actions scoped to one user household."""
    payload = get_request_contents(request)
    payload['id'] = id
    payload['real_estate_unit'] = hid
    if request.method == 'GET':
        items, errs = FETCH.all(Action, payload)
        return Json(items, errs)
    if request.method == 'POST':
        item, errs = FACTORY.create(Action, payload)
        return Json(item, errs)
    return Json(None)
@csrf_exempt
def user_household_actions_by_email(request, email, hid):
    """List (GET, full JSON) or create (POST) household actions by email."""
    payload = get_request_contents(request)
    payload['email'] = email
    payload['real_estate_unit'] = hid
    if request.method == 'GET':
        items, errs = FETCH.all(Action, payload)
        return Json(items, errs, use_full_json=True)
    if request.method == 'POST':
        item, errs = FACTORY.create(Action, payload)
        return Json(item, errs)
    return Json(None)
@csrf_exempt
def user_actions(request, id):
    """List (GET), create (POST) or delete (DELETE) a user's action links."""
    payload = get_request_contents(request)
    payload['user'] = id
    method = request.method
    if method == 'GET':
        rels, errs = FETCH.all(UserActionRel, payload)
        return Json(rels, errs, use_full_json=True)
    if method == 'POST':
        rel, errs = FACTORY.create(UserActionRel, payload)
        return Json(rel, errs)
    if method == 'DELETE':
        deleted, errs = FETCH.delete(UserActionRel, payload)
        return Json(deleted, errs)
    return Json(None)
@csrf_exempt
def user_actions_by_email(request, email):
    """List (GET), create (POST) or delete (DELETE) action links by email."""
    payload = get_request_contents(request)
    payload['user__email'] = email
    method = request.method
    if method == 'GET':
        rels, errs = FETCH.all(UserActionRel, payload)
        return Json(rels, errs, use_full_json=True)
    if method == 'POST':
        rel, errs = FACTORY.create(UserActionRel, payload)
        return Json(rel, errs)
    if method == 'DELETE':
        deleted, errs = FETCH.delete(UserActionRel, payload)
        return Json(deleted, errs)
    return Json(None)
@csrf_exempt
def user_action(request, id, aid):
    """Fetch (GET), update (POST) or delete (DELETE) one UserActionRel."""
    payload = get_request_contents(request)
    payload['user'] = id
    payload['id'] = aid
    method = request.method
    if method == 'GET':
        rel, errs = FETCH.one(UserActionRel, payload)
        return Json(rel, errs, use_full_json=True)
    if method == 'POST':
        rel, errs = FACTORY.update(UserActionRel, payload)
        return Json(rel, errs)
    if method == 'DELETE':
        deleted, errs = FETCH.delete(UserActionRel, payload)
        return Json(deleted, errs)
    return Json(None)
@csrf_exempt
def user_action_by_email(request, email, aid):
    """Fetch (GET), update (POST) or delete (DELETE) one UserActionRel by email."""
    payload = get_request_contents(request)
    payload['user__email'] = email
    payload['id'] = aid
    method = request.method
    if method == 'GET':
        rel, errs = FETCH.one(UserActionRel, payload)
        return Json(rel, errs, use_full_json=True)
    if method == 'POST':
        rel, errs = FACTORY.update(UserActionRel, payload)
        return Json(rel, errs)
    if method == 'DELETE':
        deleted, errs = FETCH.delete(UserActionRel, payload)
        return Json(deleted, errs)
    return Json(None)
@csrf_exempt
def user_teams(request, id):
    """List the teams a user belongs to (GET) or create a team (POST)."""
    payload = get_request_contents(request)
    payload['id'] = id
    if request.method == 'GET':
        matches, errs = FETCH.all(UserProfile, payload)
        if matches:
            return Json(matches.first().team_members.all(), errs)
    elif request.method == 'POST':
        team, errs = FACTORY.create(Team, payload)
        return Json(team, errs)
    return Json(None)
@csrf_exempt
def user_teams_by_email(request, email):
    """List a user's teams by email (GET) or create a team (POST)."""
    payload = get_request_contents(request)
    payload['email'] = email
    if request.method == 'GET':
        matches, errs = FETCH.all(UserProfile, payload)
        if matches:
            return Json(matches.first().team_members.all(), errs)
    elif request.method == 'POST':
        team, errs = FACTORY.create(Team, payload)
        return Json(team, errs)
    return Json(None)
@csrf_exempt
def user_testimonials(request, id):
    """List (GET) or create (POST) testimonials belonging to one user."""
    payload = get_request_contents(request)
    payload['user__id'] = id
    if request.method == 'GET':
        items, errs = FETCH.all(Testimonial, payload)
        return Json(items, errs)
    if request.method == 'POST':
        item, errs = FACTORY.create(Testimonial, payload)
        return Json(item, errs)
    return Json(None)
@csrf_exempt
def user_testimonials_by_email(request, email):
    """List (GET) or create (POST) testimonials for a user, keyed by email."""
    payload = get_request_contents(request)
    payload['user__email'] = email
    if request.method == 'GET':
        items, errs = FETCH.all(Testimonial, payload)
        return Json(items, errs)
    if request.method == 'POST':
        item, errs = FACTORY.create(Testimonial, payload)
        return Json(item, errs)
    return Json(None)
@csrf_exempt
def user_groups(request):
    """List all user groups (GET) or create one (POST).

    Fix: the GET branch returned the ``UserGroup`` class itself instead of
    the fetched ``usergroups`` queryset.
    """
    args = get_request_contents(request)
    if request.method == 'GET':
        usergroups, errors = FETCH.all(UserGroup, args)
        return Json(usergroups, errors)
    elif request.method == 'POST':
        usergroup, errors = FACTORY.create(UserGroup, args)
        return Json(usergroup, errors)
    return Json(None)
@csrf_exempt
def user_group(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one UserGroup by id."""
    payload = get_request_contents(request)
    payload['id'] = id
    method = request.method
    if method == 'GET':
        group, errs = FETCH.one(UserGroup, payload)
        return Json(group, errs)
    if method == 'POST':
        group, errs = FACTORY.update(UserGroup, payload)
        return Json(group, errs)
    if method == 'DELETE':
        deleted, errs = FETCH.delete(UserGroup, payload)
        return Json(deleted, errs)
    return Json(None)
@csrf_exempt
def user_group_by_email(request, email):
    """Fetch (GET), update (POST) or delete (DELETE) a UserGroup by email."""
    payload = get_request_contents(request)
    payload['email'] = email
    method = request.method
    if method == 'GET':
        group, errs = FETCH.one(UserGroup, payload)
        return Json(group, errs)
    if method == 'POST':
        group, errs = FACTORY.update(UserGroup, payload)
        return Json(group, errs)
    if method == 'DELETE':
        deleted, errs = FETCH.delete(UserGroup, payload)
        return Json(deleted, errs)
    return Json(None)
@csrf_exempt
def vendors(request):
    """List non-deleted vendors (GET) or create a vendor (POST)."""
    payload = get_request_contents(request)
    if request.method == 'GET':
        # Listings exclude soft-deleted vendors.
        payload['is_deleted'] = False
        items, errs = FETCH.all(Vendor, payload)
        return Json(items, errs)
    if request.method == 'POST':
        item, errs = FACTORY.create(Vendor, payload)
        return Json(item, errs)
    return Json(None)
@csrf_exempt
def vendor(request, id):
    """Fetch (GET), update (POST) or delete (DELETE) one Vendor by id."""
    payload = get_request_contents(request)
    payload['id'] = id
    method = request.method
    if method == 'GET':
        item, errs = FETCH.one(Vendor, payload)
        return Json(item, errs)
    if method == 'POST':
        item, errs = FACTORY.update(Vendor, payload)
        return Json(item, errs)
    if method == 'DELETE':
        deleted, errs = FETCH.delete(Vendor, payload)
        return Json(deleted, errs)
    return Json(None)
@csrf_exempt
def startup_data(request, cid=None, subdomain=None):
    # Aggregate everything a community front end needs on first load:
    # the community itself plus its pages, events, actions, vendors,
    # testimonials, data points, menus, policies and tag collections.
    args = get_request_contents(request)
    if cid:
        args['id'] = cid
    if subdomain:
        args['subdomain'] = subdomain
    if request.method == 'GET':
        # NOTE(review): if no community matches, `c` is None and the
        # attribute accesses below raise AttributeError — confirm intended.
        c, err = FETCH.one(Community, args)
        result = {
            'community': c.simple_json(),
            'pages' : [p.full_json() for p in c.page_set.all()],
            'events' : [e.full_json() for e in c.event_set.all()],
            'actions' : [a.simple_json() for a in c.action_set.all()],
            'service_providers' : [e.full_json() for e in FETCH.all(Vendor, {})[0]],
            'testimonials' :[e.full_json() for e in c.testimonial_set.all()],
            'communityData': [e.full_json() for e in c.data_set.all()],
        }
        # `args` is repurposed here to scope the RSVP lookup only.
        args = {'community__subdomain': subdomain}
        menu, err = FETCH.all(Menu, {})
        policies, err = FETCH.all(Policy, {})
        # NOTE(review): `rsvps` is fetched but its serialization below is
        # commented out — dead query, candidate for removal.
        rsvps, err = FETCH.all(EventAttendee, args)
        communities, err = FETCH.all(Community, {})
        tag_collections, err = FETCH.all(TagCollection, {})
        result['menu'] = [m.simple_json() for m in menu]
        result['policies'] = [p.simple_json() for p in policies]
        # result['rsvps'] = [r.simple_json() for r in rsvps]
        result['communities'] = [c.simple_json() for c in communities]
        result['tag_collections'] = [t.full_json() for t in tag_collections]
        return Json(result)
    return Json(None)
@csrf_exempt
def verify_captcha(request):
    """Validate a Google reCAPTCHA token supplied as ``captchaString``.

    Posts the token plus the server-side secret to Google's siteverify
    endpoint and returns the verification payload on success, or an error
    list on failure / missing input.

    Fix: ``result['success']`` raised ``KeyError`` on a malformed upstream
    payload; ``result.get('success')`` treats a missing key as failure.
    """
    args = get_request_contents(request)
    if 'captchaString' in args:
        data = {
            'secret': os.environ.get('RECAPTCHA_SECRET_KEY'),
            'response': args['captchaString']
        }
        r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data)
        result = r.json()
        # A payload without 'success' counts as a failed verification.
        if result.get('success'):
            return Json(result)
        else:
            return Json(None, ['Invalid reCAPTCHA. Please try again.'])
    return Json(None, ['You are missing required field: "captchaString"'])
def home_page(request, cid=None, subdomain=None):
    # Resolve the 'Home' page for a community identified by id or subdomain.
    # NOTE(review): unlike the sibling views, this one is not @csrf_exempt —
    # confirm that is intentional.
    args = get_request_contents(request)
    if cid:
        args['community__id'] = cid
    if subdomain:
        args['community__subdomain'] = subdomain
    args['name'] = 'Home'
    if request.method == 'GET':
        # NOTE(review): GET calls FACTORY.create — a read endpoint that
        # creates a Page looks wrong; presumably FETCH.one was intended.
        home_page, err = FACTORY.create(Page, args)
        if err:
            return Json(None, err)
    elif request.method== 'POST':
        # NOTE(review): the fetched page `c` is never returned — this
        # branch looks unfinished.
        c, err = FETCH.one(Page, args)
    return Json(None)
mewbak/cc-1 | testdata/gcc-6.3.0/gcc/testsuite/gcc.c-torture/compile/pr37432.c | static void print_wkb_byte(unsigned char val) {
/* Forward one byte to the externally declared writer; the cast takes the
   address of the by-value parameter.  (GCC torture test PR37432 — only
   needs to compile, so print_wkb_bytes is intentionally undeclared.)  */
print_wkb_bytes((unsigned char *)&val, 1, 1);
}
/* Emit 0 when b is nonzero, otherwise 1.  */
void InsertMultiPoint(int b) {
char a = 1;
if (b) a = 0;
print_wkb_byte(a);
}
|
Sydney-Mobile-Application/OnTime-ToDo-App-React | src/pages/addToDoCalendar.js | import React from 'react';
import { StyleSheet, View, Text, Pressable, Alert } from 'react-native';
// import {Calendar, CalendarList, Agenda} from 'react-native-calendars';
import CalendarPicker from 'react-native-calendar-picker';
// import AddToDoTime from './addToDoTime';
export default function TermAndCondition ({closeCalendarModal, receiveDate}) {
const onDateChange = (date) => {
switch (date._i.month+1){
case 1:
var month = "January";
break;
case 2:
var month = "February";
break;
case 3:
var month = "March";
break;
case 4:
var month = "April";
break;
case 5:
var month = "May";
break;
case 6:
var month = "June";
break;
case 7:
var month = "July";
break;
case 8:
var month = "August";
break;
case 9:
var month = "September";
break;
case 10:
var month = "October";
break;
case 11:
var month = "November";
break;
case 12:
var month = "December";
break;
}
receiveDate(String(date._i.day) + " " + String(month) + " " + String(date._i.year));
console.log(date);
}
return (
<View style={styles.container}>
<View style={styles.modalView}>
{/* <Text style={styles.textTitleSave}>Choose Date And Time.</Text> */}
{/* <Calendar
selectionMode="singleDay"
scrollMode="oneMonth"
monthsBefore={12}
monthsAfter={24}
/> */}
<CalendarPicker
todayBackgroundColor="rgba(226, 238, 247, 1)"
todayTextStyle="#fff"
selectedDayColor="#293462"
selectedDayTextColor="#FFFFFF"
onDateChange={onDateChange}
// onDateChange={this.onDateChange}
/>
<Pressable style={styles.buttonSave} onPress={() => closeCalendarModal()}>
<Text style={{color: 'white', fontFamily:'Poppins_600SemiBold'}}>Save Date And Time</Text>
</Pressable>
</View>
</View>
);
};
const styles = StyleSheet.create({
  // Fullscreen wrapper that centers the modal card.
  container: {
    flex: 1,
    alignItems: 'center',
    justifyContent: 'center',
  },
  // The calendar card itself, pinned near the bottom of the screen.
  modalView: {
    width: '100%',
    margin: 20,
    backgroundColor: "#BFE4FF",
    borderRadius: 20,
    padding: 35,
    alignItems: "center",
    shadowColor: "#000",
    shadowOffset: {
      width: 0,
      height: 2
    },
    shadowOpacity: 0.25,
    shadowRadius: 4,
    elevation: 5,
    position: 'absolute',
    bottom: -40
  },
  // Title text (currently unused — the Text element is commented out).
  textTitleSave: {
    fontWeight: 'bold',
    fontSize: 18,
    marginBottom: 10
  },
  // Scroll area height cap (currently unused in this component).
  scrollViewSave: {
    maxHeight: 350,
    marginBottom: 10
  },
  // "Save Date And Time" confirm button.
  buttonSave: {
    marginTop: 50,
    backgroundColor: '#293462',
    color: '#293462',
    width: '100%',
    height: 50,
    borderRadius: 12,
    alignItems: 'center',
    justifyContent: 'center'
  }
});
qq756585379/TZTV-iOS | TZTV/TZTV/Model/ShopCart/CartViewModel.h | <filename>TZTV/TZTV/Model/ShopCart/CartViewModel.h
//
// CartViewModel.h
// TZTV
//
// Created by Luosa on 2016/11/30.
// Copyright © 2016年 Luosa. All rights reserved.
//
#import <Foundation/Foundation.h>
// View model backing the shopping-cart screen.
@interface CartViewModel : NSObject

// Cart item models driving the cart list.
@property (nonatomic, strong) NSMutableArray *modelArray;
/** Request command (ReactiveCocoa RACCommand) that triggers the fetch. */
@property (nonatomic, strong) RACCommand *requestCommand;
// Status/message string from the most recent request.
@property (nonatomic, copy) NSString *msg;

/// Load cart data from the network.
- (void)loadDataFromNetwork;

@end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.