text stringlengths 2 1.04M | meta dict |
|---|---|
ACCEPTED
#### According to
IRMNG Homonym List
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "71414414d15a1f95616dba36ecf7e6c3",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 18,
"avg_line_length": 8.692307692307692,
"alnum_prop": 0.6814159292035398,
"repo_name": "mdoering/backbone",
"id": "8dc4d89d3f7565078e759026a45c9e4d17d3fcbb",
"size": "159",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Ericales/Tauschia/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
# Creates the identities table: a user reference plus provider/uid
# string columns and the standard timestamps.
class CreateIdentities < ActiveRecord::Migration[5.0] # :nodoc:
  def change
    create_table :identities do |t|
      # t.references is the documented alias of t.belongs_to.
      t.references :user, foreign_key: true
      t.string :provider
      t.string :uid
      t.timestamps
    end
  end
end
| {
"content_hash": "c8f8dc8b2db7ba5182579b88d3740ecd",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 63,
"avg_line_length": 21.818181818181817,
"alnum_prop": 0.6458333333333334,
"repo_name": "nativestranger/personal",
"id": "a4d9e39e611580c50285e088da86651f500dbcfc",
"size": "271",
"binary": false,
"copies": "1",
"ref": "refs/heads/staging",
"path": "db/migrate/20160308024311_create_identities.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10231"
},
{
"name": "CoffeeScript",
"bytes": "1091"
},
{
"name": "Dockerfile",
"bytes": "1525"
},
{
"name": "HTML",
"bytes": "28941"
},
{
"name": "JavaScript",
"bytes": "42102"
},
{
"name": "Ruby",
"bytes": "232663"
},
{
"name": "Shell",
"bytes": "3345"
}
],
"symlink_target": ""
} |
# Tear down any existing "sprout" container. Failures here (e.g. the
# container does not exist) do not abort the script: no `set -e` is used.
docker stop sprout
docker rm sprout
# Relaunch the container; args are presumably <environment> <image tag> --
# confirm against bin/docker_run_sprout_container.sh.
bin/docker_run_sprout_container.sh staging latest
| {
"content_hash": "c94fca5e34b7f9850d879c5fa557d29d",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 49,
"avg_line_length": 28.666666666666668,
"alnum_prop": 0.8372093023255814,
"repo_name": "freeformpdx/pollard",
"id": "c1bf81f6ad38a3cd2e6fdc90bd8b53367914e9b3",
"size": "201",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "bin/staging_restart_sprout.sh",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "111"
},
{
"name": "HTML",
"bytes": "1138"
},
{
"name": "JavaScript",
"bytes": "105333"
},
{
"name": "Shell",
"bytes": "9602"
}
],
"symlink_target": ""
} |
var _ = require('lodash');
var path = require("path");
module.exports = function(grunt) {
function config(components) {
var targets =_.reduce(components, function(result, component) {
result.push(path.join(component, "src/**/*"));
result.push(path.join(component, "demo/**/*"));
return result;
}, []);
grunt.config("gh-pages", {
src: targets
});
}
return {
config: config
};
}; | {
"content_hash": "79a38ea605d2203f4bdad404ce48302b",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 65,
"avg_line_length": 20.3,
"alnum_prop": 0.6206896551724138,
"repo_name": "egjs/egjs-experiment",
"id": "f72e39d181d0e1d48f307d9a02d04015cfeab6fc",
"size": "406",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "GruntTasks/gh_pages.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "143"
},
{
"name": "HTML",
"bytes": "4136"
},
{
"name": "JavaScript",
"bytes": "22942"
}
],
"symlink_target": ""
} |
package info.bowkett.joxy;
import java.io.*;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
/**
* Created by IntelliJ IDEA.
* User: jbowkett
* Date: Jul 28, 2013
* Time: 10:02:25 AM
* To change this template use File | Settings | File Templates.
*/
public class RequestRunnable implements Runnable {
  private final Socket proxyClientConnection;
  private final RequestReader requestReader;
  private final RequestParser requestParser;
  private final Augmenter augmenter;

  /**
   * @param proxyClientConnection accepted client socket to serve
   * @param requestReader         reads the raw bytes of a request/response
   * @param requestParser         parses the raw request text
   * @param augmenter             transforms the upstream response bytes
   */
  public RequestRunnable(Socket proxyClientConnection, RequestReader requestReader,
                         RequestParser requestParser, Augmenter augmenter) {
    this.proxyClientConnection = proxyClientConnection;
    this.requestReader = requestReader;
    this.requestParser = requestParser;
    this.augmenter = augmenter;
  }

  /**
   * Reads and parses the client's request, forwards it to the destination
   * host (port 80 only), then writes the augmented response back to the
   * client. IOExceptions are logged to stderr and the request is dropped.
   */
  public void run() {
    try {
      final String requestStr = new String(requestReader.readRequest(new BufferedInputStream(proxyClientConnection.getInputStream())));
      final Request request = requestParser.parseRequest(requestStr);
      final String destination = request.getDestination();
      if(destination != null){
        final Socket socket = new Socket(destination, 80);
        final byte[] response;
        try {
          final InputStream inputStreamFromRemote = makeRemoteRequest(socket, request);
          response = requestReader.readRequest(inputStreamFromRemote);
        }
        finally {
          // Fix: the upstream socket was previously never closed, leaking
          // one file descriptor per proxied request.
          socket.close();
        }
        final byte[] augmented = augmenter.augment(request, response);
        returnResponse(augmented, proxyClientConnection);
        // NOTE(review): proxyClientConnection is left open, as in the
        // original code -- confirm the caller is responsible for closing it.
        System.out.println("done.");
      }
    }
    catch (IOException ioe) {
      System.err.println("Cannot service request : " + ioe.getMessage());
      ioe.printStackTrace(System.err);
    }
  }

  /**
   * Writes the augmented response bytes back to the client connection.
   */
  private void returnResponse(byte[] augmented, Socket socket) throws IOException {
    final DataOutputStream out = new DataOutputStream(socket.getOutputStream());
    // The payload is already fully in memory, so write it in one call
    // instead of the former ByteArrayInputStream copy loop.
    out.write(augmented, 0, augmented.length);
    out.flush();
  }

  /**
   * Sends the request to the remote host and returns a buffered stream over
   * its reply. A 10 second read timeout is set on the socket.
   */
  public BufferedInputStream makeRemoteRequest(Socket socket, Request request) throws IOException {
    socket.setSoTimeout(10 * 1000);
    BufferedOutputStream serverOut = new BufferedOutputStream(socket.getOutputStream());
    // send the request out
    serverOut.write(request.text().getBytes(), 0, request.length());
    serverOut.flush();
    return new BufferedInputStream(socket.getInputStream());
  }
}
| {
"content_hash": "0bbacf55f728616dce005db559b83fea",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 135,
"avg_line_length": 34.701298701298704,
"alnum_prop": 0.717814371257485,
"repo_name": "jbowkett/joxy",
"id": "7fede86107a4a55c06bb73d3aa0bed20a5dced79",
"size": "2672",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/info/bowkett/joxy/RequestRunnable.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "21889"
}
],
"symlink_target": ""
} |
package pl.writeonly.re.shared.calculator
import scalaprops._
import scalaz.Scalaz._
object CalculatorIT extends Scalaprops {
  // System under test.
  val calculator = new Calculator()

  // add must agree with Int addition.
  val addition: (Int, Int) => Int = (left, right) => calculator.add(left, right)
  val additionTest = Property.forAll { (x: Int, y: Int) =>
    addition(x, y) === x + y
  }

  // mul must agree with Int multiplication.
  val multiplication: (Int, Int) => Int = (left, right) => calculator.mul(left, right)
  val multiplicationTest = Property.forAll { (x: Int, y: Int) =>
    multiplication(x, y) === x * y
  }

  // leq must agree with Int <=.
  val lessOrEqual: (Int, Int) => Boolean = (left, right) => calculator.leq(left, right)
  val lessOrEqualTest = Property.forAll { (x: Int, y: Int) =>
    lessOrEqual(x, y) === (x <= y)
  }
}
| {
"content_hash": "f4915fa011ab8bb942974c6e853cf305",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 73,
"avg_line_length": 29.565217391304348,
"alnum_prop": 0.6176470588235294,
"repo_name": "writeonly/resentiment",
"id": "a3b59bd1571c5a03b45ef474babdeb4f251922d1",
"size": "680",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "re/shared/src/it/scala/pl/writeonly/re/shared/calculator/CalculatorIT.scala",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Scala",
"bytes": "14766"
}
],
"symlink_target": ""
} |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>ip::address_v4::from_string (1 of 4 overloads)</title>
<link rel="stylesheet" href="../../../../boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.75.2">
<link rel="home" href="../../../../index.html" title="Asio">
<link rel="up" href="../from_string.html" title="ip::address_v4::from_string">
<link rel="prev" href="../from_string.html" title="ip::address_v4::from_string">
<link rel="next" href="overload2.html" title="ip::address_v4::from_string (2 of 4 overloads)">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr><td valign="top"><img alt="asio C++ library" width="250" height="60" src="../../../../asio.png"></td></tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="../from_string.html"><img src="../../../../prev.png" alt="Prev"></a><a accesskey="u" href="../from_string.html"><img src="../../../../up.png" alt="Up"></a><a accesskey="h" href="../../../../index.html"><img src="../../../../home.png" alt="Home"></a><a accesskey="n" href="overload2.html"><img src="../../../../next.png" alt="Next"></a>
</div>
<div class="section">
<div class="titlepage"><div><div><h5 class="title">
<a name="asio.reference.ip__address_v4.from_string.overload1"></a><a class="link" href="overload1.html" title="ip::address_v4::from_string (1 of 4 overloads)">ip::address_v4::from_string
(1 of 4 overloads)</a>
</h5></div></div></div>
<p>
Create an address from an IP address string in dotted decimal form.
</p>
<pre class="programlisting"><span class="keyword">static</span> <span class="identifier">address_v4</span> <span class="identifier">from_string</span><span class="special">(</span>
<span class="keyword">const</span> <span class="keyword">char</span> <span class="special">*</span> <span class="identifier">str</span><span class="special">);</span>
</pre>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2003-2015 Christopher M.
Kohlhoff<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="../from_string.html"><img src="../../../../prev.png" alt="Prev"></a><a accesskey="u" href="../from_string.html"><img src="../../../../up.png" alt="Up"></a><a accesskey="h" href="../../../../index.html"><img src="../../../../home.png" alt="Home"></a><a accesskey="n" href="overload2.html"><img src="../../../../next.png" alt="Next"></a>
</div>
</body>
</html>
| {
"content_hash": "3e8434efa60447e2975e1188da7df1ca",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 358,
"avg_line_length": 66.52272727272727,
"alnum_prop": 0.6272634096344379,
"repo_name": "jeanleflambeur/silkopter",
"id": "cb3ee72fe6a4c001a4f12bba8090aa6d20637765",
"size": "2927",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "asio/doc/asio/reference/ip__address_v4/from_string/overload1.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "110"
},
{
"name": "C",
"bytes": "2221045"
},
{
"name": "C++",
"bytes": "24409548"
},
{
"name": "CMake",
"bytes": "216809"
},
{
"name": "CSS",
"bytes": "34635"
},
{
"name": "Cuda",
"bytes": "4881"
},
{
"name": "Fortran",
"bytes": "1315584"
},
{
"name": "HTML",
"bytes": "18916392"
},
{
"name": "JavaScript",
"bytes": "7839"
},
{
"name": "Lex",
"bytes": "3749"
},
{
"name": "Lua",
"bytes": "3762"
},
{
"name": "M4",
"bytes": "9302"
},
{
"name": "Objective-C",
"bytes": "2096"
},
{
"name": "Objective-C++",
"bytes": "168"
},
{
"name": "Perl",
"bytes": "6547"
},
{
"name": "Python",
"bytes": "8937"
},
{
"name": "QML",
"bytes": "10680"
},
{
"name": "QMake",
"bytes": "101603"
},
{
"name": "Shell",
"bytes": "73534"
}
],
"symlink_target": ""
} |
<?php
namespace PHPExiftool\Driver\Tag\DICOM;
use JMS\Serializer\Annotation\ExclusionPolicy;
use PHPExiftool\Driver\AbstractTag;
/**
* @ExclusionPolicy("all")
*/
class ZonalMapNumberFormat extends AbstractTag
{
    // DICOM data element id in "group,element" hexadecimal form.
    protected $Id = '0028,0720';
    // Tag name as exposed by exiftool.
    protected $Name = 'ZonalMapNumberFormat';
    // Fully qualified exiftool table the tag belongs to.
    protected $FullName = 'DICOM::Main';
    protected $GroupName = 'DICOM';
    // exiftool family 0/1/2 group names.
    protected $g0 = 'DICOM';
    protected $g1 = 'DICOM';
    protected $g2 = 'Image';
    // '?' marks an unknown value type.
    protected $Type = '?';
    // Read-only tag.
    protected $Writable = false;
    // Human readable description.
    protected $Description = 'Zonal Map Number Format';
}
| {
"content_hash": "26f3c2aff1aee0f324a3a8a50643edce",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 55,
"avg_line_length": 16.82857142857143,
"alnum_prop": 0.6689303904923599,
"repo_name": "romainneutron/PHPExiftool",
"id": "6973942e96ad832aefe42b2435e0bf87739ee401",
"size": "811",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/PHPExiftool/Driver/Tag/DICOM/ZonalMapNumberFormat.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "22042446"
}
],
"symlink_target": ""
} |
"""Script to rename icons files and pack them into a zip file.
build_icon.py --inputs ui-tool.png hiragana.svg ... --output icons.zip
"""
import argparse
import os
import shutil
import tempfile
from xml.dom import minidom
# Source file name -> name the file gets inside the icons zip.
RENAME_RULES = {
    'product_icon_32bpp-128.png': 'mozc.png',
    'icon.svg': 'mozc.svg',
    'full_ascii.svg': 'alpha_full.svg',
    'full_katakana.svg': 'katakana_full.svg',
    'half_ascii.svg': 'alpha_half.svg',
    'half_katakana.svg': 'katakana_half.svg',
    'ui-dictionary.png': 'dictionary.png',
    'ui-properties.png': 'properties.png',
    'ui-tool.png': 'tool.png',
}
# Icons (post-rename) that additionally get per-variant styled copies.
VARIANT_SVGS = [
    'dictionary.svg',
    'properties.svg',
    'tool.svg',
]
# Variant name -> SVG attributes applied to <path> elements that have no
# explicit fill of their own (see ModifySvg).
VARIANT_ATTRIBUTES = {
    'white': {
        'fill': 'white',
    },
    'outlined': {
        'stroke': 'lightgray',
        'stroke-width': '0.9',
    },
}
def ParseArguments():
  """Parses command line flags.

  Returns:
    argparse.Namespace with `inputs` (list of icon paths) and `output`
    (path of the zip file to create).
  """
  arg_parser = argparse.ArgumentParser()
  arg_parser.add_argument('--inputs', nargs='+')
  arg_parser.add_argument('--output')
  return arg_parser.parse_args()
def GetDestName(src) -> str:
  """Returns the packaged name for `src`, applying RENAME_RULES if any."""
  original = os.path.basename(src)
  return RENAME_RULES.get(original, original)
def ModifySvg(base_svgfile, new_svgfile, attributes):
  """Writes a copy of `base_svgfile` with `attributes` applied.

  The attributes are only set on <path> elements that carry no explicit
  `fill` attribute of their own; filled paths are left untouched.
  """
  with minidom.parse(base_svgfile) as doc:
    for node in doc.getElementsByTagName('path'):
      if not node.hasAttribute('fill'):
        for attr_name, attr_value in attributes.items():
          node.setAttribute(attr_name, attr_value)
    with open(new_svgfile, 'w') as out_file:
      doc.writexml(out_file)
def CreateVariantIcons(base_svgfile, variant_attributes):
  """Generates styled copies of an SVG in per-variant subdirectories.

  For each (variant name, attributes) pair, writes <dir>/<variant>/<name>
  next to `base_svgfile` via ModifySvg.
  """
  parent_dir = os.path.dirname(base_svgfile)
  file_name = os.path.basename(base_svgfile)
  for variant_name, svg_attributes in variant_attributes.items():
    out_dir = os.path.join(parent_dir, variant_name)
    os.makedirs(out_dir, exist_ok=True)
    ModifySvg(base_svgfile, os.path.join(out_dir, file_name), svg_attributes)
def main():
  """Copies/renames the input icons and packs them into the output zip.

  Icons listed in VARIANT_SVGS additionally get styled variant copies
  (see CreateVariantIcons) which are packed into the archive as well.
  """
  args = ParseArguments()
  with tempfile.TemporaryDirectory() as tmp_dir:
    for src in args.inputs:
      dest_name = GetDestName(src)
      dest_path = os.path.join(tmp_dir, dest_name)
      shutil.copyfile(src, dest_path)
      if dest_name in VARIANT_SVGS:
        CreateVariantIcons(dest_path, VARIANT_ATTRIBUTES)
    # Bug fix: build the archive while tmp_dir still exists. These two
    # statements previously ran after the TemporaryDirectory context had
    # exited, so root_dir had already been deleted.
    basename = os.path.splitext(args.output)[0]
    shutil.make_archive(basename, format='zip', root_dir=tmp_dir)


if __name__ == '__main__':
  main()
| {
"content_hash": "7913d5996c3b8a0f58104089da6aa971",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 76,
"avg_line_length": 26.269662921348313,
"alnum_prop": 0.6599657827202737,
"repo_name": "google/mozc",
"id": "844a1611dc33a0c3c0962f2e081c7d51a3f3a041",
"size": "3896",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/unix/build_icons.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "69980"
},
{
"name": "C++",
"bytes": "11235105"
},
{
"name": "Dockerfile",
"bytes": "3319"
},
{
"name": "Emacs Lisp",
"bytes": "80236"
},
{
"name": "HTML",
"bytes": "60487"
},
{
"name": "Objective-C",
"bytes": "39737"
},
{
"name": "Objective-C++",
"bytes": "211656"
},
{
"name": "Python",
"bytes": "945726"
},
{
"name": "Shell",
"bytes": "5137"
},
{
"name": "Starlark",
"bytes": "534040"
},
{
"name": "Yacc",
"bytes": "1967"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<Tokens version="1.0">
<File path="Classes/US2ValidatorTextField.html">
<Token>
<TokenIdentifier>//apple_ref/occ/cl/US2ValidatorTextField</TokenIdentifier>
<Abstract>The validator text field is the central object to use. It inherits UITextField and listens internally
to all own changes. After every change it validates the changed text material. When a validator, which
was added to the validator text field, reports a violation the validatorTextFieldDelegate will be
served with this information. The validatorTextFieldDelegate is of type US2ValidatorTextFieldDelegate
and returns what went wrong and in which status the validation text field is at the moment.</Abstract>
<DeclaredIn>US2ValidatorTextField.h</DeclaredIn>
<NodeRef refid="14"/>
</Token>
<Token>
<TokenIdentifier>//apple_ref/occ/instp/US2ValidatorTextField/validatorTextFieldDelegate</TokenIdentifier>
<Abstract>Set delegate implementing US2ValidatorTextFieldPrivate
@todo Rename delegate to validationTextFieldDelegate</Abstract>
<DeclaredIn>US2ValidatorTextField.h</DeclaredIn>
<Declaration>@property (nonatomic, assign) id<US2ValidatorTextFieldDelegate> validatorTextFieldDelegate</Declaration>
<Anchor>//api/name/validatorTextFieldDelegate</Anchor>
</Token>
<Token>
<TokenIdentifier>//apple_ref/occ/instp/US2ValidatorTextField/validator</TokenIdentifier>
<Abstract>Set the validator to check the text of the text field with</Abstract>
<DeclaredIn>US2ValidatorTextField.h</DeclaredIn>
<Declaration>@property (nonatomic, retain) US2Validator *validator</Declaration>
<Anchor>//api/name/validator</Anchor>
</Token>
<Token>
<TokenIdentifier>//apple_ref/occ/instp/US2ValidatorTextField/shouldAllowViolation</TokenIdentifier>
<Abstract>Determines whether text inputs can be made either by violating the conditions.
Is this parameter NO it overrides the 'shouldAllowViolation' parameter of
the conditions added to the validator. If set to YES the 'shouldAllowViolation'
parameters of the conditions considered.</Abstract>
<DeclaredIn>US2ValidatorTextField.h</DeclaredIn>
<Declaration>@property (nonatomic, assign) BOOL shouldAllowViolation</Declaration>
<Anchor>//api/name/shouldAllowViolation</Anchor>
</Token>
<Token>
<TokenIdentifier>//apple_ref/occ/instp/US2ValidatorTextField/isValid</TokenIdentifier>
<Abstract>Return whether the text is valid.</Abstract>
<DeclaredIn>US2ValidatorTextField.h</DeclaredIn>
<Declaration>@property (nonatomic, assign, readonly) BOOL isValid</Declaration>
<ReturnValue><Abstract>Returns the valid state of the text field</Abstract></ReturnValue>
<Anchor>//api/name/isValid</Anchor>
</Token>
<Token>
<TokenIdentifier>//apple_ref/occ/instp/US2ValidatorTextField/validateOnFocusLossOnly</TokenIdentifier>
<Abstract>Determines whether the text has to be validated after leaving the text field
or while editing. After a violation appeared after leaving the text field
the text field will from now on validate while editing, because the user
now knows that a violation occurs when using this text field.</Abstract>
<DeclaredIn>US2ValidatorTextField.h</DeclaredIn>
<Declaration>@property (nonatomic, assign) BOOL validateOnFocusLossOnly</Declaration>
<Anchor>//api/name/validateOnFocusLossOnly</Anchor>
</Token>
</File>
</Tokens> | {
"content_hash": "eb18bf8fe520e9aeed06d3299e43a9ca",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 126,
"avg_line_length": 42.97530864197531,
"alnum_prop": 0.7684573398448722,
"repo_name": "pirogoffSergey/US2FormValidator",
"id": "639e96da3316907bb571861434b0af03ba9db909",
"size": "3481",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "US2FormValidationFramework/documentation/docset/Contents/Resources/Tokens14.xml",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
'''
JSON related utilities.
This module provides a few things:
1) A handy function for getting an object down to something that can be
JSON serialized. See to_primitive().
2) Wrappers around loads() and dumps(). The dumps() wrapper will
automatically use to_primitive() for you if needed.
3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
is available.
'''
import datetime
import functools
import inspect
import itertools
import sys
if sys.version_info < (2, 7):
# On Python <= 2.6, json module is not C boosted, so try to use
# simplejson module if available
try:
import simplejson as json
except ImportError:
import json
else:
import json
import six
import six.moves.xmlrpc_client as xmlrpclib
from ceilometer.openstack.common import gettextutils
from ceilometer.openstack.common import importutils
from ceilometer.openstack.common import timeutils
netaddr = importutils.try_import("netaddr")
# Predicates identifying "nasty" values (modules, classes, functions,
# generators, frames, code objects, ...) that to_primitive() stringifies
# rather than recursing into.
_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
                     inspect.isfunction, inspect.isgeneratorfunction,
                     inspect.isgenerator, inspect.istraceback, inspect.isframe,
                     inspect.iscode, inspect.isbuiltin, inspect.isroutine,
                     inspect.isabstract]
# Types returned from to_primitive() unchanged, checked first for speed.
_simple_types = (six.string_types + six.integer_types
                 + (type(None), bool, float))
def to_primitive(value, convert_instances=False, convert_datetime=True,
                 level=0, max_depth=3):
    """Convert a complex object into primitives.

    Handy for JSON serialization. We can optionally handle instances,
    but since this is a recursive function, we could have cyclical
    data structures.

    To handle cyclical data structures we could track the actual objects
    visited in a set, but not all objects are hashable. Instead we just
    track the depth of the object inspections and don't go too deep.

    Therefore, convert_instances=True is lossy ... be aware.

    :param value: object to reduce to JSON-friendly primitives
    :param convert_instances: if True, recurse into arbitrary instances
        via their ``__dict__`` (lossy)
    :param convert_datetime: if True, datetimes are rendered as strings
        via timeutils.strtime; otherwise returned unchanged
    :param level: current recursion depth (internal); bumped only for
        instance/``iteritems``-style recursion, not plain dicts or lists
    :param max_depth: depth at which recursion stops and '?' is returned
    """
    # handle obvious types first - order of basic types determined by running
    # full tests on nova project, resulting in the following counts:
    # 572754 <type 'NoneType'>
    # 460353 <type 'int'>
    # 379632 <type 'unicode'>
    # 274610 <type 'str'>
    # 199918 <type 'dict'>
    # 114200 <type 'datetime.datetime'>
    # 51817 <type 'bool'>
    # 26164 <type 'list'>
    # 6491 <type 'float'>
    # 283 <type 'tuple'>
    # 19 <type 'long'>
    if isinstance(value, _simple_types):
        return value
    if isinstance(value, datetime.datetime):
        if convert_datetime:
            return timeutils.strtime(value)
        else:
            return value
    # value of itertools.count doesn't get caught by nasty_type_tests
    # and results in infinite loop when list(value) is called.
    if type(value) == itertools.count:
        return six.text_type(value)
    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
    #              tests that raise an exception in a mocked method that
    #              has a @wrap_exception with a notifier will fail. If
    #              we up the dependency to 0.5.4 (when it is released) we
    #              can remove this workaround.
    if getattr(value, '__module__', None) == 'mox':
        return 'mock'
    if level > max_depth:
        return '?'
    # The try block may not be necessary after the class check above,
    # but just in case ...
    try:
        # Partially-applied self-call carrying the current settings; note it
        # binds the CURRENT level, so callees must pass level explicitly to
        # deepen it.
        recursive = functools.partial(to_primitive,
                                      convert_instances=convert_instances,
                                      convert_datetime=convert_datetime,
                                      level=level,
                                      max_depth=max_depth)
        if isinstance(value, dict):
            # NOTE(review): plain dicts and lists recurse with the SAME
            # level, so max_depth does not bound them; a self-referencing
            # container would raise RecursionError here -- confirm whether
            # that is intended before changing it.
            return dict((k, recursive(v)) for k, v in six.iteritems(value))
        elif isinstance(value, (list, tuple)):
            return [recursive(lv) for lv in value]
        # It's not clear why xmlrpclib created their own DateTime type, but
        # for our purposes, make it a datetime type which is explicitly
        # handled
        if isinstance(value, xmlrpclib.DateTime):
            value = datetime.datetime(*tuple(value.timetuple())[:6])
        if convert_datetime and isinstance(value, datetime.datetime):
            return timeutils.strtime(value)
        elif isinstance(value, gettextutils.Message):
            # Translatable messages serialize to their raw data payload.
            return value.data
        elif hasattr(value, 'iteritems'):
            return recursive(dict(value.iteritems()), level=level + 1)
        elif hasattr(value, '__iter__'):
            return recursive(list(value))
        elif convert_instances and hasattr(value, '__dict__'):
            # Likely an instance of something. Watch for cycles.
            # Ignore class member vars.
            return recursive(value.__dict__, level=level + 1)
        elif netaddr and isinstance(value, netaddr.IPAddress):
            # netaddr is an optional import (may be None).
            return six.text_type(value)
        else:
            if any(test(value) for test in _nasty_type_tests):
                return six.text_type(value)
            return value
    except TypeError:
        # Class objects are tricky since they may define something like
        # __iter__ defined but it isn't callable as list().
        return six.text_type(value)
def dumps(value, default=to_primitive, **kwargs):
    """Serialize ``value`` to a JSON string.

    Complex objects are reduced via :func:`to_primitive` unless another
    ``default`` callable is supplied; extra kwargs go to ``json.dumps``.
    """
    return json.dumps(value, default=default, **kwargs)
def loads(s):
    """Deserialize the JSON string ``s`` to a Python object."""
    return json.loads(s)
def load(s):
    """Deserialize JSON read from the file-like object ``s``."""
    return json.load(s)
# Optionally register these wrappers as an anyjson implementation so code
# going through anyjson picks up the to_primitive-aware dumps() above.
try:
    import anyjson
except ImportError:
    pass
else:
    # anyjson's module tuple: (module, dumps name, dumps error, loads name,
    # loads error, load name).
    anyjson._modules.append((__name__, 'dumps', TypeError,
                             'loads', ValueError, 'load'))
    anyjson.force_implementation(__name__)
| {
"content_hash": "9934412a2a86fd1f6e8c329cd0f823e0",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 79,
"avg_line_length": 34.68263473053892,
"alnum_prop": 0.6268991712707183,
"repo_name": "nttdata-osscloud/ceilometer",
"id": "9119b53b624f5dda439fa82cbe9b17784960894f",
"size": "6562",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ceilometer/openstack/common/jsonutils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?php
namespace Richpolis\CategoriasGaleriaBundle\Form;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\OptionsResolver\OptionsResolverInterface;
use Richpolis\CategoriasGaleriaBundle\Repository\CategoriasRepository;
class GaleriasLinkVideoType extends AbstractType
{
    /**
     * Builds the form used to attach a youtube/vimeo link to a galeria.
     */
    public function buildForm(FormBuilderInterface $builder, array $options)
    {
        $categoriaOptions = array(
            'class' => 'CategoriasGaleriaBundle:Categorias',
            'query_builder' => function(CategoriasRepository $repository) {
                return $repository->createQueryBuilder('u')
                        ->orderBy('u.id', 'ASC');
            },
            'property' => 'categoria',
            'label' => 'Categoria',
            'multiple' => false
        );

        $builder->add('titulo', 'hidden');
        $builder->add('thumbnail', 'hidden');
        $builder->add('posicion', 'hidden');
        $builder->add('archivo', 'text', array('label' => 'Link youtube o vimeo'));
        $builder->add('categoria', 'entity', $categoriaOptions);
        $builder->add('tipoArchivo', 'hidden');
        $builder->add('isActive', null, array('label' => 'Activo?', 'required' => false));
    }

    /**
     * Binds the form to the Galerias entity.
     */
    public function setDefaultOptions(OptionsResolverInterface $resolver)
    {
        $defaults = array(
            'data_class' => 'Richpolis\CategoriasGaleriaBundle\Entity\Galerias'
        );
        $resolver->setDefaults($defaults);
    }

    /**
     * Unique name of this form type.
     */
    public function getName()
    {
        return 'richpolis_categoriasgaleriabundle_galeriaslinkvideotype';
    }
}
| {
"content_hash": "952c6d90d4effd44b2e0872df4ecf1f4",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 79,
"avg_line_length": 33.38775510204081,
"alnum_prop": 0.5666259168704156,
"repo_name": "richpolis/sf2InteractiveValley",
"id": "9103ad3419738d025b198073da5827dc760c2c70",
"size": "1636",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/Richpolis/CategoriasGaleriaBundle/Form/GaleriasLinkVideoType.php",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "225102"
},
{
"name": "JavaScript",
"bytes": "324508"
},
{
"name": "PHP",
"bytes": "265127"
},
{
"name": "Perl",
"bytes": "2705"
}
],
"symlink_target": ""
} |
/*
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.5.1-pre.0
* Contact: blah@cliffano.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package com.cliffano.swaggyjenkins.model;
import java.util.Objects;
import java.util.Arrays;
import com.cliffano.swaggyjenkins.model.PipelineBranchesitemlatestRun;
import com.cliffano.swaggyjenkins.model.PipelineBranchesitempullRequest;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import com.google.gson.TypeAdapterFactory;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import com.cliffano.swaggyjenkins.JSON;
/**
* PipelineBranchesitem
*/
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", date = "2022-06-04T08:16:08.847436Z[Etc/UTC]")
public class PipelineBranchesitem {
public static final String SERIALIZED_NAME_DISPLAY_NAME = "displayName";
@SerializedName(SERIALIZED_NAME_DISPLAY_NAME)
private String displayName;
public static final String SERIALIZED_NAME_ESTIMATED_DURATION_IN_MILLIS = "estimatedDurationInMillis";
@SerializedName(SERIALIZED_NAME_ESTIMATED_DURATION_IN_MILLIS)
private Integer estimatedDurationInMillis;
public static final String SERIALIZED_NAME_NAME = "name";
@SerializedName(SERIALIZED_NAME_NAME)
private String name;
public static final String SERIALIZED_NAME_WEATHER_SCORE = "weatherScore";
@SerializedName(SERIALIZED_NAME_WEATHER_SCORE)
private Integer weatherScore;
public static final String SERIALIZED_NAME_LATEST_RUN = "latestRun";
@SerializedName(SERIALIZED_NAME_LATEST_RUN)
private PipelineBranchesitemlatestRun latestRun;
public static final String SERIALIZED_NAME_ORGANIZATION = "organization";
@SerializedName(SERIALIZED_NAME_ORGANIZATION)
private String organization;
public static final String SERIALIZED_NAME_PULL_REQUEST = "pullRequest";
@SerializedName(SERIALIZED_NAME_PULL_REQUEST)
private PipelineBranchesitempullRequest pullRequest;
public static final String SERIALIZED_NAME_TOTAL_NUMBER_OF_PULL_REQUESTS = "totalNumberOfPullRequests";
@SerializedName(SERIALIZED_NAME_TOTAL_NUMBER_OF_PULL_REQUESTS)
private Integer totalNumberOfPullRequests;
public static final String SERIALIZED_NAME_PROPERTY_CLASS = "_class";
@SerializedName(SERIALIZED_NAME_PROPERTY_CLASS)
private String propertyClass;
public PipelineBranchesitem() {
}
public PipelineBranchesitem displayName(String displayName) {
this.displayName = displayName;
return this;
}
/**
* Get displayName
* @return displayName
**/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public String getDisplayName() {
return displayName;
}
public void setDisplayName(String displayName) {
this.displayName = displayName;
}
public PipelineBranchesitem estimatedDurationInMillis(Integer estimatedDurationInMillis) {
this.estimatedDurationInMillis = estimatedDurationInMillis;
return this;
}
/**
* Get estimatedDurationInMillis
* @return estimatedDurationInMillis
**/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public Integer getEstimatedDurationInMillis() {
return estimatedDurationInMillis;
}
public void setEstimatedDurationInMillis(Integer estimatedDurationInMillis) {
this.estimatedDurationInMillis = estimatedDurationInMillis;
}
public PipelineBranchesitem name(String name) {
this.name = name;
return this;
}
/**
* Get name
* @return name
**/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
/**
 * Builder-style setter for {@code weatherScore}.
 *
 * @param weatherScore value to store
 * @return this instance, for call chaining
 */
public PipelineBranchesitem weatherScore(Integer weatherScore) {
  setWeatherScore(weatherScore);
  return this;
}

/**
 * Get weatherScore.
 *
 * @return weatherScore, or {@code null} when unset
 */
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public Integer getWeatherScore() {
  return this.weatherScore;
}

/**
 * Set weatherScore.
 *
 * @param weatherScore value to store
 */
public void setWeatherScore(Integer weatherScore) {
  this.weatherScore = weatherScore;
}

/**
 * Builder-style setter for {@code latestRun}.
 *
 * @param latestRun value to store
 * @return this instance, for call chaining
 */
public PipelineBranchesitem latestRun(PipelineBranchesitemlatestRun latestRun) {
  setLatestRun(latestRun);
  return this;
}

/**
 * Get latestRun.
 *
 * @return latestRun, or {@code null} when unset
 */
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public PipelineBranchesitemlatestRun getLatestRun() {
  return this.latestRun;
}

/**
 * Set latestRun.
 *
 * @param latestRun value to store
 */
public void setLatestRun(PipelineBranchesitemlatestRun latestRun) {
  this.latestRun = latestRun;
}

/**
 * Builder-style setter for {@code organization}.
 *
 * @param organization value to store
 * @return this instance, for call chaining
 */
public PipelineBranchesitem organization(String organization) {
  setOrganization(organization);
  return this;
}

/**
 * Get organization.
 *
 * @return organization, or {@code null} when unset
 */
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public String getOrganization() {
  return this.organization;
}

/**
 * Set organization.
 *
 * @param organization value to store
 */
public void setOrganization(String organization) {
  this.organization = organization;
}
/**
 * Builder-style setter for {@code pullRequest}.
 *
 * @param pullRequest value to store
 * @return this instance, for call chaining
 */
public PipelineBranchesitem pullRequest(PipelineBranchesitempullRequest pullRequest) {
  setPullRequest(pullRequest);
  return this;
}

/**
 * Get pullRequest.
 *
 * @return pullRequest, or {@code null} when unset
 */
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public PipelineBranchesitempullRequest getPullRequest() {
  return this.pullRequest;
}

/**
 * Set pullRequest.
 *
 * @param pullRequest value to store
 */
public void setPullRequest(PipelineBranchesitempullRequest pullRequest) {
  this.pullRequest = pullRequest;
}

/**
 * Builder-style setter for {@code totalNumberOfPullRequests}.
 *
 * @param totalNumberOfPullRequests value to store
 * @return this instance, for call chaining
 */
public PipelineBranchesitem totalNumberOfPullRequests(Integer totalNumberOfPullRequests) {
  setTotalNumberOfPullRequests(totalNumberOfPullRequests);
  return this;
}

/**
 * Get totalNumberOfPullRequests.
 *
 * @return totalNumberOfPullRequests, or {@code null} when unset
 */
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public Integer getTotalNumberOfPullRequests() {
  return this.totalNumberOfPullRequests;
}

/**
 * Set totalNumberOfPullRequests.
 *
 * @param totalNumberOfPullRequests value to store
 */
public void setTotalNumberOfPullRequests(Integer totalNumberOfPullRequests) {
  this.totalNumberOfPullRequests = totalNumberOfPullRequests;
}

/**
 * Builder-style setter for {@code propertyClass}.
 *
 * @param propertyClass value to store
 * @return this instance, for call chaining
 */
public PipelineBranchesitem propertyClass(String propertyClass) {
  setPropertyClass(propertyClass);
  return this;
}

/**
 * Get propertyClass.
 *
 * @return propertyClass, or {@code null} when unset
 */
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public String getPropertyClass() {
  return this.propertyClass;
}

/**
 * Set propertyClass.
 *
 * @param propertyClass value to store
 */
public void setPropertyClass(String propertyClass) {
  this.propertyClass = propertyClass;
}
/**
 * Two {@code PipelineBranchesitem} instances are equal exactly when every
 * modeled field is equal (null-safe, via {@link Objects#equals}).
 */
@Override
public boolean equals(Object o) {
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  PipelineBranchesitem other = (PipelineBranchesitem) o;
  return Objects.equals(this.displayName, other.displayName)
      && Objects.equals(this.estimatedDurationInMillis, other.estimatedDurationInMillis)
      && Objects.equals(this.name, other.name)
      && Objects.equals(this.weatherScore, other.weatherScore)
      && Objects.equals(this.latestRun, other.latestRun)
      && Objects.equals(this.organization, other.organization)
      && Objects.equals(this.pullRequest, other.pullRequest)
      && Objects.equals(this.totalNumberOfPullRequests, other.totalNumberOfPullRequests)
      && Objects.equals(this.propertyClass, other.propertyClass);
}
/**
 * Hash code consistent with {@link #equals(Object)}: combines all modeled fields.
 */
@Override
public int hashCode() {
  return Objects.hash(displayName, estimatedDurationInMillis, name, weatherScore, latestRun, organization, pullRequest, totalNumberOfPullRequests, propertyClass);
}
/**
 * Renders a multi-line, indented debug representation listing every modeled field.
 */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder();
  sb.append("class PipelineBranchesitem {\n");
  sb.append("    displayName: ").append(toIndentedString(displayName)).append("\n");
  sb.append("    estimatedDurationInMillis: ").append(toIndentedString(estimatedDurationInMillis)).append("\n");
  sb.append("    name: ").append(toIndentedString(name)).append("\n");
  sb.append("    weatherScore: ").append(toIndentedString(weatherScore)).append("\n");
  sb.append("    latestRun: ").append(toIndentedString(latestRun)).append("\n");
  sb.append("    organization: ").append(toIndentedString(organization)).append("\n");
  sb.append("    pullRequest: ").append(toIndentedString(pullRequest)).append("\n");
  sb.append("    totalNumberOfPullRequests: ").append(toIndentedString(totalNumberOfPullRequests)).append("\n");
  sb.append("    propertyClass: ").append(toIndentedString(propertyClass)).append("\n");
  sb.append("}");
  return sb.toString();
}
/**
 * Convert the given object to string with each line indented by 4 spaces
 * (except the first line).
 *
 * @param o object to render; may be {@code null}
 * @return the indented string form, or the literal string {@code "null"}
 */
private String toIndentedString(Object o) {
  if (o == null) {
    return "null";
  }
  return o.toString().replace("\n", "\n    ");
}
// All JSON key names this model recognizes; unknown keys fail validation in
// validateJsonObject.
public static HashSet<String> openapiFields;
// JSON key names that must be present; empty for this model.
public static HashSet<String> openapiRequiredFields;
static {
  // a set of all properties/fields (JSON key names)
  openapiFields = new HashSet<String>();
  openapiFields.add("displayName");
  openapiFields.add("estimatedDurationInMillis");
  openapiFields.add("name");
  openapiFields.add("weatherScore");
  openapiFields.add("latestRun");
  openapiFields.add("organization");
  openapiFields.add("pullRequest");
  openapiFields.add("totalNumberOfPullRequests");
  openapiFields.add("_class"); // serialized name of the propertyClass field
  // a set of required properties/fields (JSON key names)
  openapiRequiredFields = new HashSet<String>();
}
/**
 * Validates the JSON Object and throws an exception if issues found.
 *
 * Fix over the generated original: an explicit JSON null value (e.g.
 * {@code {"displayName": null}}) is represented by Gson as {@code JsonNull},
 * which is not a {@code JsonPrimitive}. The original rejected such payloads,
 * and {@code getAsJsonObject} on a {@code JsonNull} member would throw
 * {@code ClassCastException}. Explicit nulls are now treated like absent fields.
 *
 * @param jsonObj JSON Object
 * @throws IOException if the JSON Object is invalid with respect to PipelineBranchesitem
 */
public static void validateJsonObject(JsonObject jsonObj) throws IOException {
  if (jsonObj == null) {
    if (PipelineBranchesitem.openapiRequiredFields.isEmpty()) {
      // No payload and no required fields: nothing to validate.
      return;
    } else { // has required fields
      throw new IllegalArgumentException(String.format("The required field(s) %s in PipelineBranchesitem is not found in the empty JSON string", PipelineBranchesitem.openapiRequiredFields.toString()));
    }
  }
  Set<Entry<String, JsonElement>> entries = jsonObj.entrySet();
  // check to see if the JSON string contains additional (undeclared) fields
  for (Entry<String, JsonElement> entry : entries) {
    if (!PipelineBranchesitem.openapiFields.contains(entry.getKey())) {
      throw new IllegalArgumentException(String.format("The field `%s` in the JSON string is not defined in the `PipelineBranchesitem` properties. JSON: %s", entry.getKey(), jsonObj.toString()));
    }
  }
  if (jsonObj.get("displayName") != null && !jsonObj.get("displayName").isJsonNull() && !jsonObj.get("displayName").isJsonPrimitive()) {
    throw new IllegalArgumentException(String.format("Expected the field `displayName` to be a primitive type in the JSON string but got `%s`", jsonObj.get("displayName").toString()));
  }
  if (jsonObj.get("name") != null && !jsonObj.get("name").isJsonNull() && !jsonObj.get("name").isJsonPrimitive()) {
    throw new IllegalArgumentException(String.format("Expected the field `name` to be a primitive type in the JSON string but got `%s`", jsonObj.get("name").toString()));
  }
  // validate the optional field `latestRun`; skip explicit nulls, since
  // getAsJsonObject would throw ClassCastException on JsonNull
  if (jsonObj.get("latestRun") != null && !jsonObj.get("latestRun").isJsonNull()) {
    PipelineBranchesitemlatestRun.validateJsonObject(jsonObj.getAsJsonObject("latestRun"));
  }
  if (jsonObj.get("organization") != null && !jsonObj.get("organization").isJsonNull() && !jsonObj.get("organization").isJsonPrimitive()) {
    throw new IllegalArgumentException(String.format("Expected the field `organization` to be a primitive type in the JSON string but got `%s`", jsonObj.get("organization").toString()));
  }
  // validate the optional field `pullRequest`
  if (jsonObj.get("pullRequest") != null && !jsonObj.get("pullRequest").isJsonNull()) {
    PipelineBranchesitempullRequest.validateJsonObject(jsonObj.getAsJsonObject("pullRequest"));
  }
  if (jsonObj.get("_class") != null && !jsonObj.get("_class").isJsonNull() && !jsonObj.get("_class").isJsonPrimitive()) {
    throw new IllegalArgumentException(String.format("Expected the field `_class` to be a primitive type in the JSON string but got `%s`", jsonObj.get("_class").toString()));
  }
}
/**
 * Gson {@code TypeAdapterFactory} that validates the raw JSON tree with
 * {@link #validateJsonObject(JsonObject)} before delegating deserialization
 * to the default adapter for this model.
 */
public static class CustomTypeAdapterFactory implements TypeAdapterFactory {
  @SuppressWarnings("unchecked")
  @Override
  public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> type) {
    if (!PipelineBranchesitem.class.isAssignableFrom(type.getRawType())) {
      return null; // this class only serializes 'PipelineBranchesitem' and its subtypes
    }
    final TypeAdapter<JsonElement> elementAdapter = gson.getAdapter(JsonElement.class);
    final TypeAdapter<PipelineBranchesitem> thisAdapter
        = gson.getDelegateAdapter(this, TypeToken.get(PipelineBranchesitem.class));

    return (TypeAdapter<T>) new TypeAdapter<PipelineBranchesitem>() {
      @Override
      public void write(JsonWriter out, PipelineBranchesitem value) throws IOException {
        // Serialize via the delegate adapter; no extra processing on write.
        JsonObject obj = thisAdapter.toJsonTree(value).getAsJsonObject();
        elementAdapter.write(out, obj);
      }

      @Override
      public PipelineBranchesitem read(JsonReader in) throws IOException {
        // Read the raw tree first so it can be validated before binding.
        JsonObject jsonObj = elementAdapter.read(in).getAsJsonObject();
        validateJsonObject(jsonObj);
        return thisAdapter.fromJsonTree(jsonObj);
      }
    }.nullSafe();
  }
}
/**
 * Create an instance of PipelineBranchesitem given a JSON string.
 *
 * @param jsonString JSON string
 * @return An instance of PipelineBranchesitem
 * @throws IOException if the JSON string is invalid with respect to PipelineBranchesitem
 */
public static PipelineBranchesitem fromJson(String jsonString) throws IOException {
  final Gson gson = JSON.getGson();
  return gson.fromJson(jsonString, PipelineBranchesitem.class);
}

/**
 * Convert an instance of PipelineBranchesitem to a JSON string.
 *
 * @return JSON string
 */
public String toJson() {
  final Gson gson = JSON.getGson();
  return gson.toJson(this);
}
}
| {
"content_hash": "1e9081c934fb66f3a5cd6e0114509c96",
"timestamp": "",
"source": "github",
"line_count": 468,
"max_line_length": 205,
"avg_line_length": 32.348290598290596,
"alnum_prop": 0.7230992800052843,
"repo_name": "cliffano/swaggy-jenkins",
"id": "ad54bed7f7101921a866726f40cf04d75960609c",
"size": "15139",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "clients/java/generated/src/main/java/com/cliffano/swaggyjenkins/model/PipelineBranchesitem.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ada",
"bytes": "569823"
},
{
"name": "Apex",
"bytes": "741346"
},
{
"name": "Batchfile",
"bytes": "14792"
},
{
"name": "C",
"bytes": "971274"
},
{
"name": "C#",
"bytes": "5131336"
},
{
"name": "C++",
"bytes": "7799032"
},
{
"name": "CMake",
"bytes": "20609"
},
{
"name": "CSS",
"bytes": "4873"
},
{
"name": "Clojure",
"bytes": "129018"
},
{
"name": "Crystal",
"bytes": "864941"
},
{
"name": "Dart",
"bytes": "876777"
},
{
"name": "Dockerfile",
"bytes": "7385"
},
{
"name": "Eiffel",
"bytes": "424642"
},
{
"name": "Elixir",
"bytes": "139252"
},
{
"name": "Elm",
"bytes": "187067"
},
{
"name": "Emacs Lisp",
"bytes": "191"
},
{
"name": "Erlang",
"bytes": "373074"
},
{
"name": "F#",
"bytes": "556012"
},
{
"name": "Gherkin",
"bytes": "951"
},
{
"name": "Go",
"bytes": "345227"
},
{
"name": "Groovy",
"bytes": "89524"
},
{
"name": "HTML",
"bytes": "2367424"
},
{
"name": "Haskell",
"bytes": "680841"
},
{
"name": "Java",
"bytes": "12164874"
},
{
"name": "JavaScript",
"bytes": "1959006"
},
{
"name": "Kotlin",
"bytes": "1280953"
},
{
"name": "Lua",
"bytes": "322316"
},
{
"name": "Makefile",
"bytes": "11882"
},
{
"name": "Nim",
"bytes": "65818"
},
{
"name": "OCaml",
"bytes": "94665"
},
{
"name": "Objective-C",
"bytes": "464903"
},
{
"name": "PHP",
"bytes": "4383673"
},
{
"name": "Perl",
"bytes": "743304"
},
{
"name": "PowerShell",
"bytes": "678274"
},
{
"name": "Python",
"bytes": "5529523"
},
{
"name": "QMake",
"bytes": "6915"
},
{
"name": "R",
"bytes": "840841"
},
{
"name": "Raku",
"bytes": "10945"
},
{
"name": "Ruby",
"bytes": "328360"
},
{
"name": "Rust",
"bytes": "1735375"
},
{
"name": "Scala",
"bytes": "1387368"
},
{
"name": "Shell",
"bytes": "407167"
},
{
"name": "Swift",
"bytes": "342562"
},
{
"name": "TypeScript",
"bytes": "3060093"
}
],
"symlink_target": ""
} |
// Test-support category on ACAccount for the XIG test suite.
@interface ACAccount (XIGTest)
// Returns an ACAccount instance for use in unit tests.
// NOTE(review): implementation not visible here — presumably a canned/stub
// account; confirm against the corresponding .m file.
+(instancetype)testAccount;
@end
| {
"content_hash": "96f7253dfc42613e3ddd8e2989048934",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 30,
"avg_line_length": 21.333333333333332,
"alnum_prop": 0.796875,
"repo_name": "juliengrimault/AppNetChecker",
"id": "7397a651baa120e25798942c1337e4fab1d6ada7",
"size": "243",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "AppNetCheckerTests/ACAccount+XIGTest.h",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "69516"
},
{
"name": "Objective-C",
"bytes": "1409491"
},
{
"name": "Ruby",
"bytes": "3642"
},
{
"name": "Shell",
"bytes": "5053"
}
],
"symlink_target": ""
} |
The protos within this directory hierarchy represent the data within
events generated by Google eventing infrastructure, usually
represented as the [data
attribute](https://github.com/cloudevents/spec/blob/main/spec.md#event-data)
within a [CloudEvent](https://cloudevents.io/). They are provided as
machine-readable schemas, allowing libraries of data types to be
created to make it easier to consume events.
## Relationship between protobuf messages and CloudEvent types
There are two protobuf messages that are relevant for any given CloudEvent
type:
- The "envelope" message (e.g. `ObjectFinalizedEvent`) representing
the complete CloudEvent, including any extension attributes.
- The "data" message (e.g. `StorageObjectData`)
There is a 1:1 relationship between CloudEvent types and envelope
messages, but a many:1 relationship between CloudEvent types and
data messages. In other words, many CloudEvent types can use the
same data message. For example, every event associated with
something happening to a Cloud Storage object uses `StorageObjectData`
as the data message. Each envelope message specifies the CloudEvent
type it's associated with via a proto annotation.
Initially we will generate libraries that only contain the data
messages. The event messages serve as useful documentation for the
event type strings and extension attributes. They will be included
in the libraries at a later date if that proves useful.
Note that each proto package is versioned, but this version is
independent of any API version. It would be possible for a single
API version to support multiple event versions, or vice versa.
Our convention for event types is to have a dot-separated list of
the following values, in this order:
- `google` (always)
- product (e.g. `firebase.auth` or `cloud.storage`)
- resource (e.g. `object` or `document`)
- version (e.g. `v1`)
- action (e.g. `finalized` or `deleted`)
The "action" is the action that causes the event to be emitted.
The proto package (in the `package` part of each .proto file) must
be `google.events.`*product*`.`*version*. The .proto files should be
in a directory structure where each element of the package name
corresponds to a directory. The envelope messages must be in a file
named `events.proto`, and the data messages (including any messages
they refer to) must be in a file named `data.proto`. The purpose of
this separation is to allow the envelope and data messages to coexist,
but for consumer libraries to be generated that only contain the data
messages.
As a complete example, consider the
event that is created when a Google Cloud Storage object is
finalized:
- Proto files: `google/events/cloud/storage/v1/events.proto` and
`google/events/cloud/storage/v1/data.proto`
- Envelope message: `google.events.cloud.storage.v1.ObjectFinalizedEvent`
- Data message: `google.events.cloud.storage.v1.StorageObjectData`
- Event type: `google.cloud.storage.object.v1.finalized`
Note that the `events` part of the proto package is not part of the
CloudEvent type, for brevity. It's included in the proto package to
distinguish event message from those used in APIs.
Additionally, the version and resource parts deliberately appear in
different orders between fully-qualified message name and CloudEvent
type. That reversal may seem confusing initially, but it means that
events for multiple resources in the same product (e.g. buckets and
objects within Cloud Storage) can use common messages in their
definitions.
The product and version parts must be all lower-case to conform with
proto package naming conventions. A product may consist of multiple
segments (e.g. `cloud.storage`) but if a single segment consists of
multiple words, they should be simply lower-cased. For example,
`firebase.remoteconfig` is the product name for Firebase Remote
Config. In this case, the `csharp_namespace` option (and similar for
other languages that generate code from the protos) should be set to
indicate the correct word separation via casing.
The resource and action parts are not included in the proto package,
so have more flexibility. Multi-word segments should be camel-cased.
Full examples of CloudEvent types that involve camel-cased segments
include:
- `google.cloud.storage.object.v1.metadataUpdated`
- `google.firebase.remoteconfig.remoteConfig.v1.updated`
## Message naming conventions
While not required for technical correctness, we use a suffix of "Event"
for all envelope messages, and a suffix of "Data" for all event data messages.
Sometimes it can be convenient to combine the two suffixes. For example,
`DocumentEventData` in the Firestore package is used as the event data
message for all document events. This approach should only be taken
when it is required for clarity.
We avoid using either "Event" or "Data" as a suffix for messages which
aren't envelope messages or event data messages.
## Versioning governance
While not a strict technical necessity, governance rules will
strongly encourage all event types for a single resource to create a
new version at the same time: if there's a need for
`google.cloud.storage.object.v2.finalized`, then all the *other*
object-related event types (`deleted` etc) should be available as
`v2` events as well. However, other resources in the same product do
*not* have to take a new version. This allows users to write common
code to handle all events for a single resource, without creating
unnecessary churn when only an unrelated resource type needs
a new version. (If your code only consumes `bucket.v1` events, the
introduction of `object.v2` events should not affect you.)
## Unexpected fields
CloudEvent providers may include additional data fields beyond those
documented here. In particular:
- Some events may include fields with legacy names such as
`message_id` for compatibility purposes.
- Event providers using proto3 JSON formatting of protobuf `Any`
messages may include a field called `@type` as a natural artifact
of that formatting process.
Neither of these should concern consumers: it's always possible that
new fields will have been added to CloudEvents between the time at
which the code was built and deployed, and the time at which the
CloudEvent is received. Consumers must therefore handle unexpected
data fields. This note is primarily to explain some likely causes of
unexpected fields to avoid developer confusion.
| {
"content_hash": "376a121a0ce997404cb410f428dcc9d8",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 78,
"avg_line_length": 46.99264705882353,
"alnum_prop": 0.7964324831794711,
"repo_name": "googleapis/google-cloudevents",
"id": "801cd310d3a9b1a6f9becfe23f118b769cca58bc",
"size": "6421",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "proto/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "28858"
},
{
"name": "Shell",
"bytes": "7633"
},
{
"name": "TypeScript",
"bytes": "5332"
}
],
"symlink_target": ""
} |
import { PubSub } from 'graphql-subscriptions';

/**
 * Application-wide PubSub instance.
 *
 * Exported as a module-level singleton so that every importer shares the
 * same in-memory event bus for GraphQL subscriptions.
 */
const pubsub = new PubSub();

export default pubsub;
"content_hash": "53c9cac452ae78a9eedc1b7f7c86a50d",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 47,
"avg_line_length": 25.4,
"alnum_prop": 0.7952755905511811,
"repo_name": "Lochmann85/shout-out-loud-dev",
"id": "1c051627a99f5740aa037cfef10edaafe82c1a66",
"size": "127",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/lib/graphQLApi/subscription/subscriptionHandler.js",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "379"
},
{
"name": "JavaScript",
"bytes": "268559"
},
{
"name": "Shell",
"bytes": "331"
}
],
"symlink_target": ""
} |
<?php

namespace KTU\CountersBundle;

use Symfony\Component\HttpKernel\Bundle\Bundle;

/**
 * Symfony bundle entry point for the KTU counters module.
 */
class KTUCountersBundle extends Bundle
{
    /**
     * Declares FOSUserBundle as this bundle's parent, enabling bundle
     * inheritance (overriding the parent's resources).
     *
     * @return string
     */
    public function getParent()
    {
        return 'FOSUserBundle';
    }
}
| {
"content_hash": "0f9aee08b03584d600768abb9c3b4a66",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 47,
"avg_line_length": 16.833333333333332,
"alnum_prop": 0.7277227722772277,
"repo_name": "xemicalas/iTracker",
"id": "3bf42d2a590541d795425ca9117e082490b24bd1",
"size": "202",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/KTU/CountersBundle/KTUCountersBundle.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "2647"
},
{
"name": "CSS",
"bytes": "183751"
},
{
"name": "JavaScript",
"bytes": "841"
},
{
"name": "PHP",
"bytes": "246241"
}
],
"symlink_target": ""
} |
function checkstr = subsasgn_checkstr(item, subs)
% function checkstr = subsasgn_checkstr(item, subs)
% Preformat a warning message suitable for all subsasgn_check functions.
% The message identifies the configuration item by its display name and by
% the field being assigned (the first subscript of subs).
%
% This code is part of a batch job configuration system for MATLAB. See
%      help matlabbatch
% for a general overview.
%_______________________________________________________________________
% Copyright (C) 2007 Freiburg Brain Imaging

% Volkmar Glauche
% $Id: subsasgn_checkstr.m 1716 2008-05-23 08:18:45Z volkmar $

rev = '$Rev: 1716 $'; %#ok

% Look up the item's display name via subsref, then embed it together with
% the assigned field name into the preformatted message.
itemname = subsref(item, substruct('.','name'));
checkstr = sprintf('Item ''%s'', field ''%s''', itemname, subs(1).subs);
| {
"content_hash": "d11cd2557aad6ca638e9079d003e7da5",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 99,
"avg_line_length": 36.94117647058823,
"alnum_prop": 0.6273885350318471,
"repo_name": "srkesler/bnets",
"id": "7a2b5942069fbf659e1557f6dafef5e7db7b918d",
"size": "628",
"binary": false,
"copies": "30",
"ref": "refs/heads/master",
"path": "GATcmd/Matlab/GAT_ModularAnalysis/GAT_2g_cmd/matlabbatch/@cfg_item/subsasgn_checkstr.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "MATLAB",
"bytes": "4521530"
},
{
"name": "R",
"bytes": "12880"
},
{
"name": "Roff",
"bytes": "7858"
},
{
"name": "Shell",
"bytes": "4788"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!-- JetBrains IDE deployment configuration: selects the server profile named "pc". -->
<project version="4">
  <component name="PublishConfigData" serverName="pc" />
</project>
| {
"content_hash": "284d3ea9b169625b39fb4c8c014b4474",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 56,
"avg_line_length": 26,
"alnum_prop": 0.676923076923077,
"repo_name": "codecats/progressChecker",
"id": "856bf77dea5d8478e9dae18c7335c00ad47a246d",
"size": "130",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": ".idea/deployment.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17324674"
},
{
"name": "JavaScript",
"bytes": "84180317"
},
{
"name": "PHP",
"bytes": "234986"
},
{
"name": "Python",
"bytes": "222086"
},
{
"name": "Ruby",
"bytes": "9685"
},
{
"name": "Shell",
"bytes": "1841"
}
],
"symlink_target": ""
} |
module ClosureTree
  # Finder helpers for closure_tree models. Instance methods operate relative
  # to a node; the nested ClassMethods module adds class-level finders
  # (roots, leaves, generation queries, ancestry-path lookups).
  module Finders
    extend ActiveSupport::Concern

    # Find a descendant node whose +ancestry_path+ will be ```self.ancestry_path + path```
    def find_by_path(path, attributes = {})
      # An empty path addresses this node itself.
      return self if path.empty?
      self.class.find_by_path(path, attributes, id)
    end

    # Find or create a descendant node whose +ancestry_path+ will be ```self.ancestry_path + path```
    def find_or_create_by_path(path, attributes = {})
      subpath = _ct.build_ancestry_attr_path(path, attributes)
      return self if subpath.empty?
      # Fast path: the requested descendant already exists.
      found = find_by_path(subpath, attributes)
      return found if found
      attrs = subpath.shift
      # Advisory lock guards against concurrent creation of the same path.
      _ct.with_advisory_lock do
        # shenanigans because children.create is bound to the superclass
        # (in the case of polymorphism):
        child = self.children.where(attrs).first || begin
          # Support STI creation by using base_class:
          _ct.create(self.class, attrs).tap do |ea|
            # We know that there isn't a cycle, because we just created it, and
            # cycle detection is expensive when the node is deep.
            ea._ct_skip_cycle_detection!
            self.children << ea
          end
        end
        # Recurse for the remainder of the path below the (found or new) child.
        child.find_or_create_by_path(subpath, attributes)
      end
    end

    # Descendants of this node that are exactly +generation_level+ generations below it.
    def find_all_by_generation(generation_level)
      s = _ct.base_class.joins(<<-SQL.strip_heredoc)
        INNER JOIN (
          SELECT descendant_id
          FROM #{_ct.quoted_hierarchy_table_name}
          WHERE ancestor_id = #{_ct.quote(self.id)}
          GROUP BY 1
          HAVING MAX(#{_ct.quoted_hierarchy_table_name}.generations) = #{generation_level.to_i}
        ) AS descendants ON (#{_ct.quoted_table_name}.#{_ct.base_class.primary_key} = descendants.descendant_id)
      SQL
      _ct.scope_with_order(s)
    end

    # Exclude this node from +scope+.
    def without_self(scope)
      scope.without(self)
    end

    module ClassMethods
      # All records except +instance+ (no-op filter when +instance+ is unsaved).
      def without(instance)
        if instance.new_record?
          all
        else
          where(["#{_ct.quoted_table_name}.#{_ct.quoted_id_column_name} != ?", instance.id])
        end
      end

      # All nodes with no parent, in tree order.
      def roots
        _ct.scope_with_order(where(_ct.parent_column_name => nil))
      end

      # Returns an arbitrary node that has no parents.
      def root
        roots.first
      end

      # Nodes with no descendants (maximum recorded generation depth of 0).
      def leaves
        s = joins(<<-SQL.strip_heredoc)
          INNER JOIN (
            SELECT ancestor_id
            FROM #{_ct.quoted_hierarchy_table_name}
            GROUP BY 1
            HAVING MAX(#{_ct.quoted_hierarchy_table_name}.generations) = 0
          ) AS leaves ON (#{_ct.quoted_table_name}.#{primary_key} = leaves.ancestor_id)
        SQL
        _ct.scope_with_order(s.readonly(false))
      end

      # Nodes that are proper descendants (generations > 0) of any of
      # +ancestors+, given as model instances or ids.
      def with_ancestor(*ancestors)
        ancestor_ids = ancestors.map { |ea| ea.is_a?(ActiveRecord::Base) ? ea._ct_id : ea }
        scope = ancestor_ids.blank? ? all : joins(:ancestor_hierarchies).
          where("#{_ct.hierarchy_table_name}.ancestor_id" => ancestor_ids).
          where("#{_ct.hierarchy_table_name}.generations > 0").
          readonly(false)
        _ct.scope_with_order(scope)
      end

      # Nodes exactly +generation_level+ generations below any root.
      def find_all_by_generation(generation_level)
        s = joins(<<-SQL.strip_heredoc)
          INNER JOIN (
            SELECT #{primary_key} as root_id
            FROM #{_ct.quoted_table_name}
            WHERE #{_ct.quoted_parent_column_name} IS NULL
          ) AS roots ON (1 = 1)
          INNER JOIN (
            SELECT ancestor_id, descendant_id
            FROM #{_ct.quoted_hierarchy_table_name}
            GROUP BY 1, 2
            HAVING MAX(generations) = #{generation_level.to_i}
          ) AS descendants ON (
            #{_ct.quoted_table_name}.#{primary_key} = descendants.descendant_id
            AND roots.root_id = descendants.ancestor_id
          )
        SQL
        _ct.scope_with_order(s)
      end

      # Find the node whose +ancestry_path+ is +path+
      def find_by_path(path, attributes = {}, parent_id = nil)
        path = _ct.build_ancestry_attr_path(path, attributes)
        # Very deep paths would need too many self-joins; fall back to an
        # iterative lookup instead.
        if path.size > _ct.max_join_tables
          return _ct.find_by_large_path(path, attributes, parent_id)
        end
        # Build one self-join per path segment, walking from leaf to root.
        scope = where(path.pop)
        last_joined_table = _ct.table_name
        path.reverse.each_with_index do |ea, idx|
          next_joined_table = "p#{idx}"
          scope = scope.joins(<<-SQL.strip_heredoc)
            INNER JOIN #{_ct.quoted_table_name} AS #{next_joined_table}
              ON #{next_joined_table}.#{_ct.quoted_id_column_name} =
 #{connection.quote_table_name(last_joined_table)}.#{_ct.quoted_parent_column_name}
          SQL
          scope = _ct.scoped_attributes(scope, ea, next_joined_table)
          last_joined_table = next_joined_table
        end
        scope.where("#{last_joined_table}.#{_ct.parent_column_name}" => parent_id).readonly(false).first
      end

      # Find or create nodes such that the +ancestry_path+ is +path+
      def find_or_create_by_path(path, attributes = {})
        attr_path = _ct.build_ancestry_attr_path(path, attributes)
        find_by_path(attr_path) || begin
          root_attrs = attr_path.shift
          _ct.with_advisory_lock do
            # shenanigans because find_or_create can't infer that we want the same class as this:
            # Note that roots will already be constrained to this subclass (in the case of polymorphism):
            root = roots.where(root_attrs).first || _ct.create!(self, root_attrs)
            root.find_or_create_by_path(attr_path)
          end
        end
      end
    end
  end
end
| {
"content_hash": "02412efe2d4fdb4ee2ef82dd43449e12",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 112,
"avg_line_length": 37.51006711409396,
"alnum_prop": 0.5938450527822509,
"repo_name": "froderik/closure_tree",
"id": "3e0a69dd855f289e6ebe6c6273402891dc773a4d",
"size": "5589",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/closure_tree/finders.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "556"
},
{
"name": "Ruby",
"bytes": "115984"
},
{
"name": "Shell",
"bytes": "197"
}
],
"symlink_target": ""
} |
@echo off
REM Creates the tables, table permissions and server scripts required by the
REM Azure Mobile Services test suites (unit, functional, E2E, JS, managed,
REM iOS and push) against the mobile service named in %1.
if "%1" == "" goto :Error
goto :Start
:Error
echo usage: %0 ^<application name^>
echo where application name is an azure mobileService app.
echo To run the login tests, the application needs to be configured
echo with all four providers.
goto :TheEnd
:Start
REM Shared tables (also used by the E2E suite below)
call azure mobile table create %1 stringIdRoundTripTable
call azure mobile table create %1 stringIdMovies
call azure mobile table create %1 offlineReady
call azure mobile table create %1 offlineReadyNoVersionAuthenticated
call azure mobile table update --deleteColumn __version --quiet %1 offlineReadyNoVersionAuthenticated
call azure mobile table create --integerId %1 intIdMovies
call azure mobile table create --integerId %1 ParamsTestTable
REM Tables specific to functional tests
call azure mobile table create --integerId %1 blog_posts
call azure mobile table create --integerId %1 blog_comments
call azure mobile table create --integerId %1 books
call azure mobile table create %1 stringId_test_table
call azure mobile table create --integerId %1 test_table
call azure mobile table create --integerId %1 types
REM Tables specific to E2E tests
REM (intIdMovies, ParamsTestTable, stringIdRoundTripTable and stringIdMovies
REM are shared with the suites above and were already created there; the
REM duplicate create commands were removed.)
call azure mobile table create --integerId %1 admin
call azure mobile table create --integerId %1 application
call azure mobile table create --integerId %1 authenticated
call azure mobile table create --integerId %1 public
REM Tables specific to JS tests
call azure mobile table create --integerId %1 w8jsRoundTripTable
call azure mobile table create %1 w8jsServerQueryMovies
REM Tables specific to Managed tests
call azure mobile table create --integerId %1 w8RoundTripTable
REM Tables specific to iOS tests
call azure mobile table create --integerId %1 iosRoundTripTable
REM Tables specific to push tests
call azure mobile table create %1 iosPushTest
call azure mobile table create %1 w8PushTest
call azure mobile table create %1 wp8PushTest
REM Permissions
REM Tables specific to unit tests
call azure mobile table update --addColumn title=string,commentCount=number,showComments=boolean,data=string -p insert=public,read=public,update=application,delete=public %1 blog_posts
call azure mobile table update --addColumn postId=string,commentText=string,name=string,test=number -p insert=public,read=public,update=public,delete=public %1 blog_comments
call azure mobile table update --addColumn title=string,type=string,pub_id=string,price=number,advance=number,royalty=number,ytd_sales=number,notes=string,pubdate=date -p insert=public,read=public,update=public,delete=public %1 books
call azure mobile table update --addColumn col5=boolean,name=string -p insert=public,read=public,update=public,delete=public %1 stringId_test_table
call azure mobile table update --addColumn col5=boolean,__anything=string -p insert=public,read=public,update=public,delete=public %1 test_table
call azure mobile table update --addColumn numCol=number,stringCol=string,dateCol=date,boolCol=boolean -p insert=public,read=public,update=public,delete=public %1 types
REM Tables specific to E2E tests
call azure mobile table update -p insert=admin,read=admin,update=admin,delete=admin %1 admin
call azure mobile table update -p insert=application,read=application,update=application,delete=application %1 application
call azure mobile table update -p insert=user,read=user,update=user,delete=user %1 authenticated
call azure mobile table update -p insert=application,read=public,update=public,delete=public %1 public
call azure mobile table update -p insert=admin,read=application,update=admin,delete=admin %1 w8jsServerQueryMovies
call azure mobile table update -p insert=user,read=user,update=user,delete=user %1 offlineReadyNoVersionAuthenticated
REM Scripts
REM Tables specific to unit tests
call azure mobile script upload %1 table/blog_comments.insert -f blog_comments.insert.js
call azure mobile script upload %1 table/blog_comments.read -f blog_comments.read.js
REM Tables specific to E2E tests
call azure mobile script upload %1 table/stringIdRoundTripTable.insert -f stringIdRoundTripTable.insert.js
call azure mobile script upload %1 table/stringIdRoundTripTable.read -f stringIdRoundTripTable.read.js
call azure mobile script upload %1 table/stringIdRoundTripTable.update -f stringIdRoundTripTable.update.js
call azure mobile script upload %1 table/stringIdMovies.insert -f bothIdTypeMovies.insert.js
call azure mobile script upload %1 table/intIdMovies.insert -f bothIdTypeMovies.insert.js
call azure mobile script upload %1 table/ParamsTestTable.insert -f ParamsTestTable.insert.js
call azure mobile script upload %1 table/ParamsTestTable.read -f ParamsTestTable.read.js
call azure mobile script upload %1 table/ParamsTestTable.update -f ParamsTestTable.update.js
call azure mobile script upload %1 table/ParamsTestTable.delete -f ParamsTestTable.delete.js
call azure mobile script upload %1 table/authenticated.insert -f authenticated.insert.js
call azure mobile script upload %1 table/authenticated.read -f authenticated.read.js
call azure mobile script upload %1 table/authenticated.update -f authenticated.update.js
call azure mobile script upload %1 table/authenticated.delete -f authenticated.delete.js
REM Tables specific to JS tests
call azure mobile script upload %1 table/w8jsRoundTripTable.insert -f w8jsRoundTripTable.insert.js
call azure mobile script upload %1 table/w8jsRoundTripTable.read -f w8jsRoundTripTable.read.js
call azure mobile script upload %1 table/w8jsRoundTripTable.update -f w8jsRoundTripTable.update.js
call azure mobile script upload %1 table/w8jsServerQueryMovies.read -f w8jsServerQueryMovies.read.js
REM Tables specific to managed tests
call azure mobile script upload %1 table/w8RoundTripTable.insert -f w8RoundTripTable.insert.js
call azure mobile script upload %1 table/w8RoundTripTable.read -f w8RoundTripTable.read.js
call azure mobile script upload %1 table/w8RoundTripTable.update -f w8RoundTripTable.update.js
REM Tables specific to iOS tests
call azure mobile script upload %1 table/iosRoundTripTable.insert -f iosRoundTripTable.insert.js
REM Tables specific to push tests
call azure mobile script upload %1 table/iosPushTest.insert -f iosPushTest.insert.js
call azure mobile script upload %1 table/w8PushTest.insert -f w8PushTest.insert.js
call azure mobile script upload %1 table/wp8PushTest.insert -f wp8PushTest.insert.js
:TheEnd
| {
"content_hash": "f77c3ea8377982f56d07492ad5992342",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 233,
"avg_line_length": 59.99082568807339,
"alnum_prop": 0.823673344548096,
"repo_name": "apuyana/azure-mobile-services",
"id": "b195213d7e73a356c7747ec5b64b1d51b5f05a36",
"size": "6539",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "test/SetupScripts/tables/SetupTables.bat",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "110"
},
{
"name": "Batchfile",
"bytes": "16004"
},
{
"name": "C#",
"bytes": "3294461"
},
{
"name": "CSS",
"bytes": "37465"
},
{
"name": "Groff",
"bytes": "32858"
},
{
"name": "HTML",
"bytes": "1905697"
},
{
"name": "Java",
"bytes": "1346475"
},
{
"name": "JavaScript",
"bytes": "2664514"
},
{
"name": "Objective-C",
"bytes": "1319249"
},
{
"name": "PowerShell",
"bytes": "5873"
},
{
"name": "Shell",
"bytes": "15643"
},
{
"name": "Swift",
"bytes": "11165"
},
{
"name": "Visual Basic",
"bytes": "34079"
}
],
"symlink_target": ""
} |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE DefaultSignatures #-}
module Data.SBV.BitVectors.Data
( SBool, SWord8, SWord16, SWord32, SWord64
, SInt8, SInt16, SInt32, SInt64, SInteger, SReal, SFloat, SDouble
, nan, infinity, sNaN, sInfinity, RoundingMode(..), smtLibSquareRoot, smtLibFusedMA
, SymWord(..)
, CW(..), CWVal(..), cwSameType, cwIsBit, cwToBool, normCW
, mkConstCW ,liftCW2, mapCW, mapCW2
, SW(..), trueSW, falseSW, trueCW, falseCW
, SBV(..), NodeId(..), mkSymSBV, mkSymSBVWithRandom
, ArrayContext(..), ArrayInfo, SymArray(..), SFunArray(..), mkSFunArray, SArray(..), arrayUIKind
, sbvToSW, sbvToSymSW
, SBVExpr(..), newExpr
, cache, Cached, uncache, uncacheAI, HasKind(..)
, Op(..), NamedSymVar, UnintKind(..), getTableIndex, SBVPgm(..), Symbolic, runSymbolic, runSymbolic', State, inProofMode, SBVRunMode(..), Kind(..), Outputtable(..), Result(..)
, readResult, getResult
, getTraceInfo, getConstraints, addConstraint
, SBVType(..), newUninterpreted, unintFnUIKind, addAxiom
, Quantifier(..), needsExistentials
, SMTLibPgm(..), SMTLibVersion(..)
, SolverCapabilities(..)
) where
import Control.Applicative (Applicative)
import Control.DeepSeq (NFData(..))
import Control.Monad (when)
import Control.Monad.Fix (MonadFix)
import Control.Monad.Reader (MonadReader, ReaderT, ask, runReaderT)
import Control.Monad.Trans (MonadIO, liftIO)
import Data.Char (isAlpha, isAlphaNum)
import Data.Generics (Data(..), dataTypeName, dataTypeOf, tyconUQname)
import Data.Int (Int8, Int16, Int32, Int64)
import Data.Word (Word8, Word16, Word32, Word64)
import Data.IORef (IORef, newIORef, modifyIORef, readIORef, writeIORef)
import Data.List (intercalate, sortBy)
import Data.Maybe (isJust, fromJust)
import qualified Data.IntMap as IMap (IntMap, empty, size, toAscList, lookup, insert, insertWith)
import qualified Data.Map as Map (Map, empty, toList, size, insert, lookup)
import qualified Data.Set as Set (Set, empty, toList, insert)
import qualified Data.Foldable as F (toList)
import qualified Data.Sequence as S (Seq, empty, (|>))
import System.Mem.StableName
import System.Random
import Data.SBV.BitVectors.AlgReals
import Data.SBV.Utils.Lib
-- | A constant value: the payload carried by a concrete word ('CW').
data CWVal = CWAlgReal AlgReal        -- ^ algebraic real
           | CWInteger Integer        -- ^ bit-vector/unbounded integer
           | CWFloat Float            -- ^ float
           | CWDouble Double          -- ^ double
           | CWUninterpreted String   -- ^ value of an uninterpreted kind

-- We cannot simply derive Eq/Ord for CWVal, since CWAlgReal doesn't have proper
-- instances for these when values are infinitely precise reals. However, we do
-- need a structural eq/ord for Map indexes; so define custom ones here:
instance Eq CWVal where
  CWAlgReal a       == CWAlgReal b       = a `algRealStructuralEqual` b
  CWInteger a       == CWInteger b       = a == b
  CWUninterpreted a == CWUninterpreted b = a == b
  CWFloat a         == CWFloat b         = a == b
  CWDouble a        == CWDouble b        = a == b
  _                 == _                 = False

-- Structural ordering, constructors ranked
-- CWAlgReal < CWInteger < CWFloat < CWDouble < CWUninterpreted;
-- values built by the same constructor compare by payload.
instance Ord CWVal where
  CWAlgReal a `compare` CWAlgReal b       = a `algRealStructuralCompare` b
  CWAlgReal _ `compare` CWInteger _       = LT
  CWAlgReal _ `compare` CWFloat _         = LT
  CWAlgReal _ `compare` CWDouble _        = LT
  CWAlgReal _ `compare` CWUninterpreted _ = LT

  CWInteger _ `compare` CWAlgReal _       = GT
  CWInteger a `compare` CWInteger b       = a `compare` b
  CWInteger _ `compare` CWFloat _         = LT
  CWInteger _ `compare` CWDouble _        = LT
  CWInteger _ `compare` CWUninterpreted _ = LT

  CWFloat _ `compare` CWAlgReal _         = GT
  CWFloat _ `compare` CWInteger _         = GT
  CWFloat a `compare` CWFloat b           = a `compare` b
  CWFloat _ `compare` CWDouble _          = LT
  CWFloat _ `compare` CWUninterpreted _   = LT

  CWDouble _ `compare` CWAlgReal _        = GT
  CWDouble _ `compare` CWInteger _        = GT
  CWDouble _ `compare` CWFloat _          = GT
  CWDouble a `compare` CWDouble b         = a `compare` b
  CWDouble _ `compare` CWUninterpreted _  = LT

  CWUninterpreted _ `compare` CWAlgReal _       = GT
  CWUninterpreted _ `compare` CWInteger _       = GT
  CWUninterpreted _ `compare` CWFloat _         = GT
  CWUninterpreted _ `compare` CWDouble _        = GT
  CWUninterpreted a `compare` CWUninterpreted b = a `compare` b

-- | 'CW' represents a concrete word of a fixed size:
-- Endianness is mostly irrelevant (see the 'FromBits' class).
-- For signed words, the most significant digit is considered to be the sign.
data CW = CW { cwKind :: !Kind    -- ^ the kind (type) of this constant
             , cwVal  :: !CWVal   -- ^ the underlying payload
             }
        deriving (Eq, Ord)
-- | Are two CW's of the same type?
cwSameType :: CW -> CW -> Bool
cwSameType a b = kindOf a == kindOf b
-- | Is this a single bit, i.e., an unsigned bit-vector of size 1?
cwIsBit :: CW -> Bool
cwIsBit (CW k _) = k == KBounded False 1
-- | Convert a CW to a Haskell boolean (NB. Assumes input is well-kinded):
-- everything is truthy except the integer constant 0.
cwToBool :: CW -> Bool
cwToBool c = case cwVal c of
               CWInteger 0 -> False
               _           -> True
-- | Normalize a CW. Essentially performs modular arithmetic to make sure the
-- value can fit in the given bit-size. Note that this is rather tricky for
-- negative values, due to asymmetry. (i.e., an 8-bit negative number represents
-- values in the range -128 to 127; thus we have to be careful on the negative side.)
normCW :: CW -> CW
normCW c@(CW (KBounded signed sz) (CWInteger v)) = c { cwVal = CWInteger norm }
  where norm | sz == 0 = 0
             | signed  = let rg = 2 ^ (sz - 1)
                         in case divMod v rg of
                              (a, b) | even a -> b       -- even quotient: lands in [0, rg)
                              (_, b)          -> b - rg  -- odd quotient: shift into [-rg, 0)
             | True    = v `mod` (2 ^ sz)                -- unsigned: plain modular reduction
-- Non-integer payloads (and unbounded kinds) are left untouched.
normCW c = c
-- | Kind of symbolic value
data Kind = KBounded Bool Int      -- ^ bounded bit-vector; Bool is True iff signed, Int is the bit-size
          | KUnbounded             -- ^ unbounded integer
          | KReal                  -- ^ algebraic real
          | KUninterpreted String  -- ^ uninterpreted sort, identified by name
          | KFloat                 -- ^ IEEE-754 single precision
          | KDouble                -- ^ IEEE-754 double precision
          deriving (Eq, Ord)

-- Kinds render as the names of the corresponding symbolic Haskell types.
instance Show Kind where
  show (KBounded False 1) = "SBool"
  show (KBounded False n) = "SWord" ++ show n
  show (KBounded True n)  = "SInt" ++ show n
  show KUnbounded         = "SInteger"
  show KReal              = "SReal"
  show (KUninterpreted s) = s
  show KFloat             = "SFloat"
  show KDouble            = "SDouble"

-- | A symbolic node id
newtype NodeId = NodeId Int deriving (Eq, Ord)

-- | A symbolic word, tracking its signedness and size.
data SW = SW Kind NodeId deriving (Eq, Ord)

-- | Quantifiers: forall or exists. Note that we allow
-- arbitrary nestings.
data Quantifier = ALL | EX deriving Eq
-- | Are there any existential quantifiers in the given list?
needsExistentials :: [Quantifier] -> Bool
needsExistentials = any (== EX)
-- | Constant False as a SW. Note that this value always occupies slot -2.
falseSW :: SW
falseSW = SW (KBounded False 1) $ NodeId (-2)

-- | Constant True as a SW. Note that this value always occupies slot -1.
trueSW :: SW
trueSW = SW (KBounded False 1) $ NodeId (-1)

-- | Constant False as a CW. We represent it using the integer value 0.
falseCW :: CW
falseCW = CW (KBounded False 1) (CWInteger 0)

-- | Constant True as a CW. We represent it using the integer value 1.
trueCW :: CW
trueCW = CW (KBounded False 1) (CWInteger 1)

-- | A simple type for SBV computations, used mainly for uninterpreted constants.
-- We keep track of the signedness/size of the arguments. A non-function will
-- have just one entry in the list.
newtype SBVType = SBVType [Kind]
                deriving (Eq, Ord)
-- | How many arguments does the type take? (One less than the number
-- of kinds: the final kind is the result.)
typeArity :: SBVType -> Int
typeArity (SBVType ks) = pred (length ks)
-- Render as an arrow-separated chain of kinds, e.g. "SWord8 -> SBool".
instance Show SBVType where
  show (SBVType []) = error "SBV: internal error, empty SBVType"
  show (SBVType xs) = intercalate " -> " $ map show xs

-- | Symbolic operations
data Op = Plus | Times | Minus
        | Quot | Rem
        | Equal | NotEqual
        | LessThan | GreaterThan | LessEq | GreaterEq
        | Ite
        | And | Or | XOr | Not
        | Shl Int | Shr Int | Rol Int | Ror Int
        | Extract Int Int                      -- Extract i j: extract bits i to j. Least significant bit is 0 (big-endian)
        | Join                                 -- Concat two words to form a bigger one, in the order given
        | LkUp (Int, Kind, Kind, Int) !SW !SW  -- (table-index, arg-type, res-type, length of the table) index out-of-bounds-value
        | ArrEq Int Int
        | ArrRead Int
        | Uninterpreted String
        deriving (Eq, Ord)

-- | SMT-Lib's square-root over floats/doubles. We piggy back on to the uninterpreted function mechanism
-- to implement these; which is not a terrible idea; although the use of the constructor 'Uninterpreted'
-- might be confusing. This function will *not* be uninterpreted in reality, as QF_FPA will define it. It's
-- a bit of a shame, but much easier to implement it this way.
smtLibSquareRoot :: Op
smtLibSquareRoot = Uninterpreted "squareRoot"

-- | SMT-Lib's fusedMA over floats/doubles. Similar to the 'smtLibSquareRoot'. Note that we cannot implement
-- this function in Haskell as precision loss would be inevitable. Maybe Haskell will eventually add this op
-- to the Num class.
smtLibFusedMA :: Op
smtLibFusedMA = Uninterpreted "fusedMA"

-- | A symbolic expression: an operator applied to a list of symbolic words.
data SBVExpr = SBVApp !Op ![SW]
             deriving (Eq, Ord)
-- | A class for capturing values that have a sign and a size (finite or infinite)
-- minimal complete definition: kindOf. This class can be automatically derived
-- for data-types that have a 'Data' instance; this is useful for creating uninterpreted
-- sorts.
class HasKind a where
  kindOf          :: a -> Kind
  hasSign         :: a -> Bool
  intSizeOf       :: a -> Int
  isBoolean       :: a -> Bool
  isBounded       :: a -> Bool
  isReal          :: a -> Bool
  isFloat         :: a -> Bool
  isDouble        :: a -> Bool
  isInteger       :: a -> Bool
  isUninterpreted :: a -> Bool
  showType        :: a -> String
  -- defaults: everything below is derived from kindOf
  hasSign x = case kindOf x of
                KBounded b _     -> b
                KUnbounded       -> True
                KReal            -> True
                KFloat           -> True
                KDouble          -> True
                KUninterpreted{} -> False
  -- intSizeOf is only meaningful for bounded bit-vectors; all other kinds error out
  intSizeOf x = case kindOf x of
                  KBounded _ s     -> s
                  KUnbounded       -> error "SBV.HasKind.intSizeOf((S)Integer)"
                  KReal            -> error "SBV.HasKind.intSizeOf((S)Real)"
                  KFloat           -> error "SBV.HasKind.intSizeOf((S)Float)"
                  KDouble          -> error "SBV.HasKind.intSizeOf((S)Double)"
                  KUninterpreted s -> error $ "SBV.HasKind.intSizeOf: Uninterpreted sort: " ++ s
  isBoolean x       | KBounded False 1 <- kindOf x = True
                    | True                         = False
  isBounded x       | KBounded{}       <- kindOf x = True
                    | True                         = False
  isReal x          | KReal{}          <- kindOf x = True
                    | True                         = False
  isFloat x         | KFloat{}         <- kindOf x = True
                    | True                         = False
  isDouble x        | KDouble{}        <- kindOf x = True
                    | True                         = False
  isInteger x       | KUnbounded{}     <- kindOf x = True
                    | True                         = False
  isUninterpreted x | KUninterpreted{} <- kindOf x = True
                    | True                         = False
  showType = show . kindOf
  -- default signature for uninterpreted kinds: the sort is named after the
  -- (unqualified) type constructor, via the 'Data' instance
  default kindOf :: Data a => a -> Kind
  kindOf = KUninterpreted . tyconUQname . dataTypeName . dataTypeOf
-- Concrete kinds for the base Haskell types that SBV mirrors symbolically.
instance HasKind Bool    where kindOf _ = KBounded False 1
instance HasKind Int8    where kindOf _ = KBounded True 8
instance HasKind Word8   where kindOf _ = KBounded False 8
instance HasKind Int16   where kindOf _ = KBounded True 16
instance HasKind Word16  where kindOf _ = KBounded False 16
instance HasKind Int32   where kindOf _ = KBounded True 32
instance HasKind Word32  where kindOf _ = KBounded False 32
instance HasKind Int64   where kindOf _ = KBounded True 64
instance HasKind Word64  where kindOf _ = KBounded False 64
instance HasKind Integer where kindOf _ = KUnbounded
instance HasKind AlgReal where kindOf _ = KReal
instance HasKind Float   where kindOf _ = KFloat
instance HasKind Double  where kindOf _ = KDouble
-- | Lift a unary function through a CW: dispatch to the handler that
-- matches the payload's constructor.
liftCW :: (AlgReal -> b) -> (Integer -> b) -> (Float -> b) -> (Double -> b) -> (String -> b) -> CW -> b
liftCW onReal onInt onFloat onDouble onUI cw = case cwVal cw of
  CWAlgReal       v -> onReal v
  CWInteger       v -> onInt v
  CWFloat         v -> onFloat v
  CWDouble        v -> onDouble v
  CWUninterpreted v -> onUI v
-- | Lift a binary function through a pair of CWs; both arguments must carry
-- the same payload constructor, otherwise it is an internal error.
liftCW2 :: (AlgReal -> AlgReal -> b) -> (Integer -> Integer -> b) -> (Float -> Float -> b) -> (Double -> Double -> b) -> (String -> String -> b) -> CW -> CW -> b
liftCW2 onReal onInt onFloat onDouble onUI x y =
  case (cwVal x, cwVal y) of
    (CWAlgReal a,       CWAlgReal b)       -> onReal a b
    (CWInteger a,       CWInteger b)       -> onInt a b
    (CWFloat a,         CWFloat b)         -> onFloat a b
    (CWDouble a,        CWDouble b)        -> onDouble a b
    (CWUninterpreted a, CWUninterpreted b) -> onUI a b
    _                                      -> error $ "SBV.liftCW2: impossible, incompatible args received: " ++ show (x, y)
-- | Map a unary function through a CW, renormalizing the result so it
-- still fits the CW's kind.
mapCW :: (AlgReal -> AlgReal) -> (Integer -> Integer) -> (Float -> Float) -> (Double -> Double) -> (String -> String) -> CW -> CW
mapCW onReal onInt onFloat onDouble onUI c = normCW (CW (cwKind c) newVal)
  where newVal = case cwVal c of
                   CWAlgReal       a -> CWAlgReal (onReal a)
                   CWInteger       a -> CWInteger (onInt a)
                   CWFloat         a -> CWFloat (onFloat a)
                   CWDouble        a -> CWDouble (onDouble a)
                   CWUninterpreted a -> CWUninterpreted (onUI a)
-- | Map a binary function through two CWs of the same kind, renormalizing
-- the result. Mismatched kinds or payload constructors are internal errors.
mapCW2 :: (AlgReal -> AlgReal -> AlgReal) -> (Integer -> Integer -> Integer) -> (Float -> Float -> Float) -> (Double -> Double -> Double) -> (String -> String -> String) -> CW -> CW -> CW
mapCW2 onReal onInt onFloat onDouble onUI x y
  | cwSameType x y = case (cwVal x, cwVal y) of
                       (CWAlgReal a,       CWAlgReal b)       -> wrap (CWAlgReal (onReal a b))
                       (CWInteger a,       CWInteger b)       -> wrap (CWInteger (onInt a b))
                       (CWFloat a,         CWFloat b)         -> wrap (CWFloat (onFloat a b))
                       (CWDouble a,        CWDouble b)        -> wrap (CWDouble (onDouble a b))
                       (CWUninterpreted a, CWUninterpreted b) -> wrap (CWUninterpreted (onUI a b))
                       _                                      -> mismatch
  | True           = mismatch
  where wrap     = normCW . CW (cwKind x)
        mismatch = error $ "SBV.mapCW2: impossible, incompatible args received: " ++ show (x, y)
instance HasKind CW where
  kindOf = cwKind

instance HasKind SW where
  kindOf (SW k _) = k

-- Single bits render as booleans; everything else shows value and type.
instance Show CW where
  show w | cwIsBit w = show (cwToBool w)
  show w             = liftCW show show show show id w ++ " :: " ++ showType w

-- Symbolic words render as s0, s1, ...; internal constants (negative ids)
-- render as s_1, s_2, ...
instance Show SW where
  show (SW _ (NodeId n))
    | n < 0 = "s_" ++ show (abs n)
    | True  = 's' : show n
-- Human-readable rendering of operators, used when printing programs.
instance Show Op where
  show (Shl i) = "<<" ++ show i
  show (Shr i) = ">>" ++ show i
  show (Rol i) = "<<<" ++ show i
  show (Ror i) = ">>>" ++ show i
  show (Extract i j) = "choose [" ++ show i ++ ":" ++ show j ++ "]"
  show (LkUp (ti, at, rt, l) i e)
        = "lookup(" ++ tinfo ++ ", " ++ show i ++ ", " ++ show e ++ ")"
        where tinfo = "table" ++ show ti ++ "(" ++ show at ++ " -> " ++ show rt ++ ", " ++ show l ++ ")"
  show (ArrEq i j) = "array_" ++ show i ++ " == array_" ++ show j
  show (ArrRead i) = "select array_" ++ show i
  show (Uninterpreted i) = "[uninterpreted] " ++ i
  show op
    | Just s <- op `lookup` syms = s
    | True                       = error "impossible happened; can't find op!"
    where syms = [ (Plus, "+"), (Times, "*"), (Minus, "-")
                 , (Quot, "quot")
                 , (Rem, "rem")
                 , (Equal, "=="), (NotEqual, "/=")
                 -- BUGFIX: LessEq/GreaterEq previously rendered as "<"/">",
                 -- making them indistinguishable from LessThan/GreaterThan.
                 , (LessThan, "<"), (GreaterThan, ">"), (LessEq, "<="), (GreaterEq, ">=")
                 , (Ite, "if_then_else")
                 , (And, "&"), (Or, "|"), (XOr, "^"), (Not, "~")
                 , (Join, "#")
                 ]
-- | To improve hash-consing, take advantage of commutative operators by
-- reordering their arguments into a canonical (sorted) order.
reorder :: SBVExpr -> SBVExpr
reorder (SBVApp op [a, b])
  | commutative op && a > b = SBVApp op [b, a]
  where commutative o = o `elem` [Plus, Times, Equal, NotEqual, And, Or, XOr]
-- Anything else (wrong arity, non-commutative, or already ordered) is untouched.
reorder e = e
-- Pretty-print expressions, using infix form where it reads naturally.
instance Show SBVExpr where
  show (SBVApp Ite [t, a, b]) = unwords ["if", show t, "then", show a, "else", show b]
  show (SBVApp (Shl i) [a])   = unwords [show a, "<<", show i]
  show (SBVApp (Shr i) [a])   = unwords [show a, ">>", show i]
  show (SBVApp (Rol i) [a])   = unwords [show a, "<<<", show i]
  show (SBVApp (Ror i) [a])   = unwords [show a, ">>>", show i]
  show (SBVApp op [a, b])     = unwords [show a, show op, show b]
  show (SBVApp op args)       = unwords (show op : map show args)

-- | A program is a sequence of assignments
newtype SBVPgm = SBVPgm {pgmAssignments :: (S.Seq (SW, SBVExpr))}

-- | 'NamedSymVar' pairs symbolic words and user given/automatically generated names
type NamedSymVar = (SW, String)

-- | 'UnintKind' pairs array names and uninterpreted constants with their "kinds"
-- used mainly for printing counterexamples
data UnintKind = UFun Int String | UArr Int String -- in each case, arity and the aliasing name
               deriving Show
-- | Result of running a symbolic computation
data Result = Result (Set.Set Kind)                -- kinds used in the program
                     [(String, CW)]                -- quick-check counter-example information (if any)
                     [(String, [String])]          -- uninterpreted code segments
                     [(Quantifier, NamedSymVar)]   -- inputs (possibly existential)
                     [(SW, CW)]                    -- constants
                     [((Int, Kind, Kind), [SW])]   -- tables (automatically constructed) (tableno, index-type, result-type) elts
                     [(Int, ArrayInfo)]            -- arrays (user specified)
                     [(String, SBVType)]           -- uninterpreted constants
                     [(String, [String])]          -- axioms
                     SBVPgm                        -- assignments
                     [SW]                          -- additional constraints (boolean)
                     [SW]                          -- outputs

-- | Extract the constraints from a result
getConstraints :: Result -> [SW]
getConstraints (Result _ _ _ _ _ _ _ _ _ _ cstrs _) = cstrs

-- | Extract the traced-values from a result (quick-check)
getTraceInfo :: Result -> [(String, CW)]
getTraceInfo (Result _ tvals _ _ _ _ _ _ _ _ _ _) = tvals
-- Render a full multi-section report of the symbolic run. A trivial result
-- whose single output is a known constant renders as just that constant.
instance Show Result where
  show (Result _ _ _ _ cs _ _ [] [] _ [] [r])
    | Just c <- r `lookup` cs
    = show c
  show (Result kinds _ cgs is cs ts as uis axs xs cstrs os) = intercalate "\n" $
           (if null usorts then [] else "SORTS" : map (" " ++) usorts)
        ++ ["INPUTS"]
        ++ map shn is
        ++ ["CONSTANTS"]
        ++ map shc cs
        ++ ["TABLES"]
        ++ map sht ts
        ++ ["ARRAYS"]
        ++ map sha as
        ++ ["UNINTERPRETED CONSTANTS"]
        ++ map shui uis
        ++ ["USER GIVEN CODE SEGMENTS"]
        ++ concatMap shcg cgs
        ++ ["AXIOMS"]
        ++ map shax axs
        ++ ["DEFINE"]
        ++ map (\(s, e) -> " " ++ shs s ++ " = " ++ show e) (F.toList (pgmAssignments xs))
        ++ ["CONSTRAINTS"]
        ++ map ((" " ++) . show) cstrs
        ++ ["OUTPUTS"]
        ++ map ((" " ++) . show) os
    where usorts = [s | KUninterpreted s <- Set.toList kinds]
          shs sw = show sw ++ " :: " ++ showType sw
          sht ((i, at, rt), es) = " Table " ++ show i ++ " : " ++ show at ++ "->" ++ show rt ++ " = " ++ show es
          shc (sw, cw) = " " ++ show sw ++ " = " ++ show cw
          shcg (s, ss) = ("Variable: " ++ s) : map (" " ++) ss
          -- inputs: mark existentials and any user-given alias
          shn (q, (sw, nm)) = " " ++ ni ++ " :: " ++ showType sw ++ ex ++ alias
            where ni = show sw
                  ex | q == ALL = ""
                     | True     = ", existential"
                  alias | ni == nm = ""
                        | True     = ", aliasing " ++ show nm
          sha (i, (nm, (ai, bi), ctx)) = " " ++ ni ++ " :: " ++ show ai ++ " -> " ++ show bi ++ alias
                                       ++ "\n Context: " ++ show ctx
            where ni = "array_" ++ show i
                  alias | ni == nm = ""
                        | True     = ", aliasing " ++ show nm
          shui (nm, t) = " [uninterpreted] " ++ nm ++ " :: " ++ show t
          shax (nm, ss) = " -- user defined axiom: " ++ nm ++ "\n " ++ intercalate "\n " ss
-- | The context of a symbolic array as created
data ArrayContext = ArrayFree (Maybe SW)     -- ^ A new array, with potential initializer for each cell
                  | ArrayReset Int SW        -- ^ An array created from another array by fixing each element to another value
                  | ArrayMutate Int SW SW    -- ^ An array created by mutating another array at a given cell
                  | ArrayMerge SW Int Int    -- ^ An array created by symbolically merging two other arrays

instance Show ArrayContext where
  show (ArrayFree Nothing)  = " initialized with random elements"
  show (ArrayFree (Just s)) = " initialized with " ++ show s ++ " :: " ++ showType s
  show (ArrayReset i s)     = " reset array_" ++ show i ++ " with " ++ show s ++ " :: " ++ showType s
  show (ArrayMutate i a b)  = " cloned from array_" ++ show i ++ " with " ++ show a ++ " :: " ++ showType a ++ " |-> " ++ show b ++ " :: " ++ showType b
  show (ArrayMerge s i j)   = " merged arrays " ++ show i ++ " and " ++ show j ++ " on condition " ++ show s

-- | Expression map, used for hash-consing
type ExprMap = Map.Map SBVExpr SW

-- | Constants are stored in a map, for hash-consing
type CnstMap = Map.Map CW SW

-- | Kinds used in the program; used for determining the final SMT-Lib logic to pick
type KindSet = Set.Set Kind

-- | Tables generated during a symbolic run
type TableMap = Map.Map [SW] (Int, Kind, Kind)

-- | Representation for symbolic arrays
type ArrayInfo = (String, (Kind, Kind), ArrayContext)

-- | Arrays generated during a symbolic run
type ArrayMap = IMap.IntMap ArrayInfo

-- | Uninterpreted-constants generated during a symbolic run
type UIMap = Map.Map String SBVType

-- | Code-segments for Uninterpreted-constants, as given by the user
type CgMap = Map.Map String [String]

-- | Cached values, implementing sharing
type Cache a = IMap.IntMap [(StableName (State -> IO a), a)]
-- | Convert an SBV-type to the kind-of uninterpreted value it represents:
-- a function of the type's arity, aliased by its own name.
unintFnUIKind :: (String, SBVType) -> (String, UnintKind)
unintFnUIKind (nm, typ) = (nm, UFun (typeArity typ) nm)
-- | Convert an array value type to the kind-of uninterpreted value it represents.
-- Only free (externally visible) arrays count; derived arrays yield Nothing.
arrayUIKind :: (Int, ArrayInfo) -> Maybe (String, UnintKind)
arrayUIKind (i, (nm, _, ctx))
  | visible ctx = Just ("array_" ++ show i, UArr 1 nm) -- arrays are always 1-dimensional in the SMT-land. (Unless encoded explicitly)
  | True        = Nothing
  where visible ArrayFree{} = True
        visible _           = False
-- | Different means of running a symbolic piece of code
data SBVRunMode = Proof Bool       -- ^ Symbolic simulation mode, for proof purposes. Bool is True if it's a sat instance
                | CodeGen          -- ^ Code generation mode
                | Concrete StdGen  -- ^ Concrete simulation mode. The StdGen is for the pConstrain acceptance in cross runs
-- | Is this a concrete run? (i.e., quick-check or test-generation like)
isConcreteMode :: SBVRunMode -> Bool
isConcreteMode m = case m of
                     Concrete{} -> True
                     _          -> False
-- | The state of the symbolic interpreter. All mutable pieces live in
-- 'IORef's so the Symbolic monad can thread a single immutable record.
data State = State { runMode      :: SBVRunMode                        -- ^ proof, code-gen, or concrete run
                   , rStdGen      :: IORef StdGen                      -- ^ random generator (see 'throwDice')
                   , rCInfo       :: IORef [(String, CW)]              -- ^ traced (name, value) pairs; cf. 'getTraceInfo'
                   , rctr         :: IORef Int                         -- ^ node-id counter (see 'incCtr')
                   , rUsedKinds   :: IORef KindSet                     -- ^ kinds registered so far (see 'registerKind')
                   , rinps        :: IORef [(Quantifier, NamedSymVar)] -- ^ declared inputs
                   , rConstraints :: IORef [SW]                        -- ^ accumulated boolean constraints
                   , routs        :: IORef [SW]                        -- ^ declared outputs
                   , rtblMap      :: IORef TableMap                    -- ^ hash-consed tables
                   , spgm         :: IORef SBVPgm                      -- ^ the assignment program built so far
                   , rconstMap    :: IORef CnstMap                     -- ^ hash-consed constants (see 'newConst')
                   , rexprMap     :: IORef ExprMap                     -- ^ hash-consed expressions
                   , rArrayMap    :: IORef ArrayMap                    -- ^ symbolic arrays
                   , rUIMap       :: IORef UIMap                       -- ^ uninterpreted constants (see 'newUninterpreted')
                   , rCgMap       :: IORef CgMap                       -- ^ user-given code segments
                   , raxioms      :: IORef [(String, [String])]        -- ^ user-given axioms
                   , rSWCache     :: IORef (Cache SW)                  -- ^ sharing cache for symbolic words
                   , rAICache     :: IORef (Cache Int)                 -- ^ sharing cache for array indexes
                   }
-- | Are we running in proof mode?
inProofMode :: State -> Bool
inProofMode st = case runMode st of
                   Proof{} -> True
                   _       -> False
-- | The "Symbolic" value. Either a constant (@Left@) or a symbolic
-- value (@Right Cached@). Note that caching is essential for making
-- sure sharing is preserved. The parameter 'a' is phantom, but is
-- extremely important in keeping the user interface strongly typed.
data SBV a = SBV !Kind !(Either CW (Cached SW))

-- | A symbolic boolean/bit
type SBool = SBV Bool

-- | 8-bit unsigned symbolic value
type SWord8 = SBV Word8

-- | 16-bit unsigned symbolic value
type SWord16 = SBV Word16

-- | 32-bit unsigned symbolic value
type SWord32 = SBV Word32

-- | 64-bit unsigned symbolic value
type SWord64 = SBV Word64

-- | 8-bit signed symbolic value, 2's complement representation
type SInt8 = SBV Int8

-- | 16-bit signed symbolic value, 2's complement representation
type SInt16 = SBV Int16

-- | 32-bit signed symbolic value, 2's complement representation
type SInt32 = SBV Int32

-- | 64-bit signed symbolic value, 2's complement representation
type SInt64 = SBV Int64

-- | Infinite precision signed symbolic value
type SInteger = SBV Integer

-- | Infinite precision symbolic algebraic real value
type SReal = SBV AlgReal

-- | IEEE-754 single-precision floating point numbers
type SFloat = SBV Float

-- | IEEE-754 double-precision floating point numbers
type SDouble = SBV Double

-- | Not-A-Number for 'Double' and 'Float'. Surprisingly, Haskell
-- Prelude doesn't have this value defined, so we provide it here.
nan :: Floating a => a
nan = 0/0

-- | Infinity for 'Double' and 'Float'. Surprisingly, Haskell
-- Prelude doesn't have this value defined, so we provide it here.
infinity :: Floating a => a
infinity = 1/0

-- | Symbolic variant of Not-A-Number. This value will inhabit both
-- 'SDouble' and 'SFloat'.
sNaN :: (Floating a, SymWord a) => SBV a
sNaN = literal nan

-- | Symbolic variant of infinity. This value will inhabit both
-- 'SDouble' and 'SFloat'.
sInfinity :: (Floating a, SymWord a) => SBV a
sInfinity = literal infinity

-- | Rounding mode to be used for the IEEE floating-point operations.
-- Note that Haskell's default is 'RoundNearestTiesToEven'. If you use
-- a different rounding mode, then the counter-examples you get may not
-- match what you observe in Haskell.
data RoundingMode = RoundNearestTiesToEven  -- ^ Round to nearest representable floating point value.
                                            -- If precisely at half-way, pick the even number.
                                            -- (In this context, /even/ means the lowest-order bit is zero.)
                  | RoundNearestTiesToAway  -- ^ Round to nearest representable floating point value.
                                            -- If precisely at half-way, pick the number further away from 0.
                                            -- (That is, for positive values, pick the greater; for negative values, pick the smaller.)
                  | RoundTowardPositive     -- ^ Round towards positive infinity. (Also known as rounding-up or ceiling.)
                  | RoundTowardNegative     -- ^ Round towards negative infinity. (Also known as rounding-down or floor.)
                  | RoundTowardZero         -- ^ Round towards zero. (Also known as truncation.)
-- Not particularly "desirable", but will do if needed: constants print
-- their value, genuinely symbolic values just print their kind.
instance Show (SBV a) where
  show (SBV _ (Left c))  = show c
  show (SBV k (Right _)) = "<symbolic> :: " ++ show k

-- Equality constraint on SBV values. Not desirable since we can't really compare two
-- symbolic values, but will do. Comparing anything non-constant is an error;
-- use (.==)/(./=) for symbolic equality instead.
instance Eq (SBV a) where
  SBV _ (Left a) == SBV _ (Left b) = a == b
  a == b = error $ "Comparing symbolic bit-vectors; Use (.==) instead. Received: " ++ show (a, b)
  SBV _ (Left a) /= SBV _ (Left b) = a /= b
  a /= b = error $ "Comparing symbolic bit-vectors; Use (./=) instead. Received: " ++ show (a, b)

instance HasKind a => HasKind (SBV a) where
  kindOf (SBV k _) = k
-- | Increment the variable counter, returning its value before the bump.
incCtr :: State -> IO Int
incCtr st = do
  old <- readIORef (rctr st)
  let next = old + 1
  -- force the increment before storing, to avoid a thunk build-up in the IORef
  next `seq` writeIORef (rctr st) next
  return old
-- | Generate a random value, for quick-check and test-gen purposes
throwDice :: State -> IO Double
throwDice st = do g <- readIORef (rStdGen st)
let (r, g') = randomR (0, 1) g
writeIORef (rStdGen st) g'
return r
-- | Create a new uninterpreted symbol, possibly with user given code
-- The name must be a valid identifier: alphabetic first character, followed
-- by alphanumerics or underscores. Re-registering an already-known name is
-- accepted only at the exact same type; a mismatch is an error.
newUninterpreted :: State -> String -> SBVType -> Maybe [String] -> IO ()
newUninterpreted st nm t mbCode
  | null nm || not (isAlpha (head nm)) || not (all validChar (tail nm))
  = error $ "Bad uninterpreted constant name: " ++ show nm ++ ". Must be a valid identifier."
  | True = do
        uiMap <- readIORef (rUIMap st)
        case nm `Map.lookup` uiMap of
          Just t' -> if t /= t'
                     then error $ "Uninterpreted constant " ++ show nm ++ " used at incompatible types\n"
                                ++ "  Current type      : " ++ show t ++ "\n"
                                ++ "  Previously used at: " ++ show t'
                     else return ()
          Nothing -> do modifyIORef (rUIMap st) (Map.insert nm t)
                        -- total pattern-match instead of isJust/fromJust:
                        -- same behavior, but no partial functions involved
                        case mbCode of
                          Just code -> modifyIORef (rCgMap st) (Map.insert nm code)
                          Nothing   -> return ()
  where validChar x = isAlphaNum x || x `elem` "_"
-- | Create a new SW
-- Allocates a fresh node-id from the counter, records the kind as used,
-- and returns the node together with its printable name (@s<id>@).
newSW :: State -> Kind -> IO (SW, String)
newSW st k = do ctr <- incCtr st
                let sw = SW k (NodeId ctr)
                registerKind st k
                return (sw, 's' : show ctr)
{-# INLINE newSW #-}
-- | Record a kind as used, rejecting uninterpreted sorts whose names clash
-- with the sort names reserved by SMT-Lib.
registerKind :: State -> Kind -> IO ()
registerKind st k
  | KUninterpreted sortName <- k, sortName `elem` reserved
  = error $ "SBV: " ++ show sortName ++ " is a reserved sort; please use a different name."
  | True
  = modifyIORef (rUsedKinds st) (Set.insert k)
  where reserved = ["Int", "Real", "List", "Array", "Bool", "NUMERAL", "DECIMAL", "STRING", "FP"] -- Reserved by SMT-Lib
-- | Create a new constant; hash-cons as necessary
-- If the same 'CW' was seen before, return the previously allocated node so
-- identical constants share a single program node.
newConst :: State -> CW -> IO SW
newConst st c = do
  constMap <- readIORef (rconstMap st)
  case c `Map.lookup` constMap of
    Just sw -> return sw
    Nothing -> do let k = kindOf c
                  (sw, _) <- newSW st k
                  modifyIORef (rconstMap st) (Map.insert c sw)
                  return sw
{-# INLINE newConst #-}
-- | Create a new table; hash-cons as necessary
-- Tables are keyed by their element list: requesting the same contents again
-- reuses the same table index. The index is simply the map size at insert time.
getTableIndex :: State -> Kind -> Kind -> [SW] -> IO Int
getTableIndex st at rt elts = do
  tblMap <- readIORef (rtblMap st)
  case elts `Map.lookup` tblMap of
    Just (i, _, _) -> return i
    Nothing        -> do let i = Map.size tblMap
                         modifyIORef (rtblMap st) (Map.insert elts (i, at, rt))
                         return i
-- | Create a constant word from an integral
-- Bounded/unbounded integer kinds keep the integer representation; real,
-- float and double kinds go through 'fromInteger'. Uninterpreted kinds have
-- no integral inhabitants, so that case is an error.
mkConstCW :: Integral a => Kind -> a -> CW
mkConstCW k@(KBounded{}) a = normCW $ CW k (CWInteger (toInteger a))
mkConstCW KUnbounded     a = normCW $ CW KUnbounded (CWInteger (toInteger a))
mkConstCW KReal          a = normCW $ CW KReal (CWAlgReal (fromInteger (toInteger a)))
mkConstCW KFloat         a = normCW $ CW KFloat (CWFloat (fromInteger (toInteger a)))
mkConstCW KDouble        a = normCW $ CW KDouble (CWDouble (fromInteger (toInteger a)))
mkConstCW (KUninterpreted s) a = error $ "Unexpected call to mkConstCW with uninterpreted kind: " ++ s ++ " with value: " ++ show (toInteger a)
-- | Create a new expression; hash-cons as necessary
-- The application is first put into canonical order ('reorder') so that
-- structurally equal expressions map to the same node. New nodes are also
-- appended to the program sequence.
newExpr :: State -> Kind -> SBVExpr -> IO SW
newExpr st k app = do
   let e = reorder app
   exprMap <- readIORef (rexprMap st)
   case e `Map.lookup` exprMap of
     Just sw -> return sw
     Nothing -> do (sw, _) <- newSW st k
                   modifyIORef (spgm st) (\(SBVPgm xs) -> SBVPgm (xs S.|> (sw, e)))
                   modifyIORef (rexprMap st) (Map.insert e sw)
                   return sw
{-# INLINE newExpr #-}
-- | Convert a symbolic value to a symbolic-word
-- Concrete values are hash-consed via 'newConst'; symbolic values run their
-- cached computation against the given state.
sbvToSW :: State -> SBV a -> IO SW
sbvToSW st (SBV _ (Left c))  = newConst st c
sbvToSW st (SBV _ (Right f)) = uncache f st
-------------------------------------------------------------------------
-- * Symbolic Computations
-------------------------------------------------------------------------
-- | A Symbolic computation. Represented by a reader monad carrying the
-- state of the computation, layered on top of IO for creating unique
-- references to hold onto intermediate results.
newtype Symbolic a = Symbolic (ReaderT State IO a)
                   deriving (Functor, Applicative, Monad, MonadFix, MonadIO, MonadReader State)
-- | Create a symbolic value, based on the quantifier we have. If an explicit quantifier is given, we just use that.
-- If not, then we pick existential for SAT calls and universal for everything else.
mkSymSBV :: forall a. (Random a, SymWord a) => Maybe Quantifier -> Kind -> Maybe String -> Symbolic (SBV a)
mkSymSBV = mkSymSBVWithRandom randomIO
-- | Generalized version of 'mkSymSBV' that takes the random-value generator
-- (used only in concrete-simulation mode) as an explicit argument.
mkSymSBVWithRandom :: forall a. SymWord a =>
                      IO (SBV a) -> Maybe Quantifier -> Kind -> Maybe String -> Symbolic (SBV a)
mkSymSBVWithRandom random mbQ k mbNm = do
        st <- ask
        -- Resolve the effective quantifier: an explicit request always wins;
        -- otherwise only sat mode (Proof True) picks existential.
        let q = case (mbQ, runMode st) of
                  (Just x,  _)           -> x   -- user given, just take it
                  (Nothing, Concrete{})  -> ALL -- concrete simulation, pick universal
                  (Nothing, Proof True)  -> EX  -- sat mode, pick existential
                  (Nothing, Proof False) -> ALL -- proof mode, pick universal
                  (Nothing, CodeGen)     -> ALL -- code generation, pick universal
        case runMode st of
          Concrete _ | q == EX -> case mbNm of
                                    Nothing -> error $ "Cannot quick-check in the presence of existential variables, type: " ++ showType (undefined :: SBV a)
                                    Just nm -> error $ "Cannot quick-check in the presence of existential variable " ++ nm ++ " :: " ++ showType (undefined :: SBV a)
          Concrete _           -> do v@(SBV _ (Left cw)) <- liftIO random
                                     liftIO $ modifyIORef (rCInfo st) ((maybe "_" id mbNm, cw):)
                                     return v
          _                    -> do (sw, internalName) <- liftIO $ newSW st k
                                     let nm = maybe internalName id mbNm
                                     liftIO $ modifyIORef (rinps st) ((q, (sw, nm)):)
                                     return $ SBV k $ Right $ cache (const (return sw))
-- | Convert a symbolic value to an SW, inside the Symbolic monad.
-- Grabs the current state from the reader layer and delegates to the
-- IO-level 'sbvToSW'.
sbvToSymSW :: SBV a -> Symbolic SW
sbvToSymSW sbv = ask >>= \st -> liftIO (sbvToSW st sbv)
-- | A class representing what can be returned from a symbolic computation.
class Outputtable a where
  -- | Mark an interim result as an output. Useful when constructing Symbolic programs
  -- that return multiple values, or when the result is programmatically computed.
  output :: a -> Symbolic a
-- Base case: a single symbolic value is output by recording its SW on the
-- output list of the state. The value itself is returned unchanged.
instance Outputtable (SBV a) where
  output i@(SBV _ (Left c)) = do
          st <- ask
          sw <- liftIO $ newConst st c
          liftIO $ modifyIORef (routs st) (sw:)
          return i
  output i@(SBV _ (Right f)) = do
          st <- ask
          sw <- liftIO $ uncache f st
          liftIO $ modifyIORef (routs st) (sw:)
          return i
-- Structural instances: lists and tuples output each component in turn.
instance Outputtable a => Outputtable [a] where
  output = mapM output
instance Outputtable () where
  output = return
instance (Outputtable a, Outputtable b) => Outputtable (a, b) where
  output = mlift2 (,) output output
instance (Outputtable a, Outputtable b, Outputtable c) => Outputtable (a, b, c) where
  output = mlift3 (,,) output output output
instance (Outputtable a, Outputtable b, Outputtable c, Outputtable d) => Outputtable (a, b, c, d) where
  output = mlift4 (,,,) output output output output
instance (Outputtable a, Outputtable b, Outputtable c, Outputtable d, Outputtable e) => Outputtable (a, b, c, d, e) where
  output = mlift5 (,,,,) output output output output output
instance (Outputtable a, Outputtable b, Outputtable c, Outputtable d, Outputtable e, Outputtable f) => Outputtable (a, b, c, d, e, f) where
  output = mlift6 (,,,,,) output output output output output output
instance (Outputtable a, Outputtable b, Outputtable c, Outputtable d, Outputtable e, Outputtable f, Outputtable g) => Outputtable (a, b, c, d, e, f, g) where
  output = mlift7 (,,,,,,) output output output output output output output
instance (Outputtable a, Outputtable b, Outputtable c, Outputtable d, Outputtable e, Outputtable f, Outputtable g, Outputtable h) => Outputtable (a, b, c, d, e, f, g, h) where
  output = mlift8 (,,,,,,,) output output output output output output output output
-- | Add a user specified axiom to the generated SMT-Lib file. The first argument is a mere
-- string, use for commenting purposes. The second argument is intended to hold the multiple-lines
-- of the axiom text as expressed in SMT-Lib notation. Note that we perform no checks on the axiom
-- itself, to see whether it's actually well-formed or is sensical by any means.
-- A separate formalization of SMT-Lib would be very useful here.
addAxiom :: String -> [String] -> Symbolic ()
addAxiom nm ax = do
        st <- ask
        liftIO $ modifyIORef (raxioms st) ((nm, ax) :)
-- | Run a symbolic computation in Proof mode and return a 'Result'. The boolean
-- argument indicates if this is a sat instance or not.
-- Discards the computation's own return value; see 'runSymbolic'' to keep it.
runSymbolic :: Bool -> Symbolic a -> IO Result
runSymbolic b c = snd `fmap` runSymbolic' (Proof b) c
-- | Run a symbolic computation, and return a extra value paired up with the 'Result'
-- Builds a fresh 'State' (all mutable references empty), runs the reader
-- computation against it, and finally freezes the accumulated state into a
-- pure 'Result' via 'readResult'.
runSymbolic' :: SBVRunMode -> Symbolic a -> IO (a, Result)
runSymbolic' currentRunMode (Symbolic c) = do
   ctr       <- newIORef (-2) -- start from -2; False and True will always occupy the first two elements
   cInfo     <- newIORef []
   pgm       <- newIORef (SBVPgm S.empty)
   emap      <- newIORef Map.empty
   cmap      <- newIORef Map.empty
   inps      <- newIORef []
   outs      <- newIORef []
   tables    <- newIORef Map.empty
   arrays    <- newIORef IMap.empty
   uis       <- newIORef Map.empty
   cgs       <- newIORef Map.empty
   axioms    <- newIORef []
   swCache   <- newIORef IMap.empty
   aiCache   <- newIORef IMap.empty
   usedKinds <- newIORef Set.empty
   cstrs     <- newIORef []
   -- Concrete mode carries its own random generator; other modes get a new one.
   rGen      <- case currentRunMode of
                  Concrete g -> newIORef g
                  _          -> newStdGen >>= newIORef
   let st = State { runMode      = currentRunMode
                  , rStdGen      = rGen
                  , rCInfo       = cInfo
                  , rctr         = ctr
                  , rUsedKinds   = usedKinds
                  , rinps        = inps
                  , routs        = outs
                  , rtblMap      = tables
                  , spgm         = pgm
                  , rconstMap    = cmap
                  , rArrayMap    = arrays
                  , rexprMap     = emap
                  , rUIMap       = uis
                  , rCgMap       = cgs
                  , raxioms      = axioms
                  , rSWCache     = swCache
                  , rAICache     = aiCache
                  , rConstraints = cstrs
                  }
   -- Pre-allocate the constants for false/true so they land on the reserved
   -- node-ids -2 and -1 respectively (counter started at -2 above).
   _ <- newConst st (mkConstCW (KBounded False 1) (0::Integer)) -- s(-2) == falseSW
   _ <- newConst st (mkConstCW (KBounded False 1) (1::Integer)) -- s(-1) == trueSW
   r <- runReaderT c st
   res <- readResult st
   return $ (r, res)
-- | Freeze the mutable 'State' into a pure 'Result' snapshot.
-- Lists accumulated by consing (inputs, outputs, axioms, trace values,
-- constraints) are reversed to restore insertion order; the constant and
-- table maps are flattened and sorted by their allocation index.
readResult :: State -> IO Result
readResult st = do
   rpgm  <- readIORef (spgm st)
   inpsO <- reverse `fmap` readIORef (rinps st)
   outsO <- reverse `fmap` readIORef (routs st)
   let swap (a, b) = (b, a)
       cmp (a, _) (b, _) = a `compare` b
   cnsts <- (sortBy cmp . map swap . Map.toList) `fmap` readIORef (rconstMap st)
   tbls  <- (sortBy (\((x, _, _), _) ((y, _, _), _) -> x `compare` y) . map swap . Map.toList) `fmap` readIORef (rtblMap st)
   arrs  <- IMap.toAscList `fmap` readIORef (rArrayMap st)
   unint <- Map.toList `fmap` readIORef (rUIMap st)
   axs   <- reverse `fmap` readIORef (raxioms st)
   knds  <- readIORef (rUsedKinds st)
   cgMap <- Map.toList `fmap` readIORef (rCgMap st)
   traceVals  <- reverse `fmap` readIORef (rCInfo st)
   extraCstrs <- reverse `fmap` readIORef (rConstraints st)
   return $ Result knds traceVals cgMap inpsO cnsts tbls arrs unint axs rpgm extraCstrs outsO
-- | Extract the 'Result' of the current symbolic computation by reading
-- off the accumulated state; written point-free over the reader layer.
getResult :: Symbolic Result
getResult = ask >>= liftIO . readResult
-------------------------------------------------------------------------------
-- * Symbolic Words
-------------------------------------------------------------------------------
-- | A 'SymWord' is a potential symbolic bitvector that can be created instances of
-- to be fed to a symbolic program. Note that these methods are typically not needed
-- in casual uses with 'prove', 'sat', 'allSat' etc, as default instances automatically
-- provide the necessary bits.
class (HasKind a, Ord a) => SymWord a where
  -- | Create a user named input (universal)
  forall :: String -> Symbolic (SBV a)
  -- | Create an automatically named input
  forall_ :: Symbolic (SBV a)
  -- | Get a bunch of new words
  mkForallVars :: Int -> Symbolic [SBV a]
  -- | Create an existential variable
  exists :: String -> Symbolic (SBV a)
  -- | Create an automatically named existential variable
  exists_ :: Symbolic (SBV a)
  -- | Create a bunch of existentials
  mkExistVars :: Int -> Symbolic [SBV a]
  -- | Create a free variable, universal in a proof, existential in sat
  free :: String -> Symbolic (SBV a)
  -- | Create an unnamed free variable, universal in proof, existential in sat
  free_ :: Symbolic (SBV a)
  -- | Create a bunch of free vars
  mkFreeVars :: Int -> Symbolic [SBV a]
  -- | Similar to free; Just a more convenient name
  symbolic :: String -> Symbolic (SBV a)
  -- | Similar to mkFreeVars; but automatically gives names based on the strings
  symbolics :: [String] -> Symbolic [SBV a]
  -- | Turn a literal constant to symbolic
  literal :: a -> SBV a
  -- | Extract a literal, if the value is concrete
  unliteral :: SBV a -> Maybe a
  -- | Extract a literal, from a CW representation
  fromCW :: CW -> a
  -- | Is the symbolic word concrete?
  isConcrete :: SBV a -> Bool
  -- | Is the symbolic word really symbolic?
  isSymbolic :: SBV a -> Bool
  -- | Does it concretely satisfy the given predicate?
  isConcretely :: SBV a -> (a -> Bool) -> Bool
  -- | max/minbounds, if available. Note that we don't want
  -- to impose "Bounded" on our class as Integer is not Bounded but it is a SymWord
  mbMaxBound, mbMinBound :: Maybe a
  -- | One stop allocator
  mkSymWord :: Maybe Quantifier -> Maybe String -> Symbolic (SBV a)
  -- minimal complete definition, Nothing.
  -- Giving no instances is ok when defining an uninterpreted sort, but otherwise you really
  -- want to define: mbMaxBound, mbMinBound, literal, fromCW, mkSymWord
  forall   = mkSymWord (Just ALL) . Just
  forall_  = mkSymWord (Just ALL) Nothing
  exists   = mkSymWord (Just EX) . Just
  exists_  = mkSymWord (Just EX) Nothing
  free     = mkSymWord Nothing . Just
  free_    = mkSymWord Nothing Nothing
  mkForallVars n = mapM (const forall_) [1 .. n]
  mkExistVars n  = mapM (const exists_) [1 .. n]
  mkFreeVars n   = mapM (const free_)   [1 .. n]
  symbolic  = free
  symbolics = mapM symbolic
  unliteral (SBV _ (Left c))  = Just $ fromCW c
  unliteral _                 = Nothing
  isConcrete (SBV _ (Left _)) = True
  isConcrete _                = False
  isSymbolic = not . isConcrete
  isConcretely s p
    | Just i <- unliteral s = p i
    | True                  = False
  -- Followings, you really want to define them unless the instance is for an uninterpreted sort
  mbMaxBound = Nothing
  mbMinBound = Nothing
  literal x = error $ "Cannot create symbolic literals for kind: " ++ show (kindOf x)
  fromCW cw = error $ "Cannot convert CW " ++ show cw ++ " to kind " ++ show (kindOf (undefined :: a))
  -- Default allocator for uninterpreted sorts: the SMT-Lib sort name is
  -- derived from the 'Data' representation of the type itself.
  default mkSymWord :: Data a => Maybe Quantifier -> Maybe String -> Symbolic (SBV a)
  mkSymWord mbQ mbNm = do
        let sortName = tyconUQname . dataTypeName . dataTypeOf $ (undefined :: a)
        st <- ask
        let k = KUninterpreted sortName
        liftIO $ registerKind st k
        -- Explicit quantifier wins; otherwise sat picks EX, proof picks ALL.
        -- Uninterpreted sorts make no sense in concrete/code-gen modes.
        let q = case (mbQ, runMode st) of
                  (Just x,  _)           -> x
                  (Nothing, Proof True)  -> EX
                  (Nothing, Proof False) -> ALL
                  (Nothing, Concrete{})  -> error $ "SBV: Uninterpreted sort " ++ sortName ++ " can not be used in concrete simulation mode."
                  (Nothing, CodeGen)     -> error $ "SBV: Uninterpreted sort " ++ sortName ++ " can not be used in code-generation mode."
        ctr <- liftIO $ incCtr st
        let sw = SW k (NodeId ctr)
            nm = maybe ('s':show ctr) id mbNm
        liftIO $ modifyIORef (rinps st) ((q, (sw, nm)):)
        return $ SBV k $ Right $ cache (const (return sw))
-- Random instance: delegates to the underlying concrete type. Ranged
-- generation requires both bounds to be concrete; symbolic bounds error out.
instance (Random a, SymWord a) => Random (SBV a) where
  randomR (l, h) g = case (unliteral l, unliteral h) of
                       (Just lb, Just hb) -> let (v, g') = randomR (lb, hb) g in (literal (v :: a), g')
                       _                  -> error $ "SBV.Random: Cannot generate random values with symbolic bounds"
  random  g = let (v, g') = random g in (literal (v :: a) , g')
---------------------------------------------------------------------------------
-- * Symbolic Arrays
---------------------------------------------------------------------------------
-- | Flat arrays of symbolic values
-- An @array a b@ is an array indexed by the type @'SBV' a@, with elements of type @'SBV' b@
-- If an initial value is not provided in 'newArray_' and 'newArray' methods, then the elements
-- are left unspecified, i.e., the solver is free to choose any value. This is the right thing
-- to do if arrays are used as inputs to functions to be verified, typically.
--
-- While it's certainly possible for user to create instances of 'SymArray', the
-- 'SArray' and 'SFunArray' instances already provided should cover most use cases
-- in practice. (There are some differences between these models, however, see the corresponding
-- declaration.)
--
--
-- Minimal complete definition: All methods are required, no defaults.
class SymArray array where
  -- | Create a new array, with an optional initial value
  newArray_   :: (HasKind a, HasKind b) => Maybe (SBV b) -> Symbolic (array a b)
  -- | Create a named new array, with an optional initial value
  newArray    :: (HasKind a, HasKind b) => String -> Maybe (SBV b) -> Symbolic (array a b)
  -- | Read the array element at @a@
  readArray   :: array a b -> SBV a -> SBV b
  -- | Reset all the elements of the array to the value @b@
  resetArray  :: SymWord b => array a b -> SBV b -> array a b
  -- | Update the element at @a@ to be @b@
  writeArray  :: SymWord b => array a b -> SBV a -> SBV b -> array a b
  -- | Merge two given arrays on the symbolic condition
  -- Intuitively: @mergeArrays cond a b = if cond then a else b@.
  -- Merging pushes the if-then-else choice down on to elements
  mergeArrays :: SymWord b => SBV Bool -> array a b -> array a b -> array a b
-- | Arrays implemented in terms of SMT-arrays: <http://goedel.cs.uiowa.edu/smtlib/theories/ArraysEx.smt2>
--
-- * Maps directly to SMT-lib arrays
--
-- * Reading from an unintialized value is OK and yields an uninterpreted result
--
-- * Can check for equality of these arrays
--
-- * Cannot quick-check theorems using @SArray@ values
--
-- * Typically slower as it heavily relies on SMT-solving for the array theory
--
data SArray a b = SArray (Kind, Kind) (Cached ArrayIndex)
-- | An array index is simple an int value
type ArrayIndex = Int
instance (HasKind a, HasKind b) => Show (SArray a b) where
  show (SArray{}) = "SArray<" ++ showType (undefined :: a) ++ ":" ++ showType (undefined :: b) ++ ">"
-- Every array operation allocates a new slot in the state's array map; the
-- slot index is the map size at insertion time, forced with 'seq' before the
-- write to avoid thunk build-up.
instance SymArray SArray where
  newArray_  = declNewSArray (\t -> "array_" ++ show t)
  newArray n = declNewSArray (const n)
  readArray (SArray (_, bk) f) a = SBV bk $ Right $ cache r
     where r st = do arr <- uncacheAI f st
                     i   <- sbvToSW st a
                     newExpr st bk (SBVApp (ArrRead arr) [i])
  resetArray (SArray ainfo f) b = SArray ainfo $ cache g
     where g st = do amap <- readIORef (rArrayMap st)
                     val <- sbvToSW st b
                     i <- uncacheAI f st
                     let j = IMap.size amap
                     j `seq` modifyIORef (rArrayMap st) (IMap.insert j ("array_" ++ show j, ainfo, ArrayReset i val))
                     return j
  writeArray (SArray ainfo f) a b = SArray ainfo $ cache g
     where g st = do arr  <- uncacheAI f st
                     addr <- sbvToSW st a
                     val  <- sbvToSW st b
                     amap <- readIORef (rArrayMap st)
                     let j = IMap.size amap
                     j `seq` modifyIORef (rArrayMap st) (IMap.insert j ("array_" ++ show j, ainfo, ArrayMutate arr addr val))
                     return j
  mergeArrays t (SArray ainfo a) (SArray _ b) = SArray ainfo $ cache h
    where h st = do ai <- uncacheAI a st
                    bi <- uncacheAI b st
                    ts <- sbvToSW st t
                    amap <- readIORef (rArrayMap st)
                    let k = IMap.size amap
                    k `seq` modifyIORef (rArrayMap st) (IMap.insert k ("array_" ++ show k, ainfo, ArrayMerge ts ai bi))
                    return k
-- | Declare a new symbolic array, with a potential initial value
-- The array's index is its slot in the state's array map; the cached
-- computation tied into the returned 'SArray' simply yields that index.
declNewSArray :: forall a b. (HasKind a, HasKind b) => (Int -> String) -> Maybe (SBV b) -> Symbolic (SArray a b)
declNewSArray mkNm mbInit = do
   let aknd = kindOf (undefined :: a)
       bknd = kindOf (undefined :: b)
   st <- ask
   amap <- liftIO $ readIORef $ rArrayMap st
   let i = IMap.size amap
       nm = mkNm i
   actx <- liftIO $ case mbInit of
                     Nothing   -> return $ ArrayFree Nothing
                     Just ival -> sbvToSW st ival >>= \sw -> return $ ArrayFree (Just sw)
   liftIO $ modifyIORef (rArrayMap st) (IMap.insert i (nm, (aknd, bknd), actx))
   return $ SArray (aknd, bknd) $ cache $ const $ return i
-- | Arrays implemented internally as functions
--
-- * Internally handled by the library and not mapped to SMT-Lib
--
-- * Reading an uninitialized value is considered an error (will throw exception)
--
-- * Cannot check for equality (internally represented as functions)
--
-- * Can quick-check
--
-- * Typically faster as it gets compiled away during translation
--
data SFunArray a b = SFunArray (SBV a -> SBV b)
instance (HasKind a, HasKind b) => Show (SFunArray a b) where
  show (SFunArray _) = "SFunArray<" ++ showType (undefined :: a) ++ ":" ++ showType (undefined :: b) ++ ">"
-- | Lift a function to an array. Useful for creating arrays in a pure context. (Otherwise use `newArray`.)
mkSFunArray :: (SBV a -> SBV b) -> SFunArray a b
mkSFunArray = SFunArray
-- | Handling constraints
-- Constraints are not allowed in code-generation mode; otherwise the boolean
-- is converted to an SW and pushed onto the state's constraint list.
imposeConstraint :: SBool -> Symbolic ()
imposeConstraint c = do st <- ask
                        case runMode st of
                          CodeGen -> error "SBV: constraints are not allowed in code-generation"
                          _       -> do liftIO $ do v <- sbvToSW st c
                                                    modifyIORef (rConstraints st) (v:)
-- | Add a constraint with a given probability
-- With no threshold we always impose @c@. With a threshold @t@ in [0, 1] we
-- impose @c@ with probability @t@ and @c'@ otherwise (drawing from the
-- state's random source); only allowed in concrete (test-gen/quick-check) modes.
addConstraint :: Maybe Double -> SBool -> SBool -> Symbolic ()
addConstraint Nothing  c _  = imposeConstraint c
addConstraint (Just t) c c'
  | t < 0 || t > 1
  = error $ "SBV: pConstrain: Invalid probability threshold: " ++ show t ++ ", must be in [0, 1]."
  | True
  = do st <- ask
       when (not (isConcreteMode (runMode st))) $ error "SBV: pConstrain only allowed in 'genTest' or 'quickCheck' contexts."
       case () of
         () | t > 0 && t < 1 -> liftIO (throwDice st) >>= \d -> imposeConstraint (if d <= t then c else c')
            | t > 0          -> imposeConstraint c
            | True           -> imposeConstraint c'
---------------------------------------------------------------------------------
-- * Cached values
---------------------------------------------------------------------------------
-- | We implement a peculiar caching mechanism, applicable to the use case in
-- implementation of SBV's. Whenever we do a state based computation, we do
-- not want to keep on evaluating it in the then-current state. That will
-- produce essentially a semantically equivalent value. Thus, we want to run
-- it only once, and reuse that result, capturing the sharing at the Haskell
-- level. This is similar to the "type-safe observable sharing" work, but also
-- takes into the account of how symbolic simulation executes.
--
-- See Andy Gill's type-safe obervable sharing trick for the inspiration behind
-- this technique: <http://ittc.ku.edu/~andygill/paper.php?label=DSLExtract09>
--
-- Note that this is *not* a general memo utility!
newtype Cached a = Cached (State -> IO a)
-- | Cache a state-based computation
cache :: (State -> IO a) -> Cached a
cache = Cached
-- | Uncache a previously cached computation
uncache :: Cached SW -> State -> IO SW
uncache = uncacheGen rSWCache
-- | Uncache, retrieving array indexes
uncacheAI :: Cached ArrayIndex -> State -> IO ArrayIndex
uncacheAI = uncacheGen rAICache
-- | Generic uncaching. Note that this is entirely safe, since we do it in the IO monad.
-- The cache is keyed by the stable-name hash of the thunk; inside a hash
-- bucket we search by stable-name equality, so two distinct thunks that
-- collide on the hash still get separate entries.
uncacheGen :: (State -> IORef (Cache a)) -> Cached a -> State -> IO a
uncacheGen getCache (Cached f) st = do
        let rCache = getCache st
        stored <- readIORef rCache
        sn <- f `seq` makeStableName f
        let h = hashStableName sn
        case maybe Nothing (sn `lookup`) (h `IMap.lookup` stored) of
          Just r  -> return r
          Nothing -> do r <- f st
                        r `seq` modifyIORef rCache (IMap.insertWith (++) h [(sn, r)])
                        return r
-- | Representation of SMTLib Program versions, currently we only know of versions 1 and 2.
-- (NB. Eventually, we should just drop SMTLib1.)
data SMTLibVersion = SMTLib1
                   | SMTLib2
                   deriving Eq
-- | Representation of an SMT-Lib program. In between pre and post goes the refuted models
data SMTLibPgm = SMTLibPgm SMTLibVersion ( [(String, SW)]  -- alias table
                                         , [String]        -- pre: declarations.
                                         , [String])       -- post: formula
instance NFData SMTLibVersion
instance NFData SMTLibPgm
-- Rendering simply joins the declarations and the formula with newlines;
-- the alias table is not printed.
instance Show SMTLibPgm where
  show (SMTLibPgm _ (_, pre, post)) = intercalate "\n" $ pre ++ post
-- Other Technicalities..
-- NFData instances so results can be fully forced before being handed to
-- solver back-ends. Empty instances below rely on the default 'rnf'
-- (which, in this version of deepseq, reduces only to WHNF).
instance NFData CW where
  rnf (CW x y) = x `seq` y `seq` ()
instance NFData Result where
  rnf (Result kindInfo qcInfo cgs inps consts tbls arrs uis axs pgm cstr outs)
        = rnf kindInfo `seq` rnf qcInfo `seq` rnf cgs `seq` rnf inps
                       `seq` rnf consts `seq` rnf tbls `seq` rnf arrs
                       `seq` rnf uis `seq` rnf axs `seq` rnf pgm
                       `seq` rnf cstr `seq` rnf outs
instance NFData Kind
instance NFData ArrayContext
instance NFData SW
instance NFData SBVExpr
instance NFData Quantifier
instance NFData SBVType
instance NFData UnintKind
instance NFData a => NFData (Cached a) where
  rnf (Cached f) = f `seq` ()
instance NFData a => NFData (SBV a) where
  rnf (SBV x y) = rnf x `seq` rnf y `seq` ()
instance NFData SBVPgm
-- | Translation tricks needed for specific capabilities afforded by each solver
-- One record per back-end; consulted when generating SMT-Lib to decide which
-- features may be emitted for the solver at hand.
data SolverCapabilities = SolverCapabilities {
         capSolverName              :: String       -- ^ Name of the solver
       , mbDefaultLogic             :: Maybe String -- ^ set-logic string to use in case not automatically determined (if any)
       , supportsMacros             :: Bool         -- ^ Does the solver understand SMT-Lib2 macros?
       , supportsProduceModels      :: Bool         -- ^ Does the solver understand produce-models option setting
       , supportsQuantifiers        :: Bool         -- ^ Does the solver understand SMT-Lib2 style quantifiers?
       , supportsUninterpretedSorts :: Bool         -- ^ Does the solver understand SMT-Lib2 style uninterpreted-sorts
       , supportsUnboundedInts      :: Bool         -- ^ Does the solver support unbounded integers?
       , supportsReals              :: Bool         -- ^ Does the solver support reals?
       , supportsFloats             :: Bool         -- ^ Does the solver support single-precision floating point numbers?
       , supportsDoubles            :: Bool         -- ^ Does the solver support double-precision floating point numbers?
       }
| {
"content_hash": "90b4d92ea729f654119cfc7eaf41c992",
"timestamp": "",
"source": "github",
"line_count": 1268,
"max_line_length": 187,
"avg_line_length": 46.9077287066246,
"alnum_prop": 0.5773130012273239,
"repo_name": "dylanmc/cryptol",
"id": "2939167bce88b8eee6ba98bd73448a5b5692da54",
"size": "59859",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sbv/Data/SBV/BitVectors/Data.hs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Haskell",
"bytes": "1083932"
},
{
"name": "Perl",
"bytes": "31978"
},
{
"name": "Python",
"bytes": "101047"
},
{
"name": "Shell",
"bytes": "515"
},
{
"name": "TeX",
"bytes": "711722"
}
],
"symlink_target": ""
} |
var Sprite = require('../display/Sprite'),
inherit = require('../utils/inherit');
/**
 * Base Tile implementation, a tile is a single tile in a tilemap layer
 *
 * @class Tile
 * @extends Sprite
 * @constructor
 * @param texture {Texture} The texture of the tile
 */
var Tile = function(texture) {
    //call base ctor
    Sprite.call(this, texture);
};
// Tile currently adds no members of its own beyond what Sprite provides;
// the empty object literal is the (so far empty) prototype extension.
inherit(Tile, Sprite, {
});
module.exports = Tile;
| {
"content_hash": "a78e03accbee18e04fe5f3bc0f94a09f",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 71,
"avg_line_length": 20.6,
"alnum_prop": 0.6553398058252428,
"repo_name": "grapefruitjs/grapefruit",
"id": "3020d989974f0bc0b4a1fcb99fa41aa7ad1e7147",
"size": "412",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/tilemap/Tile.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "509114"
}
],
"symlink_target": ""
} |
Rails-specific tasks for Capistrano v3:
- `cap deploy:migrate`
- `cap deploy:compile_assets`
## Installation
Add this line to your application's Gemfile:
```ruby
group :development do
gem 'capistrano', '~> 3.1'
gem 'capistrano-rails', '~> 1.1'
end
```
## Usage
Require everything (`bundler`, `rails/assets` and `rails/migrations`):
```ruby
# Capfile
require 'capistrano/rails'
```
Or require just what you need manually:
```ruby
# Capfile
require 'capistrano/bundler' # Rails needs Bundler, right?
require 'capistrano/rails/assets'
require 'capistrano/rails/migrations'
```
Please note that any `require`s should be placed in `Capfile`, not in `config/deploy.rb`.
You can tweak some Rails-specific options in `config/deploy.rb`:
```ruby
# If the environment differs from the stage name
set :rails_env, 'staging'
# Defaults to 'db'
set :migration_role, 'migrator'
# Defaults to false
# Skip migration if files in db/migrate were not modified
set :conditionally_migrate, true
# Defaults to [:web]
set :assets_roles, [:web, :app]
# Defaults to 'assets'
# This should match config.assets.prefix in your rails config/application.rb
set :assets_prefix, 'prepackaged-assets'
# If you need to touch public/images, public/javascripts, and public/stylesheets on each deploy
set :normalize_asset_timestamps, %w{public/images public/javascripts public/stylesheets}
# Defaults to nil (no asset cleanup is performed)
# If you use Rails 4+ and you'd like to clean up old assets after each deploy,
# set this to the number of versions to keep
set :keep_assets, 2
```
### Symlinks
You'll probably want to symlink Rails shared files and directories like `log`, `tmp` and `public/uploads`.
Make sure you enable it by setting `linked_dirs` and `linked_files` options:
```ruby
# deploy.rb
set :linked_dirs, fetch(:linked_dirs, []).push('log', 'tmp/pids', 'tmp/cache', 'tmp/sockets', 'vendor/bundle', 'public/system', 'public/uploads')
set :linked_files, fetch(:linked_files, []).push('config/database.yml', 'config/secrets.yml')
```
## Contributing
1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create a new Pull Request
| {
"content_hash": "ea8d16f379d8c25448716996b701f238",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 145,
"avg_line_length": 27.457831325301203,
"alnum_prop": 0.7270732777534006,
"repo_name": "Coolnesss/Coordinates",
"id": "5534ab3f4bf9813d1aed1de296fc5716f4b1af38",
"size": "2300",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendor/cache/ruby/2.2.0/gems/capistrano-rails-1.1.6/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2510"
},
{
"name": "CoffeeScript",
"bytes": "844"
},
{
"name": "HTML",
"bytes": "16072"
},
{
"name": "JavaScript",
"bytes": "758"
},
{
"name": "Nginx",
"bytes": "802"
},
{
"name": "Ruby",
"bytes": "100899"
}
],
"symlink_target": ""
} |
package docker // import "docker.io/go-docker"
import (
"net/url"
"strconv"
"docker.io/go-docker/api/types/swarm"
"golang.org/x/net/context"
)
// SecretUpdate attempts to update a Secret
//
// Requires API version 1.25 or newer. The given version.Index is sent as the
// "version" query parameter of POST /secrets/{id}/update; presumably swarm
// uses it for optimistic concurrency control — see the Docker Engine API docs.
// The response body is drained and closed; only the error (if any) is returned.
func (cli *Client) SecretUpdate(ctx context.Context, id string, version swarm.Version, secret swarm.SecretSpec) error {
	if err := cli.NewVersionError("1.25", "secret update"); err != nil {
		return err
	}
	query := url.Values{}
	query.Set("version", strconv.FormatUint(version.Index, 10))
	resp, err := cli.post(ctx, "/secrets/"+id+"/update", query, secret, nil)
	ensureReaderClosed(resp)
	return err
}
| {
"content_hash": "ec06d2d6d3b6f6d95f44467a1fea7405",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 119,
"avg_line_length": 28.476190476190474,
"alnum_prop": 0.7073578595317725,
"repo_name": "atomiqio/atomiq",
"id": "5193267e1cfe1d67401ec5a41a73034e13db4d89",
"size": "598",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vendor/docker.io/go-docker/secret_update.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "489260"
},
{
"name": "HCL",
"bytes": "11250"
},
{
"name": "Makefile",
"bytes": "13514"
},
{
"name": "Protocol Buffer",
"bytes": "12519"
},
{
"name": "Shell",
"bytes": "85625"
},
{
"name": "Smarty",
"bytes": "24497"
}
],
"symlink_target": ""
} |
module.exports = [new Date()]; | {
"content_hash": "fc34d5b534307795e627825d3e8b7d9c",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 30,
"avg_line_length": 30,
"alnum_prop": 0.6666666666666666,
"repo_name": "jonschlinkert/kind-of",
"id": "2ebc8938f98a938a21c45b57be323aa377ad106e",
"size": "30",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "benchmark/fixtures/date.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "22227"
}
],
"symlink_target": ""
} |
package com.dianping.paas.controller.processor.step;
import com.dianping.paas.controller.dto.depoly.entity.OperationContext;
import com.dianping.paas.controller.exception.AppExistException;
import com.dianping.paas.controller.exception.AppPlanNotExistException;
import com.dianping.paas.controller.executor.DeployExecutor;
import com.dianping.paas.controller.executor.context.InstanceDeployContext;
import com.dianping.paas.controller.record.OperationListener;
import com.dianping.paas.controller.record.OperationRecorder;
import com.dianping.paas.controller.sequencer.Task;
import com.dianping.paas.controller.sequencer.TaskSequencer;
import com.dianping.paas.core.dal.*;
import com.dianping.paas.core.dal.entity.*;
import com.dianping.paas.core.dto.request.AppInitRequest;
import com.dianping.paas.core.dto.response.AllocateWebPackageResponse;
import com.dianping.paas.core.dto.response.AsyncOperationResponse;
import com.dianping.paas.core.dto.response.ResultCode;
import com.dianping.paas.core.util.HttpUtil;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.io.File;
import java.util.Date;
import java.util.List;
/**
 * Individual steps of the application-initialisation workflow: request
 * validation, persisting the app/config/quota records, creating the default
 * instance groups, allocating and uploading the bootstrap package, and
 * queueing the first (asynchronous) deploy operation.
 *
 * yapu.wang@dianping.com
 * Created by wangyapu on 15/12/15.
 */
@Component
public class AppInitStep {

    private static final Logger logger = LogManager.getLogger(AppInitStep.class);

    @Resource
    private AppDal appDal;

    @Resource
    private AppPlanDal appPlanDal;

    @Resource
    private AppConfigDal appConfigDal;

    @Resource
    private QuotaDal quotaDal;

    @Resource
    private InstanceGroupDal instanceGroupDal;

    @Resource
    private TaskSequencer taskSequencer;

    @Resource
    private DeployExecutor deployExecutor;

    @Resource
    private OperationListener operationListener;

    @Resource
    private OperationRecorder operationRecorder;

    // Version label assigned to the bootstrap package of every new app.
    public static final String APP_INIT_VERSION = "0.0.1-paas";

    /**
     * Validates an incoming init request.
     * NOTE(review): currently a stub that accepts every request; real
     * validation has not been implemented yet.
     */
    public boolean validAppInitRequest(AppInitRequest appInitRequest) {
        return true;
    }

    /**
     * Persists the new application together with its config and quota records.
     *
     * @param appInitRequest the init request (app id, plan id, owner, ...)
     * @param response       marked as failed (with a result code) just before
     *                       an exception is thrown
     * @throws AppPlanNotExistException if the referenced app plan is unknown
     * @throws AppExistException        if an app with the same id already exists
     */
    public void saveAppInfo(AppInitRequest appInitRequest, AsyncOperationResponse response) throws Exception {
        AppPlanEntity appPlan = appPlanDal.findByPK(appInitRequest.getAppPlanId());
        if (appPlan == null) {
            response.fail(ResultCode.DB_APP_PLAN_NOT_EXIST);
            throw new AppPlanNotExistException("the appPlanId is not valid, can not find it database");
        }

        AppEntity app = appDal.findByAppId(appInitRequest.getAppId());
        if (app != null) {
            response.fail(ResultCode.DB_APP_EXIST);
            throw new AppExistException("the app already exists");
        }

        // Basic app record combining request fields with the chosen plan.
        app = new AppEntity();
        app.setAppId(appInitRequest.getAppId());
        app.setCreationDate(new Date());
        app.setLastModifiedDate(new Date());
        app.setLevel(appInitRequest.getLevel());
        app.setOwner(appInitRequest.getOwner());
        app.setType(appInitRequest.getType());
        app.setAppPlanName(appPlan.getName());
        app.setImage(appInitRequest.getImage());
        app.setMachineLabel(appInitRequest.getMachineLabel());

        AppConfigEntity appConfig = new AppConfigEntity();
        appConfig.setAppId(appInitRequest.getAppId());
        appConfig.setCreationDate(new Date());
        appConfig.setLastModifiedDate(new Date());
        appConfig.setLdapBase(appInitRequest.getLdapBase() + ",ou=auth,dc=dianping,dc=com");
        // need phenix kernal?
        appConfig.setWarmUpUrl(appInitRequest.getWarmUpURL());

        // The resource quota is copied from the plan, not from the request.
        QuotaEntity quota = new QuotaEntity();
        quota.setCpu(appPlan.getCpu());
        quota.setCpuSharable(appPlan.getCpuSharable());
        quota.setMaxInstanceCount(appPlan.getMaxInstanceCount());
        quota.setMinInstanceCount(appPlan.getMinInstanceCount());
        quota.setMemory(appPlan.getMemory());

        appConfigDal.insert(appConfig);
        app.setQuota(quota);
        // Quota is inserted before the app so its id can be linked below
        // (presumably insert() populates quota.getId() — confirm DAL behavior).
        quotaDal.insert(quota);
        app.setQuotaId(quota.getId());
        appDal.insert(app);
    }

    /**
     * Creates the two default instance groups for a freshly initialised app.
     */
    public void saveInstanceGroupInfo(AppInitRequest appInitRequest) {
        InstanceGroupEntity instanceGroup1 = new InstanceGroupEntity();
        String appId = appInitRequest.getAppId();
        instanceGroup1.setAppId(appId);
        instanceGroup1.setMaxInstance(1);
        instanceGroup1.setName("paas group1");
        instanceGroupDal.insert(instanceGroup1);

        InstanceGroupEntity instanceGroup2 = new InstanceGroupEntity();
        instanceGroup2.setAppId(appId);
        instanceGroup2.setMaxInstance(10);
        instanceGroup2.setName("paas group2");
        instanceGroupDal.insert(instanceGroup2);
    }

    /**
     * Asks the deploy executor to allocate repository space for the app's
     * bootstrap package.  Failures are logged and an empty (default) response
     * is returned instead of propagating the exception.
     */
    public AllocateWebPackageResponse allocatePkgToRepository(AppInitRequest appInitRequest) {
        AllocateWebPackageResponse allocateWebPackageResponse = new AllocateWebPackageResponse();
        try {
            allocateWebPackageResponse = deployExecutor.allocateRespository(appInitRequest.getAppId(), APP_INIT_VERSION, "");
        } catch (Exception e) {
            logger.error("error when allocate package", e);
        }
        return allocateWebPackageResponse;
    }

    /**
     * Uploads the bootstrap package to the allocated repository upload URL.
     * NOTE(review): uses a hard-coded local test archive.
     */
    public void uploadPkgToRepository(AllocateWebPackageResponse allocateWebPackageResponse) throws Exception {
        // file to test
        File file = new File("/data/appdatas/paas/hello.war");
        HttpUtil.post(allocateWebPackageResponse.getUploadUrl(), file);
    }

    /**
     * Queues the initial create operation for the app and returns its
     * operation id; the actual instance creation runs asynchronously via the
     * task sequencer.
     */
    public long receiveOperationId(final AppInitRequest appInitRequest) {
        final OperationContext opCtx = new OperationContext();
        opCtx.setAppId(appInitRequest.getAppId());

        long opId = taskSequencer.queueAndRun(opCtx, OperationEntity.TYPE_CREATE, new Task() {
            @Override
            public void execute() {
                initInstance(opCtx, APP_INIT_VERSION, appInitRequest.getInstanceCount());
            }
        });
        return opId;
    }

    /**
     * Creates the requested number of instances for the app.  Runtime
     * failures are logged rather than propagated, because this runs inside an
     * asynchronous task.
     * NOTE(review): the appInitVersion parameter is currently unused.
     */
    private void initInstance(OperationContext opCtx, String appInitVersion, int instanceCount) {
        AppConfigEntity appConfig = appConfigDal.findByAppId(opCtx.getAppId());
        AppEntity app = appDal.findByAppId(opCtx.getAppId());
        List<InstanceDeployContext> instanceDeployContexts = deployExecutor.prepareInstanceDeployContext(opCtx, app, appConfig, instanceCount, false);
        try {
            operationListener.init(opCtx, operationRecorder);
            deployExecutor.create(opCtx, instanceDeployContexts, operationListener);
        } catch (RuntimeException e) {
            logger.error("Unexpected exception in deploy", e);
        }
    }
}
| {
"content_hash": "f0799319b76a15ba1559312e7077c92c",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 150,
"avg_line_length": 37.20670391061452,
"alnum_prop": 0.7183183183183183,
"repo_name": "dianping/Dolphin",
"id": "669536953a0027e4348e03343d335fad3b81a323",
"size": "6660",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "controller/src/main/java/com/dianping/paas/controller/processor/step/AppInitStep.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "FreeMarker",
"bytes": "72"
},
{
"name": "HTML",
"bytes": "347"
},
{
"name": "Java",
"bytes": "255032"
}
],
"symlink_target": ""
} |
template: post.html
title: D3 to MP4
date: 2017-11-25
permalink: /d3-mp4
draft: false
---
Generating a [high-resolution video](https://www.nytimes.com/interactive/2018/01/24/world/is-there-something-wrong-with-democracy.html) from a d3 animation is tricky. [LICEcap](https://www.cockos.com/licecap/) and QuickTime screen recording work in a pinch, but they aren't scriptable and lose FPS without a beefy video card.
Noah Veltman has [written about](https://github.com/veltman/gifs) and [presented](http://slides.com/veltman/d3unconf/#/) different techniques for exporting d3 graphics. The best way I've found of exporting video comes from him and uses a delightful hack: [modifying time itself](https://blocks.roadtolarissa.com/veltman/5de325668417b1d504dc).
## Mutate Time
Inside of your clientside code, overwrite [performance.now](https://developer.mozilla.org/en-US/docs/Web/API/Performance/now) with a function that returns the `currentTime` variable. This will let us control what time `d3-timer` and `d3-transition` think it is.
```js
if (document.URL.includes('d3-video-recording')){
window.currentTime = 0
performance.now = () => currentTime
}
```
This code only runs if the url contains `d3-video-recording`, making it easy to toggle between automatic and manual animations with a query string.
## Take Screenshots
[puppeteer](https://github.com/GoogleChrome/puppeteer) loads the page, moving time forward slowly and taking a screenshot over and over again. Even though each screenshot takes over [half a second](https://bugs.chromium.org/p/chromium/issues/detail?id=741689&can=1&q=is%3Astarred%20&colspec=ID%20Pri%20M%20Stars%20ReleaseBlock%20Component%20Status%20Owner%20Summary%20OS%20Modified) to render, controlling the browser's perception of time ensures no frames are dropped.
```js
const puppeteer = require('puppeteer')
const d3 = require('d3')
;(async () => {
// open new tab and wait for data to load
const browser = await puppeteer.launch()
const page = await browser.newPage()
await page.goto('http://localhost:1337?d3-video-recording')
await sleep(5000)
// step through each frame:
// - increment currentTime on the page
// - save a screenshot
for (let frame of d3.range(120)){
await page.evaluate((frame) => currentTime = frame*1000/60, frame)
await sleep(50)
let path = __dirname + '/png/' + d3.format('05')(frame) + '.png'
await page.setViewport({width: 1920, height: 1080, deviceScaleFactor: 2})
const chartEl = await page.$('.chart')
await chartEl.screenshot({path})
}
browser.close()
})()
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms))
}
```
## Convert to Video
Finally, convert the directory of screenshots to a [video](https://www.nytimes.com/interactive/2018/01/24/world/is-there-something-wrong-with-democracy.html):
```bash
ffmpeg -framerate 60 -pattern_type glob -i 'png/*.png' video.mp4
```
For quick thumbnail previews, check out [gistsnap](https://github.com/1wheel/gistsnap). A similar CLI tool for video could be useful, but I'm not sure passing flags to control the FPS, delay, number of frames, crop area and query string is easier than coding them directly. | {
"content_hash": "04831cd068756170fcee2043ab46f88f",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 470,
"avg_line_length": 44.541666666666664,
"alnum_prop": 0.742126598066729,
"repo_name": "1wheel/roadtolarissa",
"id": "a454922b3642ca12a655c6bd56c77d3b6fc51f99",
"size": "3211",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/_posts/2017-11-25-d3-mp4.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "75492"
},
{
"name": "HTML",
"bytes": "164513"
},
{
"name": "JavaScript",
"bytes": "2134472"
},
{
"name": "Shell",
"bytes": "349"
}
],
"symlink_target": ""
} |
#ifndef TENSORFLOW_STREAM_EXECUTOR_GPU_REDZONE_ALLOCATOR_H_
#define TENSORFLOW_STREAM_EXECUTOR_GPU_REDZONE_ALLOCATOR_H_
#include <vector>
#include "tensorflow/core/lib/math/math_util.h"
#include "tensorflow/core/platform/stream_executor_no_cuda.h"
#include "tensorflow/stream_executor/device_memory_allocator.h"
#include "tensorflow/stream_executor/gpu/asm_compiler.h"
#include "tensorflow/stream_executor/gpu/gpu_asm_opts.h"
namespace stream_executor {
// An allocator that allocates a bit of extra memory around the beginning/end of
// every allocation and can check that this memory is unmodified.
//
// This can be used to check for out-of-bounds writes, and, if the redzone is
// filled with a sufficiently "ugly" pattern, may also be able to check for
// out-of-bounds reads. The default fill pattern of -1 is an unusual NaN
// pattern when interpreted as a floating-point number, so hopefully works for
// out-of-bounds reads and writes in those cases.
//
// This class implements ScratchAllocator, so can be used to allocate temp
// memory for cudnn convolutions.
class RedzoneAllocator : public ScratchAllocator {
 public:
  static constexpr int64_t kDefaultMemoryLimit = 1LL << 32;  // 4GB
  static constexpr int64_t kDefaultRedzoneSize =
      1LL << 23;  // 8MiB per side, 16MiB total.
  static constexpr uint8 kDefaultRedzonePattern = -1;

  // stream: stream used to run the redzone fill/check operations.
  // memory_allocator: underlying allocator the device memory comes from.
  // gpu_compilation_opts_: options used when compiling the redzone-checking
  // GPU kernel.
  // memory_limit / redzone_size / redzone_pattern: see the class comment and
  // the member documentation below; all default to the kDefault* constants.
  RedzoneAllocator(Stream* stream, DeviceMemoryAllocator* memory_allocator,
                   GpuAsmOpts gpu_compilation_opts_,
                   int64_t memory_limit = kDefaultMemoryLimit,
                   int64_t redzone_size = kDefaultRedzoneSize,
                   uint8 redzone_pattern = kDefaultRedzonePattern);

  // Redzones don't count towards the memory limit.
  int64_t GetMemoryLimitInBytes() override { return memory_limit_; }

  // Total bytes handed out to users so far, excluding the redzones that pad
  // each allocation.
  int64_t TotalAllocatedBytesExcludingRedzones() const {
    return allocated_bytes_excluding_redzones_;
  }

  // Allocates byte_size bytes of user-visible memory (surrounded by redzones).
  port::StatusOr<DeviceMemory<uint8>> AllocateBytes(int64_t byte_size) override;

  // Non-empty redzone check status implies that there was a write into a
  // redzone, with a string communicating the location of the write.
  struct RedzoneCheckStatus {
    RedzoneCheckStatus() = default;

    RedzoneCheckStatus(absl::string_view buffer_name, void* user_buffer_address,
                       int64_t offset, uint64_t expected_value,
                       uint64_t actual_value)
        : buffer_name(buffer_name),
          user_buffer_address(user_buffer_address),
          offset(offset),
          expected_value(expected_value),
          actual_value(actual_value) {}

    // The OK (no violation) status is encoded as a null user_buffer_address.
    static RedzoneCheckStatus OK() { return {}; }

    bool ok() { return user_buffer_address == nullptr; }

    // Human-readable description of the detected redzone violation.
    std::string RedzoneFailureMsg() const;

    std::string buffer_name = {};
    void* user_buffer_address = nullptr;
    int64_t offset = 0;
    uint64_t expected_value = 0;
    uint64_t actual_value = 0;
  };

  // Determines whether redzones around all allocated buffers are unmodified.
  //
  // Reinitializes redzones to the expected value, so that the same buffer
  // could be reused for multiple checks.
  //
  // Returns:
  //
  //  - RedzoneCheckStatus::OK() if everything went well.
  //  - RedzoneCheckStatus with a non-empty error message iff a write into a
  //    redzone has been detected.
  //  - A stream error, if loading or launching the kernel has failed.
  port::StatusOr<RedzoneCheckStatus> CheckRedzones() const;

 private:
  const int device_ordinal_;
  Stream* stream_;

  // Memory limit of the allocator in bytes.
  const int64_t memory_limit_;

  // Redzone size on *one side* of allocation in bytes.
  //
  // Must be a multiple of kXlaAllocatedBufferAlignBytes, otherwise the buffers
  // returned to users will be misaligned.
  const int64_t redzone_size_;

  const uint8 redzone_pattern_;
  DeviceMemoryAllocator* memory_allocator_;
  GpuAsmOpts gpu_compilation_opts_;

  // The second element of the pair is the size of the user allocation.  This
  // isn't necessarily just first.size() - 2 * redzone_size_ because when the
  // user allocation size is not a multiple of 4 bytes, we round up the size of
  // the RHS redzone.
  //
  // ScratchAllocators need to free all allocated memory on destruction so we
  // use `OwningDeviceMemory` here.
  std::vector<std::pair<OwningDeviceMemory, int64_t>> allocated_buffers_;

  int64_t allocated_bytes_excluding_redzones_ = 0;
};
} // namespace stream_executor
#endif // TENSORFLOW_STREAM_EXECUTOR_GPU_REDZONE_ALLOCATOR_H_
| {
"content_hash": "12257f3c649f9be9411d3f22dbab4be1",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 80,
"avg_line_length": 37.90756302521008,
"alnum_prop": 0.7133673243183329,
"repo_name": "frreiss/tensorflow-fred",
"id": "133a3f365862c508d641b5b4ee033a304cf39fc1",
"size": "5179",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/stream_executor/gpu/redzone_allocator.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "6729"
},
{
"name": "Batchfile",
"bytes": "49527"
},
{
"name": "C",
"bytes": "871761"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "79093233"
},
{
"name": "CMake",
"bytes": "6500"
},
{
"name": "Dockerfile",
"bytes": "110545"
},
{
"name": "Go",
"bytes": "1852128"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "961600"
},
{
"name": "Jupyter Notebook",
"bytes": "549457"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1644156"
},
{
"name": "Makefile",
"bytes": "62398"
},
{
"name": "Objective-C",
"bytes": "116558"
},
{
"name": "Objective-C++",
"bytes": "303063"
},
{
"name": "PHP",
"bytes": "20523"
},
{
"name": "Pascal",
"bytes": "3982"
},
{
"name": "Pawn",
"bytes": "18876"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "40003007"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Roff",
"bytes": "2472"
},
{
"name": "Ruby",
"bytes": "7464"
},
{
"name": "Shell",
"bytes": "681596"
},
{
"name": "Smarty",
"bytes": "34740"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Ann. Wetter. Gesellsch. Ges. Naturk. 2: 102 (1810)
#### Original name
Uredo pimpinellae F. Strauss, 1810
### Remarks
null | {
"content_hash": "a455f2ff490d4aeac7191f8ea37102db",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 50,
"avg_line_length": 16.076923076923077,
"alnum_prop": 0.7033492822966507,
"repo_name": "mdoering/backbone",
"id": "303eb30857b890a958a4e8c50819fc38059da405",
"size": "267",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Basidiomycota/Pucciniomycetes/Pucciniales/Pucciniaceae/Puccinia/Puccinia pimpinellae/ Syn. Uredo pimpinellae/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
extern "C"
{
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include "VG/openvg.h"
#include "VG/vgu.h"
#include "fontinfo.h"
#include "shapes.h"
}
#endif
#endif
#ifndef rpi
// Stub so code mentioning VGImage still compiles on non-Raspberry Pi builds
// (the real OpenVG headers are only included under the `rpi` define above).
class VGImage {
};
#endif
// Cache entry pairing an OpenVG image handle with a usage count.
struct IMAGE_CACHE {
    VGImage image;  // cached image handle (stub type off-Pi)
    int count;      // usage count — presumably managed by create/release; confirm in the .cpp
};
// Tracks OpenVG images by file path, each with a usage count, so callers can
// share a single image handle per path.
class ImageController
{
public:
    ImageController(void);
    ~ImageController(void);

    // Global accessor for the shared controller instance.
    static ImageController& getInstance();

    // Registers/loads the image at the given path into the cache.
    void createImage(std::string path);

    // Releases one use of the cached image at the given path.
    // NOTE(review): exact refcount semantics live in the .cpp — confirm there.
    void releaseImage(std::string path);

    // Re-creates all cached images (presumably after the rendering context
    // changes — confirm in the implementation).
    void rebuildImages();

    // Returns the cached VGImage handle for the given path.
    VGImage getImage(std::string path);

private:
    // Cache keyed by image path; entries carry the handle plus a use count.
    std::map<std::string, IMAGE_CACHE>* imagesByPath;
};
| {
"content_hash": "abaa4539a9afa31f079a2e582eace6d8",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 50,
"avg_line_length": 12.458333333333334,
"alnum_prop": 0.7040133779264214,
"repo_name": "drake7707/RpiOpenVGMenu",
"id": "84fb08895f05bf60e134e519eeacf863c064b13d",
"size": "704",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "OpenVGRenderer/OpenVGRenderer/ImageController.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "603950"
},
{
"name": "C",
"bytes": "54144"
},
{
"name": "C#",
"bytes": "202339"
},
{
"name": "C++",
"bytes": "29681"
},
{
"name": "Go",
"bytes": "63775"
},
{
"name": "Makefile",
"bytes": "2817"
},
{
"name": "Shell",
"bytes": "825"
}
],
"symlink_target": ""
} |
<?php

// Integration test for Repository::get() and m:hasOne relations in the
// Joseki LeanMapper extension.  It runs against a real database fixture, so
// concurrent test runs are serialised with a shared lock.

use Tester\Assert;

require_once __DIR__ . '/../bootstrap.php';

\Tester\Environment::lock('database', LOCK_DIR);

// Load the book/author schema and fixture data.
$connection->loadFile(__DIR__ . '/db/book_author.sql');

// NOTE: the @property docblocks below are parsed by LeanMapper and define the
// entity columns/relations — do not edit them casually.
/**
 * @property int $id
 * @property string $name
 */
class Author extends \Joseki\LeanMapper\BaseEntity
{
}

/**
 * @property int $id (id)
 * @property string $name
 * @property Author $author m:hasOne(author:)
 */
class Book extends \Joseki\LeanMapper\BaseEntity
{
}

class AuthorRepository extends \Joseki\LeanMapper\Repository
{
}

class BookRepository extends \Joseki\LeanMapper\Repository
{
}

// Map each table to its repository class.
$mapper->registerTable('author', 'AuthorRepository');
$mapper->registerTable('book', 'BookRepository');

$authorRepository = new AuthorRepository($connection, $mapper, $entityFactory);
$bookRepository = new BookRepository($connection, $mapper, $entityFactory);

// get() should fetch a single row by primary key; verify the generated SQL.
$author = $authorRepository->get(1);
Assert::equal(' SELECT `author`.* FROM `author` WHERE (`author`.`id` = 1) ORDER BY `id` LIMIT 1', \dibi::$sql);

$book = $bookRepository->get(2);
Assert::equal(' SELECT `book`.* FROM `book` WHERE (`book`.`id` = 2) ORDER BY `id` LIMIT 1', \dibi::$sql);

// Assigning an entity to an m:hasOne property should preserve the entity type.
$book = new Book();
$book->author = $author;
Assert::true($book->author instanceof Author);
Assert::equal(1, $book->author->id);
| {
"content_hash": "b77ef45eb39d09bdb14ca0b30ee89e0b",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 111,
"avg_line_length": 23.830188679245282,
"alnum_prop": 0.6920031670625495,
"repo_name": "Joseki/LeanMapper-extension",
"id": "d6f3e6895e1019bc7b7b0c2a79856afb1a59f0c9",
"size": "1263",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/JosekiTests/LeanMapperExtension/Repository.get.phpt",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "300"
},
{
"name": "PHP",
"bytes": "65450"
},
{
"name": "Shell",
"bytes": "1002"
}
],
"symlink_target": ""
} |
# Automatically mount this feature's engine into the host application.
InvitesFeature::Engine.automount!

# Routes contributed by the invites feature (none defined yet).
InvitesFeature::Engine.routes.draw do
end
| {
"content_hash": "a14844068ef5f1cff24172edbaaea81a",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 37,
"avg_line_length": 25.333333333333332,
"alnum_prop": 0.8421052631578947,
"repo_name": "balvig/chili",
"id": "d2cfd4cb7a66531899fac7ce271c767f2c02f53b",
"size": "76",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/example_app/lib/chili/invites_feature/config/routes.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3378"
},
{
"name": "JavaScript",
"bytes": "1797"
},
{
"name": "Ruby",
"bytes": "40400"
}
],
"symlink_target": ""
} |
// Demo wiring for the amazeui areaselect jQuery plugin.  Each IIFE below
// demonstrates one usage pattern against a matching <input name="areaN">
// element on the demo page.
$(document).ready(function () {
    console.log("document ready");

    // Demo 1: default options and direct access to the plugin instance.
    (function demo1() {
        var $area1Input = $("input[name=area1]").areaselect()
        // The plugin stores its instance in the element's data under this key.
        var area1SelectInstance = $area1Input.data('amui.areaselect');
        // Drive the instance from the keyboard to exercise its public API.
        $(document).on("keyup", function (e) {
            if (e.key == '`') {
                area1SelectInstance.open();
            }
            if (e.key == 'Escape') {
                area1SelectInstance.close();
            }
            if (e.key == 'Backspace') {
                area1SelectInstance.back();
            }
            if (e.key == 'Delete') {
                // NOTE(review): "destory" is the plugin's (misspelled) method name.
                area1SelectInstance.destory();
            }
            if (e.key == 's') {
                console.info("demo1 selectedArea:")
                console.log(area1SelectInstance.selectedArea);
            }
        })
    })();

    // Demo 2: event listeners — both option callbacks and jQuery events.
    (function demo2() {
        var $area2Input = $("input[name=area2]").areaselect({
            allSelectedCallback: function (selectedAreaObjArr) {
                console.info("demo2 options allSelectedCallback fired");
                console.log(arguments)
            },
            selectedCallback: function (selectedAreaObj) {
                console.info("demo2 options selectedCallback fired");
                console.log(arguments)
            }
        });
        $area2Input.on("open.areaselect.amui", function (e) {
            console.info("demo2 open.areaselect.amui fired");
            console.log(arguments)
        })
        $area2Input.on("close.areaselect.amui", function (e) {
            console.info("demo2 close.areaselect.amui fired");
            console.log(arguments)
        })
        $area2Input.on("back.areaselect.amui", function (e) {
            console.info("demo2 back.areaselect.amui fired");
            console.log(arguments)
        })
        $area2Input.on("selected.areaselect.amui", function (e, selectedAreaObj) {
            console.info("demo2 selected.areaselect.amui fired");
            console.log(arguments)
        })
        $area2Input.on("allselected.areaselect.amui", function (e, selectedAreaObjArr) {
            console.info("demo2 allselected.areaselect.amui fired");
            console.log(arguments)
        })
    })();

    // Demo 3: custom in-memory data source with custom key names.
    (function demo3() {
        var $area3Input = $("input[name=area3]").areaselect({
            data: [
                { careaName: 'A', csub: [{ careaName: 'A1', careaValue: 'A1V' }, { careaName: 'A2', careaValue: 'A2V' },] },
                { careaName: 'B', csub: [{ careaName: 'B1', careaValue: 'B1V' }, { careaName: 'B2', careaValue: 'B2V' },] },
                { careaName: 'C', csub: [{ careaName: 'C1', careaValue: 'C1V' }, { careaName: 'C2', careaValue: 'C2V', csub: [{ careaName: 'C21', careaValue: 'C21V' }] },] },
            ],
            areaNameKey: 'careaName',
            areaValueKey: 'careaValue',
            subAreaKey: 'csub'
        });
    })();

    // Demo 4: custom asynchronous data source (URL built per selection).
    (function demo4() {
        var $area4Input = $("input[name=area4]").areaselect({
            dataUrl: function (selectedAreaObj) {
                // data from mock.js , please "npm install && npm run dev"
                if (selectedAreaObj) {
                    return "/mock/area?areaId=" + selectedAreaObj.areaValue;
                }
                else {
                    return "/mock/area";
                }
            }
        });
    })();

    // Demo 5: custom asynchronous data source returning a Promise.
    (function demo5() {
        var $area5Input = $("input[name=area5]").areaselect({
            dataUrl: function (selectedAreaObj) {
                // data from mock.js , please "npm install && npm run dev"
                var url = "/mock/area";
                return $.ajax(url, {
                    data: selectedAreaObj ? { areaId: selectedAreaObj.areaValue } : ""
                }).then(function (resData) {
                    // handle resData
                    console.info(resData)
                    return resData;
                })
            }
        });
    })();

    // Demo 6: custom name for the generated input[type=hidden].
    (function demo6() {
        // The hidden input is looked up by name; if several match, the
        // siblings of the bound input are searched instead.
        var $area6Input = $("input[name=area6]").areaselect({
            inputValueName: function (inputName) {
                console.log(inputName);
                return "customValue";
            },
            //inputValueName:"customValue",// may also be a plain String
        });
    })();
});
"content_hash": "293a8a98d8adf1f3a663c5beccb8afcd",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 174,
"avg_line_length": 32.2463768115942,
"alnum_prop": 0.4995505617977528,
"repo_name": "czl032405/amazeui-areaselect",
"id": "48e5135436a2479b486a6b9a00d98646526b946e",
"size": "4574",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/demo.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1275"
},
{
"name": "JavaScript",
"bytes": "32346"
}
],
"symlink_target": ""
} |
/*
* Copyright 2006-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openspaces.admin.transport;
/**
 * Details (non-changeable information) of a single transport.
 *
 * @author kimchy
 */
public interface TransportDetails {

    /**
     * Returns the local host address of the transport.
     *
     * @see java.net.InetAddress#getLocalHost()
     * @see java.net.InetAddress#getHostAddress()
     */
    String getHostAddress();

    /**
     * Returns the local host name of the transport.
     *
     * @see java.net.InetAddress#getLocalHost()
     * @see java.net.InetAddress#getHostName()
     */
    String getHostName();

    /**
     * Returns the host name or address the communication layer is bound on.
     */
    String getBindHost();

    /**
     * Returns the port the communication layer is bound on.
     */
    int getPort();

    /**
     * Returns the minimum number of threads configured for the transport communication
     * layer thread pool.
     */
    int getMinThreads();

    /**
     * Returns the maximum number of threads configured for the transport communication
     * layer thread pool.
     */
    int getMaxThreads();

    /**
     * Returns <code>true</code> if ssl is enabled for the transport.
     */
    boolean isSslEnabled();
}
| {
"content_hash": "42ee2625bfb2fe590ae0b4df3f5354a7",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 87,
"avg_line_length": 26.585714285714285,
"alnum_prop": 0.6689951638903815,
"repo_name": "Gigaspaces/xap-openspaces",
"id": "221355193bacf10445c3d3305dffbd449c6a63ab",
"size": "2481",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/org/openspaces/admin/transport/TransportDetails.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1107"
},
{
"name": "Groovy",
"bytes": "5372"
},
{
"name": "HTML",
"bytes": "13501"
},
{
"name": "Java",
"bytes": "8666667"
},
{
"name": "Shell",
"bytes": "917"
}
],
"symlink_target": ""
} |
#pragma once
#include "paddle/framework/eigen.h"
#include "paddle/framework/op_registry.h"
namespace paddle {
namespace operators {
// Forward kernel for the reshape operator: copies input "X" into output "Out"
// and then rewrites Out's dimension metadata to the target shape given by the
// "shape" attribute (reshape does not move data).
template <typename Place, typename T>
class ReshapeKernel : public framework::OpKernel {
 public:
  void Compute(const framework::ExecutionContext& ctx) const {
    auto* out = ctx.Output<framework::Tensor>("Out");
    auto* in = ctx.Input<framework::Tensor>("X");
    out->mutable_data<T>(ctx.GetPlace());

    // The attribute arrives as vector<int>; DDim wants int64_t, so widen it.
    auto shape = ctx.Attr<std::vector<int>>("shape");
    std::vector<int64_t> shape_int64(shape.size(), 0);
    std::transform(shape.begin(), shape.end(), shape_int64.begin(),
                   [](int a) { return static_cast<int64_t>(a); });
    auto out_dims = framework::make_ddim(shape_int64);

    // Copy the data first, then adjust the dims to the requested shape.
    out->CopyFrom<T>(*in, ctx.GetPlace());
    out->Resize(out_dims);
  }
};
// Backward kernel for reshape: the gradient w.r.t. X is the gradient of Out
// with X's original dimensions restored.
template <typename Place, typename T>
class ReshapeGradKernel : public framework::OpKernel {
 public:
  void Compute(const framework::ExecutionContext& ctx) const {
    auto* d_out = ctx.Input<framework::Tensor>(framework::GradVarName("Out"));
    auto* d_x = ctx.Output<framework::Tensor>(framework::GradVarName("X"));
    d_x->mutable_data<T>(ctx.GetPlace());

    // Save d_x's dims up front and restore them after the copy, since the
    // copy leaves d_x with d_out's dims.
    auto in_dims = d_x->dims();
    d_x->CopyFrom<T>(*d_out, ctx.GetPlace());
    d_x->Resize(in_dims);
  }
};
} // namespace operators
} // namespace paddle
| {
"content_hash": "9fcdce485c78b562fe601f6425881744",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 78,
"avg_line_length": 31.27906976744186,
"alnum_prop": 0.6602230483271375,
"repo_name": "hedaoyuan/Paddle",
"id": "873acf30782d390cdca5e7e864c76e1f743f9a7c",
"size": "1959",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "paddle/operators/reshape_op.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "274462"
},
{
"name": "C++",
"bytes": "4346291"
},
{
"name": "CMake",
"bytes": "190176"
},
{
"name": "CSS",
"bytes": "21730"
},
{
"name": "Cuda",
"bytes": "659848"
},
{
"name": "Go",
"bytes": "99765"
},
{
"name": "HTML",
"bytes": "8941"
},
{
"name": "JavaScript",
"bytes": "1025"
},
{
"name": "Perl",
"bytes": "11452"
},
{
"name": "Protocol Buffer",
"bytes": "54094"
},
{
"name": "Python",
"bytes": "1436674"
},
{
"name": "Shell",
"bytes": "134723"
}
],
"symlink_target": ""
} |
package org.bouncycastle.math.ec.custom.sec;
import java.math.BigInteger;
import org.bouncycastle.math.ec.ECCurve;
import org.bouncycastle.math.ec.ECFieldElement;
import org.bouncycastle.math.raw.Nat256;
import org.bouncycastle.util.Arrays;
public class SecT233FieldElement extends ECFieldElement
{
protected long[] x;
public SecT233FieldElement(BigInteger x)
{
if (x == null || x.signum() < 0 || x.bitLength() > 233)
{
throw new IllegalArgumentException("x value invalid for SecT233FieldElement");
}
this.x = SecT233Field.fromBigInteger(x);
}
public SecT233FieldElement()
{
this.x = Nat256.create64();
}
protected SecT233FieldElement(long[] x)
{
this.x = x;
}
// public int bitLength()
// {
// return x.degree();
// }
public boolean isOne()
{
return Nat256.isOne64(x);
}
public boolean isZero()
{
return Nat256.isZero64(x);
}
public boolean testBitZero()
{
return (x[0] & 1L) != 0L;
}
public BigInteger toBigInteger()
{
return Nat256.toBigInteger64(x);
}
public String getFieldName()
{
return "SecT233Field";
}
public int getFieldSize()
{
return 233;
}
public ECFieldElement add(ECFieldElement b)
{
long[] z = Nat256.create64();
SecT233Field.add(x, ((SecT233FieldElement)b).x, z);
return new SecT233FieldElement(z);
}
public ECFieldElement addOne()
{
long[] z = Nat256.create64();
SecT233Field.addOne(x, z);
return new SecT233FieldElement(z);
}
public ECFieldElement subtract(ECFieldElement b)
{
// Addition and subtraction are the same in F2m
return add(b);
}
public ECFieldElement multiply(ECFieldElement b)
{
long[] z = Nat256.create64();
SecT233Field.multiply(x, ((SecT233FieldElement)b).x, z);
return new SecT233FieldElement(z);
}
public ECFieldElement multiplyMinusProduct(ECFieldElement b, ECFieldElement x, ECFieldElement y)
{
return multiplyPlusProduct(b, x, y);
}
public ECFieldElement multiplyPlusProduct(ECFieldElement b, ECFieldElement x, ECFieldElement y)
{
long[] ax = this.x, bx = ((SecT233FieldElement)b).x;
long[] xx = ((SecT233FieldElement)x).x, yx = ((SecT233FieldElement)y).x;
long[] tt = Nat256.createExt64();
SecT233Field.multiplyAddToExt(ax, bx, tt);
SecT233Field.multiplyAddToExt(xx, yx, tt);
long[] z = Nat256.create64();
SecT233Field.reduce(tt, z);
return new SecT233FieldElement(z);
}
public ECFieldElement divide(ECFieldElement b)
{
return multiply(b.invert());
}
public ECFieldElement negate()
{
return this;
}
public ECFieldElement square()
{
long[] z = Nat256.create64();
SecT233Field.square(x, z);
return new SecT233FieldElement(z);
}
public ECFieldElement squareMinusProduct(ECFieldElement x, ECFieldElement y)
{
return squarePlusProduct(x, y);
}
public ECFieldElement squarePlusProduct(ECFieldElement x, ECFieldElement y)
{
long[] ax = this.x;
long[] xx = ((SecT233FieldElement)x).x, yx = ((SecT233FieldElement)y).x;
long[] tt = Nat256.createExt64();
SecT233Field.squareAddToExt(ax, tt);
SecT233Field.multiplyAddToExt(xx, yx, tt);
long[] z = Nat256.create64();
SecT233Field.reduce(tt, z);
return new SecT233FieldElement(z);
}
public ECFieldElement squarePow(int pow)
{
if (pow < 1)
{
return this;
}
long[] z = Nat256.create64();
SecT233Field.squareN(x, pow, z);
return new SecT233FieldElement(z);
}
public ECFieldElement invert()
{
return new SecT233FieldElement(
ECCurve.AbstractF2m.inverse(233, new int[]{ 74 }, toBigInteger()));
}
public ECFieldElement sqrt()
{
return squarePow(getM() - 1);
}
public int getRepresentation()
{
return ECFieldElement.F2m.TPB;
}
    public int getM()
    {
        // Degree of the binary field extension GF(2^233).
        return 233;
    }
    public int getK1()
    {
        // Exponent of the middle term of the reduction trinomial
        // x^233 + x^74 + 1.
        return 74;
    }
    public int getK2()
    {
        // Unused for a trinomial basis (only pentanomials use k2).
        return 0;
    }
    public int getK3()
    {
        // Unused for a trinomial basis (only pentanomials use k3).
        return 0;
    }
public boolean equals(Object other)
{
if (other == this)
{
return true;
}
if (!(other instanceof SecT233FieldElement))
{
return false;
}
SecT233FieldElement o = (SecT233FieldElement)other;
return Nat256.eq64(x, o.x);
}
    public int hashCode()
    {
        // The constant 2330074 encodes the field parameters (m=233, k=74)
        // and is mixed with a hash over the element's 4 limbs.
        return 2330074 ^ Arrays.hashCode(x, 0, 4);
    }
}
| {
"content_hash": "f2ed18370a62de1873ccd6eb4f036419",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 100,
"avg_line_length": 22.141552511415526,
"alnum_prop": 0.5879562796452877,
"repo_name": "isghe/bc-java",
"id": "5c04a3eb833dde793a0b3383782c0ff789b7c86c",
"size": "4849",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "core/src/main/java/org/bouncycastle/math/ec/custom/sec/SecT233FieldElement.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "55371"
},
{
"name": "Java",
"bytes": "23571287"
},
{
"name": "Shell",
"bytes": "74632"
}
],
"symlink_target": ""
} |
/*
* Do not modify this file. This file is generated from the redshift-2012-12-01.normal.json service model.
*/
using System;
using System.Net;
using Amazon.Runtime;
namespace Amazon.Redshift.Model
{
    ///<summary>
    /// Redshift exception raised when a cluster with the requested
    /// identifier already exists.
    /// </summary>
    public class ClusterAlreadyExistsException : AmazonRedshiftException
    {
        /// <summary>
        /// Constructs a new ClusterAlreadyExistsException with the specified error
        /// message.
        /// </summary>
        /// <param name="message">
        /// Describes the error encountered.
        /// </param>
        public ClusterAlreadyExistsException(string message)
            : base(message) {}
        /// <summary>
        /// Construct instance of ClusterAlreadyExistsException
        /// </summary>
        /// <param name="message">Describes the error encountered.</param>
        /// <param name="innerException">The exception that is the cause of this exception.</param>
        public ClusterAlreadyExistsException(string message, Exception innerException)
            : base(message, innerException) {}
        /// <summary>
        /// Construct instance of ClusterAlreadyExistsException
        /// </summary>
        /// <param name="innerException">The exception that is the cause of this exception.</param>
        public ClusterAlreadyExistsException(Exception innerException)
            : base(innerException) {}
        /// <summary>
        /// Construct instance of ClusterAlreadyExistsException
        /// </summary>
        /// <param name="message">Describes the error encountered.</param>
        /// <param name="innerException">The exception that is the cause of this exception.</param>
        /// <param name="errorType">The category of the error.</param>
        /// <param name="errorCode">The error code associated with the exception.</param>
        /// <param name="requestId">The request ID associated with the error.</param>
        /// <param name="statusCode">The HTTP status code of the error response.</param>
        public ClusterAlreadyExistsException(string message, Exception innerException, ErrorType errorType, string errorCode, string requestId, HttpStatusCode statusCode)
            : base(message, innerException, errorType, errorCode, requestId, statusCode) {}
        /// <summary>
        /// Construct instance of ClusterAlreadyExistsException
        /// </summary>
        /// <param name="message">Describes the error encountered.</param>
        /// <param name="errorType">The category of the error.</param>
        /// <param name="errorCode">The error code associated with the exception.</param>
        /// <param name="requestId">The request ID associated with the error.</param>
        /// <param name="statusCode">The HTTP status code of the error response.</param>
        public ClusterAlreadyExistsException(string message, ErrorType errorType, string errorCode, string requestId, HttpStatusCode statusCode)
            : base(message, errorType, errorCode, requestId, statusCode) {}
    }
} | {
"content_hash": "128e87a7daa75847d46096b13db96485",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 171,
"avg_line_length": 38.57575757575758,
"alnum_prop": 0.6154752553024352,
"repo_name": "rafd123/aws-sdk-net",
"id": "dc012971e031016b98f662c8b4e7e9982af251eb",
"size": "3133",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sdk/src/Services/Redshift/Generated/Model/ClusterAlreadyExistsException.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "85386370"
},
{
"name": "CSS",
"bytes": "18119"
},
{
"name": "HTML",
"bytes": "24352"
},
{
"name": "JavaScript",
"bytes": "6576"
},
{
"name": "PowerShell",
"bytes": "12753"
},
{
"name": "XSLT",
"bytes": "7010"
}
],
"symlink_target": ""
} |
Rylviach Site
=============
Python/Django Implementation
Current version is running `here <https://lp-python-aawlsggrnu.now.sh>`_.
Development Setup
-----------------
Install project
^^^^^^^^^^^^^^^
``virtualenvwrapper``
~~~~~~~~~~~~~~~~~~~~~
These instructions assume that you have already set up ``virtualenv`` and ``virtualenvwrapper``.
1. Create a new project with Python 3 with ``mkproject -p `which python3` rylviach_site``
2. Edit your ``.virtualenv/rylviach_site/bin/postactivate.sh`` and add these lines
.. code-block:: bash
export DJANGO_SETTINGS_MODULE=rylviach.settings.dev
export PYTHONPATH=$PYTHONPATH:[path to repo]/rylviach_site
3. Install the requirements with ``pip install -r requirements/dev.txt``
``pyvenv``
~~~~~~~~~~
**TODO** - Silvio will fill this out
Set up database
^^^^^^^^^^^^^^^
**TODO** - Settings changes and superuser setup
Run development server
^^^^^^^^^^^^^^^^^^^^^^
You should now be able to run the basic site with ``django-admin runserver``
**TODO** - Do you need to manually set env vars or specify a settings file with ``--settings`` or use ``--path`` when using ``pyvenv``?
Site Features and Roadmap
-------------------------
Landing Page
^^^^^^^^^^^^
This will be a pretty basic page. It will set up the navigation and project framework. Currently in progress.
Blog
^^^^
We will chronicle our epic learning journey here, along with any other tidbits we find useful.
Continuous Integration
^^^^^^^^^^^^^^^^^^^^^^
We will use at least one CI service. Possibly more than one and do some comparisons. Expect to see findings in the blog!
About Us
^^^^^^^^
This one should be pretty simple.
Contact Us
^^^^^^^^^^
We'll see how setting up emailing goes.
GitHub Tracker
^^^^^^^^^^^^^^
This will track/link to our GitHub repos. Depending on whether the user is logged in and whether that user is an admin, it may show extra information, such as the latest commits. Not that it would be publicly available on our repo, but it should be some good learning.
Insult (or something else) API
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
This is mostly going to be an excuse to play with Django-Rest-Framework.
License
-------
Please see `LICENSE <https://github.com/Rylviach/lp-python/blob/master/LICENSE.txt>`_.
| {
"content_hash": "454fbab0c57b8fe6d4fec74b8de9f96c",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 264,
"avg_line_length": 30.80821917808219,
"alnum_prop": 0.6798577145397955,
"repo_name": "Rylviach/lp-python",
"id": "c21332942c14ecec25ea6b7de9051b5a26673abe",
"size": "2250",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.rst",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "27884"
},
{
"name": "HTML",
"bytes": "12107"
},
{
"name": "JavaScript",
"bytes": "1068"
},
{
"name": "Python",
"bytes": "6348"
},
{
"name": "Shell",
"bytes": "301"
}
],
"symlink_target": ""
} |
namespace Eigen {
// Default device for the machine (typically a single cpu core)
//
// All operations execute synchronously on the calling thread; "device"
// memory is plain aligned heap memory, so host<->device copies degenerate
// to ordinary memcpy.
struct DefaultDevice {
  // Allocate num_bytes of suitably aligned memory.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void* allocate(size_t num_bytes) const {
    return internal::aligned_malloc(num_bytes);
  }
  // Release memory previously obtained from allocate().
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void deallocate(void* buffer) const {
    internal::aligned_free(buffer);
  }
  // Plain synchronous copy of n bytes.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void memcpy(void* dst, const void* src, size_t n) const {
    ::memcpy(dst, src, n);
  }
  // Host and device memory coincide on the default device.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void memcpyHostToDevice(void* dst, const void* src, size_t n) const {
    memcpy(dst, src, n);
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void memcpyDeviceToHost(void* dst, const void* src, size_t n) const {
    memcpy(dst, src, n);
  }
  // Fill n bytes of buffer with the byte value c.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void memset(void* buffer, int c, size_t n) const {
    ::memset(buffer, c, n);
  }
  // Degree of parallelism this device exposes to callers.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE size_t numThreads() const {
#ifndef __CUDA_ARCH__
    // Running on the host CPU
    return 1;
#else
    // Running on a CUDA device
    return 32;
#endif
  }
  // Size threshold above which a copy is considered "large".
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE size_t memcpyThreshold() const {
    return 2 * numThreads();
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE size_t firstLevelCacheSize() const {
#ifndef __CUDA_ARCH__
    // Running on the host CPU
    return l1CacheSize();
#else
    // Running on a CUDA device, return the amount of shared memory available.
    return 48*1024;
#endif
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE size_t lastLevelCacheSize() const {
#ifndef __CUDA_ARCH__
    // Running single threaded on the host CPU
    return l3CacheSize();
#else
    // Running on a CUDA device
    return firstLevelCacheSize();
#endif
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE int majorDeviceVersion() const {
#ifndef __CUDA_ARCH__
    // Running single threaded on the host CPU
    // Should return an enum that encodes the ISA supported by the CPU
    return 1;
#else
    // Running on a CUDA device
    return __CUDA_ARCH__ / 100;
#endif
  }
};
// Multiple cpu cores
#ifdef EIGEN_USE_THREADS
#if __cplusplus > 199711
// This defines an interface that ThreadPoolDevice can take to use
// custom thread pools underneath.
class ThreadPoolInterface {
 public:
  // Enqueue fn for asynchronous execution on one of the pool's threads.
  virtual void Schedule(std::function<void()> fn) = 0;
  virtual ~ThreadPoolInterface() {}
};
#endif
// The implementation of the ThreadPool type ensures that the Schedule method
// runs the functions it is provided in FIFO order when the scheduling is done
// by a single thread.
#ifdef EIGEN_USE_CUSTOM_THREAD_POOL
// Simple FIFO thread pool: Schedule() hands work directly to an idle
// worker when one is available, otherwise appends it to a pending queue
// that workers drain in order. All shared state is guarded by mu_.
class ThreadPool : public ThreadPoolInterface {
 public:
  // Construct a pool that contains "num_threads" threads.
  explicit ThreadPool(int num_threads) : threads_(num_threads), waiters_(num_threads) {
    for (int i = 0; i < num_threads; i++) {
      threads_.push_back(new std::thread([this]() { WorkerLoop(); }));
    }
  }
  // Wait until all scheduled work has finished and then destroy the
  // set of threads.
  ~ThreadPool() {
    {
      // Wait for all work to get done.
      std::unique_lock<std::mutex> l(mu_);
      while (!pending_.empty()) {
        empty_.wait(l);
      }
      exiting_ = true;
      // Wakeup all waiters. A null work item tells the woken worker that
      // there is nothing to run; it will then observe exiting_ and stop.
      for (auto w : waiters_) {
        w->ready = true;
        w->work = nullptr;
        w->cv.notify_one();
      }
    }
    // Wait for threads to finish.
    for (auto t : threads_) {
      t->join();
      delete t;
    }
  }
  // Schedule fn() for execution in the pool of threads. The functions are
  // executed in the order in which they are scheduled.
  void Schedule(std::function<void()> fn) final {
    std::unique_lock<std::mutex> l(mu_);
    if (waiters_.empty()) {
      // No idle worker: queue the task for later pickup.
      pending_.push_back(fn);
    } else {
      // Hand the task directly to an idle worker and wake it.
      Waiter* w = waiters_.back();
      waiters_.pop_back();
      w->ready = true;
      w->work = fn;
      w->cv.notify_one();
    }
  }
 protected:
  // Body executed by each worker thread until the pool shuts down.
  void WorkerLoop() {
    std::unique_lock<std::mutex> l(mu_);
    Waiter w;
    while (!exiting_) {
      std::function<void()> fn;
      if (pending_.empty()) {
        // Wait for work to be assigned to me
        w.ready = false;
        waiters_.push_back(&w);
        while (!w.ready) {
          w.cv.wait(l);
        }
        // fn may be null here if we were woken by the destructor.
        fn = w.work;
        w.work = nullptr;
      } else {
        // Pick up pending work
        fn = pending_.front();
        pending_.pop_front();
        if (pending_.empty()) {
          empty_.notify_all();
        }
      }
      if (fn) {
        // Run the task without holding the pool lock.
        mu_.unlock();
        fn();
        mu_.lock();
      }
    }
  }
 private:
  // Per-thread handoff slot used when a worker parks waiting for work.
  struct Waiter {
    std::condition_variable cv;
    std::function<void()> work;
    bool ready;
  };
  std::mutex mu_;                         // Guards all state below.
  FixedSizeVector<std::thread*> threads_;  // All threads
  FixedSizeVector<Waiter*> waiters_;  // Stack of waiting threads.
  std::deque<std::function<void()>> pending_;  // Queue of pending work
  std::condition_variable empty_;  // Signaled on pending_.empty()
  bool exiting_ = false;
};
// Notification is an object that allows a user to wait for another
// thread to signal a notification that an event has occurred.
//
// Multiple threads can wait on the same Notification object,
// but only one caller must call Notify() on the object.
class Notification {
 public:
  Notification() : notified_(false) {}
  ~Notification() {}
  // Signal the event. Must be called at most once (asserted).
  void Notify() {
    std::unique_lock<std::mutex> l(mu_);
    eigen_assert(!notified_);
    notified_ = true;
    cv_.notify_all();
  }
  // Block until Notify() has been called; returns immediately if it
  // already was.
  void WaitForNotification() {
    std::unique_lock<std::mutex> l(mu_);
    while (!notified_) {
      cv_.wait(l);
    }
  }
 private:
  std::mutex mu_;               // Guards notified_.
  std::condition_variable cv_;  // Signaled by Notify().
  bool notified_;
};
#else
// Notification is an object that allows a user to wait for another
// thread to signal a notification that an event has occurred.
//
// Multiple threads can wait on the same Notification object,
// but only one caller must call Notify() on the object.
// This variant is built on the tensorflow mutex/condition_variable types.
class Notification {
 public:
  Notification() : notified_(false) {}
  ~Notification() {}
  // Signal the event. Must be called at most once (asserted).
  void Notify() {
    tensorflow::mutex_lock l(mu_);
    eigen_assert(!notified_);
    notified_ = true;
    cv_.notify_all();
  }
  // Block until Notify() has been called; returns immediately if it
  // already was.
  void WaitForNotification() {
    tensorflow::mutex_lock l(mu_);
    while (!notified_) {
      cv_.wait(l);
    }
  }
 private:
  tensorflow::mutex mu_;               // Guards notified_.
  tensorflow::condition_variable cv_;  // Signaled by Notify().
  bool notified_;
};
#endif
// Runs an arbitrary function and then calls Notify() on the passed in
// Notification.
template <typename Function, typename... Args> struct FunctionWrapper
{
  // Invoke f with the given arguments, then signal completion through n.
  static void run(Notification* n, Function f, Args... args) {
    f(args...);
    n->Notify();
  }
};
// Block until n is notified; a null pointer means there is nothing to
// wait for and the call returns immediately.
static EIGEN_STRONG_INLINE void wait_until_ready(Notification* n) {
  if (n) {
    n->WaitForNotification();
  }
}
// Parallelizable memcpy: run() copies one block of the overall byte range.
// Used by ThreadPoolDevice::memcpy for large copies.
struct MemcpyExecutor {
  typedef MemcpyExecutor Self;
  MemcpyExecutor(void *dst, const void *src) :
      m_dst(static_cast<char *>(dst)), m_src(static_cast<const char *>(src)) { }
  // Copy block_size bytes starting at byte offset idx.
  static EIGEN_STRONG_INLINE void run(const MemcpyExecutor* exec, size_t idx, size_t block_size) {
    ::memcpy(&(exec->m_dst[idx]), &(exec->m_src[idx]), block_size);
  }
 private:
  char* m_dst;        // Destination base pointer.
  const char* m_src;  // Source base pointer.
};
// Parallelizable memset: run() fills one block of the overall byte range.
// Used by ThreadPoolDevice::memset for large fills.
struct MemsetExecutor {
  typedef MemsetExecutor Self;
  MemsetExecutor(void *buffer, int val) :
      m_buffer(static_cast<char *>(buffer)), m_val(val) { }
  // Fill block_size bytes starting at byte offset idx with m_val.
  static EIGEN_STRONG_INLINE void run(const MemsetExecutor* exec, size_t idx, size_t block_size) {
    ::memset(&(exec->m_buffer[idx]), exec->m_val, block_size);
  }
 private:
  char* m_buffer;   // Buffer base pointer.
  const int m_val;  // Byte value to store.
};
// Device abstraction that executes memory primitives in parallel on a pool
// of CPU threads.
struct ThreadPoolDevice {
  // The ownership of the thread pool remains with the caller.
  // The initializer list follows the member declaration order
  // (num_threads_ before pool_), which is the order C++ actually uses;
  // the previous ordering triggered -Wreorder warnings.
  ThreadPoolDevice(ThreadPoolInterface* pool, size_t num_cores)
      : num_threads_(num_cores), pool_(pool) {}
  // Allocate num_bytes of aligned heap memory.
  EIGEN_STRONG_INLINE void* allocate(size_t num_bytes) const {
    return internal::aligned_malloc(num_bytes);
  }
  // Release memory previously returned by allocate().
  EIGEN_STRONG_INLINE void deallocate(void* buffer) const {
    internal::aligned_free(buffer);
  }
  // Copy n bytes; copies larger than 32KB are split across the pool
  // (except on Android, where the plain memcpy is always used).
  EIGEN_STRONG_INLINE void memcpy(void* dst, const void* src, size_t n) const {
#ifdef __ANDROID__
    ::memcpy(dst, src, n);
#else
    if (n <= 32768) {
      ::memcpy(dst, src, n);
    } else {
      MemcpyExecutor memcpy_executor(dst, src);
      execute(memcpy_executor, n);
    }
#endif
  }
  // Host and device memory coincide for a CPU device.
  EIGEN_STRONG_INLINE void memcpyHostToDevice(void* dst, const void* src, size_t n) const {
    memcpy(dst, src, n);
  }
  EIGEN_STRONG_INLINE void memcpyDeviceToHost(void* dst, const void* src, size_t n) const {
    memcpy(dst, src, n);
  }
  // Fill n bytes with byte value c; large fills are parallelized like memcpy.
  EIGEN_STRONG_INLINE void memset(void* buffer, int c, size_t n) const {
#ifdef __ANDROID__
    ::memset(buffer, c, n);
#else
    if (n <= 32768) {
      ::memset(buffer, c, n);
    } else {
      MemsetExecutor memset_executor(buffer, c);
      execute(memset_executor, n);
    }
#endif
  }
  // Number of worker threads available to this device.
  EIGEN_STRONG_INLINE size_t numThreads() const {
    return num_threads_;
  }
  EIGEN_STRONG_INLINE size_t memcpyThreshold() const {
    return 2 * numThreads();
  }
  EIGEN_STRONG_INLINE size_t firstLevelCacheSize() const {
    return l1CacheSize();
  }
  EIGEN_STRONG_INLINE size_t lastLevelCacheSize() const {
    // The l3 cache size is shared between all the cores.
    return l3CacheSize() / num_threads_;
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE int majorDeviceVersion() const {
    // Should return an enum that encodes the ISA supported by the CPU
    return 1;
  }
  // Run f(args...) on the pool; the returned Notification must be waited
  // on and deleted by the caller.
  template <class Function, class... Args>
  EIGEN_STRONG_INLINE Notification* enqueue(Function&& f, Args&&... args) const {
    Notification* n = new Notification();
    std::function<void()> func =
      std::bind(&FunctionWrapper<Function, Args...>::run, n, f, args...);
    pool_->Schedule(func);
    return n;
  }
  // Fire-and-forget variant: no completion notification is produced.
  template <class Function, class... Args>
  EIGEN_STRONG_INLINE void enqueue_and_forget(Function&& f, Args&&... args) const {
    std::function<void()> func = std::bind(f, args...);
    pool_->Schedule(func);
  }
 private:
  // Split the n-byte range into roughly equal blocks, run Executor::run on
  // each block in parallel, process the remainder inline, then wait for all
  // the enqueued blocks to finish.
  template<typename Executor>
  EIGEN_STRONG_INLINE void execute(const Executor& exec, size_t n) const {
    // don't spawn a thread to process fewer than 1024 bytes (chosen by small amount of
    // experimentation)
    // TODO: make block_size a multiple of packet_size and align everything
    const size_t block_size = numext::maxi(static_cast<size_t>(1024), n / numThreads());
    const size_t block_count = n / block_size;
    eigen_assert(block_count <= numThreads());
    FixedSizeVector<Notification*> results(block_count);
    for (size_t block_idx = 0; block_idx < block_count; block_idx++) {
      results.push_back(enqueue(&Executor::run, &exec, block_idx * block_size, block_size));
    }
    if (block_count * block_size < n) {
      Executor::run(&exec, block_count * block_size, n - block_count * block_size);
    }
    // wait for threads to finish
    for (size_t block_idx = 0; block_idx < block_count; block_idx++) {
      results[block_idx]->WaitForNotification();
      delete results[block_idx];
    }
  }
  // todo: NUMA, ...
  size_t num_threads_;
  ThreadPoolInterface* pool_;
};
#endif
// GPU offloading
#ifdef EIGEN_USE_GPU
// An interface abstracting away device specific memory allocator.
class Allocator {
 public:
  virtual ~Allocator() {}
  // Allocate num_bytes on the device this allocator targets.
  EIGEN_DEVICE_FUNC virtual void* allocate(size_t num_bytes) const = 0;
  // Release memory previously obtained from allocate().
  EIGEN_DEVICE_FUNC virtual void deallocate(void* buffer) const = 0;
};
#if !defined(__GCUDACC__) && !defined(__GCUDACC_HOST__)
// This defines an interface that GPUDevice can take to use
// CUDA streams underneath.
class StreamInterface {
 public:
  virtual ~StreamInterface() {}
  // The CUDA stream all operations of the owning device are enqueued on.
  virtual const cudaStream_t& stream() const = 0;
  // Properties of the device the stream is bound to.
  virtual const cudaDeviceProp& deviceProperties() const = 0;
  // Allocate memory on the actual device where the computation will run
  virtual void* allocate(size_t num_bytes) const = 0;
  virtual void deallocate(void* buffer) const = 0;
};
// Per-device properties, queried once and cached for all GpuDevice users.
static cudaDeviceProp* m_deviceProperties;
static bool m_devicePropInitialized = false;
static tensorflow::mutex m_devicePropInitMutex(tensorflow::LINKER_INITIALIZED);
// Query and cache the cudaDeviceProp of every visible device, exactly once.
// NOTE(review): this is double-checked locking — the first (unlocked) read
// of m_devicePropInitialized races with the locked write under the C++11
// memory model; consider std::atomic<bool> or std::call_once. Confirm this
// is acceptable before relying on it.
static void initializeDeviceProp() {
  if (!m_devicePropInitialized) {
    tensorflow::mutex_lock l(m_devicePropInitMutex);
    if (!m_devicePropInitialized) {
      int num_devices;
      cudaError_t status = cudaGetDeviceCount(&num_devices);
      eigen_check(status == cudaSuccess);
      m_deviceProperties = new cudaDeviceProp[num_devices];
      for (int i = 0; i < num_devices; ++i) {
        status = cudaGetDeviceProperties(&m_deviceProperties[i], i);
        eigen_check(status == cudaSuccess);
      }
      m_devicePropInitialized = true;
    }
  }
}
// Stream handle used when the caller does not supply an explicit stream.
static const cudaStream_t default_stream = cudaStreamDefault;
// StreamInterface implementation backed by a plain CUDA stream; memory is
// allocated with cudaMalloc/cudaFree on the stream's device.
class CudaStreamDevice : public StreamInterface {
 public:
  // Use the default stream on the current device
  CudaStreamDevice() : stream_(&default_stream) {
    cudaGetDevice(&device_);
    initializeDeviceProp();
  }
  // Use the default stream on the specified device
  CudaStreamDevice(int device) : stream_(&default_stream), device_(device) {
    initializeDeviceProp();
  }
  // Use the specified stream. Note that it's the
  // caller responsibility to ensure that the stream can run on
  // the specified device. If no device is specified the code
  // assumes that the stream is associated to the current gpu device.
  CudaStreamDevice(const cudaStream_t* stream, int device = -1)
      : stream_(stream), device_(device) {
    if (device < 0) {
      cudaGetDevice(&device_);
    } else {
      int num_devices;
      cudaError_t err = cudaGetDeviceCount(&num_devices);
      eigen_check(err == cudaSuccess);
      eigen_check(device < num_devices);
      device_ = device;
    }
    initializeDeviceProp();
  }
  const cudaStream_t& stream() const { return *stream_; }
  const cudaDeviceProp& deviceProperties() const {
    return m_deviceProperties[device_];
  }
  // Allocate num_bytes of device memory on this stream's device.
  virtual void* allocate(size_t num_bytes) const {
    cudaError_t err = cudaSetDevice(device_);
    eigen_check(err == cudaSuccess);
    void* result;
    err = cudaMalloc(&result, num_bytes);
    eigen_check(err == cudaSuccess);
    eigen_check(result != NULL);
    return result;
  }
  // Free device memory previously returned by allocate().
  // NOTE(review): this path uses assert() while allocate() uses
  // eigen_check(); confirm whether the weaker check is intentional.
  virtual void deallocate(void* buffer) const {
    cudaError_t err = cudaSetDevice(device_);
    eigen_check(err == cudaSuccess);
    assert(buffer != NULL);
    err = cudaFree(buffer);
    assert(err == cudaSuccess);
  }
 private:
  const cudaStream_t* stream_;  // Not owned.
  int device_;                  // CUDA device ordinal of the stream.
};
// Set the shared-memory bank size configuration of the current device,
// aborting via eigen_check on failure.
static inline void setCudaSharedMemConfig(cudaSharedMemConfig config) {
  cudaError_t status = cudaDeviceSetSharedMemConfig(config);
  eigen_check(status == cudaSuccess);
}
struct GpuDevice {
  // Neither the cuda stream nor the allocator is owned: the caller is
  // responsible for their initialization and eventual destruction.
  explicit GpuDevice(const StreamInterface* stream) : stream_(stream) {
    eigen_assert(stream);
  }
  // TODO(bsteiner): This is an internal API, we should not expose it.
  EIGEN_STRONG_INLINE const cudaStream_t& stream() const {
    return stream_->stream();
  }
  // Allocate num_bytes of device memory via the underlying stream.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void* allocate(size_t num_bytes) const {
#ifndef __CUDA_ARCH__
    return stream_->allocate(num_bytes);
#else
    eigen_assert(false && "The default device should be used instead to generate kernel code");
    return NULL;
#endif
  }
  // Free device memory previously returned by allocate().
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void deallocate(void* buffer) const {
#ifndef __CUDA_ARCH__
    stream_->deallocate(buffer);
#else
    eigen_assert(false && "The default device should be used instead to generate kernel code");
#endif
  }
  // Device-to-device copy. NOTE: the copy is enqueued asynchronously on the
  // stream; call synchronize() before reading the result on the host.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void memcpy(void* dst, const void* src, size_t n) const {
#ifndef __CUDA_ARCH__
    cudaError_t err = cudaMemcpyAsync(dst, src, n, cudaMemcpyDeviceToDevice,
                                      stream_->stream());
    assert(err == cudaSuccess);
#else
    eigen_assert(false && "The default device should be used instead to generate kernel code");
#endif
  }
  // Asynchronous host-to-device copy on the device's stream.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void memcpyHostToDevice(void* dst, const void* src, size_t n) const {
#ifndef __CUDA_ARCH__
    cudaError_t err =
        cudaMemcpyAsync(dst, src, n, cudaMemcpyHostToDevice, stream_->stream());
    assert(err == cudaSuccess);
#else
    eigen_assert(false && "The default device should be used instead to generate kernel code");
#endif
  }
  // Asynchronous device-to-host copy on the device's stream.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void memcpyDeviceToHost(void* dst, const void* src, size_t n) const {
#ifndef __CUDA_ARCH__
    cudaError_t err =
        cudaMemcpyAsync(dst, src, n, cudaMemcpyDeviceToHost, stream_->stream());
    assert(err == cudaSuccess);
#else
    eigen_assert(false && "The default device should be used instead to generate kernel code");
#endif
  }
  // Asynchronous fill of n bytes with byte value c.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void memset(void* buffer, int c, size_t n) const {
#ifndef __CUDA_ARCH__
    cudaError_t err = cudaMemsetAsync(buffer, c, n, stream_->stream());
    assert(err == cudaSuccess);
#else
    eigen_assert(false && "The default device should be used instead to generate kernel code");
#endif
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE size_t numThreads() const {
    // FIXME
    return 32;
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE size_t memcpyThreshold() const {
    return 4 * 1024 * 1024;
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE size_t firstLevelCacheSize() const {
    // FIXME
    return 48*1024;
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE size_t lastLevelCacheSize() const {
    // We won't try to take advantage of the l2 cache for the time being, and
    // there is no l3 cache on cuda devices.
    return firstLevelCacheSize();
  }
  // Block the host until all work queued on the stream has completed.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void synchronize() const {
#ifndef __CUDA_ARCH__
    cudaError_t err = cudaStreamSynchronize(stream_->stream());
    assert(err == cudaSuccess);
#else
    assert(false && "The default device should be used instead to generate kernel code");
#endif
  }
  inline int getNumCudaMultiProcessors() const {
    return stream_->deviceProperties().multiProcessorCount;
  }
  inline int maxCudaThreadsPerBlock() const {
    return stream_->deviceProperties().maxThreadsPerBlock;
  }
  inline int maxCudaThreadsPerMultiProcessor() const {
    return stream_->deviceProperties().maxThreadsPerMultiProcessor;
  }
  inline int sharedMemPerBlock() const {
    return stream_->deviceProperties().sharedMemPerBlock;
  }
  inline int majorDeviceVersion() const {
    return stream_->deviceProperties().major;
  }
  // This function checks if the CUDA runtime recorded an error for the
  // underlying stream device.
  inline bool ok() const {
    cudaError_t error = cudaStreamQuery(stream_->stream());
    return (error == cudaSuccess) || (error == cudaErrorNotReady);
  }
 private:
  const StreamInterface* stream_;  // Not owned.
};
// Abort (in debug builds) if the CUDA runtime has recorded any error.
// Each known error code gets its own assert so that a failing build points
// at the specific condition; the final assert catches any error code that
// was introduced after this list was written. Compiles away under NDEBUG.
inline void assertCudaOk() {
  cudaError_t err = cudaGetLastError();
  assert(err != cudaErrorMissingConfiguration);
  assert(err != cudaErrorMemoryAllocation);
  assert(err != cudaErrorInitializationError);
  assert(err != cudaErrorLaunchFailure);
  assert(err != cudaErrorPriorLaunchFailure);
  assert(err != cudaErrorLaunchTimeout);
  assert(err != cudaErrorLaunchOutOfResources);
  assert(err != cudaErrorInvalidDeviceFunction);
  assert(err != cudaErrorInvalidConfiguration);
  assert(err != cudaErrorInvalidDevice);
  assert(err != cudaErrorInvalidValue);
  assert(err != cudaErrorInvalidPitchValue);
  assert(err != cudaErrorInvalidSymbol);
  assert(err != cudaErrorMapBufferObjectFailed);
  assert(err != cudaErrorUnmapBufferObjectFailed);
  assert(err != cudaErrorInvalidHostPointer);
  assert(err != cudaErrorInvalidDevicePointer);
  assert(err != cudaErrorInvalidTexture);
  assert(err != cudaErrorInvalidTextureBinding);
  assert(err != cudaErrorInvalidChannelDescriptor);
  assert(err != cudaErrorInvalidMemcpyDirection);
  assert(err != cudaErrorAddressOfConstant);
  assert(err != cudaErrorTextureFetchFailed);
  assert(err != cudaErrorTextureNotBound);
  assert(err != cudaErrorSynchronizationError);
  assert(err != cudaErrorInvalidFilterSetting);
  assert(err != cudaErrorInvalidNormSetting);
  assert(err != cudaErrorMixedDeviceExecution);
  assert(err != cudaErrorCudartUnloading);
  assert(err != cudaErrorUnknown);
  assert(err != cudaErrorNotYetImplemented);
  assert(err != cudaErrorMemoryValueTooLarge);
  assert(err != cudaErrorInvalidResourceHandle);
  assert(err != cudaErrorNotReady);
  assert(err != cudaErrorInsufficientDriver);
  assert(err != cudaErrorSetOnActiveProcess);
  assert(err != cudaErrorInvalidSurface);
  assert(err != cudaErrorNoDevice);
  assert(err != cudaErrorECCUncorrectable);
  assert(err != cudaErrorSharedObjectSymbolNotFound);
  assert(err != cudaErrorSharedObjectInitFailed);
  assert(err != cudaErrorUnsupportedLimit);
  assert(err != cudaErrorDuplicateVariableName);
  assert(err != cudaErrorDuplicateTextureName);
  assert(err != cudaErrorDuplicateSurfaceName);
  assert(err != cudaErrorDevicesUnavailable);
  assert(err != cudaErrorInvalidKernelImage);
  assert(err != cudaErrorNoKernelImageForDevice);
  assert(err != cudaErrorIncompatibleDriverContext);
  assert(err != cudaErrorPeerAccessAlreadyEnabled);
  assert(err != cudaErrorPeerAccessNotEnabled);
  assert(err != cudaErrorDeviceAlreadyInUse);
  assert(err != cudaErrorProfilerDisabled);
  assert(err != cudaErrorProfilerNotInitialized);
  assert(err != cudaErrorProfilerAlreadyStarted);
  assert(err != cudaErrorProfilerAlreadyStopped);
  assert(err != cudaErrorAssert);
  assert(err != cudaErrorTooManyPeers);
  assert(err != cudaErrorHostMemoryAlreadyRegistered);
  assert(err != cudaErrorHostMemoryNotRegistered);
  assert(err != cudaErrorOperatingSystem);
  assert(err != cudaErrorStartupFailure);
  assert(err != cudaErrorApiFailureBase);
  // catch errors types introduced after this function was written
  assert(err == cudaSuccess);
}
// Launch a CUDA kernel on the device's stream and immediately check the
// CUDA runtime error state. The do/while(false) wrapper makes the macro
// safe to use as a single statement inside unbraced if/else bodies.
#define LAUNCH_CUDA_KERNEL(kernel, gridsize, blocksize, sharedmem, device, \
                           ...)                                            \
  do {                                                                     \
    (kernel)<<<(gridsize), (blocksize), (sharedmem), (device).stream()>>>( \
        __VA_ARGS__);                                                      \
    assertCudaOk();                                                        \
  } while (false)
#else // __GCUDACC__
// The following is the version of GpuDevice for StreamExecutor
// (go/gpuexecutor) a GPU runtime that supports both CUDA and OpenCL.
// StreamExecutor is being developed as an open-source replacement for the CUDA
// runtime and is the runtime used when compiling with gcudacc. Differences
// between the CUDA runtime and StreamExecutor are abstracted away behind
// GpuDevice.
// TODO(jpienaar): Temporary workaround until b/18409724 is addressed.
// Mirror of the CUDA runtime's cudaSharedMemConfig enum so that code that
// refers to these names still compiles in the StreamExecutor build.
enum cudaSharedMemConfig
{
  cudaSharedMemBankSizeDefault   = 0,
  cudaSharedMemBankSizeFourByte  = 1,
  cudaSharedMemBankSizeEightByte = 2
};
// No-op in the StreamExecutor build; shared-memory bank configuration is
// not implemented yet (b/18409724), so the parameter is intentionally unused.
static inline void setCudaSharedMemConfig(cudaSharedMemConfig cache_config) {
  // TODO(jpienaar): fix when implemented (b/18409724)
}
// GpuDevice implemented on top of StreamExecutor. Neither the stream nor
// the allocator is owned: the caller is responsible for their lifetime.
struct GpuDevice {
  // Both constructors initialize members in declaration order (stream_,
  // stream_exec_, device_descr_, allocator_); stream_exec_ and
  // device_descr_ are derived from stream_, which is initialized first.
  // The previous initializer-list ordering triggered -Wreorder warnings.
  GpuDevice()
      : stream_(perftools::gputools::MachineManager::singleton()->stream_for_device(0)),
        stream_exec_(stream_->parent()),
        device_descr_(&(stream_exec_->GetDeviceDescription())),
        allocator_(nullptr) {}

  GpuDevice(perftools::gputools::Stream* stream,
            const Allocator* alloc = nullptr)
      : stream_(stream),
        stream_exec_(stream_->parent()),
        device_descr_(&(stream_exec_->GetDeviceDescription())),
        allocator_(alloc) {}

  EIGEN_STRONG_INLINE perftools::gputools::Stream* stream() const {
    return stream_;
  }

  // Allocate num_bytes of device memory, preferring the injected allocator
  // when one was provided.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void* allocate(size_t num_bytes) const {
    if (allocator_ != nullptr) return allocator_->allocate(num_bytes);
#ifndef __CUDA_ARCH__
    perftools::gputools::DeviceMemory<char> mem =
        stream_exec_->AllocateArray<char>(num_bytes);
    return mem.opaque();
#else
    assert(false &&
           "The default device should be used instead to generate kernel code");
    return nullptr;
#endif
  }

  // Release memory previously returned by allocate().
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void deallocate(void* buffer) const {
    if (allocator_ != nullptr) {
      allocator_->deallocate(buffer);
      return;
    }
#ifndef __CUDA_ARCH__
    perftools::gputools::DeviceMemoryBase gpu_mem(buffer);
    stream_exec_->Deallocate(&gpu_mem);
#else
    assert(false &&
           "The default device should be used instead to generate kernel code");
#endif
  }

  // Device-to-device copy, enqueued asynchronously on the stream.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void memcpy(void* dst, const void* src,
                                                    size_t n) const {
#ifndef __CUDA_ARCH__
    perftools::gputools::DeviceMemoryBase gpu_to(dst);
    if (!stream_->ThenMemcpy(&gpu_to, perftools::gputools::DeviceMemoryBase(
                                          const_cast<void*>(src)),
                             n).ok()) {
      assert(false &&
             "failed during enqueue of 'copy perftools::gputools to "
             "perftools::gputools'");
    }
#else
    assert(false &&
           "The default device should be used instead to generate kernel code");
#endif
  }

  // Asynchronous host-to-device copy on the stream.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void memcpyHostToDevice(void* dst, const void* src, size_t n) const {
#ifndef __CUDA_ARCH__
    perftools::gputools::DeviceMemoryBase gpu_to(dst);
    if (!stream_->ThenMemcpy(&gpu_to, src, n).ok()) {
      assert(false && "failed while enqueuing memcpy from host to device");
    }
#else
    eigen_assert(false && "The default device should be used instead to generate kernel code");
#endif
  }

  // Asynchronous device-to-host copy on the stream.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void memcpyDeviceToHost(void* dst, const void* src, size_t n) const {
#ifndef __CUDA_ARCH__
    if (!stream_->ThenMemcpy(dst, perftools::gputools::DeviceMemoryBase(
                                      const_cast<void*>(src)),
                             n).ok()) {
      assert(false && "failed while enqueuing memcpy from device to host");
    }
#else
    eigen_assert(false && "The default device should be used instead to generate kernel code");
#endif
  }

  // Fill n bytes with byte value c via the executor's memset primitive.
  EIGEN_STRONG_INLINE void memset(void* buffer, int c, size_t n) const {
#ifndef __CUDA_ARCH__
    perftools::gputools::DeviceMemoryBase gpu_buffer{buffer};
    if (!stream_exec_->Memset32(stream_, &gpu_buffer, c, n)) {
      assert(false && "GPU memset failed.");
    }
#else
    assert(false &&
           "The default device should be used instead to generate kernel code");
#endif
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE size_t numThreads() const {
    // FIXME
    return 32;
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE size_t memcpyThreshold() const {
    return 4 * 1024 * 1024;
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE size_t firstLevelCacheSize() const {
    // FIXME
    return 48*1024;
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE size_t lastLevelCacheSize() const {
    // We won't try to take advantage of the l2 cache for the time being, and
    // there is no l3 cache on cuda devices.
    return firstLevelCacheSize();
  }

  // Block the host until all work queued on the stream has completed.
  EIGEN_STRONG_INLINE void synchronize() const {
    stream_->BlockHostUntilDone();
  }

  EIGEN_DEVICE_FUNC inline int getNumCudaMultiProcessors() const {
    return device_descr_->core_count();
  }
  EIGEN_DEVICE_FUNC inline int maxCudaThreadsPerBlock() const {
    return device_descr_->threads_per_block_limit();
  }
  EIGEN_DEVICE_FUNC inline int maxCudaThreadsPerMultiProcessor() const {
    return device_descr_->threads_per_core_limit();
  }
  EIGEN_DEVICE_FUNC inline int sharedMemPerBlock() const {
    return device_descr_->shared_memory_per_block();
  }
  EIGEN_DEVICE_FUNC inline int majorDeviceVersion() const {
    int major, minor;
    if (device_descr_->cuda_compute_capability(&major, &minor)) {
      return major;
    } else {
      return 0;
    }
  }

  inline bool ok() const { return stream_->ok(); }

 private:
  perftools::gputools::Stream* stream_;               // Not owned.
  perftools::gputools::StreamExecutor* stream_exec_;  // Derived from stream_.
  const perftools::gputools::DeviceDescription* device_descr_;
  const Allocator* allocator_;                        // Optional; not owned.
};
// Enqueues `kernel` on `device`'s stream with the given grid/block/shared-mem
// configuration and then checks the stream is still healthy.
// NOTE(review): the macro expands to two statements not wrapped in
// do { } while(0), so it is unsafe under an un-braced if/else - confirm call sites.
#define LAUNCH_CUDA_KERNEL(kernel, gridsize, blocksize, sharedmem, device, ...)\
(kernel) <<< (gridsize), (blocksize), (sharedmem), (device).stream() >>> (__VA_ARGS__); \
CHECK((device).stream()->ok());
#endif // __GCUDACC__
#endif // EIGEN_USE_GPU
} // end namespace Eigen
#endif // EIGEN_CXX11_TENSOR_TENSOR_DEVICE_TYPE_H
| {
"content_hash": "ac03a8dcdc7876cbb73fc9c171fdf60d",
"timestamp": "",
"source": "github",
"line_count": 910,
"max_line_length": 109,
"avg_line_length": 31.284615384615385,
"alnum_prop": 0.6687976395377428,
"repo_name": "MehdiSfr/tensor-flow",
"id": "a62682c7289764bfd951f764b949f75e357f55b4",
"size": "28932",
"binary": false,
"copies": "2",
"ref": "refs/heads/branch_mehdiExpr",
"path": "third_party/eigen3/unsupported/Eigen/CXX11/src/Tensor/TensorDeviceType.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "136455"
},
{
"name": "C++",
"bytes": "5591657"
},
{
"name": "CSS",
"bytes": "107"
},
{
"name": "HTML",
"bytes": "640055"
},
{
"name": "Java",
"bytes": "49257"
},
{
"name": "JavaScript",
"bytes": "6252"
},
{
"name": "Jupyter Notebook",
"bytes": "329336"
},
{
"name": "Objective-C",
"bytes": "1288"
},
{
"name": "Protocol Buffer",
"bytes": "45325"
},
{
"name": "Python",
"bytes": "2684789"
},
{
"name": "Shell",
"bytes": "5104"
},
{
"name": "TypeScript",
"bytes": "256549"
}
],
"symlink_target": ""
} |
using namespace std;
using namespace testing;
// Black-box test fixture for Tokenizer; carries no shared state and exists
// only to group the TEST_F cases below.
class TokenizerBBTest: public Test {
};
// Default construction: tokens are split on spaces, empty tokens are kept
// (empty input yields a single empty token).
TEST_F(TokenizerBBTest, defaultSetting) {
Tokenizer tokenizer;
EXPECT_THAT(tokenizer.tokenize(""),
ElementsAre(""));
EXPECT_THAT(tokenizer.tokenize("some normal string"),
ElementsAre("some", "normal", "string"));
}
// ',' delimiter, empty tokens kept, no escaping: backslash is an ordinary
// character and leading/trailing/adjacent delimiters produce empty tokens.
TEST_F(TokenizerBBTest, CommaNoFilterNoEscape) {
Tokenizer tokenizer(',');
EXPECT_THAT(tokenizer.tokenize(",,"), ElementsAre("", "", ""));
EXPECT_THAT(tokenizer.tokenize(",\\,"), ElementsAre("","\\",""));
EXPECT_THAT(tokenizer.tokenize(",\\,some,,normal string,"),
ElementsAre("","\\","some","", "normal string",""));
EXPECT_THAT(tokenizer.tokenize("some normal string"),
ElementsAre("some normal string"));
}
// filter=true drops empty tokens, so empty input and lone delimiters
// produce no tokens at all.
TEST_F(TokenizerBBTest, CommaFilterNoEscape) {
Tokenizer tokenizer(',', true);
EXPECT_THAT(tokenizer.tokenize(""), ElementsAre());
EXPECT_THAT(tokenizer.tokenize(","), ElementsAre());
EXPECT_THAT(tokenizer.tokenize("\\,,"), ElementsAre("\\"));
}
// escape=true ('\' escapes): "\," yields a literal comma inside a token,
// and a trailing lone '\' yields an empty token.
TEST_F(TokenizerBBTest, CommaNoFilterEscape) {
Tokenizer tokenizer(',', false, true);
EXPECT_THAT(tokenizer.tokenize("\\,,"), ElementsAre(",", ""));
EXPECT_THAT(tokenizer.tokenize("s,\\"), ElementsAre("s", ""));
}
// filter + escape combined: escaped commas survive inside tokens while
// empty tokens are dropped from the result.
TEST_F(TokenizerBBTest, CommaFilterEscape) {
Tokenizer tokenizer(',', true, true);
EXPECT_THAT(tokenizer.tokenize("s,\\\\,"), ElementsAre("s", "\\"));
EXPECT_THAT(tokenizer.tokenize(",\\,,"), ElementsAre(","));
EXPECT_THAT(tokenizer.tokenize("\\,\\,"), ElementsAre(",,"));
}
// '\' used as the delimiter itself, with no escape handling.
TEST_F(TokenizerBBTest, EscapeNoFilterNoEscape) {
Tokenizer tokenizer('\\');
EXPECT_THAT(tokenizer.tokenize("\\,,"), ElementsAre("", ",,"));
EXPECT_THAT(tokenizer.tokenize("ss\\\\"), ElementsAre("ss","", ""));
}
// Delimiter '\' with escaping enabled; see the note below on the remapped
// escape character.
TEST_F(TokenizerBBTest, EscapeNoFilterEscape) {
Tokenizer tokenizer('\\',false, true);
//the escape character will be changed to '/' when the delimiter is '\'
EXPECT_THAT(tokenizer.tokenize("s\\\\o\\a"),
ElementsAre("s","","o","a"));
}
// Delimiter '\' with escaping ('/' escapes, per the remapping noted in the
// previous test) and empty-token filtering enabled.
TEST_F(TokenizerBBTest, EscapeFilterEscape) {
Tokenizer tokenizer('\\',true, true);
EXPECT_THAT(tokenizer.tokenize("\\\\,,"), ElementsAre(",,"));
EXPECT_THAT(tokenizer.tokenize("ss/\\\\/\\e"), ElementsAre("ss\\","\\e"));
EXPECT_THAT(tokenizer.tokenize("s/\\//o\\a"),
ElementsAre("s\\/o","a"));
}
// Embedded NUL bytes must be preserved: the input is built with an explicit
// length so tokenize() cannot rely on C-string termination.
TEST_F(TokenizerBBTest, WithZeroBytes) {
Tokenizer tokenizer(',',true, true);
const char buffer[] = "a\0""c,\0,def\\,\0"",";
const string input(buffer, sizeof(buffer));
string zero = string(1, '\0');
vector<string> expected;
expected.push_back(string(buffer,3));//"a\0c"
expected.push_back(zero);//"\0"
expected.push_back(string("def,\0",5));//"def,\0"
expected.push_back(zero);//"\0"
EXPECT_THAT(tokenizer.tokenize(input), ContainerEq(expected));
}
| {
"content_hash": "cf2dacc40b07859609f33e7494f49176",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 76,
"avg_line_length": 36.51315789473684,
"alnum_prop": 0.6493693693693694,
"repo_name": "zhanglix/code_for_blog",
"id": "e45de49b7f1323b71e3a585d602748495eb4d2a5",
"size": "2843",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tokenizer/tokenizer_bb_test.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "22371"
},
{
"name": "Python",
"bytes": "4050"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<results>
<links/>
<graph/>
</results>
| {
"content_hash": "81c82141a01a54a6a50575499a84857e",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 54,
"avg_line_length": 20.4,
"alnum_prop": 0.5882352941176471,
"repo_name": "Vladk-el/eFindMe",
"id": "5fd5a25e724114cb4fc2b4bbe040871d54cd529c",
"size": "102",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data/xml/users/cedric_piard/results.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "154327"
},
{
"name": "XML",
"bytes": "34333"
}
],
"symlink_target": ""
} |
// Basic NSObject-backed adopter of the PDConform protocol. The leading
// underscore (and the _Private header location) suggest this is an internal
// implementation detail - confirm intended visibility against the public headers.
@interface _PDConformBasic : NSObject <PDConform>
@end
| {
"content_hash": "08084deb7749063a09564b2ebf0e6be3",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 49,
"avg_line_length": 27.5,
"alnum_prop": 0.7818181818181819,
"repo_name": "wileykestner/PDConform",
"id": "e2ffc5479bb78e7b85363bc43a3cae2a60e6f1ac",
"size": "113",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PDConform/Private/_PDConformBasic.h",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Objective-C",
"bytes": "72358"
},
{
"name": "Ruby",
"bytes": "1276"
}
],
"symlink_target": ""
} |
#pragma once
#include "Type.h"
#include "ODBCBase.h"
// Player-shop persistence record built on the project's ODBC layer.
// NOTE(review): declarations only - the exact Load/Save/Delete/ParseResult
// semantics come from the ODBCBase contract; confirm against the .cpp file.
class DBPlayerShopInfo : public ODBCBase
{
public:
DBPlayerShopInfo( ODBCInterface* pInterface ) ; // binds this record to an ODBC interface
virtual BOOL Load() ;                           // load the record from the database
virtual BOOL Save(VOID* pSource) ;              // persist the data pointed to by pSource
virtual BOOL Delete() ;                         // delete the record
virtual BOOL ParseResult(VOID* pResult) ;       // unpack a query result into this object
public:
} ;
| {
"content_hash": "20ec9f19b001e536a669dcec19031ae5",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 51,
"avg_line_length": 16.7,
"alnum_prop": 0.6437125748502994,
"repo_name": "viticm/web-pap",
"id": "a7a9880f1ace09d02a4174ba2710231e9af0f1c9",
"size": "704",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/Common/DBSystem/DataBase/DBPlayerShopInfo.h",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "2382465"
},
{
"name": "C++",
"bytes": "6819417"
},
{
"name": "CSS",
"bytes": "1042"
},
{
"name": "JavaScript",
"bytes": "8703"
},
{
"name": "Lua",
"bytes": "12590"
},
{
"name": "Objective-C",
"bytes": "143909"
},
{
"name": "PHP",
"bytes": "14327"
},
{
"name": "Perl",
"bytes": "892"
},
{
"name": "Shell",
"bytes": "16405"
}
],
"symlink_target": ""
} |
// DOM node class for HTML <p> (paragraph) elements.
class HTMLParagraphElement : public HTMLElement
{
public:
HTMLParagraphElement();
// Identifies this node as a paragraph (presumably a query the base class
// also exposes - confirm against HTMLElement).
bool is_paragraph_node() const;
};
#endif
| {
"content_hash": "2d9cc3a400a9b50fac13acd711089ec9",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 47,
"avg_line_length": 18.125,
"alnum_prop": 0.6689655172413793,
"repo_name": "reesmichael1/WebWhirr",
"id": "5b1747e2945cff332460545c8748a021ae4ebd2f",
"size": "236",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/HTMLElements/HTMLParagraphElement.hpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "46554"
},
{
"name": "Makefile",
"bytes": "2827"
},
{
"name": "Shell",
"bytes": "737"
}
],
"symlink_target": ""
} |
<?php
defined('BASEPATH') OR exit('No direct script access allowed');
/**
* Postgre Database Adapter Class
*
* Note: _DB is an extender class that the app controller
* creates dynamically based on whether the query builder
* class is being used or not.
*
* @package CodeIgniter
* @subpackage Drivers
* @category Database
* @author EllisLab Dev Team
* @link https://codeigniter.com/user_guide/database/
*/
class CI_DB_postgre_driver extends CI_DB
{
/**
* Database driver
*
* @var string
*/
public $dbdriver = 'postgre';
/**
* Database schema
*
* @var string
*/
public $schema = 'public';
// --------------------------------------------------------------------
/**
* ORDER BY random keyword
*
* @var array
*/
protected $_random_keyword = array('RANDOM()', 'RANDOM()');
// --------------------------------------------------------------------
/**
 * Class constructor
 *
 * Creates a DSN string to be used for db_connect() and db_pconnect()
 *
 * @param	array	$params
 * @return	void
 */
public function __construct($params)
{
	parent::__construct($params);

	// A DSN string supplied directly takes precedence; use it as-is.
	if (!empty($this->dsn)) {
		return;
	}

	$this->dsn === '' OR $this->dsn = '';

	if (strpos($this->hostname, '/') !== FALSE) {
		// If UNIX sockets are used, we shouldn't set a port
		$this->port = '';
	}

	$this->hostname === '' OR $this->dsn = 'host=' . $this->hostname . ' ';

	if (!empty($this->port) && ctype_digit($this->port)) {
		$this->dsn .= 'port=' . $this->port . ' ';
	}

	if ($this->username !== '') {
		$this->dsn .= 'user=' . $this->username . ' ';

		/* An empty password is valid!
		 *
		 * $db['password'] = NULL must be done in order to ignore it.
		 */
		$this->password === NULL OR $this->dsn .= "password='" . $this->password . "' ";
	}

	$this->database === '' OR $this->dsn .= 'dbname=' . $this->database . ' ';

	/* We don't have these options as elements in our standard configuration
	 * array, but they might be set by parse_url() if the configuration was
	 * provided via string. Example:
	 *
	 * postgre://username:password@localhost:5432/database?connect_timeout=5&sslmode=1
	 */
	foreach (array('connect_timeout', 'options', 'sslmode', 'service') as $key) {
		// Bug fix: variable property syntax ($this->$key) is required here.
		// The original read $this->key - a literal property named "key" -
		// which silently dropped all four options from the DSN.
		if (isset($this->$key) && is_string($this->$key) && $this->$key !== '') {
			$this->dsn .= $key . "='" . $this->$key . "' ";
		}
	}

	$this->dsn = rtrim($this->dsn);
}
// --------------------------------------------------------------------
/**
 * Database connection
 *
 * Opens a regular or persistent connection using the DSN built in the
 * constructor, validates recycled persistent handles, and applies the
 * configured schema search path.
 *
 * @param	bool	$persistent
 * @return	resource	connection handle, or FALSE on failure
 */
public function db_connect($persistent = FALSE)
{
$this->conn_id = ($persistent === TRUE)
? pg_pconnect($this->dsn)
: pg_connect($this->dsn);
if ($this->conn_id !== FALSE) {
// A recycled persistent handle can be dead: if its status is bad and a
// ping cannot revive it, report failure instead of returning a bad link.
if ($persistent === TRUE
&& pg_connection_status($this->conn_id) === PGSQL_CONNECTION_BAD
&& pg_ping($this->conn_id) === FALSE
) {
return FALSE;
}
// Make the configured schema win over "public" for unqualified names.
empty($this->schema) OR $this->simple_query('SET search_path TO ' . $this->schema . ',public');
}
return $this->conn_id;
}
// --------------------------------------------------------------------
/**
 * Reconnect
 *
 * Keep / reestablish the db connection if no queries have been sent for
 * a length of time exceeding the server's idle timeout. pg_ping() itself
 * attempts to re-establish the link; only when that fails is the handle
 * marked unusable.
 *
 * @return	void
 */
public function reconnect()
{
	if (pg_ping($this->conn_id) !== FALSE) {
		return;
	}
	$this->conn_id = FALSE;
}
// --------------------------------------------------------------------
/**
 * Set client character set
 *
 * @param	string	$charset
 * @return	bool	TRUE when pg_set_client_encoding() reports success (0)
 */
protected function _db_set_charset($charset)
{
	$status = pg_set_client_encoding($this->conn_id, $charset);
	return ($status === 0);
}
// --------------------------------------------------------------------
/**
 * Database version number
 *
 * Caches the result. Reads the server version from pg_version() when the
 * client library exposes it, otherwise falls back to the parent's
 * query-based lookup.
 *
 * @return	string|bool	version string, or FALSE if not connected
 */
public function version()
{
if (isset($this->data_cache['version'])) {
return $this->data_cache['version'];
}
if (!$this->conn_id OR ($pg_version = pg_version($this->conn_id)) === FALSE) {
return FALSE;
}
/* If PHP was compiled with PostgreSQL lib versions earlier
 * than 7.4, pg_version() won't return the server version
 * and so we'll have to fall back to running a query in
 * order to get it.
 */
return isset($pg_version['server'])
? $this->data_cache['version'] = $pg_version['server']
: parent::version();
}
// --------------------------------------------------------------------
/**
 * Execute the query
 *
 * Thin wrapper over the native driver call on the open connection.
 *
 * @param	string	$sql	an SQL query
 * @return	resource	query result, or FALSE on failure
 */
protected function _execute($sql)
{
return pg_query($this->conn_id, $sql);
}
// --------------------------------------------------------------------
/**
 * Begin Transaction
 *
 * @return	bool	TRUE if the BEGIN statement was accepted
 */
protected function _trans_begin()
{
	$result = pg_query($this->conn_id, 'BEGIN');
	return (bool) $result;
}
// --------------------------------------------------------------------
/**
 * Commit Transaction
 *
 * @return	bool	TRUE if the COMMIT statement was accepted
 */
protected function _trans_commit()
{
	$result = pg_query($this->conn_id, 'COMMIT');
	return (bool) $result;
}
// --------------------------------------------------------------------
/**
 * Rollback Transaction
 *
 * @return	bool	TRUE if the ROLLBACK statement was accepted
 */
protected function _trans_rollback()
{
	$result = pg_query($this->conn_id, 'ROLLBACK');
	return (bool) $result;
}
// --------------------------------------------------------------------
/**
 * Determines if a query is a "write" type.
 *
 * Overridden for PostgreSQL because an INSERT/UPDATE with a RETURNING
 * clause produces a result set and must therefore be treated as a read.
 *
 * @param	string	An SQL query string
 * @return	bool
 */
public function is_write_type($sql)
{
	// The 's' (DOTALL) modifier lets '.' span newlines so that multi-line
	// INSERT/UPDATE statements with a RETURNING clause are still detected;
	// without it the pattern only matched single-line queries.
	if (preg_match('#^(INSERT|UPDATE).*RETURNING\s.+(\,\s?.+)*$#is', $sql)) {
		return FALSE;
	}
	return parent::is_write_type($sql);
}
// --------------------------------------------------------------------
/**
 * Platform-dependent string escape
 *
 * Escapes special characters for use inside a quoted SQL string; it does
 * not add the surrounding quotes (escape() handles full literals).
 *
 * @param	string
 * @return	string
 */
protected function _escape_str($str)
{
return pg_escape_string($this->conn_id, $str);
}
// --------------------------------------------------------------------
/**
 * "Smart" Escape String
 *
 * Escapes data based on type: strings (and stringable objects) go through
 * pg_escape_literal() when available (PHP >= 5.4.4), booleans become the
 * SQL keywords TRUE/FALSE, and everything else is delegated to the parent.
 *
 * @param	string	$str
 * @return	mixed
 */
public function escape($str)
{
if (is_php('5.4.4') && (is_string($str) OR (is_object($str) && method_exists($str, '__toString')))) {
// pg_escape_literal() returns the value already wrapped in quotes.
return pg_escape_literal($this->conn_id, $str);
} elseif (is_bool($str)) {
return ($str) ? 'TRUE' : 'FALSE';
}
return parent::escape($str);
}
// --------------------------------------------------------------------
/**
 * Affected Rows
 *
 * @return	int	number of rows affected by the last query's result
 */
public function affected_rows()
{
return pg_affected_rows($this->result_id);
}
// --------------------------------------------------------------------
/**
 * Insert ID
 *
 * Optional arguments (read via func_get_arg()): a table/sequence name and
 * a column name. The strategy depends on the server version:
 *  - no table given, server >= 8.1: LASTVAL()
 *  - table + column, server >= 8.0: CURRVAL() of pg_get_serial_sequence()
 *  - table only: the argument is treated as a sequence name for CURRVAL()
 *  - otherwise: the OID of the last result (pre-8.1 fallback)
 *
 * @return	int|string
 */
public function insert_id()
{
$v = pg_version($this->conn_id);
$v = isset($v['server']) ? $v['server'] : 0; // 'server' key is only available since PosgreSQL 7.4
$table = (func_num_args() > 0) ? func_get_arg(0) : NULL;
$column = (func_num_args() > 1) ? func_get_arg(1) : NULL;
if ($table === NULL && $v >= '8.1') {
$sql = 'SELECT LASTVAL() AS ins_id';
} elseif ($table !== NULL) {
if ($column !== NULL && $v >= '8.0') {
$sql = 'SELECT pg_get_serial_sequence(\'' . $table . "', '" . $column . "') AS seq";
$query = $this->query($sql);
$query = $query->row();
$seq = $query->seq;
} else {
// seq_name passed in table parameter
$seq = $table;
}
$sql = 'SELECT CURRVAL(\'' . $seq . "') AS ins_id";
} else {
return pg_last_oid($this->result_id);
}
$query = $this->query($sql);
$query = $query->row();
return (int)$query->ins_id;
}
// --------------------------------------------------------------------
/**
 * Show table query
 *
 * Generates a platform-specific query string so that the table names can
 * be fetched. Tables are listed from the configured schema; when
 * $prefix_limit is enabled, only those starting with the (LIKE-escaped)
 * dbprefix are returned.
 *
 * @param	bool	$prefix_limit
 * @return	string
 */
protected function _list_tables($prefix_limit = FALSE)
{
$sql = 'SELECT "table_name" FROM "information_schema"."tables" WHERE "table_schema" = \'' . $this->schema . "'";
if ($prefix_limit !== FALSE && $this->dbprefix !== '') {
return $sql . ' AND "table_name" LIKE \''
. $this->escape_like_str($this->dbprefix) . "%' "
. sprintf($this->_like_escape_str, $this->_like_escape_chr);
}
return $sql;
}
// --------------------------------------------------------------------
/**
 * List column query
 *
 * Generates a platform-specific query string so that the column names can
 * be fetched.
 *
 * @param	string	$table
 * @return	string
 */
protected function _list_columns($table = '')
{
// Case-insensitive table-name match; note the schema is intentionally not
// filtered here (unlike _list_tables()).
return 'SELECT "column_name"
FROM "information_schema"."columns"
WHERE LOWER("table_name") = ' . $this->escape(strtolower($table));
}
// --------------------------------------------------------------------
/**
 * Returns an object with field data
 *
 * Reads column metadata from information_schema; max_length falls back to
 * numeric_precision for non-character columns.
 *
 * @param	string	$table
 * @return	array|bool	array of stdClass field descriptors, or FALSE on failure
 */
public function field_data($table)
{
$sql = 'SELECT "column_name", "data_type", "character_maximum_length", "numeric_precision", "column_default"
FROM "information_schema"."columns"
WHERE LOWER("table_name") = ' . $this->escape(strtolower($table));
if (($query = $this->query($sql)) === FALSE) {
return FALSE;
}
$query = $query->result_object();
$retval = array();
for ($i = 0, $c = count($query); $i < $c; $i++) {
$retval[$i] = new stdClass();
$retval[$i]->name = $query[$i]->column_name;
$retval[$i]->type = $query[$i]->data_type;
$retval[$i]->max_length = ($query[$i]->character_maximum_length > 0) ? $query[$i]->character_maximum_length : $query[$i]->numeric_precision;
$retval[$i]->default = $query[$i]->column_default;
}
return $retval;
}
// --------------------------------------------------------------------
/**
 * Error
 *
 * Returns an array containing code and message of the last database error
 * that has occurred. The pg_* API exposes no numeric error code, so
 * 'code' is always an empty string.
 *
 * @return	array
 */
public function error()
{
	$error = array('code' => '');
	$error['message'] = pg_last_error($this->conn_id);
	return $error;
}
// --------------------------------------------------------------------
/**
 * ORDER BY
 *
 * PostgreSQL-specific handling for ORDER BY RANDOM: a numeric argument is
 * normalized into a float seed and applied via SET SEED so the "random"
 * ordering is repeatable; the orderby expression is then replaced by
 * RANDOM().
 *
 * @param	string	$orderby
 * @param	string	$direction	ASC, DESC or RANDOM
 * @param	bool	$escape
 * @return	object
 */
public function order_by($orderby, $direction = '', $escape = NULL)
{
$direction = strtoupper(trim($direction));
if ($direction === 'RANDOM') {
if (!is_float($orderby) && ctype_digit((string)$orderby)) {
// NOTE(review): the cast binds tighter than '.', so for values > 1 this
// evaluates as ((float)'0.') . $orderby - a STRING - and the is_float()
// check below stays FALSE, skipping SET SEED. Likely intended as
// (float)('0.' . $orderby); confirm before changing.
$orderby = ($orderby > 1)
? (float)'0.' . $orderby
: (float)$orderby;
}
if (is_float($orderby)) {
// NOTE(review): PostgreSQL's documented syntax is "SET SEED TO <v>" or
// "SET SEED = <v>"; confirm the bare form used here is accepted.
$this->simple_query('SET SEED ' . $orderby);
}
$orderby = $this->_random_keyword[0];
$direction = '';
$escape = FALSE;
}
return parent::order_by($orderby, $direction, $escape);
}
// --------------------------------------------------------------------
/**
 * Update statement
 *
 * Generates a platform-specific update string from the supplied data.
 * PostgreSQL's UPDATE supports neither LIMIT nor ORDER BY, so both are
 * cleared before delegating to the parent implementation.
 *
 * @param	string	$table
 * @param	array	$values
 * @return	string
 */
protected function _update($table, $values)
{
$this->qb_limit = FALSE;
$this->qb_orderby = array();
return parent::_update($table, $values);
}
// --------------------------------------------------------------------
/**
 * Update_Batch statement
 *
 * Generates a platform-specific batch update string from the supplied data
 * using one CASE expression per updated column.
 *
 * @param	string	$table	Table name
 * @param	array	$values	Update data
 * @param	string	$index	WHERE key
 * @return	string
 */
protected function _update_batch($table, $values, $index)
{
	$ids = array();
	// Bug fix: $final was previously used without initialization, raising
	// an E_NOTICE (and an error on newer PHP) for empty input.
	$final = array();

	foreach ($values as $key => $val) {
		$ids[] = $val[$index];

		foreach (array_keys($val) as $field) {
			if ($field !== $index) {
				// One "WHEN <id> THEN <value>" branch per row, per column.
				$final[$field][] = 'WHEN ' . $val[$index] . ' THEN ' . $val[$field];
			}
		}
	}

	$cases = '';
	foreach ($final as $k => $v) {
		$cases .= $k . ' = (CASE ' . $index . "\n"
			. implode("\n", $v) . "\n"
			. 'ELSE ' . $k . ' END), ';
	}

	$this->where($index . ' IN(' . implode(',', $ids) . ')', NULL, FALSE);

	// substr(..., 0, -2) trims the trailing ", " left by the loop above.
	return 'UPDATE ' . $table . ' SET ' . substr($cases, 0, -2) . $this->_compile_wh('qb_where');
}
// --------------------------------------------------------------------
/**
 * Delete statement
 *
 * Generates a platform-specific delete string from the supplied data.
 * PostgreSQL's DELETE does not support LIMIT, so it is cleared before
 * delegating to the parent implementation.
 *
 * @param	string	$table
 * @return	string
 */
protected function _delete($table)
{
$this->qb_limit = FALSE;
return parent::_delete($table);
}
// --------------------------------------------------------------------
/**
 * LIMIT
 *
 * Generates a platform-specific LIMIT (and optional OFFSET) clause.
 *
 * @param	string	$sql	SQL Query
 * @return	string
 */
protected function _limit($sql)
{
	$sql .= ' LIMIT ' . $this->qb_limit;
	if ($this->qb_offset) {
		$sql .= ' OFFSET ' . $this->qb_offset;
	}
	return $sql;
}
// --------------------------------------------------------------------
/**
 * Close DB Connection
 *
 * Releases the underlying PostgreSQL link resource.
 *
 * @return	void
 */
protected function _close()
{
pg_close($this->conn_id);
}
}
| {
"content_hash": "801504be79bce0f6727e333f230abe5a",
"timestamp": "",
"source": "github",
"line_count": 553,
"max_line_length": 152,
"avg_line_length": 27.694394213381557,
"alnum_prop": 0.42820763956904995,
"repo_name": "libo-89/Sancaisignin",
"id": "758a1ed96c12ff1bc9514a28ee85d61bdaae54f0",
"size": "16996",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "system/database/drivers/postgre/postgre_driver.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "240"
},
{
"name": "CSS",
"bytes": "125066"
},
{
"name": "HTML",
"bytes": "12900080"
},
{
"name": "JavaScript",
"bytes": "296308"
},
{
"name": "PHP",
"bytes": "2004199"
}
],
"symlink_target": ""
} |
package es.tid.pce.computingEngine;
/**
* Class made for managing report messages from PCCs. It only adds them to
* the database
*
* @author jimbo
*/
import java.net.Inet4Address;
import java.util.concurrent.LinkedBlockingQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import es.tid.pce.pcep.messages.PCEPReport;
import es.tid.pce.server.PCEServerParameters;
import es.tid.pce.server.lspdb.ReportDB_Handler;
public class ReportProcessorThread extends Thread
{
// Loop flag; set once in the constructor and never cleared - the loop
// actually terminates via the break in the InterruptedException handler.
boolean running;
// Queue of incoming PCRpt messages filled by the PCEP session layer;
// take() blocks until a message is available.
LinkedBlockingQueue<PCEPReport> reportMessageQueue;
// Handler for the LSP database the reports are recorded into.
ReportDB_Handler lspDB;
Logger log;
/**
 * @param reportMessageQueue shared queue the PCEP server pushes report messages onto
 * @param lspDB LSP database handler used to record each report
 */
public ReportProcessorThread( LinkedBlockingQueue<PCEPReport> reportMessageQueue, ReportDB_Handler lspDB)
{
log=LoggerFactory.getLogger("PCEServer");
running = true;
this.lspDB = lspDB;
this.reportMessageQueue = reportMessageQueue;
}
// Consumer loop: blocks on the queue and dispatches each report until the
// thread is interrupted.
public void run()
{
PCEPReport report_m;
while (running)
{
try
{
report_m=reportMessageQueue.take();
effectivelyDispatch(report_m);
}
catch(InterruptedException e)
{
log.warn("Interrupted Exception Captured in ReportProcessorThread");
e.printStackTrace();
break;
}
}
}
// Records a PCRpt message in the LSP database. The middle section detects
// the end of a PCC's state synchronization: a state report whose LSP has
// the S flag clear and LSP-ID 0 is taken to mean "sync is over" (the DB
// calls that would persist this are currently commented out).
public void effectivelyDispatch(PCEPReport pcepReport)
{
log.info("Received new report message: "+pcepReport.toString());
log.info("whith ID :"+pcepReport.getStateReportList().get(0).getLSP().getLspId());
// `addres` is only consumed by the commented-out setPCCSyncOver() call below.
Inet4Address addres = pcepReport.getStateReportList().get(0).getLSP().getLspIdentifiers_tlv().getTunnelSenderIPAddress();
// NOTE(review): boxed Boolean used as a plain flag; primitive boolean
// would avoid pointless (un)boxing.
Boolean isSyncOver = false;
log.info("Size LSP:"+pcepReport.getStateReportList().size());
//isSyncOver = lspDB.isPCCSyncOver(pcepReport.getStateReportList().get(0).getLSP().getLspIdentifiers_tlv().getTunnelSenderIPAddress());
log.info("Package received from adress: "+pcepReport.getStateReportList().get(0).getLSP().getLspIdentifiers_tlv().getTunnelSenderIPAddress());
if (!isSyncOver)
{
boolean syncFlag;
int numLSPs = pcepReport.getStateReportList().size();
for (int j = 0; j < numLSPs ; j++)
{
syncFlag = pcepReport.getStateReportList().get(j).getLSP().issFlag();
if ((!syncFlag)&&(pcepReport.getStateReportList().get(j).getLSP().getLspId()==0))
{
isSyncOver = true;
log.info("Sync is over");
// lspDB.setPCCSyncOver(addres);
}
}
}
// NOTE(review): this loop calls processReport(pcepReport) once per state
// report but always passes the WHOLE message, so the same message is
// processed size() times - confirm whether a single call was intended.
for (int i = 0; i < pcepReport.getStateReportList().size(); i++)
{
lspDB.processReport(pcepReport);
}
}
}
| {
"content_hash": "009bc7a820b546561e41f6c808f69d09",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 144,
"avg_line_length": 26.04255319148936,
"alnum_prop": 0.7128267973856209,
"repo_name": "telefonicaid/netphony-pce",
"id": "6e289fd9790df9952ed43c1f1c1435172e1df970",
"size": "2448",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/es/tid/pce/computingEngine/ReportProcessorThread.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1854336"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<title>YUI: Local Combo Handler</title>
<link rel="stylesheet" type="text/css" href="http://yui.yahooapis.com/2.6.0/build/reset-fonts-grids/reset-fonts-grids.css">
<link rel="stylesheet" type="text/css" href="http://yui.yahooapis.com/2.6.0/build/assets/skins/sam/skin.css">
<link rel="stylesheet" href="http://blog.davglass.com/wp-content/themes/davglass/style.css" type="text/css">
<link rel="stylesheet" type="text/css" href="http://developer.yahoo.com/yui/assets/dpSyntaxHighlighter.css">
<style type="text/css" media="screen">
p, h2, blockquote {
margin: 1em;
}
pre {
border: 1px solid #ddd;
background-color: #efefef;
padding: .5em;
margin: 1em;
}
</style>
</head>
<body class="yui-skin-sam">
<div id="davdoc" class="yui-t7">
<div id="hd"><h1 id="header"><a href="http://blog.davglass.com/">YUI: Local Combo Handler</a></h1></div>
<div id="bd">
<p><strong>Note:</strong> Before I start, it should be noted that my server is not under my control so I can't tweak it to serve these files faster. So the examples may load a little slowly.</p>
<p>This guide will help you install a local combo handler instance that will work with YUI 2.6.0.</p>
<p>We will be using an Open Source project called <a href="http://code.google.com/p/minify/">Minify</a> to handle our combo files.</p>
<blockquote>
Minify is a PHP5 app that can combine multiple CSS or Javascript files, compress their contents (i.e. removal of unnecessary whitespace/comments), and serve the results with HTTP encoding (gzip/deflate) and headers that allow optimal client-side caching. This helps you follow several of Yahoo!'s <a rel="nofollow" href="http://developer.yahoo.com/performance/index.html#rules">Rules for High Performance Web Sites</a>.
</blockquote>
<h2>Setup Minify</h2>
<p>First we need to download the minify source <a href="http://code.google.com/p/minify/downloads/list">here</a> and unzip it. You will see a list of files and directories like this:</p>
<pre>
HISTORY.txt
LICENSE.txt
README.txt
UPGRADING.txt
min/
min_unit_tests/
</pre>
<p>We need to move the <code>min</code> directory to the root of the webserver and rename it <code>combo</code>.</p>
<pre>
sudo mv min /var/www/combo
</pre>
<p>Now edit the default config file and change a few options:</p>
<pre>
vim /var/www/combo/config.php
</pre>
<p>Update the following options:</p>
<textarea name="code" class="PHP">
$min_cachePath = '/tmp'; //Sets a directory to store the cached files
$min_enableBuilder = false; //We don't want people getting to the builder
$min_serveOptions['maxAge'] = 180000; //Set the range for a re-cache high
$min_serveOptions['minApp']['maxFiles'] = 30; //Bump up the default files
//Add the following line to keep minify from compressing the javascript since -min files
//are already compressed.
$min_serveOptions['minifiers']['application/x-javascript'] = '';
</textarea>
<p>Now <code>minify</code> should be installed and operational.</p>
<h2>Setup the library code</h2>
<p>First create a place in the webroot to store your library files:</p>
<pre>
sudo mkdir /var/www/lib
</pre>
<p>Now download the YUI <a href="http://developer.yahoo.com/yui/download">2.6.0 release here</a> and unzip it. You will see a list of files like this:</p>
<pre>
README
as-docs/
as-src/
assets/
build/
docs/
examples/
index.html
tests/
</pre>
<p>Now create a directory under <code>lib</code> called <code>2.6.0</code>.</p>
<pre>
sudo mkdir /var/www/lib/2.6.0
</pre>
<p>Now move the <code>build</code> directory inside the <code>lib/2.6.0</code> directory.</p>
<h2>Setting up YUILoader</h2>
<p>Now configure the YUILoader to fetch it's files from the local combo handler instead of the default one.</p>
<p>First load the local copy of the YUILoader from our webroot.</p>
<textarea name="code" class="HTML">
<script type="text/javascript" src="/libs/2.6.0/build/yuiloader/yuiloader.js"></script>
</textarea>
<p>The final step is to configure YUILoader use the local combo handler.</p>
<p>There are 3 main options that need to be configured: <code>comboBase, filter and skin</code></p>
<p><code>comboBase</code>: This needs to be set to the local handler: <code>/combo/?b=libs&f=</code></p>
<p><code>filter</code>: YUILoader uses an & to separate the URL pieces, but minify expects a , so we need to apply a filter that replaces all & characters with a ,</p>
<p><code>skin</code>: This is needed for skinning, minify has a tough time with relative file paths in a CSS file, this option tells YUILoader to rollup skin files. This way we get the full skin with good relative paths.</p>
<h2>Example config</h2>
<textarea name="code" class="JScript">
// Instantiate and configure Loader:
var loader = new YAHOO.util.YUILoader({
require: ["button"],
comboBase: '/combo/?b=libs&f=',
loadOptional: true,
filter: {
'searchExp': /&2/g,
'replaceStr': ",2"
},
skin: {
defaultSkin: 'sam',
base: 'assets/skins/',
path: 'skin.css',
rollup: 1
},
onSuccess: function() {
//Files are loaded..
},
timeout: 10000,
combine: true
});
loader.insert();
</textarea>
<h2>Example Page</h2>
<!--a href="yui3.php">YUI 3 - PR2</a><br-->
<p><a href="yui2.php">Page loading YUI 2.6.0 from a local handler</a></p>
</div>
<div id="ft"> </div>
</div>
<script type="text/javascript" src="http://yui.yahooapis.com/2.6.0/build/utilities/utilities.js"></script>
<script src="http://developer.yahoo.com/yui/assets/dpSyntaxHighlighter.js"></script>
<script type="text/javascript" src="../js/toolseffects-min.js"></script>
<script type="text/javascript" src="../js/davglass.js"></script>
<script type="text/javascript">
(function () {
	// Highlight every <textarea name="code" class="..."> block on the page.
	// The original also aliased YAHOO.util.Dom and YAHOO.util.Event here,
	// but neither alias was used, so they have been removed.
	dp.SyntaxHighlighter.HighlightAll('code');
})();
</script>
</body>
</html>
| {
"content_hash": "fcab3e323cf7322130908425f1a22228",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 427,
"avg_line_length": 43.054794520547944,
"alnum_prop": 0.6616290168628699,
"repo_name": "chemouna/yui-examples",
"id": "af4cecce65c5eac678f578c15c721c84d6804078",
"size": "6286",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "combo/index.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
<?php namespace App\Http\Middleware;
use App\User;
use Closure;
use Illuminate\Support\Facades\Redirect;
use Illuminate\Support\Facades\Session;
class UserAuth {
	/**
	 * Handle an incoming request.
	 *
	 * Verifies that the session identifies an existing user whose stored
	 * name matches the session value; otherwise redirects to the login page.
	 *
	 * @param  \Illuminate\Http\Request  $request
	 * @param  \Closure  $next
	 * @return mixed
	 */
	public function handle($request, Closure $next)
	{
		// Loose == comparisons kept deliberately: Session::get() returns
		// null for missing keys, and null == "" in PHP.
		if (Session::get('userid') == "" || Session::get('username') == "") {
			return Redirect::to('login');
		}

		$userInfo = User::find(Session::get('userid'));

		// Bug fix: User::find() returns null when the id no longer exists;
		// the original fataled on ->name for a deleted user.
		if ($userInfo === null || Session::get('username') != $userInfo->name) {
			return Redirect::to('login');
		}

		return $next($request);
	}
}
| {
"content_hash": "5fb5d1982191a14a00cb13cd20ea90c5",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 84,
"avg_line_length": 26.307692307692307,
"alnum_prop": 0.6345029239766082,
"repo_name": "SuperHentai/Shadowsocks-Config-Server",
"id": "20bd88e9fb8113f5efe019a047e8771a3c434506",
"size": "684",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/Http/Middleware/UserAuth.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "356"
},
{
"name": "JavaScript",
"bytes": "10229"
},
{
"name": "PHP",
"bytes": "169982"
},
{
"name": "Shell",
"bytes": "989"
}
],
"symlink_target": ""
} |
/* $Id$ */
/*! \file
 * \brief Collection of forward declarations for Grass-related types
 *
 * \date 2009-01-07
 * \author yoh2
 * $LastChangedBy$
 * $LastChangedDate$
 */
#ifndef grass_fwd_H_
#define grass_fwd_H_
/* grass_value related */
struct grass_value;
struct grass_value_node;
/* grass_instruction related.
 * NOTE(review): the tags below are spelled "instraction"; forward
 * declarations must match the defining headers exactly, so confirm the
 * spelling there before "correcting" it here. */
struct grass_instraction;
struct grass_instraction_node;
/* grass_machine related */
struct grass_machine;
#endif /* grass_fwd_H_ */
| {
"content_hash": "ee339a700546a4eed18e9f197a1bc026",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 30,
"avg_line_length": 15.88,
"alnum_prop": 0.6675062972292192,
"repo_name": "yoh2/grass-c",
"id": "a3ef537982358a685e4a161a2ec7ea3455fffc1f",
"size": "427",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/grass_fwd.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "40828"
},
{
"name": "Shell",
"bytes": "49"
}
],
"symlink_target": ""
} |
id: bdb35296-7fb0-4687-9882-398f2447d658
post_id: 6bd4ad83-bfb8-4e5e-92f3-6bc4c3eab8ea
draft: false
author:
name: chaostangent
email: john.noel@chaostangent.com
ip: 77.102.217.59
---
Many thanks for your support, I would put more time into this blog if I only had it!
As for the art-style, a tricky question. Out of everyone it's Daikichi who has the most "generic" of visual styles although I'm assuming that's purposeful to give him the everyman vibe. Changing the art-style though would definitely change the innate feeling of the series but I'd like to believe the characters and story would still be as strong. If you started introducing the idiosyncrasies of "moe" shows (is it still called "moeblob"?) however - rather than just the aesthetics - then things begin to be a little muddier.
I guess the best comparison I can think of is between <em>Aria</em> and <em>Mushishi</em>. The former closer to more mainstream styles (as I understand from you) and the latter closer to <em>Usagi Drop</em> but both sterling shows with similar tone.
In short: for me story and characters will always trump aesthetic style, but getting the right tone for visuals is important. | {
"content_hash": "03572bec26e1b986701a2a7841b639a5",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 526,
"avg_line_length": 74.125,
"alnum_prop": 0.7748735244519392,
"repo_name": "johnnoel/chaostangent.com",
"id": "912e8c83aa9f52b212ae44dc57fc35e850cded6f",
"size": "1190",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "content/posts/2011/09/usagi-drop/comment-02.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "53399"
},
{
"name": "HTML",
"bytes": "77208"
},
{
"name": "JavaScript",
"bytes": "38156"
},
{
"name": "PHP",
"bytes": "418774"
},
{
"name": "Shell",
"bytes": "5935"
},
{
"name": "Smarty",
"bytes": "235"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "89efe7c9a1b8355b847f1aaae94b66c4",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "ec0cb4dab6afd7cb5c0cba5218bc6c3afa0fb173",
"size": "180",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Ericales/Marcgraviaceae/Marcgravia/Marcgravia brownei/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "dbb1565577882eaab2ae13095f9d6d30",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "bd2cba99589c96f82b5d0e090e59fe9e71d90e42",
"size": "189",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Solidago/Solidago uliginosa/ Syn. Solidago neglecta/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package logbook.config;
import java.io.IOException;
import logbook.config.bean.AppConfigBean;
import logbook.constants.AppConstants;
import logbook.util.BeanUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * Saves and restores the application configuration.
 *
 */
public class AppConfig {

    private static class LoggerHolder {
        /** Logger (initialization-on-demand holder idiom) */
        private static final Logger LOG = LogManager.getLogger(AppConfig.class);
    }

    /** Application configuration bean; null until {@link #load()} or {@link #store()} runs */
    private static AppConfigBean configBean;

    /**
     * Writes the configuration to the configuration file.
     * A fresh {@code AppConfigBean} is created first if none is loaded yet.
     *
     * @throws IOException if the configuration file cannot be written
     */
    public static void store() throws IOException {
        if (configBean == null) {
            configBean = new AppConfigBean();
        }
        BeanUtils.writeObject(AppConstants.APP_CONFIG_FILE, configBean);
    }

    /**
     * Loads the application configuration from the configuration file.
     * When {@code readObject} yields {@code null}, a fresh
     * {@code AppConfigBean} is used instead; read failures are logged
     * and otherwise ignored (best-effort load).
     */
    public static void load() {
        try {
            AppConfigBean bean = BeanUtils.readObject(AppConstants.APP_CONFIG_FILE, AppConfigBean.class);
            if (bean != null) {
                configBean = bean;
            } else {
                configBean = new AppConfigBean();
            }
        } catch (Exception e) {
            LoggerHolder.LOG.warn("アプリケーション設定を読み込みますに失敗しました", e);
        }
    }

    /**
     * Gets the application configuration.
     *
     * @return the application configuration (may be null before load/store)
     */
    public static AppConfigBean get() {
        return configBean;
    }
}
| {
"content_hash": "f3ca3be6b3cb302ca16bca9f95fdf887",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 105,
"avg_line_length": 23.233333333333334,
"alnum_prop": 0.6040172166427547,
"repo_name": "Ibemu/logbook",
"id": "c9d05e7dc22a16506e076f1fbea3e988e321c34f",
"size": "1618",
"binary": false,
"copies": "2",
"ref": "refs/heads/plus",
"path": "main/logbook/config/AppConfig.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "1072189"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Prodr. 4:195. 1830
#### Original name
null
### Remarks
null | {
"content_hash": "8e47edb7edc1f8d5e884356fcc099e0e",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 11.384615384615385,
"alnum_prop": 0.6891891891891891,
"repo_name": "mdoering/backbone",
"id": "1bf31cb96740a94b219ec852d9d9e991f4a515c2",
"size": "195",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Apiales/Apiaceae/Heracleum/Heracleum wallichii/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package net.virtualviking.vropsexport;

/**
 * Callback interface through which long-running operations report
 * incremental progress to an interested observer.
 */
public interface ProgressMonitor {

    /**
     * Reports that progress has advanced.
     *
     * @param n the progress value
     */
    void reportProgress(int n);
}
| {
"content_hash": "9b296a44aabe87be8557898e745489b6",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 38,
"avg_line_length": 19,
"alnum_prop": 0.8070175438596491,
"repo_name": "prydin/vrops-export",
"id": "f727f2f5a880b512b3d394a2d4358d9caff1ac46",
"size": "708",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/net/virtualviking/vropsexport/ProgressMonitor.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "958"
},
{
"name": "Java",
"bytes": "96193"
},
{
"name": "Shell",
"bytes": "916"
}
],
"symlink_target": ""
} |
#include <sys/queue.h>
#include "unity.h"
#include "lagopus_apis.h"
#include "lagopus/pbuf.h"
#include "lagopus/ofp_bridgeq_mgr.h"
#include "openflow.h"
#include "openflow13.h"
#include "handler_test_utils.h"
/* Unity fixture: initialize the bridgeq manager before each test. */
void
setUp(void) {
  ofp_bridgeq_mgr_initialize(NULL);
}
/* Unity fixture: clear all bridgeq state registered during a test so
 * cases stay independent of each other. */
void
tearDown(void) {
  lagopus_result_t ret = LAGOPUS_RESULT_ANY_FAILURES;
  ret = ofp_bridgeq_mgr_clear();
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_OK, ret,
                            "ofp_bridgeq_mgr_clear error.");
}
/* Exercises the full bridge life cycle for one dpid:
 *   register -> duplicate register (ALREADY_EXISTS) -> lookup ->
 *   poll getters -> release -> unregister -> lookup (NOT_FOUND).
 * Statement order matters: the lookup's reference must be released via
 * ofp_bridgeq_mgr_bridgeq_free before the bridge is unregistered. */
void
test_ofp_bridgeq_mgr_bridge(void) {
  lagopus_result_t ret = LAGOPUS_RESULT_ANY_FAILURES;
  uint64_t dpid = 0x01;
  struct ofp_bridgeq *bridgeq = NULL;
  lagopus_qmuxer_poll_t poll = NULL;
  /* register bridge. */
  ret = ofp_bridgeq_mgr_bridge_register(dpid);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_OK, ret,
                            "ofp_bridgeq_mgr_bridge_register error.");
  /* registering the same dpid again must be rejected. */
  ret = ofp_bridgeq_mgr_bridge_register(dpid);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_ALREADY_EXISTS, ret,
                            "ofp_bridgeq_mgr_bridge_register(same bridge) error.");
  /* lookup. */
  ret = ofp_bridgeq_mgr_bridge_lookup(dpid, &bridgeq);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_OK, ret,
                            "ofp_bridgeq_mgr_bridge_lookup error.");
  TEST_ASSERT_NOT_EQUAL_MESSAGE(NULL, bridgeq,
                                "bridgeq error.");
  /* check getter: event queue poll handle. */
  poll = NULL;
  poll = ofp_bridgeq_mgr_eventq_poll_get(bridgeq);
  TEST_ASSERT_NOT_EQUAL_MESSAGE(NULL, poll,
                                "ofp_bridgeq_mgr_eventq_poll_get error.");
  poll = NULL;
  poll = ofp_bridgeq_mgr_dataq_poll_get(bridgeq);
  TEST_ASSERT_NOT_EQUAL_MESSAGE(NULL, poll,
                                "ofp_bridgeq_mgr_dataq_poll_get error.");
  /* decrement refs (release the reference taken by lookup). */
  ofp_bridgeq_mgr_bridgeq_free(bridgeq);
  /* unregister bridge. */
  ret = ofp_bridgeq_mgr_bridge_unregister(dpid);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_OK, ret,
                            "ofp_bridgeq_mgr_bridge_unregister error.");
  /* lookup after unregister must fail. */
  bridgeq = NULL;
  ret = ofp_bridgeq_mgr_bridge_lookup(dpid, &bridgeq);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_NOT_FOUND, ret,
                            "ofp_bridgeq_mgr_bridge_lookup error.");
}
/* Passing a NULL output pointer to the lookup must be rejected. */
void
test_ofp_bridgeq_mgr_bridge_lookup_null(void) {
  const uint64_t dpid = 0x01;
  lagopus_result_t rv;

  rv = ofp_bridgeq_mgr_bridge_lookup(dpid, NULL);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_INVALID_ARGS, rv,
                            "ofp_bridgeq_mgr_bridge_lookup(null) error.");
}
#define MAX_LENGTH 3

/* Fills the manager up to MAX_BRIDGES registrations, then verifies that
 * one extra registration is refused with ITERATION_HALTED.
 * (Function name typo "orver" is kept: Unity discovers tests by name.) */
void
test_ofp_bridgeq_mgr_bridge_register_orver_length(void) {
  lagopus_result_t ret = LAGOPUS_RESULT_ANY_FAILURES;
  int data_num = MAX_BRIDGES;
  uint64_t dpids[MAX_BRIDGES + 1];
  int i;
  /* create data: dpids 1 .. MAX_BRIDGES + 1. */
  for (i = 0; i < data_num + 1; i++) {
    dpids[i] = (uint64_t) (i + 1);
  }
  /* register up to the capacity limit. */
  for (i = 0; i < data_num; i++) {
    ret = ofp_bridgeq_mgr_bridge_register(dpids[i]);
    /* fixed message: was "...registererror." (missing space) */
    TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_OK, ret,
                              "ofp_bridgeq_mgr_bridge_register error.");
  }
  /* one past capacity must be refused. */
  ret = ofp_bridgeq_mgr_bridge_register(dpids[data_num]);
  /* fixed message: was "(orver length)" */
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_ITERATION_HALTED, ret,
                            "ofp_bridgeq_mgr_bridge_register(over length) error.");
}
/* Snapshot path, happy case: register MAX_LENGTH bridges, snapshot them
 * into an array, and verify
 *   - every registered dpid appears exactly once (dpids[i][1] is a
 *     "seen" flag, initially false),
 *   - polls_get / dp_polls_get fill their arrays completely,
 *   - poll_reset nulls every slot afterwards.
 * NOTE(review): the calloc() results are not checked for NULL before
 * use; acceptable in a test, but a failed allocation would crash rather
 * than fail an assertion. */
void
test_ofp_bridgeq_mgr_bridgeqs_to_array_normal_pattern(void) {
  lagopus_result_t ret = LAGOPUS_RESULT_ANY_FAILURES;
  uint64_t i, j;
  uint64_t data_num = MAX_LENGTH;
  uint64_t dpids[MAX_LENGTH][2] = {{0x01, false},
    {0x02, false},
    {0x03, false}
  };
  struct ofp_bridgeq *bridgeqs[MAX_BRIDGES];
  struct ofp_bridge *bridge;
  uint64_t poll_count = 0;
  uint64_t bridgeq_count = 0;
  lagopus_qmuxer_poll_t *polls =
    (lagopus_qmuxer_poll_t *) calloc(MAX_LENGTH * MAX_BRIDGE_POLLS,
                                     sizeof(lagopus_qmuxer_poll_t));
  lagopus_qmuxer_poll_t *dp_polls =
    (lagopus_qmuxer_poll_t *) calloc(MAX_LENGTH * MAX_BRIDGE_DP_POLLS,
                                     sizeof(lagopus_qmuxer_poll_t));
  /* register bridge. */
  for (i = 0; i < data_num; i++) {
    ret = ofp_bridgeq_mgr_bridge_register(dpids[i][0]);
    TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_OK, ret,
                              "ofp_bridgeq_mgr_bridge_register error.");
  }
  /* call func. */
  ret = ofp_bridgeq_mgr_bridgeqs_to_array(bridgeqs, &bridgeq_count, MAX_LENGTH);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_OK, ret,
                            "ofp_bridgeq_mgr_bridgeqs_to_array(normal) error.");
  TEST_ASSERT_EQUAL_MESSAGE(MAX_LENGTH, bridgeq_count,
                            "index error.");
  /* mark each dpid as seen; a second hit means a duplicate entry. */
  for (i = 0; i < data_num; i++) {
    for (j = 0; j < data_num; j++) {
      bridge = ofp_bridgeq_mgr_bridge_get(bridgeqs[i]);
      if (bridge->dpid == dpids[j][0]) {
        /* check duplicate. */
        if (dpids[j][1] == false) {
          dpids[j][1] = true;
        } else {
          TEST_FAIL_MESSAGE("duplicate error.");
        }
      }
    }
  }
  /* every registered dpid must have been seen. */
  for (i = 0; i < data_num; i++) {
    TEST_ASSERT_EQUAL_MESSAGE(true, dpids[i][1],
                              "dpid error.");
  }
  /* test ofp_bridgeq_mgr_polls_get() */
  poll_count = 0;
  ret = ofp_bridgeq_mgr_polls_get(polls, bridgeqs,
                                  &poll_count, data_num);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_OK, ret,
                            "ofp_bridgeq_mgr_polls_get(normal) error.");
  TEST_ASSERT_EQUAL_MESSAGE(data_num * MAX_BRIDGE_POLLS, poll_count,
                            "poll_count error.");
  for (i = 0; i < data_num * MAX_BRIDGE_POLLS; i++) {
    TEST_ASSERT_NOT_EQUAL_MESSAGE(NULL,
                                  polls[i],
                                  "polls error.");
  }
  /* test ofp_bridgeq_mgr_poll_reset(): every slot back to NULL. */
  ofp_bridgeq_mgr_poll_reset(polls, MAX_LENGTH * MAX_BRIDGE_POLLS);
  for (i = 0; i < data_num * MAX_BRIDGE_POLLS; i++) {
    TEST_ASSERT_EQUAL_MESSAGE(NULL,
                              polls[i],
                              "polls error(null).");
  }
  /* test ofp_bridgeq_mgr_dp_polls_get() */
  poll_count = 0;
  ret = ofp_bridgeq_mgr_dp_polls_get(dp_polls, bridgeqs,
                                     &poll_count, data_num);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_OK, ret,
                            "ofp_bridgeq_mgr_dp_polls_get(normal) error.");
  TEST_ASSERT_EQUAL_MESSAGE(data_num * MAX_BRIDGE_DP_POLLS, poll_count,
                            "poll_count(DP) error.");
  for (i = 0; i < data_num * MAX_BRIDGE_DP_POLLS; i++) {
    TEST_ASSERT_NOT_EQUAL_MESSAGE(NULL,
                                  dp_polls[i],
                                  "polls error.");
  }
  /* test ofp_bridgeq_mgr_poll_reset() */
  ofp_bridgeq_mgr_poll_reset(dp_polls, MAX_LENGTH * MAX_BRIDGE_DP_POLLS);
  for (i = 0; i < data_num * MAX_BRIDGE_DP_POLLS; i++) {
    TEST_ASSERT_EQUAL_MESSAGE(NULL,
                              dp_polls[i],
                              "dp_polls error(null).");
  }
  /* test ofp_bridgeq_mgr_bridgeqs_free. */
  /* Call func.*/
  ofp_bridgeq_mgr_bridgeqs_free(bridgeqs, bridgeq_count);
  free(polls);
  free(dp_polls);
}
/* Each NULL argument to bridgeqs_to_array must yield INVALID_ARGS. */
void
test_ofp_bridgeq_mgr_bridgeqs_to_array_null(void) {
  struct ofp_bridgeq *bridgeqs[MAX_BRIDGES];
  uint64_t count;
  lagopus_result_t rv;

  /* NULL count pointer. */
  rv = ofp_bridgeq_mgr_bridgeqs_to_array(bridgeqs, NULL, MAX_LENGTH);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_INVALID_ARGS, rv,
                            "ofp_bridgeq_mgr_bridgeqs_to_array(null) error.");

  /* NULL destination array. */
  rv = ofp_bridgeq_mgr_bridgeqs_to_array(NULL, &count, MAX_LENGTH);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_INVALID_ARGS, rv,
                            "ofp_bridgeq_mgr_bridgeqs_to_array(null) error.");
}
/* Each NULL argument to polls_get must yield INVALID_ARGS. */
void
test_ofp_bridgeq_mgr_polls_get_null(void) {
  lagopus_qmuxer_poll_t polls;
  struct ofp_bridgeq *bridgeqs[MAX_BRIDGES];
  uint64_t count;
  uint64_t bridgeqs_size = 0;
  lagopus_result_t rv;

  /* NULL polls array. */
  rv = ofp_bridgeq_mgr_polls_get(NULL, bridgeqs, &count, bridgeqs_size);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_INVALID_ARGS, rv,
                            "ofp_bridgeq_mgr_polls_get(null) error.");

  /* NULL bridgeqs array. */
  rv = ofp_bridgeq_mgr_polls_get(&polls, NULL, &count, bridgeqs_size);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_INVALID_ARGS, rv,
                            "ofp_bridgeq_mgr_polls_get(null) error.");

  /* NULL count pointer. */
  rv = ofp_bridgeq_mgr_polls_get(&polls, bridgeqs, NULL, bridgeqs_size);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_INVALID_ARGS, rv,
                            "ofp_bridgeq_mgr_polls_get(null) error.");
}
/* Each NULL argument to dp_polls_get must yield INVALID_ARGS. */
void
test_ofp_bridgeq_mgr_dp_polls_get_null(void) {
  lagopus_qmuxer_poll_t polls;
  struct ofp_bridgeq *bridgeqs[MAX_BRIDGES];
  uint64_t count;
  uint64_t bridgeqs_size = 0;
  lagopus_result_t rv;

  /* NULL polls array. */
  rv = ofp_bridgeq_mgr_dp_polls_get(NULL, bridgeqs, &count, bridgeqs_size);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_INVALID_ARGS, rv,
                            "ofp_bridgeq_mgr_dp_polls_get(null) error.");

  /* NULL bridgeqs array. */
  rv = ofp_bridgeq_mgr_dp_polls_get(&polls, NULL, &count, bridgeqs_size);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_INVALID_ARGS, rv,
                            "ofp_bridgeq_mgr_dp_polls_get(null) error.");

  /* NULL count pointer. */
  rv = ofp_bridgeq_mgr_dp_polls_get(&polls, bridgeqs, NULL, bridgeqs_size);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_INVALID_ARGS, rv,
                            "ofp_bridgeq_mgr_dp_polls_get(null) error.");
}
/* Snapshot path after unregister: register MAX_LENGTH bridges, snapshot
 * them, then unregister one of them.  The snapshot must keep its entries
 * usable -- presumably each array slot holds its own reference -- so the
 * dpid/poll checks below still pass for all MAX_LENGTH entries.
 * TODO(review): confirm the reference-holding assumption against
 * ofp_bridgeq_mgr_bridgeqs_to_array's implementation.
 * Note: the first TEST_ASSERT on `ret` checks the earlier
 * bridgeqs_to_array call, not the unregister that precedes it textually. */
void
test_ofp_bridgeq_mgr_bridgeqs_to_array_unregister(void) {
  lagopus_result_t ret = LAGOPUS_RESULT_ANY_FAILURES;
  uint64_t i, j, delete_target = 1;
  uint64_t data_num = MAX_LENGTH;
  uint64_t dpids[MAX_LENGTH][2] = {{0x01, false},
    {0x02, false},
    {0x03, false}
  };
  struct ofp_bridgeq *bridgeqs[MAX_BRIDGES];
  struct ofp_bridge *bridge;
  uint64_t poll_count = 0;
  uint64_t bridgeq_count = 0;
  lagopus_qmuxer_poll_t *polls =
    (lagopus_qmuxer_poll_t *) calloc(MAX_LENGTH * MAX_BRIDGE_POLLS,
                                     sizeof(lagopus_qmuxer_poll_t));
  lagopus_qmuxer_poll_t *dp_polls =
    (lagopus_qmuxer_poll_t *) calloc(MAX_LENGTH * MAX_BRIDGE_DP_POLLS,
                                     sizeof(lagopus_qmuxer_poll_t));
  /* register bridge. */
  for (i = 0; i < data_num; i++) {
    ret = ofp_bridgeq_mgr_bridge_register(dpids[i][0]);
    TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_OK, ret,
                              "ofp_bridgeq_mgr_bridge_register error.");
  }
  ret = ofp_bridgeq_mgr_bridgeqs_to_array(bridgeqs, &bridgeq_count, MAX_LENGTH);
  /* delete entry (unregister one of the snapshotted bridges). */
  bridge = ofp_bridgeq_mgr_bridge_get(bridgeqs[delete_target]);
  ofp_bridgeq_mgr_bridge_unregister(bridge->dpid);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_OK, ret,
                            "ofp_bridgeq_mgr_bridgeqs_to_array(normal) error.");
  TEST_ASSERT_EQUAL_MESSAGE(MAX_LENGTH, bridgeq_count,
                            "index error.");
  /* mark each dpid as seen; a second hit means a duplicate entry. */
  for (i = 0; i < data_num; i++) {
    for (j = 0; j < data_num; j++) {
      bridge = ofp_bridgeq_mgr_bridge_get(bridgeqs[i]);
      if (bridge->dpid == dpids[j][0]) {
        /* check duplicate. */
        if (dpids[j][1] == false) {
          dpids[j][1] = true;
        } else {
          TEST_FAIL_MESSAGE("duplicate error.");
        }
      }
    }
  }
  /* every registered dpid must still be present in the snapshot. */
  for (i = 0; i < data_num; i++) {
    TEST_ASSERT_EQUAL_MESSAGE(true, dpids[i][1],
                              "dpid error.");
  }
  /* test ofp_bridgeq_mgr_polls_get() */
  poll_count = 0;
  ret = ofp_bridgeq_mgr_polls_get(polls, bridgeqs,
                                  &poll_count, data_num);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_OK, ret,
                            "ofp_bridgeq_mgr_polls_get(normal) error.");
  TEST_ASSERT_EQUAL_MESSAGE(data_num * MAX_BRIDGE_POLLS, poll_count,
                            "poll_count error.");
  for (i = 0; i < data_num * MAX_BRIDGE_POLLS; i++) {
    TEST_ASSERT_NOT_EQUAL_MESSAGE(NULL,
                                  polls[i],
                                  "polls error.");
  }
  /* test ofp_bridgeq_mgr_dp_polls_get() */
  poll_count = 0;
  ret = ofp_bridgeq_mgr_dp_polls_get(dp_polls, bridgeqs,
                                     &poll_count, data_num);
  TEST_ASSERT_EQUAL_MESSAGE(LAGOPUS_RESULT_OK, ret,
                            "ofp_bridgeq_mgr_dp_polls_get(normal) error.");
  TEST_ASSERT_EQUAL_MESSAGE(data_num * MAX_BRIDGE_DP_POLLS, poll_count,
                            "poll_count(DP) error.");
  for (i = 0; i < data_num * MAX_BRIDGE_DP_POLLS; i++) {
    TEST_ASSERT_NOT_EQUAL_MESSAGE(NULL,
                                  dp_polls[i],
                                  "polls error.");
  }
  /* test ofp_bridgeq_mgr_bridgeqs_free. */
  /* Call func.*/
  ofp_bridgeq_mgr_bridgeqs_free(bridgeqs, bridgeq_count);
  free(polls);
  free(dp_polls);
}
| {
"content_hash": "3e824c9a0af5f4c5fcb54874e82f1a18",
"timestamp": "",
"source": "github",
"line_count": 369,
"max_line_length": 84,
"avg_line_length": 35.46341463414634,
"alnum_prop": 0.5808497631056091,
"repo_name": "skuwa/lagopus",
"id": "4ef7f99b7dcf0e41fadef70f8e10e9acd71947d8",
"size": "13710",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/agent/test/ofp_bridgeq_mgr_test.c",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "4415336"
},
{
"name": "C++",
"bytes": "44569"
},
{
"name": "Objective-C",
"bytes": "2293"
},
{
"name": "Shell",
"bytes": "345262"
}
],
"symlink_target": ""
} |
// Controller for the home route; exposes the message shown by home.html.
// Uses inline array DI annotation so the '$scope' dependency survives
// minification (implicit annotation breaks when parameter names are mangled).
angular.module('guidebookApp')
  .controller('homeCtrl', ['$scope', function($scope) {
    $scope.message = "This is the home.html template rendered via ng-view";
  }]);
| {
"content_hash": "af2fb9962b13a588966b9fb8ee30919f",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 73,
"avg_line_length": 25.666666666666668,
"alnum_prop": 0.7142857142857143,
"repo_name": "wesdevclimb/Kentucky-Boulderer",
"id": "7ae55db2bb9c7ee0546a9abd57c3712b5e68853a",
"size": "154",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "public/scripts/controllers/homectrl.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "245"
},
{
"name": "HTML",
"bytes": "6847"
},
{
"name": "JavaScript",
"bytes": "23025"
}
],
"symlink_target": ""
} |
"""Mongodb implementations of repository managers."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods,too-few-public-methods
# Number of methods are defined in specification
# pylint: disable=protected-access
# Access to protected methods allowed in package mongo package scope
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from . import profile
from . import sessions
from .. import utilities
from ..primitives import Type
from ..type.objects import TypeList
from dlkit.abstract_osid.osid import errors
from dlkit.manager_impls.repository import managers as repository_managers
from dlkit.mongo.osid import managers as osid_managers
class RepositoryProfile(osid_managers.OsidProfile, repository_managers.RepositoryProfile):
    """The repository profile describes interoperability among repository services.

    The ``supports_*`` methods report which optional capabilities this
    implementation provides; each is driven by membership in
    ``profile.SUPPORTS``.  The ``get_*_record_types`` methods expose the
    record ``Type``s registered for each object kind; their shared
    registry-to-``TypeList`` loop is factored into
    ``_get_record_type_list``.
    """

    def _get_record_type_list(self, registry_name):
        """Build a ``TypeList`` from the named record-type registry.

        Shared helper replacing the identical loop previously duplicated
        in every ``get_*_record_types`` method below.

        registry_name: (string) - registry key passed through to
                ``self._get_registry``
        return: (osid.type.TypeList) - the registered types

        """
        record_type_maps = self._get_registry(registry_name)
        return TypeList([Type(**record_type_maps[name])
                         for name in record_type_maps])

    def supports_asset_lookup(self):
        """Tests if asset lookup is supported.

        return: (boolean) - ``true`` if asset lookup is supported ``,``
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_asset_lookup' in profile.SUPPORTS

    def supports_asset_query(self):
        """Tests if asset query is supported.

        return: (boolean) - ``true`` if asset query is supported ``,``
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_asset_query' in profile.SUPPORTS

    def supports_asset_search(self):
        """Tests if asset search is supported.

        return: (boolean) - ``true`` if asset search is supported ``,``
                ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_asset_search' in profile.SUPPORTS

    def supports_asset_admin(self):
        """Tests if asset administration is supported.

        return: (boolean) - ``true`` if asset administration is
                supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_asset_admin' in profile.SUPPORTS

    def supports_asset_notification(self):
        """Tests if asset notification is supported.

        A repository may send messages when assets are created,
        modified, or deleted.

        return: (boolean) - ``true`` if asset notification is supported
                ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_asset_notification' in profile.SUPPORTS

    def supports_asset_repository(self):
        """Tests if retrieving mappings of assets and repositories is supported.

        return: (boolean) - ``true`` if asset repository mapping
                retrieval is supported ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_asset_repository' in profile.SUPPORTS

    def supports_asset_repository_assignment(self):
        """Tests if managing mappings of assets and repositories is supported.

        return: (boolean) - ``true`` if asset repository assignment is
                supported ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_asset_repository_assignment' in profile.SUPPORTS

    def supports_asset_composition(self):
        """Tests if assets are included in compositions.

        return: (boolean) - ``true`` if asset composition supported
                ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_asset_composition' in profile.SUPPORTS

    def supports_asset_composition_design(self):
        """Tests if mapping assets to compositions is supported.

        return: (boolean) - ``true`` if designing asset compositions is
                supported ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_asset_composition_design' in profile.SUPPORTS

    def supports_composition_lookup(self):
        """Tests if composition lookup is supported.

        return: (boolean) - ``true`` if composition lookup is supported
                ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_composition_lookup' in profile.SUPPORTS

    def supports_composition_query(self):
        """Tests if composition query is supported.

        return: (boolean) - ``true`` if composition query is supported
                ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_composition_query' in profile.SUPPORTS

    def supports_composition_search(self):
        """Tests if composition search is supported.

        return: (boolean) - ``true`` if composition search is supported
                ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_composition_search' in profile.SUPPORTS

    def supports_composition_admin(self):
        """Tests if composition administration is supported.

        return: (boolean) - ``true`` if composition administration is
                supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_composition_admin' in profile.SUPPORTS

    def supports_composition_repository(self):
        """Tests if retrieval of composition to repository mappings is supported.

        return: (boolean) - ``true`` if composition to repository
                mapping is supported ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_composition_repository' in profile.SUPPORTS

    def supports_composition_repository_assignment(self):
        """Tests if assigning composition to repository mappings is supported.

        return: (boolean) - ``true`` if composition to repository
                assignment is supported ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_composition_repository_assignment' in profile.SUPPORTS

    def supports_repository_lookup(self):
        """Tests if repository lookup is supported.

        return: (boolean) - ``true`` if repository lookup is supported
                ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_repository_lookup' in profile.SUPPORTS

    def supports_repository_query(self):
        """Tests if repository query is supported.

        return: (boolean) - ``true`` if repository query is supported
                ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_repository_query' in profile.SUPPORTS

    def supports_repository_admin(self):
        """Tests if repository administration is supported.

        return: (boolean) - ``true`` if repository administration is
                supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_repository_admin' in profile.SUPPORTS

    def supports_repository_hierarchy(self):
        """Tests if a repository hierarchy traversal is supported.

        return: (boolean) - ``true`` if a repository hierarchy traversal
                is supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_repository_hierarchy' in profile.SUPPORTS

    def supports_repository_hierarchy_design(self):
        """Tests if a repository hierarchy design is supported.

        return: (boolean) - ``true`` if a repository hierarchy design is
                supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*

        """
        return 'supports_repository_hierarchy_design' in profile.SUPPORTS

    def get_asset_record_types(self):
        """Gets all the asset record types supported.

        return: (osid.type.TypeList) - the list of supported asset
                record types
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._get_record_type_list('ASSET_RECORD_TYPES')

    asset_record_types = property(fget=get_asset_record_types)

    def get_asset_search_record_types(self):
        """Gets all the asset search record types supported.

        return: (osid.type.TypeList) - the list of supported asset
                search record types
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._get_record_type_list('ASSET_SEARCH_RECORD_TYPES')

    asset_search_record_types = property(fget=get_asset_search_record_types)

    def get_asset_content_record_types(self):
        """Gets all the asset content record types supported.

        return: (osid.type.TypeList) - the list of supported asset
                content record types
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._get_record_type_list('ASSET_CONTENT_RECORD_TYPES')

    asset_content_record_types = property(fget=get_asset_content_record_types)

    def get_composition_record_types(self):
        """Gets all the composition record types supported.

        return: (osid.type.TypeList) - the list of supported composition
                record types
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._get_record_type_list('COMPOSITION_RECORD_TYPES')

    composition_record_types = property(fget=get_composition_record_types)

    def get_composition_search_record_types(self):
        """Gets all the composition search record types supported.

        return: (osid.type.TypeList) - the list of supported composition
                search record types
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._get_record_type_list('COMPOSITION_SEARCH_RECORD_TYPES')

    composition_search_record_types = property(fget=get_composition_search_record_types)

    def get_repository_record_types(self):
        """Gets all the repository record types supported.

        return: (osid.type.TypeList) - the list of supported repository
                record types
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._get_record_type_list('REPOSITORY_RECORD_TYPES')

    repository_record_types = property(fget=get_repository_record_types)

    def get_repository_search_record_types(self):
        """Gets all the repository search record types supported.

        return: (osid.type.TypeList) - the list of supported repository
                search record types
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._get_record_type_list('REPOSITORY_SEARCH_RECORD_TYPES')

    repository_search_record_types = property(fget=get_repository_search_record_types)

    def get_spatial_unit_record_types(self):
        """Gets all the spatial unit record types supported.

        return: (osid.type.TypeList) - the list of supported spatial
                unit record types
        *compliance: mandatory -- This method must be implemented.*

        """
        return self._get_record_type_list('SPATIAL_UNIT_RECORD_TYPES')

    spatial_unit_record_types = property(fget=get_spatial_unit_record_types)

    def get_coordinate_types(self):
        """Gets all the coordinate types supported.

        return: (osid.type.TypeList) - the list of supported coordinate
                types
        *compliance: mandatory -- This method must be implemented.*

        """
        # No coordinate types are registered in this implementation.
        return TypeList([])

    coordinate_types = property(fget=get_coordinate_types)
class RepositoryManager(osid_managers.OsidManager, RepositoryProfile, repository_managers.RepositoryManager):
    """The repository manager provides access to asset lookup and creation session and provides interoperability tests
    for various aspects of this service.

    The sessions included in this manager are:

      * ``AssetLookupSession:`` a session to retrieve assets
      * ``AssetQuerySession:`` a session to query assets
      * ``AssetSearchSession:`` a session to search for assets
      * ``AssetAdminSession:`` a session to create and delete assets
      * ``AssetNotificationSession:`` a session to receive notifications
        pertaining to asset changes
      * ``AssetRepositorySession:`` a session to look up asset to
        repository mappings
      * ``AssetRepositoryAssignmentSession:`` a session to manage asset
        to repository mappings
      * ``AssetSmartRepositorySession:`` a session to manage dynamic
        repositories of assets
      * ``AssetTemporalSession:`` a session to access the temporal
        coverage of an asset
      * ``AssetTemporalAssignmentSession:`` a session to manage the
        temporal coverage of an asset
      * ``AssetSpatialSession:`` a session to access the spatial
        coverage of an asset
      * ``AssetSpatialAssignmentSession:`` a session to manage the
        spatial coverage of an asset
      * ``AssetCompositionSession:`` a session to look up asset
        composition mappings
      * ``AssetCompositionDesignSession:`` a session to map assets to
        compositions
      * ``CompositionLookupSession: a`` session to retrieve compositions
      * ``CompositionQuerySession:`` a session to query compositions
      * ``CompositionSearchSession:`` a session to search for
        compositions
      * ``CompositionAdminSession:`` a session to create, update and
        delete compositions
      * ``CompositionNotificationSession:`` a session to receive
        notifications pertaining to changes in compositions
      * ``CompositionRepositorySession:`` a session to retrieve
        composition repository mappings
      * ``CompositionRepositoryAssignmentSession:`` a session to manage
        composition repository mappings
      * ``CompositionSmartRepositorySession:`` a session to manage
        dynamic repositories of compositions
      * ``RepositoryLookupSession: a`` session to retrieve repositories
      * ``RepositoryQuerySession:`` a session to query repositories
      * ``RepositorySearchSession:`` a session to search for
        repositories
      * ``RepositoryAdminSession:`` a session to create, update and
        delete repositories
      * ``RepositoryNotificationSession:`` a session to receive
        notifications pertaining to changes in repositories
      * ``RepositoryHierarchySession:`` a session to traverse repository
        hierarchies
      * ``RepositoryHierarchyDesignSession:`` a session to manage
        repository hierarchies

    """

    def __init__(self):
        osid_managers.OsidManager.__init__(self)

    def get_asset_lookup_session(self):
        """Gets the ``OsidSession`` associated with the asset lookup service.

        return: (osid.repository.AssetLookupSession) - the new
                ``AssetLookupSession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_asset_lookup()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_lookup()`` is ``true``.*

        """
        if not self.supports_asset_lookup():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.AssetLookupSession(runtime=self._runtime)

    asset_lookup_session = property(fget=get_asset_lookup_session)

    @utilities.arguments_not_none
    def get_asset_lookup_session_for_repository(self, repository_id):
        """Gets the ``OsidSession`` associated with the asset lookup service for the given repository.

        arg:    repository_id (osid.id.Id): the ``Id`` of the repository
        return: (osid.repository.AssetLookupSession) - the new
                ``AssetLookupSession``
        raise:  NotFound - ``repository_id`` not found
        raise:  NullArgument - ``repository_id`` is ``null``
        raise:  OperationFailed - ``unable to complete request``
        raise:  Unimplemented - ``supports_asset_lookup()`` or
                ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_lookup()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        if not self.supports_asset_lookup():
            raise errors.Unimplemented()
        ##
        # Also include check to see if the catalog Id is found otherwise raise errors.NotFound
        ##
        # pylint: disable=no-member
        return sessions.AssetLookupSession(repository_id, runtime=self._runtime)

    def get_asset_query_session(self):
        """Gets an asset query session.

        return: (osid.repository.AssetQuerySession) - an
                ``AssetQuerySession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_asset_query()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_query()`` is ``true``.*

        """
        if not self.supports_asset_query():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.AssetQuerySession(runtime=self._runtime)

    asset_query_session = property(fget=get_asset_query_session)

    @utilities.arguments_not_none
    def get_asset_query_session_for_repository(self, repository_id):
        """Gets an asset query session for the given repository.

        arg:    repository_id (osid.id.Id): the ``Id`` of the repository
        return: (osid.repository.AssetQuerySession) - an
                ``AssetQuerySession``
        raise:  NotFound - ``repository_id`` not found
        raise:  NullArgument - ``repository_id`` is ``null``
        raise:  OperationFailed - ``unable to complete request``
        raise:  Unimplemented - ``supports_asset_query()`` or
                ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_query()`` and ``supports_visible_federation()``
        are ``true``.*

        """
        if not self.supports_asset_query():
            raise errors.Unimplemented()
        ##
        # Also include check to see if the catalog Id is found otherwise raise errors.NotFound
        ##
        # pylint: disable=no-member
        return sessions.AssetQuerySession(repository_id, runtime=self._runtime)

    def get_asset_search_session(self):
        """Gets an asset search session.

        return: (osid.repository.AssetSearchSession) - an
                ``AssetSearchSession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_asset_search()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_search()`` is ``true``.*

        """
        if not self.supports_asset_search():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.AssetSearchSession(runtime=self._runtime)

    asset_search_session = property(fget=get_asset_search_session)

    @utilities.arguments_not_none
    def get_asset_search_session_for_repository(self, repository_id):
        """Gets an asset search session for the given repository.

        arg:    repository_id (osid.id.Id): the ``Id`` of the repository
        return: (osid.repository.AssetSearchSession) - an
                ``AssetSearchSession``
        raise:  NotFound - ``repository_id`` not found
        raise:  NullArgument - ``repository_id`` is ``null``
        raise:  OperationFailed - ``unable to complete request``
        raise:  Unimplemented - ``supports_asset_search()`` or
                ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_search()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        if not self.supports_asset_search():
            raise errors.Unimplemented()
        ##
        # Also include check to see if the catalog Id is found otherwise raise errors.NotFound
        ##
        # pylint: disable=no-member
        return sessions.AssetSearchSession(repository_id, runtime=self._runtime)

    def get_asset_admin_session(self):
        """Gets an asset administration session for creating, updating and deleting assets.

        return: (osid.repository.AssetAdminSession) - an
                ``AssetAdminSession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_asset_admin()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_admin()`` is ``true``.*

        """
        if not self.supports_asset_admin():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.AssetAdminSession(runtime=self._runtime)

    asset_admin_session = property(fget=get_asset_admin_session)

    @utilities.arguments_not_none
    def get_asset_admin_session_for_repository(self, repository_id):
        """Gets an asset administration session for the given repository.

        arg:    repository_id (osid.id.Id): the ``Id`` of the repository
        return: (osid.repository.AssetAdminSession) - an
                ``AssetAdminSession``
        raise:  NotFound - ``repository_id`` not found
        raise:  NullArgument - ``repository_id`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_asset_admin()`` or
                ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_admin()`` and ``supports_visible_federation()``
        are ``true``.*

        """
        if not self.supports_asset_admin():
            raise errors.Unimplemented()
        ##
        # Also include check to see if the catalog Id is found otherwise raise errors.NotFound
        ##
        # pylint: disable=no-member
        return sessions.AssetAdminSession(repository_id, runtime=self._runtime)

    @utilities.arguments_not_none
    def get_asset_notification_session(self, asset_receiver):
        """Gets the notification session for notifications pertaining to asset changes.

        arg:    asset_receiver (osid.repository.AssetReceiver): the
                notification callback
        return: (osid.repository.AssetNotificationSession) - an
                ``AssetNotificationSession``
        raise:  NullArgument - ``asset_receiver`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_asset_notification()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_notification()`` is ``true``.*

        """
        if not self.supports_asset_notification():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.AssetNotificationSession(runtime=self._runtime, receiver=asset_receiver)

    @utilities.arguments_not_none
    def get_asset_notification_session_for_repository(self, asset_receiver, repository_id):
        """Gets the asset notification session for the given repository.

        arg:    asset_receiver (osid.repository.AssetReceiver): the
                notification callback
        arg:    repository_id (osid.id.Id): the ``Id`` of the repository
        return: (osid.repository.AssetNotificationSession) - an
                ``AssetNotificationSession``
        raise:  NotFound - ``repository_id`` not found
        raise:  NullArgument - ``asset_receiver`` or ``repository_id``
                is ``null``
        raise:  OperationFailed - ``unable to complete request``
        raise:  Unimplemented - ``supports_asset_notification()`` or
                ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_notfication()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        if not self.supports_asset_notification():
            raise errors.Unimplemented()
        ##
        # Also include check to see if the catalog Id is found otherwise raise errors.NotFound
        ##
        # pylint: disable=no-member
        return sessions.AssetNotificationSession(repository_id, runtime=self._runtime, receiver=asset_receiver)

    def get_asset_repository_session(self):
        """Gets the session for retrieving asset to repository mappings.

        return: (osid.repository.AssetRepositorySession) - an
                ``AssetRepositorySession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_asset_repository()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_repository()`` is ``true``.*

        """
        if not self.supports_asset_repository():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.AssetRepositorySession(runtime=self._runtime)

    asset_repository_session = property(fget=get_asset_repository_session)

    def get_asset_repository_assignment_session(self):
        """Gets the session for assigning asset to repository mappings.

        return: (osid.repository.AssetRepositoryAssignmentSession) - an
                ``AssetRepositoryAsignmentSession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented -
                ``supports_asset_repository_assignment()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_repository_assignment()`` is ``true``.*

        """
        if not self.supports_asset_repository_assignment():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.AssetRepositoryAssignmentSession(runtime=self._runtime)

    asset_repository_assignment_session = property(fget=get_asset_repository_assignment_session)

    def get_asset_composition_session(self):
        """Gets the session for retrieving asset compositions.

        return: (osid.repository.AssetCompositionSession) - an
                ``AssetCompositionSession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_asset_composition()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_composition()`` is ``true``.*

        """
        if not self.supports_asset_composition():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.AssetCompositionSession(runtime=self._runtime)

    asset_composition_session = property(fget=get_asset_composition_session)

    def get_asset_composition_design_session(self):
        """Gets the session for creating asset compositions.

        return: (osid.repository.AssetCompositionDesignSession) - an
                ``AssetCompositionDesignSession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_asset_composition_design()``
                is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_asset_composition_design()`` is ``true``.*

        """
        if not self.supports_asset_composition_design():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.AssetCompositionDesignSession(runtime=self._runtime)

    asset_composition_design_session = property(fget=get_asset_composition_design_session)

    def get_composition_lookup_session(self):
        """Gets the ``OsidSession`` associated with the composition lookup service.

        return: (osid.repository.CompositionLookupSession) - the new
                ``CompositionLookupSession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_composition_lookup()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_composition_lookup()`` is ``true``.*

        """
        if not self.supports_composition_lookup():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.CompositionLookupSession(runtime=self._runtime)

    composition_lookup_session = property(fget=get_composition_lookup_session)

    @utilities.arguments_not_none
    def get_composition_lookup_session_for_repository(self, repository_id):
        """Gets the ``OsidSession`` associated with the composition lookup service for the given repository.

        arg:    repository_id (osid.id.Id): the ``Id`` of the repository
        return: (osid.repository.CompositionLookupSession) - the new
                ``CompositionLookupSession``
        raise:  NotFound - ``repository_id`` not found
        raise:  NullArgument - ``repository_id`` is ``null``
        raise:  OperationFailed - ``unable to complete request``
        raise:  Unimplemented - ``supports_composition_lookup()`` or
                ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_composition_lookup()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        if not self.supports_composition_lookup():
            raise errors.Unimplemented()
        ##
        # Also include check to see if the catalog Id is found otherwise raise errors.NotFound
        ##
        # pylint: disable=no-member
        return sessions.CompositionLookupSession(repository_id, runtime=self._runtime)

    def get_composition_query_session(self):
        """Gets a composition query session.

        return: (osid.repository.CompositionQuerySession) - a
                ``CompositionQuerySession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_composition_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_composition_query()`` is ``true``.*

        """
        if not self.supports_composition_query():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.CompositionQuerySession(runtime=self._runtime)

    composition_query_session = property(fget=get_composition_query_session)

    @utilities.arguments_not_none
    def get_composition_query_session_for_repository(self, repository_id):
        """Gets a composition query session for the given repository.

        arg:    repository_id (osid.id.Id): the ``Id`` of the repository
        return: (osid.repository.CompositionQuerySession) - a
                ``CompositionQuerySession``
        raise:  NotFound - ``repository_id`` not found
        raise:  NullArgument - ``repository_id`` is ``null``
        raise:  OperationFailed - ``unable to complete request``
        raise:  Unimplemented - ``supports_composition_query()`` or
                ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_composition_query()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        if not self.supports_composition_query():
            raise errors.Unimplemented()
        ##
        # Also include check to see if the catalog Id is found otherwise raise errors.NotFound
        ##
        # pylint: disable=no-member
        return sessions.CompositionQuerySession(repository_id, runtime=self._runtime)

    def get_composition_search_session(self):
        """Gets a composition search session.

        return: (osid.repository.CompositionSearchSession) - a
                ``CompositionSearchSession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_composition_search()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_composition_search()`` is ``true``.*

        """
        if not self.supports_composition_search():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.CompositionSearchSession(runtime=self._runtime)

    composition_search_session = property(fget=get_composition_search_session)

    @utilities.arguments_not_none
    def get_composition_search_session_for_repository(self, repository_id):
        """Gets a composition search session for the given repository.

        arg:    repository_id (osid.id.Id): the ``Id`` of the repository
        return: (osid.repository.CompositionSearchSession) - a
                ``CompositionSearchSession``
        raise:  NotFound - ``repository_id`` not found
        raise:  NullArgument - ``repository_id`` is ``null``
        raise:  OperationFailed - ``unable to complete request``
        raise:  Unimplemented - ``supports_composition_search()`` or
                ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_composition_search()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        if not self.supports_composition_search():
            raise errors.Unimplemented()
        ##
        # Also include check to see if the catalog Id is found otherwise raise errors.NotFound
        ##
        # pylint: disable=no-member
        return sessions.CompositionSearchSession(repository_id, runtime=self._runtime)

    def get_composition_admin_session(self):
        """Gets a composition administration session for creating, updating and deleting compositions.

        return: (osid.repository.CompositionAdminSession) - a
                ``CompositionAdminSession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_composition_admin()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_composition_admin()`` is ``true``.*

        """
        if not self.supports_composition_admin():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.CompositionAdminSession(runtime=self._runtime)

    composition_admin_session = property(fget=get_composition_admin_session)

    @utilities.arguments_not_none
    def get_composition_admin_session_for_repository(self, repository_id):
        """Gets a composiiton administrative session for the given repository.

        arg:    repository_id (osid.id.Id): the ``Id`` of the repository
        return: (osid.repository.CompositionAdminSession) - a
                ``CompositionAdminSession``
        raise:  NotFound - ``repository_id`` not found
        raise:  NullArgument - ``repository_id`` is ``null``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_composition_admin()`` or
                ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_composition_admin()`` and
        ``supports_visible_federation()`` are ``true``.*

        """
        if not self.supports_composition_admin():
            raise errors.Unimplemented()
        ##
        # Also include check to see if the catalog Id is found otherwise raise errors.NotFound
        ##
        # pylint: disable=no-member
        return sessions.CompositionAdminSession(repository_id, runtime=self._runtime)

    def get_composition_repository_session(self):
        """Gets the session for retrieving composition to repository mappings.

        return: (osid.repository.CompositionRepositorySession) - a
                ``CompositionRepositorySession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_composition_repository()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_composition_repository()`` is ``true``.*

        """
        if not self.supports_composition_repository():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.CompositionRepositorySession(runtime=self._runtime)

    composition_repository_session = property(fget=get_composition_repository_session)

    def get_composition_repository_assignment_session(self):
        """Gets the session for assigning composition to repository mappings.

        return: (osid.repository.CompositionRepositoryAssignmentSession)
                - a ``CompositionRepositoryAssignmentSession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented -
                ``supports_composition_repository_assignment()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_composition_repository_assignment()`` is ``true``.*

        """
        if not self.supports_composition_repository_assignment():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.CompositionRepositoryAssignmentSession(runtime=self._runtime)

    composition_repository_assignment_session = property(fget=get_composition_repository_assignment_session)

    def get_repository_lookup_session(self):
        """Gets the repository lookup session.

        return: (osid.repository.RepositoryLookupSession) - a
                ``RepositoryLookupSession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_repository_lookup()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_repository_lookup()`` is ``true``.*

        """
        if not self.supports_repository_lookup():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.RepositoryLookupSession(runtime=self._runtime)

    repository_lookup_session = property(fget=get_repository_lookup_session)

    def get_repository_query_session(self):
        """Gets the repository query session.

        return: (osid.repository.RepositoryQuerySession) - a
                ``RepositoryQuerySession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_repository_query()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_repository_query()`` is ``true``.*

        """
        if not self.supports_repository_query():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.RepositoryQuerySession(runtime=self._runtime)

    repository_query_session = property(fget=get_repository_query_session)

    def get_repository_admin_session(self):
        """Gets the repository administrative session for creating, updating and deleteing repositories.

        return: (osid.repository.RepositoryAdminSession) - a
                ``RepositoryAdminSession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_repository_admin()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_repository_admin()`` is ``true``.*

        """
        if not self.supports_repository_admin():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.RepositoryAdminSession(runtime=self._runtime)

    repository_admin_session = property(fget=get_repository_admin_session)

    def get_repository_hierarchy_session(self):
        """Gets the repository hierarchy traversal session.

        return: (osid.repository.RepositoryHierarchySession) - ``a
                RepositoryHierarchySession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_repository_hierarchy()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_repository_hierarchy()`` is ``true``.*

        """
        if not self.supports_repository_hierarchy():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.RepositoryHierarchySession(runtime=self._runtime)

    repository_hierarchy_session = property(fget=get_repository_hierarchy_session)

    def get_repository_hierarchy_design_session(self):
        """Gets the repository hierarchy design session.

        return: (osid.repository.RepositoryHierarchyDesignSession) - a
                ``RepostoryHierarchyDesignSession``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented -
                ``supports_repository_hierarchy_design()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_repository_hierarchy_design()`` is ``true``.*

        """
        if not self.supports_repository_hierarchy_design():
            raise errors.Unimplemented()
        # pylint: disable=no-member
        return sessions.RepositoryHierarchyDesignSession(runtime=self._runtime)

    repository_hierarchy_design_session = property(fget=get_repository_hierarchy_design_session)

    def get_repository_batch_manager(self):
        """Gets a ``RepositoryBatchManager``.

        return: (osid.repository.batch.RepositoryBatchManager) - a
                ``RepostoryBatchManager``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_repository_batch()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_repository_batch()`` is ``true``.*

        """
        # The batch service is not implemented by this provider.
        raise errors.Unimplemented()

    repository_batch_manager = property(fget=get_repository_batch_manager)

    def get_repository_rules_manager(self):
        """Gets a ``RepositoryRulesManager``.

        return: (osid.repository.rules.RepositoryRulesManager) - a
                ``RepostoryRulesManager``
        raise:  OperationFailed - unable to complete request
        raise:  Unimplemented - ``supports_repository_rules()`` is
                ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_repository_rules()`` is ``true``.*

        """
        # The rules service is not implemented by this provider.
        raise errors.Unimplemented()

    repository_rules_manager = property(fget=get_repository_rules_manager)

    @utilities.arguments_not_none
    def get_asset_composition_session_for_repository(self, repository_id):
        """Gets the asset composition session for the given repository.

        arg:    repository_id (osid.id.Id): the ``Id`` of the repository
        return: (osid.repository.AssetCompositionSession) - an
                ``AssetCompositionSession``
        raise:  NullArgument - ``repository_id`` is ``null``
        raise:  Unimplemented - ``supports_asset_composition()`` is
                ``false``

        """
        # This impl is temporary until Tom adds missing methods to RepositoryProxyManager in spec
        if not self.supports_asset_composition():
            raise errors.Unimplemented()
        ##
        # Also include check to see if the catalog Id is found otherwise raise errors.NotFound
        ##
        return sessions.AssetCompositionSession(repository_id, runtime=self._runtime)  # pylint: disable=no-member

    @utilities.arguments_not_none
    def get_asset_composition_design_session_for_repository(self, repository_id):
        """Gets the asset composition design session for the given repository.

        arg:    repository_id (osid.id.Id): the ``Id`` of the repository
        return: (osid.repository.AssetCompositionDesignSession) - an
                ``AssetCompositionDesignSession``
        raise:  NullArgument - ``repository_id`` is ``null``
        raise:  Unimplemented - ``supports_asset_composition_design()``
                is ``false``

        """
        # This impl is temporary until Tom adds missing methods to RepositoryProxyManager in spec
        # Fixed: gate on supports_asset_composition_design() to match
        # get_asset_composition_design_session(); previously this checked
        # supports_asset_composition() (copy-paste inconsistency).
        if not self.supports_asset_composition_design():
            raise errors.Unimplemented()
        ##
        # Also include check to see if the catalog Id is found otherwise raise errors.NotFound
        ##
        return sessions.AssetCompositionDesignSession(repository_id, runtime=self._runtime)  # pylint: disable=no-member
class RepositoryProxyManager(osid_managers.OsidProxyManager, RepositoryProfile, repository_managers.RepositoryProxyManager):
"""The repository manager provides access to asset lookup and creation session and provides interoperability tests
for
various aspects of this service.
Methods in this manager support the passing of a ``Proxy`` for the
purposes of passing information from a server environment. The
sessions included in this manager are:
* ``AssetLookupSession:`` a session to retrieve assets
* ``AssetQuerySession:`` a session to query assets
* ``AssetSearchSession:`` a session to search for assets
* ``AssetAdminSession:`` a session to create and delete assets
* ``AssetNotificationSession:`` a session to receive notifications
pertaining to asset changes
* ``AssetRepositorySession:`` a session to look up asset to
repository mappings
* ``AssetRepositoryAssignmentSession:`` a session to manage asset
to repository mappings
* ``AssetSmartRepositorySession:`` a session to manage dynamic
repositories of assets
* ``AssetTemporalSession:`` a session to access the temporal
coverage of an asset
* ``AssetTemporalAssignmentSession:`` a session to manage the
temporal coverage of an asset
* ``AssetSpatialSession:`` a session to access the spatial
coverage of an asset
* ``AssetSpatialAssignmentSession:`` a session to manage the
spatial coverage of an asset
* ``AssetCompositionSession:`` a session to look up asset
composition mappings
* ``AssetCompositionDesignSession:`` a session to map assets to
compositions
* ``CompositionLookupSession: a`` session to retrieve compositions
* ``CompositionQuerySession:`` a session to query compositions
* ``CompositionSearchSession:`` a session to search for
compositions
* ``CompositionAdminSession:`` a session to create, update and
delete compositions
* ``CompositionNotificationSession:`` a session to receive
notifications pertaining to changes in compositions
* ``CompositionRepositorySession:`` a session to retrieve
composition repository mappings
* ``CompositionRepositoryAssignmentSession:`` a session to manage
composition repository mappings
* ``CompositionSmartRepositorySession:`` a session to manage
dynamic repositories of compositions
* ``RepositoryLookupSession: a`` session to retrieve repositories
* ``RepositoryQuerySession:`` a session to query repositories
* ``RepositorySearchSession:`` a session to search for
repositories
* ``RepositoryAdminSession:`` a session to create, update and
delete repositories
* ``RepositoryNotificationSession:`` a session to receive
notifications pertaining to changes in repositories
* ``RepositoryHierarchySession:`` a session to traverse repository
hierarchies
* ``RepositoryHierarchyDesignSession:`` a session to manage
repository hierarchies
"""
def __init__(self):
osid_managers.OsidProxyManager.__init__(self)
@utilities.arguments_not_none
def get_asset_lookup_session(self, proxy):
"""Gets the ``OsidSession`` associated with the asset lookup service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetLookupSession) - an
``AssetLookupSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_lookup()`` is ``true``.*
"""
if not self.supports_asset_lookup():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.AssetLookupSession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_asset_lookup_session_for_repository(self, repository_id, proxy):
"""Gets the ``OsidSession`` associated with the asset lookup service for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetLookupSession) - an
``AssetLookupSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_asset_lookup()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_lookup()`` and
``supports_visible_federation()`` are ``true``.*
"""
if not self.supports_asset_lookup():
raise errors.Unimplemented()
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
# pylint: disable=no-member
return sessions.AssetLookupSession(repository_id, proxy, self._runtime)
@utilities.arguments_not_none
def get_asset_query_session(self, proxy):
"""Gets the ``OsidSession`` associated with the asset query service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetQuerySession) - an
``AssetQuerySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_query()`` is ``true``.*
"""
if not self.supports_asset_query():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.AssetQuerySession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_asset_query_session_for_repository(self, repository_id, proxy):
"""Gets the ``OsidSession`` associated with the asset query service for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetQuerySession) - an
``AssetQuerySession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_asset_query()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_query()`` and ``supports_visible_federation()``
are ``true``.*
"""
if not self.supports_asset_query():
raise errors.Unimplemented()
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
# pylint: disable=no-member
return sessions.AssetQuerySession(repository_id, proxy, self._runtime)
@utilities.arguments_not_none
def get_asset_search_session(self, proxy):
"""Gets an asset search session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetSearchSession) - an
``AssetSearchSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_search()`` is ``true``.*
"""
if not self.supports_asset_search():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.AssetSearchSession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_asset_search_session_for_repository(self, repository_id, proxy):
"""Gets an asset search session for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetSearchSession) - an
``AssetSearchSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_asset_search()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_search()`` and
``supports_visible_federation()`` are ``true``.*
"""
if not self.supports_asset_search():
raise errors.Unimplemented()
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
# pylint: disable=no-member
return sessions.AssetSearchSession(repository_id, proxy, self._runtime)
@utilities.arguments_not_none
def get_asset_admin_session(self, proxy):
"""Gets an asset administration session for creating, updating and deleting assets.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetAdminSession) - an
``AssetAdminSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_admin()`` is ``true``.*
"""
if not self.supports_asset_admin():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.AssetAdminSession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_asset_admin_session_for_repository(self, repository_id, proxy):
"""Gets an asset administration session for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetAdminSession) - an
``AssetAdminSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_admin()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_admin()`` and ``supports_visible_federation()``
are ``true``.*
"""
if not self.supports_asset_admin():
raise errors.Unimplemented()
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
# pylint: disable=no-member
return sessions.AssetAdminSession(repository_id, proxy, self._runtime)
@utilities.arguments_not_none
def get_asset_notification_session(self, asset_receiver, proxy):
"""Gets the notification session for notifications pertaining to asset changes.
arg: asset_receiver (osid.repository.AssetReceiver): the
notification callback
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetNotificationSession) - an
``AssetNotificationSession``
raise: NullArgument - ``asset_receiver`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_notification()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_asset_notification()`` is ``true``.*
"""
if not self.supports_asset_notification():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.AssetNotificationSession(proxy=proxy, runtime=self._runtime, receiver=asset_receiver)
@utilities.arguments_not_none
def get_asset_notification_session_for_repository(self, asset_receiver, repository_id, proxy):
"""Gets the asset notification session for the given repository.
arg: asset_receiver (osid.repository.AssetReceiver): the
notification callback
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetNotificationSession) - an
``AssetNotificationSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``asset_receiver, repository_id`` or
``proxy`` is ``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_asset_notification()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_notfication()`` and
``supports_visible_federation()`` are ``true``.*
"""
if not self.supports_asset_notification():
raise errors.Unimplemented()
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
# pylint: disable=no-member
return sessions.AssetNotificationSession(catalog_id=repository_id, proxy=proxy, runtime=self._runtime,
receiver=asset_receiver)
@utilities.arguments_not_none
def get_asset_repository_session(self, proxy):
"""Gets the session for retrieving asset to repository mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetRepositorySession) - an
``AssetRepositorySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_repository()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_asset_repository()`` is ``true``.*
"""
if not self.supports_asset_repository():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.AssetRepositorySession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_asset_repository_assignment_session(self, proxy):
"""Gets the session for assigning asset to repository mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetRepositoryAssignmentSession) - an
``AssetRepositoryAsignmentSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented -
``supports_asset_repository_assignment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_repository_assignment()`` is ``true``.*
"""
if not self.supports_asset_repository_assignment():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.AssetRepositoryAssignmentSession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_asset_composition_session(self, proxy):
"""Gets the session for retrieving asset compositions.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetCompositionSession) - an
``AssetCompositionSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_composition()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_asset_composition()`` is ``true``.*
"""
if not self.supports_asset_composition():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.AssetCompositionSession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_asset_composition_design_session(self, proxy):
"""Gets the session for creating asset compositions.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetCompositionDesignSession) - an
``AssetCompositionDesignSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_composition_design()``
is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_composition_design()`` is ``true``.*
"""
if not self.supports_asset_composition_design():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.AssetCompositionDesignSession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_composition_lookup_session(self, proxy):
"""Gets the ``OsidSession`` associated with the composition lookup service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionLookupSession) - the new
``CompositionLookupSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_composition_lookup()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_composition_lookup()`` is ``true``.*
"""
if not self.supports_composition_lookup():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.CompositionLookupSession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_composition_lookup_session_for_repository(self, repository_id, proxy):
"""Gets the ``OsidSession`` associated with the composition lookup service for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionLookupSession) - the new
``CompositionLookupSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_composition_lookup()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_composition_lookup()`` and
``supports_visible_federation()`` are ``true``.*
"""
if not self.supports_composition_lookup():
raise errors.Unimplemented()
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
# pylint: disable=no-member
return sessions.CompositionLookupSession(repository_id, proxy, self._runtime)
@utilities.arguments_not_none
def get_composition_query_session(self, proxy):
"""Gets a composition query session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionSearchSession) - a
``CompositionQuerySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_composition_query()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_composition_query()`` is ``true``.*
"""
if not self.supports_composition_query():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.CompositionQuerySession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_composition_query_session_for_repository(self, repository_id, proxy):
"""Gets a composition query session for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionQuerySession) - a
``CompositionQuerySession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_composition_query()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_composition_query()`` and
``supports_visible_federation()`` are ``true``.*
"""
if not self.supports_composition_query():
raise errors.Unimplemented()
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
# pylint: disable=no-member
return sessions.CompositionQuerySession(repository_id, proxy, self._runtime)
@utilities.arguments_not_none
def get_composition_search_session(self, proxy):
"""Gets a composition search session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionSearchSession) - a
``CompositionSearchSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_composition_search()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_composition_search()`` is ``true``.*
"""
if not self.supports_composition_search():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.CompositionSearchSession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_composition_search_session_for_repository(self, repository_id, proxy):
"""Gets a composition search session for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionSearchSession) - a
``CompositionSearchSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_composition_search()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_composition_search()`` and
``supports_visible_federation()`` are ``true``.*
"""
if not self.supports_composition_search():
raise errors.Unimplemented()
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
# pylint: disable=no-member
return sessions.CompositionSearchSession(repository_id, proxy, self._runtime)
@utilities.arguments_not_none
def get_composition_admin_session(self, proxy):
"""Gets a composition administration session for creating, updating and deleting compositions.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionAdminSession) - a
``CompositionAdminSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_composition_admin()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_composition_admin()`` is ``true``.*
"""
if not self.supports_composition_admin():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.CompositionAdminSession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_composition_admin_session_for_repository(self, repository_id, proxy):
"""Gets a composiiton administrative session for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionAdminSession) - a
``CompositionAdminSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_composition_admin()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_composition_admin()`` and
``supports_visible_federation()`` are ``true``.*
"""
if not self.supports_composition_admin():
raise errors.Unimplemented()
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
# pylint: disable=no-member
return sessions.CompositionAdminSession(repository_id, proxy, self._runtime)
@utilities.arguments_not_none
def get_composition_repository_session(self, proxy):
"""Gets the session for retrieving composition to repository mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionRepositorySession) - a
``CompositionRepositorySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_composition_repository()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_composition_repository()`` is ``true``.*
"""
if not self.supports_composition_repository():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.CompositionRepositorySession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_composition_repository_assignment_session(self, proxy):
"""Gets the session for assigning composition to repository mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionRepositoryAssignmentSession)
- a ``CompositionRepositoryAssignmentSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented -
``supports_composition_repository_assignment()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_composition_repository_assignment()`` is ``true``.*
"""
if not self.supports_composition_repository_assignment():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.CompositionRepositoryAssignmentSession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_repository_lookup_session(self, proxy):
"""Gets the repository lookup session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryLookupSession) - a
``RepositoryLookupSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_lookup()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_lookup()`` is ``true``.*
"""
if not self.supports_repository_lookup():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.RepositoryLookupSession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_repository_query_session(self, proxy):
"""Gets the repository query session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryQuerySession) - a
``RepositoryQuerySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_query()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_query()`` is ``true``.*
"""
if not self.supports_repository_query():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.RepositoryQuerySession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_repository_admin_session(self, proxy):
"""Gets the repository administrative session for creating, updating and deleteing repositories.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryAdminSession) - a
``RepositoryAdminSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_admin()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_admin()`` is ``true``.*
"""
if not self.supports_repository_admin():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.RepositoryAdminSession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_repository_hierarchy_session(self, proxy):
"""Gets the repository hierarchy traversal session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryHierarchySession) - ``a
RepositoryHierarchySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_hierarchy()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_hierarchy()`` is ``true``.*
"""
if not self.supports_repository_hierarchy():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.RepositoryHierarchySession(proxy=proxy, runtime=self._runtime)
@utilities.arguments_not_none
def get_repository_hierarchy_design_session(self, proxy):
"""Gets the repository hierarchy design session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryHierarchyDesignSession) - a
``RepostoryHierarchyDesignSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented -
``supports_repository_hierarchy_design()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_repository_hierarchy_design()`` is ``true``.*
"""
if not self.supports_repository_hierarchy_design():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.RepositoryHierarchyDesignSession(proxy=proxy, runtime=self._runtime)
    def get_repository_batch_proxy_manager(self):
        """Gets a ``RepositoryBatchProxyManager``.

        The batch service is not supported by this implementation, so
        this always raises ``Unimplemented``.

        return: (osid.repository.batch.RepositoryBatchProxyManager) - a
            ``RepositoryBatchProxyManager``
        raise: OperationFailed - unable to complete request
        raise: Unimplemented - ``supports_repository_batch()`` is
            ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_repository_batch()`` is ``true``.*

        """
        raise errors.Unimplemented()

    # Expose the getter as a read-only property per the OSID convention.
    repository_batch_proxy_manager = property(fget=get_repository_batch_proxy_manager)
    def get_repository_rules_proxy_manager(self):
        """Gets a ``RepositoryRulesProxyManager``.

        The rules service is not supported by this implementation, so
        this always raises ``Unimplemented``.

        return: (osid.repository.rules.RepositoryRulesProxyManager) - a
            ``RepositoryRulesProxyManager``
        raise: OperationFailed - unable to complete request
        raise: Unimplemented - ``supports_repository_rules()`` is
            ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_repository_rules()`` is ``true``.*

        """
        raise errors.Unimplemented()

    # Expose the getter as a read-only property per the OSID convention.
    repository_rules_proxy_manager = property(fget=get_repository_rules_proxy_manager)
    @utilities.arguments_not_none
    def get_asset_composition_session_for_repository(self, repository_id, proxy):
        """Gets an ``AssetCompositionSession`` for the given repository.

        arg: repository_id (osid.id.Id): the ``Id`` of the repository
        arg: proxy (osid.proxy.Proxy): a proxy
        return: (osid.repository.AssetCompositionSession) - an
            ``AssetCompositionSession``
        raise: NullArgument - ``repository_id`` or ``proxy`` is ``null``
        raise: Unimplemented - ``supports_asset_composition()`` is
            ``false``

        """
        # This impl is temporary until Tom adds missing methods to RepositoryProxyManager in spec
        if not self.supports_asset_composition():
            raise errors.Unimplemented()
        ##
        # Also include check to see if the catalog Id is found otherwise raise errors.NotFound
        ##
        return sessions.AssetCompositionSession(repository_id, proxy, runtime=self._runtime)  # pylint: disable=no-member
@utilities.arguments_not_none
def get_asset_composition_design_session_for_repository(self, repository_id, proxy):
# This impl is temporary until Tom adds missing methods to RepositoryProxyManager in spec
if not self.supports_asset_composition():
raise errors.Unimplemented()
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
return sessions.AssetCompositionDesignSession(repository_id, proxy, runtime=self._runtime) # pylint: disable=no-member
| {
"content_hash": "02d6897c456070f89680e9bead112908",
"timestamp": "",
"source": "github",
"line_count": 2126,
"max_line_length": 126,
"avg_line_length": 39.68908748824083,
"alnum_prop": 0.6462745469844392,
"repo_name": "birdland/dlkit-doc",
"id": "5546fd56a1756303239e9e8a78e0a90ead76d527",
"size": "84379",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dlkit/mongo/repository/managers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12458859"
}
],
"symlink_target": ""
} |
require 'spec_helper'
describe Puppet::Type.type(:dsc_xazuresqldatabase) do
let :dsc_xazuresqldatabase do
Puppet::Type.type(:dsc_xazuresqldatabase).new(
:name => 'foo',
:dsc_name => 'foo',
)
end
it "should stringify normally" do
expect(dsc_xazuresqldatabase.to_s).to eq("Dsc_xazuresqldatabase[foo]")
end
it 'should default to ensure => present' do
expect(dsc_xazuresqldatabase[:ensure]).to eq :present
end
it 'should require that dsc_name is specified' do
#dsc_xazuresqldatabase[:dsc_name]
expect { Puppet::Type.type(:dsc_xazuresqldatabase).new(
:name => 'foo',
:dsc_maximumsizeingb => 32,
:dsc_collation => 'foo',
:dsc_edition => 'foo',
:dsc_servercredential => 'foo',
:dsc_servername => 'foo',
:dsc_azuresubscriptionname => 'foo',
:dsc_azurepublishsettingsfile => 'foo',
:dsc_ensure => 'Present',
)}.to raise_error(Puppet::Error, /dsc_name is a required attribute/)
end
it 'should not accept array for dsc_name' do
expect{dsc_xazuresqldatabase[:dsc_name] = ["foo", "bar", "spec"]}.to raise_error(Puppet::ResourceError)
end
it 'should not accept boolean for dsc_name' do
expect{dsc_xazuresqldatabase[:dsc_name] = true}.to raise_error(Puppet::ResourceError)
end
it 'should not accept int for dsc_name' do
expect{dsc_xazuresqldatabase[:dsc_name] = -16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept uint for dsc_name' do
expect{dsc_xazuresqldatabase[:dsc_name] = 16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept array for dsc_maximumsizeingb' do
expect{dsc_xazuresqldatabase[:dsc_maximumsizeingb] = ["foo", "bar", "spec"]}.to raise_error(Puppet::ResourceError)
end
it 'should not accept boolean for dsc_maximumsizeingb' do
expect{dsc_xazuresqldatabase[:dsc_maximumsizeingb] = true}.to raise_error(Puppet::ResourceError)
end
it 'should not accept int for dsc_maximumsizeingb' do
expect{dsc_xazuresqldatabase[:dsc_maximumsizeingb] = -16}.to raise_error(Puppet::ResourceError)
end
it 'should accept uint for dsc_maximumsizeingb' do
dsc_xazuresqldatabase[:dsc_maximumsizeingb] = 32
expect(dsc_xazuresqldatabase[:dsc_maximumsizeingb]).to eq(32)
end
it 'should accept string-like int for dsc_maximumsizeingb' do
dsc_xazuresqldatabase[:dsc_maximumsizeingb] = '16'
expect(dsc_xazuresqldatabase[:dsc_maximumsizeingb]).to eq(16)
end
it 'should accept string-like int for dsc_maximumsizeingb' do
dsc_xazuresqldatabase[:dsc_maximumsizeingb] = '32'
expect(dsc_xazuresqldatabase[:dsc_maximumsizeingb]).to eq(32)
end
it 'should accept string-like int for dsc_maximumsizeingb' do
dsc_xazuresqldatabase[:dsc_maximumsizeingb] = '64'
expect(dsc_xazuresqldatabase[:dsc_maximumsizeingb]).to eq(64)
end
it 'should not accept array for dsc_collation' do
expect{dsc_xazuresqldatabase[:dsc_collation] = ["foo", "bar", "spec"]}.to raise_error(Puppet::ResourceError)
end
it 'should not accept boolean for dsc_collation' do
expect{dsc_xazuresqldatabase[:dsc_collation] = true}.to raise_error(Puppet::ResourceError)
end
it 'should not accept int for dsc_collation' do
expect{dsc_xazuresqldatabase[:dsc_collation] = -16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept uint for dsc_collation' do
expect{dsc_xazuresqldatabase[:dsc_collation] = 16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept array for dsc_edition' do
expect{dsc_xazuresqldatabase[:dsc_edition] = ["foo", "bar", "spec"]}.to raise_error(Puppet::ResourceError)
end
it 'should not accept boolean for dsc_edition' do
expect{dsc_xazuresqldatabase[:dsc_edition] = true}.to raise_error(Puppet::ResourceError)
end
it 'should not accept int for dsc_edition' do
expect{dsc_xazuresqldatabase[:dsc_edition] = -16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept uint for dsc_edition' do
expect{dsc_xazuresqldatabase[:dsc_edition] = 16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept array for dsc_servercredential' do
expect{dsc_xazuresqldatabase[:dsc_servercredential] = ["foo", "bar", "spec"]}.to raise_error(Puppet::ResourceError)
end
it 'should not accept boolean for dsc_servercredential' do
expect{dsc_xazuresqldatabase[:dsc_servercredential] = true}.to raise_error(Puppet::ResourceError)
end
it 'should not accept int for dsc_servercredential' do
expect{dsc_xazuresqldatabase[:dsc_servercredential] = -16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept uint for dsc_servercredential' do
expect{dsc_xazuresqldatabase[:dsc_servercredential] = 16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept array for dsc_servername' do
expect{dsc_xazuresqldatabase[:dsc_servername] = ["foo", "bar", "spec"]}.to raise_error(Puppet::ResourceError)
end
it 'should not accept boolean for dsc_servername' do
expect{dsc_xazuresqldatabase[:dsc_servername] = true}.to raise_error(Puppet::ResourceError)
end
it 'should not accept int for dsc_servername' do
expect{dsc_xazuresqldatabase[:dsc_servername] = -16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept uint for dsc_servername' do
expect{dsc_xazuresqldatabase[:dsc_servername] = 16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept array for dsc_azuresubscriptionname' do
expect{dsc_xazuresqldatabase[:dsc_azuresubscriptionname] = ["foo", "bar", "spec"]}.to raise_error(Puppet::ResourceError)
end
it 'should not accept boolean for dsc_azuresubscriptionname' do
expect{dsc_xazuresqldatabase[:dsc_azuresubscriptionname] = true}.to raise_error(Puppet::ResourceError)
end
it 'should not accept int for dsc_azuresubscriptionname' do
expect{dsc_xazuresqldatabase[:dsc_azuresubscriptionname] = -16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept uint for dsc_azuresubscriptionname' do
expect{dsc_xazuresqldatabase[:dsc_azuresubscriptionname] = 16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept array for dsc_azurepublishsettingsfile' do
expect{dsc_xazuresqldatabase[:dsc_azurepublishsettingsfile] = ["foo", "bar", "spec"]}.to raise_error(Puppet::ResourceError)
end
it 'should not accept boolean for dsc_azurepublishsettingsfile' do
expect{dsc_xazuresqldatabase[:dsc_azurepublishsettingsfile] = true}.to raise_error(Puppet::ResourceError)
end
it 'should not accept int for dsc_azurepublishsettingsfile' do
expect{dsc_xazuresqldatabase[:dsc_azurepublishsettingsfile] = -16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept uint for dsc_azurepublishsettingsfile' do
expect{dsc_xazuresqldatabase[:dsc_azurepublishsettingsfile] = 16}.to raise_error(Puppet::ResourceError)
end
it 'should accept dsc_ensure predefined value Present' do
dsc_xazuresqldatabase[:dsc_ensure] = 'Present'
expect(dsc_xazuresqldatabase[:dsc_ensure]).to eq('Present')
end
it 'should accept dsc_ensure predefined value present' do
dsc_xazuresqldatabase[:dsc_ensure] = 'present'
expect(dsc_xazuresqldatabase[:dsc_ensure]).to eq('present')
end
it 'should accept dsc_ensure predefined value present and update ensure with this value (ensure end value should be a symbol)' do
dsc_xazuresqldatabase[:dsc_ensure] = 'present'
expect(dsc_xazuresqldatabase[:ensure]).to eq(dsc_xazuresqldatabase[:dsc_ensure].downcase.to_sym)
end
it 'should accept dsc_ensure predefined value Absent' do
dsc_xazuresqldatabase[:dsc_ensure] = 'Absent'
expect(dsc_xazuresqldatabase[:dsc_ensure]).to eq('Absent')
end
it 'should accept dsc_ensure predefined value absent' do
dsc_xazuresqldatabase[:dsc_ensure] = 'absent'
expect(dsc_xazuresqldatabase[:dsc_ensure]).to eq('absent')
end
it 'should accept dsc_ensure predefined value absent and update ensure with this value (ensure end value should be a symbol)' do
dsc_xazuresqldatabase[:dsc_ensure] = 'absent'
expect(dsc_xazuresqldatabase[:ensure]).to eq(dsc_xazuresqldatabase[:dsc_ensure].downcase.to_sym)
end
it 'should not accept values not equal to predefined values' do
expect{dsc_xazuresqldatabase[:dsc_ensure] = 'invalid value'}.to raise_error(Puppet::ResourceError)
end
it 'should not accept array for dsc_ensure' do
expect{dsc_xazuresqldatabase[:dsc_ensure] = ["foo", "bar", "spec"]}.to raise_error(Puppet::ResourceError)
end
it 'should not accept boolean for dsc_ensure' do
expect{dsc_xazuresqldatabase[:dsc_ensure] = true}.to raise_error(Puppet::ResourceError)
end
it 'should not accept int for dsc_ensure' do
expect{dsc_xazuresqldatabase[:dsc_ensure] = -16}.to raise_error(Puppet::ResourceError)
end
it 'should not accept uint for dsc_ensure' do
expect{dsc_xazuresqldatabase[:dsc_ensure] = 16}.to raise_error(Puppet::ResourceError)
end
# Configuration PROVIDER TESTS
# The :dsc_configuration provider generates a PowerShell DSC configuration
# script for this resource; these examples check the generated script text.
describe "dsc_configuration provider tests" do
  it "should successfully instanciate the provider" do
    described_class.provider(:dsc_configuration).new(dsc_xazuresqldatabase)
  end
  # Fresh provider instance for every example below.
  before(:each) do
    @provider = described_class.provider(:dsc_configuration).new(dsc_xazuresqldatabase)
  end
  describe "when dscmeta_import_resource is true (default) and dscmeta_module_name existing/is defined " do
    it "should compute powershell dsc test script with Import-DscResource" do
      expect(@provider.ps_script_content('test')).to match(/Import-DscResource/)
    end
    it "should compute powershell dsc set script with Import-DscResource" do
      expect(@provider.ps_script_content('set')).to match(/Import-DscResource/)
    end
  end
  describe "when dscmeta_import_resource is false" do
    it "should compute powershell dsc test script without Import-DscResource" do
      dsc_xazuresqldatabase[:dscmeta_import_resource] = false
      expect(@provider.ps_script_content('test')).not_to match(/Import-DscResource/)
    end
    it "should compute powershell dsc set script without Import-DscResource" do
      dsc_xazuresqldatabase[:dscmeta_import_resource] = false
      expect(@provider.ps_script_content('set')).not_to match(/Import-DscResource/)
    end
  end
  describe "when dsc_ensure is 'present'" do
    before(:each) do
      # original_parameters must agree with the munged parameter so that
      # set_original_dsc_parameters below restores the same value.
      dsc_xazuresqldatabase.original_parameters[:dsc_ensure] = 'present'
      dsc_xazuresqldatabase[:dsc_ensure] = 'present'
      @provider = described_class.provider(:dsc_configuration).new(dsc_xazuresqldatabase)
    end
    it "should update :ensure to :present" do
      expect(dsc_xazuresqldatabase[:ensure]).to eq(:present)
    end
    it "should compute powershell dsc test script in which ensure value is 'present'" do
      @provider.set_test_dsc_parameters
      expect(@provider.ps_script_content('test')).to match(/ensure = 'present'/)
    end
    it "should compute powershell dsc set script in which ensure value is 'present'" do
      @provider.set_original_dsc_parameters
      expect(@provider.ps_script_content('set')).to match(/ensure = 'present'/)
    end
  end
  describe "when dsc_ensure is 'absent'" do
    before(:each) do
      dsc_xazuresqldatabase.original_parameters[:dsc_ensure] = 'absent'
      dsc_xazuresqldatabase[:dsc_ensure] = 'absent'
      @provider = described_class.provider(:dsc_configuration).new(dsc_xazuresqldatabase)
    end
    it "should update :ensure to :absent" do
      expect(dsc_xazuresqldatabase[:ensure]).to eq(:absent)
    end
    # NOTE(review): this expects 'present' in the TEST script even though
    # dsc_ensure is 'absent' -- set_test_dsc_parameters appears to rewrite
    # ensure for the test phase; confirm this is intentional.
    it "should compute powershell dsc test script in which ensure value is 'present'" do
      @provider.set_test_dsc_parameters
      expect(@provider.ps_script_content('test')).to match(/ensure = 'present'/)
    end
    it "should compute powershell dsc set script in which ensure value is 'absent'" do
      @provider.set_original_dsc_parameters
      expect(@provider.ps_script_content('set')).to match(/ensure = 'absent'/)
    end
  end
end
# mof PROVIDERS TESTS
# The :dsc_mof provider builds a MOF document instead of a PowerShell
# script; these examples only check instantiation, the MOF header, and the
# :ensure munging.
describe "mof provider tests" do
  it "should successfully instanciate the provider" do
    described_class.provider(:dsc_mof).new(dsc_xazuresqldatabase)
  end
  before(:each) do
    @provider = described_class.provider(:dsc_mof).new(dsc_xazuresqldatabase)
  end
  it "should successfully build mof file" do
    # expect(@provider.mof_test_content).to match(/instance of MSFT_xAzureSqlDatabase as $MSFT_xAzureSqlDatabase1ref$/)
    expect(@provider.mof_test_content).to match(/instance of MSFT_xAzureSqlDatabase/)
  end
  describe "when dsc_ensure is 'present'" do
    before(:each) do
      dsc_xazuresqldatabase[:dsc_ensure] = 'present'
      @provider = described_class.provider(:dsc_mof).new(dsc_xazuresqldatabase)
    end
    it "should update :ensure to :present" do
      expect(dsc_xazuresqldatabase[:ensure]).to eq(:present)
    end
  end
  describe "when dsc_ensure is 'absent'" do
    before(:each) do
      dsc_xazuresqldatabase[:dsc_ensure] = 'absent'
      @provider = described_class.provider(:dsc_mof).new(dsc_xazuresqldatabase)
    end
    it "should update :ensure to :absent" do
      expect(dsc_xazuresqldatabase[:ensure]).to eq(:absent)
    end
  end
end
end
| {
"content_hash": "28c4c1d5704284cffc6b2edb2ca63304",
"timestamp": "",
"source": "github",
"line_count": 365,
"max_line_length": 131,
"avg_line_length": 36.74794520547945,
"alnum_prop": 0.7136360247521062,
"repo_name": "msutter/puppet-dsc-custom",
"id": "2b96658f11c2987ed82fc18e907bcc25b0c969ca",
"size": "13433",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/unit/puppet/type/dsc_xazuresqldatabase_spec.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ruby",
"bytes": "1241094"
}
],
"symlink_target": ""
} |
FR.Backend = {
    /**
     * Derive the admin root URL from the current browser location.
     *
     * @returns {String} everything up to and including the admin*.php script
     * @throws {Error} when the current URL does not contain an admin script
     */
    rootUrl: function() {
        var matches = window.location.href.match(/^.+admin[^\.]*\.php/);
        if (!matches) {
            throw new Error( 'Could not parse URL' );
        }
        return matches[0];
    },
    /**
     * Build a SorterRow from a table row.
     *
     * @param {HTMLTableRowElement} tr
     * @param {String} urlMask removal URL containing a literal "%25"
     *                         placeholder for the row's database id; when
     *                         falsy, the row gets no removal behaviour
     */
    getSorterRow: function(tr, urlMask) {
        var row = new FR.Backend.SorterRow(
            $('label', tr).html(),
            $('.weight', tr).get(0)
        );
        if (urlMask) {
            var databaseId = $('.database-id', tr).val();
            // On removal, fire an AJAX request against this row's URL,
            // forwarding any success/error callbacks supplied by the caller.
            row.setOnRemove(function(onRemoveOptions) {
                var options = { url: urlMask.replace('%25', databaseId) };
                if (typeof onRemoveOptions.success !== 'undefined') {
                    options.success = onRemoveOptions.success;
                }
                if (typeof onRemoveOptions.error !== 'undefined') {
                    options.error = onRemoveOptions.error;
                }
                $.ajax(options);
            });
        }
        return row;
    }
};
| {
"content_hash": "4137f411536994a2f34860cb3feaf8b3",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 72,
"avg_line_length": 24.1875,
"alnum_prop": 0.5460809646856158,
"repo_name": "yitznewton/freerms",
"id": "35f3637c315b9abb01895ca121032bc91067fbc4",
"size": "1161",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web/js/lib/FR/Backend.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "22565"
},
{
"name": "PHP",
"bytes": "237963"
},
{
"name": "Shell",
"bytes": "732"
}
],
"symlink_target": ""
} |
package org.apache.lens.cube.metadata;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * A cube dimension attribute composed of an ordered hierarchy of child
 * attributes (e.g. country -&gt; state -&gt; city).  The hierarchy is
 * round-tripped through the string property map: {@link #addProperties(Map)}
 * writes it and {@link #getHiearachy(String, Map)} reads it back.
 */
public class HierarchicalDimAttribute extends CubeDimAttribute {
  // Ordered child attributes; index 0 is the top of the hierarchy.
  private final List<CubeDimAttribute> hierarchy;

  /**
   * @param name attribute name; must not be null
   * @param description human readable description
   * @param hierarchy ordered child attributes; must not be null
   */
  public HierarchicalDimAttribute(String name, String description, List<CubeDimAttribute> hierarchy) {
    super(name, description);
    this.hierarchy = hierarchy;
    assert (name != null);
    assert (hierarchy != null);
  }

  /** @return the ordered child attributes of this hierarchy */
  public List<CubeDimAttribute> getHierarchy() {
    return hierarchy;
  }

  /**
   * Serializes this attribute into {@code props}: one positional
   * "&lt;dim name&gt;,&lt;class&gt;" entry per hierarchy element, plus the
   * properties of each child attribute itself.
   */
  @Override
  public void addProperties(Map<String, String> props) {
    super.addProperties(props);
    for (int i = 0; i < hierarchy.size(); i++) {
      CubeDimAttribute dim = hierarchy.get(i);
      props.put(MetastoreUtil.getHierachyElementKeyName(getName(), i), getHierarchyElement(dim));
      dim.addProperties(props);
    }
  }

  /** Encodes one hierarchy element as "&lt;dim name&gt;,&lt;canonical class name&gt;". */
  public static String getHierarchyElement(CubeDimAttribute dim) {
    return dim.getName() + "," + dim.getClass().getCanonicalName();
  }

  /**
   * Deserializing constructor: rebuilds the hierarchy previously written by
   * {@link #addProperties(Map)}.
   */
  public HierarchicalDimAttribute(String name, Map<String, String> props) {
    super(name, props);
    this.hierarchy = getHiearachy(name, props);
  }

  /**
   * Reads the hierarchy elements of attribute {@code name} back out of
   * {@code props}.  Each element is instantiated reflectively through a
   * (String, Map) constructor on the stored class.
   *
   * @throws IllegalArgumentException if a stored class cannot be found or
   *         instantiated
   */
  public static List<CubeDimAttribute> getHiearachy(String name, Map<String, String> props) {
    Map<Integer, String> hierarchyElements = new HashMap<Integer, String>();
    for (String param : props.keySet()) {
      if (param.startsWith(MetastoreUtil.getHierachyElementKeyPFX(name))) {
        hierarchyElements.put(MetastoreUtil.getHierachyElementIndex(name, param), props.get(param));
      }
    }
    List<CubeDimAttribute> hierarchy = new ArrayList<CubeDimAttribute>(hierarchyElements.size());
    // Rebuild in positional order; indexes are assumed contiguous from 0
    // (they are written that way by addProperties).
    for (int i = 0; i < hierarchyElements.size(); i++) {
      String hierarchyElement = hierarchyElements.get(i);
      String[] elements = hierarchyElement.split(",");
      String dimName = elements[0];
      String className = elements[1];
      CubeDimAttribute dim;
      try {
        Class<?> clazz = Class.forName(className);
        Constructor<?> constructor;
        constructor = clazz.getConstructor(String.class, Map.class);
        dim = (CubeDimAttribute) constructor.newInstance(new Object[] { dimName, props });
        // Pre-Java-7 catch chain: every reflection failure is wrapped in
        // the same IllegalArgumentException, preserving the cause.
      } catch (ClassNotFoundException e) {
        throw new IllegalArgumentException("Invalid Dimension", e);
      } catch (SecurityException e) {
        throw new IllegalArgumentException("Invalid Dimension", e);
      } catch (NoSuchMethodException e) {
        throw new IllegalArgumentException("Invalid Dimension", e);
      } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException("Invalid Dimension", e);
      } catch (InstantiationException e) {
        throw new IllegalArgumentException("Invalid Dimension", e);
      } catch (IllegalAccessException e) {
        throw new IllegalArgumentException("Invalid Dimension", e);
      } catch (InvocationTargetException e) {
        throw new IllegalArgumentException("Invalid Dimension", e);
      }
      hierarchy.add(dim);
    }
    return hierarchy;
  }

  // hashCode/equals fold the hierarchy into the superclass identity.
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = super.hashCode();
    result = prime * result + ((getHierarchy() == null) ? 0 : getHierarchy().hashCode());
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (!super.equals(obj)) {
      return false;
    }
    HierarchicalDimAttribute other = (HierarchicalDimAttribute) obj;
    if (this.getHierarchy() == null) {
      if (other.getHierarchy() != null) {
        return false;
      }
    } else if (!this.getHierarchy().equals(other.getHierarchy())) {
      return false;
    }
    return true;
  }

  @Override
  public String toString() {
    String str = super.toString();
    str += ", hierarchy:" + MetastoreUtil.getObjectStr(hierarchy);
    return str;
  }
}
| {
"content_hash": "097cb3bdcbd740a033eb92980055bb34",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 102,
"avg_line_length": 34.56140350877193,
"alnum_prop": 0.6819796954314721,
"repo_name": "rajubairishetti/incubator-lens",
"id": "8ee54d69b64f6e420236dfa1f32a5a99610a3d30",
"size": "4748",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lens-cube/src/main/java/org/apache/lens/cube/metadata/HierarchicalDimAttribute.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "3124035"
},
{
"name": "JavaScript",
"bytes": "284450"
},
{
"name": "Shell",
"bytes": "9553"
}
],
"symlink_target": ""
} |
Translations
============
The Qt GUI can be easily translated into other languages. Here's how we
handle those translations.
Files and Folders
-----------------
### sharkcoin-qt.pro
This file takes care of generating `.qm` files from `.ts` files. It is mostly
automated.
### src/qt/bitcoin.qrc
This file must be updated whenever a new translation is added. Please note that
files must end with `.qm`, not `.ts`.
<qresource prefix="/translations">
<file alias="en">locale/bitcoin_en.qm</file>
...
</qresource>
### src/qt/locale/
This directory contains all translations. Filenames must adhere to this format:
bitcoin_xx_YY.ts or bitcoin_xx.ts
#### bitcoin_en.ts (Source file)
`src/qt/locale/bitcoin_en.ts` is treated in a special way. It is used as the
source for all other translations. Whenever a string in the code is changed
this file must be updated to reflect those changes. This can be accomplished
by running `lupdate` (included in the Qt SDK). Also, a custom script is used
to extract strings from the non-Qt parts:
python share/qt/extract_strings_qt.py
lupdate bitcoin-qt.pro -no-obsolete -locations none -ts src/qt/locale/bitcoin_en.ts
##### Handling of plurals in the source file
When new plurals are added to the source file, it's important to do the following steps:
1. Open bitcoin_en.ts in Qt Linguist (also included in the Qt SDK)
2. Search for `%n`, which will take you to the parts in the translation that use plurals
3. Look for empty `English Translation (Singular)` and `English Translation (Plural)` fields
4. Add the appropriate strings for the singular and plural form of the base string
5. Mark the item as done (via the green arrow symbol in the toolbar)
6. Repeat from step 2. until all singular and plural forms are in the source file
7. Save the source file
##### Creating the pull-request
An updated source file should be merged to GitHub, and Transifex will pick it
up from there (this can take a few hours). Afterwards, the new strings show up as "Remaining"
in Transifex and can be translated.
To create the pull-request you have to do:
git add src/qt/bitcoinstrings.cpp src/qt/locale/bitcoin_en.ts
git commit
Syncing with Transifex
----------------------
We are using https://transifex.com as a frontend for translating the client.
https://www.transifex.com/projects/p/bitcoin/resource/tx/
The "Transifex client" (see: http://help.transifex.com/features/client/)
will help with fetching new translations from Transifex. Use the following
config to be able to connect with the client:
### .tx/config
[main]
host = https://www.transifex.com
[bitcoin.tx]
file_filter = src/qt/locale/bitcoin_<lang>.ts
source_file = src/qt/locale/bitcoin_en.ts
source_lang = en
### .tx/config (for Windows)
[main]
host = https://www.transifex.com
[bitcoin.tx]
file_filter = src\qt\locale\bitcoin_<lang>.ts
source_file = src\qt\locale\bitcoin_en.ts
source_lang = en
It is also possible to directly download new translations one by one from the Transifex website.
### Fetching new translations
1. `tx pull -a`
2. update `src/qt/bitcoin.qrc` manually or via
`ls src/qt/locale/*ts|xargs -n1 basename|sed 's/\(bitcoin_\(.*\)\).ts/<file alias="\2">locale/\1.qm<\/file>/'`
3. `git add` new translations from `src/qt/locale/`
| {
"content_hash": "560edaabf935a70cd85edf044bdf38d0",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 113,
"avg_line_length": 32.61165048543689,
"alnum_prop": 0.7153914855611789,
"repo_name": "sharkcoin/sharkcoin",
"id": "8813278886352aad689b6c3da9666eaec819c61f",
"size": "3359",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/translation_process.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "89066"
},
{
"name": "C++",
"bytes": "1376611"
},
{
"name": "IDL",
"bytes": "11511"
},
{
"name": "Objective-C",
"bytes": "2463"
},
{
"name": "Python",
"bytes": "2879"
},
{
"name": "Shell",
"bytes": "1144"
},
{
"name": "TypeScript",
"bytes": "3821892"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
GRIN Taxonomy for Plants
#### Published in
Catalogue 140:ix. 1898
#### Original name
null
### Remarks
null | {
"content_hash": "0ac06b4aa4a5c57d1e37b44609b80d5f",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 24,
"avg_line_length": 10.461538461538462,
"alnum_prop": 0.6985294117647058,
"repo_name": "mdoering/backbone",
"id": "c7ac82adcb5566f4379831ba49689b6c57c28e0c",
"size": "195",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Cornales/Hydrangeaceae/Deutzia/Deutzia rosea/ Syn. Deutzia gracilis rosea/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<html dir="LTR">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=Windows-1252" />
<meta name="vs_targetSchema" content="http://schemas.microsoft.com/intellisense/ie5" />
<title>Level.Trace Field</title>
<xml>
</xml>
<link rel="stylesheet" type="text/css" href="MSDN.css" />
</head>
<body id="bodyID" class="dtBODY">
<div id="nsbanner">
<div id="bannerrow1">
<table class="bannerparthead" cellspacing="0">
<tr id="hdr">
<td class="runninghead">Apache log4net SDK Documentation - Microsoft .NET Framework 4.0</td>
<td class="product">
</td>
</tr>
</table>
</div>
<div id="TitleRow">
<h1 class="dtH1">Level.Trace Field
</h1>
</div>
</div>
<div id="nstext">
<p> The <b>Trace</b> level designates fine-grained informational events that are most useful to debug an application. </p>
<div class="syntax">
<span class="lang">[Visual Basic]</span>
<br />Public Shared ReadOnly Trace As <a href="log4net.Core.Level.html">Level</a></div>
<div class="syntax">
<span class="lang">[C#]</span>
<br />public static readonly <a href="log4net.Core.Level.html">Level</a> Trace;</div>
<p>
</p>
<h4 class="dtH4">See Also</h4>
<p>
<a href="log4net.Core.Level.html">Level Class</a> | <a href="log4net.Core.html">log4net.Core Namespace</a></p>
<object type="application/x-oleobject" classid="clsid:1e2a7bd0-dab9-11d0-b93a-00c04fc99f9e" viewastext="true" style="display: none;">
<param name="Keyword" value="Trace field">
</param>
<param name="Keyword" value="Trace field, Level class">
</param>
<param name="Keyword" value="Level.Trace field">
</param>
</object>
<hr />
<div id="footer"><a href='http://logging.apache.org/log4net/'>Copyright 2004-2013 The Apache Software Foundation.</a><br></br>Apache log4net, Apache and log4net are trademarks of The Apache Software Foundation.</div>
</div>
</body>
</html> | {
"content_hash": "fed0a522c94a474113ca9193ffe722ff",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 222,
"avg_line_length": 42.470588235294116,
"alnum_prop": 0.5854108956602031,
"repo_name": "gersonkurz/manualisator",
"id": "ccd369dc056443f2a59cf458a33543fe5594d800",
"size": "2166",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manualisator/log4net-1.2.13/doc/release/sdk/log4net.Core.Level.Trace.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "235121"
},
{
"name": "CSS",
"bytes": "15869"
},
{
"name": "HTML",
"bytes": "9723377"
},
{
"name": "JavaScript",
"bytes": "5685"
},
{
"name": "NSIS",
"bytes": "1916"
},
{
"name": "Shell",
"bytes": "1041"
}
],
"symlink_target": ""
} |
<configuration>
<conversionRule conversionWord="coloredLevel" converterClass="play.api.libs.logback.ColoredLevel"/>
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${application.home:-.}/logs/application.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- Daily rollover with compression -->
<fileNamePattern>${application.home:-.}/logs/application-log-%d{yyyy-MM-dd}.gz</fileNamePattern>
<!-- keep 30 days worth of history -->
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%date{yyyy-MM-dd HH:mm:ss ZZZZ} [%level] from %logger in %thread - %message%n%xException</pattern>
</encoder>
</appender>
<appender name="ACCESS_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${application.home:-.}/logs/access.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- daily rollover with compression -->
<fileNamePattern>${application.home:-.}/logs/access-log-%d{yyyy-MM-dd}.gz</fileNamePattern>
<!-- keep 60 days worth of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%date{yyyy-MM-dd HH:mm:ss ZZZZ} %message%n</pattern>
<!-- this quadruples logging throughput -->
<immediateFlush>false</immediateFlush>
</encoder>
</appender>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%coloredLevel %logger{15} - %message%n%xException{10}</pattern>
</encoder>
</appender>
<logger name="play" level="INFO"/>
<logger name="application" level="INFO"/>
<logger name="slick" level="INFO"/>
<logger name="httpResponse" level="ERROR"/>
<root level="INFO">
<appender-ref ref="FILE"/>
<appender-ref ref="STDOUT"/>
</root>
<logger name="access" level="INFO" additivity="false">
<appender-ref ref="ACCESS_FILE"/>
<appender-ref ref="STDOUT"/>
</logger>
</configuration> | {
"content_hash": "79ffce4f85dce740da834fe2c7ec3ab6",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 119,
"avg_line_length": 42.46153846153846,
"alnum_prop": 0.6227355072463768,
"repo_name": "THK-ADV/lwm-reloaded",
"id": "c6fbead79c44875c8439eccdf62fd2318d8d6a76",
"size": "2208",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conf/logback.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "147"
},
{
"name": "Java",
"bytes": "1144"
},
{
"name": "JavaScript",
"bytes": "88"
},
{
"name": "Scala",
"bytes": "768023"
},
{
"name": "Shell",
"bytes": "1165"
}
],
"symlink_target": ""
} |
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.simplefooddeliveryapp"
android:versionCode="1"
android:versionName="1.0">
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW"/>
<uses-sdk
android:minSdkVersion="16"
android:targetSdkVersion="22" />
<application
android:name=".MainApplication"
android:allowBackup="true"
android:label="@string/app_name"
android:icon="@mipmap/ic_launcher"
android:theme="@style/AppTheme">
<activity
android:name=".MainActivity"
android:label="@string/app_name"
android:configChanges="keyboard|keyboardHidden|orientation|screenSize"
android:windowSoftInputMode="adjustResize">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity android:name="com.facebook.react.devsupport.DevSettingsActivity" />
</application>
</manifest>
| {
"content_hash": "b501f5fea20440d832838b698b3eff0d",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 83,
"avg_line_length": 36.1875,
"alnum_prop": 0.6761658031088082,
"repo_name": "RobertoNovelo/SimpleFoodDeliveryApp",
"id": "e2af0932eef5bfa6c75ea3577352f3c9152dd7ec",
"size": "1158",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "android/app/src/main/AndroidManifest.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1590"
},
{
"name": "Java",
"bytes": "1533"
},
{
"name": "JavaScript",
"bytes": "6051"
},
{
"name": "Objective-C",
"bytes": "4446"
},
{
"name": "Python",
"bytes": "1752"
}
],
"symlink_target": ""
} |
import IECore
import GafferImage # this sets the OCIO environment variable
import PyOpenColorIO as OCIO
import Gaffer
import GafferUI
import GafferImageUI
# get default display setup
config = OCIO.GetCurrentConfig()
defaultDisplay = config.getDefaultDisplay()
# add preferences plugs
preferences = application.root()["preferences"]
preferences["displayColorSpace"] = Gaffer.CompoundPlug()
preferences["displayColorSpace"]["view"] = Gaffer.StringPlug( defaultValue = config.getDefaultView( defaultDisplay ) )
# configure ui for preferences plugs
GafferUI.PlugValueWidget.registerCreator(
Gaffer.Preferences.staticTypeId(),
"displayColorSpace.view",
GafferUI.EnumPlugValueWidget,
labelsAndValues = zip( config.getViews( defaultDisplay ), config.getViews( defaultDisplay ) ),
)
# update the display transform from the plugs
def __setDisplayTransform() :
	"""Install a GafferUI display transform converting scene-linear
	colours to the view currently selected in the preferences."""

	viewName = preferences["displayColorSpace"]["view"].getValue()
	colorSpaceName = config.getDisplayColorSpaceName( defaultDisplay, viewName )
	processor = config.getProcessor( OCIO.Constants.ROLE_SCENE_LINEAR, colorSpaceName )

	def transform( c ) :
		r, g, b = processor.applyRGB( [ c.r, c.g, c.b ] )
		return IECore.Color3f( r, g, b )

	GafferUI.DisplayTransform.set( transform )
__setDisplayTransform()
# and connect to plug changed to update things again when the user asks
def __plugSet( plug ) :
	"""Refresh all display transforms whenever the displayColorSpace
	preferences plug is set; other plugs are ignored."""

	if plug.relativeName( plug.node() ) == "displayColorSpace" :
		__setDisplayTransform()
		__updateDefaultDisplayTransforms()
application.__ocioPlugSetConnection = preferences.plugSetSignal().connect( __plugSet )
# register display transforms with the image viewer
def __displayTransformCreator( name ) :
	"""Return an OpenColorIO node converting linear to the colour space
	of the named view on the default display."""

	node = GafferImage.OpenColorIO()
	node["inputSpace"].setValue( "linear" )
	node["outputSpace"].setValue( config.getDisplayColorSpaceName( defaultDisplay, name ) )

	return node
for name in config.getViews( defaultDisplay ) :
GafferImageUI.ImageView.registerDisplayTransform( name, IECore.curry( __displayTransformCreator, name ) )
# and register a special "Default" display transform which tracks the
# global settings from the preferences
__defaultDisplayTransforms = []
def __updateDefaultDisplayTransforms() :
	"""Point every registered 'Default' transform node at the colour
	space for the view currently chosen in the preferences."""

	viewName = preferences["displayColorSpace"]["view"].getValue()
	colorSpaceName = config.getDisplayColorSpaceName( defaultDisplay, viewName )

	for transformNode in __defaultDisplayTransforms :
		transformNode["outputSpace"].setValue( colorSpaceName )
def __defaultDisplayTransformCreator() :
	"""Create an OpenColorIO node that tracks the preferences: it is
	remembered in __defaultDisplayTransforms so later preference changes
	retarget its output space."""

	node = GafferImage.OpenColorIO()
	node["inputSpace"].setValue( "linear" )

	__defaultDisplayTransforms.append( node )
	__updateDefaultDisplayTransforms()

	return node
GafferImageUI.ImageView.registerDisplayTransform( "Default", __defaultDisplayTransformCreator )
| {
"content_hash": "0c5a78247fb098d05d457de1c9b4fdaa",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 118,
"avg_line_length": 28.95744680851064,
"alnum_prop": 0.772226304188097,
"repo_name": "davidsminor/gaffer",
"id": "ca3aab81bcd848fd006d29e8855729a77c88bb7e",
"size": "4601",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "startup/gui/ocio.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "9286"
},
{
"name": "C++",
"bytes": "3358250"
},
{
"name": "COBOL",
"bytes": "64449"
},
{
"name": "CSS",
"bytes": "28027"
},
{
"name": "Python",
"bytes": "3267354"
},
{
"name": "Shell",
"bytes": "7055"
},
{
"name": "Slash",
"bytes": "35200"
}
],
"symlink_target": ""
} |
package edu.stevens.cs549.hadoop.socialrank;
import java.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;
public class ReciprocityRed1 extends Reducer<Text, Text, Text, Text> {

	/**
	 * Emits, as the output key, the number of values received for the
	 * input key (2 when a link is bidirectional, 1 when unidirectional);
	 * the output value is left empty.
	 */
	@SuppressWarnings("unused")
	public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
		int linkCount = 0;
		for (Text ignored : values) { // only the count matters, not the values
			linkCount++;
		}
		context.write(new Text(String.valueOf(linkCount)), new Text());
	}
}
| {
"content_hash": "e90020c6dca546c22cfe21108cf7439f",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 112,
"avg_line_length": 29.681818181818183,
"alnum_prop": 0.6906584992343032,
"repo_name": "liulin2012/SocialRank",
"id": "9816e89cb1c0b6dff9d34cfc4085f289351b5a7a",
"size": "653",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/edu/stevens/cs549/hadoop/socialrank/ReciprocityRed1.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "29427"
}
],
"symlink_target": ""
} |
import os
import sys
from mock import patch
from pytest import raises
from dciclient.v1.shell_commands.cli import parse_arguments
from dciclient.v1.shell_commands.context import _default_dci_cs_url
from dciclient.version import __version__
@patch("sys.exit")
def test_parse_arguments_version(exit_function, capsys):
    """--version prints the client version and exits."""
    parse_arguments(["--version", "user-list"])
    output = capsys.readouterr()
    # Note(hguemar): argparse behaviour here changed on py3k
    stream = output.out if sys.version_info > (3, 0) else output.err
    assert stream == "dcictl {}\n".format(__version__)
    assert exit_function.called
def test_parse_arguments_format():
    """--format accepts json/csv/tsv; default comes from DCI_FORMAT or 'table'."""
    for fmt in ("json", "csv", "tsv"):
        parsed = parse_arguments(["--format", fmt, "user-list"])
        assert parsed.format == fmt
    parsed = parse_arguments(["user-list"])
    assert parsed.format == os.environ.get("DCI_FORMAT", "table")
def test_parse_arguments_dci_login():
    """--dci-login from the CLI ends up on the parsed namespace."""
    parsed = parse_arguments(["--dci-login", "foo", "user-list"], {"DCI_LOGIN": "foo"})
    assert parsed.dci_login == "foo"
def test_parse_arguments_dci_login_from_env():
    """DCI_LOGIN alone provides the login value."""
    parsed = parse_arguments(["user-list"], {"DCI_LOGIN": "foo"})
    assert parsed.dci_login == "foo"
def test_parse_arguments_dci_login_overload_from_env():
    """A CLI --dci-login wins over the DCI_LOGIN environment variable."""
    parsed = parse_arguments(["--dci-login", "bar", "user-list"], {"DCI_LOGIN": "foo"})
    assert parsed.dci_login == "bar"
def test_parse_arguments_dci_password():
    """--dci-password from the CLI ends up on the parsed namespace."""
    parsed = parse_arguments(["--dci-password", "foo", "user-list"])
    assert parsed.dci_password == "foo"
def test_parse_arguments_dci_password_from_env():
    """DCI_PASSWORD alone provides the password value."""
    parsed = parse_arguments(["user-list"], {"DCI_PASSWORD": "foo"})
    assert parsed.dci_password == "foo"
def test_parse_arguments_dci_password_overload_from_env():
    """A CLI --dci-password wins over the DCI_PASSWORD environment variable."""
    parsed = parse_arguments(["--dci-password", "bar", "user-list"], {"DCI_PASSWORD": "foo"})
    assert parsed.dci_password == "bar"
def test_parse_arguments_dci_client_id():
    """--dci-client-id from the CLI ends up on the parsed namespace."""
    parsed = parse_arguments(["--dci-client-id", "foo", "user-list"])
    assert parsed.dci_client_id == "foo"
def test_parse_arguments_dci_client_id_from_env():
    """DCI_CLIENT_ID alone provides the client id."""
    parsed = parse_arguments(["user-list"], {"DCI_CLIENT_ID": "foo"})
    assert parsed.dci_client_id == "foo"
def test_parse_arguments_dci_client_id_overload_from_env():
    """A CLI --dci-client-id wins over the DCI_CLIENT_ID environment variable."""
    parsed = parse_arguments(["--dci-client-id", "bar", "user-list"], {"DCI_CLIENT_ID": "foo"})
    assert parsed.dci_client_id == "bar"
def test_parse_arguments_dci_api_secret():
    """--dci-api-secret from the CLI ends up on the parsed namespace."""
    parsed = parse_arguments(["--dci-api-secret", "foo", "user-list"])
    assert parsed.dci_api_secret == "foo"
def test_parse_arguments_dci_api_secret_from_env():
    """DCI_API_SECRET alone provides the API secret."""
    parsed = parse_arguments(["user-list"], {"DCI_API_SECRET": "foo"})
    assert parsed.dci_api_secret == "foo"
def test_parse_arguments_dci_api_secret_overload_from_env():
    """A CLI --dci-api-secret wins over the DCI_API_SECRET environment variable."""
    parsed = parse_arguments(["--dci-api-secret", "bar", "user-list"], {"DCI_API_SECRET": "foo"})
    assert parsed.dci_api_secret == "bar"
def test_parse_arguments_dci_cs_url_default():
    """Without any configuration, the control-server URL falls back to the default."""
    parsed = parse_arguments(["user-list"])
    assert parsed.dci_cs_url == _default_dci_cs_url
def test_parse_arguments_dci_cs_url():
    """--dci-cs-url from the CLI ends up on the parsed namespace."""
    parsed = parse_arguments(["--dci-cs-url", "foo", "user-list"])
    assert parsed.dci_cs_url == "foo"
def test_parse_arguments_dci_cs_url_from_env():
    """DCI_CS_URL alone provides the control-server URL."""
    parsed = parse_arguments(["user-list"], {"DCI_CS_URL": "foo"})
    assert parsed.dci_cs_url == "foo"
def test_parse_arguments_dci_cs_url_overload_from_env():
    """A CLI --dci-cs-url wins over the DCI_CS_URL environment variable."""
    parsed = parse_arguments(["--dci-cs-url", "bar", "user-list"], {"DCI_CS_URL": "foo"})
    assert parsed.dci_cs_url == "bar"
# fear test
@patch("sys.exit")
def test_parse_arguments_user_create_mutually_exclusive_boolean_flags(exit_function):
    """Passing both --active and --no-active is rejected."""
    user_create_args = [
        "user-create",
        "--name", "foo",
        "--password", "bar",
        "--email", "foo@foo.bar",
        "--active",
        "--no-active",
    ]
    with raises(TypeError):
        parse_arguments(user_create_args)
    assert exit_function.called
def test_parse_arguments_sso():
    """SSO credentials given on the CLI are parsed; token fields stay unset."""
    parsed = parse_arguments(
        [
            "--sso-url", "https://sso.redhat.com",
            "--sso-username", "dci",
            "--sso-password", "dci",
            "user-list",
        ],
        {},
    )
    assert parsed.sso_url == "https://sso.redhat.com"
    assert parsed.sso_username == "dci"
    assert parsed.sso_password == "dci"
    assert parsed.sso_token is None
    assert parsed.refresh_sso_token is False
def test_parse_arguments_sso_env():
    """SSO settings can come entirely from environment variables."""
    parsed = parse_arguments(
        ["user-list"],
        {
            "SSO_URL": "https://sso.redhat.com",
            "SSO_USERNAME": "sso",
            "SSO_PASSWORD": "sso",
        },
    )
    assert parsed.sso_url == "https://sso.redhat.com"
    assert parsed.sso_username == "sso"
    assert parsed.sso_password == "sso"
    assert parsed.sso_token is None
    assert parsed.refresh_sso_token is False
def test_parse_arguments_sso_token():
    """--sso-token and --refresh-sso-token are both honoured."""
    parsed = parse_arguments(["--sso-token", "abc", "--refresh-sso-token", "user-list"], {})
    assert parsed.sso_token == "abc"
    assert parsed.refresh_sso_token
def test_parse_arguments_sso_token_env():
    """SSO_TOKEN in the environment feeds sso_token; refresh stays off."""
    parsed = parse_arguments(["user-list"], {"SSO_TOKEN": "efg"})
    assert parsed.sso_token == "efg"
    assert parsed.refresh_sso_token is False
def test_verbose():
    """--verbose (and its alias --long) flips the verbose flag; default is off."""
    base = [
        "user-create",
        "--name", "toto",
        "--password", "toto",
        "--email", "toto@example.org",
    ]
    assert parse_arguments(base).verbose is False
    assert parse_arguments(["--verbose"] + base).verbose is True
    assert parse_arguments(["--long"] + base).verbose is True
def test_csv():
    """--tags splits a comma-separated list, trims whitespace, defaults to []."""
    common = ["component-create", "--name", "RHEL-8", "--type", "compose"]
    parsed = parse_arguments(common + ["--topic-id", "t1", "--tags", "t1,t2,t3"])
    assert parsed.tags == ["t1", "t2", "t3"]
    parsed = parse_arguments(common + ["--tags", "t4, t2 ,t3", "--topic-id", "t1"])
    assert parsed.tags == ["t4", "t2", "t3"]
    parsed = parse_arguments(common + ["--topic-id", "t1"])
    assert parsed.tags == []
| {
"content_hash": "17bc16986ba3b5dfc4b7f9cbd8901c2f",
"timestamp": "",
"source": "github",
"line_count": 277,
"max_line_length": 87,
"avg_line_length": 26.350180505415164,
"alnum_prop": 0.5351418002466092,
"repo_name": "redhat-cip/python-dciclient",
"id": "c7a223fbfcc08f4792cd0f6ff9a047a381b9be70",
"size": "7899",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/shell_commands/test_cli.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "761"
},
{
"name": "Python",
"bytes": "251289"
},
{
"name": "Shell",
"bytes": "2264"
}
],
"symlink_target": ""
} |
#ifndef THRIFT_TEST_LOADGEN_INTERVALTIMER_H_
#define THRIFT_TEST_LOADGEN_INTERVALTIMER_H_ 1
#include <thrift/lib/cpp/concurrency/Util.h>
#include <thrift/lib/cpp/concurrency/Mutex.h>
#include <thrift/lib/cpp/TLogging.h>
#include <folly/portability/Unistd.h>
namespace apache { namespace thrift { namespace loadgen {
/**
 * IntervalTimer helps perform tasks at a desired rate.
 *
 * Call sleep() in between each operation, and it will sleep the required
 * amount of time to hit the target rate.  It accounts for the time required to
 * perform each operation, and it also adjusts the subsequent intervals if the
 * system sleep call wakes up later than requested.  This allows good accuracy
 * for the average rate, even when the requested interval is very small.  Works
 * between multiple threads.
 *
 * Note on units: the configured interval is stored in nanoseconds
 * (intervalNsec_), while timestamps and the backlog threshold are kept in
 * microseconds (intervalStart_, maxBacklog_); sleep() converts between the
 * two.
 */
class IntervalTimer {
 public:
  /**
   * Create a new IntervalTimer
   *
   * @param intervalNsec The desired number of ns each interval should take.
   * @param maxBacklog   If we can't keep up with the requested rate, reset
   *                     when we fall more than maxBacklog microseconds
   *                     behind.  If the rate does eventually recover, this
   *                     setting helps reduce the amount of time that the
   *                     timer goes too fast in order to catch up to the
   *                     average rate.
   */
  IntervalTimer(uint64_t intervalNsec,
                uint64_t maxBacklog = 3 * concurrency::Util::US_PER_S)
    : numTimes_(0)
    , intervalNsec_(intervalNsec)
    , intervalStart_(0)
    , maxBacklog_(maxBacklog) { }

  /**
   * Change the interval length, in nanoseconds.
   *
   * Resets the average-rate bookkeeping.  If the timer has already been
   * started, "now" becomes the new reference point; otherwise it stays
   * unstarted until start() is called.
   */
  void setIntervalNsec(uint64_t interval) {
    concurrency::Guard guard(mutex_);
    intervalNsec_ = interval;
    // Preserve the started/not-started state (0 means "not started yet").
    intervalStart_ = intervalStart_ ? concurrency::Util::currentTimeUsec()
      : 0;
    numTimes_ = 0;
  }

  /**
   * Change the rate, 0 means run as fast as possible.
   */
  void setRatePerSec(uint64_t rate) {
    concurrency::Guard guard(mutex_);
    // rate == 0 disables throttling entirely (see sleep()).
    if (rate == 0) intervalNsec_ = 0;
    else intervalNsec_ = concurrency::Util::NS_PER_S / rate;
    // Same re-anchoring behavior as setIntervalNsec().
    intervalStart_ = intervalStart_ ? concurrency::Util::currentTimeUsec()
      : 0;
    numTimes_ = 0;
  }

  /**
   * Start the timer.
   *
   * Call this method before the first interval.
   */
  void start() {
    concurrency::Guard guard(mutex_);
    intervalStart_ = concurrency::Util::currentTimeUsec();
  }

  /**
   * Sleep until the next interval should start.
   *
   * @return Returns true during normal operations, and false if the maxBacklog
   *         was hit and the timer has reset the average rate calculation.
   */
  bool sleep() {
    // Go as fast as possible when intervalNsec_ is 0.
    // NOTE(review): this read is done outside the mutex; presumably callers
    // do not change the rate concurrently with sleep() — confirm.
    if (intervalNsec_ == 0) {
      return true;
    }

    uint64_t waitUntil, now;
    {
      concurrency::Guard guard(mutex_);
      // intervalStart_ is when the just previous interval started (or when it
      // was supposed to start, if we aren't able to keep up with the requested
      // rate).
      //
      // Update it to be when the next interval is supposed to start
      numTimes_++;
      now = concurrency::Util::currentTimeUsec();
      // Convert the nanosecond interval into microseconds (/ 1000) so it can
      // be compared against the usec timestamps.
      waitUntil = intervalStart_ + (intervalNsec_ * numTimes_) / 1000;
      if (now > waitUntil) {
        // If we can't keep up with the requested rate, we'll keep falling
        // farther and farther behind.
        //
        // If we fall farther than maxBacklog_ behind, reset intervalStart_ to
        // the current time.  This way, if the operations eventually do speed up
        // and we are able to meet the requested rate, we won't exceed it for
        // too long trying to catch up.
        uint64_t delta = now - waitUntil;
        if (delta > maxBacklog_) {
          intervalStart_ = now;
          numTimes_ = 0;
          return false;
        }

        return true;
      }
    }

    // Sleep outside the lock so other threads can keep updating the timer.
    usleep(waitUntil - now);
    return true;
  }

 private:
  // Number of completed intervals since the last reset/re-anchor.
  uint64_t numTimes_;
  // Desired interval length, in nanoseconds (0 = unthrottled).
  uint64_t intervalNsec_;
  // Usec timestamp of the reference point (0 = not started yet).
  uint64_t intervalStart_;
  // Backlog threshold in microseconds before the schedule is reset.
  uint64_t maxBacklog_;
  // Protects all of the members above across threads.
  concurrency::Mutex mutex_;
};
}}} // apache::thrift::loadgen
#endif // THRIFT_TEST_LOADGEN_INTERVALTIMER_H_
| {
"content_hash": "53c3e4fd13eb5a441e1a2911c639c937",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 80,
"avg_line_length": 31.65909090909091,
"alnum_prop": 0.6300550370902129,
"repo_name": "getyourguide/fbthrift",
"id": "f4d336ae789c1ea3e5f1bf639e8f65e8d2c9c554",
"size": "4982",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "thrift/lib/cpp/test/loadgen/IntervalTimer.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "154349"
},
{
"name": "C#",
"bytes": "28929"
},
{
"name": "C++",
"bytes": "17798156"
},
{
"name": "CMake",
"bytes": "33182"
},
{
"name": "D",
"bytes": "669764"
},
{
"name": "Emacs Lisp",
"bytes": "5154"
},
{
"name": "Erlang",
"bytes": "23039"
},
{
"name": "Go",
"bytes": "375816"
},
{
"name": "HTML",
"bytes": "404999"
},
{
"name": "Hack",
"bytes": "768869"
},
{
"name": "Haskell",
"bytes": "305707"
},
{
"name": "Java",
"bytes": "2408919"
},
{
"name": "JavaScript",
"bytes": "6018"
},
{
"name": "Lex",
"bytes": "11934"
},
{
"name": "M4",
"bytes": "99563"
},
{
"name": "Makefile",
"bytes": "53670"
},
{
"name": "OCaml",
"bytes": "32043"
},
{
"name": "Objective-C",
"bytes": "152361"
},
{
"name": "PHP",
"bytes": "322092"
},
{
"name": "Perl",
"bytes": "70682"
},
{
"name": "Protocol Buffer",
"bytes": "585"
},
{
"name": "Python",
"bytes": "2413275"
},
{
"name": "Ruby",
"bytes": "328584"
},
{
"name": "Shell",
"bytes": "32559"
},
{
"name": "Smalltalk",
"bytes": "22812"
},
{
"name": "TeX",
"bytes": "48707"
},
{
"name": "Thrift",
"bytes": "259661"
},
{
"name": "Vim script",
"bytes": "2837"
},
{
"name": "Yacc",
"bytes": "36158"
}
],
"symlink_target": ""
} |
using System.Net;
namespace Stratis.Bitcoin.EventBus.CoreEvents
{
/// <summary>
/// Base peer event.
/// </summary>
/// <seealso cref="Stratis.Bitcoin.EventBus.EventBase" />
public abstract class PeerEventBase : EventBase
{
/// <summary>
/// Gets the peer end point.
/// </summary>
/// <value>
/// The peer end point.
/// </value>
public IPEndPoint PeerEndPoint { get; }
public PeerEventBase(IPEndPoint peerEndPoint)
{
this.PeerEndPoint = peerEndPoint;
}
}
} | {
"content_hash": "945ab36b3710aad7f15c6104b0674866",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 61,
"avg_line_length": 24.208333333333332,
"alnum_prop": 0.5559380378657487,
"repo_name": "Neurosploit/StratisBitcoinFullNode",
"id": "3fc50827069bb56a100a59cc49cfed6ab027a4a2",
"size": "583",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/Stratis.Bitcoin/EventBus/CoreEvents/Peer/PeerEventBase.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "216"
},
{
"name": "C#",
"bytes": "12434354"
},
{
"name": "CSS",
"bytes": "2127"
},
{
"name": "Dockerfile",
"bytes": "1636"
},
{
"name": "HTML",
"bytes": "2444"
},
{
"name": "PowerShell",
"bytes": "9124"
},
{
"name": "Shell",
"bytes": "3446"
}
],
"symlink_target": ""
} |
textFeats - A tool that extracts text features for given Page XMLs or images.
# INSTALLATION AND USAGE
git clone --recursive https://github.com/mauvilsa/textFeats
mkdir textFeats/build
cd textFeats/build
cmake -DCMAKE_INSTALL_PREFIX:PATH=$HOME ..
make install
textFeats --help
# CONTRIBUTING
If you intend to contribute, before any commits be sure to first execute githook-pre-commit to set up (symlink) the pre-commit hook. This hook takes care of automatically updating the tool and file versions.
# COPYRIGHT
The MIT License (MIT)
Copyright (c) 2015-present, Mauricio Villegas <mauricio_ville@yahoo.com>
| {
"content_hash": "a8397853201291c6f7fd22a0c33f58fe",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 207,
"avg_line_length": 30.61904761904762,
"alnum_prop": 0.7527216174183515,
"repo_name": "mauvilsa/textfeats",
"id": "2447a3fed4cf0cd4ac6ba26df701d65ee6bfb592",
"size": "651",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "921"
},
{
"name": "C++",
"bytes": "26201"
},
{
"name": "CMake",
"bytes": "3407"
},
{
"name": "Dockerfile",
"bytes": "2243"
},
{
"name": "Shell",
"bytes": "5209"
}
],
"symlink_target": ""
} |
<?php
/* vim: set expandtab tabstop=2 shiftwidth=2 softtabstop=2: */
/**
* The htmlinject hook.
*
* A function used to inject html. This utilizes the hook pattern of the theme.
*
* @author Cory Collier <corycollier@corycollier.com>
* @license http://opensource.org/licenses/MIT MIT License
* @version git: $Id$
* @link https://github.com/corycollier/simplesamlphp-module-themes
* @see https://github.com/simplesamlphp/simplesamlphp/
* @since File available since Release 1.3.0
*/
/**
 * Hook to modify HTML content from previous hook implementations.
 *
 * Rewrites the stock jQuery UI class names in the 'pre' fragments so the
 * theme's Bootstrap-style markup takes over.
 *
 * @param array &$hookinfo A reference to all of the hook information.
 *
 * @return void
 */
function themes_hook_htmlinject(&$hookinfo) {
  if (!isset($hookinfo['pre'])) {
    return;
  }

  // This is pretty ugly, but to get the theme to work, We've got to modify the
  // existing markup. Overriding it would be ideal.
  $replacements = array(
    'tabset_tabs' => 'nav nav-tabs',
    // 'id="portalcontent"' => '',
    'ui-state-active' => 'active',
    'ui-state-default' => '',
    'ui-corner-top' => '',
    'ui-tabs-nav' => '',
    'id="portalmenu"' => 'class="row"',
    'ui-tabs-panel ui-widget-content ui-corner-bottom' => '',
    'ui-helper-clearfix ui-widget-header ui-corner-all' => '',
    'ui-tabs ui-widget ui-widget-content ui-corner-all' => '',
  );

  foreach ($hookinfo['pre'] as &$fragment) {
    $fragment = strtr($fragment, $replacements);
  }
  unset($fragment);
}
/*
* Local variables:
* tab-width: 2
* c-basic-offset: 2
* c-hanging-comment-ender-p: nil
* End:
*/
| {
"content_hash": "4d45fb305fecf14e6212268481a73666",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 83,
"avg_line_length": 31.30188679245283,
"alnum_prop": 0.6033755274261603,
"repo_name": "corycollier/simplesamlphp-module-themes",
"id": "2860e107856eafdc4de7c008647d610e2f2d3dab",
"size": "1659",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/hooks/hook_htmlinject.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3401"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "PHP",
"bytes": "82117"
},
{
"name": "Ruby",
"bytes": "892"
}
],
"symlink_target": ""
} |
package com.github.xxbeanxx.web;
import org.apache.struts2.dispatcher.filter.StrutsPrepareAndExecuteFilter;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
/**
 * Spring Boot entry point that wires the Struts 2 dispatcher into the
 * embedded servlet container.
 *
 * @author Greg Baker
 */
@SpringBootApplication
public class Application {

	public static void main(String[] args) {
		SpringApplication.run(Application.class, args);
	}

	/**
	 * Register the Struts filter to intercept URLs with the ".action" suffix.
	 *
	 * @return the filter registration, ordered last so other filters run first
	 */
	@Bean
	public FilterRegistrationBean strutsFilter() {
		final StrutsPrepareAndExecuteFilter filter = new StrutsPrepareAndExecuteFilter();
		final FilterRegistrationBean registration = new FilterRegistrationBean(filter);
		registration.setOrder(FilterRegistrationBean.LOWEST_PRECEDENCE);
		registration.addUrlPatterns("*.action");
		return registration;
	}

}
| {
"content_hash": "e546960844bea93e683650ca3c914dba",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 110,
"avg_line_length": 29.741935483870968,
"alnum_prop": 0.8015184381778742,
"repo_name": "xxbeanxx/struts-spring-archetype",
"id": "38061bb6181828de2a713739a3747e13c034ca05",
"size": "922",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/github/xxbeanxx/web/Application.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "FreeMarker",
"bytes": "2541"
},
{
"name": "Java",
"bytes": "2886"
}
],
"symlink_target": ""
} |
# Route Model Binding and Eloquent-Sluggable
Route Model Binding is easy to implement with only minor configuration to your models.
## Implicit Binding
Implicit binding requires adding a `getRouteKeyName()` method to your model that returns the name
of the slug field:
```php
use Cviebrock\EloquentSluggable\Sluggable;
use Cviebrock\EloquentSluggable\SluggableScopeHelpers;
use Illuminate\Database\Eloquent\Model;
class Post extends Model
{
use Sluggable, SluggableScopeHelpers;
public function sluggable(): array
{
return [
'slug' => [
'source' => 'title',
]
];
}
/**
* Get the route key for the model.
*
* @return string
*/
public function getRouteKeyName(): string
{
return 'slug';
}
}
```
From there, you can set up your routes as described in the Eloquent documentation:
```php
Route::get('api/posts/{post}', function(App\Post $post): string {
return $post->title;
});
```
In this example, since the Eloquent type-hinted `$post` variable defined on the route
matches the {post} segment in the route's URI, Laravel will automatically inject the
model instance that has a slug matching the corresponding value from the request URI.
Further, if you are using the [SluggableScopeHelpers](SCOPE-HELPERS.md) trait, you can bind
the default slug to the route parameter with:
```php
public function getRouteKeyName(): string
{
return $this->getSlugKeyName();
}
```
## Explicit Binding
You can also use the `RouteServiceProvider::boot` method as described in the
[Laravel Documentation](https://laravel.com/docs/routing#route-model-binding) to
handle explicit route model binding.
- - -
Copyright (c) 2013 Colin Viebrock
| {
"content_hash": "6eb1a26fe58766f88d4d28f508230ecc",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 97,
"avg_line_length": 24.027027027027028,
"alnum_prop": 0.6957255343082115,
"repo_name": "cviebrock/eloquent-sluggable",
"id": "afcd22ccda2c2a67a1a56a62b63c17b64644ad29",
"size": "1778",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ROUTE-MODEL-BINDING.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "85251"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<sem:triples uri="http://www.lds.org/vrl/specific-people/church-workforce/barker-paul" xmlns:sem="http://marklogic.com/semantics">
<sem:triple>
<sem:subject>http://www.lds.org/vrl/specific-people/church-workforce/barker-paul</sem:subject>
<sem:predicate>http://www.w3.org/2004/02/skos/core#prefLabel</sem:predicate>
<sem:object datatype="xsd:string" xml:lang="eng">Barker, Paul</sem:object>
</sem:triple>
<sem:triple>
<sem:subject>http://www.lds.org/vrl/specific-people/church-workforce/barker-paul</sem:subject>
<sem:predicate>http://www.w3.org/2004/02/skos/core#inScheme</sem:predicate>
<sem:object datatype="sem:iri">http://www.lds.org/concept-scheme/vrl</sem:object>
</sem:triple>
<sem:triple>
<sem:subject>http://www.lds.org/vrl/specific-people/church-workforce/barker-paul</sem:subject>
<sem:predicate>http://www.lds.org/core#entityType</sem:predicate>
<sem:object datatype="sem:iri">http://www.schema.org/Place</sem:object>
</sem:triple>
</sem:triples>
| {
"content_hash": "a37fcec18a8875ca1af068a65ea1bf21",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 130,
"avg_line_length": 58.166666666666664,
"alnum_prop": 0.7172874880611271,
"repo_name": "freshie/ml-taxonomies",
"id": "c6ad71906dc1652484712cda2710c604e183d2a9",
"size": "1047",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "roxy/data/gospel-topical-explorer-v2/taxonomies/vrl/specific-people/church-workforce/barker-paul.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4422"
},
{
"name": "CSS",
"bytes": "38665"
},
{
"name": "HTML",
"bytes": "356"
},
{
"name": "JavaScript",
"bytes": "411651"
},
{
"name": "Ruby",
"bytes": "259121"
},
{
"name": "Shell",
"bytes": "7329"
},
{
"name": "XQuery",
"bytes": "857170"
},
{
"name": "XSLT",
"bytes": "13753"
}
],
"symlink_target": ""
} |
/* Fixed card width used by the machines masonry grid. */
.machines .masonry-grid-item{
	width: 400px;
}

/**
 * One Machines
 *
 * Right-hand information panel shown on a single machine's page.
 */
.machines .information{
	float:right;
	margin:0 10px;
}

/* Each information section is a bordered, rounded box. */
.machines .information div{
	padding: 10px;
	border:1px solid #DDDDDD;
	width: 420px;
	border-radius: 5px;
	margin:5px;
}

/* Action buttons: right-aligned, without the section border. */
.machines .information .actions{
	text-align: right;
	border:none;
}

/* Price notes rendered as subdued italic text. */
.machines .information .prices p{
	font-style: italic;
	color:#444444;
}
"content_hash": "8f57e1c949674419fdc38b9c456c933e",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 33,
"avg_line_length": 15.038461538461538,
"alnum_prop": 0.6982097186700768,
"repo_name": "gaetancollaud/fablab-manager",
"id": "49296365ead48c4d895192dd16232e6df7736f27",
"size": "391",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/main/webapp/components/machine/style.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4343"
},
{
"name": "Dockerfile",
"bytes": "232"
},
{
"name": "HTML",
"bytes": "52667"
},
{
"name": "Java",
"bytes": "215176"
},
{
"name": "JavaScript",
"bytes": "98023"
},
{
"name": "Shell",
"bytes": "1455"
},
{
"name": "TSQL",
"bytes": "67119"
}
],
"symlink_target": ""
} |
<project
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.eclipse.bpel</groupId>
<artifactId>org.eclipse.bpel.parent.pom</artifactId>
<version>0.0.1-SNAPSHOT</version>
<relativePath>../../parent/pom.xml</relativePath>
</parent>
<groupId>org.eclipse.bpel.plugins</groupId>
<artifactId>org.eclipse.bpel.ui</artifactId>
<version>1.0.3-SNAPSHOT</version>
<packaging>eclipse-plugin</packaging>
</project>
| {
"content_hash": "7da2f5688f4df920ce0017a1499cf58b",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 195,
"avg_line_length": 44.642857142857146,
"alnum_prop": 0.728,
"repo_name": "Susankha/developer-studio",
"id": "d24199d65f0a49245ef4ec3f9b4ad7a6ce8806c4",
"size": "625",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "bps/org.eclipse.bpel.ui/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "884"
},
{
"name": "CSS",
"bytes": "62302"
},
{
"name": "GAP",
"bytes": "14192"
},
{
"name": "Java",
"bytes": "75107264"
},
{
"name": "JavaScript",
"bytes": "802969"
},
{
"name": "PHP",
"bytes": "4691578"
},
{
"name": "Perl",
"bytes": "50958"
},
{
"name": "SQL",
"bytes": "260298"
},
{
"name": "Shell",
"bytes": "429"
},
{
"name": "XSLT",
"bytes": "10672"
}
],
"symlink_target": ""
} |
from pathlib import Path
import numpy as np
import pandas as pd
from dstools.pipeline.clients import SQLAlchemyClient
from dstools import testing
def test_can_check_nulls(tmp_directory):
    """nulls_in_columns flags any column list containing at least one NaN."""
    db_uri = 'sqlite:///' + str(Path(tmp_directory, 'db.db'))
    client = SQLAlchemyClient(db_uri)
    frame = pd.DataFrame({'no_nas': [1, 2, 1], 'nas': [1, np.nan, 1]})
    frame.to_sql('my_table', client.engine)

    assert testing.sql.nulls_in_columns(client, ['nas'], 'my_table')
    assert testing.sql.nulls_in_columns(client, ['no_nas', 'nas'], 'my_table')
    assert not testing.sql.nulls_in_columns(client, ['no_nas'], 'my_table')
def test_can_check_distinct(tmp_directory):
    """distinct_values_in_column returns the set of unique values (NaN -> None)."""
    client = SQLAlchemyClient('sqlite:///' + str(Path(tmp_directory, 'db.db')))
    frame = pd.DataFrame({'no_nas': [1, 2, 1], 'nas': [1, np.nan, 1]})
    frame.to_sql('my_table', client.engine)

    distinct = testing.sql.distinct_values_in_column
    assert distinct(client, 'no_nas', 'my_table') == {1, 2}
    assert distinct(client, 'nas', 'my_table') == {1.0, None}
def test_can_check_duplicates(tmp_directory):
    """duplicates_in_column detects repeated values in a single column."""
    client = SQLAlchemyClient('sqlite:///' + str(Path(tmp_directory, 'db.db')))
    frame = pd.DataFrame({'duplicates': [1, 1], 'no_duplicates': [1, 2]})
    frame.to_sql('my_table', client.engine)

    assert testing.sql.duplicates_in_column(client, 'duplicates', 'my_table')
    assert not testing.sql.duplicates_in_column(client, 'no_duplicates',
                                                'my_table')
def test_can_check_range(tmp_directory):
    """range_in_column returns the (min, max) pair for a numeric column."""
    client = SQLAlchemyClient('sqlite:///' + str(Path(tmp_directory, 'db.db')))
    pd.DataFrame({'x': [1, 2, 3, 4, 5, 1000]}).to_sql('my_table',
                                                      client.engine)

    assert testing.sql.range_in_column(client, 'x', 'my_table') == (1, 1000)
| {
"content_hash": "3857859875df1a3c6b771a4fed106974",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 79,
"avg_line_length": 38.0188679245283,
"alnum_prop": 0.5583126550868487,
"repo_name": "edublancas/python-ds-tools",
"id": "7c160a6b42645e95dcdecc7cf92b10737dad9c32",
"size": "2015",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/pipeline/test_testing_sql.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11675"
}
],
"symlink_target": ""
} |
package com.castlemock.web.core.controller.rest;
import com.castlemock.model.core.ServiceProcessor;
import com.castlemock.web.core.model.VersionResponse;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.mockito.Mockito.mock;
class VersionCoreRestControllerTest {

    private VersionCoreRestController versionController;

    @BeforeEach
    void setup(){
        // The controller only needs a service processor; a plain mock suffices.
        this.versionController = new VersionCoreRestController(mock(ServiceProcessor.class));
    }

    @Test
    @DisplayName("Get version")
    void testGetVersion(){
        final ResponseEntity<VersionResponse> responseEntity = this.versionController.getVersion();

        assertNotNull(responseEntity);
        assertEquals(HttpStatus.OK, responseEntity.getStatusCode());

        final VersionResponse body = responseEntity.getBody();
        assertNotNull(body);
        assertEquals("Undefined", body.getVersion());
    }
}
| {
"content_hash": "061d7269a6611897ee0b09b14e2553d5",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 99,
"avg_line_length": 32.68421052631579,
"alnum_prop": 0.7737520128824477,
"repo_name": "castlemock/castlemock",
"id": "097682349fc7acda73e700e7244825b45f9c37b3",
"size": "1836",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web/web-core/src/test/java/com/castlemock/web/core/controller/rest/VersionCoreRestControllerTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1004"
},
{
"name": "CSS",
"bytes": "8750"
},
{
"name": "HTML",
"bytes": "798"
},
{
"name": "Java",
"bytes": "2612368"
},
{
"name": "JavaScript",
"bytes": "527130"
},
{
"name": "Procfile",
"bytes": "84"
}
],
"symlink_target": ""
} |
import * as metaget from 'metaget';
// dtslint type tests for the metaget typings; the $ExpectType comments are
// assertions checked by the tooling and must stay on their exact lines.

// Promise form: resolves with the scraped meta tags.
metaget.fetch('https://wordpress.com').then(response => {
    response; // $ExpectType Record<string, string> || Result
});

// Callback form: invoked with (error, response).
metaget.fetch('https://wordpress.com', (error, response) => {
    error; // $ExpectType Error | null
    response; // $ExpectType Record<string, string> || Result
});
| {
"content_hash": "c083aa5a36409c7661e0fdc6852d1456",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 61,
"avg_line_length": 32.9,
"alnum_prop": 0.6595744680851063,
"repo_name": "markogresak/DefinitelyTyped",
"id": "30d3ed923eed20300c9912d8b200b3583a5149a3",
"size": "329",
"binary": false,
"copies": "30",
"ref": "refs/heads/master",
"path": "types/metaget/metaget-tests.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CoffeeScript",
"bytes": "15"
},
{
"name": "Protocol Buffer",
"bytes": "678"
},
{
"name": "TypeScript",
"bytes": "17426898"
}
],
"symlink_target": ""
} |
/* global tinymce, QTags */
/**
 * Insert an HTML fragment into the currently active post editor.
 *
 * Prefers the visible TinyMCE instance, falls back to Quicktags, and as a
 * last resort appends to the raw textarea.  Returns false when no editor can
 * be located at all.
 *
 * @param {string} html Markup to insert.
 * @return {boolean|undefined} false if no editor was found; otherwise undefined.
 */
var wpActiveEditor, send_to_editor;

send_to_editor = function( html ) {
	var editor;
	var hasTinymce = typeof tinymce !== 'undefined';
	var hasQuicktags = typeof QTags !== 'undefined';

	if ( wpActiveEditor ) {
		if ( hasTinymce ) {
			editor = tinymce.get( wpActiveEditor );
		}
	} else if ( hasTinymce && tinymce.activeEditor ) {
		editor = tinymce.activeEditor;
		wpActiveEditor = editor.id;
	} else if ( ! hasQuicktags ) {
		return false;
	}

	if ( editor && ! editor.isHidden() ) {
		editor.execCommand( 'mceInsertContent', false, html );
	} else if ( hasQuicktags ) {
		QTags.insertContent( html );
	} else {
		document.getElementById( wpActiveEditor ).value += html;
	}

	// Close the legacy thickbox modal if its remove function is present.
	if ( window.tb_remove ) {
		try { window.tb_remove(); } catch ( e ) {}
	}
};
// thickbox settings
var tb_position;
(function($) {
	/**
	 * Resize and re-center the thickbox modal to fit the viewport, accounting
	 * for the admin bar, and rewrite every `a.thickbox` href's width/height
	 * query args to match.
	 *
	 * @return {jQuery} The `a.thickbox` collection (from .each()).
	 */
	tb_position = function() {
		var tbWindow = $('#TB_window'),
			width = $(window).width(),
			H = $(window).height(),
			W = ( 833 < width ) ? 833 : width,
			adminbar_height = 0;

		if ( $('#wpadminbar').length ) {
			adminbar_height = parseInt( $('#wpadminbar').css('height'), 10 );
		}

		// .size() was removed in jQuery 3.0; .length is the supported equivalent.
		if ( tbWindow.length ) {
			tbWindow.width( W - 50 ).height( H - 45 - adminbar_height );
			$('#TB_iframeContent').width( W - 50 ).height( H - 75 - adminbar_height );
			tbWindow.css({'margin-left': '-' + parseInt( ( ( W - 50 ) / 2 ), 10 ) + 'px'});
			if ( typeof document.body.style.maxWidth !== 'undefined' ) {
				tbWindow.css({'top': 20 + adminbar_height + 'px', 'margin-top': '0'});
			}
		}

		return $('a.thickbox').each( function() {
			var href = $(this).attr('href');
			if ( ! href ) {
				return;
			}
			// Strip any stale size args before appending the fresh ones.
			href = href.replace(/&width=[0-9]+/g, '');
			href = href.replace(/&height=[0-9]+/g, '');
			$(this).attr( 'href', href + '&width=' + ( W - 80 ) + '&height=' + ( H - 85 - adminbar_height ) );
		});
	};

	$(window).resize(function(){ tb_position(); });
})(jQuery);
| {
"content_hash": "d7b16297a2abc3b6b9cc8ac4468e5bd4",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 101,
"avg_line_length": 29.753623188405797,
"alnum_prop": 0.5718460789089138,
"repo_name": "ntamvl/planningelegance",
"id": "6586c52260571e58a6232a2bbc5ede13dc872cde",
"size": "2053",
"binary": false,
"copies": "23",
"ref": "refs/heads/master",
"path": "wp-admin/js/media-upload.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4463081"
},
{
"name": "HTML",
"bytes": "30819"
},
{
"name": "JavaScript",
"bytes": "3655998"
},
{
"name": "PHP",
"bytes": "13744512"
},
{
"name": "XSLT",
"bytes": "4442"
}
],
"symlink_target": ""
} |
function [stab_i,w] = niak_interpolate_stability(stab,list_scales,list_scales_i,flag_verbose,N)
% Interpolate stability matrices on a grid of scales based on a few scales
%
% SYNTAX:
% [STAB_I,W] =
%   NIAK_INTERPOLATE_STABILITY(STAB,LIST_SCALES,LIST_SCALES_I,[FLAG_VERBOSE],[N])
%
% _________________________________________________________________________
% INPUTS :
%
% STAB
%    (matrix) STAB(:,M) is a vectorized stability matrix associated with
%    the scale (number of clusters) LIST_SCALES(M)
%
% LIST_SCALES
%    (vector of integers) see description of STAB.
%
% LIST_SCALES_I
%    (vector of integers) the grid of scales on which the
%    interpolation/extrapolation will be performed.
%
% FLAG_VERBOSE
%    (boolean, default false) if this flag is true, verbose some infos.
%
% N
%    (integer, default: the row count of the de-vectorized STAB(:,1))
%    size of the full stability matrix; used to normalize the
%    extrapolation weights beyond the largest available scale.
%
% _________________________________________________________________________
% OUTPUTS :
%
% STAB_I
%    (matrix) STAB_I(:,K) is the interpolated/extrapolated vectorized
%    stability matrix corresponding to LIST_SCALES_I(K)
%
% W
%    (matrix) W(K,M) is the contribution of STAB(:,M) to the
%    interpolation/extrapolation of STAB_I(:,K)
%
% _________________________________________________________________________
% SEE ALSO:
% NIAK_MSTEPS, MSTEPS_DEMO
%
% _________________________________________________________________________
% COMMENTS:
%
% Copyright (c) Pierre Bellec, 2011
% Centre de recherche de l'institut de Gériatrie de Montréal
% Département d'informatique et de recherche opérationnelle
% Université de Montréal
% Maintainer : pierre.bellec@criugm.qc.ca
% See licensing information in the code.
% Keywords : stability, clustering, interpolation, MSTEPS

% Permission is hereby granted, free of charge, to any person obtaining a copy
% of this software and associated documentation files (the "Software"), to deal
% in the Software without restriction, including without limitation the rights
% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
% copies of the Software, and to permit persons to whom the Software is
% furnished to do so, subject to the following conditions:
%
% The above copyright notice and this permission notice shall be included in
% all copies or substantial portions of the Software.
%
% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
% THE SOFTWARE.

% Default: quiet operation.
if nargin < 4
    flag_verbose = false;
end

% Default N: recover the matrix size from the first vectorized column.
if nargin < 5
    tmp = niak_vec2mat(stab(:,1));
    N = size(tmp,1);
    clear tmp
end

% Sort the reference scales (and their matrices) in increasing order so the
% bracketing searches below work.
[val,order] = sort(list_scales);
stab = stab(:,order);
list_scales = val;

%% Interpolate the stability matrices
if flag_verbose
    fprintf('Generation of interpolation coefficients ...\n');
end
% W(K,M) is the weight of reference scale M in the estimate at target scale K.
w = zeros([length(list_scales_i) length(list_scales)]);
for num_sc = 1:length(list_scales_i)
    sci = list_scales_i(num_sc);
    ind1 = find(list_scales<=sci);
    ind2 = find(list_scales>sci);
    if isempty(ind1)
        % Target below the smallest reference scale: copy the smallest one.
        ind2 = ind2(1);
        w(num_sc,ind2) = 1;
    elseif isempty(ind2)
        % Target above the largest reference scale: extrapolate by shrinking
        % the largest reference towards zero, with a fourth-root scale law
        % normalized so the weight is 1 at sc1 and 0 at N.
        ind1 = ind1(end);
        sc1 = list_scales(ind1);
        sc2 = list_scales_i(num_sc);
        w(num_sc,ind1) = (1-(sc1^(1/4))/(N^(1/4)))^(-1)*max( (sc1.^(1/4))/((sc2)^(1/4)) - ((sc1)^(1/4))/((N)^(1/4)) , 0);
    else
        % Target bracketed by two reference scales: linear interpolation.
        ind1 = ind1(end);
        ind2 = ind2(1);
        sc1 = list_scales(ind1);
        sc2 = list_scales(ind2);
        alpha = (sci-sc1)/(sc2-sc1);
        w(num_sc,ind1) = 1-alpha;
        w(num_sc,ind2) = alpha;
    end
end
if flag_verbose
    fprintf('Interpolation of stability matrices on a grid of scales ...\n');
end
% Each interpolated column is the weighted combination of reference columns.
stab_i = stab*(w');
"content_hash": "e8e69e2121f56d9a43cd8485e0a8e3c9",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 121,
"avg_line_length": 34.86725663716814,
"alnum_prop": 0.6279187817258883,
"repo_name": "pbellec/basc_fir_paper",
"id": "073265ef0223fa825e49c19ebff7488d87588eb4",
"size": "3946",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "basc-714M/commands/clustering/niak_interpolate_stability.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "M",
"bytes": "112624"
},
{
"name": "Matlab",
"bytes": "2840131"
},
{
"name": "Perl",
"bytes": "21682"
},
{
"name": "Shell",
"bytes": "3092"
}
],
"symlink_target": ""
} |
// Private class extension for DetailViewController (no additional members).
@interface DetailViewController ()

@end

@implementation DetailViewController

#pragma mark - Managing the detail item

// Store the new detail item and refresh the UI only when the item actually
// changes (pointer comparison).
- (void)setDetailItem:(id)newDetailItem {
    if (_detailItem != newDetailItem) {
        _detailItem = newDetailItem;

        // Update the view.
        [self configureView];
    }
}

// Push the current detail item's description into the label, if both exist.
- (void)configureView {
    // Update the user interface for the detail item.
    if (self.detailItem) {
        self.detailDescriptionLabel.text = [self.detailItem description];
    }
}

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    [self configureView];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end
| {
"content_hash": "c0d00f2733a167ce25cd187a2e46422f",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 76,
"avg_line_length": 22.083333333333332,
"alnum_prop": 0.6716981132075471,
"repo_name": "huboqq/huhello",
"id": "2d6683f16fac290b12795ab58e7343769847b158",
"size": "964",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Test/DetailViewController.m",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Objective-C",
"bytes": "9837"
}
],
"symlink_target": ""
} |
// Search-results controller specialized for author searches; all behavior is
// inherited from ONESearchBaseViewController (no additional public API).
@interface ONESearchAuthorViewController : ONESearchBaseViewController

@end
| {
"content_hash": "b4b2e36ae22d9623dfd8b4e872d5ce2b",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 70,
"avg_line_length": 25.666666666666668,
"alnum_prop": 0.8831168831168831,
"repo_name": "shlyren/ONE-OC",
"id": "541c6d6f26d2dd300f31290c5cd32b7302a7c7d9",
"size": "266",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ONE/Classes/Search-搜索/Controller/ONESearchAuthorViewController.h",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Objective-C",
"bytes": "449795"
},
{
"name": "Ruby",
"bytes": "442"
}
],
"symlink_target": ""
} |
package org.deeplearning4j.nn.conf.layers.samediff;
import lombok.Data;
import org.deeplearning4j.nn.api.MaskState;
import org.deeplearning4j.nn.api.TrainingConfig;
import org.deeplearning4j.nn.conf.GradientNormalization;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.graph.GraphVertex;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.inputs.InvalidInputTypeException;
import org.deeplearning4j.nn.conf.memory.MemoryReport;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.nn.layers.samediff.SameDiffGraphVertex;
import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.learning.config.IUpdater;
import org.nd4j.linalg.learning.regularization.Regularization;
import org.nd4j.common.primitives.Pair;
import org.nd4j.common.util.ArrayUtil;
import java.util.List;
import java.util.Map;
@Data
public abstract class SameDiffVertex extends GraphVertex implements TrainingConfig {

    private SDVertexParams vertexParams;
    private String name;

    //Training configuration; any unset field is inherited from the global config via applyGlobalConfig(...)
    protected List<Regularization> regularization;
    protected List<Regularization> regularizationBias;
    protected IUpdater updater;
    protected IUpdater biasUpdater;
    protected GradientNormalization gradientNormalization;
    protected double gradientNormalizationThreshold = Double.NaN;
    protected DataType dataType;

    /**
     * Define the vertex
     *
     * @param sameDiff   SameDiff instance
     * @param layerInput Input to the layer - keys as defined by {@link #defineParametersAndInputs(SDVertexParams)}
     * @param paramTable Parameter table - keys as defined by {@link #defineParametersAndInputs(SDVertexParams)}
     * @param maskVars   Masks of input, if available - keys as defined by {@link #defineParametersAndInputs(SDVertexParams)}
     * @return The final layer variable corresponding to the activations/output from the forward pass
     */
    public abstract SDVariable defineVertex(SameDiff sameDiff, Map<String, SDVariable> layerInput,
                    Map<String, SDVariable> paramTable, Map<String, SDVariable> maskVars);

    /**
     * Define the parameters - and inputs - for the network.
     * Use {@link SDVertexParams#addWeightParam(String, long...)} and
     * {@link SDVertexParams#addBiasParam(String, long...)}.
     * Note also you must define (and optionally name) the inputs to the vertex. This is required so that
     * DL4J knows how many inputs exists for the vertex.
     *
     * @param params Object used to set parameters for this layer
     */
    public abstract void defineParametersAndInputs(SDVertexParams params);

    /**
     * Set the initial parameter values for this layer, if required
     *
     * @param params Parameter arrays that may be initialized
     */
    public abstract void initializeParameters(Map<String, INDArray> params);

    /**
     * Get the parameter/input definitions for this vertex, lazily initializing them
     * on first access via {@link #defineParametersAndInputs(SDVertexParams)}.
     *
     * @return Parameter and input definitions
     */
    public SDVertexParams getVertexParams() {
        if (vertexParams == null) {
            vertexParams = new SDVertexParams();
            defineParametersAndInputs(vertexParams);
        }
        return vertexParams;
    }

    /**
     * Total number of parameters for this vertex: the sum of the element counts of all
     * declared parameter shapes.
     *
     * @param backprop Ignored - the same parameters apply in both passes
     * @return Total parameter count
     */
    @Override
    public long numParams(boolean backprop) {
        SDLayerParams params = getVertexParams();
        long count = 0;
        for (long[] l : params.getParamShapes().values()) {
            count += ArrayUtil.prodLong(l);
        }
        //Fix: previously returned "(int) count" - a needless narrowing cast that could
        //truncate the count for very large models; the method's return type is long
        return count;
    }

    @Override
    public int minVertexInputs() {
        return 1;
    }

    @Override
    public int maxVertexInputs() {
        //-1: presumably "no maximum" - confirm against the GraphVertex contract
        return -1;
    }

    /**
     * Create the runtime (graph) vertex for this configuration.
     * Side effect: records the vertex name on this configuration object.
     */
    @Override
    public org.deeplearning4j.nn.graph.vertex.GraphVertex instantiate(ComputationGraph graph, String name, int idx,
                    INDArray paramsView, boolean initializeParams, DataType networkDatatype) {
        this.name = name;
        return new SameDiffGraphVertex(this, graph, name, idx, paramsView, initializeParams, networkDatatype);
    }

    @Override
    public InputType getOutputType(int layerIndex, InputType... vertexInputs) throws InvalidInputTypeException {
        throw new UnsupportedOperationException("Not yet implemented");
    }

    public Pair<INDArray, MaskState> feedForwardMaskArrays(INDArray[] maskArrays, MaskState currentMaskState, int minibatchSize) {
        throw new UnsupportedOperationException("Not yet supported");
    }

    /**
     * Validate input arrays to confirm that they fulfill the assumptions of the layer. If they don't, throw an exception.
     *
     * @param input inputs to the layer
     */
    public void validateInput(INDArray[] input){/* no-op */}

    @Override
    public MemoryReport getMemoryReport(InputType... inputTypes) {
        //No memory report is currently produced for SameDiff vertices
        return null;
    }

    /**
     * Reshape order used when mapping the flattened parameter view to a parameter array.
     *
     * @param paramName Name of the parameter (unused in the default implementation)
     * @return 'c' (row-major) by default
     */
    public char paramReshapeOrder(String paramName) {
        return 'c';
    }

    /**
     * Inherit any training configuration (regularization, updaters, gradient normalization)
     * that has not been set explicitly on this vertex from the global network configuration.
     *
     * @param b Global configuration builder
     */
    public void applyGlobalConfig(NeuralNetConfiguration.Builder b) {
        if(regularization == null || regularization.isEmpty()){
            regularization = b.getRegularization();
        }
        if(regularizationBias == null || regularizationBias.isEmpty()){
            regularizationBias = b.getRegularizationBias();
        }
        if (updater == null) {
            updater = b.getIUpdater();
        }
        if (biasUpdater == null) {
            biasUpdater = b.getBiasUpdater();
        }
        if (gradientNormalization == null) {
            gradientNormalization = b.getGradientNormalization();
        }
        if (Double.isNaN(gradientNormalizationThreshold)) {
            gradientNormalizationThreshold = b.getGradientNormalizationThreshold();
        }

        applyGlobalConfigToLayer(b);
    }

    /**
     * Hook for subclasses to pick up additional global configuration.
     *
     * @param globalConfig Global configuration builder
     */
    public void applyGlobalConfigToLayer(NeuralNetConfiguration.Builder globalConfig) {
        //Default implementation: no op
    }

    @Override
    public String getLayerName() {
        return name;
    }

    /**
     * Regularization for the given parameter: weight params get {@code regularization},
     * bias params get {@code regularizationBias}.
     *
     * @return The applicable regularization list, or null if none is configured at all
     * @throws IllegalStateException if the name is neither a weight nor a bias parameter
     */
    @Override
    public List<Regularization> getRegularizationByParam(String paramName){
        if((regularization == null || regularization.isEmpty()) && (regularizationBias == null || regularizationBias.isEmpty())){
            return null;
        }
        if (getVertexParams().isWeightParam(paramName)) {
            return regularization;
        }
        if (getVertexParams().isBiasParam(paramName)) {
            return regularizationBias;
        }
        throw new IllegalStateException("Unknown parameter name: " + paramName + " - not in weights ("
                        + getVertexParams().getWeightParameterKeys() + ") or biases ("
                        + getVertexParams().getBiasParameterKeys() + ")");
    }

    @Override
    public boolean isPretrainParam(String paramName) {
        return false;
    }

    /**
     * Updater for the given parameter. Bias parameters fall back to the weight updater
     * when no dedicated bias updater is configured.
     *
     * @throws IllegalStateException if the name is neither a weight nor a bias parameter
     */
    @Override
    public IUpdater getUpdaterByParam(String paramName) {
        if (getVertexParams().isWeightParam(paramName)) {
            return updater;
        }
        if (getVertexParams().isBiasParam(paramName)) {
            if (biasUpdater == null) {
                return updater;
            }
            return biasUpdater;
        }
        throw new IllegalStateException("Unknown parameter name: " + paramName + " - not in weights ("
                        + getVertexParams().getWeightParameterKeys() + ") or biases ("
                        + getVertexParams().getBiasParameterKeys() + ")");
    }

    @Override
    public GradientNormalization getGradientNormalization() {
        return gradientNormalization;
    }

    @Override
    public double getGradientNormalizationThreshold() {
        return gradientNormalizationThreshold;
    }

    @Override
    public void setDataType(DataType dataType) {
        this.dataType = dataType;
    }
}
| {
"content_hash": "db8b32dcc8d019373fc89c168ffa8106",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 144,
"avg_line_length": 36.57276995305164,
"alnum_prop": 0.6774069319640564,
"repo_name": "deeplearning4j/deeplearning4j",
"id": "b18aa1cc396eb6e911e0bf4c8bd5cb48bd753a9a",
"size": "8681",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/layers/samediff/SameDiffVertex.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1458"
},
{
"name": "C",
"bytes": "165340"
},
{
"name": "C++",
"bytes": "17817311"
},
{
"name": "CMake",
"bytes": "112697"
},
{
"name": "CSS",
"bytes": "12974"
},
{
"name": "Cuda",
"bytes": "2413085"
},
{
"name": "Cython",
"bytes": "12094"
},
{
"name": "FreeMarker",
"bytes": "77257"
},
{
"name": "HTML",
"bytes": "18609"
},
{
"name": "Java",
"bytes": "47657420"
},
{
"name": "JavaScript",
"bytes": "296767"
},
{
"name": "Kotlin",
"bytes": "2041047"
},
{
"name": "PureBasic",
"bytes": "12254"
},
{
"name": "Python",
"bytes": "77566"
},
{
"name": "Ruby",
"bytes": "4558"
},
{
"name": "Scala",
"bytes": "1026"
},
{
"name": "Shell",
"bytes": "92012"
},
{
"name": "Smarty",
"bytes": "975"
},
{
"name": "Starlark",
"bytes": "931"
},
{
"name": "TypeScript",
"bytes": "81217"
}
],
"symlink_target": ""
} |
glob-to-vinyl [![NPM version][npm-image]][npm-url] [![Dependency Status][depstat-image]][depstat-url]
=============
Takes a glob, reads in the matching files, and returns [Vinyl](https://github.com/wearefractal/vinyl) file objects. Created in response to [wearefractal/vinyl/issues/20](https://github.com/wearefractal/vinyl/issues/20).
## Usage
```javascript
var globToVinyl = require('glob-to-vinyl');
globToVinyl('*.js', function(err, files){
console.log(files);
console.log(files[0].contents.toString('utf8'));
});
```
[npm-url]: https://npmjs.org/package/glob-to-vinyl
[npm-image]: http://img.shields.io/npm/v/glob-to-vinyl.svg?style=flat
[depstat-url]: https://david-dm.org/adam-lynch/glob-to-vinyl
[depstat-image]: https://david-dm.org/adam-lynch/glob-to-vinyl.svg?style=flat
| {
"content_hash": "7ec9f699662147e7be6aab5e7a02fa66",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 187,
"avg_line_length": 34.77272727272727,
"alnum_prop": 0.7045751633986929,
"repo_name": "adam-lynch/glob-to-vinyl",
"id": "bcecbbf0bac3d107b03ab4af9ed816e15505b3fb",
"size": "765",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "362"
}
],
"symlink_target": ""
} |
require 'spec_helper'
describe GeoWorks::Discovery::AbstractDocument do
  subject { described_class.new }

  # The abstract base class must not be used directly: every serialisation
  # method is expected to raise an error that names the format.
  %w[hash json xml].each do |format|
    describe "#to_#{format}" do
      it 'raises an error because the class should not be instantiated directly' do
        expect { subject.public_send("to_#{format}", nil) }.to raise_error(/#{format}/)
      end
    end
  end
end
| {
"content_hash": "f5190ede0675d1d93988172363bb95a2",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 81,
"avg_line_length": 28.565217391304348,
"alnum_prop": 0.6894977168949772,
"repo_name": "geoconcerns/geo_works",
"id": "9e31edc8709f220837fece5a9f5e76fb98636f02",
"size": "657",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/services/geo_works/discovery/abstract_document_spec.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4894"
},
{
"name": "HTML",
"bytes": "34934"
},
{
"name": "JavaScript",
"bytes": "57081"
},
{
"name": "Ruby",
"bytes": "256618"
},
{
"name": "Shell",
"bytes": "457"
},
{
"name": "XSLT",
"bytes": "22416"
}
],
"symlink_target": ""
} |
describe 'CustomFields::Field' do

  before(:each) do
    # Reload from the database so the custom fields carry persisted ids.
    @blog = Blog.find(create_blog._id)
  end

  describe 'nested attributes' do

    it 'renames a field' do
      original_first = @blog.posts_custom_fields.first
      original_last  = @blog.posts_custom_fields.last

      # Swap the labels via nested attributes, addressing fields by their ids.
      @blog.posts_custom_fields_attributes = {
        '0' => { '_id' => original_last._id.to_s,  'label' => 'My location' },
        '1' => { '_id' => original_first._id.to_s, 'label' => 'Author' }
      }
      @blog.save

      expect(@blog.posts_custom_fields.first.label).to eq 'Author'
      expect(@blog.posts_custom_fields.last.label).to eq 'My location'
    end

  end

  protected

  # Persist a blog with two string custom fields and return it.
  def create_blog
    blog = Blog.new(name: 'My personal blog')
    blog.posts_custom_fields.build label: 'Main Author', type: 'string'
    blog.posts_custom_fields.build label: 'Location', type: 'string'
    blog.save
    blog
  end
end
"content_hash": "62cf25392074829a834e50d4c1aed41c",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 94,
"avg_line_length": 24.571428571428573,
"alnum_prop": 0.6104651162790697,
"repo_name": "locomotivecms/custom_fields",
"id": "c2f7414d96aa0250702e8f36c36246eb22891a32",
"size": "860",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/integration/field_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "196834"
}
],
"symlink_target": ""
} |
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class User(models.Model):
    """Game profile for a player: RPG-style progression plus Facebook login data.

    NOTE(review): this model shadows ``django.contrib.auth.models.User`` imported
    at the top of this file -- confirm the shadowing is intentional.
    """

    level = models.DecimalField(max_digits=3,decimal_places=0,default=1)
    exp = models.DecimalField(max_digits=10,decimal_places=0,default=0)
    # Experience required to reach the next level (presumably; confirm against levelling logic).
    max_exp = models.DecimalField(max_digits=10,decimal_places=0,default=100)
    money = models.DecimalField(max_digits=12,decimal_places=0,default=0)
    # Damage per second -- name suggests; confirm against battle logic.
    dps = models.DecimalField(max_digits=5,decimal_places=0,default=1)
    facebookID = models.CharField(max_length = 100,default="")
    token = models.CharField(max_length = 200,default="")
    gender = models.BooleanField(default=True)
class Category(models.Model):
    """Bookkeeping category; ``income`` flags income (vs. expense) categories."""

    name = models.CharField(max_length = 100)
    income = models.BooleanField(default=False)
class Record(models.Model):
    """A single bookkeeping entry made by a user in a category."""

    user = models.ForeignKey(User,on_delete=models.CASCADE,)
    category = models.ForeignKey(Category,on_delete=models.CASCADE,)
    # Whole-unit amount -- no decimal places are stored.
    spend = models.DecimalField(max_digits=10,decimal_places=0)
    currency = models.CharField(max_length = 10)
    createTime = models.DateTimeField(auto_now_add=True, blank=True)
class Item(models.Model):
    """Purchasable in-game item with an attack value and a price."""

    name = models.CharField(max_length = 100)
    #itemType = models.CharField(max_length = 10,default="equipment")
    attack = models.DecimalField(max_digits=5,decimal_places=0)
    # Duration in unspecified units; 0 presumably means permanent -- confirm.
    duration = models.DecimalField(max_digits=3,decimal_places=0,default=0)
    #expiredTime = models.DateTimeField(blank=True,default="0")
    cost = models.DecimalField(max_digits=5,decimal_places=0,default=0)
    # Path/URL of the item's image asset.
    pngFile = models.CharField(max_length = 200,default="")
class User_Item(models.Model):
    """Join table: records which items a user owns."""

    user = models.ForeignKey(User,on_delete=models.CASCADE,)
    item = models.ForeignKey(Item,on_delete=models.CASCADE,)
class UserExp(models.Model):
    """Lookup table mapping a level to the experience required for it."""

    level = models.DecimalField(max_digits=4,decimal_places=0)
    required_exp = models.DecimalField(max_digits=12,decimal_places=0)
class Monster(models.Model):
    """A monster/boss template: stats plus the exp/money reward it yields."""

    name = models.CharField(max_length = 100,default="Boss")
    level = models.DecimalField(max_digits=4,decimal_places=0)
    hp = models.DecimalField(max_digits=20,decimal_places=0)
    exp = models.DecimalField(max_digits=9,decimal_places=0)
    money = models.DecimalField(max_digits=9,decimal_places=0)
    # Path/URL of the monster's image asset.
    pngFile = models.CharField(max_length = 200)
class User_Monster(models.Model):
    """A user's active encounter with a monster, tracking its remaining HP."""

    user = models.ForeignKey(User,on_delete=models.CASCADE,)
    monster = models.ForeignKey(Monster,on_delete=models.CASCADE,)
    current_hp = models.DecimalField(max_digits=20,decimal_places=0)
    createTime = models.DateTimeField(auto_now_add=True, blank=True)
class Missions(models.Model):
    """Base row for a mission assigned to a user.

    Type-specific data lives in the per-type tables below, each of which holds
    a ForeignKey back to this model.
    """

    name = models.CharField(max_length = 100,default="")
    user = models.ForeignKey(User,on_delete=models.CASCADE,)
    missionType = models.CharField(max_length = 40)
    status = models.CharField(max_length = 20)
    createTime = models.DateTimeField(auto_now_add=True, blank=True)
class ConsecutiveLoginMission(models.Model):
    """Log in on ``required_days`` consecutive days; ``days`` is the current streak."""

    mission = models.ForeignKey(Missions,on_delete=models.CASCADE,default=None)
    days = models.DecimalField(max_digits=4,decimal_places=0)
    required_days = models.DecimalField(max_digits=4,decimal_places=0)
    # Rewards granted on completion.
    exp = models.DecimalField(max_digits=10,decimal_places=0)
    money = models.DecimalField(max_digits=10,decimal_places=0)
class ConsecutiveConsumeMission(models.Model):
    """Spend ``requiredConsume`` in ``targetCategory`` before ``expiredTime``."""

    mission = models.ForeignKey(Missions,on_delete=models.CASCADE,default=None)
    # NOTE(review): default=None on a DateTimeField without null=True will fail on
    # save in Django -- confirm null=True is intended here.
    expiredTime = models.DateTimeField(blank=True,default = None)
    targetCategory = models.ForeignKey(Category,on_delete=models.CASCADE,default=None)
    # Rewards granted on completion.
    exp = models.DecimalField(max_digits=10,decimal_places=0)
    money = models.DecimalField(max_digits=10,decimal_places=0)
    currentConsume = models.DecimalField(max_digits=10,decimal_places=0,default=0)
    requiredConsume = models.DecimalField(max_digits=10,decimal_places=0,default=0)
class ConsecutiveBudgetMission(models.Model):
    """Stay within ``budget`` for ``required_days`` consecutive days.

    ``days`` is the current streak; ``accumulation`` appears to track spending
    against the budget -- confirm against mission-update logic.
    """

    mission = models.ForeignKey(Missions,on_delete=models.CASCADE,default=None)
    days = models.DecimalField(max_digits=4,decimal_places=0)
    required_days = models.DecimalField(max_digits=4,decimal_places=0)
    budget = models.DecimalField(max_digits=8,decimal_places=0)
    accumulation = models.DecimalField(max_digits=8,decimal_places=0,default=0)
    # Rewards granted on completion.
    exp = models.DecimalField(max_digits=10,decimal_places=0)
    money = models.DecimalField(max_digits=10,decimal_places=0)
class MealMission(models.Model):
    """Record a spending entry for a given ``meal`` before ``expiredTime``."""

    mission = models.ForeignKey(Missions,on_delete=models.CASCADE,default=None)
    meal = models.CharField(max_length = 20)
    # NOTE(review): default=None on a DateTimeField without null=True will fail on
    # save in Django -- confirm null=True is intended here.
    expiredTime = models.DateTimeField(blank=True,default=None)
    # Rewards granted on completion.
    exp = models.DecimalField(max_digits=10,decimal_places=0)
    money = models.DecimalField(max_digits=10,decimal_places=0)
class RandomMission(models.Model):
    """Obtain ``amount`` of ``targetItem`` before ``expiredTime``."""

    mission = models.ForeignKey(Missions,on_delete=models.CASCADE,)
    targetItem = models.ForeignKey(Item,on_delete=models.CASCADE,default=None)
    amount = models.DecimalField(max_digits=10,decimal_places=0)
    expiredTime = models.DateTimeField(blank=True)
    # Rewards granted on completion.
    exp = models.DecimalField(max_digits=10,decimal_places=0)
    money = models.DecimalField(max_digits=10,decimal_places=0)
| {
"content_hash": "2c09b2f4c294235a761b3c075818343c",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 86,
"avg_line_length": 46.473214285714285,
"alnum_prop": 0.7506243996157541,
"repo_name": "hschueh/TrollersHackNTU",
"id": "29b242b0ce58fa012fc4679f30963fdafa7eb209",
"size": "5205",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mysite/charge/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "128117"
},
{
"name": "HTML",
"bytes": "117142"
},
{
"name": "Java",
"bytes": "13266"
},
{
"name": "JavaScript",
"bytes": "245681"
},
{
"name": "PHP",
"bytes": "1313757"
},
{
"name": "Python",
"bytes": "41523"
}
],
"symlink_target": ""
} |
package MIP::Recipes::Analysis::Deeptrio;
use 5.026;
use Carp;
use charnames qw{ :full :short };
use English qw{ -no_match_vars };
use File::Spec::Functions qw{ catdir catfile };
use open qw{ :encoding(UTF-8) :std };
use Params::Check qw{ allow check last_error };
use utf8;
use warnings;
use warnings qw{ FATAL utf8 };
## MIPs lib/
use MIP::Constants qw{ $LOG_NAME $NEWLINE $UNDERSCORE };
BEGIN {
require Exporter;
use base qw{ Exporter };
# Functions and variables which can be optionally exported
our @EXPORT_OK = qw{ analysis_deeptrio };
}
sub analysis_deeptrio {

## Function : Returns vcfs and gvcfs from bam files using DeepVariant's deeptrio caller.
##          : Builds one sbatch recipe that calls variants jointly for the child and
##          : the available parent(s), writing per-sample vcf/gvcf outputs.
## Returns  : 1
## Arguments: $active_parameter_href => Active parameters for this analysis hash {REF}
##          : $case_id               => Family id
##          : $file_info_href        => File_info hash {REF}
##          : $job_id_href           => Job id hash {REF}
##          : $parameter_href        => Parameter hash {REF}
##          : $profile_base_command  => Submission profile base command
##          : $recipe_name           => Recipe name
##          : $sample_info_href      => Info on samples and case hash {REF}

    my ($arg_href) = @_;

    ## Flatten argument(s)
    my $active_parameter_href;
    my $file_info_href;
    my $job_id_href;
    my $parameter_href;
    my $recipe_name;
    my $sample_info_href;

    ## Default(s)
    my $case_id;
    my $profile_base_command;

    my $tmpl = {
        active_parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$active_parameter_href,
            strict_type => 1,
        },
        case_id => {
            default     => $arg_href->{active_parameter_href}{case_id},
            store       => \$case_id,
            strict_type => 1,
        },
        file_info_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$file_info_href,
            strict_type => 1,
        },
        job_id_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$job_id_href,
            strict_type => 1,
        },
        parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$parameter_href,
            strict_type => 1,
        },
        profile_base_command => {
            default     => q{sbatch},
            store       => \$profile_base_command,
            strict_type => 1,
        },
        recipe_name => {
            defined     => 1,
            required    => 1,
            store       => \$recipe_name,
            strict_type => 1,
        },
        sample_info_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$sample_info_href,
            strict_type => 1,
        },
    };

    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};

    use MIP::File_info qw{ get_io_files parse_io_outfiles };
    use MIP::Processmanagement::Processes qw{ submit_recipe };
    use MIP::Program::Deeptrio qw{ deeptrio };
    use MIP::Program::Gnu::Coreutils qw{ gnu_mkdir };
    use MIP::Recipe qw{ parse_recipe_prerequisites };
    use MIP::Sample_info
      qw{ get_case_members_attributes_in_duos get_family_member_id set_recipe_metafile_in_sample_info set_recipe_outfile_in_sample_info };
    use MIP::Script::Setup_script qw{ setup_script };

    ### PREPROCESSING:

    ## Retrieve logger object
    my $log = Log::Log4perl->get_logger($LOG_NAME);

    ## Resolve pedigree member ids: a two-sample case is treated as a duo,
    ## otherwise father/mother/children are looked up from the pedigree.
    my %case_members_id =
      scalar @{ $active_parameter_href->{sample_ids} } == 2
      ? get_case_members_attributes_in_duos( { sample_info_href => $sample_info_href } )
      : get_family_member_id( { sample_info_href => $sample_info_href } );

    ## Unpack parameters
    my %recipe = parse_recipe_prerequisites(
        {
            active_parameter_href => $active_parameter_href,
            parameter_href        => $parameter_href,
            recipe_name           => $recipe_name,
        }
    );

    ## Map consensus analysis type (WGS/WES) onto the DeepTrio model type
    my $model_type = _consensus_analysis_type(
        {
            parameter_href => $parameter_href,
        }
    );

    my $infile_name_prefix;

    ## Keep defined parent ids; '0' is then filtered out (presumably the
    ## pedigree placeholder for a missing parent -- confirm).
    my @parents          = grep { defined } @{ \%case_members_id }{qw{ father mother }};
    my @parents_filtered = grep { $_ ne '0' } @parents;
    my $child_id         = $case_members_id{children}[0];

    my %iofile_parameter;
    my @outfile_directories;

    ## Collect per-sample input (bam) and output (vcf/gvcf) paths
  SAMPLE_ID:
    foreach my $sample_id ( @parents_filtered, $child_id ) {

        my %sample_bam_io = get_io_files(
            {
                id             => $sample_id,
                file_info_href => $file_info_href,
                parameter_href => $parameter_href,
                recipe_name    => $recipe_name,
                stream         => q{in},
            }
        );
        $infile_name_prefix = $sample_bam_io{in}{file_name_prefix};

        my %sample_vcf_io = (
            %sample_bam_io,
            parse_io_outfiles(
                {
                    chain_id               => $recipe{job_id_chain},
                    id                     => $sample_id,
                    file_info_href         => $file_info_href,
                    file_name_prefixes_ref => [$infile_name_prefix],
                    outdata_dir            => $active_parameter_href->{outdata_dir},
                    parameter_href         => $parameter_href,
                    recipe_name            => $recipe_name,
                }
            )
        );

        ## NOTE(review): the reads path is taken from the {out} slot of the
        ## *in*-stream io hash -- verify this resolves to the input alignment file.
        $iofile_parameter{$sample_id}{reads} =
          $sample_bam_io{out}{file_path_prefix} . $sample_bam_io{out}{file_suffix};
        $iofile_parameter{$sample_id}{output_gvcf} = $sample_vcf_io{out}{file_path};
        $iofile_parameter{$sample_id}{output_vcf} =
          $sample_vcf_io{out}{file_path_prefix} . q{.vcf.gz};
        $iofile_parameter{$sample_id}{sample_name} = $sample_id;

        push @outfile_directories, $sample_vcf_io{out}{dir_path};
    }

    ## Filehandles
    # Create anonymous filehandle
    my $filehandle = IO::Handle->new();

    ## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header
    my ( $recipe_file_path, $recipe_info_path ) = setup_script(
        {
            active_parameter_href => $active_parameter_href,
            core_number           => $recipe{core_number},
            directory_id          => $case_id,
            filehandle            => $filehandle,
            gpu_number            => $recipe{gpu_number},
            job_id_href           => $job_id_href,
            memory_allocation     => $recipe{memory},
            process_time          => $recipe{time},
            recipe_directory      => $recipe_name,
            recipe_name           => $recipe_name,
        }
    );

    ### SHELL:

    say {$filehandle} q{## Create output directories};
    foreach my $out_directory (@outfile_directories) {

        gnu_mkdir(
            {
                filehandle       => $filehandle,
                indirectory_path => $out_directory,
                parents          => 1,
            }
        );
        say {$filehandle} $NEWLINE;
    }

    say {$filehandle} q{## } . $recipe_name;

    ## NOTE(review): in the duo case @parents may hold a single usable parent, so
    ## the $parents[1] lookups below can be undef/'0' -- confirm deeptrio()
    ## tolerates a missing second parent.
    deeptrio(
        {
            filehandle          => $filehandle,
            model_type          => $model_type,
            num_shards          => $recipe{core_number},
            output_gvcf_child   => $iofile_parameter{$child_id}{output_gvcf},
            output_gvcf_parent1 => $iofile_parameter{ $parents[0] }{output_gvcf},
            output_gvcf_parent2 => $iofile_parameter{ $parents[1] }{output_gvcf},
            output_vcf_child    => $iofile_parameter{$child_id}{output_vcf},
            output_vcf_parent1  => $iofile_parameter{ $parents[0] }{output_vcf},
            output_vcf_parent2  => $iofile_parameter{ $parents[1] }{output_vcf},
            referencefile_path  => $active_parameter_href->{human_genome_reference},
            reads_child         => $iofile_parameter{$child_id}{reads},
            reads_parent1       => $iofile_parameter{ $parents[0] }{reads},
            reads_parent2       => $iofile_parameter{ $parents[1] }{reads},
            sample_name_child   => $iofile_parameter{$child_id}{sample_name},
            sample_name_parent1 => $iofile_parameter{ $parents[0] }{sample_name},
            sample_name_parent2 => $iofile_parameter{ $parents[1] }{sample_name},
        }
    );

    ## Close filehandleS
    close $filehandle or $log->logcroak(q{Could not close filehandle});

    if ( $recipe{mode} == 1 ) {

        ## Collect QC metadata info for later use
        set_recipe_outfile_in_sample_info(
            {
                infile           => $iofile_parameter{$child_id}{output_gvcf},
                recipe_name      => $recipe_name,
                sample_info_href => $sample_info_href,
            }
        );

        submit_recipe(
            {
                base_command         => $profile_base_command,
                case_id              => $case_id,
                dependency_method    => q{sample_to_case},
                job_id_chain         => $recipe{job_id_chain},
                job_id_href          => $job_id_href,
                job_reservation_name => $active_parameter_href->{job_reservation_name},
                log                  => $log,
                max_parallel_processes_count_href =>
                  $file_info_href->{max_parallel_processes_count},
                recipe_file_path   => $recipe_file_path,
                sample_ids_ref     => \@{ $active_parameter_href->{sample_ids} },
                submission_profile => $active_parameter_href->{submission_profile},
            }
        );
    }
    return 1;
}
sub _consensus_analysis_type {

## Function : Map the case's consensus analysis type onto a DeepVariant/DeepTrio
##          : model type (WGS or WES)
## Returns  : Model type string
## Arguments: $parameter_href => Parameter hash {REF}

    my ($arg_href) = @_;

    ## Flatten argument(s)
    my $parameter_href;

    my $tmpl = {
        parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$parameter_href,
            strict_type => 1,
        },
    };

    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};

    use MIP::Parameter qw{ get_cache };

    ## Each consensus analysis type resolves to one of two model flavours
    my %model_type_for = (
        MIXED => q{WGS},
        PANEL => q{WES},
        WES   => q{WES},
        WGS   => q{WGS},
    );

    my $analysis_type = get_cache(
        {
            parameter_href => $parameter_href,
            parameter_name => q{consensus_analysis_type},
        }
    );

    return $model_type_for{ uc $analysis_type };
}
1;
| {
"content_hash": "e61fe469f057a26df9610ddbf9c2b0c9",
"timestamp": "",
"source": "github",
"line_count": 315,
"max_line_length": 138,
"avg_line_length": 34.993650793650794,
"alnum_prop": 0.49995464029755965,
"repo_name": "henrikstranneheim/MIP",
"id": "4cef53bff32fb2bc2957007af6ce37472db087fe",
"size": "11023",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/MIP/Recipes/Analysis/Deeptrio.pm",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Perl",
"bytes": "2311217"
},
{
"name": "R",
"bytes": "8999"
}
],
"symlink_target": ""
} |
<?php
// Start of AMQPExchange from php-amqp v.1.4.0beta2
/**
 * Stub class representing AMQPExchange from pecl-amqp.
 *
 * Mirrors the public API of the AMQPExchange class shipped by the php-amqp
 * (pecl-amqp) extension so that IDEs and static analysers can resolve it;
 * every method body is intentionally empty.
 *
 * @jms-builtin
 */
class AMQPExchange
{
    /**
     * Bind to another exchange.
     *
     * Bind an exchange to another exchange using the specified routing key.
     *
     * @param string  $exchange_name Name of the exchange to bind.
     * @param string  $routing_key   The routing key to use for binding.
     * @param integer $flags         Flags to use for binding, defaults to
     *                               AMQP_NOPARAM.
     *
     * @throws AMQPExchangeException   On failure.
     * @throws AMQPChannelException    If the channel is not open.
     * @throws AMQPConnectionException If the connection to the broker was lost.
     * @return boolean true on success or false on failure.
     */
    public function bind($exchange_name, $routing_key, $flags = AMQP_NOPARAM)
    {
    }

    /**
     * Create an instance of AMQPExchange.
     *
     * Returns a new instance of an AMQPExchange object, associated with the
     * given AMQPChannel object.
     *
     * @param AMQPChannel $amqp_channel A valid AMQPChannel object, connected
     *                                  to a broker.
     *
     * @throws AMQPExchangeException   When amqp_channel is not connected to
     *                                 a broker.
     * @throws AMQPConnectionException If the connection to the broker was
     *                                 lost.
     */
    public function __construct(AMQPChannel $amqp_channel)
    {
    }

    /**
     * Declare a new exchange on the broker.
     *
     * @throws AMQPExchangeException   On failure.
     * @throws AMQPChannelException    If the channel is not open.
     * @throws AMQPConnectionException If the connection to the broker was lost.
     *
     * @return boolean TRUE on success or FALSE on failure.
     */
    public function declareExchange()
    {
    }

    /**
     * Delete the exchange from the broker.
     *
     * @param string  $exchangeName Optional name of exchange to delete.
     * @param integer $flags        Optionally AMQP_IFUNUSED can be specified
     *                              to indicate the exchange should not be
     *                              deleted until no clients are connected to
     *                              it.
     *
     * @throws AMQPExchangeException   On failure.
     * @throws AMQPChannelException    If the channel is not open.
     * @throws AMQPConnectionException If the connection to the broker was lost.
     *
     * @return boolean true on success or false on failure.
     */
    public function delete($exchangeName = null, $flags = AMQP_NOPARAM)
    {
    }

    /**
     * Get the argument associated with the given key.
     *
     * @param string $key The key to look up.
     *
     * @return string|integer|boolean The string or integer value associated
     *                                with the given key, or FALSE if the key
     *                                is not set.
     */
    public function getArgument($key)
    {
    }

    /**
     * Get all arguments set on the given exchange.
     *
     * @return array An array containing all of the set key/value pairs.
     */
    public function getArguments()
    {
    }

    /**
     * Get all the flags currently set on the given exchange.
     *
     * @return int An integer bitmask of all the flags currently set on this
     *             exchange object.
     */
    public function getFlags()
    {
    }

    /**
     * Get the configured name.
     *
     * @return string The configured name as a string.
     */
    public function getName()
    {
    }

    /**
     * Get the configured type.
     *
     * @return string The configured type as a string.
     */
    public function getType()
    {
    }

    /**
     * Publish a message to an exchange.
     *
     * Publish a message to the exchange represented by the AMQPExchange object.
     *
     * @param string  $message     The message to publish.
     * @param string  $routing_key The optional routing key to which to
     *                             publish to.
     * @param integer $flags       One or more of AMQP_MANDATORY and
     *                             AMQP_IMMEDIATE.
     * @param array   $attributes  One of content_type, content_encoding,
     *                             message_id, user_id, app_id, delivery_mode,
     *                             priority, timestamp, expiration, type
     *                             or reply_to.
     *
     * @throws AMQPExchangeException   On failure.
     * @throws AMQPChannelException    If the channel is not open.
     * @throws AMQPConnectionException If the connection to the broker was lost.
     *
     * @return boolean TRUE on success or FALSE on failure.
     */
    public function publish(
        $message,
        $routing_key = null,
        $flags = AMQP_NOPARAM,
        array $attributes = array()
    ) {
    }

    /**
     * Set the value for the given key.
     *
     * @param string         $key   Name of the argument to set.
     * @param string|integer $value Value of the argument to set.
     *
     * @return boolean TRUE on success or FALSE on failure.
     */
    public function setArgument($key, $value)
    {
    }

    /**
     * Set all arguments on the exchange.
     *
     * @param array $arguments An array of key/value pairs of arguments.
     *
     * @return boolean TRUE on success or FALSE on failure.
     */
    public function setArguments(array $arguments)
    {
    }

    /**
     * Set the flags on an exchange.
     *
     * @param integer $flags A bitmask of flags. This call currently only
     *                       considers the following flags:
     *                       AMQP_DURABLE, AMQP_PASSIVE.
     *
     * @return boolean True on success or false on failure.
     */
    public function setFlags($flags)
    {
    }

    /**
     * Set the name of the exchange.
     *
     * @param string $exchange_name The name of the exchange to set as string.
     *
     * @return boolean TRUE on success or FALSE on failure.
     */
    public function setName($exchange_name)
    {
    }

    /**
     * Set the type of the exchange.
     *
     * Set the type of the exchange. This can be any of AMQP_EX_TYPE_DIRECT,
     * AMQP_EX_TYPE_FANOUT, AMQP_EX_TYPE_HEADER or AMQP_EX_TYPE_TOPIC.
     *
     * @param string $exchange_type The type of exchange as a string.
     *
     * @return boolean TRUE on success or FALSE on failure.
     */
    public function setType($exchange_type)
    {
    }

    /**
     * Get the AMQPChannel object in use
     *
     * @return AMQPChannel
     */
    public function getChannel()
    {
    }

    /**
     * Get the AMQPConnection object in use
     *
     * @return AMQPConnection
     */
    public function getConnection()
    {
    }
}
// End of AMQPExchange from php-amqp v.1.4.0beta2
?> | {
"content_hash": "5a34cee0c2e8541ec1b49093ea74d027",
"timestamp": "",
"source": "github",
"line_count": 240,
"max_line_length": 80,
"avg_line_length": 29.033333333333335,
"alnum_prop": 0.5717566016073479,
"repo_name": "walkeralencar/ci-php-analyzer",
"id": "8dc5c3d36c80d9c4bf5fd46273e0c9ad2d5ed160",
"size": "6968",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "res/php-5.4-core-api/AMQPExchange.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "DOT",
"bytes": "15755"
},
{
"name": "PHP",
"bytes": "9159877"
}
],
"symlink_target": ""
} |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace MemberService {

    // NOTE: designer-generated partial class. Per the file header, manual edits
    // are lost when the designer regenerates this file; to customize a control
    // declaration, move it to the code-behind file instead.
    public partial class ChangePhone {

        /// <summary>
        /// form1 control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.HtmlControls.HtmlForm form1;

        /// <summary>
        /// panel control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify move field declaration from designer file to code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Panel panel;
    }
}
| {
"content_hash": "f68513731b9dfba966d7ddb96c1eb551",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 84,
"avg_line_length": 32.333333333333336,
"alnum_prop": 0.48266166822867856,
"repo_name": "ppschweiz/MemberDatabase",
"id": "d50266340018d7c389dee1b58e2fad7096ac2cfe",
"size": "1069",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Organigram/MemberService/ChangePhone.aspx.designer.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "35993"
},
{
"name": "C#",
"bytes": "668053"
},
{
"name": "CSS",
"bytes": "1556"
},
{
"name": "HTML",
"bytes": "74677"
},
{
"name": "Shell",
"bytes": "339"
}
],
"symlink_target": ""
} |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Threading;
using System.Threading.Tasks;
using Azure.Core;
using Azure.Core.Pipeline;
namespace Azure.Security.KeyVault.Keys
{
    /// <summary>
    /// A long-running operation for <see cref="KeyClient.StartDeleteKey(string, CancellationToken)"/> or <see cref="KeyClient.StartDeleteKeyAsync(string, CancellationToken)"/>.
    /// </summary>
    public class DeleteKeyOperation : Operation<DeletedKey>
    {
        private static readonly TimeSpan s_defaultPollingInterval = TimeSpan.FromSeconds(2);

        private readonly KeyVaultPipeline _pipeline;
        private readonly DeletedKey _value;
        private Response _response;
        private bool _completed;

        internal DeleteKeyOperation(KeyVaultPipeline pipeline, Response<DeletedKey> response)
        {
            _pipeline = pipeline;
            _value = response.Value ?? throw new InvalidOperationException("The response does not contain a value.");
            _response = response.GetRawResponse();

            // The recoveryId is only returned if soft-delete is enabled.
            // Without soft-delete the service deletes the key immediately, so
            // there is nothing to poll and the operation is already complete.
            if (_value.RecoveryId is null)
            {
                _completed = true;
            }
        }

        /// <summary> Initializes a new instance of <see cref="DeleteKeyOperation" /> for mocking. </summary>
        protected DeleteKeyOperation() {}

        /// <inheritdoc/>
        public override string Id => _value.Id.ToString();

        /// <summary>
        /// Gets the <see cref="DeletedKey"/>.
        /// You should await <see cref="WaitForCompletionAsync(CancellationToken)"/> before attempting to purge or recover a key in this pending state.
        /// </summary>
        /// <remarks>
        /// Azure Key Vault will return a <see cref="DeletedKey"/> immediately but may take time to actually delete the key if soft-delete is enabled.
        /// </remarks>
        public override DeletedKey Value => _value;

        /// <inheritdoc/>
        public override bool HasCompleted => _completed;

        /// <inheritdoc/>
        /// <remarks>The service returns the deleted key up front, so a value is always available.</remarks>
        public override bool HasValue => true;

        /// <inheritdoc/>
        public override Response GetRawResponse() => _response;

        /// <inheritdoc/>
        public override Response UpdateStatus(CancellationToken cancellationToken = default)
        {
            if (!_completed)
            {
                using DiagnosticScope scope = _pipeline.CreateScope($"{nameof(DeleteKeyOperation)}.{nameof(UpdateStatus)}");
                // FIX: attribute was labeled "secret" (copy-paste from the Secrets package);
                // this operation tracks a key, so the diagnostic attribute should say so.
                scope.AddAttribute("key", _value.Name);
                scope.Start();

                try
                {
                    _response = _pipeline.GetResponse(RequestMethod.Get, cancellationToken, KeyClient.DeletedKeysPath, _value.Name);
                    _completed = CheckCompleted(_response);
                }
                catch (Exception e)
                {
                    scope.Failed(e);
                    throw;
                }
            }

            return GetRawResponse();
        }

        /// <inheritdoc/>
        public override async ValueTask<Response> UpdateStatusAsync(CancellationToken cancellationToken = default)
        {
            if (!_completed)
            {
                // nameof(UpdateStatus) is deliberate: sync and async paths share one scope name.
                using DiagnosticScope scope = _pipeline.CreateScope($"{nameof(DeleteKeyOperation)}.{nameof(UpdateStatus)}");
                // FIX: attribute was labeled "secret" (copy-paste from the Secrets package).
                scope.AddAttribute("key", _value.Name);
                scope.Start();

                try
                {
                    _response = await _pipeline.GetResponseAsync(RequestMethod.Get, cancellationToken, KeyClient.DeletedKeysPath, _value.Name).ConfigureAwait(false);
                    _completed = await CheckCompletedAsync(_response).ConfigureAwait(false);
                }
                catch (Exception e)
                {
                    scope.Failed(e);
                    throw;
                }
            }

            return GetRawResponse();
        }

        /// <inheritdoc />
        public override ValueTask<Response<DeletedKey>> WaitForCompletionAsync(CancellationToken cancellationToken = default) =>
            this.DefaultWaitForCompletionAsync(s_defaultPollingInterval, cancellationToken);

        /// <inheritdoc />
        public override ValueTask<Response<DeletedKey>> WaitForCompletionAsync(TimeSpan pollingInterval, CancellationToken cancellationToken) =>
            this.DefaultWaitForCompletionAsync(pollingInterval, cancellationToken);

        // Maps the status of the polling GET on the deleted-keys path onto
        // "has the delete finished yet?".
        private async ValueTask<bool> CheckCompletedAsync(Response response)
        {
            switch (response.Status)
            {
                case 200:
                case 403: // Access denied but proof the key was deleted.
                    return true;

                case 404: // Key not visible in the deleted-keys collection yet — keep polling.
                    return false;

                default:
                    throw await _pipeline.Diagnostics.CreateRequestFailedExceptionAsync(response).ConfigureAwait(false);
            }
        }

        // Synchronous twin of CheckCompletedAsync; keep the two in lock-step.
        private bool CheckCompleted(Response response)
        {
            switch (response.Status)
            {
                case 200:
                case 403: // Access denied but proof the key was deleted.
                    return true;

                case 404: // Key not visible in the deleted-keys collection yet — keep polling.
                    return false;

                default:
                    throw _pipeline.Diagnostics.CreateRequestFailedException(response);
            }
        }
    }
}
| {
"content_hash": "44b65395e1af49f9006444ebe5f65a3b",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 177,
"avg_line_length": 37.16891891891892,
"alnum_prop": 0.5808034902744955,
"repo_name": "ayeletshpigelman/azure-sdk-for-net",
"id": "4d52e57adc31127a7d6279fe024195624f58f3ed",
"size": "5503",
"binary": false,
"copies": "2",
"ref": "refs/heads/ayshpige/InternalBranchForDebuging",
"path": "sdk/keyvault/Azure.Security.KeyVault.Keys/src/DeleteKeyOperation.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "118"
},
{
"name": "Batchfile",
"bytes": "28895"
},
{
"name": "C#",
"bytes": "45912328"
},
{
"name": "CSS",
"bytes": "685"
},
{
"name": "HTML",
"bytes": "45212"
},
{
"name": "JavaScript",
"bytes": "7875"
},
{
"name": "PowerShell",
"bytes": "24250"
},
{
"name": "Shell",
"bytes": "1470"
},
{
"name": "XSLT",
"bytes": "6114"
}
],
"symlink_target": ""
} |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package tmf.org.dsmapi.ordering;
import java.io.Serializable;
/**
*
* @author pierregauthier
* "relatedParties": [
{
"role": "Originator",
"reference": "/customer/1234"
},
{
"role": "Owner",
"reference": "/operator/1234"
},
{
"role": "Reviser",
"reference": "Roger Collins"
}
*/
public class RelatedParty implements Serializable {

    // FIX: Serializable classes should declare serialVersionUID explicitly so the
    // serialized form stays stable across recompilation.
    private static final long serialVersionUID = 1L;

    /** Role this party plays for the order, e.g. "Originator", "Owner", "Reviser". */
    private String role;

    /** Reference to the party: a resource path (e.g. "/customer/1234") or a plain name. */
    private String reference;

    public String getRole() {
        return role;
    }

    public void setRole(String role) {
        this.role = role;
    }

    public String getReference() {
        return reference;
    }

    public void setReference(String reference) {
        this.reference = reference;
    }
}
| {
"content_hash": "eefc15034cea29e0bccc487aa852bc80",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 52,
"avg_line_length": 19.4468085106383,
"alnum_prop": 0.5481400437636762,
"repo_name": "tmforum/DSPRODUCTORDERING",
"id": "682f6c200e6493cf393837d5ce5e2928036919f9",
"size": "914",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/java/tmf/org/dsmapi/ordering/RelatedParty.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "123248"
},
{
"name": "Java",
"bytes": "36103"
},
{
"name": "JavaScript",
"bytes": "80158"
}
],
"symlink_target": ""
} |
package io.awacs.plugin.org.objectweb.asm.tree;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import io.awacs.plugin.org.objectweb.asm.MethodVisitor;
import io.awacs.plugin.org.objectweb.asm.Opcodes;
/**
* A node that represents a stack map frame. These nodes are pseudo instruction
* nodes in order to be inserted in an instruction list. In fact these nodes
* must(*) be inserted <i>just before</i> any instruction node <b>i</b> that
* follows an unconditionnal branch instruction such as GOTO or THROW, that is
* the target of a jump instruction, or that starts an exception handler block.
* The stack map frame types must describe the values of the local variables and
* of the operand stack elements <i>just before</i> <b>i</b> is executed. <br>
* <br>
* (*) this is mandatory only for classes whose version is greater than or equal
* to {@link Opcodes#V1_6 V1_6}.
*
* @author Eric Bruneton
*/
public class FrameNode extends AbstractInsnNode {

    /**
     * The type of this frame. Must be {@link Opcodes#F_NEW} for expanded
     * frames, or {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND},
     * {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or
     * {@link Opcodes#F_SAME1} for compressed frames.
     */
    public int type;

    /**
     * The types of the local variables of this stack map frame. Elements of
     * this list can be Integer, String or LabelNode objects (for primitive,
     * reference and uninitialized types respectively - see
     * {@link MethodVisitor}).
     */
    public List<Object> local;

    /**
     * The types of the operand stack elements of this stack map frame. Elements
     * of this list can be Integer, String or LabelNode objects (for primitive,
     * reference and uninitialized types respectively - see
     * {@link MethodVisitor}).
     */
    public List<Object> stack;

    // Used only by clone(): creates an empty node whose fields are copied in afterwards.
    private FrameNode() {
        super(-1);
    }

    /**
     * Constructs a new {@link FrameNode}.
     *
     * @param type
     *            the type of this frame. Must be {@link Opcodes#F_NEW} for
     *            expanded frames, or {@link Opcodes#F_FULL},
     *            {@link Opcodes#F_APPEND}, {@link Opcodes#F_CHOP},
     *            {@link Opcodes#F_SAME} or {@link Opcodes#F_SAME1} for
     *            compressed frames.
     * @param nLocal
     *            number of local variables of this stack map frame.
     * @param local
     *            the types of the local variables of this stack map frame.
     *            Elements of this list can be Integer, String or LabelNode
     *            objects (for primitive, reference and uninitialized types
     *            respectively - see {@link MethodVisitor}).
     * @param nStack
     *            number of operand stack elements of this stack map frame.
     * @param stack
     *            the types of the operand stack elements of this stack map
     *            frame. Elements of this list can be Integer, String or
     *            LabelNode objects (for primitive, reference and uninitialized
     *            types respectively - see {@link MethodVisitor}).
     */
    public FrameNode(final int type, final int nLocal, final Object[] local,
            final int nStack, final Object[] stack) {
        super(-1);
        this.type = type;
        switch (type) {
        case Opcodes.F_NEW:
        case Opcodes.F_FULL:
            // Full / expanded frames carry both the locals and the stack.
            this.local = asList(nLocal, local);
            this.stack = asList(nStack, stack);
            break;
        case Opcodes.F_APPEND:
            this.local = asList(nLocal, local);
            break;
        case Opcodes.F_CHOP:
            // For a CHOP frame only the *count* of removed locals matters,
            // so a list of nLocal nulls is stored.
            this.local = Arrays.asList(new Object[nLocal]);
            break;
        case Opcodes.F_SAME:
            // Same frame: neither locals nor stack recorded.
            break;
        case Opcodes.F_SAME1:
            this.stack = asList(1, stack);
            break;
        }
    }

    @Override
    public int getType() {
        return FRAME;
    }

    /**
     * Makes the given visitor visit this stack map frame.
     *
     * @param mv
     *            a method visitor.
     */
    @Override
    public void accept(final MethodVisitor mv) {
        // Mirrors the constructor: emit only the parts each frame type stores.
        switch (type) {
        case Opcodes.F_NEW:
        case Opcodes.F_FULL:
            mv.visitFrame(type, local.size(), asArray(local), stack.size(),
                    asArray(stack));
            break;
        case Opcodes.F_APPEND:
            mv.visitFrame(type, local.size(), asArray(local), 0, null);
            break;
        case Opcodes.F_CHOP:
            // local holds nLocal nulls (see constructor); only its size is used here.
            mv.visitFrame(type, local.size(), null, 0, null);
            break;
        case Opcodes.F_SAME:
            mv.visitFrame(type, 0, null, 0, null);
            break;
        case Opcodes.F_SAME1:
            mv.visitFrame(type, 0, null, 1, asArray(stack));
            break;
        }
    }

    @Override
    public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
        FrameNode clone = new FrameNode();
        clone.type = type;
        if (local != null) {
            clone.local = new ArrayList<Object>();
            for (int i = 0; i < local.size(); ++i) {
                Object l = local.get(i);
                if (l instanceof LabelNode) {
                    // Remap label references so the clone points at the cloned labels.
                    l = labels.get(l);
                }
                clone.local.add(l);
            }
        }
        if (stack != null) {
            clone.stack = new ArrayList<Object>();
            for (int i = 0; i < stack.size(); ++i) {
                Object s = stack.get(i);
                if (s instanceof LabelNode) {
                    s = labels.get(s);
                }
                clone.stack.add(s);
            }
        }
        return clone;
    }

    // ------------------------------------------------------------------------

    // Wraps the first n entries of o as a fixed-size list view backed by the
    // caller's array (Arrays.asList does not copy).
    private static List<Object> asList(final int n, final Object[] o) {
        return Arrays.asList(o).subList(0, n);
    }

    // Converts the stored list back to the array form the visitor API expects,
    // unwrapping each LabelNode into its underlying Label.
    private static Object[] asArray(final List<Object> l) {
        Object[] objs = new Object[l.size()];
        for (int i = 0; i < objs.length; ++i) {
            Object o = l.get(i);
            if (o instanceof LabelNode) {
                o = ((LabelNode) o).getLabel();
            }
            objs[i] = o;
        }
        return objs;
    }
}
| {
"content_hash": "864533d02f5b7cf588be35bf0841b0aa",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 80,
"avg_line_length": 34.72527472527472,
"alnum_prop": 0.5549050632911392,
"repo_name": "ArcherFeel/AWACS",
"id": "4eab990553732ee622d7ce28207fcb43df59cbde",
"size": "7979",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "awacs-plugin/awacs-stacktrace-plugin/src/main/java/io/awacs/plugin/org/objectweb/asm/tree/FrameNode.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "194069"
},
{
"name": "Shell",
"bytes": "2436"
}
],
"symlink_target": ""
} |
local functional = require(script.Parent:WaitForChild'Functional')
-- Picks an updater function for a value by duck-typing its fields via pcall.
-- The probes are order-sensitive: each branch only distinguishes the value
-- from the types tested *after* it, so do not reorder them.
function getType(prop)
	local _
	if pcall(function () _ = prop.Number end) then
		-- has .Number: presumably a BrickColor -- confirm against the Roblox API
		return updateBrickColor
	elseif pcall(function () _ = prop.r end) then
		-- has .r: presumably Color3 -- confirm
		return updateLerp
	elseif pcall(function () _ = prop.X.Offset end) then
		-- has .X.Offset: presumably UDim2 -- confirm
		return updateLerp
	elseif pcall(function () _ = prop.p end) then
		-- has .p: presumably CFrame -- confirm
		return updateLerp
	elseif pcall(function () _ = prop.Z end) then
		-- has .Z: presumably Vector3 -- confirm
		return updateLerp
	elseif pcall(function () _ = prop.Keypoints end) then
		return updateNumberSequence
	elseif pcall(function () _ = prop.X end) then
		-- has .X but no .Z: presumably Vector2 -- confirm
		return updateLerp
	elseif type(prop) == "number" then
		return updateNumber
	else
		-- Fallback: 'update' snaps to the final value when the tween finishes.
		warn("Attempt to animate an unsupported datatype, value will still be set once duration has elapsed.")
		return update
	end
end
do
	-- Private registry mapping each object to its assigned id.
	local registry = {}
	local nextIndex = 0

	-- Returns a stable, unique id for the given object, allocating one the
	-- first time the object is seen.
	function getId(object)
		local id = registry[object]
		if id == nil then
			nextIndex = nextIndex + 1
			id = tonumber(nextIndex, 16)
			registry[object] = id
		end
		return id
	end
end
-- Tweens object[property] toward newValue; extra arguments are forwarded to
-- the 'functional' driver (duration, easing, ...).
function animate(object, property, newValue, ...)
	-- All three arguments are required; silently no-op otherwise (original contract).
	if not (object and property and newValue) then
		return
	end

	local startValue, updater
	if object:IsA'Model' and property == "CFrame" then
		-- Models have no direct CFrame property; drive the primary part instead.
		updater = updateModelCFrame
		startValue = object:GetPrimaryPartCFrame()
	else
		updater = getType(object[property])
		startValue = object[property]
	end

	-- One tween channel per (object, property) pair so a newer call replaces
	-- an in-flight one for the same property.
	local channelId = "Object:" .. getId(object) .. ":" .. property
	functional(channelId, function (alpha)
		updater(alpha, startValue, newValue, object, property)
	end, 0, 1, ...)
end
-- Linear interpolation for plain numbers.
function updateNumber(alpha, fromValue, toValue, object, property)
	local delta = toValue - fromValue
	object[property] = fromValue + delta * alpha
end
-- Fallback for non-animatable types: snap straight to the target value,
-- ignoring the interpolation progress and the starting value.
function update(alpha, fromValue, toValue, object, property)
	object[property] = toValue
end
-- Delegates interpolation to the datatype's own :lerp method
-- (Vector3, CFrame, Color3, UDim2, ... all provide one).
function updateLerp(alpha, fromValue, toValue, object, property)
	object[property] = fromValue:lerp(toValue, alpha)
end
-- Animates a NumberSequence by interpolating from its first keypoint's value
-- and rebuilding a flat single-value sequence each step.
function updateNumberSequence(alpha, fromValue, toValue, object, property)
	local startNumber = fromValue.Keypoints[1].Value
	local current = startNumber + (toValue - startNumber) * alpha
	object[property] = NumberSequence.new(current)
end
-- Interpolates the r/g/b channels component-wise, then rebuilds the BrickColor.
function updateBrickColor(alpha, fromValue, toValue, object, property)
	local r = fromValue.r + (toValue.r - fromValue.r) * alpha
	local g = fromValue.g + (toValue.g - fromValue.g) * alpha
	local b = fromValue.b + (toValue.b - fromValue.b) * alpha
	object[property] = BrickColor.new(r, g, b)
end
-- Moves a Model by lerping and re-applying its primary part CFrame;
-- the 'property' argument is unused here (kept for the shared updater signature).
function updateModelCFrame(alpha, fromCFrame, toCFrame, model, property)
	local stepped = fromCFrame:lerp(toCFrame, alpha)
	model:SetPrimaryPartCFrame(stepped)
end
return animate
| {
"content_hash": "e12332e4cb7eb3165cef532681a47238",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 104,
"avg_line_length": 27.181818181818183,
"alnum_prop": 0.7081939799331104,
"repo_name": "BradSharp/roblox",
"id": "348af297f344923e482c20174006c451cef5a271",
"size": "2392",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "animation/objectional.lua",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Lua",
"bytes": "52850"
}
],
"symlink_target": ""
} |
========
Usage
========
To use Experiment in a project::
import experiment
| {
"content_hash": "03c519f3d226e0e60880a422bfa940d6",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 32,
"avg_line_length": 11.571428571428571,
"alnum_prop": 0.5679012345679012,
"repo_name": "barakschiller/experiment",
"id": "dc1963ea22ed80c73b219a7eee81187ff38a5af0",
"size": "81",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/usage.rst",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1740"
},
{
"name": "Python",
"bytes": "20876"
}
],
"symlink_target": ""
} |
package com.jetbrains.python.inspections.quickfix;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.inspections.PyDictCreationInspection;
import com.jetbrains.python.psi.*;
import org.jetbrains.annotations.NotNull;
import java.util.List;
import java.util.Map;
/**
* Created by IntelliJ IDEA.
* User: Alexey.Ivanov
* Date: 26.02.2010
* Time: 13:29:02
*/
/**
 * Quick fix that merges a dict literal assignment and the run of subsequent
 * {@code d[key] = value} statements that follow it into a single dict literal,
 * deleting the statements that were folded in.
 */
public class DictCreationQuickFix implements LocalQuickFix {
  // The "d = {...}" assignment that starts the dict creation sequence.
  private final PyAssignmentStatement myStatement;

  public DictCreationQuickFix(@NotNull final PyAssignmentStatement statement) {
    myStatement = statement;
  }

  @Override
  @NotNull
  public String getName() {
    return PyBundle.message("QFIX.dict.creation");
  }

  @Override
  @NotNull
  public String getFamilyName() {
    return getName();
  }

  @Override
  public void applyFix(@NotNull final Project project, @NotNull final ProblemDescriptor descriptor) {
    final PyElementGenerator elementGenerator = PyElementGenerator.getInstance(project);
    // Linked map: preserves the textual order of keys in the rebuilt literal.
    final Map<String, String> statementsMap = Maps.newLinkedHashMap();
    final PyExpression assignedValue = myStatement.getAssignedValue();
    if (assignedValue instanceof PyDictLiteralExpression) {
      // Seed the map with the key/value pairs already present in the literal.
      for (PyKeyValueExpression expression: ((PyDictLiteralExpression)assignedValue).getElements()) {
        final PyExpression value = expression.getValue();
        if (value != null)
          statementsMap.put(expression.getKey().getText(), value.getText());
      }
      // Walk the following sibling statements, folding "d[key] = value"
      // assignments into the map and deleting them from the file.
      PyStatement statement = PsiTreeUtil.getNextSiblingOfType(myStatement, PyStatement.class);
      while (statement instanceof PyAssignmentStatement) {
        final PyAssignmentStatement assignmentStatement = (PyAssignmentStatement)statement;
        // Target of the ORIGINAL assignment: subsequent statements must write
        // into the same variable to qualify.
        final PyExpression target = myStatement.getTargets()[0];
        final String targetName = target.getName();
        // NOTE(review): if targetName is null the loop body does not advance
        // 'statement' -- presumably the inspection guarantees a named target
        // here; confirm before relying on this path.
        if (targetName != null) {
          final List<Pair<PyExpression, PyExpression>> targetsToValues =
            PyDictCreationInspection.getDictTargets(target, targetName, assignmentStatement);
          // Capture the next sibling BEFORE statement.delete() detaches the current node.
          final PyStatement nextStatement = PsiTreeUtil.getNextSiblingOfType(statement, PyStatement.class);
          if (targetsToValues == null || targetsToValues.isEmpty()) break;
          for (Pair<PyExpression, PyExpression> targetToValue : targetsToValues) {
            final PySubscriptionExpression subscription = (PySubscriptionExpression)targetToValue.first;
            final PyExpression indexExpression = subscription.getIndexExpression();
            assert indexExpression != null;
            final String indexText;
            // Tuple keys need explicit parentheses to remain one expression
            // inside the rebuilt literal.
            if (indexExpression instanceof PyTupleExpression)
              indexText = "("+indexExpression.getText()+")";
            else
              indexText = indexExpression.getText();
            final String valueText;
            // Same for tuple values.
            if (targetToValue.second instanceof PyTupleExpression)
              valueText = "("+targetToValue.second.getText()+")";
            else
              valueText = targetToValue.second.getText();
            statementsMap.put(indexText, valueText);
            statement.delete();
          }
          statement = nextStatement;
        }
      }
      // Render "key: value" pairs and replace the original literal with the
      // merged "{k1: v1, k2: v2, ...}".
      List<String> statements = Lists.newArrayList();
      for (Map.Entry<String, String> entry : statementsMap.entrySet()) {
        statements.add(entry.getKey() + ": " + entry.getValue());
      }
      final PyExpression expression = elementGenerator.createExpressionFromText(LanguageLevel.forElement(myStatement),
                                                                               "{" + StringUtil.join(statements, ", ") + "}");
      if (expression != null)
        assignedValue.replace(expression);
    }
  }
}
| {
"content_hash": "279858cb86dc4bbabac2ab335a469d54",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 118,
"avg_line_length": 41.44897959183673,
"alnum_prop": 0.6964549483013294,
"repo_name": "romankagan/DDBWorkbench",
"id": "94383bdeaebb684cca1bf5e6ef8ad1c668923a59",
"size": "4662",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/src/com/jetbrains/python/inspections/quickfix/DictCreationQuickFix.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AspectJ",
"bytes": "182"
},
{
"name": "C",
"bytes": "174330"
},
{
"name": "C#",
"bytes": "390"
},
{
"name": "C++",
"bytes": "85270"
},
{
"name": "CSS",
"bytes": "102018"
},
{
"name": "Erlang",
"bytes": "10"
},
{
"name": "FLUX",
"bytes": "57"
},
{
"name": "Groovy",
"bytes": "1899897"
},
{
"name": "J",
"bytes": "5050"
},
{
"name": "Java",
"bytes": "128604770"
},
{
"name": "JavaScript",
"bytes": "123045"
},
{
"name": "Objective-C",
"bytes": "19702"
},
{
"name": "Perl",
"bytes": "6549"
},
{
"name": "Python",
"bytes": "17759911"
},
{
"name": "Ruby",
"bytes": "1213"
},
{
"name": "Shell",
"bytes": "45691"
},
{
"name": "TeX",
"bytes": "60798"
},
{
"name": "XSLT",
"bytes": "113531"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.