text stringlengths 2 1.04M | meta dict |
|---|---|
<?php
namespace Predis\PubSub;
use Predis\ClientException;
use Predis\ClientInterface;
use Predis\NotSupportedException;
/**
* Client-side abstraction of a Publish / Subscribe context.
*
* @author Daniele Alessandri <suppakilla@gmail.com>
*/
abstract class AbstractPubSubContext implements \Iterator
{
    const SUBSCRIBE = 'subscribe';
    const UNSUBSCRIBE = 'unsubscribe';
    const PSUBSCRIBE = 'psubscribe';
    const PUNSUBSCRIBE = 'punsubscribe';
    const MESSAGE = 'message';
    const PMESSAGE = 'pmessage';

    const STATUS_VALID = 1;       // 0b0001
    const STATUS_SUBSCRIBED = 2;  // 0b0010
    const STATUS_PSUBSCRIBED = 4; // 0b0100

    /**
     * @var int|null Iterator key: null until next() is first called, then
     *               incremented once per consumed message.
     */
    private $position = null;

    /**
     * @var int Bitmask of STATUS_* flags describing the context state.
     */
    private $statusFlags = self::STATUS_VALID;

    /**
     * Automatically closes the context when PHP's garbage collector kicks in.
     */
    public function __destruct()
    {
        $this->closeContext(true);
    }

    /**
     * Checks if the specified flag is set in the state of the context.
     *
     * @param int $value Flag.
     * @return Boolean
     */
    protected function isFlagSet($value)
    {
        return ($this->statusFlags & $value) === $value;
    }

    /**
     * Subscribes to the specified channels.
     *
     * @param mixed $arg,... One or more channel names.
     */
    public function subscribe(/* arguments */)
    {
        $this->writeCommand(self::SUBSCRIBE, func_get_args());
        $this->statusFlags |= self::STATUS_SUBSCRIBED;
    }

    /**
     * Unsubscribes from the specified channels.
     *
     * @param mixed $arg,... One or more channel names.
     */
    public function unsubscribe(/* arguments */)
    {
        $this->writeCommand(self::UNSUBSCRIBE, func_get_args());
    }

    /**
     * Subscribes to the specified channels using a pattern.
     *
     * @param mixed $arg,... One or more channel name patterns.
     */
    public function psubscribe(/* arguments */)
    {
        $this->writeCommand(self::PSUBSCRIBE, func_get_args());
        $this->statusFlags |= self::STATUS_PSUBSCRIBED;
    }

    /**
     * Unsubscribes from the specified channels using a pattern.
     *
     * @param mixed $arg,... One or more channel name patterns.
     */
    public function punsubscribe(/* arguments */)
    {
        $this->writeCommand(self::PUNSUBSCRIBE, func_get_args());
    }

    /**
     * Closes the context by unsubscribing from all the subscribed channels.
     * Optionally, the context can be forcefully closed by dropping the
     * underlying connection.
     *
     * @param Boolean $force Forcefully close the context by closing the connection.
     * @return Boolean False when the context is already invalid or was force
     *                 closed; true when unsubscribe commands were issued and
     *                 confirmation messages are still pending.
     */
    public function closeContext($force = false)
    {
        if (!$this->valid()) {
            return false;
        }

        if ($force) {
            $this->invalidate();
            $this->disconnect();
        } else {
            // Graceful shutdown: ask the server to drop every active
            // subscription; the context becomes invalid once the server
            // confirms the last unsubscription.
            if ($this->isFlagSet(self::STATUS_SUBSCRIBED)) {
                $this->unsubscribe();
            }
            if ($this->isFlagSet(self::STATUS_PSUBSCRIBED)) {
                $this->punsubscribe();
            }
        }

        return !$force;
    }

    /**
     * Closes the underlying connection on forced disconnection.
     */
    abstract protected function disconnect();

    /**
     * Writes a Redis command on the underlying connection.
     *
     * @param string $method ID of the command.
     * @param array $arguments List of arguments.
     */
    abstract protected function writeCommand($method, $arguments);

    /**
     * {@inheritdoc}
     *
     * The #[\ReturnTypeWillChange] attributes below silence the PHP 8.1+
     * deprecation for Iterator methods without return types; on PHP < 8 the
     * attribute line is parsed as a '#' comment and is therefore harmless.
     */
    #[\ReturnTypeWillChange]
    public function rewind()
    {
        // NOOP: a live stream of messages cannot be replayed.
    }

    /**
     * Returns the last message payload retrieved from the server and generated
     * by one of the active subscriptions.
     *
     * @return array
     */
    #[\ReturnTypeWillChange]
    public function current()
    {
        return $this->getValue();
    }

    /**
     * {@inheritdoc}
     */
    #[\ReturnTypeWillChange]
    public function key()
    {
        return $this->position;
    }

    /**
     * {@inheritdoc}
     */
    #[\ReturnTypeWillChange]
    public function next()
    {
        if ($this->valid()) {
            $this->position++;
        }

        return $this->position;
    }

    /**
     * Checks if the context is still in a valid state to continue: it must
     * not have been invalidated and at least one subscription (channel or
     * pattern) must still be active.
     *
     * @return Boolean
     */
    #[\ReturnTypeWillChange]
    public function valid()
    {
        $isValid = $this->isFlagSet(self::STATUS_VALID);
        $subscriptionFlags = self::STATUS_SUBSCRIBED | self::STATUS_PSUBSCRIBED;
        $hasSubscriptions = ($this->statusFlags & $subscriptionFlags) > 0;

        return $isValid && $hasSubscriptions;
    }

    /**
     * Resets the state of the context.
     */
    protected function invalidate()
    {
        $this->statusFlags = 0; // 0b0000
    }

    /**
     * Waits for a new message from the server generated by one of the active
     * subscriptions and returns it when available.
     *
     * @return array
     */
    abstract protected function getValue();
}
| {
"content_hash": "dd72981bd25e8d24343163ad93febab4",
"timestamp": "",
"source": "github",
"line_count": 203,
"max_line_length": 84,
"avg_line_length": 24.665024630541872,
"alnum_prop": 0.5811863391252247,
"repo_name": "h0x91b/redis-v8",
"id": "29126b0a1365d0e9f9ad18ddb334807f8b4d9383",
"size": "5239",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "Client-Libraries/PHP/predis-0.8.3/PubSub/AbstractPubSubContext.php",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3522778"
},
{
"name": "C++",
"bytes": "37327587"
},
{
"name": "CSS",
"bytes": "7201"
},
{
"name": "JavaScript",
"bytes": "14084294"
},
{
"name": "Lua",
"bytes": "25752"
},
{
"name": "Objective-C",
"bytes": "470"
},
{
"name": "PHP",
"bytes": "455118"
},
{
"name": "Perl",
"bytes": "170502"
},
{
"name": "Python",
"bytes": "528990"
},
{
"name": "Ruby",
"bytes": "32982"
},
{
"name": "Scheme",
"bytes": "10604"
},
{
"name": "Shell",
"bytes": "97117"
},
{
"name": "Tcl",
"bytes": "303718"
},
{
"name": "XSLT",
"bytes": "303"
}
],
"symlink_target": ""
} |
=head1 LICENSE
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
# Copyright [2016-2017] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::Analysis::RunnableDB::HaplotypeMapper -
=head1 SYNOPSIS
my $obj = Bio::EnsEMBL::Analysis::RunnableDB::HaplotypeProjection->new(
-db => $db,
-input_id => $id,
);
$obj->fetch_input
$obj->run
my @newfeatures = $obj->output;
=head1 DESCRIPTION
This method is used to get project the genes annotated in the reference chromosome into the Haplotype regions.
=head1 METHODS
=head1 APPENDIX
The rest of the documentation details each of the object methods.
Internal methods are usually preceded with a _
=cut
# Let the code begin...
package Bio::EnsEMBL::Analysis::RunnableDB::HaplotypeMapper;
use warnings ;
use vars qw(@ISA);
use strict;
# Object preamble
use Bio::EnsEMBL::Analysis::RunnableDB;
use Bio::EnsEMBL::Analysis::Runnable::HaplotypeMapper;
use Bio::EnsEMBL::Analysis::RunnableDB::BaseGeneBuild;
use Bio::EnsEMBL::Utils::Exception qw(throw warning);
use Bio::EnsEMBL::Analysis::Config::HaplotypeProjection qw (
);
@ISA = qw(Bio::EnsEMBL::Analysis::RunnableDB::BaseGeneBuild);
############################################################
=head2 new
Usage : $self->new(-DBOBJ => $db,
-INPUT_ID => $id,
-SEQFETCHER => $sf,
-ANALYSIS => $analysis,
);
Function: creates a Bio::EnsEMBL::Analysis::RunnableDB::HaplotypeProjection object
Returns : A Bio::EnsEMBL::Analysis::RunnableDB::HaplotypeProjection object
Args : -dbobj: A Bio::EnsEMBL::DBSQL::DBAdaptor (required),
-input_id: Hap_pair input id (required),
-seqfetcher: A Sequence Fetcher Object,
-analysis: A Bio::EnsEMBL::Analysis (optional)
-extend: determines the extension of the virtual contig
note: not implemented yet!
-golden_path: determines the name of the golden path to use
=cut
# Constructor: delegates entirely to BaseGeneBuild; no extra state is
# initialised in this subclass.
sub new {
    my ( $class, @args ) = @_;
    return $class->SUPER::new(@args);
}
############################################################
# Combined getter/setter for the input id.  A defined argument replaces
# the stored value; the current value is always returned.
sub input_id {
    my ( $self, $value ) = @_;

    $self->{_input_id} = $value if defined $value;

    return $self->{_input_id};
}
############################################################
=head2 write_output
Title : write_output
Usage : $self->write_output
Function: Writes output data to db
Returns : array of exons (with start and end)
Args : none
=cut
# Hands the mappings collected by run() to every registered genebuilder,
# which loads them into its own tables via load_tables().  Despite the
# name, no features are written directly here.
sub write_output {
    my ($self, @mappings) = @_;

    #my $db = $self->get_dbadaptor("REFERENCE_DB") ;

    # Any arguments passed in are discarded: the mappings accumulated by
    # run() (stored through the output() accessor) are what gets written.
    @mappings = $self->output;

    print "REACH TEST 2 position\n";
    print "now I have mappings: ", @mappings, "\n";

    my $genebuilders = $self->get_genebuilders;

    # _genebuilder is a two-level hash: target id => query id => builder
    # (see addgenebuilder()).
    foreach my $target (keys %{ $genebuilders }) {
        foreach my $query (keys %{$genebuilders->{$target}}) {
            $genebuilders->{$target}->{$query}->load_tables(@mappings);
        }
    }

    return 1;
}
############################################################
=head2 fetch_input
Function: It fetches the slice or contig according to the input_id,
and it defines the database where the
previous annotations are stored and create a Bio::EnsEMBL::Pipeline::GeneBuilder
object for that genomic, input_id and db
Returns : nothing
Args : none
=cut
# Fetches the haplotype and reference slices named by the input id and
# creates a Runnable::HaplotypeMapper for the pair.
#
# The input id is a single colon-separated string with ten fields: five
# describing the haplotype region followed by five for the reference
# region.  Judging by the fetch_by_region() calls below the field order is
# coord-system name, coord-system version, region name, start, end --
# confirm against the code that generates these input ids.
sub fetch_input {
    my ($self) = @_;

    $self->throw("No input id") unless defined($self->input_id);

    # Database holding genes discarded during the genebuild.
    my $discarded_db = $self->get_dbadaptor("DISCARDED_DB");
    print "DISCARDED GENE DB: ", $discarded_db->dbname, "\n";

    # database where the genebuild produced genes are
    my $ref_db = $self->get_dbadaptor("REFERENCE_DB");
    print "ENSEMBL DB : ", $ref_db->dbname, "\n";
    print $self->input_id, "\n";

    my @input_id = split(/:/, $self->input_id);

    # Fields 0-4 address the haplotype slice, fields 5-9 the reference
    # slice; the literal 1 is the strand argument.
    my $hap_slice = $ref_db->get_SliceAdaptor->fetch_by_region($input_id[0], $input_id[2], $input_id[3], $input_id[4], 1, $input_id[1]);
    my $slice = $ref_db->get_SliceAdaptor->fetch_by_region($input_id[5], $input_id[7], $input_id[8], $input_id[9], 1, $input_id[6]);

    #$self->fetch_sequence();

    print "HAP_slice: ", $hap_slice, "\n";
    print "REF_slice: ", $slice, "\n";

    # NOTE(review): query() is not defined in this module -- presumably
    # inherited from the RunnableDB base class; confirm.
    $self->query($hap_slice);
    $self->target($slice);

    print "QUERY: ", $self->query->seq_region_name, "\n";
    print "TARGET: ", $self->target->seq_region_name, "\n";

    my $genebuilder = new Bio::EnsEMBL::Analysis::Runnable::HaplotypeMapper
        (
         '-hap_slice' => $self->query,
         '-slice' => $self->target,
         '-input_id' => $self->input_id,
        );

    $genebuilder->discarded_db($discarded_db);
    $genebuilder->ensembl_db($ref_db);

    # store the object and the piece of genomic where it will run
    $self->addgenebuilder($genebuilder, $self->target, $self->query);
}
############################################################
# Registers a genebuilder under the ids of its target and query slices.
# All three arguments must be defined, otherwise an exception is thrown.
sub addgenebuilder {
    my ( $self, $arg, $target, $query ) = @_;

    unless ( defined($arg) && defined($target) && defined($query) ) {
        $self->throw("Wrong number of inputs [$arg,$target,$query]\n");
    }

    $self->{_genebuilder}{ $target->id }{ $query->id } = $arg;
}
############################################################
# Read-only accessor for the nested genebuilder registry
# (target id => query id => builder).
sub get_genebuilders {
    my $self = shift;
    return $self->{_genebuilder};
}
############################################################
# Runs every registered genebuilder: builds and filters the alignment
# between haplotype and reference, then collects the resulting map
# regions into the output list.
sub run {
    my ($self) = @_;

    my @mapping;

    # get a hash, with keys = contig/slice and value = genebuilder object
    my $genebuilders = $self->get_genebuilders;

    #my @genes;
    foreach my $target (keys %{ $genebuilders }) {
        foreach my $query (keys %{$genebuilders->{$target}}) {
            $genebuilders->{$target}->{$query}->create_alignment;
            $genebuilders->{$target}->{$query}->filter_alignment;
            # NOTE(review): @mapping is overwritten (not appended) on each
            # iteration, so with more than one registered genebuilder only
            # the last set of map regions reaches output() -- confirm this
            # is intended.
            @mapping = $genebuilders->{$target}->{$query}->make_map_regions;
            print "I got mappings: ", @mapping, "\n";
        }
    }

    $self->output( @mapping );
}
############################################################
# override the evil RunnableDB output method:
# Accumulating accessor that overrides the RunnableDB output method:
# any arguments are appended to the stored list, and the whole list is
# returned (as a list, not a reference).
sub output {
    my ( $self, @items ) = @_;

    $self->{_output} ||= [];
    push @{ $self->{_output} }, @items if @items;

    return @{ $self->{_output} };
}
############################################################
# Combined getter/setter for the reference (target) slice.  A defined
# argument replaces the stored slice; the current slice is returned.
sub target {
    my ( $self, $new_slice ) = @_;

    $self->{_target} = $new_slice if defined $new_slice;

    return $self->{_target};
}
1;
| {
"content_hash": "02d3ef7a7eb284c767a4ff38698a56a9",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 131,
"avg_line_length": 26.83448275862069,
"alnum_prop": 0.5539707016191211,
"repo_name": "james-monkeyshines/ensembl-analysis",
"id": "758dd62362f50847314c252b0c4797c0a01b9e81",
"size": "7782",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/Bio/EnsEMBL/Analysis/RunnableDB/HaplotypeMapper.pm",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "5520948"
},
{
"name": "Perl6",
"bytes": "400039"
},
{
"name": "Shell",
"bytes": "8872"
}
],
"symlink_target": ""
} |
/**
* Main class for manifestParsing, it takes the URL, parse its content.
*/
package downloader.manifest;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import downloader.exceptions.InvalidManifestFileException;
import downloader.exceptions.InvalidUrlException;
import downloader.utils.Utils;
public class ManifestParser {
    /** Root manifest whose remote content will be downloaded and parsed. */
    private Manifest manifestFile;
    /** Parsed entries: nested manifests and segments (with their mirrors). */
    private ArrayList<UrlLine> segments;

    /**
     * ManifestParser constructor, it takes Manifest file as an object to
     * extract the content.
     *
     * @param manifestFile the manifest object whose URL will be parsed
     */
    public ManifestParser(Manifest manifestFile) {
        this.manifestFile = manifestFile;
        segments = new ArrayList<UrlLine>();
    }

    /**
     * Extracts the segments and nested manifest URLs from the manifest file.
     *
     * Each line is either the separator "**" (which ends the current
     * segment's mirror list), a manifest URL, or a segment URL. A segment
     * URL that directly follows another segment URL (with no intervening
     * "**") is registered as a mirror of the previous segment.
     *
     * @return list of manifests and segments found in the manifest file
     * @throws IOException if the manifest content cannot be read
     * @throws InvalidUrlException if URL validation fails
     * @throws InvalidManifestFileException if a line is neither "**" nor an http URL
     */
    public ArrayList<UrlLine> getSegments() throws IOException, InvalidUrlException, InvalidManifestFileException {
        ArrayList<String> manifestLines = extractManifestLines();
        boolean isAlternative = false;
        for (String rawLine : manifestLines) {
            // Trim once per line instead of re-trimming on every check.
            String line = rawLine.trim();
            if (line.equals("**")) {
                isAlternative = false;
            } else if (line.startsWith("http")) {
                // Lines that start with "http" but fail URL validation are
                // silently skipped (original behavior, preserved).
                if (Utils.isUrl(line)) {
                    if (Utils.isValidManifestUrl(line)) {
                        // NOTE(review): isAlternative is not reset here, so a
                        // segment following a nested manifest still becomes a
                        // mirror of the previous segment -- confirm intended.
                        segments.add(new Manifest(line));
                    } else if (isAlternative) {
                        // Consecutive segment URL: mirror of the last segment.
                        segments.get(segments.size() - 1).addMirror(line);
                    } else {
                        segments.add(new Segment(line));
                        isAlternative = true;
                    }
                }
            } else {
                throw new InvalidManifestFileException("Invalid Manifest File");
            }
        }
        return segments;
    }

    /**
     * Downloads the manifest content and returns it as a list of raw lines.
     *
     * @return the lines of the manifest file
     * @throws IOException if the URL cannot be opened or read
     */
    private ArrayList<String> extractManifestLines() throws IOException {
        ArrayList<String> manifestLines = new ArrayList<String>();
        URLConnection manifestUrlCon = new URL(this.manifestFile.getUrl()).openConnection();
        BufferedReader reader = new BufferedReader(new InputStreamReader(manifestUrlCon.getInputStream()));
        try {
            String manifestLine;
            while ((manifestLine = reader.readLine()) != null) {
                manifestLines.add(manifestLine);
            }
        } finally {
            // Closing the reader also closes the underlying stream; the
            // original code never closed the reader and leaked the stream
            // whenever readLine() threw.
            reader.close();
        }
        return manifestLines;
    }
} | {
"content_hash": "d4b1f1a12451802033f06e1d7c16cc23",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 112,
"avg_line_length": 26.51818181818182,
"alnum_prop": 0.7086047308878985,
"repo_name": "AhmadBadir/Downloader",
"id": "b067a159dde389b590f7bd47b29e65f60632a2cc",
"size": "2917",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "multipart/src/main/java/downloader/manifest/ManifestParser.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "30483"
}
],
"symlink_target": ""
} |
package de.qaware.oss.metrics.jsr340;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Default;
import javax.enterprise.inject.Produces;
import javax.enterprise.inject.spi.InjectionPoint;
import java.util.logging.Logger;
/**
* A CDI producer implementation to inject Logger instance.
*
* @author lreimer
*/
@ApplicationScoped
public class TestableLoggerProducer {
    /**
     * The CDI producer method for JULI loggers.
     *
     * Uses the name of the class that declared the injection point so log
     * output is attributed to the injecting class.
     *
     * @param ip the injection point
     * @return a suitable logger
     */
    @Produces
    @Default
    public Logger createLogger(final InjectionPoint ip) {
        // Class<?> instead of the raw Class type: avoids an unchecked
        // raw-type warning without changing behavior.
        final Class<?> declaringClass = ip.getMember().getDeclaringClass();
        return Logger.getLogger(declaringClass.getName());
    }
}
| {
"content_hash": "9dc95c065ec8fd59e7e5c589cd30cb6d",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 66,
"avg_line_length": 26.379310344827587,
"alnum_prop": 0.7254901960784313,
"repo_name": "lreimer/cloud-native-javaee",
"id": "b01c135038cb7f85210b44c0ac2d873124b20be6",
"size": "1925",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "infrastructure/metrics-jsr340/src/test/java/de/qaware/oss/metrics/jsr340/TestableLoggerProducer.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "2641"
},
{
"name": "Java",
"bytes": "160567"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "9132c6c9b9259a7424f803371411bed4",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "cf11dde78b81b24af34d7f2da1ea3a13d1557353",
"size": "173",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Lamiaceae/Volkameria/Volkameria pumila/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package uk.ac.ebi.embl.api.validation.check.genomeassembly;
import org.apache.commons.lang.StringUtils;
import uk.ac.ebi.embl.api.entry.genomeassembly.ChromosomeEntry;
import uk.ac.ebi.embl.api.validation.ValidationEngineException;
import uk.ac.ebi.embl.api.validation.ValidationResult;
import uk.ac.ebi.embl.api.validation.annotation.Description;
import java.util.Arrays;
import java.util.regex.Pattern;
@Description("")
/**
 * Validates the chromosome name of a chromosome-list entry: it must be
 * present, at most 32 characters long, match the allowed character
 * pattern, and must not contain any placeholder substring such as "Un"
 * or "random" (unless error-ignoring is enabled).
 */
public class ChromosomeListChromosomeNameCheck extends GenomeAssemblyValidationCheck<ChromosomeEntry>
{
    // Keys used to look up the message text for each failure mode.
    private final String MESSAGE_KEY_MISSING_CHROMOSOME_NAME_ERROR = "ChromosomeListMissingNameCheck";
    private final String MESSAGE_KEY_CHROMOSOME_NAME_LENGTH_ERROR = "ChromosomeListNameLengthCheck";
    private final String MESSAGE_KEY_CHROMOSOME_NAME_REGEX_ERROR = "ChromosomeListNameRegexCheck";
    private final String MESSAGE_KEY_INVALID_CHROMOSOME_NAME_ERROR = "ChromosomeListNameInvalidCheck";

    // Valid names: one leading alphanumeric, then alphanumerics, '_', '.' or '-'.
    private Pattern ChromosomeNamePattern = Pattern.compile("^([A-Za-z0-9]){1}([A-Za-z0-9_\\.]|-)*$");

    // Substrings (matched case-insensitively, anywhere in the name) that
    // mark placeholder/unplaced chromosome names.
    private String[] chromosomeNamesToRejectArray = new String[] { "Un", "chrUn", "random", "rnd" , "unknown"};

    /**
     * Runs all chromosome-name validations on the given entry, reporting
     * each failure through reportError(), and returns the accumulated
     * validation result.
     *
     * @param entry the chromosome-list entry to validate (may be null)
     * @return the shared validation result of this check
     * @throws ValidationEngineException on engine-level failures
     */
    public ValidationResult check(ChromosomeEntry entry) throws ValidationEngineException
    {
        // Nothing to validate.
        if(entry == null)
            return result;

        // A missing name is fatal for this entry: report and skip the rest.
        if (null == entry.getChromosomeName())
        {
            reportError(entry.getOrigin(), MESSAGE_KEY_MISSING_CHROMOSOME_NAME_ERROR, entry.getObjectName());
            return result;
        }

        // Names must be at most 32 characters (>= 33 is rejected).
        if(entry.getChromosomeName().length()>=33)
        {
            reportError(entry.getOrigin(), MESSAGE_KEY_CHROMOSOME_NAME_LENGTH_ERROR, entry.getObjectName());
        }

        // The name is trimmed only for matching; the stored value is untouched.
        if(!ChromosomeNamePattern.matcher(entry.getChromosomeName().trim()).matches())
        {
            reportError(entry.getOrigin(), MESSAGE_KEY_CHROMOSOME_NAME_REGEX_ERROR, entry.getObjectName());
        }

        // Placeholder-substring check, skipped entirely when the plan's
        // ignore_errors flag is set.
        if(!(getEmblEntryValidationPlanProperty() != null && getEmblEntryValidationPlanProperty().ignore_errors.get()) && Arrays.stream(chromosomeNamesToRejectArray).anyMatch(x -> StringUtils.containsIgnoreCase(entry.getChromosomeName(),x ))) {
            reportError(entry.getOrigin(), MESSAGE_KEY_INVALID_CHROMOSOME_NAME_ERROR, entry.getChromosomeName());
        }

        return result;
    }
}
| {
"content_hash": "0ea205a51655ed26a99fe4ae3a21f5cb",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 238,
"avg_line_length": 43.48,
"alnum_prop": 0.765869365225391,
"repo_name": "enasequence/sequencetools",
"id": "cd68bd629682704b1c50fe409d67333ac6a0775c",
"size": "2941",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/uk/ac/ebi/embl/api/validation/check/genomeassembly/ChromosomeListChromosomeNameCheck.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CAP CDS",
"bytes": "3626"
},
{
"name": "Java",
"bytes": "4569769"
}
],
"symlink_target": ""
} |
// Application entry point: wires up mithril hash routing for the two pages.
var m = require('mithril');
var loginSrvc = require('./model/jwtSrvc');

// Both page components receive the shared JWT/login service; the Home
// component additionally gets a static text argument.
var Home = m.component(require('./pages/Home'), loginSrvc, 'some Text');
var User = m.component(require('./pages/User'), loginSrvc);

// Hash-based routing (#/...) with the route table mounted on <body>.
m.route.mode = 'hash';
m.route(document.body, '/', {
    '/': Home,
    '/user': User
});
| {
"content_hash": "ec3eabf477e6b939d45ea99ada2654ce",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 72,
"avg_line_length": 25.833333333333332,
"alnum_prop": 0.5774193548387097,
"repo_name": "cachaito/mithril_jwt",
"id": "73d2c360be98dbbb4c6e6b6cb3e495a8c61e987e",
"size": "310",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/js/app.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "101"
},
{
"name": "JavaScript",
"bytes": "12488"
}
],
"symlink_target": ""
} |
using Swashbuckle.AspNetCore.SwaggerGen;

namespace AutoQueryable.AspNetCore.Swagger
{
    /// <summary>
    /// Extension methods for <see cref="SwaggerGenOptions"/> that register
    /// the AutoQueryable Swagger integration.
    /// </summary>
    public static class SwaggerGenOptionsExtensions
    {
        /// <summary>
        /// Registers the <see cref="AutoQueryableOperationFilter"/> so that
        /// generated Swagger operations document AutoQueryable query parameters.
        /// </summary>
        /// <param name="swaggerGenOptions">The Swagger generator options being configured.</param>
        public static void AddAutoQueryable(this SwaggerGenOptions swaggerGenOptions)
        {
            swaggerGenOptions.OperationFilter<AutoQueryableOperationFilter>();
        }
    }
} | {
"content_hash": "3e519b493c8276c1f09c8e8d5316dd16",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 85,
"avg_line_length": 28.166666666666668,
"alnum_prop": 0.7366863905325444,
"repo_name": "trenoncourt/AutoQueryable",
"id": "deac22a14de6f7705c1280e484cb92ece77f58c7",
"size": "340",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/AutoQueryable.AspNetCore.Swagger/SwaggerGenOptionsExtensions.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "645"
},
{
"name": "C#",
"bytes": "248487"
}
],
"symlink_target": ""
} |
/*
http://www.cgsoso.com/forum-211-1.html
CG搜搜 Unity3d 每日Unity3d插件免费更新 更有VIP资源!
CGSOSO 主打游戏开发,影视设计等CG资源素材。
插件如若商用,请务必官网购买!
daily assets update for try.
U should buy the asset from home store if u use it in your project!
*/
#if !BESTHTTP_DISABLE_ALTERNATE_SSL && (!UNITY_WEBGL || UNITY_EDITOR)
using System;
using Org.BouncyCastle.Math;
using Org.BouncyCastle.Security;
namespace Org.BouncyCastle.Utilities
{
/**
 * BigInteger utilities (unsigned encoding helpers and bounded random
 * generation) used by the bundled BouncyCastle TLS implementation.
 */
public abstract class BigIntegers
{
    // Number of rejection-sampling attempts CreateRandomInRange makes
    // before falling back to a narrower (restricted-range) construction.
    private const int MaxIterations = 1000;

    /**
     * Return the passed in value as an unsigned byte array.
     *
     * @param n value to be converted.
     * @return a byte array without a leading zero byte if present in the signed encoding.
     */
    public static byte[] AsUnsignedByteArray(
        BigInteger n)
    {
        return n.ToByteArrayUnsigned();
    }

    /**
     * Return the passed in value as an unsigned byte array of specified length, zero-extended as necessary.
     *
     * @param length desired length of result array.
     * @param n value to be converted.
     * @return a byte array of specified length, with leading zeroes as necessary given the size of n.
     */
    public static byte[] AsUnsignedByteArray(int length, BigInteger n)
    {
        byte[] bytes = n.ToByteArrayUnsigned();

        if (bytes.Length > length)
            throw new ArgumentException("standard length exceeded", "n");

        if (bytes.Length == length)
            return bytes;

        // Left-pad with zero bytes: copy the encoding into the tail of a
        // zero-filled array of the requested length.
        byte[] tmp = new byte[length];
        Array.Copy(bytes, 0, tmp, tmp.Length - bytes.Length, bytes.Length);
        return tmp;
    }

    /**
     * Return a random BigInteger not less than 'min' and not greater than 'max'
     *
     * @param min the least value that may be generated
     * @param max the greatest value that may be generated
     * @param random the source of randomness
     * @return a random BigInteger value in the range [min,max]
     */
    public static BigInteger CreateRandomInRange(
        BigInteger min,
        BigInteger max,
        // TODO Should have been just Random class
        SecureRandom random)
    {
        int cmp = min.CompareTo(max);
        if (cmp >= 0)
        {
            if (cmp > 0)
                throw new ArgumentException("'min' may not be greater than 'max'");

            // min == max: the range contains exactly one value.
            return min;
        }

        // When min is large relative to max, sample [0, max - min] and
        // shift by min instead: far fewer candidates get rejected.
        if (min.BitLength > max.BitLength / 2)
        {
            return CreateRandomInRange(BigInteger.Zero, max.Subtract(min), random).Add(min);
        }

        // Rejection sampling: draw uniform values of max.BitLength bits and
        // return the first one that lands inside [min, max].
        for (int i = 0; i < MaxIterations; ++i)
        {
            BigInteger x = new BigInteger(max.BitLength, random);
            if (x.CompareTo(min) >= 0 && x.CompareTo(max) <= 0)
            {
                return x;
            }
        }

        // fall back to a faster (restricted) method
        return new BigInteger(max.Subtract(min).BitLength - 1, random).Add(min);
    }
}
}
#endif
| {
"content_hash": "a2741358b31e7fcbb1d0073a2553a7fc",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 112,
"avg_line_length": 29.73148148148148,
"alnum_prop": 0.5605730302086578,
"repo_name": "SeongBongJang/amuzlabBonRepo",
"id": "e56f2a78e1c5a3ef0d47488bf314f41192cda528",
"size": "3305",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "NGUI_Test/NGUI_Sample/Assets/Best HTTP (Pro)/BestHTTP/SecureProtocol/util/BigIntegers.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "10179144"
},
{
"name": "CSS",
"bytes": "95737"
},
{
"name": "GLSL",
"bytes": "200778"
},
{
"name": "HTML",
"bytes": "39430"
},
{
"name": "JavaScript",
"bytes": "366148"
},
{
"name": "Objective-C",
"bytes": "768"
},
{
"name": "Objective-C++",
"bytes": "6612"
},
{
"name": "Python",
"bytes": "7443"
},
{
"name": "Shell",
"bytes": "254"
}
],
"symlink_target": ""
} |
# Base image: balena kitra710 device image (ARM v8) on Fedora 35, "run" variant.
FROM balenalib/kitra710-fedora:35-run

# Versions of the Node.js runtime and the Yarn package manager installed below.
ENV NODE_VERSION 16.14.0
ENV YARN_VERSION 1.22.4

# Install Node.js and Yarn:
#  - import the release signing key, trying several keyservers in turn;
#  - download the arm64 Node.js tarball, verify its sha256 checksum and
#    unpack it into /usr/local;
#  - download Yarn plus its .asc signature, GPG-verify it, unpack into
#    /opt/yarn and symlink yarn/yarnpkg onto the PATH.
RUN for key in \
6A010C5166006599AA17F08146C2130DFD2497F5 \
; do \
gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
gpg --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-arm64.tar.gz" \
&& echo "82d71968c82eb391f463df62ba277563a3bd01ce43bba0e7e1c533991567b8fe node-v$NODE_VERSION-linux-arm64.tar.gz" | sha256sum -c - \
&& tar -xzf "node-v$NODE_VERSION-linux-arm64.tar.gz" -C /usr/local --strip-components=1 \
&& rm "node-v$NODE_VERSION-linux-arm64.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& mkdir -p /opt/yarn \
&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& npm config set unsafe-perm true -g --unsafe-perm \
&& rm -rf /tmp/*

# Default command is a placeholder: balena images expect the application to
# provide its own CMD.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

# Run the balena Node.js stack smoke test, then remove the test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
&& echo "Running test-stack@node" \
&& chmod +x test-stack@node.sh \
&& bash test-stack@node.sh \
&& rm -rf test-stack@node.sh

# Record image details shown by the balena-info command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Fedora 35 \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v16.14.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info

# Install a one-shot /bin/sh shim that prints the image info banner on first
# use and then restores the real shell.
RUN echo $'#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | {
"content_hash": "e3dad32652afe7277e9a475a4d8e0a20",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 689,
"avg_line_length": 66.34146341463415,
"alnum_prop": 0.7080882352941177,
"repo_name": "resin-io-library/base-images",
"id": "8ab1b99f1afec20c25b32b34d9ea71ba1e197810",
"size": "2741",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "balena-base-images/node/kitra710/fedora/35/16.14.0/run/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "71234697"
},
{
"name": "JavaScript",
"bytes": "13096"
},
{
"name": "Shell",
"bytes": "12051936"
},
{
"name": "Smarty",
"bytes": "59789"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="ISO-8859-1"?>
<?xml-stylesheet href="latest_ob.xsl" type="text/xsl"?>
<current_observation version="1.0"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="http://www.weather.gov/view/current_observation.xsd">
<credit>NOAA's National Weather Service</credit>
<credit_URL>http://weather.gov/</credit_URL>
<image>
<url>http://weather.gov/images/xml_logo.gif</url>
<title>NOAA's National Weather Service</title>
<link>http://weather.gov</link>
</image>
<suggested_pickup>15 minutes after the hour</suggested_pickup>
<suggested_pickup_period>60</suggested_pickup_period>
<location>Unknown Station</location>
<station_id>SH409</station_id>
<observation_time>Last Updated on Dec 27 2015, 2:00 am AST</observation_time>
<observation_time_rfc822>Sun, 27 Dec 2015 02:00:00 -0400</observation_time_rfc822>
<temperature_string>61.9 F (16.6 C)</temperature_string>
<temp_f>61.9</temp_f>
<temp_c>16.6</temp_c>
<water_temp_f>61.9</water_temp_f>
<water_temp_c>16.6</water_temp_c>
<wind_string>North at 12.8 MPH (11.08 KT)</wind_string>
<wind_dir>North</wind_dir>
<wind_degrees>0</wind_degrees>
<wind_mph>12.8</wind_mph>
<wind_gust_mph>0.0</wind_gust_mph>
<wind_kt>11.08</wind_kt>
<pressure_string>1025.0 mb</pressure_string>
<pressure_mb>1025.0</pressure_mb>
<pressure_tendency_mb>0.6</pressure_tendency_mb>
<dewpoint_string>56.8 F (13.8 C)</dewpoint_string>
<dewpoint_f>56.8</dewpoint_f>
<dewpoint_c>13.8</dewpoint_c>
<windchill_string>60 F (16 C)</windchill_string>
<windchill_f>60</windchill_f>
<windchill_c>16</windchill_c>
<visibility_mi>11.00</visibility_mi>
<wave_height_m>0.01</wave_height_m>
<wave_height_ft>0.03</wave_height_ft>
<dominant_period_sec>5</dominant_period_sec>
<average_period_sec>0.0</average_period_sec>
<mean_wave_dir>North</mean_wave_dir>
<mean_wave_degrees>0</mean_wave_degrees>
<disclaimer_url>http://weather.gov/disclaimer.html</disclaimer_url>
<copyright_url>http://weather.gov/disclaimer.html</copyright_url>
<privacy_policy_url>http://weather.gov/notice.html</privacy_policy_url>
</current_observation>
| {
"content_hash": "2784d0aab819ef5c196cab947455db8c",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 90,
"avg_line_length": 44.04,
"alnum_prop": 0.717983651226158,
"repo_name": "pjconsidine/codingclass",
"id": "8a98d6215adf61e81184e3c6a406b48a13d1f7c1",
"size": "2202",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "WeatherApp/data/SH409.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8178"
},
{
"name": "HTML",
"bytes": "12855"
},
{
"name": "JavaScript",
"bytes": "5031361"
},
{
"name": "Python",
"bytes": "305"
},
{
"name": "Shell",
"bytes": "2810"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.sohu.cache.dao.AppDao">
<sql id="app_desc_add_fields">
app_id,name,user_id,status,intro,create_time,passed_time,type,officer,
ver_id,is_test,has_back_store,need_persistence,need_hot_back_up,forecase_qps,
forecast_obj_num,mem_alert_value,client_machine_room,app_key,client_conn_alert_value,hit_precent_alert_value,is_access_monitor,important_level,password,version_id,custom_password
</sql>
<sql id="app_desc_fields">
app_id,name,user_id,status,intro,create_time,passed_time,type,officer,
ver_id,is_test,has_back_store,need_persistence,need_hot_back_up,forecase_qps,
forecast_obj_num,mem_alert_value,client_machine_room,app_key,client_conn_alert_value,hit_precent_alert_value,is_access_monitor,important_level,password "pkey",version_id,custom_password
</sql>
<!--通过appId查询app的信息-->
<select id="getAppDescById" resultType="AppDesc" parameterType="long">
SELECT
<include refid="app_desc_fields"/>
FROM app_desc
WHERE app_id = #{appId};
</select>
<select id="getOnlineAppDescById" resultType="AppDesc" parameterType="long">
SELECT
<include refid="app_desc_fields"/>
FROM app_desc
WHERE app_id = #{appId}
AND status = 2
</select>
<!-- 根据应用名查询app信息 -->
<select id="getByAppName" resultType="AppDesc" parameterType="string">
SELECT
<include refid="app_desc_fields"/>
FROM app_desc
WHERE name = #{appName};
</select>
<insert id="save" parameterType="AppDesc" keyProperty="appId" useGeneratedKeys="true" >
insert into app_desc
(<include refid="app_desc_add_fields"/>)
values
(
#{appId},#{name},#{userId},#{status},#{intro},#{createTime},#{passedTime},
#{type},#{officer},#{verId},#{isTest},#{hasBackStore},#{needPersistence},
#{needHotBackUp},#{forecaseQps},#{forecastObjNum},#{memAlertValue},#{clientMachineRoom},
#{appKey},#{clientConnAlertValue},#{hitPrecentAlertValue},#{isAccessMonitor},#{importantLevel},#{pkey},#{versionId},#{customPassword}
)
</insert>
<update id="update" parameterType="AppDesc">
update app_desc
set name=#{name}, user_id=#{userId}, status=#{status}, intro=#{intro}, create_time=#{createTime},
passed_time=#{passedTime},type=#{type},
officer=#{officer},ver_id=#{verId},mem_alert_value=#{memAlertValue},
client_conn_alert_value=#{clientConnAlertValue},hit_precent_alert_value=#{hitPrecentAlertValue},is_access_monitor=#{isAccessMonitor},important_level=#{importantLevel},
password=#{pkey},version_id=#{versionId}
where app_id=#{appId}
</update>
<update id="updateWithCustomPwd" parameterType="AppDesc">
update app_desc
set name=#{name}, user_id=#{userId}, status=#{status}, intro=#{intro}, create_time=#{createTime},
passed_time=#{passedTime},type=#{type},
officer=#{officer},ver_id=#{verId},mem_alert_value=#{memAlertValue},
client_conn_alert_value=#{clientConnAlertValue},hit_precent_alert_value=#{hitPrecentAlertValue},is_access_monitor=#{isAccessMonitor},important_level=#{importantLevel},
password=#{pkey},version_id=#{versionId},custom_password=#{customPassword}
where app_id=#{appId}
</update>
<sql id="app_desc_select_column">
app_desc.app_id,name,app_desc.user_id,status,intro,create_time,passed_time,type,officer,ver_id,app_key,password "pkey",version_id,is_test,custom_password
</sql>
<!-- Lists the non-deleted apps (status below 3) owned by one user, joined
     through app_to_user. The raw "<" was escaped to &lt; — an unescaped "<"
     inside XML element content is a parse error in a mapper file. -->
<select id="getAppDescList" resultType="AppDesc" parameterType="long">
select
<include refid="app_desc_select_column"/>
from app_desc,app_to_user where app_to_user.user_id=#{userId} and app_to_user.app_id=app_desc.app_id and app_desc.status &lt; 3
</select>
<select id="getOnlineApps" resultType="AppDesc">
select
<include refid="app_desc_select_column"/>
from app_desc
where status = 2
</select>
<select id="getOnlineAppsNonTest" resultType="AppDesc">
select
<include refid="app_desc_select_column"/>
from app_desc
where status = 2 AND is_test = 0
</select>
<select id="getAllApps" resultType="AppDesc">
select
<include refid="app_desc_select_column"/>
from app_desc
order by app_id
</select>
<!-- Counts one user's non-deleted apps (status below 3); the join mirrors
     getAppDescList so the two stay consistent. The raw "<" was escaped to
     &lt; — an unescaped "<" in XML element content is a parse error. -->
<select id="getUserAppCount" resultType="int" parameterType="long">
select count(app_desc.app_id) from app_desc,app_to_user where app_to_user.user_id=#{userId} and app_to_user.app_id=app_desc.app_id and app_desc.status &lt; 3
</select>
<!-- Counts non-deleted apps (status below 3) matching an AppSearch filter.
     Fixes/changes versus the previous revision:
     - "<" escaped to &lt; (raw "<" in element content is an XML parse error);
     - single-branch <choose>/<when> replaced by the equivalent, idiomatic <if>;
     - the redundant "1=1" anchor dropped (a WHERE predicate always remains);
     - an isTest filter added so this count matches the rows returned by
       getAllAppDescList when callers paginate with the same AppSearch. -->
<select id="getAllAppCount" resultType="int" parameterType="AppSearch">
select count(app_id) from app_desc where status &lt; 3
<if test="appName != null and appName != ''">
    and instr(name, #{appName}) > 0
</if>
<if test="appType != null and appType > 0">
    and type = #{appType}
</if>
<if test="appStatus != null and appStatus >= 0">
    and status = #{appStatus}
</if>
<if test="appId != null and appId > 0">
    and app_id = #{appId}
</if>
<if test="importantLevel != null and importantLevel > 0">
    and important_level = #{importantLevel}
</if>
<if test="versionId != null and versionId > 0">
    and version_id = #{versionId}
</if>
<if test="userId != null and userId > 0">
    and app_id in (select app_id from app_to_user where user_id = #{userId})
</if>
<if test="isTest != null and isTest > 0">
    and is_test = #{isTest}
</if>
</select>
<!-- Paged search over non-deleted apps (status below 3). Every AppSearch field
     is optional; each populated one appends an AND filter. Results are newest
     first. Fixes versus the previous revision: "<" escaped to &lt; (raw "<"
     in element content is an XML parse error), single-branch <choose>/<when>
     replaced by the equivalent <if>, and the redundant "1=1" dropped. The
     page limit keeps its two-branch <choose>: a real LIMIT offset,size when
     there are more rows than one page, otherwise LIMIT totalCount. -->
<select id="getAllAppDescList" resultType="AppDesc" parameterType="AppSearch">
select
<include refid="app_desc_fields"/>
from app_desc where status &lt; 3
<if test="appName != null and appName != ''">
    and instr(name, #{appName}) > 0
</if>
<if test="appType != null and appType > 0">
    and type = #{appType}
</if>
<if test="appStatus != null and appStatus >= 0">
    and status = #{appStatus}
</if>
<if test="appId != null and appId > 0">
    and app_id = #{appId}
</if>
<if test="importantLevel != null and importantLevel > 0">
    and important_level = #{importantLevel}
</if>
<if test="versionId != null and versionId > 0">
    and version_id = #{versionId}
</if>
<if test="userId != null and userId > 0">
    and app_id in (select app_id from app_to_user where user_id = #{userId})
</if>
<if test="isTest != null and isTest > 0">
    and is_test = #{isTest}
</if>
ORDER BY app_id desc
<if test="page != null">
    <choose>
        <when test="page.totalCount > page.pageSize">
            limit #{page.start},#{page.pageSize}
        </when>
        <otherwise>
            limit #{page.totalCount}
        </otherwise>
    </choose>
</if>
</select>
<!-- Counts every app_desc row. Note: despite taking AppSearch, it applies no
     filters and includes deleted (status >= 3) apps — intentional? verify
     against callers before changing. -->
<select id="getTotalAppCount" resultType="int" parameterType="AppSearch">
select count(app_id) from app_desc
</select>
<!-- Rotates a single app's client access key. -->
<update id="updateAppKey">
update app_desc set app_key=#{appKey} where app_id=#{appId}
</update>
<!-- Bulk fetch by primary keys. NOTE(review): foreach reads collection
     "appIds" while parameterType is java.util.List; this only resolves if the
     DAO method annotates the argument with @Param("appIds") — confirm in the
     Java interface. An empty list would render "in ()" and fail at runtime. -->
<select id="getAppDescByIds" resultType="AppDesc" parameterType="java.util.List">
select * from app_desc where app_id in
<foreach collection="appIds" index="index" item="appId" open="(" separator="," close=")">
    #{appId}
</foreach>
</select>
</mapper>
| {
"content_hash": "cefd79a0bcf8e107414d24af2687e9ad",
"timestamp": "",
"source": "github",
"line_count": 224,
"max_line_length": 193,
"avg_line_length": 38.066964285714285,
"alnum_prop": 0.5825026386771432,
"repo_name": "sohutv/cachecloud",
"id": "4ac47f8063fb17fc62fa2a2b86faeb19143d17bb",
"size": "8559",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "cachecloud-web/src/main/resources/mapper/AppDao.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "383298"
},
{
"name": "FreeMarker",
"bytes": "74311"
},
{
"name": "HTML",
"bytes": "1764"
},
{
"name": "Java",
"bytes": "1497812"
},
{
"name": "JavaScript",
"bytes": "1481557"
},
{
"name": "PLpgSQL",
"bytes": "202324"
},
{
"name": "Shell",
"bytes": "8160"
}
],
"symlink_target": ""
} |
module Predicated
  # String/inspect rendering shared by every predicate node type.
  module PrintSupport
    # Single-node inspect is simply the to_s rendering behind the
    # caller-supplied indentation (containers override this).
    def inspect(indent="")
      indent + to_s
    end

    private

    # Renders a child for compact, single-line to_s output.
    def part_to_s(child)
      part_to_str(child) { |node| node.to_s }
    end

    # Renders a child for multi-line inspect output at the given indent.
    def part_inspect(child, indent="")
      part_to_str(child, indent) { |node| node.inspect(indent) }
    end

    # Shared child-rendering rules. Nested Binary predicates are handed to
    # the supplied block so the caller decides between to_s and inspect.
    def part_to_str(node, indent="")
      case node
      when String
        "'#{node}'"
      when Numeric, TrueClass, FalseClass
        node.to_s
      when Binary
        yield(node)
      when nil
        "nil"
      else
        "#{node.class.name}{'#{node.to_s}'}"
      end
    end
  end

  # Predicate with a single child ("inner"), e.g. a negation.
  class Unary < Predicate
    include PrintSupport

    def to_s
      "#{self.class.shorthand}(#{part_to_s(inner)})"
    end
  end

  # Predicate with two children ("left" / "right"), e.g. a comparison.
  class Binary < Predicate
    include PrintSupport

    def to_s
      "#{self.class.shorthand}(#{part_to_s(left)},#{part_to_s(right)})"
    end
  end

  # Multi-line inspect for container predicates: children are indented one
  # extra level and a trailing newline is added only at the top level.
  module ContainerToString
    def inspect(indent="")
      child_indent = "#{indent}  "
      rendered = [
        "#{indent}#{self.class.shorthand}(",
        "#{part_inspect(left, child_indent)},",
        part_inspect(right, child_indent)
      ].join("\n")
      rendered << "\n" << "#{indent})"
      rendered << "\n" if indent == ""
      rendered
    end
  end

  # Reopen the container predicates to give them the pretty inspect.
  class And; include ContainerToString end
  class Or; include ContainerToString end
end
| {
"content_hash": "baf8c8a9cd1da414623951e8fca11511",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 85,
"avg_line_length": 22.725806451612904,
"alnum_prop": 0.5592618878637331,
"repo_name": "sconover/predicated",
"id": "7939a1a3a41bff9196e1b9456204f2b4f60c74c7",
"size": "1409",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/predicated/print.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "101161"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
#### Remarks
null | {
"content_hash": "41bfc1d86a1eaec14318f0da9bed83c3",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "67bdc16aa588bf702e90778845ce1222041919ef",
"size": "186",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Liliopsida/Poales/Poaceae/Alopecurus/Alopecurus ponticus/ Syn. Alopecurus caucasicus/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_27) on Wed Nov 21 16:03:56 EST 2012 -->
<TITLE>
Uses of Class org.pentaho.di.core.database.SequenceMetaTests
</TITLE>
<META NAME="date" CONTENT="2012-11-21">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.pentaho.di.core.database.SequenceMetaTests";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/pentaho/di/core/database/SequenceMetaTests.html" title="class in org.pentaho.di.core.database"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
  <A HREF="../../../../../../index.html?org/pentaho/di/core/database/class-use/SequenceMetaTests.html" target="_top"><B>FRAMES</B></A>  &nbsp;
<A HREF="SequenceMetaTests.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.pentaho.di.core.database.SequenceMetaTests</B></H2>
</CENTER>
No usage of org.pentaho.di.core.database.SequenceMetaTests
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/pentaho/di/core/database/SequenceMetaTests.html" title="class in org.pentaho.di.core.database"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
  <A HREF="../../../../../../index.html?org/pentaho/di/core/database/class-use/SequenceMetaTests.html" target="_top"><B>FRAMES</B></A>  &nbsp;
<A HREF="SequenceMetaTests.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
</BODY>
</HTML>
| {
"content_hash": "581d795bba3e9554fb13627238370838",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 234,
"avg_line_length": 42.048611111111114,
"alnum_prop": 0.6132122213047069,
"repo_name": "ColFusion/PentahoKettle",
"id": "f4f536744f960dc9f395f95da06f832fac9b8670",
"size": "6055",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kettle-data-integration/docs/api/org/pentaho/di/core/database/class-use/SequenceMetaTests.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "21071"
},
{
"name": "Batchfile",
"bytes": "21366"
},
{
"name": "C",
"bytes": "7006"
},
{
"name": "CSS",
"bytes": "1952277"
},
{
"name": "Groff",
"bytes": "684"
},
{
"name": "Groovy",
"bytes": "33843"
},
{
"name": "HTML",
"bytes": "197173221"
},
{
"name": "Java",
"bytes": "3685348"
},
{
"name": "JavaScript",
"bytes": "31972698"
},
{
"name": "PHP",
"bytes": "224688"
},
{
"name": "Perl",
"bytes": "6881"
},
{
"name": "PigLatin",
"bytes": "7496"
},
{
"name": "Python",
"bytes": "109487"
},
{
"name": "Shell",
"bytes": "43881"
},
{
"name": "Smarty",
"bytes": "2952"
},
{
"name": "XQuery",
"bytes": "798"
},
{
"name": "XSLT",
"bytes": "562453"
}
],
"symlink_target": ""
} |
#import "BPMViewController.h"
#import "ScannerViewController.h"
#import "Constants.h"
#import "AppUtilities.h"
#import "CharacteristicReader.h"
#import "HelpViewController.h"
/*!
 * Private class extension: cached CBUUID handles for the Blood Pressure and
 * Battery GATT services/characteristics (UUID strings come from Constants.h)
 * plus the outlets that show one blood-pressure reading.
 */
@interface BPMViewController () {
CBUUID *bpmServiceUUID;
CBUUID *bpmBloodPressureMeasurementCharacteristicUUID;
CBUUID *bpmIntermediateCuffPressureCharacteristicUUID;
CBUUID *batteryServiceUUID;
CBUUID *batteryLevelCharacteristicUUID;
}
/*!
 * This property is set when the device successfully connects to the peripheral. It is used to cancel the connection
 * after user press Disconnect button.
 */
@property (strong, nonatomic) CBPeripheral *connectedPeripheral;
// Measurement labels and their unit labels; unit labels are hidden until a
// reading arrives (see clearUI / didUpdateValueForCharacteristic).
@property (weak, nonatomic) IBOutlet UILabel *systolic;
@property (weak, nonatomic) IBOutlet UILabel *systolicUnit;
@property (weak, nonatomic) IBOutlet UILabel *diastolic;
@property (weak, nonatomic) IBOutlet UILabel *diastolicUnit;
@property (weak, nonatomic) IBOutlet UILabel *meanAp;
@property (weak, nonatomic) IBOutlet UILabel *meanApUnit;
@property (weak, nonatomic) IBOutlet UILabel *pulse;
@property (weak, nonatomic) IBOutlet UILabel *timestamp;
@end
@implementation BPMViewController
@synthesize bluetoothManager;
@synthesize backgroundImage;
@synthesize verticalLabel;
@synthesize battery;
@synthesize deviceName;
@synthesize connectButton;
@synthesize connectedPeripheral;
/*!
 * Storyboard initializer. Builds the CBUUID objects once up front so service
 * and characteristic lookups later compare against cached instances.
 */
-(id)initWithCoder:(NSCoder *)aDecoder
{
self = [super initWithCoder:aDecoder];
if (self) {
// Custom initialization
bpmServiceUUID = [CBUUID UUIDWithString:bpmServiceUUIDString];
bpmBloodPressureMeasurementCharacteristicUUID = [CBUUID UUIDWithString:bpmBloodPressureMeasurementCharacteristicUUIDString];
bpmIntermediateCuffPressureCharacteristicUUID = [CBUUID UUIDWithString:bpmIntermediateCuffPressureCharacteristicUUIDString];
batteryServiceUUID = [CBUUID UUIDWithString:batteryServiceUUIDString];
batteryLevelCharacteristicUUID = [CBUUID UUIDWithString:batteryLevelCharacteristicUUIDString];
}
return self;
}
/*!
 * Picks the background image matching the device's screen height
 * (is4InchesIPhone comes from Constants.h) and rotates the decorative
 * side label 90 degrees counter-clockwise, shifting it into place.
 */
- (void)viewDidLoad
{
[super viewDidLoad];
if (is4InchesIPhone)
{
// 4 inches iPhone
UIImage *image = [UIImage imageNamed:@"Background4.png"];
[backgroundImage setImage:image];
}
else
{
// 3.5 inches iPhone
UIImage *image = [UIImage imageNamed:@"Background35.png"];
[backgroundImage setImage:image];
}
// Rotate the vertical label
self.verticalLabel.transform = CGAffineTransformRotate(CGAffineTransformMakeTranslation(-150.0f, 0.0f), (float)(-M_PI / 2));
}
/*!
 * Posted when the app backgrounds while still connected (observer is added in
 * didConnectPeripheral): shows a local notification telling the user data
 * collection continues in the background.
 */
-(void)appDidEnterBackground:(NSNotification *)_notification
{
[AppUtilities showBackgroundNotification:[NSString stringWithFormat:@"You are still connected to %@ peripheral. It will collect data also in background.",connectedPeripheral.name]];
}
/*!
 * Clears any still-pending local notifications once the app is foregrounded
 * again. NOTE(review): cancelAllLocalNotifications is deprecated since
 * iOS 10 — consider migrating to UNUserNotificationCenter.
 */
-(void)appDidBecomeActiveBackground:(NSNotification *)_notification
{
[[UIApplication sharedApplication] cancelAllLocalNotifications];
}
/*!
 * Toggle-button handler. A tap is a no-op unless a peripheral is connected;
 * when one is, the connection is cancelled and the disconnect delegate
 * callback takes care of resetting the UI.
 */
- (IBAction)connectOrDisconnectClicked {
    if (connectedPeripheral == nil) {
        return;
    }
    [bluetoothManager cancelPeripheralConnection:connectedPeripheral];
}
/*!
 * Gate for storyboard segues: the 'scan' segue is only allowed while no
 * peripheral is connected; every other segue always proceeds.
 */
-(BOOL)shouldPerformSegueWithIdentifier:(NSString *)identifier sender:(id)sender
{
    if ([identifier isEqualToString:@"scan"]) {
        return connectedPeripheral == nil;
    }
    return YES;
}
/*!
 * Wires up the destination controllers: the scanner gets this controller as
 * its delegate plus the BPM service UUID as a filter, and the help screen
 * gets its static text.
 */
-(void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender
{
if ([segue.identifier isEqualToString:@"scan"])
{
// Set this contoller as scanner delegate
ScannerViewController *controller = (ScannerViewController *)segue.destinationViewController;
controller.filterUUID = bpmServiceUUID;
controller.delegate = self;
}
else if ([[segue identifier] isEqualToString:@"help"]) {
HelpViewController *helpVC = [segue destinationViewController];
helpVC.helpText = [AppUtilities getBPMHelpText];
}
}
#pragma mark Scanner Delegate methods
/*!
 * Scanner delegate: user picked a peripheral. Adopts the scanner's central
 * manager (only one CBCentralManager may exist), clears stale readings, and
 * starts connecting with notify-on-notification enabled.
 */
-(void)centralManager:(CBCentralManager *)manager didPeripheralSelected:(CBPeripheral *)peripheral
{
// Some devices disconnects just after finishing measurement so we have to clear the UI before new connection, not after previous.
[self clearUI];
// We may not use more than one Central Manager instance. Let's just take the one returned from Scanner View Controller
bluetoothManager = manager;
bluetoothManager.delegate = self;
// The sensor has been selected, connect to it
peripheral.delegate = self;
NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:CBConnectPeripheralOptionNotifyOnNotificationKey];
[bluetoothManager connectPeripheral:peripheral options:options];
}
#pragma mark Central Manager delegate methods
/*!
 * Required CBCentralManagerDelegate callback. Currently only logs when
 * Bluetooth is unavailable; both branches are left as TODO stubs by the
 * original author — no user-facing handling is implemented yet.
 */
- (void)centralManagerDidUpdateState:(CBCentralManager *)central
{
if (central.state == CBCentralManagerStatePoweredOn) {
// TODO
}
else
{
// TODO
NSLog(@"Bluetooth not ON");
}
}
/*!
 * Connection established: updates the UI on the main queue (delegate events
 * arrive on the scanner's queue), requests notification permission where the
 * API exists (iOS 8+), registers the background/foreground observers, stores
 * the peripheral, and kicks off discovery of the BPM and Battery services.
 */
- (void)centralManager:(CBCentralManager *)central didConnectPeripheral:(CBPeripheral *)peripheral
{
// Scanner uses other queue to send events. We must edit UI in the main queue
dispatch_async(dispatch_get_main_queue(), ^{
[deviceName setText:peripheral.name];
[connectButton setTitle:@"DISCONNECT" forState:UIControlStateNormal];
});
//Following if condition display user permission alert for background notification
if ([UIApplication instancesRespondToSelector:@selector(registerUserNotificationSettings:)]) {
[[UIApplication sharedApplication] registerUserNotificationSettings:[UIUserNotificationSettings settingsForTypes:UIUserNotificationTypeAlert|UIUserNotificationTypeSound categories:nil]];
}
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(appDidEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(appDidBecomeActiveBackground:) name:UIApplicationDidBecomeActiveNotification object:nil];
// Peripheral has connected. Discover required services
connectedPeripheral = peripheral;
[peripheral discoverServices:@[bpmServiceUUID, batteryServiceUUID]];
}
/*!
 * Connection attempt failed: informs the user, restores the CONNECT button,
 * drops the peripheral reference and resets the readings — all on the main
 * queue, since the delegate fires on the scanner's queue.
 */
-(void)centralManager:(CBCentralManager *)central didFailToConnectPeripheral:(CBPeripheral *)peripheral error:(NSError *)error
{
// Scanner uses other queue to send events. We must edit UI in the main queue
dispatch_async(dispatch_get_main_queue(), ^{
[AppUtilities showAlert:@"Error" alertMessage:@"Connecting to the peripheral failed. Try again"];
[connectButton setTitle:@"CONNECT" forState:UIControlStateNormal];
connectedPeripheral = nil;
[self clearUI];
});
}
/*!
 * Peripheral disconnected (by the user, the device, or an error): restores
 * the CONNECT button, posts a local notification if the app is backgrounded,
 * clears the peripheral reference and removes the lifecycle observers added
 * in didConnectPeripheral. Note the last readings stay on screen; the UI is
 * cleared only right before the next connection (see didPeripheralSelected).
 */
- (void)centralManager:(CBCentralManager *)central didDisconnectPeripheral:(CBPeripheral *)peripheral error:(NSError *)error
{
// Scanner uses other queue to send events. We must edit UI in the main queue
dispatch_async(dispatch_get_main_queue(), ^{
[connectButton setTitle:@"CONNECT" forState:UIControlStateNormal];
if ([AppUtilities isApplicationStateInactiveORBackground]) {
[AppUtilities showBackgroundNotification:[NSString stringWithFormat:@"%@ peripheral is disconnected",peripheral.name]];
}
connectedPeripheral = nil;
[[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidBecomeActiveNotification object:nil];
[[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidEnterBackgroundNotification object:nil];
});
}
/*!
 * Resets every measurement widget to its placeholder state. Invoked right
 * before a new connection attempt so stale readings are never shown.
 */
- (void) clearUI
{
    [deviceName setText:@"DEFAULT BPM"];

    // Tag 0 marks "battery-level notifications not yet enabled"
    // (see didUpdateValueForCharacteristic).
    battery.tag = 0;
    [battery setTitle:@"n/a" forState:UIControlStateDisabled];

    self.systolic.text = @"-";
    self.systolicUnit.hidden = YES;
    self.diastolic.text = @"-";
    self.diastolicUnit.hidden = YES;
    self.meanAp.text = @"-";
    self.meanApUnit.hidden = YES;
    self.pulse.text = @"-";
    self.timestamp.text = @"n/a";
}
#pragma mark Peripheral delegate methods
/*!
 * Service discovery finished: on error the connection is dropped; otherwise
 * characteristic discovery is started for the BPM service (measurement +
 * intermediate cuff pressure) and the Battery service (battery level).
 */
-(void)peripheral:(CBPeripheral *)peripheral didDiscoverServices:(NSError *)error
{
if (error)
{
NSLog(@"Error discovering service: %@", [error localizedDescription]);
[bluetoothManager cancelPeripheralConnection:connectedPeripheral];
return;
}
for (CBService *service in peripheral.services)
{
// Discovers the characteristics for a given service
if ([service.UUID isEqual:bpmServiceUUID])
{
[connectedPeripheral discoverCharacteristics:@[bpmBloodPressureMeasurementCharacteristicUUID, bpmIntermediateCuffPressureCharacteristicUUID] forService:service];
}
else if ([service.UUID isEqual:batteryServiceUUID])
{
[connectedPeripheral discoverCharacteristics:@[batteryLevelCharacteristicUUID] forService:service];
}
}
}
/*!
 * Characteristic discovery finished: subscribes to notifications/indications
 * on both BPM data characteristics, and issues a one-off read of the battery
 * level (battery notifications are enabled later, once the first value
 * arrives — see didUpdateValueForCharacteristic).
 * NOTE(review): the error parameter is not checked here.
 */
-(void)peripheral:(CBPeripheral *)peripheral didDiscoverCharacteristicsForService:(CBService *)service error:(NSError *)error
{
// Characteristics for one of those services has been found
if ([service.UUID isEqual:bpmServiceUUID])
{
for (CBCharacteristic *characteristic in service.characteristics)
{
if ([characteristic.UUID isEqual:bpmBloodPressureMeasurementCharacteristicUUID] ||
[characteristic.UUID isEqual:bpmIntermediateCuffPressureCharacteristicUUID])
{
// Enable notifications and indications on data characteristics
[peripheral setNotifyValue:YES forCharacteristic:characteristic];
}
}
} else if ([service.UUID isEqual:batteryServiceUUID])
{
for (CBCharacteristic *characteristic in service.characteristics)
{
if ([characteristic.UUID isEqual:batteryLevelCharacteristicUUID])
{
// Read the current battery value
[peripheral readValueForCharacteristic:characteristic];
break;
}
}
}
}
/*!
 * Central parsing routine: every characteristic value (battery level, final
 * blood-pressure measurement, intermediate cuff pressure) lands here. The raw
 * bytes are walked sequentially with CharacteristicReader, which advances the
 * array pointer as it consumes fields. All UI updates happen on the main
 * queue because CoreBluetooth delivers these callbacks on another queue.
 * NOTE(review): the error parameter is not checked before parsing.
 */
-(void)peripheral:(CBPeripheral *)peripheral didUpdateValueForCharacteristic:(CBCharacteristic *)characteristic error:(NSError *)error
{
// Scanner uses other queue to send events. We must edit UI in the main queue
dispatch_async(dispatch_get_main_queue(), ^{
// Decode the characteristic data
NSData *data = characteristic.value;
uint8_t *array = (uint8_t*) data.bytes;
if ([characteristic.UUID isEqual:batteryLevelCharacteristicUUID])
{
UInt8 batteryLevel = [CharacteristicReader readUInt8Value:&array];
NSString* text = [[NSString alloc] initWithFormat:@"%d%%", batteryLevel];
[battery setTitle:text forState:UIControlStateDisabled];
// tag == 0 means notifications have not been enabled yet (reset in clearUI)
if (battery.tag == 0)
{
// If battery level notifications are available, enable them
if (([characteristic properties] & CBCharacteristicPropertyNotify) > 0)
{
battery.tag = 1; // mark that we have enabled notifications
// Enable notification on data characteristic
[peripheral setNotifyValue:YES forCharacteristic:characteristic];
}
}
}
else if ([characteristic.UUID isEqual:bpmBloodPressureMeasurementCharacteristicUUID] ||
[characteristic.UUID isEqual:bpmIntermediateCuffPressureCharacteristicUUID])
{
// First byte is the flags field; bits select units and optional fields.
UInt8 flags = [CharacteristicReader readUInt8Value:&array];
BOOL kPa = (flags & 0x01) > 0;
BOOL timestampPresent = (flags & 0x02) > 0;
BOOL pulseRatePresent = (flags & 0x04) > 0;
// Update units
if (kPa)
{
self.systolicUnit.text = @"kPa";
self.diastolicUnit.text = @"kPa";
self.meanApUnit.text = @"kPa";
}
else
{
self.systolicUnit.text = @"mmHg";
self.diastolicUnit.text = @"mmHg";
self.meanApUnit.text = @"mmHg";
}
// Read main values
if ([characteristic.UUID isEqual:bpmBloodPressureMeasurementCharacteristicUUID])
{
// Final measurement: systolic, diastolic and mean arterial pressure.
float systolicValue = [CharacteristicReader readSFloatValue:&array];
float diastolicValue = [CharacteristicReader readSFloatValue:&array];
float meanApValue = [CharacteristicReader readSFloatValue:&array];
self.systolic.text = [NSString stringWithFormat:@"%.1f", systolicValue];
self.diastolic.text = [NSString stringWithFormat:@"%.1f", diastolicValue];
self.meanAp.text = [NSString stringWithFormat:@"%.1f", meanApValue];
self.systolicUnit.hidden = NO;
self.diastolicUnit.hidden = NO;
self.meanApUnit.hidden = NO;
}
else
{
// Intermediate cuff pressure: only the first value is meaningful.
float systolicValue = [CharacteristicReader readSFloatValue:&array];
// Skips 4 bytes — presumably the two unused 2-byte SFloat fields
// (diastolic / MAP slots); confirm against CharacteristicReader.
array += 4;
self.systolic.text = [NSString stringWithFormat:@"%.1f", systolicValue];
self.diastolic.text = @"n/a";
self.meanAp.text = @"n/a";
self.systolicUnit.hidden = NO;
self.diastolicUnit.hidden = YES;
self.meanApUnit.hidden = YES;
}
// Read timestamp
if (timestampPresent)
{
NSDate* date = [CharacteristicReader readDateTime:&array];
NSDateFormatter *dateFormat = [[NSDateFormatter alloc] init];
// NOTE(review): "hh" is a 12-hour clock with no AM/PM marker; "HH"
// (24-hour) may have been intended — confirm.
[dateFormat setDateFormat:@"dd.MM.yyyy, hh:mm"];
NSString* dateFormattedString = [dateFormat stringFromDate:date];
self.timestamp.text = dateFormattedString;
}
else
{
self.timestamp.text = @"n/a";
}
// Read pulse
if (pulseRatePresent)
{
float pulseValue = [CharacteristicReader readSFloatValue:&array];
self.pulse.text = [NSString stringWithFormat:@"%.1f", pulseValue];
}
else
{
self.pulse.text = @"-";
}
}
});
}
@end
| {
"content_hash": "5b1e72efd017b94434687901e6ddd593",
"timestamp": "",
"source": "github",
"line_count": 371,
"max_line_length": 194,
"avg_line_length": 39.3800539083558,
"alnum_prop": 0.6668720054757016,
"repo_name": "szqt/IOS-nRF-Toolbox",
"id": "940287699910b4f599fc6cbff91dd46c3323a1fe",
"size": "16159",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nRF Toolbox/BPM/BPMViewController.m",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "3112"
},
{
"name": "Objective-C",
"bytes": "481188"
}
],
"symlink_target": ""
} |
package com.sourceallies.aeleakdetection;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.hamcrest.text.*;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.http.MediaType;
import org.springframework.mock.web.MockServletContext;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
import com.sourceallies.aeleakdetection.controller.AETestController;
/**
 * Smoke test for {@link AETestController}: builds a standalone MockMvc (no
 * full Spring context — only the one controller is registered) and verifies
 * the root endpoint returns the expected greeting.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = MockServletContext.class)
@WebAppConfiguration
public class BaseTest {
// Rebuilt before every test by setUp(), so tests stay independent.
private MockMvc mvc;
@Before
public void setUp() throws Exception {
mvc = MockMvcBuilders.standaloneSetup(new AETestController()).build();
}
// GET / should answer 200 with the exact greeting body.
@Test
public void getHello() throws Exception {
mvc.perform(MockMvcRequestBuilders.get("/").accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(content().string("Greetings from Spring Boot!"));
}
}
| {
"content_hash": "036ace98b311b209d143473a25f605a2",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 87,
"avg_line_length": 36.07692307692308,
"alnum_prop": 0.7846481876332623,
"repo_name": "iowatiger08/AELeakDetectLibrary",
"id": "611ef9ba6226d340da92919b5353eed45e139dfe",
"size": "1407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/java/com/sourceallies/aeleakdetection/BaseTest.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "11916"
}
],
"symlink_target": ""
} |
<?xml version="1.0"?>
<!-- TCK build script for the ejb3_field_types persistence tests: packages the
     entity classes into a PAR (with the shared template persistence.xml) and
     bundles that PAR into the vehicle EAR's lib directory. -->
<project name="ejb3" basedir="." default="usage">
<!-- IMPORTS -->
<import file="${ts.home}/src/com/sun/ts/tests/ejb30/persistence/common/persistence-import.xml"/>
<import file="${ts.home}/bin/xml/ts.import.xml"/>
<property name="app.name" value="ejb3_field_types"/>
<!-- Entity classes carried inside the PAR. -->
<property name="par.classes"
value="${pkg.dir}/DataTypes.class,
${pkg.dir}/DataTypes2.class"/>
<target name="package">
<ts.par descriptor="persistence.xml"
descriptordir="${ejb3.persistence.common.pkg}/template"
archivename="${app.name}"
includedefaultfiles="false">
<fileset dir="${class.dir}"
includes="com/sun/ts/tests/ejb30/persistence/types/common/Grade.class,
com/sun/ts/tests/ejb30/persistence/types/field/DataTypes.class,
com/sun/ts/tests/ejb30/persistence/types/field/DataTypes2.class"/>
</ts.par>
<!-- Single-EAR vehicle; the PAR is placed on the EAR's library path. -->
<ts.vehicles name="${app.name}" singleear="true">
<ear-elements>
<zipfileset dir="${dist.dir}/${pkg.dir}"
includes="${app.name}.jar"
prefix="lib"/>
</ear-elements>
</ts.vehicles>
</target>
</project>
| {
"content_hash": "0dadde4df33baef91c33fa5172c568ef",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 99,
"avg_line_length": 31.526315789473685,
"alnum_prop": 0.6001669449081803,
"repo_name": "datanucleus/tests",
"id": "19ab54d21e1d939e1b2696c2ed2a7926c45d7122",
"size": "1198",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jpa/TCK_1.0/src/com/sun/ts/tests/ejb30/persistence/types/field/build.xml",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "28945"
},
{
"name": "HTML",
"bytes": "34413"
},
{
"name": "Java",
"bytes": "11934658"
},
{
"name": "Perl",
"bytes": "9273"
},
{
"name": "Python",
"bytes": "3087"
},
{
"name": "Shell",
"bytes": "20836"
},
{
"name": "XSLT",
"bytes": "211062"
}
],
"symlink_target": ""
} |
package grammar
import (
"github.com/google/badwolf/bql/lexer"
"github.com/google/badwolf/bql/semantic"
)
var (
// bql holds the plain BQL LL1 grammar, without semantic hooks.
bql *Grammar
// semanticBQL contains the BQL grammar with hooks injected.
semanticBQL *Grammar
)
// init eagerly builds both grammar tables at package load time. initBQL runs
// first; initSemanticBQL presumably layers semantic hooks on top of the plain
// grammar, so the order matters — confirm in initSemanticBQL.
func init() {
initBQL()
initSemanticBQL()
}
// BQL returns the plain BQL LL1 grammar (no semantic hooks attached).
func BQL() *Grammar {
return bql
}
// SemanticBQL returns the BQL grammar with semantic hooks injected.
func SemanticBQL() *Grammar {
return semanticBQL
}
// initBQL builds the plain (hook-free) LL1 grammar for BQL. Map keys are
// non-terminal symbols; each Clause lists one production alternative in
// left-to-right order. An empty Clause ({}) appears to serve as the epsilon
// (empty) alternative, making that symbol optional — TODO confirm against the
// Grammar/parser implementation.
func initBQL() {
	bql = &Grammar{
		// Top-level statement forms: query, insert, delete, create, drop.
		"START": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemQuery),
					NewSymbol("VARS"),
					NewTokenType(lexer.ItemFrom),
					NewSymbol("GRAPHS"),
					NewSymbol("WHERE"),
					NewSymbol("GROUP_BY"),
					NewSymbol("ORDER_BY"),
					NewSymbol("HAVING"),
					NewSymbol("GLOBAL_TIME_BOUND"),
					NewSymbol("LIMIT"),
					NewTokenType(lexer.ItemSemicolon),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemInsert),
					NewTokenType(lexer.ItemData),
					NewTokenType(lexer.ItemInto),
					NewSymbol("GRAPHS"),
					NewTokenType(lexer.ItemLBracket),
					NewTokenType(lexer.ItemNode),
					NewTokenType(lexer.ItemPredicate),
					NewSymbol("INSERT_OBJECT"),
					NewSymbol("INSERT_DATA"),
					NewTokenType(lexer.ItemRBracket),
					NewTokenType(lexer.ItemSemicolon),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemDelete),
					NewTokenType(lexer.ItemData),
					NewTokenType(lexer.ItemFrom),
					NewSymbol("GRAPHS"),
					NewTokenType(lexer.ItemLBracket),
					NewTokenType(lexer.ItemNode),
					NewTokenType(lexer.ItemPredicate),
					NewSymbol("DELETE_OBJECT"),
					NewSymbol("DELETE_DATA"),
					NewTokenType(lexer.ItemRBracket),
					NewTokenType(lexer.ItemSemicolon),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemCreate),
					NewSymbol("CREATE_GRAPHS"),
					NewTokenType(lexer.ItemSemicolon),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemDrop),
					NewSymbol("DROP_GRAPHS"),
					NewTokenType(lexer.ItemSemicolon),
				},
			},
		},
		"CREATE_GRAPHS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemGraph),
					NewSymbol("GRAPHS"),
				},
			},
		},
		"DROP_GRAPHS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemGraph),
					NewSymbol("GRAPHS"),
				},
			},
		},
		// Projection: plain bindings, count(...), and sum(...) expressions.
		"VARS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemBinding),
					NewSymbol("VARS_AS"),
					NewSymbol("MORE_VARS"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemCount),
					NewTokenType(lexer.ItemLPar),
					NewSymbol("COUNT_DISTINCT"),
					NewTokenType(lexer.ItemBinding),
					NewTokenType(lexer.ItemRPar),
					NewTokenType(lexer.ItemAs),
					NewTokenType(lexer.ItemBinding),
					NewSymbol("MORE_VARS"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemSum),
					NewTokenType(lexer.ItemLPar),
					NewTokenType(lexer.ItemBinding),
					NewTokenType(lexer.ItemRPar),
					NewTokenType(lexer.ItemAs),
					NewTokenType(lexer.ItemBinding),
					NewSymbol("MORE_VARS"),
				},
			},
		},
		"COUNT_DISTINCT": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemDistinct),
				},
			},
			{},
		},
		"VARS_AS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAs),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"MORE_VARS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemComma),
					NewSymbol("VARS"),
				},
			},
			{},
		},
		"GRAPHS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemBinding),
					NewSymbol("MORE_GRAPHS"),
				},
			},
		},
		"MORE_GRAPHS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemComma),
					NewTokenType(lexer.ItemBinding),
					NewSymbol("MORE_GRAPHS"),
				},
			},
			{},
		},
		// Graph pattern matching: WHERE { subject predicate object . ... }.
		"WHERE": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemWhere),
					NewTokenType(lexer.ItemLBracket),
					NewSymbol("CLAUSES"),
					NewTokenType(lexer.ItemRBracket),
				},
			},
		},
		"CLAUSES": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemNode),
					NewSymbol("SUBJECT_EXTRACT"),
					NewSymbol("PREDICATE"),
					NewSymbol("OBJECT"),
					NewSymbol("MORE_CLAUSES"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemBinding),
					NewSymbol("SUBJECT_EXTRACT"),
					NewSymbol("PREDICATE"),
					NewSymbol("OBJECT"),
					NewSymbol("MORE_CLAUSES"),
				},
			},
		},
		"SUBJECT_EXTRACT": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAs),
					NewTokenType(lexer.ItemBinding),
					NewSymbol("SUBJECT_TYPE"),
					NewSymbol("SUBJECT_ID"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemType),
					NewTokenType(lexer.ItemBinding),
					NewSymbol("SUBJECT_ID"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemID),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"SUBJECT_TYPE": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemType),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"SUBJECT_ID": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemID),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"PREDICATE": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemPredicate),
					NewSymbol("PREDICATE_AS"),
					NewSymbol("PREDICATE_ID"),
					NewSymbol("PREDICATE_AT"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemPredicateBound),
					NewSymbol("PREDICATE_AS"),
					NewSymbol("PREDICATE_ID"),
					NewSymbol("PREDICATE_BOUND_AT"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemBinding),
					NewSymbol("PREDICATE_AS"),
					NewSymbol("PREDICATE_ID"),
					NewSymbol("PREDICATE_AT"),
				},
			},
		},
		"PREDICATE_AS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAs),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"PREDICATE_ID": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemID),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"PREDICATE_AT": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAt),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"PREDICATE_BOUND_AT": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAt),
					NewSymbol("PREDICATE_BOUND_AT_BINDINGS"),
				},
			},
			{},
		},
		"PREDICATE_BOUND_AT_BINDINGS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemBinding),
					NewSymbol("PREDICATE_BOUND_AT_BINDINGS_END"),
				},
			},
			{},
		},
		"PREDICATE_BOUND_AT_BINDINGS_END": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemComma),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		// Object position: literals, nodes, predicates, or bindings.
		"OBJECT": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemLiteral),
					NewSymbol("OBJECT_LITERAL_AS"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemNode),
					NewSymbol("OBJECT_SUBJECT_EXTRACT"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemPredicate),
					NewSymbol("OBJECT_PREDICATE_AS"),
					NewSymbol("OBJECT_PREDICATE_ID"),
					NewSymbol("OBJECT_PREDICATE_AT"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemPredicateBound),
					NewSymbol("OBJECT_PREDICATE_AS"),
					NewSymbol("OBJECT_PREDICATE_ID"),
					NewSymbol("OBJECT_PREDICATE_BOUND_AT"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemBinding),
					NewSymbol("OBJECT_LITERAL_BINDING_AS"),
					NewSymbol("OBJECT_LITERAL_BINDING_TYPE"),
					NewSymbol("OBJECT_LITERAL_BINDING_ID"),
					NewSymbol("OBJECT_LITERAL_BINDING_AT"),
				},
			},
		},
		"OBJECT_SUBJECT_EXTRACT": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAs),
					NewTokenType(lexer.ItemBinding),
					NewSymbol("OBJECT_SUBJECT_TYPE"),
					NewSymbol("OBJECT_SUBJECT_ID"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemType),
					NewTokenType(lexer.ItemBinding),
					NewSymbol("OBJECT_SUBJECT_ID"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemID),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"OBJECT_SUBJECT_TYPE": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemType),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"OBJECT_SUBJECT_ID": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemID),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"OBJECT_PREDICATE_AS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAs),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"OBJECT_PREDICATE_ID": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemID),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"OBJECT_PREDICATE_AT": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAt),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"OBJECT_PREDICATE_BOUND_AT": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAt),
					NewSymbol("OBJECT_PREDICATE_BOUND_AT_BINDINGS"),
				},
			},
			{},
		},
		"OBJECT_PREDICATE_BOUND_AT_BINDINGS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemBinding),
					NewSymbol("OBJECT_PREDICATE_BOUND_AT_BINDINGS_END"),
				},
			},
			{},
		},
		"OBJECT_PREDICATE_BOUND_AT_BINDINGS_END": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemComma),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"OBJECT_LITERAL_AS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAs),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"OBJECT_LITERAL_BINDING_AS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAs),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"OBJECT_LITERAL_BINDING_TYPE": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemType),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"OBJECT_LITERAL_BINDING_ID": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemID),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"OBJECT_LITERAL_BINDING_AT": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAt),
					NewTokenType(lexer.ItemBinding),
				},
			},
			{},
		},
		"MORE_CLAUSES": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemDot),
					NewSymbol("CLAUSES"),
				},
			},
			{},
		},
		// Query modifiers: grouping, ordering, having, time bounds, limit.
		"GROUP_BY": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemGroup),
					NewTokenType(lexer.ItemBy),
					NewTokenType(lexer.ItemBinding),
					NewSymbol("GROUP_BY_BINDINGS"),
				},
			},
			{},
		},
		"GROUP_BY_BINDINGS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemComma),
					NewTokenType(lexer.ItemBinding),
					NewSymbol("GROUP_BY_BINDINGS"),
				},
			},
			{},
		},
		"ORDER_BY": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemOrder),
					NewTokenType(lexer.ItemBy),
					NewTokenType(lexer.ItemBinding),
					NewSymbol("ORDER_BY_DIRECTION"),
					NewSymbol("ORDER_BY_BINDINGS"),
				},
			},
			{},
		},
		"ORDER_BY_DIRECTION": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAsc),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemDesc),
				},
			},
			{},
		},
		"ORDER_BY_BINDINGS": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemComma),
					NewTokenType(lexer.ItemBinding),
					NewSymbol("ORDER_BY_DIRECTION"),
					NewSymbol("ORDER_BY_BINDINGS"),
				},
			},
			{},
		},
		"HAVING": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemHaving),
					NewSymbol("HAVING_CLAUSE"),
				},
			},
			{},
		},
		"HAVING_CLAUSE": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemBinding),
					NewSymbol("HAVING_CLAUSE_BINARY_COMPOSITE"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemNot),
					NewSymbol("HAVING_CLAUSE"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemLPar),
					NewSymbol("HAVING_CLAUSE"),
					NewTokenType(lexer.ItemRPar),
					NewSymbol("HAVING_CLAUSE_BINARY_COMPOSITE"),
				},
			},
		},
		"HAVING_CLAUSE_BINARY_COMPOSITE": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAnd),
					NewSymbol("HAVING_CLAUSE"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemOr),
					NewSymbol("HAVING_CLAUSE"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemEQ),
					NewSymbol("HAVING_CLAUSE"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemLT),
					NewSymbol("HAVING_CLAUSE"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemGT),
					NewSymbol("HAVING_CLAUSE"),
				},
			},
			{},
		},
		"GLOBAL_TIME_BOUND": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemBefore),
					NewTokenType(lexer.ItemPredicate),
					NewSymbol("GLOBAL_TIME_BOUND_COMPOSITE"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAfter),
					NewTokenType(lexer.ItemPredicate),
					NewSymbol("GLOBAL_TIME_BOUND_COMPOSITE"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemBetween),
					NewTokenType(lexer.ItemPredicate),
					NewTokenType(lexer.ItemComma),
					NewTokenType(lexer.ItemPredicate),
					NewSymbol("GLOBAL_TIME_BOUND_COMPOSITE"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemLPar),
					NewSymbol("GLOBAL_TIME_BOUND"),
					NewTokenType(lexer.ItemRPar),
				},
			},
			{},
		},
		"GLOBAL_TIME_BOUND_COMPOSITE": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemAnd),
					NewSymbol("GLOBAL_TIME_BOUND"),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemOr),
					NewSymbol("GLOBAL_TIME_BOUND"),
				},
			},
			{},
		},
		"LIMIT": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemLimit),
					NewTokenType(lexer.ItemLiteral),
				},
			},
			{},
		},
		// Insert and delete payloads: the object position and the optional
		// dot-separated continuation triples.
		"INSERT_OBJECT": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemNode),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemPredicate),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemLiteral),
				},
			},
		},
		"INSERT_DATA": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemDot),
					NewTokenType(lexer.ItemNode),
					NewTokenType(lexer.ItemPredicate),
					NewSymbol("INSERT_OBJECT"),
					NewSymbol("INSERT_DATA"),
				},
			},
			{},
		},
		"DELETE_OBJECT": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemNode),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemPredicate),
				},
			},
			{
				Elements: []Element{
					NewTokenType(lexer.ItemLiteral),
				},
			},
		},
		"DELETE_DATA": []*Clause{
			{
				Elements: []Element{
					NewTokenType(lexer.ItemDot),
					NewTokenType(lexer.ItemNode),
					NewTokenType(lexer.ItemPredicate),
					NewSymbol("DELETE_OBJECT"),
					NewSymbol("DELETE_DATA"),
				},
			},
			{},
		},
	}
}
// cloneGrammar copies every production of src into dst, allocating a fresh
// Clause value for each entry so that hook assignments made on the copy do
// not mutate the original grammar. Note this is a shallow, clause-level copy:
// the Clause structs are copied by value, so any reference-typed fields
// inside them still alias the originals. dst must be non-nil and initialized.
func cloneGrammar(dst, src *Grammar) {
	for sym, clauses := range *src {
		// Preallocate to the known final length to avoid repeated growth.
		cloned := make([]*Clause, 0, len(clauses))
		for _, c := range clauses {
			cp := new(Clause)
			*cp = *c
			cloned = append(cloned, cp)
		}
		(*dst)[sym] = cloned
	}
}
// initSemanticBQL builds semanticBQL as a clause-wise copy of the plain bql
// grammar and attaches the semantic hooks that accumulate graphs, inserted or
// deleted data, and where-clause information during parsing. It assumes
// initBQL has already populated bql.
func initSemanticBQL() {
	semanticBQL = &Grammar{}
	cloneGrammar(semanticBQL, bql)
	// Create and Drop semantic hooks for type.
	for _, cls := range (*semanticBQL)["CREATE_GRAPHS"] {
		cls.ProcessEnd = semantic.TypeBindingClauseHook(semantic.Create)
	}
	for _, cls := range (*semanticBQL)["DROP_GRAPHS"] {
		cls.ProcessEnd = semantic.TypeBindingClauseHook(semantic.Drop)
	}
	// Add graph binding collection to GRAPHS and MORE_GRAPHS clauses.
	graphSymbols := []semantic.Symbol{"GRAPHS", "MORE_GRAPHS"}
	for _, sym := range graphSymbols {
		for _, cls := range (*semanticBQL)[sym] {
			cls.ProcessedElement = semantic.GraphAccumulatorHook()
		}
	}
	// Insert and Delete semantic hooks addition.
	symbols := []semantic.Symbol{
		"INSERT_OBJECT", "INSERT_DATA", "DELETE_OBJECT", "DELETE_DATA",
	}
	for _, sym := range symbols {
		for _, cls := range (*semanticBQL)[sym] {
			cls.ProcessedElement = semantic.DataAccumulatorHook()
		}
	}
	for _, cls := range (*semanticBQL)["INSERT_OBJECT"] {
		cls.ProcessEnd = semantic.TypeBindingClauseHook(semantic.Insert)
	}
	for _, cls := range (*semanticBQL)["DELETE_OBJECT"] {
		cls.ProcessEnd = semantic.TypeBindingClauseHook(semantic.Delete)
	}
	// Only the INSERT/DELETE alternatives of START accumulate data; the
	// query/create/drop alternatives are skipped.
	for _, cls := range (*semanticBQL)["START"] {
		if t := cls.Elements[0].Token(); t != lexer.ItemInsert && t != lexer.ItemDelete {
			continue
		}
		cls.ProcessedElement = semantic.DataAccumulatorHook()
	}
	// Query semantic hooks.
	for _, cls := range (*semanticBQL)["WHERE"] {
		cls.ProcessStart = semantic.WhereInitWorkingClauseHook()
		cls.ProcessEnd = semantic.WhereNextWorkingClauseHook()
	}
	clauseSymbols := []semantic.Symbol{
		"CLAUSES", "MORE_CLAUSES",
	}
	for _, sym := range clauseSymbols {
		for _, cls := range (*semanticBQL)[sym] {
			cls.ProcessStart = semantic.WhereNextWorkingClauseHook()
			cls.ProcessEnd = semantic.WhereNextWorkingClauseHook()
		}
	}
	subSymbols := []semantic.Symbol{
		"CLAUSES", "SUBJECT_EXTRACT", "SUBJECT_TYPE", "SUBJECT_ID",
	}
	for _, sym := range subSymbols {
		for _, cls := range (*semanticBQL)[sym] {
			cls.ProcessedElement = semantic.WhereSubjectClauseHook()
		}
	}
	predSymbols := []semantic.Symbol{
		"PREDICATE", "PREDICATE_AS", "PREDICATE_ID", "PREDICATE_AT", "PREDICATE_BOUND_AT",
		"PREDICATE_BOUND_AT_BINDINGS", "PREDICATE_BOUND_AT_BINDINGS_END",
	}
	for _, sym := range predSymbols {
		for _, cls := range (*semanticBQL)[sym] {
			cls.ProcessedElement = semantic.WherePredicateClauseHook()
		}
	}
	objSymbols := []semantic.Symbol{
		"OBJECT", "OBJECT_SUBJECT_EXTRACT", "OBJECT_SUBJECT_TYPE", "OBJECT_SUBJECT_ID",
		"OBJECT_PREDICATE_AS", "OBJECT_PREDICATE_ID", "OBJECT_PREDICATE_AT",
		"OBJECT_PREDICATE_BOUND_AT", "OBJECT_PREDICATE_BOUND_AT_BINDINGS",
		"OBJECT_PREDICATE_BOUND_AT_BINDINGS_END", "OBJECT_LITERAL_AS",
		"OBJECT_LITERAL_BINDING_AS", "OBJECT_LITERAL_BINDING_TYPE",
		"OBJECT_LITERAL_BINDING_ID", "OBJECT_LITERAL_BINDING_AT",
	}
	for _, sym := range objSymbols {
		for _, cls := range (*semanticBQL)[sym] {
			cls.ProcessedElement = semantic.WhereObjectClauseHook()
		}
	}
}
| {
"content_hash": "691098a2d4a75c8648ad9c2d3db16c06",
"timestamp": "",
"source": "github",
"line_count": 865,
"max_line_length": 84,
"avg_line_length": 21.4485549132948,
"alnum_prop": 0.5987171885948365,
"repo_name": "pombredanne/badwolf",
"id": "9a608d7157ff6cec7f12e10ce43b5e3e684eca18",
"size": "19608",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bql/grammar/grammar.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "285672"
}
],
"symlink_target": ""
} |
package io.swagger.api.impl;
import io.swagger.api.*;
import io.swagger.model.*;
import io.swagger.model.AuthorizeBody;
import io.swagger.model.BookBody;
import io.swagger.model.CreateBody;
import io.swagger.model.OnUsCreditTransferNLInfo;
import io.swagger.model.RejectClosedAccountBody;
import io.swagger.model.RejectExecutionDateInThePastBody;
import java.util.List;
import io.swagger.api.NotFoundException;
import java.io.InputStream;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
@javax.annotation.Generated(value = "class io.swagger.codegen.languages.JavaJerseyServerCodegen", date = "2016-11-14T12:30:06.878Z")
public class OnUsCreditTransferNLApiServiceImpl extends OnUsCreditTransferNLApiService {

    /**
     * Shared placeholder used by every endpoint below: an HTTP 200 response
     * wrapping the generated-code "magic!" message. Each override delegates
     * here until real business logic replaces the stub.
     */
    private static Response magicResponse() {
        return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build();
    }

    @Override
    public Response onUsCreditTransferNLIdAuthorizePost(String id, AuthorizeBody body, SecurityContext securityContext) throws NotFoundException {
        return magicResponse();
    }

    @Override
    public Response onUsCreditTransferNLIdBookPost(String id, BookBody body, SecurityContext securityContext) throws NotFoundException {
        return magicResponse();
    }

    @Override
    public Response onUsCreditTransferNLIdCreatePost(String id, CreateBody body, SecurityContext securityContext) throws NotFoundException {
        return magicResponse();
    }

    @Override
    public Response onUsCreditTransferNLIdGet(String id, SecurityContext securityContext) throws NotFoundException {
        return magicResponse();
    }

    @Override
    public Response onUsCreditTransferNLIdRejectClosedAccountPost(String id, RejectClosedAccountBody body, SecurityContext securityContext) throws NotFoundException {
        return magicResponse();
    }

    @Override
    public Response onUsCreditTransferNLIdRejectExecutionDateInThePastPost(String id, RejectExecutionDateInThePastBody body, SecurityContext securityContext) throws NotFoundException {
        return magicResponse();
    }
}
| {
"content_hash": "b71606391c162b333718325ef7d2d974",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 184,
"avg_line_length": 47.25454545454546,
"alnum_prop": 0.7726048480184686,
"repo_name": "maartenvanheek/rebelapi",
"id": "ab62feb7dba3d273e80701df5d13d6b651686c10",
"size": "2599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/io/swagger/api/impl/OnUsCreditTransferNLApiServiceImpl.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "990587"
},
{
"name": "HTML",
"bytes": "7124"
},
{
"name": "Java",
"bytes": "94781"
},
{
"name": "JavaScript",
"bytes": "74980"
}
],
"symlink_target": ""
} |
# Clear the console, then invoke the FAKE build tool's "build" target.
# Uses the canonical cmdlet name Clear-Host instead of the 'clear' alias:
# PSScriptAnalyzer (PSAvoidUsingCmdletAliases) recommends full cmdlet names
# in scripts for portability and readability.
Clear-Host
fake build -t build
| {
"content_hash": "04dabb600002cefd2ba944cfb0d2b289",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 19,
"avg_line_length": 13,
"alnum_prop": 0.7692307692307693,
"repo_name": "yjpark/dotfiles",
"id": "bb88169373a01bb73a5a83a62aa2f9333225b3e8",
"size": "26",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "windows/PowerShell/aliases/f.build.ps1",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "AppleScript",
"bytes": "134"
},
{
"name": "AutoHotkey",
"bytes": "11439"
},
{
"name": "Batchfile",
"bytes": "3693"
},
{
"name": "CSS",
"bytes": "2749"
},
{
"name": "Clojure",
"bytes": "179"
},
{
"name": "Dockerfile",
"bytes": "334"
},
{
"name": "Emacs Lisp",
"bytes": "13027"
},
{
"name": "Erlang",
"bytes": "364"
},
{
"name": "HTML",
"bytes": "10558"
},
{
"name": "Haskell",
"bytes": "24925"
},
{
"name": "JavaScript",
"bytes": "5486"
},
{
"name": "Nix",
"bytes": "23792"
},
{
"name": "PowerShell",
"bytes": "13369"
},
{
"name": "Python",
"bytes": "82494"
},
{
"name": "Ruby",
"bytes": "12305"
},
{
"name": "Shell",
"bytes": "517109"
},
{
"name": "Vim script",
"bytes": "98802"
}
],
"symlink_target": ""
} |
package com.example.android.bitmapfun.util;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.TransitionDrawable;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.util.Log;
import android.widget.ImageView;

import com.example.android.bitmapfun.BuildConfig;

import java.lang.ref.WeakReference;
/**
* This class wraps up completing some arbitrary long running work when loading a bitmap to an
* ImageView. It handles things like using a memory and disk cache, running the work in a background
* thread and setting a placeholder image.
*/
public abstract class ImageWorker {
private static final String TAG = "ImageWorker";
private static final int FADE_IN_TIME = 200;
private ImageCache mImageCache;
private ImageCache.ImageCacheParams mImageCacheParams;
private Bitmap mLoadingBitmap;
private boolean mFadeInBitmap = true;
private boolean mExitTasksEarly = false;
protected boolean mPauseWork = false;
private final Object mPauseWorkLock = new Object();
protected Resources mResources;
private static final int MESSAGE_CLEAR = 0;
private static final int MESSAGE_INIT_DISK_CACHE = 1;
private static final int MESSAGE_FLUSH = 2;
private static final int MESSAGE_CLOSE = 3;
protected ImageWorker(Context context) {
mResources = context.getResources();
}
/**
* Load an image specified by the data parameter into an ImageView (override
* {@link ImageWorker#processBitmap(Object)} to define the processing logic). A memory and
* disk cache will be used if an {@link ImageCache} has been added using
* {@link ImageWorker#addImageCache(FragmentManager, ImageCache.ImageCacheParams)}. If the
* image is found in the memory cache, it is set immediately, otherwise an {@link AsyncTask}
* will be created to asynchronously load the bitmap.
*
* @param data The URL of the image to download.
* @param imageView The ImageView to bind the downloaded image to.
*/
public void loadImage(Object data, ImageView imageView) {
if (data == null) {
return;
}
BitmapDrawable value = null;
if (mImageCache != null) {
value = mImageCache.getBitmapFromMemCache(String.valueOf(data));
}
if (value != null) {
// Bitmap found in memory cache
imageView.setImageDrawable(value);
} else if (cancelPotentialWork(data, imageView)) {
final BitmapWorkerTask task = new BitmapWorkerTask(imageView);
final AsyncDrawable asyncDrawable =
new AsyncDrawable(mResources, mLoadingBitmap, task);
imageView.setImageDrawable(asyncDrawable);
// NOTE: This uses a custom version of AsyncTask that has been pulled from the
// framework and slightly modified. Refer to the docs at the top of the class
// for more info on what was changed.
task.executeOnExecutor(AsyncTask.DUAL_THREAD_EXECUTOR, data);
}
}
/**
* Set placeholder bitmap that shows when the the background thread is running.
*
* @param bitmap
*/
public void setLoadingImage(Bitmap bitmap) {
mLoadingBitmap = bitmap;
}
/**
* Set placeholder bitmap that shows when the the background thread is running.
*
* @param resId
*/
public void setLoadingImage(int resId) {
mLoadingBitmap = BitmapFactory.decodeResource(mResources, resId);
}
/**
* Adds an {@link ImageCache} to this {@link ImageWorker} to handle disk and memory bitmap
* caching.
* @param fragmentManager
* @param cacheParams The cache parameters to use for the image cache.
*/
public void addImageCache(FragmentManager fragmentManager,
ImageCache.ImageCacheParams cacheParams) {
mImageCacheParams = cacheParams;
mImageCache = ImageCache.getInstance(fragmentManager, mImageCacheParams);
new CacheAsyncTask().execute(MESSAGE_INIT_DISK_CACHE);
}
/**
* Adds an {@link ImageCache} to this {@link ImageWorker} to handle disk and memory bitmap
* caching.
* @param activity
* @param diskCacheDirectoryName See
* {@link ImageCache.ImageCacheParams#ImageCacheParams(Context, String)}.
*/
public void addImageCache(FragmentActivity activity, String diskCacheDirectoryName) {
mImageCacheParams = new ImageCache.ImageCacheParams(activity, diskCacheDirectoryName);
mImageCache = ImageCache.getInstance(activity.getSupportFragmentManager(), mImageCacheParams);
new CacheAsyncTask().execute(MESSAGE_INIT_DISK_CACHE);
}
/**
* If set to true, the image will fade-in once it has been loaded by the background thread.
*/
public void setImageFadeIn(boolean fadeIn) {
mFadeInBitmap = fadeIn;
}
public void setExitTasksEarly(boolean exitTasksEarly) {
mExitTasksEarly = exitTasksEarly;
setPauseWork(false);
}
/**
* Subclasses should override this to define any processing or work that must happen to produce
* the final bitmap. This will be executed in a background thread and be long running. For
* example, you could resize a large bitmap here, or pull down an image from the network.
*
* @param data The data to identify which image to process, as provided by
* {@link ImageWorker#loadImage(Object, ImageView)}
* @return The processed bitmap
*/
protected abstract Bitmap processBitmap(Object data);
/**
* @return The {@link ImageCache} object currently being used by this ImageWorker.
*/
protected ImageCache getImageCache() {
return mImageCache;
}
/**
* Cancels any pending work attached to the provided ImageView.
* @param imageView
*/
public static void cancelWork(ImageView imageView) {
final BitmapWorkerTask bitmapWorkerTask = getBitmapWorkerTask(imageView);
if (bitmapWorkerTask != null) {
bitmapWorkerTask.cancel(true);
if (BuildConfig.DEBUG) {
final Object bitmapData = bitmapWorkerTask.data;
Log.d(TAG, "cancelWork - cancelled work for " + bitmapData);
}
}
}
/**
* Returns true if the current work has been canceled or if there was no work in
* progress on this image view.
* Returns false if the work in progress deals with the same data. The work is not
* stopped in that case.
*/
public static boolean cancelPotentialWork(Object data, ImageView imageView) {
final BitmapWorkerTask bitmapWorkerTask = getBitmapWorkerTask(imageView);
if (bitmapWorkerTask != null) {
final Object bitmapData = bitmapWorkerTask.data;
if (bitmapData == null || !bitmapData.equals(data)) {
bitmapWorkerTask.cancel(true);
if (BuildConfig.DEBUG) {
Log.d(TAG, "cancelPotentialWork - cancelled work for " + data);
}
} else {
// The same work is already in progress.
return false;
}
}
return true;
}
/**
* @param imageView Any imageView
* @return Retrieve the currently active work task (if any) associated with this imageView.
* null if there is no such task.
*/
private static BitmapWorkerTask getBitmapWorkerTask(ImageView imageView) {
if (imageView != null) {
final Drawable drawable = imageView.getDrawable();
if (drawable instanceof AsyncDrawable) {
final AsyncDrawable asyncDrawable = (AsyncDrawable) drawable;
return asyncDrawable.getBitmapWorkerTask();
}
}
return null;
}
/**
* The actual AsyncTask that will asynchronously process the image.
*/
private class BitmapWorkerTask extends AsyncTask<Object, Void, BitmapDrawable> {
private Object data;
private final WeakReference<ImageView> imageViewReference;
public BitmapWorkerTask(ImageView imageView) {
imageViewReference = new WeakReference<ImageView>(imageView);
}
/**
* Background processing.
*/
@Override
protected BitmapDrawable doInBackground(Object... params) {
if (BuildConfig.DEBUG) {
Log.d(TAG, "doInBackground - starting work");
}
data = params[0];
final String dataString = String.valueOf(data);
Bitmap bitmap = null;
BitmapDrawable drawable = null;
// Wait here if work is paused and the task is not cancelled
synchronized (mPauseWorkLock) {
while (mPauseWork && !isCancelled()) {
try {
mPauseWorkLock.wait();
} catch (InterruptedException e) {}
}
}
// If the image cache is available and this task has not been cancelled by another
// thread and the ImageView that was originally bound to this task is still bound back
// to this task and our "exit early" flag is not set then try and fetch the bitmap from
// the cache
if (mImageCache != null && !isCancelled() && getAttachedImageView() != null
&& !mExitTasksEarly) {
bitmap = mImageCache.getBitmapFromDiskCache(dataString);
}
// If the bitmap was not found in the cache and this task has not been cancelled by
// another thread and the ImageView that was originally bound to this task is still
// bound back to this task and our "exit early" flag is not set, then call the main
// process method (as implemented by a subclass)
if (bitmap == null && !isCancelled() && getAttachedImageView() != null
&& !mExitTasksEarly) {
bitmap = processBitmap(params[0]);
}
// If the bitmap was processed and the image cache is available, then add the processed
// bitmap to the cache for future use. Note we don't check if the task was cancelled
// here, if it was, and the thread is still running, we may as well add the processed
// bitmap to our cache as it might be used again in the future
if (bitmap != null) {
if (Utils.hasHoneycomb()) {
// Running on Honeycomb or newer, so wrap in a standard BitmapDrawable
drawable = new BitmapDrawable(mResources, bitmap);
} else {
// Running on Gingerbread or older, so wrap in a RecyclingBitmapDrawable
// which will recycle automagically
drawable = new RecyclingBitmapDrawable(mResources, bitmap);
}
if (mImageCache != null) {
mImageCache.addBitmapToCache(dataString, drawable);
}
}
if (BuildConfig.DEBUG) {
Log.d(TAG, "doInBackground - finished work");
}
return drawable;
}
/**
* Once the image is processed, associates it to the imageView
*/
@Override
protected void onPostExecute(BitmapDrawable value) {
// if cancel was called on this task or the "exit early" flag is set then we're done
if (isCancelled() || mExitTasksEarly) {
value = null;
}
final ImageView imageView = getAttachedImageView();
if (value != null && imageView != null) {
if (BuildConfig.DEBUG) {
Log.d(TAG, "onPostExecute - setting bitmap");
}
setImageDrawable(imageView, value);
}
}
@Override
protected void onCancelled(BitmapDrawable value) {
super.onCancelled(value);
synchronized (mPauseWorkLock) {
mPauseWorkLock.notifyAll();
}
}
/**
* Returns the ImageView associated with this task as long as the ImageView's task still
* points to this task as well. Returns null otherwise.
*/
private ImageView getAttachedImageView() {
final ImageView imageView = imageViewReference.get();
final BitmapWorkerTask bitmapWorkerTask = getBitmapWorkerTask(imageView);
if (this == bitmapWorkerTask) {
return imageView;
}
return null;
}
}
/**
* A custom Drawable that will be attached to the imageView while the work is in progress.
* Contains a reference to the actual worker task, so that it can be stopped if a new binding is
* required, and makes sure that only the last started worker process can bind its result,
* independently of the finish order.
*/
private static class AsyncDrawable extends BitmapDrawable {
private final WeakReference<BitmapWorkerTask> bitmapWorkerTaskReference;
public AsyncDrawable(Resources res, Bitmap bitmap, BitmapWorkerTask bitmapWorkerTask) {
super(res, bitmap);
bitmapWorkerTaskReference =
new WeakReference<BitmapWorkerTask>(bitmapWorkerTask);
}
public BitmapWorkerTask getBitmapWorkerTask() {
return bitmapWorkerTaskReference.get();
}
}
/**
* Called when the processing is complete and the final drawable should be
* set on the ImageView.
*
* @param imageView
* @param drawable
*/
private void setImageDrawable(ImageView imageView, Drawable drawable) {
if (mFadeInBitmap) {
// Transition drawable with a transparent drawable and the final drawable
final TransitionDrawable td =
new TransitionDrawable(new Drawable[] {
new ColorDrawable(android.R.color.transparent),
drawable
});
// Set background to loading bitmap
imageView.setBackgroundDrawable(
new BitmapDrawable(mResources, mLoadingBitmap));
imageView.setImageDrawable(td);
td.startTransition(FADE_IN_TIME);
} else {
imageView.setImageDrawable(drawable);
}
}
/**
 * Pause any ongoing background work. This can be used as a temporary
 * measure to improve performance. For example background work could
 * be paused when a ListView or GridView is being scrolled using a
 * {@link android.widget.AbsListView.OnScrollListener} to keep
 * scrolling smooth.
 * <p>
 * If work is paused, be sure setPauseWork(false) is called again
 * before your fragment or activity is destroyed (for example during
 * {@link android.app.Activity#onPause()}), or there is a risk the
 * background thread will never finish.
 */
public void setPauseWork(boolean pauseWork) {
    synchronized (mPauseWorkLock) {
        mPauseWork = pauseWork;
        if (!mPauseWork) {
            // Resuming: wake any worker threads blocked waiting on the lock.
            mPauseWorkLock.notifyAll();
        }
    }
}
/**
 * Background task that dispatches cache maintenance operations (clear,
 * init-disk, flush, close) off the UI thread.
 */
protected class CacheAsyncTask extends AsyncTask<Object, Void, Void> {

    @Override
    protected Void doInBackground(Object... params) {
        final int message = (Integer) params[0];
        if (message == MESSAGE_CLEAR) {
            clearCacheInternal();
        } else if (message == MESSAGE_INIT_DISK_CACHE) {
            initDiskCacheInternal();
        } else if (message == MESSAGE_FLUSH) {
            flushCacheInternal();
        } else if (message == MESSAGE_CLOSE) {
            closeCacheInternal();
        }
        return null;
    }
}
/** Initializes the disk cache, if an image cache has been set. */
protected void initDiskCacheInternal() {
    if (mImageCache == null) {
        return;
    }
    mImageCache.initDiskCache();
}
/** Clears the image cache, if one has been set. */
protected void clearCacheInternal() {
    if (mImageCache == null) {
        return;
    }
    mImageCache.clearCache();
}
/** Flushes the image cache, if one has been set. */
protected void flushCacheInternal() {
    if (mImageCache == null) {
        return;
    }
    mImageCache.flush();
}
/** Closes the image cache and drops the reference to it, if one has been set. */
protected void closeCacheInternal() {
    if (mImageCache == null) {
        return;
    }
    mImageCache.close();
    mImageCache = null;
}
/** Asynchronously clears the image cache via {@link CacheAsyncTask}. */
public void clearCache() {
    new CacheAsyncTask().execute(MESSAGE_CLEAR);
}
/** Asynchronously flushes the image cache via {@link CacheAsyncTask}. */
public void flushCache() {
    new CacheAsyncTask().execute(MESSAGE_FLUSH);
}
/** Asynchronously closes the image cache via {@link CacheAsyncTask}. */
public void closeCache() {
    new CacheAsyncTask().execute(MESSAGE_CLOSE);
}
}
| {
"content_hash": "9d27d31166680c0feb364f37334ea658",
"timestamp": "",
"source": "github",
"line_count": 463,
"max_line_length": 102,
"avg_line_length": 37.08639308855292,
"alnum_prop": 0.6162716207559257,
"repo_name": "indashnet/InDashNet.Open.UN2000",
"id": "d260660113c536e35bfb95513a32de82e2695f64",
"size": "17790",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "android/development/samples/training/bitmapfun/src/com/example/android/bitmapfun/util/ImageWorker.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?php
namespace App\Console\Commands;
use App\Rate;
use Illuminate\Console\Command;
class CleanCsv extends Command
{
    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'csv:clean';

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'Get and clean csv';

    /**
     * Create a new command instance.
     *
     * @return void
     */
    public function __construct()
    {
        parent::__construct();
    }

    /**
     * Execute the console command.
     *
     * Downloads today's closing exchange-rate CSV from the Brazilian Central
     * Bank, splits it into CRLF-terminated rows and upserts each row into the
     * Rates table keyed by the currency initials.
     *
     * @return mixed
     */
    public function handle()
    {
        $this->info('Getting csv');
        $date = date('Ymd');
        // Best effort: on weekends/holidays no file exists; the failure is
        // reported just below instead of emitting a PHP warning.
        $csv = @file_get_contents('http://www4.bcb.gov.br/Download/fechamento/'.$date.'.csv');
        if (!$csv) {
            $this->error("Today's date does not have records associated!");
            return;
        }
        $this->info('Cleaning the csv');
        $split = preg_split('/\r\n/', $csv);
        // BUG FIX: only drop the trailing element when it is actually empty
        // (i.e. the payload ends with CRLF). Popping unconditionally could
        // silently discard a real record.
        if (end($split) === '') {
            array_pop($split);
        }
        $this->info('Inserting data into Rates table');
        $progress = $this->output->createProgressBar(count($split));
        foreach ($split as $key => $values) {
            $value = explode(';', $values);
            // Skip malformed rows instead of hitting undefined-index notices
            // on $value[2..5] below.
            if (count($value) < 6) {
                $progress->advance();
                continue;
            }
            Rate::updateOrCreate(
                ['initials' => $value[3]],
                [
                    'type' => $value[2],
                    'buy' => $value[4],
                    'sell' => $value[5],
                ]
            );
            $progress->advance();
        }
        $progress->finish();
        $this->info("\n" . 'Finished!');
    }
}
| {
"content_hash": "454a1cb55f8b974ae05e3aa78d1fccc5",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 94,
"avg_line_length": 21.417721518987342,
"alnum_prop": 0.48226950354609927,
"repo_name": "LucasLeandro1204/cotacao-api",
"id": "9c2c6711fa783bc38976931361e6108d0f61f2a9",
"size": "1692",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/Console/Commands/CleanCsv.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "553"
},
{
"name": "CSS",
"bytes": "2692"
},
{
"name": "HTML",
"bytes": "3040"
},
{
"name": "JavaScript",
"bytes": "718"
},
{
"name": "PHP",
"bytes": "21222"
}
],
"symlink_target": ""
} |
package com.example.pinpaiactivity;
import java.util.ArrayList;
import java.util.List;
import com.example.activity.R;
import com.example.adapter.PinPaiAdapter;
import com.example.driverapp.widget.ElasticListViewBai;
import android.app.Activity;
import android.os.Bundle;
import android.view.Window;
public class ZGYiQiActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Hide the ActionBar / title bar.
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.activity_adapter_zg_yiqi);
        // Look up the list view from the layout.
        ElasticListViewBai elasticListView = (ElasticListViewBai) findViewById(R.id.elasticListView1);
        // Create an arbitrary list of size 1 (the adapter only needs one row).
        // Typed generics replace the raw List/ArrayList the original used.
        List<Integer> list = new ArrayList<Integer>();
        list.add(0);
        // Attach the adapter.
        elasticListView.setAdapter(new DeGuoAdapter(this, list));
    }
}
| {
"content_hash": "e3ad33ca127602a4a5bbe46a70d9285f",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 96,
"avg_line_length": 28.903225806451612,
"alnum_prop": 0.7600446428571429,
"repo_name": "hxh129/AndroidProject",
"id": "ed06bb4522e6460bf87ebd26465d91eff49e26ba",
"size": "940",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PeopleDriver/src/com/example/pinpaiactivity/ZGYiQiActivity.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "380028"
}
],
"symlink_target": ""
} |
from open_facebook.api import *
import unittest
import logging
import mock
import datetime
from open_facebook.exceptions import OpenGraphException
logger = logging.getLogger()
from open_facebook.utils import json
# When True, the Facebook test users are re-created even if cached copies exist.
TEST_USER_FORCE_CREATE = False

# Slug -> creation kwargs for the Facebook test users shared by this suite.
TEST_USER_DICT = {
    'tommy': dict(name='Tommaso Ilgubrab'),
    'thi': dict(name='Thierry Hcabnellehcs'),
    'guy': dict(name='Guyon Eerom', permissions=['read_stream'])
}

TEST_USER_NAMES = [v['name'] for k, v in TEST_USER_DICT.items()]

# Populated lazily by setup_users(); maps slug -> created test user object.
TEST_USER_OBJECTS = None
def setup_users():
    '''
    Create (or fetch from cache) the Facebook test users shared by all tests.

    Creating test users through the Facebook API is very slow, so the result
    is memoised both in the Django cache and in the TEST_USER_OBJECTS global.
    '''
    from django.core.cache import cache

    global TEST_USER_OBJECTS
    if TEST_USER_OBJECTS is not None:
        return TEST_USER_OBJECTS

    cache_key = 'test_user_objects'
    users = cache.get(cache_key)
    if not users or TEST_USER_FORCE_CREATE:
        logger.info('test user cache not found, rebuilding')
        users = {}
        app_token = FacebookAuthorization.get_app_access_token()
        for slug, user_kwargs in TEST_USER_DICT.items():
            users[slug] = FacebookAuthorization.get_or_create_test_user(
                app_token,
                name=user_kwargs['name'],
                force_create=TEST_USER_FORCE_CREATE,
                permissions=user_kwargs.get('permissions'),
            )
        cache.set(cache_key, users, 60 * 60)
    TEST_USER_OBJECTS = users
    return TEST_USER_OBJECTS
class OpenFacebookTest(unittest.TestCase):
    '''
    Base class for the suite: binds the shared Facebook test users onto the
    instance (self.tommy, self.thi, self.guy) and fails any test that writes
    to stdout via print.
    '''

    def setUp(self):
        setup_users()
        for user_slug, user_object in TEST_USER_OBJECTS.items():
            setattr(self, user_slug, user_object)
        # capture print statements by swapping stdout for an in-memory buffer
        import sys
        import StringIO
        self.prints = sys.stdout = StringIO.StringIO()

    def tearDown(self):
        # complain about print statements
        # NOTE(review): sys.stdout is never restored here -- presumably the
        # test runner resets it between tests; verify before relying on it.
        self.prints.seek(0)
        content = self.prints.read()
        if content:
            raise ValueError('print statement found, output %s' % content)
class TestErrorMapping(OpenFacebookTest):
def test_syntax_error(self):
error_response = '''
{
'error': {
'message': 'Syntax error "Expected end of string instead of "?"." at character 14: third_party_id?access_token=AAABbPDnY390BAOZA22ugLfCyr2OGH0k82VJMPJRR8qxceV96nBra53R5ISiou7VOD9eBd21ZCzPZC5Vn1hWbVkY9Qvx9g8wl1NCmuL9vwZDZD',
'code': 2500,
'type': 'OAuthException'
}
}
'''
return
def test_oauth_errors(self):
expires_response = '''{
"error": {
"type": "OAuthException",
"message": "Session has expired at unix time SOME_TIME. The current unix time is SOME_TIME."
}
} '''
changed_password_response = '''
{
"error": {
"type": "OAuthException",
"message": "The session has been invalidated because the user has changed the password."
}
}
'''
deauthorized_response = '''
{
"error": {
"type": "OAuthException",
"message": "Error validating access token: USER_ID has not authorized application APP_ID"
}
}
'''
loggedout_response = '''
{
"error": {
"type": "OAuthException",
"message": "Error validating access token: The session is invalid because the user logged out."
}
}
'''
responses = [expires_response, changed_password_response,
deauthorized_response, loggedout_response]
response_objects = []
for response_string in responses:
response = json.loads(response_string)
response_objects.append(response)
from open_facebook import exceptions as open_facebook_exceptions
for response in response_objects:
oauth = False
try:
FacebookConnection.raise_error(response['error']['type'],
response['error']['message'])
except open_facebook_exceptions.OAuthException, e:
oauth = True
assert oauth, 'response %s didnt raise oauth error' % response
def test_non_oauth_errors(self):
object_open_graph_error = '''
{"error":
{"message": "(#3502) Object at URL http://www.fashiolista.com/my_style/list/441276/?og=active&utm_campaign=facebook_action_comment&utm_medium=facebook&utm_source=facebook has og:type of 'website'. The property 'list' requires an object of og:type 'fashiolista:list'. ",
"code": 3502, "type": "OAuthException"
}
}
'''
response = json.loads(object_open_graph_error)
def test():
FacebookConnection.raise_error(
response['error']['type'],
response['error']['message'],
response['error'].get('code')
)
self.assertRaises(OpenGraphException, test)
class Test500Detection(OpenFacebookTest):
    # Facebook sometimes wraps application-level errors in HTTP 5xx responses,
    # so the client must inspect the response body before treating a 5xx as an
    # actual outage.

    def test_application_error(self):
        '''
        Facebook errors often look like 500s
        Its a silly system, but we need to support it
        This is actually an application error
        '''
        from StringIO import StringIO
        graph = self.guy.graph()
        # Patch urllib2 so the canned HTTP 500 (whose body is an OAuth error
        # payload) is returned without touching the network.
        with mock.patch('urllib2.build_opener') as patched:
            from urllib2 import HTTPError
            opener = mock.MagicMock()
            response = StringIO('''{
                "error": {
                    "type": "OAuthException",
                    "message": "Error validating access token: USER_ID has not authorized application APP_ID"
                }
            }''')
            opener.open.side_effect = HTTPError(
                'bla', 500, 'bla', 'bla', response)
            patched.return_value = opener

            def make_request():
                graph.get('me')

            # The 500 must be unwrapped into an OAuthException, not reported
            # as Facebook being unreachable.
            self.assertRaises(facebook_exceptions.OAuthException, make_request)

    def test_facebook_down(self):
        '''
        Facebook errors often look like 500s
        After 3 attempts while facebook is down we raise a FacebookUnreachable
        Exception
        '''
        from StringIO import StringIO
        graph = self.guy.graph()
        with mock.patch('urllib2.build_opener') as patched:
            from urllib2 import HTTPError
            opener = mock.MagicMock()

            def side_effect(*args, **kwargs):
                # An HTML error page (not JSON) stands in for Facebook being down.
                response = StringIO(u'''
                <title>Facebook | Error</title>
                Sorry, something went wrong.
                ''')
                http_exception = HTTPError('bla', 505, 'bla', 'bla', response)
                raise http_exception

            opener.open.side_effect = side_effect
            patched.return_value = opener

            def make_request():
                graph.get('me')

            self.assertRaises(
                facebook_exceptions.FacebookUnreachable, make_request)
class TestPublishing(OpenFacebookTest):
def test_permissions(self):
graph = self.thi.graph()
permission_responses = [
(
{u'paging': {u'next': u'https://graph.facebook.com/100005270323705/permissions?access_token=CAADD9tTuZCZBQBALXBfM0xDzsn68jAS8HgUSnbhRkZAp5L1FFpY7iLu3aAytCv8jGN4ZCXZAbZCehSvnK7e8d9P22FZCeHarRnFbFne8MluM0S7UNhoCwKWBNrazrs2tjZCIelQAdzesschwzUr3kRCR0oL9bW4Tp6syWmjm0FOUjwZDZD&limit=5000&offset=5000'}, u'data': [
{u'user_photos': 1, u'publish_actions': 1, u'read_stream': 1, u'video_upload': 1, u'installed': 1, u'offline_access': 1, u'create_note': 1, u'publish_stream': 1, u'photo_upload': 1, u'share_item': 1, u'status_update': 1}]},
{u'user_photos': True, u'publish_actions': True, u'read_stream': True, u'video_upload': True, u'installed': True, u'offline_access': True, u'create_note': True, u'publish_stream': True, u'photo_upload': True, u'share_item': True, u'status_update': True}),
(
{u'paging': {
u'next': u'https://graph.facebook.com/100005270323705/permissions?access_token=CAADD9tTuZCZBQBALXBfM0xDzsn68jAS8HgUSnbhRkZAp5L1FFpY7iLu3aAytCv8jGN4ZCXZAbZCehSvnK7e8d9P22FZCeHarRnFbFne8MluM0S7UNhoCwKWBNrazrs2tjZCIelQAdzesschwzUr3kRCR0oL9bW4Tp6syWmjm0FOUjwZDZD&limit=5000&offset=5000'}, u'data': []},
{}),
]
# test the full flow, just check no errors are raised
live_permissions = graph.permissions()
# test weird responses
for response, correct_permissions in permission_responses:
with mock.patch('open_facebook.api.OpenFacebook.get') as g:
g.return_value = response
permissions = graph.permissions()
self.assertEqual(permissions, correct_permissions)
def test_wallpost(self):
graph = self.thi.graph()
now = datetime.datetime.now()
result = graph.set('me/feed', message='This should work %s' % now)
self.assertTrue(result['id'])
graph.delete(result['id'])
# we have no permissions, this should fail
guy_graph = self.guy.graph()
try:
guy_graph.set('me/feed', message='Nonnonono')
raise ValueError('We were expecting a permissions exception')
except facebook_exceptions.PermissionException, e:
pass
def test_og_follow(self):
return
# perform an og follow
graph = self.thi.graph()
path = 'me/og.follows'
result = graph.set(path, profile=self.guy.id)
self.assertTrue(result['id'])
# now try removing it
remove_path = result['id']
deleted = graph.delete(remove_path)
def test_og_adjust(self):
return
# perform an og follow
graph = self.thi.graph()
path = 'me/og.follows'
result = graph.set(path, profile=self.guy.id)
self.assertTrue(result['id'])
change_result = graph.set(result['id'], message='hello world')
assert change_result is True
def test_og_explicit_share(self):
return
# perform an og follow
graph = self.thi.graph()
path = 'me/og.follows'
result = graph.set(
path, profile=self.guy.id, fb__explicitly_shared='true')
self.assertTrue(result['id'])
class TestOpenFacebook(OpenFacebookTest):
def test_cookie_parsing(self):
cookie = 'F7cndfQuSIkcVHWIgg_SHQ4LIDJXeeHhiXUNjesOw5g.eyJhbGdvcml0aG0iOiJITUFDLVNIQTI1NiIsImNvZGUiOiJVMTZuMFNoWVUxSTJ5VEFJMVZ0RmlvZTdhRVRaaEZ4cGV5d1hwYnZvOUprLmV5SnBkaUk2SW1OcmFGVXlWR053ZDA1VlMwSTRlUzFzZDA1WmFtY2lmUS5rZl9RTUhCMnVFTVh5YW83UU5UcnFGMlJzOGxxQUxrM1AxYm8zazBLMm5YUXpOZW5LSVlfczBVV3ZNbE1jTXAzcE04TXNLNVVDQUpjWlQ1N1ZaZXFkS3ZPeXRFbmdoODFxTmczTXVDeTBHNjB6WjFBOWZGZlpHenVDejdKSEVSSCIsImlzc3VlZF9hdCI6MTMxMTYwMDEyNywidXNlcl9pZCI6Nzg0Nzg1NDMwfQ'
parsed_cookie = FacebookAuthorization.parse_signed_data(cookie)
assert 'code' in parsed_cookie
def test_code_conversion(self):
from open_facebook import exceptions as open_facebook_exceptions
# before testing update this with a valid code, hope facebook comes
# with a way to automate this
code = 'AQDByzD95HCaQLIY3PyQFvCJ67bkYx5f692TylEXARQ0p6_XK0mXGRVBU3G759qOIa_A966Wmm-kxxw1GbXkXQiJj0A3b_XNFewFhT8GSro4i9F8b_7q1RSnKzfq327XYno-Qw4NGxm0ordSl0gJ0YTjhwY8TwSMy2b2whD5ZhHvaYkEaC1J-GcBhkF7o4F2-W8'
# the redirect uri needs to be connected
try:
user_token = FacebookAuthorization.convert_code(
code, redirect_uri='http://local.mellowmorning.com:8080')
facebook = OpenFacebook(user_token['access_token'])
facebook.me()
except open_facebook_exceptions.ParameterException, e:
pass
def test_fql(self):
facebook = self.thi.graph()
result = facebook.fql('SELECT name FROM user WHERE uid = me()')
assert 'name' in result[0]
def test_open_api(self):
facebook = self.guy.graph()
assert 'name' in facebook.me()
assert facebook.get('fashiolista')
| {
"content_hash": "44d0de6e76b10c49ec9df0f3cfeec40d",
"timestamp": "",
"source": "github",
"line_count": 318,
"max_line_length": 457,
"avg_line_length": 38.783018867924525,
"alnum_prop": 0.6157463715235547,
"repo_name": "michaelBenin/Django-facebook",
"id": "a25560f0ddb271ba2346acc778f1c22749f5ac55",
"size": "12358",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "open_facebook/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
<?php
/**
* Simple test script for Propel drivers.
*
* This script will no do in-depth testing, but is designed to test whether drivers
* are correctly performing basic operations -- SELECT, UPDATE, DELETE, limit support,
* prepared query emulation, etc.
*
* IMPORTANT:
*
* Use this script with a clean version of the [example] bookstore database. If records
* already exist, an error will be displayed.
*
* TODO:
* A more advanced driver test system should be developed that could test capabilities
* of driver-specific things like callable statements (stored procedures), etc. Perhaps break
* functionality into class & provide ability to subclass.
*
* @author Hans Lellelid <hans@xmpl.org>
* @version $Revision: 1612 $
*/
// Setup configuration. It is expected that the bookstore-conf.php file exists in ../build/conf
//
error_reporting(E_ALL);
$conf_path = realpath(dirname(__FILE__) . '/../projects/bookstore-packaged/build/conf/bookstore-packaged-conf.php');
if (!file_exists($conf_path)) {
print "Make sure that you specify properties in conf/bookstore-packaged.properties and "
."build propel before running this script.";
exit;
}
// Add PHP_CLASSPATH, if set
if (getenv("PHP_CLASSPATH")) {
set_include_path(getenv("PHP_CLASSPATH") . PATH_SEPARATOR . get_include_path());
}
// Add build/classes/ and classes/ to path
set_include_path(
realpath(dirname(__FILE__) . '/../projects/bookstore-packaged/build/classes') . PATH_SEPARATOR .
dirname(__FILE__) . '/../../runtime/classes' . PATH_SEPARATOR .
get_include_path()
);
// Require classes.
require_once 'propel/Propel.php';
require_once 'author/Author.php';
require_once 'publisher/Publisher.php';
require_once 'book/Book.php';
require_once 'review/Review.php';
include_once 'media/Media.php';
include_once 'log/BookstoreLog.php';
include_once 'book_club_list/BookClubList.php';
include_once 'book_club_list/BookListRel.php';
include_once 'Benchmark/Timer.php';
$timer = new Benchmark_Timer;
$timer->start();
// Some utility functions
/**
 * Render a test condition as a printable status string.
 *
 * @param bool $cond Result of the check being reported.
 * @return string "[OK]\n" when the condition is truthy, "[FAILED]\n" otherwise.
 */
function boolTest($cond) {
    return $cond ? "[OK]\n" : "[FAILED]\n";
}
try {
// Initialize Propel
Propel::init($conf_path);
} catch (Exception $e) {
die("Error initializing propel: ". $e->__toString());
}
/**
 * Verify that all bookstore tables are empty before the tests run.
 *
 * Prints one status line per table and returns whether every checked table
 * contained zero records. Dies with a message on query failure.
 *
 * @return bool True when all checked tables are empty.
 */
function check_tables_empty() {
    try {
        print "\nChecking to see that tables are empty\n";
        print "-------------------------------------\n\n";

        print "Ensuring that there are no records in [author] table: ";
        $res = AuthorPeer::doSelect(new Criteria());
        print boolTest(empty($res));

        print "Ensuring that there are no records in [publisher] table: ";
        $res2 = PublisherPeer::doSelect(new Criteria());
        print boolTest(empty($res2));

        print "Ensuring that there are no records in [book] table: ";
        // BUG FIX: this check previously queried AuthorPeer, so the [book]
        // table was never actually inspected.
        $res3 = BookPeer::doSelect(new Criteria());
        print boolTest(empty($res3));

        print "Ensuring that there are no records in [review] table: ";
        $res4 = ReviewPeer::doSelect(new Criteria());
        print boolTest(empty($res4));

        print "Ensuring that there are no records in [media] table: ";
        $res5 = MediaPeer::doSelect(new Criteria());
        print boolTest(empty($res5));

        print "Ensuring that there are no records in [book_club_list] table: ";
        $res6 = BookClubListPeer::doSelect(new Criteria());
        print boolTest(empty($res6));

        print "Ensuring that there are no records in [book_x_list] table: ";
        $res7 = BookListRelPeer::doSelect(new Criteria());
        print boolTest(empty($res7));

        // BUG FIX: include the book club tables in the overall verdict; they
        // were checked above but ignored by the original return statement.
        return (empty($res) && empty($res2) && empty($res3) && empty($res4)
            && empty($res5) && empty($res6) && empty($res7));
    } catch (Exception $e) {
        die("Error ensuring tables were empty: " . $e->__toString());
    }
}
// Check to see if records already exist in any of the three tables. If so, display an error
// and exit.
if (!check_tables_empty()) {
die("Tables must be empty to perform these tests.");
}
// Add publisher records
// ---------------------
try {
print "\nAdding some new publishers to the list\n";
print "--------------------------------------\n\n";
$scholastic = new Publisher();
$scholastic->setName("Scholastic");
// do not save, will do later to test cascade
print "Added publisher \"Scholastic\" [not saved yet].\n";
$morrow = new Publisher();
$morrow->setName("William Morrow");
$morrow->save();
$morrow_id = $morrow->getId();
print "Added publisher \"William Morrow\" [id = $morrow_id].\n";
$penguin = new Publisher();
$penguin->setName("Penguin");
$penguin->save();
$penguin_id = $penguin->getId();
print "Added publisher \"Penguin\" [id = $penguin_id].\n";
$vintage = new Publisher();
$vintage->setName("Vintage");
$vintage->save();
$vintage_id = $vintage->getId();
print "Added publisher \"Vintage\" [id = $vintage_id].\n";
} catch (Exception $e) {
die("Error adding publisher: " . $e->__toString());
}
// Add author records
// ------------------
try {
print "\nAdding some new authors to the list\n";
print "--------------------------------------\n\n";
$rowling = new Author();
$rowling->setFirstName("J.K.");
$rowling->setLastName("Rowling");
// no save()
print "Added author \"J.K. Rowling\" [not saved yet].\n";
$stephenson = new Author();
$stephenson->setFirstName("Neal");
$stephenson->setLastName("Stephenson");
$stephenson->save();
$stephenson_id = $stephenson->getId();
print "Added author \"Neal Stephenson\" [id = $stephenson_id].\n";
$byron = new Author();
$byron->setFirstName("George");
$byron->setLastName("Byron");
$byron->save();
$byron_id = $byron->getId();
print "Added author \"George Byron\" [id = $byron_id].\n";
$grass = new Author();
$grass->setFirstName("Gunter");
$grass->setLastName("Grass");
$grass->save();
$grass_id = $grass->getId();
print "Added author \"Gunter Grass\" [id = $grass_id].\n";
} catch (Exception $e) {
die("Error adding author: " . $e->__toString());
}
// Add book records
// ----------------
try {
print "\nAdding some new books to the list\n";
print "-------------------------------------\n\n";
$phoenix = new Book();
$phoenix->setTitle("Harry Potter and the Order of the Phoenix");
$phoenix->setISBN("043935806X");
print "Trying cascading save (Harry Potter): ";
$phoenix->setAuthor($rowling);
$phoenix->setPublisher($scholastic);
$phoenix->save();
$phoenix_id = $phoenix->getId();
print boolTest(true);
print "Added book \"Harry Potter and the Order of the Phoenix\" [id = $phoenix_id].\n";
$qs = new Book();
$qs->setISBN("0380977427");
$qs->setTitle("Quicksilver");
$qs->setAuthor($stephenson);
$qs->setPublisher($morrow);
$qs->save();
$qs_id = $qs->getId();
print "Added book \"Quicksilver\" [id = $qs_id].\n";
$dj = new Book();
$dj->setISBN("0140422161");
$dj->setTitle("Don Juan");
$dj->setAuthor($byron);
$dj->setPublisher($penguin);
$dj->save();
$dj_id = $qs->getId();
print "Added book \"Don Juan\" [id = $dj_id].\n";
$td = new Book();
$td->setISBN("067972575X");
$td->setTitle("The Tin Drum");
$td->setAuthor($grass);
$td->setPublisher($vintage);
$td->save();
$td_id = $td->getId();
print "Added book \"The Tin Drum\" [id = $dj_id].\n";
} catch (Exception $e) {
die("Error saving book: " . $e->__toString());
}
// Add review records
// ------------------
try {
print "\nAdding some book reviews to the list\n";
print "------------------------------------\n\n";
$r1 = new Review();
$r1->setBook($phoenix);
$r1->setReviewedBy("Washington Post");
$r1->setRecommended(true);
$r1->setReviewDate(time());
$r1->save();
$r1_id = $r1->getId();
print "Added Washington Post book review [id = $r1_id].\n";
$r2 = new Review();
$r2->setBook($phoenix);
$r2->setReviewedBy("New York Times");
$r2->setRecommended(false);
$r2->setReviewDate(time());
$r2->save();
$r2_id = $r2->getId();
print "Added New York Times book review [id = $r2_id].\n";
} catch (Exception $e) {
die("Error saving book review: " . $e->__toString());
}
// Perform a "complex" search
// --------------------------
try {
print "\nDoing complex search on books\n";
print "-----------------------------\n\n";
$crit = new Criteria();
$crit->add(BookPeer::TITLE, 'Harry%', Criteria::LIKE);
print "Looking for \"Harry%\": ";
$results = BookPeer::doSelect($crit);
print boolTest(count($results) === 1);
$crit2 = new Criteria();
$crit2->add(BookPeer::ISBN, array("0380977427", "0140422161"), Criteria::IN);
$results = BookPeer::doSelect($crit2);
print "Looking for ISBN IN (\"0380977427\", \"0140422161\"): ";
print boolTest(count($results) === 2);
} catch (Exception $e) {
die("Error while performing complex query: " . $e->__toString());
}
// Perform a "limit" search
// ------------------------
try {
print "\nDoing LIMITed search on books\n";
print "-----------------------------\n\n";
$crit = new Criteria();
$crit->setLimit(2);
$crit->setOffset(1);
$crit->addAscendingOrderByColumn(BookPeer::TITLE);
print "Checking to make sure correct number returned: ";
$results = BookPeer::doSelect($crit);
print boolTest(count($results) === 2);
print "Checking to make sure correct books returned: ";
// we ordered on book title, so we expect to get
print boolTest( $results[0]->getTitle() == "Harry Potter and the Order of the Phoenix" && $results[1]->getTitle() == "Quicksilver" );
} catch (Exception $e) {
die("Error while performing LIMIT query: " . $e->__toString());
}
// Perform a lookup & update!
// --------------------------
try {
print "\nUpdating just-created book title\n";
print "--------------------------------\n\n";
print "First finding book by PK (=$qs_id) .... ";
try {
$qs_lookup = BookPeer::retrieveByPk($qs_id);
} catch (Exception $e) {
print "ERROR!\n";
die("Error retrieving by pk: " . $e->__toString());
}
if ($qs_lookup) {
print "FOUND!\n";
} else {
print "NOT FOUND :(\n";
die("Couldn't find just-created book: book_id = $qs_id");
}
try {
$new_title = "Quicksilver (".crc32(uniqid(rand())).")";
print "Attempting to update found object (".$qs_lookup->getTitle()." -> ".$new_title."): ";
$qs_lookup->setTitle($new_title);
$qs_lookup->save();
print boolTest(true);
} catch (Exception $e) {
die("Error saving (updating) book: " . $e->__toString());
}
print "Making sure object was correctly updated: ";
$qs_lookup2 = BookPeer::retrieveByPk($qs_id);
print boolTest($qs_lookup2->getTitle() == $new_title);
} catch (Exception $e) {
die("Error updating book: " . $e->__toString());
}
// Test some basic DATE / TIME stuff
// ---------------------------------
try {
print "\nTesting the DATE/TIME columns\n";
print "-----------------------------\n\n";
// that's the control timestamp.
$control = strtotime('2004-02-29 00:00:00');
// should be two in the db
$r = ReviewPeer::doSelectOne(new Criteria());
$r_id = $r->getId();
$r->setReviewDate($control);
$r->save();
$r2 = ReviewPeer::retrieveByPk($r_id);
print "Checking ability to fetch native unix timestamp: ";
print boolTest($r2->getReviewDate(null) === $control);
print "Checking ability to use date() formatter: ";
print boolTest($r2->getReviewDate('n-j-Y') === '2-29-2004');
print "[FYI] Here's the strftime() formatter for current locale: " . $r2->getReviewDate('%x') . "\n";
} catch (Exception $e) {
die("Error test date/time: " . $e->__toString());
}
// Handle BLOB/CLOB Columns
// ------------------------
try {
print "\nTesting the BLOB/CLOB columns\n";
print "-------------------------------\n\n";
$blob_path = dirname(__FILE__) . '/etc/lob/tin_drum.gif';
$blob2_path = dirname(__FILE__) . '/etc/lob/propel.gif';
$clob_path = dirname(__FILE__) . '/etc/lob/tin_drum.txt';
$m1 = new Media();
$m1->setBook($phoenix);
$m1->setCoverImage(file_get_contents($blob_path));
$m1->setExcerpt(file_get_contents($clob_path));
$m1->save();
$m1_id = $m1->getId();
print "Added Media collection [id = $m1_id].\n";
print "Looking for just-created mediat by PK (=$m1_id) .... ";
try {
$m1_lookup = MediaPeer::retrieveByPk($m1_id);
} catch (Exception $e) {
print "ERROR!\n";
die("Error retrieving media by pk: " . $e->__toString());
}
if ($m1_lookup) {
print "FOUND!\n";
} else {
print "NOT FOUND :(\n";
die("Couldn't find just-created media item: media_id = $m1_id");
}
print "Making sure BLOB was correctly updated: ";
print boolTest( $m1_lookup->getCoverImage()->getContents() === file_get_contents($blob_path));
print "Making sure CLOB was correctly updated: ";
print boolTest((string) $m1_lookup->getExcerpt()->getContents() === file_get_contents($clob_path));
// now update the BLOB column and save it & check the results
$b = $m1_lookup->getCoverImage();
$b->setContents(file_get_contents($blob2_path));
$m1_lookup->setCoverImage($b);
$m1_lookup->save();
try {
$m2_lookup = MediaPeer::retrieveByPk($m1_id);
} catch (Exception $e) {
print "ERROR!\n";
die("Error retrieving media by pk: " . $e->__toString());
}
print "Making sure BLOB was correctly overwritten: ";
print boolTest($m2_lookup->getCoverImage()->getContents() === file_get_contents($blob2_path));
} catch (Exception $e) {
die("Error doing blob/clob updates: " . $e->__toString());
}
// Test Validators
// ---------------
try {
print "\nTesting the column validators\n";
print "-----------------------------\n\n";
$bk1 = new Book();
$bk1->setTitle("12345"); // min length is 10
$ret = $bk1->validate();
print "Making sure validation failed: ";
print boolTest($ret !== true);
print "Making sure 1 validation message was returned: ";
print boolTest(count($ret) === 1);
print "Making sure expected validation message was returned: ";
$el = array_shift($ret);
print boolTest(stripos($el->getMessage(), "must be more than") !== false);
print "\n(Unique validator)\n";
$bk2 = new Book();
$bk2->setTitle("Don Juan");
$ret = $bk2->validate();
print "Making sure validation failed: ";
print boolTest($ret !== true);
print "Making sure 1 validation message was returned: ";
print boolTest(count($ret) === 1);
print "Making sure expected validation message was returned: ";
$el = array_shift($ret);
print boolTest(stripos($el->getMessage(), "Book title already in database.") !== false);
print "\n(Now trying some more complex validation.)\n";
$auth1 = new Author();
$auth1->setFirstName("Hans");
// last name required; will fail
$bk1->setAuthor($auth1);
$rev1 = new Review();
$rev1->setReviewDate("08/09/2001");
// will fail: reviewed_by column required
$bk1->addReview($rev1);
$ret2 = $bk1->validate();
print "Making sure 6 validation messages were returned: ";
print boolTest(count($ret2) === 6);
print "Making sure correct columns failed: ";
print boolTest(array_keys($ret2) === array(
AuthorPeer::LAST_NAME,
AuthorPeer::EMAIL,
AuthorPeer::AGE,
BookPeer::TITLE,
ReviewPeer::REVIEWED_BY,
ReviewPeer::STATUS
));
$bk2 = new Book();
$bk2->setTitle("12345678901"); // passes
$auth2 = new Author();
$auth2->setLastName("Blah"); //passes
$auth2->setEmail("some@body.com"); //passes
$auth2->setAge(50); //passes
$bk2->setAuthor($auth2);
$rev2 = new Review();
$rev2->setReviewedBy("Me!"); // passes
$rev2->setStatus("new"); // passes
$bk2->addReview($rev2);
$ret3 = $bk2->validate();
print "Making sure complex validation can pass: ";
print boolTest($ret3 === true);
} catch (Exception $e) {
die("Error doing validation tests: " . $e->__toString());
}
// Test doCount()
//
try {
print "\nTesting doCount() functionality\n";
print "-------------------------------\n\n";
$c = new Criteria();
$records = BookPeer::doSelect($c);
$count = BookPeer::doCount($c);
print "Making sure correct number of results: ";
print boolTest(count($records) === $count);
} catch (Exception $e) {
die("Error deleting book: " . $e->__toString());
}
// Test many-to-many relationships
// ---------------
// Exercises the Book <-> BookClubList association through the BookListRel
// cross-reference table. Relies on $phoenix and $dj created earlier in this
// script; $blc1 and $blc2 are reused by the cleanup section below.
try {
    print "\nTesting many-to-many relationships\n";
    print "-----------------------------\n\n";

    // init book club list 1 with 2 books
    $blc1 = new BookClubList();
    $blc1->setGroupLeader("Crazyleggs");
    $blc1->setTheme("Happiness");
    $brel1 = new BookListRel();
    $brel1->setBook($phoenix);
    $brel2 = new BookListRel();
    $brel2->setBook($dj);
    $blc1->addBookListRel($brel1);
    $blc1->addBookListRel($brel2);
    // Saving the list should cascade to the two BookListRel rows.
    $blc1->save();
    print "Making sure BookClubList 1 was saved: ";
    print boolTest(!is_null($blc1->getId()));

    // init book club list 2 with 1 book
    $blc2 = new BookClubList();
    $blc2->setGroupLeader("John Foo");
    $blc2->setTheme("Default");
    $brel3 = new BookListRel();
    $brel3->setBook($phoenix);
    $blc2->addBookListRel($brel3);
    $blc2->save();
    print "Making sure BookClubList 2 was saved: ";
    print boolTest(!is_null($blc2->getId()));

    // re-fetch books and lists from db to be sure that nothing is cached
    $crit = new Criteria();
    $crit->add(BookPeer::ID, $phoenix->getId());
    $phoenix = BookPeer::doSelectOne($crit);
    print "Making sure book 'phoenix' has been re-fetched from db: ";
    print boolTest(!empty($phoenix));

    $crit = new Criteria();
    $crit->add(BookClubListPeer::ID, $blc1->getId());
    $blc1 = BookClubListPeer::doSelectOne($crit);
    print "Making sure BookClubList 1 has been re-fetched from db: ";
    print boolTest(!empty($blc1));

    $crit = new Criteria();
    $crit->add(BookClubListPeer::ID, $blc2->getId());
    $blc2 = BookClubListPeer::doSelectOne($crit);
    print "Making sure BookClubList 2 has been re-fetched from db: ";
    print boolTest(!empty($blc2));

    // 'phoenix' is in both lists, so it should have two cross-reference rows.
    $relCount = $phoenix->countBookListRels();
    print "Making sure book 'phoenix' has 2 BookListRels: ";
    print boolTest($relCount == 2);

    $relCount = $blc1->countBookListRels();
    print "Making sure BookClubList 1 has 2 BookListRels: ";
    print boolTest($relCount == 2);

    $relCount = $blc2->countBookListRels();
    print "Making sure BookClubList 2 has 1 BookListRel: ";
    print boolTest($relCount == 1);
} catch (Exception $e) {
    die("Error doing many-to-many relationships tests: " . $e->__toString());
}
// Test multiple databases
// ---------------
try {
    print "\nTesting multiple databases\n";
    print "-----------------------------\n\n";

    // Write a record to the secondary (log) database to prove that a second
    // datasource connection is configured and usable.
    $logEntry = new BookstoreLog();
    $logEntry->setIdent('bookstore-packaged-test');
    $logEntry->setPriority('debug');
    $logEntry->setTime(time());
    $logEntry->setMessage('We are testing to write something to the log database ...');
    $logEntry->save();

    $logEntryId = $logEntry->getId();
    print "Making sure BookstoreLog was saved: ";
    print boolTest(!empty($logEntryId));
} catch (Exception $e) {
    die("Error doing multiple databases tests: " . $e->__toString());
}
// Cleanup (tests DELETE)
// ----------------------
// Removes every fixture created earlier, exercising both instance delete()
// and static Peer::doDelete() code paths. Depends on many variables defined
// earlier in the script ($phoenix_id, $td, $grass, $vintage, $rowling, ...).
try {
    print "\nRemoving books that were just created\n";
    print "-------------------------------------\n\n";

    // Look up one of the previously created books by primary key.
    print "First finding book by PK (=$phoenix_id) .... ";
    try {
        $hp = BookPeer::retrieveByPk($phoenix_id);
    } catch (Exception $e) {
        print "ERROR!\n";
        die("Error retrieving by pk: " . $e->__toString());
    }
    if ($hp) {
        print "FOUND!\n";
    } else {
        print "NOT FOUND :(\n";
        die("Couldn't find just-created book: book_id = $phoenix_id");
    }

    print "Attempting to delete [multi-table] by found pk: ";
    $c = new Criteria();
    $c->add(BookPeer::ID, $hp->getId());
    // The only way for cascading to work currently
    // is to specify the author_id and publisher_id (i.e. the fkeys
    // have to be in the criteria).
    // NOTE(review): AuthorPeer::ID and PublisherPeer::ID are compared
    // against the *book's* id here — presumably the seed data makes these
    // ids coincide; verify before reusing this pattern.
    $c->add(AuthorPeer::ID, $hp->getId());
    $c->add(PublisherPeer::ID, $hp->getId());
    $c->setSingleRecord(true);
    BookPeer::doDelete($c);
    print boolTest(true);

    // After the multi-table delete, each table should be down to 3 rows.
    print "Checking to make sure correct records were removed.\n";
    print "\tFrom author table: ";
    $res = AuthorPeer::doSelect(new Criteria());
    print boolTest(count($res) === 3);
    print "\tFrom publisher table: ";
    $res2 = PublisherPeer::doSelect(new Criteria());
    print boolTest(count($res2) === 3);
    print "\tFrom book table: ";
    $res3 = BookPeer::doSelect(new Criteria());
    print boolTest(count($res3) === 3);

    // Delete several books at once with an OR'ed ISBN criterion.
    print "Attempting to delete books by complex criteria: ";
    $c = new Criteria();
    $cn = $c->getNewCriterion(BookPeer::ISBN, "043935806X");
    $cn->addOr($c->getNewCriterion(BookPeer::ISBN, "0380977427"));
    $cn->addOr($c->getNewCriterion(BookPeer::ISBN, "0140422161"));
    $c->add($cn);
    BookPeer::doDelete($c);
    print boolTest(true);

    // Remove the remaining fixtures one at a time.
    print "Attempting to delete book [id = $td_id]: ";
    $td->delete();
    print boolTest(true);
    print "Attempting to delete author [id = $stephenson_id]: ";
    AuthorPeer::doDelete($stephenson_id);
    print boolTest(true);
    print "Attempting to delete author [id = $byron_id]: ";
    AuthorPeer::doDelete($byron_id);
    print boolTest(true);
    print "Attempting to delete author [id = $grass_id]: ";
    $grass->delete();
    print boolTest(true);
    print "Attempting to delete publisher [id = $morrow_id]: ";
    PublisherPeer::doDelete($morrow_id);
    print boolTest(true);
    print "Attempting to delete publisher [id = $penguin_id]: ";
    PublisherPeer::doDelete($penguin_id);
    print boolTest(true);
    print "Attempting to delete publisher [id = $vintage_id]: ";
    $vintage->delete();
    print boolTest(true);

    // These have to be deleted manually also since we have onDelete
    // set to SETNULL in the foreign keys in book. Is this correct?
    print "Attempting to delete author [lastname = 'Rowling']: ";
    $rowling->delete();
    print boolTest(true);
    print "Attempting to delete publisher [lastname = 'Scholastic']: ";
    $scholastic->delete();
    print boolTest(true);
    print "Attempting to delete BookClubList 1: ";
    $blc1->delete();
    print boolTest(true);
    print "Attempting to delete BookClubList 2: ";
    $blc2->delete();
    print boolTest(true);
} catch (Exception $e) {
    die("Error deleting book: " . $e->__toString());
}
// Check again to make sure that tables are empty
// ----------------------------------------------
check_tables_empty();

// Stop the wall-clock timer started earlier in the script and print the
// elapsed-time summary.
$timer->stop();
print $timer->display();
| {
"content_hash": "1b0acf24afee243693fc348e1cb6ac59",
"timestamp": "",
"source": "github",
"line_count": 805,
"max_line_length": 135,
"avg_line_length": 27.034782608695654,
"alnum_prop": 0.6326333685613197,
"repo_name": "alfonsojimenez/propel_15",
"id": "7455e9011156ece941f6cb5afc9dff39cc413929",
"size": "21971",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "test/bookstore-packaged-test.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "2802259"
},
{
"name": "Shell",
"bytes": "4134"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "5d0c66ddf57f1bb6680c6c345154c9f9",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.307692307692308,
"alnum_prop": 0.6940298507462687,
"repo_name": "mdoering/backbone",
"id": "b3cf8316acdcca030375286cfe82296996474195",
"size": "183",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Chromista/Ochrophyta/Phaeophyceae/Scytothamnales/Splachnidiaceae/Splachnidium/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package org.apache.cassandra.serializers;
import java.nio.ByteBuffer;
import java.util.LinkedHashMap;
import java.util.Map.Entry;
import org.apache.cassandra.utils.ByteBufferUtil;
/**
 * Serializer for user-defined types (UDTs): a sequence of named, typed
 * fields, each encoded as a 4-byte length followed by that many bytes.
 * A negative length encodes a null field value.
 */
public class UserTypeSerializer extends BytesSerializer
{
    /** Field name -> serializer, in declaration order. */
    public final LinkedHashMap<String, TypeSerializer<?>> fields;

    public UserTypeSerializer(LinkedHashMap<String, TypeSerializer<?>> fields)
    {
        this.fields = fields;
    }

    @Override
    public void validate(ByteBuffer bytes) throws MarshalException
    {
        ByteBuffer in = bytes.duplicate();
        int fieldIndex = -1; // incremented at the top of the loop, so the first field is index 0
        for (Entry<String, TypeSerializer<?>> field : fields.entrySet())
        {
            fieldIndex++;

            // Fewer fields than declared is legal: it supports field addition.
            if (!in.hasRemaining())
                return;

            if (in.remaining() < 4)
                throw new MarshalException(String.format("Not enough bytes to read size of %dth field %s", fieldIndex, field.getKey()));

            int length = in.getInt();
            // A negative length encodes a null value for this field.
            if (length < 0)
                continue;

            if (in.remaining() < length)
                throw new MarshalException(String.format("Not enough bytes to read %dth field %s", fieldIndex, field.getKey()));

            ByteBuffer value = ByteBufferUtil.readBytes(in, length);
            try
            {
                field.getValue().validate(value);
            }
            catch (MarshalException e)
            {
                throw new MarshalException(String.format("Failure validating the %dth field %s; %s", fieldIndex, field.getKey(), e.getMessage()), e);
            }
        }

        // We're allowed to get less fields than declared, but not more.
        if (in.hasRemaining())
            throw new MarshalException("Invalid remaining data after end of UDT value");
    }
}
| {
"content_hash": "54ee15c8f9496e8afef0f03e4df09f8b",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 140,
"avg_line_length": 34.56896551724138,
"alnum_prop": 0.6064837905236907,
"repo_name": "krummas/cassandra",
"id": "7af6c4a78158f990c912653c9d6c1cabb2262873",
"size": "2810",
"binary": false,
"copies": "1",
"ref": "refs/heads/trunk",
"path": "src/java/org/apache/cassandra/serializers/UserTypeSerializer.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "801"
},
{
"name": "Batchfile",
"bytes": "38885"
},
{
"name": "GAP",
"bytes": "88488"
},
{
"name": "HTML",
"bytes": "264240"
},
{
"name": "Java",
"bytes": "24521342"
},
{
"name": "Lex",
"bytes": "10151"
},
{
"name": "PowerShell",
"bytes": "39042"
},
{
"name": "Python",
"bytes": "524753"
},
{
"name": "Shell",
"bytes": "93545"
}
],
"symlink_target": ""
} |
=========
Packstack
=========
SYNOPSIS
========
packstack [options]
DESCRIPTION
===========
Packstack is a utility that uses puppet modules to install OpenStack. It can be used to install each openstack service on separate servers, all on one server or any combination of these. There are 3 ways that Packstack can be run.
- packstack
- packstack [options]
- packstack --gen-answer-file=<file> / packstack --answer-file=<file>
The third option allows the user to generate a default answer file, edit the default options and finally run Packstack a second time using this answer file. This is the easiest way to run Packstack and the one that will be documented here. When <file> is created the OPTIONS below will be contained and can be edited by the user.
OPTIONS
=======
Global Options
--------------
**CONFIG_GLANCE_INSTALL**
Set to 'y' if you would like Packstack to install Glance ['y', 'n'].
**CONFIG_CINDER_INSTALL**
Set to 'y' if you would like Packstack to install Cinder ['y', 'n'].
**CONFIG_NOVA_INSTALL**
Set to 'y' if you would like Packstack to install Nova ['y', 'n'].
**CONFIG_HORIZON_INSTALL**
Set to 'y' if you would like Packstack to install Horizon ['y', 'n'].
**CONFIG_SWIFT_INSTALL**
Set to 'y' if you would like Packstack to install Swift ['y', 'n'].
**CONFIG_CLIENT_INSTALL**
Set to 'y' if you would like Packstack to install the OpenStack Client packages. An admin "rc" file will also be installed ['y', 'n'].
**CONFIG_NTP_SERVERS**
Comma separated list of NTP servers. Leave blank if Packstack should not install ntpd on instances.
**CONFIG_NAGIOS_INSTALL**
Set to 'y' if you would like Packstack to install Nagios to monitor openstack hosts ['y', 'n'].
**CONFIG_CEILOMETER_INSTALL**
Set to 'y' if you would like Packstack to install OpenStack Metering (Ceilometer).
**CONFIG_HEAT_INSTALL**
Set to 'y' if you would like Packstack to install OpenStack Orchestration (Heat).
**CONFIG_NEUTRON_INSTALL**
Set to 'y' if you would like Packstack to install OpenStack Networking (Neutron).
**CONFIG_MYSQL_INSTALL**
Set to 'y' if you would like Packstack to install MySQL.
SSH Configs
------------
**CONFIG_SSH_KEY**
Path to a Public key to install on servers. If a usable key has not been installed on the remote servers the user will be prompted for a password and this key will be installed so the password will not be required again.
MySQL Config parameters
-----------------------
**CONFIG_MYSQL_HOST**
The IP address of the server on which to install MySQL.
**CONFIG_MYSQL_USER**
Username for the MySQL admin user.
**CONFIG_MYSQL_PW**
Password for the MySQL admin user.
QPID Config parameters
----------------------
**CONFIG_QPID_HOST**
The IP address of the server on which to install the QPID service.
**CONFIG_QPID_ENABLE_SSL**
Enable SSL for the QPID service.
**CONFIG_QPID_NSS_CERTDB_PW**
The password for the NSS certificate database of the QPID service.
**CONFIG_QPID_SSL_PORT**
The port in which the QPID service listens to SSL connections.
**CONFIG_QPID_SSL_CERT_FILE**
The filename of the certificate that the QPID service is going to use.
**CONFIG_QPID_SSL_KEY_FILE**
The filename of the private key that the QPID service is going to use.
**CONFIG_QPID_SSL_SELF_SIGNED**
Auto Generates self signed SSL certificate and key.
Keystone Config parameters
--------------------------
**CONFIG_KEYSTONE_HOST**
The IP address of the server on which to install Keystone.
**CONFIG_KEYSTONE_DB_PW**
The password to use for the Keystone to access DB.
**CONFIG_KEYSTONE_ADMIN_TOKEN**
The token to use for the Keystone service api.
**CONFIG_KEYSTONE_ADMIN_PW**
The password to use for the Keystone admin user.
**CONFIG_KEYSTONE_DEMO_PW**
The password to use for the Keystone demo user
**CONFIG_KEYSTONE_TOKEN_FORMAT**
Keystone token format. Use either UUID or PKI.
Glance Config parameters
------------------------
**CONFIG_GLANCE_HOST**
The IP address of the server on which to install Glance.
**CONFIG_GLANCE_DB_PW**
The password to use for the Glance to access DB.
**CONFIG_GLANCE_KS_PW**
The password to use for the Glance to authenticate with Keystone.
Cinder Config parameters
------------------------
**CONFIG_CINDER_HOST**
The IP address of the server on which to install Cinder.
**CONFIG_CINDER_DB_PW**
The password to use for the Cinder to access DB.
**CONFIG_CINDER_KS_PW**
The password to use for the Cinder to authenticate with Keystone.
**CONFIG_CINDER_BACKEND**
The Cinder backend to use ['lvm', 'gluster', 'nfs'].
Cinder volume create Config parameters
--------------------------------------
**CONFIG_CINDER_VOLUMES_CREATE**
Create Cinder's volumes group ['y', 'n'].
Cinder volume size Config parameters
------------------------------------
**CONFIG_CINDER_VOLUMES_SIZE**
Cinder's volumes group size.
Cinder gluster Config parameters
--------------------------------
**CONFIG_CINDER_GLUSTER_MOUNTS**
A single or comma separated list of gluster volume shares.
Cinder NFS Config parameters
----------------------------
**CONFIG_CINDER_NFS_MOUNTS**
A single or comma separated list of NFS exports to mount.
Nova Options
------------
**CONFIG_NOVA_API_HOST**
The IP address of the server on which to install the Nova API service.
**CONFIG_NOVA_CERT_HOST**
The IP address of the server on which to install the Nova Cert service.
**CONFIG_NOVA_VNCPROXY_HOST**
The IP address of the server on which to install the Nova VNC proxy.
**CONFIG_NOVA_COMPUTE_HOSTS**
A comma separated list of IP addresses on which to install the Nova Compute services.
**CONFIG_NOVA_COMPUTE_PRIVIF**
Private interface for Flat DHCP on the Nova compute servers.
**CONFIG_NOVA_NETWORK_HOSTS**
List of IP address of the servers on which to install the Nova Network service.
**CONFIG_NOVA_DB_PW**
The password to use for the Nova to access DB.
**CONFIG_NOVA_KS_PW**
The password to use for the Nova to authenticate with Keystone.
**CONFIG_NOVA_NETWORK_PUBIF**
Public interface on the Nova network server.
**CONFIG_NOVA_NETWORK_PRIVIF**
Private interface for Flat DHCP on the Nova network server.
**CONFIG_NOVA_NETWORK_FIXEDRANGE**
IP Range for Flat DHCP ['^([\\d]{1,3}\\.){3}[\\d]{1,3}/\\d\\d?$'].
**CONFIG_NOVA_NETWORK_FLOATRANGE**
IP Range for Floating IP's ['^([\\d]{1,3}\\.){3}[\\d]{1,3}/\\d\\d?$'].
**CONFIG_NOVA_SCHED_HOST**
The IP address of the server on which to install the Nova Scheduler service.
**CONFIG_NOVA_SCHED_CPU_ALLOC_RATIO**
The overcommitment ratio for virtual to physical CPUs. Set to 1.0 to disable CPU overcommitment.
**CONFIG_NOVA_SCHED_RAM_ALLOC_RATIO**
The overcommitment ratio for virtual to physical RAM. Set to 1.0 to disable RAM overcommitment.
**CONFIG_NOVA_CONDUCTOR_HOST**
The IP address of the server on which to install the Nova Conductor service.
**CONFIG_NOVA_NETWORK_AUTOASSIGNFLOATINGIP**
Automatically assign a floating IP to new instances.
**CONFIG_NOVA_NETWORK_DEFAULTFLOATINGPOOL**
Name of the default floating pool to which the specified floating ranges are added to.
**CONFIG_NOVA_NETWORK_MANAGER**
Nova network manager.
**CONFIG_NOVA_NETWORK_NUMBER**
Number of networks to support.
**CONFIG_NOVA_NETWORK_SIZE**
Number of addresses in each private subnet.
**CONFIG_NOVA_NETWORK_VLAN_START**
First VLAN for private networks.
NOVACLIENT Config parameters
----------------------------
**CONFIG_OSCLIENT_HOST**
The IP address of the server on which to install the OpenStack client packages. An admin "rc" file will also be installed.
OpenStack Horizon Config parameters
-----------------------------------
**CONFIG_HORIZON_HOST**
The IP address of the server on which to install Horizon.
**CONFIG_HORIZON_SSL**
To set up Horizon communication over https set this to "y" ['y', 'n'].
**CONFIG_SSL_CERT**
PEM encoded certificate to be used for ssl on the https server, leave blank if one should be generated, this certificate should not require a passphrase.
**CONFIG_SSL_KEY**
Keyfile corresponding to the certificate if one was entered.
OpenStack Swift Config parameters
---------------------------------
**CONFIG_SWIFT_PROXY_HOSTS**
The IP address on which to install the Swift proxy service.
**CONFIG_SWIFT_KS_PW**
The password to use for the Swift to authenticate with Keystone.
**CONFIG_SWIFT_STORAGE_HOSTS**
A comma separated list of IP addresses on which to install the Swift Storage services, each entry should take the format <ipaddress>[/dev], for example 127.0.0.1/vdb will install /dev/vdb on 127.0.0.1 as a swift storage device(packstack does not create the filesystem, you must do this first), if /dev is omitted Packstack will create a loopback device for a test setup.
**CONFIG_SWIFT_STORAGE_ZONES**
Number of swift storage zones, this number MUST be no bigger than the number of storage devices configured.
**CONFIG_SWIFT_STORAGE_REPLICAS**
Number of swift storage replicas, this number MUST be no bigger than the number of storage zones configured.
**CONFIG_SWIFT_STORAGE_FSTYPE**
FileSystem type for storage nodes ['xfs', 'ext4'].
**CONFIG_SWIFT_HASH**
Shared secret for Swift.
Server Prepare Configs
----------------------
**CONFIG_USE_EPEL**
Install OpenStack from EPEL. If set to "y" EPEL will be installed on each server ['y', 'n'].
**CONFIG_REPO**
A comma separated list of URLs to any additional yum repositories to install.
**CONFIG_RH_USER**
To subscribe each server with Red Hat subscription manager, include this with **CONFIG_RH_PW**.
**CONFIG_RH_PW**
To subscribe each server with Red Hat subscription manager, include this with **CONFIG_RH_USER**.
**CONFIG_RH_BETA_REPO**
To subscribe each server with Red Hat subscription manager, to Red Hat Beta RPM's ['y', 'n'].
**CONFIG_SATELLITE_URL**
To subscribe each server with RHN Satellite, fill in the Satellite's URL here. Note that either the Satellite's username/password or an activation key has to be provided.
RHN Satellite config
--------------------
**CONFIG_SATELLITE_USER**
Username to access RHN Satellite.
**CONFIG_SATELLITE_PW**
Password to access RHN Satellite.
**CONFIG_SATELLITE_AKEY**
Activation key for subscription to RHN Satellite.
**CONFIG_SATELLITE_CACERT**
Specify a path or URL to a SSL CA certificate to use.
**CONFIG_SATELLITE_PROFILE**
If required specify the profile name that should be used as an identifier for the system in RHN Satellite.
**CONFIG_SATELLITE_FLAGS**
Comma separated list of flags passed to rhnreg_ks. Valid flags are: novirtinfo, norhnsd, nopackages ['novirtinfo', 'norhnsd', 'nopackages'].
**CONFIG_SATELLITE_PROXY**
Specify a HTTP proxy to use with RHN Satellite.
RHN Satellite proxy config
--------------------------
**CONFIG_SATELLITE_PROXY_USER**
Specify a username to use with an authenticated HTTP proxy.
**CONFIG_SATELLITE_PROXY_PW**
Specify a password to use with an authenticated HTTP proxy.
Nagios Config parameters
------------------------
**CONFIG_NAGIOS_HOST**
The IP address of the server on which to install the Nagios server.
**CONFIG_NAGIOS_PW**
The password of the nagiosadmin user on the Nagios server.
Ceilometer Config Parameters
----------------------------
**CONFIG_CEILOMETER_HOST**
The IP address of the server on which to install Ceilometer.
**CONFIG_CEILOMETER_SECRET**
Secret key for signing metering messages.
**CONFIG_CEILOMETER_KS_PW**
The password to use for Ceilometer to authenticate with Keystone.
Heat Config Parameters
----------------------
**CONFIG_HEAT_HOST**
The IP address of the server on which to install Heat service.
**CONFIG_HEAT_DB_PW**
The password used by Heat user to authenticate against MySQL.
**CONFIG_HEAT_KS_PW**
The password to use for the Heat to authenticate with Keystone.
**CONFIG_HEAT_CLOUDWATCH_INSTALL**
Set to 'y' if you would like Packstack to install Heat CloudWatch API.
**CONFIG_HEAT_CFN_INSTALL**
Set to 'y' if you would like Packstack to install Heat CloudFormation API.
**CONFIG_HEAT_CLOUDWATCH_HOST**
The IP address of the server on which to install Heat CloudWatch API service.
**CONFIG_HEAT_CFN_HOST**
The IP address of the server on which to install Heat CloudFormation API.
Neutron Config Parameters
-------------------------
**CONFIG_NEUTRON_SERVER_HOST**
The IP addresses of the server on which to install the Neutron server.
**CONFIG_NEUTRON_KS_PW**
The password to use for Neutron to authenticate with Keystone.
**CONFIG_NEUTRON_DB_PW**
The password to use for Neutron to access DB.
**CONFIG_NEUTRON_L3_HOSTS**
A comma separated list of IP addresses on which to install Neutron L3 agent.
**CONFIG_NEUTRON_L3_EXT_BRIDGE**
The name of the bridge that the Neutron L3 agent will use for external traffic, or 'provider' if using provider networks.
**CONFIG_NEUTRON_DHCP_HOSTS**
A comma separated list of IP addresses on which to install Neutron DHCP agent.
**CONFIG_NEUTRON_L2_PLUGIN**
The name of the L2 plugin to be used with Neutron.
**CONFIG_NEUTRON_METADATA_HOSTS**
A comma separated list of IP addresses on which to install Neutron metadata agent.
**CONFIG_NEUTRON_METADATA_PW**
The password for the Neutron metadata agent.
**CONFIG_NEUTRON_LB_TENANT_NETWORK_TYPE**
The type of network to allocate for tenant networks (eg. vlan, local, gre).
**CONFIG_NEUTRON_LB_VLAN_RANGES**
A comma separated list of VLAN ranges for the Neutron linuxbridge plugin (eg. physnet1:1:4094,physnet2,physnet3:3000:3999).
**CONFIG_NEUTRON_LB_INTERFACE_MAPPINGS**
A comma separated list of interface mappings for the Neutron linuxbridge plugin (eg. physnet1:br-eth1,physnet2:br-eth2,physnet3:br-eth3).
**CONFIG_NEUTRON_OVS_TENANT_NETWORK_TYPE**
Type of network to allocate for tenant networks (eg. vlan, local, gre).
**CONFIG_NEUTRON_OVS_VLAN_RANGES**
A comma separated list of VLAN ranges for the Neutron openvswitch plugin (eg. physnet1:1:4094,physnet2,physnet3:3000:3999).
**CONFIG_NEUTRON_OVS_BRIDGE_MAPPINGS**
A comma separated list of bridge mappings for the Neutron openvswitch plugin (eg. physnet1:br-eth1,physnet2:br-eth2,physnet3:br-eth3).
**CONFIG_NEUTRON_OVS_BRIDGE_IFACES**
A comma separated list of colon-separated OVS bridge:interface pairs (e.g. br-eth1:eth1).
**CONFIG_NEUTRON_OVS_TUNNEL_RANGES**
A comma separated list of tunnel ranges for the Neutron openvswitch plugin.
**CONFIG_NEUTRON_OVS_TUNNEL_IF**
Override the IP used for GRE tunnels on this hypervisor to the IP found on the specified interface (defaults to the HOST IP).
**CONFIG_NEUTRON_ML2_TYPE_DRIVERS**
A comma separated list of network type (eg: local, flat, vlan, gre, vxlan).
**CONFIG_NEUTRON_ML2_TENANT_NETWORK_TYPES**
A comma separated ordered list of network_types to allocate as tenant networks (eg: local, flat, vlan, gre, vxlan). The value 'local' is only useful for single-box testing but provides no connectivity between hosts.
**CONFIG_NEUTRON_ML2_SM_DRIVERS**
A comma separated ordered list of networking mechanism driver entrypoints to be loaded from the **neutron.ml2.mechanism_drivers** namespace (eg: logger, test, linuxbridge, openvswitch, hyperv, ncs, arista, cisco_nexus, l2population).
**CONFIG_NEUTRON_ML2_FLAT_NETWORKS**
A comma separated list of physical_network names with which flat networks can be created. Use * to allow flat networks with arbitrary physical_network names.
**CONFIG_NEUTRON_ML2_VLAN_RANGES**
A comma separated list of **<physical_network>:<vlan_min>:<vlan_max>** or **<physical_network>** specifying physical_network names usable for VLAN provider and tenant networks, as well as ranges of VLAN tags on each available for allocation to tenant networks.
**CONFIG_NEUTRON_ML2_TUNNEL_ID_RANGES**
A comma separated list of **<tun_min>:<tun_max>** tuples enumerating ranges of GRE tunnel IDs that are available for tenant network allocation. Should be an array with **tun_max +1 - tun_min > 1000000**.
**CONFIG_NEUTRON_ML2_VXLAN_GROUP**
Multicast group for VXLAN. If configured, enables sending all broadcast traffic to this multicast group. When left unconfigured, multicast VXLAN mode is disabled. Should be a **Multicast IP (v4 or v6)** address.
**CONFIG_NEUTRON_ML2_VNI_RANGES**
A comma separated list of **<vni_min>:<vni_max>** tuples enumerating ranges of VXLAN VNI IDs that are available for tenant network allocation. Min value is 0 and Max value is 16777215.
Provision Config Parameters
---------------------------
**CONFIG_PROVISION_ALL_IN_ONE_OVS_BRIDGE**
Whether to configure the ovs external bridge in an all-in-one deployment.
**CONFIG_PROVISION_DEMO**
Whether to provision for demo usage and testing.
**CONFIG_PROVISION_DEMO_FLOATRANGE**
The CIDR network address for the floating IP subnet.
**CONFIG_PROVISION_TEMPEST**
Whether to configure tempest for testing.
**CONFIG_PROVISION_TEMPEST_REPO_REVISION**
The revision of the tempest git repository to use.
**CONFIG_PROVISION_TEMPEST_REPO_URI**
The uri of the tempest git repository to use.
Log files and Debug info
------------------------
The location of the log files and generated puppet manifests are in the /var/tmp/packstack directory under a directory named by the date in which packstack was run and a random string (e.g. /var/tmp/packstack/20131022-204316-Bf3Ek2). Inside, we find a manifest directory and the openstack-setup.log file; puppet manifests and a log file for each one are found inside the manifest directory.
In case debugging info is needed while running packstack the -d switch will make it write more detailed information about the installation.
Examples:
If we need an allinone debug session:
packstack -d --allinone
If we need a answer file to tailor it and then debug:
packstack --gen-answer-file=ans.txt
packstack -d --answer-file=ans.txt
SOURCE
======
* `packstack https://github.com/stackforge/packstack`
* `puppet modules https://github.com/puppetlabs and https://github.com/packstack`
| {
"content_hash": "e002d432a1ffa0e5de72ac498dcbf004",
"timestamp": "",
"source": "github",
"line_count": 518,
"max_line_length": 390,
"avg_line_length": 35.237451737451735,
"alnum_prop": 0.715334465567304,
"repo_name": "reelai/packstack",
"id": "b7f9fe4964dbe447b6352392ba1eb30a17d215b7",
"size": "18255",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/packstack.rst",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "4ef2098291a6d8c4524f78c6420baf28",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.307692307692308,
"alnum_prop": 0.6940298507462687,
"repo_name": "mdoering/backbone",
"id": "f58ef73e6ecfb61c31dc4db266d28d13d41e5574",
"size": "191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Asparagaceae/Camassia/Camassia quamash/Camassia quamash maxima/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package com.dgwave.car.maven;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.component.annotations.Requirement;
import org.codehaus.plexus.logging.Logger;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.repository.WorkspaceReader;
import org.eclipse.aether.repository.WorkspaceRepository;
import com.dgwave.car.common.CeylonUtil;
/**
 * Workspace reader that resolves artifacts from the local Ceylon system
 * repository. For future IDE support.
 *
 * @author Akber Choudhry
 */
@Component (role = WorkspaceReader.class, hint = "ide")
public class CeylonRepoReader implements WorkspaceReader {

    /** Plexus logger; may be null when used outside a container. */
    @Requirement
    private Logger logger;

    /**
     * Identifies the repository this reader serves.
     *
     * @return a workspace repository keyed on the Ceylon configuration
     */
    @Override
    public WorkspaceRepository getRepository() {
        return new WorkspaceRepository("ceylon", "dotCeylonConfig");
    }

    /**
     * Attempts to locate the given artifact in the Ceylon system repository.
     * Only artifacts whose 'type' property is 'car' or 'ceylon-jar' are
     * considered; 'ceylon-jar' is mapped to a plain 'jar' on disk.
     *
     * @param artifact the artifact to resolve
     * @return the resolved file, or null when the artifact is not present
     */
    @Override
    public File findArtifact(final Artifact artifact) {
        String type = artifact.getProperty("type", "jar");
        if ("ceylon-jar".equals(type) || "car".equals(type)) {
            if ("ceylon-jar".equals(type)) {
                type = "jar";
            }
            File art = new File(CeylonUtil.ceylonSystemFullPath(artifact, type));
            if (art.isFile()) {
                if (logger != null) {
                    logger.info("Resolved from Ceylon repo: " + artifact);
                }
                // Bug fix: the original called artifact.setFile(art) here and
                // discarded the result. Aether's Artifact is immutable —
                // setFile() returns a NEW instance — so that call had no
                // effect and has been removed.
                return art;
            }
        }
        return null;
    }

    /**
     * Reports the versions of the artifact available in this workspace.
     *
     * @param artifact the artifact to query
     * @return a single-element list containing the artifact's own version
     */
    @Override
    public List<String> findVersions(final Artifact artifact) {
        return Arrays.asList(new String[]{artifact.getVersion()});
    }
}
| {
"content_hash": "3a810a7d11c81a11f49a9525a9ddb5bb",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 108,
"avg_line_length": 31.59090909090909,
"alnum_prop": 0.6306954436450839,
"repo_name": "dgwave/ceylon-maven-plugin",
"id": "080447668e957312e6f1ea4182490fbbd237e79d",
"size": "2085",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/dgwave/car/maven/CeylonRepoReader.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "82281"
}
],
"symlink_target": ""
} |
<?php
/**
* @package The_SEO_Framework\Classes
*/
namespace The_SEO_Framework;
defined( 'ABSPATH' ) or die;
/**
* The SEO Framework plugin
* Copyright (C) 2015 - 2016 Sybre Waaijer, CyberWire (https://cyberwire.nl/)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 3 as published
* by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* Class The_SEO_Framework\Render
*
* Puts all data into HTML valid meta tags.
*
* @since 2.8.0
*/
class Render extends Admin_Init {
/**
* Theme title doing it wrong boolean.
*
* @since 2.4.0
*
* @var bool Holds Theme is doing it wrong.
*/
protected $title_doing_it_wrong = null;
/**
 * Constructor. Chains to the parent (Admin_Init) constructor; no
 * additional state is initialized in this class.
 */
protected function __construct() {
    parent::__construct();
}
/**
 * Returns the generated description, memoized per $social flag.
 * Must be called inside the loop.
 *
 * @since 2.2.2
 * @staticvar array $memo
 *
 * @param bool $social Whether to generate the social variant of the description.
 * @return string The description.
 */
public function description_from_cache( $social = false ) {
    static $memo = array();

    if ( ! isset( $memo[ $social ] ) ) {
        $memo[ $social ] = $this->generate_description( '', array( 'social' => $social ) );
    }

    return $memo[ $social ];
}
/**
 * Returns the current URL, memoized per combination of arguments.
 * Must be called inside the loop.
 *
 * @since 2.2.2
 * @staticvar array $memo
 *
 * @param string $url The URL to normalize; empty string fetches the current URL.
 * @param int $post_id The page ID; when empty the requested ID is used.
 * @param bool $paged Whether to return the current page URL with pagination.
 * @param bool $from_option Whether to fetch the canonical URI option.
 * @param bool $paged_plural Whether to allow pagination on second or later pages.
 * @return string The URL.
 */
public function the_url_from_cache( $url = '', $post_id = null, $paged = false, $from_option = true, $paged_plural = true ) {
    static $memo = array();

    if ( empty( $post_id ) ) {
        $post_id = $this->get_the_real_ID();
    }

    if ( ! isset( $memo[ $url ][ $post_id ][ $paged ][ $from_option ][ $paged_plural ] ) ) {
        $args = array(
            'paged'            => $paged,
            'get_custom_field' => $from_option,
            'id'               => $post_id,
            'paged_plural'     => $paged_plural,
        );
        $memo[ $url ][ $post_id ][ $paged ][ $from_option ][ $paged_plural ] = $this->the_url( $url, $args );
    }

    return $memo[ $url ][ $post_id ][ $paged ][ $from_option ][ $paged_plural ];
}
/**
* Cache home URL in static variable
*
* @since 2.5.0
* @staticvar array $url_cache
*
* @param bool $force_slash Force slash
* @return string The url
*/
public function the_home_url_from_cache( $force_slash = false ) {
static $url_cache = array();
if ( isset( $url_cache[ $force_slash ] ) )
return $url_cache[ $force_slash ];
return $url_cache[ $force_slash ] = $this->the_url( '', array( 'home' => true, 'forceslash' => $force_slash ) );
}
	/**
	 * Caches the current title in a static variable, keyed on all parameters.
	 * Must be called inside the loop.
	 *
	 * @since 2.2.2
	 * @since 2.4.0 : If the theme is doing it right, override cache parameters to speed things up.
	 * @staticvar array $title_cache
	 *
	 * @param string $title The Title to return
	 * @param string $sep The Title separator
	 * @param string $seplocation The Title separator location ( accepts 'left' or 'right' )
	 * @param bool $meta Ignore theme doing it wrong.
	 * @return string The title
	 */
	public function title_from_cache( $title = '', $sep = '', $seplocation = '', $meta = false ) {
		/**
		 * Cache the inputs, for when the title is doing it right.
		 * Use those values to fetch the cached title.
		 *
		 * @since 2.4.0
		 */
		static $setup_cache = null;
		static $title_param_cache = null;
		static $sep_param_cache = null;
		static $seplocation_param_cache = null;
		// Capture the arguments of the first call made from within a title
		// filter: those represent the theme's "real" title parameters.
		if ( ! isset( $setup_cache ) ) {
			if ( \doing_filter( 'pre_get_document_title' ) || \doing_filter( 'wp_title' ) ) {
				$title_param_cache = $title;
				$sep_param_cache = $sep;
				$seplocation_param_cache = $seplocation;
				$setup_cache = 'I like turtles.'; // Any non-null sentinel marks setup as done.
			}
		}
		// When the theme is confirmed to be doing it right, substitute the
		// captured parameters so every caller hits the same cache entry.
		if ( isset( $this->title_doing_it_wrong ) && false === $this->title_doing_it_wrong ) {
			$title = $title_param_cache;
			$sep = $sep_param_cache;
			$seplocation = $seplocation_param_cache;
			$meta = false;
		}
		static $title_cache = array();
		if ( isset( $title_cache[ $title ][ $sep ][ $seplocation ][ $meta ] ) )
			return $title_cache[ $title ][ $sep ][ $seplocation ][ $meta ];
		return $title_cache[ $title ][ $sep ][ $seplocation ][ $meta ] = $this->title( $title, $sep, $seplocation, array( 'meta' => $meta ) );
	}
/**
* Caches current Image URL in static variable.
* Must be called inside the loop.
*
* @since 2.2.2
* @since 2.7.0 $get_id parameter has been added.
* @staticvar string $cache
*
* @return string The image URL.
*/
public function get_image_from_cache() {
static $cache = null;
return isset( $cache ) ? $cache : $cache = $this->get_image( $this->get_the_real_ID() );
}
/**
* Returns the current Twitter card type.
*
* @since 2.8.2
* @staticvar string $cache
*
* @return string The cached Twitter card.
*/
public function get_current_twitter_card_type() {
static $cache = null;
return isset( $cache ) ? $cache : $cache = $this->generate_twitter_card_type();
}
/**
* Renders the description meta tag.
*
* @since 1.3.0
* @uses $this->description_from_cache()
* @uses $this->detect_seo_plugins()
*
* @return string The description meta tag.
*/
public function the_description() {
if ( $this->detect_seo_plugins() )
return '';
/**
* Applies filters 'the_seo_framework_description_output' : string
* @since 2.3.0
* @since 2.7.0 : Added output within filter.
*/
$description = (string) \apply_filters( 'the_seo_framework_description_output', $this->description_from_cache(), $this->get_the_real_ID() );
if ( $description )
return '<meta name="description" content="' . \esc_attr( $description ) . '" />' . "\r\n";
return '';
}
/**
* Renders og:description meta tag
*
* @since 1.3.0
* @uses $this->description_from_cache()
*
* @return string The Open Graph description meta tag.
*/
public function og_description() {
if ( ! $this->use_og_tags() )
return '';
/**
* Applies filters 'the_seo_framework_ogdescription_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$description = (string) \apply_filters( 'the_seo_framework_ogdescription_output', $this->description_from_cache( true ), $this->get_the_real_ID() );
if ( $description )
return '<meta property="og:description" content="' . \esc_attr( $description ) . '" />' . "\r\n";
return '';
}
/**
* Renders the OG locale meta tag.
*
* @since 1.0.0
*
* @return string The Open Graph locale meta tag.
*/
public function og_locale() {
if ( ! $this->use_og_tags() )
return '';
/**
* Applies filters 'the_seo_framework_oglocale_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$locale = (string) \apply_filters( 'the_seo_framework_oglocale_output', $this->fetch_locale(), $this->get_the_real_ID() );
if ( $locale )
return '<meta property="og:locale" content="' . \esc_attr( $locale ) . '" />' . "\r\n";
return '';
}
/**
* Renders the Open Graph title meta tag.
*
* @uses $this->title_from_cache()
* @since 2.0.3
*
* @return string The Open Graph title meta tag.
*/
public function og_title() {
if ( ! $this->use_og_tags() )
return '';
/**
* Applies filters 'the_seo_framework_ogtitle_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$title = (string) \apply_filters( 'the_seo_framework_ogtitle_output', $this->title_from_cache( '', '', '', true ), $this->get_the_real_ID() );
if ( $title )
return '<meta property="og:title" content="' . \esc_attr( $title ) . '" />' . "\r\n";
return '';
}
/**
* Renders the Open Graph type meta tag.
*
* @since 1.1.0
*
* @return string The Open Graph type meta tag.
*/
public function og_type() {
if ( ! $this->use_og_tags() )
return '';
if ( $type = $this->get_og_type() )
return '<meta property="og:type" content="' . \esc_attr( $type ) . '" />' . "\r\n";
return '';
}
/**
* Renders Open Graph image meta tag.
*
* @since 1.3.0
* @since 2.6.0 : Added WooCommerce gallery images.
* @since 2.7.0 : Added image dimensions if found.
*
* @return string The Open Graph image meta tag.
*/
public function og_image() {
if ( ! $this->use_og_tags() )
return '';
/**
* Applies filters 'the_seo_framework_ogimage_output' : string|bool
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*
* @NOTE: Use of this might cause incorrect meta since other functions
* depend on the image from cache.
*
* @todo Place in listener cache.
* @priority medium 2.8.0+
*/
$image = \apply_filters( 'the_seo_framework_ogimage_output', $this->get_image_from_cache(), $id = $this->get_the_real_ID() );
/**
* Now returns empty string on false.
* @since 2.6.0
*/
if ( false === $image )
return '';
$image = (string) $image;
/**
* Always output
* @since 2.1.1
*/
$output = '<meta property="og:image" content="' . \esc_attr( $image ) . '" />' . "\r\n";
if ( $image ) {
if ( ! empty( $this->image_dimensions[ $id ]['width'] ) && ! empty( $this->image_dimensions[ $id ]['height'] ) ) {
$output .= '<meta property="og:image:width" content="' . \esc_attr( $this->image_dimensions[ $id ]['width'] ) . '" />' . "\r\n";
$output .= '<meta property="og:image:height" content="' . \esc_attr( $this->image_dimensions[ $id ]['height'] ) . '" />' . "\r\n";
}
}
//* Fetch Product images.
$woocommerce_product_images = $this->render_woocommerce_product_og_image();
return $output . $woocommerce_product_images;
}
/**
* Renders WooCommerce Product Gallery OG images.
*
* @since 2.6.0
* @since 2.7.0 : Added image dimensions if found.
* @since 2.8.0 : Checks for featured ID internally, rather than using a far-off cache.
*
* @return string The rendered OG Image.
*/
public function render_woocommerce_product_og_image() {
$output = '';
if ( $this->is_wc_product() ) {
$images = $this->get_image_from_woocommerce_gallery();
if ( $images && is_array( $images ) ) {
$post_id = $this->get_the_real_ID();
$post_manual_og = $this->get_custom_field( '_social_image_id', $post_id );
$featured_id = $post_manual_og ? (int) $post_manual_og : (int) \get_post_thumbnail_id( $post_id );
foreach ( $images as $id ) {
if ( $id === $featured_id )
continue;
//* Parse 1500px url.
$img = $this->parse_og_image( $id );
if ( $img ) {
$output .= '<meta property="og:image" content="' . \esc_attr( $img ) . '" />' . "\r\n";
if ( ! empty( $this->image_dimensions[ $id ]['width'] ) && ! empty( $this->image_dimensions[ $id ]['height'] ) ) {
$output .= '<meta property="og:image:width" content="' . \esc_attr( $this->image_dimensions[ $id ]['width'] ) . '" />' . "\r\n";
$output .= '<meta property="og:image:height" content="' . \esc_attr( $this->image_dimensions[ $id ]['height'] ) . '" />' . "\r\n";
}
}
}
}
}
return $output;
}
/**
* Renders Open Graph sitename meta tag.
*
* @since 1.3.0
*
* @return string The Open Graph sitename meta tag.
*/
public function og_sitename() {
if ( ! $this->use_og_tags() )
return '';
/**
* Applies filters 'the_seo_framework_ogsitename_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$sitename = (string) \apply_filters( 'the_seo_framework_ogsitename_output', \get_bloginfo( 'name' ), $this->get_the_real_ID() );
if ( $sitename )
return '<meta property="og:site_name" content="' . \esc_attr( $sitename ) . '" />' . "\r\n";
return '';
}
/**
* Renders Open Graph URL meta tag.
*
* @since 1.3.0
* @uses $this->the_url_from_cache()
*
* @return string The Open Graph URL meta tag.
*/
public function og_url() {
if ( $this->use_og_tags() )
return '<meta property="og:url" content="' . $this->the_url_from_cache() . '" />' . "\r\n";
return '';
}
/**
* Renders the Twitter Card type meta tag.
*
* @since 2.2.2
*
* @return string The Twitter Card meta tag.
*/
public function twitter_card() {
if ( ! $this->use_twitter_tags() )
return '';
/**
* Applies filters 'the_seo_framework_twittercard_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$card = (string) \apply_filters( 'the_seo_framework_twittercard_output', $this->get_current_twitter_card_type(), $this->get_the_real_ID() );
if ( $card )
return '<meta name="twitter:card" content="' . \esc_attr( $card ) . '" />' . "\r\n";
return '';
}
/**
* Renders the Twitter Site meta tag.
*
* @since 2.2.2
*
* @return string The Twitter Site meta tag.
*/
public function twitter_site() {
if ( ! $this->use_twitter_tags() )
return '';
/**
* Applies filters 'the_seo_framework_twittersite_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$site = (string) \apply_filters( 'the_seo_framework_twittersite_output', $this->get_option( 'twitter_site' ), $this->get_the_real_ID() );
if ( $site )
return '<meta name="twitter:site" content="' . \esc_attr( $site ) . '" />' . "\r\n";
return '';
}
/**
* Renders The Twitter Creator meta tag.
* If no Twitter Site is found, it will render the Twitter Site ID meta tag.
*
* @since 2.2.2
*
* @return string The Twitter Creator or Twitter Site ID meta tag.
*/
public function twitter_creator() {
if ( ! $this->use_twitter_tags() )
return '';
/**
* Applies filters 'the_seo_framework_twittercreator_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$creator = (string) \apply_filters( 'the_seo_framework_twittercreator_output', $this->get_option( 'twitter_creator' ), $this->get_the_real_ID() );
if ( $creator ) {
/**
* Return site:id instead of creator is no twitter:site is found.
* Per Twitter requirements.
*/
if ( $this->get_option( 'twitter_site' ) ) {
return '<meta name="twitter:site:id" content="' . \esc_attr( $creator ) . '" />' . "\r\n";
} else {
return '<meta name="twitter:creator" content="' . \esc_attr( $creator ) . '" />' . "\r\n";
}
}
return '';
}
/**
* Renders Twitter Title meta tag.
*
* @uses $this->title_from_cache()
* @since 2.2.2
*
* @return string The Twitter Title meta tag.
*/
public function twitter_title() {
if ( ! $this->use_twitter_tags() )
return '';
/**
* Applies filters 'the_seo_framework_twittertitle_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$title = (string) \apply_filters( 'the_seo_framework_twittertitle_output', $this->title_from_cache( '', '', '', true ), $this->get_the_real_ID() );
if ( $title )
return '<meta name="twitter:title" content="' . \esc_attr( $title ) . '" />' . "\r\n";
return '';
}
/**
* Renders Twitter Description meta tag.
*
* @uses $this->description_from_cache()
* @since 2.2.2
*
* @return string The Twitter Descritpion meta tag.
*/
public function twitter_description() {
if ( ! $this->use_twitter_tags() )
return '';
/**
* Applies filters 'the_seo_framework_twitterdescription_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$description = (string) \apply_filters( 'the_seo_framework_twitterdescription_output', $this->description_from_cache( true ), $this->get_the_real_ID() );
if ( $description )
return '<meta name="twitter:description" content="' . \esc_attr( $description ) . '" />' . "\r\n";
return '';
}
/**
* Renders Twitter Image meta tag.
*
* @since 2.2.2
*
* @return string The Twitter Image meta tag.
*/
public function twitter_image() {
if ( ! $this->use_twitter_tags() )
return '';
/**
* Applies filters 'the_seo_framework_twitterimage_output' : string|bool
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$image = (string) \apply_filters( 'the_seo_framework_twitterimage_output', $this->get_image_from_cache(), $id = $this->get_the_real_ID() );
$output = '';
if ( $image ) {
$output = '<meta name="twitter:image" content="' . \esc_attr( $image ) . '" />' . "\r\n";
if ( ! empty( $this->image_dimensions[ $id ]['width'] ) && ! empty( $this->image_dimensions[ $id ]['height'] ) ) {
$output .= '<meta name="twitter:image:width" content="' . \esc_attr( $this->image_dimensions[ $id ]['width'] ) . '" />' . "\r\n";
$output .= '<meta name="twitter:image:height" content="' . \esc_attr( $this->image_dimensions[ $id ]['height'] ) . '" />' . "\r\n";
}
}
return $output;
}
/**
* Renders Facebook Author meta tag.
*
* @since 2.2.2
* @since 2.8.0 : Return empty on og:type 'website' or 'product'
*
* @return string The Facebook Author meta tag.
*/
public function facebook_author() {
if ( ! $this->use_facebook_tags() )
return '';
if ( in_array( $this->get_og_type(), array( 'website', 'product' ), true ) )
return '';
/**
* Applies filters 'the_seo_framework_facebookauthor_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$author = (string) \apply_filters( 'the_seo_framework_facebookauthor_output', $this->get_option( 'facebook_author' ), $this->get_the_real_ID() );
if ( $author )
return '<meta property="article:author" content="' . \esc_attr( \esc_url_raw( $author ) ) . '" />' . "\r\n";
return '';
}
/**
* Renders Facebook Publisher meta tag.
*
* @since 2.2.2
*
* @return string The Facebook Publisher meta tag.
*/
public function facebook_publisher() {
if ( ! $this->use_facebook_tags() )
return '';
/**
* Applies filters 'the_seo_framework_facebookpublisher_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$publisher = (string) \apply_filters( 'the_seo_framework_facebookpublisher_output', $this->get_option( 'facebook_publisher' ), $this->get_the_real_ID() );
if ( $publisher )
return '<meta property="article:publisher" content="' . \esc_attr( \esc_url_raw( $publisher ) ) . '" />' . "\r\n";
return '';
}
/**
* Renders Facebook App ID meta tag.
*
* @since 2.2.2
*
* @return string The Facebook App ID meta tag.
*/
public function facebook_app_id() {
if ( ! $this->use_facebook_tags() )
return '';
/**
* Applies filters 'the_seo_framework_facebookappid_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$app_id = (string) \apply_filters( 'the_seo_framework_facebookappid_output', $this->get_option( 'facebook_appid' ), $this->get_the_real_ID() );
if ( $app_id )
return '<meta property="fb:app_id" content="' . \esc_attr( $app_id ) . '" />' . "\r\n";
return '';
}
/**
* Renders Article Publishing Time meta tag.
*
* @since 2.2.2
* @since 2.8.0 Returns empty on product pages.
*
* @return string The Article Publishing Time meta tag.
*/
public function article_published_time() {
//* Don't do anything if it's not a page or post.
if ( false === $this->is_singular() )
return '';
if ( 'product' === $this->get_og_type() )
return '';
if ( $this->is_front_page() ) {
//* If it's the frontpage, but the option is disabled, don't do anything.
if ( ! $this->get_option( 'home_publish_time' ) )
return '';
} else {
//* If it's a post, but the option is disabled, don't do anything.
if ( $this->is_single() && ! $this->get_option( 'post_publish_time' ) )
return '';
//* If it's a page, but the option is disabled, don't do anything.
if ( $this->is_page() && ! $this->get_option( 'page_publish_time' ) )
return '';
}
$id = $this->get_the_real_ID();
/**
* Applies filters 'the_seo_framework_publishedtime_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$time = (string) \apply_filters( 'the_seo_framework_publishedtime_output', \get_the_date( 'Y-m-d', $id ), $id );
if ( $time )
return '<meta property="article:published_time" content="' . \esc_attr( $time ) . '" />' . "\r\n";
return '';
}
/**
* Renders Article Modified Time meta tag.
* Also renders the Open Graph Updated Time meta tag if Open Graph tags are enabled.
*
* @since 2.2.2
* @since 2.7.0 Listens to $this->get_the_real_ID() instead of WordPress Core ID determination.
* @since 2.8.0 Returns empty on product pages.
*
* @return string The Article Modified Time meta tag, and optionally the Open Graph Updated Time.
*/
public function article_modified_time() {
// Don't do anything if it's not a page or post.
if ( false === $this->is_singular() )
return '';
if ( 'product' === $this->get_og_type() )
return '';
if ( $this->is_front_page() ) {
//* If it's the frontpage, but the option is disabled, don't do anything.
if ( ! $this->get_option( 'home_modify_time' ) )
return '';
} else {
//* If it's a post, but the option is disabled, don't do anyhting.
if ( $this->is_single() && ! $this->get_option( 'post_modify_time' ) )
return '';
//* If it's a page, but the option is disabled, don't do anything.
if ( $this->is_page() && ! $this->get_option( 'page_modify_time' ) )
return '';
}
$id = $this->get_the_real_ID();
/**
* Applies filters 'the_seo_framework_modifiedtime_output' : string
* @since 2.3.0
* @since 2.7.0 Added output within filter.
*/
$time = (string) \apply_filters( 'the_seo_framework_modifiedtime_output', \get_post_modified_time( 'Y-m-d', false, $id, false ), $id );
if ( $time ) {
$output = '<meta property="article:modified_time" content="' . \esc_attr( $time ) . '" />' . "\r\n";
if ( $this->use_og_tags() )
$output .= '<meta property="og:updated_time" content="' . \esc_attr( $time ) . '" />' . "\r\n";
return $output;
}
return '';
}
/**
* Renders Canonical URL meta tag.
*
* @since 2.0.6
* @uses $this->the_url_from_cache()
*
* @return string The Canonical URL meta tag.
*/
public function canonical() {
/**
* Applies filters the_seo_framework_output_canonical : Don't output canonical if false.
* @since 2.4.2
*
* @deprecated
* @since 2.7.0
*/
if ( true !== \apply_filters( 'the_seo_framework_output_canonical', true, $this->get_the_real_ID() ) ) {
$this->_deprecated_filter( 'the_seo_framework_output_canonical', '2.7.0', "add_filter( 'the_seo_framework_rel_canonical_output', '__return_empty_string' );" );
return '';
}
/**
* Applies filters 'the_seo_framework_rel_canonical_output' : Change canonical URL output.
* @since 2.6.5
*/
$url = (string) \apply_filters( 'the_seo_framework_rel_canonical_output', $this->the_url_from_cache(), $this->get_the_real_ID() );
/**
* @since 2.7.0 Listens to the second filter.
*/
if ( $url )
return '<link rel="canonical" href="' . $url . '" />' . "\r\n";
return '';
}
/**
* Renders LD+JSON Schema.org scripts.
*
* @uses $this->render_ld_json_scripts()
*
* @since 1.2.0
* @return string The LD+json Schema.org scripts.
*/
public function ld_json() {
//* Don't output on Search, 404 or preview.
if ( $this->is_search() || $this->is_404() || $this->is_preview() )
return '';
/**
* Applies filters 'the_seo_framework_ldjson_scripts' : string
* @since 2.6.0
*/
$json = (string) \apply_filters( 'the_seo_framework_ldjson_scripts', $this->render_ld_json_scripts(), $this->get_the_real_ID() );
return $json;
}
/**
* Renders Google Site Verification Code meta tag.
*
* @since 2.2.4
*
* @return string The Google Site Verification code meta tag.
*/
public function google_site_output() {
/**
* Applies filters 'the_seo_framework_googlesite_output' : string
* @since 2.6.0
*/
$code = (string) \apply_filters( 'the_seo_framework_googlesite_output', $this->get_option( 'google_verification' ), $this->get_the_real_ID() );
if ( $code )
return '<meta name="google-site-verification" content="' . \esc_attr( $code ) . '" />' . "\r\n";
return '';
}
/**
* Renders Bing Site Verification Code meta tag.
*
* @since 2.2.4
*
* @return string The Bing Site Verification Code meta tag.
*/
public function bing_site_output() {
/**
* Applies filters 'the_seo_framework_bingsite_output' : string
* @since 2.6.0
*/
$code = (string) \apply_filters( 'the_seo_framework_bingsite_output', $this->get_option( 'bing_verification' ), $this->get_the_real_ID() );
if ( $code )
return '<meta name="msvalidate.01" content="' . \esc_attr( $code ) . '" />' . "\r\n";
return '';
}
/**
* Renders Yandex Site Verification code meta tag.
*
* @since 2.6.0
*
* @return string The Yandex Site Verification code meta tag.
*/
public function yandex_site_output() {
/**
* Applies filters 'the_seo_framework_yandexsite_output' : string
* @since 2.6.0
*/
$code = (string) \apply_filters( 'the_seo_framework_yandexsite_output', $this->get_option( 'yandex_verification' ), $this->get_the_real_ID() );
if ( $code )
return '<meta name="yandex-verification" content="' . \esc_attr( $code ) . '" />' . "\r\n";
return '';
}
/**
* Renders Pinterest Site Verification code meta tag.
*
* @since 2.5.2
*
* @return string The Pinterest Site Verification code meta tag.
*/
public function pint_site_output() {
/**
* Applies filters 'the_seo_framework_pintsite_output' : string
* @since 2.6.0
*/
$code = (string) \apply_filters( 'the_seo_framework_pintsite_output', $this->get_option( 'pint_verification' ), $this->get_the_real_ID() );
if ( $code )
return '<meta name="p:domain_verify" content="' . \esc_attr( $code ) . '" />' . "\r\n";
return '';
}
/**
* Renders Robots meta tags.
* Returns early if blog isn't public. WordPress Core will then output the meta tags.
*
* @since 2.0.0
*
* @return string The Robots meta tags.
*/
public function robots() {
//* Don't do anything if the blog isn't set to public.
if ( false === $this->is_blog_public() )
return '';
/**
* Applies filters 'the_seo_framework_robots_meta' : array
* @since 2.6.0
*/
$meta = (array) \apply_filters( 'the_seo_framework_robots_meta', $this->robots_meta(), $this->get_the_real_ID() );
if ( empty( $meta ) )
return '';
return sprintf( '<meta name="robots" content="%s" />' . "\r\n", implode( ',', $meta ) );
}
/**
* Renders Shortlink meta tag
*
* @since 2.2.2
* @uses $this->get_shortlink()
*
* @return string The Shortlink meta tag.
*/
public function shortlink() {
/**
* Applies filters 'the_seo_framework_shortlink_output' : string
* @since 2.6.0
*/
$url = (string) \apply_filters( 'the_seo_framework_shortlink_output', $this->get_shortlink(), $this->get_the_real_ID() );
if ( $url )
return sprintf( '<link rel="shortlink" href="%s" />' . "\r\n", $url );
return '';
}
/**
* Renders Prev/Next Paged URL meta tags.
*
* @since 2.2.2
* @uses $this->get_paged_url()
*
* @return string The Prev/Next Paged URL meta tags.
*/
public function paged_urls() {
$id = $this->get_the_real_ID();
/**
* Applies filters 'the_seo_framework_paged_url_output' : array
* @since 2.6.0
*/
$next = (string) \apply_filters( 'the_seo_framework_paged_url_output_next', $this->get_paged_url( 'next' ), $id );
/**
* Applies filters 'the_seo_framework_paged_url_output' : array
* @since 2.6.0
*/
$prev = (string) \apply_filters( 'the_seo_framework_paged_url_output_prev', $this->get_paged_url( 'prev' ), $id );
$output = '';
if ( $prev )
$output .= sprintf( '<link rel="prev" href="%s" />' . "\r\n", $prev );
if ( $next )
$output .= sprintf( '<link rel="next" href="%s" />' . "\r\n", $next );
return $output;
}
/**
* Determines whether we can use Open Graph tags.
*
* @since 2.6.0
* @staticvar bool $cache
*
* @return bool
*/
public function use_og_tags() {
static $cache = null;
if ( isset( $cache ) )
return $cache;
return $cache = $this->is_option_checked( 'og_tags' ) && false === $this->detect_og_plugin();
}
/**
* Determines whether we can use Facebook tags.
*
* @since 2.6.0
* @staticvar bool $cache
*
* @return bool
*/
public function use_facebook_tags() {
static $cache = null;
if ( isset( $cache ) )
return $cache;
return $cache = $this->is_option_checked( 'facebook_tags' );
}
/**
* Determines whether we can use Twitter tags.
*
* @since 2.6.0
* @since 2.8.2 : Now also considers Twitter card type output.
* @staticvar bool $cache
*
* @return bool
*/
public function use_twitter_tags() {
static $cache = null;
if ( isset( $cache ) )
return $cache;
return $cache = $this->is_option_checked( 'twitter_tags' ) && false === $this->detect_twitter_card_plugin() && $this->get_current_twitter_card_type();
}
/**
* Determines whether we can use Google+ tags.
*
* @since 2.6.0
* @staticvar bool $cache
* @NOTE: not used.
*
* @return bool
*/
public function use_googleplus_tags() {
return false;
static $cache = null;
if ( isset( $cache ) )
return $cache;
return $cache = $this->is_option_checked( 'googleplus_tags' );
}
}
| {
"content_hash": "64f243ee63fcd28013aeb8c41b5fe767",
"timestamp": "",
"source": "github",
"line_count": 1115,
"max_line_length": 223,
"avg_line_length": 26.91748878923767,
"alnum_prop": 0.6027054942858094,
"repo_name": "Kilbourne/biosphaera",
"id": "a175e91f0f722ab2694c4b6e0c899a09d29b596a",
"size": "30013",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web/app/plugins/autodescription/inc/classes/render.class.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "223361"
},
{
"name": "HTML",
"bytes": "40850"
},
{
"name": "JavaScript",
"bytes": "44809"
},
{
"name": "PHP",
"bytes": "173651"
}
],
"symlink_target": ""
} |
# Ubuntu 16.04 base image for Python 2.7 Django applications (no nginx).
FROM ubuntu:xenial
# Default locale; generated below via locale-gen.
ENV LANG=en_US.UTF-8
# Single RUN layer: upgrade, install runtime + build packages, generate the
# locale, install Python packages via pip, then strip caches to shrink the image.
RUN \
apt-get update && \
apt-get upgrade -y && \
apt-get install -y --allow-unauthenticated \
locales \
ca-certificates tzdata python2.7 netcat htop less curl libmysqlclient20 \
libjpeg8 libpng12-0 libxslt1.1 \
libmysqlclient-dev python2.7-dev python-pip libjpeg8-dev libpng12-dev libxslt1-dev && \
locale-gen ${LANG} && \
pip install -U pillow django-redis-cache pytz lxml gu-django-filebrowser-no-grappelli gu-django-tinymce mysqlclient uwsgi && \
apt-get autoremove -y && \
apt-get autoclean -y && \
rm -rf /tmp/* /var/lib/apt/lists/* /root/.cache && \
true
| {
"content_hash": "40190e9e6f1bcbbdb67dbdf5dbfc3753",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 130,
"avg_line_length": 37.333333333333336,
"alnum_prop": 0.65625,
"repo_name": "andriyg76/docker",
"id": "075a9c9d36a7909c65276ae0ca2b06cf2865ddc6",
"size": "672",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python2.7-nonginx/Dockerfile",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1298"
},
{
"name": "Python",
"bytes": "38086"
},
{
"name": "Shell",
"bytes": "9593"
}
],
"symlink_target": ""
} |
#include <aws/ecs/model/NetworkBinding.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace ECS
{
namespace Model
{
// Default constructor: numeric fields are zeroed and every *HasBeenSet flag
// starts false, so Jsonize() emits nothing until fields are assigned.
NetworkBinding::NetworkBinding() : 
    m_bindIPHasBeenSet(false),
    m_containerPort(0),
    m_containerPortHasBeenSet(false),
    m_hostPort(0),
    m_hostPortHasBeenSet(false),
    m_protocolHasBeenSet(false)
{
}
// JSON constructor: initializes to the default state, then delegates to
// operator= to populate fields from the payload.
NetworkBinding::NetworkBinding(const JsonValue& jsonValue) : 
    m_bindIPHasBeenSet(false),
    m_containerPort(0),
    m_containerPortHasBeenSet(false),
    m_hostPort(0),
    m_hostPortHasBeenSet(false),
    m_protocolHasBeenSet(false)
{
  *this = jsonValue;
}
// Populates this binding from a JSON object. Only keys present in the payload
// are read; each successful read raises the matching *HasBeenSet flag.
NetworkBinding& NetworkBinding::operator =(const JsonValue& json)
{
    if (json.ValueExists("bindIP")) {
        m_bindIP = json.GetString("bindIP");
        m_bindIPHasBeenSet = true;
    }
    if (json.ValueExists("containerPort")) {
        m_containerPort = json.GetInteger("containerPort");
        m_containerPortHasBeenSet = true;
    }
    if (json.ValueExists("hostPort")) {
        m_hostPort = json.GetInteger("hostPort");
        m_hostPortHasBeenSet = true;
    }
    if (json.ValueExists("protocol")) {
        // The wire value is a string; map it onto the TransportProtocol enum.
        m_protocol = TransportProtocolMapper::GetTransportProtocolForName(json.GetString("protocol"));
        m_protocolHasBeenSet = true;
    }
    return *this;
}
// Serializes this binding to JSON, emitting only the fields whose
// *HasBeenSet flag is raised (sparse serialization).
JsonValue NetworkBinding::Jsonize() const
{
    JsonValue payload;

    if (m_bindIPHasBeenSet) {
        payload.WithString("bindIP", m_bindIP);
    }
    if (m_containerPortHasBeenSet) {
        payload.WithInteger("containerPort", m_containerPort);
    }
    if (m_hostPortHasBeenSet) {
        payload.WithInteger("hostPort", m_hostPort);
    }
    if (m_protocolHasBeenSet) {
        // The enum is written as its wire-protocol string name.
        payload.WithString("protocol", TransportProtocolMapper::GetNameForTransportProtocol(m_protocol));
    }

    return payload;
}
} // namespace Model
} // namespace ECS
} // namespace Aws | {
"content_hash": "37a9f49e3e62dc933a5aecfdb83886b2",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 103,
"avg_line_length": 18.902912621359224,
"alnum_prop": 0.7082691319979456,
"repo_name": "ambasta/aws-sdk-cpp",
"id": "33e7197227cf076d4cb04a4a1094ad2daad508b7",
"size": "2520",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-cpp-sdk-ecs/source/model/NetworkBinding.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2305"
},
{
"name": "C++",
"bytes": "74273816"
},
{
"name": "CMake",
"bytes": "412257"
},
{
"name": "Java",
"bytes": "229873"
},
{
"name": "Python",
"bytes": "62933"
}
],
"symlink_target": ""
} |
'use strict';
var config = require('../config.js');
/**
 * Ensures both the read and write routing aliases exist for a user's index.
 *
 * Alias names are built as `<routingValue><aliasToken>read` and
 * `<routingValue><aliasToken>write` (e.g. 2344d4523sdg4_banks_read); both
 * alias bodies attach a term filter on `termValue` and route by
 * `routingValue`.
 *
 * NOTE(review): `resMsg` is a string parameter, so every assignment to it
 * inside the callbacks below is local to this function — the caller never
 * sees these messages. They effectively only feed console.log.
 *
 * NOTE(review): elasticsearch-js invokes callbacks as (error, response,
 * status). The branches below treat a truthy FIRST argument (the error) as
 * "the alias exists" and the falsy case as "alias missing" — presumably the
 * existence flag was intended to come from the response argument; confirm
 * against the client version in use before relying on this flow.
 */
function checkAndCreateAlias(indexName, termValue, routingValue, resMsg, aliasToken, esClient)
{
console.log('index ['+indexName+'] with UUID ['+routingValue+']. Creating Alias for index ['+indexName+']!');
// Alias body for the write alias: term filter keyed by the (computed)
// termValue property, routed to the user's shard.
// NOTE(review): the "add" action carries no alias/index fields here —
// presumably putAlias supplies them from its own parameters; confirm.
var aliasBodyWrite = {
"actions": [{
"add": {
"filter": {"term": { [termValue] : routingValue}},
"routing": routingValue
}
}]
};
// Alias body for the read/search alias — structurally identical to the
// write body above.
var aliasBodySearch =
{
"actions" : [{
"add": {
"filter": {"term": { [termValue] : routingValue}},
"routing": routingValue
}
}]
};
console.log('indexName: '+indexName+ ' name: '+routingValue + aliasToken + 'read');
//e.g. name: 2344d4523sdg4_banks_read
// Step 1: does the READ alias already exist?
esClient.indices.existsAlias({index: indexName, name: routingValue + aliasToken + 'read'},
function (errorRespExists, respReadExists, statusReadExists) {
console.log('errorRespExists = ' + errorRespExists);
console.log('respReadExists = ' + respReadExists);
console.log('statusReadExists = '+ statusReadExists );
// Branch taken when the first (error) argument is truthy — see the
// NOTE(review) in the function header.
if(errorRespExists)
{
console.log('Index Alias ['+indexName+'] exists in ElasticSearch AND Alias['+routingValue + aliasToken + 'read'+'] for read exists. Checking now if write exists = '+respReadExists);
//checking wirte alias exists
// Step 2a: read alias present — check the WRITE alias.
esClient.indices.existsAlias({index: indexName, name: routingValue + aliasToken + 'write'},
function (errorWriteExists, respWriteExists, statusWriteExists) {
console.log('errorWriteExists = ' + errorWriteExists);
console.log('respWriteExists = ' + respWriteExists);
console.log('statusWriteExists = '+ statusWriteExists );
if(errorWriteExists)
{
// Both aliases already exist; nothing to do.
resMsg = 'Index ['+indexName+'] exists in ElasticSearch AND Both Alias ['+routingValue + aliasToken + 'read'+'] and ['+routingValue + aliasToken + 'write'+'] Read and Write already EXISTS = '+respWriteExists;
console.log(resMsg);
return;
}
else {
console.log('Index ['+indexName+'] exists in ElasticSearch Alias for read exists ['+routingValue + aliasToken + 'read'+'] AND Alias write DOES NOT EXISTS. Creating now! response value ='+respWriteExists);
//put write alias
esClient.indices.putAlias({index: indexName, name: routingValue + aliasToken + 'write', body: aliasBodyWrite })
.then(function (resp){
resMsg = 'Index ['+indexName+'] exists in ElasticSearch AND Alias ['+routingValue + aliasToken + 'write'+'] write created as read already existed = '+JSON.stringify(resp);
console.log(resMsg);
return;
}, function (error) {
resMsg = 'Error: Index ['+indexName+'] exists in ElasticSearch but Alias write not created by read exists -'+JSON.stringify(error);
console.log(resMsg);
return;
}); //end putAlias(write)
}
});
}
else {
console.log('Index ['+indexName+'] exists in ElasticSearch AND Alias ['+routingValue + aliasToken + 'read'+']read DOES NOT EXISTS! creating now! response is ='+respReadExists);
//put read alias
// Step 2b: read alias missing — create it, then check/create write.
esClient.indices.putAlias({index: indexName, name: routingValue + aliasToken + 'read', body: aliasBodySearch })
.then(function (resp){
console.log('Index ['+indexName+'] exists in ElasticSearch AND Alias ['+routingValue + aliasToken + 'read'+']read newly created now checking if write ['+routingValue + aliasToken + 'write'+'] exists = '+resp);
resMsg = 'Index ['+indexName+'] exists in ElasticSearch AND Alias ['+routingValue + aliasToken + 'read'+'] read newly created now checking if write ['+routingValue + aliasToken + 'write'+'] exists = '+resp;
//now check if write exists
esClient.indices.existsAlias({index: indexName, name: routingValue + aliasToken + 'write'},
function (errorWirteEx, respWriteEx, statusWriteEx) {
console.log('errorWirteEx = ' + errorWirteEx);
console.log('respWriteEx = ' + respWriteEx);
console.log('statusWriteEx = '+ statusWriteEx);
if(errorWirteEx)
{
resMsg = 'Index ['+indexName+'] exists in ElasticSearch AND Both Alias read newly created and write already exits = '+respWriteEx;
console.log(resMsg);
return;
}
else {
console.log('Index ['+indexName+'] exists in ElasticSearch AND Alias read newly created but write DOES NOT EXISTS '+respWriteEx);
//put write alias
esClient.indices.putAlias({index: indexName, name: routingValue + aliasToken + 'write', body: aliasBodyWrite })
.then(function (resp){
resMsg = 'Index ['+indexName+'] exists in ElasticSearch AND Alias Both read and write newly created = '+ JSON.stringify(resp);
console.log(resMsg);
return;
}, function (error) {
resMsg = 'Error: Index ['+indexName+'] exists in ElasticSearch but Alias write not created but read newly created -'+JSON.stringify(error);
console.log(resMsg);
return;
}); //end putAlias(write)
}
});
}, function (error) {
// Read-alias creation failed; write alias is intentionally not attempted.
resMsg = 'Error: Index ['+indexName+'] exists in ElasticSearch but Alias read alias could not create. Write not attempted. Error -'+JSON.stringify(error);
console.log(resMsg);
return;
}); //end put read alias
} //end Else
}); //end existsAlias(read)
}
//sample uid: 3LsrOoGQAQOBVC1vU89bNpFuXwA3
/**
 * Handles a new-user event: verifies the user's Elasticsearch index exists
 * and, if so, delegates to checkAndCreateAlias() to create routing-filtered
 * read/write aliases keyed by the user's UID.
 *
 * Fixes over the previous revision:
 *  - doubled statements (`return;;`) reduced to a single `return;`
 *  - the `indices.exists` promise handler was declared as
 *    `function (error, resp)`; a promise resolution callback receives only
 *    ONE value (the boolean existence result), so `resp` was always
 *    undefined and the result was misleadingly named `error`. The handler
 *    now takes the boolean result directly, and a rejection handler logs
 *    genuine request failures (previously unhandled).
 *  - the run of near-identical indexType if-blocks is table-driven; the
 *    table's insertion order matches the original check order, so the last
 *    matching entry wins exactly as before.
 *
 * @param {Object} user      Firebase user record; `user.uid` is the routing key.
 * @param {Object} esClient  Connected elasticsearch-js client.
 */
function handlePOST(user, esClient) {
    var resMsg = '';
    console.log('Inside handlePOST(user)');
    console.log('user = ' + JSON.stringify(user));
    var routingValue = user.uid;    // Firebase UID, used as the ES routing key
    var indexType = 'settings';     // this trigger only manages the settings index
    var userBody = user;
    // Guard clauses: both the user document and its uid are required.
    if (userBody === null || userBody === undefined) {
        resMsg = 'Error: userBody required to create Index in ES ->' + userBody;
        console.log(resMsg);
        return;
    }
    if (routingValue === null || routingValue === undefined) {
        resMsg = 'Error: routingValue required to create Index in ES ->' + routingValue;
        console.log(resMsg);
        return;
    }
    resMsg = 'Index not created';
    // Connectivity probe; failures are logged but do not abort the flow
    // (matching the original best-effort behavior).
    esClient.ping({ requestTimeout: 30000 }, function (error) {
        if (error) {
            console.trace('Error: elasticsearch cluster is down!', error);
            return;
        } else {
            console.log('Elasticsearch Instance on ObjectRocket Connected!');
        }
    });
    // check elasticsearch health (informational only)
    esClient.cluster.health({}, function (err, resp, status) {
        console.log('-- esClient Health --', resp);
    });
    var indexName = null;
    var aliasToken = null;
    var termValue = null;
    resMsg = 'Error - No Alias Created for [' + indexName + ']';
    console.log('Checking if indexName Exists');
    // Do not change the term values below: they are properties of the index
    // defined in the template.
    var aliasSettings = {
        banks:        { token: '_banks_',        term: 'bank_userId_routingAliasId', index: 'banks_index_name' },
        coas:         { token: '_coas_',         term: 'coa_userId_routingAliasId',  index: 'coas_index_name' },
        customers:    { token: '_customers_',    term: 'cust_userId_routingAliasId', index: 'customers_index_name' },
        invoices:     { token: '_invoices_',     term: 'inv_userId_routingAliasId',  index: 'invoices_index_name' },
        notes:        { token: '_notes_',        term: 'note_userId_routingAliasId', index: 'notes_index_name' },
        payments:     { token: '_payments_',     term: 'pymt_userId_routingAliasId', index: 'payments_index_name' },
        rules:        { token: '_rules_',        term: 'rule_userId_routingAliasId', index: 'rules_index_name' },
        suppliers:    { token: '_suppliers_',    term: 'supp_userId_routingAliasId', index: 'suppliers_index_name' },
        settings:     { token: '_settings_',     term: 'sett_userId_routingAliasId', index: 'settings_index_name' },
        transactions: { token: '_transactions_', term: 'tran_userId_routingAliasId', index: 'transactions_index_name' }
    };
    Object.keys(aliasSettings).forEach(function (key) {
        if (indexType.includes(key)) {
            aliasToken = aliasSettings[key].token;
            termValue = aliasSettings[key].term;
            indexName = config[aliasSettings[key].index];
            console.log('aliasToken [' + aliasToken + ' termValue [' + termValue + ']');
        }
    });
    if (termValue === '' || termValue === null) {
        console.log('ERROR - indexType does not contain valid term');
        return;
    }
    if (indexName === null || indexName === undefined) {
        resMsg = 'Error: indexName required to create Index in ES ->' + indexName;
        return;
    }
    console.log('AliasToken considered [' + aliasToken + '] and termValue considered [' + termValue + ']');
    console.log('Index [' + indexName + '] and routingValue [' + routingValue + ']');
    console.log('Checking if index Exists(' + indexName + ')');
    esClient.indices.exists({ index: indexName })
        .then(function (indexExists) {
            // `indexExists` is the boolean resolved value of indices.exists().
            console.log('indices.exists resolved with - ' + indexExists);
            if (indexExists) {
                console.log('Index [' + indexName + '] already exists in ElasticSearch. Response is ->' + indexExists);
                resMsg = 'Index [' + indexName + '] already exists in ElasticSearch -' + JSON.stringify(indexExists);
                checkAndCreateAlias(indexName, termValue, routingValue, resMsg, aliasToken, esClient);
            } // end if
            else {
                // index dosen't exist: aliases cannot be attached.
                resMsg = 'Index does not Exists!. Can not insert user routingValue to the index. Error Value = ' + indexExists;
                console.log(resMsg);
                return;
            }
        }, function (rejection) {
            // Previously unhandled: request/transport failures rejected the
            // promise silently.
            console.log('Error: indices.exists request failed - ' + JSON.stringify(rejection));
        }); // end then - indices.exists()
    return;
}
exports.handler = function(event, database, esClient)
{
var usersRef = database.ref('users');
const user = event.data; // The Firebase user.
console.log( 'event data ='+JSON.stringify(event.data) );
return handlePOST(user, esClient);
};
/* event.data =
{
"displayName": "Ramhigh Low",
"email": "testbizvv@gmail.com",
"emailVerified": true,
"metadata": {
"creationTime": "2017-10-27T00:16:16Z",
"lastSignInTime": "2017-10-27T00:16:16Z"
},
"photoURL": "https://lh5.googleusercontent.com/-7k6JG8RCtRI/AAAAAAAAAAI/AAAAAAAAAAc/aKrbE08MqDI/photo.jpg",
"providerData": [{
"displayName": "Ramhigh Low",
"email": "testbizvv@gmail.com",
"photoURL": "https://lh5.googleusercontent.com/-7k6JG8RCtRI/AAAAAAAAAAI/AAAAAAAAAAc/aKrbE08MqDI/photo.jpg",
"providerId": "google.com",
"uid": "106907019373764493113"
}],
"uid": "MePBMojfc4hXaM5x490hWAcxsIs2"
}
*/
| {
"content_hash": "a5673ebb79083e3428ef0297e3dd1b4b",
"timestamp": "",
"source": "github",
"line_count": 266,
"max_line_length": 227,
"avg_line_length": 47.07142857142857,
"alnum_prop": 0.5870138167877965,
"repo_name": "vinvantest/biz_rec_code",
"id": "863635c93511039ec44680a6834baaa6e9ed83e7",
"size": "12521",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "frontend/PWA_firebase/functions/auth_triggers/createSettingIndexAliasForUser.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8313"
},
{
"name": "HTML",
"bytes": "263379"
},
{
"name": "JavaScript",
"bytes": "645533"
},
{
"name": "Python",
"bytes": "1367283"
}
],
"symlink_target": ""
} |
package com.vk.api.sdk.queries.notifications;
import com.vk.api.sdk.client.AbstractQueryBuilder;
import com.vk.api.sdk.client.VkApiClient;
import com.vk.api.sdk.client.actors.UserActor;
import com.vk.api.sdk.objects.base.BoolInt;
import java.util.Arrays;
import java.util.List;
/**
 * Query for Notifications.markAsViewed method.
 *
 * Builds a request for the "notifications.markAsViewed" VK API method; the
 * service responds with a {@link BoolInt} success flag.
 */
public class NotificationsMarkAsViewedQuery extends AbstractQueryBuilder<NotificationsMarkAsViewedQuery, BoolInt> {
/**
 * Creates an AbstractQueryBuilder instance that can be used to build api request with various parameters
 *
 * @param client VK API client
 * @param actor actor with access token
 */
public NotificationsMarkAsViewedQuery(VkApiClient client, UserActor actor) {
super(client, "notifications.markAsViewed", BoolInt.class);
accessToken(actor.getAccessToken());
}
/**
 * Self-reference required by the fluent {@code AbstractQueryBuilder} API.
 */
@Override
protected NotificationsMarkAsViewedQuery getThis() {
return this;
}
/**
 * The access token is the only parameter this request requires.
 */
@Override
protected List<String> essentialKeys() {
return Arrays.asList("access_token");
}
}
| {
"content_hash": "f5cfa315964d6b793ab8eefffbb398a2",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 115,
"avg_line_length": 30.771428571428572,
"alnum_prop": 0.7307335190343547,
"repo_name": "kokorin/vk-java-sdk",
"id": "3ac597b23adff08ce3dd5a9d41f570792846bcfd",
"size": "1077",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "sdk/src/main/java/com/vk/api/sdk/queries/notifications/NotificationsMarkAsViewedQuery.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "2634022"
}
],
"symlink_target": ""
} |
import pytest
import numpy as np
from numpy.testing import assert_allclose
from .... import units
from ....tests.helper import assert_quantity_allclose
from .. import LombScargle
# All periodogram implementations reported by LombScargle; subsets below are
# selected purely by method name.
ALL_METHODS = LombScargle.available_methods
# 'auto' delegates to a concrete method, so most tests skip it.
ALL_METHODS_NO_AUTO = [method for method in ALL_METHODS if method != 'auto']
# Methods whose name contains 'fast' (FFT-capable variants).
FAST_METHODS = [method for method in ALL_METHODS if 'fast' in method]
# Methods whose name contains 'chi2' (support the nterms keyword below).
NTERMS_METHODS = [method for method in ALL_METHODS if 'chi2' in method]
# Every power normalization exercised by the tests.
NORMALIZATIONS = ['standard', 'psd', 'log', 'model']
@pytest.fixture
def data(N=100, period=1, theta=[10, 2, 3], dy=1, rseed=0):
    """Produce a synthetic periodic signal (t, y, dy) for the tests.

    The model is theta[0] + theta[1]*sin(w*t) + theta[2]*cos(w*t) with
    w = 2*pi/period, sampled at random times over 20 periods and perturbed
    by heteroscedastic Gaussian noise.
    """
    random_state = np.random.RandomState(rseed)
    t = 20 * period * random_state.rand(N)
    omega = 2 * np.pi / period
    offset, amp_sin, amp_cos = theta
    y = offset + amp_sin * np.sin(omega * t) + amp_cos * np.cos(omega * t)
    errors = dy * (0.5 + random_state.rand(N))
    noisy_y = y + errors * random_state.randn(N)
    return t, noisy_y, errors
@pytest.mark.parametrize('minimum_frequency', [None, 1.0])
@pytest.mark.parametrize('maximum_frequency', [None, 5.0])
@pytest.mark.parametrize('nyquist_factor', [1, 10])
@pytest.mark.parametrize('samples_per_peak', [1, 5])
def test_autofrequency(data, minimum_frequency, maximum_frequency,
                       nyquist_factor, samples_per_peak):
    """autofrequency() honors grid spacing, minimum, and maximum settings."""
    t, y, dy = data
    baseline = t.max() - t.min()
    freq = LombScargle(t, y, dy).autofrequency(samples_per_peak,
                                               nyquist_factor,
                                               minimum_frequency,
                                               maximum_frequency)
    df = freq[1] - freq[0]
    # Check sample spacing
    assert_allclose(df, 1. / baseline / samples_per_peak)
    # Check minimum frequency
    if minimum_frequency is None:
        # default grid starts half a bin above zero
        assert_allclose(freq[0], 0.5 * df)
    else:
        assert_allclose(freq[0], minimum_frequency)
    if maximum_frequency is None:
        # default maximum is the average Nyquist frequency times nyquist_factor
        avg_nyquist = 0.5 * len(t) / baseline
        assert_allclose(freq[-1], avg_nyquist * nyquist_factor, atol=0.5*df)
    else:
        assert_allclose(freq[-1], maximum_frequency, atol=0.5*df)
@pytest.mark.parametrize('method', ALL_METHODS_NO_AUTO)
@pytest.mark.parametrize('center_data', [True, False])
@pytest.mark.parametrize('fit_mean', [True, False])
@pytest.mark.parametrize('with_errors', [True, False])
@pytest.mark.parametrize('with_units', [True, False])
@pytest.mark.parametrize('normalization', NORMALIZATIONS)
def test_all_methods(data, method, center_data, fit_mean,
                     with_errors, with_units, normalization):
    """Each concrete method agrees with the default-method power result."""
    # the scipy backend handles neither errors nor mean fitting
    if method == 'scipy' and (fit_mean or with_errors):
        return
    t, y, dy = data
    frequency = 0.8 + 0.01 * np.arange(40)
    if with_units:
        t = t * units.day
        y = y * units.mag
        dy = dy * units.mag
        frequency = frequency / t.unit
    if not with_errors:
        dy = None
    kwds = dict(normalization=normalization)
    ls = LombScargle(t, y, dy, center_data=center_data, fit_mean=fit_mean)
    P_expected = ls.power(frequency, **kwds)
    # don't use the fft approximation here; we'll test this elsewhere
    if method in FAST_METHODS:
        kwds['method_kwds'] = dict(use_fft=False)
    P_method = ls.power(frequency, method=method, **kwds)
    if with_units:
        if normalization == 'psd' and not with_errors:
            # psd normalization without errors carries the data units squared
            assert P_method.unit == y.unit ** 2
        else:
            assert P_method.unit == units.dimensionless_unscaled
    else:
        assert not hasattr(P_method, 'unit')
    assert_quantity_allclose(P_expected, P_method)
@pytest.mark.parametrize('method', ALL_METHODS_NO_AUTO)
@pytest.mark.parametrize('center_data', [True, False])
@pytest.mark.parametrize('fit_mean', [True, False])
@pytest.mark.parametrize('with_errors', [True, False])
@pytest.mark.parametrize('normalization', NORMALIZATIONS)
def test_integer_inputs(data, method, center_data, fit_mean, with_errors,
                        normalization):
    """Integer-typed t/y/dy give the same power as their float equivalents."""
    # the scipy backend handles neither errors nor mean fitting
    if method == 'scipy' and (fit_mean or with_errors):
        return
    t, y, dy = data
    # floor the float data so the integer casts below are exact
    t = np.floor(100 * t)
    t_int = t.astype(int)
    y = np.floor(100 * y)
    y_int = y.astype(int)
    dy = np.floor(100 * dy)
    dy_int = dy.astype('int32')
    frequency = 1E-2 * (0.8 + 0.01 * np.arange(40))
    if not with_errors:
        dy = None
        dy_int = None
    kwds = dict(center_data=center_data,
                fit_mean=fit_mean)
    P_float = LombScargle(t, y, dy, **kwds).power(frequency,
                                                  method=method,
                                                  normalization=normalization)
    P_int = LombScargle(t_int, y_int, dy_int,
                        **kwds).power(frequency,
                                      method=method,
                                      normalization=normalization)
    assert_allclose(P_float, P_int)
@pytest.mark.parametrize('method', NTERMS_METHODS)
@pytest.mark.parametrize('center_data', [True, False])
@pytest.mark.parametrize('fit_mean', [True, False])
@pytest.mark.parametrize('with_errors', [True, False])
@pytest.mark.parametrize('nterms', [0, 2, 4])
@pytest.mark.parametrize('normalization', NORMALIZATIONS)
def test_nterms_methods(method, center_data, fit_mean, with_errors,
                        nterms, normalization, data):
    """Multi-term (chi2) methods agree with the default method; nterms=0
    without a bias term is rejected with an informative error."""
    t, y, dy = data
    frequency = 0.8 + 0.01 * np.arange(40)
    if not with_errors:
        dy = None
    ls = LombScargle(t, y, dy, center_data=center_data,
                     fit_mean=fit_mean, nterms=nterms)
    kwds = dict(normalization=normalization)
    if nterms == 0 and not fit_mean:
        # no Fourier terms and no bias term: nothing to fit
        with pytest.raises(ValueError) as err:
            ls.power(frequency, method=method, **kwds)
        assert 'nterms' in str(err.value) and 'bias' in str(err.value)
    else:
        P_expected = ls.power(frequency, **kwds)
        # don't use fast fft approximations here
        if 'fast' in method:
            kwds['method_kwds'] = dict(use_fft=False)
        P_method = ls.power(frequency, method=method, **kwds)
        assert_allclose(P_expected, P_method, rtol=1E-7, atol=1E-25)
@pytest.mark.parametrize('method', FAST_METHODS)
@pytest.mark.parametrize('center_data', [True, False])
@pytest.mark.parametrize('fit_mean', [True, False])
@pytest.mark.parametrize('with_errors', [True, False])
@pytest.mark.parametrize('nterms', [0, 1, 2])
def test_fast_approximations(method, center_data, fit_mean,
                             with_errors, nterms, data):
    """FFT-accelerated evaluation stays close to the exact (use_fft=False)
    result for every 'fast' method."""
    t, y, dy = data
    frequency = 0.8 + 0.01 * np.arange(40)
    if not with_errors:
        dy = None
    ls = LombScargle(t, y, dy, center_data=center_data,
                     fit_mean=fit_mean, nterms=nterms)
    # use only standard normalization because we compare via absolute tolerance
    kwds = dict(method=method, normalization='standard')
    if method == 'fast' and nterms != 1:
        # the single-term 'fast' method only accepts nterms == 1
        with pytest.raises(ValueError) as err:
            ls.power(frequency, **kwds)
        assert 'nterms' in str(err.value)
    elif nterms == 0 and not fit_mean:
        # no Fourier terms and no bias term: nothing to fit
        with pytest.raises(ValueError) as err:
            ls.power(frequency, **kwds)
        assert 'nterms' in str(err.value) and 'bias' in str(err.value)
    else:
        P_fast = ls.power(frequency, **kwds)
        kwds['method_kwds'] = dict(use_fft=False)
        P_slow = ls.power(frequency, **kwds)
        assert_allclose(P_fast, P_slow, atol=0.008)
@pytest.mark.parametrize('method', LombScargle.available_methods)
@pytest.mark.parametrize('shape', [(), (1,), (2,), (3,), (2, 3)])
def test_output_shapes(method, shape, data):
    """power() output has the same shape as the input frequency array,
    including 0-d and 2-d inputs."""
    t, y, dy = data
    freq = np.asarray(np.zeros(shape))
    freq.flat = np.arange(1, freq.size + 1)
    PLS = LombScargle(t, y, fit_mean=False).power(freq, method=method)
    assert PLS.shape == shape
@pytest.mark.parametrize('method', LombScargle.available_methods)
def test_errors_on_unit_mismatch(method, data):
    """Dimensionally inconsistent inputs raise informative ValueErrors."""
    t, y, dy = data
    t = t * units.second
    y = y * units.mag
    frequency = np.linspace(0.5, 1.5, 10)
    # this should fail because frequency and 1/t units do not match
    with pytest.raises(ValueError) as err:
        LombScargle(t, y, fit_mean=False).power(frequency, method=method)
    assert str(err.value).startswith('Units of frequency not equivalent')
    # this should fail because dy and y units do not match
    # (dy here is a plain array while y carries magnitudes)
    with pytest.raises(ValueError) as err:
        LombScargle(t, y, dy, fit_mean=False).power(frequency / t.unit)
    assert str(err.value).startswith('Units of dy not equivalent')
# we don't test all normalizations here because they are tested above
# only test method='auto' because unit handling does not depend on method
@pytest.mark.parametrize('fit_mean', [True, False])
@pytest.mark.parametrize('center_data', [True, False])
@pytest.mark.parametrize('normalization', ['standard', 'psd'])
@pytest.mark.parametrize('with_error', [True, False])
def test_unit_conversions(data, fit_mean, center_data,
                          normalization, with_error):
    """Results are invariant under equivalent-unit rescaling of t and y.

    NOTE(review): fit_mean, center_data, and normalization are parametrized
    but never used in the body, so those 8 parameter combinations re-run an
    identical test — consider dropping the unused parametrize decorators.
    """
    t, y, dy = data
    t_day = t * units.day
    t_hour = units.Quantity(t_day, 'hour')
    y_meter = y * units.meter
    y_millimeter = units.Quantity(y_meter, 'millimeter')
    # sanity check on inputs
    assert_quantity_allclose(t_day, t_hour)
    assert_quantity_allclose(y_meter, y_millimeter)
    if with_error:
        dy = dy * units.meter
    else:
        dy = None
    freq_day, P1 = LombScargle(t_day, y_meter, dy).autopower()
    freq_hour, P2 = LombScargle(t_hour, y_millimeter, dy).autopower()
    # Check units of frequency
    assert freq_day.unit == 1. / units.day
    assert freq_hour.unit == 1. / units.hour
    # Check that results match
    assert_quantity_allclose(freq_day, freq_hour)
    assert_quantity_allclose(P1, P2)
    # Check that switching frequency units doesn't change things
    P3 = LombScargle(t_day, y_meter, dy).power(freq_hour)
    P4 = LombScargle(t_hour, y_meter, dy).power(freq_day)
    assert_quantity_allclose(P3, P4)
@pytest.mark.parametrize('fit_mean', [True, False])
@pytest.mark.parametrize('with_units', [True, False])
@pytest.mark.parametrize('freq', [1.0, 2.0])
def test_model(fit_mean, with_units, freq):
    """model() reproduces a noiseless single-frequency sinusoid exactly."""
    rand = np.random.RandomState(0)
    t = 10 * rand.rand(40)
    params = 10 * rand.rand(3)
    # build a pure sinusoid (plus offset only when the model fits a mean)
    y = np.zeros_like(t)
    if fit_mean:
        y += params[0]
    y += params[1] * np.sin(2 * np.pi * freq * (t - params[2]))
    if with_units:
        t = t * units.day
        y = y * units.mag
        freq = freq / units.day
    ls = LombScargle(t, y, center_data=False, fit_mean=fit_mean)
    y_fit = ls.model(t, freq)
    assert_quantity_allclose(y_fit, y)
@pytest.mark.parametrize('t_unit', [units.second, units.day])
@pytest.mark.parametrize('frequency_unit', [units.Hz, 1. / units.second])
@pytest.mark.parametrize('y_unit', [units.mag, units.jansky])
def test_model_units_match(data, t_unit, frequency_unit, y_unit):
    """model() output carries the same unit as the input y values."""
    t_raw, y_raw, dy_raw = data
    t = t_raw * t_unit
    t_fit = t_raw[:5] * t_unit
    y = y_raw * y_unit
    dy = dy_raw * y_unit
    frequency = 1.0 * frequency_unit
    y_fit = LombScargle(t, y, dy).model(t_fit, frequency)
    assert y_fit.unit == y_unit
def test_model_units_mismatch(data):
    """model() raises when frequency, t_fit, or dy units are inconsistent."""
    t, y, dy = data
    frequency = 1.0
    t_fit = t[:5]
    t = t * units.second
    t_fit = t_fit * units.second
    y = y * units.mag
    # reassign frequency so it matches 1/t for the later checks
    frequency = 1.0 / t.unit
    # this should fail because frequency and 1/t units do not match
    with pytest.raises(ValueError) as err:
        LombScargle(t, y).model(t_fit, frequency=1.0)
    assert str(err.value).startswith('Units of frequency not equivalent')
    # this should fail because t and t_fit units do not match
    with pytest.raises(ValueError) as err:
        LombScargle(t, y).model([1, 2], frequency)
    assert str(err.value).startswith('Units of t not equivalent')
    # this should fail because dy and y units do not match
    with pytest.raises(ValueError) as err:
        LombScargle(t, y, dy).model(t_fit, frequency)
    assert str(err.value).startswith('Units of dy not equivalent')
def test_autopower(data):
    """autopower() equals autofrequency() followed by power()."""
    t, y, dy = data
    ls = LombScargle(t, y, dy)
    grid_options = {'samples_per_peak': 6, 'nyquist_factor': 2,
                    'minimum_frequency': 2, 'maximum_frequency': None}
    expected_freq = ls.autofrequency(**grid_options)
    expected_power = ls.power(expected_freq)
    freq, power = ls.autopower(**grid_options)
    assert_allclose(expected_freq, freq)
    assert_allclose(expected_power, power)
@pytest.fixture
def null_data(N=1000, dy=1, rseed=0):
    """Pure-noise data set (null hypothesis): no signal, heteroscedastic errors."""
    random_state = np.random.RandomState(rseed)
    t = 100 * random_state.rand(N)
    errors = 0.5 * dy * (1 + random_state.rand(N))
    y = errors * random_state.randn(N)
    return t, y, errors
@pytest.mark.parametrize('normalization', NORMALIZATIONS)
def test_distribution(null_data, normalization):
    """Periodogram of pure noise follows the theoretical null distribution.

    Checks (a) that the analytic pdf and cdf helpers are mutually consistent,
    and (b) that the observed power histogram matches the analytic pdf.
    """
    t, y, dy = null_data
    N = len(t)
    ls = LombScargle(t, y, dy)
    freq, power = ls.autopower(normalization=normalization,
                               maximum_frequency=40)
    z = np.linspace(0, power.max(), 1000)
    # Test that pdf and cdf are consistent
    dz = z[1] - z[0]
    z_mid = z[:-1] + 0.5 * dz
    pdf = _lombscargle_pdf(z_mid, N, normalization=normalization)
    cdf = _lombscargle_cdf(z, N, normalization=normalization)
    assert_allclose(pdf, np.diff(cdf) / dz, rtol=1E-5, atol=1E-8)
    # Test that observed power is distributed according to the theoretical pdf
    # (density=True replaces the 'normed' keyword, which was deprecated in
    # NumPy and removed in NumPy 1.24)
    hist, bins = np.histogram(power, 30, density=True)
    midpoints = 0.5 * (bins[1:] + bins[:-1])
    pdf = _lombscargle_pdf(midpoints, N, normalization=normalization)
    assert_allclose(hist, pdf, rtol=0.05, atol=0.05 * pdf[0])
# The following are convenience functions used to compute statistics of the
# periodogram under various normalizations; they are used in the preceding
# test.
def _lombscargle_pdf(z, N, normalization, dH=1, dK=3):
"""Probability density function for Lomb-Scargle periodogram
Compute the expected probability density function of the periodogram
for the null hypothesis - i.e. data consisting of Gaussian noise.
Parameters
----------
z : array-like
the periodogram value
N : int
the number of data points from which the periodogram was computed
normalization : string
The periodogram normalization. Must be one of
['standard', 'model', 'log', 'psd']
dH, dK : integers (optional)
The number of parameters in the null hypothesis and the model
Returns
-------
pdf : np.ndarray
The expected probability density function
Notes
-----
For normalization='psd', the distribution can only be computed for
periodograms constructed with errors specified.
All expressions used here are adapted from Table 1 of Baluev 2008 [1]_.
References
----------
.. [1] Baluev, R.V. MNRAS 385, 1279 (2008)
"""
if dK - dH != 2:
raise NotImplementedError("Degrees of freedom != 2")
Nk = N - dK
if normalization == 'psd':
return np.exp(-z)
elif normalization == 'standard':
return 0.5 * Nk * (1 + z) ** (-0.5 * Nk - 1)
elif normalization == 'model':
return 0.5 * Nk * (1 - z) ** (0.5 * Nk - 1)
elif normalization == 'log':
return 0.5 * Nk * np.exp(-0.5 * Nk * z)
else:
raise ValueError("normalization='{0}' is not recognized"
"".format(normalization))
def _lombscargle_cdf(z, N, normalization, dH=1, dK=3):
"""Cumulative distribution for the Lomb-Scargle periodogram
Compute the expected cumulative distribution of the periodogram
for the null hypothesis - i.e. data consisting of Gaussian noise.
Parameters
----------
z : array-like
the periodogram value
N : int
the number of data points from which the periodogram was computed
normalization : string
The periodogram normalization. Must be one of
['standard', 'model', 'log', 'psd']
dH, dK : integers (optional)
The number of parameters in the null hypothesis and the model
Returns
-------
cdf : np.ndarray
The expected cumulative distribution function
Notes
-----
For normalization='psd', the distribution can only be computed for
periodograms constructed with errors specified.
All expressions used here are adapted from Table 1 of Baluev 2008 [1]_.
References
----------
.. [1] Baluev, R.V. MNRAS 385, 1279 (2008)
"""
if dK - dH != 2:
raise NotImplementedError("Degrees of freedom != 2")
Nk = N - dK
if normalization == 'psd':
return 1 - np.exp(-z)
elif normalization == 'standard':
return 1 - (1 + z) ** (-0.5 * Nk)
elif normalization == 'model':
return 1 - (1 - z) ** (0.5 * Nk)
elif normalization == 'log':
return 1 - np.exp(-0.5 * Nk * z)
else:
raise ValueError("normalization='{0}' is not recognized"
"".format(normalization))
| {
"content_hash": "a1d1f4bd66c123df19adb0c68c1b3b66",
"timestamp": "",
"source": "github",
"line_count": 497,
"max_line_length": 79,
"avg_line_length": 34.074446680080484,
"alnum_prop": 0.6220253912016533,
"repo_name": "kelle/astropy",
"id": "1f8211fcf340abb04e6a76b702a38e96c513d795",
"size": "16935",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "astropy/stats/lombscargle/tests/test_lombscargle.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "366877"
},
{
"name": "C++",
"bytes": "1825"
},
{
"name": "HTML",
"bytes": "1172"
},
{
"name": "Jupyter Notebook",
"bytes": "62553"
},
{
"name": "Python",
"bytes": "8072264"
},
{
"name": "Shell",
"bytes": "446"
},
{
"name": "TeX",
"bytes": "778"
}
],
"symlink_target": ""
} |
<!-- Portfolio Grid Section: one Bootstrap card per Jekyll post. -->
<!-- Portfolio Grid Section -->
<section id="portfolio">
<div class="container">
<div class="row">
<div class="col-lg-12 text-center">
<h2 class="section-heading">Portfolio</h2>
<!-- NOTE(review): placeholder lorem-ipsum subheading — replace with real copy. -->
<h3 class="section-subheading text-muted">Lorem ipsum dolor sit amet consectetur.</h3>
</div>
</div>
<div class="row">
<!-- Each card links to a modal whose id is built from the post's
     modal-id front-matter value; clicking opens it via data-toggle. -->
{% for post in site.posts %}
<div class="col-md-4 col-sm-6 portfolio-item">
<a href="#portfolioModal{{ post.modal-id }}" class="portfolio-link" data-toggle="modal">
<div class="portfolio-hover">
<div class="portfolio-hover-content">
<i class="fa fa-plus fa-3x"></i>
</div>
</div>
<!-- NOTE(review): alt is empty; consider using the post title for accessibility. -->
<img src="img/portfolio/{{ post.thumbnail }}" class="img-responsive img-centered" alt="">
</a>
<div class="portfolio-caption">
<h4>{{ post.title }}</h4>
<p class="text-muted">{{ post.subtitle }}</p>
</div>
</div>
{% endfor %}
</div>
</div>
</section>
| {
"content_hash": "36ea5e6d54c99fa3422c78c518e56b45",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 101,
"avg_line_length": 36.96551724137931,
"alnum_prop": 0.5149253731343284,
"repo_name": "iagomosqueira/agency",
"id": "6cd33dd0c2ba1ccd227aebd3651b15153ec81a6e",
"size": "1072",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_includes/portfolio_grid.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "18499"
},
{
"name": "HTML",
"bytes": "26178"
},
{
"name": "JavaScript",
"bytes": "42756"
},
{
"name": "PHP",
"bytes": "1092"
},
{
"name": "Ruby",
"bytes": "6047"
}
],
"symlink_target": ""
} |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Management.Automation;
using Microsoft.Azure.Commands.RecoveryServices.Backup.Cmdlets.Models;
using Microsoft.Azure.Commands.RecoveryServices.Backup.Cmdlets.ProviderModel;
using Microsoft.Azure.Commands.RecoveryServices.Backup.Properties;
using Microsoft.Azure.Commands.ResourceManager.Common.ArgumentCompleters;
namespace Microsoft.Azure.Commands.RecoveryServices.Backup.Cmdlets
{
/// <summary>
/// Enable protection of an item with the recovery services vault.
/// Returns the corresponding job created in the service to track this operation.
/// </summary>
[Cmdlet(VerbsLifecycle.Enable, "AzureRmRecoveryServicesBackupProtection",
DefaultParameterSetName = AzureVMComputeParameterSet, SupportsShouldProcess = true),
OutputType(typeof(JobBase))]
public class EnableAzureRmRecoveryServicesBackupProtection : RecoveryServicesBackupCmdletBase
{
// Parameter-set names: classic (cloud-service) VMs, compute VMs, and
// modification of an already-protected item.
internal const string AzureVMClassicComputeParameterSet = "AzureVMClassicComputeEnableProtection";
internal const string AzureVMComputeParameterSet = "AzureVMComputeEnableProtection";
internal const string ModifyProtectionParameterSet = "ModifyProtection";
/// <summary>
/// Policy to be associated with this item as part of the protection operation.
/// </summary>
[Parameter(Position = 1, Mandatory = true, HelpMessage = ParamHelpMsgs.Policy.ProtectionPolicy)]
[ValidateNotNullOrEmpty]
public PolicyBase Policy { get; set; }
/// <summary>
/// Name of the Azure VM whose representative item needs to be protected.
/// </summary>
[Parameter(Position = 2, Mandatory = true, ValueFromPipelineByPropertyName = true,
ParameterSetName = AzureVMClassicComputeParameterSet, HelpMessage = ParamHelpMsgs.Item.AzureVMName)]
[Parameter(Mandatory = true, ValueFromPipelineByPropertyName = true,
ParameterSetName = AzureVMComputeParameterSet, HelpMessage = ParamHelpMsgs.Item.AzureVMName)]
public string Name { get; set; }
/// <summary>
/// Service name of the classic Azure VM whose representative item needs to be protected.
/// </summary>
[Parameter(Position = 3, Mandatory = true, ValueFromPipelineByPropertyName = true,
ParameterSetName = AzureVMClassicComputeParameterSet,
HelpMessage = ParamHelpMsgs.Item.AzureVMServiceName)]
public string ServiceName { get; set; }
/// <summary>
/// Resource group name of the compute Azure VM whose representative item needs to be protected.
/// </summary>
[Parameter(Position = 3, Mandatory = true, ValueFromPipelineByPropertyName = true,
ParameterSetName = AzureVMComputeParameterSet,
HelpMessage = ParamHelpMsgs.Item.AzureVMResourceGroupName)]
[ResourceGroupCompleter]
public string ResourceGroupName { get; set; }
/// <summary>
/// Item whose protection needs to be modified.
/// </summary>
[Parameter(Position = 4, Mandatory = true, ParameterSetName = ModifyProtectionParameterSet,
HelpMessage = ParamHelpMsgs.Item.ProtectedItem, ValueFromPipeline = true)]
[ValidateNotNullOrEmpty]
public ItemBase Item { get; set; }
public override void ExecuteCmdlet()
{
ExecutionBlock(() =>
{
base.ExecuteCmdlet();
// ShouldProcess target: the VM name, or the existing item's name
// when modifying protection.
string shouldProcessName = Name;
if (ParameterSetName == ModifyProtectionParameterSet)
{
shouldProcessName = Item.Name;
}
if (ShouldProcess(shouldProcessName, VerbsLifecycle.Enable))
{
// Bundle every parameter-set input; the selected provider reads
// only the entries relevant to its workload.
PsBackupProviderManager providerManager =
new PsBackupProviderManager(new Dictionary<Enum, object>()
{
{ItemParams.AzureVMName, Name},
{ItemParams.AzureVMCloudServiceName, ServiceName},
{ItemParams.AzureVMResourceGroupName, ResourceGroupName},
{ItemParams.Policy, Policy},
{ItemParams.Item, Item},
{ItemParams.ParameterSetName, this.ParameterSetName},
}, ServiceClientAdapter);
// Modify flow: provider comes from the existing item's workload and
// backup-management type; enable flow: from the policy's workload.
IPsBackupProvider psBackupProvider = (Item != null) ?
providerManager.GetProviderInstance(Item.WorkloadType, Item.BackupManagementType)
: providerManager.GetProviderInstance(Policy.WorkloadType);
var itemResponse = psBackupProvider.EnableProtection();
// Track Response and display job details
HandleCreatedJob(itemResponse, Resources.EnableProtectionOperation);
}
});
}
}
}
| {
"content_hash": "f73d5932781c37f777c5aa3758010e6e",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 112,
"avg_line_length": 49.10344827586207,
"alnum_prop": 0.6457162921348315,
"repo_name": "naveedaz/azure-powershell",
"id": "efbe13f80cbab1853af1c98b1118b4ced85fbe30",
"size": "5698",
"binary": false,
"copies": "4",
"ref": "refs/heads/preview",
"path": "src/ResourceManager/RecoveryServices.Backup/Commands.RecoveryServices.Backup/Cmdlets/Item/EnableAzureRmRecoveryServicesBackupProtection.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "18388"
},
{
"name": "C#",
"bytes": "60116091"
},
{
"name": "HTML",
"bytes": "209"
},
{
"name": "JavaScript",
"bytes": "4979"
},
{
"name": "PHP",
"bytes": "41"
},
{
"name": "PowerShell",
"bytes": "7094905"
},
{
"name": "Ruby",
"bytes": "398"
},
{
"name": "Shell",
"bytes": "50"
},
{
"name": "Smalltalk",
"bytes": "2510"
},
{
"name": "XSLT",
"bytes": "6114"
}
],
"symlink_target": ""
} |
<section data-ng-controller="ApisController">
<div class="page-header">
<h1>New Api</h1>
</div>
<div class="col-md-12">
<form class="form-horizontal" data-ng-submit="create()" novalidate>
<fieldset>
<div class="form-group">
<label class="control-label" for="name">Name</label>
<div class="controls">
<input type="text" data-ng-model="name" id="name" class="form-control" placeholder="Name" required>
</div>
</div>
<div class="form-group">
<input type="submit" class="btn btn-default">
</div>
<div data-ng-show="error" class="text-danger">
<strong data-ng-bind="error"></strong>
</div>
</fieldset>
</form>
</div>
</section> | {
"content_hash": "eff5bdec6ab650f74979092cc241c883",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 123,
"avg_line_length": 37.69565217391305,
"alnum_prop": 0.49596309111880044,
"repo_name": "PongPi/admissions",
"id": "2e14b2c38f213f95d8059ba6b3f2649b12a6ae55",
"size": "867",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "public/modules/apis/views/create-api.client.view.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "124337"
},
{
"name": "HTML",
"bytes": "99365"
},
{
"name": "JavaScript",
"bytes": "353628"
},
{
"name": "Shell",
"bytes": "414"
}
],
"symlink_target": ""
} |
import json
class ApiBase:
    """Base class for an external API integration.

    Holds the common identity state (service name, auth flag, username) and
    provides (de)serialization helpers; subclasses implement the OAuth flow
    and the update/logout hooks.
    """

    def __init__(self, name):
        # Common state shared by every concrete API backend.
        self.name = name
        self.is_auth = False
        self.username = ""

    def icon_url(self):
        """Return the URL of the service icon. Must be overridden."""
        raise NotImplementedError

    def oauth_link(self):
        """Return the URL that starts the OAuth flow. Must be overridden."""
        raise NotImplementedError

    def oauth_callback(self, params):
        """Handle the OAuth redirect parameters. Must be overridden."""
        raise NotImplementedError

    def update(self):
        """Refresh data from the remote service. Must be overridden."""
        raise NotImplementedError

    def logout(self):
        """Drop authentication state. Must be overridden."""
        raise NotImplementedError

    def pack(self):
        """Expose this object's attributes as a plain dict (the live __dict__)."""
        return self.__dict__

    def unpack(self, data):
        """Restore attributes from a mapping produced by pack().

        Falsy ``data`` is ignored. Returns self to allow chaining.
        """
        if not data:
            return self
        self.__dict__.update(data)
        return self

    def __str__(self):
        return "ApiBase {0}".format(self.__dict__)
| {
"content_hash": "553a6974660828d35cfc9f777c1d0717",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 47,
"avg_line_length": 19.823529411764707,
"alnum_prop": 0.5756676557863502,
"repo_name": "blukat29/notifyhere",
"id": "5bccd5b02f4ae6371033b7265a5140983637688c",
"size": "674",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "notifyhere/dash/api/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "473"
},
{
"name": "JavaScript",
"bytes": "218"
},
{
"name": "Python",
"bytes": "22876"
}
],
"symlink_target": ""
} |
import Lumines from './src/Lumines.js';

// Engine configuration. baseGravity presumably controls how fast blocks
// fall — TODO confirm units against src/Lumines.js.
let config = {
    baseGravity: 120
};

// Boot the game inside the #lumines container and start it immediately.
const lumines = new Lumines(document.getElementById('lumines'), config);
lumines.start();

// Wire the stop/start buttons. Assigning onclick (rather than
// addEventListener) replaces any previously attached handler.
document.getElementById('stop').onclick = () => {
    lumines.stop();
};

document.getElementById('start').onclick = () => {
    lumines.start();
};
| {
"content_hash": "0e6ef22d9dc635b96b0f4d96bb83a9b8",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 72,
"avg_line_length": 22.785714285714285,
"alnum_prop": 0.6614420062695925,
"repo_name": "tobice/flux-lumines",
"id": "d655916d87599d22cf0619f3503850171a729bbc",
"size": "319",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2483"
},
{
"name": "HTML",
"bytes": "590"
},
{
"name": "JavaScript",
"bytes": "58995"
}
],
"symlink_target": ""
} |
#include <aws/servicecatalog/model/AccessLevelFilterKey.h>
#include <aws/core/utils/HashingUtils.h>
#include <aws/core/Globals.h>
#include <aws/core/utils/EnumParseOverflowContainer.h>
using namespace Aws::Utils;
namespace Aws
{
namespace ServiceCatalog
{
namespace Model
{
namespace AccessLevelFilterKeyMapper
{
// Precomputed hash codes of the known enum value names; parsing compares
// integer hashes instead of doing repeated string comparisons.
static const int Account_HASH = HashingUtils::HashString("Account");
static const int Role_HASH = HashingUtils::HashString("Role");
static const int User_HASH = HashingUtils::HashString("User");


// Maps a wire-format name to its AccessLevelFilterKey value. Unrecognized
// names are recorded in the global enum-overflow container so the raw value
// can round-trip through GetNameForAccessLevelFilterKey(); when no container
// is available, NOT_SET is returned.
AccessLevelFilterKey GetAccessLevelFilterKeyForName(const Aws::String& name)
{
  int hashCode = HashingUtils::HashString(name.c_str());
  if (hashCode == Account_HASH)
  {
    return AccessLevelFilterKey::Account;
  }
  else if (hashCode == Role_HASH)
  {
    return AccessLevelFilterKey::Role;
  }
  else if (hashCode == User_HASH)
  {
    return AccessLevelFilterKey::User;
  }
  // Unknown name: stash (hash, name) so the synthesized enum value can be
  // mapped back to the original string later.
  EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
  if(overflowContainer)
  {
    overflowContainer->StoreOverflow(hashCode, name);
    return static_cast<AccessLevelFilterKey>(hashCode);
  }
  return AccessLevelFilterKey::NOT_SET;
}

// Maps an AccessLevelFilterKey back to its wire-format name. Values produced
// from unknown names are resolved via the overflow container; an empty string
// is returned when nothing is known about the value.
Aws::String GetNameForAccessLevelFilterKey(AccessLevelFilterKey enumValue)
{
  switch(enumValue)
  {
  case AccessLevelFilterKey::Account:
    return "Account";
  case AccessLevelFilterKey::Role:
    return "Role";
  case AccessLevelFilterKey::User:
    return "User";
  default:
    EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
    if(overflowContainer)
    {
      return overflowContainer->RetrieveOverflow(static_cast<int>(enumValue));
    }
    return "";
  }
}
} // namespace AccessLevelFilterKeyMapper
} // namespace Model
} // namespace ServiceCatalog
} // namespace Aws
| {
"content_hash": "fd54595ef951b80dc96ce5f179c7c2d2",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 92,
"avg_line_length": 29.835616438356166,
"alnum_prop": 0.610651974288338,
"repo_name": "ambasta/aws-sdk-cpp",
"id": "a310f24aaed7f28e6a75c350b20b2b5644b7ba78",
"size": "2751",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-cpp-sdk-servicecatalog/source/model/AccessLevelFilterKey.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2305"
},
{
"name": "C++",
"bytes": "74273816"
},
{
"name": "CMake",
"bytes": "412257"
},
{
"name": "Java",
"bytes": "229873"
},
{
"name": "Python",
"bytes": "62933"
}
],
"symlink_target": ""
} |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Composition;
using Microsoft.CodeAnalysis.Formatting.Rules;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.Text;
namespace Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces
{
[ExportWorkspaceServiceFactory(typeof(IHostDependentFormattingRuleFactoryService), WorkspaceKind.Test), Shared]
internal sealed class TestFormattingRuleFactoryServiceFactory : IWorkspaceServiceFactory
{
[ImportingConstructor]
[Obsolete(MefConstruction.ImportingConstructorMessage, error: true)]
public TestFormattingRuleFactoryServiceFactory()
{
}
public IWorkspaceService CreateService(HostWorkspaceServices workspaceServices)
{
// return new factory per workspace
return new Factory();
}
public sealed class Factory : IHostDependentFormattingRuleFactoryService
{
public int BaseIndentation = 0;
public TextSpan TextSpan = default;
public bool UseBaseIndentation = false;
public bool ShouldUseBaseIndentation(Document document)
=> UseBaseIndentation;
public AbstractFormattingRule CreateRule(Document document, int position)
{
if (BaseIndentation == 0)
{
return NoOpFormattingRule.Instance;
}
var root = document.GetSyntaxRootAsync().Result;
return new BaseIndentationFormattingRule(root, TextSpan, BaseIndentation + 4);
}
public IEnumerable<TextChange> FilterFormattedChanges(Document document, TextSpan span, IList<TextChange> changes)
=> changes;
public bool ShouldNotFormatOrCommitOnPaste(Document document)
=> UseBaseIndentation;
}
}
}
| {
"content_hash": "4317abf584342aa40e50ea3661064998",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 126,
"avg_line_length": 37.40350877192982,
"alnum_prop": 0.6810506566604128,
"repo_name": "davkean/roslyn",
"id": "9d2dc49079c73d2df84b91d038d1d7205a1fe9aa",
"size": "2134",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/EditorFeatures/TestUtilities/Workspaces/TestFormattingRuleFactoryServiceFactory.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "1C Enterprise",
"bytes": "289100"
},
{
"name": "Batchfile",
"bytes": "9059"
},
{
"name": "C#",
"bytes": "134997671"
},
{
"name": "C++",
"bytes": "5602"
},
{
"name": "CMake",
"bytes": "9153"
},
{
"name": "Dockerfile",
"bytes": "2450"
},
{
"name": "F#",
"bytes": "549"
},
{
"name": "PowerShell",
"bytes": "233223"
},
{
"name": "Shell",
"bytes": "92956"
},
{
"name": "Visual Basic .NET",
"bytes": "71479723"
}
],
"symlink_target": ""
} |
import GridSquare
import random
from PIL import Image, ImageDraw
class DungeonRoom(object):
    """
    This class is an object which describes a room in the Dungeon. It includes a
    list of GridSquares, creatures, and the top-left position

    NOTE: this module uses ``xrange``, so it targets Python 2.
    """

    def __randint(self, a, b, seed):
        """
        This method is intended only for internal use. It takes three
        arguments:
        a - The smallest number returnable
        b - The largest number returnable
        seed - The random seed to be used
        """
        # Seed the module-level RNG deterministically for a reproducible
        # draw, then re-seed from system entropy so that unrelated callers
        # of random are not left with a fixed seed.
        random.seed(seed)
        r = random.random()
        random.seed()
        # NOTE(review): a + int(r*(b-a)) yields values in [a, b-1], so b is
        # never returned despite the docstring — confirm the intended range.
        return a+int(r*(b-a))

    def __init_grid(self):
        # The grid is (width+2) x (height+2) squares: the extra outer ring
        # is filled with 'wall' squares surrounding the open interior.
        max_row = self.width+1
        max_col = self.height+1
        for row in xrange(0, max_row+1):
            for col in xrange(0, max_col+1):
                if row == 0 or row == max_row or\
                   col == 0 or col == max_col:
                    self.squares[(row,col)] = GridSquare.GridSquare(fill='wall')
                else:
                    self.squares[(row,col)] = GridSquare.GridSquare()

    def draw_grid(self):
        """Render the room to a PIL Image, 20 pixels per grid square."""
        im = Image.new('RGB', ((self.width+2)*20, (self.height+2)*20))
        draw = ImageDraw.Draw(im)
        max_row = self.width+1
        max_col = self.height+1
        for row in xrange(0, max_row+1):
            for col in xrange(0, max_col+1):
                x0 = 20*row
                y0 = 20*col
                x1 = x0+20
                y1 = y0+20
                # NOTE(review): Image.paste expects a 2- or 4-tuple box; the
                # [(x0,y0),(x1,y1)] form plus a non-image fill value from
                # get_fill() may not be accepted by all PIL/Pillow versions —
                # verify against GridSquare.get_fill's return type.
                im.paste(self.squares[(row,col)].get_fill(), \
                         [(x0,y0),(x1,y1)])
                draw.rectangle([(x0,y0),(x1,y1)], outline='teal')
        return im

    def __init__(self, seed, order, max_side, pos):
        """Create a room with pseudo-random dimensions.

        seed     - world seed shared by all rooms
        order    - index of this room; combined with seed for uniqueness
        max_side - largest allowed width/height
        pos      - top-left position; accepted but not used by this
                   constructor — TODO confirm whether it should be stored
        """
        # Derive distinct sub-seeds so width and height vary independently
        # while remaining reproducible for a given (seed, order) pair.
        rseed = str(seed)+str(order)
        wseed = str(rseed)+"w"
        hseed = str(rseed)+"h"
        self.width = self.__randint(1, max_side, wseed)
        self.height = self.__randint(1, max_side, hseed)
        self.order = order
        self.squares = {}
        self.__init_grid()
| {
"content_hash": "59426220b1281ee4791541a720494897",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 80,
"avg_line_length": 28.914285714285715,
"alnum_prop": 0.5148221343873518,
"repo_name": "zmcneilly/gm_tools",
"id": "71274c616499dee4e60d8749d1363df69b97793c",
"size": "2024",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dev_testing/DungeonRoom.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5360"
}
],
"symlink_target": ""
} |
using System.Collections.Concurrent;
namespace Nimbus.Extensions
{
internal static class ConcurrentBagExtensions
{
    /// <summary>
    /// Drains every element from <paramref name="bag"/> by repeatedly taking
    /// items until a take fails (i.e. the bag reports empty).
    /// </summary>
    internal static void Clear<T>(this ConcurrentBag<T> bag)
    {
        T ignored;
        while (bag.TryTake(out ignored))
        {
            // Intentionally empty: TryTake both tests and removes.
        }
    }
}
} | {
"content_hash": "85f03ff3924eb03671061904a1a2e578",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 64,
"avg_line_length": 20.6,
"alnum_prop": 0.5598705501618123,
"repo_name": "NimbusAPI/Nimbus",
"id": "5e09ae61edda3dc36829ad25bc1c7eb3af28b1af",
"size": "311",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "src/Nimbus/Extensions/ConcurrentBagExtensions.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "891511"
},
{
"name": "Dockerfile",
"bytes": "868"
},
{
"name": "PowerShell",
"bytes": "6089"
},
{
"name": "Shell",
"bytes": "2082"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:media="http://search.yahoo.com/mrss/"
xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
<title>XML Sitemaps RSS Feed</title>
<link>http://www.example.com/</link>
<description>Example.com - Feed</description>
<item>
<title>First Post</title>
</item>
</channel>
</rss>
| {
"content_hash": "3b2dff4b64e5873de1c2b7437c9cef14",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 49,
"avg_line_length": 27.933333333333334,
"alnum_prop": 0.6205250596658711,
"repo_name": "dhl/maxml",
"id": "1f185901ff974fcc262f59a7a37d93fae32a310c",
"size": "419",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/fixtures/sample.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "9832"
}
],
"symlink_target": ""
} |
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:wicket="http://wicket.apache.org">
<wicket:panel>
<div wicket:id="filters">[filters]</div>
</wicket:panel>
</html>
| {
"content_hash": "8335a9cd90f6c78e731159fcbcc7a4c1",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 83,
"avg_line_length": 40.69565217391305,
"alnum_prop": 0.7702991452991453,
"repo_name": "ilgrosso/syncope",
"id": "24c716b8bcd833a06f4b802f87b73fb8826b3565",
"size": "936",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "client/am/console/src/main/resources/org/apache/syncope/client/console/panels/SRARouteWizardBuilder$Filters.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1932"
},
{
"name": "Batchfile",
"bytes": "1044"
},
{
"name": "CSS",
"bytes": "44883"
},
{
"name": "Dockerfile",
"bytes": "8716"
},
{
"name": "Groovy",
"bytes": "78474"
},
{
"name": "HTML",
"bytes": "549487"
},
{
"name": "Java",
"bytes": "13523162"
},
{
"name": "JavaScript",
"bytes": "36620"
},
{
"name": "PLpgSQL",
"bytes": "20311"
},
{
"name": "SCSS",
"bytes": "65724"
},
{
"name": "Shell",
"bytes": "6231"
},
{
"name": "TSQL",
"bytes": "11632"
},
{
"name": "XSLT",
"bytes": "5158"
}
],
"symlink_target": ""
} |
// View controller for the "current queue" GCD example screen.
// NOTE(review): exposes no API beyond UIViewController; the behaviour lives
// in the corresponding .m file (not visible here) — confirm before relying
// on this description.
@interface RXCurrentQueueViewController : UIViewController

@end
| {
"content_hash": "041b222bfde78379b1a0e9062cce84a6",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 58,
"avg_line_length": 21.666666666666668,
"alnum_prop": 0.8615384615384616,
"repo_name": "xzjxylophone/RXVerifyExample",
"id": "75810dd5db543bccd9823aba5b2367f5e599fef6",
"size": "249",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "RXVerifyExample/RXVerifyExample/VC/GCD/RXCurrentQueueViewController.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3331"
},
{
"name": "C++",
"bytes": "1578988"
},
{
"name": "JavaScript",
"bytes": "145"
},
{
"name": "LLVM",
"bytes": "45738"
},
{
"name": "Objective-C",
"bytes": "1454563"
},
{
"name": "Objective-C++",
"bytes": "9289"
},
{
"name": "Ruby",
"bytes": "1589"
}
],
"symlink_target": ""
} |
module TransitlandClient
  # Raised when an HTTP call to the Transitland API fails.
  class ApiException < StandardError
  end

  # Raised when reading from or writing to the local database (the cache for
  # API responses) fails.
  class DatabaseException < StandardError
  end

  # Raised when constructing an object that inherits from Entity fails.
  class EntityException < StandardError
  end
end
| {
"content_hash": "6f19c6ab31d1de159f8f0563092a3f44",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 72,
"avg_line_length": 40.04545454545455,
"alnum_prop": 0.7832009080590239,
"repo_name": "transitland/transitland-ruby-client",
"id": "0e7ee032fa52b5787cf986f863b6669be3ca2875",
"size": "881",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/transitland_client/errors.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "25543"
}
],
"symlink_target": ""
} |
__author__ = 'SkyLapse'
from model.basemodel import BaseModel
class ApiKeyModel(BaseModel):
    """Model for API keys stored in the ``apikeys`` collection."""

    def get_collection(self):
        # The backing collection for this model.
        return self.db['apikeys']

    def _populate(self, item):
        # Expose only the whitelisted fields of a raw document.
        return {field: item[field] for field in ("key", "machineName", "user")}
"content_hash": "f5e6cadef03093d01a34b68962cae9d9",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 47,
"avg_line_length": 22,
"alnum_prop": 0.5575757575757576,
"repo_name": "SkyLapse/DMS",
"id": "62aeb55489186b9dbe3771f17f21145868942a97",
"size": "330",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Server/model/apikeymodel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "182986"
},
{
"name": "CSS",
"bytes": "36639"
},
{
"name": "JavaScript",
"bytes": "86382"
},
{
"name": "Python",
"bytes": "13733"
}
],
"symlink_target": ""
} |
// Copyright 2018 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/browser/code_cache/generated_code_cache.h"
#include <iostream>
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/feature_list.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
#include "base/strings/string_number_conversions.h"
#include "base/time/time.h"
#include "components/services/storage/public/cpp/big_io_buffer.h"
#include "content/public/common/content_features.h"
#include "content/public/common/url_constants.h"
#include "crypto/sha2.h"
#include "net/base/completion_once_callback.h"
#include "net/base/features.h"
#include "net/base/network_isolation_key.h"
#include "net/base/url_util.h"
#include "url/gurl.h"
using storage::BigIOBuffer;
namespace content {
namespace {
constexpr char kPrefix[] = "_key";
constexpr char kSeparator[] = " \n";
// We always expect to receive valid URLs that can be used as keys to the code
// cache. The relevant checks (for ex: resource_url is valid, origin_lock is
// not opque etc.,) must be done prior to requesting the code cache.
//
// This function doesn't enforce anything in the production code. It is here
// to make the assumptions explicit and to catch any errors when DCHECKs are
// enabled.
void CheckValidKeys(const GURL& resource_url,
                    const GURL& origin_lock,
                    GeneratedCodeCache::CodeCacheType cache_type) {
  // These DCHECKs compile to no-ops in release builds; real validation must
  // happen before the code cache is queried.
  // If the resource url is invalid don't cache the code.
  DCHECK(resource_url.is_valid());
  bool resource_url_is_chrome_or_chrome_untrusted =
      resource_url.SchemeIs(content::kChromeUIScheme) ||
      resource_url.SchemeIs(content::kChromeUIUntrustedScheme);
  DCHECK(resource_url.SchemeIsHTTPOrHTTPS() ||
         resource_url_is_chrome_or_chrome_untrusted);

  // |origin_lock| should be either empty or should have
  // Http/Https/chrome/chrome-untrusted schemes and it should not be a URL with
  // opaque origin. Empty origin_locks are allowed when the renderer is not
  // locked to an origin.
  bool origin_lock_is_chrome_or_chrome_untrusted =
      origin_lock.SchemeIs(content::kChromeUIScheme) ||
      origin_lock.SchemeIs(content::kChromeUIUntrustedScheme);
  DCHECK(origin_lock.is_empty() ||
         ((origin_lock.SchemeIsHTTPOrHTTPS() ||
           origin_lock_is_chrome_or_chrome_untrusted) &&
          !url::Origin::Create(origin_lock).opaque()));

  // The chrome and chrome-untrusted schemes are only used with the WebUI
  // code cache type.
  DCHECK_EQ(origin_lock_is_chrome_or_chrome_untrusted,
            cache_type == GeneratedCodeCache::kWebUIJavaScript);
  DCHECK_EQ(resource_url_is_chrome_or_chrome_untrusted,
            cache_type == GeneratedCodeCache::kWebUIJavaScript);
}
// Generates the cache key for the given |resource_url|, |origin_lock| and
// |nik|.
// |resource_url| is the url corresponding to the requested resource.
// |origin_lock| is the origin that the renderer which requested this
// resource is locked to.
// |nik| is the network isolation key that consists of top-level-site that
// initiated the request.
// For example, if SitePerProcess is enabled and http://script.com/script1.js is
// requested by http://example.com, then http://script.com/script.js is the
// resource_url and http://example.com is the origin_lock.
//
// This returns the key by concatenating the serialized url, origin lock and nik
// with a separator in between. |origin_lock| could be empty when renderer is
// not locked to an origin (ex: SitePerProcess is disabled) and it is safe to
// use only |resource_url| as the key in such cases.
// TODO(wjmaclean): Either convert this to use a SiteInfo object, or convert it
// to something not based on URLs.
std::string GetCacheKey(const GURL& resource_url,
                        const GURL& origin_lock,
                        const net::NetworkIsolationKey& nik,
                        GeneratedCodeCache::CodeCacheType cache_type) {
  CheckValidKeys(resource_url, origin_lock, cache_type);

  // Add a prefix _ so it can't be parsed as a valid URL.
  std::string key(kPrefix);
  // Remove reference, username and password sections of the URL.
  key.append(net::SimplifyUrlForRequest(resource_url).spec());
  // Add a separator between URL and origin to avoid any possibility of
  // attacks by crafting the URL. URLs do not contain any control ASCII
  // characters, and also space is encoded. So use ' \n' as a seperator.
  key.append(kSeparator);
  // An invalid (e.g. empty) |origin_lock| contributes nothing, so the key
  // degenerates to just the resource URL plus the separator.
  if (origin_lock.is_valid())
    key.append(net::SimplifyUrlForRequest(origin_lock).spec());

  if (base::FeatureList::IsEnabled(
          net::features::kSplitCacheByNetworkIsolationKey)) {
    // TODO(https://crbug.com/1346188): Transient NIKs return nullopt when
    // their ToCacheKeyString() method is invoked, as they generally shouldn't
    // be written to disk. This code is currently reached for transient NIKs,
    // which needs to be fixed.
    if (!nik.IsTransient()) {
      key.append(kSeparator);
      key.append(*nik.ToCacheKeyString());
    }
  }
  return key;
}
constexpr size_t kResponseTimeSizeInBytes = sizeof(int64_t);
constexpr size_t kDataSizeInBytes = sizeof(uint32_t);
constexpr size_t kHeaderSizeInBytes =
kResponseTimeSizeInBytes + kDataSizeInBytes;
// The SHA-256 checksum is used as the key for the de-duplicated code data. We
// must convert the checksum to a string key in a way that is guaranteed not to
// match a key generated by |GetCacheKey|. A simple way to do this is to convert
// it to a hex number string, which is twice as long as the checksum.
constexpr size_t kSHAKeySizeInBytes = 2 * crypto::kSHA256Length;
// This is the threshold for storing the header and cached code in stream 0,
// which is read into memory on opening an entry. JavaScript code caching stores
// time stamps with no data, or timestamps with just a tag, and we observe many
// 8 and 16 byte reads and writes. Make the threshold larger to speed up small
// code entries too.
constexpr size_t kInlineDataLimit = 4096;
// This is the maximum size for code that will be stored under the key generated
// by |GetCacheKey|. Each origin will get its own copy of the generated code for
// a given resource. Code that is larger than this limit will be stored under a
// key derived from the code checksum, and each origin using a given resource
// gets its own small entry under the key generated by |GetCacheKey| that holds
// the hash, enabling a two stage lookup. This limit was determined empirically
// by a Finch experiment.
constexpr size_t kDedicatedDataLimit = 16384;
// Serializes |response_time| (as int64 microseconds since the Windows epoch)
// followed by the 32-bit |data_size| into the first kHeaderSizeInBytes bytes
// of |buffer|.
void WriteCommonDataHeader(scoped_refptr<net::IOBufferWithSize> buffer,
                           const base::Time& response_time,
                           uint32_t data_size) {
  DCHECK_LE(static_cast<int>(kHeaderSizeInBytes), buffer->size());
  int64_t serialized_time =
      response_time.ToDeltaSinceWindowsEpoch().InMicroseconds();
  memcpy(buffer->data(), &serialized_time, kResponseTimeSizeInBytes);

  // Copy size to small data buffer.
  memcpy(buffer->data() + kResponseTimeSizeInBytes, &data_size,
         kDataSizeInBytes);
}

// Inverse of WriteCommonDataHeader: decodes the response time and data size
// from the header at the start of |buffer|.
void ReadCommonDataHeader(scoped_refptr<net::IOBufferWithSize> buffer,
                          base::Time* response_time,
                          uint32_t* data_size) {
  DCHECK_LE(static_cast<int>(kHeaderSizeInBytes), buffer->size());
  int64_t raw_response_time;
  memcpy(&raw_response_time, buffer->data(), kResponseTimeSizeInBytes);
  *response_time = base::Time::FromDeltaSinceWindowsEpoch(
      base::Microseconds(raw_response_time));
  memcpy(data_size, buffer->data() + kResponseTimeSizeInBytes,
         kDataSizeInBytes);
}
static_assert(mojo_base::BigBuffer::kMaxInlineBytes <=
std::numeric_limits<int>::max(),
"Buffer size calculations may overflow int");
// Maps the public code-cache type onto the corresponding net/ disk-cache
// backend type.
net::CacheType CodeCacheTypeToNetCacheType(
    GeneratedCodeCache::CodeCacheType type) {
  switch (type) {
    case GeneratedCodeCache::CodeCacheType::kJavaScript:
      return net::GENERATED_BYTE_CODE_CACHE;
    case GeneratedCodeCache::CodeCacheType::kWebAssembly:
      return net::GENERATED_NATIVE_CODE_CACHE;
    case GeneratedCodeCache::CodeCacheType::kWebUIJavaScript:
      return net::GENERATED_WEBUI_BYTE_CODE_CACHE;
  }
  NOTREACHED();
}
} // namespace
// Structurally validates |small_buffer|: it must contain the fixed header
// (response time + data size) followed by, depending on the recorded data
// size, either the inlined code bytes (small entries), nothing (code lives
// in stream 1), or a SHA-256 hex key naming a de-duplicated entry.
bool GeneratedCodeCache::IsValidHeader(
    scoped_refptr<net::IOBufferWithSize> small_buffer) const {
  size_t buffer_size = small_buffer->size();
  if (buffer_size < kHeaderSizeInBytes)
    return false;

  uint32_t data_size;
  memcpy(&data_size, small_buffer->data() + kResponseTimeSizeInBytes,
         kDataSizeInBytes);
  if (data_size <= kInlineDataLimit)
    return buffer_size == kHeaderSizeInBytes + data_size;
  if (!ShouldDeduplicateEntry(data_size))
    return buffer_size == kHeaderSizeInBytes;
  return buffer_size == kHeaderSizeInBytes + kSHAKeySizeInBytes;
}
// Fired periodically (timer started in the constructor) for JavaScript
// caches only: records the size the in-memory LRU cache would occupy, to
// evaluate a potential memory-backed code cache.
void GeneratedCodeCache::ReportPeriodicalHistograms() {
  DCHECK_EQ(cache_type_, CodeCacheType::kJavaScript);
  base::UmaHistogramCustomCounts(
      "SiteIsolatedCodeCache.JS.PotentialMemoryBackedCodeCacheSize2",
      lru_cache_.GetSize(),
      /*min=*/0,
      /*exclusive_max=*/kLruCacheCapacity,
      /*buckets=*/50);
}
// Extracts the resource URL portion of a key produced by GetCacheKey().
// Returns an empty string for keys without a separator (e.g. checksum-based
// de-duplicated entries).
// NOTE(review): the prefix's presence is not verified, only the key length;
// keys are internally generated, so this is presumed safe — confirm.
std::string GeneratedCodeCache::GetResourceURLFromKey(const std::string& key) {
  constexpr size_t kPrefixStringLen = std::size(kPrefix) - 1;
  // |key| may not have a prefix and separator (e.g. for deduplicated entries).
  // In that case, return an empty string.
  const size_t separator_index = key.find(kSeparator);
  if (key.length() < kPrefixStringLen || separator_index == std::string::npos) {
    return std::string();
  }

  std::string resource_url =
      key.substr(kPrefixStringLen, separator_index - kPrefixStringLen);
  return resource_url;
}
// Records the cache-behaviour UMA for this cache type. WebUI JavaScript
// shares the JS histogram; WebAssembly has its own.
void GeneratedCodeCache::CollectStatistics(
    GeneratedCodeCache::CacheEntryStatus status) {
  switch (cache_type_) {
    case GeneratedCodeCache::CodeCacheType::kJavaScript:
    case GeneratedCodeCache::CodeCacheType::kWebUIJavaScript:
      UMA_HISTOGRAM_ENUMERATION("SiteIsolatedCodeCache.JS.Behaviour", status);
      break;
    case GeneratedCodeCache::CodeCacheType::kWebAssembly:
      UMA_HISTOGRAM_ENUMERATION("SiteIsolatedCodeCache.WASM.Behaviour", status);
      break;
  }
}
// Stores the information about a pending request while disk backend is
// being initialized or another request for the same key is live.
class GeneratedCodeCache::PendingOperation {
 public:
  // Write (or de-duplicated write) of the given buffers under |key|.
  PendingOperation(Operation op,
                   const std::string& key,
                   scoped_refptr<net::IOBufferWithSize> small_buffer,
                   scoped_refptr<BigIOBuffer> large_buffer)
      : op_(op),
        key_(key),
        small_buffer_(small_buffer),
        large_buffer_(large_buffer) {
    DCHECK(Operation::kWrite == op_ || Operation::kWriteWithSHAKey == op_);
  }

  // First-stage fetch: buffers are attached later via set_*_buffer() once
  // the cache entry has been opened.
  PendingOperation(Operation op,
                   const std::string& key,
                   ReadDataCallback read_callback)
      : op_(op), key_(key), read_callback_(std::move(read_callback)) {
    DCHECK_EQ(Operation::kFetch, op_);
  }

  // Second-stage fetch of a de-duplicated entry keyed by its checksum;
  // carries the site-specific |response_time| from the first stage.
  PendingOperation(Operation op,
                   const std::string& key,
                   const base::Time& response_time,
                   const base::TimeTicks start_time,
                   scoped_refptr<net::IOBufferWithSize> small_buffer,
                   scoped_refptr<BigIOBuffer> large_buffer,
                   ReadDataCallback read_callback)
      : op_(op),
        key_(key),
        response_time_(response_time),
        start_time_(start_time),
        small_buffer_(small_buffer),
        large_buffer_(large_buffer),
        read_callback_(std::move(read_callback)) {
    DCHECK_EQ(Operation::kFetchWithSHAKey, op_);
  }

  // Deletion of the entry stored under |key|.
  PendingOperation(Operation op, const std::string& key) : op_(op), key_(key) {
    DCHECK_EQ(Operation::kDelete, op_);
  }

  // GetBackend request issued while the backend was still initializing.
  PendingOperation(Operation op, GetBackendCallback backend_callback)
      : op_(op), backend_callback_(std::move(backend_callback)) {
    DCHECK_EQ(Operation::kGetBackend, op_);
  }

  ~PendingOperation();

  Operation operation() const { return op_; }
  const std::string& key() const { return key_; }
  scoped_refptr<net::IOBufferWithSize> small_buffer() { return small_buffer_; }
  scoped_refptr<BigIOBuffer> large_buffer() { return large_buffer_; }
  ReadDataCallback TakeReadCallback() { return std::move(read_callback_); }

  // Delivers fetched data to the caller and, for JavaScript caches, updates
  // the in-memory LRU cache plus the related hit/miss histograms.
  void RunReadCallback(GeneratedCodeCache* code_cache,
                       base::Time response_time,
                       mojo_base::BigBuffer data) {
    if (code_cache->cache_type_ == CodeCacheType::kJavaScript) {
      const bool code_cache_hit = data.size() > 0;
      const bool in_memory_code_cache_hit = code_cache->lru_cache_.Has(key_);
      if (code_cache_hit && !in_memory_code_cache_hit) {
        code_cache->lru_cache_.Put(key_, response_time, base::make_span(data));
      }
      if (!base::FeatureList::IsEnabled(features::kInMemoryCodeCache)) {
        if (code_cache_hit && in_memory_code_cache_hit) {
          base::UmaHistogramTimes(
              "SiteIsolatedCodeCache.JS.MemoryBackedCodeCachePotentialImpact",
              base::TimeTicks::Now() - start_time_);
        }
        base::UmaHistogramBoolean("SiteIsolatedCodeCache.JS.Hit",
                                  code_cache_hit);
        base::UmaHistogramBoolean(
            "SiteIsolatedCodeCache.JS.PotentialMemoryBackedCodeCacheHit",
            in_memory_code_cache_hit);
      }
    }
    std::move(read_callback_).Run(response_time, std::move(data));
  }

  GetBackendCallback TakeBackendCallback() {
    return std::move(backend_callback_);
  }

  // These are called by Fetch operations to hold the buffers we create once the
  // entry is opened.
  void set_small_buffer(scoped_refptr<net::IOBufferWithSize> small_buffer) {
    DCHECK_EQ(Operation::kFetch, op_);
    small_buffer_ = small_buffer;
  }
  void set_large_buffer(scoped_refptr<BigIOBuffer> large_buffer) {
    DCHECK_EQ(Operation::kFetch, op_);
    large_buffer_ = large_buffer;
  }

  // This returns the site-specific response time for merged code entries.
  const base::Time& response_time() const {
    DCHECK_EQ(Operation::kFetchWithSHAKey, op_);
    return response_time_;
  }

  base::TimeTicks start_time() const { return start_time_; }

  // These are called by write and fetch operations to track buffer completions
  // and signal when the operation has finished, and whether it was successful.
  bool succeeded() const { return succeeded_; }
  // Returns true once both buffer operations have completed.
  bool AddBufferCompletion(bool succeeded) {
    DCHECK(op_ == Operation::kWrite || op_ == Operation::kWriteWithSHAKey ||
           op_ == Operation::kFetch || op_ == Operation::kFetchWithSHAKey);
    if (!succeeded)
      succeeded_ = false;
    DCHECK_GT(2, completions_);
    completions_++;
    return completions_ == 2;
  }

 private:
  const Operation op_;
  const std::string key_;
  const base::Time response_time_;
  const base::TimeTicks start_time_ = base::TimeTicks::Now();
  scoped_refptr<net::IOBufferWithSize> small_buffer_;
  scoped_refptr<BigIOBuffer> large_buffer_;
  ReadDataCallback read_callback_;
  GetBackendCallback backend_callback_;
  // Number of completed buffer operations (small/stream-0 and large/stream-1).
  int completions_ = 0;
  bool succeeded_ = true;
};
GeneratedCodeCache::PendingOperation::~PendingOperation() = default;
// Constructs the cache and starts asynchronous creation of the disk backend.
// For the JavaScript cache a repeating 5-minute timer reports periodic
// histograms.
GeneratedCodeCache::GeneratedCodeCache(const base::FilePath& path,
                                       int max_size_bytes,
                                       CodeCacheType cache_type)
    : backend_state_(kInitializing),
      path_(path),
      max_size_bytes_(max_size_bytes),
      cache_type_(cache_type),
      // When |max_size_bytes| is 0 the in-memory LRU uses its default
      // capacity; otherwise its capacity is capped at the smaller of the
      // default and the disk budget.
      lru_cache_(max_size_bytes == 0
                     ? kLruCacheCapacity
                     : std::min<int64_t>(kLruCacheCapacity, max_size_bytes)) {
  CreateBackend();
  if (cache_type == CodeCacheType::kJavaScript) {
    histograms_timer_.Start(
        FROM_HERE, base::Minutes(5),
        base::BindRepeating(&GeneratedCodeCache::ReportPeriodicalHistograms,
                            base::Unretained(this)));
  }
}
GeneratedCodeCache::~GeneratedCodeCache() = default;
// Hands the disk_cache backend pointer to |callback|. While the backend is
// still being created the request is queued and later serviced from
// IssuePendingOperations(); on failure the callback receives nullptr.
void GeneratedCodeCache::GetBackend(GetBackendCallback callback) {
  switch (backend_state_) {
    case kFailed:
      std::move(callback).Run(nullptr);
      return;
    case kInitialized:
      std::move(callback).Run(backend_.get());
      return;
    case kInitializing:
      // Park the request until DidCreateBackend() resolves the state.
      pending_ops_.emplace(std::make_unique<PendingOperation>(
          Operation::kGetBackend, std::move(callback)));
      return;
  }
}
// Writes generated code |data| for |url| under |origin_lock| and |nik|.
// JavaScript entries are additionally mirrored into the in-memory LRU cache.
// The disk entry uses one of three layouts depending on |data| size:
// inline (small), dedicated (medium), or indirect via a SHA-256 content key
// (large, deduplicated across sites).
void GeneratedCodeCache::WriteEntry(const GURL& url,
                                    const GURL& origin_lock,
                                    const net::NetworkIsolationKey& nik,
                                    const base::Time& response_time,
                                    mojo_base::BigBuffer data) {
  if (backend_state_ == kFailed) {
    // Silently fail the request.
    CollectStatistics(CacheEntryStatus::kError);
    return;
  }
  // Reject buffers that are large enough to cause overflow problems.
  // NOTE(review): unlike the kFailed path above, this drop is not recorded
  // via CollectStatistics.
  if (data.size() >= std::numeric_limits<int32_t>::max())
    return;
  const std::string key = GetCacheKey(url, origin_lock, nik, cache_type_);
  if (cache_type_ == CodeCacheType::kJavaScript) {
    lru_cache_.Put(key, response_time, base::make_span(data));
  }
  scoped_refptr<net::IOBufferWithSize> small_buffer;
  scoped_refptr<BigIOBuffer> large_buffer;
  const uint32_t data_size = static_cast<uint32_t>(data.size());
  // We have three different cache entry layouts, depending on data size.
  if (data_size <= kInlineDataLimit) {
    // 1. Inline
    //    [stream0] response time, size, data
    //    [stream1] <empty>
    small_buffer = base::MakeRefCounted<net::IOBufferWithSize>(
        kHeaderSizeInBytes + data.size());
    // Copy |data| into the small buffer.
    memcpy(small_buffer->data() + kHeaderSizeInBytes, data.data(), data.size());
    // Write 0 bytes and truncate stream 1 to clear any stale data.
    large_buffer = base::MakeRefCounted<BigIOBuffer>(mojo_base::BigBuffer());
  } else if (!ShouldDeduplicateEntry(data_size)) {
    // 2. Dedicated
    //    [stream0] response time, size
    //    [stream1] data
    small_buffer =
        base::MakeRefCounted<net::IOBufferWithSize>(kHeaderSizeInBytes);
    large_buffer = base::MakeRefCounted<BigIOBuffer>(std::move(data));
  } else {
    // 3. Indirect
    //    [stream0] response time, size, checksum
    //    [stream1] <empty>
    //    [stream0 (checksum key entry)] <empty>
    //    [stream1 (checksum key entry)] data
    // Make a copy of the data before hashing. A compromised renderer could
    // change shared memory before we can compute the hash and write the data.
    // TODO(1135729) Eliminate this copy when the shared memory can't be
    // written by the sender.
    mojo_base::BigBuffer copy({data.data(), data.size()});
    // A size mismatch means the copy could not be made; give up.
    if (copy.size() != data.size())
      return;
    data = mojo_base::BigBuffer();  // Release the old buffer.
    uint8_t result[crypto::kSHA256Length];
    crypto::SHA256HashString(
        base::StringPiece(reinterpret_cast<char*>(copy.data()), copy.size()),
        result, std::size(result));
    std::string checksum_key = base::HexEncode(result, std::size(result));
    small_buffer = base::MakeRefCounted<net::IOBufferWithSize>(
        kHeaderSizeInBytes + kSHAKeySizeInBytes);
    // Copy |checksum_key| into the small buffer.
    DCHECK_EQ(kSHAKeySizeInBytes, checksum_key.length());
    memcpy(small_buffer->data() + kHeaderSizeInBytes, checksum_key.data(),
           kSHAKeySizeInBytes);
    // Write 0 bytes and truncate stream 1 to clear any stale data.
    large_buffer = base::MakeRefCounted<BigIOBuffer>(mojo_base::BigBuffer());
    // Issue another write operation for the code, with the checksum as the
    // key and nothing in the header.
    auto small_buffer2 = base::MakeRefCounted<net::IOBufferWithSize>(0);
    auto large_buffer2 = base::MakeRefCounted<BigIOBuffer>(std::move(copy));
    auto op2 = std::make_unique<PendingOperation>(Operation::kWriteWithSHAKey,
                                                  checksum_key, small_buffer2,
                                                  large_buffer2);
    EnqueueOperation(std::move(op2));
  }
  WriteCommonDataHeader(small_buffer, response_time, data_size);
  // Create the write operation.
  auto op = std::make_unique<PendingOperation>(Operation::kWrite, key,
                                               small_buffer, large_buffer);
  EnqueueOperation(std::move(op));
}
// Fetches the cached data for |url| and runs |read_data_callback| with the
// response time and data. A null time and empty buffer signal a miss or a
// failed backend.
void GeneratedCodeCache::FetchEntry(const GURL& url,
                                    const GURL& origin_lock,
                                    const net::NetworkIsolationKey& nik,
                                    ReadDataCallback read_data_callback) {
  if (backend_state_ == kFailed) {
    CollectStatistics(CacheEntryStatus::kError);
    // Fail the request.
    std::move(read_data_callback).Run(base::Time(), mojo_base::BigBuffer());
    return;
  }
  std::string key = GetCacheKey(url, origin_lock, nik, cache_type_);
  auto op = std::make_unique<PendingOperation>(Operation::kFetch, key,
                                               std::move(read_data_callback));
  EnqueueOperation(std::move(op));
}
// Queues deletion of the disk entry for |url| and removes any in-memory
// copy immediately.
void GeneratedCodeCache::DeleteEntry(const GURL& url,
                                     const GURL& origin_lock,
                                     const net::NetworkIsolationKey& nik) {
  if (backend_state_ == kFailed) {
    // Silently fail.
    CollectStatistics(CacheEntryStatus::kError);
    return;
  }
  std::string key = GetCacheKey(url, origin_lock, nik, cache_type_);
  auto op = std::make_unique<PendingOperation>(Operation::kDelete, key);
  EnqueueOperation(std::move(op));
  // The disk delete is asynchronous, but the LRU entry is dropped right away.
  lru_cache_.Delete(key);
}
// Starts creation of the simple disk cache backend. DidCreateBackend() runs
// either asynchronously via the bound callback or synchronously below when
// the result is immediately available.
void GeneratedCodeCache::CreateBackend() {
  // If the initialization of the existing cache fails, this call would delete
  // all the contents and recreates a new one.
  disk_cache::BackendResult result = disk_cache::CreateCacheBackend(
      CodeCacheTypeToNetCacheType(cache_type_), net::CACHE_BACKEND_SIMPLE,
      /*file_operations=*/nullptr, path_, max_size_bytes_,
      disk_cache::ResetHandling::kResetOnError, /*net_log=*/nullptr,
      base::BindOnce(&GeneratedCodeCache::DidCreateBackend,
                     weak_ptr_factory_.GetWeakPtr()));
  if (result.net_error != net::ERR_IO_PENDING) {
    DidCreateBackend(std::move(result));
  }
}
// Completion handler for CreateBackend(). Records whether the backend is
// usable and then flushes every operation queued while initializing.
void GeneratedCodeCache::DidCreateBackend(disk_cache::BackendResult result) {
  const bool created_ok = result.net_error == net::OK;
  if (created_ok) {
    backend_ = std::move(result.backend);
  }
  backend_state_ = created_ok ? kInitialized : kFailed;
  IssuePendingOperations();
}
// Routes |op| either into the per-key issue queues (backend ready) or into
// the global pending list (backend still opening or failed).
void GeneratedCodeCache::EnqueueOperation(
    std::unique_ptr<PendingOperation> op) {
  if (backend_state_ == kInitialized) {
    EnqueueOperationAndIssueIfNext(std::move(op));
    return;
  }
  // The backend is not ready yet; park the op until DidCreateBackend()
  // drains the pending list.
  pending_ops_.emplace(std::move(op));
}
// Drains the list of operations that accumulated while the backend was being
// created, dispatching each one.
void GeneratedCodeCache::IssuePendingOperations() {
  // Issue any operations that were received while creating the backend.
  while (!pending_ops_.empty()) {
    // Take ownership of the next PendingOperation here. |op| will either be
    // moved onto a queue in active_entries_map_ or issued and completed in
    // |DoPendingGetBackend|.
    std::unique_ptr<PendingOperation> op = std::move(pending_ops_.front());
    pending_ops_.pop();
    // Properly enqueue/dequeue ops for Write, Fetch, and Delete.
    if (op->operation() != Operation::kGetBackend) {
      EnqueueOperationAndIssueIfNext(std::move(op));
    } else {
      // There is no queue for get backend operations. Issue them immediately.
      IssueOperation(op.get());
    }
  }
}
// Dispatches |op| to the implementation for its operation type.
void GeneratedCodeCache::IssueOperation(PendingOperation* op) {
  switch (op->operation()) {
    case kFetch:
    case kFetchWithSHAKey:
      FetchEntryImpl(op);
      break;
    case kWrite:
    case kWriteWithSHAKey:
      WriteEntryImpl(op);
      break;
    case kDelete:
      DeleteEntryImpl(op);
      break;
    case kGetBackend:
      DoPendingGetBackend(op);
      break;
  }
}
// Starts the disk write for |op| by opening (or creating) its entry.
// OpenCompleteForWrite() runs either via the bound callback or synchronously
// below when the result is immediately available.
void GeneratedCodeCache::WriteEntryImpl(PendingOperation* op) {
  DCHECK(Operation::kWrite == op->operation() ||
         Operation::kWriteWithSHAKey == op->operation());
  if (backend_state_ != kInitialized) {
    // Silently fail the request.
    CloseOperationAndIssueNext(op);
    return;
  }
  disk_cache::EntryResult result = backend_->OpenOrCreateEntry(
      op->key(), net::LOW,
      base::BindOnce(&GeneratedCodeCache::OpenCompleteForWrite,
                     weak_ptr_factory_.GetWeakPtr(), op));
  if (result.net_error() != net::ERR_IO_PENDING) {
    OpenCompleteForWrite(op, std::move(result));
  }
}
// Continues a write once the entry is open: issues truncating writes for both
// the small (header) and large (code) streams. Each stream completion feeds
// AddBufferCompletion(); WriteComplete() runs after the second one.
// NOTE(review): |entry| is closed when this function returns even if writes
// are still pending — presumably the simple cache completes outstanding
// WriteData calls after Close; confirm against disk_cache semantics.
void GeneratedCodeCache::OpenCompleteForWrite(
    PendingOperation* op,
    disk_cache::EntryResult entry_result) {
  DCHECK(Operation::kWrite == op->operation() ||
         Operation::kWriteWithSHAKey == op->operation());
  if (entry_result.net_error() != net::OK) {
    CollectStatistics(CacheEntryStatus::kError);
    CloseOperationAndIssueNext(op);
    return;
  }
  if (entry_result.opened()) {
    CollectStatistics(CacheEntryStatus::kUpdate);
  } else {
    CollectStatistics(CacheEntryStatus::kCreate);
  }
  disk_cache::ScopedEntryPtr entry(entry_result.ReleaseEntry());
  // There should be a valid entry if the open was successful.
  DCHECK(entry);
  // For merged entries, don't write if the entry already exists.
  if (op->operation() == Operation::kWriteWithSHAKey) {
    int small_size = entry->GetDataSize(kSmallDataStream);
    int large_size = entry->GetDataSize(kLargeDataStream);
    // The content key is a hash of the data, so matching sizes imply the
    // same payload is already stored.
    if (small_size == 0 && large_size == op->large_buffer()->size()) {
      // Skip overwriting with identical data.
      CloseOperationAndIssueNext(op);
      return;
    }
    // Otherwise, there shouldn't be any data for this entry yet.
    DCHECK_EQ(0, small_size);
    DCHECK_EQ(0, large_size);
  }
  // Write the small data first, truncating.
  auto small_buffer = op->small_buffer();
  int result = entry->WriteData(
      kSmallDataStream, 0, small_buffer.get(), small_buffer->size(),
      base::BindOnce(&GeneratedCodeCache::WriteSmallBufferComplete,
                     weak_ptr_factory_.GetWeakPtr(), op),
      true);
  if (result != net::ERR_IO_PENDING) {
    WriteSmallBufferComplete(op, result);
  }
  // Write the large data, truncating.
  auto large_buffer = op->large_buffer();
  result = entry->WriteData(
      kLargeDataStream, 0, large_buffer.get(), large_buffer->size(),
      base::BindOnce(&GeneratedCodeCache::WriteLargeBufferComplete,
                     weak_ptr_factory_.GetWeakPtr(), op),
      true);
  if (result != net::ERR_IO_PENDING) {
    WriteLargeBufferComplete(op, result);
  }
}
// Completion of the small-stream (header) write. |rv| is the byte count (or
// a net error); success requires the entire buffer to have been written.
void GeneratedCodeCache::WriteSmallBufferComplete(PendingOperation* op,
                                                  int rv) {
  DCHECK(Operation::kWrite == op->operation() ||
         Operation::kWriteWithSHAKey == op->operation());
  const bool wrote_everything = rv == op->small_buffer()->size();
  if (op->AddBufferCompletion(wrote_everything)) {
    WriteComplete(op);
  }
}
// Completion of the large-stream (code) write. |rv| is the byte count (or a
// net error); success requires the entire buffer to have been written.
void GeneratedCodeCache::WriteLargeBufferComplete(PendingOperation* op,
                                                  int rv) {
  DCHECK(Operation::kWrite == op->operation() ||
         Operation::kWriteWithSHAKey == op->operation());
  const bool wrote_everything = rv == op->large_buffer()->size();
  if (op->AddBufferCompletion(wrote_everything)) {
    WriteComplete(op);
  }
}
// Runs after both stream writes have completed. On failure the partially
// written entry is doomed so a later read cannot observe it.
void GeneratedCodeCache::WriteComplete(PendingOperation* op) {
  DCHECK(Operation::kWrite == op->operation() ||
         Operation::kWriteWithSHAKey == op->operation());
  if (!op->succeeded()) {
    // The write failed; record the failure and doom the entry here.
    CollectStatistics(CacheEntryStatus::kWriteFailed);
    DoomEntry(op);
  }
  CloseOperationAndIssueNext(op);
}
// Starts the disk read for |op|. When the in-memory cache feature is enabled
// a hit there short-circuits the disk entirely. An uninitialized backend
// reports a miss (null time, empty buffer).
void GeneratedCodeCache::FetchEntryImpl(PendingOperation* op) {
  DCHECK(Operation::kFetch == op->operation() ||
         Operation::kFetchWithSHAKey == op->operation());
  if (base::FeatureList::IsEnabled(features::kInMemoryCodeCache)) {
    if (auto result = lru_cache_.Get(op->key())) {
      op->RunReadCallback(this, result->response_time, std::move(result->data));
      CloseOperationAndIssueNext(op);
      return;
    }
  }
  if (backend_state_ != kInitialized) {
    op->RunReadCallback(this, base::Time(), mojo_base::BigBuffer());
    CloseOperationAndIssueNext(op);
    return;
  }
  // This is a part of loading cycle and hence should run with a high priority.
  disk_cache::EntryResult result = backend_->OpenEntry(
      op->key(), net::HIGHEST,
      base::BindOnce(&GeneratedCodeCache::OpenCompleteForRead,
                     weak_ptr_factory_.GetWeakPtr(), op));
  if (result.net_error() != net::ERR_IO_PENDING) {
    OpenCompleteForRead(op, std::move(result));
  }
}
// Continues a fetch once the entry is open: sizes/obtains the two buffers and
// issues reads for both streams. For plain kFetch the buffers are allocated
// here from the entry's stream sizes; for kFetchWithSHAKey they were
// pre-allocated when the indirect fetch was created, so sizes must match.
// The large read is skipped entirely when stream 1 is empty (inline layout).
void GeneratedCodeCache::OpenCompleteForRead(
    PendingOperation* op,
    disk_cache::EntryResult entry_result) {
  DCHECK(Operation::kFetch == op->operation() ||
         Operation::kFetchWithSHAKey == op->operation());
  if (entry_result.net_error() != net::OK) {
    CollectStatistics(CacheEntryStatus::kMiss);
    op->RunReadCallback(this, base::Time(), mojo_base::BigBuffer());
    CloseOperationAndIssueNext(op);
    return;
  }
  disk_cache::ScopedEntryPtr entry(entry_result.ReleaseEntry());
  // There should be a valid entry if the open was successful.
  DCHECK(entry);
  int small_size = entry->GetDataSize(kSmallDataStream);
  int large_size = entry->GetDataSize(kLargeDataStream);
  scoped_refptr<net::IOBufferWithSize> small_buffer;
  scoped_refptr<BigIOBuffer> large_buffer;
  if (op->operation() == Operation::kFetch) {
    small_buffer = base::MakeRefCounted<net::IOBufferWithSize>(small_size);
    op->set_small_buffer(small_buffer);
    large_buffer = base::MakeRefCounted<BigIOBuffer>(large_size);
    op->set_large_buffer(large_buffer);
  } else {
    small_buffer = op->small_buffer();
    large_buffer = op->large_buffer();
    DCHECK_EQ(small_size, small_buffer->size());
    DCHECK_EQ(large_size, large_buffer->size());
  }
  // Read the small data first.
  int result = entry->ReadData(
      kSmallDataStream, 0, small_buffer.get(), small_buffer->size(),
      base::BindOnce(&GeneratedCodeCache::ReadSmallBufferComplete,
                     weak_ptr_factory_.GetWeakPtr(), op));
  if (result != net::ERR_IO_PENDING) {
    ReadSmallBufferComplete(op, result);
  }
  // Skip the large read if data is in the small read.
  if (large_size == 0)
    return;
  // Read the large data.
  result = entry->ReadData(
      kLargeDataStream, 0, large_buffer.get(), large_buffer->size(),
      base::BindOnce(&GeneratedCodeCache::ReadLargeBufferComplete,
                     weak_ptr_factory_.GetWeakPtr(), op));
  if (result != net::ERR_IO_PENDING) {
    ReadLargeBufferComplete(op, result);
  }
}
// Completion of the small-stream (header) read. A read succeeds when the
// whole buffer was filled and, for site-keyed fetches, the header validates;
// checksum-keyed (kFetchWithSHAKey) entries carry no header to validate.
//
// Fix: the original read |op->large_buffer()| *after* a possible
// ReadComplete(op), but ReadComplete ends in CloseOperationAndIssueNext(),
// which dequeues and destroys |op| — a latent use-after-free if the large
// read ever completes before this callback. The large-buffer size is now
// captured before the completion is recorded. Behavior is otherwise
// unchanged: when no large read was issued (inline layout, stream 1 empty)
// this callback synthesizes the second completion itself.
void GeneratedCodeCache::ReadSmallBufferComplete(PendingOperation* op, int rv) {
  DCHECK(Operation::kFetch == op->operation() ||
         Operation::kFetchWithSHAKey == op->operation());
  bool no_header = op->operation() == Operation::kFetchWithSHAKey;
  bool succeeded = (rv == op->small_buffer()->size() &&
                    (no_header || IsValidHeader(op->small_buffer())));
  CollectStatistics(succeeded ? CacheEntryStatus::kHit
                              : CacheEntryStatus::kMiss);
  // Capture this before AddBufferCompletion: ReadComplete() destroys |op|.
  const bool large_read_pending = op->large_buffer()->size() != 0;
  if (op->AddBufferCompletion(succeeded))
    ReadComplete(op);
  // Small reads must finish now since no large read is pending.
  if (!large_read_pending)
    ReadLargeBufferComplete(op, 0);
}
// Completion of the large-stream (code) read; success requires the entire
// buffer to have been filled. This is the second of the two completions, so
// ReadComplete() may fire here.
void GeneratedCodeCache::ReadLargeBufferComplete(PendingOperation* op, int rv) {
  DCHECK(Operation::kFetch == op->operation() ||
         Operation::kFetchWithSHAKey == op->operation());
  if (op->AddBufferCompletion(rv == op->large_buffer()->size()))
    ReadComplete(op);
}
// Runs after both stream reads have completed. Decodes the entry according to
// its layout: inline data is copied out of the header buffer; dedicated data
// is handed over from the large buffer; indirect entries spawn a second fetch
// keyed by the stored SHA-256 checksum. Failed reads report a miss and doom
// the entry.
void GeneratedCodeCache::ReadComplete(PendingOperation* op) {
  DCHECK(Operation::kFetch == op->operation() ||
         Operation::kFetchWithSHAKey == op->operation());
  if (!op->succeeded()) {
    op->RunReadCallback(this, base::Time(), mojo_base::BigBuffer());
    // Doom this entry since it is inaccessible.
    DoomEntry(op);
  } else {
    if (op->operation() != Operation::kFetchWithSHAKey) {
      base::Time response_time;
      uint32_t data_size = 0;
      ReadCommonDataHeader(op->small_buffer(), &response_time, &data_size);
      if (data_size <= kInlineDataLimit) {
        // Small data. Copy the data from the small buffer.
        DCHECK_EQ(0, op->large_buffer()->size());
        mojo_base::BigBuffer data(data_size);
        memcpy(data.data(), op->small_buffer()->data() + kHeaderSizeInBytes,
               data_size);
        op->RunReadCallback(this, response_time, std::move(data));
      } else if (!ShouldDeduplicateEntry(data_size)) {
        // Large data below the merging threshold, or deduplication is
        // disabled. Return the large buffer.
        op->RunReadCallback(this, response_time,
                            op->large_buffer()->TakeBuffer());
      } else {
        // Very large data. Create the second fetch using the checksum as key.
        DCHECK_EQ(static_cast<int>(kHeaderSizeInBytes + kSHAKeySizeInBytes),
                  op->small_buffer()->size());
        std::string checksum_key(
            op->small_buffer()->data() + kHeaderSizeInBytes,
            kSHAKeySizeInBytes);
        auto small_buffer = base::MakeRefCounted<net::IOBufferWithSize>(0);
        auto large_buffer = base::MakeRefCounted<BigIOBuffer>(data_size);
        // The response time and the caller's callback travel with the new
        // op; the original start time is preserved for latency metrics.
        auto op2 = std::make_unique<PendingOperation>(
            Operation::kFetchWithSHAKey, checksum_key, response_time,
            op->start_time(), small_buffer, large_buffer,
            op->TakeReadCallback());
        EnqueueOperation(std::move(op2));
      }
    } else {
      // Large merged code data with no header. |op| holds the response time.
      op->RunReadCallback(this, op->response_time(),
                          op->large_buffer()->TakeBuffer());
    }
  }
  CloseOperationAndIssueNext(op);
}
// Executes a queued delete: dooms the disk entry and moves to the next op.
void GeneratedCodeCache::DeleteEntryImpl(PendingOperation* op) {
  DCHECK_EQ(Operation::kDelete, op->operation());
  DoomEntry(op);
  CloseOperationAndIssueNext(op);
}
// Dooms the disk entry for |op|'s key. Fire-and-forget: no completion
// callback is supplied.
void GeneratedCodeCache::DoomEntry(PendingOperation* op) {
  // Write, Fetch, and Delete may all doom an entry.
  DCHECK_NE(Operation::kGetBackend, op->operation());
  // Entries shouldn't be doomed if the backend hasn't been initialized.
  DCHECK_EQ(kInitialized, backend_state_);
  CollectStatistics(CacheEntryStatus::kClear);
  backend_->DoomEntry(op->key(), net::LOWEST, net::CompletionOnceCallback());
}
// Issues the operation at the front of |key|'s queue, if one exists. The
// issued op remains on the queue until DequeueOperation() removes it.
void GeneratedCodeCache::IssueNextOperation(const std::string& key) {
  auto it = active_entries_map_.find(key);
  if (it == active_entries_map_.end())
    return;
  DCHECK(!it->second.empty());
  IssueOperation(it->second.front().get());
}
// Finishes |op|: removes it from its per-key queue and issues the next
// operation waiting on the same key. |op| is destroyed when this returns.
void GeneratedCodeCache::CloseOperationAndIssueNext(PendingOperation* op) {
  // Dequeue op, keeping it alive long enough to issue another op.
  std::unique_ptr<PendingOperation> keep_alive = DequeueOperation(op);
  IssueNextOperation(op->key());
}
// Appends |op| to the queue for its key, creating the queue on first use.
// Only a freshly created queue is issued immediately — an existing queue
// means another op for the same key is already in flight and will chain to
// this one via CloseOperationAndIssueNext().
void GeneratedCodeCache::EnqueueOperationAndIssueIfNext(
    std::unique_ptr<PendingOperation> op) {
  // GetBackend ops have no key and shouldn't be enqueued here.
  DCHECK_NE(Operation::kGetBackend, op->operation());
  auto it = active_entries_map_.find(op->key());
  bool can_issue = false;
  if (it == active_entries_map_.end()) {
    it = active_entries_map_.emplace(op->key(), PendingOperationQueue()).first;
    can_issue = true;
  }
  // Copy the key reference before |op| is moved into the queue.
  const std::string& key = op->key();
  it->second.emplace(std::move(op));
  if (can_issue)
    IssueNextOperation(key);
}
// Removes |op| — which must be at the front of its key's queue — and returns
// ownership to the caller. Empty queues are erased from the map.
std::unique_ptr<GeneratedCodeCache::PendingOperation>
GeneratedCodeCache::DequeueOperation(PendingOperation* op) {
  auto it = active_entries_map_.find(op->key());
  DCHECK(it != active_entries_map_.end());
  DCHECK(!it->second.empty());
  std::unique_ptr<PendingOperation> result = std::move(it->second.front());
  // |op| should be at the front.
  DCHECK_EQ(op, result.get());
  it->second.pop();
  // Delete the queue if it becomes empty.
  if (it->second.empty()) {
    active_entries_map_.erase(it);
  }
  return result;
}
// Resolves a queued GetBackend request once initialization has finished,
// handing out the backend pointer or nullptr on failure.
void GeneratedCodeCache::DoPendingGetBackend(PendingOperation* op) {
  // |op| is kept alive in |IssuePendingOperations| for the duration of this
  // call. We shouldn't access |op| after returning from this function.
  DCHECK_EQ(kGetBackend, op->operation());
  if (backend_state_ == kInitialized) {
    op->TakeBackendCallback().Run(backend_.get());
  } else {
    DCHECK_EQ(backend_state_, kFailed);
    op->TakeBackendCallback().Run(nullptr);
  }
}
// Whether large entries may be merged under a content checksum key.
// See ShouldDeduplicateEntry() for the size threshold.
bool GeneratedCodeCache::IsDeduplicationEnabled() const {
  // Deduplication is disabled in the WebUI code cache, as an additional
  // defense against privilege escalation in case there is a bug in the
  // deduplication logic.
  return cache_type_ != kWebUIJavaScript;
}
// Entries above kDedicatedDataLimit are stored indirectly under a SHA-256
// content key (layout 3 in WriteEntry), unless deduplication is disabled
// for this cache type.
bool GeneratedCodeCache::ShouldDeduplicateEntry(uint32_t data_size) const {
  return data_size > kDedicatedDataLimit && IsDeduplicationEnabled();
}
// Test-only: overwrites the disk entry's last-used timestamp and then runs
// |user_callback|. The callback is split so it can complete either via the
// async open path or the synchronous path below.
void GeneratedCodeCache::SetLastUsedTimeForTest(
    const GURL& resource_url,
    const GURL& origin_lock,
    const net::NetworkIsolationKey& nik,
    base::Time time,
    base::OnceClosure user_callback) {
  // This is used only for tests. So reasonable to assume that backend is
  // initialized here. All other operations handle the case when backend was
  // not yet opened.
  DCHECK_EQ(backend_state_, kInitialized);
  auto split = base::SplitOnceCallback(std::move(user_callback));
  disk_cache::EntryResultCallback callback = base::BindOnce(
      &GeneratedCodeCache::OpenCompleteForSetLastUsedForTest,
      weak_ptr_factory_.GetWeakPtr(), time, std::move(split.first));
  std::string key = GetCacheKey(resource_url, origin_lock, nik, cache_type_);
  disk_cache::EntryResult result =
      backend_->OpenEntry(key, net::LOWEST, std::move(callback));
  if (result.net_error() != net::ERR_IO_PENDING) {
    OpenCompleteForSetLastUsedForTest(time, std::move(split.second),
                                      std::move(result));
  }
}
// Drops every entry from the in-memory LRU cache; disk entries are untouched.
void GeneratedCodeCache::ClearInMemoryCache() {
  lru_cache_.Clear();
}
// Test-only continuation of SetLastUsedTimeForTest(): stamps the opened
// entry and then notifies the caller.
void GeneratedCodeCache::OpenCompleteForSetLastUsedForTest(
    base::Time time,
    base::OnceClosure callback,
    disk_cache::EntryResult result) {
  DCHECK_EQ(result.net_error(), net::OK);
  // Scope the entry so it is closed before |callback| runs.
  {
    disk_cache::ScopedEntryPtr disk_entry(result.ReleaseEntry());
    DCHECK(disk_entry);
    disk_entry->SetLastUsedTimeForTest(time);
  }
  std::move(callback).Run();
}
} // namespace content
| {
"content_hash": "cebc74a0a9bcfcf322bcd828d7f21f84",
"timestamp": "",
"source": "github",
"line_count": 993,
"max_line_length": 80,
"avg_line_length": 39.03323262839879,
"alnum_prop": 0.6726522187822498,
"repo_name": "nwjs/chromium.src",
"id": "4030ed6ecfa04008f000db59fd5f0a28a3c24015",
"size": "38760",
"binary": false,
"copies": "6",
"ref": "refs/heads/nw70",
"path": "content/browser/code_cache/generated_code_cache.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
<?php
namespace Tests\Flarum\Api\Handler;
use Flarum\Api\Handler\FloodingExceptionHandler;
use Flarum\Core\Exception\FloodingException;
use Tests\Test\TestCase;
class FloodingExceptionHandlerTest extends TestCase
{
    /**
     * Handler under test.
     *
     * @var FloodingExceptionHandler
     */
    private $handler;

    // NOTE(review): this suite uses init() instead of PHPUnit's setUp();
    // presumably the custom Tests\Test\TestCase base invokes it before each
    // test — verify against the base class.
    public function init()
    {
        $this->handler = new FloodingExceptionHandler();
    }

    public function test_it_handles_recognisable_exceptions()
    {
        $unrelated = new \Exception();
        $flooding = new FloodingException();

        // Only FloodingException instances are managed by this handler.
        $this->assertFalse($this->handler->manages($unrelated));
        $this->assertTrue($this->handler->manages($flooding));
    }

    public function test_it_provides_expected_output()
    {
        $response = $this->handler->handle(new FloodingException());

        // 429 Too Many Requests, with a single empty error entry.
        $this->assertEquals(429, $response->getStatus());
        $this->assertEquals([[]], $response->getErrors());
    }
}
| {
"content_hash": "7e8d7325b8228ab5351b40fa713d2520",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 74,
"avg_line_length": 26.733333333333334,
"alnum_prop": 0.6845386533665836,
"repo_name": "kirkbushell/core",
"id": "fae69cc0050e874f3c604ec890ad51d10a171dfa",
"size": "802",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/Flarum/Api/Handler/FloodingExceptionHandlerTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "154291"
},
{
"name": "HTML",
"bytes": "572"
},
{
"name": "JavaScript",
"bytes": "2128190"
},
{
"name": "PHP",
"bytes": "626815"
},
{
"name": "Shell",
"bytes": "280"
}
],
"symlink_target": ""
} |
namespace Amazon.Lambda.DynamoDBEvents
{
using Amazon.DynamoDBv2.Model;
using System;
using System.Collections.Generic;
/// <summary>
/// AWS DynamoDB event delivered to a Lambda function.
/// http://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html
/// http://docs.aws.amazon.com/lambda/latest/dg/eventsources.html#eventsources-ddb-update
/// </summary>
public class DynamoDBEvent
{
    /// <summary>
    /// List of DynamoDB event records contained in this event.
    /// </summary>
    public IList<DynamodbStreamRecord> Records { get; set; }

    /// <summary>
    /// DynamoDB stream record. Extends the base <c>Record</c> type with the
    /// ARN of the event source.
    /// http://docs.aws.amazon.com/dynamodbstreams/latest/APIReference/API_StreamRecord.html
    /// </summary>
    public class DynamodbStreamRecord : Record
    {
        /// <summary>
        /// The event source arn of DynamoDB.
        /// </summary>
        public string EventSourceArn { get; set; }
    }
}
}
| {
"content_hash": "26df077c33d8ba0e9534463f6bc0bc2e",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 96,
"avg_line_length": 31.125,
"alnum_prop": 0.5732931726907631,
"repo_name": "thedevopsmachine/aws-lambda-dotnet",
"id": "310f4ab613f27e43f8f6ae6ed6da01332d5bb412",
"size": "996",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Libraries/src/Amazon.Lambda.DynamoDBEvents/DynamoDBEvent.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "630"
},
{
"name": "C#",
"bytes": "951951"
},
{
"name": "CSS",
"bytes": "649"
},
{
"name": "F#",
"bytes": "42403"
},
{
"name": "HTML",
"bytes": "24912"
},
{
"name": "JavaScript",
"bytes": "223344"
},
{
"name": "PowerShell",
"bytes": "878640"
},
{
"name": "Shell",
"bytes": "234"
}
],
"symlink_target": ""
} |
package com.amazonaws.services.robomaker.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/robomaker-2018-06-29/ListSimulationApplications"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListSimulationApplicationsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The version qualifier of the simulation application.
* </p>
*/
private String versionQualifier;
/**
* <p>
* The <code>nextToken</code> value returned from a previous paginated <code>ListSimulationApplications</code>
* request where <code>maxResults</code> was used and the results exceeded the value of that parameter. Pagination
* continues from the end of the previous results that returned the <code>nextToken</code> value.
* </p>
* <note>
* <p>
* This token should be treated as an opaque identifier that is only used to retrieve the next items in a list and
* not for other programmatic purposes.
* </p>
* </note>
*/
private String nextToken;
/**
* <p>
* The maximum number of deployment job results returned by <code>ListSimulationApplications</code> in paginated
* output. When this parameter is used, <code>ListSimulationApplications</code> only returns <code>maxResults</code>
* results in a single page along with a <code>nextToken</code> response element. The remaining results of the
* initial request can be seen by sending another <code>ListSimulationApplications</code> request with the returned
* <code>nextToken</code> value. This value can be between 1 and 100. If this parameter is not used, then
* <code>ListSimulationApplications</code> returns up to 100 results and a <code>nextToken</code> value if
* applicable.
* </p>
*/
private Integer maxResults;
/**
* <p>
* Optional list of filters to limit results.
* </p>
* <p>
* The filter name <code>name</code> is supported. When filtering, you must use the complete value of the filtered
* item. You can use up to three filters.
* </p>
*/
private java.util.List<Filter> filters;
/**
* <p>
* The version qualifier of the simulation application.
* </p>
*
* @param versionQualifier
* The version qualifier of the simulation application.
*/
public void setVersionQualifier(String versionQualifier) {
    // Plain field assignment; no validation or normalization is applied.
    this.versionQualifier = versionQualifier;
}
/**
* <p>
* The version qualifier of the simulation application.
* </p>
*
* @return The version qualifier of the simulation application.
*/
public String getVersionQualifier() {
    // Returns the stored value as-is; may be null if never set.
    return this.versionQualifier;
}
/**
* <p>
* The version qualifier of the simulation application.
* </p>
*
* @param versionQualifier
* The version qualifier of the simulation application.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListSimulationApplicationsRequest withVersionQualifier(String versionQualifier) {
    // Delegates to the setter, then returns {@code this} for call chaining.
    setVersionQualifier(versionQualifier);
    return this;
}
/**
* <p>
* The <code>nextToken</code> value returned from a previous paginated <code>ListSimulationApplications</code>
* request where <code>maxResults</code> was used and the results exceeded the value of that parameter. Pagination
* continues from the end of the previous results that returned the <code>nextToken</code> value.
* </p>
* <note>
* <p>
* This token should be treated as an opaque identifier that is only used to retrieve the next items in a list and
* not for other programmatic purposes.
* </p>
* </note>
*
* @param nextToken
* The <code>nextToken</code> value returned from a previous paginated
* <code>ListSimulationApplications</code> request where <code>maxResults</code> was used and the results
* exceeded the value of that parameter. Pagination continues from the end of the previous results that
* returned the <code>nextToken</code> value. </p> <note>
* <p>
* This token should be treated as an opaque identifier that is only used to retrieve the next items in a
* list and not for other programmatic purposes.
* </p>
*/
public void setNextToken(String nextToken) {
    // Plain field assignment; the token is stored opaquely.
    this.nextToken = nextToken;
}
/**
* <p>
* The <code>nextToken</code> value returned from a previous paginated <code>ListSimulationApplications</code>
* request where <code>maxResults</code> was used and the results exceeded the value of that parameter. Pagination
* continues from the end of the previous results that returned the <code>nextToken</code> value.
* </p>
* <note>
* <p>
* This token should be treated as an opaque identifier that is only used to retrieve the next items in a list and
* not for other programmatic purposes.
* </p>
* </note>
*
* @return The <code>nextToken</code> value returned from a previous paginated
* <code>ListSimulationApplications</code> request where <code>maxResults</code> was used and the results
* exceeded the value of that parameter. Pagination continues from the end of the previous results that
* returned the <code>nextToken</code> value. </p> <note>
* <p>
* This token should be treated as an opaque identifier that is only used to retrieve the next items in a
* list and not for other programmatic purposes.
* </p>
*/
public String getNextToken() {
    // Returns the stored token as-is; may be null on a first-page request.
    return this.nextToken;
}
/**
* <p>
* The <code>nextToken</code> value returned from a previous paginated <code>ListSimulationApplications</code>
* request where <code>maxResults</code> was used and the results exceeded the value of that parameter. Pagination
* continues from the end of the previous results that returned the <code>nextToken</code> value.
* </p>
* <note>
* <p>
* This token should be treated as an opaque identifier that is only used to retrieve the next items in a list and
* not for other programmatic purposes.
* </p>
* </note>
*
* @param nextToken
* The <code>nextToken</code> value returned from a previous paginated
* <code>ListSimulationApplications</code> request where <code>maxResults</code> was used and the results
* exceeded the value of that parameter. Pagination continues from the end of the previous results that
* returned the <code>nextToken</code> value. </p> <note>
* <p>
* This token should be treated as an opaque identifier that is only used to retrieve the next items in a
* list and not for other programmatic purposes.
* </p>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListSimulationApplicationsRequest withNextToken(String nextToken) {
    // Delegates to the setter, then returns {@code this} for call chaining.
    setNextToken(nextToken);
    return this;
}
/**
* <p>
* The maximum number of deployment job results returned by <code>ListSimulationApplications</code> in paginated
* output. When this parameter is used, <code>ListSimulationApplications</code> only returns <code>maxResults</code>
* results in a single page along with a <code>nextToken</code> response element. The remaining results of the
* initial request can be seen by sending another <code>ListSimulationApplications</code> request with the returned
* <code>nextToken</code> value. This value can be between 1 and 100. If this parameter is not used, then
* <code>ListSimulationApplications</code> returns up to 100 results and a <code>nextToken</code> value if
* applicable.
* </p>
*
* @param maxResults
* The maximum number of deployment job results returned by <code>ListSimulationApplications</code> in
* paginated output. When this parameter is used, <code>ListSimulationApplications</code> only returns
* <code>maxResults</code> results in a single page along with a <code>nextToken</code> response element. The
* remaining results of the initial request can be seen by sending another
* <code>ListSimulationApplications</code> request with the returned <code>nextToken</code> value. This value
* can be between 1 and 100. If this parameter is not used, then <code>ListSimulationApplications</code>
* returns up to 100 results and a <code>nextToken</code> value if applicable.
*/
public void setMaxResults(Integer maxResults) {
this.maxResults = maxResults;
}
/**
* <p>
* The maximum number of deployment job results returned by <code>ListSimulationApplications</code> in paginated
* output. When this parameter is used, <code>ListSimulationApplications</code> only returns <code>maxResults</code>
* results in a single page along with a <code>nextToken</code> response element. The remaining results of the
* initial request can be seen by sending another <code>ListSimulationApplications</code> request with the returned
* <code>nextToken</code> value. This value can be between 1 and 100. If this parameter is not used, then
* <code>ListSimulationApplications</code> returns up to 100 results and a <code>nextToken</code> value if
* applicable.
* </p>
*
* @return The maximum number of deployment job results returned by <code>ListSimulationApplications</code> in
* paginated output. When this parameter is used, <code>ListSimulationApplications</code> only returns
* <code>maxResults</code> results in a single page along with a <code>nextToken</code> response element.
* The remaining results of the initial request can be seen by sending another
* <code>ListSimulationApplications</code> request with the returned <code>nextToken</code> value. This
* value can be between 1 and 100. If this parameter is not used, then
* <code>ListSimulationApplications</code> returns up to 100 results and a <code>nextToken</code> value if
* applicable.
*/
public Integer getMaxResults() {
return this.maxResults;
}
/**
* <p>
* The maximum number of deployment job results returned by <code>ListSimulationApplications</code> in paginated
* output. When this parameter is used, <code>ListSimulationApplications</code> only returns <code>maxResults</code>
* results in a single page along with a <code>nextToken</code> response element. The remaining results of the
* initial request can be seen by sending another <code>ListSimulationApplications</code> request with the returned
* <code>nextToken</code> value. This value can be between 1 and 100. If this parameter is not used, then
* <code>ListSimulationApplications</code> returns up to 100 results and a <code>nextToken</code> value if
* applicable.
* </p>
*
* @param maxResults
* The maximum number of deployment job results returned by <code>ListSimulationApplications</code> in
* paginated output. When this parameter is used, <code>ListSimulationApplications</code> only returns
* <code>maxResults</code> results in a single page along with a <code>nextToken</code> response element. The
* remaining results of the initial request can be seen by sending another
* <code>ListSimulationApplications</code> request with the returned <code>nextToken</code> value. This value
* can be between 1 and 100. If this parameter is not used, then <code>ListSimulationApplications</code>
* returns up to 100 results and a <code>nextToken</code> value if applicable.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListSimulationApplicationsRequest withMaxResults(Integer maxResults) {
setMaxResults(maxResults);
return this;
}
/**
* <p>
* Optional list of filters to limit results.
* </p>
* <p>
* The filter name <code>name</code> is supported. When filtering, you must use the complete value of the filtered
* item. You can use up to three filters.
* </p>
*
* @return Optional list of filters to limit results.</p>
* <p>
* The filter name <code>name</code> is supported. When filtering, you must use the complete value of the
* filtered item. You can use up to three filters.
*/
public java.util.List<Filter> getFilters() {
return filters;
}
/**
* <p>
* Optional list of filters to limit results.
* </p>
* <p>
* The filter name <code>name</code> is supported. When filtering, you must use the complete value of the filtered
* item. You can use up to three filters.
* </p>
*
* @param filters
* Optional list of filters to limit results.</p>
* <p>
* The filter name <code>name</code> is supported. When filtering, you must use the complete value of the
* filtered item. You can use up to three filters.
*/
public void setFilters(java.util.Collection<Filter> filters) {
if (filters == null) {
this.filters = null;
return;
}
this.filters = new java.util.ArrayList<Filter>(filters);
}
/**
* <p>
* Optional list of filters to limit results.
* </p>
* <p>
* The filter name <code>name</code> is supported. When filtering, you must use the complete value of the filtered
* item. You can use up to three filters.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setFilters(java.util.Collection)} or {@link #withFilters(java.util.Collection)} if you want to override
* the existing values.
* </p>
*
* @param filters
* Optional list of filters to limit results.</p>
* <p>
* The filter name <code>name</code> is supported. When filtering, you must use the complete value of the
* filtered item. You can use up to three filters.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListSimulationApplicationsRequest withFilters(Filter... filters) {
if (this.filters == null) {
setFilters(new java.util.ArrayList<Filter>(filters.length));
}
for (Filter ele : filters) {
this.filters.add(ele);
}
return this;
}
/**
* <p>
* Optional list of filters to limit results.
* </p>
* <p>
* The filter name <code>name</code> is supported. When filtering, you must use the complete value of the filtered
* item. You can use up to three filters.
* </p>
*
* @param filters
* Optional list of filters to limit results.</p>
* <p>
* The filter name <code>name</code> is supported. When filtering, you must use the complete value of the
* filtered item. You can use up to three filters.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListSimulationApplicationsRequest withFilters(java.util.Collection<Filter> filters) {
setFilters(filters);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getVersionQualifier() != null)
sb.append("VersionQualifier: ").append(getVersionQualifier()).append(",");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken()).append(",");
if (getMaxResults() != null)
sb.append("MaxResults: ").append(getMaxResults()).append(",");
if (getFilters() != null)
sb.append("Filters: ").append(getFilters());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListSimulationApplicationsRequest == false)
return false;
ListSimulationApplicationsRequest other = (ListSimulationApplicationsRequest) obj;
if (other.getVersionQualifier() == null ^ this.getVersionQualifier() == null)
return false;
if (other.getVersionQualifier() != null && other.getVersionQualifier().equals(this.getVersionQualifier()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
if (other.getMaxResults() == null ^ this.getMaxResults() == null)
return false;
if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false)
return false;
if (other.getFilters() == null ^ this.getFilters() == null)
return false;
if (other.getFilters() != null && other.getFilters().equals(this.getFilters()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getVersionQualifier() == null) ? 0 : getVersionQualifier().hashCode());
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
hashCode = prime * hashCode + ((getFilters() == null) ? 0 : getFilters().hashCode());
return hashCode;
}
    /**
     * Creates a clone of this request via {@code super.clone()}, narrowing the return type
     * for convenient chaining. NOTE(review): copy depth depends on the superclass
     * implementation — confirm against the base request class if a deep copy is expected.
     */
    @Override
    public ListSimulationApplicationsRequest clone() {
        return (ListSimulationApplicationsRequest) super.clone();
    }
}
| {
"content_hash": "fd4cfda9add9a8bea7990486c9eae356",
"timestamp": "",
"source": "github",
"line_count": 430,
"max_line_length": 129,
"avg_line_length": 43.9953488372093,
"alnum_prop": 0.6482714874722486,
"repo_name": "jentfoo/aws-sdk-java",
"id": "5c8f7362dc1882ecaf278a4254a5e5765cb149d1",
"size": "19498",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-java-sdk-robomaker/src/main/java/com/amazonaws/services/robomaker/model/ListSimulationApplicationsRequest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "270"
},
{
"name": "FreeMarker",
"bytes": "173637"
},
{
"name": "Gherkin",
"bytes": "25063"
},
{
"name": "Java",
"bytes": "356214839"
},
{
"name": "Scilab",
"bytes": "3924"
},
{
"name": "Shell",
"bytes": "295"
}
],
"symlink_target": ""
} |
#ifndef TEE_OBJ_H
#define TEE_OBJ_H
#include <tee_api_types.h>
#include <kernel/tee_ta_manager.h>
#include <sys/queue.h>
/* Usage/permission mask with every bit set, i.e. no restriction applied. */
#define TEE_USAGE_DEFAULT   0xffffffff
/*
 * Kernel-side representation of a TEE object owned by a user TA context.
 * Objects are kept in a TAILQ list per context (see tee_obj_add/tee_obj_get).
 */
struct tee_obj {
	TAILQ_ENTRY(tee_obj) link;	/* linkage in the owning context's object list */
	TEE_ObjectInfo info;		/* GlobalPlatform object info (type, size, usage, ...) */
	bool busy;		/* true if used by an operation */
	uint32_t have_attrs;	/* bitfield identifying set properties */
	void *attr;		/* attribute storage; layout depends on object type */
	struct tee_pobj *pobj;	/* ptr to persistent object */
	int fd;			/* handle for the backing data stream, if any */
	uint32_t ds_size;	/* data stream size */
	uint32_t flags;		/* permission flags for persistent objects */
};
/* Add object @o to the list owned by user TA context @utc. */
void tee_obj_add(struct user_ta_ctx *utc, struct tee_obj *o);
/* Look up the object identified by @obj_id in @utc; on success store it in *@obj. */
TEE_Result tee_obj_get(struct user_ta_ctx *utc, uint32_t obj_id,
		       struct tee_obj **obj);
/* Close object @o and remove it from context @utc. */
void tee_obj_close(struct user_ta_ctx *utc, struct tee_obj *o);
/* Close every object still registered with context @utc. */
void tee_obj_close_all(struct user_ta_ctx *utc);
/*
 * Verify object @o on behalf of session @sess.
 * NOTE(review): exact verification semantics are defined in the .c file —
 * confirm there before relying on this from new call sites.
 */
TEE_Result tee_obj_verify(struct tee_ta_session *sess, struct tee_obj *o);
/* Allocate a zero-initialized object, or return NULL on failure. */
struct tee_obj *tee_obj_alloc(void);
/* Free object @o and any resources it owns. */
void tee_obj_free(struct tee_obj *o);
#endif
| {
"content_hash": "a2a03c3397d7fbd2968e305a3a0a0b41",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 74,
"avg_line_length": 25.42105263157895,
"alnum_prop": 0.6987577639751553,
"repo_name": "matt2048/optee_os",
"id": "2902932ce5f049884b876f8ff5e92f189b41813b",
"size": "2356",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "core/include/tee/tee_obj.h",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "263165"
},
{
"name": "Awk",
"bytes": "6672"
},
{
"name": "C",
"bytes": "6833507"
},
{
"name": "C++",
"bytes": "58069"
},
{
"name": "Groff",
"bytes": "276147"
},
{
"name": "HTML",
"bytes": "75678"
},
{
"name": "Makefile",
"bytes": "162955"
},
{
"name": "Python",
"bytes": "14335"
}
],
"symlink_target": ""
} |
Lean Tutorial
=============
Introduction
------------
Lean is an automatic and interactive theorem prover. It can be used to
create specifications, build mathematical libraries, and solve
constraints. In this tutorial, we introduce basic concepts, the logic
used in Lean, and the main commands.
Getting started
---------------
We can use Lean in interactive or batch mode.
The following example just displays the message `hello world`.
```lean
print "hello world"
```
All we have to do to run this first example is to call the `lean` executable
with the name of the text file that contains the command above.
If you saved the above command in the file `hello.lean`, then you just have
to execute
lean hello.lean
As a more complex example, the next example defines a function that doubles
the input value.
```lean
-- defines the double function
definition double (x : Nat) := x + x
```
Basics
------
We can also view Lean as a suite of tools for evaluating and processing
expressions representing terms, definitions, and theorems.
Every expression has a unique type in Lean. The command `check` returns the
type of a given expression.
```lean
check double 3
check double
```
The last command returns `Nat → Nat`. That is, the type of double is a function
from `Nat` to `Nat`, where `Nat` is the type of the natural numbers.
The command `import` loads existing libraries and extensions. The
following command imports the command `find` that searches the Lean
environment using regular expressions
```lean
import find
find "Nat" -- find all objects that start with the prefix Nat
check Nat::ge -- display the signature of the Nat::ge definition
```
We say `Nat::ge` is a hierarchical name composed of two parts: `Nat` and `ge`.
The command `using` creates aliases based on a given prefix. For example, the following
command creates aliases for all objects starting with `Nat`
```lean
using Nat
check ge -- display the signature of the Nat::ge definition
```
The command `variable` assigns a type to an identifier. The following command postulates/assumes
that `n`, `m` and `o` have type `Nat`.
```lean
variable n : Nat
variable m : Nat
variable o : Nat
```
The command `variables n m o : Nat` can be used as a shorthand for the three commands above.
In Lean, proofs are also expressions, and all functionality provided for manipulating
expressions is also available for manipulating proofs. For example, `refl n` is a proof
for `n = n`. In Lean, `refl` is the reflexivity theorem.
```lean
check refl n
```
The command `axiom` postulates that a given proposition (aka Boolean formula) is true.
The following commands postulate two axioms `Ax1` and `Ax2` that state that `n = m` and
`m = o`.
```lean
axiom Ax1 : n = m
axiom Ax2 : m = o
```
`Ax1` and `Ax2` are not just names. For example, `trans Ax1 Ax2` is a proof that
`n = o`, where `trans` is the transitivity theorem.
```lean
check trans Ax1 Ax2
```
The expression `trans Ax1 Ax2` is just a function application like any other.
Moreover, in Lean, _propositions are types_. Any Boolean expression `P` can be used
as a type. The elements of type `P` can be viewed as the proofs of `P`.
Moreover, in Lean, _proof checking is type checking_. For example, the Lean type checker
will reject the type incorrect term `trans Ax2 Ax1`.
Because we use _proposition as types_, we must support _empty types_. For example,
the type `false` must be empty, since we don't have a proof for `false`.
Most systems based on the _propositions as types_ paradigm are based on constructive logic.
Lean on the other hand is based on classical logic. The _excluded middle_ is a theorem
in Lean, and `em p` is a proof for `p ∨ ¬ p`.
```lean
variable p : Bool
check em p
```
The commands `axiom` and `variable` are essentially the same command. We provide both
just to make Lean files more readable. We encourage users to use `axiom` only for
propositions, and `variable` for everything else.
Similarly, a theorem is just a definition. The following command defines a new theorem
called `nat_trans3`
```lean
theorem nat_trans3 (a b c d : Nat) (H1 : a = b) (H2 : c = b) (H3 : c = d) : a = d
:= trans (trans H1 (symm H2)) H3
```
The theorem `nat_trans3` has 7 parameters: it takes four natural numbers `a`, `b`, `c` and `d`,
and three proofs showing that `a = b`, `c = b` and `c = d`, and returns a proof that `a = d`.
In the example above, `symm` is the symmetry theorem. Now, we use `nat_trans3` in a simple
example.
```lean
variables x y z w : Nat
axiom Hxy : x = y
axiom Hzy : z = y
axiom Hzw : z = w
check nat_trans3 x y z w Hxy Hzy Hzw
```
The theorem `nat_trans3` is somewhat inconvenient to use because it has 7 parameters.
However, the first four parameters can be inferred from the last 3. We can use `_` as a placeholder
that instructs Lean to synthesize this expression. The synthesis process is based on type inference, and it is
the most basic form of automation provided by Lean.
```lean
check nat_trans3 _ _ _ _ Hxy Hzy Hzw
```
Lean also supports _implicit arguments_.
We mark implicit arguments using curly braces instead of parenthesis.
In the following example, we define the theorem `nat_trans3i` using implicit arguments.
```lean
theorem nat_trans3i {a b c d : Nat} (H1 : a = b) (H2 : c = b) (H3 : c = d) : a = d
:= trans (trans H1 (symm H2)) H3
```
It is identical to `nat_trans3`, the only difference is the use of curly braces.
Lean will (try to) infer the implicit arguments. The idea behind implicit arguments
is quite simple, we are just instructing Lean to automatically insert the placeholders
`_` for us.
```lean
check nat_trans3i Hxy Hzy Hzw
```
Sometimes, Lean will not be able to infer the parameters automatically.
So, whenever we define a theorem/definition/axiom/variable containing implicit arguments, Lean will
automatically create an _explicit_ version where all parameters are explicit.
The explicit version uses the same name with a `@` prefix.
```lean
check @nat_trans3i
```
The theorems `refl`, `trans` and `symm` all have implicit arguments.
```lean
check @refl
check @trans
check @symm
```
We can also instruct Lean to display all implicit arguments when it prints expressions.
This is useful when debugging non-trivial problems.
```lean
set_option pp::implicit true -- show implicit arguments
check nat_trans3i Hxy Hzy Hzw
set_option pp::implicit false -- hide implicit arguments
```
In the previous example, the `check` command stated that `nat_trans3i Hxy Hzy Hzw`
has type `@eq ℕ x w`. The expression `x = w` is just notational convenience.
We have seen many occurrences of `(Type U)`, where `U` is a _universe variable_.
In Lean, the type of `Nat` and `Bool` is `Type`.
```lean
check Nat
check Bool
```
We say `Type` is the type of all _small_ types, but what is the type of `Type`?
```lean
check Type
```
Lean returns `(Type 1)`. Similarly, the type of `(Type 1)` is `(Type 2)`. In Lean, we also have _universe cumulativity_.
That is, we can provide an element of type `(Type i)` where an element of type `(Type j)` is expected when `i ≤ j`.
This makes the system more convenient to use. Otherwise, we would need a reflexivity theorem for `Type` (i.e., `(Type 0)`),
`Type 1`, `Type 2`, etc. Universe cumulativity improves usability, but it is not enough because
we would still have the question: how big should `i` be? Moreover, if we choose an `i` that is not big enough
we have to go back and correct all libraries. This is not satisfactory and not modular.
So, in Lean, we allow users to declare _universe variables_ and simple constraints between them. The Lean kernel defines
one universe variable `U`, and states that `U ≥ 1` using the command `universe U ≥ 1`.
The Lean type casting library defines another universe variable called `M` and states that `universe M ≥ 1`.
In Lean, whenever we declare a new universe `V`, the system automatically adds the constraint `U ≥ V + 1`.
That is, `U` is the _maximal_ universe in Lean.
Lean reports a universe inconsistency if the universe constraints are inconsistent. For example, it will return an error
if we execute the command `universe M ≥ U`. We can view universe variables as placeholders, and we can always solve
the universe constraints and find an assignment for the universe variables used in our developments.
This assignment allows us to automatically generate a Lean specification that is not based on this particular feature.
Propositional logic
-------------------
To manipulate formulas with a richer logical structure, it is important to master the notation Lean uses for building
composite logical expressions out of basic formulas using _logical connectives_. The logical connectives (`and`, `or`, `not`, etc)
are defined in the Lean [kernel](../../src/builtin/kernel.lean). The kernel also defines notational convention for rewriting formulas
in a natural way. Here is a table showing the notation for the so called propositional (or Boolean) connectives.
| Ascii | Ascii alt. | Unicode | Definition |
|-------|--------------|---------|--------------|
| true | | ⊤ | true |
| false | | ⊥ | false |
| not | | ¬ | not |
| /\ | && | ∧ | and |
| \/ | || | ∨ | or |
| -> | | → | implies |
| <-> | | ↔ | iff |
`true` and `false` are logical constants to denote the true and false propositions. Logical negation is a unary operator just like
arithmetical negation on numbers. The other connectives are all binary operators. The meaning of the operators is the usual one.
The table above makes clear that Lean supports unicode characters. We can use Ascii or/and unicode versions.
Here is a simple example using the connectives above.
```lean
variable q : Bool
check p → q → p ∧ q
check ¬ p → p ↔ false
check p ∨ q → q ∨ p
-- Ascii version
check p -> q -> p && q
check not p -> p <-> false
check p || q -> q \/ p
```
Depending on the platform, Lean uses unicode characters by default when printing expressions. The following commands can be used to
change this behavior.
```lean
set_option pp::unicode false
check p → q → p ∧ q
set_option pp::unicode true
check p → q → p ∧ q
```
Note that, it may seem that the symbols `->` and `→` are overloaded, and Lean uses them to represent Boolean implication and the type
of functions. Actually, they are not overloaded, they are the same symbols. In Lean, the Boolean `p → q` expression is also the type
of the functions that given a proof for `p`, returns a proof for `q`. This is very convenient for writing proofs.
```lean
-- Hpq is a function that takes a proof for p and returns a proof for q
axiom Hpq : p → q
-- Hq is a proof/certificate for p
axiom Hp : p
-- The expression Hpq Hp is a proof/certificate for q
check Hpq Hp
```
In composite expressions, the precedences of the various binary
connectives are in order of the above table, with `and` being the
strongest and `iff` the weakest. For example, `a ∧ b → c ∨ d ∧ e`
means `(a ∧ b) → (c ∨ (d ∧ e))`. All of them are right-associative.
So, `p ∧ q ∧ r` means `p ∧ (q ∧ r)`. The actual precedence and fixity of all
logical connectives is defined in the Lean [kernel definition file](../../src/builtin/kernel.lean).
Finally, `not`, `and`, `or` and `iff` are the actual names used when
defining the Boolean connectives. They can be used as any other function.
```lean
check and
check or
check not
```
Lean supports _currying_: `and true` is a function from `Bool` to `Bool`.
```lean
check and true
definition id := and true
```
Functions
---------
There are many variable-binding constructs in mathematics. Lean expresses
all of them using just one _abstraction_, which is a converse operation to
function application. Given a variable `x`, a type `A`, and a term `t` that
may or may not contain `x`, one can construct the so-called _lambda abstraction_
`fun x : A, t`, or using unicode notation `λ x : A, t`. Here are some simple
examples.
```lean
check fun x : Nat, x + 1
check fun x y : Nat, x + 2 * y
check fun x y : Bool, not (x ∧ y)
check λ x : Nat, x + 1
check λ (x : Nat) (p : Bool), x = 0 ∨ p
```
In many cases, Lean can automatically infer the type of the variable. Actually,
in all the examples above, the type can be inferred automatically.
```lean
check fun x, x + 1
check fun x y, x + 2 * y
check fun x y, not (x ∧ y)
check λ x, x + 1
check λ x p, x = 0 ∨ p
```
However, Lean will complain that it cannot infer the type of the
variable `x` in `fun x, x` because any type would work in this example.
The following example shows how to use lambda abstractions in
function applications
```lean
check (fun x y, x + 2 * y) 1
check (fun x y, x + 2 * y) 1 2
check (fun x y, not (x ∧ y)) true false
```
Lambda abstractions are also used to create proofs for propositions of the form `A → B`.
This should be natural since we can "view" `A → B` as the type of functions that given
a proof for `A` returns a proof for `B`.
For example, a proof for `p → p` is just `fun H : p, H` (the identity function).
```lean
check fun H : p, H
```
Definitional equality
---------------------
The command `eval t` computes a normal form for the term `t`.
In Lean, we say two terms are _definitionally equal_ if they have the same
normal form. For example, the terms `(λ x : Nat, x + 1) a` and `a + 1`
are definitionally equal. The Lean type/proof checker uses the normalizer when
checking types/proofs. So, we can prove that two definitionally equal terms
are equal using just `refl`. Here is a simple example.
```lean
theorem def_eq_th (a : Nat) : ((λ x : Nat, x + 1) a) = a + 1
:= refl (a+1)
```
Provable equality
-----------------
In the previous examples, we have used `nat_trans3 x y z w Hxy Hzy Hzw`
to show that `x = w`. In this case, `x` and `w` are not definitionally equal,
but they are provably equal in the environment that contains `nat_trans3` and
axioms `Hxy`, `Hzy` and `Hzw`.
Proving
-------
The Lean kernel contains basic theorems for creating proof terms. The
basic theorems are useful for creating manual proofs. They are also the
basic building blocks used by all automated proof engines available in
Lean. The theorems can be broken into three different categories:
introduction, elimination, and rewriting. First, we cover the introduction
and elimination theorems for the basic Boolean connectives.
### And (conjunction)
The expression `and_intro H1 H2` creates a proof for `a ∧ b` using proofs
`H1 : a` and `H2 : b`. We say `and_intro` is the _and-introduction_ operation.
In the following example we use `and_intro` for creating a proof for
`p → q → p ∧ q`.
```lean
check fun (Hp : p) (Hq : q), and_intro Hp Hq
```
The expression `and_eliml H` creates a proof `a` from a proof `H : a ∧ b`.
Similarly `and_elimr H` is a proof for `b`. We say they are the _left/right and-elimination_.
```lean
-- Proof for p ∧ q → p
check fun H : p ∧ q, and_eliml H
-- Proof for p ∧ q → q
check fun H : p ∧ q, and_elimr H
```
Now, we prove `p ∧ q → q ∧ p` with the following simple proof term.
```lean
check fun H : p ∧ q, and_intro (and_elimr H) (and_eliml H)
```
Note that the proof term is very similar to a function that just swaps the
elements of a pair.
### Or (disjunction)
The expression `or_introl H1 b` creates a proof for `a ∨ b` using a proof `H1 : a`.
Similarly, `or_intror a H2` creates a proof for `a ∨ b` using a proof `H2 : b`.
We say they are the _left/right or-introduction_.
```lean
-- Proof for p → p ∨ q
check fun H : p, or_introl H q
-- Proof for q → p ∨ q
check fun H : q, or_intror p H
```
The or-elimination rule is slightly more complicated. The basic idea is the
following, we can prove `c` from `a ∨ b`, by showing we can prove `c`
by assuming `a` or by assuming `b`. It is essentially a proof by cases.
`or_elim Hab Hac Hbc` takes three arguments `Hab : a ∨ b`, `Hac : a → c` and `Hbc : b → c` and produces a proof for `c`.
In the following example, we use `or_elim` to prove that `p ∨ q → q ∨ p`.
```lean
check fun H : p ∨ q,
or_elim H
(fun Hp : p, or_intror q Hp)
(fun Hq : q, or_introl Hq p)
```
### Not (negation)
`not_intro H` produces a proof for `¬ a` from `H : a → false`. That is,
we obtain `¬ a` if we can derive `false` from `a`. The expression
`absurd_elim b Ha Hna` produces a proof for `b` from `Ha : a` and `Hna : ¬ a`.
That is, we can deduce anything if we have `a` and `¬ a`.
We now use `not_intro` and `absurd_elim` to produce a proof term for
`(a → b) → ¬ b → ¬ a`
```lean
variables a b : Bool
check fun (Hab : a → b) (Hnb : ¬ b),
not_intro (fun Ha : a, absurd_elim false (Hab Ha) Hnb)
```
Here is the proof term for `¬ a → b → (b → a) → c`
```lean
variable c : Bool
check fun (Hna : ¬ a) (Hb : b) (Hba : b → a),
absurd_elim c (Hba Hb) Hna
```
### Iff (if-and-only-if)
The expression `iff_intro H1 H2` produces a proof for `a ↔ b` from `H1 : a → b` and `H2 : b → a`.
`iff_eliml H` produces a proof for `a → b` from `H : a ↔ b`. Similarly,
`iff_elimr H` produces a proof for `b → a` from `H : a ↔ b`.
Note that, in Lean, `a ↔ b` is definitionally equal to `a = b` when `a` and `b` have type `Bool`.
Here is the proof term for `a ∧ b ↔ b ∧ a`
```lean
check iff_intro (fun H : a ∧ b, and_intro (and_elimr H) (and_eliml H))
(fun H : b ∧ a, and_intro (and_elimr H) (and_eliml H))
```
### True and False
The expression `trivial` is a proof term for `true`, and `false_elim a H`
produces a proof for `a` from `H : false`.
Other basic operators used in proof construction are `eqt_intro`, `eqt_elim`, `eqf_intro` and `eqf_elim`.
`eqt_intro H` produces a proof for `a ↔ true` from `H : a`.
`eqt_elim H` produces a proof for `a` from `H : a ↔ true`.
`eqf_intro H` produces a proof for `a ↔ false` from `H : ¬ a`.
`eqf_elim H` produces a proof for `¬ a` from `H : a ↔ false`.
```lean
check @eqt_intro
check @eqt_elim
check @eqf_intro
check @eqf_elim
```
### Rewrite rules
The Lean kernel also contains many theorems that are meant to be used as rewriting/simplification rules.
The conclusion of these theorems is of the form `t = s` or `t ↔ s`. For example, `and_id a` is proof term for
`a ∧ a ↔ a`. The Lean simplifier can use these theorems to automatically create proof terms for us.
The expression `(by simp [rule-set])` is similar to `_`, but it tells Lean to synthesize the proof term using the simplifier
using the rewrite rule set named `[rule-set]`. In the following example, we create a simple rewrite rule set
and use it to prove a theorem that would be quite tedious to prove by hand.
```lean
-- import module that defines several tactics/strategies including "simp"
import tactic
-- create a rewrite rule set with name 'simple'
rewrite_set simple
-- add some theorems to the rewrite rule set 'simple'
add_rewrite and_id and_truer and_truel and_comm and_assoc and_left_comm iff_id : simple
theorem th1 (a b : Bool) : a ∧ b ∧ true ∧ b ∧ true ∧ b ↔ a ∧ b
:= (by simp simple)
```
In Lean, we can combine manual and automated proofs in a natural way. We can manually write the proof
skeleton and use the `by` construct to invoke automated proof engines like the simplifier for filling the
tedious steps. Here is a very simple example.
```lean
theorem th2 (a b : Bool) : a ∧ b ↔ b ∧ a
:= iff_intro
(fun H : a ∧ b, (by simp simple))
(fun H : b ∧ a, (by simp simple))
```
### Dependent functions and quantifiers
Lean supports _dependent functions_. In type theory, they are also called dependent product types or Pi-types.
The idea is quite simple, suppose we have a type `A` in some universe `(Type i)`, and a family of types `B : A → (Type j)` which assigns to each `a : A` a type `B a`. So a dependent function is a function whose range varies depending on its arguments.
In Lean, a dependent function type is written as `forall a : A, B a`, or `∀ a : A, B a` using unicode notation.
The proposition as types paradigm is based on dependent functions. In the previous examples, we have seen many examples of dependent functions. The theorems `refl`, `trans` and `symm`, and the equality are all dependent functions,
```lean
check @refl
check @trans
check @symm
check @eq
```
The universal quantifier is also a dependent function. In Lean, if we have a family of types `B : A → Bool`, then `∀ x : A, B x` has type `Bool`. This feature complicates the Lean set-theoretic model, but it improves usability. Several theorem provers have a `forall elimination` (aka instantiation) proof rule. In Lean (and other systems based on propositions as types), this rule is just function application. In the following example we add an axiom stating that `f x` is `0` for all `x`. Then, we instantiate the axiom using function application.
```lean
variable f : Nat → Nat
axiom fzero : ∀ x, f x = 0
check fzero 1
check fzero x
```
Since we instantiate quantifiers using function application, it is
natural to create proof terms for universal quantifiers using lambda
abstraction. In the following example, we create a proof term showing that for all
`x` and `y`, `f x = f y`.
```lean
check λ x y, trans (fzero x) (symm (fzero y))
```
We can view the proof term above as a simple function or "recipe" for showing that
`f x = f y` for any `x` and `y`. The function "invokes" `fzero` for creating
proof terms for `f x = 0` and `f y = 0`. Then, it uses symmetry `symm` to create
a proof term for `0 = f y`. Finally, transitivity is used to combine the proofs
for `f x = 0` and `0 = f y`.
In Lean, the existential quantifier `exists x : A, B x` is defined as `¬ forall x : A, ¬ B x`.
We can also write existential quantifiers as `∃ x : A, B x`. Actually both versions are just
notational convenience for `Exists A (fun x : A, B x)`. That is, the existential quantifier
is actually a constant defined in the file `kernel.lean`. This file also defines the
`exists_intro` and `exists_elim` theorems. To build a proof for `∃ x : A, B x`, we should
provide a term `w : A` and a proof term `Hw : B w` to `exists_intro`.
We say `w` is the witness for the existential introduction. In previous examples,
`nat_trans3i Hxy Hzy Hzw` was a proof term for `x = w`. Then, we can create a proof term
for `∃ a : Nat, a = w` using
```lean
theorem ex_a_eq_w : exists a, a = w := exists_intro x (nat_trans3i Hxy Hzy Hzw)
check ex_a_eq_w
```
Note that `exists_intro` also has implicit arguments. For example, Lean has to infer the implicit argument
`P : A → Bool`, a predicate (aka function to Bool). This creates complications. For example, suppose
we have `Hg : g 0 0 = 0` and we invoke `exists_intro 0 Hg`. There are different possible values for `P`.
Each possible value corresponds to a different theorem: `∃ x, g x x = x`, `∃ x, g x x = 0`,
`∃ x, g x 0 = x`, etc. Lean uses the context where `exists_intro` occurs to infer the user's intent.
In the example above, we were trying to prove the theorem `∃ a, a = w`. So, we are implicitly telling
Lean how to choose `P`. In the following example, we demonstrate this issue. We ask Lean to display
the implicit arguments using the option `pp::implicit`. We see that each instance of `exists_intro 0 Hg`
has different values for the implicit argument `P`.
```lean
check @exists_intro
variable g : Nat → Nat → Nat
axiom Hg : g 0 0 = 0
theorem gex1 : ∃ x, g x x = x := exists_intro 0 Hg
theorem gex2 : ∃ x, g x 0 = x := exists_intro 0 Hg
theorem gex3 : ∃ x, g 0 0 = x := exists_intro 0 Hg
theorem gex4 : ∃ x, g x x = 0 := exists_intro 0 Hg
set_option pp::implicit true -- display implicit arguments
print environment 4 -- print the last four theorems
set_option pp::implicit false -- hide implicit arguments
```
We can view `exists_intro` (aka existential introduction) as an information hiding procedure.
We are "hiding" what is the witness for some fact. The existential elimination performs the opposite
operation. The `exists_elim` theorem allows us to prove some proposition `B` from `∃ x : A, B x`
if we can derive `B` using an "abstract" witness `w` and a proof term `Hw : B w`.
```lean
check @exists_elim
```
In the following example, we define `even a` as `∃ b, a = 2*b`, and then we show that the sum
of two even numbers is an even number.
```lean
definition even (a : Nat) := ∃ b, a = 2*b
theorem EvenPlusEven {a b : Nat} (H1 : even a) (H2 : even b) : even (a + b)
:= exists_elim H1 (fun (w1 : Nat) (Hw1 : a = 2*w1),
exists_elim H2 (fun (w2 : Nat) (Hw2 : b = 2*w2),
exists_intro (w1 + w2)
(calc a + b = 2*w1 + b : { Hw1 }
... = 2*w1 + 2*w2 : { Hw2 }
... = 2*(w1 + w2) : symm (distributer 2 w1 w2))))
```
The example above also uses [_calculational proofs_](calc.md) to show that `a + b = 2*(w1 + w2)`.
The `calc` construct is just syntax sugar for creating proofs using transitivity and substitution.
The module `macros` provides notation for making proof terms more readable.
For example, it defines the `obtain _, from _, _` macro as syntax sugar for `exists_elim`.
With this macro we can write the example above as:
```lean
import macros
theorem EvenPlusEven2 {a b : Nat} (H1 : even a) (H2 : even b) : even (a + b)
:= obtain (w1 : Nat) (Hw1 : a = 2*w1), from H1,
obtain (w2 : Nat) (Hw2 : b = 2*w2), from H2,
exists_intro (w1 + w2)
(calc a + b = 2*w1 + b : { Hw1 }
... = 2*w1 + 2*w2 : { Hw2 }
... = 2*(w1 + w2) : symm (distributer 2 w1 w2))
```
The module `macros` also defines `take x : A, H` and `assume x : A, H`
as syntax sugar for `fun x : A, H`. This may seem silly, but it allows us to simulate [Mizar](http://en.wikipedia.org/wiki/Mizar_system)-style declarative proofs in Lean. Using these macros, we can write
```lean
definition Set (A : Type) : Type := A → Bool
definition element {A : Type} (x : A) (s : Set A) := s x
infix 60 ∈ : element
definition subset {A : Type} (s1 : Set A) (s2 : Set A) := ∀ x, x ∈ s1 → x ∈ s2
infix 50 ⊆ : subset
theorem subset_trans {A : Type} {s1 s2 s3 : Set A} (H1 : s1 ⊆ s2) (H2 : s2 ⊆ s3) : s1 ⊆ s3
:= take x : A,
assume Hin : x ∈ s1,
show x ∈ s3, from
let L1 : x ∈ s2 := H1 x Hin
in H2 x L1
```
Finally, the construct `show A, from H` means "have" a proof for `A` using `H`. It is just syntax sugar for
`let H_show : A := H in H_show`. It is useful to document intermediate steps in manually constructed proofs.
| {
"content_hash": "3e1c49780f08b25266369cbfb6b68503",
"timestamp": "",
"source": "github",
"line_count": 684,
"max_line_length": 548,
"avg_line_length": 39.646198830409354,
"alnum_prop": 0.6681539936573494,
"repo_name": "codyroux/lean0.1",
"id": "9f1235700c7e251eac28b99010562080e916228d",
"size": "27460",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/lean/tutorial.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "131510"
},
{
"name": "C++",
"bytes": "2267336"
},
{
"name": "Emacs Lisp",
"bytes": "37774"
},
{
"name": "Lua",
"bytes": "69704"
},
{
"name": "Objective-C",
"bytes": "361"
},
{
"name": "Python",
"bytes": "168045"
},
{
"name": "Shell",
"bytes": "14504"
}
],
"symlink_target": ""
} |
"""Contains functions for performing events on rooms."""
from twisted.internet import defer
from ._base import BaseHandler
from synapse.types import UserID, RoomAlias, RoomID, RoomStreamToken
from synapse.api.constants import (
EventTypes, JoinRules, RoomCreationPreset
)
from synapse.api.errors import AuthError, StoreError, SynapseError
from synapse.util import stringutils
from synapse.visibility import filter_events_for_client
from collections import OrderedDict
import logging
import math
import string
logger = logging.getLogger(__name__)
id_server_scheme = "https://"
class RoomCreationHandler(BaseHandler):
    """Handles requests to create new rooms.

    Creation sends the initial batch of state events (m.room.create, the
    creator's join, power levels, join rules, history visibility, aliases)
    and then issues any invites requested at creation time.
    """

    # Default initial-state settings applied for each creation preset.
    PRESETS_DICT = {
        RoomCreationPreset.PRIVATE_CHAT: {
            "join_rules": JoinRules.INVITE,
            "history_visibility": "shared",
            "original_invitees_have_ops": False,
            "guest_can_join": True,
        },
        RoomCreationPreset.TRUSTED_PRIVATE_CHAT: {
            "join_rules": JoinRules.INVITE,
            "history_visibility": "shared",
            "original_invitees_have_ops": True,
            "guest_can_join": True,
        },
        RoomCreationPreset.PUBLIC_CHAT: {
            "join_rules": JoinRules.PUBLIC,
            "history_visibility": "shared",
            "original_invitees_have_ops": False,
            "guest_can_join": False,
        },
    }

    @defer.inlineCallbacks
    def create_room(self, requester, config):
        """ Creates a new room.

        Args:
            requester (Requester): The user who requested the room creation.
            config (dict) : A dict of configuration options.
        Returns:
            The new room ID.
        Raises:
            SynapseError if the room ID couldn't be stored, or something went
            horribly wrong.
        """
        user_id = requester.user.to_string()

        self.ratelimit(requester)

        if "room_alias_name" in config:
            # Room aliases may not contain whitespace of any kind.
            for wchar in string.whitespace:
                if wchar in config["room_alias_name"]:
                    raise SynapseError(400, "Invalid characters in room alias")

            room_alias = RoomAlias.create(
                config["room_alias_name"],
                self.hs.hostname,
            )
            mapping = yield self.store.get_association_from_room_alias(
                room_alias
            )

            if mapping:
                raise SynapseError(400, "Room alias already taken")
        else:
            room_alias = None

        # Validate every requested invitee parses as a user ID before we
        # create anything.
        invite_list = config.get("invite", [])
        for i in invite_list:
            try:
                UserID.from_string(i)
            except Exception:
                # BUG FIX: this was a bare `except:`, which would also
                # swallow KeyboardInterrupt/SystemExit; narrowed to
                # Exception without changing behaviour for parse errors.
                raise SynapseError(400, "Invalid user_id: %s" % (i,))

        invite_3pid_list = config.get("invite_3pid", [])

        visibility = config.get("visibility", None)
        is_public = visibility == "public"

        # autogen room IDs and try to create it. We may clash, so just
        # try a few times till one goes through, giving up eventually.
        attempts = 0
        room_id = None
        while attempts < 5:
            try:
                random_string = stringutils.random_string(18)
                gen_room_id = RoomID.create(
                    random_string,
                    self.hs.hostname,
                )
                yield self.store.store_room(
                    room_id=gen_room_id.to_string(),
                    room_creator_user_id=user_id,
                    is_public=is_public
                )
                room_id = gen_room_id.to_string()
                break
            except StoreError:
                # Presumably a room-ID collision; retry with a fresh ID.
                attempts += 1
        if not room_id:
            raise StoreError(500, "Couldn't generate a room ID.")

        if room_alias:
            directory_handler = self.hs.get_handlers().directory_handler
            yield directory_handler.create_association(
                user_id=user_id,
                room_id=room_id,
                room_alias=room_alias,
                servers=[self.hs.hostname],
            )

        # When no preset is given explicitly, derive one from the requested
        # visibility.
        preset_config = config.get(
            "preset",
            RoomCreationPreset.PRIVATE_CHAT
            if visibility == "private"
            else RoomCreationPreset.PUBLIC_CHAT
        )

        raw_initial_state = config.get("initial_state", [])

        # Key caller-supplied initial state by (event type, state key);
        # later entries overwrite earlier ones for the same pair.
        initial_state = OrderedDict()
        for val in raw_initial_state:
            initial_state[(val["type"], val.get("state_key", ""))] = val["content"]

        creation_content = config.get("creation_content", {})

        msg_handler = self.hs.get_handlers().message_handler
        room_member_handler = self.hs.get_handlers().room_member_handler

        yield self._send_events_for_new_room(
            requester,
            room_id,
            msg_handler,
            room_member_handler,
            preset_config=preset_config,
            invite_list=invite_list,
            initial_state=initial_state,
            creation_content=creation_content,
            room_alias=room_alias,
        )

        if "name" in config:
            name = config["name"]
            yield msg_handler.create_and_send_nonmember_event(
                requester,
                {
                    "type": EventTypes.Name,
                    "room_id": room_id,
                    "sender": user_id,
                    "state_key": "",
                    "content": {"name": name},
                },
                ratelimit=False)

        if "topic" in config:
            topic = config["topic"]
            yield msg_handler.create_and_send_nonmember_event(
                requester,
                {
                    "type": EventTypes.Topic,
                    "room_id": room_id,
                    "sender": user_id,
                    "state_key": "",
                    "content": {"topic": topic},
                },
                ratelimit=False)

        # Invite membership events optionally carry the is_direct flag so
        # clients can mark the room as a direct chat.
        content = {}
        is_direct = config.get("is_direct", None)
        if is_direct:
            content["is_direct"] = is_direct

        for invitee in invite_list:
            yield room_member_handler.update_membership(
                requester,
                UserID.from_string(invitee),
                room_id,
                "invite",
                ratelimit=False,
                content=content,
            )

        for invite_3pid in invite_3pid_list:
            id_server = invite_3pid["id_server"]
            address = invite_3pid["address"]
            medium = invite_3pid["medium"]
            yield self.hs.get_handlers().room_member_handler.do_3pid_invite(
                room_id,
                requester.user,
                medium,
                address,
                id_server,
                requester,
                txn_id=None,
            )

        result = {"room_id": room_id}

        if room_alias:
            result["room_alias"] = room_alias.to_string()
            yield directory_handler.send_room_alias_update_event(
                requester, user_id, room_id
            )

        defer.returnValue(result)

    @defer.inlineCallbacks
    def _send_events_for_new_room(
            self,
            creator,  # A Requester object.
            room_id,
            msg_handler,
            room_member_handler,
            preset_config,
            invite_list,
            initial_state,
            creation_content,
            room_alias
    ):
        """Sends the initial set of state events into a new room.

        Preset-derived events are skipped for any (type, state_key) pair the
        caller already supplied in ``initial_state``; the caller's events are
        sent last.
        """
        def create(etype, content, **kwargs):
            # Build an event dict with the common keys filled in.
            e = {
                "type": etype,
                "content": content,
            }

            e.update(event_keys)
            e.update(kwargs)

            return e

        @defer.inlineCallbacks
        def send(etype, content, **kwargs):
            event = create(etype, content, **kwargs)
            yield msg_handler.create_and_send_nonmember_event(
                creator,
                event,
                ratelimit=False
            )

        config = RoomCreationHandler.PRESETS_DICT[preset_config]

        creator_id = creator.user.to_string()

        event_keys = {
            "room_id": room_id,
            "sender": creator_id,
            "state_key": "",
        }

        creation_content.update({"creator": creator_id})
        yield send(
            etype=EventTypes.Create,
            content=creation_content,
        )

        # The creator joins before any other state is sent.
        yield room_member_handler.update_membership(
            creator,
            creator.user,
            room_id,
            "join",
            ratelimit=False,
        )

        if (EventTypes.PowerLevels, '') not in initial_state:
            power_level_content = {
                "users": {
                    creator_id: 100,
                },
                "users_default": 0,
                "events": {
                    EventTypes.Name: 50,
                    EventTypes.PowerLevels: 100,
                    EventTypes.RoomHistoryVisibility: 100,
                    EventTypes.CanonicalAlias: 50,
                    EventTypes.RoomAvatar: 50,
                },
                "events_default": 0,
                "state_default": 50,
                "ban": 50,
                "kick": 50,
                "redact": 50,
                "invite": 0,
            }

            if config["original_invitees_have_ops"]:
                for invitee in invite_list:
                    power_level_content["users"][invitee] = 100

            yield send(
                etype=EventTypes.PowerLevels,
                content=power_level_content,
            )

        if room_alias and (EventTypes.CanonicalAlias, '') not in initial_state:
            yield send(
                etype=EventTypes.CanonicalAlias,
                content={"alias": room_alias.to_string()},
            )

        if (EventTypes.JoinRules, '') not in initial_state:
            yield send(
                etype=EventTypes.JoinRules,
                content={"join_rule": config["join_rules"]},
            )

        if (EventTypes.RoomHistoryVisibility, '') not in initial_state:
            yield send(
                etype=EventTypes.RoomHistoryVisibility,
                content={"history_visibility": config["history_visibility"]}
            )

        if config["guest_can_join"]:
            if (EventTypes.GuestAccess, '') not in initial_state:
                yield send(
                    etype=EventTypes.GuestAccess,
                    content={"guest_access": "can_join"}
                )

        for (etype, state_key), content in initial_state.items():
            yield send(
                etype=etype,
                state_key=state_key,
                content=content,
            )
class RoomContextHandler(BaseHandler):
    @defer.inlineCallbacks
    def get_event_context(self, user, room_id, event_id, limit):
        """Retrieves events, pagination tokens and state around a given event
        in a room.

        Args:
            user (UserID)
            room_id (str)
            event_id (str)
            limit (int): The maximum number of events to return in total
                (excluding state).

        Returns:
            dict, or None if the event isn't found
        """
        # Split the budget between events before and after the anchor event;
        # "before" gets the rounded-down half.
        before_limit = math.floor(limit / 2.)
        after_limit = limit - before_limit

        now_token = yield self.hs.get_event_sources().get_current_token()

        # A user who is not currently in the room is treated as "peeking"
        # for visibility filtering purposes.
        users = yield self.store.get_users_in_room(room_id)
        is_peeking = user.to_string() not in users

        def filter_evts(events):
            # Drop any events this user is not allowed to see.
            return filter_events_for_client(
                self.store,
                user.to_string(),
                events,
                is_peeking=is_peeking
            )

        event = yield self.store.get_event(event_id, get_prev_content=True,
                                           allow_none=True)
        if not event:
            defer.returnValue(None)
            # Unreached after returnValue; kept as an explicit generator stop.
            return

        # If the anchor event itself is filtered out, the caller may not
        # access it at all.
        filtered = yield(filter_evts([event]))
        if not filtered:
            raise AuthError(
                403,
                "You don't have permission to access that event."
            )

        results = yield self.store.get_events_around(
            room_id, event_id, before_limit, after_limit
        )

        results["events_before"] = yield filter_evts(results["events_before"])
        results["events_after"] = yield filter_evts(results["events_after"])
        results["event"] = event

        # State is taken as of the last event returned (or the anchor event
        # itself when nothing follows it).
        if results["events_after"]:
            last_event_id = results["events_after"][-1].event_id
        else:
            last_event_id = event_id

        state = yield self.store.get_state_for_events(
            [last_event_id], None
        )
        results["state"] = state[last_event_id].values()

        # Convert the raw room stream keys into full stream tokens so the
        # caller can paginate from either edge of the window.
        results["start"] = now_token.copy_and_replace(
            "room_key", results["start"]
        ).to_string()

        results["end"] = now_token.copy_and_replace(
            "room_key", results["end"]
        ).to_string()

        defer.returnValue(results)
class RoomEventSource(object):
    """Event-stream source producing room timeline events."""

    def __init__(self, hs):
        self.store = hs.get_datastore()

    @defer.inlineCallbacks
    def get_new_events(
            self,
            user,
            from_key,
            limit,
            room_ids,
            is_guest,
            explicit_room_id=None,
    ):
        # We just ignore the key for now.

        to_key = yield self.get_current_key()

        from_token = RoomStreamToken.parse(from_key)
        if from_token.topological:
            # Live streams should only ever carry stream tokens; fall back
            # to the stream part if a topological token sneaks in.
            logger.warn("Stream has topological part!!!! %r", from_key)
            from_key = "s%s" % (from_token.stream,)

        app_service = self.store.get_app_service_by_user_id(
            user.to_string()
        )
        if app_service:
            # Application services read from their own (broader) room stream.
            events, end_key = yield self.store.get_appservice_room_stream(
                service=app_service,
                from_key=from_key,
                to_key=to_key,
                limit=limit,
            )
        else:
            # Merge the user's own membership changes with timeline events
            # from each of their rooms, then sort into stream order.
            room_events = yield self.store.get_membership_changes_for_user(
                user.to_string(), from_key, to_key
            )

            room_to_events = yield self.store.get_room_events_stream_for_rooms(
                room_ids=room_ids,
                from_key=from_key,
                to_key=to_key,
                limit=limit or 10,
                order='ASC',
            )

            events = list(room_events)
            events.extend(e for evs, _ in room_to_events.values() for e in evs)
            events.sort(key=lambda e: e.internal_metadata.order)

            if limit:
                events[:] = events[:limit]

            # The next pagination key is taken from the last event returned,
            # or the current end of stream when nothing matched.
            if events:
                end_key = events[-1].internal_metadata.after
            else:
                end_key = to_key

        defer.returnValue((events, end_key))

    def get_current_key(self):
        return self.store.get_room_events_max_id()

    def get_current_key_for_room(self, room_id):
        return self.store.get_room_events_max_id(room_id)

    @defer.inlineCallbacks
    def get_pagination_rows(self, user, config, key):
        # `key` is the room ID to paginate within (passed through as room_id).
        events, next_key = yield self.store.paginate_room_events(
            room_id=key,
            from_key=config.from_key,
            to_key=config.to_key,
            direction=config.direction,
            limit=config.limit,
        )

        defer.returnValue((events, next_key))
| {
"content_hash": "77014ab745ab73a7d45469f062d8ebda",
"timestamp": "",
"source": "github",
"line_count": 493,
"max_line_length": 83,
"avg_line_length": 31.093306288032455,
"alnum_prop": 0.5108617652814926,
"repo_name": "TribeMedia/synapse",
"id": "99cb7db0db40d97995a93b3c29377b02f2003d3a",
"size": "15939",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "synapse/handlers/room.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4376"
},
{
"name": "HTML",
"bytes": "9046"
},
{
"name": "JavaScript",
"bytes": "176441"
},
{
"name": "Perl",
"bytes": "31852"
},
{
"name": "Python",
"bytes": "2748398"
},
{
"name": "Shell",
"bytes": "7827"
}
],
"symlink_target": ""
} |
@interface SFModularization ()
// All registered modules, ordered so high-priority modules come first.
@property (strong, nonatomic) NSMutableArray *modules;
// Module name -> module instance.
@property (strong, nonatomic) NSMutableDictionary *nameModuleMap;
// Protocol name -> module instance bound to that protocol.
@property (strong, nonatomic) NSMutableDictionary *protocolModuleMap;
// Event name -> mutable array of listener modules.
@property (strong, nonatomic) NSMutableDictionary *eventModulesMap;
@end
@implementation SFModularization

// Process-wide singleton accessor.
// NOTE(review): "sharedInstence" (sic) is the published API name; keep it
// to avoid breaking existing callers.
+ (instancetype)sharedInstence {
    static id sharedInstence = nil;
    static dispatch_once_t predicate;
    dispatch_once(&predicate, ^{
        sharedInstence = [[self alloc] init];
    });
    return sharedInstence;
}

- (instancetype)init {
    self = [super init];
    if (self) {
        _modules = [NSMutableArray arrayWithCapacity:5];
        _nameModuleMap = [NSMutableDictionary dictionary];
        _protocolModuleMap = [NSMutableDictionary dictionary];
        _eventModulesMap = [NSMutableDictionary dictionary];
    }
    return self;
}

// Invokes `aSelector` on `target`, passing (action, params) as the two
// arguments. The clang pragma silences the ARC performSelector leak
// warning raised for dynamic selectors.
- (id)performSelector:(SEL)aSelector target:(id)target action:(NSString *)action params:(NSDictionary *)params {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
    return [target performSelector:aSelector withObject:action withObject:params];
#pragma clang diagnostic pop
}

// Looks a module up by its registered name; returns nil when not found.
- (id<SFModuleProtocol>)moduleNamed:(NSString *)moduleName {
    if (!moduleName) {
        return nil;
    }
    id<SFModuleProtocol> module = [self.nameModuleMap objectForKey:moduleName];
    return module;
}

// Inserts `module` into `array` honouring its (optional) priority:
// high-priority modules go to the front, everything else is appended.
//
// BUG FIX: the original tested `priority==SFModulePriorityHigh` in both
// the `if` and the `else if`, leaving the second branch unreachable dead
// code; the duplicate branch was removed. Observable behaviour unchanged.
- (void)addModule:(id<SFModuleProtocol>)module intoArray:(NSMutableArray *)array {
    SFModulePriority priority = SFModulePriorityDefault;
    if ([module respondsToSelector:@selector(modulePriority)]) {
        priority = [module modulePriority];
    }
    if (priority == SFModulePriorityHigh) {
        [array insertObject:module atIndex:0];
    } else {
        [array addObject:module];
    }
}

#pragma mark - public method

// Calls `block` once for every registered module, in stored order.
- (void)enumerateModules:(void(^)(id<SFModuleProtocol>))block {
    if (!block) {
        return;
    }
    [_modules enumerateObjectsUsingBlock:^(id _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
        block(obj);
    }];
}

// Registers a module under its name (moduleName, or its class name as a
// fallback) and binds it to each of the given protocols. Fails when the
// module is nil or the name is already taken; a protocol that is already
// bound elsewhere is skipped with a warning.
- (BOOL)registerModule:(id<SFModuleProtocol>)module protocols:(NSArray<Protocol *> *)protocols {
    if (!module) {
        NSLog(@"[warn] register module, module can't be nil");
        return NO;
    }
    NSString *moduleName = nil;
    if ([module respondsToSelector:@selector(moduleName)]) {
        moduleName = [module moduleName];
    } else {
        moduleName = NSStringFromClass([module class]);
    }
    if ([self.nameModuleMap objectForKey:moduleName]) {
        NSLog(@"[warn] register module name:%@, already exist:%@", moduleName, [self.nameModuleMap objectForKey:moduleName]);
        return NO;
    }
    [self.nameModuleMap setObject:module forKey:moduleName];
    [protocols enumerateObjectsUsingBlock:^(Protocol * _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
        NSString *protocolName = NSStringFromProtocol(obj);
        id premodule = [_protocolModuleMap objectForKey:protocolName];
        if (premodule) {
            NSLog(@"[warn] register protocol%@, already exist:%@", protocolName, premodule);
        } else {
            [_protocolModuleMap setObject:module forKey:protocolName];
        }
    }];
    [self addModule:module intoArray:self.modules];
    return YES;
}

// Subscribes `module` to `event`, creating the listener list on demand.
- (void)addListener:(id<SFModuleProtocol>)module toEvent:(NSString *)event {
    if (!event || !module) {
        NSLog(@"[warn] add listener, module or event can't be nil");
        return;
    }
    NSMutableArray *modules = [_eventModulesMap objectForKey:event];
    if (!modules) {
        modules = [NSMutableArray array];
    }
    [self addModule:module intoArray:modules];
    [_eventModulesMap setObject:modules forKey:event];
}

// Unsubscribes `module` from `event`; a no-op when it was not listening.
- (void)removeListener:(id<SFModuleProtocol>)module toEvent:(NSString *)event {
    NSMutableArray *modules = [_eventModulesMap objectForKey:event];
    [modules removeObject:module];
}

// Drops every listener registered for `event`.
- (void)removeAllListenersToEvent:(NSString *)event {
    [_eventModulesMap removeObjectForKey:event];
}

// Returns the module registered for `protocol`, or nil (with an error log)
// when none was bound.
- (id)moduleConformsToProtocol:(Protocol *)protocol {
    if (!protocol) {
        NSLog(@"[warn] get module from protocol, protocol can't be nil");
        return nil;
    }
    NSString *protocolName = NSStringFromProtocol(protocol);
    id module = [_protocolModuleMap objectForKey:protocolName];
    if (!module) {
        NSLog(@"[error] can't find module conforms to protocol:%@", protocolName);
    }
    return module;
}

// Broadcasts `event` (with `params`) to every listener implementing
// receiveEvent:params:.
- (void)sendEvent:(NSString *)event params:(NSDictionary *)params {
    NSMutableArray<id<SFModuleProtocol>> *modules = [_eventModulesMap objectForKey:event];
    [modules enumerateObjectsUsingBlock:^(id<SFModuleProtocol> _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
        if ([obj respondsToSelector:@selector(receiveEvent:params:)]) {
            [obj receiveEvent:event params:params];
        }
    }];
}

// Local (in-process) action dispatch; see the isRemote variant below.
- (id)performAction:(NSString *)actionName toModuleNamed:(NSString *)moduleName params:(NSDictionary *)params {
    return [self performAction:actionName toModuleNamed:moduleName params:params isRemote:NO];
}

// Dispatches `actionName` to the module registered as `moduleName`.
- (id)performAction:(NSString *)actionName toModuleNamed:(NSString *)moduleName params:(NSDictionary *)params isRemote:(BOOL)isRemote {
    if (!moduleName || !actionName) {
        NSLog(@"[warn] performAction:%@ toModuleNamed:%@, params can't be nil", actionName, moduleName);
        return nil;
    }
    id module = [self moduleNamed:moduleName];
    if (!module) {
        NSLog(@"[error] module named:%@ not found", moduleName);
        return nil;
    }
    // Remote call: first verify the module whitelists this action via
    // canPerformRemoteAction:params:.
    if (isRemote) {
        SEL canPerformSelector = NSSelectorFromString(@"canPerformRemoteAction:params:");
        if (![module respondsToSelector:canPerformSelector] || ![[self performSelector:canPerformSelector target:module action:actionName params:params] boolValue]) {
            NSLog(@"[error] module:%@ can't perform remote action:%@", module, actionName);
            return nil;
        }
    }
    SEL selector = NSSelectorFromString(@"performAction:params:");
    if ([module respondsToSelector:selector]) {
        return [self performSelector:selector target:module action:actionName params:params];
    } else {
        NSLog(@"[warn] module:%@ not implements performAction:", module);
        return nil;
    }
}

@end
| {
"content_hash": "15a6dea5ded2b0036e66cec95d8fc096",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 166,
"avg_line_length": 33.333333333333336,
"alnum_prop": 0.6732307692307692,
"repo_name": "sofach/SFModularization",
"id": "cc606c6792af39d863366f9a67582b93283bb599",
"size": "6719",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SFModularization/lib/SFModularization.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "32447"
},
{
"name": "Ruby",
"bytes": "651"
}
],
"symlink_target": ""
} |
from .count import smartseq_count, droplet_count
from .quant import quant

# BUG FIX: __all__ previously listed "count" and "detect", names that are
# never imported or defined in this package, so `from brie.bin import *`
# would raise AttributeError. Export the names actually bound above.
__all__ = ["smartseq_count", "droplet_count", "quant"]
"content_hash": "056321035d3f89bc9789156888d82a16",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 48,
"avg_line_length": 26,
"alnum_prop": 0.7115384615384616,
"repo_name": "huangyh09/brie",
"id": "4f2697e40067d74f1c9a57d2a1ea4dbbea4e5825",
"size": "104",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "brie/bin/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "233075"
},
{
"name": "Shell",
"bytes": "1944"
}
],
"symlink_target": ""
} |
package org.spongepowered.api.util;
import com.google.common.collect.ImmutableSet;
import java.util.Arrays;
import java.util.Collection;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.function.Predicate;
/**
* Utility methods to help with function work
*/
public class Functional {
private Functional() {
}
/**
* Perform an AND using an array of predicates.
*
* @param predicates The predicates to AND
* @param <E> The type to accept
* @return The combined predicate
*/
@SafeVarargs
@SuppressWarnings("varargs")
public static <E> Predicate<E> predicateAnd(Predicate<E>... predicates) {
return predicateAnd(Arrays.asList(predicates));
}
/**
* Perform an AND using an iterable of predicates.
*
* @param predicates The predicates to and
* @param <E> The type to accept
* @return The combined predicate
*/
public static <E> Predicate<E> predicateAnd(Iterable<Predicate<E>> predicates) {
return e -> {
for (Predicate<E> pred : predicates) {
if (!pred.test(e)) {
return false;
}
}
return true;
};
}
public static <E> Predicate<E> predicateIn(Collection<E> collection) {
return collection::contains;
}
public static <E> com.google.common.base.Predicate<E> java8ToGuava(Predicate<E> predicate) {
return predicate::test;
}
/**
* Get the value of an {@link Optional} as either a zero- or one-element immutable set.
*
* @param value The value to get as a set
* @param <T> The type
* @return The immutable set containing any value the optional has
*/
public static <T> Set<T> optionalAsSet(Optional<T> value) {
return value.isPresent() ? ImmutableSet.of(value.get()) : ImmutableSet.of();
}
/**
* Execute a callable on <strong>the current thread</strong>, capturing the result or any exceptions that may be thrown into a {@link
* CompletableFuture}.
*
* @param call The callable to execute
* @param <T> The type of value returned
* @return The future holding the result
*/
public static <T> CompletableFuture<T> failableFuture(Callable<T> call) {
CompletableFuture<T> ret = new CompletableFuture<>();
try {
ret.complete(call.call());
} catch (Exception e) {
ret.completeExceptionally(e);
}
return ret;
}
/**
* Execute a callable on the provided executor, capturing the result or any exceptions that may be thrown into a {@link
* CompletableFuture}.
*
* @param call The callable to execute
* @param exec The executor to execute this task on
* @param <T> The type of value returned
* @return The future holding the result
*/
public static <T> CompletableFuture<T> asyncFailableFuture(Callable<T> call, Executor exec) {
CompletableFuture<T> ret = new CompletableFuture<>();
exec.execute(() -> {
try {
ret.complete(call.call());
} catch (Exception e) {
ret.completeExceptionally(e);
}
});
return ret;
}
} | {
"content_hash": "b0e494d62f062da3d531452b51113cca",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 137,
"avg_line_length": 30.63063063063063,
"alnum_prop": 0.6176470588235294,
"repo_name": "ryantheleach/SpongeAPI",
"id": "4e630341036607bf664c90986a8352148c62b226",
"size": "4650",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/org/spongepowered/api/util/Functional.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "4772579"
},
{
"name": "Shell",
"bytes": "81"
}
],
"symlink_target": ""
} |
package rest;
import java.sql.Date;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletContext;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.codehaus.jackson.annotate.JsonProperty;
import tm.AbonoMaster;
import tm.BoletaMaster;
import tm.UsuarioMaster;
import vos.Abono;
import vos.Boleta;
import vos.ListaBoletas;
import vos.Usuario;
@Path("boletas")
public class BoletaServices extends FestivAndesServices {
/**
* Atributo que usa la anotación @Context para tener el ServletContext de la conexión actual.
*/
@Context
private ServletContext context;
/**
 * Returns the real filesystem path of the WEB-INF/ConnectionData folder in
 * the current deployment on the server.
 * @return Path of the WEB-INF/ConnectionData folder for the current deploy.
 */
private String getPath() {
    return context.getRealPath("WEB-INF/ConnectionData");
}
// Rest
/**
 * Fetches every ticket (boleta) stored in the database.
 * @return HTTP 200 with the list of tickets, or HTTP 500 with an error
 *         message when the lookup fails.
 */
@GET
@Produces({MediaType.APPLICATION_JSON})
public Response getBoletas() {
    BoletaMaster boletaMaster = new BoletaMaster(getPath());
    ListaBoletas boletas;
    try {
        boletas = boletaMaster.darBoletas();
    } catch (Exception e) {
        return Response.status(500).entity(doErrorMessage(e)).build();
    }
    return Response.status(200).entity(boletas).build();
}
/**
 * Stores a new ticket (boleta) in the database.
 * @param boleta Ticket to persist.
 * @return HTTP 200 with the ticket, or HTTP 500 with an error message when
 *         persisting fails.
 */
@PUT
@Path("/boleta")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response addBoleta(Boleta boleta) {
    BoletaMaster boletaMaster = new BoletaMaster(getPath());
    try {
        boletaMaster.addBoleta(boleta);
    } catch (Exception e) {
        return Response.status(500).entity(doErrorMessage(e)).build();
    }
    return Response.status(200).entity(boleta).build();
}
/**
 * Updates an existing ticket (boleta) in the database.
 * @param boleta Ticket carrying the new data.
 * @return HTTP 200 with the ticket, or HTTP 500 with an error message when
 *         the update fails.
 */
@POST
@Path("/boleta")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response updateBoleta(Boleta boleta) {
    BoletaMaster boletaMaster = new BoletaMaster(getPath());
    try {
        boletaMaster.updateBoleta(boleta);
    } catch (Exception e) {
        return Response.status(500).entity(doErrorMessage(e)).build();
    }
    return Response.status(200).entity(boleta).build();
}
/**
* Elimina una boleta
* @param id Id de la boleta a borrar
* @return Resultado de intentar eliminar la boleta
*/
@DELETE
@Path("/{id}/fecha/{fecha}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response deleteBoleta(@PathParam("id") int id, @PathParam("fecha") Date fechaEliminacion) {
Map<String, Object> registro;
BoletaMaster tm = new BoletaMaster(getPath());
try {
Boleta boleta = tm.darBoleta(id);
registro = boleta.darFactura();
tm.deleteBoleta(boleta, fechaEliminacion);
} catch (Exception e) {
return Response.status(500).entity(doErrorMessage(e)).build();
}
return Response.status(200).entity(registro).build();
}
/**
* Elimina una boleta con un usuario registrado
* @param id Id de la boleta a borrar
* @param id_usuario Id del usuario
* @param fechaEliminacion Fecha de eliminacion de la boleta
* @return Resultado de intentar eliminar la boleta
*/
@DELETE
@Path("/{id}/usuario/{id_usuario}/fecha/{fecha}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response deleteBoleta(@PathParam("id") int id, @PathParam("fecha") Date fechaEliminacion, @PathParam("id_usuario") int id_usuario) {
Map<String, Object> registro;
BoletaMaster tm = new BoletaMaster(getPath());
UsuarioMaster um = new UsuarioMaster(getPath());
try {
Usuario x = um.darUsuario(id_usuario);
Boleta boleta = tm.darBoleta(id);
if(x == null)
throw new Exception("El usuario no existe");
else if(x.getId() != boleta.getId_usuario())
throw new Exception("Solo puede eliminar sus propias boletas");
registro = boleta.darFactura();
tm.deleteBoleta(boleta, fechaEliminacion);
} catch (Exception e) {
return Response.status(500).entity(doErrorMessage(e)).build();
}
return Response.status(200).entity(registro).build();
}
@GET
@Path("/compraBoletas/{id}/{fecha1}/{fecha2}/")
@Produces({MediaType.APPLICATION_JSON})
public Response consultarCompraBoletas(@PathParam("id") int id, @PathParam("fecha1") Date fecha1, @PathParam("fecha2") Date fecha2) {
BoletaMaster tm = new BoletaMaster(getPath());
ArrayList<HashMap<String, Object>> respuesta = null;
try {
respuesta = tm.consultarCompraBoletas(id, fecha1, fecha2);
} catch(Exception e) {
return Response.status(500).entity(doErrorMessage(e)).build();
}
return Response.status(200).entity(respuesta).build();
}
}
| {
"content_hash": "203181467dcff10b5fe7d10106cca283",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 140,
"avg_line_length": 29.482758620689655,
"alnum_prop": 0.7214424951267057,
"repo_name": "ravelinx22/Iteracion2Sistrans",
"id": "9cb00823cd90a94b7c8d149b090486ef0bdb925e",
"size": "5133",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/rest/BoletaServices.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "126"
},
{
"name": "Java",
"bytes": "338708"
}
],
"symlink_target": ""
} |
# Base controller for the application. Every other controller inherits from
# this class, so the declarations here (global helpers and CSRF protection)
# apply to all requests.
class ApplicationController < ActionController::Base
  helper :all # make every helper module available to every view
  # See ActionController::RequestForgeryProtection for details
  # Uncomment the :secret if you're not using the cookie session store
  protect_from_forgery # :secret => '09df61bbb1d592d41d68a3205f2816d0'
  # See ActionController::Base for details
  # Uncomment this to filter the contents of submitted sensitive data parameters
  # from your application log (in this case, all fields with names like "password").
  # filter_parameter_logging :password
end
| {
"content_hash": "0fc5bbebeebaf2df68be1979974cf2f6",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 85,
"avg_line_length": 47.166666666666664,
"alnum_prop": 0.7756183745583038,
"repo_name": "remi/aux_codes",
"id": "f91eee790c19af583477c7a883ff1033b19c26d6",
"size": "720",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example_rails_app/app/controllers/application.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "10374"
},
{
"name": "JavaScript",
"bytes": "148"
},
{
"name": "Perl",
"bytes": "477"
},
{
"name": "Ruby",
"bytes": "49767"
}
],
"symlink_target": ""
} |
// Portions of this code come from the following open source works
/*
* uuidP.h -- private header file for uuids
*
* Copyright (C) 1996, 1997 Theodore Ts'o.
*
* %Begin-Header%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, and the entire permission notice in its entirety,
* including the disclaimer of warranties.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ALL OF
* WHICH ARE HEREBY DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF NOT ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
* %End-Header%
*/
/*
* Internal routine for packing UUIDs
*
* Copyright (C) 1996, 1997 Theodore Ts'o.
*
* %Begin-Header%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, and the entire permission notice in its entirety,
* including the disclaimer of warranties.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ALL OF
* WHICH ARE HEREBY DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF NOT ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
* %End-Header%
*/
/*
* unparse.c -- convert a UUID to string
*
* Copyright (C) 1996, 1997 Theodore Ts'o.
*
* %Begin-Header%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, and the entire permission notice in its entirety,
* including the disclaimer of warranties.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ALL OF
* WHICH ARE HEREBY DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF NOT ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
* %End-Header%
*/
/*
* parse.c --- UUID parsing
*
* Copyright (C) 1996, 1997 Theodore Ts'o.
*
* %Begin-Header%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, and the entire permission notice in its entirety,
* including the disclaimer of warranties.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ALL OF
* WHICH ARE HEREBY DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF NOT ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
* %End-Header%
*/
#include "libuuid.h"
#include <ctype.h>
#include <stddef.h>
#include <string.h>
#include <stdlib.h>
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif
/*
* Note that RFC4122 defines UUID in more details:
*
* Field Data Type Octet Note
* -------------------------------------------------
* time_low unsigned 32 0-3 The low field of the
* bit integer timestamp
*
* time_mid unsigned 16 4-5 The middle field of the
* bit integer timestamp
*
* time_hi_and_version unsigned 16 6-7 The high field of the
* bit integer timestamp multiplexed
* with the version number
*
* clock_seq_hi_and_rese unsigned 8 8 The high field of the
* rved bit integer clock sequence
* multiplexed with the
* variant
*
* clock_seq_low unsigned 8 9 The low field of the
* bit integer clock sequence
*
* node unsigned 48 10-15 The spatially unique
* bit integer node identifier
*
* We have clock_seq_hi_and_reserved (8bit) and clock_seq_low (8bit)
* merged into clock_seq (16bit).
*/
/*
 * Unpacked, host-endian representation of a UUID. Field layout follows the
 * RFC 4122 table above, except that clock_seq_hi_and_reserved (8 bit) and
 * clock_seq_low (8 bit) are merged into the single 16-bit clock_seq field.
 */
struct uuid {
	uint32_t time_low;            /* octets 0-3: low field of the timestamp */
	uint16_t time_mid;            /* octets 4-5: middle field of the timestamp */
	uint16_t time_hi_and_version; /* octets 6-7: high timestamp bits + version */
	uint16_t clock_seq;           /* octets 8-9: clock sequence + variant bits */
	uint8_t node[6];              /* octets 10-15: spatially unique node id */
};
/*
 * Serializes an unpacked "struct uuid" into the canonical 16-byte wire
 * format: every multi-byte field is written most-significant byte first
 * (big-endian), followed by the 6-byte node identifier.
 */
static void uuid_pack(const struct uuid *uu, uuid_t ptr)
{
	unsigned char *out = ptr;

	out[0] = (unsigned char) (uu->time_low >> 24);
	out[1] = (unsigned char) (uu->time_low >> 16);
	out[2] = (unsigned char) (uu->time_low >> 8);
	out[3] = (unsigned char) (uu->time_low);

	out[4] = (unsigned char) (uu->time_mid >> 8);
	out[5] = (unsigned char) (uu->time_mid);

	out[6] = (unsigned char) (uu->time_hi_and_version >> 8);
	out[7] = (unsigned char) (uu->time_hi_and_version);

	out[8] = (unsigned char) (uu->clock_seq >> 8);
	out[9] = (unsigned char) (uu->clock_seq);

	memcpy(out + 10, uu->node, 6);
}
/*
 * Parses the 36-character textual UUID in [in_start, in_end) into the
 * packed 16-byte binary form "uu".
 *
 * Accepted layout: 8-4-4-4-12 hex digits separated by '-'
 * (e.g. "f0b14c77-32bf-22aa-64d3-4f205c320741").
 *
 * Returns 0 on success, -1 if the range is not exactly 36 characters long
 * or is malformed.
 *
 * Fix: the previous validation loop ran to i <= 36 and inspected
 * in_start[36], i.e. one byte PAST the caller-supplied range. A
 * range-based parser must not assume NUL termination, so that read was
 * out of bounds, and it also spuriously rejected valid ranges whose
 * following byte happened to be neither NUL nor a hex digit. The loop
 * now stops at i < 36 and only looks at the 36 characters of the range.
 */
int uuid_parse_range(const char *in_start, const char *in_end, uuid_t uu)
{
	struct uuid uuid;
	int i;
	const char *cp;
	char buf[3];

	/* A textual UUID is always exactly 36 characters long. */
	if ((in_end - in_start) != 36)
		return -1;

	for (i = 0, cp = in_start; i < 36; i++, cp++) {
		/* Positions 8, 13, 18 and 23 must hold the group separators. */
		if ((i == 8) || (i == 13) || (i == 18) ||
		    (i == 23)) {
			if (*cp == '-')
				continue;
			return -1;
		}
		/* Every other position must be a hex digit. */
		if (!isxdigit(static_cast<unsigned char>(*cp)))
			return -1;
	}

	/* Each strtoul call stops at the '-' separator validated above. */
	uuid.time_low = strtoul(in_start, NULL, 16);
	uuid.time_mid = static_cast<uint16_t>(strtoul(in_start + 9, NULL, 16));
	uuid.time_hi_and_version = static_cast<uint16_t>(strtoul(in_start + 14, NULL, 16));
	uuid.clock_seq = static_cast<uint16_t>(strtoul(in_start + 19, NULL, 16));

	/* The node field is converted one octet (two hex digits) at a time
	 * through a small NUL-terminated scratch buffer. */
	cp = in_start + 24;
	buf[2] = 0;
	for (i = 0; i < 6; i++) {
		buf[0] = *cp++;
		buf[1] = *cp++;
		uuid.node[i] = static_cast<uint8_t>(strtoul(buf, NULL, 16));
	}

	uuid_pack(&uuid, uu);
	return 0;
}
static char const hexdigits_lower[] = "0123456789abcdef";

/*
 * Formats the 16-byte UUID "uuid" into "buf" as the canonical
 * 8-4-4-4-12 string, drawing digits from the "fmt" digit table, and
 * NUL-terminates it. buf must hold at least 37 bytes.
 */
static void uuid_fmt(const uuid_t uuid, char *buf, const char fmt[])
{
	char *out = buf;

	for (int octet = 0; octet < 16; octet++) {
		/* Group separators go before octets 4, 6, 8 and 10. */
		if (octet == 4 || octet == 6 || octet == 8 || octet == 10) {
			*out++ = '-';
		}
		unsigned int value = uuid[octet];
		*out++ = fmt[value >> 4];
		*out++ = fmt[value & 0x0f];
	}

	*out = '\0';
}
/*
 * Converts "uu" to its canonical lower-case string form in "out"
 * (36 characters plus a terminating NUL; out must hold >= 37 bytes).
 */
void uuid_unparse_lower(const uuid_t uu, char *out)
{
	uuid_fmt(uu, out, hexdigits_lower);
}
#ifdef __cplusplus
}
#endif
| {
"content_hash": "f6ee9179fcfb35de18aaf40d2e8cc26d",
"timestamp": "",
"source": "github",
"line_count": 284,
"max_line_length": 82,
"avg_line_length": 35.26056338028169,
"alnum_prop": 0.6690633113640903,
"repo_name": "googlestadia/pal",
"id": "8ee45ed3ee098d082c7e7789b3efe02179bf6325",
"size": "11427",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "src/util/imported/libuuid/libuuid.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "837694"
},
{
"name": "C++",
"bytes": "71724218"
},
{
"name": "CMake",
"bytes": "144498"
},
{
"name": "Python",
"bytes": "150479"
}
],
"symlink_target": ""
} |
A Prolog compiler with an emphasis on running on, and generating code for, as many platforms as possible.
### Key goals:
* Reach: run identically on many platforms
* Speed: have reasonable performance, try to beat SWI Prolog
* Standards: move towards ISO compliance
* Extensibility: easy to implement native predicates
* Constraints: include a full Constraint Handling Rules system
### Philosophy:
Anaphor Prolog is a meta-programming and knowledge encoding language rather than the language that is used in performance-critical loops. It will provide a REPL but will most frequently be embedded in other applications.
| {
"content_hash": "c7129f7397c0abedbca3f57e6ffccee5",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 220,
"avg_line_length": 56,
"alnum_prop": 0.8035714285714286,
"repo_name": "nickmain/anaphor-prolog",
"id": "0b86c6a801dd5f84d7b0811b3e1c2c14ebe37c24",
"size": "634",
"binary": false,
"copies": "1",
"ref": "refs/heads/primary",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Racket",
"bytes": "44191"
}
],
"symlink_target": ""
} |
  
# react-youtube
Simple [React](http://facebook.github.io/react/) component acting as a thin layer over the [YouTube IFrame Player API](https://developers.google.com/youtube/iframe_api_reference)
## Features
- url playback
- [playback event bindings](https://developers.google.com/youtube/iframe_api_reference#Events)
- [customizable player options](https://developers.google.com/youtube/player_parameters)
## Installation
```bash
$ npm install react-youtube
```
## Usage
```js
<YouTube
videoId={string} // defaults -> null
id={string} // defaults -> null
className={string} // defaults -> null
containerClassName={string} // defaults -> ''
opts={obj} // defaults -> {}
onReady={func} // defaults -> noop
onPlay={func} // defaults -> noop
onPause={func} // defaults -> noop
onEnd={func} // defaults -> noop
onError={func} // defaults -> noop
onStateChange={func} // defaults -> noop
onPlaybackRateChange={func} // defaults -> noop
onPlaybackQualityChange={func} // defaults -> noop
/>
```
For convenience it is also possible to access the PlayerState constants through react-youtube:
`YouTube.PlayerState` contains the values that are used by the [YouTube IFrame Player API](https://developers.google.com/youtube/iframe_api_reference#onStateChange).
## Example
```js
import React from 'react';
import YouTube from 'react-youtube';
class Example extends React.Component {
render() {
const opts = {
height: '390',
width: '640',
playerVars: {
// https://developers.google.com/youtube/player_parameters
autoplay: 1,
},
};
return <YouTube videoId="2g811Eo7K8U" opts={opts} onReady={this._onReady} />;
}
_onReady(event) {
// access to player in all event handlers via event.target
event.target.pauseVideo();
}
}
```
## Controlling the player
You can access & control the player in a way similar to the [official api](https://developers.google.com/youtube/iframe_api_reference#Events):
> The ~~API~~ _component_ will pass an event object as the sole argument to each of ~~those functions~~ _the event handler props_. The event object has the following properties:
>
> - The event's `target` identifies the video player that corresponds to the event.
> - The event's `data` specifies a value relevant to the event. Note that the `onReady` event does not specify a `data` property.
# License
MIT
| {
"content_hash": "cd3c9ea298664f249167c83284663f4c",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 244,
"avg_line_length": 35.5875,
"alnum_prop": 0.6592904812082895,
"repo_name": "troybetz/react-youtube",
"id": "ab24c650b09a7332dc875039c034a54a558b52fd",
"size": "2847",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "208"
},
{
"name": "JavaScript",
"bytes": "21980"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_102-google-v7) on Wed Jan 11 15:17:54 PST 2017 -->
<title>FsFile.Filter</title>
<meta name="date" content="2017-01-11">
<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="FsFile.Filter";
}
}
catch(err) {
}
//-->
var methods = {"i0":6};
var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]};
var altColor = "altColor";
var rowColor = "rowColor";
var tableTab = "tableTab";
var activeTableTab = "activeTableTab";
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-all.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../org/robolectric/res/FsFile.html" title="interface in org.robolectric.res"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../org/robolectric/res/OpaqueFileLoader.html" title="class in org.robolectric.res"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?org/robolectric/res/FsFile.Filter.html" target="_top">Frames</a></li>
<li><a href="FsFile.Filter.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">org.robolectric.res</div>
<h2 title="Interface FsFile.Filter" class="title">Interface FsFile.Filter</h2>
</div>
<div class="contentContainer">
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>Enclosing interface:</dt>
<dd><a href="../../../org/robolectric/res/FsFile.html" title="interface in org.robolectric.res">FsFile</a></dd>
</dl>
<hr>
<br>
<pre>public static interface <span class="typeNameLabel">FsFile.Filter</span></pre>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd"> </span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd"> </span></span><span id="t3" class="tableTab"><span><a href="javascript:show(4);">Abstract Methods</a></span><span class="tabEnd"> </span></span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr id="i0" class="altColor">
<td class="colFirst"><code>boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/robolectric/res/FsFile.Filter.html#accept-org.robolectric.res.FsFile-">accept</a></span>(<a href="../../../org/robolectric/res/FsFile.html" title="interface in org.robolectric.res">FsFile</a> fsFile)</code> </td>
</tr>
</table>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method.detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="accept-org.robolectric.res.FsFile-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>accept</h4>
<pre>boolean accept(@NotNull
<a href="../../../org/robolectric/res/FsFile.html" title="interface in org.robolectric.res">FsFile</a> fsFile)</pre>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-all.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../org/robolectric/res/FsFile.html" title="interface in org.robolectric.res"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../org/robolectric/res/OpaqueFileLoader.html" title="class in org.robolectric.res"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?org/robolectric/res/FsFile.Filter.html" target="_top">Frames</a></li>
<li><a href="FsFile.Filter.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| {
"content_hash": "f0b14c7732bfb22c1aa64d34c5320741",
"timestamp": "",
"source": "github",
"line_count": 224,
"max_line_length": 391,
"avg_line_length": 34.36607142857143,
"alnum_prop": 0.6286048324240062,
"repo_name": "robolectric/robolectric.github.io",
"id": "9b6a7132a7776556b98f909c5724c6a5c9667a8e",
"size": "7698",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "javadoc/3.2/org/robolectric/res/FsFile.Filter.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "132673"
},
{
"name": "HTML",
"bytes": "277730"
},
{
"name": "JavaScript",
"bytes": "24371"
},
{
"name": "Ruby",
"bytes": "1051"
},
{
"name": "SCSS",
"bytes": "64100"
},
{
"name": "Shell",
"bytes": "481"
}
],
"symlink_target": ""
} |
package de.framey.lab.evil.eviltentaclesofdeath.asm.command;
import java.util.Stack;
import org.objectweb.asm.tree.AbstractInsnNode;
import org.objectweb.asm.tree.ClassNode;
import org.objectweb.asm.tree.MethodInsnNode;
import org.objectweb.asm.tree.MethodNode;
import de.framey.lab.evil.eviltentaclesofdeath.Tentacle;
import de.framey.lab.evil.eviltentaclesofdeath.asm.AsmUtil;
import de.framey.lab.evil.eviltentaclesofdeath.asm.FrameTable;
import de.framey.lab.evil.eviltentaclesofdeath.asm.JumpTable;
import de.framey.lab.evil.eviltentaclesofdeath.asm.command.NondeterministicLineNumberGotoCommand.Key;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
/**
 * Represents the GOTO_ANY_ONE_OF command: a nondeterministic goto that is
 * meant to transfer control to one of several target line numbers.
 *
 * <p>NOTE(review): the previous javadoc described the STATE command
 * ("returns the current method state"), which does not match this class;
 * it appears to have been copy-pasted from another command and has been
 * corrected. Also note that the transformation is not implemented yet:
 * {@link #injectInstructions} only removes the matched call from the
 * instruction stream and {@link #applyGlobalChanges} is an empty stub
 * (see the TODO markers below).</p>
 *
 * @author Frank Meyfarth
 */
public class NondeterministicLineNumberGotoCommand implements GlobalCommand<Key> {

	/**
	 * Per-method key used to group the global changes of this command.
	 * Wraps the method's signature string as taken from
	 * {@code MethodNode.signature} (the generic signature; presumably may be
	 * null for non-generic methods — TODO confirm that null keys are safe).
	 */
	@Getter
	@EqualsAndHashCode
	@AllArgsConstructor
	public static final class Key {
		private String signature;
	}

	// Name of the Tentacle method whose call sites this command rewrites.
	private static final String METHOD = "GOTO_ANY_ONE_OF";
	// JVM method signature of METHOD, resolved reflectively from Tentacle.
	private static final String SIGNATURE = AsmUtil.getMethodSignature(Tentacle.class, METHOD);

	/**
	 * Matches instructions that invoke {@code Tentacle.GOTO_ANY_ONE_OF}
	 * (name and signature must both match).
	 */
	@Override
	public boolean doesInstructionFit(AbstractInsnNode ain, FrameTable frameTable, Stack<AbstractInsnNode> instructionStack) {
		return AsmUtil.isMethodSignatureMatching(ain, METHOD, SIGNATURE);
	}

	/**
	 * Removes the matched GOTO_ANY_ONE_OF call from the instruction stream.
	 * The replacement bytecode is not generated yet (see TODO).
	 */
	@Override
	public void injectInstructions(MethodNode mn, MethodInsnNode min, JumpTable jumpTable, FrameTable frameTable,
			Stack<AbstractInsnNode> instructionStack) {
		AsmUtil.removeCurrentMethodCall(instructionStack, frameTable);
		// TODO Call method from thread
	}

	/**
	 * Builds the grouping key for a method from its generic signature.
	 */
	@Override
	public Key getCommandKey(MethodNode mn) {
		return new Key(mn.signature);
	}

	/**
	 * Stub: class-level changes (wrapper / threadable method) are not
	 * implemented yet.
	 */
	@Override
	public void applyGlobalChanges(ClassNode cn, Key key) {
		// TODO Add wrapper and threadable method
	}
}
| {
"content_hash": "7fad70e945b91c79c4c3d6ccfb2884ba",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 126,
"avg_line_length": 34.049180327868854,
"alnum_prop": 0.7631198844487241,
"repo_name": "Franknjava/TheNightOfTheTentacles",
"id": "061291bfea3886e9ca2c4d3e75685b8791aa10ed",
"size": "2077",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "projects/EvilTentaclesOfDeath/src/main/java/de/framey/lab/evil/eviltentaclesofdeath/asm/command/NondeterministicLineNumberGotoCommand.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "171848"
}
],
"symlink_target": ""
} |
(function ($, window, undefined) {
/**
 * jQuery plugin entry point. Instantiates one simplyScroll controller per
 * element in the matched set and returns the set unchanged for chaining.
 */
$.fn.simplyScroll = function (options) {
    var attach = function () {
        new $.simplyScroll(this, options);
    };
    return this.each(attach);
};
// Default plugin options. Callers override any of these via the options
// argument of $.fn.simplyScroll; see the $.extend call in the constructor.
var defaults = {
    customClass: 'simply-scroll', // class added to the generated container element
    frameRate: 24, //No of movements per second
    speed: 1, //No of pixels per frame
    orientation: 'horizontal', //'horizontal or 'vertical' - not to be confused with device orientation
    auto: true,
    autoMode: 'loop', //auto = true, 'loop' or 'bounce',
    manualMode: 'end', //auto = false, 'loop' or 'end'
    direction: 'forwards', //'forwards' or 'backwards'.
    pauseOnHover: true, //autoMode = loop|bounce only
    pauseOnTouch: true, //" touch device only
    pauseButton: false, //" generates an extra element to allow manual pausing
    startOnLoad: false //use this to delay starting of plugin until all page assets have loaded
};
/**
 * simplyScroll controller constructor. Merges options, derives the mode
 * flags used throughout the plugin, generates the wrapper markup around
 * the scrolled element, and finally kicks off init() (immediately, or on
 * window load when startOnLoad is set).
 *
 * @param el      the element being scrolled (ul/ol/div etc.)
 * @param options per-instance overrides of the defaults above
 */
$.simplyScroll = function (el, options) {
    var self = this;
    this.o = $.extend({}, defaults, options || {});
    // Derived mode flags. Note: isHorizontal/isForwards are only true when
    // the option equals the default value ('horizontal' / 'forwards').
    this.isAuto = this.o.auto !== false && this.o.autoMode.match(/^loop|bounce$/) !== null;
    this.isHorizontal = this.o.orientation.match(/^horizontal|vertical$/) !== null && this.o.orientation == defaults.orientation;
    // RTL handling only applies to horizontal scrollers, keyed off <html dir="rtl">.
    this.isRTL = this.isHorizontal && $("html").attr('dir') == 'rtl';
    this.isForwards = !this.isAuto || (this.isAuto && this.o.direction.match(/^forwards|backwards$/) !== null && this.o.direction == defaults.direction) && !this.isRTL;
    this.isLoop = this.isAuto && this.o.autoMode == 'loop' || !this.isAuto && this.o.manualMode == 'loop';
    // Feature-detect touch and pick the matching start/move/end event names.
    this.supportsTouch = ('createTouch' in document);
    this.events = this.supportsTouch ?
        {
            start: 'touchstart MozTouchDown',
            move: 'touchmove MozTouchMove',
            end: 'touchend touchcancel MozTouchRelease'
        } :
        {start: 'mouseenter', end: 'mouseleave'};
    this.$list = $(el); //called on ul/ol/div etc
    var $items = this.$list.children();
    //generate extra markup: list -> clip div -> outer container div
    this.$list.addClass('simply-scroll-list')
        .wrap('<div class="simply-scroll-clip"></div>')
        .parent().wrap('<div class="' + this.o.customClass + ' simply-scroll-container"></div>');
    if (!this.isAuto) { //button placeholders for manual back/forward controls
        this.$list.parent().parent()
            .prepend('<div class="simply-scroll-forward"></div>')
            .prepend('<div class="simply-scroll-back"></div>');
    } else {
        if (this.o.pauseButton) {
            // An explicit pause button replaces hover-pausing.
            this.$list.parent().parent()
                .prepend('<div class="simply-scroll-btn simply-scroll-btn-pause"></div>');
            this.o.pauseOnHover = false;
        }
    }
    //wrap an extra div around the whole lot if elements scrolled aren't equal
    // (item sizes vary, detected by comparing first-item-size * count to the
    // summed total along the scroll axis)
    if ($items.length > 1) {
        var extra_wrap = false,
            total = 0;
        if (this.isHorizontal) {
            $items.each(function () {
                total += $(this).outerWidth(true);
            });
            extra_wrap = $items.eq(0).outerWidth(true) * $items.length !== total;
        } else {
            $items.each(function () {
                total += $(this).outerHeight(true);
            });
            extra_wrap = $items.eq(0).outerHeight(true) * $items.length !== total;
        }
        if (extra_wrap) {
            // Re-point $list at the new wrapper and give the inner element a
            // fixed size along the scroll axis so it can move as one unit.
            this.$list = this.$list.wrap('<div></div>').parent().addClass('simply-scroll-list');
            if (this.isHorizontal) {
                this.$list.children().css({"float": 'left', width: total + 'px'});
            } else {
                this.$list.children().css({height: total + 'px'});
            }
        }
    }
    if (!this.o.startOnLoad) {
        this.init();
    } else {
        //wait for load before completing setup (sizes are only reliable once
        //images and other assets have loaded)
        $(window).load(function () {
            self.init();
        });
    }
};
// Expose the plugin prototype as $.simplyScroll.fn (mirroring jQuery's own
// $.fn alias) and borrow jQuery's $.extend so the prototype can be
// augmented with methods below.
$.simplyScroll.fn = $.simplyScroll.prototype = {};
$.simplyScroll.fn.extend = $.simplyScroll.extend = $.extend;
$.simplyScroll.fn.extend({
// Completes plugin setup once the markup (and optionally the page) has
// loaded: measures items, sizes the scrolling clip, clones items for the
// loop modes, wires button/touch handlers and starts auto-scrolling.
init: function () {
this.$items = this.$list.children();
this.$clip = this.$list.parent(); //this is the element that scrolls
this.$container = this.$clip.parent();
this.$btnBack = $('.simply-scroll-back', this.$container);
this.$btnForward = $('.simply-scroll-forward', this.$container);
// Cache orientation-dependent names so the rest of the plugin stays
// axis-agnostic ('scroll' + scrollPos -> scrollTop / scrollLeft).
if (!this.isHorizontal) {
this.itemMax = this.$items.eq(0).outerHeight(true);
this.clipMax = this.$clip.height();
this.dimension = 'height';
this.moveBackClass = 'simply-scroll-btn-up';
this.moveForwardClass = 'simply-scroll-btn-down';
this.scrollPos = 'Top';
} else {
this.itemMax = this.$items.eq(0).outerWidth(true);
this.clipMax = this.$clip.width();
this.dimension = 'width';
this.moveBackClass = 'simply-scroll-btn-left';
this.moveForwardClass = 'simply-scroll-btn-right';
this.scrollPos = 'Left';
}
this.posMin = 0;
this.posMax = this.$items.length * this.itemMax;
// NOTE(review): itemMax is measured from the first item only, so equally
// sized items are assumed here (unequal lists were wrapped earlier).
var addItems = Math.ceil(this.clipMax / this.itemMax);
//auto scroll loop & manual scroll bounce or end(to-end)
if (this.isAuto && this.o.autoMode == 'loop') {
// Clone enough items onto one edge to fill the clip, so the
// wrap-around jump performed by resetPos() is not visible.
this.$list.css(this.dimension, this.posMax + (this.itemMax * addItems) + 'px');
this.posMax += (this.clipMax - this.o.speed);
if (this.isForwards) {
this.$items.slice(0, addItems).clone(true).appendTo(this.$list);
this.resetPosition = 0;
} else {
this.$items.slice(-addItems).clone(true).prependTo(this.$list);
this.resetPosition = this.$items.length * this.itemMax;
//due to inconsistent RTL implementation force back to LTR then fake
if (this.isRTL) {
this.$clip[0].dir = 'ltr';
//based on feedback seems a good idea to force float right
this.$items.css('float', 'right');
}
}
//manual and loop
} else if (!this.isAuto && this.o.manualMode == 'loop') {
// Manual loop clones items at BOTH ends (the user may scroll either
// way); a separate reset offset is kept per direction.
this.posMax += this.itemMax * addItems;
this.$list.css(this.dimension, this.posMax + (this.itemMax * addItems) + 'px');
this.posMax += (this.clipMax - this.o.speed);
var items_append = this.$items.slice(0, addItems).clone(true).appendTo(this.$list);
var items_prepend = this.$items.slice(-addItems).clone(true).prependTo(this.$list);
this.resetPositionForwards = this.resetPosition = addItems * this.itemMax;
this.resetPositionBackwards = this.$items.length * this.itemMax;
//extra events to force scroll direction change
var self = this;
this.$btnBack.bind(this.events.start, function () {
self.isForwards = false;
self.resetPosition = self.resetPositionBackwards;
});
this.$btnForward.bind(this.events.start, function () {
self.isForwards = true;
self.resetPosition = self.resetPositionForwards;
});
} else { //(!this.isAuto && this.o.manualMode=='end')
this.$list.css(this.dimension, this.posMax + 'px');
if (this.isForwards) {
this.resetPosition = 0;
} else {
this.resetPosition = this.$items.length * this.itemMax;
//due to inconsistent RTL implementation force back to LTR then fake
if (this.isRTL) {
this.$clip[0].dir = 'ltr';
//based on feedback seems a good idea to force float right
this.$items.css('float', 'right');
}
}
}
this.resetPos(); //ensure scroll position is reset
this.interval = null;
this.intervalDelay = Math.floor(1000 / this.o.frameRate);
if (!(!this.isAuto && this.o.manualMode == 'end')) { //loop mode
//ensure that speed is divisible by item width. Helps to always make images even not odd widths!
while (this.itemMax % this.o.speed !== 0) {
this.o.speed--;
if (this.o.speed === 0) {
this.o.speed = 1;
break;
}
}
}
var self = this;
this.trigger = null;
// In auto mode the back/forward buttons swap meaning when the scroll
// direction is reversed, hence the isForwards checks below.
this.funcMoveBack = function (e) {
if (e !== undefined) {
e.preventDefault();
}
self.trigger = !self.isAuto && self.o.manualMode == 'end' ? this : null;
if (self.isAuto) {
self.isForwards ? self.moveBack() : self.moveForward();
} else {
self.moveBack();
}
};
this.funcMoveForward = function (e) {
if (e !== undefined) {
e.preventDefault();
}
self.trigger = !self.isAuto && self.o.manualMode == 'end' ? this : null;
if (self.isAuto) {
self.isForwards ? self.moveForward() : self.moveBack();
} else {
self.moveForward();
}
};
this.funcMovePause = function () {
self.movePause();
};
this.funcMoveStop = function () {
self.moveStop();
};
this.funcMoveResume = function () {
self.moveResume();
};
if (this.isAuto) {
this.paused = false;
// NOTE(review): a function declaration inside a block relies on
// hoisting behaviour that differs between engines / strict mode;
// consider `var togglePause = function () {...}` when next editing.
function togglePause() {
if (self.paused === false) {
self.paused = true;
self.funcMovePause();
} else {
self.paused = false;
self.funcMoveResume();
}
return self.paused;
}
//disable pauseTouch when links are present
if (this.supportsTouch && this.$items.find('a').length) {
this.supportsTouch = false;
}
if (this.isAuto && this.o.pauseOnHover && !this.supportsTouch) {
this.$clip.bind(this.events.start, this.funcMovePause).bind(this.events.end, this.funcMoveResume);
} else if (this.isAuto && this.o.pauseOnTouch && !this.o.pauseButton && this.supportsTouch) {
// Touch devices: dragging the clip scrolls manually and force-pauses
// the automatic animation.
var touchStartPos, scrollStartPos;
this.$clip.bind(this.events.start, function (e) {
togglePause();
var touch = e.originalEvent.touches[0];
touchStartPos = self.isHorizontal ? touch.pageX : touch.pageY;
scrollStartPos = self.$clip[0]['scroll' + self.scrollPos];
e.stopPropagation();
e.preventDefault();
}).bind(this.events.move, function (e) {
e.stopPropagation();
e.preventDefault();
var touch = e.originalEvent.touches[0],
endTouchPos = self.isHorizontal ? touch.pageX : touch.pageY,
pos = (touchStartPos - endTouchPos) + scrollStartPos;
if (pos < 0) pos = 0;
else if (pos > self.posMax) pos = self.posMax;
self.$clip[0]['scroll' + self.scrollPos] = pos;
//force pause
self.funcMovePause();
self.paused = true;
});
} else {
if (this.o.pauseButton) {
this.$btnPause = $(".simply-scroll-btn-pause", this.$container)
.bind('click', function (e) {
e.preventDefault();
togglePause() ? $(this).addClass('active') : $(this).removeClass('active');
});
}
}
this.funcMoveForward();
} else {
this.$btnBack
.addClass('simply-scroll-btn' + ' ' + this.moveBackClass)
.bind(this.events.start, this.funcMoveBack).bind(this.events.end, this.funcMoveStop);
this.$btnForward
.addClass('simply-scroll-btn' + ' ' + this.moveForwardClass)
.bind(this.events.start, this.funcMoveForward).bind(this.events.end, this.funcMoveStop);
if (this.o.manualMode == 'end') {
!this.isRTL ? this.$btnBack.addClass('disabled') : this.$btnForward.addClass('disabled');
}
}
},
moveForward: function () {
var self = this;
this.movement = 'forward';
if (this.trigger !== null) {
this.$btnBack.removeClass('disabled');
}
self.interval = setInterval(function () {
if (self.$clip[0]['scroll' + self.scrollPos] < (self.posMax - self.clipMax)) {
self.$clip[0]['scroll' + self.scrollPos] += self.o.speed;
} else if (self.isLoop) {
self.resetPos();
} else {
self.moveStop(self.movement);
}
}, self.intervalDelay);
},
moveBack: function () {
var self = this;
this.movement = 'back';
if (this.trigger !== null) {
this.$btnForward.removeClass('disabled');
}
self.interval = setInterval(function () {
if (self.$clip[0]['scroll' + self.scrollPos] > self.posMin) {
self.$clip[0]['scroll' + self.scrollPos] -= self.o.speed;
} else if (self.isLoop) {
self.resetPos();
} else {
self.moveStop(self.movement);
}
}, self.intervalDelay);
},
movePause: function () {
// Halt the animation timer without touching scroll position or state.
clearInterval(this.interval);
},
moveStop: function (moveDir) {
this.movePause();
if (this.trigger !== null) {
if (typeof moveDir !== 'undefined') {
$(this.trigger).addClass('disabled');
}
this.trigger = null;
}
if (this.isAuto) {
if (this.o.autoMode == 'bounce') {
moveDir == 'forward' ? this.moveBack() : this.moveForward();
}
}
},
moveResume: function () {
// Restart scrolling in whichever direction was active before the pause.
this.movement == 'forward' ? this.moveForward() : this.moveBack();
},
resetPos: function () {
// Snap the clip's scroll offset back to the pre-computed loop origin.
this.$clip[0]['scroll' + this.scrollPos] = this.resetPosition;
}
});
})(jQuery, window);
| {
"content_hash": "5b042324fdc89cfe045a8e7087b21489",
"timestamp": "",
"source": "github",
"line_count": 394,
"max_line_length": 172,
"avg_line_length": 40.1243654822335,
"alnum_prop": 0.4750458599531912,
"repo_name": "linayaounsw/linayaounsw.github.io",
"id": "9be86ddf82bc4039230c8a1c697924e61d2b2802",
"size": "16076",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "js/jquery.simplyscroll.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "39673"
},
{
"name": "HTML",
"bytes": "195539"
},
{
"name": "JavaScript",
"bytes": "70961"
}
],
"symlink_target": ""
} |
<menu xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
tools:context="org.opensharingtoolkit.nodeapp.MainActivity" >
<!-- Overflow-only "Settings" action; orderInCategory=100 keeps it last. -->
<item
android:id="@+id/action_settings"
android:orderInCategory="100"
android:title="@string/action_settings"
app:showAsAction="never"/>
</menu>
| {
"content_hash": "3ae4af206a1c1e45f29658498d33c0f0",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 65,
"avg_line_length": 35.083333333333336,
"alnum_prop": 0.6817102137767221,
"repo_name": "cgreenhalgh/opensharingtoolkit-nodeapps",
"id": "15eb2289c0d2114566cc0114a94fb9eb52828690",
"size": "421",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "android/res/menu/main.xml",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CoffeeScript",
"bytes": "7409"
},
{
"name": "Java",
"bytes": "7817"
},
{
"name": "JavaScript",
"bytes": "8636"
}
],
"symlink_target": ""
} |
<?php
use yii\helpers\Html;
use yii\widgets\ActiveForm;
/* @var $this yii\web\View */
/* @var $model app\models\ProductsSearch */
/* @var $form yii\widgets\ActiveForm */
/* Search-form partial for the Products grid. Submits via GET to the
   products/index action so the search criteria stay visible in the URL.
   Fields not currently searchable are kept below as commented-out echoes. */
?>
<div class="products-search">
<?php $form = ActiveForm::begin([
'action' => ['index'],
'method' => 'get',
]); ?>
<?= $form->field($model, 'id_product') ?>
<?= $form->field($model, 'name') ?>
<?= $form->field($model, 'price') ?>
<?= $form->field($model, 'oldprice') ?>
<?= $form->field($model, 'url') ?>
<?php // echo $form->field($model, 'isview') ?>
<?php // echo $form->field($model, 'description') ?>
<div class="form-group">
<?= Html::submitButton('Search', ['class' => 'btn btn-primary']) ?>
<?= Html::resetButton('Reset', ['class' => 'btn btn-default']) ?>
</div>
<?php ActiveForm::end(); ?>
</div>
| {
"content_hash": "7d4a1cea01d837c292876a22da921c35",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 75,
"avg_line_length": 23.358974358974358,
"alnum_prop": 0.5016465422612514,
"repo_name": "SergeiHaidukov/c-fashion.ru",
"id": "34932e631c4a53975c6da1ba6266fdc26cee0a7b",
"size": "911",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "views/products/_search.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "310"
},
{
"name": "Batchfile",
"bytes": "1030"
},
{
"name": "CSS",
"bytes": "1062978"
},
{
"name": "HTML",
"bytes": "21455"
},
{
"name": "JavaScript",
"bytes": "633286"
},
{
"name": "PHP",
"bytes": "236209"
}
],
"symlink_target": ""
} |
/**
 * @class SimpleTasks.ux.ReminderColumn
 * @extends Ext.grid.column.Column
 * A grid column for selecting reminder times. Clicking the reminder icon in
 * a cell pops up a menu of preset lead times and fires 'select' with the
 * row's record and the chosen value.
 */
Ext.define('SimpleTasks.ux.ReminderColumn', {
extend: 'Ext.grid.column.Column',
xtype: 'remindercolumn',
config: {
/**
 * @cfg {String} menuPosition
 * Position to show the menu relative to the reminder icon.
 * Alignment position as used by Ext.Element.getAlignToXY
 * Defaults to 'tl-bl'
 */
menuPosition: 'tl-bl'
},
tdCls: Ext.baseCSSPrefix + 'grid-cell-remindercolumn',
/**
 * @event select
 * Fires when a reminder time is selected from the dropdown menu
 * @param {Ext.data.Model} record The underlying record of the row that was clicked to show the reminder menu
 * @param {String|Number} value The value that was selected
 */
/**
 * Lazily builds the shared dropdown menu. Each item's click listener is
 * bound with Ext.bind(..., appendArgs=true) so the item's reminder value
 * is appended after the event's natural arguments.
 * @private
 */
initMenu: function() {
var me = this,
items = [];
function createItem(text, value) {
return {
text: text,
listeners: {
click: Ext.bind(me.handleMenuItemClick, me, [value], true)
}
}
}
items.push(createItem('No Reminder'));
items.push({xtype: 'menuseparator'});
items.push(createItem('1 day before', 1));
items.push(createItem('2 days before', 2));
items.push(createItem('3 days before', 3));
items.push(createItem('1 week before', 7));
items.push(createItem('2 weeks before', 14));
items.push(createItem('Set Default Time...', 'set'));
me.menu = Ext.create('Ext.menu.Menu', {
plain: true,
items: items
});
},
/**
 * Handles a click on a menu item and refires it as 'select' with the
 * record captured in processEvent. The 'value' argument is appended by
 * the Ext.bind call in initMenu; the two middle parameters are whatever
 * the menu item's click event supplies and are unused here.
 * @private
 * @param {Ext.menu.Item} menuItem
 * @param {Object} options
 * @param {Ext.EventObject} e
 * @param {String|Number} value Reminder offset in days, 'set', or undefined for "No Reminder"
 */
handleMenuItemClick: function(menuItem, options, e, value) {
this.fireEvent('select', this.record, value);
},
/**
 * Process and refire events routed from the GridView's processEvent method.
 * On click of the reminder icon, remembers the row's record and shows the
 * dropdown menu aligned to the icon; all other targets fall through to the
 * superclass.
 * @private
 */
processEvent: function(type, view, cell, rowIndex, colIndex, e) {
var me = this,
cssPrefix = Ext.baseCSSPrefix,
target = Ext.get(e.getTarget());
if (target.hasCls(cssPrefix + 'grid-reminder')) {
if(type === 'click') {
if(!me.menu) {
me.initMenu();
}
me.record = view.store.getAt(rowIndex);
me.menu.showBy(target, me.menuPosition);
}
} else {
return me.callParent(arguments);
}
},
/**
 * Renderer for the reminder column. Emits the clickable reminder icon,
 * dimmed (extra CSS class) when no reminder is set, or nothing at all for
 * tasks that are done or have no due date.
 * @private
 * @param {Number} value
 * @param {Object} metaData
 * @param {SimpleTasks.model.Task} task
 * @param {Number} rowIndex
 * @param {Number} colIndex
 * @param {SimpleTasks.store.Tasks} store
 * @param {Ext.grid.View} view
 */
renderer : function(value, metaData, task, rowIndex, colIndex, store, view){
var cssPrefix = Ext.baseCSSPrefix,
cls = [cssPrefix + 'grid-reminder'];
if(task.get('done') || !task.get('due')) {
// if the task is done or has no due date, a reminder cannot be set
return '';
}
if (!value) {
cls.push(cssPrefix + 'grid-reminder-empty');
}
return '<div class="' + cls.join(' ') + '"></div>';
}
});
| {
"content_hash": "72a6a89aab318c0e82baa9cac874037e",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 116,
"avg_line_length": 31.283333333333335,
"alnum_prop": 0.526638252530634,
"repo_name": "sqlwang/DeviceManagementSystem",
"id": "ea0483f8dabe3626ecb56dd70d01ea63c2e5368f",
"size": "3754",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "backend/webapp/ext/examples/classic/simple-tasks/app/ux/ReminderColumn.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "112"
},
{
"name": "Batchfile",
"bytes": "1541"
},
{
"name": "CSS",
"bytes": "26209291"
},
{
"name": "HTML",
"bytes": "56714"
},
{
"name": "JavaScript",
"bytes": "53945062"
},
{
"name": "PHP",
"bytes": "136411"
},
{
"name": "Python",
"bytes": "3330"
},
{
"name": "Ruby",
"bytes": "11215"
}
],
"symlink_target": ""
} |
<?php
namespace MagentoHackathon\Composer\Magento;
/**
 * Parser base class supporting translating path mappings according to
 * the composer.json configuration.
 */
abstract class PathTranslationParser implements Parser
{
    /**
     * @var array Variants on each prefix that path mappings are checked
     *            against.
     */
    protected $pathPrefixVariants = array('', './');

    /**
     * @var array Path mapping prefixes that need to be translated (i.e. to
     *            use a public directory as the web server root).
     */
    protected $pathPrefixTranslations = array();

    /**
     * @var string Path fragment prepended to every translated mapping
     *             target (despite the "suffix" name).
     */
    protected $pathSuffix;

    /**
     * Constructor. Sets the list of path translations to use.
     *
     * @param array  $translations Path translations (prefix => replacement)
     * @param string $pathSuffix   Fragment prepended to each mapping target
     */
    public function __construct($translations, $pathSuffix)
    {
        $this->pathPrefixTranslations = $this->createPrefixVariants($translations);
        $this->pathSuffix = $pathSuffix;
    }

    /**
     * Given an array of path mapping translations, combine them with a list
     * of starting variations. This is so that a translation for 'js' will
     * also match path mappings beginning with './js'.
     *
     * @param array $translations Prefix => replacement pairs
     * @return array The same pairs, expanded with each prefix variant
     */
    protected function createPrefixVariants($translations)
    {
        $newTranslations = array();
        foreach ($translations as $key => $value) {
            foreach ($this->pathPrefixVariants as $variant) {
                $newTranslations[$variant . $key] = $value;
            }
        }

        return $newTranslations;
    }

    /**
     * Given a list of path mappings, check if any of the targets are for
     * directories that have been moved under the public directory. If so,
     * update the target paths to include 'public/'. As no standard Magento
     * path mappings should ever start with 'public/', and path mappings
     * that already include the public directory should always have
     * js/skin/media paths starting with 'public/', it should be safe to call
     * multiple times on either.
     *
     * @param array $mappings Array of path mappings; each element is a
     *                        two-element array of [source, target]
     * @return array Updated path mappings
     */
    public function translatePathMappings($mappings)
    {
        // each element of $mappings is an array with two elements; first is
        // the source and second is the target
        foreach ($mappings as &$mapping) {
            foreach ($this->pathPrefixTranslations as $prefix => $translate) {
                if (strpos($mapping[1], $prefix) === 0) {
                    // replace the old prefix with the translated version
                    $mapping[1] = $translate . substr($mapping[1], strlen($prefix));

                    // should never need to translate a prefix more than once
                    // per path mapping
                    break;
                }
            }

            // Prepend the configured path suffix to the target path.
            $mapping[1] = $this->pathSuffix . $mapping[1];
        }
        // Break the reference left dangling by the by-reference foreach so a
        // later write to $mapping cannot silently mutate the last element.
        unset($mapping);

        return $mappings;
    }
}
| {
"content_hash": "8581124a195e8e99592ac488919f4b98",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 84,
"avg_line_length": 34.47126436781609,
"alnum_prop": 0.6128709569856619,
"repo_name": "j-froehlich/magento2_wk",
"id": "6ededdc1fe5016a59a5e572a9b2da1a379d396c2",
"size": "2999",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "vendor/magento/magento-composer-installer/src/MagentoHackathon/Composer/Magento/PathTranslationParser.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "13636"
},
{
"name": "CSS",
"bytes": "2076720"
},
{
"name": "HTML",
"bytes": "6151072"
},
{
"name": "JavaScript",
"bytes": "2488727"
},
{
"name": "PHP",
"bytes": "12466046"
},
{
"name": "Shell",
"bytes": "6088"
},
{
"name": "XSLT",
"bytes": "19979"
}
],
"symlink_target": ""
} |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.nativelibs4java.opencl.blas;
import com.nativelibs4java.opencl.CLEvent;
import java.util.ArrayList;
import java.util.List;
/**
 * Tracks pending OpenCL events for a resource: at most one outstanding
 * write event plus any number of concurrent read events. Actions are handed
 * the set of events they must wait on, and the bookkeeping is updated from
 * the event each action returns. All methods synchronize on this instance.
 *
 * @author ochafik
 */
public class CLEvents {
// Event of the last write; readers must wait on it. Null when settled.
CLEvent lastWriteEvent;
// Events of in-flight reads; a writer must wait on all of them.
List<CLEvent> readEvents = new ArrayList<CLEvent>();
List<Listener> listeners = new ArrayList<Listener>();
/** Callbacks invoked just before read/write operations are performed. */
public interface Listener {
void writing(CLEvents evts);
void reading(CLEvents evts);
}
/** An operation that waits on the given events and returns its own event. */
public interface Action {
CLEvent perform(CLEvent[] events);
}
public synchronized void addListener(Listener l) {
listeners.add(l);
}
public synchronized void removeListener(Listener l) {
listeners.remove(l);
}
static final CLEvent[] EMPTY_EVENTS = new CLEvent[0];
// Runs the action against ALL tracked events (reads + optional write) and
// then clears the bookkeeping; used by performWrite and waitFor.
protected synchronized CLEvent clearEvents(Action action) {
int nReads = readEvents.size();
boolean hasWrite = lastWriteEvent != null;
int n = nReads + (hasWrite ? 1 : 0);
// When hasWrite, toArray leaves the extra trailing slot null; it is
// filled with lastWriteEvent just below.
CLEvent[] evts = n == 0 ? EMPTY_EVENTS : readEvents.toArray(new CLEvent[n]);
if (hasWrite)
evts[nReads] = lastWriteEvent;
CLEvent evt = action.perform(evts);
lastWriteEvent = null;
readEvents.clear();
return evt;
}
// NOTE(review): this read path notifies listener.writing(...) while the
// Runnable overload below notifies reading(...). The two look swapped;
// confirm which notification was intended before changing either.
public synchronized CLEvent performRead(Action action) {
for (Listener listener : listeners)
listener.writing(this);
CLEvent evt = action.perform(lastWriteEvent == null ? EMPTY_EVENTS : new CLEvent[] { lastWriteEvent });
if (evt != null) {
readEvents.add(evt);
lastWriteEvent = null; // read completed only if the optional write also completed
}
return evt;
}
// Blocking variant: waits for any pending write, then runs the action.
public synchronized void performRead(Runnable action) {
for (Listener listener : listeners)
listener.reading(this);
waitForRead();
action.run();
}
// A write must follow all reads and the previous write; its event becomes
// the new barrier for subsequent operations. Listeners are not notified
// here (only via Listener callbacks on the read paths above).
public synchronized CLEvent performWrite(Action action) {
return lastWriteEvent = clearEvents(action);
}
/**
 * Wait until all write operations are completed so that the data is readable.
 */
public synchronized void waitForRead() {
CLEvent.waitFor(lastWriteEvent);
lastWriteEvent = null;
}
/**
 * Wait for all associated operations to complete (read or write).
 */
public synchronized void waitFor() {
clearEvents(new Action() {
public CLEvent perform(CLEvent[] evts) {
CLEvent.waitFor(evts);
return null;
}
});
}
}
| {
"content_hash": "e080e197a45ab64472a5fa2992fdd9c7",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 111,
"avg_line_length": 30.318181818181817,
"alnum_prop": 0.6154422788605697,
"repo_name": "ctrimble/JavaCL",
"id": "f78a714fe06129901e55cb2fbe2978e587462e1e",
"size": "2668",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Blas/src/main/java/com/nativelibs4java/opencl/blas/CLEvents.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "340"
},
{
"name": "C",
"bytes": "374527"
},
{
"name": "Java",
"bytes": "979092"
},
{
"name": "Makefile",
"bytes": "3090"
},
{
"name": "Scala",
"bytes": "10093"
},
{
"name": "Shell",
"bytes": "4327"
}
],
"symlink_target": ""
} |
title: Authenticate
description: This tutorial will show you how to use the Auth0 WCF SDK to add authentication and authorization to your API.
---
::: panel-info System Requirements
This tutorial and seed project have been tested with the following:
* Microsoft Visual Studio 2015
:::
This tutorial explains how to consume a WCF service, validating the identity of the caller.
When calling a Web Service (or an API in general) there are two ways users are typically authenticated:
* Through a client that has access to a key that can be used to obtain a token.
* Through a client that has access to a token that was obtained through some other method.
The first scenario usually happens on trusted clients (e.g. a script, a desktop application). The second scenario is more often a browser, or a mobile native app.
For this tutorial, we will assume the standard WCF template with a `basicHttpBinding`.
## Using Auth0 generated JsonWebTokens with WCF services
The integration consists of adding a `ServiceAuthorizationManager` (which is an extensibility point offered by WCF). This class intercepts all calls to a specific service and extracts the HTTP `Authorization` header that contains the JsonWebToken. Then it validates the token using a symmetric or asymmetric key, checks that it's not expired, and finally verifies that the `audience` is correct. If all these checks pass, control is transferred to the user code with a `ClaimsPrincipal` object set for the app to use.
### 1. Install Auth0-WCF-Service-JWT NuGet package
Use the NuGet Package Manager (Tools -> Library Package Manager -> Package Manager Console) to install the **Auth0-MVC** package, running the command:
${snippet(meta.snippets.dependencies)}
> This package creates the `ServiceAuthorizationManager` and will add a set of configuration settings.
### 2. Completing your app Web.Config with Auth0 settings
${snippet(meta.snippets.setup)}
### 3. Accessing user information
Once the user is successfully authenticated with the application, a `ClaimsPrincipal` will be generated which can be accessed through the `User` or `Thread.CurrentPrincipal` properties:
${snippet(meta.snippets.use)}
### 4. Attaching a token on the client
Install the NuGet package on the client side
```
Install-Package Auth0-WCF-Client
```
Extract the `id_token` from the `ClaimsPrincipal` and attach it to the WCF request
```cs
// get JsonWebToken from logged in user
string token = ClaimsPrincipal.Current.FindFirst("id_token").Value;
// attach token to WCF request
client.ChannelFactory.Endpoint.Behaviors.Add(new AttachTokenEndpointBehavior(token));
// call WCF service
// client.CallService();
```
> **Note**: the above assumes that the WCF service is protected with the same client secret as the web site. If you want to call a service protected with a different secret, you can obtain a delegation token as shown below:
```cs
// get JsonWebToken from logged in user
string token = ClaimsPrincipal.Current.FindFirst("id_token").Value;
// create an Auth0 client to call the /delegation endpoint using the client id and secret of the caller application
var auth0 = new Auth0.Client("...caller client id...", "...caller client secret...", "${account.namespace}");
var result = auth0.GetDelegationToken(token, "${account.clientClient}");
// attach token to WCF request
client.ChannelFactory.Endpoint.Behaviors.Add(new AttachTokenEndpointBehavior(result));
```
**Congratulations!**
| {
"content_hash": "ec73c771c1e9a8d96f984edde6129224",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 516,
"avg_line_length": 44.294871794871796,
"alnum_prop": 0.7765557163531114,
"repo_name": "Catografix/docs",
"id": "37c42c2b95ab7c5c4ec5dff4b58bdd65146eed71",
"size": "3459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "articles/server-apis/wcf-service/authenticate.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "GCC Machine Description",
"bytes": "16850"
},
{
"name": "HTML",
"bytes": "7108"
},
{
"name": "JavaScript",
"bytes": "2037"
},
{
"name": "Shell",
"bytes": "396"
}
],
"symlink_target": ""
} |
#ifndef _CONNECTION_FUNCTIONS_HEADER_
#define _CONNECTION_FUNCTIONS_HEADER_
#include "connection.h"
/* Builds a Connection around an already-open socket descriptor.
 * NOTE(review): presumably heap-allocated and released by delConnection()
 * -- confirm against the implementation in connection.c. */
Connection * makeConnection(int socket);
/* Stops the connection's activity; semantics inferred from the name --
 * verify whether this also closes the underlying socket. */
void stopConnection(Connection * con);
/* Releases the Connection object itself. */
void delConnection(Connection * con);
#endif /* _CONNECTION_FUNCTIONS_HEADER_ */
| {
"content_hash": "783117c9b9c53163ad9570c84153e56e",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 42,
"avg_line_length": 22.166666666666668,
"alnum_prop": 0.7669172932330827,
"repo_name": "JamesKoenig/Gs503ToSql",
"id": "ad384f0c0b71ef3bb217db95fa4b434506dfb857",
"size": "266",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "connectionFns.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "15071"
},
{
"name": "C++",
"bytes": "477"
},
{
"name": "Objective-C",
"bytes": "3144"
}
],
"symlink_target": ""
} |
require 'spec_helper'
# Smoke test: the gem must expose a non-nil Life::VERSION constant.
describe Life do
it 'has a version number' do
expect(Life::VERSION).not_to be nil
end
end
| {
"content_hash": "ab90b1a9b07801af262130ffc9d82448",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 39,
"avg_line_length": 17.285714285714285,
"alnum_prop": 0.7024793388429752,
"repo_name": "jknabl/game-of-life",
"id": "b698e84984f539207f2b46191ae5d5fef301a4b4",
"size": "121",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/life_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "19822"
},
{
"name": "Shell",
"bytes": "131"
}
],
"symlink_target": ""
} |
<?php
namespace Phython\Exceptions;
/**
 * Exception raised for failures in the output stage; all behaviour is
 * inherited unchanged from JsonException (see that class for details).
 */
class OutputException extends JsonException
{
//
}
| {
"content_hash": "fd2a2d951dcf9578bdb70702c7807ef0",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 43,
"avg_line_length": 11.625,
"alnum_prop": 0.7419354838709677,
"repo_name": "marella/phython",
"id": "45f991bd7c7d8f46e9897659b888f72c0e53658c",
"size": "93",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Phython/Exceptions/OutputException.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "15130"
},
{
"name": "Python",
"bytes": "1171"
},
{
"name": "Shell",
"bytes": "79"
}
],
"symlink_target": ""
} |
<!-- Example taken from http://getbootstrap.com/examples/jumbotron-narrow/ -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="">
<meta name="author" content="">
<title>Castle with text</title>
<!-- Bootstrap core CSS -->
<link href="http://getbootstrap.com/dist/css/bootstrap.min.css" rel="stylesheet">
<!-- Custom styles for this template -->
<link href="http://getbootstrap.com/examples/jumbotron-narrow/jumbotron-narrow.css" rel="stylesheet">
<style type="text/css">
/* Sizes the Made in Dublin badge shown in the footer. */
#madeindublin img {
/* make the logo light gray */
/*-webkit-filter: invert(1);*/
width: 46%;
}
</style>
<!-- Just for debugging purposes. Don't actually copy these 2 lines! -->
<!--[if lt IE 9]><script src="../../assets/js/ie8-responsive-file-warning.js"></script><![endif]-->
<script src="http://getbootstrap.com/assets/js/ie-emulation-modes-warning.js"></script>
<!-- IE10 viewport hack for Surface/desktop Windows 8 bug -->
<script src="http://getbootstrap.com/assets/js/ie10-viewport-bug-workaround.js"></script>
<!-- HTML5 shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<!-- Header: nav pills on the right, site name on the left -->
<div class="header">
<ul class="nav nav-pills pull-right">
<li class="active"><a href="#">Home</a></li>
<li><a href="#">About</a></li>
<li><a href="#">Contact</a></li>
</ul>
<h3 class="text-muted">Project name</h3>
</div>
<div class="jumbotron">
<h1>Castle with text</h1>
<p class="lead">Cras justo odio, dapibus ac facilisis in, egestas eget quam. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus.</p>
<p><a class="btn btn-lg btn-success" href="#" role="button">Sign up today</a></p>
</div>
<!-- Two-column marketing copy (placeholder lorem ipsum) -->
<div class="row marketing">
<div class="col-lg-6">
<h4>Subheading</h4>
<p>Donec id elit non mi porta gravida at eget metus. Maecenas faucibus mollis interdum.</p>
<h4>Subheading</h4>
<p>Morbi leo risus, porta ac consectetur ac, vestibulum at eros. Cras mattis consectetur purus sit amet fermentum.</p>
<h4>Subheading</h4>
<p>Maecenas sed diam eget risus varius blandit sit amet non magna.</p>
</div>
<div class="col-lg-6">
<h4>Subheading</h4>
<p>Donec id elit non mi porta gravida at eget metus. Maecenas faucibus mollis interdum.</p>
<h4>Subheading</h4>
<p>Morbi leo risus, porta ac consectetur ac, vestibulum at eros. Cras mattis consectetur purus sit amet fermentum.</p>
<h4>Subheading</h4>
<p>Maecenas sed diam eget risus varius blandit sit amet non magna.</p>
</div>
</div>
<!-- Footer: copyright/address on the left, Made in Dublin badge on the right -->
<div class="footer">
<div class="row">
<div class="col-lg-6">
<p>© Company 2014</p>
<address>
<strong>Twitter, Inc.</strong><br>
795 Folsom Ave, Suite 600<br>
San Francisco, CA 94107<br>
<abbr title="Phone">P:</abbr> (123) 456-7890
</address>
</div>
<div class="col-lg-6">
<div id="madeindublin" >
<a href="http://madeindublin.org/" target="_blank" >
<img src="/img/made-in-dublin-castle_with_text-310x122.png" class=" pull-right">
</a>
</div>
</div>
</div>
</div>
</div> <!-- /container -->
</body>
</html>
"content_hash": "c9806b622f824640d3b340f2a65bf462",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 202,
"avg_line_length": 38.598130841121495,
"alnum_prop": 0.5588377723970944,
"repo_name": "madeindublin/madeindublin.github.io",
"id": "9625e9c9e8ae9989e2a772fa9424fa5a4d2bdff3",
"size": "4130",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "code-castle_with_text.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "80940"
},
{
"name": "HTML",
"bytes": "52710"
},
{
"name": "JavaScript",
"bytes": "28008"
},
{
"name": "Ruby",
"bytes": "2198"
}
],
"symlink_target": ""
} |
package org.cicadasong.cicadalib;
/**
 * Intent actions and extra keys shared between the Cicada host app and
 * Cicada client apps. Pure constants holder; not instantiable.
 */
public class CicadaIntents {
  private CicadaIntents() {
  }

  /** Common prefix for all Cicada broadcast actions. */
  public static final String PACKAGE_PREFIX = "org.cicadasong.cicada.";

  // Extras identifying the app/session a message belongs to.
  public static final String EXTRA_APP_MODE = "mode";
  public static final String EXTRA_APP_NAME = "name";
  public static final String EXTRA_SESSION_ID = "session_id";

  // Canvas push action and its payload extra. Built from PACKAGE_PREFIX for
  // consistency with the other actions; the resulting string is unchanged.
  public static final String INTENT_PUSH_CANVAS = PACKAGE_PREFIX + "PUSH_CANVAS";
  public static final String EXTRA_BUFFER = "buffer";

  // Vibration request and its timing parameters (durations in msec).
  public static final String INTENT_VIBRATE = PACKAGE_PREFIX + "VIBRATE";
  public static final String EXTRA_VIBRATE_ON_MSEC = "on";
  public static final String EXTRA_VIBRATE_OFF_MSEC = "off";
  public static final String EXTRA_VIBRATE_NUM_CYCLES = "cycles";

  // Notification lifecycle; trailing comments note each extra's value type.
  public static final String INTENT_START_NOTIFICATION = PACKAGE_PREFIX + "START_NOTIFICATION";
  public static final String EXTRA_NOTIFICATION_ID = "notification_id"; // int
  public static final String EXTRA_NOTIFICATION_BODY = "notification_body"; // string
  public static final String EXTRA_NOTIFICATION_BODY_RECT = "notification_body_rect"; // int[]
  public static final String EXTRA_PACKAGE_NAME = "package_name"; // string
  public static final String EXTRA_CLASS_NAME = "class_name"; // string
  public static final String INTENT_STOP_NOTIFICATION = PACKAGE_PREFIX + "STOP_NOTIFICATION";
}
| {
"content_hash": "f1cb83716dcaf5f0c37cb65aa811e139",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 95,
"avg_line_length": 46.96551724137931,
"alnum_prop": 0.7400881057268722,
"repo_name": "cicada-dev/cicada",
"id": "f45507962946a746d73697c6b959c271646cfdff",
"size": "1996",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cicadalib/src/org/cicadasong/cicadalib/CicadaIntents.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "193756"
}
],
"symlink_target": ""
} |
namespace Exceptionless.Web.Models;
/// <summary>Request model carrying the editable settings of a project.</summary>
public class UpdateProject
{
    /// <summary>The project's display name.</summary>
    public string Name { get; set; }

    /// <summary>Whether data identified as bot-generated should be deleted for this project.</summary>
    public bool DeleteBotDataEnabled { get; set; }
}
| {
"content_hash": "70c49667d1586997b674d3f0db221a7a",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 50,
"avg_line_length": 26.166666666666668,
"alnum_prop": 0.7197452229299363,
"repo_name": "exceptionless/Exceptionless",
"id": "0b329277c21e4c6e384cb01a563ee435a48e2d13",
"size": "159",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/Exceptionless.Web/Models/Project/UpdateProject.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "1942473"
},
{
"name": "CSS",
"bytes": "264"
},
{
"name": "Dockerfile",
"bytes": "6609"
},
{
"name": "HTML",
"bytes": "161090"
},
{
"name": "JavaScript",
"bytes": "7181"
},
{
"name": "Mustache",
"bytes": "927"
},
{
"name": "PowerShell",
"bytes": "26960"
},
{
"name": "SCSS",
"bytes": "4905"
},
{
"name": "Shell",
"bytes": "1706"
}
],
"symlink_target": ""
} |
"""
July 10th, 2015
werewolf refactor
"""
import time
import math
import yaml
import json
import copy
import random
from collections import defaultdict
import change_state
from router import command_router
from send_message import send_message
# Main entry point: dispatch an incoming chat message to the game command router.
def process_message(data, g=None):  # ``g`` may be injected by tests.
    """Handle one message dict; returns the router's response string, or None
    when the message is not a command (does not start with "!")."""
    text = data.get('text', '')
    if not text.startswith('!'):  # commands are triggered with "!"
        return None
    # Work on a deep copy so the router never mutates shared game state;
    # fall back to the persisted state when no state was injected.
    state = copy.deepcopy(g) if g else copy.deepcopy(change_state.get_game_state())
    tokens = text[1:].split(" ")  # drop the "!" and tokenize the rest
    # Let the router figure out what the command means.
    response, channel = command_router(state, tokens, data['user'])
    if channel:
        send_message(response, channel)
    else:
        send_message(response)
    return response
# hmset uid:1:transform fn 'vote' args player 'nick' status 'alive'
| {
"content_hash": "020c34d22a6a565a339d2715c359db11",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 78,
"avg_line_length": 24.333333333333332,
"alnum_prop": 0.6467710371819961,
"repo_name": "nickweinberg/werewolf-slackbot",
"id": "4b33d56364be96c6d1d5dc62232eb155c9e76c00",
"size": "1022",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugins/werewolf/app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "48613"
}
],
"symlink_target": ""
} |
<a href="https://travis-ci.org/codinguser/gnucash-android" target="_blank">
<img src="https://travis-ci.org/codinguser/gnucash-android.svg?branch=develop" alt="Travis build status" />
</a>
# Introduction
GnuCash Android is a companion expense-tracker application for GnuCash (desktop) designed for Android.
It allows you to record transactions on-the-go and later import the data into GnuCash for the desktop.
Accounts | Transactions | Reports
:-------------------------:|:-------------------------:|:-------------------------:
 |  | 
The application supports Android 4.4 KitKat (API level 19) and above.
Features include:
* An easy-to-use interface.
* **Chart of Accounts**: A master account can have a hierarchy of detail accounts underneath it.
This allows similar account types (e.g. Cash, Bank, Stock) to be grouped into one master account (e.g. Assets).
* **Split Transactions**: A single transaction can be split into several pieces to record taxes, fees, and other compound entries.
* **Double Entry**: Every transaction must debit one account and credit another by an equal amount.
This ensures that the "books balance": that the difference between income and outflow exactly
equals the sum of all assets, be they bank, cash, stock or other.
* **Income/Expense Account Types (Categories)**: These serve not only to categorize your cash flow, but when used properly with the double-entry feature, these can provide an accurate Profit&Loss statement.
* **Scheduled Transactions**: GnuCash has the ability to automatically create and enter transactions.
* **Export to GnuCash XML**, QIF or OFX. Also, scheduled exports to 3rd-party sync services like DropBox and Google Drive
* **Reports**: View summary of transactions (income and expenses) as pie/bar/line charts
# Installation
There are different ways to get the GnuCash app for Android; through
the app store, from github or building it yourself.
### App Store
<a href="http://play.google.com/store/apps/details?id=org.gnucash.android">
<img alt="Android app on Google Play" src="http://developer.android.com/images/brand/en_generic_rgb_wo_60.png" />
</a>
### From GitHub
Download the .apk from https://github.com/codinguser/gnucash-android/releases
## Building
### With Gradle
This project requires the [Android SDK](http://developer.android.com/sdk/index.html)
to be installed in your development environment. In addition you'll need to set
the `ANDROID_HOME` environment variable to the location of your SDK. For example:
export ANDROID_HOME=/home/<user>/tools/android-sdk
After satisfying those requirements, the build is pretty simple:
* Run `./gradlew build installDevelopmentDebug` from within the project folder.
It will build the project for you and install it to the connected Android device or running emulator.
The app is configured to allow you to install a development and production version in parallel on your device.
### With Android Studio
The easiest way to build is to install [Android Studio](https://developer.android.com/sdk/index.html) v2.+
with [Gradle](https://www.gradle.org/) v3.4.1
Once installed, then you can import the project into Android Studio:
1. Open `File`
2. Import Project
3. Select `build.gradle` under the project directory
4. Click `OK`
Then, Gradle will do everything for you.
## Support
Google+ Community: https://plus.google.com/communities/104728406764752407046
## Contributing
There are several ways you could contribute to the development.
* Pull requests are always welcome! You could contribute code by fixing bugs, adding new features or automated tests.
Take a look at the [bug tracker](https://github.com/codinguser/gnucash-android/issues?state=open)
for ideas where to start. It is also preferable to target issues in the current [milestone](https://github.com/codinguser/gnucash-android/milestones).
* Make sure to read our [contribution guidelines](https://github.com/codinguser/gnucash-android/blob/master/.github/CONTRIBUTING.md) before starting to code.
* Another way to contribute is by providing translations for languages, or improving translations.
Please visit [CrowdIn](https://crowdin.com/project/gnucash-android) in order to update and create new translations
For development, it is recommended to use Android Studio, which is available for free.
Import the project into the IDE using the build.gradle file. The IDE will resolve dependencies automatically.
# License
GnuCash Android is free software; you can redistribute it and/or
modify it under the terms of the Apache license, version 2.0.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
| {
"content_hash": "6554301b289a289f7a4762cebf612953",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 208,
"avg_line_length": 46.97345132743363,
"alnum_prop": 0.739638281838734,
"repo_name": "codinguser/gnucash-android",
"id": "b0dc7dfb6c95f86edea9d5b218eef8c22ca36242",
"size": "5308",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1781858"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import unittest
from contextlib import contextmanager
from pants.base.build_file import BuildFile
from pants.base.build_root import BuildRoot
from pants.base.file_system_project_tree import FileSystemProjectTree
from pants.build_graph.address import (Address, BuildFileAddress, InvalidSpecPath,
InvalidTargetName, parse_spec)
from pants.util.contextutil import pushd, temporary_dir
from pants.util.dirutil import touch
class ParseSpecTest(unittest.TestCase):
  """Tests for parse_spec(): splitting an address spec string into
  (spec_path, target_name) across relative, absolute, invalid, and
  subproject-mapped forms."""

  def test_parse_spec(self):
    # A bare path defaults the target name to the last path component.
    spec_path, target_name = parse_spec('a/b/c')
    self.assertEqual(spec_path, 'a/b/c')
    self.assertEqual(target_name, 'c')

    spec_path, target_name = parse_spec('a/b/c:c')
    self.assertEqual(spec_path, 'a/b/c')
    self.assertEqual(target_name, 'c')

    spec_path, target_name = parse_spec('a/b/c', relative_to='here')  # no effect - we have a path
    self.assertEqual(spec_path, 'a/b/c')
    self.assertEqual(target_name, 'c')

  def test_parse_local_spec(self):
    # A leading ':' resolves against relative_to (or the build root when absent).
    spec_path, target_name = parse_spec(':c')
    self.assertEqual(spec_path, '')
    self.assertEqual(target_name, 'c')

    spec_path, target_name = parse_spec(':c', relative_to='here')
    self.assertEqual(spec_path, 'here')
    self.assertEqual(target_name, 'c')

  def test_parse_absolute_spec(self):
    # A leading '//' anchors the spec at the build root.
    spec_path, target_name = parse_spec('//a/b/c')
    self.assertEqual(spec_path, 'a/b/c')
    self.assertEqual(target_name, 'c')

    spec_path, target_name = parse_spec('//a/b/c:c')
    self.assertEqual(spec_path, 'a/b/c')
    self.assertEqual(target_name, 'c')

    spec_path, target_name = parse_spec('//:c')
    self.assertEqual(spec_path, '')
    self.assertEqual(target_name, 'c')

  def test_parse_bad_spec_non_normalized(self):
    # Paths containing '.'/'..' segments or trailing slashes are rejected.
    self.do_test_bad_spec_path('..')
    self.do_test_bad_spec_path('.')
    self.do_test_bad_spec_path('//..')
    self.do_test_bad_spec_path('//.')
    self.do_test_bad_spec_path('a/.')
    self.do_test_bad_spec_path('a/..')
    self.do_test_bad_spec_path('../a')
    self.do_test_bad_spec_path('a/../a')
    self.do_test_bad_spec_path('a/')
    self.do_test_bad_spec_path('a/b/')

  def test_parse_bad_spec_bad_path(self):
    # A single leading '/' (or more than two) is not a valid root anchor.
    self.do_test_bad_spec_path('/a')
    self.do_test_bad_spec_path('///a')

  def test_parse_bad_spec_bad_name(self):
    # Empty or doubled target-name separators are rejected.
    self.do_test_bad_target_name('a:')
    self.do_test_bad_target_name('a::')
    self.do_test_bad_target_name('//')

  def test_parse_bad_spec_build_trailing_path_component(self):
    # BUILD file names must not appear as the trailing path component.
    self.do_test_bad_spec_path('BUILD')
    self.do_test_bad_spec_path('BUILD.suffix')
    self.do_test_bad_spec_path('//BUILD')
    self.do_test_bad_spec_path('//BUILD.suffix')
    self.do_test_bad_spec_path('a/BUILD')
    self.do_test_bad_spec_path('a/BUILD.suffix')
    self.do_test_bad_spec_path('//a/BUILD')
    self.do_test_bad_spec_path('//a/BUILD.suffix')
    self.do_test_bad_spec_path('a/BUILD:b')
    self.do_test_bad_spec_path('a/BUILD.suffix:b')
    self.do_test_bad_spec_path('//a/BUILD:b')
    self.do_test_bad_spec_path('//a/BUILD.suffix:b')

  def test_banned_chars_in_target_name(self):
    with self.assertRaises(InvalidTargetName):
      Address(*parse_spec('a/b:c@d'))

  def do_test_bad_spec_path(self, spec):
    # Helper: expect InvalidSpecPath when constructing an Address from spec.
    with self.assertRaises(InvalidSpecPath):
      Address(*parse_spec(spec))

  def do_test_bad_target_name(self, spec):
    # Helper: expect InvalidTargetName when constructing an Address from spec.
    with self.assertRaises(InvalidTargetName):
      Address(*parse_spec(spec))

  def test_subproject_spec(self):
    # Ensure that a spec referring to a subproject gets assigned to that subproject properly.
    def parse(spec, relative_to):
      return parse_spec(spec,
                        relative_to=relative_to,
                        subproject_roots=[
                          'subprojectA',
                          'path/to/subprojectB',
                        ])

    # Ensure that a spec in subprojectA is determined correctly.
    spec_path, target_name = parse('src/python/alib', 'subprojectA/src/python')
    self.assertEqual('subprojectA/src/python/alib', spec_path)
    self.assertEqual('alib', target_name)

    spec_path, target_name = parse('src/python/alib:jake', 'subprojectA/src/python/alib')
    self.assertEqual('subprojectA/src/python/alib', spec_path)
    self.assertEqual('jake', target_name)

    spec_path, target_name = parse(':rel', 'subprojectA/src/python/alib')
    self.assertEqual('subprojectA/src/python/alib', spec_path)
    self.assertEqual('rel', target_name)

    # Ensure that a spec in subprojectB, which is more complex, is correct.
    spec_path, target_name = parse('src/python/blib', 'path/to/subprojectB/src/python')
    self.assertEqual('path/to/subprojectB/src/python/blib', spec_path)
    self.assertEqual('blib', target_name)

    spec_path, target_name = parse('src/python/blib:jane', 'path/to/subprojectB/src/python/blib')
    self.assertEqual('path/to/subprojectB/src/python/blib', spec_path)
    self.assertEqual('jane', target_name)

    spec_path, target_name = parse(':rel', 'path/to/subprojectB/src/python/blib')
    self.assertEqual('path/to/subprojectB/src/python/blib', spec_path)
    self.assertEqual('rel', target_name)

    # Ensure that a spec in the parent project is not mapped.
    spec_path, target_name = parse('src/python/parent', 'src/python')
    self.assertEqual('src/python/parent', spec_path)
    self.assertEqual('parent', target_name)

    spec_path, target_name = parse('src/python/parent:george', 'src/python')
    self.assertEqual('src/python/parent', spec_path)
    self.assertEqual('george', target_name)

    spec_path, target_name = parse(':rel', 'src/python/parent')
    self.assertEqual('src/python/parent', spec_path)
    self.assertEqual('rel', target_name)
class BaseAddressTest(unittest.TestCase):
  """Shared fixtures for address tests: a temporary build root populated with
  empty BUILD files, plus a helper to assert both fields of an address."""

  @contextmanager
  def workspace(self, *buildfiles):
    """Create a temporary build root containing the given (empty) BUILD files,
    chdir into it for the duration, and yield its real path."""
    with temporary_dir() as root, BuildRoot().temporary(root), pushd(root):
      for relpath in buildfiles:
        touch(os.path.join(root, relpath))
      yield os.path.realpath(root)

  def assert_address(self, spec_path, target_name, address):
    """Check spec_path and target_name of ``address`` in one call."""
    self.assertEqual(spec_path, address.spec_path)
    self.assertEqual(target_name, address.target_name)
class AddressTest(BaseAddressTest):
  """Tests for Address equality semantics and Address.parse()."""

  def test_equivalence(self):
    # An Address never equals a non-Address, and equal components compare equal
    # whether built directly or parsed from a spec string.
    self.assertNotEqual("Not really an address", Address('a/b', 'c'))
    self.assertEqual(Address('a/b', 'c'), Address('a/b', 'c'))
    self.assertEqual(Address('a/b', 'c'), Address.parse('a/b:c'))
    self.assertEqual(Address.parse('a/b:c'), Address.parse('a/b:c'))

  def test_parse(self):
    # Table of (expected_path, expected_name, spec, relative_to) cases,
    # asserted in the same order as the original individual checks.
    cases = [
      ('a/b', 'target', 'a/b:target', None),
      ('a/b', 'target', '//a/b:target', None),
      ('a/b', 'b', 'a/b', None),
      ('a/b', 'b', '//a/b', None),
      ('a/b', 'target', ':target', 'a/b'),
      ('', 'target', '//:target', 'a/b'),
      ('', 'target', ':target', None),
      ('a/b', 'target', ':target', 'a/b'),
    ]
    for expected_path, expected_name, spec, rel in cases:
      if rel is None:
        parsed = Address.parse(spec)
      else:
        parsed = Address.parse(spec, relative_to=rel)
      self.assert_address(expected_path, expected_name, parsed)
class BuildFileAddressTest(BaseAddressTest):
  """Tests for BuildFileAddress construction from BUILD files on disk."""

  def test_build_file_forms(self):
    # Nested BUILD file: the default target name is the leaf directory name.
    with self.workspace('a/b/c/BUILD') as root:
      bf = BuildFile(FileSystemProjectTree(root), relpath='a/b/c/BUILD')
      self.assert_address('a/b/c', 'c', BuildFileAddress(build_file=bf))
      self.assert_address('a/b/c', 'foo', BuildFileAddress(build_file=bf, target_name='foo'))
      self.assertEqual('a/b/c:foo', BuildFileAddress(build_file=bf, target_name='foo').spec)

    # BUILD file at the build root: empty spec_path and a '//'-prefixed spec.
    with self.workspace('BUILD') as root:
      bf = BuildFile(FileSystemProjectTree(root), relpath='BUILD')
      self.assert_address('', 'foo', BuildFileAddress(build_file=bf, target_name='foo'))
      self.assertEqual('//:foo', BuildFileAddress(build_file=bf, target_name='foo').spec)
| {
"content_hash": "40e5f4a45321b25451dcaabfb40224be",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 101,
"avg_line_length": 40.944162436548226,
"alnum_prop": 0.6678651128192412,
"repo_name": "twitter/pants",
"id": "97c6259af1d0651640e4f34687ccd94983571854",
"size": "8213",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/python/pants_test/build_graph/test_address.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "655"
},
{
"name": "C++",
"bytes": "2010"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "Dockerfile",
"bytes": "5639"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "2765"
},
{
"name": "HTML",
"bytes": "85294"
},
{
"name": "Java",
"bytes": "498956"
},
{
"name": "JavaScript",
"bytes": "22906"
},
{
"name": "Python",
"bytes": "6700799"
},
{
"name": "Rust",
"bytes": "765598"
},
{
"name": "Scala",
"bytes": "89346"
},
{
"name": "Shell",
"bytes": "94395"
},
{
"name": "Thrift",
"bytes": "2953"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <!-- Base application theme: light AppCompat theme with no action bar. -->
    <style name="AppTheme.Base" parent="Theme.AppCompat.Light.NoActionBar">
        <!-- Primary branding color (indigo). -->
        <item name="colorPrimary">#3F51B5</item>
        <!-- Darker indigo variant, used e.g. for the status bar. -->
        <item name="colorPrimaryDark">#3949AB</item>
        <!-- Accent color for widgets (blue). -->
        <item name="colorAccent">#00B0FF</item>
    </style>
    <!-- Concrete app theme; extends the base so variants can override it. -->
    <style name="AppTheme" parent="AppTheme.Base"></style>
    <!-- NOTE(review): disabled Fresco (SimpleDraweeView) styling kept for
         reference; enable and tune the attributes below if needed. -->
    <!--<style name="FrescoImageHolder">-->
    <!--<item name="fresco:fadeDuration">300</item>-->
    <!--<item name="fresco:actualImageScaleType">focusCrop</item>-->
    <!--<item name="fresco:placeholderImage">@color/image_placeholder</item>-->
    <!--<item name="fresco:placeholderImageScaleType">fitCenter</item>-->
    <!--<item name="fresco:failureImage">@color/image_error</item>-->
    <!--<item name="fresco:failureImageScaleType">centerInside</item>-->
    <!--<item name="fresco:retryImage">@color/image_retrying</item>-->
    <!--<item name="fresco:retryImageScaleType">centerCrop</item>-->
    <!--<item name="fresco:progressBarImage">@color/image_progress_bar</item>-->
    <!--<item name="fresco:progressBarImageScaleType">centerInside</item>-->
    <!--<item name="fresco:progressBarAutoRotateInterval">1000</item>-->
    <!--<item name="fresco:pressedStateOverlayImage">@color/white</item>-->
    <!--<item name="fresco:roundAsCircle">false</item>-->
    <!--<item name="fresco:roundedCornerRadius">1dp</item>-->
    <!--<item name="fresco:roundTopLeft">true</item>-->
    <!--<item name="fresco:roundTopRight">false</item>-->
    <!--<item name="fresco:roundBottomLeft">false</item>-->
    <!--<item name="fresco:roundBottomRight">true</item>-->
    <!--<item name="fresco:roundWithOverlayColor">@color/image_error</item>-->
    <!--<item name="fresco:roundingBorderWidth">2dp</item>-->
    <!--<item name="fresco:roundingBorderColor">@color/image_progress_bar</item>-->
    <!--</style>-->
</resources>
| {
"content_hash": "165ff8adaf746d7dfd00039ea796076e",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 87,
"avg_line_length": 55.78947368421053,
"alnum_prop": 0.6122641509433963,
"repo_name": "ab2005/imageloaderlib",
"id": "489d9e0801c14a7bf1bdfb699c0ef093f74ef56b",
"size": "2120",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/res/values/styles.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "174924"
},
{
"name": "Prolog",
"bytes": "150"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.trablone.csscreated.test">
<uses-sdk android:minSdkVersion="14" android:targetSdkVersion="19" />
<application>
<uses-library android:name="android.test.runner" />
</application>
<instrumentation android:name="android.test.InstrumentationTestRunner"
android:targetPackage="com.trablone.csscreated"
android:handleProfiling="false"
android:functionalTest="false"
android:label="Tests for com.trablone.csscreated"/>
</manifest>
| {
"content_hash": "b725ac59a26ac0792099ddf13c5cba72",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 74,
"avg_line_length": 41.25,
"alnum_prop": 0.6515151515151515,
"repo_name": "SnowVolf/XSS-Created",
"id": "7a3ec45b885f9890ce1599238843013d1d731d64",
"size": "660",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/build/intermediates/manifest/androidTest/debug/AndroidManifest.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "47050"
},
{
"name": "HTML",
"bytes": "811976"
},
{
"name": "Java",
"bytes": "55349"
},
{
"name": "JavaScript",
"bytes": "28660"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.9.1"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>Shopify SDK for Unity: Member List</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectalign" style="padding-left: 0.5em;">
<div id="projectname">Shopify SDK for Unity
</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.9.1 -->
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:cf05388f2679ee054f2beb29a391d25f4e673ac3&dn=gpl-2.0.txt GPL-v2 */
var searchBox = new SearchBox("searchBox", "search",false,'Search','.html');
/* @license-end */
</script>
<script type="text/javascript" src="menudata.js"></script>
<script type="text/javascript" src="menu.js"></script>
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:cf05388f2679ee054f2beb29a391d25f4e673ac3&dn=gpl-2.0.txt GPL-v2 */
$(function() {
initMenu('',true,false,'search.php','Search');
$(document).ready(function() { init_search(); });
});
/* @license-end */</script>
<div id="main-nav"></div>
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div id="nav-path" class="navpath">
<ul>
<li class="navelem"><a class="el" href="namespace_shopify.html">Shopify</a></li><li class="navelem"><a class="el" href="namespace_shopify_1_1_unity.html">Unity</a></li><li class="navelem"><a class="el" href="class_shopify_1_1_unity_1_1_unknown_node.html">UnknownNode</a></li> </ul>
</div>
</div><!-- top -->
<div class="header">
<div class="headertitle">
<div class="title">Shopify.Unity.UnknownNode Member List</div> </div>
</div><!--header-->
<div class="contents">
<p>This is the complete list of members for <a class="el" href="class_shopify_1_1_unity_1_1_unknown_node.html">Shopify.Unity.UnknownNode</a>, including all inherited members.</p>
<table class="directory">
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>Clone</b>() (defined in <a class="el" href="class_shopify_1_1_unity_1_1_unknown_node.html">Shopify.Unity.UnknownNode</a>)</td><td class="entry"><a class="el" href="class_shopify_1_1_unity_1_1_unknown_node.html">Shopify.Unity.UnknownNode</a></td><td class="entry"><span class="mlabel">inline</span></td></tr>
<tr><td class="entry"><a class="el" href="class_shopify_1_1_unity_1_1_unknown_node.html#a27575b9a2fcb3b4d35ece521026deed6">Create</a>(Dictionary< string, object > dataJSON)</td><td class="entry"><a class="el" href="class_shopify_1_1_unity_1_1_unknown_node.html">Shopify.Unity.UnknownNode</a></td><td class="entry"><span class="mlabel">inline</span><span class="mlabel">static</span></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>Data</b> (defined in <a class="el" href="class_shopify_1_1_unity_1_1_s_d_k_1_1_abstract_response.html">Shopify.Unity.SDK.AbstractResponse</a>)</td><td class="entry"><a class="el" href="class_shopify_1_1_unity_1_1_s_d_k_1_1_abstract_response.html">Shopify.Unity.SDK.AbstractResponse</a></td><td class="entry"><span class="mlabel">protected</span></td></tr>
<tr bgcolor="#f0f0f0"><td class="entry"><b>DataJSON</b> (defined in <a class="el" href="class_shopify_1_1_unity_1_1_s_d_k_1_1_abstract_response.html">Shopify.Unity.SDK.AbstractResponse</a>)</td><td class="entry"><a class="el" href="class_shopify_1_1_unity_1_1_s_d_k_1_1_abstract_response.html">Shopify.Unity.SDK.AbstractResponse</a></td><td class="entry"></td></tr>
<tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>Get< T ></b>(string field, string alias=null) (defined in <a class="el" href="class_shopify_1_1_unity_1_1_s_d_k_1_1_abstract_response.html">Shopify.Unity.SDK.AbstractResponse</a>)</td><td class="entry"><a class="el" href="class_shopify_1_1_unity_1_1_s_d_k_1_1_abstract_response.html">Shopify.Unity.SDK.AbstractResponse</a></td><td class="entry"><span class="mlabel">inline</span><span class="mlabel">protected</span></td></tr>
<tr><td class="entry"><a class="el" href="class_shopify_1_1_unity_1_1_unknown_node.html#a397bdbe290f265d6482d0651f72cc3a9">id</a>()</td><td class="entry"><a class="el" href="class_shopify_1_1_unity_1_1_unknown_node.html">Shopify.Unity.UnknownNode</a></td><td class="entry"><span class="mlabel">inline</span></td></tr>
<tr class="even"><td class="entry"><a class="el" href="class_shopify_1_1_unity_1_1_unknown_node.html#a5375a205cae81324f5a2d4af3c7350df">UnknownNode</a>(Dictionary< string, object > dataJSON)</td><td class="entry"><a class="el" href="class_shopify_1_1_unity_1_1_unknown_node.html">Shopify.Unity.UnknownNode</a></td><td class="entry"><span class="mlabel">inline</span></td></tr>
</table></div><!-- contents -->
<address class="footer"><small>
<a href="https://www.shopify.ca/">
<img class="footer" height="50" src="https://camo.githubusercontent.com/10d580ddb06e6e6ff66ae43959842201195c6269/68747470733a2f2f63646e2e73686f706966792e636f6d2f73686f706966792d6d61726b6574696e675f6173736574732f6275696c64732f31392e302e302f73686f706966792d66756c6c2d636f6c6f722d626c61636b2e737667" alt="Shopify">
</a>
</small></address> | {
"content_hash": "7f7889585ae73bc279a8eedd12ba5437",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 490,
"avg_line_length": 72.9080459770115,
"alnum_prop": 0.7072363235062273,
"repo_name": "Shopify/unity-buy-sdk",
"id": "7601abd0eade2ac9a0cf732c8678b4cb38aba10f",
"size": "6343",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/class_shopify_1_1_unity_1_1_unknown_node-members.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3634"
},
{
"name": "C#",
"bytes": "618254"
},
{
"name": "HTML",
"bytes": "19927"
},
{
"name": "Objective-C",
"bytes": "4627"
},
{
"name": "Objective-C++",
"bytes": "16008"
},
{
"name": "Ruby",
"bytes": "17843"
},
{
"name": "ShaderLab",
"bytes": "61646"
},
{
"name": "Shell",
"bytes": "17631"
},
{
"name": "Swift",
"bytes": "206627"
},
{
"name": "XSLT",
"bytes": "3509"
}
],
"symlink_target": ""
} |
package com.example.demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
@SpringBootApplication
public class TextsearchApplication {

	/**
	 * Application entry point; boots the Spring context for the text-search app.
	 *
	 * @param args command-line arguments forwarded to Spring Boot
	 */
	public static void main(String[] args) {
		SpringApplication.run(TextsearchApplication.class, args);
	}
}
| {
"content_hash": "a774396828100ab1e0d6fd937dc258d0",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 90,
"avg_line_length": 28.066666666666666,
"alnum_prop": 0.8123515439429929,
"repo_name": "ryslanzaharov/rzaharov",
"id": "f9b3840b13b2284afa955d24daebd315924c3700",
"size": "421",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "textsearch/src/main/java/com/example/demo/TextsearchApplication.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "6320"
},
{
"name": "FreeMarker",
"bytes": "14263"
},
{
"name": "HTML",
"bytes": "36373"
},
{
"name": "Java",
"bytes": "677006"
},
{
"name": "JavaScript",
"bytes": "21179"
},
{
"name": "XSLT",
"bytes": "8746"
}
],
"symlink_target": ""
} |
package io.reactivesocket.reactivestreams.extensions.internal.publishers;
import io.reactivesocket.reactivestreams.extensions.internal.SerializedSubscription;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import io.reactivesocket.reactivestreams.extensions.Px;
/**
 * A {@link Px} that emits every item from {@code first} and, once it
 * completes, every item from {@code second}, to a single downstream
 * subscriber — i.e. sequential concatenation of two publishers.
 *
 * <p>The downstream sees exactly one {@code onSubscribe} call: the
 * {@link SerializedSubscription} wrapper is handed down once, and when the
 * second publisher starts, its subscription is swapped in via
 * {@code replaceSubscription(...)} without the downstream noticing.
 * (Demand accounting across the handoff lives in SerializedSubscription;
 * its exact semantics are not visible from this file.)
 */
public final class ConcatPublisher<T> implements Px<T> {
    private final Publisher<T> first;
    private final Publisher<T> second;

    public ConcatPublisher(Publisher<T> first, Publisher<T> second) {
        this.first = first;
        this.second = second;
    }

    @Override
    public void subscribe(Subscriber<? super T> destination) {
        first.subscribe(new Subscriber<T>() {
            // Wraps the first upstream's subscription so it can later be
            // redirected to the second publisher.
            private SerializedSubscription subscription;

            @Override
            public void onSubscribe(Subscription s) {
                subscription = new SerializedSubscription(s);
                destination.onSubscribe(subscription);
            }

            @Override
            public void onNext(T t) {
                // Record the delivery before forwarding, so the wrapper can
                // account for items already consumed from the first source.
                subscription.onItemReceived();
                destination.onNext(t);
            }

            @Override
            public void onError(Throwable t) {
                // An error from the first source terminates the whole concat;
                // the second publisher is never subscribed.
                destination.onError(t);
            }

            @Override
            public void onComplete() {
                // First source done: continue with the second, reusing the
                // same downstream and the same serialized subscription.
                second.subscribe(new Subscriber<T>() {
                    @Override
                    public void onSubscribe(Subscription s) {
                        // Swap the upstream; downstream's view is unchanged.
                        subscription.replaceSubscription(s);
                    }

                    @Override
                    public void onNext(T t) {
                        destination.onNext(t);
                    }

                    @Override
                    public void onError(Throwable t) {
                        destination.onError(t);
                    }

                    @Override
                    public void onComplete() {
                        // Both sources exhausted: complete the downstream.
                        destination.onComplete();
                    }
                });
            }
        });
    }
}
| {
"content_hash": "acaa0648e56aea5957f64e2c539baf43",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 84,
"avg_line_length": 34.42028985507246,
"alnum_prop": 0.46989473684210525,
"repo_name": "NiteshKant/reactivesocket-java",
"id": "00e37cfbf1594f8958e88916be9d8bb1aaa5afb3",
"size": "2967",
"binary": false,
"copies": "1",
"ref": "refs/heads/0.5.x",
"path": "reactivesocket-publishers/src/main/java/io/reactivesocket/reactivestreams/extensions/internal/publishers/ConcatPublisher.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1180728"
},
{
"name": "Shell",
"bytes": "1136"
}
],
"symlink_target": ""
} |
<?xml version="1.0"?>
<phpunit xmlns="http://schema.phpunit.de/coverage/1.0">
<file name="AWG.php">
<totals>
<lines total="27" comments="16" code="11" executable="0" executed="0" percent=""/>
<methods count="0" tested="0" percent=""/>
<functions count="0" tested="0" percent=""/>
<classes count="1" tested="1" percent="100.00%"/>
<traits count="0" tested="0" percent=""/>
</totals>
<class name="AWG" start="21" executable="0" executed="0" crap="1">
<package full="Money" name="Money" sub="" category=""/>
<namespace name="SebastianBergmann\Money"/>
<method name="__construct" signature="__construct($amount)" start="23" end="26" crap="1" executable="0" executed="0" coverage="100"/>
</class>
</file>
</phpunit>
| {
"content_hash": "46e045a02683f09b18a1bedf358149c7",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 139,
"avg_line_length": 45.8235294117647,
"alnum_prop": 0.6033376123234917,
"repo_name": "fey89/Money",
"id": "2462b2f922482d454a55b7931b791b4ac356d8df",
"size": "779",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "build/logs/coverage/currency/AWG.php.xml",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "14670"
},
{
"name": "HTML",
"bytes": "3524520"
},
{
"name": "JavaScript",
"bytes": "41952"
},
{
"name": "PHP",
"bytes": "225819"
},
{
"name": "Smarty",
"bytes": "743"
}
],
"symlink_target": ""
} |
// <file>
// <copyright see="prj:///doc/copyright.txt"/>
// <license see="prj:///doc/license.txt"/>
// <owner name="Mike Krüger" email="mike@icsharpcode.net"/>
// <version>$Revision$</version>
// </file>
using System;
using System.Drawing;
using System.Windows.Forms;
namespace ICSharpCode.TextEditor.Gui.CompletionWindow
{
/// <summary>
/// Description of AbstractCompletionWindow.
/// </summary>
public abstract class AbstractCompletionWindow : System.Windows.Forms.Form
{
protected TextEditorControl control;
protected Size drawingSize;
Rectangle workingScreen;
Form parentForm;
protected AbstractCompletionWindow(Form parentForm, TextEditorControl control)
{
workingScreen = Screen.GetWorkingArea(parentForm);
// SetStyle(ControlStyles.Selectable, false);
this.parentForm = parentForm;
this.control = control;
SetLocation();
StartPosition = FormStartPosition.Manual;
FormBorderStyle = FormBorderStyle.None;
ShowInTaskbar = false;
MinimumSize = new Size(1, 1);
Size = new Size(1, 1);
}
protected virtual void SetLocation()
{
TextArea textArea = control.ActiveTextAreaControl.TextArea;
TextLocation caretPos = textArea.Caret.Position;
int xpos = textArea.TextView.GetDrawingXPos(caretPos.Y, caretPos.X);
int rulerHeight = textArea.TextEditorProperties.ShowHorizontalRuler ? textArea.TextView.FontHeight : 0;
Point pos = new Point(textArea.TextView.DrawingPosition.X + xpos,
textArea.TextView.DrawingPosition.Y + (textArea.Document.GetVisibleLine(caretPos.Y)) * textArea.TextView.FontHeight
- textArea.TextView.TextArea.VirtualTop.Y + textArea.TextView.FontHeight + rulerHeight);
Point location = control.ActiveTextAreaControl.PointToScreen(pos);
// set bounds
Rectangle bounds = new Rectangle(location, drawingSize);
if (!workingScreen.Contains(bounds)) {
if (bounds.Right > workingScreen.Right) {
bounds.X = workingScreen.Right - bounds.Width;
}
if (bounds.Left < workingScreen.Left) {
bounds.X = workingScreen.Left;
}
if (bounds.Top < workingScreen.Top) {
bounds.Y = workingScreen.Top;
}
if (bounds.Bottom > workingScreen.Bottom) {
bounds.Y = bounds.Y - bounds.Height - control.ActiveTextAreaControl.TextArea.TextView.FontHeight;
if (bounds.Bottom > workingScreen.Bottom) {
bounds.Y = workingScreen.Bottom - bounds.Height;
}
}
}
Bounds = bounds;
}
protected override CreateParams CreateParams {
get {
CreateParams p = base.CreateParams;
AddShadowToWindow(p);
return p;
}
}
static int shadowStatus;
/// <summary>
/// Adds a shadow to the create params if it is supported by the operating system.
/// </summary>
public static void AddShadowToWindow(CreateParams createParams)
{
if (shadowStatus == 0) {
// Test OS version
shadowStatus = -1; // shadow not supported
if (Environment.OSVersion.Platform == PlatformID.Win32NT) {
Version ver = Environment.OSVersion.Version;
if (ver.Major > 5 || ver.Major == 5 && ver.Minor >= 1) {
shadowStatus = 1;
}
}
}
if (shadowStatus == 1) {
createParams.ClassStyle |= 0x00020000; // set CS_DROPSHADOW
}
}
protected override bool ShowWithoutActivation {
get {
return true;
}
}
protected void ShowCompletionWindow()
{
Owner = parentForm;
Enabled = true;
this.Show();
control.Focus();
if (parentForm != null) {
parentForm.LocationChanged += new EventHandler(this.ParentFormLocationChanged);
}
control.ActiveTextAreaControl.VScrollBar.ValueChanged += new EventHandler(ParentFormLocationChanged);
control.ActiveTextAreaControl.HScrollBar.ValueChanged += new EventHandler(ParentFormLocationChanged);
control.ActiveTextAreaControl.TextArea.DoProcessDialogKey += new DialogKeyProcessor(ProcessTextAreaKey);
control.ActiveTextAreaControl.Caret.PositionChanged += new EventHandler(CaretOffsetChanged);
control.ActiveTextAreaControl.TextArea.LostFocus += new EventHandler(this.TextEditorLostFocus);
control.Resize += new EventHandler(ParentFormLocationChanged);
foreach (Control c in Controls) {
c.MouseMove += ControlMouseMove;
}
}
void ParentFormLocationChanged(object sender, EventArgs e)
{
SetLocation();
}
public virtual bool ProcessKeyEvent(char ch)
{
return false;
}
protected virtual bool ProcessTextAreaKey(Keys keyData)
{
if (!Visible) {
return false;
}
switch (keyData) {
case Keys.Escape:
Close();
return true;
}
return false;
}
protected virtual void CaretOffsetChanged(object sender, EventArgs e)
{
}
protected void TextEditorLostFocus(object sender, EventArgs e)
{
if (!control.ActiveTextAreaControl.TextArea.Focused && !this.ContainsFocus) {
Close();
}
}
protected override void OnClosed(EventArgs e)
{
base.OnClosed(e);
// take out the inserted methods
parentForm.LocationChanged -= new EventHandler(ParentFormLocationChanged);
foreach (Control c in Controls) {
c.MouseMove -= ControlMouseMove;
}
if (control.ActiveTextAreaControl.VScrollBar != null) {
control.ActiveTextAreaControl.VScrollBar.ValueChanged -= new EventHandler(ParentFormLocationChanged);
}
if (control.ActiveTextAreaControl.HScrollBar != null) {
control.ActiveTextAreaControl.HScrollBar.ValueChanged -= new EventHandler(ParentFormLocationChanged);
}
control.ActiveTextAreaControl.TextArea.LostFocus -= new EventHandler(this.TextEditorLostFocus);
control.ActiveTextAreaControl.Caret.PositionChanged -= new EventHandler(CaretOffsetChanged);
control.ActiveTextAreaControl.TextArea.DoProcessDialogKey -= new DialogKeyProcessor(ProcessTextAreaKey);
control.Resize -= new EventHandler(ParentFormLocationChanged);
Dispose();
}
protected override void OnMouseMove(MouseEventArgs e)
{
base.OnMouseMove(e);
ControlMouseMove(this, e);
}
/// <summary>
/// Invoked when the mouse moves over this form or any child control.
/// Shows the mouse cursor on the text area if it has been hidden.
/// </summary>
/// <remarks>
/// Derived classes should attach this handler to the MouseMove event
/// of all created controls which are not added to the Controls
/// collection.
/// </remarks>
protected void ControlMouseMove(object sender, MouseEventArgs e)
{
control.ActiveTextAreaControl.TextArea.ShowHiddenCursor(false);
}
}
}
| {
"content_hash": "dd91cfd82927cb2c2cdb1b8faeb650a2",
"timestamp": "",
"source": "github",
"line_count": 214,
"max_line_length": 141,
"avg_line_length": 31.074766355140188,
"alnum_prop": 0.6993984962406015,
"repo_name": "liqipeng/ICSharpCode.TextEditor",
"id": "8c17c5bb9f1303b45db60475db9a8f65beaf0a6c",
"size": "6653",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "Project/Src/Gui/CompletionWindow/AbstractCompletionWindow.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "974"
},
{
"name": "C#",
"bytes": "648471"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<resources>
<declare-styleable name="MultiSwipeRefreshLayout">
<attr name="foreground" format="reference" />
</declare-styleable>
<declare-styleable name="AccountImageView">
<attr name="maskDrawable" format="reference" />
<attr name="borderDrawable" format="reference" />
<attr name="desaturateOnPress" format="boolean" />
</declare-styleable>
</resources> | {
"content_hash": "2a735f3707e89bcc4b3b99b4ba1f0c54",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 58,
"avg_line_length": 31.857142857142858,
"alnum_prop": 0.6636771300448431,
"repo_name": "jossiwolf/HeilwigApp",
"id": "9f7a141fa8a5d4138a1f102650193cd76c89933f",
"size": "446",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/src/main/res/values/attrs.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "48504"
}
],
"symlink_target": ""
} |
/*
Title: XBee Node Test
*/
/****************************************************************************
* Copyright (C) 2016 by Ken Sarkies (www.jiggerjuice.info) *
* *
* This file is part of XBee-Acquisition *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
***************************************************************************/
#ifndef XBEE_NODE_TEST_H_QT5
#define XBEE_NODE_TEST_H_QT5
#include <QDialog>
#include <QCloseEvent>
#include <QSerialPort>
#include "ui_xbee-node-test.h"
/* Port defaults 38400 baud */
#define INITIAL_BAUDRATE 5
#define SERIAL_PORT "/dev/ttyUSB0"
#define LOG_FILE "../xbee-node-test.dat"
/* Choose whether to use hardware flow control for serial comms. */
//#define USE_HARDWARE_FLOW
//-----------------------------------------------------------------------------
/* External variable needed for access by serial emulation code */
extern QSerialPort* port; //!< Serial port object pointer
//-----------------------------------------------------------------------------
/** @brief
*/
class XbeeNodeTest : public QDialog
{
Q_OBJECT
public:
XbeeNodeTest(QString* p, uint initialBaudrate, bool commandLine,
bool debug, char* logFileName, QWidget* parent = 0);
~XbeeNodeTest();
bool success();
QString error();
private slots:
void on_debugModeCheckBox_clicked();
void on_baudrateComboBox_activated(int newBaudrate);
void on_serialComboBox_activated(int newBaudrate);
void on_runButton_clicked();
void on_quitButton_clicked();
private:
// User Interface object
Ui::XbeeNodeTestDialog xbeeNodeTestFormUi;
bool openSerialPort(QString serialPort, int baudrate);
void setComboBoxes(uint initialBaudrate);
void codeRun(); // This is where the actual test code is run
QString errorMessage; //!< Messages for the calling application
bool commandLineOnly;
bool debugMode;
bool running;
int baudrate;
};
#endif
| {
"content_hash": "0bb0289904e38c69949ed139e7d4a772",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 79,
"avg_line_length": 39.142857142857146,
"alnum_prop": 0.5106171201061712,
"repo_name": "ksarkies/XBee-Acquisition",
"id": "0414fcbfec25d9466fc7f5739f08b0338747a002",
"size": "3014",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Development/XBee-node-test-pc/emulator-qt5/xbee-node-test.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "293495"
},
{
"name": "C++",
"bytes": "361449"
},
{
"name": "CMake",
"bytes": "221"
},
{
"name": "Makefile",
"bytes": "228250"
},
{
"name": "QMake",
"bytes": "1510"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright 2005-2014 The Kuali Foundation
Licensed under the Educational Community License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.opensource.org/licenses/ecl2.php
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>rice-ksb</artifactId>
<groupId>org.kuali.rice</groupId>
<version>2.5.0-M5-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>rice-ksb-server-impl</artifactId>
<version>2.5.0-M5-SNAPSHOT</version>
<name>Rice KSB Server Implementation</name>
<dependencies>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>rice-ksb-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>rice-core-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>rice-krad-data</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.persistence</groupId>
<artifactId>org.eclipse.persistence.jpa</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.reflections</groupId>
<artifactId>reflections</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<!-- Enable static weaving using META-INF/persistence-weaving.xml for this module -->
<plugin>
<groupId>au.com.alderaan</groupId>
<artifactId>eclipselink-staticweave-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project> | {
"content_hash": "8256de4e581c7f860e770e2c04022996",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 204,
"avg_line_length": 34.16279069767442,
"alnum_prop": 0.6769911504424779,
"repo_name": "ewestfal/rice-svn2git-test",
"id": "aae445d48b2bd84211978475a8364b94a9b72d34",
"size": "2938",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rice-middleware/ksb/server-impl/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "793243"
},
{
"name": "Groovy",
"bytes": "2170621"
},
{
"name": "Java",
"bytes": "34445604"
},
{
"name": "JavaScript",
"bytes": "2635300"
},
{
"name": "PHP",
"bytes": "15766"
},
{
"name": "Shell",
"bytes": "10444"
},
{
"name": "XSLT",
"bytes": "107686"
}
],
"symlink_target": ""
} |
'use strict';
angular.module('semdomtrans',
[
'ui.router',
'bellows.services',
'bellows.services.comments',
'bellows.filters',
'semdomtrans.edit',
'semdomtrans.comments',
'semdomtrans.services',
'semdomtrans.review',
'pascalprecht.translate'
])
.config(['$stateProvider', '$urlRouterProvider', function ($stateProvider, $urlRouterProvider) {
$urlRouterProvider.otherwise('/edit');
$stateProvider
.state('editor', {
url: '/edit',
views: {
'@': { templateUrl: '/angular-app/languageforge/semdomtrans/views/edit.html' },
'editItem@editor': {
templateUrl: '/angular-app/languageforge/semdomtrans/views/partials/editItem.html'
},
'editFilter@editor': {
templateUrl: '/angular-app/languageforge/semdomtrans/views/partials/editFilter.html'
}
}
})
.state('editor.editItem', {
url: '/:position'
})
.state('comments', {
url: '/comments/:position',
views: {
'': { templateUrl: '/angular-app/languageforge/semdomtrans/views/comments.html' }
}
})
.state('review', {
url: '/review',
views: {
'': { templateUrl: '/angular-app/languageforge/semdomtrans/views/review.html' }
}
});
}])
.controller('MainCtrl', ['$scope', 'semdomtransEditorDataService', 'semdomtransEditService',
'sessionService', 'lexCommentService', 'offlineCache', '$q', 'silNoticeService',
function ($scope, editorDataService, editorApi,
ss, commentsSerivce, offlineCache, $q, notice) {
$scope.rights = {};
$scope.rights.remove = ss.hasProjectRight(ss.domain.USERS, ss.operation.DELETE);
$scope.rights.create = ss.hasProjectRight(ss.domain.USERS, ss.operation.CREATE);
$scope.rights.edit = ss.hasProjectRight(ss.domain.USERS, ss.operation.EDIT);
$scope.items = editorDataService.entries;
$scope.workingSets = editorDataService.workingSets;
$scope.itemsTree = editorDataService.itemsTree;
if ($scope.items.length == 0 && !$scope.loadingDto) {
editorDataService.loadEditorData().then(function (result) {
editorDataService.processEditorDto(result);
});
}
$scope.exportProject = function exportProject() {
notice.setLoading('Exporting Semantic Domain Data to XML File');
editorApi.exportProject(function (result) {
notice.cancelLoading();
if (result.ok) {
window.location = 'http://' + result.data;
}
});
};
$scope.includedItems = {};
$scope.loadingDto = false;
// permissions stuff
$scope.rights = {
canEditProject: function canEditProject() {
return ss.hasProjectRight(ss.domain.PROJECTS, ss.operation.EDIT);
},
canEditEntry: function canEditEntry() {
return ss.hasProjectRight(ss.domain.ENTRIES, ss.operation.EDIT);
},
canDeleteEntry: function canDeleteEntry() {
return ss.hasProjectRight(ss.domain.ENTRIES, ss.operation.DELETE);
},
canComment: function canComment() {
return ss.hasProjectRight(ss.domain.COMMENTS, ss.operation.CREATE);
},
canDeleteComment: function canDeleteComment(commentAuthorId) {
if (ss.session.userId == commentAuthorId) {
return ss.hasProjectRight(ss.domain.COMMENTS, ss.operation.DELETE_OWN);
} else {
return ss.hasProjectRight(ss.domain.COMMENTS, ss.operation.DELETE);
}
},
canEditComment: function canEditComment(commentAuthorId) {
if (ss.session.userId == commentAuthorId) {
return ss.hasProjectRight(ss.domain.COMMENTS, ss.operation.EDIT_OWN);
} else {
return false;
}
},
canUpdateCommentStatus: function canUpdateCommentStatus() {
return ss.hasProjectRight(ss.domain.COMMENTS, ss.operation.EDIT);
}
};
$scope.project = ss.session.project;
$scope.projectSettings = ss.session.projectSettings;
$scope.currentUserRole = ss.session.projectSettings.currentUserRole;
}])
// not sure if we need breadcrumbs for this app
.controller('BreadcrumbCtrl', ['$scope', '$rootScope', 'breadcrumbService',
function ($scope, $rootScope, breadcrumbService) {
$scope.idmap = breadcrumbService.idmap;
$rootScope.$on('$routeChangeSuccess', function () {
$scope.breadcrumbs = breadcrumbService.read();
});
$scope.$watch('idmap', function () {
$scope.breadcrumbs = breadcrumbService.read();
}, true);
}])
;
| {
"content_hash": "653f1d6d25b79b281b9f9d04791d0fbe",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 100,
"avg_line_length": 33.23741007194245,
"alnum_prop": 0.6303030303030303,
"repo_name": "sil-student-projects/web-languageforge",
"id": "6b68b69c05d0ffc031eba739d18ddadfa249ea4a",
"size": "4620",
"binary": false,
"copies": "1",
"ref": "refs/heads/master-taylor",
"path": "src/angular-app/languageforge/semdomtrans/semdomtrans.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "265"
},
{
"name": "Batchfile",
"bytes": "494"
},
{
"name": "C#",
"bytes": "10048"
},
{
"name": "CSS",
"bytes": "516416"
},
{
"name": "HTML",
"bytes": "833654"
},
{
"name": "JavaScript",
"bytes": "985327"
},
{
"name": "PHP",
"bytes": "1419342"
},
{
"name": "Python",
"bytes": "53066"
},
{
"name": "Ruby",
"bytes": "19407"
},
{
"name": "Shell",
"bytes": "5951"
},
{
"name": "TypeScript",
"bytes": "840238"
}
],
"symlink_target": ""
} |
from ocvfacerec.facerec.feature import AbstractFeature
from ocvfacerec.facerec.classifier import AbstractClassifier
class PredictableModel(object):
def __init__(self, feature, classifier):
if not isinstance(feature, AbstractFeature):
raise TypeError("feature must be of type AbstractFeature!")
if not isinstance(classifier, AbstractClassifier):
raise TypeError("classifier must be of type AbstractClassifier!")
self.feature = feature
self.classifier = classifier
def compute(self, X, y):
features = self.feature.compute(X, y)
self.classifier.compute(features, y)
def predict(self, X):
q = self.feature.extract(X)
return self.classifier.predict(q)
def __repr__(self):
feature_repr = repr(self.feature)
classifier_repr = repr(self.classifier)
return "PredictableModel (feature=%s, classifier=%s)" % (feature_repr, classifier_repr)
| {
"content_hash": "b1b91009715c86ba0ff873fd1163eccf",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 95,
"avg_line_length": 37.03846153846154,
"alnum_prop": 0.6801661474558671,
"repo_name": "warp1337/opencv_facerecognizer",
"id": "bf5c3cfd711ea72a5d4e7e9ac8a93f4a694b13df",
"size": "2767",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ocvfacerec/facerec/model.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "211943"
},
{
"name": "Shell",
"bytes": "178"
}
],
"symlink_target": ""
} |
package io.netty.bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelOption;
import io.netty.channel.ChannelPromise;
import io.netty.channel.DefaultChannelPromise;
import io.netty.channel.EventLoop;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.ReflectiveChannelFactory;
import io.netty.util.AttributeKey;
import io.netty.util.concurrent.EventExecutor;
import io.netty.util.concurrent.GlobalEventExecutor;
import io.netty.util.internal.StringUtil;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* {@link AbstractBootstrap} is a helper class that makes it easy to bootstrap a {@link Channel}. It support
* method-chaining to provide an easy way to configure the {@link AbstractBootstrap}.
*
* <p>When not used in a {@link ServerBootstrap} context, the {@link #bind()} methods are useful for connectionless
* transports such as datagram (UDP).</p>
*/
public abstract class AbstractBootstrap<B extends AbstractBootstrap<B, C>, C extends Channel> implements Cloneable {
private volatile EventLoopGroup group;
@SuppressWarnings("deprecation")
private volatile ChannelFactory<? extends C> channelFactory;
private volatile SocketAddress localAddress;
private final Map<ChannelOption<?>, Object> options = new LinkedHashMap<ChannelOption<?>, Object>();
private final Map<AttributeKey<?>, Object> attrs = new LinkedHashMap<AttributeKey<?>, Object>();
private volatile ChannelHandler handler;
AbstractBootstrap() {
// Disallow extending from a different package.
}
AbstractBootstrap(AbstractBootstrap<B, C> bootstrap) {
group = bootstrap.group;
channelFactory = bootstrap.channelFactory;
handler = bootstrap.handler;
localAddress = bootstrap.localAddress;
synchronized (bootstrap.options) {
options.putAll(bootstrap.options);
}
synchronized (bootstrap.attrs) {
attrs.putAll(bootstrap.attrs);
}
}
/**
* The {@link EventLoopGroup} which is used to handle all the events for the to-be-creates
* {@link Channel}
*/
@SuppressWarnings("unchecked")
public B group(EventLoopGroup group) {
if (group == null) {
throw new NullPointerException("group");
}
if (this.group != null) {
throw new IllegalStateException("group set already");
}
this.group = group;
return (B) this;
}
/**
* The {@link Class} which is used to create {@link Channel} instances from.
* You either use this or {@link #channelFactory(io.netty.channel.ChannelFactory)} if your
* {@link Channel} implementation has no no-args constructor.
*/
public B channel(Class<? extends C> channelClass) {
if (channelClass == null) {
throw new NullPointerException("channelClass");
}
return channelFactory(new ReflectiveChannelFactory<C>(channelClass));
}
/**
* @deprecated Use {@link #channelFactory(io.netty.channel.ChannelFactory)} instead.
*/
@Deprecated
@SuppressWarnings("unchecked")
public B channelFactory(ChannelFactory<? extends C> channelFactory) {
if (channelFactory == null) {
throw new NullPointerException("channelFactory");
}
if (this.channelFactory != null) {
throw new IllegalStateException("channelFactory set already");
}
this.channelFactory = channelFactory;
return (B) this;
}
/**
* {@link io.netty.channel.ChannelFactory} which is used to create {@link Channel} instances from
* when calling {@link #bind()}. This method is usually only used if {@link #channel(Class)}
* is not working for you because of some more complex needs. If your {@link Channel} implementation
* has a no-args constructor, its highly recommend to just use {@link #channel(Class)} for
* simplify your code.
*/
@SuppressWarnings({ "unchecked", "deprecation" })
public B channelFactory(io.netty.channel.ChannelFactory<? extends C> channelFactory) {
return channelFactory((ChannelFactory<C>) channelFactory);
}
/**
* The {@link SocketAddress} which is used to bind the local "end" to.
*/
@SuppressWarnings("unchecked")
public B localAddress(SocketAddress localAddress) {
this.localAddress = localAddress;
return (B) this;
}
/**
* @see {@link #localAddress(SocketAddress)}
*/
public B localAddress(int inetPort) {
return localAddress(new InetSocketAddress(inetPort));
}
/**
* @see {@link #localAddress(SocketAddress)}
*/
public B localAddress(String inetHost, int inetPort) {
return localAddress(new InetSocketAddress(inetHost, inetPort));
}
/**
* @see {@link #localAddress(SocketAddress)}
*/
public B localAddress(InetAddress inetHost, int inetPort) {
return localAddress(new InetSocketAddress(inetHost, inetPort));
}
/**
* Allow to specify a {@link ChannelOption} which is used for the {@link Channel} instances once they got
* created. Use a value of {@code null} to remove a previous set {@link ChannelOption}.
*/
@SuppressWarnings("unchecked")
public <T> B option(ChannelOption<T> option, T value) {
if (option == null) {
throw new NullPointerException("option");
}
if (value == null) {
synchronized (options) {
options.remove(option);
}
} else {
synchronized (options) {
options.put(option, value);
}
}
return (B) this;
}
/**
* Allow to specify an initial attribute of the newly created {@link Channel}. If the {@code value} is
* {@code null}, the attribute of the specified {@code key} is removed.
*/
public <T> B attr(AttributeKey<T> key, T value) {
if (key == null) {
throw new NullPointerException("key");
}
if (value == null) {
synchronized (attrs) {
attrs.remove(key);
}
} else {
synchronized (attrs) {
attrs.put(key, value);
}
}
@SuppressWarnings("unchecked")
B b = (B) this;
return b;
}
/**
* Validate all the parameters. Sub-classes may override this, but should
* call the super method in that case.
*/
@SuppressWarnings("unchecked")
public B validate() {
if (group == null) {
throw new IllegalStateException("group not set");
}
if (channelFactory == null) {
throw new IllegalStateException("channel or channelFactory not set");
}
return (B) this;
}
/**
* Returns a deep clone of this bootstrap which has the identical configuration. This method is useful when making
* multiple {@link Channel}s with similar settings. Please note that this method does not clone the
* {@link EventLoopGroup} deeply but shallowly, making the group a shared resource.
*/
@Override
@SuppressWarnings("CloneDoesntDeclareCloneNotSupportedException")
public abstract B clone();
/**
* Create a new {@link Channel} and register it with an {@link EventLoop}.
*/
public ChannelFuture register() {
validate();
return initAndRegister();
}
/**
* Create a new {@link Channel} and bind it.
*/
public ChannelFuture bind() {
validate();
SocketAddress localAddress = this.localAddress;
if (localAddress == null) {
throw new IllegalStateException("localAddress not set");
}
return doBind(localAddress);
}
/**
* Create a new {@link Channel} and bind it.
*/
public ChannelFuture bind(int inetPort) {
return bind(new InetSocketAddress(inetPort));
}
/**
* Create a new {@link Channel} and bind it.
*/
public ChannelFuture bind(String inetHost, int inetPort) {
return bind(new InetSocketAddress(inetHost, inetPort));
}
/**
* Create a new {@link Channel} and bind it.
*/
public ChannelFuture bind(InetAddress inetHost, int inetPort) {
return bind(new InetSocketAddress(inetHost, inetPort));
}
/**
* Create a new {@link Channel} and bind it.
*/
public ChannelFuture bind(SocketAddress localAddress) {
validate();
if (localAddress == null) {
throw new NullPointerException("localAddress");
}
return doBind(localAddress);
}
private ChannelFuture doBind(final SocketAddress localAddress) {
final ChannelFuture regFuture = initAndRegister();
final Channel channel = regFuture.channel();
if (regFuture.cause() != null) {
return regFuture;
}
if (regFuture.isDone()) {
// At this point we know that the registration was complete and succesful.
ChannelPromise promise = channel.newPromise();
doBind0(regFuture, channel, localAddress, promise);
return promise;
} else {
// Registration future is almost always fulfilled already, but just in case it's not.
final PendingRegistrationPromise promise = new PendingRegistrationPromise(channel);
regFuture.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
Throwable cause = future.cause();
if (cause != null) {
// Registration on the EventLoop failed so fail the ChannelPromise directly to not cause an
// IllegalStateException once we try to access the EventLoop of the Channel.
promise.setFailure(cause);
} else {
// Registration was successful, so set the correct executor to use.
// See https://github.com/netty/netty/issues/2586
promise.executor = channel.eventLoop();
}
doBind0(regFuture, channel, localAddress, promise);
}
});
return promise;
}
}
final ChannelFuture initAndRegister() {
final Channel channel = channelFactory().newChannel();
try {
init(channel);
} catch (Throwable t) {
channel.unsafe().closeForcibly();
// as the Channel is not registered yet we need to force the usage of the GlobalEventExecutor
return new DefaultChannelPromise(channel, GlobalEventExecutor.INSTANCE).setFailure(t);
}
ChannelFuture regFuture = group().register(channel);
if (regFuture.cause() != null) {
if (channel.isRegistered()) {
channel.close();
} else {
channel.unsafe().closeForcibly();
}
}
// If we are here and the promise is not failed, it's one of the following cases:
// 1) If we attempted registration from the event loop, the registration has been completed at this point.
// i.e. It's safe to attempt bind() or connect() now because the channel has been registered.
// 2) If we attempted registration from the other thread, the registration request has been successfully
// added to the event loop's task queue for later execution.
// i.e. It's safe to attempt bind() or connect() now:
// because bind() or connect() will be executed *after* the scheduled registration task is executed
// because register(), bind(), and connect() are all bound to the same thread.
return regFuture;
}
abstract void init(Channel channel) throws Exception;
private static void doBind0(
final ChannelFuture regFuture, final Channel channel,
final SocketAddress localAddress, final ChannelPromise promise) {
// This method is invoked before channelRegistered() is triggered. Give user handlers a chance to set up
// the pipeline in its channelRegistered() implementation.
channel.eventLoop().execute(new Runnable() {
@Override
public void run() {
if (regFuture.isSuccess()) {
channel.bind(localAddress, promise).addListener(ChannelFutureListener.CLOSE_ON_FAILURE);
} else {
promise.setFailure(regFuture.cause());
}
}
});
}
/**
* the {@link ChannelHandler} to use for serving the requests.
*/
@SuppressWarnings("unchecked")
public B handler(ChannelHandler handler) {
if (handler == null) {
throw new NullPointerException("handler");
}
this.handler = handler;
return (B) this;
}
final SocketAddress localAddress() {
return localAddress;
}
@SuppressWarnings("deprecation")
final ChannelFactory<? extends C> channelFactory() {
return channelFactory;
}
final ChannelHandler handler() {
return handler;
}
/**
* Return the configured {@link EventLoopGroup} or {@code null} if non is configured yet.
*/
public final EventLoopGroup group() {
return group;
}
final Map<ChannelOption<?>, Object> options() {
return options;
}
final Map<AttributeKey<?>, Object> attrs() {
return attrs;
}
/**
 * Returns a human-readable summary of the configured state, e.g.
 * {@code Bootstrap(group: NioEventLoopGroup, handler: ...)}, or
 * {@code Bootstrap()} when nothing is configured.
 */
@Override
public String toString() {
    // Gather the configured pieces first, then join them with ", ".
    // This produces exactly the historical "Name(key: value, ...)" format
    // without patching a trailing separator afterwards.
    java.util.List<String> parts = new java.util.ArrayList<String>();
    if (group != null) {
        parts.add("group: " + StringUtil.simpleClassName(group));
    }
    if (channelFactory != null) {
        parts.add("channelFactory: " + channelFactory);
    }
    if (localAddress != null) {
        parts.add("localAddress: " + localAddress);
    }
    // The option/attribute maps are shared mutable state; hold their locks
    // while rendering them, as the rest of the class does.
    synchronized (options) {
        if (!options.isEmpty()) {
            parts.add("options: " + options);
        }
    }
    synchronized (attrs) {
        if (!attrs.isEmpty()) {
            parts.add("attrs: " + attrs);
        }
    }
    if (handler != null) {
        parts.add("handler: " + handler);
    }

    StringBuilder out = new StringBuilder();
    out.append(StringUtil.simpleClassName(this)).append('(');
    for (int i = 0; i < parts.size(); i++) {
        if (i > 0) {
            out.append(", ");
        }
        out.append(parts.get(i));
    }
    return out.append(')').toString();
}
private static final class PendingRegistrationPromise extends DefaultChannelPromise {

    // Assigned the channel's executor once registration succeeds. While it
    // is still null (registration pending or failed), notifications fall
    // back to the GlobalEventExecutor.
    private volatile EventExecutor executor;

    private PendingRegistrationPromise(Channel channel) {
        super(channel);
    }

    @Override
    protected EventExecutor executor() {
        // Read the volatile field once so the null check and the return see
        // the same value. See https://github.com/netty/netty/issues/2586
        EventExecutor e = this.executor;
        return e != null ? e : GlobalEventExecutor.INSTANCE;
    }
}
}
| {
"content_hash": "6aaa0dec9fad1c50ca4a4c622dd642fb",
"timestamp": "",
"source": "github",
"line_count": 463,
"max_line_length": 119,
"avg_line_length": 35.183585313174945,
"alnum_prop": 0.603806015960712,
"repo_name": "sunng87/netty",
"id": "e6ece5a79f1239743c9ddb216345e9a5c7b9a4bb",
"size": "16924",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "transport/src/main/java/io/netty/bootstrap/AbstractBootstrap.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?php
declare(strict_types=1);
namespace Nastoletni\Code\Domain;
/**
 * Abstraction over a source of Xkcd comics.
 */
interface XkcdRepository
{
    /**
     * Returns a randomly chosen Xkcd comic.
     *
     * @return Xkcd the random comic
     */
    public function getRandom(): Xkcd;
}
| {
"content_hash": "6b61b3acfc2d5236cea01cf007d5679f",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 38,
"avg_line_length": 14.2,
"alnum_prop": 0.6244131455399061,
"repo_name": "nastoletni/code",
"id": "153c02ab0760a32e7214b68bbafb1a2cd9cc2412",
"size": "213",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Domain/XkcdRepository.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1417"
},
{
"name": "HTML",
"bytes": "8852"
},
{
"name": "JavaScript",
"bytes": "6481"
},
{
"name": "PHP",
"bytes": "78498"
}
],
"symlink_target": ""
} |
package org.apache.lucene.queryparser.flexible.standard.builders;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
import org.apache.lucene.queryparser.flexible.standard.nodes.RegexpQueryNode;
import org.apache.lucene.queryparser.flexible.standard.processors.MultiTermRewriteMethodProcessor;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.RegexpQuery;
/**
 * Builds a {@link RegexpQuery} object from a {@link RegexpQueryNode} object.
 */
public class RegexpQueryNodeBuilder implements StandardQueryBuilder {

    public RegexpQueryNodeBuilder() {
        // empty constructor
    }

    public RegexpQuery build(QueryNode queryNode) throws QueryNodeException {
        RegexpQueryNode node = (RegexpQueryNode) queryNode;

        // The field name plus the regexp text form the term the query matches.
        Term term = new Term(node.getFieldAsString(), node.textToBytesRef());
        RegexpQuery query = new RegexpQuery(term);

        // An earlier processor may have attached a rewrite method as a tag
        // on the node; apply it only when present.
        Object rewrite = queryNode.getTag(MultiTermRewriteMethodProcessor.TAG_ID);
        if (rewrite != null) {
            query.setRewriteMethod((MultiTermQuery.RewriteMethod) rewrite);
        }

        return query;
    }
}
| {
"content_hash": "b04763802cf0df4246c32e0662391746",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 98,
"avg_line_length": 34.027027027027025,
"alnum_prop": 0.7601270849880858,
"repo_name": "terrancesnyder/solr-analytics",
"id": "14da91977bf2f233f63fe016c35d9c0e09c591e1",
"size": "2075",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/RegexpQueryNodeBuilder.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "13898"
},
{
"name": "Java",
"bytes": "31968690"
},
{
"name": "JavaScript",
"bytes": "1221046"
},
{
"name": "Perl",
"bytes": "81566"
},
{
"name": "Python",
"bytes": "179898"
},
{
"name": "Shell",
"bytes": "19867"
}
],
"symlink_target": ""
} |
package game
// Inventory holds a player's item stacks together with the indices of
// the slots currently active in the left and right hands.
type Inventory struct {
	slots []Stack // bag slots followed by four hand/reserve slots
	itemLeft int  // index into slots of the left-hand item
	itemRight int // index into slots of the right-hand item
}
// Bag grid dimensions and the per-stack item limit.
const (
	INV_WIDTH = 5  // bag columns
	INV_HEIGHT = 5 // bag rows
	MAX_STACK = 50 // maximum items in a single stack
)
// NewInventory builds a fully stocked inventory: the bag receives one stack
// of every item kind except spawn blocks, the left hand holds a gun, and the
// right hand holds a shovel with a reserve stack of dirt.
func NewInventory() *Inventory {
	// Slot layout:
	//   [0, w*h - 1] bag slots
	//   [w*h]        left equip
	//   [w*h + 1]    left reserve (left empty here)
	//   [w*h + 2]    right equip
	//   [w*h + 3]    right reserve
	const handBase = INV_WIDTH * INV_HEIGHT
	slots := make([]Stack, handBase+4)

	// Fill the bag with every obtainable item kind.
	next := 0
	for item := range EveryItem() {
		// Don't give people spawn blocks...
		if item == ITEM_SPAWN {
			continue
		}
		if item.Stackable() {
			slots[next] = NewStackOf(item, MAX_STACK)
		} else {
			slots[next] = NewStack(item)
		}
		next++
	}

	// Hands.
	slots[handBase] = NewStack(ITEM_GUN)
	slots[handBase+2] = NewStack(ITEM_SHOVEL)
	slots[handBase+3] = NewStackOf(ITEM_DIRT, MAX_STACK)

	return &Inventory{
		slots:     slots,
		itemLeft:  handBase,
		itemRight: handBase + 2,
	}
}
// SetActiveItems selects which slots are equipped in the left and right
// hands. Both arguments are indices into the slots slice; no bounds
// checking is performed here.
func (inv *Inventory) SetActiveItems(left, right int) {
	inv.itemLeft = left
	inv.itemRight = right
}
// LeftItem returns the item kind currently equipped in the left hand.
func (inv *Inventory) LeftItem() Item {
	return inv.slots[inv.itemLeft].item
}
// RightItem returns the item kind currently equipped in the right hand.
func (inv *Inventory) RightItem() Item {
	return inv.slots[inv.itemRight].item
}
// MoveItems exchanges the contents of two slots. Despite the name this is a
// swap, not a one-way move: the stack previously at to ends up at from.
func (inv *Inventory) MoveItems(from, to int) {
	// Go's parallel assignment swaps in place without a temp variable.
	inv.slots[from], inv.slots[to] = inv.slots[to], inv.slots[from]
}
// findItemOfKind returns the index of the first slot holding the given
// item kind, or -1 when no slot contains it.
func (inv *Inventory) findItemOfKind(item Item) int {
	for idx := range inv.slots {
		if inv.slots[idx].item == item {
			return idx
		}
	}
	return -1
}
// AddItem adds a single item to the inventory. It returns true if the
// addition succeeded, false when there is no room left.
func (inv *Inventory) AddItem(item Item) bool {
	// First try to top up an existing, non-full stack of the same kind.
	// The scan runs back-to-front, so the hand/reserve slots at the end of
	// the slice are preferred over bag slots.
	for idx := len(inv.slots) - 1; idx >= 0; idx-- {
		if s := inv.slots[idx]; s.item == item && s.num < MAX_STACK {
			inv.slots[idx].num++
			return true
		}
	}
	// No partial stack available: start a new stack in the first empty slot.
	if empty := inv.findItemOfKind(ITEM_NIL); empty >= 0 {
		inv.slots[empty] = NewStack(item)
		return true
	}
	return false
}
// RemoveItem removes a single item of the given kind from the inventory.
// It returns true if an item was removed, false when the kind is ITEM_NIL
// or not present anywhere.
func (inv *Inventory) RemoveItem(item Item) bool {
	if item == ITEM_NIL {
		return false
	}
	// Back-to-front scan, matching the slot preference used when adding.
	for idx := len(inv.slots) - 1; idx >= 0; idx-- {
		if inv.slots[idx].item == item {
			inv.lowerStack(idx)
			return true
		}
	}
	return false
}
// lowerStack decrements the stack count at slot i; when the last item is
// removed the slot is cleared back to an empty ITEM_NIL entry.
func (inv *Inventory) lowerStack(i int) {
	if inv.slots[i].num <= 1 {
		// Last item gone: reset the slot so it reads as empty.
		inv.slots[i].num = 0
		inv.slots[i].item = ITEM_NIL
		return
	}
	inv.slots[i].num--
}
// ItemsToByteArray serialises the inventory as a flat byte slice with two
// bytes per slot: the item kind followed by the stack count.
func (inv *Inventory) ItemsToByteArray() []byte {
	out := make([]byte, 2*len(inv.slots))
	for idx, s := range inv.slots {
		out[2*idx] = byte(s.item)
		out[2*idx+1] = byte(s.num)
	}
	return out
}
| {
"content_hash": "2c11939d710b5f041d9b8e70c397c5c8",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 90,
"avg_line_length": 21.75968992248062,
"alnum_prop": 0.6316351977199858,
"repo_name": "crazy2be/buildblast",
"id": "ad5a169a511268cd9c4514e749dfbb914ee76941",
"size": "2807",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/lib/game/inventory.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "375914"
},
{
"name": "Go",
"bytes": "205733"
},
{
"name": "HTML",
"bytes": "10740"
},
{
"name": "JavaScript",
"bytes": "1089836"
},
{
"name": "PLpgSQL",
"bytes": "3277"
},
{
"name": "Ruby",
"bytes": "4589"
},
{
"name": "Shell",
"bytes": "3919"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.