code stringlengths 2 1.05M | repo_name stringlengths 5 101 | path stringlengths 4 991 | language stringclasses 3 values | license stringclasses 5 values | size int64 2 1.05M |
|---|---|---|---|---|---|
# Copyright 2018 The OpenSSL Project Authors. All Rights Reserved.
#
# Licensed under the Apache License 2.0 (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.openssl.org/source/license.html
use strict;
package TLSProxy::Alert;
# Constructor.  Records where the alert came from (server vs client),
# whether it arrived encrypted, and its TLS level/description codes.
sub new
{
    my ($class, $server, $encrypted, $level, $description) = @_;

    return bless {
        server      => $server,
        encrypted   => $encrypted,
        level       => $level,
        description => $description
    }, $class;
}
# Read-only accessors for the constructor-supplied attributes.
sub server      { return $_[0]->{server};      }
sub encrypted   { return $_[0]->{encrypted};   }
sub level       { return $_[0]->{level};       }
sub description { return $_[0]->{description}; }
1;
| openssl/openssl | util/perl/TLSProxy/Alert.pm | Perl | apache-2.0 | 952 |
# A quite dimwitted pod2plaintext that need only know how to format whatever
# text comes out of Pod::BlackBox's _gen_errata
require 5;
package Pod::Simple::Checker;
use strict;
use Carp ();
use Pod::Simple::Methody ();
use Pod::Simple ();
use vars qw( @ISA $VERSION );
$VERSION = '3.26';
@ISA = ('Pod::Simple::Methody');
BEGIN { *DEBUG = defined(&Pod::Simple::DEBUG)
? \&Pod::Simple::DEBUG
: sub() {0}
}
use Text::Wrap 98.112902 (); # was 2001.0131, but I don't think we need that
$Text::Wrap::wrap = 'overflow';
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Read-only accessor: true once a "POD ERRORS" section has been seen.
# This is called as a method ($checker->any_errata_seen), so the object
# is $_[0]; the original read $_[1], which is always undef here and made
# the accessor (and the SYNOPSIS exit-code idiom) always report false.
sub any_errata_seen {
  return $_[0]->{'Errata_seen'};
}
# Construct a new checker: delegate to Pod::Simple::Methody's
# constructor, then configure the parser so that only the errata
# ("POD ERRORS") section ends up being formatted.
sub new {
  my $self = shift;
  my $new = $self->SUPER::new(@_);
  $new->{'output_fh'} ||= *STDOUT{IO};  # default destination: STDOUT
  $new->nix_X_codes(1);                 # discard X<> index entries
  $new->nbsp_for_S(1);                  # render S<> using non-breaking spaces
  $new->{'Thispara'} = '';              # text accumulated for the current paragraph
  $new->{'Indent'} = 0;                 # current =over nesting depth
  $new->{'Indentstring'} = ' ';         # NOTE(review): set but not read in this file
  $new->{'Errata_seen'} = 0;            # flips to 1 at the errata head1
  return $new;
}
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Accumulate paragraph text, but only once the errata section has
# started; all document text before that point is discarded.
sub handle_text {  $_[0]{'Errata_seen'} and $_[0]{'Thispara'} .= $_[1] }

sub start_Para  {  $_[0]{'Thispara'} = '' }

# head1 is where the errata section begins: Pod::Simple tags that
# heading's attributes with 'errata'.  From then on text is collected,
# prefixed with the source filename when one is known.
sub start_head1 {
  if($_[0]{'Errata_seen'}) {
    $_[0]{'Thispara'} = '';
  } else {
    if($_[1]{'errata'}) { # start of errata!
      $_[0]{'Errata_seen'} = 1;
      $_[0]{'Thispara'} = $_[0]{'source_filename'} ?
        "$_[0]{'source_filename'} -- " : ''
    }
  }
}

# These handlers simply reset (or seed) the paragraph buffer.
sub start_head2 {  $_[0]{'Thispara'} = '' }
sub start_head3 {  $_[0]{'Thispara'} = '' }
sub start_head4 {  $_[0]{'Thispara'} = '' }

sub start_Verbatim    { $_[0]{'Thispara'} = ''  }
sub start_item_bullet { $_[0]{'Thispara'} = '* ' }
sub start_item_number { $_[0]{'Thispara'} = "$_[1]{'number'}. "  }
sub start_item_text   { $_[0]{'Thispara'} = ''  }

# Track =over/=back nesting depth, which drives indentation.
sub start_over_bullet { ++$_[0]{'Indent'} }
sub start_over_number { ++$_[0]{'Indent'} }
sub start_over_text   { ++$_[0]{'Indent'} }
sub start_over_block  { ++$_[0]{'Indent'} }
sub end_over_bullet   { --$_[0]{'Indent'} }
sub end_over_number   { --$_[0]{'Indent'} }
sub end_over_text     { --$_[0]{'Indent'} }
sub end_over_block    { --$_[0]{'Indent'} }

# . . . . . Now the actual formatters:
# Headings pass a negative indent tweak so they out-dent relative to
# body text (see emit_par).
sub end_head1       { $_[0]->emit_par(-4) }
sub end_head2       { $_[0]->emit_par(-3) }
sub end_head3       { $_[0]->emit_par(-2) }
sub end_head4       { $_[0]->emit_par(-1) }
sub end_Para        { $_[0]->emit_par( 0) }
sub end_item_bullet { $_[0]->emit_par( 0) }
sub end_item_number { $_[0]->emit_par( 0) }
sub end_item_text   { $_[0]->emit_par(-2) }
# Wrap and print the accumulated paragraph, indented two spaces per
# =over level plus the caller's tweak (negative for headings).  No-op
# until the errata section has started.
sub emit_par {
  return unless $_[0]{'Errata_seen'};
  my($self, $tweak_indent) = splice(@_,0,2);
  my $indent = ' ' x ( 2 * $self->{'Indent'} + ($tweak_indent||0) );
   # Yes, 'STRING' x NEGATIVE gives '', same as 'STRING' x 0

  $self->{'Thispara'} =~ tr{\xAD}{}d if Pod::Simple::ASCII;  # strip soft hyphens
  my $out = Text::Wrap::wrap($indent, $indent, $self->{'Thispara'} .= "\n");
  $out =~ tr{\xA0}{ } if Pod::Simple::ASCII;                 # NBSP -> plain space
  print {$self->{'output_fh'}} $out,
    #"\n"
  ;
  $self->{'Thispara'} = '';
  return;
}
# . . . . . . . . . . And then off by its lonesome:
# . . . . . . . . . . And then off by its lonesome:
# Verbatim paragraphs are not wrapped: every line is indented by the
# =over depth plus four spaces and printed as-is.
sub end_Verbatim {
  return unless $_[0]{'Errata_seen'};
  my $self = shift;
  if(Pod::Simple::ASCII) {
    $self->{'Thispara'} =~ tr{\xA0}{ };   # NBSP -> plain space
    $self->{'Thispara'} =~ tr{\xAD}{}d;   # strip soft hyphens
  }

  my $i = ' ' x ( 2 * $self->{'Indent'} + 4);

  $self->{'Thispara'} =~ s/^/$i/mg;       # indent every line

  print { $self->{'output_fh'} } '',
    $self->{'Thispara'},
    "\n\n"
  ;
  $self->{'Thispara'} = '';
  return;
}
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
1;
__END__
=head1 NAME
Pod::Simple::Checker -- check the Pod syntax of a document
=head1 SYNOPSIS
perl -MPod::Simple::Checker -e \
"exit Pod::Simple::Checker->filter(shift)->any_errata_seen" \
thingy.pod
=head1 DESCRIPTION
This class is for checking the syntactic validity of Pod.
It works by basically acting like a simple-minded version of
L<Pod::Simple::Text> that formats only the "Pod Errors" section
(if Pod::Simple even generates one for the given document).
This is a subclass of L<Pod::Simple> and inherits all its methods.
=head1 SEE ALSO
L<Pod::Simple>, L<Pod::Simple::Text>, L<Pod::Checker>
=head1 SUPPORT
Questions or discussion about POD and Pod::Simple should be sent to the
pod-people@perl.org mail list. Send an empty email to
pod-people-subscribe@perl.org to subscribe.
This module is managed in an open GitHub repository,
L<https://github.com/theory/pod-simple/>. Feel free to fork and contribute, or
to clone L<git://github.com/theory/pod-simple.git> and send patches!
Patches against Pod::Simple are welcome. Please send bug reports to
<bug-pod-simple@rt.cpan.org>.
=head1 COPYRIGHT AND DISCLAIMERS
Copyright (c) 2002 Sean M. Burke.
This library is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
This program is distributed in the hope that it will be useful, but
without any warranty; without even the implied warranty of
merchantability or fitness for a particular purpose.
=head1 AUTHOR
Pod::Simple was created by Sean M. Burke <sburke@cpan.org>.
But don't bother him, he's retired.
Pod::Simple is maintained by:
=over
=item * Allison Randal C<allison@perl.org>
=item * Hans Dieter Pearcey C<hdp@cpan.org>
=item * David E. Wheeler C<dwheeler@cpan.org>
=back
=cut
| Dokaponteam/ITF_Project | xampp/perl/lib/Pod/Simple/Checker.pm | Perl | mit | 5,366 |
# Theodore Rambert, Gabe Kahen, Tim Lam
# Fmem-PFE
# Fmem Perl Front End
# Version 1.2
# Released Under the MIT License
# Uses Fmem (GPL), Perl (GPL), DD (GPLv3+) & Tk (BSD-style)
#!/usr/local/bin/perl
use strict;
use warnings;
use Tk;
use Tk::DirTree;
use Tk::Dialog;
my $text;                 # free-form notes typed by the user in the GUI
my($memsize, $curr_dir);  # image size in MB / destination directory
# Checkbox-backed option flags; "1" means checked by default.
my @flags = (
    "1", # pass conv=noerror to dd
    "1", # pass sync to dd
    "1", # generate md5sum
    "1"  # use date as name
);
# Starting directory for the destination chooser.
$curr_dir = "";
# Reading /dev/fmem requires root privileges, so bail out early.
if (index(`whoami`, "root") < 0)
{
    error("Requires root!");
    warn "[Error] Must be run as root.\n";
    exit;
}
# The fmem kernel module must be loaded (it provides /dev/fmem).
if (index(`stat /dev/fmem 2>&1`, "cannot stat") >= 0)
{
    error("Fmem not found.");
    warn "[Error] Fmem not found.\n";
    warn "You can download it from http://hysteria.sk/~niekt0/foriana/fmem_current.tgz\n";
    exit;
}
# Default the image size to the installed RAM (in MB) reported by free(1).
my $raw_mem_start = `free -m | grep Mem | awk '{ print \$2}'`;
warn "[Info] Raw memory line: $raw_mem_start";
$memsize = $raw_mem_start;
#create the file browsing window, then hide it
my $top = new MainWindow;
$top->withdraw;
#Create Window
my $mw = MainWindow->new;
my $w = $mw->Frame->pack(-side => 'top', -fill => 'x');
#Save To?
$w->Label(-text => "Destination:")->
pack(-side => 'left', -anchor => 'e');
$w->Entry(-textvariable => \$curr_dir)->
pack(-side => 'left', -anchor => 'e', -fill => 'x', -expand => 1);
$w->Button(-text => "Choose", -command => \&dir)->
pack(-side=> 'left', -anchor => 'e');
#Size?
my $w2 = $mw->Frame->pack(-side => 'top', -fill => 'x');
$w2->Label(-text => "Size in MB:")->
pack(-side => 'left', -anchor => 'e');
$w2->Entry(-textvariable => \$memsize)->
pack(-side => 'left', -anchor => 'e', -fill => 'x', -expand => 1);
my $option1 = $mw->Frame->pack(-side => 'left');
$option1->Checkbutton (-text=>"noerror", -variable=>\$flags[0])->pack(-side => 'top', -anchor => 'w');
my $option2 = $mw->Frame->pack(-side => 'right');
$option2->Checkbutton (-text=>"sync", -variable=>\$flags[1])->pack(-side => 'top', -anchor => 'e');
my $option3 = $mw->Frame->pack(-side => 'top');
$option3->Checkbutton (-text=>"generate md5sum", -variable=>\$flags[2])->pack();
my $option4 = $mw->Frame->pack(-side => 'top');
$option4->Checkbutton (-text=>"use date as filename", -variable=>\$flags[3])->pack();
#note area
#enter notes here
$mw->title("Text Entry");
$mw->Label(
-text => "Enter Notes here:")->pack();
#Future To-Do
#$mw->Text(-width => '50', -height => '10', -textvariable => \$text) -> pack();
$mw->Entry(
-width => 50,
-textvariable => \$text)->pack();
#Fancy Buttons
my $w3 = $mw->Frame->pack(-side => 'top', -fill => 'x');
$w3->Button(-text => "Copy", -command => \&mem, qw/-background cyan/)->
pack(-side => 'left');
$w3->Button(-text => "Exit", -command => \&quit, qw/-background red/)->
pack(-side => 'right', -anchor => 'w');
MainLoop;
# Acquire a memory image from /dev/fmem with dd into $curr_dir,
# honouring the GUI flags:
#   $flags[0] - pass conv=noerror to dd (keep going on read errors)
#   $flags[1] - pass conv=sync to dd (pad short reads)
#   $flags[2] - write an md5 digest (plus notes, if any) beside the image
# Uses the globals $curr_dir, $memsize, $text and @flags set by the GUI.
sub mem
{
    # Refuse to run without a destination directory or a positive,
    # purely numeric size in MB.
    if(!defined $curr_dir || !defined $memsize || !($memsize =~ /^[+]?\d+$/))
    {
        error("Undefined directory or bad memory size");
        warn "[Error] Undefined directory or memory size\n";
        exit;
    }

    my $date = `date`;
    $date =~ s/\s//g;           # datestamp with all whitespace removed
    chomp($memsize);
    $curr_dir .= "/$date\_memory.dd";
    warn "[Info] Writing to: $curr_dir\n";

    # Assemble dd's conversion options from the selected flags.  Note
    # that "sync" is a conv= value: the old code passed a bare "sync"
    # operand, which dd rejects as an unrecognized operand.
    my @conv;
    push @conv, 'noerror' if $flags[0] eq 1;
    push @conv, 'sync'    if $flags[1] eq 1;
    my $conv = @conv ? ' conv=' . join(',', @conv) : '';

    my $cmd = "dd if=/dev/fmem of=$curr_dir bs=1M count=$memsize$conv";
    warn "[Info] Running: $cmd\n";
    my $output = `$cmd 2>/dev/null`;

    # Optionally record an md5 digest -- and the user's notes, when
    # present -- in a companion file next to the image.
    if(-e $curr_dir && $flags[2] eq 1)
    {
        if(defined $text)
        {
            my $file = $curr_dir . "_notes.txt";
            `openssl dgst -md5 $curr_dir > $file`;
            system("echo 'Notes for $curr_dir:' $text >> $file");
        }
        else
        {
            `openssl dgst -md5 $curr_dir > $curr_dir.md5.txt`;
        }
    }

    # Reset the destination so further images can be taken.
    $curr_dir = "/";
}
# Callback for the Exit button: terminate the whole application.
sub quit
{
    exit;
}
# Pop up a directory-chooser window.  The selection is written to the
# global $curr_dir as the user browses (via -browsecmd).
sub dir
{
    # NOTE(review): a fresh MainWindow is created (and only withdrawn)
    # on every invocation, so hidden windows accumulate until exit --
    # confirm this is acceptable for the app's lifetime.
    $top = new MainWindow;
    $top->withdraw;
    #create the window...
    my $t = $top->Toplevel;
    $t->title("Choose Output Folder");
    my $ok = 0;
    my $f = $t->Frame->pack(-fill => "x", -side => "bottom");
    my $d;
    $d = $t->Scrolled('DirTree',
        -scrollbars => 'osoe',
        -width => 35,
        -height => 20,
        -selectmode => 'browse',
        -exportselection =>1,
        -browsecmd => sub { $curr_dir = shift },  # live-update the destination
        -command => sub { $ok = 1; },
        )->pack(-fill => "both", -expand => 1);
    $d->chdir($curr_dir);    # start browsing from the current selection
    $f->Button(-text => 'Ok',
        -command => sub { $top->destroy; }) ->pack(-side => 'left');;
}
# Pop up a modal error dialog showing the supplied message.
sub error
{
    my ($message) = @_;

    # The dialog needs a parent window; keep it hidden.
    my $err = MainWindow->new;
    $err->withdraw;

    $err->Dialog(
        -title => "Error",
        -text  => $message,
    )->Show();
}
| theodorerambert/fmem-pfe | Fmem-PFE_v1.2.pl | Perl | mit | 5,830 |
use strict;
use Data::Dumper;
use Carp;
#
# This is a SAS Component
#
=head1 NAME
get_relationship_IsComprisedOf
=head1 SYNOPSIS
get_relationship_IsComprisedOf [-c N] [-a] [--fields field-list] < ids > table.with.fields.added
=head1 DESCRIPTION
This relationship connects a biomass composition reaction to the
compounds specified as contained in the biomass.
Example:
get_relationship_IsComprisedOf -a < ids > table.with.fields.added
would read in a file of ids and add a column for each field in the relationship.
The standard input should be a tab-separated table (i.e., each line
is a tab-separated set of fields). Normally, the last field in each
line would contain the id. If some other column contains the id,
use
-c N
where N is the column (from 1) that contains the id.
This is a pipe command. The input is taken from the standard input, and the
output is to the standard output.
=head1 COMMAND-LINE OPTIONS
Usage: get_relationship_IsComprisedOf [arguments] < ids > table.with.fields.added
=over 4
=item -c num
Select the identifier from column num
=item -from field-list
Choose a set of fields from the Biomass
entity to return. Field-list is a comma-separated list of strings. The
following fields are available:
=over 4
=item id
=item mod_date
=item name
=item dna
=item protein
=item cell_wall
=item lipid
=item cofactor
=item energy
=back
=item -rel field-list
Choose a set of fields from the relationship to return. Field-list is a comma-separated list of
strings. The following fields are available:
=over 4
=item from_link
=item to_link
=item coefficient
=back
=item -to field-list
Choose a set of fields from the CompoundInstance entity to return. Field-list is a comma-separated list of
strings. The following fields are available:
=over 4
=item id
=item charge
=item formula
=back
=back
=head1 AUTHORS
L<The SEED Project|http://www.theseed.org>
=cut
use Bio::KBase::Utilities::ScriptThing;
use Bio::KBase::CDMI::CDMIClient;
use Getopt::Long;
#Default fields
my @all_from_fields = ( 'id', 'mod_date', 'name', 'dna', 'protein', 'cell_wall', 'lipid', 'cofactor', 'energy' );
my @all_rel_fields = ( 'from_link', 'to_link', 'coefficient' );
my @all_to_fields = ( 'id', 'charge', 'formula' );
my %all_from_fields = map { $_ => 1 } @all_from_fields;
my %all_rel_fields = map { $_ => 1 } @all_rel_fields;
my %all_to_fields = map { $_ => 1 } @all_to_fields;
my @default_fields = ('from-link', 'to-link');
my @from_fields;
my @rel_fields;
my @to_fields;
our $usage = <<'END';
Usage: get_relationship_IsComprisedOf [arguments] < ids > table.with.fields.added
--show-fields
List the available fields.
-c num
Select the identifier from column num
--from field-list
Choose a set of fields from the Biomass
entity to return. Field-list is a comma-separated list of strings. The
following fields are available:
id
mod_date
name
dna
protein
cell_wall
lipid
cofactor
energy
--rel field-list
Choose a set of fields from the relationship to return. Field-list is a comma-separated list of
strings. The following fields are available:
from_link
to_link
coefficient
--to field-list
Choose a set of fields from the CompoundInstance entity to
return. Field-list is a comma-separated list of strings. The following fields are available:
id
charge
formula
END
my $column;
my $input_file;
my $a;
my $f;
my $r;
my $t;
my $help;
my $show_fields;
my $i = "-";
my $geO = Bio::KBase::CDMI::CDMIClient->new_get_entity_for_script("c=i" => \$column,
"h" => \$help,
"show-fields" => \$show_fields,
"a" => \$a,
"from=s" => \$f,
"rel=s" => \$r,
"to=s" => \$t,
'i=s' => \$i);
if ($help) {
print $usage;
exit 0;
}
if ($show_fields)
{
print "from fields:\n";
print " $_\n" foreach @all_from_fields;
print "relation fields:\n";
print " $_\n" foreach @all_rel_fields;
print "to fields:\n";
print " $_\n" foreach @all_to_fields;
exit 0;
}
# -a (all fields) may not be combined with explicit field lists.
if ($a && ($f || $r || $t)) {die $usage};
if ($a) {
    # Return every available field from all three parts.
    @from_fields = @all_from_fields;
    @rel_fields = @all_rel_fields;
    @to_fields = @all_to_fields;
} elsif ($f || $t || $r) {
    # Validate each user-supplied comma-separated field list and abort
    # if any name is unknown.
    my $err = 0;
    if ($f) {
        @from_fields = split(",", $f);
        $err += check_fields(\@from_fields, %all_from_fields);
    }
    if ($r) {
        @rel_fields = split(",", $r);
        $err += check_fields(\@rel_fields, %all_rel_fields);
    }
    if ($t) {
        @to_fields = split(",", $t);
        $err += check_fields(\@to_fields, %all_to_fields);
    }
    if ($err) {exit 1;}
} else {
    # Default: just the from-link/to-link columns of the relationship.
    @rel_fields = @default_fields;
}

# Read ids from $input_file if set, otherwise from standard input.
# NOTE(review): the -i option is parsed into $i but $input_file is
# never assigned, so this branch appears unreachable -- confirm intent.
my $ih;
if ($input_file)
{
    open $ih, "<", $input_file or die "Cannot open input file $input_file: $!";
}
else
{
    $ih = \*STDIN;
}
# Main loop: read ids in batches from the input, query the relationship
# for the whole batch, then echo each input line with the requested
# fields appended (one output line per matching result row).
while (my @tuples = Bio::KBase::Utilities::ScriptThing::GetBatch($ih, undef, $column)) {
    my @h = map { $_->[0] } @tuples;
    my $h = $geO->get_relationship_IsComprisedOf(\@h, \@from_fields, \@rel_fields, \@to_fields);
    my %results;
    # Each result is a [from-entity, relationship, to-entity] triple of
    # hashes; collect the requested field values and group the rows by
    # their from-link id so they can be matched back to the input lines.
    for my $result (@$h) {
        my @from;
        my @rel;
        my @to;
        my $from_id;
        my $res = $result->[0];
        for my $key (@from_fields) {
            push (@from,$res->{$key});
        }
        $res = $result->[1];
        $from_id = $res->{'from_link'};
        for my $key (@rel_fields) {
            push (@rel,$res->{$key});
        }
        $res = $result->[2];
        for my $key (@to_fields) {
            push (@to,$res->{$key});
        }
        if ($from_id) {
            push @{$results{$from_id}}, [@from, @rel, @to];
        }
    }
    # Emit the original input line plus each matching result, tab-separated.
    # Ids with no results produce no output.
    for my $tuple (@tuples)
    {
        my($id, $line) = @$tuple;
        my $resultsForId = $results{$id};
        if ($resultsForId) {
            for my $result (@$resultsForId) {
                print join("\t", $line, @$result) . "\n";
            }
        }
    }
}
# Validate a list of requested field names against the set of known
# fields.  Prints a diagnostic naming any unrecognized fields to
# STDERR.  Returns 1 when at least one field is unknown, 0 otherwise.
sub check_fields {
    my ($fields, %all_fields) = @_;
    my @err = grep { !$all_fields{$_} } @$fields;
    if (@err) {
        my @f = keys %all_fields;
        print STDERR "get_relationship_IsComprisedOf: unknown fields @err. Valid fields are @f\n";
        return 1;
    }
    return 0;
}
| kbase/kb_seed | scripts/get_relationship_IsComprisedOf.pl | Perl | mit | 6,179 |
/*************************************************************************
File: presupDRT.pl
Copyright (C) 2004 Patrick Blackburn & Johan Bos
This file is part of BB2, version 1.0 (June 2004).
BB2 is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
BB2 is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with BB2; if not, write to the Free Software Foundation, Inc.,
59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*************************************************************************/
:- module(presupDRT,[presupDRT/0,
presupDRT/3,
presupDRTTestSuite/0,
infix/0,
prefix/0]).
:- use_module(readLine,[readLine/1]).
:- use_module(comsemPredicates,[prefix/0,
infix/0,
compose/3,
memberList/2,
reverseList/2,
removeFirst/3,
printRepresentations/1]).
:- use_module(betaConversionDRT,[betaConvert/2]).
:- use_module(mergeDRT,[mergeDrs/2]).
:- use_module(presupTestSuite,[discourse/2]).
:- use_module(bindingViolation,[bindingViolationDrs/1]).
:- use_module(freeVarCheck,[freeVarCheckDrs/1]).
:- use_module(sortalCheck,[sortalCheckDrs/1]).
:- [englishGrammar].
:- [englishLexicon].
:- [semLexPresupDRT].
:- [semRulesDRT].
% '^'(V,G): for every solution of goal G, print term V.
% NOTE(review): this defines ^/2 as an ordinary predicate, shadowing the
% conventional Var^Goal existential reading used with bagof/setof.  It is
% also what gets called by the Drs^(...) goal inside findall in
% presupDRT/3 below -- confirm this behaviour is intended.
'^'(V,G):-forall((G),writeln(V)).
/*========================================================================
Driver Predicates
========================================================================*/
% presupDRT/0: read a discourse from standard input, interpret it
% against an empty context DRS, and print all resulting DRSs.
presupDRT:-
   readLine(Discourse),
   presupDRT(Discourse,drs([],[]),DRSs),
   printRepresentations(DRSs).

% presupDRT(+Text,+OldDrs,-NewDrss): parse Text with the English
% grammar, merge each reading with the context OldDrs, resolve all
% presuppositions, and collect the resolved DRSs.  Fails if no
% reading survives resolution.
presupDRT(Text,Old,New):-
   findall(Sem,Drs^(t([sem:Drs],Text,[]),resolveDrs(merge(Old,Drs),Sem)),New),
   \+ New=[].
/*========================================================================
Test Suite Predicates
========================================================================*/
% presupDRTTestSuite/0: run the interpreter over every discourse in the
% test suite, printing the computed DRSs alongside the expected number
% of readings.  Uses a failure-driven loop; the final unit clause makes
% the predicate succeed after all discourses are processed.
presupDRTTestSuite:-
   nl, write('>>>>> PRESUP-DRT ON TEST SUITE <<<<<'), nl,
   discourse(Discourse,Readings),
   format('~nDiscourse: ~p (~p readings)',[Discourse,Readings]),
   presupDRT(Discourse,drs([],[]),DRSs),
   printRepresentations(DRSs),
   fail.

presupDRTTestSuite.
/*========================================================================
Pronoun Resolution
========================================================================*/
% resolveDrs(+DRS,-Resolved)
% Clause 1: no alpha (presuppositional) DRS left to project; check that
% no binding violation remains and return the fully merged DRS.
resolveDrs(B,R):-
   \+ findAlfaDrs(B,_,_,_,[]-_),
   \+ bindingViolationDrs(B),
   mergeDrs(B,R).

% Clause 2: locate the first alpha-DRS, resolve it (by binding or
% accommodation), then recurse on the partially resolved structure.
resolveDrs(ADRS,DRS):-
   findAlfaDrs(ADRS,RDRS,alfa(Type,Alfa),Ac,[]-Bi),
   resolveAlfa(Alfa,Type,Ac,Bi,RDRS),
   resolveDrs(RDRS,DRS).
/*========================================================================
Find First Alfa-DRS (DRSs)
========================================================================*/
% findAlfaDrs(+DRS,-OutDRS,-Alfa,-AccSites,+BindIn-BindOut)
% Locate the first (innermost-leftmost) alpha-DRS in a possibly merged
% DRS.  OutDRS is the input with the alpha replaced by placeholder
% structure; AccSites collects possible accommodation sites (a/1 terms)
% and the difference pair threads the binding sites (r/2 terms) found
% along the projection path.
findAlfaDrs(alfa(T,B1,B2),alfa(T,R1,B2),Alfa,Ac,Bi1-Bi2):-
   findAlfaDrs(B1,R1,Alfa,Ac,Bi1-Bi2), !.
findAlfaDrs(alfa(T,B1,B2),merge(A,B2),alfa(T,M1),[a(A)],Bi-Bi):-
   mergeDrs(B1,M1).
findAlfaDrs(merge(B1,B),merge(R1,B),Alfa,Ac,Bi1-Bi2):-
   findAlfaDrs(B1,R1,Alfa,Ac,Bi1-Bi2), !.
findAlfaDrs(merge(B1,B2),merge(R1,R2),Alfa,Ac,Bi1-Bi2):-
   findAlfaDrs(B2,R2,Alfa,Ac,[r(M1,R1)|Bi1]-Bi2),
   mergeDrs(B1,M1).
findAlfaDrs(drs(D,C1),merge(A,R),Alfa,[a(A)|Ac],Bi1-Bi2):-
   findAlfaConds(C1,C2,Alfa,Ac,[r(drs(D,C2),R)|Bi1]-Bi2).
/*========================================================================
Find First Alfa-DRS (DRS-Conditions)
========================================================================*/
% findAlfaConds(+Conds,-OutConds,-Alfa,-AccSites,+BindIn-BindOut)
% Locate the first alpha-DRS inside a list of DRS-conditions, descending
% into the sub-DRSs of imp/2, or/2 and not/1.  For the consequent of an
% implication, the (merged) antecedent is added as an additional
% binding/accommodation site, allowing intermediate accommodation.
findAlfaConds([imp(B1,B)|C],[imp(B2,B)|C],Alfa,Ac,Bi1-Bi2):-
   findAlfaDrs(B1,B2,Alfa,Ac,Bi1-Bi2), !.
findAlfaConds([imp(B1,B2)|C],[imp(merge(R1,A),R2)|C],Alfa,[a(A)|Ac],Bi1-Bi2):-
   findAlfaDrs(B2,R2,Alfa,Ac,[r(M1,R1)|Bi1]-Bi2), !,
   mergeDrs(B1,M1).
findAlfaConds([or(B1,B)|C],[or(B2,B)|C],Alfa,Ac,Bi1-Bi2):-
   findAlfaDrs(B1,B2,Alfa,Ac,Bi1-Bi2), !.
findAlfaConds([or(B,B1)|C],[or(B,B2)|C],Alfa,Ac,Bi1-Bi2):-
   findAlfaDrs(B1,B2,Alfa,Ac,Bi1-Bi2), !.
findAlfaConds([not(B1)|C],[not(B2)|C],Alfa,Ac,Bi1-Bi2):-
   findAlfaDrs(B1,B2,Alfa,Ac,Bi1-Bi2), !.
findAlfaConds([X|C1],[X|C2],Alfa,Ac,Bi1-Bi2):-
   findAlfaConds(C1,C2,Alfa,Ac,Bi1-Bi2).
/*========================================================================
Resolve Alfa
========================================================================*/
% resolveAlfa(+Alfa,+Type,+AccSites,+BindSites,+DRS)
% Resolve one alpha-DRS: either bind it to material at some accessible
% binding site (then the result must pass the sortal check), or
% accommodate it at some accommodation site (then the result must pass
% the free-variable check).
resolveAlfa(Alfa,_,Ac,Bi,B):-
   bindAlfa(Bi,Alfa),
   dontResolve(Ac),
   sortalCheckDrs(B).
resolveAlfa(Alfa,Type,Ac,Bi,B):-
   accommodateAlfa(Ac,Type,Alfa),
   dontResolve(Bi),
   freeVarCheckDrs(B).
/*------------------------------------------------------------------------
Binding
------------------------------------------------------------------------*/
% bindAlfa(+Path,+AlfaDrs)
% Bind the alpha's discourse referent X to a referent already present
% at one binding site on the projection path, merging the alpha's
% referents and conditions into that site; all remaining sites on the
% path are left untouched (dontResolve).
bindAlfa([r(drs(D2,C2),drs(D3,C3))|P],drs([X|D1],C1)):-
   memberList(X,D2),
   mergeLists(D1,D2,D3),
   mergeLists(C1,C2,C3),
   dontResolve(P).
% Skip a binding site (leave it unchanged) and try further up the path.
bindAlfa([r(R,R)|P],Alfa):-
   bindAlfa(P,Alfa).
% Skip an accommodation site (filled with an empty DRS).
bindAlfa([a(drs([],[]))|P],Alfa):-
   bindAlfa(P,Alfa).
/*------------------------------------------------------------------------
Accommodation
------------------------------------------------------------------------*/
% accommodateAlfa(+Sites,+Type,+AlfaDrs)
% Place the alpha's content at one accommodation site.  Proper names
% (nam) may be accommodated at any site on the path; definites (def)
% only at the current head of the path (after optionally skipping
% sites filled with the empty DRS).
accommodateAlfa(P1,nam,Alfa):-
   removeFirst(a(Alfa),P1,P2),
   dontResolve(P2).
accommodateAlfa([a(Alfa)|P],def,Alfa):-
   dontResolve(P).
% Skip a binding site unchanged.
accommodateAlfa([r(R,R)|P],Type,Alfa):-
   accommodateAlfa(P,Type,Alfa).
% For definites, skip an already-empty accommodation site.
accommodateAlfa([a(drs([],[]))|P],def,Alfa):-
   accommodateAlfa(P,def,Alfa).
/*========================================================================
Do not resolve remaining of projection path
========================================================================*/
% dontResolve(+Sites)
% Close off the remainder of a projection path without resolving
% anything: accommodation sites get the empty DRS and binding sites are
% left unchanged (input = output).
dontResolve([]).
dontResolve([a(drs([],[]))|L]):-
   dontResolve(L).
dontResolve([r(X,X)|L]):-
   dontResolve(L).
/*========================================================================
Merge Lists - Check for Duplicates
========================================================================*/
% mergeLists(+New,+Base,-Merged)
% Prepend the elements of New that are not already in Base (compared
% with ==, i.e. without unifying variables) onto Base; duplicates are
% dropped.
mergeLists([],L,L).
mergeLists([X|R],L1,L2):-
   memberList(Y,L1), X==Y, !,
   mergeLists(R,L1,L2).
mergeLists([X|R],L1,[X|L2]):-
   mergeLists(R,L1,L2).
/*========================================================================
Info
========================================================================*/
info:-
format('~n> ------------------------------------------------------------------ <',[]),
format('~n> presupDRT.pl, by Patrick Blackburn and Johan Bos <',[]),
format('~n> <',[]),
format('~n> ?- presupDRT. - parse a typed-in sentence <',[]),
format('~n> ?- presupDRT(S,Old,New). - parse a sentence and return DRS <',[]),
format('~n> ?- presupDRTTestSuite. - run the test suite <',[]),
format('~n> ------------------------------------------------------------------ <',[]),
format('~n~n',[]).
/*========================================================================
Display info at start
========================================================================*/
:- info.
| TeamSPoon/logicmoo_workspace | packs_sys/logicmoo_nlu/ext/CURT/bb2/presupDRT.pl | Perl | mit | 7,693 |
package Bio::Graphics::Browser2::Plugin::Submitter;
# $Id: Submitter.pm,v 1.2 2008-10-19 02:37:59 lstein Exp $
# Submitter is an invisible plugin (Does not appear in the "Reports and Analysis" menu)
# designed to support rubber-band select menu items that submit sequence data and
# other parameters to external web sites such as NCBI blast. Check the GMOD wiki
# for documentation.
# This plugin: http://www.gmod.org/wiki/index.php/Submitter.pm
# Rubber-band selection: http://www.gmod.org/wiki/index.php/GBrowse_Rubber_Band_Selection.pm
use strict;
use CGI qw/standard escape unescape/;
use CGI 'html3';
use Bio::Graphics::Browser2::Plugin;
use Bio::Graphics::Browser2::Util;
use Bio::Graphics::FeatureFile;
use CGI qw(:standard *pre);
use vars qw($VERSION @ISA);
use Data::Dumper;
$VERSION = 0.1;
@ISA = qw(Bio::Graphics::Browser2::Plugin);
# not visible in plugin menu
# Invisible plugin: keep it out of the "Reports & Analysis" menu.
sub hide {1}
# The dump output is an HTML page (an auto-submitting form).
sub mime_type {'text/html'}
# Gets the configuration for individual web site "targets"
# in a config stanze that looks like this:
#[Submitter:plugin]
#submitter = [UCSC_BLAT]
# confirm = 1
# url = http://genome.ucsc.edu/cgi-bin/hgBlat
# seq_label = userSeq
# output = hyperlink
# type = DNA
# org = Human
#
# [NCBI_BLAST]
# confirm = 1
# url = http://www.ncbi.nlm.nih.gov/blast/Blast.cgi
# seq_label = QUERY
# PAGE = Nucleotides
# PROGRAM = blastn
# DATABASE = nr
# CLIENT = web
# CMD = put
# Parse the [Submitter:plugin] "submitter" setting into per-target
# configuration hashes, keyed by the [TARGET] stanza name.  The parsed
# result is cached in $self->{targets} and returned on later calls.
sub targets {
  my $self = shift;
  return $self->{targets} if $self->{targets};
  my $submitter_cfg = $self->browser_config->plugin_setting('submitter');
  my $text = _prepare_text($submitter_cfg);
  # Split the normalized text into stanza-name/body pairs.
  my %config_values = $text =~ /\[([^\]]+)\]([^\[]+)/gm;
  for my $target (keys %config_values) {
    my %config = $config_values{$target} =~ /(\w+)\s*=(.+)$/gm;
    # A usable target needs at least two keys (url plus something else).
    # Count keys explicitly: a bare hash in numeric scalar context is
    # not a reliable key count on perls before 5.26.
    next unless keys %config >= 2;
    for (keys %config) {
      $config{$_} =~ s/^\s+|\s+$//g;   # trim surrounding whitespace
    }
    $self->{targets}->{$target} = \%config;
  }
  $self->{targets}
}
# The text needs a bit of pre-processing to
# make sure the sub-stanza key-value pairs are
# respected and embedded HTML is escaped
# Bio::Graphics FeatureFile.pm is not used for parsing
# because it does not respect case-sensitive keys
# Pre-process the raw config text: put each [stanza] and each key= on
# its own line, and escape any embedded HTML chunk so the stanza
# parsing in targets() is not confused by it.
sub _prepare_text {
  my $text = shift;
  my @html = $text =~ /(\<.+\>)\[?/;
  for my $html (@html) {
    # \Q..\E: the captured HTML is literal text here, so its regex
    # metacharacters (., +, ?, /, ...) must not be interpreted when it
    # is used as the substitution pattern.
    $text =~ s/\Q$html\E/escape($html)/em;
  }
  $text =~ s/\[/\n\[/g;          # each [stanza] starts a new line
  $text =~ s/(\w+\s*=)/\n$1/g;   # each key= starts a new line
  $text;
}
# Main entry point called by GBrowse for the selected segment: emit an
# HTML page containing a hidden form pre-filled with the target site's
# parameters and the segment's sequence.  If the target asks for
# confirmation (or supplies extra form HTML), a summary table with a
# Confirm button is shown; otherwise the form auto-submits via
# JavaScript.
sub dump {
  my $self = shift;
  my $segment = shift;
  my $targets = $self->targets;
  my $target = $self->config_param('target')
      || fatal_error(qq(Error: A target for the submitter must be included in the URL "Submitter.target=target"));
  my $config = $targets->{$target}
      || fatal_error(qq(Error: No configuration for target $target!));
  my $seq = $segment->seq;
  $seq = $seq->seq if ref $seq;   # sequence object -> raw string
  my $name = $segment->name;
  my $url = $config->{url}
      || fatal_error('Error: a url for the external website is required');
  my $seq_label = $config->{seq_label}
      || fatal_error('Error: a label is required for the sequence submission');
  # Other form elements to include
  my $extra_html = unescape($config->{extra_html});
  # Whether to print a confirmation page before external submission
  my $confirm = $config->{confirm};
  # Format the display sequence as fasta (wrapped at 60 columns)
  my $fasta = $seq;
  $fasta =~ s/(\S{60})/$1\n/g;
  $fasta = ">$name\n$fasta\n";
  unless ($url =~ /^http/i) {
    $url = "http://$url";
  }
  # pass-thru arguments -- everything in the target config except the
  # keys consumed above is sent to the external web site verbatim
  my %args;
  for my $arg (keys %$config) {
    next if $arg =~ /^seq_label$|^confirm$|^url$|^fasta$|^extra_html$/;
    $args{$arg} = unescape($config->{$arg});
  }
  # print a hidden form and a summary of the data. The "extra_html"
  # configuration key will trigger inclusion of other form input
  # elemants if required.
  print start_form(-name=>'f1', -method=>'POST', -action=>$url), "\n";
  for my $arg (keys %args) {
    print hidden($arg => $args{$arg}), "\n";
  }
  print hidden($seq_label => $seq);
  if ($extra_html || $confirm) {
    # Confirmation page: show what will be submitted in a table.
    my @rows = th({-colspan => 2, -style => "background:lightsteelblue"},
                  b("The following data will be submitted to $url"),
                  p(submit(-name => 'Confirm'),' ',
                    button(-name => 'Cancel', -onclick => 'javascript:window.close()')));
    for my $arg (keys %args) {
      next if $arg eq $seq_label;
      $arg =~ s/extra_html/Additional options/;
      push @rows, td({-width => 100, -style => 'background:lightyellow'},
                     [b("$arg:"), unescape($args{$arg})]);
    }
    if ($extra_html) {
      push @rows, td({-width => 100, -style => 'background:lightyellow'},
                     [b("Other options"), pre(unescape($extra_html))]);
    }
    push @rows, td({-width => 100, -style => 'background:lightyellow'},
                   [b($seq_label), pre($fasta)]);
    print table({-border=> 1}, Tr({-valign => 'top'}, \@rows));
  }
  print end_form;
  # No confirmation requested: submit the form immediately.
  unless ($confirm || $extra_html) {
    print qq(<script type="text/javascript">document.f1.submit();</script>);
  }
}
# One-line plugin description shown by GBrowse.
sub description{'A plugin to submit the selected region to an external website'}
# No persistent configuration: the plugin is driven entirely by URL
# parameters and the [Submitter:plugin] config stanza.
sub config_defaults {{}}
sub configure_form {''}
1;
| solgenomics/sgn | features/gbrowse2/conf/plugins/Submitter.pm | Perl | mit | 5,363 |
#!/usr/bin/perl -w
# This script tests the GibbsMarkov motif sampler in ZOOPS
# (zero-or-one-occurrence-per-sequence) mode and appends the
# wall-clock timing to the captured output.
use strict;
use Benchmark qw(:all);

# Derive the stdout/stderr capture file names from the script's name.
(my $outfile = $0) =~ s/\.pl/\.out/;
(my $errfile = $outfile) =~ s/\.out/\.err/;

my $exec  = '../gibbsmarkov.out';
my $param = ' -l 13 -t 100 -L 5 -em 0 -s 123 -gibbsamp -p 0.05 -best_ent -markov 3 -ds -r 1 -print_runs -zoops 0.2';

my $start = Benchmark->new();
# Run the sampler, redirecting its output; warn (but keep timing) if it
# exits non-zero so a failed run is still visible in the logs.
system("$exec ../../combo3_1485-001/combo3_1485-001-001_with_N_826.implant.seq $param 1>$outfile 2>$errfile") == 0
    or warn "[warn] $exec exited with status $?\n";
my $end  = Benchmark->new();
my $diff = timediff($end, $start);

# Append the elapsed-time summary to the program's stdout capture.
open(my $outfh, '>>', $outfile) or die "Cannot open $outfile: $!";
printf $outfh ("Benchmark: %s\n", timestr($diff, 'all'));
close($outfh) or die "Cannot close $outfile: $!";
| pnpnpn/gimsan-py | gibbsmarkov/test/test15.pl | Perl | mit | 622 |
package O2::Obj::Object::Query::ConditionGroup;
use strict;
use base 'O2::Obj::Object';
use O2 qw($context);
use O2::Util::List qw(upush);
#-----------------------------------------------------------------------------
# Return the O2::Obj::Object::Query that owns this condition group.
# The result is cached in $obj->{query}. Dies if the group is not yet
# saved, or if the owning query cannot be found or instantiated.
sub getQuery {
  my ($obj) = @_;
  return $obj->{query} if $obj->{query};
  die "Can't find query object. Condition group not saved." unless $obj->getId();
  my ($id) = $context->getSingleton('O2::Mgr::Object::QueryManager')->objectIdSearch(
    conditionGroups => $obj->getId(),
  );
  die "Can't find query object" unless $id;
  # Bug fix: the original wrote "return EXPR or die ...", but `return` exits
  # the sub immediately, so the `or die` branch was unreachable. Check the
  # instantiation result first, then cache and return it.
  my $query = $context->getObjectById($id) or die "Can't find query object: Couldn't instantiate $id";
  return $obj->{query} = $query;
}
#-----------------------------------------------------------------------------
# Shorthand for an "id in (<sub query>)" condition on this group.
sub addEitherSubQueryCondition {
  my ($obj, $subQuery) = @_;
  my $model = $obj->getManager()->getModel();
  $obj->addSubQueryCondition( $model->getFieldByName('id'), 'in', $subQuery );
}
#-----------------------------------------------------------------------------
# Build a sub-query condition ("field <op> (sub query)") and attach it.
sub addSubQueryCondition {
  my ($obj, $field, $operator, $subQuery) = @_;
  my $manager   = $context->getSingleton('O2::Mgr::Object::Query::Condition::SubQueryManager');
  my $condition = $manager->newObject();
  $condition->setField($field);
  $condition->setOperator($operator);
  $condition->setQuery($subQuery);
  $obj->addCondition($condition);
}
#-----------------------------------------------------------------------------
# Add a condition on a hash-typed field. A reference in $value selects the
# multi-value condition class, a plain scalar the single-value class.
sub addHashCondition {
  my ($obj, $field, $hashKey, $operator, $value, $isNumericArgument) = @_;
  my $flavour   = ref $value ? 'Multi' : 'Single';
  my $manager   = $context->getSingleton('O2::Mgr::Object::Query::Condition::Hash::' . $flavour . 'ValueManager');
  my $condition = $manager->newObject();
  $condition->setField($field);
  $condition->setHashKey($hashKey);
  $condition->setOperator($operator);
  if (ref $value) {
    $condition->setValues( @{$value} );
  }
  else {
    $condition->setValue($value);
    # Force numeric comparison in the generated SQL when requested.
    $condition->setForceNumeric(1) if $isNumericArgument;
  }
  $obj->addCondition($condition);
}
#-----------------------------------------------------------------------------
# Add a simple "field <op> value" condition on a scalar column.
sub addScalarCondition {
  my ($obj, $field, $operator, $value, $isNumericArgument) = @_;
  my $condition = $context->getSingleton('O2::Mgr::Object::Query::Condition::ScalarManager')->newObject();
  $condition->setField($field);
  $condition->setOperator($operator);
  $condition->setValue($value);
  # Force numeric comparison in the generated SQL when requested.
  $condition->setForceNumeric(1) if $isNumericArgument;
  $obj->addCondition($condition);
}
#-----------------------------------------------------------------------------
# Append one condition object to the group's condition list.
sub addCondition {
  my ($obj, $condition) = @_;
  $obj->setConditions( $obj->getConditions(), $condition );
}
#-----------------------------------------------------------------------------
# Assemble the SQL fragment for this group: each condition's fragment is
# joined with the group's boolean operator (getJoinWith, e.g. AND/OR).
# Returns ($sql, @bindValues).
sub getSql {
  my ($obj) = @_;
  my (@fragments, @placeHolders);
  foreach my $condition ($obj->getConditions()) {
    my ($fragment, @binds) = $condition->getSql();
    push @fragments,    $fragment;
    push @placeHolders, @binds;
  }
  my $sql = join ' ' . $obj->getJoinWith() . ' ', @fragments;
  # Parenthesize only when more than one fragment was combined.
  $sql = "($sql)" if @fragments > 1;
  return ($sql, @placeHolders);
}
#-----------------------------------------------------------------------------
# Duplicate-free union (via upush) of the tables used by all conditions.
sub getUsedTables {
  my ($obj) = @_;
  my @tableNames;
  upush @tableNames, $_->getUsedTables() for $obj->getConditions();
  return @tableNames;
}
#-----------------------------------------------------------------------------
# Cascade: permanently delete every condition, then the group itself.
sub deletePermanently {
  my ($obj) = @_;
  $_->deletePermanently() for $obj->getConditions();
  $obj->SUPER::deletePermanently();
}
#-----------------------------------------------------------------------------
# Cascade: delete every condition, then the group itself.
sub delete {
  my ($obj) = @_;
  $_->delete() for $obj->getConditions();
  $obj->SUPER::delete();
}
#-----------------------------------------------------------------------------
1;
| haakonsk/O2-Framework | lib/O2/Obj/Object/Query/ConditionGroup.pm | Perl | mit | 4,044 |
/* <module>
%
% PFC is a language extension for prolog.
%
% It adds a new type of module inheritance
%
% Dec 13, 2035
% Douglas Miles
*/
% was_module(mt_01,[]).
% Shared scaffolding for these sanity tests.
:- include(test_header).
% Enter PFC (Prolog Forward Chaining) mode for the clauses below.
:- begin_pfc.
% Unqualified assertions should go into the myMt microtheory.
:- set_defaultAssertMt(myMt).
% Register socialMt via baseKB:mtHybrid and give it one fact.
baseKB:mtHybrid(socialMt).
socialMt:loves(sally,joe).
% Re-select myMt before running the checks.
:- set_defaultAssertMt(myMt).
% The fact must be visible in socialMt ...
:- mpred_test(clause_u(socialMt:loves(_,_))).
% ... but must not leak into myMt or the test header's module.
:- mpred_test(\+clause_u(myMt:loves(_,_))).
:- mpred_test(\+clause_u(header_sanity:loves(_,_))).
| TeamSPoon/logicmoo_workspace | packs_sys/pfc/t/sanity_base/mt_01.pl | Perl | mit | 472 |
/*************************************************************************
name: lexicon_svenska_player.pl
date: 2004-10-25
author: Andreas Wallentin
*************************************************************************/
:- module( lexicon_player_svenska, [output_form/2,
input_form/2,
yn_answer/1]).
:- multifile synset/2.
:- discontiguous output_form/2, input_form/2.
:- use_module( library(lists), [ member/2, select/3, append/3, is_list/1 ] ).
%%:- use_module( library(charsio), [ format_to_chars/3 ] ).
%% för mer variation av output
:- use_module( library(random) ).
:- use_module( dbase ).
:- ensure_loaded( digits_svenska_player ).
:- ensure_loaded( semsort_player ).
:- ensure_loaded( groups ).
/*----------------------------------------------------------------------
output_form( +Move, -String )
-- Canned output
----------------------------------------------------------------------*/
/*
För mer variation i output, slumpas olika fraser fram.
Samma för avsluten.
*/
greetings(['Musikspelaren är klar för användning.','Välkommen till musikspelaren']).
byes(['Hej då!','Hoppas att du hade det trevligt','Adjö adjö']).
% getNoNInList(+Index, +List, -Element)
% 1-based selection of the Index-th element of List.
% (The original comment named it "getNoXInList"; the predicate is getNoNInList.
% Original Swedish argument names: +VilketIOrdning, +Lista, -UtvaltSvar.)
getNoNInList(1,[X|_],X).
getNoNInList(Num, [_|Xs], Svar):-
    N is Num-1,
    getNoNInList(N,Xs,Svar).
output_form( action(top), ['top'] ).
%% Called the first time the program is running
%%
output_form( greet, [Greeting] ):-
random(1,3,N),
greetings(List),
getNoNInList(N,List,Greeting).
output_form( quit, [Ends] ):-
random(1,4,N),
byes(List),
getNoNInList(N,List,Ends).
% ask-moves
output_form( ask(X^(action(X))), ['Vad kan jag göra för dig?'] ).
output_form( ask(action(T)), Str ):-
output_form(action(T), StrT ),
append( ['Vill du '], StrT, Str0 ),
append( Str0, ['?'], Str).
%% ta reda på saker från användaren
output_form( ask(X^playlist(X)),
['Vilken spellista vill du öppna?'] ).
output_form( ask(X^itemAdd(X)),
['Vilken låt vill du lägga till i spellistan?'] ).
output_form( ask(X^itemRem(X)),
['Vilken låt(indexnummer) vill du ta bort från spellistan?'] ).
output_form( ask(X^groupToAdd(X)),
['Vilken grupp är du ute efter?'] ).
output_form( ask(X^station(X)),
['Vilken radiostation vill du lyssna på?'] ).
output_form( ask(X^listenTo(X)),
['Vill du lyssna på radio eller låtar?'] ).
output_form( ask(X^artist(X)),
['Vilken artist menar du?'] ).
output_form( ask(X^song(X)),
['Vilken låt menar du?'] ).
output_form( ask(X^album(X)),
['Vilket album menar du?'] ).
output_form( ask(X^song_artist(X)),
['Vilken artist menar du?'] ).
output_form( ask(X^group(X)),
['Vilken artist menar du?'] ).
output_form( ask(X^item(X)),
['Vilken låt menar du?'] ).
output_form( ask(X^what_to_play(X)),
['Vilken låt i spellistan vill du spela?'] ).
% Answer for a path query: report the path, or say that nothing matched.
% (Fixed typo in the Swedish output: "sökkreterierna" -> "sökkriterierna".)
output_form( answer(path(Path)), Ans ):-
    ( Path = ''
    ->
      Ans = ['Det finns ingen sökväg som matchar sökkriterierna.']
    ;
      Ans = ['Sökvägen till låten är:',Path]
    ).
% Fallback when the path lookup produced no matches; report the path that
% did not match. (Fixed typo in the Swedish output: "detnna" -> "denna".)
output_form( answer(fail(Path^path(Path),no_matches)), Ans ):-
    Ans = ['Sökvägen till låten är inte denna:',Path].
output_form( answer(artists_song(Artist)), ['Följande artist/-er har gjort den:',Artist] ).
output_form( answer(artists_album(Artist)), Answer ):-
(
Artist = ''
->
Answer = ''
;
(
Artist = 'best_of'
->
Answer = ['Albumet är ett samlingsalbum']
;
Answer = ['Albumet har gjorts av',Artist]
)
).
output_form( answer(albums_by_artist(Albums)), Answer ):-
( Albums = ''
-> Answer = ['Det finns inga album']
; Answer = ['Följande album finns:',Albums]
).
output_form( answer(current_song([A,B])), Answer ):-
Answer = ['Du lyssnar på',A,'-',B].
output_form( answer(songs_by_artist(Songs)), ['De har gjort:',Songs] ).
output_form( issue(path(_)), ['fråga väg'] ).
output_form( action(restart), ['börja om'] ).
output_form( action(handle_player), ['hantera spelaren'] ).
output_form( action(handle_playlist), ['hantera spellistor'] ).
output_form( action(handle_stations), ['hantera radiostationer'] ).
output_form( action(start), ['starta spelaren'] ).
output_form( action(start_specific), ['spela en viss låt'] ).
output_form( action(stop), ['stoppa spelaren'] ).
output_form( action(pause), ['pausa musiken'] ).
output_form( action(resume), ['återuppta musiken'] ).
output_form( action(fast_rewind), ['spola i låten'] ).
output_form( action(start_playlist), ['spela en viss spellista'] ).
output_form( action(fast_forward), ['spola framåt'] ).
output_form( action(rewind), ['spola bakåt'] ).
output_form( action(next_song), ['till nästa'] ).
output_form( action(previous_song), ['till föregående'] ).
output_form( action(playlist_add), ['lägga till en låt i spellistan'] ).
output_form( action(playlist_del_specific), ['ta bort en låt från spellistan'] ).
output_form( action(playlist_del), ['ta bort listan'] ).
output_form( action(playlist_shuffle), ['blanda ordningen på låtarna'] ).
output_form( action(show_list), ['visa spellistan'] ).
%%% confirming actions
output_form( confirm(handle_player), ['done handling player'] ).
output_form( confirm(handle_playlist), ['done handling playlist'] ).
output_form( confirm(handle_stations), ['done handling stations'] ).
output_form( confirm(start), ['Startar musiken'] ).
output_form( confirm(start_specific), ['Startar musiken'] ).
output_form( confirm(stop), ['Musiken är stoppad'] ).
output_form( confirm(pause), ['Pausar spelaren'] ).
output_form( confirm(resume), ['Återupptar musiken'] ).
%output_form( confirm(fast_rewind), ['soplar åt nåt håll'] ).
output_form( confirm(start_playlist), ['Spelar spellista'] ).
output_form( confirm(fast_forward), ['Spolar lite framåt'] ).
output_form( confirm(rewind), ['Spolar lite bakåt'] ).
output_form( confirm(playlist_add), ['Spellistan är utökad'] ).
output_form( confirm(playlist_del_specific), ['Spellistan har reducerats'] ).
output_form( confirm(playlist_del), ['Spellistan är borttagen'] ).
output_form( confirm(playlist_shuffle), ['Spellistans ordning har blandats'] ).
output_form( confirm(show_list), ['Spellistan visad'] ).
output_form( confirm(vol_up), ['Ökar volymen'] ).
output_form( confirm(vol_down), ['Sänker volymen'] ).
output_form( confirm(next_song), ['Till nästa låt'] ).
output_form( confirm(previous_song), ['Till föregående låt'] ).
output_form( report('PlaylistAdd', failed(G,S)), Ans ):-
make_name(G,Group),
make_name(S,Song),
Ans = ['Tyvärr finns inte',Song,'med',Group].
output_form( report('Resume', failed ),
['Spelaren stod på inte på paus, så därför ingen resume'] ).
%%% output_form( report('Start', failed(Status) ), %%% spelare på paus
%%% ['Spelaren stod på',Status,'Då måste resume köras'] ).
% altlist2altstr_and(+Alts, -Str)
% Render a list of alternatives as one string, comma-separated, with the
% final element prefixed by " och " (Swedish for "and").
altlist2altstr_and( [D], Str ):-
    alt2altstr( D, Str1 ),
    append( " och ", Str1, Str ).
altlist2altstr_and( [D|Ds], Str ):-
    alt2altstr( D, Str1 ),
    altlist2altstr_and( Ds, Str2 ),
    append( Str1, ", ", Str3 ),
    append(Str3, Str2, Str ).
% altlist2altstr_or(+Alts, -Str)
% As above, but the final element is prefixed by " eller " (Swedish "or").
altlist2altstr_or( [D], Str ):-
    alt2altstr( D, Str1 ),
    append( " eller ", Str1, Str ).
altlist2altstr_or( [D|Ds], Str ):-
    alt2altstr( D, Str1 ),
    altlist2altstr_or( Ds, Str2 ),
    append( Str1, ", ", Str3 ),
    append(Str3, Str2, Str ).
% alt2altstr(+Alt, -Str)
% One alternative to string: prefer its canned output form, otherwise fall
% back to name/2 on the term itself.
alt2altstr( D, Str ):-
    output_form( D, Str ).
alt2altstr( D, Str ):-
    name( D, Str ).
%%% used in output_form/2 with ask(set(...))
altlist2alts_or( [Alt], ['eller'|OutputAlt] ):-
output_form(Alt, OutputAlt ).
altlist2alts_or( [Alt|Alts], [','|Output] ):-
output_form(Alt, OutputAlt ),
altlist2alts_or(Alts, AltsOr),
append( OutputAlt, AltsOr, Output).
% object-level clarification and groundnig questions
output_form( ask(C), Output ):-
output_form( icm:und*pos:_*C, IcmPos ),
append( IcmPos0,['.'],IcmPos),
append( IcmPos0, [', är det korrekt?'], Output ).
output_form( ask(set([Alt0|Alts])), Output):-
output_form(Alt0, Alt0out),
altlist2alts_or( Alts, AltsOr ),
append(['Vill du '|Alt0out], AltsOr, Output0 ),
append(Output0, ['?'], Output).
output_form( Alt, OutputAlt ):-
input_form( OutputAlt, answer( Alt ) ).
output_form( answer(notexist(X,Q)), [' Ledsen, det finns inget som matchar din fråga om'|InputQDot]):-
input_form( InputQ, ask(X^Q) ),
append( InputQ, ['.'], InputQDot ).
output_form( answer(unknown(Q)), ['Ledsen, det finns inget som matchar din fråga om'|InputQDot]):-
input_form( InputQ, ask(Q) ),
append( InputQ, ['.'], InputQDot ).
% for asking metaissue clarification question
output_form( issue(Q), ['fråga om'|Out] ):-
input_form( Out, ask( Q ) ).
% for asking metaissue clarification question
%output_form( action(Action), ['to '|Out] ):-
% input_form( Out, request( Action ) ).
% ICM
% contact
output_form( icm:con*neg, ['Hallå?'] ).
% perception
output_form( icm:per*int, ['Ursäkta?'] ).
output_form( icm:per*int, ['Vad sade du?'] ).
output_form( icm:per*neg, ['Ursäkta, Jag hörde inte vad du sade.'] ).
output_form( icm:per*pos:String, ['Jag tyckte du sade',Name,'.'] ):-
name( Name, String ).
output_form( icm:sem*int, ['Vad menar du'] ).
output_form( icm:sem*neg, ['Förlåt, jag förstår inte.'] ).
output_form( icm:sem*pos:Move, InputDot ):-
input_form( Input, Move ),
append( Input, ['.'], InputDot ).
% understanding(pragmatic)
output_form( icm:und*neg, ['Jag förstår inte riktigt.'] ).
output_form( icm:und*pos:usr*issue(A^play_status(A)),
['Du vill veta vad spelaren gör'] ).
output_form( icm:und*pos:usr*issue(play_status(Status)),
['Du vill veta om videon',S0] ):-
status_output(Status,S0).
output_form( icm:und*pos:usr*issue(A^current_channel(A)),
['Du vill veta vilken kanal som är på.'] ).
output_form( icm:und*pos:usr*(not issue(Q)), ['Du frågade inte:'|AnsPDot] ):-
input_form( AnsP, ask( Q ) ),
append(AnsP,['.'],AnsPDot).
output_form( icm:und*pos:usr*(not P), AnsNotPDot ):-
output_form( icm:und*pos:usr*P, AnsPDot ),
append( ['inte'],AnsPDot,AnsNotPDot ).
output_form( icm:und*pos:usr*P, AnsPDot ):-
( output_form(P, AnsP);
input_form( AnsP, answer(P) ) ),
append(AnsP,['.'],AnsPDot).
% 020702 SL
output_form( icm:und*pos:usr*PX, IcmPos ):-
PX =.. [P,X],
isa( P, P1 ),
P1X =.. [P1,X],
output_form( icm:und*pos:usr*P1X, IcmPos ).
output_form( icm:und*int:usr*C, IcmInt ):-
output_form( ask(C), IcmInt ).
%output_form( icm:und*pos:C, IcmPos ),
%append( IcmPos0,['.'],IcmPos),
%append( IcmPos0, [', is that correct?'], IcmInt ).
%output_form( icm:und*int:usr*C, IcmInt ):-
% input_form( answer(C), IcmInt ).
output_form( icm:und*int:usr*C, Output ):-
output_form( icm:und*pos:_*C, IcmPos ),
append( IcmPos0,['.'],IcmPos),
append( IcmPos0, [', är det korrekt?'], Output ).
% clarification question
output_form( icm:und*int:usr*AltQ, Output):-
output_form( ask(AltQ), Output).
% "acceptance"/integration
% icm-Type(-Polarity(-Args))
output_form( icm:acc*pos, ['Okej.'] ).
% reject(issue(Q))
output_form( icm:acc*neg:issue(Q), ['Ledsen, jag kan inte svara på frågor om'|InputQDot]):-
input_form( InputQ, ask(Q) ),
append( InputQ, ['.'], InputQDot ).
% reject proposition P
output_form( icm:acc*neg:P, ['Ledsen, '|Rest]):-
input_form( InputP, answer(P) ),
append( InputP, [' är inte en korrekt parameter.'], Rest ).
% indicate loading a plan (pushed by findPlan)
%output_form( icm:loadplan, ['I need some information.'] ).
output_form( icm:loadplan, ['Låt oss se.'] ).
% reraise issue explicitly (feedback on user reraise, or system-initiated)
output_form( icm:reraise:Q, ['Gå tillbaks till frågan om '|InputQDot]):-
( input_form( InputQ, ask(Q) ); output_form( ask(Q), InputQ ) ),
append( InputQ, ['.'], InputQDot ).
% reraise action explicitly (feedback on user reraise, or system-initiated)
output_form( icm:reraise:A, ['Gå tillbaks till '|InputQDot]):-
( input_form( InputQ, request(A) ); output_form( action(A), InputQ ) ),
append( InputQ, ['.'], InputQDot ).
% reraise issue (system-initiated, where question follows immediately after)
output_form( icm:reraise, ['Så,']).
% accommodation
output_form( icm:accommodate:_, ['Visst.'] ).
output_form( icm:reaccommodate:Q, ['Gå tillbaks till frågan om'|AnsPDot] ):-
input_form( AnsP, ask( Q ) ),
append(AnsP,['.'],AnsPDot).
output_form( not C, ['Inte'|S] ):- output_form( C, S ).
/*----------------------------------------------------------------------
input_form( +Phrase, -Move )
-- Almost canned input
----------------------------------------------------------------------*/
input_form( [inte|S], answer(not(C))):- input_form(S,answer(C)).
input_form( [ja], answer(yes) ).
input_form( [nej], answer(no) ).
% simple stuff
input_form( [hej], greet ).
input_form( [hej,då], quit ).
input_form( [sluta], quit ).
input_form( [avbryt], quita ).
% ICM
input_form( [förlåt], icm:per*neg ).
input_form( [okej], icm:acc*pos ).
input_form( [ok], icm:acc*pos ).
input_form( [vet, inte], icm:acc*neg:issue ).
/******************************
ACTIONS
******************************/
%%%%% Requests %%%%%
input_form( [börja,om], request(restart) ).
input_form( [top], request(top) ).
input_form( [gå,uppåt], request(up) ).
input_form( [Player], request(handle_player) ) :- lexsem( Player, player ).
input_form( [Playlist], request(handle_playlist) ) :- lexsem( Playlist, playlist ).
input_form( [välja], request(listen_to) ).
input_form( [spela|X], [request(start_specific),answer(index(X))] ):-
sem_sort(X,index).
input_form( [spela|Group], [request(start),request(playlist_add),answer(group(Group))] ):-
sem_sort(Group,group).
input_form( [spela|Song], [request(start),request(playlist_add),answer(item(Song))] ):-
sem_sort(Song,item).
input_form( [Station], request(handle_stations) ) :- lexsem( Station, station ).
input_form( [Play], request(start) ) :- lexsem( Play, play ).
%input_form( [Play,nästa], request(next_song) ) :- lexsem( Play, play ).
%%% input_form( [Play,Song], request(play_song) ):-
%%% sem_sort(Song,item),
%%% lexsem( Play, play ).
input_form( [Stop], request(stop) ) :- lexsem( Stop, stop ).
input_form( [Pause], request(pause) ) :- lexsem( Pause, pause ).
input_form( [Resume], request(resume) ) :- lexsem( Resume, resume ).
input_form( [spola], request(fast_rewind) ).
input_form( [bakåt], request(rewind) ).
input_form( [framåt], request(fast_forward) ).
input_form( [nästa], request(next_song) ).
input_form( [föregående], request(previous_song) ).
input_form( [spela,spellista], request(start_playlist) ).
input_form( [en,spellista], request(start_playlist) ).
input_form( [lägga,till], request(playlist_add) ).
input_form( [lägg,till], request(playlist_add) ).
input_form( [sätta,på], request(playlist_add) ).
input_form( [visa,listan], request(show_list) ).
input_form( [visa,spellistan], request(show_list) ).
input_form( [höra,på], request(listen_to) ).
input_form( [lyssna,på], request(listen_to) ).
input_form( [blanda], request(playlist_shuffle) ).
%input_form( [List], request(playlist_del) ) :- lexsem(List,list).
%input_form( [låt], request(playlist_del_specific) ).
%% ny plan som frågar vad man vill ta bort
%input_form( [ta,bort], request(remove) ).
input_form( [ta,bort,List], request(playlist_del) ) :- lexsem(List,list).
input_form( [rensa,List], request(playlist_del) ) :- lexsem(List,list).
input_form( [ta,bort|X], [request(playlist_del_specific),
answer(index(X)) ] ):-
sem_sort(X,index).
input_form( [ta,bort,en,låt], request(playlist_del_specific) ).
input_form( [Inc], request(vol_up) ) :- lexsem(Inc,increase).
input_form( [Dec], request(vol_down) ) :- lexsem(Dec,decrease).
%%%%% Answers %%%%%
input_form( X, answer(index(X)) ):- sem_sort(X,index).
input_form( Station, answer(station(Station)) ):- sem_sort(Station,station).
input_form( Group, answer(group(Group)) ):- sem_sort(Group,group).
input_form( Playlist, answer(playlist(Playlist)) ):- sem_sort(Playlist,playlist).
%%input_form( [Year], answer(year(Year)) ):- sem_sort(Year,year).
input_form( SongRadio, answer(item(SongRadio)) ):- sem_sort(SongRadio,item).
input_form( Album, answer(album(Album)) ):- sem_sort(Album,album).
input_form( Station, answer(station(IP)) ):-
longNum(Station,IP),
sem_sort(IP,station).
%%%%% Questions to DB %%%%%
input_form( [vilka,album], ask(A^albums_by_artist(A)) ).
input_form( [söka,efter,album], ask(A^albums_by_artist(A)) ).
input_form( [vad,heter,den], ask(X^current_song(X)) ).
input_form( [låten,som,spelas,nu], ask(X^current_song(X)) ).
input_form( [vem,har,gjort,albumet], ask(A^artists_album(A)) ).
input_form( [vem,har,gjort,låten], ask(A^artists_song(A)) ).
input_form( [vilken,sökväg], ask(A^path(A)) ).
%%% för mer explicit input
input_form( [vem,har,gjort,låten|Song], [ask(A^artists_song(A)),answer(item(Song))] ):-
sem_sort(Song,item).
input_form( [vem,har,gjort,albumet|Album], [ask(A^artists_album(A)),answer(album(Album))] ):-
sem_sort(Album,album).
%%% mer generellt
input_form( [vem,har,skrivit|Song], [ask(A^artists_song(A)),answer(item(Song))] ):-
sem_sort(Song,item).
input_form( [vem,har,gjort|Song], [ask(A^artists_song(A)),answer(item(Song))] ):-
sem_sort(Song,item).
input_form( [vem,har,skrivit|Album], [ask(A^artists_album(A)),answer(album(Album))] ):-
sem_sort(Album,album).
input_form( [vem,har,gjort|Album], [ask(A^artists_album(A)),answer(album(Album))] ):-
sem_sort(Album,album).
input_form( [vilka,låtar], ask(Songs^songs_by_artist(Songs)) ).
%%% input_form( [vilka,grupper], ask(Groups^all_groups(Groups)) ).
/*
Kommande predikat...
input_form( [vilka,låtar,har,X,gjort], ask(Songs^songs_by_current_artist(Songs)) ):-
lexsem(X,ask_current).
input_form( [någonting,med], request(find_group) ).
*/
/*----------------------------------------------------------------------
yn_answer( ?YN )
----------------------------------------------------------------------*/
% yn_answer(?A)
% True for the Swedish yes/no answer words: 'ja' (yes) / 'nej' (no).
yn_answer(A):-
    A = 'ja';
    A = 'nej'.
/*----------------------------------------------------------------------
lexsem( ?Word, ?Concept )
-- Lexical semantics
----------------------------------------------------------------------*/
% use semantics as surface forms (only possible for english)
% lexsem(?Word, ?Concept)
% Lexical semantics: Word denotes Concept when Word belongs to the
% concept's synonym set (synset/2).
lexsem( Word, Concept ):-
    synset( Words, Concept ),
    member( Word, Words ).
%synset( [[videon],[video]], vcr ).
synset( [spelare,spelaren,spelarn,musiken], player ).
synset( [spellista,spellistor,spellistan,spellistorna], playlist ).
synset( [listor,listan,lista], list ).
synset( [starta,play,spela], play ).
synset( [radio,station,radiostation,stationer,radiostationer], station ).
synset( [[lägg,till],[lägga,till]], add ).
synset( [stop,stopp,stoppa,stanna], stop ).
synset( [resume,återuppta], resume ).
synset( [paus,pausa], pause ).
synset( [höj,höja,öka], increase ).
synset( [sänk,sänka,minska], decrease ).
synset( [han,hon,de,dem,dom], ask_current ).
| TeamSPoon/logicmoo_workspace | packs_sys/logicmoo_nlu/ext/SIRIDUS/UGOT-D31/godis-apps/domain-player/player-old/Resources/lexicon_player_svenska.pl | Perl | mit | 19,929 |
=head1 LICENSE
Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Users::Component::Account::Details::Edit;
### Page allowing user to edit his details
### @author hr5
use strict;
use warnings;
use base qw(EnsEMBL::Users::Component::Account);
use constant JS_CLASS_CHANGE_EMAIL => '_change_email';
sub content {
  ## Render the "edit account details" page: a CSRF-protected form posting
  ## to Details/Save, with a JS-toggled email field plus name/organisation/
  ## country fields. Returns the rendered section markup.
  my $self = shift;
  my $hub = $self->hub;
  my $user = $hub->user;
  my $email = $user->email;
  my $form = $self->new_form({'action' => {qw(action Details function Save)}, 'csrf_safe' => 1});
  my $js_change_email = $self->JS_CLASS_CHANGE_EMAIL;
  my $email_note = '';
  # If the current email is also a 'local' login identity, note that on the
  # form. (Postfix 'for' over all logins; 'last' stops at the first match.)
  $_->type eq 'local' && $_->identity eq $email and $email_note = sprintf('You use this email to login to %s.<br />', $self->site_name) and last for @{$user->logins};
  # Email field: a read-only display with a "Change" link, plus an initially
  # hidden editable input; client-side JS toggles between them via the
  # JS_CLASS_CHANGE_EMAIL class.
  $form->add_field({
    'label' => 'Email Address',
    'notes' => qq(<div class="hidden $js_change_email">${email_note}An email will be sent to the new address for verification purposes if email address is changed.</div>),
    'elements' => [{
      'type' => 'noedit',
      'element_class' => $js_change_email,
      'value' => qq($email <a href="#ChangeEmail" class="small $js_change_email">Change</a>),
      'no_input' => 1,
      'is_html' => 1
    }, {
      'type' => 'email',
      'name' => 'email',
      'value' => $email,
      'no_asterisk' => 1,
      'shortnote' => qq(<a href="#Cancel" class="small $js_change_email hidden">Cancel</a>),
      'element_class' => qq($js_change_email hidden)
    }]
  });
  # Shared helper adds name/organisation/country fields and the Save button;
  # email is excluded here because it was added above.
  $self->add_user_details_fields($form, {
    'name' => $user->name,
    'organisation' => $user->organisation,
    'country' => $user->country,
    'no_list' => 1,
    'no_email' => 1,
    'button' => 'Save',
  });
  # Append a Cancel (reset) button next to the Save button in the last field.
  $form->fieldset->fields->[-1]->add_element({
    'type' => 'reset',
    'value' => 'Cancel',
    'class' => $self->_JS_CANCEL
  }, 1);
  # Hidden field telling the JS where to go when Cancel is pressed.
  $form->add_hidden({'name' => $self->_JS_CANCEL, 'value' => $hub->PREFERENCES_PAGE});
  return $self->js_section({'subsections' => [ $form->render ], 'js_panel' => 'AccountForm'});
}
1; | andrewyatz/public-plugins | users/modules/EnsEMBL/Users/Component/Account/Details/Edit.pm | Perl | apache-2.0 | 2,814 |
=head1 LICENSE
See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
use strict;
use warnings;
package Bio::EnsEMBL::G2P::DBSQL::LGMPanelDiseaseAdaptor;
use DBI qw(:sql_types);
use Bio::EnsEMBL::G2P::LGMPanelDisease;
our @ISA = ('Bio::EnsEMBL::G2P::DBSQL::BaseAdaptor');
sub store {
  # Insert a new LGMPanelDisease row into LGM_panel_disease and record the
  # auto-generated primary key on the object. Returns the stored object.
  my $self = shift;
  my $lgm_panel_disease = shift;
  # Defensive type check: only accept the expected object class.
  if (!ref($lgm_panel_disease) || !$lgm_panel_disease->isa('Bio::EnsEMBL::G2P::LGMPanelDisease')) {
    die('Bio::EnsEMBL::G2P::LGMPanelDisease arg expected');
  }
  my $dbh = $self->dbc->db_handle;
  # 'created' is stamped server-side with CURRENT_TIMESTAMP.
  my $sth = $dbh->prepare(q{
    INSERT INTO LGM_panel_disease(
      LGM_panel_id,
      disease_id,
      user_id,
      created
    ) VALUES (?, ?, ?, CURRENT_TIMESTAMP)
  });
  $sth->execute(
    $lgm_panel_disease->LGM_panel_id,
    $lgm_panel_disease->disease_id,
    $lgm_panel_disease->user_id
  );
  $sth->finish();
  # Fetch the auto-increment id and set it on the object.
  my $dbID = $dbh->last_insert_id(undef, undef, 'LGM_panel_disease', 'LGM_panel_disease_id');
  $lgm_panel_disease->{LGM_panel_disease_id} = $dbID;
  return $lgm_panel_disease;
}
# Thin wrapper delegating primary-key lookup to the base adaptor.
sub fetch_by_dbID {
  my ($self, $dbID) = @_;
  return $self->SUPER::fetch_by_dbID($dbID);
}
# Fetch every LGMPanelDisease row (unconstrained generic fetch).
sub fetch_all {
  my ($self) = @_;
  return $self->generic_fetch();
}
# All LGMPanelDisease rows belonging to the given LGMPanel.
sub fetch_all_by_LGMPanel {
  my ($self, $lgm_panel) = @_;
  my $constraint = sprintf 'LGM_panel_id=%s;', $lgm_panel->dbID;
  return $self->generic_fetch($constraint);
}
# The row (or undef) for the given (LGMPanel, Disease) pair.
sub fetch_by_LGMPanel_Disease {
  my ($self, $LGM_panel, $disease) = @_;
  my $constraint = sprintf 'LGM_panel_id=%s AND disease_id=%s;', $LGM_panel->dbID, $disease->dbID;
  my $result = $self->generic_fetch($constraint);
  return $result->[0];
}
# Column list for LGM_panel_disease, in the order bound by _objs_from_sth.
sub _columns {
  my $self = shift;
  return qw(
    LGM_panel_disease_id
    LGM_panel_id
    disease_id
    user_id
    created
  );
}
# Table/alias pairs consumed by the base adaptor's SQL generator.
sub _tables {
  my $self = shift;
  return ( ['LGM_panel_disease', 'lgmpd'] );
}
sub _objs_from_sth {
  # Map rows of an executed statement handle to LGMPanelDisease objects.
  # Returns an array ref (possibly empty).
  my ($self, $sth) = @_;
  my ($LGM_panel_disease_id, $LGM_panel_id, $disease_id, $user_id, $created);
  # Bind output columns in the exact order declared by _columns().
  $sth->bind_columns(\($LGM_panel_disease_id, $LGM_panel_id, $disease_id, $user_id, $created));
  my @objs;
  while ($sth->fetch()) {
    my $obj = Bio::EnsEMBL::G2P::LGMPanelDisease->new(
      -LGM_panel_disease_id => $LGM_panel_disease_id,
      -LGM_panel_id => $LGM_panel_id,
      -disease_id => $disease_id,
      -user_id => $user_id,
      -created => $created,
      -adaptor => $self,
    );
    push(@objs, $obj);
  }
  return \@objs;
}
1;
| at7/ensembl-gene2phenotype | modules/Bio/EnsEMBL/G2P/DBSQL/LGMPanelDiseaseAdaptor.pm | Perl | apache-2.0 | 3,213 |
package JSON::RPC2::Client;
use 5.010001;
use warnings;
use strict;
use utf8;
use Carp;
our $VERSION = 'v2.1.1';
use JSON::MaybeXS;
use Scalar::Util qw( weaken refaddr );
# Construct a client with an empty call registry and a fresh id counter.
sub new {
    my ($class) = @_;
    my %state = (
        next_id => 0,   # next request id to hand out
        free_id => [],  # ids from finished calls, reused before next_id
        call    => {},  # strong refs to in-flight call tokens, by refaddr
        id      => {},  # weak map: request id -> call token
    );
    return bless {%state}, $class;
}
# Combine pre-built request strings and call tokens into one JSON-RPC batch.
# References in @requests are call tokens; plain strings are request payloads.
# Returns the batch request text followed by the call tokens.
sub batch {
    my ($self, @requests) = @_;
    my @call     = grep { ref $_ }  @requests;
    my @payloads = grep { !ref $_ } @requests;
    croak 'at least one request required' if !@payloads;
    my $request = '[' . join(q{,}, @payloads) . ']';
    return ($request, @call);
}
# Build a JSON-RPC 2.0 notification (no id, so no response is expected).
# Positional params are sent as an array; omitted entirely when empty.
sub notify {
    my ($self, $method, @params) = @_;
    croak 'method required' if !defined $method;
    my %msg = (jsonrpc => '2.0', method => $method);
    $msg{params} = \@params if @params;
    return encode_json(\%msg);
}
# Build a JSON-RPC 2.0 notification with named parameters (sent as an
# object). The params key is omitted entirely when no params are given.
sub notify_named {
    my ($self, $method, @params) = @_;
    croak 'method required' if !defined $method;
    croak 'odd number of elements in %params' if @params % 2;
    my %params = @params;
    my %msg = (jsonrpc => '2.0', method => $method);
    $msg{params} = \%params if @params;
    return encode_json(\%msg);
}
# Build a JSON-RPC 2.0 call with positional params. In list context returns
# (request_text, call_token); in scalar context the request text only.
sub call {
    my ($self, $method, @params) = @_;
    croak 'method required' if !defined $method;
    my ($id, $call) = $self->_get_id();
    my %msg = (jsonrpc => '2.0', method => $method, id => $id);
    $msg{params} = \@params if @params;
    my $request = encode_json(\%msg);
    return wantarray ? ($request, $call) : $request;
}
# Build a JSON-RPC 2.0 call with named params (sent as an object). In list
# context returns (request_text, call_token); in scalar context the text only.
sub call_named {
    my ($self, $method, @params) = @_;
    croak 'method required' if !defined $method;
    croak 'odd number of elements in %params' if @params % 2;
    my %params = @params;
    my ($id, $call) = $self->_get_id();
    my %msg = (jsonrpc => '2.0', method => $method, id => $id);
    $msg{params} = \%params if @params;
    my $request = encode_json(\%msg);
    return wantarray ? ($request, $call) : $request;
}
sub _get_id {
    # Allocate a request id (reusing a freed id when available, otherwise
    # the next counter value) and create the per-call token hash.
    my $self = shift;
    my $id = @{$self->{free_id}} ? pop @{$self->{free_id}} : $self->{next_id}++;
    my $call = {};
    # {call} holds the only strong reference to the token; {id} maps the
    # wire id to the same token but is weakened, so dropping/canceling the
    # token automatically invalidates the id entry.
    $self->{call}{ refaddr($call) } = $call;
    $self->{id}{ $id } = $call;
    weaken($self->{id}{ $id });
    return ($id, $call);
}
# All call tokens that have neither received a response nor been canceled.
sub pending {
    my $self = shift;
    return values %{ $self->{call} };
}
# Forget a pending call; its eventual response will be reported as unknown.
sub cancel {
    my ($self, $call) = @_;
    delete $self->{call}{ refaddr($call) }
        or croak 'no such request';
    return;
}
sub batch_response {
    # Parse a batch (or single) JSON-RPC response. Returns a list of array
    # refs, each shaped like response()'s return:
    # [$failed, $result, $error, $call].
    my ($self, $json) = @_;
    croak 'require 1 param' if @_ != 2;
    undef $@;
    # Accept raw JSON text or an already-decoded structure.
    my $response = ref $json ? $json : eval { decode_json($json) };
    if ($@) {
        return [ 'Parse error' ];
    }
    # A single Object is treated as a batch of one.
    if ($response && ref $response eq 'HASH') {
        return [ $self->response($response) ];
    }
    if (!$response || ref $response ne 'ARRAY') {
        return [ 'expect Array or Object' ];
    }
    if (!@{$response}) {
        return [ 'empty Array' ];
    }
    # Validate and dispatch each element individually.
    return map {[ $self->response($_) ]} @{$response};
}
sub response { ## no critic (ProhibitExcessComplexity RequireArgUnpacking)
    # Validate one JSON-RPC 2.0 response and match it to a pending call.
    # Returns:
    #   ($error_string)                 on a malformed or unknown response
    #   ()                              if the matching call was canceled
    #   (undef, $result, $error, $call) on a valid response
    my ($self, $json) = @_;
    croak 'require 1 param' if @_ != 2;
    undef $@;
    # Accept raw JSON text or an already-decoded hash.
    my $response = ref $json ? $json : eval { decode_json($json) };
    if ($@) {
        return 'Parse error';
    }
    if (ref $response ne 'HASH') {
        return 'expect Object';
    }
    # Deliberate deviation from strict JSON-RPC 2.0 (see kept original line
    # below): a missing {jsonrpc} key is tolerated; only a wrong version
    # string is rejected.
    # if (!defined $response->{jsonrpc} || $response->{jsonrpc} ne '2.0') { original
    if (defined $response->{jsonrpc} && $response->{jsonrpc} ne '2.0') {
        return 'expect {jsonrpc}="2.0"';
    }
    if (!exists $response->{id} || ref $response->{id} || !defined $response->{id}) {
        return 'expect {id} is scalar';
    }
    if (!exists $self->{id}{ $response->{id} }) {
        return 'unknown {id}';
    }
    # Exactly one of {result} and {error} must be present.
    if (!(exists $response->{result} xor exists $response->{error})) {
        return 'expect {result} or {error}';
    }
    if (exists $response->{error}) {
        # Error objects must carry integer {code}, string {message}, and at
        # most the optional {data} key.
        my $e = $response->{error};
        if (ref $e ne 'HASH') {
            return 'expect {error} is Object';
        }
        if (!defined $e->{code} || ref $e->{code} || $e->{code} !~ /\A-?\d+\z/xms) {
            return 'expect {error}{code} is Integer';
        }
        if (!defined $e->{message} || ref $e->{message}) {
            return 'expect {error}{message} is String';
        }
        ## no critic (ProhibitMagicNumbers)
        if ((3 == keys %{$e} && !exists $e->{data}) || 3 < keys %{$e}) {
            return 'only optional key must be {error}{data}';
        }
    }
    # Recycle the id and drop the bookkeeping for this call. The {id} entry
    # is weak, so it may already be undef if the token was garbage-collected.
    my $id = $response->{id};
    push @{ $self->{free_id} }, $id;
    my $call = delete $self->{id}{ $id };
    if ($call) {
        $call = delete $self->{call}{ refaddr($call) };
    }
    if (!$call) {
        return; # call was canceled
    }
    return (undef, $response->{result}, $response->{error}, $call);
}
1; # Magic true value required at end of module
__END__
=encoding utf8
=head1 NAME
JSON::RPC2::Client - Transport-independent JSON-RPC 2.0 client
=head1 VERSION
This document describes JSON::RPC2::Client version v2.1.1
=head1 SYNOPSIS
use JSON::RPC2::Client;
$client = JSON::RPC2::Client->new();
$json_request = $client->notify('method', @params);
$json_request = $client->notify_named('method', %params);
($json_request, $call) = $client->call('method', @params);
($json_request, $call) = $client->call_named('method', %params);
($json_request, @call) = $client->batch(
$client->call('method1', @params),
$client->call('method2', @params),
$client->notify('method', @params),
$client->call_named('method', %params),
$client->notify_named('method', %params),
);
$client->cancel($call);
($failed, $result, $error, $call) = $client->response($json_response);
for ($client->batch_response($json_response)) {
($failed, $result, $error, $call) = @{ $_ };
...
}
@call = $client->pending();
#
# EXAMPLE of simple blocking STDIN-STDOUT client
#
$client = JSON::RPC2::Client->new();
$json_request = $client->call('method', @params);
printf "%s\n", $json_request;
$json_response = <STDIN>;
chomp $json_response;
($failed, $result, $error) = $client->response($json_response);
if ($failed) {
die "bad response: $failed";
} elsif ($error) {
printf "method(@params) failed with code=%d: %s\n",
$error->{code}, $error->{message};
} else {
print "method(@params) returned $result\n";
}
=head1 DESCRIPTION
Transport-independent implementation of JSON-RPC 2.0 client.
Can be used both in sync (simple, for blocking I/O) and async
(for non-blocking I/O in event-based environment) mode.
=head1 INTERFACE
=head2 new
$client = JSON::RPC2::Client->new();
Create and return new client object, which can be used to generate requests
(notify(), call()), parse responses (responses()) and cancel pending requests
(cancel(), pending()).
Each client object keep track of request IDs, so you must use dedicated
client object for each connection to server.
=head2 notify
=head2 notify_named
$json_request = $client->notify( $remote_method, @remote_params );
$json_request = $client->notify_named( $remote_method, %remote_params );
Notifications don't receive any replies, so they are unreliable.
Return ($json_request) - scalar which should be sent to server in any way.
=head2 call
=head2 call_named
($json_request, $call) = $client->call( $remote_method, @remote_params );
($json_request, $call) = $client->call_named( $remote_method, %remote_params );
Return ($json_request, $call) - scalar which should be sent to server in
any way and identifier of this remote procedure call.
The $call is just empty HASHREF, which can be used to: 1) keep user data
related to this call in hash fields - that $call will be returned by
response() when response to this call will be received; 2) to cancel()
this call before response will be received. There is usually no need for
the user to keep $call anywhere unless they want to be able to cancel() that call.
In scalar context return only $json_request - this is enough for simple
blocking clients which don't need to detect which of several pending()
calls was just replied or cancel() pending calls.
=head2 batch
($json_request, @call) = $client->batch(
$json_request1,
$json_request2,
$call2,
$json_request3,
...
);
Return ($json_request, @call) - scalar which should be sent to server in
any way and identifiers of these remote procedure calls (they'll be in
same order as they was in params). These two example are equivalent:
($json_request, $call1, $call3) = $client->batch(
$client->call('method1'),
$client->notify('method2'),
$client->call('method3'),
);
($json1, $call1) = $client->call('method1');
$json2 = $client->notify('method2');
($json3, $call3) = $client->call('method3');
$json_request = $client->batch($json1, $json2, $json3);
If you're using batch() to send some requests then you should process
RPC server's responses using batch_response(), not response().
=head2 batch_response
@responses = $client->batch_response( $json_response );
The $json_response can be either JSON string or ARRAYREF/HASHREF (useful
with C<< $handle->push_read(json => sub{...}) >> from L<AnyEvent::Handle>).
Will parse $json_response and return list with ARRAYREFS, which contain
4 elements returned by response().
It is safe to always use batch_response() instead of response(), even if
you don't send batch() requests at all.
=head2 response
($failed, $result, $error, $call) = $client->response( $json_response );
The $json_response can be either JSON string or HASHREF (useful
with C<< $handle->push_read(json => sub{...}) >> from L<AnyEvent::Handle>).
Will parse $json_response and return list with 4 elements:
($failed, $result, $error, $call)
$failed parse error message if $json_response is incorrect
$result data returned by successful remote method call
$error error returned by failed remote method call
$call identifier of this call
If $failed is defined then all others are undefined. Usually that means either
a bug in the JSON-RPC client or server.
Only one of $result and $error will be defined. Format of $result
completely depends on data returned by remote method. $error is HASHREF
with fields {code}, {message}, {data} - code should be integer, message
should be string, and data is optional value in arbitrary format.
The $call should be used to identify which of the currently pending() calls
just returned - it will be the same HASHREF as was initially returned by call()
when starting this remote procedure call, and may contain any user data
which was placed in it after calling call().
There also special case when all 4 values will be undefined - that happens
if $json_response was related to call which was already cancel()ed by user.
If you're using batch() to send some requests then you should process
RPC server's responses using batch_response(), not response().
=head2 cancel
$client->cancel( $call );
Will cancel that $call. This doesn't affect server - it will continue
processing related request and will send response when ready, but that
response will be ignored by client's response().
Return nothing.
=head2 pending
@call = $client->pending();
Return list with all currently pending $call's.
=head1 SUPPORT
=head2 Bugs / Feature Requests
Please report any bugs or feature requests through the issue tracker
at L<https://github.com/powerman/perl-JSON-RPC2/issues>.
You will be notified automatically of any progress on your issue.
=head2 Source Code
This is open source software. The code repository is available for
public review and contribution under the terms of the license.
Feel free to fork the repository and submit pull requests.
L<https://github.com/powerman/perl-JSON-RPC2>
git clone https://github.com/powerman/perl-JSON-RPC2.git
=head2 Resources
=over
=item * MetaCPAN Search
L<https://metacpan.org/search?q=JSON-RPC2>
=item * CPAN Ratings
L<http://cpanratings.perl.org/dist/JSON-RPC2>
=item * AnnoCPAN: Annotated CPAN documentation
L<http://annocpan.org/dist/JSON-RPC2>
=item * CPAN Testers Matrix
L<http://matrix.cpantesters.org/?dist=JSON-RPC2>
=item * CPANTS: A CPAN Testing Service (Kwalitee)
L<http://cpants.cpanauthors.org/dist/JSON-RPC2>
=back
=head1 AUTHOR
Alex Efros E<lt>powerman@cpan.orgE<gt>
=head1 COPYRIGHT AND LICENSE
This software is Copyright (c) 2009- by Alex Efros E<lt>powerman@cpan.orgE<gt>.
This is free software, licensed under:
The MIT (X11) License
=cut
| lclc/perl-stratum | perl-JSON-RPC2/lib/JSON/RPC2/Client.pm | Perl | apache-2.0 | 12,811 |
# Auto-generated by SOAP::WSDL: data type class for the XML Schema element
# "query" in the AdWords cm/v201402 namespace (see POD below).
# Do not hand-edit the generated accessors; regenerate from the WSDL instead.
package Google::Ads::AdWords::v201402::CampaignCriterionService::query;
use strict;
use warnings;
{ # BLOCK to scope variables
# XML namespace this element belongs to, as declared in the WSDL/XSD.
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201402' }
__PACKAGE__->__set_name('query');
__PACKAGE__->__set_nillable();
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();
use base qw(
SOAP::WSDL::XSD::Typelib::Element
Google::Ads::SOAP::Typelib::ComplexType
);
# This element defines no XML attributes, hence the undef attribute class.
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;
sub __get_attr_class {
return $XML_ATTRIBUTE_CLASS;
}
# Class::Std-style inside-out object storage; the constructor is inherited
# from the complex-type base class (hence constructor => 'none').
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);
{ # BLOCK to scope variables
my %query_of :ATTR(:get<query>);
# _factory wires the single "query" property: field order, storage hash,
# XSD type (xsd:string), and its XML element name.
__PACKAGE__->_factory(
[ qw( query
) ],
{
'query' => \%query_of,
},
{
'query' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
},
{
'query' => 'query',
}
);
} # end BLOCK
} # end of BLOCK
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201402::CampaignCriterionService::query
=head1 DESCRIPTION
Perl data type class for the XML Schema defined element
query from the namespace https://adwords.google.com/api/adwords/cm/v201402.
Returns the list of campaign criteria that match the query. @param query The SQL-like AWQL query string. @return A list of campaign criteria. @throws ApiException if problems occur while parsing the query or fetching campaign criteria.
=head1 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * query
$element->set_query($data);
$element->get_query();
=back
=head1 METHODS
=head2 new
my $element = Google::Ads::AdWords::v201402::CampaignCriterionService::query->new($data);
Constructor. The following data structure may be passed to new():
{
query => $some_value, # string
},
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| gitpan/GOOGLE-ADWORDS-PERL-CLIENT | lib/Google/Ads/AdWords/v201402/CampaignCriterionService/query.pm | Perl | apache-2.0 | 1,956 |
package Plack::Request;
use strict;
use warnings;
use 5.008_001;
our $VERSION = '1.0030';
use HTTP::Headers;
use Carp ();
use Hash::MultiValue;
use HTTP::Body;
use Plack::Request::Upload;
use Stream::Buffered;
use URI;
use URI::Escape ();
# Construct a Plack::Request wrapping a PSGI environment hashref.
# Croaks unless $env is a defined HASH reference.
sub new {
    my ($class, $env) = @_;
    if (!defined $env || ref($env) ne 'HASH') {
        Carp::croak(q{$env is required});
    }
    return bless { env => $env }, $class;
}
# Read-only accessors exposing raw PSGI environment values.
# Each returns the corresponding $env entry unchanged (may be undef),
# except path(), which falls back to '/' when PATH_INFO is empty.
sub env { $_[0]->{env} }
sub address { $_[0]->env->{REMOTE_ADDR} }
sub remote_host { $_[0]->env->{REMOTE_HOST} }
sub protocol { $_[0]->env->{SERVER_PROTOCOL} }
sub method { $_[0]->env->{REQUEST_METHOD} }
sub port { $_[0]->env->{SERVER_PORT} }
sub user { $_[0]->env->{REMOTE_USER} }
sub request_uri { $_[0]->env->{REQUEST_URI} }
sub path_info { $_[0]->env->{PATH_INFO} }
sub path { $_[0]->env->{PATH_INFO} || '/' }
sub script_name { $_[0]->env->{SCRIPT_NAME} }
sub scheme { $_[0]->env->{'psgi.url_scheme'} }
sub secure { $_[0]->scheme eq 'https' }
# body and input are aliases for the PSGI input stream handle.
sub body { $_[0]->env->{'psgi.input'} }
sub input { $_[0]->env->{'psgi.input'} }
sub content_length { $_[0]->env->{CONTENT_LENGTH} }
sub content_type { $_[0]->env->{CONTENT_TYPE} }
# psgix.* extensions are optional; these return undef if absent.
sub session { $_[0]->env->{'psgix.session'} }
sub session_options { $_[0]->env->{'psgix.session.options'} }
sub logger { $_[0]->env->{'psgix.logger'} }
# Parse the Cookie request header into a { name => URI-decoded value } hashref.
# The parse is memoized in the PSGI env and reused for as long as the raw
# HTTP_COOKIE string is unchanged.
sub cookies {
my $self = shift;
return {} unless $self->env->{HTTP_COOKIE};
# HTTP_COOKIE hasn't changed: reuse the parsed cookie
if ( $self->env->{'plack.cookie.parsed'}
&& $self->env->{'plack.cookie.string'} eq $self->env->{HTTP_COOKIE}) {
return $self->env->{'plack.cookie.parsed'};
}
$self->env->{'plack.cookie.string'} = $self->env->{HTTP_COOKIE};
my %results;
# Split on ';' or ',' separators (optionally followed by a space);
# tokens without '=' are discarded by the grep.
my @pairs = grep m/=/, split "[;,] ?", $self->env->{'plack.cookie.string'};
for my $pair ( @pairs ) {
# trim leading trailing whitespace
$pair =~ s/^\s+//; $pair =~ s/\s+$//;
my ($key, $value) = map URI::Escape::uri_unescape($_), split( "=", $pair, 2 );
# Take the first one like CGI.pm or rack do
$results{$key} = $value unless exists $results{$key};
}
# The value of this assignment (\%results) is also the sub's return value.
$self->env->{'plack.cookie.parsed'} = \%results;
}
# Return the query-string (GET) parameters as a Hash::MultiValue.
# The parsed result is memoized in the shared PSGI env.
sub query_parameters {
    my $self = shift;
    my $env = $self->env;
    $env->{'plack.request.query'} ||= $self->_parse_query;
    return $env->{'plack.request.query'};
}
# Parse QUERY_STRING into a flat key/value list and wrap it in Hash::MultiValue.
# Two query styles are supported:
#   ?foo=bar&baz=quux  - standard pairs ('+' decoded to space, then unescaped)
#   ?dog+bones         - isearch-style word list (each word gets an empty value)
sub _parse_query {
    my $self = shift;
    my $qs = $self->env->{QUERY_STRING};
    my @pairs;
    if (defined $qs) {
        if ($qs =~ /=/) {
            # Standard key=value pairs separated by '&' or ';'.
            for my $token (split /[&;]/, $qs) {
                my ($k, $v) = $token =~ /=/ ? split(/=/, $token, 2) : ($token => '');
                for ($k, $v) {
                    s/\+/ /g;
                    $_ = URI::Escape::uri_unescape($_);
                }
                push @pairs, $k, $v;
            }
        } else {
            # Word-list form: '+'-separated keys, each paired with ''.
            for my $word (split /\+/, $qs, -1) {
                push @pairs, URI::Escape::uri_unescape($word), '';
            }
        }
    }
    return Hash::MultiValue->new(@pairs);
}
# Return the raw request body as an undecoded byte string.
# First forces the input stream to be buffered (seekable) via
# _parse_request_body, then reads CONTENT_LENGTH bytes and rewinds the
# handle so later readers still see the full body.
sub content {
my $self = shift;
unless ($self->env->{'psgix.input.buffered'}) {
$self->_parse_request_body;
}
my $fh = $self->input or return '';
# NOTE: a missing (or "0") CONTENT_LENGTH yields '' here - empty body.
my $cl = $self->env->{CONTENT_LENGTH} or return '';
$fh->seek(0, 0); # just in case middleware/apps read it without seeking back
$fh->read(my($content), $cl, 0);
$fh->seek(0, 0);
return $content;
}
sub raw_body { $_[0]->content }
# XXX you can mutate headers with ->headers but it's not written through to the env
# Lazily build an HTTP::Headers object from every HTTP_* / CONTENT_* key in
# the PSGI env, stripping the HTTP_/HTTPS_ prefix to recover the header
# name. The object is cached on the request instance, not in the env.
sub headers {
my $self = shift;
if (!defined $self->{headers}) {
my $env = $self->env;
$self->{headers} = HTTP::Headers->new(
map {
(my $field = $_) =~ s/^HTTPS?_//;
( $field => $env->{$_} );
}
grep { /^(?:HTTP|CONTENT)/i } keys %$env
);
}
$self->{headers};
}
# Convenience shortcuts delegating to the lazily-built HTTP::Headers object.
sub content_encoding { shift->headers->content_encoding(@_) }
sub header { shift->headers->header(@_) }
sub referer { shift->headers->referer(@_) }
sub user_agent { shift->headers->user_agent(@_) }
# Return the POSTed body parameters as a Hash::MultiValue.
# The body is parsed lazily on first access and cached in the PSGI env.
sub body_parameters {
    my $self = shift;
    my $env = $self->env;
    $self->_parse_request_body if !$env->{'plack.request.body'};
    return $env->{'plack.request.body'};
}
# Merged GET + POST parameters as a single Hash::MultiValue
# (query parameters first, then body parameters). Memoized in the env.
sub parameters {
    my $self = shift;
    my $env = $self->env;
    if (!$env->{'plack.request.merged'}) {
        my @flat = ($self->query_parameters->flatten, $self->body_parameters->flatten);
        $env->{'plack.request.merged'} = Hash::MultiValue->new(@flat);
    }
    return $env->{'plack.request.merged'};
}
# Return uploaded files as a Hash::MultiValue of Plack::Request::Upload
# objects, parsing the request body on first access.
sub uploads {
    my $self = shift;
    my $cached = $self->env->{'plack.request.upload'};
    return $cached if $cached;
    $self->_parse_request_body;
    return $self->env->{'plack.request.upload'};
}
# CGI.pm-compatible parameter accessor (read-only):
#   $req->param          - list of parameter names
#   $req->param('k')     - scalar: first value; list: all values
sub param {
    my $self = shift;
    unless (@_) {
        return keys %{ $self->parameters };
    }
    my $name = shift;
    return wantarray
        ? $self->parameters->get_all($name)
        : $self->parameters->{$name};
}
# CGI.pm-style accessor for uploads:
#   $req->upload         - list of upload field names
#   $req->upload('k')    - scalar: first upload; list: all uploads
sub upload {
    my $self = shift;
    unless (@_) {
        return keys %{ $self->uploads };
    }
    my $field = shift;
    return wantarray
        ? $self->uploads->get_all($field)
        : $self->uploads->{$field};
}
# Build a fresh URI object for the full request URI, reconstructed from
# _uri_base() plus re-escaped PATH_INFO and the verbatim QUERY_STRING.
# A new (cloned) canonical URI is returned on every call.
sub uri {
my $self = shift;
my $base = $self->_uri_base;
# We have to escape back PATH_INFO in case they include stuff like
# ? or # so that the URI parser won't be tricked. However we should
# preserve '/' since encoding them into %2f doesn't make sense.
# This means when a request like /foo%2fbar comes in, we recognize
# it as /foo/bar which is not ideal, but that's how the PSGI PATH_INFO
# spec goes and we can't do anything about it. See PSGI::FAQ for details.
# See RFC 3986 before modifying.
my $path_escape_class = q{^/;:@&=A-Za-z0-9\$_.+!*'(),-};
my $path = URI::Escape::uri_escape($self->env->{PATH_INFO} || '', $path_escape_class);
# QUERY_STRING is appended as-is: it is already URI-encoded.
$path .= '?' . $self->env->{QUERY_STRING}
if defined $self->env->{QUERY_STRING} && $self->env->{QUERY_STRING} ne '';
# Avoid a double slash where the base ends in '/' and the path starts with one.
$base =~ s!/$!! if $path =~ m!^/!;
return URI->new($base . $path)->canonical;
}
# URI object for the application's base path (scheme://host/SCRIPT_NAME).
# Returns a fresh canonicalized URI on every call.
sub base {
    my $self = shift;
    my $uri = URI->new($self->_uri_base);
    return $uri->canonical;
}
# Assemble the base URI string from the PSGI env:
# scheme://HTTP_HOST (or SERVER_NAME:SERVER_PORT) followed by SCRIPT_NAME.
sub _uri_base {
    my $self = shift;
    my $env  = $self->env;
    my $scheme = $env->{'psgi.url_scheme'} || 'http';
    my $host = $env->{HTTP_HOST};
    unless ($host) {
        $host = ($env->{SERVER_NAME} || '') . ':' . ($env->{SERVER_PORT} || 80);
    }
    my $script = $env->{SCRIPT_NAME} || '/';
    return $scheme . '://' . $host . $script;
}
# Factory for Plack::Response objects; lazy-loads the class and forwards
# all arguments to its constructor. Subclasses may override this to
# customize response creation.
sub new_response {
my $self = shift;
require Plack::Response;
Plack::Response->new(@_);
}
# Parse the request body with HTTP::Body, populating in the PSGI env:
#   plack.request.body   - Hash::MultiValue of form parameters
#   plack.request.upload - Hash::MultiValue of Plack::Request::Upload objects
# Also buffers psgi.input (via Stream::Buffered) when it wasn't already,
# so the body can be re-read later, and marks psgix.input.buffered.
sub _parse_request_body {
my $self = shift;
my $ct = $self->env->{CONTENT_TYPE};
my $cl = $self->env->{CONTENT_LENGTH};
if (!$ct && !$cl) {
# No Content-Type nor Content-Length -> GET/HEAD
$self->env->{'plack.request.body'} = Hash::MultiValue->new;
$self->env->{'plack.request.upload'} = Hash::MultiValue->new;
return;
}
my $body = HTTP::Body->new($ct, $cl);
# HTTP::Body will create temporary files in case there was an
# upload. Those temporary files can be cleaned up by telling
# HTTP::Body to do so. It will run the cleanup when the request
# env is destroyed. That the object will not go out of scope by
# the end of this sub we will store a reference here.
$self->env->{'plack.request.http.body'} = $body;
$body->cleanup(1);
my $input = $self->input;
my $buffer;
if ($self->env->{'psgix.input.buffered'}) {
# Just in case if input is read by middleware/apps beforehand
$input->seek(0, 0);
} else {
$buffer = Stream::Buffered->new($cl);
}
# Stream the body through HTTP::Body in <=8KB chunks; the spin counter
# guards against a client disconnect leaving Content-Length unsatisfied.
my $spin = 0;
while ($cl) {
$input->read(my $chunk, $cl < 8192 ? $cl : 8192);
my $read = length $chunk;
$cl -= $read;
$body->add($chunk);
$buffer->print($chunk) if $buffer;
if ($read == 0 && $spin++ > 2000) {
Carp::croak "Bad Content-Length: maybe client disconnect? ($cl bytes remaining)";
}
}
if ($buffer) {
# Replace psgi.input with the rewound buffered copy for future readers.
$self->env->{'psgix.input.buffered'} = 1;
$self->env->{'psgi.input'} = $buffer->rewind;
} else {
$input->seek(0, 0);
}
$self->env->{'plack.request.body'} = Hash::MultiValue->from_mixed($body->param);
# Wrap each raw HTTP::Body upload record in a Plack::Request::Upload object.
my @uploads = Hash::MultiValue->from_mixed($body->upload)->flatten;
my @obj;
while (my($k, $v) = splice @uploads, 0, 2) {
push @obj, $k, $self->_make_upload($v);
}
$self->env->{'plack.request.upload'} = Hash::MultiValue->new(@obj);
1;
}
# Convert one raw HTTP::Body upload hashref into a Plack::Request::Upload,
# replacing its plain headers hash with a real HTTP::Headers object.
sub _make_upload {
    my ($self, $upload) = @_;
    my %args = %{$upload};
    $args{headers} = HTTP::Headers->new(%{ $upload->{headers} });
    return Plack::Request::Upload->new(%args);
}
1;
__END__
=head1 NAME
Plack::Request - Portable HTTP request object from PSGI env hash
=head1 SYNOPSIS
use Plack::Request;
my $app_or_middleware = sub {
my $env = shift; # PSGI env
my $req = Plack::Request->new($env);
my $path_info = $req->path_info;
my $query = $req->param('query');
my $res = $req->new_response(200); # new Plack::Response
$res->finalize;
};
=head1 DESCRIPTION
L<Plack::Request> provides a consistent API for request objects across
web server environments.
=head1 CAVEAT
Note that this module is intended to be used by Plack middleware
developers and web application framework developers rather than
application developers (end users).
Writing your web application directly using Plack::Request is
certainly possible but not recommended: it's like doing so with
mod_perl's Apache::Request: yet too low level.
If you're writing a web application, not a framework, then you're
encouraged to use one of the web application frameworks that support PSGI (L<http://plackperl.org/#frameworks>),
or see modules like L<HTTP::Engine> to provide higher level
Request and Response API on top of PSGI.
=head1 METHODS
Some of the methods defined in the earlier versions are deprecated in
version 0.99. Take a look at L</"INCOMPATIBILITIES">.
Unless otherwise noted, all methods and attributes are B<read-only>,
and passing values to the method like an accessor doesn't work like
you expect it to.
=head2 new
Plack::Request->new( $env );
Creates a new request object.
=head1 ATTRIBUTES
=over 4
=item env
Returns the shared PSGI environment hash reference. This is a
reference, so writing to this environment passes through during the
whole PSGI request/response cycle.
=item address
Returns the IP address of the client (C<REMOTE_ADDR>).
=item remote_host
Returns the remote host (C<REMOTE_HOST>) of the client. It may be
empty, in which case you have to get the IP address using C<address>
method and resolve on your own.
=item method
Contains the request method (C<GET>, C<POST>, C<HEAD>, etc).
=item protocol
Returns the protocol (HTTP/1.0 or HTTP/1.1) used for the current request.
=item request_uri
Returns the raw, undecoded request URI path. You probably do B<NOT>
want to use this to dispatch requests.
=item path_info
Returns B<PATH_INFO> in the environment. Use this to get the local
path for the requests.
=item path
Similar to C<path_info> but returns C</> in case it is empty. In other
words, it returns the virtual path of the request URI after C<<
$req->base >>. See L</"DISPATCHING"> for details.
=item script_name
Returns B<SCRIPT_NAME> in the environment. This is the absolute path
where your application is hosted.
=item scheme
Returns the scheme (C<http> or C<https>) of the request.
=item secure
Returns true or false, indicating whether the connection is secure (https).
=item body, input
Returns C<psgi.input> handle.
=item session
Returns (optional) C<psgix.session> hash. When it exists, you can
retrieve and store per-session data from and to this hash.
=item session_options
Returns (optional) C<psgix.session.options> hash.
=item logger
Returns (optional) C<psgix.logger> code reference. When it exists,
your application is supposed to send the log message to this logger,
using:
$req->logger->({ level => 'debug', message => "This is a debug message" });
=item cookies
Returns a reference to a hash containing the cookies. Values are
strings that are sent by clients and are URI decoded.
If there are multiple cookies with the same name in the request, this
method will ignore the duplicates and return only the first value. If
that causes issues for you, you may have to use modules like
CGI::Simple::Cookie to parse C<< $request->header('Cookie') >> by
yourself.
=item query_parameters
Returns a reference to a hash containing query string (GET)
parameters. This hash reference is L<Hash::MultiValue> object.
=item body_parameters
Returns a reference to a hash containing posted parameters in the
request body (POST). As with C<query_parameters>, the hash
reference is a L<Hash::MultiValue> object.
=item parameters
Returns a L<Hash::MultiValue> hash reference containing (merged) GET
and POST parameters.
=item content, raw_body
Returns the request content in an undecoded byte string for POST requests.
=item uri
Returns an URI object for the current request. The URI is constructed
using various environment values such as C<SCRIPT_NAME>, C<PATH_INFO>,
C<QUERY_STRING>, C<HTTP_HOST>, C<SERVER_NAME> and C<SERVER_PORT>.
Every time this method is called it returns a new, cloned URI object.
=item base
Returns an URI object for the base path of current request. This is
like C<uri> but only contains up to C<SCRIPT_NAME> where your
application is hosted at.
Every time this method is called it returns a new, cloned URI object.
=item user
Returns C<REMOTE_USER> if it's set.
=item headers
Returns an L<HTTP::Headers> object containing the headers for the current request.
=item uploads
Returns a reference to a hash containing uploads. The hash reference
is a L<Hash::MultiValue> object and values are L<Plack::Request::Upload>
objects.
=item content_encoding
Shortcut to $req->headers->content_encoding.
=item content_length
Shortcut to $req->headers->content_length.
=item content_type
Shortcut to $req->headers->content_type.
=item header
Shortcut to $req->headers->header.
=item referer
Shortcut to $req->headers->referer.
=item user_agent
Shortcut to $req->headers->user_agent.
=item param
Returns GET and POST parameters with a CGI.pm-compatible param
method. This is an alternative method for accessing parameters in
$req->parameters. Unlike CGI.pm, it does I<not> allow
setting or modifying query parameters.
$value = $req->param( 'foo' );
@values = $req->param( 'foo' );
@params = $req->param;
=item upload
A convenient method to access $req->uploads.
$upload = $req->upload('field');
@uploads = $req->upload('field');
@fields = $req->upload;
for my $upload ( $req->upload('field') ) {
print $upload->filename;
}
=item new_response
my $res = $req->new_response;
Creates a new L<Plack::Response> object. Handy to remove dependency on
L<Plack::Response> in your code for easy subclassing and duck typing
in web application frameworks, as well as overriding Response
generation in middlewares.
=back
=head2 Hash::MultiValue parameters
Parameters that can take one or multiple values (i.e. C<parameters>,
C<query_parameters>, C<body_parameters> and C<uploads>) store the
hash reference as a L<Hash::MultiValue> object. This means you can use
the hash reference as a plain hash where values are B<always> scalars
(B<NOT> array references), so you don't need to code ugly and unsafe
C<< ref ... eq 'ARRAY' >> anymore.
And if you explicitly want to get multiple values of the same key, you
can call the C<get_all> method on it, such as:
my @foo = $req->query_parameters->get_all('foo');
You can also call C<get_one> to always get one parameter independent
of the context (unlike C<param>), and even call C<mixed> (with
Hash::MultiValue 0.05 or later) to get the I<traditional> hash
reference,
my $params = $req->parameters->mixed;
where values are either a scalar or an array reference depending on
input, so it might be useful if you already have the code to deal with
that ugliness.
=head2 PARSING POST BODY and MULTIPLE OBJECTS
The methods to parse request body (C<content>, C<body_parameters> and
C<uploads>) are carefully coded to save the parsed body in the
environment hash as well as in the temporary buffer, so you can call
them multiple times and create Plack::Request objects multiple times
in a request and they should work safely, and won't parse request body
more than twice for the efficiency.
=head1 DISPATCHING
If your application or framework wants to dispatch (or route) actions
based on request paths, be sure to use C<< $req->path_info >> not C<<
$req->uri->path >>.
This is because C<path_info> gives you the virtual path of the request,
regardless of how your application is mounted. If your application is
hosted with mod_perl or CGI scripts, or even multiplexed with tools
like L<Plack::App::URLMap>, request's C<path_info> always gives you
the action path.
Note that C<path_info> might give you an empty string, in which case
you should assume that the path is C</>.
You will also want to use C<< $req->base >> as a base prefix when
building URLs in your templates or in redirections. It's a good idea
for you to subclass Plack::Request and define methods such as:
sub uri_for {
my($self, $path, $args) = @_;
my $uri = $self->base;
$uri->path($uri->path . $path);
$uri->query_form(@$args) if $args;
$uri;
}
So you can say:
my $link = $req->uri_for('/logout', [ signoff => 1 ]);
and if C<< $req->base >> is C</app> you'll get the full URI for
C</app/logout?signoff=1>.
=head1 INCOMPATIBILITIES
In version 0.99, many utility methods are removed or deprecated, and
most methods are made read-only. These methods were deleted in version
1.0001.
All parameter-related methods such as C<parameters>,
C<body_parameters>, C<query_parameters> and C<uploads> now contains
L<Hash::MultiValue> objects, rather than I<scalar or an array
reference depending on the user input> which is insecure. See
L<Hash::MultiValue> for more about this change.
C<< $req->path >> method had a bug, where the code and the document
was mismatching. The document was suggesting it returns the sub
request path after C<< $req->base >> but the code was always returning
the absolute URI path. The code is now updated to be an alias of C<<
$req->path_info >> but returns C</> in case it's empty. If you need
the older behavior, just call C<< $req->uri->path >> instead.
Cookie handling is simplified, and doesn't use L<CGI::Simple::Cookie>
anymore, which means you B<CAN NOT> set array reference or hash
reference as a cookie value and expect it be serialized. You're always
required to set string value, and encoding or decoding them is totally
up to your application or framework. Also, C<cookies> hash reference
now returns I<strings> for the cookies rather than CGI::Simple::Cookie
objects, which means you no longer have to write a wacky code such as:
$v = $req->cookies->{foo} ? $req->cookies->{foo}->value : undef;
and instead, simply do:
$v = $req->cookies->{foo};
=head1 AUTHORS
Tatsuhiko Miyagawa
Kazuhiro Osawa
Tokuhiro Matsuno
=head1 SEE ALSO
L<Plack::Response> L<HTTP::Request>, L<Catalyst::Request>
=head1 LICENSE
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself.
=cut
| MorganCabral/cabralcc.com | app/cgi-bin/lib/Plack/Request.pm | Perl | apache-2.0 | 19,574 |
use strict;
package CPAN::Reporter::Config;
our $VERSION = '1.2011'; # VERSION
use Config::Tiny 2.08 ();
use File::Glob ();
use File::HomeDir 0.58 ();
use File::Path qw/mkpath/;
use File::Spec 3.19 ();
use IPC::Cmd 0.76 ();
use IO::File ();
use CPAN 1.9301 (); # for printing warnings
#--------------------------------------------------------------------------#
# Back-compatibility checks -- just once per load
#--------------------------------------------------------------------------#
# 0.28_51 changed Mac OS X config file location -- if old directory is found,
# move it to the new location
if ( $^O eq 'darwin' ) {
    my $old = File::Spec->catdir(File::HomeDir->my_documents,".cpanreporter");
    my $new = File::Spec->catdir(File::HomeDir->my_home,".cpanreporter");
    # migrate only when the legacy directory exists and the new one doesn't,
    # so an already-migrated (or fresh) install is left untouched
    if ( ( -d $old ) && (! -d $new ) ) {
        $CPAN::Frontend->mywarn( << "HERE");
CPAN::Reporter: since CPAN::Reporter 0.28_51, the Mac OSX config directory
has changed.
Old: $old
New: $new
Your existing configuration file will be moved automatically.
HERE
        mkpath($new);
        my $OLD_CONFIG = IO::File->new(
            File::Spec->catfile($old, "config.ini"), "<"
        ) or die $!;
        my $NEW_CONFIG = IO::File->new(
            File::Spec->catfile($new, "config.ini"), ">"
        ) or die $!;
        # slurp-copy the old config file into the new location
        $NEW_CONFIG->print( do { local $/; <$OLD_CONFIG> } );
        $OLD_CONFIG->close;
        $NEW_CONFIG->close;
        # remove the old file and (now empty) old directory; die loudly on
        # failure rather than silently leaving two half-migrated configs
        unlink File::Spec->catfile($old, "config.ini") or die $!;
        rmdir($old) or die $!;
    }
}
#--------------------------------------------------------------------------#
# Public
#--------------------------------------------------------------------------#
#--------------------------------------------------------------------------#
# _configure
#
# Interactively create or update the CPAN::Reporter config file.  Walks
# through each option in _config_order(), then re-prompts for any advanced
# options already present in an existing config file, and finally writes
# the file back to disk.  Returns the hashref of saved options on success,
# or an empty list on failure.
#--------------------------------------------------------------------------#
sub _configure {
    my $config_dir = _get_config_dir();
    my $config_file = _get_config_file();

    # make sure the configuration directory exists before writing anything
    mkpath $config_dir if ! -d $config_dir;
    if ( ! -d $config_dir ) {
        $CPAN::Frontend->myprint(
            "\nCPAN::Reporter: couldn't create configuration directory '$config_dir': $!"
        );
        return;
    }

    my $config;
    my $existing_options;

    # explain grade:action pairs
    $CPAN::Frontend->myprint( _grade_action_prompt() );

    # read or create
    if ( -f $config_file ) {
        $CPAN::Frontend->myprint(
            "\nCPAN::Reporter: found your CPAN::Reporter config file at:\n$config_file\n"
        );
        $config = _open_config_file();
        # if we can't read it, bail out
        if ( ! $config ) {
            $CPAN::Frontend->mywarn("\n
CPAN::Reporter: configuration will not be changed\n");
            return;
        }
        # clone what's in the config file
        $existing_options = { %{$config->{_}} } if $config;
        $CPAN::Frontend->myprint(
            "\nCPAN::Reporter: Updating your CPAN::Reporter configuration settings:\n"
        );
    }
    else {
        $CPAN::Frontend->myprint(
            "\nCPAN::Reporter: no config file found; creating a new one.\n"
        );
        $config = Config::Tiny->new();
    }

    my %spec = _config_spec();

    for my $k ( _config_order() ) {
        my $option_data = $spec{$k};
        $CPAN::Frontend->myprint( "\n" . $option_data->{info}. "\n");
        # options with defaults are mandatory
        if ( defined $option_data->{default} ) {
            # if we have a default, always show as a sane recommendation
            if ( length $option_data->{default} ) {
                $CPAN::Frontend->myprint(
                    "(Recommended: '$option_data->{default}')\n\n"
                );
            }
            # repeat until validated; the prompt pre-fills the current
            # value from the config file, falling back to the default
            PROMPT:
            while ( defined (
                my $answer = CPAN::Shell::colorable_makemaker_prompt(
                    "$k?",
                    $existing_options->{$k} || $option_data->{default}
                )
            )) {
                if ( ! $option_data->{validate} ||
                    $option_data->{validate}->($k, $answer, $config->{_})
                ) {
                    $config->{_}{$k} = $answer;
                    last PROMPT;
                }
            }
        }
        else {
            # only initialize options without default if
            # answer matches non white space and validates,
            # otherwise reset it
            my $answer = CPAN::Shell::colorable_makemaker_prompt(
                "$k?",
                $existing_options->{$k} || q{}
            );
            if ( $answer =~ /\S/ ) {
                $config->{_}{$k} = $answer;
            }
            else {
                delete $config->{_}{$k};
            }
        }
        # delete existing as we proceed so we know what's left
        delete $existing_options->{$k};
    }

    # initialize remaining existing options -- anything left over is an
    # "advanced" option that was added to the file by hand
    $CPAN::Frontend->myprint(
        "\nYour CPAN::Reporter config file also contains these advanced " .
        "options:\n\n") if keys %$existing_options;
    for my $k ( keys %$existing_options ) {
        $config->{_}{$k} = CPAN::Shell::colorable_makemaker_prompt(
            "$k?", $existing_options->{$k}
        );
    }

    # persist the (possibly updated) configuration
    $CPAN::Frontend->myprint(
        "\nCPAN::Reporter: writing config file to '$config_file'.\n"
    );
    if ( $config->write( $config_file ) ) {
        return $config->{_};
    }
    else {
        $CPAN::Frontend->mywarn( "\nCPAN::Reporter: error writing config file to '$config_file':\n"
            . Config::Tiny->errstr(). "\n");
        return;
    }
}
#--------------------------------------------------------------------------#
# Private
#--------------------------------------------------------------------------#
#--------------------------------------------------------------------------#
# _config_order -- determines order of interactive config. Only items
# in interactive config will be written to a starter config file
#--------------------------------------------------------------------------#
sub _config_order {
    # interactive prompting happens in exactly this order; options not
    # listed here are only offered if already present in the config file
    return (
        'email_from',
        'edit_report',
        'send_report',
        'transport',
    );
}
#--------------------------------------------------------------------------#
# _config_spec -- returns configuration options information
#
# Keys include
# default -- recommended value, used in prompts and as a fallback
# if an options is not set; mandatory if defined
# prompt -- short prompt for EU::MM prompting
# info -- long description shown before prompting
# validate -- CODE ref; return normalized option or undef if invalid
#--------------------------------------------------------------------------#
my %option_specs = (
    email_from => {
        default => '',
        prompt => 'What email address will be used to reference your reports?',
        info => <<'HERE',
CPAN::Reporter requires a valid email address to identify senders
in the body of a test report. Please use a standard email format
like: "John Doe" <jdoe@example.com>
HERE
    },
    # kept only so old config files can be recognized and cleaned up
    smtp_server => {
        default => undef, # (deprecated)
        prompt => "[DEPRECATED] It's safe to remove this from your config file.",
    },
    edit_report => {
        default => 'default:ask/no pass/na:no',
        prompt => "Do you want to review or edit the test report?",
        validate => \&_validate_grade_action_pair,
        info => <<'HERE',
Before test reports are sent, you may want to review or edit the test
report and add additional comments about the result or about your system
or Perl configuration. By default, CPAN::Reporter will ask after
each report is generated whether or not you would like to edit the
report. This option takes "grade:action" pairs.
HERE
    },
    send_report => {
        default => 'default:ask/yes pass/na:yes',
        prompt => "Do you want to send the report?",
        validate => \&_validate_grade_action_pair,
        info => <<'HERE',
By default, CPAN::Reporter will prompt you for confirmation that
the test report should be sent before actually doing it. This
gives the opportunity to skip sending particular reports if
you need to (e.g. if you caused the failure). This option takes
"grade:action" pairs.
HERE
    },
    transport => {
        default => 'Metabase uri https://metabase.cpantesters.org/api/v1/ id_file metabase_id.json',
        prompt => 'Which transport system will be used to transmit the reports?',
        validate => \&_validate_transport,
        info => <<'HERE',
CPAN::Reporter sends your reports over HTTPS using Metabase. This option lets
you set a different uri, transport mechanism and metabase profile path. If you
are receiving HTTPS errors, you may change the uri to use plain HTTP, though
this is not recommended. Unless you know what you're doing, just accept
the default value.
HERE
    },
    send_duplicates => {
        default => 'default:no',
        prompt => "This report is identical to a previous one. Send it anyway?",
        validate => \&_validate_grade_action_pair,
        info => <<'HERE',
CPAN::Reporter records tests grades for each distribution, version and
platform. By default, duplicates of previous results will not be sent at
all, regardless of the value of the "send_report" option. This option takes
"grade:action" pairs.
HERE
    },
    # the options below have no 'info' text and an undef default, so they
    # are optional and only kept if explicitly configured (see _configure)
    send_PL_report => {
        prompt => "Do you want to send the PL report?",
        default => undef,
        validate => \&_validate_grade_action_pair,
    },
    send_make_report => {
        prompt => "Do you want to send the make/Build report?",
        default => undef,
        validate => \&_validate_grade_action_pair,
    },
    send_test_report => {
        prompt => "Do you want to send the test report?",
        default => undef,
        validate => \&_validate_grade_action_pair,
    },
    send_skipfile => {
        prompt => "What file has patterns for things that shouldn't be reported?",
        default => undef,
        validate => \&_validate_skipfile,
    },
    cc_skipfile => {
        prompt => "What file has patterns for things that shouldn't CC to authors?",
        default => undef,
        validate => \&_validate_skipfile,
    },
    command_timeout => {
        prompt => "If no timeout is set by CPAN, halt system commands after how many seconds?",
        default => undef,
        validate => \&_validate_seconds,
    },
    # advanced options -- never prompted for unless already in the file
    email_to => {
        default => undef,
    },
    editor => {
        default => undef,
    },
    debug => {
        default => undef,
    },
    retry_submission => {
        default => undef,
    },
);

# returns a copy of the full option specification table (flattened to a
# list, so assign to a hash at the call site)
sub _config_spec { return %option_specs }
#--------------------------------------------------------------------------#
# _generate_profile
#
# Run 'metabase-profile' in the .cpanreporter directory
#--------------------------------------------------------------------------#
sub _generate_profile {
    my ($id_file, $config) = @_;

    # bail out quietly if the helper program isn't installed
    my $cmd = IPC::Cmd::can_run('metabase-profile');
    return unless $cmd;

    # XXX this is an evil assumption about email addresses, but
    # might do for simple cases that users might actually provide
    my @opts = ("--output" => $id_file);
    my $email = $config->{email_from};

    # split '"John Doe" <jdoe@example.com>' into name and address parts
    if ($email =~ /\A(.+)\s+<([^>]+)>\z/ ) {
        push @opts, "--email" => $2;
        my $name = $1;
        $name =~ s/\A["'](.*)["']\z/$1/;    # strip surrounding quotes, if any
        # FIX: use $name, not $1 -- $1 is only updated when the
        # quote-stripping substitution above actually matches
        push @opts, "--name" => $name
            if length $name;
    }
    else {
        # no display name; pass the address through as-is
        push @opts, "--email" => $email;
    }

    # XXX profile 'secret' is really just a generated API key, so we
    # can create something fairly random for the user and use that
    push @opts, "--secret" => sprintf("%08x", rand(2**31));

    # run interactively (verbose) so the user sees any prompts/errors;
    # scalar forces the success/failure flag from IPC::Cmd::run
    return scalar IPC::Cmd::run(
        command => [ $cmd, @opts ],
        verbose => 1,
    );
}
#--------------------------------------------------------------------------#
# _get_config_dir
#--------------------------------------------------------------------------#
sub _get_config_dir {
    # explicit environment override wins over any computed location
    my $env_dir = $ENV{PERL_CPAN_REPORTER_DIR};
    return $env_dir if defined $env_dir && length $env_dir;

    my $conf_dir = File::Spec->catdir( File::HomeDir->my_home, ".cpanreporter" );

    # on Windows, honor a pre-existing legacy directory under "Documents"
    # unless the home-directory location has already been created
    if ( $^O eq 'MSWin32' ) {
        my $legacy_dir = File::Spec->catdir( File::HomeDir->my_documents, ".cpanreporter" );
        $conf_dir = $legacy_dir if -d $legacy_dir && ! -d $conf_dir;
    }

    return $conf_dir;
}
#--------------------------------------------------------------------------#
# _get_config_file
#--------------------------------------------------------------------------#
sub _get_config_file {
    # explicit environment override wins over the default location
    if ( defined $ENV{PERL_CPAN_REPORTER_CONFIG} &&
         length $ENV{PERL_CPAN_REPORTER_CONFIG}
    ) {
        return $ENV{PERL_CPAN_REPORTER_CONFIG};
    }
    else {
        # FIX: use catfile (not catdir) -- config.ini is a file, which
        # matters on platforms where file and directory syntax differ
        return File::Spec->catfile( _get_config_dir(), "config.ini" );
    }
}
#--------------------------------------------------------------------------#
# _get_config_options
#--------------------------------------------------------------------------#
sub _get_config_options {
    my ($config) = @_;

    # extract and return valid options, with fallback to defaults
    my %spec = CPAN::Reporter::Config::_config_spec();
    my %active;

    for my $option ( keys %spec ) {
        if ( ! exists $config->{_}{$option} ) {
            # not in the config file: use the default when one is defined
            $active{$option} = $spec{$option}{default}
                if defined $spec{$option}{default};
            next;
        }

        my $value     = $config->{_}{$option};
        my $validator = $spec{$option}{validate};
        if ( $validator && ! $validator->($option, $value) ) {
            # invalid value: warn the user and substitute the default
            $CPAN::Frontend->mywarn( "\nCPAN::Reporter: invalid option '$value' in '$option'. Using default instead.\n\n" );
            $active{$option} = $spec{$option}{default};
            next;
        }

        $active{$option} = $value;
    }

    return \%active;
}
#--------------------------------------------------------------------------#
# _grade_action_prompt -- describes grade action pairs
#--------------------------------------------------------------------------#
sub _grade_action_prompt {
    # static explanatory text shown once before the interactive prompts;
    # single-quoted heredoc, so nothing is interpolated
    return << 'HERE';
Some of the following configuration options require one or more "grade:action"
pairs that determine what grade-specific action to take for that option.
These pairs should be space-separated and are processed left-to-right. See
CPAN::Reporter documentation for more details.
GRADE : ACTION ======> EXAMPLES
------- ------- --------
pass yes default:no
fail no default:yes pass:no
unknown ask/no default:ask/no pass:yes fail:no
na ask/yes
default
HERE
}
#--------------------------------------------------------------------------#
# _is_valid_action
#--------------------------------------------------------------------------#
# recognized prompt actions for "grade:action" pairs
my @valid_actions = qw{ yes no ask/yes ask/no ask };
sub _is_valid_action {
    my ($candidate) = @_;
    return grep { $candidate eq $_ } @valid_actions;
}
#--------------------------------------------------------------------------#
# _is_valid_grade
#--------------------------------------------------------------------------#
# recognized report grades (plus the 'default' pseudo-grade)
my @valid_grades = qw{ pass fail unknown na default };
sub _is_valid_grade {
    my ($candidate) = @_;
    return grep { $candidate eq $_ } @valid_grades;
}
#--------------------------------------------------------------------------#
# _normalize_id_file
#--------------------------------------------------------------------------#
sub _normalize_id_file {
    my ($id_file) = @_;

    # expand a leading tilde: Windows shells don't do this, so map it to
    # the home directory ourselves; elsewhere let the glob expand it
    if ( $^O eq 'MSWin32' && $id_file =~ m{^~/(.*)} ) {
        $id_file = File::Spec->catdir(File::HomeDir->my_home, $1);
    }
    elsif ( $id_file =~ /~/ ) {
        $id_file = File::Spec->canonpath(File::Glob::bsd_glob( $id_file ));
    }

    # a relative path is interpreted against the .cpanreporter directory
    $id_file = File::Spec->catfile(
        CPAN::Reporter::Config::_get_config_dir(), $id_file
    ) unless File::Spec->file_name_is_absolute( $id_file );

    return $id_file;
}
#--------------------------------------------------------------------------#
# _open_config_file
#--------------------------------------------------------------------------#
sub _open_config_file {
    my $config_file = _get_config_file();

    # Config::Tiny->read returns undef on failure; warn but let the
    # caller decide how to proceed
    my $config = Config::Tiny->read( $config_file );
    if ( ! $config ) {
        $CPAN::Frontend->mywarn("CPAN::Reporter: couldn't read configuration file " .
            "'$config_file': \n" . Config::Tiny->errstr() . "\n");
    }
    return $config;
}
#--------------------------------------------------------------------------#
# _validate
#
# anything is OK if there is no validation subroutine
#--------------------------------------------------------------------------#
sub _validate {
    my ($opt_name, $opt_value) = @_;
    # no registered validator means any value is acceptable
    return 1 unless exists $option_specs{$opt_name}{validate};
    return $option_specs{$opt_name}{validate}->($opt_name, $opt_value);
}
#--------------------------------------------------------------------------#
# _validate_grade_action
# returns hash of grade => action
# returns undef
#--------------------------------------------------------------------------#
sub _validate_grade_action_pair {
    my ($name, $option) = @_;
    # empty/false input is treated as a plain "no" for all grades
    $option ||= "no";

    my %ga_map; # grade => action

    # each space-separated element may be: "grades:action", a bare action
    # (applies to 'default'), a bare grade (action 'yes'), or a bare
    # slash-separated grade list (action 'yes'); invalid pieces are
    # warned about and skipped rather than failing the whole option
    PAIR: for my $grade_action ( split q{ }, $option ) {
        my ($grade_list,$action);

        if ( $grade_action =~ m{.:.} ) {
            # parse pair for later check
            ($grade_list, $action) = $grade_action =~ m{\A([^:]+):(.+)\z};
        }
        elsif ( _is_valid_action($grade_action) ) {
            # action by itself
            $ga_map{default} = $grade_action;
            next PAIR;
        }
        elsif ( _is_valid_grade($grade_action) ) {
            # grade by itself
            $ga_map{$grade_action} = "yes";
            next PAIR;
        }
        elsif( $grade_action =~ m{./.} ) {
            # gradelist by itself, so setup for later check
            $grade_list = $grade_action;
            $action = "yes";
        }
        else {
            # something weird, so warn and skip
            $CPAN::Frontend->mywarn(
                "\nCPAN::Reporter: ignoring invalid grade:action '$grade_action' for '$name'.\n\n"
            );
            next PAIR;
        }

        # check gradelist -- drop any unrecognized grade from the set
        my %grades = map { ($_,1) } split( "/", $grade_list);
        for my $g ( keys %grades ) {
            if ( ! _is_valid_grade($g) ) {
                $CPAN::Frontend->mywarn(
                    "\nCPAN::Reporter: ignoring invalid grade '$g' in '$grade_action' for '$name'.\n\n"
                );
                delete $grades{$g};
            }
        }

        # check action -- an invalid action invalidates the whole pair
        if ( ! _is_valid_action($action) ) {
            $CPAN::Frontend->mywarn(
                "\nCPAN::Reporter: ignoring invalid action '$action' in '$grade_action' for '$name'.\n\n"
            );
            next PAIR;
        }

        # otherwise, it all must be OK -- later pairs override earlier ones
        $ga_map{$_} = $action for keys %grades;
    }

    # hashref of grade => action, or undef if nothing valid was provided
    return scalar(keys %ga_map) ? \%ga_map : undef;
}
#--------------------------------------------------------------------------#
# _validate_transport
#
# Checks that the 'transport' option names a loadable
# Test::Reporter::Transport subclass.  For the Metabase transport it also
# requires 'uri' and 'id_file' arguments, and offers to run
# 'metabase-profile' to create a missing profile file.  Returns true when
# the option is usable, false/empty otherwise.
#--------------------------------------------------------------------------#
sub _validate_transport {
    my ($name, $option, $config) = @_;
    my $transport = '';

    # the leading word(s) of the option name the transport class suffix
    if ( $option =~ /^(\w+(?:::\w+)*)\s?/ ) {
        $transport = $1;
        my $full_class = "Test::Reporter::Transport::$transport";
        # NOTE(review): a load failure here only warns; validation still
        # continues and may return true -- presumably deliberate so the
        # user can install the transport module later.  Confirm before
        # tightening this.
        eval "use $full_class ()";
        if ($@) {
            $CPAN::Frontend->mywarn(
                "\nCPAN::Reporter: error loading $full_class. Please install the missing module or choose a different transport mechanism.\n\n"
            );
        }
    }
    else {
        $CPAN::Frontend->mywarn(
            "\nCPAN::Reporter: Please provide a transport mechanism.\n\n"
        );
        return;
    }

    # we do extra validation for Metabase and offer to create the profile
    if ( $transport eq 'Metabase' ) {
        unless ( $option =~ /\buri\s+\S+/ ) {
            $CPAN::Frontend->mywarn(
                "\nCPAN::Reporter: Please provide a target uri.\n\n"
            );
            return;
        }
        unless ( $option =~ /\bid_file\s+(\S.+?)\s*$/ ) {
            $CPAN::Frontend->mywarn(
                "\nCPAN::Reporter: Please specify an id_file path.\n\n"
            );
            return;
        }

        # $1 still holds the id_file capture from the match just above
        my $id_file = _normalize_id_file($1);

        # Offer to create if it doesn't exist
        if ( ! -e $id_file ) {
            my $answer = CPAN::Shell::colorable_makemaker_prompt(
                "\nWould you like to run 'metabase-profile' now to create '$id_file'?", "y"
            );
            if ( $answer =~ /^y/i ) {
                return _generate_profile( $id_file, $config );
            }
            else {
                $CPAN::Frontend->mywarn( <<"END_ID_FILE" );
You can create a Metabase profile by typing 'metabase-profile' in your
command prompt and moving the resulting file to the location you specified.
If you did not specify an absolute path, put it in your .cpanreporter
directory. You will need to do this before continuing.
END_ID_FILE
                return;
            }
        }
        # Warn and fail validation if there but not readable
        elsif (
            not ( -r $id_file
                or -r File::Spec->catdir(_get_config_dir(), $id_file)
            )
        ) {
            $CPAN::Frontend->mywarn(
                "CPAN::Reporter: '$id_file' was not readable.\n\n"
            );
            return;
        }
    } # end Metabase

    return 1;
}
#--------------------------------------------------------------------------#
# _validate_seconds -- returns the option if it is a non-negative number
# of seconds (integer or decimal), otherwise returns nothing.
#--------------------------------------------------------------------------#
sub _validate_seconds {
    my ($name, $option) = @_;
    # FIX: anchor the whole value.  The old check (/^\d/ plus a numeric
    # compare) accepted garbage like "5abc" as a timeout.
    return unless defined($option) && length($option)
        && $option =~ /\A\d+(?:\.\d+)?\z/;
    return $option;
}
#--------------------------------------------------------------------------#
# _validate_skipfile -- resolves a skipfile option (relative paths are
# anchored in the config directory) and returns the path only when the
# file is readable.
#--------------------------------------------------------------------------#
sub _validate_skipfile {
    my ($name, $option) = @_;
    return if ! $option;

    my $skipfile =
        File::Spec->file_name_is_absolute( $option )
        ? $option
        : File::Spec->catfile( _get_config_dir(), $option );

    return -r $skipfile ? $skipfile : undef;
}
1;
# ABSTRACT: Config file options for CPAN::Reporter
=pod
=encoding UTF-8
=head1 NAME
CPAN::Reporter::Config - Config file options for CPAN::Reporter
=head1 VERSION
version 1.2011
=head1 SYNOPSIS
From the CPAN shell:
cpan> o conf init test_report
=head1 DESCRIPTION
Default options for CPAN::Reporter are read from a configuration file
C<<< .cpanreporter/config.ini >>> in the user's home directory. (On Win32 platforms,
the directory will be located in the user's "Documents" directory.)
The location of the configuration directory or file may be specified
using environment variables instead.
The configuration file is in "ini" format, with the option name and value
separated by an "=" sign
email_from = "John Doe" <johndoe@nowhere.org>
edit_report = no
Interactive configuration of email address and common
action prompts may be repeated at any time from the CPAN shell.
cpan> o conf init test_report
If a configuration file does not exist, it will be created the first
time interactive configuration is performed.
Subsequent interactive configuration will also include any advanced
options that have been added manually to the configuration file.
=head1 INTERACTIVE CONFIGURATION OPTIONS
=head2 Email Address (required)
email_from = <email address>
CPAN::Reporter requires users to provide an email address that will be used
in the header of the report.
The email address provided should be a valid address format, e.g.:
email_from = user@domain
email_from = John Doe <user@domain>
email_from = "John Q. Public" <user@domain>
=head2 Transport (required)
transport = <transport class> [transport args]
This sets the transport mechanism passed to the C<<< transport() >>> method of
L<Test::Reporter>. Normally, CPAN::Reporter uses 'Metabase' for transport class
(i.e. L<Test::Reporter::Transport::Metabase>) and will provide a default set of
transport arguments.
Metabase transport arguments are two space-separated keyE<sol>value pairs:
=over
=item *
C<<< uri >>> -- URI for the Metabase API. Defaults to
C<<< https://metabase.cpantesters.org/api/v1/ >>>
=item *
C<<< id_file >>> -- path to the user's Metabase profile file.
Defaults to C<<< metabase_id.json >>>. (Assumed to be in the C<<< .cpanreporter >>>
directory).
=back
Prior to sending reports, a user must have a valid profile file at the path
specified. For Metabase transport, CPAN::Reporter will automatically rewrite a
relative C<<< id_file >>> path as an absolute path located in the C<<< .cpanreporter >>>
directory.
If the specified profile file does not exist, CPAN::Reporter will offer
to run C<<< metabase-profile >>> to create it.
For other transport types, see the documentation that comes with your choice of
Test::Reporter::Transport subclass for the proper way to set the C<<< transport >>>
configuration option.
=head2 Action Prompts
Several steps in the generation of a test report are optional. Configuration
options control whether an action should be taken automatically or whether
CPAN::Reporter should prompt the user for the action to take. The action to
take may be different for each report grade. For example, users may wish to
customize for which grades they wish to manually review a report before sending
it.
Most users should just accept the default settings until they have some
experience as CPAN Testers.
Valid actions, and their associated meaning, are as follows:
=over
=item *
C<<< yes >>> -- automatic yes
=item *
C<<< no >>> -- automatic no
=item *
C<<< ask/no >>> or just C<<< ask >>> -- ask each time, but default to no
=item *
C<<< ask/yes >>> -- ask each time, but default to yes
=back
For "ask" prompts, the default will be used if return is pressed immediately at
the prompt or if the C<<< PERL_MM_USE_DEFAULT >>> environment variable is set to a
true value.
Action prompt options take one or more space-separated "grade:action" pairs,
which are processed left to right.
edit_report = fail:ask/yes pass:no
An action by itself is taken as a default to be used for any grade which does
not have a grade-specific action. A default action may also be set by using
the word "default" in place of a grade.
edit_report = ask/no
edit_report = default:ask/no
A grade by itself is taken to have the action "yes" for that grade.
edit_report = default:no fail
Multiple grades may be specified together by separating them with a slash.
edit_report = pass:no fail/na/unknown:ask/yes
The action prompt options included in interactive configuration are:
=over
=item *
C<<< edit_report = <grade:action> ... >>> -- edit the test report before sending?
(default:askE<sol>no passE<sol>na:no)
=item *
C<<< send_report = <grade:action> ... >>> -- should test reports be sent at all?
(default:askE<sol>yes passE<sol>na:yes)
=back
Note that if C<<< send_report >>> is set to "no", CPAN::Reporter will still go through
the motions of preparing a report, but will discard it rather than send it.
A better way to disable CPAN::Reporter temporarily is with the CPAN option
C<<< test_report >>>:
cpan> o conf test_report 0
=head2 Mail Server (DEPRECATED)
CPAN::Reporter used to send mail directly to perl.org mail servers. The
C<<< smtp_server >>> option is now deprecated and will be ignored if it exists.
=head1 ADVANCED CONFIGURATION OPTIONS
These additional options are only necessary in special cases, for example if
the default editor cannot be found or if reports shouldn't be sent in
certain situations or for automated testing, and so on.
=over
=item *
C<<< command_timeout >>> -- if greater than zero and the CPAN config
C<<< inactivity_timeout >>> is not set, then any commands executed by CPAN::Reporter
will be halted after this many seconds; useful for unattended smoke testing
to stop after some amount of time; generally, this should be large --
900 seconds or more -- as some distributions' tests take quite a long time to
run. On MSWin32, L<Win32::Job> is needed, and trying to kill a process may
actually deadlock in some situations -- so use at your own risk.
=item *
C<<< editor = <editor> >>> -- editor to use to edit the test report; if not set,
Test::Reporter will use environment variables C<<< VISUAL >>>, C<<< EDITOR >>> or C<<< EDIT >>>
(in that order) to find an editor
=item *
C<<< retry_submission >>> -- if greater than zero, CPAN::Reporter will try to
resend the report after a few seconds in case the first attempt fails.
=item *
C<<< send_duplicates = <grade:action> ... >>> -- should duplicates of previous
reports be sent, regardless of C<<< send_report >>>? (default:no)
=item *
C<<< send_PL_report = <grade:action> ... >>> -- if defined, used in place of
C<<< send_report >>> during the PL phase
=item *
C<<< send_make_report = <grade:action> ... >>> -- if defined, used in place of
C<<< send_report >>> during the make phase
=item *
C<<< send_test_report = <grade:action> ... >>> -- if defined, used in place of
C<<< send_report >>> during the test phase
=item *
C<<< send_skipfile = <skipfile> >>> -- filename containing regular expressions (one
per line) to match against the distribution ID (e.g.
'AUTHORE<sol>Dist-Name-0.01.tar.gz'); the report will not be sent if a match is
found; non-absolute filename must be in the .cpanreporter config directory;
=back
If these options are manually added to the configuration file, they will
be included (and preserved) in subsequent interactive configuration.
=head2 Skipfile regular expressions
Skip files are expected to have one regular expression per line and will be
matched against the distribution ID, composed of the author's CPAN ID and the
distribution tarball name.
DAGOLDEN/CPAN-Reporter-1.00.tar.gz
Lines that begin with a sharp (#) are considered comments and will not be
matched. All regular expressions will be matched case insensitive and will
not be anchored unless you provide one.
As the format of a distribution ID is "AUTHORE<sol>tarball", anchoring at the
start of the line with a caret (^) will match the author and with a slash (E<sol>)
will match the distribution.
# any distributions by JOHNDOE
^JOHNDOE
# any distributions starting with Win32
/Win32
# a particular very specific distribution
^JOHNDOE/Foo-Bar-3.14
=head1 CONFIGURATION OPTIONS FOR DEBUGGING
These options are useful for debugging only:
=over
=item *
C<<< debug = <boolean> >>> -- turns debugging onE<sol>off
=back
=head1 ENVIRONMENT
The following environment variables may be set to alter the default locations
for CPAN::Reporter files:
=over
=item *
C<<< PERL_CPAN_REPORTER_DIR >>> -- if set, this directory is used in place of
the default C<<< .cpanreporter >>> directory; this will affect not only the location
of the default C<<< config.ini >>>, but also the location of the
L<CPAN::Reporter::History> database and any other files that live in that
directory
=item *
C<<< PERL_CPAN_REPORTER_CONFIG >>> -- if set, this file is used in place of
the default C<<< config.ini >>> file; it may be in any directory, regardless of the
choice of configuration directory
=back
=head1 SEE ALSO
=over
=item *
L<CPAN::Reporter>
=item *
L<CPAN::Reporter::History>
=item *
L<CPAN::Reporter::FAQ>
=back
=head1 AUTHOR
David Golden <dagolden@cpan.org>
=head1 COPYRIGHT AND LICENSE
This software is Copyright (c) 2006 by David Golden.
This is free software, licensed under:
The Apache License, Version 2.0, January 2004
=cut
__END__
# vim: ts=4 sts=4 sw=4 et:
| gitpan/CPAN-Reporter | lib/CPAN/Reporter/Config.pm | Perl | apache-2.0 | 31,486 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::quantum::scalar::snmp::mode::components::resources;
use strict;
use warnings;
use Exporter;
# Status-code lookup tables, exported by name so mode plugins can share
# the same integer-to-label mappings.
# NOTE: 'rassubsytem' (sic) is the established exported identifier -- do
# not rename it without updating every consumer.
our $map_rassubsytem_status;
our $map_sensor_status;
our @ISA = qw(Exporter);
our @EXPORT_OK = qw($map_rassubsytem_status $map_sensor_status);
# RAS subsystem status codes (SNMP integer -> human-readable label)
$map_rassubsytem_status = {
    0 => 'unknown', 1 => 'redFailure', 2 => 'orangeDegraded',
    3 => 'yellowWarning', 4 => 'blueAttention',
    5 => 'greenInformation', 6 => 'greenGood',
};
# Sensor status codes (SNMP integer -> severity label)
$map_sensor_status = {
    0 => 'unknown', 1 => 'normal', 2 => 'warning', 3 => 'critical',
};
1;
| Sims24/centreon-plugins | storage/quantum/scalar/snmp/mode/components/resources.pm | Perl | apache-2.0 | 1,303 |
=head1 LICENSE
Copyright (c) 1999-2011 The European Bioinformatics Institute and
Genome Research Limited. All rights reserved.
This software is distributed under a modified Apache license.
For license details, please see
http://www.ensembl.org/info/about/code_licence.html
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <dev@ensembl.org>.
Questions may also be sent to the Ensembl help desk at
<helpdesk@ensembl.org>.
=head1 NAME
Bio::EnsEMBL::Hive::RunnableDB::DumpMultiAlign::CreateSuperJobs
=head1 SYNOPSIS
This RunnableDB module is part of the DumpMultiAlign pipeline.
=head1 DESCRIPTION
This RunnableDB module generates DumpMultiAlign jobs from genomic_align_blocks
on the species supercontigs. The jobs are split into $split_size chunks
=cut
package Bio::EnsEMBL::Compara::RunnableDB::DumpMultiAlign::CreateSuperJobs;
use strict;
use Bio::EnsEMBL::Hive::DBSQL::AnalysisDataAdaptor;
use base ('Bio::EnsEMBL::Compara::RunnableDB::BaseRunnable');
use POSIX qw(ceil);
=head2 strict_hash_format
Description : Implements strict_hash_format() interface method of Bio::EnsEMBL::Hive::Process that is used to set the strictness level of the parameters' parser.
Here we return 0 in order to indicate that neither input_id() nor parameters() is required to contain a hash.
=cut
sub strict_hash_format {
    # 0 = relaxed parsing: input_id/parameters need not be hash strings
    return 0;
}

# No pre-processing required: all the work happens in write_output().
sub fetch_input {
    my $self = shift;
}

# No main computation step: all the work happens in write_output().
sub run {
    my $self = shift;
}
# write_output -- counts the genomic_align_blocks for this genome/coord
# system/MLSS and dataflows (branch 2) a single job spec for dumping the
# supercontig-level alignments.
sub write_output {
    my $self = shift @_;
    my $output_ids;
    my $compara_dba;

    #
    # Load registry and get compara database adaptor
    #
    if ($self->param('reg_conf')) {
        Bio::EnsEMBL::Registry->load_all($self->param('reg_conf'),1);
        $compara_dba = Bio::EnsEMBL::Registry->get_DBAdaptor($self->param('compara_dbname'), "compara");
    } elsif ($self->param('compara_url')) {
        # If compara_url is defined, core_url(s) must also be defined
        Bio::EnsEMBL::Registry->load_registry_from_url($self->param('compara_url'));
        if (!defined($self->param('core_url'))) {
            $self->throw("Must define core_url if define compara_url");
        }
        my @core_urls = split ",", $self->param('core_url');
        foreach my $core_url (@core_urls) {
            # BUG FIX: load each individual URL.  Previously this passed the
            # whole comma-separated 'core_url' parameter on every iteration,
            # so additional core databases were never registered correctly.
            Bio::EnsEMBL::Registry->load_registry_from_url($core_url);
        }
        $compara_dba = Bio::EnsEMBL::Compara::DBSQL::DBAdaptor->new(-url=>$self->param('compara_url'));
    } else {
        Bio::EnsEMBL::Registry->load_all();
        $compara_dba = Bio::EnsEMBL::Registry->get_DBAdaptor($self->param('compara_dbname'), "compara");
    }

    #
    # Find supercontigs and number of genomic_align_blocks
    #
    my $sql = "
    SELECT count(*)
    FROM genomic_align
    LEFT JOIN dnafrag
    USING (dnafrag_id)
    WHERE coord_system_name = ?
    AND genome_db_id= ?
    AND method_link_species_set_id=?";

    my $sth = $compara_dba->dbc->prepare($sql);
    $sth->execute($self->param('coord_system_name'),$self->param('genome_db_id'), $self->param('mlss_id'));
    my ($total_blocks) = $sth->fetchrow_array;
    $sth->finish;    # release the statement handle promptly

    my $tag = $self->param('coord_system_name');
    #my $output_file = $self->param('output_dir') ."/" . $self->param('filename') . "." . $tag . "." . $self->param('format');
    my $output_file = $self->param('filename') . "." . $tag . "." . $self->param('format');

    my $format = $self->param('format');
    my $coord_system_name = $self->param('coord_system_name');

    #This doesn't work because DumpMultiAlignment adds _1 to the output file and can create more if there are lots of supercontigs.
    #Since I create only one job, the compress will only start when all the chunks have been produced (if more than one) so I can use "*"
    #my $this_suffix = "." . $format;
    my $this_suffix = "*" . "." . $format;
    my $dump_output_file = $output_file;
    $dump_output_file =~ s/\.$format/$this_suffix/;

    # Write out cmd for DumpMultiAlign and a few other parameters
    # used in downstream analyses
    $output_ids = "{\"coord_system\"=> \"$coord_system_name\", \"output_file\"=> \"$output_file\", \"num_blocks\"=> $total_blocks, \"dumped_output_file\"=> \"$dump_output_file\", \"format\"=> \"$format\"}";

    $self->dataflow_output_id($output_ids, 2);
}
1;
| adamsardar/perl-libs-custom | EnsemblAPI/ensembl-compara/modules/Bio/EnsEMBL/Compara/RunnableDB/DumpMultiAlign/CreateSuperJobs.pm | Perl | apache-2.0 | 4,227 |
package Google::Ads::AdWords::v201402::AdwordsUserListService::ResponseHeader;

# SOAP::WSDL-generated element class for the SOAP response header of the
# v201402 AdwordsUserListService.  Machine-generated boilerplate: all the
# __set_* calls configure the SOAP::WSDL typelib metadata for this element.
use strict;
use warnings;

{ # BLOCK to scope variables
    sub get_xmlns { 'https://adwords.google.com/api/adwords/rm/v201402' }

    __PACKAGE__->__set_name('ResponseHeader');
    __PACKAGE__->__set_nillable();
    __PACKAGE__->__set_minOccurs();
    __PACKAGE__->__set_maxOccurs();
    __PACKAGE__->__set_ref();

    use base qw(
        SOAP::WSDL::XSD::Typelib::Element
        Google::Ads::AdWords::v201402::SoapResponseHeader
    );
}

1;
=pod
=head1 NAME
Google::Ads::AdWords::v201402::AdwordsUserListService::ResponseHeader
=head1 DESCRIPTION
Perl data type class for the XML Schema defined element
ResponseHeader from the namespace https://adwords.google.com/api/adwords/rm/v201402.
=head1 METHODS
=head2 new
my $element = Google::Ads::AdWords::v201402::AdwordsUserListService::ResponseHeader->new($data);
Constructor. The following data structure may be passed to new():
$a_reference_to, # see Google::Ads::AdWords::v201402::SoapResponseHeader
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| gitpan/GOOGLE-ADWORDS-PERL-CLIENT | lib/Google/Ads/AdWords/v201402/AdwordsUserListService/ResponseHeader.pm | Perl | apache-2.0 | 1,062 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::REST::Controller::ReturnError;
use Moose;
use namespace::autoclean;
use Carp::Clan qw(^EnsEMBL::REST::Controller::);
BEGIN { extends 'Catalyst::Controller::REST'; }
__PACKAGE__->config(
'default' => 'application/json',
'stash_key' => 'rest',
'map' => {
'text/x-yaml' => 'YAML::XS',
'application/json' => 'JSON::XS',
'text/plain' => 'JSON::XS',
'text/html' => 'YAML::HTML',
}
);
# Default error action: log the raw error verbatim, then return a 400
# response that carries only the human-readable part of an Ensembl
# exception (the text between its "MSG:" and "STACK" markers).
sub index : Path : Args(0) : ActionClass('REST') {
    my ( $self, $c, $raw_error ) = @_;
    $c->log->error($raw_error);
    # Fall back to a generic message when the error does not follow the
    # Ensembl exception layout, and strip newlines so the message
    # serializes as a single line.
    my ($error_cleaned) = $raw_error =~ m/MSG:\s(.*?)STACK/s;
    $error_cleaned ||= 'something bad has happened';
    $error_cleaned =~ s/\n//g;
    $self->status_bad_request( $c, message => $error_cleaned );
}
sub index_GET { }
sub index_POST { }
# Report an Ensembl API exception as a 400 response.  The full error is
# logged with newlines stripped (single log line), while the client only
# receives the text between the "MSG:" and "STACK" markers.
sub from_ensembl : Path : Args(0) : ActionClass('REST') {
    my ( $self, $c, $raw_error ) = @_;

    ( my $flat_error = $raw_error ) =~ s/\n//g;
    $c->log->error($flat_error);

    my ($message) = $raw_error =~ m/MSG:\s(.*?)STACK/s;
    $message ||= 'something bad has happened';
    $message =~ s/\n//g;

    $self->status_bad_request( $c, message => $message );
}
sub from_ensembl_GET { }
sub from_ensembl_POST { }
# Return a 400 response carrying a caller-supplied message verbatim,
# after logging it.
sub custom : Path : Args(0) : ActionClass('REST') {
    my ( $self, $c, $error_msg ) = @_;
    $c->log->error($error_msg);
    $self->status_bad_request( $c, message => $error_msg );
}
sub custom_GET { }
sub custom_POST { }
# Return a 204 No Content response.  NOTE(review): a 204 body is empty by
# definition, so the message argument is presumably only useful for the
# log — confirm Catalyst::Controller::REST's status_no_content behavior.
sub no_content: Path : Args(0) : ActionClass('REST') {
    my ( $self, $c, $error_msg ) = @_;
    $c->log->error($error_msg);
    $self->status_no_content( $c, message => $error_msg );
}
sub no_content_GET { }
sub no_content_POST { }
# Return a 404 response with the supplied message (not logged, unlike the
# 400-producing actions in this controller).
sub not_found: Path : Args(0) : ActionClass('REST') {
    my ( $self, $c, $error_msg ) = @_;
    $self->status_not_found($c, message => $error_msg);
}
sub not_found_GET { }
sub not_found_POST { }
__PACKAGE__->meta->make_immutable;
1;
| willmclaren/ensembl-rest | lib/EnsEMBL/REST/Controller/ReturnError.pm | Perl | apache-2.0 | 2,929 |
#!/usr/bin/perl
# Copyright (c) 2010 Regents of the University of Minnesota
# This file is free software under the 3-clause BSD license.
# Last modified: 2010-02-24 22:29 CST
# See below for docs, or run with --help.
# NOTES:
#
# - Files and directories which are not owned by the running user but which
# have incorrect permissions cannot be fixed unless you sudo this script.
#
# - Symbolic links are ignored (chmod spits a Warning,
# "neither file nor directory, ignoring)
#
# FIXME:
#
# - Implement real argument parsing.
# - Take a --group argument to also chgrp.
# 2013.02.07: [lb] added --quiet to chmod if we're not --verbose.
# http://www.unixlore.net/articles/speeding-up-bulk-file-operations.html
# Expect [lb] finds the 2775 must come first.
# FIXME/MAYBE: Should fixperms --public really just do this instead??:
# sudo find $target_dir -type d -print0 | xargs -0 chmod 2775
# sudo find $target_dir -type f -print0 | xargs -0 chmod 664
use strict;
use warnings;
no warnings 'uninitialized';
use Fcntl;
use File::Find;
use File::stat;
use Getopt::Long;
use POSIX;
my $help_show = 0;
my $public_mode = 0;
my $verbose = 0;
my @dirs = '.';
if (not Getopt::Long::GetOptions('help' => \$help_show,
'public' => \$public_mode,
'verbose' => \$verbose)) {
$help_show = 1;
}
if ($help_show) {
print <<EOF;
Usage:
\$ $0 [--public] [--verbose] [DIRS]
This program repairs UNIX permissions for filesystem areas which should be
group accessible. It can operate in two modes:
Normal mode:
Set group permissions to match user permissions. Turn on sticky bit on
directories. Leave world permissions alone.
Public mode:
In addition to what's done for normal mode, set the world read bit, and set
the world execute bit if the owner execute bit is set.
Note that this makes all files and directories world-readable!
Email reid\@umn.edu with blame.
EOF
exit 1;
}
#if ($public_mode and -e '.svn') {
# print "ERROR! Don't run me in WWW mode in your working directory.\n";
# exit 1;
#}
if (@ARGV > 0) {
@dirs = @ARGV;
}
# Use the find options so we can follow symbolic links easily.
# See: http://perldoc.perl.org/File/Find.html
my %find_opts = (wanted => \&wanted,
follow_fast => 1,
# follow_skip 0 and 1 die on infinite link loops.
follow_skip => 2);
# File::Find::find(\&wanted, @dirs);
File::Find::find(\%find_opts, @dirs);
### Functions
# File::Find callback: normalize permissions on each file/directory.
# Group bits are forced to mirror the user bits and the setgid bit is set
# on directories; in --public mode the world read (and, where the user has
# execute, world execute) bits are added too.  World bits are otherwise
# preserved as found.
sub wanted {
    my ($dir, $name, $full) = ($File::Find::dir, $_, $File::Find::name);
    # Note that $dir/$name ==> $full, and Perl chdir()s into $dir for us,
    # so all file tests below use the bare $name.
    if (not (-d $name or -f $name)) {
        # See if the file is a symbolic link.
        if (-l $name) {
            # With follow_fast and follow_skip => 2, a symlink only reaches
            # us here when it forms an infinite link loop.
            if ($verbose) {
                print "Symbolic link means infinite loop: $full\n";
            }
        } else {
            # This file type is unexpected (socket, fifo, device, ...).
            print STDERR "Warning: $full: not file, directory, or link\n";
        }
    }
    else {
        # Found a real file or directory.
        if ($verbose) {
            print "Found real dir or file: $name\n";
        }
        my $p_before = get_perms($name);
        my $p_user = $p_before & 0700;         # owner rwx bits
        my $p_other = $p_before & 0007;        # existing world bits, kept as-is
        my $p_group = $p_user >> 3;            # group bits := copy of owner bits
        my $perms = $p_user | $p_group | $p_other;
        if (-d $name) {
            # 02000 is the *setgid* bit (new entries inherit the group).
            # The usage text calls this "sticky"; the actual sticky bit
            # would be 01000.
            $perms |= 02000;
        }
        if ($public_mode) {
            # World gets read, plus execute iff the owner has execute.
            $perms |= ($p_user >> 6) & 0005;
            if (-d $name) {
                # Directories always get world search in public mode.
                $perms |= 0001;
            }
        }
        set_perms($name, $perms, $p_before, $full);
    }
}
# Return the numeric group id (gid) of the given path; dies if stat fails.
sub get_group {
    my ($path) = @_;
    my $st = stat($path) or die "Can't stat $path: $!";
    return $st->gid;
}
# Return the permission bits (mode sans file-type bits) of the given path;
# dies if stat fails.
sub get_perms {
    my ($path) = @_;
    my $st = stat($path) or die "Can't stat $path: $!";
    return Fcntl::S_IMODE($st->mode);
}
# chmod $file to $perms, but only when the current bits ($p_before) differ,
# so unchanged files are not touched.  $full is the display path used for
# verbose output.  chmod failures are reported to STDERR, not fatal.
sub set_perms {
    my ($file, $perms, $p_before, $full) = @_;
    if ($p_before != $perms) {
        # Perms not what we want -- fix them
        if ($verbose) {
            print "  $full: " . p2s($p_before) . " -> " . p2s($perms) ." \n";
        }
        chmod($perms, $file) or print STDERR "Can't chmod($file): $!\n";
    }
}
# True when the given path is owned by the real uid of the running process.
sub owned_by_me {
    my ($path) = @_;
    # Bug fix: the die message previously interpolated $_ (File::Find's
    # topic variable) instead of the path actually being stat()ed.
    my $st = stat($path) or die "Can't stat $path: $!";
    return ($st->uid == POSIX::getuid());
}
# Convert numeric permission bits to their octal string representation.
sub p2s {
    my ($mode) = @_;
    return sprintf '%o', $mode;
}
| lbouma/Cyclopath | scripts/util/fixperms.pl | Perl | apache-2.0 | 4,628 |
package Paws::WorkDocs::GetFolderPath;
  # Argument class for the WorkDocs GetFolderPath call.  The traits route
  # each attribute into the HTTP request: header, query string, or URI
  # path segment.
  use Moose;
  has AuthenticationToken => (is => 'ro', isa => 'Str', traits => ['ParamInHeader'], header_name => 'Authentication');
  has Fields => (is => 'ro', isa => 'Str', traits => ['ParamInQuery'], query_name => 'fields');
  has FolderId => (is => 'ro', isa => 'Str', traits => ['ParamInURI'], uri_name => 'FolderId', required => 1);
  has Limit => (is => 'ro', isa => 'Int', traits => ['ParamInQuery'], query_name => 'limit');
  has Marker => (is => 'ro', isa => 'Str', traits => ['ParamInQuery'], query_name => 'marker');

  use MooseX::ClassAttribute;

  # Call wiring: GET /api/v1/folders/{FolderId}/path.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'GetFolderPath');
  class_has _api_uri => (isa => 'Str', is => 'ro', default => '/api/v1/folders/{FolderId}/path');
  class_has _api_method => (isa => 'Str', is => 'ro', default => 'GET');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::WorkDocs::GetFolderPathResponse');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::WorkDocs::GetFolderPath - Arguments for method GetFolderPath on Paws::WorkDocs
=head1 DESCRIPTION
This class represents the parameters used for calling the method GetFolderPath on the
Amazon WorkDocs service. Use the attributes of this class
as arguments to method GetFolderPath.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to GetFolderPath.
As an example:
$service_obj->GetFolderPath(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 AuthenticationToken => Str
Amazon WorkDocs authentication token. This field should not be set when
using administrative API actions, as in accessing the API using AWS
credentials.
=head2 Fields => Str
A comma-separated list of values. Specify "NAME" to include the names
of the parent folders.
=head2 B<REQUIRED> FolderId => Str
The ID of the folder.
=head2 Limit => Int
The maximum number of levels in the hierarchy to return.
=head2 Marker => Str
This value is not supported.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method GetFolderPath in L<Paws::WorkDocs>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/WorkDocs/GetFolderPath.pm | Perl | apache-2.0 | 2,644 |
package VMOMI::RecurrentTaskScheduler;
use parent 'VMOMI::TaskScheduler';

use strict;
use warnings;

# Ancestor chain and locally-declared members, walked by the VMOMI
# (de)serializer.
our @class_ancestors = qw(
    TaskScheduler
    DynamicData
);

our @class_members = ( [ 'interval', undef, 0 ] );

sub get_class_ancestors { return @class_ancestors }

sub get_class_members {
    my $class = shift;
    return ( $class->SUPER::get_class_members(), @class_members );
}

1;
| stumpr/p5-vmomi | lib/VMOMI/RecurrentTaskScheduler.pm | Perl | apache-2.0 | 446 |
package Paws::EC2::VpcCidrBlockAssociation;
  # Object describing the association between a VPC and an IPv4 CIDR
  # block.  request_name gives the wire name used by the EC2 query API.
  use Moose;
  has AssociationId => (is => 'ro', isa => 'Str', request_name => 'associationId', traits => ['NameInRequest']);
  has CidrBlock => (is => 'ro', isa => 'Str', request_name => 'cidrBlock', traits => ['NameInRequest']);
  has CidrBlockState => (is => 'ro', isa => 'Paws::EC2::VpcCidrBlockState', request_name => 'cidrBlockState', traits => ['NameInRequest']);
1;
### main pod documentation begin ###
=head1 NAME
Paws::EC2::VpcCidrBlockAssociation
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::EC2::VpcCidrBlockAssociation object:
$service_obj->Method(Att1 => { AssociationId => $value, ..., CidrBlockState => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::EC2::VpcCidrBlockAssociation object:
$result = $service_obj->Method(...);
$result->Att1->AssociationId
=head1 DESCRIPTION
This class has no description
=head1 ATTRIBUTES
=head2 AssociationId => Str
The association ID for the IPv4 CIDR block.
=head2 CidrBlock => Str
The IPv4 CIDR block.
=head2 CidrBlockState => L<Paws::EC2::VpcCidrBlockState>
Information about the state of the CIDR block.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::EC2>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/EC2/VpcCidrBlockAssociation.pm | Perl | apache-2.0 | 1,781 |
## Create or update a WMQ JMS Activation Specification
## (the old header said "JMS Queue", but this script drives the
## WMQ_ActivationSpec wsadmin API -- see the $wasApi assignment below).

use warnings;
use strict;
use JSON;
use ElectricCommander;
use ElectricCommander::PropMod qw(/myProject/modules);
use WebSphere::WebSphere;
use WebSphere::Util;

$| = 1;

# -------------------------------------------------------------------------
# Variables: all values are injected by ElectricFlow at job time via the
# $[...] property substitutions.
# -------------------------------------------------------------------------
my $opts = {
    # Required parameters handling:
    configname => '$[configname]',
    specScope => '$[specScope]',
    specAdministrativeName => '$[specAdministrativeName]',
    jndiName => '$[jndiName]',
    destinationJndiName => '$[destinationJndiName]',
    destinationJndiType => '$[destinationJndiType]',

    # Non-required parameters:
    specAdministrativeDescription => '$[specAdministrativeDescription]',
    clientChannelDefinitionUrl => '$[clientChannelDefinitionUrl]',
    clientChannelDefinitionQueueManager => '$[clientChannelDefinitionQueueManager]',
    additionalOptions => '$[additionalOptions]',
};
my $ec = ElectricCommander->new();
$ec->abortOnError(0);

# Connect to WebSphere using the named plugin configuration.
my $websphere = WebSphere::WebSphere->new($ec, $opts->{configname}, '');
my $parsedSpecScope = $websphere->parseScope($opts->{specScope});

# Mandatory wsadmin parameters for the activation spec.
my $r = $websphere->getParamsRenderer(
    name => $opts->{specAdministrativeName},
    jndiName => $opts->{jndiName},
    destinationJndiName => $opts->{destinationJndiName},
    destinationType => $opts->{destinationJndiType},
);

# Optional parameters are only rendered when supplied.
if ($opts->{specAdministrativeDescription}) {
    $r->{description} = $opts->{specAdministrativeDescription};
}
if ($opts->{clientChannelDefinitionUrl}) {
    $r->{ccdtUrl} = $opts->{clientChannelDefinitionUrl};
}
if ($opts->{clientChannelDefinitionQueueManager}) {
    $r->{ccdtQmgrName} = $opts->{clientChannelDefinitionQueueManager};
}

my $wasApi = 'WMQ_ActivationSpec';
my $params = $r->render();
# Free-form extra wsadmin options are appended verbatim.
$params .= ' ' . $opts->{additionalOptions};

# Pass the rendered request into the jython template.
$websphere->setTemplateProperties(
    requestParameters => $params,
    wasApi => $wasApi,
    specScope => $parsedSpecScope
);

my $logger = $websphere->log();

# Materialize the jython script stored on the project and build the
# wsadmin command line for it (password masked in everything we log).
my $file = 'create_or_update_wmq_jms_activation_spec.py';
my $script = $ec->getProperty("/myProject/wsadmin_scripts/$file")->getNodeText('//value');

$file = $websphere->write_jython_script(
    $file, {},
    augment_filename_with_random_numbers => 1
);

my $shellcmd = $websphere->_create_runfile($file, ());
my $escaped_shellcmd = $websphere->_mask_password($shellcmd);

$logger->info('WSAdmin command line: ', $escaped_shellcmd);
$logger->debug("WSAdmin script:");
$logger->debug('' . $script);
$logger->debug("== End of WSAdmin script ==");

# NOTE(review): %props is populated but never consumed in this script --
# presumably intended for setProperty reporting; confirm before removing.
my %props = ();
$props{CreateOrUpdateJMSQueueLine} = $escaped_shellcmd;

# Run wsadmin, capturing stdout+stderr; exit code drives the outcome.
my $cmd_res = `$shellcmd 2>&1`;
$logger->info($cmd_res);
my $code = $? >> 8;

# Result routing for step outcome, procedure summary and pipeline summary.
my $result_params = {
    outcome => {
        target => 'myCall',
        result => '',
    },
    procedure => {
        target => 'myCall',
        msg => ''
    },
    pipeline => {
        target => 'Create Or Update WMQ JMS Resource Result:',
        msg => '',
    }
};

# success
# The jython script reports whether it created or updated the resource.
my $operation_mode = 'create';
$operation_mode = $1 if $cmd_res =~ m/Operation\smode:\s(.*?)$/ms;

# SUCCESS is imported from WebSphere::Util.
if ($code == SUCCESS) {
    my $message = sprintf 'Successfully created or updated %s: %s', $wasApi, $opts->{specAdministrativeName};
    # Prefer the script's own status line when present.
    $message = $1 if $cmd_res =~ m/Status:\sOK,\sMessage:\s(.*?)$/ms;
    $result_params->{outcome}->{result} = 'success';
    $result_params->{procedure}->{msg} = $result_params->{pipeline}->{msg} = $message;
}
else {
    my $error = $websphere->extractWebSphereExceptions($cmd_res);
    if ($error) {
        $error = "Error occured during $operation_mode of $opts->{specAdministrativeName}:\n$error";
    }
    else {
        $error = "Unexpected error occured.";
    }
    $result_params->{outcome}->{result} = 'error';
    $result_params->{procedure}->{msg} = $result_params->{pipeline}->{msg} = $error;
}

my $exit = $websphere->setResult(%$result_params);
$exit->();
| electric-cloud/EC-WebSphere | src/main/resources/project/server/JMS/createOrUpdateWMQJMSActivationSpec.pl | Perl | apache-2.0 | 4,130 |
=pod
=head1 NAME
Bio::EnsEMBL::EGPipeline::PostCompara::RunnableDB::GeneCoverageFactory
=cut
=head1 DESCRIPTION
=head1 AUTHOR
ckong
=cut
package Bio::EnsEMBL::EGPipeline::PostCompara::RunnableDB::GeneCoverageFactory;
use strict;
use warnings;

use Data::Dumper;

use Bio::EnsEMBL::Registry;
use Bio::EnsEMBL::Utils::SqlHelper;

use base ('Bio::EnsEMBL::EGPipeline::PostCompara::RunnableDB::Base');
# No runnable-specific parameter defaults.
sub param_defaults {
    return {};
}
# Build the SQL that lists the distinct default-clusterset gene-tree root
# ids with at least one aligned seq_member, and stash it (plus the
# division) as parameters for run().
sub fetch_input {
    my ($self) = @_;

    my $division = $self->param_required('division');

    my $sql_geneTree = "SELECT distinct(r.root_id)
            FROM gene_tree_node n, gene_tree_root r, seq_member m, genome_db g, gene_align_member gam
            WHERE m.seq_member_id = n.seq_member_id
            AND gam.seq_member_id = m.seq_member_id
            AND r.root_id = n.root_id
            AND r.clusterset_id = 'default'
            AND gam.gene_align_id = r.gene_align_id
            AND g.genome_db_id = m.genome_db_id
            ORDER BY r.root_id ";

    $self->param('division', $division);
    $self->param('sql_geneTree', $sql_geneTree);

# Dead code kept for reference: the same query against the pre-seq_member
# compara schema, disabled via POD.
=pod
    $sql_geneTree = "SELECT distinct(r.root_id)
            FROM gene_tree_node n, gene_tree_root r, member m, genome_db g, gene_align_member gam
            WHERE m.member_id = n.member_id
            AND gam.member_id = m.member_id
            AND r.root_id = n.root_id
            AND r.clusterset_id = 'default'
            AND gam.gene_align_id = r.gene_align_id
            AND g.genome_db_id = m.genome_db_id
            ORDER BY r.root_id ";
=cut

    return;
}
# Run the gene-tree root-id query against the division's compara database
# and dataflow one job per root_id on branch 2.
sub run {
    my ($self) = @_;

    my $division     = $self->param('division');
    my $sql_geneTree = $self->param('sql_geneTree');

    my $compara_dba = Bio::EnsEMBL::Registry->get_DBAdaptor($division, "compara");
    print STDERR "Analysing " . $compara_dba->dbc()->dbname() . "\n";

    my $helper = Bio::EnsEMBL::Utils::SqlHelper->new( -DB_CONNECTION => $compara_dba->dbc() );
    my $rows   = $helper->execute(-SQL => $sql_geneTree);

    foreach my $row (@$rows) {
        $self->dataflow_output_id( { 'root_id' => $row->[0] }, 2 );
    }
    # NOTE(review): flows the literal 1 on the default branch, mirroring
    # the original behavior -- confirm this is the intended output_id.
    $self->dataflow_output_id(1);

    return;
}
# Nothing to write: all dataflow happens in run().
sub write_output {
    my ($self) = @_;
}
1;
| navygit/ncRNA_Pipeline | modules/Bio/EnsEMBL/EGPipeline/PostCompara/RunnableDB/GeneCoverageFactory.pm | Perl | apache-2.0 | 2,297 |
package VMOMI::OvfNoSpaceOnController;
use parent 'VMOMI::OvfUnsupportedElement';

use strict;
use warnings;

# Ancestor chain and locally-declared members, walked by the VMOMI
# (de)serializer.
our @class_ancestors = qw(
    OvfUnsupportedElement
    OvfUnsupportedPackage
    OvfFault
    VimFault
    MethodFault
);

our @class_members = ( [ 'parent', undef, 0 ] );

sub get_class_ancestors { return @class_ancestors }

sub get_class_members {
    my $class = shift;
    return ( $class->SUPER::get_class_members(), @class_members );
}

1;
| stumpr/p5-vmomi | lib/VMOMI/OvfNoSpaceOnController.pm | Perl | apache-2.0 | 521 |
package VMOMI::SessionTerminatedEvent;
use parent 'VMOMI::SessionEvent';

use strict;
use warnings;

# Ancestor chain walked by the VMOMI serializer to collect inherited members.
our @class_ancestors = ( 
    'SessionEvent',
    'Event',
    'DynamicData',
);

# Locally-declared members.  Tuple layout is presumably
# [name, type (undef = default/string), is-array flag] -- confirm against
# the VMOMI base classes.
our @class_members = ( 
    ['sessionId', undef, 0, ],
    ['terminatedUsername', undef, 0, ],
);

sub get_class_ancestors {
    return @class_ancestors;
}

sub get_class_members {
    my $class = shift;
    my @super_members = $class->SUPER::get_class_members();
    return (@super_members, @class_members);
}

1;
| stumpr/p5-vmomi | lib/VMOMI/SessionTerminatedEvent.pm | Perl | apache-2.0 | 498 |
#!/usr/bin/env perl

# Example: render a checkerboard image into a temporary file with
# Image::Checkerboard and print the image type that was produced.
use strict;
use warnings;

use File::Temp qw(tempfile);
use Image::Checkerboard;

# Temporary file (the handle is discarded; only the path is needed).
my (undef, $temp) = tempfile();

# Object.
my $obj = Image::Checkerboard->new;

# Create image; create() returns the image type written.
my $type = $obj->create($temp);

# Print out type.
print $type."\n";

# Unlink file.
unlink $temp;

# Output:
# bmp | tupinek/Image-Checkerboard | examples/ex1.pl | Perl | bsd-2-clause | 332 |
package Geo::GoogleEarth::Pluggable::NetworkLink;
use base qw{Geo::GoogleEarth::Pluggable::Base};
use XML::LibXML::LazyBuilder qw{E};
use warnings;
use strict;
our $VERSION='0.14';
=head1 NAME
Geo::GoogleEarth::Pluggable::NetworkLink - Geo::GoogleEarth::Pluggable::NetworkLink
=head1 SYNOPSIS
use Geo::GoogleEarth::Pluggable;
my $document=Geo::GoogleEarth::Pluggable->new;
$document->NetworkLink(url=>"./anotherdocument.cgi");
=head1 DESCRIPTION
Geo::GoogleEarth::Pluggable::NetworkLink is a L<Geo::GoogleEarth::Pluggable::Base> with a few other methods.
=head1 USAGE
my $networklink=$document->NetworkLink(name=>"My NetworkLink",
url=>"./anotherdocument.cgi");
=head2 type
Returns the object type.
my $type=$networklink->type;
=cut
sub type { return "NetworkLink" }
=head2 node
=cut
sub node {
    my $self=shift;
    # Snippet always comes first; its maxLines attribute mirrors the number
    # of snippet lines.
    my @element=(E(Snippet=>{maxLines=>scalar(@{$self->Snippet})}, join("\n", @{$self->Snippet})));
    my @link=();
    # Keys in %link belong inside the <Link> child element; everything else
    # (except the special-cased "url" and "Snippet") becomes a direct child
    # of <NetworkLink>.  Reference-valued entries are skipped in both cases.
    my %link=map {$_=>1} qw{href refreshMode refreshInterval viewRefreshMode viewRefreshTime viewBoundScale viewFormat httpQuery};
    foreach my $key (keys %$self) {
        next if $key eq "Snippet";
        if ($key eq "url") {
            # "url" is exposed via the accessor (with its default) as <href>.
            push @link, E(href=>{}, $self->url);
        } elsif(exists $link{$key}) { #these go in the Link element
            push @link, E($key=>{}, $self->{$key}) unless ref($self->{$key});
        } else {
            push @element, E($key=>{}, $self->{$key}) unless ref($self->{$key});
        }
    }
    # Only emit <Link> when at least one link-related child exists.
    push @element, E(Link=>{}, @link) if @link;
    return E(NetworkLink=>{}, @element);
}
=head2 url
Sets or returns the Uniform Resource Locator (URL) for the NetworkLink
my $url=$networklink->url;
$networklink->url("./newdoc.cgi");
=cut
# Accessor/mutator for the NetworkLink URL; defaults to http://localhost/
# when unset.
sub url {
    my $self = shift;
    $self->{'url'} = shift if @_;
    return defined $self->{'url'} ? $self->{'url'} : 'http://localhost/';
}
=head1 BUGS
Please log on RT and send to the geo-perl email list.
=head1 SUPPORT
DavisNetworks.com supports all Perl applications including this package.
=head1 AUTHOR
Michael R. Davis (mrdvt92)
CPAN ID: MRDVT
=head1 COPYRIGHT
This program is free software licensed under the...
The BSD License
The full text of the license can be found in the LICENSE file included with this module.
=head1 SEE ALSO
L<Geo::GoogleEarth::Pluggable> creates a GoogleEarth Document.
=cut
1;
| gitpan/Geo-GoogleEarth-Pluggable | lib/Geo/GoogleEarth/Pluggable/NetworkLink.pm | Perl | bsd-3-clause | 2,368 |
Title: Hello
Tags: foo, bar, baz
Hello world!
[cut] Foobar
=head1 Hello!
| gitpan/Mojo-Twist | t/article/20110922T121212-hello-there.pod | Perl | mit | 76 |
class RNA_Transcription {
    # Transcribe a DNA strand into its RNA complement
    # (A->U, G->C, C->G, T->A), character by character.
    method to_rna ($dna) {
        $dna.trans('AGCT' => 'UCGA');
    }
}
| yanick/xperl6 | rna-transcription/Example.pm | Perl | mit | 190 |
#
# Copyright 2017 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package apps::lync::2013::mssql::mode::poorcalls;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
# Declare the plugin's counter structure: one global total ('global',
# type 0) plus a per-user group ('uri', type 1) keyed on the SIP URI and
# prefixed in output via prefix_uri_output().
sub set_counters {
    my ($self, %options) = @_;

    $self->{maps_counters_type} = [
        { name => 'global', type => 0 },
        { name => 'uri', type => 1, cb_prefix_output => 'prefix_uri_output', message_multiple => 'All users are ok' },
    ];

    # Total number of poor calls in the lookback window.
    $self->{maps_counters}->{global} = [
        { label => 'global', set => {
                key_values => [ { name => 'count' } ],
                output_template => '%d Poor calls',
                perfdatas => [
                    { label => 'poor_calls', value => 'count_absolute', template => '%d',
                      unit => 'calls', min => 0, label_extra_instance => 0 },
                ],
            }
        },
    ];

    # Per-user poor-call count; perfdata gets one instance per URI.
    $self->{maps_counters}->{uri} = [
        { label => 'user', set => {
                key_values => [ { name => 'count' }, { name => 'display' } ],
                output_template => 'count : %d',
                perfdatas => [
                    { label => 'poor_calls', value => 'count_absolute', template => '%d',
                      unit => 'calls', min => 0, label_extra_instance => 1, instance_use => 'display_absolute' },
                ],
            }
        },
    ];
}
# Constructor: registers the mode's command-line options on top of the
# counter-template base class.
sub new {
    my ($class, %opts) = @_;
    my $self = bless $class->SUPER::new(package => __PACKAGE__, %opts), $class;

    $self->{version} = '1.0';
    $opts{options}->add_options(
        arguments => {
            'lookback:s'    => { name => 'lookback', default => '65' },
            'filter-user:s' => { name => 'filter_user' },
        },
    );

    return $self;
}
# Prefix each per-user output line with the quoted user URI.
sub prefix_uri_output {
    my ($self, %options) = @_;
    return sprintf "'%s' ", $options{instance_value}->{display};
}
# Query QoEMetrics for users whose LastPoorCallTime falls inside the
# lookback window, then fill the 'global' and per-'uri' counters.
sub manage_selection {
    my ($self, %options) = @_;
    # $options{sql} = sqlmode object
    $self->{sql} = $options{sql};
    $self->{sql}->connect();
    $self->{sql}->query(query => "SELECT URI, LastPoorCallTime
                          FROM [QoEMetrics].[dbo].[User]
                          WHERE LastPoorCallTime>=dateadd(minute,-".$self->{option_results}->{lookback}.",getdate())");

    # Fix: start from explicit zero/empty structures so the global counter
    # reports 0 (instead of an undef-warning) when no rows match, and drop
    # the previously-declared-but-unused $total.
    $self->{global} = { count => 0 };
    $self->{uri} = {};

    while (my $row = $self->{sql}->fetchrow_hashref()) {
        if (defined($self->{option_results}->{filter_user}) && $self->{option_results}->{filter_user} ne '' &&
            $row->{URI} !~ /$self->{option_results}->{filter_user}/) {
            $self->{output}->output_add(long_msg => "Skipping '" . $row->{URI} . "': no matching filter.", debug => 1);
            next;
        }
        $self->{global}->{count}++;
        $self->{uri}->{$row->{URI}} = {count => 0, display => $row->{URI}} if (!defined($self->{uri}->{$row->{URI}}));
        $self->{uri}->{$row->{URI}}->{count}++;
    }
}
1;
__END__
=head1 MODE
Check Lync Poor Calls during last X minutes (Total and per users)
=over 8
=item B<--filter-user>
Filter user name (can be a regexp)
=item B<--filter-counters>
Only display some counters (regexp can be used).
=item B<--lookback>
Minutes to lookback (From you to UTC) default: 65
=item B<--warning-*>
Set warning threshold for number of poor calls. Can be : 'global', 'user'
=item B<--critical-*>
Set critical threshold for number of poor calls. Can be : 'global', 'user'
=back
=cut
| maksimatveev/centreon-plugins | apps/lync/2013/mssql/mode/poorcalls.pm | Perl | apache-2.0 | 4,219 |
package VASP::OUTCAR::IO;

# Role providing lazy, read-once access to the OUTCAR file contents via
# the IO::Reader role's slurp().
use Moose::Role;
use MooseX::Types::Moose 'Str';
use namespace::autoclean;

with 'IO::Reader';

# Whole-file contents, read on first access; not settable from new().
has 'slurped' => (
    is       => 'ro',
    isa      => Str,
    init_arg => undef,
    lazy     => 1,
    default  => sub { shift->slurp },
);

1
| vitduck/CPLAS | VASP/OUTCAR/IO.pm | Perl | bsd-3-clause | 289 |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by lib/unicore/mktables from the Unicode
# database, Version 8.0.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly. Use Unicode::UCD to access the Unicode character data
# base.
return <<'END';
V216
53
54
1637
1638
1781
1782
1989
1990
2411
2412
2539
2540
2667
2668
2795
2796
2923
2924
3051
3052
3179
3180
3307
3308
3435
3436
3563
3564
3669
3670
3797
3798
3877
3878
4165
4166
4245
4246
4973
4974
6117
6118
6133
6134
6165
6166
6475
6476
6613
6614
6789
6790
6805
6806
6997
6998
7093
7094
7237
7238
7253
7254
8309
8310
8325
8326
8548
8549
8564
8565
9316
9317
9336
9337
9356
9357
9465
9466
10106
10107
10116
10117
10126
10127
12325
12326
12836
12837
12932
12933
13317
13318
14378
14379
20116
20117
20237
20238
42533
42534
42730
42731
43221
43222
43269
43270
43477
43478
43509
43510
43605
43606
44021
44022
65301
65302
65803
65804
65859
65860
65864
65865
65871
65872
65887
65888
65907
65908
66277
66278
66337
66338
66725
66726
67709
67710
67756
67757
67836
67837
68036
68037
68332
68333
68859
68860
69220
69221
69718
69719
69739
69740
69877
69878
69947
69948
70101
70102
70117
70118
70389
70390
70869
70870
71253
71254
71365
71366
71477
71478
71909
71910
74755
74756
74762
74763
74768
74769
74777
74778
74786
74787
74791
74792
74801
74802
74809
74810
74829
74830
74836
74838
74858
74859
92773
92774
93013
93014
119652
119653
120787
120788
120797
120798
120807
120808
120817
120818
120827
120828
125131
125132
127238
127239
131361
131362
END
| operepo/ope | bin/usr/share/perl5/core_perl/unicore/lib/Nv/5.pl | Perl | mit | 1,723 |
#-----------------------------------------------------------
# xpedition.pl
# Determine the edition of XP (MediaCenter, TabletPC)
#
# History
# 20120722 - updated the %config hash
# 20090727 - created
#
# References
# http://windowsitpro.com/article/articleid/94531/
# how-can-a-script-determine-if-windows-xp-tablet-pc-edition-is-installed.html
# http://unasked.com/question/view/id/119610
#
# copyright 2009 H. Carvey
#-----------------------------------------------------------
package xpedition;
use strict;
my %config = (hive => "System",
hivemask => 4,
output => "report",
category => "",
hasShortDescr => 1,
hasDescr => 0,
hasRefs => 0,
osmask => 1,
version => 20120722);
sub getConfig{return %config}
sub getShortDescr {
return "Queries System hive for XP Edition info";
}
sub getDescr{}
sub getRefs {}
sub getHive {return $config{hive};}
sub getVersion {return $config{version};}
my $VERSION = getVersion();
# Entry point: inspect the WPA\MediaCenter and WPA\TabletPC keys of the
# System hive to report which (if any) XP edition markers are installed.
# Each lookup sits in its own eval because the subkeys are absent on
# editions that do not have them.
sub pluginmain {
    my $class = shift;
    my $hive = shift;
    my $key;
    my $edition = 0;
    ::logMsg("Launching xpedition v.".$VERSION);
    # Fix: the banner was previously emitted twice (once here and once
    # again after the short-description line).
    ::rptMsg("xpedition v.".$VERSION); # banner
    ::rptMsg("(".getHive().") ".getShortDescr()."\n"); # banner
    my $reg = Parse::Win32Registry->new($hive);
    my $root_key = $reg->get_root_key;

    eval {
        $key = $root_key->get_subkey("WPA\\MediaCenter")->get_value("Installed")->get_data();
        if ($key == 1) {
            ::rptMsg("MediaCenter Edition");
            $edition = 1;
        }
    };

    eval {
        $key = $root_key->get_subkey("WPA\\TabletPC")->get_value("Installed")->get_data();
        if ($key == 1) {
            ::rptMsg("TabletPC Edition");
            $edition = 1;
        }
    };

    # Report a definite negative instead of producing no finding at all.
    ::rptMsg("Neither MediaCenter nor TabletPC edition markers found.") unless ($edition);
}
1 | raman-bt/autopsy | RecentActivity/release/rr-full/plugins/xpedition.pl | Perl | apache-2.0 | 1,870 |
package JSON::PP;
# JSON-2.0
# Pure-Perl JSON encoder/decoder compatible with the JSON::XS API.
use 5.005;
use strict;
use base qw(Exporter);
use overload;
use Carp ();
use B ();
#use Devel::Peek;
$JSON::PP::VERSION = '2.27000';
@JSON::PP::EXPORT = qw(encode_json decode_json from_json to_json);
# Instead of hash access, index access into the per-object PROPS array was
# tried for speed, but it was not faster than expected, so it may change.
# The P_* constants below are the indexes of the option flags stored in
# $self->{PROPS}.
use constant P_ASCII => 0;
use constant P_LATIN1 => 1;
use constant P_UTF8 => 2;
use constant P_INDENT => 3;
use constant P_CANONICAL => 4;
use constant P_SPACE_BEFORE => 5;
use constant P_SPACE_AFTER => 6;
use constant P_ALLOW_NONREF => 7;
use constant P_SHRINK => 8;
use constant P_ALLOW_BLESSED => 9;
use constant P_CONVERT_BLESSED => 10;
use constant P_RELAXED => 11;
use constant P_LOOSE => 12;
use constant P_ALLOW_BIGNUM => 13;
use constant P_ALLOW_BAREKEY => 14;
use constant P_ALLOW_SINGLEQUOTE => 15;
use constant P_ESCAPE_SLASH => 16;
use constant P_AS_NONBLESSED => 17;
use constant P_ALLOW_UNKNOWN => 18;
BEGIN {
# Option names shared with JSON::XS.
my @xs_compati_bit_properties = qw(
latin1 ascii utf8 indent canonical space_before space_after allow_nonref shrink
allow_blessed convert_blessed relaxed allow_unknown
);
# Options implemented only by JSON::PP.
my @pp_bit_properties = qw(
allow_singlequote allow_bignum loose
allow_barekey escape_slash as_nonblessed
);
# Perl version check: is Unicode handling enabled?
# The helper module sets @JSON::PP::_properties.
my $helper = $] >= 5.008 ? 'JSON::PP58'
: $] >= 5.006 ? 'JSON::PP56'
: 'JSON::PP5005'
;
eval qq| require $helper |;
if ($@) { Carp::croak $@; }
# Generate a chainable setter (name) and a getter (get_name) method for
# every flag; each stores 0/1 at the flag's P_* index in $self->{PROPS}.
for my $name (@xs_compati_bit_properties, @pp_bit_properties) {
my $flag_name = 'P_' . uc($name);
eval qq/
sub $name {
my \$enable = defined \$_[1] ? \$_[1] : 1;
if (\$enable) {
\$_[0]->{PROPS}->[$flag_name] = 1;
}
else {
\$_[0]->{PROPS}->[$flag_name] = 0;
}
\$_[0];
}
sub get_$name {
\$_[0]->{PROPS}->[$flag_name] ? 1 : '';
}
/;
}
}
# Functions
# Informational lists of option-setter names usable with each direction;
# not consulted elsewhere in this chunk.
my %encode_allow_method
 = map {($_ => 1)} qw/utf8 pretty allow_nonref latin1 self_encode escape_slash
allow_blessed convert_blessed indent indent_length allow_bignum
as_nonblessed
/;
my %decode_allow_method
 = map {($_ => 1)} qw/utf8 allow_nonref loose allow_singlequote allow_bignum
allow_barekey max_size relaxed/;
my $JSON; # cache
# Functional interface: a single shared UTF-8 coder object is created
# lazily and reused for every call.
sub encode_json ($) { # encode
($JSON ||= __PACKAGE__->new->utf8)->encode(@_);
}
# NOTE(review): unlike encode_json, decode_json carries no ($) prototype
# here — confirm against upstream whether that asymmetry is intended.
sub decode_json { # decode
($JSON ||= __PACKAGE__->new->utf8)->decode(@_);
}
# Obsoleted
sub to_json($) {
Carp::croak ("JSON::PP::to_json has been renamed to encode_json.");
}
sub from_json($) {
Carp::croak ("JSON::PP::from_json has been renamed to decode_json.");
}
# Methods
# new($class)
# Construct a coder object with the JSON::XS-compatible defaults:
# max_depth 512, no size limit, compact output, three-space indent unit.
# The fallback coderef is invoked for values that cannot be encoded.
sub new {
    my $class = shift;
    my %defaults = (
        max_depth     => 512,
        max_size      => 0,
        indent        => 0,
        FLAGS         => 0,
        fallback      => sub { encode_error('Invalid value. JSON can only reference.') },
        indent_length => 3,
    );
    return bless { %defaults }, $class;
}
# encode($self, $perl_structure) -> JSON text
sub encode {
return $_[0]->PP_encode_json($_[1]);
}
# decode($self, $json_text) -> Perl structure
sub decode {
return $_[0]->PP_decode_json($_[1], 0x00000000);
}
# decode_prefix($self, $json_text) -> (structure, chars_consumed)
# The 0x00000001 flag tells the parser to tolerate trailing garbage.
sub decode_prefix {
return $_[0]->PP_decode_json($_[1], 0x00000001);
}
# accessor
# pretty printing
# pretty($self [, $enable]) — toggles indent/space_before/space_after as a
# group; indent_length(3) is forced for JSON::XS compatibility.
sub pretty {
my ($self, $v) = @_;
my $enable = defined $v ? $v : 1;
if ($enable) { # indent_length(3) for JSON::XS compatibility
$self->indent(1)->indent_length(3)->space_before(1)->space_after(1);
}
else {
$self->indent(0)->space_before(0)->space_after(0);
}
$self;
}
# etc
# max_depth($self [, $n]) — nesting limit for encode/decode; with no
# argument the limit is effectively removed (0x80000000).
sub max_depth {
my $max = defined $_[1] ? $_[1] : 0x80000000;
$_[0]->{max_depth} = $max;
$_[0];
}
sub get_max_depth { $_[0]->{max_depth}; }
# max_size($self [, $bytes]) — decode-side input size cap; 0 disables it.
sub max_size {
my $max = defined $_[1] ? $_[1] : 0;
$_[0]->{max_size} = $max;
$_[0];
}
sub get_max_size { $_[0]->{max_size}; }
# filter_json_object($self [, $coderef]) — hook called on every decoded
# object; F_HOOK caches whether any hook is installed.
sub filter_json_object {
$_[0]->{cb_object} = defined $_[1] ? $_[1] : 0;
$_[0]->{F_HOOK} = ($_[0]->{cb_object} or $_[0]->{cb_sk_object}) ? 1 : 0;
$_[0];
}
# filter_json_single_key_object($self, $key [, $coderef]) — per-key hook
# for single-key objects.
sub filter_json_single_key_object {
if (@_ > 1) {
$_[0]->{cb_sk_object}->{$_[1]} = $_[2];
}
$_[0]->{F_HOOK} = ($_[0]->{cb_object} or $_[0]->{cb_sk_object}) ? 1 : 0;
$_[0];
}
# indent_length($self, $n) — indent unit width; out-of-range values warn
# and leave the previous setting unchanged.
sub indent_length {
if (!defined $_[1] or $_[1] > 15 or $_[1] < 0) {
Carp::carp "The acceptable range of indent_length() is 0 to 15.";
}
else {
$_[0]->{indent_length} = $_[1];
}
$_[0];
}
sub get_indent_length {
$_[0]->{indent_length};
}
# sort_by($self [, $coderef_or_subname]) — custom key ordering for encode;
# a true non-code value selects the named sort subroutine.
sub sort_by {
$_[0]->{sort_by} = defined $_[1] ? $_[1] : 1;
$_[0];
}
sub allow_bigint {
Carp::carp("allow_bigint() is obsoleted. use allow_bignum() insted.");
}
###############################
###
### Perl => JSON
###
{ # Convert
# Encoder state shared by every sub in this bare block.  PP_encode_json
# loads these from the coder object's flags before each encode, so the
# encoder is not re-entrant.
my $max_depth;
my $indent;
my $ascii;
my $latin1;
my $utf8;
my $space_before;
my $space_after;
my $canonical;
my $allow_blessed;
my $convert_blessed;
my $indent_length;
my $escape_slash;
my $bignum;
my $as_nonblessed;
my $depth;
my $indent_count;
my $keysort;
# PP_encode_json($self, $obj) -> JSON text
# Top-level encoder: snapshots the option flags into the shared lexicals
# above, encodes $obj recursively, then applies the utf8/shrink passes.
sub PP_encode_json {
my $self = shift;
my $obj = shift;
$indent_count = 0;
$depth = 0;
my $idx = $self->{PROPS};
($ascii, $latin1, $utf8, $indent, $canonical, $space_before, $space_after, $allow_blessed,
$convert_blessed, $escape_slash, $bignum, $as_nonblessed)
= @{$idx}[P_ASCII .. P_SPACE_AFTER, P_ALLOW_BLESSED, P_CONVERT_BLESSED,
P_ESCAPE_SLASH, P_ALLOW_BIGNUM, P_AS_NONBLESSED];
($max_depth, $indent_length) = @{$self}{qw/max_depth indent_length/};
# canonical gives lexicographic key order; sort_by overrides it with a
# code ref or the name of a sort subroutine.
$keysort = $canonical ? sub { $a cmp $b } : undef;
if ($self->{sort_by}) {
$keysort = ref($self->{sort_by}) eq 'CODE' ? $self->{sort_by}
: $self->{sort_by} =~ /\D+/ ? $self->{sort_by}
: sub { $a cmp $b };
}
encode_error("hash- or arrayref expected (not a simple scalar, use allow_nonref to allow this)")
if(!ref $obj and !$idx->[ P_ALLOW_NONREF ]);
my $str = $self->object_to_json($obj);
$str .= "\n" if ( $indent ); # JSON::XS 2.26 compatible
unless ($ascii or $latin1 or $utf8) {
utf8::upgrade($str);
}
if ($idx->[ P_SHRINK ]) {
utf8::downgrade($str, 1);
}
return $str;
}
# object_to_json($self, $obj)
# Dispatch on reference type: HASH/ARRAY recurse; blessed objects go
# through the TO_JSON / allow_blessed machinery; anything else is a value.
sub object_to_json {
my ($self, $obj) = @_;
my $type = ref($obj);
if($type eq 'HASH'){
return $self->hash_to_json($obj);
}
elsif($type eq 'ARRAY'){
return $self->array_to_json($obj);
}
elsif ($type) { # blessed object?
if (blessed($obj)) {
return $self->value_to_json($obj) if ( $obj->isa('JSON::PP::Boolean') );
if ( $convert_blessed and $obj->can('TO_JSON') ) {
my $result = $obj->TO_JSON();
# TO_JSON must return a new value, not the object itself,
# otherwise we would recurse forever.
if ( defined $result and $obj eq $result ) {
encode_error( sprintf(
"%s::TO_JSON method returned same object as was passed instead of a new one",
ref $obj
) );
}
return $self->object_to_json( $result );
}
return "$obj" if ( $bignum and _is_bignum($obj) );
return $self->blessed_to_json($obj) if ($allow_blessed and $as_nonblessed); # will be removed.
encode_error( sprintf("encountered object '%s', but neither allow_blessed "
. "nor convert_blessed settings are enabled", $obj)
) unless ($allow_blessed);
return 'null';
}
else {
return $self->value_to_json($obj);
}
}
else{
return $self->value_to_json($obj);
}
}
# hash_to_json($self, $hashref)
# Encodes one object level, honouring indent/spacing flags and tied
# hashes (a tied source hash gets a tied %res of the same class).
sub hash_to_json {
my ($self, $obj) = @_;
my ($k,$v);
my %res;
encode_error("json text or perl structure exceeds maximum nesting level (max_depth set too low?)")
if (++$depth > $max_depth);
my ($pre, $post) = $indent ? $self->_up_indent() : ('', '');
my $del = ($space_before ? ' ' : '') . ':' . ($space_after ? ' ' : '');
if ( my $tie_class = tied %$obj ) {
if ( $tie_class->can('TIEHASH') ) {
$tie_class =~ s/=.+$//;
tie %res, $tie_class;
}
}
# In old Perl versions, tied hashes in bool context didn't work.
# So, we can't use such a way (%res ? a : b)
my $has;
for my $k (keys %$obj) {
my $v = $obj->{$k};
# NOTE(review): when object_to_json returns a false string such as
# "0", the || fallback re-encodes via value_to_json; for such plain
# scalars both paths produce the same text, so this is benign.
$res{$k} = $self->object_to_json($v) || $self->value_to_json($v);
$has = 1 unless ( $has );
}
--$depth;
$self->_down_indent() if ($indent);
return '{' . ( $has ? $pre : '' ) # indent
. ( $has ? join(",$pre", map { utf8::decode($_) if ($] < 5.008); # key for Perl 5.6
string_to_json($self, $_) . $del . $res{$_} # key : value
} _sort( $self, \%res )
) . $post # indent
: ''
)
. '}';
}
# array_to_json($self, $arrayref)
# Encodes one array level; mirrors hash_to_json including tied-array
# propagation.
sub array_to_json {
my ($self, $obj) = @_;
my @res;
encode_error("json text or perl structure exceeds maximum nesting level (max_depth set too low?)")
if (++$depth > $max_depth);
my ($pre, $post) = $indent ? $self->_up_indent() : ('', '');
if (my $tie_class = tied @$obj) {
if ( $tie_class->can('TIEARRAY') ) {
$tie_class =~ s/=.+$//;
tie @res, $tie_class;
}
}
for my $v (@$obj){
push @res, $self->object_to_json($v) || $self->value_to_json($v);
}
--$depth;
$self->_down_indent() if ($indent);
return '[' . ( @res ? $pre : '' ) . ( @res ? join( ",$pre", @res ) . $post : '' ) . ']';
}
# value_to_json($self, $value)
# Encodes a leaf: undef -> null; scalars that are internally numeric (per
# the B flags check) pass through unquoted; strings are escaped; scalar
# refs to 0/1 become false/true; anything else obeys allow_unknown or the
# fallback hook.
sub value_to_json {
my ($self, $value) = @_;
return 'null' if(!defined $value);
my $b_obj = B::svref_2object(\$value); # for round trip problem
my $flags = $b_obj->FLAGS;
return $value # as is
if ( ( $flags & B::SVf_IOK or $flags & B::SVp_IOK
or $flags & B::SVf_NOK or $flags & B::SVp_NOK
) and !($flags & B::SVf_POK )
); # SvTYPE is IV or NV?
my $type = ref($value);
if(!$type){
return string_to_json($self, $value);
}
elsif( blessed($value) and $value->isa('JSON::PP::Boolean') ){
return $$value == 1 ? 'true' : 'false';
}
elsif ($type) {
if ((overload::StrVal($value) =~ /=(\w+)/)[0]) {
return $self->value_to_json("$value");
}
if ($type eq 'SCALAR' and defined $$value) {
return $$value eq '1' ? 'true'
: $$value eq '0' ? 'false'
: $self->{PROPS}->[ P_ALLOW_UNKNOWN ] ? 'null'
: encode_error("cannot encode reference to scalar");
}
if ( $self->{PROPS}->[ P_ALLOW_UNKNOWN ] ) {
return 'null';
}
else {
if ( $type eq 'SCALAR' or $type eq 'REF' ) {
encode_error("cannot encode reference to scalar");
}
else {
encode_error("encountered $value, but JSON can only represent references to arrays or hashes");
}
}
}
else {
return $self->{fallback}->($value)
if ($self->{fallback} and ref($self->{fallback}) eq 'CODE');
return 'null';
}
}
# Escape table for string_to_json; the single-quote entry is only reached
# via the loose/singlequote options.
my %esc = (
"\n" => '\n',
"\r" => '\r',
"\t" => '\t',
"\f" => '\f',
"\b" => '\b',
"\"" => '\"',
"\\" => '\\\\',
"\'" => '\\\'',
);
# string_to_json($self, $string)
# Escapes control/meta characters, then applies the ascii/latin1/utf8
# output transforms (the JSON_PP_encode_* aliases are installed by the
# version-specific helper module loaded in BEGIN).
sub string_to_json {
my ($self, $arg) = @_;
$arg =~ s/([\x22\x5c\n\r\t\f\b])/$esc{$1}/g;
$arg =~ s/\//\\\//g if ($escape_slash);
$arg =~ s/([\x00-\x08\x0b\x0e-\x1f])/'\\u00' . unpack('H2', $1)/eg;
if ($ascii) {
$arg = JSON_PP_encode_ascii($arg);
}
if ($latin1) {
$arg = JSON_PP_encode_latin1($arg);
}
if ($utf8) {
utf8::encode($arg);
}
return '"' . $arg . '"';
}
# blessed_to_json($self, $obj)
# as_nonblessed support: encode a blessed hash/array as its underlying
# structure, anything else as null.
sub blessed_to_json {
my $b_obj = B::svref_2object($_[1]);
if ($b_obj->isa('B::HV')) {
return $_[0]->hash_to_json($_[1]);
}
elsif ($b_obj->isa('B::AV')) {
return $_[0]->array_to_json($_[1]);
}
else {
return 'null';
}
}
# encode_error($message)
# Abort the current encode by croaking from the caller's perspective.
sub encode_error {
    my ($message) = @_;
    Carp::croak "$message";
}
# _sort($self, \%res) — key order for object output: sorted with the
# closure's $keysort when one is set, otherwise hash order.
sub _sort {
my ($self, $res) = @_;
defined $keysort ? (sort $keysort (keys %$res)) : keys %$res;
}
# _up_indent() — returns ($pre, $post): newline-plus-indent strings for
# the new (deeper) level and the current level, and bumps the counter.
sub _up_indent {
my $self = shift;
my $space = ' ' x $indent_length;
my ($pre,$post) = ('','');
$post = "\n" . $space x $indent_count;
$indent_count++;
$pre = "\n" . $space x $indent_count;
return ($pre,$post);
}
sub _down_indent { $indent_count--; }
# PP_encode_box() — expose the closure's mutable encoder state (used by
# the helper modules / debugging).
sub PP_encode_box {
{
depth => $depth,
indent_count => $indent_count,
};
}
} # Convert
# _encode_ascii($string)
# Render every code point above 0x7F as a \uXXXX escape (surrogate pairs
# for code points beyond the BMP); pure ASCII passes through unchanged.
sub _encode_ascii {
    my @pieces;
    for my $cp ( unpack 'U*', $_[0] ) {
        if ( $cp <= 127 ) {
            push @pieces, chr $cp;
        }
        elsif ( $cp <= 65535 ) {
            push @pieces, sprintf '\u%04x', $cp;
        }
        else {
            push @pieces, sprintf '\u%x\u%x', _encode_surrogates($cp);
        }
    }
    return join '', @pieces;
}
# _encode_latin1($string)
# Like _encode_ascii, but code points up to 0xFF pass through as literal
# characters; only those above 0xFF are \u-escaped.
sub _encode_latin1 {
    my @pieces;
    for my $cp ( unpack 'U*', $_[0] ) {
        if ( $cp <= 255 ) {
            push @pieces, chr $cp;
        }
        elsif ( $cp <= 65535 ) {
            push @pieces, sprintf '\u%04x', $cp;
        }
        else {
            push @pieces, sprintf '\u%x\u%x', _encode_surrogates($cp);
        }
    }
    return join '', @pieces;
}
# _encode_surrogates($code_point) — from perlunicode.
# Split a supplementary-plane code point (> 0xFFFF) into its UTF-16
# (high, low) surrogate pair.  The original used floating-point division
# ($uni / 0x400 + 0xD800), returning a fractional high surrogate and
# relying on sprintf '%x' to truncate it downstream; integer bit
# arithmetic returns exact integers and is safe for any consumer.
sub _encode_surrogates {
    my $offset = $_[0] - 0x10000;
    return ( 0xD800 + ( $offset >> 10 ), 0xDC00 + ( $offset & 0x3FF ) );
}
# _is_bignum($obj) — true when $obj is (or subclasses) Math::BigInt or
# Math::BigFloat; used by the encoder's allow_bignum path.
sub _is_bignum {
    my ($candidate) = @_;
    return $candidate->isa('Math::BigInt') || $candidate->isa('Math::BigFloat');
}
#
# JSON => Perl
#
# Largest number of decimal digits this perl can hold in a plain number
# without switching to exponential notation; probed once at compile time
# by appending digits until the stringification contains an exponent.
my $max_intsize;
BEGIN {
my $checkint = 1111;
for my $d (5..30) {
$checkint .= 1;
my $int = eval qq| $checkint |;
if ($int =~ /[eE]/) {
$max_intsize = $d - 1;
last;
}
}
}
{ # PARSE
# Decoder state shared by every sub in this bare block; PP_decode_json
# initialises it per call, so (like the encoder) the decoder is not
# re-entrant.
my %escapes = ( # by Jeremy Muhlich <jmuhlich [at] bitflood.org>
b => "\x8",
t => "\x9",
n => "\xA",
f => "\xC",
r => "\xD",
'\\' => '\\',
'"' => '"',
'/' => '/',
);
my $text; # json data
my $at; # offset
my $ch; # one character of lookahead
my $len; # text length (changed according to UTF8 or NON UTF8)
# INTERNAL
my $depth; # nest counter
my $encoding; # json text encoding
my $is_valid_utf8; # temp variable
my $utf8_len; # utf8 byte length
# FLAGS
my $utf8; # must be utf8
my $max_depth; # max nest number of objects and arrays
my $max_size;
my $relaxed;
my $cb_object;
my $cb_sk_object;
my $F_HOOK;
my $allow_bigint; # using Math::BigInt
my $singlequote; # loosely quoting
my $loose; #
my $allow_barekey; # bareKey
# $opt flag
# 0x00000001 .... decode_prefix
# PP_decode_json($self, $text, $opt)
# Top-level decoder.  Loads flags into the closure lexicals, normalises
# the input's UTF-8 state, enforces max_size, parses one value, and then
# either rejects or (under decode_prefix, $opt & 1) reports trailing text.
sub PP_decode_json {
my ($self, $opt); # $opt is an effective flag during this decode_json.
($self, $text, $opt) = @_;
($at, $ch, $depth) = (0, '', 0);
if (!defined $text or ref $text) {
decode_error("malformed text data.");
}
my $idx = $self->{PROPS};
($utf8, $relaxed, $loose, $allow_bigint, $allow_barekey, $singlequote)
= @{$idx}[P_UTF8, P_RELAXED, P_LOOSE .. P_ALLOW_SINGLEQUOTE];
if ( $utf8 ) {
utf8::downgrade( $text, 1 ) or Carp::croak("Wide character in subroutine entry");
}
else {
utf8::upgrade( $text );
}
$len = length $text;
($max_depth, $max_size, $cb_object, $cb_sk_object, $F_HOOK)
= @{$self}{qw/max_depth max_size cb_object cb_sk_object F_HOOK/};
if ($max_size > 1) {
use bytes;
my $bytes = length $text;
decode_error(
sprintf("attempted decode of JSON text of %s bytes size, but max_size is set to %s"
, $bytes, $max_size), 1
) if ($bytes > $max_size);
}
# Currently no effect
# should use regexp
my @octets = unpack('C4', $text);
$encoding = ( $octets[0] and $octets[1]) ? 'UTF-8'
: (!$octets[0] and $octets[1]) ? 'UTF-16BE'
: (!$octets[0] and !$octets[1]) ? 'UTF-32BE'
: ( $octets[2] ) ? 'UTF-16LE'
: (!$octets[2] ) ? 'UTF-32LE'
: 'unknown';
# my $result = value();
my $eof = !( my ( $result ) = value() ); # $eof for incr_parse
if ( $eof && ( $opt & 0x00000001 ) ) {
return undef;
}
if (!$idx->[ P_ALLOW_NONREF ] and !ref $result) {
decode_error(
'JSON text must be an object or array (but found number, string, true, false or null,'
. ' use allow_nonref to allow this)', 1);
}
if ($len >= $at) {
my $consumed = $at - 1;
white();
if ($ch) {
decode_error("garbage after JSON object") unless ($opt & 0x00000001);
return ($result, $consumed);
}
}
$result;
}
# next_chr() — advance the cursor; sets $ch to the next character or
# undef at end of input.
sub next_chr {
return $ch = undef if($at >= $len);
$ch = substr($text, $at++, 1);
}
# value() — parse one JSON value, dispatching on the first character.
sub value {
white();
return if(!defined $ch);
return object() if($ch eq '{');
return array() if($ch eq '[');
return string() if($ch eq '"' or ($singlequote and $ch eq "'"));
return number() if($ch =~ /[0-9]/ or $ch eq '-');
return word();
}
# string() — parse a JSON string starting at the opening quote in $ch.
# Handles backslash escapes, \uXXXX (including UTF-16 surrogate pairs),
# optional single-quoted strings, and byte-level UTF-8 validation when
# the utf8 flag is set.
sub string {
my ($i, $s, $t, $u);
my $utf16;
my $is_utf8;
($is_valid_utf8, $utf8_len) = ('', 0);
$s = ''; # basically UTF8 flag on
if($ch eq '"' or ($singlequote and $ch eq "'")){
# NOTE(review): "my $x = EXPR if COND" has unspecified behaviour in
# Perl (the conditional my); left as upstream wrote it.
my $boundChar = $ch if ($singlequote);
OUTER: while( defined(next_chr()) ){
if((!$singlequote and $ch eq '"') or ($singlequote and $ch eq $boundChar)){
next_chr();
if ($utf16) {
decode_error("missing low surrogate character in surrogate pair");
}
utf8::decode($s) if($is_utf8);
return $s;
}
elsif($ch eq '\\'){
next_chr();
if(exists $escapes{$ch}){
$s .= $escapes{$ch};
}
elsif($ch eq 'u'){ # UNICODE handling
my $u = '';
for(1..4){
$ch = next_chr();
last OUTER if($ch !~ /[0-9a-fA-F]/);
$u .= $ch;
}
# U+D800 - U+DBFF
if ($u =~ /^[dD][89abAB][0-9a-fA-F]{2}/) { # UTF-16 high surrogate?
$utf16 = $u;
}
# U+DC00 - U+DFFF
elsif ($u =~ /^[dD][c-fC-F][0-9a-fA-F]{2}/) { # UTF-16 low surrogate?
unless (defined $utf16) {
decode_error("missing high surrogate character in surrogate pair");
}
$is_utf8 = 1;
$s .= JSON_PP_decode_surrogates($utf16, $u) || next;
$utf16 = undef;
}
else {
if (defined $utf16) {
decode_error("surrogate pair expected");
}
if ( ( my $hex = hex( $u ) ) > 127 ) {
$is_utf8 = 1;
$s .= JSON_PP_decode_unicode($u) || next;
}
else {
$s .= chr $hex;
}
}
}
else{
unless ($loose) {
decode_error('illegal backslash escape sequence in string');
}
$s .= $ch;
}
}
else{
# Non-escape character: validate/encode bytes above 0x7F, then
# reject raw control characters unless the loose flag is set.
if ( ord $ch > 127 ) {
if ( $utf8 ) {
unless( $ch = is_valid_utf8($ch) ) {
$at -= 1;
decode_error("malformed UTF-8 character in JSON string");
}
else {
$at += $utf8_len - 1;
}
}
else {
utf8::encode( $ch );
}
$is_utf8 = 1;
}
if (!$loose) {
if ($ch =~ /[\x00-\x1f\x22\x5c]/) { # '/' ok
$at--;
decode_error('invalid character encountered while parsing JSON string');
}
}
$s .= $ch;
}
}
}
decode_error("unexpected end of string while parsing JSON string");
}
# white() — skip whitespace plus, depending on flags, // and /* */
# comments (always) and # comments (relaxed mode).
sub white {
while( defined $ch ){
if($ch le ' '){
next_chr();
}
elsif($ch eq '/'){
next_chr();
if(defined $ch and $ch eq '/'){
1 while(defined(next_chr()) and $ch ne "\n" and $ch ne "\r");
}
elsif(defined $ch and $ch eq '*'){
next_chr();
while(1){
if(defined $ch){
if($ch eq '*'){
if(defined(next_chr()) and $ch eq '/'){
next_chr();
last;
}
}
else{
next_chr();
}
}
else{
decode_error("Unterminated comment");
}
}
next;
}
else{
$at--;
decode_error("malformed JSON string, neither array, object, number, string or atom");
}
}
else{
if ($relaxed and $ch eq '#') { # correctly?
# Skip a relaxed-mode '#' comment to end of line using a
# regex anchored at the current offset.
pos($text) = $at;
$text =~ /\G([^\n]*(?:\r\n|\r|\n|$))/g;
$at = pos($text);
next_chr;
next;
}
last;
}
}
}
# array() — parse '[' ... ']'; the cursor is on the opening bracket.
# Relaxed mode tolerates a trailing comma before ']'.
sub array {
my $a = [];
decode_error('json text or perl structure exceeds maximum nesting level (max_depth set too low?)')
if (++$depth > $max_depth);
next_chr();
white();
if(defined $ch and $ch eq ']'){
--$depth;
next_chr();
return $a;
}
else {
while(defined($ch)){
push @$a, value();
white();
if (!defined $ch) {
last;
}
if($ch eq ']'){
--$depth;
next_chr();
return $a;
}
if($ch ne ','){
last;
}
next_chr();
white();
if ($relaxed and $ch eq ']') {
--$depth;
next_chr();
return $a;
}
}
}
decode_error(", or ] expected while parsing array");
}
# object() — parse '{' ... '}'; the cursor is on the opening brace.
# Supports bare keys (allow_barekey), trailing commas (relaxed), and runs
# decoded objects through _json_object_hook when a filter is installed.
sub object {
my $o = {};
my $k;
decode_error('json text or perl structure exceeds maximum nesting level (max_depth set too low?)')
if (++$depth > $max_depth);
next_chr();
white();
if(defined $ch and $ch eq '}'){
--$depth;
next_chr();
if ($F_HOOK) {
return _json_object_hook($o);
}
return $o;
}
else {
while (defined $ch) {
$k = ($allow_barekey and $ch ne '"' and $ch ne "'") ? bareKey() : string();
white();
if(!defined $ch or $ch ne ':'){
$at--;
decode_error("':' expected");
}
next_chr();
$o->{$k} = value();
white();
last if (!defined $ch);
if($ch eq '}'){
--$depth;
next_chr();
if ($F_HOOK) {
return _json_object_hook($o);
}
return $o;
}
if($ch ne ','){
last;
}
next_chr();
white();
if ($relaxed and $ch eq '}') {
--$depth;
next_chr();
if ($F_HOOK) {
return _json_object_hook($o);
}
return $o;
}
}
}
$at--;
decode_error(", or } expected while parsing object/hash");
}
# bareKey() — consume an unquoted object key (allow_barekey mode).
sub bareKey { # doesn't strictly follow Standard ECMA-262 3rd Edition
my $key;
while($ch =~ /[^\x00-\x23\x25-\x2F\x3A-\x40\x5B-\x5E\x60\x7B-\x7F]/){
$key .= $ch;
next_chr();
}
return $key;
}
# word() — parse the atoms true/false/null; 'false' needs a fifth-byte
# check because only four characters are sliced at once.
sub word {
my $word = substr($text,$at-1,4);
if($word eq 'true'){
$at += 3;
next_chr;
return $JSON::PP::true;
}
elsif($word eq 'null'){
$at += 3;
next_chr;
return undef;
}
elsif($word eq 'fals'){
$at += 3;
if(substr($text,$at,1) eq 'e'){
$at++;
next_chr;
return $JSON::PP::false;
}
}
$at--; # for decode_error report
decode_error("'null' expected") if ($word =~ /^n/);
decode_error("'true' expected") if ($word =~ /^t/);
decode_error("'false' expected") if ($word =~ /^f/);
decode_error("malformed JSON string, neither array, object, number, string or atom");
}
# number() — parse a JSON number; rejects leading zeros and hex/octal
# forms, validates fraction and exponent parts, and promotes to
# Math::BigInt/Math::BigFloat under allow_bignum.  Integers longer than
# $max_intsize are returned as strings to avoid precision loss.
sub number {
my $n = '';
my $v;
# According to RFC4627, hex or oct digits are invalid.
if($ch eq '0'){
my $peek = substr($text,$at,1);
my $hex = $peek =~ /[xX]/; # 0 or 1
if($hex){
decode_error("malformed number (leading zero must not be followed by another digit)");
($n) = ( substr($text, $at+1) =~ /^([0-9a-fA-F]+)/);
}
else{ # oct
($n) = ( substr($text, $at) =~ /^([0-7]+)/);
if (defined $n and length $n > 1) {
decode_error("malformed number (leading zero must not be followed by another digit)");
}
}
if(defined $n and length($n)){
if (!$hex and length($n) == 1) {
decode_error("malformed number (leading zero must not be followed by another digit)");
}
$at += length($n) + $hex;
next_chr;
return $hex ? hex($n) : oct($n);
}
}
if($ch eq '-'){
$n = '-';
next_chr;
if (!defined $ch or $ch !~ /\d/) {
decode_error("malformed number (no digits after initial minus)");
}
}
while(defined $ch and $ch =~ /\d/){
$n .= $ch;
next_chr;
}
if(defined $ch and $ch eq '.'){
$n .= '.';
next_chr;
if (!defined $ch or $ch !~ /\d/) {
decode_error("malformed number (no digits after decimal point)");
}
else {
$n .= $ch;
}
while(defined(next_chr) and $ch =~ /\d/){
$n .= $ch;
}
}
if(defined $ch and ($ch eq 'e' or $ch eq 'E')){
$n .= $ch;
next_chr;
if(defined($ch) and ($ch eq '+' or $ch eq '-')){
$n .= $ch;
next_chr;
if (!defined $ch or $ch =~ /\D/) {
decode_error("malformed number (no digits after exp sign)");
}
$n .= $ch;
}
elsif(defined($ch) and $ch =~ /\d/){
$n .= $ch;
}
else {
decode_error("malformed number (no digits after exp sign)");
}
while(defined(next_chr) and $ch =~ /\d/){
$n .= $ch;
}
}
$v .= $n;
if ($v !~ /[.eE]/ and length $v > $max_intsize) {
if ($allow_bigint) { # from Adam Sussman
require Math::BigInt;
return Math::BigInt->new($v);
}
else {
return "$v";
}
}
elsif ($allow_bigint) {
require Math::BigFloat;
return Math::BigFloat->new($v);
}
return 0+$v;
}
# is_valid_utf8($first_byte) — determine the expected UTF-8 sequence
# length from the lead byte, then validate the full sequence in $text at
# the current offset.  Returns the validated byte string or '' on
# malformed input; sets the closure's $utf8_len as a side effect.
sub is_valid_utf8 {
$utf8_len = $_[0] =~ /[\x00-\x7F]/ ? 1
: $_[0] =~ /[\xC2-\xDF]/ ? 2
: $_[0] =~ /[\xE0-\xEF]/ ? 3
: $_[0] =~ /[\xF0-\xF4]/ ? 4
: 0
;
return unless $utf8_len;
my $is_valid_utf8 = substr($text, $at - 1, $utf8_len);
return ( $is_valid_utf8 =~ /^(?:
[\x00-\x7F]
|[\xC2-\xDF][\x80-\xBF]
|[\xE0][\xA0-\xBF][\x80-\xBF]
|[\xE1-\xEC][\x80-\xBF][\x80-\xBF]
|[\xED][\x80-\x9F][\x80-\xBF]
|[\xEE-\xEF][\x80-\xBF][\x80-\xBF]
|[\xF0][\x90-\xBF][\x80-\xBF][\x80-\xBF]
|[\xF1-\xF3][\x80-\xBF][\x80-\xBF][\x80-\xBF]
|[\xF4][\x80-\x8F][\x80-\xBF][\x80-\xBF]
)$/x ) ? $is_valid_utf8 : '';
}
# decode_error($message [, $no_rep]) — croak with a printable rendering
# of the next ~20 characters of input unless $no_rep suppresses it.
sub decode_error {
my $error = shift;
my $no_rep = shift;
my $str = defined $text ? substr($text, $at) : '';
my $mess = '';
my $type = $] >= 5.008 ? 'U*'
: $] < 5.006 ? 'C*'
: utf8::is_utf8( $str ) ? 'U*' # 5.6
: 'C*'
;
for my $c ( unpack( $type, $str ) ) { # emulate pv_uni_display() ?
$mess .= $c == 0x07 ? '\a'
: $c == 0x09 ? '\t'
: $c == 0x0a ? '\n'
: $c == 0x0d ? '\r'
: $c == 0x0c ? '\f'
: $c < 0x20 ? sprintf('\x{%x}', $c)
: $c < 0x80 ? chr($c)
: sprintf('\x{%x}', $c)
;
if ( length $mess >= 20 ) {
$mess .= '...';
last;
}
}
unless ( length $mess ) {
$mess = '(end of string)';
}
Carp::croak (
$no_rep ? "$error" : "$error, at character offset $at (before \"$mess\")"
);
}
# _json_object_hook($hashref) — apply the single-key filter first, then
# the generic object filter; a filter replaces the object only when it
# returns exactly one value.
sub _json_object_hook {
my $o = $_[0];
my @ks = keys %{$o};
if ( $cb_sk_object and @ks == 1 and exists $cb_sk_object->{ $ks[0] } and ref $cb_sk_object->{ $ks[0] } ) {
my @val = $cb_sk_object->{ $ks[0] }->( $o->{$ks[0]} );
if (@val == 1) {
return $val[0];
}
}
my @val = $cb_object->($o) if ($cb_object);
if (@val == 0 or @val > 1) {
return $o;
}
else {
return $val[0];
}
}
# PP_decode_box() — expose the closure's mutable decoder state.
sub PP_decode_box {
{
text => $text,
at => $at,
ch => $ch,
len => $len,
depth => $depth,
encoding => $encoding,
is_valid_utf8 => $is_valid_utf8,
};
}
} # PARSE
# _decode_surrogates($high_hex, $low_hex) — from perlunicode.
# Combine a UTF-16 surrogate pair (two 4-digit hex strings) into one code
# point and return its UTF-8 byte string.
sub _decode_surrogates {
    my ( $high, $low ) = @_;
    my $code_point = 0x10000 + ( hex($high) - 0xD800 ) * 0x400 + ( hex($low) - 0xDC00 );
    my $bytes = pack 'U*', $code_point;
    utf8::encode($bytes);
    return $bytes;
}
# _decode_unicode($hex4)
# Turn a 4-digit hex escape (from \uXXXX) into the UTF-8 bytes of that
# code point.
sub _decode_unicode {
    my ($hex_digits) = @_;
    my $char = pack 'U', hex $hex_digits;
    utf8::encode($char);
    return $char;
}
###############################
# Utilities
#
# Prefer Scalar::Util::blessed; fall back to a pure-Perl emulation when
# Scalar::Util is unavailable.
BEGIN {
eval 'require Scalar::Util';
unless($@){
*JSON::PP::blessed = \&Scalar::Util::blessed;
}
else{ # This code is from Scalar::Util.
# warn $@;
eval 'sub UNIVERSAL::a_sub_not_likely_to_be_here { ref($_[0]) }';
*JSON::PP::blessed = sub {
local($@, $SIG{__DIE__}, $SIG{__WARN__});
ref($_[0]) ? eval { $_[0]->a_sub_not_likely_to_be_here } : undef;
};
}
}
# Shared true/false singletons; shamelessly copied and modified from
# JSON::XS code.
$JSON::PP::true = do { bless \(my $dummy = 1), "JSON::PP::Boolean" };
$JSON::PP::false = do { bless \(my $dummy = 0), "JSON::PP::Boolean" };
sub is_bool { defined $_[0] and UNIVERSAL::isa($_[0], "JSON::PP::Boolean"); }
sub true { $JSON::PP::true }
sub false { $JSON::PP::false }
sub null { undef; }
###############################
# Boolean wrapper class: a blessed scalar ref holding 0 or 1 that
# numifies transparently.  Note that ++/-- mutate the invocant in place.
package JSON::PP::Boolean;
use overload (
"0+" => sub { ${$_[0]} },
"++" => sub { $_[0] = ${$_[0]} + 1 },
"--" => sub { $_[0] = ${$_[0]} - 1 },
fallback => 1,
);
###############################
# Incremental parser: accumulates text across incr_parse() calls and
# hands complete top-level values to the coder's decode_prefix.
package JSON::PP::IncrParser;
use strict;
use constant INCR_M_WS => 0; # initial whitespace skipping
use constant INCR_M_STR => 1; # inside string
use constant INCR_M_BS => 2; # inside backslash
use constant INCR_M_JSON => 3; # outside anything, count nesting
use constant INCR_M_C0 => 4;
use constant INCR_M_C1 => 5;
$JSON::PP::IncrParser::VERSION = '1.01';
# Code-point unpack template; 5.005 lacks 'U'.
my $unpack_format = $] < 5.006 ? 'C*' : 'U*';
# new($class)
# Fresh incremental-parser state: no buffered text, zero nesting depth,
# scan position at the start, and not currently inside a parse.
sub new {
    my ($class) = @_;
    my $self = {
        incr_nest    => 0,
        incr_text    => undef,
        incr_parsing => 0,
        incr_p       => 0,
    };
    return bless $self, $class;
}
# incr_parse($self, $coder [, $text])
# Append $text to the internal buffer (upgrading it to match the buffer's
# UTF-8 state), then — only when a return value is wanted — parse: in
# list context, every complete value currently buffered; in scalar
# context, at most one value (or the empty string if incomplete).
sub incr_parse {
my ( $self, $coder, $text ) = @_;
$self->{incr_text} = '' unless ( defined $self->{incr_text} );
if ( defined $text ) {
if ( utf8::is_utf8( $text ) and !utf8::is_utf8( $self->{incr_text} ) ) {
utf8::upgrade( $self->{incr_text} ) ;
utf8::decode( $self->{incr_text} ) ;
}
$self->{incr_text} .= $text;
}
my $max_size = $coder->get_max_size;
if ( defined wantarray ) {
$self->{incr_mode} = INCR_M_WS;
if ( wantarray ) {
my @ret;
$self->{incr_parsing} = 1;
do {
push @ret, $self->_incr_parse( $coder, $self->{incr_text} );
unless ( !$self->{incr_nest} and $self->{incr_mode} == INCR_M_JSON ) {
$self->{incr_mode} = INCR_M_WS;
}
} until ( !$self->{incr_text} );
$self->{incr_parsing} = 0;
return @ret;
}
else { # in scalar context
$self->{incr_parsing} = 1;
my $obj = $self->_incr_parse( $coder, $self->{incr_text} );
$self->{incr_parsing} = 0 if defined $obj; # pointed by Martin J. Evans
return $obj ? $obj : undef; # $obj is an empty string, parsing was completed.
}
}
}
# _incr_parse($self, $coder, $text [, $skip])
# Scan $text from the saved position, tracking string mode and bracket
# nesting, until one complete top-level value has been seen; then decode
# that prefix with $coder->decode_prefix and trim it off the buffer.
# Returns the decoded value, '' when nothing decodable is buffered yet,
# or nothing (empty list/undef) while a nested value is still open.
sub _incr_parse {
    my ( $self, $coder, $text, $skip ) = @_;
    my $p = $self->{incr_p};
    my $restore = $p;
    my $len = length $text;
    # Skip leading whitespace until the first JSON character.
    if ( $self->{incr_mode} == INCR_M_WS ) {
        while ( $len > $p ) {
            my $s = substr( $text, $p, 1 );
            $p++ and next if ( 0x20 >= unpack($unpack_format, $s) );
            $self->{incr_mode} = INCR_M_JSON;
            last;
        }
    }
    # NOTE(review): escaped quotes (\") inside strings are not special-
    # cased here — INCR_M_BS is declared but never entered — so a string
    # containing \" can toggle string mode early.  Present upstream in
    # 2.27 as well; not changed here.
    while ( $len > $p ) {
        my $s = substr( $text, $p++, 1 );
        if ( $s eq '"' ) {
            if ( $self->{incr_mode} != INCR_M_STR ) {
                $self->{incr_mode} = INCR_M_STR;
            }
            else {
                $self->{incr_mode} = INCR_M_JSON;
                unless ( $self->{incr_nest} ) {
                    last;
                }
            }
        }
        if ( $self->{incr_mode} == INCR_M_JSON ) {
            if ( $s eq '[' or $s eq '{' ) {
                if ( ++$self->{incr_nest} > $coder->get_max_depth ) {
                    Carp::croak('json text or perl structure exceeds maximum nesting level (max_depth set too low?)');
                }
            }
            elsif ( $s eq ']' or $s eq '}' ) {
                last if ( --$self->{incr_nest} <= 0 );
            }
            elsif ( $s eq '#' ) {
                # Skip a '#' comment through to end of line.
                while ( $len > $p ) {
                    last if substr( $text, $p++, 1 ) eq "\n";
                }
            }
        }
    }
    $self->{incr_p} = $p;
    return if ( $self->{incr_mode} == INCR_M_JSON and $self->{incr_nest} > 0 );
    return '' unless ( length substr( $self->{incr_text}, 0, $p ) );
    local $Carp::CarpLevel = 2;
    $self->{incr_p} = $restore;
    $self->{incr_c} = $p;
    my ( $obj, $tail ) = $coder->decode_prefix( substr( $self->{incr_text}, 0, $p ) );
    $self->{incr_text} = substr( $self->{incr_text}, $p );
    $self->{incr_p} = 0;
    # BUG FIX: the original ended with "return $obj or '';", which parses
    # as "(return $obj) or ''" — the '' branch was dead code, so undef
    # could leak to callers that expect '' for "nothing decoded".
    return defined $obj ? $obj : '';
}
# incr_text($self) — read access to the unconsumed buffer; forbidden
# while a parse is in progress.
sub incr_text {
if ( $_[0]->{incr_parsing} ) {
Carp::croak("incr_text can not be called when the incremental parser already started parsing");
}
$_[0]->{incr_text};
}
# incr_skip($self) — drop the characters consumed by the last (failed)
# parse attempt and rewind the scan position.
sub incr_skip {
my $self = shift;
$self->{incr_text} = substr( $self->{incr_text}, $self->{incr_c} );
$self->{incr_p} = 0;
}
# incr_reset($self)
# Discard all buffered text and return the parser to its pristine state.
sub incr_reset {
    my ($self) = @_;
    $self->{incr_text} = undef;
    $self->{$_} = 0 for qw(incr_p incr_mode incr_nest);
    $self->{incr_parsing} = 0;
}
###############################
1;
__END__
=pod
=head1 NAME
JSON::PP - JSON::XS compatible pure-Perl module.
=head1 SYNOPSIS
use JSON::PP;
# exported functions, they croak on error
# and expect/generate UTF-8
$utf8_encoded_json_text = encode_json $perl_hash_or_arrayref;
$perl_hash_or_arrayref = decode_json $utf8_encoded_json_text;
# OO-interface
$coder = JSON::PP->new->ascii->pretty->allow_nonref;
$pretty_printed_unencoded = $coder->encode ($perl_scalar);
$perl_scalar = $coder->decode ($unicode_json_text);
# Note that JSON version 2.0 and above will automatically use
# JSON::XS or JSON::PP, so you should be able to just:
use JSON;
=head1 DESCRIPTION
This module is L<JSON::XS> compatible pure Perl module.
(Perl 5.8 or later is recommended)
JSON::XS is the fastest and most proper JSON module on CPAN.
It is written by Marc Lehmann in C, so must be compiled and
installed in the used environment.
JSON::PP is a pure-Perl module and has compatibility to JSON::XS.
=head2 FEATURES
=over
=item * correct unicode handling
This module knows how to handle Unicode (depending on Perl version).
See to L<JSON::XS/A FEW NOTES ON UNICODE AND PERL> and L<UNICODE HANDLING ON PERLS>.
=item * round-trip integrity
When you serialise a perl data structure using only datatypes supported by JSON,
the deserialised data structure is identical on the Perl level.
(e.g. the string "2.0" doesn't suddenly become "2" just because it looks like a number).
=item * strict checking of JSON correctness
There is no guessing, no generating of illegal JSON texts by default,
and only JSON is accepted as input by default (the latter is a security feature).
But when some options are set, loose checking features are available.
=back
=head1 FUNCTIONS
Basically, check to L<JSON> or L<JSON::XS>.
=head2 encode_json
$json_text = encode_json $perl_scalar
=head2 decode_json
$perl_scalar = decode_json $json_text
=head2 JSON::PP::true
Returns JSON true value which is blessed object.
It C<isa> JSON::PP::Boolean object.
=head2 JSON::PP::false
Returns JSON false value which is blessed object.
It C<isa> JSON::PP::Boolean object.
=head2 JSON::PP::null
Returns C<undef>.
=head1 METHODS
Basically, check to L<JSON> or L<JSON::XS>.
=head2 new
$json = new JSON::PP
Returns a new JSON::PP object that can be used to de/encode JSON
strings.
=head2 ascii
$json = $json->ascii([$enable])
$enabled = $json->get_ascii
If $enable is true (or missing), then the encode method will not generate characters outside
the code range 0..127. Any Unicode characters outside that range will be escaped using either
a single \uXXXX or a double \uHHHH\uLLLL escape sequence, as per RFC4627.
(See to L<JSON::XS/OBJECT-ORIENTED INTERFACE>).
In Perl 5.005, there is no character having high value (more than 255).
See to L<UNICODE HANDLING ON PERLS>.
If $enable is false, then the encode method will not escape Unicode characters unless
required by the JSON syntax or other flags. This results in a faster and more compact format.
JSON::PP->new->ascii(1)->encode([chr 0x10401])
=> ["\ud801\udc01"]
=head2 latin1
$json = $json->latin1([$enable])
$enabled = $json->get_latin1
If $enable is true (or missing), then the encode method will encode the resulting JSON
text as latin1 (or iso-8859-1), escaping any characters outside the code range 0..255.
If $enable is false, then the encode method will not escape Unicode characters
unless required by the JSON syntax or other flags.
JSON::XS->new->latin1->encode (["\x{89}\x{abc}"])
=> ["\x{89}\\u0abc"] # (perl syntax, U+abc escaped, U+89 not)
See to L<UNICODE HANDLING ON PERLS>.
=head2 utf8
$json = $json->utf8([$enable])
$enabled = $json->get_utf8
If $enable is true (or missing), then the encode method will encode the JSON result
into UTF-8, as required by many protocols, while the decode method expects to be handled
an UTF-8-encoded string. Please note that UTF-8-encoded strings do not contain any
characters outside the range 0..255, they are thus useful for bytewise/binary I/O.
(In Perl 5.005, any character outside the range 0..255 does not exist.
See to L<UNICODE HANDLING ON PERLS>.)
In future versions, enabling this option might enable autodetection of the UTF-16 and UTF-32
encoding families, as described in RFC4627.
If $enable is false, then the encode method will return the JSON string as a (non-encoded)
Unicode string, while decode expects thus a Unicode string. Any decoding or encoding
(e.g. to UTF-8 or UTF-16) needs to be done yourself, e.g. using the Encode module.
Example, output UTF-16BE-encoded JSON:
use Encode;
$jsontext = encode "UTF-16BE", JSON::XS->new->encode ($object);
Example, decode UTF-32LE-encoded JSON:
use Encode;
$object = JSON::XS->new->decode (decode "UTF-32LE", $jsontext);
=head2 pretty
$json = $json->pretty([$enable])
This enables (or disables) all of the C<indent>, C<space_before> and
C<space_after> flags in one call to generate the most readable
(or most compact) form possible.
=head2 indent
$json = $json->indent([$enable])
$enabled = $json->get_indent
The default indent space length is three.
You can use C<indent_length> to change the length.
=head2 space_before
$json = $json->space_before([$enable])
$enabled = $json->get_space_before
=head2 space_after
$json = $json->space_after([$enable])
$enabled = $json->get_space_after
=head2 relaxed
$json = $json->relaxed([$enable])
$enabled = $json->get_relaxed
=head2 canonical
$json = $json->canonical([$enable])
$enabled = $json->get_canonical
If you want your own sorting routine, you can give a code reference
or a subroutine name to C<sort_by>. See to C<JSON::PP OWN METHODS>.
=head2 allow_nonref
$json = $json->allow_nonref([$enable])
$enabled = $json->get_allow_nonref
=head2 allow_unknown
$json = $json->allow_unknown ([$enable])
$enabled = $json->get_allow_unknown
=head2 allow_blessed
$json = $json->allow_blessed([$enable])
$enabled = $json->get_allow_blessed
=head2 convert_blessed
$json = $json->convert_blessed([$enable])
$enabled = $json->get_convert_blessed
=head2 filter_json_object
$json = $json->filter_json_object([$coderef])
=head2 filter_json_single_key_object
$json = $json->filter_json_single_key_object($key [=> $coderef])
=head2 shrink
$json = $json->shrink([$enable])
$enabled = $json->get_shrink
In JSON::XS, this flag resizes strings generated by either
C<encode> or C<decode> to their minimum size possible.
It will also try to downgrade any strings to octet-form if possible.
In JSON::PP, it is noop about resizing strings but tries
C<utf8::downgrade> to the returned string by C<encode>.
See to L<utf8>.
See to L<JSON::XS/OBJECT-ORIENTED INTERFACE>
=head2 max_depth
$json = $json->max_depth([$maximum_nesting_depth])
$max_depth = $json->get_max_depth
Sets the maximum nesting level (default C<512>) accepted while encoding
or decoding. If a higher nesting level is detected in JSON text or a Perl
data structure, then the encoder and decoder will stop and croak at that
point.
Nesting level is defined by number of hash- or arrayrefs that the encoder
needs to traverse to reach a given point or the number of C<{> or C<[>
characters without their matching closing parenthesis crossed to reach a
given character in a string.
If no argument is given, the highest possible setting will be used, which
is rarely useful.
See L<JSON::XS/SECURITY CONSIDERATIONS> for more info on why this is useful.
When a large value (100 or more) is set and it de/encodes a deeply nested object/text,
it may raise the warning 'Deep recursion on subroutine' at the perl runtime phase.
=head2 max_size
$json = $json->max_size([$maximum_string_size])
$max_size = $json->get_max_size
Set the maximum length a JSON text may have (in bytes) where decoding is
being attempted. The default is C<0>, meaning no limit. When C<decode>
is called on a string that is longer than this many bytes, it will not
attempt to decode the string but throw an exception. This setting has no
effect on C<encode> (yet).
If no argument is given, the limit check will be deactivated (same as when
C<0> is specified).
See L<JSON::XS/SECURITY CONSIDERATIONS> for more info on why this is useful.
=head2 encode
$json_text = $json->encode($perl_scalar)
=head2 decode
$perl_scalar = $json->decode($json_text)
=head2 decode_prefix
($perl_scalar, $characters) = $json->decode_prefix($json_text)
=head1 INCREMENTAL PARSING
In JSON::XS 2.2, incremental parsing feature of JSON
texts was experimentally implemented.
Please check to L<JSON::XS/INCREMENTAL PARSING>.
=over 4
=item [void, scalar or list context] = $json->incr_parse ([$string])
This is the central parsing function. It can both append new text and
extract objects from the stream accumulated so far (both of these
functions are optional).
If C<$string> is given, then this string is appended to the already
existing JSON fragment stored in the C<$json> object.
After that, if the function is called in void context, it will simply
return without doing anything further. This can be used to add more text
in as many chunks as you want.
If the method is called in scalar context, then it will try to extract
exactly I<one> JSON object. If that is successful, it will return this
object, otherwise it will return C<undef>. If there is a parse error,
this method will croak just as C<decode> would do (one can then use
C<incr_skip> to skip the erroneous part). This is the most common way of
using the method.
And finally, in list context, it will try to extract as many objects
from the stream as it can find and return them, or the empty list
otherwise. For this to work, there must be no separators between the JSON
objects or arrays, instead they must be concatenated back-to-back. If
an error occurs, an exception will be raised as in the scalar context
case. Note that in this case, any previously-parsed JSON texts will be
lost.
=item $lvalue_string = $json->incr_text
This method returns the currently stored JSON fragment as an lvalue, that
is, you can manipulate it. This I<only> works when a preceding call to
C<incr_parse> in I<scalar context> successfully returned an object. Under
all other circumstances you must not call this function (I mean it.
although in simple tests it might actually work, it I<will> fail under
real world conditions). As a special exception, you can also call this
method before having parsed anything.
This function is useful in two cases: a) finding the trailing text after a
JSON object or b) parsing multiple JSON objects separated by non-JSON text
(such as commas).
In Perl 5.005, C<lvalue> attribute is not available.
You must write code like the below:
$string = $json->incr_text;
$string =~ s/\s*,\s*//;
$json->incr_text( $string );
=item $json->incr_skip
This will reset the state of the incremental parser and will remove the
parsed text from the input buffer. This is useful after C<incr_parse>
died, in which case the input buffer and incremental parser state is left
unchanged, to skip the text parsed so far and to reset the parse state.
=back
=head1 JSON::PP OWN METHODS
=head2 allow_singlequote
$json = $json->allow_singlequote([$enable])
If C<$enable> is true (or missing), then C<decode> will accept
JSON strings quoted by single quotations that are invalid JSON
format.
$json->allow_singlequote->decode(q|{"foo":'bar'}|);
$json->allow_singlequote->decode(q|{'foo':"bar"}|);
$json->allow_singlequote->decode(q|{'foo':'bar'}|);
As same as the C<relaxed> option, this option may be used to parse
application-specific files written by humans.
=head2 allow_barekey
$json = $json->allow_barekey([$enable])
If C<$enable> is true (or missing), then C<decode> will accept
bare keys of JSON object that are invalid JSON format.
As same as the C<relaxed> option, this option may be used to parse
application-specific files written by humans.
$json->allow_barekey->decode('{foo:"bar"}');
=head2 allow_bignum
$json = $json->allow_bignum([$enable])
If C<$enable> is true (or missing), then C<decode> will convert
the big integer Perl cannot handle as integer into a L<Math::BigInt>
object and convert a floating number (any) into a L<Math::BigFloat>.
On the contrary, C<encode> converts C<Math::BigInt> objects and C<Math::BigFloat>
objects into JSON numbers with C<allow_blessed> enabled.
$json->allow_nonref->allow_blessed->allow_bignum;
$bigfloat = $json->decode('2.000000000000000000000000001');
print $json->encode($bigfloat);
# => 2.000000000000000000000000001
See L<JSON::XS/MAPPING> about the normal conversion of JSON numbers.
=head2 loose
$json = $json->loose([$enable])
The unescaped [\x00-\x1f\x22\x2f\x5c] strings are invalid in JSON strings
and the module doesn't allow to C<decode> to these (except for \x2f).
If C<$enable> is true (or missing), then C<decode> will accept these
unescaped strings.
$json->loose->decode(qq|["abc
def"]|);
See L<JSON::XS/SECURITY CONSIDERATIONS>.
=head2 escape_slash
$json = $json->escape_slash([$enable])
According to JSON Grammar, I<slash> (U+002F) is escaped. But default
JSON::PP (as same as JSON::XS) encodes strings without escaping slash.
If C<$enable> is true (or missing), then C<encode> will escape slashes.
=head2 (OBSOLETED)as_nonblessed
$json = $json->as_nonblessed
(OBSOLETED) If C<$enable> is true (or missing), then C<encode> will convert
a blessed hash reference or a blessed array reference (contains
other blessed references) into JSON members and arrays.
This feature is effective only when C<allow_blessed> is enabled.
=head2 indent_length
$json = $json->indent_length($length)
JSON::XS indent space length is 3 and cannot be changed.
JSON::PP set the indent space length with the given $length.
The default is 3. The acceptable range is 0 to 15.
=head2 sort_by
$json = $json->sort_by($function_name)
$json = $json->sort_by($subroutine_ref)
If $function_name or $subroutine_ref is set, that sort routine is used
when encoding JSON objects.
$js = $pc->sort_by(sub { $JSON::PP::a cmp $JSON::PP::b })->encode($obj);
# is($js, q|{"a":1,"b":2,"c":3,"d":4,"e":5,"f":6,"g":7,"h":8,"i":9}|);
$js = $pc->sort_by('own_sort')->encode($obj);
# is($js, q|{"a":1,"b":2,"c":3,"d":4,"e":5,"f":6,"g":7,"h":8,"i":9}|);
sub JSON::PP::own_sort { $JSON::PP::a cmp $JSON::PP::b }
As the sorting routine runs in the JSON::PP scope, the given
subroutine name and the special variables C<$a>, C<$b> will begin
with 'JSON::PP::'.
If $integer is set, then the effect is same as C<canonical> on.
=head1 INTERNAL
For developers.
=over
=item PP_encode_box
Returns
{
depth => $depth,
indent_count => $indent_count,
}
=item PP_decode_box
Returns
{
text => $text,
at => $at,
ch => $ch,
len => $len,
depth => $depth,
encoding => $encoding,
is_valid_utf8 => $is_valid_utf8,
};
=back
=head1 MAPPING
See to L<JSON::XS/MAPPING>.
=head1 UNICODE HANDLING ON PERLS
If you do not know about Unicode on Perl well,
please check L<JSON::XS/A FEW NOTES ON UNICODE AND PERL>.
=head2 Perl 5.8 and later
Perl can handle Unicode and the JSON::PP de/encode methods also work properly.
$json->allow_nonref->encode(chr hex 3042);
$json->allow_nonref->encode(chr hex 12345);
Returns C<"\u3042"> and C<"\ud808\udf45"> respectively.
$json->allow_nonref->decode('"\u3042"');
$json->allow_nonref->decode('"\ud808\udf45"');
Returns UTF-8 encoded strings with UTF8 flag, regarded as C<U+3042> and C<U+12345>.
Note that in the versions from Perl 5.8.0 to 5.8.2, the Perl built-in C<join> was broken,
so JSON::PP wraps C<join> with a subroutine. Thus JSON::PP works slowly in those versions.
=head2 Perl 5.6
Perl can handle Unicode and the JSON::PP de/encode methods also work.
=head2 Perl 5.005
Perl 5.005 is a byte semantics world -- all strings are sequences of bytes.
That means the unicode handling is not available.
In encoding,
$json->allow_nonref->encode(chr hex 3042); # hex 3042 is 12354.
$json->allow_nonref->encode(chr hex 12345); # hex 12345 is 74565.
Returns C<B> and C<E>, as C<chr> takes a value more than 255, it treats
as C<$value % 256>, so the above codes are equivalent to :
$json->allow_nonref->encode(chr 66);
$json->allow_nonref->encode(chr 69);
In decoding,
$json->decode('"\u00e3\u0081\u0082"');
The returned is a byte sequence C<0xE3 0x81 0x82> for UTF-8 encoded
japanese character (C<HIRAGANA LETTER A>).
And if it is represented in Unicode code point, C<U+3042>.
Next,
$json->decode('"\u3042"');
We ordinarily expect the returned value to be a Unicode character C<U+3042>.
But here is 5.005 world. This is C<0xE3 0x81 0x82>.
$json->decode('"\ud808\udf45"');
This is not a character C<U+12345> but bytes - C<0xf0 0x92 0x8d 0x85>.
=head1 TODO
=over
=item speed
=item memory saving
=back
=head1 SEE ALSO
Most of the document are copied and modified from JSON::XS doc.
L<JSON::XS>
RFC4627 (L<http://www.ietf.org/rfc/rfc4627.txt>)
=head1 AUTHOR
Makamaka Hannyaharamitu, E<lt>makamaka[at]cpan.orgE<gt>
=head1 COPYRIGHT AND LICENSE
Copyright 2007-2010 by Makamaka Hannyaharamitu
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself.
=cut
| MG-RAST/MG-RAST | src/WebApplication/JSON/PP.pm | Perl | bsd-2-clause | 57,819 |
# This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.07) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
#
# Generated from /tmp/rnClxBLdxJ/northamerica. Olson data version 2013a
#
# Do not edit this file directly.
#
package DateTime::TimeZone::America::Toronto;
{
$DateTime::TimeZone::America::Toronto::VERSION = '1.57';
}
use strict;
use Class::Singleton 1.03;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;
@DateTime::TimeZone::America::Toronto::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );
my $spans =
[
[
DateTime::TimeZone::NEG_INFINITY, # utc_start
59768947052, # utc_end 1895-01-01 05:17:32 (Tue)
DateTime::TimeZone::NEG_INFINITY, # local_start
59768928000, # local_end 1895-01-01 00:00:00 (Tue)
-19052,
0,
'LMT',
],
[
59768947052, # utc_start 1895-01-01 05:17:32 (Tue)
60503612400, # utc_end 1918-04-14 07:00:00 (Sun)
59768929052, # local_start 1895-01-01 00:17:32 (Tue)
60503594400, # local_end 1918-04-14 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60503612400, # utc_start 1918-04-14 07:00:00 (Sun)
60520543200, # utc_end 1918-10-27 06:00:00 (Sun)
60503598000, # local_start 1918-04-14 03:00:00 (Sun)
60520528800, # local_end 1918-10-27 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60520543200, # utc_start 1918-10-27 06:00:00 (Sun)
60526242000, # utc_end 1919-01-01 05:00:00 (Wed)
60520525200, # local_start 1918-10-27 01:00:00 (Sun)
60526224000, # local_end 1919-01-01 00:00:00 (Wed)
-18000,
0,
'EST',
],
[
60526242000, # utc_start 1919-01-01 05:00:00 (Wed)
60533929800, # utc_end 1919-03-31 04:30:00 (Mon)
60526224000, # local_start 1919-01-01 00:00:00 (Wed)
60533911800, # local_end 1919-03-30 23:30:00 (Sun)
-18000,
0,
'EST',
],
[
60533929800, # utc_start 1919-03-31 04:30:00 (Mon)
60551985600, # utc_end 1919-10-26 04:00:00 (Sun)
60533915400, # local_start 1919-03-31 00:30:00 (Mon)
60551971200, # local_end 1919-10-26 00:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60551985600, # utc_start 1919-10-26 04:00:00 (Sun)
60568326000, # utc_end 1920-05-02 07:00:00 (Sun)
60551967600, # local_start 1919-10-25 23:00:00 (Sat)
60568308000, # local_end 1920-05-02 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60568326000, # utc_start 1920-05-02 07:00:00 (Sun)
60581016000, # utc_end 1920-09-26 04:00:00 (Sun)
60568311600, # local_start 1920-05-02 03:00:00 (Sun)
60581001600, # local_end 1920-09-26 00:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60581016000, # utc_start 1920-09-26 04:00:00 (Sun)
60600985200, # utc_end 1921-05-15 07:00:00 (Sun)
60580998000, # local_start 1920-09-25 23:00:00 (Sat)
60600967200, # local_end 1921-05-15 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60600985200, # utc_start 1921-05-15 07:00:00 (Sun)
60611608800, # utc_end 1921-09-15 06:00:00 (Thu)
60600970800, # local_start 1921-05-15 03:00:00 (Sun)
60611594400, # local_end 1921-09-15 02:00:00 (Thu)
-14400,
1,
'EDT',
],
[
60611608800, # utc_start 1921-09-15 06:00:00 (Thu)
60632434800, # utc_end 1922-05-14 07:00:00 (Sun)
60611590800, # local_start 1921-09-15 01:00:00 (Thu)
60632416800, # local_end 1922-05-14 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60632434800, # utc_start 1922-05-14 07:00:00 (Sun)
60643317600, # utc_end 1922-09-17 06:00:00 (Sun)
60632420400, # local_start 1922-05-14 03:00:00 (Sun)
60643303200, # local_end 1922-09-17 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60643317600, # utc_start 1922-09-17 06:00:00 (Sun)
60663884400, # utc_end 1923-05-13 07:00:00 (Sun)
60643299600, # local_start 1922-09-17 01:00:00 (Sun)
60663866400, # local_end 1923-05-13 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60663884400, # utc_start 1923-05-13 07:00:00 (Sun)
60674767200, # utc_end 1923-09-16 06:00:00 (Sun)
60663870000, # local_start 1923-05-13 03:00:00 (Sun)
60674752800, # local_end 1923-09-16 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60674767200, # utc_start 1923-09-16 06:00:00 (Sun)
60694729200, # utc_end 1924-05-04 07:00:00 (Sun)
60674749200, # local_start 1923-09-16 01:00:00 (Sun)
60694711200, # local_end 1924-05-04 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60694729200, # utc_start 1924-05-04 07:00:00 (Sun)
60706821600, # utc_end 1924-09-21 06:00:00 (Sun)
60694714800, # local_start 1924-05-04 03:00:00 (Sun)
60706807200, # local_end 1924-09-21 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60706821600, # utc_start 1924-09-21 06:00:00 (Sun)
60726178800, # utc_end 1925-05-03 07:00:00 (Sun)
60706803600, # local_start 1924-09-21 01:00:00 (Sun)
60726160800, # local_end 1925-05-03 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60726178800, # utc_start 1925-05-03 07:00:00 (Sun)
60738271200, # utc_end 1925-09-20 06:00:00 (Sun)
60726164400, # local_start 1925-05-03 03:00:00 (Sun)
60738256800, # local_end 1925-09-20 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60738271200, # utc_start 1925-09-20 06:00:00 (Sun)
60757628400, # utc_end 1926-05-02 07:00:00 (Sun)
60738253200, # local_start 1925-09-20 01:00:00 (Sun)
60757610400, # local_end 1926-05-02 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60757628400, # utc_start 1926-05-02 07:00:00 (Sun)
60769720800, # utc_end 1926-09-19 06:00:00 (Sun)
60757614000, # local_start 1926-05-02 03:00:00 (Sun)
60769706400, # local_end 1926-09-19 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60769720800, # utc_start 1926-09-19 06:00:00 (Sun)
60789078000, # utc_end 1927-05-01 07:00:00 (Sun)
60769702800, # local_start 1926-09-19 01:00:00 (Sun)
60789060000, # local_end 1927-05-01 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60789078000, # utc_start 1927-05-01 07:00:00 (Sun)
60801775200, # utc_end 1927-09-25 06:00:00 (Sun)
60789063600, # local_start 1927-05-01 03:00:00 (Sun)
60801760800, # local_end 1927-09-25 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60801775200, # utc_start 1927-09-25 06:00:00 (Sun)
60820527600, # utc_end 1928-04-29 07:00:00 (Sun)
60801757200, # local_start 1927-09-25 01:00:00 (Sun)
60820509600, # local_end 1928-04-29 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60820527600, # utc_start 1928-04-29 07:00:00 (Sun)
60833829600, # utc_end 1928-09-30 06:00:00 (Sun)
60820513200, # local_start 1928-04-29 03:00:00 (Sun)
60833815200, # local_end 1928-09-30 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60833829600, # utc_start 1928-09-30 06:00:00 (Sun)
60851977200, # utc_end 1929-04-28 07:00:00 (Sun)
60833811600, # local_start 1928-09-30 01:00:00 (Sun)
60851959200, # local_end 1929-04-28 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60851977200, # utc_start 1929-04-28 07:00:00 (Sun)
60865279200, # utc_end 1929-09-29 06:00:00 (Sun)
60851962800, # local_start 1929-04-28 03:00:00 (Sun)
60865264800, # local_end 1929-09-29 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60865279200, # utc_start 1929-09-29 06:00:00 (Sun)
60883426800, # utc_end 1930-04-27 07:00:00 (Sun)
60865261200, # local_start 1929-09-29 01:00:00 (Sun)
60883408800, # local_end 1930-04-27 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60883426800, # utc_start 1930-04-27 07:00:00 (Sun)
60896728800, # utc_end 1930-09-28 06:00:00 (Sun)
60883412400, # local_start 1930-04-27 03:00:00 (Sun)
60896714400, # local_end 1930-09-28 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60896728800, # utc_start 1930-09-28 06:00:00 (Sun)
60914876400, # utc_end 1931-04-26 07:00:00 (Sun)
60896710800, # local_start 1930-09-28 01:00:00 (Sun)
60914858400, # local_end 1931-04-26 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60914876400, # utc_start 1931-04-26 07:00:00 (Sun)
60928178400, # utc_end 1931-09-27 06:00:00 (Sun)
60914862000, # local_start 1931-04-26 03:00:00 (Sun)
60928164000, # local_end 1931-09-27 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60928178400, # utc_start 1931-09-27 06:00:00 (Sun)
60946930800, # utc_end 1932-05-01 07:00:00 (Sun)
60928160400, # local_start 1931-09-27 01:00:00 (Sun)
60946912800, # local_end 1932-05-01 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60946930800, # utc_start 1932-05-01 07:00:00 (Sun)
60959628000, # utc_end 1932-09-25 06:00:00 (Sun)
60946916400, # local_start 1932-05-01 03:00:00 (Sun)
60959613600, # local_end 1932-09-25 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60959628000, # utc_start 1932-09-25 06:00:00 (Sun)
60978380400, # utc_end 1933-04-30 07:00:00 (Sun)
60959610000, # local_start 1932-09-25 01:00:00 (Sun)
60978362400, # local_end 1933-04-30 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
60978380400, # utc_start 1933-04-30 07:00:00 (Sun)
60991682400, # utc_end 1933-10-01 06:00:00 (Sun)
60978366000, # local_start 1933-04-30 03:00:00 (Sun)
60991668000, # local_end 1933-10-01 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
60991682400, # utc_start 1933-10-01 06:00:00 (Sun)
61009830000, # utc_end 1934-04-29 07:00:00 (Sun)
60991664400, # local_start 1933-10-01 01:00:00 (Sun)
61009812000, # local_end 1934-04-29 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61009830000, # utc_start 1934-04-29 07:00:00 (Sun)
61023132000, # utc_end 1934-09-30 06:00:00 (Sun)
61009815600, # local_start 1934-04-29 03:00:00 (Sun)
61023117600, # local_end 1934-09-30 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61023132000, # utc_start 1934-09-30 06:00:00 (Sun)
61041279600, # utc_end 1935-04-28 07:00:00 (Sun)
61023114000, # local_start 1934-09-30 01:00:00 (Sun)
61041261600, # local_end 1935-04-28 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61041279600, # utc_start 1935-04-28 07:00:00 (Sun)
61054581600, # utc_end 1935-09-29 06:00:00 (Sun)
61041265200, # local_start 1935-04-28 03:00:00 (Sun)
61054567200, # local_end 1935-09-29 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61054581600, # utc_start 1935-09-29 06:00:00 (Sun)
61072729200, # utc_end 1936-04-26 07:00:00 (Sun)
61054563600, # local_start 1935-09-29 01:00:00 (Sun)
61072711200, # local_end 1936-04-26 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61072729200, # utc_start 1936-04-26 07:00:00 (Sun)
61086031200, # utc_end 1936-09-27 06:00:00 (Sun)
61072714800, # local_start 1936-04-26 03:00:00 (Sun)
61086016800, # local_end 1936-09-27 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61086031200, # utc_start 1936-09-27 06:00:00 (Sun)
61104178800, # utc_end 1937-04-25 07:00:00 (Sun)
61086013200, # local_start 1936-09-27 01:00:00 (Sun)
61104160800, # local_end 1937-04-25 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61104178800, # utc_start 1937-04-25 07:00:00 (Sun)
61117480800, # utc_end 1937-09-26 06:00:00 (Sun)
61104164400, # local_start 1937-04-25 03:00:00 (Sun)
61117466400, # local_end 1937-09-26 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61117480800, # utc_start 1937-09-26 06:00:00 (Sun)
61135628400, # utc_end 1938-04-24 07:00:00 (Sun)
61117462800, # local_start 1937-09-26 01:00:00 (Sun)
61135610400, # local_end 1938-04-24 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61135628400, # utc_start 1938-04-24 07:00:00 (Sun)
61148930400, # utc_end 1938-09-25 06:00:00 (Sun)
61135614000, # local_start 1938-04-24 03:00:00 (Sun)
61148916000, # local_end 1938-09-25 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61148930400, # utc_start 1938-09-25 06:00:00 (Sun)
61167682800, # utc_end 1939-04-30 07:00:00 (Sun)
61148912400, # local_start 1938-09-25 01:00:00 (Sun)
61167664800, # local_end 1939-04-30 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61167682800, # utc_start 1939-04-30 07:00:00 (Sun)
61180380000, # utc_end 1939-09-24 06:00:00 (Sun)
61167668400, # local_start 1939-04-30 03:00:00 (Sun)
61180365600, # local_end 1939-09-24 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61180380000, # utc_start 1939-09-24 06:00:00 (Sun)
61199132400, # utc_end 1940-04-28 07:00:00 (Sun)
61180362000, # local_start 1939-09-24 01:00:00 (Sun)
61199114400, # local_end 1940-04-28 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61199132400, # utc_start 1940-04-28 07:00:00 (Sun)
61255465200, # utc_end 1942-02-09 07:00:00 (Mon)
61199118000, # local_start 1940-04-28 03:00:00 (Sun)
61255450800, # local_end 1942-02-09 03:00:00 (Mon)
-14400,
1,
'EDT',
],
[
61255465200, # utc_start 1942-02-09 07:00:00 (Mon)
61366287600, # utc_end 1945-08-14 23:00:00 (Tue)
61255450800, # local_start 1942-02-09 03:00:00 (Mon)
61366273200, # local_end 1945-08-14 19:00:00 (Tue)
-14400,
1,
'EWT',
],
[
61366287600, # utc_start 1945-08-14 23:00:00 (Tue)
61370287200, # utc_end 1945-09-30 06:00:00 (Sun)
61366273200, # local_start 1945-08-14 19:00:00 (Tue)
61370272800, # local_end 1945-09-30 02:00:00 (Sun)
-14400,
1,
'EPT',
],
[
61370287200, # utc_start 1945-09-30 06:00:00 (Sun)
61378318800, # utc_end 1946-01-01 05:00:00 (Tue)
61370269200, # local_start 1945-09-30 01:00:00 (Sun)
61378300800, # local_end 1946-01-01 00:00:00 (Tue)
-18000,
0,
'EST',
],
[
61378318800, # utc_start 1946-01-01 05:00:00 (Tue)
61388434800, # utc_end 1946-04-28 07:00:00 (Sun)
61378300800, # local_start 1946-01-01 00:00:00 (Tue)
61388416800, # local_end 1946-04-28 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61388434800, # utc_start 1946-04-28 07:00:00 (Sun)
61401736800, # utc_end 1946-09-29 06:00:00 (Sun)
61388420400, # local_start 1946-04-28 03:00:00 (Sun)
61401722400, # local_end 1946-09-29 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61401736800, # utc_start 1946-09-29 06:00:00 (Sun)
61419877200, # utc_end 1947-04-27 05:00:00 (Sun)
61401718800, # local_start 1946-09-29 01:00:00 (Sun)
61419859200, # local_end 1947-04-27 00:00:00 (Sun)
-18000,
0,
'EST',
],
[
61419877200, # utc_start 1947-04-27 05:00:00 (Sun)
61433179200, # utc_end 1947-09-28 04:00:00 (Sun)
61419862800, # local_start 1947-04-27 01:00:00 (Sun)
61433164800, # local_end 1947-09-28 00:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61433179200, # utc_start 1947-09-28 04:00:00 (Sun)
61451326800, # utc_end 1948-04-25 05:00:00 (Sun)
61433161200, # local_start 1947-09-27 23:00:00 (Sat)
61451308800, # local_end 1948-04-25 00:00:00 (Sun)
-18000,
0,
'EST',
],
[
61451326800, # utc_start 1948-04-25 05:00:00 (Sun)
61464628800, # utc_end 1948-09-26 04:00:00 (Sun)
61451312400, # local_start 1948-04-25 01:00:00 (Sun)
61464614400, # local_end 1948-09-26 00:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61464628800, # utc_start 1948-09-26 04:00:00 (Sun)
61482776400, # utc_end 1949-04-24 05:00:00 (Sun)
61464610800, # local_start 1948-09-25 23:00:00 (Sat)
61482758400, # local_end 1949-04-24 00:00:00 (Sun)
-18000,
0,
'EST',
],
[
61482776400, # utc_start 1949-04-24 05:00:00 (Sun)
61501521600, # utc_end 1949-11-27 04:00:00 (Sun)
61482762000, # local_start 1949-04-24 01:00:00 (Sun)
61501507200, # local_end 1949-11-27 00:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61501521600, # utc_start 1949-11-27 04:00:00 (Sun)
61514838000, # utc_end 1950-04-30 07:00:00 (Sun)
61501503600, # local_start 1949-11-26 23:00:00 (Sat)
61514820000, # local_end 1950-04-30 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61514838000, # utc_start 1950-04-30 07:00:00 (Sun)
61532978400, # utc_end 1950-11-26 06:00:00 (Sun)
61514823600, # local_start 1950-04-30 03:00:00 (Sun)
61532964000, # local_end 1950-11-26 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61532978400, # utc_start 1950-11-26 06:00:00 (Sun)
61546287600, # utc_end 1951-04-29 07:00:00 (Sun)
61532960400, # local_start 1950-11-26 01:00:00 (Sun)
61546269600, # local_end 1951-04-29 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61546287600, # utc_start 1951-04-29 07:00:00 (Sun)
61559589600, # utc_end 1951-09-30 06:00:00 (Sun)
61546273200, # local_start 1951-04-29 03:00:00 (Sun)
61559575200, # local_end 1951-09-30 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61559589600, # utc_start 1951-09-30 06:00:00 (Sun)
61577737200, # utc_end 1952-04-27 07:00:00 (Sun)
61559571600, # local_start 1951-09-30 01:00:00 (Sun)
61577719200, # local_end 1952-04-27 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61577737200, # utc_start 1952-04-27 07:00:00 (Sun)
61591039200, # utc_end 1952-09-28 06:00:00 (Sun)
61577722800, # local_start 1952-04-27 03:00:00 (Sun)
61591024800, # local_end 1952-09-28 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61591039200, # utc_start 1952-09-28 06:00:00 (Sun)
61609186800, # utc_end 1953-04-26 07:00:00 (Sun)
61591021200, # local_start 1952-09-28 01:00:00 (Sun)
61609168800, # local_end 1953-04-26 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61609186800, # utc_start 1953-04-26 07:00:00 (Sun)
61622488800, # utc_end 1953-09-27 06:00:00 (Sun)
61609172400, # local_start 1953-04-26 03:00:00 (Sun)
61622474400, # local_end 1953-09-27 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61622488800, # utc_start 1953-09-27 06:00:00 (Sun)
61640636400, # utc_end 1954-04-25 07:00:00 (Sun)
61622470800, # local_start 1953-09-27 01:00:00 (Sun)
61640618400, # local_end 1954-04-25 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61640636400, # utc_start 1954-04-25 07:00:00 (Sun)
61653938400, # utc_end 1954-09-26 06:00:00 (Sun)
61640622000, # local_start 1954-04-25 03:00:00 (Sun)
61653924000, # local_end 1954-09-26 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61653938400, # utc_start 1954-09-26 06:00:00 (Sun)
61672086000, # utc_end 1955-04-24 07:00:00 (Sun)
61653920400, # local_start 1954-09-26 01:00:00 (Sun)
61672068000, # local_end 1955-04-24 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61672086000, # utc_start 1955-04-24 07:00:00 (Sun)
61685388000, # utc_end 1955-09-25 06:00:00 (Sun)
61672071600, # local_start 1955-04-24 03:00:00 (Sun)
61685373600, # local_end 1955-09-25 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61685388000, # utc_start 1955-09-25 06:00:00 (Sun)
61704140400, # utc_end 1956-04-29 07:00:00 (Sun)
61685370000, # local_start 1955-09-25 01:00:00 (Sun)
61704122400, # local_end 1956-04-29 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61704140400, # utc_start 1956-04-29 07:00:00 (Sun)
61717442400, # utc_end 1956-09-30 06:00:00 (Sun)
61704126000, # local_start 1956-04-29 03:00:00 (Sun)
61717428000, # local_end 1956-09-30 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61717442400, # utc_start 1956-09-30 06:00:00 (Sun)
61735590000, # utc_end 1957-04-28 07:00:00 (Sun)
61717424400, # local_start 1956-09-30 01:00:00 (Sun)
61735572000, # local_end 1957-04-28 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61735590000, # utc_start 1957-04-28 07:00:00 (Sun)
61751311200, # utc_end 1957-10-27 06:00:00 (Sun)
61735575600, # local_start 1957-04-28 03:00:00 (Sun)
61751296800, # local_end 1957-10-27 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61751311200, # utc_start 1957-10-27 06:00:00 (Sun)
61767039600, # utc_end 1958-04-27 07:00:00 (Sun)
61751293200, # local_start 1957-10-27 01:00:00 (Sun)
61767021600, # local_end 1958-04-27 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61767039600, # utc_start 1958-04-27 07:00:00 (Sun)
61782760800, # utc_end 1958-10-26 06:00:00 (Sun)
61767025200, # local_start 1958-04-27 03:00:00 (Sun)
61782746400, # local_end 1958-10-26 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61782760800, # utc_start 1958-10-26 06:00:00 (Sun)
61798489200, # utc_end 1959-04-26 07:00:00 (Sun)
61782742800, # local_start 1958-10-26 01:00:00 (Sun)
61798471200, # local_end 1959-04-26 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61798489200, # utc_start 1959-04-26 07:00:00 (Sun)
61814210400, # utc_end 1959-10-25 06:00:00 (Sun)
61798474800, # local_start 1959-04-26 03:00:00 (Sun)
61814196000, # local_end 1959-10-25 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61814210400, # utc_start 1959-10-25 06:00:00 (Sun)
61829938800, # utc_end 1960-04-24 07:00:00 (Sun)
61814192400, # local_start 1959-10-25 01:00:00 (Sun)
61829920800, # local_end 1960-04-24 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61829938800, # utc_start 1960-04-24 07:00:00 (Sun)
61846264800, # utc_end 1960-10-30 06:00:00 (Sun)
61829924400, # local_start 1960-04-24 03:00:00 (Sun)
61846250400, # local_end 1960-10-30 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61846264800, # utc_start 1960-10-30 06:00:00 (Sun)
61861993200, # utc_end 1961-04-30 07:00:00 (Sun)
61846246800, # local_start 1960-10-30 01:00:00 (Sun)
61861975200, # local_end 1961-04-30 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61861993200, # utc_start 1961-04-30 07:00:00 (Sun)
61877714400, # utc_end 1961-10-29 06:00:00 (Sun)
61861978800, # local_start 1961-04-30 03:00:00 (Sun)
61877700000, # local_end 1961-10-29 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61877714400, # utc_start 1961-10-29 06:00:00 (Sun)
61893442800, # utc_end 1962-04-29 07:00:00 (Sun)
61877696400, # local_start 1961-10-29 01:00:00 (Sun)
61893424800, # local_end 1962-04-29 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61893442800, # utc_start 1962-04-29 07:00:00 (Sun)
61909164000, # utc_end 1962-10-28 06:00:00 (Sun)
61893428400, # local_start 1962-04-29 03:00:00 (Sun)
61909149600, # local_end 1962-10-28 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61909164000, # utc_start 1962-10-28 06:00:00 (Sun)
61924892400, # utc_end 1963-04-28 07:00:00 (Sun)
61909146000, # local_start 1962-10-28 01:00:00 (Sun)
61924874400, # local_end 1963-04-28 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61924892400, # utc_start 1963-04-28 07:00:00 (Sun)
61940613600, # utc_end 1963-10-27 06:00:00 (Sun)
61924878000, # local_start 1963-04-28 03:00:00 (Sun)
61940599200, # local_end 1963-10-27 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61940613600, # utc_start 1963-10-27 06:00:00 (Sun)
61956342000, # utc_end 1964-04-26 07:00:00 (Sun)
61940595600, # local_start 1963-10-27 01:00:00 (Sun)
61956324000, # local_end 1964-04-26 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61956342000, # utc_start 1964-04-26 07:00:00 (Sun)
61972063200, # utc_end 1964-10-25 06:00:00 (Sun)
61956327600, # local_start 1964-04-26 03:00:00 (Sun)
61972048800, # local_end 1964-10-25 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
61972063200, # utc_start 1964-10-25 06:00:00 (Sun)
61987791600, # utc_end 1965-04-25 07:00:00 (Sun)
61972045200, # local_start 1964-10-25 01:00:00 (Sun)
61987773600, # local_end 1965-04-25 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
61987791600, # utc_start 1965-04-25 07:00:00 (Sun)
62004117600, # utc_end 1965-10-31 06:00:00 (Sun)
61987777200, # local_start 1965-04-25 03:00:00 (Sun)
62004103200, # local_end 1965-10-31 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62004117600, # utc_start 1965-10-31 06:00:00 (Sun)
62019241200, # utc_end 1966-04-24 07:00:00 (Sun)
62004099600, # local_start 1965-10-31 01:00:00 (Sun)
62019223200, # local_end 1966-04-24 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62019241200, # utc_start 1966-04-24 07:00:00 (Sun)
62035567200, # utc_end 1966-10-30 06:00:00 (Sun)
62019226800, # local_start 1966-04-24 03:00:00 (Sun)
62035552800, # local_end 1966-10-30 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62035567200, # utc_start 1966-10-30 06:00:00 (Sun)
62051295600, # utc_end 1967-04-30 07:00:00 (Sun)
62035549200, # local_start 1966-10-30 01:00:00 (Sun)
62051277600, # local_end 1967-04-30 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62051295600, # utc_start 1967-04-30 07:00:00 (Sun)
62067016800, # utc_end 1967-10-29 06:00:00 (Sun)
62051281200, # local_start 1967-04-30 03:00:00 (Sun)
62067002400, # local_end 1967-10-29 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62067016800, # utc_start 1967-10-29 06:00:00 (Sun)
62082745200, # utc_end 1968-04-28 07:00:00 (Sun)
62066998800, # local_start 1967-10-29 01:00:00 (Sun)
62082727200, # local_end 1968-04-28 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62082745200, # utc_start 1968-04-28 07:00:00 (Sun)
62098466400, # utc_end 1968-10-27 06:00:00 (Sun)
62082730800, # local_start 1968-04-28 03:00:00 (Sun)
62098452000, # local_end 1968-10-27 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62098466400, # utc_start 1968-10-27 06:00:00 (Sun)
62114194800, # utc_end 1969-04-27 07:00:00 (Sun)
62098448400, # local_start 1968-10-27 01:00:00 (Sun)
62114176800, # local_end 1969-04-27 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62114194800, # utc_start 1969-04-27 07:00:00 (Sun)
62129916000, # utc_end 1969-10-26 06:00:00 (Sun)
62114180400, # local_start 1969-04-27 03:00:00 (Sun)
62129901600, # local_end 1969-10-26 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62129916000, # utc_start 1969-10-26 06:00:00 (Sun)
62145644400, # utc_end 1970-04-26 07:00:00 (Sun)
62129898000, # local_start 1969-10-26 01:00:00 (Sun)
62145626400, # local_end 1970-04-26 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62145644400, # utc_start 1970-04-26 07:00:00 (Sun)
62161365600, # utc_end 1970-10-25 06:00:00 (Sun)
62145630000, # local_start 1970-04-26 03:00:00 (Sun)
62161351200, # local_end 1970-10-25 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62161365600, # utc_start 1970-10-25 06:00:00 (Sun)
62177094000, # utc_end 1971-04-25 07:00:00 (Sun)
62161347600, # local_start 1970-10-25 01:00:00 (Sun)
62177076000, # local_end 1971-04-25 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62177094000, # utc_start 1971-04-25 07:00:00 (Sun)
62193420000, # utc_end 1971-10-31 06:00:00 (Sun)
62177079600, # local_start 1971-04-25 03:00:00 (Sun)
62193405600, # local_end 1971-10-31 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62193420000, # utc_start 1971-10-31 06:00:00 (Sun)
62209148400, # utc_end 1972-04-30 07:00:00 (Sun)
62193402000, # local_start 1971-10-31 01:00:00 (Sun)
62209130400, # local_end 1972-04-30 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62209148400, # utc_start 1972-04-30 07:00:00 (Sun)
62224869600, # utc_end 1972-10-29 06:00:00 (Sun)
62209134000, # local_start 1972-04-30 03:00:00 (Sun)
62224855200, # local_end 1972-10-29 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62224869600, # utc_start 1972-10-29 06:00:00 (Sun)
62240598000, # utc_end 1973-04-29 07:00:00 (Sun)
62224851600, # local_start 1972-10-29 01:00:00 (Sun)
62240580000, # local_end 1973-04-29 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62240598000, # utc_start 1973-04-29 07:00:00 (Sun)
62256319200, # utc_end 1973-10-28 06:00:00 (Sun)
62240583600, # local_start 1973-04-29 03:00:00 (Sun)
62256304800, # local_end 1973-10-28 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62256319200, # utc_start 1973-10-28 06:00:00 (Sun)
62261931600, # utc_end 1974-01-01 05:00:00 (Tue)
62256301200, # local_start 1973-10-28 01:00:00 (Sun)
62261913600, # local_end 1974-01-01 00:00:00 (Tue)
-18000,
0,
'EST',
],
[
62261931600, # utc_start 1974-01-01 05:00:00 (Tue)
62272047600, # utc_end 1974-04-28 07:00:00 (Sun)
62261913600, # local_start 1974-01-01 00:00:00 (Tue)
62272029600, # local_end 1974-04-28 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62272047600, # utc_start 1974-04-28 07:00:00 (Sun)
62287768800, # utc_end 1974-10-27 06:00:00 (Sun)
62272033200, # local_start 1974-04-28 03:00:00 (Sun)
62287754400, # local_end 1974-10-27 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62287768800, # utc_start 1974-10-27 06:00:00 (Sun)
62303497200, # utc_end 1975-04-27 07:00:00 (Sun)
62287750800, # local_start 1974-10-27 01:00:00 (Sun)
62303479200, # local_end 1975-04-27 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62303497200, # utc_start 1975-04-27 07:00:00 (Sun)
62319218400, # utc_end 1975-10-26 06:00:00 (Sun)
62303482800, # local_start 1975-04-27 03:00:00 (Sun)
62319204000, # local_end 1975-10-26 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62319218400, # utc_start 1975-10-26 06:00:00 (Sun)
62334946800, # utc_end 1976-04-25 07:00:00 (Sun)
62319200400, # local_start 1975-10-26 01:00:00 (Sun)
62334928800, # local_end 1976-04-25 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62334946800, # utc_start 1976-04-25 07:00:00 (Sun)
62351272800, # utc_end 1976-10-31 06:00:00 (Sun)
62334932400, # local_start 1976-04-25 03:00:00 (Sun)
62351258400, # local_end 1976-10-31 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62351272800, # utc_start 1976-10-31 06:00:00 (Sun)
62366396400, # utc_end 1977-04-24 07:00:00 (Sun)
62351254800, # local_start 1976-10-31 01:00:00 (Sun)
62366378400, # local_end 1977-04-24 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62366396400, # utc_start 1977-04-24 07:00:00 (Sun)
62382722400, # utc_end 1977-10-30 06:00:00 (Sun)
62366382000, # local_start 1977-04-24 03:00:00 (Sun)
62382708000, # local_end 1977-10-30 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62382722400, # utc_start 1977-10-30 06:00:00 (Sun)
62398450800, # utc_end 1978-04-30 07:00:00 (Sun)
62382704400, # local_start 1977-10-30 01:00:00 (Sun)
62398432800, # local_end 1978-04-30 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62398450800, # utc_start 1978-04-30 07:00:00 (Sun)
62414172000, # utc_end 1978-10-29 06:00:00 (Sun)
62398436400, # local_start 1978-04-30 03:00:00 (Sun)
62414157600, # local_end 1978-10-29 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62414172000, # utc_start 1978-10-29 06:00:00 (Sun)
62429900400, # utc_end 1979-04-29 07:00:00 (Sun)
62414154000, # local_start 1978-10-29 01:00:00 (Sun)
62429882400, # local_end 1979-04-29 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62429900400, # utc_start 1979-04-29 07:00:00 (Sun)
62445621600, # utc_end 1979-10-28 06:00:00 (Sun)
62429886000, # local_start 1979-04-29 03:00:00 (Sun)
62445607200, # local_end 1979-10-28 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62445621600, # utc_start 1979-10-28 06:00:00 (Sun)
62461350000, # utc_end 1980-04-27 07:00:00 (Sun)
62445603600, # local_start 1979-10-28 01:00:00 (Sun)
62461332000, # local_end 1980-04-27 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62461350000, # utc_start 1980-04-27 07:00:00 (Sun)
62477071200, # utc_end 1980-10-26 06:00:00 (Sun)
62461335600, # local_start 1980-04-27 03:00:00 (Sun)
62477056800, # local_end 1980-10-26 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62477071200, # utc_start 1980-10-26 06:00:00 (Sun)
62492799600, # utc_end 1981-04-26 07:00:00 (Sun)
62477053200, # local_start 1980-10-26 01:00:00 (Sun)
62492781600, # local_end 1981-04-26 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62492799600, # utc_start 1981-04-26 07:00:00 (Sun)
62508520800, # utc_end 1981-10-25 06:00:00 (Sun)
62492785200, # local_start 1981-04-26 03:00:00 (Sun)
62508506400, # local_end 1981-10-25 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62508520800, # utc_start 1981-10-25 06:00:00 (Sun)
62524249200, # utc_end 1982-04-25 07:00:00 (Sun)
62508502800, # local_start 1981-10-25 01:00:00 (Sun)
62524231200, # local_end 1982-04-25 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62524249200, # utc_start 1982-04-25 07:00:00 (Sun)
62540575200, # utc_end 1982-10-31 06:00:00 (Sun)
62524234800, # local_start 1982-04-25 03:00:00 (Sun)
62540560800, # local_end 1982-10-31 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62540575200, # utc_start 1982-10-31 06:00:00 (Sun)
62555698800, # utc_end 1983-04-24 07:00:00 (Sun)
62540557200, # local_start 1982-10-31 01:00:00 (Sun)
62555680800, # local_end 1983-04-24 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62555698800, # utc_start 1983-04-24 07:00:00 (Sun)
62572024800, # utc_end 1983-10-30 06:00:00 (Sun)
62555684400, # local_start 1983-04-24 03:00:00 (Sun)
62572010400, # local_end 1983-10-30 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62572024800, # utc_start 1983-10-30 06:00:00 (Sun)
62587753200, # utc_end 1984-04-29 07:00:00 (Sun)
62572006800, # local_start 1983-10-30 01:00:00 (Sun)
62587735200, # local_end 1984-04-29 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62587753200, # utc_start 1984-04-29 07:00:00 (Sun)
62603474400, # utc_end 1984-10-28 06:00:00 (Sun)
62587738800, # local_start 1984-04-29 03:00:00 (Sun)
62603460000, # local_end 1984-10-28 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62603474400, # utc_start 1984-10-28 06:00:00 (Sun)
62619202800, # utc_end 1985-04-28 07:00:00 (Sun)
62603456400, # local_start 1984-10-28 01:00:00 (Sun)
62619184800, # local_end 1985-04-28 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62619202800, # utc_start 1985-04-28 07:00:00 (Sun)
62634924000, # utc_end 1985-10-27 06:00:00 (Sun)
62619188400, # local_start 1985-04-28 03:00:00 (Sun)
62634909600, # local_end 1985-10-27 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62634924000, # utc_start 1985-10-27 06:00:00 (Sun)
62650652400, # utc_end 1986-04-27 07:00:00 (Sun)
62634906000, # local_start 1985-10-27 01:00:00 (Sun)
62650634400, # local_end 1986-04-27 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62650652400, # utc_start 1986-04-27 07:00:00 (Sun)
62666373600, # utc_end 1986-10-26 06:00:00 (Sun)
62650638000, # local_start 1986-04-27 03:00:00 (Sun)
62666359200, # local_end 1986-10-26 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62666373600, # utc_start 1986-10-26 06:00:00 (Sun)
62680287600, # utc_end 1987-04-05 07:00:00 (Sun)
62666355600, # local_start 1986-10-26 01:00:00 (Sun)
62680269600, # local_end 1987-04-05 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62680287600, # utc_start 1987-04-05 07:00:00 (Sun)
62697823200, # utc_end 1987-10-25 06:00:00 (Sun)
62680273200, # local_start 1987-04-05 03:00:00 (Sun)
62697808800, # local_end 1987-10-25 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62697823200, # utc_start 1987-10-25 06:00:00 (Sun)
62711737200, # utc_end 1988-04-03 07:00:00 (Sun)
62697805200, # local_start 1987-10-25 01:00:00 (Sun)
62711719200, # local_end 1988-04-03 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62711737200, # utc_start 1988-04-03 07:00:00 (Sun)
62729877600, # utc_end 1988-10-30 06:00:00 (Sun)
62711722800, # local_start 1988-04-03 03:00:00 (Sun)
62729863200, # local_end 1988-10-30 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62729877600, # utc_start 1988-10-30 06:00:00 (Sun)
62743186800, # utc_end 1989-04-02 07:00:00 (Sun)
62729859600, # local_start 1988-10-30 01:00:00 (Sun)
62743168800, # local_end 1989-04-02 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62743186800, # utc_start 1989-04-02 07:00:00 (Sun)
62761327200, # utc_end 1989-10-29 06:00:00 (Sun)
62743172400, # local_start 1989-04-02 03:00:00 (Sun)
62761312800, # local_end 1989-10-29 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62761327200, # utc_start 1989-10-29 06:00:00 (Sun)
62774636400, # utc_end 1990-04-01 07:00:00 (Sun)
62761309200, # local_start 1989-10-29 01:00:00 (Sun)
62774618400, # local_end 1990-04-01 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62774636400, # utc_start 1990-04-01 07:00:00 (Sun)
62792776800, # utc_end 1990-10-28 06:00:00 (Sun)
62774622000, # local_start 1990-04-01 03:00:00 (Sun)
62792762400, # local_end 1990-10-28 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62792776800, # utc_start 1990-10-28 06:00:00 (Sun)
62806690800, # utc_end 1991-04-07 07:00:00 (Sun)
62792758800, # local_start 1990-10-28 01:00:00 (Sun)
62806672800, # local_end 1991-04-07 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62806690800, # utc_start 1991-04-07 07:00:00 (Sun)
62824226400, # utc_end 1991-10-27 06:00:00 (Sun)
62806676400, # local_start 1991-04-07 03:00:00 (Sun)
62824212000, # local_end 1991-10-27 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62824226400, # utc_start 1991-10-27 06:00:00 (Sun)
62838140400, # utc_end 1992-04-05 07:00:00 (Sun)
62824208400, # local_start 1991-10-27 01:00:00 (Sun)
62838122400, # local_end 1992-04-05 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62838140400, # utc_start 1992-04-05 07:00:00 (Sun)
62855676000, # utc_end 1992-10-25 06:00:00 (Sun)
62838126000, # local_start 1992-04-05 03:00:00 (Sun)
62855661600, # local_end 1992-10-25 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62855676000, # utc_start 1992-10-25 06:00:00 (Sun)
62869590000, # utc_end 1993-04-04 07:00:00 (Sun)
62855658000, # local_start 1992-10-25 01:00:00 (Sun)
62869572000, # local_end 1993-04-04 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62869590000, # utc_start 1993-04-04 07:00:00 (Sun)
62887730400, # utc_end 1993-10-31 06:00:00 (Sun)
62869575600, # local_start 1993-04-04 03:00:00 (Sun)
62887716000, # local_end 1993-10-31 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62887730400, # utc_start 1993-10-31 06:00:00 (Sun)
62901039600, # utc_end 1994-04-03 07:00:00 (Sun)
62887712400, # local_start 1993-10-31 01:00:00 (Sun)
62901021600, # local_end 1994-04-03 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62901039600, # utc_start 1994-04-03 07:00:00 (Sun)
62919180000, # utc_end 1994-10-30 06:00:00 (Sun)
62901025200, # local_start 1994-04-03 03:00:00 (Sun)
62919165600, # local_end 1994-10-30 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62919180000, # utc_start 1994-10-30 06:00:00 (Sun)
62932489200, # utc_end 1995-04-02 07:00:00 (Sun)
62919162000, # local_start 1994-10-30 01:00:00 (Sun)
62932471200, # local_end 1995-04-02 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62932489200, # utc_start 1995-04-02 07:00:00 (Sun)
62950629600, # utc_end 1995-10-29 06:00:00 (Sun)
62932474800, # local_start 1995-04-02 03:00:00 (Sun)
62950615200, # local_end 1995-10-29 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62950629600, # utc_start 1995-10-29 06:00:00 (Sun)
62964543600, # utc_end 1996-04-07 07:00:00 (Sun)
62950611600, # local_start 1995-10-29 01:00:00 (Sun)
62964525600, # local_end 1996-04-07 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62964543600, # utc_start 1996-04-07 07:00:00 (Sun)
62982079200, # utc_end 1996-10-27 06:00:00 (Sun)
62964529200, # local_start 1996-04-07 03:00:00 (Sun)
62982064800, # local_end 1996-10-27 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
62982079200, # utc_start 1996-10-27 06:00:00 (Sun)
62995993200, # utc_end 1997-04-06 07:00:00 (Sun)
62982061200, # local_start 1996-10-27 01:00:00 (Sun)
62995975200, # local_end 1997-04-06 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
62995993200, # utc_start 1997-04-06 07:00:00 (Sun)
63013528800, # utc_end 1997-10-26 06:00:00 (Sun)
62995978800, # local_start 1997-04-06 03:00:00 (Sun)
63013514400, # local_end 1997-10-26 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63013528800, # utc_start 1997-10-26 06:00:00 (Sun)
63027442800, # utc_end 1998-04-05 07:00:00 (Sun)
63013510800, # local_start 1997-10-26 01:00:00 (Sun)
63027424800, # local_end 1998-04-05 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63027442800, # utc_start 1998-04-05 07:00:00 (Sun)
63044978400, # utc_end 1998-10-25 06:00:00 (Sun)
63027428400, # local_start 1998-04-05 03:00:00 (Sun)
63044964000, # local_end 1998-10-25 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63044978400, # utc_start 1998-10-25 06:00:00 (Sun)
63058892400, # utc_end 1999-04-04 07:00:00 (Sun)
63044960400, # local_start 1998-10-25 01:00:00 (Sun)
63058874400, # local_end 1999-04-04 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63058892400, # utc_start 1999-04-04 07:00:00 (Sun)
63077032800, # utc_end 1999-10-31 06:00:00 (Sun)
63058878000, # local_start 1999-04-04 03:00:00 (Sun)
63077018400, # local_end 1999-10-31 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63077032800, # utc_start 1999-10-31 06:00:00 (Sun)
63090342000, # utc_end 2000-04-02 07:00:00 (Sun)
63077014800, # local_start 1999-10-31 01:00:00 (Sun)
63090324000, # local_end 2000-04-02 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63090342000, # utc_start 2000-04-02 07:00:00 (Sun)
63108482400, # utc_end 2000-10-29 06:00:00 (Sun)
63090327600, # local_start 2000-04-02 03:00:00 (Sun)
63108468000, # local_end 2000-10-29 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63108482400, # utc_start 2000-10-29 06:00:00 (Sun)
63121791600, # utc_end 2001-04-01 07:00:00 (Sun)
63108464400, # local_start 2000-10-29 01:00:00 (Sun)
63121773600, # local_end 2001-04-01 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63121791600, # utc_start 2001-04-01 07:00:00 (Sun)
63139932000, # utc_end 2001-10-28 06:00:00 (Sun)
63121777200, # local_start 2001-04-01 03:00:00 (Sun)
63139917600, # local_end 2001-10-28 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63139932000, # utc_start 2001-10-28 06:00:00 (Sun)
63153846000, # utc_end 2002-04-07 07:00:00 (Sun)
63139914000, # local_start 2001-10-28 01:00:00 (Sun)
63153828000, # local_end 2002-04-07 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63153846000, # utc_start 2002-04-07 07:00:00 (Sun)
63171381600, # utc_end 2002-10-27 06:00:00 (Sun)
63153831600, # local_start 2002-04-07 03:00:00 (Sun)
63171367200, # local_end 2002-10-27 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63171381600, # utc_start 2002-10-27 06:00:00 (Sun)
63185295600, # utc_end 2003-04-06 07:00:00 (Sun)
63171363600, # local_start 2002-10-27 01:00:00 (Sun)
63185277600, # local_end 2003-04-06 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63185295600, # utc_start 2003-04-06 07:00:00 (Sun)
63202831200, # utc_end 2003-10-26 06:00:00 (Sun)
63185281200, # local_start 2003-04-06 03:00:00 (Sun)
63202816800, # local_end 2003-10-26 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63202831200, # utc_start 2003-10-26 06:00:00 (Sun)
63216745200, # utc_end 2004-04-04 07:00:00 (Sun)
63202813200, # local_start 2003-10-26 01:00:00 (Sun)
63216727200, # local_end 2004-04-04 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63216745200, # utc_start 2004-04-04 07:00:00 (Sun)
63234885600, # utc_end 2004-10-31 06:00:00 (Sun)
63216730800, # local_start 2004-04-04 03:00:00 (Sun)
63234871200, # local_end 2004-10-31 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63234885600, # utc_start 2004-10-31 06:00:00 (Sun)
63248194800, # utc_end 2005-04-03 07:00:00 (Sun)
63234867600, # local_start 2004-10-31 01:00:00 (Sun)
63248176800, # local_end 2005-04-03 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63248194800, # utc_start 2005-04-03 07:00:00 (Sun)
63266335200, # utc_end 2005-10-30 06:00:00 (Sun)
63248180400, # local_start 2005-04-03 03:00:00 (Sun)
63266320800, # local_end 2005-10-30 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63266335200, # utc_start 2005-10-30 06:00:00 (Sun)
63279644400, # utc_end 2006-04-02 07:00:00 (Sun)
63266317200, # local_start 2005-10-30 01:00:00 (Sun)
63279626400, # local_end 2006-04-02 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63279644400, # utc_start 2006-04-02 07:00:00 (Sun)
63297784800, # utc_end 2006-10-29 06:00:00 (Sun)
63279630000, # local_start 2006-04-02 03:00:00 (Sun)
63297770400, # local_end 2006-10-29 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63297784800, # utc_start 2006-10-29 06:00:00 (Sun)
63309279600, # utc_end 2007-03-11 07:00:00 (Sun)
63297766800, # local_start 2006-10-29 01:00:00 (Sun)
63309261600, # local_end 2007-03-11 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63309279600, # utc_start 2007-03-11 07:00:00 (Sun)
63329839200, # utc_end 2007-11-04 06:00:00 (Sun)
63309265200, # local_start 2007-03-11 03:00:00 (Sun)
63329824800, # local_end 2007-11-04 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63329839200, # utc_start 2007-11-04 06:00:00 (Sun)
63340729200, # utc_end 2008-03-09 07:00:00 (Sun)
63329821200, # local_start 2007-11-04 01:00:00 (Sun)
63340711200, # local_end 2008-03-09 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63340729200, # utc_start 2008-03-09 07:00:00 (Sun)
63361288800, # utc_end 2008-11-02 06:00:00 (Sun)
63340714800, # local_start 2008-03-09 03:00:00 (Sun)
63361274400, # local_end 2008-11-02 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63361288800, # utc_start 2008-11-02 06:00:00 (Sun)
63372178800, # utc_end 2009-03-08 07:00:00 (Sun)
63361270800, # local_start 2008-11-02 01:00:00 (Sun)
63372160800, # local_end 2009-03-08 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63372178800, # utc_start 2009-03-08 07:00:00 (Sun)
63392738400, # utc_end 2009-11-01 06:00:00 (Sun)
63372164400, # local_start 2009-03-08 03:00:00 (Sun)
63392724000, # local_end 2009-11-01 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63392738400, # utc_start 2009-11-01 06:00:00 (Sun)
63404233200, # utc_end 2010-03-14 07:00:00 (Sun)
63392720400, # local_start 2009-11-01 01:00:00 (Sun)
63404215200, # local_end 2010-03-14 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63404233200, # utc_start 2010-03-14 07:00:00 (Sun)
63424792800, # utc_end 2010-11-07 06:00:00 (Sun)
63404218800, # local_start 2010-03-14 03:00:00 (Sun)
63424778400, # local_end 2010-11-07 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63424792800, # utc_start 2010-11-07 06:00:00 (Sun)
63435682800, # utc_end 2011-03-13 07:00:00 (Sun)
63424774800, # local_start 2010-11-07 01:00:00 (Sun)
63435664800, # local_end 2011-03-13 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63435682800, # utc_start 2011-03-13 07:00:00 (Sun)
63456242400, # utc_end 2011-11-06 06:00:00 (Sun)
63435668400, # local_start 2011-03-13 03:00:00 (Sun)
63456228000, # local_end 2011-11-06 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63456242400, # utc_start 2011-11-06 06:00:00 (Sun)
63467132400, # utc_end 2012-03-11 07:00:00 (Sun)
63456224400, # local_start 2011-11-06 01:00:00 (Sun)
63467114400, # local_end 2012-03-11 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63467132400, # utc_start 2012-03-11 07:00:00 (Sun)
63487692000, # utc_end 2012-11-04 06:00:00 (Sun)
63467118000, # local_start 2012-03-11 03:00:00 (Sun)
63487677600, # local_end 2012-11-04 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63487692000, # utc_start 2012-11-04 06:00:00 (Sun)
63498582000, # utc_end 2013-03-10 07:00:00 (Sun)
63487674000, # local_start 2012-11-04 01:00:00 (Sun)
63498564000, # local_end 2013-03-10 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63498582000, # utc_start 2013-03-10 07:00:00 (Sun)
63519141600, # utc_end 2013-11-03 06:00:00 (Sun)
63498567600, # local_start 2013-03-10 03:00:00 (Sun)
63519127200, # local_end 2013-11-03 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63519141600, # utc_start 2013-11-03 06:00:00 (Sun)
63530031600, # utc_end 2014-03-09 07:00:00 (Sun)
63519123600, # local_start 2013-11-03 01:00:00 (Sun)
63530013600, # local_end 2014-03-09 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63530031600, # utc_start 2014-03-09 07:00:00 (Sun)
63550591200, # utc_end 2014-11-02 06:00:00 (Sun)
63530017200, # local_start 2014-03-09 03:00:00 (Sun)
63550576800, # local_end 2014-11-02 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63550591200, # utc_start 2014-11-02 06:00:00 (Sun)
63561481200, # utc_end 2015-03-08 07:00:00 (Sun)
63550573200, # local_start 2014-11-02 01:00:00 (Sun)
63561463200, # local_end 2015-03-08 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63561481200, # utc_start 2015-03-08 07:00:00 (Sun)
63582040800, # utc_end 2015-11-01 06:00:00 (Sun)
63561466800, # local_start 2015-03-08 03:00:00 (Sun)
63582026400, # local_end 2015-11-01 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63582040800, # utc_start 2015-11-01 06:00:00 (Sun)
63593535600, # utc_end 2016-03-13 07:00:00 (Sun)
63582022800, # local_start 2015-11-01 01:00:00 (Sun)
63593517600, # local_end 2016-03-13 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63593535600, # utc_start 2016-03-13 07:00:00 (Sun)
63614095200, # utc_end 2016-11-06 06:00:00 (Sun)
63593521200, # local_start 2016-03-13 03:00:00 (Sun)
63614080800, # local_end 2016-11-06 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63614095200, # utc_start 2016-11-06 06:00:00 (Sun)
63624985200, # utc_end 2017-03-12 07:00:00 (Sun)
63614077200, # local_start 2016-11-06 01:00:00 (Sun)
63624967200, # local_end 2017-03-12 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63624985200, # utc_start 2017-03-12 07:00:00 (Sun)
63645544800, # utc_end 2017-11-05 06:00:00 (Sun)
63624970800, # local_start 2017-03-12 03:00:00 (Sun)
63645530400, # local_end 2017-11-05 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63645544800, # utc_start 2017-11-05 06:00:00 (Sun)
63656434800, # utc_end 2018-03-11 07:00:00 (Sun)
63645526800, # local_start 2017-11-05 01:00:00 (Sun)
63656416800, # local_end 2018-03-11 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63656434800, # utc_start 2018-03-11 07:00:00 (Sun)
63676994400, # utc_end 2018-11-04 06:00:00 (Sun)
63656420400, # local_start 2018-03-11 03:00:00 (Sun)
63676980000, # local_end 2018-11-04 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63676994400, # utc_start 2018-11-04 06:00:00 (Sun)
63687884400, # utc_end 2019-03-10 07:00:00 (Sun)
63676976400, # local_start 2018-11-04 01:00:00 (Sun)
63687866400, # local_end 2019-03-10 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63687884400, # utc_start 2019-03-10 07:00:00 (Sun)
63708444000, # utc_end 2019-11-03 06:00:00 (Sun)
63687870000, # local_start 2019-03-10 03:00:00 (Sun)
63708429600, # local_end 2019-11-03 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63708444000, # utc_start 2019-11-03 06:00:00 (Sun)
63719334000, # utc_end 2020-03-08 07:00:00 (Sun)
63708426000, # local_start 2019-11-03 01:00:00 (Sun)
63719316000, # local_end 2020-03-08 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63719334000, # utc_start 2020-03-08 07:00:00 (Sun)
63739893600, # utc_end 2020-11-01 06:00:00 (Sun)
63719319600, # local_start 2020-03-08 03:00:00 (Sun)
63739879200, # local_end 2020-11-01 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63739893600, # utc_start 2020-11-01 06:00:00 (Sun)
63751388400, # utc_end 2021-03-14 07:00:00 (Sun)
63739875600, # local_start 2020-11-01 01:00:00 (Sun)
63751370400, # local_end 2021-03-14 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63751388400, # utc_start 2021-03-14 07:00:00 (Sun)
63771948000, # utc_end 2021-11-07 06:00:00 (Sun)
63751374000, # local_start 2021-03-14 03:00:00 (Sun)
63771933600, # local_end 2021-11-07 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63771948000, # utc_start 2021-11-07 06:00:00 (Sun)
63782838000, # utc_end 2022-03-13 07:00:00 (Sun)
63771930000, # local_start 2021-11-07 01:00:00 (Sun)
63782820000, # local_end 2022-03-13 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63782838000, # utc_start 2022-03-13 07:00:00 (Sun)
63803397600, # utc_end 2022-11-06 06:00:00 (Sun)
63782823600, # local_start 2022-03-13 03:00:00 (Sun)
63803383200, # local_end 2022-11-06 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63803397600, # utc_start 2022-11-06 06:00:00 (Sun)
63814287600, # utc_end 2023-03-12 07:00:00 (Sun)
63803379600, # local_start 2022-11-06 01:00:00 (Sun)
63814269600, # local_end 2023-03-12 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63814287600, # utc_start 2023-03-12 07:00:00 (Sun)
63834847200, # utc_end 2023-11-05 06:00:00 (Sun)
63814273200, # local_start 2023-03-12 03:00:00 (Sun)
63834832800, # local_end 2023-11-05 02:00:00 (Sun)
-14400,
1,
'EDT',
],
[
63834847200, # utc_start 2023-11-05 06:00:00 (Sun)
63845737200, # utc_end 2024-03-10 07:00:00 (Sun)
63834829200, # local_start 2023-11-05 01:00:00 (Sun)
63845719200, # local_end 2024-03-10 02:00:00 (Sun)
-18000,
0,
'EST',
],
[
63845737200, # utc_start 2024-03-10 07:00:00 (Sun)
63866296800, # utc_end 2024-11-03 06:00:00 (Sun)
63845722800, # local_start 2024-03-10 03:00:00 (Sun)
63866282400, # local_end 2024-11-03 02:00:00 (Sun)
-14400,
1,
'EDT',
],
];
# Version of the IANA/Olson time zone database this data was generated from.
sub olson_version {
    return '2013a';
}
# Number of DST transitions recorded in the precomputed span table.
sub has_dst_changes {
    return 104;
}
# Last year for which spans were fully precomputed at generation time.
sub _max_year {
    return 2023;
}
# Construct a time zone object for this zone, wiring in the precomputed
# transition table ($spans) defined above.
sub _new_instance {
    my $class = shift;
    return $class->_init( @_, spans => $spans );
}
# Standard-time UTC offset in seconds (UTC-5:00) after the last span.
sub _last_offset {
    return -18000;
}
# Final observance from the Olson source for this zone: standard offset
# -5:00 (-18000 s from UTC), abbreviation template 'E%sT', and an empty
# 'until' list, i.e. it remains in force indefinitely. Presumably
# consumed by DateTime::TimeZone to synthesize spans beyond the
# precomputed table above -- confirm against
# DateTime::TimeZone::OlsonDB::Observance.
my $last_observance = bless( {
'format' => 'E%sT',
'gmtoff' => '-5:00',
# Start of this observance expressed in local time (floating zone,
# rd day 720624, utc_year 1975 per the generated data).
'local_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 720624,
'local_rd_secs' => 0,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 720624,
'utc_rd_secs' => 0,
'utc_year' => 1975
}, 'DateTime' ),
'offset_from_std' => 0,
'offset_from_utc' => -18000,
'until' => [],
# Same start instant expressed in UTC (local 00:00 at -5:00 is
# 05:00:00 UTC, hence the 18000-second rd_secs values).
'utc_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 720624,
'local_rd_secs' => 18000,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 720624,
'utc_rd_secs' => 18000,
'utc_year' => 1975
}, 'DateTime' )
}, 'DateTime::TimeZone::OlsonDB::Observance' )
;
# Read-only accessor for the ongoing observance.
sub _last_observance { $last_observance }
# DST rule set named 'Canada' in effect from 2007 onward ('to' => 'max'):
# fall back to standard time (save '0', letter 'S') on the first Sunday
# of November at 2:00, and spring forward one hour (save '1:00',
# offset_from_std 3600, letter 'D') on the second Sunday of March
# (Sun>=8) at 2:00. Presumably paired with _last_observance to generate
# spans past the precomputed table -- confirm against
# DateTime::TimeZone::OlsonDB::Rule.
my $rules = [
# Rule: return to standard time each November.
bless( {
'at' => '2:00',
'from' => '2007',
'in' => 'Nov',
'letter' => 'S',
'name' => 'Canada',
'offset_from_std' => 0,
'on' => 'Sun>=1',
'save' => '0',
'to' => 'max',
'type' => undef
}, 'DateTime::TimeZone::OlsonDB::Rule' ),
# Rule: begin daylight saving time each March.
bless( {
'at' => '2:00',
'from' => '2007',
'in' => 'Mar',
'letter' => 'D',
'name' => 'Canada',
'offset_from_std' => 3600,
'on' => 'Sun>=8',
'save' => '1:00',
'to' => 'max',
'type' => undef
}, 'DateTime::TimeZone::OlsonDB::Rule' )
]
;
# Read-only accessor for the ongoing DST rules.
sub _rules { $rules }
1;
| liuyangning/WX_web | xampp/perl/vendor/lib/DateTime/TimeZone/America/Toronto.pm | Perl | mit | 54,257 |
# Collation tailoring for the Turkish (tr) locale, returned as the
# file's value for Unicode::Collate::Locale. The 'entry' heredoc holds
# collation-element overrides in DUCET syntax
# ("codepoints ; [collation elements] # description") for the Turkish
# letters: C/c with cedilla, G/g with breve, dotless i vs. dotted I,
# O/o with diaeresis, S/s with cedilla and U/u with diaeresis (plus
# their precomposed accent combinations). The heredoc text is parsed
# data -- do not reformat it.
+{
locale_version => 0.93,
entry => <<'ENTRY', # for DUCET v6.2.0
00E7 ; [.161E.0020.0002.00E7] # LATIN SMALL LETTER C WITH CEDILLA
0063 0327 ; [.161E.0020.0002.00E7] # LATIN SMALL LETTER C WITH CEDILLA
00C7 ; [.161E.0020.0008.00C7] # LATIN CAPITAL LETTER C WITH CEDILLA
0043 0327 ; [.161E.0020.0008.00C7] # LATIN CAPITAL LETTER C WITH CEDILLA
011F ; [.1692.0020.0002.011F] # LATIN SMALL LETTER G WITH BREVE
0067 0306 ; [.1692.0020.0002.011F] # LATIN SMALL LETTER G WITH BREVE
011E ; [.1692.0020.0008.011E] # LATIN CAPITAL LETTER G WITH BREVE
0047 0306 ; [.1692.0020.0008.011E] # LATIN CAPITAL LETTER G WITH BREVE
0131 ; [.16CC.0020.0002.0131] # LATIN SMALL LETTER DOTLESS I
0049 ; [.16CC.0020.0008.0049] # LATIN CAPITAL LETTER I
00CC ; [.16CC.0020.0008.0049][.0000.0035.0002.0300] # LATIN CAPITAL LETTER I WITH GRAVE
00CD ; [.16CC.0020.0008.0049][.0000.0032.0002.0301] # LATIN CAPITAL LETTER I WITH ACUTE
00CE ; [.16CC.0020.0008.0049][.0000.003C.0002.0302] # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
00CF ; [.16CC.0020.0008.0049][.0000.0047.0002.0308] # LATIN CAPITAL LETTER I WITH DIAERESIS
012A ; [.16CC.0020.0008.0049][.0000.005B.0002.0304] # LATIN CAPITAL LETTER I WITH MACRON
012C ; [.16CC.0020.0008.0049][.0000.0037.0002.0306] # LATIN CAPITAL LETTER I WITH BREVE
012E ; [.16CC.0020.0008.0049][.0000.0059.0002.0328] # LATIN CAPITAL LETTER I WITH OGONEK
0130 ; [.16CD.0020.0008.0130] # LATIN CAPITAL LETTER I WITH DOT ABOVE
0049 0307 ; [.16CD.0020.0008.0130] # LATIN CAPITAL LETTER I WITH DOT ABOVE
00F6 ; [.1772.0020.0002.00F6] # LATIN SMALL LETTER O WITH DIAERESIS
006F 0308 ; [.1772.0020.0002.00F6] # LATIN SMALL LETTER O WITH DIAERESIS
00D6 ; [.1772.0020.0008.00D6] # LATIN CAPITAL LETTER O WITH DIAERESIS
004F 0308 ; [.1772.0020.0008.00D6] # LATIN CAPITAL LETTER O WITH DIAERESIS
022B ; [.1772.0020.0002.00F6][.0000.005B.0002.0304] # LATIN SMALL LETTER O WITH DIAERESIS AND MACRON
022A ; [.1772.0020.0008.00D6][.0000.005B.0002.0304] # LATIN CAPITAL LETTER O WITH DIAERESIS AND MACRON
015F ; [.17F4.0020.0002.015F] # LATIN SMALL LETTER S WITH CEDILLA
0073 0327 ; [.17F4.0020.0002.015F] # LATIN SMALL LETTER S WITH CEDILLA
015E ; [.17F4.0020.0008.015E] # LATIN CAPITAL LETTER S WITH CEDILLA
0053 0327 ; [.17F4.0020.0008.015E] # LATIN CAPITAL LETTER S WITH CEDILLA
00FC ; [.1837.0020.0002.00FC] # LATIN SMALL LETTER U WITH DIAERESIS
0075 0308 ; [.1837.0020.0002.00FC] # LATIN SMALL LETTER U WITH DIAERESIS
00DC ; [.1837.0020.0008.00DC] # LATIN CAPITAL LETTER U WITH DIAERESIS
0055 0308 ; [.1837.0020.0008.00DC] # LATIN CAPITAL LETTER U WITH DIAERESIS
01DC ; [.1837.0020.0002.00FC][.0000.0035.0002.0300] # LATIN SMALL LETTER U WITH DIAERESIS AND GRAVE
01DB ; [.1837.0020.0008.00DC][.0000.0035.0002.0300] # LATIN CAPITAL LETTER U WITH DIAERESIS AND GRAVE
01D8 ; [.1837.0020.0002.00FC][.0000.0032.0002.0301] # LATIN SMALL LETTER U WITH DIAERESIS AND ACUTE
01D7 ; [.1837.0020.0008.00DC][.0000.0032.0002.0301] # LATIN CAPITAL LETTER U WITH DIAERESIS AND ACUTE
01D6 ; [.1837.0020.0002.00FC][.0000.005B.0002.0304] # LATIN SMALL LETTER U WITH DIAERESIS AND MACRON
01D5 ; [.1837.0020.0008.00DC][.0000.005B.0002.0304] # LATIN CAPITAL LETTER U WITH DIAERESIS AND MACRON
01DA ; [.1837.0020.0002.00FC][.0000.0041.0002.030C] # LATIN SMALL LETTER U WITH DIAERESIS AND CARON
01D9 ; [.1837.0020.0008.00DC][.0000.0041.0002.030C] # LATIN CAPITAL LETTER U WITH DIAERESIS AND CARON
ENTRY
};
| Dokaponteam/ITF_Project | xampp/perl/lib/Unicode/Collate/Locale/tr.pl | Perl | mit | 3,531 |
# Copyright 2004 by Audrey Tang <cpan@audreyt.org>

# Represents one data-directory entry of a PE (Win32 executable) header:
# a virtual address plus a size. Parsing/serialization is inherited from
# Win32::Exe::Base, driven by the FORMAT template below.
package Win32::Exe::DataDirectory;
use strict;
use base 'Win32::Exe::Base';
# Field layout consumed by Win32::Exe::Base: each field is unpacked with
# the given pack() template ('V' = little-endian unsigned 32-bit).
use constant FORMAT => (
VirtualAddress => 'V', # RVA of the directory's data
Size => 'V', # size of the data in bytes
);
1;
| Dokaponteam/ITF_Project | xampp/perl/vendor/lib/Win32/Exe/DataDirectory.pm | Perl | mit | 210 |
package Module::Build::PPMMaker;
use strict;
use vars qw($VERSION);
$VERSION = '0.2808_01';
$VERSION = eval $VERSION;
# This code is mostly borrowed from ExtUtils::MM_Unix 6.10_03, with a
# few tweaks based on the PPD spec at
# http://www.xav.com/perl/site/lib/XML/PPD.html
# The PPD spec is based on <http://www.w3.org/TR/NOTE-OSD>
# Construct a PPMMaker; any key/value options passed in are stored
# directly on the object.
sub new {
    my ($package, %args) = @_;
    return bless {%args}, $package;
}
# Build a PPD ("Perl Package Description") XML file for the distribution
# described by the Module::Build object passed as the 'build' argument.
# The optional 'codebase' argument (scalar or array ref) gives the archive
# URL(s); otherwise a default "<ppm_name>.tar.gz" codebase is used.
# Writes "<distname>.ppd" to the current directory and returns its filename.
# Dies if any of name/author/abstract/version cannot be determined, or if
# the output file cannot be opened.
sub make_ppd {
  my ($self, %args) = @_;
  my $build = delete $args{build};
  my @codebase;
  if (exists $args{codebase}) {
    # Accept either a single codebase string or an array ref of them.
    @codebase = ref $args{codebase} ? @{$args{codebase}} : ($args{codebase});
  } else {
    my $distfile = $build->ppm_name . '.tar.gz';
    print "Using default codebase '$distfile'\n";
    @codebase = ($distfile);
  }
  # Collect the distribution metadata via Module::Build's dist_* accessors.
  my %dist;
  foreach my $info (qw(name author abstract version)) {
    my $method = "dist_$info";
    $dist{$info} = $build->$method() or die "Can't determine distribution's $info\n";
  }
  # PPD wants a 4-component comma-separated version.
  $dist{version} = $self->_ppd_version($dist{version});
  # Escape text that is interpolated into XML element content.
  $self->_simple_xml_escape($_) foreach $dist{abstract}, @{$dist{author}};
  # TODO: could add <LICENSE HREF=...> tag if we knew what the URLs were for
  # various licenses
  my $ppd = <<"PPD";
<SOFTPKG NAME=\"$dist{name}\" VERSION=\"$dist{version}\">
<TITLE>$dist{name}</TITLE>
<ABSTRACT>$dist{abstract}</ABSTRACT>
@{[ join "\n", map " <AUTHOR>$_</AUTHOR>", @{$dist{author}} ]}
<IMPLEMENTATION>
PPD
  # TODO: We could set <IMPLTYPE VALUE="PERL" /> or maybe
  # <IMPLTYPE VALUE="PERL/XS" /> ???
  # We don't include recommended dependencies because PPD has no way
  # to distinguish them from normal dependencies. We don't include
  # build_requires dependencies because the PPM installer doesn't
  # build or test before installing. And obviously we don't include
  # conflicts either.
  foreach my $type (qw(requires)) {
    my $prereq = $build->$type();
    while (my ($modname, $spec) = each %$prereq) {
      next if $modname eq 'perl';
      # Derive a minimum version from the prerequisite spec; only a ">="
      # condition is honored (see "nasty hack" note below).
      my $min_version = '0.0';
      foreach my $c ($build->_parse_conditions($spec)) {
        my ($op, $version) = $c =~ /^\s* (<=?|>=?|==|!=) \s* ([\w.]+) \s*$/x;
        # This is a nasty hack because it fails if there is no >= op
        if ($op eq '>=') {
          $min_version = $version;
          last;
        }
      }
      # Another hack - dependencies are on modules, but PPD expects
      # them to be on distributions (I think).
      $modname =~ s/::/-/g;
      $ppd .= sprintf(<<'EOF', $modname, $self->_ppd_version($min_version));
<DEPENDENCY NAME="%s" VERSION="%s" />
EOF
    }
  }
  # We only include these tags if this module involves XS, on the
  # assumption that pure Perl modules will work on any OS. PERLCORE,
  # unfortunately, seems to indicate that a module works with _only_
  # that version of Perl, and so is only appropriate when a module
  # uses XS.
  if (keys %{$build->find_xs_files}) {
    my $perl_version = $self->_ppd_version($build->perl_version);
    $ppd .= sprintf(<<'EOF', $perl_version, $^O, $self->_varchname($build->config) );
<PERLCORE VERSION="%s" />
<OS NAME="%s" />
<ARCHITECTURE NAME="%s" />
EOF
  }
  # One <CODEBASE> element per archive location.
  foreach my $codebase (@codebase) {
    $self->_simple_xml_escape($codebase);
    $ppd .= sprintf(<<'EOF', $codebase);
<CODEBASE HREF="%s" />
EOF
  }
  $ppd .= <<'EOF';
</IMPLEMENTATION>
</SOFTPKG>
EOF
  # Write the document out; NOTE(review): the close() is unchecked, so a
  # full-disk condition could pass silently.
  my $ppd_file = "$dist{name}.ppd";
  my $fh = IO::File->new(">$ppd_file")
    or die "Cannot write to $ppd_file: $!";
  print $fh $ppd;
  close $fh;
  return $ppd_file;
}
# Convert a dotted version string into the 4-component comma-separated
# form PPD expects, zero-padding short versions (e.g. "0.18" -> "0,18,0,0")
# and truncating long ones to four components.
sub _ppd_version {
    my ($self, $version) = @_;
    my @components = (split(/\./, $version), (0) x 4);
    return join ',', @components[0 .. 3];
}
# Compute the PPM architecture name: the perl archname, with a
# "-<major>.<minor>" suffix appended for Perl 5.8 and later (PPM packages
# are not binary-compatible across those minor versions).
sub _varchname { # Copied from PPM.pm
  my ($self, $config) = @_;
  my $varchname = $config->{archname};
  # Append "-5.8" to architecture name for Perl 5.8 and later
  # NOTE(review): this treats $^V as the old v-string representation, where
  # ord() of each character yields a version component.  On perls where $^V
  # stringifies as "v5.x.y" these ord() calls decode ASCII characters
  # instead -- verify the suffix produced on the target perl.
  if (defined($^V) && ord(substr($^V,1)) >= 8) {
    $varchname .= sprintf("-%d.%d", ord($^V), ord(substr($^V,1)));
  }
  return $varchname;
}
{
  # Minimal XML escaping for text interpolated into the PPD document.
  # BUGFIX: the map previously sent '"', '&', '>' and '<' to themselves
  # (an entity-unescaped copy of the table), which made the escaping a
  # no-op and produced malformed XML whenever the abstract, author or
  # codebase contained markup characters.  Restored the XML predefined
  # entities.  Newlines become the two-character sequence "\n".
  my %escapes = (
    "\n" => "\\n",
    '"' => '&quot;',
    '&' => '&amp;',
    '>' => '&gt;',
    '<' => '&lt;',
  );
  my $rx = join '|', keys %escapes;

  # Escapes $_[1] IN PLACE: callers rely on @_ aliasing the original string.
  sub _simple_xml_escape {
    $_[1] =~ s/($rx)/$escapes{$1}/go;
  }
}
1;
__END__
=head1 NAME
Module::Build::PPMMaker - Perl Package Manager file creation
=head1 SYNOPSIS
On the command line, builds a .ppd file:
./Build ppd
=head1 DESCRIPTION
This package contains the code that builds F<.ppd> "Perl Package
Description" files, in support of ActiveState's "Perl Package
Manager". Details are here:
L<http://aspn.activestate.com/ASPN/Downloads/ActivePerl/PPM/>
=head1 AUTHOR
Dave Rolsky <autarch@urth.org>, Ken Williams <kwilliams@cpan.org>
=head1 COPYRIGHT
Copyright (c) 2001-2006 Ken Williams. All rights reserved.
This library is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.
=head1 SEE ALSO
perl(1), Module::Build(3)
=cut
| leighpauls/k2cro4 | third_party/cygwin/lib/perl5/5.10/Module/Build/PPMMaker.pm | Perl | bsd-3-clause | 4,970 |
#! /usr/bin/env perl

# cross-options.pl SCANNER-SOURCE < help-output
#
# Reads a program's --help output on stdin and cross-references each long
# option against the directive strings found (via grep) in the scanner
# source named in $ARGV[0].  Emits Texinfo @multitable rows mapping
# long option -> short option -> %directive.
use warnings;
use 5.005;
use strict;

my %option;      # long option -> short option (Texinfo-formatted)
my %directive;   # long option -> %directive   (Texinfo-formatted)
# Collect every quoted "%..." directive string from the scanner source.
my $scanner = `grep -i '"%[a-z]' $ARGV[0]`;
$scanner =~ s/"\[-_\]"/-/g;
while (<STDIN>)
{
    if (/^\s* # Initial spaces.
(?:(-\w),\s+)? # $1: $short: Possible short option.
(--[-\w]+) # $2: $long: Long option.
(\[?) # $3: $opt: '[' iff the argument is optional.
(?:=(\S+))? # $4: $arg: Possible argument name.
\s # Spaces.
/x)
    {
        my ($short, $long, $opt, $arg) = ($1, $2, $3, $4);
        $short = '' if ! defined $short;
        # --defines historically pairs with -d even when --help omits it.
        $short = '-d' if $long eq '--defines' && ! $short;
        # A matching %directive exists iff the scanner quotes it literally.
        my $dir = '%' . substr($long, 2);
        if (index ($scanner, "\"$dir\"") < 0)
        {
            if ($long eq '--force-define') { $dir = '%define'; }
            else { $dir = ''; }
        }
        if ($arg)
        {
            # if $opt, $arg contains the closing ].
            substr ($arg, -1) = ''
                if $opt eq '[';
            $arg =~ s/^=//;
            $arg = lc ($arg);
            my $dir_arg = $arg;
            # If the argument is composite (e.g., for --define[=NAME[=VALUE]]),
            # put each word in @var, to build @var{name}[=@var{value}], not
            # @var{name[=value]}].
            $arg =~ s/(\w+)/\@var{$1}/g;
            my $long_arg = "=$arg";
            if ($opt eq '[') {
                $long_arg = "[$long_arg]";
                $arg = "[$arg]";
            }
            # For arguments of directives: this only works if all arguments
            # are strings and have the same syntax as on the command line.
            if ($dir_arg eq 'name[=value]')
            {
                $dir_arg = '@var{name} ["@var{value}"]';
            }
            else
            {
                $dir_arg =~ s/(\w+)/\@var{"$1"}/g;
                $dir_arg = '[' . $dir_arg . ']'
                    if $opt eq '[';
            }
            $long = "$long$long_arg";
            $short = "$short $arg" if $short && $short ne '-d';
            $dir = "$dir $dir_arg" if $dir;
        }
        $option{$long} = $short;
        $directive{$long} = $dir;
    }
}

# Emit one @multitable row per long option, sorted alphabetically.
my $sep = '';
foreach my $long (sort keys %option)
{
    # Avoid trailing spaces.
    print $sep;
    $sep = "\n";
    print '@item @option{', $long, "}\n\@tab";
    print ' @option{', $option{$long}, '}' if $option{$long};
    print "\n\@tab";
    print ' @code{', $directive{$long}, '}' if $directive{$long};
    print "\n";
}
# The "package" keyword is split across two lines to hide this generated
# module from the PAUSE/CPAN indexer.
package #
Date::Manip::TZ::pakiri00;
# Copyright (c) 2008-2015 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Wed Nov 25 11:33:38 EST 2015
# Data version: tzdata2015g
# Code version: tzcode2015g
# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz
use strict;
use warnings;
require 5.010000;
our (%Dates,%LastRule);
END {
undef %Dates;
undef %LastRule;
}
our ($VERSION);
$VERSION='6.52';
END { undef $VERSION; }
# %Dates maps a starting year to offset records for the Kiritimati zone.
# Each record appears to be (layout inferred from the generated data --
# confirm against Date::Manip::TZ internals):
#   [ UT start, local start, offset string, [h,m,s] offset, abbreviation,
#     isdst flag, UT end, local end, then the same four instants as
#     "YYYYMMDDHH:MM:SS" strings ].
%Dates = (
1 =>
[
[ [1,1,2,0,0,0],[1,1,1,13,30,40],'-10:29:20',[-10,-29,-20],
'LMT',0,[1901,1,1,10,29,19],[1900,12,31,23,59,59],
'0001010200:00:00','0001010113:30:40','1901010110:29:19','1900123123:59:59' ],
],
1901 =>
[
[ [1901,1,1,10,29,20],[1900,12,31,23,49,20],'-10:40:00',[-10,-40,0],
'LINT',0,[1979,10,1,10,39,59],[1979,9,30,23,59,59],
'1901010110:29:20','1900123123:49:20','1979100110:39:59','1979093023:59:59' ],
],
1979 =>
[
[ [1979,10,1,10,40,0],[1979,10,1,0,40,0],'-10:00:00',[-10,0,0],
'LINT',0,[1995,1,1,9,59,59],[1994,12,31,23,59,59],
'1979100110:40:00','1979100100:40:00','1995010109:59:59','1994123123:59:59' ],
],
1995 =>
[
# 1994-12-31 was skipped: Kiribati jumped from UTC-10 to UTC+14.
[ [1995,1,1,10,0,0],[1995,1,2,0,0,0],'+14:00:00',[14,0,0],
'LINT',0,[9999,12,31,0,0,0],[9999,12,31,14,0,0],
'1995010110:00:00','1995010200:00:00','9999123100:00:00','9999123114:00:00' ],
],
);
# No ongoing DST rules for this zone.
%LastRule = (
);
1;
package O2::Model::Target::Test;
use strict;
use base 'O2::Model::Target';
use O2 qw($context);
#-----------------------------------------------------------------------------
# Generate a .t test script for $model, write it under <root>/t/, and
# (after interactive confirmation, defaulting to yes) run it with perl.
sub generate {
  my ($obj, $model) = @_;
  $obj->say("Generate test-script");
  my $testScript = $obj->generateTestScript($model);
  # Echo the generated script when the "print" argument was given.
  $obj->say($testScript) if $obj->getArg('print');
  my $path = $obj->getTestScriptPath($model);
  $obj->writeFile($path, $testScript);
  # Any answer other than "n"/"N" (including empty) runs the script.
  if (lc ($obj->ask("Do you want to run the test-script ($path) now (Y/n) ?")) ne 'n') {
    system "perl $path";
  }
}
#-----------------------------------------------------------------------------
# Build (as a string of Perl source) a Test::More script for $model's
# class/manager pair: it creates an object, sets a test value on every
# settable field (multilingual fields once per configured locale), saves
# it, re-reads it from the database, and compares every field, finishing
# with a simple objectSearch check and END-block cleanup.
sub generateTestScript {
  my ($obj, $model) = @_;
  my $className = $model->getClassName();
  my $mgrClass = $model->getManagerClassName();
  my $hasMultilingualFields = $model->getMultilingualFields() ? 1 : 0;
  # Preamble of the generated script; sigils are backslashed so they end
  # up literally in the generated source.
  my $script = <<END
use strict;
use Test::More qw(no_plan);
use O2::Script::Test::Common;
use_ok '$mgrClass';
use O2 qw(\$context \$config);
my \@localeCodes = \@{ \$config->get('o2.locales') };
my \$mgr = \$context->getSingleton('$mgrClass');
my \$newObj = \$mgr->newObject();
\$newObj->setMetaName('Test-script for $className/$mgrClass');
END
;
  # One setter call per non-multilingual field; meta fields only for the
  # O2::Obj::Object base class, and the read-only meta fields never.
  foreach my $className ($model->getClassNames()) {
    foreach my $field ($model->getFieldsByClassName($className)) {
      next if $field->isMultilingual();
      next if $className ne 'O2::Obj::Object' && $field->isMetaField();
      my $fieldName = $field->getName();
      next if $className eq 'O2::Obj::Object' && ($fieldName eq 'id' || $fieldName eq 'metaClassName' || $fieldName eq 'metaCreateTime' || $fieldName eq 'metaChangeTime');
      my $value = $obj->_getTestValue($field);
      $script .= '$newObj->' . $field->getSetAccessor() . "($value);\n";
    }
  }
  # Multilingual fields get set once per locale inside a generated loop.
  if ($hasMultilingualFields) {
    $script .= "
foreach my \$localeCode (\@localeCodes) {
  \$newObj->setCurrentLocale(\$localeCode);\n";
    foreach my $field ($model->getMultilingualFields()) {
      next if $field->isMetaField();
      my $value = $obj->_getTestValue($field, '$localeCode');
      $script .= "  \$newObj->" . $field->getSetAccessor() . "($value);\n";
    }
    $script .= "}\n";
  }
  # Save, then fetch the object back for comparison.
  $script .= <<END
\$newObj->save();
ok(\$newObj->getId() > 0, 'Object saved ok');
my \$dbObj = \$context->getObjectById( \$newObj->getId() );
ok(\$dbObj, 'getObjectById returned something') or BAIL_OUT("Couldn't get object from database");
END
;
  # One comparison per field, mirroring the setter loops above.
  foreach my $className ($model->getClassNames()) {
    foreach my $field ($model->getFieldsByClassName($className)) {
      next if $field->isMultilingual();
      next if $className ne 'O2::Obj::Object' && $field->isMetaField();
      $script .= $obj->_getCodeToTestField($field, $model);
    }
  }
  if ($hasMultilingualFields) {
    $script .= "foreach my \$localeCode (\@localeCodes) {\n";
    $script .= "  \$newObj->setCurrentLocale(\$localeCode);\n";
    $script .= "  \$dbObj->setCurrentLocale(\$localeCode);\n";
    foreach my $field ($model->getMultilingualFields()) {
      next if $field->isMetaField();
      $script .= '  ' . $obj->_getCodeToTestField($field, $model);
    }
    $script .= "}\n";
  }
  $script .= "\n# See if a simple object search works\n";
  $script .= "my \@searchResults = \$mgr->objectSearch( objectId => \$newObj->getId() );\n";
  $script .= "is(\$searchResults[0]->getId(), \$newObj->getId(), 'Search for objectId ok');\n\n";
  # Cleanup block for the generated script.
  $script .= <<END
END {
  \$newObj->deletePermanently() if \$newObj;
  deleteTestObjects();
}
END
;
  return $script;
}
#-----------------------------------------------------------------------------
# Write $contentWithoutHash to $path via O2::Util::FileGenerator, which
# prepends a content hash line.  Skips the write when the on-disk content
# (minus its hash line) is already identical; prompts before overwriting
# a hand-edited (non-auto-generated) existing file, defaulting to no.
sub writeFile {
  my ($obj, $path, $contentWithoutHash) = @_;
  my $fileGenerator = $context->getSingleton('O2::Util::FileGenerator');
  if (-e $path && $contentWithoutHash eq $fileGenerator->getFileContentWithoutHashLine($path)) {
    return $obj->say("No changes need to be made");
  }
  # New files and auto-generated files are written unconditionally.
  if ( !-e $path || $fileGenerator->fileIsAutoGenerated($path) || lc ($obj->ask("File $path exists, overwrite? (y/N)")) eq 'y' ) {
    $fileGenerator->writeFile($path, $contentWithoutHash);
    $obj->say("Wrote $path");
  }
}
#-----------------------------------------------------------------------------
# Map the model's class name to its test-script location: <root>/t/<Class/Path>.t
sub getTestScriptPath {
  my ($obj, $model) = @_;
  # Turn Foo::Bar::Baz into Foo/Bar/Baz for use as a relative path.
  (my $relPath = $model->getClassName()) =~ s{::}{/}g;
  return sprintf '%s/t/%s.t', $obj->getArg('currentRoot'), $relPath;
}
#-----------------------------------------------------------------------------
# Produce a string of Perl source representing a test value for $field,
# suitable for interpolation into a generated setter call.  $extraValue,
# when given, is appended to string values (used to make per-locale
# values distinguishable).  Honors an explicit per-field test value,
# hash/array list types, valid-value lists, and each scalar field type;
# dies on an unrecognized type.
sub _getTestValue {
  my ($obj, $field, $extraValue) = @_;
  # An explicitly configured test value wins outright.
  my $value = $field->getTestValue();
  return $value if $value;
  my $type = $field->getType();
  if ($field->getListType() eq 'hash') {
    # Emit "'key' => 'value', ..." pairs; numeric values unless the field
    # holds text.
    my %hash = ( a => 1, b => 2 );
    %hash = ( a => 'one', b => 'two' ) if $type eq 'varchar' || $field->isTextType();
    my $string;
    foreach my $key (keys %hash) {
      $hash{$key} = $hash{$key} . " ($extraValue)" if $extraValue;
      $string .= "'$key' => '$hash{$key}', ";
    }
    return $string;
  }
  elsif ($field->getListType() eq 'array') {
    # Object-typed arrays get generated getTestObjectId() calls.
    my @strings = (1, 2);
    @strings = (' getTestObjectId()', 'getTestObjectId() ') if $field->isObjectType();
    return join ', ', @strings if $type ne 'varchar' && !$field->isTextType();
    @strings = qw(one two);
    if ($extraValue) {
      foreach my $i (0 .. scalar(@strings)-1) {
        $strings[$i] = $strings[$i] . " ($extraValue)";
      }
    }
    return '"' . join ('", "', @strings) . '"';
  }
  else {
    $value = '';
    # Fields with an enumerated value list use the first valid value.
    my @validValues = $field->getValidValues();
    if (@validValues) {
      my $value = $validValues[0];
      $value = "'$value'" if $field->isTextType() || $type =~ m{ \A varchar }xms || $type =~ m{ \A char }xms;
      return $value;
    }
    if ($field->isTextType()) {
      $value = 'Test-text';
      $value .= " ($extraValue)" if $extraValue;
      return qq{"$value"};
    }
    if ($type =~ m{ \A varchar }xms) {
      # Truncate so the test value respects the column length.
      $value = 'Test-varchar';
      $value = substr $value, 0, $field->getLength() if $field->getLength() && $field->getLength() < length $value;
      $value .= " ($extraValue)" if $extraValue;
      return qq{"$value"};
    }
    if ($type =~ m{ \A char }xms) {
      # Fixed-width char: fill the column exactly.
      my $size = $field->getLength();
      return "'" . ('a' x $size) . "'";
    }
    return 1      if $type eq 'int'  || $type eq 'bit' || $type eq 'tinyint';
    return 1.1    if $field->isFloatingPointType();
    # The literal source text "time" becomes a time() call in the
    # generated script.
    return 'time' if $type eq 'epoch';
    return ' getTestObjectId() ' if $field->isObjectType();
    return " \$context->getDateFormatter()->dateFormat(time, 'yyyy-MM-dd HH:mm:ss') " if $type eq 'date';
    die "Unknown field type $type";
  }
}
#-----------------------------------------------------------------------------
# Produce one line of generated Test::More source comparing $field on the
# re-read object ($dbObj) against the original ($newObj), choosing the
# comparison form that fits the field's list type and value type.
sub _getCodeToTestField {
  my ($obj, $field, $model) = @_;
  my $script = '';
  my $testTitle = $obj->_getTestTitle($field);
  my $getter = $field->getGetAccessor();
  if ($field->getListType() eq 'array') {
    # Object arrays are compared by id; switch to the ...Ids accessor.
    if ($field->isObjectType() && $getter !~ m{ Ids \z }xms) {
      $getter = $context->getSingleton('O2::Mgr::ObjectManager')->_appendIds($getter);
    }
    $script .= "is_deeply( [ \$dbObj->$getter() ], [ \$newObj->$getter() ], $testTitle );\n";
  }
  elsif ($field->getListType() eq 'hash') {
    $script .= "is_deeply( { \$dbObj->$getter() }, { \$newObj->$getter() }, $testTitle );\n";
  }
  elsif ($field->getType() eq 'date') {
    # Compare dates through a canonical string format.
    $script .= "is( \$dbObj->$getter()->format('yyyy-MM-dd HH:mm:ss'), \$newObj->$getter()->format('yyyy-MM-dd HH:mm:ss'), $testTitle );\n";
  }
  elsif ($field->isFloatingPointType()) {
    # Numeric comparison avoids string-formatting differences for floats.
    $script .= "cmp_ok( \$dbObj->$getter(), '==', \$newObj->$getter(), $testTitle );\n";
  }
  elsif ($field->isObjectType() && $getter !~ m{ Id \z }xms) {
    # Single object references are compared by id.
    $getter .= 'Id';
    $script .= "is( \$dbObj->$getter(), \$newObj->$getter(), $testTitle );\n";
  }
  else {
    $script .= "is( \$dbObj->$getter(), \$newObj->$getter(), $testTitle );\n";
  }
  return $script;
}
#-----------------------------------------------------------------------------
# Build the (single-quoted) Test::More description for a field comparison.
sub _getTestTitle {
  my ($obj, $field) = @_;
  return sprintf "'%s retrieved ok.'", $field->getName();
}
#-----------------------------------------------------------------------------
1;
| haakonsk/O2-Framework | lib/O2/Model/Target/Test.pm | Perl | mit | 8,196 |
# Acme::Nogizaka46::NojoAmi - profile data for one Nogizaka46 member;
# accessor behavior is inherited from Acme::Nogizaka46::Base.
package Acme::Nogizaka46::NojoAmi;
use strict;
use warnings;
use base qw(Acme::Nogizaka46::Base);
our $VERSION = 0.3;

# Return this member's profile as a flat key/value list, consumed by the
# base class.  *_ja fields hold Japanese text; birthday is converted to a
# DateTime-like object by the inherited _datetime_from_date helper.
sub info {
    return (
        first_name_ja => '愛未',
        family_name_ja => '能條',
        first_name_en => 'Ami',
        family_name_en => 'Nojo',
        nick => [qw(あみあみ ジョンソン じょーさん)],
        birthday => $_[0]->_datetime_from_date('1994-10-18'),
        blood_type => 'A',
        hometown => '神奈川',
        class => 1,
        center => undef,
        graduate_date => undef,  # undef: not graduated
    );
}
1;
package Seqplorer::Model::View;
use strict;
use warnings;
use Carp qw/croak/;
#use base qw/Mojo::Base/;
use Mojo::Base -base;
#use Mojo::Base;
use Mojo::Util qw(encode md5_sum);
use List::Util qw(first);
use Mango::BSON ':bson';
use Scalar::Util qw(looks_like_number);
has [qw/ app mongoDB /];
# Fetch a view definition by id from the "views" Mongo collection, enrich
# each column with metadata from the "<collection>_unique_tmp" collection,
# and return a hash ref shaped for the frontend.  Results are cached per
# view id.  The symbolic ids "variants"/"samples"/"projects" are aliased
# to fixed ObjectIDs.
sub get {
	my $self = shift;
	my $config = shift;
	my $viewCollection = $self->mongoDB->db->collection('views');
	my $viewId = $config->{'_id'};
	# Well-known views may be requested by name instead of ObjectID.
	if($viewId eq "variants"){
		$viewId = "50d1def9721c5a2c32000000";
	}elsif($viewId eq "samples"){
		$viewId = "50d1df0a721c5a1d31000000";
	}elsif($viewId eq "projects"){
		$viewId = "50d1df83721c5a0f33000000";
	}
	# Serve from the per-app cache when possible.
	my $cache = $self->app->cache;
	if( defined $cache->get($viewId) ){
		$self->app->log->debug("Cache hit for get view: $viewId");
		return $cache->get($viewId);
	}
	$self->app->log->debug("Cache miss for get view: $viewId");
	# 24 hex chars means an ObjectID; anything else is looked up as a
	# literal _id query.
	my $viewID_OID = ( $viewId =~ /^[0-9a-fA-F]{24}$/ ) ? Mango::BSON::ObjectID->new($viewId) : { '_id' => $viewId };
	$self->app->log->debug("Get view from mongo: $viewId = $viewID_OID => ".ref($viewID_OID));
	my $viewDoc = $viewCollection->find_one($viewID_OID);
	#$self->app->log->debug("na find_one");
	my %viewReturn;
	$viewReturn{'columns'}=();
	$viewReturn{'_id'}=$viewDoc->{'_id'};
	$viewReturn{'view'}=$viewDoc->{'_id'};
	$viewReturn{'dom'}=$viewDoc->{'dom'};
	$viewReturn{'restrict'}=$viewDoc->{'restrict'};
	my $collection = $viewDoc->{'collection'};
	$viewReturn{'collection'}=$collection;
	$viewReturn{'fields'}=();
	$viewReturn{'mongoid'}=();
	use Data::Dumper;
	#$self->app->log->debug("voor collection_names ".Dumper($self->mongoDB->db->collection_names));
	#my $existsUnique = grep { /${collection}_unique'/ } @{$self->mongoDB->db->collection_names};
	# NOTE(review): the existence check above is commented out and the flag
	# is hard-coded to true -- the _unique_tmp lookup below always runs.
	my $existsUnique=1;
	for my $column (@{$viewDoc->{'columns'}}) {
		#$self->app->log->debug("viewDoc is: ".Dumper($column ));
		#create dot notation from queryname
		if(defined $column->{'queryname'}){
			$column->{'dotnotation'} = join('.',@{$column->{'queryname'}});
		}
		#hash: key dotnotation to valus: queryname (query array)
		#->unused in frontend
		$viewReturn{'queryarray'}{$column->{'dotnotation'}}=$column->{'queryname'} if defined $column->{'dotnotation'};
		#create array of dotnotation names of all cols
		#->unused in frontend
		push @{$viewReturn{'fields'}}, $column->{'dotnotation'} if defined $column->{'dotnotation'};
		#add extra info from _unique collection
		if($existsUnique > 0 && defined $column->{'dotnotation'}){
			my $uniqueDoc = $self->mongoDB->db->collection($collection.'_unique_tmp')->find_one({'_id' => $column->{'dotnotation'} });
			if(defined $uniqueDoc){
				# Per-column search type, value list (when enumerable), and
				# mongo_id bookkeeping come from the unique collection.
				$column->{'searchtype'}=$uniqueDoc->{'type'};
				if(defined $uniqueDoc->{'values'} && scalar(@{$uniqueDoc->{'values'}}) > 1 ){
					$column->{'list'}=$uniqueDoc->{'values'};
				}
				if($uniqueDoc->{'type'} eq 'mongo_id'){
					push @{$viewReturn{'mongoid'}}, $uniqueDoc->{'querykeys'};
				}
			}
		}
		push @{$viewReturn{'columns'}}, $column;
	}
	# Fall back to the view document's own mongoid list when the unique
	# collection contributed none.
	if(!defined $viewReturn{'mongoid'} ){
		$viewReturn{'mongoid'}=$viewDoc->{'mongoid'}
	}
	$cache->set($viewId, \%viewReturn);
	if( defined $cache->get($viewId) ){
		$self->app->log->debug("Cache saved for get view: $viewId ");
	}
	return \%viewReturn;
}
# Create or update a view document.  When $data contains an _id the
# existing document is loaded first (so unspecified keys survive), then
# the editable keys are overwritten and the document saved.  Returns the
# saved document id.
sub edit {
	my $self = shift;
	my $data = shift;
	my $viewCollection = $self->mongoDB->db->collection('views');
	my $viewDoc = {} ;
	if( defined $data->{'_id'} ){
		$viewDoc->{'_id'} = $data->{'_id'};
		my $viewID_OID = Mango::BSON::ObjectID->new($viewDoc->{'_id'});
		$self->app->log->debug("Edit: Get view doc from mongo: ".$viewDoc->{'_id'}." = $viewID_OID => ".ref($viewID_OID));
		$viewDoc = $viewCollection->find_one($viewID_OID);
	}
	# Overwrite the editable fields wholesale from the request data.
	$viewDoc->{'columns'}=$data->{'columns'};
	$viewDoc->{'dom'}=$data->{'dom'};
	$viewDoc->{'restrict'}=$data->{'restrict'};
	$viewDoc->{'collection'}=$data->{'collection'};
	$viewDoc->{'projects'}=$data->{'projects'};
	$viewDoc->{'name'}=$data->{'name'};
	my $viewDocId = $viewCollection->save($viewDoc);
	$self->app->log->debug("Edit: Saved view doc to mongo: ".$viewDocId);
	return $viewDocId;
}
# Update a single key of a view document.
# NOTE(review): Mango's update() with a plain { $key => $value } document
# REPLACES the whole document rather than setting one field ($set is not
# used) -- confirm this is intended before relying on it.
sub editKey {
	my $self = shift;
	my $id = shift;
	my $key = shift;
	my $value = shift;
	my $filterCollection = $self->mongoDB->db->collection('views');
	my $filterDocId = $filterCollection->update({'_id' => $id},{ $key => $value });
	$self->app->log->debug("Edit: view $key key: ".$filterDocId);
	return $filterDocId;
}
# Not implemented yet: view deletion is a placeholder.
sub delete {
	my $self = shift;
	#placeholder
}
# Render a Mojo::Template with the values in $stashRef.  $templateArg is
# either a template-name hash (resolved via _getTemplate) or a literal
# template string.  Compiled templates are cached keyed on the md5 of the
# stash key names plus the template text, so repeat renders skip
# compilation.  Returns '' when the stash is empty.
sub _applyTemplate {
	my $self = shift;
	my $templateArg = shift;
	my $stashRef = shift || {};
	my $templateString = '';
	if(scalar(keys %{$stashRef}) < 1){
		$self->app->log->debug('no values passed to template, returning empty string');
		return '';
	}
	if(ref($templateArg) eq 'HASH'){
		$templateString = $self->_getTemplate($templateArg);
	}else{
		$templateString = $templateArg;
	}
	my $cache = $self->app->cache;
	# Cache key covers both the stash signature and the template text.
	my $templateKey = md5_sum( join('_',keys %{$stashRef}).md5_sum($templateString) );
	my $output;
	#my $mt = Mojo::Template->new($templateString);
	my $mt = $cache->get($templateKey);
	$mt ||= $cache->set($templateKey => Mojo::Template->new)->get($templateKey);
	#$self->app->log->debug("Rendering with stash = ".Dumper($stashRef));
	if($mt->compiled){
		$self->app->log->debug("Rendering cached template with new stash values.");
		$output = $mt->interpret(values %{$stashRef});
	}else{
		# First render: prepend a "my ($a, $b, ...) = @_;" line so each
		# stash key becomes a lexical inside the template, then compile.
		# NOTE(review): keys/values order must match between this prepend
		# and the interpret() call above -- both rely on the same hash
		# iteration order within one process.
		my $prepend='';
		# add Stash values to template
		$prepend = '% my( $';
		$prepend .= join(', $', grep {/^\w+$/} keys %{$stashRef} );
		$prepend .= ') = @_;'."\n";
		$output = $mt->render($prepend.$templateString, values %{$stashRef})
	}
	#$self->app->log->debug('rendered template = '.$templateString.' result = '.$output);
	return $output;
}
# Return the Mojo::Template source for a named rendering style.
# Supported names: default (value as-is), concat (join @$value with the
# 'option' separator), mergecolumn (paired @$value/@$mergevalue rows),
# object (one table per hash ref in @$value), list (one <li> per entry).
# Unknown names yield an empty string.
sub _getTemplate {
	my $self = shift;
	my $templateNameOpt = shift;
	my $output ='';
	my $templateName = $templateNameOpt->{'name'} || 'default';
	if( $templateName eq 'default'){
		# Render the single value as-is (removed an unused copy of the
		# 'option' separator that was never referenced here).
		$output = '<%= $value %>';
	}elsif( $templateName eq 'concat'){
		#pass $value array and concats with string defined in option
		my $concatWith = $templateNameOpt->{'option'} || '</br>';
		$output = '<%= join \''.$concatWith.'\', @$value %>';
	}elsif( $templateName eq 'mergecolumn'){
		#has $value array and corresponding $mergevalue array
		# BUGFIX: the <li> line interpolated $mval while the loop declares
		# $mVal; under Mojo::Template's strict lexicals that aborts the
		# render with an undeclared-variable error.
		$output = <<'EOF';
<ul>
% my $i = 1;
% for my $val (@$value) {
% my $mVal = shift @$mergevalue;
% my $class = $i % 2 ? 'sub_odd_sample' : 'sub_even_sample';
% $i++;
<li class='<%= $class %>'><%= $val %>: <%= $mVal %></li>
% }
</ul>
EOF
	}elsif( $templateName eq 'object'){
		$output = <<'EOF';
<ul>
% my $i = 1;
% for my $object (@$value) {
% my $class = $i % 2 ? 'sub_odd_sample' : 'sub_even_sample';
<li class='<%= $class %>'>
<table>
% for my $oKey (keys %{$object}) {
<tr><th><%= $oKey %></th><td><%= $object->{$oKey} %></td></tr>
% }
</table>
</li>
% $i++;
% }
</ul>
EOF
	}elsif( $templateName eq 'list'){
		$output = <<'EOF';
<ul>
% my $i = 1;
% for my $line (@$value) {
% my $class = $i % 2 ? 'sub_odd_sample' : 'sub_even_sample';
<li class='<%= $class %>'>
% if(ref($line) eq 'ARRAY'){
<%= join(' ',@$line) %>
% }else{
<%= $line %>
%}
</li>
% $i++;
% }
</ul>
EOF
	}
	return $output;
}
1;
__END__ | brdwilde/Seqplorer | api/lib/Seqplorer/Model/View.pm | Perl | mit | 7,522 |
/** <module> BSON manipulation.
*
* BSON document manipulation and conversion to-and-from bytes.
*
* @see <http://bsonspec.org/>
*/
:- module(_, [
doc_bytes/2,
docs_bytes/2,
version/1,
spec_version/1
% And see reexports below.
]).
:- reexport([
bson_doc
]).
:- include(bson(include/common)).
% Internal modules.
:- use_module(bson_util, []).
:- use_module(bson_decoder, []).
:- use_module(bson_encoder, []).
:- use_module(bson_bits, []).
:- use_module(bson_unicode, []).
% :- use_module(bson(bson_format), []). % Useful during dev.
%% version(?Version) is semidet.
%
% True if Version is a list representing the major, minor
% and patch version numbers of this library.
version([1,0,0]).
%% spec_version(?Version) is semidet.
%
% True if Version is a list representing the major and minor
% version numbers of the implemented BSON specification.
spec_version([1,0]).
%% docs_bytes(+Docs, ?Bytes) is semidet.
%% docs_bytes(?Docs, +Bytes) is semidet.
%
% True if Bytes is the flat-list BSON byte-encoding of all the
% documents in the list Docs.
%
% @param Docs is a list of key-value pair lists.
% @param Bytes is a list of bytes (in 0..255).
%
% @throws bson_error(Description, EnvList)
% Decoding direction: Bytes is bound, so parse it into documents.
docs_bytes(Docs, Bytes) :-
    core:nonvar(Bytes),
    !,
    bson_decoder:bytes_to_docs(Bytes, Docs).
% Encoding direction: Docs is bound, so serialize it into bytes.
docs_bytes(Docs, Bytes) :-
    core:nonvar(Docs),
    !,
    bson_encoder:docs_to_bytes(Docs, Bytes).
% Neither argument bound: no direction can be chosen.
docs_bytes(_Docs, _Bytes) :-
    throw(bson_error('at least one arg must be instantiated', [])).
%% doc_bytes(+Doc, ?Bytes) is semidet.
%% doc_bytes(?Doc, +Bytes) is semidet.
%
% True if Bytes is the BSON byte-encoding of Doc.
%
% @param Doc is a list of key-value pairs.
% @param Bytes is a list of bytes (in 0..255).
%
% @throws bson_error(Description, EnvList)
% Decoding direction: Bytes is bound, so parse it into one document.
doc_bytes(Doc, Bytes) :-
    core:nonvar(Bytes),
    !,
    bson_decoder:bytes_to_doc(Bytes, Doc).
% Encoding direction: Doc is bound, so serialize it into bytes.
doc_bytes(Doc, Bytes) :-
    core:nonvar(Doc),
    !,
    bson_encoder:doc_to_bytes(Doc, Bytes).
% Neither argument bound: no direction can be chosen.
doc_bytes(_Doc, _Bytes) :-
    throw(bson_error('at least one arg must be instantiated', [])).
| khueue/prolog-bson | src/bson.pl | Perl | mit | 2,122 |
#!/usr/bin/perl

# Download every "Complete Genome" archaea assembly listed in NCBI RefSeq's
# assembly_summary.txt and rewrite each FASTA header to carry the assembly's
# NCBI taxonomy id in Kraken's "kraken:taxid" format.
# Fixes over the previous revision: strictures enabled, 3-arg lexical open
# (was a bareword 2-arg open), checked mkdir/chdir, and a defined() guard on
# the assembly_level column.
use strict;
use warnings;

use File::Basename;
use Bio::SeqIO;
use Bio::PrimarySeq;

# Work inside an "archaea" directory, creating it on first run.
unless (-d "archaea") {
    mkdir "archaea" or die "mkdir archaea: $!";
}
chdir "archaea" or die "chdir archaea: $!";

# Always fetch a fresh copy of the assembly summary.
if (-e "assembly_summary.txt") {
    unlink("assembly_summary.txt");
}
system("wget -q ftp://ftp.ncbi.nlm.nih.gov/genomes/refseq/archaea/assembly_summary.txt") == 0
    or die "failed: $?";
unless (-e "assembly_summary.txt") {
    warn "Unable to download assembly_summary.txt\n";
    exit;
}

# Parse the summary file.
open my $summary, '<', 'assembly_summary.txt' or die "Unable to open file: $!\n";
my $header = <$summary>;    # discard the title line (as before)

while (my $row = <$summary>) {
    chomp $row;
    my @d = split /\t/, $row;
    # Column 11 = assembly_level, 19 = FTP path, 5 = taxid.
    next unless defined $d[11] && $d[11] eq "Complete Genome";

    my $ftppath = $d[19];
    # The unique assembly name is the last component of the FTP path.
    my $aname = basename $ftppath;
    my $fullpath = "$ftppath" . "/" . $aname . "_genomic.fna.gz";

    # Download and decompress the genome, skipping on any failure.
    system("wget -q $fullpath") == 0
        or die "failed: $?";
    unless (-e "${aname}_genomic.fna.gz") {
        warn "We don't have ${aname}_genomic.fna.gz, did download fail?";
        next;
    }
    system("gunzip ${aname}_genomic.fna.gz") == 0
        or die "failed: $?";
    unless (-e "${aname}_genomic.fna") {
        warn "We don't have ${aname}_genomic.fna, did gunzip fail?";
        next;
    }

    my $taxid = $d[5];

    # Re-emit every sequence with "|kraken:taxid|<taxid>" appended to its
    # unique id, writing a new *.tax.fna file.
    my $in  = Bio::SeqIO->new(-file => "${aname}_genomic.fna", -format => 'fasta');
    my $out = Bio::SeqIO->new(-file => ">${aname}_genomic.tax.fna", -format => 'fasta');
    while (my $seq = $in->next_seq()) {
        my $id = $seq->primary_id . '|' . "kraken:taxid" . '|' . $taxid;
        my $newseq = Bio::PrimarySeq->new(
            -id   => $id,
            -seq  => $seq->seq,
            -desc => $seq->description,
        );
        $out->write_seq($newseq);
    }

    # Keep only the re-headered copy.
    unlink("${aname}_genomic.fna");
}
close $summary;
#!/usr/bin/perl
# Author: Todd Larason <jtl@molehill.org>
# $XFree86: xc/programs/xterm/vttests/256colors2.pl,v 1.2 2002/03/26 01:46:43 dickey Exp $

# Redefines xterm's 256-color palette via OSC 4 ("]4;<index>;rgb:rr/gg/bb")
# escape sequences, then displays it using SGR 48;5;<index> background
# attributes: system colors 0-15, a 6x6x6 cube at 16-231, grays at 232-255.

# use the resources for colors 0-15 - usually more-or-less a
# reproduction of the standard ANSI colors, but possibly more
# pleasing shades

# colors 16-231 are a 6x6x6 color cube
for ($red = 0; $red < 6; $red++) {
    for ($green = 0; $green < 6; $green++) {
	for ($blue = 0; $blue < 6; $blue++) {
	    # Component levels: 0, then 95..255 in steps of 40 (xterm's ramp).
	    printf("\x1b]4;%d;rgb:%2.2x/%2.2x/%2.2x\x1b\\",
		   16 + ($red * 36) + ($green * 6) + $blue,
		   ($red ? ($red * 40 + 55) : 0),
		   ($green ? ($green * 40 + 55) : 0),
		   ($blue ? ($blue * 40 + 55) : 0));
	}
    }
}

# colors 232-255 are a grayscale ramp, intentionally leaving out
# black and white
for ($gray = 0; $gray < 24; $gray++) {
    $level = ($gray * 10) + 8;
    printf("\x1b]4;%d;rgb:%2.2x/%2.2x/%2.2x\x1b\\",
	   232 + $gray, $level, $level, $level);
}

# display the colors

# first the system ones:
print "System colors:\n";
for ($color = 0; $color < 8; $color++) {
    print "\x1b[48;5;${color}m  ";
}
print "\x1b[0m\n";
for ($color = 8; $color < 16; $color++) {
    print "\x1b[48;5;${color}m  ";
}
print "\x1b[0m\n\n";

# now the color cube
print "Color cube, 6x6x6:\n";
for ($green = 0; $green < 6; $green++) {
    for ($red = 0; $red < 6; $red++) {
	for ($blue = 0; $blue < 6; $blue++) {
	    $color = 16 + ($red * 36) + ($green * 6) + $blue;
	    print "\x1b[48;5;${color}m  ";
	}
	print "\x1b[0m ";
    }
    print "\n";
}

# now the grayscale ramp
print "Grayscale ramp:\n";
for ($color = 232; $color < 256; $color++) {
    print "\x1b[48;5;${color}m  ";
}
print "\x1b[0m\n";
=pod
=head1 NAME
i2d_PKCS7_bio_stream - output PKCS7 structure in BER format.
=head1 SYNOPSIS
#include <openssl/pkcs7.h>
int i2d_PKCS7_bio_stream(BIO *out, PKCS7 *p7, BIO *data, int flags);
=head1 DESCRIPTION
i2d_PKCS7_bio_stream() outputs a PKCS7 structure in BER format.
It is otherwise identical to the function SMIME_write_PKCS7().
=head1 NOTES
This function is effectively a version of the d2i_PKCS7_bio() supporting
streaming.
=head1 BUGS
The prefix "i2d" is arguably wrong because the function outputs BER format.
=head1 RETURN VALUES
i2d_PKCS7_bio_stream() returns 1 for success or 0 for failure.
=head1 SEE ALSO
L<ERR_get_error(3)>, L<PKCS7_sign(3)>,
L<PKCS7_verify(3)>, L<PKCS7_encrypt(3)>
L<PKCS7_decrypt(3)>,
L<SMIME_write_PKCS7(3)>,
L<PEM_write_bio_PKCS7_stream(3)>
=head1 HISTORY
i2d_PKCS7_bio_stream() was added to OpenSSL 1.0.0
=cut
| vbloodv/blood | extern/openssl.orig/doc/crypto/i2d_PKCS7_bio_stream.pod | Perl | mit | 870 |
package Options_States;
use strict;
use warnings;
use Options_Base;
our @ISA = qw(Options_Base);
# Build the dropdown options: a hash ref keyed 1..8, each value holding the
# state's base postcode (id) and abbreviation (desc).
# BUGFIX: the options were previously numbered while iterating plain
# "keys %States", whose order is nondeterministic per process, so the
# index -> state mapping changed between runs.  Postcodes are now sorted
# numerically, giving a stable ordering (NT first, TAS last).
sub getOptions {
    my %States = (
        800 => 'NT',
        2000 => 'NSW',
        2600 => 'ACT',
        3000 => 'VIC',
        4000 => 'QLD',
        5000 => 'SA',
        6000 => 'WA',
        7000 => 'TAS',
    );

    my %Options = ();
    my $count = 0;
    foreach my $postcode (sort { $a <=> $b } keys %States) {
        $count++;
        $Options{$count} = {
            id => $postcode,
            desc => $States{$postcode},
        };
    }
    return \%Options;
}
# Name attribute of the generated <select> element.
sub getSelectName {
    return 'stateID';
}

# DOM id of the generated <select> element.
sub getSelectID {
    return 'd_stateID';
}

# Human-readable label for the dropdown.
sub getSelectDesc {
    return 'state';
}

# Option key pre-selected when no state has been chosen.
sub getDefaultValue {
    return 0;
}
1;
| facascante/slimerp | fifs/web/gendropdown/Options_States.pm | Perl | mit | 736 |
#!/usr/bin/perl -w
use strict;

# For every subdirectory of the current directory containing
# <subdir>/<subdir>.fastq, run hisat2 against the human index and write the
# alignment to <subdir>/Hs4St/aligned.sam.  Directories that already have an
# Hs4St/ output directory are skipped.
# Fixes over the previous revision: lexical dirhandle + closedir (was a
# bareword DIR handle, never closed), checked chdir (an unchecked failing
# "chdir $subdir" previously made the later "chdir '..'" walk OUT of the
# working tree), and a diagnostic on hisat2 failure.

my $genomeHisat2 = '/opt/index/hisat2_genome/hs/genome';
my $splices = '/opt/index/hisat2_genome/hs/splicesites.txt';
my $outdir = 'Hs4St';
my $cpu = 4;

opendir(my $dh, '.') or die "$!\n";
while (my $subdir = readdir($dh)) {
    next unless -d $subdir;
    next if $subdir eq '.';
    next if $subdir eq '..';
    next if (-e "$subdir/$outdir/");    # already aligned

    my $read1 = $subdir . ".fastq";
    next unless (-e "$subdir/$read1");

    print "Analyzing $subdir\n";
    chdir($subdir) or die "chdir $subdir: $!\n";
    mkdir $outdir;
    system("hisat2 --dta -p $cpu -x $genomeHisat2 -U $read1 --known-splicesite-infile $splices -S $outdir/aligned.sam") == 0
        or warn "hisat2 failed for $subdir: $?\n";
    chdir('..') or die "chdir ..: $!\n";
}
closedir $dh;
#!/usr/bin/perl
package Saklient::Cloud::Resources::CommonServiceProvider;
use strict;
use warnings;
use Carp;
use Error qw(:try);
use Data::Dumper;
use Saklient::Errors::SaklientException;
use Saklient::Cloud::Client;
use Saklient::Cloud::Resources::Resource;
use base qw(Saklient::Cloud::Resources::Resource);
#** @class Saklient::Cloud::Resources::CommonServiceProvider
#
# @brief License type information. (sic)
# NOTE(review): translated from the generated Japanese doc, which literally
# says "license type information" -- likely a generator copy/paste; the class
# clearly models a read-only common-service-provider resource.
#*
#** @var private string Saklient::Cloud::Resources::CommonServiceProvider::$m_id
#
# @brief ID
#*
my $m_id;
#** @var private string Saklient::Cloud::Resources::CommonServiceProvider::$m_clazz
#
# @brief Class {@link ECommonServiceClass}
#*
my $m_clazz;
#** @var private string Saklient::Cloud::Resources::CommonServiceProvider::$m_name
#
# @brief Name
#*
my $m_name;
#** @method private string _api_path
#
# @private
# @brief REST API base path for this resource type.
#*
sub _api_path {
    my $self = shift;
    my $_argnum = scalar @_;
    return "/commonserviceprovider";
}
#** @method private string _root_key
#
# @private
# @brief JSON root key for a single resource in API responses.
#*
sub _root_key {
    my $self = shift;
    my $_argnum = scalar @_;
    return "CommonServiceProvider";
}
#** @method private string _root_key_m
#
# @private
# @brief JSON root key for resource collections in API responses.
#*
sub _root_key_m {
    my $self = shift;
    my $_argnum = scalar @_;
    return "CommonServiceProviders";
}
#** @method public string _class_name
#
# @private
# @brief Saklient class name used for dynamic dispatch.
#*
sub _class_name {
    my $self = shift;
    my $_argnum = scalar @_;
    return "CommonServiceProvider";
}
#** @method public string _id
#
# @private
# @brief Generic accessor used by the base Resource class to get this ID.
#*
sub _id {
    my $self = shift;
    my $_argnum = scalar @_;
    return $self->get_id();
}
#** @method public void new ($client, $obj, $wrapped)
#
# @ignore @param {Saklient::Cloud::Client} client
# @param $obj raw API representation to deserialize into this instance
# @param bool $wrapped whether $obj is wrapped in the API root key
# NOTE(review): argument validation runs after SUPER::new has already
# consumed $client; this mirrors the code generator's output.
#*
sub new {
    my $class = shift;
    my $self;
    my $_argnum = scalar @_;
    my $client = shift;
    my $obj = shift;
    my $wrapped = shift || (0);
    $self = $class->SUPER::new($client);
    Saklient::Util::validate_arg_count($_argnum, 2);
    Saklient::Util::validate_type($client, "Saklient::Cloud::Client");
    Saklient::Util::validate_type($wrapped, "bool");
    $self->api_deserialize($obj, $wrapped);
    return $self;
}
#** @var private bool Saklient::Cloud::Resources::CommonServiceProvider::$n_id
#
# @brief dirty flag for the id field (1 = modified since last (de)serialize)
#*
my $n_id = 0;
#** @method private string get_id
#
# @brief (This method is generated in Translator_default#buildImpl)
#*
sub get_id {
    my $self = shift;
    my $_argnum = scalar @_;
    return $self->{'m_id'};
}
#** @method public string id ()
#
# @brief ID (read-only; assignment raises SaklientException)
#*
sub id {
    if (1 < scalar(@_)) {
        my $ex = new Saklient::Errors::SaklientException('non_writable_field', "Non-writable field: Saklient::Cloud::Resources::CommonServiceProvider#id");
        throw $ex;
    }
    return $_[0]->get_id();
}
#** @var private bool Saklient::Cloud::Resources::CommonServiceProvider::$n_clazz
#
# @brief dirty flag for the clazz field
#*
my $n_clazz = 0;
#** @method private string get_clazz
#
# @brief (This method is generated in Translator_default#buildImpl)
#*
sub get_clazz {
    my $self = shift;
    my $_argnum = scalar @_;
    return $self->{'m_clazz'};
}
#** @method public string clazz ()
#
# @brief Class {@link ECommonServiceClass} (read-only)
#*
sub clazz {
    if (1 < scalar(@_)) {
        my $ex = new Saklient::Errors::SaklientException('non_writable_field', "Non-writable field: Saklient::Cloud::Resources::CommonServiceProvider#clazz");
        throw $ex;
    }
    return $_[0]->get_clazz();
}
#** @var private bool Saklient::Cloud::Resources::CommonServiceProvider::$n_name
#
# @brief dirty flag for the name field
#*
my $n_name = 0;
#** @method private string get_name
#
# @brief (This method is generated in Translator_default#buildImpl)
#*
sub get_name {
    my $self = shift;
    my $_argnum = scalar @_;
    return $self->{'m_name'};
}
#** @method public string name ()
#
# @brief Name (read-only)
#*
sub name {
    if (1 < scalar(@_)) {
        my $ex = new Saklient::Errors::SaklientException('non_writable_field', "Non-writable field: Saklient::Cloud::Resources::CommonServiceProvider#name");
        throw $ex;
    }
    return $_[0]->get_name();
}
#** @method private void api_deserialize_impl ($r)
#
# @brief (This method is generated in Translator_default#buildImpl)
# Populates the fields from an API response hash $r. An undefined $r marks
# the object as new; any missing key marks the object as incomplete.
#*
sub api_deserialize_impl {
    my $self = shift;
    my $_argnum = scalar @_;
    my $r = shift;
    Saklient::Util::validate_arg_count($_argnum, 1);
    $self->{'is_new'} = !defined($r);
    if ($self->{'is_new'}) {
        $r = {};
    }
    $self->{'is_incomplete'} = 0;
    if (Saklient::Util::exists_path($r, "ID")) {
        $self->{'m_id'} = !defined(Saklient::Util::get_by_path($r, "ID")) ? undef : "" . Saklient::Util::get_by_path($r, "ID");
    }
    else {
        $self->{'m_id'} = undef;
        $self->{'is_incomplete'} = 1;
    }
    $self->{'n_id'} = 0;
    if (Saklient::Util::exists_path($r, "Class")) {
        $self->{'m_clazz'} = !defined(Saklient::Util::get_by_path($r, "Class")) ? undef : "" . Saklient::Util::get_by_path($r, "Class");
    }
    else {
        $self->{'m_clazz'} = undef;
        $self->{'is_incomplete'} = 1;
    }
    $self->{'n_clazz'} = 0;
    if (Saklient::Util::exists_path($r, "Name")) {
        $self->{'m_name'} = !defined(Saklient::Util::get_by_path($r, "Name")) ? undef : "" . Saklient::Util::get_by_path($r, "Name");
    }
    else {
        $self->{'m_name'} = undef;
        $self->{'is_incomplete'} = 1;
    }
    $self->{'n_name'} = 0;
}
#** @method private any api_serialize_impl ($withClean)
#
# @ignore@param {bool} withClean
# @brief Builds the API representation. With $withClean set, all fields are
# serialized; otherwise only fields flagged dirty (n_*) are included.
#*
sub api_serialize_impl {
    my $self = shift;
    my $_argnum = scalar @_;
    my $withClean = shift || (0);
    Saklient::Util::validate_type($withClean, "bool");
    my $ret = {};
    if ($withClean || $self->{'n_id'}) {
        Saklient::Util::set_by_path($ret, "ID", $self->{'m_id'});
    }
    if ($withClean || $self->{'n_clazz'}) {
        Saklient::Util::set_by_path($ret, "Class", $self->{'m_clazz'});
    }
    if ($withClean || $self->{'n_name'}) {
        Saklient::Util::set_by_path($ret, "Name", $self->{'m_name'});
    }
    return $ret;
}
1;
| sakura-internet/saklient.perl | lib/Saklient/Cloud/Resources/CommonServiceProvider.pm | Perl | mit | 5,722 |
#
package IO;
# Empty import list: we only need XSLoader::load, nothing exported.
use XSLoader ();
use Carp;
use strict;
use warnings;
our $VERSION = "1.40";
# Load IO's compiled (XS) half; passing $VERSION lets XSLoader detect a
# mismatched shared object at load time.
XSLoader::load 'IO', $VERSION;
# Loads the requested IO::* modules (or the historical default set when
# called with no arguments, which is deprecated).
sub import {
    shift;    # discard the package name

    warnings::warnif('deprecated', qq{Parameterless "use IO" deprecated})
        if @_ == 0;

    my @modules = @_ ? @_ : qw(Handle Seekable File Pipe Socket Dir);

    # Drop a trailing "." from @INC for the duration of the requires, so the
    # IO::* modules cannot be picked up from the current working directory.
    local @INC = @INC;
    pop @INC if $INC[-1] eq '.';

    my $code = '';
    for my $module (@modules) {
        my ($name) = $module =~ /(\w+)/;   # sanitize: keep only the first word
        $code .= "require IO::$name;\n";
    }
    eval $code or croak $@;
}

1;
__END__
=head1 NAME
IO - load various IO modules
=head1 SYNOPSIS
use IO qw(Handle File); # loads IO modules, here IO::Handle, IO::File
use IO; # DEPRECATED
=head1 DESCRIPTION
C<IO> provides a simple mechanism to load several of the IO modules
in one go. The IO modules belonging to the core are:
IO::Handle
IO::Seekable
IO::File
IO::Pipe
IO::Socket
IO::Dir
IO::Select
IO::Poll
Some other IO modules don't belong to the perl core but can be loaded
as well if they have been installed from CPAN. You can discover which
ones exist by searching for "IO::" on L<https://metacpan.org/> (the
successor to the now-retired search.cpan.org).
For more information on any of these modules, please see its respective
documentation.
=head1 DEPRECATED
use IO; # loads all the modules listed below
The loaded modules are IO::Handle, IO::Seekable, IO::File, IO::Pipe,
IO::Socket, IO::Dir. You should instead explicitly import the IO
modules you want.
=cut
| operepo/ope | client_tools/svc/rc/usr/lib/perl5/core_perl/IO.pm | Perl | mit | 1,449 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V8::Common::TargetImpressionShareSimulationPointList;

use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);

use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;

# Container for a list of Target Impression Share simulation points.
# Only the "points" field from the constructor args is retained.
sub new {
  my ($class, $args) = @_;

  my $self = {points => $args->{points}};

  # Strip fields that were never assigned so the JSON payload stays concise.
  remove_unassigned_fields($self, $args);

  return bless $self, $class;
}

1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V8/Common/TargetImpressionShareSimulationPointList.pm | Perl | apache-2.0 | 1,040 |
# Auto-generated Paws "shape" class describing a single IPv6 address of an
# EC2 instance; used both as a call argument and inside API results.
package Paws::EC2::InstanceIpv6Address;
use Moose;
# Serialized on the wire as "ipv6Address"; the NameInRequest trait carries
# the request-parameter spelling expected by the EC2 API.
has Ipv6Address => (is => 'ro', isa => 'Str', request_name => 'ipv6Address', traits => ['NameInRequest']);
1;
### main pod documentation begin ###
=head1 NAME
Paws::EC2::InstanceIpv6Address
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::EC2::InstanceIpv6Address object:
$service_obj->Method(Att1 => { Ipv6Address => $value, ..., Ipv6Address => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::EC2::InstanceIpv6Address object:
$result = $service_obj->Method(...);
$result->Att1->Ipv6Address
=head1 DESCRIPTION
This class has no description
=head1 ATTRIBUTES
=head2 Ipv6Address => Str
The IPv6 address.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::EC2>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/EC2/InstanceIpv6Address.pm | Perl | apache-2.0 | 1,322 |
# Auto-generated Paws "shape" class: the details an action worker reports
# back about artifact processing (see the trailing POD for the contract).
package Paws::CodePipeline::ExecutionDetails;
use Moose;
# ID identifying this job worker in external systems (e.g. AWS CodeDeploy).
has ExternalExecutionId => (is => 'ro', isa => 'Str', request_name => 'externalExecutionId', traits => ['NameInRequest']);
# Work completed on the action, on a 0..100 percent scale.
has PercentComplete => (is => 'ro', isa => 'Int', request_name => 'percentComplete', traits => ['NameInRequest']);
# Free-form summary of the action's current status.
has Summary => (is => 'ro', isa => 'Str', request_name => 'summary', traits => ['NameInRequest']);
1;
### main pod documentation begin ###
=head1 NAME
Paws::CodePipeline::ExecutionDetails
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::CodePipeline::ExecutionDetails object:
$service_obj->Method(Att1 => { ExternalExecutionId => $value, ..., Summary => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::CodePipeline::ExecutionDetails object:
$result = $service_obj->Method(...);
$result->Att1->ExternalExecutionId
=head1 DESCRIPTION
The details of the actions taken and results produced on an artifact as
it passes through stages in the pipeline.
=head1 ATTRIBUTES
=head2 ExternalExecutionId => Str
The system-generated unique ID of this action used to identify this job
worker in any external systems, such as AWS CodeDeploy.
=head2 PercentComplete => Int
The percentage of work completed on the action, represented on a scale
of zero to one hundred percent.
=head2 Summary => Str
The summary of the current status of the actions.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::CodePipeline>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/CodePipeline/ExecutionDetails.pm | Perl | apache-2.0 | 2,019 |
# SOAP::WSDL-generated request element class for CustomerFeedService.get.
# Regenerate from the WSDL rather than editing by hand.
package Google::Ads::AdWords::v201402::CustomerFeedService::get;
use strict;
use warnings;
{ # BLOCK to scope variables
# XML namespace of the "get" element.
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201402' }
# Register element metadata with the SOAP::WSDL typelib machinery:
# element name "get", not nillable, default occurrence constraints, no ref.
__PACKAGE__->__set_name('get');
__PACKAGE__->__set_nillable();
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();
use base qw(
    SOAP::WSDL::XSD::Typelib::Element
    Google::Ads::SOAP::Typelib::ComplexType
);
# This element carries no XML attributes.
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;
sub __get_attr_class {
    return $XML_ATTRIBUTE_CLASS;
}
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);
{ # BLOCK to scope variables
# Inside-out attribute storage for the single "selector" property.
my %selector_of :ATTR(:get<selector>);
# _factory(order, storage, types, xml-names): wires accessors and the
# (de)serialization of the "selector" child element.
__PACKAGE__->_factory(
    [ qw(        selector
 ) ],
    {
        'selector' => \%selector_of,
    },
    {
        'selector' => 'Google::Ads::AdWords::v201402::Selector',
    },
    {
        'selector' => 'selector',
    }
);
} # end BLOCK
} # end of BLOCK
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201402::CustomerFeedService::get
=head1 DESCRIPTION
Perl data type class for the XML Schema defined element
get from the namespace https://adwords.google.com/api/adwords/cm/v201402.
Returns a list of customer feeds that meet the selector criteria. @param selector Determines which customer feeds to return. If empty, all customer feeds are returned. @return The list of customer feeds. @throws ApiException Indicates a problem with the request.
=head1 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * selector
$element->set_selector($data);
$element->get_selector();
=back
=head1 METHODS
=head2 new
my $element = Google::Ads::AdWords::v201402::CustomerFeedService::get->new($data);
Constructor. The following data structure may be passed to new():
{
selector => $a_reference_to, # see Google::Ads::AdWords::v201402::Selector
},
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| gitpan/GOOGLE-ADWORDS-PERL-CLIENT | lib/Google/Ads/AdWords/v201402/CustomerFeedService/get.pm | Perl | apache-2.0 | 2,033 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package hardware::server::dell::openmanage::snmp::mode::components::battery;
use strict;
use warnings;
# batteryStatus integer codes mapped to human-readable severities, as
# defined in Dell's MIB '10892.mib'. check() matches these strings against
# the plugin's severity table.
my %map_status = (
    1 => 'other',
    2 => 'unknown',
    3 => 'ok',
    4 => 'nonCritical',
    5 => 'critical',
    6 => 'nonRecoverable',
);
# batteryReading codes; only used for the informational long output.
my %map_reading = (
    1 => 'Predictive Failure',
    2 => 'Failed',
    4 => 'Presence Detected',
);
# In MIB '10892.mib'
# Column OIDs of the batteryTable consumed by this component.
my $mapping = {
    batteryStatus => { oid => '.1.3.6.1.4.1.674.10892.1.600.50.1.5', map => \%map_status },
    batteryReading => { oid => '.1.3.6.1.4.1.674.10892.1.600.50.1.6', map => \%map_reading },
    batteryLocationName => { oid => '.1.3.6.1.4.1.674.10892.1.600.50.1.7' },
};
my $oid_batteryTableEntry = '.1.3.6.1.4.1.674.10892.1.600.50.1';
# Queue the SNMP walk of the batteryTable columns used by check().
sub load {
    my $self = shift;

    push @{$self->{request}}, {
        oid   => $oid_batteryTableEntry,
        start => $mapping->{batteryStatus}->{oid},
        end   => $mapping->{batteryLocationName}->{oid}
    };
}
# Walk every batteryTable row, report each battery's status in the long
# output, and raise the plugin severity for any non-OK battery.
sub check {
    my $self = shift;

    $self->{output}->output_add(long_msg => "Checking batteries");
    $self->{components}->{battery} = { name => 'batteries', total => 0, skip => 0 };
    return if ($self->check_filter(section => 'battery'));

    my $table = $self->{results}->{$oid_batteryTableEntry};
    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %$table)) {
        next if ($oid !~ /^$mapping->{batteryStatus}->{oid}\.(.*)$/);
        my $instance = $1;
        my $entry = $self->{snmp}->map_instance(mapping => $mapping, results => $table, instance => $instance);

        next if ($self->check_filter(section => 'battery', instance => $instance));
        $self->{components}->{battery}->{total}++;

        $self->{output}->output_add(
            long_msg => sprintf(
                "Battery '%s' status is '%s' [instance: %s, reading: %s, location: %s]",
                $instance, $entry->{batteryStatus}, $instance, $entry->{batteryReading}, $entry->{batteryLocationName}
            )
        );
        my $severity = $self->get_severity(label => 'default', section => 'battery', value => $entry->{batteryStatus});
        if (!$self->{output}->is_status(value => $severity, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(
                severity  => $severity,
                short_msg => sprintf("Battery '%s' status is '%s'", $instance, $entry->{batteryStatus})
            );
        }
    }
}
1;
| Sims24/centreon-plugins | hardware/server/dell/openmanage/snmp/mode/components/battery.pm | Perl | apache-2.0 | 3,195 |
#
# Copyright 2018 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package centreon::common::violin::snmp::mode::components::lfc;
# NOTE(review): inheriting centreon::plugins::mode from a ::components::
# helper module is unusual (sibling component files typically don't) --
# confirm this base class is intentional.
use base qw(centreon::plugins::mode);
use strict;
use warnings;
# Violin (enterprise .35897) localTargetFc table and the columns used below.
my $oid_localTargetFcEntry = '.1.3.6.1.4.1.35897.1.2.1.6.1';
my $oid_wwn = '.1.3.6.1.4.1.35897.1.2.1.6.1.2';
# check() treats value 2 as "not enabled" and skips the port.
my $oid_enable = '.1.3.6.1.4.1.35897.1.2.1.6.1.3';
my $oid_portState = '.1.3.6.1.4.1.35897.1.2.1.6.1.7';
# Queue the SNMP walk of the entire localTargetFcEntry table for check().
sub load {
    my $self = shift;

    push @{$self->{request}}, { oid => $oid_localTargetFcEntry };
}
# Report the state of every enabled local FC port and raise the plugin
# severity for any port whose state maps to a non-OK status.
sub check {
    my $self = shift;

    $self->{output}->output_add(long_msg => "Checking local fc");
    $self->{components}->{lfc} = { name => 'local fc', total => 0, skip => 0 };
    return if ($self->check_filter(section => 'lfc'));

    my $table = $self->{results}->{$oid_localTargetFcEntry};
    foreach my $oid (keys %$table) {
        next if ($oid !~ /^$oid_wwn\.(.*)$/);
        my $index  = $1;
        my $wwn    = $table->{$oid};
        my $enable = $table->{$oid_enable . '.' . $index};
        my $state  = $table->{$oid_portState . '.' . $index};

        # Disabled ports (enable == 2) are not monitored.
        if ($enable == 2) {
            $self->{output}->output_add(long_msg => sprintf("Skipping instance '$wwn' (not enable)"));
            next;
        }
        next if ($self->check_filter(section => 'lfc', instance => $wwn));
        $self->{components}->{lfc}->{total}++;

        $self->{output}->output_add(long_msg => sprintf("Local FC '%s' is %s.", $wwn, $state));
        my $severity = $self->get_severity(section => 'lfc', value => $state);
        if (!$self->{output}->is_status(value => $severity, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity  => $severity,
                                        short_msg => sprintf("Local FC '%s' is %s", $wwn, $state));
        }
    }
}
1;
| wilfriedcomte/centreon-plugins | centreon/common/violin/snmp/mode/components/lfc.pm | Perl | apache-2.0 | 2,592 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::netapp::ontap::restapi::custom::api;
use strict;
use warnings;
use centreon::plugins::http;
use JSON::XS;
# Constructor for the ONTAP REST API custom mode. Registers the connection
# command-line options and builds the shared HTTP client.
sub new {
    my ($class, %options) = @_;
    my $self = bless {}, $class;

    # 'output' is needed even to report errors, so fall back to a raw
    # print + exit 3 (UNKNOWN) when it is missing.
    if (!defined($options{output})) {
        print "Class Custom: Need to specify 'output' argument.\n";
        exit 3;
    }
    if (!defined($options{options})) {
        $options{output}->add_option_msg(short_msg => "Class Custom: Need to specify 'options' argument.");
        $options{output}->option_exit();
    }

    # Declare the CLI options this client understands, unless the caller
    # asked to skip registration via 'noptions'.
    if (!defined($options{noptions})) {
        $options{options}->add_options(arguments => {
            'api-username:s'         => { name => 'api_username' },
            'api-password:s'         => { name => 'api_password' },
            'hostname:s'             => { name => 'hostname' },
            'port:s'                 => { name => 'port' },
            'proto:s'                => { name => 'proto' },
            'timeout:s'              => { name => 'timeout' },
            'unknown-http-status:s'  => { name => 'unknown_http_status' },
            'warning-http-status:s'  => { name => 'warning_http_status' },
            'critical-http-status:s' => { name => 'critical_http_status' }
        });
    }
    $options{options}->add_help(package => __PACKAGE__, sections => 'REST API OPTIONS', once => 1);

    $self->{output} = $options{output};
    $self->{http} = centreon::plugins::http->new(%options);

    return $self;
}
sub set_options {
    my ($self, %options) = @_;

    # Keep the parsed command line around; validated in check_options().
    $self->{option_results} = $options{option_results};
}

# Nothing to default for this custom mode.
sub set_defaults {}

# Resolve options to attributes, applying defaults, and abort with a usage
# error when any of the mandatory connection settings is missing.
sub check_options {
    my ($self, %options) = @_;

    my $opts = $self->{option_results};
    $self->{hostname}     = defined($opts->{hostname}) ? $opts->{hostname} : '';
    $self->{port}         = defined($opts->{port}) ? $opts->{port} : 443;
    $self->{proto}        = defined($opts->{proto}) ? $opts->{proto} : 'https';
    $self->{timeout}      = defined($opts->{timeout}) ? $opts->{timeout} : 10;
    $self->{api_username} = defined($opts->{api_username}) ? $opts->{api_username} : '';
    $self->{api_password} = defined($opts->{api_password}) ? $opts->{api_password} : '';
    $self->{unknown_http_status}  = defined($opts->{unknown_http_status}) ? $opts->{unknown_http_status} : '%{http_code} < 200 or %{http_code} >= 300';
    $self->{warning_http_status}  = defined($opts->{warning_http_status}) ? $opts->{warning_http_status} : '';
    $self->{critical_http_status} = defined($opts->{critical_http_status}) ? $opts->{critical_http_status} : '';

    # Checked in the same order as the original code so the first error
    # message reported stays identical.
    foreach my $required ([hostname => '--hostname'], [api_username => '--api-username'], [api_password => '--api-password']) {
        if ($self->{$required->[0]} eq '') {
            $self->{output}->add_option_msg(short_msg => "Need to specify " . $required->[1] . " option.");
            $self->{output}->option_exit();
        }
    }

    return 0;
}
# Copies the validated connection settings back into option_results, which
# is what centreon::plugins::http->set_options() consumes in settings().
sub build_options_for_httplib {
    my ($self, %options) = @_;

    $self->{option_results}->{hostname} = $self->{hostname};
    $self->{option_results}->{port} = $self->{port};
    $self->{option_results}->{proto} = $self->{proto};
    # The original assigned timeout twice; once is enough.
    $self->{option_results}->{timeout} = $self->{timeout};
    # HTTP basic authentication with the API credentials.
    $self->{option_results}->{credentials} = 1;
    $self->{option_results}->{basic} = 1;
    $self->{option_results}->{username} = $self->{api_username};
    $self->{option_results}->{password} = $self->{api_password};
}
# Configure the underlying HTTP client (headers + options). Idempotent:
# guarded by the settings_done flag so it runs at most once per process.
sub settings {
    my ($self, %options) = @_;

    return if (defined($self->{settings_done}));

    $self->build_options_for_httplib();
    # The ONTAP REST API speaks JSON in both directions.
    $self->{http}->add_header(key => 'Accept', value => 'application/json');
    $self->{http}->add_header(key => 'Content-Type', value => 'application/json');
    $self->{http}->set_options(%{$self->{option_results}});
    $self->{settings_done} = 1;
}
# Read-only accessor for the hostname resolved in check_options().
sub get_hostname {
    my $self = shift;

    return $self->{hostname};
}
# Performs a GET against $options{endpoint} and returns the decoded JSON
# structure. Empty responses and undecodable payloads are fatal (UNKNOWN).
sub request_api {
    my ($self, %options) = @_;

    $self->settings();

    my $response = $self->{http}->request(
        url_path => $options{endpoint},
        unknown_status  => $self->{unknown_http_status},
        warning_status  => $self->{warning_http_status},
        critical_status => $self->{critical_http_status}
    );

    if (!defined($response) || $response eq '') {
        $self->{output}->add_option_msg(short_msg => "API returns empty content [code: '" . $self->{http}->get_code() . "'] [message: '" . $self->{http}->get_message() . "']");
        $self->{output}->option_exit();
    }

    my $decoded = eval { JSON::XS->new->utf8->decode($response) };
    if ($@) {
        $self->{output}->add_option_msg(short_msg => "Cannot decode response (add --debug option to display returned content)");
        $self->{output}->option_exit();
    }

    return $decoded;
}
1;
__END__
=head1 NAME
Netapp ONTAP Rest API
=head1 REST API OPTIONS
Netapp ONTAP Rest API
=over 8
=item B<--hostname>
Netapp hostname.
=item B<--port>
Port used (Default: 443)
=item B<--proto>
Specify https if needed (Default: 'https')
=item B<--api-username>
Netapp API username.
=item B<--api-password>
Netapp API password.
=item B<--timeout>
Set timeout in seconds (Default: 10).
=back
=head1 DESCRIPTION
B<custom>.
=cut
| Tpo76/centreon-plugins | storage/netapp/ontap/restapi/custom/api.pm | Perl | apache-2.0 | 6,576 |
# Auto-generated Paws request class for AppStream's ListAssociatedStacks API.
package Paws::AppStream::ListAssociatedStacks;
use Moose;
# Required: the fleet whose associated stacks should be listed.
has FleetName => (is => 'ro', isa => 'Str', required => 1);
# Optional pagination token; undef retrieves the first page.
has NextToken => (is => 'ro', isa => 'Str');
use MooseX::ClassAttribute;
# Call metadata consumed by Paws' request dispatcher.
class_has _api_call => (isa => 'Str', is => 'ro', default => 'ListAssociatedStacks');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::AppStream::ListAssociatedStacksResult');
class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::AppStream::ListAssociatedStacks - Arguments for method ListAssociatedStacks on Paws::AppStream
=head1 DESCRIPTION
This class represents the parameters used for calling the method ListAssociatedStacks on the
Amazon AppStream service. Use the attributes of this class
as arguments to method ListAssociatedStacks.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to ListAssociatedStacks.
As an example:
$service_obj->ListAssociatedStacks(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 B<REQUIRED> FleetName => Str
The name of the fleet.
=head2 NextToken => Str
The pagination token to use to retrieve the next page of results for
this operation. If this value is null, it retrieves the first page.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method ListAssociatedStacks in L<Paws::AppStream>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/AppStream/ListAssociatedStacks.pm | Perl | apache-2.0 | 1,861 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package cloud::microsoft::office365::sharepoint::mode::siteusage;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
use Time::Local;
sub custom_active_perfdata {
    my ($self, %options) = @_;

    # Pass total/cast options to threshold formatting only when thresholds
    # are expressed as percentages of the total site count.
    my %extra = ();
    if ($self->{instance_mode}->{option_results}->{units} eq '%') {
        %extra = (total => $self->{result_values}->{total}, cast_int => 1);
    }

    # Expose the report date itself as a perfdata point, encoded as the epoch
    # of noon (local time) on that day.
    if ($self->{result_values}->{report_date} =~ /^([0-9]{4})-([0-9]{2})-([0-9]{2})$/) {
        my ($year, $month, $day) = ($1, $2, $3);
        $self->{output}->perfdata_add(label => 'perfdate', value => timelocal(0, 0, 12, $day, $month - 1, $year - 1900));
    }

    $self->{output}->perfdata_add(
        label => 'active_sites', nlabel => 'sharepoint.sites.active.count',
        value => $self->{result_values}->{active},
        warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning-' . $self->{thlabel}, %extra),
        critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical-' . $self->{thlabel}, %extra),
        unit => 'sites', min => 0, max => $self->{result_values}->{total}
    );
}
sub custom_active_threshold {
    my ($self, %options) = @_;

    # Compare against the percentage of active sites when units are '%',
    # otherwise against the raw active-site count.
    my $value = ($self->{instance_mode}->{option_results}->{units} eq '%')
        ? $self->{result_values}->{prct_active}
        : $self->{result_values}->{active};

    return $self->{perfdata}->threshold_check(
        value => $value,
        threshold => [
            { label => 'critical-' . $self->{thlabel}, exit_litteral => 'critical' },
            { label => 'warning-' . $self->{thlabel}, exit_litteral => 'warning' }
        ]
    );
}
sub custom_active_output {
    my ($self, %options) = @_;

    # Human readable summary line, e.g. "Active sites on 2021-01-01 : 5/10 (50.00%)".
    my $rv = $self->{result_values};
    return sprintf(
        'Active sites on %s : %d/%d (%.2f%%)',
        $rv->{report_date}, $rv->{active}, $rv->{total}, $rv->{prct_active}
    );
}
sub custom_active_calc {
    my ($self, %options) = @_;

    # Copy raw values for this instance out of the new datas set.
    my $datas = $options{new_datas};
    my $prefix = $self->{instance} . '_';
    $self->{result_values}->{active} = $datas->{$prefix . 'active'};
    $self->{result_values}->{total} = $datas->{$prefix . 'total'};
    $self->{result_values}->{report_date} = $datas->{$prefix . 'report_date'};
    # Guard against division by zero when no sites were reported at all.
    $self->{result_values}->{prct_active} =
        ($self->{result_values}->{total} != 0)
            ? 100 * $self->{result_values}->{active} / $self->{result_values}->{total}
            : 0;
    return 0;
}
sub custom_usage_perfdata {
    my ($self, %options) = @_;

    # Suffix the legacy label with the site URL unless this is the sole instance.
    my $suffix = (!defined($options{extra_instance}) || $options{extra_instance} != 0)
        ? '_' . $self->{result_values}->{display}
        : '';

    $self->{output}->perfdata_add(
        label => 'used' . $suffix, nlabel => $self->{result_values}->{display} . '#sharepoint.sites.usage.bytes',
        unit => 'B',
        value => $self->{result_values}->{used},
        warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning-' . $self->{thlabel}, total => $self->{result_values}->{total}, cast_int => 1),
        critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical-' . $self->{thlabel}, total => $self->{result_values}->{total}, cast_int => 1),
        min => 0, max => $self->{result_values}->{total}
    );
}
sub custom_usage_threshold {
    my ($self, %options) = @_;

    # Select the value to check: used vs free space, as a byte count or a
    # percentage, depending on --units and --free.
    my $opts = $self->{instance_mode}->{option_results};
    my $value;
    if ($opts->{units} eq '%') {
        $value = defined($opts->{free}) ? $self->{result_values}->{prct_free} : $self->{result_values}->{prct_used};
    } else {
        $value = defined($opts->{free}) ? $self->{result_values}->{free} : $self->{result_values}->{used};
    }

    return $self->{perfdata}->threshold_check(
        value => $value,
        threshold => [
            { label => 'critical-' . $self->{thlabel}, exit_litteral => 'critical' },
            { label => 'warning-' . $self->{thlabel}, exit_litteral => 'warning' }
        ]
    );
}
sub custom_usage_output {
    my ($self, %options) = @_;

    # Render each byte figure with a human readable unit via the framework helper.
    my @total = $self->{perfdata}->change_bytes(value => $self->{result_values}->{total});
    my @used  = $self->{perfdata}->change_bytes(value => $self->{result_values}->{used});
    my @free  = $self->{perfdata}->change_bytes(value => $self->{result_values}->{free});
    return sprintf(
        'Total: %s Used: %s (%.2f%%) Free: %s (%.2f%%)',
        join(' ', @total),
        join(' ', @used), $self->{result_values}->{prct_used},
        join(' ', @free), $self->{result_values}->{prct_free}
    );
}
sub custom_usage_calc {
    my ($self, %options) = @_;

    my $datas = $options{new_datas};
    $self->{result_values} = {} unless defined($self->{result_values});
    my $rv = $self->{result_values};

    $rv->{display} = $datas->{$self->{instance} . '_url'};
    $rv->{total}   = $datas->{$self->{instance} . '_storage_allocated'};
    $rv->{used}    = $datas->{$self->{instance} . '_storage_used'};

    if ($rv->{total} == 0) {
        # No quota allocated: report everything as zero instead of dividing by zero.
        ($rv->{free}, $rv->{prct_used}, $rv->{prct_free}) = ('0', '0', '0');
    } else {
        $rv->{free}      = $rv->{total} - $rv->{used};
        $rv->{prct_used} = $rv->{used} * 100 / $rv->{total};
        $rv->{prct_free} = 100 - $rv->{prct_used};
    }
    return 0;
}
sub prefix_global_output {
    my ($self, %options) = @_;

    # Label prepended to every aggregated ("global") counter line.
    return 'Total ';
}
sub prefix_site_output {
    my ($self, %options) = @_;

    # Label prepended to every per-site counter line.
    my $site = $options{instance_value};
    return sprintf("Site '%s' [ID: %s] ", $site->{url}, $site->{id});
}
sub set_counters {
    my ($self, %options) = @_;

    # Three counter groups:
    #   'active' - single global active/total ratio counter
    #   'global' - totals aggregated over all sites
    #   'sites'  - one instance per site URL (type 1)
    $self->{maps_counters_type} = [
        { name => 'active', type => 0 },
        { name => 'global', type => 0, cb_prefix_output => 'prefix_global_output' },
        { name => 'sites', type => 1, cb_prefix_output => 'prefix_site_output', message_multiple => 'All sites usage are ok' },
    ];

    # Active-sites counter uses custom closures so thresholds can be either a
    # raw count or a percentage of the total (see custom_active_* subs).
    $self->{maps_counters}->{active} = [
        { label => 'active-sites', set => {
                key_values => [ { name => 'active' }, { name => 'total' }, { name => 'report_date' } ],
                closure_custom_calc => $self->can('custom_active_calc'),
                closure_custom_output => $self->can('custom_active_output'),
                closure_custom_threshold_check => $self->can('custom_active_threshold'),
                closure_custom_perfdata => $self->can('custom_active_perfdata')
            }
        }
    ];

    # Tenant-wide totals, split between active and inactive sites where the
    # source report provides both.
    $self->{maps_counters}->{global} = [
        { label => 'total-usage-active', nlabel => 'sharepoint.sites.active.usage.total.bytes', set => {
                key_values => [ { name => 'storage_used_active' } ],
                output_template => 'Usage (active sites): %s %s',
                output_change_bytes => 1,
                perfdatas => [
                    { label => 'total_usage_active', template => '%d', min => 0, unit => 'B' }
                ]
            }
        },
        { label => 'total-usage-inactive', nlabel => 'sharepoint.sites.inactive.usage.total.bytes', set => {
                key_values => [ { name => 'storage_used_inactive' } ],
                output_template => 'Usage (inactive sites): %s %s',
                output_change_bytes => 1,
                perfdatas => [
                    { label => 'total_usage_inactive', template => '%d', min => 0, unit => 'B' }
                ]
            }
        },
        { label => 'total-file-count-active', nlabel => 'sharepoint.sites.active.files.total.count', set => {
                key_values => [ { name => 'file_count_active' } ],
                output_template => 'File Count (active sites): %d',
                perfdatas => [
                    { label => 'total_file_count_active', template => '%d', min => 0 }
                ]
            }
        },
        { label => 'total-file-count-inactive', nlabel => 'sharepoint.sites.inactive.files.total.count', set => {
                key_values => [ { name => 'file_count_inactive' } ],
                output_template => 'File Count (inactive sites): %d',
                perfdatas => [
                    { label => 'total_file_count_inactive', template => '%d', min => 0 }
                ]
            }
        },
        { label => 'total-active-file-count', nlabel => 'sharepoint.sites.files.active.total.count', set => {
                key_values => [ { name => 'active_file_count' } ],
                output_template => 'Active File Count (active sites): %d',
                perfdatas => [
                    { label => 'total_active_file_count', template => '%d', min => 0 }
                ]
            }
        },
        { label => 'total-visited-page-count', nlabel => 'sharepoint.sites.pages.visited.total.count', set => {
                key_values => [ { name => 'visited_page_count' } ],
                output_template => 'Visited Page Count (active sites): %d',
                perfdatas => [
                    { label => 'total_visited_page_count', template => '%d', min => 0 }
                ]
            }
        },
        { label => 'total-page-view-count', nlabel => 'sharepoint.sites.pages.viewed.total.count', set => {
                key_values => [ { name => 'page_view_count' } ],
                output_template => 'Page View Count (active sites): %d',
                perfdatas => [
                    { label => 'total_page_view_count', template => '%d', min => 0 }
                ]
            }
        }
    ];

    # Per-site counters; 'usage' has custom closures for the %/free handling,
    # the others are plain counts with per-URL perfdata instances.
    $self->{maps_counters}->{sites} = [
        { label => 'usage', set => {
                key_values => [ { name => 'storage_used' }, { name => 'storage_allocated' }, { name => 'url' }, { name => 'id' } ],
                closure_custom_calc => $self->can('custom_usage_calc'),
                closure_custom_output => $self->can('custom_usage_output'),
                closure_custom_perfdata => $self->can('custom_usage_perfdata'),
                closure_custom_threshold_check => $self->can('custom_usage_threshold')
            }
        },
        { label => 'file-count', nlabel => 'sharepoint.sites.files.count', set => {
                key_values => [ { name => 'file_count' }, { name => 'url' }, { name => 'id' } ],
                output_template => 'File Count: %d',
                perfdatas => [
                    { label => 'file_count', template => '%d',
                      min => 0, label_extra_instance => 1, instance_use => 'url' }
                ]
            }
        },
        { label => 'active-file-count', nlabel => 'sharepoint.sites.files.active.count', set => {
                key_values => [ { name => 'active_file_count' }, { name => 'url' }, { name => 'id' } ],
                output_template => 'Active File Count: %d',
                perfdatas => [
                    { label => 'active_file_count', template => '%d',
                      min => 0, label_extra_instance => 1, instance_use => 'url' }
                ]
            }
        },
        { label => 'visited-page-count', nlabel => 'sharepoint.sites.pages.visited.count', set => {
                key_values => [ { name => 'visited_page_count' }, { name => 'url' }, { name => 'id' } ],
                output_template => 'Visited Page Count: %d',
                perfdatas => [
                    { label => 'visited_page_count', template => '%d',
                      min => 0, label_extra_instance => 1, instance_use => 'url' }
                ]
            }
        },
        { label => 'page-view-count', nlabel => 'sharepoint.sites.pages.viewed.count', set => {
                key_values => [ { name => 'page_view_count' }, { name => 'url' }, { name => 'id' } ],
                output_template => 'Page View Count: %d',
                perfdatas => [
                    { label => 'page_view_count', template => '%d',
                      min => 0, label_extra_instance => 1, instance_use => 'url' }
                ]
            }
        }
    ];
}
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    # Command-line options specific to this mode.
    my %arguments = (
        'filter-url:s'      => { name => 'filter_url' },
        'filter-id:s'       => { name => 'filter_id' },
        'units:s'           => { name => 'units', default => '%' },
        'free'              => { name => 'free' },
        'filter-counters:s' => { name => 'filter_counters', default => 'active-sites|total' }
    );
    $options{options}->add_options(arguments => \%arguments);

    return $self;
}
sub manage_selection {
    my ($self, %options) = @_;

    # Accumulators filled while walking the report rows.
    $self->{active} = { active => 0, total => 0, report_date => '' };
    $self->{global} = {
        storage_used_active => 0, storage_used_inactive => 0,
        file_count_active => 0, file_count_inactive => 0,
        active_file_count => 0 , visited_page_count => 0 , page_view_count => 0
    };
    $self->{sites} = {};

    # Fetch the 7-day report first; its refresh date is then used to request
    # the matching 1-day report (rows with 'Report Period' == 1).
    my $results = $options{custom}->office_get_sharepoint_site_usage(param => "period='D7'");
    my $results_daily = [];
    if (scalar(@{$results})) {
        $self->{active}->{report_date} = $results->[0]->{'Report Refresh Date'};
        $results_daily = $options{custom}->office_get_sharepoint_site_usage(param => "date=" . $self->{active}->{report_date});
    }
    foreach my $site (@{$results}, @{$results_daily}) {
        # Apply the --filter-url / --filter-id regexps before counting anything.
        if (defined($self->{option_results}->{filter_url}) && $self->{option_results}->{filter_url} ne '' &&
            $site->{'Site URL'} !~ /$self->{option_results}->{filter_url}/) {
            $self->{output}->output_add(long_msg => "skipping '" . $site->{'Site URL'} . "': no matching filter name.", debug => 1);
            next;
        }
        if (defined($self->{option_results}->{filter_id}) && $self->{option_results}->{filter_id} ne '' &&
            $site->{'Site Id'} !~ /$self->{option_results}->{filter_id}/) {
            $self->{output}->output_add(long_msg => "skipping '" . $site->{'Site Id'} . "': no matching filter name.", debug => 1);
            next;
        }
        # Rows whose 'Report Period' is not 1 come from the 7-day report: they
        # only feed the total count and, when the site showed no activity on
        # the refresh date, the "inactive" aggregates.
        if ($site->{'Report Period'} != 1) {
            if (!defined($site->{'Last Activity Date'}) || ($site->{'Last Activity Date'} ne $self->{active}->{report_date})) {
                # Empty-string report cells are treated as zero.
                $self->{global}->{storage_used_inactive} += ($site->{'Storage Used (Byte)'} ne '') ? $site->{'Storage Used (Byte)'} : 0;
                $self->{global}->{file_count_inactive} += ($site->{'File Count'} ne '') ? $site->{'File Count'} : 0;
                $self->{output}->output_add(long_msg => "skipping '" . $site->{'Site URL'} . "': no activity.", debug => 1);
            }
            $self->{active}->{total}++;
            next;
        }
        # Daily-report row: the site was active on the report date; add it to
        # the "active" aggregates and record its per-site counters.
        $self->{active}->{active}++;
        $self->{global}->{storage_used_active} += ($site->{'Storage Used (Byte)'} ne '') ? $site->{'Storage Used (Byte)'} : 0;
        $self->{global}->{file_count_active} += ($site->{'File Count'} ne '') ? $site->{'File Count'} : 0;
        $self->{global}->{active_file_count} += ($site->{'Active File Count'} ne '') ? $site->{'Active File Count'} : 0;
        $self->{global}->{visited_page_count} += ($site->{'Visited Page Count'} ne '') ? $site->{'Visited Page Count'} : 0;
        $self->{global}->{page_view_count} += ($site->{'Page View Count'} ne '') ? $site->{'Page View Count'} : 0;
        $self->{sites}->{$site->{'Site URL'}}->{url} = $site->{'Site URL'};
        $self->{sites}->{$site->{'Site URL'}}->{id} = $site->{'Site Id'};
        $self->{sites}->{$site->{'Site URL'}}->{file_count} = $site->{'File Count'};
        $self->{sites}->{$site->{'Site URL'}}->{active_file_count} = $site->{'Active File Count'};
        $self->{sites}->{$site->{'Site URL'}}->{visited_page_count} = $site->{'Visited Page Count'};
        $self->{sites}->{$site->{'Site URL'}}->{page_view_count} = $site->{'Page View Count'};
        $self->{sites}->{$site->{'Site URL'}}->{storage_used} = $site->{'Storage Used (Byte)'};
        $self->{sites}->{$site->{'Site URL'}}->{storage_allocated} = $site->{'Storage Allocated (Byte)'};
        $self->{sites}->{$site->{'Site URL'}}->{last_activity_date} = $site->{'Last Activity Date'};
    }
}
1;
__END__
=head1 MODE
Check sites usage (reporting period over the last refreshed day).
(See link for details about metrics :
https://docs.microsoft.com/en-us/microsoft-365/admin/activity-reports/sharepoint-site-usage?view=o365-worldwide)
=over 8
=item B<--filter-*>
Filter sites.
Can be: 'url', 'id' (can be a regexp).
=item B<--warning-*>
Warning threshold.
Can be: 'active-sites',
'total-usage-active' (count), 'total-usage-inactive' (count),
'total-file-count-active' (count), 'total-file-count-inactive' (count),
'total-active-file-count' (count), 'total-visited-page-count' (count),
'total-page-view-count' (count),
'usage' (count), 'file-count' (count), 'active-file-count' (count),
'visited-page-count' (count), 'page-view-count' (count).
=item B<--critical-*>
Critical threshold.
Can be: 'active-sites',
'total-usage-active' (count), 'total-usage-inactive' (count),
'total-file-count-active' (count), 'total-file-count-inactive' (count),
'total-active-file-count' (count), 'total-visited-page-count' (count),
'total-page-view-count' (count),
'usage' (count), 'file-count' (count), 'active-file-count' (count),
'visited-page-count' (count), 'page-view-count' (count).
=item B<--filter-counters>
Only display some counters (regexp can be used).
Example to hide per user counters: --filter-counters='active-sites|total'
(Default: 'active-sites|total')
=item B<--units>
Unit of thresholds (Default: '%') ('%', 'count').
=back
=cut
| Tpo76/centreon-plugins | cloud/microsoft/office365/sharepoint/mode/siteusage.pm | Perl | apache-2.0 | 18,811 |
# Array wrapper type for VmfsConfigOption objects (generated VMOMI binding).
package VMOMI::ArrayOfVmfsConfigOption;
use parent 'VMOMI::ComplexType';

use strict;
use warnings;

# No ancestors beyond the ComplexType base class.
our @class_ancestors = ( );

# Single repeated member: [name, type, is_array, is_optional].
our @class_members = (
    ['VmfsConfigOption', 'VmfsConfigOption', 1, 1],
);

sub get_class_ancestors {
    return @class_ancestors;
}

sub get_class_members {
    my $class = shift;
    # Inherited member definitions first, then the ones declared here.
    return ($class->SUPER::get_class_members(), @class_members);
}

1;
# Host event raised when a ghost DVS proxy switch is detected (generated VMOMI binding).
package VMOMI::GhostDvsProxySwitchDetectedEvent;
use parent 'VMOMI::HostEvent';

use strict;
use warnings;

# Ancestor chain used for (de)serialization, most-derived first.
our @class_ancestors = (
    'HostEvent',
    'Event',
    'DynamicData',
);

# Members declared at this level: [name, type, is_array, is_optional].
our @class_members = (
    ['switchUuid', undef, 1, ],
);

sub get_class_ancestors {
    return @class_ancestors;
}

sub get_class_members {
    my $class = shift;
    # Inherited member definitions first, then the ones declared here.
    return ($class->SUPER::get_class_members(), @class_members);
}

1;
# Update the compset file and remove the vertex pairs with low counts.
# vertex pair to remove is the one which together have low counts, remove that pair from the set
# USAGE: perl update_compatible_set.pl -cs compatible_set -w writeoutputfile -t thresholdForPairedCounts -wg UpdatedGraph
use Getopt::Long;

# Package-global configuration shared by the subs below (script predates
# `use strict`; these are deliberately file-wide).
$comp_set = "";          # input compatible-set file (-cs)
$threshold = 10;         # minimum paired count for a vertex pair to survive (-t)
$updated_compset = "";   # filtered compatible-set output file (-w)
$updated_graph = "";     # filtered k-mer graph output file (-wg)
$cond_graph = "";        # condensed-graph file, derived from -cs file name below
GetOptions("cs=s",\$comp_set,"w=s",\$updated_compset,"t:i",\$threshold,"wg:s",\$updated_graph) or die("Enter input correctly\n");
# Derive the condensed-graph and raw-graph file names from the compatible-set
# file name: "<prefix>comp.txt" or "<prefix>bubble.txt" maps to
# "<prefix>cond.graph" and "<prefix>graph". Any other name disables graph pruning.
$cond_graph = $comp_set;
$graph = $comp_set;
if( $cond_graph =~ /comp.txt/)
{
    $cond_graph =~ s/comp.txt/cond.graph/;
    $graph =~ s/comp.txt/graph/;
    print $cond_graph."\n";;
}
elsif ($cond_graph =~ /bubble.txt/)
{
    $cond_graph =~ s/bubble.txt/cond.graph/;
    $graph =~ s/bubble.txt/graph/;
    print $cond_graph."\n";
}
else
{
    $cond_graph = "";
    $graph ="";
}
main();
# Driver: filter the compatible set, determine the k-mer size from the graph
# file, then prune the condensed graph and the raw graph.
# Fixes: the original read from the bareword handle `temp` even when $graph
# was empty (read from an unopened handle), used an unchecked 2-arg open,
# and leaked the handle and loop variables as package globals.
sub main
{
    load_compatible_set();

    if ($graph ne "") {
        # The graph file's first line is an edge "<kmer1> <kmer2>"; the k-mer
        # size is the length of the first field.
        open(my $gfh, '<', $graph) or die("Error opening graph file $graph: $!\n");
        my $line = <$gfh>;
        close($gfh);
        if (defined($line)) {
            chomp($line);
            my ($k1, $k2) = split(/ /, $line);
            $Kmersize = length($k1);
        }
    }

    load_cond_graph() if ($cond_graph ne "");
    load_graph() if ($graph ne "");
}
# Read the compatible set and keep, for each source vertex, only the adjacent
# vertices whose paired count exceeds $threshold. Surviving vertex ids are
# recorded in the shared %condvertex so the graphs can be pruned consistently.
# Fixes: 2-arg opens on bareword handles, unchecked open of the output file,
# and loop variables leaking as package globals.
sub load_compatible_set
{
    open(my $in, '<', $comp_set) or die("Error opening $comp_set file\n");
    open(my $out, '>', $updated_compset) or die("Error opening $updated_compset for writing: $!\n");
    while (my $l = <$in>)
    {
        chomp($l);
        # Line format: "<id>:<depth> <id2>:<depth2>:<count> <id3>:<depth3>:<count> ..."
        my @vs = split(/ /, $l);
        my %valid_position = ();
        my %valid_counts = ();
        my ($id, $position) = split(/:/, $vs[0]); # vertex and its depth
        for (my $i = 1; $i < scalar(@vs); $i++) # iterate through all the vertex pairs in the adjacency list
        {
            my ($id2, $position2, $count) = split(/:/, $vs[$i]);
            if ($count > $threshold)
            {
                $valid_position{$id2} = $position2;
                $valid_counts{$id2} = $count;
            }
        }
        if (scalar(keys %valid_counts) > 0)
        {
            print $out $vs[0] . " ";
            # Emit surviving neighbours ordered by depth, and mark both
            # endpoints as still referenced (shared global %condvertex).
            for my $k (sort { $valid_position{$a} <=> $valid_position{$b} } keys %valid_position)
            {
                print $out "$k:$valid_position{$k}:$valid_counts{$k} ";
                $condvertex{$id} = 1;
                $condvertex{$k} = 1;
            }
            print $out "\n";
        }
    }
    close($in);
    close($out) or die("Error closing $updated_compset: $!\n");
}
# Scan the condensed graph: any condensed vertex not referenced by the
# filtered compatible set contributes all of its constituent k-mers to the
# shared %remove_vertex hash (later used by load_graph to prune edges).
# Also counts and reports lost edges/vertices.
# Fixes: 2-arg open on a bareword handle; loop variables and counters were
# package globals.
sub load_cond_graph
{
    open(my $in, '<', $cond_graph) or die("Condensed graph didn't load");
    my $not_existing = 0;
    my $no_vertex = 0;
    while (my $l = <$in>)
    {
        chomp($l);
        if ($l =~ /[AGCT]/)
        {
            # Vertex line: "<id> <sequence>".
            my ($vertex, $seq) = split(/ /, $l);
            $cond_ver{$vertex} = $seq;
            if (!exists($condvertex{$vertex}))
            {
                # Mark every k-mer of this unreferenced vertex for removal.
                my $len = length($seq);
                for (my $i = 0; $i < $len - $Kmersize + 1; $i++)
                {
                    $remove_vertex{substr($seq, $i, $Kmersize)} = 1;
                }
                $no_vertex++;
            }
        }
        else
        {
            # Edge line: "<id1> <id2>".
            my ($ver1, $ver2) = split(/ /, $l);
            if (!exists($condvertex{$ver1}) || !exists($condvertex{$ver2}))
            {
                $not_existing++;
            }
        }
    }
    close($in);
    print "$not_existing edges were lost\n$no_vertex vertices were lost\n";
}
# Copy the raw k-mer graph to $updated_graph, skipping every edge that
# touches a k-mer flagged in %remove_vertex by load_cond_graph.
# Fixes: 2-arg opens on bareword handles and an unchecked open of the
# output file; loop variables were package globals.
sub load_graph
{
    open(my $in, '<', $graph) or die("Graph file $graph did not load\n");
    open(my $out, '>', $updated_graph) or die("Cannot open $updated_graph for writing: $!\n");
    while (my $l = <$in>)
    {
        chomp($l);
        my ($k1, $k2) = split(/ /, $l);
        if (!(exists($remove_vertex{$k1}) || exists($remove_vertex{$k2})))
        {
            print $out $l . "\n";
        }
    }
    close($in);
    close($out) or die("Error closing $updated_graph: $!\n");
}
| raunaq-m/MLEHaplo | update_compatible_set.pl | Perl | bsd-2-clause | 3,205 |
#
# CommonGrammar.pm : part of the Mace toolkit for building distributed systems
#
# Copyright (c) 2011, Charles Killian, James W. Anderson, Adolfo Rodriguez, Dejan Kostic
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the names of the contributors, nor their associated universities
# or organizations may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# ----END-OF-LEGAL-STUFF----
package Mace::Compiler::CommonGrammar;
use strict;
use Mace::Compiler::Grammar;
use constant COMMON => q{
Word : /\S*/
LookaheadWord : ...!<matchrule: $arg{rule}>[%arg] Word
LookaheadString : LookaheadWord[%arg](s?)
CopyLookaheadString : StartPos LookaheadString[%arg] EndPos
{
$return = substr($Mace::Compiler::Grammar::text, $item{StartPos},
1 + $item{EndPos} - $item{StartPos});
}
ToEnd : Word(s?)
StartPos : // { $thisoffset }
| <error>
EndPos : { $prevoffset }
Line : '' { $thisline }
Column : { $thiscolumn }
FileLine : '' {
my $line = 0;
while (defined $thisparser->{local}{offsetmap}->[$line] and $thisparser->{local}{offsetmap}->[$line] <= $thisoffset) {
$line++;
}
$line--;
if ($line <= 0) {
confess("Line $line < 0 : thisline $thisline thisoffset $thisoffset");
}
# if (not defined $thisparser->{local}{linemap}->[$thisline]) {
# print "WARNING: offsetline $line thisline $thisline\n";
# }
#if (not defined $thisparser->{local}{linemap}->[$line]) {
# # $return = [ 0, "error", $thisline ];
# open(OUT, ">", "/tmp/foo");
# print OUT "Thisline: $thisline prevline $prevline\n";
# my $i = 0;
# for my $l (@{$thisparser->{local}{linemap}}) {
# print OUT "$i - $l - ".$thisparser->{local}{filemap}->[$i]."\n";
# $i++;
# }
# print OUT "File::\n";
# print OUT $Mace::Compiler::Grammar::text;
# print OUT "Remaining::\n$text\n";
# close(OUT);
# confess("Invalid line $thisline");
#} else {
$return = [ $thisparser->{local}{linemap}->[$line], $thisparser->{local}{filemap}->[$line], $line ];
#}
}
FileLineEnd : {
my $line = 0;
while (defined $thisparser->{local}{offsetmap}->[$line] and $thisparser->{local}{offsetmap}->[$line] < $prevoffset) {
$line++;
}
$line--;
# if (not defined $thisparser->{local}{linemap}->[$prevline]) {
# print "WARNING: offsetline $line prevline $prevline\n";
# }
# if (not defined $thisparser->{local}{linemap}->[$line]) {
# # $return = [ 0, "error", $prevline ];
# open(OUT, ">", "/tmp/foo");
# print OUT "prevline: $prevline\n";
# my $i = 0;
# for my $l (@{$thisparser->{local}{linemap}}) {
# print OUT "$i - $l - ".$thisparser->{local}{filemap}->[$i]."\n";
# $i++;
# }
# print OUT "File::\n";
# print OUT $Mace::Compiler::Grammar::text;
# print OUT "Remaining::\n$text\n";
# close(OUT);
# confess("Invalid line $prevline");
# } else {
$return = [ $thisparser->{local}{linemap}->[$line], $thisparser->{local}{filemap}->[$line], $line ];
# }
}
Id : /[_a-zA-Z][a-zA-Z0-9_]*/
SemiStatementToken : m|[^;{}][^;{}/]*|
SemiStatementBegin : SemiStatementToken(s)
BraceBlockFoo : '{' SemiStatementFoo(s?) '}' { $return = $item[2]; }
BraceBlock : '{' SemiStatement(s?) '}'
{
my $node = Mace::Compiler::ParseTreeObject::BraceBlock->new();
$node->not_null(scalar(@{$item[2]}));
if (scalar(@{$item[2]})) {
$node->semi_statements(@{$item[2]});
}
$return = $node;
#$return = $item[2]; # original
}
Enum : /enum\s/ Id '{' SemiStatementBegin '}'
ParsedExpression : Expression
{
$return = Mace::Compiler::ParseTreeObject::ParsedExpression->new(expr=>$item{Expression});
}
ParsedReturn : /return\b/ ';'
{
$return = Mace::Compiler::ParseTreeObject::ParsedReturn->new(type=>0);
}
| /return\b/ ParsedExpression ';'
{
$return = Mace::Compiler::ParseTreeObject::ParsedReturn->new(type=>1, parsed_expr=>$item{ParsedExpression});
}
ParsedVar : StaticToken(?) Parameter[%arg, initializerOk => 1]
{
if (scalar(@{$item[1]})) {
$return = Mace::Compiler::ParseTreeObject::ParsedVar->new(is_static=>1, is_semi=>$arg{semi}, parameter=>$item{Parameter}->toString(noline => 1));
} else {
$return = Mace::Compiler::ParseTreeObject::ParsedVar->new(is_static=>0, is_semi=>$arg{semi}, parameter=>$item{Parameter}->toString(noline => 1));
}
}
| <error>
ParsedElse: /else\b/ <commit> StatementOrBraceBlock
{
$return = Mace::Compiler::ParseTreeObject::ParsedElse->new(null=>0, stmt_or_block=>$item{StatementOrBraceBlock});
}
| <error?> <reject>
| {
$return = Mace::Compiler::ParseTreeObject::ParsedElse->new(null=>1);
}
ElseAndIf: /else\b/ /if\b/
ParsedElseIfs: ...ElseAndIf <commit> ParsedElseIf ParsedElseIfs
{
$return = Mace::Compiler::ParseTreeObject::ParsedElseIfs->new(null=>0, parsed_else_if=>$item{ParsedElseIf}, parsed_else_ifs=>$item{ParsedElseIfs});
}
| <error?> <reject>
| {
$return = Mace::Compiler::ParseTreeObject::ParsedElseIfs->new(null=>1);
}
ParsedElseIf: ElseAndIf <commit> '(' ParsedExpression ')' StatementOrBraceBlock
{
$return = Mace::Compiler::ParseTreeObject::ParsedElseIf->new(type=>"parsed_expression", parsed_expr=>$item{ParsedExpression}, stmt_or_block=>$item{StatementOrBraceBlock});
}
| ElseAndIf <commit> '(' ExpressionOrAssignLValue ')' StatementOrBraceBlock
{
$return = Mace::Compiler::ParseTreeObject::ParsedElseIf->new(type=>"expression_or_assign_lvalue", expr_or_assign=>$item{ExpressionOrAssignLValue}, stmt_or_block=>$item{StatementOrBraceBlock});
}
| <error>
ParsedIf : /if\b/ '(' ParsedExpression ')' StatementOrBraceBlock ParsedElseIfs ParsedElse
{
$return = Mace::Compiler::ParseTreeObject::ParsedIf->new(type=>"parsed_expression", parsed_expr=>$item{ParsedExpression}, stmt_or_block=>$item{StatementOrBraceBlock}, parsed_else_ifs=>$item{ParsedElseIfs}, parsed_else=>$item{ParsedElse});
}
| /if\b/ '(' ExpressionOrAssignLValue ')' StatementOrBraceBlock ParsedElseIfs ParsedElse
{
$return = Mace::Compiler::ParseTreeObject::ParsedIf->new(type=>"expression_or_assign_lvalue", expr_or_assign=>$item{ExpressionOrAssignLValue}, stmt_or_block=>$item{StatementOrBraceBlock}, parsed_else_ifs=>$item{ParsedElseIfs}, parsed_else=>$item{ParsedElse});
}
| <error>
ParsedDoWhile : /do\b/ <commit> StatementOrBraceBlock /while\b/ '(' ParsedExpression ')' (';')(?)
{
$return = Mace::Compiler::ParseTreeObject::ParsedDoWhile->new(stmt_or_block=>$item{StatementOrBraceBlock}, parsed_expr=>$item{ParsedExpression});
}
ParsedWhile : /while\b/ <commit> '(' ParsedExpression ')' StatementOrBraceBlock
{
$return = Mace::Compiler::ParseTreeObject::ParsedWhile->new(parsed_expr=>$item{ParsedExpression}, stmt_or_block=>$item{StatementOrBraceBlock});
}
ParsedAbort : 'ABORT' '(' QuotedString ')' ';'
{
$return = Mace::Compiler::ParseTreeObject::ParsedAbort->new(quoted_string=>$item{QuotedString});
}
| <error>
ParsedAssertMsg : 'ASSERTMSG' '(' Expression ',' QuotedString ')' ';'
{
$return = Mace::Compiler::ParseTreeObject::ParsedAssertMsg->new(expr=>$item{Expression}, quoted_string=>$item{QuotedString});
}
| <error>
ParsedAssert : 'ASSERT' '(' Expression ')' ';'
{
$return = Mace::Compiler::ParseTreeObject::ParsedAssert->new(expr=>$item{Expression});
}
| <error>
ParsedFCall : ExpressionLValue[parseFunctionCall => 1]
{
$return = Mace::Compiler::ParseTreeObject::ParsedFCall->new(expr_lvalue=>$item{ExpressionLValue});
}
| <error>
# ParsedLValue: anything assignable/side-effecting that can appear as the RHS of a
# chained assignment: ++/-- forms, nested "a op= b" assignments, or a plain lvalue.
# Alternatives are tried in this order; each tags the node with a 'type' string.
ParsedLValue : ParsedPlusPlus
{
$return = Mace::Compiler::ParseTreeObject::ParsedLValue->new(type=>"parsed_plus_plus", parsed_plus_plus=>$item{ParsedPlusPlus});
}
| ParsedBinaryAssignOp
{
$return = Mace::Compiler::ParseTreeObject::ParsedLValue->new(type=>"parsed_binary_assign_op", parsed_binary_assign_op=>$item{ParsedBinaryAssignOp});
}
| ExpressionLValue
{
$return = Mace::Compiler::ParseTreeObject::ParsedLValue->new(type=>"expression_lvalue", parsed_expr_lvalue=>$item{ExpressionLValue});
}
#ParsedLValue : ParsedPlusPlus | ParsedBinaryAssignOp | ExpressionLValue
| <error>
# ??
#| ScopedId '[' Expression ']' { $return = $item{ScopedId} . '[' . $item{Expression} . ']'; }
# ParsedBinaryAssignOp: "<lvalue> <op>= <expr-or-lvalue>" compound/simple assignment.
# CheckSemi[%arg] consumes the trailing ';' iff the caller passed semi=>1 (see the
# SemiStatement rule, which invokes ParsedBinaryAssignOp[semi=>1]).
ParsedBinaryAssignOp : ExpressionLValue AssignBinaryOp Expression CheckSemi[%arg]
{
$return = Mace::Compiler::ParseTreeObject::ParsedBinaryAssignOp->new(type=>"expression", expr_lvalue=>$item{ExpressionLValue}, assign_binary_op=>$item{AssignBinaryOp}, expr=>$item{Expression}, is_semi=>$arg{is_semi});
}
| ExpressionLValue AssignBinaryOp <commit> ParsedLValue CheckSemi[%arg]
{
# BUGFIX: this branch previously read $item{is_semi}, but no production item of
# that name exists (it was always undef); use $arg{is_semi} as the sibling
# branch above does.
# NOTE(review): visible callers pass semi=>1 rather than is_semi=>..., so this
# may really be intended as $arg{semi} -- TODO confirm against ParseTreeObject.
$return = Mace::Compiler::ParseTreeObject::ParsedBinaryAssignOp->new(type=>"parsed_lvalue", expr_lvalue=>$item{ExpressionLValue}, assign_binary_op=>$item{AssignBinaryOp}, parsed_lvalue=>$item{ParsedLValue}, is_semi=>$arg{is_semi});
}
| <uncommit> <defer: print "ParsedBinaryAssignOp failed.";> <error?> <error>
# ParsedPlusPlus: pre/post increment and decrement of an lvalue; the 'type' field
# records which of the four forms matched ("post++", "pre++", "post--", "pre--").
ParsedPlusPlus : ExpressionLValue '++'
{
$return = Mace::Compiler::ParseTreeObject::ParsedPlusPlus->new(type=>"post++", expr_lvalue=>$item{ExpressionLValue});
}
| '++' ExpressionLValue
{
$return = Mace::Compiler::ParseTreeObject::ParsedPlusPlus->new(type=>"pre++", expr_lvalue=>$item{ExpressionLValue});
}
| ExpressionLValue '--'
{
$return = Mace::Compiler::ParseTreeObject::ParsedPlusPlus->new(type=>"post--", expr_lvalue=>$item{ExpressionLValue});
}
| '--' ExpressionLValue
{
$return = Mace::Compiler::ParseTreeObject::ParsedPlusPlus->new(type=>"pre--", expr_lvalue=>$item{ExpressionLValue});
}
# ParsedForVar: the init clause of a for(;;) header -- a declaration, an
# assignment, or empty (type=>"null" for "for (;...;...)").
ParsedForVar : ParsedVar
{
$return = Mace::Compiler::ParseTreeObject::ParsedForVar->new(type=>"parsed_var", parsed_var=>$item{ParsedVar});
}
| ParsedBinaryAssignOp
{
$return = Mace::Compiler::ParseTreeObject::ParsedForVar->new(type=>"parsed_binary_assign_op", parsed_binary_assign_op=>$item{ParsedBinaryAssignOp});
}
|
{
$return = Mace::Compiler::ParseTreeObject::ParsedForVar->new(type=>"null");
}
# ParsedForUpdate: the update clause of a for(;;) header -- ++/--, an
# assignment, or empty.
ParsedForUpdate : ParsedPlusPlus
{
$return = Mace::Compiler::ParseTreeObject::ParsedForUpdate->new(type=>"parsed_plus_plus", parsed_plus_plus=>$item{ParsedPlusPlus});
}
| ParsedBinaryAssignOp
{
$return = Mace::Compiler::ParseTreeObject::ParsedForUpdate->new(type=>"parsed_binary_assign_op", parsed_binary_assign_op=>$item{ParsedBinaryAssignOp});
}
|
{
$return = Mace::Compiler::ParseTreeObject::ParsedForUpdate->new(type=>"null");
}
# ParsedForLoop: for ( <init> ; <cond> ; <update> ) <stmt-or-block>.
# NOTE(review): the condition is a mandatory Expression here, so "for(;;)"
# with an empty condition would not parse -- presumably intentional.
ParsedForLoop : /for\b/ '(' ParsedForVar ';' Expression ';' ParsedForUpdate ')' StatementOrBraceBlock
{
$return = Mace::Compiler::ParseTreeObject::ParsedForLoop->new(parsed_for_var=>$item{ParsedForVar}, expr=>$item{Expression}, parsed_for_update=>$item{ParsedForUpdate}, stmt_or_block=>$item{StatementOrBraceBlock});
}
# OutputStream1: the set of recognized logging/output stream names; macedbg takes
# a numeric level argument and is re-serialized as "macedbg(N)".
OutputStream1 : 'maceout' | 'maceerr' | 'macewarn' | 'macedbg' '(' Number ')' {$return = "macedbg(".$item{Number}.")";} | 'cout' | 'cerr' | 'std::cout' | 'std::cerr' | 'kenout' | <error>
# OutputStream: wraps OutputStream1 to return the exact source text between the
# StartPos/EndPos markers (the standard substring-capture pattern in this grammar).
OutputStream : StartPos OutputStream1 EndPos
{
$return = substr($Mace::Compiler::Grammar::text, $item{StartPos},
1 + $item{EndPos} - $item{StartPos});
}
# OutputOperator1: only '<<' is a valid stream-insertion operator.
OutputOperator1 : '<<' | <error>
# OutputOperator: substring-capture wrapper around OutputOperator1.
OutputOperator : StartPos OutputOperator1 EndPos
{
$return = substr($Mace::Compiler::Grammar::text, $item{StartPos},
1 + $item{EndPos} - $item{StartPos});
}
# ParsedLogging: "<stream> << <expr> ;" -- a logging statement.
ParsedLogging : OutputStream <commit> OutputOperator Expression ';'
{
$return = Mace::Compiler::ParseTreeObject::ParsedLogging->new(output_stream=>$item{OutputStream}, output_operator=>$item{OutputOperator}, expr=>$item{Expression});
}
# ParsedOutput: "<lvalue> << <expr>" -- stream insertion into a non-logging lvalue.
ParsedOutput : ExpressionLValue OutputOperator <commit> Expression
{
$return = Mace::Compiler::ParseTreeObject::ParsedOutput->new(expr_lvalue=>$item{ExpressionLValue}, output_operator=>$item{OutputOperator}, expr=>$item{Expression});
}
# ParsedDefaultCase: optional "default: <stmts>" arm of a switch; the final empty
# alternative yields a type=>"null" node when no default is present.
ParsedDefaultCase : 'default' <commit> ':' SemiStatement(s?)
{
my $node = Mace::Compiler::ParseTreeObject::ParsedDefaultCase->new();
$node->type("default");
$node->not_null(scalar(@{$item[-1]}));
if (scalar(@{$item[-1]})) {
$node->semi_statements(@{$item[-1]});
}
$return = $node;
}
| <error?> <reject>
|
{
$return = Mace::Compiler::ParseTreeObject::ParsedDefaultCase->new(type=>"null");
}
# ParsedSwitchConstant: the constant after 'case' -- a number, a character
# literal, or a (possibly scoped) identifier such as an enum value.
ParsedSwitchConstant : Number
{
$return = Mace::Compiler::ParseTreeObject::ParsedSwitchConstant->new(type=>"number", val=>$item{Number});
}
| Character
{
$return = Mace::Compiler::ParseTreeObject::ParsedSwitchConstant->new(type=>"character", val=>$item{Character});
}
| ScopedId
{
$return = Mace::Compiler::ParseTreeObject::ParsedSwitchConstant->new(type=>"scoped_id", scoped_id=>$item{ScopedId});
}
# ParsedSwitchCase: one "case <const>: <stmts>" arm.
ParsedSwitchCase : 'case' ParsedSwitchConstant ':' SemiStatement(s?)
{
my $node = Mace::Compiler::ParseTreeObject::ParsedSwitchCase->new();
$node->parsed_switch_constant($item{ParsedSwitchConstant});
$node->not_null(scalar(@{$item[-1]}));
if (scalar(@{$item[-1]})) {
$node->semi_statements(@{$item[-1]});
}
$return = $node;
}
# ParsedSwitchCases: right-recursive list of case arms; the ...'case' lookahead
# commits only when another arm follows, otherwise the type=>"null" terminator.
ParsedSwitchCases : ...'case' <commit> ParsedSwitchCase ParsedSwitchCases
{
$return = Mace::Compiler::ParseTreeObject::ParsedSwitchCases->new(type=>"case", parsed_switch_case=>$item{ParsedSwitchCase}, parsed_switch_cases=>$item{ParsedSwitchCases});
}
| <error?><reject>
|
{
$return = Mace::Compiler::ParseTreeObject::ParsedSwitchCases->new(type=>"null");
}
# ParsedSwitch: full switch statement; cases must precede the (optional) default
# arm, and a trailing ';' after the closing brace is tolerated.
ParsedSwitch : 'switch' '(' Expression ')' '{' ParsedSwitchCases ParsedDefaultCase '}' (';')(?)
{
$return = Mace::Compiler::ParseTreeObject::ParsedSwitch->new(expr=>$item{Expression}, parsed_switch_cases=>$item{ParsedSwitchCases}, parsed_default_case=>$item{ParsedDefaultCase});
}
# ParsedMacro: a preprocessor-style line -- '#' followed by the rest of the line.
# $item[3] is the /[^\n]+/ match ('#' is item 1, <commit> is item 2).
ParsedMacro : '#' <commit> /[^\n]+/
{
$return = Mace::Compiler::ParseTreeObject::ParsedMacro->new(item=>$item[3]);
}
| <error?> <error>
# ParsedControlFlow: bare loop-control keywords (the ';' is consumed by the caller).
ParsedControlFlow : 'break' | 'continue'
# ParsedCaseOrDefault: a case/default label appearing as a statement (used when
# labels are interleaved with statements rather than parsed via ParsedSwitchCase).
ParsedCaseOrDefault : 'case' ParsedSwitchConstant ':'
{
my $node = Mace::Compiler::ParseTreeObject::ParsedCaseOrDefault->new();
$node->type("case");
$node->parsed_switch_constant($item{ParsedSwitchConstant});
$return = $node;
}
| 'default' ':'
{
$return = Mace::Compiler::ParseTreeObject::ParsedCaseOrDefault->new(type=>"default");
}
# ParsedExpectStatement: EXPECT(<expr>) with either a success block or a plain ';'.
# The middle alternative exists only to emit a targeted error message.
ParsedExpectStatement : 'EXPECT' '(' Expression ')' '{' StatementBlock '}'
{
$return = Mace::Compiler::ParseTreeObject::ParsedExpectStatement->new(type=>"stmt_block", expr=>$item{Expression}, stmt_block=>$item{StatementBlock});
}
| 'EXPECT' '(' Expression ')' SemiStatement <error: You need a semi-colon after an EXPECT condition, or an opening brace to start a success block.>
| 'EXPECT' <commit> '(' Expression ')' ';'
{
$return = Mace::Compiler::ParseTreeObject::ParsedExpectStatement->new(type=>"expr", expr=>$item{Expression});
}
| <error>
# ParsedCatch: one catch clause -- either a typed variable or the '...' catch-all.
ParsedCatch : 'catch' '(' ParsedVar <commit> ')' '{' StatementBlock '}'
{
$return = Mace::Compiler::ParseTreeObject::ParsedCatch->new(type=>"parsed_var", parsed_var=>$item{ParsedVar}, stmt_block=>$item{StatementBlock});
}
| 'catch' <commit> '(' '...' ')' '{' StatementBlock '}'
{
$return = Mace::Compiler::ParseTreeObject::ParsedCatch->new(type=>"...", stmt_block=>$item{StatementBlock});
}
| <error?> <error>
# ParsedCatches: right-recursive list of catch clauses, terminated by a
# type=>"null" node when the .../catch\b/ lookahead fails.
ParsedCatches : .../catch\b/ <commit> ParsedCatch ParsedCatches
{
$return = Mace::Compiler::ParseTreeObject::ParsedCatches->new(type=>"catch", parsed_catch=>$item{ParsedCatch}, parsed_catches=>$item{ParsedCatches});
}
| <error?> <reject>
|
{
$return = Mace::Compiler::ParseTreeObject::ParsedCatches->new(type=>"null");
}
# ParsedTryCatch: try { ... } followed by at least one catch clause (the
# .../catch\b/ lookahead enforces that a catch follows the try block).
ParsedTryCatch : 'try' <commit> '{' StatementBlock '}' .../catch\b/ ParsedCatches
{
$return = Mace::Compiler::ParseTreeObject::ParsedTryCatch->new(stmt_block=>$item{StatementBlock}, parsed_catches=>$item{ParsedCatches});
}
| <error>
# StatementOrBraceBlock: the body of if/while/for/do -- either a braced block or
# a single statement.
StatementOrBraceBlock : '{' <commit> StatementBlock '}'
{
$return = Mace::Compiler::ParseTreeObject::StatementOrBraceBlock->new(type=>"statement_block", stmt_block=>$item{StatementBlock});
}
| SemiStatement
{
$return = Mace::Compiler::ParseTreeObject::StatementOrBraceBlock->new(type=>"semi_statement", semi_stmt=>$item{SemiStatement});
}
| <error?> <error>
# StatementBlock: zero or more statements up to (but not consuming) a closing
# brace; not_null records whether the block was empty.
StatementBlock : SemiStatement(s?) .../\}/
{
my $node = Mace::Compiler::ParseTreeObject::StatementBlock->new();
$node->not_null(scalar(@{$item[1]}));
if (scalar(@{$item[1]})) {
$node->semi_statements(@{$item[1]});
}
$return = $node;
}
# SemiStatement: the central statement dispatcher.  Most alternatives use a
# zero-width lookahead (.../keyword/) plus <commit> so that a failure inside the
# committed sub-rule reports an error instead of silently backtracking.
# Alternative order matters: keyword-led statements first, then banned-identifier
# diagnostics, then expression-shaped statements, then a generic fallback.
SemiStatement : Enum ';'
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"enum", enum=>$item{Enum});
#print "SemiStatement[Enum]: ".$return->toString()."\n";
}
| .../return\b/ <commit> ParsedReturn
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_return", parsed_return=>$item{ParsedReturn});
#print "SemiStatement[ParsedReturn]: ".$return->toString()."\n";
}
| .../if\b/ <commit> ParsedIf
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_if", parsed_if=>$item{ParsedIf});
#print "SemiStatement[ParsedIf]: ".$return->toString()."\n";
}
| .../for\b/ <commit> ParsedForLoop
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_for_loop", parsed_for_loop=>$item{ParsedForLoop});
#print "SemiStatement[ParsedForLoop]: ".$return->toString()."\n";
}
| .../do\b/ <commit> ParsedDoWhile
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_do_while", parsed_do_while=>$item{ParsedDoWhile});
#print "SemiStatement[ParsedDoWhile]: ".$return->toString()."\n";
}
| .../while\b/ <commit> ParsedWhile
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_while", parsed_while=>$item{ParsedWhile});
#print "SemiStatement[ParsedWhile]: ".$return->toString()."\n";
}
| ...OutputStream <commit> ParsedLogging
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_logging", parsed_logging=>$item{ParsedLogging});
#print "SemiStatement[ParsedLogging]: ".$return->toString()."\n";
}
| .../switch\b/ <commit> ParsedSwitch
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_switch", parsed_switch=>$item{ParsedSwitch});
#print "SemiStatement[ParsedSwitch]: ".$return->toString()."\n";
}
| .../try\b/ <commit> ParsedTryCatch
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_try_catch", parsed_try_catch=>$item{ParsedTryCatch});
#print "SemiStatement[ParsedTryCatch]: ".$return->toString()."\n";
}
| .../#/ <commit> ParsedMacro
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_macro", parsed_macro=>$item{ParsedMacro});
#print "SemiStatement[ParsedMacro]: ".$return->toString()."\n";
}
| .../EXPECT\b/ <commit> ParsedExpectStatement
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_expect_stmt", parsed_expect_stmt=>$item{ParsedExpectStatement});
#print "SemiStatement[ParsedExpectStatement]: ".$return->toString()."\n";
}
| .../ASSERTMSG\b/ <commit> ParsedAssertMsg
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_assert_msg", parsed_assert_msg=>$item{ParsedAssertMsg});
#print "SemiStatement[ParsedAssertMsg]: ".$return->toString()."\n";
}
| .../ASSERT\b/ <commit> ParsedAssert
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_assert", parsed_assert=>$item{ParsedAssert});
#print "SemiStatement[ParsedAssert]: ".$return->toString()."\n";
}
| .../ABORT\b/ <commit> ParsedAbort
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_abort", parsed_abort=>$item{ParsedAbort});
#print "SemiStatement[ParsedAbort]: ".$return->toString()."\n";
}
# Diagnostics steering users away from raw C library calls toward Mace idioms.
| /assert\b/ <commit> <error?: Please use ASSERT rather than assert>
| /abort\b/ <commit> <error?: Please use ABORT rather than abort>
| /drand48\b/ <commit> <error?: Please use RandomUtil rather than drand48>
| /random\b/ <commit> <error?: Please use RandomUtil rather than random>
| <error?> <reject>
| ParsedFCall ';' #{ $return = $item[1]; }
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_fcall", parsed_fcall=>$item{ParsedFCall});
#print "SemiStatement[ParsedFCall]: ".$return->toString()."\n";
}
| ParsedBinaryAssignOp[semi=>1] #{ $return = $item[1]; }
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_binary_assign_op", parsed_binary_assign_op=>$item{ParsedBinaryAssignOp});
#print "SemiStatement[ParsedBinaryAssignOp]: ".$return->toString()."\n";
}
| ParsedPlusPlus ';' #{ $return = $item[1]; }
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_plus_plus", parsed_plus_plus=>$item{ParsedPlusPlus});
#print "SemiStatement[ParsedPlusPlus]: ".$return->toString()."\n";
}
| ParsedControlFlow ';' #{ $return = $item[1]; }
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_control_flow", parsed_control_flow=>$item{ParsedControlFlow});
#print "SemiStatement[ParsedControlFlow]: ".$return->toString()."\n";
}
| ParsedCaseOrDefault <commit>
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_case_or_default", parsed_case_or_default=>$item{ParsedCaseOrDefault});
#print "SemiStatement[ParsedCaseOrDefault]: ".$return->toString()."\n";
}
| ParsedVar[semi=>1, arrayok=>1]
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_var", parsed_var=>$item{ParsedVar});
#print "SemiStatement[ParsedVar]: ".$return->toString()."\n";
}
| ParsedOutput ';'
{
$return = Mace::Compiler::ParseTreeObject::SemiStatement->new(type=>"parsed_output", parsed_output=>$item{ParsedOutput});
#print "SemiStatement[ParsedOutput]: ".$return->toString()."\n";
}
# Last resort: swallow a generic statement verbatim, warn, and let the default
# (pass-through) handling deal with it.
| StartPos SemiStatementBegin BraceBlock(?) (';')(?) EndPos { print "WARN (line $thisline): GENERIC SEMI-STATEMENT: ".substr($Mace::Compiler::Grammar::text, $item{StartPos}, 1+$item{EndPos}-$item{StartPos}).". Default parser will be used instead.\n"; } <error: Generic Semi-Statement on $thisline.>
#| <defer: Mace::Compiler::Globals::warning('unusual', $thisparser->{local}{filemap}->[$thisline], $thisparser->{local}{linemap}->[$thisline], "Bare Brace Block Found")> BraceBlock (';')(?) #{ $return = "UNUSUAL BARE BRACEBLOCK"; }
#| <error>
# SemiStatementFoo: lightweight "skip over a statement" variant used when the
# parse tree is not needed; warns on bare brace blocks.
SemiStatementFoo : Enum ';'
| SemiStatementBegin BraceBlockFoo(?) (';')(?)
| <defer: Mace::Compiler::Globals::warning('unusual', $thisparser->{local}{filemap}->[$thisline], $thisparser->{local}{linemap}->[$thisline], "Bare Brace Block Found")> BraceBlockFoo (';')(?)
| <error>
# MethodDecl: a method, optionally preceded by 'virtual'.
MethodDecl : VirtualMethod | Method
# VirtualMethod: parses the method then flags it virtual on the Method object.
VirtualMethod : 'virtual' Method
{
$item{Method}->isVirtual(1);
$return = $item{Method};
}
# MethodTermFoo: method terminator returning the raw body text (with #line
# markers spliced in), or "0" for pure-virtual, or "" for declaration-only.
MethodTermFoo : StartPos FileLineEnd BraceBlockFoo EndPos
{
my $startline = "";
my $endline = "";
#if(defined($Mace::Compiler::Globals::filename) and $Mace::Compiler::Globals::filename ne '') {
$startline = "\n#line ".$item{FileLineEnd}->[0]." \"".$item{FileLineEnd}->[1]."\"\n";
$endline = "\n// __INSERT_LINE_HERE__\n";
#}
# for my $statement (@{$item{BraceBlockFoo}}) {
# print "PARSED STATEMENT: $statement\n";
# }
$return = $startline.substr($Mace::Compiler::Grammar::text, $item{StartPos},
1 + $item{EndPos} - $item{StartPos}).$endline;
}
| '=' '0' ';' { $return = "0"; }
| ';' { $return = ""; }
| <reject:!$arg{forceColon}> ':' { $return = ""; }
# MethodTerm: parse-tree-producing counterpart of MethodTermFoo.  Returns a
# MethodTerm node: type=>"block" for a braced body, "zero" for pure-virtual
# ('= 0;'), "null" for a bare ';' or (when forceColon was passed) a ':'.
MethodTerm : StartPos FileLineEnd BraceBlock EndPos
{
# $startline/$endline were used by the old substring-based return (kept in the
# commented-out code below); they are computed but unused by the node return.
my $startline = "";
my $endline = "";
#if(defined($Mace::Compiler::Globals::filename) and $Mace::Compiler::Globals::filename ne '') {
$startline = "\n#line ".$item{FileLineEnd}->[0]." \"".$item{FileLineEnd}->[1]."\"\n";
$endline = "\n// __INSERT_LINE_HERE__\n";
#}
# if(defined($arg{methodName}))
# {
# print "| ".$arg{methodName}." {";
# print $item{BraceBlock}->toString()."\n";
# print "| }\n";
# print "|\n";
# } else {
# print "| Undefined {\n";
# print $item{BraceBlock}->toString()."\n";
# print "| }\n";
# print "|\n";
# }
# $return = $startline.substr($Mace::Compiler::Grammar::text, $item{StartPos},
# 1 + $item{EndPos} - $item{StartPos}).$endline;
# $return = $item{BraceBlock}->toString();
$return = Mace::Compiler::ParseTreeObject::MethodTerm->new(type=>"block", block=>$item{BraceBlock});
}
| '=' '0' ';'
{
$return = Mace::Compiler::ParseTreeObject::MethodTerm->new(type=>"zero");
}
| ';'
{
$return = Mace::Compiler::ParseTreeObject::MethodTerm->new(type=>"null");
}
| <reject:!$arg{forceColon}> ':'
{
$return = Mace::Compiler::ParseTreeObject::MethodTerm->new(type=>"null");
}
# Expression: thin wrapper producing an Expression node around Expression1.
# (The StartPos/EndPos substring variant is kept commented out.)
Expression : Expression1
#Expression : StartPos Expression1 EndPos
{
$return = Mace::Compiler::ParseTreeObject::Expression->new(expr1=>$item{Expression1});
# $return = substr($Mace::Compiler::Grammar::text, $item{StartPos},
# 1 + $item{EndPos} - $item{StartPos});
}
# AssignBinaryOp1: compound-assignment operators; bare '=' uses ...!'=' lookahead
# so that '==' is not misparsed as assignment.
AssignBinaryOp1 : '/=' | '*=' | '+=' | '-=' | '<<=' | '>>=' | '|=' | '&=' | '^=' |'=' ...!'=' | '%=' | <error>
# AssignBinaryOp: substring-capture wrapper around AssignBinaryOp1.
AssignBinaryOp : StartPos AssignBinaryOp1 EndPos
{
$return = substr($Mace::Compiler::Grammar::text, $item{StartPos},
1 + $item{EndPos} - $item{StartPos});
}
# Pre/post increment-decrement operators.  NOTE(review): the naming is inverted
# relative to the AssignBinaryOp/BinaryOp/UnaryOp convention (elsewhere the "1"
# suffix names the alternation and the bare name is the StartPos/EndPos substring
# wrapper).  Other rules reference PrePostAssignOp directly, so the alternation
# keeps that name and behavior.
PrePostAssignOp : '++' | '--' | <error>
# BUGFIX: this wrapper previously referenced itself (PrePostAssignOp1), which is
# infinite left recursion if it were ever invoked; it now wraps PrePostAssignOp
# like the other substring-capturing wrappers.  (No visible rule invokes it.)
PrePostAssignOp1 : StartPos PrePostAssignOp EndPos
{
$return = substr($Mace::Compiler::Grammar::text, $item{StartPos},
1 + $item{EndPos} - $item{StartPos});
}
# BinaryOp1: binary operators.  Negative lookaheads keep longer tokens intact:
# '+' ...!/[+=]/ avoids eating the first char of '++'/'+=', '<<' ...!'=' avoids
# '<<=', etc.  Order matters (longest / most-specific first within each family).
BinaryOp1 : '*' | '/' | '+' ...!/[+=]/ | '<<' ...!'=' | '>>' ...!'=' | '!=' | '==' | '<=' | '>=' | '<' | '>' | '||' | '|' | '&&' | '&' | '^' | '.' | '->' | '-' ...!/[-=]/ | '%' ...!'=' | <error>
# BinaryOp: substring-capture wrapper around BinaryOp1.
BinaryOp : StartPos BinaryOp1 EndPos
{
$return = substr($Mace::Compiler::Grammar::text, $item{StartPos},
1 + $item{EndPos} - $item{StartPos})
}
# UnaryOp1: prefix operators, including new/delete and array delete.
UnaryOp1 : '!' | '~' | '*' | '&' | /new\b/ | /delete\b/ | 'delete' '[' ']' | <error>
# UnaryOp: substring-capture wrapper around UnaryOp1.
UnaryOp : StartPos UnaryOp1 EndPos
{
$return = substr($Mace::Compiler::Grammar::text, $item{StartPos},
1 + $item{EndPos} - $item{StartPos});
}
# Assume - operators have usual conventions on r/w (+, -, ++, +=, ...)
# ExpressionOrAssignLValue: an expression that may be an assignment; wraps the
# "1" rule in a node (StartPos/EndPos are captured but the substring is unused).
ExpressionOrAssignLValue : StartPos ExpressionOrAssignLValue1 EndPos
{
$return = Mace::Compiler::ParseTreeObject::ExpressionOrAssignLValue->new(expr_or_assign_lvalue1=>$item{ExpressionOrAssignLValue1});
}
# ExpressionOrAssignLValue1: classifies an lvalue-led expression as post-op
# (x++), pre-op (++x), assignment (x op= e), or a bare lvalue, in that order.
ExpressionOrAssignLValue1 : ExpressionLValue1 PrePostAssignOp
{
$return = Mace::Compiler::ParseTreeObject::ExpressionOrAssignLValue1->new(type=>"post_op", expr_lvalue1=>$item{ExpressionLValue1}, prepost_assign_op=>$item{PrePostAssignOp});
}
| PrePostAssignOp ExpressionLValue1
{
$return = Mace::Compiler::ParseTreeObject::ExpressionOrAssignLValue1->new(type=>"pre_op", prepost_assign_op=>$item{PrePostAssignOp}, expr_lvalue1=>$item{ExpressionLValue1});
}
| ExpressionLValue1 AssignBinaryOp Expression1
{
$return = Mace::Compiler::ParseTreeObject::ExpressionOrAssignLValue1->new(type=>"assign_op", expr_lvalue1=>$item{ExpressionLValue1}, assign_binary_op=>$item{AssignBinaryOp}, expr1=>$item{Expression1});
}
| ExpressionLValue1
{
$return = Mace::Compiler::ParseTreeObject::ExpressionOrAssignLValue1->new(type=>"expr_lvalue1", expr_lvalue1=>$item{ExpressionLValue1});
}
| <error>
# ExpressionLValue: node wrapper around ExpressionLValue1.  When the caller
# passes parseFunctionCall=>1 (see ParsedFCall), the <reject:> fails the match
# unless the parsed lvalue is actually a function call.
ExpressionLValue :
StartPos ExpressionLValue1 EndPos <commit> <reject: $arg{parseFunctionCall} and not ($item{ExpressionLValue1}->getRef() eq "FUNCTION_CALL")>
{
#print "ExpressionLValue1->getRef() : ".$item{ExpressionLValue1}->getRef()."\n";
$return = Mace::Compiler::ParseTreeObject::ExpressionLValue->new(expr_lvalue1=>$item{ExpressionLValue1});
}
| <error?: Parsed Expression LValue, but Required Function Call> <error>
# ExpressionLValue1: chains ExpressionLValue2 atoms with '.', '->', dereferencing
# '*', and the ternary '?:'; right-recursive, so "a.b.c" nests to the right.
# $item[-1] is the trailing recursive ExpressionLValue1.
ExpressionLValue1 : ExpressionLValue2 '.' <commit> ExpressionLValue1
{
#print "ExpressionLValue1: ".$item{ExpressionLValue2}->toString()." . ".$item[-1]->toString()."\n";
$return = Mace::Compiler::ParseTreeObject::ExpressionLValue1->new(type=>"dot", expr_lvalue2=>$item{ExpressionLValue2}, expr_lvalue1=>$item[-1]);
}
| '*' <commit> ExpressionLValue1
{
$return = Mace::Compiler::ParseTreeObject::ExpressionLValue1->new(type=>"star", expr_lvalue1=>$item[-1]);
}
| ExpressionLValue2 '->' <commit> ExpressionLValue1
{
$return = Mace::Compiler::ParseTreeObject::ExpressionLValue1->new(type=>"arrow", expr_lvalue2=>$item{ExpressionLValue2}, expr_lvalue1=>$item[-1]);
}
| ExpressionLValue2 '?' <commit> ExpressionLValue1 ':' ExpressionLValue1
{
# $item[-3] is the then-branch, $item[-1] the else-branch of the ternary.
$return = Mace::Compiler::ParseTreeObject::ExpressionLValue1->new(type=>"question", expr_lvalue2=>$item{ExpressionLValue2}, expr_lvalue1a=>$item[-3], expr_lvalue1b=>$item[-1] );
}
| ExpressionLValue2
{
$return = Mace::Compiler::ParseTreeObject::ExpressionLValue1->new(type=>"expr_lvalue2", expr_lvalue2=>$item{ExpressionLValue2});
}
| <error>
# ArrayIndex: one '[ expr ]' subscript.
ArrayIndex : '[' <commit> Expression1 ']'
{
$return = Mace::Compiler::ParseTreeObject::ArrayIndex->new(expr1=>$item{Expression1});
}
| <error>
# ExpressionLValue2: an lvalue atom -- array access, function call (with plain
# expressions or with assignment-capable arguments), or a bare scoped id.
# The leading bare <reject> alternative is a deliberate no-op placeholder.
ExpressionLValue2: <reject>
| ScopedId ...'[' ArrayIndex(s)
{
my $node = Mace::Compiler::ParseTreeObject::ExpressionLValue2->new(type=>"array", scoped_id=>$item{ScopedId});
$node->array_index(@{$item[3]});
# print "ExpressionLValue2[ARRAY]: ".$node->toString()."\n";
$return = $node;
}
| ScopedId '(' Expression1(s? /,/) ')'# { $return = "FUNCTION_CALL"; }
{
my $node = Mace::Compiler::ParseTreeObject::ExpressionLValue2->new(type=>"fcall", scoped_id=>$item{ScopedId});
$node->not_null_expr1(scalar(@{$item[3]}));
if (scalar(@{$item[3]})) {
$node->expr1(@{$item[3]});
}
# print "ExpressionLValue2[F-CALL]: ".$node->toString()."\n";
$return = $node;
}
| ScopedId '(' <commit> ExpressionOrAssignLValue1(s /,/) ')'# { $return = "FUNCTION_CALL"; }
{
my $node = Mace::Compiler::ParseTreeObject::ExpressionLValue2->new(type=>"fcall_assign", scoped_id=>$item{ScopedId});
$node->expr_or_assign_lvalue1(@{$item[-2]});
# print "ExpressionLValue2[F-CALL]: ".$node->toString()."\n";
$return = $node;
}
| ScopedId
{
$return = Mace::Compiler::ParseTreeObject::ExpressionLValue2->new(type=>"scoped_id", scoped_id=>$item{ScopedId});
# print "ExpressionLValue2[ScopedId]: ".$return->toString()."\n";
}
| <error>
#Changed to Expression1 to allow unary operators before things like my.foo
# Expression1: expression spine -- unary op, binary op chain, ternary, or a
# single Expression2 atom.  Right-recursive, so "a + b + c" nests rightward.
Expression1 : UnaryOp <commit> Expression1
{
$return = Mace::Compiler::ParseTreeObject::Expression1->new(type=>"unary_op", unary_op=>$item{UnaryOp}, expr1=>$item{Expression1} );
# print "Expression1[unary_op] : ".$return->toString()."\n";
}
| Expression2 BinaryOp <commit> Expression1
{
$return = Mace::Compiler::ParseTreeObject::Expression1->new(type=>"binary_op", expr2=>$item{Expression2}, binary_op=>$item{BinaryOp}, expr1=>$item{Expression1} );
# print "Expression1[binary_op] : ".$return->toString()." op : ".$item{BinaryOp}."\n";
}
| Expression2 '?' <commit> Expression1 ':' Expression1
{
# $item[-3] is the then-branch, $item[-1] the else-branch of the ternary.
$return = Mace::Compiler::ParseTreeObject::Expression1->new(type=>"question", expr2=>$item{Expression2}, expr1=>$item[-3], expr1a=>$item[-1] );
# print "Expression1[question] : ".$return->toString()."\n";
}
| Expression2
{
$return = Mace::Compiler::ParseTreeObject::Expression1->new(type=>"expr2", expr2=>$item{Expression2} );
# print "Expression1[expr2] : ".$return->toString()."\n";
}
| <error>
# QuotedString: a double-quoted C string literal with backslash escapes; the
# regex carries its own /x-mode commentary below.
QuotedString : <skip:'\s*'>
/" #Opening Quote
[^"\\\\]* #Any number of characters not a quote or slash
( #Group: 1
\\\\ #Followed by a slash
. #Any character
[^"\\\\]* #Any number of characters not a quote or slash
)* #1: Repeated any number of times
" #Closing quote
/sx
# Number: hex (optionally LL-suffixed), decimal with LL suffix, or plain
# integer/float, with optional leading '-'.
Number : /0x[a-fA-F0-9]+(LL)?/ | /-?\d+LL/ | /-?\d+(\.\d+)?/ | <error>
# Character: a single-quoted character literal, optionally backslash-escaped.
Character : /'\\?.'/ | <error>
# ParenOrBrace: lookahead helper -- the next token starts a call or a subscript.
ParenOrBrace : '(' | '[' | <error>
# ArrayIndOrFunction: one postfix operation -- a call argument list or an
# array subscript.
ArrayIndOrFunction : '(' Expression1(s? /,/) ')'
{
my $node = Mace::Compiler::ParseTreeObject::ArrayIndOrFunction->new(type=>"func");
$node->not_null_expr1_list(scalar(@{$item[2]}));
if (scalar(@{$item[2]})) {
$node->expr1_list(@{$item[2]});
}
# print "ArrayIndOrFunction[FUNC] : ".$node->toString()."\n";
$return = $node;
}
| '[' Expression1 ']'
{
$return = Mace::Compiler::ParseTreeObject::ArrayIndOrFunction->new(type=>"array", expr1=>$item{Expression1});
# print "ArrayIndOrFunction[ARRAY] : ".$return->toString()."\n";
}
| <error>
# ArrayIndOrFunctionParts: right-recursive chain of postfix ops ("f(x)[i](y)...");
# terminates with a not_null=>0 node when no '(' or '[' follows.
ArrayIndOrFunctionParts : ...ParenOrBrace <commit> ArrayIndOrFunction ArrayIndOrFunctionParts
{
$return = Mace::Compiler::ParseTreeObject::ArrayIndOrFunctionParts->new(not_null=>1, array_ind_or_function=>$item{ArrayIndOrFunction}, array_ind_or_function_parts=>$item{ArrayIndOrFunctionParts});
# print "ArrayIndOrFunctionParts[ARRAY-FUNC] : ".$return->toString()."\n";
}
| <error?> <reject>
|
{
$return = Mace::Compiler::ParseTreeObject::ArrayIndOrFunctionParts->new(not_null=>0);
# print "ArrayIndOrFunctionParts[NULL]\n";
}
# Expression2: expression atom -- number, id with postfix call/subscript chain,
# char string, quoted string, C-style typecast, parenthesized expression, or a
# bare scoped id.  Alternative order resolves ambiguity (typecast is tried
# before parenthesized expression; id+postfix before bare id).
Expression2 : Number
{
$return = Mace::Compiler::ParseTreeObject::Expression2->new(type=>"number", number=>$item{Number} );
}
| ScopedId ...ParenOrBrace <commit> ArrayIndOrFunctionParts
{
$return = Mace::Compiler::ParseTreeObject::Expression2->new(type=>"array_func", scoped_id=>$item{ScopedId}, array_ind_or_function_parts=>$item{ArrayIndOrFunctionParts} );
# print "Expression2[ARRAY_OR_FUNC] : ".$return->toString()."\n";
}
| ..."'" <commit> "'" /[^\']*/ "'"
{
# $item[-2] is the regex match between the single quotes.
$return = Mace::Compiler::ParseTreeObject::Expression2->new(type=>"string", string=>$item[-2] );
}
| ...'"' <commit> QuotedString
{
$return = Mace::Compiler::ParseTreeObject::Expression2->new(type=>"quoted_string", quoted_string=>$item[-1] );
}
| '(' Type ')' Expression1
{
$return = Mace::Compiler::ParseTreeObject::Expression2->new(type=>"typecast", typecast=>$item{Type}, expr1=>$item{Expression1} );
}
| '(' <commit> Expression1 ')'
{
$return = Mace::Compiler::ParseTreeObject::Expression2->new(type=>"expr1", expr1=>$item{Expression1} );
}
| ScopedId
{
$return = Mace::Compiler::ParseTreeObject::Expression2->new(type=>"scoped_id", scoped_id=>$item{ScopedId} );
}
| <error>
# TypeOptions: optional '__attribute((...))' annotation list; only parsed when the
# caller passed typeopt=>1, otherwise returns an empty arrayref.
TypeOptions : <reject: !$arg{typeopt}> '__attribute((' <commit> TypeOption(s /,/) '))'
{
$return = $item[4];
} | { $return = []; }
# TypeOption: one attribute "Name(params)"; %opt is a rule-local hash that
# TypeOptionParamList fills in through the options=>\%opt argument.
TypeOption : <rulevar: %opt>
TypeOption : FileLine Id '(' TypeOptionParamList['options'=>\%opt] ')'
{
$return = Mace::Compiler::TypeOption->new(name => $item{Id}, line => $item{FileLine}->[0], file => $item{FileLine}->[1]);
$return->options(%opt);
} | <error>
# TypeOptionParamList: ';'-separated "key = expr" or bare-expr entries, written
# into the caller-supplied options hash (bare entries map to themselves).
TypeOptionParamList :
(
Id '=' <commit> Expression
{ $arg{options}->{$item{Id}} = $item{Expression}->toString() }
# { $arg{options}->{$item{Id}} = $item{Expression} }
| Expression
{ $arg{options}->{$item{Expression}->toString()} = $item{Expression}->toString() }
# { $arg{options}->{$item{Expression}} = $item{Expression} }
)(s? /;/) (';')(?) ...')' | <error>
# ArraySizes: '[N]' suffixes, only allowed when the caller passed arrayok=>1.
ArraySizes : <reject: !$arg{arrayok}> ArraySize(s) | { $return = []; }
# ArraySize: one '[ expr ]' dimension, returned as its string form.
ArraySize : '[' Expression ']' { $return = $item{Expression}->toString(); } | <error>
#ArraySize : '[' Expression ']' { $return = $item{Expression}; } | <error>
# CheckSemi: consumes a ';' iff the caller passed semi=>1; matches empty otherwise.
CheckSemi : <reject: !$arg{semi}> ';' <commit> | <reject: $arg{semi}> | <error>
# Parameter: a declaration -- a typed parameter (lookahead on Type), or, when the
# caller allows typeOptional, a bare identifier (ParameterId).  %arg is threaded
# through so sub-rules see flags like semi/arrayok/typeopt.
Parameter : ...Type ParameterType[%arg]
{
# print "in Parameter:" . $item{ParameterType}->{type} .":" . $item{ParameterType}->{name} . "\n";
$return = $item{ParameterType};
}
| <reject:!defined($arg{typeOptional})> ParameterId[%arg]
| <error>
# FIXME : should be able to process "int x=3, y=2;"
# ParameterType: the workhorse for typed declarations.  Alternatives, in order:
#  1. "Type id [dims] [opts] = Expression;"          (simple default value)
#  2. "Type id [dims] [opts] = <expr-or-assign>;"     (assignment-shaped default)
#  3. hard error when mustinit was required but no initializer matched
#  4. "Type id [dims] ( args );"                      (constructor-style init,
#     plain-expression args; only when initializerOk)
#  5. same as 4 but with assignment-capable args
#  6. "Type id [dims] [opts];"                        (no default)
#  7. hard error when declareonly was required but nothing above matched
#  8. "Type <- Type" / "Type -> Type"                 (nameless mapped param,
#     only when mapOk; direction checked against usesOrImplements)
#  9. "Type [= Expression]" with no identifier        (only when noIdOk)
# 10. diagnostic fallback that never matches (<reject>), then error.
# Each success builds a Mace::Compiler::Param carrying name/type/file/line and
# default/expression info.
ParameterType : <reject: $arg{declareonly}> Type FileLineEnd Id ArraySizes[%arg] TypeOptions[%arg] '=' Expression CheckSemi[%arg]
{
# print "ParameterType[AssignExp] : ".$item{Type}->type()." ".$item{Id}." := ".$item{Expression}->toString()."\n";
#use Mace::Compiler::Context;
my $p = Mace::Compiler::Param->new(name => $item{Id},
type => $item{Type},
hasDefault => 1,
filename => $item{FileLineEnd}->[1],
line => $item{FileLineEnd}->[0],
default => $item{Expression}->toString());
# default => $item{Expression});
$p->typeOptions(@{$item{TypeOptions}});
$p->arraySizes(@{$item{ArraySizes}});
$return = $p;
}
| <reject: $arg{declareonly}> Type FileLineEnd Id ArraySizes[%arg] TypeOptions[%arg] '=' <commit> ExpressionOrAssignLValue CheckSemi[%arg]
{
# print "ParameterType[AssignExprOrAssign] : ".$item{Type}->type()." ".$item{Id}." := ".$item{ExpressionOrAssignLValue}->toString()."\n";
my $p = Mace::Compiler::Param->new(name => $item{Id},
type => $item{Type},
hasDefault => 1,
filename => $item{FileLineEnd}->[1],
line => $item{FileLineEnd}->[0],
default => $item{ExpressionOrAssignLValue}->toString());
# default => $item{ExpressionOrAssignLValue});
$p->typeOptions(@{$item{TypeOptions}});
$p->arraySizes(@{$item{ArraySizes}});
$return = $p;
}
| <reject: !$arg{mustinit}> <commit> <error>
| <reject: !defined($arg{initializerOk})> Type FileLineEnd Id ArraySizes[%arg] '(' Expression(s? /,/) ')' CheckSemi[%arg]
{
my $p = Mace::Compiler::Param->new(name => $item{Id},
type => $item{Type},
# hasDefault => 1,
# hasExpression => 1,
filename => $item{FileLineEnd}->[1],
line => $item{FileLineEnd}->[0],
expression => "(".join(", ",map { $_->toString() } @{$item[-3]}).")",
default => $item{Type}->type()."(".join(", ",map { $_->toString() } @{$item[-3]}).")");
# default => $item{Type}->type()."(".join(", ", @{$item[-3]}).")");
# $item[-5] is the ArraySizes list: dimensions present => treat the
# initializer as a default, otherwise as a constructor expression.
if( scalar(@{$item[-5]}) ) {
$p->hasDefault(1);
# print "ParameterType[ArrayExprDefault] : ".$item{Type}->type()." ".$item{Id}."(".join(", ",map { $_->toString() } @{$item[-3]}).")"."\n";
} else {
$p->hasExpression(1);
# print "ParameterType[ArrayExprExpr] : ".$item{Type}->type()." ".$item{Id}."(".join(", ",map { $_->toString() } @{$item[-3]}).")"."\n";
}
$p->arraySizes(@{$item{ArraySizes}});
$return = $p;
}
| <reject: !defined($arg{initializerOk})> Type FileLineEnd Id ArraySizes[%arg] '(' <commit> ExpressionOrAssignLValue(s? /,/) ')' CheckSemi[%arg]
{
my $p = Mace::Compiler::Param->new(name => $item{Id},
type => $item{Type},
# hasDefault => 1,
# hasExpression => 1,
filename => $item{FileLineEnd}->[1],
line => $item{FileLineEnd}->[0],
expression => "(".join(", ",map { $_->toString() } @{$item[-3]}).")",
default => $item{Type}->type()."(".join(", ", map { $_->toString() } @{$item[-3]}).")");
# default => $item{Type}->type()."(".join(", ", @{$item[-3]}).")");
# $item[-6] is the ArraySizes list here (offset shifted by <commit>).
if( scalar(@{$item[-6]}) ) {
# print "ParameterType[ArrayExprLValueDefault] : ".$item{Type}->type()." ".$item{Id}."(".join(", ", map { $_->toString() } @{$item[-3]}).")"."\n";
$p->hasDefault(1);
} else {
# print "ParameterType[ArrayExprLValueExpr] : ".$item{Type}->type()." ".$item{Id}."(".join(", ", map { $_->toString() } @{$item[-3]}).")"."\n";
$p->hasExpression(1);
}
$p->arraySizes(@{$item{ArraySizes}});
$return = $p;
}
| Type FileLineEnd Id <commit> ArraySizes[%arg] TypeOptions[%arg] CheckSemi
{
#print "Param1 type ".$item{Type}->toString()."\n";
# print "ParameterType[Var] : ".$item{Type}->type()." ".$item{Id}."\n";
# NOTE(review): CheckSemi here is called WITHOUT [%arg], so $arg{semi} is never
# seen and no ';' is consumed by this branch -- TODO confirm intended.
my $p = Mace::Compiler::Param->new(name => $item{Id},
type => $item{Type},
filename => $item{FileLineEnd}->[1],
line => $item{FileLineEnd}->[0],
hasDefault => 0);
$p->typeOptions(@{$item{TypeOptions}});
$p->arraySizes(@{$item{ArraySizes}});
$return = $p;
}
| <reject: !$arg{declareonly}> <commit> <error>
| <reject:!defined($arg{mapOk})> Type FileLineEnd DirArrow[direction => $arg{usesOrImplements}] Type
{
#print "Param2 type ".$item{Type}->toString()."\n";
# print "ParameterType[Noname] : ".$item[5]->type()."\n";
# $item[5] is the second Type (the mapped-to type); $item[2] the first Type.
# Nameless params get a generated unique "noname_N" name.
my $p = Mace::Compiler::Param->new(name => "noname_".$thisrule->{'local'}{'paramnum'}++,
type => $item[5],
typeSerial => $item[2],
filename => $item{FileLineEnd}->[1],
line => $item{FileLineEnd}->[0],
hasDefault => 0);
$return = $p;
}
| Type FileLineEnd ('=' Expression)(?) <reject:!defined($arg{noIdOk})>
{
#print "Param2 type ".$item{Type}->toString()."\n";
# print "ParameterType[NonameExpr] : ".$item{Type}->type()."\n";
my $p = Mace::Compiler::Param->new(name => "noname_".$thisrule->{'local'}{'paramnum'}++,
type => $item{Type},
filename => $item{FileLineEnd}->[1],
line => $item{FileLineEnd}->[0],
hasDefault => scalar(@{$item[3]}));
if ($p->hasDefault()) {
$p->default(${$item[3]}[0]);
}
$return = $p;
}
| StartPos SemiStatementBegin EndPos {
#print "Note (line $thisline): NOT PARAMETER-TYPE: ".substr($Mace::Compiler::Grammar::text, $item{StartPos}, 1+$item{EndPos}-$item{StartPos})."\n";
} <reject>
| <error?> <error>
# ParameterId: a bare identifier parameter with no type, permitted only when the
# caller passed typeOptional; builds a Param with no type set.
ParameterId : Id FileLineEnd <reject:!defined($arg{typeOptional})>
{
# print "ParameterId : ".$item{Id}."\n";
#print "Param2 type ".$item{Type}->toString()."\n";
my $p = Mace::Compiler::Param->new(name => $item{Id},
#type => $item{Type},
filename => $item{FileLineEnd}->[1],
line => $item{FileLineEnd}->[0],
hasDefault => 0);
$return = $p;
}
| <error>
# ATTypeDef: "typedef Type Name;" inside an auto_type body.
ATTypeDef : /typedef\s/ FileLine Type Id ';'
{
$return = Mace::Compiler::TypeDef->new(name=>$item{Id}, type=>$item{Type}, line => $item{FileLine}->[0], filename => $item{FileLine}->[1]);
}
| <error>
AutoType : Id FileLine TypeOptions[typeopt => 1] '{' ATTypeDef(s?) Parameter[typeopt => 1, semi => 1](s?) Constructor[className => $item{Id}](s?) Method[staticOk=>1](s?) '}' (';')(?)
{
my $at = Mace::Compiler::AutoType->new(name => $item{Id},
line => $item{FileLine}->[0],
filename => $item{FileLine}->[1],
);
$at->typeOptions(@{$item{TypeOptions}});
$at->typedefs(@{$item[5]});
$at->fields(@{$item[6]});
$at->constructors(@{$item[7]});
$at->methods(@{$item[8]});
for my $m (@{$item[7]}) {
if($m->name ne $item{Id}) {
Mace::Compiler::Globals::error("bad_auto_type", $item{FileLine}->[1], $item{FileLine}->[0], "Constructor name does not match auto_type name");
}
}
my $key = "service";
if (defined($arg{key})) {
$key = $arg{key};
}
$thisparser->{'local'}{$key}->push_auto_types($at);
}
| <error>
#XXX: reject
DirArrow : '<-' <commit> <reject: $arg{direction} eq "uses"> | '->' <commit> <reject: $arg{direction} eq "implements">
KeyEqVal : Id '=' Id { $return = [ $item[1], $item[-1] ] }
MethodOptions : '[' KeyEqVal(s /;/) (';')(?) ']' { $return = $item[2];}
InitializerItem : ScopedId '(' Expression(s? /,/) ')'
InitializerList : StartPos FileLineEnd ':' InitializerItem(s /,/) EndPos
{
my $startline = "";
my $endline = "";
#if(defined($Mace::Compiler::Globals::filename) and $Mace::Compiler::Globals::filename ne '') {
$startline = "\n#line ".$item{FileLineEnd}->[0]." \"".$item{FileLineEnd}->[1]."\"\n";
$endline = "\n// __INSERT_LINE_HERE__\n";
#}
$return = $startline.substr($Mace::Compiler::Grammar::text, $item{StartPos},
1 + $item{EndPos} - $item{StartPos}).$endline;
}
| {$return = ""}
Constructor : <reject:defined($arg{className})> <commit> /\b$thisparser->{'local'}{'classname'}\b/ '(' Parameter(s? /,/) ')' InitializerList MethodTerm
{
my $t = Mace::Compiler::Type->new(type => "");
my $m = Mace::Compiler::Method->new(name => $thisparser->{'local'}{'classname'},
returnType => $t,
isUsedVariablesParsed => 1,
body => $item{InitializerList}.$item{MethodTerm}->toString());
if (scalar(@{$item[5]})) {
$m->params(@{$item[5]});
}
$return = $m;
}
| <reject:!defined($arg{className})> <commit> /\b$arg{className}\b/ '(' Parameter(s? /,/) ')' InitializerList MethodTerm
{
my $t = Mace::Compiler::Type->new(type => "");
my $m = Mace::Compiler::Method->new(name => $arg{className},
returnType => $t,
isUsedVariablesParsed => 1,
body => $item{InitializerList}.$item{MethodTerm}->toString());
if (scalar(@{$item[5]})) {
$m->params(@{$item[5]});
}
$return = $m;
}
| <error>
Destructor : ('virtual')(?) '~' /\b$thisparser->{'local'}{'classname'}\b/ '(' ')' MethodTerm
{
my $t = Mace::Compiler::Type->new(type => "");
my $m = Mace::Compiler::Method->new(name => '~' . $thisparser->{'local'}{'classname'},
isUsedVariablesParsed => 1,
returnType => $t,
body => $item{MethodTerm}->toString());
if (defined($item[1])) {
$m->isVirtual(1);
}
$return = $m;
}
ThrowType : Type { $return = $item{Type}->toString() } | "..."
Throws : 'throw' '(' ThrowType ')'
{
$return = 'throw('.$item{ThrowType}.')';
}
#MethodReturnType : <reject:$arg{noReturn}> Type | <reject:!$arg{noReturn}> { $return = Mace::Compiler::Type->new(); } | <error>
MethodReturnType : Type ...!'(' { $return = $item{Type} } | <reject:!$arg{noReturn}> { $return = Mace::Compiler::Type->new(); } | <error>
MethodOperator : '==' | '<=' | '>=' | '<' | '>' | '=' | '!=' | '+' | '*' | '/' | '->' | '-'
MethodName : /operator\b/ <commit> MethodOperator { $return = "operator".$item{MethodOperator}; } | Id | <error>
StaticToken : /static\b/
#Method : StaticToken(?) <reject:!defined($arg{context}) or (defined($arg{context}) and !$arg{context}) or (!$arg{staticOk} and scalar(@{$item[1]}))> MethodReturnType[%arg] MethodName FileLineEnd '(' Parameter[%arg](s? /,/) ')' ConstToken(?) Throws(?) MethodOptions(?) MethodTerm[forceColon => $arg{forceColon}, methodName => $item{MethodName}]
Method : StaticToken(?) <reject:!( $Mace::Compiler::Globals::useSnapshot and $Mace::Compiler::Globals::useParseVariables) or (!$arg{staticOk} and scalar(@{$item[1]}))> MethodReturnType[%arg] MethodName FileLineEnd '(' Parameter[%arg](s? /,/) ')' ConstToken(?) Throws(?) MethodOptions(?) MethodTerm[forceColon => $arg{forceColon}, methodName => $item{MethodName}]
{
# context = 1
#print STDERR "useSnapshot : ".$Mace::Compiler::Globals::useSnapshot."\n";
# if( defined($item{MethodName}) ) {
# print STDERR "Method ".$item{MethodName}." uses INCONTEXT parser.\n";
# } else {
# print STDERR "Method [unnamed] uses INCONTEXT parser.\n";
# }
# print "DEBUG: ".$item{FileLine}->[2]."\n";
# print "DEBUG1: ".$item{FileLine}->[0]."\n";
# print "DEBUG2: ".$item{FileLine}->[1]."\n";
# my $mt = $item{MethodTerm};
my $m = Mace::Compiler::Method->new(name => $item{MethodName},
returnType => $item{MethodReturnType},
isConst => scalar(@{$item[-4]}),
isStatic => scalar(@{$item[1]}),
isUsedVariablesParsed => 1,
line => $item{FileLineEnd}->[0],
filename => $item{FileLineEnd}->[1],
body => $item{MethodTerm}->toString(),
);
if( defined $arg{context} ){
$m->targetContextObject( $arg{context} );
}
if( defined $arg{snapshot} ){
$m->snapshotContextObjects( $arg{snapshot} );
}
$m->usedStateVariables(@{$item{MethodTerm}->usedVar()});
if (scalar($item[-3])) {
$m->throw(@{$item[-3]}[0]);
}
# print STDERR "MethodName : ".$item{MethodName}."\n";
if (scalar(@{$item[7]})) {
$m->params(@{$item[7]});
# for my $el (@{$item[7]})
# {
# print STDERR " Param: ".$el->name()."\n";
# }
}
if (scalar(@{$item[-2]})) {
my $ref = ${$item[-2]}[0];
for my $el (@$ref) {
$m->options(@$el);
# print STDERR "MethodOptions DEBUG: ".$el->[0]."=".$el->[1]."\n";
}
}
$return = $m;
}
#| StaticToken(?) <reject:( defined($arg{context}) and $arg{context} ) or (!$arg{staticOk} and scalar(@{$item[1]}))> MethodReturnType[%arg] MethodName FileLineEnd '(' Parameter[%arg](s? /,/) ')' ConstToken(?) Throws(?) MethodOptions(?) MethodTermFoo[forceColon => $arg{forceColon}, methodName => $item{MethodName}]
| StaticToken(?) <reject:(!$arg{staticOk} and scalar(@{$item[1]}))> MethodReturnType[%arg] MethodName FileLineEnd '(' Parameter[%arg](s? /,/) ')' ConstToken(?) Throws(?) MethodOptions(?) MethodTermFoo[forceColon => $arg{forceColon}, methodName => $item{MethodName}]
{
# context = 0
# print STDERR "useSnapshot : ".$Mace::Compiler::Globals::useSnapshot."\n";
# if( defined($item{MethodName}) ) {
# print STDERR "Method ".$item{MethodName}." uses DEFAULT parser.\n";
# } else {
# print STDERR "Method [unnamed] uses DEFAULT parser.\n";
# }
# print $item{MethodName}."\n";
# print "DEBUG: ".$item{FileLine}->[2]."\n";
# print "DEBUG1: ".$item{FileLine}->[0]."\n";
# print "DEBUG2: ".$item{FileLine}->[1]."\n";
# my $mt = $item{MethodTerm};
my $m = Mace::Compiler::Method->new(name => $item{MethodName},
returnType => $item{MethodReturnType},
isConst => scalar(@{$item[-4]}),
isStatic => scalar(@{$item[1]}),
isUsedVariablesParsed => 0,
line => $item{FileLineEnd}->[0],
filename => $item{FileLineEnd}->[1],
body => $item{MethodTermFoo},
);
if( defined $arg{context} ){
$m->targetContextObject( $arg{context} );
}
if( defined $arg{snapshot} ){
$m->snapshotContextObjects( $arg{snapshot} );
}
# $m->usedStateVariables(@{$item{MethodTerm}->usedVar()});
if (scalar($item[-3])) {
$m->throw(@{$item[-3]}[0]);
}
if (scalar(@{$item[7]})) {
$m->params(@{$item[7]});
}
if (scalar(@{$item[-2]})) {
my $ref = ${$item[-2]}[0];
for my $el (@$ref) {
$m->options(@$el);
# print STDERR "MethodOptions DEBUG: ".$el->[0]."=".$el->[1]."\n";
}
}
$return = $m;
}
| <error>
PointerType : NonPointerType ConstToken ('*')(s) | NonPointerType ('*')(s) ConstToken ('*')(s) | NonPointerType ('*')(s?) | <error>
NonPointerType : BasicType | StructType | ScopedType | <error>
#XXX-CK: Shouldn't this really be a recursion on Type, not Id? After all, you can have std::map<> or map<>::iterator . . .
ScopedId : StartPos TemplateTypeId ('::' TemplateTypeId)(s?) EndPos
{
my $node = Mace::Compiler::ParseTreeObject::ScopedId->new(val=>substr($Mace::Compiler::Grammar::text, $item{StartPos}, 1 + $item{EndPos} - $item{StartPos}));
$return = $node;
}
#{ $return = substr($Mace::Compiler::Grammar::text, $item{StartPos}, 1 + $item{EndPos} - $item{StartPos}); }
ScopedType : TemplateType ('::' TemplateType)(s?)
#NOTE: CK -- added ScopedType to Template type to allow ::
# CK -- Added | Expression since template parameters could be constant expressions
# CK -- BasicType added to parse primitive types and their modifiers
TemplateTypeId : Id '<' ( ConstToken(?) (PointerType | Number) )(s /,/) '>' | Id | <error>
TemplateType : Id '<' <commit> ( ConstToken(?) (PointerType | Number) )(s /,/) '>' | Id | <error>
ConstToken : /const\b/
RefToken : '&'
#NOTE: CK -- added \b to avoid problems parsing things like int8_t. Also -- removed 'long long'
TypeModifier : /\blong\b/ | /\bsigned\b/ | /\bunsigned\b/ | /\bshort\b/ | <error>
PrimitiveType : /\bint\b/ | /\bdouble\b/ | /\bfloat\b/ | /\bchar\b/ | <error>
BasicType : TypeModifier(0..3) PrimitiveType | TypeModifier(1..3) | <error>
StructType : 'struct' Id
Type : ConstToken(?) StartPos PointerType EndPos ConstToken(?) RefToken(?)
{
my $type = substr($Mace::Compiler::Grammar::text, $item{StartPos},
1 + $item{EndPos} - $item{StartPos});
$return = Mace::Compiler::Type->new(type => Mace::Util::trim($type),
isConst1 => scalar(@{$item[1]}),
isConst2 => scalar(@{$item[-2]}),
isConst => (scalar(@{$item[1]}) or scalar(@{$item[-2]})),
isRef => scalar(@{$item[-1]}));
}
| <error>
ProtectionToken : /public\b/ | /private\b/ | /protected\b/
EOFile : /^\Z/
}; # COMMON grammar
1;
| jojochuang/eventwave | perl5/Mace/Compiler/CommonGrammar.pm | Perl | bsd-3-clause | 57,473 |
package Resmon::Updater;
use strict;
use File::Find;
use IO::Socket;
my $assess;
my $newfiles;
my $changedfiles;
my %times;
my $debug;
my $resmondir;
sub update {
    # Update the resmon checkout in $resmondir via subversion and, if any
    # module files were added or changed, HUP the running resmon processes
    # so they reload.  If the reloaded resmon reports a bad state, revert
    # to the previous revision.
    # Arguments: debug flag, resmon install directory.
    # Returns: 0 = nothing changed, 1 = updated OK, 2 = svn not found,
    #          3 = update broke resmon and was reverted.

    # Ignore a HUP, otherwise we will kill ourselves when we try to reload
    # because we are called as 'resmon'
    $SIG{'HUP'} = 'IGNORE';

    # Debug mode (currently specified with the -d option to resmon). WARNING:
    # turning this on will reload resmon on every invocation regardless of
    # whether there were any files updated or not.
    $debug = shift;

    $resmondir = shift;

    # Check for subversion in the usual install locations (first entry is a
    # bare 'svn', i.e. relative to the current directory -- kept as-is).
    my $svn;
    foreach my $i (qw(svn /usr/bin/svn /usr/local/bin/svn /opt/omni/bin/svn)) {
        if (-x "$i") {
            print "Found subversion at $i\n" if $debug;
            $svn = $i;
            last;
        }
    }
    if (!$svn) {
        print STDERR "Cannot find subversion. Exiting.\n";
        return 2;
    }

    # Find the last revision, in case we need to revert
    my $last_rev=`$svn info $resmondir | awk '/^Revision/ {print \$2;}'`;
    chomp $last_rev;
    print "Last rev: $last_rev\n" if $debug;

    # Run the update.  BUGFIX: use low-precedence 'or' here -- with '||' the
    # die bound to $resmondir itself, so a *failed* chdir was silently
    # ignored and the subsequent commands ran in the wrong directory.
    chdir $resmondir or die "Cannot chdir to $resmondir: $!\n";
    $assess = 0;
    $newfiles = 0;
    $changedfiles = 0;
    %times = ();
    my @dirs = ("$resmondir/lib/Resmon/Module");
    # Pass 1: record mtimes; pass 2 (after svn update): count new/changed files.
    find( { wanted => \&track_mod_times, no_chdir => 1 }, @dirs);
    `$svn update -q`;
    $assess = 1;
    find( { wanted => \&track_mod_times, no_chdir => 1 }, @dirs);
    print "Newfiles: $newfiles ChangedFiles: $changedfiles\n" if $debug;
    if ($newfiles + $changedfiles || $debug) {
        print "We have changed files, reloading resmon...\n" if $debug;
        reload_resmon();

        ## Check to see if everything went OK
        sleep(3);
        if (!get_resmon_status()) {
            print STDERR "There is a problem with the update, reverting to ";
            print STDERR "revision $last_rev\n";
            my $output = `$svn update -r $last_rev $resmondir`;
            print $output if $debug;
            reload_resmon();
            return 3;
        }
        return 1;
    }
    return 0;
}
sub get_resmon_status {
    # Load the resmon config file to find the listen port, then connect to
    # the local resmon instance and scrape its self-status page.
    # Returns 1 if resmon reports state OK, 0 on any failure (missing
    # config, no port, connect error, BAD or indeterminate state).
    my $port = 0;
    my ($state, $modstatus, $configstatus, $message, $revision);
    # Three-arg open with a lexical handle (was a 2-arg open on the
    # bareword CONF, which is both a global and a mode-injection risk).
    my $conf_fh;
    if (!open($conf_fh, '<', "$resmondir/resmon.conf")) {
        print STDERR "Unable to open config file";
        return 0;
    }
    while(<$conf_fh>) {
        # Matches e.g. "PORT 81;"
        if (/PORT\W*(.+);/) {
            $port = $1;
        }
    }
    close($conf_fh);
    if (!$port) {
        print STDERR "Unable to determine port";
        return 0;
    }
    print "Port is: $port\n" if $debug;
    my $host = "127.0.0.1";
    my $handle = IO::Socket::INET->new(Proto    => "tcp",
                                       PeerAddr => $host,
                                       PeerPort => $port);
    if (!$handle) {
        print STDERR "can't connect to port $port on $host: $!";
        return 0;
    }
    # Ask resmon for its own status report and pull out the XML-ish fields.
    print $handle "GET /RESMON/resmon HTTP/1.0\n\n";
    while(<$handle>) {
        if (/<state>(\w+)<\/state>/) {
            $state=$1;
        } elsif (/<modstatus>(\w+)<\/modstatus>/) {
            $modstatus=$1;
        } elsif (/<configstatus>(\w+)<\/configstatus>/) {
            $configstatus=$1;
        } elsif (/<message>(.+)<\/message>/) {
            $message=$1;
        } elsif (/<revision>r(\d+)<\/revision>/) {
            $revision=$1;
        }
    }
    print "State: $state\nModules: $modstatus\n" if $debug;
    print "Config: $configstatus\nRevision: $revision\n" if $debug;
    print "Message: $message\n" if $debug;
    # Guard against an undefined $state (no <state> tag seen) to avoid
    # uninitialized-value warnings; the outcome is unchanged.
    if (defined $state && $state eq "OK") {
        print "Status is OK\n" if $debug;
        return 1;
    } elsif (defined $state && $state eq "BAD") {
        print "Status is BAD\n" if $debug;
        return 0;
    } else {
        print STDERR "Unable to determine resmon status\n";
        return 0;
    }
}
sub reload_resmon {
    # Find every running resmon process via a ps listing and send it a
    # SIGHUP so that it re-execs itself with the freshly updated code.
    ## Get a process listing appropriate for this OS.
    my $pscmd;
    if ($^O eq 'linux' || $^O eq 'openbsd') {
        $pscmd = 'ps ax -o pid,args';
    } elsif ($^O eq 'solaris') {
        $pscmd = 'ps -ef -o pid,args';
    }
    # BUGFIX: previously an unsupported OS left $pscmd undef and we ran an
    # empty backtick command (with warnings); now report it and bail out.
    if (!$pscmd) {
        print STDERR "reload_resmon: unsupported OS '$^O', cannot find resmon processes\n";
        return;
    }
    my $psout = `$pscmd`;
    my @procs=grep(/perl (\/opt\/resmon\/|.\/)resmon/, split(/\n/, $psout));
    foreach my $proc (@procs) {
        # BUGFIX: strip *all* leading whitespace -- ps right-aligns the pid
        # column, so short pids have several leading spaces and the old
        # single-char s/^\s// left an empty first field after the split.
        $proc =~ s/^\s+//;
        print "$proc\n" if $debug;
        my ($pid, $args) = split(/\W/, $proc, 2);
        print "Killing PID:$pid\n" if $debug;
        kill('HUP', $pid);
    }
}
sub track_mod_times {
    # File::Find callback (run with no_chdir, so $_ is the full path).
    # Pass 1 ($assess == 0) records each module file's mtime in %times;
    # pass 2 compares against the recorded values and bumps the $newfiles
    # / $changedfiles counters accordingly.
    return unless -f $_;                      # regular files only
    return if m{/\.svn$} || m{/\.svn/};       # skip subversion metadata
    my $modified = (stat _)[9];               # reuse the stat from -f
    if ($assess) {
        if (exists $times{$_}) {
            $changedfiles++ if $times{$_} != $modified;
        }
        else {
            $newfiles++;
        }
    }
    else {
        $times{$_} = $modified;
    }
}
1;
| omniti-labs/resmon | lib/Resmon/Updater.pm | Perl | bsd-3-clause | 4,792 |
#!/usr/bin/env perl
#Basically followed the instructions here to get this to work:
#https://groups.google.com/forum/#!topic/biomart-users/skO4zgqzGBA
use strict;
use warnings;
use lib "$ENV{HOME}/src/bioperl-1.6.1";
use lib "$ENV{HOME}/git/ensembl_v75_api/modules";
use Bio::EnsEMBL::Registry;
# Connect to the public Ensembl database, fetch every gene and write its
# canonical transcript (with version), gene stable id and symbol to both
# a tab-separated output file and stdout.
my $reg = "Bio::EnsEMBL::Registry";
$reg->load_registry_from_db(
    -host => 'ensembldb.ensembl.org',
    -user => 'anonymous',
    -dbname => 'homo_sapiens_core_75_37'
);

# Three-arg open on a lexical handle, and actually check that it worked
# (the original bareword OUTFILE open was unchecked and never closed).
my $outfile = 'ensembl_canonical_transcripts_v75.txt';
open(my $out_fh, '>', $outfile)
    or die "Cannot open $outfile for writing: $!\n";

my $gene_adaptor = $reg->get_adaptor('human', 'core', 'gene');

#my @genes;
#push (@genes, $gene_adaptor->fetch_by_display_label('EGFR'));
my @genes = @{ $gene_adaptor->fetch_all };
while (my $gene = shift @genes) {
    my $canonical_t_id      = $gene->canonical_transcript->stable_id;
    my $canonical_t_version = $gene->canonical_transcript->version;
    my $gid    = $gene->stable_id;
    my $symbol = $gene->external_name;
    # e.g. "ENST00000275493.2<TAB>ENSG00000146648<TAB>EGFR"
    my $result = "$canonical_t_id" . "." . "$canonical_t_version\t$gid\t$symbol";
    print {$out_fh} $result, "\n";
    print $result, "\n";
}

# Check close on a write handle: buffered write errors surface here.
close($out_fh) or die "Cannot close $outfile: $!\n";
exit;
| genome/civic-server | public/downloads/getEnsemblCanonicalTranscripts.pl | Perl | mit | 1,083 |
#!/usr/local/bin/perl
#------------------------------
# Version : 20060121
# Writer : Mico Cheng
# Use for : cut domain
# Host : x
#-------------------------------
# Read the file named on the command line and print each line that
# matches the pattern below.
my $file = shift
    or die "usage: $0 <file>\n";
# Three-arg open on a lexical handle (the original 2-arg bareword open
# allowed mode injection via the filename and its error lacked context).
open my $fh, '<', $file
    or die "Cannot open $file: $!\n";
while (my $line = <$fh>) {
    chomp $line;
    # NOTE(review): the '^' following '\.' is a start-of-string assertion
    # in the *middle* of the pattern, so this regex can never succeed after
    # matching a literal dot -- it looks broken.  The original intent
    # (extracting the domain part?) should be confirmed before changing it,
    # so the pattern is preserved as written.
    if ($line =~ /(\w|\d)*\.^.*$/) {
        print "$line\n";
    }
}
close $fh;
| TonyChengTW/TBCMail | cut_domain.pl | Perl | apache-2.0 | 304 |
#!/usr/bin/perl
#--------------------------------------------------------------------
#----- FSFW CLI library
#-----
#----- Copyright(C) 2014 The Trustees of Indiana University
#--------------------------------------------------------------------
#----- $HeadURL:
#----- $Id:
#-----
#----- Library that acts as a CLI for the FSFW Webservices
#---------------------------------------------------------------------
package FSFW::CLI;
use strict;
use Term::ReadLine; #using Term::ReadLine::Gnu however best practices say not to require it directly?
use GRNOC::Config;
use FindBin;
use Data::Dumper;
use GRNOC::WebService::Client;
#use Text::Table;
our $VERSION = "1.0.0";
sub new {
    # Constructor.  Accepts key/value overrides for the default attributes
    # below (host, user, password, timeout, debug), blesses the resulting
    # hash into the class and performs one-time setup via _init().
    my ($proto, @overrides) = @_;
    my $class = ref($proto) || $proto;

    my $self = bless {
        host     => undef,
        user     => undef,
        password => undef,
        timeout  => 30,
        debug    => 0,
        @overrides,            # caller-supplied pairs win over defaults
    }, $class;

    $self->_init();
    return $self;
}
sub _init {
    # One-time setup: read the CLI config file, create the FSFW webservice
    # client, precompute the tab-completion command list and wire up the
    # Term::ReadLine prompt and completion hook.
    my $self = shift;
    $self->{config} = GRNOC::Config->new( config_file => '/etc/fsfw/cli_config.xml' );
    my $base_url = $self->{'config'}->get('/config/base_url');
    my $port = $self->{'config'}->get('/config/port');
    # NOTE(review): the configured values read above are immediately
    # overwritten with hardcoded defaults, so the config lookups are dead
    # code -- confirm whether this is a leftover debugging shortcut.
    $base_url = 'http://localhost';
    $port = '8080';
    $self->{history} = [];
    $self->{history_index} = 0;
    # NOTE(review): hardcoded credentials for the FSFW webservice.
    $self->{'ws'} = GRNOC::WebService::Client->new(
        url => "$base_url:$port/admin/switches/json",
        uid => 'test_user',
        passwd => 'supersecretpassword',
        realm => 'foo',
        usePost => 0,
    );
    $self->build_command_list();
    $self->{'term'} = Term::ReadLine->new('FSFW CLI');
    $self->set_prompt('FSFW#');
    my $attribs = $self->{'term'}->Attribs;
    #setting completion function up for Term::Readline
    my $cli = $self;
    $attribs->{completion_function} = sub {
        my ( $text, $line, $start ) = @_;
        $self->command_complete( $text, $line, $start );
    };
    return;
}
sub expand_commands {
    # Expand an abbreviated command line (e.g. "sh sl") into the full
    # command ("show slices") by matching each word against the known
    # command list word-by-word.  Returns a two-element list:
    #   (0, $expanded) on success, or
    #   (1, $input)    if a word has no match or is ambiguous.
    my $self = shift;
    my $input = shift;
    my $new_text;
    my @input_parts = split( " ", $input );
    # quotemeta each word so user input cannot act as regex metacharacters.
    foreach my $input_part (@input_parts) {
        $input_part = quotemeta($input_part);
    }
    my $command_list = $self->get_command_list();
    my $times_matched = 0;
    my $exact_matches = {};
    my $partial_matches = {};
    #for each command, use exact match for each step avaliable, bomb out if there are multiple partial matches
    foreach my $command (@$command_list) {
        my $matching_parts = 0;
        my @command_parts = split( " ", $command );
        # Classify the command's word at each input position as an exact
        # match or a prefix (partial) match of the user's word.
        for ( my $i = 0 ; $i < scalar(@input_parts) ; $i++ ) {
            unless ( $partial_matches->{$i} ) {
                $partial_matches->{$i} = {};
            }
            if ( $command_parts[$i] =~ /^$input_parts[$i]$/ ) {
                $exact_matches->{$i} = $command_parts[$i];
            }
            elsif ( $command_parts[$i] =~ /^$input_parts[$i].*/ ) {
                $matching_parts++;
                $partial_matches->{$i}->{ $command_parts[$i] } = 1;
                #print "$command_parts[$i] matches $input_parts[$i]\n";
            }
        }
    }
    # Rebuild the command: prefer the exact match at each position, else a
    # single unambiguous partial match; anything else is a failure.
    for ( my $i = 0 ; $i < scalar(@input_parts) ; $i++ ) {
        if ( $exact_matches->{$i} ) {
            $new_text .= "$exact_matches->{$i} ";
        }
        elsif ( $partial_matches->{$i} && scalar( keys %{ $partial_matches->{$i} } ) == 1 ) {
            my @values = keys %{ $partial_matches->{$i} };
            $new_text .= "$values[0] ";
        }
        else {
            #no partial or exact matches, or multiple partial matches
            warn "command not found\n";
            return ( 1, $input );
        }
    }
    # Drop the trailing space added by the loop above.
    chop($new_text);
    return ( 0, $new_text );
    # (An earlier implementation that tallied $times_matched and resolved
    #  ambiguity differently was left here commented out; removed for
    #  readability.  $times_matched above is now unused.)
}
sub command_complete {
    # Term::ReadLine completion hook.  Given the word being completed
    # ($text), the full line so far ($line) and the start offset ($start),
    # return the list of candidate completions drawn from the known
    # command list.  An empty line offers every command's first word.
    my $self = shift;
    my ( $text, $line, $start ) = @_;
    my $command_list = $self->get_command_list();
    # matches_at_level[$i] collects candidate words for word position $i.
    my @matches_at_level;
    # $line= quotemeta($line);
    my @text_parts = split( " ", $line );
    # Escape regex metacharacters in every typed word.
    foreach (@text_parts) {
        $_ = quotemeta($_);
    }
    # Nothing typed yet: offer the first word of every known command.
    if ( $line eq "" ) {
        #warn "Line is empty\n\n";
        my @return;
        foreach my $command (@$command_list) {
            my @command_parts = split( " ", $command );
            my $last_word = $command_parts[0];
            push( @return, $command_parts[0] );
        }
        return @return;
    }
    foreach my $command (@$command_list) {
        my $offset = 0;
        my $is_match = 1;
        my @command_parts = split( " ", $command );
        my $last_word = $command_parts[0];
        #default to assuming the whole line matches until we find a word that doesn't in our current depth
        #for n words in line
        #  if all words match and there are no options after it.. this is a full command, woot you've hit the end of the line
        #  if all words match and you have more than one match, your last word options should be all options that match at this depth
        #  if all words match and there are no other matches at this depth, give next depth option
        #warn "number of parts in arry $#text_parts";
        for ( my $i = 0 ; $i <= $#text_parts ; $i++ ) {
            # warn "command: $command_parts[$i], text:$text_parts[$i]\n";
            unless ( $matches_at_level[$i] ) {
                $matches_at_level[$i] = {};
            }
            unless ( $matches_at_level[ $i + 1 ] ) {
                $matches_at_level[ $i + 1 ] = {};
            }
            my $is_exact_match = 0;
            # Prefix match: candidate for completing the word being typed
            # (only counted while the cursor is still inside that word).
            if ( $command_parts[$i] =~ /^$text_parts[$i].*/ ) {
                #print "matched $command_parts[$i]\n";
                #$last_word = $command_parts[$i];
                if ( $i == $#text_parts ) {
                    #print "adding at level $i\n";
                    #if we've started the next command only accept full matches for additions to the list of matches?
                    unless ( $line =~ /.*?\s+$/ ) {
                        #print "index $i, $command_parts[$i] matches $text_parts[$i]";
                        $matches_at_level[$i]->{ $command_parts[$i] } = 1;
                    }
                }
            }
            elsif ( $is_match && $i == $#text_parts && $text eq "" ) {
                #everything matches up to this point, add next option
                #$matches_at_level[$i]->{$command_parts[$i]} =1;
                #$matches_at_level[$i+1]->{$command_parts[$i+1]}=1; #= $command_parts[$i+1];
            }
            else {
                # Word mismatch: this command cannot complete the line.
                if ( $i == $#text_parts ) {
                    # print "index $i, $command_parts[$i] does not match $text_parts[$i]";
                }
                $is_match = 0;
                last;
            }
            # Whole-word exact match: also seed the *next* word position so
            # a fully typed word offers the following word as a candidate.
            if ( $command_parts[$i] =~ /^$text_parts[$i]$/ ) {
                #exactly matches, so add current line and next line to matches at levels
                if ( $i == $#text_parts ) {
                    # print "index $i, $command_parts[$i] exactly matches $text_parts[$i]";
                    $matches_at_level[$i]->{ $command_parts[$i] } = 1;
                    $matches_at_level[ $i + 1 ]->{ $command_parts[ $i + 1 ] } = 1;
                }
            }
        }
        #if ($command =~ /^$line$/){
        #command is complete, perfect match don't add any options?
        # return;
        #}
    }
    #warn Dumper (\@matches_at_level);
    #warn scalar(keys %{$matches_at_level[$#text_parts]});
    # Several candidates for the current word: return them all.
    if ( $matches_at_level[$#text_parts] && ( scalar( keys %{ $matches_at_level[$#text_parts] } ) > 1 ) ) {
        #multiple matches at top level
        my @return = keys %{ $matches_at_level[$#text_parts] };
        # print "multiple matches at this level $#text_parts : returning ".Dumper(\@return);
        return @return;
    }
    # Exactly one candidate: prefer offering the *next* word if any exist.
    if ( $matches_at_level[$#text_parts] && ( scalar( keys %{ $matches_at_level[$#text_parts] } ) == 1 ) ) {
        if ( $matches_at_level[ $#text_parts + 1 ] && scalar( keys %{ $matches_at_level[ $#text_parts + 1 ] } ) ) {
            my @return = keys %{ $matches_at_level[ $#text_parts + 1 ] };
            # print "Only one match at current level returning next level: ".Dumper(\@return);
            #return @return;
            return @return;
        }
        my @return = keys %{ $matches_at_level[$#text_parts] };
        #print "Only one match at current level and no next level matches yet: ".Dumper(\@return);
        return @return;
    }
    #print "found no matches at $#text_parts \n";
    return;
    #my @return = keys (%matches);
    #print Dumper (\%matches);
    #return @return;
}
#stubbed out in case we ever have a legitimate auth system for the WS.
sub login {
    # Authentication stub: the webservice has no real auth system yet,
    # so logging in always reports success.
    my ($self) = @_;
    return 1;
}
sub build_command_list {
    # Query the FSFW webservice for the configured slices and connected
    # switches, then build the full list of CLI commands (stored in
    # $self->{'possible_commands'}) used for tab completion and command
    # expansion.  Dies if no slices are configured.
    my $self = shift;
    my $ws = $self->{'ws'};
    my $base_url = $self->{'config'}->get('/config/base_url');
    my $port = $self->{'config'}->get('/config/port');
    # NOTE(review): hardcoded overrides of the config values, matching _init().
    $base_url = 'http://localhost';
    $port = '8080';
    # BUGFIX: 'set slice status' and 'help' were joined with '.' (string
    # concatenation) instead of ',', which produced the bogus single entry
    # "set slice statushelp" and dropped both real commands from the list.
    $self->{'possible_commands'} = [ 'show slices', 'show switches', 'set slice status', 'help', '?', 'quit', 'exit' ];
    my @expandable_commands = ( 'show status', 'show flows', 'set slice status' );
    $ws->set_url("$base_url:$port/fsfw/admin/slices/json");
    my $slices_obj = $ws->foo();
    #print Dumper ($slices_obj);
    my @slices;
    foreach my $slice ( keys %$slices_obj ) {
        push( @slices, $slice );
    }
    unless ( grep { defined $_ } @slices ) {
        die "No slices found, check if FSFW is running? exiting.";
    }
    $ws->set_url("$base_url:$port/fsfw/admin/switches/json");
    my $dpids_obj = $ws->foo();
    unless ( grep { defined $_ } @$dpids_obj ) {
        print "No switches found connected to FSFW, show status and show flows will be unavailable.\n\n";
    }
    # Every slice is currently offered every connected dpid.
    my $dpid_per_slice = {};
    foreach my $slice (@slices) {
        $dpid_per_slice->{$slice} = [];
    }
    foreach my $switch (@$dpids_obj) {
        foreach my $slice (@slices) {
            push @{ $dpid_per_slice->{$slice} }, $switch->{'dpid'};
        }
    }
    # Expand each templated command for every slice/dpid combination; the
    # 'set slice status' template additionally gets enable/disable forms.
    foreach my $expandable_command (@expandable_commands) {
        foreach my $slice (@slices) {
            foreach my $dpid ( @{ $dpid_per_slice->{$slice} } ) {
                if ($expandable_command eq 'set slice status') {
                    push (@{ $self->{'possible_commands'} }, "$expandable_command $slice $dpid enable");
                    push (@{ $self->{'possible_commands'} }, "$expandable_command $slice $dpid disable");
                } else {
                    push( @{ $self->{'possible_commands'} }, "$expandable_command $slice $dpid" );
                }
            }
        }
    }
    return;
}
sub get_command_list {
    # Accessor: arrayref of every known CLI command string.
    my ($self) = @_;
    return $self->{'possible_commands'};
}
sub set_prompt {
    # Mutator: store the string shown as the interactive prompt.
    my ($self, $prompt) = @_;
    $self->{'prompt'} = $prompt;
    return;
}
sub get_prompt {
    # Accessor: the current interactive prompt string.
    my ($self) = @_;
    return $self->{'prompt'};
}
sub handle_input {
    # Dispatch one line of user input: expand abbreviations via
    # expand_commands(), then match the expanded line against the known
    # commands, call the corresponding FSFW webservice endpoint and print
    # a human-readable report.  Unrecognized input falls through silently
    # (expand_commands() already warned).
    my $self = shift;
    my $input = shift;
    my $insert_text = 0;
    my $ws = $self->{'ws'};
    my $base_url = $self->{'config'}->get('/config/base_url');
    my $port = $self->{'config'}->get('/config/port');
    # NOTE(review): hardcoded overrides of the config values, as in _init().
    $base_url = 'http://localhost';
    $port = '8080';
    ( $insert_text, $input ) = $self->expand_commands($input);
    if ( $input =~ /^exit$/ || $input =~ /^quit$/ ) {
        exit;
    }
    if ( $input =~ /^help$/ || $input =~ /^\?$/ ) {
        print <<END;
show slices
Returns a list of slices, and for each slice a list of DPIDs configured to have access to it
show switches
Returns details of each switch connected to FSFW:
show status [slice] [dpid]
returns status of the slice with parameters:
set slice status [slice] [dpid] [status]
sets the admin status of the slice
show flows [slice] [dpid]
returns all flows for this dpid with metadata, match and actions for each.
quit | exit
exit application
help
returns this message
END
    }
    elsif ( $input =~ /^show switches$/ ) {
        # NOTE(review): the match above has no capture group, so $1 here is
        # stale from an earlier regex (or undef) -- the $dpid filter below
        # looks like dead/leftover code; confirm intent.
        my $dpid = $1 || undef;
        $ws->set_url("$base_url:$port/fsfw/admin/switches/json");
        my $status_obj = $ws->foo();
        unless ( grep { defined $_ } @$status_obj ) {
            print "No switches found attached FSFW\n";
        }
        # Print a per-switch summary: dpid, address, description, ports.
        foreach my $switch (@$status_obj) {
            if ( $dpid && $switch->{'dpid'} != $dpid ) {
                next;
            }
            # inetAddress looks like "/1.2.3.4:6633"; keep just the host.
            my ($address) = $switch->{'inetAddress'} =~ /\/(\S+):\d+/;
            print "DPID:\t$switch->{'dpid'}\n";
            print "IP:\t$address\n";
            print "Vendor:\t" . $switch->{'descriptionStatistics'}->{'manufacturerDescription'} . "\n";
            print "Device:\t" . $switch->{'descriptionStatistics'}->{'hardwareDescription'} . "\n";
            print "Software Version:\t" . $switch->{'descriptionStatistics'}->{'softwareDescription'} . "\n";
            print "Ports:\n";
            print "Name\tPort Number\tStatus\n\n";
            #my $port_table=Text::Table->new("Port Name","Port Number", "Status");
            foreach my $port ( @{ $switch->{'ports'} } ) {
                my $port_num = $self->_unsign_int( $port->{'portNumber'} );
                print "$port->{'name'}\t$port_num\tUP\n";
            }
            print "\n\n";
        }
    }
    elsif ( $input =~ /^show slices$/ ) {
        # List each configured slice and the dpids granted access to it.
        $ws->set_url("$base_url:$port/fsfw/admin/slices/json");
        my $slices = $ws->foo();
        foreach my $slice ( keys %$slices ) {
            my $dpids = $slices->{$slice};
            print "Slice Name: $slice\n\nSwitches Configured for Slice:\n";
            unless ( grep { defined $_ } @$dpids ) {
                print "No switches.\n";
            }
            foreach my $dpid (@$dpids) {
                print "$dpid\n";
            }
            print "\n";
        }
        #print Dumper ($slices);
    }
    elsif ( $input =~ /^show flows (\S+) (\S+)/ ) {
        # show flows <slice> <dpid>: dump every flow entry with metadata,
        # match fields and actions.
        #warn "showing flows.";
        my $vlan_id;
        my $port_id;
        $ws->set_url("$base_url:$port/fsfw/flows/$1/$2/json");
        my $flows = $ws->foo();
        #print Dumper ($flows); #NONPROD
        unless ( defined($flows) && grep { defined $_ } @$flows ) {
            print "No Flows to display\n";
        }
        foreach my $flow (@$flows) {
            my $output_text = "";
            my $flow_matches = 1;
            # Counters arrive as signed values; reinterpret as unsigned.
            my $priority = $self->_unsign_int( $flow->{'priority'} );
            my $table_id = $self->_unsign_int( $flow->{'tableId'} );
            my $cookie = $self->_unsign_int( $flow->{'cookie'} );
            my $duration = $self->_unsign_int( $flow->{'durationSeconds'} );
            my $idle_timeout = $self->_unsign_int( $flow->{'idleTimeout'} );
            my $hard_timeout = $self->_unsign_int( $flow->{'hardTimeout'} );
            my $packet_count = $self->_unsign_int( $flow->{'packetCount'} );
            my $byte_count = $self->_unsign_int( $flow->{'byteCount'} );
            $output_text .= "Table ID: $table_id\tCookie: $cookie\n";
            $output_text .= "Priority: $priority Idle timeout(sec):$idle_timeout\tHard timeout(sec):$hard_timeout\n";
            $output_text .= "Packet Count: $packet_count\tByte Count:$byte_count\n";
            my $match = $flow->{'match'};
            $output_text .= "Match:\n";
            foreach my $key ( sort keys %$match ) {
                my $value = $match->{$key};
                #wildcards?
                # if ($vlan_id){
                # if ($key eq 'dataLayerVirtualLan'){
                # if ($value == $vlan_id){
                # $flow_matches =1;
                # }
                # }
                # }
                # Skip zero / all-zero fields: they are wildcards.
                if ( $value == 0 || $value eq '0.0.0.0' || $value eq '00:00:00:00:00:00' ) {
                    next;
                }
                #unsign ints
                if ( $value =~ /^-\d+$/ ) {
                    $value = $self->_unsign_int($value);
                }
                $output_text .= "$key:\t$value\n";
            }
            $output_text .= "Actions: ";
            my $actions = $flow->{'actions'};
            my @processed_actions;
            foreach my $action (@$actions) {
                my $type = $action->{'type'};
                my $action_str = "$type";
                # Append every action attribute except bookkeeping fields.
                foreach my $key ( sort keys %$action ) {
                    if ( $key eq 'length' || $key eq 'lengthU' || $key eq 'maxLength' || $key eq 'type' ) {
                        next;
                    }
                    my $value = $action->{$key};
                    if ( $value =~ /^-\d+$/ ) {
                        $value = $self->_unsign_int($value);
                    }
                    $action_str .= " $value";
                }
                #print "adding $action_str";
                push( @processed_actions, $action_str );
            }
            $output_text .= join( ",", @processed_actions ) . "\n\n";
            #if ($vlan_id || $port_id){
            # if ($flow_matches){
            # print $output_text;
            # }
            # }
            #else {
            print $output_text;
            #}
        }
    }
    elsif ( $input =~ /^show status (\S+) (\S+)/ ) {
        # show status <slice> <dpid>: print the raw key/value status map.
        $ws->set_url("$base_url:$port/fsfw/status/$1/$2/json");
        my $status_obj = $ws->foo();
        #print Dumper ($status_obj);
        print "Status for $2 in slice $1:\n\n";
        foreach my $key ( sort keys %$status_obj ) {
            print "$key\t$status_obj->{$key}\n";
        }
    }elsif( $input =~ /^set slice status (\S+) (\S+) (\S+)/){
        # set slice status <slice> <dpid> <enable|disable>: translate the
        # keyword to the true/false value the webservice expects.
        my $status = $3;
        my $state;
        if($status eq 'enable'){
            $state = 'true';
        }elsif($status eq 'disable'){
            $state = 'false';
        }
        #warn "State is set to " . $state . "\n";
        if(!defined($state)){
            print "Invalid state: " . $status . " must be enable/disable\n\n";
        }else{
            $ws->set_url("$base_url:$port/fsfw/admin/set_state/$1/$2/$state/json");
            my $status_obj = $ws->foo();
            if($status_obj == 1){
                print "Slice $1 for DPID $2 was successfully " . $status . "d\n\n";
            }else{
                print "An error occured attempting to set Slice $1 for DPID $2 to " . $status . "\n\n";
            }
        }
    }
    return; #$insert_text;
}
sub terminal_loop {
    # Main read-eval loop: keep prompting until readline returns undef
    # (EOF / ctrl-D).  When handle_input() returns true, the line just
    # entered is pre-filled into the next prompt; otherwise the prompt
    # starts empty.
    my $self = shift;
    my $term = $self->{'term'};
    my $prefill = "";
    while (1) {
        my $line = $term->readline( $self->get_prompt(), $prefill );
        last unless defined $line;
        my $keep = $self->handle_input($line);
        $prefill = $keep ? $line : "";
    }
}
# Reinterpret a signed value as unsigned.
# NOTE(review): pack 's' / unpack 'S' operate on 16-bit shorts, so this is
# only correct for values that fit in 16 bits (e.g. OpenFlow port numbers);
# confirm before relying on it for wider counters such as byte counts.
sub _unsign_int {
    my ($self, $signed) = @_;
    return unpack( 'S', pack( 's', $signed ) );
}
1;
| ajragusa/FlowSpaceFirewall | src/main/perl/FSFW-CLI/lib/FSFW/CLI.pm | Perl | apache-2.0 | 19,512 |
%
% 08-pacman.pl
% marelle-deps
%
% installs_with_pacman(Pkg).
% Pkg installs with pacman package of same name on Arch Linux
:- multifile installs_with_pacman/1.
% installs_with_pacman(Pkg, PacName).
% Pkg installs with pacman package called PacName on Arch Linux
% PacName can also be a list of packages.
:- multifile installs_with_pacman/2.
installs_with_pacman(P, P) :- installs_with_pacman(P).
:- dynamic pacman_updated/0.
pkg('pacman-update').
met('pacman-update', linux(arch)) :- pacman_updated.
meet('pacman-update', linux(arch)) :-
sh('sudo pacman -Syu'),
assertz(pacman_updated).
depends(P, linux(arch), ['pacman-update']) :-
installs_with_pacman(P, _).
% attempt to install a package with pacman
install_pacman(Pkg) :-
sudo_or_empty(Sudo),
sh([Sudo, 'pacman -S --noconfirm ', Pkg]).
% succeed only if the package is already installed
check_pacman(Pkg) :-
sh(['pacman -Qi ', Pkg, '>/dev/null 2>/dev/null']).
met(P, linux(arch)) :-
installs_with_pacman(P, PkgName), !,
check_pacman(PkgName).
meet(P, linux(arch)) :-
installs_with_pacman(P, PkgName), !,
install_pacman(PkgName).
| pmoura/marelle | 08-pacman.pl | Perl | bsd-2-clause | 1,144 |
#!/usr/bin/perl
## --------------------------------------------------------------------------
##
## Copyright 1996-2009 The NASM Authors - All Rights Reserved
## See the file AUTHORS included with the NASM distribution for
## the specific copyright holders.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following
## conditions are met:
##
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above
## copyright notice, this list of conditions and the following
## disclaimer in the documentation and/or other materials provided
## with the distribution.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
## CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
## INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
## MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
## DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
## NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
## LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
## HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
## CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
## EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
##
## --------------------------------------------------------------------------
#
# inslist.pl produce inslist.src
#
# Opcode prefixes which need their own opcode tables
# LONGER PREFIXES FIRST!
use strict;
use warnings;

# inslist.pl - read insns.dat and produce inslist.src, the instruction
# listing included in the NASM documentation.
#
# Opcode prefixes which need their own opcode tables
# LONGER PREFIXES FIRST!
# (Kept for consistency with the other insns.dat consumers; this
# script does not use it itself.)
our @disasm_prefixes = qw(0F24 0F25 0F38 0F3A 0F7A 0FA6 0FA7 0F);

print STDERR "Reading insns.dat...\n";

# Parse the command line: an optional -a/-d/-i/-n/-s output selector
# plus an optional path to insns.dat.
my @args = ();
my $output;
foreach my $arg (@ARGV) {
    if ($arg =~ /^\-/) {
        if ($arg =~ /^\-([adins])$/) {
            $output = $1;
        } else {
            die "$0: Unknown option: ${arg}\n";
        }
    } else {
        push(@args, $arg);
    }
}

# Default to the insns.dat one directory up unless a path was given.
my $fname = $args[0] || "../insns.dat";

# Three-argument open with lexical handles: the old two-argument form
# would let a filename beginning with '>' or '|' change the open mode.
open(my $in, '<', $fname) or die "unable to open $fname: $!";

print STDERR "Writing inslist.src...\n";
open(my $out, '>', "inslist.src")
    or die "unable to open inslist.src: $!";

my $line  = 0;    # current input line number (for diagnostics)
my $insns = 0;    # number of instruction entries emitted

while (my $text = <$in>) {
    $line++;
    next if ($text =~ /^\s*$/);    # blank lines
    if ($text =~ /^\s*;/) {        # comments
        # A ';#' comment marks a documentation section subheader.
        if ($text =~ /^\s*;\#\s*(.+)/) {
            print {$out} "\n\\S{} $1\n\n";
        }
        next;
    }
    chomp $text;
    # Each instruction line has exactly four whitespace-separated
    # fields: mnemonic, operands, opcode bytes (possibly bracketed),
    # and flags.
    unless ($text =~ /^\s*(\S+)\s+(\S+)\s+(\S+|\[.*\])\s+(\S+)\s*$/) {
        warn "line $line does not contain four fields\n";
        next;
    }
    my @entry = ($1, $2, $3, $4);
    # Strip markers that carry no meaning in the printed listing.
    # NOTE: the ',SM' substitution intentionally runs before ',SM2',
    # matching the historical behavior of this script.
    $entry[1] =~ s/ignore//;
    $entry[1] =~ s/void//;
    $entry[3] =~ s/ignore//;
    $entry[3] =~ s/,SB//;
    $entry[3] =~ s/,SM//;
    $entry[3] =~ s/,SM2//;
    $entry[3] =~ s/,SQ//;
    $entry[3] =~ s/,AR2//;
    printf {$out} "\\c %-16s %-24s %s\n", $entry[0], $entry[1], $entry[3];
    $insns++;
}

print {$out} "\n";
# Buffered write errors only surface at close on an output handle.
close($out) or die "error writing inslist.src: $!";
close($in);
printf STDERR "Done: %d instructions\n", $insns;
| coapp-packages/nasm | doc/inslist.pl | Perl | bsd-2-clause | 3,211 |
package #
Date::Manip::TZ::amlos_00;
# Copyright (c) 2008-2014 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Fri Nov 21 10:41:40 EST 2014
# Data version: tzdata2014j
# Code version: tzcode2014j
# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz
use strict;
use warnings;
require 5.010000;
# Package globals filled in below by the tzdata generator: %Dates maps
# each year to its UTC-offset transition records; %LastRule presumably
# holds the rule used to extrapolate past the last explicit entry --
# confirm against Date::Manip::TZ.
our (%Dates,%LastRule);
# Free the (large) generated tables at interpreter shutdown.
END {
undef %Dates;
undef %LastRule;
}
our ($VERSION);
$VERSION='6.48';
# Release the version string at shutdown as well.
END { undef $VERSION; }
%Dates = (
1 =>
[
[ [1,1,2,0,0,0],[1,1,1,16,7,2],'-07:52:58',[-7,-52,-58],
'LMT',0,[1883,11,18,19,59,59],[1883,11,18,12,7,1],
'0001010200:00:00','0001010116:07:02','1883111819:59:59','1883111812:07:01' ],
],
1883 =>
[
[ [1883,11,18,20,0,0],[1883,11,18,12,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1918,3,31,9,59,59],[1918,3,31,1,59,59],
'1883111820:00:00','1883111812:00:00','1918033109:59:59','1918033101:59:59' ],
],
1918 =>
[
[ [1918,3,31,10,0,0],[1918,3,31,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1918,10,27,8,59,59],[1918,10,27,1,59,59],
'1918033110:00:00','1918033103:00:00','1918102708:59:59','1918102701:59:59' ],
[ [1918,10,27,9,0,0],[1918,10,27,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1919,3,30,9,59,59],[1919,3,30,1,59,59],
'1918102709:00:00','1918102701:00:00','1919033009:59:59','1919033001:59:59' ],
],
1919 =>
[
[ [1919,3,30,10,0,0],[1919,3,30,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1919,10,26,8,59,59],[1919,10,26,1,59,59],
'1919033010:00:00','1919033003:00:00','1919102608:59:59','1919102601:59:59' ],
[ [1919,10,26,9,0,0],[1919,10,26,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1942,2,9,9,59,59],[1942,2,9,1,59,59],
'1919102609:00:00','1919102601:00:00','1942020909:59:59','1942020901:59:59' ],
],
1942 =>
[
[ [1942,2,9,10,0,0],[1942,2,9,3,0,0],'-07:00:00',[-7,0,0],
'PWT',1,[1945,8,14,22,59,59],[1945,8,14,15,59,59],
'1942020910:00:00','1942020903:00:00','1945081422:59:59','1945081415:59:59' ],
],
1945 =>
[
[ [1945,8,14,23,0,0],[1945,8,14,16,0,0],'-07:00:00',[-7,0,0],
'PPT',1,[1945,9,30,8,59,59],[1945,9,30,1,59,59],
'1945081423:00:00','1945081416:00:00','1945093008:59:59','1945093001:59:59' ],
[ [1945,9,30,9,0,0],[1945,9,30,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1948,3,14,9,59,59],[1948,3,14,1,59,59],
'1945093009:00:00','1945093001:00:00','1948031409:59:59','1948031401:59:59' ],
],
1948 =>
[
[ [1948,3,14,10,0,0],[1948,3,14,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1949,1,1,8,59,59],[1949,1,1,1,59,59],
'1948031410:00:00','1948031403:00:00','1949010108:59:59','1949010101:59:59' ],
],
1949 =>
[
[ [1949,1,1,9,0,0],[1949,1,1,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1950,4,30,9,59,59],[1950,4,30,1,59,59],
'1949010109:00:00','1949010101:00:00','1950043009:59:59','1950043001:59:59' ],
],
1950 =>
[
[ [1950,4,30,10,0,0],[1950,4,30,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1950,9,24,8,59,59],[1950,9,24,1,59,59],
'1950043010:00:00','1950043003:00:00','1950092408:59:59','1950092401:59:59' ],
[ [1950,9,24,9,0,0],[1950,9,24,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1951,4,29,9,59,59],[1951,4,29,1,59,59],
'1950092409:00:00','1950092401:00:00','1951042909:59:59','1951042901:59:59' ],
],
1951 =>
[
[ [1951,4,29,10,0,0],[1951,4,29,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1951,9,30,8,59,59],[1951,9,30,1,59,59],
'1951042910:00:00','1951042903:00:00','1951093008:59:59','1951093001:59:59' ],
[ [1951,9,30,9,0,0],[1951,9,30,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1952,4,27,9,59,59],[1952,4,27,1,59,59],
'1951093009:00:00','1951093001:00:00','1952042709:59:59','1952042701:59:59' ],
],
1952 =>
[
[ [1952,4,27,10,0,0],[1952,4,27,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1952,9,28,8,59,59],[1952,9,28,1,59,59],
'1952042710:00:00','1952042703:00:00','1952092808:59:59','1952092801:59:59' ],
[ [1952,9,28,9,0,0],[1952,9,28,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1953,4,26,9,59,59],[1953,4,26,1,59,59],
'1952092809:00:00','1952092801:00:00','1953042609:59:59','1953042601:59:59' ],
],
1953 =>
[
[ [1953,4,26,10,0,0],[1953,4,26,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1953,9,27,8,59,59],[1953,9,27,1,59,59],
'1953042610:00:00','1953042603:00:00','1953092708:59:59','1953092701:59:59' ],
[ [1953,9,27,9,0,0],[1953,9,27,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1954,4,25,9,59,59],[1954,4,25,1,59,59],
'1953092709:00:00','1953092701:00:00','1954042509:59:59','1954042501:59:59' ],
],
1954 =>
[
[ [1954,4,25,10,0,0],[1954,4,25,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1954,9,26,8,59,59],[1954,9,26,1,59,59],
'1954042510:00:00','1954042503:00:00','1954092608:59:59','1954092601:59:59' ],
[ [1954,9,26,9,0,0],[1954,9,26,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1955,4,24,9,59,59],[1955,4,24,1,59,59],
'1954092609:00:00','1954092601:00:00','1955042409:59:59','1955042401:59:59' ],
],
1955 =>
[
[ [1955,4,24,10,0,0],[1955,4,24,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1955,9,25,8,59,59],[1955,9,25,1,59,59],
'1955042410:00:00','1955042403:00:00','1955092508:59:59','1955092501:59:59' ],
[ [1955,9,25,9,0,0],[1955,9,25,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1956,4,29,9,59,59],[1956,4,29,1,59,59],
'1955092509:00:00','1955092501:00:00','1956042909:59:59','1956042901:59:59' ],
],
1956 =>
[
[ [1956,4,29,10,0,0],[1956,4,29,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1956,9,30,8,59,59],[1956,9,30,1,59,59],
'1956042910:00:00','1956042903:00:00','1956093008:59:59','1956093001:59:59' ],
[ [1956,9,30,9,0,0],[1956,9,30,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1957,4,28,9,59,59],[1957,4,28,1,59,59],
'1956093009:00:00','1956093001:00:00','1957042809:59:59','1957042801:59:59' ],
],
1957 =>
[
[ [1957,4,28,10,0,0],[1957,4,28,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1957,9,29,8,59,59],[1957,9,29,1,59,59],
'1957042810:00:00','1957042803:00:00','1957092908:59:59','1957092901:59:59' ],
[ [1957,9,29,9,0,0],[1957,9,29,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1958,4,27,9,59,59],[1958,4,27,1,59,59],
'1957092909:00:00','1957092901:00:00','1958042709:59:59','1958042701:59:59' ],
],
1958 =>
[
[ [1958,4,27,10,0,0],[1958,4,27,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1958,9,28,8,59,59],[1958,9,28,1,59,59],
'1958042710:00:00','1958042703:00:00','1958092808:59:59','1958092801:59:59' ],
[ [1958,9,28,9,0,0],[1958,9,28,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1959,4,26,9,59,59],[1959,4,26,1,59,59],
'1958092809:00:00','1958092801:00:00','1959042609:59:59','1959042601:59:59' ],
],
1959 =>
[
[ [1959,4,26,10,0,0],[1959,4,26,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1959,9,27,8,59,59],[1959,9,27,1,59,59],
'1959042610:00:00','1959042603:00:00','1959092708:59:59','1959092701:59:59' ],
[ [1959,9,27,9,0,0],[1959,9,27,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1960,4,24,9,59,59],[1960,4,24,1,59,59],
'1959092709:00:00','1959092701:00:00','1960042409:59:59','1960042401:59:59' ],
],
1960 =>
[
[ [1960,4,24,10,0,0],[1960,4,24,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1960,9,25,8,59,59],[1960,9,25,1,59,59],
'1960042410:00:00','1960042403:00:00','1960092508:59:59','1960092501:59:59' ],
[ [1960,9,25,9,0,0],[1960,9,25,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1961,4,30,9,59,59],[1961,4,30,1,59,59],
'1960092509:00:00','1960092501:00:00','1961043009:59:59','1961043001:59:59' ],
],
1961 =>
[
[ [1961,4,30,10,0,0],[1961,4,30,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1961,9,24,8,59,59],[1961,9,24,1,59,59],
'1961043010:00:00','1961043003:00:00','1961092408:59:59','1961092401:59:59' ],
[ [1961,9,24,9,0,0],[1961,9,24,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1962,4,29,9,59,59],[1962,4,29,1,59,59],
'1961092409:00:00','1961092401:00:00','1962042909:59:59','1962042901:59:59' ],
],
1962 =>
[
[ [1962,4,29,10,0,0],[1962,4,29,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1962,10,28,8,59,59],[1962,10,28,1,59,59],
'1962042910:00:00','1962042903:00:00','1962102808:59:59','1962102801:59:59' ],
[ [1962,10,28,9,0,0],[1962,10,28,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1963,4,28,9,59,59],[1963,4,28,1,59,59],
'1962102809:00:00','1962102801:00:00','1963042809:59:59','1963042801:59:59' ],
],
1963 =>
[
[ [1963,4,28,10,0,0],[1963,4,28,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1963,10,27,8,59,59],[1963,10,27,1,59,59],
'1963042810:00:00','1963042803:00:00','1963102708:59:59','1963102701:59:59' ],
[ [1963,10,27,9,0,0],[1963,10,27,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1964,4,26,9,59,59],[1964,4,26,1,59,59],
'1963102709:00:00','1963102701:00:00','1964042609:59:59','1964042601:59:59' ],
],
1964 =>
[
[ [1964,4,26,10,0,0],[1964,4,26,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1964,10,25,8,59,59],[1964,10,25,1,59,59],
'1964042610:00:00','1964042603:00:00','1964102508:59:59','1964102501:59:59' ],
[ [1964,10,25,9,0,0],[1964,10,25,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1965,4,25,9,59,59],[1965,4,25,1,59,59],
'1964102509:00:00','1964102501:00:00','1965042509:59:59','1965042501:59:59' ],
],
1965 =>
[
[ [1965,4,25,10,0,0],[1965,4,25,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1965,10,31,8,59,59],[1965,10,31,1,59,59],
'1965042510:00:00','1965042503:00:00','1965103108:59:59','1965103101:59:59' ],
[ [1965,10,31,9,0,0],[1965,10,31,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1966,4,24,9,59,59],[1966,4,24,1,59,59],
'1965103109:00:00','1965103101:00:00','1966042409:59:59','1966042401:59:59' ],
],
1966 =>
[
[ [1966,4,24,10,0,0],[1966,4,24,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1966,10,30,8,59,59],[1966,10,30,1,59,59],
'1966042410:00:00','1966042403:00:00','1966103008:59:59','1966103001:59:59' ],
[ [1966,10,30,9,0,0],[1966,10,30,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1967,4,30,9,59,59],[1967,4,30,1,59,59],
'1966103009:00:00','1966103001:00:00','1967043009:59:59','1967043001:59:59' ],
],
1967 =>
[
[ [1967,4,30,10,0,0],[1967,4,30,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1967,10,29,8,59,59],[1967,10,29,1,59,59],
'1967043010:00:00','1967043003:00:00','1967102908:59:59','1967102901:59:59' ],
[ [1967,10,29,9,0,0],[1967,10,29,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1968,4,28,9,59,59],[1968,4,28,1,59,59],
'1967102909:00:00','1967102901:00:00','1968042809:59:59','1968042801:59:59' ],
],
1968 =>
[
[ [1968,4,28,10,0,0],[1968,4,28,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1968,10,27,8,59,59],[1968,10,27,1,59,59],
'1968042810:00:00','1968042803:00:00','1968102708:59:59','1968102701:59:59' ],
[ [1968,10,27,9,0,0],[1968,10,27,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1969,4,27,9,59,59],[1969,4,27,1,59,59],
'1968102709:00:00','1968102701:00:00','1969042709:59:59','1969042701:59:59' ],
],
1969 =>
[
[ [1969,4,27,10,0,0],[1969,4,27,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1969,10,26,8,59,59],[1969,10,26,1,59,59],
'1969042710:00:00','1969042703:00:00','1969102608:59:59','1969102601:59:59' ],
[ [1969,10,26,9,0,0],[1969,10,26,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1970,4,26,9,59,59],[1970,4,26,1,59,59],
'1969102609:00:00','1969102601:00:00','1970042609:59:59','1970042601:59:59' ],
],
1970 =>
[
[ [1970,4,26,10,0,0],[1970,4,26,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1970,10,25,8,59,59],[1970,10,25,1,59,59],
'1970042610:00:00','1970042603:00:00','1970102508:59:59','1970102501:59:59' ],
[ [1970,10,25,9,0,0],[1970,10,25,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1971,4,25,9,59,59],[1971,4,25,1,59,59],
'1970102509:00:00','1970102501:00:00','1971042509:59:59','1971042501:59:59' ],
],
1971 =>
[
[ [1971,4,25,10,0,0],[1971,4,25,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1971,10,31,8,59,59],[1971,10,31,1,59,59],
'1971042510:00:00','1971042503:00:00','1971103108:59:59','1971103101:59:59' ],
[ [1971,10,31,9,0,0],[1971,10,31,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1972,4,30,9,59,59],[1972,4,30,1,59,59],
'1971103109:00:00','1971103101:00:00','1972043009:59:59','1972043001:59:59' ],
],
1972 =>
[
[ [1972,4,30,10,0,0],[1972,4,30,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1972,10,29,8,59,59],[1972,10,29,1,59,59],
'1972043010:00:00','1972043003:00:00','1972102908:59:59','1972102901:59:59' ],
[ [1972,10,29,9,0,0],[1972,10,29,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1973,4,29,9,59,59],[1973,4,29,1,59,59],
'1972102909:00:00','1972102901:00:00','1973042909:59:59','1973042901:59:59' ],
],
1973 =>
[
[ [1973,4,29,10,0,0],[1973,4,29,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1973,10,28,8,59,59],[1973,10,28,1,59,59],
'1973042910:00:00','1973042903:00:00','1973102808:59:59','1973102801:59:59' ],
[ [1973,10,28,9,0,0],[1973,10,28,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1974,1,6,9,59,59],[1974,1,6,1,59,59],
'1973102809:00:00','1973102801:00:00','1974010609:59:59','1974010601:59:59' ],
],
1974 =>
[
[ [1974,1,6,10,0,0],[1974,1,6,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1974,10,27,8,59,59],[1974,10,27,1,59,59],
'1974010610:00:00','1974010603:00:00','1974102708:59:59','1974102701:59:59' ],
[ [1974,10,27,9,0,0],[1974,10,27,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1975,2,23,9,59,59],[1975,2,23,1,59,59],
'1974102709:00:00','1974102701:00:00','1975022309:59:59','1975022301:59:59' ],
],
1975 =>
[
[ [1975,2,23,10,0,0],[1975,2,23,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1975,10,26,8,59,59],[1975,10,26,1,59,59],
'1975022310:00:00','1975022303:00:00','1975102608:59:59','1975102601:59:59' ],
[ [1975,10,26,9,0,0],[1975,10,26,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1976,4,25,9,59,59],[1976,4,25,1,59,59],
'1975102609:00:00','1975102601:00:00','1976042509:59:59','1976042501:59:59' ],
],
1976 =>
[
[ [1976,4,25,10,0,0],[1976,4,25,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1976,10,31,8,59,59],[1976,10,31,1,59,59],
'1976042510:00:00','1976042503:00:00','1976103108:59:59','1976103101:59:59' ],
[ [1976,10,31,9,0,0],[1976,10,31,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1977,4,24,9,59,59],[1977,4,24,1,59,59],
'1976103109:00:00','1976103101:00:00','1977042409:59:59','1977042401:59:59' ],
],
1977 =>
[
[ [1977,4,24,10,0,0],[1977,4,24,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1977,10,30,8,59,59],[1977,10,30,1,59,59],
'1977042410:00:00','1977042403:00:00','1977103008:59:59','1977103001:59:59' ],
[ [1977,10,30,9,0,0],[1977,10,30,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1978,4,30,9,59,59],[1978,4,30,1,59,59],
'1977103009:00:00','1977103001:00:00','1978043009:59:59','1978043001:59:59' ],
],
1978 =>
[
[ [1978,4,30,10,0,0],[1978,4,30,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1978,10,29,8,59,59],[1978,10,29,1,59,59],
'1978043010:00:00','1978043003:00:00','1978102908:59:59','1978102901:59:59' ],
[ [1978,10,29,9,0,0],[1978,10,29,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1979,4,29,9,59,59],[1979,4,29,1,59,59],
'1978102909:00:00','1978102901:00:00','1979042909:59:59','1979042901:59:59' ],
],
1979 =>
[
[ [1979,4,29,10,0,0],[1979,4,29,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1979,10,28,8,59,59],[1979,10,28,1,59,59],
'1979042910:00:00','1979042903:00:00','1979102808:59:59','1979102801:59:59' ],
[ [1979,10,28,9,0,0],[1979,10,28,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1980,4,27,9,59,59],[1980,4,27,1,59,59],
'1979102809:00:00','1979102801:00:00','1980042709:59:59','1980042701:59:59' ],
],
1980 =>
[
[ [1980,4,27,10,0,0],[1980,4,27,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1980,10,26,8,59,59],[1980,10,26,1,59,59],
'1980042710:00:00','1980042703:00:00','1980102608:59:59','1980102601:59:59' ],
[ [1980,10,26,9,0,0],[1980,10,26,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1981,4,26,9,59,59],[1981,4,26,1,59,59],
'1980102609:00:00','1980102601:00:00','1981042609:59:59','1981042601:59:59' ],
],
1981 =>
[
[ [1981,4,26,10,0,0],[1981,4,26,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1981,10,25,8,59,59],[1981,10,25,1,59,59],
'1981042610:00:00','1981042603:00:00','1981102508:59:59','1981102501:59:59' ],
[ [1981,10,25,9,0,0],[1981,10,25,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1982,4,25,9,59,59],[1982,4,25,1,59,59],
'1981102509:00:00','1981102501:00:00','1982042509:59:59','1982042501:59:59' ],
],
1982 =>
[
[ [1982,4,25,10,0,0],[1982,4,25,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1982,10,31,8,59,59],[1982,10,31,1,59,59],
'1982042510:00:00','1982042503:00:00','1982103108:59:59','1982103101:59:59' ],
[ [1982,10,31,9,0,0],[1982,10,31,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1983,4,24,9,59,59],[1983,4,24,1,59,59],
'1982103109:00:00','1982103101:00:00','1983042409:59:59','1983042401:59:59' ],
],
1983 =>
[
[ [1983,4,24,10,0,0],[1983,4,24,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1983,10,30,8,59,59],[1983,10,30,1,59,59],
'1983042410:00:00','1983042403:00:00','1983103008:59:59','1983103001:59:59' ],
[ [1983,10,30,9,0,0],[1983,10,30,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1984,4,29,9,59,59],[1984,4,29,1,59,59],
'1983103009:00:00','1983103001:00:00','1984042909:59:59','1984042901:59:59' ],
],
1984 =>
[
[ [1984,4,29,10,0,0],[1984,4,29,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1984,10,28,8,59,59],[1984,10,28,1,59,59],
'1984042910:00:00','1984042903:00:00','1984102808:59:59','1984102801:59:59' ],
[ [1984,10,28,9,0,0],[1984,10,28,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1985,4,28,9,59,59],[1985,4,28,1,59,59],
'1984102809:00:00','1984102801:00:00','1985042809:59:59','1985042801:59:59' ],
],
1985 =>
[
[ [1985,4,28,10,0,0],[1985,4,28,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1985,10,27,8,59,59],[1985,10,27,1,59,59],
'1985042810:00:00','1985042803:00:00','1985102708:59:59','1985102701:59:59' ],
[ [1985,10,27,9,0,0],[1985,10,27,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1986,4,27,9,59,59],[1986,4,27,1,59,59],
'1985102709:00:00','1985102701:00:00','1986042709:59:59','1986042701:59:59' ],
],
1986 =>
[
[ [1986,4,27,10,0,0],[1986,4,27,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1986,10,26,8,59,59],[1986,10,26,1,59,59],
'1986042710:00:00','1986042703:00:00','1986102608:59:59','1986102601:59:59' ],
[ [1986,10,26,9,0,0],[1986,10,26,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1987,4,5,9,59,59],[1987,4,5,1,59,59],
'1986102609:00:00','1986102601:00:00','1987040509:59:59','1987040501:59:59' ],
],
1987 =>
[
[ [1987,4,5,10,0,0],[1987,4,5,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1987,10,25,8,59,59],[1987,10,25,1,59,59],
'1987040510:00:00','1987040503:00:00','1987102508:59:59','1987102501:59:59' ],
[ [1987,10,25,9,0,0],[1987,10,25,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1988,4,3,9,59,59],[1988,4,3,1,59,59],
'1987102509:00:00','1987102501:00:00','1988040309:59:59','1988040301:59:59' ],
],
1988 =>
[
[ [1988,4,3,10,0,0],[1988,4,3,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1988,10,30,8,59,59],[1988,10,30,1,59,59],
'1988040310:00:00','1988040303:00:00','1988103008:59:59','1988103001:59:59' ],
[ [1988,10,30,9,0,0],[1988,10,30,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1989,4,2,9,59,59],[1989,4,2,1,59,59],
'1988103009:00:00','1988103001:00:00','1989040209:59:59','1989040201:59:59' ],
],
1989 =>
[
[ [1989,4,2,10,0,0],[1989,4,2,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1989,10,29,8,59,59],[1989,10,29,1,59,59],
'1989040210:00:00','1989040203:00:00','1989102908:59:59','1989102901:59:59' ],
[ [1989,10,29,9,0,0],[1989,10,29,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1990,4,1,9,59,59],[1990,4,1,1,59,59],
'1989102909:00:00','1989102901:00:00','1990040109:59:59','1990040101:59:59' ],
],
1990 =>
[
[ [1990,4,1,10,0,0],[1990,4,1,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1990,10,28,8,59,59],[1990,10,28,1,59,59],
'1990040110:00:00','1990040103:00:00','1990102808:59:59','1990102801:59:59' ],
[ [1990,10,28,9,0,0],[1990,10,28,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1991,4,7,9,59,59],[1991,4,7,1,59,59],
'1990102809:00:00','1990102801:00:00','1991040709:59:59','1991040701:59:59' ],
],
1991 =>
[
[ [1991,4,7,10,0,0],[1991,4,7,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1991,10,27,8,59,59],[1991,10,27,1,59,59],
'1991040710:00:00','1991040703:00:00','1991102708:59:59','1991102701:59:59' ],
[ [1991,10,27,9,0,0],[1991,10,27,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1992,4,5,9,59,59],[1992,4,5,1,59,59],
'1991102709:00:00','1991102701:00:00','1992040509:59:59','1992040501:59:59' ],
],
1992 =>
[
[ [1992,4,5,10,0,0],[1992,4,5,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1992,10,25,8,59,59],[1992,10,25,1,59,59],
'1992040510:00:00','1992040503:00:00','1992102508:59:59','1992102501:59:59' ],
[ [1992,10,25,9,0,0],[1992,10,25,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1993,4,4,9,59,59],[1993,4,4,1,59,59],
'1992102509:00:00','1992102501:00:00','1993040409:59:59','1993040401:59:59' ],
],
1993 =>
[
[ [1993,4,4,10,0,0],[1993,4,4,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1993,10,31,8,59,59],[1993,10,31,1,59,59],
'1993040410:00:00','1993040403:00:00','1993103108:59:59','1993103101:59:59' ],
[ [1993,10,31,9,0,0],[1993,10,31,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1994,4,3,9,59,59],[1994,4,3,1,59,59],
'1993103109:00:00','1993103101:00:00','1994040309:59:59','1994040301:59:59' ],
],
1994 =>
[
[ [1994,4,3,10,0,0],[1994,4,3,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1994,10,30,8,59,59],[1994,10,30,1,59,59],
'1994040310:00:00','1994040303:00:00','1994103008:59:59','1994103001:59:59' ],
[ [1994,10,30,9,0,0],[1994,10,30,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1995,4,2,9,59,59],[1995,4,2,1,59,59],
'1994103009:00:00','1994103001:00:00','1995040209:59:59','1995040201:59:59' ],
],
1995 =>
[
[ [1995,4,2,10,0,0],[1995,4,2,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1995,10,29,8,59,59],[1995,10,29,1,59,59],
'1995040210:00:00','1995040203:00:00','1995102908:59:59','1995102901:59:59' ],
[ [1995,10,29,9,0,0],[1995,10,29,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1996,4,7,9,59,59],[1996,4,7,1,59,59],
'1995102909:00:00','1995102901:00:00','1996040709:59:59','1996040701:59:59' ],
],
1996 =>
[
[ [1996,4,7,10,0,0],[1996,4,7,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1996,10,27,8,59,59],[1996,10,27,1,59,59],
'1996040710:00:00','1996040703:00:00','1996102708:59:59','1996102701:59:59' ],
[ [1996,10,27,9,0,0],[1996,10,27,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1997,4,6,9,59,59],[1997,4,6,1,59,59],
'1996102709:00:00','1996102701:00:00','1997040609:59:59','1997040601:59:59' ],
],
1997 =>
[
[ [1997,4,6,10,0,0],[1997,4,6,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1997,10,26,8,59,59],[1997,10,26,1,59,59],
'1997040610:00:00','1997040603:00:00','1997102608:59:59','1997102601:59:59' ],
[ [1997,10,26,9,0,0],[1997,10,26,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1998,4,5,9,59,59],[1998,4,5,1,59,59],
'1997102609:00:00','1997102601:00:00','1998040509:59:59','1998040501:59:59' ],
],
1998 =>
[
[ [1998,4,5,10,0,0],[1998,4,5,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1998,10,25,8,59,59],[1998,10,25,1,59,59],
'1998040510:00:00','1998040503:00:00','1998102508:59:59','1998102501:59:59' ],
[ [1998,10,25,9,0,0],[1998,10,25,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[1999,4,4,9,59,59],[1999,4,4,1,59,59],
'1998102509:00:00','1998102501:00:00','1999040409:59:59','1999040401:59:59' ],
],
1999 =>
[
[ [1999,4,4,10,0,0],[1999,4,4,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[1999,10,31,8,59,59],[1999,10,31,1,59,59],
'1999040410:00:00','1999040403:00:00','1999103108:59:59','1999103101:59:59' ],
[ [1999,10,31,9,0,0],[1999,10,31,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2000,4,2,9,59,59],[2000,4,2,1,59,59],
'1999103109:00:00','1999103101:00:00','2000040209:59:59','2000040201:59:59' ],
],
2000 =>
[
[ [2000,4,2,10,0,0],[2000,4,2,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2000,10,29,8,59,59],[2000,10,29,1,59,59],
'2000040210:00:00','2000040203:00:00','2000102908:59:59','2000102901:59:59' ],
[ [2000,10,29,9,0,0],[2000,10,29,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2001,4,1,9,59,59],[2001,4,1,1,59,59],
'2000102909:00:00','2000102901:00:00','2001040109:59:59','2001040101:59:59' ],
],
2001 =>
[
[ [2001,4,1,10,0,0],[2001,4,1,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2001,10,28,8,59,59],[2001,10,28,1,59,59],
'2001040110:00:00','2001040103:00:00','2001102808:59:59','2001102801:59:59' ],
[ [2001,10,28,9,0,0],[2001,10,28,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2002,4,7,9,59,59],[2002,4,7,1,59,59],
'2001102809:00:00','2001102801:00:00','2002040709:59:59','2002040701:59:59' ],
],
2002 =>
[
[ [2002,4,7,10,0,0],[2002,4,7,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2002,10,27,8,59,59],[2002,10,27,1,59,59],
'2002040710:00:00','2002040703:00:00','2002102708:59:59','2002102701:59:59' ],
[ [2002,10,27,9,0,0],[2002,10,27,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2003,4,6,9,59,59],[2003,4,6,1,59,59],
'2002102709:00:00','2002102701:00:00','2003040609:59:59','2003040601:59:59' ],
],
2003 =>
[
[ [2003,4,6,10,0,0],[2003,4,6,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2003,10,26,8,59,59],[2003,10,26,1,59,59],
'2003040610:00:00','2003040603:00:00','2003102608:59:59','2003102601:59:59' ],
[ [2003,10,26,9,0,0],[2003,10,26,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2004,4,4,9,59,59],[2004,4,4,1,59,59],
'2003102609:00:00','2003102601:00:00','2004040409:59:59','2004040401:59:59' ],
],
2004 =>
[
[ [2004,4,4,10,0,0],[2004,4,4,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2004,10,31,8,59,59],[2004,10,31,1,59,59],
'2004040410:00:00','2004040403:00:00','2004103108:59:59','2004103101:59:59' ],
[ [2004,10,31,9,0,0],[2004,10,31,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2005,4,3,9,59,59],[2005,4,3,1,59,59],
'2004103109:00:00','2004103101:00:00','2005040309:59:59','2005040301:59:59' ],
],
2005 =>
[
[ [2005,4,3,10,0,0],[2005,4,3,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2005,10,30,8,59,59],[2005,10,30,1,59,59],
'2005040310:00:00','2005040303:00:00','2005103008:59:59','2005103001:59:59' ],
[ [2005,10,30,9,0,0],[2005,10,30,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2006,4,2,9,59,59],[2006,4,2,1,59,59],
'2005103009:00:00','2005103001:00:00','2006040209:59:59','2006040201:59:59' ],
],
2006 =>
[
[ [2006,4,2,10,0,0],[2006,4,2,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2006,10,29,8,59,59],[2006,10,29,1,59,59],
'2006040210:00:00','2006040203:00:00','2006102908:59:59','2006102901:59:59' ],
[ [2006,10,29,9,0,0],[2006,10,29,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2007,3,11,9,59,59],[2007,3,11,1,59,59],
'2006102909:00:00','2006102901:00:00','2007031109:59:59','2007031101:59:59' ],
],
2007 =>
[
[ [2007,3,11,10,0,0],[2007,3,11,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2007,11,4,8,59,59],[2007,11,4,1,59,59],
'2007031110:00:00','2007031103:00:00','2007110408:59:59','2007110401:59:59' ],
[ [2007,11,4,9,0,0],[2007,11,4,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2008,3,9,9,59,59],[2008,3,9,1,59,59],
'2007110409:00:00','2007110401:00:00','2008030909:59:59','2008030901:59:59' ],
],
2008 =>
[
[ [2008,3,9,10,0,0],[2008,3,9,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2008,11,2,8,59,59],[2008,11,2,1,59,59],
'2008030910:00:00','2008030903:00:00','2008110208:59:59','2008110201:59:59' ],
[ [2008,11,2,9,0,0],[2008,11,2,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2009,3,8,9,59,59],[2009,3,8,1,59,59],
'2008110209:00:00','2008110201:00:00','2009030809:59:59','2009030801:59:59' ],
],
2009 =>
[
[ [2009,3,8,10,0,0],[2009,3,8,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2009,11,1,8,59,59],[2009,11,1,1,59,59],
'2009030810:00:00','2009030803:00:00','2009110108:59:59','2009110101:59:59' ],
[ [2009,11,1,9,0,0],[2009,11,1,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2010,3,14,9,59,59],[2010,3,14,1,59,59],
'2009110109:00:00','2009110101:00:00','2010031409:59:59','2010031401:59:59' ],
],
2010 =>
[
[ [2010,3,14,10,0,0],[2010,3,14,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2010,11,7,8,59,59],[2010,11,7,1,59,59],
'2010031410:00:00','2010031403:00:00','2010110708:59:59','2010110701:59:59' ],
[ [2010,11,7,9,0,0],[2010,11,7,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2011,3,13,9,59,59],[2011,3,13,1,59,59],
'2010110709:00:00','2010110701:00:00','2011031309:59:59','2011031301:59:59' ],
],
2011 =>
[
[ [2011,3,13,10,0,0],[2011,3,13,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2011,11,6,8,59,59],[2011,11,6,1,59,59],
'2011031310:00:00','2011031303:00:00','2011110608:59:59','2011110601:59:59' ],
[ [2011,11,6,9,0,0],[2011,11,6,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2012,3,11,9,59,59],[2012,3,11,1,59,59],
'2011110609:00:00','2011110601:00:00','2012031109:59:59','2012031101:59:59' ],
],
2012 =>
[
[ [2012,3,11,10,0,0],[2012,3,11,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2012,11,4,8,59,59],[2012,11,4,1,59,59],
'2012031110:00:00','2012031103:00:00','2012110408:59:59','2012110401:59:59' ],
[ [2012,11,4,9,0,0],[2012,11,4,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2013,3,10,9,59,59],[2013,3,10,1,59,59],
'2012110409:00:00','2012110401:00:00','2013031009:59:59','2013031001:59:59' ],
],
2013 =>
[
[ [2013,3,10,10,0,0],[2013,3,10,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2013,11,3,8,59,59],[2013,11,3,1,59,59],
'2013031010:00:00','2013031003:00:00','2013110308:59:59','2013110301:59:59' ],
[ [2013,11,3,9,0,0],[2013,11,3,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2014,3,9,9,59,59],[2014,3,9,1,59,59],
'2013110309:00:00','2013110301:00:00','2014030909:59:59','2014030901:59:59' ],
],
2014 =>
[
[ [2014,3,9,10,0,0],[2014,3,9,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2014,11,2,8,59,59],[2014,11,2,1,59,59],
'2014030910:00:00','2014030903:00:00','2014110208:59:59','2014110201:59:59' ],
[ [2014,11,2,9,0,0],[2014,11,2,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2015,3,8,9,59,59],[2015,3,8,1,59,59],
'2014110209:00:00','2014110201:00:00','2015030809:59:59','2015030801:59:59' ],
],
2015 =>
[
[ [2015,3,8,10,0,0],[2015,3,8,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2015,11,1,8,59,59],[2015,11,1,1,59,59],
'2015030810:00:00','2015030803:00:00','2015110108:59:59','2015110101:59:59' ],
[ [2015,11,1,9,0,0],[2015,11,1,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2016,3,13,9,59,59],[2016,3,13,1,59,59],
'2015110109:00:00','2015110101:00:00','2016031309:59:59','2016031301:59:59' ],
],
2016 =>
[
[ [2016,3,13,10,0,0],[2016,3,13,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2016,11,6,8,59,59],[2016,11,6,1,59,59],
'2016031310:00:00','2016031303:00:00','2016110608:59:59','2016110601:59:59' ],
[ [2016,11,6,9,0,0],[2016,11,6,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2017,3,12,9,59,59],[2017,3,12,1,59,59],
'2016110609:00:00','2016110601:00:00','2017031209:59:59','2017031201:59:59' ],
],
2017 =>
[
[ [2017,3,12,10,0,0],[2017,3,12,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2017,11,5,8,59,59],[2017,11,5,1,59,59],
'2017031210:00:00','2017031203:00:00','2017110508:59:59','2017110501:59:59' ],
[ [2017,11,5,9,0,0],[2017,11,5,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2018,3,11,9,59,59],[2018,3,11,1,59,59],
'2017110509:00:00','2017110501:00:00','2018031109:59:59','2018031101:59:59' ],
],
2018 =>
[
[ [2018,3,11,10,0,0],[2018,3,11,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2018,11,4,8,59,59],[2018,11,4,1,59,59],
'2018031110:00:00','2018031103:00:00','2018110408:59:59','2018110401:59:59' ],
[ [2018,11,4,9,0,0],[2018,11,4,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2019,3,10,9,59,59],[2019,3,10,1,59,59],
'2018110409:00:00','2018110401:00:00','2019031009:59:59','2019031001:59:59' ],
],
2019 =>
[
[ [2019,3,10,10,0,0],[2019,3,10,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2019,11,3,8,59,59],[2019,11,3,1,59,59],
'2019031010:00:00','2019031003:00:00','2019110308:59:59','2019110301:59:59' ],
[ [2019,11,3,9,0,0],[2019,11,3,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2020,3,8,9,59,59],[2020,3,8,1,59,59],
'2019110309:00:00','2019110301:00:00','2020030809:59:59','2020030801:59:59' ],
],
2020 =>
[
[ [2020,3,8,10,0,0],[2020,3,8,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2020,11,1,8,59,59],[2020,11,1,1,59,59],
'2020030810:00:00','2020030803:00:00','2020110108:59:59','2020110101:59:59' ],
[ [2020,11,1,9,0,0],[2020,11,1,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2021,3,14,9,59,59],[2021,3,14,1,59,59],
'2020110109:00:00','2020110101:00:00','2021031409:59:59','2021031401:59:59' ],
],
2021 =>
[
[ [2021,3,14,10,0,0],[2021,3,14,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2021,11,7,8,59,59],[2021,11,7,1,59,59],
'2021031410:00:00','2021031403:00:00','2021110708:59:59','2021110701:59:59' ],
[ [2021,11,7,9,0,0],[2021,11,7,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2022,3,13,9,59,59],[2022,3,13,1,59,59],
'2021110709:00:00','2021110701:00:00','2022031309:59:59','2022031301:59:59' ],
],
2022 =>
[
[ [2022,3,13,10,0,0],[2022,3,13,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2022,11,6,8,59,59],[2022,11,6,1,59,59],
'2022031310:00:00','2022031303:00:00','2022110608:59:59','2022110601:59:59' ],
[ [2022,11,6,9,0,0],[2022,11,6,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2023,3,12,9,59,59],[2023,3,12,1,59,59],
'2022110609:00:00','2022110601:00:00','2023031209:59:59','2023031201:59:59' ],
],
2023 =>
[
[ [2023,3,12,10,0,0],[2023,3,12,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2023,11,5,8,59,59],[2023,11,5,1,59,59],
'2023031210:00:00','2023031203:00:00','2023110508:59:59','2023110501:59:59' ],
[ [2023,11,5,9,0,0],[2023,11,5,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2024,3,10,9,59,59],[2024,3,10,1,59,59],
'2023110509:00:00','2023110501:00:00','2024031009:59:59','2024031001:59:59' ],
],
2024 =>
[
[ [2024,3,10,10,0,0],[2024,3,10,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2024,11,3,8,59,59],[2024,11,3,1,59,59],
'2024031010:00:00','2024031003:00:00','2024110308:59:59','2024110301:59:59' ],
[ [2024,11,3,9,0,0],[2024,11,3,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2025,3,9,9,59,59],[2025,3,9,1,59,59],
'2024110309:00:00','2024110301:00:00','2025030909:59:59','2025030901:59:59' ],
],
2025 =>
[
[ [2025,3,9,10,0,0],[2025,3,9,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2025,11,2,8,59,59],[2025,11,2,1,59,59],
'2025030910:00:00','2025030903:00:00','2025110208:59:59','2025110201:59:59' ],
[ [2025,11,2,9,0,0],[2025,11,2,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2026,3,8,9,59,59],[2026,3,8,1,59,59],
'2025110209:00:00','2025110201:00:00','2026030809:59:59','2026030801:59:59' ],
],
2026 =>
[
[ [2026,3,8,10,0,0],[2026,3,8,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2026,11,1,8,59,59],[2026,11,1,1,59,59],
'2026030810:00:00','2026030803:00:00','2026110108:59:59','2026110101:59:59' ],
[ [2026,11,1,9,0,0],[2026,11,1,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2027,3,14,9,59,59],[2027,3,14,1,59,59],
'2026110109:00:00','2026110101:00:00','2027031409:59:59','2027031401:59:59' ],
],
2027 =>
[
[ [2027,3,14,10,0,0],[2027,3,14,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2027,11,7,8,59,59],[2027,11,7,1,59,59],
'2027031410:00:00','2027031403:00:00','2027110708:59:59','2027110701:59:59' ],
[ [2027,11,7,9,0,0],[2027,11,7,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2028,3,12,9,59,59],[2028,3,12,1,59,59],
'2027110709:00:00','2027110701:00:00','2028031209:59:59','2028031201:59:59' ],
],
2028 =>
[
[ [2028,3,12,10,0,0],[2028,3,12,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2028,11,5,8,59,59],[2028,11,5,1,59,59],
'2028031210:00:00','2028031203:00:00','2028110508:59:59','2028110501:59:59' ],
[ [2028,11,5,9,0,0],[2028,11,5,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2029,3,11,9,59,59],[2029,3,11,1,59,59],
'2028110509:00:00','2028110501:00:00','2029031109:59:59','2029031101:59:59' ],
],
2029 =>
[
[ [2029,3,11,10,0,0],[2029,3,11,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2029,11,4,8,59,59],[2029,11,4,1,59,59],
'2029031110:00:00','2029031103:00:00','2029110408:59:59','2029110401:59:59' ],
[ [2029,11,4,9,0,0],[2029,11,4,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2030,3,10,9,59,59],[2030,3,10,1,59,59],
'2029110409:00:00','2029110401:00:00','2030031009:59:59','2030031001:59:59' ],
],
2030 =>
[
[ [2030,3,10,10,0,0],[2030,3,10,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2030,11,3,8,59,59],[2030,11,3,1,59,59],
'2030031010:00:00','2030031003:00:00','2030110308:59:59','2030110301:59:59' ],
[ [2030,11,3,9,0,0],[2030,11,3,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2031,3,9,9,59,59],[2031,3,9,1,59,59],
'2030110309:00:00','2030110301:00:00','2031030909:59:59','2031030901:59:59' ],
],
2031 =>
[
[ [2031,3,9,10,0,0],[2031,3,9,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2031,11,2,8,59,59],[2031,11,2,1,59,59],
'2031030910:00:00','2031030903:00:00','2031110208:59:59','2031110201:59:59' ],
[ [2031,11,2,9,0,0],[2031,11,2,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2032,3,14,9,59,59],[2032,3,14,1,59,59],
'2031110209:00:00','2031110201:00:00','2032031409:59:59','2032031401:59:59' ],
],
2032 =>
[
[ [2032,3,14,10,0,0],[2032,3,14,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2032,11,7,8,59,59],[2032,11,7,1,59,59],
'2032031410:00:00','2032031403:00:00','2032110708:59:59','2032110701:59:59' ],
[ [2032,11,7,9,0,0],[2032,11,7,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2033,3,13,9,59,59],[2033,3,13,1,59,59],
'2032110709:00:00','2032110701:00:00','2033031309:59:59','2033031301:59:59' ],
],
2033 =>
[
[ [2033,3,13,10,0,0],[2033,3,13,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2033,11,6,8,59,59],[2033,11,6,1,59,59],
'2033031310:00:00','2033031303:00:00','2033110608:59:59','2033110601:59:59' ],
[ [2033,11,6,9,0,0],[2033,11,6,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2034,3,12,9,59,59],[2034,3,12,1,59,59],
'2033110609:00:00','2033110601:00:00','2034031209:59:59','2034031201:59:59' ],
],
2034 =>
[
[ [2034,3,12,10,0,0],[2034,3,12,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2034,11,5,8,59,59],[2034,11,5,1,59,59],
'2034031210:00:00','2034031203:00:00','2034110508:59:59','2034110501:59:59' ],
[ [2034,11,5,9,0,0],[2034,11,5,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2035,3,11,9,59,59],[2035,3,11,1,59,59],
'2034110509:00:00','2034110501:00:00','2035031109:59:59','2035031101:59:59' ],
],
2035 =>
[
[ [2035,3,11,10,0,0],[2035,3,11,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2035,11,4,8,59,59],[2035,11,4,1,59,59],
'2035031110:00:00','2035031103:00:00','2035110408:59:59','2035110401:59:59' ],
[ [2035,11,4,9,0,0],[2035,11,4,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2036,3,9,9,59,59],[2036,3,9,1,59,59],
'2035110409:00:00','2035110401:00:00','2036030909:59:59','2036030901:59:59' ],
],
2036 =>
[
[ [2036,3,9,10,0,0],[2036,3,9,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2036,11,2,8,59,59],[2036,11,2,1,59,59],
'2036030910:00:00','2036030903:00:00','2036110208:59:59','2036110201:59:59' ],
[ [2036,11,2,9,0,0],[2036,11,2,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2037,3,8,9,59,59],[2037,3,8,1,59,59],
'2036110209:00:00','2036110201:00:00','2037030809:59:59','2037030801:59:59' ],
],
2037 =>
[
[ [2037,3,8,10,0,0],[2037,3,8,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2037,11,1,8,59,59],[2037,11,1,1,59,59],
'2037030810:00:00','2037030803:00:00','2037110108:59:59','2037110101:59:59' ],
[ [2037,11,1,9,0,0],[2037,11,1,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2038,3,14,9,59,59],[2038,3,14,1,59,59],
'2037110109:00:00','2037110101:00:00','2038031409:59:59','2038031401:59:59' ],
],
2038 =>
[
[ [2038,3,14,10,0,0],[2038,3,14,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2038,11,7,8,59,59],[2038,11,7,1,59,59],
'2038031410:00:00','2038031403:00:00','2038110708:59:59','2038110701:59:59' ],
[ [2038,11,7,9,0,0],[2038,11,7,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2039,3,13,9,59,59],[2039,3,13,1,59,59],
'2038110709:00:00','2038110701:00:00','2039031309:59:59','2039031301:59:59' ],
],
2039 =>
[
[ [2039,3,13,10,0,0],[2039,3,13,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2039,11,6,8,59,59],[2039,11,6,1,59,59],
'2039031310:00:00','2039031303:00:00','2039110608:59:59','2039110601:59:59' ],
[ [2039,11,6,9,0,0],[2039,11,6,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2040,3,11,9,59,59],[2040,3,11,1,59,59],
'2039110609:00:00','2039110601:00:00','2040031109:59:59','2040031101:59:59' ],
],
2040 =>
[
[ [2040,3,11,10,0,0],[2040,3,11,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2040,11,4,8,59,59],[2040,11,4,1,59,59],
'2040031110:00:00','2040031103:00:00','2040110408:59:59','2040110401:59:59' ],
[ [2040,11,4,9,0,0],[2040,11,4,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2041,3,10,9,59,59],[2041,3,10,1,59,59],
'2040110409:00:00','2040110401:00:00','2041031009:59:59','2041031001:59:59' ],
],
2041 =>
[
[ [2041,3,10,10,0,0],[2041,3,10,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2041,11,3,8,59,59],[2041,11,3,1,59,59],
'2041031010:00:00','2041031003:00:00','2041110308:59:59','2041110301:59:59' ],
[ [2041,11,3,9,0,0],[2041,11,3,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2042,3,9,9,59,59],[2042,3,9,1,59,59],
'2041110309:00:00','2041110301:00:00','2042030909:59:59','2042030901:59:59' ],
],
2042 =>
[
[ [2042,3,9,10,0,0],[2042,3,9,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2042,11,2,8,59,59],[2042,11,2,1,59,59],
'2042030910:00:00','2042030903:00:00','2042110208:59:59','2042110201:59:59' ],
[ [2042,11,2,9,0,0],[2042,11,2,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2043,3,8,9,59,59],[2043,3,8,1,59,59],
'2042110209:00:00','2042110201:00:00','2043030809:59:59','2043030801:59:59' ],
],
2043 =>
[
[ [2043,3,8,10,0,0],[2043,3,8,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2043,11,1,8,59,59],[2043,11,1,1,59,59],
'2043030810:00:00','2043030803:00:00','2043110108:59:59','2043110101:59:59' ],
[ [2043,11,1,9,0,0],[2043,11,1,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2044,3,13,9,59,59],[2044,3,13,1,59,59],
'2043110109:00:00','2043110101:00:00','2044031309:59:59','2044031301:59:59' ],
],
2044 =>
[
[ [2044,3,13,10,0,0],[2044,3,13,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2044,11,6,8,59,59],[2044,11,6,1,59,59],
'2044031310:00:00','2044031303:00:00','2044110608:59:59','2044110601:59:59' ],
[ [2044,11,6,9,0,0],[2044,11,6,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2045,3,12,9,59,59],[2045,3,12,1,59,59],
'2044110609:00:00','2044110601:00:00','2045031209:59:59','2045031201:59:59' ],
],
2045 =>
[
[ [2045,3,12,10,0,0],[2045,3,12,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2045,11,5,8,59,59],[2045,11,5,1,59,59],
'2045031210:00:00','2045031203:00:00','2045110508:59:59','2045110501:59:59' ],
[ [2045,11,5,9,0,0],[2045,11,5,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2046,3,11,9,59,59],[2046,3,11,1,59,59],
'2045110509:00:00','2045110501:00:00','2046031109:59:59','2046031101:59:59' ],
],
2046 =>
[
[ [2046,3,11,10,0,0],[2046,3,11,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2046,11,4,8,59,59],[2046,11,4,1,59,59],
'2046031110:00:00','2046031103:00:00','2046110408:59:59','2046110401:59:59' ],
[ [2046,11,4,9,0,0],[2046,11,4,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2047,3,10,9,59,59],[2047,3,10,1,59,59],
'2046110409:00:00','2046110401:00:00','2047031009:59:59','2047031001:59:59' ],
],
2047 =>
[
[ [2047,3,10,10,0,0],[2047,3,10,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2047,11,3,8,59,59],[2047,11,3,1,59,59],
'2047031010:00:00','2047031003:00:00','2047110308:59:59','2047110301:59:59' ],
[ [2047,11,3,9,0,0],[2047,11,3,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2048,3,8,9,59,59],[2048,3,8,1,59,59],
'2047110309:00:00','2047110301:00:00','2048030809:59:59','2048030801:59:59' ],
],
2048 =>
[
[ [2048,3,8,10,0,0],[2048,3,8,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2048,11,1,8,59,59],[2048,11,1,1,59,59],
'2048030810:00:00','2048030803:00:00','2048110108:59:59','2048110101:59:59' ],
[ [2048,11,1,9,0,0],[2048,11,1,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2049,3,14,9,59,59],[2049,3,14,1,59,59],
'2048110109:00:00','2048110101:00:00','2049031409:59:59','2049031401:59:59' ],
],
2049 =>
[
[ [2049,3,14,10,0,0],[2049,3,14,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2049,11,7,8,59,59],[2049,11,7,1,59,59],
'2049031410:00:00','2049031403:00:00','2049110708:59:59','2049110701:59:59' ],
[ [2049,11,7,9,0,0],[2049,11,7,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2050,3,13,9,59,59],[2050,3,13,1,59,59],
'2049110709:00:00','2049110701:00:00','2050031309:59:59','2050031301:59:59' ],
],
2050 =>
[
[ [2050,3,13,10,0,0],[2050,3,13,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2050,11,6,8,59,59],[2050,11,6,1,59,59],
'2050031310:00:00','2050031303:00:00','2050110608:59:59','2050110601:59:59' ],
[ [2050,11,6,9,0,0],[2050,11,6,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2051,3,12,9,59,59],[2051,3,12,1,59,59],
'2050110609:00:00','2050110601:00:00','2051031209:59:59','2051031201:59:59' ],
],
2051 =>
[
[ [2051,3,12,10,0,0],[2051,3,12,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2051,11,5,8,59,59],[2051,11,5,1,59,59],
'2051031210:00:00','2051031203:00:00','2051110508:59:59','2051110501:59:59' ],
[ [2051,11,5,9,0,0],[2051,11,5,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2052,3,10,9,59,59],[2052,3,10,1,59,59],
'2051110509:00:00','2051110501:00:00','2052031009:59:59','2052031001:59:59' ],
],
2052 =>
[
[ [2052,3,10,10,0,0],[2052,3,10,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2052,11,3,8,59,59],[2052,11,3,1,59,59],
'2052031010:00:00','2052031003:00:00','2052110308:59:59','2052110301:59:59' ],
[ [2052,11,3,9,0,0],[2052,11,3,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2053,3,9,9,59,59],[2053,3,9,1,59,59],
'2052110309:00:00','2052110301:00:00','2053030909:59:59','2053030901:59:59' ],
],
2053 =>
[
[ [2053,3,9,10,0,0],[2053,3,9,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2053,11,2,8,59,59],[2053,11,2,1,59,59],
'2053030910:00:00','2053030903:00:00','2053110208:59:59','2053110201:59:59' ],
[ [2053,11,2,9,0,0],[2053,11,2,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2054,3,8,9,59,59],[2054,3,8,1,59,59],
'2053110209:00:00','2053110201:00:00','2054030809:59:59','2054030801:59:59' ],
],
2054 =>
[
[ [2054,3,8,10,0,0],[2054,3,8,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2054,11,1,8,59,59],[2054,11,1,1,59,59],
'2054030810:00:00','2054030803:00:00','2054110108:59:59','2054110101:59:59' ],
[ [2054,11,1,9,0,0],[2054,11,1,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2055,3,14,9,59,59],[2055,3,14,1,59,59],
'2054110109:00:00','2054110101:00:00','2055031409:59:59','2055031401:59:59' ],
],
2055 =>
[
[ [2055,3,14,10,0,0],[2055,3,14,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2055,11,7,8,59,59],[2055,11,7,1,59,59],
'2055031410:00:00','2055031403:00:00','2055110708:59:59','2055110701:59:59' ],
[ [2055,11,7,9,0,0],[2055,11,7,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2056,3,12,9,59,59],[2056,3,12,1,59,59],
'2055110709:00:00','2055110701:00:00','2056031209:59:59','2056031201:59:59' ],
],
2056 =>
[
[ [2056,3,12,10,0,0],[2056,3,12,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2056,11,5,8,59,59],[2056,11,5,1,59,59],
'2056031210:00:00','2056031203:00:00','2056110508:59:59','2056110501:59:59' ],
[ [2056,11,5,9,0,0],[2056,11,5,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2057,3,11,9,59,59],[2057,3,11,1,59,59],
'2056110509:00:00','2056110501:00:00','2057031109:59:59','2057031101:59:59' ],
],
2057 =>
[
[ [2057,3,11,10,0,0],[2057,3,11,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2057,11,4,8,59,59],[2057,11,4,1,59,59],
'2057031110:00:00','2057031103:00:00','2057110408:59:59','2057110401:59:59' ],
[ [2057,11,4,9,0,0],[2057,11,4,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2058,3,10,9,59,59],[2058,3,10,1,59,59],
'2057110409:00:00','2057110401:00:00','2058031009:59:59','2058031001:59:59' ],
],
2058 =>
[
[ [2058,3,10,10,0,0],[2058,3,10,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2058,11,3,8,59,59],[2058,11,3,1,59,59],
'2058031010:00:00','2058031003:00:00','2058110308:59:59','2058110301:59:59' ],
[ [2058,11,3,9,0,0],[2058,11,3,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2059,3,9,9,59,59],[2059,3,9,1,59,59],
'2058110309:00:00','2058110301:00:00','2059030909:59:59','2059030901:59:59' ],
],
2059 =>
[
[ [2059,3,9,10,0,0],[2059,3,9,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2059,11,2,8,59,59],[2059,11,2,1,59,59],
'2059030910:00:00','2059030903:00:00','2059110208:59:59','2059110201:59:59' ],
[ [2059,11,2,9,0,0],[2059,11,2,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2060,3,14,9,59,59],[2060,3,14,1,59,59],
'2059110209:00:00','2059110201:00:00','2060031409:59:59','2060031401:59:59' ],
],
2060 =>
[
[ [2060,3,14,10,0,0],[2060,3,14,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2060,11,7,8,59,59],[2060,11,7,1,59,59],
'2060031410:00:00','2060031403:00:00','2060110708:59:59','2060110701:59:59' ],
[ [2060,11,7,9,0,0],[2060,11,7,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2061,3,13,9,59,59],[2061,3,13,1,59,59],
'2060110709:00:00','2060110701:00:00','2061031309:59:59','2061031301:59:59' ],
],
2061 =>
[
[ [2061,3,13,10,0,0],[2061,3,13,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2061,11,6,8,59,59],[2061,11,6,1,59,59],
'2061031310:00:00','2061031303:00:00','2061110608:59:59','2061110601:59:59' ],
[ [2061,11,6,9,0,0],[2061,11,6,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2062,3,12,9,59,59],[2062,3,12,1,59,59],
'2061110609:00:00','2061110601:00:00','2062031209:59:59','2062031201:59:59' ],
],
2062 =>
[
[ [2062,3,12,10,0,0],[2062,3,12,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2062,11,5,8,59,59],[2062,11,5,1,59,59],
'2062031210:00:00','2062031203:00:00','2062110508:59:59','2062110501:59:59' ],
[ [2062,11,5,9,0,0],[2062,11,5,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2063,3,11,9,59,59],[2063,3,11,1,59,59],
'2062110509:00:00','2062110501:00:00','2063031109:59:59','2063031101:59:59' ],
],
2063 =>
[
[ [2063,3,11,10,0,0],[2063,3,11,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2063,11,4,8,59,59],[2063,11,4,1,59,59],
'2063031110:00:00','2063031103:00:00','2063110408:59:59','2063110401:59:59' ],
[ [2063,11,4,9,0,0],[2063,11,4,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2064,3,9,9,59,59],[2064,3,9,1,59,59],
'2063110409:00:00','2063110401:00:00','2064030909:59:59','2064030901:59:59' ],
],
2064 =>
[
[ [2064,3,9,10,0,0],[2064,3,9,3,0,0],'-07:00:00',[-7,0,0],
'PDT',1,[2064,11,2,8,59,59],[2064,11,2,1,59,59],
'2064030910:00:00','2064030903:00:00','2064110208:59:59','2064110201:59:59' ],
[ [2064,11,2,9,0,0],[2064,11,2,1,0,0],'-08:00:00',[-8,0,0],
'PST',0,[2065,3,8,9,59,59],[2065,3,8,1,59,59],
'2064110209:00:00','2064110201:00:00','2065030809:59:59','2065030801:59:59' ],
],
);
# Open-ended DST rule applied to years beyond the precomputed table above
# (U.S. Pacific time): clocks move forward to PDT (UTC-7) on the second
# Sunday in March ('ge' the 8th) and back to PST (UTC-8) on the first
# Sunday in November ('ge' the 1st), both at 02:00 wall-clock ('w') time.
%LastRule = (
    'zone' => {
        'dstoff' => '-07:00:00',
        'stdoff' => '-08:00:00',
    },
    'rules' => {
        '03' => {
            'flag'  => 'ge',
            'dow'   => '7',
            'num'   => '8',
            'type'  => 'w',
            'time'  => '02:00:00',
            'isdst' => '1',
            'abb'   => 'PDT',
        },
        '11' => {
            'flag'  => 'ge',
            'dow'   => '7',
            'num'   => '1',
            'type'  => 'w',
            'time'  => '02:00:00',
            'isdst' => '0',
            'abb'   => 'PST',
        },
    },
);

1;
| nriley/Pester | Source/Manip/TZ/amlos_00.pm | Perl | bsd-2-clause | 57,088 |
package WebComponent::FormWizard;
# FormWizard - component for to create wizards for complex input forms
use strict;
use warnings;
use Data::Dumper;
use XML::Simple;
use WebComponent::FormWizard::DataStructures;
use Conf;
use base qw( WebComponent );
1;
=pod
=head1 NAME
FormWizard - component to create wizards for complex input forms
=head1 DESCRIPTION
WebComponent to create input forms in wizard form
=head1 METHODS
=over 4
=item * B<new> ()
Called when the object is initialized. Expands SUPER::new.
=cut
sub new {
    # Constructor. Extends WebComponent::new: registers the sub-components
    # the wizard renders with (tab view, two hovers, ajax) and initializes
    # all instance defaults.
    my $self = shift->SUPER::new(@_);

    my $app = $self->application;

    # Register dependent components, keyed by this component's id.
    foreach my $spec ( ['TabView', 'fw_tv_'],
                       ['Hover',   'fw_hover_'],
                       ['Hover',   'fw_hover_migs_'],
                       ['Ajax',    'fw_ajax_'] ) {
        $app->register_component($spec->[0], $spec->[1] . $self->id);
    }

    # Default configuration.
    $self->{config_file}             = undef;        # wizard definition file (see config_file())
    $self->{summary}                 = 0;
    $self->{orientation}             = 'horizontal'; # tab layout direction
    $self->{width}                   = undef;
    $self->{height}                  = undef;
    $self->{noprefix}                = 0;
    $self->{prefix}                  = '';
    $self->{using_categories}        = 0;
    $self->{page}                    = $app->page;
    $self->{struct}                  = WebComponent::FormWizard::DataStructures->new();
    $self->{allow_random_navigation} = 0;            # if true, all tabs stay enabled
    $self->{submit_button}           = 1;            # wrap output in start_form/end_form
    $self->{enable_ajax}             = 0;
    $self->{debug}                   = $app->cgi->param('debug') || "0";

    return $self;
}
=item * B<output> ()
Returns the html output of the FormWizard component.
=cut
sub output {
    # Render the wizard as HTML: one TabView tab per category, sub-tabs for
    # categories with several steps, navigation buttons on every step, and
    # (optionally) the surrounding form and ajax machinery.
    # Returns the complete HTML string.
    my ($self) = @_;

    # get some variables
    my $application = $self->application;
    my $cgi = $application->cgi;

    # get data
    $self->{data} = $self->{struct}->data();

    # create the tabview
    my $tv = $application->component('fw_tv_'.$self->id);
    $tv->orientation($self->orientation);
    if ($self->width) {
        $tv->width($self->width);
    }
    if ($self->height) {
        $tv->height($self->height);
    }

    # extra argument passed to the navigation-button generator for
    # vertical layouts (and sub-tab navigation when categories are used)
    my $ori = "";
    if ($self->orientation eq 'vertical') {
        $ori = ', "vertical"';
        if ($self->using_categories) {
            $ori = ', "sub"';
        }
    }

    my $content = "";
    if ($self->enable_ajax) {
        $content .= $self->application->component('fw_ajax_'.$self->id)->output;
    }
    $content .= $self->application->page->start_form('wizard_form_'.$self->id()) if ($self->submit_button);

    # javascript: all 'selectall_' multi-selects have all options submitted
    my $formName = $self->form_name();
    # NOTE: this used to read "my $scripts .= qq~...~", which appends to a
    # freshly declared undef and raises an uninitialized-value warning under
    # 'use warnings'; plain assignment is what was intended.
    my $scripts = qq~
<script>
function enable_multi_select() {
   fwForm = document.getElementById('$formName');
   for (i = 0; i < fwForm.elements.length; ++i) {
      if (fwForm.elements[i].type == 'select-multiple') {
         if ( /selectall_/.test(fwForm.elements[i].name) ) {
            fwForm.elements[i].disabled = false;
            for (j = 0; j < fwForm.elements[i].options.length; ++j) {
               fwForm.elements[i].options[j].selected = true;
            }
            fwForm.elements[i].name = fwForm.elements[i].name.replace(/selectall_/,'');
         }
      }
   }
   return true;
}
</script>
~;
    $content .= $scripts;

    my $h = -1 ; # Tab counter , TabView starts with 0
    my $i = 1;   # global step counter across all categories
    my $cat = $self->categories();

    # remember number of categories and add submit button to all steps in the last category if steps are exclusive
    my $nr_categories = scalar @{ $cat->{order} };
    my $category_counter = 0 ;

    foreach my $cat_name (@{ $cat->{order} }) {
        $category_counter++;
        $h++;

        # categories with several steps need subcategories (sub-tabs),
        # unless the steps are exclusive, in which case a single selection
        # box replaces the sub-tabs
        my $nr_steps_in_cat = scalar @{$cat->{groups}->{$cat_name}};
        if ( $nr_steps_in_cat > 1 ) {
            if ( $cat->{groups}->{$cat_name}->[0]->{exclusive} ) {
                my $step_content = $self->create_group_selection_box( $cat->{groups}->{$cat_name} , $cat_name , $category_counter || $h ) || "";
                my ($last, $next) = $self->create_navigation_buttons( $tv , $h , $nr_categories , $i , $cat->{ nr_steps } , 1 , 1 , 1 , $ori);
                my $navigation_buttons = "<table width=100%><tr><td style='text-align: left;'>$last</td><td style='text-align: right;'>$next</td></tr></table>";
                $tv->add_tab($cat_name, $navigation_buttons . "<hr>" . $step_content . $navigation_buttons);
                $i++;
                next;
            } else {
                # placeholder tab; the actual content goes into sub-tabs below
                $tv->add_tab($cat_name , '');
            }
        }

        # render the steps of this category
        my $nr_step_per_category = 1;
        foreach my $step (@{$cat->{groups}->{$cat_name}}) {
            my $step_content = "<table style='height: 100%; width: 100%;'><tr><td>";
            if ($step->{intro} || $step->{data}->{intro}) {
                $step_content .= "<p>". ($step->{intro} || $step->{data}->{intro})."</p>";
            }
            $step_content .= $self->layout_questions($step, $h, $i);
            if ($step->{summary}) {
                $step_content .= "<p>".$step->{summary}."</p>";
            }
            my ($last, $next) = $self->create_navigation_buttons( $tv , $h , $nr_categories , $i , $cat->{ nr_steps } , 1 , 1 , 1 , $ori);
            $step_content .= "</td></tr><tr><td style='vertical-align: bottom;'><table width=100%><tr><td style='text-align: left;'>$last</td><td style='text-align: right;'>$next</td></tr></table></td></tr></table>";

            if (scalar @{$cat->{groups}->{$cat_name}} > 1) {
                my $checked = "";
                my $disabled = 0;
                my $title = $step->{title} || $step->{data}->{title};
                if ($step->{exclusive}) {
                    # only the first exclusive step starts enabled/checked;
                    # the radio button switches between the sub-tabs
                    if ($nr_step_per_category == 1) {
                        $checked = " checked=checked";
                    } else {
                        $disabled = 1;
                    }
                    $title = "<span title='this step is exclusive, you can only select one'><input type='radio' name='$cat_name'$checked onclick='enable_subtab(this, ".$tv->id.", $h, ".($nr_step_per_category-1).");'>".$title."</span>";
                }
                if ($self->{allow_random_navigation}) {
                    $disabled = 0;
                }
                $tv->add_sub_tab( $h, $title, $step_content, $disabled );
            } else {
                # single-step category: tabs are disabled unless free
                # navigation is allowed (user advances via the buttons)
                my $disabled = 1;
                if ($self->{allow_random_navigation}) {
                    $disabled = 0;
                }
                $tv->add_tab( ($step->{title} || $step->{data}->{title}) , $step_content, $disabled );
            }
            $i++;
            $nr_step_per_category++;
        }
    }

    $content .= $tv->output();
    $content .= $self->application->page->end_form() if ($self->submit_button);

    my $hover = $self->application->component('fw_hover_'.$self->id);
    $content .= $hover->output();

    return $content;
}
sub config_file {
    # Combined accessor/mutator for the wizard definition file. Setting a
    # new value also parses the config via the DataStructures helper and
    # converts it into the internal data representation.
    my ($self, $fh) = @_;
    return $self->{config_file} unless defined $fh;
    $self->{config_file} = $fh;
    $self->{struct}->readFormWizardConfig($fh);
    $self->{struct}->config2data();
    return $self->{config_file};
}
sub summary {
    # Accessor/mutator for the summary flag (default 0, set in new()).
    my ($self, $summary) = @_;
    $self->{summary} = $summary if defined $summary;
    return $self->{summary};
}
sub orientation {
    # Accessor/mutator for the tab layout direction
    # ('horizontal' or 'vertical'; default 'horizontal').
    my ($self, $orientation) = @_;
    $self->{orientation} = $orientation if defined $orientation;
    return $self->{orientation};
}
sub width {
    # Accessor/mutator for the tab view width (undef = component default).
    my ($self, $width) = @_;
    $self->{width} = $width if defined $width;
    return $self->{width};
}
sub height {
    # Accessor/mutator for the tab view height (undef = component default).
    my ($self, $height) = @_;
    $self->{height} = $height if defined $height;
    return $self->{height};
}
sub layout_questions {
    # Render the questions of one wizard step as an HTML table.
    #   $step        - step hash (layout, question list, info_width, ...)
    #   $category_nr - index of the enclosing category tab
    #   $step_nr     - global step number (used for hidden-field names)
    # Supports two layouts: "single-column" (question text above the input)
    # and the default multi-column form (text | input | help). Mandatory
    # questions additionally emit hidden fields so submission can verify
    # that they were answered. Returns the HTML string.
    my ($self, $step, $category_nr, $step_nr) = @_;

    my ($layout, $questions , $info_width ) = ( ($step->{layout} || $step->{data}->{layout}) ,
                                                $step->{question} ,
                                                ($step->{info_width} || $step->{data}->{info_width} || '')
                                              );
    my $content = "";
    my $mandatory_hiddens = "";

    my $hover = $self->application->component('fw_hover_'.$self->id);
    $hover->add_tooltip('mandatory', "This question is mandatory");
    $hover->add_tooltip('migs', "This is a MIGS term");

    $self->application->add_message('info' , "Found " . scalar @$questions . " questions!") if ($self->debug) ;

    # check which layout to use
    if ($layout eq "single-column") {
        $content .= "<table>";
        my $i = 1;
        foreach my $question (@$questions) {
            # help cell: tooltip bulb and/or inline info text
            my $help = "";
            if (defined($question->{help})) {
                my $qid = "wizard_" . $self->id . "_q_" . $category_nr . "_" . $step_nr . "_" . $i;
                $hover->add_tooltip($qid, $question->{help});
                $help .= " <img src='$Conf::cgi_url/Html/wac_infobulb.png' onmouseover='hover(event, \"" . $qid . "\", \"" . $hover->id() . "\");'>";
            }
            if (defined($question->{info})) {
                my $info = $question->{info};
                $help .= " <i>". $info."</i>";
            }
            if (length($help)) {
                my $hstring = $help;
                if ($info_width) {
                    $help = "<td style='width: " . $info_width . "px;'>";
                } else {
                    $help = "<td nowrap='nowrap'>";
                }
                $help .= $hstring . "</td>";
            }
            # mandatory questions get a red asterisk plus a hidden field
            # that records name|text for post-submit validation
            my $mandatory = '';
            if ($question->{mandatory}) {
                $mandatory_hiddens .= "<input type='hidden' name='mandatory_hiddens_".$step_nr."_".$self->id."' value='".$question->{name}."|".$question->{text}."'>";
                $mandatory = "<span style='font-weight: bold; color: red; cursor: pointer;' onmouseover='hover(event, \"mandatory\", \"" . $hover->id() . "\");'><sup>*</sup></span>";
            }
            if ($question->{migs}) {
                # MIGS terms are highlighted blue with their own tooltip
                $content .= "<tr><td>";
                $content .= "<div style='font-weight: bold; color: blue;' onmouseover='hover(event, \"migs\", \"" . $hover->id() . "\");'>";
                $content .= $question->{text}."$mandatory</div><br>".$self->question_type($question, $i)."</td>$help</tr>";
            }
            else{
                $content .= "<tr><td>".$question->{text}."$mandatory<br>".$self->question_type($question, $i)."</td>$help</tr>";
            }
            $i++;
        }
        $content .= "</table>";
    }
    else {
        $content .= "<table width='90%'>";
        my $i = 1;
        foreach my $question (@$questions) {
            my $help = "";
            if (defined($question->{help})) {
                my $qid = "wizard_" . $self->id . "_q_" . $category_nr . "_" . $step_nr . "_" . $i;
                $hover->add_tooltip($qid, $question->{help});
                $help .= " <img src='$Conf::cgi_url/Html/wac_infobulb.png' onmouseover='hover(event, \"" . $qid . "\", \"" . $hover->id() . "\");'>";
            }
            if (defined($question->{info})) {
                # Soft-wrap long info strings: insert <br> roughly every
                # 50-70 characters at a word boundary or newline.
                # FIX: the quantifier used to be written "{ 50 , 70}" -
                # with embedded spaces it is a literal (no /x flag), so the
                # substitution never matched; additionally the wrapped copy
                # was discarded and the raw info string emitted instead.
                my $info = $question->{info};
                $info =~ s/([^\n]{50,70})(?:\b\s*|\n)/$1<br>/gi;
                $help .= " <i>".$info."</i>";
            }
            if (length($help)) {
                my $hstring = $help;
                if ($info_width) {
                    $help = "<td style='width: " . $info_width . "px;'>";
                } else {
                    $help = "<td nowrap='nowrap'>";
                }
                $help .= $hstring . "</td>";
            }
            my $mandatory = '';
            if ($question->{mandatory}) {
                $mandatory_hiddens .= "<input type='hidden' name='mandatory_hiddens_".$step_nr."_".$self->id."' value='".$question->{name}."|".$question->{text}."'>";
                $mandatory = "<span style='font-weight: bold; color: red; cursor: pointer;' onmouseover='hover(event, \"mandatory\", \"" . $hover->id() . "\");'><sup>*</sup></span>";
            }
            if ($question->{migs}) {
                $content .= "<tr><td>";
                $content .= "<span style='font-weight: bold; color: blue;' onmouseover='hover(event, \"migs\", \"" . $hover->id() . "\");'>";
                $content .= $question->{text}."$mandatory</span></td><td>".$self->question_type($question, $i)."</td><td>$help</td></tr>";
            } else {
                $content .= "<tr><td>".$question->{text}."$mandatory</td><td>".$self->question_type($question, $i)."</td><td>$help</td></tr>";
            }
            $i++;
        }
        $content .= "</table>";
    }

    $content .= $mandatory_hiddens;
    return $content;
}
sub prefill {
    # Accessor/mutator for the prefill hashref mapping question names to
    # default values (consumed by question_type()).
    my ($self, $prefill) = @_;
    $self->{prefill} = $prefill if defined $prefill;
    return $self->{prefill};
}
sub question_type {
my ($self, $question, $step_num) = @_;
my $prefill = $self->prefill();
my $content = "";
$question->{default} = '' unless ($question->{default});
if (exists($question->{id})) {
$content .= "<span id='" . $question->{id} . "'>";
}
my $name = $question->{name} || '';
unless ($name) {
my $value = $question->{text};
$value =~s/\s+/_/g;
$name = lc $value;
$question->{name} = $name;
}
# set question default to prefill value if defined
if ( (ref $prefill) && (defined $prefill->{$name}) ) {
$question->{default} = $prefill->{$name};
}
# else empty array, give it a string
elsif ( (ref($question->{default}) eq 'ARRAY') && (@{$question->{default}} == 0) ) {
$question->{default} = [''];
}
# set default to scalar if list (for types in formwizard)
my $default_scalar = '';
if ( ref($question->{default}) ) {
if ( (ref($question->{default}) eq 'ARRAY') && scalar(@{$question->{default}}) ) {
$default_scalar = $question->{default}->[0];
}
} else {
$default_scalar = $question->{default};
}
# write div for ajax
$content .= "<div id='ajax_main_$name'>";
if ($question->{type} eq "select") {
$content .= "<select name='$name'>\n<option value=''>Please Select</option>\n";
foreach my $option (@{$question->{options}}) {
unless (ref($option) eq "HASH") {
$option->{value} = $option;
$option->{text} = $option->{value};
}
unless (defined $option->{value}) {
my $value = $option->{text};
$value =~s/\s+/_/g;
$option->{value} = lc $value;
}
my $default = ($default_scalar eq $option->{value}) ? " selected='selected'" : "";
$content .= "<option value='".$option->{value}."'$default>".$option->{text}."</option>\n";
}
$content .= "</select>";
} elsif ($question->{type} eq "radio") {
foreach my $option (@{$question->{options}}) {
unless (ref($option) eq "HASH") {
my $opt = {};
$opt->{value} = $option;
$opt->{text} = $option;
$option = $opt;
}
my $default = ($default_scalar eq $option->{value}) ? " checked='checked'" : "";
$content .= "<input type='radio' name='$name' value='".$option->{value}."'$default>".$option->{text};
}
} elsif ($question->{type} eq "checkbox") {
foreach my $option (@{$question->{options}}) {
unless (ref($option) eq "HASH") {
my $opt = {value => $option, text => $option, checked => 0 };
$option = $opt;
}
my $default = "";
if($default_scalar eq $option->{value}) {
$option->{checked} = 1;
}
if ($option->{checked}) {
$default = " checked='checked'";
}
$content .= "<input type='checkbox' name='$name' value='".$option->{value}."'$default>".$option->{text}."<br>";
}
} elsif ($question->{type} eq "list") {
$content .= "<select name='$name' multiple='multiple'>";
foreach my $option (@{$question->{options}}) {
unless (ref($option) eq "HASH") {
my $opt = {value => $option, text => $option, checked => 0 };
$option = $opt;
}
my $default = "";
if($default_scalar eq $option->{value}) {
$option->{selected} = 1;
}
if ($option->{selected}) {
$default = " selected='selected'";
}
$content .= "<option value='".$option->{value}."'$default>".$option->{text}."</option>";
}
$content .= "</select>";
} elsif ($question->{type} eq "textarea") {
$content .= "<textarea name='$name' value='$default_scalar' cols='30' rows='10'>$default_scalar</textarea>";
} elsif ($question->{type} eq "text") {
$content .= $self->text_field($question, $default_scalar);
} elsif ($question->{type} eq 'date') {
$question->{size} = "size='10'";
$question->{unit} = '';
$question->{validation} = "id='DPC_$name'";
$content .= $self->text_field($question, $default_scalar);
} elsif ($question->{type} eq "OOD_List") {
$content .= $self->OOD_List($question);
} elsif ($question->{type} eq "OOD_Tree") {
$content .= $self->OOD_Tree($question);
} elsif ($question->{type} eq "OOD_Ontology_Tree") {
$content .= $self->OOD_Tree($question, 1);
} elsif ($question->{type} eq 'user_list') {
my $options = "";
if ($question->{default} && (ref $question->{default} eq 'ARRAY')) {
foreach my $p ( @{$question->{default}} ) {
$options .= "<option value='$p' selected='selected'>$p</option>";
}
}
$content .= qq~
<table><tr>
<td>
<input type='text' id='text_$name' /><br>
<input type='button' value='add' onclick='
if (document.getElementById("text_$name").value.length) {
document.getElementById("select_$name").add(new Option(document.getElementById("text_$name").value, document.getElementById("text_$name").value, 1, 1), null)
}' />
<input type='button' value='remove' onclick='
if (document.getElementById("select_$name").options.length) {
document.getElementById("select_$name").remove(document.getElementById("select_$name").options.length-1);
}' />
</td><td>
<select name='$name' multiple=multiple size=5 disabled=disabled id='select_$name'>
$options
</select>
</td>
</tr></table>
~;
} elsif ($question->{type} eq 'user_kv_list') {
my $options = "";
if ($question->{default} && (ref $question->{default} eq 'ARRAY')) {
foreach my $p ( @{$question->{default}} ) {
$options .= "<option value='$p' selected='selected'>$p</option>";
}
}
$content .= qq~
<table><tr>
<td>
<input type='text' id='text_k_$name' />
<input type='text' id='text_v_$name' /><br>
<input type='button' value='add' onclick='
if (document.getElementById("text_$name").value.length) {
document.getElementById("select_$name").add(new Option(document.getElementById("text_k_$name").value+": "+document.getElementById("text_v_$name").value, document.getElementById("text_k_$name").value+": "+document.getElementById("text_v_$name").value, 1, 1), null)
}' />
<input type='button' value='remove' onclick='
if (document.getElementById("select_$name").options.length) {
document.getElementById("select_$name").remove(document.getElementById("select_$name").options.length-1);
}' />
</td><td>
<select name='$name' multiple=multiple size=5 disabled=disabled id='select_$name'>"
$options
</select>
</td>
</tr></table>
~;
} else {
my $error = '';
my $package = "WebComponent::FormWizard::" . $question->{type};
{
no strict;
eval "require $package;";
$error = $@;
}
if ($error) {
# no package, default is text
$content .= $self->text_field($question, $default_scalar, $error);
}
else {
# create the object
my $type = $package->new($self, $question);
if (ref($type) && $type->isa($package) && $type->can("output")) {
$content .= $type->output;
} else {
$content .= $self->text_field($question, $default_scalar, $error);
}
}
}
# close div for ajax
$content .= "</div>";
if (exists $question->{id}) { $content .= "</span>"; }
return $content;
}
sub text_field {
  # Render a question as a plain text <input>.  When the question carries
  # a unit list, rendering is delegated to the Measurement component
  # instead.  $error is deliberately blanked (debug output disabled).
  my ($self, $question, $default, $error) = @_;
  $error = "";
  $question->{size}       ||= '';
  $question->{validation} ||= '';
  my $measurement_class = "WebComponent::FormWizard::Measurement";
  # A unit array marks this as a measurement field, not free text.
  if (ref($question->{unit}) eq "ARRAY") {
    {
      no strict;
      eval "require $measurement_class;";
    }
    my $widget = $measurement_class->new($self, $question);
    return $widget->output if ref($widget) && $widget->can("output");
  }
  return qq(<input type="text" name="$question->{name}" value="$default" $question->{size} $question->{validation} />$error);
}
sub form_name{
  # Get/set the JavaScript name of the wizard form.  When no name was
  # ever supplied, a default derived from the component id is cached.
  my ($self, $name) = @_;
  $self->{form_name} = $name if $name;
  $self->{form_name} = "forms.wizard_form_" . $self->id()
    unless $self->{form_name};
  return $self->{form_name};
}
sub noprefix {
  # Get/set (via the struct object) whether question names omit the
  # generated prefix; the struct's answer is cached locally.
  my ($self, $flag) = @_;
  return $self->{noprefix} = $self->{struct}->noprefix($flag);
}
sub prefix {
  # Get/set (via the struct object) the prefix used for question names;
  # the struct's answer is cached locally.
  my ($self, $value) = @_;
  return $self->{prefix} = $self->{struct}->prefix($value);
}
sub using_categories {
  # Get/set (via the struct object) whether questions are grouped into
  # categories; the struct's answer is cached locally.
  my ($self, $flag) = @_;
  return $self->{using_categories} = $self->{struct}->using_categories($flag);
}
sub categories {
  # Pass-through accessor: the category list lives on the struct object.
  my $self = shift;
  return $self->{struct}->categories();
}
sub page {
  # Read-only accessor: the web page object this wizard was created for.
  my $self = shift;
  return $self->{page};
}
sub data {
  # Pass-through accessor: the collected form data lives on the struct.
  my $self = shift;
  return $self->{struct}->data();
}
sub struct {
  # Read-only accessor: the FormWizard structure object backing this
  # component.
  my $self = shift;
  return $self->{struct};
}
sub enable_ajax {
  # Get/set whether form sections are loaded via ajax.  Only a defined
  # argument updates the flag, so an explicit 0 is a valid setting.
  my ($self, $enable) = @_;
  $self->{enable_ajax} = $enable if defined $enable;
  return $self->{enable_ajax};
}
sub require_javascript {
  # JavaScript files this component needs on the page.
  return [ map { "$Conf::cgi_url/Html/$_" } qw(datepickercontrol.js FormWizard.js) ];
}
sub require_css {
  # Stylesheet for the date-picker control used by 'date' questions.
  my $base = $Conf::cgi_url;
  return $base . "/Html/datepickercontrol.css";
}
sub allow_random_navigation {
  # Get/set whether users may jump freely between wizard steps.  Only a
  # defined argument updates the flag (0 is a valid value).
  my ($self, $allow) = @_;
  $self->{allow_random_navigation} = $allow if defined $allow;
  return $self->{allow_random_navigation};
}
sub submit_button {
  # Get/set whether the final step shows a 'finish' (submit) button.
  # Only a defined argument updates the flag (0 is a valid value).
  my ($self, $allow) = @_;
  $self->{submit_button} = $allow if defined $allow;
  return $self->{submit_button};
}
#
# Ontology Lookup
#
sub Ontology {
  # Render the input widget for an 'Ontology' question: a text field plus
  # hidden accession/definition/ontology fields and a "search term" link
  # that looks the entered text up via Ontology_lookup (ajax).  Lookup
  # results are rendered into the (initially empty) edit div at the end.
  my ($self, $question) = @_;
  # get page functions
  my $name = $question->{name} || '';
  my $cgi = $self->application->cgi;
  my $wid = $self->{_id};
  # Set params to remember and submit to ajax functions
  my $default_name = (ref($question->{default}) && (ref($question->{default}) eq 'ARRAY')) ? $question->{default}->[0] : $question->{default};
  my $main_ajax_id = $name ? "ajax_main_$name" : $cgi->param('main_ajax');
  my $edit_ajax_id = $name ? "ajax_edit_$name" : $cgi->param('edit_ajax');
  my $question_name = $name || $cgi->param('question_name_ajax');
  my $question_type = $question->{type} || $cgi->param('question_type_ajax');
  my $question_text = $question->{text} || $cgi->param('question_text_ajax');
  unless ($question_type) {
    return "<p>missing question type</p>";
  }
  my $ajax_call = qq~
  <a style='cursor: pointer;' onclick="
     if (document.getElementById('$name').value) {
       execute_ajax('Ontology_lookup','$edit_ajax_id','main_ajax=$main_ajax_id&edit_ajax=$edit_ajax_id&question_name_ajax=$question_name&question_type_ajax=$question_type&question_text_ajax=$question_text&selection='+document.getElementById('$name').value, null, null, null, 'FormWizard|$wid');
     } else {
       alert('you must enter a term to search');
     }">
  <b>search term</b></a>
  ~;
  # BUG FIX: the result div was emitted as two opening <div> tags
  # ("...><div>") and never closed; emit a properly closed empty div so
  # the ajax results have a valid innerHTML target.
  my $content = qq~
  <input name='$question_name' value='$default_name' id='$question_name' />
  <input type='hidden' name='${question_name}_accession' value='' id='${question_name}_accession' />
  <input type='hidden' name='${question_name}_definition' value='' id='${question_name}_definition' />
  <input type='hidden' name='${question_name}_ontology' value='' id='${question_name}_ontology' />
  $ajax_call<br>
  <div id='$edit_ajax_id' style='cursor: pointer;'></div>
  ~;
  return $content;
}
sub Ontology_lookup {
  # Ajax backend for Ontology questions (and the tree editor): sends the
  # user's text to the terminizer.org service and renders the matching
  # ontology terms as a table of clickable links that copy the term (and
  # its definition) back into the form.  Returns HTML for the edit div.
  my ($self) = @_;
  my $cgi = $self->application->cgi;
  my $wid = $self->{_id};
  my $main_ajax_id = $cgi->param('main_ajax');
  my $edit_ajax_id = $cgi->param('edit_ajax');
  my $question_name = $cgi->param('question_name_ajax');
  my $question_type = $cgi->param('question_type_ajax');
  my $question_text = $cgi->param('question_text_ajax');
  my $selection = $cgi->param('selection');
  my $target_name_term = $cgi->param('target_name_term');
  my $target_name_desc = $cgi->param('target_name_desc');
  my $from_tree = $cgi->param('from_tree');
  $selection =~ s/^\s+//;
  my @indexed_selection = split(/\s+/, $selection);
  # NOTE(review): the query string is not URI-escaped -- assumes simple
  # word input; confirm before exposing to arbitrary text.
  use LWP::Simple;
  use XML::Simple;
  my $response = get "http://terminizer.org/terminizerBackEnd/service?sourceText=$selection";
  unless ($response) { return "No look up service available"; }
  my $ref = XMLin($response, forcearray => ["MatchedTermList","MatchedTerm","Token"]);
  my $term_table = "<p><table><tr><td><b>Ontology term</b></td><td><b>Definition</b></td></tr>";
  my $num_found = 0;
  foreach my $matched_token ( @{$ref->{MatchedTermList}} ) {
    my $hits = $matched_token->{MatchedTerm};
    foreach my $hit (@$hits) {
      # BUG FIX: 'my ... unless' leaves the variable in an unspecified
      # state when the condition fails; use an explicit conditional.
      my $definition = ref($hit->{Definition}) ? undef : $hit->{Definition};
      my $accession = $hit->{Accession};
      my ($suggested) = $hit->{OmixedItemID} =~/Term\/terminizer\/(.+)/;
      my %token_id = map { $_ => 1 } split ("," , $hit->{TokenIndices});
      # highlight which tokens of the query this term matched
      my $matched = '';
      for (my $i=0 ; $i < scalar @indexed_selection ; $i++) {
        if ($token_id{$i}) { $matched .= "<b> $indexed_selection[$i] </b> "; }
        else { $matched .= $indexed_selection[$i] . " "; }
      }
      next unless $definition;
      $num_found++;
      # escape quotes for safe embedding in the generated JavaScript
      my $suggested_safe = $suggested;
      $suggested_safe =~ s/"/\\"/g;
      $suggested_safe =~ s/'/\\'/g;
      my $definition_safe = $definition;
      $definition_safe =~ s/"/\\"/g;
      $definition_safe =~ s/'/\\'/g;
      my $event = "";
      if ($from_tree) {
        $event = qq~
        <a onclick='
           document.getElementById("add_entry_button").style.display="inline";
           document.getElementById("$target_name_term").value="$suggested_safe";
           document.getElementById("$target_name_desc").value="$definition_safe";
           document.getElementById("$edit_ajax_id").innerHTML="";'> $suggested </a>
        ~;
      } else {
        # BUG FIX: use the quote-escaped term inside the JS assignment
        # (matching the from_tree branch) and close the <a> tag, which
        # was emitted as a dangling "</a".
        $event = qq~
        <a onclick='
           document.getElementById("$question_name").value="$suggested_safe";
           document.getElementById("$edit_ajax_id").innerHTML="";'> $suggested </a>
        ~;
      }
      $term_table .= "<tr><td>$event</td><td><def>" . ($definition || 'no definition available') . "</def></td></tr>\n";
    }
  }
  unless ($num_found) {
    my $event = "";
    if ($from_tree) {
      $event = qq~
      <a onclick='
         document.getElementById("add_entry_button").style.display="inline";
         document.getElementById("$edit_ajax_id").innerHTML="";'> - no definitions found - </a>
      ~;
    } else {
      $event = " - no definitions found - ";
    }
    $term_table .= "<tr><td colspan=2>$event</td></tr>";
  }
  $term_table .= "</table></p>\n";
  my $content = $term_table;
  unless ($from_tree) {
    # re-render the search link so the user can refine the query
    # (BUG FIX: a stray single quote after the onclick attribute broke
    # the generated anchor tag)
    $content .= qq~
    <a style='cursor: pointer;' onclick="
       if (document.getElementById('$question_name').value) {
         execute_ajax('Ontology_lookup', '$edit_ajax_id',
                      'main_ajax=$main_ajax_id&edit_ajax=$edit_ajax_id&question_name_ajax=$question_name&question_type_ajax=$question_type&question_text_ajax=$question_text&selection='+document.getElementById('$question_name').value,
                      null, null, null, 'FormWizard|$wid');
       } else {
         alert('you must enter a term to search');
       }">
    <b>search term</b></a>
    ~;
  }
  return $content;
}
#
# Ontology on Demand List structure
#
# Render an 'Ontology on Demand' list question: a select box (or a
# FilterSelect component) fed from the OOD database category named by
# the question, plus -- for logged-in users -- an ajax link that opens
# the term editor (OOD_edit_list / OOD_add2list).
sub OOD_List {
  my ($self, $question) = @_;
  # get page functions
  my $name = $question->{name} || '';
  my $wid = $self->{_id};
  my $app = $self->application;
  my $cgi = $app->cgi;
  my $ood = $app->data_handle('OOD');
  # Set params to remember and submit to ajax functions
  my $main_ajax_id = $name ? "ajax_main_$name" : $cgi->param('main_ajax');
  my $edit_ajax_id = $name ? "ajax_edit_$name" : $cgi->param('edit_ajax');
  my $question_name = $name || $cgi->param('question_name_ajax');
  my $question_type = $question->{type} || $cgi->param('question_type_ajax');
  my $question_text = $question->{text} || $cgi->param('question_text_ajax') || '';
  my $question_def = (ref($question->{default}) && (ref($question->{default}) eq 'ARRAY')) ? $question->{default}->[0] : $question->{default};
  my $cat = $cgi->param('cat') || $question->{ontologyName} || $question->{ood_category} || $name;
  my $new_ood_entry = $cgi->param('new_ood_entry') || '';
  unless ($question_type) {
    return "<p>missing question type</p>";
  }
  unless ($ood) {
    $app->add_message('warning', "No OOD, please contact the administrator");
    return "<p>OOD not found</p>";
  }
  unless ($cat) {
    $app->add_message('warning', "No category for OOD, please contact the administrator");
    return "<p>Category $cat not found</p>";
  }
  # connect to DB and retrive data for list
  my $cats = $ood->Category->get_objects( {name => $cat} );
  my $category;
  if ( scalar(@$cats) ) {
    $category = $cats->[0];
  } else {
    # category does not exist yet -- create it on the fly
    $category = $ood->Category->create( {name => $cat,
                                         ID => "FormWizard_$cat",
                                         extendable => "1",
                                         description => "created automaticallly from xml template " .
                                         ($self->{config_file} ? $self->{config_file} : '')
                                        } );
  }
  unless(ref $category) {
    $app->add_message('warning', "category $question_name not found");
    return "<p>category not found</p>";
  }
  # add new term to DB
  if ($cgi->param('new_ood_entry')) {
    $self->OOD_add2list( $category, $app, $cgi, $ood );
  }
  my $entries = $self->get_list($ood, $category);
  if ($question->{sort_order} && $question->{sort_order} eq "alphabetical") {
    @$entries = sort { $a->name cmp $b->name } @$entries;
  }
  my @labels = map { $_->name } @$entries;
  my @values = map { $_->_id } @$entries;
  unshift(@labels, "Please select");
  unshift(@values, "unknown");
  my $select = "no component selected";
  if ($question->{'display'} && ($question->{'display'} eq "FilterSelect")) {
    $app->register_component('FilterSelect', "FilterSelect$name");
    my $filter_select_component = $app->component("FilterSelect$name");
    $filter_select_component->labels( \@labels );
    $filter_select_component->values( \@values );
    $filter_select_component->size(8);
    $filter_select_component->width(200);
    $filter_select_component->name($name);
    $select = $filter_select_component->output;
  }
  else{
    # NOTE(review): the popup gets the label list as its -values, so the
    # submitted value is the term NAME, not the entry _id -- downstream
    # code (OOD_edit_list) appears to rely on receiving the name;
    # confirm before changing this to \@values.
    $select = $cgi->popup_menu( -id => "parent_$name",
                                -name => $name,
                                -values => \@labels,
                                -default => $question_def
                              );
  }
  # field for final value
  my $table = "<table><tr>\n<td><div>$select</div></td>";
  if ( $app->session->user ) {
    # only logged-in users may extend the ontology via ajax
    $table .= qq~
    <td><div id="$edit_ajax_id" style="cursor: pointer;">
      <a style='cursor: pointer;' onclick="
        execute_ajax('OOD_edit_list', '$edit_ajax_id',
                     'main_ajax=$main_ajax_id&edit_ajax=$edit_ajax_id&question_name_ajax=$question_name&question_type_ajax=$question_type&question_text_ajax=$question_text&selection='+document.getElementById('parent_$name').options[document.getElementById('parent_$name').selectedIndex].value+'&cat=$cat',
                     null, null, null, 'FormWizard|$wid');">
      <b>add term</b></a>
    </div>~;
  }
  $table .= "</td></tr></table>\n";
  return $table;
}
# Ajax handler: render the small "add term" form for a list question.
# The new term will be linked in after the currently selected entry
# ($selection); with no selection it is added at the head of the list.
# Both buttons re-render the question via the question-type sub.
sub OOD_edit_list {
  my ($self) = @_;
  my $cgi = $self->application->cgi;
  my $main_ajax_id = $cgi->param('main_ajax');
  my $edit_ajax_id = $cgi->param('edit_ajax');
  my $question_name = $cgi->param('question_name_ajax');
  my $question_type = $cgi->param('question_type_ajax');
  my $selection = $cgi->param('selection');
  my $cat = $cgi->param('cat');
  # the placeholder option means "no parent chosen"
  if ($selection eq 'Please select') {
    $selection = '';
  }
  my $button_txt = ($selection eq 'unknown' || $selection eq '') ? 'add term' : "add term after '$selection'";
  my $fw_id = $self->{_id};
  return qq~
  <table><tr>
    <th>New term</th>
    <td><input id='new_ood_entry_term' name='new_ood_entry' type='text' size='30' maxlength='200'></td>
  </tr><tr>
    <th>Definition</th>
    <td><textarea id='new_ood_entry_definition' name='new_ood_entry_definition' value='' cols='30' rows='10'></textarea></td>
  </tr><tr>
    <td colspan=2>
      <input type="button" value="$button_txt" onclick="
        execute_ajax('$question_type', '$main_ajax_id',
                     'main_ajax=$main_ajax_id&edit_ajax=$edit_ajax_id&question_name_ajax=$question_name&question_type_ajax=$question_type&new_ood_parent=$selection&new_ood_entry='+document.getElementById('new_ood_entry_term').value+'&new_ood_entry_definition='+document.getElementById('new_ood_entry_definition').value+'&cat=$cat',
                     null, null, null, 'FormWizard|$fw_id');" />
      <input type="button" value="cancel" onclick="
        execute_ajax('$question_type', '$main_ajax_id',
                     'main_ajax=$main_ajax_id&edit_ajax=$edit_ajax_id&question_name_ajax=$question_name&question_type_ajax=$question_type&new_ood_entry='+document.getElementById('new_ood_entry_term').value+'&cat=$cat',
                     null, null, null, 'FormWizard|$fw_id');" />
    </td>
  </tr></table>~;
}
# Ajax handler: insert a user-supplied term into a list-type OOD
# category.  The list is stored as a parent->child chain of entries; a
# new term is spliced in directly after its chosen parent, or becomes
# the new head of the chain when no parent was given.
# Returns 1 on success, 0 on any error (a warning message is queued).
sub OOD_add2list{
  my ($self, $category, $app, $cgi, $ood) = @_;
  my $parent = $cgi->param('new_ood_parent') || "";
  my $new_term = $cgi->param('new_ood_entry') || "";
  my $definition = $cgi->param('new_ood_entry_definition') || "";
  unless ($new_term && $definition) {
    $app->add_message('warning', "No definition or term, aborting.");
    return 0;
  }
  # check if the name already exists
  my $entry = $ood->Entry->get_objects( { name => $new_term,
                                          category => $category } );
  if (scalar(@$entry)) {
    $app->add_message('warning', "term $new_term already exists in the ontology");
    return 0;
  }
  # the current head of the chain is the (single) entry without a parent
  my $root = $ood->Entry->get_objects( { category => $category,
                                         parent => undef } );
  $root = scalar(@$root) ? $root->[0] : undef;
  # term does not exists, create it
  my $new_node = $ood->Entry->create( { ID => $category->ID,
                                        name => $new_term,
                                        category => $category,
                                        definition => $definition,
                                        creator => $app->session->user,
                                        user_entry => '1',
                                        editable => '0',
                                      } );
  unless (ref $new_node) {
    $app->add_message('warning',"Can't add $new_term to " . $category->name . ", aborting");
    return 0;
  }
  if ($parent) {
    my $parent_object = $ood->Entry->get_objects( { name => $parent,
                                                    category => $category } );
    if (scalar(@$parent_object)) {
      $parent_object = $parent_object->[0];
      # splice the new node between the parent and its current successor
      my $child;
      if (scalar(@{$parent_object->child})) {
        $child = shift @{$parent_object->child};
      }
      push(@{$parent_object->child}, $new_node);
      $new_node->parent($parent_object);
      if ($child) {
        push(@{$new_node->child}, $child);
        $child->parent($new_node);
      }
    } else {
      $app->add_message('warning',"could not retrieve parent entry from ontology, aborting");
      $new_node->delete();
      return 0;
    }
  } elsif ($root) {
    # no parent chosen: the new node becomes the head, old head its child
    push(@{$new_node->child}, $root);
    $root->parent($new_node);
  }
  $app->add_message('info',"Entry " . $new_node->name . " for " . $new_node->category->name . " created");
  return 1;
}
sub get_list {
  # Return the entries of an OOD category as an arrayref ordered along
  # the parent->child chain, starting at the (single) root entry.  When
  # more than one root exists the chain is broken and the entries are
  # returned alphabetically instead.
  my ($self, $ood, $category) = @_;
  my $entries = $ood->Entry->get_objects( {category => $category} );
  # map parent-id -> successor entry so the chain can be walked forward
  my $parents = {};
  my $sorted_entries = [];
  foreach my $entry (@$entries) {
    if ($entry->parent) {
      $parents->{$entry->parent->_id} = $entry;
    } else {
      if (defined($sorted_entries->[0])) {
        # second root found: chain is broken, fall back to name sort
        @$entries = sort { $a->{name} cmp $b->{name} } @$entries;
        return $entries;
      }
      $sorted_entries->[0] = $entry;
    }
  }
  # walk the chain from the root; at most n-1 links can follow it
  for my $i (0 .. scalar(@$entries) - 2) {
    my $tail = $sorted_entries->[-1];
    # BUG FIX: the successor lookup must key on the entry id; the old
    # code used the stringified object reference, which never matched a
    # key, so only the root was ever returned.
    my $next = defined($tail) ? $parents->{$tail->_id} : undef;
    last unless defined $next;
    push @$sorted_entries, $next;
  }
  return $sorted_entries;
}
#
# Ontology on Demand Tree structure
#
# Render an 'Ontology on Demand' tree question: a selectable Tree
# component built from the OOD category, a multi-select holding the
# chosen terms (with add/remove/clear buttons wired up via jQuery), and
# -- for logged-in users -- an ajax link to extend the tree.  With
# $use_ontology set, the term editor also offers a terminizer lookup.
sub OOD_Tree {
  my ($self, $question, $use_ontology) = @_;
  my $wid = $self->{_id};
  my $name = $question->{name};
  my $app = $self->application;
  my $cgi = $app->cgi;
  my $ood = $app->data_handle('OOD');
  # Set params to remember and submit to ajax functions
  my $main_ajax_id = $name ? "ajax_main_$name" : $cgi->param('main_ajax') ;
  my $edit_ajax_id = $name ? "ajax_edit_$name" : $cgi->param('edit_ajax') ;
  my $question_name = $name || $cgi->param('question_name_ajax');
  my $question_type = "OOD_Tree";
  my $question_text = $question->{text} || $cgi->param('question_text_ajax') || '';
  my $question_def = (ref($question->{default}) && (ref($question->{default}) eq 'ARRAY')) ? $question->{default}->[0] : $question->{default};
  my $cat = $cgi->param('cat') || $question->{ontologyName} || $question->{name};
  my $new_ood_entry = $cgi->param('new_ood_entry') || '';
  unless ($question_type) {
    return "<p>missing question type</p>";
  }
  unless ($ood) {
    $app->add_message('warning', "No OOD, please contact the administrator");
    return "<p>OOD not found</p>";
  }
  unless ($cat) {
    $app->add_message('warning', "Category for OOD, please contact the administrator");
    return "<p>Category not found</p>";
  }
  # connect to DB and retrive data for list
  my $cats = $ood->Category->get_objects( {name => $cat} );
  my $category;
  if (scalar(@$cats)) {
    $category = $cats->[0];
  } else {
    # category does not exist yet -- create it on the fly
    $category = $ood->Category->create( {name => $cat,
                                         ID => "FormWizard_$cat",
                                         extendable => "1",
                                         description => "created automaticallly from xml template " .
                                         ($self->{config_file} ? $self->{config_file} : '')
                                        } );
  }
  unless (ref $category) {
    $app->add_message('warning', "category $question_name not found");
    return "<p>category not found</p>";
  }
  my $tree_component_name = "tree_".$question_name."_".$question->{type};
  $app->register_component('Tree', $tree_component_name);
  my $tree = $app->component( $tree_component_name );
  # add new term to DB
  if ($cgi->param('new_ood_entry')) {
    $self->OOD_add2tree($category, $app, $cgi, $ood);
  }
  my $entries = $self->get_tree($ood, $category, $question_def);
  my $tid = $tree->id;
  $tree->data($entries);
  $tree->selectable(1);
  $tree->select_leaves_only(0);
  $tree->name("tree_".$question_name);
  # field for final value
  unless (ref($question->{default})) {
    $question->{default} = [ $question->{default} ];
  }
  my $table = qq~
  <table><tr>
  <td><select name="selectall_$question_name" multiple="multiple" style="min-width:120px" size="10" id="q_sel_${cat}_$question_name">
  ~;
  # pre-populate the multi-select with the stored default terms
  foreach my $d (@{$question->{default}}) {
    if ($d) { $table .= "<option value='$d' selected=selected>$d</option>\n"; }
  }
  $table .= qq~
  </select></td>
  <td align="center" style="padding-left: 15px;">
    <input type="button" value=" <-- " id="b_add_${cat}_$question_name" /><br>
    <input type="button" value=" --> " id="b_del_${cat}_$question_name" /><br>
    <input type="button" value="Clear All" id="b_clear_${cat}_$question_name" />
  </td><td>
  <div style="padding-left: 15px;">~ . $tree->output . "</div></td>\n";
  if ( $app->session->user ) {
    # only logged-in users may extend the ontology via ajax
    $table .= qq~
    <td><div id="$edit_ajax_id" style="cursor: pointer;"><a onclick="
      if (document.getElementById('${tid}tree_$question_name')) {
        execute_ajax('OOD_edit_tree', '$edit_ajax_id', 'main_ajax=$main_ajax_id&edit_ajax=$edit_ajax_id&question_name_ajax=$question_name&question_type_ajax=$question_type&question_text_ajax=$question_text&selection='+document.getElementById('${tid}tree_$question_name').value+'&tid=${tid}&cat=$cat&use_ont=1', null, null, null, 'FormWizard|$wid');
      } else {
        alert('you must select a category first'); }">
      <b>add term</b></a></div>
    ~;
  }
  $table .= "</td></tr></table>\n";
  # jQuery wiring for the add/remove/clear buttons of the multi-select
  my $content = qq~
  <script type="text/javascript">
  \$(document).ready( function() {
     \$("#b_add_${cat}_$question_name").click( function() {
        var term = \$("#${tid}tree_$question_name").val();
        if ( term ) {
          \$("#q_sel_${cat}_$question_name").append('<option selected="selected" value="'+term+'">'+term+'</option>');
        }
        return false;
     });
     \$("#b_del_${cat}_$question_name").click( function() {
        \$("#q_sel_${cat}_$question_name option:selected").remove();
        return false;
     });
     \$("#b_clear_${cat}_$question_name").click( function() {
        \$("#q_sel_${cat}_$question_name option").remove();
        return false;
     });
  });
  </script>
  $table
  ~;
  return $content;
}
# Ajax handler: insert a user-supplied term into a tree-type OOD
# category as a child of the chosen parent (or as an unattached root
# when no parent was given).  Returns 1 on success, 0 on any error
# (a warning message is queued on the application).
sub OOD_add2tree {
  my ($self, $category, $app, $cgi, $ood) = @_;
  my $parent = $cgi->param('new_ood_parent') || "";
  my $new_term = $cgi->param('new_ood_entry') || "";
  my $definition = $cgi->param('new_ood_entry_definition') || "";
  unless ($new_term && $definition) {
    $app->add_message('warning', "No definition or term, aborting.");
    return 0;
  }
  # check if the name already exists
  my $entry = $ood->Entry->get_objects( { name => $new_term,
                                          category => $category } );
  if (scalar(@$entry)) {
    $app->add_message('warning', "term $new_term already exists in the ontology");
    return 0;
  }
  # NOTE(review): $root is looked up but not used below -- presumably a
  # leftover from the list variant; confirm before removing.
  my $root = $ood->Entry->get_objects( { category => $category,
                                         parent => undef } );
  if (scalar(@$root)) {
    $root = $root->[0];
  } else {
    $root = undef;
  }
  # term does not exists, create it
  my $new_node = $ood->Entry->create( { ID => $category->ID,
                                        name => $new_term,
                                        category => $category,
                                        definition => $definition,
                                        creator => $app->session->user,
                                        user_entry => '1',
                                        editable => '0',
                                      } );
  unless (ref $new_node) {
    $app->add_message('warning', "Can't add $new_term to " . $category->name . ", aborting");
    return 0;
  }
  if ($parent) {
    my $parent_object = $ood->Entry->get_objects( { name => $parent,
                                                    category => $category } );
    if (scalar(@$parent_object)) {
      $parent_object = $parent_object->[0];
      # attach the new term below its parent
      push @{$parent_object->child}, $new_node;
      $new_node->parent($parent_object);
    } else {
      $app->add_message('warning', "could not retrieve parent entry from ontology, aborting");
      $new_node->delete();
      return 0;
    }
  }
  $app->add_message('info' , "Entry " . $new_node->name . " for " . $new_node->category->name . " created");
  return 1;
}
sub OOD_edit_tree {
  # Ajax handler: render the "add term" editor for a tree question.
  # $selection is the currently selected tree node; an empty selection
  # offers creating a root node instead.  With use_ont set, a terminizer
  # lookup link is shown and the add button stays hidden until a term
  # has been picked from the lookup results.
  my ($self, $value) = @_;
  my $wid = $self->{_id};
  my $cgi = $self->application->cgi;
  my $main_ajax_id = $cgi->param('main_ajax');
  my $edit_ajax_id = $cgi->param('edit_ajax');
  my $question_name = $cgi->param('question_name_ajax');
  my $question_type = $cgi->param('question_type_ajax');
  my $selection = $cgi->param('selection');
  my $tid = $cgi->param('tid');
  my $cat = $cgi->param('cat');
  my $use_ontology = $cgi->param('use_ont');
  my $button = "";
  if ($selection eq 'unknown' || $selection eq '') {
    $button = qq~
    <input type="button" value="add root node" onclick="
      execute_ajax('$question_type', '$main_ajax_id',
      'main_ajax=$main_ajax_id&edit_ajax=$edit_ajax_id&question_name_ajax=$question_name&question_type_ajax=$question_type&new_ood_entry='+document.getElementById('new_ood_entry_term').value+'&new_ood_entry_definition='+document.getElementById('new_ood_entry_definition').value+'&cat=$cat',
      null, null, null, 'FormWizard|$wid');">
    ~;
  } else {
    my $visible = $use_ontology ? " style='display: none;'" : "";
    # BUG FIX: a comma was missing after '$main_ajax_id', producing
    # syntactically invalid JavaScript for the add button.
    $button .= qq~
    <input id="add_entry_button" type="button" value="add term as subcategory below $selection" onclick="
      execute_ajax('$question_type', '$main_ajax_id',
      'main_ajax=$main_ajax_id&edit_ajax=$edit_ajax_id&question_name_ajax=$question_name&question_type_ajax=$question_type&new_ood_parent='+document.getElementById('${tid}tree_$question_name').value+'&new_ood_entry='+document.getElementById('new_ood_entry_term').value+'&new_ood_entry_definition='+document.getElementById('new_ood_entry_definition').value+'&cat=$cat',
      null, null, null, 'FormWizard|$wid');"$visible>
    ~;
  }
  my $ajax_call = "";
  if ($use_ontology) {
    # offer a terminizer lookup that fills in term and definition
    $ajax_call = qq~
    <a style='cursor: pointer;' onclick="
       if (document.getElementById('new_ood_entry_term').value) {
         execute_ajax('Ontology_lookup', 'new_ood_entry_ont_hits',
                      'edit_ajax=new_ood_entry_ont_hits&target_name_term=new_ood_entry_term&target_name_desc=new_ood_entry_definition&from_tree=1&selection='+document.getElementById('new_ood_entry_term').value,
                      null, null, null, 'FormWizard|$wid');
       } else {
         alert('you must enter a term to search'); }">
    <b>search term</b></a>
    ~;
  }
  my $html = qq~
  <table>
  <tr>
    <th>New term</th>
    <td><input id='new_ood_entry_term' name='new_ood_entry' type='text' size='30' maxlength='200'>$ajax_call</td>
  </tr><tr>
    <th>Definition</th>
    <td><textarea id='new_ood_entry_definition' name='new_ood_entry_definition' value='' cols='30' rows='10'></textarea></td>
  </tr>~;
  if ($use_ontology) { $html .= "<tr><td colspan=2 id='new_ood_entry_ont_hits'></td></tr>\n"; }
  $html .= qq~<tr>
    <td colspan=2>$button <input type="button" value="cancel" onclick="
      execute_ajax('$question_type', '$main_ajax_id',
      'main_ajax=$main_ajax_id&edit_ajax=$edit_ajax_id&question_name_ajax=$question_name&question_type_ajax=$question_type&new_ood_entry='+document.getElementById('new_ood_entry_term').value+'&cat=$cat',
      null, null, null, 'FormWizard|$wid');"></td>
  </tr>
  </table>
  ~;
  return $html;
}
sub get_tree {
  # Build the nested entry structure for the Tree component: every entry
  # gets label/value keys (user-added terms are shown bold, the default
  # term is marked selected), children are grouped under their parent's
  # id, and tree_children() recursively attaches them below the
  # name-sorted root entries.
  my ($self, $ood, $category, $default) = @_;
  $default = "" unless $default;
  my $roots       = [];
  my $entries     = $ood->Entry->get_objects( {category => $category} );
  my $children_of = {};
  for my $entry (@$entries) {
    # user-supplied terms are rendered bold to set them apart
    $entry->{label} = $entry->user_entry
      ? "<b>" . $entry->{name} . "</b>"
      : $entry->{name};
    $entry->{value} = $entry->{name};
    if ($entry->{value} eq $default) {
      $entry->{selected} = "selected";
    }
    if ($entry->parent) {
      push @{ $children_of->{$entry->parent->_id} }, $entry;
    } else {
      push @$roots, $entry;
    }
  }
  @$roots = sort { $a->{name} cmp $b->{name} } @$roots;
  tree_children($_, $children_of) for @$roots;
  return $roots;
}
sub tree_children {
  # Recursively attach the child list of every node: nodes with an entry
  # in %$children_of adopt that list (and recurse into it), all other
  # nodes get an empty children arrayref.
  my ($node, $children_of) = @_;
  unless (exists $children_of->{$node->_id}) {
    $node->{children} = [];
    return;
  }
  my $kids = $children_of->{$node->_id};
  $node->{children} = $kids;
  tree_children($_, $children_of) for @$kids;
  return;
}
# Build the previous/next buttons for one wizard step.  'next' runs the
# client-side mandatory-field check before switching tabs; multi-step
# (non-exclusive) categories navigate between sub-tabs instead.  On the
# final step 'next' becomes a 'finish' button that submits the form, or
# is suppressed when submit_button() is off.  Returns ($prev, $next).
sub create_navigation_buttons{
  my ( $self , $tabview , $current_tab_nr , $max_tabs , $step_nr_global , $max_steps_global , $step_nr_local , $max_steps_local , $exclusive , $orientation) = @_ ;
  my $next = "<input type='button' value='next' onclick='if(check_mandatory(".$self->id.", $step_nr_global)){tab_view_select(\"".$tabview->id()."\", ". ($current_tab_nr+1) ."$orientation);}'>";
  my $last = "<input type='button' value='previous' onclick='tab_view_select(\"".$tabview->id()."\", ".($current_tab_nr - 1)."$orientation);'>";
  # multi-step categories navigate between sub-tabs of the same tab
  if (($max_steps_local > 1) && (! $exclusive)) {
    $next = "<input type='button' value='next' onclick='if(check_mandatory(".$self->id.", $step_nr_global)){tab_view_select(\"".$tabview->id()."\", $current_tab_nr , \"sub\" , $step_nr_local);}'>" if ($step_nr_local < $max_steps_local ) ;
    $last = "<input type='button' value='previous' onclick='tab_view_select(\"".$tabview->id()."\", $current_tab_nr , \"sub\" , ".($step_nr_local - 2).");'>" if ($step_nr_local > 1) ;
  }
  # no 'previous' on the very first step
  if ($step_nr_global == 1) {
    $last = "";
  }
  # last step: replace 'next' with 'finish' (or hide it entirely)
  if ( ( ($current_tab_nr + 1) == $max_tabs and $exclusive ) or $step_nr_global == $max_steps_global ) {
    if ($self->submit_button){
      $next = "<input type='button' value='finish' onclick='enable_multi_select() ; ".$self->form_name().".submit();'>" ;
    }
    else{
      $next = '&nbsp;';
    }
  }
  return ($last , $next);
}
# Render a category group as a dropdown whose selection reveals the
# matching (initially hidden) question div; each step's questions are
# laid out into its own div.  Also emits a helper JavaScript function
# for toggling the divs.
sub create_group_selection_box{
  my ($self , $steps , $catName , $cat_nr) = @_ ;
  my $application = $self->application;
  my $cgi = $application->cgi;
  my @values;
  my %labels;
  my $popup_name = "popup_".$catName ;
  # sanitize names for use in element ids
  $catName =~ s/[\s\'\"]+/_/g ;
  $popup_name =~ s/[\s\'\"]+/_/g ;
  my $content = '';
  # NOTE(review): the onchange handler calls switch_category_display()
  # while the script emitted below defines switch_display_$catName --
  # verify which function is actually used before relying on the
  # generated JavaScript.
  $content .= "<select id='$popup_name' name='$popup_name' onchange=\"switch_category_display('$popup_name')\">\n" ;
  $content .= "<option value=''>Please select</option>\n" ;
  my $tab_divs = '' ;
  my $step_nr = 0 ;
  foreach my $step (@$steps){
    my $label = $step->{title} || $step->{data}->{title} || "-1" ;
    my $value = $label ;
    $value =~ s/[\s\"\'\/]+/_/g ;
    push @values , $value ;
    $labels{$value} = $label ;
    $content .= "<option value='$value'>$label</option>\n" ;
    # render the step's questions into its own (hidden) div
    my $step_content = $self->layout_questions( $step , $cat_nr, $step_nr) ;
    $step_nr++;
    $tab_divs .= " <div id='div_sub_$value' style='display:none'>$step_content</div>\n";
  }
  $content .= "</select>\n<hr>\n";
  # hidden field remembering which div is currently shown
  $content .= "<input type='hidden' name='current_selection_$popup_name' id='current_selection_$popup_name' value='' >\n";
  $tab_divs .= "<div id='div_display_$catName' style='display:none'></div>\n" ;
  $content .= $tab_divs . "\n" ;
  my $scripts = "<script>\n";
  # NOTE(review): the alert() calls below look like leftover debugging
  # output -- confirm and remove from the generated JS if so.
  $scripts .= qq~
  function switch_display_$catName (DISPLAY , MENU) {
     var menu = document.getElementById(MENU);
     var selection = menu.options[menu.options.selectedIndex].value;
     var old_selection = document.getElementById( 'current_selection_' + MENU ).value ;
     alert( "New: " + selection + " Index: " + menu.options.selectedIndex );
     alert ("Old: " + old_selection);
     var new_div = document.getElementById( "div_sub_" + selection );
     var old_div = document.getElementById( "div_sub_" + old_selection );
     document.getElementById( 'current_selection_' + MENU ).value = selection ;
     new_div.style.display="inline";
     old_div.style.display="none";
  }~;
  $scripts .= "</script>\n";
  $content .= $scripts ;
  return $content ;
};
sub debug {
  # Read-only accessor for the debug flag.
  my $self = shift;
  return $self->{debug};
}
| paczian/MG-RAST | src/WebApplication/WebComponent/FormWizard.pm | Perl | bsd-2-clause | 49,578 |
# vim: ts=4 sts=4 sw=4 et:
package HTTP::Tiny;
use strict;
use warnings;
# ABSTRACT: A small, simple, correct HTTP/1.1 client
our $VERSION = '0.017'; # VERSION
use Carp ();
my @attributes;
BEGIN {
    # Generate a combined getter/setter for each constructor attribute.
    # @attributes is also reused by new() to pick recognized arguments.
    @attributes = qw(agent default_headers max_redirect max_size proxy timeout);
    no strict 'refs';
    for my $field (@attributes) {
        *{$field} = sub {
            my $self = shift;
            if (@_) {
                return $self->{$field} = shift;
            }
            return $self->{$field};
        };
    }
}
sub new {
    # Construct a new client.  %args may contain any of the attributes
    # declared above; unknown keys are silently ignored.  A well-formed
    # $ENV{http_proxy} is honored unless the caller supplied 'proxy'
    # (even as undef); a malformed environment value croaks.
    my ($class, %args) = @_;

    # default agent string: "Class-Name/VERSION"
    (my $default_agent = $class) =~ s{::}{-}g;
    my $self = {
        agent        => $default_agent . "/" . ($class->VERSION || 0),
        max_redirect => 5,
        timeout      => 60,
    };
    $self->{$_} = $args{$_} for grep { exists $args{$_} } @attributes;

    # Never override proxy argument as this breaks backwards compat.
    if ( !exists $self->{proxy} ) {
        my $env_proxy = $ENV{http_proxy};
        if ($env_proxy) {
            if ( $env_proxy =~ m{\Ahttp://[^/?#:@]+:\d+/?\z} ) {
                $self->{proxy} = $env_proxy;
            }
            else {
                Carp::croak(qq{Environment 'http_proxy' must be in format http://<host>:<port>/\n});
            }
        }
    }
    return bless $self, $class;
}
# Generate the verb helpers get/head/put/post/delete: each validates its
# arguments and delegates to request() with the upper-cased method name.
# String eval is used so the method name and usage message are baked
# into each generated sub.
for my $sub_name ( qw/get head put post delete/ ) {
    my $req_method = uc $sub_name;
    no strict 'refs';
    eval <<"HERE"; ## no critic
    sub $sub_name {
        my (\$self, \$url, \$args) = \@_;
        \@_ == 2 || (\@_ == 3 && ref \$args eq 'HASH')
          or Carp::croak(q/Usage: \$http->$sub_name(URL, [HASHREF])/ . "\n");
        return \$self->request('$req_method', \$url, \$args || {});
    }
HERE
}
sub post_form {
    # POST form data (hash or array ref) as
    # application/x-www-form-urlencoded.  Header names supplied via
    # $args are canonicalized to lower case, and any caller-supplied
    # content-type is overridden.  Note: $args->{headers} is removed
    # from the caller's hash (matches the original behavior).
    my ($self, $url, $data, $args) = @_;
    (@_ == 3 || @_ == 4 && ref $args eq 'HASH')
        or Carp::croak(q/Usage: $http->post_form(URL, DATAREF, [HASHREF])/ . "\n");

    my %lc_headers;
    my $given = $args->{headers} || {};
    for my $key (keys %$given) {
        $lc_headers{lc $key} = $given->{$key};
    }
    delete $args->{headers};

    my %options = (
        %$args,
        content => $self->www_form_urlencode($data),
        headers => {
            %lc_headers,
            'content-type' => 'application/x-www-form-urlencoded',
        },
    );
    return $self->request('POST', $url, \%options);
}
# Conditionally fetch URL into FILE.  Sends If-Modified-Since when FILE
# already exists, streams the body into a temp file, and on success
# renames it over FILE and applies the server's Last-Modified time.
# A 304 response also counts as success.  Returns the response hashref.
sub mirror {
    my ($self, $url, $file, $args) = @_;
    @_ == 3 || (@_ == 4 && ref $args eq 'HASH')
      or Carp::croak(q/Usage: $http->mirror(URL, FILE, [HASHREF])/ . "\n");
    if ( -e $file and my $mtime = (stat($file))[9] ) {
        $args->{headers}{'if-modified-since'} ||= $self->_http_date($mtime);
    }
    # download into a randomly-named temp file so a failed transfer can
    # never clobber an existing mirror copy
    my $tempfile = $file . int(rand(2**31));
    open my $fh, ">", $tempfile
        or Carp::croak(qq/Error: Could not open temporary file $tempfile for downloading: $!\n/);
    binmode $fh;
    $args->{data_callback} = sub { print {$fh} $_[0] };
    my $response = $self->request('GET', $url, $args);
    close $fh
        or Carp::croak(qq/Error: Could not close temporary file $tempfile: $!\n/);
    if ( $response->{success} ) {
        rename $tempfile, $file
            or Carp::croak(qq/Error replacing $file with $tempfile: $!\n/);
        my $lm = $response->{headers}{'last-modified'};
        if ( $lm and my $mtime = $self->_parse_http_date($lm) ) {
            # propagate the server's modification time to the local copy
            utime $mtime, $mtime, $file;
        }
    }
    $response->{success} ||= $response->{status} eq '304';
    unlink $tempfile;
    return $response;
}
# Methods safe to retry once on a broken socket (RFC 2616 section 8.1.4).
my %idempotent = map { $_ => 1 } qw/GET HEAD PUT DELETE OPTIONS TRACE/;
# request(METHOD, URL, [\%options]) - public entry point.  Any exception
# from _request() becomes a synthetic 599 response whose content is the
# exception text, so callers never need eval.
sub request {
    my ($self, $method, $url, $args) = @_;
    @_ == 3 || (@_ == 4 && ref $args eq 'HASH')
      or Carp::croak(q/Usage: $http->request(METHOD, URL, [HASHREF])/ . "\n");
    $args ||= {}; # we keep some state in this during _request
    # RFC 2616 Section 8.1.4 mandates a single retry on broken socket
    my $response;
    for ( 0 .. 1 ) {
        $response = eval { $self->_request($method, $url, $args) };
        # Retry only idempotent methods, and only for socket breakage.
        last unless $@ && $idempotent{$method}
            && $@ =~ m{^(?:Socket closed|Unexpected end)};
    }
    if (my $e = "$@") {
        $response = {
            success => q{},
            status  => 599,
            reason  => 'Internal Exception',
            content => $e,
            headers => {
                'content-type'   => 'text/plain',
                'content-length' => length $e,
            }
        };
    }
    return $response;
}
# www_form_urlencode(DATAREF) - encode a hash or array reference of
# key/value pairs as an x-www-form-urlencoded string.  Array-ref values
# are expanded to repeated key=value pairs; the output terms are sorted.
sub www_form_urlencode {
    my ($self, $data) = @_;
    (@_ == 2 && ref $data)
        or Carp::croak(q/Usage: $http->www_form_urlencode(DATAREF)/ . "\n");
    (ref $data eq 'HASH' || ref $data eq 'ARRAY')
        or Carp::croak("form data must be a hash or array reference");

    my @pairs = ref $data eq 'HASH' ? %$data : @$data;
    @pairs % 2 == 0
        or Carp::croak("form data reference must have an even number of terms\n");

    my @encoded;
    while (@pairs) {
        my $key   = shift @pairs;
        my $value = shift @pairs;
        if (ref $value eq 'ARRAY') {
            # One key, several values: requeue as repeated scalar pairs.
            unshift @pairs, map { ($key, $_) } @$value;
        }
        else {
            push @encoded,
                join '=', map { $self->_uri_escape($_) } $key, $value;
        }
    }
    return join '&', sort @encoded;
}
#--------------------------------------------------------------------------#
# private methods
#--------------------------------------------------------------------------#
# Default port per scheme; used to decide whether host_port (and thus
# the Host header) needs an explicit ":port" suffix.
my %DefaultPort = (
    http => 80,
    https => 443,
);
# _request(METHOD, URL, \%args) - perform one HTTP transaction, possibly
# recursing once per redirect.  Dies on protocol/socket errors; the
# public request() converts that into a 599 response.
sub _request {
    my ($self, $method, $url, $args) = @_;
    my ($scheme, $host, $port, $path_query) = $self->_split_url($url);
    my $request = {
        method    => $method,
        scheme    => $scheme,
        host_port => ($port == $DefaultPort{$scheme} ? $host : "$host:$port"),
        uri       => $path_query,
        headers   => {},
    };
    my $handle  = HTTP::Tiny::Handle->new(timeout => $self->{timeout});
    if ($self->{proxy}) {
        # Via a proxy the request line carries the absolute URI.
        $request->{uri} = "$scheme://$request->{host_port}$path_query";
        die(qq/HTTPS via proxy is not supported\n/)
            if $request->{scheme} eq 'https';
        $handle->connect(($self->_split_url($self->{proxy}))[0..2]);
    }
    else {
        $handle->connect($scheme, $host, $port);
    }
    $self->_prepare_headers_and_cb($request, $args);
    $handle->write_request($request);
    my $response;
    # Skip 1XX informational responses, per the HTTP/1.1 spec.
    do { $response = $handle->read_response_header }
        until (substr($response->{status},0,1) ne '1');
    if ( my @redir_args = $self->_maybe_redirect($request, $response, $args) ) {
        $handle->close;
        # @redir_args is (method, url); $args carries the redirect count.
        return $self->_request(@redir_args, $args);
    }
    if ($method eq 'HEAD' || $response->{status} =~ /^[23]04/) {
        # response has no message body
    }
    else {
        my $data_cb = $self->_prepare_data_cb($response, $args);
        $handle->read_body($data_cb, $response);
    }
    $handle->close;
    $response->{success} = substr($response->{status},0,1) eq '2';
    return $response;
}
# _prepare_headers_and_cb(REQUEST, ARGS) - merge default and per-request
# headers (lower-cased), set mandatory Host/Connection/User-Agent, and
# prepare the body iterator ($request->{cb}) plus framing headers.
sub _prepare_headers_and_cb {
    my ($self, $request, $args) = @_;
    # Per-request headers override default_headers (applied second).
    for ($self->{default_headers}, $args->{headers}) {
        next unless defined;
        while (my ($k, $v) = each %$_) {
            $request->{headers}{lc $k} = $v;
        }
    }
    $request->{headers}{'host'}         = $request->{host_port};
    $request->{headers}{'connection'}   = "close";
    $request->{headers}{'user-agent'} ||= $self->{agent};
    if (defined $args->{content}) {
        $request->{headers}{'content-type'} ||= "application/octet-stream";
        if (ref $args->{content} eq 'CODE') {
            # Iterator body: chunked unless the caller fixed the framing.
            $request->{headers}{'transfer-encoding'} = 'chunked'
              unless $request->{headers}{'content-length'}
                  || $request->{headers}{'transfer-encoding'};
            $request->{cb} = $args->{content};
        }
        else {
            my $content = $args->{content};
            if ( $] ge '5.008' ) {
                # Body must be bytes, not wide characters.
                utf8::downgrade($content, 1)
                    or die(qq/Wide character in request message body\n/);
            }
            $request->{headers}{'content-length'} = length $content
              unless $request->{headers}{'content-length'}
                  || $request->{headers}{'transfer-encoding'};
            # One-shot iterator: returns the body once, then ''.
            $request->{cb} = sub { substr $content, 0, length $content, '' };
        }
        $request->{trailer_cb} = $args->{trailer_callback}
            if ref $args->{trailer_callback} eq 'CODE';
    }
    return;
}
# _prepare_data_cb(RESPONSE, ARGS) - choose the callback that will
# receive body chunks.  The caller's data_callback is used only for 2XX
# responses; otherwise chunks accumulate in $response->{content},
# enforcing max_size when configured.
sub _prepare_data_cb {
    my ($self, $response, $args) = @_;
    my $data_cb = $args->{data_callback};
    $response->{content} = '';

    unless ($data_cb && $response->{status} =~ /^2/) {
        if (defined $self->{max_size}) {
            $data_cb = sub {
                my ($chunk, $resp) = @_;
                $resp->{content} .= $chunk;
                # Abort as soon as the accumulated body is too large.
                die(qq/Size of response body exceeds the maximum allowed of $self->{max_size}\n/)
                    if length $resp->{content} > $self->{max_size};
            };
        }
        else {
            $data_cb = sub { $_[1]->{content} .= $_[0] };
        }
    }
    return $data_cb;
}
# _maybe_redirect(REQUEST, RESPONSE, ARGS) - decide whether to follow a
# redirect.  Returns (method, absolute_url) when the response should be
# followed, or the empty list otherwise.  303 always redirects (as GET);
# 301/302/307 redirect only for GET/HEAD.  The redirect count lives in
# $args->{redirects} and is capped by max_redirect.
sub _maybe_redirect {
    my ($self, $request, $response, $args) = @_;
    my $headers = $response->{headers};
    my ($status, $method) = ($response->{status}, $request->{method});
    # BUG FIX: the original pattern /^GET|HEAD$/ bound the anchors to the
    # alternatives separately ("^GET" OR "HEAD$"), so e.g. a method of
    # "GETX" was also auto-redirected.  The grouped form matches exactly
    # GET or HEAD.
    if (($status eq '303' or ($status =~ /^30[127]/ && $method =~ /^(?:GET|HEAD)$/))
        and $headers->{location}
        and ++$args->{redirects} <= $self->{max_redirect}
    ) {
        # A Location beginning with "/" is resolved against the original
        # request's scheme and host.
        my $location = ($headers->{location} =~ /^\//)
            ? "$request->{scheme}://$request->{host_port}$headers->{location}"
            : $headers->{location} ;
        return (($status eq '303' ? 'GET' : $method), $location);
    }
    return;
}
# _split_url(URL) - decompose a URL into (scheme, host, port,
# path_query).  Works as a function or method call (pop takes the last
# argument).  Userinfo is stripped; the port defaults to 80/443 for
# http/https, undef for other schemes; the path defaults to "/".
sub _split_url {
    my $url = pop;
    # URI regex adapted from the URI module
    my ($scheme, $authority, $path_query) =
        $url =~ m<\A([^:/?#]+)://([^/?#]*)([^#]*)>
            or die(qq/Cannot parse URL: '$url'\n/);

    $scheme     = lc $scheme;
    $path_query = "/$path_query" unless $path_query =~ m<\A/>;

    # Empty authority means localhost; drop "user:pass@" userinfo.
    my $host = length($authority) ? lc $authority : 'localhost';
    $host =~ s/\A[^@]*@//;

    my $port;
    if ($host =~ s/:([0-9]*)\z// && length $1) {
        $port = $1;
    }
    else {
        $port = $scheme eq 'http'  ? 80
              : $scheme eq 'https' ? 443
              :                      undef;
    }
    return ($scheme, $host, $port, $path_query);
}
# Date conversions adapted from HTTP::Date
my $DoW = "Sun|Mon|Tue|Wed|Thu|Fri|Sat";
my $MoY = "Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec";
sub _http_date {
my ($sec, $min, $hour, $mday, $mon, $year, $wday) = gmtime($_[1]);
return sprintf("%s, %02d %s %04d %02d:%02d:%02d GMT",
substr($DoW,$wday*4,3),
$mday, substr($MoY,$mon*4,3), $year+1900,
$hour, $min, $sec
);
}
# _parse_http_date(STR) - parse the three date formats permitted by
# RFC 2616 (RFC 1123, RFC 850, asctime).  Returns epoch seconds, or
# undef when the string does not parse or predates the epoch.
sub _parse_http_date {
    my ($self, $str) = @_;
    require Time::Local;
    my @tl_parts;
    if ($str =~ /^[SMTWF][a-z]+, +(\d{1,2}) ($MoY) +(\d\d\d\d) +(\d\d):(\d\d):(\d\d) +GMT$/) {
        @tl_parts = ($6, $5, $4, $1, (index($MoY,$2)/4), $3);
    }
    elsif ($str =~ /^[SMTWF][a-z]+, +(\d\d)-($MoY)-(\d{2,4}) +(\d\d):(\d\d):(\d\d) +GMT$/ ) {
        @tl_parts = ($6, $5, $4, $1, (index($MoY,$2)/4), $3);
    }
    elsif ($str =~ /^[SMTWF][a-z]+ +($MoY) +(\d{1,2}) +(\d\d):(\d\d):(\d\d) +(?:[^0-9]+ +)?(\d\d\d\d)$/ ) {
        @tl_parts = ($5, $4, $3, $2, (index($MoY,$1)/4), $6);
    }
    # index($MoY,...)/4 maps a month name to its 0-based month number.
    # eval guards against timegm() croaking on out-of-range values.
    return eval {
        my $t = @tl_parts ? Time::Local::timegm(@tl_parts) : -1;
        $t < 0 ? undef : $t;
    };
}
# URI escaping adapted from URI::Escape
# c.f. http://www.w3.org/TR/html4/interact/forms.html#h-17.13.4.1
# perl 5.6 ready UTF-8 encoding adapted from JSON::PP
# Byte -> %XX escape table; space maps to '+' per form encoding rules.
my %escape_of = map { chr($_) => sprintf('%%%02X', $_) } 0 .. 255;
$escape_of{' '} = '+';
# Everything outside RFC 3986 "unreserved" must be escaped.
my $unsafe_re = qr/[^A-Za-z0-9\-\._~]/;
# _uri_escape(STR) - UTF-8 encode STR and percent-escape unsafe bytes.
sub _uri_escape {
    my ($self, $str) = @_;
    if ($] ge '5.008') {
        utf8::encode($str);
    }
    else {
        # perl 5.6: UTF-8 encode a byte string, then clear the UTF-8 flag.
        $str = pack('U*', unpack('C*', $str))
            if ( length $str == do { use bytes; length $str } );
        $str = pack('C*', unpack('C*', $str));
    }
    $str =~ s/($unsafe_re)/$escape_of{$1}/ge;
    return $str;
}
package
    HTTP::Tiny::Handle; # hide from PAUSE/indexers
use strict;
use warnings;
use Errno      qw[EINTR EPIPE];
use IO::Socket qw[SOCK_STREAM];
# Buffer size used for all socket reads and writes.
sub BUFSIZE () { 32768 } ## no critic
# Render a string with control bytes made visible, for error messages.
my $Printable = sub {
    local $_ = shift;
    s/\r/\\r/g;
    s/\n/\\n/g;
    s/\t/\\t/g;
    s/([^\x20-\x7E])/sprintf('\\x%.2X', ord($1))/ge;
    $_;
};
# RFC 2616 "token" characters, legal in HTTP header field names.
my $Token = qr/[\x21\x23-\x27\x2A\x2B\x2D\x2E\x30-\x39\x41-\x5A\x5E-\x7A\x7C\x7E]/;
# new(%args) - construct a handle with an empty read buffer and sane
# limits; any default (timeout, max_line_size, max_header_lines) may be
# overridden via %args.
sub new {
    my ($class, %args) = @_;
    my %self = (
        rbuf             => '',
        timeout          => 60,
        max_line_size    => 16384,
        max_header_lines => 64,
    );
    # Caller-supplied values win over the defaults above.
    %self = (%self, %args);
    return bless \%self, $class;
}
# Options passed to IO::Socket::SSL's verify_hostname().
my $ssl_verify_args = {
    check_cn => "when_only",
    wildcards_in_alt => "anywhere",
    wildcards_in_cn => "anywhere"
};
# connect(SCHEME, HOST, PORT) - open the TCP connection, upgrading to
# SSL for https.  Dies on unsupported schemes or any connect failure.
sub connect {
    @_ == 4 || die(q/Usage: $handle->connect(scheme, host, port)/ . "\n");
    my ($self, $scheme, $host, $port) = @_;
    if ( $scheme eq 'https' ) {
        # https support is optional; load IO::Socket::SSL lazily.
        eval "require IO::Socket::SSL"
            unless exists $INC{'IO/Socket/SSL.pm'};
        die(qq/IO::Socket::SSL must be installed for https support\n/)
            unless $INC{'IO/Socket/SSL.pm'};
    }
    elsif ( $scheme ne 'http' ) {
        die(qq/Unsupported URL scheme '$scheme'\n/);
    }
    $self->{fh} = 'IO::Socket::INET'->new(
        PeerHost  => $host,
        PeerPort  => $port,
        Proto     => 'tcp',
        Type      => SOCK_STREAM,
        Timeout   => $self->{timeout}
    ) or die(qq/Could not connect to '$host:$port': $@\n/);
    binmode($self->{fh})
        or die(qq/Could not binmode() socket: '$!'\n/);
    if ( $scheme eq 'https') {
        # NOTE(review): only the certificate's hostname is checked below;
        # no CA chain verification is configured here (this matches the
        # LIMITATIONS section of the POD).
        IO::Socket::SSL->start_SSL($self->{fh});
        ref($self->{fh}) eq 'IO::Socket::SSL'
            or die(qq/SSL connection failed for $host\n/);
        $self->{fh}->verify_hostname( $host, $ssl_verify_args )
            or die(qq/SSL certificate not valid for $host\n/);
    }
    $self->{host} = $host;
    $self->{port} = $port;
    return $self;
}
# close() - shut down the socket; dies if the close itself fails.
sub close {
    @_ == 1 || die(q/Usage: $handle->close()/ . "\n");
    my ($self) = @_;
    CORE::close($self->{fh})
        or die(qq/Could not close socket: '$!'\n/);
}
# write(BUF) - write the entire buffer to the socket, waiting for
# writability with the configured timeout and restarting on EINTR.
# Returns the number of bytes written; dies on timeout or socket error.
sub write {
    @_ == 2 || die(q/Usage: $handle->write(buf)/ . "\n");
    my ($self, $buf) = @_;
    if ( $] ge '5.008' ) {
        # Only byte strings may hit the wire.
        utf8::downgrade($buf, 1)
            or die(qq/Wide character in write()\n/);
    }
    my $len = length $buf;
    my $off = 0;
    # Surface a closed peer as an EPIPE error below, not a fatal signal.
    local $SIG{PIPE} = 'IGNORE';
    while () {
        $self->can_write
            or die(qq/Timed out while waiting for socket to become ready for writing\n/);
        my $r = syswrite($self->{fh}, $buf, $len, $off);
        if (defined $r) {
            $len -= $r;
            $off += $r;
            last unless $len > 0;
        }
        elsif ($! == EPIPE) {
            die(qq/Socket closed by remote server: $!\n/);
        }
        elsif ($! != EINTR) {
            die(qq/Could not write to socket: '$!'\n/);
        }
    }
    return $off;
}
# read(LEN, [ALLOW_PARTIAL]) - return up to LEN bytes, serving first
# from the rbuf lookahead buffer, then from the socket.  Dies on a short
# read unless ALLOW_PARTIAL is true.
sub read {
    @_ == 2 || @_ == 3 || die(q/Usage: $handle->read(len [, allow_partial])/ . "\n");
    my ($self, $len, $allow_partial) = @_;
    my $buf  = '';
    my $got = length $self->{rbuf};
    if ($got) {
        # Drain buffered bytes before touching the socket.
        my $take = ($got < $len) ? $got : $len;
        $buf  = substr($self->{rbuf}, 0, $take, '');
        $len -= $take;
    }
    while ($len > 0) {
        $self->can_read
            or die(q/Timed out while waiting for socket to become ready for reading/ . "\n");
        my $r = sysread($self->{fh}, $buf, $len, length $buf);
        if (defined $r) {
            # EOF: stop reading; the check below decides if that's fatal.
            last unless $r;
            $len -= $r;
        }
        elsif ($! != EINTR) {
            die(qq/Could not read from socket: '$!'\n/);
        }
    }
    if ($len && !$allow_partial) {
        die(qq/Unexpected end of stream\n/);
    }
    return $buf;
}
# readline() - return the next LF- or CRLF-terminated line (terminator
# included), buffering extra bytes in rbuf.  Dies on timeout, EOF before
# a complete line, or a line exceeding max_line_size.
sub readline {
    @_ == 1 || die(q/Usage: $handle->readline()/ . "\n");
    my ($self) = @_;
    while () {
        # Consume a complete line from the front of the buffer, if any.
        if ($self->{rbuf} =~ s/\A ([^\x0D\x0A]* \x0D?\x0A)//x) {
            return $1;
        }
        if (length $self->{rbuf} >= $self->{max_line_size}) {
            die(qq/Line size exceeds the maximum allowed size of $self->{max_line_size}\n/);
        }
        $self->can_read
            or die(qq/Timed out while waiting for socket to become ready for reading\n/);
        my $r = sysread($self->{fh}, $self->{rbuf}, BUFSIZE, length $self->{rbuf});
        if (defined $r) {
            # Zero bytes means EOF with an incomplete line pending.
            last unless $r;
        }
        elsif ($! != EINTR) {
            die(qq/Could not read from socket: '$!'\n/);
        }
    }
    die(qq/Unexpected end of stream while looking for line\n/);
}
# read_header_lines([HEADERS]) - read header lines until the blank line,
# folding continuation lines and collecting repeated fields into array
# refs.  Field names are lower-cased.  Returns the headers hashref.
sub read_header_lines {
    @_ == 1 || @_ == 2 || die(q/Usage: $handle->read_header_lines([headers])/ . "\n");
    my ($self, $headers) = @_;
    $headers ||= {};
    my $lines   = 0;
    # $val points at the most recent value, so a continuation line knows
    # where to append.
    my $val;
    while () {
         my $line = $self->readline;
         if (++$lines >= $self->{max_header_lines}) {
             die(qq/Header lines exceeds maximum number allowed of $self->{max_header_lines}\n/);
         }
         elsif ($line =~ /\A ([^\x00-\x1F\x7F:]+) : [\x09\x20]* ([^\x0D\x0A]*)/x) {
             my ($field_name) = lc $1;
             if (exists $headers->{$field_name}) {
                 # Repeated field: promote the value to an array ref.
                 for ($headers->{$field_name}) {
                     $_ = [$_] unless ref $_ eq "ARRAY";
                     push @$_, $2;
                     $val = \$_->[-1];
                 }
             }
             else {
                 $val = \($headers->{$field_name} = $2);
             }
         }
         elsif ($line =~ /\A [\x09\x20]+ ([^\x0D\x0A]*)/x) {
             # Obs-fold continuation line: append to the previous value.
             $val
               or die(qq/Unexpected header continuation line\n/);
             next unless length $1;
             $$val .= ' ' if length $$val;
             $$val .= $1;
         }
         elsif ($line =~ /\A \x0D?\x0A \z/x) {
            # Blank line terminates the header block.
            last;
         }
         else {
            die(q/Malformed header line: / . $Printable->($line) . "\n");
         }
    }
    return $headers;
}
# write_request(REQUEST) - emit the request line and headers, then the
# body when a content iterator was prepared.
sub write_request {
    @_ == 2 || die(q/Usage: $handle->write_request(request)/ . "\n");
    my ($self, $request) = @_;
    my ($method, $uri, $headers) = @{$request}{qw/method uri headers/};
    $self->write_request_header($method, $uri, $headers);
    $self->write_body($request) if $request->{cb};
    return;
}
# Canonical capitalization for header names that word-capitalization
# cannot produce; extended at runtime as new names are seen.
my %HeaderCase = (
    'content-md5'      => 'Content-MD5',
    'etag'             => 'ETag',
    'te'               => 'TE',
    'www-authenticate' => 'WWW-Authenticate',
    'x-xss-protection' => 'X-XSS-Protection',
);
# write_header_lines(HEADERS) - serialize a header hashref (array-ref
# values become repeated fields) followed by the blank line; returns the
# byte count written.  Dies on invalid field names or values.
sub write_header_lines {
    (@_ == 2 && ref $_[1] eq 'HASH') || die(q/Usage: $handle->write_header_lines(headers)/ . "\n");
    my($self, $headers) = @_;
    my $buf = '';
    while (my ($k, $v) = each %$headers) {
        my $field_name = lc $k;
        if (exists $HeaderCase{$field_name}) {
            $field_name = $HeaderCase{$field_name};
        }
        else {
            $field_name =~ /\A $Token+ \z/xo
              or die(q/Invalid HTTP header field name: / . $Printable->($field_name) . "\n");
            $field_name =~ s/\b(\w)/\u$1/g;
            # Cache the canonical form for subsequent requests.
            $HeaderCase{lc $field_name} = $field_name;
        }
        for (ref $v eq 'ARRAY' ? @$v : $v) {
            # SECURITY FIX: reject CR/LF anywhere in the value to block
            # header injection.  The original test (/[^\x0D\x0A]/) only
            # required ONE non-CR/LF character, so values with embedded
            # CR/LF slipped through, while legal empty values died.
            defined $_ && /\A [^\x0D\x0A]* \z/x
              or die(qq/Invalid HTTP header field value ($field_name): / . $Printable->(defined $_ ? $_ : q{}) . "\n");
            $buf .= "$field_name: $_\x0D\x0A";
        }
    }
    $buf .= "\x0D\x0A";
    return $self->write($buf);
}
# read_body(CB, RESPONSE) - dispatch body reading: chunked decoding when
# any Transfer-Encoding value mentions "chunked", otherwise
# Content-Length / read-to-EOF framing.
sub read_body {
    @_ == 3 || die(q/Usage: $handle->read_body(callback, response)/ . "\n");
    my ($self, $cb, $response) = @_;
    my $te = $response->{headers}{'transfer-encoding'} || '';
    my @encodings = ref $te eq 'ARRAY' ? @$te : ($te);
    if (grep { /chunked/i } @encodings) {
        $self->read_chunked_body($cb, $response);
    }
    else {
        $self->read_content_body($cb, $response);
    }
    return;
}
# write_body(REQUEST) - emit the request body: Content-Length framing
# when a length is declared, chunked transfer-encoding otherwise.
sub write_body {
    @_ == 2 || die(q/Usage: $handle->write_body(request)/ . "\n");
    my ($self, $request) = @_;
    return $request->{headers}{'content-length'}
        ? $self->write_content_body($request)
        : $self->write_chunked_body($request);
}
# read_content_body(CB, RESPONSE, [LEN]) - read a body framed by an
# explicit byte count (LEN or the content-length header); without one,
# read until the server closes the connection.
sub read_content_body {
    @_ == 3 || @_ == 4 || die(q/Usage: $handle->read_content_body(callback, response, [read_length])/ . "\n");
    my ($self, $cb, $response, $content_length) = @_;
    $content_length ||= $response->{headers}{'content-length'};
    if ( $content_length ) {
        my $len = $content_length;
        # Deliver the body to CB in at most BUFSIZE slices.
        while ($len > 0) {
            my $read = ($len > BUFSIZE) ? BUFSIZE : $len;
            $cb->($self->read($read, 0), $response);
            $len -= $read;
        }
    }
    else {
        # No length: partial reads allowed, EOF terminates the body.
        my $chunk;
        $cb->($chunk, $response) while length( $chunk = $self->read(BUFSIZE, 1) );
    }
    return;
}
# write_content_body(REQUEST) - drain the caller's content iterator to
# the socket under Content-Length framing; an undef or empty chunk ends
# the body.  Dies if the bytes written differ from the declared length.
sub write_content_body {
    @_ == 2 || die(q/Usage: $handle->write_content_body(request)/ . "\n");
    my ($self, $request) = @_;
    my ($len, $content_length) = (0, $request->{headers}{'content-length'});
    while () {
        my $data = $request->{cb}->();
        defined $data && length $data
          or last;
        if ( $] ge '5.008' ) {
            # Body chunks must be byte strings.
            utf8::downgrade($data, 1)
                or die(qq/Wide character in write_content()\n/);
        }
        $len += $self->write($data);
    }
    # FIX: corrected error-message typo ("missmatch" -> "mismatch").
    $len == $content_length
      or die(qq/Content-Length mismatch (got: $len expected: $content_length)\n/);
    return $len;
}
# read_chunked_body(CB, RESPONSE) - decode a chunked message body,
# handing each chunk's data to CB; trailing headers are merged into
# $response->{headers}.
sub read_chunked_body {
    @_ == 3 || die(q/Usage: $handle->read_chunked_body(callback, $response)/ . "\n");
    my ($self, $cb, $response) = @_;
    while () {
        my $head = $self->readline;
        $head =~ /\A ([A-Fa-f0-9]+)/x
          or die(q/Malformed chunk head: / . $Printable->($head) . "\n");
        # A zero-length chunk marks the end of the body.
        my $len = hex($1)
          or last;
        $self->read_content_body($cb, $response, $len);
        # Each chunk's data must be followed by CRLF.
        $self->read(2) eq "\x0D\x0A"
          or die(qq/Malformed chunk: missing CRLF after chunk data\n/);
    }
    # Optional trailer headers follow the last chunk.
    $self->read_header_lines($response->{headers});
    return;
}
# write_chunked_body(REQUEST) - stream the caller's content iterator as
# chunked transfer-encoding (hex size CRLF, data, CRLF per chunk),
# finishing with the last-chunk, optional trailers, and the terminating
# CRLF.  Returns the number of content bytes sent.
sub write_chunked_body {
    @_ == 2 || die(q/Usage: $handle->write_chunked_body(request)/ . "\n");
    my ($self, $request) = @_;
    my $len = 0;
    while () {
        my $data = $request->{cb}->();
        defined $data && length $data
          or last;
        if ( $] ge '5.008' ) {
            utf8::downgrade($data, 1)
                or die(qq/Wide character in write_chunked_body()\n/);
        }
        $len += length $data;
        my $chunk  = sprintf '%X', length $data;
        $chunk .= "\x0D\x0A";
        $chunk .= $data;
        $chunk .= "\x0D\x0A";
        $self->write($chunk);
    }
    # BUG FIX: per RFC 2616/7230 the chunked body ends with
    # "0 CRLF trailer-part CRLF".  The original omitted the final CRLF
    # when no trailer callback was supplied, producing a malformed body.
    $self->write("0\x0D\x0A");
    if ( ref $request->{trailer_cb} eq 'CODE' ) {
        # write_header_lines() supplies the terminating blank line.
        $self->write_header_lines($request->{trailer_cb}->());
    }
    else {
        $self->write("\x0D\x0A");
    }
    return $len;
}
# read_response_header() - parse the Status-Line and header block;
# returns {status, reason, headers, protocol}.  Only HTTP/1.0 and
# HTTP/1.1 (allowing leading zeros in the version) are accepted.
sub read_response_header {
    @_ == 1 || die(q/Usage: $handle->read_response_header()/ . "\n");
    my ($self) = @_;
    my $line = $self->readline;
    $line =~ /\A (HTTP\/(0*\d+\.0*\d+)) [\x09\x20]+ ([0-9]{3}) [\x09\x20]+ ([^\x0D\x0A]*) \x0D?\x0A/x
      or die(q/Malformed Status-Line: / . $Printable->($line). "\n");
    my ($protocol, $version, $status, $reason) = ($1, $2, $3, $4);
    die (qq/Unsupported HTTP protocol: $protocol\n/)
        unless $version =~ /0*1\.0*[01]/;
    return {
        status   => $status,
        reason   => $reason,
        headers  => $self->read_header_lines,
        protocol => $protocol,
    };
}
# write_request_header(METHOD, REQUEST_URI, HEADERS) - emit the HTTP/1.1
# request line followed by the header block; returns total bytes written.
sub write_request_header {
    @_ == 4 || die(q/Usage: $handle->write_request_header(method, request_uri, headers)/ . "\n");
    my ($self, $method, $request_uri, $headers) = @_;
    my $written = $self->write("$method $request_uri HTTP/1.1\x0D\x0A");
    $written += $self->write_header_lines($headers);
    return $written;
}
# _do_timeout(TYPE, [TIMEOUT]) - block until the socket is ready for
# 'read' or 'write', up to TIMEOUT (default: handle timeout).  Returns
# the select() count (0 on timeout); restarts interrupted selects with
# the remaining time.
sub _do_timeout {
    my ($self, $type, $timeout) = @_;
    $timeout = $self->{timeout}
        unless defined $timeout && $timeout >= 0;
    my $fd = fileno $self->{fh};
    defined $fd && $fd >= 0
      or die(qq/select(2): 'Bad file descriptor'\n/);
    my $initial = time;
    my $pending = $timeout;
    my $nfound;
    # fd_set bit vector with only our socket's bit set.
    vec(my $fdset = '', $fd, 1) = 1;
    while () {
        $nfound = ($type eq 'read')
            ? select($fdset, undef, undef, $pending)
            : select(undef, $fdset, undef, $pending) ;
        if ($nfound == -1) {
            # Only EINTR is recoverable; retry with the time left.
            $! == EINTR
              or die(qq/select(2): '$!'\n/);
            redo if !$timeout || ($pending = $timeout - (time - $initial)) > 0;
            $nfound = 0;
        }
        last;
    }
    $! = 0;
    return $nfound;
}
# can_read([timeout]) - true when the socket is readable within timeout.
sub can_read {
    @_ == 1 || @_ == 2 || die(q/Usage: $handle->can_read([timeout])/ . "\n");
    my $self = shift;
    return $self->_do_timeout('read', @_)
}
# can_write([timeout]) - true when the socket is writable within timeout.
sub can_write {
    @_ == 1 || @_ == 2 || die(q/Usage: $handle->can_write([timeout])/ . "\n");
    my $self = shift;
    return $self->_do_timeout('write', @_)
}
1;
__END__
=pod
=head1 NAME
HTTP::Tiny - A small, simple, correct HTTP/1.1 client
=head1 VERSION
version 0.017
=head1 SYNOPSIS
use HTTP::Tiny;
my $response = HTTP::Tiny->new->get('http://example.com/');
die "Failed!\n" unless $response->{success};
print "$response->{status} $response->{reason}\n";
while (my ($k, $v) = each %{$response->{headers}}) {
for (ref $v eq 'ARRAY' ? @$v : $v) {
print "$k: $_\n";
}
}
print $response->{content} if length $response->{content};
=head1 DESCRIPTION
This is a very simple HTTP/1.1 client, designed for doing simple GET
requests without the overhead of a large framework like L<LWP::UserAgent>.
It is more correct and more complete than L<HTTP::Lite>. It supports
proxies (currently only non-authenticating ones) and redirection. It
also correctly resumes after EINTR.
=head1 METHODS
=head2 new
$http = HTTP::Tiny->new( %attributes );
This constructor returns a new HTTP::Tiny object. Valid attributes include:
=over 4
=item *
C<agent>
A user-agent string (defaults to 'HTTP::Tiny/$VERSION')
=item *
C<default_headers>
A hashref of default headers to apply to requests
=item *
C<max_redirect>
Maximum number of redirects allowed (defaults to 5)
=item *
C<max_size>
Maximum response size (only when not using a data callback). If defined,
responses larger than this will return an exception.
=item *
C<proxy>
URL of a proxy server to use (default is C<$ENV{http_proxy}> if set)
=item *
C<timeout>
Request timeout in seconds (default is 60)
=back
Exceptions from C<max_size>, C<timeout> or other errors will result in a
pseudo-HTTP status code of 599 and a reason of "Internal Exception". The
content field in the response will contain the text of the exception.
=head2 get|head|put|post|delete
$response = $http->get($url);
$response = $http->get($url, \%options);
$response = $http->head($url);
These methods are shorthand for calling C<request()> for the given method. The
URL must have unsafe characters escaped and international domain names encoded.
See C<request()> for valid options and a description of the response.
The C<success> field of the response will be true if the status code is 2XX.
=head2 post_form
$response = $http->post_form($url, $form_data);
$response = $http->post_form($url, $form_data, \%options);
This method executes a C<POST> request and sends the key/value pairs from a
form data hash or array reference to the given URL with a C<content-type> of
C<application/x-www-form-urlencoded>. See documentation for the
C<www_form_urlencode> method for details on the encoding.
The URL must have unsafe characters escaped and international domain names
encoded. See C<request()> for valid options and a description of the response.
Any C<content-type> header or content in the options hashref will be ignored.
The C<success> field of the response will be true if the status code is 2XX.
=head2 mirror
$response = $http->mirror($url, $file, \%options)
if ( $response->{success} ) {
print "$file is up to date\n";
}
Executes a C<GET> request for the URL and saves the response body to the file
name provided. The URL must have unsafe characters escaped and international
domain names encoded. If the file already exists, the request will include an
C<If-Modified-Since> header with the modification timestamp of the file. You
may specify a different C<If-Modified-Since> header yourself in the C<<
$options->{headers} >> hash.
The C<success> field of the response will be true if the status code is 2XX
or if the status code is 304 (unmodified).
If the file was modified and the server response includes a properly
formatted C<Last-Modified> header, the file modification time will
be updated accordingly.
=head2 request
$response = $http->request($method, $url);
$response = $http->request($method, $url, \%options);
Executes an HTTP request of the given method type ('GET', 'HEAD', 'POST',
'PUT', etc.) on the given URL. The URL must have unsafe characters escaped and
international domain names encoded. A hashref of options may be appended to
modify the request.
Valid options are:
=over 4
=item *
headers
A hashref containing headers to include with the request. If the value for
a header is an array reference, the header will be output multiple times with
each value in the array. These headers over-write any default headers.
=item *
content
A scalar to include as the body of the request OR a code reference
that will be called iteratively to produce the body of the request
=item *
trailer_callback
A code reference that will be called if it exists to provide a hashref
of trailing headers (only used with chunked transfer-encoding)
=item *
data_callback
A code reference that will be called for each chunk of the response
body received.
=back
If the C<content> option is a code reference, it will be called iteratively
to provide the content body of the request. It should return the empty
string or undef when the iterator is exhausted.
If the C<data_callback> option is provided, it will be called iteratively until
the entire response body is received. The first argument will be a string
containing a chunk of the response body, the second argument will be the
in-progress response hash reference, as described below. (This allows
customizing the action of the callback based on the C<status> or C<headers>
received prior to the content body.)
The C<request> method returns a hashref containing the response. The hashref
will have the following keys:
=over 4
=item *
success
Boolean indicating whether the operation returned a 2XX status code
=item *
status
The HTTP status code of the response
=item *
reason
The response phrase returned by the server
=item *
content
The body of the response. If the response does not have any content
or if a data callback is provided to consume the response body,
this will be the empty string
=item *
headers
A hashref of header fields. All header field names will be normalized
to be lower case. If a header is repeated, the value will be an arrayref;
it will otherwise be a scalar string containing the value
=back
On an exception during the execution of the request, the C<status> field will
contain 599, and the C<content> field will contain the text of the exception.
=head2 www_form_urlencode
$params = $http->www_form_urlencode( $data );
$response = $http->get("http://example.com/query?$params");
This method converts the key/value pairs from a data hash or array reference
into a C<x-www-form-urlencoded> string. The keys and values from the data
reference will be UTF-8 encoded and escaped per RFC 3986. If a value is an
array reference, the key will be repeated with each of the values of the array
reference. The key/value pairs in the resulting string will be sorted by key
and value.
=for Pod::Coverage agent
default_headers
max_redirect
max_size
proxy
timeout
=head1 LIMITATIONS
HTTP::Tiny is I<conditionally compliant> with the
L<HTTP/1.1 specification|http://www.w3.org/Protocols/rfc2616/rfc2616.html>.
It attempts to meet all "MUST" requirements of the specification, but does not
implement all "SHOULD" requirements.
Some particular limitations of note include:
=over
=item *
HTTP::Tiny focuses on correct transport. Users are responsible for ensuring
that user-defined headers and content are compliant with the HTTP/1.1
specification.
=item *
Users must ensure that URLs are properly escaped for unsafe characters and that
international domain names are properly encoded to ASCII. See L<URI::Escape>,
L<URI::_punycode> and L<Net::IDN::Encode>.
=item *
Redirection is very strict against the specification. Redirection is only
automatic for response codes 301, 302 and 307 if the request method is 'GET' or
'HEAD'. Response code 303 is always converted into a 'GET' redirection, as
mandated by the specification. There is no automatic support for status 305
("Use proxy") redirections.
=item *
Persistent connections are not supported. The C<Connection> header will
always be set to C<close>.
=item *
Direct C<https> connections are supported only if L<IO::Socket::SSL> is
installed. There is no support for C<https> connections via proxy.
Any SSL certificate that matches the host is accepted -- SSL certificates
are not verified against certificate authorities.
=item *
Cookies are not directly supported. Users that set a C<Cookie> header
should also set C<max_redirect> to zero to ensure cookies are not
inappropriately re-transmitted.
=item *
Only the C<http_proxy> environment variable is supported in the format
C<http://HOST:PORT/>. If a C<proxy> argument is passed to C<new> (including
undef), then the C<http_proxy> environment variable is ignored.
=item *
There is no provision for delaying a request body using an C<Expect> header.
Unexpected C<1XX> responses are silently ignored as per the specification.
=item *
Only 'chunked' C<Transfer-Encoding> is supported.
=item *
There is no support for a Request-URI of '*' for the 'OPTIONS' request.
=back
=head1 SEE ALSO
=over 4
=item *
L<LWP::UserAgent>
=back
=for :stopwords cpan testmatrix url annocpan anno bugtracker rt cpants kwalitee diff irc mailto metadata placeholders
=head1 SUPPORT
=head2 Bugs / Feature Requests
Please report any bugs or feature requests through the issue tracker
at L<http://rt.cpan.org/Public/Dist/Display.html?Name=HTTP-Tiny>.
You will be notified automatically of any progress on your issue.
=head2 Source Code
This is open source software. The code repository is available for
public review and contribution under the terms of the license.
L<https://github.com/dagolden/p5-http-tiny>
git clone https://github.com/dagolden/p5-http-tiny.git
=head1 AUTHORS
=over 4
=item *
Christian Hansen <chansen@cpan.org>
=item *
David Golden <dagolden@cpan.org>
=back
=head1 COPYRIGHT AND LICENSE
This software is copyright (c) 2012 by Christian Hansen.
This is free software; you can redistribute it and/or modify it under
the same terms as the Perl 5 programming language system itself.
=cut
| efortuna/AndroidSDKClone | ndk_experimental/prebuilt/linux-x86_64/lib/perl5/5.16.2/HTTP/Tiny.pm | Perl | apache-2.0 | 35,004 |
package Locale::Codes::Country;
# Copyright (C) 2001 Canon Research Centre Europe (CRE).
# Copyright (C) 2002-2009 Neil Bowers
# Copyright (c) 2010-2013 Sullivan Beck
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
use strict;
require 5.006;
use warnings;
require Exporter;
use Carp;
use Locale::Codes;
use Locale::Codes::Constants;
use Locale::Codes::Country_Codes;
use Locale::Codes::Country_Retired;
#=======================================================================
# Public Global Variables
#=======================================================================
our($VERSION,@ISA,@EXPORT,@EXPORT_OK);
$VERSION='3.25';
@ISA       = qw(Exporter);
@EXPORT    = qw(code2country
                country2code
                all_country_codes
                all_country_names
                country_code2code
                LOCALE_CODE_ALPHA_2
                LOCALE_CODE_ALPHA_3
                LOCALE_CODE_NUMERIC
                LOCALE_CODE_FIPS
                LOCALE_CODE_DOM
               );
# Every public function below is a thin wrapper that delegates to the
# generic Locale::Codes engine with the code-set type fixed to 'country'.
sub code2country {
   return Locale::Codes::_code2name('country',@_);
}
sub country2code {
   return Locale::Codes::_name2code('country',@_);
}
sub country_code2code {
   return Locale::Codes::_code2code('country',@_);
}
sub all_country_codes {
   return Locale::Codes::_all_codes('country',@_);
}
sub all_country_names {
   return Locale::Codes::_all_names('country',@_);
}
# The remaining functions customize the country tables at runtime
# (renames, additions, deletions, and aliases).
sub rename_country {
   return Locale::Codes::_rename('country',@_);
}
sub add_country {
   return Locale::Codes::_add_code('country',@_);
}
sub delete_country {
   return Locale::Codes::_delete_code('country',@_);
}
sub add_country_alias {
   return Locale::Codes::_add_alias('country',@_);
}
sub delete_country_alias {
   return Locale::Codes::_delete_alias('country',@_);
}
sub rename_country_code {
   return Locale::Codes::_rename_code('country',@_);
}
sub add_country_code_alias {
   return Locale::Codes::_add_code_alias('country',@_);
}
sub delete_country_code_alias {
   return Locale::Codes::_delete_code_alias('country',@_);
}
#=======================================================================
#
# Old function for backward compatibility
#
#=======================================================================
# Deprecated name for rename_country_code(); returns the alias on
# success, 0 on failure.
sub alias_code {
   my($alias,$code,@args) = @_;
   my $success = rename_country_code($code,$alias,@args);
   return 0  if (! $success);
   return $alias;
}
1;
# Local Variables:
# mode: cperl
# indent-tabs-mode: nil
# cperl-indent-level: 3
# cperl-continued-statement-offset: 2
# cperl-continued-brace-offset: 0
# cperl-brace-offset: 0
# cperl-brace-imaginary-offset: 0
# cperl-label-offset: -2
# End:
| Dokaponteam/ITF_Project | xampp/perl/lib/Locale/Codes/Country.pm | Perl | mit | 2,714 |
#! /usr/bin/env perl
# DAGMan POST script for node A: report success or failure based on the
# first argument and propagate it as this script's exit status.
my $status = $ARGV[0];
print 'Node A post script ', ($status ? 'failed' : 'succeeded'), "\n";
exit($status);
| djw8605/htcondor | src/condor_tests/job_dagman_default_log-subdir/job_dagman_default_log-nodeA-post.pl | Perl | apache-2.0 | 145 |
#!/usr/bin/perl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
use warnings;
use strict;
use JSON;
use Data::Dumper;
use Term::ReadKey;
use LWP::UserAgent;
use File::Path qw{ make_path };
use File::Find;
use File::Spec;
use Time::HiRes qw(gettimeofday tv_interval);
#use Data::Compare;
use Test::Deep;
use Test::More;
use List::Compare;
use Getopt::Long;
# NOTE(review): --servercount/--filecount are parsed here but the two lexicals
# are never read anywhere below - confirm whether they are vestigial.
GetOptions(servercount => \my $servercount, filecount => \my $filecount);

# Root of the scratch area; reference data goes under .../ref, candidate
# data under .../new.
my $tmp_dir_base = "/tmp/files";

# Shared curl option string; populated by get_ref()/get_new() once a
# session cookie has been obtained.
my $CURL_OPTS;

# Usage: <script> getref|getnew|compare <config.json>
my $mode = shift;
my $configFile = shift;
my $config = configure( $configFile );
my $perform_snapshot = $config->{perform_snapshot};

# Dispatch on the requested mode (warns on undef $mode if no args given).
if ($mode eq 'getref') {
	get_ref();
}
elsif ($mode eq 'getnew') {
	get_new();
}
elsif ($mode eq 'compare') {
	do_the_compare();
}
else {
	print "Help\n";
}
# Mode 'getref': log in to the reference Traffic Ops instance (to1_*) and
# snapshot its config files and CRConfigs under $tmp_dir_base/ref.
# Prompts for the password when the config file does not supply one.
sub get_ref {
    # get a cookie from the reference system; CURL_OPTS is global
    if ( !defined( $config->{to1_passwd} ) ) {
        $config->{to1_passwd} = &get_to_passwd( $config->{to1_user} );
    }
    my $to_login = $config->{to1_user} . ":" . $config->{to1_passwd};
    my $cookie = &get_cookie( $config->{to1_url}, $to_login );
    # All subsequent curl_me() calls reuse this option string; -w appends the
    # HTTP response code to the body, which curl_me() peels off.
    $CURL_OPTS = "-H 'Cookie: $cookie' -w %{response_code} -k -L -s -S --connect-timeout 5 --retry 5 --retry-delay 5 --basic";
    &get_files( $config->{to1_url}, "$tmp_dir_base/ref" );
    &get_crconfigs( $config->{to1_url}, "$tmp_dir_base/ref" );
}
#
# Mode 'getnew': log in to the candidate Traffic Ops instance (to2_*) and
# snapshot its config files and CRConfigs under $tmp_dir_base/new.
sub get_new {
    # Prompt for the password only when the config file did not supply one.
    $config->{to2_passwd} = get_to_passwd( $config->{to2_user} )
        unless defined $config->{to2_passwd};

    my $cookie = get_cookie( $config->{to2_url},
        join( ':', $config->{to2_user}, $config->{to2_passwd} ) );

    # Shared option string reused by every curl_me() call in this run.
    $CURL_OPTS = "-H 'Cookie: $cookie' -w %{response_code} -k -L -s -S --connect-timeout 5 --retry 5 --retry-delay 5 --basic";

    my $outdir = "$tmp_dir_base/new";
    get_files( $config->{to2_url}, $outdir );
    get_crconfigs( $config->{to2_url}, $outdir );
}
# Mode 'compare': walk the reference tree and diff every file against its
# counterpart under .../new, reporting through Test::More.
sub do_the_compare {
    &compare_all_files();
}

# Emit the Test::More summary and stop; the sub definitions below were
# compiled before this point, so execution never falls through them.
done_testing();
exit(0);
##################################################################################################
##################################################################################################
# compare all files in $tmp_dir_base . "/ref" to $tmp_dir_base . "/new"
sub compare_all_files {
    find( \&compare, $tmp_dir_base . "/ref" );
}
# The real work for compare_all_files: a File::Find callback invoked once per
# node under .../ref ($_ holds the basename, cwd is the node's directory).
sub compare {
    my $f1 = File::Spec->rel2abs($_);
    if ( -f $f1 ) {
        # Map the reference path onto the candidate tree.
        # NOTE(review): this substitutes the FIRST 'ref' anywhere in the
        # absolute path, so any other path component containing 'ref'
        # would be mangled - confirm paths never contain that substring.
        ( my $f2 = $f1 ) =~ s/ref/new/;
        # The dot is unescaped, so this matches any character before 'json';
        # in practice it selects files ending in '.json' for JSON comparison.
        if ( $f1 !~ /.json$/ ) {
            &compare_files( $f1, $f2 );
        }
        else {
            &compare_files( $f1, $f2, 1 );
        }
    }
}
# read a parent.config line into an object
# Parse one parent.config line into a hashref of key => value pairs.
# Fields are whitespace-separated "key=value" tokens; a value containing '='
# keeps only the portion before the second '=' (split without a limit), which
# matches the original behavior. Returns undef for an empty line.
sub get_parent_config_item {
    my ($line) = @_;
    my $item;
    for my $token ( split( /\s+/, $line ) ) {
        my ( $key, $val ) = split( /=/, $token );
        $item->{$key} = $val;
    }
    return $item;
}
# parent.config is a bit different, in that the order of parents is irrelevant for urlhas and consistent_hash
# Deep-compare two parent.config bodies. Because ATS treats the parent list
# as an unordered set when round_robin is a hash strategy, parents are
# normalized into a hash (ignoring order) before the structural compare.
# NOTE(review): only domains present in the reference config are checked;
# extra domains that appear solely in the candidate config are not flagged.
sub compare_parent_dot_configs {
    my $f  = shift;    # file name, used only in the test description
    my $d1 = shift;    # reference file contents
    my $d2 = shift;    # candidate file contents
    # Byte-identical contents need no per-domain analysis.
    if ( $d1 eq $d2 ) {
        return 0;
    }
    my @lines1 = split( /\n/, $d1 );
    my @lines2 = split( /\n/, $d2 );
    # Index each file's lines by their dest_domain.
    my $full_config1;
    foreach my $line (@lines1) {
        my $config_line = &get_parent_config_item($line);
        $full_config1->{ $config_line->{dest_domain} } = $config_line;
    }
    my $full_config2;
    foreach my $line (@lines2) {
        my $config_line = &get_parent_config_item($line);
        $full_config2->{ $config_line->{dest_domain} } = $config_line;
    }
    foreach my $domain ( keys %{$full_config1} ) {
        my $config1 = $full_config1->{$domain};
        my $config2 = $full_config2->{$domain};
        # For hash-based strategies, replace the ordered parent string with
        # an order-insensitive set representation on both sides.
        if ( defined( $config1->{round_robin} ) && $config1->{round_robin} =~ /hash/ ) {
            my $pstring = $config1->{parent};
            $pstring =~ s/\"//g;
            foreach my $parent ( split( /;/, $pstring ) ) {
                $config1->{parents_hash}{$parent} = 1;
            }
            $pstring = $config2->{parent};
            $pstring =~ s/"//g;
            foreach my $parent ( split( /;/, $pstring ) ) {
                $config2->{parents_hash}{$parent} = 1;
            }
            $config1->{parent} = undef;
            $config2->{parent} = undef;
        }
        my $ok = cmp_deeply( $config1, $config2, "parent.config deep compare for $f:$domain" );
        # On mismatch, dump both structures to aid debugging.
        if ( !$ok ) {
            print Dumper($config1);
            print Dumper($config2);
        }
    }
}
# Compare a reference file ($f1) to its candidate counterpart ($f2).
# Comment lines (and XML comments in *_xml.config files) are stripped first.
# Dispatch: parent.config gets order-insensitive handling; CRConfig.json and
# *ort1 get JSON compare with volatile stats fields neutralized; other .json
# gets a plain JSON deep compare; everything else is compared as text.
# Returns silently when either file is missing or empty after filtering.
sub compare_files {
    my $f1   = shift;
    my $f2   = shift;
    my $json = shift || 0;    # NOTE(review): accepted but never consulted; dispatch re-tests the file name
    open my $fh, '<', $f1 or print "$f1 is missing\n";
    my ( $d1, $d2 );
    while (<$fh>) {
        next if (/^#/);
        next if ( $f1 =~ /_xml.config$/ && /^\s*<!--.*-->\s*$/ );
        # TODO -- experimenting with adding "substitutions" key to config
        # not working yet...
        # NOTE(review): substitutions are applied to the reference side only,
        # never to $d2 - confirm that is the intent.
        if (exists $config->{substitutions}) {
            my %s = %{$config->{substitutions}};
            for my $key (keys %s) {
                my $val = $s{$key};
                if (s/$key/$val/g) {
                    print "Substituted $key for $val:\n $_\n";
                }
            }
        }
        $d1 .= $_;
    }
    close($fh);
    open $fh, '<', $f2 or print "$f2 is missing\n";
    while (<$fh>) {
        next if (/^#/);
        next if ( $f2 =~ /_xml.config$/ && /^\s*<!--.*-->\s*$/ );
        $d2 .= $_;
    }
    close($fh);
    if ( !defined($d1) || !defined($d2) ) {
        return;
    }
    if ( $f1 =~ /parent.config$/ ) {
        &compare_parent_dot_configs( $f1, $d1, $d2 );
    }
    elsif ( $f1 =~ /CRConfig.json$/ || $f1 =~ /ort1$/ ) {
        my $h1 = JSON->new->allow_nonref->utf8->decode($d1);
        my $h2 = JSON->new->allow_nonref->utf8->decode($d2);
        # Copy run-specific stats from the candidate over the reference so
        # the deep compare only flags meaningful differences.
        if ( defined( $h1->{stats} ) ) {
            $h1->{stats}{tm_user}    = $h2->{stats}{tm_user};
            $h1->{stats}{date}       = $h2->{stats}{date};
            $h1->{stats}{tm_version} = $h2->{stats}{tm_version};
            $h1->{stats}{tm_path}    = $h2->{stats}{tm_path};
            $h1->{stats}{tm_host}    = $h2->{stats}{tm_host};
        }
        my $ok = cmp_deeply( $h1, $h2, "compare $f1" );
    }
    elsif ( $f1 =~ /\.json$/) {
        my $h1 = JSON->new->allow_nonref->utf8->decode($d1);
        my $h2 = JSON->new->allow_nonref->utf8->decode($d2);
        my $ok = cmp_deeply( $h1, $h2, "compare $f1" );
    }
    else {
        my $ok = cmp_deeply( $d1, $d2, "compare $f1" );
    }
}
# Fetch the CRConfig snapshot of every CDN (except the synthetic "ALL" CDN)
# from the given Traffic Ops instance and write each one to
# $outpath/cdn-<name>/CRConfig.json. When $perform_snapshot is set in the
# config, a fresh snapshot is generated first via the write_crconfig tool.
# Fetch/generation timings are printed as a side effect.
sub get_crconfigs {
    my $to_url  = shift;
    my $outpath = shift;
    my $to_cdn_url = $to_url . '/api/1.2/cdns.json';
    my $result   = &curl_me($to_cdn_url);
    my $cdn_json = decode_json($result);
    foreach my $cdn ( @{ $cdn_json->{response} } ) {
        next unless $cdn->{name} ne "ALL";
        my $dir = $outpath . '/cdn-' . $cdn->{name};
        make_path( $dir );
        if ($perform_snapshot) {
            print "Generating CRConfig for " . $cdn->{name};
            my $start = [gettimeofday];
            &curl_me( $to_url . "/tools/write_crconfig/" . $cdn->{name} );
            my $load_time = tv_interval($start);
            print " time: " . $load_time . "\n";
        }
        print "Getting CRConfig for " . $cdn->{name};
        my $start = [gettimeofday];
        my $fcontents = &curl_me( $to_url . '/CRConfig-Snapshots/' . $cdn->{name} . '/CRConfig.json' );
        open( my $fh, '>', $dir . '/CRConfig.json' );
        my $load_time = tv_interval($start);
        print " time: " . $load_time . "\n";
        print $fh $fcontents;
        close $fh;
    }
}
{
    # Cache of profile name => sample host, shared across calls so the
    # server list is fetched from Traffic Ops at most once per run.
    my %profile_sample;

    # Return one representative server host per profile, fetching the full
    # server list from $to_url on first use.
    #
    # BUG FIX: the loop previously ended with an unconditional `last;`, which
    # made the `next if exists ...` dedup guard dead code and sampled only a
    # single profile (the first server returned). Removing it restores the
    # intended one-sample-per-profile behavior.
    sub get_sample_servers {
        if (scalar keys %profile_sample > 0) {
            return %profile_sample;
        }
        my $to_url = shift;
        my $to_server_url = $to_url . '/api/1.2/servers.json';
        my $result      = &curl_me($to_server_url);
        my $server_json = decode_json($result);
        foreach my $server ( @{ $server_json->{response} } ) {
            my $profile = $server->{profile};
            # Keep only the first server seen for each profile.
            next if exists $profile_sample{$profile};
            $profile_sample{ $profile } = $server->{hostName};
        }
        return %profile_sample;
    }
}
# For each sampled profile whose name starts with EDGE or MID, download the
# ats.json config-file manifest for its sample server and then the config
# files it lists, writing everything under $outpath/<hostname>/.
# NOTE(review): $sample_server iterates the hash KEYS, which are profile
# names (the values are hostnames) - the variable name is misleading.
sub get_files {
    my $to_url  = shift;
    my $outpath = shift;
    my %profile_sample = get_sample_servers( $to_url );
    print "Sample servers: " . Data::Dumper->new( [ \%profile_sample ] )->Indent(1)->Terse(1)->Dump();
    print "Writing files to $outpath\n";
    foreach my $sample_server ( keys %profile_sample ) {
        next unless ( $sample_server =~ /^EDGE/ || $sample_server =~ /^MID/ );
        my $dir = "$outpath/$profile_sample{$sample_server}";
        make_path( $dir );
        # Manifest of config files for this server.
        my $server_metadata = $to_url . '/api/1.2/servers/' . $profile_sample{$sample_server} . '/configfiles/ats.json';
        my $result = &curl_me($server_metadata);
        open( my $fh, '>', $dir . '/ats.json' );
        print $fh $result;
        close $fh;
        my $file_list_json = decode_json($result);
        my $config_files   = $file_list_json->{configFiles};
        for my $item ( @{$config_files} ) {
            my $filename = $item->{fnameOnDisk};
            my $url      = $to_url . $item->{apiUri};
            my $scope    = $item->{scope};
            my $cdn      = $file_list_json->{info}{cdnName};
            my $profile  = $file_list_json->{info}{profileName};
            # Rebuild the URL per the file's scope (cdn/profile/server).
            if ( $scope eq "cdn" ) {
                $url = $to_url . '/api/1.2/cdns/' . $cdn . "/configfiles/ats/" . $filename;
            }
            elsif ( $scope eq "profile" ) {
                $url = $to_url . '/api/1.2/profiles/' . $profile . "/configfiles/ats/" . $filename;
            }
            elsif ( $scope eq "server" ) {
                $url = $to_url . '/api/1.2/servers/' . $profile_sample{$sample_server} . "/configfiles/ats/" . $filename;
            }
            print "Getting " . $sample_server . " " . $filename . " (url " . $url . ")\n";
            my $fcontents = &curl_me($url);
            open( my $fh, '>', $dir . '/' . $filename );
            print $fh $fcontents;
            close $fh;
            # NOTE(review): this unconditional `last` stops after the FIRST
            # config file of each server - looks like a debugging leftover;
            # remove it to fetch the complete set. Confirm before changing.
            last;
        }
    }
}
# Interactively prompt for the Traffic Ops password of $user on STDIN with
# terminal echo disabled (Term::ReadKey). Returns the chomped password.
sub get_to_passwd {
    my $user = shift;
    print "Traffic Ops passwd for " . $user . ":";
    ReadMode('noecho');    # don't echo
    chomp( my $passwd = <STDIN> );
    ReadMode(0);           # back to normal
    print "\n";            # terminate the echo-less input line
    return $passwd;
}
# read the config json.
# Slurp the JSON config file named by $filename (UTF-8) and return the
# decoded hashref.
#
# BUG FIX: the die message was written as "Can't open \$filename\": ..."
# - the escaped dollar suppressed interpolation, so failures printed the
# literal text `Can't open $filename"` with a stray quote instead of the
# actual file name. Also fail fast with a clear message when no config
# file argument was supplied at all.
sub configure {
    my $filename = shift;
    die "No config file given\n" unless defined $filename && length $filename;
    my $json_text = do {
        open( my $json_fh, "<:encoding(UTF-8)", $filename )
            or die("Can't open \"$filename\": $!\n");
        local $/;    # slurp mode
        <$json_fh>;
    };
    my $json = JSON->new;
    return $json->decode($json_text);
}
## rest is from other scripts, should probably be replaced by something better.
# Fetch $url via the system curl using the session options in $CURL_OPTS.
# Because $CURL_OPTS carries `-w %{response_code}`, curl appends the 3-digit
# HTTP status to the body; the code below peels those trailing characters
# off to recover both the status and the clean body.
# Retries up to 5 times on curl transport errors; exits the process on
# persistent failure, invalid JSON (for *.json URLs), or an empty body.
# Returns the body, or the status code itself for HTTP >= 400.
# SECURITY NOTE(review): $url is interpolated into a shell command line -
# safe only because URLs are built internally from trusted config.
sub curl_me {
    my $url           = shift;
    my $retry_counter = 5;
    my $result = `/usr/bin/curl $CURL_OPTS $url 2>&1`;
    # curl transport errors start with "curl: (N)"; retry with backoff.
    while ( $result =~ m/^curl\: \(\d+\)/ && $retry_counter > 0 ) {
        $result =~ s/(\r|\f|\t|\n)/ /g;
        print "WARN Error receiving $url: $result\n";
        $retry_counter--;
        sleep 5;
        $result = `/usr/bin/curl $CURL_OPTS $url 2>&1`;
    }
    if ( $result =~ m/^curl\: \(\d+\)/ && $retry_counter == 0 ) {
        print "FATAL $url returned in error five times!\n";
        exit 1;
    }
    else {
        print "INFO Success receiving $url.\n";
        #print "Result: $result\n\n\n";
    }
    # The HTTP status is the last three characters (popped in reverse order,
    # hence the reverse() below).
    my (@chars) = split( //, $result );
    my $response_code = pop(@chars) . pop(@chars) . pop(@chars);
    $response_code = reverse($response_code);
    #print "DEBUG Received $response_code for $url.\n";
    if ( $response_code >= 400 ) {
        print "ERROR Received error code $response_code for $url!\n";
        return $response_code;
    }
    # Strip the 3 status digits from the body proper.
    for ( 0 .. 2 ) { chop($result) }
    # Sanity-check JSON endpoints before handing the body back.
    if ( $url =~ m/\.json$/ ) {
        eval {
            decode_json($result);
            1;
        } or do {
            my $error = $@;
            print "FATAL $url did not return valid JSON: $result | error: $error\n";
            exit 1;
        }
    }
    my $size = length($result);
    if ( $size == 0 ) {
        print "FATAL URL: $url returned empty!! Bailing!\n";
        exit 1;
    }
    return $result;
}
# Log in to Traffic Ops at $tm_host with "user:pass" credentials and return
# the mojolicious session cookie scraped from curl's Set-Cookie header,
# or 0 when no cookie was found (login failure).
# SECURITY NOTE(review): the username and password are interpolated into a
# shell command inside single quotes; a password containing a single quote
# breaks the command (and could inject shell syntax). Consider list-form
# exec or LWP::UserAgent (already loaded) instead.
sub get_cookie {
    my $tm_host  = shift;
    my $tm_login = shift;
    my ( $u, $p ) = split( /:/, $tm_login );
    my $cmd = "curl -vLks -X POST -d 'u=" . $u . "' -d 'p=" . $p . "' " . $tm_host . "/login -o /dev/null 2>&1 | grep Set-Cookie | awk '{print \$3}'";
    #print utput_log_fh "DEBUG Getting cookie with $cmd.\n";
    my $cookie = `$cmd`;
    chomp $cookie;
    $cookie =~ s/;$//;    # drop the trailing attribute separator
    if ( $cookie =~ m/mojolicious/ ) {
        #print "DEBUG Cookie is $cookie.\n";
        return $cookie;
    }
    else {
        print "ERROR Cookie not found from Traffic Ops!\n";
        return 0;
    }
}
| mdb/incubator-trafficcontrol | test/traffic_ops_cfg/cfg_test.pl | Perl | apache-2.0 | 12,980 |
package Fixtures::Integration::Role;
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

# Do not edit! Generated code.
# See https://github.com/Comcast/traffic_control/wiki/The%20Kabletown%20example

# DBIx::Class::EasyFixture fixture set seeding the `role` table for
# integration tests. Each entry maps a fixture key to the row to insert.
use Moose;
extends 'DBIx::Class::EasyFixture';
use namespace::autoclean;

# Fixture key => { new => result class, using => column values }.
# NOTE(review): the 'operations' and 'read-only user' rows reuse the
# 'disallowed' description "block all access" - looks like a generator
# copy-paste; confirm against the upstream seed data before relying on it.
my %definition_for = (
    '0' => {
        new   => 'Role',
        using => {
            name        => 'admin',
            description => 'super-user',
            priv_level  => '30',
        },
    },
    '1' => {
        new   => 'Role',
        using => {
            name        => 'disallowed',
            description => 'block all access',
            priv_level  => '0',
        },
    },
    '3' => {
        new   => 'Role',
        using => {
            name        => 'operations',
            description => 'block all access',
            priv_level  => '20',
        },
    },
    '4' => {
        new   => 'Role',
        using => {
            name        => 'portal',
            description => 'Portal User',
            priv_level  => '2',
        },
    },
    '5' => {
        new   => 'Role',
        using => {
            name        => 'read-only user',
            description => 'block all access',
            priv_level  => '10',
        },
    },
);

# Human-readable name of this fixture set.
sub name {
    return "Role";
}

# Look up one fixture definition by its key; undef when unknown.
sub get_definition {
    my ( $self,
        $name ) = @_;
    return $definition_for{$name};
}

# All fixture keys, ordered by role name to guarantee a stable
# (and FK-friendly) insertion order.
sub all_fixture_names {
    # sort by db name to guarantee insertion order
    return (sort { $definition_for{$a}{using}{name} cmp $definition_for{$b}{using}{name} } keys %definition_for);
}

__PACKAGE__->meta->make_immutable;
1;
| rscrimojr/incubator-trafficcontrol | traffic_ops/app/lib/Fixtures/Integration/Role.pm | Perl | apache-2.0 | 2,040 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
package Avro::DataFileReader;
use strict;
use warnings;
use Object::Tiny qw{
fh
reader_schema
sync_marker
block_max_size
};
use constant MARKER_SIZE => 16;
# TODO: refuse to read a block more than block_max_size, instead
# do partial reads
use Avro::DataFile;
use Avro::BinaryDecoder;
use Avro::Schema;
use Carp;
use IO::String;
use IO::Uncompress::RawInflate ;
use Fcntl();
# Constructor (via Object::Tiny): accepts fh, reader_schema, etc.
# Validates that any supplied reader_schema is an Avro::Schema instance;
# the eval guards against unblessed values that cannot ->isa().
sub new {
    my $class    = shift;
    my $datafile = $class->SUPER::new(@_);
    my $schema = $datafile->{reader_schema};
    croak "schema is invalid"
        if $schema && ! eval { $schema->isa("Avro::Schema") };
    return $datafile;
}
# Codec name recorded in the container metadata ("null", "deflate", ...).
sub codec {
    my ($self) = @_;
    return $self->metadata->{'avro.codec'};
}
# The schema the file was written with, parsed lazily from the
# 'avro.schema' metadata entry and cached in _writer_schema.
sub writer_schema {
    my $datafile = shift;
    unless (exists $datafile->{_writer_schema}) {
        my $json_schema = $datafile->metadata->{'avro.schema'};
        $datafile->{_writer_schema} = Avro::Schema->parse($json_schema);
    }
    return $datafile->{_writer_schema};
}
# Key/value metadata map from the file header, extracted lazily and cached.
# Falls back to an empty hashref when the header carries no meta block.
sub metadata {
    my ($self) = @_;
    $self->{_metadata} = ( $self->header->{meta} || {} )
        unless exists $self->{_metadata};
    return $self->{_metadata};
}
# The decoded container file header, read once from the fh and cached.
sub header {
    my $datafile = shift;
    unless (exists $datafile->{_header}) {
        $datafile->{_header} = $datafile->read_file_header;
    }
    return $datafile->{_header};
}
# Decode the container header from the current fh position: verifies the
# Avro magic bytes, stashes the 16-byte sync marker, and rejects files
# whose avro.codec this reader cannot handle. Returns the decoded header.
# NOTE(review): `throw Class(...)` is indirect-object syntax relying on
# Error::Simple being loaded by the caller's environment.
sub read_file_header {
    my $datafile = shift;

    my $data = Avro::BinaryDecoder->decode(
        reader_schema => $Avro::DataFile::HEADER_SCHEMA,
        writer_schema => $Avro::DataFile::HEADER_SCHEMA,
        reader        => $datafile->{fh},
    );
    croak "Magic '$data->{magic}' doesn't match"
        unless $data->{magic} eq Avro::DataFile->AVRO_MAGIC;

    # Sync marker delimits every data block that follows.
    $datafile->{sync_marker} = $data->{sync}
        or croak "sync marker appears invalid";

    my $codec = $data->{meta}{'avro.codec'} || "";

    throw Avro::DataFile::Error::UnsupportedCodec($codec)
        unless Avro::DataFile->is_codec_valid($codec);

    return $data;
}
# Decode and return every remaining object in the file as a list.
# Loops block by block: reads a block header when positioned at a block
# boundary, drains the block, and stops after the first pass that yields
# nothing (EOF). The do/until shape guarantees at least one EOF probe.
sub all {
    my $datafile = shift;

    my @objs;
    my @block_objs;
    do {
        if ($datafile->eof) {
            @block_objs = ();
        }
        else {
            $datafile->read_block_header if $datafile->eob;
            @block_objs = $datafile->read_to_block_end;
            push @objs, @block_objs;
        }

    } until !@block_objs;

    return @objs
}
# Return up to $count decoded objects, crossing block boundaries as needed
# (recursing once per exhausted block). Returns an empty list at EOF.
sub next {
    my $datafile = shift;
    my $count    = shift;

    my @objs;

    $datafile->read_block_header if $datafile->eob;
    return () if $datafile->eof;

    my $block_count = $datafile->{object_count};

    if ($block_count <= $count) {
        # Current block holds no more than requested: drain it entirely,
        # then recurse for the remainder (which reads the next header).
        push @objs, $datafile->read_to_block_end;
        croak "Didn't read as many objects than expected"
            unless scalar @objs == $block_count;

        push @objs, $datafile->next($count - $block_count);
    }
    else {
        # Request fits inside the current block.
        push @objs, $datafile->read_within_block($count);
    }
    return @objs;
}
# Decode at most $count objects from the current block, decrementing the
# per-block object_count as it goes; never reads past the block.
# Reads from reader() (the inflater for deflate blocks, else the raw fh)
# and resolves with the reader schema when one was supplied.
sub read_within_block {
    my $datafile = shift;
    my $count    = shift;

    my $reader        = $datafile->reader;
    my $writer_schema = $datafile->writer_schema;
    my $reader_schema = $datafile->reader_schema || $writer_schema;
    my @objs;
    while ($count-- > 0 && $datafile->{object_count} > 0) {
        push @objs, Avro::BinaryDecoder->decode(
            writer_schema => $writer_schema,
            reader_schema => $reader_schema,
            reader        => $reader,
        );
        $datafile->{object_count}--;
    }
    return @objs;
}
# Skip over $count objects without decoding them.
# Assumes a block header has already been read (object_count is set).
# NOTE(review): when a whole block is skipped, the recursion does not read
# the next block header before consulting object_count again, so the stale
# count from the finished block is reused - confirm callers never skip
# across a block boundary, or read_block_header should be invoked here.
sub skip {
    my $datafile = shift;
    my $count    = shift;

    my $block_count = $datafile->{object_count};
    if ($block_count <= $count) {
        # Requested skip spans the rest of this block (and maybe more).
        $datafile->skip_to_block_end
            or croak "Cannot skip to end of block!";
        $datafile->skip($count - $block_count);
    }
    else {
        my $writer_schema = $datafile->writer_schema;
        ## could probably be optimized
        while ($count--) {
            Avro::BinaryDecoder->skip($writer_schema, $datafile->reader);
            $datafile->{object_count}--;
        }
    }
}
# Position at a block boundary: decode the block's object count and byte
# size, remember where its payload starts, and - for deflate files - slurp
# the whole compressed payload (plus trailing sync marker) into memory and
# wrap it in a RawInflate stream that becomes the active reader().
sub read_block_header {
    my $datafile = shift;
    my $fh = $datafile->{fh};

    # Ensure the file header (and sync marker) has been consumed first.
    $datafile->header unless $datafile->{_header};
    $datafile->{object_count} = Avro::BinaryDecoder->decode_long(
        undef, undef, $fh,
    );
    $datafile->{block_size} = Avro::BinaryDecoder->decode_long(
        undef, undef, $fh,
    );
    $datafile->{block_start} = tell $fh;

    return unless $datafile->codec eq 'deflate';

    ## we need to read the entire block into memory, to inflate it
    my $nread = read $fh, my $block, $datafile->{block_size} + MARKER_SIZE
        or croak "Error reading from file: $!";

    ## remove the marker
    my $marker = substr $block, -(MARKER_SIZE), MARKER_SIZE, '';
    $datafile->{block_marker} = $marker;

    ## this is our new reader
    $datafile->{reader} = IO::Uncompress::RawInflate->new(\$block);

    return;
}
# Check the 16-byte sync marker that terminates the current block.
# Deflate blocks stashed their marker in block_marker when the block was
# slurped; otherwise the marker is read directly from the fh here.
# Croaks on mismatch (stream desynchronized).
sub verify_marker {
    my $datafile = shift;

    my $marker = $datafile->{block_marker};
    unless (defined $marker) {
        ## we are in the fh case
        read $datafile->{fh}, $marker, MARKER_SIZE;
    }

    unless (($marker || "") eq $datafile->sync_marker) {
        croak "Oops synchronization issue (marker mismatch)";
    }
    return;
}
# Advance past the remainder of the current block (payload + sync marker).
# Returns true on success; callers (see skip()) treat a false return as a
# synchronization failure.
#
# BUG FIXES:
#  * The deflate branch ended in a bare `return;` (false), which made
#    skip()'s `skip_to_block_end or croak` fail on every compressed block
#    even though the seek had succeeded. It now returns 1.
#  * The uncompressed branch seeked with whence 0 (SEEK_SET) but passed a
#    *relative* byte count (bytes remaining in the block), landing near the
#    start of the file instead of at the block end. It now seeks relative
#    to the current position (SEEK_CUR).
sub skip_to_block_end {
    my $datafile = shift;

    if (my $reader = $datafile->{reader}) {
        # Deflate case: the whole block (marker already stripped) lives in
        # the in-memory inflater, so exhausting it is just a seek-to-end.
        seek $reader, 0, Fcntl->SEEK_END;
        return 1;
    }

    # Uncompressed case: jump over the unread payload bytes, then consume
    # and verify the trailing sync marker.
    my $remaining_size = $datafile->{block_size}
                       + $datafile->{block_start}
                       - tell $datafile->{fh};

    seek $datafile->{fh}, $remaining_size, Fcntl->SEEK_CUR;
    $datafile->verify_marker; ## will do a read

    return 1;
}
# Decode every remaining object in the current block, then verify the
# trailing sync marker. Returns the decoded objects.
sub read_to_block_end {
    my $datafile = shift;

    # NOTE(review): $reader is assigned but unused here; read_within_block
    # fetches the reader itself.
    my $reader = $datafile->reader;
    my @objs = $datafile->read_within_block( $datafile->{object_count} );
    $datafile->verify_marker;
    return @objs;
}
# Active input stream: the per-block inflater when one is installed
# (deflate blocks), otherwise the underlying file handle.
sub reader {
    my ($self) = @_;
    return $self->{reader} || $self->{fh};
}
## end of block
# True when positioned at a block boundary: at EOF, when the in-memory
# inflater is exhausted (deflate), or - for uncompressed files - when no
# block header has been read yet or the fh has passed the block's payload.
sub eob {
    my $datafile = shift;

    return 1 if $datafile->eof;

    if ($datafile->{reader}) {
        return 1 if $datafile->{reader}->eof;
    }
    else {
        my $pos = tell $datafile->{fh};
        return 1 unless $datafile->{block_start};
        return 1 if $pos >= $datafile->{block_start} + $datafile->{block_size};
    }
    return 0;
}
# True only when both the per-block inflater (if any) and the underlying
# file handle are exhausted.
sub eof {
    my $datafile = shift;
    if ($datafile->{reader}) {
        return 0 unless $datafile->{reader}->eof;
    }
    return 1 if $datafile->{fh}->eof;
    return 0;
}
# Exception thrown by read_file_header() when the container's avro.codec
# names a codec this reader does not support.
package Avro::DataFile::Error::UnsupportedCodec;
use parent 'Error::Simple';

1;
| eonezhang/avro | lang/perl/lib/Avro/DataFileReader.pm | Perl | apache-2.0 | 7,506 |
%Source: Ports of programs found in TPDB/C/AProVE_numeric
%query:test_fun(g,g).
% source: port of http://cl2-informatik.uibk.ac.at/mercurial.cgi/TPDB/file/72cccd64ec42/C/AProVE_numeric/svcomp_a.04.c
% Entry point: start the counting loop with an accumulator C of 0.
test_fun(X, Y) :- loop(X, Y, 0).
% While X > Y, increment Y and the step counter C, then recurse.
loop(X, Y, C) :- X > Y, Y1 is Y + 1, C1 is C + 1, loop(X, Y1, C1).
% Terminate once Y has caught up with (or passed) X.
loop(X, Y, C) :- X =< Y.
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Prolog/AProVE_numeric/svcomp-a-04-c.pl | Perl | mit | 324 |
# Glyph set drawing 'core_bin_marker' marker features on a slice.
package Bio::EnsEMBL::GlyphSet::corebinmarkers;
use strict;
use vars qw(@ISA);
use Bio::EnsEMBL::GlyphSet_simple;
@ISA = qw(Bio::EnsEMBL::GlyphSet_simple);

#sub my_label { return "Overgo Markers"; }

# Marker features of type 'core_bin_marker' on the current slice.
#
# BUG FIX: two debug `warn` statements were left in here; the first dumped
# the slice object's stringified ref to the log on every draw, and the
# second eagerly called get_all_MarkerFeatures a SECOND time just to log
# the list. Both removed.
sub features {
    my ($self) = @_;
    my $slice = $self->{'container'};
    return $slice->get_all_MarkerFeatures('core_bin_marker');
}

# External link target for a marker id (OVERGO URL template).
sub href {
    my( $self, $id ) = @_;
    return $self->ID_URL( 'OVERGO', $id );
}

# Zmenu (popup) entries: caption plus a link to the marker's OVERGO page.
sub zmenu {
    my( $self, $id ) = @_;
    return { 'caption' => "OVERGO ".$id, "$id" => $self->href( $id ) };
}
1;
#!/usr/bin/env perl
use 5.014; # do not lower; required for non-destructive substitution
use strict;
use warnings;
use autodie;
use File::Basename;
use File::Find;
use File::Copy qw(copy);
use File::Path qw(make_path remove_tree);
use IPC::System::Simple qw(system);
use Cwd qw(getcwd);
use Regexp::Common;
use Sort::Naturally;
use Getopt::Long;
my %opts;
GetOptions(\%opts,
'accession|id=s',
'sequence_format|sf=s',
'output_dir|p=s',
'repeat_database|db=s',
'threads|t=i',
'percent_identity|pid=i',
'coverage|cov=f',
'cluster_size|cls=i',
'genome_size|gs=i',
'species|s=s',
'h|help',
);
## check input
usage() and exit(0) if $opts{help};
if (!$opts{accession} || !$opts{output_dir} || !$opts{repeat_database} || !$opts{genome_size}) {
say "\n[ERROR]: Command line not parsed correctly. Check input and refer to the usage. Exiting.\n";
usage();
exit(1);
}
unless (-e $opts{output_dir}) {
make_path($opts{output_dir}, {verbose => 0, mode => 0711,});
}
my $sumdir = File::Spec->catdir($opts{output_dir}, 'annotation_summaries');
unless (-e $sumdir) {
make_path($sumdir, {verbose => 0, mode => 0711,});
}
my $outfile = File::Spec->catfile($opts{output_dir}, $opts{accession}.'_all_transposome_results.tsv');
open my $out, '>>', $outfile;
say $out join "\t", 'x_coverage', 'number_clustered', 'number_unclustered', 'repeat_fraction',
'singleton_fraction', 'total_repeat_percentage', 'total_annotated_repeat_percentage',
'family_count', 'elapsed_time';
## set defaults
$opts{threads} //= 1;
$opts{cluster_size} //= 100;
$opts{percent_identity} //= 90;
$opts{coverage} //= 0.55;
$opts{sequence_format} //= 'fastq';
my $merge_thresh = 100;
## try to get the sequence files
# Locate the paired FASTQ files for the accession, preferring uncompressed
# '<acc>_1.fastq'/'<acc>_2.fastq', then '.gz', then '.bz2' (each mate
# resolved independently, so mixed compression is tolerated).
my ($forward, $reverse) = ($opts{accession}.'_1.fastq', $opts{accession}.'_2.fastq');
unless (-e $forward && -e $reverse) {
    my $fgzip = $opts{accession}.'_1.fastq.gz';
    my $rgzip = $opts{accession}.'_2.fastq.gz';
    my $fbzip = $opts{accession}.'_1.fastq.bz2';
    my $rbzip = $opts{accession}.'_2.fastq.bz2';
    $forward = -e $fgzip ? $fgzip : $fbzip;
    $reverse = -e $rgzip ? $rgzip : $rbzip;
    unless (-e $forward && -e $reverse) {
        say STDERR "\n[ERROR]: Could not find sequence files. Make sure they exist and end with '.fastq', '.fastq.gz' or '.fastq.bz2'. Exiting.\n";
        exit(1);
    }
}
# Derive the sample base name from the forward read file (strip '_1' and
# anything after it).
my ($iname, $ipath, $isuffix) = fileparse($forward, qr/\.[^.]*/);
my $base = $iname =~ s/_1.*//r;

# For each genome-coverage level (as a percentage of the 1C genome size),
# precompute the number of bases, reads, and read pairs to sample.
# NOTE(review): reads = bases / 100 assumes a fixed 100 bp read length -
# TODO confirm this matches the sequencing data.
my %cvalues;
my $cvalue = $opts{genome_size} * 1e6;    # 1C size in bp (input was Mbp)
for my $x (qw(0.01 0.03 0.05 0.07 0.09 0.10)) {
    my $reads = $x*$cvalue / 100;
    my $bases = $x*$cvalue;
    my $pairs = $reads / 2;
    $cvalues{$x} = { bases => $bases, reads => $reads, pairs => $pairs };
}
# Main loop: for each coverage level, sample/interleave reads, generate a
# Transposome config, run Transposome, harvest its results, then clean up
# the per-level working data.
#
# BUG FIX: the --percent_identity and --coverage command-line options were
# parsed and defaulted above but never reached the generated config (which
# hard-coded 90/0.55). They are now forwarded via %run_opts; write_config
# ignores unknown keys, so this is backward compatible.
for my $xcov (sort { $a <=> $b } keys %cvalues) {
    say STDERR "=====> Working on $xcov...";
    # NOTE(review): $merge_thresh is bumped here but never used afterwards
    # - confirm whether it is vestigial.
    $merge_thresh += 100;
    my $intseq = join_pairs($cvalues{$xcov}{reads}, $forward, $reverse);
    my $outdir = File::Spec->catdir($opts{output_dir}, $base."_${xcov}X");
    my $logfile  = $base.'_transposome_'."${xcov}X".'.log';
    my $clogfile = $base.'_transposome_clustering_'."${xcov}X".'.log';

    # for getting family counts
    my $sumfile = File::Spec->catfile($outdir, $base.'_transposome_clustering_'."${xcov}X".'_annotations_summary.tsv');
    my $savfile = File::Spec->catfile($sumdir, $base.'_transposome_clustering_'."${xcov}X".'_annotations_summary.tsv');

    my %run_opts = (
        coverage          => $xcov,
        sequence_file     => $intseq,
        sequence_format   => $opts{sequence_format},
        threads           => $opts{threads},
        outdir            => $outdir,
        repeatdb          => $opts{repeat_database},
        cluster_size      => $opts{cluster_size},
        percent_identity  => $opts{percent_identity},
        fraction_coverage => $opts{coverage},
        logfile           => $logfile,
        cluster_logfile   => $clogfile);

    my $config = write_config(\%run_opts);
    system('transposome', '-c', $config) == 0 or die $!;

    my $reslog = File::Spec->catfile($outdir, $logfile);
    write_results($xcov, $cvalues{$xcov}{reads}, $reslog, $sumfile, $out);
    copy $sumfile, $savfile or die "[ERROR]: Copy failed: $!";

    # Drop the bulky per-level outputs; summaries were saved above.
    remove_tree($outdir, { safe => 1 });
    unlink $intseq, $config;
}
say STDERR "=====> Done running Transposome. Results can be found in: $opts{output_dir}";
exit;
#
# Methods
#
# Sample $sample_size reads total (half from each mate file, fixed seed 11
# so runs are reproducible) and interleave them into a gzipped FASTQ.
# Uses bash process substitution, hence system_bash(); requires 'seqtk' and
# 'pairfq' on PATH. Reads globals %opts and $base. Returns the output path.
sub join_pairs {
    my ($sample_size, $forward, $reverse) = @_;
    my $pair_size = sprintf("%.0f", $sample_size/2);
    my $intseq = File::Spec->catfile($opts{output_dir}, $base."_$sample_size".'_interl.fastq.gz');

    my $cmd = "pairfq joinpairs -f <(seqtk sample -s 11 $forward $pair_size) -r <(seqtk sample -s 11 $reverse $pair_size)";
    $cmd .= " -o $intseq -c gzip";
    system_bash($cmd);

    return $intseq;
}
# Scrape the Transposome run log for the summary statistics, count the
# annotated families in the summary TSV (all rows minus the ReadNum header),
# and append one tab-separated result row for this coverage level to the
# already-open $out handle. Any statistic absent from the log stays undef.
sub write_results {
    my ($xcov, $sample_size, $logfile, $sumfile, $out) = @_;

    open my $in, '<', $logfile;    # autodie makes open fatal on failure
    my ($clstot, $uclstot, $repfrac, $singfrac, $reptot, $annotot, $time);

    while (my $line = <$in>) {
        chomp $line;
        if ($line =~ /Results - Total sequences clustered:\s+($RE{num}{real})/) {
            $clstot = $1;
        }
        if ($line =~ /Results - Total sequences unclustered:\s+($RE{num}{real})/) {
            $uclstot = $1;
        }
        if ($line =~ /Results - Repeat fraction from clusters:\s+($RE{num}{real})/) {
            $repfrac = $1;
        }
        if ($line =~ /Results - Singleton repeat fraction:\s+($RE{num}{real})/) {
            $singfrac = $1;
        }
        if ($line =~ /Results - Total repeat fraction \(theoretical\):\s+($RE{num}{real})/) {
            $reptot = $1;
        }
        if ($line =~ /Results - Total repeat fraction from annotations \(biological\):\s+($RE{num}{real})/) {
            $annotot = $1;
        }
        if ($line =~ /======== Transposome completed at:.*Elapsed time: (.*). ========/) {
            $time = $1;
        }
    }

    # Family count = data rows in the annotation summary.
    open my $sum, '<', $sumfile;
    my $famct = 0;
    while (my $line = <$sum>) {
        chomp $line;
        next if $line =~ /^ReadNum/;
        $famct++;
    }
    close $sum;

    say $out join "\t", "${xcov}X ($sample_size reads)", $clstot, $uclstot, $repfrac, $singfrac, $reptot, $annotot, $famct, $time;

    return;
}
# Render a Transposome YAML configuration for one coverage level, write it
# to ./transposome_config_<coverage>X.yml, and return that filename.
#
# $run_opts carries: coverage, sequence_file, sequence_format, threads,
# outdir, repeatdb, cluster_size, logfile, cluster_logfile, and optionally
# percent_identity / fraction_coverage.
#
# BUG FIX: percent_identity and fraction_coverage were hard-coded to
# 90/0.55, silently ignoring the script's --percent_identity/--coverage
# options. Forwarded values are honored now; the hard-coded numbers remain
# as defaults, so callers that omit them see identical output.
sub write_config {
    my ($run_opts) = @_;

    my $pid = $run_opts->{percent_identity}  // 90;
    my $cov = $run_opts->{fraction_coverage} // 0.55;

    my $config =
"blast_input:
  - sequence_file: $run_opts->{sequence_file}
  - sequence_format: $run_opts->{sequence_format}
  - thread: $run_opts->{threads}
  - output_directory: $run_opts->{outdir}
clustering_options:
  - in_memory: 1
  - percent_identity: $pid
  - fraction_coverage: $cov
annotation_input:
  - repeat_database: $run_opts->{repeatdb}
annotation_options:
  - cluster_size: $run_opts->{cluster_size}
output:
  - run_log_file: $run_opts->{logfile}
  - cluster_log_file: $run_opts->{cluster_logfile}";

    my $config_file = "transposome_config_$run_opts->{coverage}X.yml";
    open my $out, '>', $config_file;
    say $out $config;
    close $out;

    return $config_file;
}
# Run a command line through bash (needed for the process substitution used
# by join_pairs). IPC::System::Simple's system() takes the permitted exit
# statuses (0..5) as its first argument and dies on any other status.
sub system_bash {
    my ($cmd) = @_;
    return system( [ 0 .. 5 ], 'bash', '-c', $cmd );
}
# Print the CLI usage text to STDERR. Returns print's (true) value, which
# is what lets the caller write `usage() and exit(0)`.
sub usage {
    my $script = basename($0);
    print STDERR <<END
USAGE: $script -a FILEBASE -o transposome_results_out -t 24 -db sunflower_tephra_annotations_repbase-fmt.fasta -gs 3600
Required:
    -a|accession            :    An SRA accession (or simply the file basename). There should be two files found
                                 in the working directory that end in '_1.fastq' and '_2.fastq'. In the example above,
                                 'FILEBASE_1.fastq.gz' and 'FILEBASE_2.fastq.gz' could be the actual file names. Note that
                                 the files may be compressed with gzip or bzip2, so the ending can be '.gz' or '.bzip2' but
                                 uncompressed files with the actual '.fastq' ending works fine as well.
    -o|output_dir           :    A name for the output directory to hold results.
    -db|repeat_database     :    A sequence file of repeats to be used for annotation. This file must be formatted for
                                 use with Transposome, see the following link:
                                 https://github.com/sestaton/Transposome/wiki/Preparing-sequence-data-for-Tranposome
    -gs|genome_size         :    The 1C genome size expressed in Mbp (e.g., use 2500 for a 2.5 Gbp genome).
Options:
    -sf|sequence_format     :    The input sequence format (Default: FASTQ).
    -pid|percent_identity   :    Percent identity between pairwise matches in all vs. all blast (Default: 90).
    -fcov|fraction_coverage :    The fraction coverage between pairwise matches in all vs. all blast (Default: 0.55).
    -cls|cluster_size       :    The minimum size of a cluster to be used for annotation (Default: 100).
    -t|threads              :    The number of parallel blast processes to run (Default: 1).
    -h|help                 :    Print a usage statement.
END
}
| sestaton/transposome-scripts | full_analysis_with_varying_coverage.pl | Perl | mit | 8,736 |
use strict;
use Data::Dumper;
use Carp;
#
# This is a SAS Component
#
=head1 NAME
get_relationship_IsRealLocationOf
=head1 SYNOPSIS
get_relationship_IsRealLocationOf [-c N] [-a] [--fields field-list] < ids > table.with.fields.added
=head1 DESCRIPTION
This relationship connects a specific instance of a compound in a model
to the specific instance of the model subcellular location where the compound exists.
Example:
get_relationship_IsRealLocationOf -a < ids > table.with.fields.added
would read in a file of ids and add a column for each field in the relationship.
The standard input should be a tab-separated table (i.e., each line
is a tab-separated set of fields). Normally, the last field in each
line would contain the id. If some other column contains the id,
use
-c N
where N is the column (from 1) that contains the id.
This is a pipe command. The input is taken from the standard input, and the
output is to the standard output.
=head1 COMMAND-LINE OPTIONS
Usage: get_relationship_IsRealLocationOf [arguments] < ids > table.with.fields.added
=over 4
=item -c num
Select the identifier from column num
=item -from field-list
Choose a set of fields from the LocationInstance
entity to return. Field-list is a comma-separated list of strings. The
following fields are available:
=over 4
=item id
=item index
=item label
=item pH
=item potential
=back
=item -rel field-list
Choose a set of fields from the relationship to return. Field-list is a comma-separated list of
strings. The following fields are available:
=over 4
=item from_link
=item to_link
=back
=item -to field-list
Choose a set of fields from the CompoundInstance entity to return. Field-list is a comma-separated list of
strings. The following fields are available:
=over 4
=item id
=item charge
=item formula
=back
=back
=head1 AUTHORS
L<The SEED Project|http://www.theseed.org>
=cut
use Bio::KBase::Utilities::ScriptThing;
use Bio::KBase::CDMI::CDMIClient;
use Getopt::Long;
#Default fields
my @all_from_fields = ( 'id', 'index', 'label', 'pH', 'potential' );
my @all_rel_fields = ( 'from_link', 'to_link', );
my @all_to_fields = ( 'id', 'charge', 'formula' );
my %all_from_fields = map { $_ => 1 } @all_from_fields;
my %all_rel_fields = map { $_ => 1 } @all_rel_fields;
my %all_to_fields = map { $_ => 1 } @all_to_fields;
my @default_fields = ('from-link', 'to-link');
my @from_fields;
my @rel_fields;
my @to_fields;
our $usage = <<'END';
Usage: get_relationship_IsRealLocationOf [arguments] < ids > table.with.fields.added
--show-fields
List the available fields.
-c num
Select the identifier from column num
--from field-list
Choose a set of fields from the LocationInstance
entity to return. Field-list is a comma-separated list of strings. The
following fields are available:
id
index
label
pH
potential
--rel field-list
Choose a set of fields from the relationship to return. Field-list is a comma-separated list of
strings. The following fields are available:
from_link
to_link
--to field-list
Choose a set of fields from the CompoundInstance entity to
return. Field-list is a comma-separated list of strings. The following fields are available:
id
charge
formula
END
# Command-line state.
my $column;       # -c: 1-based column of the input table holding the id
my $input_file;   # NOTE(review): never bound to a Getopt option below, so it
                  # is always undef and input is always read from STDIN.
my $a;            # -a: select all fields from all three sections
my $f;            # --from: comma-separated LocationInstance field list
my $r;            # --rel:  comma-separated relationship field list
my $t;            # --to:   comma-separated CompoundInstance field list
my $help;
my $show_fields;
my $i = "-";      # NOTE(review): parsed via 'i=s' but never used afterwards.

# Build the CDMI client and parse options in one step (the helper consumes
# @ARGV and returns a connected client object).
my $geO = Bio::KBase::CDMI::CDMIClient->new_get_entity_for_script("c=i" => \$column,
                                                                  "h" => \$help,
                                                                  "show-fields" => \$show_fields,
                                                                  "a" => \$a,
                                                                  "from=s" => \$f,
                                                                  "rel=s" => \$r,
                                                                  "to=s" => \$t,
                                                                  'i=s' => \$i);
if ($help) {
    print $usage;
    exit 0;
}
# --show-fields: list every selectable field per section and exit.
if ($show_fields)
{
    print "from fields:\n";
    print "    $_\n" foreach @all_from_fields;
    print "relation fields:\n";
    print "    $_\n" foreach @all_rel_fields;
    print "to fields:\n";
    print "    $_\n" foreach @all_to_fields;
    exit 0;
}
# -a is mutually exclusive with explicit field selections.
if ($a && ($f || $r || $t)) {die $usage};
if ($a) {
    # Select everything.
    @from_fields = @all_from_fields;
    @rel_fields = @all_rel_fields;
    @to_fields = @all_to_fields;
} elsif ($f || $t || $r) {
    # Validate each user-supplied list; accumulate the number of bad lists
    # so all error messages are printed before exiting.
    my $err = 0;
    if ($f) {
        @from_fields = split(",", $f);
        $err += check_fields(\@from_fields, %all_from_fields);
    }
    if ($r) {
        @rel_fields = split(",", $r);
        $err += check_fields(\@rel_fields, %all_rel_fields);
    }
    if ($t) {
        @to_fields = split(",", $t);
        $err += check_fields(\@to_fields, %all_to_fields);
    }
    if ($err) {exit 1;}
} else {
    # No selection at all: fall back to the default relationship columns.
    @rel_fields = @default_fields;
}
# Choose the input stream (see NOTE above: in practice this is always STDIN).
my $ih;
if ($input_file)
{
    open $ih, "<", $input_file or die "Cannot open input file $input_file: $!";
}
else
{
    $ih = \*STDIN;
}
# Main loop: read ids in batches, query the relationship server once per
# batch, then re-join the results to the original input lines so every
# matching result row is emitted as the input line plus the selected fields.
while (my @tuples = Bio::KBase::Utilities::ScriptThing::GetBatch($ih, undef, $column)) {
    # Each tuple is [id, original_input_line]; send just the ids.
    my @h = map { $_->[0] } @tuples;
    my $h = $geO->get_relationship_IsRealLocationOf(\@h, \@from_fields, \@rel_fields, \@to_fields);
    # Group result rows by their originating id so they can be matched back
    # to the input lines in input order.
    my %results;
    for my $result (@$h) {
        my @from;
        my @rel;
        my @to;
        my $from_id;
        # $result is a triple: [from-entity hash, relationship hash, to-entity hash].
        my $res = $result->[0];
        for my $key (@from_fields) {
            push (@from,$res->{$key});
        }
        $res = $result->[1];
        # 'from_link' ties this result row back to the queried id.
        $from_id = $res->{'from_link'};
        for my $key (@rel_fields) {
            push (@rel,$res->{$key});
        }
        $res = $result->[2];
        for my $key (@to_fields) {
            push (@to,$res->{$key});
        }
        if ($from_id) {
            push @{$results{$from_id}}, [@from, @rel, @to];
        }
    }
    # Emit one output line per (input line, result row) pair, preserving the
    # order of the input; ids with no results produce no output.
    for my $tuple (@tuples)
    {
        my($id, $line) = @$tuple;
        my $resultsForId = $results{$id};
        if ($resultsForId) {
            for my $result (@$resultsForId) {
                print join("\t", $line, @$result) . "\n";
            }
        }
    }
}
# Validate a requested field list against the set of known fields.
# Arguments: an array ref of requested field names, followed by the
# flattened membership hash (field_name => 1) of valid fields.
# Returns 0 when every field is known; otherwise prints a diagnostic to
# STDERR listing the unknown and valid fields and returns 1.
sub check_fields {
    my ($fields, %all_fields) = @_;
    my @unknown = grep { !$all_fields{$_} } @$fields;
    return 0 unless @unknown;
    my @valid = keys %all_fields;
    print STDERR "get_relationship_IsRealLocationOf: unknown fields @unknown. Valid fields are @valid\n";
    return 1;
}
| kbase/kb_seed | scripts/get_relationship_IsRealLocationOf.pl | Perl | mit | 6,039 |
#!/usr/bin/perl
###############################################################################
# Copyright (C) 1994 - 2009, Performance Dynamics Company #
# #
# This software is licensed as described in the file COPYING, which #
# you should have received as part of this distribution. The terms #
# are also available at http://www.perfdynamics.com/Tools/copyright.html. #
# #
# You may opt to use, copy, modify, merge, publish, distribute and/or sell #
# copies of the Software, and permit persons to whom the Software is #
# furnished to do so, under the terms of the COPYING file. #
# #
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY #
# KIND, either express or implied. #
###############################################################################
#
# $Id: closed1.pl,v 4.3 2009/03/26 02:55:32 pfeller Exp $
#
#------------------------------------------------------------------------------
use pdq;
#------------------------------------------------------------------------------
#
# Based on time_share.c
#
# Illustrates PDQ solver for closed uni-server queue. Compare with repair.c
#
#------------------------------------------------------------------------------
#---- Model specific variables ------------------------------------------------
# NOTE(review): these are package globals (no 'use strict'/'my'), matching the
# style of the other PDQ example scripts.
#$pop = 200.0;
$pop = 100.0;     # number of terminals/users in the closed system
$think = 300.0;   # mean think time per user (same time units as service time)
$servt = 0.63;    # mean CPU service demand per request
#---- Initialize the model giving it a name -----------------------------------
# The pdq:: calls below must occur in this order: Init, workload/node
# definitions, SetDemand, Solve, Report.
pdq::Init("Time Share Computer");
pdq::SetComment("This is just a simple M/M/1 queue.");
#---- Define the workload and circuit type ------------------------------------
$streams = pdq::CreateClosed("compile", $pdq::TERM, $pop, $think);
# So the value of $streams should correspond to the value of $pdq::streams
#---- Define the queueing center ----------------------------------------------
$nodes = pdq::CreateNode("CPU", $pdq::CEN, $pdq::FCFS);
# So the value of $nodes should correspond to the value of $pdq::nodes
#---- Define service demand ---------------------------------------------------
pdq::SetDemand("CPU", "compile", $servt);
#---- Solve the model ---------------------------------------------------------
# EXACT uses the exact MVA algorithm; Report() prints the solution to stdout.
pdq::Solve($pdq::EXACT);
pdq::Report();
| peterlharding/PDQ | examples/ppa_1998/chap2/closed1.pl | Perl | mit | 2,623 |
package strict;
$strict::VERSION = "1.07";
# Verify that we're called correctly so that strictures will work.
# The pragma must be loaded from a file literally named strict.pm (or .pmc);
# otherwise the $^H hint bits set in bits() would apply to the wrong scope.
unless ( __FILE__ =~ /(^|[\/\\])\Q${\__PACKAGE__}\E\.pmc?$/ ) {
    # Can't use Carp, since Carp uses us!
    my (undef, $f, $l) = caller;
    die("Incorrect use of pragma '${\__PACKAGE__}' at $f line $l.\n");
}
# Bits OR-ed into $^H (the compile-time hint variable) to enable each
# stricture category.
my %bitmask = (
    refs => 0x00000002,
    subs => 0x00000200,
    vars => 0x00000400
);
# Parallel bits recording that a category was requested *explicitly* by name
# (e.g. "use strict 'refs'"), as opposed to via the default import.
my %explicit_bitmask = (
    refs => 0x00000020,
    subs => 0x00000040,
    vars => 0x00000080
);
# Translate a list of stricture names ('refs', 'subs', 'vars') into the
# combined $^H bitmask for those categories.  Side effect: for each name
# given explicitly, the corresponding "explicit" bit is OR-ed into $^H
# immediately.  Unknown names are collected and reported in one croak.
# Returns the combined category bitmask (without the explicit bits).
sub bits {
    my $bits = 0;
    my @wrong;
    foreach my $s (@_) {
        if (exists $bitmask{$s}) {
            $^H |= $explicit_bitmask{$s};
        }
        else { push @wrong, $s };
        # '|| 0' keeps the accumulator numeric when $s was unknown.
        $bits |= $bitmask{$s} || 0;
    }
    if (@wrong) {
        # Carp is loaded lazily here to avoid a load-time circularity
        # (Carp itself uses strict).
        require Carp;
        Carp::croak("Unknown 'strict' tag(s) '@wrong'");
    }
    $bits;
}
# Categories enabled by a bare "use strict" / disabled by a bare "no strict".
my @default_bits = qw(refs subs vars);

# "use strict @list" -- OR the requested (or default) category bits into $^H.
sub import {
    shift;
    $^H |= bits(@_ ? @_ : @default_bits);
}

# "no strict @list" -- clear the requested (or default) category bits in $^H.
sub unimport {
    shift;
    $^H &= ~ bits(@_ ? @_ : @default_bits);
}

1;
__END__
| Bjay1435/capstone | rootfs/usr/share/perl/5.18.2/strict.pm | Perl | mit | 1,006 |
=head1 NAME
Attean::API::AbbreviatingParser - Role for parsers that construct absolute IRIs based on prefixed names or base IRIs
=head1 VERSION
This document describes Attean::API::AbbreviatingParser version 0.002
=head1 DESCRIPTION
The Attean::API::AbbreviatingParser role defines an API for parsers
of RDF and SPARQL result data that construct absolute IRIs during parsing by
using IRI prefixes and base IRIs.
=head1 ROLES
This role consumes the L<Attean::API::Parser> role.
=head1 ATTRIBUTES
The following attributes exist:
=over 4
=item C<< base >>
An L<IRI> object representing the base against which relative IRIs
in the parsed data are resolved (may be set explicitly or set by a directive in
the input data).
=item C<< namespaces >>
A HASH reference mapping prefix strings to L<URI::NamespaceMap> objects.
=back
=head1 BUGS
Please report any bugs or feature requests to through the GitHub web interface
at L<https://github.com/kasei/attean/issues>.
=head1 SEE ALSO
L<http://www.perlrdf.org/>
=head1 AUTHOR
Gregory Todd Williams C<< <gwilliams@cpan.org> >>
=head1 COPYRIGHT
Copyright (c) 2014 Gregory Todd Williams.
This program is free software; you can redistribute it and/or modify it under
the same terms as Perl itself.
=cut
| gitpan/Attean | lib/Attean/API/AbbreviatingParser.pod | Perl | mit | 1,266 |
#!/usr/bin/perl -w
use strict;
use Math::Int2Base qw( int2base );
my @words = (
"like", "just", "love", "know", "never", "want",
"time", "out", "there", "make", "look", "eye",
"down", "only", "think", "heart", "back", "then",
"into", "about", "more", "away", "still", "them",
"take", "thing", "even", "through", "long", "always",
"world", "too", "friend", "tell", "try", "hand",
"thought", "over", "here", "other", "need", "smile",
"again", "much", "cry", "been", "night", "ever",
"little", "said", "end", "some", "those", "around",
"mind", "people", "girl", "leave", "dream", "left",
"turn", "myself", "give", "nothing", "really", "off",
"before", "something", "find", "walk", "wish", "good",
"once", "place", "ask", "stop", "keep", "watch",
"seem", "everything", "wait", "got", "yet", "made",
"remember", "start", "alone", "run", "hope", "maybe",
"believe", "body", "hate", "after", "close", "talk",
"stand", "own", "each", "hurt", "help", "home",
"god", "soul", "new", "many", "two", "inside",
"should", "true", "first", "fear", "mean", "better",
"play", "another", "gone", "change", "use", "wonder",
"someone", "hair", "cold", "open", "best", "any",
"behind", "happen", "water", "dark", "laugh", "stay",
"forever", "name", "work", "show", "sky", "break",
"came", "deep", "door", "put", "black", "together",
"upon", "happy", "such", "great", "white", "matter",
"fill", "past", "please", "burn", "cause", "enough",
"touch", "moment", "soon", "voice", "scream", "anything",
"stare", "sound", "red", "everyone", "hide", "kiss",
"truth", "death", "beautiful", "mine", "blood", "broken",
"very", "pass", "next", "forget", "tree", "wrong",
"air", "mother", "understand", "lip", "hit", "wall",
"memory", "sleep", "free", "high", "realize", "school",
"might", "skin", "sweet", "perfect", "blue", "kill",
"breath", "dance", "against", "fly", "between", "grow",
"strong", "under", "listen", "bring", "sometimes", "speak",
"pull", "person", "become", "family", "begin", "ground",
"real", "small", "father", "sure", "feet", "rest",
"young", "finally", "land", "across", "today", "different",
"guy", "line", "fire", "reason", "reach", "second",
"slowly", "write", "eat", "smell", "mouth", "step",
"learn", "three", "floor", "promise", "breathe", "darkness",
"push", "earth", "guess", "save", "song", "above",
"along", "both", "color", "house", "almost", "sorry",
"anymore", "brother", "okay", "dear", "game", "fade",
"already", "apart", "warm", "beauty", "heard", "notice",
"question", "shine", "began", "piece", "whole", "shadow",
"secret", "street", "within", "finger", "point", "morning",
"whisper", "child", "moon", "green", "story", "glass",
"kid", "silence", "since", "soft", "yourself", "empty",
"shall", "angel", "answer", "baby", "bright", "dad",
"path", "worry", "hour", "drop", "follow", "power",
"war", "half", "flow", "heaven", "act", "chance",
"fact", "least", "tired", "children", "near", "quite",
"afraid", "rise", "sea", "taste", "window", "cover",
"nice", "trust", "lot", "sad", "cool", "force",
"peace", "return", "blind", "easy", "ready", "roll",
"rose", "drive", "held", "music", "beneath", "hang",
"mom", "paint", "emotion", "quiet", "clear", "cloud",
"few", "pretty", "bird", "outside", "paper", "picture",
"front", "rock", "simple", "anyone", "meant", "reality",
"road", "sense", "waste", "bit", "leaf", "thank",
"happiness", "meet", "men", "smoke", "truly", "decide",
"self", "age", "book", "form", "alive", "carry",
"escape", "damn", "instead", "able", "ice", "minute",
"throw", "catch", "leg", "ring", "course", "goodbye",
"lead", "poem", "sick", "corner", "desire", "known",
"problem", "remind", "shoulder", "suppose", "toward", "wave",
"drink", "jump", "woman", "pretend", "sister", "week",
"human", "joy", "crack", "grey", "pray", "surprise",
"dry", "knee", "less", "search", "bleed", "caught",
"clean", "embrace", "future", "king", "son", "sorrow",
"chest", "hug", "remain", "sat", "worth", "blow",
"daddy", "final", "parent", "tight", "also", "create",
"lonely", "safe", "cross", "dress", "evil", "silent",
"bone", "fate", "perhaps", "anger", "class", "scar",
"snow", "tiny", "tonight", "continue", "control", "dog",
"edge", "mirror", "month", "suddenly", "comfort", "given",
"loud", "quickly", "gaze", "plan", "rush", "stone",
"town", "battle", "ignore", "spirit", "stood", "stupid",
"yours", "brown", "build", "dust", "hey", "kept",
"pay", "phone", "twist", "although", "ball", "beyond",
"hidden", "nose", "taken", "fail", "float", "pure",
"somehow", "wash", "wrap", "angry", "cheek", "creature",
"forgotten", "heat", "rip", "single", "space", "special",
"weak", "whatever", "yell", "anyway", "blame", "job",
"choose", "country", "curse", "drift", "echo", "figure",
"grew", "laughter", "neck", "suffer", "worse", "yeah",
"disappear", "foot", "forward", "knife", "mess", "somewhere",
"stomach", "storm", "beg", "idea", "lift", "offer",
"breeze", "field", "five", "often", "simply", "stuck",
"win", "allow", "confuse", "enjoy", "except", "flower",
"seek", "strength", "calm", "grin", "gun", "heavy",
"hill", "large", "ocean", "shoe", "sigh", "straight",
"summer", "tongue", "accept", "crazy", "everyday", "exist",
"grass", "mistake", "sent", "shut", "surround", "table",
"ache", "brain", "destroy", "heal", "nature", "shout",
"sign", "stain", "choice", "doubt", "glance", "glow",
"mountain", "queen", "stranger", "throat", "tomorrow", "city",
"either", "fish", "flame", "rather", "shape", "spin",
"spread", "ash", "distance", "finish", "image", "imagine",
"important", "nobody", "shatter", "warmth", "became", "feed",
"flesh", "funny", "lust", "shirt", "trouble", "yellow",
"attention", "bare", "bite", "money", "protect", "amaze",
"appear", "born", "choke", "completely", "daughter", "fresh",
"friendship", "gentle", "probably", "six", "deserve", "expect",
"grab", "middle", "nightmare", "river", "thousand", "weight",
"worst", "wound", "barely", "bottle", "cream", "regret",
"relationship", "stick", "test", "crush", "endless", "fault",
"itself", "rule", "spill", "art", "circle", "join",
"kick", "mask", "master", "passion", "quick", "raise",
"smooth", "unless", "wander", "actually", "broke", "chair",
"deal", "favorite", "gift", "note", "number", "sweat",
"box", "chill", "clothes", "lady", "mark", "park",
"poor", "sadness", "tie", "animal", "belong", "brush",
"consume", "dawn", "forest", "innocent", "pen", "pride",
"stream", "thick", "clay", "complete", "count", "draw",
"faith", "press", "silver", "struggle", "surface", "taught",
"teach", "wet", "bless", "chase", "climb", "enter",
"letter", "melt", "metal", "movie", "stretch", "swing",
"vision", "wife", "beside", "crash", "forgot", "guide",
"haunt", "joke", "knock", "plant", "pour", "prove",
"reveal", "steal", "stuff", "trip", "wood", "wrist",
"bother", "bottom", "crawl", "crowd", "fix", "forgive",
"frown", "grace", "loose", "lucky", "party", "release",
"surely", "survive", "teacher", "gently", "grip", "speed",
"suicide", "travel", "treat", "vein", "written", "cage",
"chain", "conversation", "date", "enemy", "however", "interest",
"million", "page", "pink", "proud", "sway", "themselves",
"winter", "church", "cruel", "cup", "demon", "experience",
"freedom", "pair", "pop", "purpose", "respect", "shoot",
"softly", "state", "strange", "bar", "birth", "curl",
"dirt", "excuse", "lord", "lovely", "monster", "order",
"pack", "pants", "pool", "scene", "seven", "shame",
"slide", "ugly", "among", "blade", "blonde", "closet",
"creek", "deny", "drug", "eternity", "gain", "grade",
"handle", "key", "linger", "pale", "prepare", "swallow",
"swim", "tremble", "wheel", "won", "cast", "cigarette",
"claim", "college", "direction", "dirty", "gather", "ghost",
"hundred", "loss", "lung", "orange", "present", "swear",
"swirl", "twice", "wild", "bitter", "blanket", "doctor",
"everywhere", "flash", "grown", "knowledge", "numb", "pressure",
"radio", "repeat", "ruin", "spend", "unknown", "buy",
"clock", "devil", "early", "false", "fantasy", "pound",
"precious", "refuse", "sheet", "teeth", "welcome", "add",
"ahead", "block", "bury", "caress", "content", "depth",
"despite", "distant", "marry", "purple", "threw", "whenever",
"bomb", "dull", "easily", "grasp", "hospital", "innocence",
"normal", "receive", "reply", "rhyme", "shade", "someday",
"sword", "toe", "visit", "asleep", "bought", "center",
"consider", "flat", "hero", "history", "ink", "insane",
"muscle", "mystery", "pocket", "reflection", "shove", "silently",
"smart", "soldier", "spot", "stress", "train", "type",
"view", "whether", "bus", "energy", "explain", "holy",
"hunger", "inch", "magic", "mix", "noise", "nowhere",
"prayer", "presence", "shock", "snap", "spider", "study",
"thunder", "trail", "admit", "agree", "bag", "bang",
"bound", "butterfly", "cute", "exactly", "explode", "familiar",
"fold", "further", "pierce", "reflect", "scent", "selfish",
"sharp", "sink", "spring", "stumble", "universe", "weep",
"women", "wonderful", "action", "ancient", "attempt", "avoid",
"birthday", "branch", "chocolate", "core", "depress", "drunk",
"especially", "focus", "fruit", "honest", "match", "palm",
"perfectly", "pillow", "pity", "poison", "roar", "shift",
"slightly", "thump", "truck", "tune", "twenty", "unable",
"wipe", "wrote", "coat", "constant", "dinner", "drove",
"egg", "eternal", "flight", "flood", "frame", "freak",
"gasp", "glad", "hollow", "motion", "peer", "plastic",
"root", "screen", "season", "sting", "strike", "team",
"unlike", "victim", "volume", "warn", "weird", "attack",
"await", "awake", "built", "charm", "crave", "despair",
"fought", "grant", "grief", "horse", "limit", "message",
"ripple", "sanity", "scatter", "serve", "split", "string",
"trick", "annoy", "blur", "boat", "brave", "clearly",
"cling", "connect", "fist", "forth", "imagination", "iron",
"jock", "judge", "lesson", "milk", "misery", "nail",
"naked", "ourselves", "poet", "possible", "princess", "sail",
"size", "snake", "society", "stroke", "torture", "toss",
"trace", "wise", "bloom", "bullet", "cell", "check",
"cost", "darling", "during", "footstep", "fragile", "hallway",
"hardly", "horizon", "invisible", "journey", "midnight", "mud",
"nod", "pause", "relax", "shiver", "sudden", "value",
"youth", "abuse", "admire", "blink", "breast", "bruise",
"constantly", "couple", "creep", "curve", "difference", "dumb",
"emptiness", "gotta", "honor", "plain", "planet", "recall",
"rub", "ship", "slam", "soar", "somebody", "tightly",
"weather", "adore", "approach", "bond", "bread", "burst",
"candle", "coffee", "cousin", "crime", "desert", "flutter",
"frozen", "grand", "heel", "hello", "language", "level",
"movement", "pleasure", "powerful", "random", "rhythm", "settle",
"silly", "slap", "sort", "spoken", "steel", "threaten",
"tumble", "upset", "aside", "awkward", "bee", "blank",
"board", "button", "card", "carefully", "complain", "crap",
"deeply", "discover", "drag", "dread", "effort", "entire",
"fairy", "giant", "gotten", "greet", "illusion", "jeans",
"leap", "liquid", "march", "mend", "nervous", "nine",
"replace", "rope", "spine", "stole", "terror", "accident",
"apple", "balance", "boom", "childhood", "collect", "demand",
"depression", "eventually", "faint", "glare", "goal", "group",
"honey", "kitchen", "laid", "limb", "machine", "mere",
"mold", "murder", "nerve", "painful", "poetry", "prince",
"rabbit", "shelter", "shore", "shower", "soothe", "stair",
"steady", "sunlight", "tangle", "tease", "treasure", "uncle",
"begun", "bliss", "canvas", "cheer", "claw", "clutch",
"commit", "crimson", "crystal", "delight", "doll", "existence",
"express", "fog", "football", "gay", "goose", "guard",
"hatred", "illuminate", "mass", "math", "mourn", "rich",
"rough", "skip", "stir", "student", "style", "support",
"thorn", "tough", "yard", "yearn", "yesterday", "advice",
"appreciate", "autumn", "bank", "beam", "bowl", "capture",
"carve", "collapse", "confusion", "creation", "dove", "feather",
"girlfriend", "glory", "government", "harsh", "hop", "inner",
"loser", "moonlight", "neighbor", "neither", "peach", "pig",
"praise", "screw", "shield", "shimmer", "sneak", "stab",
"subject", "throughout", "thrown", "tower", "twirl", "wow",
"army", "arrive", "bathroom", "bump", "cease", "cookie",
"couch", "courage", "dim", "guilt", "howl", "hum",
"husband", "insult", "led", "lunch", "mock", "mostly",
"natural", "nearly", "needle", "nerd", "peaceful", "perfection",
"pile", "price", "remove", "roam", "sanctuary", "serious",
"shiny", "shook", "sob", "stolen", "tap", "vain",
"void", "warrior", "wrinkle", "affection", "apologize", "blossom",
"bounce", "bridge", "cheap", "crumble", "decision", "descend",
"desperately", "dig", "dot", "flip", "frighten", "heartbeat",
"huge", "lazy", "lick", "odd", "opinion", "process",
"puzzle", "quietly", "retreat", "score", "sentence", "separate",
"situation", "skill", "soak", "square", "stray", "taint",
"task", "tide", "underneath", "veil", "whistle", "anywhere",
"bedroom", "bid", "bloody", "burden", "careful", "compare",
"concern", "curtain", "decay", "defeat", "describe", "double",
"dreamer", "driver", "dwell", "evening", "flare", "flicker",
"grandma", "guitar", "harm", "horrible", "hungry", "indeed",
"lace", "melody", "monkey", "nation", "object", "obviously",
"rainbow", "salt", "scratch", "shown", "shy", "stage",
"stun", "third", "tickle", "useless", "weakness", "worship",
"worthless", "afternoon", "beard", "boyfriend", "bubble", "busy",
"certain", "chin", "concrete", "desk", "diamond", "doom",
"drawn", "due", "felicity", "freeze", "frost", "garden",
"glide", "harmony", "hopefully", "hunt", "jealous", "lightning",
"mama", "mercy", "peel", "physical", "position", "pulse",
"punch", "quit", "rant", "respond", "salty", "sane",
"satisfy", "savior", "sheep", "slept", "social", "sport",
"tuck", "utter", "valley", "wolf", "aim", "alas",
"alter", "arrow", "awaken", "beaten", "belief", "brand",
"ceiling", "cheese", "clue", "confidence", "connection", "daily",
"disguise", "eager", "erase", "essence", "everytime", "expression",
"fan", "flag", "flirt", "foul", "fur", "giggle",
"glorious", "ignorance", "law", "lifeless", "measure", "mighty",
"muse", "north", "opposite", "paradise", "patience", "patient",
"pencil", "petal", "plate", "ponder", "possibly", "practice",
"slice", "spell", "stock", "strife", "strip", "suffocate",
"suit", "tender", "tool", "trade", "velvet", "verse",
"waist", "witch", "aunt", "bench", "bold", "cap",
"certainly", "click", "companion", "creator", "dart", "delicate",
"determine", "dish", "dragon", "drama", "drum", "dude",
"everybody", "feast", "forehead", "former", "fright", "fully",
"gas", "hook", "hurl", "invite", "juice", "manage",
"moral", "possess", "raw", "rebel", "royal", "scale",
"scary", "several", "slight", "stubborn", "swell", "talent",
"tea", "terrible", "thread", "torment", "trickle", "usually",
"vast", "violence", "weave", "acid", "agony", "ashamed",
"awe", "belly", "blend", "blush", "character", "cheat",
"common", "company", "coward", "creak", "danger", "deadly",
"defense", "define", "depend", "desperate", "destination", "dew",
"duck", "dusty", "embarrass", "engine", "example", "explore",
"foe", "freely", "frustrate", "generation", "glove", "guilty",
"health", "hurry", "idiot", "impossible", "inhale", "jaw",
"kingdom", "mention", "mist", "moan", "mumble", "mutter",
"observe", "ode", "pathetic", "pattern", "pie", "prefer",
"puff", "rape", "rare", "revenge", "rude", "scrape",
"spiral", "squeeze", "strain", "sunset", "suspend", "sympathy",
"thigh", "throne", "total", "unseen", "weapon", "weary"
);
# Header: report how many words are in the list.  The original printed
# $#words, which is the highest index, so the reported count was off by one.
my $word_count = scalar @words;
print "$word_count words\n\n";

# Emit each word with its 5-die diceware code: the word's index is written
# in base 6 (digits 0-5), zero-padded to five digits, and every 0 digit is
# then remapped to 6 so each position is a valid die face (1-6).
my $counter = 0;
foreach my $word (@words) {
    my $dice = int2base($counter, 6);   # base-6 representation of the index
    $dice = sprintf("%05d", $dice);     # pad to five dice rolls
    $dice =~ s/0/6/g;                   # base-6 digit 0 maps to die face 6
    print "$dice $word\n";
    $counter++;
}
| vstoykovbg/make-random-seed | Diceware-like-list-for-Electrum-1.pl | Perl | mit | 24,310 |
# DBIx::Class schema root for the BeerList application.  Result and ResultSet
# classes are discovered automatically from the BeerList::Schema:: namespace.
package BeerList::Schema;
use Moose;
use namespace::autoclean;
extends 'DBIx::Class::Schema';
# Load all Result/ResultSet classes under this namespace.
__PACKAGE__->load_namespaces();
# inline_constructor => 0 keeps DBIx::Class's own new() instead of letting
# Moose generate one.
__PACKAGE__->meta->make_immutable(inline_constructor => 0);
1;
| bentglasstube/beerlist | lib/BeerList/Schema.pm | Perl | mit | 194 |
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
=pod
=head1 NAME
Bio::EnsEMBL::Analysis::RunnableDB::Gsnap
=head1 SYNOPSIS
my $runnableDB = Bio::EnsEMBL::Analysis::RunnableDB::Gsnap->new( );
$runnableDB->fetch_input();
$runnableDB->run();
=head1 DESCRIPTION
This module uses Gsnap to align fastq to a genomic sequence
=head1 CONTACT
Post general queries to B<ensembl-dev@ebi.ac.uk>
=head1 APPENDIX
=cut
package Bio::EnsEMBL::Analysis::RunnableDB::Gsnap;
use warnings ;
use strict;
use Bio::EnsEMBL::Utils::Exception qw(throw warning);
use Bio::EnsEMBL::Analysis::Config::GeneBuild::Gsnap;
use Bio::EnsEMBL::Analysis::Runnable::Gsnap;
use Bio::EnsEMBL::Analysis::RunnableDB::BaseGeneBuild;
use vars qw(@ISA);
@ISA = ("Bio::EnsEMBL::Analysis::RunnableDB::BaseGeneBuild");
# Constructor: build the RunnableDB via the superclass, then load and
# validate the per-logic-name Gsnap configuration block.
sub new {
    my ( $class, @args ) = @_;
    my $self = $class->SUPER::new(@args);
    # $GSNAP_CONFIG_BY_LOGIC comes from Bio::EnsEMBL::Analysis::Config::GeneBuild::Gsnap.
    $self->read_and_check_config($GSNAP_CONFIG_BY_LOGIC);
    return $self;
}
# Prepare the Gsnap runnable: the input fastq is located as INDIR/<input_id>,
# and all alignment parameters are taken from the analysis/config accessors.
# The constructed runnable is stored on $self for run() to execute.
sub fetch_input {
    my ($self) = @_;
    my %parameters = %{$self->parameters_hash};
    my $program = $self->analysis->program_file;
    # The fastq file name is the pipeline input id, resolved under INDIR.
    my $filename = $self->INDIR ."/" .$self->input_id;
    $self->throw("Gsnap program not defined in analysis \n")
        if not defined $program;
    my $runnable = Bio::EnsEMBL::Analysis::Runnable::Gsnap->new
        (
         -analysis   => $self->analysis,
         -program    => $program,
         -options    => $self->OPTIONS,
         -indir      => $self->INDIR,
         -outdir     => $self->OUTDIR,
         -genome     => $self->GENOMEDIR,
         -genomename => $self->GENOMENAME,
         -fastq      => $filename,
         -paired     => $self->PAIRED,
         -samtools   => $self->SAMTOOLS_PATH,
         -header     => $self->HEADER,
         %parameters,
        );
    $self->runnable($runnable);
}
# Execute the single Gsnap runnable prepared by fetch_input().  Any
# exception raised during the run is trimmed of its trailing newline and
# rethrown through the analysis throw() mechanism.
sub run {
    my ($self) = @_;

    my $runnables = $self->runnable
        or $self->throw("Can't run - no runnable objects");
    my ($gsnap_runnable) = @{$runnables};

    eval { $gsnap_runnable->run };
    if ($@) {
        my $err = $@;
        chomp $err;
        $self->throw("ERROR $err \n");
    }
}
# override write output as we have nothing for the db
sub write_output {
my ($self) = @_;
}
#Containers
#=================================================================
# Accessor for the output directory (where alignment results are written).
# With a defined argument, stores it; always returns the stored value, or
# undef when unset.  (Removed a leftover unused "$total_reads" variable
# that had been copy-pasted into this accessor.)
sub OUTDIR {
    my ($self,$value) = @_;
    if (defined $value) {
        $self->{'_OUTDIR'} = $value;
    }
    if (exists($self->{'_OUTDIR'})) {
        return $self->{'_OUTDIR'};
    } else {
        return undef;
    }
}
# Accessor for the input directory containing the fastq files.
# Stores a defined argument; returns the stored value or undef when unset.
sub INDIR {
    my ($self, $dir) = @_;
    $self->{'_INDIR'} = $dir if defined $dir;
    return exists $self->{'_INDIR'} ? $self->{'_INDIR'} : undef;
}
# Accessor for the directory holding the Gsnap genome index.
# Stores a defined argument; returns the stored value or undef when unset.
sub GENOMEDIR {
    my ($self, $dir) = @_;
    $self->{'_GENOMEDIR'} = $dir if defined $dir;
    return exists $self->{'_GENOMEDIR'} ? $self->{'_GENOMEDIR'} : undef;
}
# Accessor for the name of the Gsnap genome index to align against.
# Stores a defined argument; returns the stored value or undef when unset.
sub GENOMENAME {
    my ($self, $name) = @_;
    $self->{'_GENOMENAME'} = $name if defined $name;
    return exists $self->{'_GENOMENAME'} ? $self->{'_GENOMENAME'} : undef;
}
# Accessor for the extra command-line options passed to Gsnap.
# Stores a defined argument; returns the stored value or undef when unset.
sub OPTIONS {
    my ($self, $opts) = @_;
    $self->{'_OPTIONS'} = $opts if defined $opts;
    return exists $self->{'_OPTIONS'} ? $self->{'_OPTIONS'} : undef;
}
# Accessor for the paired-end flag (true when reads are paired).
# Stores a defined argument; returns the stored value or undef when unset.
sub PAIRED {
    my ($self, $flag) = @_;
    $self->{'_PAIRED'} = $flag if defined $flag;
    return exists $self->{'_PAIRED'} ? $self->{'_PAIRED'} : undef;
}
# Accessor for the path to the samtools executable.
# Stores a defined argument; returns the stored value or undef when unset.
sub SAMTOOLS_PATH {
    my ($self, $path) = @_;
    $self->{'_SAMTOOLS_PATH'} = $path if defined $path;
    return exists $self->{'_SAMTOOLS_PATH'} ? $self->{'_SAMTOOLS_PATH'} : undef;
}
# Accessor for the SAM header (or header file) used when writing output.
# Stores a defined argument; returns the stored value or undef when unset.
sub HEADER {
    my ($self, $header) = @_;
    $self->{'_HEADER'} = $header if defined $header;
    return exists $self->{'_HEADER'} ? $self->{'_HEADER'} : undef;
}
| mn1/ensembl-analysis | modules/Bio/EnsEMBL/Analysis/RunnableDB/Gsnap.pm | Perl | apache-2.0 | 4,504 |
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::fortinet::fortimanager::snmp::mode::memory;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
# Emit the 'used' performance-data metric (bytes), with warning/critical
# thresholds rescaled from percentages to absolute bytes against the total.
sub custom_usage_perfdata {
    my ($self, %options) = @_;
    $self->{output}->perfdata_add(label => 'used', unit => 'B',
                                  value => $self->{result_values}->{used},
                                  # cast_int converts the percent-based thresholds to integral byte values
                                  warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning-' . $self->{label}, total => $self->{result_values}->{total}, cast_int => 1),
                                  critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical-' . $self->{label}, total => $self->{result_values}->{total}, cast_int => 1),
                                  min => 0, max => $self->{result_values}->{total});
}
# Evaluate the used-percentage against the critical threshold first, then
# the warning threshold, and return the resulting exit status string.
sub custom_usage_threshold {
    my ($self, %options) = @_;
    return $self->{perfdata}->threshold_check(
        value     => $self->{result_values}->{prct_used},
        threshold => [
            { label => 'critical-' . $self->{label}, exit_litteral => 'critical' },
            { label => 'warning-' . $self->{label},  exit_litteral => 'warning' },
        ],
    );
}
# Build the human-readable status line, converting the raw byte counts to
# scaled units (KB/MB/GB...) via the perfdata helper.
sub custom_usage_output {
    my ($self, %options) = @_;
    my ($total_size_value, $total_size_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{total});
    my ($total_used_value, $total_used_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{used});
    my ($total_free_value, $total_free_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{free});
    my $msg = sprintf("Memory Usage Total: %s Used: %s (%.2f%%) Free: %s (%.2f%%)",
                      $total_size_value . " " . $total_size_unit,
                      $total_used_value . " " . $total_used_unit, $self->{result_values}->{prct_used},
                      $total_free_value . " " . $total_free_unit, $self->{result_values}->{prct_free});
    return $msg;
}
# Derive the usage figures from the freshly collected datas: total and used
# come straight from new_datas (keyed by "<instance>_total"/"<instance>_used"),
# free and the two percentages are computed from them.  Returns 0 on success.
sub custom_usage_calc {
    my ($self, %options) = @_;
    my $prefix = $self->{instance};
    my $total  = $options{new_datas}->{$prefix . '_total'};
    my $used   = $options{new_datas}->{$prefix . '_used'};
    my $free   = $total - $used;
    $self->{result_values}->{total}     = $total;
    $self->{result_values}->{used}      = $used;
    $self->{result_values}->{free}      = $free;
    $self->{result_values}->{prct_free} = $free * 100 / $total;
    $self->{result_values}->{prct_used} = $used * 100 / $total;
    return 0;
}
# Declare the counter layout for the templated counter framework: a single
# global (type 0) 'memory' counter group whose 'usage' entry delegates
# calculation, output, perfdata and threshold checking to the custom
# closures defined above.
sub set_counters {
    my ($self, %options) = @_;
    $self->{maps_counters_type} = [
        { name => 'memory', type => 0 }
    ];
    $self->{maps_counters}->{memory} = [
        { label => 'usage', set => {
                # raw values populated by manage_selection()
                key_values => [ { name => 'used' }, { name => 'total' } ],
                closure_custom_calc => $self->can('custom_usage_calc'),
                closure_custom_output => $self->can('custom_usage_output'),
                closure_custom_perfdata => $self->can('custom_usage_perfdata'),
                closure_custom_threshold_check => $self->can('custom_usage_threshold'),
            }
        },
    ];
}
# Constructor: delegate to the counter-template base class; this mode adds
# no command-line options of its own.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;
    $options{options}->add_options(arguments =>
        {
        });
    return $self;
}
# Collect memory figures from the FortiManager MIB.  Both OIDs report
# kilobytes, so values are converted to bytes for the counter framework.
sub manage_selection {
    my ($self, %options) = @_;
    my $oid_fmSysMemCapacity = '.1.3.6.1.4.1.12356.103.2.1.3.0'; # KB
    my $oid_fmSysMemUsed = '.1.3.6.1.4.1.12356.103.2.1.2.0'; # KB
    # nothing_quit => 1 aborts the plugin if the SNMP query returns nothing.
    my $result = $options{snmp}->get_leef(oids => [$oid_fmSysMemCapacity, $oid_fmSysMemUsed],
                                          nothing_quit => 1);
    $self->{memory} = { used => $result->{$oid_fmSysMemUsed} * 1024, total => $result->{$oid_fmSysMemCapacity} * 1024 };
}
1;
__END__
=head1 MODE
Check memory usage.
=over 8
=item B<--warning-usage>
Threshold warning (in percent).
=item B<--critical-usage>
Threshold critical (in percent).
=back
=cut | centreon/centreon-plugins | network/fortinet/fortimanager/snmp/mode/memory.pm | Perl | apache-2.0 | 4,933 |
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::redback::snmp::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_snmp);
# Constructor: register the modes this plugin exposes.  Keys are the
# user-facing --mode names, values are the implementing packages.
sub new {
    my ($class, %options) = @_;

    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';

    my %modes = (
        'cpu'             => 'network::redback::snmp::mode::cpu',
        'disk-usage'      => 'network::redback::snmp::mode::disk',
        'hardware'        => 'network::redback::snmp::mode::hardware',
        'interfaces'      => 'snmp_standard::mode::interfaces',
        'list-interfaces' => 'snmp_standard::mode::listinterfaces',
        'memory'          => 'network::redback::snmp::mode::memory',
    );
    # Fill the existing hash through the reference (same effect as the
    # usual %{$self->{modes}} = (...) idiom used across these plugins).
    %{$self->{modes}} = %modes;

    return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check Redback hardware in SNMP.
=cut
| bcournaud/centreon-plugins | network/redback/snmp/plugin.pm | Perl | apache-2.0 | 1,707 |
#!/usr/bin/perl
use strict;
use warnings;
use CQS::FileUtils;
use CQS::SystemUtils;
use CQS::ConfigUtils;
use CQS::ClassFactory;
# External tool and reference-data locations (cluster-specific absolute paths).
my $cqstools = "/home/shengq1/cqstools/CQS.Tools.exe";
my $hg19_mrna_gff = "/data/cqs/shengq1/reference/miRBase20/hsa.gff3";
my $hg19_trna_bed = "/data/cqs/guoy1/reference/smallrna/hg19_tRNA_ucsc_ensembl.bed";
my $hg19_trna_fasta = "/data/cqs/guoy1/reference/smallrna/hg19_tRNA_ucsc_ensembl.bed.fa";
my $hg19_smallrna_bed = "/data/cqs/guoy1/reference/smallrna/hg19_smallRNA_ucsc_ensembl.bed";

# Working directory for all task output (created if absent) and the address
# that the scheduler notifies about job status.
my $target_dir = create_directory_or_die("/scratch/cqs/shengq1/pipelines/miRNAseq-pipeline");
my $email = "quanhu.sheng\@vanderbilt.edu";
my $task_name = "mirna";
my $samtools = "/home/shengq1/local/bin/samtools/samtools";

# bowtie1 options used by the 1-mismatch alignment tasks below, and the
# hg19 index they run against.
my $bowtie1_option_1mm = "-a -m 100 --best --strata -v 1 -l 12 -p 8";
my $bowtie1_human_index = "/data/cqs/guoy1/reference/hg19/bowtie_index/hg19";

# Options passed through to the CQS counting tools.
my $mirnacount_option = "-s"; #ignore score
my $trnacount_option = "--length --sequence";
my $mirna_overlap_count_option = "-s --gtf_key miRNA";
my $fasta_file = "/data/cqs/shengq1/reference/miRBase20/mature.dna.fa";

# Input fastq file(s) per sample, and the sample grouping used when
# building the summary tables.
my $files = {
  "Sample1" => ["/gpfs21/scratch/cqs/shengq1/vangard/VANGARD00055_guoyan_mirna_v2/data/Sample1_12.fastq.gz"],
  "Sample2" => ["/gpfs21/scratch/cqs/shengq1/vangard/VANGARD00055_guoyan_mirna_v2/data/Sample2_12.fastq.gz"],
  "Sample3" => ["/gpfs21/scratch/cqs/shengq1/vangard/VANGARD00055_guoyan_mirna_v2/data/Sample3_12.fastq.gz"],
  "Sample4" => ["/gpfs21/scratch/cqs/shengq1/vangard/VANGARD00055_guoyan_mirna_v2/data/Sample4_12.fastq.gz"],
  "Sample5" => ["/gpfs21/scratch/cqs/shengq1/vangard/VANGARD00055_guoyan_mirna_v2/data/Sample5_12.fastq.gz"],
};
my $groups = { "Sample" => [ "Sample1", "Sample2", "Sample3", "Sample4", "Sample5" ] };
# Pipeline definition consumed by performConfig (CQS::ClassFactory).  Each
# top-level key except 'general' describes one task: 'class' selects the
# implementation, 'source'/'source_ref' wire the task to raw input or to a
# previous task's output, and 'pbs' holds the scheduler resources for the
# generated job scripts.
my $config = {
  general => {
    task_name => $task_name,
    path_file => "/home/shengq1/local/bin/path.txt",
    cluster   => "slurm",    #"slurm" or "torque"
  },

  # Adapter trimming of the raw fastq files.
  cutadapt => {
    class      => "Cutadapt",
    perform    => 1,
    target_dir => "${target_dir}/cutadapt",
    option     => "-O 10 -m 12",
    source     => $files,
    adaptor    => "TGGAATTCTCGGGTGCCAAGG",
    extension  => "_clipped.fastq",
    sh_direct  => 1,
    pbs        => {
      "email"    => $email,
      "nodes"    => "1:ppn=1",
      "walltime" => "24",
      "mem"      => "20gb"
    },
  },

  # Read-length statistics of the trimmed reads.
  fastqlen => {
    class      => "FastqLen",
    perform    => 1,
    target_dir => "${target_dir}/fastqlen",
    option     => "",
    source_ref => "cutadapt",
    cqstools   => $cqstools,
    sh_direct  => 1,
    pbs        => {
      "email"    => $email,
      "nodes"    => "1:ppn=1",
      "walltime" => "24",
      "mem"      => "20gb"
    },
  },

  # Collapse duplicate reads (keeps a .dupcount file with per-read counts).
  identical => {
    class      => "FastqIdentical",
    perform    => 1,
    target_dir => "${target_dir}/identical",
    option     => "",
    source_ref => [ "cutadapt", ".fastq.gz" ],
    cqstools   => $cqstools,
    extension  => "_clipped_identical.fastq",
    sh_direct  => 1,
    pbs        => {
      "email"    => $email,
      "nodes"    => "1:ppn=1",
      "walltime" => "24",
      "mem"      => "20gb"
    },
  },

  #not identical, for IGV
  bowtie1_genome_cutadapt_topN_1mm_notidentical => {
    class         => "Bowtie1",
    perform       => 1,
    target_dir    => "${target_dir}/topN_bowtie1_genome_cutadapt_1mm_notidentical",
    option        => $bowtie1_option_1mm,
    source_ref    => [ "cutadapt", ".fastq.gz" ],
    bowtie1_index => $bowtie1_human_index,
    samonly       => 0,
    sh_direct     => 0,
    pbs           => {
      "email"    => $email,
      "nodes"    => "1:ppn=8",
      "walltime" => "72",
      "mem"      => "40gb"
    },
  },

  #1 mismatch search
  # Main alignment of the collapsed reads; all *_1mm_count tasks below read
  # from this task's BAM output.
  bowtie1_genome_cutadapt_topN_1mm => {
    class         => "Bowtie1",
    perform       => 1,
    target_dir    => "${target_dir}/topN_bowtie1_genome_cutadapt_1mm",
    option        => $bowtie1_option_1mm,
    source_ref    => [ "identical", ".fastq\$" ],
    bowtie1_index => $bowtie1_human_index,
    samonly       => 0,
    sh_direct     => 1,
    pbs           => {
      "email"    => $email,
      "nodes"    => "1:ppn=8",
      "walltime" => "72",
      "mem"      => "40gb"
    },
  },

  # miRNA counting against the miRBase20 annotation.
  mirna_1mm_count => {
    class           => "MirnaCount",
    perform         => 1,
    target_dir      => "${target_dir}/topN_bowtie1_genome_cutadapt_1mm_count_miRNA",
    option          => $mirnacount_option,
    source_ref      => "bowtie1_genome_cutadapt_topN_1mm",
    fastq_files_ref => "identical",
    seqcount_ref    => [ "identical", ".dupcount\$" ],
    cqs_tools       => $cqstools,
    gff_file        => $hg19_mrna_gff,
    fasta_file      => $fasta_file,
    samtools        => $samtools,
    sh_direct       => 1,
    pbs             => {
      "email"    => $email,
      "nodes"    => "1:ppn=1",
      "walltime" => "72",
      "mem"      => "40gb"
    },
  },

  # Per-group miRNA count table.
  miRNA_1mm_table => {
    class      => "CQSMirnaTable",
    perform    => 1,
    target_dir => "${target_dir}/topN_bowtie1_genome_cutadapt_1mm_count_miRNA_table",
    option     => "",
    source_ref => "mirna_1mm_count",
    cqs_tools  => $cqstools,
    groups     => $groups,
    prefix     => "miRNA_1mm_",
    sh_direct  => 1,
    pbs        => {
      "email"    => $email,
      "nodes"    => "1:ppn=1",
      "walltime" => "10",
      "mem"      => "10gb"
    },
  },

  # Overlap-based miRNA counting (gtf_key miRNA).
  miRNA_1mm_count_overlap => {
    class           => "CQSMappedCount",
    perform         => 1,
    target_dir      => "${target_dir}/topN_bowtie1_genome_cutadapt_1mm_count_miRNA_overlap",
    option          => $mirna_overlap_count_option,
    source_ref      => "bowtie1_genome_cutadapt_topN_1mm",
    fastq_files_ref => "identical",
    seqcount_ref    => [ "identical", ".dupcount\$" ],
    cqs_tools       => $cqstools,
    gff_file        => $hg19_mrna_gff,
    fasta_file      => $fasta_file,
    samtools        => $samtools,
    sh_direct       => 1,
    pbs             => {
      "email"    => $email,
      "nodes"    => "1:ppn=1",
      "walltime" => "72",
      "mem"      => "40gb"
    },
  },

  # Positional summary of the overlap counts.
  miRNA_1mm_overlap_position => {
    class      => "CQSMappedPosition",
    perform    => 1,
    target_dir => "${target_dir}/topN_bowtie1_genome_cutadapt_1mm_count_miRNA_overlap_position",
    option     => "-o " . $task_name . "_miRNA.position",
    source_ref => "miRNA_1mm_count_overlap",
    cqs_tools  => $cqstools,
    sh_direct  => 1,
    pbs        => {
      "email"    => $email,
      "nodes"    => "1:ppn=1",
      "walltime" => "10",
      "mem"      => "10gb"
    },
  },

  # tRNA counting / table / positions against the UCSC-Ensembl tRNA bed.
  tRNA_1mm_count => {
    class           => "CQSMappedCount",
    perform         => 1,
    target_dir      => "${target_dir}/topN_bowtie1_genome_cutadapt_1mm_count_tRNA",
    option          => $trnacount_option,
    source_ref      => "bowtie1_genome_cutadapt_topN_1mm",
    fastq_files_ref => "identical",
    seqcount_ref    => [ "identical", ".dupcount\$" ],
    cqs_tools       => $cqstools,
    gff_file        => $hg19_trna_bed,
    fasta_file      => $hg19_trna_fasta,
    samtools        => $samtools,
    sh_direct       => 1,
    pbs             => {
      "email"    => $email,
      "nodes"    => "1:ppn=1",
      "walltime" => "72",
      "mem"      => "40gb"
    },
  },
  tRNA_1mm_table => {
    class      => "CQSMappedTable",
    perform    => 1,
    target_dir => "${target_dir}/topN_bowtie1_genome_cutadapt_1mm_count_tRNA_table",
    option     => "",
    source_ref => [ "tRNA_1mm_count", ".xml" ],
    groups     => $groups,
    cqs_tools  => $cqstools,
    prefix     => "tRNA_1mm_",
    sh_direct  => 1,
    pbs        => {
      "email"    => $email,
      "nodes"    => "1:ppn=1",
      "walltime" => "10",
      "mem"      => "10gb"
    },
  },
  tRNA_1mm_position => {
    class      => "CQSMappedPosition",
    perform    => 1,
    target_dir => "${target_dir}/topN_bowtie1_genome_cutadapt_1mm_count_tRNA_position",
    option     => "-o " . $task_name . "_tRNA.position",
    source_ref => "tRNA_1mm_count",
    cqs_tools  => $cqstools,
    sh_direct  => 1,
    pbs        => {
      "email"    => $email,
      "nodes"    => "1:ppn=1",
      "walltime" => "10",
      "mem"      => "10gb"
    },
  },

  # Other small-RNA counting and the combined category summary.
  smallRNA_1mm_count => {
    class           => "CQSMappedCount",
    perform         => 1,
    target_dir      => "${target_dir}/topN_bowtie1_genome_cutadapt_1mm_count_smallRNA",
    option          => $trnacount_option,
    source_ref      => "bowtie1_genome_cutadapt_topN_1mm",
    fastq_files_ref => "identical",
    seqcount_ref    => [ "identical", ".dupcount\$" ],
    cqs_tools       => $cqstools,
    gff_file        => $hg19_smallrna_bed,
    samtools        => $samtools,
    sh_direct       => 1,
    pbs             => {
      "email"    => $email,
      "nodes"    => "1:ppn=1",
      "walltime" => "72",
      "mem"      => "40gb"
    },
  },
  smallRNA_1mm_category => {
    class           => "CQSSmallRNACategory",
    perform         => 1,
    target_dir      => "${target_dir}/topN_bowtie1_genome_cutadapt_1mm_count_smallRNA_category",
    option          => "",
    source_ref      => [ "smallRNA_1mm_count", ".mapped.xml\$" ],
    mirna_count_ref => [ "mirna_1mm_count", ".mapped.xml\$" ],
    groups          => $groups,
    cqs_tools       => $cqstools,
    sh_direct       => 1,
    pbs             => {
      "email"    => $email,
      "nodes"    => "1:ppn=1",
      "walltime" => "72",
      "mem"      => "40gb"
    },
  },

  # Driver task: runs the per-sample tasks, then the summary tasks.
  sequencetask => {
    class      => "CQS::SequenceTask",
    perform    => 1,
    target_dir => "${target_dir}/sequencetask",
    option     => "",
    source     => {
      individual => [
        "cutadapt", "fastqlen", "identical",
        "bowtie1_genome_cutadapt_topN_1mm_notidentical",
        "bowtie1_genome_cutadapt_topN_1mm",
        "mirna_1mm_count", "miRNA_1mm_count_overlap", "tRNA_1mm_count", "smallRNA_1mm_count",
      ],
      summary => [ "miRNA_1mm_table", "tRNA_1mm_table", "smallRNA_1mm_category", "miRNA_1mm_overlap_position", "tRNA_1mm_position" ],
    },
    sh_direct => 1,
    pbs       => {
      "email"    => $email,
      "nodes"    => "1:ppn=8",
      "walltime" => "72",
      "mem"      => "40gb"
    },
  },
};

# Build (and submit/run, per CQS::ClassFactory) all tasks defined above.
performConfig($config);

1;
| realizor/ngsperl | templates/miRNAseq-pipeline.pl | Perl | apache-2.0 | 10,662 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V10::Resources::FeedItemSetLink;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);
use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;
# Constructor: copy the known FeedItemSetLink fields from the argument
# hashref and drop any that were left unassigned, so the serialized JSON
# payload stays concise.
sub new {
  my ($class, $args) = @_;

  my $self = { map { $_ => $args->{$_} } qw(feedItem feedItemSet resourceName) };

  # Delete the unassigned fields in this object for a more concise JSON payload
  remove_unassigned_fields($self, $args);

  return bless $self, $class;
}
1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V10/Resources/FeedItemSetLink.pm | Perl | apache-2.0 | 1,117 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Web::ZMenu::SupportingEvidenceAlignment;
use strict;
use base qw(EnsEMBL::Web::ZMenu);
## Builds the zmenu content for a supporting-evidence alignment hit:
## a link to the external record and, when an exon id is supplied, a link
## to the alignment view plus exon length and any 5'/3' mismatch entries.
sub content {
  my $self = shift;
  my $hub  = $self->hub;

  my $hit_name  = $hub->param('id');
  my $hit_db    = $self->object->get_sf_hit_db_name($hit_name);
  my $link_name = $hit_db eq 'RFAM' ? [ split '-', $hit_name ]->[0] : $hit_name;

  #Uniprot can't deal with versions in accessions
  if ($hit_db =~ /^Uniprot/) {
    $link_name =~ s/(\w*)\.\d+/$1/;
  }

  ## And ENA adds version numbers that aren't used in URL
  if ($hit_db eq 'EMBL') {
    $link_name =~ s/#\w+//;
  }

  ## NOTE(review): $hit_length appears unused below — kept for safety.
  my $hit_length = $hub->param('hit_length');
  my $hit_url    = $hub->get_ExtURL_link($link_name, $hit_db, $link_name);
  my $tsid       = $hub->param('t_version') ? $hub->param('t') . '.' . $hub->param('t_version') : $hub->param('t');
  my $esid       = $hub->param('exon');

  $self->caption("$hit_name ($hit_db)");

  if ($esid) {
    my $exon_length = $hub->param('exon_length');

    $self->add_entry({ label_html => "Entry removed from $hit_db" }) if $hub->param('er');

    $self->add_entry({
      type  => 'View alignments',
      label => "$esid ($tsid)",
      link  => $hub->url({
        type     => 'Transcript',
        action   => 'SupportingEvidence',
        function => 'Alignment',
        sequence => $hit_name,
        exon     => $esid
      })
    });

    $self->add_entry({
      type    => 'View record',
      label   => $hit_name,
      link    => $hit_url,
      abs_url => 1
    });

    $self->add_entry({
      type  => 'Exon length',
      label => "$exon_length bp"
    });

    if ($hub->param('five_end_mismatch')) {
      $self->add_entry({
        type  => "5' mismatch",
        label => $hub->param('five_end_mismatch') . ' bp'
      });
    }

    if ($hub->param('three_end_mismatch')) {
      $self->add_entry({
        type  => "3' mismatch",
        label => $hub->param('three_end_mismatch') . ' bp'
      });
    }
  } else {
    $self->add_entry({
      type    => 'View record',
      label   => $hit_name,    # bug fix: key was misspelled 'labe', leaving the entry unlabelled
      link    => $hit_url,
      abs_url => 1
    });
  }
}
1;
| Ensembl/ensembl-webcode | modules/EnsEMBL/Web/ZMenu/SupportingEvidenceAlignment.pm | Perl | apache-2.0 | 2,832 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package cloud::azure::storage::storageaccount::mode::filesharecount;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
# Prefix printed before each metric line, e.g. "Resource 'acct01' average ".
sub prefix_metric_output {
    my ($self, %options) = @_;

    my $instance = $options{instance_value};
    return sprintf("Resource '%s' %s ", $instance->{display}, $instance->{stat});
}
# Declare the counter definitions: one threshold/perfdata entry per
# (metric, aggregation) pair for the FileShareCount metric, producing the
# labels 'filesharecount-average' and 'filesharecount-total'.
sub set_counters {
    my ($self, %options) = @_;

    $self->{maps_counters_type} = [
        { name => 'metric', type => 1, cb_prefix_output => 'prefix_metric_output',
          message_multiple => "All count metrics are ok", skipped_code => { -10 => 1 } },
    ];

    foreach my $metric ('FileShareCount') {
        my $metric_label = lc($metric);
        foreach my $aggregation ('average', 'total') {
            my $key = $metric_label . '_' . $aggregation;
            push @{$self->{maps_counters}->{metric}}, {
                label => $metric_label . '-' . $aggregation,
                set   => {
                    key_values      => [ { name => $key }, { name => 'display' }, { name => 'stat' } ],
                    output_template => $metric . ': %s',
                    perfdatas       => [
                        { label => $key, value => $key,
                          template => '%s', label_extra_instance => 1, instance_use => 'display',
                          min => 0 },
                    ],
                }
            };
        }
    }
}
# Constructor: register the mode-specific command-line options
# (--resource may be repeated; --resource-group is a single string).
sub new {
    my ($class, %options) = @_;

    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $options{options}->add_options(arguments => {
        'resource:s@'      => { name => 'resource' },
        'resource-group:s' => { name => 'resource_group' },
    });

    return $self;
}
# Validate the command-line options and derive the Azure query parameters
# (resource type/namespace, timeframe, interval, aggregations, metrics).
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::check_options(%options);

    if (!defined($self->{option_results}->{resource})) {
        $self->{output}->add_option_msg(short_msg => "Need to specify either --resource <name> with --resource-group option or --resource <id>.");
        $self->{output}->option_exit();
    }

    $self->{az_resource} = $self->{option_results}->{resource};
    $self->{az_resource_group} = $self->{option_results}->{resource_group}
        if (defined($self->{option_results}->{resource_group}));
    $self->{az_resource_type}      = 'storageAccounts';
    $self->{az_resource_namespace} = 'Microsoft.Storage';

    # Defaults: one hour of data in a single one-hour interval.
    $self->{az_timeframe} = defined($self->{option_results}->{timeframe}) ? $self->{option_results}->{timeframe} : 3600;
    $self->{az_interval}  = defined($self->{option_results}->{interval})  ? $self->{option_results}->{interval}  : "PT1H";

    # 'Average' unless the user supplied aggregations; user-supplied values
    # are normalized to e.g. 'Total' and empty strings are discarded.
    $self->{az_aggregations} = ['Average'];
    if (defined($self->{option_results}->{aggregation})) {
        $self->{az_aggregations} = [
            map { ucfirst(lc($_)) } grep { $_ ne '' } @{$self->{option_results}->{aggregation}}
        ];
    }

    push @{$self->{az_metrics}}, 'FileShareCount';
}
# Query Azure Monitor for each requested storage account and fill
# $self->{metric} keyed by "<resource>_<aggregation>"; exits with a usage
# message when nothing was collected.
sub manage_selection {
    my ($self, %options) = @_;

    my %metric_results;
    foreach my $resource (@{$self->{az_resource}}) {
        my $resource_group = $self->{az_resource_group};
        my $resource_name = $resource;
        # File-share metrics live under the fileServices/default sub-resource.
        my $namespace_full = '/fileServices/default';
        # A full resource id embeds the group and account name: extract both
        # so either form of --resource is accepted.
        if ($resource_name =~ /^\/subscriptions\/.*\/resourceGroups\/(.*)\/providers\/Microsoft\.Storage\/storageAccounts\/(.*)$/) {
            $resource_group = $1;
            $resource_name = $2;
        }
        ($metric_results{$resource_name}, undef, undef) = $options{custom}->azure_get_metrics(
            resource => $resource_name . $namespace_full,
            resource_group => $resource_group,
            resource_type => $self->{az_resource_type},
            resource_namespace => $self->{az_resource_namespace},
            metrics => $self->{az_metrics},
            aggregations => $self->{az_aggregations},
            timeframe => $self->{az_timeframe},
            interval => $self->{az_interval},
        );
        foreach my $metric (@{$self->{az_metrics}}) {
            # Metric names are lower-cased with spaces replaced to match the
            # counter keys declared in set_counters.
            my $metric_name = lc($metric);
            $metric_name =~ s/ /_/g;
            foreach my $aggregation (@{$self->{az_aggregations}}) {
                # Skip missing datapoints unless --zeroed asks for 0 instead.
                next if (!defined($metric_results{$resource_name}->{$metric_name}->{lc($aggregation)}) && !defined($self->{option_results}->{zeroed}));
                $self->{metric}->{$resource_name . "_" . lc($aggregation)}->{display} = $resource_name;
                $self->{metric}->{$resource_name . "_" . lc($aggregation)}->{stat} = lc($aggregation);
                $self->{metric}->{$resource_name . "_" . lc($aggregation)}->{$metric_name . "_" . lc($aggregation)} = defined($metric_results{$resource_name}->{$metric_name}->{lc($aggregation)}) ? $metric_results{$resource_name}->{$metric_name}->{lc($aggregation)} : 0;
            }
        }
    }

    if (scalar(keys %{$self->{metric}}) <= 0) {
        $self->{output}->add_option_msg(short_msg => 'No metrics. Check your options or use --zeroed option to set 0 on undefined values');
        $self->{output}->option_exit();
    }
}
1;
__END__
=head1 MODE
Check storage account resources file share count metric.
Example:
Using resource name :
perl centreon_plugins.pl --plugin=cloud::azure::storage::storageaccount::plugin --custommode=azcli --mode=file-share-count
--resource=MYFILER --resource-group=MYHOSTGROUP --aggregation='average' --critical-filesharecount-average='10' --verbose
Using resource id :
perl centreon_plugins.pl --plugin=cloud::azure::storage::storageaccount::plugin --custommode=azcli --mode=file-share-count
--resource='/subscriptions/xxx/resourceGroups/xxx/providers/Microsoft.Storage/storageAccounts/xxx/fileServices/default'
--aggregation='average' --critical-filesharecount-average='10' --verbose
Default aggregation: 'average' / Total and average are valid.
=over 8
=item B<--resource>
Set resource name or id (Required).
=item B<--resource-group>
Set resource group (Required if resource's name is used).
=item B<--warning-filesharecount-*>
Warning thresholds (* can be: 'average', 'total').
=item B<--critical-filesharecount-*>
Critical thresholds (* can be: 'average', 'total').
=back
=cut
| Tpo76/centreon-plugins | cloud/azure/storage/storageaccount/mode/filesharecount.pm | Perl | apache-2.0 | 7,324 |
#
# Copyright 2018 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::aruba::standard::snmp::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_snmp);
# Constructor: register the modes this plugin exposes.  Keys are the
# user-facing --mode names, values are the implementing packages.
sub new {
    my ($class, %options) = @_;

    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';

    my %modes = (
        'ap-connections'  => 'centreon::common::aruba::snmp::mode::apconnections',
        'ap-users'        => 'centreon::common::aruba::snmp::mode::apusers',
        'cpu'             => 'centreon::common::aruba::snmp::mode::cpu',
        'hardware'        => 'centreon::common::aruba::snmp::mode::hardware',
        'interfaces'      => 'snmp_standard::mode::interfaces',
        'list-interfaces' => 'snmp_standard::mode::listinterfaces',
        'memory'          => 'centreon::common::aruba::snmp::mode::memory',
        'storage'         => 'centreon::common::aruba::snmp::mode::storage',
    );
    # Fill the existing hash through the reference (same effect as the
    # usual %{$self->{modes}} = (...) idiom used across these plugins).
    %{$self->{modes}} = %modes;

    return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check Aruba equipments in SNMP.
=cut
| wilfriedcomte/centreon-plugins | network/aruba/standard/snmp/plugin.pm | Perl | apache-2.0 | 1,942 |
#!/usr/bin/perl -w
# Right-justify lines read from STDIN.  The first input line gives the
# field width; the remaining lines are printed right-aligned under a
# column ruler ("1234567890" repeated, truncated to the width).
use strict;

my @lines = <STDIN>;
chomp @lines;

my $width  = shift @lines;
my $format = "%${width}s\n" x @lines;

# Build the ruler: whole tens first, then the leftover digits 1..rem.
my $ruler = '1234567890' x int($width / 10);
$ruler .= join '', 1 .. ($width % 10);

print "$ruler\n";
printf $format, @lines;
| alexhilton/miscellaneous | perl/exercise/ex0503.pl | Perl | apache-2.0 | 386 |
# NOTE: generated by SOAP::WSDL (see POD below) — do not edit by hand.
# Declares the <getResponse> element of the CampaignGroupService WSDL as a
# complex type with a single 'rval' property (a CampaignGroupPage).
package Google::Ads::AdWords::v201809::CampaignGroupService::getResponse;
use strict;
use warnings;
{ # BLOCK to scope variables
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201809' }

# Register the element's XML schema facets with the SOAP::WSDL typelib.
__PACKAGE__->__set_name('getResponse');
__PACKAGE__->__set_nillable();
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();
use base qw(
    SOAP::WSDL::XSD::Typelib::Element
    Google::Ads::SOAP::Typelib::ComplexType
);

# This element carries no XML attributes.
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;

sub __get_attr_class {
    return $XML_ATTRIBUTE_CLASS;
}

use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);
{ # BLOCK to scope variables
# Inside-out object storage for the 'rval' property (read via get_rval).
my %rval_of :ATTR(:get<rval>);

# Wire up property name, storage, type, and XML element name.
__PACKAGE__->_factory(
    [ qw( rval
    ) ],
    {
        'rval' => \%rval_of,
    },
    {
        'rval' => 'Google::Ads::AdWords::v201809::CampaignGroupPage',
    },
    {
        'rval' => 'rval',
    }
);
} # end BLOCK
} # end of BLOCK
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201809::CampaignGroupService::getResponse
=head1 DESCRIPTION
Perl data type class for the XML Schema defined element
getResponse from the namespace https://adwords.google.com/api/adwords/cm/v201809.
=head1 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * rval
$element->set_rval($data);
$element->get_rval();
=back
=head1 METHODS
=head2 new
my $element = Google::Ads::AdWords::v201809::CampaignGroupService::getResponse->new($data);
Constructor. The following data structure may be passed to new():
{
rval => $a_reference_to, # see Google::Ads::AdWords::v201809::CampaignGroupPage
},
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| googleads/googleads-perl-lib | lib/Google/Ads/AdWords/v201809/CampaignGroupService/getResponse.pm | Perl | apache-2.0 | 1,783 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.