code stringlengths 2 1.05M | repo_name stringlengths 5 101 | path stringlengths 4 991 | language stringclasses 3 values | license stringclasses 5 values | size int64 2 1.05M |
|---|---|---|---|---|---|
# NOTE: the package statement is split across two lines on purpose —
# presumably to keep this generated module out of the CPAN/PAUSE index.
package #
Date::Manip::Offset::off244;
# Copyright (c) 2008-2014 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Fri Nov 21 11:03:45 EST 2014
# Data version: tzdata2014j
# Code version: tzcode2014j
# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz
use strict;
use warnings;
require 5.010000;
our ($VERSION);
$VERSION='6.48';
# Release the data at global destruction so nothing holds it stale.
END { undef $VERSION; }
our ($Offset,%Offset);
END {
undef $Offset;
undef %Offset;
}
# The UTC offset described by this module (hh:mm:ss, negative = west).
$Offset = '-00:43:08';
# Maps an offset-usage index to the zoneinfo zones using this offset.
%Offset = (
0 => [
'africa/monrovia',
],
);
1;
| nriley/Pester | Source/Manip/Offset/off244.pm | Perl | bsd-2-clause | 853 |
package RSP;
use Moose;
use Cwd;
use base 'Mojo';
use Scalar::Util qw( weaken );
our $VERSION = '1.2';
use Application::Config 'rsp.conf';
use RSP::Config;
use Try::Tiny;
# use Carp::Always;
our $CONFIG;
# Lazily build and memoize the application-wide RSP::Config instance.
# The raw Mojo config hash is shallow-copied before being handed to
# RSP::Config so later mutation of the Mojo config does not leak in.
sub conf {
    my $class = shift;
    $CONFIG ||= RSP::Config->new( config => { %{ $class->config } } );
    return $CONFIG;
}
# Load every configured extension and apply its configuration mutations.
# Extensions whose preconditions are not yet satisfied are requeued and
# retried after the others; if one extension has been deferred more times
# than there are extensions still queued, no further progress is possible
# and we abort.
sub BUILD {
    my ($self) = @_;

    my @extension_stack = @{ $self->conf->available_extensions };
    my $retries_by_class = {};    # class name => number of deferrals so far
    while ( my $ext = shift(@extension_stack) ) {
        my $class = $ext;
        Class::MOP::load_class($class);

        # Only extensions opting into the AppMutation role may mutate config.
        next unless $class->can('does') && $class->does('RSP::Role::AppMutation');

        if ( $class->can_apply_mutations( $self->conf ) ) {
            $class->apply_mutations( $self->conf );
        }
        else {
            # Not ready yet; requeue and retry after the others have run.
            push( @extension_stack, $class );
            my $tries = ++$retries_by_class->{$class};
            # BUG FIX: the original compared the tracking hashref itself
            # (i.e. its numeric address) against the queue size, so this
            # guard never fired as intended.  Compare the per-class retry
            # count instead.
            if ( $tries > scalar(@extension_stack) ) {
                die "Unable to apply extension mutations";
            }
        }
    }
    #$self->conf->meta->make_immutable;
}
use RSP::Transaction::Mojo;
# Mojolicious entry point for one HTTP transaction.  Wraps the Mojo
# request/response pair in an RSP::Transaction::Mojo and processes it;
# any exception becomes a plain-text 500 response and is logged on the
# RSP transaction.  Returns the (possibly modified) Mojo transaction.
sub handler {
my ($self, $tx) = @_;
# NOTE(review): the chained request()/response() calls assume these
# accessors return the invocant ($rsptx) — confirm in RSP::Transaction.
my $rsptx = RSP::Transaction::Mojo->new
->request( $tx->req )
->response( $tx->res );
try {
$rsptx->process_transaction;
} catch {
# Inside Try::Tiny's catch block $_ holds the exception.
$tx->res->code(500);
$tx->res->headers->content_type('text/plain');
$tx->res->body($_);
$rsptx->log($_);
};
#$rsptx->request( undef );
#$rsptx->response( undef );
#$rsptx = undef;
return $tx;
}
# Document root for the application, as configured in rsp.conf.
sub root {
    my ($self) = @_;
    return $self->conf->root;
}
1;
| sanyaade-mobiledev/smart-platform | lib/RSP.pm | Perl | mit | 1,719 |
package DateTime::Duration;
{
$DateTime::Duration::VERSION = '0.74';
}
use strict;
use warnings;
use Carp ();
use DateTime;
use DateTime::Helpers;
use Params::Validate qw( validate SCALAR );
use overload (
fallback => 1,
'+' => '_add_overload',
'-' => '_subtract_overload',
'*' => '_multiply_overload',
'<=>' => '_compare_overload',
'cmp' => '_compare_overload',
);
use constant MAX_NANOSECONDS => 1_000_000_000; # 1E9 = almost 32 bits
my @all_units = qw( months days minutes seconds nanoseconds );
# XXX - need to reject non-integers but accept infinity, NaN, &
# 1.56e+18
# Construct a new duration.
#
# Accepts years/months/weeks/days/hours/minutes/seconds/nanoseconds
# (all defaulting to 0) plus an optional end_of_month mode of 'wrap',
# 'limit' or 'preserve'.  Units with a fixed conversion ratio are folded
# together immediately (years into months, weeks into days, hours into
# minutes); seconds and nanoseconds are stored separately.
sub new {
my $class = shift;
my %p = validate(
@_, {
years => { type => SCALAR, default => 0 },
months => { type => SCALAR, default => 0 },
weeks => { type => SCALAR, default => 0 },
days => { type => SCALAR, default => 0 },
hours => { type => SCALAR, default => 0 },
minutes => { type => SCALAR, default => 0 },
seconds => { type => SCALAR, default => 0 },
nanoseconds => { type => SCALAR, default => 0 },
end_of_month => {
type => SCALAR, default => undef,
regex => qr/^(?:wrap|limit|preserve)$/
},
}
);
my $self = bless {}, $class;
# Fold fixed-ratio units into their smaller partner unit.
$self->{months} = ( $p{years} * 12 ) + $p{months};
$self->{days} = ( $p{weeks} * 7 ) + $p{days};
$self->{minutes} = ( $p{hours} * 60 ) + $p{minutes};
$self->{seconds} = $p{seconds};
if ( $p{nanoseconds} ) {
$self->{nanoseconds} = $p{nanoseconds};
$self->_normalize_nanoseconds;
}
else {
# shortcut - if they don't need nanoseconds
$self->{nanoseconds} = 0;
}
# Default end-of-month mode depends on the sign of the month delta:
# 'preserve' for negative month deltas, 'wrap' otherwise.
$self->{end_of_month} = (
defined $p{end_of_month} ? $p{end_of_month}
: $self->{months} < 0 ? 'preserve'
: 'wrap'
);
return $self;
}
# make the signs of seconds, nanos the same; 0 < abs(nanos) < MAX_NANOS
# NB this requires nanoseconds != 0 (callers check this already)
sub _normalize_nanoseconds {
my $self = shift;
# Infinite / NaN nanosecond values cannot be normalized; leave them.
# (NaN is compared with 'eq' because numeric comparison against NaN
# is always false.)
return
if ( $self->{nanoseconds} == DateTime::INFINITY()
|| $self->{nanoseconds} == DateTime::NEG_INFINITY()
|| $self->{nanoseconds} eq DateTime::NAN() );
my $seconds = $self->{seconds} + $self->{nanoseconds} / MAX_NANOSECONDS;
$self->{seconds} = int($seconds);
# Perl's % yields a non-negative remainder here; shift down by one full
# second so nanoseconds carry the same sign as the seconds total.
$self->{nanoseconds} = $self->{nanoseconds} % MAX_NANOSECONDS;
$self->{nanoseconds} -= MAX_NANOSECONDS if $seconds < 0;
}
# ----------------------------------------------------------------------
# Simple accessors.
# ----------------------------------------------------------------------

# Shallow copy of this duration object.
sub clone {
    my ($self) = @_;
    return bless { %{$self} }, ref $self;
}

# Human-oriented, always-positive unit accessors.  Larger units are
# taken out first (e.g. months() reports months left over after whole
# years), mirroring the in_units() conversion rules.
sub years       { my ($self) = @_; return abs( $self->in_units('years') ) }
sub months      { my ($self) = @_; return abs( $self->in_units( 'months', 'years' ) ) }
sub weeks       { my ($self) = @_; return abs( $self->in_units('weeks') ) }
sub days        { my ($self) = @_; return abs( $self->in_units( 'days', 'weeks' ) ) }
sub hours       { my ($self) = @_; return abs( $self->in_units('hours') ) }
sub minutes     { my ($self) = @_; return abs( $self->in_units( 'minutes', 'hours' ) ) }
sub seconds     { my ($self) = @_; return abs( $self->in_units('seconds') ) }
sub nanoseconds { my ($self) = @_; return abs( $self->in_units( 'nanoseconds', 'seconds' ) ) }

# Sign predicates.  A duration mixing positive and negative units is
# neither positive nor negative.
sub is_positive { my ($self) = @_; return $self->_has_positive && !$self->_has_negative }
sub is_negative { my ($self) = @_; return !$self->_has_positive && $self->_has_negative }

sub _has_positive {
    my ($self) = @_;
    return ( grep { $_ > 0 } @{$self}{@all_units} ) ? 1 : 0;
}

sub _has_negative {
    my ($self) = @_;
    return ( grep { $_ < 0 } @{$self}{@all_units} ) ? 1 : 0;
}

# True iff every stored delta is exactly zero.
sub is_zero {
    my ($self) = @_;
    return ( grep { $_ != 0 } @{$self}{@all_units} ) ? 0 : 1;
}

# Raw, signed deltas exactly as stored; used by DateTime for date math.
sub delta_months      { my ($self) = @_; return $self->{months} }
sub delta_days        { my ($self) = @_; return $self->{days} }
sub delta_minutes     { my ($self) = @_; return $self->{minutes} }
sub delta_seconds     { my ($self) = @_; return $self->{seconds} }
sub delta_nanoseconds { my ($self) = @_; return $self->{nanoseconds} }

# All deltas as a flat key => value list.
sub deltas {
    my ($self) = @_;
    return map { $_ => $self->{$_} } @all_units;
}
# Express the duration in the requested units (a list of unit names).
# Larger units are extracted first and smaller units report only the
# remainder, e.g. in_units('years', 'months') on 14 months gives (1, 2).
# No conversion happens between units without a fixed ratio.  In list
# context returns values in the order requested; in scalar context
# returns only the first (still computed against all requested units).
sub in_units {
my $self = shift;
my @units = @_;
my %units = map { $_ => 1 } @units;
my %ret;
my ( $months, $days, $minutes, $seconds )
= @{$self}{qw( months days minutes seconds )};
if ( $units{years} ) {
$ret{years} = int( $months / 12 );
$months -= $ret{years} * 12;
}
if ( $units{months} ) {
$ret{months} = $months;
}
if ( $units{weeks} ) {
$ret{weeks} = int( $days / 7 );
$days -= $ret{weeks} * 7;
}
if ( $units{days} ) {
$ret{days} = $days;
}
if ( $units{hours} ) {
$ret{hours} = int( $minutes / 60 );
$minutes -= $ret{hours} * 60;
}
if ( $units{minutes} ) {
$ret{minutes} = $minutes;
}
if ( $units{seconds} ) {
$ret{seconds} = $seconds;
# Seconds consumed here must not also be reported as nanoseconds.
$seconds = 0;
}
if ( $units{nanoseconds} ) {
$ret{nanoseconds} = $seconds * MAX_NANOSECONDS + $self->{nanoseconds};
}
wantarray ? @ret{@units} : $ret{ $units[0] };
}
# ----------------------------------------------------------------------
# End-of-month mode and sub-duration extraction.
# ----------------------------------------------------------------------

sub is_wrap_mode     { my ($self) = @_; return $self->{end_of_month} eq 'wrap'     ? 1 : 0 }
sub is_limit_mode    { my ($self) = @_; return $self->{end_of_month} eq 'limit'    ? 1 : 0 }
sub is_preserve_mode { my ($self) = @_; return $self->{end_of_month} eq 'preserve' ? 1 : 0 }

sub end_of_month_mode { my ($self) = @_; return $self->{end_of_month} }

# New duration carrying only the calendar part (months and days) of this
# one, preserving the end-of-month mode.
sub calendar_duration {
    my $self = shift;
    my %calendar = map { $_ => $self->{$_} } qw( months days end_of_month );
    return ( ref $self )->new(%calendar);
}

# New duration carrying only the clock part (minutes, seconds and
# nanoseconds) of this one, preserving the end-of-month mode.
sub clock_duration {
    my $self = shift;
    my %clock = map { $_ => $self->{$_} } qw( minutes seconds nanoseconds end_of_month );
    return ( ref $self )->new(%clock);
}
# Return a new duration with every delta negated.  Unless overridden via
# an end_of_month => ... argument, the new object gets the default
# end-of-month mode for its flipped sign (decided by new()).
sub inverse {
    my ( $self, %p ) = @_;

    my %negated;
    for my $unit (@all_units) {
        my $value = $self->{$unit};
        # Negate only non-zero values so we never produce -0.
        $negated{$unit} = $value ? $value * -1 : $value;
    }

    $negated{end_of_month} = $p{end_of_month} if exists $p{end_of_month};

    return ( ref $self )->new(%negated);
}
# ----------------------------------------------------------------------
# Arithmetic.
# ----------------------------------------------------------------------

# In-place addition of another duration; returns $self for chaining.
sub add_duration {
    my ( $self, $dur ) = @_;

    $self->{$_} += $dur->{$_} for @all_units;

    $self->_normalize_nanoseconds if $self->{nanoseconds};

    return $self;
}

# Sugar: build a duration from the given arguments and add it.
sub add {
    my $self = shift;
    return $self->add_duration( ( ref $self )->new(@_) );
}

sub subtract_duration {
    my ( $self, $dur ) = @_;
    return $self->add_duration( $dur->inverse );
}

# Sugar: build a duration from the given arguments and subtract it.
sub subtract {
    my $self = shift;
    return $self->subtract_duration( ( ref $self )->new(@_) );
}

# In-place multiplication of every delta; returns $self for chaining.
sub multiply {
    my ( $self, $multiplier ) = @_;

    $self->{$_} *= $multiplier for @all_units;

    $self->_normalize_nanoseconds if $self->{nanoseconds};

    return $self;
}

# Class method: compare two durations by adding each to a common base
# datetime (default: DateTime->now) and comparing the results.
sub compare {
    my ( $class, $dur1, $dur2, $dt ) = @_;

    $dt ||= DateTime->now;

    return DateTime->compare(
        $dt->clone->add_duration($dur1),
        $dt->clone->add_duration($dur2),
    );
}
# ----------------------------------------------------------------------
# Operator overloads.
# ----------------------------------------------------------------------

sub _add_overload {
    my ( $d1, $d2, $rev ) = @_;
    ( $d1, $d2 ) = ( $d2, $d1 ) if $rev;

    # DateTime + Duration: mutate the DateTime in place and return
    # nothing; DateTime's own overloading supplies the visible result.
    if ( DateTime::Helpers::isa( $d2, 'DateTime' ) ) {
        $d2->add_duration($d1);
        return;
    }

    # Duration + Duration (also works if $d1 is a DateTime.pm object).
    return $d1->clone->add_duration($d2);
}

sub _subtract_overload {
    my ( $d1, $d2, $rev ) = @_;
    ( $d1, $d2 ) = ( $d2, $d1 ) if $rev;

    Carp::croak(
        "Cannot subtract a DateTime object from a DateTime::Duration object")
        if DateTime::Helpers::isa( $d2, 'DateTime' );

    return $d1->clone->subtract_duration($d2);
}

sub _multiply_overload {
    my ( $self, $multiplier ) = @_;
    return $self->clone->multiply($multiplier);
}

# Comparison operators are deliberately unsupported; use compare().
sub _compare_overload {
    Carp::croak( 'DateTime::Duration does not overload comparison.'
            . ' See the documentation on the compare() method for details.'
    );
}
1;
# ABSTRACT: Duration objects for date math
=pod
=head1 NAME
DateTime::Duration - Duration objects for date math
=head1 VERSION
version 0.74
=head1 SYNOPSIS
use DateTime::Duration;
$dur = DateTime::Duration->new(
years => 3,
months => 5,
weeks => 1,
days => 1,
hours => 6,
minutes => 15,
seconds => 45,
nanoseconds => 12000
);
my ( $days, $hours, $seconds ) = $dur->in_units('days', 'hours', 'seconds');
# Human-readable accessors, always positive, but consider using
# DateTime::Format::Duration instead
$dur->years;
$dur->months;
$dur->weeks;
$dur->days;
$dur->hours;
$dur->minutes;
$dur->seconds;
$dur->nanoseconds;
$dur->is_wrap_mode
$dur->is_limit_mode
$dur->is_preserve_mode
print $dur->end_of_month_mode;
# Multiply all values by -1
my $opposite = $dur->inverse;
my $bigger = $dur1 + $dur2;
my $smaller = $dur1 - $dur2; # the result could be negative
my $bigger = $dur1 * 3;
my $base_dt = DateTime->new( year => 2000 );
my @sorted =
sort { DateTime::Duration->compare( $a, $b, $base_dt ) } @durations;
if ( $dur->is_positive ) { ... }
if ( $dur->is_zero ) { ... }
if ( $dur->is_negative ) { ... }
=head1 DESCRIPTION
This is a simple class for representing duration objects. These
objects are used whenever you do date math with DateTime.pm.
See the L<How Date Math is Done|DateTime/"How Date Math is Done">
section of the DateTime.pm documentation for more details. The short
course: One cannot in general convert between seconds, minutes, days,
and months, so this class will never do so. Instead, create the
duration with the desired units to begin with, for example by calling
the appropriate subtraction/delta method on a C<DateTime.pm> object.
=head1 METHODS
Like C<DateTime> itself, C<DateTime::Duration> returns the object from
mutator methods in order to make method chaining possible.
C<DateTime::Duration> has the following methods:
=over 4
=item * new( ... )
This method takes the parameters "years", "months", "weeks", "days",
"hours", "minutes", "seconds", "nanoseconds", and "end_of_month". All
of these except "end_of_month" are numbers. If any of the numbers are
negative, the entire duration is negative.
All of the numbers B<must be integers>.
Internally, years are just treated as 12 months. Similarly, weeks are
treated as 7 days, and hours are converted to minutes. Seconds and
nanoseconds are both treated separately.
The "end_of_month" parameter must be either "wrap", "limit", or
"preserve". This parameter specifies how date math that crosses the
end of a month is handled.
In "wrap" mode, adding months or years that result in days beyond the
end of the new month will roll over into the following month. For
instance, adding one year to Feb 29 will result in Mar 1.
If you specify "end_of_month" mode as "limit", the end of the month is
never crossed. Thus, adding one year to Feb 29, 2000 will result in
Feb 28, 2001. If you were to then add three more years this will
result in Feb 28, 2004.
If you specify "end_of_month" mode as "preserve", the same calculation
is done as for "limit" except that if the original date is at the end
of the month the new date will also be. For instance, adding one
month to Feb 29, 2000 will result in Mar 31, 2000.
For positive durations, the "end_of_month" parameter defaults to wrap.
For negative durations, the default is "preserve". This should match how
most people "intuitively" expect datetime math to work.
=item * clone
Returns a new object with the same properties as the object on which
this method was called.
=item * in_units( ... )
Returns the length of the duration in the units (any of those that can
be passed to C<new>) given as arguments. All lengths are integral,
but may be negative. Smaller units are computed from what remains
after taking away the larger units given, so for example:
my $dur = DateTime::Duration->new( years => 1, months => 15 );
$dur->in_units( 'years' ); # 2
$dur->in_units( 'months' ); # 27
$dur->in_units( 'years', 'months' ); # (2, 3)
$dur->in_units( 'weeks', 'days' ); # (0, 0) !
The last example demonstrates that there will not be any conversion
between units which don't have a fixed conversion rate. The only
conversions possible are:
=over 8
=item * years <=> months
=item * weeks <=> days
=item * hours <=> minutes
=item * seconds <=> nanoseconds
=back
For the explanation of why this is the case, please see the L<How Datetime
Math Works|DateTime/"How DateTime Math Works"> section of the DateTime.pm
documentation
Note that the numbers returned by this method may not match the values
given to the constructor.
In list context, in_units returns the lengths in the order of the units
given. In scalar context, it returns the length in the first unit (but
still computes in terms of all given units).
If you need more flexibility in presenting information about
durations, please take a look at C<DateTime::Format::Duration>.
=item * is_positive, is_zero, is_negative
Indicates whether or not the duration is positive, zero, or negative.
If the duration contains both positive and negative units, then it
will return false for B<all> of these methods.
=item * is_wrap_mode, is_limit_mode, is_preserve_mode
Indicates what mode is used for end of month wrapping.
=item * end_of_month_mode
Returns one of "wrap", "limit", or "preserve".
=item * calendar_duration
Returns a new object with the same I<calendar> delta (months and days
only) and end of month mode as the current object.
=item * clock_duration
Returns a new object with the same I<clock> deltas (minutes, seconds,
and nanoseconds) and end of month mode as the current object.
=item * inverse( ... )
Returns a new object with the same deltas as the current object, but
multiplied by -1. The end of month mode for the new object will be the
default end of month mode, which depends on whether the new duration
is positive or negative.
You can set the end of month mode in the inverted duration explicitly by
passing "end_of_month => ..." to the C<inverse()> method.
=item * add_duration( $duration_object ), subtract_duration( $duration_object )
Adds or subtracts one duration from another.
=item * add( ... ), subtract( ... )
Syntactic sugar for addition and subtraction. The parameters given to
these methods are used to create a new object, which is then passed to
C<add_duration()> or C<subtract_duration()>, as appropriate.
=item * multiply( $number )
Multiplies each unit in the duration by the specified number.
=item * DateTime::Duration->compare( $duration1, $duration2, $base_datetime )
This is a class method that can be used to compare or sort durations.
Comparison is done by adding each duration to the specified
C<DateTime.pm> object and comparing the resulting datetimes. This is
necessary because without a base, many durations are not comparable.
For example, 1 month may or may not be longer than 29 days, depending
on what datetime it is added to.
If no base datetime is given, then the result of C<< DateTime->now >>
is used instead. Using this default will give non-repeatable results
if used to compare two duration objects containing different units.
It will also give non-repeatable results if the durations contain
multiple types of units, such as months and days.
However, if you know that both objects only consist of one type of
unit (months I<or> days I<or> hours, etc.), and each duration contains
the same type of unit, then the results of the comparison will be
repeatable.
=item * delta_months, delta_days, delta_minutes, delta_seconds, delta_nanoseconds
These methods provide the information C<DateTime.pm> needs for doing date
math. The numbers returned may be positive or negative. This is mostly useful
for doing date math in L<DateTime>.
=item * deltas
Returns a hash with the keys "months", "days", "minutes", "seconds", and
"nanoseconds", containing all the delta information for the object. This is
mostly useful for doing date math in L<DateTime>.
=item * years, months, weeks, days, hours, minutes, seconds, nanoseconds
These methods return numbers indicating how many of the given unit the
object represents, after having done a conversion to any larger units.
For example, days are first converted to weeks, and then the remainder
is returned. These numbers are always positive.
Here's what each method returns:
$dur->years() == abs( $dur->in_units('years') )
$dur->months() == abs( ( $dur->in_units( 'months', 'years' ) )[0] )
$dur->weeks() == abs( $dur->in_units( 'weeks' ) )
$dur->days() == abs( ( $dur->in_units( 'days', 'weeks' ) )[0] )
$dur->hours() == abs( $dur->in_units( 'hours' ) )
$dur->minutes == abs( ( $dur->in_units( 'minutes', 'hours' ) )[0] )
$dur->seconds == abs( $dur->in_units( 'seconds' ) )
$dur->nanoseconds() == abs( ( $dur->in_units( 'nanoseconds', 'seconds' ) )[0] )
If this seems confusing, remember that you can always use the
C<in_units()> method to specify exactly what you want.
Better yet, if you are trying to generate output suitable for humans,
use the C<DateTime::Format::Duration> module.
=back
=head2 Overloading
This class overloads addition, subtraction, and multiplication.
Comparison is B<not> overloaded. If you attempt to compare durations
using C<< <=> >> or C<cmp>, then an exception will be thrown! Use the
C<compare()> class method instead.
=head1 SUPPORT
Support for this module is provided via the datetime@perl.org email
list. See http://lists.perl.org/ for more details.
=head1 SEE ALSO
datetime@perl.org mailing list
http://datetime.perl.org/
=head1 AUTHOR
Dave Rolsky <autarch@urth.org>
=head1 COPYRIGHT AND LICENSE
This software is Copyright (c) 2012 by Dave Rolsky.
This is free software, licensed under:
The Artistic License 2.0 (GPL Compatible)
=cut
__END__
| leighpauls/k2cro4 | third_party/perl/perl/vendor/lib/DateTime/Duration.pm | Perl | bsd-3-clause | 17,491 |
#!/usr/bin/perl
use strict;
use warnings;
use File::Find;
# Keys actually referenced in templates/PHP sources, filled by used_keys().
our %used_keys = ();
# Keys declared in the en_UK language files, filled by load_registered_keys().
our %registered_keys = ();
my $piwigo_dir = $ARGV[0]; # '/home/pierrick/public_html/piwigo/dev/trunk';
my $type = $ARGV[1]; # common, admin, install, upgrade
# Walk the Piwigo tree collecting used keys, then load the registered ones.
find(\&used_keys, $piwigo_dir);
load_registered_keys($type);
# foreach my $key (sort keys %used_keys) {
# # print "{".$key."}", ' is used', "\n";
# if (not defined $registered_keys{$key}) {
# # print "{".$key."}", ' is missing', "\n";
# print '$lang[\''.$key.'\'] = \''.$key.'\';', "\n";
# }
# }
# Keys that are built dynamically at runtime (plural forms picked by
# l10n_dec, computed key names such as user statuses and access levels,
# ...) and therefore never appear literally in the sources; they must
# not be reported as unused.
my %ignore_keys = (
'%d new photo' => 1,
'%d new photos' => 1,
'%d category updated' => 1,
'%d categories updated' => 1,
'%d new comment' => 1,
'%d new comments' => 1,
'%d comment to validate' => 1,
'%d comments to validate' => 1,
'%d new user' => 1,
'%d new users' => 1,
'%d waiting element' => 1,
'%d waiting elements' => 1,
'user_status_admin' => '',
'user_status_generic' => '',
'user_status_guest' => '',
'user_status_normal' => '',
'user_status_webmaster' => '',
'Level 0' => '',
'Level 1' => '',
'Level 2' => '',
'Level 4' => '',
'Level 8' => '',
'ACCESS_0' => '',
'ACCESS_1' => '',
'ACCESS_2' => '',
'ACCESS_3' => '',
'ACCESS_4' => '',
'ACCESS_5' => '',
'month' => '',
'day' => '',
'chronology_monthly_calendar' => '',
'chronology_monthly_list' => '',
'chronology_weekly_list' => '',
'public' => '',
'private' => '',
'none' => '',
'other' => '',
'high' => '',
'Waiting page: %s' => '',
'Admin: %s' => '',
'Manage this user comment: %s' => '',
'Main "guest" user does not exist' => '',
'Main "guest" user status is incorrect' => '',
'Main "webmaster" user does not exist' => '',
'Main "webmaster" user status is incorrect' => '',
'Default user does not exist' => '',
'(!) This comment requires validation' => '',
);
# Report every registered key that is neither used nor ignored.
foreach my $key (sort keys %registered_keys) {
if (not defined $used_keys{$key} and not defined $ignore_keys{$key}) {
print "{".$key."}", ' is not used anywhere', "\n";
}
}
# File::Find callback: scan one .tpl/.php file and record every language
# key it references into %used_keys.
#
# Files under plugins/, language/ and _data/ are always skipped, and the
# set of scanned files is further narrowed according to the requested
# $type (common, admin, install, upgrade).
sub used_keys {
    # Only template and PHP sources can contain translation keys.
    if ($File::Find::name !~ m/(tpl|php)$/) {
        return 0;
    }
    if ($File::Find::name =~ m{/(plugins|language|_data)/}) {
        return 0;
    }
    if ('upgrade' eq $type) {
        if ($File::Find::name !~ m{upgrade\.(tpl|php)$}) {
            # NOTE(review): this filter is intentionally disabled — for
            # "upgrade" every eligible file is scanned.
            # return 0;
        }
    }
    if ('install' eq $type) {
        if ($File::Find::name =~ m{upgrade\.(tpl|php)$}) {
            return 0;
        }
        if ($File::Find::name !~ m{/install(\.tpl|\.php|/)}) {
            return 0;
        }
    }
    if ('admin' eq $type) {
        if ($File::Find::name =~ m{upgrade\.(tpl|php)$}) {
            return 0;
        }
        if ($File::Find::name =~ m{/install(\.tpl|\.php|/)}) {
            return 0;
        }
        # Admin scope: admin pages, admin.php entry points and mail templates.
        my $is_admin = 0;
        if ($File::Find::name =~ m{themes/default/template/mail}) {
            $is_admin = 1;
        }
        if ($File::Find::name =~ m{/admin/}) {
            $is_admin = 1;
        }
        if ($File::Find::name =~ m{/admin\.php$}) {
            $is_admin = 1;
        }
        if (not $is_admin) {
            return 0;
        }
    }
    if ('common' eq $type) {
        if ($File::Find::name =~ m{upgrade\.(tpl|php)$}) {
            return 0;
        }
        if ($File::Find::name =~ m{/install(\.tpl|\.php|/)}) {
            return 0;
        }
        if ($File::Find::name =~ m{/admin(/|\.php)} or $File::Find::name =~ m{themes/default/template/mail}) {
            return 0;
        }
    }
    if (-f) {
        # Concatenate the whole file into one string (with // comments
        # stripped) so keys split across source lines are still matched.
        my $big_string = '';
        # BUG FIX: the open was previously unchecked, so an unreadable
        # file produced warnings and silently contributed no keys.
        open(my $fhi, '<', $File::Find::name)
            or die 'cannot open file '.$File::Find::name."\n";
        while (<$fhi>) {
            chomp;
            s{//.*$}{};
            $big_string .= $_;
        }
        close($fhi);
        # {'key'|@translate} in Smarty templates
        while ($big_string =~ m/\{(['"])(.+?)\1\|\@translate/g) {
            $used_keys{$2}++;
        }
        # l10n('key') in PHP
        while ($big_string =~ m/l10n \s* \( \s* (['"]) (.+?) \1 \s* \)/xg) {
            $used_keys{$2}++;
        }
        # l10n_args('key', ...) in PHP
        while ($big_string =~ m/l10n_args \s* \( \s* (['"]) (.+?) \1 \s* ,/xg) {
            $used_keys{$2}++;
        }
        # l10n_dec('singular', 'plural', ...) registers both keys
        while ($big_string =~ m/l10n_dec \s* \( \s* (['"]) (.+?) \1 \s* ,\s* (['"]) (.+?) \3 \s* ,/xg) {
            $used_keys{$2}++;
            $used_keys{$4}++;
        }
    }
}
# Load the $lang keys registered in the en_UK language files relevant
# for $type into %registered_keys.  Each type implies all "smaller"
# scopes as well (e.g. "admin" also loads "common").
sub load_registered_keys {
    my ($type) = @_;

    my %files_for_type = (
        common  => [qw/common/],
        admin   => [qw/common admin/],
        install => [qw/common admin install/],
        upgrade => [qw/common admin install upgrade/],
    );

    foreach my $file_code (@{ $files_for_type{$type} }) {
        my $filepath = $piwigo_dir.'/language/en_UK/'.$file_code.'.lang.php';
        # BUG FIX: the open was previously unchecked and the handle was
        # never closed; a missing language file silently yielded no keys.
        open(my $fhi, '<', $filepath)
            or die 'cannot open file '.$filepath."\n";
        while (<$fhi>) {
            # Match: $lang['some key'] = ...;
            if (m/\$lang\[ \s* (['"]) (.+?) \1 \s* \]/x) {
                $registered_keys{$2}++;
            }
        }
        close($fhi);
    }
}
| ale252/theatre | web/piwigo/tools/missing_keys.pl | Perl | mit | 4,998 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
package AI::MXNet::NS;
# this class is similar to Exporter, in that it will add an "import"
# method to the calling package. It is to allow a package to emulate
# the python "import mxnet as mx" style aliasing as "use AI::MXNet 'mx'"
use strict;
use warnings;
# Return (as an lvalue) the typeglob for $name in package $pkg, letting
# callers assign subs or whole globs into arbitrary symbol tables:
#   _sym($pkg, 'foo') = sub { ... };
sub _sym : lvalue
{
my ($pkg, $name) = @_;
no strict 'refs';
*{"$pkg\::$name"};
}
# Install an "import" method into the calling package.  A package that
# does "use AI::MXNet::NS" can then itself be used as
#   use Some::Package 'alias';
# which installs sub alias() { 'Some::Package' } into the user's
# namespace, enabling python-style "mx->..." aliasing.  With the
# 'global' option and exactly one alias name, the alias package's whole
# symbol-table glob is aliased to this package's as well.
sub import
{
my (undef, $opt) = @_;
my $class = caller();
my $func = sub { $class };
_sym($class, 'import') = sub {
my (undef, @names) = @_;
# Sanitize alias names: strip everything but word characters and '::'.
@names = map { s/[^\w:]//sgr } @names;
my $target = caller();
_sym($names[0], '') = _sym($class, '') if
@names == 1 and $opt and $opt eq 'global';
_sym($target, $_) = $func for @names;
};
}
# AUTOLOAD body injected (once) into each target package by register().
# It dispatches method calls by name through the package's %AUTOLOAD
# hash, which register() populates below.
my $autoload_template = q(
sub AUTOLOAD
{
our ($AUTOLOAD, %AUTOLOAD);
my $name = $AUTOLOAD =~ s/.*:://sr;
my $func = $AUTOLOAD{$name};
Carp::carp(qq(Can't locate object method "$name" via package "${\ __PACKAGE__ }"))
unless $func;
goto $func;
}
);
# using AUTOLOAD here allows for the addition of an AI::MXNet::SomeClass
# class to coexist with an AI::MXNet->SomeClass() shorthand constructor.
sub register
{
my ($class, $target) = @_;
my $name = $class =~ s/.*:://sr;
my $dest = $class->can('new');
# Route $target->Name(@args) to $class->new(@args) via the AUTOLOAD map;
# goto preserves the caller's stack frame.
${_sym($target, 'AUTOLOAD')}{$name} = sub {
splice @_, 0, 1, $class;
goto $dest;
};
# Inject the shared AUTOLOAD body only once per target package.
return if $target->can('AUTOLOAD');
eval sprintf 'package %s { %s }', $target, $autoload_template;
die if $@;
return;
}
1;
| zhreshold/mxnet | perl-package/AI-MXNet/lib/AI/MXNet/NS.pm | Perl | apache-2.0 | 2,375 |
package API::FederationDeliveryService;
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
#
use UI::Utils;
use Mojo::Base 'Mojolicious::Controller';
use Data::Dumper;
# GET handler: list the delivery services assigned to federation :fedId.
# Responds with id/cdn/type/xmlId for each assigned delivery service.
sub index {
    my $self   = shift;
    my $fed_id = $self->param('fedId');

    my $rs = $self->db->resultset("FederationDeliveryservice")
        ->search( { 'federation' => $fed_id }, { prefetch => ['deliveryservice'] } );

    my @data;
    while ( my $row = $rs->next ) {
        my $ds = $row->deliveryservice;
        push @data, {
            "id"    => $ds->id,
            "cdn"   => $ds->cdn->name,
            "type"  => $ds->type->name,
            "xmlId" => $ds->xml_id,
        };
    }

    $self->success( \@data );
}
# POST handler: assign delivery services (dsIds) to federation :fedId.
# Admin only.  When "replace" is true the existing assignments are
# dropped first; a federation must always keep at least one delivery
# service assigned.
sub assign_dss_to_federation {
    my $self = shift;

    my $fed_id  = $self->param('fedId');
    my $params  = $self->req->json;
    my $ds_ids  = $params->{dsIds};
    my $replace = $params->{replace};
    my $count   = 0;

    return $self->forbidden() unless &is_admin($self);

    my $fed = $self->db->resultset('Federation')->find( { id => $fed_id } );
    return $self->not_found() unless defined $fed;

    return $self->alert("Delivery Service IDs must be an array")
        unless ref($ds_ids) eq 'ARRAY';

    if ($replace) {
        return $self->alert("A federation must have at least one delivery service assigned")
            unless scalar @{$ds_ids};

        # Start fresh: drop all existing fed/ds associations.
        $self->db->resultset('FederationDeliveryservice')
            ->search( { federation => $fed_id } )->delete();
    }

    # The first row names the columns, as required by DBIx::Class 'populate'.
    my @values = ( [qw( federation deliveryservice )] );
    for my $ds_id ( @{$ds_ids} ) {
        push @values, [ $fed_id, $ds_id ];
        $count++;
    }
    $self->db->resultset("FederationDeliveryservice")->populate( \@values );

    my $msg = $count . " delivery service(s) were assigned to the " . $fed->cname . " federation";
    &log( $self, $msg, "APICHANGE" );

    my $response = $params;
    return $self->success( $response, $msg );
}
# DELETE handler: remove delivery service :dsId from federation :fedId.
# Admin only; a federation must keep at least one delivery service.
sub delete {
    my $self   = shift;
    my $fed_id = $self->param('fedId');
    my $ds_id  = $self->param('dsId');

    if ( !&is_admin($self) ) {
        return $self->forbidden();
    }

    my $fed_dss = $self->db->resultset("FederationDeliveryservice")->search( { 'federation' => $fed_id } );
    if ( $fed_dss->count() < 2 ) {
        return $self->alert("A federation must have at least one delivery service assigned");
    }

    my $fed_ds = $self->db->resultset("FederationDeliveryservice")
        ->search( { 'federation.id' => $fed_id, 'deliveryservice' => $ds_id },
        { prefetch => [ 'federation', 'deliveryservice' ] } );

    # BUG FIX: search() always returns a defined resultset, so the old
    # "!defined($fed_ds)" check could never fire and a missing
    # association crashed later on "$row->deliveryservice".  Check the
    # fetched row instead.
    my $row = $fed_ds->next;
    if ( !defined($row) ) {
        return $self->not_found();
    }

    my $rs = $fed_ds->delete();
    if ($rs) {
        my $msg = "Removed delivery service [ " . $row->deliveryservice->xml_id . " ] from federation [ " . $row->federation->cname . " ]";
        &log( $self, $msg, "APICHANGE" );
        return $self->success_message($msg);
    }

    return $self->alert("Failed to remove delivery service from federation.");
}
1;
| jeffmart/incubator-trafficcontrol | traffic_ops/app/lib/API/FederationDeliveryService.pm | Perl | apache-2.0 | 3,895 |
#!/usr/bin/perl
##
## Copyright (c) 2010 The WebM project authors. All Rights Reserved.
##
## Use of this source code is governed by a BSD-style license
## that can be found in the LICENSE file in the root of the source
## tree. An additional intellectual property rights grant can be found
## in the file PATENTS. All contributing project authors may
## be found in the AUTHORS file in the root of the source tree.
##
# ads2gas.pl
# Author: Eric Fung (efung (at) acm.org)
#
# Convert ARM Developer Suite 1.0.1 syntax assembly source to GNU as format
#
# Usage: cat inputfile | perl ads2gas.pl > outputfile
#
# Emit the GNU as preamble, then translate the ADS assembly arriving on
# STDIN into GNU as syntax on STDOUT, one line at a time.  NOTE: the
# substitutions below are order-sensitive; do not reorder them.
print "@ This file was created from a .asm file\n";
print "@ using the ads2gas.pl script.\n";
print "\t.equ DO1STROUNDING, 0\n";
while (<STDIN>)
{
    # Comment character
    s/;/@/g;
    # Hexadecimal constants prefaced by 0x
    s/#&/#0x/g;
    # Convert :OR: to |
    s/:OR:/ | /g;
    # Convert :AND: to &
    s/:AND:/ & /g;
    # Convert :NOT: to ~
    s/:NOT:/ ~ /g;
    # Convert :SHL: to <<
    s/:SHL:/ << /g;
    # Convert :SHR: to >>
    s/:SHR:/ >> /g;
    # Convert ELSE to .else
    s/ELSE/.else/g;
    # Convert ENDIF to .endif
    s/ENDIF/.endif/g;
    # Convert ELSEIF to .elseif
    s/ELSEIF/.elseif/g;
    # Convert LTORG to .ltorg
    s/LTORG/.ltorg/g;
    # Convert IF :DEF:to .if
    # gcc doesn't have the ability to do a conditional
    # if defined variable that is set by IF :DEF: on
    # armasm, so convert it to a normal .if and then
    # make sure to define a value elesewhere
    if (s/\bIF :DEF:\b/.if /g)
    {
        s/=/==/g;
    }
    # Convert IF to .if
    if (s/\bIF\b/.if/g)
    {
        s/=+/==/g;
    }
    # Convert INCLUDE to .INCLUDE "file"
    s/INCLUDE(\s*)(.*)$/.include $1\"$2\"/;
    # Code directive (ARM vs Thumb)
    s/CODE([0-9][0-9])/.code $1/;
    # No AREA required
    s/^\s*AREA.*$/.text/;
    # DCD to .word
    # This one is for incoming symbols
    s/DCD\s+\|(\w*)\|/.long $1/;
    # DCW to .short
    s/DCW\s+\|(\w*)\|/.short $1/;
    s/DCW(.*)/.short $1/;
    # Constants defined in scope
    s/DCD(.*)/.long $1/;
    s/DCB(.*)/.byte $1/;
    # RN to .req
    # Register-alias lines need no further processing; emit and restart.
    if (s/RN\s+([Rr]\d+|lr)/.req $1/)
    {
        print;
        next;
    }
    # Make function visible to linker, and make additional symbol with
    # prepended underscore
    s/EXPORT\s+\|([\$\w]*)\|/.global $1 \n\t.type $1, function/;
    s/IMPORT\s+\|([\$\w]*)\|/.global $1/;
    # No vertical bars required; make additional symbol with prepended
    # underscore
    s/^\|(\$?\w+)\|/_$1\n\t$1:/g;
    # Labels need trailing colon
    # s/^(\w+)/$1:/ if !/EQU/;
    # put the colon at the end of the line in the macro
    s/^([a-zA-Z_0-9\$]+)/$1:/ if !/EQU/;
    # Strip ALIGN
    s/\sALIGN/@ ALIGN/g;
    # Strip ARM
    s/\sARM/@ ARM/g;
    # Strip REQUIRE8
    #s/\sREQUIRE8/@ REQUIRE8/g;
    s/\sREQUIRE8/@ /g; #EQU cause problem
    # Strip PRESERVE8
    s/\sPRESERVE8/@ PRESERVE8/g;
    # Strip PROC and ENDPROC
    s/\sPROC/@/g;
    s/\sENDP/@/g;
    # EQU directive
    s/(.*)EQU(.*)/.equ $1, $2/;
    # Begin macro definition
    # The line after MACRO carries the macro name and formal parameters.
    if (/MACRO/) {
        $_ = <STDIN>;
        s/^/.macro/;
        s/\$//g; # remove formal param reference
        s/;/@/g; # change comment characters
    }
    # For macros, use \ to reference formal params
    s/\$/\\/g; # End macro definition
    s/MEND/.endm/; # No need to tell it where to stop assembling
    next if /^\s*END\s*$/;
    print;
}
| roisagiv/webrtc-ios | third_party/libvpx/source/libvpx/build/make/ads2gas.pl | Perl | bsd-3-clause | 3,535 |
=begin comment
OEML - REST API
This section will provide necessary information about the `CoinAPI OEML REST API` protocol. <br/> This API is also available in the Postman application: <a href=\"https://postman.coinapi.io/\" target=\"_blank\">https://postman.coinapi.io/</a> <br/><br/> Implemented Standards: * [HTTP1.0](https://datatracker.ietf.org/doc/html/rfc1945) * [HTTP1.1](https://datatracker.ietf.org/doc/html/rfc2616) * [HTTP2.0](https://datatracker.ietf.org/doc/html/rfc7540)
The version of the OpenAPI document: v1
Contact: support@coinapi.io
Generated by: https://openapi-generator.tech
=end comment
=cut
#
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# Do not edit the class manually.
# Ref: https://openapi-generator.tech
#
package WWW::OpenAPIClient::ApiClient;
use strict;
use warnings;
use utf8;
use MIME::Base64;
use LWP::UserAgent;
use HTTP::Headers;
use HTTP::Response;
use HTTP::Request::Common qw(DELETE POST GET HEAD PUT);
use HTTP::Status;
use URI::Query;
use JSON;
use URI::Escape;
use Scalar::Util;
use Log::Any qw($log);
use Carp;
use Module::Runtime qw(use_module);
use WWW::OpenAPIClient::Configuration;
# Construct a new API client.
# Accepts either a ready-made WWW::OpenAPIClient::Configuration object or the
# arguments needed to build one, and pairs it with a fresh LWP::UserAgent.
sub new {
    my $class = shift;

    # Reuse a configuration object when one is passed in; otherwise build one
    # from the remaining constructor arguments.
    my $config =
        ( $_[0] && ref $_[0] && ref $_[0] eq 'WWW::OpenAPIClient::Configuration' )
        ? $_[0]
        : WWW::OpenAPIClient::Configuration->new(@_);

    my $self = {
        ua     => LWP::UserAgent->new,
        config => $config,
    };

    return bless $self, $class;
}
# Set the user agent of the API client
#
# @param string $user_agent The user agent of the API client
#
# Override the User-Agent string sent with every request.
#
# @param string $user_agent the user agent of the API client
sub set_user_agent {
    my $self = shift;
    $self->{http_user_agent} = shift;
}
# Set timeout
#
# @param integer $seconds Number of seconds before timing out [set to 0 for no timeout]
#
# Set the HTTP timeout.
#
# @param integer $seconds number of seconds before timing out
#                         (0 disables the timeout); croaks when non-numeric
sub set_timeout {
    my ($self, $seconds) = @_;
    # BUG FIX: the file only does `use Scalar::Util;` (no import list), so the
    # previous bare looks_like_number() call was an undefined subroutine at
    # runtime. Call it fully qualified instead.
    if (!Scalar::Util::looks_like_number($seconds)) {
        croak('Timeout variable must be numeric.');
    }
    $self->{http_timeout} = $seconds;
}
# make the HTTP request
# @param string $resourcePath path to method endpoint
# @param string $method method to call
# @param array $queryParams parameters to be place in query URL
# @param array $postData parameters to be placed in POST body
# @param array $headerParams parameters to be place in request header
# @return mixed
# Perform one HTTP request against the configured base URL and return the
# raw response body. Croaks on any non-success HTTP status.
#
# @param string $resource_path path to method endpoint
# @param string $method        HTTP verb (POST/PUT/GET/HEAD/DELETE)
# @param hashref $query_params parameters appended to the query string
# @param hashref $post_params  form parameters for the request body
# @param hashref $header_params request headers
# @param $body_data            raw body or a model object with to_hash()
# @param arrayref $auth_settings auth scheme names for this endpoint
sub call_api {
    my $self = shift;
    my ($resource_path, $method, $query_params, $post_params, $header_params, $body_data, $auth_settings) = @_;

    # update parameters based on authentication settings
    $self->update_params_for_auth($header_params, $query_params, $auth_settings);

    my $_url = $self->{config}{base_url} . $resource_path;

    # build query
    if (%$query_params) {
        # eval guards against URI::Query failing on odd input; on failure the
        # URL is left with a trailing '?'
        $_url = ($_url . '?' . eval { URI::Query->new($query_params)->stringify });
    }

    # body data: serialize model objects to a JSON string
    $body_data = to_json($body_data->to_hash) if defined $body_data && $body_data->can('to_hash'); # model to json string
    # form parameters take precedence over a raw/JSON body
    my $_body_data = %$post_params ? $post_params : $body_data;

    # Make the HTTP request
    my $_request;
    if ($method eq 'POST') {
        # multipart: HTTP::Request::Common expects the literal 'form-data'
        $header_params->{'Content-Type'} = lc $header_params->{'Content-Type'} eq 'multipart/form' ?
            'form-data' : $header_params->{'Content-Type'};
        $_request = POST($_url, %$header_params, Content => $_body_data);
    }
    elsif ($method eq 'PUT') {
        # multipart
        $header_params->{'Content-Type'} = lc $header_params->{'Content-Type'} eq 'multipart/form' ?
            'form-data' : $header_params->{'Content-Type'};
        $_request = PUT($_url, %$header_params, Content => $_body_data);
    }
    elsif ($method eq 'GET') {
        my $headers = HTTP::Headers->new(%$header_params);
        $_request = GET($_url, %$header_params);
    }
    elsif ($method eq 'HEAD') {
        my $headers = HTTP::Headers->new(%$header_params);
        $_request = HEAD($_url,%$header_params);
    }
    elsif ($method eq 'DELETE') { #TODO support form data
        my $headers = HTTP::Headers->new(%$header_params);
        $_request = DELETE($_url, %$headers);
    }
    elsif ($method eq 'PATCH') { #TODO
    }
    else {
        # unsupported verb: $_request stays undef and the request below fails
    }

    # per-call overrides win over the configuration defaults
    $self->{ua}->timeout($self->{http_timeout} || $self->{config}{http_timeout});
    $self->{ua}->agent($self->{http_user_agent} || $self->{config}{http_user_agent});

    $log->debugf("REQUEST: %s", $_request->as_string);
    my $_response = $self->{ua}->request($_request);
    $log->debugf("RESPONSE: %s", $_response->as_string);

    unless ($_response->is_success) {
        croak(sprintf "API Exception(%s): %s\n%s", $_response->code, $_response->message, $_response->content);
    }

    return $_response->content;
}
# Take value and turn it into a string suitable for inclusion in
# the path, by url-encoding.
# @param string $value a string which will be part of the path
# @return string the serialized object
# URL-encode a value for safe inclusion in a request path.
#
# @param string $value a string which will be part of the path
# @return string the escaped value
sub to_path_value {
    my $self = shift;
    return uri_escape( $self->to_string(shift) );
}
# Take value and turn it into a string suitable for inclusion in
# the query, by imploding comma-separated if it's an object.
# If it's a string, pass through unchanged. It will be url-encoded
# later.
# @param object $object an object to be serialized to a string
# @return string the serialized object
# Serialize a value for use in the query string: array refs become a
# comma-separated list, anything else goes through to_string().
# URL-encoding happens later.
#
# @param object $object value to serialize
# @return string the serialized value
sub to_query_value {
    my ($self, $object) = @_;
    return ref($object) eq 'ARRAY'
        ? join(',', @$object)
        : $self->to_string($object);
}
# Take value and turn it into a string suitable for inclusion in
# the header. If it's a string, pass through unchanged
# If it's a datetime object, format it in ISO8601
# @param string $value a string which will be part of the header
# @return string the header string
# Serialize a value for use as a request header (DateTime objects become
# ISO8601 via to_string, plain strings pass through).
#
# @param string $value header value
# @return string the header string
sub to_header_value {
    my $self = shift;
    return $self->to_string(shift);
}
# Take value and turn it into a string suitable for inclusion in
# the http body (form parameter). If it's a string, pass through unchanged
# If it's a datetime object, format it in ISO8601
# @param string $value the value of the form parameter
# @return string the form string
# Serialize a value for use as a form parameter (DateTime objects become
# ISO8601 via to_string, plain strings pass through).
#
# @param string $value form parameter value
# @return string the form string
sub to_form_value {
    my $self = shift;
    return $self->to_string(shift);
}
# Take value and turn it into a string suitable for inclusion in
# the parameter. If it's a string, pass through unchanged
# If it's a datetime object, format it in ISO8601
# @param string $value the value of the parameter
# @return string the header string
# Stringify a value: DateTime objects are rendered in ISO8601, everything
# else is returned unchanged.
#
# @param string $value the value to stringify
# @return string
sub to_string {
    my ($self, $value) = @_;
    return ref($value) eq 'DateTime' ? $value->datetime() : $value;
}
# Deserialize a JSON string into an object
#
# @param string $class class name is passed as a string
# @param string $data data of the body
# @return object an instance of $class
# Deserialize a JSON string (or already-decoded structure) into the Perl
# structure described by $class. Supported class specs: "HASH[K,V]",
# "ARRAY[T]" (both recurse on the element type), "DateTime", the primitive
# names string/int/float/bool/object (returned as-is), and otherwise a
# generated model class loaded as WWW::OpenAPIClient::Object::$class.
#
# @param string $class class name is passed as a string
# @param string $data data of the body
# @return object an instance of $class
sub deserialize
{
    my ($self, $class, $data) = @_;
    $log->debugf("deserializing %s for %s", $data, $class);

    if (not defined $data) {
        return undef;
    } elsif ( (substr($class, 0, 5)) eq 'HASH[') { #hash
        if ($class =~ /^HASH\[(.*),(.*)\]$/) {
            # $key_type is captured but unused: JSON object keys are always strings
            my ($key_type, $type) = ($1, $2);
            my %hash;
            my $decoded_data = decode_json $data;
            foreach my $key (keys %$decoded_data) {
                if (ref $decoded_data->{$key} eq 'HASH') {
                    # nested objects are re-encoded so the recursive call can decode them
                    $hash{$key} = $self->deserialize($type, encode_json $decoded_data->{$key});
                } else {
                    $hash{$key} = $self->deserialize($type, $decoded_data->{$key});
                }
            }
            return \%hash;
        } else {
            #TODO log error
        }
    } elsif ( (substr($class, 0, 6)) eq 'ARRAY[' ) { # array of data
        return $data if $data eq '[]'; # return if empty array
        my $_sub_class = substr($class, 6, -1);
        my $_json_data = decode_json $data;
        my @_values = ();
        foreach my $_value (@$_json_data) {
            if (ref $_value eq 'ARRAY') {
                push @_values, $self->deserialize($_sub_class, encode_json $_value);
            } else {
                push @_values, $self->deserialize($_sub_class, $_value);
            }
        }
        return \@_values;
    } elsif ($class eq 'DateTime') {
        # NOTE(review): neither str2time nor DateTime is visibly imported in
        # this file — this branch looks like it would die at runtime; confirm
        # against the module's full import list.
        return DateTime->from_epoch(epoch => str2time($data));
    } elsif (grep /^$class$/, ('string', 'int', 'float', 'bool', 'object')) {
        return $data;
    } else { # model
        my $_instance = use_module("WWW::OpenAPIClient::Object::$class")->new;
        if (ref $data eq "HASH") {
            return $_instance->from_hash($data);
        } else { # string, need to json decode first
            return $_instance->from_hash(decode_json $data);
        }
    }
}
# return 'Accept' based on an array of accept provided
# @param [Array] header_accept_array Array fo 'Accept'
# @return String Accept (e.g. application/json)
# Pick the 'Accept' header from the accept types an endpoint offers:
# undef when nothing is offered, 'application/json' when it is among the
# choices (case-insensitively), otherwise all choices joined by commas.
#
# @param [Array] header_accept_array array of 'Accept' values
# @return String Accept (e.g. application/json)
sub select_header_accept
{
    my ($self, @header) = @_;

    return undef if !@header || (@header == 1 && $header[0] eq '');
    return 'application/json' if grep { m{^application/json$}i } @header;
    return join(',', @header);
}
# return the content type based on an array of content-type provided
# @param [Array] content_type_array Array fo content-type
# @return String Content-Type (e.g. application/json)
# Pick the 'Content-Type' header from the content types an endpoint accepts:
# defaults to 'application/json' when nothing is offered, prefers
# 'application/json' when present (case-insensitively), otherwise joins all
# choices with commas.
#
# @param [Array] content_type_array array of content-type values
# @return String Content-Type (e.g. application/json)
sub select_header_content_type
{
    my ($self, @header) = @_;

    return 'application/json' if !@header || (@header == 1 && $header[0] eq ''); # default
    return 'application/json' if grep { m{^application/json$}i } @header;
    return join(',', @header);
}
# Get API key (with prefix if set)
# @param string key name
# @return string API key with the prefix
# Look up an API key by name, prepending its configured prefix when one is
# set (e.g. "Bearer <key>"). Returns nothing when no key is configured.
#
# @param string key name
# @return string API key with the prefix
sub get_api_key_with_prefix
{
    my ($self, $key_name) = @_;

    my $config  = $self->{config};
    my $api_key = $config->{api_key}{$key_name} or return;
    my $prefix  = $config->{api_key_prefix}{$key_name};

    return $prefix ? join(' ', $prefix, $api_key) : $api_key;
}
# update header and query param based on authentication setting
#
# @param array $headerParams header parameters (by ref)
# @param array $queryParams query parameters (by ref)
# @param array $authSettings array of authentication scheme (e.g ['api_key'])
# Update header and query params based on the endpoint's authentication
# settings. When the endpoint declares no settings at all, fall back to
# applying every token found in the configuration (_global_auth_setup).
#
# @param array $headerParams header parameters (by ref)
# @param array $queryParams query parameters (by ref)
# @param array $authSettings array of authentication scheme (e.g ['api_key'])
sub update_params_for_auth {
    my ($self, $header_params, $query_params, $auth_settings) = @_;
    return $self->_global_auth_setup($header_params, $query_params)
        unless $auth_settings && @$auth_settings;

    # one endpoint can have more than 1 auth settings
    foreach my $auth (@$auth_settings) {
        # determine which one to use
        if (!defined($auth)) {
            # TODO show warning about auth setting not defined
        }
        else {
            # NOTE(review): both branches are stubs — no scheme is actually
            # applied here, so auth only happens through the early-return
            # _global_auth_setup path above. Generated code; confirm intended.
            # TODO show warning about security definition not found
        }
    }
}
# The endpoint API class has not found any settings for auth. This may be deliberate,
# in which case update_params_for_auth() will be a no-op. But it may also be that the
# OpenAPI Spec does not describe the intended authorization. So we check in the config for any
# auth tokens and if we find any, we use them for all endpoints;
# The endpoint API class has not found any settings for auth. This may be deliberate,
# in which case update_params_for_auth() will be a no-op. But it may also be that the
# OpenAPI Spec does not describe the intended authorization. So we check in the config for any
# auth tokens and if we find any, we use them for all endpoints.
sub _global_auth_setup {
    my ($self, $header_params, $query_params) = @_;

    my $tokens = $self->{config}->get_tokens;
    return unless keys %$tokens;

    # basic auth: consume username/password and emit a Basic Authorization header
    if (my $uname = delete $tokens->{username}) {
        my $pword = delete $tokens->{password};
        $header_params->{'Authorization'} = 'Basic '.encode_base64($uname.":".$pword);
    }

    # oauth: a bearer token overwrites any Basic header set above
    if (my $access_token = delete $tokens->{access_token}) {
        $header_params->{'Authorization'} = 'Bearer ' . $access_token;
    }

    # remaining tokens are API keys routed by their configured location
    foreach my $token_name (keys %$tokens) {
        my $in = $tokens->{$token_name}->{in};
        my $token = $self->get_api_key_with_prefix($token_name);
        # NOTE(review): location is spelled 'head' (not 'header') — this must
        # match whatever Configuration::get_tokens produces; confirm there.
        if ($in eq 'head') {
            $header_params->{$token_name} = $token;
        }
        elsif ($in eq 'query') {
            $query_params->{$token_name} = $token;
        }
        else {
            die "Don't know where to put token '$token_name' ('$in' is not 'head' or 'query')";
        }
    }
}
1;
| coinapi/coinapi-sdk | oeml-sdk/perl/lib/WWW/OpenAPIClient/ApiClient.pm | Perl | mit | 12,297 |
#!/usr/bin/env perl
package ProNTo::Methods;
#use diagnostics;
use strict;
use warnings;
use Getopt::Long;
use File::Spec; # for class methods related to file path parsing
use File::Basename; # for dirname function
use Exporter;
our @ISA = qw(Exporter);
# #################################
# TBC : which variables to export ?
# #################################
our @EXPORT_OK = qw(help);
# Initialising variables in scope, then assign some of them
my ($abslibfile, $libdirname);
BEGIN {
# __FILE__ provides a relative path to the bin file
# According to http://stackoverflow.com/a/90721/2668831 this is the most robust method
$abslibfile = File::Spec->rel2abs(__FILE__);
# use dirname to go "up" a directory from lib file to the folder containing other lib files, then descend into the lib path:
$libdirname = dirname($abslibfile);
# set some default variable values here?
};
use lib $libdirname;
# This module exists only to provide/expose common methods to other modules
# It has no main subroutine, nor a PACKAGE line specifying any default function call.
# Look up a canned error message by its symbolic key.
# Returns undef when called without a key or with an unknown key.
sub helpMessages {
    my ($key) = @_;

    my %message_for = (
        BAD_SEQ_FILE => 'Error, could not open input file.',
        NO_SEQ_FILE  => 'Error, could not open input file: no file specified.',
        NO_ENZ       => 'No enzyme selected',
        BAD_MIN_ORF  => 'Error, minimum ORF size must be a non-negative integer.',
        ORF_SELECT   => 'Error, please supply either a comma-separated list of reading frames to use (1 to 6) or set the use-longest-orf flag',
    );

    # unknown or missing key: nothing to report
    return undef unless defined $key && exists $message_for{$key};
    return $message_for{$key};
}
# Build the generic usage/help text shown to the user.
sub usageStatement {
    # Trailing empty element gives the text a final newline after join.
    my $usage = join "\n",
        'Protein Digestion Options',
        '=====================================',
        'Options:',
        ' [-t]: digests protein with trypsin',
        ' [-l]: digests protein with endoproteinase Lys-C',
        ' [-a]: digests protein with endoproteinase Arg-C',
        ' [-v]: digests protein with V8 proteinase',
        ' [-c]: number of allowed missed cleavages. Default value is 0.',
        ' [-h]: prints help',
        '';
    return $usage;
}
# Assemble the full help text: an optional topic-specific message followed by
# the generic usage statement.
sub help {
    my ($topic) = @_;
    my $usage = usageStatement();

    # No topic requested: just the generic usage text.
    return $usage if !defined $topic;

    my $message = helpMessages($topic);
    if (defined $message) {
        return join "\n", $message, $usage;
    }

    # Unknown internal key: keep the usage text but flag the bad parameter
    # (shouldn't happen — useful for debugging).
    return $usage . "\nWARNING! Unknown internal parameter to helpMessages(): $topic";
}
# End of module evaluates to true
1;
__END__
# End of file evaluates to false
0; | lmmx/ProNTo | lib/ProNTo/Methods.pm | Perl | mit | 3,414 |
#!/usr/bin/perl -w
# Use DBus to create a conference call with skype
# Released 10 March 2009 by orthopteroid@gmail.net
# Requires the perl module Net::DBus
# Based upon "Control Skype API via DBus using perl" by jlh at gmx dot ch
use strict;
use warnings;
package SkypeConference; # -----------------------------------------------------------
use base 'Net::DBus::Object';
use Net::DBus;
$SkypeConference::CONF_ID = 0;
@SkypeConference::Attendees =();
# Construct the DBus-backed Skype client, register ourselves so Skype can
# call Notify() on us, perform the API handshake, and start a call to every
# attendee in the space-separated $AttendeeList.
sub new {
    my ($class, $AttendeeList) = @_;
    my $bus = Net::DBus->session;
    # export a service and the object /com/Skype/Client, so Skype can
    # invoke the 'Notify' method on it in order to communicate with us.
    my $exp_service = $bus->export_service("com.Skype.API") or die;
    my $self = $class->SUPER::new($exp_service, '/com/Skype/Client') or die;
    bless $self, $class;
    # get a handle to Skype's /com/Skype object, so we can invoke the
    # 'Invoke' method on it to communicate with Skype.
    my $service = $bus->get_service("com.Skype.API") or die;
    $self->{invoker} = $service->get_object("/com/Skype") or die;
    # setup is done, let's shake hands
    print $self->Invoke("NAME SkypeConference") . "\n";
    print $self->Invoke("PROTOCOL 8") . "\n";
    # CALL with multiple comma-separated numbers starts a conference
    $AttendeeList =~ s/ /, /g; # convert spaces to 'comma space's
    print $self->Invoke("CALL $AttendeeList") . "\n";
    return $self;
}
# DBus callback: Skype invokes this with one status string per API event.
# Tracks call legs as they join the conference and tears everything down
# when any leg finishes.
sub Notify {
    my ($self, $string) = @_;
    # Record each participant's call id and the conference id Skype assigns.
    if ( $string =~ /CALL (\w+) CONF_ID (\w+)/ )
    {
        push @SkypeConference::Attendees, $1;
        $SkypeConference::CONF_ID = $2;
    }
    # DURATION events arrive continuously during a call; don't echo them.
    if ( $string !~ /DURATION/ ) { print "-> $string\n"; }
    if ( $string =~ /FINISHED/ )
    {
        # 1st caller out kills the conference
        foreach ( @SkypeConference::Attendees )
        {
            print $self->Invoke("ALTER CALL $_ HANGUP") . "\n";
        }
        @SkypeConference::Attendees = ();
        # stop the event loop so the script can exit
        (Net::DBus::Reactor->main)->shutdown();
    }
    return 0; # careful what is returned here
}
# Echo an API command to STDOUT and forward it to Skype over DBus,
# returning Skype's reply string.
sub Invoke {
    my ($self, $string) = @_;
    print "$string\n";
    return $self->{invoker}->Invoke($string);
}
package main; # ---------------------------------------------------------------
use Net::DBus::Reactor;
# A conference needs at least two other parties.
if( $#ARGV < 1 ) { die "Must have at least 2 numbers to conference.\n"; }
# All command-line numbers become the space-separated attendee list.
my $skype = SkypeConference->new( "@ARGV" );
# Hook main loop: blocks here dispatching DBus events until Notify()
# shuts the reactor down after the call finishes.
my $reactor = Net::DBus::Reactor->main;
$reactor->run;
# end of file
| orthopteroid/dbus-skype-conference | skype-conference.pl | Perl | mit | 2,288 |
package WebService::Telnic::Client::Domain;
#
# Zone and domain management operations for the Telnic SOAP API. Each method
# composes the XML body for one request in the "domain" namespace and sends
# it via the composing class' soap() method; responses are parsed with XMLin
# from WebService::Telnic::Util. Mutating calls return 1 on success and
# nothing on failure; getters/listers return the parsed response structure.
#

use strict;
use warnings;

use Carp;    # BUG FIX: croak() was called below but Carp was never imported

use WebService::Telnic::Util qw(XMLin);

our $VERSION = '0.2';

# Create the DNS zone for this client's domain. Requires nameserverCount
# and/or a nameservers arrayref of host names.
sub createZone {
    my $self = shift;
    my %data = @_;

    croak("Need at least one of nameserverCount or nameservers")
        unless $data{nameserverCount} || $data{nameservers};
    $data{nameservers} ||= [];

    my $method = join "#", $self->{namespaces}->{domain}, "createZoneRequest";
    my $body   = qq(<domain:createZoneRequest zoneName="$self->{domain}">);
    $body .= qq(<domain:nameservers) . ($data{nameserverCount} ? qq( count="$data{nameserverCount}">) : ">");
    $body .= qq(<domain:host>$_</domain:host>) for @{ $data{nameservers} };
    $body .= qq(</domain:nameservers></domain:createZoneRequest>);

    my $res = $self->soap($method, $body);
    return unless $res->is_success;
    return 1;
}

# Update the zone's nameserver set; same arguments as createZone.
sub updateZone {
    my $self = shift;
    my %data = @_;

    croak("Need at least one of nameserverCount or nameservers")
        unless $data{nameserverCount} || $data{nameservers};
    $data{nameservers} ||= [];

    my $method = join "#", $self->{namespaces}->{domain}, "updateZoneRequest";
    my $body   = qq(<domain:updateZoneRequest zoneName="$self->{domain}">);
    $body .= qq(<domain:nameservers) . ($data{nameserverCount} ? qq( count="$data{nameserverCount}">) : ">");
    $body .= qq(<domain:host>$_</domain:host>) for @{ $data{nameservers} };
    $body .= qq(</domain:nameservers></domain:updateZoneRequest>);

    my $res = $self->soap($method, $body);
    return unless $res->is_success;
    return 1;
}

# Delete the zone for this client's domain.
sub deleteZone {
    my $self = shift;

    my $method = join "#", $self->{namespaces}->{domain}, "deleteZoneRequest";
    my $body   = qq(<domain:deleteZoneRequest zoneName="$self->{domain}" />);

    my $res = $self->soap($method, $body);
    return unless $res->is_success;
    return 1;
}

# Fetch the zone definition; returns the parsed getZoneResponse structure.
sub getZone {
    my $self = shift;

    my $method = join "#", $self->{namespaces}->{domain}, "getZoneRequest";
    my $body   = qq(<domain:getZoneRequest zoneName="$self->{domain}" />);

    my $res = $self->soap($method, $body);
    return unless $res->is_success;

    my $xml = XMLin( $res->content, RemoveNS => 1, ForceArray => [qw(keyword)] );
    return $xml->{Body}->{getZoneResponse};
}

# List all zones; returns the parsed listZonesResponse structure.
sub listZones {
    my $self = shift;

    my $method = join "#", $self->{namespaces}->{domain}, "listZonesRequest";
    my $body   = qq(<domain:listZonesRequest />);

    my $res = $self->soap($method, $body);
    return unless $res->is_success;

    my $xml = XMLin( $res->content, RemoveNS => 1, ForceArray => [qw(keyword)] );
    return $xml->{Body}->{listZonesResponse};
}

# Register this client's domain.
sub createDomain {
    my $self = shift;

    my $method = join "#", $self->{namespaces}->{domain}, "createDomainRequest";
    my $body   = qq(<domain:createDomainRequest domainName="$self->{domain}" />);

    my $res = $self->soap($method, $body);
    return unless $res->is_success;
    return 1;
}

# Delete this client's domain.
sub deleteDomain {
    my $self = shift;

    my $method = join "#", $self->{namespaces}->{domain}, "deleteDomainRequest";
    my $body   = qq(<domain:deleteDomainRequest domainName="$self->{domain}" />);

    my $res = $self->soap($method, $body);
    return unless $res->is_success;
    return 1;
}

# Fetch the domain record; returns the parsed getDomainResponse structure.
sub getDomain {
    my $self = shift;

    my $method = join "#", $self->{namespaces}->{domain}, "getDomainRequest";
    my $body   = qq(<domain:getDomainRequest domainName="$self->{domain}" />);

    my $res = $self->soap($method, $body);
    return unless $res->is_success;

    my $xml = XMLin( $res->content, RemoveNS => 1, ForceArray => [qw(keyword)] );
    return $xml->{Body}->{getDomainResponse};
}

# List all domains; returns the parsed listDomainsResponse structure.
sub listDomains {
    my $self = shift;

    my $method = join "#", $self->{namespaces}->{domain}, "listDomainsRequest";
    my $body   = qq(<domain:listDomainsRequest />);

    my $res = $self->soap($method, $body);
    return unless $res->is_success;

    my $xml = XMLin( $res->content, RemoveNS => 1, ForceArray => [qw(keyword)] );
    return $xml->{Body}->{listDomainsResponse};
}

1;
| pmakholm/telnic-perl | lib/WebService/Telnic/Client/Domain.pm | Perl | mit | 4,248 |
#!/usr/bin/perl
#
# List every installed DBI driver together with the data sources it exposes.
#
use strict;
use warnings;
use DBI;

my @drivers = DBI->available_drivers();
# BUG FIX: message previously read "No dirvers defined!" (typo).
die "No drivers defined! \n" unless @drivers;

foreach my $driver (@drivers) {
    print "Driver: $driver \n";
    my @data_sources = DBI->data_sources($driver);
    foreach my $data_source (@data_sources) {
        print "\t Data Source: $data_source \n";
    }
    print "\n";
}
| MarsBighead/mustang | Perl/db-info.pl | Perl | mit | 381 |
#
# FOAMkit: Project management for OpenFOAM
#
# sim.pm - Subroutines for running simulation.
#
package sim;
use strict;
use warnings;
use List::Util qw( max );
use foamkitenv;
use kittools;
use kitsetup;
use base 'Exporter';
our @EXPORT = qw/ reset_initial_fields run_simulation monitor_simulation end_simulation continue_simulation is_sim_running get_last_sim get_last_sim_number /;
#
# Resets the 0 directory.
#
# Reset the case's 0/ (initial fields) directory by forcing its re-setup.
sub reset_initial_fields
{
    # NOTE(review): %env is unused here; kept in case get_foamkit_env() has
    # side effects — TODO confirm and drop if not.
    my %env = get_foamkit_env();

    # Simply force the setup of the initial directory
    setup_dir("initial", 1, 1);
}
#
# Runs a simulation.
#
# Start a fresh simulation in a new sim<N>_<timestamp> directory under SIM_DIR.
sub run_simulation
{
    my %env = get_foamkit_env();

    # Get the output directory name: next free number plus a timestamp suffix
    my $simnum = get_sim_number();
    my $outdir = "$env{SIM_DIR}/sim${simnum}_" . get_timestamp();

    # Create the output directory
    mkdir "$outdir", 0755;

    # Third argument 0 = fresh run (not a continuation)
    do_simulation($outdir, $simnum, 0);
}
#
# Monitor the currently running simulation.
#
# Attach the monitoring script to the currently running simulation.
# Returns 1 on success, 0 when nothing is running.
sub monitor_simulation
{
    my %env = get_foamkit_env();

    if (!is_sim_running())
    {
        print "No simulation running!\n";
        return 0;
    }

    # Monitor script gets the running simulation's output directory
    my $outdir = get_last_sim();
    run_command("bash $env{CASE_DIR}/scripts/monitor/Allrun $outdir", $env{CASE_DIR});
    return 1;
}
#
# End a currently running simulation.
#
# Ask the case's Allend script to stop the currently running simulation.
# Returns 1 on success, 0 when nothing is running.
sub end_simulation
{
    my %env = get_foamkit_env();

    if (!is_sim_running())
    {
        print "No simulation running!\n";
        return 0;
    }

    run_command("bash $env{CASE_DIR}/scripts/sim/Allend", $env{CASE_DIR});
    return 1;
}
#
# Continues the last run simulation.
#
# Resume the most recent simulation in its original output directory
# (third argument 1 = continuation, appends to the existing log).
sub continue_simulation
{
    do_simulation(get_last_sim(), get_last_sim_number(), 1);
}
#
# Helper for simulation.
#
# Shared worker for run_simulation()/continue_simulation().
#
# $outdir   - simulation output directory (must already exist)
# $simnum   - simulation number for logging
# $continue - truthy to append to an existing run's log instead of starting fresh
#
# Returns 1 on completion, 0 if another simulation is already running.
sub do_simulation
{
    my ($outdir, $simnum, $continue) = @_;
    my %env = get_foamkit_env();

    # Make sure nothing is running right now
    if (is_sim_running())
    {
        print STDERR "There is already something running! If you've killed the process manually, remove the simulation_running line in foamkit.dat and try again.\n";
        return 0;
    }

    # Save state so other commands (monitor/end) can find this run
    add_setup_data(("simulation_running" => 1, "last_simulation" => "$outdir"));

    my $logfile;

    # All output should go both to the screen and to a file
    if ($continue)
    {
        open $logfile, ">>", "$outdir/sim.txt" or die "Cannot open $outdir/sim.txt: $!\n";
        # BUG FIX: this was `print $logfile, "\n\n";` — the comma made
        # $logfile an ordinary list element printed to STDOUT instead of
        # being used as the filehandle. Braces force handle semantics.
        print {$logfile} "\n\n";
        log_text($logfile, "CONTINUING\n");
        log_text($logfile, "==========\n\n");
    }
    else
    {
        open $logfile, ">", "$outdir/sim.txt" or die "Cannot open $outdir/sim.txt: $!\n";
        log_text($logfile, "RUNNING SIMULATION\n");
        log_text($logfile, "==================\n\n");
    }

    my $starttime = time;

    log_text($logfile, "Sim number: $simnum\n");
    log_text($logfile, "Start time: " . get_timestamp(1) . "\n");
    log_text($logfile, "Output directory: $outdir\n");
    log_text($logfile, "\n");

    # Do simulation: there should be an Allrun script in the sim directory
    run_command("bash $env{CASE_DIR}/scripts/sim/Allrun $outdir $continue", $env{CASE_DIR}, $logfile);

    my $endtime = time;
    my $duration = $endtime - $starttime;

    log_text($logfile, "\nFinish time: " . get_timestamp(1) . "\n");
    log_text($logfile, "Duration: " . format_timespan($duration) . "\n");

    close $logfile;

    # Clear the lock so subsequent runs are allowed
    add_setup_data(("simulation_running" => 0));
    return 1;
}
#
# Returns 1 if a simulation is running, 0 otherwise.
#
# True when foamkit.dat records an active simulation (simulation_running == 1).
sub is_sim_running
{
    my %setup_data = get_setup_data();
    my $flag = $setup_data{simulation_running};
    return (defined $flag && $flag == 1);
}
#
# Returns the directory of the last simulation done.
#
# Return the output directory of the most recent simulation,
# or the empty string when none has been recorded.
sub get_last_sim
{
    my %setup_data = get_setup_data();
    my $last = $setup_data{last_simulation};
    return defined $last ? $last : "";
}
#
# Returns the number of the last simulation done.
#
# Extract the simulation number N from the last run's directory name,
# which has the form .../simN_<date>_<time>.
sub get_last_sim_number
{
    my $simnum = get_last_sim();
    $simnum =~ s{^.*/sim(\d+)_\d+_\d+$}{$1};
    return $simnum;
}
#
# Returns the next available simulation number. e.g. if last simulation was
# 23 (as in, it was in the directory sim23_*), this one is 24 and uses the
# directory sim24_xxxxxx_xxxxxx.
#
# Return the next available simulation number: one more than the highest
# simN_* directory found under SIM_DIR, or 1 when none exist.
sub get_sim_number
{
    my %env = get_foamkit_env();
    my $simdir = $env{SIM_DIR};

    # Collect every number used by an existing simN* directory
    my @numbers;
    for my $entry (read_files_in_dir($simdir, 0)) { # 0 = not recursive
        $entry =~ s{^$simdir/}{};
        push @numbers, $1 if $entry =~ /^sim(\d+)/;
    }

    return 1 unless @numbers;
    return max(@numbers) + 1;
}
1;
| MasterOfBinary/foamkit | lib/sim.pm | Perl | mit | 4,400 |
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::juniper::common::screenos::mode::memory;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
# Plugin-mode constructor: registers the command-line options this mode
# accepts and sets its version string.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    # Mode version reported by the plugin framework
    $self->{version} = '1.0';
    # warning/critical thresholds are percentages of total memory (see run())
    $options{options}->add_options(arguments =>
                                {
                                  "warning:s"               => { name => 'warning' },
                                  "critical:s"              => { name => 'critical' },
                                });

    return $self;
}
# Validate the warning/critical threshold strings; exits with an error
# message when either fails to parse.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::init(%options);

    # threshold_validate returns 0 when the threshold string is malformed
    if (($self->{perfdata}->threshold_validate(label => 'warning', value => $self->{option_results}->{warning})) == 0) {
       $self->{output}->add_option_msg(short_msg => "Wrong warning threshold '" . $self->{option_results}->{warning} . "'.");
       $self->{output}->option_exit();
    }
    if (($self->{perfdata}->threshold_validate(label => 'critical', value => $self->{option_results}->{critical})) == 0) {
       $self->{output}->add_option_msg(short_msg => "Wrong critical threshold '" . $self->{option_results}->{critical} . "'.");
       $self->{output}->option_exit();
    }
}
# Poll the NETSCREEN-RESOURCE-MIB memory counters, compute used/free/
# fragmented percentages of the total, compare used% against the thresholds
# and emit the status line plus perfdata.
sub run {
    my ($self, %options) = @_;
    $self->{snmp} = $options{snmp};

    # NETSCREEN-RESOURCE-MIB: allocated, free and fragmented memory (bytes)
    my $oid_nsResMemAllocate= '.1.3.6.1.4.1.3224.16.2.1.0';
    my $oid_nsResMemLeft = '.1.3.6.1.4.1.3224.16.2.2.0';
    my $oid_nsResMemFrag = '.1.3.6.1.4.1.3224.16.2.3.0';
    my $result = $self->{snmp}->get_leef(oids => [$oid_nsResMemAllocate, $oid_nsResMemLeft, $oid_nsResMemFrag], nothing_quit => 1);

    my $memory_used = $result->{$oid_nsResMemAllocate};
    my $memory_free = $result->{$oid_nsResMemLeft};
    my $memory_frag = $result->{$oid_nsResMemFrag};
    # total is reconstructed from the three counters (no total OID polled)
    my $total_size = $memory_used + $memory_free + $memory_frag;

    my $prct_used = $memory_used * 100 / $total_size;
    my $prct_free = $memory_free * 100 / $total_size;
    my $prct_frag = $memory_frag * 100 / $total_size;

    # thresholds apply to the used percentage only
    my $exit = $self->{perfdata}->threshold_check(value => $prct_used, threshold => [ { label => 'critical', 'exit_litteral' => 'critical' }, { label => 'warning', exit_litteral => 'warning' } ]);
    my ($total_value, $total_unit) = $self->{perfdata}->change_bytes(value => $total_size);
    my ($used_value, $used_unit) = $self->{perfdata}->change_bytes(value => $memory_used);
    my ($free_value, $free_unit) = $self->{perfdata}->change_bytes(value => $memory_free);
    my ($frag_value, $frag_unit) = $self->{perfdata}->change_bytes(value => $memory_frag);

    $self->{output}->output_add(severity => $exit,
                                short_msg => sprintf("Memory Usage Total: %s Used: %s (%.2f%%) Free: %s (%.2f%%) Fragmented: %s (%.2f%%)",
                                            $total_value . " " . $total_unit,
                                            $used_value . " " . $used_unit, $prct_used,
                                            $free_value . " " . $free_unit, $prct_free,
                                            $frag_value . " " . $frag_unit, $prct_frag));

    # perfdata in bytes; thresholds converted to absolute values of total
    $self->{output}->perfdata_add(label => "used", unit => 'B',
                                  value => $memory_used,
                                  warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning', total => $total_size),
                                  critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical', total => $total_size),
                                  min => 0, max => $total_size);
    $self->{output}->perfdata_add(label => "fragmented",
                                  value => $memory_frag,
                                  min => 0, max => $total_size);

    $self->{output}->display();
    $self->{output}->exit();
}
1;
__END__
=head1 MODE
Check Juniper memory usage (NETSCREEN-RESOURCE-MIB).
=over 8
=item B<--warning>
Threshold warning in percent.
=item B<--critical>
Threshold critical in percent.
=back
=cut
| bcournaud/centreon-plugins | network/juniper/common/screenos/mode/memory.pm | Perl | apache-2.0 | 4,793 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V9::Services::KeywordPlanAdGroupService::MutateKeywordPlanAdGroupsResponse;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);
use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;
# Constructor for the mutate response message. Accepts a hash ref with
# partialFailureError (errors for failed operations when partial failure
# mode is on) and results (one entry per mutate operation).
sub new {
  my ($class, $args) = @_;
  my $self = {
    partialFailureError => $args->{partialFailureError},
    results             => $args->{results}};

  # Delete the unassigned fields in this object for a more concise JSON payload
  remove_unassigned_fields($self, $args);

  bless $self, $class;
  return $self;
}
1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V9/Services/KeywordPlanAdGroupService/MutateKeywordPlanAdGroupsResponse.pm | Perl | apache-2.0 | 1,138 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package hardware::server::dell::idrac::snmp::mode::components::storagectrl;
use strict;
use warnings;
use hardware::server::dell::idrac::snmp::mode::components::resources qw(%map_status);
# SNMP mapping for the iDRAC controller table: component status (translated
# through %map_status into readable states) and the controller's FQDD (name).
my $mapping = {
    controllerComponentStatus => { oid => '.1.3.6.1.4.1.674.10892.5.5.1.20.130.1.1.38', map => \%map_status },
    controllerFQDD => { oid => '.1.3.6.1.4.1.674.10892.5.5.1.20.130.1.1.78' },
};
# Base OID of the whole controllerTable entry, walked in a single request.
my $oid_controllerTableEntry = '.1.3.6.1.4.1.674.10892.5.5.1.20.130.1.1';

# Queue the controller table for the mode's bulk SNMP request.
sub load {
    my ($self) = @_;

    push @{$self->{request}}, { oid => $oid_controllerTableEntry };
}
# Walk every storage controller found in the SNMP results, log its status,
# and raise an alert for any controller whose status maps to non-OK severity.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking storage controllers");
    $self->{components}->{storagectrl} = {name => 'storage controllers', total => 0, skip => 0};
    # honor --filter=storagectrl
    return if ($self->check_filter(section => 'storagectrl'));

    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_controllerTableEntry}})) {
        # iterate over status entries only; other columns are picked up via map_instance
        next if ($oid !~ /^$mapping->{controllerComponentStatus}->{oid}\.(.*)$/);
        my $instance = $1;
        my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{$oid_controllerTableEntry}, instance => $instance);

        next if ($self->check_filter(section => 'storagectrl', instance => $instance));
        $self->{components}->{storagectrl}->{total}++;

        $self->{output}->output_add(long_msg => sprintf("storage controller '%s' status is '%s' [instance = %s]",
                                    $result->{controllerFQDD}, $result->{controllerComponentStatus}, $instance,
                                    ));
        # severity is resolved through the shared default.status mapping
        my $exit = $self->get_severity(label => 'default.status', section => 'storagectrl.status', value => $result->{controllerComponentStatus});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("Storage controllers '%s' status is '%s'", $result->{controllerFQDD}, $result->{controllerComponentStatus}));
        }
    }
}
1; | Sims24/centreon-plugins | hardware/server/dell/idrac/snmp/mode/components/storagectrl.pm | Perl | apache-2.0 | 2,907 |
=head1 LICENSE
Copyright [1999-2013] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package Bio::EnsEMBL::GlyphSet::gsv_transcript;
use strict;
use base qw(Bio::EnsEMBL::GlyphSet_transcript);
sub render_normal {
  ## Draw a single transcript for the gene sequence variation view:
  ## a backbone line across each subslice, exon boxes on top (filled
  ## for coding sequence, outlined for non-coding), and optionally the
  ## stable id / external name written into the left label margin.
  ## Only renders on the reverse strand so the track is drawn once.
  ##
  ## NOTE(review): removed a duplicate 'my $y' declaration (the first
  ## one was masked before ever being read) and a number of locals that
  ## were assigned but never used ($offset, @transcripts, $pix_per_bp,
  ## $bitmap_length, $transcript_drawn, $voffset, $strand, $l).
  ## Behaviour is unchanged.
  my $self = shift;
  my $type = $self->type;

  return unless defined $type;
  return unless $self->strand == -1;

  my $config = $self->{'config'};
  my $h      = 8; # Single transcript mode - set height to 30 - width to 8

  my $length       = $config->container_width;
  my $trans_ref    = $config->{'transcript'};
  my $gene         = $trans_ref->{'gene'};
  my $transcript   = $trans_ref->{'transcript'};
  my @exons        = sort { $a->[0] <=> $b->[0] } @{$trans_ref->{'exons'}};
  my $colour       = $self->my_colour($self->colour_key($gene, $transcript));
  my $coding_start = $trans_ref->{'coding_start'};
  my $coding_end   = $trans_ref->{'coding_end'};

  my ($fontname, $fontsize) = $self->get_font_details('caption');
  my @res = $self->get_text_width(0, 'X', '', font => $fontname, ptsize => $fontsize);
  my $th  = $res[3];

  # Leave room above the transcript for the labels when they are drawn.
  my $y = $config->{'_add_labels'} ? $th : 0;

  ## First of all draw the lines behind the exons.....
  foreach my $subslice (@{$config->{'subslices'}}) {
    $self->push($self->Rect({
      x         => $subslice->[0] + $subslice->[2] - 1,
      y         => $y + $h / 2,
      h         => 1,
      width     => $subslice->[1] - $subslice->[0],
      colour    => $colour,
      absolutey => 1
    }));
  }

  ## Now draw the exons themselves....
  foreach my $exon (@exons) {
    next unless defined $exon; # Skip this exon if it is not defined (can happen w/ genscans)

    my ($box_start, $box_end);

    # Features overlapping the chromosome origin on circular slices are
    # shifted by one full sequence length so the clamping below works.
    if ($exon->[0] < 0 && $transcript->slice->is_circular) {
      my $transcript_length = $transcript->slice->seq_region_length;
      $exon->[0]    += $transcript_length;
      $exon->[1]    += $transcript_length;
      $coding_start += $transcript_length;
      $coding_end   += $transcript_length;
    }

    # Clamp the drawn box to the visible slice.
    $box_start = $exon->[0];
    $box_start = 1 if $box_start < 1;
    $box_end   = $exon->[1];
    $box_end   = $length if $box_end > $length;

    # Calculate and draw the coding region of the exon
    if ($coding_start && $coding_end) {
      my $filled_start = $box_start < $coding_start ? $coding_start : $box_start;
      my $filled_end   = $box_end   > $coding_end   ? $coding_end   : $box_end;

      # only draw the coding region if there is such a region
      if ($filled_start <= $filled_end) {
        # Draw a filled rectangle in the coding region of the exon
        $self->push($self->Rect({
          x         => $filled_start - 1,
          y         => $y,
          width     => $filled_end - $filled_start + 1,
          height    => $h,
          colour    => $colour,
          absolutey => 1,
          href      => $self->href($transcript, $exon->[2]),
        }));
      }
    }

    if ($box_start < $coding_start || $box_end > $coding_end) {
      # The start of the transcript is before the start of the coding
      # region OR the end of the transcript is after the end of the
      # coding region. Non-coding portions of exons are drawn as
      # non-filled rectangles around the entire exon.
      $self->push($self->Rect({
        x            => $box_start - 1,
        y            => $y,
        width        => $box_end - $box_start + 1,
        height       => $h,
        bordercolour => $colour,
        absolutey    => 1,
        title        => $exon->[2]->stable_id,
        href         => $self->href($transcript, $exon->[2]),
      }));
    }
  }

  # Write the stable id and (when present) the external name into the
  # left margin, one per line.
  if ($config->{'_add_labels'}) {
    $h = 0;

    my $name = ' ' . $transcript->external_name;

    foreach my $text_label ($transcript->stable_id, $name) {
      next unless $text_label;
      next if $text_label eq ' ';

      $self->push($self->Text({
        x         => -100,
        y         => $h,
        height    => $th,
        width     => 0,
        font      => $fontname,
        ptsize    => $fontsize,
        halign    => 'left',
        colour    => $colour,
        text      => $text_label,
        absolutey => 1,
        absolutex => 1,
      }));

      $h += $th + 1;
    }
  }
}
sub href {
  ## Build the zmenu URL for a transcript/exon pair on this track.
  my ($self, $transcript, $exon) = @_;

  return $self->_url({
    type   => 'Gene',
    action => 'VariationTranscript',
    vt     => $transcript->stable_id,
    e      => $exon->stable_id,
  });
}
sub error_track_name { return $_[0]->species_defs->AUTHORITY. ' transcripts'; }
1;
| Ensembl/ensembl-draw | modules/Bio/EnsEMBL/GlyphSet/gsv_transcript.pm | Perl | apache-2.0 | 5,894 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Web::Component::Variation::PopulationGraphs;
use strict;
use base qw(EnsEMBL::Web::Component::Variation);
sub _init {
  ## Component setup: never cached, always fetched via AJAX.
  my ($self) = @_;
  $self->cacheable(0);
  $self->ajaxable(1);
}
sub content {
  ## Render the "Population genetics" panel: one pie chart of allele
  ## frequencies per population, with super-populations toggleable to
  ## reveal their sub-populations. Returns the panel's HTML string.
  my $self = shift;
  my $hub = $self->hub;
  my $pop_freq = $self->object->format_group_population_freqs();
  return unless defined $pop_freq;
  my $graph_id = 0;
  my $height = 50;
  my $width = 118;
  my $max_width = 150;
  # Above this many populations only the 'ALL' chart is drawn.
  my $max_pie_chart_count = 35;
  my (@graphs, $pop_tree, %sub_pops, @alleles);
  my $vf = $hub->param('vf');
  my $vf_object = $vf ? $hub->database('variation')->get_VariationFeatureAdaptor->fetch_by_dbID($vf) : undef;
  my $ref_allele = $vf_object ? $vf_object->ref_allele_string : '';
  # Hidden inputs consumed by the client-side PopulationGraph JS.
  my @inputs = (
    q{<input class="graph_config" type="hidden" name="legendpos" value="'east'" />},
    q{<input class="graph_config" type="hidden" name="legendmark" value="'arrow'" />},
    q{<input class="graph_dimensions" type="hidden" value="[25,25,20]" />},
  );
  # Get alleles list
  my $project_name;
  my $pop_width = $width;
  my $has_100 = 0;
  # First pass: collect alleles seen with non-zero frequency, build the
  # super/sub population tree and widen charts for long allele strings.
  foreach my $pop_name (sort keys %$pop_freq) {
    my $values = '';
    $pop_tree = $self->update_pop_tree($pop_tree, $pop_name, $pop_freq->{$pop_name}{'sub_pop'}) if defined $pop_freq->{$pop_name}{'sub_pop'};
    $project_name = $pop_freq->{$pop_name}{'group'} if (!$project_name);
    foreach my $ssid (keys %{$pop_freq->{$pop_name}{'freq'}}) {
      foreach my $allele (keys %{$pop_freq->{$pop_name}{'freq'}{$ssid}}) {
        my $freq = $pop_freq->{$pop_name}{'freq'}{$ssid}{$allele};
        my $allele_length = length($allele);
        my $new_width = ($allele_length > 1) ? $width + (($allele_length - 1) * 5) : $width;
        $pop_width = $new_width if ($new_width > $pop_width);
        $has_100 = 1 if ($freq == 1);
        push (@alleles, $allele) if $freq > 0 && !(grep $allele eq $_, @alleles);
      }
    }
  }
  $width = ($pop_width > $max_width) ? $max_width : $pop_width;
  $width += 5 if ($has_100 == 1);
  # Taller charts when more than two alleles need to fit in the legend.
  my $nb_alleles = scalar @alleles;
  if ($nb_alleles > 2) {
    while ($nb_alleles != 2) {
      $height += 5;
      $nb_alleles--;
    }
  }
  # Create graphs
  my $population_count = scalar keys %$pop_freq;
  my $too_many_populations = ($population_count > $max_pie_chart_count);
  # Second pass: emit one hidden data input + one chart holder per
  # population; 'ALL' populations sort first.
  foreach my $pop_name (sort { ($a !~ /ALL/ cmp $b !~ /ALL/) || $a cmp $b } keys %$pop_freq) {
    if ($too_many_populations) {
      next if ($pop_name !~ /ALL/);
    }
    my $values = '';
    my $short_name = $self->get_short_name($pop_name);
    my $pop_desc = $pop_freq->{$pop_name}{'desc'};
    if (!$pop_desc || $pop_desc eq '') {
      $pop_desc = $short_name;
      $pop_desc =~ s/_/ /g;
    }
    $pop_desc = $self->strip_HTML($pop_desc);
    # Constructs the array for the pie charts: [allele,frequency]
    foreach my $al (sort { ($a !~ /$ref_allele/ cmp $b !~ /$ref_allele/) || $a cmp $b } @alleles) {
      foreach my $ssid (keys %{$pop_freq->{$pop_name}{'freq'}}) {
        my $freq = $pop_freq->{$pop_name}{'freq'}{$ssid}{$al};
        next unless $freq;
        $values .= ',' if $values ne '';
        $freq = $self->format_number($freq);
        $freq = 0.5 if $freq < 0.5; # Fixed bug if freq between 0 and 0.5
        my $a_label = $al;
        if (length($al)>4) {
          $a_label = substr($al,0,4).'...';
        }
        $values .= "[$freq,'$a_label']";
        last;
      }
    }
    push @inputs, qq{<input type="hidden" class="graph_data" value="[$values]" />};
    # Main "ALL" population OR no population structure
    if ($short_name =~ /ALL/ || scalar(keys(%$pop_tree)) == 0) {
      push @graphs, sprintf('
        <div class="pie_chart_holder">
          <div class="pie_chart%s">
            <div style="margin:4px">
              <span class="_ht ht" style="font-size:1em;font-weight:bold" title="%s">%s</span>
            </div>
            <div id="graphHolder%s" style="width:%ipx;height:%ipx"></div>
          </div>
        </div>
      ', $short_name eq 'ALL' ? ' all_population' : '', $pop_desc, $short_name, $graph_id, $width, $height);
    }
    # Super-population
    elsif ($pop_tree->{$short_name}) {
      push @graphs, sprintf('
        <div class="pie_chart_holder">
          <div class="pie_chart">
            <div style="margin:4px">
              <span class="_ht ht" style="font-size:1em;font-weight:bold" title="%s">%s</span>
            </div>
            <div id="graphHolder%s" style="width:%ipx;height:%ipx"></div>
          </div>
          <a class="toggle %s _slide_toggle set_cookie" href="#" style="margin-left:5px" rel="population_freq_%s" title="Click to toggle sub-population frequencies">Sub-populations</a>
        </div>
      ', $pop_desc, $short_name, $graph_id, $width, $height,
         $hub->get_cookie_value("toggle_population_freq_$short_name") eq 'open' ? 'open' : 'closed',
         $short_name, $short_name);
    }
    # Sub-populations
    else {
      foreach (grep $pop_tree->{$_}{$short_name}, keys %$pop_tree) {
        push @{$sub_pops{$_}}, sprintf('
          <div class="pie_chart_holder">
            <div class="pie_chart">
              <div style="margin:4px">
                <span class="_ht ht" style="font-size:1em;font-weight:bold" title="%s">%s</span>
              </div>
              <div id="graphHolder%s" style="width:%ipx;height:%ipx"></div>
            </div>
          </div>
        ', $pop_desc, $short_name, $graph_id, $width, $height);
      }
    }
    $graph_id++;
  }
  my $html = sprintf(
    '<h2>%s allele frequencies</h2><div><input type="hidden" class="panel_type" value="PopulationGraph" />%s</div><div class="population_genetics_pie">%s</div>',
    $project_name,
    join('', @inputs),
    join('', @graphs)
  );
  # Append one collapsible section per super-population, holding the
  # charts of its sub-populations.
  foreach my $sp (sort keys %sub_pops) {
    my $sub_html = join '', @{$sub_pops{$sp}};
    my $show = $hub->get_cookie_value("toggle_population_freq_$sp") eq 'open';
    $html .= sprintf('
      <div class="population_freq_%s population_genetics_pie" id="population_freq_%s">
        <div class="toggleable" %s>
          <div><p><b>%s sub-populations</b></p></div>
          %s
        </div>
      </div>
    ', $sp, $sp, $show ? '' : 'style="display:none"', $sp, $sub_html);
  }
  return $html;
}
sub format_number {
  ## Convert an allele frequency (0..1) into a percentage string with
  ## two decimal places. Returns 'unknown' when no value is available.
  my ($self, $number) = @_;

  return 'unknown' unless defined $number && $number ne 'unknown';
  return sprintf '%.2f', 100 * $number;
}
sub update_pop_tree {
  ## Record parent -> child links between a population and each of its
  ## sub-populations (both reduced to short names) and return the tree.
  my ($self, $p_tree, $p_name, $sub_list) = @_;

  my $parent = $self->get_short_name($p_name);
  foreach my $sub_pop_id (keys %$sub_list) {
    my $child = $self->get_short_name($sub_list->{$sub_pop_id}{'Name'});
    $p_tree->{$parent}{$child} = 1;
  }
  return $p_tree;
}
sub get_short_name {
  ## Reduce a fully qualified population name to its last
  ## colon-separated component, stripping any 'phase_<n>_' prefix.
  my ($self, $p_name) = @_;

  my ($short_name) = reverse split /:/, $p_name;
  $short_name = $1 if $short_name =~ /phase_\d+_(.+)$/;
  return $short_name;
}
1;
| Ensembl/ensembl-webcode | modules/EnsEMBL/Web/Component/Variation/PopulationGraphs.pm | Perl | apache-2.0 | 8,009 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package hardware::server::ibm::bladecenter::snmp::mode::components::powermodule;
use strict;
use warnings;
my %map_pw_state = (
0 => 'unknown',
1 => 'good',
2 => 'warning',
3 => 'notAvailable',
4 => 'critical',
);
my %map_pw_exists = (
0 => 'false',
1 => 'true',
);
# In MIB 'mmblade.mib' and 'cme.mib'
my $mapping = {
powerModuleExists => { oid => '.1.3.6.1.4.1.2.3.51.2.2.4.1.1.2', map => \%map_pw_exists },
powerModuleState => { oid => '.1.3.6.1.4.1.2.3.51.2.2.4.1.1.3', map => \%map_pw_state },
powerModuleDetails => { oid => '.1.3.6.1.4.1.2.3.51.2.2.4.1.1.4' },
};
my $oid_powerModuleHealthEntry = '.1.3.6.1.4.1.2.3.51.2.2.4.1.1';
sub load {
    ## Queue the SNMP walk of powerModuleHealthEntry, starting at the
    ## 'exists' column so state and details come back in one request.
    my ($self) = @_;

    push @{$self->{request}}, {
        oid   => $oid_powerModuleHealthEntry,
        start => $mapping->{powerModuleExists}->{oid},
    };
}
sub check {
    ## Walk every power module instance, report its state and raise an
    ## alert when the state maps to a non-OK severity. Modules reported
    ## as not present are skipped (unless absent components must be
    ## flagged via absent_problem).
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking power modules");
    $self->{components}->{powermodule} = {name => 'power modules', total => 0, skip => 0};
    return if ($self->check_filter(section => 'powermodule'));

    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_powerModuleHealthEntry}})) {
        next if ($oid !~ /^$mapping->{powerModuleState}->{oid}\.(.*)$/);
        my $instance = $1;
        my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{$oid_powerModuleHealthEntry}, instance => $instance);

        next if ($self->check_filter(section => 'powermodule', instance => $instance));
        # Fixed: %map_pw_exists yields 'false'/'true', so the previous
        # '=~ /No/i' pattern could never match and absent modules were
        # still checked and reported.
        next if ($result->{powerModuleExists} =~ /false/i &&
                 $self->absent_problem(section => 'powermodule', instance => $instance));

        $self->{components}->{powermodule}->{total}++;
        $self->{output}->output_add(long_msg => sprintf("Power module '%s' state is %s [details: %s]",
                                    $instance, $result->{powerModuleState}, $result->{powerModuleDetails}));
        my $exit = $self->get_severity(section => 'powermodule', value => $result->{powerModuleState});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("Power module '%s' state is %s",
                                                             $instance, $result->{powerModuleState}));
        }
    }
}
1; | Tpo76/centreon-plugins | hardware/server/ibm/bladecenter/snmp/mode/components/powermodule.pm | Perl | apache-2.0 | 3,196 |
package Manoc::View::Javascript;
use Moose;
use namespace::autoclean;

# Thin subclass of the stock Catalyst JavaScript view.
extends 'Catalyst::View::JavaScript';

# output => 1: send the generated JavaScript straight to the response.
__PACKAGE__->config(
    output => 1
);

=head1 NAME

Manoc::View::Javascript - Catalyst View

=head1 DESCRIPTION

Catalyst View.

=encoding utf8

=head1 AUTHOR

Gabriele

=head1 LICENSE

This library is free software. You can redistribute it and/or modify
it under the same terms as Perl itself.

=cut

#__PACKAGE__->meta->make_immutable; #(inline_constructor => 0);
1;
| gmambro/nocview-poc | lib/Manoc/View/Javascript.pm | Perl | apache-2.0 | 484 |
#
# Copyright 2018 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package hardware::server::cisco::ucs::mode::components::iocard;
use strict;
use warnings;
use hardware::server::cisco::ucs::mode::components::resources qw(%mapping_presence %mapping_operability);
# In MIB 'CISCO-UNIFIED-COMPUTING-EQUIPMENT-MIB'
my $mapping1 = {
cucsEquipmentIOCardPresence => { oid => '.1.3.6.1.4.1.9.9.719.1.15.30.1.31', map => \%mapping_presence },
};
my $mapping2 = {
cucsEquipmentIOCardOperState => { oid => '.1.3.6.1.4.1.9.9.719.1.15.30.1.25', map => \%mapping_operability },
};
my $oid_cucsEquipmentIOCardDn = '.1.3.6.1.4.1.9.9.719.1.15.30.1.2';
sub load {
    ## Request the presence and operability columns plus the DN column
    ## of the cucsEquipmentIOCard table in a single SNMP pass.
    my ($self) = @_;

    push @{$self->{request}},
        { oid => $mapping1->{cucsEquipmentIOCardPresence}->{oid} },
        { oid => $mapping2->{cucsEquipmentIOCardOperState}->{oid} },
        { oid => $oid_cucsEquipmentIOCardDn };
}
sub check {
    ## Walk every IO card (keyed by its DN), report presence and
    ## operability, and raise alerts for non-OK severities.
    my ($self) = @_;

    # In MIB 'CISCO-UNIFIED-COMPUTING-EQUIPMENT-MIB'
    $self->{output}->output_add(long_msg => "Checking io cards");
    $self->{components}->{iocard} = {name => 'io cards', total => 0, skip => 0};
    return if ($self->check_filter(section => 'iocard'));

    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_cucsEquipmentIOCardDn}})) {
        $oid =~ /\.(\d+)$/;
        my $instance = $1;
        # The DN (distinguished name) is used as the user-facing id for
        # filtering and messages; the numeric instance indexes the table.
        my $iocard_dn = $self->{results}->{$oid_cucsEquipmentIOCardDn}->{$oid};
        my $result = $self->{snmp}->map_instance(mapping => $mapping1, results => $self->{results}->{$mapping1->{cucsEquipmentIOCardPresence}->{oid}}, instance => $instance);
        my $result2 = $self->{snmp}->map_instance(mapping => $mapping2, results => $self->{results}->{$mapping2->{cucsEquipmentIOCardOperState}->{oid}}, instance => $instance);

        next if ($self->absent_problem(section => 'iocard', instance => $iocard_dn));
        next if ($self->check_filter(section => 'iocard', instance => $iocard_dn));

        $self->{output}->output_add(long_msg => sprintf("IO cards '%s' state is '%s' [presence: %s].",
                                                        $iocard_dn, $result2->{cucsEquipmentIOCardOperState},
                                                        $result->{cucsEquipmentIOCardPresence})
                                    );

        # A bad presence value is reported on its own and the card is
        # not counted nor checked for operability.
        my $exit = $self->get_severity(section => 'iocard.presence', label => 'default.presence', value => $result->{cucsEquipmentIOCardPresence});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("IO cards '%s' presence is: '%s'",
                                                             $iocard_dn, $result->{cucsEquipmentIOCardPresence})
                                        );
            next;
        }

        $self->{components}->{iocard}->{total}++;

        $exit = $self->get_severity(section => 'iocard.operability', label => 'default.operability', value => $result2->{cucsEquipmentIOCardOperState});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("IO cards '%s' state is '%s'.",
                                                             $iocard_dn, $result2->{cucsEquipmentIOCardOperState}
                                                             )
                                        );
        }
    }
}
1;
| wilfriedcomte/centreon-plugins | hardware/server/cisco/ucs/mode/components/iocard.pm | Perl | apache-2.0 | 4,287 |
#!/usr/bin/perl
# Trivial smoke-test script. Strictures added: every Perl file should
# enable strict and warnings.
use strict;
use warnings;

print "This is for test!\n";
| chengxiyinian/github_test | test.pl | Perl | apache-2.0 | 45 |
#
# Copyright 2018 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::hp::procurve::snmp::mode::components::fan;
use strict;
use warnings;
my %map_fan_status = (
0 => 'failed',
1 => 'removed',
2 => 'off',
3 => 'underspeed',
4 => 'overspeed',
5 => 'ok',
6 => 'maxstate',
);
my $mapping = {
hpicfFanState => { oid => '.1.3.6.1.4.1.11.2.14.11.5.1.54.2.1.1.4', map => \%map_fan_status },
};
sub load {
    ## Queue the SNMP walk of the hpicfFanState column.
    my ($self) = @_;

    push @{$self->{request}}, { oid => $mapping->{hpicfFanState}->{oid} };
}
sub check {
    ## Iterate over every hpicfFanState entry, report each fan's state
    ## and raise an alert when the state maps to a non-OK severity.
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking fans");
    $self->{components}->{fan} = {name => 'fans', total => 0, skip => 0};
    return if ($self->check_filter(section => 'fan'));

    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$mapping->{hpicfFanState}->{oid}}})) {
        next if ($oid !~ /^$mapping->{hpicfFanState}->{oid}\.(.*)$/);
        my $instance = $1;
        my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{$mapping->{hpicfFanState}->{oid}}, instance => $instance);

        next if ($self->check_filter(section => 'fan', instance => $instance));
        # A 'removed' fan is treated as absent rather than faulty
        # (unless the user asked for absent components to be reported).
        next if ($result->{hpicfFanState} =~ /removed/i &&
                 $self->absent_problem(section => 'fan', instance => $instance));

        $self->{components}->{fan}->{total}++;
        $self->{output}->output_add(long_msg => sprintf("fan '%s' state is %s [instance: %s].",
                                    $instance, $result->{hpicfFanState}, $instance
                                    ));
        my $exit = $self->get_severity(section => 'fan', value => $result->{hpicfFanState});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("fan '%s' state is %s",
                                                             $instance, $result->{hpicfFanState}));
        }
    }
}
1;
| wilfriedcomte/centreon-plugins | network/hp/procurve/snmp/mode/components/fan.pm | Perl | apache-2.0 | 2,796 |
#!/usr/bin/perl
use warnings;
# NOTE(review): no 'use strict' here -- several subs below rely on
# undeclared package globals (e.g. $hash1), so enabling strict would
# first require declaring those.
require BankAccount;
require Exporter;
our @ISA = qw/Exporter/;
# NOTE(review): this was almost certainly meant to be @EXPORT (the
# Exporter API variable) rather than @Exporter -- as written, nothing
# is actually exported. Confirm with callers before changing.
our @Exporter = qw/BankAccount/;
package Bank;
#constructor takes two arguments, bank name and nextAccountNo.
# Constructor: takes the bank name and the first account number to
# hand out. Accounts are stored in accountBase keyed by number.
sub new {
    my ($class, $name, $first_account_number) = @_;

    my $self = {
        name              => $name,
        accountBase       => {},   # account number -> BankAccount object
        nextAccountNumber => $first_account_number,
    };
    return bless $self, $class;
}
#Need consumer name as argument for creating a new account.
sub newAccount {
my ($self, $consumer) = @_;
my $accountnum = $self->{nextAccountNumber};
#create bankaccount object.
my $newc = BankAccount->new($consumer,$acountnum);
$self->{accountBase}{$accountnum} = $newc;
$self->{nextAccountNumber}=$self->{nextAccountNumber}+1;
return $accountnum;
}
#need account number as key to get this user's balance.
sub getBalance {
my ($self,$accnum) = @_;
my $getacc = $self->{accountBase}{$accnum};
return $getacc->getBalance();
}
#go into bankaccount object and change the balance
sub deposit {
my ($self,$accnum,$amount) = @_;
my $getacc = $self->{accountBase}{$accnum};
$getacc->deposit($amount);
}
# Delegate a withdrawal to the BankAccount stored under $accnum.
sub withdraw {
    my ($self, $accnum, $amount) = @_;
    $self->{accountBase}{$accnum}->withdraw($amount);
}
sub applyInterests {
    ## Credit interest to every account. The optional $rate argument
    ## (a fraction, default 0.01 = 1%) generalises the previously
    ## hard-coded rate; existing callers are unaffected.
    my ($self, $rate) = @_;
    $rate = 0.01 unless defined $rate;

    # 'my' added: $hash1 used to leak as an undeclared package global.
    my $accounts = $self->{accountBase};
    foreach my $acc (values %$accounts) {
        $acc->deposit($rate * $acc->getBalance());
    }
}
sub listAccounts {
    ## Print the details of every account, highest account number first.
    my ($self) = @_;

    # Replaces a hand-rolled selection sort plus reverse (and the
    # undeclared $hash1 global) with a plain numeric sort; the output
    # order is unchanged: the old code sorted ascending then reversed.
    foreach my $accnum (sort { $b <=> $a } keys %{ $self->{accountBase} }) {
        my $acc = $self->{accountBase}{$accnum};
        # printAccountDetails reads accountNo from the object, so keep
        # the original behaviour of stamping it before printing.
        $acc->{accountNo} = $accnum;
        $acc->printAccountDetails();
    }
}
| DataMonster/Perl | exer/banksample/Bank.pm | Perl | apache-2.0 | 2,245 |
=head1 LICENSE
See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 NAME
Bio::EnsEMBL::Compara::PipeConfig::CreateHmmProfiles_conf
=head1 SYNOPSIS
init_pipeline.pl Bio::EnsEMBL::Compara::PipeConfig::CreateHmmProfiles_conf -host mysql-ens-compara-prod-X -port XXXX \
-mlss_id <your_mlss_id>
=head1 DESCRIPTION
The PipeConfig file for Creating HMM profiles. This pipeline fetches the
PANTHER profiles and perform Hmmer searches to classify our sequences and
then build the new HMMs. These families are then filtered and processed.
=cut
package Bio::EnsEMBL::Compara::PipeConfig::CreateHmmProfiles_conf;
use strict;
use warnings;
use Bio::EnsEMBL::Hive::Version 2.4;
use Bio::EnsEMBL::Hive::PipeConfig::HiveGeneric_conf;
use base ('Bio::EnsEMBL::Compara::PipeConfig::ComparaGeneric_conf');
sub default_options {
    ## Default pipeline-wide option values. Nearly everything here can
    ## be overridden on the init_pipeline.pl command line; directory
    ## options derive from 'work_dir' so changing that is usually enough.
    my ($self) = @_;
    return {
        %{$self->SUPER::default_options}, # inherit the generic ones
        # where to find the list of Compara methods. Unlikely to be changed
        'method_link_dump_file' => $self->check_file_in_ensembl('ensembl-compara/sql/method_link.txt'),
        # custom pipeline name, in case you don't like the default one
        # 'rel_with_suffix' is the concatenation of 'ensembl_release' and 'rel_suffix'
        # Tag attached to every single tree
        'division' => undef,
        #default parameters for the geneset qc
        'coverage_threshold' => 50, #percent
        'species_threshold' => '#expr(#species_count#/2)expr#', #half of ensembl species
        # dependent parameters: updating 'base_dir' should be enough
        'work_dir' => $self->o('pipeline_dir'),
        'fasta_dir' => $self->o('work_dir') . '/blast_db', # affects 'dump_subset_create_blastdb' and 'blastp'
        'cluster_dir' => $self->o('work_dir') . '/cluster',
        'dump_dir' => $self->o('work_dir') . '/dumps',
        'tmp_hmmsearch' => $self->o('work_dir') . '/tmp_hmmsearch',
        'big_tmp_dir' => $self->o('work_dir') . '/scratch',
        'seed_hmm_library_basedir' => $self->o('work_dir') . '/seed_hmms',
        'panther_hmm_library_basedir' => $self->o('work_dir') . '/hmm_panther_12',
        'worker_compara_hmm_library_basedir' => $self->o('work_dir') . '/compara_hmm_'.$self->o('ensembl_release'),
        'worker_treefam_only_hmm_library_basedir' => $self->o('work_dir') . '/treefam_hmms/2015-12-18_only_TF_hmmer3/',
        # "Member" parameters:
        'allow_ambiguity_codes' => 1,
        'allow_missing_coordinates' => 0,
        'allow_missing_cds_seqs' => 0,
        # blast parameters:
        # Amount of sequences to be included in each blast job
        'num_sequences_per_blast_job' => 100,
        # define blast parameters and evalues for ranges of sequence-length
        # Important note: -max_hsps parameter is only available on ncbi-blast-2.3.0 or higher.
        'all_blast_params' => [
            [ 0, 35, '-seg no -max_hsps 1 -use_sw_tback -num_threads 1 -matrix PAM30 -word_size 2', '1e-4' ],
            [ 35, 50, '-seg no -max_hsps 1 -use_sw_tback -num_threads 1 -matrix PAM70 -word_size 2', '1e-6' ],
            [ 50, 100, '-seg no -max_hsps 1 -use_sw_tback -num_threads 1 -matrix BLOSUM80 -word_size 2', '1e-8' ],
            [ 100, 10000000, '-seg no -max_hsps 1 -use_sw_tback -num_threads 1 -matrix BLOSUM62 -word_size 3', '1e-10' ], # should really be infinity, but ten million should be big enough
        ],
        # clustering parameters:
        # affects 'hcluster_dump_input_per_genome'
        'outgroups' => {},
        # (half of the previously used 'clutering_max_gene_count=1500) affects 'hcluster_run'
        'clustering_max_gene_halfcount' => 750,
        # File with gene / peptide names that must be excluded from the
        # clusters (e.g. know to disturb the trees)
        'gene_blacklist_file' => '/dev/null',
        # tree building parameters:
        'use_quick_tree_break' => 1,
        'split_genes_gene_count' => 5000,
        'mcoffee_short_gene_count' => 20,
        'mcoffee_himem_gene_count' => 250,
        'mafft_gene_count' => 300,
        'mafft_himem_gene_count' => 400,
        'mafft_runtime' => 7200,
        'treebest_threshold_n_residues' => 10000,
        'treebest_threshold_n_genes' => 400,
        'update_threshold_trees' => 0.2,
        # alignment filtering options
        'threshold_n_genes' => 20,
        'threshold_aln_len' => 1000,
        'threshold_n_genes_large' => 2000,
        'threshold_aln_len_large' => 15000,
        'noisy_cutoff' => 0.4,
        'noisy_cutoff_large' => 1,
        # species tree reconciliation
        # you can define your own species_tree for 'treebest'. It can contain multifurcations
        'species_tree_input_file' => undef,
        # When automatically binarizing the tree, should we assume timetree tags to be there ?
        'use_timetree_times' => 0,
        # you can define your own species_tree for 'notung'. It *has* to be binary
        'binary_species_tree_input_file' => undef,
        # executable locations:
        # HMM specific parameters
        # The location of the HMM library:
        'compara_hmm_library_basedir' => $self->o('shared_hps_dir') . '/compara_hmm_'.$self->o('ensembl_release')."/",
        'target_compara_hmm_library_basedir' => $self->o('warehouse_dir') . '/treefam_hmms/compara_hmm_'.$self->o('ensembl_release')."/",
        'treefam_hmm_library_basedir' => $self->o('warehouse_dir') . '/treefam_hmms/2015-12-18/',
        'target_treefam_only_hmm_library_basedir' => $self->o('warehouse_dir') . '/2015-12-18_only_TF_hmmer3/',
        #README file with the list of all names, genome_ids, assemblies, etc
        'readme_file' => $self->o('target_compara_hmm_library_basedir') . '/README_hmm_profiles'.$self->o('ensembl_release').'.txt',
        'seed_hmm_library_name' => 'seed_hmm_compara.hmm3',
        'hmm_thresholding_table' => 'hmm_thresholding',
        'hmmer_search_cutoff' => '1e-23',
        'min_num_members' => 4,
        'min_num_species' => 2,
        'min_taxonomic_coverage' => 0.5,
        'min_ratio_species_genes' => 0.5,
        'max_gappiness' => 0.9,
        'sequence_limit' => 50,
        'max_chunk_length' => 0,
        'max_chunk_size' => 100,
        'output_prefix' => "hmm_split_",
        # cdhit is used to filter out proteins that are too close to each other
        'cdhit_identity_threshold' => 0.99,
        #name of the profile to be created:
        'hmm_library_name' => 'panther_12_0.hmm3',
        #Compara HMM profile name:
        'compara_hmm_library_name' => 'compara_hmm_'.$self->o('ensembl_release').'.hmm3',
        #URL to find the PANTHER profiles:
        'panther_url' => 'ftp://ftp.pantherdb.org/panther_library/current_release/',
        #File name in the 'panther_url':
        'panther_file' => 'PANTHER12.0_ascii.tgz',
        # List of directories that contain Panther-like databases (with books/ and globals/)
        # It requires two more arguments for each file: the name of the library, and whether subfamilies should be loaded
        'panther_like_databases' => [],
        #'panther_like_databases' => [ ["/lustre/scratch110/ensembl/mp12/panther_hmms/PANTHER7.2_ascii", "PANTHER7.2", 1] ],
        # List of MultiHMM files to load (and their names)
        #'multihmm_files' => [ ["/lustre/scratch110/ensembl/mp12/pfamA_HMM_fs.txt", "PFAM"] ],
        'multihmm_files' => [],
        # Dumps coming from InterPro
        'panther_annotation_file' => '/dev/null',
        #'panther_annotation_file' => '/nfs/nobackup2/ensemblgenomes/ckong/workspace/buildhmmprofiles/panther_Interpro_annot_v8_1/loose_dummy.txt',
        # A file that holds additional tags we want to add to the HMM clusters (for instance: Best-fit models)
        'extra_model_tags_file' => undef,
        # hive_capacity values for some analyses:
        'reuse_capacity' => 30,
        'blast_factory_capacity' => 50,
        'blastp_capacity' => 200,
        'blastpu_capacity' => 5000,
        'mcoffee_capacity' => 2000,
        'alignment_filtering_capacity' => 200,
        'filter_1_capacity' => 50,
        'filter_2_capacity' => 50,
        'filter_3_capacity' => 50,
        'cluster_tagging_capacity' => 200,
        'build_hmm_capacity' => 100,
        'hc_capacity' => 4,
        'decision_capacity' => 4,
        'loadmembers_capacity' => 30,
        'split_genes_capacity' => 100,
        'HMMer_search_capacity' => 8000,
        'HMMer_search_all_hits_capacity' => 1000,
        # Setting priorities
        'mcoffee_himem_priority' => 40,
        'mafft_himem_priority' => 35,
        'mafft_priority' => 30,
        'mcoffee_priority' => 20,
        'noisy_priority' => 20,
        # hive priority for non-LOCAL health_check analysis:
        'hc_priority' => -10,
        # connection parameters to various databases:
        # Uncomment and update the database locations
        # the master database for synchronization of various ids (use undef if you don't have a master database)
        'master_db' => 'mysql://ensro@mysql-ens-compara-prod-4:4401/treefam_master',
        #'master_db' => 'mysql://ensro@mysql-ens-compara-prod-1:4485/ensembl_compara_master',
        'ncbi_db' => $self->o('master_db'),
        # Where the members come from (as loaded by the LoadMembers pipeline)
        'member_db' => 'mysql://ensro@mysql-ens-compara-prod-4:4401/mateus_load_members_tf_90',
    };
}
# This section has to be filled in any derived class
# This section has to be filled in any derived class
sub resource_classes {
    ## Standard (multi-threaded-aware) resource classes plus a 4 Gb
    ## class that relocates the worker temp directory to scratch space.
    my ($self) = @_;

    my %classes = %{ $self->SUPER::resource_classes('include_multi_threaded') };
    $classes{'4Gb_big_tmp_job'} = {
        'LSF' => [
            '-C0 -M4000 -R"select[mem>4000] rusage[mem=4000]"',
            '-worker_base_tmp_dir ' . $self->o('big_tmp_dir'),
        ],
    };
    return \%classes;
}
sub pipeline_checks_pre_init {
    ## Consistency checks run before the pipeline database is created.
    ## Fixed the 'dabase' -> 'database' typo in both error messages.
    my ($self) = @_;

    # The master database must be defined to allow mapping stable_ids and checking species for reuse
    die "The master database must be defined with a mlss_id" if $self->o('master_db') and not $self->o('mlss_id');
    die "mlss_id can not be defined in the absence of a master database" if $self->o('mlss_id') and not $self->o('master_db');

    # Without a master database, we must provide other parameters
    die if not $self->o('master_db') and not $self->o('ncbi_db');
}
sub pipeline_create_commands {
    ## Shell commands run once at pipeline initialisation: the inherited
    ## database/table creation plus (re)creation of the work directories.
    my ($self) = @_;
    return [
        @{$self->SUPER::pipeline_create_commands}, # here we inherit creation of database, hive tables and compara tables
        # Each rm_mkdir call wipes and recreates the listed directories
        # (paths come from the correspondingly named options).
        $self->pipeline_create_commands_rm_mkdir(['cluster_dir', 'dump_dir', 'fasta_dir', 'tmp_hmmsearch', 'big_tmp_dir']),
        $self->pipeline_create_commands_rm_mkdir(['compara_hmm_library_basedir', 'panther_hmm_library_basedir', 'seed_hmm_library_basedir']),
    ];
}
sub pipeline_wide_parameters {
    # These parameter values are visible to all analyses and can be
    # overridden per-analysis via parameters{} or per-job via input_id{}.
    my ($self) = @_;

    # Options forwarded verbatim as pipeline-wide parameters.
    # NOTE(review): this forwards 'target_compara_hmm_library_basedir'
    # while pipeline_create_commands creates 'compara_hmm_library_basedir'
    # — presumably intentional, but worth confirming.
    my @forwarded_options = qw(
        master_db
        ncbi_db
        member_db
        cluster_dir
        fasta_dir
        dump_dir
        tmp_hmmsearch
        target_compara_hmm_library_basedir
        panther_hmm_library_basedir
        seed_hmm_library_basedir
        seed_hmm_library_name
        binary_species_tree_input_file
        all_blast_params
        use_quick_tree_break
    );

    # Inherit everything from the base class, then layer our own values.
    my %params = %{ $self->SUPER::pipeline_wide_parameters };
    $params{$_} = $self->o($_) for @forwarded_options;

    return \%params;
}
sub core_pipeline_analyses {
my ($self) = @_;
my %hc_analysis_params = (
-analysis_capacity => $self->o('hc_capacity'),
-priority => $self->o('hc_priority'),
-batch_size => 20,
-max_retry_count => 1,
);
my %decision_analysis_params = (
-analysis_capacity => $self->o('decision_capacity'),
-priority => $self->o('hc_priority'),
-batch_size => 20,
-max_retry_count => 1,
);
my %blastp_parameters = (
'blast_bin_dir' => $self->o('blast_bin_dir'),
'blast_params' => "#expr( #all_blast_params#->[#param_index#]->[2])expr#",
'evalue_limit' => "#expr( #all_blast_params#->[#param_index#]->[3])expr#",
);
return [
# ---------------------------------------------[backbone]--------------------------------------------------------------------------------
{ -logic_name => 'backbone_fire_db_prepare',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::AssertMatchingVersions',
-input_ids => [ { } ],
-flow_into => {
'1->A' => [ 'copy_ncbi_tables_factory' ],
'A->1' => [ 'backbone_fire_clustering' ],
},
},
{ -logic_name => 'backbone_fire_clustering',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::DatabaseDumper',
-parameters => {
'table_list' => 'peptide_align_feature%',
'exclude_list' => 1,
'output_file' => '#dump_dir#/snapshot_1_before_clustering.sql.gz',
},
-flow_into => {
'1->A' => [ 'build_hmm_entry_point' ],
'A->1' => [ 'backbone_fire_tree_building' ],
},
},
{ -logic_name => 'backbone_fire_tree_building',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::DatabaseDumper',
-parameters => {
'table_list' => 'peptide_align_feature%',
'exclude_list' => 1,
'output_file' => '#dump_dir#/snapshot_3_before_tree_building.sql.gz',
},
-flow_into => {
'1->A' => [ 'cluster_factory' ],
'A->1' => [ 'backbone_pipeline_finished' ],
},
},
{ -logic_name => 'backbone_pipeline_finished',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::Dummy',
},
# ---------------------------------------------[copy tables from master]-----------------------------------------------------------------
{ -logic_name => 'copy_ncbi_tables_factory',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::JobFactory',
-parameters => {
'inputlist' => [ 'ncbi_taxa_node', 'ncbi_taxa_name' ],
'column_names' => [ 'table' ],
},
-flow_into => {
'2->A' => [ 'copy_ncbi_table' ],
'A->1' => [ 'check_member_db_is_same_version' ],
},
},
{ -logic_name => 'copy_ncbi_table',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::MySQLTransfer',
-parameters => {
'src_db_conn' => '#ncbi_db#',
'mode' => 'overwrite',
'filter_cmd' => 'sed "s/ENGINE=MyISAM/ENGINE=InnoDB/"',
},
},
{ -logic_name => 'populate_method_links_from_db',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::MySQLTransfer',
-parameters => {
'src_db_conn' => '#master_db#',
'mode' => 'overwrite',
'filter_cmd' => 'sed "s/ENGINE=MyISAM/ENGINE=InnoDB/"',
'table' => 'method_link',
},
-flow_into => [ 'offset_tables' ],
},
# CreateReuseSpeciesSets/PrepareSpeciesSetsMLSS may want to create new
# entries. We need to make sure they don't collide with the master database
{ -logic_name => 'offset_tables',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::SqlCmd',
-parameters => {
'sql' => [
'ALTER TABLE species_set_header AUTO_INCREMENT=10000001',
'ALTER TABLE method_link_species_set AUTO_INCREMENT=10000001',
],
},
-flow_into => [ 'load_genomedb_factory' ],
},
# ---------------------------------------------[load GenomeDB entries from member_db]---------------------------------------------
{ -logic_name => 'load_genomedb_factory',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GenomeDBFactory',
-parameters => {
'compara_db' => '#master_db#', # that's where genome_db_ids come from
'mlss_id' => $self->o('mlss_id'),
# Add the locators coming from member_db
'extra_parameters' => [ 'locator' ],
'genome_db_data_source' => '#member_db#',
},
-rc_name => '4Gb_job',
-flow_into => {
'2->A' => {
'load_genomedb' => { 'master_dbID' => '#genome_db_id#', 'locator' => '#locator#' },
},
'A->1' => [ 'create_mlss_ss' ],
},
},
{ -logic_name => 'load_genomedb',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::LoadOneGenomeDB',
-flow_into => [ 'genome_member_copy' ],
-batch_size => 10,
-hive_capacity => 30,
-max_retry_count => 2,
},
{ -logic_name => 'populate_method_links_from_file',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::DbCmd',
-parameters => {
'method_link_dump_file' => $self->o('method_link_dump_file'),
'executable' => 'mysqlimport',
'append' => [ '#method_link_dump_file#' ],
},
-flow_into => {
1 => {
'load_genomedb_factory' => INPUT_PLUS( { 'master_db' => '#member_db#', } ),
}
},
},
{ -logic_name => 'create_mlss_ss',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::PrepareSpeciesSetsMLSS',
-parameters => {
'whole_method_links' => [ 'PROTEIN_TREES' ],
},
-rc_name => '2Gb_job',
-parameters => {
'create_homology_mlss' => '0',
},
-flow_into => {
1 => [ 'make_treebest_species_tree', 'hc_members_globally' ],
},
},
{ -logic_name => 'check_member_db_is_same_version',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::AssertMatchingVersions',
-parameters => {
'db_conn' => '#member_db#',
},
-flow_into => WHEN(
'#master_db#' => 'populate_method_links_from_db',
ELSE 'populate_method_links_from_file',
),
},
# ---------------------------------------------[load species tree]-------------------------------------------------------------------
{ -logic_name => 'make_treebest_species_tree',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::MakeSpeciesTree',
-parameters => {
'species_tree_input_file' => $self->o('species_tree_input_file'), # empty by default, but if nonempty this file will be used instead of tree generation from genome_db
#Options needed when using strains:
#-----------------------------------------------------
'allow_subtaxa' => 1,
'multifurcation_deletes_all_subnodes' => [ 10088 ],
#-----------------------------------------------------
},
-flow_into => {
2 => [ 'hc_species_tree' ],
}
},
{ -logic_name => 'hc_species_tree',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GeneTrees::SqlHealthChecks',
-parameters => {
mode => 'species_tree',
binary => 0,
n_missing_species_in_tree => 0,
},
-flow_into => WHEN(
'#use_notung# and #binary_species_tree_input_file#' => 'load_binary_species_tree',
'#use_notung# and !#binary_species_tree_input_file#' => 'make_binary_species_tree',
),
%hc_analysis_params,
},
{ -logic_name => 'load_binary_species_tree',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::MakeSpeciesTree',
-parameters => {
'label' => 'binary',
'species_tree_input_file' => '#binary_species_tree_input_file#',
},
-flow_into => {
2 => [ 'hc_binary_species_tree' ],
}
},
{ -logic_name => 'make_binary_species_tree',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GeneTrees::CAFESpeciesTree',
-parameters => {
'new_label' => 'binary',
'label' => 'default',
'use_timetree_times' => $self->o('use_timetree_times'),
},
-flow_into => {
2 => [ 'hc_binary_species_tree' ],
}
},
{ -logic_name => 'hc_binary_species_tree',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GeneTrees::SqlHealthChecks',
-parameters => {
mode => 'species_tree',
binary => 1,
n_missing_species_in_tree => 0,
},
%hc_analysis_params,
},
# ---------------------------------------------[reuse members]-----------------------------------------------------------------------
{ -logic_name => 'genome_member_copy',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GeneTrees::CopyCanonRefMembersByGenomeDB',
-parameters => {
'reuse_db' => '#member_db#',
'biotype_filter' => 'biotype_group = "coding"',
'exclude_tables' => [ 'exon_boundaries', 'hmm_annot', 'seq_member_projection_stable_id' ],
},
-hive_capacity => $self->o('reuse_capacity'),
-flow_into => [ 'hc_members_per_genome' ],
},
{ -logic_name => 'hc_members_per_genome',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GeneTrees::SqlHealthChecks',
-parameters => {
mode => 'members_per_genome',
allow_ambiguity_codes => $self->o('allow_ambiguity_codes'),
allow_missing_coordinates => $self->o('allow_missing_coordinates'),
allow_missing_cds_seqs => $self->o('allow_missing_cds_seqs'),
only_canonical => 1,
},
%hc_analysis_params,
},
{ -logic_name => 'hc_members_globally',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GeneTrees::SqlHealthChecks',
-parameters => {
mode => 'members_globally',
},
%hc_analysis_params,
},
# ---------------------------------------------[create and populate blast analyses]--------------------------------------------------
#--------------------------------------------------------[load the HMM profiles]----------------------------------------------------
#----------------------------------------------[classify canonical members based on HMM searches]-----------------------------------
{ -logic_name => 'load_PANTHER',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::LoadPanther',
-rc_name => '4Gb_big_tmp_job',
-max_retry_count => 0,
-parameters => {
'library_name' => $self->o('hmm_library_name'),
'hmmer_home' => $self->o('hmmer3_home'),
'panther_hmm_lib' => $self->o('panther_hmm_library_basedir'),
'url' => $self->o('panther_url'),
'file' => $self->o('panther_file'),
},
-flow_into => [ 'chunk_sequence' ],
},
{ -logic_name => 'chunk_sequence',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::FastaFactory',
-parameters => {
'sequence_limit' => $self->o('sequence_limit'),
'max_chunk_length' => $self->o('max_chunk_length'),
'max_chunk_size' => $self->o('max_chunk_size'),
'input_format' => 'fasta',
'seq_filter' => '^TF',
'inputfile' => $self->o('treefam_hmm_library_basedir')."/globals/con.Fasta",
'output_dir' => $self->o('tmp_hmmsearch'),
'output_prefix' => $self->o('output_prefix'),
'hash_directories' => 1,
'split_by_sequence_count' => 1,
},
-flow_into => {
'2->A' => [ 'treefam_panther_hmm_overlapping' ],
'A->1' => [ 'build_seed_hmms' ],
},
},
{ -logic_name => 'treefam_panther_hmm_overlapping',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::HmmOverlap',
-rc_name => '1Gb_job',
-parameters => {
'hmmer_home' => $self->o('hmmer3_home'),
'library_name' => $self->o('hmm_library_name'),
'panther_hmm_lib' => $self->o('panther_hmm_library_basedir'),
},
-hive_capacity => $self->o('HMMer_search_capacity'),
-flow_into => {
-1 => [ 'treefam_panther_hmm_overlapping_himem' ], # MEMLIMIT
},
},
{ -logic_name => 'treefam_panther_hmm_overlapping_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::HmmOverlap',
-rc_name => '2Gb_job',
-parameters => {
'hmmer_home' => $self->o('hmmer3_home'),
'library_name' => $self->o('hmm_library_name'),
'panther_hmm_lib' => $self->o('panther_hmm_library_basedir'),
},
-hive_capacity => $self->o('HMMer_search_capacity'),
},
{ -logic_name => 'build_seed_hmms',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::BuildSeedHmms',
-rc_name => '1Gb_job',
-parameters => {
'hmmer_home' => $self->o('hmmer3_home'),
'panther_hmm_library_name' => $self->o('hmm_library_name'),
'treefam_hmm_lib' => $self->o('treefam_hmm_library_basedir'),
'target_treefam_only_hmm_lib' => $self->o('target_treefam_only_hmm_library_basedir'),
'panther_hmm_lib' => $self->o('panther_hmm_library_basedir'),
'seed_hmm_library_basedir' => $self->o('seed_hmm_library_basedir'),
'seed_hmm_library_name' => $self->o('seed_hmm_library_name'),
},
-hive_capacity => $self->o('HMMer_search_capacity'),
-flow_into => {
1 => [ 'backup_before_cdhit_diversity' ],
},
},
{ -logic_name => 'backup_before_cdhit_diversity',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::DatabaseDumper',
-parameters => {
'table_list' => 'peptide_align_feature%',
'exclude_list' => 1,
'output_file' => '#dump_dir#/snapshot_2_before_cdhit_divergency.sql.gz',
},
-flow_into => {
'1->A' => [ 'diversity_CDHit' ],
'A->1' => [ 'HMMer_search_factory' ],
},
},
{ -logic_name => 'diversity_CDHit',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::CDHitDiversity',
-parameters => {
'cdhit_exe' => $self->o('cdhit_exe'),
'cdhit_identity_threshold' => '0.99',
'cdhit_num_threads' => 8,
'cdhit_memory_in_mb' => 0,
},
-flow_into => {
-1 => [ 'diversity_CDHit_himem' ],
3 => [ '?table_name=seq_member_projection' ],
},
-hive_capacity => $self->o('build_hmm_capacity'),
-batch_size => 50,
-priority => -20,
-rc_name => '16Gb_16c_job',
},
{ -logic_name => 'diversity_CDHit_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::CDHitDiversity',
-parameters => {
'cdhit_exe' => $self->o('cdhit_exe'),
'cdhit_identity_threshold' => '0.99',
'cdhit_num_threads' => 8,
'cdhit_memory_in_mb' => 0,
},
-flow_into => {
3 => [ '?table_name=seq_member_projection' ],
},
-hive_capacity => $self->o('build_hmm_capacity'),
-batch_size => 10,
-priority => -20,
-rc_name => '32Gb_16c_job',
},
{ -logic_name => 'HMMer_search_factory',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::FactoryUnannotatedMembers',
-parameters => {
'use_diversity_filter' => 1,
},
-rc_name => '4Gb_job',
-hive_capacity => $self->o('blast_factory_capacity'),
-flow_into => {
'2->A' => [ 'HMMer_search' ],
'A->1' => [ 'HMM_clusterize' ],
},
},
{
-logic_name => 'HMMer_search',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::HMMerSearch',
-parameters => {
'hmmer_home' => $self->o('hmmer3_home'),
'library_name' => $self->o('seed_hmm_library_name'),
'library_basedir' => $self->o('seed_hmm_library_basedir'),
'hmmer_cutoff' => $self->o('hmmer_search_cutoff'),
},
-hive_capacity => $self->o('HMMer_search_capacity'),
-rc_name => '2Gb_job',
-flow_into => {
-1 => [ 'HMMer_search_himem' ], # MEMLIMIT
},
},
{
-logic_name => 'HMMer_search_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::HMMerSearch',
-parameters => {
'hmmer_home' => $self->o('hmmer3_home'),
'library_name' => $self->o('seed_hmm_library_name'),
'library_basedir' => $self->o('seed_hmm_library_basedir'),
'hmmer_cutoff' => $self->o('hmmer_search_cutoff'),
},
-hive_capacity => $self->o('HMMer_search_capacity'),
-rc_name => '4Gb_job',
-priority=> 20,
-flow_into => {
-1 => [ 'HMMer_search_super_himem' ], # MEMLIMIT
},
},
{
-logic_name => 'HMMer_search_super_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::HMMerSearch',
-parameters => {
'hmmer_home' => $self->o('hmmer3_home'),
'library_name' => $self->o('seed_hmm_library_name'),
'library_basedir' => $self->o('seed_hmm_library_basedir'),
'hmmer_cutoff' => $self->o('hmmer_search_cutoff'),
},
-hive_capacity => $self->o('HMMer_search_capacity'),
-rc_name => '32Gb_job',
-priority=> 25,
},
{
-logic_name => 'HMM_clusterize',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::HMMClusterize',
-parameters => {
'extra_tags_file' => $self->o('extra_model_tags_file'),
},
-rc_name => '16Gb_job',
-flow_into => [ 'dump_unannotated_members' ],
},
# -------------------------------------------------[Blast unannotated members]-------------------------------------------------------
{ -logic_name => 'dump_unannotated_members',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::DumpUnannotatedMembersIntoFasta',
-parameters => {
'fasta_file' => '#fasta_dir#/unannotated.fasta',
},
-rc_name => '16Gb_job',
-hive_capacity => $self->o('reuse_capacity'),
-flow_into => [ 'make_blastdb_unannotated' ],
},
{ -logic_name => 'make_blastdb_unannotated',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::SystemCmd',
-parameters => {
'blast_bin_dir' => $self->o('blast_bin_dir'),
'cmd' => '#blast_bin_dir#/makeblastdb -dbtype prot -parse_seqids -logfile #fasta_name#.blastdb_log -in #fasta_name#',
},
-flow_into => {
-1 => [ 'make_blastdb_unannotated_himem' ],
1 => [ 'unannotated_all_vs_all_factory' ],
}
},
{ -logic_name => 'make_blastdb_unannotated_himem',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::SystemCmd',
-parameters => {
'blast_bin_dir' => $self->o('blast_bin_dir'),
'cmd' => '#blast_bin_dir#/makeblastdb -dbtype prot -parse_seqids -logfile #fasta_name#.blastdb_log -in #fasta_name#',
},
-rc_name => '1Gb_job',
-flow_into => [ 'unannotated_all_vs_all_factory' ],
},
{ -logic_name => 'unannotated_all_vs_all_factory',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::BlastFactoryUnannotatedMembers',
-parameters => {
'step' => $self->o('num_sequences_per_blast_job'),
},
-rc_name => '16Gb_job',
-hive_capacity => $self->o('blast_factory_capacity'),
-flow_into => {
'2->A' => [ 'blastp_unannotated' ],
'A->1' => [ 'hcluster_dump_input_all_pafs' ]
},
},
{ -logic_name => 'blastp_unannotated',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::BlastpUnannotated',
-parameters => {
'blast_db' => '#fasta_dir#/unannotated.fasta',
%blastp_parameters,
},
-rc_name => '250Mb_6_hour_job',
-flow_into => {
-1 => [ 'blastp_unannotated_himem' ], # MEMLIMIT
-2 => 'break_batch',
},
-hive_capacity => $self->o('blastpu_capacity'),
},
{ -logic_name => 'blastp_unannotated_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::BlastpUnannotated',
-parameters => {
'blast_db' => '#fasta_dir#/unannotated.fasta',
%blastp_parameters,
},
-rc_name => '2Gb_6_hour_job',
-flow_into => {
-2 => 'break_batch',
},
-priority => 20,
-hive_capacity => $self->o('blastpu_capacity'),
},
{ -logic_name => 'blastp_unannotated_no_runlimit',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::BlastpUnannotated',
-parameters => {
'blast_db' => '#fasta_dir#/unannotated.fasta',
%blastp_parameters,
},
-flow_into => {
-1 => [ 'blastp_unannotated_himem_no_runlimit' ], # MEMLIMIT
},
-hive_capacity => $self->o('blastpu_capacity'),
},
{ -logic_name => 'blastp_unannotated_himem_no_runlimit',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::BlastpUnannotated',
-parameters => {
'blast_db' => '#fasta_dir#/unannotated.fasta',
%blastp_parameters,
},
-rc_name => '2Gb_job',
-priority => 20,
-hive_capacity => $self->o('blastpu_capacity'),
},
{ -logic_name => 'break_batch',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::BreakUnannotatedBlast',
-flow_into => {
2 => 'blastp_unannotated_no_runlimit',
}
},
{ -logic_name => 'hcluster_dump_input_all_pafs',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::HclusterPrepareSingleTable',
-parameters => {
'outgroups' => $self->o('outgroups'),
},
-rc_name => '4Gb_job',
-hive_capacity => $self->o('reuse_capacity'),
-flow_into => [ 'hcluster_run' ],
},
# ---------------------------------------------[create and populate blast analyses]--------------------------------------------------
# ---------------------------------------------[clustering step]---------------------------------------------------------------------
{ -logic_name => 'build_hmm_entry_point',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::Dummy',
-flow_into => {
'1->A' => [ 'load_PANTHER' ],
'A->1' => [ 'remove_blacklisted_genes' ],
},
},
{ -logic_name => 'hcluster_run',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::SystemCmd',
-parameters => {
'clustering_max_gene_halfcount' => $self->o('clustering_max_gene_halfcount'),
'hcluster_exe' => $self->o('hcluster_exe'),
'cmd' => '#hcluster_exe# -m #clustering_max_gene_halfcount# -w 0 -s 0.34 -O -C #cluster_dir#/hcluster.cat -o #cluster_dir#/hcluster.out #cluster_dir#/hcluster.txt; sleep 30',
},
-flow_into => {
1 => [ 'hcluster_parse_output' ],
},
-rc_name => '32Gb_job',
},
{ -logic_name => 'hcluster_parse_output',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::HclusterParseOutput',
-rc_name => '16Gb_job',
},
{ -logic_name => 'cluster_tagging',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::ClusterTagging',
-hive_capacity => $self->o('cluster_tagging_capacity'),
-rc_name => '4Gb_job',
-batch_size => 50,
-flow_into => {
-1 => [ 'cluster_tagging_himem' ], # MEMLIMIT
},
},
{ -logic_name => 'cluster_tagging_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::ClusterTagging',
-hive_capacity => $self->o('cluster_tagging_capacity'),
-rc_name => '8Gb_job',
-batch_size => 50,
},
{ -logic_name => 'filter_1_factory',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::JobFactory',
-parameters => {
'inputquery' => 'SELECT root_id AS gene_tree_id, COUNT(seq_member_id) AS tree_num_genes FROM gene_tree_root JOIN gene_tree_node USING (root_id) WHERE tree_type = "tree" AND clusterset_id="default" GROUP BY root_id',
},
-flow_into => {
2 => 'filter_level_1',
}
},
{ -logic_name => 'filter_level_1',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::FilterSmallClusters',
-parameters => {
'min_num_members' => $self->o('min_num_members'),
'min_num_species' => $self->o('min_num_species'),
'min_taxonomic_coverage' => $self->o('min_taxonomic_coverage'),
'min_ratio_species_genes' => $self->o('min_ratio_species_genes'),
},
-hive_capacity => $self->o('filter_1_capacity'),
-batch_size => 10,
},
{ -logic_name => 'remove_blacklisted_genes',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GeneTrees::RemoveBlacklistedGenes',
-parameters => {
blacklist_file => $self->o('gene_blacklist_file'),
},
-flow_into => [ 'clusterset_backup' ],
-rc_name => '500Mb_job',
},
{ -logic_name => 'create_additional_clustersets',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GeneTrees::CreateClustersets',
-parameters => {
member_type => 'protein',
'additional_clustersets' => [qw(treebest phyml-aa phyml-nt nj-dn nj-ds nj-mm raxml raxml_parsimony raxml_bl notung copy raxml_update filter_level_1 filter_level_2 filter_level_3 filter_level_4 fasttree )],
},
-flow_into => [ 'cluster_tagging_factory' ],
},
# ---------------------------------------------[Pluggable QC step]----------------------------------------------------------
{ -logic_name => 'clusterset_backup',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::SqlCmd',
-parameters => {
'sql' => 'INSERT IGNORE INTO gene_tree_backup (seq_member_id, root_id) SELECT seq_member_id, root_id FROM gene_tree_node WHERE seq_member_id IS NOT NULL',
},
-flow_into => [ 'create_additional_clustersets' ],
},
{ -logic_name => 'cluster_tagging_factory',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::JobFactory',
-parameters => {
'inputquery' => 'SELECT root_id AS gene_tree_id FROM gene_tree_root WHERE tree_type = "tree" AND clusterset_id="default"',
},
-flow_into => {
'2->A' => [ 'cluster_tagging' ],
'A->1' => [ 'filter_1_factory' ],
},
},
# ---------------------------------------------[main tree fan]-------------------------------------------------------------
{ -logic_name => 'cluster_factory',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::JobFactory',
-parameters => {
'inputquery' => 'SELECT root_id AS gene_tree_id, COUNT(seq_member_id) AS tree_num_genes FROM gene_tree_root JOIN gene_tree_node USING (root_id) WHERE tree_type = "tree" AND clusterset_id="filter_level_1" GROUP BY root_id',
},
-flow_into => {
'2->A' => [ 'alignment_entry_point' ],
'A->1' => [ 'hc_global_tree_set' ],
},
},
{ -logic_name => 'alignment_entry_point',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GeneTrees::LoadTags',
-parameters => {
'tags' => {
'gene_count' => 0,
'reuse_aln_runtime' => 0,
},
'mcoffee_short_gene_count' => $self->o('mcoffee_short_gene_count'),
'mcoffee_himem_gene_count' => $self->o('mcoffee_himem_gene_count'),
'mafft_gene_count' => $self->o('mafft_gene_count'),
'mafft_himem_gene_count' => $self->o('mafft_himem_gene_count'),
'mafft_runtime' => $self->o('mafft_runtime'),
},
-flow_into => {
'1->A' => WHEN (
'(#tree_gene_count# < #mcoffee_short_gene_count#) and (#tree_reuse_aln_runtime#/1000 < #mafft_runtime#)' => 'mcoffee_short',
'(#tree_gene_count# >= #mcoffee_short_gene_count# and #tree_gene_count# < #mcoffee_himem_gene_count#) and (#tree_reuse_aln_runtime#/1000 < #mafft_runtime#)' => 'mcoffee',
'(#tree_gene_count# >= #mcoffee_himem_gene_count# and #tree_gene_count# < #mafft_gene_count#) and (#tree_reuse_aln_runtime#/1000 < #mafft_runtime#)' => 'mcoffee_himem',
'(#tree_gene_count# >= #mafft_gene_count# and #tree_gene_count# < #mafft_himem_gene_count#) or (#tree_reuse_aln_runtime#/1000 >= #mafft_runtime#)' => 'mafft',
'(#tree_gene_count# >= #mafft_himem_gene_count#) or (#tree_reuse_aln_runtime#/1000 >= #mafft_runtime#)' => 'mafft_himem',
),
'A->1' => [ 'filter_decision' ],
},
%decision_analysis_params,
},
{ -logic_name => 'hc_global_tree_set',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GeneTrees::SqlHealthChecks',
-parameters => {
mode => 'global_tree_set',
},
-flow_into => [ 'backup_before_cdhit_filter', 'write_stn_tags' ],
%hc_analysis_params,
},
{ -logic_name => 'write_stn_tags',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::DbCmd',
-parameters => {
'input_file' => $self->o('tree_stats_sql'),
},
-flow_into => [ 'email_tree_stats_report' ],
},
{ -logic_name => 'email_tree_stats_report',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GeneTrees::HTMLReport',
-parameters => {
'email' => $self->o('email'),
},
},
# ---------------------------------------------[Pluggable MSA steps]----------------------------------------------------------
{ -logic_name => 'mcoffee_short',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::MCoffee',
-parameters => {
'cmd_max_runtime' => '43200',
'method' => 'cmcoffee',
'mcoffee_exe' => $self->o('mcoffee_exe'),
'extaligners_exe_dir' => $self->o('extaligners_exe_dir'),
'escape_branch' => -1,
},
-hive_capacity => $self->o('mcoffee_capacity'),
-batch_size => 20,
-rc_name => '1Gb_job',
-flow_into => {
-1 => [ 'mcoffee' ], # MEMLIMIT
-2 => [ 'mafft' ],
},
},
{ -logic_name => 'mcoffee',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::MCoffee',
-parameters => {
'cmd_max_runtime' => '43200',
'method' => 'cmcoffee',
'mcoffee_exe' => $self->o('mcoffee_exe'),
'extaligners_exe_dir' => $self->o('extaligners_exe_dir'),
'escape_branch' => -1,
},
-analysis_capacity => $self->o('mcoffee_capacity'),
-rc_name => '2Gb_job',
-priority => $self->o('mcoffee_priority'),
-flow_into => {
-1 => [ 'mcoffee_himem' ], # MEMLIMIT
-2 => [ 'mafft' ],
},
},
{ -logic_name => 'mafft',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::Mafft',
-parameters => {
'mafft_exe' => $self->o('mafft_exe'),
'escape_branch' => -1,
},
-hive_capacity => $self->o('mcoffee_capacity'),
-rc_name => '2Gb_job',
-priority => $self->o('mafft_priority'),
-flow_into => {
-1 => [ 'mafft_himem' ], # MEMLIMIT
},
},
{ -logic_name => 'mcoffee_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::MCoffee',
-parameters => {
'cmd_max_runtime' => '43200',
'method' => 'cmcoffee',
'mcoffee_exe' => $self->o('mcoffee_exe'),
'extaligners_exe_dir' => $self->o('extaligners_exe_dir'),
'escape_branch' => -2,
},
-hive_capacity => $self->o('mcoffee_capacity'),
-rc_name => '8Gb_job',
-priority => $self->o('mcoffee_himem_priority'),
-flow_into => {
-1 => [ 'mafft_himem' ],
-2 => [ 'mafft_himem' ],
},
},
{ -logic_name => 'mafft_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::Mafft',
-parameters => {
'mafft_exe' => $self->o('mafft_exe'),
},
-hive_capacity => $self->o('mcoffee_capacity'),
-rc_name => '8Gb_job',
-priority => $self->o('mafft_himem_priority'),
},
{ -logic_name => 'filter_decision',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GeneTrees::LoadTags',
-parameters => {
'tags' => {
'gene_count' => 0,
'aln_length' => 0,
},
'threshold_n_genes' => $self->o('threshold_n_genes'),
'threshold_aln_len' => $self->o('threshold_aln_len'),
'threshold_n_genes_large' => $self->o('threshold_n_genes_large'),
'threshold_aln_len_large' => $self->o('threshold_aln_len_large'),
},
-flow_into =>
WHEN(
'(#tree_gene_count# <= #threshold_n_genes#) || (#tree_aln_length# <= #threshold_aln_len#)' => 'filter_level_2',
'(#tree_gene_count# >= #threshold_n_genes_large# and #tree_aln_length# > #threshold_aln_len#) || (#tree_aln_length# >= #threshold_aln_len_large# and #tree_gene_count# > #threshold_n_genes#)' => 'noisy_large',
ELSE 'noisy',
),
%decision_analysis_params,
},
{ -logic_name => 'noisy',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::Noisy',
-parameters => {
'noisy_exe' => $self->o('noisy_exe'),
'noisy_cutoff' => $self->o('noisy_cutoff'),
},
-hive_capacity => $self->o('alignment_filtering_capacity'),
-rc_name => '4Gb_job',
-flow_into => [ 'filter_level_2' ],
-batch_size => 5,
},
{ -logic_name => 'noisy_large',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::Noisy',
-parameters => {
'noisy_exe' => $self->o('noisy_exe'),
'noisy_cutoff' => $self->o('noisy_cutoff_large'),
},
-hive_capacity => $self->o('alignment_filtering_capacity'),
-rc_name => '16Gb_job',
-priority => $self->o('noisy_priority'),
-flow_into => [ 'filter_level_2' ],
-batch_size => 5,
},
{ -logic_name => 'filter_level_2',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::FilterGappyClusters',
-parameters => {
'max_gappiness' => $self->o('max_gappiness'),
},
-hive_capacity => $self->o('filter_2_capacity'),
-batch_size => 5,
-flow_into => [ 'filter_level_3' ],
},
{ -logic_name => 'filter_level_3',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::FilterSubfamiliesPatterns',
-parameters => {
'fasttree_exe' => $self->o('fasttree_exe'),
'treebest_exe' => $self->o('treebest_exe'),
'output_clusterset_id' => 'fasttree',
'input_clusterset_id' => 'default',
},
-hive_capacity => $self->o('filter_3_capacity'),
-rc_name => '2Gb_job',
-batch_size => 5,
-flow_into => {
2 => WHEN (
'(#tree_gene_count# >= #mafft_gene_count# and #tree_gene_count# < #mafft_himem_gene_count#)' => 'mafft_supertree',
'(#tree_gene_count# >= #mafft_himem_gene_count#)' => 'mafft_supertree_himem',
),
-1 => [ 'filter_level_3_himem' ], # MEMLIMIT
}
},
{ -logic_name => 'filter_level_3_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::FilterSubfamiliesPatterns',
-parameters => {
'fasttree_exe' => $self->o('fasttree_exe'),
'treebest_exe' => $self->o('treebest_exe'),
'output_clusterset_id' => 'fasttree',
'input_clusterset_id' => 'default',
},
-hive_capacity => $self->o('filter_3_capacity'),
-rc_name => '16Gb_job',
-batch_size => 5,
-flow_into => {
2 => WHEN (
'(#tree_gene_count# >= #mafft_gene_count# and #tree_gene_count# < #mafft_himem_gene_count#)' => 'mafft_supertree',
'(#tree_gene_count# >= #mafft_himem_gene_count#)' => 'mafft_supertree_himem',
),
}
},
{ -logic_name => 'mafft_supertree',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::Mafft',
-parameters => {
'mafft_exe' => $self->o('mafft_exe'),
'escape_branch' => -1,
},
-hive_capacity => $self->o('mcoffee_capacity'),
-rc_name => '2Gb_job',
-priority => $self->o('mafft_priority'),
-flow_into => {
-1 => [ 'mafft_supertree_himem' ], # MEMLIMIT
},
},
{ -logic_name => 'mafft_supertree_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::Mafft',
-parameters => {
'mafft_exe' => $self->o('mafft_exe'),
},
-hive_capacity => $self->o('mcoffee_capacity'),
-rc_name => '8Gb_job',
-priority => $self->o('mafft_himem_priority'),
},
# ---------------------------------------------[main tree creation loop]-------------------------------------------------------------
# ---------------------------------------------[alignment filtering]-------------------------------------------------------------
# ---------------------------------------------[small trees decision]-------------------------------------------------------------
# ---------------------------------------------[model test]-------------------------------------------------------------
# ---------------------------------------------[tree building with treebest]-------------------------------------------------------------
# ---------------------------------------------[tree building with raxml]-------------------------------------------------------------
# ---------------------------------------------[tree reconciliation / rearrangements]-------------------------------------------------------------
{ -logic_name => 'split_genes',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::FindContiguousSplitGenes',
-parameters => {
split_genes_gene_count => $self->o('split_genes_gene_count'),
},
-hive_capacity => $self->o('split_genes_capacity'),
-rc_name => '500Mb_job',
-batch_size => 20,
-flow_into => [ 'build_HMM_aa_v3' ],
},
{ -logic_name => 'build_HMM_aa_v3',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::BuildHMM',
-parameters => {
'hmmer_home' => $self->o('hmmer3_home'),
'hmmer_version' => 3,
'check_split_genes' => 1,
'cdna' => 0,
},
-hive_capacity => $self->o('build_hmm_capacity'),
-batch_size => 5,
-priority => -20,
-rc_name => '1Gb_job',
-flow_into => {
-1 => 'build_HMM_aa_v3_himem',
},
},
{ -logic_name => 'build_HMM_aa_v3_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::BuildHMM',
-parameters => {
'hmmer_home' => $self->o('hmmer3_home'),
'hmmer_version' => 3,
'check_split_genes' => 1,
'cdna' => 0,
},
-hive_capacity => $self->o('build_hmm_capacity'),
-priority => -15,
-rc_name => '4Gb_job',
},
# ---------------------------------------------[Quick tree break steps]-----------------------------------------------------------------------
# -------------------------------------------[CDHit step (filter_level_4)]---------------------------------------------------------------------
{ -logic_name => 'backup_before_cdhit_filter',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::DatabaseDumper',
-parameters => {
'table_list' => 'peptide_align_feature%',
'exclude_list' => 1,
'output_file' => '#dump_dir#/snapshot_4_before_cdhit_filter.sql.gz',
},
-flow_into => {
'1' => [ 'CDHit_factory' ],
},
},
{ -logic_name => 'CDHit_factory',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::JobFactory',
-parameters => {
'inputquery' => 'SELECT root_id AS gene_tree_id FROM gene_tree_root WHERE tree_type = "tree" AND clusterset_id="filter_level_3"',
},
-flow_into => {
'2->A' => [ 'CDHit' ],
'A->1' => [ 'prepare_hmm_profiles' ],
},
},
{ -logic_name => 'CDHit',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::CDHit',
-parameters => {
'cdhit_exe' => $self->o('cdhit_exe'),
'cdhit_identity_threshold' => $self->o('cdhit_identity_threshold'),
'cdhit_num_threads' => 1,
'cdhit_memory_in_mb' => 0,
},
-flow_into => {
1 => [ 'CDHit_alignment_entry_point' ], # MEMLIMIT
-1 => [ 'CDHit_himem' ], # MEMLIMIT
},
-hive_capacity => $self->o('build_hmm_capacity'),
-batch_size => 50,
-priority => -20,
-rc_name => '1Gb_job',
},
{ -logic_name => 'CDHit_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::CDHit',
-parameters => {
'cdhit_exe' => $self->o('cdhit_exe'),
'cdhit_identity_threshold' => $self->o('cdhit_identity_threshold'),
'cdhit_num_threads' => 4,
'cdhit_memory_in_mb' => 0,
},
-flow_into => {
1 => [ 'CDHit_alignment_entry_point' ], # MEMLIMIT
},
-hive_capacity => $self->o('build_hmm_capacity'),
-batch_size => 10,
-priority => -20,
-rc_name => '4Gb_4c_job',
},
{ -logic_name => 'CDHit_alignment_entry_point',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::GeneTrees::LoadTags',
-parameters => {
'tags' => {
'gene_count' => 0,
'reuse_aln_runtime' => 0,
},
'mcoffee_short_gene_count' => $self->o('mcoffee_short_gene_count'),
'mcoffee_himem_gene_count' => $self->o('mcoffee_himem_gene_count'),
'mafft_gene_count' => $self->o('mafft_gene_count'),
'mafft_himem_gene_count' => $self->o('mafft_himem_gene_count'),
'mafft_runtime' => $self->o('mafft_runtime'),
},
-flow_into => {
'1->A' => WHEN (
'(#tree_gene_count# < #mcoffee_short_gene_count#) and (#tree_reuse_aln_runtime#/1000 < #mafft_runtime#)' => 'cdhit_mcoffee_short',
'(#tree_gene_count# >= #mcoffee_short_gene_count# and #tree_gene_count# < #mcoffee_himem_gene_count#) and (#tree_reuse_aln_runtime#/1000 < #mafft_runtime#)' => 'cdhit_mcoffee',
'(#tree_gene_count# >= #mcoffee_himem_gene_count# and #tree_gene_count# < #mafft_gene_count#) and (#tree_reuse_aln_runtime#/1000 < #mafft_runtime#)' => 'cdhit_mcoffee_himem',
'(#tree_gene_count# >= #mafft_gene_count# and #tree_gene_count# < #mafft_himem_gene_count#) or (#tree_reuse_aln_runtime#/1000 >= #mafft_runtime#)' => 'cdhit_mafft',
'(#tree_gene_count# >= #mafft_himem_gene_count#) or (#tree_reuse_aln_runtime#/1000 >= #mafft_runtime#)' => 'cdhit_mafft_himem',
),
'A->1' => [ 'split_genes' ],
},
%decision_analysis_params,
},
{ -logic_name => 'cdhit_mcoffee_short',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::MCoffee',
-parameters => {
'cmd_max_runtime' => '43200',
'method' => 'cmcoffee',
'mcoffee_exe' => $self->o('mcoffee_exe'),
'extaligners_exe_dir' => $self->o('extaligners_exe_dir'),
'escape_branch' => -1,
},
-hive_capacity => $self->o('mcoffee_capacity'),
-batch_size => 20,
-rc_name => '1Gb_job',
-flow_into => {
-1 => [ 'cdhit_mcoffee' ], # MEMLIMIT
-2 => [ 'cdhit_mafft' ],
},
},
{ -logic_name => 'cdhit_mcoffee',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::MCoffee',
-parameters => {
'cmd_max_runtime' => '43200',
'method' => 'cmcoffee',
'mcoffee_exe' => $self->o('mcoffee_exe'),
'extaligners_exe_dir' => $self->o('extaligners_exe_dir'),
'escape_branch' => -1,
},
-analysis_capacity => $self->o('mcoffee_capacity'),
-rc_name => '2Gb_job',
-priority => $self->o('mcoffee_priority'),
-flow_into => {
-1 => [ 'cdhit_mcoffee_himem' ], # MEMLIMIT
-2 => [ 'cdhit_mafft' ],
},
},
{ -logic_name => 'cdhit_mafft',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::Mafft',
-parameters => {
'mafft_exe' => $self->o('mafft_exe'),
'escape_branch' => -1,
},
-hive_capacity => $self->o('mcoffee_capacity'),
-rc_name => '2Gb_job',
-priority => $self->o('mafft_priority'),
-flow_into => {
-1 => [ 'cdhit_mafft_himem' ], # MEMLIMIT
},
},
{ -logic_name => 'cdhit_mcoffee_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::MCoffee',
-parameters => {
'cmd_max_runtime' => '43200',
'method' => 'cmcoffee',
'mcoffee_exe' => $self->o('mcoffee_exe'),
'extaligners_exe_dir' => $self->o('extaligners_exe_dir'),
'escape_branch' => -2,
},
-hive_capacity => $self->o('mcoffee_capacity'),
-rc_name => '8Gb_job',
-priority => $self->o('mcoffee_himem_priority'),
-flow_into => {
-1 => [ 'cdhit_mafft_himem' ],
-2 => [ 'cdhit_mafft_himem' ],
},
},
{ -logic_name => 'cdhit_mafft_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::Mafft',
-parameters => {
'mafft_exe' => $self->o('mafft_exe'),
},
-hive_capacity => $self->o('mcoffee_capacity'),
-rc_name => '8Gb_job',
-priority => $self->o('mafft_himem_priority'),
},
# ---------------------------------------------[HMM thresholding step]-------------------------------------------------------------
{ -logic_name => 'prepare_hmm_profiles',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::PrepareHmmProfiles',
-rc_name => '1Gb_job',
-max_retry_count => 0,
-parameters => {
'library_name' => $self->o('compara_hmm_library_name'),
'hmmer_home' => $self->o('hmmer3_home'),
'worker_compara_hmm_lib' => $self->o('worker_compara_hmm_library_basedir'),
},
-flow_into => {
1 => [ 'hmm_thresholding_factory' ],
},
},
{ -logic_name => 'hmm_thresholding_factory',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::HmmThresholdFactory',
-parameters => {
'inputquery' => 'SELECT root_id, seq_member_id FROM gene_tree_root JOIN gene_tree_node USING (root_id) WHERE tree_type = "tree" AND clusterset_id = "filter_level_4" AND seq_member_id IS NOT NULL',
},
-rc_name => '2Gb_job',
-flow_into => {
'2->A' => [ 'hmm_thresholding_searches' ],
'A->1' => [ 'compute_thresholds' ],
},
},
{ -logic_name => 'hmm_thresholding_searches',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::HMMerSearch',
-rc_name => '2Gb_job',
-parameters => {
'hmmer_home' => $self->o('hmmer3_home'),
'library_name' => $self->o('compara_hmm_library_name'),
'library_basedir' => $self->o('worker_compara_hmm_library_basedir'),
'target_table' => $self->o('hmm_thresholding_table'),
'source_clusterset_id' => 'filter_level_4',
'fetch_all_seqs' => 1,
'store_all_hits' => 1,
},
-hive_capacity => $self->o('HMMer_search_all_hits_capacity'),
-batch_size => 5,
-flow_into => {
-1 => [ 'hmm_thresholding_searches_himem' ],
}
},
{ -logic_name => 'hmm_thresholding_searches_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::HMMerSearch',
-rc_name => '8Gb_job',
-parameters => {
'hmmer_home' => $self->o('hmmer3_home'),
'library_name' => $self->o('compara_hmm_library_name'),
'library_basedir' => $self->o('worker_compara_hmm_library_basedir'),
'target_table' => $self->o('hmm_thresholding_table'),
'source_clusterset_id' => 'filter_level_4',
'fetch_all_seqs' => 1,
'store_all_hits' => 1,
},
-batch_size => 1,
-priority => 10,
-hive_capacity => $self->o('HMMer_search_all_hits_capacity'),
-flow_into => {
-1 => [ 'hmm_thresholding_searches_super_himem' ],
}
},
{ -logic_name => 'hmm_thresholding_searches_super_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::HMMerSearch',
-rc_name => '64Gb_job',
-parameters => {
'hmmer_home' => $self->o('hmmer3_home'),
'library_name' => $self->o('compara_hmm_library_name'),
'library_basedir' => $self->o('worker_compara_hmm_library_basedir'),
'target_table' => $self->o('hmm_thresholding_table'),
'source_clusterset_id' => 'filter_level_4',
'fetch_all_seqs' => 1,
'store_all_hits' => 1,
},
-batch_size => 1,
-priority => 20,
-hive_capacity => $self->o('HMMer_search_all_hits_capacity'),
},
{ -logic_name => 'compute_thresholds',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::ComputeHmmThresholds',
-rc_name => '4Gb_job',
-flow_into => {
1 => [ 'build_HMM_with_tags_factory' ],
},
},
#new build_HMM_aa_v3 with cut_off tags
#-----------------------------------------------------------------------------------------------
{ -logic_name => 'build_HMM_with_tags_factory',
-module => 'Bio::EnsEMBL::Hive::RunnableDB::JobFactory',
-parameters => {
'inputquery' => 'SELECT root_id AS gene_tree_id FROM gene_tree_root WHERE tree_type = "tree" AND clusterset_id="filter_level_4"',
},
-flow_into => {
'2->A' => [ 'build_HMM_with_tags' ],
'A->1' => [ 'prepare_hmm_profiles_post_thresholding' ],
},
},
{ -logic_name => 'prepare_hmm_profiles_post_thresholding',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ComparaHMM::PrepareHmmProfiles',
-rc_name => '1Gb_job',
-max_retry_count => 0,
-parameters => {
'library_name' => $self->o('compara_hmm_library_name'),
'hmmer_home' => $self->o('hmmer3_home'),
'readme_file' => $self->o('readme_file'),
'store_in_warehouse'=> 1,
'target_compara_hmm_lib' => $self->o('target_compara_hmm_library_basedir'),
'worker_compara_hmm_lib' => $self->o('worker_compara_hmm_library_basedir'),
},
},
{ -logic_name => 'build_HMM_with_tags',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::BuildHMM',
-parameters => {
'hmmer_home' => $self->o('hmmer3_home'),
'hmmer_version' => 3,
'check_split_genes' => 1,
'cdna' => 0,
'include_thresholds' => 1,
'check_split_genes' => 1,
},
-hive_capacity => $self->o('build_hmm_capacity'),
-batch_size => 5,
-priority => -20,
-rc_name => '1Gb_job',
-flow_into => {
-1 => 'build_HMM_with_tags_himem',
},
},
{ -logic_name => 'build_HMM_with_tags_himem',
-module => 'Bio::EnsEMBL::Compara::RunnableDB::ProteinTrees::BuildHMM',
-parameters => {
'hmmer_home' => $self->o('hmmer3_home'),
'hmmer_version' => 3,
'check_split_genes' => 1,
'cdna' => 0,
'include_thresholds' => 1,
'check_split_genes' => 1,
},
-hive_capacity => $self->o('build_hmm_capacity'),
-priority => -15,
-rc_name => '4Gb_job',
},
#-----------------------------------------------------------------------------------------------
];
}
1;
| Ensembl/ensembl-compara | modules/Bio/EnsEMBL/Compara/PipeConfig/CreateHmmProfiles_conf.pm | Perl | apache-2.0 | 77,199 |
package VMOMI::VirtualHdAudioCard;
use parent 'VMOMI::VirtualSoundCard';

use strict;
use warnings;

# Class ancestry, nearest ancestor first, as used by the VMOMI
# (de)serialization machinery.
our @class_ancestors = qw(
    VirtualSoundCard
    VirtualDevice
    DynamicData
);

# This subtype declares no members of its own beyond what it inherits.
our @class_members = ();

# Return the list of ancestor class names for this type.
sub get_class_ancestors {
    return @class_ancestors;
}

# Return all inherited members followed by this class's own (none).
sub get_class_members {
    my $class = shift;
    return ($class->SUPER::get_class_members(), @class_members);
}

1;
| stumpr/p5-vmomi | lib/VMOMI/VirtualHdAudioCard.pm | Perl | apache-2.0 | 438 |
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package apps::selenium::plugin;

use strict;
use warnings;
use base qw(centreon::plugins::script_simple);

# Constructor: delegate object creation to the generic simple-script
# plugin base class, then register the modes this plugin offers.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';

    # Single mode: run a Selenium scenario against the server.
    %{ $self->{modes} } = (
        scenario => 'apps::selenium::mode::scenario',
    );

    return $self;
}

1;
__END__
=head1 PLUGIN DESCRIPTION
Check Selenium server.
=cut
| golgoth31/centreon-plugins | apps/selenium/plugin.pm | Perl | apache-2.0 | 1,258 |
# Copyright (c) 2014 Timm Murray
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Moo role for video-output devices that push frames to consumer
# callbacks rather than exposing a pull-style stream.
package Device::WebIO::Device::VideoOutputCallback;
$Device::WebIO::Device::VideoOutputCallback::VERSION = '0.009';
use v5.12;
use Moo::Role;

# Builds on the generic video-output contract.
with 'Device::WebIO::Device::VideoOutput';

# Consumers must implement: registering a per-frame callback for a
# channel, and starting the loop that drives those callbacks.
# (Expected signatures are documented in the POD below.)
requires 'vid_stream_callback';
requires 'vid_stream_begin_loop';
1;
__END__
=head1 NAME
Device::WebIO::Device::VideoOutputCallback - Role for callback-driven video
=head1 DESCRIPTION
This is an extension of C<VideoOutput> which can trigger callbacks.
=head1 REQUIRED METHODS
=head2 vid_stream_callback
vid_stream_callback( $channel, $type, $callback );
Add a callback that will be called for each frame of video. The callback will
be passed an arrayref of bytes for the frame data.
Only 1 callback per channel will be kept.
=head2 vid_stream_begin_loop
vid_stream_begin_loop( $channel );
Begin the loop that will start the callbacks.
=head1 LICENSE
Copyright (c) 2014 Timm Murray
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are
permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of
conditions and the following disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
=cut
| gitpan/Device-WebIO | lib/Device/WebIO/Device/VideoOutputCallback.pm | Perl | bsd-2-clause | 3,564 |
#!/usr/bin/perl
use strict;
use FindBin;
use lib "$FindBin::Bin/../lib";
use lib "$FindBin::Bin/../lib/Grids/VM/Memory/lib";
use lib "$FindBin::Bin/../lib/Grids/VM/Memory/blib/arch/auto/Grids/VM/Memory";
use lib "$FindBin::Bin/../lib/Grids/VM/Register/lib";
use lib "$FindBin::Bin/../lib/Grids/VM/Register/blib/arch/auto/Grids/VM/Register";
use Grids::VM;
use Grids::Console;
use Grids::Code::Program;
use AnyEvent;
# Optional path to a GridsCode program file to load at startup.
my $infile = shift;

# The virtual machine instance, capped at 16 MiB of memory.
my $vm = Grids::VM->new(memory_limit => 16 * 1024 * 1024);

# Console command dispatch table (st/s, r, reg are short aliases).
my %handlers = (
    help => \&help,
    load => \&load,
    asld => \&asld,
    step => \&step,
    st => \&step,
    s => \&step,
    mem => \&mem,
    run => \&run,
    r => \&run,
    regs => \&regs,
    reg => \&regs,
    reset => \&reset,
);

# If a program file was given on the command line, load it now and use
# the disassembly of the first instruction as the console banner.
my $msg;
if ($infile) {
    load(undef, $infile);
    $msg = dis_current_instruction();
}

# Event-loop condition variable; the console signals it on quit.
my $main = AnyEvent->condvar;
my $con = Grids::Console->new(
    cv => $main,
    title => "GridsVM",
    prompt => "GridsVM> ",
    handlers => \%handlers,
    message => $msg,
);
$con->listen_for_input;
# Block here until the console tells us to exit.
$main->recv;
########### commands
# Load a previously assembled GridsCode program file into the VM.
# Returns a status message for display on the console.
sub load {
    my ($self, $filename) = @_;

    return "File $filename does not exist" unless -e $filename;

    my $program = Grids::Code::Program->load_from_file($filename);
    return "Unable to load $filename" unless $program;

    $vm->load_program($program);
    return "Loaded program";
}
# Assemble a GridsAsm source file and load the resulting program into
# the VM ("assemble and load"). Returns a console status message.
sub asld {
    my ($self, $filename) = @_;

    my $code = slurp($filename);
    # slurp() returns undef when the file is missing or unreadable;
    # previously that undef was passed straight into the assembler.
    return "Unable to read $filename" unless defined $code;

    my $program = Grids::Code->assemble_program($code);
    return vmload($program->segments);
}
# Format the VM's program counter and every general register for
# display, two registers per output line.
sub regs {
    my $regs = $vm->regs;    # register bank accessor (kept as in original)

    my $out = sprintf "PC: 0x%08X\n", $vm->pc;

    my $last_reg = $#Grids::VM::REGS;
    for my $reg (0 .. $last_reg) {
        $out .= sprintf "[%02d %4s]: 0x%08X ",
                        $reg, $vm->reg_name($reg), $vm->reg($reg);
        # Wrap after every second register.
        $out .= "\n" if ($reg + 1) % 2 == 0;
    }

    return $out;
}
# Reinitialize all VM registers (including the PC) and display the
# instruction now pointed to.
sub reset {
    my ($self) = @_;

    $vm->init_regs;
    return dis_current_instruction();
}
# Run the loaded program until the VM halts; blocks the console until
# execution finishes.
sub run {
    my ($self) = @_;

    print "Running...\n";
    $vm->run;

    return "Program reached end of memory";
}
# Execute exactly one instruction. Returns the disassembly of the next
# instruction, or a message once the program has finished.
sub step {
    my ($self) = @_;

    return $vm->step
        ? dis_current_instruction()
        : "End of program reached";
}
# Return the disassembly of the instruction at the current program
# counter, formatted as "0xADDRESS> MNEMONIC".
sub dis_current_instruction {
    my $pc      = $vm->pc;
    my $listing = Grids::Code->disassemble_string($vm->current_instruction);
    return sprintf "0x%08X> %s\n", $pc, $listing;
}
# Dump the first $len bytes (default 256) of VM memory, one byte per
# line with offset, hex, decimal, binary and character renderings.
sub mem {
    my ($self, $len) = @_;
    $len ||= 256;

    # Never read past the end of allocated memory.
    my $size = $vm->mem->size;
    $size = $len if $size > $len;

    my $dump = '';
    for my $pos (0 .. $size - 1) {
        my $byte = unpack("C", $vm->mem->get($pos, 1));
        $dump .= sprintf "%08X: 0x%02X %3d 0b%08b %c\n",
                         $pos, $byte, $byte, $byte, $byte;
    }

    return $dump;
}
# Return the console help text listing every debugger command. The
# command list is a single verbatim q{} string (shown as-is).
sub help {
    my ($self) = @_;
    return q {
load (filename) - loads a file containing GridsCode
asld (filename) - (assemble and load) assemble a GridsAsm file and load it
mem - dump memory
step - step program one instruction
run - runs program
regs - dump registers
quit - quit
};
}
###########
# Slurp in a file and return its entire contents as one string.
# Returns undef (after a warning on STDERR) if the file does not exist
# or cannot be opened.
sub slurp {
    my ($filename) = @_;

    unless (-e $filename) {
        print STDERR "$filename does not exist.\n";
        return undef;
    }

    # Three-argument open with an explicit read mode; the previous
    # two-argument form would interpret leading mode characters
    # (e.g. ">", "|") embedded in $filename.
    my $in;
    unless (open($in, '<', $filename)) {
        print STDERR "Could not open $filename: $!\n";
        return undef;
    }

    # Locally clear the input record separator to read the whole file
    # in one go.
    my $contents = do {
        local $/;
        <$in>;
    };
    close $in;

    return $contents;
}
# Wrap a segment map in a Grids::Code::Program, load it into the VM,
# and report the program size plus the first instruction's disassembly.
sub vmload {
    my ($segment_map) = @_;

    # Direct method-call syntax; the old indirect-object form
    # "new Grids::Code::Program(...)" is ambiguous and discouraged.
    my $prog = Grids::Code::Program->new(segments => $segment_map);
    $vm->load_program($prog);

    printf "Loaded %i bytes\n", length $prog->bytes;
    return dis_current_instruction();
}
| revmischa/grids | tools/gridsvm.pl | Perl | bsd-3-clause | 4,097 |
# Auto-generated SOAP::WSDL element class for the ITIS web service;
# regenerate from the WSDL/XSD rather than editing by hand.
package MyElements::getCommonNamesFromTSN;
use strict;
use warnings;
{ # BLOCK to scope variables

# XML namespace this element belongs to.
sub get_xmlns { 'http://itis_service.itis.usgs.org' }

__PACKAGE__->__set_name('getCommonNamesFromTSN');
__PACKAGE__->__set_nillable();
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();

use base qw(
    SOAP::WSDL::XSD::Typelib::Element
    SOAP::WSDL::XSD::Typelib::ComplexType
);

# No XML attribute class is associated with this element.
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;

sub __get_attr_class {
    return $XML_ATTRIBUTE_CLASS;
}

use Class::Std::Fast::Storable constructor => 'none';
use base qw(SOAP::WSDL::XSD::Typelib::ComplexType);

Class::Std::initialize();

{ # BLOCK to scope variables

# Inside-out attribute storage for the single 'tsn' child element
# (read accessor get_tsn generated via :ATTR).
my %tsn_of :ATTR(:get<tsn>);

# Register the element's properties with the SOAP::WSDL factory:
# element names, storage hashes, XSD types, and XML tag names.
__PACKAGE__->_factory(
    [ qw( tsn
) ],
    {
        'tsn' => \%tsn_of,
    },
    {
        'tsn' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
    },
    {
        'tsn' => 'tsn',
    }
);

} # end BLOCK

} # end of BLOCK
1;
=pod
=head1 NAME
MyElements::getCommonNamesFromTSN
=head1 DESCRIPTION
Perl data type class for the XML Schema defined element
getCommonNamesFromTSN from the namespace http://itis_service.itis.usgs.org.
=head1 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * tsn
$element->set_tsn($data);
$element->get_tsn();
=back
=head1 METHODS
=head2 new
my $element = MyElements::getCommonNamesFromTSN->new($data);
Constructor. The following data structure may be passed to new():
{
tsn => $some_value, # string
},
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| phylotastic-legacy/TNRastic | tnrs_handler/bin/tnrs_adapter/itis/SOAPInterface/MyElements/getCommonNamesFromTSN.pm | Perl | bsd-3-clause | 1,632 |
#!/usr/bin/perl -w
#
# $Id: phyopt.pl 270 2008-02-23 03:48:41Z lapp $
#
# Copyright 2007-2008 James Estill
#
# This file is part of BioSQL.
#
# BioSQL is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# BioSQL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with BioSQL. If not, see <http://www.gnu.org/licenses/>.
#
#-----------------------------------------------------------+
# |
# phyopt.pl - Optimize phylodb database. |
# |
#-----------------------------------------------------------+
# |
# CONTACT: JamesEstill_at_gmail.com |
# STARTED: 07/04/2007 |
# UPDATED: 08/19/2007 |
# |
# DESCRIPTION: |
# Optimize a PhyloDB. Add left and right index values and |
# determine transitive closure. |
# |
# LICENSE: |
# GNU Lesser Public License |
# http://www.gnu.org/licenses/lgpl.html |
# |
#-----------------------------------------------------------+
#
# TO DO:
# - PGSQL support
# - The internal nodes used by TreeI will not be the same
# as the nodes used in the database so the DB ID will
# need to be fetched when adding edges to the database.
# - Add taxa to the biosql database and add taxa information
# from the tree to the PhyloDB node table. This would required
# using the taxon_id field in the node table
# - Add error information to the diagnostics section of the
# documentation
#-----------------------------+
# INCLUDES |
#-----------------------------+
use strict;
use DBI;
use Getopt::Long;
use constant LOG_CHUNK => 10000;
#-----------------------------+
# VARIABLE SCOPE |
#-----------------------------+
my $VERSION = "1.0"; #
my $usrname = $ENV{DBI_USER}; # User name to connect to database
my $pass = $ENV{DBI_PASSWORD}; # Password to connect to database
my $dsn = $ENV{DBI_DSN}; # DSN for database connection
my $infile; # Full path to the input file to parse
my $format = 'newick'; # Data format used in infile
my $db; # Database name (ie. biosql)
my $host; # Database host (ie. localhost)
my $driver; # Database driver (ie. mysql)
my $sqldir; # Directory that contains the sql to run
# to create the tables.
my $quiet = 0; # Run the program in quiet mode
# will not prompt for command line options
my $tree_name; # The name of the tree
# For files with multiple trees, this may
# be used as a base name to name the trees with
my $statement; # Var to hold SQL statement string
#my $sth; # Statement handle for SQL statement object
my @trees = (); # Array holding the names of the trees that will
# be exported
my $verbose; # Boolean, but chatty or not
my $tree; # This is what H. Lapp used
my $show_help = 0; # Display help
my $show_man = 0; # Show the man page via perldoc
my $show_usage = 0; # Show the basic usage for the program
my $show_version = 0; # Show the program version
#-----------------------------+
# COMMAND LINE OPTIONS |
#-----------------------------+
my $ok = GetOptions("d|dsn=s" => \$dsn,
"u|dbuser=s" => \$usrname,
"p|dbpass=s" => \$pass,
"s|sqldir=s" => \$sqldir,
"driver=s" => \$driver,
"dbname=s" => \$db,
"host=s" => \$host,
"t|tree=s" => \$tree_name,
"q|quiet" => \$quiet,
"verbose" => \$verbose,
"version" => \$show_version,
"man" => \$show_man,
"usage" => \$show_usage,
"h|help" => \$show_help,
);
#-----------------------------+
# SHOW REQUESTED HELP |
#-----------------------------+
if ($show_usage) {
print_help("");
}
if ($show_help || (!$ok) ) {
print_help("full");
}
if ($show_version) {
print "\n$0:\nVersion: $VERSION\n\n";
exit;
}
if ($show_man) {
# User perldoc to generate the man documentation.
system("perldoc $0");
exit($ok ? 0 : 2);
}
print "Staring $0 ..\n" if $verbose;
#-----------------------------+
# BUILD OR PARSE THE DSN      |
#-----------------------------+
# A full dsn can be passed at the command line, or its components
# (driver, database, host) are assembled from defaults and options.
if ($dsn) {
    # We need to parse the database name, driver etc. from the dsn
    # string, expected in the form DBI:$driver:database=$db;host=$host.
    # Other dsn strings will not be parsed properly.
    # Split commands are often faster than regular expressions,
    # although a regexp may offer a more stable parse than splits do.
    my ($cruft, $prefix, $suffix, $predb, $prehost);
    ($prefix, $driver, $suffix) = split(/:/, $dsn);
    ($predb, $prehost) = split(/;/, $suffix);
    ($cruft, $db) = split(/=/, $predb);
    ($cruft, $host) = split(/=/, $prehost);

    # Only show the parsed components when running verbosely; these
    # were previously unconditional leftover debug prints.
    if ($verbose) {
        print "\tDSN:\t$dsn\n";
        print "\tDRIVER:\t$driver\n";
        print "\tDB:\t$db\n";
        print "\tHOST:\t$host\n";
    }
}
else {
    # Set default values for any component not given at the command line
    $db     = "biosql"    unless $db;
    $host   = "localhost" unless $host;
    $driver = "mysql"     unless $driver;
    $dsn    = "DBI:$driver:database=$db;host=$host";
}
#-----------------------------+
# GET DB PASSWORD |
#-----------------------------+
# This prevents the password from being globally visible
# I don't know what happens with this in anything but Linux
# so I may need to get rid of this or modify it
# if it crashes on other OS's
unless ($pass) {
print "\nEnter password for the user $usrname\n";
system('stty', '-echo') == 0 or die "can't turn off echo: $?";
$pass = <STDIN>;
system('stty', 'echo') == 0 or die "can't turn on echo: $?";
chomp $pass;
}
#-----------------------------+
# CONNECT TO THE DATABASE |
#-----------------------------+
# Commented out while I work on fetching tree structure
my $dbh = &connect_to_db($dsn, $usrname, $pass);
#-----------------------------+
# SQL STATEMENTS |
#-----------------------------+
# As taken from tree-precompute
my $sel_children = prepare_sth(
$dbh, "SELECT child_node_id FROM edge WHERE parent_node_id = ?");
my $upd_nestedSet = prepare_sth(
$dbh, "UPDATE node SET left_idx = ?, right_idx = ? WHERE node_id = ?");
my $reset_nestedSet = prepare_sth(
$dbh, "UPDATE node SET left_idx = null, right_idx = null ".
"WHERE tree_id =?");
#my $sel_trees =
# "SELECT t.name, t.node_id, t.tree_id FROM tree t, biodatabase db "
# ."WHERE db.biodatabase_id = t.biodatabase_id";
# Jame changed this to the following
my $sel_trees =
"SELECT name, node_id, tree_id FROM tree";
#-----------------------------+
# EXIT HANDLER |
#-----------------------------+
#END {
# &end_work($dbh);
#}
#-----------------------------+
# GET THE TREES TO PROCESS |
#-----------------------------+
# TODO: Check to see if the tree does exist in the database
# throw error message if it does not
# This also needed for phyexport.pl
#-----------------------------+
# GET THE TREES TO PROCESS    |
#-----------------------------+
# TODO: Check to see if the tree does exist in the database
# throw error message if it does not
# This also needed for phyexport.pl
my @bind_params = ();
if (defined($tree_name)) {
# BUGFIX: the rewritten $sel_trees query has neither the 't' table
# alias nor a WHERE clause, so appending " AND t.name = ?" produced
# invalid SQL whenever --tree was given.  Filter on the bare column
# with a proper WHERE clause instead.
$sel_trees .= " WHERE name = ?";
push(@bind_params, $tree_name);
}
# Namespace filtering relied on the old biodatabase join and is not
# possible with the plain tree query.
#if (defined($namespace)) {
# $sel_trees .= " AND db.name = ?";
# push(@bind_params, $namespace);
#}
my $sth = prepare_sth($dbh, $sel_trees);
execute_sth($sth, @bind_params);
# Process each selected tree: recompute its nested-set indexes, then
# its transitive closure, committing after each stage.
while(my $row = $sth->fetchrow_arrayref) {
# NOTE(review): this lexical $tree_name shadows the command-line
# option variable of the same name declared earlier in the script.
my ($tree_name, $root_id, $tree_id) = @$row;
print STDERR "Computing nested set values for tree $tree_name...\n";
print STDERR "\tresetting existing values\n" if $verbose;
# we need to reset the values to null first to prevent any
# possible unique key violations when updating on a tree that has
# them already
execute_sth($reset_nestedSet, $tree_id);
# (Jamie added the commits in this loop.)
print STDERR "\tcomputing new values:\n" if $verbose;
# recursively traverse the tree, depth-first, filling in the value
# along the way
handle_progress(0) if $verbose; # initialize
walktree($root_id);
# commit the nested-set updates before computing the closure
$dbh->commit;
handle_progress(LOG_CHUNK, 1) if $verbose; # final tally
print STDERR "Computing transitive closure for tree $tree_name...\n";
# transitive closure for the given tree; this will delete existing
# paths first
compute_tc($dbh, $tree_id);
print STDERR "Done.\n";
$dbh->commit;
}
# End of program
$sth->finish();
$dbh->disconnect();
print "\n$0 has finished.\n";
exit;
#-----------------------------------------------------------+
# SUBFUNCTIONS |
#-----------------------------------------------------------+
sub walktree {
    # Depth-first traversal that assigns nested-set (left_idx,
    # right_idx) values to every node in the subtree rooted at $node.
    # Returns the right index assigned to $node.
    my ($node, $lft) = @_;
    $lft ||= 1;
    my $rgt = $lft + 1;    # a leaf ends up with right = left + 1

    # Drain all children up front: $sel_children is a single shared
    # statement handle, so it cannot stay open across the recursion.
    execute_sth($sel_children, $node);
    my @kids;
    while (my $row = $sel_children->fetchrow_arrayref) {
        push @kids, $row->[0];
    }

    for my $kid (@kids) {
        $rgt = walktree($kid, $rgt) + 1;
    }

    execute_sth($upd_nestedSet, $lft, $rgt, $node);
    handle_progress(LOG_CHUNK) if $verbose;
    return $rgt;
}
sub handle_progress{
# Print a progress line roughly every $chunk processed rows.
# Calling convention:
#   handle_progress(0)          -- (re)initialise the counters
#   handle_progress($chunk)     -- count one more row, report if due
#   handle_progress($chunk, 1)  -- final tally (no extra row counted)
my $chunk = shift;
my $final = shift;
# The 'our' declarations take effect at compile time; the
# '... if $chunk == 0' modifier only guards the run-time
# (re)initialisation, so these package variables persist between calls.
our $_time = time() if $chunk == 0;
our $_n = 0 if $chunk == 0;
our $_last_n = 0 if $chunk == 0;
return if $chunk == 0;
$_n++ unless $final;
# Report when the final flag is set, or when another $chunk rows have
# accumulated since the last report.
if ($final || (($_n-$chunk) >= $_last_n)) {
my $elapsed = time() - $_time;
my $fmt = "\t%d done (in %d secs, %4.1f rows/s)\n";
printf STDERR $fmt, $_n, $elapsed, ($_n-$_last_n)/($elapsed||1);
$_time = time() if $elapsed;
$_last_n = $_n;
}
}
sub compute_tc {
    # Rebuild the transitive closure (node_path rows) for one tree.
    # Strategy: delete any existing paths, seed the length-0 and
    # length-1 paths, then extend paths by one edge per pass until a
    # pass inserts nothing.  (Adapted from tree-precompute.)
    my ($dbh, $tree) = @_;

    my $del_sql =
        "DELETE FROM node_path WHERE child_node_id IN ("
        ."SELECT node_id FROM node WHERE tree_id = ?)";
    my $zero_sql =
        "INSERT INTO node_path (child_node_id, parent_node_id, distance)"
        ." SELECT n.node_id, n.node_id, 0 FROM node n WHERE n.tree_id = ?";
    my $init_sql =
        "INSERT INTO node_path (child_node_id, parent_node_id, path, distance)"
        ." SELECT e.child_node_id, e.parent_node_id, n.left_idx, 1"
        ." FROM edge e, node n"
        ." WHERE e.child_node_id = n.node_id AND n.tree_id = ?";
    my $path_sql =
        "INSERT INTO node_path (child_node_id, parent_node_id, path, distance)"
        ." SELECT e.child_node_id, p.parent_node_id,"
        ." p.path||'.'||n.left_idx, p.distance+1"
        ." FROM node_path p, edge e, node n"
        ." WHERE p.child_node_id = e.parent_node_id"
        ." AND n.node_id = e.child_node_id AND n.tree_id = ?"
        ." AND p.distance = ?";

    print STDERR "\tdeleting existing transitive closure\n" if $verbose;
    execute_sth(prepare_sth($dbh, $del_sql), $tree);

    print STDERR "\tcreating zero length paths\n" if $verbose;
    execute_sth(prepare_sth($dbh, $zero_sql), $tree);

    print STDERR "\tcreating paths with length=1\n" if $verbose;
    execute_sth(prepare_sth($dbh, $init_sql), $tree);

    # Extend until a pass affects zero rows; DBI reports that as "0E0",
    # which is true as a string but 0 numerically.
    my $extend = prepare_sth($dbh, $path_sql);
    my $dist = 1;
    while (1) {
        print STDERR "\textending paths with length=$dist\n" if $verbose;
        last unless execute_sth($extend, $tree, $dist) > 0;
        $dist++;
    }
}
sub end_work {
    # Finish a transaction: commit when $commit is true, otherwise
    # roll back.  (Copied originally from load_itis_taxonomy.pl.)
    my ($dbh, $commit) = @_;

    # Nothing to do without an open, active handle.
    return if !$dbh || !$dbh->{Active};

    my $ok = $commit ? $dbh->commit() : $dbh->rollback();
    unless ($ok) {
        my $action = $commit ? "commit " : "rollback ";
        print STDERR $action . "failed: " . $dbh->errstr;
    }

    # Historical behaviour: only a completely undefined $commit (as
    # opposed to a false one) also closes the connection.
    $dbh->disconnect() unless defined($commit);
}
sub connect_to_db {
    # Dispatch to the driver-specific connector based on the driver
    # name embedded in the DBI connection string.
    my $cstr = $_[0];
    if ($cstr =~ /:mysql:/i) {
        return connect_to_mysql(@_);
    }
    elsif ($cstr =~ /:pg:/i) {
        return connect_to_pg(@_);
    }
    die "can't understand driver in connection string: $cstr\n";
}
sub connect_to_pg {
# Open a PostgreSQL connection (PrintError off, RaiseError on,
# transactions managed manually via AutoCommit => 0).
my ($cstr, $user, $pass) = @_;
my $dbh = DBI->connect($cstr, $user, $pass,
{PrintError => 0,
RaiseError => 1,
AutoCommit => 0});
# BUGFIX: on failure $dbh is undef, so the old "$dbh->errstr" died
# with an unrelated "Can't call method" error; the connect error must
# be read from the DBI class itself.
$dbh || &error("DBI connect failed : ", DBI->errstr);
return($dbh);
} # End of ConnectToPG subfunction
sub connect_to_mysql {
# Open a MySQL connection (PrintError off, RaiseError on,
# transactions managed manually via AutoCommit => 0).
my ($cstr, $user, $pass) = @_;
my $dbh = DBI->connect($cstr,
$user,
$pass,
{PrintError => 0,
RaiseError => 1,
AutoCommit => 0});
# BUGFIX: on failure $dbh is undef, so the old "$dbh->errstr" died
# with an unrelated "Can't call method" error; the connect error must
# be read from the DBI class itself.
$dbh || &error("DBI connect failed : ", DBI->errstr);
return($dbh);
}
sub prepare_sth {
    # Prepare a statement, dying with the offending SQL on failure.
    my $dbh = shift;
    my $sth = $dbh->prepare(@_);    # @_ now holds the SQL (plus any attrs)
    unless ($sth) {
        die "failed to prepare statement '$_[0]': " . $dbh->errstr . "\n";
    }
    return $sth;
}
sub execute_sth {
    # Execute a prepared statement handle with the remaining arguments
    # as bind values.  Returns DBI's row count on success; on failure
    # the file-scoped $dbh is disconnected and we die with the
    # driver's error text.
    my $sth = shift;
    my $rv  = $sth->execute(@_);
    return $rv if $rv;
    $dbh->disconnect();
    die "failed to execute statement: " . $sth->errstr . "\n";
}    # End of execute_sth subfunction
sub last_insert_id {
    # Return the ID generated by the most recent INSERT on $table_name.
    # Assumes no other client is interleaving inserts on the same
    # tables while this runs.
    my ($dbh, $table_name, $driver) = @_;

    if (lc($driver) eq 'mysql') {
        return $dbh->{'mysql_insertid'};
    }
    if ((lc($driver) eq 'pg') || ($driver eq 'PostgreSQL')) {
        # PostgreSQL: read the current value of the table's sequence.
        my $sql  = "SELECT currval('${table_name}_pk_seq')";
        my $stmt = $dbh->prepare_cached($sql);
        my $rv   = $stmt->execute;
        die "failed to retrieve last ID generated\n" unless $rv;
        my $row = $stmt->fetchrow_arrayref;
        $stmt->finish;
        return $row->[0];
    }
    die "don't know what to do with driver $driver\n";
}    # End of last_insert_id subfunction
# parse_dsn -- pulled from the DBI module: split a DSN of the form
# "dbi:Driver(attr=>val):rest" into its components.
sub parse_dsn {
    # Returns (scheme, driver, attr_string, attr_hashref, remainder),
    # or the empty list when the string is not a DSN.  The scheme is
    # always lower-cased "dbi"; the driver falls back to $ENV{DBI_DRIVER}.
    my ($dsn) = @_;
    return unless $dsn =~ s/^(dbi):(\w*?)(?:\((.*?)\))?://i;
    my $scheme = lc($1);
    my $driver = $2 || $ENV{DBI_DRIVER} || '';
    my $attr   = $3;
    my $attr_hash;
    $attr_hash = { split /\s*=>?\s*|\s*,\s*/, $attr, -1 } if $attr;
    return ($scheme, $driver, $attr, $attr_hash, $dsn);
}
sub print_help {
# Print a usage summary and exit.  $opt selects the amount of detail:
# "full" appends the argument/option listing to the usage string;
# anything else prints the usage string alone.
my ($opt) = @_;
my $usage = "USAGE:\n".
" phyopt.pl -d 'DBI:mysql:database=biosql;host=localhost'\n".
" -u UserName -p dbPass -t MyTree\n";
my $args = "REQUIRED ARGUMENTS:\n".
" --dsn # The DSN string the database to connect to\n".
" # Must conform to:\n".
" # 'DBI:mysql:database=biosql;host=localhost'\n".
"\n".
"OPTIONS:\n".
" --dbname # Name of the database to connect to\n".
" --host # Database host\n".
" --driver # Driver for connecting to the database\n".
" --dbuser # Name to log on to the database with\n".
" --dbpass # Password to log on to the database with\n".
" --tree # Name of the tree to optimize\n".
" --version # Show the program version\n".
" --usage # Show program usage\n".
" --help # Show this help message\n".
" --man # Open full program manual\n".
" --verbose # Run the program with maximum output\n".
" --quiet # Run program with minimal output\n";
# NOTE(review): the string "full" is used here as a regex pattern, so
# any $opt merely containing "full" also matches; $opt eq 'full' is
# presumably what was meant.
if ($opt =~ "full") {
print "\n$usage\n\n";
print "$args\n\n";
}
else {
print "\n$usage\n\n";
}
exit;
}
=head1 NAME
phyopt.pl - Optimize trees in a PhyloDB database
=head1 VERSION
This documentation refers to phyopt.pl version 1.0.
=head1 SYNOPSIS
USAGE: phyopt.pl -d 'DBI:mysql:database=biosql;host=localhost'
-u UserName -p dbPass -t MyTree
REQUIRED ARGUMENTS:
--dsn # The DSN string the database to connect to
# Must conform to:
# 'DBI:mysql:database=biosql;host=localhost'
--dbuser # User name to connect with
--dbpass # Password to connect with
ALTERNATIVE TO --dsn:
--driver # "mysql", "Pg", "Oracle" (default "mysql")
--dbname # Name of database to use
--host # optional: host to connect with
ADDITIONAL OPTIONS:
--tree # Name of the tree to optimize.
# Otherwise the entire db is optimized.
--quiet # Run the program in quiet mode.
--verbose # Run the program in verbose mode.
ADDITIONAL INFORMATION:
--version # Show the program version
--usage # Show program usage
--help # Print short help message
--man # Open full program manual
=head1 DESCRIPTION
The phyopt program will optimize trees in a PhyloDB database by computing
transitive closure paths as well as the left and right index values for
the nested set indexes.
=head1 COMMAND LINE ARGUMENTS
=head2 Required Arguments
=over
=item -d, --dsn
The DSN of the database to connect to; default is the value in the
environment variable DBI_DSN. If DBI_DSN has not been defined and
the string is not passed to the command line, the dsn will be
constructed from --driver, --dbname, --host
DSN must be in the form:
DBI:mysql:database=biosql;host=localhost
=item -u, --dbuser
The user name to connect with; default is the value in the environment
variable DBI_USER.
This user must have permission to create databases.
=item -p, --dbpass
The password to connect with; default is the value in the environment
variable DBI_PASSWORD. If this is not provided at the command line
the user is prompted.
=back
=head2 Alternative to --dsn
An alternative to passing the full dsn at the command line is to
provide the components separately.
=over 2
=item --host
The database host to connect to; default is localhost.
=item --dbname
The database name to connect to; default is biosql.
=item --driver
The database driver to connect with; default is mysql.
Options other then mysql are currently not supported.
=back
=head2 Additional Options
=over 2
=item -t, --tree
Name of the tree that will be optimized. Otherwise all trees in the
database will be optimized.
=item -q, --quiet
Run the program in quiet mode.
=item --verbose
Execute the program in verbose mode.
=back
=head2 Additional Information
=over 2
=item --version
Show the program version.
=item --usage
Show program usage statement.
=item --help
Show a short help message.
=item --man
Show the full program manual.
=back
=head1 EXAMPLES
B<Optimize a single tree>
The following command options would optimize the tree named MyTree in the
biosql database.
phyopt.pl -d 'DBI:mysql:database=biosql;host=localhost'
-u name -p password -t MyTree
B<Optimize entire database>
The following command options would optimize all of the individual
trees in the database named Angio.
phyopt.pl -d 'DBI:mysql:database=Angio;host=localhost'
-u name -p password
This could also be done by passing the individual components for
connecting to the database.
phyopt.pl --driver mysql --dbname Angio --host localhost
-u name -p password
=head1 DIAGNOSTICS
The error messages below are followed by descriptions of the error
and possible solutions.
=head1 CONFIGURATION AND ENVIRONMENT
Many of the options passed at the command line can be set as
options in the user's environment.
=over 2
=item DBI_USER
User name to connect to the database.
=item DBI_PASSWORD
Password for the database connection
=item DBI_DSN
DSN for database connection.
=back
For example in the bash shell this would be done be editing your .bashrc file
to contain:
export DBI_USER=yourname
export DBI_PASS=yourpassword
export DBI_DSN='DBI:mysql:database=biosql;host=localhost'
=head1 DEPENDENCIES
The phyopt.pl program is dependent on the following Perl modules:
=over 2
=item DBI - L<http://dbi.perl.org>
The PERL Database Interface (DBI) module allows for connections
to multiple databases.
=item DBD::mysql -
L<http://search.cpan.org/~capttofu/DBD-mysql-4.005/lib/DBD/mysql.pm>
MySQL database driver for DBI module.
=item DBD::Pg -
L<http://search.cpan.org/~rudy/DBD-Pg-1.32/Pg.pm>
PostgreSQL database driver for the DBI module.
=item Getopt::Long - L<http://perldoc.perl.org/Getopt/Long.html>
The Getopt module allows for the passing of command line options
to perl scripts.
=item Bio::Tree - L<http://www.bioperl.org>
The Bio::Tree module is part of the bioperl package.
=back
A RDBMS is also required. This can be one of:
=over 2
=item MySQL - L<http://www.mysql.com>
=item PostgreSQL - L<http://www.postgresql.org>
=back
=head1 BUGS AND LIMITATIONS
Known limitations:
=over 2
=item *
Currently only stable with the MySQL Database driver.
=item *
DSN string must currently be in the form:
DBI:mysql:database=biosql;host=localhost
=back
Please report additional problems to
James Estill E<lt>JamesEstill at gmail.comE<gt>
=head1 SEE ALSO
The program phyopt.pl is a component of a package of command line programs
for PhyloDB management. Additional programs include:
=over
=item phyinit.pl
Initialize a PhyloDB database.
=item phyimport.pl
Import common phylogenetic file formats.
=item phyexport.pl
Export tree data in PhyloDB to common file formats.
=item phyqry.pl
Return a standard report of information for a given tree.
=item phymod.pl
Modify an existing phylogenetic database by deleting, adding or
copying branches.
=back
=head1 LICENSE
This file is part of BioSQL.
BioSQL is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
BioSQL is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with BioSQL. If not, see <http://www.gnu.org/licenses/>.
=head1 AUTHORS
James C. Estill E<lt>JamesEstill at gmail.comE<gt>
Hilmar Lapp E<lt>hlapp at gmx.netE<gt>
William Piel E<lt>william.piel at yale.eduE<gt>
=head1 HISTORY
Started: 07/04/2007
Updated: 08/19/2007
=cut
| sauloal/projects | taxonomy/biosql/biosql-schema/scripts/phyopt.pl | Perl | mit | 24,235 |
package Agua::Common::Database;
use Moose::Role;
=head2
PACKAGE Agua::Common::Database
PURPOSE
DATABASE HANDLE CREATION AND GENERIC TABLE ACCESS METHODS FOR Agua::Common
=cut
#### USE LIB FOR INHERITANCE
use FindBin qw($Bin);
use lib "$Bin/../../";
use Term::ReadKey;
use Agua::DBaseFactory;
use Data::Dumper;
# Name of the database to connect to (may be overridden per call to setDbh).
has 'database' => ( isa => 'Str|Undef', is => 'rw' );
# The live database object created by setDbh().
has 'db' => ( isa => 'Agua::DBase::MySQL', is => 'rw', required => 0 );
sub setDbh {
# Create a database object via Agua::DBaseFactory and cache it on
# $self->db.  Connection parameters are resolved in order: explicit
# $args, values already on the object, the [database] section of the
# conf file, and -- for test users -- the TEST* conf keys.  Returns
# the new DB object, or undef (after logging) when a required
# parameter is still missing.
my $self = shift;
my $args = shift;
$self->logNote("args", $args);
my $database = $args->{database} || $self->database();
my $dbuser = $args->{dbuser};
my $dbpassword = $args->{dbpassword};
my $dbtype = $args->{dbtype};
my $dbfile = $args->{dbfile};
my $logfile = $self->logfile();
my $log = $self->log();
my $printlog = $self->printlog();
$self->logNote("ARGS database", $database);
$self->logNote("ARGS dbtype", $dbtype);
$self->logNote("ARGS dbuser", $dbuser);
#$self->logNote("AFTER dbpassword", $dbpassword);
# Fall back to the conf file for anything not supplied in $args.
$dbfile = $self->conf()->getKey('database', 'DBFILE') if not defined $dbfile;
$dbtype = $self->conf()->getKey('database', 'DBTYPE') if not defined $dbtype;
#$database = $self->conf()->getKey('database', undef) if not defined $database;
#$database = $self->conf()->getKey('database:DATABASE', undef) if not defined $database;
$self->logNote("database", $database);
$dbuser = $self->conf()->getKey('database', 'USER') if not defined $dbuser;
$dbpassword = $self->conf()->getKey('database', 'PASSWORD') if not defined $dbpassword;
$database = $self->conf()->getKey('database', 'DATABASE') if not defined $database or $database eq "";
$self->logNote("CONF database", $database);
$self->logNote("CONF dbtype", $dbtype);
$self->logNote("CONF dbuser", $dbuser);
# Test users read from the TEST* conf keys instead.
# NOTE(review): the regular USER/PASSWORD/DATABASE keys were already
# applied above, so these fallbacks only take effect when those keys
# are absent from the conf file -- confirm this ordering is intended.
if ( $self->can('isTestUser') and $self->isTestUser() ) {
$dbuser = $self->conf()->getKey('database', 'TESTUSER') if not defined $dbuser;
$dbpassword = $self->conf()->getKey('database', 'TESTPASSWORD') if not defined $dbpassword;
$database = $self->conf()->getKey('database', 'TESTDATABASE') if not defined $database;
}
$self->logNote("AFTER database", $database);
$self->logNote("AFTER dbtype", $dbtype);
$self->logNote("AFTER dbuser", $dbuser);
#$self->logNote("AFTER dbpassword", $dbpassword);
$self->logError("dbtype not defined") and return if not $dbtype;
$self->logError("dbuser not defined") and return if not $dbuser;
$self->logError("dbpassword not defined") and return if not $dbpassword;
$self->logError("database not defined") and return if not $database;
#### SET DATABASE IF PROVIDED IN JSON
if ( $self->can('json') ) {
my $json = $self->json();
$database = $json->{database} if defined $json and defined $json->{database} and $json->{database};
}
$self->logNote("FINAL database", $database);
$self->logNote("FINAL dbtype", $dbtype);
$self->logNote("FINAL dbuser", $dbuser);
$self->logNote("FINAL dbpassword", $dbpassword);
##### CREATE DB OBJECT USING DBASE FACTORY
my $db = Agua::DBaseFactory->new(
$dbtype,
{
dbfile => $dbfile,
database => $database,
dbuser => $dbuser,
dbpassword => $dbpassword,
logfile => $logfile,
log => $log,
printlog => $printlog,
parent => $self
}
) or print qq{ error: 'Agua::Database::setDbh Cannot create database object $database: $!' } and return;
$self->logError("db not defined") and return if not defined $db;
$self->db($db);
return $db;
}
sub grantPrivileges {
# Grant ALL privileges on $database to $dbuser@localhost by writing a
# GRANT script to $tempfile and piping it into the mysql client as
# root.  Returns undef (after logging) when any argument is missing.
#
# NOTE(review): $privileges and $host are validated but never used --
# the GRANT is hard-coded to ALL PRIVILEGES at localhost; confirm
# whether they were meant to parameterise the statement.
my $self = shift;
my $tempfile = shift;
my $rootdbpassword = shift;
my $database = shift;
my $dbuser = shift;
my $dbpassword = shift;
my $privileges = shift;
my $host = shift;
$self->logError("tempfile not defined") and return if not defined $tempfile;
$self->logError("rootdbpassword not defined") and return if not defined $rootdbpassword;
$self->logError("database not defined") and return if not defined $database;
$self->logError("dbuser not defined") and return if not defined $dbuser;
$self->logError("dbpassword not defined") and return if not defined $dbpassword;
$self->logError("privileges not defined") and return if not defined $privileges;
$self->logError("host not defined") and return if not defined $host;
#### CREATE DATABASE AND Agua USER AND PASSWORD
$self->logNote("tempfile", $tempfile);
my $create = qq{
USE mysql;
GRANT ALL PRIVILEGES ON $database.* TO $dbuser\@localhost IDENTIFIED BY '$dbpassword';
FLUSH PRIVILEGES;};
# BUGFIX: write the script with Perl I/O instead of `echo "$create"`:
# passwords or names containing shell metacharacters previously
# corrupted the shell command and leaked the password to the shell.
open my $GRANTFILE, '>', $tempfile
or $self->logError("Cannot open tempfile: $tempfile") and return;
print $GRANTFILE $create;
close $GRANTFILE
or $self->logError("Cannot close tempfile: $tempfile") and return;
# SECURITY(review): the root password is still exposed on the mysql
# command line (visible in the process list); --defaults-extra-file
# would avoid that.
my $command = "mysql -u root -p$rootdbpassword < $tempfile";
$self->logNote("$command");
print `$command`;
unlink $tempfile;
}
sub inputRootPassword {
    # Prompt for the MySQL root password with terminal echo disabled,
    # store it on the object and return it.
    my $self = shift;

    ReadMode 2;    # suppress echo while the password is typed
    my $password = $self->inputValue("Root dbpassword (will not appear on screen)");
    ReadMode 0;    # restore normal terminal behaviour

    $self->rootdbpassword($password);
    return $password;
}
sub inputValue {
# Prompt on STDOUT until a non-empty value is entered; a blank entry
# accepts $default when one exists.  Returns the chosen value, or
# undef (after logging) when no prompt message was given.
my $self = shift;
my $message = shift;
my $default = shift;
$self->logError("message is not defined") and return if not defined $message;
$default = '' if not defined $default;
$self->logDebug("$message [$default]: ");
print "$message [$default]: ";
my $input = '';
while ( $input =~ /^\s*$/ )
{
$input = <STDIN>;
# NOTE(review): this strips ALL whitespace, including spaces inside
# the value -- confirm multi-word values are never expected here.
$input =~ s/\s+//g;
$default = $input if $input;
# A non-empty entry (or an existing default on blank entry) ends the
# loop; with no default and blank input we re-prompt indefinitely.
print "\n" and return $default if $default;
$self->logDebug("$message [$default]: ");
print "$message [$default]: ";
}
}
sub _updateTable {
=head2
SUBROUTINE _updateTable
PURPOSE
UPDATE ONE OR MORE ENTRIES IN A TABLE
INPUTS
1. NAME OF TABLE
2. HASH CONTAINING KEY-VALUE PAIRS IDENTIFYING THE ROWS (WHERE CLAUSE)
3. ARRAY OF KEY FIELDS THAT MUST BE DEFINED IN THE WHERE HASH
4. HASH CONTAINING THE NEW VALUES TO SET
5. ARRAY OF FIELDS TO TAKE FROM THE SET HASH
OUTPUTS
RETURNS THE RESULT OF THE UPDATE QUERY (UNDEF ON FAILURE)
=cut
my $self = shift;
my $table = shift;
my $hash = shift;
my $required_fields = shift;
my $set_hash = shift;
my $set_fields = shift;
$self->logNote("Common::_updateTable(table, hash, required_fields, set_hash, set_fields)");
# Validate inputs (logError returns undef so each check aborts).
$self->logError("hash not defined") and return if not defined $hash;
$self->logError("required_fields not defined") and return if not defined $required_fields;
$self->logError("set_hash not defined") and return if not defined $set_hash;
$self->logError("set_fields not defined") and return if not defined $set_fields;
$self->logError("table not defined") and return if not defined $table;
#### CHECK REQUIRED FIELDS ARE DEFINED
my $not_defined = $self->db()->notDefined($hash, $required_fields);
$self->logError("undefined values: @$not_defined") and return if @$not_defined;
#### GET WHERE
my $where = $self->db()->where($hash, $required_fields);
#### GET SET
my $set = $self->db()->set($set_hash, $set_fields);
$self->logError("set values not defined") and return if not defined $set;
##### UPDATE TABLE
my $query = qq{UPDATE $table $set $where};
$self->logNote("$query");
my $result = $self->db()->do($query);
$self->logNote("result", $result);
# BUGFIX: return the DBI result explicitly so callers can detect
# failure (previously the sub implicitly returned logNote's value).
return $result;
}
sub _addToTable {
=head2
SUBROUTINE _addToTable
PURPOSE
INSERT ONE ENTRY INTO A TABLE
INPUTS
1. NAME OF TABLE
2. HASH CONTAINING TABLE FIELD KEY-VALUE PAIRS
3. ARRAY OF KEY FIELDS THAT MUST BE DEFINED
4. OPTIONAL ARRAY OF FIELDS TO INSERT (DEFAULT: ALL TABLE FIELDS)
=cut
my ($self, $table, $hash, $required_fields, $inserted_fields) = @_;

# Sanity-check the inputs (logError returns undef, so each of these
# aborts the insert).
$self->logError("hash not defined for table: $table") and return if not defined $hash;
$self->logError("required_fields not defined for table: $table") and return if not defined $required_fields;
$self->logError("table not defined") and return if not defined $table;

# All required fields must carry defined values in the input hash.
my $not_defined = $self->db()->notDefined($hash, $required_fields);
$self->logError("table '$table' undefined values: @$not_defined") and return if @$not_defined;

# Default to every column of the table when no explicit field list
# was supplied.
$inserted_fields = $self->db()->fields($table) if not defined $inserted_fields;
$self->logError("table '$table' fields not defined") and return if not defined $inserted_fields;

# Build and run the INSERT.
my $fields_csv = join ",", @$inserted_fields;
my $values_csv = $self->db()->fieldsToCsv($inserted_fields, $hash);
my $query = qq{INSERT INTO $table ($fields_csv)
VALUES ($values_csv)};
$self->logNote("$query");

my $result = $self->db()->do($query);
$self->logNote("result", $result);
return $result;
}
sub _removeFromTable {
=head2
SUBROUTINE _removeFromTable
PURPOSE
DELETE MATCHING ENTRIES FROM A TABLE
INPUTS
1. NAME OF TABLE
2. HASH CONTAINING TABLE FIELD KEY-VALUE PAIRS
3. ARRAY OF KEY FIELDS THAT MUST BE DEFINED
OUTPUTS
RETURNS 1 ON SUCCESS, 0 ON FAILURE
=cut
my ($self, $table, $hash, $required_fields) = @_;

# Validate inputs before touching the database.
$self->logError("hash not defined") and return if not defined $hash;
$self->logError("required_fields not defined") and return if not defined $required_fields;
$self->logError("table not defined") and return if not defined $table;

# All required fields must carry defined values in the input hash.
my $not_defined = $self->db()->notDefined($hash, $required_fields);
$self->logError("undefined values: @$not_defined") and return if @$not_defined;

# Build the WHERE clause from the required fields and run the DELETE.
my $where = $self->db()->where($hash, $required_fields);
my $query = qq{DELETE FROM $table
$where};
$self->logNote("\n$query");

my $result = $self->db()->do($query);
$self->logNote("result", $result);
return defined $result ? 1 : 0;
}
sub arrayToArrayhash {
=head2
SUBROUTINE arrayToArrayhash
PURPOSE
GROUP AN ARRAY OF HASHES INTO AN ARRAYHASH KEYED ON ONE FIELD, E.G.:
{
key1 : [ entry1, entry2 ],
key2 : [ ... ]
...
}
=cut
my ($self, $array, $key) = @_;

my $grouped = {};
for my $entry ( @$array ) {
# Bail out (returning undef/empty list) as soon as any entry
# lacks a defined value for the grouping key.
if ( not defined $entry->{$key} ) {
$self->logNote("entry->{$key} not defined in entry. Returning.");
return;
}
# push autovivifies the per-key array, preserving input order.
push @{ $grouped->{ $entry->{$key} } }, $entry;
}

return $grouped;
}
1;
| agua/agua | lib/Agua/Common/Database.pm | Perl | mit | 10,341 |
#!/usr/bin/env perl
#
#-------------------------------------------------------------------------------
# Copyright (c) 2014-2019 René Just, Darioush Jalali, and Defects4J contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#-------------------------------------------------------------------------------
=pod
=head1 NAME
create_mml.pl -- create a Major-compatible MML file, which defines program mutations.
=head1 SYNOPSIS
create_mml.pl -p project_id -b bug_id -c classes_list -o out_dir
=head1 OPTIONS
=over 4
=item -p C<project_id>
The id of the project for which the MML file is generated.
See L<Project|Project/"Available Project IDs"> module for available project IDs.
=item -b C<bug_id>
The bug id for which the MML file is generated. Format: C<\d+>.
=item -c F<classes_list>
The file contains all classes that should be mutated -- one class per line.
=item -o F<out_dir>
The output directory to which the generated MML files are written.
=back
=head1 DESCRIPTION
Generates an MML file for the given C<bug_id>. The file F<classes_list> has to provide the
list of classes that should be mutated.
The generated MML file is named F<C<bug_id>.mml> and written to F<out_dir>.
=cut
use warnings;
use strict;
use FindBin;
use File::Basename;
use Cwd qw(abs_path);
use Getopt::Std;
use Pod::Usage;
use lib abs_path("$FindBin::Bin/../core");
use Constants;
use Mutation;
use Utils;
#
# Process arguments and issue usage message if necessary.
#
my %cmd_opts;
getopts('p:c:o:b:', \%cmd_opts) or pod2usage(1);
pod2usage(1) unless defined $cmd_opts{p} and defined $cmd_opts{c} and defined $cmd_opts{o} and defined $cmd_opts{b};
# TODO: Unused parameter: PID
my $PID = $cmd_opts{p};
my $CLASSES = Utils::get_abs_path($cmd_opts{c});
my $OUT_DIR = Utils::get_abs_path($cmd_opts{o});
my $BID = $cmd_opts{b};
# Validate inputs: the bug id must be purely numeric and the class
# list file must exist.
$BID =~ /^(\d+)$/ or die "Wrong bug id format (\\d+): $BID!";
-e $CLASSES or die "File with classes to mutate does not exist: $CLASSES";
# The mutation operators that should be enabled
my @MUT_OPS = ("AOR", "LOR","SOR", "COR", "ROR", "ORU", "LVR", "STD");
# The MML source is written to <out_dir>/<bug_id>.mml.
# NOTE(review): the existence check below implies Mutation::create_mml
# also compiles the binary .mml.bin next to the source -- confirm.
my $mml_src = "$OUT_DIR/$BID.mml";
my $mml_bin = "${mml_src}.bin";
Mutation::create_mml($CLASSES, $mml_src, \@MUT_OPS);
-e "$mml_bin" or die "Mml file does not exist: $mml_bin!";
| rjust/defects4j | framework/util/create_mml.pl | Perl | mit | 3,284 |
# The Computer Language Benchmarks Game
# http://shootout.alioth.debian.org/
# contributed by Marc Lehmann
use strict;
use Coro;
use Coro::Semaphore;
# 503 workers arranged in a ring.  $data[$i] is the mailbox for worker
# $i -- only slot 0 starts with the token count (taken from @ARGV) --
# and $mutex[$i] is the semaphore worker $i blocks on.
my $threads = 503;
my $result;
my @data = $ARGV[0];
my @mutex;
sub thr_func {
    # Worker for one ring position: block on our semaphore, and when
    # woken either pass the decremented token to the next position or
    # -- when the token has reached zero -- print the successor id and
    # terminate the whole program.
    #
    # BUGFIX: the original read "my ($id, $next) = @_;" followed by
    # "my $next = ...", redeclaring $next in the same scope (a compile
    # warning and a dead binding -- the caller never passes a second
    # argument).  The parameter list now takes only $id.
    my ($id) = @_;
    my $next = ($id + 1) % $threads;
    while () {
        $mutex[$id]->down;
        if ($data[$id]) {
            $data[$next] = $data[$id] - 1;
            $mutex[$next]->up;
        } else {
            $result = $next;
            print "$next\n";
            exit;
        }
    }
}
# Create one semaphore (initially down) and one coroutine per ring
# position, then hand the token to worker 0 and start the scheduler.
# NOTE(review): 'new Coro::Semaphore 0' is indirect object syntax;
# Coro::Semaphore->new(0) would be the safer spelling.
for (0 .. $threads - 1) {
$mutex[$_] = new Coro::Semaphore 0;
async \&thr_func, $_;
}
$mutex[0]->up;
schedule;
| chrislo/sourceclassifier | sources/perl/threadring.perl-2.perl | Perl | mit | 644 |
package Google::Ads::AdWords::v201406::CustomerSyncService::CustomerSyncServiceInterfacePort;
use strict;
use warnings;
use Class::Std::Fast::Storable;
use Scalar::Util qw(blessed);
use base qw(SOAP::WSDL::Client::Base);
# only load if it hasn't been loaded before
require Google::Ads::AdWords::v201406::TypeMaps::CustomerSyncService
if not Google::Ads::AdWords::v201406::TypeMaps::CustomerSyncService->can('get_class');
sub START {
# Class::Std-style construction hook: apply defaults for the service
# endpoint, the type-map class resolver and the element prefix unless
# the caller supplied them in the constructor args ($_[2]).
# Auto-generated by SOAP::WSDL -- edits here are lost on regeneration.
$_[0]->set_proxy('https://adwords.google.com/api/adwords/ch/v201406/CustomerSyncService') if not $_[2]->{proxy};
$_[0]->set_class_resolver('Google::Ads::AdWords::v201406::TypeMaps::CustomerSyncService')
if not $_[2]->{class_resolver};
$_[0]->set_prefix($_[2]->{use_prefix}) if exists $_[2]->{use_prefix};
}
sub get {
# SOAP operation 'get' (document/literal style): forwards $body and
# $header to SOAP::WSDL::Client::Base::call with the serialization
# classes for this service's request/header parts.
# Auto-generated by SOAP::WSDL -- edits here are lost on regeneration.
my ($self, $body, $header) = @_;
die "get must be called as object method (\$self is <$self>)" if not blessed($self);
return $self->SUPER::call({
operation => 'get',
soap_action => '',
style => 'document',
body => {
'use' => 'literal',
namespace => 'http://schemas.xmlsoap.org/wsdl/soap/',
encodingStyle => '',
parts => [qw( Google::Ads::AdWords::v201406::CustomerSyncService::get )],
},
header => {
'use' => 'literal',
namespace => 'http://schemas.xmlsoap.org/wsdl/soap/',
encodingStyle => '',
parts => [qw( Google::Ads::AdWords::v201406::CustomerSyncService::RequestHeader )],
},
headerfault => {
}
}, $body, $header);
}
1;
__END__
=pod
=head1 NAME
Google::Ads::AdWords::v201406::CustomerSyncService::CustomerSyncServiceInterfacePort - SOAP Interface for the CustomerSyncService Web Service
=head1 SYNOPSIS
use Google::Ads::AdWords::v201406::CustomerSyncService::CustomerSyncServiceInterfacePort;
my $interface = Google::Ads::AdWords::v201406::CustomerSyncService::CustomerSyncServiceInterfacePort->new();
my $response;
$response = $interface->get();
=head1 DESCRIPTION
SOAP Interface for the CustomerSyncService web service
located at https://adwords.google.com/api/adwords/ch/v201406/CustomerSyncService.
=head1 SERVICE CustomerSyncService
=head2 Port CustomerSyncServiceInterfacePort
=head1 METHODS
=head2 General methods
=head3 new
Constructor.
All arguments are forwarded to L<SOAP::WSDL::Client|SOAP::WSDL::Client>.
=head2 SOAP Service methods
Method synopsis is displayed with hash refs as parameters.
The commented class names in the method's parameters denote that objects
of the corresponding class can be passed instead of the marked hash ref.
You may pass any combination of objects, hash and list refs to these
methods, as long as you meet the structure.
List items (i.e. multiple occurences) are not displayed in the synopsis.
You may generally pass a list ref of hash refs (or objects) instead of a hash
ref - this may result in invalid XML if used improperly, though. Note that
SOAP::WSDL always expects list references at maximum depth position.
XML attributes are not displayed in this synopsis and cannot be set using
hash refs. See the respective class' documentation for additional information.
=head3 get
Returns information about changed entities inside a customer's account. @param selector Specifies the filter for selecting changehistory events for a customer. @return A Customer->Campaign->AdGroup hierarchy containing information about the objects changed at each level. All Campaigns that are requested in the selector will be returned, regardless of whether or not they have changed, but unchanged AdGroups will be ignored.
Returns a L<Google::Ads::AdWords::v201406::CustomerSyncService::getResponse|Google::Ads::AdWords::v201406::CustomerSyncService::getResponse> object.
$response = $interface->get( {
selector => $a_reference_to, # see Google::Ads::AdWords::v201406::CustomerSyncSelector
 },
);
=head1 AUTHOR
Generated by SOAP::WSDL on Mon Jun 30 09:51:26 2014
=cut
| gitpan/GOOGLE-ADWORDS-PERL-CLIENT | lib/Google/Ads/AdWords/v201406/CustomerSyncService/CustomerSyncServiceInterfacePort.pm | Perl | apache-2.0 | 4,092 |
=head1 NAME
OpenResty::Spec::Install - Installation instructions for OpenResty servers
=head1 DESCRIPTION
This is a basic guideline for setting up an OpenResty server on your own machine. Someone has succeeded in setting up one on Windows XP using ActivePerl 5.8.8. The normal development environment is Linux though. If you have any particular question, feel free to ask us by sending an email to the authors.
=over
=item 1.
Grab the openresty package and unpack it to some place, let's say it's openresty.
=item 2.
Enter the openresty directory, run "perl Makefile.PL" to check missing dependencies:
$ cd openresty
$ perl Makefile.PL
$ sudo make # This will install missing dependencies
$ make test # run the test suite using the PgMocked backend
For the PostgreSQL database, you need to prepare a PostgreSQL account (e.g.
"agentzh"); and you need to create an empty database (e.g., "test"),
and you need to create a stored precedure language named "plpgsql" for that database,
contact your PostgreSQL DBA for it or read the PostgreSQL manual.
Normally, the following commands are used:
$ createdb test
$ createuser -P agentzh
$ createlang plpgsql test
=item 3.
Edit your F<etc/site_openresty.conf> file, change the configure settings
under [backend] section according to your previous settings. The default settings look like this:
[backend]
recording=0
# You should change the line below to type=Pg or type=PgFarm
type=PgMocked
host=localhost
user=agentzh
password=agentzh
database=test
Most of the time, you need to change C<type=PgMocked> to C<type=Pg>, as well as the last 3 lines (unless you're using exactly the same user, password, and database name). The default "PgMocked" backend is a mocked PostgreSQL database which is useful only for testing purposes.
=item 4.
For the Pg backend, one needs to create the "anonymous" role in his database (like "test"):
$ createuser -RSDL anonymous
You shouldn't grant any permissions to it.
=item 5.
Create a "tester" user account for our test suite in OpenResty (drop it if it already exists):
$ bin/openresty deluser tester
$ bin/openresty adduser tester
Give a password (say, "password") to its Admin role. Also create a second
user account "tester2":
$ bin/openresty adduser tester2
Update your F<etc/site_openresty.conf> to reflect your these settings:
[test_suite]
use_http=0
server=tester:password@localhost
server2=tester2:password@localhost
You may have your own passwords here though.
=item 6.
To have OpenResty's built-in actions C<RunView> and C<RunAction> working, you
need to build the F<restyscript> compiler in the subdirectory F<haskell/>. It
is written in Haskell and please see the README file in F<haskell/> for
detailed installation instruction:
L<http://svn.openfoundry.org/openapi/trunk/haskell/README>
If you're really nervous about installing GHC and other Haskell libraries,
you can fetch a binary version of the F<restyscript> compiler if you're
on an 32-bit x86 linux:
$ wget 'http://openresty.org/restyscript' -O haskell/bin/restyscript
$ chmod +x haskell/bin/restyscript
A quick test would be
$ echo 'select 3' | haskell/bin/restyscript view rs
select 3
=item 7.
Now you can already run the test suite without an external HTTP server like Apache (but with a true Pg backend):
$ make test
Also, it's already possible to start the OpenResty server using the standalone server provided by L<HTTP::Server::Simple>:
$ bin/openresty start
HTTP::Server::Simple: You can connect to your server at http://localhost:8000/
=item 8.
It's preferred to run OpenResty.pm as a FastCGI application under Apache2 or lighttpd. And Apache2 mod_fcgid is recommended.
See L<OpenResty::Spec::Install::Apache> For configuration docs for Apache. And see L<OpenResty::Spec::Install::Lighttpd> for configuration docs for Lighttpd.
=back
=head2 HOW TO TEST ONE SPECIFIC TEST SUITE FILE
It's also possible to debug a simple .t file, for instance,
make t/01-sanity.t -f dev.mk
Or use the OPENRESTY_TEST_SERVER environment to test a remote OpenResty server, for example:
  OPENRESTY_TEST_SERVER=tester:password@10.62.136.86 prove -Ilib -r t
where 10.62.136.86 is the IP (or hostname or URL) of your OpenResty server
being tested.
To test the Pg cluster rather than the desktop Pg, update your F<etc/site_openresty.conf>:
[backend]
type=PgFarm
and also set other items in the same group if necessary.
=head1 SYSTEM-WIDE INSTALLATION
Some times it's desired to do "C<sudo make install>" and install all the OpenResty modules and command line tools to the system-wide perl.
The steps are simple:
=over
=item 1.
Build the C<haskell/bin/restyscript> program
=item 2.
The usual CPAN module installation process:
perl Makefile.PL
make
sudo make install
=item 3.
Copy the config files to F</etc/openresty/>. Essentially, it is
mkdir /etc/openresty
cp etc/*.conf /etc/openresty/
=item 4.
Now you can test your installation by starting the standalone server for OpenResty:
openresty start
=back
=head1 AUTHOR
Agent Zhang (agentzh) C<< <agentzh@yahoo.cn> >>,
Laser Henry (laser) C<< <laserhenry@gmail.com> >>.
=head1 SEE ALSO
L<openresty>, L<OpenResty::Config>, L<OpenResty::Spec::AccountAdmin>, L<OpenResty::Spec::Upgrading>, L<OpenResty::Spec::TestSuite>, L<OpenResty>.
| beni55/old-openresty | lib/OpenResty/Spec/Install.pod | Perl | bsd-3-clause | 5,444 |
package Bio::KBase::fbaModelServices::Server;
use Data::Dumper;
use Moose;
use JSON;
use Bio::KBase::Log;
use Bio::KBase::AuthToken;
extends 'RPC::Any::Server::JSONRPC::PSGI';
has 'instance_dispatch' => (is => 'ro', isa => 'HashRef');
has 'user_auth' => (is => 'ro', isa => 'UserAuth');
has 'valid_methods' => (is => 'ro', isa => 'HashRef', lazy => 1,
builder => '_build_valid_methods');
has 'loggers' => (is => 'ro', required => 1, builder => '_build_loggers');
our $CallContext;
our %return_counts = (
'get_models' => 1,
'get_fbas' => 1,
'get_gapfills' => 1,
'get_gapgens' => 1,
'get_reactions' => 1,
'get_compounds' => 1,
'get_alias' => 1,
'get_aliassets' => 1,
'get_media' => 1,
'get_biochemistry' => 1,
'import_probanno' => 1,
'genome_object_to_workspace' => 1,
'genome_to_workspace' => 1,
'domains_to_workspace' => 1,
'compute_domains' => 1,
'add_feature_translation' => 1,
'genome_to_fbamodel' => 1,
'translate_fbamodel' => 1,
'build_pangenome' => 1,
'genome_heatmap_from_pangenome' => 1,
'ortholog_family_from_pangenome' => 1,
'pangenome_to_proteome_comparison' => 1,
'import_fbamodel' => 1,
'export_fbamodel' => 1,
'export_object' => 1,
'export_genome' => 1,
'adjust_model_reaction' => 1,
'adjust_biomass_reaction' => 1,
'addmedia' => 1,
'export_media' => 1,
'runfba' => 1,
'quantitative_optimization' => 1,
'generate_model_stats' => 1,
'minimize_reactions' => 1,
'export_fba' => 1,
'import_phenotypes' => 1,
'simulate_phenotypes' => 1,
'add_media_transporters' => 1,
'export_phenotypeSimulationSet' => 1,
'integrate_reconciliation_solutions' => 1,
'queue_runfba' => 1,
'queue_gapfill_model' => 1,
'gapfill_model' => 1,
'queue_gapgen_model' => 1,
'gapgen_model' => 1,
'queue_wildtype_phenotype_reconciliation' => 1,
'queue_reconciliation_sensitivity_analysis' => 1,
'queue_combine_wildtype_phenotype_reconciliation' => 1,
'run_job' => 1,
'queue_job' => 1,
'set_cofactors' => 1,
'find_reaction_synonyms' => 1,
'role_to_reactions' => 1,
'reaction_sensitivity_analysis' => 1,
'filter_iterative_solutions' => 1,
'delete_noncontributing_reactions' => 1,
'annotate_workspace_Genome' => 1,
'gtf_to_genome' => 1,
'fasta_to_ProteinSet' => 1,
'ProteinSet_to_Genome' => 1,
'fasta_to_ContigSet' => 1,
'ContigSet_to_Genome' => 1,
'probanno_to_genome' => 1,
'get_mapping' => 1,
'subsystem_of_roles' => 1,
'adjust_mapping_role' => 1,
'adjust_mapping_complex' => 1,
'adjust_mapping_subsystem' => 1,
'get_template_model' => 1,
'import_template_fbamodel' => 1,
'adjust_template_reaction' => 1,
'adjust_template_biomass' => 1,
'add_stimuli' => 1,
'import_regulatory_model' => 1,
'compare_models' => 1,
'compare_genomes' => 1,
'import_metagenome_annotation' => 1,
'models_to_community_model' => 1,
'metagenome_to_fbamodels' => 1,
'import_expression' => 1,
'import_regulome' => 1,
'create_promconstraint' => 1,
'add_biochemistry_compounds' => 1,
'update_object_references' => 1,
'add_reactions' => 1,
'remove_reactions' => 1,
'modify_reactions' => 1,
'add_features' => 1,
'remove_features' => 1,
'modify_features' => 1,
'import_trainingset' => 1,
'preload_trainingset' => 1,
'build_classifier' => 1,
'classify_genomes' => 1,
'build_tissue_model' => 1,
'version' => 1,
);
our %method_authentication = (
'get_models' => 'optional',
'get_fbas' => 'optional',
'get_gapfills' => 'optional',
'get_gapgens' => 'optional',
'get_reactions' => 'optional',
'get_compounds' => 'optional',
'get_alias' => 'optional',
'get_aliassets' => 'optional',
'get_media' => 'optional',
'get_biochemistry' => 'optional',
'import_probanno' => 'required',
'genome_object_to_workspace' => 'required',
'genome_to_workspace' => 'required',
'domains_to_workspace' => 'required',
'compute_domains' => 'required',
'add_feature_translation' => 'required',
'genome_to_fbamodel' => 'required',
'translate_fbamodel' => 'required',
'build_pangenome' => 'required',
'genome_heatmap_from_pangenome' => 'required',
'ortholog_family_from_pangenome' => 'required',
'pangenome_to_proteome_comparison' => 'required',
'import_fbamodel' => 'required',
'export_fbamodel' => 'optional',
'export_object' => 'optional',
'export_genome' => 'optional',
'adjust_model_reaction' => 'required',
'adjust_biomass_reaction' => 'required',
'addmedia' => 'required',
'export_media' => 'optional',
'runfba' => 'required',
'quantitative_optimization' => 'required',
'generate_model_stats' => 'required',
'minimize_reactions' => 'required',
'export_fba' => 'optional',
'import_phenotypes' => 'required',
'simulate_phenotypes' => 'required',
'add_media_transporters' => 'required',
'export_phenotypeSimulationSet' => 'optional',
'integrate_reconciliation_solutions' => 'required',
'queue_runfba' => 'required',
'queue_gapfill_model' => 'required',
'gapfill_model' => 'required',
'queue_gapgen_model' => 'required',
'gapgen_model' => 'required',
'queue_wildtype_phenotype_reconciliation' => 'required',
'queue_reconciliation_sensitivity_analysis' => 'required',
'queue_combine_wildtype_phenotype_reconciliation' => 'required',
'run_job' => 'required',
'queue_job' => 'required',
'set_cofactors' => 'required',
'find_reaction_synonyms' => 'optional',
'role_to_reactions' => 'optional',
'reaction_sensitivity_analysis' => 'required',
'filter_iterative_solutions' => 'required',
'delete_noncontributing_reactions' => 'required',
'annotate_workspace_Genome' => 'required',
'gtf_to_genome' => 'required',
'fasta_to_ProteinSet' => 'required',
'ProteinSet_to_Genome' => 'required',
'fasta_to_ContigSet' => 'required',
'ContigSet_to_Genome' => 'required',
'probanno_to_genome' => 'required',
'get_mapping' => 'optional',
'subsystem_of_roles' => 'optional',
'adjust_mapping_role' => 'required',
'adjust_mapping_complex' => 'required',
'adjust_mapping_subsystem' => 'required',
'get_template_model' => 'optional',
'import_template_fbamodel' => 'required',
'adjust_template_reaction' => 'required',
'adjust_template_biomass' => 'required',
'add_stimuli' => 'required',
'import_regulatory_model' => 'required',
'compare_models' => 'optional',
'compare_genomes' => 'optional',
'import_metagenome_annotation' => 'required',
'models_to_community_model' => 'required',
'metagenome_to_fbamodels' => 'required',
'import_expression' => 'required',
'import_regulome' => 'required',
'create_promconstraint' => 'required',
'add_biochemistry_compounds' => 'required',
'update_object_references' => 'required',
'add_reactions' => 'required',
'remove_reactions' => 'required',
'modify_reactions' => 'required',
'add_features' => 'required',
'remove_features' => 'required',
'modify_features' => 'required',
'import_trainingset' => 'required',
'preload_trainingset' => 'required',
'build_classifier' => 'required',
'classify_genomes' => 'required',
'build_tissue_model' => 'required',
);
sub _build_valid_methods
{
my($self) = @_;
my $methods = {
'get_models' => 1,
'get_fbas' => 1,
'get_gapfills' => 1,
'get_gapgens' => 1,
'get_reactions' => 1,
'get_compounds' => 1,
'get_alias' => 1,
'get_aliassets' => 1,
'get_media' => 1,
'get_biochemistry' => 1,
'import_probanno' => 1,
'genome_object_to_workspace' => 1,
'genome_to_workspace' => 1,
'domains_to_workspace' => 1,
'compute_domains' => 1,
'add_feature_translation' => 1,
'genome_to_fbamodel' => 1,
'translate_fbamodel' => 1,
'build_pangenome' => 1,
'genome_heatmap_from_pangenome' => 1,
'ortholog_family_from_pangenome' => 1,
'pangenome_to_proteome_comparison' => 1,
'import_fbamodel' => 1,
'export_fbamodel' => 1,
'export_object' => 1,
'export_genome' => 1,
'adjust_model_reaction' => 1,
'adjust_biomass_reaction' => 1,
'addmedia' => 1,
'export_media' => 1,
'runfba' => 1,
'quantitative_optimization' => 1,
'generate_model_stats' => 1,
'minimize_reactions' => 1,
'export_fba' => 1,
'import_phenotypes' => 1,
'simulate_phenotypes' => 1,
'add_media_transporters' => 1,
'export_phenotypeSimulationSet' => 1,
'integrate_reconciliation_solutions' => 1,
'queue_runfba' => 1,
'queue_gapfill_model' => 1,
'gapfill_model' => 1,
'queue_gapgen_model' => 1,
'gapgen_model' => 1,
'queue_wildtype_phenotype_reconciliation' => 1,
'queue_reconciliation_sensitivity_analysis' => 1,
'queue_combine_wildtype_phenotype_reconciliation' => 1,
'run_job' => 1,
'queue_job' => 1,
'set_cofactors' => 1,
'find_reaction_synonyms' => 1,
'role_to_reactions' => 1,
'reaction_sensitivity_analysis' => 1,
'filter_iterative_solutions' => 1,
'delete_noncontributing_reactions' => 1,
'annotate_workspace_Genome' => 1,
'gtf_to_genome' => 1,
'fasta_to_ProteinSet' => 1,
'ProteinSet_to_Genome' => 1,
'fasta_to_ContigSet' => 1,
'ContigSet_to_Genome' => 1,
'probanno_to_genome' => 1,
'get_mapping' => 1,
'subsystem_of_roles' => 1,
'adjust_mapping_role' => 1,
'adjust_mapping_complex' => 1,
'adjust_mapping_subsystem' => 1,
'get_template_model' => 1,
'import_template_fbamodel' => 1,
'adjust_template_reaction' => 1,
'adjust_template_biomass' => 1,
'add_stimuli' => 1,
'import_regulatory_model' => 1,
'compare_models' => 1,
'compare_genomes' => 1,
'import_metagenome_annotation' => 1,
'models_to_community_model' => 1,
'metagenome_to_fbamodels' => 1,
'import_expression' => 1,
'import_regulome' => 1,
'create_promconstraint' => 1,
'add_biochemistry_compounds' => 1,
'update_object_references' => 1,
'add_reactions' => 1,
'remove_reactions' => 1,
'modify_reactions' => 1,
'add_features' => 1,
'remove_features' => 1,
'modify_features' => 1,
'import_trainingset' => 1,
'preload_trainingset' => 1,
'build_classifier' => 1,
'classify_genomes' => 1,
'build_tissue_model' => 1,
'version' => 1,
};
return $methods;
}
my $DEPLOY = 'KB_DEPLOYMENT_CONFIG';
my $SERVICE = 'KB_SERVICE_NAME';
# Returns the path of the KBase deployment configuration file named by
# the KB_DEPLOYMENT_CONFIG environment variable, or undef when that
# variable is unset.
sub get_config_file
{
    my ($self) = @_;
    return undef if !defined $ENV{$DEPLOY};
    return $ENV{$DEPLOY};
}
# Returns the service name from the KB_SERVICE_NAME environment
# variable, or undef when that variable is unset.
sub get_service_name
{
    my ($self) = @_;
    return undef if !defined $ENV{$SERVICE};
    return $ENV{$SERVICE};
}
# Callback fired when the user logger's log file changes: repoints the
# server logger at the same file so both streams keep writing together.
sub logcallback
{
    my ($self) = @_;
    $self->loggers()->{serverlog}->set_log_file(
        $self->{loggers}->{userlog}->get_log_file());
}
# Writes one message to the server log at $level, tagging it with the
# call context's user id (undef for anonymous calls), module, method,
# call id and client IP.
sub log
{
    my ($self, $level, $context, $message) = @_;
    my $user = defined($context->user_id()) ? $context->user_id(): undef;
    $self->loggers()->{serverlog}->log_message($level, $message, $user,
        $context->module(), $context->method(), $context->call_id(),
        $context->client_ip());
}
# Builder for the 'loggers' attribute. Creates two Bio::KBase::Log
# instances: 'userlog', configured from the deployment config file and
# wired with a change callback so log-file moves propagate, and
# 'serverlog', pinned to the same file with its level set to 6 (INFO).
sub _build_loggers
{
    my ($self) = @_;
    my $submod = $self->get_service_name() || 'fbaModelServices';
    my $loggers = {};
    my $callback = sub {$self->logcallback();};
    # User-facing logger; honors the service config and notifies us via
    # $callback when its log file changes.
    $loggers->{userlog} = Bio::KBase::Log->new(
        $submod, {}, {ip_address => 1, authuser => 1, module => 1,
        method => 1, call_id => 1, changecallback => $callback,
        config => $self->get_config_file()});
    # Server logger writes to whatever file the user logger currently uses.
    $loggers->{serverlog} = Bio::KBase::Log->new(
        $submod, {}, {ip_address => 1, authuser => 1, module => 1,
        method => 1, call_id => 1,
        logfile => $loggers->{userlog}->get_log_file()});
    $loggers->{serverlog}->set_log_level(6);
    return $loggers;
}
#override of RPC::Any::Server
# Converts an uncaught error into an encoded JSON-RPC error response.
# Accepts either a hashref (our structured error built in call_method)
# or an RPC::Any::Exception object; anything else is wrapped as a
# PerlError. If encoding the error itself fails, dies with both errors.
# NOTE(review): blessed() is presumably in scope via Moose's re-export
# of Scalar::Util helpers -- confirm for this package.
sub handle_error {
    my ($self, $error) = @_;
    unless (ref($error) eq 'HASH' ||
           (blessed $error and $error->isa('RPC::Any::Exception'))) {
        $error = RPC::Any::Exception::PerlError->new(message => $error);
    }
    my $output;
    eval {
        my $encoded_error = $self->encode_output_from_exception($error);
        $output = $self->produce_output($encoded_error);
    };
    return $output if $output;
    die "$error\n\nAlso, an error was encountered while trying to send"
        . " this error: $@\n";
}
#override of RPC::Any::JSONRPC
# Turns an exception (hashref from call_method, or an RPC::Any::Exception
# object) into an encoded JSON-RPC error payload. Hashref errors may
# carry a 'context' key used purely for logging; it is logged at ERR
# level and stripped before the parameters reach the JSON-RPC layer.
sub encode_output_from_exception {
    my ($self, $exception) = @_;
    my %error_params;
    if (ref($exception) eq 'HASH') {
        %error_params = %{$exception};
        if(defined($error_params{context})) {
            # Log message plus each line of the error detail, then drop
            # the context so it is not serialized to the client.
            my @errlines;
            $errlines[0] = $error_params{message};
            push @errlines, split("\n", $error_params{data});
            $self->log($Bio::KBase::Log::ERR, $error_params{context}, \@errlines);
            delete $error_params{context};
        }
    } else {
        %error_params = (
            message => $exception->message,
            code => $exception->code,
        );
    }
    my $json_error;
    if ($self->_last_call) {
        $json_error = $self->_last_call->return_error(%error_params);
    }
    # Default to default_version. This happens when we throw an exception
    # before inbound parsing is complete.
    else {
        $json_error = $self->_default_error(%error_params);
    }
    return $self->encode_output_from_object($json_error);
}
# Main dispatch hook (overrides RPC::Any::Server). Builds a per-call
# ServerContext, enforces the method's authentication policy from
# %method_authentication ('required' / 'optional' / 'none'), invokes the
# implementation method, and normalizes any exception into a hashref
# understood by encode_output_from_exception().
sub call_method {
    my ($self, $data, $method_info) = @_;
    my ($module, $method, $modname) = @$method_info{qw(module method modname)};
    my $ctx = Bio::KBase::fbaModelServices::ServerContext->new($self->{loggers}->{userlog},
        client_ip => $self->_plack_req->address);
    $ctx->module($modname);
    $ctx->method($method);
    $ctx->call_id($self->{_last_call}->{id});
    my $args = $data->{arguments};
    {
        # Service fbaModelServices requires authentication.
        my $method_auth = $method_authentication{$method};
        $ctx->authenticated(0);
        if ($method_auth eq 'none')
        {
            # No authentication required here. Move along.
        }
        else
        {
            # The raw Authorization header is used as the KBase token.
            my $token = $self->_plack_req->header("Authorization");
            if (!$token && $method_auth eq 'required')
            {
                $self->exception('PerlError', "Authentication required for fbaModelServices but no authentication header was passed");
            }
            my $auth_token = Bio::KBase::AuthToken->new(token => $token, ignore_authrc => 1);
            my $valid = $auth_token->validate();
            # Only throw an exception if authentication was required and it fails
            if ($method_auth eq 'required' && !$valid)
            {
                $self->exception('PerlError', "Token validation failed: " . $auth_token->error_message);
            } elsif ($valid) {
                $ctx->authenticated(1);
                $ctx->user_id($auth_token->user_id);
                $ctx->token( $token);
            }
        }
    }
    # Temporarily graft the implementation package's ISA and expose the
    # context to service code through the package-global $CallContext.
    my $new_isa = $self->get_package_isa($module);
    no strict 'refs';
    local @{"${module}::ISA"} = @$new_isa;
    local $CallContext = $ctx;
    my @result;
    {
        my $err;
        eval {
            $self->log($Bio::KBase::Log::INFO, $ctx, "start method");
            @result = $module->$method(@{ $data->{arguments} });
            $self->log($Bio::KBase::Log::INFO, $ctx, "end method");
        };
        if ($@)
        {
            my $err = $@;
            my $nicerr;
            if(ref($err) eq "Bio::KBase::Exceptions::KBaseException") {
                # Structured KBase exception: keep its message and trace.
                $nicerr = {code => -32603, # perl error from RPC::Any::Exception
                           message => $err->error,
                           data => $err->trace->as_string,
                           context => $ctx
                           };
            } else {
                # Plain string error: strip internal frames and the
                # trailing "at FILE line N" locator from the message.
                my $str = "$err";
                $str =~ s/Bio::KBase::CDMI::Service::call_method.*//s; # is this still necessary? not sure
                my $msg = $str;
                $msg =~ s/ at [^\s]+.pm line \d+.\n$//;
                $nicerr = {code => -32603, # perl error from RPC::Any::Exception
                           message => $msg,
                           data => $str,
                           context => $ctx
                           };
            }
            die $nicerr;
        }
    }
    my $result;
    # NOTE(review): single-return methods (per %return_counts) have their
    # first return value double-wrapped here; presumably the JSON-RPC
    # layer unwraps one level -- confirm against RPC::Any's conventions.
    if ($return_counts{$method} == 1)
    {
        $result = [[$result[0]]];
    }
    else
    {
        $result = \@result;
    }
    return $result;
}
# Resolves a JSON-RPC method string of the form "Package.method" to an
# implementation module (or preconstructed instance) and method name,
# validating the method against this service's known list. Throws
# NoSuchMethod on any failure.
sub get_method
{
    my ($self, $data) = @_;
    my $full_name = $data->{method};
    $full_name =~ /^(\S+)\.([^\.]+)$/;
    my ($package, $method) = ($1, $2);
    if (!$package || !$method) {
        $self->exception('NoSuchMethod',
                         "'$full_name' is not a valid method. It must"
                         . " contain a package name, followed by a period,"
                         . " followed by a method name.");
    }
    if (!$self->valid_methods->{$method})
    {
        $self->exception('NoSuchMethod',
                         "'$method' is not a valid method in service fbaModelServices.");
    }
    # Prefer a preconstructed instance from instance_dispatch; otherwise
    # resolve and load the implementation package by name.
    my $inst = $self->instance_dispatch->{$package};
    my $module;
    if ($inst)
    {
        $module = $inst;
    }
    else
    {
        $module = $self->get_module($package);
        if (!$module) {
            $self->exception('NoSuchMethod',
                             "There is no method package named '$package'.");
        }
        Class::MOP::load_class($module);
    }
    if (!$module->can($method)) {
        $self->exception('NoSuchMethod',
                         "There is no method named '$method' in the"
                         . " '$package' package.");
    }
    return { module => $module, method => $method, modname => $package };
}
package Bio::KBase::fbaModelServices::ServerContext;
use strict;
=head1 NAME
Bio::KBase::fbaModelServices::ServerContext
=head1 DESCRIPTION
A KB RPC context contains information about the invoker of this
service. If it is an authenticated service the authenticated user
record is available via $context->user. The client IP address
is available via $context->client_ip.
=cut
use base 'Class::Accessor';
__PACKAGE__->mk_accessors(qw(user_id client_ip authenticated token
module method call_id));
# Constructor. $logger is the Bio::KBase::Log instance used by the _log
# helpers; %opts seeds the Class::Accessor fields (client_ip, etc.).
# _debug_levels records the level spellings that log_debug() passes
# through to the logger unchanged.
sub new
{
    my($class, $logger, %opts) = @_;
    my $self = {
        %opts,
    };
    $self->{_logger} = $logger;
    $self->{_debug_levels} = {7 => 1, 8 => 1, 9 => 1,
                              'DEBUG' => 1, 'DEBUG2' => 1, 'DEBUG3' => 1};
    return bless $self, $class;
}
# Returns the authenticated user id for this call, or undef when the
# request was anonymous.
sub _get_user
{
    my ($self) = @_;
    my $uid = $self->user_id();
    return defined $uid ? $uid : undef;
}
# Internal helper: forwards one message at $level to the underlying
# Bio::KBase::Log instance, attaching this call's metadata (user,
# module, method, call id, client IP).
sub _log
{
    my ($self, $level, $message) = @_;
    $self->{_logger}->log_message($level, $message, $self->_get_user(),
        $self->module(), $self->method(), $self->call_id(),
        $self->client_ip());
}
# Convenience wrapper: log $message at the ERR level.
sub log_err
{
    my ($self, $message) = @_;
    return $self->_log($Bio::KBase::Log::ERR, $message);
}
# Convenience wrapper: log $message at the INFO level.
sub log_info
{
    my ($self, $message) = @_;
    return $self->_log($Bio::KBase::Log::INFO, $message);
}
# Logs $message at a debug level. $level may be a numeric syslog level
# (7-9) or a symbolic name (DEBUG, DEBUG2, DEBUG3), both passed through
# unchanged; or a small integer 1-3 (default 1), which is mapped to 7-9.
# Any other value dies with "Invalid log level".
sub log_debug
{
    my ($self, $message, $level) = @_;
    if(!defined($level)) {
        $level = 1;
    }
    if($self->{_debug_levels}->{$level}) {
        # Recognized level spelling: forward to the logger as-is.
    } else {
        if ($level =~ /\D/ || $level < 1 || $level > 3) {
            die "Invalid log level: $level";
        }
        $level += 6;
    }
    $self->_log($level, $message);
}
# Delegates to the underlying logger: set its current log level.
sub set_log_level
{
    my ($self, $level) = @_;
    return $self->{_logger}->set_log_level($level);
}
# Delegates to the underlying logger: return its current log level.
sub get_log_level
{
    my ($self) = @_;
    my $logger = $self->{_logger};
    return $logger->get_log_level();
}
# Delegates to the underlying logger: clear any user-set log level so
# the logger falls back to its configured default.
sub clear_log_level
{
    my ($self) = @_;
    return $self->{_logger}->clear_user_log_level();
}
1;
| scanon/KBaseFBAModeling | lib/Bio/KBase/fbaModelServices/Server.pm | Perl | mit | 20,880 |
###########################################################################
#
# This file is auto-generated by the Perl DateTime Suite locale
# generator (0.05). This code generator comes with the
# DateTime::Locale distribution in the tools/ directory, and is called
# generate-from-cldr.
#
# This file as generated from the CLDR XML locale data. See the
# LICENSE.cldr file included in this distribution for license details.
#
# This file was generated from the source file fr.xml
# The source file version number was 1.148, generated on
# 2009/06/15 17:12:26.
#
# Do not edit this file directly.
#
###########################################################################
package DateTime::Locale::fr;
use strict;
use warnings;
use utf8;
use base 'DateTime::Locale::root';
sub cldr_version { return "1\.7\.1" }
{
    # Abbreviated AM/PM designators; one shared arrayref built at load
    # time (callers must not mutate it).
    my $designators = [ "AM", "PM" ];
    sub am_pm_abbreviated {
        return $designators;
    }
}
{
    # CLDR pattern for the "full" date format (e.g. "mardi 5 février 2008").
    my $pattern = "EEEE\ d\ MMMM\ y";
    sub date_format_full {
        return $pattern;
    }
}
{
my $date_format_long = "d\ MMMM\ y";
sub date_format_long { return $date_format_long }
}
{
my $date_format_medium = "d\ MMM\ y";
sub date_format_medium { return $date_format_medium }
}
{
my $date_format_short = "dd\/MM\/yy";
sub date_format_short { return $date_format_short }
}
{
my $datetime_format = "\{1\}\ \{0\}";
sub datetime_format { return $datetime_format }
}
{
my $day_format_abbreviated = [ "lun\.", "mar\.", "mer\.", "jeu\.", "ven\.", "sam\.", "dim\." ];
sub day_format_abbreviated { return $day_format_abbreviated }
}
sub day_format_narrow { $_[0]->day_stand_alone_narrow() }
{
my $day_format_wide = [ "lundi", "mardi", "mercredi", "jeudi", "vendredi", "samedi", "dimanche" ];
sub day_format_wide { return $day_format_wide }
}
sub day_stand_alone_abbreviated { $_[0]->day_format_abbreviated() }
{
my $day_stand_alone_narrow = [ "L", "M", "M", "J", "V", "S", "D" ];
sub day_stand_alone_narrow { return $day_stand_alone_narrow }
}
sub day_stand_alone_wide { $_[0]->day_format_wide() }
{
my $era_abbreviated = [ "av\.\ J\.\-C\.", "ap\.\ J\.\-C\." ];
sub era_abbreviated { return $era_abbreviated }
}
sub era_narrow { $_[0]->era_abbreviated() }
{
my $era_wide = [ "avant\ Jésus\-Christ", "après\ Jésus\-Christ" ];
sub era_wide { return $era_wide }
}
{
    # ISO day number of the first day of the week (1 = Monday).
    my $first_dow = "1";
    sub first_day_of_week {
        return $first_dow;
    }
}
{
my $month_format_abbreviated = [ "janv\.", "févr\.", "mars", "avr\.", "mai", "juin", "juil\.", "août", "sept\.", "oct\.", "nov\.", "déc\." ];
sub month_format_abbreviated { return $month_format_abbreviated }
}
sub month_format_narrow { $_[0]->month_stand_alone_narrow() }
{
my $month_format_wide = [ "janvier", "février", "mars", "avril", "mai", "juin", "juillet", "août", "septembre", "octobre", "novembre", "décembre" ];
sub month_format_wide { return $month_format_wide }
}
sub month_stand_alone_abbreviated { $_[0]->month_format_abbreviated() }
{
my $month_stand_alone_narrow = [ "J", "F", "M", "A", "M", "J", "J", "A", "S", "O", "N", "D" ];
sub month_stand_alone_narrow { return $month_stand_alone_narrow }
}
sub month_stand_alone_wide { $_[0]->month_format_wide() }
{
my $quarter_format_abbreviated = [ "T1", "T2", "T3", "T4" ];
sub quarter_format_abbreviated { return $quarter_format_abbreviated }
}
{
my $quarter_format_narrow = [ "T1", "T2", "T3", "T4" ];
sub quarter_format_narrow { return $quarter_format_narrow }
}
{
my $quarter_format_wide = [ "1er\ trimestre", "2e\ trimestre", "3e\ trimestre", "4e\ trimestre" ];
sub quarter_format_wide { return $quarter_format_wide }
}
sub quarter_stand_alone_abbreviated { $_[0]->quarter_format_abbreviated() }
{
my $quarter_stand_alone_narrow = [ "1", "2", "3", "4" ];
sub quarter_stand_alone_narrow { return $quarter_stand_alone_narrow }
}
sub quarter_stand_alone_wide { $_[0]->quarter_format_wide() }
{
my $time_format_full = "HH\:mm\:ss\ zzzz";
sub time_format_full { return $time_format_full }
}
{
my $time_format_long = "HH\:mm\:ss\ z";
sub time_format_long { return $time_format_long }
}
{
my $time_format_medium = "HH\:mm\:ss";
sub time_format_medium { return $time_format_medium }
}
{
    # CLDR pattern for the "short" time format (24-hour clock, no seconds).
    my $pattern = "HH\:mm";
    sub time_format_short {
        return $pattern;
    }
}
{
my $_format_for_EEEd = "d\ EEE";
sub _format_for_EEEd { return $_format_for_EEEd }
}
{
my $_format_for_HHmm = "HH\:mm";
sub _format_for_HHmm { return $_format_for_HHmm }
}
{
my $_format_for_HHmmss = "HH\:mm\:ss";
sub _format_for_HHmmss { return $_format_for_HHmmss }
}
{
my $_format_for_Hm = "H\:mm";
sub _format_for_Hm { return $_format_for_Hm }
}
{
my $_format_for_M = "L";
sub _format_for_M { return $_format_for_M }
}
{
my $_format_for_MEd = "EEE\ d\/M";
sub _format_for_MEd { return $_format_for_MEd }
}
{
my $_format_for_MMM = "LLL";
sub _format_for_MMM { return $_format_for_MMM }
}
{
my $_format_for_MMMEd = "E\ d\ MMM";
sub _format_for_MMMEd { return $_format_for_MMMEd }
}
{
my $_format_for_MMMMEd = "EEE\ d\ MMMM";
sub _format_for_MMMMEd { return $_format_for_MMMMEd }
}
{
my $_format_for_MMMMd = "d\ MMMM";
sub _format_for_MMMMd { return $_format_for_MMMMd }
}
{
my $_format_for_MMMd = "d\ MMM";
sub _format_for_MMMd { return $_format_for_MMMd }
}
{
my $_format_for_MMMdd = "dd\ MMM";
sub _format_for_MMMdd { return $_format_for_MMMdd }
}
{
my $_format_for_MMd = "d\/MM";
sub _format_for_MMd { return $_format_for_MMd }
}
{
my $_format_for_MMdd = "dd\/MM";
sub _format_for_MMdd { return $_format_for_MMdd }
}
{
my $_format_for_Md = "d\/M";
sub _format_for_Md { return $_format_for_Md }
}
{
my $_format_for_d = "d";
sub _format_for_d { return $_format_for_d }
}
{
my $_format_for_mmss = "mm\:ss";
sub _format_for_mmss { return $_format_for_mmss }
}
{
my $_format_for_ms = "mm\:ss";
sub _format_for_ms { return $_format_for_ms }
}
{
my $_format_for_y = "y";
sub _format_for_y { return $_format_for_y }
}
{
my $_format_for_yM = "M\/yyyy";
sub _format_for_yM { return $_format_for_yM }
}
{
my $_format_for_yMEd = "EEE\ d\/M\/yyyy";
sub _format_for_yMEd { return $_format_for_yMEd }
}
{
my $_format_for_yMMM = "MMM\ y";
sub _format_for_yMMM { return $_format_for_yMMM }
}
{
my $_format_for_yMMMEd = "EEE\ d\ MMM\ y";
sub _format_for_yMMMEd { return $_format_for_yMMMEd }
}
{
my $_format_for_yMMMM = "MMMM\ y";
sub _format_for_yMMMM { return $_format_for_yMMMM }
}
{
my $_format_for_yQ = "\'T\'Q\ y";
sub _format_for_yQ { return $_format_for_yQ }
}
{
my $_format_for_yQQQ = "QQQ\ y";
sub _format_for_yQQQ { return $_format_for_yQQQ }
}
{
my $_format_for_yyMM = "MM\/yy";
sub _format_for_yyMM { return $_format_for_yyMM }
}
{
my $_format_for_yyMMM = "MMM\ yy";
sub _format_for_yyMMM { return $_format_for_yyMMM }
}
{
my $_format_for_yyMMMEEEd = "EEE\ d\ MMM\ yy";
sub _format_for_yyMMMEEEd { return $_format_for_yyMMMEEEd }
}
{
my $_format_for_yyMMMd = "d\ MMM\ yy";
sub _format_for_yyMMMd { return $_format_for_yyMMMd }
}
{
my $_format_for_yyQ = "\'T\'Q\ yy";
sub _format_for_yyQ { return $_format_for_yyQ }
}
{
my $_format_for_yyQQQQ = "QQQQ\ yy";
sub _format_for_yyQQQQ { return $_format_for_yyQQQQ }
}
{
my $_format_for_yyyyMMMM = "MMMM\ y";
sub _format_for_yyyyMMMM { return $_format_for_yyyyMMMM }
}
{
my $_available_formats =
{
"EEEd" => "d\ EEE",
"HHmm" => "HH\:mm",
"HHmmss" => "HH\:mm\:ss",
"Hm" => "H\:mm",
"M" => "L",
"MEd" => "EEE\ d\/M",
"MMM" => "LLL",
"MMMEd" => "E\ d\ MMM",
"MMMMEd" => "EEE\ d\ MMMM",
"MMMMd" => "d\ MMMM",
"MMMd" => "d\ MMM",
"MMMdd" => "dd\ MMM",
"MMd" => "d\/MM",
"MMdd" => "dd\/MM",
"Md" => "d\/M",
"d" => "d",
"mmss" => "mm\:ss",
"ms" => "mm\:ss",
"y" => "y",
"yM" => "M\/yyyy",
"yMEd" => "EEE\ d\/M\/yyyy",
"yMMM" => "MMM\ y",
"yMMMEd" => "EEE\ d\ MMM\ y",
"yMMMM" => "MMMM\ y",
"yQ" => "\'T\'Q\ y",
"yQQQ" => "QQQ\ y",
"yyMM" => "MM\/yy",
"yyMMM" => "MMM\ yy",
"yyMMMEEEd" => "EEE\ d\ MMM\ yy",
"yyMMMd" => "d\ MMM\ yy",
"yyQ" => "\'T\'Q\ yy",
"yyQQQQ" => "QQQQ\ yy",
"yyyyMMMM" => "MMMM\ y"
};
sub _available_formats { return $_available_formats }
}
1;
__END__
=pod
=encoding utf8
=head1 NAME
DateTime::Locale::fr
=head1 SYNOPSIS
use DateTime;
my $dt = DateTime->now( locale => 'fr' );
print $dt->month_name();
=head1 DESCRIPTION
This is the DateTime locale package for French.
=head1 DATA
This locale inherits from the L<DateTime::Locale::root> locale.
It contains the following data.
=head2 Days
=head3 Wide (format)
lundi
mardi
mercredi
jeudi
vendredi
samedi
dimanche
=head3 Abbreviated (format)
lun.
mar.
mer.
jeu.
ven.
sam.
dim.
=head3 Narrow (format)
L
M
M
J
V
S
D
=head3 Wide (stand-alone)
lundi
mardi
mercredi
jeudi
vendredi
samedi
dimanche
=head3 Abbreviated (stand-alone)
lun.
mar.
mer.
jeu.
ven.
sam.
dim.
=head3 Narrow (stand-alone)
L
M
M
J
V
S
D
=head2 Months
=head3 Wide (format)
janvier
février
mars
avril
mai
juin
juillet
août
septembre
octobre
novembre
décembre
=head3 Abbreviated (format)
janv.
févr.
mars
avr.
mai
juin
juil.
août
sept.
oct.
nov.
déc.
=head3 Narrow (format)
J
F
M
A
M
J
J
A
S
O
N
D
=head3 Wide (stand-alone)
janvier
février
mars
avril
mai
juin
juillet
août
septembre
octobre
novembre
décembre
=head3 Abbreviated (stand-alone)
janv.
févr.
mars
avr.
mai
juin
juil.
août
sept.
oct.
nov.
déc.
=head3 Narrow (stand-alone)
J
F
M
A
M
J
J
A
S
O
N
D
=head2 Quarters
=head3 Wide (format)
1er trimestre
2e trimestre
3e trimestre
4e trimestre
=head3 Abbreviated (format)
T1
T2
T3
T4
=head3 Narrow (format)
T1
T2
T3
T4
=head3 Wide (stand-alone)
1er trimestre
2e trimestre
3e trimestre
4e trimestre
=head3 Abbreviated (stand-alone)
T1
T2
T3
T4
=head3 Narrow (stand-alone)
1
2
3
4
=head2 Eras
=head3 Wide
avant Jésus-Christ
après Jésus-Christ
=head3 Abbreviated
av. J.-C.
ap. J.-C.
=head3 Narrow
av. J.-C.
ap. J.-C.
=head2 Date Formats
=head3 Full
2008-02-05T18:30:30 = mardi 5 février 2008
1995-12-22T09:05:02 = vendredi 22 décembre 1995
-0010-09-15T04:44:23 = samedi 15 septembre -10
=head3 Long
2008-02-05T18:30:30 = 5 février 2008
1995-12-22T09:05:02 = 22 décembre 1995
-0010-09-15T04:44:23 = 15 septembre -10
=head3 Medium
2008-02-05T18:30:30 = 5 févr. 2008
1995-12-22T09:05:02 = 22 déc. 1995
-0010-09-15T04:44:23 = 15 sept. -10
=head3 Short
2008-02-05T18:30:30 = 05/02/08
1995-12-22T09:05:02 = 22/12/95
-0010-09-15T04:44:23 = 15/09/-10
=head3 Default
2008-02-05T18:30:30 = 5 févr. 2008
1995-12-22T09:05:02 = 22 déc. 1995
-0010-09-15T04:44:23 = 15 sept. -10
=head2 Time Formats
=head3 Full
2008-02-05T18:30:30 = 18:30:30 UTC
1995-12-22T09:05:02 = 09:05:02 UTC
-0010-09-15T04:44:23 = 04:44:23 UTC
=head3 Long
2008-02-05T18:30:30 = 18:30:30 UTC
1995-12-22T09:05:02 = 09:05:02 UTC
-0010-09-15T04:44:23 = 04:44:23 UTC
=head3 Medium
2008-02-05T18:30:30 = 18:30:30
1995-12-22T09:05:02 = 09:05:02
-0010-09-15T04:44:23 = 04:44:23
=head3 Short
2008-02-05T18:30:30 = 18:30
1995-12-22T09:05:02 = 09:05
-0010-09-15T04:44:23 = 04:44
=head3 Default
2008-02-05T18:30:30 = 18:30:30
1995-12-22T09:05:02 = 09:05:02
-0010-09-15T04:44:23 = 04:44:23
=head2 Datetime Formats
=head3 Full
2008-02-05T18:30:30 = mardi 5 février 2008 18:30:30 UTC
1995-12-22T09:05:02 = vendredi 22 décembre 1995 09:05:02 UTC
-0010-09-15T04:44:23 = samedi 15 septembre -10 04:44:23 UTC
=head3 Long
2008-02-05T18:30:30 = 5 février 2008 18:30:30 UTC
1995-12-22T09:05:02 = 22 décembre 1995 09:05:02 UTC
-0010-09-15T04:44:23 = 15 septembre -10 04:44:23 UTC
=head3 Medium
2008-02-05T18:30:30 = 5 févr. 2008 18:30:30
1995-12-22T09:05:02 = 22 déc. 1995 09:05:02
-0010-09-15T04:44:23 = 15 sept. -10 04:44:23
=head3 Short
2008-02-05T18:30:30 = 05/02/08 18:30
1995-12-22T09:05:02 = 22/12/95 09:05
-0010-09-15T04:44:23 = 15/09/-10 04:44
=head3 Default
2008-02-05T18:30:30 = 5 févr. 2008 18:30:30
1995-12-22T09:05:02 = 22 déc. 1995 09:05:02
-0010-09-15T04:44:23 = 15 sept. -10 04:44:23
=head2 Available Formats
=head3 d (d)
2008-02-05T18:30:30 = 5
1995-12-22T09:05:02 = 22
-0010-09-15T04:44:23 = 15
=head3 EEEd (d EEE)
2008-02-05T18:30:30 = 5 mar.
1995-12-22T09:05:02 = 22 ven.
-0010-09-15T04:44:23 = 15 sam.
=head3 HHmm (HH:mm)
2008-02-05T18:30:30 = 18:30
1995-12-22T09:05:02 = 09:05
-0010-09-15T04:44:23 = 04:44
=head3 HHmmss (HH:mm:ss)
2008-02-05T18:30:30 = 18:30:30
1995-12-22T09:05:02 = 09:05:02
-0010-09-15T04:44:23 = 04:44:23
=head3 Hm (H:mm)
2008-02-05T18:30:30 = 18:30
1995-12-22T09:05:02 = 9:05
-0010-09-15T04:44:23 = 4:44
=head3 hm (h:mm a)
2008-02-05T18:30:30 = 6:30 PM
1995-12-22T09:05:02 = 9:05 AM
-0010-09-15T04:44:23 = 4:44 AM
=head3 Hms (H:mm:ss)
2008-02-05T18:30:30 = 18:30:30
1995-12-22T09:05:02 = 9:05:02
-0010-09-15T04:44:23 = 4:44:23
=head3 hms (h:mm:ss a)
2008-02-05T18:30:30 = 6:30:30 PM
1995-12-22T09:05:02 = 9:05:02 AM
-0010-09-15T04:44:23 = 4:44:23 AM
=head3 M (L)
2008-02-05T18:30:30 = 2
1995-12-22T09:05:02 = 12
-0010-09-15T04:44:23 = 9
=head3 Md (d/M)
2008-02-05T18:30:30 = 5/2
1995-12-22T09:05:02 = 22/12
-0010-09-15T04:44:23 = 15/9
=head3 MEd (EEE d/M)
2008-02-05T18:30:30 = mar. 5/2
1995-12-22T09:05:02 = ven. 22/12
-0010-09-15T04:44:23 = sam. 15/9
=head3 MMd (d/MM)
2008-02-05T18:30:30 = 5/02
1995-12-22T09:05:02 = 22/12
-0010-09-15T04:44:23 = 15/09
=head3 MMdd (dd/MM)
2008-02-05T18:30:30 = 05/02
1995-12-22T09:05:02 = 22/12
-0010-09-15T04:44:23 = 15/09
=head3 MMM (LLL)
2008-02-05T18:30:30 = févr.
1995-12-22T09:05:02 = déc.
-0010-09-15T04:44:23 = sept.
=head3 MMMd (d MMM)
2008-02-05T18:30:30 = 5 févr.
1995-12-22T09:05:02 = 22 déc.
-0010-09-15T04:44:23 = 15 sept.
=head3 MMMdd (dd MMM)
2008-02-05T18:30:30 = 05 févr.
1995-12-22T09:05:02 = 22 déc.
-0010-09-15T04:44:23 = 15 sept.
=head3 MMMEd (E d MMM)
2008-02-05T18:30:30 = mar. 5 févr.
1995-12-22T09:05:02 = ven. 22 déc.
-0010-09-15T04:44:23 = sam. 15 sept.
=head3 MMMMd (d MMMM)
2008-02-05T18:30:30 = 5 février
1995-12-22T09:05:02 = 22 décembre
-0010-09-15T04:44:23 = 15 septembre
=head3 MMMMEd (EEE d MMMM)
2008-02-05T18:30:30 = mar. 5 février
1995-12-22T09:05:02 = ven. 22 décembre
-0010-09-15T04:44:23 = sam. 15 septembre
=head3 mmss (mm:ss)
2008-02-05T18:30:30 = 30:30
1995-12-22T09:05:02 = 05:02
-0010-09-15T04:44:23 = 44:23
=head3 ms (mm:ss)
2008-02-05T18:30:30 = 30:30
1995-12-22T09:05:02 = 05:02
-0010-09-15T04:44:23 = 44:23
=head3 y (y)
2008-02-05T18:30:30 = 2008
1995-12-22T09:05:02 = 1995
-0010-09-15T04:44:23 = -10
=head3 yM (M/yyyy)
2008-02-05T18:30:30 = 2/2008
1995-12-22T09:05:02 = 12/1995
-0010-09-15T04:44:23 = 9/-010
=head3 yMEd (EEE d/M/yyyy)
2008-02-05T18:30:30 = mar. 5/2/2008
1995-12-22T09:05:02 = ven. 22/12/1995
-0010-09-15T04:44:23 = sam. 15/9/-010
=head3 yMMM (MMM y)
2008-02-05T18:30:30 = févr. 2008
1995-12-22T09:05:02 = déc. 1995
-0010-09-15T04:44:23 = sept. -10
=head3 yMMMEd (EEE d MMM y)
2008-02-05T18:30:30 = mar. 5 févr. 2008
1995-12-22T09:05:02 = ven. 22 déc. 1995
-0010-09-15T04:44:23 = sam. 15 sept. -10
=head3 yMMMM (MMMM y)
2008-02-05T18:30:30 = février 2008
1995-12-22T09:05:02 = décembre 1995
-0010-09-15T04:44:23 = septembre -10
=head3 yQ ('T'Q y)
2008-02-05T18:30:30 = T1 2008
1995-12-22T09:05:02 = T4 1995
-0010-09-15T04:44:23 = T3 -10
=head3 yQQQ (QQQ y)
2008-02-05T18:30:30 = T1 2008
1995-12-22T09:05:02 = T4 1995
-0010-09-15T04:44:23 = T3 -10
=head3 yyMM (MM/yy)
2008-02-05T18:30:30 = 02/08
1995-12-22T09:05:02 = 12/95
-0010-09-15T04:44:23 = 09/-10
=head3 yyMMM (MMM yy)
2008-02-05T18:30:30 = févr. 08
1995-12-22T09:05:02 = déc. 95
-0010-09-15T04:44:23 = sept. -10
=head3 yyMMMd (d MMM yy)
2008-02-05T18:30:30 = 5 févr. 08
1995-12-22T09:05:02 = 22 déc. 95
-0010-09-15T04:44:23 = 15 sept. -10
=head3 yyMMMEEEd (EEE d MMM yy)
2008-02-05T18:30:30 = mar. 5 févr. 08
1995-12-22T09:05:02 = ven. 22 déc. 95
-0010-09-15T04:44:23 = sam. 15 sept. -10
=head3 yyQ ('T'Q yy)
2008-02-05T18:30:30 = T1 08
1995-12-22T09:05:02 = T4 95
-0010-09-15T04:44:23 = T3 -10
=head3 yyQQQQ (QQQQ yy)
2008-02-05T18:30:30 = 1er trimestre 08
1995-12-22T09:05:02 = 4e trimestre 95
-0010-09-15T04:44:23 = 3e trimestre -10
=head3 yyyyMMMM (MMMM y)
2008-02-05T18:30:30 = février 2008
1995-12-22T09:05:02 = décembre 1995
-0010-09-15T04:44:23 = septembre -10
=head2 Miscellaneous
=head3 Prefers 24 hour time?
Yes
=head3 Local first day of the week
lundi
=head1 SUPPORT
See L<DateTime::Locale>.
=head1 AUTHOR
Dave Rolsky <autarch@urth.org>
=head1 COPYRIGHT
Copyright (c) 2008 David Rolsky. All rights reserved. This program is
free software; you can redistribute it and/or modify it under the same
terms as Perl itself.
This module was generated from data provided by the CLDR project, see
the LICENSE.cldr in this distribution for details on the CLDR data's
license.
=cut
| liuyangning/WX_web | xampp/perl/vendor/lib/DateTime/Locale/fr.pm | Perl | mit | 18,100 |
package Net::HTTP;
use strict;
use vars qw($VERSION @ISA);
$VERSION = "5.812";
eval { require IO::Socket::INET } || require IO::Socket;
require Net::HTTP::Methods;
require Carp;
@ISA=qw(IO::Socket::INET Net::HTTP::Methods);
# Construct a connected Net::HTTP client. At least one option (normally
# Host) must be supplied; IO::Socket::INET performs the actual connect via
# SUPER::new, which in turn reaches configure()/http_configure() below.
sub new {
    my ($class, @options) = @_;
    Carp::croak("No Host option provided") unless @options;
    return $class->SUPER::new(@options);
}
# IO::Socket::INET's constructor calls configure(); divert it to
# Net::HTTP::Methods::http_configure, which strips out the HTTP-specific
# options before the socket-level setup happens.
sub configure {
    my ($self, $cnf) = @_;
    $self->http_configure($cnf);
}

# Callback from http_configure(): perform the real TCP connect with the
# remaining (socket-level) options via the IO::Socket::INET machinery.
sub http_connect {
    my ($self, $cnf) = @_;
    $self->SUPER::configure($cnf);
}
1;
__END__
=head1 NAME
Net::HTTP - Low-level HTTP connection (client)
=head1 SYNOPSIS
use Net::HTTP;
my $s = Net::HTTP->new(Host => "www.perl.com") || die $@;
$s->write_request(GET => "/", 'User-Agent' => "Mozilla/5.0");
my($code, $mess, %h) = $s->read_response_headers;
while (1) {
my $buf;
my $n = $s->read_entity_body($buf, 1024);
die "read failed: $!" unless defined $n;
last unless $n;
print $buf;
}
=head1 DESCRIPTION
The C<Net::HTTP> class is a low-level HTTP client. An instance of the
C<Net::HTTP> class represents a connection to an HTTP server. The
HTTP protocol is described in RFC 2616. The C<Net::HTTP> class
supports C<HTTP/1.0> and C<HTTP/1.1>.
C<Net::HTTP> is a sub-class of C<IO::Socket::INET>. You can mix the
methods described below with reading and writing from the socket
directly.  This is not necessarily a good idea, unless you know what you
are doing.
The following methods are provided (in addition to those of
C<IO::Socket::INET>):
=over
=item $s = Net::HTTP->new( %options )
The C<Net::HTTP> constructor method takes the same options as
C<IO::Socket::INET>'s as well as these:
Host: Initial host attribute value
KeepAlive: Initial keep_alive attribute value
SendTE: Initial send_te attribute_value
HTTPVersion: Initial http_version attribute value
PeerHTTPVersion: Initial peer_http_version attribute value
MaxLineLength: Initial max_line_length attribute value
MaxHeaderLines: Initial max_header_lines attribute value
The C<Host> option is also the default for C<IO::Socket::INET>'s
C<PeerAddr>. The C<PeerPort> defaults to 80 if not provided.
The C<Listen> option provided by C<IO::Socket::INET>'s constructor
method is not allowed.
If unable to connect to the given HTTP server then the constructor
returns C<undef> and $@ contains the reason. After a successful
connect, a C<Net::HTTP> object is returned.
=item $s->host
Get/set the default value of the C<Host> header to send. The $host
must not be set to an empty string (or C<undef>) for HTTP/1.1.
=item $s->keep_alive
Get/set the I<keep-alive> value. If this value is TRUE then the
request will be sent with headers indicating that the server should try
to keep the connection open so that multiple requests can be sent.
The actual headers set will depend on the value of the C<http_version>
and C<peer_http_version> attributes.
=item $s->send_te
Get/set a value indicating if the request will be sent with a "TE"
header to indicate the transfer encodings that the server can choose to
use. If the C<Compress::Zlib> module is installed then this will
announce that this client accept both the I<deflate> and I<gzip>
encodings.
=item $s->http_version
Get/set the HTTP version number that this client should announce.
This value can only be set to "1.0" or "1.1". The default is "1.1".
=item $s->peer_http_version
Get/set the protocol version number of our peer. This value will
initially be "1.0", but will be updated by a successful
read_response_headers() method call.
=item $s->max_line_length
Get/set a limit on the length of response line and response header
lines. The default is 4096. A value of 0 means no limit.
=item $s->max_header_length
Get/set a limit on the number of header lines that a response can
have. The default is 128. A value of 0 means no limit.
=item $s->format_request($method, $uri, %headers, [$content])
Format a request message and return it as a string. If the headers do
not include a C<Host> header, then a header is inserted with the value
of the C<host> attribute. Headers like C<Connection> and
C<Keep-Alive> might also be added depending on the status of the
C<keep_alive> attribute.
If $content is given (and it is non-empty), then a C<Content-Length>
header is automatically added unless it was already present.
=item $s->write_request($method, $uri, %headers, [$content])
Format and send a request message. Arguments are the same as for
format_request(). Returns true if successful.
=item $s->format_chunk( $data )
Returns the string to be written for the given chunk of data.
=item $s->write_chunk($data)
Will write a new chunk of request entity body data. This method
should only be used if the C<Transfer-Encoding> header with a value of
C<chunked> was sent in the request. Note, writing zero-length data is
a no-op. Use the write_chunk_eof() method to signal end of entity
body data.
Returns true if successful.
=item $s->format_chunk_eof( %trailers )
Returns the string to be written for signaling EOF when a
C<Transfer-Encoding> of C<chunked> is used.
=item $s->write_chunk_eof( %trailers )
Will write eof marker for chunked data and optional trailers. Note
that trailers should not really be used unless it was signaled
with a C<Trailer> header.
Returns true if successful.
=item ($code, $mess, %headers) = $s->read_response_headers( %opts )
Read response headers from server and return it. The $code is the 3
digit HTTP status code (see L<HTTP::Status>) and $mess is the textual
message that came with it. Headers are then returned as key/value
pairs. Since key letter casing is not normalized and the same key can
even occur multiple times, assigning these values directly to a hash
is not wise. Only the $code is returned if this method is called in
scalar context.
As a side effect this method updates the 'peer_http_version'
attribute.
Options might be passed in as key/value pairs. There are currently
only two options supported; C<laxed> and C<junk_out>.
The C<laxed> option will make read_response_headers() more forgiving
towards servers that have not learned how to speak HTTP properly. The
C<laxed> option is a boolean flag, and is enabled by passing in a TRUE
value. The C<junk_out> option can be used to capture bad header lines
when C<laxed> is enabled. The value should be an array reference.
Bad header lines will be pushed onto the array.
The C<laxed> option must be specified in order to communicate with
pre-HTTP/1.0 servers that don't describe the response outcome or the
data they send back with a header block. For these servers
peer_http_version is set to "0.9" and this method returns (200,
"Assumed OK").
The method will raise an exception (die) if the server does not speak
proper HTTP or if the C<max_line_length> or C<max_header_length>
limits are reached. If the C<laxed> option is turned on and
C<max_line_length> and C<max_header_length> checks are turned off,
then no exception will be raised and this method will always
return a response code.
=item $n = $s->read_entity_body($buf, $size);
Reads chunks of the entity body content. Basically the same interface
as for read() and sysread(), but the buffer offset argument is not
supported yet. This method should only be called after a successful
read_response_headers() call.
The return value will be C<undef> on read errors, 0 on EOF, -1 if no data
could be returned this time, otherwise the number of bytes assigned
to $buf. The $buf is set to "" when the return value is -1.
You normally want to retry this call if this function returns either
-1 or C<undef> with C<$!> as EINTR or EAGAIN (see L<Errno>). EINTR
can happen if the application catches signals and EAGAIN can happen if
you made the socket non-blocking.
This method will raise exceptions (die) if the server does not speak
proper HTTP. This can only happen when reading chunked data.
=item %headers = $s->get_trailers
After read_entity_body() has returned 0 to indicate end of the entity
body, you might call this method to pick up any trailers.
=item $s->_rbuf
Get/set the read buffer content. The read_response_headers() and
read_entity_body() methods use an internal buffer which they will look
for data before they actually sysread more from the socket itself. If
they read too much, the remaining data will be left in this buffer.
=item $s->_rbuf_length
Returns the number of bytes in the read buffer. This should always be
the same as:
length($s->_rbuf)
but might be more efficient.
=back
=head1 SUBCLASSING
The read_response_headers() and read_entity_body() will invoke the
sysread() method when they need more data. Subclasses might want to
override this method to control how reading takes place.
The object itself is a glob. Subclasses should avoid using hash key
names prefixed with C<http_> and C<io_>.
=head1 SEE ALSO
L<LWP>, L<IO::Socket::INET>, L<Net::HTTP::NB>
=head1 COPYRIGHT
Copyright 2001-2003 Gisle Aas.
This library is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.
=cut
| leighpauls/k2cro4 | third_party/cygwin/lib/perl5/vendor_perl/5.10/Net/HTTP.pm | Perl | bsd-3-clause | 9,125 |
/* Part of Extended Tools for SWI-Prolog
Author: Edison Mera Menendez
E-mail: efmera@gmail.com
WWW: https://github.com/edisonm/xtools
Copyright (C): 2015, Process Design Center, Breda, The Netherlands.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
:- module(check_assertions, []).
:- use_module(library(apply)).
:- use_module(library(lists)).
:- use_module(library(option)).
:- use_module(library(yall)).
:- use_module(library(pairs)).
:- use_module(library(checkers/checker)).
:- use_module(library(assertions)).
:- use_module(library(check), []).
:- use_module(library(codewalk)).
:- use_module(library(compact_pi_list)).
:- use_module(library(intercept)).
:- use_module(library(normalize_pi)).
:- use_module(library(resolve_calln)).
:- use_module(library(location_utils)).
:- use_module(library(from_utils)).
:- use_module(library(qualify_meta_goal)).
:- use_module(library(assertions)).
:- use_module(library(option_utils)).
:- use_module(library(checkable_predicate)).
:- use_module(library(ctrtchecks)).
:- use_module(library(rtchecks_rt)).
:- use_module(library(rtchecks_utils), []).
:- dynamic
violations_db/3.
:- multifile
ignore_predicate/2,
prolog:message//1.
% :- table
% generate_ctchecks/4,
% do_check_property_ctcheck/2.
%!  checker:check(+Checker, -Result, +Options)
%
%   Hook into the generic checker framework: run the `assertions` analysis.
%   Clears stale results from a previous run, then collects Result as a
%   list of error-Issue pairs for the reporting infrastructure.
checker:check(assertions, Result, Options) :-
    cleanup_db,
    check_assertions(Options, Result).

% Drop all cached assertion-violation records from previous runs.
cleanup_db :-
    retractall(violations_db(_, _, _)).
%!  check_assertions(+Options, -Pairs) is det.
%
%   Walk the code selected by Options and gather assertion issues as
%   error-Issue pairs: compile-time check violations found in bodies and
%   heads, plus problems in the assertion properties themselves.
check_assertions(Options1, Pairs) :-
    foldl(select_option_default,
          [method(Method1)-clause],
          Options1, Options2),
    ( \+ memberchk(Method1, [source, clause]) % only these methods are supported
    ->Method = clause,
      print_message(
          warning,
          format("Method `~w' not supported, using `~w' instead",
                 [Method1, Method]))
    ; Method = Method1
    ),
    % collect_violations/3 is invoked by the walker on every call site;
    % non_fresh variable tracking distinguishes bound from fresh arguments.
    merge_options(Options2,
                  [method(Method),
                   trace_variables([non_fresh]),
                   on_trace(collect_violations)
                  ], Options),
    option_module_files(Options, MFileD),
    walk_code([module_files(MFileD)|Options]),
    % Harvest body violations recorded during the walk, then per-predicate
    % head checks; property-level issues are appended via the Props tail.
    findall(error-Issue,
            ( retract(violations_db(CPI, CTChecks, From)),
              from_location(From, Loc),
              Issue = body(Loc-CPI)-CTChecks
            ; current_head_ctcheck(MFileD, Issue)
            ), Pairs, Props),
    prop_ctcheck(MFileD, Props).
%!  current_head_ctcheck(+MFileD, -Issue) is nondet.
%
%   Enumerate head(Loc-PI)-AssrErrorL issues: for each predicate defined
%   locally in a module/file of interest, run the compile-time checks
%   derived from its assertions against the predicate head itself.
current_head_ctcheck(MFileD, head(Loc-PI)-AssrErrorL) :-
    PI=M:F/A,
    get_dict(M, MFileD, FileD),
    current_predicate(M:F/A),
    functor(H, F, A),
    MH = M:H,
    % Skip predicates not defined here or without Prolog clauses.
    \+ predicate_property(MH, imported_from(_)),
    \+ is_built_in(MH),
    \+ predicate_property(MH, foreign),
    generate_ctchecks(H, M, [], CTCheck),
    CTCheck \= _:true,
    % Only report when some clause lives in a file of interest.
    clause(MH, _, Clause),
    clause_property(Clause, file(File)),
    get_dict(File, FileD, _),
    do_check_property_ctcheck(CTCheck, AssrErrorL),
    % Although we have duplicated logic, we don't call check_property_ctcheck/3
    % here because it is too slow:
    % check_property_ctcheck(H, M, CTChecks),
    AssrErrorL \= [],
    from_location(clause(Clause), Loc).
%!  prop_ctcheck(+MFileD, -Trans) is det.
%
%   Collect issues found inside assertion properties themselves and group
%   them first by assertion location/predicate, then by checker, producing
%   error-(prop(Groups)-Key) pairs for compact reporting.
prop_ctcheck(MFileD, Trans) :-
    findall(Pair, current_prop_ctcheck(MFileD, Pair), Pairs),
    sort(Pairs, Sorted),
    group_pairs_by_key(Sorted, Groups),
    maplist([K-L, (error-(prop(G)-K))]
            >>group_pairs_by_key(L, G), Groups, Trans).
%!  current_prop_ctcheck(+MFileD, -Pair) is nondet.
%
%   For each assertion head in a file of interest, apply every property
%   checker to each goal mentioned in the comp/call/succ/glob parts of the
%   assertion, yielding (Checker-PropLoc/Issues)-(AssrLoc-PI) findings.
current_prop_ctcheck(MFileD, (Checker-PLoc/Issues)-(Loc-PI)) :-
    prop_asr(head, M:Head, From, Asr),
    get_dict(M, MFileD, FileD),
    from_to_file(From, File),
    get_dict(File, FileD, _),
    functor(Head, HF,HA),
    PI=M:HF/HA,
    ( member(Part, [comp, call, succ, glob]),
      curr_prop_asr(Part, PM:Prop, PFrom, Asr),
      resolve_head(Prop, PM, N:H)
    ),
    checker_t(Checker),
    term_variables(Head, Vars),
    % Mark head variables non-fresh so the compatibility abstraction
    % applies to them during the checks.
    '$expand':mark_vars_non_fresh(Vars),
    check_property(Checker, H, N, M:Head, Issues),
    numbervars(Issues, 0, _),
    from_location(PFrom, PLoc),
    from_location(From, Loc).
%!  resolve_head(+PropExpr, +Module, -MGoal) is nondet.
%
%   Decompose a property expression into the individual module-qualified
%   goals it mentions, distributing over module qualification, conjunction
%   and disjunction.
resolve_head(V, M, M:V) :-
    var(V),
    % Note: this should not happen
    !,
    fail.
resolve_head(M:H1, _, H) :- !,
    resolve_head(H1, M, H).
resolve_head((A,B), M, H) :- !,
    ( resolve_head(A, M, H)
    ; resolve_head(B, M, H)
    ).
resolve_head((A;B), M, H) :-
    !,
    %% ERROR: When Checker=ctcheck, we are reporting an issue even if one of the
    %% next two branches does not have any:
    ( resolve_head(A, M, H)
    ; resolve_head(B, M, H)
    ).
resolve_head(H, M, M:H).
%!  prolog:message(+Term)// is det.
%
%   Banner for the assertions checker report. Fixes the misspelling
%   "asssertions" in the user-visible heading (underline length adjusted
%   to match).
prolog:message(acheck(assertions)) -->
    ['Check assertions',nl,
     '----------------',nl,
     'The predicates contain assertions that are inconsistent', nl,
     'with the implementation.', nl, nl].
% Render one grouped issue: a type/location header followed by the
% per-issue details, dispatched on the payload kind.
prolog:message(acheck(assertions, Type-IssueL)) -->
    type_message(Type),
    {type_issue_t(Type, IssueT)},
    foldl(prop_issue(IssueT), IssueL).

% Map the issue group type to the kind of payload it carries.
type_issue_t(body(_), ctchecks).
type_issue_t(head(_), ctchecks).
type_issue_t(prop(_), property).

% Delegate ctcheck payloads to the shared checks renderer; property
% payloads carry one sub-list per checker.
prop_issue(ctchecks, CTChecks) -->
    prolog:message(acheck(checks, CTChecks)).
prop_issue(property, Checker-IssueL) -->
    foldl(property_issue(Checker), IssueL).

% One message line (or block) per property issue, keyed by checker.
property_issue(ctcheck, Loc/(PI-CTChecks)) -->
    [" "], Loc, ["In call to ~w:"-[PI], nl],
    prop_issue(ctchecks, CTChecks).
property_issue(defined, Loc/Prop) -->
    [" "], Loc, ["~w is undefined"-[Prop], nl].
property_issue(is_prop, Loc/Prop) -->
    [" "], Loc, ["~w is not a property"-[Prop], nl].

% Header line identifying where the issues were found.
type_message(body(Loc-PI)) --> Loc, ['In the body of ~q:'-[PI], nl].
type_message(head(Loc-PI)) --> Loc, ['In the head of ~q:'-[PI], nl].
type_message(prop(LocPIL)) --> foldl(type_message_prop, LocPIL).
type_message_prop(Loc-PIL) -->
    {compact_pi_list(PIL, PIC)},
    Loc, ['In assertions of ~q:'-[PIC], nl].
% Predicates/modules whose call sites should not be checked.
% ignore_predicate/2 is the extensible hook (Goal, Module);
% ignore_predicate/1 adapts it to module-qualified goals.
ignore_predicate(assertion_head(_, _, _, _, _, _, _), assertions).
ignore_predicate(_, M) :- ignore_module(M).
ignore_module(extend_args).
% Issues in the assertion body will be reported when checking properties.
ignore_predicate(M:Call) :- ignore_predicate(Call, M).
:- public collect_violations/3.

%! collect_violations(+Module, :Goal, +Caller, +From)
%
% Collect the assertion violations of a given Goal. Note that Module refers
% to the module of the source code, while Goal could have another context
% module, for instance, if module qualification was used in the body of a
% predicate. Invoked by the code walker (on_trace option); records each
% violation in violations_db/3 keyed by the normalized caller and source
% location. Always succeeds so the walk continues.
:- meta_predicate collect_violations(0,0,+).
collect_violations(M:Goal, Caller, From) :-
    ( \+ ignore_predicate(Caller),
      check_property_ctcheck(Goal, M, Caller, CTChecks),
      CTChecks \= []
    ->normalize_pi(Caller, CPI),
      update_fact_from(violations_db(CPI, CTChecks), From)
    ; true
    ).
%!  check_property_ctcheck(+Goal, +M, +Caller, -AssrErrorL)
%
%   Build the compile-time check goal for Goal and run it, collecting any
%   assertion errors it raises. Restores '$variable_names' bindings so the
%   report shows user-level variable names.
check_property_ctcheck(Goal, M, Caller, AssrErrorL) :-
    tabled_generate_ctchecks(Goal, M, Caller, CTCheck),
    CTCheck \= _:true,
    % Skip lack of assertions or assertions that will not
    % trigger violations
    do_check_property_ctcheck(CTCheck, AssrErrorL),
    ignore(( nb_current('$variable_names', VNL),
             maplist(set_variable_names, VNL)
           )).

% Bind a variable to its '$VAR'(Name) form, if still unbound.
set_variable_names(Name=Variable) :- ignore(Variable = '$VAR'(Name)).

%!  do_check_property_ctcheck(+CTCheck, -AssrErrorL)
%
%   Execute CTCheck, intercepting assertion-error signals into a
%   destructively-updated accumulator (nb_setarg) so errors from
%   backtracking branches are retained.
do_check_property_ctcheck(CTCheck, AssrErrorL) :-
    SErrors = s([]),
    intercept(catch(CTCheck, Error, send_signal(Error)),
              AssrError, cpc_handler(AssrError), SErrors-CTCheck),
    SErrors = s(CAssrErrorL),
    maplist(collect_assr_error(CTCheck), CAssrErrorL, AssrErrorL).

% Signal handler: push the error onto the non-backtrackable accumulator.
cpc_handler(AssrError, SErrors-CTCheck) :-
    SErrors = s(CAssrErrorL1),
    nb_setarg(1, SErrors, [CTCheck-AssrError|CAssrErrorL1]).

% Strip the CTCheck tag from a collected error entry.
collect_assr_error(CTCheck, CTCheck-AssrError, AssrError).
% The available property checkers.
checker_t(defined).
checker_t(is_prop).
checker_t(ctcheck).

%!  check_property(+Checker, +H, +M, +Caller, -Issue) is nondet.
%
%   Succeeds with an Issue term when property H in module M fails the
%   given checker.
check_property(defined, H, M, _, M:F/A) :-
    % Also reported by check_undefined, but is here to avoid dependency with
    % other analysis.
    functor(H, F, A),
    \+ current_predicate(M:F/A).
check_property(is_prop, H, M, _, M:F/A) :-
    resolve_calln(M:H, M:G),
    functor(G, F, A),
    \+ verif_is_property(M, F, A).
check_property(ctcheck, H, M, Caller, (M:F/A)-CTChecks) :-
    % compile-time checks. Currently only compatibility checks.
    check_property_ctcheck(H, M, Caller, CTChecks),
    CTChecks \= [],
    resolve_calln(M:H, M:G),
    functor(G, F, A).

% DCG helper: record Arg=Fresh when freshness info is available for an
% argument, nothing otherwise.
var_info(A, P) -->
    ( { var_property(A, fresh(Fresh)) }
    ->[P=Fresh]
    ; []
    ).
%! tabled_generate_ctchecks(+Head, ?Context, +Caller, -Goal) is det
%
%  Wrapper around generate_ctchecks/4 that first collects per-argument
%  freshness info (VInf), drops attributed-variable constraints, and
%  qualifies meta-arguments when Head is a meta-call in Caller's context.
%  (Tabling is currently disabled; see the commented table directive at
%  the top of the file.)
tabled_generate_ctchecks(H, M, Caller, Goal) :-
    functor(H, F, A),
    functor(P, F, A),
    H =.. [F|Args],
    P =.. [F|PInf],
    foldl(var_info, Args, PInf, VInf, []),
    term_attvars(M:H, Vars),
    maplist(del_attrs, Vars),
    ( meta_call_goal(H, M, Caller, Meta)
    ->qualify_meta_goal(CM:P, Meta, G)
    ; G = P
    ),
    generate_ctchecks(G, M, VInf, Goal),
    % Bind the fresh copy back to the original head only after the check
    % goal has been built over the abstract version.
    CM = M,
    P = H.

%! generate_ctchecks(+Goal, +Context, +VInf, -CTChecks) is det
%
% Generate compile-time checks, currently only compatibility is checked,
% fails if no ctchecks can be applied to Pred. VInf contains information
% about fresh variables.
%
generate_ctchecks(Goal, M, VInf, CTChecks) :-
    collect_assertions(ct, Goal, M, AsrL),
    ( AsrL \= []
    ->maplist(wrap_asr_ctcheck(VInf), AsrL, PAsrL),
      CTChecks = ctrtchecks:check_call(ct, PAsrL, M:Goal)
    ; CTChecks = check_assertions:true
    ).
% Wrap an assertion reference together with the freshness info so the
% abstracted asr_aprop view below applies during checking.
wrap_asr_ctcheck(VInf, Asr, ctcheck(VInf, Asr)).

% Extend the assertions API: properties of a ctcheck-wrapped assertion are
% served through the compile-time abstraction.
assertions:asr_aprop(ctcheck(VInf, Asr), Key, Prop, From) :-
    asr_aprop_ctcheck(Key, VInf, Asr, Prop, From).

%! asr_aprop_ctcheck(Asr, Section, Property, From)
%
% Assertion abstraction: If we can not determine the mode at compile time, at
% least check for compatibility (instead of instantiation). This abstraction
% makes static check decidable, the tradeoff is that we lose precision but we
% gain computability of checks at compile-time.
asr_aprop_ctcheck(head, _, A, P, F) :- curr_prop_asr(head, P, F, A).
asr_aprop_ctcheck(stat, _, A, P, F) :- curr_prop_asr(stat, P, F, A).
asr_aprop_ctcheck(type, _, A, P, F) :- curr_prop_asr(type, P, F, A).
asr_aprop_ctcheck(dict, _, A, P, F) :- curr_prop_asr(dict, P, F, A).
asr_aprop_ctcheck(comm, _, A, P, F) :- curr_prop_asr(comm, P, F, A).
asr_aprop_ctcheck(comp, _, A, P, F) :- curr_prop_asr(comp, P, F, A).
asr_aprop_ctcheck(comp, _, A, P, F) :- curr_prop_asr(succ, P, F, A). % TBD: Key = succ
asr_aprop_ctcheck(Key, L, Asr, Prop, From) :-
    asr_aprop_ctcheck_abstraction(Key, L, Asr, Prop, From).

% Which real sections are abstracted, and whether they apply to fresh
% variables.
prop_abstraction(call, true).
% prop_abstraction(succ, false).

% A call property stays a call property only when it constrains a variable
% whose freshness matches; otherwise it is weakened to a comp(atibility)
% property.
asr_aprop_ctcheck_abstraction(Key, L, Asr, Prop, From) :-
    prop_abstraction(RKey, Fresh),
    curr_prop_asr(RKey, Prop, From, Asr),
    term_variables(Prop, Vars),
    ( member(Var=Fresh, L),
      member(Arg, Vars),
      Arg==Var
    ->Key = RKey
    ; Key = comp
    ).
%!  verif_is_property(+M, +F, +A) is semidet.
%
%   Succeeds if M:F/A is declared as a property via an assertion (or is
%   the trivial true/0, which is always accepted).
verif_is_property(system, true, 0) :- !. % ignore true (identity)
verif_is_property(M, F, A) :-
    functor(H, F, A),
    prop_asr(H, M, _, prop, _, _, _).
| TeamSPoon/logicmoo_workspace | packs_lib/xtools/prolog/checkers/check_assertions.pl | Perl | mit | 12,272 |
##############################################################################
# Copyright © 2009 Six Apart Ltd.
# This program is free software: you can redistribute it and/or modify it
# under the terms of version 2 of the GNU General Public License as published
# by the Free Software Foundation, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details. You should have received a copy of the GNU
# General Public License version 2 along with this program. If not, see
# <http://www.gnu.org/licenses/>.
package EmailRetitler;
# MT transformer callback: if the message body begins with a
# "Subject: ..." line, strip that line (and any trailing newlines) from
# the body and promote its text to the Subject header. Always returns
# true so the callback chain continues.
sub retitle {
    my ($cb, %params) = @_;
    my $headers  = $params{headers};
    my $body_ref = $params{body};
    if ($$body_ref =~ s{ \A Subject: [ \t]* ([^\r\n]+) [\r\n]* }{}xms) {
        $headers->{Subject} = $1;
    }
    return 1;
}
1;
| movabletype/mt-plugin-email-retitler | plugins/EmailRetitler/lib/EmailRetitler.pm | Perl | mit | 1,036 |
# NOTE(review): the original header comment was Shift-JIS mojibake; it
# appears to say "this file is Shift-JIS encoded" — confirm against the
# original source before relying on that.
#
# MML preprocessor: reads an MML score from STDIN, expands macros and
# repeat blocks, and prints one statement per line to STDOUT.
$text="";
# Slurp STDIN into one string, removing newlines, all whitespace and '@'
# characters.
while(<STDIN>){
    my($input)=$_;
    $input=~ s/[\n\r]//g;
    $input=~ s/\s//g;
    $input=~ s/@//g;
    $text.=$input;
}
# A #OCTAVEREVERSE directive swaps the octave-shift characters '<' and '>'
# throughout the score; the directive itself is then removed.
if($text=~ /\#OCTAVEREVERSE/i){
    $text=~ tr/\<\>/\>\</;
    $text=~ s/\#OCTAVEREVERSE//gi;
}
# Macro expansion: each "$name=value;" definition is removed and every
# later "$name" occurrence is replaced by its value.
while($text=~ /\$([^\;\=\$]+)\=([^\;\=\$]+)\;/){
    my($vkey,$vword)=($1,$2);
    $text=~ s/\$([^\;\=\$]+)\=([^\;\=\$]+)\;//;
    $text=~ s/\$$vkey/$vword/g;
}
# Strip '@'-prefixed commands with up to seven comma-separated numeric
# parameters (voice/envelope settings, presumably — verify against the
# consuming tool).
$text=~ s/\@[\w^Vv]\-?\d*\,?\-?\d*\,?\-?\d*\,?\-?\d*\,?\-?\d*\,?\-?\d*\,?\-?\d*//g;
# Process each ';'-terminated statement independently.
@lines=split(/;/,$text);
foreach my $input(@lines)
{
    # Remove /* ... */ comments.
    $input=~ s/\/\*.+?\*\///g;
    # Fast path: "/:body:/" or "/:2body:/" with no digits in the body
    # simply doubles the body.
    $input=~ s/\/\:2?([^\:\/\d]+?)\:\//$1$1/g;
    # General repeat without a '/' split point: "/:Nbody:/" repeats body
    # N times (N defaults to 2; N==0 deletes the block).
    while($input=~ /\/\:(\d*)([^\:\/\d][^\:\/]*?)\:\//){
        my($cnt);
        my($tmp);
        if($1 eq "0"){$tmp=""}else{
            if($1 eq ""){$cnt=2;}else{$cnt=$1+0;}
            $tmp=($2) x $cnt;
        }
        $input=~s /\/\:(\d*)([^\:\/\d][^\:\/]*?)\:\//$tmp/;
    }
    # Repeat with an optional '/' split: "/:Npart1/part2:/" plays
    # part1+part2 (N-1) times and part1 once more (last-time skip).
    while($input=~ /\/\:(\d*)([^\:\/\d][^\:\/]*?)(\/?)([^\:\/]*?)\:\//){
        my($cnt);
        my($tmp);
        if($1 eq "0"){$tmp=""}else{
            if($1 eq ""){$cnt=2;}else{$cnt=$1+0;}
            if($3 eq ""){$tmp=($2.$4) x $cnt;}else{$tmp=($2.$4)x($cnt-1).$2;}
        }
        $input=~s /\/\:(\d*)([^\:\/\d][^\:\/]*?)(\/?)([^\:\/]*?)\:\//$tmp/;
    }
    #print "\n";
    # Emit the expanded statement, re-terminated with ';'.
    if($input ne ""){print $input.";\n";}
}
#
# Threaded worker & proto
# - code sample -
# Dmitry Sergeev 2014
#
package MyWorker;
use strict;
use warnings;    # was missing; surfaces typos and undef misuse early
use threads ('yield', 'exit' => 'threads_only');
use MyDefaults;
use MyRunnableFunction;
use JSON;
use Exporter;

# `our` replaces the obsolete `use vars` pragma; exported interface is
# unchanged.
our @ISA    = qw[Exporter];
our @EXPORT = qw[
    processRequest
    killAllTasks
];

# In-memory task registry: thread id -> { command, thread, started, stopped }.
my %TASK;
# Returns a status summary hashref ({task, command, started, ready})
# for the task registered under the given thread id, or nothing when
# the id is unknown. "ready" is 1 once the thread has finished running.
sub taskState {
    my ($tid) = @_;
    my $task = $TASK{$tid} or return;
    return {
        task    => $tid,
        command => $task->{command},
        started => $task->{started},
        ready   => $task->{thread}->is_running() ? 0 : 1
    };
}
# Stops a single task and removes it from the %TASK registry: a thread
# that is still running is signalled with KILL and detached, while a
# finished one is joined to release its resources.
sub killTask {
    my ($task_id) = @_;
    my $task = $TASK{$task_id} or return;
    if ($task->{thread}->is_running) {
        $task->{thread}->kill('KILL')->detach();
    }
    else {
        $task->{thread}->join();
    }
    delete $TASK{$task_id};
}
# Terminates (or finalizes) every registered task, emptying %TASK.
sub killAllTasks {
    foreach my $tid (keys %TASK) {
        killTask($tid);
    }
}
# Wraps a successful reply payload in the wire format:
# { Success => 1, Payload => ... }.
sub protoSuccess {
    my ($payload) = @_;
    return { Success => 1, Payload => $payload };
}

# Wraps an error description in the wire format:
# { Success => 0, Error => ... }.
sub protoError {
    my ($errorDescription) = @_;
    return { Success => 0, Error => $errorDescription };
}
# Task's autokill & autoclean of results: a self-rearming one-second
# ALRM timer sweeps the %TASK registry. TASK_RUN_TTL / TASK_RESULT_TTL
# come from MyDefaults; a TTL of 0 disables the respective sweep.
$SIG{ALRM} = sub {
foreach my $tid (keys %TASK) {
my $task = $TASK{$tid};
if ($task->{thread}->is_running) {
# running tasks: kill those that exceeded their run TTL
if (TASK_RUN_TTL && $task->{started} + TASK_RUN_TTL < time()) {
print "Worker: Killing task $tid by timeout\n";
killTask $tid;
}
}
else {
# finished tasks: remember when we first saw the thread stopped...
$task->{stopped} = time() unless $task->{stopped};
# ...and discard the unretrieved result once its TTL has expired
if (TASK_RESULT_TTL && $task->{stopped} + TASK_RESULT_TTL < time()) {
# finalizing task & deleting result
print "Worker: Cleaning the result of task $tid\n";
$task->{thread}->join();
delete $TASK{$tid};
}
}
}
# re-arm the timer for the next sweep
alarm 1;
};
alarm 1;
# protocol commands: each key is a request verb, each value a handler
# receiving the request's remaining whitespace-separated arguments and
# returning a protoSuccess/protoError reply hashref.
my %PROTO = (
# RUN <name> <args...>: spawn the named %RUN_FUNCTION entry in a new
# thread and register it in %TASK under its thread id.
RUN => sub {
my $cmdName = shift;
my @prms = @_;
return protoError P_ERROR_PRM_INVALID unless $cmdName;
if (my $func = $RUN_FUNCTION{$cmdName}) {
my $thr = threads->create(sub {
# lets STOP/KILLALL terminate this thread via its KILL signal handler
$SIG{KILL} = sub { print "Thread exiting..\n"; threads->exit(); };
print "Thread: executing command $cmdName (" . join(',', @prms) . ")\n";
return &$func(@prms);
});
# XXX: Seems is no sense to generate unique id specially
my $tid = $thr->tid();
$TASK{$tid} = {
command => $cmdName,
thread => $thr,
started => time()
};
return protoSuccess { task => $tid };
}
else {
return protoError P_ERROR_NO_FUNCTION;
}
},
# GET <task>: fetch the result of a finished task; joins the thread and
# drops the task from the registry on success.
GET => sub {
my $task_id = shift;
return protoError P_ERROR_PRM_INVALID unless $task_id;
if (my $task = $TASK{$task_id}) {
# NOTE(review): "unless ... else" is legal but unidiomatic Perl.
unless ($task->{thread}->is_running()) {
my $reply = protoSuccess { Command => $task->{command}, Result => $task->{thread}->join() };
delete $TASK{$task_id};
return $reply;
}
else {
return protoError P_ERROR_NOT_READY;
}
}
else {
return protoError P_ERROR_NO_TASK;
}
},
# LISTCMDS: get the list of allowed (runnable) command names
LISTCMDS => sub {
return protoSuccess [ keys %RUN_FUNCTION ];
},
# INFO <task>: get the task's state summary (see taskState)
INFO => sub {
my $task_id = shift;
return protoError P_ERROR_PRM_INVALID unless $task_id;
if (my $task = $TASK{$task_id}) {
return protoSuccess taskState $task_id;
}
else {
return protoError P_ERROR_NO_TASK;
}
},
# LIST: get state summaries for all registered tasks
LIST => sub {
return protoSuccess [ map { taskState $_ } keys %TASK ];
},
# STOP <task>: stop the task (only valid while it is still running)
STOP => sub {
my $task_id = shift;
return protoError P_ERROR_PRM_INVALID unless $task_id;
if (my $task = $TASK{$task_id}) {
if ($task->{thread}->is_running) {
killTask $task_id;
return protoSuccess ();
}
else {
return protoError P_ERROR_TASK_NOT_RUNNING;
}
}
else {
return protoError P_ERROR_NO_TASK;
}
},
# KILLALL: kill/finalize all tasks
KILLALL => sub {
killAllTasks();
return protoSuccess ();
}
);
# Parses one whitespace-separated request line ("CMD arg1 arg2 ...")
# and dispatches it through %PROTO (verbs are case-insensitive); the
# handler's reply hashref is serialized to JSON. Unknown or empty
# commands yield a P_ERROR_BAD_REQUEST error reply.
sub processRequest {
    my ($request) = @_;
    my ($command, @args) = split /\s+/, $request;

    unless ($command && exists $PROTO{uc $command}) {
        return to_json protoError P_ERROR_BAD_REQUEST;
    }

    print "Worker: Client requesting: $command " . join(',', @args) . "\n";
    my $handler = $PROTO{uc $command};
    return to_json $handler->(@args);
}

1;
| nexusproject/p5-example-server | lib/MyWorker.pm | Perl | mit | 4,712 |
# See bottom of file for license and copyright information

# VisDoc::Logger -- accumulates a machine-readable run log (parsed
# files, timings) in the package variable $logText, optionally echoing
# progress messages to STDERR.
package VisDoc::Logger;

use strict;
use warnings;

# BUG FIX: File::Spec is used below (splitpath) but was never loaded;
# logParsedFile() died unless some other module happened to have pulled
# File::Spec in already.
use File::Spec;

our $writeDir;      # NOTE(review): unused in this module -- confirm callers need it.
our $logText = '';  # accumulated log buffer, one record per line
our $logStdOut = 0; # when true, progress messages are echoed to STDERR

# Empties the accumulated log buffer.
sub clear {
    $logText = '';
}

# Enables/disables echoing of progress messages to STDERR.
sub setLogStdOut {
    my ($state) = @_;
    $logStdOut = $state;
}

# Writes a progress message to STDERR when echoing is enabled.
sub printOutput {
    my ($message) = @_;
    print STDERR $message if $logStdOut;
}

# Records that $file has been parsed as "PARSED#<basename>#<full path>".
sub logParsedFile {
    my ($file) = @_;
    my ( $volume, $directories, $name ) = File::Spec->splitpath($file);
    $logText .= "PARSED#$name#$file\n";
    printOutput("parsed:$file\n");
}

# Records elapsed wall-clock time as "TIME#seconds=<n>".
sub logTime {
    my ( $startTime, $endTime ) = @_;
    my $seconds = $endTime - $startTime;
    $logText .= "TIME#seconds=$seconds\n";
    printOutput("time:$seconds secs.\n");
}

# Reports a written HTML file (echo only; not added to the log buffer).
sub logWrittenHtml {
    my ($file) = @_;
    printOutput("written:$file\n");
}

# Returns the accumulated log buffer.
sub getLogText {
    return $logText;
}

1;
# VisDoc - Code documentation generator, http://visdoc.org
# This software is licensed under the MIT License
#
# The MIT License
#
# Copyright (c) 2010-2011 Arthur Clemens, VisDoc contributors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
| ArthurClemens/VisDoc | code/perl/lib/VisDoc/Logger.pm | Perl | mit | 2,126 |
package #
Date::Manip::Offset::off072;
# Copyright (c) 2008-2015 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Wed Nov 25 11:44:43 EST 2015
# Data version: tzdata2015g
# Code version: tzcode2015g
# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz
use strict;
use warnings;
require 5.010000;
our ($VERSION);
$VERSION='6.52';
END { undef $VERSION; }
our ($Offset,%Offset);
END {
undef $Offset;
undef %Offset;
}
# $Offset: the UTC offset string represented by this generated module.
$Offset = '+02:57:36';
# %Offset: maps an offset-type index to the zones that use this offset
# (generated data -- do not edit by hand; regenerate via 'tzdata').
%Offset = (
0 => [
'asia/baghdad',
],
);
1;
| jkb78/extrajnm | local/lib/perl5/Date/Manip/Offset/off072.pm | Perl | mit | 851 |
use strict;
use warnings;

# Configures (or removes) an outbound bandwidth cap on eth0 for traffic
# with source port 6003, using the Linux "tc" CBQ qdisc.
#
# Usage:
#   set_out_bw.pl <rate>   e.g. set_out_bw.pl 10mbit
#   set_out_bw.pl clean    remove the shaping qdisc
#
# BUG FIX: the argument-count guard read "@ARGV != 1 && @ARGV != 1"
# (duplicated clause), and the else-arm contained an unreachable
# "@ARGV == 2" die-branch; both cleaned up. Behaviour is unchanged.
if (@ARGV != 1) {
    print "Usage: set_out_bw.pl [bw]\n";
    print "Example: set_out_bw.pl 10mbit\n";
    print "Usage: set_out_bw.pl clean\n";
    die;
}

if ($ARGV[0] eq "clean") {
    # Drop the root qdisc, removing any shaping installed earlier.
    system("tc qdisc del dev eth0 root");
    print "cleaned\n";
} else {
    # Only rates expressed in mbit are accepted (e.g. "10mbit").
    die if (not $ARGV[0] =~ /mbit/);
    # Start from a clean slate, then install a CBQ class limited to the
    # requested rate and steer source-port-6003 traffic into it.
    system("tc qdisc del dev eth0 root");
    system("tc qdisc add dev eth0 root handle 1: cbq avpkt 1500 bandwidth 100mbit");
    system("tc class add dev eth0 parent 1: classid 1:1 cbq rate $ARGV[0] allot 1500 prio 1 bounded isolated");
    system("tc filter add dev eth0 parent 1: protocol ip prio 16 u32 match ip sport 6003 0xffff flowid 1:1");
    #system("tc qdisc add dev eth0 parent 1:1 handle 10: netem delay 50ms limit 2000"); #need "limit" for udp
    #system("tc qdisc add dev eth0 parent 1:1 handle 10: netem delay 50ms loss 2%"); #to further set latency and loss
    print "set to $ARGV[0]\n";
}
| mixianghang/newhttp2 | examples/set_bw_limit.pl | Perl | mit | 1,175 |
package #
Date::Manip::TZ::assred00;
# Copyright (c) 2008-2015 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Wed Nov 25 11:33:48 EST 2015
# Data version: tzdata2015g
# Code version: tzcode2015g
# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz
use strict;
use warnings;
require 5.010000;
# %Dates (defined below) holds this zone's per-year transition table;
# %LastRule holds any recurring rules beyond the explicit transitions.
# Judging by the module name and the 'SRET'/'MAGT' abbreviations in the
# data, this is presumably Asia/Srednekolymsk -- generated data, do not
# edit by hand.
our (%Dates,%LastRule);
END {
undef %Dates;
undef %LastRule;
}
our ($VERSION);
$VERSION='6.52';
END { undef $VERSION; }
%Dates = (
1 =>
[
[ [1,1,2,0,0,0],[1,1,2,10,14,52],'+10:14:52',[10,14,52],
'LMT',0,[1924,5,1,13,45,7],[1924,5,1,23,59,59],
'0001010200:00:00','0001010210:14:52','1924050113:45:07','1924050123:59:59' ],
],
1924 =>
[
[ [1924,5,1,13,45,8],[1924,5,1,23,45,8],'+10:00:00',[10,0,0],
'MAGT',0,[1930,6,20,13,59,59],[1930,6,20,23,59,59],
'1924050113:45:08','1924050123:45:08','1930062013:59:59','1930062023:59:59' ],
],
1930 =>
[
[ [1930,6,20,14,0,0],[1930,6,21,1,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1981,3,31,12,59,59],[1981,3,31,23,59,59],
'1930062014:00:00','1930062101:00:00','1981033112:59:59','1981033123:59:59' ],
],
1981 =>
[
[ [1981,3,31,13,0,0],[1981,4,1,1,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1981,9,30,11,59,59],[1981,9,30,23,59,59],
'1981033113:00:00','1981040101:00:00','1981093011:59:59','1981093023:59:59' ],
[ [1981,9,30,12,0,0],[1981,9,30,23,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1982,3,31,12,59,59],[1982,3,31,23,59,59],
'1981093012:00:00','1981093023:00:00','1982033112:59:59','1982033123:59:59' ],
],
1982 =>
[
[ [1982,3,31,13,0,0],[1982,4,1,1,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1982,9,30,11,59,59],[1982,9,30,23,59,59],
'1982033113:00:00','1982040101:00:00','1982093011:59:59','1982093023:59:59' ],
[ [1982,9,30,12,0,0],[1982,9,30,23,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1983,3,31,12,59,59],[1983,3,31,23,59,59],
'1982093012:00:00','1982093023:00:00','1983033112:59:59','1983033123:59:59' ],
],
1983 =>
[
[ [1983,3,31,13,0,0],[1983,4,1,1,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1983,9,30,11,59,59],[1983,9,30,23,59,59],
'1983033113:00:00','1983040101:00:00','1983093011:59:59','1983093023:59:59' ],
[ [1983,9,30,12,0,0],[1983,9,30,23,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1984,3,31,12,59,59],[1984,3,31,23,59,59],
'1983093012:00:00','1983093023:00:00','1984033112:59:59','1984033123:59:59' ],
],
1984 =>
[
[ [1984,3,31,13,0,0],[1984,4,1,1,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1984,9,29,14,59,59],[1984,9,30,2,59,59],
'1984033113:00:00','1984040101:00:00','1984092914:59:59','1984093002:59:59' ],
[ [1984,9,29,15,0,0],[1984,9,30,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1985,3,30,14,59,59],[1985,3,31,1,59,59],
'1984092915:00:00','1984093002:00:00','1985033014:59:59','1985033101:59:59' ],
],
1985 =>
[
[ [1985,3,30,15,0,0],[1985,3,31,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1985,9,28,14,59,59],[1985,9,29,2,59,59],
'1985033015:00:00','1985033103:00:00','1985092814:59:59','1985092902:59:59' ],
[ [1985,9,28,15,0,0],[1985,9,29,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1986,3,29,14,59,59],[1986,3,30,1,59,59],
'1985092815:00:00','1985092902:00:00','1986032914:59:59','1986033001:59:59' ],
],
1986 =>
[
[ [1986,3,29,15,0,0],[1986,3,30,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1986,9,27,14,59,59],[1986,9,28,2,59,59],
'1986032915:00:00','1986033003:00:00','1986092714:59:59','1986092802:59:59' ],
[ [1986,9,27,15,0,0],[1986,9,28,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1987,3,28,14,59,59],[1987,3,29,1,59,59],
'1986092715:00:00','1986092802:00:00','1987032814:59:59','1987032901:59:59' ],
],
1987 =>
[
[ [1987,3,28,15,0,0],[1987,3,29,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1987,9,26,14,59,59],[1987,9,27,2,59,59],
'1987032815:00:00','1987032903:00:00','1987092614:59:59','1987092702:59:59' ],
[ [1987,9,26,15,0,0],[1987,9,27,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1988,3,26,14,59,59],[1988,3,27,1,59,59],
'1987092615:00:00','1987092702:00:00','1988032614:59:59','1988032701:59:59' ],
],
1988 =>
[
[ [1988,3,26,15,0,0],[1988,3,27,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1988,9,24,14,59,59],[1988,9,25,2,59,59],
'1988032615:00:00','1988032703:00:00','1988092414:59:59','1988092502:59:59' ],
[ [1988,9,24,15,0,0],[1988,9,25,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1989,3,25,14,59,59],[1989,3,26,1,59,59],
'1988092415:00:00','1988092502:00:00','1989032514:59:59','1989032601:59:59' ],
],
1989 =>
[
[ [1989,3,25,15,0,0],[1989,3,26,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1989,9,23,14,59,59],[1989,9,24,2,59,59],
'1989032515:00:00','1989032603:00:00','1989092314:59:59','1989092402:59:59' ],
[ [1989,9,23,15,0,0],[1989,9,24,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1990,3,24,14,59,59],[1990,3,25,1,59,59],
'1989092315:00:00','1989092402:00:00','1990032414:59:59','1990032501:59:59' ],
],
1990 =>
[
[ [1990,3,24,15,0,0],[1990,3,25,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1990,9,29,14,59,59],[1990,9,30,2,59,59],
'1990032415:00:00','1990032503:00:00','1990092914:59:59','1990093002:59:59' ],
[ [1990,9,29,15,0,0],[1990,9,30,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1991,3,30,14,59,59],[1991,3,31,1,59,59],
'1990092915:00:00','1990093002:00:00','1991033014:59:59','1991033101:59:59' ],
],
1991 =>
[
[ [1991,3,30,15,0,0],[1991,3,31,2,0,0],'+11:00:00',[11,0,0],
'MAGST',1,[1991,9,28,15,59,59],[1991,9,29,2,59,59],
'1991033015:00:00','1991033102:00:00','1991092815:59:59','1991092902:59:59' ],
[ [1991,9,28,16,0,0],[1991,9,29,2,0,0],'+10:00:00',[10,0,0],
'MAGT',0,[1992,1,18,15,59,59],[1992,1,19,1,59,59],
'1991092816:00:00','1991092902:00:00','1992011815:59:59','1992011901:59:59' ],
],
1992 =>
[
[ [1992,1,18,16,0,0],[1992,1,19,3,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1992,3,28,11,59,59],[1992,3,28,22,59,59],
'1992011816:00:00','1992011903:00:00','1992032811:59:59','1992032822:59:59' ],
[ [1992,3,28,12,0,0],[1992,3,29,0,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1992,9,26,10,59,59],[1992,9,26,22,59,59],
'1992032812:00:00','1992032900:00:00','1992092610:59:59','1992092622:59:59' ],
[ [1992,9,26,11,0,0],[1992,9,26,22,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1993,3,27,14,59,59],[1993,3,28,1,59,59],
'1992092611:00:00','1992092622:00:00','1993032714:59:59','1993032801:59:59' ],
],
1993 =>
[
[ [1993,3,27,15,0,0],[1993,3,28,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1993,9,25,14,59,59],[1993,9,26,2,59,59],
'1993032715:00:00','1993032803:00:00','1993092514:59:59','1993092602:59:59' ],
[ [1993,9,25,15,0,0],[1993,9,26,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1994,3,26,14,59,59],[1994,3,27,1,59,59],
'1993092515:00:00','1993092602:00:00','1994032614:59:59','1994032701:59:59' ],
],
1994 =>
[
[ [1994,3,26,15,0,0],[1994,3,27,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1994,9,24,14,59,59],[1994,9,25,2,59,59],
'1994032615:00:00','1994032703:00:00','1994092414:59:59','1994092502:59:59' ],
[ [1994,9,24,15,0,0],[1994,9,25,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1995,3,25,14,59,59],[1995,3,26,1,59,59],
'1994092415:00:00','1994092502:00:00','1995032514:59:59','1995032601:59:59' ],
],
1995 =>
[
[ [1995,3,25,15,0,0],[1995,3,26,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1995,9,23,14,59,59],[1995,9,24,2,59,59],
'1995032515:00:00','1995032603:00:00','1995092314:59:59','1995092402:59:59' ],
[ [1995,9,23,15,0,0],[1995,9,24,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1996,3,30,14,59,59],[1996,3,31,1,59,59],
'1995092315:00:00','1995092402:00:00','1996033014:59:59','1996033101:59:59' ],
],
1996 =>
[
[ [1996,3,30,15,0,0],[1996,3,31,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1996,10,26,14,59,59],[1996,10,27,2,59,59],
'1996033015:00:00','1996033103:00:00','1996102614:59:59','1996102702:59:59' ],
[ [1996,10,26,15,0,0],[1996,10,27,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1997,3,29,14,59,59],[1997,3,30,1,59,59],
'1996102615:00:00','1996102702:00:00','1997032914:59:59','1997033001:59:59' ],
],
1997 =>
[
[ [1997,3,29,15,0,0],[1997,3,30,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1997,10,25,14,59,59],[1997,10,26,2,59,59],
'1997032915:00:00','1997033003:00:00','1997102514:59:59','1997102602:59:59' ],
[ [1997,10,25,15,0,0],[1997,10,26,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1998,3,28,14,59,59],[1998,3,29,1,59,59],
'1997102515:00:00','1997102602:00:00','1998032814:59:59','1998032901:59:59' ],
],
1998 =>
[
[ [1998,3,28,15,0,0],[1998,3,29,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1998,10,24,14,59,59],[1998,10,25,2,59,59],
'1998032815:00:00','1998032903:00:00','1998102414:59:59','1998102502:59:59' ],
[ [1998,10,24,15,0,0],[1998,10,25,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[1999,3,27,14,59,59],[1999,3,28,1,59,59],
'1998102415:00:00','1998102502:00:00','1999032714:59:59','1999032801:59:59' ],
],
1999 =>
[
[ [1999,3,27,15,0,0],[1999,3,28,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[1999,10,30,14,59,59],[1999,10,31,2,59,59],
'1999032715:00:00','1999032803:00:00','1999103014:59:59','1999103102:59:59' ],
[ [1999,10,30,15,0,0],[1999,10,31,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[2000,3,25,14,59,59],[2000,3,26,1,59,59],
'1999103015:00:00','1999103102:00:00','2000032514:59:59','2000032601:59:59' ],
],
2000 =>
[
[ [2000,3,25,15,0,0],[2000,3,26,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[2000,10,28,14,59,59],[2000,10,29,2,59,59],
'2000032515:00:00','2000032603:00:00','2000102814:59:59','2000102902:59:59' ],
[ [2000,10,28,15,0,0],[2000,10,29,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[2001,3,24,14,59,59],[2001,3,25,1,59,59],
'2000102815:00:00','2000102902:00:00','2001032414:59:59','2001032501:59:59' ],
],
2001 =>
[
[ [2001,3,24,15,0,0],[2001,3,25,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[2001,10,27,14,59,59],[2001,10,28,2,59,59],
'2001032415:00:00','2001032503:00:00','2001102714:59:59','2001102802:59:59' ],
[ [2001,10,27,15,0,0],[2001,10,28,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[2002,3,30,14,59,59],[2002,3,31,1,59,59],
'2001102715:00:00','2001102802:00:00','2002033014:59:59','2002033101:59:59' ],
],
2002 =>
[
[ [2002,3,30,15,0,0],[2002,3,31,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[2002,10,26,14,59,59],[2002,10,27,2,59,59],
'2002033015:00:00','2002033103:00:00','2002102614:59:59','2002102702:59:59' ],
[ [2002,10,26,15,0,0],[2002,10,27,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[2003,3,29,14,59,59],[2003,3,30,1,59,59],
'2002102615:00:00','2002102702:00:00','2003032914:59:59','2003033001:59:59' ],
],
2003 =>
[
[ [2003,3,29,15,0,0],[2003,3,30,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[2003,10,25,14,59,59],[2003,10,26,2,59,59],
'2003032915:00:00','2003033003:00:00','2003102514:59:59','2003102602:59:59' ],
[ [2003,10,25,15,0,0],[2003,10,26,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[2004,3,27,14,59,59],[2004,3,28,1,59,59],
'2003102515:00:00','2003102602:00:00','2004032714:59:59','2004032801:59:59' ],
],
2004 =>
[
[ [2004,3,27,15,0,0],[2004,3,28,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[2004,10,30,14,59,59],[2004,10,31,2,59,59],
'2004032715:00:00','2004032803:00:00','2004103014:59:59','2004103102:59:59' ],
[ [2004,10,30,15,0,0],[2004,10,31,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[2005,3,26,14,59,59],[2005,3,27,1,59,59],
'2004103015:00:00','2004103102:00:00','2005032614:59:59','2005032701:59:59' ],
],
2005 =>
[
[ [2005,3,26,15,0,0],[2005,3,27,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[2005,10,29,14,59,59],[2005,10,30,2,59,59],
'2005032615:00:00','2005032703:00:00','2005102914:59:59','2005103002:59:59' ],
[ [2005,10,29,15,0,0],[2005,10,30,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[2006,3,25,14,59,59],[2006,3,26,1,59,59],
'2005102915:00:00','2005103002:00:00','2006032514:59:59','2006032601:59:59' ],
],
2006 =>
[
[ [2006,3,25,15,0,0],[2006,3,26,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[2006,10,28,14,59,59],[2006,10,29,2,59,59],
'2006032515:00:00','2006032603:00:00','2006102814:59:59','2006102902:59:59' ],
[ [2006,10,28,15,0,0],[2006,10,29,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[2007,3,24,14,59,59],[2007,3,25,1,59,59],
'2006102815:00:00','2006102902:00:00','2007032414:59:59','2007032501:59:59' ],
],
2007 =>
[
[ [2007,3,24,15,0,0],[2007,3,25,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[2007,10,27,14,59,59],[2007,10,28,2,59,59],
'2007032415:00:00','2007032503:00:00','2007102714:59:59','2007102802:59:59' ],
[ [2007,10,27,15,0,0],[2007,10,28,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[2008,3,29,14,59,59],[2008,3,30,1,59,59],
'2007102715:00:00','2007102802:00:00','2008032914:59:59','2008033001:59:59' ],
],
2008 =>
[
[ [2008,3,29,15,0,0],[2008,3,30,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[2008,10,25,14,59,59],[2008,10,26,2,59,59],
'2008032915:00:00','2008033003:00:00','2008102514:59:59','2008102602:59:59' ],
[ [2008,10,25,15,0,0],[2008,10,26,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[2009,3,28,14,59,59],[2009,3,29,1,59,59],
'2008102515:00:00','2008102602:00:00','2009032814:59:59','2009032901:59:59' ],
],
2009 =>
[
[ [2009,3,28,15,0,0],[2009,3,29,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[2009,10,24,14,59,59],[2009,10,25,2,59,59],
'2009032815:00:00','2009032903:00:00','2009102414:59:59','2009102502:59:59' ],
[ [2009,10,24,15,0,0],[2009,10,25,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[2010,3,27,14,59,59],[2010,3,28,1,59,59],
'2009102415:00:00','2009102502:00:00','2010032714:59:59','2010032801:59:59' ],
],
2010 =>
[
[ [2010,3,27,15,0,0],[2010,3,28,3,0,0],'+12:00:00',[12,0,0],
'MAGST',1,[2010,10,30,14,59,59],[2010,10,31,2,59,59],
'2010032715:00:00','2010032803:00:00','2010103014:59:59','2010103102:59:59' ],
[ [2010,10,30,15,0,0],[2010,10,31,2,0,0],'+11:00:00',[11,0,0],
'MAGT',0,[2011,3,26,14,59,59],[2011,3,27,1,59,59],
'2010103015:00:00','2010103102:00:00','2011032614:59:59','2011032701:59:59' ],
],
2011 =>
[
[ [2011,3,26,15,0,0],[2011,3,27,3,0,0],'+12:00:00',[12,0,0],
'MAGT',0,[2014,10,25,13,59,59],[2014,10,26,1,59,59],
'2011032615:00:00','2011032703:00:00','2014102513:59:59','2014102601:59:59' ],
],
2014 =>
[
[ [2014,10,25,14,0,0],[2014,10,26,1,0,0],'+11:00:00',[11,0,0],
'SRET',0,[9999,12,31,0,0,0],[9999,12,31,11,0,0],
'2014102514:00:00','2014102601:00:00','9999123100:00:00','9999123111:00:00' ],
],
);
# Empty: the final transition in %Dates above extends to year 9999, so
# there are no recurring rules for this zone.
%LastRule = (
);
1;
| jkb78/extrajnm | local/lib/perl5/Date/Manip/TZ/assred00.pm | Perl | mit | 16,262 |
#!/usr/bin/perl
# API for querying goals.
# INPUT parameters:
#  num       - limits the number of results. Default 10000.
#  startTime - The beginning of the time range. Defaults to 30 days before endTime.
#  endTime   - The end of the time range. Defaults to the current date.
#  [onlyTopGoals] - Optional argument for getting only top goals. Default false.
#  datetime format = "2013-09-10T23:00:14+09:00"
#
#my $dateTimeFormat = '%Y.%m.%dT%T%O';
my $dateTimeFormat = '%Y.%m.%dT%T';#TODO Add timezone handling
# OUTPUT
# TODO: Output JSONP
# Format: JSON
#  title
#  goalPath - string representation of the path from top goal to the current goal
#  creator
#  dateTime
#  subGoal - list of subgoal urls
use DateTime;
use Date::Parse;
use DateTime::Format::Strptime;
use JSON;
use Try::Tiny;
# NOTE(review): this script relies on sparql.pl to load CGI and
# URI::Escape (CGI->new / uri_unescape are used below without an
# explicit "use") -- confirm.
require("sparql.pl");
# Configuration
my $graph_uri = "http://collab.open-opinion.org";
#$debug = true;# Uncomment this line to run in debug mode.
# End config
my $q = CGI->new;
my @params = $q->param();
# Parse parameters. $num/$startTime/$endTime stay package globals on
# purpose: the rest of the script (running without strict) reads them.
$num = uri_unescape( $q->param('num') );
if ( !defined( $num ) ){
    $num = 10000;
}
if ( defined( $q->param('endTime') ) ){
    my $parser = DateTime::Format::Strptime->new(
        pattern  => $dateTimeFormat,
        on_error => 'undef',
    );
    # BUG FIX: this used to call uri_escape() on the parameter, which
    # percent-encodes the raw value and guarantees the parse fails (so
    # endTime was silently ignored and always defaulted to "now").
    # CGI->param() already returns the decoded value.
    $endTime = $parser->parse_datetime( $q->param('endTime') );
}
if( !defined($endTime) ){
    $endTime = DateTime->now();
}
if ( defined( $q->param('startTime') ) ){
    my $parser = DateTime::Format::Strptime->new(
        pattern => $dateTimeFormat,
        #on_error => 'undef',
    );
    $startTime = $parser->parse_datetime( $q->param('startTime') );
}
if ( !defined ( $startTime ) ){
    # Default time range: the 30 days leading up to endTime.
    $startTime = $endTime->clone();
    $startTime->add( days => -30 );
}
my $dateType = $q->param( 'dateType' );
my $onlyTopGoals = $q->param( 'onlyTopGoals' );
my $created = $q->param( 'created' );
my $keyword = $q->param( 'keyword' );
my $goalStatus = $q->param( 'goalStatus' );
# ---- Generate the SPARQL query ----
# Prefix
$sparql = 'PREFIX dc: <http://purl.org/dc/terms/>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX foaf: <http://xmlns.com/foaf/0.1/> ';
# Select: one row per goal with its optional description, submission
# date, parent goal and the number of direct subgoals.
$sparql .= "select distinct ?goal ?title ?desc ?parentGoal ?submDate (COUNT(?subg) AS ?CntSubGoals)
where {
?goal rdf:type socia:Goal;
dc:title ?title.
OPTIONAL { ?goal dc:description ?desc. }
OPTIONAL { ?goal dc:dateSubmitted ?submDate }
OPTIONAL { ?goal socia:subGoalOf ?parentGoal }
OPTIONAL { ?goal socia:subGoal ?subg.}
}
GROUP BY ?goal ?title ?desc ?parentGoal ?submDate
LIMIT $num";
#
## Debug print
if ( $debug ){
    # Print parameters
    print "Content-type: text/text\r\n\r\n";
    print "DEBUG\n\n";
    print "Params:\n";
    foreach $key ( $q->param ){
        print "$key: " . $q->param($key) ."\n"
    }
    print "\n\nNum: " . $num . "\n";
    print "startTime: $startTime \n";
    print "endTime: $endTime \n";
    # BUG FIX: used to print the never-assigned global $onlyTop.
    print "onlyTopGoals: $onlyTopGoals \n";
    print "\n\nThe query!\n";
    print $sparql;
    print "\n\nThe query url encoded \n";
    print uri_escape( $sparql );
    exit();
}
print "Access-Control-Allow-Origin: *\n";
print "Content-Type: application/json; charset=UTF-8\n\n";
my $result_json = execute_sparql( $sparql );
my $test = decode_json $result_json;
# Virtuoso's JSON is awkward to consume directly; build a flat, well
# formatted dataset instead.
# BUG FIX: this was "my %result = {};" (a hash initialized with a hash
# reference); the code below then used the unrelated package variable
# $result, which only "worked" because the script runs without strict.
my $result = {};
$result->{goals} = [];
# Loop over all result rows and flatten each binding.
for my $row ( @{ $test->{results}{bindings} } ) {
    push @{ $result->{goals} }, {
        # BUG FIX: the SELECT aliases the count as ?CntSubGoals (capital
        # C); the old lookup read {cntSubGoals} and was always undef.
        cntSubGoals => $row->{CntSubGoals}{value},
        url         => $row->{goal}{value},
        title       => $row->{title}{value},
        # NOTE(review): creator was (and still is) populated from the
        # title binding -- the query selects no creator. Kept as-is to
        # preserve the existing output shape; confirm intended behaviour.
        creator     => $row->{title}{value},
        creatorUrl  => "http://test.com",
        dateTime    => $row->{submDate}{value},
    };
}
# Build the paths to the root node for every goal.
# TODO IMPORTANT fix to use concurrency. Needs concurrent hash or....
for my $goal ( @{ $result->{goals} } ) {
    $goal->{goalPath} = BuildPath( $goal->{url} );
}
# Return the result
my $js = JSON->new;
print $js->pretty->encode( $result );
exit;
# END
# Builds the " > "-separated breadcrumb of goal titles from the given
# goal URI up to its topmost ancestor (root first in the returned
# string), following socia:subGoalOf links one SPARQL query at a time.
# Returns "" when the first query already fails.
sub BuildPath{
    my ($workURI) = @_;
    my $resultString = "";
    my $isFirst = 1;
    while ( $workURI ){
        my $query = "PREFIX dc: <http://purl.org/dc/terms/>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
select distinct ?goal ?title ?parentGoal
where {
?goal rdf:type socia:Goal;
dc:title ?title.
OPTIONAL { ?goal socia:subGoalOf ?parentGoal }
FILTER ( ?goal = <$workURI>)}";
        try{
            my $result_json = decode_json( execute_sparql( $query ) );
            my $binding = $result_json->{results}{bindings}[0];
            if ( $isFirst == 1 ){
                $isFirst = 0;
            } else {
                $resultString = " > " . $resultString;
            }
            $resultString = $binding->{title}{value} . $resultString;
            # Climb one level; parentGoal is absent at the root, which
            # leaves $workURI undef and ends the loop.
            $workURI = $binding->{parentGoal}{value};
        } catch {
            # BUG FIX: this assigned the bareword "False" -- under
            # "no strict" that is the string 'False', which is TRUE and
            # caused an infinite loop on query failure.
            $workURI = undef;
        };
        # (Removed: the unused @resultArray/%pathPoint bookkeeping,
        # which also pushed the same global hashref every iteration,
        # and a stray "print $resultString" missing its semicolon.)
    }
    return $resultString;
}
| siramatu/goalshare | Site/cgi-bin/query_goals.pl | Perl | mit | 5,825 |
package #
Date::Manip::Offset::off298;
# Copyright (c) 2008-2015 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Wed Nov 25 11:44:43 EST 2015
# Data version: tzdata2015g
# Code version: tzcode2015g
# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz
use strict;
use warnings;
require 5.010000;
our ($VERSION);
$VERSION='6.52';
END { undef $VERSION; }
our ($Offset,%Offset);
END {
undef $Offset;
undef %Offset;
}
# $Offset: the UTC offset string represented by this generated module.
$Offset = '-04:20:52';
# %Offset: maps an offset-type index to the zones that use this offset
# (generated data -- do not edit by hand; regenerate via 'tzdata').
%Offset = (
0 => [
'america/argentina/tucuman',
],
);
1;
| jkb78/extrajnm | local/lib/perl5/Date/Manip/Offset/off298.pm | Perl | mit | 864 |
#!/usr/bin/perl
require 'quickies.pl'
($User,$Planet,$AuthCode,$Alliance)=split(/&/,$ENV{QUERY_STRING});
$user_information = $MasterPath . "/User Information";
dbmopen(%authCode, "$user_information/accesscode", 0777);
if(($AuthCode ne $authCode{$User}) || ($AuthCode eq "")){
print "<SCRIPT>alert(\"Security Failure. Please notify the GSD team immediately.\");history.back();</SCRIPT>";
die;
}
dbmclose(%authCode);
$Header = "#333333";$HeaderFont = "#CCCCCC";$Sub = "#999999";$SubFont = "#000000";$Content = "#666666";$ContentFont = "#FFFFFF";
$AlliancePath = $MasterPath . "/se/Planets/$Planet/alliances/$Alliance";
$HomePath = $MasterPath . "/se/Planets/";
open (IN, "$AlliancePath/members.txt");
@Members = <IN>;
close (IN);
&chopper (@Members);
$LeaderFlag = $LCounter = 0;
foreach $Item (@Members) {
($Rank,$Leader,$Blah) = split(/\|/,$Item);
if (substr ($Item, 0, 1) eq "0" && (-e "$HomePath/$Blah/users/$Leader")) {$LCounter ++;}
if ($Leader eq $User && $Rank == 0) {
$LeaderFlag = 1;
} elsif ($Leader eq $User && $Rank == 1) {
$LeaderFlag = 3;
} elsif ($Leader eq $User && $Rank > 1) {
$LeaderFlag = 2;
}
}
if ($LCounter == 0 && $LeaderFlag == 3) {
$LeaderFlag = 1;
}
unless ($LeaderFlag == 1) {
print qq!Location: http://www.bluewand.com/cgi-bin/classic/AllyDisplay.pl?$User&$Planet&$AuthCode&$Alliance\r\n\r\n!;
die;
}
print "Content-type: text/html\n\n";
$Path = "AllyLetter.pl";
$Path2 = "AllyUtil.pl";
$AllyPath = $MasterPath . "/se/Planets/$Planet/alliances/$Alliance";
$AllyTekPath = $MasterPath . "/se/Planets/$Planet/alliances/$Alliance/tech";
$NAlliance = $Alliance;
$NAlliance =~ tr/_/ /;
&parse_form;
open (DATAIN, "$AllyPath/ranks.txt");
flock (DATAIN, 1);
@Ranks = <DATAIN>;
close (DATAIN);
&chopper (@Ranks);
if ($data{'one'} ne "") {@Ranks[0] = $data{'one'}}
if ($data{'two'} ne "") {@Ranks[1] = $data{'two'}}
if ($data{'three'} ne "") {@Ranks[2] = $data{'three'}}
if ($data{'four'} ne "") {@Ranks[3] = $data{'four'}}
if ($data{'five'} ne "") {@Ranks[4] = $data{'five'}}
if ($data{'six'} ne "") {@Ranks[5] = $data{'six'}}
if ($data{'AllianceMessage'} ne "") {
open (OUT, ">$AllyPath/message.txt");
flock (OUT, 2);
print OUT qq£$data{'AllianceMessage'}\n£;
close (OUT);
}
if ($data{'AllianceMessage'} eq "") {
open (IN, "$AllyPath/message.txt");
flock (IN, 1);
$AllianceMessage = <IN>;
close (IN);
} else {$AllianceMessage=$data{'AllianceMessage'}}
open (DATAOUT, ">$AllyPath/ranks.txt");
flock (DATAOUT, 2);
foreach $WriteLine (@Ranks) {
print DATAOUT "$WriteLine\n";
}
close (DATAOUT);
# Read pending applicants: each line is "name|planet", collected into the
# global %Applicants hash.
open (DATAIN, "$AllyPath/applicant.txt");
flock (DATAIN, 1);
@Apps = <DATAIN>;
close (DATAIN);
&chopper (@Apps);
foreach $Run (@Apps) {
($Ap,$Plan) = split(/\|/,$Run);
$Applicants{$Ap} = $Plan;
}
# Read the member roster: each line is "rank|name|planet".  Note that @Apps
# is reused here (and again below when the roster is rewritten).
open (DATAIN, "$AllyPath/members.txt");
flock (DATAIN, 1);
@Apps = <DATAIN>;
close (DATAIN);
&chopper (@Apps);
$Numbers=0;
# Build <OPTION> tags for the leadership-transfer drop-down; the option VALUE
# keeps underscores while the label shows spaces.
foreach $Run (@Apps) {
($Rank,$Member,$Planets2) = split(/\|/,$Run);
$Members = $Member;
$Members =~ tr/_/ /;
@Options[$Numbers] = qqÞ<OPTION VALUE=$Member>$Members</OPTION>Þ;
$Numbers++;
}
# Emit the page header, the disband/transfer controls, the rank-title edit
# form and the applicant-table header.  This whole print is ONE interpolated
# qqÞ...Þ string, so no comments can be placed inside it.
print qqÞ
<html>
<SCRIPT>
parent.frames[1].location.reload()
</SCRIPT>
<body BGCOLOR="#000000" text="#FFFFFF">
<table border=1 cellspacing=0 width=100%><TR><TD BGCOLOR="$Header"><CENTER><B><font FACE="Arial" size="-1">Set Alliance Stats</font></TD></TR></TAble><BR><BR>
<CENTER>
<Table border=0 width=100%><TR height=60><TD width=50%>
<table border=1 cellspacing=0 width=100%>
<TR><TD BGCOLOR="$Header"><Center><font face=verdana size=-1>Disband Alliance</TD></TR>
<TR valign=top><TD BGCOLOR="$Content" valign=middle><Center><font face=verdana size=-1><img src="http://www.bluewand.com/classic/images/Ingame/invis.gif" height=20 width=1><a href="http://www.bluewand.com/cgi-bin/classic/AllyUtil.pl?$User&$Planet&$AuthCode&$Alliance&&1&22111" target ="Frame5" ONMOUSEOVER = "parent.window.status='Disband Alliance';return true" ONMOUSEOUT = "parent.window.status='';return true" STYLE="text-decoration:none;color:white">Click to disband Alliance</a></TD></TR>
</table></TD><TD width=50%>
<table border=1 cellspacing=0 width=100%>
<TR><TD BGCOLOR="$Header"><Center><font face=verdana size=-1>Transfer Leadership</TD></TR>
<form method=post action="http://www.bluewand.com/cgi-bin/classic/AllyUtil2.pl?$User&$Planet&$AuthCode&$Alliance&blah&1&10101"><TR valign=top height=10><TD BGCOLOR="$Content"><Center><font face=verdana size=-2><select name=transfer>@Options</select> <input type=submit value="Transfer" name=submit2></TD></form></TR>
</table></TD></TR></table>
<BR>
<FORM method="POST" action="http://www.bluewand.com/cgi-bin/classic/AllySet.pl?$User&$Planet&$AuthCode&$Alliance">
<table border=1 cellspacing=0 width=60%>
<TR BGCOLOR="$Header"><TD><font FACE="Arial" size="-1">Rank One</TD><TD><font FACE="Arial" size="-1">Rank Two</TD><TD><font FACE="Arial" size="-1">Rank Three</TD><TR>
<TR BGCOLOR="$Content"><TD><font FACE="Arial" size="-1"><INPUT TYPE="text" size="20" name=one value="@Ranks[0]"></TD><TD><font FACE="Arial" size="-1"><INPUT TYPE="text" size="20" name=two value="@Ranks[1]"></TD><TD><font FACE="Arial" size="-1"><INPUT TYPE="text" size="20" name=three value="@Ranks[2]"></TD><TR>
<TR BGCOLOR="$Header"><TD><font FACE="Arial" size="-1">Rank Four</TD><TD><font FACE="Arial" size="-1">Rank Five</TD><TD><font FACE="Arial" size="-1">Rank Six</TD><TR>
<TR BGCOLOR="$Content"><TD><font FACE="Arial" size="-1"><INPUT TYPE="text" size="20" name=four value="@Ranks[3]"></TD><TD><font FACE="Arial" size="-1"><INPUT TYPE="text" size="20" name=five value="@Ranks[4]"></TD><TD><font FACE="Arial" size="-1"><INPUT TYPE="text" size="20" name=six value="@Ranks[5]"></TD><TR>
</table><BR><BR></center>
<Font face=verdana>
Applicants
<table border=0 cellspacing=0 width=100%>
<TR><TD width=80%>
<table border=1 cellspacing=0 width=100%>
<TR BGCOLOR="$Header"><TD><font FACE="Arial" size="-1">Nations Name</TD><TD><font FACE="Arial" size="-1">Read Message</TD><TD><font FACE="Arial" size="-1">View National Statistics</TD></TR>Þ;
# One table row per applicant, linking to the applicant's message via
# AllyLetter.pl ($Path).
foreach $Item (keys(%Applicants)) {
$ApCount++;
$Aps = $Item;
$Aps =~ tr/_/ /;
#<a href="$Path?$User&$Planet&$AuthCode&$Alliance" target ="Frame5" ONMOUSEOVER = "parent.window.status='Application Message';return true" ONMOUSEOUT = "parent.window.status='';return true" STYLE="text-decoration:none;color:white">
print qqÞ
<TR BGCOLOR="$Content"><TD><font FACE="Arial" size="-1">$Aps</TD><TD><font FACE="Arial" size="-1"><a href="$Path?$User&$Planet&$AuthCode&$Alliance&$Item&$Applicants{$Item}" target ="Frame5" ONMOUSEOVER = "parent.window.status='Application Message';return true" ONMOUSEOUT = "parent.window.status='';return true" STYLE="text-decoration:none;color:white">Message</a></TD><TD><font FACE="Arial" size="-1">View National Statistics</TD></TR>Þ;
}
# $ApCount is only incremented above; if it never ran there are no applicants.
if ($ApCount < 1) {
print qq!<TR><TD BGCOLOR=$Content colspan=3><font FACE="Arial" size="-1"><CENTER>There are currently no applicants</center></TD></TR>!;
}
print qqÞ
</table></TD><TD><BR>
<table border=1 cellspacing=0 width=100%>Þ;
# Second applicant column: Accept / Reject action links via AllyUtil.pl.
foreach $Item (keys(%Applicants)) {
$Aps = $Item;
$Aps =~ tr/_/ /;
print qqÞ
<TR>
<TD BGCOLOR=$Content><font FACE="Arial" size="-1"><a href="$Path2?$User&$Planet&$AuthCode&$Alliance&$Item&$Applicants{$Item}&10101" target ="Frame5" ONMOUSEOVER = "parent.window.status='Accept $Aps';return true" ONMOUSEOUT = "parent.window.status='';return true" STYLE="text-decoration:none;color:white">Accept</a></TD>
<TD BGCOLOR=$Content><font FACE="Arial" size="-1"><a href="$Path2?$User&$Planet&$AuthCode&$Alliance&$Item&$Applicants{$Item}&10111" target ="Frame5" ONMOUSEOVER = "parent.window.status='Accept $Aps';return true" ONMOUSEOUT = "parent.window.status='';return true" STYLE="text-decoration:none;color:white">Reject</a></TD>
</TR>Þ;
}
# Member-table header.
print qqÞ
</table>
</TD></TR></table>
<BR><BR>
<font face=verdana>Members
<table border=1 cellspacing=0 width=60% bgcolor=$Content>
<TR bgcolor=$Header><TD><font face=verdana size=-1>Member</TD><TD><font face=arial size=-1>Rank</TD><TD><font face=arial size=-1>Expel Member</TD><TD><font face=arial size=-1>Last Played</TD></TR>Þ;
# Rewrite the member roster while rendering the member table.  Rank changes
# submitted via the per-member <select> fields (keyed by the underscored
# member name, still in $Members at that point) are applied on the way.
# NOTE(review): unlike the writes above, this open truncates without taking a
# flock -- pre-existing race, left as-is.
open (OUT, ">$AllyPath/members.txt");
foreach $Run (@Apps) {
($Rank,$Member,$MPlanet) = split(/\|/,$Run);
$Members = $Member;
if ($data{$Members} ne "") {$Rank=$data{$Members}}
print OUT "$Rank\|$Members\|$MPlanet\n";
$Member =~ tr/_/ /;
# Pre-select the member's current rank in the drop-down.  $a..$f are used as
# plain flag variables here (note: $a/$b are also sort()'s package variables).
if ($Rank == 0) {$a = "SELECTED";$b=$c=$d=$e=$f=""}
if ($Rank == 1) {$b = "SELECTED";$a=$c=$d=$e=$f=""}
if ($Rank == 2) {$c = "SELECTED";$b=$a=$d=$e=$f=""}
if ($Rank == 3) {$d = "SELECTED";$b=$c=$a=$e=$f=""}
if ($Rank == 4) {$e = "SELECTED";$b=$c=$d=$a=$f=""}
if ($Rank == 5) {$f = "SELECTED";$b=$c=$d=$e=$a=""}
# The rank-0 leader cannot be reassigned, so only that single option is
# offered for them.
if ($Rank == 0) {
@RankOptions = qqÞ<OPTION VALUE=0 $a>$Ranks[0]</OPTION></select>Þ;
} else {
@RankOptions = qqÞ
<OPTION VALUE=0 $a>$Ranks[0]</OPTION>
<OPTION VALUE=1 $b>$Ranks[1]</OPTION>
<OPTION VALUE=2 $c>$Ranks[2]</OPTION>
<OPTION VALUE=3 $d>$Ranks[3]</OPTION>
<OPTION VALUE=4 $e>$Ranks[4]</OPTION>
<OPTION VALUE=5 $f>$Ranks[5]</OPTION></select>Þ;
}
# Expel / mail links, and days since the member last played judged by the
# age (-C, days since inode change relative to script start) of turns.txt.
$MemberPath = $MasterPath . "/se/Planets/$MPlanet/users/$Member";
$ExpelLink = qqÞ<a href="http://www.bluewand.com/cgi-bin/classic/AllyUtil2.pl?$User&$Planet&$AuthCode&$Alliance&$Members&&11101" target ="Frame5" ONMOUSEOVER = "parent.window.status='Expel Member';return true" ONMOUSEOUT = "parent.window.status='';return true" STYLE="text-decoration:none;color:white">Expel $Member</a>Þ;
$MailLink = qqÞ<a href="http://www.bluewand.com/cgi-bin/classic/Message.pl?$User&$Planet&$AuthCode&110101&$Member"target ="Frame5" ONMOUSEOVER = "parent.window.status='Mail Member';return true" ONMOUSEOUT = "parent.window.status='';return true" STYLE="text-decoration:none;color:white">$Member</a>Þ;
$Age = int(-C "$MemberPath/turns.txt");
print qqÞ<TR bgcolor=><TD><font face=verdana size=-1>$MailLink</TD><TD><font face=verdana size=-2><center><select name=$Members>@RankOptions</TD><TD><font face=arial size=-1>$ExpelLink</TD><TD><font face=arial size=-1><center>$Age days ago</TD></TR>Þ;
}
close (OUT);
# Alliances that have been granted "Faction" status additionally get the
# alliance-wide message box and the shared-technology table.
if (-e "$AllyPath/Faction.txt") {
print qqÞ
</table><BR><BR>
<table width=100% border=1 cellspacing=0>
<TR><TD bgcolor=$Header><Center><font face=verdana size=-1>Alliance-Wide Message</td></tr>
<Tr><TD bgcolor=$Content><font face=verdana size=-1><center><textarea name="AllianceMessage" wrap=virtual cols=70 rows=5>$AllianceMessage</textarea></TD></TR>
</table>
<font face=verdana>Shared Technology<BR>
<Table border=1 cellspacing=0 width=60% bgcolor=$Content>Þ;
# Each file in the tech directory is one shared research project: *.apl means
# not started, anything else is treated as an in-progress *.wrk file holding
# "target\ncurrent" progress counts.
opendir (DIR, "$AllyTekPath");
@AllyTeks = readdir (DIR);
closedir (DIR);
foreach $Item (@AllyTeks) {
if ($Item ne '.' and $Item ne '..') {
# NOTE(review): the dot is unescaped, so /.apl/i matches "Xapl" anywhere in
# the name, and s/.apl// also removes the character before "apl" --
# pre-existing quirk, left as-is.
if ($Item =~ /.apl/i) {
$Itema = $Item;
$Item =~ s/.apl//;
$Itema =~ tr/ /_/;
$Option = qqÞ<a href="$Path2?$User&$Planet&$AuthCode&$Alliance&$Itema&1&11111" target ="Frame5" ONMOUSEOVER = "parent.window.status='Begin Shared Research';return true" ONMOUSEOUT = "parent.window.status='';return true" STYLE="text-decoration:none;color:white">Begin Cooperating</a>Þ;
$Img = qq!<Td><Center><IMG SRC="http://www.shatteredempires.com/SE/images/notstarted2.gif"></TD>!;
}
else {
# Progress file: line 0 = required points, line 1 = accumulated points.
# (@Infos[N] one-element slices behave like $Infos[N] here.)
open (IN, "$AllyTekPath/$Item");
@Infos = <IN>;
close (IN);
&chopper (@Infos);
$Itema = $Item;
$Itema =~ tr/ /_/;
if (@Infos[1] >= @Infos[0] and @Infos[1] != 0) {
$Option = qqÞ<a href="$Path2?$User&$Planet&$AuthCode&$Alliance&$Itema&1&21111" target ="Frame5" ONMOUSEOVER = "parent.window.status='Distribute Completed Technology';return true" ONMOUSEOUT = "parent.window.status='';return true" STYLE="text-decoration:none;color:white">Distribute</a>Þ;
$Img = qq!<Td><Center><IMG SRC="http://www.shatteredempires.com/SE/images/finished2.gif"></TD>!;
} else {
if (@Infos[1] != 0) {
$Opt = int((@Infos[1]/@Infos[0])*100);
} else {$Opt = 0}
$Option = qqÞ$Opt% CompletedÞ;
$Img = qq!<Td><Center><IMG SRC="http://www.shatteredempires.com/SE/images/inprogress2.gif"></TD>!;
}
$Item =~ s/.wrk//;
}
print qqÞ<TR>$Img<TD><FONT face=verdana size=-1>$Item</TD><TD><FONT face=arial size=-1>$Option</TD></TR>Þ;
}
}
} else {$Tank = 1}
print qqÞ</table></font><BR>Þ;
# Non-faction alliances instead get the message box plus a link to apply for
# faction (alliance) status.  ($Tank is only ever set in the else above.)
if ($Tank == 1) {
print qq!
<table width=100% border=1 cellspacing=0>
<TR><TD bgcolor=$Header><Center><font face=verdana size=-1>Alliance-Wide Message</td></tr>
<Tr><TD bgcolor=$Content><font face=verdana size=-1><center><textarea name="AllianceMessage" wrap=virtual cols=70 rows=5>$AllianceMessage</textarea></TD></TR>
</table><BR><BR>
<center>
<Table width=50% border=1 cellspacing=0><TR><TD bgcolor="$Content"><center><FONT face=verdana size=-1><A href="http://www.bluewand.com/cgi-bin/classic/AllyFaction.pl?$User&$Planet&$AuthCode&$Alliance" target ="Frame5" ONMOUSEOVER = "parent.window.status='Alliance Application';return true" ONMOUSEOUT = "parent.window.status='';return true" STYLE="text-decoration:none;color:white">Apply for Alliance Status</A></TD></TR></table><BR><BR>!;
}
# Close the rank/message <FORM> opened in the header block above.
print qqÞ
<center><font size=-1><input type=submit name=submit value="Make Changes">
</FORM>
Þ;
# Decode an application/x-www-form-urlencoded POST body from STDIN into the
# global %data hash (field name => value).  Values get HTML comments and tags
# stripped as a crude anti-markup measure.
sub parse_form {
# Get the input
read(STDIN, $buffer, $ENV{'CONTENT_LENGTH'});
# Split the name-value pairs
@pairs = split(/&/, $buffer);
foreach $pair (@pairs) {
# Bug fix: limit the split to two fields so literal '=' characters inside a
# value (e.g. in the AllianceMessage textarea) are no longer truncated.
($name, $value) = split(/=/, $pair, 2);
# Un-Webify plus signs and %-encoding -- now applied to the field name as
# well, since browsers URL-encode names exactly like values.
$name =~ tr/+/ /;
$name =~ s/%([a-fA-F0-9][a-fA-F0-9])/pack("C", hex($1))/eg;
$value =~ tr/+/ /;
$value =~ s/%([a-fA-F0-9][a-fA-F0-9])/pack("C", hex($1))/eg;
# Strip HTML comments and tags from submitted values.
$value =~ s/<!--(.|\n)*-->//g;
$value =~ s/<([^>]|\n)*>//g;
$data{$name} = $value;
}
}
#
#sub chopper{
# foreach $k(@_){
# chop($k);
# }
#}
#
#sub Space {
# local($_) = @_;
# 1 while s/^(-?\d+)(\d{3})/$1 $2/;
# return $_;
#}
| cpraught/shattered-empires | AllySet.pl | Perl | mit | 13,682 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=head1 AUTHOR
Juguang Xiao <juguang@tll.org.sg>
=cut
=head1 NAME
Bio::EnsEMBL::Utils::Converter::bio_ens_transcript - the instance converter
=head1 SYNOPSIS
=head1 DESCRIPTION
=head1 METHODS
=cut
package Bio::EnsEMBL::Utils::Converter::bio_ens_transcript;

use strict;
use vars qw(@ISA);
use Bio::EnsEMBL::Transcript;
use Bio::EnsEMBL::Utils::Converter::bio_ens;
@ISA = qw(Bio::EnsEMBL::Utils::Converter::bio_ens);

# Convert one Bio::SeqFeature::Gene::Transcript into a Bio::EnsEMBL::Transcript.
#
# Args    : $arg - a Bio::SeqFeature::Gene::Transcript (anything else throws)
# Returns : a Bio::EnsEMBL::Transcript covering the same start/end, built from
#           the converted, ordered exons of the input transcript.
sub _convert_single {
    my ($self, $arg) = @_;
    unless ($arg->isa('Bio::SeqFeature::Gene::Transcript')) {
        $self->throw("A Bio::SeqFeature::Gene::Transcript object needed");
    }
    my $transcript = $arg;

    # Delegate exon conversion to the nested converter, propagating the
    # contig and analysis this converter was configured with.
    my @exons = $transcript->exons_ordered;
    $self->{_converter_for_exons}->contig($self->contig);
    $self->{_converter_for_exons}->analysis($self->analysis);
    my $ens_exons = $self->{_converter_for_exons}->convert(\@exons);

    # NOTE(review): the exons are passed as a flat argument list; newer
    # Ensembl APIs expect Bio::EnsEMBL::Transcript->new(-EXONS => $ens_exons).
    # Left unchanged -- confirm against the Ensembl version in use.
    my $ens_transcript = Bio::EnsEMBL::Transcript->new(@{$ens_exons});
    $ens_transcript->start($transcript->start);
    $ens_transcript->end($transcript->end);
    # $ens_transcript->strand($transcript->strand);
    return $ens_transcript;
}

# Set up this converter: build the nested converter that maps
# Bio::SeqFeature::Gene::Exon objects to Bio::EnsEMBL::Exon objects.
sub _initialize {
    my ($self, @args) = @_;
    $self->SUPER::_initialize(@args);

    # Direct method call instead of the original indirect-object syntax
    # ("new Class(...)"), which is a well-known Perl parsing trap.
    $self->{_converter_for_exons} = Bio::EnsEMBL::Utils::Converter->new(
        -in  => 'Bio::SeqFeature::Gene::Exon',
        -out => 'Bio::EnsEMBL::Exon'
    );
}

# Bug fix: a module loaded via use/require must end with a true value; the
# trailing "1;" was missing from this file.
1;
| james-monkeyshines/ensembl | modules/Bio/EnsEMBL/Utils/Converter/bio_ens_transcript.pm | Perl | apache-2.0 | 2,341 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::alcatel::omniswitch::snmp::mode::components::module;

use strict;
use warnings;
use network::alcatel::omniswitch::snmp::mode::components::resources qw(%oids $mapping);

# Nothing extra to preload: the entity tables used below are requested by the
# shared resources module.
sub load {}

# Walk every entity whose entPhysicalClass is 9 (module), log its admin and
# operational status, emit a power perfdata point when the agent reports one,
# and raise an alert when either status maps to a non-OK severity.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking module");
    $self->{components}->{module} = {name => 'modules', total => 0, skip => 0};
    return if ($self->check_filter(section => 'module'));

    # Collect the instance indexes of all module-class entities (class 9).
    my @instances = ();
    foreach my $key (keys %{$self->{results}->{$oids{common}->{entPhysicalClass}}}) {
        if ($self->{results}->{$oids{common}->{entPhysicalClass}}->{$key} == 9) {
            next if ($key !~ /^$oids{common}->{entPhysicalClass}\.(.*)$/);
            push @instances, $1;
        }
    }

    foreach my $instance (@instances) {
        # Skip entities that have no admin-status entry for this device type.
        next if (!defined($self->{results}->{entity}->{$oids{$self->{type}}{chasEntPhysAdminStatus} . '.' . $instance}));

        my $result = $self->{snmp}->map_instance(mapping => $mapping->{$self->{type}}, results => $self->{results}->{entity}, instance => $instance);

        next if ($self->check_filter(section => 'module', instance => $instance));
        $self->{components}->{module}->{total}++;

        # Typo fixed in the log message: 'operationnal' -> 'operational'.
        $self->{output}->output_add(long_msg => sprintf("module '%s/%s' [instance: %s, admin status: %s] operational status is %s.",
                                    $result->{entPhysicalName}, $result->{entPhysicalDescr}, $instance,
                                    $result->{chasEntPhysAdminStatus}, $result->{chasEntPhysOperStatus})
                                    );

        # Power draw is only meaningful when the agent reports a positive value.
        if ($result->{chasEntPhysPower} > 0) {
            $self->{output}->perfdata_add(
                label => "power", unit => 'W',
                nlabel => 'hardware.module.power.watt',
                instances => $instance,
                value => $result->{chasEntPhysPower},
                min => 0
            );
        }

        # A non-OK admin status short-circuits the operational-status check.
        my $exit = $self->get_severity(label => 'admin', section => 'module.admin', value => $result->{chasEntPhysAdminStatus});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("module '%s/%s/%s' admin status is %s",
                                                             $result->{entPhysicalName}, $result->{entPhysicalDescr}, $instance,
                                                             $result->{chasEntPhysAdminStatus}));
            next;
        }

        $exit = $self->get_severity(label => 'oper', section => 'module.oper', value => $result->{chasEntPhysOperStatus});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("module '%s/%s/%s' operational status is %s",
                                                             $result->{entPhysicalName}, $result->{entPhysicalDescr}, $instance,
                                                             $result->{chasEntPhysOperStatus}));
        }
    }
}

1;
| Sims24/centreon-plugins | network/alcatel/omniswitch/snmp/mode/components/module.pm | Perl | apache-2.0 | 4,017 |
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::juniper::common::screenos::mode::cpu;

use base qw(centreon::plugins::mode);

use strict;
use warnings;

# Plugin mode checking CPU usage of Juniper ScreenOS devices through the
# NETSCREEN-RESOURCE-MIB 1/5/15-minute load averages.

sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';
    # Comma-separated per-period thresholds: "1min,5min,15min".
    $options{options}->add_options(arguments =>
                                {
                                  "warning:s"               => { name => 'warning', default => '' },
                                  "critical:s"              => { name => 'critical', default => '' },
                                });

    return $self;
}

# Split the warning/critical options into per-period thresholds and validate
# each one.  The six near-identical validation stanzas of the original are
# folded into one data-driven loop; labels and error messages are unchanged.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::init(%options);

    ($self->{warn1m}, $self->{warn5m}, $self->{warn15m}) = split /,/, $self->{option_results}->{warning};
    ($self->{crit1m}, $self->{crit5m}, $self->{crit15m}) = split /,/, $self->{option_results}->{critical};

    foreach my $check (
        [ 'warn1min',  'warning (1min)',   $self->{warn1m}  ],
        [ 'warn5min',  'warning (5min)',   $self->{warn5m}  ],
        [ 'warn15min', 'warning (15min)',  $self->{warn15m} ],
        [ 'crit1min',  'critical (1min)',  $self->{crit1m}  ],
        [ 'crit5min',  'critical (5min)',  $self->{crit5m}  ],
        [ 'crit15min', 'critical (15min)', $self->{crit15m} ],
    ) {
        my ($label, $human, $value) = @$check;
        if (($self->{perfdata}->threshold_validate(label => $label, value => $value)) == 0) {
            $self->{output}->add_option_msg(short_msg => "Wrong $human threshold '" . $value . "'.");
            $self->{output}->option_exit();
        }
    }
}

# Fetch the three load averages in a single SNMP get, evaluate each against
# its thresholds, and emit one status line plus three perfdata series.
sub run {
    my ($self, %options) = @_;
    $self->{snmp} = $options{snmp};

    my $oid_nsResCpuLast1Min = '.1.3.6.1.4.1.3224.16.1.2.0';
    my $oid_nsResCpuLast5Min = '.1.3.6.1.4.1.3224.16.1.3.0';
    my $oid_nsResCpuLast15Min = '.1.3.6.1.4.1.3224.16.1.4.0';
    my $result = $self->{snmp}->get_leef(oids => [$oid_nsResCpuLast1Min, $oid_nsResCpuLast5Min,
                                                  $oid_nsResCpuLast15Min], nothing_quit => 1);

    my $cpu1min = $result->{$oid_nsResCpuLast1Min};
    my $cpu5min = $result->{$oid_nsResCpuLast5Min};
    my $cpu15min = $result->{$oid_nsResCpuLast15Min};

    # The worst severity across the three periods drives the plugin exit.
    my @exits = ();
    foreach my $check ([ $cpu1min,  'crit1min',  'warn1min'  ],
                       [ $cpu5min,  'crit5min',  'warn5min'  ],
                       [ $cpu15min, 'crit15min', 'warn15min' ]) {
        push @exits, $self->{perfdata}->threshold_check(value => $check->[0],
                         threshold => [ { label => $check->[1], exit_litteral => 'critical' }, { label => $check->[2], exit_litteral => 'warning' } ]);
    }
    my $exit = $self->{output}->get_most_critical(status => \@exits);

    $self->{output}->output_add(severity => $exit,
                                short_msg => sprintf("CPU Usage: %.2f%% (1min), %.2f%% (5min), %.2f%% (15min)",
                                                     $cpu1min, $cpu5min, $cpu15min));

    # One perfdata series per period, each with its own thresholds.
    foreach my $perf ([ 'cpu_1min',  $cpu1min,  'warn1min',  'crit1min'  ],
                      [ 'cpu_5min',  $cpu5min,  'warn5min',  'crit5min'  ],
                      [ 'cpu_15min', $cpu15min, 'warn15min', 'crit15min' ]) {
        $self->{output}->perfdata_add(label => $perf->[0], unit => '%',
                                      value => $perf->[1],
                                      warning => $self->{perfdata}->get_perfdata_for_output(label => $perf->[2]),
                                      critical => $self->{perfdata}->get_perfdata_for_output(label => $perf->[3]),
                                      min => 0, max => 100);
    }

    $self->{output}->display();
    $self->{output}->exit();
}

1;

__END__

=head1 MODE

Check Juniper cpu usage (NETSCREEN-RESOURCE-MIB).

=over 8

=item B<--warning>

Threshold warning in percent (1min,5min,15min).

=item B<--critical>

Threshold critical in percent (1min,5min,15min).

=back

=cut
| bcournaud/centreon-plugins | network/juniper/common/screenos/mode/cpu.pm | Perl | apache-2.0 | 6,429 |
=head1 LICENSE
Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
# Parse UniProt (SwissProt & SPTrEMBL) files to create xrefs.
#
# Files actually contain both types of xref, distinguished by ID line;
#
# ID CYC_PIG Reviewed; 104 AA. Swissprot
# ID Q3ASY8_CHLCH Unreviewed; 36805 AA. SPTrEMBL
package XrefParser::UniProtParser_descriptions_only;
use strict;
use warnings;
use Carp;
use POSIX qw(strftime);
use File::Basename;
use base qw( XrefParser::BaseParser );
# --------------------------------------------------------------------------------
# Driver for parsing a UniProt flat file: resolves the Swiss-Prot/SpTrEMBL
# source ids, builds the xref list, uploads it, and (when a release file is
# supplied) records the release strings against all four sources.
# Returns 0 on success, 1 on any failure.
sub run {
  my ($self, $ref_arg) = @_;

  my $source_id    = $ref_arg->{source_id};
  my $species_id   = $ref_arg->{species_id};
  my $files        = $ref_arg->{files};
  my $release_file = $ref_arg->{rel_file};
  my $verbose      = $ref_arg->{verbose};

  unless (defined $source_id and defined $species_id and defined $files and defined $release_file) {
    croak "Need to pass source_id, species_id, files and rel_file as pairs";
  }
  $verbose |= 0;

  my $file = $files->[0];

  # Source ids for the xrefs produced below.
  my $sp_source_id =
    $self->get_source_id_for_source_name('Uniprot/SWISSPROT',"sequence_mapped");
  my $sptr_source_id =
    $self->get_source_id_for_source_name('Uniprot/SPTREMBL');

  if ($verbose) {
    print "SwissProt source id for $file: $sp_source_id\n";
    print "SpTREMBL source id for $file: $sptr_source_id\n";
  }

  # create_xrefs() returns an array ref, or an empty list when the input file
  # cannot be opened.
  my ($xref_list) = $self->create_xrefs($sp_source_id, $sptr_source_id, $species_id, $file, $verbose);
  return 1 if (!defined $xref_list);

  return 1 if (!defined $self->upload_xref_object_graphs($xref_list));

  if (defined $release_file) {
    # Same predicted-source lookups as in create_xrefs().
    my $sp_pred_source_id =
      $self->get_source_id_for_source_name('Uniprot/SWISSPROT_predicted');
    my $sptr_pred_source_id =
      $self->get_source_id_for_source_name('Uniprot/SPTREMBL_predicted');

    # Scan the release file for the Swiss-Prot and SpTrEMBL release strings.
    my ($sp_release, $sptr_release);
    my $release_io = $self->get_filehandle($release_file);
    while (defined(my $line = $release_io->getline())) {
      if ($line =~ m#(UniProtKB/Swiss-Prot Release .*)#) {
        $sp_release = $1;
        print "Swiss-Prot release is '$sp_release'\n" if $verbose;
      }
      elsif ($line =~ m#(UniProtKB/TrEMBL Release .*)#) {
        $sptr_release = $1;
        print "SpTrEMBL release is '$sptr_release'\n" if $verbose;
      }
    }
    $release_io->close();

    # Record the releases for the normal and predicted sources alike.
    $self->set_release($sp_source_id,        $sp_release);
    $self->set_release($sptr_source_id,      $sptr_release);
    $self->set_release($sp_pred_source_id,   $sp_release);
    $self->set_release($sptr_pred_source_id, $sptr_release);
  }

  return 0;
}
# --------------------------------------------------------------------------------
# Parse file into array of xref objects
# Parse the given UniProt flat file into an array ref of xref hashes.
#
# Args:
#   $sp_source_id   - source id for reviewed (Swiss-Prot) entries
#   $sptr_source_id - source id for unreviewed (SpTrEMBL) entries
#   $species_id     - xref species id; records whose OX taxonomy ids do not
#                     map to this species are skipped
#   $file           - path of the UniProt flat file
#   $verbose        - print progress information when true
#
# Returns an array ref of xref hashes, or an empty list when the file cannot
# be opened.
sub create_xrefs {
  my ($self, $sp_source_id, $sptr_source_id, $species_id, $file, $verbose ) = @_;

  my $num_sp        = 0;
  my $num_sptr      = 0;
  my $num_sp_pred   = 0;
  my $num_sptr_pred = 0;

  # "Predicted" sources are used for entries flagged as WGS-derived below.
  my $sp_pred_source_id =
    $self->get_source_id_for_source_name('Uniprot/SWISSPROT_predicted');
  my $sptr_pred_source_id =
    $self->get_source_id_for_source_name('Uniprot/SPTREMBL_predicted');
  print "Predicted SwissProt source id for $file: $sp_pred_source_id\n" if($verbose);
  print "Predicted SpTREMBL source id for $file: $sptr_pred_source_id\n" if($verbose);

  my $uniprot_io = $self->get_filehandle($file);
  if ( !defined $uniprot_io ) { return }

  my @xrefs;

  # Records in the flat file are terminated by a "//" line.
  local $/ = "//\n";

  # All taxonomy ids that belong to this species.
  my %species2tax = $self->species_id2taxonomy();
  my @tax_ids = @{$species2tax{$species_id}};
  my %taxonomy2species_id = map { $_ => $species_id } @tax_ids;

  while ( $_ = $uniprot_io->getline() ) {

    # Only keep records whose OX line carries at least one taxonomy id of
    # this species.  The line may list several ids, e.g.
    #   OX   NCBI_TaxID=158878, 158879;
    my ($ox) = $_ =~ /OX\s+[a-zA-Z_]+=([0-9 ,]+);/;
    my $found = 0;
    if ( defined $ox ) {
      foreach my $taxon_id_from_file (split /\, /, $ox) {
        $found = 1 if ( exists $taxonomy2species_id{$taxon_id_from_file} );
      }
    }
    next if (!$found);    # no taxon id matches, skip the record

    my $xref;

    # AC lines (possibly several): the first accession is the primary one,
    # every later accession becomes a synonym.
    my ($acc) = $_ =~ /(AC\s+.+)/s;   # first AC line and everything after it
    my @all_lines = split /\n/, $acc;
    my @accessions;
    foreach my $line (@all_lines) {
      my ($accessions_only) = $line =~ /^AC\s+(.+)/;
      push(@accessions, (split /;\s*/, $accessions_only)) if ($accessions_only);
    }
    $xref->{ACCESSION} = $accessions[0];
    for (my $a = 1; $a <= $#accessions; $a++) {
      push(@{$xref->{"SYNONYMS"}}, $accessions[$a]);
    }

    # A CC caution about WGS entries routes the xref to the predicted sources.
    my $is_predicted = /CC.*EMBL\/GenBank\/DDBJ whole genome shotgun \(WGS\) entry/;

    # The ID line separates Swiss-Prot (Reviewed) from SpTrEMBL (Unreviewed).
    my ($label, $sp_type) = $_ =~ /ID\s+(\w+)\s+(\w+)/;
    if ($sp_type =~ /^Reviewed/i) {
      # (Removed a redundant unconditional SOURCE_ID assignment that was
      # immediately overwritten by the if/else below.)
      if ($is_predicted) {
        $xref->{SOURCE_ID} = $sp_pred_source_id;
        $num_sp_pred++;
      } else {
        $xref->{SOURCE_ID} = $sp_source_id;
        $num_sp++;
      }
    } elsif ($sp_type =~ /Unreviewed/i) {
      if ($is_predicted) {
        $xref->{SOURCE_ID} = $sptr_pred_source_id;
        $num_sptr_pred++;
      } else {
        $xref->{SOURCE_ID} = $sptr_source_id;
        $num_sptr++;
      }
    } else {
      next;    # neither reviewed nor unreviewed: ignore the record
    }

    $xref->{LABEL}         = $label;
    $xref->{SPECIES_ID}    = $species_id;
    $xref->{SEQUENCE_TYPE} = 'peptide';
    $xref->{STATUS}        = 'experimental';

    # DE lines (possibly multi-line) hold the description parts.
    my ($description_and_rest) = $_ =~ /(DE\s+.*)/s;
    @all_lines = split /\n/, $description_and_rest;

    my $description = " ";
    my $name = "";
    my $sub_description = "";
    foreach my $line (@all_lines) {
      next if(!($line =~ /^DE/));
      if($line =~ /^DE RecName: Full=(.*);/){
        $name .= $1;
      }
      elsif($line =~ /RecName: Full=(.*);/){
        $description .= $1;
      }
      elsif($line =~ /SubName: Full=(.*);/){
        $sub_description .= $1;
      }
    }

    # Bug fix: the finalisation below and the push used to sit INSIDE the
    # per-DE-line loop above, so the same xref hash ref was pushed once per
    # DE line (and records with no matching DE lines were dropped entirely).
    # It now runs exactly once per accepted record.
    $description =~ s/^\s*//g;
    $description =~ s/\s*$//g;
    my $desc = $name.$description;
    if(!length($desc)){
      $desc = $sub_description;
    }
    $desc =~ s/\(\s*EC\s*\S*\)//g;    # strip "(EC x.x.x.x)" annotations
    $xref->{DESCRIPTION} = $desc;

    push @xrefs, $xref;
  }

  $uniprot_io->close();

  print "Read $num_sp SwissProt xrefs and $num_sptr SPTrEMBL xrefs from $file\n" if($verbose);
  print "Found $num_sp_pred predicted SwissProt xrefs and $num_sptr_pred predicted SPTrEMBL xrefs\n" if (($num_sp_pred > 0 || $num_sptr_pred > 0) and $verbose);

  # Records without an OX line are skipped above ($found stays 0).
  return \@xrefs;
}
1;
| willmclaren/ensembl | misc-scripts/xref_mapping/XrefParser/UniProtParser_descriptions_only.pm | Perl | apache-2.0 | 8,826 |
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Hardware component checker for fans on Extreme Networks devices
# (EXTREME-SYSTEM-MIB fan status table).
package network::extreme::snmp::mode::components::fan;
use strict;
use warnings;
# Numeric extremeFanOperational values mapped to human-readable states.
my %map_fan_status = (
    1 => 'operational',
    2 => 'not operational',
);
# SNMP columns read for each fan: operational status and speed (rpm).
my $mapping = {
    extremeFanOperational => { oid => '.1.3.6.1.4.1.1916.1.1.1.9.1.2', map => \%map_fan_status },
    extremeFanSpeed => { oid => '.1.3.6.1.4.1.1916.1.1.1.9.1.4' },
};
my $oid_extremeFanStatusEntry = '.1.3.6.1.4.1.1916.1.1.1.9.1';
# Queue the fan-status table (status..speed columns) for the bulk SNMP request.
sub load {
    my ($self) = @_;
    push @{$self->{request}}, {
        oid => $oid_extremeFanStatusEntry,
        start => $mapping->{extremeFanOperational}->{oid},
        end => $mapping->{extremeFanSpeed}->{oid}
    };
}
# Walk every fan instance, log its status/speed, alert on a non-OK status or
# an out-of-range speed, and emit one speed perfdata point per fan.
sub check {
    my ($self) = @_;
    $self->{output}->output_add(long_msg => "Checking fans");
    $self->{components}->{fan} = { name => 'fans', total => 0, skip => 0 };
    return if ($self->check_filter(section => 'fan'));
    my ($exit, $warn, $crit, $checked);
    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_extremeFanStatusEntry}})) {
        # Iterate only over rows of the operational-status column.
        next if ($oid !~ /^$mapping->{extremeFanOperational}->{oid}\.(.*)$/);
        my $instance = $1;
        my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{$oid_extremeFanStatusEntry}, instance => $instance);
        next if ($self->check_filter(section => 'fan', instance => $instance));
        $self->{components}->{fan}->{total}++;
        $self->{output}->output_add(long_msg =>
            sprintf(
                "Fan '%s' status is '%s' [instance = %s, speed = %s]",
                $instance,
                $result->{extremeFanOperational},
                $instance,
                defined($result->{extremeFanSpeed}) ? $result->{extremeFanSpeed} : 'unknown'
            )
        );
        # Status check: severity comes from the section's severity mapping.
        $exit = $self->get_severity(section => 'fan', value => $result->{extremeFanOperational});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(
                severity => $exit,
                short_msg => sprintf(
                    "Fan '%s' status is '%s'",
                    $instance,
                    $result->{extremeFanOperational}
                )
            );
        }
        # Speed check against any numeric thresholds configured for 'fan'.
        ($exit, $warn, $crit, $checked) = $self->get_severity_numeric(section => 'fan', instance => $instance, value => $result->{extremeFanSpeed});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(
                severity => $exit,
                short_msg => sprintf(
                    "Fan '%s' is '%s' rpm",
                    $instance,
                    $result->{extremeFanSpeed}
                )
            );
        }
        $self->{output}->perfdata_add(
            label => 'fan', unit => 'rpm',
            nlabel => 'hardware.fan.speed.rpm',
            instances => $instance,
            value => $result->{extremeFanSpeed},
            warning => $warn,
            critical => $crit,
            min => 0
        );
    }
}
1;
| centreon/centreon-plugins | network/extreme/snmp/mode/components/fan.pm | Perl | apache-2.0 | 3,874 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Enum of URL types referenced by PolicyTopicEvidenceDestinationMismatch
# evidence in the Google Ads API.  Every constant's value is identical to
# its name (the API's string representation).
package Google::Ads::GoogleAds::V9::Enums::PolicyTopicEvidenceDestinationMismatchUrlTypeEnum;
use strict;
use warnings;
# Build the name => value pairs from a single word list; since each value
# is simply its own name, the map keeps names and values trivially in sync.
use Const::Exporter enums => [
  map { $_ => $_ } qw(
    UNSPECIFIED
    UNKNOWN
    DISPLAY_URL
    FINAL_URL
    FINAL_MOBILE_URL
    TRACKING_URL
    MOBILE_TRACKING_URL
  )
];
1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V9/Enums/PolicyTopicEvidenceDestinationMismatchUrlTypeEnum.pm | Perl | apache-2.0 | 1,023 |
# Auto-generated by SOAP::WSDL from the AdWords v201809 WSDL; do not edit by
# hand — regenerate instead.  Represents the QueryError complex type (errors
# raised during AWQL query parsing/execution).
package Google::Ads::AdWords::v201809::QueryError;
use strict;
use warnings;
__PACKAGE__->_set_element_form_qualified(1);
# XML namespace this complex type belongs to.
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201809' };
# No XML attributes are defined for this type.
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;
sub __get_attr_class {
    return $XML_ATTRIBUTE_CLASS;
}
use base qw(Google::Ads::AdWords::v201809::ApiError);
# Variety: sequence
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);
{ # BLOCK to scope variables
# Inside-out attribute storage (Class::Std::Fast); :get<...> generates the
# read accessors, while _factory below wires up setters and XML (de)serialization.
my %fieldPath_of :ATTR(:get<fieldPath>);
my %fieldPathElements_of :ATTR(:get<fieldPathElements>);
my %trigger_of :ATTR(:get<trigger>);
my %errorString_of :ATTR(:get<errorString>);
my %ApiError__Type_of :ATTR(:get<ApiError__Type>);
my %reason_of :ATTR(:get<reason>);
my %message_of :ATTR(:get<message>);
# Field order matters: it mirrors the WSDL sequence.  The three hashes map
# field name -> storage, field name -> SOAP type, field name -> XML element name
# (note 'ApiError__Type' serializes as 'ApiError.Type').
__PACKAGE__->_factory(
    [ qw(        fieldPath
        fieldPathElements
        trigger
        errorString
        ApiError__Type
        reason
        message
    ) ],
    {
        'fieldPath' => \%fieldPath_of,
        'fieldPathElements' => \%fieldPathElements_of,
        'trigger' => \%trigger_of,
        'errorString' => \%errorString_of,
        'ApiError__Type' => \%ApiError__Type_of,
        'reason' => \%reason_of,
        'message' => \%message_of,
    },
    {
        'fieldPath' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
        'fieldPathElements' => 'Google::Ads::AdWords::v201809::FieldPathElement',
        'trigger' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
        'errorString' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
        'ApiError__Type' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
        'reason' => 'Google::Ads::AdWords::v201809::QueryError::Reason',
        'message' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
    },
    {

        'fieldPath' => 'fieldPath',
        'fieldPathElements' => 'fieldPathElements',
        'trigger' => 'trigger',
        'errorString' => 'errorString',
        'ApiError__Type' => 'ApiError.Type',
        'reason' => 'reason',
        'message' => 'message',
    }
);
} # end BLOCK
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201809::QueryError
=head1 DESCRIPTION
Perl data type class for the XML Schema defined complexType
QueryError from the namespace https://adwords.google.com/api/adwords/cm/v201809.
A QueryError represents possible errors for query parsing and execution.
=head2 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * reason
=item * message
=back
=head1 METHODS
=head2 new
Constructor. The following data structure may be passed to new():
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| googleads/googleads-perl-lib | lib/Google/Ads/AdWords/v201809/QueryError.pm | Perl | apache-2.0 | 2,732 |
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package apps::exchange::2010::local::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_simple);

# Plugin entry point: registers every Exchange 2010 local check mode under
# its command-line name so the centreon framework can dispatch to it.
sub new {
    my ($class, %options) = @_;
    # %options carries the global options object expected by script_simple.
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '0.1';

    # Mode name (as passed on the command line) => implementing module.
    my %mode_table = (
        'activesync-mailbox'  => 'apps::exchange::2010::local::mode::activesyncmailbox',
        'databases'           => 'apps::exchange::2010::local::mode::databases',
        'list-databases'      => 'apps::exchange::2010::local::mode::listdatabases',
        'imap-mailbox'        => 'apps::exchange::2010::local::mode::imapmailbox',
        'mapi-mailbox'        => 'apps::exchange::2010::local::mode::mapimailbox',
        'outlook-webservices' => 'apps::exchange::2010::local::mode::outlookwebservices',
        'owa-mailbox'         => 'apps::exchange::2010::local::mode::owamailbox',
        'queues'              => 'apps::exchange::2010::local::mode::queues',
        'replication-health'  => 'apps::exchange::2010::local::mode::replicationhealth',
        'services'            => 'apps::exchange::2010::local::mode::services',
    );
    # Assign through the dereference so any existing hashref identity is kept.
    %{$self->{modes}} = %mode_table;

    return $self;
}

1;
__END__
=head1 PLUGIN DESCRIPTION
Check Windows Exchange 2010 locally.
!!! Experimental system !!!
=cut
| golgoth31/centreon-plugins | apps/exchange/2010/local/plugin.pm | Perl | apache-2.0 | 2,297 |
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::emc::symmetrix::vmax::local::mode::components::power;

use base qw(centreon::plugins::mode);

use strict;
use warnings;
use centreon::plugins::misc;

# Sample of the "Power system" report section parsed below:
#
#Power system: OK
#
#+--------------------------------------------------------+--------------------------------------+
#| Item                                                    | Status                               |
#+--------------------------------------------------------+--------------------------------------+
#| Power input type                                        | Three Phases                         |
#| System Bay AC Zones Status                              | OK                                   |
#|  AC Status Zone A                                       | Zone AC OK                           |
#| Power modules status                                    | OK                                   |
#|  System Bay 1                                           | OK                                   |
#|   Engine SPS 4A                                         | OK                                   |
#+--------------------------------------------------------+--------------------------------------+

# Parses the "Power system" table of the health report held in
# $self->{content_file_health_env}.  Rows are indented to express nesting;
# every row whose item name contains "status" is treated as a power
# component, counted, and mapped to a severity via the generic 'default'
# thresholds.  (Dropped an unused local counter from the original.)
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking powers");
    $self->{components}->{power} = {name => 'powers', total => 0, skip => 0};
    return if ($self->check_filter(section => 'power'));

    # Isolate the data rows between the "Power system" table header and the
    # closing horizontal rule.
    if ($self->{content_file_health_env} !~ /Ethernet cabling.*?Power system.*?---------.*?Item.*?---------.*?\n(.*?\n)\+---------/msi) {
        $self->{output}->output_add(long_msg => 'skipping: cannot find powers');
        return ;
    }
    my $content = $1;

    # Maintain a stack of (indent, path) frames so each row gets a
    # '>'-separated long instance name reflecting its nesting depth.
    my @stack = ({ indent => 0, long_instance => '' });
    while ($content =~ /^\|([ \t]+)(.*?)\|(.*?)\|\n/msig) {
        my ($indent, $name, $status) = (length($1), centreon::plugins::misc::trim($2), centreon::plugins::misc::trim($3));
        # Unwind frames at the same depth or deeper than the current row.
        pop @stack while ($indent <= $stack[$#stack]->{indent});

        my $long_instance = $stack[$#stack]->{long_instance} . '>' . $name;
        if ($indent > $stack[$#stack]->{indent}) {
            push @stack, { indent => $indent,
                           long_instance => $stack[$#stack]->{long_instance} . '>' . $name };
        }

        # Only rows whose item name mentions "status" carry a checkable state.
        next if ($name !~ /status/i);
        next if ($self->check_filter(section => 'power', instance => $long_instance));

        $self->{components}->{power}->{total}++;

        $self->{output}->output_add(long_msg => sprintf("power '%s' status is '%s'",
                                    $long_instance, $status));
        my $exit = $self->get_severity(label => 'default', section => 'power', value => $status);
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("Power '%s' status is '%s'",
                                                             $long_instance, $status));
        }
    }
}

1;
| bcournaud/centreon-plugins | storage/emc/symmetrix/vmax/local/mode/components/power.pm | Perl | apache-2.0 | 4,889 |
package API::Region;
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
#
use UI::Utils;
use Mojo::Base 'Mojolicious::Controller';
use Data::Dumper;
use JSON;
use MojoPlugins::Response;
my $finfo = __FILE__ . ":";
# Lists all regions, ordered by the 'orderby' query parameter (defaults to
# region name).  Divisions are prefetched to avoid N+1 queries.
sub index {
	my $self = shift;

	my $order_field = $self->param('orderby') || "name";
	my $regions = $self->db->resultset("Region")
		->search( undef, { prefetch => ['division'], order_by => 'me.' . $order_field } );

	my @results;
	while ( my $region = $regions->next ) {
		push(
			@results, {
				"id"           => $region->id,
				"name"         => $region->name,
				"division"     => $region->division->id,
				"divisionName" => $region->division->name
			}
		);
	}
	$self->success( \@results );
}
# Lists regions matching the given name; each entry embeds its division as a
# nested { id, name } hash (unlike index(), which flattens those fields).
sub index_by_name {
	my $self = shift;
	my $name = $self->param('name');

	my @results;
	my $regions = $self->db->resultset("Region")
		->search( { 'me.name' => $name }, { prefetch => ['division'] } );
	while ( my $region = $regions->next ) {
		push(
			@results, {
				"id"       => $region->id,
				"name"     => $region->name,
				"division" => {
					"id"   => $region->division->id,
					"name" => $region->division->name
				},
			}
		);
	}
	$self->success( \@results );
}
# Returns the region with the given id (division fields flattened into the
# top-level hash, matching index()).
sub show {
	my $self = shift;
	my $id   = $self->param('id');

	my @results;
	my $regions = $self->db->resultset("Region")->search( { 'me.id' => $id }, { prefetch => ['division'] } );
	while ( my $region = $regions->next ) {
		push(
			@results, {
				"id"           => $region->id,
				"name"         => $region->name,
				"division"     => $region->division->id,
				"divisionName" => $region->division->name
			}
		);
	}
	$self->success( \@results );
}
# PUT handler: updates a region's name and division from a JSON body with
# 'name' and 'division' (division id).  Requires operator privileges.
# Responds with the updated representation and logs the change.
sub update {
	my $self   = shift;
	my $id     = $self->param('id');
	my $params = $self->req->json;
	if ( !&is_oper($self) ) {
		return $self->forbidden();
	}
	my $region = $self->db->resultset('Region')->find( { id => $id } );
	if ( !defined($region) ) {
		return $self->not_found();
	}
	# Validate payload: JSON body with both required fields.
	if ( !defined($params) ) {
		return $self->alert("Parameters must be in JSON format.");
	}
	if ( !defined( $params->{name} ) ) {
		return $self->alert("Region name is required.");
	}
	if ( !defined( $params->{division} ) ) {
		return $self->alert("Division Id is required.");
	}
	my $values = {
		name => $params->{name},
		division => $params->{division}
	};
	my $rs = $region->update($values);
	if ($rs) {
		# Echo the persisted row (including refreshed division name) back.
		my $response;
		$response->{id}          = $rs->id;
		$response->{name}        = $rs->name;
		$response->{division}    = $rs->division->id;
		$response->{divisionName}= $rs->division->name;
		$response->{lastUpdated} = $rs->last_updated;
		&log( $self, "Updated Region name '" . $rs->name . "' for id: " . $rs->id, "APICHANGE" );
		return $self->success( $response, "Region update was successful." );
	}
	else {
		return $self->alert("Region update failed.");
	}
}
# POST handler: creates a region from a JSON body with 'name' and 'division'
# (division id).  Requires operator privileges; rejects duplicate names.
sub create {
	my $self = shift;
	my $params = $self->req->json;
	if ( !&is_oper($self) ) {
		return $self->forbidden();
	}
	my $name = $params->{name};
	if ( !defined($name) ) {
		return $self->alert("Region name is required.");
	}
	my $division_id = $params->{division};
	if ( !defined($division_id) ) {
		return $self->alert("Division Id is required.");
	}
	# Region names must be unique across all divisions.
	my $existing = $self->db->resultset('Region')->search( { name => $name } )->get_column('name')->single();
	if ($existing) {
		return $self->alert("A region with name \"$name\" already exists.");
	}
	my $values = {
		name => $params->{name} ,
		division => $params->{division}
	};
	my $insert = $self->db->resultset('Region')->create($values);
	my $rs = $insert->insert();
	if ($rs) {
		# Echo the persisted row back to the caller and audit-log the change.
		my $response;
		$response->{id}           = $rs->id;
		$response->{name}         = $rs->name;
		$response->{division}     = $rs->division->id;
		$response->{divisionName} = $rs->division->name;
		$response->{lastUpdated}  = $rs->last_updated;
		&log( $self, "Created Region name '" . $rs->name . "' for id: " . $rs->id, "APICHANGE" );
		return $self->success( $response, "Region create was successful." );
	}
	else {
		return $self->alert("Region create failed.");
	}
}
# POST handler: creates a region under the division named in the URL, from a
# JSON body with 'name'.  Requires operator privileges; rejects duplicate
# region names and unknown divisions.
sub create_for_division {
	my $self          = shift;
	my $division_name = $self->param('division_name');
	my $params        = $self->req->json;
	if ( !defined($params) ) {
		return $self->alert("parameters must be in JSON format,  please check!");
	}
	if ( !&is_oper($self) ) {
		return $self->alert("You must be an ADMIN or OPER to perform this operation!");
	}
	my $existing_region = $self->db->resultset('Region')->search( { name => $params->{name} } )->get_column('name')->single();
	if ( defined($existing_region) ) {
		return $self->alert( "region[" . $params->{name} . "] already exists." );
	}
	# Resolve the division name from the URL to its id.
	# NOTE(review): '$divsion_id' is a misspelling of 'division_id' — local
	# only, safe to rename in a future cleanup.
	my $divsion_id = $self->db->resultset('Division')->search( { name => $division_name } )->get_column('id')->single();
	if ( !defined($divsion_id) ) {
		return $self->alert( "division[" . $division_name . "] does not exist." );
	}
	my $insert = $self->db->resultset('Region')->create(
		{
			name     => $params->{name},
			division => $divsion_id
		}
	);
	$insert->insert();
	# Re-fetch the inserted row so the response reflects persisted values.
	my $response;
	my $rs = $self->db->resultset('Region')->find( { id => $insert->id } );
	if ( defined($rs) ) {
		$response->{id}           = $rs->id;
		$response->{name}         = $rs->name;
		$response->{divisionName} = $division_name;
		# NOTE(review): 'divsionId' is misspelled but is part of the emitted
		# JSON API — confirm no clients depend on it before renaming.
		$response->{divsionId}    = $rs->division->id;
		return $self->success($response);
	}
	return $self->alert( "create region " . $params->{name} . " failed." );
}
# DELETE handler: removes the region identified by id.  Requires operator
# privileges; 404s when the region does not exist.
sub delete {
	my $self = shift;
	my $id   = $self->param('id');

	return $self->forbidden() if ( !&is_oper($self) );

	my $region = $self->db->resultset('Region')->find( { id => $id } );
	return $self->not_found() if ( !defined($region) );

	my $deleted = $region->delete();
	return $deleted
		? $self->success_message("Region deleted.")
		: $self->alert("Region delete failed.");
}
# DELETE handler: removes the region identified by name.  Requires operator
# privileges; 404s when the region does not exist.
sub delete_by_name {
	my $self = shift;
	my $name = $self->param('name');

	return $self->forbidden() if ( !&is_oper($self) );

	my $region = $self->db->resultset('Region')->find( { name => $name } );
	return $self->not_found() if ( !defined($region) );

	my $deleted = $region->delete();
	return $deleted
		? $self->success_message("Region deleted.")
		: $self->alert("Region delete failed.");
}
1;
| knutsel/traffic_control-1 | traffic_ops/app/lib/API/Region.pm | Perl | apache-2.0 | 6,767 |
#
# Copyright 1999-2002,2004 The Apache Software Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Option flags populated by getopts() below.
# NOTE(review): 'use vars' is obsolete (our() is preferred) — kept because
# Getopt::Std writes into these package globals.
use vars qw($opt_h $opt_x);
use Getopt::Std;
# Map each character that is not a plain ASCII letter to the XMLCh constant
# name defined by Xerces-C (see xercesc/util/XMLUniDefs.hpp).  Letters are
# handled separately in the main loop as chLatin_<letter>.
my %charMapping = ( ':', 'chColon',
                    '\\', 'chBackSlash',
                    '[', 'chOpenSquare',
                    ']', 'chCloseSquare',
                    '{', 'chOpenCurly',
                    '}', 'chCloseCurly',
                    '?', 'chQuestion',
                    '(', 'chOpenParen',
                    ')', 'chCloseParen',
                    '/', 'chForwardSlash',
                    '*', 'chAsterisk',
                    '+', 'chPlus',
                    '.', 'chPeriod',
                    '-', 'chDash',
                    '|', 'chPipe',
                    '_', 'chUnderscore',
                    ',', 'chComma',
                    '&', 'chAmpersand',
                    '0', 'chDigit_0', '1', 'chDigit_1', '2', 'chDigit_2',
                    '3', 'chDigit_3', '4', 'chDigit_4', '5', 'chDigit_5',
                    '6', 'chDigit_6', '7', 'chDigit_7', '8', 'chDigit_8',
                    '9', 'chDigit_9');
#
# usage: display usage message
#
#
# usage: print a help message describing the expected arguments, then exit
# with a non-zero status.  (Dropped the empty prototype from the original
# declaration — Perl prototypes are a parsing feature, not argument
# validation, and are best avoided; all call sites use parentheses.)
#
sub usage {
    print<<EOF;
usage: $0 [ options ] word
Takes a word and produces a static XMLCh * definition for it.
Options:
    -h Displays this help message
    -x add the XERCES_CPP_NAMESPACE_QUALIFIER before each item
EOF
    exit(1);
}
#
# main:
#
# Parse -h/-x flags; show usage when asked for help or no word was given.
getopts("hx");
if ($opt_h or @ARGV == 0) {
    usage();
}
my $word = $ARGV[0];
# Emit the word as a brace-enclosed, null-terminated XMLCh initializer list,
# consuming the word one leading character at a time.
print "{ ";
while ($word=~s/^(.)//) {
    if (defined($charMapping{$1})) {
        # Punctuation/digit: use the table built above.
        $ch = $charMapping{$1};
    } else {
        $ch = $1;
        if ($ch=~/[A-Za-z]/) {
            # ASCII letter: Xerces names these chLatin_<letter>.
            $ch = "chLatin_$ch";
        } else {
            # Anything unmapped is flagged so the output fails to compile
            # rather than silently emitting a wrong constant.
            $ch = "UNKNOWN_CHAR_$ch";
        }
    }
    # -x prefixes every constant with the Xerces namespace qualifier.
    if($opt_x) {
        print "XERCES_CPP_NAMESPACE_QUALIFIER ";
    }
    print "$ch, ";
}
if($opt_x) {
    print "XERCES_CPP_NAMESPACE_QUALIFIER ";
}
# Terminate the array with chNull, as Xerces string APIs expect.
print "chNull };\n";
=head1 LICENSE
Copyright [1999-2013] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package Bio::EnsEMBL::GlyphSet::preliminary;
use strict;
use base qw(Bio::EnsEMBL::GlyphSet);

# Draws the ENSEMBL_PRELIM warning message (pipe-separated lines) centred on
# the container, stacking each line vertically with a 4px gap.  Rendered on
# the forward strand only so the banner appears once.
sub _init {
  my ($self) = @_;

  return unless ($self->strand() == 1);

  # Nothing to draw unless a preliminary-assembly message is configured.
  return unless my $message = $self->species_defs->ENSEMBL_PRELIM;

  my ($font_name, $font_size) = $self->get_font_details('text');

  my $y_offset = 0;
  foreach my $line (split /\|/, $message) {
    # Only the rendered text height is needed to advance the vertical offset.
    my (undef, undef, undef, $text_height) = $self->get_text_width(0, $line, '', 'ptsize' => $font_size, 'font' => $font_name);
    $self->push($self->Text({
      'x'         => int(($self->{'container'}->length() + 1) / 2),
      'y'         => $y_offset,
      'height'    => $text_height,
      'font'      => $font_name,
      'ptsize'    => $font_size,
      'colour'    => 'red3',
      'text'      => $line,
      'absolutey' => 1,
    }));
    $y_offset += $text_height + 4;
  }
}

1;
| Ensembl/ensembl-draw | modules/Bio/EnsEMBL/GlyphSet/preliminary.pm | Perl | apache-2.0 | 1,451 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package Bio::EnsEMBL::DataFile;
use strict;
use warnings;
use base qw/Bio::EnsEMBL::Storable/;
use Bio::EnsEMBL::ApiVersion;
use Bio::EnsEMBL::Utils::Argument qw/rearrange/;
use Bio::EnsEMBL::Utils::Exception qw/throw warning/;
use Bio::EnsEMBL::Utils::Scalar qw/:assert/;
use Bio::EnsEMBL::Utils::URI qw/is_uri/;
use File::Spec;
=head2 new
Arg [-ADAPTOR] : Bio::EnsEMBL::DBSQL::DataFileAdaptor
Arg [-DBID] : Integer $dbID
Arg [-COORD_SYSTEM] : Bio::EnsEMBL::CoordSystem $coord_system
Arg [-ANALYSIS] : Bio::EnsEMBL::Analysis $analysis
Arg [-NAME] : String $name
Arg [-VERSION_LOCK] : Boolean $version_lock
Arg [-ABSOLUTE] : Boolean $absolute
Arg [-URL] : String $url
Arg [-FILE_TYPE] : String $file_type
Example : Bio::EnsEMBL::DataFile->new();
Description : Returns a new instance of this object
Returntype : Bio::EnsEMBL::DataFile
Exceptions : Thrown if data is not as expected
=cut
# Constructor: delegates to Storable's constructor, then routes every named
# argument through its mutator so the mutators' validation applies.
sub new {
  my ($class, @args) = @_;
  my $self = $class->SUPER::new(@args);

  my @field_names = qw/coord_system analysis name version_lock absolute url file_type/;
  my %arg_of;
  @arg_of{@field_names} = rearrange([@field_names], @args);

  $self->coord_system($arg_of{coord_system});
  $self->analysis($arg_of{analysis});
  $self->name($arg_of{name});
  $self->version_lock($arg_of{version_lock});
  $self->absolute($arg_of{absolute});
  $self->url($arg_of{url});
  $self->file_type($arg_of{file_type});

  return $self;
}
=head2 get_ExternalAdaptor
Arg[1] : Scalar; (optional) base path. Uses defaults if not given
Arg[2] : Scalar; (optional) file type
Example : my $ea = $df->get_ExternalAdaptor('/base/path');
Description : Delegates to the parent adaptor to retrieve the external
adaptor for this data type
Returntype : Adaptor; will be an adaptor that can read the given data file
Exceptions : Thrown if there is no attached adaptor.
=cut
# Delegates to the attached DataFileAdaptor to build the external adaptor
# that can read this data file; dies when no adaptor is attached.
sub get_ExternalAdaptor {
  my ($self, $base_path, $requested_type) = @_;
  my $data_file_adaptor = $self->adaptor();
  if (! $data_file_adaptor) {
    throw "No DataFileAdaptor found in this object. Cannot request ExternalAdaptor";
  }
  return $data_file_adaptor->DataFile_to_adaptor($self, $base_path, $requested_type);
}
=head2 path
Arg[1] : Scalar base of the path to use. Can be ignored if the instance
already represents a canonical path
Example : my $f = $df->path();
Description : Used to generate the path to the file resource. Can return a
path to the file or a URL but it is up to the using code to
know how to interprate the different returned forms.
If the data file url is canonical then this is just returned.
If not then a path is generated of the form
B</base/path/production_name/coord_system_version/[software_version]/db_group/name.ext>
Returntype : Scalar the absolute path/url to the given resource
Exceptions : Thrown if the linked Coordinate System lacks a version and the
current database also lacks a default version
Caller : public
=cut
# Convenience wrapper: the first path produced by get_all_paths() is the
# canonical location for this data file.
sub path {
  my ($self, $base) = @_;
  return $self->get_all_paths($base)->[0];
}
# Builds every candidate location for this data file.  Absolute entries are
# returned verbatim; otherwise paths take the form
# base/production_name/cs_version/[software_version]/db_group/target and are
# joined with '/' for URI bases or File::Spec for filesystem bases.
sub get_all_paths {
  my ($self, $base) = @_;
  
  return [$self->url()] if $self->absolute();
  
  my @all_paths;
  
  $base = $self->adaptor()->get_base_path($base) if ! $base;
  
  my $production_name = $self->adaptor()->db()->get_MetaContainer()->get_production_name();
  my $cs_version = $self->coord_system()->version();
  # Fall back to the highest-ranked coordinate system's version when the
  # linked coordinate system carries none.
  if(! $cs_version) {
    my ($highest_cs) = @{$self->adaptor()->db()->get_CoordSystemAdaptor()->fetch_all()};
    $cs_version = $highest_cs->version();
  }
  if(!$cs_version) {
    my $name = $self->name();
    # Bug fix: the original interpolated '${$production_name}', which
    # dereferences a plain string as a scalar ref (runtime error on this
    # path) and left the quote unbalanced.
    throw "The file '${name}' in species '${production_name}' is attached to a CoordinateSystem lacking a version and has no default assembly. Please fix";
  }
  
  my @portions;
  push(@portions, $production_name);
  push(@portions, $cs_version);
  push(@portions, software_version()) if $self->version_lock();
  push(@portions, $self->adaptor()->db()->group());
  
  #Targets are the files to generate
  my @targets;
  
  #If URL is populated we assume we need to add this onto the end but removing the /
  if($self->url()) {
    my @split = split(/\//, $self->url());
    push(@targets, [@split]);
  }
  else {
    # No explicit URL: derive one target per registered extension.
    my $extensions = $self->adaptor()->DataFile_to_extensions($self);
    foreach my $ext (@{$extensions}) {
      my $filename = sprintf(q{%s.%s}, $self->name(), $ext);
      push(@targets, [$filename]);
    }
  }
  
  my $is_uri = is_uri($base);
  foreach my $t (@targets) {
    my $path;
    if($is_uri) {
      $path = join(q{/}, $base, @portions, @{$t});
    }
    else {
      $path = File::Spec->catfile($base, @portions, @{$t});
    }
    push(@all_paths, $path);
  }
  return \@all_paths;
}
=head2 coord_system
Arg[1] : Bio::EnsEMBL::CoordSystem Optional setter
Description : Mutator for the coord system field. All files are linked to one
Returntype : Bio::EnsEMBL::CoordSystem
Exceptions : Thrown if not of the expected type
=cut
# Accessor/mutator for the linked CoordSystem; setter validates the type.
sub coord_system {
  my ($self, $new_coord_system) = @_;
  return $self->{'coord_system'} if ! defined $new_coord_system;
  assert_ref($new_coord_system, 'Bio::EnsEMBL::CoordSystem', 'coord_system');
  $self->{'coord_system'} = $new_coord_system;
  return $self->{'coord_system'};
}
=head2 analysis
Arg[1] : Bio::EnsEMBL::Analysis Optional setter
Description : Mutator for the analysis field. All files are linked to one
Returntype : Bio::EnsEMBL::Analysis
Exceptions : Thrown if not of the expected type
=cut
# Accessor/mutator for the linked Analysis; setter validates the type.
sub analysis {
  my ($self, $new_analysis) = @_;
  return $self->{'analysis'} if ! defined $new_analysis;
  assert_ref($new_analysis, 'Bio::EnsEMBL::Analysis', 'analysis');
  $self->{'analysis'} = $new_analysis;
  return $self->{'analysis'};
}
=head2 name
Arg[1] : String Optional setter
Description : Mutator for the name of the file. Can be used in file location
generation
Returntype : String
=cut
# Accessor/mutator for the file's base name (used in path generation).
sub name {
  my ($self, $new_name) = @_;
  $self->{'name'} = $new_name if defined $new_name;
  return $self->{'name'};
}
=head2 version_lock
Arg[1] : Boolean Optional setter
Description : Boolean indicating if the file is linked to the version of the
database it was found in.
Returntype : Boolean
=cut
# Accessor/mutator for the version-lock flag (true means paths embed the
# API software version); setter validates the value is boolean.
sub version_lock {
  my ($self, $new_lock) = @_;
  return $self->{'version_lock'} if ! defined $new_lock;
  assert_boolean($new_lock, 'version_lock');
  $self->{'version_lock'} = $new_lock;
  return $self->{'version_lock'};
}
=head2 absolute
Arg[1] : Boolean Optional setter
Description : Indicates if the URL of this file is an absolute one i.e.
should be used verbatim or not.
Returntype : Boolean
=cut
# Accessor/mutator for the absolute-URL flag (true means url() is used
# verbatim); setter validates the value is boolean.
sub absolute {
  my ($self, $new_absolute) = @_;
  return $self->{'absolute'} if ! defined $new_absolute;
  assert_boolean($new_absolute, 'absolute');
  $self->{'absolute'} = $new_absolute;
  return $self->{'absolute'};
}
=head2 url
Arg[1] : String Optional setter
Description : Location of the file. Can be optional and if set means once
we are in an automatic location use this value to locate
the file.
Returntype : String
=cut
# Accessor/mutator for the (optional) explicit location of the file.
sub url {
  my ($self, $new_url) = @_;
  if (defined $new_url) {
    $self->{'url'} = $new_url;
  }
  return $self->{'url'};
}
=head2 file_type
Arg[1] : String Optional setter
Description : The type of file we are working with. Can be used to generate
a file name.
Returntype : String
=cut
# Accessor/mutator for the file type (used to derive file extensions).
sub file_type {
  my ($self, $new_file_type) = @_;
  if (defined $new_file_type) {
    $self->{'file_type'} = $new_file_type;
  }
  return $self->{'file_type'};
}
#=head2 files
#
# Args :
# Example : my $files = @{$df->files()};
# Description : Returns all the file names we expect to cover for a flat file
# Returntype : type return_description
# Exceptions :
# Caller : caller
# Status : status
#
#=cut
#
#
#sub files {
# my ($self) = @_;
#
#}
1;
| Ensembl/ensembl | modules/Bio/EnsEMBL/DataFile.pm | Perl | apache-2.0 | 8,758 |
###########################################$
# Copyright 2008-2010 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"). You may not
# use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions
# and limitations under the License.
###########################################$
# __ _ _ ___
# ( )( \/\/ )/ __)
# /__\ \ / \__ \
# (_)(_) \/\/ (___/
#
# Amazon EC2 Perl Library
# API Version: 2010-06-15
# Generated: Wed Jul 21 13:37:54 PDT 2010
#
# Auto-generated request model for the EC2 ResetSnapshotAttribute API call;
# regenerate rather than editing by hand.  Field storage and (de)serialization
# conventions come from the Amazon::EC2::Model base class.
package Amazon::EC2::Model::ResetSnapshotAttributeRequest;
use base qw (Amazon::EC2::Model);
#
# Amazon::EC2::Model::ResetSnapshotAttributeRequest
#
# Properties:
#
#
# SnapshotId: string
# Attribute: string
#
#
#
# Constructor: declares the field table (name => { FieldValue, FieldType })
# and optionally populates it from a hashref via the base class.
sub new {
    my ($class, $data) = @_;
    my $self = {};
    $self->{_fields} = {
        SnapshotId => { FieldValue => undef, FieldType => "string"},
        Attribute => { FieldValue => undef, FieldType => "string"},
    };
    bless ($self, $class);
    if (defined $data) {
        $self->_fromHashRef($data);
    }
    return $self;
}
# Plain getter for SnapshotId.
sub getSnapshotId {
    return shift->{_fields}->{SnapshotId}->{FieldValue};
}
# Plain setter for SnapshotId.
sub setSnapshotId {
    my ($self, $value) = @_;
    $self->{_fields}->{SnapshotId}->{FieldValue} = $value;
    return $self;
}
# Chainable setter variant (returns $self for fluent call style).
sub withSnapshotId {
    my ($self, $value) = @_;
    $self->setSnapshotId($value);
    return $self;
}
# True when SnapshotId has been assigned a defined value.
sub isSetSnapshotId {
    return defined (shift->{_fields}->{SnapshotId}->{FieldValue});
}
# Plain getter for Attribute.
sub getAttribute {
    return shift->{_fields}->{Attribute}->{FieldValue};
}
# Plain setter for Attribute.
sub setAttribute {
    my ($self, $value) = @_;
    $self->{_fields}->{Attribute}->{FieldValue} = $value;
    return $self;
}
# Chainable setter variant (returns $self for fluent call style).
sub withAttribute {
    my ($self, $value) = @_;
    $self->setAttribute($value);
    return $self;
}
# True when Attribute has been assigned a defined value.
sub isSetAttribute {
    return defined (shift->{_fields}->{Attribute}->{FieldValue});
}
1;
| electric-cloud/EC-EC2 | src/main/resources/project/lib/Amazon/EC2/Model/ResetSnapshotAttributeRequest.pm | Perl | apache-2.0 | 2,496 |
#!/usr/bin/perl
# Mingyu @ Aug 29 2011
# User Dependent case
# Train with random 5 trials of a SINGLE right-handed user and test with the rest 5 trials
# Should use the exp1.pl to call exp1_single.pl
use File::Path qw(make_path remove_tree);
use File::stat;
# Require exactly four positional arguments; otherwise print usage and quit.
if ($#ARGV !=3)
{
	print "usage: exp1_single [user] [datatype] [run#] [data_dir]\n";
	print " [data_dir]: the base path to the \$datatype folder\n";
	exit;
}
else
{
	$usr = $ARGV[0]; # "B1", "C1", etc.
	$dtype = $ARGV[1]; # "AW", "PO", etc.
	$run = $ARGV[2]; # specify the combination of trials for training
	$runStr= sprintf("%03d", $run); # zero-padded run id used in dir/file names
	$data_dir = $ARGV[3]; # base path to the data folder of .htk files
}
#-------------------------------------------------------------------------
# Define the gestures in use
#-------------------------------------------------------------------------
@gests = ();
for (my $i=0; $i<20; $i++)
{
$gests[$i] = sprintf("g%02d", $i);
}
#-------------------------------------------------------------------------
# Set some common parameters
# Check directories and setup paths
# Prepare the log and err files (Redirect STDOUT & STDERR)
#-------------------------------------------------------------------------
# Common paths and HTK options for this user/datatype/run combination.
$gMLF = "mlf/gest.mlf"; # global def of gest MLF
$path = "exp1/$usr/$dtype";
$trn_script = "$path/run$runStr/train.scp";
$tst_script = "$path/run$runStr/test.scp";
$hmm0 = "$path/run$runStr/hmm0";
$hmm1 = "$path/run$runStr/hmm1";
$hmm2 = "$path/run$runStr/hmm2";
$proto = "proto/template_$dtype"; # the HMM proto
$trnMLF= "$path/run$runStr/trn.mlf";
$tstMLF= "$path/run$runStr/tst.mlf";
$opt = "-A -T 1"; # common HTK flags: print command line, trace level 1
$minVar = "-v 0.001"; # variance floor for HMM training
# Create the three HMM stage directories if any of them is missing
# (make_path is a no-op for directories that already exist).
unless ((-d $hmm0) and (-d $hmm1) and (-d $hmm2))
{
	make_path "$hmm0";
	make_path "$hmm1";
	make_path "$hmm2";
}
# NOTE(review): the 'unless' guard below was commented out, leaving a bare
# block that always runs — the train/test scripts are rewritten on every
# invocation.  Confirm this is intended.
#unless ((-e $trn_script) and (-e $tst_script))
{
	# Pick the trial split for this run from UD.idx: the first five indices
	# go to training, the remaining five to testing.
	my $line = &extract("UD.idx", $run);
	chomp($line);
	@idx = split("\t", $line);
	open FILE_trn, ">$trn_script" or die $!;
	open FILE_tst, ">$tst_script" or die $!;
	foreach my $g (@gests)
	{
		foreach my $j (0..4)
		{
			my $trn_name = sprintf("%s_%s_t%02d.htk", $g, $usr, $idx[$j]);
			my $tst_name = sprintf("%s_%s_t%02d.htk", $g, $usr, $idx[$j+5]);
			print FILE_trn "$data_dir/$dtype/$trn_name\n";
			print FILE_tst "$data_dir/$dtype/$tst_name\n";
		}
	}
	close FILE_trn;
	close FILE_tst;
}
# Keep a copy of the real STDOUT, then redirect STDOUT/STDERR to per-run
# log and error files.
open (REGOUT, ">&STDOUT") or die "Can't open REGOUT: $!\n";
open (STDOUT, ">$path/log$runStr.txt") or die $!;
open (STDERR, ">$path/err$runStr.txt") or die $!;
#-------------------------------------------------------------------------
# Prepare the word model, dictionary, grammer, and word network
#-------------------------------------------------------------------------
$hmmlist = "$path/run$runStr/gestList";
$dic = "$path/run$runStr/gestDic";
$gram = "$path/run$runStr/gestGram";
$wnet = "$path/run$runStr/gestNet";
open FILE_model, ">$hmmlist" or die $!; # gestList = hmmList (each gest has its own hmm)
open FILE_dic, ">$dic" or die $!;
open FILE_gram, ">$gram" or die $!;
# One model entry and one dictionary entry per gesture (word == model name).
foreach my $g (@gests)
{
	print FILE_model "$g\n";
	print FILE_dic "$g\t$g\n";
}
# Grammar: one utterance is exactly one gesture out of the alternatives.
print FILE_gram "\$gest = ".join(' | ', @gests)." \;\n";
print FILE_gram "( \$gest ) ";
close FILE_model or die $!;
close FILE_dic or die $!;
close FILE_gram or die $!;
# Compile the grammar into an HTK word network; dies via systemE on failure.
&systemE("HParse $gram $wnet", "Error: HParse()");
#-------------------------------------------------------------------------
# Training
# HCompV & HInit & HRest
# -A: print current prompt argument
# -T: trace
# -S: script for data files (.htk)
# -M: output HMM dir
# -o: output name of the HMM
# -n: use the Var from HCompV
#-------------------------------------------------------------------------
# Per-gesture training: initialise each model from the prototype with
# HInit (output in $hmm1), then re-estimate with HRest (output in $hmm2).
foreach my $gest (@gests)
{
	## Mingyu: after proper linear scaling, HCompV seems to work worse than HInit directly.
	#&systemE("HCompV $opt -I $gMLF -l $gest -S $trn_script -M $hmm0 -o $gest $proto",
	#	 "Error: HCompV()");
	#&systemE("HInit $opt $minVar -I $gMLF -l $gest -S $trn_script -M $hmm1 -n $hmm0/$gest",
	#	 "Error: HInit()"); # -n use the var from
	&systemE("HInit $opt $minVar -I $gMLF -l $gest -S $trn_script -M $hmm1 -o $gest $proto",
		 "Error: HInit()");
	&systemE("HRest $opt -I $gMLF -l $gest -S $trn_script -M $hmm2 $hmm1/$gest",
		 "Error: HRest()");
}
#-------------------------------------------------------------------------
# Recognition
# HVite & HResults
# Mingyu: HRestuls in upper layer after 50 trials
#-------------------------------------------------------------------------
# NOTE(review): decoding loads models from $hmm1 (the HInit output), not
# the HRest-refined $hmm2 models -- looks intentional, but worth confirming.
# Test with the training set
&systemE("HVite $opt -d $hmm1 -S $trn_script -i $trnMLF -w $wnet $dic $hmmlist", "Error: HVite()");
# Test with the testing set
&systemE("HVite $opt -d $hmm1 -S $tst_script -i $tstMLF -w $wnet $dic $hmmlist", "Error: HVite()");
# Collect recognition results
&systemE("HResults $opt -I $gMLF $hmmlist $trnMLF", "Error: HResults()");
&systemE("HResults $opt -I $gMLF $hmmlist $tstMLF", "Error: HResults()");
#-------------------------------------------------------------------------
# Finish: clean up
#-------------------------------------------------------------------------
# Close the redirected handles and drop the error log when it stayed empty.
close REGOUT;
close STDOUT;
close STDERR;
if (stat("$path/err$runStr.txt")->size == 0) # no stderr
{
	unlink("$path/err$runStr.txt");
}
#-------------------------------------------------------------------------
# systemE()-Execute perl's system() and then catch any error.
#-------------------------------------------------------------------------
sub systemE
{
	# Run a shell command via system() and abort (die) if it does not
	# exit successfully.  $commandString is the command line to run;
	# $optionalString is caller-supplied context for the die message,
	# which also reports the raw wait status $?.
	my ($commandString, $optionalString) = @_;

	my $status = system($commandString);
	die "Error: $optionalString:$?\n" unless $status == 0;
}
#-------------------------------------------------------------------------
# extract()-Return certain line of a given file
#-------------------------------------------------------------------------
sub extract
{
	# Return one line of $filename.
	#
	# If $line_no contains any non-digit character it is treated as a
	# regex pattern and the first matching line is returned; otherwise
	# it is treated as a 1-based line number and that line is returned
	# (undef when the file is shorter).  Returns undef when no line
	# matches.
	my ($filename, $line_no) = @_;

	# Three-arg open with a lexical handle: the old two-arg, bareword
	# form (open FILE, $filename) allowed mode injection through the
	# name and leaked a global handle that was never closed.
	open my $fh, '<', $filename or die "$filename can't be opened $! ";

	my $line;
	if ($line_no =~ /\D/) {
		# Pattern mode: $line_no is interpolated as a regex, as before.
		while ($line = <$fh>) {
			if ($line =~ /$line_no/) {
				close $fh;
				return $line;
			}
		}
		close $fh;
		return;		# no match -> undef, matching old fall-off behaviour
	}
	else {
		# Line-number mode: read and discard until the requested line.
		foreach (1 .. $line_no) {
			$line = <$fh>;
		}
		close $fh;
		return $line;
	}
}
| mingyu623/6DMG | 6DMG_htk/gestures/exp1_single.pl | Perl | bsd-2-clause | 6,322 |
#!/usr/bin/perl
use strict;
use Getopt::Std;
use Cwd;
sub printOptions
{
	# Print the command-line usage summary for predictCommonStart.pl
	# to STDOUT.  Output is byte-identical to the historical version.
	print "Usage :: perl predictCommonStart.pl\n"
	    . "-d Working directory. <REQUIRED> \n"
	    . "-v Logging level. 1 is ERROR, 2 is WARNING, 3 is INFO. Default is 1.\n"
	    . "-l Log file with complete path. Default - Log messages will be written to standard output.\n"
	    . "-p Perl directory. <REQUIRED> \n"
	    . "-s Scripts directory. <REQUIRED> \n"
	    . "-j Java bin directory. <REQUIRED> \n"
	    . "-c Java code directory. <REQUIRED> \n";
}
sub trim($)
{
	# Return a copy of the argument with leading and trailing
	# whitespace stripped; the caller's string is left untouched.
	# (The scalar prototype is kept for call-site compatibility.)
	my ($text) = @_;
	$text =~ s/^\s+|\s+$//g;
	return $text;
}
sub main
{
	# Driver: parse options, validate the expected working-directory
	# layout, rebuild the output directories and invoke the Java
	# CommonStartFinder, logging progress through scripts/logging.pl.
	our($opt_d,$opt_v,$opt_l,$opt_p,$opt_s,$opt_j,$opt_c);
	getopt("dvlspjc");
	# -v (log level) and -l (log file) are optional; everything else is required.
	if((not defined $opt_d) || (not defined $opt_p) || (not defined $opt_s) || (not defined $opt_j) || (not defined $opt_c))
	{
		printOptions;
		exit(0);
	}
	my $working_dir = $opt_d;
	my $log = $opt_v;	# logging level: 1=ERROR, 2=WARNING, 3=INFO
	my $log_file = $opt_l;
	my $perl_dir = $opt_p;
	my $scripts_dir = $opt_s;
	my $java_bin_dir = $opt_j;
	my $java_code_dir = $opt_c;
	if(not defined $log)
	{
		$log = 1;
	}
	my $message = "Logging messages from predictCommonStart.pl......";
	my $priority = 2;
	system "$perl_dir/perl $scripts_dir/logging.pl -v $log -l \'$log_file\' -p $priority -m \'$message\'";
	chdir($working_dir);
	# Inputs produced by earlier pipeline stages; abort with an
	# ERROR-level (priority 1) log message if any is missing.
	my $rel_start_pos_file = "$working_dir/selected-genes-rel-startsites.txt";
	if(not -e $rel_start_pos_file)
	{
		$message = "$rel_start_pos_file does not exist";
		$priority = 1;
		system "$perl_dir/perl $scripts_dir/logging.pl -v $log -l \'$log_file\' -p $priority -m \'$message\'";
		exit(-1);
	}
	my $alignment_dir = "$working_dir/alignments/output";
	if(not -d $alignment_dir)
	{
		$message = "$alignment_dir does not exist";
		$priority = 1;
		system "$perl_dir/perl $scripts_dir/logging.pl -v $log -l \'$log_file\' -p $priority -m \'$message\'";
		exit(-1);
	}
	my $percent_id_file = "$working_dir/best-gene-pairs-percent-id.txt";
	if(not -e $percent_id_file)
	{
		$message = "$percent_id_file does not exist";
		$priority = 1;
		system "$perl_dir/perl $scripts_dir/logging.pl -v $log -l \'$log_file\' -p $priority -m \'$message\'";
		exit(-1);
	}
	# Output directories are wiped and rebuilt from scratch on every run.
	my $feature_files_dir = "$working_dir/alignments/feature-files";
	if(-d $feature_files_dir)
	{
		system "rm -rf $feature_files_dir";
	}
	mkdir($feature_files_dir);
	my $prediction_dir = "$working_dir/prediction";
	if(-d $prediction_dir)
	{
		system "rm -rf $prediction_dir";
	}
	mkdir($prediction_dir);
	## now call the java program
	chdir($java_code_dir);
	my $classpath = ".:$java_code_dir/bin";
	my $java_exec_cmd;
	# A usable log-file name (non-empty and not the sentinel "nodef") is
	# passed through as an extra trailing argument; that branch also
	# pins the JVM heap at 256m.
	if(($log_file ne "") && ($log_file ne "nodef"))
	{
		$java_exec_cmd = "$java_bin_dir/java -Xms256m -Xmx256m -classpath $classpath gov.lanl.burk.genefinder.CommonStartFinder $rel_start_pos_file $alignment_dir $feature_files_dir $percent_id_file $prediction_dir $log $log_file";
	}
	else
	{
		$java_exec_cmd = "$java_bin_dir/java -classpath $classpath gov.lanl.burk.genefinder.CommonStartFinder $rel_start_pos_file $alignment_dir $feature_files_dir $percent_id_file $prediction_dir $log";
	}
	system "$java_exec_cmd";
	$message = "Done..";
	$priority = 2;
	system "$perl_dir/perl $scripts_dir/logging.pl -v $log -l \'$log_file\' -p $priority -m \'$message\'";
}
main;
| mewall/gmv | code/scripts/predictCommonStart.pl | Perl | bsd-3-clause | 3,317 |
=pod
=head1 NAME
SSL_group_to_name - get name of group
=head1 SYNOPSIS
#include <openssl/ssl.h>
const char *SSL_group_to_name(const SSL *ssl, int id);
=head1 DESCRIPTION
SSL_group_to_name() is used to retrieve the TLS group name
associated with a given TLS group ID, as registered via built-in
or external providers and as returned by a call to SSL_get1_groups()
or SSL_get_shared_group().
=head1 RETURN VALUES
If non-NULL, SSL_group_to_name() returns the TLS group name
corresponding to the given I<id> as a NULL-terminated string.
If SSL_group_to_name() returns NULL, an error occurred; possibly no
corresponding TLS name was registered during provider initialisation.
Note that the return value is valid only during the lifetime of the
SSL object I<ssl>.
=head1 SEE ALSO
L<ssl(7)>
=head1 COPYRIGHT
Copyright 2021 The OpenSSL Project Authors. All Rights Reserved.
Licensed under the Apache License 2.0 (the "License"). You may not use
this file except in compliance with the License. You can obtain a copy
in the file LICENSE in the source distribution or at
L<https://www.openssl.org/source/license.html>.
=cut
| jens-maus/amissl | openssl/doc/man3/SSL_group_to_name.pod | Perl | bsd-3-clause | 1,133 |
#!perl
use Test::More;
use strict;
use warnings;
# $es is presumably an Elasticsearch client object created by the test
# harness that loads this fragment (it is not defined in this file), with
# an index pre-seeded so the smallest 'num' is 2 and the largest is 31 --
# TODO confirm against the harness setup.
our $es;
my $r;
# SORT
is $es->search(
    query => { match_all => {} },
    sort  => ['num'],
)->{hits}{hits}[0]{_source}{num}, 2,
    "Query with sort";
# 'reverse' flag: \1 is a reference serialised as JSON boolean true.
is $es->search(
    query => { match_all => {} },
    sort => [ { num => { reverse => \1 } } ],
)->{hits}{hits}[0]{_source}{num}, 31,
    " - reverse sort";
is $es->search(
    query => { match_all => {} },
    sort => { 'num' => 'asc' },
)->{hits}{hits}[0]{_source}{num}, 2,
    " - asc";
is $es->search(
    query => { match_all => {} },
    sort => [ { num => 'desc' } ],
)->{hits}{hits}[0]{_source}{num}, 31,
    " - desc";
1;
#!/usr/bin/perl
#
# Created: Thu Aug 15 11:57:33 1996 too
# Last modified: Mon Dec 27 09:23:56 1999 too
#
# Copyright (c) 1996-1999 Tomi Ollila. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Run the given preprocessor command over each listed source file and
# print makefile-style header dependencies (one "<base>.lo: ..." rule
# per file) on STDOUT.
die "Usage: mkdep CPP-command [CPP options] file1 [file2...]\n"
	if ($#ARGV < 1);
$cmdl = shift(@ARGV);
# Fold any leading uppercase options (-D..., -I..., ...) into the command.
$cmdl = "$cmdl " . shift (@ARGV) while ($ARGV[0] =~ /^-[A-Z]/);
while ($file = shift(@ARGV))
{
  # Map a foo.o argument back to foo.c, pipe it through the
  # preprocessor, and scan the output for included headers.
  $file =~ s/\.o$/.c/;
  open(F, "$cmdl $file|");
  &parseout;
  close(F);
}
sub initinit
{
  # Start the dependency line for the current source file (global
  # $file): clear the set of headers already listed and seed the
  # output line with "<base>.lo:\t<source>", tracking its width in
  # $len for later wrapping.
  undef %used;
  ($of = $file) =~ s/\.c$/.lo/;
  $str = $of . ":\t" . $file;
  $len = length($str);
}
sub initstr
{
  # Begin a fresh continuation line of the dependency list: a single
  # leading tab, with $len tracking the current line width (1).
  ($str, $len) = ("\t", length "\t");
}
sub parseout
{
  # Read preprocessor output from the global handle F and print a
  # line-wrapped "<base>.lo: header ..." dependency rule for $file.
  &initinit;
  while (<F>)
  {
    s/\\\\/\//g;	# normalise doubled backslashes to forward slashes
    # Only cpp line markers of the form: # <num> "<path>.h" matter.
    next unless (/^# [0-9]* "(.*\.h)"/);
    next if ($1 =~ /^\//);	# skip headers with absolute (system) paths
    next if $used{$1};		# list each header only once
    $used{$1} = 1;
    $nlen = length($1) + 1;	# +1 accounts for the separating space
    if ($len + $nlen > 72)
    {
      # Line would overflow: emit a "\"-continued line, start a new one.
      print $str, "\\\n";
      &initstr;
      $str = $str . $1;
    }
    else { $str = $str . " " . $1; }
    $len += $nlen;
  }
  print $str, "\n";
}
| bkuebler/openshift-apache-http-cartridge | src/httpd-2.4.6/build/mkdep.perl | Perl | apache-2.0 | 2,277 |
# This file was created by configpm when Perl was built. Any changes
# made to this file will be lost the next time perl is built.
# for a description of the variables, please have a look at the
# Glossary file, as written in the Porting folder, or use the url:
# http://perl5.git.perl.org/perl.git/blob/HEAD:/Porting/Glossary
package Config;
use strict;
use warnings;
use vars '%Config';
# Skip @Config::EXPORT because it only contains %Config, which we special
# case below as it's not a function. @Config::EXPORT won't change in the
# lifetime of Perl 5.
my %Export_Cache = (myconfig => 1, config_sh => 1, config_vars => 1,
config_re => 1, compile_date => 1, local_patches => 1,
bincompat_options => 1, non_bincompat_options => 1,
header_files => 1);
@Config::EXPORT = qw(%Config);
@Config::EXPORT_OK = keys %Export_Cache;
# Need to stub all the functions to make code such as print Config::config_sh
# keep working
sub bincompat_options;
sub compile_date;
sub config_re;
sub config_sh;
sub config_vars;
sub header_files;
sub local_patches;
sub myconfig;
sub non_bincompat_options;
# Define our own import method to avoid pulling in the full Exporter:
sub import {
    # Minimal Exporter replacement (auto-generated file): export %Config
    # and/or the requested helper functions into the caller's package.
    shift;
    @_ = @Config::EXPORT unless @_;
    # Everything except the special '%Config' tag must be a known function.
    my @funcs = grep $_ ne '%Config', @_;
    my $export_Config = @funcs < @_ ? 1 : 0;
    no strict 'refs';
    my $callpkg = caller(0);
    foreach my $func (@funcs) {
	die qq{"$func" is not exported by the Config module\n}
	    unless $Export_Cache{$func};
	*{$callpkg.'::'.$func} = \&{$func};
    }
    # Alias the caller's %Config to ours when it was requested.
    *{"$callpkg\::Config"} = \%Config if $export_Config;
    return;
}
die "Perl lib version (5.16.0) doesn't match executable '$0' version ($])"
unless $^V;
$^V eq 5.16.0
or die "Perl lib version (5.16.0) doesn't match executable '$0' version (" .
sprintf("v%vd",$^V) . ")";
# Tied-hash accessor: serve a preloaded value if the key exists in the
# object hash, otherwise fall back to fetch_string() (Config_heavy.pl).
sub FETCH {
    my($self, $key) = @_;
    # check for cached value (which may be undef so we use exists not defined)
    return exists $self->{$key} ? $self->{$key} : $self->fetch_string($key);
}
# tie() constructor: the preloaded hashref itself becomes the tied object.
sub TIEHASH {
    bless $_[1], $_[0];
}
sub DESTROY { }
# Any other Config:: call loads the heavy part of the module on demand
# and re-dispatches through its launcher.
sub AUTOLOAD {
    require 'Config_heavy.pl';
    goto \&launcher unless $Config::AUTOLOAD =~ /launcher$/;
    die "&Config::AUTOLOAD failed on $Config::AUTOLOAD";
}
# tie returns the object, so the value returned to require will be true.
tie %Config, 'Config', {
archlibexp => 'C:\\strawberry\\perl\\lib',
archname => 'MSWin32-x86-multi-thread',
cc => 'gcc',
d_readlink => undef,
d_symlink => undef,
dlext => 'dll',
dlsrc => 'dl_win32.xs',
dont_use_nlink => undef,
exe_ext => '.exe',
inc_version_list => '',
intsize => '4',
ldlibpthname => '',
libpth => 'C:\\strawberry\\c\\lib C:\\strawberry\\c\\i686-w64-mingw32\\lib',
osname => 'MSWin32',
osvers => '4.0',
path_sep => ';',
privlibexp => 'C:\\strawberry\\perl\\lib',
scriptdir => 'C:\\strawberry\\perl\\bin',
sitearchexp => 'C:\\strawberry\\perl\\site\\lib',
sitelibexp => 'C:\\strawberry\\perl\\site\\lib',
so => 'dll',
useithreads => 'define',
usevendorprefix => 'define',
version => '5.16.0',
};
eval {
require Portable;
Portable->import('Config');
};
1;
| leighpauls/k2cro4 | third_party/perl/perl/lib/Config.pm | Perl | bsd-3-clause | 3,312 |
# This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.07) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
#
# Generated from /tmp/ympzZnp0Uq/australasia. Olson data version 2012c
#
# Do not edit this file directly.
#
package DateTime::TimeZone::Pacific::Auckland;
{
$DateTime::TimeZone::Pacific::Auckland::VERSION = '1.46';
}
use strict;
use Class::Singleton 1.03;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;
@DateTime::TimeZone::Pacific::Auckland::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );
my $spans =
[
[
DateTime::TimeZone::NEG_INFINITY,
58943247656,
DateTime::TimeZone::NEG_INFINITY,
58943289600,
41944,
0,
'LMT'
],
[
58943247656,
60805348200,
58943289056,
60805389600,
41400,
0,
'NZMT'
],
[
60805348200,
60815626200,
60805393200,
60815671200,
45000,
1,
'NZST'
],
[
60815626200,
60834983400,
60815667600,
60835024800,
41400,
0,
'NZMT'
],
[
60834983400,
60848287200,
60835026600,
60848330400,
43200,
1,
'NZST'
],
[
60848287200,
60866433000,
60848328600,
60866474400,
41400,
0,
'NZMT'
],
[
60866433000,
60879736800,
60866476200,
60879780000,
43200,
1,
'NZST'
],
[
60879736800,
60897882600,
60879778200,
60897924000,
41400,
0,
'NZMT'
],
[
60897882600,
60911186400,
60897925800,
60911229600,
43200,
1,
'NZST'
],
[
60911186400,
60929332200,
60911227800,
60929373600,
41400,
0,
'NZMT'
],
[
60929332200,
60943240800,
60929375400,
60943284000,
43200,
1,
'NZST'
],
[
60943240800,
60960781800,
60943282200,
60960823200,
41400,
0,
'NZMT'
],
[
60960781800,
60974690400,
60960825000,
60974733600,
43200,
1,
'NZST'
],
[
60974690400,
60992231400,
60974731800,
60992272800,
41400,
0,
'NZMT'
],
[
60992231400,
61009768800,
60992274600,
61009812000,
43200,
1,
'NZST'
],
[
61009768800,
61023076200,
61009810200,
61023117600,
41400,
0,
'NZMT'
],
[
61023076200,
61041218400,
61023119400,
61041261600,
43200,
1,
'NZST'
],
[
61041218400,
61054525800,
61041259800,
61054567200,
41400,
0,
'NZMT'
],
[
61054525800,
61072668000,
61054569000,
61072711200,
43200,
1,
'NZST'
],
[
61072668000,
61085975400,
61072709400,
61086016800,
41400,
0,
'NZMT'
],
[
61085975400,
61104117600,
61086018600,
61104160800,
43200,
1,
'NZST'
],
[
61104117600,
61117425000,
61104159000,
61117466400,
41400,
0,
'NZMT'
],
[
61117425000,
61135567200,
61117468200,
61135610400,
43200,
1,
'NZST'
],
[
61135567200,
61148874600,
61135608600,
61148916000,
41400,
0,
'NZMT'
],
[
61148874600,
61167621600,
61148917800,
61167664800,
43200,
1,
'NZST'
],
[
61167621600,
61180324200,
61167663000,
61180365600,
41400,
0,
'NZMT'
],
[
61180324200,
61199071200,
61180367400,
61199114400,
43200,
1,
'NZST'
],
[
61199071200,
61212378600,
61199112600,
61212420000,
41400,
0,
'NZMT'
],
[
61212378600,
61378257600,
61212421800,
61378300800,
43200,
1,
'NZST'
],
[
61378257600,
62288316000,
61378300800,
62288359200,
43200,
0,
'NZST'
],
[
62288316000,
62297992800,
62288362800,
62298039600,
46800,
1,
'NZDT'
],
[
62297992800,
62319160800,
62298036000,
62319204000,
43200,
0,
'NZST'
],
[
62319160800,
62330652000,
62319207600,
62330698800,
46800,
1,
'NZDT'
],
[
62330652000,
62351215200,
62330695200,
62351258400,
43200,
0,
'NZST'
],
[
62351215200,
62362101600,
62351262000,
62362148400,
46800,
1,
'NZDT'
],
[
62362101600,
62382664800,
62362144800,
62382708000,
43200,
0,
'NZST'
],
[
62382664800,
62393551200,
62382711600,
62393598000,
46800,
1,
'NZDT'
],
[
62393551200,
62414114400,
62393594400,
62414157600,
43200,
0,
'NZST'
],
[
62414114400,
62425000800,
62414161200,
62425047600,
46800,
1,
'NZDT'
],
[
62425000800,
62445564000,
62425044000,
62445607200,
43200,
0,
'NZST'
],
[
62445564000,
62456450400,
62445610800,
62456497200,
46800,
1,
'NZDT'
],
[
62456450400,
62477013600,
62456493600,
62477056800,
43200,
0,
'NZST'
],
[
62477013600,
62487900000,
62477060400,
62487946800,
46800,
1,
'NZDT'
],
[
62487900000,
62508463200,
62487943200,
62508506400,
43200,
0,
'NZST'
],
[
62508463200,
62519954400,
62508510000,
62520001200,
46800,
1,
'NZDT'
],
[
62519954400,
62540517600,
62519997600,
62540560800,
43200,
0,
'NZST'
],
[
62540517600,
62551404000,
62540564400,
62551450800,
46800,
1,
'NZDT'
],
[
62551404000,
62571967200,
62551447200,
62572010400,
43200,
0,
'NZST'
],
[
62571967200,
62582853600,
62572014000,
62582900400,
46800,
1,
'NZDT'
],
[
62582853600,
62603416800,
62582896800,
62603460000,
43200,
0,
'NZST'
],
[
62603416800,
62614303200,
62603463600,
62614350000,
46800,
1,
'NZDT'
],
[
62614303200,
62634866400,
62614346400,
62634909600,
43200,
0,
'NZST'
],
[
62634866400,
62645752800,
62634913200,
62645799600,
46800,
1,
'NZDT'
],
[
62645752800,
62666316000,
62645796000,
62666359200,
43200,
0,
'NZST'
],
[
62666316000,
62677202400,
62666362800,
62677249200,
46800,
1,
'NZDT'
],
[
62677202400,
62697765600,
62677245600,
62697808800,
43200,
0,
'NZST'
],
[
62697765600,
62709256800,
62697812400,
62709303600,
46800,
1,
'NZDT'
],
[
62709256800,
62729820000,
62709300000,
62729863200,
43200,
0,
'NZST'
],
[
62729820000,
62740706400,
62729866800,
62740753200,
46800,
1,
'NZDT'
],
[
62740706400,
62759455200,
62740749600,
62759498400,
43200,
0,
'NZST'
],
[
62759455200,
62773365600,
62759502000,
62773412400,
46800,
1,
'NZDT'
],
[
62773365600,
62790904800,
62773408800,
62790948000,
43200,
0,
'NZST'
],
[
62790904800,
62804815200,
62790951600,
62804862000,
46800,
1,
'NZDT'
],
[
62804815200,
62822354400,
62804858400,
62822397600,
43200,
0,
'NZST'
],
[
62822354400,
62836264800,
62822401200,
62836311600,
46800,
1,
'NZDT'
],
[
62836264800,
62853804000,
62836308000,
62853847200,
43200,
0,
'NZST'
],
[
62853804000,
62868319200,
62853850800,
62868366000,
46800,
1,
'NZDT'
],
[
62868319200,
62885253600,
62868362400,
62885296800,
43200,
0,
'NZST'
],
[
62885253600,
62899768800,
62885300400,
62899815600,
46800,
1,
'NZDT'
],
[
62899768800,
62916703200,
62899812000,
62916746400,
43200,
0,
'NZST'
],
[
62916703200,
62931218400,
62916750000,
62931265200,
46800,
1,
'NZDT'
],
[
62931218400,
62948152800,
62931261600,
62948196000,
43200,
0,
'NZST'
],
[
62948152800,
62962668000,
62948199600,
62962714800,
46800,
1,
'NZDT'
],
[
62962668000,
62980207200,
62962711200,
62980250400,
43200,
0,
'NZST'
],
[
62980207200,
62994117600,
62980254000,
62994164400,
46800,
1,
'NZDT'
],
[
62994117600,
63011656800,
62994160800,
63011700000,
43200,
0,
'NZST'
],
[
63011656800,
63025567200,
63011703600,
63025614000,
46800,
1,
'NZDT'
],
[
63025567200,
63043106400,
63025610400,
63043149600,
43200,
0,
'NZST'
],
[
63043106400,
63057621600,
63043153200,
63057668400,
46800,
1,
'NZDT'
],
[
63057621600,
63074556000,
63057664800,
63074599200,
43200,
0,
'NZST'
],
[
63074556000,
63089071200,
63074602800,
63089118000,
46800,
1,
'NZDT'
],
[
63089071200,
63106005600,
63089114400,
63106048800,
43200,
0,
'NZST'
],
[
63106005600,
63120520800,
63106052400,
63120567600,
46800,
1,
'NZDT'
],
[
63120520800,
63138060000,
63120564000,
63138103200,
43200,
0,
'NZST'
],
[
63138060000,
63151970400,
63138106800,
63152017200,
46800,
1,
'NZDT'
],
[
63151970400,
63169509600,
63152013600,
63169552800,
43200,
0,
'NZST'
],
[
63169509600,
63183420000,
63169556400,
63183466800,
46800,
1,
'NZDT'
],
[
63183420000,
63200959200,
63183463200,
63201002400,
43200,
0,
'NZST'
],
[
63200959200,
63215474400,
63201006000,
63215521200,
46800,
1,
'NZDT'
],
[
63215474400,
63232408800,
63215517600,
63232452000,
43200,
0,
'NZST'
],
[
63232408800,
63246924000,
63232455600,
63246970800,
46800,
1,
'NZDT'
],
[
63246924000,
63263858400,
63246967200,
63263901600,
43200,
0,
'NZST'
],
[
63263858400,
63278373600,
63263905200,
63278420400,
46800,
1,
'NZDT'
],
[
63278373600,
63295308000,
63278416800,
63295351200,
43200,
0,
'NZST'
],
[
63295308000,
63309823200,
63295354800,
63309870000,
46800,
1,
'NZDT'
],
[
63309823200,
63326757600,
63309866400,
63326800800,
43200,
0,
'NZST'
],
[
63326757600,
63343087200,
63326804400,
63343134000,
46800,
1,
'NZDT'
],
[
63343087200,
63358207200,
63343130400,
63358250400,
43200,
0,
'NZST'
],
[
63358207200,
63374536800,
63358254000,
63374583600,
46800,
1,
'NZDT'
],
[
63374536800,
63389656800,
63374580000,
63389700000,
43200,
0,
'NZST'
],
[
63389656800,
63405986400,
63389703600,
63406033200,
46800,
1,
'NZDT'
],
[
63405986400,
63421106400,
63406029600,
63421149600,
43200,
0,
'NZST'
],
[
63421106400,
63437436000,
63421153200,
63437482800,
46800,
1,
'NZDT'
],
[
63437436000,
63452556000,
63437479200,
63452599200,
43200,
0,
'NZST'
],
[
63452556000,
63468885600,
63452602800,
63468932400,
46800,
1,
'NZDT'
],
[
63468885600,
63484610400,
63468928800,
63484653600,
43200,
0,
'NZST'
],
[
63484610400,
63500940000,
63484657200,
63500986800,
46800,
1,
'NZDT'
],
[
63500940000,
63516060000,
63500983200,
63516103200,
43200,
0,
'NZST'
],
[
63516060000,
63532389600,
63516106800,
63532436400,
46800,
1,
'NZDT'
],
[
63532389600,
63547509600,
63532432800,
63547552800,
43200,
0,
'NZST'
],
[
63547509600,
63563839200,
63547556400,
63563886000,
46800,
1,
'NZDT'
],
[
63563839200,
63578959200,
63563882400,
63579002400,
43200,
0,
'NZST'
],
[
63578959200,
63595288800,
63579006000,
63595335600,
46800,
1,
'NZDT'
],
[
63595288800,
63610408800,
63595332000,
63610452000,
43200,
0,
'NZST'
],
[
63610408800,
63626738400,
63610455600,
63626785200,
46800,
1,
'NZDT'
],
[
63626738400,
63641858400,
63626781600,
63641901600,
43200,
0,
'NZST'
],
[
63641858400,
63658188000,
63641905200,
63658234800,
46800,
1,
'NZDT'
],
[
63658188000,
63673912800,
63658231200,
63673956000,
43200,
0,
'NZST'
],
[
63673912800,
63690242400,
63673959600,
63690289200,
46800,
1,
'NZDT'
],
[
63690242400,
63705362400,
63690285600,
63705405600,
43200,
0,
'NZST'
],
[
63705362400,
63721692000,
63705409200,
63721738800,
46800,
1,
'NZDT'
],
[
63721692000,
63736812000,
63721735200,
63736855200,
43200,
0,
'NZST'
],
[
63736812000,
63753141600,
63736858800,
63753188400,
46800,
1,
'NZDT'
],
[
63753141600,
63768261600,
63753184800,
63768304800,
43200,
0,
'NZST'
],
[
63768261600,
63784591200,
63768308400,
63784638000,
46800,
1,
'NZDT'
],
[
63784591200,
63799711200,
63784634400,
63799754400,
43200,
0,
'NZST'
],
[
63799711200,
63816040800,
63799758000,
63816087600,
46800,
1,
'NZDT'
],
[
63816040800,
63831160800,
63816084000,
63831204000,
43200,
0,
'NZST'
],
];
# Machine-generated accessors (see the file header): the Olson database
# version this data was built from, the number of DST transitions with
# changes, the last pre-computed year, the singleton constructor wiring
# in the $spans table above, and the final UTC offset (+12:00).
sub olson_version { '2012c' }
sub has_dst_changes { 64 }
sub _max_year { 2022 }
sub _new_instance
{
    return shift->_init( @_, spans => $spans );
}
sub _last_offset { 43200 }
my $last_observance = bless( {
'format' => 'NZ%sT',
'gmtoff' => '12:00',
'local_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 710397,
'local_rd_secs' => 0,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 710397,
'utc_rd_secs' => 0,
'utc_year' => 1947
}, 'DateTime' ),
'offset_from_std' => 0,
'offset_from_utc' => 43200,
'until' => [],
'utc_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 710396,
'local_rd_secs' => 43200,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 710396,
'utc_rd_secs' => 43200,
'utc_year' => 1946
}, 'DateTime' )
}, 'DateTime::TimeZone::OlsonDB::Observance' )
;
sub _last_observance { $last_observance }
my $rules = [
bless( {
'at' => '2:00s',
'from' => '2007',
'in' => 'Sep',
'letter' => 'D',
'name' => 'NZ',
'offset_from_std' => 3600,
'on' => 'lastSun',
'save' => '1:00',
'to' => 'max',
'type' => undef
}, 'DateTime::TimeZone::OlsonDB::Rule' ),
bless( {
'at' => '2:00s',
'from' => '2008',
'in' => 'Apr',
'letter' => 'S',
'name' => 'NZ',
'offset_from_std' => 0,
'on' => 'Sun>=1',
'save' => '0',
'to' => 'max',
'type' => undef
}, 'DateTime::TimeZone::OlsonDB::Rule' )
]
;
sub _rules { $rules }
1;
| leighpauls/k2cro4 | third_party/perl/perl/vendor/lib/DateTime/TimeZone/Pacific/Auckland.pm | Perl | bsd-3-clause | 12,929 |
#!/usr/local/bin/perl
#
# TI_CGTOOLS.pl, Texas Instruments CGTOOLS under Unix or MSYS.
#
$ssl= "ssl";
$crypto="crypto";
if ($fips && !$shlib)
{
$crypto="fips";
$crypto_compat = "cryptocompat.lib";
}
else
{
$crypto="crypto";
}
if ($fipscanisterbuild)
{
$fips_canister_path = "\$(LIB_D)/fipscanister.obj";
}
$o='/';
$cp='cp';
$cp2='$(PERL) util/copy.pl -stripcr';
$mkdir='$(PERL) util/mkdir-p.pl';
$rm='rm -f';
$zlib_lib="zlib1.lib";
# Santize -L options for ms link
$l_flags =~ s/-L("\[^"]+")/\/libpath:$1/g;
$l_flags =~ s/-L(\S+)/\/libpath:$1/g;
# C compiler stuff
$cc='cl6x';
$base_cflags= " $mf_cflag";
my $f;
$opt_cflags='';
$dbg_cflags=$f.' -g -DDEBUG -D_DEBUG';
$lflags='';
*::cc_compile_target = sub {
	# Build the makefile compile rule for one C (or generated .asm)
	# source.  cl6x only accepts an output *directory*, so when the
	# produced object name differs from the expected target the rule
	# renames it afterwards with mv.
	my ($target,$source,$ex_flags)=@_;
	my $ret;
	$ex_flags.=" -DMK1MF_BUILD" if ($source =~/cversion/);
	$ret ="$target: \$(SRC_D)$o$source\n\t";
	# FIPS canister builds pre-process .asm sources with fipsas.pl.
	if ($fipscanisterbuild && $source=~/\.asm$/) {
	    $ret.="\$(PERL) util${o}fipsas.pl . \$< norunasm \$(CFLAG)\n\t";
	}
	$ret.="\$(CC) --obj_directory=\$(OBJ_D) $ex_flags -c \$(SRC_D)$o$source\n";
	# Reduce both names to basenames (source additionally mapped to its
	# object name) to decide whether a rename is needed.
	$target =~ s/.*${o}([^${o}]+)/$1/;
	$source =~ s/.*${o}([^${o}\.]+)\..*/$1${obj}/;
	$ret.="\tmv \$(OBJ_D)${o}$source \$(OBJ_D)${o}$target\n" if ($target ne $source);
	$ret.="\n";
	return($ret);
};
*::perlasm_compile_target = sub {
	# Emit a rule pair: run the perlasm generator to produce
	# $(TMP_D)/<bname>.asm from $source, then assemble it into
	# $(OBJ_D) as $target.
	my ($target, $source, $bname) = @_;
	my $ret;
	# NOTE(review): this strips only a *single-character* extension
	# ("\.[^\.]$"); a two-letter one such as ".pl" would be left in
	# place -- verify against how mk1mf.pl passes $bname.
	$bname =~ s/(.*)\.[^\.]$/$1/;
	$ret=<<___;
\$(TMP_D)$o$bname.asm: $source
	\$(PERL) $source \$\@
___
	$ret .= "\t\$(PERL) util${o}fipsas.pl . \$@ norunasm \$(CFLAG)\n" if $fipscanisterbuild;
	$ret.=<<___;
$target: \$(TMP_D)$o$bname.asm
	\$(ASM) --obj_directory=\$(OBJ_D) \$(TMP_D)$o$bname.asm
___
};
$mlflags='';
$out_def ="c6x";
$tmp_def ="$out_def/tmp";
$inc_def="$out_def/inc";
if ($debug)
{
$cflags=$dbg_cflags.$base_cflags;
}
else
{
$cflags=$opt_cflags.$base_cflags;
}
$obj='.obj';
$asm_suffix='.asm';
$ofile="";
# EXE linking stuff
$link='$(CC) -z';
$efile="-o ";
$exep='.out';
$ex_libs='';
# static library stuff
$mklib='ar6x';
$ranlib='';
$plib="";
$libp=".lib";
$shlibp=($shlib)?".dll":".lib";
$lfile='-o ';
$shlib_ex_obj="";
$asm='$(CC) $(CFLAG) -c';
$bn_asm_obj='';
$bn_asm_src='';
$des_enc_obj='';
$des_enc_src='';
$bf_enc_obj='';
$bf_enc_src='';
if (!$no_asm)
{
import_asm($mf_bn_asm, "bn", \$bn_asm_obj, \$bn_asm_src);
import_asm($mf_aes_asm, "aes", \$aes_asm_obj, \$aes_asm_src);
import_asm($mf_des_asm, "des", \$des_enc_obj, \$des_enc_src);
import_asm($mf_bf_asm, "bf", \$bf_enc_obj, \$bf_enc_src);
import_asm($mf_cast_asm, "cast", \$cast_enc_obj, \$cast_enc_src);
import_asm($mf_rc4_asm, "rc4", \$rc4_enc_obj, \$rc4_enc_src);
import_asm($mf_rc5_asm, "rc5", \$rc5_enc_obj, \$rc5_enc_src);
import_asm($mf_md5_asm, "md5", \$md5_asm_obj, \$md5_asm_src);
import_asm($mf_sha_asm, "sha", \$sha1_asm_obj, \$sha1_asm_src);
import_asm($mf_rmd_asm, "ripemd", \$rmd160_asm_obj, \$rmd160_asm_src);
import_asm($mf_wp_asm, "whrlpool", \$whirlpool_asm_obj, \$whirlpool_asm_src);
import_asm($mf_modes_asm, "modes", \$modes_asm_obj, \$modes_asm_src);
import_asm($mf_cpuid_asm, "", \$cpuid_asm_obj, \$cpuid_asm_src);
$perl_asm = 1;
}
sub do_lib_rule
	{
	# Emit the makefile rule that builds a library.
	#
	# $objs      - object files going into the library
	# $target    - library file to produce
	# $name      - base name used to locate the ms/<NAME>.def file (DLL only)
	# $shlib     - true for a shared-library link rule, false for a
	#              static archive built with $(MKLIB)
	# $ign       - unused
	# $base_addr - optional /base: address for the DLL link
	#
	# Returns the rule text to append to the generated makefile.
	my($objs,$target,$name,$shlib,$ign,$base_addr) = @_;
	my $ret = '';
	# Normalise path separators in the target.  This statement used to
	# read "$taget", a typo that made it a silent no-op whenever $o was
	# not '/'.
	$target =~ s/\//$o/g if $o ne '/';
	my $base_arg;
	if ($base_addr ne "")
		{
		$base_arg= " /base:$base_addr";
		}
	else
		{
		$base_arg = "";
		}
	if ($name ne "")
		{
		$name =~ tr/a-z/A-Z/;
		$name = "/def:ms/${name}.def";
		}
#	$target="\$(LIB_D)$o$target";
#	$ret.="$target: $objs\n";
	if (!$shlib)
		{
#		$ret.="\t\$(RM) \$(O_$Name)\n";
		# Static library: archive the objects with MKLIB (ar6x).
		$ret.="$target: $objs\n";
		$ret.="\t\$(MKLIB) $lfile$target $objs\n";
		}
	else
		{
		# Shared library: everything except the crypto DLL itself also
		# links against $(L_CRYPTO); crypto optionally pulls in zlib.
		local($ex)=($target =~ /O_CRYPTO/)?'':' $(L_CRYPTO)';
		$ex.=" $zlib_lib" if $zlib_opt == 1 && $target =~ /O_CRYPTO/;
		if ($fips && $target =~ /O_CRYPTO/)
			{
			# FIPS crypto DLL: route through the two-step FIPSLINK
			# wrapper so fips_premain is compiled and embedded.
			$ret.="$target: $objs \$(PREMAIN_DSO_EXE)";
			$ret.="\n\tFIPS_LINK=\"\$(LINK)\" \\\n";
			$ret.="\tFIPS_CC=\$(CC)\\\n";
			$ret.="\tFIPS_CC_ARGS=/Fo\$(OBJ_D)${o}fips_premain.obj \$(SHLIB_CFLAGS) -c\\\n";
			$ret.="\tPREMAIN_DSO_EXE=\$(PREMAIN_DSO_EXE)\\\n";
			$ret.="\tFIPS_SHA1_EXE=\$(FIPS_SHA1_EXE)\\\n";
			$ret.="\tFIPS_TARGET=$target\\\n";
			$ret.="\tFIPSLIB_D=\$(FIPSLIB_D)\\\n";
			$ret.="\t\$(FIPSLINK) \$(MLFLAGS) /map $base_arg $efile$target ";
			$ret.="$name \$(SHLIB_EX_OBJ) $objs \$(EX_LIBS) ";
			$ret.="\$(OBJ_D)${o}fips_premain.obj $ex\n";
			}
		else
			{
			$ret.="$target: $objs";
			$ret.="\n\t\$(LINK) \$(MLFLAGS) $efile$target $name \$(SHLIB_EX_OBJ) $objs $ex \$(EX_LIBS)\n";
			}
		$ret.="\tIF EXIST \$@.manifest mt -nologo -manifest \$@.manifest -outputresource:\$@;2\n\n";
		}
	$ret.="\n";
	return($ret);
	}
sub do_link_rule
	{
	# Emit a makefile rule linking $files (plus $libs) into $target.
	# $standalone == 1 links a plain host tool; == 2 links a DSP image
	# against the fipscanister object and application.cmd and then runs
	# incore6x on the result; anything else links a normal application.
	my($target,$files,$dep_libs,$libs,$standalone)=@_;
	my $ret = '';
	# Both statements below referenced stray globals in the original
	# ("$file =~ ..." and "&bname($targer)"), making them no-ops;
	# operate on $target as intended.
	$target =~ s/\//$o/g if $o ne '/';
	$n=&bname($target);
	$ret.="$target: $files $dep_libs\n";
	if ($standalone == 1)
		{
		$ret.=" \$(LINK) \$(LFLAGS) $efile$target ";
		$ret.= "\$(EX_LIBS) " if ($files =~ /O_FIPSCANISTER/ && !$fipscanisterbuild);
		$ret.="$files $libs\n";
		}
	elsif ($standalone == 2)
		{
		$ret.="\t\$(LINK) \$(LFLAGS) $efile$target $files \$(O_FIPSCANISTER) $out_def/application.cmd\n";
		$ret.="\t$out_def/incore6x $target\n\n";
		}
	else
		{
		$ret.="\t\$(LINK) \$(LFLAGS) $efile$target ";
		$ret.="\t\$(APP_EX_OBJ) $files $libs\n";
		}
	return($ret);
	}
sub do_rlink_rule
	{
	# Emit the rule producing the partially-linked (-r) FIPS canister
	# object $target from its three object groups, then record its SHA1
	# digest and stage fips_premain.c (and its digest) into $(LIB_D).
	my($target,$rl_start, $rl_mid, $rl_end,$dep_libs,$libs)=@_;
	my $ret = '';
	my $files = "$rl_start $rl_mid $rl_end";
	# The original read "$file =~ ...", a typo referencing a stale
	# global; normalise separators in the actual target instead.
	$target =~ s/\//$o/g if $o ne '/';
	$n=&bname($target);
	$ret.="$target: $files $dep_libs\n";
	$ret.="\t\$(LINK) -r $lfile$target $files $out_def/fipscanister.cmd\n";
	$ret.="\t\$(PERL) $out_def${o}fips_standalone_sha1 $target > ${target}.sha1\n";
	$ret.="\t\$(PERL) util${o}copy.pl -stripcr fips${o}fips_premain.c \$(LIB_D)${o}fips_premain.c\n";
	$ret.="\t\$(CP) fips${o}fips_premain.c.sha1 \$(LIB_D)${o}fips_premain.c.sha1\n";
	$ret.="\n";
	return($ret);
	}
sub import_asm
	{
	# Translate a makefile variable listing .o files (e.g. "bn_asm.o")
	# into the matching assembler source list and object list, returned
	# through the $sref/$oref scalar references:
	#   sources: crypto/<asm_name>/asm/<base>.asm (or crypto/<base>.asm
	#            when $asm_name is empty)
	#   objects: $(TMP_D)\<base>.obj
	my ($mf_var, $asm_name, $oref, $sref) = @_;
	my $asm_dir;
	if ($asm_name eq "")
		{
		$asm_dir = "crypto$o";
		}
	else
		{
		# Was "crypto$o$asm_name$oasm$o": Perl parsed "$oasm" as an
		# (undefined) variable, silently dropping the "asm" path
		# component.  Delimit with ${o} so "asm" stays literal.
		$asm_dir = "crypto$o$asm_name${o}asm$o";
		}
	$$oref = "";
	$$sref = "";
	$mf_var =~ s/\.o//g;	# strip the .o extensions from the list
	foreach my $base (split(/ /, $mf_var))
		{
		$$sref .= $asm_dir . $base . ".asm ";
		$$oref .= "\$(TMP_D)\\" . $base . ".obj ";
		}
	# Drop the trailing separator left by the loop above.
	$$oref =~ s/ $//;
	$$sref =~ s/ $//;
	}
1;
| GaloisInc/hacrypto | src/C/openssl/openssl-fips-ecp-2.0.9/util/pl/TI_CGTOOLS.pl | Perl | bsd-3-clause | 6,351 |
package ExtUtils::MM_NW5;
=head1 NAME
ExtUtils::MM_NW5 - methods to override UN*X behaviour in ExtUtils::MakeMaker
=head1 SYNOPSIS
use ExtUtils::MM_NW5; # Done internally by ExtUtils::MakeMaker if needed
=head1 DESCRIPTION
See ExtUtils::MM_Unix for documentation of the methods provided
there. This package overrides the implementation of those methods, not
their semantics.
=over
=cut
use strict;
use ExtUtils::MakeMaker::Config;
use File::Basename;
our $VERSION = '6.63_02';
require ExtUtils::MM_Win32;
our @ISA = qw(ExtUtils::MM_Win32);
use ExtUtils::MakeMaker qw( &neatvalue );
$ENV{EMXSHELL} = 'sh'; # to run `commands`
my $BORLAND = $Config{'cc'} =~ /^bcc/i;
my $GCC = $Config{'cc'} =~ /^gcc/i;
=item os_flavor

Netware is reported in addition to the usual Win32 flavors.

=cut

sub os_flavor {
    my ($self) = @_;

    # Extend the parent (Win32) flavor list with our own entry.
    my @flavors = $self->SUPER::os_flavor;
    return (@flavors, 'Netware');
}
=item init_platform
Add Netware macros.
LIBPTH, BASE_IMPORT, NLM_VERSION, MPKTOOL, TOOLPATH, BOOT_SYMBOL,
NLM_SHORT_NAME, INCLUDE, PATH, MM_NW5_REVISION
=item platform_constants
Add Netware macros initialized above to the Makefile.
=cut
# Initialize the NetWare-specific Makefile macros (LIBPTH, BASE_IMPORT,
# NLM_VERSION, MPKTOOL, TOOLPATH, BOOT_SYMBOL, NLM_SHORT_NAME, INCLUDE,
# PATH, MM_NW5_VERSION) on top of the Win32 setup.  Values come from
# %Config and from arguments given to Makefile.PL.
sub init_platform {
my($self) = shift;
# To get Win32's setup.
$self->SUPER::init_platform;
# incpath is copied to makefile var INCLUDE in constants sub, here just
# make it empty
# Convert the space-separated libpth into a ';'-separated list.
# NOTE(review): without /g this replaces only the FIRST space -- confirm
# whether multi-entry libpth values are expected here.
my $libpth = $Config{'libpth'};
$libpth =~ s( )(;);
$self->{'LIBPTH'} = $libpth;
$self->{'BASE_IMPORT'} = $Config{'base_import'};
# Additional import file specified from Makefile.pl
if($self->{'base_import'}) {
$self->{'BASE_IMPORT'} .= ', ' . $self->{'base_import'};
}
$self->{'NLM_VERSION'} = $Config{'nlm_version'};
$self->{'MPKTOOL'} = $Config{'mpktool'};
$self->{'TOOLPATH'} = $Config{'toolpath'};
# The bootstrap symbol is the module name with '::' flattened to '_'.
(my $boot = $self->{'NAME'}) =~ s/:/_/g;
$self->{'BOOT_SYMBOL'}=$boot;
# If the final binary name is greater than 8 chars,
# truncate it here (NetWare binary names are limited to 8 characters).
if(length($self->{'BASEEXT'}) > 8) {
$self->{'NLM_SHORT_NAME'} = substr($self->{'BASEEXT'},0,8);
}
# Get the include path and replace the spaces with ;
# Copy this to makefile as INCLUDE = d:\...;d:\;
($self->{INCLUDE} = $Config{'incpath'}) =~ s/([ ]*)-I/;/g;
# Set the path to CodeWarrior binaries which might not have been set in
# any other place
$self->{PATH} = '$(PATH);$(TOOLPATH)';
$self->{MM_NW5_VERSION} = $VERSION;
}
# Render the NetWare macros set up by init_platform() into Makefile text,
# appended after the Win32 constants.  Undefined macros are skipped.
sub platform_constants {
    my ($self) = @_;

    # Start from the Win32 constants, then append our own.
    my $frag = $self->SUPER::platform_constants;

    for my $macro (qw(LIBPTH BASE_IMPORT NLM_VERSION MPKTOOL
                      TOOLPATH BOOT_SYMBOL NLM_SHORT_NAME INCLUDE PATH
                      MM_NW5_VERSION))
    {
        $frag .= "$macro = $self->{$macro}\n" if defined $self->{$macro};
    }

    return $frag;
}
=item const_cccmd
=cut
# Return (and cache in $self->{CONST_CCCMD}) the CCCMD Makefile fragment
# used to compile XS sources, or '' when no linking is needed.
sub const_cccmd {
my($self,$libperl)=@_;
# Return the cached fragment if we have already built it.
return $self->{CONST_CCCMD} if $self->{CONST_CCCMD};
return '' unless $self->needs_linking();
# Heredoc is single-quoted on purpose: the $(...) macros must reach the
# Makefile literally.
return $self->{CONST_CCCMD} = <<'MAKE_FRAG';
CCCMD = $(CC) $(CCFLAGS) $(INC) $(OPTIMIZE) \
$(PERLTYPE) $(MPOLLUTE) -o $@ \
-DVERSION=\"$(VERSION)\" -DXS_VERSION=\"$(XS_VERSION)\"
MAKE_FRAG
}
=item static_lib
=cut
# Build the Makefile rules that create the static library
# $(INST_STATIC) from $(OBJECT) (and $(MYEXTLIB), if any), choosing the
# archiver invocation appropriate for Borland, GCC, or the default
# toolchain.  Returns '' when the extension has no link code.
sub static_lib {
my($self) = @_;
return '' unless $self->has_link_code;
my $m = <<'END';
$(INST_STATIC): $(OBJECT) $(MYEXTLIB) $(INST_ARCHAUTODIR)$(DFSEP).exists
$(RM_RF) $@
END
# If this extension has its own library (eg SDBM_File)
# then copy that to $(INST_STATIC) and add $(OBJECT) into it.
# NOTE(review): the heredoc below is single-quoted, so "$self->{CP}" is
# emitted literally into the Makefile instead of being interpolated --
# confirm against upstream ExtUtils::MM_NW5 (likely lost formatting).
$m .= <<'END' if $self->{MYEXTLIB};
$self->{CP} $(MYEXTLIB) $@
END
# Archiver arguments differ per toolchain.
my $ar_arg;
if( $BORLAND ) {
$ar_arg = '$@ $(OBJECT:^"+")';
}
elsif( $GCC ) {
$ar_arg = '-ru $@ $(OBJECT)';
}
else {
$ar_arg = '-type library -o $@ $(OBJECT)';
}
# Archive the objects and record $(EXTRALIBS) for later dynamic loading.
$m .= sprintf <<'END', $ar_arg;
$(AR) %s
$(NOECHO) $(ECHO) "$(EXTRALIBS)" > $(INST_ARCHAUTODIR)\extralibs.ld
$(CHMOD) 755 $@
END
# When building inside the perl source tree, also append to ext.libs.
$m .= <<'END' if $self->{PERL_SRC};
$(NOECHO) $(ECHO) "$(EXTRALIBS)" >> $(PERL_SRC)\ext.libs
END
return $m;
}
=item dynamic_lib
Defines how to produce the *.so (or equivalent) files.
=cut
# Build the Makefile rules that produce the dynamically loadable NLM
# ($(INST_DYNAMIC)) from $(OBJECT): generate the .def export/import file,
# optionally create xdc data for MPK (multi-processor kernel) builds, run
# the linker, and move long-named NLMs into $(INST_AUTODIR).
sub dynamic_lib {
my($self, %attribs) = @_;
return '' unless $self->needs_linking(); #might be because of a subdir
return '' unless $self->has_link_code;
my($otherldflags) = $attribs{OTHERLDFLAGS} || ($BORLAND ? 'c0d32.obj': '');
my($inst_dynamic_dep) = $attribs{INST_DYNAMIC_DEP} || "";
my($ldfrom) = '$(LDFROM)';
# Bootstrap symbol: module name with '::' flattened to '_'.
(my $boot = $self->{NAME}) =~ s/:/_/g;
# NOTE(review): this heredoc is single-quoted, so the OTHERLDFLAGS and
# INST_DYNAMIC_DEP lines below emit the literal text '.$otherldflags.'
# rather than the Perl values -- this looks like a quoted-string
# concatenation that was mechanically converted to a heredoc; confirm
# against upstream ExtUtils::MM_NW5.
my $m = <<'MAKE_FRAG';
# This section creates the dynamically loadable $(INST_DYNAMIC)
# from $(OBJECT) and possibly $(MYEXTLIB).
OTHERLDFLAGS = '.$otherldflags.'
INST_DYNAMIC_DEP = '.$inst_dynamic_dep.'
# Create xdc data for an MT safe NLM in case of mpk build
$(INST_DYNAMIC): $(OBJECT) $(MYEXTLIB) $(BOOTSTRAP) $(INST_ARCHAUTODIR)$(DFSEP).exists
$(NOECHO) $(ECHO) Export boot_$(BOOT_SYMBOL) > $(BASEEXT).def
$(NOECHO) $(ECHO) $(BASE_IMPORT) >> $(BASEEXT).def
$(NOECHO) $(ECHO) Import @$(PERL_INC)\perl.imp >> $(BASEEXT).def
MAKE_FRAG
# MPK builds additionally need xdc data appended to the .def file.
if ( $self->{CCFLAGS} =~ m/ -DMPK_ON /) {
$m .= <<'MAKE_FRAG';
$(MPKTOOL) $(XDCFLAGS) $(BASEEXT).xdc
$(NOECHO) $(ECHO) xdcdata $(BASEEXT).xdc >> $(BASEEXT).def
MAKE_FRAG
}
# Reconstruct the X.Y.Z version.
my $version = join '.', map { sprintf "%d", $_ }
$] =~ /(\d)\.(\d{3})(\d{2})/;
$m .= sprintf ' $(LD) $(LDFLAGS) $(OBJECT:.obj=.obj) -desc "Perl %s Extension ($(BASEEXT)) XS_VERSION: $(XS_VERSION)" -nlmversion $(NLM_VERSION)', $version;
# Taking care of long names like FileHandle, ByteLoader, SDBM_File etc
if($self->{NLM_SHORT_NAME}) {
# In case of nlms with names exceeding 8 chars, build nlm in the
# current dir, rename and move to auto\lib.
$m .= q{ -o $(NLM_SHORT_NAME).$(DLEXT)}
} else {
$m .= q{ -o $(INST_AUTODIR)\\$(BASEEXT).$(DLEXT)}
}
# Add additional lib files if any (SDBM_File)
$m .= q{ $(MYEXTLIB) } if $self->{MYEXTLIB};
$m .= q{ $(PERL_INC)\Main.lib -commandfile $(BASEEXT).def}."\n";
# Long-named NLMs were built in the current dir; move them into place.
if($self->{NLM_SHORT_NAME}) {
$m .= <<'MAKE_FRAG';
if exist $(INST_AUTODIR)\$(NLM_SHORT_NAME).$(DLEXT) del $(INST_AUTODIR)\$(NLM_SHORT_NAME).$(DLEXT)
move $(NLM_SHORT_NAME).$(DLEXT) $(INST_AUTODIR)
MAKE_FRAG
}
$m .= <<'MAKE_FRAG';
$(CHMOD) 755 $@
MAKE_FRAG
return $m;
}
1;
__END__
=back
=cut
| leighpauls/k2cro4 | third_party/perl/perl/lib/ExtUtils/MM_NW5.pm | Perl | bsd-3-clause | 6,565 |
=head1 This Week on perl5-porters (24-30 March 2003)
This week was a rather interesting week among the Perl 5 porters. Read
about pattern matching extensions, CPAN distribution issues, and various
bugs and problems.
=head2 New regexp flag proposed
Yitzchak Scott-Thoennes proposed a new flag, C</w>, to modify the return
value of the pattern match operator in list context. Basically it prepends
the value of $& to the list of returned values C<($1..$n)>.
Rafael Garcia-Suarez and Merijn Brand asked about the implied constructs
C<qr//w> and C</(?w)/> : these shouldn't be allowed since C</w> affects
the return value of the match, not the pattern itself.
Tim Conrow pointed out that C</w> conflicts with the Perl 6 switch of the
same name. Jos Boumans proposed C</r> instead. A new version of the patch
is in the works.
http://archive.develooper.com/perl5-porters@perl.org/msg93670.html
=head2 A warnings.pm for old perls
Nicholas Clark forwards to P5P an idea of Simon Wistow : to release a
dummy C<warnings> module to CPAN, so that using lexical warnings in a
module doesn't also mandate at least perl 5.6.0. Stas Bekman then proposes
to create a Perl-Backport distribution, aimed at installing various
back-compatibility stubs based on the current perl's version. As he says,
I<CPAN module authors just need to include Perl::Backport in their
PREREQ_PM and it'll do the rest of the job.>
http://archive.develooper.com/perl5-porters@perl.org/msg93632.html
=head2 A generic solution for dual-life CPAN packages
Stas Bekman made a proposal to help the CPAN indexer to distinguish
between the different distributions that provide a specific package, and
to decide which one should be installed by CPAN.pm / CPANPLUS. His
idea is to introduce a new variable, say $CPAN_MASTER_PACKAGE, to hold
the name of the master distribution containing a module. An alternative
option would be to include a file BUNDLED in the distribution, to list all
packages that are to be ignored by the indexer.
Andreas Koenig points out that the contents of the F<inc/> subdirectories
of CPAN distributions are currently ignored by the indexer. For example,
C<only-0.26> and C<PAR-0.66> come with Autrijus Tang's C<Module::Install>
distributed under F<inc/>. Autrijus notes that the C<inc> name is not yet
definitive and should not be relied upon. He also liked Stas' BUNDLED file
idea and suggested that this file could use the MANIFEST.SKIP syntax (i.e.
C<^inc/> to excludes files from F<inc/>).
http://archive.develooper.com/perl5-porters@perl.org/msg93635.html
=head2 In brief
John L. Allen has problems compiling Perl 5.8.0 on AIX with long doubles
with the latest version of IBM's compiler (vac). Apparently Configure
isn't getting right the modfl support. He's working on this with Merijn.
http://archive.develooper.com/perl5-porters@perl.org/msg93682.html
Doug Thayer reports a case of segfault, using GDBM in a threaded program
(bug #21699). Arthur Bergman says that the fix is probably to make
GDBM thread aware.
http://archive.develooper.com/perl5-porters@perl.org/msg93648.html
Jean Forget reminds us that he put on CPAN an alpha version of an enhanced
C<diagnostics> module, containing bug fixes and support for
internationalization. Wolfgang Laun announces that he'll have a look at
it.
http://archive.develooper.com/perl5-porters@perl.org/msg93652.html
Michael G Schwern announced a new alpha release of MakeMaker (version
6.06_03, then 6.06_04). Testers welcome.
http://archive.develooper.com/perl5-porters@perl.org/msg93812.html
He also reported bug #21742 : encountering a C<require Foo::Bar> statement
invoked in void context and from an C<eval(STRING)> statement, perl 5.8.0
isn't able to compute correctly the module's return value, because it
provides a wrong context for it. (The correct context to be provided would
be scalar context, and perl actually provides void context in this case.)
http://archive.develooper.com/perl5-porters@perl.org/msg93747.html
Juerd reports bug #21744 about B::Deparse. A code snippet is worth a dozen
words :
$ perl -MO=Deparse -e'print "${foo}::bar"'
print "$foo::bar";
Enache Adrian (whom I haven't yet mentioned, although he posted a
load of bug fixes this week, as usual) provides a fix.
http://archive.develooper.com/perl5-porters@perl.org/msg93766.html
=head2 About this summary
This summary was brought to you by Rafael Garcia-Suarez. Weekly summaries
are available on F<http://use.perl.org/> and/or via a mailing list, whose
subscription address is F<perl5-summary-subscribe@perl.org>. Comments,
corrections, additions, and suggestions are welcome.
Special thanks to the metamark.net folks for providing this short url
service, and to Iain Truskett for WWW::Shorten.
| autarch/perlweb | docs/dev/perl5/list-summaries/2003/p5p-200303-3.pod | Perl | apache-2.0 | 4,786 |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is built by mktables from e.g. UnicodeData.txt.
# Any changes made here will be lost!
#
# Binary property 'Diacritic'
#
return <<'END';
005E Diacritic
0060 Diacritic
00A8 Diacritic
00AF Diacritic
00B4 Diacritic
00B7 00B8 Diacritic
02B0 034E Diacritic
0350 0357 Diacritic
035D 0362 Diacritic
0374 0375 Diacritic
037A Diacritic
0384 0385 Diacritic
0483 0487 Diacritic
0559 Diacritic
0591 05A1 Diacritic
05A3 05BD Diacritic
05BF Diacritic
05C1 05C2 Diacritic
05C4 Diacritic
064B 0652 Diacritic
0657 0658 Diacritic
06DF 06E0 Diacritic
06E5 06E6 Diacritic
06EA 06EC Diacritic
0730 074A Diacritic
07A6 07B0 Diacritic
07EB 07F5 Diacritic
093C Diacritic
094D Diacritic
0951 0954 Diacritic
0971 Diacritic
09BC Diacritic
09CD Diacritic
0A3C Diacritic
0A4D Diacritic
0ABC Diacritic
0ACD Diacritic
0B3C Diacritic
0B4D Diacritic
0BCD Diacritic
0C4D Diacritic
0CBC Diacritic
0CCD Diacritic
0D4D Diacritic
0DCA Diacritic
0E47 0E4C Diacritic
0E4E Diacritic
0EC8 0ECC Diacritic
0F18 0F19 Diacritic
0F35 Diacritic
0F37 Diacritic
0F39 Diacritic
0F3E 0F3F Diacritic
0F82 0F84 Diacritic
0F86 0F87 Diacritic
0FC6 Diacritic
1037 Diacritic
1039 103A Diacritic
1087 108D Diacritic
108F Diacritic
17C9 17D3 Diacritic
17DD Diacritic
1939 193B Diacritic
1B34 Diacritic
1B44 Diacritic
1B6B 1B73 Diacritic
1BAA Diacritic
1C36 1C37 Diacritic
1C78 1C7D Diacritic
1D2C 1D6A Diacritic
1DC4 1DCF Diacritic
1DFE 1DFF Diacritic
1FBD Diacritic
1FBF 1FC1 Diacritic
1FCD 1FCF Diacritic
1FDD 1FDF Diacritic
1FED 1FEF Diacritic
1FFD 1FFE Diacritic
2E2F Diacritic
302A 302F Diacritic
3099 309C Diacritic
30FC Diacritic
A66F Diacritic
A67C A67D Diacritic
A67F Diacritic
A717 A721 Diacritic
A788 Diacritic
A8C4 Diacritic
A92B A92E Diacritic
A953 Diacritic
FB1E Diacritic
FE20 FE26 Diacritic
FF3E Diacritic
FF40 Diacritic
FF70 Diacritic
FF9E FF9F Diacritic
FFE3 Diacritic
1D167 1D169 Diacritic
1D16D 1D172 Diacritic
1D17B 1D182 Diacritic
1D185 1D18B Diacritic
1D1AA 1D1AD Diacritic
END
| leighpauls/k2cro4 | third_party/cygwin/lib/perl5/5.10/unicore/lib/gc_sc/Dia.pl | Perl | bsd-3-clause | 2,048 |
package DDG::Spice::YachtSpecs;
# ABSTRACT: Returns information about a yacht (specifications and photo).
use DDG::Spice;
# Cache upstream responses; yacht specs rarely change.
spice is_cached => 1;
#General attributes for meta data
name "YachtSpecs";
source "http://yachtharbour.com";
icon_url "http://yachtharbour.com/favicon.ico";
description "Gives out yacht specifications and photo";
primary_example_queries "yacht azzam", "lady moura yacht";
category "facts";
topics "special_interest","travel","trivia";
code_url "https://github.com/duckduckgo/zeroclickinfo-spice/blob/master/lib/DDG/Spice/YachtSpecs.pm";
attribution web => ["http://YachtHarbour.com"],
twitter => "yachtharbour";
#Connection to API
# $1 is the query remainder captured by the handler below.
spice to => 'http://yachtharbour.com/tools/api.php?name=$1';
spice wrap_jsonp_callback => 1;
#Triggers
triggers startend => "yacht", "superyacht", "megayacht", "luxury yacht", "motor yacht";
# Alternation of words that should suppress the IA, read one per line.
# NOTE(review): the words are joined into a regex without quotemeta --
# confirm skipwords.txt contains no regex metacharacters.
my $skip = join "|", share('skipwords.txt')->slurp(chomp => 1);
# Handle statement
handle remainder => sub {
# Require a non-empty remainder after the trigger word.
return unless $_;
# Do not trigger IA if query matches any words in skipwords.txt file
return if m/$skip/i;
return $_;
};
1;
# Catalyst application class for NoteJam: loads the plugin stack and
# configures authentication, sessions, and CSRF protection.
package NoteJam;
use Moose;
use namespace::autoclean;
use Catalyst::Runtime 5.90080;
# Plugin stack: config loading, static file serving, auth, file-backed
# sessions with cookie state, relative URI generation, and flash-style
# status messages.
use Catalyst qw/
ConfigLoader
Static::Simple
Authentication
Session
Session::Store::File
Session::State::Cookie
SmartURI
StatusMessage
/;
extends 'Catalyst';
our $VERSION = '0.01';
__PACKAGE__->config(
name => 'NoteJam',
default_view => 'HTML',
default_model => 'NoteJam::User',
# XSRFBlock middleware rejects POSTs without a valid anti-CSRF token.
psgi_middleware => ['XSRFBlock'],
'Plugin::SmartURI' => {
disposition => 'relative',
},
'Plugin::Authentication' => {
default => {
# Passwords are stored as SHA-1 hashes in the 'password' column.
# NOTE(review): unsalted SHA-1 is weak for password storage --
# consider bcrypt/scrypt; confirm project constraints.
credential => {
class => 'Password',
password_field => 'password',
password_type => 'hashed',
password_hash_type => 'SHA-1',
},
# Users are looked up through the DBIx::Class user model.
store => {
class => 'DBIx::Class',
user_model => 'NoteJam::User',
},
},
},
disable_component_resolution_regex_fallback => 1,
);
__PACKAGE__->setup;
1;
#!/usr/bin/perl
# AJAX endpoint for the ontology browser.  Three modes, selected by the
# "action" argument:
#   action eq "specific"  -> find every path from the given CV term up to
#                            its ontology roots and print them as XML
#   action eq "children"  -> print the direct children of the given term
#   otherwise             -> treat action as a DB name and print its roots
use strict;
use warnings;
use CXGN::Chado::Cvterm;
use CXGN::Scrap::AjaxPage;
use XML::Twig;
my $doc = CXGN::Scrap::AjaxPage->new();
$doc->send_http_header();
print $doc->header();
my ( $cv_accession, $action, $indent ) = $doc->get_encoded_arguments("cv_accession", "action", "indent");
my $dbh = CXGN::DB::Connection->new();
# Accessions are stored upper-case; normalize the user-supplied value.
$cv_accession =~ tr/a-z/A-Z/;
# Browser Searching
if($action eq "specific") {
my $cv_term = CXGN::Chado::Cvterm->new_with_accession( $dbh, $cv_accession );
my $obsolete = "false";
# Obsolete terms are reported as such and produce no tree.
if( $cv_term->get_cvterm_id() ne "" && $cv_term->term_is_obsolete() eq "true" ) {
$obsolete = "true";
print "obsolete";
}
if( $cv_term->get_cvterm_id() ne "" && $obsolete ne "true" ) {
# Populate root list
my @roots_list = ();
my @roots = CXGN::Chado::Cvterm::get_roots($dbh, $cv_term->get_db_name() );
foreach my $new_root ( @roots ) {
push( @roots_list, $new_root );
}
my $rootNumber = scalar( @roots_list );
# Paths will be stored as an array of arrays.  Each path is a list of
# [term, undef] pairs ordered root-first, with the sentinel string
# "done" unshifted once the path has reached a root.
my @paths = ();
# Explicitly initialize the first array, the rest will be dynamic
my @init = ();
push( @init, [$cv_term, undef] );
unshift( @paths, \@init );
# Monitor variables
my $complete = "false";
# Will become true if and only if every path traces back to a root
my $doneCounter = 0;
# Monitors how many paths are done -- when all are done, complete becomes true
# If searching for a root, the path is already done
FINDIFROOT: for( my $i = 0; $i < scalar( @roots_list ); $i++ ) {
if( $init[0]->[0]->get_accession() eq $roots_list[$i]->get_accession() ) {
unshift( @init, "done" );
$paths[0] = \@init;
$doneCounter++;
$complete = "true";
last FINDIFROOT;
}
}
# Find paths: repeatedly extend every unfinished path by its parents,
# forking the path when a term has more than one parent, until every
# path is marked "done" (has reached a root).
# NOTE(review): a non-root term with zero parents would unshift undef
# and this loop would never terminate -- confirm the ontology data
# guarantees all terms are rooted.
while( $complete ne "true" ) {
# Identify latest term in each path
my $pathNumber = scalar( @paths );
for( my $i = 0; $i < $pathNumber; $i++ ) {
my $pathArrayRef = $paths[$i];
my @workingPath = @$pathArrayRef;
my $nextTerm = "done";
if( ref( $workingPath[0] ) eq "ARRAY" ) {
$nextTerm = $workingPath[0]->[0];
}
# Read only paths that are not done, this saves time
if( $nextTerm ne "done" ) {
my @parents = $nextTerm->get_parents();
my $parentNumber = scalar( @parents );
if( $parentNumber > 1 ) {
# Take out original path, then push copies of original path with new parents into paths list
my $index = $i;
my $originalPath = splice( @paths, $index, 1 );
ROOTCHECKER: for( my $j = 0; $j < $parentNumber; $j++ ) {
my @nextPath = @$originalPath;
unshift( @nextPath, $parents[$j] );
for( my $k = 0; $k < scalar( @roots_list ); $k++ ) {
if( $nextPath[0]->[0]->get_accession() eq $roots_list[$k]->get_accession() ) {
$nextPath[0] = [ $roots_list[$k], undef ];
unshift( @nextPath, "done" );
push( @paths, \@nextPath );
$doneCounter++;
last ROOTCHECKER;
}
}
push( @paths, \@nextPath );
}
}
else {
# Simple: put the parent in the array and see if it's a root or not
unshift( @workingPath, $parents[0] );
ROOTCHECK: for( my $j = 0; $j < scalar( @roots_list ); $j++ ) {
if( $workingPath[0]->[0]->get_accession() eq $roots_list[$j]->get_accession() ) {
$workingPath[0] = [ $roots_list[$j], undef ];
unshift( @workingPath, "done" );
$doneCounter++;
last ROOTCHECK;
}
}
$paths[$i] = \@workingPath;
}
}
}
my $test = scalar( @paths );
if( $doneCounter == $test ) {
$complete = "true";
}
}
# Generate XML tree: merge all root-to-term paths into a single tree of
# <term> elements, attaching each term under its parent at indent j-1.
my $xmlRoot = XML::Twig::Elt->new('specific');
my $treeRootTag = "term";
# Maps a term's "DB:ACCESSION" prefix to the indent levels at which it
# has already been placed, to avoid duplicate insertions.
my %termIndentHash = ();
for( my $i = 0; $i < scalar( @paths ); $i++ ) {
my $pathRef = $paths[$i];
my @path = @$pathRef;
# $j starts at 1 to skip the "done" sentinel at index 0.
for( my $j = 1; $j < scalar( @path ); $j++ ) {
my $treeRootContent = $paths[$i]->[$j]->[0]->get_db_name().":".$paths[$i]->[$j]->[0]->get_accession();
my $fullName = $treeRootContent;
$treeRootContent .= ' -- '.$paths[$i]->[$j]->[0]->get_cvterm_name();
my $elementID = $j."--".$fullName;
my $next = XML::Twig::Elt->new( $treeRootTag, $treeRootContent );
$next->set_att( id => $fullName );
$next->set_att( divID => $elementID );
$next->set_att( indent => $j );
my $childNumber = $paths[$i]->[$j]->[0]->count_children();
$next->set_att( children => $childNumber );
if( scalar( $xmlRoot->descendants() ) > 0 ) {
# Walk every element already in the tree looking for this term's
# parent (the previous path entry) one indent level up.
my $element = $xmlRoot;
while( $element = $element->next_elt( '#ELT' ) ) {
if( $j > 1 ) {
my $previousRootContent = $paths[$i]->[$j-1]->[0]->get_db_name().":";
$previousRootContent .= $paths[$i]->[$j-1]->[0]->get_accession();
my $text = $element->text;
my $startIndex = index( $text, ":" ) + 1;
# NOTE(review): the third argument to substr() is a LENGTH, yet
# "$startIndex + 7" is computed like an end offset -- verify the
# intended slice of the "DB:ACCESSION" prefix.
$text = substr( $text, $startIndex - 3, $startIndex + 7 );
my $idText = substr( $element->trimmed_text, 0, 10 );
my $idIndent = $element->att( 'indent' );
if( $text eq $previousRootContent ) {
my $newElement = "true";
if( exists $termIndentHash{$idText} ) {
# NOTE(review): grep( $idIndent, @list ) evaluates the constant
# $idIndent as the condition for every element (it never looks
# at $_), so this is not a membership test -- the intent was
# probably grep { $_ eq $idIndent } @{...}; confirm.
if( !grep( $idIndent, @{$termIndentHash{$idText}} ) ) {
push @{ $termIndentHash{$idText}}, $idIndent;
}
}
# NOTE(review): $newElement is never set to "false", so the
# guard below is always taken -- likely vestigial.
if( $newElement ne "false" ) {
if( $next->att( 'indent' ) - $element->att( 'indent' ) == 1 ) {
eval{$next->paste( 'last_child', $element )};
$termIndentHash{$idText} = [$idIndent];
}
}
}
}
}
} else {
# First element placed becomes a child of the document root.
$next->paste( $xmlRoot );
$termIndentHash{$next->trimmed_text} = ["1"];
}
}
}
# Format and print XML tree
my $text = $xmlRoot->sprint;
$text =~ s|>|>\n|g; # Put newlines after tag boundaries
$text =~ s|<|\n<|g; # Put newlines before tag boundaries
$text =~ s|>\n([A-Z])|>$1|g; # Remove newlines when they come before an accession
my $newLineIndex = 0; # Remove blank lines by removing extra newlines; go through string multiple
while( $newLineIndex != -1 ) { # times if necessary
$text =~ s|\n\n|\n|g;
$newLineIndex = index( $text, "\n\n" );
}
$text =~ s|(<term[A-Za-z0-9 _\,\<\>\+\=\/\'\"\:\t-]*)\n(</term>)|$1$2|g;
# Condense the final term of each path, and its end tag, onto one line for easy identification
print $text;
}
}
# Browser Scanning
else {
# Assemble term list
my @term_list = ();
my $cv_term = undef;
if ($action eq "children") {
# Get all children of a term
$cv_term = CXGN::Chado::Cvterm->new_with_accession($dbh, $cv_accession);
@term_list = $cv_term->get_children();
}
else {
# This gets roots for a specific database
my @new_roots = CXGN::Chado::Cvterm::get_roots($dbh, $action);
foreach my $new_root (@new_roots) {
push @term_list, [ $new_root, undef ];
}
}
$indent++;
# Print out XML
# Each entry is a [term, undef] pair; emit one <term> element per entry.
foreach my $t (@term_list) {
my $id = $t->[0]->get_db_name().":".$t->[0]->get_accession();
my $divID = $indent."--".$id;
my $childNumber = $t->[0]->count_children();
my $term = "<term children='$childNumber' divID='$divID' id='$id' indent='$indent'> ";
$term .= $t->[0]->get_db_name().":".$t->[0]->get_accession(). " -- ".$t->[0]->get_cvterm_name();
$term .= "</term>";
print "$term\n";
}
}
print $doc->footer();
# -*- perl -*-
# !!! DO NOT EDIT !!!
# This file was automatically generated.

package Net::Amazon::Validate::ItemSearch::us::Title;

use 5.006;
use strict;
use warnings;

# Every search index that the us locale accepts for a Title search,
# in canonical order and capitalization.
my @SEARCH_INDICES = qw(
    Apparel Automotive Baby Beauty Books Classical DVD DigitalMusic
    Electronics GourmetFood HealthPersonalCare HomeGarden Industrial
    Jewelry Kitchen MP3Downloads Magazines Merchants Miscellaneous Music
    MusicalInstruments OfficeProducts OutdoorLiving PCHardware PetSupplies
    Photo SilverMerchants Software SportingGoods Tools Toys UnboxVideo
    VHS Video VideoGames Wireless WirelessAccessories
);

# Construct a validator.  %options may override _default (Books) and may
# pre-seed _options, to which the standard index list is appended.
sub new {
    my ($class, %options) = @_;

    my $self = {
        '_default' => 'Books',
        %options,
    };
    push @{ $self->{_options} }, @SEARCH_INDICES;

    return bless $self, $class;
}

# Validate $user when it is defined and non-empty, otherwise return the
# default index.
sub user_or_default {
    my ($self, $user) = @_;

    return $self->find_match($user) if defined $user && length($user) > 0;
    return $self->default();
}

# The fallback index name.
sub default {
    my ($self) = @_;
    return $self->{_default};
}

# Case-insensitive lookup returning the canonical capitalization; dies
# when $value is not a valid us Title search index.
sub find_match {
    my ($self, $value) = @_;

    foreach my $option (@{ $self->{_options} }) {
        return $option if lc($option) eq lc($value);
    }
    die "$value is not a valid value for us::Title!\n";
}

1;
__END__
=head1 NAME
Net::Amazon::Validate::ItemSearch::us::Title;
=head1 DESCRIPTION
The default value is Books, unless mode is specified.
The list of available values are:
Apparel
Automotive
Baby
Beauty
Books
Classical
DVD
DigitalMusic
Electronics
GourmetFood
HealthPersonalCare
HomeGarden
Industrial
Jewelry
Kitchen
MP3Downloads
Magazines
Merchants
Miscellaneous
Music
MusicalInstruments
OfficeProducts
OutdoorLiving
PCHardware
PetSupplies
Photo
SilverMerchants
Software
SportingGoods
Tools
Toys
UnboxVideo
VHS
Video
VideoGames
Wireless
WirelessAccessories
=cut
| carlgao/lenga | images/lenny64-peon/usr/share/perl5/Net/Amazon/Validate/ItemSearch/us/Title.pm | Perl | mit | 3,119 |
package Class::DBI::SQL::Transformer;
use strict;
use warnings;
=head1 NAME
Class::DBI::SQL::Transformer - Transform SQL
=head1 SYNOPSIS
my $trans = $tclass->new($self, $sql, @args);
return $self->SUPER::transform_sql($trans->sql => $trans->args);
=head1 DESCRIPTION
Class::DBI hooks into the transform_sql() method in Ima::DBI to provide
its own SQL extensions. Class::DBI::SQL::Transformer does the heavy
lifting of these transformations.
=head1 CONSTRUCTOR
=head2 new
my $trans = $tclass->new($self, $sql, @args);
Create a new transformer for the SQL and arguments that will be used
with the given object (or class).
=cut
sub new {
    my ($class, $caller, $sql, @args) = @_;

    # The actual transformation is performed lazily, the first time
    # sql() or args() is requested (_transformed tracks that).
    my $self = {
        _caller      => $caller,
        _sql         => $sql,
        _args        => [@args],
        _transformed => 0,
    };

    return bless $self, $class;
}
=head2 sql / args
my $sql = $trans->sql;
my @args = $trans->args;
The transformed SQL and args.
=cut
# TODO Document what the different transformations are
# and factor out how they're called so that people can pick and mix the
# ones they want and add new ones.
sub sql {
    my ($self) = @_;

    # Run the transformation lazily on first access.
    $self->_do_transformation unless $self->{_transformed};
    return $self->{_transformed_sql};
}
sub args {
    my ($self) = @_;

    # Run the transformation lazily on first access.
    $self->_do_transformation unless $self->{_transformed};
    return @{ $self->{_transformed_args} };
}
# Expand one __TABLE__ spec.  The spec is "Class", "Class=alias", or
# "=alias" (current class); returns "tablename" or "tablename alias" and
# records which class the alias/table refers to for later __JOIN__
# expansion.
sub _expand_table {
my $self = shift;
my ($class, $alias) = split /=/, shift, 2;
my $caller = $self->{_caller};
# An explicit class wins; otherwise use the calling class's table.
my $table = $class ? $class->table : $caller->table;
# Map the alias (or the bare table name) to its class in cmap.
$self->{cmap}{ $alias || $table } = $class || ref $caller || $caller;
# Turn a non-empty alias into " alias" (leading space); undef/"" stays "".
($alias ||= "") &&= " $alias";
return $table . $alias;
}
# Expand a __JOIN(t1 t2 ...)__ spec into "t1.fk = t2.pk" conditions joined
# with AND.  Adjacent table names are paired; each pair is joined via the
# has_a relationship between their classes (recorded in cmap by
# _expand_table), trying both directions.
sub _expand_join {
my $self = shift;
my $joins = shift;
my @table = split /\s+/, $joins;
my $caller = $self->{_caller};
# Pair each table with its right-hand neighbor: t1=>t2, t2=>t3, ...
my %tojoin = map { $table[$_] => $table[ $_ + 1 ] } 0 .. $#table - 1;
my @sql;
while (my ($t1, $t2) = each %tojoin) {
# Resolve the aliases back to classes; unknown aliases are fatal.
my ($c1, $c2) = map $self->{cmap}{$_}
|| $caller->_croak("Don't understand table '$_' in JOIN"), ($t1, $t2);
# Find the has_a column in $c1 whose foreign class is $c2.
my $join_col = sub {
my ($c1, $c2) = @_;
my $meta = $c1->meta_info('has_a');
my ($col) = grep $meta->{$_}->foreign_class eq $c2, keys %$meta;
$col;
};
# The relationship may be declared on either side: if $c1 doesn't
# point at $c2, swap and try the other direction.
my $col = $join_col->($c1 => $c2) || do {
($c1, $c2) = ($c2, $c1);
($t1, $t2) = ($t2, $t1);
$join_col->($c1 => $c2);
};
$caller->_croak("Don't know how to join $c1 to $c2") unless $col;
# Equate the foreign-key column with the joined class's primary key.
push @sql, sprintf " %s.%s = %s.%s ", $t1, $col, $t2, $c2->primary_column;
}
return join " AND ", @sql;
}
# Perform all SQL placeholder expansions on the stored statement and cache
# the results in _transformed_sql/_transformed_args, marking the object as
# transformed.  Always returns 1.
sub _do_transformation {
my $me = shift;
my $sql = $me->{_sql};
my @args = @{ $me->{_args} };
my $caller = $me->{_caller};
# __TABLE__/__TABLE(Class=alias)__ -> "tablename alias" (records cmap).
$sql =~ s/__TABLE\(?(.*?)\)?__/$me->_expand_table($1)/eg;
# __JOIN(t1 t2 ...)__ -> join conditions between adjacent tables.
$sql =~ s/__JOIN\((.*?)\)__/$me->_expand_join($1)/eg;
# __ESSENTIAL__ -> comma-separated essential columns.
$sql =~ s/__ESSENTIAL__/join ", ", $caller->_essential/eg;
# __ESSENTIAL(prefix)__ -> the same columns qualified with "prefix.".
$sql =~
s/__ESSENTIAL\((.*?)\)__/join ", ", map "$1.$_", $caller->_essential/eg;
# __IDENTIFIER__ -> "pk1=? AND pk2=? ..." over the primary key columns.
if ($sql =~ /__IDENTIFIER__/) {
my $key_sql = join " AND ", map "$_=?", $caller->primary_columns;
$sql =~ s/__IDENTIFIER__/$key_sql/g;
}
$me->{_transformed_sql} = $sql;
$me->{_transformed_args} = [@args];
$me->{_transformed} = 1;
return 1;
}
1;
| carlgao/lenga | images/lenny64-peon/usr/share/perl5/Class/DBI/SQL/Transformer.pm | Perl | mit | 3,137 |
# Time-stamp: "Sat Jul 14 00:27:39 2001 by Automatic Bizooty (__blocks2pm.plx)"
# Transliteration table for Unicode block 0xBA (Hangul syllables):
# 256 romanizations, one per code point U+BA00 .. U+BAFF.
# Fixed: the package variable name had been mangled to
# "$Text::\SEPA\Unicode\Unidecode::Char[0xba]" (embedded backslashes),
# which is not valid Perl; restored the standard Text::Unidecode table.
$Text::Unidecode::Char[0xba] = [
'mya', 'myag', 'myagg', 'myags', 'myan', 'myanj', 'myanh', 'myad', 'myal', 'myalg', 'myalm', 'myalb', 'myals', 'myalt', 'myalp', 'myalh',
'myam', 'myab', 'myabs', 'myas', 'myass', 'myang', 'myaj', 'myac', 'myak', 'myat', 'myap', 'myah', 'myae', 'myaeg', 'myaegg', 'myaegs',
'myaen', 'myaenj', 'myaenh', 'myaed', 'myael', 'myaelg', 'myaelm', 'myaelb', 'myaels', 'myaelt', 'myaelp', 'myaelh', 'myaem', 'myaeb', 'myaebs', 'myaes',
'myaess', 'myaeng', 'myaej', 'myaec', 'myaek', 'myaet', 'myaep', 'myaeh', 'meo', 'meog', 'meogg', 'meogs', 'meon', 'meonj', 'meonh', 'meod',
'meol', 'meolg', 'meolm', 'meolb', 'meols', 'meolt', 'meolp', 'meolh', 'meom', 'meob', 'meobs', 'meos', 'meoss', 'meong', 'meoj', 'meoc',
'meok', 'meot', 'meop', 'meoh', 'me', 'meg', 'megg', 'megs', 'men', 'menj', 'menh', 'med', 'mel', 'melg', 'melm', 'melb',
'mels', 'melt', 'melp', 'melh', 'mem', 'meb', 'mebs', 'mes', 'mess', 'meng', 'mej', 'mec', 'mek', 'met', 'mep', 'meh',
'myeo', 'myeog', 'myeogg', 'myeogs', 'myeon', 'myeonj', 'myeonh', 'myeod', 'myeol', 'myeolg', 'myeolm', 'myeolb', 'myeols', 'myeolt', 'myeolp', 'myeolh',
'myeom', 'myeob', 'myeobs', 'myeos', 'myeoss', 'myeong', 'myeoj', 'myeoc', 'myeok', 'myeot', 'myeop', 'myeoh', 'mye', 'myeg', 'myegg', 'myegs',
'myen', 'myenj', 'myenh', 'myed', 'myel', 'myelg', 'myelm', 'myelb', 'myels', 'myelt', 'myelp', 'myelh', 'myem', 'myeb', 'myebs', 'myes',
'myess', 'myeng', 'myej', 'myec', 'myek', 'myet', 'myep', 'myeh', 'mo', 'mog', 'mogg', 'mogs', 'mon', 'monj', 'monh', 'mod',
'mol', 'molg', 'molm', 'molb', 'mols', 'molt', 'molp', 'molh', 'mom', 'mob', 'mobs', 'mos', 'moss', 'mong', 'moj', 'moc',
'mok', 'mot', 'mop', 'moh', 'mwa', 'mwag', 'mwagg', 'mwags', 'mwan', 'mwanj', 'mwanh', 'mwad', 'mwal', 'mwalg', 'mwalm', 'mwalb',
'mwals', 'mwalt', 'mwalp', 'mwalh', 'mwam', 'mwab', 'mwabs', 'mwas', 'mwass', 'mwang', 'mwaj', 'mwac', 'mwak', 'mwat', 'mwap', 'mwah',
'mwae', 'mwaeg', 'mwaegg', 'mwaegs', 'mwaen', 'mwaenj', 'mwaenh', 'mwaed', 'mwael', 'mwaelg', 'mwaelm', 'mwaelb', 'mwaels', 'mwaelt', 'mwaelp', 'mwaelh',
'mwaem', 'mwaeb', 'mwaebs', 'mwaes', 'mwaess', 'mwaeng', 'mwaej', 'mwaec', 'mwaek', 'mwaet', 'mwaep', 'mwaeh', 'moe', 'moeg', 'moegg', 'moegs',
];
1;
#!/usr/bin/env perl
use strict;
use warnings;
use Data::Printer;
use WebService::Zulip;
# Example script: send a private Zulip message and pretty-print the API
# response.  Credentials are read from a JSON config file (.zuliprc by
# default) via load_zulip_info() below.
# NOTE(review): load_zulip_info() calls decode_json(), but no JSON module
# is loaded here -- confirm something exports it, otherwise add
# "use JSON::PP qw(decode_json);".
my $zulip = WebService::Zulip->new(load_zulip_info());
my $result = $zulip->send_message(
content => 'hi stan from the zulip api',
to => 'stan@schwertly.com',
type => 'private'
);
# Dump the response structure (Data::Printer's p()).
p $result;
# Load Zulip credentials from a JSON config file (default: .zuliprc).
# Returns the decoded data (a hashref for a JSON object), or nothing if
# the file does not exist.  Dies if the file cannot be opened or parsed.
sub load_zulip_info
{
    # Fixed: decode_json() was called without ever being imported --
    # neither Data::Printer nor WebService::Zulip provides it.  JSON::PP
    # is in core, so this adds no new dependency.
    use JSON::PP qw(decode_json);

    my $filename = shift || '.zuliprc';
    return unless -e $filename;

    open my $fh, '<', $filename or die "Cannot open $filename: $!";
    {
        # Slurp mode: read the whole file as one string for decode_json.
        local $/ = undef;
        return decode_json(<$fh>);
    }
}
#
# Some more random examples of
# subroutines, using different features.
#
use strict;
#
# Print a simple sentence, capitalizing the subject's first letter.
# An alternative parameter style: unpack everything in one list assignment
# instead of repeated shifts.
sub sent
{
    my ($subj, $verb, $adj, $obj) = @_;
    print ucfirst($subj), " $verb the $adj $obj.\n";
}
#
# The @_ array is special: its elements are ALIASES to the caller's
# arguments, so assigning to $_[n] changes the caller's variable, while
# copying an element out (via shift or assignment) does not.  Note that
# shift removes the first element, so afterwards $_[0] refers to the
# caller's SECOND argument.
my $snakebreath; # File-scoped lexical; changeme below can refer to it.
sub changeme
{
my $first = shift @_;
$first = 'this'; # Does not change in caller (copy made by shift).
$_[0] = 'that'; # Does change in caller -- the 2nd argument, after shift.
$snakebreath = 77; # Assigns to the file-scoped lexical above.
}
# Demonstrate the subroutines above.
sent('alex', 'stole', 'red', 'wagon');
sent('susan', 'ignored', 'awful', 'pun');
$snakebreath = 99;
my ($x, $y) = ('today', 'tomorrow');
# Prints "99 today tomorrow".
print "\n$snakebreath $x $y\n";
# changeme copies its first argument (no effect on $x) but assigns to
# $_[0] AFTER a shift, which aliases $y; it also sets $snakebreath to 77.
changeme($x, $y);
# Prints "77 today that".
print "$snakebreath $x $y\n";
# Image configuration for the chromosome display: defines which tracks
# (ideogram, assembly exceptions) are drawn and their rendering options.
package EnsEMBL::Web::UserConfig::chromosome;
use strict;
use EnsEMBL::Web::UserConfig;
use vars qw(@ISA);
@ISA = qw(EnsEMBL::Web::UserConfig);
# Populate $self->{'general'}{'chromosome'} with the track artefacts and
# their per-track settings; called by the UserConfig framework.
sub init {
my ($self) = @_;
# ID under which per-user settings for this view are stored.
$self->{'_userdatatype_ID'} = 6;
$self->{'no_image_frame'} = 1;
$self->{'general'}->{'chromosome'} = {
# Tracks drawn on this image, in order.
'_artefacts' => [qw(ideogram assemblyexception)],
'_options' => [],
# Global image settings: width, link-out toggles, background colours.
'_settings' => {
'simplehap' => 1,
'width' => 300,
'show_thjview' => 'yes',
'show_contigview' => 'yes',
'show_cytoview' => 'yes',
'bgcolor' => 'background1',
'bgcolour1' => 'background1',
'bgcolour2' => 'background1',
},
# The chromosome ideogram band drawing.
'ideogram' => {
'on' => "on",
'pos' => '6',
},
# Assembly exceptions (e.g. haplotype/patch regions), drawn unstranded.
'assemblyexception' => {
'on' => "on",
'pos' => '9998',
'str' => 'x',
'height' => 1,
'dep' => 6,
'lab' => 'black',
'navigation' => 'on',
},
# Retained but disabled: core bin marker track.
# 'corebinmarkers' => {
# 'on' => "on",
# 'pos' => '2000',
# 'dep' => '200',
# 'str' => 'r',
# 'col' => 'green',
# 'labels' => 'on',
# 'available' => 'features core_bin_marker', ## track will work with or without
# },
};
## $self->add_track( 'redbox', 'on'=>'off', 'col' => 'red', 'zindex' => -20, 'pos' => 1000100 );
}
1;
package File::Dir::Dumper::Stream::JSON::Reader;
use warnings;
use strict;
use 5.012;
use parent 'File::Dir::Dumper::Base';
use Carp;
use JSON::MaybeXS qw(decode_json);
__PACKAGE__->mk_accessors(qw(_in));
=head1 NAME
File::Dir::Dumper::Stream::JSON::Reader - reader for stream of JSON objects.
=head1 VERSION
Version 0.0.10
=cut
our $VERSION = '0.0.10';
=head1 SYNOPSIS
use File::Dir::Dumper::Stream::JSON::Reader;
my $reader = File::Dir::Dumper::Stream::JSON::Reader->new(
{
input => \*FILEHANDLE,
}
);
while (defined(my $token = $reader->fetch())
{
# Do something with $token.
}
=head1 METHODS
=head2 $self->new({ input => $in_filehandle})
Initializes a new object that reads from the filehandle $in_filehandle.
=head2 $self->fetch()
Fetches the next object. Returns undef upon end of file.
=cut
# Object initialiser: record the input filehandle and validate the
# stream header before any records are read.
sub _init
{
    my ($self, $args) = @_;

    $self->_in( $args->{input} );
    $self->_init_stream();

    return;
}
# Read the next line from the underlying filehandle (context of the
# caller propagates, as with the readline builtin).
sub _readline
{
    my ($self) = @_;

    return readline( $self->_in() );
}
# True once the underlying filehandle has reached end-of-file.
sub _eof
{
    my ($self) = @_;

    return eof( $self->_in() );
}
# Consume the first line of the stream and confess unless it is the
# exact header this format requires.
sub _init_stream
{
    my ($self) = @_;

    my $header = $self->_readline();
    if ( $header ne "# JSON Stream by Shlomif - Version 0.2.0\n" )
    {
        Carp::confess "No header for JSON stream";
    }

    return;
}
# Fetch the next JSON record from the stream: accumulate lines until the
# "--/f" terminator, then decode and return the structure.  Returns
# undef/empty list at end of file; confesses if the stream ends without
# a terminator.
sub fetch
{
    my ($self) = @_;

    return if $self->_eof();

    my $accum = "";
    until ( $self->_eof() )
    {
        my $chunk = $self->_readline();
        return decode_json($accum) if $chunk eq "--/f\n";
        $accum .= $chunk;
    }

    Carp::confess "Error! Reached end of file without record terminator.";
}
=head1 AUTHOR
Shlomi Fish, C<< <shlomif@cpan.org> >>
=head1 BUGS
Please report any bugs or feature requests to C<bug-file-dir-dumper at rt.cpan.org>, or through
the web interface at L<http://rt.cpan.org/NoAuth/ReportBug.html?Queue=File-Dir-Dumper>. I will be notified, and then you'll
automatically be notified of progress on your bug as I make changes.
=head1 SUPPORT
You can find documentation for this module with the perldoc command.
perldoc File::Dir::Dumper
You can also look for information at:
=over 4
=item * RT: CPAN's request tracker
L<http://rt.cpan.org/NoAuth/Bugs.html?Dist=File-Dir-Dumper>
=item * AnnoCPAN: Annotated CPAN documentation
L<http://annocpan.org/dist/File-Dir-Dumper>
=item * CPAN Ratings
L<http://cpanratings.perl.org/d/File-Dir-Dumper>
=item * Search CPAN
L<http://search.cpan.org/dist/File-Dir-Dumper>
=back
=head1 ACKNOWLEDGEMENTS
=head1 COPYRIGHT & LICENSE
Copyright 2008 Shlomi Fish, all rights reserved.
This program is released under the following license: MIT/X11 Licence.
=cut
1; # End of File::Dir::Dumper
| gitpan/File-Dir-Dumper | lib/File/Dir/Dumper/Stream/JSON/Reader.pm | Perl | mit | 2,876 |
#!/usr/bin/perl
#
# Fetch AP Content API search results and save the thumbnail, preview
# and main renditions of each item in the result feed to local files.
#
# Usage: APContentAPI.pl "<search terms>" [result count]
use strict;
use warnings;

$| = 1;    # unbuffer STDOUT so progress output appears immediately
use XML::Simple;
use Data::Dumper;
use LWP::UserAgent;
# Content API key provided by The Associated Press
my $apiKey = "Your AP Content API key goes here";
# Search terms passed in on the command line as a parameter to the script
my $searchTerms = defined $ARGV[0] ? $ARGV[0] : '';
$searchTerms =~ s/ /\%20/g;    # minimal URL-encoding of spaces
# Number of results, defaults to 10
my $numResults = 10;
if ($ARGV[1]) {
    $numResults = $ARGV[1];
}
# Content API URL
my $apiBaseUrl = "http://api.ap.org/v2/search";
my $apiUrlArgs = "?count=" . $numResults . "&apikey=" . $apiKey . "&q=" . $searchTerms;
my $apiUrl = $apiBaseUrl . $apiUrlArgs;
#### Make the Content API Request ####
my $userAgent = LWP::UserAgent->new;
$userAgent->agent("APContentAPIFetcher/1.0");
print "Making AP Content API request...\n";
print " APIUrl: " . $apiBaseUrl . "\n";
print " APIKey: " . $apiKey . "\n";
print " Query : " . $searchTerms . "\n";
print " Count : " . $numResults . "\n";
my $startTime = time;
my $request = HTTP::Request->new("GET", $apiUrl);
my $response = $userAgent->request($request);
my $requestTime = time - $startTime;
print "Got AP Content API response...\n";
print " HTTP " . $response->code . " - " . length($response->content) . " bytes in " . $requestTime . " seconds.\n";
#### Parse the Content API Response into XML ####
my $xmlResponse = XML::Simple->new(KeyAttr => []);
my $xmlData = $xmlResponse->XMLin($response->content);
#### Loop through each feed item ####
print " Feed Items...\n";
foreach my $entry (@{ $xmlData->{entry} }) {
    # Reduce the entry's id URL to its bare item identifier.
    my $entryId = $entry->{id};
    $entryId =~ s/.*\///g;
    $entryId =~ s/\?.*//g;
    print " id: " . $entryId . "\n";
    #### Loop through each item's link and save all 3 photo sizes ####
    foreach my $link (@{ $entry->{link} }) {
        my $rel = $link->{rel};
        print " link: " . $rel . "\n";
        next unless $rel =~ /thumbnail/i || $rel =~ /preview/i || $rel =~ /main/i;

        my $href = $link->{href};
        # NOTE(review): this pattern literally matches "&;" - presumably
        # it was meant to strip XML entity escaping; verify the intent.
        $href =~ s/\&\;/\&/g;
        $href .= "&apikey=" . $apiKey;
        # Derive the output filename: <id>.<rendition>.<extension>
        my $fileExt = $href;
        $fileExt =~ s/.*\.//;
        $fileExt =~ s/\&.*//;
        my $fileName = $entryId . "." . $rel . "." . $fileExt;
        my $fileStartTime = time;
        print " Making AP Content API request for $fileName...\n";
        my $fileRequest = HTTP::Request->new("GET", $href);
        my $fileResponse = $userAgent->request($fileRequest);
        my $fileRequestTime = time - $fileStartTime;
        print " Got AP Content API response...\n";
        print " HTTP " . $fileResponse->code . " - " . length($fileResponse->content) . " bytes in " . $fileRequestTime . " seconds.\n";
        # Write the image bytes; lexical handle + checked 3-arg open
        # replaces the original unchecked 2-arg open on a bareword.
        open my $out_fh, '>', $fileName
            or die "Cannot open $fileName for writing: $!";
        binmode($out_fh);
        print {$out_fh} $fileResponse->content;
        close($out_fh) or die "Cannot close $fileName: $!";
        print " Wrote " . $fileName . "\n";
    }
}
| TheAssociatedPress/APISamples | APContentAPI/perl/APContentAPI.pl | Perl | mit | 2,806 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
#
# Ensembl module for Bio::EnsEMBL::Variation::DBSQL::SourceAdaptor
#
#
=head1 NAME
Bio::EnsEMBL::Variation::DBSQL::SourceAdaptor
=head1 SYNOPSIS
$reg = 'Bio::EnsEMBL::Registry';
$reg->load_registry_from_db(-host => 'ensembldb.ensembl.org',-user => 'anonymous');
$sta = $reg->get_adaptor("human","variation","source");
# fetch a source by its name
$source = $sta->fetch_by_name('dbSNP');
=head1 DESCRIPTION
This adaptor provides database connectivity for Source objects.
=head1 METHODS
=cut
use strict;
use warnings;
package Bio::EnsEMBL::Variation::DBSQL::SourceAdaptor;
use Bio::EnsEMBL::DBSQL::BaseAdaptor;
use Bio::EnsEMBL::Utils::Exception qw(throw warning);
use Bio::EnsEMBL::Variation::Source;
use DBI qw(:sql_types);
use base qw{Bio::EnsEMBL::DBSQL::BaseAdaptor};
=head2 fetch_by_name
Arg [1] : string $name
Example : $source = $source_adaptor->fetch_by_name('dbSNP');
Description: Retrieves a source object via its name
Returntype : Bio::EnsEMBL::Variation::Source
Exceptions : throw if name argument is not defined
Caller : general
Status : Stable
=cut
sub fetch_by_name {
    my ($self, $name) = @_;

    throw('name argument expected') if !defined($name);

    # NOTE(review): $name is interpolated into the SQL constraint
    # unescaped; callers must not pass untrusted input here.
    my $result = $self->generic_fetch("s.name='$name'");

    return $result ? $result->[0] : undef;
}
=head2 fetch_by_dbID
Arg [1] : int $dbID
Example : $source = $source_adaptor->fetch_by_dbID(1);
Description: Retrieves a Source object via its internal identifier.
If no such source exists undef is returned.
Returntype : Bio::EnsEMBL::Variation::Source
Exceptions : throw if dbID arg is not defined
Caller : general
Status : Stable
=cut
sub fetch_by_dbID {
    my ($self, $dbID) = @_;

    throw('dbID argument expected') if !defined($dbID);

    my $result = $self->generic_fetch("s.source_id=$dbID");

    # generic_fetch returns an arrayref; hand back its first row.
    return $result ? $result->[0] : undef;
}
=head2 fetch_all_by_dbID_list
Arg [1] : listref $list
Example : $source = $source_adaptor->fetch_all_by_dbID_list([1,2]);
Description: Retrieves a listref of source objects via a list of internal
dbID identifiers
Returntype : listref of Bio::EnsEMBL::Variation::Source objects
Exceptions : throw if list argument is not defined
Caller : general
Status : Stable
=cut
sub fetch_all_by_dbID_list {
  my ($self, $list) = @_;

  if (!defined($list) || ref($list) ne 'ARRAY') {
    throw("list reference argument is required");
  }

  return undef if (scalar(@$list) == 0);

  # Build the SQL constraint.  Use IN (...) uniformly so single- and
  # multi-element lists are treated consistently (the old code quoted a
  # lone dbID but left IN-list members unquoted).
  my $id_str = ' IN (' . join(',', @$list) . ')';

  # generic_fetch always returns an arrayref (possibly empty), so no
  # further defined/truth check is needed.
  return $self->generic_fetch("s.source_id $id_str");
}
# Columns selected by generic_fetch() for this adaptor; the order must
# match the bind order in _objs_from_sth().
sub _columns {
  return qw(s.source_id s.name s.version s.description s.url s.type s.somatic_status s.data_types);
}
# Table name and alias this adaptor queries.
sub _tables { return (['source', 's']); }
#
# private method, creates source objects from an executed statement handle
# ordering of columns must be consistent
#
sub _objs_from_sth {
  my ($self, $sth) = @_;

  my @source;
  my ($source_id, $source_name, $source_version, $source_description,
      $source_url, $source_type, $source_somatic_status, $source_data_types);

  # Bind order must match _columns().
  $sth->bind_columns(\$source_id, \$source_name, \$source_version,
                     \$source_description, \$source_url, \$source_type,
                     \$source_somatic_status, \$source_data_types);

  while ($sth->fetch()) {
    # Split the comma-separated data_types column.  A NULL column yields
    # an empty list; the previous "? split(...) : undef" mistakenly
    # produced a one-element (undef) list in that case.
    my @data_types =
      defined($source_data_types) ? split(/,/, $source_data_types) : ();

    push @source, Bio::EnsEMBL::Variation::Source->new
      (-dbID           => $source_id,
       -ADAPTOR        => $self,
       -NAME           => $source_name,
       -VERSION        => $source_version,
       -DESCRIPTION    => $source_description,
       -URL            => $source_url,
       -TYPE           => $source_type,
       -SOMATIC_STATUS => $source_somatic_status,
       -DATA_TYPES     => \@data_types);
  }

  return \@source;
}
=head2 get_source_version
Arg[1] : string $name
Example : $version = $sa->get_source_version('dbSNP');
Description : Retrieves from the database the version for the source given as an argument
ReturnType : int
Exceptions : none
Caller : general
Status : Stable
=cut
sub get_source_version {
  my ($self, $name) = @_;

  my $sth = $self->prepare(qq{SELECT version from source where name = ? });
  $sth->bind_param(1, $name, SQL_VARCHAR);
  $sth->execute();

  # Single-row, single-column result.
  my $version;
  $sth->bind_columns(\$version);
  $sth->fetch();
  $sth->finish();

  return $version;
}
# Persist a Bio::EnsEMBL::Variation::Source to the source table, then
# attach the newly assigned dbID and this adaptor to the object.
sub store {
    my ($self, $source) = @_;

    my $dbh = $self->dbc->db_handle;

    my $sth = $dbh->prepare(q{
        INSERT INTO source (
            name,
            version,
            description,
            url,
            type,
            somatic_status,
            data_types
        ) VALUES (?,?,?,?,?,?,?)
    });

    # Empty/false optional fields are stored as SQL NULL; somatic_status
    # defaults to 'germline' and data_types is flattened to a CSV string.
    $sth->execute(
        $source->name,
        $source->version || undef,
        $source->description || undef,
        $source->url || undef,
        $source->type || undef,
        $source->somatic_status || 'germline',
        (join ",", @{$source->get_all_data_types}) || undef
    );
    $sth->finish;

    # get dbID
    my $dbID = $dbh->last_insert_id(undef, undef, 'source', 'source_id');
    $source->{dbID} = $dbID;
    $source->{adaptor} = $self;
}
=head2 update_version
Arg[1] : source object
Example : $sa->update_version( $source_object);
Description : Update the version for the source
ReturnType : none
Exceptions : none
Caller : internal pipelines
Status : Experimental
=cut
sub update_version {
  my ($self, $source) = @_;

  # Don't overwrite an existing version with an empty value.
  my $new_version = $source->version();
  return unless $new_version;

  my $dbh = $self->dbc->db_handle;
  my $sth = $dbh->prepare(q[ update source set version = ? where source_id =?]);
  $sth->execute( $new_version, $source->dbID() );
}
1;
| Ensembl/ensembl-variation | modules/Bio/EnsEMBL/Variation/DBSQL/SourceAdaptor.pm | Perl | apache-2.0 | 6,920 |
# Auto-generated Paws "shape" class: a plain Moose data object whose
# read-only string attributes mirror the fields of the Elastic
# Transcoder Thumbnails API structure (field semantics in the POD below).
package Paws::ElasticTranscoder::Thumbnails;
  use Moose;
  has AspectRatio => (is => 'ro', isa => 'Str');
  has Format => (is => 'ro', isa => 'Str');
  has Interval => (is => 'ro', isa => 'Str');
  has MaxHeight => (is => 'ro', isa => 'Str');
  has MaxWidth => (is => 'ro', isa => 'Str');
  has PaddingPolicy => (is => 'ro', isa => 'Str');
  has Resolution => (is => 'ro', isa => 'Str');
  has SizingPolicy => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::ElasticTranscoder::Thumbnails
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::ElasticTranscoder::Thumbnails object:
$service_obj->Method(Att1 => { AspectRatio => $value, ..., SizingPolicy => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::ElasticTranscoder::Thumbnails object:
$result = $service_obj->Method(...);
$result->Att1->AspectRatio
=head1 DESCRIPTION
Thumbnails for videos.
=head1 ATTRIBUTES
=head2 AspectRatio => Str
To better control resolution and aspect ratio of thumbnails, we
recommend that you use the values C<MaxWidth>, C<MaxHeight>,
C<SizingPolicy>, and C<PaddingPolicy> instead of C<Resolution> and
C<AspectRatio>. The two groups of settings are mutually exclusive. Do
not use them together.
The aspect ratio of thumbnails. Valid values include:
C<auto>, C<1:1>, C<4:3>, C<3:2>, C<16:9>
If you specify C<auto>, Elastic Transcoder tries to preserve the aspect
ratio of the video in the output file.
=head2 Format => Str
The format of thumbnails, if any. Valid values are C<jpg> and C<png>.
You specify whether you want Elastic Transcoder to create thumbnails
when you create a job.
=head2 Interval => Str
The approximate number of seconds between thumbnails. Specify an
integer value.
=head2 MaxHeight => Str
The maximum height of thumbnails in pixels. If you specify auto,
Elastic Transcoder uses 1080 (Full HD) as the default value. If you
specify a numeric value, enter an even integer between 32 and 3072.
=head2 MaxWidth => Str
The maximum width of thumbnails in pixels. If you specify auto, Elastic
Transcoder uses 1920 (Full HD) as the default value. If you specify a
numeric value, enter an even integer between 32 and 4096.
=head2 PaddingPolicy => Str
When you set C<PaddingPolicy> to C<Pad>, Elastic Transcoder may add
black bars to the top and bottom and/or left and right sides of
thumbnails to make the total size of the thumbnails match the values
that you specified for thumbnail C<MaxWidth> and C<MaxHeight> settings.
=head2 Resolution => Str
To better control resolution and aspect ratio of thumbnails, we
recommend that you use the values C<MaxWidth>, C<MaxHeight>,
C<SizingPolicy>, and C<PaddingPolicy> instead of C<Resolution> and
C<AspectRatio>. The two groups of settings are mutually exclusive. Do
not use them together.
The width and height of thumbnail files in pixels. Specify a value in
the format C< I<width> > x C< I<height> > where both values are even
integers. The values cannot exceed the width and height that you
specified in the C<Video:Resolution> object.
=head2 SizingPolicy => Str
Specify one of the following values to control scaling of thumbnails:
=over
=item *
C<Fit>: Elastic Transcoder scales thumbnails so they match the value
that you specified in thumbnail MaxWidth or MaxHeight settings without
exceeding the other value.
=item *
C<Fill>: Elastic Transcoder scales thumbnails so they match the value
that you specified in thumbnail C<MaxWidth> or C<MaxHeight> settings
and matches or exceeds the other value. Elastic Transcoder centers the
image in thumbnails and then crops in the dimension (if any) that
exceeds the maximum value.
=item *
C<Stretch>: Elastic Transcoder stretches thumbnails to match the values
that you specified for thumbnail C<MaxWidth> and C<MaxHeight> settings.
If the relative proportions of the input video and thumbnails are
different, the thumbnails will be distorted.
=item *
C<Keep>: Elastic Transcoder does not scale thumbnails. If either
dimension of the input video exceeds the values that you specified for
thumbnail C<MaxWidth> and C<MaxHeight> settings, Elastic Transcoder
crops the thumbnails.
=item *
C<ShrinkToFit>: Elastic Transcoder scales thumbnails down so that their
dimensions match the values that you specified for at least one of
thumbnail C<MaxWidth> and C<MaxHeight> without exceeding either value.
If you specify this option, Elastic Transcoder does not scale
thumbnails up.
=item *
C<ShrinkToFill>: Elastic Transcoder scales thumbnails down so that
their dimensions match the values that you specified for at least one
of C<MaxWidth> and C<MaxHeight> without dropping below either value. If
you specify this option, Elastic Transcoder does not scale thumbnails
up.
=back
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::ElasticTranscoder>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/ElasticTranscoder/Thumbnails.pm | Perl | apache-2.0 | 5,423 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::RNAProduct - A class representing the mature RNA product
of a transcript
=head1 DESCRIPTION
Objects of this class represent mature RNA products of
transcripts. Examples of such products include MicroRNA (miRNA),
circular RNA (circRNA) or piwi-interacting RNA (piRNA), and they
commonly play a role in gene expression.
=head1 SYNOPSIS
my $rnaproduct = Bio::EnsEMBL::RNAProduct->new(
-SEQ_START => 36,
-SEQ_END => 58
);
# Stable-ID setter
$rnaproduct->stable_id('ENSS00090210');
# Get start and end position in the precursor transcript
my $start = $rnaproduct->start();
my $end = $rnaproduct->end();
=cut
package Bio::EnsEMBL::RNAProduct;
use vars qw($AUTOLOAD);
use strict;
use warnings;
use Bio::EnsEMBL::Utils::Exception qw(throw warning );
use Bio::EnsEMBL::Utils::Argument qw( rearrange );
use Bio::EnsEMBL::Utils::RNAProductTypeMapper;
use Bio::EnsEMBL::Utils::Scalar qw( assert_ref wrap_array );
use Scalar::Util qw(weaken);
use Bio::EnsEMBL::Storable;
use parent qw(Bio::EnsEMBL::Storable);
=head2 new
Arg [-SEQ_START] : The offset in the Transcript indicating the start
position of the product sequence.
Arg [-SEQ_END] : The offset in the Transcript indicating the end
position of the product sequence.
Arg [-START_EXON] : The Exon object in which the RNAProduct starts
Arg [-END_EXON] : The Exon object in which the RNAProduct ends
Arg [-STABLE_ID] : The stable identifier for this RNAPRoduct
Arg [-VERSION] : The version of the stable identifier
Arg [-DBID] : The internal identifier of this RNAProduct
Arg [-ADAPTOR] : The RNAProductAdaptor for this RNAProduct
Arg [-SEQ] : Manually sets the nucleotide sequence of this
RNAProduct. May be useful if this RNAProduct is not
stored in a database.
Arg [-CREATED_DATE] : the date the RNAProduct was created
Arg [-MODIFIED_DATE]: the date the RNAProduct was modified
Example : my $rp = Bio::EnsEMBL::RNAProduct->new(
-SEQ_START => 36,
-SEQ_END => 58
);
Description: Constructor. Creates a new RNAProduct object
Returntype : Bio::EnsEMBL::RNAProduct
Exceptions : none
Caller : general
Status : In Development
=cut
# perlcritic doesn't know about rearrange(), silence it
sub new { ## no critic (Subroutines::RequireArgUnpacking)
  my $caller = shift;
  # Allow both Class->new() and $existing_obj->new().
  my $class = ref($caller) || $caller;

  # Map the concrete (sub)class name to its rnaproduct type code.
  my $type_code = Bio::EnsEMBL::Utils::RNAProductTypeMapper::mapper()
    ->class_to_type_code($class);

  my ($seq_start, $seq_end, $start_exon, $end_exon, $stable_id, $version, $dbID,
      $adaptor, $seq, $created_date, $modified_date ) =
        rearrange(["SEQ_START", "SEQ_END", "START_EXON", "END_EXON",
                   "STABLE_ID", "VERSION", "DBID", "ADAPTOR", "SEQ",
                   "CREATED_DATE", "MODIFIED_DATE"], @_);

  # For consistency between stable_id() and stable_id_version()
  $stable_id //= '';
  # Default version
  $version //= 1;

  my $self = bless {
    'start'         => $seq_start,
    'end'           => $seq_end,
    'start_exon'    => $start_exon,
    'end_exon'      => $end_exon,
    'stable_id'     => $stable_id,
    'version'       => $version,
    'dbID'          => $dbID,
    'seq'           => $seq,
    'created_date'  => $created_date,
    'modified_date' => $modified_date,
    'type_code'     => $type_code,
  }, $class;

  # Storable's adaptor setter (weakens the back-reference).
  $self->adaptor($adaptor);

  return $self;
}
=head2 add_Attributes
Arg [1..N] : Bio::EnsEMBL::Attribute $attribute
Attributes to add.
Example : $rnaproduct->add_Attributes($selenocysteine_attribute);
Description: Adds an Attribute to the RNAProduct.
If you add an attribute before you retrieve any from database,
lazy load will be disabled.
Returntype : none
Exceptions : throw on incorrect arguments
Caller : general
Status : Stable
=cut
sub add_Attributes {
  my ($self, @new_attribs) = @_;

  $self->{'attributes'} = [] if !exists $self->{'attributes'};

  foreach my $attrib (@new_attribs) {
    throw("Argument to add_Attribute must be a Bio::EnsEMBL::Attribute")
      if !$attrib->isa("Bio::EnsEMBL::Attribute");

    push @{ $self->{'attributes'} }, $attrib;
    # A new attribute may be a SeqEdit, so drop any cached sequence.
    $self->{seq} = undef;
  }

  return;
}
=head2 add_DBEntry
Arg [1] : Bio::EnsEMBL::DBEntry $dbe
The dbEntry to be added
Example : $rnaproduct->add_DBEntry($xref);
Description: Associates a DBEntry with this RNAProduct. Note that adding
DBEntries will prevent future lazy-loading of DBEntries for this
RNAProduct (see get_all_DBEntries).
Returntype : none
Exceptions : thrown on incorrect argument type
Caller : general
Status : Stable
=cut
sub add_DBEntry {
  my ($self, $dbe) = @_;

  unless ($dbe && ref($dbe) && $dbe->isa('Bio::EnsEMBL::DBEntry')) {
    throw('Expected DBEntry argument');
  }

  # Adding manually disables future lazy-loading of xrefs.
  push @{ $self->{'dbentries'} ||= [] }, $dbe;

  return;
}
=head2 cdna_end
Example : $rnaproduct_cdna_end = $rnaproduct->cdna_end();
Description : Returns the end position of the RNAProduct in cDNA
coordinates.
Since RNAProducts do not span multiple exons, this is
simply an alias for end().
Return type : Integer
Caller : General
Status : Stable
=cut
sub cdna_end {
  my ($self) = @_;
  # RNAProducts never span multiple exons, so cDNA and product
  # coordinates coincide.
  return $self->end();
}
=head2 cdna_start
Example : $rnaproduct_cdna_start = $rnaproduct->cdna_start();
Description : Returns the start position of the RNAProduct in cDNA
coordinates.
Since RNAProducts do not span multiple exons, this is
simply an alias for start().
Return type : Integer
Caller : General
Status : Stable
=cut
sub cdna_start {
  my ($self) = @_;
  # RNAProducts never span multiple exons, so cDNA and product
  # coordinates coincide.
  return $self->start();
}
=head2 created_date
Arg [1] : (optional) string $created_date - created date to set
Example : $rnaproduct->created_date('2007-01-10 20:52:00');
Description: Getter/setter for attribute created_date
Returntype : string
Exceptions : none
Caller : general
Status : Stable
=cut
# Getter/setter for the creation-timestamp string.
sub created_date {
  my ($self, @new_value) = @_;
  $self->{'created_date'} = $new_value[0] if @new_value;
  return $self->{'created_date'};
}
=head2 display_id
Example : print $rnaproduct->display_id();
Description: This method returns a string that is considered to be
the 'display' identifier. For RNAProducts this is (depending on
availability and in this order) the stable ID, the dbID or an
empty string.
Returntype : string
Exceptions : none
Caller : web drawing code
Status : Stable
=cut
sub display_id {
  my ($self) = @_;
  # Prefer the stable ID, then the internal dbID, then an empty string.
  for my $candidate ($self->stable_id(), $self->dbID()) {
    return $candidate if $candidate;
  }
  return '';
}
=head2 end
Arg [1] : (optional) int $end - end position to set
Example : $rnaproduct->end(39);
Description: Getter/setter for the value of end, which is a position within
the precursor Transcript.
Returntype : int
Exceptions : none
Caller : general
Status : Stable
=cut
# Getter/setter for the end offset within the precursor Transcript.
sub end {
  my ($self, @new_value) = @_;
  $self->{'end'} = $new_value[0] if @new_value;
  return $self->{'end'};
}
=head2 end_Exon
Arg [1] : (optional) Bio::EnsEMBL::Exon || undef - start exon to assign
Example : $rnaproduct->end_Exon($exon1);
Description: Getter/setter for the value of end_Exon, which denotes the
exon at which RNAProduct ends.
Returntype : Bio::EnsEMBL::Exon
Exceptions : thrown on wrong argument type
Caller : general
Status : Stable
=cut
# Getter/setter for the exon in which this RNAProduct ends.  Calling
# with no arguments is a plain getter; calling with an explicit undef
# detaches the currently assigned exon.
sub end_Exon {
  my ($self, $exon) = @_;

  if (defined($exon)) {
    # Normal setter; rejects anything that is not a Bio::EnsEMBL::Exon.
    assert_ref($exon, 'Bio::EnsEMBL::Exon');
    $self->{'end_exon'} = $exon;
  }
  elsif (@_ > 1) {
    # User has explicitly passed undef. Break connection to exon.
    delete( $self->{'end_exon'} );
  }

  return $self->{'end_exon'};
}
=head2 genomic_end
Args : None
Example : $rnaproduct_genomic_end = $rnaproduct->genomic_end();
Description : Returns the end position of the RNAProduct in genomic
coordinates on the forward strand.
Return type : Integer
Exceptions : None
Caller : General
Status : Stable
=cut
sub genomic_end {
  my ($self) = @_;

  # Computed lazily and cached; product coords are 1-based within the
  # transcript, hence the "- 1" adjustments.
  unless (exists $self->{'genomic_end'}) {
    my $tr = $self->transcript();
    $self->{'genomic_end'} =
      ($tr->strand() >= 0)
        ? $tr->start() + ($self->end() - 1)
        : $tr->end()   - ($self->start() - 1);
  }

  return $self->{'genomic_end'};
}
=head2 genomic_start
Args : None
Example : $rnaproduct_genomic_start = $rnaproduct->genomic_start();
Description : Returns the start position of the RNAProduct in
genomic coordinates on the forward strand.
Return type : Integer
Exceptions : None
Caller : General
Status : Stable
=cut
sub genomic_start {
  my ($self) = @_;

  # Computed lazily and cached; product coords are 1-based within the
  # transcript, hence the "- 1" adjustments.
  unless (exists $self->{'genomic_start'}) {
    my $tr = $self->transcript();
    $self->{'genomic_start'} =
      ($tr->strand() >= 0)
        ? $tr->start() + ($self->start() - 1)
        : $tr->end()   - ($self->end()   - 1);
  }

  return $self->{'genomic_start'};
}
=head2 get_all_Attributes
Arg [1] : optional string $attrib_code
The code of the attribute type to retrieve values for.
Example : ($n_attr) = @{$tl->get_all_Attributes('note')};
@rp_attributes = @{$rnaproduct->get_all_Attributes()};
Description: Gets a list of Attributes of this RNAProduct.
Optionally just get Attributes for given code.
Returntype : listref Bio::EnsEMBL::Attribute
Exceptions : none
Caller : general
Status : Stable
=cut
sub get_all_Attributes {
  my ($self, $attrib_code) = @_;

  # Lazy-load all attributes from the database on first access.
  if (!defined($self->{'attributes'}) && defined($self->adaptor())) {
    my $attr_adaptor = $self->adaptor->db->get_AttributeAdaptor();
    $self->{'attributes'} = $attr_adaptor->fetch_all_by_RNAProduct($self);
  }

  return $self->{'attributes'} unless defined $attrib_code;

  # Filter by attribute code, case-insensitively.
  return [ grep { uc($_->code()) eq uc($attrib_code) }
               @{ $self->{'attributes'} } ];
}
=head2 get_all_DBEntries
Arg [1] : (optional) String, external database name,
SQL wildcard characters (_ and %) can be used to
specify patterns.
Arg [2] : (optional) String, external_db type,
('ARRAY','ALT_TRANS','ALT_GENE','MISC','LIT','PRIMARY_DB_SYNONYM','ENSEMBL'),
SQL wildcard characters (_ and %) can be used to
specify patterns.
Example : my @dbentries = @{ $rnaproduct->get_all_DBEntries() };
@dbentries = @{ $rnaproduct->get_all_DBEntries('Uniprot%') };
@dbentries = @{ $rnaproduct->get_all_DBEntries('%', 'ENSEMBL') };
Description: Retrieves DBEntries (xrefs) for this RNAProduct.
This method will attempt to lazy-load DBEntries
from a database if an adaptor is available and no
DBEntries are present on the RNAProduct (i.e. they
have not already been added or loaded).
Returntype : Listref to Bio::EnsEMBL::DBEntry objects
Exceptions : none
Caller : ?
Status : Stable
=cut
sub get_all_DBEntries {
  my ($self, $ex_db_exp, $ex_db_type) = @_;

  # Cache per filter combination, matching the lookup arguments.
  my $cache_name = 'dbentries'
    . (defined $ex_db_exp  ? $ex_db_exp  : '')
    . (defined $ex_db_type ? $ex_db_type : '');

  # Lazy-load from the database on first access for this filter.
  if (!defined($self->{$cache_name}) && defined($self->adaptor())) {
    $self->{$cache_name} =
      $self->adaptor()->db()->get_DBEntryAdaptor()
           ->fetch_all_by_RNAProduct($self, $ex_db_exp, $ex_db_type);
  }

  return $self->{$cache_name} ||= [];
}
=head2 get_all_DBLinks
Arg [1] : (optional) String, database name
SQL wildcard characters (_ and %) can be used to
specify patterns.
Arg [2] : (optional) String, external database type, can be one of
('ARRAY','ALT_TRANS','ALT_GENE','MISC','LIT','PRIMARY_DB_SYNONYM','ENSEMBL'),
SQL wildcard characters (_ and %) can be used to
specify patterns.
Example : my @dblinks = @{ $rnaproduct->get_all_DBLinks() };
@dblinks = @{ $rnaproduct->get_all_DBLinks('mirbase%') };
@dblinks = @{ $rnaproduct->get_all_DBLinks('%', 'ENSEMBL') };
  Description: This is here for consistency with the Transcript
and Gene classes. It is a synonym for the
get_all_DBEntries() method.
Return type: Listref to Bio::EnsEMBL::DBEntry objects
Exceptions : none
Caller : general
Status : Stable
=cut
# this is an alias, we do NOT want to unpack @_
sub get_all_DBLinks { ## no critic (Subroutines::RequireArgUnpacking)
  # Thin alias for get_all_DBEntries(), kept for API parity with the
  # Transcript and Gene classes; all arguments pass straight through.
  return shift->get_all_DBEntries(@_);
}
=head2 get_all_object_xrefs
Arg [1] : (optional) String, external database name
Arg [2] : (optional) String, external_db type
Example : @oxrefs = @{ $rnaproduct->get_all_object_xrefs() };
Description: Retrieves xrefs for this RNAProduct.
This method will attempt to lazy-load xrefs from a
database if an adaptor is available and no xrefs
are present on the RNAProduct (i.e. they have not
already been added or loaded).
NB: This method is an alias for the
get_all_DBentries() method.
Return type: Listref of Bio::EnsEMBL::DBEntry objects
Status : Stable
=cut
# this is an alias, we do NOT want to unpack @_
sub get_all_object_xrefs { ## no critic (Subroutines::RequireArgUnpacking)
  # Alias for get_all_DBEntries(); all arguments pass straight through.
  return shift->get_all_DBEntries(@_);
}
=head2 get_all_xrefs
Arg [1] : String database name (optional)
SQL wildcard characters (_ and %) can be used to
specify patterns.
Example : @xrefs = @{ $rnaproduct->get_all_xrefs() };
@xrefs = @{ $rnaproduct->get_all_xrefs('mirbase%') };
Description: This method is here for consistency with the Gene
and Transcript classes. It is an alias for the
get_all_DBLinks() method, which in turn directly
calls get_all_DBEntries().
Return type: Listref of Bio::EnsEMBL::DBEntry objects
Status : Stable
=cut
# this is an alias, we do NOT want to unpack @_
sub get_all_xrefs { ## no critic (Subroutines::RequireArgUnpacking)
  # Alias for get_all_DBLinks() (which itself delegates to
  # get_all_DBEntries()); arguments forwarded verbatim.
  return shift->get_all_DBLinks(@_);
}
=head2 modified_date
Arg [1] : (optional) string $modified_date - modification date to set
Example : $rnaproduct->modified_date('2007-01-10 20:52:00');
Description: Getter/setter for attribute modified_date
Returntype : string
Exceptions : none
Caller : general
Status : Stable
=cut
sub modified_date {
  my ($self, @args) = @_;
  # Combined getter/setter: any argument (even undef) updates the value.
  $self->{'modified_date'} = $args[0] if @args;
  return $self->{'modified_date'};
}
=head2 length
Example : print "RNA length =", $rnaproduct->length();
Description: Retrieves the length of the nucleotide sequence represented
by this RNAProduct object.
Returntype : int
Exceptions : none
Caller : webcode (protview etc.)
Status : Stable
=cut
# PBP does allow homonyms as methods, but perlcritic cannot tell these
# apart from the forbidden ones, as stated in the documentation of the
# relevant policy.
sub length { ## no critic (Subroutines::ProhibitBuiltinHomonyms)
  my ($self) = @_;
  # Length of the nucleotide sequence; 0 when no sequence is available.
  # The CORE:: prefix avoids accidentally recursing into this method.
  my $sequence = $self->seq();
  return $sequence ? CORE::length($sequence) : 0;
}
=head2 load
Arg [1] : Boolean $load_xrefs
Load (or don't load) xrefs. Default is to load xrefs.
Example : $rnaproduct->load();
Description : The Ensembl API makes extensive use of
lazy-loading. Under some circumstances (e.g.,
when copying genes between databases), all data of
an object needs to be fully loaded. This method
loads the parts of the object that are usually
lazy-loaded.
Returns : none
=cut
sub load {
  my ($self, $load_xrefs) = @_;
  # Force evaluation of all normally lazy-loaded parts of the object.
  # Xrefs are included unless the caller explicitly passed a false flag.
  $load_xrefs //= 1;
  $self->seq();
  $self->stable_id();
  $self->get_all_Attributes();
  $self->get_all_DBEntries() if $load_xrefs;
  return;
}
=head2 seq
Example : print $rnaproduct->seq();
Description: Retrieves a string representation of the nucleotide sequence
of this RNAProduct. This retrieves the transcript from the
database and gets its sequence, or retrieves the sequence which
was set via the constructor/setter.
Returntype : string
Exceptions : warning if the sequence is not set and cannot be retrieved from
the database.
Caller : webcode (protview etc.)
Status : Stable
=cut
sub seq {
  # Getter/setter for the nucleotide sequence string.
  # Setter: any defined argument replaces the cached sequence.
  # Getter: lazily extracts the subsequence [start, end] from the parent
  # transcript's sequence on first access and caches it.
  # Note: passing undef does NOT clear the cache; the cached value (or the
  # lazily loaded one) is returned instead.
  my ($self, $sequence) = @_;
  if (defined($sequence)) {
    $self->{'seq'} = $sequence;
  } elsif (!defined($self->{'seq'})) {
    my $tr_seq = $self->transcript()->seq();
    if ($tr_seq->length() <= 0) {
      throw('Got no or empty sequence from the database');
    }
    # Coordinates are relative to the transcript, as stored on the object.
    $self->{'seq'} = $tr_seq->subseq($self->{'start'}, $self->{'end'});
  }
  # Defined-or guard: never return undef, return an empty string instead.
  return ( $self->{'seq'} // q{} );
}
=head2 stable_id
Arg [1] : (optional) string $stable_id - stable ID to set
Example : $rnaproduct->stable_id('ENSS00090210');
Description: Getter/setter for attribute stable_id.
Unlike stable_id_version(), setting a new stable ID does NOT
reset the version number.
Returntype : string
Exceptions : none
Caller : general
Status : Stable
=cut
sub stable_id {
  my ($self, @args) = @_;
  # Getter/setter. Unlike stable_id_version(), setting a new stable ID
  # here leaves the version number untouched.
  $self->{'stable_id'} = $args[0] if @args;
  return $self->{'stable_id'};
}
=head2 stable_id_version
Arg [1] : (optional) String - the stable ID with version to set
Example : $rnaproduct->stable_id("ENSS0059890.3");
Description: Getter/setter for stable id with version for this RNAProduct.
If the input string omits the version part, the version gets reset
to undef; use stable_id() if you want to avoid this.
Returntype : String
Exceptions : none
Caller : general
Status : Stable
=cut
sub stable_id_version {
  my ($self, $new_id) = @_;
  if ($new_id) {
    # Everything after the LAST embedded period is treated as the version
    # number; an ID without a period resets the version to undef. This
    # heuristic may not suit all species - use stable_id() + version()
    # explicitly if ambiguity is a concern.
    my $dot = rindex($new_id, '.');
    if ($dot > 0) {
      $self->{stable_id} = substr($new_id, 0, $dot);
      $self->{version}   = substr($new_id, $dot + 1);
    }
    else {
      $self->{stable_id} = $new_id;
      $self->{version}   = undef;
    }
  }
  # Re-assemble "<stable_id>.<version>", omitting the version when unset.
  return $self->{stable_id} . ($self->{version} ? ".$self->{version}" : '');
}
=head2 start
Arg [1] : (optional) int $start - start position to set
Example : $rnaproduct->start(17);
Description: Getter/setter for the value of start, which is a position within
the precursor Transcript.
Returntype : int
Exceptions : none
Caller : general
Status : Stable
=cut
sub start {
  my ($self, @args) = @_;
  # Getter/setter for the start position within the precursor Transcript.
  $self->{'start'} = $args[0] if @args;
  return $self->{'start'};
}
=head2 start_Exon
Arg [1] : (optional) Bio::EnsEMBL::Exon || undef - start exon to assign
Example : $rnaproduct->start_Exon($exon1);
Description: Getter/setter for the value of start_Exon, which denotes the
exon at which RNAProduct starts.
Returntype : Bio::EnsEMBL::Exon
Exceptions : thrown on wrong argument type
Caller : general
Status : Stable
=cut
sub start_Exon {
  # Getter/setter for the exon in which this RNAProduct starts.
  # - called with a defined Exon: type-checked and stored;
  # - called with an explicit undef (@_ > 1 but arg undefined): the stored
  #   exon is deleted, breaking the connection;
  # - called with no argument: plain getter.
  my ($self, $exon) = @_;
  if (defined($exon)) {
    # Normal setter
    assert_ref($exon, 'Bio::EnsEMBL::Exon');
    $self->{'start_exon'} = $exon;
  }
  elsif (@_ > 1) {
    # User has explicitly passed undef. Break connection to exon.
    delete( $self->{'start_exon'} );
  }
  return $self->{'start_exon'};
}
=head2 summary_as_hash
Example : $rnaproduct_summary = $rnaproduct->summary_as_hash();
Description : Retrieves a textual summary of this RNAProduct.
Not inherited from Feature.
Returns : hashref of arrays of descriptive strings
Status : Intended for internal use
=cut
sub summary_as_hash {
  my ($self) = @_;
  # Versioned identifier, e.g. "ENSS00090210.2" when a version is set.
  my $display = $self->display_id;
  $display .= '.' . $self->version if $self->version;
  # Textual summary; 'Parent' links back to the precursor transcript.
  my %summary = (
    'id'            => $display,
    'rnaproduct_id' => $display,
    'genomic_start' => $self->genomic_start,
    'genomic_end'   => $self->genomic_end,
    'length'        => $self->length,
    'Parent'        => $self->transcript->display_id,
  );
  return \%summary;
}
=head2 synchronise_attributes
Example : $rnaproduct->synchronise_attributes();
Description : Some RNAProduct attributes, e.g. stem-loop arm in case
of MicroRNA, use a local cache of their value for
convenience. Unless the corresponding setters update both
the cache value and the attribute (which would defeat
the convenience thing), we have to make sure the former
get propagated to the latter before storing the object
in the database:
- if no corresponding attribute exists, create one;
- if there is one, update its value.
Class-specific maps of attributes to synchronise are
provided by
RNAProductTypeMapper::class_attribute_cache_map() .
Returntype : none
Exceptions : throws if the object contains multiple attributes with the
given code and the choice which one to update is
ambiguous.
Caller : RNAProductAdaptor
Status : At Risk (In Development)
=cut
sub synchronise_attributes {
  my ($self) = @_;
  # Map of "object hash cache key" -> "attribute code" for this class,
  # provided by the RNAProductTypeMapper (e.g. cached stem-loop arm for
  # MicroRNA objects).
  my $attribute_cache_map = Bio::EnsEMBL::Utils::RNAProductTypeMapper::mapper()
    ->class_attribute_cache_map(ref($self));
  while (my ($cache_key, $attr_code) = each %{$attribute_cache_map}) {
    my $existing_attributes = $self->get_all_Attributes($attr_code);
    my $n_existing_attrs = scalar @{$existing_attributes};
    if ($n_existing_attrs > 0) {
      # At the moment we do not support multiple occurrences of target
      # attributes at all
      if ($n_existing_attrs > 1) {
        throw("Object has multiple '$attr_code' attributes and we do not know"
              . " which one to update");
      }
      else {
        # Exactly one matching attribute: overwrite it with the cached value.
        $existing_attributes->[0]->value($self->{$cache_key});
      }
    }
    else {
      # No corresponding attribute exists, most likely because we are
      # dealing with a newly created object which has never been pushed
      # to the database.
      $self->add_Attributes(Bio::EnsEMBL::Attribute->new(
        -CODE => $attr_code,
        -VALUE => $self->{$cache_key},
      ));
    }
  }
  return;
}
=head2 transcript
Arg [1] : Transcript object (optional)
Description : Sets or retrieves the transcript object associated
with this RNAProduct object.
Exceptions : Throws if there is no adaptor or no dbID defined for
the RNAProduct object.
Returntype : Bio::EnsEMBL::Transcript
=cut
sub transcript {
  # Getter/setter for the parent Transcript.
  # - defined argument: type-checked, stored, and WEAKENED (the transcript
  #   normally owns the RNAProduct, so a strong back-reference would leak);
  # - explicit undef: breaks the connection;
  # - no argument: returns the cached transcript, lazy-loading it from the
  #   database when an adaptor and dbID are available.
  my ($self, $transcript) = @_;
  if (defined($transcript)) {
    # Normal setter
    assert_ref($transcript, 'Bio::EnsEMBL::Transcript');
    $self->{'transcript'} = $transcript;
    weaken($self->{'transcript'}); # Avoid circular references.
  } elsif (@_ > 1) {
    # User has explicitly passed undef. Break connection to transcript.
    delete( $self->{'transcript'} );
  } elsif (!defined($self->{'transcript'})) {
    # Lazy-load path: both an adaptor and a dbID are required.
    my $adaptor = $self->{'adaptor'};
    if (!defined($adaptor)) {
      throw("Adaptor not set for RNAProduct, cannot fetch its transcript");
    }
    my $dbID = $self->{'dbID'};
    if (!defined($dbID)) {
      throw("dbID not set for RNAProduct, cannot fetch its transcript.");
    }
    $self->{'transcript'} =
      $adaptor->db()->get_TranscriptAdaptor()
      ->fetch_by_rnaproduct_id($dbID);
    # Do not weaken the reference if we had to get the transcript from the
    # database. The user is probably working on RNA products directly,
    # not going through transcripts.
  }
  return $self->{'transcript'};
}
=head2 type_code
Example : my $rp_type_code = $rnaproduct->type_code();
Description: Getter for the RNAProduct type (e.g. miRNA, circRNA, ...).
The type is expressed as human-readable code.
This is somewhat redundant because similar information can
be obtained simply by looking at the class of the object,
indeed type_code is not meant to be modified independently
of the class. However, there are certain use cases when the
latter are more convenient than the former.
Returntype : string
Exceptions : none
Caller : ?
Status : In Development
=cut
sub type_code {
  my ($self) = @_;
  # Read-only accessor: the type code mirrors the object's class and is
  # not meant to be modified independently of it.
  return $self->{'type_code'};
}
=head2 version
Arg [1] : (optional) string $version - version to set
Example : $rnaproduct->version(2);
Description: Getter/setter for attribute version
Returntype : string
Exceptions : none
Caller : general
Status : Stable
=cut
sub version {
  my ($self, @args) = @_;
  # Getter/setter for the version attribute.
  $self->{'version'} = $args[0] if @args;
  return $self->{'version'};
}
1;
| james-monkeyshines/ensembl | modules/Bio/EnsEMBL/RNAProduct.pm | Perl | apache-2.0 | 27,364 |
package TinCan::Extensions;
use Moo;
use namespace::clean;
use TinCan::Map;
our $VERSION = '0.01';
# An Extensions document is structurally just a TinCan::Map (keys are
# expected to be IRIs per the synopsis below); all behaviour is inherited
# unchanged from the parent class.
extends 'TinCan::Map';
1;
__END__
=head1 NAME
TinCan::Extensions - Extensions model
=head1 SYNOPSIS
use TinCan::Extensions;
my $extensions = TinCan::Extensions->new(
'http://.....' => 'some value',
);
...
=head1 LICENSE AND COPYRIGHT
Copyright 2014 Rustici Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
L<http://www.apache.org/licenses/LICENSE-2.0>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
| RusticiSoftware/TinCanPerl | lib/TinCan/Extensions.pm | Perl | apache-2.0 | 936 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package os::windows::snmp::mode::service;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
# Human-readable labels for the numeric svSvcInstalledState values
# returned by the SNMP agent.
my %map_installed_state = (
  1 => 'uninstalled',
  2 => 'install-pending',
  3 => 'uninstall-pending',
  4 => 'installed'
);
# Human-readable labels for the numeric svSvcOperatingState values.
my %map_operating_state = (
  1 => 'active',
  2 => 'continue-pending',
  3 => 'pause-pending',
  4 => 'paused'
);
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;
    # Command-line options: '--service' is repeatable; '--regexp' turns
    # service names into patterns and '--state' filters on operating state.
    $options{options}->add_options(arguments => {
        'warning:s'  => { name => 'warning' },
        'critical:s' => { name => 'critical' },
        'service:s@' => { name => 'service' },
        'regexp'     => { name => 'use_regexp' },
        'state:s'    => { name => 'state' }
    });
    return $self;
}
sub check_options {
    # Validate command-line options after generic initialisation; exits
    # via option_exit() on any usage error.
    my ($self, %options) = @_;
    $self->SUPER::init(%options);
    # At least one service name/pattern is mandatory.
    if (!defined($self->{option_results}->{service})) {
        $self->{output}->add_option_msg(short_msg => "Need to specify at least one '--service' option.");
        $self->{output}->option_exit();
    }
    # threshold_validate() returns 0 for a malformed threshold expression.
    if (($self->{perfdata}->threshold_validate(label => 'warning', value => $self->{option_results}->{warning})) == 0) {
        $self->{output}->add_option_msg(short_msg => "Wrong warning threshold '" . $self->{option_results}->{warning} . "'.");
        $self->{output}->option_exit();
    }
    if (($self->{perfdata}->threshold_validate(label => 'critical', value => $self->{option_results}->{critical})) == 0) {
        $self->{output}->add_option_msg(short_msg => "Wrong critical threshold '" . $self->{option_results}->{critical} . "'.");
        $self->{output}->option_exit();
    }
}
sub run {
    # Walk the SNMP service table, match services against each '--service'
    # filter, and raise warning/critical based on how many matches are in
    # an acceptable state.
    my ($self, %options) = @_;
    # Service table OIDs: installed state and operating state columns.
    my $oid_svSvcEntry = '.1.3.6.1.4.1.77.1.2.3.1';
    my $oid_svSvcInstalledState = '.1.3.6.1.4.1.77.1.2.3.1.2';
    my $oid_svSvcOperatingState = '.1.3.6.1.4.1.77.1.2.3.1.3';
    my $result = $options{snmp}->get_table(oid => $oid_svSvcEntry, start => $oid_svSvcInstalledState, end => $oid_svSvcOperatingState);
    # $services_match->{filter index}->{service name} = { states }
    my $services_match = {};
    $self->{output}->output_add(
        severity => 'OK',
        short_msg => 'All service states are ok'
    );
    # NOTE(review): 'use' takes effect at compile time regardless of its
    # position; Encode is presumably needed by the output layer's decode()
    # below - confirm it is actually required here.
    use Encode;
    foreach my $oid ($options{snmp}->oid_lex_sort(keys %$result)) {
        # Only iterate rows of the operating-state column; the instance is
        # "<name length>.<dot-separated character codes of the name>".
        next if ($oid !~ /^$oid_svSvcOperatingState\.(\d+)\.(.*)$/);
        my $instance = $1 . '.' . $2;
        # Rebuild the service name from the character codes in the OID.
        my $svc_name = $self->{output}->decode(join('', map(chr($_), split(/\./, $2))));
        my $svc_installed_state = $result->{$oid_svSvcInstalledState . '.' . $instance};
        my $svc_operating_state = $result->{$oid_svSvcOperatingState . '.' . $instance};
        # Record the service under every '--service' filter it matches,
        # either by regexp or by exact name.
        for (my $i = 0; $i < scalar(@{$self->{option_results}->{service}}); $i++) {
            $services_match->{$i} = {} if (!defined($services_match->{$i}));
            my $filter = $self->{option_results}->{service}->[$i];
            if (defined($self->{option_results}->{use_regexp}) && $svc_name =~ /$filter/) {
                $services_match->{$i}->{$svc_name} = {
                    operating_state => $svc_operating_state,
                    installed_state => $svc_installed_state
                }
            } elsif ($svc_name eq $filter) {
                $services_match->{$i}->{$svc_name} = {
                    operating_state => $svc_operating_state,
                    installed_state => $svc_installed_state
                }
            }
        }
    }
    # Evaluate each filter independently against the thresholds.
    for (my $i = 0; $i < scalar(@{$self->{option_results}->{service}}); $i++) {
        my $numbers = 0;
        my $svc_name_state_wrong = {};
        foreach my $svc_name (keys %{$services_match->{$i}}) {
            my $operating_state = $services_match->{$i}->{$svc_name}->{operating_state};
            my $installed_state = $services_match->{$i}->{$svc_name}->{installed_state};
            $self->{output}->output_add(long_msg =>
                sprintf(
                    "Service '%s' match (pattern: '%s') [operating state = %s, installed state = %s]",
                    $svc_name, $self->{option_results}->{service}->[$i],
                    $map_operating_state{$operating_state},
                    $map_installed_state{$installed_state}
                )
            );
            # Services in a state that does not match '--state' are moved
            # aside and excluded from the count of good matches.
            if (defined($self->{option_results}->{state}) && $map_operating_state{$operating_state} !~ /$self->{option_results}->{state}/) {
                delete $services_match->{$i}->{$svc_name};
                $svc_name_state_wrong->{$svc_name} = $operating_state;
                next;
            }
            $numbers++;
        }
        # Thresholds apply to the number of matching services in the
        # requested state.
        my $exit = $self->{perfdata}->threshold_check(
            value => $numbers, threshold => [
                { label => 'critical', exit_litteral => 'critical' },
                { label => 'warning', exit_litteral => 'warning' }
            ]
        );
        $self->{output}->output_add(
            long_msg => sprintf(
                "Service pattern '%s': service list %s",
                $self->{option_results}->{service}->[$i],
                join(', ', keys %{$services_match->{$i}})
            )
        );
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            if (scalar(keys %$svc_name_state_wrong) > 0) {
                $self->{output}->output_add(
                    severity => $exit,
                    short_msg => sprintf(
                        "Service pattern '%s' problem: %s [following services match but has the wrong state]",
                        $self->{option_results}->{service}->[$i],
                        join(', ', keys %$svc_name_state_wrong)
                    )
                );
            } else {
                $self->{output}->output_add(
                    severity => $exit,
                    short_msg => sprintf("Service problem '%s'", $self->{option_results}->{service}->[$i])
                );
            }
        }
    }
    $self->{output}->display();
    $self->{output}->exit();
}
1;
__END__
=head1 MODE
Check Windows Services in SNMP
=over 8
=item B<--warning>
Threshold warning.
=item B<--critical>
Threshold critical.
=item B<--service>
Services to check. (can set multiple times)
=item B<--regexp>
Allows to use regexp to filter services.
=item B<--state>
Service state. (Regexp allowed)
=back
=cut
| Tpo76/centreon-plugins | os/windows/snmp/mode/service.pm | Perl | apache-2.0 | 7,143 |
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::stonesoft::snmp::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_snmp);
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;
    $self->{version} = '1.0';
    # Map of plugin mode names to their implementing modules. Note the
    # dereference-assign fills the existing hashref rather than replacing it.
    %{$self->{modes}} = (
        'memory' => 'network::stonesoft::snmp::mode::memory',
        'cpu' => 'network::stonesoft::snmp::mode::cpu',
        'connections' => 'network::stonesoft::snmp::mode::connections',
        'interfaces' => 'snmp_standard::mode::interfaces',
        'storage' => 'network::stonesoft::snmp::mode::storage',
        'cluster-state' => 'network::stonesoft::snmp::mode::clusterstate',
        'cluster-load' => 'network::stonesoft::snmp::mode::clusterload',
        'rejected-packets' => 'network::stonesoft::snmp::mode::rejectedpackets',
        'dropped-packets' => 'network::stonesoft::snmp::mode::droppedpackets',
        'list-interfaces' => 'snmp_standard::mode::listinterfaces',
    );
    return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check Stonesoft firewall in SNMP.
=cut
| bcournaud/centreon-plugins | network/stonesoft/snmp/plugin.pm | Perl | apache-2.0 | 2,150 |
=head1 LICENSE
See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=head1 NAME
Bio::EnsEMBL::Compara::DBSQL::XrefAssociationAdaptor
=head1 DESCRIPTION
This adaptor allows the storage and retrieval of the associations between gene tree members and annotations such as InterPro and GO.
=head1 INHERITANCE TREE
Bio::EnsEMBL::Compara::DBSQL::XrefAssociationAdaptor
`- Bio::EnsEMBL::Compara::DBSQL::BaseAdaptor
=head1 AUTHORSHIP
Ensembl Team. Individual contributions can be found in the GIT log.
=head1 APPENDIX
The rest of the documentation details each of the object methods.
Internal methods are usually preceded with an underscore (_)
=cut
package Bio::EnsEMBL::Compara::DBSQL::XrefAssociationAdaptor;
use strict;
use warnings;
use Bio::EnsEMBL::Utils::Exception qw(throw);
use Bio::EnsEMBL::Utils::Scalar qw(check_ref);
use Bio::EnsEMBL::Registry;
use Data::Dumper;
use base ('Bio::EnsEMBL::Compara::DBSQL::BaseAdaptor');
# INSERT IGNORE prefix for bulk member_xref inserts; completed either by a
# SELECT (below) or by literal VALUES built in store_member_associations().
my $insert_member_base_sql = q/insert ignore into member_xref(gene_member_id,dbprimary_acc,external_db_id)/;
my $insert_member_sql = $insert_member_base_sql. q/ select gene_member_id,?,? from gene_member where stable_id=? and source_name='ENSEMBLGENE'/;
# Look up a gene_member_id from an ENSEMBLGENE stable ID.
my $get_member_id_sql = q/select gene_member_id from gene_member where stable_id=? and source_name='ENSEMBLGENE'/;
# Remove all member_xref rows for one genome (by name) and external_db_id.
my $delete_member_sql = q/delete mx.* from member_xref mx, gene_member m, genome_db g
where g.name=? and mx.external_db_id=?
and g.genome_db_id=m.genome_db_id and m.gene_member_id=mx.gene_member_id/;
# Template run against a CORE database: the CORE. prefix is stripped and
# GENE_JOIN substituted with one of the three join fragments below, so the
# query can walk from Translation, Transcript or Gene object_xrefs to genes.
my $base_get_sql = q/
select distinct g.stable_id,x.dbprimary_acc
from CORE.xref x
join CORE.external_db db using (external_db_id)
join CORE.object_xref ox using (xref_id)
GENE_JOIN join CORE.seq_region s on (g.seq_region_id=s.seq_region_id)
join CORE.coord_system c using (coord_system_id)
where db.db_name=? and c.species_id=?/;
my $translation_join = q/join CORE.translation t on (t.translation_id=ox.ensembl_id and ox.ensembl_object_type='Translation')
join CORE.transcript tc using (transcript_id)
join CORE.gene g using (gene_id)/;
my $transcript_join = q/join CORE.transcript tc on (tc.transcript_id=ox.ensembl_id and ox.ensembl_object_type='Transcript')
join CORE.gene g using (gene_id)/;
my $gene_join =
q/join CORE.gene g on (g.gene_id=ox.ensembl_id and ox.ensembl_object_type='Gene')/;
# Per-tree accession counts, most frequent first.
my $get_associations_direct = q/
select dbprimary_acc,count(*) as cnt from gene_tree_root r
join gene_tree_node n using (root_id)
join seq_member m using (seq_member_id)
join member_xref mg on (m.gene_member_id=mg.gene_member_id)
join external_db e using (external_db_id)
where r.root_id=? and e.db_name=?
group by dbprimary_acc,db_name order by cnt desc, dbprimary_acc asc
/;
# gene_member_ids in a given tree annotated with a given accession.
my $get_members_for_xref = q/
select m.gene_member_id from member_xref mg
join gene_member m on (m.gene_member_id=mg.gene_member_id)
join seq_member mp on (mp.gene_member_id=m.gene_member_id)
join gene_tree_node gn on (gn.seq_member_id=mp.seq_member_id)
join gene_tree_root r using (root_id)
join external_db e using (external_db_id)
where mg.dbprimary_acc=? and e.db_name=? and r.root_id=?;
/;
# All (accession, gene_member_id) pairs for a tree, sorted by accession.
my $get_member_xrefs_for_tree = q/
select mg.dbprimary_acc as acc, mg.gene_member_id
from gene_tree_root r
join gene_tree_node n using (root_id)
join seq_member m on (m.seq_member_id=n.seq_member_id)
join member_xref mg on (m.gene_member_id=mg.gene_member_id)
join external_db e using (external_db_id)
where r.root_id=? and e.db_name=? order by acc
/;
# Map an external_db name to its compara external_db_id.
my $get_external_db_id = q/select external_db_id from external_db where db_name=?/;
=head2 store_member_associations
Arg[1] : Core database adaptor
Arg[2] : External database name
Arg[3] : Optional callback that generates a hash of gene stable ID to external database accession
Example : $adaptor->store_member_associations($dba, 'GO');
Description: Method to retrieve external database accessions for genes in the supplied core and store them in the compara database
Returntype : None
Exceptions :
Caller :
=cut
sub store_member_associations {
  my ( $self, $dba, $db_name, $callback ) = @_;
  # Resolve the compara external_db_id for the requested database name.
  my $external_db_id = $self->dbc()->sql_helper()->execute_single_result(-SQL=>$get_external_db_id, -PARAMS=>[$db_name]);
  if(!defined $external_db_id) {
    throw "compara external_db entry not found for $db_name";
  }
  # Default callback: query the core database three times (via the
  # Translation, Transcript and Gene object_xref joins) and collect a hash
  # of gene stable_id -> arrayref of accessions.
  $callback ||= sub {
    my ( $compara, $core, $db_name ) = @_;
    my $member_acc_hash;
    for my $join_query ( $translation_join, $transcript_join, $gene_join ) {
      my $sql = $base_get_sql;
      $sql =~ s/GENE_JOIN/$join_query/;
      # Strip the CORE. table prefix so the SQL runs on the core connection.
      $sql =~ s/CORE.//g;
      $core->dbc()->sql_helper()->execute_no_return(
        -SQL => $sql,
        -CALLBACK => sub {
          my @row = @{ shift @_ };
          push @{ $member_acc_hash->{ $row[0] } }, $row[1];
          return;
        },
        -PARAMS => [$db_name,$core->species_id()] );
    }
    return $member_acc_hash;
  };
  my $member_acc_hash = $callback->( $self, $dba, $db_name );
  # Wipe previous associations for this genome/db before re-inserting.
  $self->dbc()->sql_helper()->execute_update(-SQL=>$delete_member_sql, -PARAMS=>[$dba->get_MetaContainer->get_production_name(),$external_db_id]);
  while(my ($sid,$accs) = each %$member_acc_hash) {
    my ($gene_member_id) = @{$self->dbc()->sql_helper()->execute_simple(-SQL=>$get_member_id_sql, -PARAMS=>[$sid])};
    if(defined $gene_member_id) {
      # NOTE(review): accessions are interpolated directly into the INSERT
      # as quoted literals - assumes accessions never contain quote
      # characters; placeholders would be safer. Also relies on MySQL
      # accepting ')values(' without a separating space.
      my @pars = map {"($gene_member_id,\"$_\",$external_db_id)"} uniq(@$accs);
      my $sql = $insert_member_base_sql . 'values' . join(',',@pars);
      $self->dbc()->sql_helper()->execute_update(-SQL=>$sql, -PARAMS=>[]);
    }
  }
  return;
}
sub uniq {
    # Return the distinct elements of the argument list, preserving the
    # order of first appearance. The previous implementation built an
    # anonymous hash and returned its keys, which yielded the elements in
    # nondeterministic order; the seen-hash grep idiom is deterministic
    # and avoids building a throwaway hashref.
    my %seen;
    return grep { !$seen{$_}++ } @_;
}
=head2 get_associated_xrefs_for_tree
Arg[1] : Gene tree object or dbID
Arg[2] : External database name
Example : $adaptor->get_associated_xrefs_for_tree($tree,'GO');
Description : Retrieve hash of associated dbprimary_accs and numbers of members for the supplied tree and database
Returntype : Hashref of accessions to counts
Exceptions :
Caller :
=cut
sub get_associated_xrefs_for_tree {
  my ( $self, $gene_tree, $db_name ) = @_;
  # Accept either a GeneTree object or a plain root dbID.
  my $root_id =
    check_ref( $gene_tree, 'Bio::EnsEMBL::Compara::GeneTree' )
    ? $gene_tree->root_id()
    : $gene_tree;
  return $self->dbc()->sql_helper()->execute_simple(
    -SQL    => $get_associations_direct,
    -PARAMS => [ $root_id, $db_name ]
  );
}
=head2 get_members_for_xref
Arg[1] : Gene tree object or dbID
Arg[2] : Primary accession
Arg[3] : External database name
Example : $adaptor->get_members_for_xref($tree,'GO:123456','GO');
Description : Retrieve members for the supplied tree, primary acc and database.
Returntype : Arrayref of members
Exceptions :
Caller :
=cut
sub get_members_for_xref {
  my ( $self, $gene_tree, $dbprimary_acc, $db_name ) = @_;
  # Accept either a GeneTree object or a plain root dbID.
  if ( check_ref( $gene_tree, 'Bio::EnsEMBL::Compara::GeneTree' ) ) {
    $gene_tree = $gene_tree->root_id();
  }
  # Fetch matching gene_member_ids, then inflate them into GeneMember
  # objects in one batch; returns an empty arrayref when nothing matches.
  my $gene_member_ids =
    $self->dbc()->sql_helper()->execute_simple(
      -SQL => $get_members_for_xref,
      -PARAMS => [ $dbprimary_acc, $db_name, $gene_tree ]
    );
  my $gene_members = [];
  if ( scalar(@$gene_member_ids) > 0 ) {
    $gene_members = $self->_gene_member_adaptor()->fetch_all_by_dbID_list($gene_member_ids);
  }
  return $gene_members;
}
=head2 get_all_member_associations
Arg[1] : Gene tree object or dbID
Arg[2] : External database name
Example : $adaptor->get_all_member_associations($tree,'GO');
Description : Retrieve gene_members and xref associations for the supplied tree, primary acc and database.
Returntype : Hashref containing database accessions as keys and arrayrefs of gene_members as values
Exceptions :
Caller :
=cut
sub get_all_member_associations {
  my ( $self, $gene_tree, $db_name ) = @_;
  # Accept either a GeneTree object or a plain root dbID.
  if ( check_ref( $gene_tree, 'Bio::EnsEMBL::Compara::GeneTree' ) ) {
    $gene_tree = $gene_tree->root_id();
  }
  # First pass: accession -> arrayref of gene_member_ids.
  my $assocs = {};
  $self->dbc()->sql_helper()->execute_no_return(
    -SQL => $get_member_xrefs_for_tree,
    -PARAMS => [ $gene_tree, $db_name ],
    -CALLBACK => sub {
      my ($row) = @_;
      push @{ $assocs->{ $row->[0] } }, $row->[1];
      return;
    } );
  # Second pass: replace each ID list with the fetched GeneMember objects.
  # (Only values are replaced while iterating, which is safe with each().)
  while ( my ( $x, $ms ) = each %$assocs ) {
    $assocs->{$x} = $self->_gene_member_adaptor()->fetch_all_by_dbID_list($ms);
  }
  return $assocs;
}
sub _gene_member_adaptor {
  my ($self) = @_;
  # Lazily create and cache the GeneMemberAdaptor on first use.
  $self->{_gene_member_adaptor} ||= $self->db->get_GeneMemberAdaptor();
  return $self->{_gene_member_adaptor};
}
1;
| Ensembl/ensembl-compara | modules/Bio/EnsEMBL/Compara/DBSQL/XrefAssociationAdaptor.pm | Perl | apache-2.0 | 9,364 |
=head1 LICENSE
See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 NAME
Bio::EnsEMBL::Compara::PipeConfig::Pan::DumpAllForRelease_conf
=head1 SYNOPSIS
init_pipeline.pl Bio::EnsEMBL::Compara::PipeConfig::Pan::DumpAllForRelease_conf -host mysql-ens-compara-prod-X -port XXXX
=head1 DESCRIPTION
Specialized version of the DumpAllForRelease pipeline for the Pan
division. Please, refer to the parent class for further information.
=cut
package Bio::EnsEMBL::Compara::PipeConfig::Pan::DumpAllForRelease_conf;
use strict;
use warnings;
use base ('Bio::EnsEMBL::Compara::PipeConfig::DumpAllForRelease_conf');
sub default_options {
  my ($self) = @_;
  # Pan-division overrides on top of the generic dump pipeline defaults.
  return {
    %{ $self->SUPER::default_options }, # inherit the generic ones
    ##the list of mlss_ids that we have re_ran/updated and cannot be detected through first_release
    #'updated_mlss_ids' => [ 9802, 9803, 9804, 9805, 9806, 9807, 9788, 9789, 9810, 9794, 9809, 9748, 9749, 9750, 9751, 9763, 9764, 9765,
    # 9766, 9778, 9779, 9780, 9781, 9797, 9798, 9799, 9800, 9801, 9808, 9787, 9813, 9814, 9812 ],
    # Dump locations are versioned by EG release number for this division.
    'dump_dir' => $self->o('dump_root') . '/release-' . $self->o('eg_release'),
    'division' => 'pan',
    'prev_rel_ftp_root' => $self->o('ftp_root') . '/release-' . $self->o('prev_eg_release') . '/pan_ensembl',
  };
}
1;
| Ensembl/ensembl-compara | modules/Bio/EnsEMBL/Compara/PipeConfig/Pan/DumpAllForRelease_conf.pm | Perl | apache-2.0 | 1,972 |
=head1 LICENSE
Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Admin::Component::Healthcheck::Details;
use strict;
use base qw(EnsEMBL::Admin::Component::Healthcheck);
sub caption {
  my $self   = shift;
  my $object = $self->object;
  # Append the selected filter value, e.g. "Healthcheck details (foo)",
  # when both a view type and a view parameter are present.
  my $suffix = '';
  if ($object->view_type && $object->view_param) {
    $suffix = sprintf(' (%s)', $object->view_param);
  }
  return 'Healthcheck details' . $suffix;
}
sub content {
  ## @return HTML to be displayed
  ## Renders the healthcheck details view: either a failure-summary table
  ## (when no filter is selected) or, per affected database, a table of
  ## individual failure reports with their annotations.
  my $self    = shift;
  my $object  = $self->object;
  my $hub     = $self->hub;
  my $reports = $object->rose_objects('reports');   # list of report row objects for the current filter
  my $param   = $object->view_param;
  my $type    = $object->view_type;
  my $rids    = $object->requested_reports;

  # Bail out with a "nothing found" message when there are no reports at all
  return $self->no_healthcheck_found(scalar @{$object->rose_objects('control_reports') || []}) unless $reports && @$reports;

  # if no filter selected, display the failure summary table for the given view type
  if (!$param && !@$rids) {
    return sprintf('<p>Click on a %s to view details</p>%s', $object->view_title, $self->failure_summary_table({
      'count'        => $self->group_report_counts($reports, [$type])->{$type},
      'type'         => $type,
      'session_id'   => $object->last_session_id,
      'release'      => $object->requested_release,
      'default_list' => $object->get_default_list
    }));
  }

  my $html    = '';
  my $db_list = [];   # names of affected databases, used for the index at the top

  #group all reports by database_name
  my $grouped_reports = {};
  push @{$grouped_reports->{$_->database_name} ||= []}, $_ for @$reports;

  my $serial_number = 0;   # NOTE(review): appears unused below — confirm before removing

  # For non-database views offer a "view in another release" dropdown
  if ($type ne 'database_name') {
    my $form = $self->get_all_releases_dropdown_form('View in release', 'release');
    # NOTE(review): hidden field literally named '1' carrying the current
    # filter value — presumably consumed positionally downstream; confirm.
    $form->add_hidden({'name' => '1', 'value' => $param});
    $html .= $form->render;
  }

  # Legend explaining the colour coding of the report rows
  $html .= qq{<div class="_hc_infobox tinted-box">
<p>For each database, reports are sorted on the basis of Date (initial failure date) with latest report appearing on the top.</p>
<p>Reports that have not been annotated 'manual ok' are displayed in different colours for results <span class="hc-problem">problem</span>, <span class="hc-warning">warning</span> and <span class="hc-info">info</span>.</p>
<p>Reports that were fixed in a previous healthcheck session but have appeared again are marked in <span class="hc-notfixed">this colour</span>.</p>
<p>Reports that appeared for the first time in the recent healthcheck session are in <span class="hc-new">bold</span> letters.</p>
</div>};

  my $js_ref = 0; #counter used by JavaScript only

  # One table per database, databases in alphabetical order
  foreach my $database_name (sort keys %$grouped_reports) {
    $js_ref++;
    my $table = $self->new_table([], [], {'class' => 'tint'});
    $table->add_columns(#note - use the space properly, we have too much to display in annotation and text columns
      {'key' => 'db_sp_tc', 'title' => $self->_get_first_column($object->function), 'width' => qq(20%)},
      {'key' => 'type',     'title' => 'Type',                    'width' => '20px' },
      {'key' => 'text',     'title' => 'Text',                    'width' => qq(40%)},
      {'key' => 'comment',  'title' => 'Annotation',              'width' => qq(40%)},
      {'key' => 'team',     'title' => 'Team/person responsible', 'width' => '100px'},
      {'key' => 'created',  'title' => 'Initial Failure Date',    'width' => '60px' },
    );

    #sort reports on the basis of creation time
    # Keys are "<created-timestamp><serial>" strings; the serial suffix keeps
    # keys unique when timestamps collide. Reverse lexical sort => newest first.
    my $i = 0;
    my $db_reports = [];
    my $temp = { map { ($_->created || '0').++$i => $_ } @{$grouped_reports->{$database_name}} };
    push @$db_reports, $temp->{$_} for reverse sort keys %$temp;

    foreach my $report (@$db_reports) {
      my $report_id = $report->report_id;
      my $result    = $report->result;

      # First column: links for the two/three dimensions not being filtered on
      my $db_sp_tc = [];
      for (qw(database_type species testcase)) {
        next if $_ eq $type;
        push @$db_sp_tc, $self->get_healthcheck_link({'type' => $_, 'param' => ucfirst($report->$_), 'release' => $object->requested_release, 'cut_long' => 'cut'});
      }

      #annotation column
      my $comment = '';
      my $annotation = $report->annotation;
      if ($annotation) {
        $comment .= $annotation->comment if $annotation->comment;
        my $modified_by = '';
        my $created_by  = '';
        if ($annotation->created_by) {
          $created_by .= '<div class="hc-comment-info">Added by: '.$self->_get_user_link($annotation->created_by_user);
          $created_by .= ' on '.$self->hc_format_date($annotation->created_at) if $annotation->created_at;
          $created_by .= '</div>';
        }
        if ($annotation->modified_by) {
          $modified_by .= '<div class="hc-comment-info">Modified by: '.$self->_get_user_link($annotation->modified_by_user);
          $modified_by .= ' on '.$self->hc_format_date($annotation->modified_at) if $annotation->modified_at;
          $modified_by .= '</div>';
        }
        # Suppress the "Modified by" line when it is identical to the
        # "Added by" line (same user, same date)
        (my $temp = $created_by) =~ s/Added/Modified/;
        $modified_by = '' if $modified_by eq $temp;
        $comment .= $created_by.$modified_by;
        $comment .= '<div class="hc-comment-info">Action: '.$self->annotation_action($annotation->action)->{'title'}.'</div>'
          if $annotation->action && $self->annotation_action($annotation->action)->{'value'} ne '';
      }

      # CSS classes encoding the report state (see the legend box above).
      # NOTE(review): line below mixes string 'eq' for session ids while the
      # next line compares them with numeric '=='; presumably both are
      # numeric ids — confirm.
      my $anno_action = $annotation ? $annotation->action || '' : '';
      my $text_class  = $anno_action =~ /manual_ok|healthcheck_bug/ ? 'hc-oked' : sprintf('hc-%s', lc $result);
      $text_class     = $annotation->session_id eq $report->last_session_id ? '' : 'hc-notfixed' if $anno_action eq 'fixed';
      $text_class    .= $report->first_session_id == $report->last_session_id ? ' hc-new' : ' hc-notnew';
      # Derive matching "<class>-link" classes for the annotation link
      my $link_class  = join ' ', keys %{{ map { $_."-link" => 1 } split (' ', $text_class)}};
      $comment .= sprintf qq(<div class="hc-comment-link"><a class="$link_class" href="%s" rel="$js_ref">%s</a></div>),
        $hub->url({'action' => 'Annotation', 'rid' => $report_id}),
        $comment eq '' ? 'Add Annotation' : 'Edit'
      ;

      $table->add_row({
        'db_sp_tc' => join ('<br />', @$db_sp_tc),
        'comment'  => $comment,
        'type'     => sprintf('<abbr title="%s">%s</abbr>', ucfirst lc $result, substr($result, 0, 1)),
        'text'     => qq(<span class="$text_class">).join (', ', split (/,\s?/, $report->text)).'</span>', #split-joining is done to wrap long strings
        'created'  => $report->created ? $self->hc_format_compressed_date($report->created) : '<i>unknown</i>',
        'team'     => join ', ', map { $self->get_healthcheck_link({'type' => 'team_responsible', 'param' => $_, 'release' => $object->requested_release}) } split(/\s*and\s*/, lc $report->team_responsible),
      });
    }
    # Anchor + heading per database ('%1$s' reuses the first sprintf argument)
    $html .= sprintf('<a name="%s"></a><h3 class="hc-dbheading">%1$s</h3>%s', $database_name || 'Unknown', $table->render);
    push @$db_list, $database_name || 'Unknown';
  }
  # Prepend the list of affected databases unless we are already in the Database view
  return sprintf('%s%s', $object->function eq 'Database' ? '' : sprintf('<h3>List of affected databases:</h3><ul>%s</ul>', join('', map {sprintf('<li><a href="#%s">%1$s</a></li>', $_)} @$db_list)), $html);
}
sub _get_user_link {
  ## Private helper: render a mailto link for a user object (anything
  ## responding to ->email and ->name), or the placeholder string
  ## 'unknown user' when no user object is available.
  my ($self, $user) = @_;

  return 'unknown user' unless $user;

  return sprintf('<a href="mailto:%s">%s</a>', $user->email, $user->name);
}
sub _get_first_column {
  ## Private helper: heading of the first table column for the given view
  ## type (taken from the last argument, so it works as a method or a
  ## plain sub). Returns undef for unknown view types.
  my $view_type = $_[-1];

  my %heading_for = (
    'Database' => 'DB Type<br />Species<br />Testcase',
    'DBType'   => 'Species<br />Testcase',
    'Species'  => 'Database Type<br />Testcase',
    'Testcase' => 'Database Type<br />Species',
    'Team'     => 'DB Type<br />Species<br />Testcase',
  );

  return $heading_for{$view_type};
}
1;
| andrewyatz/public-plugins | admin/modules/EnsEMBL/Admin/Component/Healthcheck/Details.pm | Perl | apache-2.0 | 8,431 |
#!/usr/bin/env perl
#render the Structure page(tab) in Unison
###########################################################
use strict;
use warnings;
use FindBin;
use lib "$FindBin::RealBin/../perl5", "$FindBin::RealBin/../perl5-ext";
use CGI( -debug );
use CGI::Carp qw(fatalsToBrowser);
use IO::String;
use Unison::WWW;
use Unison::WWW::EmbPage qw(infer_pseq_id);
use Unison::WWW::Table;
use Unison::Exceptions;
use Unison::Jmol;
use Unison::Utilities::pseq_structure;
use Unison::Utilities::pseq_features qw( %opts );
# --- main CGI flow -------------------------------------------------------
# Builds the Structure tab for a given pseq_id: loads the first available
# structure/template, embeds a Jmol viewer, renders the feature panel as a
# PNG with a clickable image map, and emits the assembled page.
# NOTE(review): 'new Class(...)' is indirect object syntax; Class->new(...)
# is the safer modern form — behaviour unchanged here.
my $p = new Unison::WWW::EmbPage;
my $u = $p->{unison};      # Unison DB handle carried by the page object
my $v = $p->Vars();        # CGI parameters (hashref)

# Default Jmol applet dimensions when not supplied by the caller
$v->{width} = 600 if not defined $v->{width};
$v->{height} = 400 if not defined $v->{height};

$p->ensure_required_params(qw(pseq_id));

# these files are for the image map
my ( $png_fh, $png_fn, $png_urn ) = $p->tempfile( SUFFIX => '.png' );

my $pseq_structure = new Unison::Utilities::pseq_structure( $v->{pseq_id} );
$pseq_structure->unison($u);

my $jmol = new Unison::Jmol( $v->{width},$v->{height});
$pseq_structure->jmol($jmol);

# Feature-panel options: package defaults overridden by CGI parameters
my %opts = ( %Unison::Utilities::pseq_features::opts, %$v );

# Parse userfeatures/highlight CGI params into %opts and the Jmol object.
# NOTE(review): the $jmol argument is ignored by the sub, which uses the
# file-level lexical $jmol directly — confirm before relying on the arg.
get_user_specs($jmol);

try {
    my $structures_ar = $pseq_structure->find_structures();
    my $templates_ar  = $pseq_structure->find_templates();
    $p->die("Sorry no structures/templates found\n")
      if ( $pseq_structure->{'num_structures'} == 0
        and $pseq_structure->{'num_templates'} == 0 );

    $pseq_structure->load_first_structure();
    $p->add_html( $jmol->script_header() );

    # First 4 chars of the loaded structure name are the PDB id
    my ($pdb_id) = substr( $pseq_structure->{'loaded_structure'}, 0, 4 );

    my $imagemap = generate_imagemap()
      || $p->die("pseq_structure.pl couldn't generate imagemap");

    # Back-link to this page preserving the user's feature/highlight params
    my $parent_url = "pseq_structure.pl?pseq_id=$v->{pseq_id}";
    $parent_url .= "&userfeatures=$v->{userfeatures}" if ( $v->{userfeatures} );
    $parent_url .= "&highlight=$v->{highlight}" if ( $v->{highlight} );

    # Title and back-link are shown only when the page is visited directly
    # (no HTTP referer), i.e. not embedded in the main Unison page
    print $p->render(
        "<center>",
        (
            defined( $ENV{HTTP_REFERER} ) ? ''
            : "<b>Unison:$v->{pseq_id}: Structural Features"
        ),
        (
            $jmol->initialize(
                "pdb$pdb_id.ent", $pseq_structure->{'loaded_structure'},
                $pseq_structure,  $structures_ar,
                $templates_ar
            )
        ),
        "<img src=\"$png_urn\" usemap=\"#FEATURE_MAP\">",
        "\n<MAP NAME=\"FEATURE_MAP\">\n",
        $imagemap,
        "</MAP>\n",
        (
            defined( $ENV{HTTP_REFERER} ) ? ''
            : "<a href=$parent_url>Unison Main Page</a>"
        ),
        "</center>"
    );
}
catch Unison::Exception with {
    $p->die( $_[0] );
};
#=================================================================================================
sub generate_imagemap {
    ## Render the feature panel to the temp PNG and build the matching HTML
    ## image-map string (one AREA tag per clickable feature box).
    ## Operates on the file-level lexicals $u, %opts, $pseq_structure,
    ## $png_fh; returns the image-map markup (undef if no boxes had attrs).

    # Enable the feature tracks shown on the structure page
    $opts{features}{$_}++ for qw(template hmm snp user);
    $opts{view}      = 1;
    $opts{structure} = $pseq_structure;

    my $panel =
        Unison::Utilities::pseq_features::pseq_features_panel( $u, %opts );

    # write the png to the temp file
    print {$png_fh} $panel->gd()->png();
    $png_fh->close();

    # assemble the imagemap as a string, skipping boxes without attributes
    my $imagemap;
    for my $box ( $panel->boxes() ) {
        my ( $feature, $left, $top, $right, $bottom ) = @$box;
        my $attr = $feature->{attributes};
        next unless defined $attr;
        $imagemap .= sprintf(
            '<AREA SHAPE="RECT" COORDS="%d,%d,%d,%d" TOOLTIP="%s" HREF="%s">'
                . "\n",
            $left, $top, $right, $bottom,
            $attr->{tooltip} || '',
            $attr->{href}    || ''
        );
    }
    return $imagemap;
}
#=================================================================================================
sub get_user_specs {
    ## Parses the 'userfeatures' and 'highlight' CGI parameters into
    ## %opts{user_feats} and registers highlight regions with $jmol.
    ## 'userfeatures' format: name@start[-end],...  'highlight' format:
    ## source:feature[:color],... where source is 'user' or 'hmm'.
    ## NOTE(review): operates on the file-level lexicals $v, $p, %opts,
    ## $pseq_structure and $jmol; the passed argument is not read.
    if ( defined( $v->{userfeatures} ) ) {
        foreach ( split( /,/, $v->{userfeatures} ) ) {
            $p->die(
                "wrong userfeatures format expecting :: name@\coord[-coord]\n")
              unless (/(\S+)\@(\S+)/);
            # $1 = feature name, $2 = coord or coord-coord range
            my ( $start, $end ) = split( /-/, $2 );
            $opts{user_feats}{$1}{type}  = 'user';
            $opts{user_feats}{$1}{start} = $start;
            $opts{user_feats}{$1}{end}   = $end;
        }
    }
    if ( defined( $v->{highlight} ) ) {
        foreach ( split( /,/, $v->{highlight} ) ) {
            $p->die(
                "wrong highlight format expecting source:feature[:color]\n")
              unless (/(\S+)\:(\S+)/);
            # @hl = (source, feature[, color]) — color segment is optional
            my @hl = split(/\:/);
            $p->die("Looks like you didn't define $hl[1]\n")
              if ( $hl[0] eq 'user'
                and !defined( $opts{user_feats}{ $hl[1] } ) );
            if ( $hl[0] =~ /hmm/i ) {
                # Resolve the HMM domain name to a start/end residue range
                my $ar = $pseq_structure->get_hmm_range( $hl[1] );
                $p->die("Couldn't find $hl[1] domain in PFAM hits")
                  if ( $#{$ar} < 1 );
                (
                    $opts{user_feats}{ $hl[1] }{type},
                    $opts{user_feats}{ $hl[1] }{start},
                    $opts{user_feats}{ $hl[1] }{end}
                ) = ( 'hmm', $ar->[0], $ar->[1] );
            }
            # NOTE(review): when no color segment was supplied, $hl[2] is
            # undef and the regex/substitutions below operate on undef
            # (warns under 'use warnings') — confirm intended behaviour.
            # A leading '*' marks an RGB hex color ('*RRGGBB'), converted
            # here to decimal 'R-G-B' form.
            if ( $hl[2] =~ /^\*/ ) {
                $p->die(
                    "$hl[2] 6 digits expected with RGB hexadecimal format\n")
                  if ( length( $hl[2] ) != 7 );
                $hl[2] =
                    hex( substr( $hl[2], 1, 2 ) ) . "-"
                  . hex( substr( $hl[2], 3, 2 ) ) . "-"
                  . hex( substr( $hl[2], 5, 2 ) )
                  || $p->die(
                    "Something probably wrong with your RGB hexadecimal format\n"
                  );
            }
            # Strip optional [ ] brackets around the color value
            $hl[2] =~ s/\[//;
            $hl[2] =~ s/\]//;
            $opts{user_feats}{ $hl[1] }{color} = $hl[2]
              if ( $hl[0] =~ /user/i or $hl[0] =~ /hmm/i );
            $p->die(
                "source for the feature to be highlighted must be either user or hmm: you entered $hl[0]"
            ) unless ( $hl[0] =~ /user/i or $hl[0] =~ /hmm/i );
        }
        $jmol->set_highlight_regions( $opts{user_feats} );
    }
}
| unison/unison | www/emb_pseq_structure.pl | Perl | apache-2.0 | 5,990 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Thin binding stub: the actual Clownfish::CFC::Model::Class implementation
# is provided by the compiled Clownfish::CFC XS module loaded below.
package Clownfish::CFC::Model::Class;
use Clownfish::CFC;
our $VERSION = '0.004002';
# Numify the version string via eval so that underscore "developer release"
# versions (e.g. '0.004_002') compare correctly as numbers (standard idiom).
$VERSION = eval $VERSION;
1;
| gitpan/Clownfish-CFC | lib/Clownfish/CFC/Model/Class.pm | Perl | apache-2.0 | 898 |
% ----------------------------------------------------------------------
%
% Linda communication for Distributed CIAO, for SICStus 2.1
% Based on library file linda/client.pl of SICStus 2.1
% (C) UPM-CLIP 1995
%
% ----------------------------------------------------------------------
%% :- module(linda,[
%% linda_client/1,
%% close_client/0,
%% in/1,
%% in/2,
%% in_noblock/1,
%% out/1,
%% rd/1,
%% rd/2,
%% rd_noblock/1,
%% rd_findall/3,
%% linda_timeout/2,
%% halt_server/0,
%% open_client/2,
%% in_stream/2,
%% out_stream/2]).
:- ensure_loaded(library(lists)).
%% :- dynamic linda_stream/2.
% Default reply timeout in milliseconds (see time_out_select/0 below).
:- set(time_out(20000)).

protocol(0'p). /* 0'f = special format, 0'p = write_canonical */

%% linda_client(+Address)
%%   Connect to the Linda server at Address (a Host-Port pair) and verify
%%   the link with a ping. If a live connection already exists, connecting
%%   to the same address is a no-op; a different address is an error.
linda_client(NewAddress) :-
	setting(linda_stream(Stream,OldAddress)),
	current_stream(_N,socket,Stream), !,
	( NewAddress = OldAddress ->
	      true                              % Seems ok, just ignore it
	; format(user_error,
	         '{ERROR: linda_client/1: Already client to ~w}~n',
		 [OldAddress])
        ).
linda_client(Address) :-
	unsetall(linda_stream(_,_)),%Might be a try to reestablish a broken conn.
	open_client(Address,Stream),
	set(linda_stream(Stream,Address)),
	ping(Answer),
	ping_answer_ok(Answer).

%% ping_answer_ok(+Answer)
%%   Succeeds silently on the expected 'pong'; otherwise reports the
%%   unexpected reply (and still succeeds).
ping_answer_ok(pong) :- !.
ping_answer_ok(A) :-
	format(user_error,
	       '{ERROR: linda_client/1: strange answer from server: ~q}~n',
	       [A]).

%% open_client(+Host-Port, -Stream)  TCP connect to the server socket.
open_client(Host-Port, Stream):- connect_to_socket(Host, Port, Stream).
%-----------------------------------------------------------------------------
%% close_client  Drop the stored connection and close its stream.
close_client :-
	unset(linda_stream(Stream,_)),
	close(Stream).

%% ping(-Answer)  Round-trip liveness check against the server.
ping(Answer) :-
	to_linda(0'p, ping),
	time_out_select,
	from_linda(Answer).

% Tuple-space operations. Request codes (0'o, 0'i, ...) select the server
% operation; blocking variants wait on from_linda/1, *_noblock variants
% guard the read with time_out_select and a success-code check.

%% out(+T)  Put tuple T into the tuple space (asynchronous).
out(T) :- to_linda(0'o, T).

%% in(?T)  Remove a tuple matching T, blocking until one exists.
in(T) :-
	to_linda(0'i, T),
	from_linda(T).

%% in(+Tuples, -Tuple)  Blocking removal of any one of a list of patterns.
in(Tuples,Tuple) :-
	Tuples = [_|_],
	to_linda(0'I, Tuples),
	from_linda(Tuple), !,
	member(Tuple,Tuples).  % for unification of Tuples with answer

%% in_noblock(?T)  Non-blocking removal; fails if no match within timeout.
in_noblock(T) :-
	to_linda(0'j, T),
	time_out_select,
	from_linda(0's, T).

%% rd_noblock(?T)  Non-blocking read (tuple stays in the space).
rd_noblock(T) :-
	to_linda(0's, T),
	time_out_select,
	from_linda(0's, T).

%% rd(?T)  Blocking read of a tuple matching T.
rd(T) :-
	to_linda(0'r, T),
	from_linda(T).

%% rd(+Tuples, -Tuple)  Blocking read of any one of a list of patterns.
rd(Tuples,Tuple) :-
	Tuples = [_|_],
	to_linda(0'R, Tuples),
	from_linda(Tuple), !,
	member(Tuple,Tuples).  % for unification of Tuples with answer

%% rd_findall(+Template, +Tuple, -Bag)
%%   Server-side findall over the tuple space.
rd_findall(Template,Tuple,Bag) :-
	to_linda(0'f, f(Template,Tuple,Bag)),
	time_out_select,
	from_linda(Bag).

%% linda_trace(?OnOff)  Query or set server-side tracing (on/off).
linda_trace(OnOff) :-
	(var(OnOff) ; OnOff=on ; OnOff=off), !,
	to_linda(0't, OnOff),
	time_out_select,
	from_linda(OnOff).

%% linda_call(+Goal)  Execute Goal on the server, unify with the result.
linda_call(Goal) :-
	to_linda(0'c, Goal),
	from_linda(0's, Goal).

%% halt_server
%%   Ask the server to shut down, then close our side of the connection.
%%   The protocol bytes are written inline (not via to_linda/2) because the
%%   connection is about to die; a broken connection is re-established once.
halt_server :-      % makes also close_client
	setting(linda_stream(Stream,Address)),
	( current_stream(_N,socket,Stream) ->
	      current_output(CU),
	      set_output(Stream),
	      protocol(P),
	      put(P),
	      put(0'h),
	      flush_output(Stream),
	      set_output(CU),
	      current_input(CI),
	      set_input(Stream),
	      get0(_),            % wait for the server's acknowledgement byte
	      set_input(CI),
	      close_client
	; linda_client(Address) ->  % Connection broken; could reestablish it
	      halt_server
	; format(user_error,
	         '{ERROR: the connection with linda has been shut down, can''t reopen it!}',
		 []),
	  fail
        ).

%% in_stream(+Stream, ?T)  As in/1 but over an explicitly given stream.
in_stream(Stream,T) :-
	to_linda_stream(Stream, 0'i, T),
	from_linda_stream(Stream, T).

%% out_stream(+Stream, +T)  As out/1 but over an explicitly given stream.
out_stream(Stream, T) :- to_linda_stream(Stream, 0'o, T).
%-----------------------------------------------------------------------------
%% to_linda(+Code, +Item)
%%   Send request Code (an operation byte) and term Item to the server over
%%   the stored connection, retrying once via linda_client/1 if the socket
%%   has gone away. Temporarily redirects current output to the socket.
to_linda(Code, Item) :-
	setting(linda_stream(Stream,Address)),
	( current_stream(_N,socket,Stream) ->
	      current_output(CU),
	      set_output(Stream),
	      protocol(P),
	      write_out(P,Code,Item),
	      flush_output(Stream),
	      set_output(CU)
	; linda_client(Address) ->  % Connection broken; could reestablish it
	      to_linda(Code, Item)
	; format(user_error,
	         '{ERROR: the connection with linda has been shut down, can''t reopen it!}',
		 []),
	  fail
        ).

%% to_linda_stream(+Stream, +Code, +Item)
%%   As to_linda/2 but over an explicit stream; no reconnection attempt.
to_linda_stream(Stream, Code, Item) :-
	( current_stream(_N,socket,Stream) ->
	      current_output(CU),
	      set_output(Stream),
	      protocol(P),
	      write_out(P,Code,Item),
	      flush_output(Stream),
	      set_output(CU)
	; format(user_error,
	         '{ERROR: the connection has been shut down!}',
		 []),
	  fail
        ).
%-----------------------------------------------------------------------------
% Wire format: one protocol byte (0'p or 0'f), one operation byte, then the
% term — fullstop-terminated write_canonical for 0'p, fast_write for 0'f.
write_out(0'p,Code,Item) :-
	put(0'p),
	put(Code),
	write_canonical(Item),write('.'),nl.
write_out(0'f,Code,Item) :-
	put(0'f),
	put(Code),
	fast_write(Item).

% read_in(+Protocol, -Item)  Inverse of write_out/3's term encoding.
read_in(0'p,Item) :- read(Item).
read_in(0'f,Item) :- fast_read(Item).
%-----------------------------------------------------------------------------
%% from_linda(-Item)
%%   Read one term from the stored connection, temporarily redirecting
%%   current input to the socket.
from_linda(Item) :-
	setting(linda_stream(Stream,_Address)),
	current_input(S),
	set_input(Stream),
	protocol(P),
	read_in(P,Item),
	set_input(S).

%% from_linda_stream(+Stream, -Item)  As from_linda/1 over a given stream.
from_linda_stream(Stream, Item) :-
	current_input(S),
	set_input(Stream),
	protocol(P),
	read_in(P,Item),
	set_input(S).

%% from_linda(+Code, -Item)
%%   Read a status byte first; only when it equals Code is the term read.
%%   Fails (after restoring input) on any other status byte.
from_linda(Code, Item) :-
	setting(linda_stream(Stream,_Address)),
	current_input(S),
	set_input(Stream),
	get0(Cl),
	( Cl=Code ->
	      protocol(P),
	      read_in(P,Item),
	      set_input(S)
	; set_input(S),
	  fail
	).
%-----------------------------------------------------------------------------
%% time_out_select
%%   Wait until the server socket becomes readable, up to the configured
%%   time_out setting; prints an error and fails on timeout.
time_out_select :-
	setting(time_out(TimeOut)),
	time_out_select1(TimeOut).

time_out_select1(off) :- !.   % 'off' disables the timeout entirely
time_out_select1(TimeOut) :-
	number(TimeOut),
	setting(linda_stream(Stream,_Address)),
	select_socket(_,_,TimeOut,[Stream],[_|_]), !.
time_out_select1(_) :-
	format(user_error,'{ERROR: wait for linda timed out}~n',[]),
	fail.
%-----------------------------------------------------------------------------
%% linda_timeout(?Old, ?New)
%%   Query (New unbound) or replace the reply timeout. New is either a
%%   non-negative number of milliseconds or the atom 'off'.
linda_timeout(Old, New) :-
	( var(New) ->
	      Old = New
	; check_timeout_arg(New),
	  unset(time_out(Old)),
	  set(time_out(New))
        ).

% Valid timeout values: non-negative number, or 'off'.
check_timeout_arg(Msecs) :- number(Msecs), Msecs >= 0.
check_timeout_arg(off).
| leuschel/ecce | www/CiaoDE/ciao/library.development/blackboard/bb_client.pl | Perl | apache-2.0 | 6,029 |
#!/usr/bin/env perl
package Interhack::Plugin::InGame::TriggerReload;
use Calf::Role 'reload';
our $VERSION = '1.99_01';
# deps {{{
sub depend { 'Util' }
# }}}
# attributes {{{
# }}}
# method modifiers {{{
sub BUILD {
    ## Construction hook: register the ":reload" extended command so it is
    ## dispatched to this plugin's reload handler.
    my ($self) = @_;
    $self->extended_command('reload', \&reload);
}
# }}}
# methods {{{
sub reload {
    ## Handler for the ":reload" command: blank the topline before
    ## refreshing, otherwise the refresh would re-trigger the command
    ## (infinite recursion).
    my ($self) = @_;
    $self->topline('');
    $self->refresh;
}
# }}}
1;
| TAEB/Interhack2 | lib/Interhack/Plugin/InGame/TriggerReload.pm | Perl | bsd-3-clause | 434 |
%% prime_factors(+N, -Factors)
%%   Unifies Factors with the prime factorisation of integer N (>= 1) in
%%   non-decreasing order; prime_factors(1, []) holds.
prime_factors(N, Ans) :-
    prime_factors(N, [], 2, Ans).

%% prime_factors(+N, +Acc, +F, -Ans)
%%   Worker: trial division by candidate factors F, F+1, ...; factors
%%   found so far are accumulated (reversed) in Acc.
prime_factors(1, Acc, _, Ans) :-
    reverse(Acc, Ans), !.
% When F*F > N the remaining N must itself be prime: emit it and stop.
% This bounds trial division at sqrt(N) instead of N (the original
% iterated candidates all the way up to N for a prime remainder).
prime_factors(N, Acc, F, Ans) :-
    F * F > N, !,
    reverse([N|Acc], Ans).
prime_factors(N, Acc, F, Ans) :-
    Mod is N mod F, Mod = 0, N1 is N div F, !,
    prime_factors(N1, [F|Acc], F, Ans).
prime_factors(N, Acc, F, Ans) :-
    F1 is F + 1,
    prime_factors(N, Acc, F1, Ans).
| m00nlight/99-problems | prolog/p-35.pl | Perl | bsd-3-clause | 326 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.