code
stringlengths
2
1.05M
repo_name
stringlengths
5
101
path
stringlengths
4
991
language
stringclasses
3 values
license
stringclasses
5 values
size
int64
2
1.05M
package Paws::WorkDocs::Subscription;

  # AWS API shape class: a WorkDocs notification subscription.
  # All members are optional read-only strings, mirroring the wire format.
  use Moose;

  has EndPoint       => (is => 'ro', isa => 'Str');
  has Protocol       => (is => 'ro', isa => 'Str');
  has SubscriptionId => (is => 'ro', isa => 'Str');

1;

### main pod documentation begin ###

=head1 NAME

Paws::WorkDocs::Subscription

=head1 USAGE

This class represents one of two things:

=head3 Arguments in a call to a service

Use the attributes of this class as arguments to methods. You shouldn't
make instances of this class. Each attribute should be used as a named
argument in the calls that expect this type of object.

As an example, if Att1 is expected to be a Paws::WorkDocs::Subscription object:

  $service_obj->Method(Att1 => { EndPoint => $value, ..., SubscriptionId => $value });

=head3 Results returned from an API call

Use accessors for each attribute. If Att1 is expected to be an
Paws::WorkDocs::Subscription object:

  $result = $service_obj->Method(...);
  $result->Att1->EndPoint

=head1 DESCRIPTION

Describes a subscription.

=head1 ATTRIBUTES

=head2 EndPoint => Str

The endpoint of the subscription.

=head2 Protocol => Str

The protocol of the subscription.

=head2 SubscriptionId => Str

The ID of the subscription.

=head1 SEE ALSO

This class forms part of L<Paws>, describing an object used in L<Paws::WorkDocs>

=head1 BUGS and CONTRIBUTIONS

The source code is located here: https://github.com/pplu/aws-sdk-perl

Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues

=cut
ioanrogers/aws-sdk-perl
auto-lib/Paws/WorkDocs/Subscription.pm
Perl
apache-2.0
1,485
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Centreon plugin mode: checks per-QoS-queue traffic usage on Stormshield
# firewalls by parsing the last line of /log/l_monitor (over SSH or locally).
package network::stormshield::local::mode::qosusage;

use base qw(centreon::plugins::templates::counter);

use strict;
use warnings;

# Declares the counter framework's metric definitions: for each QoS queue
# (type => 1 means one instance per queue), current and peak traffic in both
# directions.  'in'/'out' use custom closures so thresholds can be applied
# either in absolute b/s or as a percentage of the configured speed.
sub set_counters {
    my ($self, %options) = @_;

    $self->{maps_counters_type} = [
        { name => 'qos', type => 1, cb_prefix_output => 'prefix_qos_output',
          message_multiple => 'All QoS are ok', skipped_code => { -10 => 1 } },
    ];
    $self->{maps_counters}->{qos} = [
        { label => 'in', set => {
                key_values => [ { name => 'in' }, { name => 'display' }, { name => 'speed_in' } ],
                closure_custom_calc => $self->can('custom_qos_calc'),
                closure_custom_calc_extra_options => { label_ref => 'in' },
                closure_custom_output => $self->can('custom_qos_output'),
                closure_custom_perfdata => $self->can('custom_qos_perfdata'),
                closure_custom_threshold_check => $self->can('custom_qos_threshold'),
            }
        },
        { label => 'in-peak', set => {
                key_values => [ { name => 'in_peak' }, { name => 'display' } ],
                output_template => 'In Peak : %s %s/s', output_change_bytes => 2,
                perfdatas => [
                    { label => 'traffic_in_peak', value => 'in_peak', template => '%.2f',
                      unit => 'b/s', min => 0, label_extra_instance => 1, instance_use => 'display' },
                ],
            }
        },
        { label => 'out', set => {
                key_values => [ { name => 'out' }, { name => 'display' }, { name => 'speed_out' } ],
                closure_custom_calc => $self->can('custom_qos_calc'),
                closure_custom_calc_extra_options => { label_ref => 'out' },
                closure_custom_output => $self->can('custom_qos_output'),
                closure_custom_perfdata => $self->can('custom_qos_perfdata'),
                closure_custom_threshold_check => $self->can('custom_qos_threshold'),
            }
        },
        { label => 'out-peak', set => {
                key_values => [ { name => 'out_peak' }, { name => 'display' } ],
                output_template => 'Out Peak : %s %s/s', output_change_bytes => 2,
                perfdatas => [
                    { label => 'traffic_out_peak', value => 'out_peak', template => '%.2f',
                      unit => 'b/s', min => 0, label_extra_instance => 1, instance_use => 'display' },
                ],
            }
        },
    ];
}

# Emits the traffic_in/traffic_out perfdata line.  Warning/critical ranges are
# converted back to absolute values when thresholds were given in '%' of the
# known speed; in 'b/s' mode they are passed through unchanged.
sub custom_qos_perfdata {
    my ($self, %options) = @_;

    my ($warning, $critical);
    if ($self->{instance_mode}->{option_results}->{units_traffic} eq '%' && defined($self->{result_values}->{speed})) {
        $warning = $self->{perfdata}->get_perfdata_for_output(label => 'warning-' . $self->{thlabel}, total => $self->{result_values}->{speed}, cast_int => 1);
        $critical = $self->{perfdata}->get_perfdata_for_output(label => 'critical-' . $self->{thlabel}, total => $self->{result_values}->{speed}, cast_int => 1);
    } elsif ($self->{instance_mode}->{option_results}->{units_traffic} eq 'b/s') {
        $warning = $self->{perfdata}->get_perfdata_for_output(label => 'warning-' . $self->{thlabel});
        $critical = $self->{perfdata}->get_perfdata_for_output(label => 'critical-' . $self->{thlabel});
    }

    $self->{output}->perfdata_add(
        label => 'traffic_' . $self->{result_values}->{label}, unit => 'b/s',
        instances => $self->use_instances(extra_instance => $options{extra_instance}) ? $self->{result_values}->{display} : undef,
        value => sprintf("%.2f", $self->{result_values}->{traffic}),
        warning => $warning,
        critical => $critical,
        min => 0, max => $self->{result_values}->{speed}
    );
}

# Checks thresholds against either the percentage (units '%', needs a known
# speed) or the raw b/s value; anything else is unconditionally 'ok'.
sub custom_qos_threshold {
    my ($self, %options) = @_;

    my $exit = 'ok';
    if ($self->{instance_mode}->{option_results}->{units_traffic} eq '%' && defined($self->{result_values}->{speed})) {
        $exit = $self->{perfdata}->threshold_check(value => $self->{result_values}->{traffic_prct}, threshold => [ { label => 'critical-' . $self->{thlabel}, exit_litteral => 'critical' }, { label => 'warning-' . $self->{thlabel}, exit_litteral => 'warning' } ]);
    } elsif ($self->{instance_mode}->{option_results}->{units_traffic} eq 'b/s') {
        $exit = $self->{perfdata}->threshold_check(value => $self->{result_values}->{traffic}, threshold => [ { label => 'critical-' . $self->{thlabel}, exit_litteral => 'critical' }, { label => 'warning-' . $self->{thlabel}, exit_litteral => 'warning' } ]);
    }
    return $exit;
}

# Human-readable line, e.g. "Traffic In : 1.50Mb/s (30.00% on 5.00Mb/s)";
# '-' stands in for unknown percentage/speed.
sub custom_qos_output {
    my ($self, %options) = @_;

    my ($traffic_value, $traffic_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{traffic}, network => 1);
    my ($total_value, $total_unit);
    if (defined($self->{result_values}->{speed}) && $self->{result_values}->{speed} =~ /[0-9]/) {
        ($total_value, $total_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{speed}, network => 1);
    }
    my $msg = sprintf("Traffic %s : %s/s (%s on %s)",
        ucfirst($self->{result_values}->{label}), $traffic_value . $traffic_unit,
        defined($self->{result_values}->{traffic_prct}) ? sprintf("%.2f%%", $self->{result_values}->{traffic_prct}) : '-',
        defined($total_value) ? $total_value . $total_unit : '-');
    return $msg;
}

# Resolves traffic and (if available) speed/percentage for one direction
# ('in' or 'out').  Speed precedence: per-queue speed from the config file
# (stored in Mb, hence * 1000 * 1000) over the --speed-in/--speed-out options.
sub custom_qos_calc {
    my ($self, %options) = @_;

    $self->{result_values}->{label} = $options{extra_options}->{label_ref};
    $self->{result_values}->{display} = $options{new_datas}->{$self->{instance} . '_display'};
    $self->{result_values}->{traffic} = $options{new_datas}->{$self->{instance} . '_' . $self->{result_values}->{label}};
    if ($options{new_datas}->{$self->{instance} . '_speed_' . $self->{result_values}->{label}} > 0) {
        $self->{result_values}->{speed} = $options{new_datas}->{$self->{instance} . '_speed_' . $self->{result_values}->{label}} * 1000 * 1000;
        $self->{result_values}->{traffic_prct} = $self->{result_values}->{traffic} * 100 / $self->{result_values}->{speed};
    } elsif (defined($self->{instance_mode}->{option_results}->{'speed_' . $self->{result_values}->{label}}) &&
             $self->{instance_mode}->{option_results}->{'speed_' . $self->{result_values}->{label}} =~ /[0-9]/) {
        $self->{result_values}->{traffic_prct} = $self->{result_values}->{traffic} * 100 / ($self->{instance_mode}->{option_results}->{'speed_' . $self->{result_values}->{label}} * 1000 * 1000);
        $self->{result_values}->{speed} = $self->{instance_mode}->{option_results}->{'speed_' . $self->{result_values}->{label}} * 1000 * 1000;
    }
    return 0;
}

# Registers command-line options; defaults read the last l_monitor log line
# with 'tail -1 /log/l_monitor'.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $options{options}->add_options(arguments => {
        "filter-name:s"       => { name => 'filter_name' },
        "filter-vlan:s"       => { name => 'filter_vlan' },
        "speed-in:s"          => { name => 'speed_in' },
        "speed-out:s"         => { name => 'speed_out' },
        "units-traffic:s"     => { name => 'units_traffic', default => '%' },
        "hostname:s"          => { name => 'hostname' },
        "ssh-option:s@"       => { name => 'ssh_option' },
        "ssh-path:s"          => { name => 'ssh_path' },
        "ssh-command:s"       => { name => 'ssh_command', default => 'ssh' },
        "timeout:s"           => { name => 'timeout', default => 30 },
        "sudo"                => { name => 'sudo' },
        "command:s"           => { name => 'command', default => 'tail' },
        "command-path:s"      => { name => 'command_path' },
        "command-options:s"   => { name => 'command_options', default => '-1 /log/l_monitor' },
        "config-speed-file:s" => { name => 'config_speed_file' },
    });

    return $self;
}

# A non-empty --hostname switches execution to remote (SSH) mode; 'me' is the
# placeholder used when running locally.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::check_options(%options);

    if (defined($self->{option_results}->{hostname}) && $self->{option_results}->{hostname} ne '') {
        $self->{option_results}->{remote} = 1;
    }
    $self->{hostname} = $self->{option_results}->{hostname};
    if (!defined($self->{hostname})) {
        $self->{hostname} = 'me';
    }
}

# Prefix for each per-instance output line.
sub prefix_qos_output {
    my ($self, %options) = @_;

    return "QoS '" . $options{instance_value}->{display} . "' ";
}

# Parses the optional QoS configuration file (ini-like Stormshield export) to
# map each queue name to its configured speeds.  Silently does nothing when
# the option is unset or the file is unreadable (best-effort by design).
sub load_speed_config {
    my ($self, %options) = @_;

    $self->{config_speeds} = {};
    return if (!defined($self->{option_results}->{config_speed_file}) || $self->{option_results}->{config_speed_file} eq '');
    $self->{content} = do {
        local $/ = undef;
        if (open my $fh, "<", $self->{option_results}->{config_speed_file}) {
            <$fh>;
        }
    };
    return if (!defined($self->{content}));

    # Expected file format, one section per queue:
    #[TEST]
    #Type=CBQ
    #Min=0
    #Max=5000
    #Min_Rev=0
    #Max_Rev=5000
    #QLength=0
    #PrioritizeAck=1
    #PrioritizeLowDelay=1
    #Color=000000
    #Comment=
    #
    # Units: Kb
    # Max is the outbound cap, Max_Rev the inbound one; / 1000 converts Kb to
    # the Mb unit custom_qos_calc() expects.
    while ($self->{content} =~ /\[(.*?)\].*?Max=(.*?)\n.*?Max_Rev=(.*?)\n/msg) {
        $self->{config_speeds}->{$1} = { speed_in => $3 / 1000, speed_out => $2 / 1000 }
    }
}

# Runs the configured command, then extracts one entry per "NAME=label,..."
# token from the monitor line, applying the name/vlan filters.
sub manage_selection {
    my ($self, %options) = @_;

    # Sample input line:
    #id=firewall time="2017-01-31 16:56:36" fw="XXXX" tz=+0100 startime="2017-01-31 16:56:36" security=70 system=1 CPU=3,2,1 Pvm=0,0,0,0,0,0,0,0,0,0,0 Vlan96=VLAN-XXX-DMZ,15140,17768,21952,28280 Vlan76=dmz-xxx-xxx,769592,948320,591584,795856
    my $content = centreon::plugins::misc::execute(
        output => $self->{output},
        options => $self->{option_results},
        sudo => $self->{option_results}->{sudo},
        command => $self->{option_results}->{command},
        command_path => $self->{option_results}->{command_path},
        command_options => $self->{option_results}->{command_options}
    );

    $self->load_speed_config();
    $self->{qos} = {};
    # Version 3, there is 7 fields (5 before)
    my $pattern = '(\S+?)=([^,]+?),(\d+),(\d+),(\d+),(\d+)(?:\s|\Z)';
    if ($content !~ /$pattern/) {
        $pattern = '(\S+?)=([^,]+?),(\d+),(\d+),(\d+),(\d+),\d+,\d+(?:\s|\Z)';
    }
    while ($content =~ /$pattern/msg) {
        # Captures: vlan id, queue name, in, in peak, out, out peak.
        my ($vlan, $name, $in, $in_max, $out, $out_max) = ($1, $2, $3, $4, $5, $6);
        if (defined($self->{option_results}->{filter_name}) && $self->{option_results}->{filter_name} ne '' &&
            $name !~ /$self->{option_results}->{filter_name}/) {
            $self->{output}->output_add(long_msg => "skipping '" . $name . "': no matching filter.", debug => 1);
            next;
        }
        if (defined($self->{option_results}->{filter_vlan}) && $self->{option_results}->{filter_vlan} ne '' &&
            $vlan !~ /$self->{option_results}->{filter_vlan}/) {
            $self->{output}->output_add(long_msg => "skipping '" . $vlan . "': no matching filter.", debug => 1);
            next;
        }

        $self->{qos}->{$name} = {
            display => $name,
            in => $in, in_peak => $in_max,
            out => $out, out_peak => $out_max,
            speed_in => defined($self->{config_speeds}->{$name}->{speed_in}) ? $self->{config_speeds}->{$name}->{speed_in} : 0,
            speed_out => defined($self->{config_speeds}->{$name}->{speed_out}) ? $self->{config_speeds}->{$name}->{speed_out} : 0
        };
    }

    if (scalar(keys %{$self->{qos}}) <= 0) {
        $self->{output}->add_option_msg(short_msg => "No QoS found.");
        $self->{output}->option_exit();
    }
}

1;

__END__

=head1 MODE

Check QoS usage.

=over 8

=item B<--filter-name>

Filter by QoS name (can be a regexp).

=item B<--filter-vlan>

Filter by vlan name (can be a regexp).

=item B<--hostname>

Hostname to query.

=item B<--ssh-option>

Specify multiple options like the user (example: --ssh-option='-l=centreon-engine' --ssh-option='-p=52').

=item B<--ssh-path>

Specify ssh command path (default: none)

=item B<--ssh-command>

Specify ssh command (default: 'ssh'). Useful to use 'plink'.

=item B<--timeout>

Timeout in seconds for the command (Default: 30).

=item B<--sudo>

Use 'sudo' to execute the command.

=item B<--command>

Command to get information (Default: 'tail').
Can be changed if you have output in a file.

=item B<--command-path>

Command path (Default: none).

=item B<--command-options>

Command options (Default: '-1 /log/l_monitor').

=item B<--speed-in>

Set interface speed for incoming traffic (in Mb).

=item B<--speed-out>

Set interface speed for outgoing traffic (in Mb).

=item B<--config-speed-file>

File with speed configurations.

=item B<--units-traffic>

Units of thresholds for the traffic (Default: '%') ('%', 'b/s').

=item B<--warning-*>

Threshold warning.
Can be: 'in', 'in-peak', 'out', 'out-peak'.

=item B<--critical-*>

Threshold critical.
Can be: 'in', 'in-peak', 'out', 'out-peak'.

=back

=cut
centreon/centreon-plugins
network/stormshield/local/mode/qosusage.pm
Perl
apache-2.0
13,946
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

package storage::oracle::zs::restapi::mode::components::memory;

use strict;
use warnings;

# Hardware-component checker: walks every chassis in the collected results
# and reports the health of each memory component, honoring the caller's
# section/instance filters.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => 'checking memories');
    $self->{components}->{memory} = { name => 'memory', total => 0, skip => 0 };
    return if ($self->check_filter(section => 'memory'));

    foreach my $chassis (values %{$self->{results}}) {
        foreach my $entry (@{$chassis->{memory}}) {
            my $instance = $chassis->{name} . ':' . $entry->{label};

            next if ($self->check_filter(section => 'memory', instance => $instance));
            $self->{components}->{memory}->{total}++;

            # The API exposes only a boolean fault flag for memory parts.
            my $status = $entry->{faulted} ? 'faulted' : 'ok';
            $self->{output}->output_add(
                long_msg => sprintf(
                    "memory '%s' status is '%s' [instance = %s]",
                    $instance, $status, $instance,
                )
            );

            my $exit = $self->get_severity(label => 'default', section => 'memory', value => $status);
            next if ($self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1));

            # Non-ok severity: surface it in the short (summary) output.
            $self->{output}->output_add(
                severity => $exit,
                short_msg => sprintf("Memory '%s' status is '%s'", $instance, $status)
            );
        }
    }
}

1;
centreon/centreon-plugins
storage/oracle/zs/restapi/mode/components/memory.pm
Perl
apache-2.0
2,126
#! /usr/bin/perl -w
# (Last emacs Update: Tue May 27, 2014 1:52 pm by Gary Delp v-1.1.15)
# strip the html from the ETCZ files.

use warnings;
use strict;

# process_file($filename)
#
# Strips HTML from one ETCZ "*.html" file, writing the plain text to the
# same path minus the ".html" suffix.  Only the content between <PRE> and
# </PRE> (or </BODY>) is kept; inline <I>/<B>/<FONT ...> tags and trailing
# whitespace are removed.  Unreadable files and files whose name does not
# end in ".html" are silently skipped (original behavior).
#
# Changes from the original: the sub prototype ($) was dropped (prototypes
# alter parsing, they do not validate arguments), and the input is now read
# line-by-line with while() instead of slurping the whole file via foreach.
sub process_file {
    my ($fni) = @_;

    print "Processing $fni\n";
    # NOTE(review): the '.' before "html" is an unescaped regex dot, so
    # e.g. "fooXhtml" would also match -- kept as-is to preserve behavior.
    return unless -r $fni and $fni =~ m/^(.*).html$/o;
    my $fno = $1;

    open(my $fdi, "<", $fni) or die "Cannot open for $fni for reading $!";
    open(my $fdo, ">", $fno) or die "Cannot open for $fno for writing $!";
    print "Writing $fno\n";

    my $pre = 0;
  LINES:
    while (my $line = <$fdi>) {
        # Skip everything until the first <PRE> is seen.
        next LINES unless $pre or $line =~ m/<PRE>/io;
        if ($line =~ m'</PRE>|</BODY>'io) {
            $pre = 0;
            next LINES;
        }
        $line =~ s#</?(I|B|FONT[^>]*)>##go;   # drop inline markup tags
        $line =~ s/[\n\r\t ]*$//o;            # trim trailing whitespace/CR
        # The line containing <PRE> itself is not printed ($pre is still
        # false at this point); output starts with the following line.
        print $fdo "$line\n" if $pre;
        $pre = 1;
    }

    close $fdi or die("error on close of $fni ");
    close $fdo or die("error on close of $fno ");
    return;
}

# Driver: read one filename per line from ARGV/STDIN; index pages are skipped.
while (<>) {
    chomp;
    next if m'index.html'oi;
    process_file($_);
}
ElderDelp/elisp-collection
scripts/strip_html.pl
Perl
apache-2.0
943
package VMOMI::VsanHostRuntimeInfoDiskIssue;

# vSphere SDK data object: one disk issue entry from a host's VSAN
# runtime info.  Inherits the DynamicData plumbing.
use parent 'VMOMI::DynamicData';

use strict;
use warnings;

our @class_ancestors = ('DynamicData');

# [ member name, type, is-array ] triples consumed by the serializer.
our @class_members = (
    ['diskId', undef, 0],
    ['issue',  undef, 0],
);

sub get_class_ancestors {
    return @class_ancestors;
}

# Own members appended after every inherited member.
sub get_class_members {
    my $class = shift;
    return ($class->SUPER::get_class_members(), @class_members);
}

1;
stumpr/p5-vmomi
lib/VMOMI/VsanHostRuntimeInfoDiskIssue.pm
Perl
apache-2.0
454
# $Id: NetBSD.pm,v 1.3 2008/10/27 20:31:21 drhyde Exp $

# The package statement is split across two lines on purpose so the PAUSE
# indexer does not pick this module up -- do not "fix" it.
package #
Devel::AssertOS::NetBSD;

use strict;
use warnings;

use Devel::CheckOS;

our $VERSION = '1.1';

# True (1) iff the running perl reports NetBSD; $^O is the OS name perl
# was built for.
sub os_is { $^O eq 'netbsd' ? 1 : 0; }

# Contract of Devel::AssertOS::* modules: merely loading this module on any
# other operating system dies immediately.
Devel::CheckOS::die_unsupported() unless os_is();

=head1 COPYRIGHT and LICENCE

Copyright 2007 - 2008 David Cantrell

This software is free-as-in-speech software, and may be used, distributed,
and modified under the terms of either the GNU General Public Licence
version 2 or the Artistic Licence.  It's up to you which one you use.  The
full text of the licences can be found in the files GPL2.txt and
ARTISTIC.txt, respectively.

=cut

1;
gitpan/Tapper-Base
inc/Devel/AssertOS/NetBSD.pm
Perl
bsd-2-clause
614
package nginx;

use 5.006001;
use strict;
use warnings;

require Exporter;

our @ISA = qw(Exporter);

# Everything is exported by default: the two handler return codes plus the
# HTTP status constants.
our @EXPORT = qw(
    OK
    DECLINED

    HTTP_OK
    HTTP_CREATED
    HTTP_ACCEPTED
    HTTP_NO_CONTENT
    HTTP_PARTIAL_CONTENT

    HTTP_MOVED_PERMANENTLY
    HTTP_MOVED_TEMPORARILY
    HTTP_REDIRECT
    HTTP_SEE_OTHER
    HTTP_NOT_MODIFIED
    HTTP_TEMPORARY_REDIRECT

    HTTP_BAD_REQUEST
    HTTP_UNAUTHORIZED
    HTTP_PAYMENT_REQUIRED
    HTTP_FORBIDDEN
    HTTP_NOT_FOUND
    HTTP_NOT_ALLOWED
    HTTP_NOT_ACCEPTABLE
    HTTP_REQUEST_TIME_OUT
    HTTP_CONFLICT
    HTTP_GONE
    HTTP_LENGTH_REQUIRED
    HTTP_REQUEST_ENTITY_TOO_LARGE
    HTTP_REQUEST_URI_TOO_LARGE
    HTTP_UNSUPPORTED_MEDIA_TYPE
    HTTP_RANGE_NOT_SATISFIABLE

    HTTP_INTERNAL_SERVER_ERROR
    HTTP_SERVER_ERROR
    HTTP_NOT_IMPLEMENTED
    HTTP_BAD_GATEWAY
    HTTP_SERVICE_UNAVAILABLE
    HTTP_GATEWAY_TIME_OUT
    HTTP_INSUFFICIENT_STORAGE
);

our $VERSION = '1.3.13';

require XSLoader;
XSLoader::load('nginx', $VERSION);

# Preloaded methods go here.

# Handler return codes and HTTP status codes, declared in one block-form
# constant list (behaviorally identical to per-constant declarations).
use constant {
    OK       => 0,
    DECLINED => -5,

    HTTP_OK              => 200,
    HTTP_CREATED         => 201,
    HTTP_ACCEPTED        => 202,
    HTTP_NO_CONTENT      => 204,
    HTTP_PARTIAL_CONTENT => 206,

    HTTP_MOVED_PERMANENTLY  => 301,
    HTTP_MOVED_TEMPORARILY  => 302,
    HTTP_REDIRECT           => 302,
    HTTP_SEE_OTHER          => 303,
    HTTP_NOT_MODIFIED       => 304,
    HTTP_TEMPORARY_REDIRECT => 307,

    HTTP_BAD_REQUEST              => 400,
    HTTP_UNAUTHORIZED             => 401,
    HTTP_PAYMENT_REQUIRED         => 402,
    HTTP_FORBIDDEN                => 403,
    HTTP_NOT_FOUND                => 404,
    HTTP_NOT_ALLOWED              => 405,
    HTTP_NOT_ACCEPTABLE           => 406,
    HTTP_REQUEST_TIME_OUT         => 408,
    HTTP_CONFLICT                 => 409,
    HTTP_GONE                     => 410,
    HTTP_LENGTH_REQUIRED          => 411,
    HTTP_REQUEST_ENTITY_TOO_LARGE => 413,
    HTTP_REQUEST_URI_TOO_LARGE    => 414,
    HTTP_UNSUPPORTED_MEDIA_TYPE   => 415,
    HTTP_RANGE_NOT_SATISFIABLE    => 416,

    HTTP_INTERNAL_SERVER_ERROR => 500,
    HTTP_SERVER_ERROR          => 500,
    HTTP_NOT_IMPLEMENTED       => 501,
    HTTP_BAD_GATEWAY           => 502,
    HTTP_SERVICE_UNAVAILABLE   => 503,
    HTTP_GATEWAY_TIME_OUT      => 504,
    HTTP_INSUFFICIENT_STORAGE  => 507,
};

# Back-compat alias: rflush() simply delegates to flush() on the request.
sub rflush {
    return $_[0]->flush;
}

1;
__END__

=head1 NAME

nginx - Perl interface to the nginx HTTP server API

=head1 SYNOPSIS

  use nginx;

=head1 DESCRIPTION

This module provides a Perl interface to the nginx HTTP server API.

=head1 SEE ALSO

http://nginx.org/en/docs/http/ngx_http_perl_module.html

=head1 AUTHOR

Igor Sysoev

=head1 COPYRIGHT AND LICENSE

Copyright (C) Igor Sysoev
Copyright (C) Nginx, Inc.

=cut
Yawning/nginx-polarssl
src/http/modules/perl/nginx.pm
Perl
bsd-2-clause
3,303
package App::Netdisco::SSHCollector::Platform::IOS;
# vim: set expandtab tabstop=8 softtabstop=4 shiftwidth=4:

=head1 NAME

App::Netdisco::SSHCollector::Platform::IOS

=head1 DESCRIPTION

Collect ARP entries from Cisco IOS devices.

=cut

use strict;
use warnings;

use Dancer ':script';
use Moo;

=head1 PUBLIC METHODS

=over 4

=item B<arpnip($host, $ssh)>

Retrieve ARP entries from device. C<$host> is the hostname or IP address
of the device. C<$ssh> is a Net::OpenSSH connection to the device.

Returns an array of hashrefs in the format
C<{ mac =E<gt> MACADDR, ip =E<gt> IPADDR }>.

=back

=cut

sub arpnip {
    my ($self, $hostlabel, $ssh, @args) = @_;

    debug "$hostlabel $$ arpnip()";

    my @lines = $ssh->capture("show ip arp");
    chomp @lines;

    # Typical row (whitespace-separated; field 1 is the IP, field 3 the MAC):
    # Internet  172.16.20.15   13   0024.b269.867d  ARPA  FastEthernet0/0.1
    return map {
        my @field = split m/\s+/, $_;
        { mac => $field[3], ip => $field[1] };
    } grep { m/^Internet/ } @lines;
}

1;
gitpan/App-Netdisco
lib/App/Netdisco/SSHCollector/Platform/IOS.pm
Perl
bsd-3-clause
1,063
# # ExpressionOrAssignLValue1.pm : part of the Mace toolkit for building distributed systems # # Copyright (c) 2010, Sunghwan Yoo, Charles Killian # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # * Neither the names of Duke University nor The University of # California, San Diego, nor the names of the authors or contributors # may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#
# ----END-OF-LEGAL-STUFF----

package Mace::Compiler::ParseTreeObject::ExpressionOrAssignLValue1;

use strict;
use warnings;
use v5.10.1;

# Generated accessors/constructor: type, expr_lvalue1 (nested parse-tree
# object), and the two operator strings.
use Class::MakeMethods::Template::Hash (
    'new'    => 'new',
    'scalar' => 'type',
    'object' => ["expr_lvalue1" => { class => "Mace::Compiler::ParseTreeObject::ExpressionLValue1" }],
    'scalar' => "prepost_assign_op",
    'scalar' => "assign_binary_op",
);

# Render this node back to source text.
#
# The deprecated given/when (smartmatch) construct used by the original was
# replaced by a plain if/elsif chain over the string-valued type(); behavior
# is unchanged, but the module now compiles cleanly on modern perls where
# smartmatch warns or is removed.
sub toString {
    my $this = shift;
    my $type = $this->type() // '';

    if ($type eq "post_op") {
        return $this->expr_lvalue1()->toString() . $this->prepost_assign_op();
    }
    elsif ($type eq "pre_op") {
        return $this->prepost_assign_op() . $this->expr_lvalue1()->toString();
    }
    elsif ($type eq "assign_op") {
        # NOTE(review): expr1() is not declared in the MakeMethods list
        # above; presumably it is installed on instances elsewhere --
        # confirm before relying on this branch.
        return $this->expr_lvalue1()->toString() . $this->assign_binary_op() . $this->expr1()->toString();
    }
    elsif ($type eq "expr_lvalue1") {
        return $this->expr_lvalue1()->toString();
    }
    return "ExpressionOrAssignLValue1:NOT-PARSED";
}

# Collect the variables used by this node (list of names from the nested
# lvalue, plus the right-hand expression for assignments).
sub usedVar {
    my $this = shift;
    my $type = $this->type() // '';

    if ($type eq "assign_op") {
        return ($this->expr_lvalue1()->usedVar(), $this->expr1()->usedVar());
    }
    if ($type eq "post_op" or $type eq "pre_op" or $type eq "expr_lvalue1") {
        return $this->expr_lvalue1()->usedVar();
    }
    return ();
}

1;
jojochuang/eventwave
perl5/Mace/Compiler/ParseTreeObject/ExpressionOrAssignLValue1.pm
Perl
bsd-3-clause
3,203
package CSS::Prepare::Property::Background;

use Modern::Perl;

use CSS::Prepare::Property::Expansions;
use CSS::Prepare::Property::Values;

# parse( $self, $has_hack, $location, %declaration )
#
# Validate one background-* declaration and return
# ( \%canonical, \@errors ).  %canonical maps accepted property names to
# their (possibly url-shortened) values; @errors collects one hashref per
# rejected value.
sub parse {
    my $self = shift;
    my $has_hack = shift;
    my $location = shift;
    my %declaration = @_;

    my $property = $declaration{'property'};
    my $value = $declaration{'value'};
    my %canonical;
    my @errors;

    # Closure over $value/$property: dispatches to the is_<type>_value()
    # validator (imported from CSS::Prepare::Property::Values) via a
    # symbolic reference -- hence the scoped 'no strict refs'.  A validator
    # that does not exist simply leaves $is_valid false.
    my $valid_property_or_error = sub {
            my $type = shift;

            my $sub = "is_${type}_value";
            my $is_valid = 0;

            eval {
                no strict 'refs';
                $is_valid = &$sub( $value );
            };

            if ( $is_valid ) {
                $canonical{ $property } = $value;
            }
            else {
                push @errors, {
                        error => "invalid ${type} property: '${value}'"
                    };
            }
        };

    # Note: both the US and UK spellings of background-color are accepted.
    &$valid_property_or_error( 'background_colour' )
        if 'background-color' eq $property
           || 'background-colour' eq $property;

    &$valid_property_or_error( 'background_image' )
        if 'background-image' eq $property;

    &$valid_property_or_error( 'background_repeat' )
        if 'background-repeat' eq $property;

    &$valid_property_or_error( 'background_attachment' )
        if 'background-attachment' eq $property;

    &$valid_property_or_error( 'background_position' )
        if 'background-position' eq $property;

    # The 'background' shorthand can contain the five sub-properties in any
    # order; validate_any_order_shorthand() splits it into them.
    if ( 'background' eq $property ) {
        my %types = (
                'background-color'      => $background_colour_value,
                'background-image'      => $background_image_value,
                'background-repeat'     => $background_repeat_value,
                'background-attachment' => $background_attachment_value,
                'background-position'   => $background_position_value,
            );

        %canonical = validate_any_order_shorthand( $value, %types );

        push @errors, {
                error => "invalid background property: '${value}'"
            }
            unless %canonical;
    }

    # Rewrite url(...) values relative to $location where possible.
    if ( defined $canonical{'background-image'} ) {
        $canonical{'background-image'}
            = shorten_url_value(
                  $canonical{'background-image'}, $location, $self, );
    }

    return \%canonical, \@errors;
}

# output( $self, $block )
#
# Serialize the background properties found in $block.  When all five
# sub-properties are present they are collapsed into a single 'background'
# shorthand; otherwise each property is emitted individually.
sub output {
    my $self = shift;
    my $block = shift;

    my @properties = qw(
            background-color
            background-image
            background-repeat
            background-attachment
            background-position
        );
    my @values;
    my @output;
    my @value_only;

    foreach my $property ( @properties ) {
        my $value = $block->{ $property };

        if ( defined $value ) {
            $value = shorten_background_position_value( $value )
                if 'background-position' eq $property;
            $value = shorten_colour_value( $value )
                if 'background-color' eq $property;

            push @values,
                sprintf $self->output_format, "${property}:", $value;
            # Empty-string values still count towards the five needed for
            # the shorthand, but contribute nothing to the joined value.
            push @value_only, $value
                if $value;
        }
    }

    if ( 5 == scalar @values ) {
        my $value = join $self->output_separator, @value_only;
        push @output, sprintf $self->output_format, 'background:', $value;
    }
    else {
        push @output, @values;
    }

    return @output;
}

# Drop a redundant trailing 'center' (or its equivalent '50%') from a
# two-part background-position value, since it is the implied default.
sub shorten_background_position_value {
    my $value = shift;

    return unless defined $value;

    # CSS2.1 14.2.1: "If only one value is specified, the second value
    # is assumed to be 'center'."
    $value =~ s{(.+) \s+ (?: center | 50\% ) $}{$1}x;

    return $value;
}

1;
norm/CSS-Prepare
p5-CSS-Prepare/lib/CSS/Prepare/Property/Background.pm
Perl
bsd-3-clause
3,841
use strict;
use warnings;

# Test fixture for the 'self' pragma: 'use self' exports the $self and
# @args variables so methods do not have to unpack @_ by hand.
package Counter;
use self;

# Plain constructor; the counter starts at zero.
sub new {
    my $class = shift;
    return bless { v => 0 }, $class;
}

# Set the counter to the first argument (@args excludes the invocant).
sub set {
    my ($v) = @args;
    $self->{v} = $v;
}

# Return the current value.
sub out {
    $self->{v};
}

# Post-increment; returns the value *before* incrementing.
sub inc {
    $self->{v}++;
}

# Subclass with no overrides -- exercises inheritance through 'use base'.
package ChildofCounter;
use base 'Counter';

# Deliberate duplicate of Counter: verifies that two packages can each
# 'use self' independently in the same file.
package SecondCounter;
use self;

sub new {
    my $class = shift;
    return bless { v => 0 }, $class;
}

sub set {
    my ($v) = @args;
    $self->{v} = $v;
}

sub out {
    $self->{v};
}

sub inc {
    $self->{v}++;
}

1;
gitpan/self
t/lib2/Counter.pm
Perl
mit
537
# NOTE: file generated by SOAP::WSDL from the AdWords v201409 schema --
# edits here will be lost on regeneration.
package Google::Ads::AdWords::v201409::IntegerSetAttribute;

use strict;
use warnings;

# Elements of this type are XML-namespace qualified.
__PACKAGE__->_set_element_form_qualified(1);

sub get_xmlns { 'https://adwords.google.com/api/adwords/o/v201409' };

# This complexType carries no XML attributes.
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;

sub __get_attr_class {
    return $XML_ATTRIBUTE_CLASS;
}

use base qw(Google::Ads::AdWords::v201409::Attribute);
# Variety: sequence
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);

{ # BLOCK to scope variables

# Class::Std inside-out attribute storage (one hash per attribute, keyed
# by object identity).
my %Attribute__Type_of :ATTR(:get<Attribute__Type>);
my %value_of :ATTR(:get<value>);

# _factory args: element order, storage hashes, XSD types, XML names.
__PACKAGE__->_factory(
    [ qw(        Attribute__Type
        value

    ) ],
    {
        'Attribute__Type' => \%Attribute__Type_of,
        'value' => \%value_of,
    },
    {
        'Attribute__Type' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
        'value' => 'SOAP::WSDL::XSD::Typelib::Builtin::int',
    },
    {

        'Attribute__Type' => 'Attribute.Type',
        'value' => 'value',
    }
);

} # end BLOCK

1;

=pod

=head1 NAME

Google::Ads::AdWords::v201409::IntegerSetAttribute

=head1 DESCRIPTION

Perl data type class for the XML Schema defined complexType
IntegerSetAttribute from the namespace
https://adwords.google.com/api/adwords/o/v201409.

{@link Attribute} type that contains a Set of integer values.

=head2 PROPERTIES

The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:

=over

=item * value

=back

=head1 METHODS

=head2 new

Constructor. The following data structure may be passed to new():

=head1 AUTHOR

Generated by SOAP::WSDL

=cut
gitpan/GOOGLE-ADWORDS-PERL-CLIENT
lib/Google/Ads/AdWords/v201409/IntegerSetAttribute.pm
Perl
apache-2.0
1,653
% (c) Florence Benoy, Andy King and Fred Mesnard
%
% See the TPLP paper "Calculating Convex Hulls with
% a Linear Solver" for explanation and discussion
%
% NOTE: this file is Prolog (SICStus, CLP(Q)), not Perl, despite the
% dataset's language label.

/* example call:
| ?- hull:convex_hull([X1,Y1],[X1=0,Y1=1],[X2,Y2],[X2>=0,Y2=X2],V,S).
S = [_A>=0,_A-_B>= -1,_A-_B=<0],
V = [_A,_B] ?
*/

:- module(convex_hull, [project/3, convex_hull/6, tell_cs/1]).

:- use_module(library(clpq)).

% project(+Xs, +Cxs, -ProjectCxs): projects the constraints Cxs onto the
% variables Xs by posting them to the CLP(Q) solver on a copy and dumping
% the residual constraints back in terms of Xs.
project(Xs, Cxs, ProjectCxs) :-
    call_residue(copy_term(Xs-Cxs, CpyXs-CpyCxs), _),
    tell_cs(CpyCxs),
    prepare_dump(CpyXs, Xs, Zs, DumpCxs, ProjectCxs),
    dump(Zs, Vs, DumpCxs),
    Xs = Vs.

% tell_cs(+Cs): posts each constraint of the list to the solver.
tell_cs([]).
tell_cs([C|Cs]) :-
    {C},
    tell_cs(Cs).

% prepare_dump(+CpyXs, +Ys, -Zs, ?CsIn, ?CsOut): ground copies become
% explicit equations (Y = value) in the output constraint list, while the
% still-free copies are collected in Zs for dump/3.
prepare_dump([], [], [], Cs, Cs).
prepare_dump([X|Xs], YsIn, ZsOut, CsIn, CsOut) :-
    (ground(X) ->
        YsIn = [Y|Ys],
        ZsOut = [_|Zs],
        CsOut = [Y=X|Cs]
    ;
        YsIn = [_|Ys],
        ZsOut = [X|Zs],
        CsOut = Cs
    ),
    prepare_dump(Xs, Ys, Zs, CsIn, Cs).

% convex_hull(+Xs, +Cxs, +Ys, +Cys, -Zs, -Czs): Czs describes the convex
% hull of the polyhedra Cxs (over Xs) and Cys (over Ys), expressed over
% fresh variables Zs, using the standard Sigma-combination encoding.
convex_hull(Xs, Cxs, Ys, Cys, Zs, Czs) :-
    scale(Cxs, Sig1, [], C1s),
    scale(Cys, Sig2, C1s, C2s),
    add_vect(Xs, Ys, Zs, C2s, C3s),
    project(Zs, [Sig1 >= 0, Sig2 >= 0, Sig1+Sig2 = 1|C3s], Czs).

% scale(+Cs, +Sig, ?Acc, -Out): multiplies both sides of every (binary)
% constraint by the scaling variable Sig.
scale([], _, Cs, Cs).
scale([C1|C1s], Sig, C2s, C3s) :-
    C1 =.. [RelOp, A1, B1],
    C2 =.. [RelOp, A2, B2],
    mul_exp(A1, Sig, A2),
    mul_exp(B1, Sig, B2),
    scale(C1s, Sig, [C2|C2s], C3s).

mul_exp(E1, Sigma, E2) :- once(mulexp(E1, Sigma, E2)).

% mulexp/3 clauses: structural recursion over linear expressions; variables
% and already-scaled N*X terms pass through, ground constants pick up *Sig.
mulexp(  X,   _,   X) :- var(X).
mulexp(N*X,   _, N*X) :- ground(N), var(X).
mulexp( -X, Sig,  -Y) :- mulexp(X, Sig, Y).
mulexp(A+B, Sig, C+D) :- mulexp(A, Sig, C), mulexp(B, Sig, D).
mulexp(A-B, Sig, C-D) :- mulexp(A, Sig, C), mulexp(B, Sig, D).
mulexp(  N, Sig, N*Sig) :- ground(N).

% add_vect(+Us, +Vs, -Ws, ?Acc, -Out): adds W = U+V equations pairing the
% two scaled coordinate vectors.
add_vect([], [], [], Cs, Cs).
add_vect([U|Us], [V|Vs], [W|Ws], C1s, C2s) :-
    add_vect(Us, Vs, Ws, [W = U+V|C1s], C2s).
leuschel/logen
old_logen/convex_hull.pl
Perl
apache-2.0
1,852
package Paws::Glue::GetMappingResponse;

  # Result shape returned by the Glue GetMapping API call.
  use Moose;

  has Mapping => (is => 'ro', isa => 'ArrayRef[Paws::Glue::MappingEntry]', required => 1);

  # Internal: request id echoed back by the service.
  has _request_id => (is => 'ro', isa => 'Str');

### main pod documentation begin ###

=head1 NAME

Paws::Glue::GetMappingResponse

=head1 ATTRIBUTES

=head2 B<REQUIRED> Mapping => ArrayRef[L<Paws::Glue::MappingEntry>]

A list of mappings to the specified targets.

=head2 _request_id => Str

=cut

1;
ioanrogers/aws-sdk-perl
auto-lib/Paws/Glue/GetMappingResponse.pm
Perl
apache-2.0
451
use strict; package main; # Set UserIndex $userIndex = setUserIndex(); # init message variables $sessionObj->param("userMessage", ""); my $errorMessage = ""; $adminName = $request->param('adminName'); checkAdminName($adminName); syncAdminName($adminName); $userName = $request->param('userName'); checkUserName($adminName, $userName); syncUserName($userName); my $memberName = $request->param('memberName'); $hgName = $request->param('hgName'); $permission = $request->param('permission'); insertSharedHostGroupMember($adminName, $userName, $memberName, $hgName, $permission); $queryString = "adminName=" . URLEncode($adminName) . "&userName=" . URLEncode($userName) . "&hgName=" . URLEncode($hgName) . "&permission=" . URLEncode($permission); ################################################### SUBROUTINES #INSERT TO HOST GROUP sub insertSharedHostGroupMember { my ($adminName, $ownerName, $memberName, $hgName, $permission) = @_; # Update user2HostGroup Index my $user2HostGroup = lock_retrieve("$perfhome/var/db/mappings/user2HostGroup.ser") or die("Could not lock_retrieve from $perfhome/var/db/mappings/user2HostGroup.ser"); $user2HostGroup->{$adminName}->{$ownerName}->{$memberName}->{$hgName} = $permission; lock_store($user2HostGroup, "$perfhome/var/db/mappings/user2HostGroup.ser") or die("Can't store host2HostGroup in $perfhome/var/db/mappings/user2HostGroup.ser\n"); } 1;
ktenzer/perfstat
ui/appConfigs/hostGroups/share/act_insertSharedUser.pl
Perl
apache-2.0
1,423
#!/usr/bin/perl -w use strict; use Data::Dumper; use CoGeX; use POSIX; use Getopt::Long; my ($dsgid, $delete_seqs, $db, $user, $pass, $space_min, $space_max, $length_min, $length_max, $use_strand, $value_max); GetOptions ( "dsgid=i" => \$dsgid, "database|db=s" => \$db, "user|u=s" => \$user, "password|pw=s" => \$pass, "space_min|smin=i" => \$space_min, "space_max|smax=i" => \$space_max, "length_min|lmin=i" => \$length_min, "length_max|lmax=i" => \$length_max, "val_max|vmax=i" => \$value_max, "strand|s=i" => \$use_strand, ); unless ($dsgid && $db && $user && $pass) { print qq{ welcome to $0 Usage: $0 -dsgid <database id for dataset group> -db <database name> -u <database user name> -pw <database password> This program will generate fake quant data for a genome in CoGe. Output is to STDOUT as a tab delimited file Options: -space_min | smin min space (nt) between quantitative measurements (default 1) -space_max | smax max space (nt) between quantitative measurements (default 2) -length_min | lmin min length (nt) of quantitative measurements (default 0) Note: min of length 0 means the measurement will span one nt. Nucleotide math is hard. 
-length_max | lmax max length (nt) of quantitative measurements (default 2) -strand use this strand instead of random }; exit; } my $connstr = "dbi:mysql:dbname=$db;host=localhost;port=3306"; my $coge = CoGeX->connect($connstr, $user, $pass ); my $dsg = $coge->resultset('Genome')->find($dsgid); unless ($dsg) { print "unable to find entry for $dsgid\n"; exit; } gen_fake_data(dsg=>$dsg, space_min=>$space_min, space_max=>$space_max, length_min=>$length_min, length_max=>$length_max); sub gen_fake_data { my %opts = @_; my $dsg = $opts{dsg}; my $spacing_min = $opts{space_min}; #min space (nt) between quants my $spacing_max = $opts{space_max}; #max space (nt) between quants my $length_min = $opts{length_min}; #min length (nt) of the quant my $length_max = $opts{length_max}; #max length (nt) of the quant $spacing_min = 1 unless $spacing_min; $spacing_min = 1 if $spacing_min < 1; #can't be less than 1nt away $spacing_max = 2 unless $spacing_max; $spacing_max = $spacing_min if $spacing_max < $spacing_min; #value validation $length_min = 0 if not defined $length_min; #unless $length_min; $length_min = 0 if $length_min < 0; #can't be less than 0nt in size $length_max = 2 if not defined $length_max; #unless $length_max; $length_max = $length_min if $length_max < $length_min; #value validation $value_max = 1 unless defined $value_max; print "# Generated by scripts/gen_fake_data.pl\n"; print "# smin=$spacing_min smax=$spacing_max lmin=$length_min lmax=$length_max vmax=$value_max\n"; print "# CHR START STOP STRAND VALUE\n"; foreach my $chr ($dsg->chromosomes) { my $pos = 1; #starting position my $last = $dsg->get_chromosome_length($chr); #get last position on chromosome; while ($pos <=$last) { my $spacing; $spacing= $spacing_min if $spacing_min == $spacing_max; $spacing = int(rand($spacing_max-$spacing_min+1))+$spacing_min unless $spacing; my $length; $length = $length_min if $length_min == $length_max; $length = int(rand($length_max-$length_min+1))+$length_min unless defined 
$length; my $val1 = rand($value_max); my $val2 = rand(10); my $strand = int(rand(2)); $strand = $use_strand if defined $use_strand; $strand = -1 unless $strand; #if == 0, set to minus strand my $start = $pos+$spacing; last if $start > $last; my $stop = $start+$length; $stop = $last if ($stop > $last); $pos += $spacing+$length; print join (",", $chr, $start, $stop, $strand, $val1, $val2),"\n"; } } }
LyonsLab/coge
scripts/utils/gen_fake_data.pl
Perl
bsd-2-clause
3,999
package NEXT; $VERSION = '0.65'; use Carp; use strict; use overload (); sub NEXT::ELSEWHERE::ancestors { my @inlist = shift; my @outlist = (); while (my $next = shift @inlist) { push @outlist, $next; no strict 'refs'; unshift @inlist, @{"$outlist[-1]::ISA"}; } return @outlist; } sub NEXT::ELSEWHERE::ordered_ancestors { my @inlist = shift; my @outlist = (); while (my $next = shift @inlist) { push @outlist, $next; no strict 'refs'; push @inlist, @{"$outlist[-1]::ISA"}; } return sort { $a->isa($b) ? -1 : $b->isa($a) ? +1 : 0 } @outlist; } sub NEXT::ELSEWHERE::buildAUTOLOAD { my $autoload_name = caller() . '::AUTOLOAD'; no strict 'refs'; *{$autoload_name} = sub { my ($self) = @_; my $depth = 1; until (((caller($depth))[3]||q{}) !~ /^\(eval\)$/) { $depth++ } my $caller = (caller($depth))[3]; my $wanted = $NEXT::AUTOLOAD || $autoload_name; undef $NEXT::AUTOLOAD; my ($caller_class, $caller_method) = do { $caller =~ m{(.*)::(.*)}g }; my ($wanted_class, $wanted_method) = do { $wanted =~ m{(.*)::(.*)}g }; croak "Can't call $wanted from $caller" unless $caller_method eq $wanted_method; my $key = ref $self && overload::Overloaded($self) ? overload::StrVal($self) : $self; local ($NEXT::NEXT{$key,$wanted_method}, $NEXT::SEEN) = ($NEXT::NEXT{$key,$wanted_method}, $NEXT::SEEN); unless ($NEXT::NEXT{$key,$wanted_method}) { my @forebears = NEXT::ELSEWHERE::ancestors ref $self || $self, $wanted_class; while (@forebears) { last if shift @forebears eq $caller_class } no strict 'refs'; @{$NEXT::NEXT{$key,$wanted_method}} = map { my $stash = \%{"${_}::"}; ($stash->{$caller_method} && (*{$stash->{$caller_method}}{CODE})) ? *{$stash->{$caller_method}}{CODE} : () } @forebears unless $wanted_method eq 'AUTOLOAD'; @{$NEXT::NEXT{$key,$wanted_method}} = map { my $stash = \%{"${_}::"}; ($stash->{AUTOLOAD} && (*{$stash->{AUTOLOAD}}{CODE})) ? 
"${_}::AUTOLOAD" : () } @forebears unless @{$NEXT::NEXT{$key,$wanted_method}||[]}; $NEXT::SEEN->{$key,*{$caller}{CODE}}++; } my $call_method = shift @{$NEXT::NEXT{$key,$wanted_method}}; while (do { $wanted_class =~ /^NEXT\b.*\b(UNSEEN|DISTINCT)\b/ } && defined $call_method && $NEXT::SEEN->{$key,$call_method}++) { $call_method = shift @{$NEXT::NEXT{$key,$wanted_method}}; } unless (defined $call_method) { return unless do { $wanted_class =~ /^NEXT:.*:ACTUAL/ }; (local $Carp::CarpLevel)++; croak qq(Can't locate object method "$wanted_method" ), qq(via package "$caller_class"); }; return $self->$call_method(@_[1..$#_]) if ref $call_method eq 'CODE'; no strict 'refs'; do { ($wanted_method=${$caller_class."::AUTOLOAD"}) =~ s/.*::// } if $wanted_method eq 'AUTOLOAD'; $$call_method = $caller_class."::NEXT::".$wanted_method; return $call_method->(@_); }; } no strict 'vars'; package NEXT; NEXT::ELSEWHERE::buildAUTOLOAD(); package NEXT::UNSEEN; @ISA = 'NEXT'; NEXT::ELSEWHERE::buildAUTOLOAD(); package NEXT::DISTINCT; @ISA = 'NEXT'; NEXT::ELSEWHERE::buildAUTOLOAD(); package NEXT::ACTUAL; @ISA = 'NEXT'; NEXT::ELSEWHERE::buildAUTOLOAD(); package NEXT::ACTUAL::UNSEEN; @ISA = 'NEXT'; NEXT::ELSEWHERE::buildAUTOLOAD(); package NEXT::ACTUAL::DISTINCT; @ISA = 'NEXT'; NEXT::ELSEWHERE::buildAUTOLOAD(); package NEXT::UNSEEN::ACTUAL; @ISA = 'NEXT'; NEXT::ELSEWHERE::buildAUTOLOAD(); package NEXT::DISTINCT::ACTUAL; @ISA = 'NEXT'; NEXT::ELSEWHERE::buildAUTOLOAD(); package EVERY; sub EVERY::ELSEWHERE::buildAUTOLOAD { my $autoload_name = caller() . '::AUTOLOAD'; no strict 'refs'; *{$autoload_name} = sub { my ($self) = @_; my $depth = 1; until (((caller($depth))[3]||q{}) !~ /^\(eval\)$/) { $depth++ } my $caller = (caller($depth))[3]; my $wanted = $EVERY::AUTOLOAD || $autoload_name; undef $EVERY::AUTOLOAD; my ($wanted_class, $wanted_method) = do { $wanted =~ m{(.*)::(.*)}g }; my $key = ref($self) && overload::Overloaded($self) ? 
overload::StrVal($self) : $self; local $NEXT::ALREADY_IN_EVERY{$key,$wanted_method} = $NEXT::ALREADY_IN_EVERY{$key,$wanted_method}; return if $NEXT::ALREADY_IN_EVERY{$key,$wanted_method}++; my @forebears = NEXT::ELSEWHERE::ordered_ancestors ref $self || $self, $wanted_class; @forebears = reverse @forebears if do { $wanted_class =~ /\bLAST\b/ }; no strict 'refs'; my %seen; my @every = map { my $sub = "${_}::$wanted_method"; !*{$sub}{CODE} || $seen{$sub}++ ? () : $sub } @forebears unless $wanted_method eq 'AUTOLOAD'; my $want = wantarray; if (@every) { if ($want) { return map {($_, [$self->$_(@_[1..$#_])])} @every; } elsif (defined $want) { return { map {($_, scalar($self->$_(@_[1..$#_])))} @every }; } else { $self->$_(@_[1..$#_]) for @every; return; } } @every = map { my $sub = "${_}::AUTOLOAD"; !*{$sub}{CODE} || $seen{$sub}++ ? () : "${_}::AUTOLOAD" } @forebears; if ($want) { return map { $$_ = ref($self)."::EVERY::".$wanted_method; ($_, [$self->$_(@_[1..$#_])]); } @every; } elsif (defined $want) { return { map { $$_ = ref($self)."::EVERY::".$wanted_method; ($_, scalar($self->$_(@_[1..$#_]))) } @every }; } else { for (@every) { $$_ = ref($self)."::EVERY::".$wanted_method; $self->$_(@_[1..$#_]); } return; } }; } package EVERY::LAST; @ISA = 'EVERY'; EVERY::ELSEWHERE::buildAUTOLOAD(); package EVERY; @ISA = 'NEXT'; EVERY::ELSEWHERE::buildAUTOLOAD(); 1; __END__ =head1 NAME NEXT.pm - Provide a pseudo-class NEXT (et al) that allows method redispatch =head1 SYNOPSIS use NEXT; package A; sub A::method { print "$_[0]: A method\n"; $_[0]->NEXT::method() } sub A::DESTROY { print "$_[0]: A dtor\n"; $_[0]->NEXT::DESTROY() } package B; use base qw( A ); sub B::AUTOLOAD { print "$_[0]: B AUTOLOAD\n"; $_[0]->NEXT::AUTOLOAD() } sub B::DESTROY { print "$_[0]: B dtor\n"; $_[0]->NEXT::DESTROY() } package C; sub C::method { print "$_[0]: C method\n"; $_[0]->NEXT::method() } sub C::AUTOLOAD { print "$_[0]: C AUTOLOAD\n"; $_[0]->NEXT::AUTOLOAD() } sub C::DESTROY { print "$_[0]: C dtor\n"; 
$_[0]->NEXT::DESTROY() } package D; use base qw( B C ); sub D::method { print "$_[0]: D method\n"; $_[0]->NEXT::method() } sub D::AUTOLOAD { print "$_[0]: D AUTOLOAD\n"; $_[0]->NEXT::AUTOLOAD() } sub D::DESTROY { print "$_[0]: D dtor\n"; $_[0]->NEXT::DESTROY() } package main; my $obj = bless {}, "D"; $obj->method(); # Calls D::method, A::method, C::method $obj->missing_method(); # Calls D::AUTOLOAD, B::AUTOLOAD, C::AUTOLOAD # Clean-up calls D::DESTROY, B::DESTROY, A::DESTROY, C::DESTROY =head1 DESCRIPTION NEXT.pm adds a pseudoclass named C<NEXT> to any program that uses it. If a method C<m> calls C<$self-E<gt>NEXT::m()>, the call to C<m> is redispatched as if the calling method had not originally been found. In other words, a call to C<$self-E<gt>NEXT::m()> resumes the depth-first, left-to-right search of C<$self>'s class hierarchy that resulted in the original call to C<m>. Note that this is not the same thing as C<$self-E<gt>SUPER::m()>, which begins a new dispatch that is restricted to searching the ancestors of the current class. C<$self-E<gt>NEXT::m()> can backtrack past the current class -- to look for a suitable method in other ancestors of C<$self> -- whereas C<$self-E<gt>SUPER::m()> cannot. A typical use would be in the destructors of a class hierarchy, as illustrated in the synopsis above. Each class in the hierarchy has a DESTROY method that performs some class-specific action and then redispatches the call up the hierarchy. As a result, when an object of class D is destroyed, the destructors of I<all> its parent classes are called (in depth-first, left-to-right order). Another typical use of redispatch would be in C<AUTOLOAD>'ed methods. If such a method determined that it was not able to handle a particular call, it might choose to redispatch that call, in the hope that some other C<AUTOLOAD> (above it, or to its left) might do better. 
By default, if a redispatch attempt fails to find another method elsewhere in the objects class hierarchy, it quietly gives up and does nothing (but see L<"Enforcing redispatch">). This gracious acquiescence is also unlike the (generally annoying) behaviour of C<SUPER>, which throws an exception if it cannot redispatch. Note that it is a fatal error for any method (including C<AUTOLOAD>) to attempt to redispatch any method that does not have the same name. For example: sub D::oops { print "oops!\n"; $_[0]->NEXT::other_method() } =head2 Enforcing redispatch It is possible to make C<NEXT> redispatch more demandingly (i.e. like C<SUPER> does), so that the redispatch throws an exception if it cannot find a "next" method to call. To do this, simple invoke the redispatch as: $self->NEXT::ACTUAL::method(); rather than: $self->NEXT::method(); The C<ACTUAL> tells C<NEXT> that there must actually be a next method to call, or it should throw an exception. C<NEXT::ACTUAL> is most commonly used in C<AUTOLOAD> methods, as a means to decline an C<AUTOLOAD> request, but preserve the normal exception-on-failure semantics: sub AUTOLOAD { if ($AUTOLOAD =~ /foo|bar/) { # handle here } else { # try elsewhere shift()->NEXT::ACTUAL::AUTOLOAD(@_); } } By using C<NEXT::ACTUAL>, if there is no other C<AUTOLOAD> to handle the method call, an exception will be thrown (as usually happens in the absence of a suitable C<AUTOLOAD>). 
=head2 Avoiding repetitions If C<NEXT> redispatching is used in the methods of a "diamond" class hierarchy: # A B # / \ / # C D # \ / # E use NEXT; package A; sub foo { print "called A::foo\n"; shift->NEXT::foo() } package B; sub foo { print "called B::foo\n"; shift->NEXT::foo() } package C; @ISA = qw( A ); sub foo { print "called C::foo\n"; shift->NEXT::foo() } package D; @ISA = qw(A B); sub foo { print "called D::foo\n"; shift->NEXT::foo() } package E; @ISA = qw(C D); sub foo { print "called E::foo\n"; shift->NEXT::foo() } E->foo(); then derived classes may (re-)inherit base-class methods through two or more distinct paths (e.g. in the way C<E> inherits C<A::foo> twice -- through C<C> and C<D>). In such cases, a sequence of C<NEXT> redispatches will invoke the multiply inherited method as many times as it is inherited. For example, the above code prints: called E::foo called C::foo called A::foo called D::foo called A::foo called B::foo (i.e. C<A::foo> is called twice). In some cases this I<may> be the desired effect within a diamond hierarchy, but in others (e.g. for destructors) it may be more appropriate to call each method only once during a sequence of redispatches. To cover such cases, you can redispatch methods via: $self->NEXT::DISTINCT::method(); rather than: $self->NEXT::method(); This causes the redispatcher to only visit each distinct C<method> method once. That is, to skip any classes in the hierarchy that it has already visited during redispatch. 
So, for example, if the previous example were rewritten: package A; sub foo { print "called A::foo\n"; shift->NEXT::DISTINCT::foo() } package B; sub foo { print "called B::foo\n"; shift->NEXT::DISTINCT::foo() } package C; @ISA = qw( A ); sub foo { print "called C::foo\n"; shift->NEXT::DISTINCT::foo() } package D; @ISA = qw(A B); sub foo { print "called D::foo\n"; shift->NEXT::DISTINCT::foo() } package E; @ISA = qw(C D); sub foo { print "called E::foo\n"; shift->NEXT::DISTINCT::foo() } E->foo(); then it would print: called E::foo called C::foo called A::foo called D::foo called B::foo and omit the second call to C<A::foo> (since it would not be distinct from the first call to C<A::foo>). Note that you can also use: $self->NEXT::DISTINCT::ACTUAL::method(); or: $self->NEXT::ACTUAL::DISTINCT::method(); to get both unique invocation I<and> exception-on-failure. Note that, for historical compatibility, you can also use C<NEXT::UNSEEN> instead of C<NEXT::DISTINCT>. =head2 Invoking all versions of a method with a single call Yet another pseudo-class that NEXT.pm provides is C<EVERY>. Its behaviour is considerably simpler than that of the C<NEXT> family. A call to: $obj->EVERY::foo(); calls I<every> method named C<foo> that the object in C<$obj> has inherited. That is: use NEXT; package A; @ISA = qw(B D X); sub foo { print "A::foo " } package B; @ISA = qw(D X); sub foo { print "B::foo " } package X; @ISA = qw(D); sub foo { print "X::foo " } package D; sub foo { print "D::foo " } package main; my $obj = bless {}, 'A'; $obj->EVERY::foo(); # prints" A::foo B::foo X::foo D::foo Prefixing a method call with C<EVERY::> causes every method in the object's hierarchy with that name to be invoked. As the above example illustrates, they are not called in Perl's usual "left-most-depth-first" order. Instead, they are called "breadth-first-dependency-wise". 
That means that the inheritance tree of the object is traversed breadth-first and the resulting order of classes is used as the sequence in which methods are called. However, that sequence is modified by imposing a rule that the appropriate method of a derived class must be called before the same method of any ancestral class. That's why, in the above example, C<X::foo> is called before C<D::foo>, even though C<D> comes before C<X> in C<@B::ISA>. In general, there's no need to worry about the order of calls. They will be left-to-right, breadth-first, most-derived-first. This works perfectly for most inherited methods (including destructors), but is inappropriate for some kinds of methods (such as constructors, cloners, debuggers, and initializers) where it's more appropriate that the least-derived methods be called first (as more-derived methods may rely on the behaviour of their "ancestors"). In that case, instead of using the C<EVERY> pseudo-class: $obj->EVERY::foo(); # prints" A::foo B::foo X::foo D::foo you can use the C<EVERY::LAST> pseudo-class: $obj->EVERY::LAST::foo(); # prints" D::foo X::foo B::foo A::foo which reverses the order of method call. Whichever version is used, the actual methods are called in the same context (list, scalar, or void) as the original call via C<EVERY>, and return: =over =item * A hash of array references in list context. Each entry of the hash has the fully qualified method name as its key and a reference to an array containing the method's list-context return values as its value. =item * A reference to a hash of scalar values in scalar context. Each entry of the hash has the fully qualified method name as its key and the method's scalar-context return values as its value. =item * Nothing in void context (obviously). =back =head2 Using C<EVERY> methods The typical way to use an C<EVERY> call is to wrap it in another base method, that all classes inherit. 
For example, to ensure that every destructor an object inherits is actually called (as opposed to just the left-most-depth-first-est one): package Base; sub DESTROY { $_[0]->EVERY::Destroy } package Derived1; use base 'Base'; sub Destroy {...} package Derived2; use base 'Base', 'Derived1'; sub Destroy {...} et cetera. Every derived class than needs its own clean-up behaviour simply adds its own C<Destroy> method (I<not> a C<DESTROY> method), which the call to C<EVERY::LAST::Destroy> in the inherited destructor then correctly picks up. Likewise, to create a class hierarchy in which every initializer inherited by a new object is invoked: package Base; sub new { my ($class, %args) = @_; my $obj = bless {}, $class; $obj->EVERY::LAST::Init(\%args); } package Derived1; use base 'Base'; sub Init { my ($argsref) = @_; ... } package Derived2; use base 'Base', 'Derived1'; sub Init { my ($argsref) = @_; ... } et cetera. Every derived class than needs some additional initialization behaviour simply adds its own C<Init> method (I<not> a C<new> method), which the call to C<EVERY::LAST::Init> in the inherited constructor then correctly picks up. =head1 AUTHOR Damian Conway (damian@conway.org) =head1 BUGS AND IRRITATIONS Because it's a module, not an integral part of the interpreter, NEXT.pm has to guess where the surrounding call was found in the method look-up sequence. In the presence of diamond inheritance patterns it occasionally guesses wrong. It's also too slow (despite caching). Comment, suggestions, and patches welcome. =head1 COPYRIGHT Copyright (c) 2000-2001, Damian Conway. All Rights Reserved. This module is free software. It may be used, redistributed and/or modified under the same terms as Perl itself.
operepo/ope
bin/usr/share/perl5/core_perl/NEXT.pm
Perl
mit
18,387
use utf8; # # Copyright 2015 Comcast Cable Communications Management, LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # # package Schema::Result::DeliveryServiceInfoForDomainList; # this view returns the regexp set for a delivery services, ordered by type, set_number. # to use, do # # $rs = $self->db->resultset('DeliveryServiceInfoForDomainList')->search({}, { bind => [ $domain ]}); # # where $id is the deliveryservice id. use strict; use warnings; use base 'DBIx::Class::Core'; __PACKAGE__->table_class('DBIx::Class::ResultSource::View'); __PACKAGE__->table("DeliveryServiceInfoForDomainList:"); __PACKAGE__->result_source_instance->is_virtual(1); __PACKAGE__->result_source_instance->view_definition( " SELECT DISTINCT deliveryservice.xml_id AS xml_id, deliveryservice.id AS ds_id, deliveryservice.dscp AS dscp, deliveryservice.signed AS signed, deliveryservice.qstring_ignore AS qstring_ignore, deliveryservice.org_server_fqdn as org_server_fqdn, deliveryservice.multi_site_origin as multi_site_origin, deliveryservice.range_request_handling as range_request_handling, deliveryservice.origin_shield as origin_shield, regex.pattern AS pattern, retype.name AS re_type, dstype.name AS ds_type, parameter.value AS domain_name, deliveryservice_regex.set_number AS set_number, deliveryservice.edge_header_rewrite as edge_header_rewrite, deliveryservice.mid_header_rewrite as mid_header_rewrite, deliveryservice.regex_remap as regex_remap, deliveryservice.cacheurl as cacheurl, 
deliveryservice.remap_text as remap_text, deliveryservice.protocol as protocol FROM deliveryservice JOIN deliveryservice_regex ON deliveryservice_regex.deliveryservice = deliveryservice.id JOIN regex ON deliveryservice_regex.regex = regex.id JOIN type as retype ON regex.type = retype.id JOIN type as dstype ON deliveryservice.type = dstype.id JOIN profile_parameter ON deliveryservice.profile = profile_parameter.profile JOIN parameter ON parameter.id = profile_parameter.parameter JOIN deliveryservice_server ON deliveryservice_server.deliveryservice = deliveryservice.id JOIN server ON deliveryservice_server.server = server.id WHERE parameter.name = 'domain_name' AND parameter.value = ? ORDER BY ds_id, re_type , deliveryservice_regex.set_number " ); __PACKAGE__->add_columns( "xml_id", { data_type => "varchar", is_nullable => 0, size => 45 }, "org_server_fqdn", { data_type => "varchar", is_nullable => 0, size => 45 }, "multi_site_origin", { data_type => "integer", is_nullable => 0 }, "ds_id", { data_type => "integer", is_nullable => 0 }, "dscp", { data_type => "integer", is_nullable => 0 }, "signed", { data_type => "integer", is_nullable => 0 }, "qstring_ignore", { data_type => "integer", is_nullable => 0 }, "pattern", { data_type => "varchar", is_nullable => 0, size => 45 }, "re_type", { data_type => "varchar", is_nullable => 0, size => 45 }, "ds_type", { data_type => "varchar", is_nullable => 0, size => 45 }, "set_number", { data_type => "integer", is_nullable => 0 }, "domain_name", { data_type => "varchar", is_nullable => 0, size => 45 }, "edge_header_rewrite", { data_type => "varchar", is_nullable => 0, size => 1024 }, "mid_header_rewrite", { data_type => "varchar", is_nullable => 0, size => 1024 }, "regex_remap", { data_type => "varchar", is_nullable => 0, size => 1024 }, "cacheurl", { data_type => "varchar", is_nullable => 0, size => 1024 }, "remap_text", { data_type => "varchar", is_nullable => 0, size => 2048 }, "protocol", { data_type => "tinyint", is_nullable 
=> 0, size => 4 }, "range_request_handling", { data_type => "tinyint", is_nullable => 0, size => 4 }, "origin_shield", { data_type => "varchar", is_nullable => 0, size => 1024 }, ); 1;
PSUdaemon/traffic_control
traffic_ops/app/lib/Schema/Result/DeliveryServiceInfoForDomainList.pm
Perl
apache-2.0
4,567
package Imager::Test; use strict; use Test::Builder; require Exporter; use vars qw(@ISA @EXPORT_OK $VERSION); use Carp qw(croak); use Config; $VERSION = "1.000"; @ISA = qw(Exporter); @EXPORT_OK = qw( diff_text_with_nul test_image_raw test_image_16 test_image test_image_double test_image_mono test_image_gray test_image_gray_16 test_image_named is_color1 is_color3 is_color4 is_color_close3 is_fcolor1 is_fcolor3 is_fcolor4 color_cmp is_image is_imaged is_image_similar isnt_image image_bounds_checks mask_tests test_colorf_gpix test_color_gpix test_colorf_glin can_test_threads ); sub diff_text_with_nul { my ($desc, $text1, $text2, @params) = @_; my $builder = Test::Builder->new; print "# $desc\n"; my $imbase = Imager->new(xsize => 100, ysize => 100); my $imcopy = Imager->new(xsize => 100, ysize => 100); $builder->ok($imbase->string(x => 5, 'y' => 50, size => 20, string => $text1, @params), "$desc - draw text1"); $builder->ok($imcopy->string(x => 5, 'y' => 50, size => 20, string => $text2, @params), "$desc - draw text2"); $builder->isnt_num(Imager::i_img_diff($imbase->{IMG}, $imcopy->{IMG}), 0, "$desc - check result different"); } sub is_color3($$$$$) { my ($color, $red, $green, $blue, $comment) = @_; my $builder = Test::Builder->new; unless (defined $color) { $builder->ok(0, $comment); $builder->diag("color is undef"); return; } unless ($color->can('rgba')) { $builder->ok(0, $comment); $builder->diag("color is not a color object"); return; } my ($cr, $cg, $cb) = $color->rgba; unless ($builder->ok($cr == $red && $cg == $green && $cb == $blue, $comment)) { print <<END_DIAG; Color mismatch: Red: $red vs $cr Green: $green vs $cg Blue: $blue vs $cb END_DIAG return; } return 1; } sub is_color_close3($$$$$$) { my ($color, $red, $green, $blue, $tolerance, $comment) = @_; my $builder = Test::Builder->new; unless (defined $color) { $builder->ok(0, $comment); $builder->diag("color is undef"); return; } unless ($color->can('rgba')) { $builder->ok(0, $comment); $builder->diag("color 
is not a color object"); return; } my ($cr, $cg, $cb) = $color->rgba; unless ($builder->ok(abs($cr - $red) <= $tolerance && abs($cg - $green) <= $tolerance && abs($cb - $blue) <= $tolerance, $comment)) { $builder->diag(<<END_DIAG); Color out of tolerance ($tolerance): Red: expected $red vs received $cr Green: expected $green vs received $cg Blue: expected $blue vs received $cb END_DIAG return; } return 1; } sub is_color4($$$$$$) { my ($color, $red, $green, $blue, $alpha, $comment) = @_; my $builder = Test::Builder->new; unless (defined $color) { $builder->ok(0, $comment); $builder->diag("color is undef"); return; } unless ($color->can('rgba')) { $builder->ok(0, $comment); $builder->diag("color is not a color object"); return; } my ($cr, $cg, $cb, $ca) = $color->rgba; unless ($builder->ok($cr == $red && $cg == $green && $cb == $blue && $ca == $alpha, $comment)) { $builder->diag(<<END_DIAG); Color mismatch: Red: $cr vs $red Green: $cg vs $green Blue: $cb vs $blue Alpha: $ca vs $alpha END_DIAG return; } return 1; } sub is_fcolor4($$$$$$;$) { my ($color, $red, $green, $blue, $alpha, $comment_or_diff, $comment_or_undef) = @_; my ($comment, $mindiff); if (defined $comment_or_undef) { ( $mindiff, $comment ) = ( $comment_or_diff, $comment_or_undef ) } else { ( $mindiff, $comment ) = ( 0.001, $comment_or_diff ) } my $builder = Test::Builder->new; unless (defined $color) { $builder->ok(0, $comment); $builder->diag("color is undef"); return; } unless ($color->can('rgba')) { $builder->ok(0, $comment); $builder->diag("color is not a color object"); return; } my ($cr, $cg, $cb, $ca) = $color->rgba; unless ($builder->ok(abs($cr - $red) <= $mindiff && abs($cg - $green) <= $mindiff && abs($cb - $blue) <= $mindiff && abs($ca - $alpha) <= $mindiff, $comment)) { $builder->diag(<<END_DIAG); Color mismatch: Red: $cr vs $red Green: $cg vs $green Blue: $cb vs $blue Alpha: $ca vs $alpha END_DIAG return; } return 1; } sub is_fcolor1($$$;$) { my ($color, $grey, $comment_or_diff, 
$comment_or_undef) = @_; my ($comment, $mindiff); if (defined $comment_or_undef) { ( $mindiff, $comment ) = ( $comment_or_diff, $comment_or_undef ) } else { ( $mindiff, $comment ) = ( 0.001, $comment_or_diff ) } my $builder = Test::Builder->new; unless (defined $color) { $builder->ok(0, $comment); $builder->diag("color is undef"); return; } unless ($color->can('rgba')) { $builder->ok(0, $comment); $builder->diag("color is not a color object"); return; } my ($cgrey) = $color->rgba; unless ($builder->ok(abs($cgrey - $grey) <= $mindiff, $comment)) { print <<END_DIAG; Color mismatch: Gray: $cgrey vs $grey END_DIAG return; } return 1; } sub is_fcolor3($$$$$;$) { my ($color, $red, $green, $blue, $comment_or_diff, $comment_or_undef) = @_; my ($comment, $mindiff); if (defined $comment_or_undef) { ( $mindiff, $comment ) = ( $comment_or_diff, $comment_or_undef ) } else { ( $mindiff, $comment ) = ( 0.001, $comment_or_diff ) } my $builder = Test::Builder->new; unless (defined $color) { $builder->ok(0, $comment); $builder->diag("color is undef"); return; } unless ($color->can('rgba')) { $builder->ok(0, $comment); $builder->diag("color is not a color object"); return; } my ($cr, $cg, $cb) = $color->rgba; unless ($builder->ok(abs($cr - $red) <= $mindiff && abs($cg - $green) <= $mindiff && abs($cb - $blue) <= $mindiff, $comment)) { $builder->diag(<<END_DIAG); Color mismatch: Red: $cr vs $red Green: $cg vs $green Blue: $cb vs $blue END_DIAG return; } return 1; } sub is_color1($$$) { my ($color, $grey, $comment) = @_; my $builder = Test::Builder->new; unless (defined $color) { $builder->ok(0, $comment); $builder->diag("color is undef"); return; } unless ($color->can('rgba')) { $builder->ok(0, $comment); $builder->diag("color is not a color object"); return; } my ($cgrey) = $color->rgba; unless ($builder->ok($cgrey == $grey, $comment)) { $builder->diag(<<END_DIAG); Color mismatch: Grey: $grey vs $cgrey END_DIAG return; } return 1; } sub test_image_raw { my 
$green=Imager::i_color_new(0,255,0,255); my $blue=Imager::i_color_new(0,0,255,255); my $red=Imager::i_color_new(255,0,0,255); my $img=Imager::ImgRaw::new(150,150,3); Imager::i_box_filled($img,70,25,130,125,$green); Imager::i_box_filled($img,20,25,80,125,$blue); Imager::i_arc($img,75,75,30,0,361,$red); Imager::i_conv($img,[0.1, 0.2, 0.4, 0.2, 0.1]); $img; } sub test_image { my $green = Imager::Color->new(0, 255, 0, 255); my $blue = Imager::Color->new(0, 0, 255, 255); my $red = Imager::Color->new(255, 0, 0, 255); my $img = Imager->new(xsize => 150, ysize => 150); $img->box(filled => 1, color => $green, box => [ 70, 24, 130, 124 ]); $img->box(filled => 1, color => $blue, box => [ 20, 26, 80, 126 ]); $img->arc(x => 75, y => 75, r => 30, color => $red); $img->filter(type => 'conv', coef => [ 0.1, 0.2, 0.4, 0.2, 0.1 ]); $img; } sub test_image_16 { my $green = Imager::Color->new(0, 255, 0, 255); my $blue = Imager::Color->new(0, 0, 255, 255); my $red = Imager::Color->new(255, 0, 0, 255); my $img = Imager->new(xsize => 150, ysize => 150, bits => 16); $img->box(filled => 1, color => $green, box => [ 70, 24, 130, 124 ]); $img->box(filled => 1, color => $blue, box => [ 20, 26, 80, 126 ]); $img->arc(x => 75, y => 75, r => 30, color => $red); $img->filter(type => 'conv', coef => [ 0.1, 0.2, 0.4, 0.2, 0.1 ]); $img; } sub test_image_double { my $green = Imager::Color->new(0, 255, 0, 255); my $blue = Imager::Color->new(0, 0, 255, 255); my $red = Imager::Color->new(255, 0, 0, 255); my $img = Imager->new(xsize => 150, ysize => 150, bits => 'double'); $img->box(filled => 1, color => $green, box => [ 70, 24, 130, 124 ]); $img->box(filled => 1, color => $blue, box => [ 20, 26, 80, 126 ]); $img->arc(x => 75, y => 75, r => 30, color => $red); $img->filter(type => 'conv', coef => [ 0.1, 0.2, 0.4, 0.2, 0.1 ]); $img; } sub test_image_gray { my $g50 = Imager::Color->new(128, 128, 128); my $g30 = Imager::Color->new(76, 76, 76); my $g70 = Imager::Color->new(178, 178, 178); my $img = 
Imager->new(xsize => 150, ysize => 150, channels => 1); $img->box(filled => 1, color => $g50, box => [ 70, 24, 130, 124 ]); $img->box(filled => 1, color => $g30, box => [ 20, 26, 80, 126 ]); $img->arc(x => 75, y => 75, r => 30, color => $g70); $img->filter(type => 'conv', coef => [ 0.1, 0.2, 0.4, 0.2, 0.1 ]); return $img; } sub test_image_gray_16 { my $g50 = Imager::Color->new(128, 128, 128); my $g30 = Imager::Color->new(76, 76, 76); my $g70 = Imager::Color->new(178, 178, 178); my $img = Imager->new(xsize => 150, ysize => 150, channels => 1, bits => 16); $img->box(filled => 1, color => $g50, box => [ 70, 24, 130, 124 ]); $img->box(filled => 1, color => $g30, box => [ 20, 26, 80, 126 ]); $img->arc(x => 75, y => 75, r => 30, color => $g70); $img->filter(type => 'conv', coef => [ 0.1, 0.2, 0.4, 0.2, 0.1 ]); return $img; } sub test_image_mono { require Imager::Fill; my $fh = Imager::Fill->new(hatch => 'check1x1'); my $img = Imager->new(xsize => 150, ysize => 150, type => "paletted"); my $black = Imager::Color->new(0, 0, 0); my $white = Imager::Color->new(255, 255, 255); $img->addcolors(colors => [ $black, $white ]); $img->box(fill => $fh, box => [ 70, 24, 130, 124 ]); $img->box(filled => 1, color => $white, box => [ 20, 26, 80, 126 ]); $img->arc(x => 75, y => 75, r => 30, color => $black, aa => 0); return $img; } my %name_to_sub = ( basic => \&test_image, basic16 => \&test_image_16, basic_double => \&test_image_double, gray => \&test_image_gray, gray16 => \&test_image_gray_16, mono => \&test_image_mono, ); sub test_image_named { my $name = shift or croak("No name supplied to test_image_named()"); my $sub = $name_to_sub{$name} or croak("Unknown name $name supplied to test_image_named()"); return $sub->(); } sub _low_image_diff_check { my ($left, $right, $comment) = @_; my $builder = Test::Builder->new; unless (defined $left) { $builder->ok(0, $comment); $builder->diag("left is undef"); return; } unless (defined $right) { $builder->ok(0, $comment); $builder->diag("right 
is undef"); return; } unless ($left->{IMG}) { $builder->ok(0, $comment); $builder->diag("left image has no low level object"); return; } unless ($right->{IMG}) { $builder->ok(0, $comment); $builder->diag("right image has no low level object"); return; } unless ($left->getwidth == $right->getwidth) { $builder->ok(0, $comment); $builder->diag("left width " . $left->getwidth . " vs right width " . $right->getwidth); return; } unless ($left->getheight == $right->getheight) { $builder->ok(0, $comment); $builder->diag("left height " . $left->getheight . " vs right height " . $right->getheight); return; } unless ($left->getchannels == $right->getchannels) { $builder->ok(0, $comment); $builder->diag("left channels " . $left->getchannels . " vs right channels " . $right->getchannels); return; } return 1; } sub is_image_similar($$$$) { my ($left, $right, $limit, $comment) = @_; { local $Test::Builder::Level = $Test::Builder::Level + 1; _low_image_diff_check($left, $right, $comment) or return; } my $builder = Test::Builder->new; my $diff = Imager::i_img_diff($left->{IMG}, $right->{IMG}); if ($diff > $limit) { $builder->ok(0, $comment); $builder->diag("image data difference > $limit - $diff"); if ($limit == 0) { # find the first mismatch PIXELS: for my $y (0 .. $left->getheight()-1) { for my $x (0.. 
$left->getwidth()-1) { my @lsamples = $left->getsamples(x => $x, y => $y, width => 1); my @rsamples = $right->getsamples(x => $x, y => $y, width => 1); if ("@lsamples" ne "@rsamples") { $builder->diag("first mismatch at ($x, $y) - @lsamples vs @rsamples"); last PIXELS; } } } } return; } return $builder->ok(1, $comment); } sub is_image($$$) { my ($left, $right, $comment) = @_; local $Test::Builder::Level = $Test::Builder::Level + 1; return is_image_similar($left, $right, 0, $comment); } sub is_imaged($$$;$) { my $epsilon = Imager::i_img_epsilonf(); if (@_ > 3) { ($epsilon) = splice @_, 2, 1; } my ($left, $right, $comment) = @_; { local $Test::Builder::Level = $Test::Builder::Level + 1; _low_image_diff_check($left, $right, $comment) or return; } my $builder = Test::Builder->new; my $same = Imager::i_img_samef($left->{IMG}, $right->{IMG}, $epsilon, $comment); if (!$same) { $builder->ok(0, $comment); $builder->diag("images different"); # find the first mismatch PIXELS: for my $y (0 .. $left->getheight()-1) { for my $x (0.. 
$left->getwidth()-1) { my @lsamples = $left->getsamples(x => $x, y => $y, width => 1, type => "float"); my @rsamples = $right->getsamples(x => $x, y => $y, width => 1, type => "float"); if ("@lsamples" ne "@rsamples") { $builder->diag("first mismatch at ($x, $y) - @lsamples vs @rsamples"); last PIXELS; } } } return; } return $builder->ok(1, $comment); } sub isnt_image { my ($left, $right, $comment) = @_; my $builder = Test::Builder->new; my $diff = Imager::i_img_diff($left->{IMG}, $right->{IMG}); return $builder->ok($diff, "$comment"); } sub image_bounds_checks { my $im = shift; my $builder = Test::Builder->new; $builder->ok(!$im->getpixel(x => -1, y => 0), 'bounds check get (-1, 0)'); $builder->ok(!$im->getpixel(x => 10, y => 0), 'bounds check get (10, 0)'); $builder->ok(!$im->getpixel(x => 0, y => -1), 'bounds check get (0, -1)'); $builder->ok(!$im->getpixel(x => 0, y => 10), 'bounds check get (0, 10)'); $builder->ok(!$im->getpixel(x => -1, y => 0), 'bounds check get (-1, 0) float'); $builder->ok(!$im->getpixel(x => 10, y => 0), 'bounds check get (10, 0) float'); $builder->ok(!$im->getpixel(x => 0, y => -1), 'bounds check get (0, -1) float'); $builder->ok(!$im->getpixel(x => 0, y => 10), 'bounds check get (0, 10) float'); my $black = Imager::Color->new(0, 0, 0); require Imager::Color::Float; my $blackf = Imager::Color::Float->new(0, 0, 0); $builder->ok(!$im->setpixel(x => -1, y => 0, color => $black), 'bounds check set (-1, 0)'); $builder->ok(!$im->setpixel(x => 10, y => 0, color => $black), 'bounds check set (10, 0)'); $builder->ok(!$im->setpixel(x => 0, y => -1, color => $black), 'bounds check set (0, -1)'); $builder->ok(!$im->setpixel(x => 0, y => 10, color => $black), 'bounds check set (0, 10)'); $builder->ok(!$im->setpixel(x => -1, y => 0, color => $blackf), 'bounds check set (-1, 0) float'); $builder->ok(!$im->setpixel(x => 10, y => 0, color => $blackf), 'bounds check set (10, 0) float'); $builder->ok(!$im->setpixel(x => 0, y => -1, color => $blackf), 
'bounds check set (0, -1) float'); $builder->ok(!$im->setpixel(x => 0, y => 10, color => $blackf), 'bounds check set (0, 10) float'); } sub test_colorf_gpix { my ($im, $x, $y, $expected, $epsilon, $comment) = @_; my $builder = Test::Builder->new; defined $comment or $comment = ''; my $c = Imager::i_gpixf($im, $x, $y); unless ($c) { $builder->ok(0, "$comment - retrieve color at ($x,$y)"); return; } unless ($builder->ok(colorf_cmp($c, $expected, $epsilon) == 0, "$comment - got right color ($x, $y)")) { my @c = $c->rgba; my @exp = $expected->rgba; $builder->diag(<<EOS); # got: ($c[0], $c[1], $c[2]) # expected: ($exp[0], $exp[1], $exp[2]) EOS } 1; } sub test_color_gpix { my ($im, $x, $y, $expected, $comment) = @_; my $builder = Test::Builder->new; defined $comment or $comment = ''; my $c = Imager::i_get_pixel($im, $x, $y); unless ($c) { $builder->ok(0, "$comment - retrieve color at ($x,$y)"); return; } unless ($builder->ok(color_cmp($c, $expected) == 0, "got right color ($x, $y)")) { my @c = $c->rgba; my @exp = $expected->rgba; $builder->diag(<<EOS); # got: ($c[0], $c[1], $c[2]) # expected: ($exp[0], $exp[1], $exp[2]) EOS return; } return 1; } sub test_colorf_glin { my ($im, $x, $y, $pels, $comment) = @_; my $builder = Test::Builder->new; my @got = Imager::i_glinf($im, $x, $x+@$pels, $y); @got == @$pels or return $builder->is_num(scalar(@got), scalar(@$pels), "$comment - pixels retrieved"); return $builder->ok(!grep(colorf_cmp($pels->[$_], $got[$_], 0.005), 0..$#got), "$comment - check colors ($x, $y)"); } sub colorf_cmp { my ($c1, $c2, $epsilon) = @_; defined $epsilon or $epsilon = 0; my @s1 = $c1->rgba; my @s2 = $c2->rgba; # print "# (",join(",", @s1[0..2]),") <=> (",join(",", @s2[0..2]),")\n"; return abs($s1[0]-$s2[0]) >= $epsilon && $s1[0] <=> $s2[0] || abs($s1[1]-$s2[1]) >= $epsilon && $s1[1] <=> $s2[1] || abs($s1[2]-$s2[2]) >= $epsilon && $s1[2] <=> $s2[2]; } sub color_cmp { my ($c1, $c2) = @_; my @s1 = $c1->rgba; my @s2 = $c2->rgba; return $s1[0] <=> $s2[0] || 
$s1[1] <=> $s2[1] || $s1[2] <=> $s2[2]; } # these test the action of the channel mask on the image supplied # which should be an OO image. sub mask_tests { my ($im, $epsilon) = @_; my $builder = Test::Builder->new; defined $epsilon or $epsilon = 0; # we want to check all four of ppix() and plin(), ppix() and plinf() # basic test procedure: # first using default/all 1s mask, set to white # make sure we got white # set mask to skip a channel, set to grey # make sure only the right channels set print "# channel mask tests\n"; # 8-bit color tests my $white = Imager::NC(255, 255, 255); my $grey = Imager::NC(128, 128, 128); my $white_grey = Imager::NC(128, 255, 128); print "# with ppix\n"; $builder->ok($im->setmask(mask=>~0), "set to default mask"); $builder->ok($im->setpixel(x=>0, 'y'=>0, color=>$white), "set to white all channels"); test_color_gpix($im->{IMG}, 0, 0, $white, "ppix"); $builder->ok($im->setmask(mask=>0xF-0x2), "set channel to exclude channel1"); $builder->ok($im->setpixel(x=>0, 'y'=>0, color=>$grey), "set to grey, no channel 2"); test_color_gpix($im->{IMG}, 0, 0, $white_grey, "ppix masked"); print "# with plin\n"; $builder->ok($im->setmask(mask=>~0), "set to default mask"); $builder->ok($im->setscanline(x=>0, 'y'=>1, pixels => [$white]), "set to white all channels"); test_color_gpix($im->{IMG}, 0, 1, $white, "plin"); $builder->ok($im->setmask(mask=>0xF-0x2), "set channel to exclude channel1"); $builder->ok($im->setscanline(x=>0, 'y'=>1, pixels=>[$grey]), "set to grey, no channel 2"); test_color_gpix($im->{IMG}, 0, 1, $white_grey, "plin masked"); # float color tests my $whitef = Imager::NCF(1.0, 1.0, 1.0); my $greyf = Imager::NCF(0.5, 0.5, 0.5); my $white_greyf = Imager::NCF(0.5, 1.0, 0.5); print "# with ppixf\n"; $builder->ok($im->setmask(mask=>~0), "set to default mask"); $builder->ok($im->setpixel(x=>0, 'y'=>2, color=>$whitef), "set to white all channels"); test_colorf_gpix($im->{IMG}, 0, 2, $whitef, $epsilon, "ppixf"); 
$builder->ok($im->setmask(mask=>0xF-0x2), "set channel to exclude channel1"); $builder->ok($im->setpixel(x=>0, 'y'=>2, color=>$greyf), "set to grey, no channel 2"); test_colorf_gpix($im->{IMG}, 0, 2, $white_greyf, $epsilon, "ppixf masked"); print "# with plinf\n"; $builder->ok($im->setmask(mask=>~0), "set to default mask"); $builder->ok($im->setscanline(x=>0, 'y'=>3, pixels => [$whitef]), "set to white all channels"); test_colorf_gpix($im->{IMG}, 0, 3, $whitef, $epsilon, "plinf"); $builder->ok($im->setmask(mask=>0xF-0x2), "set channel to exclude channel1"); $builder->ok($im->setscanline(x=>0, 'y'=>3, pixels=>[$greyf]), "set to grey, no channel 2"); test_colorf_gpix($im->{IMG}, 0, 3, $white_greyf, $epsilon, "plinf masked"); } 1; __END__ =head1 NAME Imager::Test - common functions used in testing Imager =head1 SYNOPSIS use Imager::Test 'diff_text_with_nul'; diff_text_with_nul($test_name, $text1, $text2, @string_options); =head1 DESCRIPTION This is a repository of functions used in testing Imager. Some functions will only be useful in testing Imager itself, while others should be useful in testing modules that use Imager. No functions are exported by default. =head1 FUNCTIONS =head2 Test functions =for stopwords OO =over =item is_color1($color, $grey, $comment) Tests if the first channel of $color matches $grey. =item is_color3($color, $red, $green, $blue, $comment) Tests if $color matches the given ($red, $green, $blue) =item is_color4($color, $red, $green, $blue, $alpha, $comment) Tests if $color matches the given ($red, $green, $blue, $alpha) =item is_fcolor1($fcolor, $grey, $comment) =item is_fcolor1($fcolor, $grey, $epsilon, $comment) Tests if $fcolor's first channel is within $epsilon of ($grey). For the first form $epsilon is taken as 0.001. =item is_fcolor3($fcolor, $red, $green, $blue, $comment) =item is_fcolor3($fcolor, $red, $green, $blue, $epsilon, $comment) Tests if $fcolor's channels are within $epsilon of ($red, $green, $blue). 
For the first form $epsilon is taken as 0.001. =item is_fcolor4($fcolor, $red, $green, $blue, $alpha, $comment) =item is_fcolor4($fcolor, $red, $green, $blue, $alpha, $epsilon, $comment) Tests if $fcolor's channels are within $epsilon of ($red, $green, $blue, $alpha). For the first form $epsilon is taken as 0.001. =item is_image($im1, $im2, $comment) Tests if the 2 images have the same content. Both images must be defined, have the same width, height, channels and the same color in each pixel. The color comparison is done at 8-bits per pixel. The color representation such as direct vs paletted, bits per sample are not checked. Equivalent to is_image_similar($im1, $im2, 0, $comment). =item is_imaged($im, $im2, $comment) =item is_imaged($im, $im2, $epsilon, $comment) Tests if the two images have the same content at the double/sample level. C<$epsilon> defaults to the platform DBL_EPSILON multiplied by four. =item is_image_similar($im1, $im2, $maxdiff, $comment) Tests if the 2 images have similar content. Both images must be defined, have the same width, height and channels. The cum of the squares of the differences of each sample are calculated and must be less than or equal to I<$maxdiff> for the test to pass. The color comparison is done at 8-bits per pixel. The color representation such as direct vs paletted, bits per sample are not checked. =item isnt_image($im1, $im2, $comment) Tests that the two images are different. For regressions tests where something (like text output of "0") produced no change, but should have produced a change. =item test_colorf_gpix($im, $x, $y, $expected, $epsilon, $comment) Retrieves the pixel ($x,$y) from the low-level image $im and compares it to the floating point color $expected, with a tolerance of epsilon. =item test_color_gpix($im, $x, $y, $expected, $comment) Retrieves the pixel ($x,$y) from the low-level image $im and compares it to the floating point color $expected. 
=item test_colorf_glin($im, $x, $y, $pels, $comment) Retrieves the floating point pixels ($x, $y)-[$x+@$pels, $y] from the low level image $im and compares them against @$pels. =item is_color_close3($color, $red, $green, $blue, $tolerance, $comment) Tests if $color's first three channels are within $tolerance of ($red, $green, $blue). =back =head2 Test suite functions Functions that perform one or more tests, typically used to test various parts of Imager's implementation. =over =item image_bounds_checks($im) Attempts to write to various pixel positions outside the edge of the image to ensure that it fails in those locations. Any new image type should pass these tests. Does 16 separate tests. =item mask_tests($im, $epsilon) Perform a standard set of mask tests on the OO image $im. Does 24 separate tests. =item diff_text_with_nul($test_name, $text1, $text2, @options) Creates 2 test images and writes $text1 to the first image and $text2 to the second image with the string() method. Each call adds 3 C<ok>/C<not ok> to the output of the test script. Extra options that should be supplied include the font and either a color or channel parameter. This was explicitly created for regression tests on #21770. =back =head2 Helper functions =over =item test_image_raw() Returns a 150x150x3 Imager::ImgRaw test image. =item test_image() Returns a 150x150x3 8-bit/sample OO test image. Name: C<basic>. =item test_image_16() Returns a 150x150x3 16-bit/sample OO test image. Name: C<basic16> =item test_image_double() Returns a 150x150x3 double/sample OO test image. Name: C<basic_double>. =item test_image_gray() Returns a 150x150 single channel OO test image. Name: C<gray>. =item test_image_gray_16() Returns a 150x150 16-bit/sample single channel OO test image. Name: C<gray16>. =item test_image_mono() Returns a 150x150 bilevel image that passes the is_bilevel() test. Name: C<mono>. =item test_image_named($name) Return one of the other test images above based on name. 
=item color_cmp($c1, $c2) Performs an ordering of 3-channel colors (like <=>). =item colorf_cmp($c1, $c2) Performs an ordering of 3-channel floating point colors (like <=>). =back =head1 AUTHOR Tony Cook <tony@develop-help.com> =cut
Dokaponteam/ITF_Project
xampp/perl/vendor/lib/Imager/Test.pm
Perl
mit
26,248
package MojoX::Authen::Handler; use base 'Mojo::Base'; use strict; use warnings; use Carp; use Mojo::Log; __PACKAGE__->attr(cb => sub { }); __PACKAGE__->attr(log => sub { Mojo::Log->new }); sub handle { croak 'Method \'handle\' unimplemented by subclass!' } sub finish { } 1;
Kulturchaot/mojox-authen
lib/MojoX/Authen/Handler.pm
Perl
mit
283
######################################################################## use strict; use Data::Dumper; use Carp; =head1 svr_scanP Scan proteins for a designated pattern ------ Example: svr_all_features 83333.1 peg | svr_scanP 'CxxC' would produce a 3-column table. The first column would contain the string in a PEG that matched the pattern, the second the location in the PEG encoded by a gene, and the third the PEG id. Here is a more interesting pattern: CxxH 2...5 GC It would match things like CAAGRIC, CIGHAAAAAG, etc. The x...y notation means "a string of x to y characters in length". The character 'x' is will match any amino acid. The match is case-insensitive. ------ The standard input should be a tab-separated table (i.e., each line is a tab-separated set of fields). Normally, the last field in each line would contain the PEG for which functions are being requested. If some other column contains the PEGs, use -c N where N is the column (from 1) that contains the PEG in each case. This is a pipe command. The input is taken from the standard input, and the output is to the standard output. =head2 Command-Line Options =over 4 =item -c Column This is used only if the column containing PEGs is not the last. =back =head2 Output Format The standard output is a tab-delimited file. It consists of the input file with three extra columns added (the matched string, the location, and the matched PEG). Note that when the pattern is made up of multiple components, you get embedded blanks within the field giving the matched string. =cut use SeedUtils; use SAPserver; my $sapObject = SAPserver->new(); use Getopt::Long; my $usage = "usage: svr_scanP Pattern [-c column]"; my $column; my $rc = GetOptions('c=i' => \$column); if (! $rc) { print STDERR $usage; exit } (@ARGV > 0) || die "you need to specifiy a pattern"; my @lines = map { chomp; [split(/\t/,$_)] } <STDIN>; (@lines > 0) || exit; if (! 
$column) { $column = @{$lines[0]} } my @fids = map { $_->[$column-1] } @lines; my $matches = &scan_for_matches($sapObject,$ARGV[0],\@fids); foreach $_ (@lines) { my $hits = $matches->{$_->[$column-1]}; foreach my $hit (@$hits) { print join("\t",@$_,@$hit),"\n"; } } sub scan_for_matches { my($sapObject,$pat,$pegs) = @_; my $hitsH; my $hitsF = "tmp.scanP.hits.$$"; my $patternF = "tmp.scanP.pattern.$$"; open(TMP,">",$patternF) || die "could not open $patternF"; print TMP $pat,"\n"; close(TMP); open(HITS,"| svr_fasta -protein -fasta | scan_for_matches -p $patternF > $hitsF") || die "could not run scan_for_matches"; foreach my $peg (@$pegs) { print HITS "$peg\n"; } close(HITS); open(HITS,"<",$hitsF) || die "could not open $hitsF"; while (defined($_ = <HITS>) && ($_ =~ /^>(.*)\:\[(\d+),(\d+)\]/)) { my $peg = $1; my $loc = $2; my $str = <HITS>; chomp $str; push(@{$hitsH->{$peg}},[$loc,$str]); } unlink($hitsF,$patternF); return $hitsH; }
kbase/kb_seed
service-scripts/svr_scanP.pl
Perl
mit
3,002
#!/usr/bin/perl use strict; use warnings; # Formats ANNOVAR variants from .annovar_multianno.txt that # are not formatted like the rest of the lines in the file. # Works with inconsistent_lines.txt my $file = shift; open(my $fh, '<', $file) or die "Can't open $file:$!\n"; while (<$fh>) { chomp $_; next if $_ =~ '^Chr'; my ($chr, $start, $end, $ref, $alt, $ann, $gene, $gene_detail, $exonic_ann, $aa, $vcf_file_cols) = split(/\t/, $_); my $pos = $chr.':'.$start.':'.$end; my @details = split(';', $gene_detail); my @trxs; my $trxs; foreach (@details) { my @gene_details = split(',', $_); foreach (@gene_details) { my @id_details = split(':', $_); push(@trxs, $id_details[0]); } } if (scalar @trxs > 1) { $trxs = join(';', @trxs); } else { $trxs = "@trxs"; } print "$pos\t$trxs\t$ann\n"; } close $fh;
geniusphil/2017GenomicEpidemiologyWorkshop
script/make_annovar_variant_table_inconsistent_lines.pl
Perl
mit
871
#!/usr/bin/perl # # ***** BEGIN LICENSE BLOCK ***** # Zimbra Collaboration Suite Server # Copyright (C) 2006, 2007, 2008, 2009, 2010 Zimbra, Inc. # # The contents of this file are subject to the Zimbra Public License # Version 1.3 ("License"); you may not use this file except in # compliance with the License. You may obtain a copy of the License at # http://www.zimbra.com/license. # # Software distributed under the License is distributed on an "AS IS" # basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. # ***** END LICENSE BLOCK ***** # use strict; use Migrate; my $startTime = time(); my $CREATE_DB_SQL; my $NUM_GROUPS = 1000; my %MBOX_GROUPS; my %DROPPED_DBS; sub init(); sub addGroupIdColumn(); sub getGroupId($); sub createMailboxGroup($); sub exportImportMailbox($); sub dropDatabase($); sub dropOrphans(); init(); Migrate::verifySchemaVersion(27); my $sql; my $sqlfile = "/tmp/migrate-MailboxGroup.sql"; open(SQL, "> $sqlfile") or die "Unable to open $sqlfile for write: $!"; $sql = addGroupIdColumn(); print SQL $sql; print SQL "\n"; my @mailboxIds = Migrate::getMailboxIds(); foreach my $id (@mailboxIds) { my $oldDb = "mailbox$id"; my $newDb = "mboxgroup" . 
getGroupId($id); $sql = createMailboxGroup($newDb); if ($sql) { print SQL $sql; print SQL "\n"; } $sql = exportImportMailbox($id); print SQL $sql; print SQL "\n"; $sql = dropDatabase($oldDb); print SQL $sql; print SQL "\n"; } #$sql = dropOrphans(); #print SQL $sql; #print SQL "\n"; close(SQL); print "Executing SQL statements in $sqlfile\n"; my $tempFile = "/tmp/migrate20060911.out.$$"; my $rc = 0xffff & system("/opt/zimbra/bin/mysql -v -A zimbra < $sqlfile > $tempFile 2>&1"); if ($rc != 0) { die "mysql invocation failed, exit code = $rc: $!"; open(OUTPUT, $tempFile); while (<OUTPUT>) { print; } close(OUTPUT); } print "Successfully finished executing SQL statements in $sqlfile\n"; #unlink($sqlfile); Migrate::updateSchemaVersion(27, 28); my $elapsed = time() - $startTime; print "Took $elapsed seconds\n"; exit(0); ##################### sub init() { $CREATE_DB_SQL = getMboxGroupSchemaSql(); my $numGroups = `zmlocalconfig -q -m nokey zimbra_mailbox_groups`; chomp($numGroups) if (defined($numGroups)); $numGroups += 0; # make sure it's a number $numGroups = 1000 if ($numGroups == 0); $NUM_GROUPS = $numGroups; } sub dropDatabase($) { my $db = shift; $DROPPED_DBS{$db} = 1; return "# Dropping database $db\nDROP DATABASE IF EXISTS $db;\n"; } sub dropOrphans() { my $sql = ''; my @orphans = Migrate::runSql("SHOW DATABASES LIKE 'mailbox\%'"); foreach my $db (@orphans) { if (!exists($DROPPED_DBS{$db})) { $sql .= dropDatabase($db); } } return $sql; } sub getGroupId($) { my $id = shift; return ($id - 1) % $NUM_GROUPS + 1; } sub addGroupIdColumn() { my $sql = <<_ADD_GROUP_ID_; # Adding group_id column to zimbra.mailbox table ALTER TABLE zimbra.mailbox ADD COLUMN group_id INTEGER UNSIGNED NOT NULL AFTER id; UPDATE zimbra.mailbox SET group_id = MOD(id - 1, $NUM_GROUPS) + 1; _ADD_GROUP_ID_ return $sql; } sub createMailboxGroup($) { my $db = shift; return '' if (exists $MBOX_GROUPS{$db}); my $sql = $CREATE_DB_SQL; $sql =~ s/\${DATABASE_NAME}/$db/gm; $MBOX_GROUPS{$db} = 1; return $sql; } 
sub getDumpFile($$) {
    # Build the per-mailbox, per-table dump file path; $$ (the pid) keeps
    # concurrent runs from colliding.
    my ($id, $table) = @_;
    return "/tmp/migrate20060911-$$-mbox$id-$table.dat";
}

# Return the SQL that exports the four tables of mailbox$id to flat files,
# reloads them (with a leading mailbox_id column) into the mboxgroupN
# database chosen by getGroupId(), and finally removes the dump files via the
# mysql client's "system rm -f" command.
sub exportImportMailbox($) {
    my $id = shift;
    my $oldDb = "mailbox$id";
    my $newDb = "mboxgroup" . getGroupId($id);
    my $sql = '';
    my $file;
    # Shared FIELDS/LINES clause so the OUTFILE and LOAD DATA formats match.
    my $exportOptions = "FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' LINES TERMINATED BY '\\n'";

    my $fileMailItem = getDumpFile($id, 'mail_item');
    my $fileOpenConversation = getDumpFile($id, 'open_conversation');
    my $fileAppointment = getDumpFile($id, 'appointment');
    my $fileTombstone = getDumpFile($id, 'tombstone');

    ##########################
    #
    # Export tables
    #
    ##########################

    $sql .= <<_EXPORT_MAIL_ITEM_;
# Exporting $oldDb.mail_item data to $fileMailItem
SELECT $id, id, type, parent_id, folder_id, index_id, imap_id, date, size,
  volume_id, blob_digest, unread, flags, tags, sender, subject, metadata,
  mod_metadata, change_date, mod_content
INTO OUTFILE '$fileMailItem' $exportOptions
FROM $oldDb.mail_item;
_EXPORT_MAIL_ITEM_

    $sql .= <<_EXPORT_OPEN_CONVERSATION_;
# Exporting $oldDb.open_conversation data to $fileOpenConversation
SELECT $id, hash, conv_id
INTO OUTFILE '$fileOpenConversation' $exportOptions
FROM $oldDb.open_conversation;
_EXPORT_OPEN_CONVERSATION_

    $sql .= <<_EXPORT_APPOINTMENT_;
# Exporting $oldDb.appointment data to $fileAppointment
SELECT $id, uid, item_id, start_time, end_time
INTO OUTFILE '$fileAppointment' $exportOptions
FROM $oldDb.appointment;
_EXPORT_APPOINTMENT_

    $sql .= <<_EXPORT_TOMBSTONE_;
# Exporting $oldDb.tombstone data to $fileTombstone
SELECT $id, sequence, date, ids
INTO OUTFILE '$fileTombstone' $exportOptions
FROM $oldDb.tombstone;
_EXPORT_TOMBSTONE_

    ##########################
    #
    # Import tables into mboxgroupN database
    #
    ##########################

    my @tables = ('mail_item', 'open_conversation', 'appointment', 'tombstone');
    my @files = ($fileMailItem, $fileOpenConversation, $fileAppointment, $fileTombstone);
    my $i;
    for ($i = 0; $i < 4; $i++) {
        my $table = $tables[$i];
        my $file = $files[$i];
        # Key/uniqueness checks are disabled around each LOAD so rows can be
        # inserted before their parents exist, then re-enabled.
        $sql .= <<_IMPORT_;
# Importing $file file into $newDb.$table
SET FOREIGN_KEY_CHECKS = 0;
SET UNIQUE_CHECKS = 0;
LOAD DATA INFILE '$file' REPLACE INTO TABLE $newDb.$table $exportOptions;
SET FOREIGN_KEY_CHECKS = 1;
SET UNIQUE_CHECKS = 1;
_IMPORT_
    }

    # "system" here is a mysql-client builtin that runs a shell command; it
    # deletes the dump files once they have been loaded.
    $sql .= 'system rm -f';
    for ($i = 0; $i < 4; $i++) {
        my $file = $files[$i];
        $sql .= " $file";
    }
    $sql .= "\n";

    return $sql;
}

# Return the schema template for one mboxgroup database.  The heredoc is
# single-quoted, so ${DATABASE_NAME} is a literal placeholder that
# createMailboxGroup() substitutes later; the '#' lines inside it are SQL
# comments, not Perl comments.
sub getMboxGroupSchemaSql() {
    my $sql = <<'_SCHEMA_SQL_';
DROP DATABASE IF EXISTS ${DATABASE_NAME};
CREATE DATABASE ${DATABASE_NAME}
DEFAULT CHARACTER SET utf8;

CREATE TABLE IF NOT EXISTS ${DATABASE_NAME}.mail_item (
   mailbox_id    INTEGER UNSIGNED NOT NULL,
   id            INTEGER UNSIGNED NOT NULL,
   type          TINYINT NOT NULL,           # 1 = folder, 3 = tag, etc.
   parent_id     INTEGER UNSIGNED,
   folder_id     INTEGER UNSIGNED,
   index_id      INTEGER UNSIGNED,
   imap_id       INTEGER UNSIGNED,
   date          INTEGER UNSIGNED NOT NULL,  # stored as a UNIX-style timestamp
   size          INTEGER UNSIGNED NOT NULL,
   volume_id     TINYINT UNSIGNED,
   blob_digest   VARCHAR(28) BINARY,         # reference to blob, meaningful for messages only (type == 5)
   unread        INTEGER UNSIGNED,           # stored separately from the other flags so we can index it
   flags         INTEGER NOT NULL DEFAULT 0,
   tags          BIGINT NOT NULL DEFAULT 0,
   sender        VARCHAR(128),
   subject       TEXT,
   metadata      TEXT,
   mod_metadata  INTEGER UNSIGNED NOT NULL,  # change number for last row modification
   change_date   INTEGER UNSIGNED,           # UNIX-style timestamp for last row modification
   mod_content   INTEGER UNSIGNED NOT NULL,  # change number for last change to "content" (e.g. blob)

   PRIMARY KEY (mailbox_id, id),
   INDEX i_type (mailbox_id, type),                      # for looking up folders and tags
   INDEX i_parent_id (mailbox_id, parent_id),            # for looking up a parent\'s children
   INDEX i_folder_id_date (mailbox_id, folder_id, date), # for looking up by folder and sorting by date
   INDEX i_index_id (mailbox_id, index_id),              # for looking up based on search results
   INDEX i_unread (mailbox_id, unread),                  # there should be a small number of items with unread=TRUE
                                                         # no compound index on (unread, date), so we save space at
                                                         # the expense of sorting a small number of rows
   INDEX i_date (mailbox_id, date),                      # fallback index in case other constraints are not specified
   INDEX i_mod_metadata (mailbox_id, mod_metadata),      # used by the sync code
   INDEX i_tags_date (mailbox_id, tags, date),           # for tag searches
   INDEX i_flags_date (mailbox_id, flags, date),         # for flag searches
   INDEX i_volume_id (mailbox_id, volume_id),            # for the foreign key into the volume table

   CONSTRAINT fk_mail_item_mailbox_id FOREIGN KEY (mailbox_id) REFERENCES zimbra.mailbox(id),
   CONSTRAINT fk_mail_item_parent_id FOREIGN KEY (mailbox_id, parent_id) REFERENCES ${DATABASE_NAME}.mail_item(mailbox_id, id),
   CONSTRAINT fk_mail_item_folder_id FOREIGN KEY (mailbox_id, folder_id) REFERENCES ${DATABASE_NAME}.mail_item(mailbox_id, id),
   CONSTRAINT fk_mail_item_volume_id FOREIGN KEY (volume_id) REFERENCES zimbra.volume(id)
) ENGINE = InnoDB;

CREATE TABLE IF NOT EXISTS ${DATABASE_NAME}.open_conversation (
   mailbox_id  INTEGER UNSIGNED NOT NULL,
   hash        CHAR(28) BINARY NOT NULL,
   conv_id     INTEGER UNSIGNED NOT NULL,

   PRIMARY KEY (mailbox_id, hash),
   INDEX i_conv_id (mailbox_id, conv_id),
   CONSTRAINT fk_open_conversation_mailbox_id FOREIGN KEY (mailbox_id) REFERENCES zimbra.mailbox(id),
   CONSTRAINT fk_open_conversation_conv_id FOREIGN KEY (mailbox_id, conv_id) REFERENCES ${DATABASE_NAME}.mail_item(mailbox_id, id) ON DELETE CASCADE
) ENGINE = InnoDB;

CREATE TABLE IF NOT EXISTS ${DATABASE_NAME}.appointment (
   mailbox_id  INTEGER UNSIGNED NOT NULL,
   uid         VARCHAR(255) NOT NULL,
   item_id     INTEGER UNSIGNED NOT NULL,
   start_time  DATETIME NOT NULL,
   end_time    DATETIME,

   PRIMARY KEY (mailbox_id, uid),
   INDEX i_item_id (mailbox_id, item_id),
   CONSTRAINT fk_appointment_mailbox_id FOREIGN KEY (mailbox_id) REFERENCES zimbra.mailbox(id),
   CONSTRAINT fk_appointment_item_id FOREIGN KEY (mailbox_id, item_id) REFERENCES ${DATABASE_NAME}.mail_item(mailbox_id, id) ON DELETE CASCADE
) ENGINE = InnoDB;

CREATE TABLE IF NOT EXISTS ${DATABASE_NAME}.tombstone (
   mailbox_id  INTEGER UNSIGNED NOT NULL,
   sequence    INTEGER UNSIGNED NOT NULL,  # change number for deletion
   date        INTEGER UNSIGNED NOT NULL,  # deletion date as a UNIX-style timestamp
   ids         TEXT,

   INDEX i_sequence (mailbox_id, sequence),
   CONSTRAINT fk_tombstone_mailbox_id FOREIGN KEY (mailbox_id) REFERENCES zimbra.mailbox(id)
) ENGINE = InnoDB;
_SCHEMA_SQL_

    return $sql;
}
nico01f/z-pec
ZimbraServer/src/db/migration/migrate20060911-MailboxGroup.pl
Perl
mit
10,564
use strict;
use Data::Dumper;
use Carp;

#
# This is a SAS Component
#

=head1 svr_location_of

Get physical locations of genes.

------

Example:

    svr_all_features 3702.1 peg | svr_location_of

would produce a 2-column table.  The first column would contain
PEG IDs for genes occurring in genome 3702.1, and the second would contain
the locations of those genes.

------

The standard input should be a tab-separated table (i.e., each line
is a tab-separated set of fields).  Normally, the last field in each
line would contain the ID of the feature for which locations are being
requested.  If some other column contains the feature IDs, use

    -c N

where N is the column (from 1) that contains the PEG in each case.

This is a pipe command. The input is taken from the standard input, and
the output is to the standard output.

=head2 Command-Line Options

=over 4

=item -c Column

This is used only if the column containing PEGs is not the last.

=item -bounds

Normally, the location is returned as a comma-delimited list of location
strings (each containing a contig ID, a start location, a strand indicator,
and a length). Normally this would be a single location string, but some
genes have multiple contiguous segments, and each segment is a separate
string. If this option is specified, then only a single location-- one
that covers all segments of the gene-- is output.

=back

=head2 Output Format

The standard output is a tab-delimited file. It consists of the input
file with an extra column added (the location associated with the feature).

=cut

use SeedUtils;
use SAPserver;
use ScriptThing;
use Getopt::Long;

my $usage = "usage: svr_location_of [-c column] [-bounds]";

my $column;
my $bounds = 0;
my $url = '';
my $rc = GetOptions('c=i' => \$column,
                    "bounds" => \$bounds,
                    "url=s" => \$url);
if (! $rc) {
    # Bad command-line options: report usage and exit non-zero so that shell
    # pipelines can detect the failure.  (Previously this exited with status 0
    # and printed the usage string without a trailing newline.)
    print STDERR "$usage\n";
    exit 1;
}

# Connect to the Sapling server only after the options have been validated.
my $sapObject = SAPserver->new(url => $url);

# Process the input in batches of feature IDs; each batch is one server call.
while (my @tuples = ScriptThing::GetBatch(\*STDIN, undef, $column)) {
    # Get the locations for this batch of features.
    my $fidHash = $sapObject->fid_locations(-ids => [map { $_->[0] } @tuples],
                                            -boundaries => $bounds);
    # Loop through the batch, generating output.
    for my $tuple (@tuples) {
        # Get the feature ID and input line for this tuple.
        my ($fid, $line) = @$tuple;
        # Find the locations for this feature.
        my $locs = $fidHash->{$fid};
        if (! defined $locs) {
            # Here no location was found.
            print STDERR "$fid not found.\n";
        } elsif (ref $locs ne 'ARRAY') {
            # Here we have a singleton location (-bounds mode).
            print "$line\t$locs\n";
        } else {
            # Here we have a list of location segments.
            my $locString = join(", ", @$locs);
            print "$line\t$locString\n";
        }
    }
}
kbase/kb_seed
service-scripts/svr_location_of.pl
Perl
mit
2,814
# This file is auto-generated by the Perl DateTime Suite time zone # code generator (0.07) This code generator comes with the # DateTime::TimeZone module distribution in the tools/ directory # # Generated from /tmp/BU3Xn7v6Kb/northamerica. Olson data version 2015g # # Do not edit this file directly. # package DateTime::TimeZone::America::Fort_Nelson; $DateTime::TimeZone::America::Fort_Nelson::VERSION = '1.94'; use strict; use Class::Singleton 1.03; use DateTime::TimeZone; use DateTime::TimeZone::OlsonDB; @DateTime::TimeZone::America::Fort_Nelson::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' ); my $spans = [ [ DateTime::TimeZone::NEG_INFINITY, # utc_start 59421802247, # utc_end 1884-01-01 08:10:47 (Tue) DateTime::TimeZone::NEG_INFINITY, # local_start 59421772800, # local_end 1884-01-01 00:00:00 (Tue) -29447, 0, 'LMT', ], [ 59421802247, # utc_start 1884-01-01 08:10:47 (Tue) 60503623200, # utc_end 1918-04-14 10:00:00 (Sun) 59421773447, # local_start 1884-01-01 00:10:47 (Tue) 60503594400, # local_end 1918-04-14 02:00:00 (Sun) -28800, 0, 'PST', ], [ 60503623200, # utc_start 1918-04-14 10:00:00 (Sun) 60520554000, # utc_end 1918-10-27 09:00:00 (Sun) 60503598000, # local_start 1918-04-14 03:00:00 (Sun) 60520528800, # local_end 1918-10-27 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 60520554000, # utc_start 1918-10-27 09:00:00 (Sun) 61255476000, # utc_end 1942-02-09 10:00:00 (Mon) 60520525200, # local_start 1918-10-27 01:00:00 (Sun) 61255447200, # local_end 1942-02-09 02:00:00 (Mon) -28800, 0, 'PST', ], [ 61255476000, # utc_start 1942-02-09 10:00:00 (Mon) 61366287600, # utc_end 1945-08-14 23:00:00 (Tue) 61255450800, # local_start 1942-02-09 03:00:00 (Mon) 61366262400, # local_end 1945-08-14 16:00:00 (Tue) -25200, 1, 'PWT', ], [ 61366287600, # utc_start 1945-08-14 23:00:00 (Tue) 61370298000, # utc_end 1945-09-30 09:00:00 (Sun) 61366262400, # local_start 1945-08-14 16:00:00 (Tue) 61370272800, # local_end 1945-09-30 02:00:00 (Sun) -25200, 1, 'PPT', ], [ 61370298000, # utc_start 
1945-09-30 09:00:00 (Sun) 61378329600, # utc_end 1946-01-01 08:00:00 (Tue) 61370269200, # local_start 1945-09-30 01:00:00 (Sun) 61378300800, # local_end 1946-01-01 00:00:00 (Tue) -28800, 0, 'PST', ], [ 61378329600, # utc_start 1946-01-01 08:00:00 (Tue) 61409865600, # utc_end 1947-01-01 08:00:00 (Wed) 61378300800, # local_start 1946-01-01 00:00:00 (Tue) 61409836800, # local_end 1947-01-01 00:00:00 (Wed) -28800, 0, 'PST', ], [ 61409865600, # utc_start 1947-01-01 08:00:00 (Wed) 61419895200, # utc_end 1947-04-27 10:00:00 (Sun) 61409836800, # local_start 1947-01-01 00:00:00 (Wed) 61419866400, # local_end 1947-04-27 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61419895200, # utc_start 1947-04-27 10:00:00 (Sun) 61433197200, # utc_end 1947-09-28 09:00:00 (Sun) 61419870000, # local_start 1947-04-27 03:00:00 (Sun) 61433172000, # local_end 1947-09-28 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61433197200, # utc_start 1947-09-28 09:00:00 (Sun) 61451344800, # utc_end 1948-04-25 10:00:00 (Sun) 61433168400, # local_start 1947-09-28 01:00:00 (Sun) 61451316000, # local_end 1948-04-25 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61451344800, # utc_start 1948-04-25 10:00:00 (Sun) 61464646800, # utc_end 1948-09-26 09:00:00 (Sun) 61451319600, # local_start 1948-04-25 03:00:00 (Sun) 61464621600, # local_end 1948-09-26 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61464646800, # utc_start 1948-09-26 09:00:00 (Sun) 61482794400, # utc_end 1949-04-24 10:00:00 (Sun) 61464618000, # local_start 1948-09-26 01:00:00 (Sun) 61482765600, # local_end 1949-04-24 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61482794400, # utc_start 1949-04-24 10:00:00 (Sun) 61496096400, # utc_end 1949-09-25 09:00:00 (Sun) 61482769200, # local_start 1949-04-24 03:00:00 (Sun) 61496071200, # local_end 1949-09-25 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61496096400, # utc_start 1949-09-25 09:00:00 (Sun) 61514848800, # utc_end 1950-04-30 10:00:00 (Sun) 61496067600, # local_start 1949-09-25 01:00:00 (Sun) 61514820000, # local_end 1950-04-30 02:00:00 (Sun) 
-28800, 0, 'PST', ], [ 61514848800, # utc_start 1950-04-30 10:00:00 (Sun) 61527546000, # utc_end 1950-09-24 09:00:00 (Sun) 61514823600, # local_start 1950-04-30 03:00:00 (Sun) 61527520800, # local_end 1950-09-24 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61527546000, # utc_start 1950-09-24 09:00:00 (Sun) 61546298400, # utc_end 1951-04-29 10:00:00 (Sun) 61527517200, # local_start 1950-09-24 01:00:00 (Sun) 61546269600, # local_end 1951-04-29 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61546298400, # utc_start 1951-04-29 10:00:00 (Sun) 61559600400, # utc_end 1951-09-30 09:00:00 (Sun) 61546273200, # local_start 1951-04-29 03:00:00 (Sun) 61559575200, # local_end 1951-09-30 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61559600400, # utc_start 1951-09-30 09:00:00 (Sun) 61577748000, # utc_end 1952-04-27 10:00:00 (Sun) 61559571600, # local_start 1951-09-30 01:00:00 (Sun) 61577719200, # local_end 1952-04-27 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61577748000, # utc_start 1952-04-27 10:00:00 (Sun) 61591050000, # utc_end 1952-09-28 09:00:00 (Sun) 61577722800, # local_start 1952-04-27 03:00:00 (Sun) 61591024800, # local_end 1952-09-28 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61591050000, # utc_start 1952-09-28 09:00:00 (Sun) 61609197600, # utc_end 1953-04-26 10:00:00 (Sun) 61591021200, # local_start 1952-09-28 01:00:00 (Sun) 61609168800, # local_end 1953-04-26 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61609197600, # utc_start 1953-04-26 10:00:00 (Sun) 61622499600, # utc_end 1953-09-27 09:00:00 (Sun) 61609172400, # local_start 1953-04-26 03:00:00 (Sun) 61622474400, # local_end 1953-09-27 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61622499600, # utc_start 1953-09-27 09:00:00 (Sun) 61640647200, # utc_end 1954-04-25 10:00:00 (Sun) 61622470800, # local_start 1953-09-27 01:00:00 (Sun) 61640618400, # local_end 1954-04-25 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61640647200, # utc_start 1954-04-25 10:00:00 (Sun) 61653949200, # utc_end 1954-09-26 09:00:00 (Sun) 61640622000, # local_start 1954-04-25 03:00:00 (Sun) 
61653924000, # local_end 1954-09-26 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61653949200, # utc_start 1954-09-26 09:00:00 (Sun) 61672096800, # utc_end 1955-04-24 10:00:00 (Sun) 61653920400, # local_start 1954-09-26 01:00:00 (Sun) 61672068000, # local_end 1955-04-24 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61672096800, # utc_start 1955-04-24 10:00:00 (Sun) 61685398800, # utc_end 1955-09-25 09:00:00 (Sun) 61672071600, # local_start 1955-04-24 03:00:00 (Sun) 61685373600, # local_end 1955-09-25 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61685398800, # utc_start 1955-09-25 09:00:00 (Sun) 61704151200, # utc_end 1956-04-29 10:00:00 (Sun) 61685370000, # local_start 1955-09-25 01:00:00 (Sun) 61704122400, # local_end 1956-04-29 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61704151200, # utc_start 1956-04-29 10:00:00 (Sun) 61717453200, # utc_end 1956-09-30 09:00:00 (Sun) 61704126000, # local_start 1956-04-29 03:00:00 (Sun) 61717428000, # local_end 1956-09-30 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61717453200, # utc_start 1956-09-30 09:00:00 (Sun) 61735600800, # utc_end 1957-04-28 10:00:00 (Sun) 61717424400, # local_start 1956-09-30 01:00:00 (Sun) 61735572000, # local_end 1957-04-28 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61735600800, # utc_start 1957-04-28 10:00:00 (Sun) 61748902800, # utc_end 1957-09-29 09:00:00 (Sun) 61735575600, # local_start 1957-04-28 03:00:00 (Sun) 61748877600, # local_end 1957-09-29 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61748902800, # utc_start 1957-09-29 09:00:00 (Sun) 61767050400, # utc_end 1958-04-27 10:00:00 (Sun) 61748874000, # local_start 1957-09-29 01:00:00 (Sun) 61767021600, # local_end 1958-04-27 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61767050400, # utc_start 1958-04-27 10:00:00 (Sun) 61780352400, # utc_end 1958-09-28 09:00:00 (Sun) 61767025200, # local_start 1958-04-27 03:00:00 (Sun) 61780327200, # local_end 1958-09-28 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61780352400, # utc_start 1958-09-28 09:00:00 (Sun) 61798500000, # utc_end 1959-04-26 10:00:00 (Sun) 
61780323600, # local_start 1958-09-28 01:00:00 (Sun) 61798471200, # local_end 1959-04-26 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61798500000, # utc_start 1959-04-26 10:00:00 (Sun) 61811802000, # utc_end 1959-09-27 09:00:00 (Sun) 61798474800, # local_start 1959-04-26 03:00:00 (Sun) 61811776800, # local_end 1959-09-27 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61811802000, # utc_start 1959-09-27 09:00:00 (Sun) 61829949600, # utc_end 1960-04-24 10:00:00 (Sun) 61811773200, # local_start 1959-09-27 01:00:00 (Sun) 61829920800, # local_end 1960-04-24 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61829949600, # utc_start 1960-04-24 10:00:00 (Sun) 61843251600, # utc_end 1960-09-25 09:00:00 (Sun) 61829924400, # local_start 1960-04-24 03:00:00 (Sun) 61843226400, # local_end 1960-09-25 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61843251600, # utc_start 1960-09-25 09:00:00 (Sun) 61862004000, # utc_end 1961-04-30 10:00:00 (Sun) 61843222800, # local_start 1960-09-25 01:00:00 (Sun) 61861975200, # local_end 1961-04-30 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61862004000, # utc_start 1961-04-30 10:00:00 (Sun) 61874701200, # utc_end 1961-09-24 09:00:00 (Sun) 61861978800, # local_start 1961-04-30 03:00:00 (Sun) 61874676000, # local_end 1961-09-24 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61874701200, # utc_start 1961-09-24 09:00:00 (Sun) 61893453600, # utc_end 1962-04-29 10:00:00 (Sun) 61874672400, # local_start 1961-09-24 01:00:00 (Sun) 61893424800, # local_end 1962-04-29 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61893453600, # utc_start 1962-04-29 10:00:00 (Sun) 61909174800, # utc_end 1962-10-28 09:00:00 (Sun) 61893428400, # local_start 1962-04-29 03:00:00 (Sun) 61909149600, # local_end 1962-10-28 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61909174800, # utc_start 1962-10-28 09:00:00 (Sun) 61924903200, # utc_end 1963-04-28 10:00:00 (Sun) 61909146000, # local_start 1962-10-28 01:00:00 (Sun) 61924874400, # local_end 1963-04-28 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61924903200, # utc_start 1963-04-28 10:00:00 (Sun) 
61940624400, # utc_end 1963-10-27 09:00:00 (Sun) 61924878000, # local_start 1963-04-28 03:00:00 (Sun) 61940599200, # local_end 1963-10-27 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61940624400, # utc_start 1963-10-27 09:00:00 (Sun) 61956352800, # utc_end 1964-04-26 10:00:00 (Sun) 61940595600, # local_start 1963-10-27 01:00:00 (Sun) 61956324000, # local_end 1964-04-26 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61956352800, # utc_start 1964-04-26 10:00:00 (Sun) 61972074000, # utc_end 1964-10-25 09:00:00 (Sun) 61956327600, # local_start 1964-04-26 03:00:00 (Sun) 61972048800, # local_end 1964-10-25 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 61972074000, # utc_start 1964-10-25 09:00:00 (Sun) 61987802400, # utc_end 1965-04-25 10:00:00 (Sun) 61972045200, # local_start 1964-10-25 01:00:00 (Sun) 61987773600, # local_end 1965-04-25 02:00:00 (Sun) -28800, 0, 'PST', ], [ 61987802400, # utc_start 1965-04-25 10:00:00 (Sun) 62004128400, # utc_end 1965-10-31 09:00:00 (Sun) 61987777200, # local_start 1965-04-25 03:00:00 (Sun) 62004103200, # local_end 1965-10-31 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62004128400, # utc_start 1965-10-31 09:00:00 (Sun) 62019252000, # utc_end 1966-04-24 10:00:00 (Sun) 62004099600, # local_start 1965-10-31 01:00:00 (Sun) 62019223200, # local_end 1966-04-24 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62019252000, # utc_start 1966-04-24 10:00:00 (Sun) 62035578000, # utc_end 1966-10-30 09:00:00 (Sun) 62019226800, # local_start 1966-04-24 03:00:00 (Sun) 62035552800, # local_end 1966-10-30 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62035578000, # utc_start 1966-10-30 09:00:00 (Sun) 62051306400, # utc_end 1967-04-30 10:00:00 (Sun) 62035549200, # local_start 1966-10-30 01:00:00 (Sun) 62051277600, # local_end 1967-04-30 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62051306400, # utc_start 1967-04-30 10:00:00 (Sun) 62067027600, # utc_end 1967-10-29 09:00:00 (Sun) 62051281200, # local_start 1967-04-30 03:00:00 (Sun) 62067002400, # local_end 1967-10-29 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 
62067027600, # utc_start 1967-10-29 09:00:00 (Sun) 62082756000, # utc_end 1968-04-28 10:00:00 (Sun) 62066998800, # local_start 1967-10-29 01:00:00 (Sun) 62082727200, # local_end 1968-04-28 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62082756000, # utc_start 1968-04-28 10:00:00 (Sun) 62098477200, # utc_end 1968-10-27 09:00:00 (Sun) 62082730800, # local_start 1968-04-28 03:00:00 (Sun) 62098452000, # local_end 1968-10-27 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62098477200, # utc_start 1968-10-27 09:00:00 (Sun) 62114205600, # utc_end 1969-04-27 10:00:00 (Sun) 62098448400, # local_start 1968-10-27 01:00:00 (Sun) 62114176800, # local_end 1969-04-27 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62114205600, # utc_start 1969-04-27 10:00:00 (Sun) 62129926800, # utc_end 1969-10-26 09:00:00 (Sun) 62114180400, # local_start 1969-04-27 03:00:00 (Sun) 62129901600, # local_end 1969-10-26 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62129926800, # utc_start 1969-10-26 09:00:00 (Sun) 62145655200, # utc_end 1970-04-26 10:00:00 (Sun) 62129898000, # local_start 1969-10-26 01:00:00 (Sun) 62145626400, # local_end 1970-04-26 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62145655200, # utc_start 1970-04-26 10:00:00 (Sun) 62161376400, # utc_end 1970-10-25 09:00:00 (Sun) 62145630000, # local_start 1970-04-26 03:00:00 (Sun) 62161351200, # local_end 1970-10-25 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62161376400, # utc_start 1970-10-25 09:00:00 (Sun) 62177104800, # utc_end 1971-04-25 10:00:00 (Sun) 62161347600, # local_start 1970-10-25 01:00:00 (Sun) 62177076000, # local_end 1971-04-25 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62177104800, # utc_start 1971-04-25 10:00:00 (Sun) 62193430800, # utc_end 1971-10-31 09:00:00 (Sun) 62177079600, # local_start 1971-04-25 03:00:00 (Sun) 62193405600, # local_end 1971-10-31 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62193430800, # utc_start 1971-10-31 09:00:00 (Sun) 62209159200, # utc_end 1972-04-30 10:00:00 (Sun) 62193402000, # local_start 1971-10-31 01:00:00 (Sun) 62209130400, # local_end 
1972-04-30 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62209159200, # utc_start 1972-04-30 10:00:00 (Sun) 62224880400, # utc_end 1972-10-29 09:00:00 (Sun) 62209134000, # local_start 1972-04-30 03:00:00 (Sun) 62224855200, # local_end 1972-10-29 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62224880400, # utc_start 1972-10-29 09:00:00 (Sun) 62240608800, # utc_end 1973-04-29 10:00:00 (Sun) 62224851600, # local_start 1972-10-29 01:00:00 (Sun) 62240580000, # local_end 1973-04-29 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62240608800, # utc_start 1973-04-29 10:00:00 (Sun) 62256330000, # utc_end 1973-10-28 09:00:00 (Sun) 62240583600, # local_start 1973-04-29 03:00:00 (Sun) 62256304800, # local_end 1973-10-28 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62256330000, # utc_start 1973-10-28 09:00:00 (Sun) 62272058400, # utc_end 1974-04-28 10:00:00 (Sun) 62256301200, # local_start 1973-10-28 01:00:00 (Sun) 62272029600, # local_end 1974-04-28 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62272058400, # utc_start 1974-04-28 10:00:00 (Sun) 62287779600, # utc_end 1974-10-27 09:00:00 (Sun) 62272033200, # local_start 1974-04-28 03:00:00 (Sun) 62287754400, # local_end 1974-10-27 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62287779600, # utc_start 1974-10-27 09:00:00 (Sun) 62303508000, # utc_end 1975-04-27 10:00:00 (Sun) 62287750800, # local_start 1974-10-27 01:00:00 (Sun) 62303479200, # local_end 1975-04-27 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62303508000, # utc_start 1975-04-27 10:00:00 (Sun) 62319229200, # utc_end 1975-10-26 09:00:00 (Sun) 62303482800, # local_start 1975-04-27 03:00:00 (Sun) 62319204000, # local_end 1975-10-26 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62319229200, # utc_start 1975-10-26 09:00:00 (Sun) 62334957600, # utc_end 1976-04-25 10:00:00 (Sun) 62319200400, # local_start 1975-10-26 01:00:00 (Sun) 62334928800, # local_end 1976-04-25 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62334957600, # utc_start 1976-04-25 10:00:00 (Sun) 62351283600, # utc_end 1976-10-31 09:00:00 (Sun) 62334932400, # local_start 
1976-04-25 03:00:00 (Sun) 62351258400, # local_end 1976-10-31 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62351283600, # utc_start 1976-10-31 09:00:00 (Sun) 62366407200, # utc_end 1977-04-24 10:00:00 (Sun) 62351254800, # local_start 1976-10-31 01:00:00 (Sun) 62366378400, # local_end 1977-04-24 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62366407200, # utc_start 1977-04-24 10:00:00 (Sun) 62382733200, # utc_end 1977-10-30 09:00:00 (Sun) 62366382000, # local_start 1977-04-24 03:00:00 (Sun) 62382708000, # local_end 1977-10-30 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62382733200, # utc_start 1977-10-30 09:00:00 (Sun) 62398461600, # utc_end 1978-04-30 10:00:00 (Sun) 62382704400, # local_start 1977-10-30 01:00:00 (Sun) 62398432800, # local_end 1978-04-30 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62398461600, # utc_start 1978-04-30 10:00:00 (Sun) 62414182800, # utc_end 1978-10-29 09:00:00 (Sun) 62398436400, # local_start 1978-04-30 03:00:00 (Sun) 62414157600, # local_end 1978-10-29 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62414182800, # utc_start 1978-10-29 09:00:00 (Sun) 62429911200, # utc_end 1979-04-29 10:00:00 (Sun) 62414154000, # local_start 1978-10-29 01:00:00 (Sun) 62429882400, # local_end 1979-04-29 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62429911200, # utc_start 1979-04-29 10:00:00 (Sun) 62445632400, # utc_end 1979-10-28 09:00:00 (Sun) 62429886000, # local_start 1979-04-29 03:00:00 (Sun) 62445607200, # local_end 1979-10-28 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62445632400, # utc_start 1979-10-28 09:00:00 (Sun) 62461360800, # utc_end 1980-04-27 10:00:00 (Sun) 62445603600, # local_start 1979-10-28 01:00:00 (Sun) 62461332000, # local_end 1980-04-27 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62461360800, # utc_start 1980-04-27 10:00:00 (Sun) 62477082000, # utc_end 1980-10-26 09:00:00 (Sun) 62461335600, # local_start 1980-04-27 03:00:00 (Sun) 62477056800, # local_end 1980-10-26 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62477082000, # utc_start 1980-10-26 09:00:00 (Sun) 62492810400, # utc_end 
1981-04-26 10:00:00 (Sun) 62477053200, # local_start 1980-10-26 01:00:00 (Sun) 62492781600, # local_end 1981-04-26 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62492810400, # utc_start 1981-04-26 10:00:00 (Sun) 62508531600, # utc_end 1981-10-25 09:00:00 (Sun) 62492785200, # local_start 1981-04-26 03:00:00 (Sun) 62508506400, # local_end 1981-10-25 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62508531600, # utc_start 1981-10-25 09:00:00 (Sun) 62524260000, # utc_end 1982-04-25 10:00:00 (Sun) 62508502800, # local_start 1981-10-25 01:00:00 (Sun) 62524231200, # local_end 1982-04-25 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62524260000, # utc_start 1982-04-25 10:00:00 (Sun) 62540586000, # utc_end 1982-10-31 09:00:00 (Sun) 62524234800, # local_start 1982-04-25 03:00:00 (Sun) 62540560800, # local_end 1982-10-31 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62540586000, # utc_start 1982-10-31 09:00:00 (Sun) 62555709600, # utc_end 1983-04-24 10:00:00 (Sun) 62540557200, # local_start 1982-10-31 01:00:00 (Sun) 62555680800, # local_end 1983-04-24 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62555709600, # utc_start 1983-04-24 10:00:00 (Sun) 62572035600, # utc_end 1983-10-30 09:00:00 (Sun) 62555684400, # local_start 1983-04-24 03:00:00 (Sun) 62572010400, # local_end 1983-10-30 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62572035600, # utc_start 1983-10-30 09:00:00 (Sun) 62587764000, # utc_end 1984-04-29 10:00:00 (Sun) 62572006800, # local_start 1983-10-30 01:00:00 (Sun) 62587735200, # local_end 1984-04-29 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62587764000, # utc_start 1984-04-29 10:00:00 (Sun) 62603485200, # utc_end 1984-10-28 09:00:00 (Sun) 62587738800, # local_start 1984-04-29 03:00:00 (Sun) 62603460000, # local_end 1984-10-28 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62603485200, # utc_start 1984-10-28 09:00:00 (Sun) 62619213600, # utc_end 1985-04-28 10:00:00 (Sun) 62603456400, # local_start 1984-10-28 01:00:00 (Sun) 62619184800, # local_end 1985-04-28 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62619213600, # utc_start 
1985-04-28 10:00:00 (Sun) 62634934800, # utc_end 1985-10-27 09:00:00 (Sun) 62619188400, # local_start 1985-04-28 03:00:00 (Sun) 62634909600, # local_end 1985-10-27 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62634934800, # utc_start 1985-10-27 09:00:00 (Sun) 62650663200, # utc_end 1986-04-27 10:00:00 (Sun) 62634906000, # local_start 1985-10-27 01:00:00 (Sun) 62650634400, # local_end 1986-04-27 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62650663200, # utc_start 1986-04-27 10:00:00 (Sun) 62666384400, # utc_end 1986-10-26 09:00:00 (Sun) 62650638000, # local_start 1986-04-27 03:00:00 (Sun) 62666359200, # local_end 1986-10-26 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62666384400, # utc_start 1986-10-26 09:00:00 (Sun) 62672169600, # utc_end 1987-01-01 08:00:00 (Thu) 62666355600, # local_start 1986-10-26 01:00:00 (Sun) 62672140800, # local_end 1987-01-01 00:00:00 (Thu) -28800, 0, 'PST', ], [ 62672169600, # utc_start 1987-01-01 08:00:00 (Thu) 62680298400, # utc_end 1987-04-05 10:00:00 (Sun) 62672140800, # local_start 1987-01-01 00:00:00 (Thu) 62680269600, # local_end 1987-04-05 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62680298400, # utc_start 1987-04-05 10:00:00 (Sun) 62697834000, # utc_end 1987-10-25 09:00:00 (Sun) 62680273200, # local_start 1987-04-05 03:00:00 (Sun) 62697808800, # local_end 1987-10-25 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62697834000, # utc_start 1987-10-25 09:00:00 (Sun) 62711748000, # utc_end 1988-04-03 10:00:00 (Sun) 62697805200, # local_start 1987-10-25 01:00:00 (Sun) 62711719200, # local_end 1988-04-03 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62711748000, # utc_start 1988-04-03 10:00:00 (Sun) 62729888400, # utc_end 1988-10-30 09:00:00 (Sun) 62711722800, # local_start 1988-04-03 03:00:00 (Sun) 62729863200, # local_end 1988-10-30 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62729888400, # utc_start 1988-10-30 09:00:00 (Sun) 62743197600, # utc_end 1989-04-02 10:00:00 (Sun) 62729859600, # local_start 1988-10-30 01:00:00 (Sun) 62743168800, # local_end 1989-04-02 02:00:00 (Sun) 
-28800, 0, 'PST', ], [ 62743197600, # utc_start 1989-04-02 10:00:00 (Sun) 62761338000, # utc_end 1989-10-29 09:00:00 (Sun) 62743172400, # local_start 1989-04-02 03:00:00 (Sun) 62761312800, # local_end 1989-10-29 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62761338000, # utc_start 1989-10-29 09:00:00 (Sun) 62774647200, # utc_end 1990-04-01 10:00:00 (Sun) 62761309200, # local_start 1989-10-29 01:00:00 (Sun) 62774618400, # local_end 1990-04-01 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62774647200, # utc_start 1990-04-01 10:00:00 (Sun) 62792787600, # utc_end 1990-10-28 09:00:00 (Sun) 62774622000, # local_start 1990-04-01 03:00:00 (Sun) 62792762400, # local_end 1990-10-28 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62792787600, # utc_start 1990-10-28 09:00:00 (Sun) 62806701600, # utc_end 1991-04-07 10:00:00 (Sun) 62792758800, # local_start 1990-10-28 01:00:00 (Sun) 62806672800, # local_end 1991-04-07 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62806701600, # utc_start 1991-04-07 10:00:00 (Sun) 62824237200, # utc_end 1991-10-27 09:00:00 (Sun) 62806676400, # local_start 1991-04-07 03:00:00 (Sun) 62824212000, # local_end 1991-10-27 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62824237200, # utc_start 1991-10-27 09:00:00 (Sun) 62838151200, # utc_end 1992-04-05 10:00:00 (Sun) 62824208400, # local_start 1991-10-27 01:00:00 (Sun) 62838122400, # local_end 1992-04-05 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62838151200, # utc_start 1992-04-05 10:00:00 (Sun) 62855686800, # utc_end 1992-10-25 09:00:00 (Sun) 62838126000, # local_start 1992-04-05 03:00:00 (Sun) 62855661600, # local_end 1992-10-25 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62855686800, # utc_start 1992-10-25 09:00:00 (Sun) 62869600800, # utc_end 1993-04-04 10:00:00 (Sun) 62855658000, # local_start 1992-10-25 01:00:00 (Sun) 62869572000, # local_end 1993-04-04 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62869600800, # utc_start 1993-04-04 10:00:00 (Sun) 62887741200, # utc_end 1993-10-31 09:00:00 (Sun) 62869575600, # local_start 1993-04-04 03:00:00 (Sun) 
62887716000, # local_end 1993-10-31 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62887741200, # utc_start 1993-10-31 09:00:00 (Sun) 62901050400, # utc_end 1994-04-03 10:00:00 (Sun) 62887712400, # local_start 1993-10-31 01:00:00 (Sun) 62901021600, # local_end 1994-04-03 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62901050400, # utc_start 1994-04-03 10:00:00 (Sun) 62919190800, # utc_end 1994-10-30 09:00:00 (Sun) 62901025200, # local_start 1994-04-03 03:00:00 (Sun) 62919165600, # local_end 1994-10-30 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62919190800, # utc_start 1994-10-30 09:00:00 (Sun) 62932500000, # utc_end 1995-04-02 10:00:00 (Sun) 62919162000, # local_start 1994-10-30 01:00:00 (Sun) 62932471200, # local_end 1995-04-02 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62932500000, # utc_start 1995-04-02 10:00:00 (Sun) 62950640400, # utc_end 1995-10-29 09:00:00 (Sun) 62932474800, # local_start 1995-04-02 03:00:00 (Sun) 62950615200, # local_end 1995-10-29 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62950640400, # utc_start 1995-10-29 09:00:00 (Sun) 62964554400, # utc_end 1996-04-07 10:00:00 (Sun) 62950611600, # local_start 1995-10-29 01:00:00 (Sun) 62964525600, # local_end 1996-04-07 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62964554400, # utc_start 1996-04-07 10:00:00 (Sun) 62982090000, # utc_end 1996-10-27 09:00:00 (Sun) 62964529200, # local_start 1996-04-07 03:00:00 (Sun) 62982064800, # local_end 1996-10-27 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 62982090000, # utc_start 1996-10-27 09:00:00 (Sun) 62996004000, # utc_end 1997-04-06 10:00:00 (Sun) 62982061200, # local_start 1996-10-27 01:00:00 (Sun) 62995975200, # local_end 1997-04-06 02:00:00 (Sun) -28800, 0, 'PST', ], [ 62996004000, # utc_start 1997-04-06 10:00:00 (Sun) 63013539600, # utc_end 1997-10-26 09:00:00 (Sun) 62995978800, # local_start 1997-04-06 03:00:00 (Sun) 63013514400, # local_end 1997-10-26 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63013539600, # utc_start 1997-10-26 09:00:00 (Sun) 63027453600, # utc_end 1998-04-05 10:00:00 (Sun) 
63013510800, # local_start 1997-10-26 01:00:00 (Sun) 63027424800, # local_end 1998-04-05 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63027453600, # utc_start 1998-04-05 10:00:00 (Sun) 63044989200, # utc_end 1998-10-25 09:00:00 (Sun) 63027428400, # local_start 1998-04-05 03:00:00 (Sun) 63044964000, # local_end 1998-10-25 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63044989200, # utc_start 1998-10-25 09:00:00 (Sun) 63058903200, # utc_end 1999-04-04 10:00:00 (Sun) 63044960400, # local_start 1998-10-25 01:00:00 (Sun) 63058874400, # local_end 1999-04-04 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63058903200, # utc_start 1999-04-04 10:00:00 (Sun) 63077043600, # utc_end 1999-10-31 09:00:00 (Sun) 63058878000, # local_start 1999-04-04 03:00:00 (Sun) 63077018400, # local_end 1999-10-31 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63077043600, # utc_start 1999-10-31 09:00:00 (Sun) 63090352800, # utc_end 2000-04-02 10:00:00 (Sun) 63077014800, # local_start 1999-10-31 01:00:00 (Sun) 63090324000, # local_end 2000-04-02 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63090352800, # utc_start 2000-04-02 10:00:00 (Sun) 63108493200, # utc_end 2000-10-29 09:00:00 (Sun) 63090327600, # local_start 2000-04-02 03:00:00 (Sun) 63108468000, # local_end 2000-10-29 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63108493200, # utc_start 2000-10-29 09:00:00 (Sun) 63121802400, # utc_end 2001-04-01 10:00:00 (Sun) 63108464400, # local_start 2000-10-29 01:00:00 (Sun) 63121773600, # local_end 2001-04-01 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63121802400, # utc_start 2001-04-01 10:00:00 (Sun) 63139942800, # utc_end 2001-10-28 09:00:00 (Sun) 63121777200, # local_start 2001-04-01 03:00:00 (Sun) 63139917600, # local_end 2001-10-28 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63139942800, # utc_start 2001-10-28 09:00:00 (Sun) 63153856800, # utc_end 2002-04-07 10:00:00 (Sun) 63139914000, # local_start 2001-10-28 01:00:00 (Sun) 63153828000, # local_end 2002-04-07 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63153856800, # utc_start 2002-04-07 10:00:00 (Sun) 
63171392400, # utc_end 2002-10-27 09:00:00 (Sun) 63153831600, # local_start 2002-04-07 03:00:00 (Sun) 63171367200, # local_end 2002-10-27 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63171392400, # utc_start 2002-10-27 09:00:00 (Sun) 63185306400, # utc_end 2003-04-06 10:00:00 (Sun) 63171363600, # local_start 2002-10-27 01:00:00 (Sun) 63185277600, # local_end 2003-04-06 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63185306400, # utc_start 2003-04-06 10:00:00 (Sun) 63202842000, # utc_end 2003-10-26 09:00:00 (Sun) 63185281200, # local_start 2003-04-06 03:00:00 (Sun) 63202816800, # local_end 2003-10-26 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63202842000, # utc_start 2003-10-26 09:00:00 (Sun) 63216756000, # utc_end 2004-04-04 10:00:00 (Sun) 63202813200, # local_start 2003-10-26 01:00:00 (Sun) 63216727200, # local_end 2004-04-04 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63216756000, # utc_start 2004-04-04 10:00:00 (Sun) 63234896400, # utc_end 2004-10-31 09:00:00 (Sun) 63216730800, # local_start 2004-04-04 03:00:00 (Sun) 63234871200, # local_end 2004-10-31 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63234896400, # utc_start 2004-10-31 09:00:00 (Sun) 63248205600, # utc_end 2005-04-03 10:00:00 (Sun) 63234867600, # local_start 2004-10-31 01:00:00 (Sun) 63248176800, # local_end 2005-04-03 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63248205600, # utc_start 2005-04-03 10:00:00 (Sun) 63266346000, # utc_end 2005-10-30 09:00:00 (Sun) 63248180400, # local_start 2005-04-03 03:00:00 (Sun) 63266320800, # local_end 2005-10-30 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63266346000, # utc_start 2005-10-30 09:00:00 (Sun) 63279655200, # utc_end 2006-04-02 10:00:00 (Sun) 63266317200, # local_start 2005-10-30 01:00:00 (Sun) 63279626400, # local_end 2006-04-02 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63279655200, # utc_start 2006-04-02 10:00:00 (Sun) 63297795600, # utc_end 2006-10-29 09:00:00 (Sun) 63279630000, # local_start 2006-04-02 03:00:00 (Sun) 63297770400, # local_end 2006-10-29 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 
63297795600, # utc_start 2006-10-29 09:00:00 (Sun) 63309290400, # utc_end 2007-03-11 10:00:00 (Sun) 63297766800, # local_start 2006-10-29 01:00:00 (Sun) 63309261600, # local_end 2007-03-11 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63309290400, # utc_start 2007-03-11 10:00:00 (Sun) 63329850000, # utc_end 2007-11-04 09:00:00 (Sun) 63309265200, # local_start 2007-03-11 03:00:00 (Sun) 63329824800, # local_end 2007-11-04 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63329850000, # utc_start 2007-11-04 09:00:00 (Sun) 63340740000, # utc_end 2008-03-09 10:00:00 (Sun) 63329821200, # local_start 2007-11-04 01:00:00 (Sun) 63340711200, # local_end 2008-03-09 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63340740000, # utc_start 2008-03-09 10:00:00 (Sun) 63361299600, # utc_end 2008-11-02 09:00:00 (Sun) 63340714800, # local_start 2008-03-09 03:00:00 (Sun) 63361274400, # local_end 2008-11-02 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63361299600, # utc_start 2008-11-02 09:00:00 (Sun) 63372189600, # utc_end 2009-03-08 10:00:00 (Sun) 63361270800, # local_start 2008-11-02 01:00:00 (Sun) 63372160800, # local_end 2009-03-08 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63372189600, # utc_start 2009-03-08 10:00:00 (Sun) 63392749200, # utc_end 2009-11-01 09:00:00 (Sun) 63372164400, # local_start 2009-03-08 03:00:00 (Sun) 63392724000, # local_end 2009-11-01 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63392749200, # utc_start 2009-11-01 09:00:00 (Sun) 63404244000, # utc_end 2010-03-14 10:00:00 (Sun) 63392720400, # local_start 2009-11-01 01:00:00 (Sun) 63404215200, # local_end 2010-03-14 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63404244000, # utc_start 2010-03-14 10:00:00 (Sun) 63424803600, # utc_end 2010-11-07 09:00:00 (Sun) 63404218800, # local_start 2010-03-14 03:00:00 (Sun) 63424778400, # local_end 2010-11-07 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63424803600, # utc_start 2010-11-07 09:00:00 (Sun) 63435693600, # utc_end 2011-03-13 10:00:00 (Sun) 63424774800, # local_start 2010-11-07 01:00:00 (Sun) 63435664800, # local_end 
2011-03-13 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63435693600, # utc_start 2011-03-13 10:00:00 (Sun) 63456253200, # utc_end 2011-11-06 09:00:00 (Sun) 63435668400, # local_start 2011-03-13 03:00:00 (Sun) 63456228000, # local_end 2011-11-06 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63456253200, # utc_start 2011-11-06 09:00:00 (Sun) 63467143200, # utc_end 2012-03-11 10:00:00 (Sun) 63456224400, # local_start 2011-11-06 01:00:00 (Sun) 63467114400, # local_end 2012-03-11 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63467143200, # utc_start 2012-03-11 10:00:00 (Sun) 63487702800, # utc_end 2012-11-04 09:00:00 (Sun) 63467118000, # local_start 2012-03-11 03:00:00 (Sun) 63487677600, # local_end 2012-11-04 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63487702800, # utc_start 2012-11-04 09:00:00 (Sun) 63498592800, # utc_end 2013-03-10 10:00:00 (Sun) 63487674000, # local_start 2012-11-04 01:00:00 (Sun) 63498564000, # local_end 2013-03-10 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63498592800, # utc_start 2013-03-10 10:00:00 (Sun) 63519152400, # utc_end 2013-11-03 09:00:00 (Sun) 63498567600, # local_start 2013-03-10 03:00:00 (Sun) 63519127200, # local_end 2013-11-03 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63519152400, # utc_start 2013-11-03 09:00:00 (Sun) 63530042400, # utc_end 2014-03-09 10:00:00 (Sun) 63519123600, # local_start 2013-11-03 01:00:00 (Sun) 63530013600, # local_end 2014-03-09 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63530042400, # utc_start 2014-03-09 10:00:00 (Sun) 63550602000, # utc_end 2014-11-02 09:00:00 (Sun) 63530017200, # local_start 2014-03-09 03:00:00 (Sun) 63550576800, # local_end 2014-11-02 02:00:00 (Sun) -25200, 1, 'PDT', ], [ 63550602000, # utc_start 2014-11-02 09:00:00 (Sun) 63561492000, # utc_end 2015-03-08 10:00:00 (Sun) 63550573200, # local_start 2014-11-02 01:00:00 (Sun) 63561463200, # local_end 2015-03-08 02:00:00 (Sun) -28800, 0, 'PST', ], [ 63561492000, # utc_start 2015-03-08 10:00:00 (Sun) DateTime::TimeZone::INFINITY, # utc_end 63561466800, # local_start 2015-03-08 
03:00:00 (Sun)
DateTime::TimeZone::INFINITY, # local_end
-25200,
0,
'MST',
    ],
];

# Version of the IANA (Olson) time zone database these spans were generated from.
sub olson_version {'2015g'}

# Number of DST transitions encoded in the span table above.
sub has_dst_changes {71}

# Last year for which observance spans were pre-computed; later dates are
# derived from the final (open-ended) span.
sub _max_year {2025}

sub _new_instance {
    # Build the zone object, handing the pre-computed spans to the
    # DateTime::TimeZone base-class initializer.
    return shift->_init( @_, spans => $spans );
}

1;
rosiro/wasarabi
local/lib/perl5/DateTime/TimeZone/America/Fort_Nelson.pm
Perl
mit
37,063
#!/usr/bin/perl
use strict;
use warnings;
use Bio::Seq;
use Bio::SeqIO;
use Bio::DB::Fasta;    # makes a searchable db from my fasta file

###############################################################################
##########      Make ChLG FASTA from scaffolds to ChLG AGP            #########
#
# Reads an AGP file that orders scaffolds into chromosome linkage groups
# (ChLGs), pulls each scaffold slice out of an indexed scaffold FASTA,
# concatenates the slices (inserting a 100 bp "n" pad between scaffolds of
# the same ChLG) and finally rewrites the FASTA headers with NCBI source
# modifiers.
###############################################################################

# Random-access DB over the scaffold sequences (builds/reuses an index file).
my $fasta_in_file = "/homes/bioinfo/bionano/Trib_cast_0002_september_2014/ncbi/Tcas5.2_scaffolds.fasta";
my $db = Bio::DB::Fasta->new($fasta_in_file);

# Intermediate ChLG FASTA, before the NCBI header decoration is applied.
my $fasta_out_file = "/homes/bioinfo/bionano/Trib_cast_0002_september_2014/ncbi/Tcas5.2_chlg_pre_header.fasta";
my $seq_out = Bio::SeqIO->new('-file' => ">$fasta_out_file", '-format' => 'fasta');

# AGP describing which scaffold slices build each ChLG.
my $chlg_from_scaffolds_in_file = "/homes/bioinfo/bionano/Trib_cast_0002_september_2014/ncbi/Tcas5.2_chlg_from_scaffolds.agp";
open(my $chlg_from_scaffolds_in, "<", $chlg_from_scaffolds_in_file)
    or die "can't open $chlg_from_scaffolds_in_file: $!";

my $first   = 1;      # still waiting for the first sequence (W) row
my $old_mol = "X";    # molecule of the previous W row (sentinel at start)
my $scaffold_id;      # id of the ChLG currently being assembled
my $new_seq = '';     # growing sequence of that ChLG

while (<$chlg_from_scaffolds_in>)
{
    next if /^#/;                      # skip AGP comment lines
    my @columns = split(/\t/);
    my $new_mol = $columns[0];
    next unless $columns[4] eq "W";    # only W rows reference scaffold sequence

    ###################################################################
    #################    starting/changing ChLGs    ###################
    ###################################################################
    if ($new_mol ne $old_mol)          # we are not on the same molecule
    {
        unless ($first == 1)           # flush the previous ChLG (none before the first row)
        {
            my $scaffold_obj = Bio::Seq->new(
                -display_id => $scaffold_id,
                -seq        => $new_seq,
                -alphabet   => 'dna'
            );
            $seq_out->write_seq($scaffold_obj);
        }
        $scaffold_id = $columns[0];    # initialize the new ChLG
        $new_seq     = '';
        $first       = 0;
    }

    ###################################################################
    ####  Continue building ChLG: pad between adjacent scaffolds  #####
    ###################################################################
    if ($old_mol eq $new_mol)
    {
        $new_seq .= "n" x 100;         # 100 bp gap when staying on the same molecule
    }

    ###################################################################
    ####           append scaffold slice to the ChLG              #####
    ###################################################################
    my ($start, $stop) = ($columns[6], $columns[7]);
    my $id = $columns[5];
    $new_seq .= $db->seq("$id:$start,$stop");
    $old_mol = $new_mol;               # remember which molecule we last saw
}
close($chlg_from_scaffolds_in);

# Write the final ChLG. Guard against an AGP that contained no W rows at
# all, in which case there is nothing to flush.
if (defined $scaffold_id)
{
    my $scaffold_obj = Bio::Seq->new(
        -display_id => $scaffold_id,
        -seq        => $new_seq,
        -alphabet   => 'dna'
    );
    $seq_out->write_seq($scaffold_obj);
}
$seq_out->close();    # flush buffered output before re-reading the file below

# Re-read the intermediate FASTA and decorate every header line with the
# NCBI source modifiers, writing the final output file.
open(my $fasta_out, "<", $fasta_out_file) or die "can't open $fasta_out_file: $!";
my $new_fasta_out_file = "/homes/bioinfo/bionano/Trib_cast_0002_september_2014/ncbi/Tcas5.2_chlg.fasta";
open(my $new_fasta_out, ">", $new_fasta_out_file) or die "can't open $new_fasta_out_file: $!";
while (<$fasta_out>)
{
    if (/^>/)
    {
        chomp;
        print $new_fasta_out "$_ [organism=Tribolium castaneum] [strain=Georgia GA2] [country=USA: Kansas] [collection-date=Apr-2003]\n";
    }
    else
    {
        print $new_fasta_out $_;
    }
}
close($fasta_out);
close($new_fasta_out) or die "can't close $new_fasta_out_file: $!";
unlink $fasta_out_file;    # drop the intermediate pre-header file
kstatebioinfo/stitch_paper
manually_currated_output/make_chlg.pl
Perl
cc0-1.0
4,510
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

package Google::Ads::GoogleAds::V9::Resources::TargetSpend;

use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);

use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;

# Plain data object for the TargetSpend resource. Copies the two known
# fields from the caller-supplied hash ref, strips undefined ones, and
# blesses the result.
sub new {
  my ($class, $args) = @_;

  # Pick out only the fields this resource defines.
  my $self = {
    cpcBidCeilingMicros => $args->{cpcBidCeilingMicros},
    targetSpendMicros   => $args->{targetSpendMicros}};

  # Delete the unassigned fields in this object for a more concise JSON payload
  remove_unassigned_fields($self, $args);

  return bless $self, $class;
}

1;
googleads/google-ads-perl
lib/Google/Ads/GoogleAds/V9/Resources/TargetSpend.pm
Perl
apache-2.0
1,100
# OpenXPKI::Server::Workflow::Activity::Reports::CertExport::GetConfig
# Written by Oliver Welter for the OpenXPKI project 2013
# Copyright (c) 2013 by The OpenXPKI Project

package OpenXPKI::Server::Workflow::Activity::Reports::CertExport::GetConfig;

=head1 Name

OpenXPKI::Server::Workflow::Activity::Reports::CertExport::GetConfig

=head1 Description

Load the export config specified by the config_path read from the context.
Looks up the identifiers of the necessary encryption certificates.
The config is written to the workflow context.

=cut

use strict;
use base qw( OpenXPKI::Server::Workflow::Activity );

use OpenXPKI::Server::Context qw( CTX );
use OpenXPKI::Exception;
use OpenXPKI::Debug;
use English;
use OpenXPKI::Serialization::Simple;

use Data::Dumper;

sub execute {
    ##! 1: 'execute'
    my $self = shift;
    my $workflow = shift;
    my $context = $workflow->context();

    # Get config id from workflow
    my $config_path = $context->param('config_path');

    my $config = CTX('config');

    # Copy tunables from the config layer into the context, applying the
    # documented defaults when a key is absent or empty.
    $context->param( 'max_records' , $config->get( "$config_path.max_records" ) || 100 );
    $context->param( 'key_namespace', $config->get( "$config_path.key_namespace" ) || 'certificate.privatekey');
    $context->param( 'queue_namespace', $config->get( "$config_path.queue_namespace" ) || 'certificate.export.default');

    # Tempdir and Umask for export file
    $context->param( 'tmpfile_tmpdir' , $config->get( "$config_path.tmpdir" ) || '/var/tmp/' );

    my $umask = $config->get( "$config_path.umask" );
    if ($umask) {
        $context->param( 'tmpfile_umask' , $umask );
        # Only a four-character octal literal (leading zero) is accepted;
        # anything else aborts the activity with an exception.
        if ( $umask !~ /^0[0-7]{3}$/) {
            OpenXPKI::Exception->throw(
                message => 'I18N_OPENXPKI_SERVER_WORKFLOW_ACTIVITY_REPORTS_CERTEXPORT_GETCONFIG_WRONG_UMASK',
                params => { umask => $context->param( 'tmpfile_umask' ) }
            );
        }
    } else {
        # No umask configured: store an empty string so downstream code sees
        # an explicit (but false) value.
        $context->param( 'tmpfile_umask', '' );
    }

    # The encryption target is given by subject and realm (optional) or a list of ids
    my $enc_target = $config->get_hash( "$config_path.encryption_target" );

    # A subject is mandatory - abort if it is missing.
    if (!$enc_target || (!$enc_target->{subject})) {
        OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPKI_SERVER_WORKFLOW_ACTIVITY_REPORTS_CERTEXPORT_GETCONFIG_NO_SUBJECT'
        );
    }

    # Realm might be empty
    $enc_target->{realm} = CTX('session')->get_pki_realm() unless($enc_target->{realm});

    # Find all certificates matching subject/realm that are valid right now.
    my $enc_cert = CTX('api')->search_cert({
        SUBJECT => $enc_target->{subject},
        PKI_REALM => $enc_target->{realm},
        VALID_AT => time()
    });

    # Reduce the result set to the bare certificate identifiers.
    my $enc_cert_ids = [ map $_->{IDENTIFIER} , @{$enc_cert} ];

    ##! 8: 'Enc Target ' . Dumper $enc_cert_ids

    # Context values must be scalars, so serialize the identifier list.
    my $ser = OpenXPKI::Serialization::Simple->new();
    $context->param( 'enc_cert_ids' , $ser->serialize( $enc_cert_ids ) );

    ##! 16: 'Config vars : ' . Dumper $context->param()

    CTX('log')->log(
        MESSAGE => sprintf ("export config, max records: %01d, queue %s, no of enc targets: %01d ",
            $context->param( 'max_records' ), $context->param( 'queue_namespace' ), scalar @{$enc_cert}),
        PRIORITY => 'info',
        FACILITY => ['application', 'audit']
    );

    return 1;
}

1;

=head1 Name

OpenXPKI::Server::Workflow::Activity::Reports::CertExport::GetConfig

=head1 Description

Load the configuration from the config layer into the workflow context,
setting default values if parameter is missing. Takes the path to the
config from the context value "config_path".

=head1 Configuration

  myexport:
    tmpdir: /var/tmp
    umask: 0640
    max_records: 5
    key_namespace: certificate.privatekey
    queue_namespace: certificate.export.default
    encryption_target:
      subject: CN=Mailgateway,O=MyCompany,C=COM
      realm: server-ca

The keys max_records, key_namespace, queue_namespace are all optional, with
the values above used as default. The encryption_target hash is used to
search for certificates, subject is mandatory and can contain wildcards as
accepted by sql LIKE. The realm defaults to the sessions realm if not
given. The certificate_identifier of all certificates which are valid at
the time of the search are written to enc_cert_ids.

This method is a co-worker for OpenXPKI::Server::Workflow::Activity::Reports::CertExport::GenerateExportFile
where the params are explained in detail.
durko/openxpki
core/server/OpenXPKI/Server/Workflow/Activity/Reports/CertExport/GetConfig.pm
Perl
apache-2.0
4,511
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

package apps::video::zixi::restapi::mode::broadcasterinputusage;

use base qw(centreon::plugins::templates::counter);

use strict;
use warnings;
use Digest::MD5 qw(md5_hex);
use centreon::plugins::templates::catalog_functions qw(catalog_status_threshold);

# Render the human-readable status line for one input instance.
sub custom_status_output {
    my ($self, %options) = @_;

    my $msg = 'status : ' . $self->{result_values}->{status} . ' [error: ' . $self->{result_values}->{error} . ']';
    return $msg;
}

# Copy the per-instance status/source/error values collected by
# manage_selection into result_values for threshold evaluation.
sub custom_status_calc {
    my ($self, %options) = @_;

    $self->{result_values}->{status} = $options{new_datas}->{$self->{instance} . '_status'};
    $self->{result_values}->{source} = $options{new_datas}->{$self->{instance} . '_source'};
    $self->{result_values}->{error} = $options{new_datas}->{$self->{instance} . '_error'};
    return 0;
}

# Declare the counter layout: one instanced group 'input' with a status
# check plus per-second in/out traffic counters (diff of raw byte counts).
sub set_counters {
    my ($self, %options) = @_;

    $self->{maps_counters_type} = [
        { name => 'input', type => 1, cb_prefix_output => 'prefix_input_output', message_multiple => 'All inputs are ok', skipped_code => { -11 => 1 } },
    ];

    $self->{maps_counters}->{input} = [
        { label => 'status', threshold => 0, set => {
                key_values => [ { name => 'status' }, { name => 'source' }, { name => 'error' } ],
                closure_custom_calc => $self->can('custom_status_calc'),
                closure_custom_output => $self->can('custom_status_output'),
                closure_custom_perfdata => sub { return 0; },
                closure_custom_threshold_check => \&catalog_status_threshold,
            }
        },
        { label => 'traffic-in', set => {
                key_values => [ { name => 'traffic_in', diff => 1 }, { name => 'source' } ],
                per_second => 1, output_change_bytes => 2,
                output_template => 'Traffic In : %s %s/s',
                perfdatas => [
                    { label => 'traffic_in', value => 'traffic_in_per_second', template => '%.2f',
                      min => 0, unit => 'b/s', label_extra_instance => 1, instance_use => 'source_absolute' },
                ],
            }
        },
        { label => 'traffic-out', set => {
                key_values => [ { name => 'traffic_out', diff => 1 }, { name => 'source' } ],
                per_second => 1, output_change_bytes => 2,
                output_template => 'Traffic Out : %s %s/s',
                perfdatas => [
                    { label => 'traffic_out', value => 'traffic_out_per_second', template => '%.2f',
                      min => 0, unit => 'b/s', label_extra_instance => 1, instance_use => 'source_absolute' },
                ],
            }
        },
    ];
}

# Constructor: register the mode's command-line options. statefile => 1
# because the traffic counters are diff-based and need a previous sample.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options, statefile => 1);
    bless $self, $class;

    $options{options}->add_options(arguments =>
                                {
                                  "filter-source:s"     => { name => 'filter_source' },
                                  "warning-status:s"    => { name => 'warning_status' },
                                  "critical-status:s"   => { name => 'critical_status', default => '%{status} !~ /Connecting|Connected/i || %{error} !~ /none/i' },
                                });

    return $self;
}

# Expand %{...} macros inside the status threshold expressions.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::check_options(%options);

    $self->change_macros(macros => ['warning_status', 'critical_status']);
}

# Prefix printed before each input's output line.
sub prefix_input_output {
    my ($self, %options) = @_;

    return "Input '" . $options{instance_value}->{source} . "' ";
}

# Map of Zixi numeric error codes to readable error labels.
my %mapping_input_status = (0 => 'none', 1 => 'unknown', 2 => 'resolve error', 3 => 'timeout', 4 => 'network error',
    5 => 'protocol error', 6 => 'server is full', 7 => 'connection rejected', 8 => 'authentication error',
    9 => 'license error', 10 => 'end of file', 11 => 'flood error', 12 => 'redirect', 13 => 'stopped',
    14 => 'limit', 15 => 'not found', 16 => 'not supported', 17 => 'local file system error',
    18 => 'remote file system error', 19 => 'stream replaced', 20 => 'p2p abort', 21 => 'compression error',
    22 => 'source collision error', 23 => 'adaptive', 24 => 'tcp connection error', 25 => 'rtmp connection error',
    26 => 'rtmp handshake error', 27 => 'tcp connection closed', 28 => 'rtmp stream error', 29 => 'rtmp publish error',
    30 => 'rtmp stream closed', 31 => 'rtmp play error', 32 => 'rtmp protocol error', 33 => 'rtmp analyze timeout',
    34 => 'busy', 35 => 'encryption error', 36 => 'transcoder error', 37 => 'error in invocation a transcoder subprocess',
    38 => 'error communicating with a transcoder subprocess', 39 => 'error in RTMP Akamai authentication',
    40 => 'maximum outputs for the source reached', 41 => 'generic error', 42 => 'zero bitrate warning',
    43 => 'low bitrate warning', 44 => 'multicast join failed',
);

# Fetch the stream list from the broadcaster REST API and build the per-id
# input table (status, mapped error label, traffic converted bytes -> bits).
sub manage_selection {
    my ($self, %options) = @_;

    $self->{input} = {};
    my $result = $options{custom}->get(path => '/zixi/streams.json?complete=1');
    foreach my $entry (@{$result->{streams}}) {
        # Skip sources not matching the --filter-source regexp.
        if (defined($self->{option_results}->{filter_source}) && $self->{option_results}->{filter_source} ne '' &&
            $entry->{source} !~ /$self->{option_results}->{filter_source}/) {
            $self->{output}->output_add(long_msg => "skipping '" . $entry->{source} . "': no matching filter.", debug => 1);
            next;
        }

        $self->{input}->{$entry->{id}} = {
            source => $entry->{source},
            status => $entry->{status},
            error => $mapping_input_status{$entry->{error_code}},
            traffic_in => $entry->{stats}->{net_recv}->{bytes} * 8,
            traffic_out => $entry->{stats}->{net_send}->{bytes} * 8,
        };
    }

    if (scalar(keys %{$self->{input}}) <= 0) {
        $self->{output}->add_option_msg(short_msg => "No input found.");
        $self->{output}->option_exit();
    }

    # Statefile key for the diff-based counters.
    # NOTE(review): 'filter_name' is never registered as an option in this
    # mode (only filter-source is), so the last component is presumably
    # always md5_hex('all') - looks like copy/paste; verify before changing.
    $self->{cache_name} = "zixi_" . $self->{mode} . '_' . $options{custom}->{hostname} . '_' . $options{custom}->{port} . '_' .
        (defined($self->{option_results}->{filter_counters}) ? md5_hex($self->{option_results}->{filter_counters}) : md5_hex('all')) . '_' .
        (defined($self->{option_results}->{filter_name}) ? md5_hex($self->{option_results}->{filter_name}) : md5_hex('all'));
}

1;

__END__

=head1 MODE

Check input usage.

=over 8

=item B<--filter-source>

Filter source (can be a regexp).

=item B<--filter-counters>

Only display some counters (regexp can be used).
Example: --filter-counters='^status$'

=item B<--warning-*>

Threshold warning.
Can be: 'traffic-in', 'traffic-out'.

=item B<--critical-*>

Threshold critical.
Can be: 'traffic-in', 'traffic-out'.

=item B<--warning-status>

Set warning threshold for status (Default: -)
Can used special variables like: %{source}, %{status}, %{error}.

=item B<--critical-status>

Set critical threshold for status (Default: '%{status} !~ /Connecting|Connected/i || %{error} !~ /none/i').
Can used special variables like: %{source}, %{status}, %{error}.

=back

=cut
Sims24/centreon-plugins
apps/video/zixi/restapi/mode/broadcasterinputusage.pm
Perl
apache-2.0
7,978
package Paws::Lightsail::RebootInstanceResult;
  use Moose;
  # Operations triggered by the RebootInstance call; serialized on the wire
  # under the name "operations".
  has Operations => (is => 'ro', isa => 'ArrayRef[Paws::Lightsail::Operation]', traits => ['NameInRequest'], request_name => 'operations' );

  # Request id attached by the service response; internal bookkeeping field.
  has _request_id => (is => 'ro', isa => 'Str');
### main pod documentation begin ###

=head1 NAME

Paws::Lightsail::RebootInstanceResult

=head1 ATTRIBUTES


=head2 Operations => ArrayRef[L<Paws::Lightsail::Operation>]

An array of key-value pairs containing information about the request
operation.


=head2 _request_id => Str


=cut

1;
ioanrogers/aws-sdk-perl
auto-lib/Paws/Lightsail/RebootInstanceResult.pm
Perl
apache-2.0
543
package API::ToExtension;

#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
#
use UI::Utils;
use Mojo::Base 'Mojolicious::Controller';
use Data::Dumper;
use POSIX qw(strftime);
use Time::Local;
use Utils::Helper::ResponseHelper;

# GET handler: lists all loaded Traffic Ops extensions.
# Check extensions come from the to_extension table (open slots excluded);
# config extensions are discovered via 'location' parameters attached to
# to_ext_*.config files, with metadata pulled from each extension's ::info sub.
sub index {
	my $self = shift;
	my @data;

	my $rs = $self->db->resultset('ToExtension')->search( undef, { prefetch => ['type'] } );
	while ( my $row = $rs->next ) {
		# Open slots are placeholders, not real extensions.
		next if $row->type->name eq 'CHECK_EXTENSION_OPEN_SLOT';
		push(
			@data, {
				id                     => $row->id,
				name                   => $row->name,
				version                => $row->version,
				info_url               => $row->info_url,
				script_file            => $row->script_file,
				isactive               => $row->isactive,
				additional_config_json => $row->additional_config_json,
				description            => $row->description,
				servercheck_short_name => $row->servercheck_short_name,

				# servercheck_column_name => $row->servercheck_column_name, # Hide col name from the extension developer
				type => $row->type->name
			}
		);
	}

	# Config extensions are driven by the parameter/profile setup, much like the
	# normal config files, using the name => 'location' parameter, and we use the
	# id of the parameter as the id of the extension.
	$rs = $self->db->resultset('Parameter')->search( { name => 'location', config_file => { -like => 'to_ext_%.config' } } );
	while ( my $row = $rs->next ) {
		my $file = $row->config_file;
		my $subroutine =
			$self->db->resultset('ProfileParameter')
			->search( { -and => [ 'parameter.config_file' => $file, 'parameter.name' => 'SubRoutine' ] }, { prefetch => [ 'parameter', 'profile' ] } )
			->get_column('parameter.value')->single();

		# Swap the configured subroutine's last component for ::info to reach
		# the extension's metadata sub, then call it by symbolic name.
		$subroutine =~ s/::[^:]+$/::info/;

		# NOTE(review): logged at error level although this is not an error —
		# looks like leftover debugging; confirm before downgrading to debug.
		$self->app->log->error( "ToExtDotInfo == " . $subroutine );
		my $info = &{ \&{$subroutine} }();
		push(
			@data, {
				id                     => $row->id,
				name                   => $info->{name},
				version                => $info->{version},
				info_url               => $info->{info_url},
				script_file            => $info->{script_file},
				isactive               => "n/a",
				additional_config_json => "n/a",
				description            => $info->{description},
				servercheck_short_name => "n/a",
				type                   => "CONFIG_EXTENSION",
			}
		);
	}

	# Historical datasource-extension discovery, kept for reference:
	# $rs = $self->db->resultset('Parameter')->search( { name => 'datasource', config_file => 'global' } );
	# while ( my $row = $rs->next ) {
	#     my $source;
	#     $source = $row->value;
	#     my $ext_hash_ref = &Extensions::DatasourceList::hash_ref();
	#     my $subroutine   = $ext_hash_ref->{$source};
	#     if ( !defined($subroutine) ) {
	#         $self->app->log->error( "No subroutine found for: " . $source );
	#     }
	#     my $isub;
	#     ( $isub = $subroutine ) =~ s/::[^:]+$/::info/;
	#     my $info = &{ \&{$isub} }();
	#     print Dumper($info);
	#     push(
	#         @data, {
	#             id                     => $row->id,
	#             name                   => $info->{name},
	#             version                => $info->{version},
	#             info_url               => $info->{info_url},
	#             script_file            => $info->{script_file},
	#             isactive               => "n/a",
	#             additional_config_json => "n/a",
	#             description            => $info->{description},
	#             servercheck_short_name => "n/a",
	#             type                   => "DATASOURCE_EXTENSION",
	#         }
	#     );
	# }

	$self->success( \@data );
}

# POST handler: registers a new extension ("update" creates when no id is
# supplied; true updates are not supported — delete and re-add instead).
# Restricted to the special "extension" user. Check extensions are written
# into a pre-allocated open slot; other types get a brand-new row.
sub update {
	my $self   = shift;
	my $new_id = 1;
	my $jdata  = $self->req->json;

	if ( $self->current_user()->{username} ne "extension" ) {
		return $self->alert( { error => "Invalid user for this API. Only the \"extension\" user can use this." } );
	}

	if ( defined( $jdata->{id} ) ) {
		return $self->alert( { error => "ToExtension update not supported; delete and re-add." } );
	}

	# We are creating.
	my $type_id = &type_id( $self, $jdata->{type} );
	if ( !defined($type_id)
		|| !( $jdata->{type} =~ /^CHECK_EXTENSION_/ || $jdata->{type} =~ /^CONFIG_EXTENSION$/ || $jdata->{type} =~ /^STATISTIC_EXTENSION$/ ) )
	{
		return $self->alert( { error => "Invalid Extension type: " . $jdata->{type} } );
	}

	if ( $jdata->{type} =~ /CHECK_EXTENSION_/ ) {
		# Name and servercheck short name must be unique among loaded checks.
		foreach my $f (qw/name servercheck_short_name/) {
			my $exists = $self->db->resultset('ToExtension')->search( { $f => $jdata->{$f} } )->single();
			if ( defined($exists) ) {
				return $self->alert( { error => "A Check extension is already loaded with " . $f . " = " . $jdata->{$f} } );
			}
		}

		# Check extensions go in an open slot in the extensions table; first
		# check if there's an open slot.
		my $open_type = &type_id( $self, 'CHECK_EXTENSION_OPEN_SLOT' );
		my $slot = $self->db->resultset('ToExtension')->search( { type => $open_type }, { rows => 1 } )->single();
		if ( !defined($slot) ) {
			return $self->alert( { error => "No open slots left for checks, delete one first." } );
		}
		$slot->update(
			{
				name                   => $jdata->{name},
				version                => $jdata->{version},
				info_url               => $jdata->{info_url},
				script_file            => $jdata->{script_file},
				isactive               => $jdata->{isactive},
				additional_config_json => $jdata->{additional_config_json},
				description            => $jdata->{description},
				servercheck_short_name => $jdata->{servercheck_short_name},
				type                   => $type_id
			}
		);

		# Reset this check's column to 0 for every server.
		my $clear = $self->db->resultset('Servercheck')->search( {} );    # all
		$clear->update( { '`' . $slot->servercheck_column_name . '`' => 0 } );
		return $self->success_message( "Check Extension Loaded.", { id => $slot->id } );
	}

	# Should not get here for CHECK_EXTENSION_* type, already returned above.
	# BUG FIX: DBIx::Class create() takes a single hashref of column values;
	# the old code passed a flat key/value list (which croaks at runtime) and
	# then called insert() on the row create() had already inserted.
	my $insert = $self->db->resultset('ToExtension')->create(
		{
			name                    => $jdata->{name},
			version                 => $jdata->{version},
			info_url                => $jdata->{info_url},
			script_file             => $jdata->{script_file},
			isactive                => $jdata->{isactive},
			additional_config_json  => $jdata->{additional_config_json},
			description             => $jdata->{description},
			servercheck_short_name  => $jdata->{servercheck_short_name},
			servercheck_column_name => $jdata->{servercheck_column_name},
			type                    => $type_id
		}
	);
	$new_id = $insert->id;
	if ( !defined($new_id) ) {
		return $self->alert( { error => "Unknown database error when inserting Extension." } );
	}
	return $self->success_message( "Extension loaded.", { id => $new_id } );
}

# DELETE handler: removes an extension by id. Restricted to the "extension"
# user. Check extensions are recycled back into an open slot (the row is
# blanked and retyped) rather than deleted; all other types are deleted.
sub delete {
	my $self = shift;
	my $id   = $self->param('id');

	if ( $self->current_user()->{username} ne "extension" ) {
		return $self->alert( { error => "Invalid user for this API. Only the \"extension\" user can use this." } );
	}
	if ( !defined($id) ) {
		return $self->alert( { error => "ToExtension delete requires an id." } );
	}

	my $delete = $self->db->resultset('ToExtension')->search( { id => $id } )->single();
	if ( !defined($delete) ) {
		return $self->alert( { error => "ToExtension with id " . $id . " not found." } );
	}
	if ( $delete->type->name =~ /^CHECK_EXTENSION_/ ) {
		# Recycle the slot: blank every field and mark it open again.
		my $open_type_id = &type_id( $self, 'CHECK_EXTENSION_OPEN_SLOT' );
		$delete->update(
			{
				name                   => 'OPEN',
				version                => '0',
				info_url               => '',
				script_file            => '',
				isactive               => '0',
				additional_config_json => '',
				servercheck_short_name => '',
				type                   => $open_type_id,
			}
		);
	}
	else {
		$delete->delete();
	}
	return $self->success_message("Extension deleted.");
}

1;
knutsel/traffic_control-1
traffic_ops/app/lib/API/ToExtension.pm
Perl
apache-2.0
8,427
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Enum container for the Google Ads API v9 FlightPlaceholderField values
# (fields of flight dynamic-remarketing feed items). Generated-SDK style:
# each constant's value is its own name, i.e. the string the API expects.
package Google::Ads::GoogleAds::V9::Enums::FlightPlaceholderFieldEnum;

use strict;
use warnings;

# Const::Exporter's 'enums' tag makes each name below an exportable constant.
use Const::Exporter enums => [
  UNSPECIFIED             => "UNSPECIFIED",
  UNKNOWN                 => "UNKNOWN",
  DESTINATION_ID          => "DESTINATION_ID",
  ORIGIN_ID               => "ORIGIN_ID",
  FLIGHT_DESCRIPTION      => "FLIGHT_DESCRIPTION",
  ORIGIN_NAME             => "ORIGIN_NAME",
  DESTINATION_NAME        => "DESTINATION_NAME",
  FLIGHT_PRICE            => "FLIGHT_PRICE",
  FORMATTED_PRICE         => "FORMATTED_PRICE",
  FLIGHT_SALE_PRICE       => "FLIGHT_SALE_PRICE",
  FORMATTED_SALE_PRICE    => "FORMATTED_SALE_PRICE",
  IMAGE_URL               => "IMAGE_URL",
  FINAL_URLS              => "FINAL_URLS",
  FINAL_MOBILE_URLS       => "FINAL_MOBILE_URLS",
  TRACKING_URL            => "TRACKING_URL",
  ANDROID_APP_LINK        => "ANDROID_APP_LINK",
  SIMILAR_DESTINATION_IDS => "SIMILAR_DESTINATION_IDS",
  IOS_APP_LINK            => "IOS_APP_LINK",
  IOS_APP_STORE_ID        => "IOS_APP_STORE_ID"
];

1;
googleads/google-ads-perl
lib/Google/Ads/GoogleAds/V9/Enums/FlightPlaceholderFieldEnum.pm
Perl
apache-2.0
1,604
#! /usr/bin/perl # Copyright 2006,2016 BitMover, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. my $ignore = 0; my $sa = ""; my @args = (); my $one = ""; my $two = ""; my $three = ""; my $format = 1; my $formatcountdown = -1; # XXX For now, just process all files as a single stream open(OUT, ">/tmp/deroff_$$") or die "Can't open /tmp/deroff_$$ for writing"; while (<>) { # Ignore .ig and .de sections if (/^\.\s*\./) { # ".." ends a .ig or .de $ignore = 0; next; } elsif ($ignore) { next; } elsif (/^\.\s*ig/ or /^\.\s*de/) { $ignore = 1; next; } # Handle see alsos unless (s/^\.\s*SA\s*//) { if ($sa) { # we've been collecting see alsos fmt("$sa."); # print 'em out $sa = ""; } } else { # found a see also--collect it chomp; if ($sa) { $sa = "$sa, bk $_"; } else { $sa = "bk $_"; } next; } # Random prettification s/\\\|//g; s/\\-/-/g; s/\\</</g; s/\\>/>/g; s/\\\*</</g; s/\\\*>/>/g; s/\\\*\[<]/</g; s/\\\*\[>]/>/g; s/\\er/\\r/g; s/\\en/\\n/g; s/\\\*\(lq/\"/g; s/\\\*\(rq/\"/g; s/\\fB//g; s/\\fI//g; s/\\fP//g; s/\\fR//g; s/\\f\(CB//g; s/\\f\[CB]//g; s/\\f\(CW//g; s/\\f\[CW]//g; s/\\s+[0-9]//g; s/\\s-[0-9]//g; s/\\s0//g; s/\\\(em/--/g; s/\\\*\(BK/BitKeeper/g; s/\\\*\[BK]/BitKeeper/g; s/\\\*\(BM/BitMover/g; s/\\\*\[BM]/BitMover/g; s/\\\*\[ATT]/AT&T SCCS/g; s/\\\*\(UN/UNIX/g; s/\\\*\[UN]/UNIX/g; s/\\\*\[UNIX]/UNIX/g; s/\\\*\(R/RCS/g; s/\\\*\[R]/RCS/g; s/\\\*\(SC/SCCS/g; s/\\\*\[SC]/SCCS/g; s/\\\*\(CV/CVS/g; s/\\\*\[CV]/CVS/g; # Strip these lines completely if (/^\.\\"/ or /^\.\s*Id/ or 
/^\.\s*TH/ or /^\.\s*\}/ or /^\.\s*_SA/ or # Huh? /^\.\s*ad/ or /^\.\s*box/ or /^\.\s*ce/ or /^\.\s*ds/ or /^\.\s*fi/ or /^\.\s*ft/ or /^\.\s*hy/ or /^\.\s*if/ or /^\.\s*in/ or /^\.\s*ne/ or /^\.\s*nh/ or /^\.\s*nr/ or /^\.\s*ns/ or /^\.\s*so/ or /^\.\s*sp/ or /^\.\s*ta/ or /^\.\s*ti/ or /^\.\s*xx/) { next; } # Replace these with blank lines if (/^\.\s*LP/ or /^\.\s*PP/ or /^\.\s*RS/ or /^\.\s*RE/ or /^\.\s*SP/ or /^\.\s*Sp/ or /^\.\s*br/ or /^\.\s*head/) { $_ = ""; &flush; } # Don't format these blocks if (/^\.\s*CS/ or /^\.\s*DS/ or /^\.\s*FS/ or /^\.\s*GS/ or /^\.\s*TS/ or /^\.\s*WS/ or /^\.\s*nf/) { $_ = "\n"; $format = 0; &flush; } # Start formatting again if (/^\.\s*CE/ or /^\.\s*DE/ or /^\.\s*FE/ or /^\.\s*GE/ or /^\.\s*TE/ or /^\.\s*WE/ or /^\.\s*fi/) { $_ = ""; $format = 1; } # Strip macro, smoosh args, and add '\c' continuation if (s/^\.\s*Bc\s*// or s/^\.\s*Ic\s*//) { ($one, $two) = getargs($_); $_ = "$one$two\\c"; } # Strip macro, smoosh args, and add '\c' continuation if (s/^\.\s*ARGc\s*//) { ($one, $two) = getargs($_); $_ = "<$one>$two\\c"; } # Strip macro and smoosh args together if (s/^\.\s*BI\s*// or s/^\.\s*BR\s*// or s/^\.\s*IB\s*// or s/^\.\s*IP\s*// or s/^\.\s*IR\s*// or s/^\.\s*CR\s*// or s/^\.\s*RB\s*// or s/^\.\s*RI\s*// or s/^\.\s*V\s*//) { ($one, $two) = getargs($_); $_ = "$one$two\n"; } # Strip macro and smoosh args together if (s/^\.\s*ARG\s*//) { ($one, $two) = getargs($_); $_ = "<$one>$two\n"; } # Strip macro, quote first arg, and smoosh if (s/^\.\s*QI\s*// or s/^\.\s*QR\s*//) { ($one, $two) = getargs($_); $_ = qq("$one"$two\n); } # Strip macro, smoosh args, and quote if (s/^\.\s*Qreq\s*//) { ($one, $two) = getargs($_); $_ = qq("$one<$two>"\n); } # OPT* macros without brackets if (s/^\.\s*OPTequal\s*//) { ($one, $two, $three) = getargs($_); $_ = "$one<$two>=<$three>\n"; } if (s/^\.\s*OPTopt\s*//) { ($one, $two) = getargs($_); $_ = "$one\[<$two>]\n"; } if (s/^\.\s*OPTreq\s*//) { ($one, $two, $three) = getargs($_); $_ = 
"$one<$two>$three\n"; } # Format with brackets and add '\c' continuation if (s/^\.\s*\[ARGc]\s*//) { ($one, $two) = getargs($_); $_="\[<$one>]$two\\c"; } # Format these with brackets if (s/^\.\s*\[ARG]\s*//) { ($one, $two) = getargs($_); $_ = "\[<$one>$two]\n"; } s/^\.\s*\[B]\s*(.*)/[$1]/; # Format these with brackets, too if (s/^\.\s*\[OPTequal]\s*//) { ($one, $two, $three) = getargs($_); $_ = "\[$one<$two>=<$three>]\n"; } if (s/^\.\s*\[OPTopt]\s*//) { ($one, $two) = getargs($_); $_ = "\[$one\[<$two>]]\n"; } if (s/^\.\s*\[OPTreq]\s*//) { ($one, $two, $three) = getargs($_); $_ = "\[$one<$two>$three]\n"; } # Expand these s/^\.\s*BKARGS\s*/[file ... | -]/; s/^\.\s*FILESreq\s*/file [file ...]/; s/^\.\s*FILES\s*/[file ...]/; # Bullet with blank line if (/^\.\s*LI\s*(.*)/) { $_ = "=> "; &flush; print OUT "\n"; } # Bullet with no blank line if (/^\.\s*li\s*(.*)/) { $_ = "=> "; &flush; } # Tagged paragraphs if (/^\.\s*TP/ or /^\.\s*tp/) { $_ = "\n"; # Leading newline $format = 0; $formatcountdown = 1; &flush; } # One-off tagged paragraph if (s/^\.\s*EV\s*//) { $_ = "\n$_"; # Leading newline $format = 0; $formatcountdown = 0; &flush; } # Just strip the macro--leave the rest of the line intact s/^\.\s*B\s*//; s/^\.\s*C\s*//; s/^\.\s*I\s*//; s/^\.\s*SB\s*//; s/^\.\s*SM\s*//; # Strip macro and quote s/^\.\s*Q\s*(.*)/"$1"/; # Strip the macro and add a leading newline for headings if (s/^\.\s*SH\s*// or s/^\.\s*SS\s*//) { s/\"//g; # Strip quotes in headings $_ = "\n$_"; $format = 0; $formatcountdown = 0; &flush; } # Strip remaining non-breaking spaces (XXX this could be better) s/\\ / /g; # Output if ($format) { fmt($_); } else { print OUT; unless ($formatcountdown) { $format = 1; $formatcountdown = -1; } elsif ($formatcountdown > 0) { $formatcountdown--; } } } # Finish up &flush; close OUT; # Strip duplicate blank lines open(IN, "/tmp/deroff_$$") or die "Can't open /tmp/deroff_$$ for reading"; my $blank = 0; while (<IN>) { unless (/^$/) { if ($blank) { $blank = 0; } } elsif 
($blank) { next; } else { $blank = 1; } print; } close IN; unlink "/tmp/deroff_$$"; # Take a string and return an array of macro arguments; handle quoting sub getargs { my $str = shift; my @chars = split(//, $str); my @args = (); my $c = ""; my $arg = ""; my $quote = 0; my $bs = 0; my $i = 0; foreach $c (@chars) { if ($bs) { $bs = 0; if ($c =~ /\s/) { $arg = "$arg$c"; # Space is '\'-quoted next; } else { $arg = "$arg\\"; } # Put '\' back } if ($c eq '"') { if ($quote) { $args[$i++] = $arg; $arg = ""; $quote = 0; } else { $quote = 1; } next; } elsif ($quote) { $arg = "$arg$c"; } elsif ($c =~ /\s/) { if ($arg) { $args[$i++] = $arg; $arg = ""; } } elsif ($c eq '\\') { $bs = 1; } # Backslash else { $arg = "$arg$c"; } } if ($arg) { $args[$i++] = $arg; $arg = ""; } return (@args); } my @line = (); sub fmt { my $str = shift; if ($str =~ /^$/) { # Blank line forces break &flush; print OUT "\n"; return; } my @words = split(/\s+/, $str); my $w = ""; my $i; for ($i = 0; $i <= $#words; ++$i) { $w = $words[$i]; push(@line, $w); if (($w =~ /[\.\!\?]$/) && &should_break($w)) { &flush; next; } if ($w =~ /;$/) { $next = $words[$i + 1]; unless ($i < $#words && ($next eq "or" || $next eq "and")) { &flush; } } } } # Uses global @line array sub flush { return if ($#line == -1); my($w, $len, $cont); $len = 0; $cont = 0; foreach $w (@line) { if (($len > 0) && ($len + length($w) > 65)) { print OUT "\n"; $len = 0; } if ($len) { unless ($cont) { print OUT " "; $len++; } else { $cont = 0; } } if ($w =~ s/\\c$//) { $cont = 1; } # '\c' continuation print OUT $w; $len += length($w); } print OUT "\n" if $len; @line = (); } # Don't break on "K." in "Donald K. Someone". # Don't break on Mr. | Ms. # Don't break on "..." sub should_break { my($w) = $_[0]; return 0 if $w =~ /^.\.$/; return 0 if $w =~ /^\.\.\.$/; return 0 if $w =~ /^mr\.$/i; return 0 if $w =~ /^ms\.$/i; return 1; }
bitkeeper-scm/bitkeeper
src/deroff.pl
Perl
apache-2.0
8,681
=head1 LICENSE

Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

=cut

# Static option lists for the File Chameleon tool's web form.
# Each sub returns an arrayref of option hashrefs (value/caption plus
# optional example, checked/selected flags) used to render form controls.
package EnsEMBL::Web::FileChameleonConstants;

use strict;
use warnings;

use Exporter qw(import);

our @EXPORT_OK = qw(INPUT_FORMATS STYLE_FORMATS CONVERSION_FORMATS);

# Supported input file formats; GFF3 is pre-checked as the default.
sub INPUT_FORMATS {
  return [
    { 'value' => 'gff3',  'caption' => 'GFF3',  'example' => qq(), 'checked' => "checked" },
    { 'value' => 'gtf',   'caption' => 'GTF',   'example' => qq() },
    { 'value' => 'fasta', 'caption' => 'FASTA', 'example' => qq() },
  ];
}

# Target tools for format conversion.
# BUG FIX: the original list contained the 'bwa' entry twice; the duplicate
# has been removed.
# NOTE(review): value 'start' vs caption 'STAR' looks like a typo for 'star';
# left unchanged because downstream code may key on the literal 'start'.
sub CONVERSION_FORMATS {
  return [
    { 'value' => 'bowtie', 'caption' => 'BOWTIE',            'example' => qq() },
    { 'value' => 'bwa',    'caption' => 'BWA',               'example' => qq() },
    { 'value' => 'bbmap',  'caption' => 'BBMap',             'example' => qq() },
    { 'value' => 'start',  'caption' => 'STAR',              'example' => qq() },
    { 'value' => 'custom', 'caption' => 'Customise options', 'example' => qq() },
  ];
}

# Chromosome-naming style conversions; Ensembl style is pre-selected.
sub STYLE_FORMATS {
  return [
    { 'value' => 'ucsc_to_ensembl', 'caption' => 'Ensembl style', 'example' => qq(), 'selected' => 'selected' },
    { 'value' => 'ensembl_to_ucsc', 'caption' => 'UCSC style',    'example' => qq() },
  ];
}

1;
Ensembl/public-plugins
tools/modules/EnsEMBL/Web/FileChameleonConstants.pm
Perl
apache-2.0
1,873
package Paws::CodeBuild; use Moose; sub service { 'codebuild' } sub version { '2016-10-06' } sub target_prefix { 'CodeBuild_20161006' } sub json_version { "1.1" } has max_attempts => (is => 'ro', isa => 'Int', default => 5); has retry => (is => 'ro', isa => 'HashRef', default => sub { { base => 'rand', type => 'exponential', growth_factor => 2 } }); has retriables => (is => 'ro', isa => 'ArrayRef', default => sub { [ ] }); with 'Paws::API::Caller', 'Paws::API::EndpointResolver', 'Paws::Net::V4Signature', 'Paws::Net::JsonCaller', 'Paws::Net::JsonResponse'; sub BatchDeleteBuilds { my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::BatchDeleteBuilds', @_); return $self->caller->do_call($self, $call_object); } sub BatchGetBuilds { my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::BatchGetBuilds', @_); return $self->caller->do_call($self, $call_object); } sub BatchGetProjects { my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::BatchGetProjects', @_); return $self->caller->do_call($self, $call_object); } sub CreateProject { my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::CreateProject', @_); return $self->caller->do_call($self, $call_object); } sub CreateWebhook { my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::CreateWebhook', @_); return $self->caller->do_call($self, $call_object); } sub DeleteProject { my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::DeleteProject', @_); return $self->caller->do_call($self, $call_object); } sub DeleteWebhook { my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::DeleteWebhook', @_); return $self->caller->do_call($self, $call_object); } sub ListBuilds { my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::ListBuilds', @_); return $self->caller->do_call($self, $call_object); } sub ListBuildsForProject { 
my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::ListBuildsForProject', @_); return $self->caller->do_call($self, $call_object); } sub ListCuratedEnvironmentImages { my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::ListCuratedEnvironmentImages', @_); return $self->caller->do_call($self, $call_object); } sub ListProjects { my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::ListProjects', @_); return $self->caller->do_call($self, $call_object); } sub StartBuild { my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::StartBuild', @_); return $self->caller->do_call($self, $call_object); } sub StopBuild { my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::StopBuild', @_); return $self->caller->do_call($self, $call_object); } sub UpdateProject { my $self = shift; my $call_object = $self->new_with_coercions('Paws::CodeBuild::UpdateProject', @_); return $self->caller->do_call($self, $call_object); } sub operations { qw/BatchDeleteBuilds BatchGetBuilds BatchGetProjects CreateProject CreateWebhook DeleteProject DeleteWebhook ListBuilds ListBuildsForProject ListCuratedEnvironmentImages ListProjects StartBuild StopBuild UpdateProject / } 1; ### main pod documentation begin ### =head1 NAME Paws::CodeBuild - Perl Interface to AWS AWS CodeBuild =head1 SYNOPSIS use Paws; my $obj = Paws->service('CodeBuild'); my $res = $obj->Method( Arg1 => $val1, Arg2 => [ 'V1', 'V2' ], # if Arg3 is an object, the HashRef will be used as arguments to the constructor # of the arguments type Arg3 => { Att1 => 'Val1' }, # if Arg4 is an array of objects, the HashRefs will be passed as arguments to # the constructor of the arguments type Arg4 => [ { Att1 => 'Val1' }, { Att1 => 'Val2' } ], ); =head1 DESCRIPTION AWS CodeBuild AWS CodeBuild is a fully managed build service in the cloud. 
AWS CodeBuild compiles your source code, runs unit tests, and produces artifacts that are ready to deploy. AWS CodeBuild eliminates the need to provision, manage, and scale your own build servers. It provides prepackaged build environments for the most popular programming languages and build tools, such as Apache Maven, Gradle, and more. You can also fully customize build environments in AWS CodeBuild to use your own build tools. AWS CodeBuild scales automatically to meet peak build requests, and you pay only for the build time you consume. For more information about AWS CodeBuild, see the I<AWS CodeBuild User Guide>. AWS CodeBuild supports these operations: =over =item * C<BatchDeleteBuilds>: Deletes one or more builds. =item * C<BatchGetProjects>: Gets information about one or more build projects. A I<build project> defines how AWS CodeBuild will run a build. This includes information such as where to get the source code to build, the build environment to use, the build commands to run, and where to store the build output. A I<build environment> represents a combination of operating system, programming language runtime, and tools that AWS CodeBuild will use to run a build. Also, you can add tags to build projects to help manage your resources and costs. =item * C<CreateProject>: Creates a build project. =item * C<CreateWebhook>: For an existing AWS CodeBuild build project that has its source code stored in a GitHub repository, enables AWS CodeBuild to begin automatically rebuilding the source code every time a code change is pushed to the repository. =item * C<DeleteProject>: Deletes a build project. =item * C<DeleteWebhook>: For an existing AWS CodeBuild build project that has its source code stored in a GitHub repository, stops AWS CodeBuild from automatically rebuilding the source code every time a code change is pushed to the repository. 
=item * C<ListProjects>: Gets a list of build project names, with each build project name representing a single build project. =item * C<UpdateProject>: Changes the settings of an existing build project. =item * C<BatchGetBuilds>: Gets information about one or more builds. =item * C<ListBuilds>: Gets a list of build IDs, with each build ID representing a single build. =item * C<ListBuildsForProject>: Gets a list of build IDs for the specified build project, with each build ID representing a single build. =item * C<StartBuild>: Starts running a build. =item * C<StopBuild>: Attempts to stop running a build. =item * C<ListCuratedEnvironmentImages>: Gets information about Docker images that are managed by AWS CodeBuild. =back =head1 METHODS =head2 BatchDeleteBuilds(Ids => ArrayRef[Str|Undef]) Each argument is described in detail in: L<Paws::CodeBuild::BatchDeleteBuilds> Returns: a L<Paws::CodeBuild::BatchDeleteBuildsOutput> instance Deletes one or more builds. =head2 BatchGetBuilds(Ids => ArrayRef[Str|Undef]) Each argument is described in detail in: L<Paws::CodeBuild::BatchGetBuilds> Returns: a L<Paws::CodeBuild::BatchGetBuildsOutput> instance Gets information about builds. =head2 BatchGetProjects(Names => ArrayRef[Str|Undef]) Each argument is described in detail in: L<Paws::CodeBuild::BatchGetProjects> Returns: a L<Paws::CodeBuild::BatchGetProjectsOutput> instance Gets information about build projects. =head2 CreateProject(Artifacts => L<Paws::CodeBuild::ProjectArtifacts>, Environment => L<Paws::CodeBuild::ProjectEnvironment>, Name => Str, Source => L<Paws::CodeBuild::ProjectSource>, [Description => Str, EncryptionKey => Str, ServiceRole => Str, Tags => ArrayRef[L<Paws::CodeBuild::Tag>], TimeoutInMinutes => Int]) Each argument is described in detail in: L<Paws::CodeBuild::CreateProject> Returns: a L<Paws::CodeBuild::CreateProjectOutput> instance Creates a build project. 
=head2 CreateWebhook(ProjectName => Str) Each argument is described in detail in: L<Paws::CodeBuild::CreateWebhook> Returns: a L<Paws::CodeBuild::CreateWebhookOutput> instance For an existing AWS CodeBuild build project that has its source code stored in a GitHub repository, enables AWS CodeBuild to begin automatically rebuilding the source code every time a code change is pushed to the repository. If you enable webhooks for an AWS CodeBuild project, and the project is used as a build step in AWS CodePipeline, then two identical builds will be created for each commit. One build is triggered through webhooks, and one through AWS CodePipeline. Because billing is on a per-build basis, you will be billed for both builds. Therefore, if you are using AWS CodePipeline, we recommend that you disable webhooks in CodeBuild. In the AWS CodeBuild console, clear the Webhook box. For more information, see step 9 in Change a Build ProjectE<rsquo>s Settings. =head2 DeleteProject(Name => Str) Each argument is described in detail in: L<Paws::CodeBuild::DeleteProject> Returns: a L<Paws::CodeBuild::DeleteProjectOutput> instance Deletes a build project. =head2 DeleteWebhook(ProjectName => Str) Each argument is described in detail in: L<Paws::CodeBuild::DeleteWebhook> Returns: a L<Paws::CodeBuild::DeleteWebhookOutput> instance For an existing AWS CodeBuild build project that has its source code stored in a GitHub repository, stops AWS CodeBuild from automatically rebuilding the source code every time a code change is pushed to the repository. =head2 ListBuilds([NextToken => Str, SortOrder => Str]) Each argument is described in detail in: L<Paws::CodeBuild::ListBuilds> Returns: a L<Paws::CodeBuild::ListBuildsOutput> instance Gets a list of build IDs, with each build ID representing a single build. 
=head2 ListBuildsForProject(ProjectName => Str, [NextToken => Str, SortOrder => Str]) Each argument is described in detail in: L<Paws::CodeBuild::ListBuildsForProject> Returns: a L<Paws::CodeBuild::ListBuildsForProjectOutput> instance Gets a list of build IDs for the specified build project, with each build ID representing a single build. =head2 ListCuratedEnvironmentImages() Each argument is described in detail in: L<Paws::CodeBuild::ListCuratedEnvironmentImages> Returns: a L<Paws::CodeBuild::ListCuratedEnvironmentImagesOutput> instance Gets information about Docker images that are managed by AWS CodeBuild. =head2 ListProjects([NextToken => Str, SortBy => Str, SortOrder => Str]) Each argument is described in detail in: L<Paws::CodeBuild::ListProjects> Returns: a L<Paws::CodeBuild::ListProjectsOutput> instance Gets a list of build project names, with each build project name representing a single build project. =head2 StartBuild(ProjectName => Str, [ArtifactsOverride => L<Paws::CodeBuild::ProjectArtifacts>, BuildspecOverride => Str, EnvironmentVariablesOverride => ArrayRef[L<Paws::CodeBuild::EnvironmentVariable>], SourceVersion => Str, TimeoutInMinutesOverride => Int]) Each argument is described in detail in: L<Paws::CodeBuild::StartBuild> Returns: a L<Paws::CodeBuild::StartBuildOutput> instance Starts running a build. =head2 StopBuild(Id => Str) Each argument is described in detail in: L<Paws::CodeBuild::StopBuild> Returns: a L<Paws::CodeBuild::StopBuildOutput> instance Attempts to stop running a build. 
=head2 UpdateProject(Name => Str, [Artifacts => L<Paws::CodeBuild::ProjectArtifacts>, Description => Str, EncryptionKey => Str, Environment => L<Paws::CodeBuild::ProjectEnvironment>, ServiceRole => Str, Source => L<Paws::CodeBuild::ProjectSource>, Tags => ArrayRef[L<Paws::CodeBuild::Tag>], TimeoutInMinutes => Int])

Each argument is described in detail in: L<Paws::CodeBuild::UpdateProject>

Returns: a L<Paws::CodeBuild::UpdateProjectOutput> instance

Changes the settings of a build project.

=head1 PAGINATORS

Paginator methods are helpers that repetitively call methods that return partial results.

=head1 SEE ALSO

This service class forms part of L<Paws>

=head1 BUGS and CONTRIBUTIONS

The source code is located here: https://github.com/pplu/aws-sdk-perl

Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues

=cut
ioanrogers/aws-sdk-perl
auto-lib/Paws/CodeBuild.pm
Perl
apache-2.0
12,319
#!/usr/bin/perl -w
# Petition signature handler: validates a submitted signature form, stores
# it in MySQL, optionally emails a mailing-list subscription request, and
# renders a confirmation page through a Petal template.
use lib qw(
  /home/bfchosting/perlmods/lib/perl/5.8
  /home/bfchosting/perlmods/lib/perl/5.8.4
  /home/bfchosting/perlmods/share/perl/5.8
  /home/bfchosting/perlmods/share/perl/5.8.4
);
use strict;
use CGI;
use DBI;
use CGI::Carp;
use Email::Send;
use Petal;

$Email::Send::Sendmail::SENDMAIL = '/usr/sbin/sendmail';
my $template_file = "mailtofriend.html";

# Database settings; credentials are redacted in this copy of the source.
my %conf = (
    "dbuser"   => "...",
    dbpassword => "....",
    "db"       => "...",
    "dbserver" => "...",
    "table"    => "signatures"
);

my $q   = CGI->new;
my $dbh = DBI->connect( "DBI:mysql:$conf{'db'}:$conf{'dbserver'}", $conf{'dbuser'}, $conf{'dbpassword'} )
  or croak 'failed to connect to database';

my %input_params = $q->Vars;
if ( my $data = validate_input( \%input_params ) ) {

    # Values are individually quoted via $dbh->quote before interpolation,
    # so the generated INSERT is safe against quoting errors.
    my $sth = $dbh->prepare( "INSERT INTO $conf{'table'} ("
          . join( ", ", keys(%$data) )
          . ') VALUES ('
          . join( ", ", map( $dbh->quote($_), values(%$data) ) )
          . ')' );
    $sth->execute || carp $dbh->errstr;

    if ( $q->param("list") ) {

        # BUG FIX: list_signup() takes (email, name); the original call
        # passed only the email, leaving an undef $name in the notification
        # subject line. first_name/last_name are required fields, so they
        # are defined whenever validation succeeded.
        list_signup( $q->param("email"), $q->param("first_name") . ' ' . $q->param("last_name") );
    }
}
else {
    carp('did not validate input');
}

my $template = Petal->new($template_file);
print $q->header, $template->process( email => $q->param("email") );

# Checks that every required signature field is present; returns a hashref
# of required + optional fields on success, or an empty list/undef on
# failure (missing required field).
sub validate_input {
    my %in        = %{ shift @_ };
    my @reqfields = qw(first_name last_name email city state address);
    my @optfields = qw(comments country);
    my %out;
    foreach my $field (@reqfields) {
        if ( defined( $in{$field} ) ) {
            $out{$field} = $in{$field};
        }
        else {
            # carp "did not find required field $field";
            return;
        }
    }
    foreach my $field (@optfields) {
        if ( defined( $in{$field} ) ) {
            $out{$field} = $in{$field};
        }
    }
    return \%out;
}

# Emails a mailing-list subscription request for $email (signed by $name)
# to the list admin address via sendmail.
sub list_signup {
    my $send_sub_to = 'bfc-media@buffalo.wildrockies.org';
    my $email       = shift @_;
    my $name        = shift @_;
    $name = '' unless defined $name;    # guard against legacy one-arg calls
    my $from   = 'bfchosting@buffalo.dreamhosters.com';
    my $sender = Email::Send->new( { mailer => 'Sendmail' } );

    # NOTE(review): header/body line breaks reconstructed from a copy with
    # collapsed whitespace — confirm the blank line separating headers from
    # the "sub <email>" body matches the original message layout.
    my $msg = "To: $send_sub_to
From: $from
Subject: subscription request from $name - $email

sub $email
";
    $sender->send($msg);
}
JesseCrocker/PerlPetition
sign.pl
Perl
apache-2.0
2,072
=head1 LICENSE

Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2020] EMBL-European Bioinformatics Institute

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

=cut

# Species-specific xref-mapping configuration for Anopheles gambiae.
# Overrides a handful of BasicMapper hooks: the exonerate method table,
# and the source lists used for gene descriptions and transcript
# display xrefs.
package XrefMapper::anopheles_gambiae;

use XrefMapper::BasicMapper;
use XrefMapper::VBCoordinateMapper;

use vars '@ISA';
@ISA = qw{ XrefMapper::BasicMapper };

# Returns the default alignment method plus a per-source override map
# (RefSeq sources use the more permissive "Best5" method).
sub set_methods {
    my $default_method = 'ExonerateGappedBest1_55_perc_id';
    my %override_method_for_source = (
        ExonerateGappedBest5_55_perc_id => [
            'RefSeq_mRNA',
            'RefSeq_mRNA_predicted',
            'RefSeq_ncRNA',
            'RefSeq_ncRNA_predicted',
        ],
    );
    return $default_method, \%override_method_for_source;
}

# Priority-ordered source list for gene descriptions.
# NOTE(original author): mh4 says Anopheles_symbol doesn't get chosen over
# UniProt here (but does get chosen in other cases).
sub gene_description_sources {
    return (
        'VB_Community_Annotation',
        'Uniprot/SWISSPROT',
        'VB_RNA_Description',
    );
}

# Priority-ordered source list for transcript display xrefs, plus an
# (empty) ignore map, in the [\@list, \%ignore] shape the caller expects.
sub transcript_display_xref_sources {
    my @list = (
        'VB_Community_Annotation',
        'Uniprot/SWISSPROT',
        'VB_RNA_Description',
    );
    my %ignore;
    return [ \@list, \%ignore ];
}

# Regexps matching descriptions to filter out; none for this species.
sub gene_description_filter_regexps {
    return ();
}

1;
james-monkeyshines/ensembl
misc-scripts/xref_mapping/XrefMapper/anopheles_gambiae.pm
Perl
apache-2.0
1,879
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Plugin mode that monitors the Centreon Map broker over JMX: it reads the
# ReceivedPackets/ProcessedPackets counters from the broker statistics MBean
# and reports per-second rates plus a status check on the received/processed
# delta between two runs (statefile-backed).
package apps::centreon::map::jmx::mode::brokerstats;

use base qw(centreon::plugins::templates::counter);

use strict;
use warnings;
use Digest::MD5 qw(md5_hex);
use centreon::plugins::templates::catalog_functions qw(catalog_status_threshold);

# Human-readable output for the 'status' counter: the received-minus-processed
# delta followed by the raw [processed/received] values.
sub custom_status_output {
    my ($self, %options) = @_;

    my $msg = sprintf("Packets Delta: %d [%d/%d]",
        $self->{result_values}->{diff_packets},
        $self->{result_values}->{processed_packets},
        $self->{result_values}->{received_packets});
    return $msg;
}

# Derives the values used by the 'status' counter from the freshly collected
# datas: raw received/processed counters and their difference.
sub custom_status_calc {
    my ($self, %options) = @_;

    $self->{result_values}->{received_packets} =
        $options{new_datas}->{$self->{instance} . '_ReceivedPackets'};
    $self->{result_values}->{processed_packets} =
        $options{new_datas}->{$self->{instance} . '_ProcessedPackets'};
    $self->{result_values}->{diff_packets} =
        $self->{result_values}->{received_packets} - $self->{result_values}->{processed_packets};
    return 0;
}

# Declares the counter tree consumed by the 'counter' template base class:
# one global scope holding the status check and the two per-second rates.
sub set_counters {
    my ($self, %options) = @_;

    $self->{maps_counters_type} = [
        { name => 'global', type => 0 },
    ];

    $self->{maps_counters}->{global} = [
        # Status check on the delta between received and processed packets
        # since the previous run (diff => 1); thresholds are expression-based
        # via catalog_status_threshold, no perfdata is emitted (sub returns 0).
        { label => 'status', set => {
                key_values => [ { name => 'ReceivedPackets', diff => 1 }, { name => 'ProcessedPackets', diff => 1 } ],
                closure_custom_calc => $self->can('custom_status_calc'),
                closure_custom_output => $self->can('custom_status_output'),
                closure_custom_perfdata => sub { return 0; },
                closure_custom_threshold_check => \&catalog_status_threshold
            }
        },
        # Received packets per second (per_second => 1 lets the template
        # derive the rate from the statefile delta).
        { label => 'received-packets-rate', set => {
                key_values => [ { name => 'ReceivedPackets', per_second => 1 } ],
                output_template => 'Received Packets: %.2f/s',
                perfdatas => [
                    { label => 'received_packets_rate', template => '%.2f',
                      min => 0, unit => 'packets/s' }
                ]
            }
        },
        # Processed packets per second.
        { label => 'processed-packets-rate', set => {
                key_values => [ { name => 'ProcessedPackets', per_second => 1 } ],
                output_template => 'Processed Packets: %.2f/s',
                perfdatas => [
                    { label => 'processed_packets_rate', template => '%.2f',
                      min => 0, unit => 'packets/s' },
                ]
            }
        }
    ];
}

# Constructor: enables the statefile (needed for diff/per-second counters)
# and registers the warning/critical status expression options.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options, statefile => 1);
    bless $self, $class;

    $options{options}->add_options(arguments => {
        'warning-status:s'  => { name => 'warning_status', default => '' },
        'critical-status:s' => { name => 'critical_status', default => '%{processed_packets} < %{received_packets}' }
    });

    return $self;
}

sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::check_options(%options);
}

# JMX object name of the broker statistics MBean queried in manage_selection.
my $mbean_broker = "com.centreon.studio.map:type=broker,name=statistics";

# Collects the two packet counters from the broker MBean.  The cache name is
# keyed on the target URL, the mode and the active counter filter so distinct
# invocations keep separate statefiles.
sub manage_selection {
    my ($self, %options) = @_;

    $self->{cache_name} = "centreon_map_" . md5_hex($options{custom}->{url}) . '_' . $self->{mode} . '_' .
        (defined($self->{option_results}->{filter_counters}) ?
            md5_hex($self->{option_results}->{filter_counters}) : md5_hex('all'));

    $self->{request} = [
        { mbean => $mbean_broker }
    ];

    my $result = $options{custom}->get_attributes(request => $self->{request}, nothing_quit => 0);

    $self->{global} = {};
    $self->{global} = {
        ReceivedPackets => $result->{$mbean_broker}->{ReceivedPackets},
        ProcessedPackets => $result->{$mbean_broker}->{ProcessedPackets},
    };
}

1;

__END__

=head1 MODE

Check broker packets rate received and processed.

Example:

perl centreon_plugins.pl --plugin=apps::centreon::map::jmx::plugin --custommode=jolokia --url=http://10.30.2.22:8080/jolokia-war --mode=broker-stats

=over 8

=item B<--filter-counters>

Only display some counters (regexp can be used).
(Example: --filter-counters='session')

=item B<--warning-status>

Set warning threshold for status. (Default: '').
Can use special variables like: %{processed_packets}, %{received_packets}, %{diff_packets}.

=item B<--critical-status>

Set critical threshold for status. (Default: '%{processed_packets} < %{received_packets}').
Can use special variables like: %{processed_packets}, %{received_packets}, %{diff_packets}.

=item B<--warning-*>

Threshold warning.
Can be: 'received-packets-rate', 'processed-packets-rate'.

=item B<--critical-*>

Threshold critical.
Can be: 'received-packets-rate', 'processed-packets-rate'.

=back

=cut
centreon/centreon-plugins
apps/centreon/map/jmx/mode/brokerstats.pm
Perl
apache-2.0
5,421
package Paws::CloudWatch::AlarmHistoryItem;
  use Moose;

  # All five attributes are optional read-only strings; declare them in one
  # pass using Moose's arrayref form of C<has>.
  has [qw(AlarmName HistoryData HistoryItemType HistorySummary Timestamp)] =>
      (is => 'ro', isa => 'Str');

1;

### main pod documentation begin ###

=head1 NAME

Paws::CloudWatch::AlarmHistoryItem

=head1 USAGE

This class represents one of two things:

=head3 Arguments in a call to a service

Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.

As an example, if Att1 is expected to be a Paws::CloudWatch::AlarmHistoryItem object:

  $service_obj->Method(Att1 => { AlarmName => $value, ..., Timestamp => $value  });

=head3 Results returned from an API call

Use accessors for each attribute. If Att1 is expected to be an Paws::CloudWatch::AlarmHistoryItem object:

  $result = $service_obj->Method(...);
  $result->Att1->AlarmName

=head1 DESCRIPTION

Represents the history of a specific alarm.

=head1 ATTRIBUTES

=head2 AlarmName => Str

The descriptive name for the alarm.

=head2 HistoryData => Str

Data about the alarm, in JSON format.

=head2 HistoryItemType => Str

The type of alarm history item.

=head2 HistorySummary => Str

A summary of the alarm history, in text format.

=head2 Timestamp => Str

The time stamp for the alarm history item.

=head1 SEE ALSO

This class forms part of L<Paws>, describing an object used in L<Paws::CloudWatch>

=head1 BUGS and CONTRIBUTIONS

The source code is located here: https://github.com/pplu/aws-sdk-perl

Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues

=cut
ioanrogers/aws-sdk-perl
auto-lib/Paws/CloudWatch/AlarmHistoryItem.pm
Perl
apache-2.0
1,799
#!/usr/bin/perl -w use strict; use Curses; use Curses::UI; use Device::SerialPort; use Getopt::Std; require 'LICENSE'; sub update_info; sub update_info_monitor; sub getparam; sub send_command; sub read_response; sub filter_command_list; sub send_button; sub help_button; sub log_start; sub serial_port_setup; sub show_page; sub save_notes_dialog; sub quit_dialog; sub about_dialog; sub license_dialog; sub _; sub HELP_MESSAGE; sub VERSION_MESSAGE; sub LICENSE_MESSAGE; # set defaults my %opts = ( d => '/dev/cuaU0', l => undef, c => 0, r => 0 ); Getopt::Std::getopts('d:l:i:crhvs', \%opts); my $intl_file = undef; my $messages = {}; if (defined($opts{i})) { $intl_file = $opts{i}; my $translations = ''; if (open(TRANS, '<', $intl_file)) { while (<TRANS>) { $translations .= $_; } } close(TRANS); unless ($messages = eval '{' . $translations . '}' ) { say STDERR "ERROR!\nCouldn't parse ${intl_file}!" unless $messages; say STDERR "$@" if $@; say STDERR "$!" unless defined $messages; exit(1); } undef($translations); } if (defined($opts{h})) { HELP_MESSAGE(); } if (defined($opts{v})) { VERSION_MESSAGE(); } my $bsd_license = do 'LICENSE'; if (defined($opts{s})) { LICENSE_MESSAGE(); } my $retval = 0; my $filter = ''; my $logging = 0; my %param = (); my %param_label = (); my $PortObj = undef; my $serial_device = undef; $serial_device = $opts{d}; my $written = undef; my $line_monitor = undef; my $monitoring = 'CGETDATA'; my $cui = new Curses::UI( -color_support => $opts{r}, -compat => $opts{c}, -width => 80 ); ########## Main window ########## my $win_main = $cui->add( 'win_main', 'Window', -border => 1, -title => 'E85 Power' ); # menu my $menu = $cui->add( 'menu','Menubar', -menu => [ { -label => 'E85 Power', -submenu => [ { -label => _('Reconnect'), -value => sub { if (defined($opts{l})) { $cui->status( -message => _('Currently in log file mode') ); sleep 1; $cui->nostatus(); } else { serial_port_setup(); } } }, { -label => '', -value => sub {} }, { -label => _('Quit') . 
" ^Q", -value => \&quit_dialog } ] }, { -label => 'Page', -submenu => [ { -label => _('Status'), -value => sub { show_page(0); } }, { -label => _('Details'), -value => sub { show_page(1); } }, { -label => _('Control'), -value => sub { show_page(2); } } ] }, { -label => 'Help', -submenu => [ { -label => _('About'), -value => \&about_dialog }, { -label => _('License'), -value => \&license_dialog } ] } ] ); ########## Main window ########## ########## STAUS page ########## my $win_status = $win_main->add( 'win_status', 'Window', -border => 1, -title => _('Status'), -titlereverse => 0, -width => 78, -height => 19 ); $param{fuel} = '-'; $win_status->add( 'label_fuel', 'Label', -text => _('Fuel type:'), -y => 0 ); $param_label{fuel} = $win_status->add( 'label_fuel_value', 'Label', -text => $param{fuel}, -y => 0, -x => 14, -width => 15 ); $param{uptime} = '-'; $win_status->add( 'label_uptime', 'Label', -text => _('Uptime:'), -y => 1 ); $param_label{uptime} = $win_status->add( 'label_uptime_value', 'Label', -text => $param{uptime}, -y => 1, -x => 14, -width => 15 ); $param{sport} = 0; $win_status->add( 'label_sport', 'Label', -text => _('Sport:'), -y => 2 ); $param_label{sport} = $win_status->add( 'label_sport_value', 'Checkbox', -label => '', -checked => $param{sport}, -y => 2, -x => 14); $param_label{sport}->onFocus(sub {shift()->loose_focus}); $param{eco} = 0; $win_status->add( 'label_eco', 'Label', -text => _('Eco:'), -y => 3 ); $param_label{eco} = $win_status->add( 'label_eco_value', 'Checkbox', -label => '', -checked => $param{eco}, -y => 3, -x => 14); $param_label{eco}->onFocus(sub {shift()->loose_focus}); $param{lambda} = 0; $win_status->add( 'label_lambda', 'Label', -text => _('Lambda:'), -y => 4 ); $param_label{lambda} = $win_status->add( 'label_lambda_value', 'Checkbox', -label => '', -checked => $param{lambda}, -y => 4, -x => 14); $param_label{lambda}->onFocus(sub {shift()->loose_focus}); $param{mode} = ''; $win_status->add( 'label_mode', 'Label', -text => 
_('Mode:'), -y => 4, -x => 23 ); $param_label{mode} = $win_status->add( 'label_mode_value', 'Label', -text => $param{mode}, -y => 4, -x => 29, -width => 10 ); $param{speed} = 0; $win_status->add( 'label_speed', 'Label', -text => _('Speed cur/avg:'), -y => 6 ); $param_label{speed} = $win_status->add( 'progressbar_speed_value', 'Progressbar', -pos => $param{speed}, -max => 300, -showvalue => 1, -nopercentage => 1, -nocenterline => 1, -y => 5, -x => 14, -width => 57 ); $param{speed_avg} = -1; $param_label{speed_avg} = $win_status->add( 'label_speed_avg_value', 'Label', -text => '/ ' . $param{speed_avg}, -y => 6, -x => 71, -width => 5 ); $param{rpm} = 0; $win_status->add( 'label_rpm', 'Label', -text => _('RPM:'), -y => 9 ); $param_label{rpm} = $win_status->add( 'progressbar_rpm_value', 'Progressbar', -pos => $param{rpm}, -max => 10000, -showvalue => 1, -nopercentage => 1, -nocenterline => 1, -y => 8, -x => 14 ); $param{etemp} = 0; $win_status->add( 'label_etemp', 'Label', -text => _('Engine temp.:'), -y => 12 ); $param_label{etemp} = $win_status->add( 'progressbar_etemp_value', 'Progressbar', -pos => $param{etemp}, -max => 100, -showvalue => 1, -nopercentage => 1, -nocenterline => 1, -y => 11, -x => 14 ); $param{throttle} = 0; $win_status->add( 'label_throttle', 'Label', -text => _('Throttle:'), -y => 15 ); $param_label{throttle} = $win_status->add( 'progressbar_throttle_value', 'Progressbar', -pos => $param{throttle}, -max => 100, -showvalue => 0, -nopercentage => 0, -nocenterline => 1, -y => 14, -x => 14 ); ### logging ### $win_status->add( 'checkbox_log_file', 'Checkbox', -label => (defined($opts{l})) ? 'Log<-' : 'Log->', -onchange => \&log_start, -checked => (defined($opts{l})) ? 1 : 0, -y => 1, -x => 44 ); if (defined($opts{l})) { $win_status->getobj('checkbox_log_file')->onFocus(sub {shift()->loose_focus}); } $win_status->add( 'text_log_file', 'TextEntry', -text => (defined($opts{l})) ? $opts{l} : './log-' . time() . '.txt', -readonly => (defined($opts{l})) ? 
1 : 0, -border => 1, -y => 0, -x => 53 ); ### logging ### $win_status->hide(); ########## STATUS page ########## ########## DETAILS page ########## my $win_details = $win_main->add( 'win_details', 'Window', -border => 1, -title => _('Details'), -titlereverse => 0, -width => 78, -height => 19 ); # Injector my $container_inj = $win_details->add( 'container_inj', 'Container', -title => _('Injector'), -titlereverse => 0, -border => 1, -width => 24, -height => 7 ); $param{inj_open} = -1; $container_inj->add( 'label_inj_open', 'Label', -text => _('Open:'), -y => 0 ); $param_label{inj_open} = $container_inj->add( 'label_inj_open_value', 'Label', -text => $param{inj_open}, -y => 0, -x => 17, -width => 10 ); $param{inj_capacity} = -1; $container_inj->add( 'label_inj_capacity', 'Label', -text => _('Capacity:'), -y => 1 ); $param_label{inj_capacity} = $container_inj->add( 'label_inj_capacity_value', 'Label', -text => $param{inj_capacity}, -y => 1, -x => 17, -width => 10 ); $param{enrich_cur} = -1; $container_inj->add( 'label_enrich_cur', 'Label', -text => _('Enrich cur.:'), -y => 2 ); $param_label{enrich_cur} = $container_inj->add( 'label_enrich_cur_value', 'Label', -text => $param{enrich_cur}, -y => 2, -x => 17, -width => 10 ); $param{enrich_avg} = -1; $container_inj->add( 'label_enrich_avg', 'Label', -text => _('Enrich avg.:'), -y => 3 ); $param_label{enrich_avg} = $container_inj->add( 'label_enrich_avg_value', 'Label', -text => $param{enrich_avg}, -y => 3, -x => 17, -width => 10 ); # Lambda my $container_lam = $win_details->add( 'container_lam', 'Container', -title => 'Lambda', -titlereverse => 0, -border => 1, -x => 24, -width => 36, -height => 7 ); $param{lambda_trimmer} = -1; $container_lam->add( 'label_lambda_trimmer', 'Label', -text => _('Trimmer:'), -y => 0, -x => 0 ); $param_label{lambda_trimmer} = $container_lam->add( 'label_lambda_trimmer_value', 'Label', -text => $param{lambda_trimmer}, -y => 0, -x => 17, -width => 15 ); $param{lambda_in_min} = -1; 
$param{lambda_in_mid} = -1; $param{lambda_in_max} = -1; $container_lam->add( 'label_lambda_in_pot', 'Label', -text => _('IN min/mid/max:'), -y => 1, -x => 0 ); $param_label{lambda_in_pot} = $container_lam->add( 'label_lambda_in_pot_value', 'Label', -text => $param{lambda_in_min} . '/' . $param{lambda_in_mid} . '/' . $param{lambda_in_max}, -y => 1, -x => 17, -width => 15 ); $param{lambda_out_min} = -1; $param{lambda_out_mid} = -1; $param{lambda_out_max} = -1; $container_lam->add( 'label_lambda_out_pot', 'Label', -text => _('OUT min/mid/max:'), -y => 2, -x => 0 ); $param_label{lambda_out_pot} = $container_lam->add( 'label_lambda_out_pot_value', 'Label', -text => $param{lambda_out_min} . '/' . $param{lambda_out_mid} . '/' . $param{lambda_out_max}, -y => 2, -x => 17, -width => 15 ); $param{lambda_in_calc} = -1; $param{lambda_out_calc} = -1; $container_lam->add( 'label_lambda_calc_pot', 'Label', -text => _('IN/OUT calc.:'), -y => 3, -x => 0 ); $param_label{lambda_calc_pot} = $container_lam->add( 'label_lambda_calc_pot_value', 'Label', -text => $param{lambda_in_calc} . '/' . 
$param{lambda_out_calc}, -y => 3, -x => 17, -width => 15 ); $param{potential_in} = -1; $container_lam->add( 'label_potential_in', 'Label', -text => _('IN potential:'), -y => 4 ); $param_label{potential_in} = $container_lam->add( 'label_potential_in_value', 'Label', -text => $param{potential_in}, -y => 4, -x => 17, -width => 15 ); # Consumption my $container_cons = $win_details->add( 'container_cons', 'Container', -title => _('Consumption'), -titlereverse => 0, -border => 1, -y => 7, -width => 30, -height => 7 ); $param{cons_pred} = -1; $container_cons->add( 'label_cons_pred', 'Label', -text => _('Predicted:'), -y => 0 ); $param_label{cons_pred} = $container_cons->add( 'label_cons_pred_value', 'Label', -text => $param{cons_pred}, -y => 0, -x => 17, -width => 10 ); $param{cons_cur} = -1; $container_cons->add( 'label_cons_cur', 'Label', -text => _('Current:'), -y => 1 ); $param_label{cons_cur} = $container_cons->add( 'label_cons_cur_value', 'Label', -text => $param{cons_cur}, -y => 1, -x => 17, -width => 10 ); $param{cons_avg_1} = -1; $container_cons->add( 'label_cons_avg_1', 'Label', -text => _('Average / 1km:'), -y => 2 ); $param_label{cons_avg_1} = $container_cons->add( 'label_cons_avg_1_value', 'Label', -text => $param{cons_avg_1}, -y => 2, -x => 17, -width => 10 ); $param{cons_avg_30} = -1; $container_cons->add( 'label_cons_avg_30', 'Label', -text => _('Average / 30km:'), -y => 3 ); $param_label{cons_avg_30} = $container_cons->add( 'label_cons_avg_30_value', 'Label', -text => $param{cons_avg_30}, -y => 3, -x => 17, -width => 10 ); # Travel my $container_travel = $win_details->add( 'container_travel', 'Container', -title => _('Travel'), -titlereverse => 0, -border => 1, -y => 7, -x => 30, -width => 30, -height => 7 ); $param{km} = -1; $container_travel->add( 'label_km', 'Label', -text => _('Traveled km:'), -y => 0 ); $param_label{km} = $container_travel->add( 'label_km_value', 'Label', -text => $param{km}, -y => 0, -x => 18, -width => 10 ); $param{km_start} = -1; 
$container_travel->add( 'label_km_start', 'Label', -text => _('Trav. km (start):'), -y => 1 ); $param_label{km_start} = $container_travel->add( 'label_km_start_value', 'Label', -text => $param{km_start}, -y => 1, -x => 18, -width => 10 ); $param{co2_save_1} = -1; $container_travel->add( 'label_co2_save_1', 'Label', -text => _('CO2 save / 1km:'), -y => 2 ); $param_label{co2_save_1} = $container_travel->add( 'label_co2_save_1_value', 'Label', -text => $param{co2_save_1}, -y => 2, -x => 18, -width => 10 ); $param{co2_save_start} = -1; $container_travel->add( 'label_co2_save_start', 'Label', -text => _('CO2 save (start):'), -y => 3 ); $param_label{co2_save_start} = $container_travel->add( 'label_co2_save_start_value', 'Label', -text => $param{co2_save_start}, -y => 3, -x => 18, -width => 10 ); $param{co2_save_sum} = -1; $container_travel->add( 'label_co2_save_sum', 'Label', -text => _('CO2 save (sum):'), -y => 4 ); $param_label{co2_save_sum} = $container_travel->add( 'label_co2_save_sum_value', 'Label', -text => $param{co2_save_sum}, -y => 4, -x => 18, -width => 10 ); # some extra stuff $win_details->add( 'label_enrich_cur_progressbar', 'Label', -text => _('Enrich cur.:'), -y => 15 ); $param_label{enrich_cur_progressbar} = $win_details->add( 'progressbar_enrich_cur_value', 'Progressbar', -pos => $param{enrich_cur}, -max => 100, -showvalue => 0, -nopercentage => 0, -nocenterline => 1, -y => 14, -x => 12, -width => 48 ); # notes my $text_details_notes = $win_details->add( 'text_details_notes', 'TextEditor', -title => _('Notes'), -titlereverse => 0, -text => '', -border => 1, -x => 60, -y => 0, -height => 16 ); $win_details->add( 'button_details_notes_save', 'Buttonbox', -buttons => [ { -label => _('Save notes'), -onpress => \&save_notes_dialog } ], -x => 63, -y => 16 ); # XXX another inconveniency # Exclude the containers from the focus order, otherwise # some label will get the focus in the first container, # and won't give it back... 
$win_details->set_focusorder( 'text_details_notes', 'button_details_notes_save' ); $win_details->hide(); ########## DETAILS page ########## ########## CONTROL page ########## my $win_control = $win_main->add( 'win_control', 'Window', -title => _('Control'), -titlereverse => 0, -border => 1, -width => 78, -height => 19 ); $win_control->add( 'label_command_filter', 'Label', -text => _('Command filter:') ); my $text_command_filter = $win_control->add( 'text_command_filter', 'TextEntry', -text => '', -regexp => '/^[A-Z]*$/', -toupper => 1, -reverse => 1, -homeonblur => 0, -y => 0, -x => 16, -onchange => \&filter_command_list ); # populate the commands' help hash, if it is present my %command_help = (); my $help_file = 'e85power.help'; if (open(HELP_FILE, '<', $help_file)) { %command_help = do $help_file; close(HELP_FILE); } my %command_list = ( CGETADC => undef, CGETANALOG => undef, CGETDATA => undef, CGETERROR => undef, CGETFUEL => undef, CGETICT => undef, CGETLLIST => undef, CGETMODE => undef, CGETPARAM => undef, CGETPOWER => undef, CGETRICH => undef, CGETRPM => undef, CGETSTAT => undef, CGETSYS => undef, CGETTABLE1 => undef, CGETTABLE2 => undef, CGETTABLE3 => undef, CGETTABLE4 => undef, CGETTABLE5 => undef, CGETTABLE6 => undef, CGETTEMP => undef, CGETTIME => undef, CGETTPS => undef, CSETCAPMAX => 'xx', CSETCCM => undef, CSETCOLD => 'xx', CSETDIST => undef, CSETECO => 'x', CSETFALLTIME => 'xx', CSETFIRST => 'x,y,zz', CSETICTCOLD => undef, CSETICTHOT => undef, CSETICTPARAM => 'xxx,yyy', CSETIDLE => undef, CSETINIT => undef, CSETINJAUTO => 'x,yyy', CSETINJPARAM => 'xx,yy,zz,w', CSETLAM => 'xxx,yy', CSETLAMMUL => 'xx', CSETLAMOFFS => 'xxx,yyy', CSETLAMRISE => 'xx,yy', CSETLAMTYPE => 'x', CSETLAMWIN => 'xx,yy', CSETMODE => 'x', CSETMON => undef, CSETONTIMEREF => 'xx', CSETRPM => 'xxxx', CSETSILENT => undef, CSETSPEED => undef, CSETSPORT => 'x', CSETSWMODE => undef, CSETTABFILL => 'x,yyy', CSETTABPOINT => 'x,yy,zzz', CSETTARGETLAM => 'xx,yy', CSETTPSMAX => undef, 
CSETTPSMIN => undef, CSETTPSPOINT => 'xx,yy', CSETTRIM => 'x,y,zzzz' ); my %command_list_filtered = (); my @popupmenu_command_list_values = (); my $idx = 0; # populate the filtered command list foreach (sort(keys %command_list)) { $command_list_filtered{++$idx} = $_; push @popupmenu_command_list_values, $idx; } @popupmenu_command_list_values = ( 1..$idx ); $win_control->add( 'label_command_list', 'Label', -text => _('Command:'), -x => 7, -y => 1 ); my $popupmenu_command_list = $win_control->add( 'popupmenu_command_list', 'Popupmenu', -values => \@popupmenu_command_list_values, -selected => 0, -labels => \%command_list_filtered, -y => 1, -x => 16, -width => 15 ); my $button_control = $win_control->add( 'button_control', 'Buttonbox', -buttons => [ { -label => _('Send'), -value => 'send', -onpress => (defined($opts{l})) ? sub { $cui->status( -message => _("Currently in log file mode") ); sleep 1; $cui->nostatus(); } : \&send_button }, { -label => _('Help'), -value => 'help', -onpress => \&help_button } ], -x => 33, -y => 1 ); my $response_box = $win_control->add( 'response_box', 'TextViewer', -border => 1, -titlereverse => 0, -title => _('Response'), -wrapping => 1, -vscrollbar => 1, -x => 0, -y => 2 ); $response_box->beep_off(); $win_control->hide(); ########## CONTROL page ########## ########## usage info on the bottom ########## $win_main->add( 'label_usage', 'Label', -text => "F1-F3: " . _('Pages') . " ^X: " . _('Menu'), -border => 1, -x => 0, -y => -1 ); ########## usage info on the bottom ########## ########## software version on the bottom ########## $param_label{sw_version} = $win_main->add( 'label_sw_version_value', 'Label', -text => 'SW: ' . (defined($param{sw_version}) ? $param{sw_version} : '-'), -border => 1, -x => 25, -y => -1, -width => 22 ); ########## software version on the bottom ########## ########## serial device status on the bottom ########## my $label_status = $win_main->add( 'label_status', 'Label', -text => _("Disconnected") . 
"($serial_device)", -border => 1, -x => -1, -y => -1 ); ########## serial device status on the bottom ########## if (defined($opts{l})) { log_file_setup(); } else { serial_port_setup(); } show_page(0); ########## key bindings ########## $cui->set_binding(sub {$menu->focus()}, "\cX"); $cui->set_binding(\&quit_dialog, "\cQ", "\cC"); $cui->set_binding(sub { show_page(0); }, KEY_F(1)); $cui->set_binding(sub { show_page(1); }, KEY_F(2)); $cui->set_binding(sub { show_page(2); }, KEY_F(3)); ########## key bindings ########## $cui->mainloop(); sub update_info { # The device outputs only one dataline, so we can not turn on every # monitoring command (eg.: CSETMON + CGETDATA + CGETSTAT) to get every # data we need. # That is why we give every monitor command a chance to output its data # and we process them in their time-slice. Fortunatelly some data appears # in both command's output, so we don't miss one second of updating with eg. # the RPM and the ECO/Sport mode infos. # We could update more often (and get every data under eg. one second) but # Curses::UI's set_timer() only understands seconds as its timeout. 
if ($monitoring eq 'CGETSTAT') { $monitoring = 'CGETDATA'; } elsif ($monitoring eq 'CGETDATA') { $monitoring = 'CGETSTAT'; } send_command($monitoring, 1); ( $param{uptime} ) = send_command('CGETTIME', 1); # if reading from log file, overwrite the data from it if (defined($opts{l})) { if (eof(LOG_INPUT)) { close(LOG_INPUT); $cui->disable_timer('timer_update_info'); $retval = $cui->dialog( -title => _("Log file"), -message => _("End of log file."), -buttons => ['ok'], ); return; } else { $line_monitor = <LOG_INPUT>; chomp($line_monitor); $param{uptime} = $line_monitor; } } update_info_monitor(); if (defined($param{uptime}) && $param{uptime} =~ '^TIME=') { ( undef, $param{uptime} ) = split(/=/, $param{uptime}); $param{uptime} /= 10000; my $hour = int($param{uptime} / 60 / 60); $param{uptime} -= $hour * 60 * 60; my $min = int($param{uptime} / 60); $param{uptime} -= $min * 60; my $sec = int($param{uptime}); $param{uptime} = sprintf("%02d:%02d.%02d", $hour, $min, $sec); $param_label{uptime}->text($param{uptime}); } $win_status->intellidraw(); $win_details->intellidraw(); } sub update_info_monitor { return unless length($line_monitor); if ($line_monitor =~ m/^\Q[S]\E/) { # [S]a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s[E] $line_monitor =~ s/\[[SE]+\]//g; # remove ^[S] and [E]$ ($param{km}, $param{km_start}, $param{speed}, $param{speed_avg}, $param{cons_cur}, undef, $param{cons_avg_1}, $param{cons_avg_30}, $param{rpm}, $param{cons_pred}, $param{fuel}, $param{eco}, $param{sport}, $param{lambda}, $param{co2_save_1}, $param{co2_save_start}, $param{co2_save_sum}, undef, undef) = split(/\//, $line_monitor); } elsif ($line_monitor =~ m/^\Q[D]\E/) { # [D]0/0/2000/0/2000/0/0.00/0.00/0/34/0/0.00/26/0/24/0/1/69/0/119/1/0/0/0[E] $line_monitor =~ s/\[[DE]+\]//g; # remove ^[D] and [E]$ ($param{lambda_in_mid}, $param{lambda_out_mid}, $param{lambda_in_min}, $param{lambda_in_max}, $param{lambda_out_min}, $param{lambda_out_max}, $param{lambda_in_calc}, $param{lambda_out_calc}, $param{lambda}, 
$param{lambda_trimmer}, $param{rpm}, $param{inj_open}, $param{enrich_cur}, $param{enrich_avg}, $param{cons_pred}, $param{inj_capacity}, $param{fuel}, $param{etemp}, $param{throttle}, $param{potential_in}, $param{mode}, $param{sport}, $param{eco}, undef) = split(/\//, $line_monitor); } $param_label{km}->text($param{km}); $param_label{km_start}->text($param{km_start}); $param_label{speed}->pos($param{speed}); $param_label{speed_avg}->text('/ ' . $param{speed_avg}); $param_label{cons_cur}->text($param{cons_cur}); $param_label{cons_avg_1}->text($param{cons_avg_1}); $param_label{cons_avg_30}->text($param{cons_avg_30}); $param_label{cons_pred}->text($param{cons_pred}); if ($param{fuel} eq 0) { $param{fuel} = _('Petrol'); } elsif ($param{fuel} eq 1) { $param{fuel} = _('Mix'); } elsif ($param{fuel} eq 2) { $param{fuel} = _('E85'); } $param_label{fuel}->text($param{fuel}); $param{eco} ? $param_label{eco}->check : $param_label{eco}->uncheck; $param{sport} ? $param_label{sport}->check : $param_label{sport}->uncheck; $param{lambda} ? $param_label{lambda}->check : $param_label{lambda}->uncheck; $param_label{co2_save_1}->text($param{co2_save_1}); $param_label{co2_save_start}->text($param{co2_save_start}); $param_label{co2_save_sum}->text($param{co2_save_sum}); $param_label{lambda_in_pot}->text($param{lambda_in_min} . '/' . $param{lambda_in_mid} . '/' . $param{lambda_in_max}); $param_label{lambda_out_pot}->text($param{lambda_out_min} . '/' . $param{lambda_out_mid} . '/' . $param{lambda_out_max}); $param_label{lambda_calc_pot}->text($param{lambda_in_calc} . '/' . 
$param{lambda_out_calc});
    $param_label{lambda_trimmer}->text($param{lambda_trimmer});
    $param_label{rpm}->pos($param{rpm});
    $param_label{inj_open}->text($param{inj_open});
    $param_label{enrich_cur}->text($param{enrich_cur});
    $param_label{enrich_cur_progressbar}->pos($param{enrich_cur});
    $param_label{enrich_avg}->text($param{enrich_avg});
    $param_label{inj_capacity}->text($param{inj_capacity});
    $param_label{etemp}->pos($param{etemp});
    $param_label{throttle}->pos($param{throttle});
    $param_label{potential_in}->text($param{potential_in});

    # XXX These texts are not necessarily correct...
    # Map the numeric mode reported by the device to a translated label.
    if ($param{mode} eq 0) {
        $param{mode} = _('OFF');
    }
    elsif ($param{mode} eq 1) {
        $param{mode} = _('Auto');
    }
    elsif ($param{mode} eq 2) {
        $param{mode} = _('Sport');
    }
    elsif ($param{mode} eq 3) {
        $param{mode} = _('RPM Table 1');
    }
    elsif ($param{mode} eq 4) {
        $param{mode} = _('RPM + TPS Table 1');
    }
    elsif ($param{mode} eq 5) {
        $param{mode} = _('RPM Table 2');
    }
    elsif ($param{mode} eq 6) {
        $param{mode} = _('RPM + TPS Table 2');
    }
    elsif ($param{mode} eq 7) {
        $param{mode} = _('Fix');
    }
    $param_label{mode}->text($param{mode});

    $win_status->intellidraw();
    $win_details->intellidraw();
}

# Read every CSET* command's current parameter (via CGETPARAM) and cache the
# values in %command_list; also picks up the firmware version string for the
# status label.
# XXX BUG: my device seems to leave out the last few (RPMDIV, SPEEDDIV, CCM,
# SWMODE, TRIM, FIRST) parameters, and I can see these last few lines very
# rarely in CGETPARAM's output. This may be because of a timeout caused by a
# faulty or too long USB cable and/or the car's/laptop's electric circuit's
# low quality.
sub getparam {
    my @answers = send_command('CGETPARAM', 1);

    foreach my $answer (@answers) {
        if ($answer =~ m/SW VERSION:/) {
            $answer =~ s/^.*SW VERSION: //;
            $param{sw_version} = $answer;
            $param_label{sw_version}->text('SW: ' . $param{sw_version});
            $param_label{sw_version}->draw();
        }

        # Each remaining line looks like "NAME=value"; remember the value
        # under its corresponding CSET* command name.
        my ($cset_name, $current_value) = split(/=/, $answer);
        $cset_name = 'CSET' . $cset_name;
        if (defined($command_list{$cset_name})) {
            $command_list{$cset_name} = $current_value;
        }
    }
}

# Write one command ("$cmd\r\n") to the serial port and return the response
# lines.  $auto suppresses all feedback in the UI response box (used by the
# periodically/automatically issued commands).
sub send_command {
    my ($cmd, $auto) = @_;
    my @answer = ();

    if (defined($PortObj)) {
        $written = $PortObj->write($cmd . "\r\n");
        if (defined($written)) {
            if ($written == length($cmd . "\r\n")) {
                @answer = read_response($cmd, $auto);
                if (!$auto) {
                    $response_box->text($response_box->get() . "\n" . '> ' . $cmd);
                    if (defined($answer[0])) {
                        foreach my $resp_line (@answer) {
                            $response_box->text($response_box->get() . "\n" . '= ' . $resp_line);
                        }
                    }
                    else {
                        $response_box->text($response_box->get() . "\n" . "! '$cmd': " . _("failed to read response"));
                    }
                }
            }
            else {
                $response_box->text($response_box->get() . "\n" . "! '$cmd': " . _("sending failed (short write)")) unless $auto;
            }
        }
        else {
            $response_box->text($response_box->get() . "\n" . "! '$cmd': " . _("sending failed (write error)")) unless $auto;
        }
    }
    else {
        $response_box->text($response_box->get() . "\n" . "! '$cmd': " . _("sending failed (not connected)")) unless $auto;
    }

    if (!$auto) {
        # intentionally twice ... (scroll the response box all the way down)
        $response_box->pos(length($response_box->get()));
        $response_box->cursor_pagedown();
        $response_box->pos(length($response_box->get()));
        $response_box->cursor_pagedown();
        $response_box->intellidraw();
    }

    return(@answer);
}

# Poll the serial port for the answer to $cmd and return it as a list of
# lines.
#
# We don't poll especially for CSET{DATA,STAT}'s output, because that comes
# prepended before any other command's answer anyway.  The set_timer()'d
# update_info() issues at least one command useful for the Status page, and we
# collect the '[D]...[E]' / '[S]...[E]' monitoring chunk while polling every
# other command's answer.  That chunk is cut out of the input and saved in
# $line_monitor (fed to update_info_monitor() from update_info()); the rest of
# the line is processed as the answer to the originally issued command.
sub read_response {
    my ($cmd, $auto) = @_;

    my $line     = undef;
    my $i        = 0;
    my $timeout  = 500;   # maximum number of initial lookfor() attempts before assuming the line is dead
    my @response = ();

    if (defined($PortObj)) {
        # Initial reading; wait for the issued command's answer.
        while (!length($line)) {
            $line = $PortObj->lookfor();
            $i++;
            # If the timeout is reached, assume something went wrong:
            # tear down the connection and offer a reconnect.
            if ((!defined($line)) || ($i >= $timeout)) {
                $cui->disable_timer('timer_update_info');
                $PortObj->close();
                $PortObj = undef;
                $label_status->text(_('Disconnected') . "($serial_device)");
                $label_status->draw;
                $retval = $cui->dialog(
                    -title   => _('Serial device') . "($serial_device)",
                    -message => _("Serial line seems to be dead.\nReconnect?"),
                    -buttons => ['yes', 'no'],
                );
                serial_port_setup() if $retval;
                return(@response);
            }
        }

        # We have input; keep reading until an empty read.
        while (length($line)) {
            $line =~ s/[\x0a\x0d]*//g;   # strip every CR/LF

            # If it's a monitoring line, extract the data from it.
            if ($line =~ m/\[[DS]\]/) {
                $line_monitor = $line;
                # $line_monitor becomes only the CGET{DATA,STAT} chunk...
                $line_monitor =~ s/(.*)(\[[DS]\].*\Q[E]\E)(.*)/$2/;
                # XXX DEBUG print STDERR "\$line_monitor($cmd): '$line_monitor'\n";
                # ...and $line everything other than that chunk.
                $line =~ s/\[[DS]\].*\Q[E]\E//;
            }
            # XXX DEBUG print STDERR "\$line($cmd): '$line'\n";

            push @response, $line unless !length($line);
            $line = $PortObj->lookfor();
        }

        if ($logging) {
            # Log CGET{DATA,STAT}'s output (if requested),
            print LOG_FILE "$line_monitor\n" if length($line_monitor);
            # and GETTIME's output too (if requested).
            print LOG_FILE "$response[0]\n" if ($response[0] =~ m/^TIME=/);
        }
    }

    return(@response);
}

# Rebuild the command popup menu from the user-supplied substring filter.
sub filter_command_list {
    my $idx    = 0;
    my $filter = $text_command_filter->get();

    # Empty the command list,
    %command_list_filtered         = ();
    @popupmenu_command_list_values = ();

    # then repopulate it with only the commands matching the filter.
    # NOTE(review): $filter is interpolated into the pattern unescaped, so
    # regex metacharacters typed by the user act as a regex — presumably
    # intentional; \Q$filter\E would make it a literal match.
    foreach my $cmd_name (keys %command_list) {
        if ($cmd_name =~ m/^.*$filter.*$/) {
            $command_list_filtered{++$idx} = $cmd_name;
            push @popupmenu_command_list_values, $idx;
        }
    }
    if ($idx <= 0) {
        %command_list_filtered         = ( 1 => '-' );
        @popupmenu_command_list_values = ( 1 );
    }

    # To modify the '-values' option we must recreate the popupmenu.
    # XXX pretty inconvenient...
    $win_control->delete('popupmenu_command_list');
    $popupmenu_command_list = $win_control->add(
        'popupmenu_command_list', 'Popupmenu',
        -values   => \@popupmenu_command_list_values,
        -selected => 0,
        -labels   => \%command_list_filtered,
        -y        => 1,
        -x        => 16,
        -width    => 15
    );

    # Reorder the focus list, because the popupmenu was re-added.
    # XXX pretty inconvenient...
    $win_control->set_focusorder(
        'text_command_filter',
        'popupmenu_command_list',
        'button_control',
        'response_box'
    );
    $text_command_filter->focus();
}

# Send the command currently selected in the popup menu; if the command takes
# parameters (i.e. it has an entry in %command_list), pop up a modal editor
# pre-filled with the current values first.
sub send_button {
    my $selection = $popupmenu_command_list->get();
    return if (!defined($selection));

    my $cmd = $command_list_filtered{$selection};
    if (length($cmd)) {
        return if ($cmd eq '-');
    }
    else {
        return;
    }

    my $cancel = 0;

    # If there are parameters to the command, ask for them.
    if (defined($command_list{$cmd})) {
        # Read current CSET* values.
        getparam();

        my $container_command_parameter = $win_main->add(
            'container_command_parameter', 'Container',
            -title  => $cmd,
            -border => 1,
            -x      => 2,
            -y      => 4,
            -width  => 42,
            -height => 6
        );
        $container_command_parameter->add(
            'textentry_command_parameter', 'TextEntry',
            # The text field contains the actual values for the CSET*
            # command, as cached by getparam().
            -text       => $command_list{$cmd},
            -border     => 1,
            -vscrollbar => 1,
            -height     => 4
        );
        $container_command_parameter->add(
            'command_parameter_button', 'Buttonbox',
            -buttons => [
                {
                    -label   => '< ' . _('Send') . ' >',
                    -onpress => sub {
                        $container_command_parameter->loose_focus();
                        $win_main->delete('container_command_parameter');
                        $win_main->draw();
                        $cancel = 0;
                        # Append "=<edited values>" to the command to be sent.
                        $cmd .= '=' . $container_command_parameter->getobj('textentry_command_parameter')->text();
                    }
                },
                {
                    -label   => '< ' . _('Cancel') . ' >',
                    -onpress => sub {
                        $container_command_parameter->loose_focus();
                        $win_main->delete('container_command_parameter');
                        $win_main->draw();
                        $cancel = 1;
                    }
                }
            ],
            -x     => -1,
            -y     => -1,
            -width => 20
        );
        $win_main->getobj('container_command_parameter')->show();
        $win_main->getobj('container_command_parameter')->modalfocus();
    }

    send_command($cmd, 0) unless ($cancel);
}

# Show the help text for the command currently selected in the popup menu.
sub help_button {
    my $selection = $popupmenu_command_list->get();
    return if (!defined($selection));

    my $cmd = $command_list_filtered{$selection};
    if (length($cmd)) {
        return if ($cmd eq '-');
    }
    else {
        return;
    }

    $cui->dialog(
        -title   => _('Help') . " ($cmd)",
        -message => length($command_help{$cmd}) ? $command_help{$cmd} : _('No help available.'),
        -buttons => ['ok'],
    );
}

# Toggle logging according to the log-file checkbox: open (append) the chosen
# log file and set $logging, or stop logging and close the file.
sub log_start {
    my $log_file = $win_status->getobj('text_log_file')->get();

    if ($win_status->getobj('checkbox_log_file')->get()) {
        if (open(LOG_FILE, '>>', $log_file)) {
            $logging = 1;
            $cui->status( -message => _('Logging started') );
            sleep 1;
            $cui->nostatus();
        }
        else {
            $cui->error(
                -title   => _('Logging'),
                -message => _("Can't open") . " '$log_file': $!"
            );
            $win_status->getobj('checkbox_log_file')->uncheck();
        }
    }
    else {
        $cui->status( -message => _('Logging stopped') );
        sleep 1;
        $cui->nostatus();
        close(LOG_FILE);
        $logging = 0;
    }
}

# Open the replay log file given with -l and arm the update timer / F5 binding
# so update_info() reads from it; exits on open failure.
sub log_file_setup {
    if (open(LOG_INPUT, '<', $opts{l})) {
        $cui->set_timer('timer_update_info', \&update_info, 1);
        $cui->set_binding(\&update_info, KEY_F(5));
    }
    else {
        $cui->error(
            -title   => _('Log file'),
            -message => _("Couldn't open") . " '$opts{l}': $!"
        );
        exit(1);
    }
}

# (Re)open $serial_device at 115200 8N1, prime the device with a throwaway
# command, cache the CSET* values and arm the update timer.  If already
# connected, ask before reconnecting.
sub serial_port_setup {
    if (defined($PortObj)) {
        $retval = $cui->dialog(
            -title   => _('Reconnect'),
            -message => _('Reconnect to') . "$serial_device?",
            -buttons => ['no', 'yes'],
        );
        if ($retval) {
            $PortObj->close();
            $PortObj = undef;
            $label_status->text(_('Disconnected') . "($serial_device)");
            $label_status->draw;
        }
        else {
            return;
        }
    }

    $PortObj = Device::SerialPort->new($serial_device, 0, undef);
    my $error = $!;
    if (defined($PortObj)) {
        # Connected.
        $PortObj->baudrate(115200);
        $PortObj->databits(8);
        $PortObj->parity("none");
        $PortObj->stopbits(1);
        $PortObj->handshake("none");   # rts, xoff, none
        $PortObj->buffers(4096, 4096);
        $PortObj->are_match("\r\n");
        $label_status->text(_('Connected') . "($serial_device)");
        $label_status->intellidraw;

        # Write and read something to 'wake' the device.
        # Sometimes the very first command gets swallowed :)
        $PortObj->write("CGETPARAM\r\n");
        $PortObj->lookfor();
        $PortObj->lookfor();
        $PortObj->lookclear();

        # Read current CSET* values.
        getparam();
        $cui->set_timer('timer_update_info', \&update_info, 1);
    }
    else {
        # Couldn't connect.
        $cui->disable_timer('timer_update_info');
        $cui->error(
            -title   => _('Serial device') . " ($serial_device)",
            -message => _("Can't connect") . ":\n$error"
        );
    }
}

# Switch between the three UI pages: 0 = Status, 1 = Details, 2 = Control.
# Pages 0/1 keep the periodic update timer running; page 2 silences the
# device's monitoring output and stops the timer.
sub show_page {
    my $page = shift;

    if ($page == 0) {
        $cui->disable_timer('timer_update_info');
        $cui->set_timer('timer_update_info', \&update_info, 1);
        $win_details->hide();
        $win_control->hide();
        $win_status->show();
        $win_status->focus();
        $win_main->draw();
    }
    elsif ($page == 1) {
        $cui->disable_timer('timer_update_info');
        $cui->set_timer('timer_update_info', \&update_info, 1);
        $win_status->hide();
        $win_control->hide();
        $win_details->show();
        $text_details_notes->focus();
        $win_main->draw();
    }
    elsif ($page == 2) {
        send_command('CSETSILENT', 1);
        # Read current CSET* values.
        getparam();
        $cui->disable_timer('timer_update_info');
        $win_status->hide();
        $win_details->hide();
        $win_control->show();
        $text_command_filter->focus();
        $win_main->draw();
    }
}

# Ask for a file name and save the Details page's notes into it.
sub save_notes_dialog {
    my $file = $cui->savefilebrowser( -file => '' );
    return unless defined $file;

    if (open(NOTES_FILE, '>', $file)) {
        print NOTES_FILE $text_details_notes->text();
        # Check close() too: buffered write errors only surface here.
        if (close(NOTES_FILE)) {
            $cui->dialog( -message => _('Saved') . " '$file'" );
        }
        else {
            $cui->error( -message => _('Error on closing file') . " '$file':\n$!" );
        }
    }
    else {
        $cui->error( -message => _("Can't write to") . " '$file':\n$!" );
    }
}

# Confirm quitting; on yes, silence the device's monitoring output and exit.
sub quit_dialog {
    $retval = $cui->dialog(
        -title   => _('Quit'),
        -message => _('Do you really want to quit?'),
        -buttons => ['no', 'yes'],
    );
    if ($retval) {
        send_command('CSETSILENT', 1);
        exit(0);
    }
}

# Show the "About" dialog.
sub about_dialog {
    $cui->dialog(
        -title   => _('About'),
        -message => "E85 Power Kit controller 1.0\nAuthor: Lévai Dániel <leva\@ecentrum.hu>\n\nMore info: www.e85power.hu",
        -buttons => ['ok'],
    );
}

# Show the BSD license text in a modal container on the main window.
sub license_dialog {
    $win_main->add(
        'license_container', 'Container',
        -title  => _('License') . ' (BSD)',
        -border => 1,
        -height => 22
    );
    $win_main->getobj('license_container')->add(
        'license_message', 'TextViewer',
        -text       => $bsd_license,
        -border     => 1,
        -vscrollbar => 1,
        -height     => 17
    );
    $win_main->getobj('license_container')->add(
        'license_button', 'Buttonbox',
        -buttons => [
            {
                -label    => '< OK >',
                -shortcut => 'o',
                -onpress  => sub {
                    $win_main->getobj('license_container')->loose_focus();
                    $win_main->delete('license_container');
                    $win_main->draw();
                }
            }
        ],
        -pad   => 1,
        -x     => -1,
        -y     => -1,
        -width => 8
    );
    $win_main->getobj('license_container')->show();
    $win_main->getobj('license_container')->modalfocus();
}

# gettext-style message lookup: return the translation from $messages if one
# exists, otherwise the message itself.
sub _ {
    my $msg = shift;
    defined($messages->{$msg}) ? return($messages->{$msg}) : return($msg);
}

# Print the command-line usage summary and exit (also called by Getopt's
# --help handling).
sub HELP_MESSAGE {
    print "$0 [-d serial_device] [-l log_file] [-c] [-r] [-h]\n";
    print ' -d : ' . _('use the specified serial device') . "\n";
    print ' -l : ' . _('read input from the specified log file') . "\n";
    print ' -i : ' . _('specify the language file to use') . "\n";
    print ' -c : ' . _('curses compatibility mode') . "\n";
    print ' -r : ' . _('color support') . "\n";
    print ' -v : ' . _('version information') . "\n";
    print ' -s : ' . _('license') . "\n";
    print ' -h : ' . _('this help') . "\n";
    exit(0);
}

# Print the version banner, then the usage summary (which exits).
sub VERSION_MESSAGE {
    print "E85 Power Controller 1.0\n";
    print "LEVAI Daniel <leva\@ecentrum.hu>\n";
    print _('See the -s option for copying information.') . "\n";
    HELP_MESSAGE();
    exit(0);   # unreachable — HELP_MESSAGE() already exits; kept for parity
}

# Print the BSD license text and exit.
sub LICENSE_MESSAGE {
    print $bsd_license;
    exit(0);
}
levaidaniel/e85power-controller
e85power.pl
Perl
bsd-2-clause
37,213
vP0(0,0). vP0(1,48). vP0(2,57). vP0(3,66). vP0(4,1). vP0(5,2). vP0(6,3). vP0(7,4). vP0(8,5). vP0(9,6). vP0(10,7). vP0(11,8). vP0(12,25). vP0(13,9). vP0(14,21). vP0(15,10). vP0(16,11). vP0(17,12). vP0(18,13). vP0(19,14). vP0(20,15). vP0(21,16). vP0(22,17). vP0(23,85). vP0(24,18). vP0(25,76). vP0(26,19). vP0(27,20). vP0(28,77). vP0(29,22). vP0(30,23). vP0(31,24). vP0(32,31). vP0(33,26). vP0(34,27). vP0(35,65). vP0(36,56). vP0(37,28). vP0(38,29). vP0(39,30). vP0(40,41). vP0(41,63). vP0(42,71). vP0(43,32). vP0(44,74). vP0(45,35). vP0(46,33). vP0(47,34). vP0(48,37). vP0(49,58). vP0(50,89). vP0(51,36). vP0(52,39). vP0(53,38). vP0(54,40). vP0(55,42). vP0(56,43). vP0(57,44). vP0(58,45). vP0(59,46). vP0(60,55). vP0(61,75). vP0(62,47). vP0(63,49). vP0(64,50). vP0(65,51). vP0(66,52). vP0(67,53). vP0(68,54). vP0(69,72). vP0(70,79). vP0(71,62). vP0(72,73). vP0(73,78). vP0(74,87). vP0(75,59). vP0(76,60). vP0(77,61). vP0(78,64). vP0(79,69). vP0(80,84). vP0(81,67). vP0(82,68). vP0(83,70). vP0(84,81). vP0(85,80). vP0(86,83). vP0(87,86). vP0(88,82). vP0(89,88). vP0(90,90). vP0(135,2641). vP0(1505,91). vP0(1505,92). vP0(1505,93). vP0(1505,94). vP0(1505,95). vP0(1505,96). vP0(1505,97). vP0(1505,98). vP0(1505,99). vP0(1505,100). vP0(1505,101). vP0(1505,102). vP0(1505,103). vP0(1505,104). vP0(1505,107). vP0(1505,108). vP0(1505,109). vP0(1505,110). vP0(1505,111). vP0(1505,112). vP0(1505,113). vP0(1505,114). vP0(1505,119). vP0(1505,120). vP0(1505,121). vP0(1505,122). vP0(1505,123). vP0(1505,124). vP0(1505,125). vP0(1505,126). vP0(1505,127). vP0(1505,128). vP0(1505,129). vP0(1505,130). vP0(1505,131). vP0(1505,132). vP0(1505,133). vP0(1505,134). vP0(1505,135). vP0(1505,136). vP0(1505,137). vP0(1505,138). vP0(1505,139). vP0(1505,140). vP0(1505,141). vP0(1505,142). vP0(1505,143). vP0(1505,145). vP0(1505,146). vP0(1505,148). vP0(1505,149). vP0(1505,151). vP0(1505,152). vP0(1505,154). vP0(1505,155). vP0(1505,157). vP0(1505,158). vP0(1505,160). vP0(1505,161). vP0(1505,163). vP0(1505,164). 
vP0(1505,166). vP0(1505,167). vP0(1505,169). vP0(1505,170). vP0(1505,171). vP0(1505,172). vP0(1505,173). vP0(1505,174). vP0(1505,175). vP0(1505,176). vP0(1505,177). vP0(1505,178). vP0(1505,179). vP0(1505,180). vP0(1505,181). vP0(1505,182). vP0(1505,183). vP0(1505,184). vP0(1505,185). vP0(1505,186). vP0(1505,187). vP0(1505,188). vP0(1505,189). vP0(1505,190). vP0(1505,191). vP0(1505,192). vP0(1505,193). vP0(1505,194). vP0(1505,195). vP0(1505,196). vP0(1505,197). vP0(1505,198). vP0(1505,199). vP0(1505,200). vP0(1505,201). vP0(1505,202). vP0(1505,203). vP0(1505,204). vP0(1505,206). vP0(1505,207). vP0(1505,209). vP0(1505,210). vP0(1505,212). vP0(1505,213). vP0(1505,215). vP0(1505,216). vP0(1505,218). vP0(1505,219). vP0(1505,221). vP0(1505,222). vP0(1505,224). vP0(1505,225). vP0(1505,227). vP0(1505,228). vP0(1505,230). vP0(1505,231). vP0(1505,232). vP0(1505,233). vP0(1505,235). vP0(1505,236). vP0(1505,238). vP0(1505,239). vP0(1505,241). vP0(1505,242). vP0(1505,244). vP0(1505,245). vP0(1505,247). vP0(1505,248). vP0(1505,250). vP0(1505,251). vP0(1505,253). vP0(1505,254). vP0(1505,256). vP0(1505,257). vP0(1505,259). vP0(1505,260). vP0(1505,261). vP0(1505,262). vP0(1505,264). vP0(1505,265). vP0(1505,266). vP0(1505,267). vP0(1505,268). vP0(1505,269). vP0(1505,270). vP0(1505,271). vP0(1505,273). vP0(1505,274). vP0(1505,275). vP0(1505,276). vP0(1505,278). vP0(1505,279). vP0(1505,281). vP0(1505,282). vP0(1505,284). vP0(1505,285). vP0(1505,287). vP0(1505,288). vP0(1505,290). vP0(1505,291). vP0(1505,293). vP0(1505,294). vP0(1505,296). vP0(1505,297). vP0(1505,299). vP0(1505,300). vP0(1505,302). vP0(1505,303). vP0(1505,305). vP0(1505,306). vP0(1505,308). vP0(1505,309). vP0(1505,311). vP0(1505,312). vP0(1505,314). vP0(1505,315). vP0(1505,316). vP0(1505,317). vP0(1505,318). vP0(1505,319). vP0(1505,320). vP0(1505,321). vP0(1505,322). vP0(1505,323). vP0(1505,324). vP0(1505,325). vP0(1505,326). vP0(1505,327). vP0(1505,328). vP0(1505,329). vP0(1505,330). vP0(1505,331). vP0(1505,333). 
vP0(1505,334). vP0(1505,335). vP0(1505,336). vP0(1505,337). vP0(1505,338). vP0(1505,339). vP0(1505,340). vP0(1505,341). vP0(1505,342). vP0(1505,344). vP0(1505,345). vP0(1505,347). vP0(1505,348). vP0(1505,350). vP0(1505,351). vP0(1505,353). vP0(1505,354). vP0(1505,356). vP0(1505,357). vP0(1505,359). vP0(1505,360). vP0(1505,362). vP0(1505,363). vP0(1505,365). vP0(1505,366). vP0(1505,368). vP0(1505,369). vP0(1505,370). vP0(1505,371). vP0(1505,372). vP0(1505,373). vP0(1505,374). vP0(1505,375). vP0(1505,376). vP0(1505,377). vP0(1505,378). vP0(1505,379). vP0(1505,381). vP0(1505,382). vP0(1505,384). vP0(1505,385). vP0(1505,387). vP0(1505,388). vP0(1505,390). vP0(1505,391). vP0(1505,393). vP0(1505,394). vP0(1505,396). vP0(1505,397). vP0(1505,399). vP0(1505,400). vP0(1505,402). vP0(1505,403). vP0(1505,405). vP0(1505,406). vP0(1505,408). vP0(1505,409). vP0(1505,411). vP0(1505,412). vP0(1505,414). vP0(1505,415). vP0(1505,417). vP0(1505,418). vP0(1505,420). vP0(1505,421). vP0(1505,423). vP0(1505,424). vP0(1505,426). vP0(1505,427). vP0(1505,428). vP0(1505,429). vP0(1505,430). vP0(1505,431). vP0(1505,432). vP0(1505,433). vP0(1505,434). vP0(1505,435). vP0(1505,436). vP0(1505,437). vP0(1505,439). vP0(1505,440). vP0(1505,442). vP0(1505,443). vP0(1505,445). vP0(1505,446). vP0(1505,448). vP0(1505,449). vP0(1505,451). vP0(1505,452). vP0(1505,454). vP0(1505,455). vP0(1505,457). vP0(1505,458). vP0(1505,459). vP0(1505,460). vP0(1505,462). vP0(1505,463). vP0(1505,465). vP0(1505,466). vP0(1505,468). vP0(1505,469). vP0(1505,471). vP0(1505,472). vP0(1505,474). vP0(1505,475). vP0(1505,477). vP0(1505,480). vP0(1505,481). vP0(1505,482). vP0(1505,483). vP0(1505,484). vP0(1505,485). vP0(1505,486). vP0(1505,487). vP0(1505,488). vP0(1505,489). vP0(1505,490). vP0(1505,491). vP0(1505,492). vP0(1505,493). vP0(1505,494). vP0(1505,495). vP0(1505,496). vP0(1505,497). vP0(1505,498). vP0(1505,499). vP0(1505,500). vP0(1505,501). vP0(1505,502). vP0(1505,503). vP0(1505,504). vP0(1505,505). vP0(1505,506). 
vP0(1505,507). vP0(1505,508). vP0(1505,509). vP0(1505,510). vP0(1505,511). vP0(1505,512). vP0(1505,513). vP0(1505,514). vP0(1505,515). vP0(1505,516). vP0(1505,517). vP0(1505,518). vP0(1505,519). vP0(1505,520). vP0(1505,521). vP0(1505,522). vP0(1505,523). vP0(1505,524). vP0(1505,525). vP0(1505,526). vP0(1505,527). vP0(1505,528). vP0(1505,529). vP0(1505,530). vP0(1505,531). vP0(1505,532). vP0(1505,533). vP0(1505,534). vP0(1505,535). vP0(1505,536). vP0(1505,537). vP0(1505,538). vP0(1505,539). vP0(1505,540). vP0(1505,541). vP0(1505,542). vP0(1505,543). vP0(1505,544). vP0(1505,545). vP0(1505,546). vP0(1505,547). vP0(1505,548). vP0(1505,549). vP0(1505,550). vP0(1505,551). vP0(1505,552). vP0(1505,553). vP0(1505,554). vP0(1505,555). vP0(1505,556). vP0(1505,557). vP0(1505,558). vP0(1505,559). vP0(1505,560). vP0(1505,561). vP0(1505,562). vP0(1505,563). vP0(1505,564). vP0(1505,565). vP0(1505,566). vP0(1505,567). vP0(1505,568). vP0(1505,569). vP0(1505,570). vP0(1505,571). vP0(1505,572). vP0(1505,573). vP0(1505,574). vP0(1505,575). vP0(1505,576). vP0(1505,577). vP0(1505,578). vP0(1505,579). vP0(1505,580). vP0(1505,581). vP0(1505,582). vP0(1505,583). vP0(1505,584). vP0(1505,585). vP0(1505,586). vP0(1505,587). vP0(1505,588). vP0(1505,589). vP0(1505,590). vP0(1505,591). vP0(1505,592). vP0(1505,593). vP0(1505,594). vP0(1505,595). vP0(1505,596). vP0(1505,597). vP0(1505,598). vP0(1505,599). vP0(1505,600). vP0(1505,601). vP0(1505,602). vP0(1505,603). vP0(1505,604). vP0(1505,605). vP0(1505,606). vP0(1505,607). vP0(1505,608). vP0(1505,609). vP0(1505,610). vP0(1505,611). vP0(1505,612). vP0(1505,613). vP0(1505,614). vP0(1505,615). vP0(1505,616). vP0(1505,617). vP0(1505,618). vP0(1505,619). vP0(1505,620). vP0(1505,621). vP0(1505,622). vP0(1505,623). vP0(1505,624). vP0(1505,625). vP0(1505,626). vP0(1505,627). vP0(1505,628). vP0(1505,629). vP0(1505,630). vP0(1505,631). vP0(1505,632). vP0(1505,633). vP0(1505,634). vP0(1505,635). vP0(1505,636). vP0(1505,637). vP0(1505,638). vP0(1505,639). 
vP0(1505,640). vP0(1505,641). vP0(1505,642). vP0(1505,643). vP0(1505,644). vP0(1505,645). vP0(1505,646). vP0(1505,958). vP0(1505,959). vP0(1505,960). vP0(1505,961). vP0(1505,962). vP0(1505,964). vP0(1505,965). vP0(1505,966). vP0(1505,967). vP0(1505,968). vP0(1505,970). vP0(1505,971). vP0(1505,972). vP0(1505,973). vP0(1505,974). vP0(1505,976). vP0(1505,977). vP0(1505,978). vP0(1505,981). vP0(1505,982). vP0(1505,983). vP0(1505,984). vP0(1505,987). vP0(1505,990). vP0(1505,991). vP0(1505,994). vP0(1505,995). vP0(1505,998). vP0(1505,999). vP0(1505,1002). vP0(1505,1003). vP0(1505,1006). vP0(1505,1007). vP0(1505,1010). vP0(1505,1011). vP0(1505,1014). vP0(1505,1015). vP0(1505,1018). vP0(1505,1019). vP0(1505,1022). vP0(1505,1023). vP0(1505,1026). vP0(1505,1027). vP0(1505,1030). vP0(1505,1031). vP0(1505,1034). vP0(1505,1035). vP0(1505,1038). vP0(1505,1039). vP0(1505,1040). vP0(1505,1041). vP0(1505,1043). vP0(1505,1044). vP0(1505,1045). vP0(1505,1046). vP0(1505,1048). vP0(1505,1049). vP0(1505,1050). vP0(1505,1051). vP0(1505,1052). vP0(1505,1053). vP0(1505,1054). vP0(1505,1056). vP0(1505,1057). vP0(1505,1058). vP0(1505,1060). vP0(1505,1061). vP0(1505,1062). vP0(1505,1063). vP0(1505,1064). vP0(1505,1065). vP0(1505,1066). vP0(1505,1067). vP0(1505,1071). vP0(1505,1072). vP0(1505,1073). vP0(1505,1074). vP0(1505,1075). vP0(1505,1076). vP0(1505,1077). vP0(1505,1078). vP0(1505,1079). vP0(1505,1080). vP0(1505,1081). vP0(1505,1083). vP0(1505,1084). vP0(1505,1085). vP0(1505,1086). vP0(1505,1087). vP0(1505,1088). vP0(1505,1089). vP0(1505,1090). vP0(1505,1091). vP0(1505,1092). vP0(1505,1093). vP0(1505,1094). vP0(1505,1095). vP0(1505,1096). vP0(1505,1097). vP0(1505,1098). vP0(1505,1099). vP0(1505,1100). vP0(1505,1101). vP0(1505,1102). vP0(1505,1103). vP0(1505,1104). vP0(1505,1105). vP0(1505,1106). vP0(1505,1107). vP0(1505,1108). vP0(1505,1109). vP0(1505,1110). vP0(1505,1111). vP0(1505,1112). vP0(1505,1113). vP0(1505,1114). vP0(1505,1115). vP0(1505,1116). vP0(1505,1117). vP0(1505,1118). 
vP0(1505,1119). vP0(1505,1120). vP0(1505,1121). vP0(1505,1122). vP0(1505,1125). vP0(1505,1126). vP0(1505,1127). vP0(1505,1128). vP0(1505,1129). vP0(1505,1130). vP0(1505,1131). vP0(1505,1132). vP0(1505,1133). vP0(1505,1134). vP0(1505,1135). vP0(1505,1136). vP0(1505,1137). vP0(1505,1138). vP0(1505,1139). vP0(1505,1140). vP0(1505,1141). vP0(1505,1142). vP0(1505,1143). vP0(1505,1144). vP0(1505,1145). vP0(1505,1146). vP0(1505,1147). vP0(1505,1148). vP0(1505,1149). vP0(1505,1150). vP0(1505,1151). vP0(1505,1152). vP0(1505,1153). vP0(1505,1154). vP0(1505,1155). vP0(1505,1156). vP0(1505,1157). vP0(1505,1158). vP0(1505,1159). vP0(1505,1160). vP0(1505,1161). vP0(1505,1162). vP0(1505,1163). vP0(1505,1164). vP0(1505,1165). vP0(1505,1166). vP0(1505,1167). vP0(1505,1168). vP0(1505,1169). vP0(1505,1170). vP0(1505,1171). vP0(1505,1172). vP0(1505,1173). vP0(1505,1174). vP0(1505,1175). vP0(1505,1176). vP0(1505,1177). vP0(1505,1178). vP0(1505,1179). vP0(1505,1180). vP0(1505,1181). vP0(1505,1182). vP0(1505,1183). vP0(1505,1184). vP0(1505,1185). vP0(1505,1186). vP0(1505,1187). vP0(1505,1188). vP0(1505,1189). vP0(1505,1190). vP0(1505,1191). vP0(1505,1192). vP0(1505,1193). vP0(1505,1194). vP0(1505,1195). vP0(1505,1196). vP0(1505,1197). vP0(1505,1198). vP0(1505,1199). vP0(1505,1200). vP0(1505,1201). vP0(1505,1202). vP0(1505,1203). vP0(1505,1204). vP0(1505,1205). vP0(1505,1206). vP0(1505,1207). vP0(1505,1208). vP0(1505,1209). vP0(1505,1210). vP0(1505,1211). vP0(1505,1212). vP0(1505,1214). vP0(1505,1215). vP0(1505,1216). vP0(1505,1217). vP0(1505,1218). vP0(1505,1219). vP0(1505,1221). vP0(1505,1222). vP0(1505,1223). vP0(1505,1224). vP0(1505,1225). vP0(1505,1226). vP0(1505,1227). vP0(1505,1228). vP0(1505,1229). vP0(1505,1230). vP0(1505,1231). vP0(1505,1232). vP0(1505,1233). vP0(1505,1234). vP0(1505,1235). vP0(1505,1236). vP0(1505,1237). vP0(1505,1238). vP0(1505,1239). vP0(1505,1240). vP0(1505,1241). vP0(1505,1242). vP0(1505,1243). vP0(1505,1244). vP0(1505,1246). vP0(1505,1247). vP0(1505,1248). 
vP0(1505,1249). vP0(1505,1250). vP0(1505,1251). vP0(1505,1252). vP0(1505,1253). vP0(1505,1254). vP0(1505,1255). vP0(1505,1256). vP0(1505,1257). vP0(1505,1258). vP0(1505,1259). vP0(1505,1260). vP0(1505,1261). vP0(1505,1262). vP0(1505,1263). vP0(1505,1264). vP0(1505,1265). vP0(1505,1266). vP0(1505,1267). vP0(1505,1268). vP0(1505,1269). vP0(1505,1270). vP0(1505,1271). vP0(1505,1272). vP0(1505,1273). vP0(1505,1274). vP0(1505,1275). vP0(1505,1276). vP0(1505,1277). vP0(1505,1278). vP0(1505,1279). vP0(1505,1280). vP0(1505,1281). vP0(1505,1282). vP0(1505,1283). vP0(1505,1284). vP0(1505,1285). vP0(1505,1286). vP0(1505,1287). vP0(1505,1288). vP0(1505,1289). vP0(1505,1290). vP0(1505,1291). vP0(1505,1292). vP0(1505,1293). vP0(1505,1294). vP0(1505,1295). vP0(1505,1296). vP0(1505,1297). vP0(1505,1298). vP0(1505,1299). vP0(1505,1300). vP0(1505,1301). vP0(1505,1302). vP0(1505,1303). vP0(1505,1304). vP0(1505,1305). vP0(1505,1306). vP0(1505,1307). vP0(1505,1308). vP0(1505,1309). vP0(1505,1310). vP0(1505,1311). vP0(1505,1312). vP0(1505,1313). vP0(1505,1314). vP0(1505,1315). vP0(1505,1316). vP0(1505,1317). vP0(1505,1318). vP0(1505,1319). vP0(1505,1320). vP0(1505,1321). vP0(1505,1322). vP0(1505,1323). vP0(1505,1324). vP0(1505,1325). vP0(1505,1326). vP0(1505,1327). vP0(1505,1328). vP0(1505,1329). vP0(1505,1330). vP0(1505,1331). vP0(1505,1332). vP0(1505,1333). vP0(1505,1334). vP0(1505,1335). vP0(1505,1336). vP0(1505,1337). vP0(1505,1338). vP0(1505,1339). vP0(1505,1340). vP0(1505,1341). vP0(1505,1342). vP0(1505,1343). vP0(1505,1344). vP0(1505,1345). vP0(1505,1346). vP0(1505,1347). vP0(1505,1348). vP0(1505,1349). vP0(1505,1350). vP0(1505,1351). vP0(1505,1352). vP0(1505,1353). vP0(1505,1354). vP0(1505,1355). vP0(1505,1356). vP0(1505,1357). vP0(1505,1358). vP0(1505,1359). vP0(1505,1360). vP0(1505,1361). vP0(1505,1362). vP0(1505,1363). vP0(1505,1364). vP0(1505,1365). vP0(1505,1366). vP0(1505,1367). vP0(1505,1368). vP0(1505,1369). vP0(1505,1370). vP0(1505,1371). vP0(1505,1372). vP0(1505,1373). 
vP0(1505,1374). vP0(1505,1375). vP0(1505,1376). vP0(1505,1377). vP0(1505,1378). vP0(1505,1379). vP0(1505,1380). vP0(1505,1381). vP0(1505,1382). vP0(1505,1383). vP0(1505,1384). vP0(1505,1385). vP0(1505,1386). vP0(1505,1387). vP0(1505,1388). vP0(1505,1389). vP0(1505,1390). vP0(1505,1391). vP0(1505,1392). vP0(1505,1393). vP0(1505,1394). vP0(1505,1395). vP0(1505,1396). vP0(1505,1397). vP0(1505,1398). vP0(1505,1399). vP0(1505,1400). vP0(1505,1401). vP0(1505,1402). vP0(1505,1403). vP0(1505,1414). vP0(1505,1415). vP0(1505,1416). vP0(1505,1417). vP0(1505,1418). vP0(1505,1419). vP0(1505,1420). vP0(1505,1421). vP0(1505,1422). vP0(1505,1423). vP0(1505,1424). vP0(1505,1425). vP0(1505,1426). vP0(1505,1427). vP0(1505,1428). vP0(1505,1429). vP0(1505,1431). vP0(1505,1432). vP0(1505,1433). vP0(1505,1434). vP0(1505,1435). vP0(1505,1436). vP0(1505,1437). vP0(1505,1438). vP0(1505,1439). vP0(1505,1440). vP0(1505,1441). vP0(1505,1442). vP0(1505,1443). vP0(1505,1444). vP0(1505,1445). vP0(1505,1446). vP0(1505,1447). vP0(1505,1448). vP0(1505,1449). vP0(1505,1450). vP0(1505,1451). vP0(1505,1452). vP0(1505,1453). vP0(1505,1454). vP0(1505,1455). vP0(1505,1456). vP0(1505,1457). vP0(1505,1458). vP0(1505,1459). vP0(1505,1460). vP0(1505,1461). vP0(1505,1462). vP0(1505,1463). vP0(1505,1464). vP0(1505,1465). vP0(1505,1466). vP0(1505,1467). vP0(1505,1468). vP0(1505,1469). vP0(1505,1470). vP0(1505,1471). vP0(1505,1472). vP0(1505,1473). vP0(1505,1474). vP0(1505,1475). vP0(1505,1476). vP0(1505,1477). vP0(1505,1478). vP0(1505,1479). vP0(1505,1480). vP0(1505,1481). vP0(1505,1482). vP0(1505,1483). vP0(1505,1484). vP0(1505,1485). vP0(1505,1486). vP0(1505,1487). vP0(1505,1488). vP0(1505,1489). vP0(1505,1490). vP0(1505,1491). vP0(1505,1492). vP0(1505,1493). vP0(1505,1494). vP0(1505,1495). vP0(1505,1496). vP0(1505,1497). vP0(1505,1498). vP0(1505,1499). vP0(1505,1500). vP0(1505,1501). vP0(1505,1502). vP0(1505,1503). vP0(1505,1504). vP0(1505,1505). vP0(1505,1506). vP0(1505,1507). vP0(1505,1508). vP0(1505,1509). 
vP0(1505,1510). vP0(1505,1511). vP0(1505,1512). vP0(1505,1513). vP0(1505,1514). vP0(1505,1515). vP0(1505,1516). vP0(1505,1517). vP0(1505,1518). vP0(1505,1519). vP0(1505,1520). vP0(1505,1521). vP0(1505,1522). vP0(1505,1523). vP0(1505,1524). vP0(1505,1525). vP0(1505,1528). vP0(1505,1529). vP0(1505,1530). vP0(1505,1531). vP0(1505,1532). vP0(1505,1533). vP0(1505,1534). vP0(1505,1535). vP0(1505,1536). vP0(1505,1537). vP0(1505,1538). vP0(1505,1539). vP0(1505,1540). vP0(1505,1541). vP0(1505,1542). vP0(1505,1543). vP0(1505,1544). vP0(1505,1548). vP0(1505,1549). vP0(1505,1550). vP0(1505,1551). vP0(1505,1552). vP0(1505,1553). vP0(1505,1554). vP0(1505,1555). vP0(1505,1556). vP0(1505,1557). vP0(1505,1558). vP0(1505,1559). vP0(1505,1560). vP0(1505,1561). vP0(1505,1562). vP0(1505,1563). vP0(1505,1564). vP0(1505,1565). vP0(1505,1566). vP0(1505,1567). vP0(1505,1568). vP0(1505,1569). vP0(1505,1571). vP0(1505,1572). vP0(1505,1573). vP0(1505,1574). vP0(1505,1575). vP0(1505,1577). vP0(1505,1578). vP0(1505,1579). vP0(1505,1580). vP0(1505,1581). vP0(1505,1582). vP0(1505,1583). vP0(1505,1584). vP0(1505,1585). vP0(1505,1586). vP0(1505,1587). vP0(1505,1588). vP0(1505,1589). vP0(1505,1590). vP0(1505,1591). vP0(1505,1592). vP0(1505,1593). vP0(1505,1595). vP0(1505,1596). vP0(1505,1598). vP0(1505,1599). vP0(1505,1601). vP0(1505,1602). vP0(1505,1603). vP0(1505,1604). vP0(1505,1605). vP0(1505,1606). vP0(1505,1607). vP0(1505,1608). vP0(1505,1609). vP0(1505,1610). vP0(1505,1611). vP0(1505,1613). vP0(1505,1614). vP0(1505,1615). vP0(1505,1617). vP0(1505,1618). vP0(1505,1619). vP0(1505,1620). vP0(1505,1621). vP0(1505,1623). vP0(1505,1624). vP0(1505,1625). vP0(1505,1627). vP0(1505,1628). vP0(1505,1629). vP0(1505,1631). vP0(1505,1632). vP0(1505,1633). vP0(1505,1634). vP0(1505,1636). vP0(1505,1637). vP0(1505,1638). vP0(1505,1639). vP0(1505,1640). vP0(1505,1641). vP0(1505,1642). vP0(1505,1643). vP0(1505,1644). vP0(1505,1647). vP0(1505,1648). vP0(1505,1649). vP0(1505,1650). vP0(1505,1651). vP0(1505,1652). 
vP0(1505,1653). vP0(1505,1654). vP0(1505,1655). vP0(1505,1656). vP0(1505,1658). vP0(1505,1659). vP0(1505,1660). vP0(1505,1661). vP0(1505,1662). vP0(1505,1663). vP0(1505,1664). vP0(1505,1665). vP0(1505,1666). vP0(1505,1668). vP0(1505,1669). vP0(1505,1670). vP0(1505,1671). vP0(1505,1672). vP0(1505,1673). vP0(1505,1674). vP0(1505,1675). vP0(1505,1676). vP0(1505,1677). vP0(1505,1678). vP0(1505,1679). vP0(1505,1680). vP0(1505,1681). vP0(1505,1682). vP0(1505,1684). vP0(1505,1685). vP0(1505,1686). vP0(1505,1687). vP0(1505,1688). vP0(1505,1689). vP0(1505,1690). vP0(1505,1691). vP0(1505,1692). vP0(1505,1694). vP0(1505,1695). vP0(1505,1696). vP0(1505,1697). vP0(1505,1698). vP0(1505,1699). vP0(1505,1700). vP0(1505,1701). vP0(1505,1702). vP0(1505,1703). vP0(1505,1706). vP0(1505,1707). vP0(1505,1708). vP0(1505,1709). vP0(1505,1710). vP0(1505,1711). vP0(1505,1712). vP0(1505,1713). vP0(1505,1714). vP0(1505,1715). vP0(1505,1716). vP0(1505,1717). vP0(1505,1718). vP0(1505,1720). vP0(1505,1721). vP0(1505,1722). vP0(1505,1724). vP0(1505,1725). vP0(1505,1726). vP0(1505,1727). vP0(1505,1729). vP0(1505,1730). vP0(1505,1731). vP0(1505,1732). vP0(1505,1733). vP0(1505,1734). vP0(1505,1735). vP0(1505,1736). vP0(1505,1738). vP0(1505,1739). vP0(1505,1740). vP0(1505,1741). vP0(1505,1742). vP0(1505,1743). vP0(1505,1744). vP0(1505,1745). vP0(1505,1746). vP0(1505,1747). vP0(1505,1748). vP0(1505,1749). vP0(1505,1750). vP0(1505,1752). vP0(1505,1753). vP0(1505,1754). vP0(1505,1755). vP0(1505,1756). vP0(1505,1761). vP0(1505,1763). vP0(1505,1764). vP0(1505,1765). vP0(1505,1766). vP0(1505,1767). vP0(1505,1768). vP0(1505,1769). vP0(1505,1770). vP0(1505,1771). vP0(1505,1777). vP0(1505,1778). vP0(1505,1779). vP0(1505,1780). vP0(1505,1781). vP0(1505,1782). vP0(1505,1783). vP0(1505,1784). vP0(1505,1785). vP0(1505,1786). vP0(1505,1787). vP0(1505,1788). vP0(1505,1789). vP0(1505,1790). vP0(1505,1791). vP0(1505,1793). vP0(1505,1794). vP0(1505,1795). vP0(1505,1796). vP0(1505,1797). vP0(1505,1798). vP0(1505,1799). 
vP0(1505,1800). vP0(1505,1801). vP0(1505,1802). vP0(1505,1803). vP0(1505,1804). vP0(1505,1805). vP0(1505,1806). vP0(1505,1807). vP0(1505,1808). vP0(1505,1809). vP0(1505,1810). vP0(1505,1811). vP0(1505,1812). vP0(1505,1813). vP0(1505,1814). vP0(1505,1815). vP0(1505,1816). vP0(1505,1817). vP0(1505,1818). vP0(1505,1819). vP0(1505,1820). vP0(1505,1821). vP0(1505,1822). vP0(1505,1823). vP0(1505,1824). vP0(1505,1825). vP0(1505,1826). vP0(1505,1827). vP0(1505,1828). vP0(1505,1829). vP0(1505,1830). vP0(1505,1831). vP0(1505,1832). vP0(1505,1833). vP0(1505,1834). vP0(1505,1835). vP0(1505,1836). vP0(1505,1837). vP0(1505,1839). vP0(1505,1840). vP0(1505,1841). vP0(1505,1842). vP0(1505,1843). vP0(1505,1844). vP0(1505,1845). vP0(1505,1846). vP0(1505,1847). vP0(1505,1848). vP0(1505,1849). vP0(1505,1850). vP0(1505,1851). vP0(1505,1852). vP0(1505,1854). vP0(1505,1856). vP0(1505,1857). vP0(1505,1859). vP0(1505,1860). vP0(1505,1862). vP0(1505,1863). vP0(1505,1865). vP0(1505,1866). vP0(1505,1867). vP0(1505,1868). vP0(1505,1869). vP0(1505,1870). vP0(1505,1871). vP0(1505,1872). vP0(1505,1873). vP0(1505,1874). vP0(1505,1875). vP0(1505,1876). vP0(1505,1878). vP0(1505,1880). vP0(1505,1881). vP0(1505,1882). vP0(1505,1883). vP0(1505,1884). vP0(1505,1886). vP0(1505,1887). vP0(1505,1888). vP0(1505,1889). vP0(1505,1890). vP0(1505,1891). vP0(1505,1895). vP0(1505,1898). vP0(1505,1899). vP0(1505,1900). vP0(1505,1901). vP0(1505,1902). vP0(1505,1903). vP0(1505,1905). vP0(1505,1906). vP0(1505,1907). vP0(1505,1908). vP0(1505,1909). vP0(1505,1910). vP0(1505,1911). vP0(1505,1912). vP0(1505,1913). vP0(1505,1914). vP0(1505,1915). vP0(1505,1916). vP0(1505,1917). vP0(1505,1918). vP0(1505,1919). vP0(1505,1920). vP0(1505,1921). vP0(1505,1923). vP0(1505,1926). vP0(1505,1927). vP0(1505,1928). vP0(1505,1929). vP0(1505,1930). vP0(1505,1932). vP0(1505,1933). vP0(1505,1934). vP0(1505,1935). vP0(1505,1936). vP0(1505,1937). vP0(1505,1938). vP0(1505,1939). vP0(1505,1940). vP0(1505,1942). vP0(1505,1943). vP0(1505,1944). 
vP0(1505,1945). vP0(1505,1946). vP0(1505,1947). vP0(1505,1949). vP0(1505,1950). vP0(1505,1951). vP0(1505,1952). vP0(1505,1953). vP0(1505,1954). vP0(1505,1955). vP0(1505,1956). vP0(1505,1957). vP0(1505,1958). vP0(1505,1959). vP0(1505,1963). vP0(1505,1965). vP0(1505,1966). vP0(1505,1967). vP0(1505,1968). vP0(1505,1969). vP0(1505,1970). vP0(1505,1971). vP0(1505,1972). vP0(1505,1973). vP0(1505,1974). vP0(1505,1975). vP0(1505,1976). vP0(1505,1977). vP0(1505,1978). vP0(1505,1979). vP0(1505,1980). vP0(1505,1981). vP0(1505,1982). vP0(1505,1983). vP0(1505,1984). vP0(1505,1985). vP0(1505,1986). vP0(1505,1987). vP0(1505,1988). vP0(1505,1989). vP0(1505,1990). vP0(1505,1991). vP0(1505,1992). vP0(1505,1993). vP0(1505,1994). vP0(1505,1995). vP0(1505,1996). vP0(1505,1997). vP0(1505,1998). vP0(1505,1999). vP0(1505,2003). vP0(1505,2004). vP0(1505,2007). vP0(1505,2008). vP0(1505,2009). vP0(1505,2010). vP0(1505,2011). vP0(1505,2012). vP0(1505,2014). vP0(1505,2015). vP0(1505,2017). vP0(1505,2018). vP0(1505,2019). vP0(1505,2020). vP0(1505,2021). vP0(1505,2022). vP0(1505,2023). vP0(1505,2024). vP0(1505,2027). vP0(1505,2028). vP0(1505,2031). vP0(1505,2032). vP0(1505,2033). vP0(1505,2034). vP0(1505,2035). vP0(1505,2036). vP0(1505,2037). vP0(1505,2038). vP0(1505,2039). vP0(1505,2040). vP0(1505,2041). vP0(1505,2042). vP0(1505,2043). vP0(1505,2044). vP0(1505,2045). vP0(1505,2046). vP0(1505,2047). vP0(1505,2048). vP0(1505,2049). vP0(1505,2050). vP0(1505,2051). vP0(1505,2052). vP0(1505,2053). vP0(1505,2054). vP0(1505,2055). vP0(1505,2056). vP0(1505,2057). vP0(1505,2058). vP0(1505,2059). vP0(1505,2061). vP0(1505,2063). vP0(1505,2064). vP0(1505,2065). vP0(1505,2066). vP0(1505,2067). vP0(1505,2068). vP0(1505,2069). vP0(1505,2070). vP0(1505,2071). vP0(1505,2072). vP0(1505,2073). vP0(1505,2074). vP0(1505,2075). vP0(1505,2076). vP0(1505,2077). vP0(1505,2078). vP0(1505,2079). vP0(1505,2080). vP0(1505,2081). vP0(1505,2083). vP0(1505,2084). vP0(1505,2085). vP0(1505,2086). vP0(1505,2088). vP0(1505,2089). 
vP0(1505,2090). vP0(1505,2091). vP0(1505,2092). vP0(1505,2093). vP0(1505,2094). vP0(1505,2095). vP0(1505,2096). vP0(1505,2097). vP0(1505,2098). vP0(1505,2100). vP0(1505,2101). vP0(1505,2102). vP0(1505,2103). vP0(1505,2104). vP0(1505,2105). vP0(1505,2106). vP0(1505,2107). vP0(1505,2108). vP0(1505,2109). vP0(1505,2110). vP0(1505,2111). vP0(1505,2112). vP0(1505,2113). vP0(1505,2114). vP0(1505,2115). vP0(1505,2116). vP0(1505,2117). vP0(1505,2118). vP0(1505,2119). vP0(1505,2120). vP0(1505,2121). vP0(1505,2124). vP0(1505,2125). vP0(1505,2126). vP0(1505,2127). vP0(1505,2128). vP0(1505,2129). vP0(1505,2130). vP0(1505,2131). vP0(1505,2132). vP0(1505,2133). vP0(1505,2135). vP0(1505,2136). vP0(1505,2137). vP0(1505,2138). vP0(1505,2139). vP0(1505,2140). vP0(1505,2145). vP0(1505,2146). vP0(1505,2147). vP0(1505,2149). vP0(1505,2153). vP0(1505,2154). vP0(1505,2156). vP0(1505,2157). vP0(1505,2158). vP0(1505,2159). vP0(1505,2160). vP0(1505,2161). vP0(1505,2162). vP0(1505,2163). vP0(1505,2164). vP0(1505,2165). vP0(1505,2166). vP0(1505,2167). vP0(1505,2168). vP0(1505,2169). vP0(1505,2170). vP0(1505,2171). vP0(1505,2172). vP0(1505,2176). vP0(1505,2180). vP0(1505,2181). vP0(1505,2182). vP0(1505,2183). vP0(1505,2184). vP0(1505,2185). vP0(1505,2186). vP0(1505,2189). vP0(1505,2190). vP0(1505,2191). vP0(1505,2192). vP0(1505,2194). vP0(1505,2195). vP0(1505,2196). vP0(1505,2197). vP0(1505,2198). vP0(1505,2199). vP0(1505,2200). vP0(1505,2201). vP0(1505,2202). vP0(1505,2203). vP0(1505,2205). vP0(1505,2206). vP0(1505,2207). vP0(1505,2208). vP0(1505,2209). vP0(1505,2210). vP0(1505,2212). vP0(1505,2213). vP0(1505,2214). vP0(1505,2215). vP0(1505,2216). vP0(1505,2217). vP0(1505,2218). vP0(1505,2219). vP0(1505,2221). vP0(1505,2222). vP0(1505,2223). vP0(1505,2224). vP0(1505,2225). vP0(1505,2226). vP0(1505,2227). vP0(1505,2228). vP0(1505,2229). vP0(1505,2230). vP0(1505,2231). vP0(1505,2232). vP0(1505,2233). vP0(1505,2234). vP0(1505,2237). vP0(1505,2238). vP0(1505,2239). vP0(1505,2240). vP0(1505,2241). 
vP0(1505,2242). vP0(1505,2243). vP0(1505,2244). vP0(1505,2245). vP0(1505,2246). vP0(1505,2247). vP0(1505,2248). vP0(1505,2249). vP0(1505,2250). vP0(1505,2251). vP0(1505,2253). vP0(1505,2254). vP0(1505,2255). vP0(1505,2260). vP0(1505,2261). vP0(1505,2263). vP0(1505,2264). vP0(1505,2265). vP0(1505,2266). vP0(1505,2267). vP0(1505,2268). vP0(1505,2269). vP0(1505,2270). vP0(1505,2271). vP0(1505,2272). vP0(1505,2273). vP0(1505,2274). vP0(1505,2275). vP0(1505,2276). vP0(1505,2277). vP0(1505,2279). vP0(1505,2280). vP0(1505,2281). vP0(1505,2282). vP0(1505,2283). vP0(1505,2284). vP0(1505,2285). vP0(1505,2286). vP0(1505,2287). vP0(1505,2288). vP0(1505,2290). vP0(1505,2291). vP0(1505,2292). vP0(1505,2293). vP0(1505,2294). vP0(1505,2297). vP0(1505,2298). vP0(1505,2299). vP0(1505,2300). vP0(1505,2301). vP0(1505,2302). vP0(1505,2303). vP0(1505,2304). vP0(1505,2305). vP0(1505,2306). vP0(1505,2307). vP0(1505,2308). vP0(1505,2309). vP0(1505,2310). vP0(1505,2311). vP0(1505,2312). vP0(1505,2313). vP0(1505,2315). vP0(1505,2316). vP0(1505,2317). vP0(1505,2318). vP0(1505,2319). vP0(1505,2320). vP0(1505,2321). vP0(1505,2322). vP0(1505,2323). vP0(1505,2324). vP0(1505,2325). vP0(1505,2326). vP0(1505,2327). vP0(1505,2328). vP0(1505,2329). vP0(1505,2330). vP0(1505,2331). vP0(1505,2332). vP0(1505,2333). vP0(1505,2334). vP0(1505,2335). vP0(1505,2336). vP0(1505,2337). vP0(1505,2338). vP0(1505,2339). vP0(1505,2340). vP0(1505,2341). vP0(1505,2344). vP0(1505,2345). vP0(1505,2347). vP0(1505,2348). vP0(1505,2350). vP0(1505,2351). vP0(1505,2352). vP0(1505,2353). vP0(1505,2354). vP0(1505,2355). vP0(1505,2356). vP0(1505,2357). vP0(1505,2359). vP0(1505,2360). vP0(1505,2361). vP0(1505,2362). vP0(1505,2363). vP0(1505,2364). vP0(1505,2365). vP0(1505,2366). vP0(1505,2367). vP0(1505,2368). vP0(1505,2369). vP0(1505,2370). vP0(1505,2371). vP0(1505,2372). vP0(1505,2373). vP0(1505,2374). vP0(1505,2375). vP0(1505,2376). vP0(1505,2377). vP0(1505,2378). vP0(1505,2379). vP0(1505,2380). vP0(1505,2381). vP0(1505,2382). 
vP0(1505,2385). vP0(1505,2386). vP0(1505,2387). vP0(1505,2388). vP0(1505,2389). vP0(1505,2390). vP0(1505,2391). vP0(1505,2392). vP0(1505,2394). vP0(1505,2395). vP0(1505,2396). vP0(1505,2397). vP0(1505,2398). vP0(1505,2399). vP0(1505,2400). vP0(1505,2401). vP0(1505,2402). vP0(1505,2403). vP0(1505,2404). vP0(1505,2405). vP0(1505,2406). vP0(1505,2407). vP0(1505,2408). vP0(1505,2412). vP0(1505,2413). vP0(1505,2414). vP0(1505,2415). vP0(1505,2416). vP0(1505,2417). vP0(1505,2418). vP0(1505,2419). vP0(1505,2420). vP0(1505,2421). vP0(1505,2422). vP0(1505,2423). vP0(1505,2424). vP0(1505,2425). vP0(1505,2426). vP0(1505,2427). vP0(1505,2428). vP0(1505,2429). vP0(1505,2430). vP0(1505,2433). vP0(1505,2434). vP0(1505,2435). vP0(1505,2436). vP0(1505,2437). vP0(1505,2438). vP0(1505,2439). vP0(1505,2440). vP0(1505,2441). vP0(1505,2442). vP0(1505,2443). vP0(1505,2444). vP0(1505,2445). vP0(1505,2446). vP0(1505,2447). vP0(1505,2448). vP0(1505,2449). vP0(1505,2450). vP0(1505,2451). vP0(1505,2452). vP0(1505,2453). vP0(1505,2454). vP0(1505,2455). vP0(1505,2456). vP0(1505,2457). vP0(1505,2458). vP0(1505,2459). vP0(1505,2460). vP0(1505,2461). vP0(1505,2463). vP0(1505,2464). vP0(1505,2465). vP0(1505,2466). vP0(1505,2467). vP0(1505,2468). vP0(1505,2469). vP0(1505,2470). vP0(1505,2471). vP0(1505,2472). vP0(1505,2473). vP0(1505,2474). vP0(1505,2475). vP0(1505,2476). vP0(1505,2477). vP0(1505,2478). vP0(1505,2479). vP0(1505,2480). vP0(1505,2481). vP0(1505,2482). vP0(1505,2483). vP0(1505,2484). vP0(1505,2485). vP0(1505,2488). vP0(1505,2490). vP0(1505,2497). vP0(1505,2498). vP0(1505,2499). vP0(1505,2500). vP0(1505,2503). vP0(1505,2504). vP0(1505,2505). vP0(1505,2506). vP0(1505,2507). vP0(1505,2508). vP0(1505,2509). vP0(1505,2510). vP0(1505,2511). vP0(1505,2512). vP0(1505,2513). vP0(1505,2514). vP0(1505,2515). vP0(1505,2516). vP0(1505,2517). vP0(1505,2518). vP0(1505,2519). vP0(1505,2520). vP0(1505,2521). vP0(1505,2523). vP0(1505,2524). vP0(1505,2525). vP0(1505,2526). vP0(1505,2527). vP0(1505,2528). 
vP0(1505,2529). vP0(1505,2530). vP0(1505,2531). vP0(1505,2532). vP0(1505,2533). vP0(1505,2534). vP0(1505,2535). vP0(1505,2536). vP0(1505,2537). vP0(1505,2538). vP0(1505,2539). vP0(1505,2540). vP0(1505,2541). vP0(1505,2542). vP0(1505,2543). vP0(1505,2544). vP0(1505,2545). vP0(1505,2546). vP0(1505,2547). vP0(1505,2548). vP0(1505,2549). vP0(1505,2550). vP0(1505,2551). vP0(1505,2552). vP0(1505,2553). vP0(1505,2554). vP0(1505,2555). vP0(1505,2556). vP0(1505,2557). vP0(1505,2558). vP0(1505,2559). vP0(1505,2560). vP0(1505,2562). vP0(1505,2563). vP0(1505,2564). vP0(1505,2565). vP0(1505,2566). vP0(1505,2567). vP0(1505,2568). vP0(1505,2569). vP0(1505,2570). vP0(1505,2571). vP0(1505,2572). vP0(1505,2573). vP0(1505,2574). vP0(1505,2575). vP0(1505,2576). vP0(1505,2577). vP0(1505,2578). vP0(1505,2579). vP0(1505,2580). vP0(1505,2581). vP0(1505,2582). vP0(1505,2583). vP0(1505,2585). vP0(1505,2586). vP0(1505,2588). vP0(1505,2589). vP0(1505,2590). vP0(1505,2591). vP0(1505,2592). vP0(1505,2593). vP0(1505,2594). vP0(1505,2595). vP0(1505,2596). vP0(1505,2597). vP0(1505,2598). vP0(1505,2599). vP0(1505,2600). vP0(1505,2601). vP0(1505,2602). vP0(1505,2603). vP0(1505,2604). vP0(1505,2605). vP0(1505,2606). vP0(1505,2607). vP0(1505,2608). vP0(1505,2609). vP0(1505,2610). vP0(1505,2611). vP0(1505,2612). vP0(1505,2613). vP0(1505,2614). vP0(1505,2616). vP0(1505,2617). vP0(1505,2618). vP0(1505,2619). vP0(1505,2621). vP0(1505,2622). vP0(1505,2624). vP0(1505,2625). vP0(1505,2626). vP0(1505,2628). vP0(1505,2629). vP0(1505,2630). vP0(1505,2631). vP0(1505,2632). vP0(1505,2633). vP0(1505,2634). vP0(1505,2635). vP0(1505,2636). vP0(1505,2637). vP0(1505,2638). vP0(1505,2639). vP0(1505,2640). vP0(1505,2642). vP0(1505,2643). vP0(1505,2644). vP0(1505,2645). vP0(1505,2646). vP0(1505,2647). vP0(1505,2648). vP0(1505,2649). vP0(1505,2650). vP0(1505,2651). vP0(1505,2652). vP0(1505,2653). vP0(1505,2654). vP0(1505,2655). vP0(1505,2656). vP0(1505,2657). vP0(1505,2658). vP0(1505,2659). vP0(1505,2660). vP0(1505,2661). 
vP0(1505,2662). vP0(1505,2664). vP0(1505,2665). vP0(1505,2666). vP0(1505,2667). vP0(1505,2668). vP0(1505,2669). vP0(1505,2670). vP0(1505,2672). vP0(1505,2673). vP0(1505,2674). vP0(1505,2675). vP0(1505,2676). vP0(1505,2677). vP0(1505,2678). vP0(1505,2679). vP0(1505,2680). vP0(1505,2681). vP0(1505,2682). vP0(1505,2683). vP0(1505,2684). vP0(1505,2686). vP0(1505,2687). vP0(1505,2689). vP0(1505,2690). vP0(1505,2691). vP0(1505,2692). vP0(1505,2693). vP0(1505,2694). vP0(1505,2695). vP0(1505,2696). vP0(1505,2697). vP0(1505,2698). vP0(1505,2699). vP0(1505,2700). vP0(1505,2701). vP0(1505,2702). vP0(1505,2703). vP0(1505,2704). vP0(1505,2705). vP0(1505,2706). vP0(1505,2707). vP0(1505,2708). vP0(1505,2709). vP0(1505,2710). vP0(1505,2711). vP0(1505,2712). vP0(1505,2713). vP0(1505,2714). vP0(1505,2715). vP0(1505,2716). vP0(1505,2717). vP0(1505,2718). vP0(1505,2719). vP0(1505,2720). vP0(1505,2721). vP0(1505,2722). vP0(1505,2723). vP0(1505,2724). vP0(1505,2725). vP0(1505,2728). vP0(1505,2729). vP0(1505,2730). vP0(1505,2731). vP0(1505,2732). vP0(1505,2733). vP0(1505,2734). vP0(1505,2735). vP0(1505,2736). vP0(1505,2737). vP0(1505,2738). vP0(1505,2739). vP0(1505,2740). vP0(1505,2741). vP0(1505,2742). vP0(1505,2743). vP0(1505,2744). vP0(1505,2745). vP0(1505,2746). vP0(1505,2747). vP0(1505,2748). vP0(1505,2749). vP0(1505,2750). vP0(1505,2751). vP0(1505,2754). vP0(1505,2755). vP0(1505,2759). vP0(1505,2761). vP0(1505,2762). vP0(1505,2763). vP0(1505,2764). vP0(1505,2765). vP0(1505,2766). vP0(1505,2767). vP0(1505,2768). vP0(1505,2769). vP0(1505,2770). vP0(1505,2771). vP0(1505,2772). vP0(1505,2773). vP0(1505,2774). vP0(1505,2775). vP0(1505,2776). vP0(1505,2777). vP0(1505,2778). vP0(1505,2779). vP0(1505,2780). vP0(1505,2781). vP0(1505,2782). vP0(1505,2783). vP0(1505,2784). vP0(1505,2785). vP0(1505,2786). vP0(1505,2787). vP0(1505,2788). vP0(1505,2789). vP0(1505,2790). vP0(1505,2795). vP0(1505,2796). vP0(1505,2797). vP0(1505,2798). vP0(1505,2799). vP0(1505,2800). vP0(1505,2801). vP0(1505,2802). 
vP0(1505,2803). vP0(1505,2804). vP0(1505,2805). vP0(1505,2806). vP0(1505,2807). vP0(1505,2808). vP0(1505,2809). vP0(1505,2810). vP0(1505,2811). vP0(1505,2812). vP0(1505,2813). vP0(1505,2815). vP0(1505,2817). vP0(1505,2818). vP0(1505,2819). vP0(1505,2824). vP0(1505,2825). vP0(1505,2826). vP0(1505,2827). vP0(1505,2828). vP0(1505,2829). vP0(1505,2831). vP0(1505,2832). vP0(1505,2833). vP0(1505,2834). vP0(1505,2835). vP0(1505,2836). vP0(1505,2837). vP0(1505,2838). vP0(1505,2839). vP0(1505,2840). vP0(1505,2841). vP0(1505,2842). vP0(1505,2845). vP0(1505,2846). vP0(1505,2847). vP0(1505,2848). vP0(1505,2849). vP0(1505,2850). vP0(1505,2851). vP0(1505,2852). vP0(1505,2853). vP0(1505,2854). vP0(1505,2855). vP0(1505,2856). vP0(1505,2857). vP0(1505,2858). vP0(1505,2859). vP0(1505,2860). vP0(1505,2861). vP0(1505,2862). vP0(1505,2863). vP0(1505,2864). vP0(1505,2865). vP0(1505,2866). vP0(1505,2867). vP0(1505,2868). vP0(1505,2869). vP0(1505,2870). vP0(1505,2871). vP0(1505,2872). vP0(1505,2873). vP0(1505,2874). vP0(1505,2875). vP0(1505,2876). vP0(1505,2877). vP0(1505,2878). vP0(1505,2879). vP0(1505,2880). vP0(1505,2881). vP0(1505,2882). vP0(1505,2883). vP0(1505,2884). vP0(1505,2885). vP0(1505,2886). vP0(1505,2887). vP0(1505,2888). vP0(1505,2889). vP0(1505,2890). vP0(1505,2892). vP0(1505,2893). vP0(1505,2894). vP0(1505,2895). vP0(1505,2896). vP0(1505,2897). vP0(1505,2898). vP0(1505,2899). vP0(1505,2900). vP0(1505,2901). vP0(1505,2902). vP0(1505,2903). vP0(1505,2904). vP0(1505,2905). vP0(1505,2906). vP0(1505,2907). vP0(1505,2908). vP0(1505,2909). vP0(1505,2910). vP0(1505,2911). vP0(1505,2912). vP0(1505,2913). vP0(1505,2914). vP0(1555,1597). vP0(1560,1061). vP0(3541,1570). vP0(3543,1897). vP0(3643,1576). vP0(4253,1594). vP0(4268,1597). vP0(4272,1612). vP0(4273,2082). vP0(9537,2123). vP0(9874,2155). vP0(10283,2204). vP0(11552,2358). vP0(14768,2760).
datalaude/datalaude
examples/examples/datalog/joone/vP0.pl
Perl
bsd-3-clause
35,833
#!/usr/bin/perl ## ## Plots energy levels using GetEnLevels.sh ## sub print_help(){ print "Usage: "; print "$0 -f file --N0=N0 --Nf=Nf --E1chain=f (--plot --Nlevels=Nlevels) \n"; print " Options: \n"; print " -v, --verbose : display messages \n"; print " --nosave : does not save file \n"; print " --outname : saves in \"phase_shift_[outname].dat\" \n"; exit(0); } ####################### sub dEqual{ ## Checks if A=B (can be real numbers) my $NoArgs=@_; my $NumA=shift(@_); my $NumB=shift(@_); my $Tolerance=1.0E-15; if ($NoArgs>2){$Tolerance=shift(@_);} ##print "NoArgs = $NoArgs, tol = $Tolerance \n"; if ($NumA==$NumB){return(1);} else{ if ( abs($NumA-$NumB)<$Tolerance){return(1);} else{return(0);} } } ####################### sub GetSinglePLev{ my $HoleEnLevels=shift(@_); my $SinglePLev=shift(@_); ## No of levels: my $Nlevels=$#HoleEnLevels+1; my $PrevLevel=-100; my $CurrentLevel=0; my $idegen=1; ### ### 1 - Filter Degeneracies ### for (my $ii=0; $ii<50;$ii++){ $CurrentLevel=@$HoleEnLevels[$ii]; ## if ((abs($CurrentLevel-$PrevLevel)<0.01)|| ## (abs($CurrentLevel)<0.0001)){ if (abs($CurrentLevel-$PrevLevel)<0.01){ $idegen++; ## print "Found GS/deg level: deg = $idegen \n" ; } else{$idegen=1;push(@$SinglePLev,$CurrentLevel);} $PrevLevel=$CurrentLevel; } ## Watch the syntax!! Makes you crazy... my $NSPlevels=$#$SinglePLev+1; ## foreach $en (@$SinglePLev){ ## print "En = $en \n"; ## } ### ### 2 - Filter multiples of same level ### my $BaseLevel=-100; for (my $ii=0; $ii<$NSPlevels;$ii++){ $BaseLevel=@$SinglePLev[$ii]; for (my $jj=$ii+1; $jj<$NSPlevels;$jj++){ $CurrentLevel=@$SinglePLev[$jj]; my $Ratio=$CurrentLevel/$BaseLevel; if ( (($Ratio-int($Ratio))<0.001)|| (($Ratio-int($Ratio))>0.999) ) { ## Remove/Insert elements from list: ## splice(@Nums, pos, no_el_to_replace, New list) splice(@$SinglePLev,$jj,1,() ); $NSPlevels--; } ## else {print "No! 
\n";} } ### end loop in jj } ## end loop in ii ### ### 3 - Filter more complicated combinations of different levels ### (still to come) ### return(0); } ####################### ## ## main ## use Getopt::Long; use constant PI => 4*atan2(1, 1); if ( @ARGV > 0 ){ GetOptions('f|file=s'=>\$FileName, 'N0=i' =>\$N0, 'Nf=i' =>\$Nf, 'plot' =>\$plot, 'Nlevels=i'=>\$NlevelsMax, 'E1chain=f'=>\$E1chain, 'v|verbose'=>\$verbose, 'nosave' => \$nosave, 'outname=s' =>\$outname, 'h|help' =>\$help); if ((!$N0)||(!$Nf)){print_help();} if (!$FileName){$FileName="output2ch.txt";} if (!$NlevelsMax){$NlevelsMax=60;} if (!defined($E1chain)){$E1chain=0.5;} } else{print_help();} if ($help){print_help();} print "File = $FileName \n"; print "N0 = $N0 \n"; print "Nf = $Nf \n"; my @En_N=(); #List of lists my $NumNs=0; ##my $NlevelsMax=50; ## ## Get Extension ## if (defined($outname)){$Ext=$outname;} else{ $Ext=$FileName; $Ext =~ s/^output_//; $Ext =~ s/.txt$//; } if (!defined($nosave)){open(SAVEPHASE,"> phase_shift_$Ext.dat");} for ($Ns=$N0;$Ns<=$Nf;$Ns+=2){ my @HoleEnLevels=(); my @ElecEnLevels=(); my @HolePm1EnLevels=(); my @ElecPm1EnLevels=(); my @SPholeLevs=(); my @SPelecLevs=(); my @SPholePm1Levs=(); my @SPelecPm1Levs=(); my $nlevels=0; my @LineData=(); my $Nqns=0; my $Qgs=0; my $Sgs=0; my $Pgs=0; my $NumGS=0; my $TotNlev=0; my @QGS=(); my @SGS=(); my @PGS=(); print "Obtaining levels: N = $Ns ; "; ## push(@HoleEnLevels,$Ns); ## set Ns ## $GetEnCommand="GetEnLevels -f $FileName -N $Ns | grep \"| -1 0\" | awk \'{print \$1}\'"; $GetEnCommand="GetEnLevels -f $FileName -N $Ns "; ## print "Command = $GetEnCommand \n"; open (GREPDATA,"$GetEnCommand |"); while (<GREPDATA>){ $TheLine=$_; chomp($TheLine); @LineData=split(/ +/,$TheLine); ### Get No QNs: $Nqns=@LineData-4; ### Set "parity=1" if onle Q,S if ($Nqns==2){$LineData[4]=1;} ## ## Find Ground state(s) for different parities ## if (dEqual($LineData[0],0.0,0.0001)){ push (@QGS,$LineData[2]); if ($Nqns>1){push(@SGS,$LineData[3]);} if 
($Nqns>2){push(@PGS,$LineData[4]);} $NumGS++; if (defined($verbose)){ print "GS found: \n"; print " QNs : "; for (my $ii=0;$ii<$Nqns;$ii++){ print $LineData[2+$ii]." "; } print "\n"; print "Num Gs = $NumGS \n"; print "Qgs : ".$QGS[$NumGS-1]." \n"; print "Sgs : ".$SGS[$NumGS-1]." \n"; print "Pgs : ".$PGS[$NumGS-1]." \n"; } ## verbose } ## end if E_i =0 ## if (dEqual($LineData[0],0.0)){ ## $Qgs=$LineData[2]; ## if ($Nqns>1){$Sgs=$LineData[3];} ## if ($Nqns>2){$Pgs=$LineData[4];} ## if (defined($verbose)){ ## print "GS found: Q = $Qgs \n"; ## print " QNs : "; ## for (my $ii=0;$ii<$Nqns;$ii++){ ## print $LineData[2+$ii]." "; ## } ## print "\n"; ## } ## verbose ## } $nlevels++; } close(GREPDATA); $TotNlev=$nlevels; print "Nlevels = ".$TotNlev." \n"; print "N GS = ".$NumGS." \n"; ## ## Once the GS have been identified, get the excitation spectrum ## $nlevels=0; open (GREPDATA,"$GetEnCommand |"); while (<GREPDATA>){ $TheLine=$_; chomp($TheLine); @LineData=split(/ +/,$TheLine); ## Loop in GS for (my $igs=0;$igs<$NumGS;$igs++){ ## ## Hole excitations (P=1) ## if ( (!dEqual($LineData[0],0.0,0.0001))&& (dEqual($LineData[2],$QGS[$igs]-1.0))&& ( (dEqual($LineData[3],abs($SGS[$igs]-0.5)))|| (dEqual($LineData[3],abs($SGS[$igs]+0.5))) )&& (dEqual($LineData[4],1)) ){ if ($nlevels<=$NlevelsMax){push(@HoleEnLevels,$LineData[0]);} } ## ## Electron excitations (P=1) ## if ( (!dEqual($LineData[0],0.0,0.0001))&& (dEqual($LineData[2],$QGS[$igs]+1.0))&& ( (dEqual($LineData[3],abs($SGS[$igs]-0.5)))|| (dEqual($LineData[3],abs($SGS[$igs]+0.5))) )&& (dEqual($LineData[4],1)) ){ if ($nlevels<=$NlevelsMax){push(@ElecEnLevels,$LineData[0]);} } if ($Nqns==3){ ## ## Hole excitations (P=-1) ## if ( (!dEqual($LineData[0],0.0,0.0001))&& (dEqual($LineData[2],$QGS[$igs]-1.0))&& ( (dEqual($LineData[3],abs($SGS[$igs]-0.5)))|| (dEqual($LineData[3],abs($SGS[$igs]+0.5))) )&& (dEqual($LineData[4],-1)) ){ if ($nlevels<=$NlevelsMax){push(@HolePm1EnLevels,$LineData[0]);} } ## ## Electron excitations (P=-1) ## if 
( (!dEqual($LineData[0],0.0,0.0001))&& (dEqual($LineData[2],$QGS[$igs]+1.0))&& ( (dEqual($LineData[3],abs($SGS[$igs]-0.5)))|| (dEqual($LineData[3],abs($SGS[$igs]+0.5))) )&& (dEqual($LineData[4],-1)) ){ if ($nlevels<=$NlevelsMax){push(@ElecPm1EnLevels,$LineData[0]);} } } ## end if nqns=3 } ##end loop in GS $nlevels++; } ## end while grep data close(GREPDATA); GetSinglePLev(\@HoleEnLevels,\@SPholeLevs); GetSinglePLev(\@ElecEnLevels,\@SPelecLevs); if ($Nqns==3){ GetSinglePLev(\@HolePm1EnLevels,\@SPholePm1Levs); GetSinglePLev(\@ElecPm1EnLevels,\@SPelecPm1Levs); } if (defined($verbose)){ print "Hole single-particle P=1 levels \n"; for (my $ii=0;$ii<6;$ii++){ $en=$SPholeLevs[$ii]; print "Eh_SP = $en , delta/pi = ".$en/(2.0*$E1chain)." \n"; } print "Electron single-particle P=1 levels \n"; for (my $ii=0;$ii<6;$ii++){ $en=$SPelecLevs[$ii]; print "Ee_SP = $en \n"; } print "Hole single-particle P=-1 levels \n"; for (my $ii=0;$ii<6;$ii++){ $en=$SPholePm1Levs[$ii]; print "EhP=-1_SP = $en , delta/pi = ".$en/(2.0*$E1chain)." \n"; } print "Electron single-particle P=-1 levels \n"; for (my $ii=0;$ii<6;$ii++){ $en=$SPelecPm1Levs[$ii]; print "EeP=-1_SP = $en \n"; } } ##verbose ## ## Calc phase shift ## my $PhaseShift=0.0; my $Conductance=0.0; my $ps_even=0.0; my $ps_odd=0.0; my $UseHoleLevs=0; if (($SPelecLevs[0]-$SPholeLevs[0])>0.001){ $UseHoleLevs=1; if (defined($verbose)){print "Using hole phase shift. 
\n"} } $ps_even=$SPelecLevs[0]/(2.0*$E1chain); if ($UseHoleLevs){$ps_even=$SPholeLevs[0]/(2.0*$E1chain);} if ($Nqns==3){ $ps_odd=$SPelecPm1Levs[0]/(2.0*$E1chain); if ($UseHoleLevs){$ps_odd=$SPholePm1Levs[0]/(2.0*$E1chain);} ## $PhaseShift=abs(($SPelecLevs[0]-$SPelecPm1Levs[0])/(2.0*$E1chain)); $PhaseShift=abs($ps_even-$ps_odd); $Conductance=(sin(PI*$PhaseShift))**2; } print "ps: $Ns even: $ps_even odd : $ps_odd |even-odd|: $PhaseShift \n"; printf ("N : %i e: %7.5f o: %7.5f |o-e| : %7.5f G: %7.5f \n", $Ns, $ps_even, $ps_odd , $PhaseShift, $Conductance); if (!defined($nosave)){ print SAVEPHASE "$Ns $ps_even $ps_odd $PhaseShift $Conductance \n";} ## push(@En_N,\@HoleEnLevels); $NumNs++; } ##end Loop in Ns if (!defined($nosave)){close(SAVEPHASE);}
cifu9502/nrgcode
src/Scripts/CalcPhaseShiftLevels.perl
Perl
mit
9,031
#!/usr/bin/env perl use Modern::Perl; use Mojo::UserAgent; use HTML::Entities; use JSON; use Date::Manip; use Date::Calc; use Mojo::Log; use Digest::SHA1 qw(sha1_hex); use Search::Elasticsearch; use Data::Dumper; my $log = Mojo::Log->new(); my %months = qw(jan 1 feb 2 mar 3 apr 4 may 5 jun 6 jul 7 aug 8 sep 9 oct 10 nov 11 dec 12); my $es = Search::Elasticsearch->new( nodes => '127.0.0.1:9200' ); my $res = $es->search( index => 'bkmrx', type => 'users', body => { query => { regexp => { 'social.amazon_wishlist' => '.+' } } } ); for my $doc (@{$res->{'hits'}->{'hits'}}) { my $now = DateTime->now(time_zone => 'Europe/London'); next unless $doc->{'_source'}->{'logged_in'}; my $last_in = DateTime->from_epoch( epoch => $doc->{'_source'}->{'logged_in'}, time_zone => 'Europe/London' ); my $dt_diff = $now - $last_in; # don't process if user hasn't logged in in 2 months next if $dt_diff->days > 60; next unless my $url = $doc->{'_source'}->{'social'}->{'amazon_wishlist'}; next unless $url =~ m{^https?://www\.amazon\.co(?:\.uk|m/)}i; my $user_id = $doc->{'_id'}; my $ua = Mojo::UserAgent->new; my $tx = $ua->get($url); if (my $res = $tx->success) { my $dom = $res->dom; for my $e ($dom->find('table.jcb')->each) { my $g_url = $e->at('a.Axc')->{'href'}; my $created_at = time(); my $title = $e->at('a.Axc')->all_text; my $desc = $e->at('p.zxc')->all_text; my $g_url_id; my $doc_id = sha1_hex($user_id . 
$g_url); my $url_hex = sha1_hex($g_url); # ignore is bad behaviour; need to check for error, then update # if already exists $es->create( index => 'bkmrx', type => 'url', id => $url_hex, ignore => [409], body => { url => $g_url, crawled => 0, # hex => sha1_hex($g_url), added_by => $user_id, votes => 1, } ); $es->create( index => 'bkmrx', type => 'bookmark', id => $doc_id, ignore => [409], parent => $url_hex, body => { user_id => $user_id, url => $g_url, user_title => $title, user_description => $desc, private => 0, from => 'amazon_wishlist', added => (time() * 1000), tags => ['amazon-wishlist', 'to-buy'] } ); } } else { my ($err, $code) = $tx->error; $log->error( "$url error: $err - $code"); } }
robhammond/bkmrx.com
bin/fetch-amazon-wishlist.pl
Perl
mit
3,005
#!/usr/bin/perl # crhDebug.pm # # debug , message and logging subroutines # v1.01 crh 12-jan-09 -- initial release, extracted from crhPop3Mail.pm # v1.14 crh 16-jan-09 -- extended with message and logging subs # v1.20 crh 22-jun-13 -- incorporate output suppression or printing just newline # setDbg($) -- set debug status # isDbg() -- returns dbg status # setDbgProgName($) -- sets the program name used in various subs [optional] # dbgMsg($) -- prints debug labelled message to STDERR if debug active # dbgTMsg($) -- prints debug labelled timestamp prepended message to STDERR if debug active # statusDbgMsg($$$) -- prints formatted message to STDERR if debug active # statusDbgTMsg($$$) -- prints formatted message, including timestamp, to STDERR if debug active # setLogDyName($;$) -- set log file name with YYYMMDD prefix, incorporating directory if supplied # setLogMnName($;$) -- set log file name with YYYMM prefix, incorporating directory if supplied # setLogName($;$) -- set log file name, incorporating directory if supplied # openLog() -- opens log file handle # closeLog() -- closes log file handle # logMsg($) -- prints message to STDERR and also writes datestamp prefixed message to the log file if open # the following include backwards-compatible suppress & print just newline capability # msg(;$$) -- prints message to STDOUT # errMsg(;$$) -- prints message to STDERR # tMsg(;$$) -- prints timestamp prepended message to STDOUT # errTMsg(;$$) -- prints timestamp prepended message to STDERR # dMsg(;$$) -- prints datestamp prepended message to STDOUT # errDMsg(;$$) -- prints datestamp prepended message to STDERR # the following include backwards-compatible suppresscapability # statusMsg($$$;$) -- prints formatted message to STDOUT # statusTMsg($$$;$) -- prints formatted message, including timestamp, to STDOUT # statusErrMsg($$$;$) -- prints formatted message to STDERR # statusErrTMsg($$$;$) -- prints formatted message, including timestamp, to STDERR package crhDebug; use 
Exporter; @ISA = ("Exporter"); @EXPORT = qw(&setDbg &isDbg &setDbgProgName &dbgMsg &dbgTMsg &statusDbgMsg &statusDbgTMsg &statusMsg &statusTMsg &statusErrMsg &statusErrTMsg &setLogDyName &setLogMnName &setLogName &openLog &closeLog &logMsg &msg &errMsg &tMsg &errTMsg &dMsg &errDMsg); use warnings; use strict; use File::Basename; use POSIX; INIT { #### persistent private variables for subroutines :-) my $debug = 0; # off by default # set default value for $dbgProgName -- use setProgName() to be safe my ($dbgProgName, $dbgDir, $dbgExt) = fileparse($0, ".pl"); # set default value for log file name -- use setLogName() to be safe my $logFile = strftime("%Y%m", localtime) . "$dbgProgName.log"; my $logF; # file handle for log file sub setDbg ($) { # set debug status $debug = $_[0]; return $debug; } sub isDbg { # return debug status return $debug; } sub setDbgProgName (;$) { # set program name # use to set to a sensible value before calling following subroutines # resets deafult value if no argument supplied # returns the set value after calling this subroutine # arg: required program name value if ($_[0]) { $dbgProgName = $_[0]; } else { ($dbgProgName, $dbgDir, $dbgExt) = fileparse($0, ".pl"); } return $dbgProgName; } sub dbgMsg ($) { # print simple debug message to STDERR, possibly if ($debug) { print STDERR "DEBUG-$_[0]\n"; } return $debug; } sub dbgTMsg ($) { # print debug message with prepended timestamp to STDERR, possibly if ($debug) { print STDERR "DEBUG -- [" . dbgTimestamp() . "] $_[0]\n"; } return $debug; } sub statusDbgMsg ($$$) { # print message in standard format to STDERR, possibly # args: status, function, message if ($debug) { print STDERR "$_[0]-$dbgProgName-$_[1] -- $_[2]\n"; } return $debug; } sub statusDbgTMsg ($$$) { # print message in standard format prepended with timestamp to STDERR, possibly # args: status, function, message if ($debug) { print STDERR "$_[0]-$dbgProgName-$_[1] -- [" . dbgTimestamp() . 
"] $_[2]\n"; } return $debug; } sub statusMsg ($$$;$) { # print message in standard format to STDOUT # args: status, function, message return if $_[3]; print STDOUT "$_[0]-$dbgProgName-$_[1] -- $_[2]\n"; } sub statusTMsg ($$$;$) { # print message in standard format, prepended with timestamp to STDOUT # args: status, function, message [, suppress] return if $_[3]; print STDOUT "$_[0]-$dbgProgName-$_[1] -- [" . dbgTimestamp() . "] $_[2]\n"; } sub statusErrMsg ($$$;$) { # print message in standard format to STDERR # args: status, function, message [, suppress] return if $_[3]; print STDERR "$_[0]-$dbgProgName-$_[1] -- $_[2]\n"; } sub statusErrTMsg ($$$;$) { # print message in standard format, prepended with timestamp to STDERR # args: status, function, message [, suppress] return if $_[3]; print STDERR "$_[0]-$dbgProgName-$_[1] -- [" . dbgTimestamp() . "] $_[2]\n"; } sub setLogDyName ($;$) { # set log file name withYYMMDD prefix, using optional directory if supplied # args: file name stem [, directory] if ($_[1]) { $logFile = $_[1] . strftime("%Y%m%d", localtime) . $_[0] . ".log"; } else { $logFile = strftime("%Y%m%d", localtime) . $_[0] . ".log"; } return $logFile; } sub setLogMnName ($;$) { # set log file name with YYYYMM prefix, using optional directory if supplied # args: file name stem [, directory] if ($_[1]) { $logFile = $_[1] . strftime("%Y%m", localtime) . $_[0] . ".log"; } else { $logFile = strftime("%Y%m", localtime) . $_[0] . ".log"; } return $logFile; } sub setLogName ($;$) { # set log file name, using optional directory if supplied # args: file name stem [, directory] if ($_[1]) { $logFile = $_[1] . $_[0] . ".log"; } else { $logFile = $_[0] . ".log"; } return $logFile; } sub openLog { # open log file open($logF, ">>", $logFile) or die "abort program... 
problem accessing log file $logFile\n"; } sub closeLog { # close log file close($logF); undef $logF; } sub logMsg ($) { # print argument message to STDERR # also print datestamp prefixed message to log file if currently open print STDERR "$_[0]\n"; if ($logF) { print $logF dbgDatestamp() . ": $_[0]\n" or die "abort program... problem writing to log file $logFile\n"; } } } sub msg (;$$) { # print argument message to STDOUT # args: [text [suppress]] return if $_[1]; if ($_[0]) { # text & newline print STDOUT "$_[0]\n"; } else { # newline print STDOUT "\n"; } } sub errMsg (;$$) { # print argument message to STDERR # args: [text [suppress]] return if $_[1]; if ($_[0]) { # text & newline print STDERR "$_[0]\n"; } else { # newline print STDERR "\n"; } } sub tMsg (;$$) { # print timestamp prepended argument message to STDOUT # args: [text [suppress]] return if $_[1]; if ($_[0]) { print STDOUT "[" . dbgTimestamp() . "] $_[0]\n"; } else { print STDOUT "[" . dbgTimestamp() . "]\n"; } } sub errTMsg (;$$) { # print timestamp prepended argument message to STDERR # args: [text [suppress]] return if $_[1]; if ($_[0]) { print STDERR "[" . dbgTimestamp() . "] $_[0]\n"; } else { print STDERR "[" . dbgTimestamp() . "]\n"; } } sub dMsg (;$$) { # print datestamp prepended argument message to STDOUT # args: [text [suppress]] return if $_[1]; if ($_[0]) { print STDOUT "[" . dbgDatestamp() . "] $_[0]\n"; } else { print STDOUT "[" . dbgDatestamp() . "]\n"; } } sub errDMsg (;$$) { # print datestamp prepended argument message to STDERR # args: [text [suppress]] return if $_[1]; if ($_[0]) { print STDERR "[" . dbgDatestamp() . "] $_[0]\n"; } else { print STDERR "[" . dbgDatestamp() . "]\n"; } } # helper subroutines (not exported) sub dbgTimestamp { # return current timestamp # append string if supplied as parameter if (scalar(@_) > 0) { return strftime("%H:%M:%S", localtime) . ": " . 
$_[0]; } else { return strftime("%H:%M:%S", localtime); } } sub dbgDatestamp { # return current date and timestamp # append string if supplied as parameter if (scalar(@_) > 0) { return strftime("%Y%m%d-%H%M%S", localtime) . ": " . $_[0]; } else { return strftime("%Y%m%d-%H%M%S", localtime); } } 1;
ciscrh/perl-crhLib
crhDebug.pm
Perl
mit
8,260
# $Id: news.pm,v 2.102 2008/05/23 21:30:10 abigail Exp $

package Regexp::Common::URI::news;

use strict;
local $^W = 1;

use Regexp::Common               qw /pattern clean no_defaults/;
use Regexp::Common::URI          qw /register_uri/;
use Regexp::Common::URI::RFC1738 qw /$grouppart $group $article
                                     $host $port $digits/;

use vars qw /$VERSION/;
($VERSION) = q $Revision: 2.102 $ =~ /[\d.]+/g;

# "news:" URIs (RFC 1738): scheme followed by a group part.
my $news_scheme = 'news';
my $news_uri    = "(?k:(?k:$news_scheme):(?k:$grouppart))";

# "nntp://" URIs (RFC 1738): host[:port]/group[/digits].
my $nntp_scheme = 'nntp';
my $nntp_uri    = "(?k:(?k:$nntp_scheme)://(?k:(?k:(?k:$host)(?::(?k:$port))?)"
                . "/(?k:$group)(?:/(?k:$digits))?))";

register_uri $news_scheme => $news_uri;
register_uri $nntp_scheme => $nntp_uri;

pattern name   => [qw (URI news)],
        create => $news_uri,
        ;

pattern name   => [qw (URI NNTP)],
        create => $nntp_uri,
        ;

1;

__END__

=pod

=head1 NAME

Regexp::Common::URI::news -- Returns a pattern for news and NNTP URIs.

=head1 SYNOPSIS

    use Regexp::Common qw /URI/;

    while (<>) {
        /$RE{URI}{news}/       and  print "Contains a news URI.\n";
    }

=head1 DESCRIPTION

=head2 $RE{URI}{news}

Returns a pattern that matches I<news> URIs, as defined by RFC 1738.
News URIs have the form:

    "news:" ( "*" | group | article "@" host )

Under C<{-keep}>, the following are returned:

=over 4

=item $1

The complete URI.

=item $2

The scheme.

=item $3

The part of the URI following "news:".

=back

=head2 $RE{URI}{NNTP}

Returns a pattern that matches I<NNTP> URIs, as defined by RFC 1738.
NNTP URIs have the form:

    "nntp://" host [ ":" port ] "/" group [ "/" digits ]

Under C<{-keep}>, the following are returned:

=over 4

=item $1

The complete URI.

=item $2

The scheme.

=item $3

The part of the URI following "nntp://".

=item $4

The host and port, separated by a colon. If no port was given, just
the host.

=item $5

The host.

=item $6

The port, if given.

=item $7

The group.

=item $8

The digits, if given.

=back

=head1 REFERENCES

=over 4

=item B<[RFC 1738]>

Berners-Lee, Tim, Masinter, L., McCahill, M.: I<Uniform Resource
Locators (URL)>. December 1994.

=back

=head1 HISTORY

 $Log: news.pm,v $
 Revision 2.102  2008/05/23 21:30:10  abigail
 Changed email address

 Revision 2.101  2008/05/23 21:28:02  abigail
 Changed license

 Revision 2.100  2003/02/11 14:11:29  abigail
 NNTP and news URIs

=head1 SEE ALSO

L<Regexp::Common::URI> for other supported URIs.

=head1 AUTHOR

Damian Conway (damian@conway.org)

=head1 MAINTENANCE

This package is maintained by Abigail S<(I<regexp-common@abigail.be>)>.

=head1 BUGS AND IRRITATIONS

Bound to be plenty.

=head1 COPYRIGHT

This software is Copyright (c) 2001 - 2008, Damian Conway and Abigail.

This module is free software, and maybe used under any of the following
licenses:

 1) The Perl Artistic License.     See the file COPYRIGHT.AL.
 2) The Perl Artistic License 2.0. See the file COPYRIGHT.AL2.
 3) The BSD Licence.               See the file COPYRIGHT.BSD.
 4) The MIT Licence.               See the file COPYRIGHT.MIT.

=cut
schwern/Regexp-Common
lib/Regexp/Common/URI/news.pm
Perl
mit
3,133
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!! # This file is machine-generated by lib/unicore/mktables from the Unicode # database, Version 6.2.0. Any changes made here will be lost! # !!!!!!! INTERNAL PERL USE ONLY !!!!!!! # This file is for internal use by core Perl only. The format and even the # name or existence of this file are subject to change without notice. Don't # use it directly. return <<'END'; 2460 24FF END
Bjay1435/capstone
rootfs/usr/share/perl/5.18.2/unicore/lib/Blk/Enclose2.pl
Perl
mit
433
## OpenXPKI::DN
##
## Written by Michael Bell for the OpenXPKI project
## Copyright (C) 2004-2005 by The OpenXPKI Project
use strict;
use warnings;
use utf8;

package OpenXPKI::DN;

use Memoize;
use Text::CSV_XS;
use OpenXPKI::Exception;
use OpenXPKI::Debug;

# OpenSSL style attribute name mapping; keys are the upper-cased form
# used for case-insensitive lookup, values are the canonical spelling.
my %mapping_of = (
    SERIALNUMBER         => "serialNumber",
    EMAILADDRESS         => "emailAddress",
    MAIL                 => "mail",
    UID                  => "UID",
    X500UNIQUEIDENTIFIER => "x500UniqueIdentifier",
    CN                   => "CN",
    TITLE                => "title",
    SN                   => "SN",
    OU                   => "OU",
    O                    => "O",
    L                    => "L",
    ST                   => "ST",
    C                    => "C",
    DC                   => "DC",
    DOMAINCOMPONENT      => "DC",
    PSEUDONYM            => "pseudonym",
    ROLE                 => "role",
    DESCRIPTION          => "description",
);

# Constructor. Takes a single DN string, either RFC 2253 or OpenSSL
# oneline format (detected by a leading "/"), parses it into
# $self->{PARSED} and pre-builds the RDN list. Returns undef for a
# missing or empty argument.
sub new
{
    my $that  = shift;
    my $class = ref($that) || $that;

    my $self = {};
    bless $self, $class;

    my $arg = shift;
    # FIX: the original used "&&", which never rejected the empty string
    # and triggered an uninitialized-value warning for undef. The intent
    # (reject missing OR empty DNs) requires "||".
    return undef if (! defined $arg || $arg eq "");
    ##! 2: "scanning dn: $arg"
    ##! 2: "length of dn: ".length $arg

    if (substr ($arg, 0, 1) eq "/") {
        ## proprietary OpenSSL oneline syntax
        my $dn = convert_openssl_dn($arg);
        $self->{PARSED} = [ $self->__get_parsed_rfc_2253 ($dn) ];
    } else {
        ## RFC 2253 syntax
        $self->{PARSED} = [ $self->__get_parsed_rfc_2253 ($arg) ];
    }
    $self->__build_rdns();

    return $self;
}

# Convert an OpenSSL oneline DN ("/C=DE/O=Org/CN=Name") to an RFC 2253
# DN. Static function; also used by new(). Note that the conversion is
# inherently ambiguous because "/" may occur inside RDN values.
sub convert_openssl_dn
{
    ##! 1: 'warning: OpenSSL DN used. Can not be parsed unambiguously! This may even lead to security issues. Avoid whenever possible!'
    my $dn = shift;

    my $openssl_format = Text::CSV_XS->new({
        sep_char    => q{/},    # fields are separated by /
        escape_char => q{\\},   # backslashed characters are always data
    });

    if (!$openssl_format->parse($dn)) {
        OpenXPKI::Exception->throw (
            message => "I18N_OPENXPKI_DN_CONVERT_OPENSSL_DN_PARSE_ERROR",
            params  => {
                DN          => $dn,
                BADARGUMENT => $openssl_format->error_input(),
            });
    }

    my @rdn = $openssl_format->fields();

    # remove first empty element (OpenSSL DN starts with /)
    shift @rdn;

    # Return a comma separated list; escape commas contained in RDNs.
    # RFC 4514 says RDNs are joined by q{,}, not q{, }.
    return join(",", reverse map { s{,}{\\,}xsg; $_; } @rdn);
}

###################################
##     BEGIN of output functions ##
###################################

# Returns the parsed structure: a list of RDNs, each RDN being an array
# ref of [attribute_type, attribute_value] pairs.
sub get_parsed
{
    my $self = shift;
    return @{$self->{PARSED}};
}

# Returns a list (one entry per RDN) of array refs of "type=value"
# strings with "+" and "," escaped.
sub get_attributes
{
    my $self = shift;
    return @{$self->{ATTRIBUTES}};
}

# Returns the fully assembled RDN strings (multi-valued RDNs joined
# with "+").
sub get_rdns
{
    my $self = shift;
    return @{$self->{RDNS}};
}

# Returns the RFC 2253 string form of the DN.
sub get_rfc_2253_dn
{
    my $self = shift;
    return join ",", @{$self->{RDNS}};
}

# Returns the DN in reversed (X.500 style) order.
sub get_x500_dn
{
    my $self = shift;
    return join ",", reverse @{$self->{RDNS}};
}

# Returns the DN in OpenSSL's proprietary oneline format.
sub get_openssl_dn
{
    my $self = shift;
    # The map operation below modifies its arguments, so copy first.
    my @rdns = @{$self->{RDNS}};
    # Escape / to \/ and return a /-separated DN.
    return "/" . join("/", reverse map { s{/}{\\/}xsg; $_; } @rdns);
}

# Returns a hash keyed by upper-cased attribute type; each value is an
# array ref of all values found for that type in the DN.
sub get_hashed_content
{
    my $self   = shift;
    my %result = ();

    for my $rdn (@{$self->{PARSED}}) {
        for my $attribute (@{$rdn}) {
            my $key = uc($attribute->[0]);
            push @{$result{$key}}, $attribute->[1];
        }
    }

    return %result;
}

###################################
##     END of output functions   ##
###################################

###########################################
##     BEGIN of structure initialization ##
###########################################

# Build $self->{RDNS} (joined RDN strings) from ATTRIBUTES, building
# ATTRIBUTES first if necessary.
sub __build_rdns
{
    my $self = shift;
    $self->{RDNS} = [];
    $self->__build_attributes() if (not $self->{ATTRIBUTES});

    for my $attribute (@{$self->{ATTRIBUTES}}) {
        push(@{$self->{RDNS}}, join("+", @{$attribute}));
    }

    return 1;
}

# Build $self->{ATTRIBUTES} ("type=value" strings per RDN) from PARSED.
sub __build_attributes
{
    my $self = shift;
    # FIX: initialize with an array reference. The original assigned an
    # empty list ("()"), which evaluates to undef in scalar context and
    # only worked through autovivification in the push below.
    $self->{ATTRIBUTES} = [];

    for my $entry (@{$self->{PARSED}}) {
        my @attributes = ();
        for my $item (@{$entry}) {
            my $key   = $item->[0];
            my $value = $item->[1];
            # escape + and ,
            $value =~ s{ ([+,]) }{\\$1}xs;
            push(@attributes, $key . '=' . $value);
        }
        push(@{$self->{ATTRIBUTES}}, \@attributes);
    }

    return 1;
}

###########################################
##     END of structure initialization   ##
###########################################

##################################
##     BEGIN of RFC 2253 parser ##
##################################

# Split an RFC 2253 DN string into a list of RDNs (each an array ref of
# [type, value] pairs).
sub __get_parsed_rfc_2253
{
    my $self   = shift;
    my $string = shift;
    my @result = ();

    while ($string) {
        my $rdn;
        ($rdn, $string) = $self->__get_next_rdn ($string);
        if (defined $rdn && $rdn ne "") {
            push(@result, $rdn);
        }
        $string = substr ($string, 1) if ($string); ## remove separator
    }

    return @result;
}

# Consume one (possibly multi-valued, "+"-joined) RDN from the front of
# the string. Returns (rdn_arrayref, remaining_string).
sub __get_next_rdn
{
    my $self   = shift;
    my $string = shift;
    my ($type, $value);
    my $result = [];

    while ($string) {
        ($type, $value, $string) = $self->__get_attribute ($string);
        $result->[scalar @{$result}] = [ $type, $value ];
        last if (substr ($string, 0, 1) eq ","); ## stop at ,
        if (length ($string) > 1) {
            $string = substr ($string, 1); ## remove +
        } else {
            $string = "";
        }
    }

    return ($result, $string);
}

# Consume one "type=value" attribute from the front of the string.
# Returns (type, value, remaining_string).
sub __get_attribute
{
    my $self   = shift;
    my $string = shift;
    my ($type, $value);

    ($type, $string)  = __get_attribute_type ($string);
    $string = substr ($string, 1);            # drop the "="
    ($value, $string) = __get_attribute_value ($string);
    ##! 2: "type: $type\nvalue: $value"
    return ($type, $value, $string);
}

# Extract the attribute type (everything before the first "=") and
# normalize it via %mapping_of.
sub __get_attribute_type
{
    my $string = shift;

    my $type = $string;
    $type   =~ s/^\s*//;
    $type   =~ s/^([^=]+)=.*/$1/;
    $string =~ s/^\s*[^=]+(=.*)/$1/;

    ## fix type to be compliant with OpenSSL
    if (exists $mapping_of{uc($type)}) {
        $type = $mapping_of{uc($type)};
    }

    return ($type, $string);
}

# Extract the attribute value: scan character by character up to an
# unescaped "," or "+", honoring backslash escapes.
sub __get_attribute_value
{
    my $string = shift;
    my $value  = "";

    my $length = length ($string);
    my $i      = 0;
    my $next   = substr ($string, $i, 1);
    while ($length > $i and $next !~ /[,+]/) {
        $i++ if ($next eq "\\");   # skip the escape, keep the escaped char
        $value .= substr ($string, $i, 1);
        $i++;
        $next = substr ($string, $i, 1);
    }
    $string = substr ($string, $i);

    return ($value, $string);
}

# Static function: return the sorted, de-duplicated list of supported
# canonical attribute names.
sub get_attribute_names
{
    my @values = sort values %mapping_of;
    for (my $i = scalar @values - 1; $i > 0; $i--) {
        splice @values, $i, 1 if ($values[$i] eq $values[$i-1]);
    }
    return @values;
}

##################################
##     END of RFC 2253 parser   ##
##################################

# Memoize the pure parser helpers; repeated parsing of the same DN
# string is served from the cache.
foreach my $function (qw (__get_parsed_rfc_2253
                          __get_next_rdn
                          __get_attribute
                          __get_attribute_type
                          __get_attribute_value)) {
    memoize($function);
}

1;
__END__

=head1 Name

OpenXPKI::DN - RFC 2253 compatible dn parsing with support for OpenSSL's
proprietary formatting rules.

=head1 Description

This module was designed to implement a fast parser for RFC 2253
distinguished names. It was designed to output RFC 2253 compliant and
OpenSSL formatted DNs. Additionally you can get the parsed RDNs and the
attributes in a hash (e.g. if you are looking for the organizational
hierarchy via OUs).

Please note that OpenSSL formatted DNs can not be parsed unambiguously.
This is because '/' is a perfectly valid character within an RDN but is
used to separate them as well. Avoid getting OpenSSL DNs from OpenSSL or
other applications whenever possible, as this parsing problem might lead
to security issues.

=head1 Initialization

=head2 new

The 'new' constructor expects a RFC 2253 or OpenSSL DN as its only
argument. The type of the DN will be detected from the first character.
OpenSSL's DNs always begin with a leading slash "/". A missing or empty
argument yields undef. The return value is an object reference to the
used instance of OpenXPKI::DN.

=head2 convert_openssl_dn

This is a static function which requires an OpenSSL DN as argument. It
returns a proper RFC 2253 DN. It is used by the 'new' constructor to
convert OpenSSL DNs but you can use it also if you don't need a full
parser (which is slower).

=head1 Output Functions

=head2 get_parsed

returns a three-dimensional array. The first level is the number of the
RDN, the second level is the number of the attribute and third level
contains at [0] the name of the attribute and at [1] the value of the
attribute.

=head2 get_attributes

returns a two-dimensional array. The first level is the number of the
RDN, the second level is the number of the attribute. The value is the
attribute name and value concatenated with an equal sign "=".

=head2 get_rdns

returns an array. The array values are completely prepared strings of
the RDNs. This works for multi-valued RDNs too.

=head2 get_rfc_2253_dn

returns the RFC 2253 DN.

=head2 get_x500_dn

returns the RFC 2253 DN in reversed order. Something like X.500 style.

=head2 get_openssl_dn

returns the DN in OpenSSL's proprietary oneline format.

=head2 get_hashed_content

returns a hash which contains the attribute names as keys. The value of
each hash entry is an array with the values inside which were found in
the DN.

=head2 get_attribute_names

is a static function which returns all supported attribute names as a
normal array. It is not relevant how you call this function.
oliwel/openxpki
core/server/OpenXPKI/DN.pm
Perl
apache-2.0
9,964
=pod

=head1 NAME

i2d_PKCS7_bio_stream - output PKCS7 structure in BER format.

=head1 SYNOPSIS

 #include <openssl/pkcs7.h>

 int i2d_PKCS7_bio_stream(BIO *out, PKCS7 *p7, BIO *data, int flags);

=head1 DESCRIPTION

i2d_PKCS7_bio_stream() outputs a PKCS7 structure in BER format.

It is otherwise identical to the function SMIME_write_PKCS7().

=head1 NOTES

This function is effectively a version of the i2d_PKCS7_bio() supporting
streaming.

=head1 BUGS

The prefix "i2d" is arguably wrong because the function outputs BER format.

=head1 RETURN VALUES

i2d_PKCS7_bio_stream() returns 1 for success or 0 for failure.

=head1 SEE ALSO

L<ERR_get_error(3)|ERR_get_error(3)>, L<PKCS7_sign(3)|PKCS7_sign(3)>,
L<PKCS7_verify(3)|PKCS7_verify(3)>, L<PKCS7_encrypt(3)|PKCS7_encrypt(3)>
L<PKCS7_decrypt(3)|PKCS7_decrypt(3)>,
L<SMIME_write_PKCS7(3)|SMIME_write_PKCS7(3)>,
L<PEM_write_bio_PKCS7_stream(3)|PEM_write_bio_PKCS7_stream(3)>

=head1 HISTORY

i2d_PKCS7_bio_stream() was added to OpenSSL 1.0.0

=cut
caidongyun/nginx-openresty-windows
nginx/objs/lib/openssl-1.0.1g/doc/crypto/i2d_PKCS7_bio_stream.pod
Perl
bsd-2-clause
1,046
package AfterImage::Font;

use 5.005;
use strict;

require DynaLoader;

use vars qw($VERSION @ISA);
@ISA = qw(DynaLoader Class::Accessor);
use base qw(Class::Accessor);

$VERSION = '0.01';

# NOTE(review): this bootstraps the "AfterImage" XS module, not
# "AfterImage::Font" -- presumably the C glue for the whole
# distribution lives in one shared object; confirm before changing.
bootstrap AfterImage $VERSION;

# $AfterImage::Font::manager is hidden from the user.  It is a
# Singleton, and under no circumstances do we allow the user to
# change its options.
use vars qw($manager);

# Recognized constructor options and their default values.
my $defaults = {
    filename => '',
    points => 0,
};

# Generate read/write accessors for every option, then re-generate
# 'filename' as read-only (the later mk_ro_accessors call wins).
AfterImage::Font->mk_accessors(keys %$defaults);
AfterImage::Font->mk_ro_accessors(qw(filename));

# Constructor.  Accepts either a single hash reference or a list of
# key/value pairs with the keys 'filename' (font name or file name,
# required) and 'points' (font size, required); dies when either is
# missing.  The process-wide font manager is created exactly once, on
# first use; the ASFONT_PATH_ENVVAR environment variable, when set,
# supplies its font search path.
sub new {
    my $class = shift;
    my $self = {};
    my $options = defined($_[0]) && ref($_[0]) eq 'HASH' ? $_[0] : { @_ };
    # Lazily create the shared (singleton) font manager.
    $AfterImage::Font::manager ||=
        AfterImage::Font::c_manager_create($ENV{ASFONT_PATH_ENVVAR} || '');
    # Merge caller-supplied options over the defaults.
    foreach (keys %$defaults) {
        if (exists $$options{$_}) {
            $self->{$_} = $$options{$_};
        } else {
            $self->{$_} = $$defaults{$_};
        }
    }
    bless($self, $class);
    if (!$self->filename) {
        die "AfterImage::Font::new(): font name or filename required\n";
    }
    if (!$self->points) {
        die "AfterImage::Font::new(): font size required\n";
    }
    # Open the underlying font handle through the XS layer.
    $self->{font} = AfterImage::Font::c_open($AfterImage::Font::manager,
                                             $self->filename, $self->points);
    return $self;
}

1;
__END__

=head1 NAME

AfterImage::Font - font objects for the AfterImage Perl extension

=head1 SYNOPSIS

  use AfterImage::Font;

  my $font = AfterImage::Font->new(filename => 'some-font-name',
                                   points   => 12);

=head1 DESCRIPTION

AfterImage::Font wraps a font handle opened through the AfterImage XS
layer.  A process-wide font manager singleton is created on demand; the
C<ASFONT_PATH_ENVVAR> environment variable, when set before the first
font is created, supplies the font search path.

The constructor requires both a C<filename> (font name or file name)
and a C<points> size, and dies when either is missing.  C<points> is a
read/write accessor; C<filename> is read-only.

=head2 EXPORT

None by default.

=head1 SEE ALSO

The AfterImage distribution this module belongs to, and the
libAfterImage documentation (presumably the underlying C library --
confirm against the XS source).

=head1 AUTHOR

Ethan Fischer, E<lt>allanon@crystaltokyo.comE<gt>

=head1 COPYRIGHT AND LICENSE

Copyright (C) 2006 by Ethan Fischer

This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself, either Perl version 5.8.8 or,
at your option, any later version of Perl 5 you may have available.

=cut
born2late/afterstep-devel
perl-AfterImage/lib/AfterImage/Font.pm
Perl
mit
2,353
=head1 NAME Apache2::ServerRec - Perl API for Apache server record accessors =head1 Synopsis use Apache2::ServerRec (); $error_fname = $s->error_fname(); $is_virtual = $s->is_virtual(); $keep_alive = $s->keep_alive(); $keep_alive_max = $s->keep_alive_max(); $keep_alive_timeout = $s->keep_alive_timeout(); $limit_req_fields = $s->limit_req_fields(); $limit_req_fieldsize = $s->limit_req_fieldsize(); $limit_req_line = $s->limit_req_line(); $path = $s->path(); $hostname = $s->server_hostname(); $port = $s->port(); $server_admin = $s->server_admin(); $proc = $s->process(); $timeout = $s->timeout(); $loglevel = $s->loglevel(); my $server = Apache2::ServerUtil->server; my $vhosts = 0; for (my $s = $server->next; $s; $s = $s->next) { $vhosts++; } print "There are $vhosts virtual hosts"; =head1 Description C<Apache2::ServerRec> provides the Perl API for Apache server_rec object. C<L<Apache2::ServerUtil|docs::2.0::api::Apache2::ServerUtil>> provides an extra functionality. =head1 API C<Apache2::ServerRec> provides the following functions and/or methods: =head2 C<error_fname> Get/set the C<ErrorLog> file value (e.g. F<logs/error_log>) $error_fname = $s->error_fname(); $prev_error_fname = $s->error_fname($new_error_fname); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item opt arg1: C<$new_error_fname> ( string ) If passed, sets the new value for C<ErrorLog> Note the L<limited functionality under threaded MPMs|/Limited_Functionality_under_Threaded_MPMs>. =item ret: C<$error_fname> ( string ) Returns the C<ErrorLog> value setting. If C<$new_error_fname> is passed returns the setting before the change. =item since: 2.0.00 =back =head2 C<is_virtual> Test whether C<$s> is a virtual host object $is_virtual = $s->is_virtual(); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item ret: C<$is_virtual> ( boolean ) Returns the is_virtual setting. 
=item since: 2.0.00

=back

Example:

  print "This is a virtual host" if $s->is_virtual();

=head2 C<keep_alive>

Get/set the C<KeepAlive> setting, which specifies whether Apache should
accept more than one request over the same connection from the same
client.

  $keep_alive      = $s->keep_alive();
  $prev_keep_alive = $s->keep_alive($new_keep_alive);

=over 4

=item obj: C<$s>
( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> )

=item opt arg1: C<$new_keep_alive> ( boolean )

If passed, sets the new keep_alive.

Note the L<limited functionality under threaded
MPMs|/Limited_Functionality_under_Threaded_MPMs>.

=item ret: C<$keep_alive> ( boolean )

Returns the C<KeepAlive> setting.

If C<$new_keep_alive> is passed, returns the setting before the change.

=item since: 2.0.00

=back

=head2 C<keep_alive_max>

Get/set the C<MaxKeepAliveRequests> setting, which specifies the maximum
number of requests Apache will serve over a C<KeepAlive> connection.

  $keep_alive_max      = $s->keep_alive_max();
  $prev_keep_alive_max = $s->keep_alive_max($new_keep_alive_max);

=over 4

=item obj: C<$s>
( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> )

=item opt arg1: C<$new_keep_alive_max> ( integer )

If passed, sets the new keep_alive_max.

Note the L<limited functionality under threaded
MPMs|/Limited_Functionality_under_Threaded_MPMs>.

=item ret: C<$keep_alive_max> ( integer )

Returns the keep_alive_max setting.

If C<$new_keep_alive_max> is passed, returns the setting before the
change.

=item since: 2.0.00

=back

=head2 C<keep_alive_timeout>

Get/set the C<KeepAliveTimeout> setting (in microsecs), which specifies
how long Apache will wait for another request before breaking a
C<KeepAlive> connection.
$keep_alive_timeout = $s->keep_alive_timeout(); $prev_keep_alive_timeout = $s->keep_alive_timeout($new_timeout); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item opt arg1: C<$new_keep_alive_timeout> ( integer ) The expected value is in microsecs. If passed, sets the new C<KeepAlive> timeout. Note the L<limited functionality under threaded MPMs|/Limited_Functionality_under_Threaded_MPMs>. =item ret: C<$keep_alive_timeout> ( integer ) Returns the C<KeepAlive> timeout value (in microsecs). If C<$new_timeout> is passed, returns the setting before the change. =item since: 2.0.00 =back =head2 C<limit_req_fields> Get/set limit on number of request header fields $limit_req_fields = $s->limit_req_fields(); $prev_limit_req_fields = $s->limit_req_fields($new_limit_req_fields); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item opt arg1: C<$new_limit_req_fields> ( integer ) If passed, sets the new request headers number limit. Note the L<limited functionality under threaded MPMs|/Limited_Functionality_under_Threaded_MPMs>. =item ret: C<$limit_req_fields> ( integer ) Returns the request headers number limit. If C<$new_limit_req_fields> is passed, returns the setting before the change. =item since: 2.0.00 =back =head2 C<limit_req_fieldsize> Get/set limit on size of any request header field $limit_req_fieldsize = $s->limit_req_fieldsize(); $prev_limit = $s->limit_req_fieldsize($new_limit); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item opt arg1: C<$new_limit_req_fieldsize> ( integer ) If passed, sets the new request header size limit. Note the L<limited functionality under threaded MPMs|/Limited_Functionality_under_Threaded_MPMs>. =item ret: C<$limit_req_fieldsize> ( integer ) Returns the request header size limit. If C<$new_limit> is passed, returns the setting before the change. 
=item since: 2.0.00 =back =head2 C<limit_req_line> Get/set limit on size of the HTTP request line $limit_req_line = $s->limit_req_line(); $prev_limit_req_line = $s->limit_req_line($new_limit_req_line); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item opt arg1: C<$new_limit_req_line> ( integer ) If passed, sets the new request line limit value. Note the L<limited functionality under threaded MPMs|/Limited_Functionality_under_Threaded_MPMs>. =item ret: C<$limit_req_line> ( integer ) Returns the request line limit value If C<$new_limit_req_line> is passed, returns the setting before the change. =item since: 2.0.00 =back =head2 C<loglevel> Get/set the C<LogLevel> directive value $loglevel = $s->loglevel(); $prev_loglevel = $s->loglevel($new_loglevel); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item opt arg1: C<$new_loglevel> ( C<L<Apache2::Const :log constant|docs::2.0::api::Apache2::Const/C__log_>> ) If passed, sets a new C<LogLevel> value Note the L<limited functionality under threaded MPMs|/Limited_Functionality_under_Threaded_MPMs>. =item ret: C<$loglevel> ( C<L<Apache2::Const :log constant|docs::2.0::api::Apache2::Const/C__log_>> ) Returns the C<LogLevel> value as a constant. If C<$new_loglevel> is passed, returns the setting before the change. 
=item since: 2.0.00 =back For example, to set the C<LogLevel> value to C<info>: use Apache2::Const -compile => qw(LOG_INFO); $s->loglevel(Apache2::Const::LOG_INFO); =head2 C<next> The next server record in the list (if there are vhosts) $s_next = $s->next(); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item ret: C<$s_next> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item since: 2.0.00 =back For example the following code traverses all the servers, starting from the base server and continuing to vhost servers, counting all available vhosts: use Apache2::ServerRec (); use Apache2::ServerUtil (); my $server = Apache2::ServerUtil->server; my $vhosts = 0; for (my $s = $server->next; $s; $s = $s->next) { $vhosts++; } print "There are $vhosts virtual hosts"; =head2 C<path> Get/set pathname for the C<ServerPath> setting $path = $s->path(); $prev_path = $s->path($new_path); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item opt arg1: C<$new_path> ( string ) If passed, sets the new path. Note the L<limited functionality under threaded MPMs|/Limited_Functionality_under_Threaded_MPMs>. =item ret: C<$path> ( string ) Returns the path setting. If C<$new_path> is passed, returns the setting before the change. =item since: 2.0.00 =back =head2 C<port> Get/set the port value $port = $s->port(); $prev_port = $s->port($new_port); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item opt arg1: C<$new_port> ( integer ) If passed, sets the new port. Note the L<limited functionality under threaded MPMs|/Limited_Functionality_under_Threaded_MPMs>. META: I don't think one should be allowed to change port number after the server has started. =item ret: C<$port> ( integer ) Returns the port setting. If C<$new_port> is passed returns the setting before the change. 
=item since: 2.0.00 =back =head2 C<process> The process this server is running in $proc = $s->process(); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item ret: C<$proc> ( C<L<Apache2::Process object|docs::2.0::api::Apache2::Process>> ) =item since: 2.0.00 =back =head2 C<server_admin> Get/set the C<ServerAdmin> value $server_admin = $s->server_admin(); $prev_server_admin = $s->server_admin($new_server_admin); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item opt arg1: C<$new_server_admin> ( string ) If passed, sets the new C<ServerAdmin> value. Note the L<limited functionality under threaded MPMs|/Limited_Functionality_under_Threaded_MPMs>. =item ret: C<$server_admin> ( string ) Returns the C<ServerAdmin> value. If C<$new_server_admin> is passed, returns the setting before the change. =item since: 2.0.00 =back =head2 C<server_hostname> Get/set the C<ServerName> value $server_hostname = $s->server_hostname(); $prev_server_hostname = $s->server_hostname($new_server_hostname); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item opt arg1: C<$new_server_hostname> ( string ) If passed, sets the C<ServerName> value Note the L<limited functionality under threaded MPMs|/Limited_Functionality_under_Threaded_MPMs>. =item ret: C<$server_hostname> ( string ) Returns the C<ServerName> value If C<$new_server_hostname> is passed, returns the setting before the change. =item since: 2.0.00 =back =head2 C<timeout> Get/set the timeout (C<TimeOut>) (in microsecs), which Apache will wait for before it gives up doing something $timeout = $s->timeout(); $prev_timeout = $s->timeout($new_timeout); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item opt arg1: C<$new_timeout> ( integer ) If passed, sets the new timeout (the value should be in microseconds). 
Note the L<limited functionality under threaded MPMs|/Limited_Functionality_under_Threaded_MPMs>. =item ret: C<$timeout> ( integer ) Returns the timeout setting in microseconds. If C<$new_timeout> is passed, returns the setting before the change. =item since: 2.0.00 =back Let us repeat again: the timeout values is microseconds. For example to set the timeout to 20 secs: $s->timeout(20_000_000); =head1 Notes =head2 Limited Functionality under Threaded MPMs Note that under threaded MPMs, some of the read/write accessors, will be able to set values only before threads are spawned (i.e. before the C<L<ChildInit phase|docs::2.0::user::handlers::server/C_PerlChildInitHandler_>>). Therefore if you are developing your application on the non-threaded MPM, but planning to have it run under threaded mpm, you should not use those methods to set values after the ChildInit phase. The affected accessor methods are marked as such in their respective documentation entries. =head1 Unsupported API C<Apache2::ServerRec> also provides auto-generated Perl interface for a few other methods which aren't tested at the moment and therefore their API is a subject to change. These methods will be finalized later as a need arises. If you want to rely on any of the following methods please contact the L<the mod_perl development mailing list|maillist::dev> so we can help each other take the steps necessary to shift the method to an officially supported API. =head2 C<addrs> Get the addrs value $addrs = $s->addrs(); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item ret: C<$addrs> ( C<L<Apache2::ServerAddr|docs::2.0::api::Apache2::ServerAddr>> ) Returns the addrs setting. =item since: subject to change =back META: this methods returns a vhost-specific Apache2::ServerAddr object, which is not implemented at the moment. See the struct server_addr_rec entry in httpd-2.0/include/httpd.h for more information. It seems that most (all?) 
of the information in that record is available through other APIs. =head2 C<lookup_defaults> Get the lookup_defaults value. MIME type info, etc., before we start checking per-directory info. $lookup_defaults = $s->lookup_defaults(); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item ret: C<$lookup_defaults> ( C<L<Apache2::ConfVector|docs::2.0::api::Apache2::RequestRec/C_per_dir_config_>> ) Returns the lookup_defaults setting. =item since: subject to change =back =head2 C<module_config> Get config vector containing pointers to modules' per-server config structures. $module_config = $s->module_config(); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item ret: C<$module_config> ( C<L<Apache2::ConfVector|docs::2.0::api::Apache2::RequestRec/C_per_dir_config_>> ) Returns the module_config setting. =item since: subject to change =back =head2 C<names> Get/set the value(s) for the C<ServerAlias> setting $names = $s->names(); $prev_names = $s->names($new_names); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item opt arg1: C<$new_names> ( C<L<APR::ArrayHeader|docs::2.0::api::APR::ArrayHeader>> ) If passed, sets the new names. Note the L<limited functionality under threaded MPMs|/Limited_Functionality_under_Threaded_MPMs>. =item ret: C<$names> ( C<L<APR::ArrayHeader|docs::2.0::api::APR::ArrayHeader>> ) Returns the names setting. If C<$new_names> is passed, returns the setting before the change. 
=item since: 2.0.00 =back META: we don't have C<APR::ArrayHeader> yet =head2 C<wild_names> Wildcarded names for ServerAlias servers $wild_names = $s->wild_names(); $prev_wild_names = $s->wild_names($new_wild_names); =over 4 =item obj: C<$s> ( C<L<Apache2::ServerRec object|docs::2.0::api::Apache2::ServerRec>> ) =item opt arg1: C<$new_wild_names> ( C<L<APR::ArrayHeader|docs::2.0::api::APR::ArrayHeader>> ) If passed, sets the new wild_names. Note the L<limited functionality under threaded MPMs|/Limited_Functionality_under_Threaded_MPMs>. =item ret: C<$wild_names> ( C<L<APR::ArrayHeader|docs::2.0::api::APR::ArrayHeader>> ) Returns the wild_names setting. If C<$new_wild_names> is passed, returns the setting before the change. =item since: 2.0.00 =back META: we don't have C<APR::ArrayHeader> yet =head1 See Also L<mod_perl 2.0 documentation|docs::2.0::index>. =head1 Copyright mod_perl 2.0 and its core modules are copyrighted under The Apache Software License, Version 2.0. =head1 Authors L<The mod_perl development team and numerous contributors|about::contributors::people>. =cut
Distrotech/mod_perl
docs/src/docs/2.0/api/Apache2/ServerRec.pod
Perl
apache-2.0
16,319
#!/usr/bin/perl
#
## Licensed to the Apache Software Foundation (ASF) under one
## or more contributor license agreements.  See the NOTICE file
## distributed with this work for additional information
## regarding copyright ownership.  The ASF licenses this file
## to you under the Apache License, Version 2.0 (the
## "License"); you may not use this file except in compliance
## with the License.  You may obtain a copy of the License at
##
##      http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.

use strict;
use warnings;

#use Data::Dumper;

# Record one observed value for $key in the %$stats accumulator.
# Values that are not plain decimals are ignored after the counters are
# initialized; negative samples (missing milestones) are dropped
# entirely.  (The Perl prototypes on the original subs were removed:
# prototypes do not validate arguments and are an anti-pattern.)
sub addStat {
    my ($stats, $key, $value) = @_;

    $stats->{$key}->{total} = 0 if (!defined $stats->{$key}->{total});
    $stats->{$key}->{count} = 0 if (!defined $stats->{$key}->{count});

    # Accept only optionally-negative decimal numbers.
    return if (!($value =~ m|^-?\d+\.?\d*$|));
    # Single guard replaces the three repeated "if $value >= 0" checks.
    return if $value < 0;

    $stats->{$key}->{total} += $value;
    $stats->{$key}->{count}++;
    push(@{$stats->{$key}->{values}}, $value);
}

# Print one summary row per known milestone key: total, count, mean,
# median, 95th/99th percentile, min and max.  Keys with no recorded
# values are skipped.
sub displayStat {
    my ($stats) = @_;

    printf("%25s %10s %10s %10s %10s %10s %10s %10s %10s\n",
        'key', 'total', 'count', 'mean', 'median', '95th', '99th', 'min', 'max');

    foreach my $key (
        'tls_handshake',        'ua_begin',
        'ua_first_read',        'ua_read_header_done',
        'cache_open_read_begin', 'cache_open_read_end',
        'cache_open_write_begin', 'cache_open_write_end',
        'dns_lookup_begin',     'dns_lookup_end',
        'server_connect',       'server_connect_end',
        'server_first_read',    'server_read_header_done',
        'server_close',         'ua_close',
        'sm_finish',            'plugin_active',
        'plugin_total'
        ) {
        my $count = $stats->{$key}->{count};
        my $total = $stats->{$key}->{total};

        # No values recorded for this key -- nothing to report.
        next if (!defined $stats->{$key}->{values});

        my @sorted = sort { $a <=> $b } @{ $stats->{$key}->{values} };
        my $median = $sorted[int($count / 2)];    # upper median for even counts
        my $p95th  = $sorted[int($count * .95)];
        my $p99th  = $sorted[int($count * .99)];
        my $min    = $sorted[0];
        my $max    = $sorted[$count - 1];
        my $mean   = $count > 0 ? $total / $count : 0;

        printf("%25s %10.4f %10.4f %10.4f %10.4f %10.4f %10.4f %10.4f %10.4f\n",
            $key, $total, $count, $mean, $median, $p95th, $p99th, $min, $max);
    }
    print "NOTE: Times are in seconds\n";
}

{
    my %stats;
    while (<>) {
        chomp;

        # Normalize multi-word field names so the split below yields
        # single-token keys.
        s/unique id/unique_id/;
        s/server state/server_state/;
        s/client state/client_state/;

        # Only "Slow Request" log lines carry the milestone timings.
        if (m|Slow Request: .+ (tls_handshake: .+)|) {
            my %data = split(/: | /, $1);
            foreach my $key (keys %data) {
                next if (!defined $data{$key});
                addStat(\%stats, $key, $data{$key});
            }
        }
    }
    displayStat(\%stats);
}
SolidWallOfCode/trafficserver
tools/slow_log_report.pl
Perl
apache-2.0
3,239
package Cpanel::Security::Advisor::Assessors::Iptables; # Copyright (c) 2013, cPanel, Inc. # All rights reserved. # http://cpanel.net # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the owner nor the names of its contributors may # be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
use strict;
use warnings;

use base 'Cpanel::Security::Advisor::Assessors';

use Cpanel::SafeRun::Simple;

# Entry point invoked by the Security Advisor framework; runs each check
# this assessor provides and always returns true.
sub generate_advice {
    my ($self) = @_;
    $self->_is_iptables_active();

    return 1;
}

# Adds a "Firewall is not running" advice entry when the iptables init
# script is present but reports that the firewall is stopped.  Hosts
# without /etc/init.d/iptables are silently skipped.
sub _is_iptables_active {
    my ($self) = @_;
    my $security_advisor_obj = $self->{'security_advisor_obj'};

    if ( -x '/etc/init.d/iptables' ) {

        # Use the already-imported Cpanel::SafeRun::Simple in list form
        # (no shell involved) instead of backticks to capture the status
        # output; guard against saferun returning undef on failure.
        my $status_check = Cpanel::SafeRun::Simple::saferun( '/etc/init.d/iptables', 'status' );
        $status_check = '' if !defined $status_check;

        if ( $status_check =~ m/not running/i ) {
            $security_advisor_obj->add_advice(
                {
                    'type'       => $Cpanel::Security::Advisor::ADVISE_BAD,
                    'text'       => ['Firewall is not running'],
                    'suggestion' => ['This might be a simple matter of executing "/etc/init.d/iptables start"'],
                },
            );
        }
    }

    return 1;
}

1;
sporks5000/addon_securityadvisor
pkg/Cpanel/Security/Advisor/Assessors/Iptables.pm
Perl
bsd-3-clause
2,522
#!/usr/bin/perl # ********************************************************** # Copyright (c) 2004-2010 VMware, Inc. All rights reserved. # ********************************************************** # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of VMware, Inc. nor the names of its contributors may be # used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL VMWARE, INC. OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY # OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH # DAMAGE. 
### bmmemabs.pl
### author: Derek Bruening   April 2003
###
### compares benchmark results produced with bmtable.pl
### computes absolute memory usage difference between two runs

use strict;
use warnings;

my $usage = "Usage: $0 [-k] <table1> <table2>\n";

if ($#ARGV < 0) {
    print $usage;
    exit;
}
my $ignore_errors = 0;    # -k: keep rows even when a run did not end "ok"
if ($ARGV[0] eq "-k") {
    $ignore_errors = 1;
    shift;
}
if ($#ARGV != 1) {
    print $usage;
    exit;
}
my $file1 = $ARGV[0];
my $file2 = $ARGV[1];
my $PWD   = $ENV{'PWD'};

# Per-run result tables, keyed by benchmark name.
my (%status1, %cpu1, %time1, %rss1, %vsz1);
my (%status2, %cpu2, %time2, %rss2, %vsz2);
my @name_bmarks;    # every benchmark seen (duplicates skipped later)

# parse_table($path, \%status, \%cpu, \%time, \%rss, \%vsz)
# Parses one bmtable.pl output file.  Each data row looks like:
#   <bmark> <n>/<m> <status> <%cpu> <time> <rss> <vsz>
# Matching rows are stored into the supplied hashes and the benchmark name
# is appended to @name_bmarks.  (Replaces two copy-pasted parse loops.)
sub parse_table {
    my ($path, $status, $cpu, $time, $rss, $vsz) = @_;
    open(my $fh, '<', $path) or die "Error: Couldn't open $path for input\n";
    while (<$fh>) {
        if (/^\s*(\w+)\s+\d+\/\d+\s+(\S+)\s+(\S+)\s+([\d\.]+)\s+(\d+)\s+(\d+)/) {
            my $bmark = $1;
            $status->{$bmark} = $2;
            $cpu->{$bmark}    = $3;
            $time->{$bmark}   = $4;
            $rss->{$bmark}    = $5;
            $vsz->{$bmark}    = $6;
            push @name_bmarks, $bmark;
        }
    }
    close($fh);
}

# fmt_status($status)
# Formats a status cell; "--" stands in for a benchmark missing from a run.
# (Renamed from "stat" to avoid shadowing the Perl builtin of that name.)
sub fmt_status {
    my ($status) = @_;
    return "--" if (!defined $status || $status eq "");
    return $status;
}

parse_table($file1, \%status1, \%cpu1, \%time1, \%rss1, \%vsz1);
parse_table($file2, \%status2, \%cpu2, \%time2, \%rss2, \%vsz2);

print "# in dir $PWD:\n#\t$file1 - $file2\n";
print "-------------------------------------------------\n";
printf("%10s %-6s %-9s %-7s %-7s\n", "Benchmark", "Status", "Time(min)", "RSS(KB)", "VSz(KB)");

# Accumulators: time uses a harmonic mean of ratios (sum of reciprocals);
# rss/vsz use an arithmetic mean of absolute differences.
my %meansum = (time => 0, rss => 0, vsz => 0);
my %meannum = (time => 0, rss => 0, vsz => 0);

my $last = "";
foreach my $s (sort @name_bmarks) {
    next if ($s eq $last);    # each name appears once per input file
    $last = $s;
    printf "%10s %4s/%-4s ", $s, fmt_status($status1{$s}), fmt_status($status2{$s});

    # Both runs must be ok (unless -k was given); we only look at status --
    # we assume the table already filtered out bad %CPU.
    my $bad = 1;
    if (   $ignore_errors
        || (   defined $status1{$s} && $status1{$s} =~ /ok/
            && defined $status2{$s} && $status2{$s} =~ /ok/)) {
        $bad = 0;
    }

    if (!$bad && defined($time1{$s}) && defined($time2{$s}) && $time1{$s} > 0 && $time2{$s} > 0) {
        my $ratio = $time1{$s} / $time2{$s};
        $meansum{"time"} += 1 / $ratio;    # harmonic mean sums reciprocals
        $meannum{"time"}++;
        printf "%6.3f ", $ratio;
    } else {
        printf "%6s ", "------";
    }
    if (!$bad && defined($rss1{$s}) && defined($rss2{$s}) && $rss1{$s} > 0 && $rss2{$s} > 0) {
        my $diff = $rss1{$s} - $rss2{$s};
        $meansum{"rss"} += $diff;
        $meannum{"rss"}++;
        printf "%6d ", $diff;
    } else {
        printf "%6s ", "------";
    }
    if (!$bad && defined($vsz1{$s}) && defined($vsz2{$s}) && $vsz1{$s} > 0 && $vsz2{$s} > 0) {
        my $diff = $vsz1{$s} - $vsz2{$s};
        $meansum{"vsz"} += $diff;
        $meannum{"vsz"}++;
        printf "%6d ", $diff;
    } else {
        printf "%6s ", "------";
    }
    print "\n";
}

print "-------------------------------------------------\n";
printf "%12s ", "harmonic/arith mean";
if ($meansum{"time"} > 0) {
    printf "%4.3f ", $meannum{"time"} / $meansum{"time"};
} else {
    printf "%5s ", "-----";
}
# BUG FIX: the rss/vsz means previously tested the *sum* of differences
# ($harsum > 0), which wrongly printed "-----" whenever memory usage went
# down (negative or zero net diff) despite having samples.  Test the
# sample count instead.
if ($meannum{"rss"} > 0) {
    printf "%6d ", $meansum{"rss"} / $meannum{"rss"};
} else {
    printf "%5s ", "-----";
}
if ($meannum{"vsz"} > 0) {
    printf "%6d ", $meansum{"vsz"} / $meannum{"vsz"};
} else {
    printf "%5s ", "-----";
}
print "\n";
bl4ckic3/DynamoRIO-ARM
tools/bmmemabs.pl
Perl
bsd-3-clause
5,073
package Fixtures::Integration::CachegroupParameter;

# Do not edit! Generated code.
# See https://github.com/Comcast/traffic_control/wiki/The%20Kabletown%20example

use Moose;
extends 'DBIx::Class::EasyFixture';
use namespace::autoclean;

# No cachegroup/parameter associations exist in this integration fixture
# set, so the definition table is intentionally empty.
my %definition_for = (
);

# Fixture-set name used by the test harness to look up this class.
sub name {
    return "CachegroupParameter";
}

# Returns the fixture definition for $name, or undef (always undef here,
# since %definition_for is empty).
sub get_definition {
    my ( $self, $name ) = @_;
    return $definition_for{$name};
}

# Returns every defined fixture name (an empty list for this class).
sub all_fixture_names {
    return keys %definition_for;
}

__PACKAGE__->meta->make_immutable;

1;
smalenfant/traffic_control
traffic_ops/app/lib/Fixtures/Integration/CachegroupParameter.pm
Perl
apache-2.0
495
# Random test-data helpers.  Note: there is no package declaration, so
# these subs are installed into whichever package require()s this file.

use strict;
use warnings;

# random_integers($len)
# Returns a list of $len random integers in the range [0, 100).
sub random_integers {
    my $len = shift;
    my @result;
    foreach (1..$len) {
        push @result, int(rand(100));
    }
    return @result;
}

# random_hash($len)
# Returns a reference to a hash with $len entries whose keys are random
# 32-character strings and whose values are random 128-character strings.
sub random_hash {
    my $len = shift;
    my %result;
    foreach (1..$len) {
        my $key = random_string(32);
        my $val = random_string(128);
        $result{$key} = $val;
    }
    return \%result;
}

# random_string($len)
# Returns a random string of int($len) characters drawn from [a-zA-Z0-9_].
# ($len may be fractional; the 1..$len range truncates it.)
sub random_string {
    my $len = $_[0];
    my @chars = ('a'..'z', 'A'..'Z', '0'..'9', '_');
    my $result = '';    # initialized so warnings stay clean
    foreach (1..$len) {
        $result .= $chars[rand @chars];
    }
    return $result;
}

# random_strings($len)
# Returns a list of $len random strings, each 32 to 95 characters long.
sub random_strings {
    my $len = $_[0];
    my @result = ();
    foreach (1..$len) {
        my $strlen = rand(64) + 32;
        push(@result, random_string($strlen));
    }
    return @result;
}

# random_timestamp()
# Returns a random positive value in (1, 2**63 + 1).  Despite the name this
# is just a large random number, not derived from the clock; note that
# 2**63 exceeds exact double precision, so the value is approximate.
sub random_timestamp {
    my $result = rand(2**63) + 1;
    return $result;
}

1;
mbroadst/debian-qpid-cpp-old
proton-c/bindings/perl/tests/utils.pm
Perl
apache-2.0
878
package Fixtures::Integration::ToExtension; # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # Do not edit! Generated code. # See https://github.com/Comcast/traffic_control/wiki/The%20Kabletown%20example use Moose; extends 'DBIx::Class::EasyFixture'; use namespace::autoclean; my %definition_for = ( '0' => { new => 'ToExtension', using => { name => 'ILO_PING', isactive => '1', last_updated => '2015-12-10 15:44:37', servercheck_short_name => 'ILO', type => '5', version => '1.0.0', info_url => 'http://foo.com/bar.html', description => undef, script_file => 'ping', servercheck_column_name => 'aa', additional_config_json => '{ "select": "ilo_ip_address", "cron": "9 * * * *" }', }, }, '1' => { new => 'ToExtension', using => { name => '10G_PING', script_file => 'ping', servercheck_short_name => '10G', version => '1.0.0', info_url => 'http://foo.com/bar.html', isactive => '1', servercheck_column_name => 'ab', type => '5', additional_config_json => '{ "select": "ip_address", "cron": "18 * * * *" }', description => undef, last_updated => '2015-12-10 15:44:37', }, }, '2' => { new => 'ToExtension', using => { name => 'FQDN_PING', isactive => '1', last_updated => '2015-12-10 15:44:37', servercheck_short_name => 'FQDN', type => '5', version => '1.0.0', 
description => undef, info_url => 'http://foo.com/bar.html', script_file => 'ping', servercheck_column_name => 'ac', additional_config_json => '{ "select": "host_name", "cron": "27 * * * *" }', }, }, '3' => { new => 'ToExtension', using => { name => 'CHECK_DSCP', servercheck_column_name => 'ad', servercheck_short_name => 'DSCP', type => '5', version => '1.0.0', last_updated => '2015-12-10 15:44:37', script_file => 'dscp', additional_config_json => '{ "select": "ilo_ip_address", "cron": "36 * * * *" }', description => undef, info_url => 'http://foo.com/bar.html', isactive => '1', }, }, '4' => { new => 'ToExtension', using => { name => 'OPEN', additional_config_json => '', info_url => 'http://foo.com/bar.html', script_file => 'dscp', version => '1.0.0', description => undef, isactive => '0', last_updated => '2015-12-10 15:44:37', servercheck_column_name => 'ae', servercheck_short_name => '', type => '7', }, }, '5' => { new => 'ToExtension', using => { name => 'OPEN', isactive => '0', servercheck_short_name => '', type => '7', version => '1.0.0', description => undef, info_url => 'http://foo.com/bar.html', last_updated => '2015-12-10 15:44:37', script_file => 'dscp', servercheck_column_name => 'af', additional_config_json => '', }, }, '6' => { new => 'ToExtension', using => { name => 'IPV6_PING', servercheck_short_name => '10G6', version => '1.0.0', additional_config_json => '{ "select": "ip6_address", "cron": "0 * * * *" }', description => undef, script_file => 'ping', info_url => 'http://foo.com/bar.html', isactive => '1', last_updated => '2015-12-10 15:44:37', servercheck_column_name => 'ag', type => '5', }, }, '7' => { new => 'ToExtension', using => { name => 'OPEN', servercheck_short_name => '', isactive => '0', last_updated => '2015-12-10 15:44:37', script_file => 'dscp', servercheck_column_name => 'ah', version => '1.0.0', additional_config_json => '', description => undef, info_url => 'http://foo.com/bar.html', type => '7', }, }, '8' => { new => 'ToExtension', 
using => { name => 'CHECK_STATS', additional_config_json => '{ "select": "ilo_ip_address", "cron": "54 * * * *" }', description => undef, isactive => '1', script_file => 'ping', servercheck_short_name => 'STAT', type => '5', info_url => 'http://foo.com/bar.html', last_updated => '2015-12-10 15:44:37', servercheck_column_name => 'ai', version => '1.0.0', }, }, '9' => { new => 'ToExtension', using => { name => 'OPEN', info_url => 'http://foo.com/bar.html', script_file => 'dscp', servercheck_column_name => 'aj', type => '7', version => '1.0.0', additional_config_json => '', description => undef, isactive => '0', last_updated => '2015-12-10 15:44:37', servercheck_short_name => '', }, }, '10' => { new => 'ToExtension', using => { name => 'CHECK_MTU', servercheck_column_name => 'ak', servercheck_short_name => 'MTU', version => '1.0.0', additional_config_json => '{ "select": "ip_address", "cron": "45 * * * *" }', description => undef, last_updated => '2015-12-10 15:44:37', script_file => 'ping', type => '5', info_url => 'http://foo.com/bar.html', isactive => '1', }, }, '11' => { new => 'ToExtension', using => { name => 'CHECK_TRAFFIC_ROUTER_STATUS', additional_config_json => '{ "select": "ilo_ip_address", "cron": "10 * * * *" }', description => undef, info_url => 'http://foo.com/bar.html', isactive => '1', last_updated => '2015-12-10 15:44:37', script_file => 'ping', version => '1.0.0', servercheck_column_name => 'al', servercheck_short_name => 'TRTR', type => '5', }, }, '12' => { new => 'ToExtension', using => { name => 'CHECK_TRAFFIC_MONITOR_STATUS', type => '5', version => '1.0.0', description => undef, isactive => '1', last_updated => '2015-12-10 15:44:37', script_file => 'ping', servercheck_short_name => 'TRMO', additional_config_json => '{ "select": "ip_address", "cron": "10 * * * *" }', info_url => 'http://foo.com/bar.html', servercheck_column_name => 'am', }, }, '13' => { new => 'ToExtension', using => { name => 'CACHE_HIT_RATIO_LAST_15', info_url => 
'http://foo.com/bar.html', isactive => '1', servercheck_column_name => 'an', version => '1.0.0', additional_config_json => '{ "select": "ilo_ip_address", "cron": "0,15,30,45 * * * *" }', description => undef, servercheck_short_name => 'CHR', type => '6', last_updated => '2015-12-10 15:44:37', script_file => 'ping', }, }, '14' => { new => 'ToExtension', using => { name => 'DISK_UTILIZATION', script_file => 'ping', type => '6', info_url => 'http://foo.com/bar.html', isactive => '1', last_updated => '2015-12-10 15:44:37', servercheck_column_name => 'ao', servercheck_short_name => 'CDU', version => '1.0.0', additional_config_json => '{ "select": "ilo_ip_address", "cron": "20 * * * *" }', description => undef, }, }, '15' => { new => 'ToExtension', using => { name => 'ORT_ERROR_COUNT', description => undef, info_url => 'http://foo.com/bar.html', script_file => 'ping', servercheck_short_name => 'ORT', type => '6', servercheck_column_name => 'ap', version => '1.0.0', additional_config_json => '{ "select": "ilo_ip_address", "cron": "40 * * * *" }', isactive => '1', last_updated => '2015-12-10 15:44:37', }, }, '16' => { new => 'ToExtension', using => { name => 'OPEN', version => '1.0.0', additional_config_json => '', info_url => 'http://foo.com/bar.html', script_file => 'dscp', servercheck_column_name => 'aq', type => '7', description => undef, isactive => '0', last_updated => '2015-12-10 15:44:37', servercheck_short_name => '', }, }, '17' => { new => 'ToExtension', using => { name => 'OPEN', isactive => '0', last_updated => '2015-12-10 15:44:37', script_file => 'dscp', servercheck_short_name => '', additional_config_json => '', description => undef, info_url => 'http://foo.com/bar.html', type => '7', version => '1.0.0', servercheck_column_name => 'ar', }, }, '18' => { new => 'ToExtension', using => { name => 'OPEN', info_url => 'http://foo.com/bar.html', isactive => '0', script_file => 'dscp', servercheck_short_name => '', type => '7', version => '1.0.0', description => 
undef, last_updated => '2015-12-10 15:44:37', servercheck_column_name => 'bf', additional_config_json => '', }, }, '19' => { new => 'ToExtension', using => { name => 'OPEN', servercheck_short_name => '', version => '1.0.0', description => undef, last_updated => '2015-12-10 15:44:37', script_file => 'dscp', servercheck_column_name => 'at', type => '7', additional_config_json => '', info_url => 'http://foo.com/bar.html', isactive => '0', }, }, '20' => { new => 'ToExtension', using => { name => 'OPEN', description => undef, isactive => '0', last_updated => '2015-12-10 15:44:37', servercheck_column_name => 'au', servercheck_short_name => '', type => '7', additional_config_json => '', version => '1.0.0', script_file => 'dscp', info_url => 'http://foo.com/bar.html', }, }, '21' => { new => 'ToExtension', using => { name => 'OPEN', script_file => 'dscp', description => undef, info_url => 'http://foo.com/bar.html', last_updated => '2015-12-10 15:44:37', version => '1.0.0', additional_config_json => '', isactive => '0', servercheck_column_name => 'av', servercheck_short_name => '', type => '7', }, }, '22' => { new => 'ToExtension', using => { name => 'OPEN', type => '7', version => '1.0.0', isactive => '0', script_file => 'dscp', servercheck_column_name => 'aw', servercheck_short_name => '', additional_config_json => '', description => undef, info_url => 'http://foo.com/bar.html', last_updated => '2015-12-10 15:44:37', }, }, '23' => { new => 'ToExtension', using => { name => 'OPEN', info_url => 'http://foo.com/bar.html', isactive => '0', last_updated => '2015-12-10 15:44:37', script_file => 'dscp', description => undef, servercheck_column_name => 'ax', servercheck_short_name => '', type => '7', version => '1.0.0', additional_config_json => '', }, }, '24' => { new => 'ToExtension', using => { name => 'OPEN', description => undef, isactive => '0', servercheck_short_name => '', type => '7', version => '1.0.0', script_file => 'dscp', servercheck_column_name => 'ay', 
additional_config_json => '', info_url => 'http://foo.com/bar.html', last_updated => '2015-12-10 15:44:37', }, }, '25' => { new => 'ToExtension', using => { name => 'OPEN', servercheck_column_name => 'az', servercheck_short_name => '', type => '7', additional_config_json => '', info_url => 'http://foo.com/bar.html', last_updated => '2015-12-10 15:44:37', version => '1.0.0', description => undef, isactive => '0', script_file => 'dscp', }, }, '26' => { new => 'ToExtension', using => { name => 'OPEN', additional_config_json => '', isactive => '0', type => '7', version => '1.0.0', servercheck_short_name => '', description => undef, info_url => 'http://foo.com/bar.html', last_updated => '2015-12-10 15:44:37', script_file => 'dscp', servercheck_column_name => 'ba', }, }, '27' => { new => 'ToExtension', using => { name => 'OPEN', script_file => 'dscp', servercheck_column_name => 'bb', servercheck_short_name => '', info_url => 'http://foo.com/bar.html', isactive => '0', last_updated => '2015-12-10 15:44:37', type => '7', version => '1.0.0', additional_config_json => '', description => undef, }, }, '28' => { new => 'ToExtension', using => { name => 'OPEN', description => undef, isactive => '0', servercheck_column_name => 'bc', version => '1.0.0', servercheck_short_name => '', type => '7', additional_config_json => '', info_url => 'http://foo.com/bar.html', last_updated => '2015-12-10 15:44:37', script_file => 'dscp', }, }, '29' => { new => 'ToExtension', using => { name => 'OPEN', info_url => 'http://foo.com/bar.html', script_file => 'dscp', servercheck_column_name => 'bd', servercheck_short_name => '', additional_config_json => '', last_updated => '2015-12-10 15:44:37', type => '7', version => '1.0.0', description => undef, isactive => '0', }, }, '30' => { new => 'ToExtension', using => { name => 'OPEN', description => undef, last_updated => '2015-12-10 15:44:37', servercheck_short_name => '', version => '1.0.0', script_file => 'dscp', servercheck_column_name => 'be', type 
=> '7', additional_config_json => '', info_url => 'http://foo.com/bar.html', isactive => '0', }, }, ); sub name { return "ToExtension"; } sub get_definition { my ( $self, $name ) = @_; return $definition_for{$name}; } sub all_fixture_names { # sort by db name to guarantee insertion order return (sort { $definition_for{$a}{using}{name} cmp $definition_for{$b}{using}{name} } keys %definition_for); } __PACKAGE__->meta->make_immutable; 1;
mdb/incubator-trafficcontrol
traffic_ops/app/lib/Fixtures/Integration/ToExtension.pm
Perl
apache-2.0
12,422
=head2 Sat Jun 8 20:25:57 2013: C<Module> L<JSON|JSON> =over 4 =item * C<installed into: /w/chr/src/third_party/JSON/out/lib/perl5> =item * C<LINKTYPE: dynamic> =item * C<VERSION: 2.59> =item * C<EXE_FILES: > =back =head2 Mon Jun 10 12:38:28 2013: C<Module> L<JSON|JSON> =over 4 =item * C<installed into: /w/chr/src/third_party/JSON/out/lib/perl5> =item * C<LINKTYPE: dynamic> =item * C<VERSION: 2.59> =item * C<EXE_FILES: > =back =head2 Mon Jun 10 12:40:36 2013: C<Module> L<JSON|JSON> =over 4 =item * C<installed into: /w/chr/src/third_party/JSON/out/lib/perl5> =item * C<LINKTYPE: dynamic> =item * C<VERSION: 2.59> =item * C<EXE_FILES: > =back
androidarmv6/android_external_chromium_org
third_party/JSON/out/lib/perl5/x86_64-linux-gnu-thread-multi/perllocal.pod
Perl
bsd-3-clause
681
# ------------------------------------------------------------------------------
# t[#num] - [#invert ? 'Not true' : 'True']
# Generated from [#lib] line: [#lineno]
# ------------------------------------------------------------------------------
# Template for a generated truthiness test sub; [#...] markers are expanded
# by the test generator before this code is ever compiled.
sub t[#num] {
  # Evaluate the test expression; rethrow any exception so the harness
  # records the failure instead of silently swallowing it.
  # NOTE(review): $result is deliberately not declared with "my" here —
  # presumably the generated file provides/tolerates the global; confirm
  # against the template expander before changing.
  $result = eval { [#test] };
  $@ and die $@;
  # Collapse to 1/0 for the pass/fail comparison.
  return $result ? 1 : 0;
}
ryangies/lsn-build
builders/perl/test/case_true.pl
Perl
mit
338
% This is Prolog (not Perl, despite the .pl extension): directives that
% consult the listed source files at load time.

% Load the PDDL front-end: raw file reader, problem parser, domain parser,
% and shared helper predicates.
:-[readFile, parseProblem, parseDomain, common].

% Load the search configuration: depth-first search over the backward
% (regression) state space guided by the additive heuristic (h_add).
:-[dfs, backward, h_addb].
TeamSPoon/logicmoo_workspace
packs_sys/logicmoo_ec/test/pddl_tests/orig_pddl_parser/backward-dfs-h_add.pl
Perl
mit
76
=head1 NAME

Attean::API::PushParser - Role for parsers that natively call a callback function for each parsed item

=head1 VERSION

This document describes Attean::API::PushParser version 0.002

=head1 DESCRIPTION

The Attean::API::PushParser role defines parsers that can efficiently call a
callback function for each object constructed from the parsed data. This
role adds methods that build on this functionality to allow parsing data
using different approaches.

=head1 ROLES

This role consumes the L<Attean::API::Parser> role.

=head1 REQUIRED METHODS

Classes consuming this role must provide the following methods:

=over 4

=item C<< parse_cb_from_io( $fh ) >>

Calls the C<< $parser->handler >> function once for each object that results
from parsing the data read from the L<IO::Handle> object C<< $fh >>.

=item C<< parse_cb_from_bytes( $data ) >>

Calls the C<< $parser->handler >> function once for each object that results
from parsing the data read from the UTF-8 encoded byte string C<< $data >>.

=back

=head1 METHODS

This role provides default implementations of the following methods:

=over 4

=item C<< parse_iter_from_io( $fh ) >>

Returns an L<Attean::API::Iterator> that results from parsing the data read
from the L<IO::Handle> object C<< $fh >>.

=item C<< parse_iter_from_bytes( $data ) >>

Returns an L<Attean::API::Iterator> that results from parsing the data read
from the UTF-8 encoded byte string C<< $data >>.

=item C<< parse_list_from_io( $fh ) >>

Returns a list of all objects that result from parsing the data read from
the L<IO::Handle> object C<< $fh >>.

=item C<< parse_list_from_bytes( $data ) >>

Returns a list of all objects that result from parsing the data read from
the UTF-8 encoded byte string C<< $data >>.

=back

=head1 BUGS

Please report any bugs or feature requests through the GitHub web interface
at L<https://github.com/kasei/attean/issues>.
=head1 SEE ALSO L<http://www.perlrdf.org/> =head1 AUTHOR Gregory Todd Williams C<< <gwilliams@cpan.org> >> =head1 COPYRIGHT Copyright (c) 2014 Gregory Todd Williams. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut
gitpan/Attean
lib/Attean/API/PushParser.pod
Perl
mit
2,194
=begin comment Swaggy Jenkins Jenkins API clients generated from Swagger / Open API specification The version of the OpenAPI document: 1.1.2-pre.0 Contact: blah@cliffano.com Generated by: https://openapi-generator.tech =end comment =cut # # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). # Do not edit the class manually. # Ref: https://openapi-generator.tech # package WWW::OpenAPIClient::Object::ClockDifference; require 5.6.0; use strict; use warnings; use utf8; use JSON qw(decode_json); use Data::Dumper; use Module::Runtime qw(use_module); use Log::Any qw($log); use Date::Parse; use DateTime; use base ("Class::Accessor", "Class::Data::Inheritable"); # # # # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). Do not edit the class manually. # REF: https://openapi-generator.tech # =begin comment Swaggy Jenkins Jenkins API clients generated from Swagger / Open API specification The version of the OpenAPI document: 1.1.2-pre.0 Contact: blah@cliffano.com Generated by: https://openapi-generator.tech =end comment =cut # # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). # Do not edit the class manually. 
# Ref: https://openapi-generator.tech
#
# NOTE(review): this body diverges from the stock generated template in
# _deserialize only — see the comment there.

# Class-level metadata slots; populated at the bottom of this file.
__PACKAGE__->mk_classdata('attribute_map' => {});
__PACKAGE__->mk_classdata('openapi_types' => {});
__PACKAGE__->mk_classdata('method_documentation' => {});
__PACKAGE__->mk_classdata('class_documentation' => {});

# new plain object
sub new {
    my ($class, %args) = @_;

    my $self = bless {}, $class;

    $self->init(%args);

    return $self;
}

# initialize the object: copy each constructor argument (keyed by its JSON
# name) into the matching attribute accessor.
sub init {
    my ($self, %args) = @_;

    foreach my $attribute (keys %{$self->attribute_map}) {
        my $args_key = $self->attribute_map->{$attribute};
        $self->$attribute( $args{ $args_key } );
    }
}

# return perl hash (round-trips through the JSON serializer so that
# TO_JSON below controls the key names).
sub to_hash {
    my $self = shift;
    my $_hash = decode_json(JSON->new->convert_blessed->encode($self));

    return $_hash;
}

# used by JSON for serialization: emit only defined attributes, under
# their JSON (base) names.
sub TO_JSON {
    my $self = shift;
    my $_data = {};
    foreach my $_key (keys %{$self->attribute_map}) {
        if (defined $self->{$_key}) {
            $_data->{$self->attribute_map->{$_key}} = $self->{$_key};
        }
    }

    return $_data;
}

# from Perl hashref
sub from_hash {
    my ($self, $hash) = @_;

    # loop through attributes and use openapi_types to deserialize the data
    while ( my ($_key, $_type) = each %{$self->openapi_types} ) {
        my $_json_attribute = $self->attribute_map->{$_key};
        if ($_type =~ /^array\[(.+)\]$/i) { # array
            my $_subclass = $1;
            my @_array = ();
            foreach my $_element (@{$hash->{$_json_attribute}}) {
                push @_array, $self->_deserialize($_subclass, $_element);
            }
            $self->{$_key} = \@_array;
        } elsif ($_type =~ /^hash\[string,(.+)\]$/i) { # hash
            my $_subclass = $1;
            my %_hash = ();
            while (my($_key, $_element) = each %{$hash->{$_json_attribute}}) {
                $_hash{$_key} = $self->_deserialize($_subclass, $_element);
            }
            $self->{$_key} = \%_hash;
        } elsif (exists $hash->{$_json_attribute}) { #hash(model), primitive, datetime
            $self->{$_key} = $self->_deserialize($_type, $hash->{$_json_attribute});
        } else {
            $log->debugf("Warning: %s (%s) does not exist in input hash\n", $_key, $_json_attribute);
        }
    }

    return $self;
}

# deserialize non-array data
sub _deserialize {
    my ($self, $type, $data) = @_;
    $log->debugf("deserializing %s with %s", Dumper($data), $type);

    if ($type eq 'DateTime') {
        return DateTime->from_epoch(epoch => str2time($data));
    } elsif ( grep { $_ eq $type } ('int', 'double', 'string', 'boolean') ) {
        # Membership test via string equality instead of grep(/^$type$/,...),
        # which interpolated $type into a regex unescaped.
        return $data;
    } else { # hash(model)
        # Use Module::Runtime's use_module (already imported at the top of
        # this file) instead of a string eval whose load errors were
        # silently discarded; a missing model class now fails loudly.
        return use_module("WWW::OpenAPIClient::Object::$type")->new->from_hash($data);
    }
}

__PACKAGE__->class_documentation({description => '',
                                  class => 'ClockDifference',
                                  required => [], # TODO
});

__PACKAGE__->method_documentation({
    '_class' => {
        datatype => 'string',
        base_name => '_class',
        description => '',
        format => '',
        read_only => '',
    },
    'diff' => {
        datatype => 'int',
        base_name => 'diff',
        description => '',
        format => '',
        read_only => '',
    },
});

__PACKAGE__->openapi_types( {
    '_class' => 'string',
    'diff' => 'int'
} );

__PACKAGE__->attribute_map( {
    '_class' => '_class',
    'diff' => 'diff'
} );

__PACKAGE__->mk_accessors(keys %{__PACKAGE__->attribute_map});

1;
cliffano/swaggy-jenkins
clients/perl/generated/lib/WWW/OpenAPIClient/Object/ClockDifference.pm
Perl
mit
4,867
# This file is auto-generated by the Perl DateTime Suite time zone # code generator (0.07) This code generator comes with the # DateTime::TimeZone module distribution in the tools/ directory # # Generated from /tmp/Q713JNUf8G/southamerica. Olson data version 2016a # # Do not edit this file directly. # package DateTime::TimeZone::Pacific::Easter; $DateTime::TimeZone::Pacific::Easter::VERSION = '1.95'; use strict; use Class::Singleton 1.03; use DateTime::TimeZone; use DateTime::TimeZone::OlsonDB; @DateTime::TimeZone::Pacific::Easter::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' ); my $spans = [ [ DateTime::TimeZone::NEG_INFINITY, # utc_start 59611187848, # utc_end 1890-01-01 07:17:28 (Wed) DateTime::TimeZone::NEG_INFINITY, # local_start 59611161600, # local_end 1890-01-01 00:00:00 (Wed) -26248, 0, 'LMT', ], [ 59611187848, # utc_start 1890-01-01 07:17:28 (Wed) 60957559048, # utc_end 1932-09-01 07:17:28 (Thu) 59611161600, # local_start 1890-01-01 00:00:00 (Wed) 60957532800, # local_end 1932-09-01 00:00:00 (Thu) -26248, 0, 'EMT', ], [ 60957559048, # utc_start 1932-09-01 07:17:28 (Thu) 62099064000, # utc_end 1968-11-03 04:00:00 (Sun) 60957533848, # local_start 1932-09-01 00:17:28 (Thu) 62099038800, # local_end 1968-11-02 21:00:00 (Sat) -25200, 0, 'EAST', ], [ 62099064000, # utc_start 1968-11-03 04:00:00 (Sun) 62111761200, # utc_end 1969-03-30 03:00:00 (Sun) 62099042400, # local_start 1968-11-02 22:00:00 (Sat) 62111739600, # local_end 1969-03-29 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62111761200, # utc_start 1969-03-30 03:00:00 (Sun) 62132328000, # utc_end 1969-11-23 04:00:00 (Sun) 62111736000, # local_start 1969-03-29 20:00:00 (Sat) 62132302800, # local_end 1969-11-22 21:00:00 (Sat) -25200, 0, 'EAST', ], [ 62132328000, # utc_start 1969-11-23 04:00:00 (Sun) 62143210800, # utc_end 1970-03-29 03:00:00 (Sun) 62132306400, # local_start 1969-11-22 22:00:00 (Sat) 62143189200, # local_end 1970-03-28 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62143210800, # utc_start 1970-03-29 
03:00:00 (Sun) 62160148800, # utc_end 1970-10-11 04:00:00 (Sun) 62143185600, # local_start 1970-03-28 20:00:00 (Sat) 62160123600, # local_end 1970-10-10 21:00:00 (Sat) -25200, 0, 'EAST', ], [ 62160148800, # utc_start 1970-10-11 04:00:00 (Sun) 62173450800, # utc_end 1971-03-14 03:00:00 (Sun) 62160127200, # local_start 1970-10-10 22:00:00 (Sat) 62173429200, # local_end 1971-03-13 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62173450800, # utc_start 1971-03-14 03:00:00 (Sun) 62191598400, # utc_end 1971-10-10 04:00:00 (Sun) 62173425600, # local_start 1971-03-13 20:00:00 (Sat) 62191573200, # local_end 1971-10-09 21:00:00 (Sat) -25200, 0, 'EAST', ], [ 62191598400, # utc_start 1971-10-10 04:00:00 (Sun) 62204900400, # utc_end 1972-03-12 03:00:00 (Sun) 62191576800, # local_start 1971-10-09 22:00:00 (Sat) 62204878800, # local_end 1972-03-11 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62204900400, # utc_start 1972-03-12 03:00:00 (Sun) 62223652800, # utc_end 1972-10-15 04:00:00 (Sun) 62204875200, # local_start 1972-03-11 20:00:00 (Sat) 62223627600, # local_end 1972-10-14 21:00:00 (Sat) -25200, 0, 'EAST', ], [ 62223652800, # utc_start 1972-10-15 04:00:00 (Sun) 62236350000, # utc_end 1973-03-11 03:00:00 (Sun) 62223631200, # local_start 1972-10-14 22:00:00 (Sat) 62236328400, # local_end 1973-03-10 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62236350000, # utc_start 1973-03-11 03:00:00 (Sun) 62253892800, # utc_end 1973-09-30 04:00:00 (Sun) 62236324800, # local_start 1973-03-10 20:00:00 (Sat) 62253867600, # local_end 1973-09-29 21:00:00 (Sat) -25200, 0, 'EAST', ], [ 62253892800, # utc_start 1973-09-30 04:00:00 (Sun) 62267799600, # utc_end 1974-03-10 03:00:00 (Sun) 62253871200, # local_start 1973-09-29 22:00:00 (Sat) 62267778000, # local_end 1974-03-09 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62267799600, # utc_start 1974-03-10 03:00:00 (Sun) 62286552000, # utc_end 1974-10-13 04:00:00 (Sun) 62267774400, # local_start 1974-03-09 20:00:00 (Sat) 62286526800, # local_end 1974-10-12 21:00:00 (Sat) 
-25200, 0, 'EAST', ], [ 62286552000, # utc_start 1974-10-13 04:00:00 (Sun) 62299249200, # utc_end 1975-03-09 03:00:00 (Sun) 62286530400, # local_start 1974-10-12 22:00:00 (Sat) 62299227600, # local_end 1975-03-08 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62299249200, # utc_start 1975-03-09 03:00:00 (Sun) 62318001600, # utc_end 1975-10-12 04:00:00 (Sun) 62299224000, # local_start 1975-03-08 20:00:00 (Sat) 62317976400, # local_end 1975-10-11 21:00:00 (Sat) -25200, 0, 'EAST', ], [ 62318001600, # utc_start 1975-10-12 04:00:00 (Sun) 62331303600, # utc_end 1976-03-14 03:00:00 (Sun) 62317980000, # local_start 1975-10-11 22:00:00 (Sat) 62331282000, # local_end 1976-03-13 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62331303600, # utc_start 1976-03-14 03:00:00 (Sun) 62349451200, # utc_end 1976-10-10 04:00:00 (Sun) 62331278400, # local_start 1976-03-13 20:00:00 (Sat) 62349426000, # local_end 1976-10-09 21:00:00 (Sat) -25200, 0, 'EAST', ], [ 62349451200, # utc_start 1976-10-10 04:00:00 (Sun) 62362753200, # utc_end 1977-03-13 03:00:00 (Sun) 62349429600, # local_start 1976-10-09 22:00:00 (Sat) 62362731600, # local_end 1977-03-12 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62362753200, # utc_start 1977-03-13 03:00:00 (Sun) 62380900800, # utc_end 1977-10-09 04:00:00 (Sun) 62362728000, # local_start 1977-03-12 20:00:00 (Sat) 62380875600, # local_end 1977-10-08 21:00:00 (Sat) -25200, 0, 'EAST', ], [ 62380900800, # utc_start 1977-10-09 04:00:00 (Sun) 62394202800, # utc_end 1978-03-12 03:00:00 (Sun) 62380879200, # local_start 1977-10-08 22:00:00 (Sat) 62394181200, # local_end 1978-03-11 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62394202800, # utc_start 1978-03-12 03:00:00 (Sun) 62412955200, # utc_end 1978-10-15 04:00:00 (Sun) 62394177600, # local_start 1978-03-11 20:00:00 (Sat) 62412930000, # local_end 1978-10-14 21:00:00 (Sat) -25200, 0, 'EAST', ], [ 62412955200, # utc_start 1978-10-15 04:00:00 (Sun) 62425652400, # utc_end 1979-03-11 03:00:00 (Sun) 62412933600, # local_start 1978-10-14 22:00:00 
(Sat) 62425630800, # local_end 1979-03-10 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62425652400, # utc_start 1979-03-11 03:00:00 (Sun) 62444404800, # utc_end 1979-10-14 04:00:00 (Sun) 62425627200, # local_start 1979-03-10 20:00:00 (Sat) 62444379600, # local_end 1979-10-13 21:00:00 (Sat) -25200, 0, 'EAST', ], [ 62444404800, # utc_start 1979-10-14 04:00:00 (Sun) 62457102000, # utc_end 1980-03-09 03:00:00 (Sun) 62444383200, # local_start 1979-10-13 22:00:00 (Sat) 62457080400, # local_end 1980-03-08 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62457102000, # utc_start 1980-03-09 03:00:00 (Sun) 62475854400, # utc_end 1980-10-12 04:00:00 (Sun) 62457076800, # local_start 1980-03-08 20:00:00 (Sat) 62475829200, # local_end 1980-10-11 21:00:00 (Sat) -25200, 0, 'EAST', ], [ 62475854400, # utc_start 1980-10-12 04:00:00 (Sun) 62489156400, # utc_end 1981-03-15 03:00:00 (Sun) 62475832800, # local_start 1980-10-11 22:00:00 (Sat) 62489134800, # local_end 1981-03-14 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62489156400, # utc_start 1981-03-15 03:00:00 (Sun) 62507304000, # utc_end 1981-10-11 04:00:00 (Sun) 62489131200, # local_start 1981-03-14 20:00:00 (Sat) 62507278800, # local_end 1981-10-10 21:00:00 (Sat) -25200, 0, 'EAST', ], [ 62507304000, # utc_start 1981-10-11 04:00:00 (Sun) 62520606000, # utc_end 1982-03-14 03:00:00 (Sun) 62507282400, # local_start 1981-10-10 22:00:00 (Sat) 62520584400, # local_end 1982-03-13 21:00:00 (Sat) -21600, 1, 'EASST', ], [ 62520606000, # utc_start 1982-03-14 03:00:00 (Sun) 62538753600, # utc_end 1982-10-10 04:00:00 (Sun) 62520584400, # local_start 1982-03-13 21:00:00 (Sat) 62538732000, # local_end 1982-10-09 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62538753600, # utc_start 1982-10-10 04:00:00 (Sun) 62552055600, # utc_end 1983-03-13 03:00:00 (Sun) 62538735600, # local_start 1982-10-09 23:00:00 (Sat) 62552037600, # local_end 1983-03-12 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62552055600, # utc_start 1983-03-13 03:00:00 (Sun) 62570203200, # utc_end 1983-10-09 
04:00:00 (Sun) 62552034000, # local_start 1983-03-12 21:00:00 (Sat) 62570181600, # local_end 1983-10-08 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62570203200, # utc_start 1983-10-09 04:00:00 (Sun) 62583505200, # utc_end 1984-03-11 03:00:00 (Sun) 62570185200, # local_start 1983-10-08 23:00:00 (Sat) 62583487200, # local_end 1984-03-10 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62583505200, # utc_start 1984-03-11 03:00:00 (Sun) 62602257600, # utc_end 1984-10-14 04:00:00 (Sun) 62583483600, # local_start 1984-03-10 21:00:00 (Sat) 62602236000, # local_end 1984-10-13 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62602257600, # utc_start 1984-10-14 04:00:00 (Sun) 62614954800, # utc_end 1985-03-10 03:00:00 (Sun) 62602239600, # local_start 1984-10-13 23:00:00 (Sat) 62614936800, # local_end 1985-03-09 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62614954800, # utc_start 1985-03-10 03:00:00 (Sun) 62633707200, # utc_end 1985-10-13 04:00:00 (Sun) 62614933200, # local_start 1985-03-09 21:00:00 (Sat) 62633685600, # local_end 1985-10-12 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62633707200, # utc_start 1985-10-13 04:00:00 (Sun) 62646404400, # utc_end 1986-03-09 03:00:00 (Sun) 62633689200, # local_start 1985-10-12 23:00:00 (Sat) 62646386400, # local_end 1986-03-08 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62646404400, # utc_start 1986-03-09 03:00:00 (Sun) 62665156800, # utc_end 1986-10-12 04:00:00 (Sun) 62646382800, # local_start 1986-03-08 21:00:00 (Sat) 62665135200, # local_end 1986-10-11 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62665156800, # utc_start 1986-10-12 04:00:00 (Sun) 62680878000, # utc_end 1987-04-12 03:00:00 (Sun) 62665138800, # local_start 1986-10-11 23:00:00 (Sat) 62680860000, # local_end 1987-04-11 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62680878000, # utc_start 1987-04-12 03:00:00 (Sun) 62696606400, # utc_end 1987-10-11 04:00:00 (Sun) 62680856400, # local_start 1987-04-11 21:00:00 (Sat) 62696584800, # local_end 1987-10-10 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62696606400, # utc_start 
1987-10-11 04:00:00 (Sun) 62709908400, # utc_end 1988-03-13 03:00:00 (Sun) 62696588400, # local_start 1987-10-10 23:00:00 (Sat) 62709890400, # local_end 1988-03-12 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62709908400, # utc_start 1988-03-13 03:00:00 (Sun) 62728056000, # utc_end 1988-10-09 04:00:00 (Sun) 62709886800, # local_start 1988-03-12 21:00:00 (Sat) 62728034400, # local_end 1988-10-08 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62728056000, # utc_start 1988-10-09 04:00:00 (Sun) 62741358000, # utc_end 1989-03-12 03:00:00 (Sun) 62728038000, # local_start 1988-10-08 23:00:00 (Sat) 62741340000, # local_end 1989-03-11 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62741358000, # utc_start 1989-03-12 03:00:00 (Sun) 62760110400, # utc_end 1989-10-15 04:00:00 (Sun) 62741336400, # local_start 1989-03-11 21:00:00 (Sat) 62760088800, # local_end 1989-10-14 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62760110400, # utc_start 1989-10-15 04:00:00 (Sun) 62772807600, # utc_end 1990-03-11 03:00:00 (Sun) 62760092400, # local_start 1989-10-14 23:00:00 (Sat) 62772789600, # local_end 1990-03-10 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62772807600, # utc_start 1990-03-11 03:00:00 (Sun) 62789140800, # utc_end 1990-09-16 04:00:00 (Sun) 62772786000, # local_start 1990-03-10 21:00:00 (Sat) 62789119200, # local_end 1990-09-15 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62789140800, # utc_start 1990-09-16 04:00:00 (Sun) 62804257200, # utc_end 1991-03-10 03:00:00 (Sun) 62789122800, # local_start 1990-09-15 23:00:00 (Sat) 62804239200, # local_end 1991-03-09 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62804257200, # utc_start 1991-03-10 03:00:00 (Sun) 62823009600, # utc_end 1991-10-13 04:00:00 (Sun) 62804235600, # local_start 1991-03-09 21:00:00 (Sat) 62822988000, # local_end 1991-10-12 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62823009600, # utc_start 1991-10-13 04:00:00 (Sun) 62836311600, # utc_end 1992-03-15 03:00:00 (Sun) 62822991600, # local_start 1991-10-12 23:00:00 (Sat) 62836293600, # local_end 1992-03-14 
22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62836311600, # utc_start 1992-03-15 03:00:00 (Sun) 62854459200, # utc_end 1992-10-11 04:00:00 (Sun) 62836290000, # local_start 1992-03-14 21:00:00 (Sat) 62854437600, # local_end 1992-10-10 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62854459200, # utc_start 1992-10-11 04:00:00 (Sun) 62867761200, # utc_end 1993-03-14 03:00:00 (Sun) 62854441200, # local_start 1992-10-10 23:00:00 (Sat) 62867743200, # local_end 1993-03-13 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62867761200, # utc_start 1993-03-14 03:00:00 (Sun) 62885908800, # utc_end 1993-10-10 04:00:00 (Sun) 62867739600, # local_start 1993-03-13 21:00:00 (Sat) 62885887200, # local_end 1993-10-09 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62885908800, # utc_start 1993-10-10 04:00:00 (Sun) 62899210800, # utc_end 1994-03-13 03:00:00 (Sun) 62885890800, # local_start 1993-10-09 23:00:00 (Sat) 62899192800, # local_end 1994-03-12 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62899210800, # utc_start 1994-03-13 03:00:00 (Sun) 62917358400, # utc_end 1994-10-09 04:00:00 (Sun) 62899189200, # local_start 1994-03-12 21:00:00 (Sat) 62917336800, # local_end 1994-10-08 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62917358400, # utc_start 1994-10-09 04:00:00 (Sun) 62930660400, # utc_end 1995-03-12 03:00:00 (Sun) 62917340400, # local_start 1994-10-08 23:00:00 (Sat) 62930642400, # local_end 1995-03-11 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62930660400, # utc_start 1995-03-12 03:00:00 (Sun) 62949412800, # utc_end 1995-10-15 04:00:00 (Sun) 62930638800, # local_start 1995-03-11 21:00:00 (Sat) 62949391200, # local_end 1995-10-14 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62949412800, # utc_start 1995-10-15 04:00:00 (Sun) 62962110000, # utc_end 1996-03-10 03:00:00 (Sun) 62949394800, # local_start 1995-10-14 23:00:00 (Sat) 62962092000, # local_end 1996-03-09 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62962110000, # utc_start 1996-03-10 03:00:00 (Sun) 62980862400, # utc_end 1996-10-13 04:00:00 (Sun) 62962088400, # local_start 
1996-03-09 21:00:00 (Sat) 62980840800, # local_end 1996-10-12 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 62980862400, # utc_start 1996-10-13 04:00:00 (Sun) 62995374000, # utc_end 1997-03-30 03:00:00 (Sun) 62980844400, # local_start 1996-10-12 23:00:00 (Sat) 62995356000, # local_end 1997-03-29 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 62995374000, # utc_start 1997-03-30 03:00:00 (Sun) 63012312000, # utc_end 1997-10-12 04:00:00 (Sun) 62995352400, # local_start 1997-03-29 21:00:00 (Sat) 63012290400, # local_end 1997-10-11 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63012312000, # utc_start 1997-10-12 04:00:00 (Sun) 63025614000, # utc_end 1998-03-15 03:00:00 (Sun) 63012294000, # local_start 1997-10-11 23:00:00 (Sat) 63025596000, # local_end 1998-03-14 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63025614000, # utc_start 1998-03-15 03:00:00 (Sun) 63042552000, # utc_end 1998-09-27 04:00:00 (Sun) 63025592400, # local_start 1998-03-14 21:00:00 (Sat) 63042530400, # local_end 1998-09-26 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63042552000, # utc_start 1998-09-27 04:00:00 (Sun) 63058878000, # utc_end 1999-04-04 03:00:00 (Sun) 63042534000, # local_start 1998-09-26 23:00:00 (Sat) 63058860000, # local_end 1999-04-03 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63058878000, # utc_start 1999-04-04 03:00:00 (Sun) 63075211200, # utc_end 1999-10-10 04:00:00 (Sun) 63058856400, # local_start 1999-04-03 21:00:00 (Sat) 63075189600, # local_end 1999-10-09 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63075211200, # utc_start 1999-10-10 04:00:00 (Sun) 63088513200, # utc_end 2000-03-12 03:00:00 (Sun) 63075193200, # local_start 1999-10-09 23:00:00 (Sat) 63088495200, # local_end 2000-03-11 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63088513200, # utc_start 2000-03-12 03:00:00 (Sun) 63107265600, # utc_end 2000-10-15 04:00:00 (Sun) 63088491600, # local_start 2000-03-11 21:00:00 (Sat) 63107244000, # local_end 2000-10-14 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63107265600, # utc_start 2000-10-15 04:00:00 (Sun) 63119962800, # 
utc_end 2001-03-11 03:00:00 (Sun) 63107247600, # local_start 2000-10-14 23:00:00 (Sat) 63119944800, # local_end 2001-03-10 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63119962800, # utc_start 2001-03-11 03:00:00 (Sun) 63138715200, # utc_end 2001-10-14 04:00:00 (Sun) 63119941200, # local_start 2001-03-10 21:00:00 (Sat) 63138693600, # local_end 2001-10-13 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63138715200, # utc_start 2001-10-14 04:00:00 (Sun) 63151412400, # utc_end 2002-03-10 03:00:00 (Sun) 63138697200, # local_start 2001-10-13 23:00:00 (Sat) 63151394400, # local_end 2002-03-09 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63151412400, # utc_start 2002-03-10 03:00:00 (Sun) 63170164800, # utc_end 2002-10-13 04:00:00 (Sun) 63151390800, # local_start 2002-03-09 21:00:00 (Sat) 63170143200, # local_end 2002-10-12 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63170164800, # utc_start 2002-10-13 04:00:00 (Sun) 63182862000, # utc_end 2003-03-09 03:00:00 (Sun) 63170146800, # local_start 2002-10-12 23:00:00 (Sat) 63182844000, # local_end 2003-03-08 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63182862000, # utc_start 2003-03-09 03:00:00 (Sun) 63201614400, # utc_end 2003-10-12 04:00:00 (Sun) 63182840400, # local_start 2003-03-08 21:00:00 (Sat) 63201592800, # local_end 2003-10-11 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63201614400, # utc_start 2003-10-12 04:00:00 (Sun) 63214916400, # utc_end 2004-03-14 03:00:00 (Sun) 63201596400, # local_start 2003-10-11 23:00:00 (Sat) 63214898400, # local_end 2004-03-13 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63214916400, # utc_start 2004-03-14 03:00:00 (Sun) 63233064000, # utc_end 2004-10-10 04:00:00 (Sun) 63214894800, # local_start 2004-03-13 21:00:00 (Sat) 63233042400, # local_end 2004-10-09 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63233064000, # utc_start 2004-10-10 04:00:00 (Sun) 63246366000, # utc_end 2005-03-13 03:00:00 (Sun) 63233046000, # local_start 2004-10-09 23:00:00 (Sat) 63246348000, # local_end 2005-03-12 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 
63246366000, # utc_start 2005-03-13 03:00:00 (Sun) 63264513600, # utc_end 2005-10-09 04:00:00 (Sun) 63246344400, # local_start 2005-03-12 21:00:00 (Sat) 63264492000, # local_end 2005-10-08 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63264513600, # utc_start 2005-10-09 04:00:00 (Sun) 63277815600, # utc_end 2006-03-12 03:00:00 (Sun) 63264495600, # local_start 2005-10-08 23:00:00 (Sat) 63277797600, # local_end 2006-03-11 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63277815600, # utc_start 2006-03-12 03:00:00 (Sun) 63296568000, # utc_end 2006-10-15 04:00:00 (Sun) 63277794000, # local_start 2006-03-11 21:00:00 (Sat) 63296546400, # local_end 2006-10-14 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63296568000, # utc_start 2006-10-15 04:00:00 (Sun) 63309265200, # utc_end 2007-03-11 03:00:00 (Sun) 63296550000, # local_start 2006-10-14 23:00:00 (Sat) 63309247200, # local_end 2007-03-10 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63309265200, # utc_start 2007-03-11 03:00:00 (Sun) 63328017600, # utc_end 2007-10-14 04:00:00 (Sun) 63309243600, # local_start 2007-03-10 21:00:00 (Sat) 63327996000, # local_end 2007-10-13 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63328017600, # utc_start 2007-10-14 04:00:00 (Sun) 63342529200, # utc_end 2008-03-30 03:00:00 (Sun) 63327999600, # local_start 2007-10-13 23:00:00 (Sat) 63342511200, # local_end 2008-03-29 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63342529200, # utc_start 2008-03-30 03:00:00 (Sun) 63359467200, # utc_end 2008-10-12 04:00:00 (Sun) 63342507600, # local_start 2008-03-29 21:00:00 (Sat) 63359445600, # local_end 2008-10-11 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63359467200, # utc_start 2008-10-12 04:00:00 (Sun) 63372769200, # utc_end 2009-03-15 03:00:00 (Sun) 63359449200, # local_start 2008-10-11 23:00:00 (Sat) 63372751200, # local_end 2009-03-14 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63372769200, # utc_start 2009-03-15 03:00:00 (Sun) 63390916800, # utc_end 2009-10-11 04:00:00 (Sun) 63372747600, # local_start 2009-03-14 21:00:00 (Sat) 63390895200, # 
local_end 2009-10-10 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63390916800, # utc_start 2009-10-11 04:00:00 (Sun) 63406033200, # utc_end 2010-04-04 03:00:00 (Sun) 63390898800, # local_start 2009-10-10 23:00:00 (Sat) 63406015200, # local_end 2010-04-03 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63406033200, # utc_start 2010-04-04 03:00:00 (Sun) 63422366400, # utc_end 2010-10-10 04:00:00 (Sun) 63406011600, # local_start 2010-04-03 21:00:00 (Sat) 63422344800, # local_end 2010-10-09 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63422366400, # utc_start 2010-10-10 04:00:00 (Sun) 63440506800, # utc_end 2011-05-08 03:00:00 (Sun) 63422348400, # local_start 2010-10-09 23:00:00 (Sat) 63440488800, # local_end 2011-05-07 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63440506800, # utc_start 2011-05-08 03:00:00 (Sun) 63449582400, # utc_end 2011-08-21 04:00:00 (Sun) 63440485200, # local_start 2011-05-07 21:00:00 (Sat) 63449560800, # local_end 2011-08-20 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63449582400, # utc_start 2011-08-21 04:00:00 (Sun) 63471351600, # utc_end 2012-04-29 03:00:00 (Sun) 63449564400, # local_start 2011-08-20 23:00:00 (Sat) 63471333600, # local_end 2012-04-28 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63471351600, # utc_start 2012-04-29 03:00:00 (Sun) 63482241600, # utc_end 2012-09-02 04:00:00 (Sun) 63471330000, # local_start 2012-04-28 21:00:00 (Sat) 63482220000, # local_end 2012-09-01 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63482241600, # utc_start 2012-09-02 04:00:00 (Sun) 63502801200, # utc_end 2013-04-28 03:00:00 (Sun) 63482223600, # local_start 2012-09-01 23:00:00 (Sat) 63502783200, # local_end 2013-04-27 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63502801200, # utc_start 2013-04-28 03:00:00 (Sun) 63514296000, # utc_end 2013-09-08 04:00:00 (Sun) 63502779600, # local_start 2013-04-27 21:00:00 (Sat) 63514274400, # local_end 2013-09-07 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63514296000, # utc_start 2013-09-08 04:00:00 (Sun) 63534250800, # utc_end 2014-04-27 03:00:00 (Sun) 
63514278000, # local_start 2013-09-07 23:00:00 (Sat) 63534232800, # local_end 2014-04-26 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63534250800, # utc_start 2014-04-27 03:00:00 (Sun) 63545745600, # utc_end 2014-09-07 04:00:00 (Sun) 63534229200, # local_start 2014-04-26 21:00:00 (Sat) 63545724000, # local_end 2014-09-06 22:00:00 (Sat) -21600, 0, 'EAST', ], [ 63545745600, # utc_start 2014-09-07 04:00:00 (Sun) 63565700400, # utc_end 2015-04-26 03:00:00 (Sun) 63545727600, # local_start 2014-09-06 23:00:00 (Sat) 63565682400, # local_end 2015-04-25 22:00:00 (Sat) -18000, 1, 'EASST', ], [ 63565700400, # utc_start 2015-04-26 03:00:00 (Sun) DateTime::TimeZone::INFINITY, # utc_end 63565682400, # local_start 2015-04-25 22:00:00 (Sat) DateTime::TimeZone::INFINITY, # local_end -18000, 0, 'EAST', ], ]; sub olson_version {'2016a'} sub has_dst_changes {47} sub _max_year {2026} sub _new_instance { return shift->_init( @_, spans => $spans ); } 1;
jkb78/extrajnm
local/lib/perl5/DateTime/TimeZone/Pacific/Easter.pm
Perl
mit
24,840
% Build script for the Toy1 interlingua generator.
% Loads the Regulus system, runs the generation-compilation batch step,
% and terminates the Prolog session.

% Compile the main Regulus code
:- compile('$REGULUS/Prolog/load').

% Compile generator: run the LOAD_GENERATION batch command against the
% Toy1 interlingua configuration file.
:- regulus_batch('$REGULUS/Examples/Toy1/scripts/interlingua.cfg', ["LOAD_GENERATION"]).

% Exit once the build steps above have run.
:- halt.
TeamSPoon/logicmoo_workspace
packs_sys/logicmoo_nlu/ext/regulus/Examples/Toy1/scripts/build_interlingua_generator.pl
Perl
mit
191
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

package Google::Ads::GoogleAds::V9::Services::BiddingStrategyService::BiddingStrategyOperation;

use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);

use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;

# Constructor.  Copies the recognized operation fields (create / remove /
# update / updateMask) out of the argument hash, then strips the keys the
# caller never assigned so the serialized JSON payload stays concise.
sub new {
  my ($class, $args) = @_;

  my %fields;
  $fields{$_} = $args->{$_} for qw(create remove update updateMask);
  my $self = \%fields;

  # Delete the unassigned fields in this object for a more concise JSON payload
  remove_unassigned_fields($self, $args);

  return bless $self, $class;
}

1;
googleads/google-ads-perl
lib/Google/Ads/GoogleAds/V9/Services/BiddingStrategyService/BiddingStrategyOperation.pm
Perl
apache-2.0
1,168
# Value object describing a BGP peer on a Direct Connect virtual interface.
# Each attribute declares its lower-camel-case wire name via request_name /
# the NameInRequest trait so the request serializer maps it correctly.
package Paws::DirectConnect::BGPPeer;
  use Moose;
  has AddressFamily => (is => 'ro', isa => 'Str', request_name => 'addressFamily', traits => ['NameInRequest']);
  has AmazonAddress => (is => 'ro', isa => 'Str', request_name => 'amazonAddress', traits => ['NameInRequest']);
  has Asn => (is => 'ro', isa => 'Int', request_name => 'asn', traits => ['NameInRequest']);
  has AuthKey => (is => 'ro', isa => 'Str', request_name => 'authKey', traits => ['NameInRequest']);
  has BgpPeerState => (is => 'ro', isa => 'Str', request_name => 'bgpPeerState', traits => ['NameInRequest']);
  has BgpStatus => (is => 'ro', isa => 'Str', request_name => 'bgpStatus', traits => ['NameInRequest']);
  has CustomerAddress => (is => 'ro', isa => 'Str', request_name => 'customerAddress', traits => ['NameInRequest']);
1;

### main pod documentation begin ###

=head1 NAME

Paws::DirectConnect::BGPPeer

=head1 USAGE

This class represents one of two things:

=head3 Arguments in a call to a service

Use the attributes of this class as arguments to methods. You shouldn't
make instances of this class. Each attribute should be used as a named
argument in the calls that expect this type of object.

As an example, if Att1 is expected to be a Paws::DirectConnect::BGPPeer object:

  $service_obj->Method(Att1 => { AddressFamily => $value, ..., CustomerAddress => $value });

=head3 Results returned from an API call

Use accessors for each attribute. If Att1 is expected to be an Paws::DirectConnect::BGPPeer object:

  $result = $service_obj->Method(...);
  $result->Att1->AddressFamily

=head1 DESCRIPTION

A structure containing information about a BGP peer.

=head1 ATTRIBUTES

=head2 AddressFamily => Str

=head2 AmazonAddress => Str

=head2 Asn => Int

=head2 AuthKey => Str

=head2 BgpPeerState => Str

=head2 BgpStatus => Str

=head2 CustomerAddress => Str

=head1 SEE ALSO

This class forms part of L<Paws>, describing an object used in L<Paws::DirectConnect>

=head1 BUGS and CONTRIBUTIONS

The source code is located here: https://github.com/pplu/aws-sdk-perl

Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues

=cut
ioanrogers/aws-sdk-perl
auto-lib/Paws/DirectConnect/BGPPeer.pm
Perl
apache-2.0
2,170
#!/usr/bin/perl --
# generate site specific passwords from one (or more) master password(s)
# Steffen Zahn steffzahn@gmail.com
#
# Usage: pwgen256.pl [site] [length] [master-password]
# Any argument not supplied on the command line is prompted for; the
# master password is read with terminal echo disabled.

use strict;
use warnings;
use Digest::SHA qw(sha256);
# use Term::ReadPassword; # non-standard, apparently

my $DEBUG = 0;

# Output alphabet: 41 characters (a prime), with visually ambiguous
# characters (l, 1, 0, O) deliberately excluded.
my $printable = "abcdefghijkmnopqrstuvwxyz23456789!:?/&()-"; # size 41 (prime)
my $printable_len = length($printable);

my $site   = $ARGV[0];
my $length = $ARGV[1];
my $pw     = $ARGV[2];

# Known sites and the default password length used for each.
my %site2defaultPWLength = (
    "google"    => 16,
    "facebook"  => 16,
    "instagram" => 16,
    "pinterest" => 16,
    "flickr"    => 16,
    "yahoo"     => 16,
    "vimeo"     => 16,
    "tumblr"    => 16,
    "cloud9"    => 16,
    "github"    => 16,
    "dropbox"   => 16,
    "twitter"   => 16,
    "wikipedia" => 16,
    "apple"     => 16,
    "slashdot"  => 16,
    "skype"     => 16,
    "amazon"    => 16,
    "flipboard" => 16,
    "wlan"      => 16,
    "bank"      => 16,
    "home"      => 16,
    "company"   => 16
);

# Read a non-empty line from STDIN, re-prompting until one is given.
# Dies if STDIN is exhausted (EOF).
sub prompt
{
    my $pr = @_ ? shift @_ : "Enter value:";
    my $line;
    while( !defined($line) || ($line eq "") ) {
        print $pr;
        $line = <STDIN>;
        die "Read undefined value" if( !defined($line) );
        chomp $line;
        print "Need non-empty input\n" if( $line eq "" );
    }
    return $line;
}

if( !defined($site) ) {
    $site = prompt("Site:");
}
die "First parameter needs to be a non-empty string"
    if( !defined($site) || ($site eq "") );
print "Warning site $site might be misspelled\n"
    if( !defined( $site2defaultPWLength{$site} ) );

if( !defined($length) ) {
    if( defined( $site2defaultPWLength{$site} ) ) {
        $length = $site2defaultPWLength{$site};
        print "Using default password length of $length defined for site \"$site\"\n";
    }
    else {
        $length = prompt("Length of generated password:");
    }
}

if( !defined($pw) ) {
    system('stty', '-echo');        # Disable echoing
    # BUGFIX: make sure echo is re-enabled even when prompt() dies
    # (e.g. EOF on STDIN); previously the terminal was left with echo off.
    $pw = eval { prompt("Master password:") };
    my $err = $@;
    system('stty', 'echo');         # Turn it back on
    print "\n";
    die $err if $err;
}

die "Need 3 parameter"
    if( !defined($site) || !defined($length) || !defined($pw) );
die "Second parameter is not a number"
    if( !defined($length) || ($length !~ /^\d+$/) );
die "Third parameter needs to be a non-empty string"
    if( !defined($pw) || ($pw eq "") );

# Interpret the first 4 bytes of a binary string as a big-endian uint32.
sub blob2u32
{
    my $str = @_ ? shift @_ : undef;
    return unpack( "N", substr($str, 0, 4) );
}

# Derive the password: iterate sha256(previous_digest . master_password),
# mapping 32 bits of each digest onto one character of the alphabet.
my $a = sha256( $site . $pw );
print "DEBUG a $a\n" if($DEBUG);
while($length > 0) {
    my $b = sha256( $a . $pw );
    print "DEBUG b $b\n" if($DEBUG);
    print substr($printable, blob2u32($b) % $printable_len, 1);
    $a = $b;
    $length--;
}
print "\n";
exit 0;
steffzahn/test
pwgen256.pl
Perl
apache-2.0
2,680
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

require 5.6.0;
use strict;
use warnings;

use Thrift;
use Thrift::Transport;

package Thrift::BufferedTransport;
{
  $Thrift::BufferedTransport::VERSION = '0.9.0';
}
use base('Thrift::Transport');

# Transport decorator that buffers writes to a wrapped transport.
# Outgoing bytes accumulate in an in-memory buffer and are pushed to the
# underlying transport once wBufSize bytes are queued, or on flush().
# Reads are delegated straight through; rBuf/rBufSize are kept for
# interface compatibility but are currently unused.

# Constructor.
#   $transport - underlying Thrift::Transport to wrap
#   $rBufSize  - read buffer size in bytes (default 512; currently unused)
#   $wBufSize  - write buffer size in bytes (default 512)
sub new {
    my $classname = shift;
    my $transport = shift;
    my $rBufSize  = shift || 512;
    my $wBufSize  = shift || 512;

    my $self = {
        transport => $transport,
        rBufSize  => $rBufSize,
        wBufSize  => $wBufSize,
        wBuf      => '',
        rBuf      => '',
    };

    return bless($self, $classname);
}

# True when the wrapped transport reports itself open.
sub isOpen
{
    my $self = shift;
    return $self->{transport}->isOpen();
}

# Open the wrapped transport.
sub open
{
    my $self = shift;
    $self->{transport}->open();
}

# Close the wrapped transport.
# FIX: dropped the empty "()" prototype the original declared on this
# sub - prototypes are ignored for method calls and only mislead.
sub close
{
    my $self = shift;
    $self->{transport}->close();
}

# Read exactly $len bytes (delegated to the wrapped transport).
sub readAll
{
    my $self = shift;
    my $len  = shift;
    return $self->{transport}->readAll($len);
}

# Read up to $len bytes.  Perl's own I/O layer already buffers reads, so
# this simply delegates.  (Removed an unused local $ret variable.)
sub read
{
    my $self = shift;
    my $len  = shift;
    return $self->{transport}->read($len);
}

# Append $buf to the write buffer; spill the buffer to the wrapped
# transport once it reaches wBufSize bytes.
sub write
{
    my $self = shift;
    my $buf  = shift;
    $self->{wBuf} .= $buf;
    if (length($self->{wBuf}) >= $self->{wBufSize}) {
        $self->{transport}->write($self->{wBuf});
        $self->{wBuf} = '';
    }
}

# Push any buffered bytes down, then flush the wrapped transport.
sub flush
{
    my $self = shift;
    if (length($self->{wBuf}) > 0) {
        $self->{transport}->write($self->{wBuf});
        $self->{wBuf} = '';
    }
    $self->{transport}->flush();
}

#
# BufferedTransport factory creates buffered transport objects from transports
#
package Thrift::BufferedTransportFactory;
{
  $Thrift::BufferedTransportFactory::VERSION = '0.9.0';
}

sub new {
    my $classname = shift;
    my $self      = {};

    return bless($self, $classname);
}

#
# Build a buffered transport from the base transport
#
# @return Thrift::BufferedTransport transport
#
sub getTransport
{
    my $self  = shift;
    my $trans = shift;

    return Thrift::BufferedTransport->new($trans);
}

1;
gitpan/Thrift
lib/Thrift/BufferedTransport.pm
Perl
apache-2.0
2,789
=head1 LICENSE

Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

=cut

package XrefParser::IxodesCAPParser;

use strict;
use warnings;
use Carp;
use File::Basename;

use base qw( XrefParser::BaseParser );

# Ixodes FASTA record layout:
#>ISCW800184-PA|GR35|Gustatory receptor|hughrobe|scaffold:IscaW1:DS849364:15863:44694:1
#MTFAYSQFRYSTRLLRWGGVWIVAEATNPGKQSFKTTLKRPYFWYCVLCLSTLVGTEFGN
#IIWALLFSFKHRKVFVSGVYTATQITVLVKTMLSSLMVALAAGRLKKLVARANQFEIIRN
#IKIAPRSKKVTWRDIRIWGRVLFMVLFVSIRNMDNLSILDVENIFGLGALVVVMTASSML

# Parse an Ixodes CAP FASTA file and upload one peptide xref per record.
# Returns 0 on success, 1 when the file cannot be opened.
sub run {

  my ($self, $ref_arg) = @_;
  my $source_id    = $ref_arg->{source_id};
  my $species_id   = $ref_arg->{species_id};
  my $files        = $ref_arg->{files};
  my $release_file = $ref_arg->{rel_file};
  my $verbose      = $ref_arg->{verbose};

  if((!defined $source_id) or (!defined $species_id) or (!defined $files) or (!defined $release_file)){
    croak "Need to pass source_id, species_id, files and rel_file as pairs";
  }
  $verbose |= 0;

  my $file = @{$files}[0];

  my @xrefs;

  # Chunk the input on record boundaries ("\n>") so each read returns one
  # complete FASTA record.
  local $/ = "\n>";

  my $file_io = $self->get_filehandle($file);
  if ( !defined $file_io ) {
    print STDERR "Could not open $file\n";
    return 1;
  }

  RECORD:
  while ( defined( my $record = $file_io->getline() ) ) {

    # BUGFIX: the original had "next if (/^File:/);" *outside* this loop,
    # at the top of the sub, testing an uninitialized $_ - a runtime
    # "next outside a loop" hazard.  The header-skip belongs here.
    next RECORD if $record =~ /^File:/;   # skip header

    my ($header, $sequence) = $record =~ /^>?(.+?)\n([^>]*)/s;
    if ( !defined $header ) {
      warn("Can't parse FASTA entry: $record\n");
      # BUGFIX: previously execution fell through and pushed an xref
      # built entirely from undef fields; skip the bad record instead.
      next RECORD;
    }

    # deconstruct header - just use first part
    my ($accession, $symbol, $description, $submitter, $position) =
      split /\|/, $header;

    # Fall back to the accession when no symbol was supplied (guard
    # against undef as well as the empty string).
    if ( !defined $symbol || $symbol eq "" ) {
      $symbol = "$accession";
    }

    # make sequence into one long string
    $sequence =~ s/\n//g;

    # build the xref object and store it
    my $xref;
    $xref->{ACCESSION}     = $accession;
    $xref->{LABEL}         = $symbol;
    $xref->{DESCRIPTION}   = $description;
    $xref->{SEQUENCE}      = $sequence;
    $xref->{SOURCE_ID}     = $source_id;
    $xref->{SPECIES_ID}    = $species_id;
    $xref->{SEQUENCE_TYPE} = 'peptide';
    $xref->{STATUS}        = 'manual annotation';
    push @xrefs, $xref;

  }

  $file_io->close();

  $self->upload_xref_object_graphs(\@xrefs);

  print scalar(@xrefs) . " Ixodes CAP xrefs succesfully parsed\n" if($verbose);

  return 0;
}

1;
Ensembl/ensembl
misc-scripts/xref_mapping/XrefParser/IxodesCAPParser.pm
Perl
apache-2.0
2,818
# Request class for the EC2 DeleteVpnConnectionRoute API call.
# NOTE(review): Paws classes under auto-lib/ are normally auto-generated
# from the AWS service definitions - confirm before editing by hand.

package Paws::EC2::DeleteVpnConnectionRoute;
  use Moose;
  # CIDR block of the static route to delete (required by the API).
  has DestinationCidrBlock => (is => 'ro', isa => 'Str', required => 1);
  # VPN connection the route belongs to (required by the API).
  has VpnConnectionId => (is => 'ro', isa => 'Str', required => 1);

  use MooseX::ClassAttribute;

  # Wire-level metadata consumed by the Paws caller machinery: the API
  # operation name and the class used to wrap the (empty) response.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'DeleteVpnConnectionRoute');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::API::Response');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;

### main pod documentation begin ###

=head1 NAME

Paws::EC2::DeleteVpnConnectionRoute - Arguments for method DeleteVpnConnectionRoute on Paws::EC2

=head1 DESCRIPTION

This class represents the parameters used for calling the method DeleteVpnConnectionRoute on the
Amazon Elastic Compute Cloud service. Use the attributes of this class
as arguments to method DeleteVpnConnectionRoute.

You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to DeleteVpnConnectionRoute.

As an example:

  $service_obj->DeleteVpnConnectionRoute(Att1 => $value1, Att2 => $value2, ...);

Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.

=head1 ATTRIBUTES

=head2 B<REQUIRED> DestinationCidrBlock => Str

The CIDR block associated with the local subnet of the customer
network.

=head2 B<REQUIRED> VpnConnectionId => Str

The ID of the VPN connection.

=head1 SEE ALSO

This class forms part of L<Paws>, documenting arguments for method DeleteVpnConnectionRoute in L<Paws::EC2>

=head1 BUGS and CONTRIBUTIONS

The source code is located here: https://github.com/pplu/aws-sdk-perl

Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues

=cut
ioanrogers/aws-sdk-perl
auto-lib/Paws/EC2/DeleteVpnConnectionRoute.pm
Perl
apache-2.0
1,865
#! /usr/bin/env perl

use strict;
use warnings;

use JSON;
use Digest::MD5 qw(md5_hex);
use Parallel::Forker;
use Fcntl qw(:flock);

# Lock file serialising warn() output and the signal-abort check across
# forked workers.
my $lockfile = "xml-check.lock";

# Child-side worker: validate one XML file with xmllint and record the
# result (file size + exit status) in a per-file JSON log named after the
# MD5 of the path.
sub run {
    my ($f) = @_;

    warn ((localtime time)."\nChecking $f [$$]\n");
    # List-form system: no shell is involved, so filenames containing
    # spaces or shell metacharacters cannot be mangled or injected
    # (the old form interpolated $f into a shell command line).
    my $x = system('xmllint', '--stream', $f);

    open(my $lock, '>', $lockfile) or die "Cannot open $lockfile: $!";
    flock($lock, LOCK_EX);
    # A non-zero status below 128 means xmllint was killed by a signal
    # (normal failures are exit-code << 8): abort the whole run.
    die "QUIT ON SIGNAL\n" if $x and $x < 128;
    if ($x) {
        warn "ERROR: BAD XML IN $f\n";
    }
    flock($lock, LOCK_UN);
    close $lock;

    open(my $out, '>', "xml-check-".md5_hex($f).".log")
        or die "Cannot write result log for $f: $!";
    print $out JSON->new->encode({ size => -s $f, status => $x });
    close $out or die "Cannot close result log for $f: $!";
}

# Parent-side running totals.
my ($naughty, $nice, $size) = (0, 0, 0);
my $start = time;

# Parent-side callback: pick up a worker's JSON result file, fold it into
# the totals and print a progress line.
sub collect {
    my ($f) = @_;

    my $fn = "xml-check-".md5_hex($f).".log";
    my $raw = "";
    open(my $in, '<', $fn) or die "Cannot read $fn: $!";
    while (<$in>) { $raw .= $_; }
    close $in;
    unlink $fn;

    my $data = JSON->new->decode($raw);
    if ($data->{'status'}) { $naughty++; } else { $nice++; }
    $size += $data->{'size'};

    open(my $lock, '>', $lockfile) or die "Cannot open $lockfile: $!";
    flock($lock, LOCK_EX);
    my $elapsed = time - $start + 1;
    # Guard the seconds-per-Gb figure against division by zero when only
    # empty files have been processed so far (old code crashed here).
    my $s_per_gb = $size ? 1024*1024*1024*$elapsed/$size : 0;
    warn sprintf("failed=%d passed=%d processed=%dMb rate=%dMb/s (%ds/Gb)\n",
                 $naughty, $nice, $size/1024/1024, $size/$elapsed/1024/1024,
                 $s_per_gb);
    flock($lock, LOCK_UN);
    close $lock;
}

my $forker = Parallel::Forker->new( use_sig_chld => 1, max_proc => 8 );
$SIG{CHLD} = sub { Parallel::Forker::sig_child($forker); };
$SIG{TERM} = sub {
    $forker->kill_tree_all('TERM') if $forker && $forker->in_parent;
    die "Quitting...\n";
};

# List-form '-|' open runs find directly, bypassing the shell.
open(my $files_fh, '-|', 'find', 'input', '-name', '*.xml')
    or die "find failed: $!";
while (my $f = <$files_fh>) {
    chomp $f;
    $forker->schedule(
        run_on_start  => sub { run($f); },
        run_on_finish => sub { collect($f); }
    )->ready();
}
close $files_fh;
$forker->wait_all();

1;
Ensembl/ensembl-webcode
utils/indexing/xmlChecker.pl
Perl
apache-2.0
1,813
# Shape class for the ElastiCache "EC2SecurityGroup" API structure.
# NOTE(review): Paws classes under auto-lib/ are normally auto-generated
# from the AWS service definitions - confirm before editing by hand.

package Paws::ElastiCache::EC2SecurityGroup;
  use Moose;
  # Name of the referenced Amazon EC2 security group.
  has EC2SecurityGroupName => (is => 'ro', isa => 'Str');
  # AWS account ID that owns the EC2 security group.
  has EC2SecurityGroupOwnerId => (is => 'ro', isa => 'Str');
  # Authorization status of this group association.
  has Status => (is => 'ro', isa => 'Str');
1;

### main pod documentation begin ###

=head1 NAME

Paws::ElastiCache::EC2SecurityGroup

=head1 USAGE

This class represents one of two things:

=head3 Arguments in a call to a service

Use the attributes of this class as arguments to methods. You shouldn't make instances of this class. 
Each attribute should be used as a named argument in the calls that expect this type of object.

As an example, if Att1 is expected to be a Paws::ElastiCache::EC2SecurityGroup object:

  $service_obj->Method(Att1 => { EC2SecurityGroupName => $value, ..., Status => $value });

=head3 Results returned from an API call

Use accessors for each attribute. If Att1 is expected to be an Paws::ElastiCache::EC2SecurityGroup object:

  $result = $service_obj->Method(...);
  $result->Att1->EC2SecurityGroupName

=head1 DESCRIPTION

Provides ownership and status information for an Amazon EC2 security
group.

=head1 ATTRIBUTES

=head2 EC2SecurityGroupName => Str

The name of the Amazon EC2 security group.

=head2 EC2SecurityGroupOwnerId => Str

The AWS account ID of the Amazon EC2 security group owner.

=head2 Status => Str

The status of the Amazon EC2 security group.

=head1 SEE ALSO

This class forms part of L<Paws>, describing an object used in L<Paws::ElastiCache>

=head1 BUGS and CONTRIBUTIONS

The source code is located here: https://github.com/pplu/aws-sdk-perl

Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues

=cut
ioanrogers/aws-sdk-perl
auto-lib/Paws/ElastiCache/EC2SecurityGroup.pm
Perl
apache-2.0
1,671
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Hardware-check component: parses the "System Temperatures" table out of
# the 'showenvironment' output held in $self->{stdout} and reports one
# status per temperature sensor.
package hardware::server::sun::mgmt_cards::components::showenvironment::temperature;

use strict;
use warnings;

# check($self): scan the temperature table, apply section/instance filters,
# and raise a non-OK status for any sensor whose reported state maps to a
# non-OK severity. Mutates $self->{components} and $self->{output}.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking temperatures");
    $self->{components}->{temperature} = {name => 'temperatures', total => 0, skip => 0};
    return if ($self->check_filter(section => 'temperature'));

    # Capture the table body: skip the "System Temperatures" banner plus
    # three header/separator lines, then grab everything up to the first
    # blank line.
    if ($self->{stdout} =~ /^System Temperatures.*?\n.*?\n.*?\n.*?\n(.*?)\n\n/ims && defined($1)) {
        #Sensor           Status    Temp LowHard LowSoft LowWarn HighWarn HighSoft HighHard
        #--------------------------------------------------------------------------------
        #MB.P0.T_CORE      OK       62    --      --      --       88       93      100
        foreach (split(/\n/, $1)) {
            # Row layout: sensor name, then status terminated by a run of
            # at least two spaces. A failed match leaves $1/$2 from a
            # previous match, hence the 'next' guard comes first.
            next if (! /^([^\s]+)\s+([^\s].*?)\s{2}/);
            my $sensor_status = defined($2) ? $2 : 'unknown';
            my $sensor_name = defined($1) ? $1 : 'unknown';

            next if ($self->check_filter(section => 'temperature', instance => $sensor_name));
            $self->{components}->{temperature}->{total}++;

            $self->{output}->output_add(long_msg => "System Temperature Sensor '" . $sensor_name . "' is " . $sensor_status);
            # Map the textual sensor state onto a plugin severity; only
            # surface a short message when it is not OK.
            my $exit = $self->get_severity(section => 'temperature', value => $sensor_status);
            if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
                $self->{output}->output_add(severity => $exit,
                                            short_msg => "System Temperature Sensor '" . $sensor_name . "' is " . $sensor_status);
            }
        }
    }
}

1;
centreon/centreon-plugins
hardware/server/sun/mgmt_cards/components/showenvironment/temperature.pm
Perl
apache-2.0
2,443
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

package Google::Ads::GoogleAds::V10::Services::CampaignDraftService::CampaignDraftOperation;

use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);

use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;

# Constructor: copies the recognized operation fields (create / remove /
# update / updateMask) out of %$args, prunes any that the caller did not
# supply, and returns the blessed operation object.
sub new {
  my ($class, $args) = @_;

  my $self = {};
  $self->{$_} = $args->{$_} for qw(create remove update updateMask);

  # Delete the unassigned fields in this object for a more concise JSON payload
  remove_unassigned_fields($self, $args);

  return bless $self, $class;
}

1;
googleads/google-ads-perl
lib/Google/Ads/GoogleAds/V10/Services/CampaignDraftService/CampaignDraftOperation.pm
Perl
apache-2.0
1,165
=head1 LICENSE

Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.

=cut

=head1 CONTACT

Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.

Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.

=cut

=head1 NAME

Bio::EnsEMBL::DBSQL::StrainSliceAdaptor - adaptor/factory for MappedSlices
representing alternative assemblies

=head1 SYNOPSIS

  my $slice = $slice_adaptor->fetch_by_region( 'chromosome', 14, 900000, 950000 );

  my $msc = Bio::EnsEMBL::MappedSliceContainer->new(-SLICE => $slice);

  # create a new strain slice adaptor and attach it to the MSC
  my $ssa = Bio::EnsEMBL::DBSQL::StrainSliceAdaptor->new($sa->db);
  $msc->set_StrainSliceAdaptor($ssa);

  # now attach strain
  $msc->attach_StrainSlice('Watson');

=head1 DESCRIPTION

NOTE: this code is under development and not fully functional nor tested
yet.  Use only for development.

This adaptor is a factory for creating MappedSlices representing
strains and attaching them to a MappedSliceContainer. A mapper will be created
to map between the reference slice and the common container slice coordinate
system.

=head1 METHODS

  new
  fetch_by_name

=head1 RELATED MODULES

  Bio::EnsEMBL::MappedSlice
  Bio::EnsEMBL::MappedSliceContainer
  Bio::EnsEMBL::AlignStrainSlice
  Bio::EnsEMBL::StrainSlice

=cut

package Bio::EnsEMBL::Variation::DBSQL::StrainSliceAdaptor;

use strict;
use warnings;
# NOTE(review): undef arithmetic is relied on below (first-iteration
# $end_strain in the indel branch), so uninitialized warnings are
# deliberately disabled for the whole package.
no warnings 'uninitialized';

use Bio::EnsEMBL::Variation::DBSQL::BaseAdaptor;
use Bio::EnsEMBL::Variation::StrainSlice;
use Bio::EnsEMBL::Utils::Argument qw(rearrange);
use Bio::EnsEMBL::Utils::Exception qw(throw warning);
use Bio::EnsEMBL::MappedSlice;
use Bio::EnsEMBL::Mapper;

our @ISA = qw(Bio::EnsEMBL::Variation::DBSQL::BaseAdaptor);

=head2 new

  Example     : my $strain_slice_adaptor = Bio::EnsEMBL::Variation::DBSQL::StrainSliceAdaptor->new;
  Description : Constructor.
  Return type : Bio::EnsEMBL::Variation::DBSQL::StrainSliceAdaptor
  Exceptions  : none
  Caller      : general
  Status      : At Risk
              : under development

=cut

sub new {
  my $caller = shift;
  my $class = ref($caller) || $caller;
  # all state lives in the BaseAdaptor superclass
  my $self = $class->SUPER::new(@_);
  return $self;
}

=head2 fetch_by_name

  Arg[1]      : Bio::EnsEMBL::MappedSliceContainer $container - the container
                  to attach MappedSlices to
  Arg[2]      : String $name - the name of the strain to fetch
  Example     : my ($mapped_slice) = @{ $msc->fetch_by_name('Watson') };
  Description: Creates a MappedSlice representing a version of the
               container's reference slice with variant alleles from the
               named strain
  Return type: listref of Bio::EnsEMBL::MappedSlice
  Exceptions : thrown on wrong or missing arguments
  Caller     : general, Bio::EnsEMBL::MappedSliceContainer
  Status     : At Risk
             : under development

=cut

sub fetch_by_name {
  my $self = shift;
  my $container = shift;
  my $name = shift;

  # argument check
  unless ($container and ref($container) and
          $container->isa('Bio::EnsEMBL::MappedSliceContainer')) {
    throw("Need a MappedSliceContainer.");
  }

  unless ($name) {
    throw("Need a strain name.");
  }

  my $slice = $container->ref_slice;

  my $af_adaptor = $self->db->get_AlleleFeatureAdaptor;
  my $sample_adaptor = $self->db->get_SampleAdaptor;

  # fetch sample object for this strain name
  my $sample = shift @{$sample_adaptor->fetch_all_by_name($name)};

  # check we got a result
  # NOTE(review): returns '' rather than a listref here - callers doing
  # @{...} on the result will die; confirm intended contract.
  unless(defined $sample) {
    warn("Strain $name not found in the database");
    return '';
  }

  ## MAP STRAIN SLICE TO REF SLICE
  ################################

  # create a mapper between the strain coordinate system and the
  # reference slice coordinate system
  my $mapper = Bio::EnsEMBL::Mapper->new('mapped_slice', 'ref_slice');

  # create a mapped_slice object
  my $mapped_slice = Bio::EnsEMBL::MappedSlice->new(
    -ADAPTOR   => $self,
    -CONTAINER => $container,
    -NAME      => $slice->name . "\#strain_$name",
  );

  # get the strain slice
  my $strain_slice = $self->get_by_strain_Slice($sample->name, $slice);

  # get all allele features for this slice and sample
  #my @afs = sort {$a->start() <=> $b->start()} @{$af_adaptor->fetch_all_by_Slice($slice, $sample)};

  # get allele features with coverage info
  my $afs = $strain_slice->get_all_AlleleFeatures_Slice(1);

  # check we got some data
  #warning("No strain genotype data available for slice ".$slice->name." and strain ".$sample->name) if ! defined $afs[0];

  my $start_slice = $slice->start;
  my $start_strain = 1;
  my $sr_name = $slice->seq_region_name;
  #my $sr_name = 'ref_slice';
  my ($end_slice, $end_strain, $allele_length);

  my $indel_flag = 0;
  my $total_length_diff = 0;

  # check for AFs; only indels (length_diff != 0) affect the mapping
  if (defined($afs) && scalar @$afs) {

    # go through each AF
    foreach my $af(@$afs) {

      # find out if it changes the length of the slice
      if ($af->length_diff != 0) {

        $indel_flag = 1;
        $total_length_diff += $af->length_diff if ($af->length_diff > 0);

        # get the allele length
        $allele_length = $af->length + $af->length_diff();

        $end_slice = $slice->start + $af->start() - 2;

        if ($end_slice >= $start_slice){
          $end_strain = $end_slice - $start_slice + $start_strain;

          #add the sequence that maps
          $mapper->add_map_coordinates(
            'mapped_slice', $start_strain, $end_strain, 1,
            $sr_name, $start_slice, $end_slice);

          #add the indel
          $mapper->add_indel_coordinates(
            'mapped_slice', $end_strain + 1, $end_strain + $allele_length, 1,
            $sr_name, $end_slice + 1, $end_slice + $af->length);

          $start_strain = $end_strain + $allele_length + 1;
          $start_slice = $end_slice + $af->length + 1;
        }
        else {
          # NOTE(review): $end_strain here still holds the value from the
          # previous iteration (undef on the first pass - see the
          # package-level "no warnings 'uninitialized'").

          #add the indel
          $mapper->add_indel_coordinates(
            'mapped_slice', $end_strain + 1, $end_strain + $allele_length, 1,
            $sr_name, $end_slice + 1, $end_slice + $af->length);

          $start_strain += $allele_length;
          $start_slice = $end_slice + $af->length + 1;
        }
      }
    }
  }

  # add the remaining coordinates (or the whole length if no indels found)
  $mapper->add_map_coordinates(
    'mapped_slice', $start_strain, $start_strain + ($slice->end - $start_slice), 1,
    $sr_name, $start_slice, $slice->end);

  # add the slice/mapper pair
  $mapped_slice->add_Slice_Mapper_pair($strain_slice, $mapper);

  ## MAP REF_SLICE TO CONTAINER SLICE
  ###################################

  # only needed when insertions made the strain slice longer than the
  # reference slice
  if($total_length_diff > 0) {

    # create a new mapper
    my $new_mapper = Bio::EnsEMBL::Mapper->new('ref_slice', 'container');

    # get existing pairs
    my @existing_pairs = $container->mapper->list_pairs('container', 1, $container->container_slice->length, 'container');
    my @new_pairs = $mapper->list_pairs('mapped_slice', 1, $strain_slice->length(), 'mapped_slice');

    # we need a list of indels (specifically inserts)
    my @indels;

    # go through existing first
    foreach my $pair(@existing_pairs) {
      if($pair->from->end - $pair->from->start != $pair->to->end - $pair->to->start) {
        my $indel;
        $indel->{'length_diff'} = ($pair->to->end - $pair->to->start) - ($pair->from->end - $pair->from->start);

        # we're only interested in inserts here, not deletions
        next unless $indel->{'length_diff'} > 0;

        $indel->{'ref_start'} = $pair->from->start;
        $indel->{'ref_end'} = $pair->from->end;
        $indel->{'length'} = $pair->from->end - $pair->from->start + 1;

        push @indels, $indel;
      }
    }

    # now new ones (from/to orientation is swapped relative to the
    # existing pairs, hence ref_* comes from ->to here)
    foreach my $pair (@new_pairs) {
      if ($pair->from->end - $pair->from->start != $pair->to->end - $pair->to->start) {
        my $indel;
        $indel->{'length_diff'} = (($pair->from->end - $pair->from->start) - ($pair->to->end - $pair->to->start));

        # we're only interested in inserts here, not deletions
        next unless $indel->{'length_diff'} > 0;

        $indel->{'ref_start'} = $pair->to->start;
        $indel->{'ref_end'} = $pair->to->end;
        $indel->{'length'} = $pair->to->end - $pair->to->start + 1;

        push @indels, $indel;
      }
    }

    # sort them
    @indels = sort {
      $a->{'ref_start'} <=> $b->{'ref_start'} ||  # by position
      $b->{'length_diff'} <=> $a->{'length_diff'} # then by length diff so we only keep the longest
    } @indels;

    # clean them: drop indels duplicated at the same reference span,
    # keeping the first (longest) one
    my @new_indels = ();

    my $p = $indels[0];
    push @new_indels, $indels[0] if scalar @indels;

    for my $i(1..$#indels) {
      my $c = $indels[$i];

      if ($c->{'ref_start'} != $p->{'ref_start'} && $c->{'ref_end'} != $p->{'ref_end'}) {
        push @new_indels, $c;
        $p = $c;
      }
    }

    $start_slice = $slice->start;
    $start_strain = 1;
    $sr_name = $slice->seq_region_name;

    foreach my $indel(@new_indels) {
      $end_slice = $indel->{'ref_start'} - 1;
      $end_strain = $start_strain + ($end_slice - $start_slice);

      $allele_length = $indel->{'length'} + $indel->{'length_diff'};

      $new_mapper->add_map_coordinates(
        $sr_name, $start_slice, $end_slice, 1,
        'container', $start_strain, $end_strain);

      $new_mapper->add_indel_coordinates(
        $sr_name, $end_slice + 1, $end_slice + $indel->{'length'}, 1,
        'container', $end_strain + 1, $end_strain + $allele_length);

      $start_strain = $end_strain + $allele_length + 1;
      $start_slice = $end_slice + $indel->{'length'} + 1;
    }

    $new_mapper->add_map_coordinates(
      $sr_name, $start_slice, $slice->end, 1,
      'container', $start_strain, $start_strain + ($slice->end - $start_slice));

    # replace the mapper with the new mapper
    $container->mapper($new_mapper);

    # change the container slice's length according to length diff
    $total_length_diff = abs $total_length_diff;
    $container->container_slice($container->container_slice->expand(undef, $total_length_diff, 1));
  }

  return [$mapped_slice];
}

# Build a StrainSlice sharing the underlying coordinates of $slice but
# carrying the given strain name.
sub get_by_strain_Slice {
  my $self = shift;
  my $strain_name = shift;
  my $slice = shift;

  return Bio::EnsEMBL::Variation::StrainSlice->new(
    -START             => $slice->{'start'},
    -END               => $slice->{'end'},
    -STRAND            => $slice->{'strand'},
    -ADAPTOR           => $slice->adaptor(),
    -SEQ               => $slice->{'seq'},
    -SEQ_REGION_NAME   => $slice->{'seq_region_name'},
    -SEQ_REGION_LENGTH => $slice->{'seq_region_length'},
    -COORD_SYSTEM      => $slice->{'coord_system'},
    -STRAIN_NAME       => $strain_name);
}

1;
Ensembl/ensembl-variation
modules/Bio/EnsEMBL/Variation/DBSQL/StrainSliceAdaptor.pm
Perl
apache-2.0
11,463
# OpenXPKI::Server::Workflow::Activity::Tools::PublishCRL
# Written by Oliver Welter for the OpenXPKI project 2012
# Copyright (c) 2012 by The OpenXPKI Project

package OpenXPKI::Server::Workflow::Activity::Tools::PublishCRL;

use strict;
use English;
use base qw( OpenXPKI::Server::Workflow::Activity );

use OpenXPKI::DN;
use OpenXPKI::Server::Context qw( CTX );
use OpenXPKI::Exception;
use OpenXPKI::Debug;
use OpenXPKI::Serialization::Simple;
use OpenXPKI::Crypt::X509;
use Data::Dumper;

# Publish one CRL (either an explicit crl_key or the latest for the
# issuer) to every connector configured below the 'prefix' config path,
# then stamp its publication_date in the database.
sub execute {
    ##! 1: 'start'
    my $self     = shift;
    my $workflow = shift;
    my $context  = $workflow->context();
    my $config        = CTX('config');
    my $pki_realm = CTX('session')->data->pki_realm;
    my $dbi = CTX('dbi');

    if (!$self->param('prefix')) {
        OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPI_WORKFLOW_ACTIVITY_TOOLS_PUBLISH_CRL_NO_PREFIX'
        );
    }

    my $default_token = CTX('api2')->get_default_token();
    my $prefix = $self->param('prefix');
    my $ca_alias = $context->param('ca_alias');
    # activity parameter is the fallback when the context has no serial
    my $crl_serial = $context->param('crl_serial');
    $crl_serial = $self->param('crl_serial') unless($crl_serial);

    if (!$ca_alias) {
        OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPI_WORKFLOW_ACTIVITY_TOOLS_PUBLISH_CRL_NO_CA_ALIAS'
        );
    }

    my $certificate = CTX('api2')->get_certificate_for_alias( 'alias' => $ca_alias );
    my $x509_issuer = OpenXPKI::Crypt::X509->new( $certificate->{data} );
    my $ca_identifier = $certificate->{identifier};

    if (!$crl_serial) {
        OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPI_WORKFLOW_ACTIVITY_TOOLS_PUBLISH_CRL_NO_CRL_SERIAL'
        );
    }

    my $crl;
    # auto detect the latest one
    if ($crl_serial eq 'latest') {

        # Load the crl data
        $crl = $dbi->select_one(
            from => 'crl',
            columns => [ '*' ],
            where => {
                pki_realm => $pki_realm,
                issuer_identifier => $ca_identifier
            },
            order_by => '-last_update',
        );

        # can happen for external CAs or if new tokens did not create a crl yet
        if (!$crl && $self->param('empty_ok')) {
            CTX('log')->system()->info("CRL publication skipped for $ca_identifier - no crl found");
            return;
        }

        # BUG FIX: this read $crl->{crk_key} (a non-existent column) and
        # left $crl_serial undef, which broke the log lines below and the
        # publication_date update's WHERE clause. The column is crl_key.
        $crl_serial = $crl->{crl_key};

    } else {
        # Load the crl data
        $crl = $dbi->select_one(
            from => 'crl',
            columns => [ '*' ],
            where => { crl_key => $crl_serial }
        );

        # a serial belonging to a different issuer is a config error
        if ($crl && $crl->{issuer_identifier} ne $ca_identifier) {
            OpenXPKI::Exception->throw(
                message => 'I18N_OPENXPI_WORKFLOW_ACTIVITY_TOOLS_PUBLISH_CRL_SERIAL_DOES_NOT_MATCH_ISSUER',
                params => {
                    CRL_SERIAL => $crl_serial,
                    PKI_REALM => $pki_realm,
                    ISSUER => $crl->{issuer_identifier},
                    EXPECTED_ISSUER => $ca_identifier
                }
            );
        }
    }

    ##! 16: "Start publishing - CRL Serial $crl_serial , ca alias $ca_alias"

    if (!$crl || !$crl->{data}) {
        OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPKI_SERVER_WORKFLOW_ACTIVITY_TOOLS_PUBLISH_CRL_UNABLE_TO_LOAD_CRL',
            params => { 'CRL_SERIAL' => $crl_serial },
        );
    }

    # split of group and generation from alias
    # NOTE(review): an alias not matching "<group>-<generation>" leaves
    # $1/$2 with stale values - aliases are assumed to follow that scheme.
    $ca_alias =~ /^(.*)-(\d+)$/;

    my $data = {
        pem => $crl->{data},
        alias => $ca_alias,
        group => $1,
        generation => $2,
    };

    # Convert to DER
    $data->{der} = $default_token->command({
        COMMAND => 'convert_crl',
        DATA    => $crl->{data},
        OUT     => 'DER',
    });

    if (!defined $data->{der} || $data->{der} eq '') {
        OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPKI_SERVER_WORKFLOW_ACTIVITY_TOOLS_PUBLISH_CRL_COULD_NOT_CONVERT_CRL_TO_DER',
        );
    }

    # Get Issuer Info from selected ca
    $data->{issuer} = $x509_issuer->subject_hash();
    $data->{subject} = $x509_issuer->get_subject();
    $data->{subject_key_identifier} = $x509_issuer->get_subject_key_id();

    my @target;
    my @prefix = split ( /\./, $prefix );

    # a pending retry queue takes precedence over the full target list
    if ( $context->param( 'tmp_publish_queue' ) ) {
        my $queue = $context->param( 'tmp_publish_queue' );
        ##! 16: 'Load targets from context queue'
        if (!ref $queue) {
            $queue = OpenXPKI::Serialization::Simple->new()->deserialize( $queue );
        }
        @target = @{$queue};
    } else {
        ##! 16: 'Load all targets'
        @target = $config->get_keys( \@prefix );
    }

    my $on_error = $self->param('on_error') || '';
    my @failed;
    ##! 32: 'Targets ' . Dumper \@target
    foreach my $target (@target) {
        eval{ $config->set( [ @prefix, $target, $data->{issuer}{CN}[0] ], $data ); };
        if (my $eval_err = $EVAL_ERROR) {
            CTX('log')->application()->debug("Publishing failed with $eval_err");
            if ($on_error eq 'queue') {
                push @failed, $target;
                CTX('log')->application()->info("CRL pubication failed for target $target, requeuing");
            } elsif ($on_error eq 'skip') {
                CTX('log')->application()->warn("CRL pubication failed for target $target and skip is set");
            } else {
                OpenXPKI::Exception->throw(
                    message => 'I18N_OPENXPKI_SERVER_WORKFLOW_ACTIVITY_PUBLICATION_FAILED',
                    params => {
                        TARGET => $target,
                        ERROR => $eval_err
                    }
                );
            }
        } else {
            CTX('log')->application()->debug("CRL pubication to $target for $crl_serial done");
        }
    }

    if (@failed) {
        $context->param( 'tmp_publish_queue' => \@failed );
        $self->pause('I18N_OPENXPKI_UI_ERROR_DURING_PUBLICATION');
        # pause stops execution of the remaining code
    }

    $context->param( { 'tmp_publish_queue' => undef });

    # Set the publication date in the database, only if not set already
    if (!$crl->{publication_date}) {
        $dbi->update(
            table => 'crl',
            set => { publication_date => DateTime->now()->epoch() },
            where => { crl_key => $crl_serial }
        );
        CTX('log')->system()->info("CRL pubication date set for crl $crl_serial");
    }

    ##! 4: 'end'
    return;
}

1;

__END__

=head1 Name

OpenXPKI::Server::Workflow::Activity::Tools::PublishCRLs

=head1 Description

This activity publishes a single crl. The context must hold the crl_serial
and the ca_alias parameters. I<crl_serial> can have the value "latest" which
will resolve to the crl with the highest last_update date for the issuer.
The data point you specify at prefix must contain a list of connectors. Each
connector is called with the CN of the issuing ca as location. The data
portion contains a hash ref with the keys I<pem>, I<der> and I<subject>
(issuer subject) holding the appropriate strings and I<issuer> which is the
issuer subject parsed into a hash as used in the template processing when
issuing the certificates.

There are several options to handle errors when the connectors fail,
details are given below (see I<on_error> parameter).

=head1 Configuration

=head2 Example

   publish_crl_action:
       class: OpenXPKI::Server::Workflow::Activity::Tools::PublishCRL
       prefix: publishing.crl

=head2 Activity parameters

=over

=item prefix

The config path where the connector configuration resides, in the default
configuration this is I<publishing.crl>.

=item on_error

Define what to do on problems with the publication connectors. One of:

=over

=item exception (default)

The connector exception bubbles up and the workflow terminates.

=item skip

Skip the publication target and continue with the next one.

=item queue

Similar to skip, but failed targets are added to a queue. As long as the
queue is not empty, pause/wake_up is used to retry those targets with the
retry parameters set. This obviously requires I<retry_count> to be set.

=back

=item crl_serial

The serial of the crl to publish or the keyword "latest" which pulls the
CRL with the latest last_update date for the given issuer. Only effective
if B<NOT> set in the context.

=item empty_ok

Boolean, only used in conjunction with crl_serial = latest. Will silently
skip publication if no CRL is found for the given issuer.

=back

=head2 Context parameters

=over

=item ca_alias

The alias name of the CA

=item crl_serial

The serial of the crl to publish or the keyword "latest" which pulls the
CRL with the latest last_update date for the given issuer.

=item tmp_publish_queue

Used to temporarily store unpublished targets when on_error is set.

=back

=head2 Data Source Configuration

At the configuration path given in the I<prefix> parameter, you must
provide a list of connectors:

  publishing:
    crl:
      repo1@: connector:....
      repo2@: connector:....

To publish the crl to your webserver, here is an example connector:

  cdp:
    class: Connector::Builtin::File::Path
    LOCATION: /var/www/myrealm/
    file: "[% ARGS %].crl"
    content: "[% pem %]"

The ARGS placeholder is replaced with the CN part of the issuing ca. So if
you name your ca generations as "ServerCA-1" and "ServerCA-2", you will end
up with two crls at "http://myhost/myrealm/ServerCA-1.crl" resp.
"http://myhost/myrealm/ServerCA-2.crl"
oliwel/openxpki
core/server/OpenXPKI/Server/Workflow/Activity/Tools/PublishCRL.pm
Perl
apache-2.0
9,551
#!/usr/bin/perl

use strict;
use warnings;

# Convert one dependency-annotated token line.
# Input is either 5 columns: index token pos parent label
# or 4 columns where token and POS are fused as "token/pos".
# Output is "token label index-1 parent"; the 1-based token index is
# converted to 0-based. Returns undef when the line matches neither
# layout (format error).
sub convert_tokens {
    my ($line) = @_;

    my @words = split /\s+/, $line;
    return undef if (scalar(@words) != 5 and scalar(@words) != 4);

    if (scalar(@words) == 4) {
        # Split the fused "token/pos" column on its LAST slash; a bare
        # leading slash means the token itself is "/".
        if ($words[1] =~ /^(.*)\/(.*)$/) {
            $words[4] = $words[3];
            $words[3] = $words[2];
            $words[2] = $2;
            if ($1 eq "") {
                $words[1] = "/";
            }
            else {
                $words[1] = $1;
            }
        }
        else {
            # BUG FIX: a 4-column line without a '/' used to report a
            # format error but then still print a token built from
            # undefined fields; it is now treated as a format error only.
            return undef;
        }
    }

    my $current = $words[0] - 1;
    return $words[1]." ".$words[4]." ".$current." ".$words[3];
}

my $line_num = 0;
my $first = 0;    # tokens printed so far on the current output sentence

while (my $line = <STDIN>) {
    ++$line_num;
    $line =~ s/[\r\n]//g;

    # Blank line: sentence boundary.
    if ($line =~ /^$/) {
        print "\n";
        $first = 0;
        next;
    }

    my $token = convert_tokens($line);
    if (!defined $token) {
        print STDERR "\nformat error\n";
        print STDERR "$line $line_num\n";
    }
    else {
        # space-separate tokens within a sentence
        print " " if ($first != 0);
        print $token;
        ++$first;
    }

    if ($line_num % 10000 == 0) {
        print STDERR "\r$line_num sentences";
    }
}
print STDERR "\r$line_num sentences";
liqiangnlp/LiNMT
scripts/knowledge.aware/dependency.tree/LiNMT-convert-dp-with-word.pl
Perl
mit
1,029
#!/usr/bin/perl
# Intentionally empty fixture script: the trailing true value lets it be
# loaded successfully via do/require in the test suite.
1;
waterkip/test-package-perl
t/bin/foo.pl
Perl
bsd-2-clause
20
=head1 SHVectorToCilm

=over 17

=item SHVectorToCilm -  

Convert a 1-dimensional indexed vector of real spherical harmonic coefficients to a three-dimensional array.

=back

=head1 SYNOPSIS

=over 28

=item SUBROUTINE SHVectorToCilm ( VECTOR, CILM, LMAX )

=back

=over 4

=over 20

=item REAL*8	

VECTOR( (LMAX+1)**2 ), CILM(2, LMAX+1, LMAX+1)

=item INTEGER	

LMAX

=back

=back

=head1 DESCRIPTION

I<SHVectorToCilm> will convert a 1-dimensional indexed vector of real spherical harmonic coefficients to a three-dimensional array. The degree L, order M, and I (1 = cosine, 2 = sine) correspond to the index 1 + L**2 + (I-1)*L + M.

=head1 ARGUMENTS

=over 13

=item VECTOR

(input) REAL*8, DIMENSION ( (LMAX+1)**2 )

The input 1-D indexed array of real spherical harmonic coefficients.

=item CILM

(output) REAL*8, DIMENSION (2, LMAX+1, LMAX+1)

The 3-D array of output real spherical harmonic coefficients.

=item LMAX

(input) INTEGER

The maximum degree of the output coefficients.

=back

=head1 SEE ALSO

shvectortocilm(1), yilmindex(1), shctor(1), shrtoc(1), shcilmtocindex(1), shcindextocilm(1)

L<http://www.ipgp.jussieu.fr/~wieczor/SHTOOLS/SHTOOLS.html>

=head1 COPYRIGHT AND LICENSE

Copyright 2009 by Mark Wieczorek <wieczor@ipgp.fr>.

This is free software; you can distribute and modify it under the terms of the revised BSD license.
pletnes/SHTOOLS
src/doc/shvectortocilm.pod
Perl
bsd-3-clause
1,361
#-----------------------------------------------------------
# eventlogs.pl
# Author: Don C. Weber
# Plugin for Registry Ripper; Access System hive file to get the
# Event Log settings from System hive
#
# Change history
#
#
# References
#   Eventlog Key: http://msdn.microsoft.com/en-us/library/aa363648(VS.85).aspx
#
# Author: Don C. Weber, http://www.cutawaysecurity.com/blog/cutaway-security
#-----------------------------------------------------------
package eventlogs;
use strict;

my %config = (hive          => "System",
              hasShortDescr => 1,
              hasDescr      => 0,
              hasRefs       => 0,
              osmask        => 22,
              version       => 20081219);

sub getConfig{return %config}
sub getShortDescr {
	return "Gets Event Log settings from System hive";
}
sub getDescr{}
sub getRefs {}
sub getHive {return $config{hive};}
sub getVersion {return $config{version};}

my $VERSION = getVersion();

# Entry point called by the RegRipper framework with the hive path.
# Reports the Eventlog service configuration and the settings of every
# individual event log under it.
sub pluginmain {
	my $class = shift;
	my $hive = shift;
	::logMsg("Launching eventlogs v.".$VERSION);
	::rptMsg("eventlogs v.".$VERSION); # banner
	::rptMsg("(".getHive().") ".getShortDescr()."\n"); # banner
	my $reg = Parse::Win32Registry->new($hive);
	my $root_key = $reg->get_root_key;

	# First thing to do is get the ControlSet00x marked current...this is
	# going to be used over and over again in plugins that access the system
	# file
	my $current;
	my $key_path = 'Select';
	my $key;
	if ($key = $root_key->get_subkey($key_path)) {
		$current = $key->get_value("Current")->get_data();
		my $ccs = "ControlSet00".$current;

		my $win_path = $ccs."\\Services\\Eventlog";
		my $win;
		if ($win = $root_key->get_subkey($win_path)) {
			::rptMsg("EventLog Configuration");
			::rptMsg($win_path);
			::rptMsg("LastWrite Time ".gmtime($win->get_timestamp())." (UTC)");

			# BUG FIX: get_value() returns undef when the value is absent;
			# calling ->get_data() on it unguarded crashed the plugin.
			my $cn_value = $win->get_value("ComputerName");
			if (defined $cn_value && defined $cn_value->get_data()) {
				::rptMsg("ComputerName = ".$cn_value->get_data());
			}
			else {
				::rptMsg("ComputerName value not found.");
			}

			# BUG FIX: this loop used to run even when the Eventlog key was
			# missing ($win undef); it now only runs inside the success branch.
			# Cycle through each type of log
			foreach my $logname ($win->get_list_of_subkeys()) {
				::rptMsg("");
				my $evpath = $win_path."\\".$logname->get_name();
				my $evlog;
				if ($evlog = $root_key->get_subkey($evpath)) {
					::rptMsg("  ".$logname->get_name()." EventLog");
					::rptMsg("  ".$evpath);
					::rptMsg("  LastWrite Time ".gmtime($evlog->get_timestamp())." (UTC)");
					::rptMsg("  Configuration Settings");
					::rptMsg("    Log location: ".$evlog->get_value('File')->get_data());
					::rptMsg("    Log Size: ".$evlog->get_value('MaxSize')->get_data()." Bytes");
					# BUG FIX: the old code compared the Value *object*
					# numerically (its address, never 0), so every log was
					# reported as "Enabled"; compare the value's data instead.
					my $autobackup = $evlog->get_value('AutoBackupLogFiles');
					if (defined $autobackup) {
						($autobackup->get_data() == 0x0)
							? ::rptMsg("    AutoBackupLogFiles is Disabled")
							: ::rptMsg("    AutoBackupLogFiles is Enabled");
					}
					else {
						::rptMsg("    AutoBackupLogFiles value not found.");
					}
				}
				else {
					::rptMsg($logname->get_name()." Event Log not found.");
				}
			}
		}
		else {
			::rptMsg($win_path." not found.");
		}

		::rptMsg("");
		::rptMsg("Analysis Tips: For Event Log settings information check: http://msdn.microsoft.com/en-us/library/aa363648(VS.85).aspx");
	}
	else {
		::rptMsg($key_path." not found.");
		::logMsg($key_path." not found.");
	}
}
1;
raman-bt/autopsy
RecentActivity/release/rr-full/plugins/eventlogs.pl
Perl
apache-2.0
3,255
=pod =head1 NAME dsaparam - DSA parameter manipulation and generation =head1 SYNOPSIS B<openssl dsaparam> [B<-inform DER|PEM>] [B<-outform DER|PEM>] [B<-in filename>] [B<-out filename>] [B<-noout>] [B<-text>] [B<-C>] [B<-rand file(s)>] [B<-genkey>] [B<-engine id>] [B<numbits>] =head1 DESCRIPTION This command is used to manipulate or generate DSA parameter files. =head1 OPTIONS =over 4 =item B<-inform DER|PEM> This specifies the input format. The B<DER> option uses an ASN1 DER encoded form compatible with RFC2459 (PKIX) DSS-Parms that is a SEQUENCE consisting of p, q and g respectively. The PEM form is the default format: it consists of the B<DER> format base64 encoded with additional header and footer lines. =item B<-outform DER|PEM> This specifies the output format, the options have the same meaning as the B<-inform> option. =item B<-in filename> This specifies the input filename to read parameters from or standard input if this option is not specified. If the B<numbits> parameter is included then this option will be ignored. =item B<-out filename> This specifies the output filename parameters to. Standard output is used if this option is not present. The output filename should B<not> be the same as the input filename. =item B<-noout> this option inhibits the output of the encoded version of the parameters. =item B<-text> this option prints out the DSA parameters in human readable form. =item B<-C> this option converts the parameters into C code. The parameters can then be loaded by calling the B<get_dsaXXX()> function. =item B<-genkey> this option will generate a DSA either using the specified or generated parameters. =item B<-rand file(s)> a file or files containing random data used to seed the random number generator, or an EGD socket (see L<RAND_egd(3)|RAND_egd(3)>). Multiple files can be specified separated by a OS-dependent character. The separator is B<;> for MS-Windows, B<,> for OpenVMS, and B<:> for all others. 
=item B<numbits> this option specifies that a parameter set should be generated of size B<numbits>. It must be the last option. If this option is included then the input file (if any) is ignored. =item B<-engine id> specifying an engine (by its unique B<id> string) will cause B<dsaparam> to attempt to obtain a functional reference to the specified engine, thus initialising it if needed. The engine will then be set as the default for all available algorithms. =back =head1 NOTES PEM format DSA parameters use the header and footer lines: -----BEGIN DSA PARAMETERS----- -----END DSA PARAMETERS----- DSA parameter generation is a slow process and as a result the same set of DSA parameters is often used to generate several distinct keys. =head1 SEE ALSO L<gendsa(1)|gendsa(1)>, L<dsa(1)|dsa(1)>, L<genrsa(1)|genrsa(1)>, L<rsa(1)|rsa(1)> =cut
caidongyun/nginx-openresty-windows
nginx/objs/lib/openssl-1.0.1g/doc/apps/dsaparam.pod
Perl
bsd-2-clause
2,950
package DirHandle;

our $VERSION = '1.02';

=head1 NAME

DirHandle - supply object methods for directory handles

=head1 SYNOPSIS

    use DirHandle;
    $d = DirHandle->new(".");
    if (defined $d) {
        while (defined($_ = $d->read)) { something($_); }
        $d->rewind;
        while (defined($_ = $d->read)) { something_else($_); }
        undef $d;
    }

=head1 DESCRIPTION

The C<DirHandle> method provide an alternative interface to the
opendir(), closedir(), readdir(), and rewinddir() functions.

The only objective benefit to using C<DirHandle> is that it avoids
namespace pollution by creating globs to hold directory handles.

=head1 NOTES

=over 4

=item *

On Mac OS (Classic), the path separator is ':', not '/', and the
current directory is denoted as ':', not '.'.  You should be careful
about specifying relative pathnames.  While a full path always begins
with a volume name, a relative pathname should always begin with a
':'.  If specifying a volume name only, a trailing ':' is required.

=back

=cut

require 5.000;
use strict;
use Carp;
use Symbol;

# Build a DirHandle, optionally opening DIRNAME immediately.
# Returns undef when the directory cannot be opened.
sub new {
    croak 'usage: new DirHandle [DIRNAME]' unless @_ >= 1 && @_ <= 2;
    my $class  = shift;
    my $handle = gensym;
    if (@_) {
        DirHandle::open($handle, $_[0]) or return undef;
    }
    return bless $handle, $class;
}

# Close the underlying handle on garbage collection without
# disturbing the caller's status variables.
sub DESTROY {
    my ($self) = @_;
    # The handle may already be closed, or may never have been opened;
    # a failing closedir() here is deliberate and silent.
    local($., $@, $!, $^E, $?);
    no warnings 'io';
    closedir($self);
}

# Open DIRNAME on this handle; returns opendir()'s status.
sub open {
    croak 'usage: $dh->open(DIRNAME)' unless @_ == 2;
    my ($self, $dirname) = @_;
    return opendir($self, $dirname);
}

sub close {
    croak 'usage: $dh->close()' unless @_ == 1;
    my ($self) = @_;
    return closedir($self);
}

# In scalar context returns the next entry (undef at the end); in
# list context returns all remaining entries.
sub read {
    croak 'usage: $dh->read()' unless @_ == 1;
    my ($self) = @_;
    return readdir($self);
}

sub rewind {
    croak 'usage: $dh->rewind()' unless @_ == 1;
    my ($self) = @_;
    return rewinddir($self);
}

1;
leighpauls/k2cro4
third_party/cygwin/lib/perl5/5.10/DirHandle.pm
Perl
bsd-3-clause
1,931
# LaTeX2HTML 2008 (1.71) # Associate internals original text with physical files. $key = q/issues/; $ref_files{$key} = "$dir".q|node12.html|; $noresave{$key} = "$nosave"; $key = q/algorithms/; $ref_files{$key} = "$dir".q|node19.html|; $noresave{$key} = "$nosave"; $key = q/mce_alg_combine/; $ref_files{$key} = "$dir".q|node19.html|; $noresave{$key} = "$nosave"; $key = q/tables/; $ref_files{$key} = "$dir".q|node25.html|; $noresave{$key} = "$nosave"; $key = q/Ltl2buchi/; $ref_files{$key} = "$dir".q|node33.html|; $noresave{$key} = "$nosave"; $key = q/module_ltl/; $ref_files{$key} = "$dir".q|node34.html|; $noresave{$key} = "$nosave"; $key = q/configuration/; $ref_files{$key} = "$dir".q|node16.html|; $noresave{$key} = "$nosave"; $key = q/translation/; $ref_files{$key} = "$dir".q|node11.html|; $noresave{$key} = "$nosave"; $key = q/shell/; $ref_files{$key} = "$dir".q|node31.html|; $noresave{$key} = "$nosave"; $key = q/stacks/; $ref_files{$key} = "$dir".q|node26.html|; $noresave{$key} = "$nosave"; $key = q/monitors/; $ref_files{$key} = "$dir".q|node20.html|; $noresave{$key} = "$nosave"; $key = q/debugger/; $ref_files{$key} = "$dir".q|node28.html|; $noresave{$key} = "$nosave"; $key = q/running/; $ref_files{$key} = "$dir".q|node15.html|; $noresave{$key} = "$nosave"; $key = q/mce_mon_nondeadlock/; $ref_files{$key} = "$dir".q|node20.html|; $noresave{$key} = "$nosave"; $key = q/schedulers/; $ref_files{$key} = "$dir".q|node27.html|; $noresave{$key} = "$nosave"; $key = q/fig:workflow/; $ref_files{$key} = "$dir".q|node4.html|; $noresave{$key} = "$nosave"; $key = q/abstractions/; $ref_files{$key} = "$dir".q|node24.html|; $noresave{$key} = "$nosave"; $key = q/sect_example/; $ref_files{$key} = "$dir".q|node5.html|; $noresave{$key} = "$nosave"; $key = q/locker_sched/; $ref_files{$key} = "$dir".q|node27.html|; $noresave{$key} = "$nosave"; 1;
oroszgy/mcerlang4eclipse
org.mcerlang.help/html/userManual/internals.pl
Perl
mit
1,892
#!/usr/bin/perl
# 
# ***** BEGIN LICENSE BLOCK *****
# Zimbra Collaboration Suite Server
# Copyright (C) 2007, 2008, 2009, 2010 Zimbra, Inc.
# 
# The contents of this file are subject to the Zimbra Public License
# Version 1.3 ("License"); you may not use this file except in
# compliance with the License.  You may obtain a copy of the License at
# http://www.zimbra.com/license.
# 
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.
# ***** END LICENSE BLOCK *****
# 

# Schema migration 47 -> 48: adds an index on scheduled_task.mailbox_id
# so per-mailbox scheduled-task lookups do not scan the whole table.

use strict;
use Migrate;

# Abort unless the database is currently at schema version 47.
Migrate::verifySchemaVersion(47);

# NOTE(review): index name i_mailbox_id presumably follows the naming
# convention used by other zimbra-schema indexes -- confirm against the
# sibling migration scripts before changing.
Migrate::runSql("CREATE INDEX i_mailbox_id ON zimbra.scheduled_task (mailbox_id);");

# Record completion of this migration by bumping the version 47 -> 48.
Migrate::updateSchemaVersion(47, 48);

exit(0);
nico01f/z-pec
ZimbraServer/src/db/migration/migrate20070928-ScheduledTaskIndex.pl
Perl
mit
749
package t::Util; use strict; use warnings; use Digest::MD5 qw(md5_hex); use File::Temp qw(tempfile tempdir); use IO::Socket::INET; use IO::Socket::SSL; use IO::Poll qw(POLLIN POLLOUT POLLHUP POLLERR); use List::Util qw(shuffle); use List::MoreUtils qw(firstidx); use Net::EmptyPort qw(check_port empty_port); use Net::DNS::Nameserver; use POSIX ":sys_wait_h"; use Path::Tiny; use Protocol::HTTP2::Connection; use Protocol::HTTP2::Constants; use Scope::Guard; use Test::More; use Time::HiRes qw(sleep gettimeofday tv_interval); use Carp; use base qw(Exporter); our @EXPORT = qw( ASSETS_DIR DOC_ROOT bindir run_as_root server_features exec_unittest exec_mruby_unittest exec_fuzzer spawn_server spawn_h2o spawn_h2o_raw empty_ports create_data_file md5_file etag_file prog_exists run_prog openssl_can_negotiate curl_supports_http2 run_with_curl h2get_exists run_with_h2get run_with_h2get_simple one_shot_http_upstream wait_debugger make_guard spawn_forked spawn_h2_server find_blackhole_ip get_tracer check_dtrace_availability run_picotls_client spawn_dns_server run_fuzzer ); use constant ASSETS_DIR => 't/assets'; use constant DOC_ROOT => ASSETS_DIR . "/doc_root"; sub bindir { $ENV{H2O_VALGRIND} || $ENV{BINARY_DIR} || '.'; } sub run_as_root { return if $< == 0; exec qw(sudo -E env PERL5LIB=.), "PATH=$ENV{PATH}", $^X, $0; die "failed to invoke $0 using sudo:$!"; } sub server_features { open my $fh, "-|", bindir() . "/h2o", "--version" or die "failed to invoke: h2o --version:$!"; <$fh>; # skip h2o version +{ map { chomp($_); split /:/, $_, 2 } <$fh> }; } sub exec_unittest { my $base = shift; my $fn = bindir() . "/t-00unit-$base.t"; plan skip_all => "unit test:$base does not exist" if ! -e $fn; if (prog_exists("memcached")) { my $port = empty_port(); pipe my $rfh, my $wfh or die "pipe failed:$!"; my $pid = fork; die "fork failed:$!" 
unless defined $pid; if ($pid == 0) { # child process close $wfh; POSIX::dup2($rfh->fileno, 5) or die "dup2 failed:$!"; if ($< == 0) { exec qw(share/h2o/kill-on-close -- memcached -u root -l 127.0.0.1 -p), $port; } else { exec qw(share/h2o/kill-on-close -- memcached -l 127.0.0.1 -p), $port; } exit 1; } close $rfh; POSIX::dup($wfh->fileno) or die "dup failed:$!"; sleep 1; if (waitpid($pid, WNOHANG) == $pid) { die "failed to launch memcached"; } $ENV{MEMCACHED_PORT} = $port; } exec $fn; die "failed to exec $fn:$!"; } sub exec_mruby_unittest { plan skip_all => 'mruby support is off' unless server_features()->{mruby}; my $test_dir = path('t/00unit.mruby'); my $bin = path(bindir(), 'mruby/host/bin/mruby'); unless (-e $bin) { die "unit test: mruby binary $bin does not exist"; } my $k = 0; $test_dir->visit(sub { my ($path) = @_; return unless $path =~ /\.rb$/; my $fn = "$bin $path"; my $output = `$fn`; # parse mruby test output $output =~ /# Running tests:\n\n([SFE\.]+)\n/ or die "cannot parse test output for $path"; my ($i, $j) = (0, 0); my @results = map { +{ type => $_, index => ++$i, failed => ($_ eq 'F' || $_ eq 'E') } } split(//, $1); while ($output =~ /\d\) (Skipped|Failure|Error):\n([^\n]+)/g) { my ($type, $detail) = (substr($1, 0, 1), $2); while ($results[$j]->{type} ne $type) { $j++; } $results[$j++]->{detail} = $detail; } # print TAP compatible output printf("%s %s\n", $path, '.' x (51 - length($path))); for my $r (@results) { printf(" %s %d - %s\n", $r->{failed} ? 'not ok' : 'ok', $r->{index}, $r->{detail} || ''); printf STDERR ("# Error - %s\n", $r->{detail}) if $r->{failed}; } printf(" 1..%d\n", scalar(@results)); printf("%s %d - %s\n", (grep { $_->{failed} } @results) ? 'not ok' : 'ok', ++$k, $path); }, +{ recurse => 1 }); printf("1..%d\n", $k); } sub exec_fuzzer { my $name = shift; my $prog = bindir() . "/h2o-fuzzer-$name"; plan skip_all => "$prog does not exist" if ! 
-e $prog; is system("$prog -close_fd_mask=3 -runs=1 -max_len=16384 fuzz/$name-corpus < /dev/null"), 0; done_testing; } # spawns a child process and returns a guard object that kills the process when destroyed sub spawn_server { my %args = @_; my $ppid = $$; my $pid = fork; die "fork failed:$!" unless defined $pid; if ($pid != 0) { print STDERR "spawning $args{argv}->[0]... "; if ($args{is_ready}) { while (1) { if ($args{is_ready}->()) { print STDERR "done\n"; last; } if (waitpid($pid, WNOHANG) == $pid) { die "server failed to start (got $?)\n"; } sleep 0.1; } } my $guard = make_guard(sub { return if $$ != $ppid; print STDERR "killing $args{argv}->[0]... "; my $sig = 'TERM'; Retry: if (kill $sig, $pid) { my $i = 0; my $sigterm = sig_num('TERM'); my $sigkill = sig_num('KILL'); my $sigzero = sig_num('ZERO'); while (1) { if (waitpid($pid, WNOHANG) == $pid) { Test::More::fail "server die with signal $?" unless $? == $sigterm || $? == $sigkill || $? == $sigzero; print STDERR "killed (got $?)\n"; last; } if ($i++ == 100) { if ($sig eq 'TERM') { print STDERR "failed, sending SIGKILL... "; $sig = 'KILL'; goto Retry; } print STDERR "failed, continuing anyways\n"; last; } sleep 0.1; } } else { print STDERR "no proc? ($!)\n"; } }); return wantarray ? ($guard, $pid) : $guard; } # child process exec @{$args{argv}}; die "failed to exec $args{argv}->[0]:$!"; } sub sig_num { my $name = shift; firstidx { $_ eq $name } split " ", $Config::Config{sig_name}; } # returns a hash containing `port`, `tls_port`, `guard` sub spawn_h2o { my ($conf) = @_; my @opts; my $max_ssl_version; # decide the port numbers my ($port, $tls_port) = empty_ports(2, { host => "0.0.0.0" }); # setup the configuration file $conf = $conf->($port, $tls_port) if ref $conf eq 'CODE'; my $user = $< == 0 ? 
"root" : ""; if (ref $conf eq 'HASH') { @opts = @{$conf->{opts}} if $conf->{opts}; $max_ssl_version = $conf->{max_ssl_version} || undef; $user = $conf->{user} if exists $conf->{user}; $conf = $conf->{conf}; } $conf = <<"EOT"; $conf listen: host: 0.0.0.0 port: $port listen: host: 0.0.0.0 port: $tls_port ssl: key-file: examples/h2o/server.key certificate-file: examples/h2o/server.crt @{[$max_ssl_version ? "max-version: $max_ssl_version" : ""]} @{[$user ? "user: $user" : ""]} EOT my $ret = spawn_h2o_raw($conf, [$port, $tls_port], \@opts); return { %$ret, port => $port, tls_port => $tls_port, }; } sub spawn_h2o_raw { my ($conf, $check_ports, $opts) = @_; # By default, h2o will launch as many threads as there are CPU cores on the # host, unless 'num-threads' is specified. This results in the process # running out of file descriptors, if the 'nofiles' limit is low and the # host has a large number of CPU cores. So make sure the number of threads # is bound. $conf = "num-threads: 2\n$conf" unless $conf =~ /^num-threads:/m; my ($conffh, $conffn) = tempfile(UNLINK => 1); print $conffh $conf; # spawn the server my ($guard, $pid) = spawn_server( argv => [ bindir() . 
"/h2o", "-c", $conffn, @{$opts || []} ], is_ready => sub { check_port($_) or return for @{ $check_ports || [] }; 1; }, ); return { guard => $guard, pid => $pid, conf_file => $conffn, }; } sub empty_ports { my ($n, @ep_args) = @_; my @ports; while (@ports < $n) { my $t = empty_port(@ep_args); push @ports, $t unless grep { $_ == $t } @ports; } return @ports; } sub create_data_file { my $sz = shift; my ($fh, $fn) = tempfile(UNLINK => 1); print $fh '0' x $sz; close $fh; return $fn; } sub md5_file { my $fn = shift; open my $fh, "<", $fn or die "failed to open file:$fn:$!"; local $/; return md5_hex(join '', <$fh>); } sub etag_file { my $fn = shift; my @st = stat $fn or die "failed to stat file:$fn:$!"; return sprintf("\"%08x-%zx\"", $st[9], $st[7]); } sub prog_exists { my $prog = shift; system("which $prog > /dev/null 2>&1") == 0; } sub run_prog { my $cmd = shift; my ($tempfh, $tempfn) = tempfile(UNLINK => 1); my $stderr = `$cmd 2>&1 > $tempfn`; my $stdout = do { local $/; <$tempfh> }; close $tempfh; # tempfile does not close the file automatically (see perldoc) return ($stderr, $stdout); } sub openssl_can_negotiate { my $openssl_ver = `openssl version`; $openssl_ver =~ /^\S+\s(\d+)\.(\d+)\.(\d+)/ or die "cannot parse OpenSSL version: $openssl_ver"; $openssl_ver = $1 * 10000 + $2 * 100 + $3; return $openssl_ver >= 10001; } sub curl_supports_http2 { return !! (`curl --version` =~ /^Features:.*\sHTTP2(?:\s|$)/m); } sub run_with_curl { my ($server, $cb) = @_; plan skip_all => "curl not found" unless prog_exists("curl"); subtest "http/1" => sub { $cb->("http", $server->{port}, "curl", 257); }; subtest "https/1" => sub { my $cmd = "curl --insecure"; $cmd .= " --http1.1" if curl_supports_http2(); $cb->("https", $server->{tls_port}, $cmd, 257); }; subtest "https/2" => sub { plan skip_all => "curl does not support HTTP/2" unless curl_supports_http2(); $cb->("https", $server->{tls_port}, "curl --insecure --http2", 512); }; } sub h2get_exists { prog_exists(bindir() . 
"/h2get_bin/h2get"); } sub run_with_h2get { my ($server, $script) = @_; plan skip_all => "h2get not found" unless h2get_exists(); my $helper_code = <<"EOR"; class H2 def read_loop(timeout) while true f = self.read(timeout) return nil if f == nil puts f.to_s if f.type == "DATA" && f.len > 0 self.send_window_update(0, f.len) self.send_window_update(f.stream_id, f.len) end if (f.type == "DATA" || f.type == "HEADERS") && f.is_end_stream return f elsif f.type == "RST_STREAM" || f.type == "GOAWAY" return f end end end end EOR $script = "$helper_code\n$script"; my ($scriptfh, $scriptfn) = tempfile(UNLINK => 1); print $scriptfh $script; close($scriptfh); return run_prog(bindir()."/h2get_bin/h2get $scriptfn 127.0.0.1:$server->{tls_port}"); } sub run_with_h2get_simple { my ($server, $script) = @_; my $settings = <<'EOS'; h2g = H2.new authority = ARGV[0] host = "https://#{authority}" h2g.connect(host) h2g.send_prefix() h2g.send_settings() i = 0 while i < 2 do f = h2g.read(-1) if f.type == "SETTINGS" and (f.flags == ACK) then i += 1 elsif f.type == "SETTINGS" then h2g.send_settings_ack() i += 1 end end EOS run_with_h2get($server, $settings."\n".$script); } sub one_shot_http_upstream { my ($response, $port) = @_; my $listen = IO::Socket::INET->new( LocalHost => '0.0.0.0', LocalPort => $port, Proto => 'tcp', Listen => 1, Reuse => 1, ) or die "failed to listen to 127.0.0.1:$port:$!"; my $pid = fork; die "fork failed" unless defined $pid; if ($pid != 0) { close $listen; my $guard = make_guard(sub { kill 'KILL', $pid; while (waitpid($pid, WNOHANG) != $pid) {} }); return ($port, $guard); } while (my $sock = $listen->accept) { $sock->print($response); close $sock; } } sub wait_debugger { my ($pid, $timeout) = @_; $timeout ||= -1; print STDERR "waiting debugger for pid $pid ..\n"; while ($timeout-- != 0) { my $out = `ps -p $pid -o 'state' | tail -n 1`; if ($out =~ /^(T|.+X).*$/) { print STDERR "debugger attached\n"; return 1; } sleep 1; } print STDERR "no debugger attached\n"; undef; 
} sub make_guard { my $code = shift; return Scope::Guard->new(sub { local $?; $code->(); }); } sub spawn_forked { my ($code) = @_; my ($cout, $pin); pipe($pin, $cout); my ($cerr, $pin2); pipe($pin2, $cerr); my $pid = fork; if ($pid) { close $cout; close $cerr; my $upstream; $upstream = +{ pid => $pid, kill => sub { return unless defined $pid; kill 'KILL', $pid; undef $pid; }, guard => make_guard(sub { $upstream->{kill}->() }), stdout => $pin, stderr => $pin2, }; return $upstream; } close $pin; close $pin2; open(STDOUT, '>&=', fileno($cout)) or die $!; open(STDERR, '>&=', fileno($cerr)) or die $!; $code->(); exit; } sub spawn_h2_server { my ($upstream_port, $stream_state_cbs, $stream_frame_cbs) = @_; my $upstream = IO::Socket::SSL->new( LocalAddr => '127.0.0.1', LocalPort => $upstream_port, Listen => 1, ReuseAddr => 1, SSL_cert_file => 'examples/h2o/server.crt', SSL_key_file => 'examples/h2o/server.key', SSL_alpn_protocols => ['h2'], ) or die "cannot create socket: $!"; my $server = spawn_forked(sub { my $conn; $conn = Protocol::HTTP2::Connection->new(Protocol::HTTP2::Constants::SERVER, on_new_peer_stream => sub { my $stream_id = shift; for my $state (keys %{ $stream_state_cbs || +{} }) { my $cb = $stream_state_cbs->{$state}; $conn->stream_cb($stream_id, $state, sub { $cb->($conn, $stream_id); }); } for my $type (keys %{ $stream_frame_cbs || +{} }) { my $cb = $stream_frame_cbs->{$type}; $conn->stream_frame_cb($stream_id, $type, sub { $cb->($conn, $stream_id, shift); }); } }, ); $conn->{_state} = +{}; $conn->enqueue(Protocol::HTTP2::Constants::SETTINGS, 0, 0, +{}); my $sock = $upstream->accept or die "cannot accept socket: $!"; my $input = ''; while (!$conn->{_state}->{closed}) { my $offset = 0; my $buf; my $r = $sock->read($buf, 1); next unless $r; $input .= $buf; unless ($conn->preface) { my $len = $conn->preface_decode(\$input, 0); unless (defined($len)) { die 'invalid preface'; } next unless $len; $conn->preface(1); $offset += $len; } while (my $len = 
$conn->frame_decode(\$input, $offset)) { $offset += $len; } substr($input, 0, $offset) = '' if $offset; if (my $after_read = delete($conn->{_state}->{after_read})) { $after_read->(); } while (my $frame = $conn->dequeue) { $sock->write($frame); } if (my $after_write = delete($conn->{_state}->{after_write})) { $after_write->(); } } }); close $upstream; return $server; } # usage: see t/90h2olog.t package H2ologTracer { use POSIX ":sys_wait_h"; sub new { my ($class, $opts) = @_; my $h2o_pid = $opts->{pid} or Carp::croak("Missing pid in the opts"); my $h2olog_args = $opts->{args} // []; my $h2olog_prog = t::Util::bindir() . "/h2olog"; my $tempdir = File::Temp::tempdir(CLEANUP => 1); my $output_file = "$tempdir/h2olog.jsonl"; my $tracer_pid = open my($errfh), "-|", qq{exec $h2olog_prog @{$h2olog_args} -d -p $h2o_pid -w '$output_file' 2>&1}; die "failed to spawn $h2olog_prog: $!" unless defined $tracer_pid; # wait until h2olog and the trace log becomes ready while (1) { my $errline = <$errfh>; Carp::confess("h2olog[$tracer_pid] died unexpectedly") unless defined $errline; Test::More::diag("h2olog[$tracer_pid]: $errline"); last if $errline =~ /Attaching pid=/; } open my $fh, "<", $output_file or die "h2olog[$tracer_pid] does not create the output file ($output_file): $!"; my $off = 0; my $get_trace = sub { Carp::confess "h2olog[$tracer_pid] is down (got $?)" if waitpid($tracer_pid, WNOHANG) != 0; seek $fh, $off, 0 or die "seek failed: $!"; read $fh, my $bytes, 65000; $bytes = '' unless defined $bytes; $off += length $bytes; return $bytes; }; my $guard = t::Util::make_guard(sub { if (waitpid($tracer_pid, WNOHANG) == 0) { Test::More::diag "killing h2olog[$tracer_pid] with SIGTERM"; kill("TERM", $tracer_pid) or warn("failed to kill h2olog[$tracer_pid]: $!"); } else { Test::More::diag($_) while <$errfh>; # in case h2olog shows error messages, e.g. 
BPF program doesn't compile Test::More::diag "h2olog[$tracer_pid] has already exited"; } }); return bless { _guard => $guard, tracer_pid => $tracer_pid, get_trace => $get_trace, }, $class; } sub get_trace { my($self) = @_; return $self->{get_trace}->(); } } sub find_blackhole_ip { my %ips; my $port = $_[0] || 23; my $blackhole_ip = undef; my $poll = IO::Poll->new(); my $start = [ gettimeofday() ]; foreach my $ip ('10.0.0.1', '192.168.0.1', '172.16.0.1', '240.0.0.1', '192.0.2.0') { my $sock = IO::Socket::INET->new(Blocking => 0, PeerPort => $port, PeerAddr => $ip); $ips{$sock} = $ip; $poll->mask($sock => POLLOUT|POLLIN|POLLERR|POLLHUP); } while (scalar($poll->handles()) > 0 and tv_interval($start) < 2.00) { if ($poll->poll(.1) > 0) { foreach my $sock ($poll->handles(POLLOUT|POLLIN|POLLERR|POLLHUP)) { delete($ips{$sock}); $poll->remove($sock); $sock->close() } } } if (scalar($poll->handles()) > 0) { $blackhole_ip = $ips{(keys %ips)[rand(keys %ips)]} } foreach my $sock ($poll->handles()) { $poll->remove($sock); $sock->close(); } die unless $poll->handles() == 0; return $blackhole_ip; } sub check_dtrace_availability { run_as_root(); plan skip_all => 'dtrace support is off' unless server_features()->{dtrace}; if ($^O eq 'linux') { plan skip_all => 'bpftrace not found' unless prog_exists('bpftrace'); # NOTE: the test is likely to depend on https://github.com/iovisor/bpftrace/pull/864 plan skip_all => "skipping bpftrace tests (setenv DTRACE_TESTS=1 to run them)" unless $ENV{DTRACE_TESTS}; } else { plan skip_all => 'dtrace not found' unless prog_exists('dtrace'); plan skip_all => 'unbuffer not found' unless prog_exists('unbuffer'); } } sub get_tracer { my $tracer_pid = shift; my $fn = shift; my $read_trace; while (1) { sleep 1; if (open my $fh, "<", $fn) { my $off = 0; $read_trace = sub { seek $fh, $off, 0 or die "seek failed:$!"; read $fh, my $bytes, 1048576; $bytes = '' unless defined $bytes; $off += length $bytes; if ($^O ne 'linux') { $bytes = join "", map { substr($_, 
4) . "\n" } grep /^XXXX/, split /\n/, $bytes; } return $bytes; }; last; } die "bpftrace failed to start\n" if waitpid($tracer_pid, WNOHANG) == $tracer_pid; } return $read_trace; } sub run_picotls_client { my($opts) = @_; my $port = $opts->{port}; # required my $host = $opts->{host} // '127.0.0.1'; my $path = $opts->{path} // '/'; my $cli_opts = $opts->{opts} // ''; my $cli = bindir() . "/picotls/cli"; my $tempdir = tempdir(); my $cmd = "exec $cli $cli_opts $host $port > $tempdir/resp.txt 2>&1"; diag $cmd; open my $fh, "|-", $cmd or die "failed to invoke command:$cmd:$!"; autoflush $fh 1; print $fh <<"EOT"; GET $path HTTP/1.1\r Host: $host:$port\r Connection: close\r \r EOT sleep 1; close $fh; open $fh, "<", "$tempdir/resp.txt" or die "failed to open file:$tempdir/resp.txt:$!"; my $resp = do { local $/; <$fh> }; return $resp; } sub spawn_dns_server { my ($dns_port, $zone_rrs, $delays) = @_; my $server = spawn_forked(sub { my $ns = Net::DNS::Nameserver->new( LocalPort => $dns_port, ReplyHandler => sub { my ($qname, $qclass, $qtype, $peerhost, $query, $conn) = @_; my ($rcode, @ans, @auth, @add); foreach (@$zone_rrs) { my $rr = Net::DNS::RR->new($_); if ($rr->owner eq $qname && $rr->class eq $qclass && $rr->type eq $qtype) { push @ans, $rr; } } if (!@ans) { $rcode = "NXDOMAIN"; } else { $rcode = "NOERROR"; } # mark the answer as authoritative (by setting the 'aa' flag) my $headermask = {aa => 1}; my $optionmask = {}; if ($delays && $delays->{$qtype} > 0) { sleep($delays->{$qtype}); } @ans = shuffle(@ans); return ($rcode, \@ans, \@auth, \@add, $headermask, $optionmask); }, Verbose => 0 ) || die "couldn't create nameserver object\n"; $ns->main_loop; }); return $server; } 1;
cwyang/h2o
t/Util.pm
Perl
mit
23,801
# -*- perl -*-
# !!! DO NOT EDIT !!!
# This file was automatically generated.
package Net::Amazon::Validate::ItemSearch::ca::Author;

use 5.006;
use strict;
use warnings;

# Search indices accepted by the ca (Canada) locale for an Author-based
# ItemSearch; the order is preserved in each instance's _options list.
my @VALID_INDICES = ('Books', 'Classical', 'SoftwareVideoGames', 'VideoGames');

# Construct a validator.  %options may override the fallback value
# stored under _default (normally 'Books').
sub new {
    my ($class, %options) = @_;
    my $self = {
        '_default' => 'Books',
        %options,
    };
    push @{ $self->{_options} }, @VALID_INDICES;
    return bless $self, $class;
}

# Return the canonical form of $user when a non-empty value was
# supplied, otherwise fall back to the default search index.
sub user_or_default {
    my ($self, $user) = @_;
    return $self->find_match($user) if defined $user && length($user) > 0;
    return $self->default();
}

# The fallback search index.
sub default {
    my ($self) = @_;
    return $self->{_default};
}

# Case-insensitively map $value onto one of the valid indices;
# dies when no index matches.
sub find_match {
    my ($self, $value) = @_;
    my ($match) = grep { lc($_) eq lc($value) } @{ $self->{_options} };
    return $match if defined $match;
    die "$value is not a valid value for ca::Author!\n";
}

1;

__END__

=head1 NAME

Net::Amazon::Validate::ItemSearch::ca::Author;

=head1 DESCRIPTION

The default value is Books, unless mode is specified.

The list of available values are:

    Books
    Classical
    SoftwareVideoGames
    VideoGames

=cut
carlgao/lenga
images/lenny64-peon/usr/share/perl5/Net/Amazon/Validate/ItemSearch/ca/Author.pm
Perl
mit
1,188
% regulus_read.pl :- ensure_loaded('$REGULUS/PrologLib/compatibility'). :- module(regulus_read, [read_regulus_file_or_files/2, read_regulus_file/2, read_regulus_files/2, read_regulus_file_or_files/3, read_regulus_file/3, read_regulus_files/3, read_transfer_file_or_files/3, read_orthography_file_or_files/3, read_collocation_file_or_files/2, read_collocation_file_or_files/3, read_generic_regulus_related_file_or_files/2, read_generic_regulus_related_file_or_files/3, read_and_compile_generic_regulus_related_file/2, read_lf_pattern_file_or_files/2, read_lf_pattern_file_or_files/3, read_lf_rewrite_file_or_files/2, read_lf_rewrite_file_or_files/3, read_corpus_file/2, read_corpus_file_printing_statistics/2, include_closure_for_file_or_list/3, strip_wrappers_from_rules/2, all_sem_features/2, add_lexical_feature_defaults/2, expand_abbreviations_in_rules/2] ). 'LOAD_DYNAMIC_LEXICON_SUPPORT_IF_AVAILABLE'. :- use_module('$REGULUS/Prolog/regulus_declarations'). :- use_module('$REGULUS/Prolog/regulus_utilities'). :- use_module('$REGULUS/PrologLib/utilities'). :- use_module(library(lists)). :- use_module(library(terms)). :- use_module(library(ordsets)). :- use_module(library(system)). 'SICSTUS4ONLY'( ( :- use_module(library(file_systems)) ) ). %--------------------------------------------------------------- read_regulus_file_or_files(InFileOrFiles, ReadRules) :- read_regulus_file_or_files(InFileOrFiles, ReadRules, _Decls). read_regulus_file(InFile, ReadRules) :- read_regulus_file(InFile, ReadRules, _Decls). read_regulus_files(InFile, ReadRules) :- read_regulus_files(InFile, ReadRules, _Decls). read_regulus_file_or_files(InFileOrFiles, ReadRules, Decls) :- ( is_list(InFileOrFiles) -> read_regulus_files(InFileOrFiles, ReadRules, Decls) ; read_regulus_files([InFileOrFiles], ReadRules, Decls) ). read_regulus_file(InFile, ReadRules, Decls) :- read_regulus_files([InFile], ReadRules, Decls). 
read_regulus_files(Files, ReadRules3, ReadDeclarations) :- retract_all_regulus_preds, read_regulus_files1(Files, UnexpandedReadRules, ReadDeclarations, grammar), length(ReadDeclarations, NDecls), internalise_regulus_declarations(ReadDeclarations, 0-NIgnoredDecls), save_grammar_macros, expand_macros_in_rule_list(UnexpandedReadRules, ReadRules), process_dynamic_lex_entries_in_rules(ReadRules, ReadRules1), remove_labels_and_ignored_rules(ReadRules1, ReadRules2, 0-NIgnoredRules), canonicalise_rules(ReadRules2, ReadRules3), warn_about_unused_features(ReadRules3), warn_about_inconsistent_feature_spaces(ReadRules3), count_regulus_rules(ReadRules3, 0-NNonLexical, 0-NLexical), note_and_count_vocabulary_items(ReadRules3, NVocabulary), format('~N~n -- Read file(s): ~d declarations, ~d non-lexical rules, ~d lexical rules, ~d distinct vocabulary items.~n', [NDecls, NNonLexical, NLexical, NVocabulary]), format('~N -- ~d declarations and ~d rules ignored.~n~n', [NIgnoredDecls, NIgnoredRules]). %--------------------------------------------------------------- read_transfer_file_or_files(InFileOrFiles, ReadRules, Decls) :- ( is_list(InFileOrFiles) -> read_transfer_files(InFileOrFiles, ReadRules, Decls) ; read_transfer_files([InFileOrFiles], ReadRules, Decls) ). read_transfer_files(Files, ReadRules1, ReadDeclarations) :- retract_regulus_preds_for_transfer_files, read_regulus_files1(Files, UnexpandedReadRules, ReadDeclarations, transfer), length(ReadDeclarations, NDecls), internalise_regulus_declarations_for_transfer(ReadDeclarations, 0-NIgnoredDecls), expand_macros_in_rule_list(UnexpandedReadRules, ReadRules), remove_labels_and_ignored_rules(ReadRules, ReadRules1, 0-NIgnoredRules), count_transfer_entries(ReadRules1, 0-NRules, 0-NLexical), format('~N~n -- Read file(s): ~d declarations, ~d transfer rules, ~d transfer lexicon entries.~n', [NDecls, NRules, NLexical]), format('~N -- ~d declarations and ~d rules ignored.~n~n', [NIgnoredDecls, NIgnoredRules]). 
%--------------------------------------------------------------- read_orthography_file_or_files(InFileOrFiles, ReadRules, Decls) :- ( is_list(InFileOrFiles) -> read_orthography_files(InFileOrFiles, ReadRules, Decls) ; read_orthography_files([InFileOrFiles], ReadRules, Decls) ). read_orthography_files(Files, ReadRules, ReadDeclarations) :- read_regulus_files1(Files, ReadRules, ReadDeclarations, orthography), length(ReadRules, NRules), length(ReadDeclarations, NDecls), format('~N~n -- Read file(s): ~d orthography declarations, ~d orthography rules.~n', [NDecls, NRules]). %--------------------------------------------------------------- read_collocation_file_or_files(InFileOrFiles, Rules) :- read_collocation_file_or_files(InFileOrFiles, Rules, _Decls). read_collocation_file_or_files(InFileOrFiles, ReadRules, Decls) :- ( is_list(InFileOrFiles) -> read_collocation_files(InFileOrFiles, ReadRules, Decls) ; read_collocation_files([InFileOrFiles], ReadRules, Decls) ). read_collocation_files(Files, Rules, ReadDeclarations) :- read_regulus_files1(Files, ReadRules, ReadDeclarations, collocation), length(ReadRules, NRules), length(ReadDeclarations, NDecls), internalise_collocation_decls(ReadDeclarations), expand_collocation_macros_in_rules(ReadRules, Rules), %print_expanded_collocation_rules(Rules), format('~N~n -- Read file(s): ~d collocation declarations, ~d collocation rules.~n', [NDecls, NRules]). print_expanded_collocation_rules([]). print_expanded_collocation_rules([F | R]) :- print_expanded_collocation_rule(F), !, print_expanded_collocation_rules(R). print_expanded_collocation_rule(Rule) :- compound(Rule), Rule =.. [F, LHS, RHS], is_prolog_string(LHS), is_prolog_string(RHS), format('~N~q("~s", "~s").~n', [F, LHS, RHS]), !. print_expanded_collocation_rule(Rule) :- format('~N~q.~n', [Rule]), !. 
%---------------------------------------------------------------

% Wrapper: accept either a single generic Regulus-related file or a list.
read_generic_regulus_related_file_or_files(InFileOrFiles, ReadRules) :-
	(   is_list(InFileOrFiles) ->
	    read_generic_regulus_related_files(InFileOrFiles, ReadRules)
	;
	    read_generic_regulus_related_files([InFileOrFiles], ReadRules)
	).

% Generic files contain only macros (counted as "declarations" here)
% and plain entries.
read_generic_regulus_related_files(Files, ReadRules1) :-
	retract_regulus_preds_for_generic_files,
	read_regulus_files1(Files, UnexpandedReadRules, ReadDeclarations, generic),
	length(ReadDeclarations, NDecls),
	internalise_generic_regulus_declarations(ReadDeclarations),
	expand_macros_in_rule_list(UnexpandedReadRules, ReadRules),
	remove_labels_and_ignored_rules(ReadRules, ReadRules1, 0-_NIgnoredRules),
	length(ReadRules1, NRules),
	format('~N~n -- Read file(s): ~d macros, ~d entries.~n', [NDecls, NRules]).

% Read a generic file, strip the rule/2 or rule/3 wrappers from the
% entries, dump the bare terms to a "compiled_<name>" file next to the
% source, and compile that file into Package.
read_and_compile_generic_regulus_related_file(InFile, Package) :-
	read_generic_regulus_related_file_or_files(InFile, ReadRules),
	strip_wrappers_from_rules(ReadRules, ReadRules1),
	compiled_version_of_file(InFile, CompiledFile),
	list_to_prolog_file(ReadRules1, CompiledFile),
	safe_compile_with_redefine_warnings_off(Package, CompiledFile),
	!.
% Fallback: report the failed call and fail.
read_and_compile_generic_regulus_related_file(InFile, Package) :-
	format('~N*** Error: bad call: ~w~n', [read_and_compile_generic_regulus_related_file(InFile, Package)]),
	fail.

strip_wrappers_from_rules([], []).
strip_wrappers_from_rules([F | R], [F1 | R1]) :-
	strip_wrapper_from_rule(F, F1),
	!,
	strip_wrappers_from_rules(R, R1).

% Unwrap both the unlabelled (rule/2) and labelled (rule/3) forms.
strip_wrapper_from_rule(rule(Element, _LineInfo), Element) :- !.
strip_wrapper_from_rule(rule(_Label, Element, _LineInfo), Element) :- !.
strip_wrapper_from_rule(F, F1) :-
	format('~N*** Error: bad call: ~w~n', [strip_wrapper_from_rule(F, F1)]),
	fail.

% Map <dir>/<base> to <dir>/compiled_<base>.
compiled_version_of_file(File, CompiledFile) :-
	safe_absolute_file_name(File, AbsFile),
	directory_and_file_for_pathname(AbsFile, Dir, BaseFile),
	format_to_atom('~w/compiled_~w', [Dir, BaseFile], CompiledFile),
	!.
% Fallback: report the failed call and fail.
compiled_version_of_file(File, CompiledFile) :-
	format('~N*** Error: bad call: ~w~n', [compiled_version_of_file(File, CompiledFile)]),
	fail.

%---------------------------------------------------------------

% Wrapper: accept either a single LF pattern file or a list of them.
% Delegates straight to the plural form, matching the other
% *_file_or_files wrappers in this file.
read_lf_pattern_file_or_files(InFileOrFiles, ReadRules) :-
	(   is_list(InFileOrFiles) ->
	    read_lf_pattern_files(InFileOrFiles, ReadRules)
	;
	    read_lf_pattern_files([InFileOrFiles], ReadRules)
	).

% Read LF pattern files: internalise their macro declarations, then
% macro-expand the rules and report counts.
read_lf_pattern_files(Files, ReadRules) :-
	retract_regulus_preds_for_lf_pattern_files,
	read_regulus_files1(Files, UnexpandedReadRules, ReadDeclarations, lf_pattern),
	length(ReadDeclarations, NDecls),
	internalise_regulus_declarations_for_lf_pattern(ReadDeclarations, 0-_NIgnoredDecls),
	expand_macros_in_rule_list(UnexpandedReadRules, ReadRules),
	length(ReadRules, NRules),
	format('~N~n -- Read file(s): ~d declarations, ~d LF pattern rules.~n', [NDecls, NRules]).

%---------------------------------------------------------------

% Wrapper: accept either a single LF rewrite file or a list of them.
read_lf_rewrite_file_or_files(InFileOrFiles, ReadRules) :-
	(   is_list(InFileOrFiles) ->
	    read_lf_rewrite_files(InFileOrFiles, ReadRules)
	;
	    read_lf_rewrite_files([InFileOrFiles], ReadRules)
	).

% Read LF rewrite files, analogously to read_lf_pattern_files/2.
read_lf_rewrite_files(Files, ReadRules) :-
	retract_regulus_preds_for_lf_rewrite_files,
	read_regulus_files1(Files, UnexpandedReadRules, ReadDeclarations, lf_rewrite),
	length(ReadDeclarations, NDecls),
	internalise_regulus_declarations_for_lf_rewrite(ReadDeclarations, 0-_NIgnoredDecls),
	expand_macros_in_rule_list(UnexpandedReadRules, ReadRules),
	length(ReadRules, NRules),
	% Fixed copy-paste error: this message previously said "LF pattern rules".
	format('~N~n -- Read file(s): ~d declarations, ~d LF rewrite rules.~n', [NDecls, NRules]).

%---------------------------------------------------------------

% read_regulus_files1(+Files, -ReadRules, -ReadDeclarations, +FileType)
%
% Base case: no files, so no rules and no declarations.
read_regulus_files1([], [], [], _FileType).
% Recursive case: read one file, then the rest.  Rules and declarations
% are threaded as difference lists so per-file results concatenate
% without appends.
read_regulus_files1([F | R], ReadRules, ReadDeclarations, FileType) :-
	read_top_level_regulus_file(F, ReadRules-ReadRulesNext, ReadDeclarations-ReadDeclarationsNext, FileType),
	!,
	read_regulus_files1(R, ReadRulesNext, ReadDeclarationsNext, FileType).

% Open a single top-level file of the given type and read its stream.
% Grammar files are resolved through the Regulus search path
% (absolute_regulus_file_for_reading/2); all other file types use
% plain absolute_file_name/2.  Each branch also announces the file.
read_top_level_regulus_file(InFile, RulesIn-RulesOut, DeclarationsIn-DeclarationsOut, FileType) :-
	(   FileType = grammar ->
	    absolute_regulus_file_for_reading(InFile, AbsoluteFile),
	    format('~NReading Regulus file ~w~n', [AbsoluteFile])
	;   FileType = transfer ->
	    absolute_file_name(InFile, AbsoluteFile),
	    format('~NReading transfer file ~w~n', [AbsoluteFile])
	;   FileType = orthography ->
	    absolute_file_name(InFile, AbsoluteFile),
	    format('~NReading orthography rule file ~w~n', [AbsoluteFile])
	;   FileType = collocation ->
	    absolute_file_name(InFile, AbsoluteFile),
	    format('~NReading collocation rule file ~w~n', [AbsoluteFile])
	;   FileType = lf_pattern ->
	    absolute_file_name(InFile, AbsoluteFile),
	    format('~NReading LF pattern rule file ~w~n', [AbsoluteFile])
	;   FileType = lf_rewrite ->
	    absolute_file_name(InFile, AbsoluteFile),
	    format('~NReading LF rewrite rule file ~w~n', [AbsoluteFile])
	;   FileType = generic ->
	    absolute_file_name(InFile, AbsoluteFile),
	    format('~NReading generic rule file ~w~n', [AbsoluteFile])
	;   otherwise ->
	    format('~N*** Unknown Regulus file-type in read_top_level_regulus_file/4: ~w~n', [FileType]),
	    fail
	),
	open(AbsoluteFile, read, S),
	%line_count(S, FirstLineNumber),
	%format('~N--- Started reading (line number = ~d) ~w~n', [FirstLineNumber, AbsoluteFile]),
	% Item numbering and line tracking both start at 0 for a fresh file.
	read_regulus_stream(S, RulesIn-RulesOut, DeclarationsIn-DeclarationsOut, 0-_LastItemNumber, 0, AbsoluteFile, FileType),
	close(S).
% read_regulus_stream(+S, Rules, Decls, ItemNumbers, +LastLine0, +File, +FileType)
%
% Read the next term from stream S and dispatch on it.  ItemNumber is
% incremented per term; LastLine advances past the line on which the
% previous term ended, so LastLine-CurrentLine brackets the new term's
% source extent (used for error messages and line_info/3 wrappers).
read_regulus_stream(S, InRules-OutRules, InDecls-OutDecls, ItemNumber0-OutItemNumber, LastLine0, File, FileType) :-
	ItemNumber is ItemNumber0 + 1,
	LastLine is LastLine0 + 1,
	%LastLine is LastLine0 + 2,
	%read(S, Term),
	safe_read(S, Term),
	line_count(S, CurrentLine),
	read_regulus_stream1(Term, S, InRules-OutRules, InDecls-OutDecls, ItemNumber-OutItemNumber, LastLine-CurrentLine, File, FileType).

% End of file: close off both difference lists and stop.
read_regulus_stream1(end_of_file, _S, InRules-InRules, InDecls-InDecls, ItemNumber-ItemNumber, _LastL-_CurrentL, _File, _FileType) :- !.
% include(File): splice the included file's rules/declarations in at
% this point, then continue with the enclosing stream.
read_regulus_stream1(Term, S, InRules-OutRules, InDecls-OutDecls, InItemNumber-OutItemNumber, _LastLine-CurrentLine, File, FileType) :-
	Term = include(IncludeFile),
	!,
	read_regulus_include_file(IncludeFile, File, InRules-NextRules, InDecls-NextDecls, InItemNumber-NextItemNumber, FileType),
	read_regulus_stream(S, NextRules-OutRules, NextDecls-OutDecls, NextItemNumber-OutItemNumber, CurrentLine, File, FileType).
% labelled_item(Label, Term): labels are only meaningful in grammar
% and transfer files.
read_regulus_stream1(labelled_item(Label, Term), S, InRules-OutRules, InDecls-OutDecls, ItemNumber-OutItemNumber, LastLine-CurrentLine, File, FileType) :-
	member(FileType, [grammar, transfer]),
	LineInfo = line_info(ItemNumber, LastLine-CurrentLine, File),
	read_regulus_stream2(Term, Label, LineInfo, InRules-NextRules, InDecls-NextDecls, FileType),
	!,
	read_regulus_stream(S, NextRules-OutRules, NextDecls-OutDecls, ItemNumber-OutItemNumber, CurrentLine, File, FileType).
% frequency_labelled_item(Label, Term): as above, but the line_info
% records frequency(Label) instead of the item number.
read_regulus_stream1(frequency_labelled_item(Label, Term), S, InRules-OutRules, InDecls-OutDecls, ItemNumber-OutItemNumber, LastLine-CurrentLine, File, FileType) :-
	member(FileType, [grammar, transfer]),
	%LineInfo = line_info(ItemNumber, LastLine-CurrentLine, File),
	LineInfo = line_info(frequency(Label), LastLine-CurrentLine, File),
	read_regulus_stream2(Term, Label, LineInfo, InRules-NextRules, InDecls-NextDecls, FileType),
	!,
	read_regulus_stream(S, NextRules-OutRules, NextDecls-OutDecls, ItemNumber-OutItemNumber, CurrentLine, File, FileType).
% Unlabelled item: classify it with the dummy label '*no_label*'.
read_regulus_stream1(Term, S, InRules-OutRules, InDecls-OutDecls, ItemNumber-OutItemNumber, LastLine-CurrentLine, File, FileType) :-
	LineInfo = line_info(ItemNumber, LastLine-CurrentLine, File),
	read_regulus_stream2(Term, '*no_label*', LineInfo, InRules-NextRules, InDecls-NextDecls, FileType),
	!,
	read_regulus_stream(S, NextRules-OutRules, NextDecls-OutDecls, ItemNumber-OutItemNumber, CurrentLine, File, FileType).
% Fallback: the term was not recognised by read_regulus_stream2 for
% this file type.  Report it, skip it, and keep reading.
read_regulus_stream1(Term, S, InRules-OutRules, InDecls-OutDecls, ItemNumber-OutItemNumber, LastLine-CurrentLine, File, FileType) :-
	!,
	format2error('~N*** Error: bad term ~w in ~w file between lines ~d and ~d~n', [Term, FileType, LastLine, CurrentLine]),
	read_regulus_stream(S, InRules-OutRules, InDecls-OutDecls, ItemNumber-OutItemNumber, CurrentLine, File, FileType).

%---------------------------------------------------------------

% read_regulus_stream2(+Term, +Label, +LineInfo, Rules, Decls, +FileType)
%
% Classify one term as either a declaration or a rule, wrapping it with
% its label and line info, and push it onto the appropriate difference
% list.  One clause group per file type.

% Grammar file: declaration.
read_regulus_stream2(Term, Label, LineInfo, InRules-OutRules, InDecls-OutDecls, grammar) :-
	is_regulus_declaration(Term),
	!,
	InDecls = [declaration(Label, Term, LineInfo) | OutDecls],
	InRules = OutRules.
% Grammar file: bare macro invocation (@Macro) kept as a rule for
% later macro expansion.
read_regulus_stream2(Term, Label, LineInfo, InRules-OutRules, InDecls-OutDecls, grammar) :-
	Term = @MacroInvocation,
	!,
	InRules = [rule(Label, @MacroInvocation, LineInfo) | OutRules],
	InDecls = OutDecls.
% Grammar file: dynamic lexicon entry wrapping a macro invocation.
read_regulus_stream2(Term, Label, LineInfo, InRules-OutRules, InDecls-OutDecls, grammar) :-
	Term = dynamic_lexicon( @MacroInvocation ),
	!,
	InRules = [rule(Label, dynamic_lexicon( @MacroInvocation ), LineInfo) | OutRules],
	InDecls = OutDecls.
% Grammar file: ordinary grammar rule.
read_regulus_stream2(Term, Label, LineInfo, InRules-OutRules, InDecls-OutDecls, grammar) :-
	is_regulus_rule(Term),
	!,
	InRules = [rule(Label, Term, LineInfo) | OutRules],
	InDecls = OutDecls.
% Transfer file: declaration.
read_regulus_stream2(Term, Label, LineInfo, InRules-OutRules, InDecls-OutDecls, transfer) :-
	is_transfer_declaration(Term),
	!,
	InDecls = [declaration(Label, Term, LineInfo) | OutDecls],
	InRules = OutRules.
% Transfer file: bare macro invocation, kept for later expansion.
read_regulus_stream2(Term, Label, LineInfo, InRules-OutRules, InDecls-OutDecls, transfer) :-
	Term = @MacroInvocation,
	!,
	InRules = [rule(Label, @MacroInvocation, LineInfo) | OutRules],
	InDecls = OutDecls.
% Transfer file: transfer rule or lexicon entry.
read_regulus_stream2(Term, Label, LineInfo, InRules-OutRules, InDecls-OutDecls, transfer) :-
	is_transfer_entry(Term),
	!,
	InRules = [rule(Label, Term, LineInfo) | OutRules],
	InDecls = OutDecls.
% Orthography file: labels are not used, so the wrappers are binary
% (declaration/2 and rule/2 rather than /3).
read_regulus_stream2(Term, _Label, LineInfo, InRules-OutRules, InDecls-OutDecls, orthography) :-
	is_orthography_declaration(Term),
	!,
	InDecls = [declaration(Term, LineInfo) | OutDecls],
	InRules = OutRules.
read_regulus_stream2(Term, _Label, LineInfo, InRules-OutRules, InDecls-OutDecls, orthography) :-
	is_orthography_entry(Term),
	!,
	InRules = [rule(Term, LineInfo) | OutRules],
	InDecls = OutDecls.
% Collocation file: same binary wrappers as orthography.
read_regulus_stream2(Term, _Label, LineInfo, InRules-OutRules, InDecls-OutDecls, collocation) :-
	is_collocation_declaration(Term),
	!,
	InDecls = [declaration(Term, LineInfo) | OutDecls],
	InRules = OutRules.
read_regulus_stream2(Term, _Label, LineInfo, InRules-OutRules, InDecls-OutDecls, collocation) :-
	is_collocation_entry(Term),
	!,
	InRules = [rule(Term, LineInfo) | OutRules],
	InDecls = OutDecls.
% LF pattern file: ternary wrappers with the fixed label no_label.
read_regulus_stream2(Term, _Label, LineInfo, InRules-OutRules, InDecls-OutDecls, lf_pattern) :-
	is_lf_pattern_declaration(Term),
	!,
	InDecls = [declaration(no_label, Term, LineInfo) | OutDecls],
	InRules = OutRules.
read_regulus_stream2(Term, _Label, LineInfo, InRules-OutRules, InDecls-OutDecls, lf_pattern) :-
	is_lf_pattern_entry(Term),
	!,
	InRules = [rule(no_label, Term, LineInfo) | OutRules],
	InDecls = OutDecls.
% LF pattern file: bare macro invocation.
read_regulus_stream2(Term, _Label, LineInfo, InRules-OutRules, InDecls-OutDecls, lf_pattern) :-
	Term = @MacroInvocation,
	!,
	InRules = [rule(no_label, @MacroInvocation, LineInfo) | OutRules],
	InDecls = OutDecls.
% LF rewrite file: ternary wrappers with the fixed label no_label.
read_regulus_stream2(Term, _Label, LineInfo, InRules-OutRules, InDecls-OutDecls, lf_rewrite) :-
	is_lf_rewrite_declaration(Term),
	!,
	InDecls = [declaration(no_label, Term, LineInfo) | OutDecls],
	InRules = OutRules.
read_regulus_stream2(Term, _Label, LineInfo, InRules-OutRules, InDecls-OutDecls, lf_rewrite) :-
	is_lf_rewrite_entry(Term),
	!,
	InRules = [rule(no_label, Term, LineInfo) | OutRules],
	InDecls = OutDecls.
% LF rewrite file: bare macro invocation.  NB this clause alone keeps
% the incoming Label rather than no_label.
read_regulus_stream2(Term, Label, LineInfo, InRules-OutRules, InDecls-OutDecls, lf_rewrite) :-
	Term = @MacroInvocation,
	!,
	InRules = [rule(Label, @MacroInvocation, LineInfo) | OutRules],
	InDecls = OutDecls.
% Generic file: declarations (macros) and plain entries.
read_regulus_stream2(Term, _Label, LineInfo, InRules-OutRules, InDecls-OutDecls, generic) :-
	is_generic_declaration(Term),
	!,
	InDecls = [declaration(no_label, Term, LineInfo) | OutDecls],
	InRules = OutRules.
read_regulus_stream2(Term, _Label, LineInfo, InRules-OutRules, InDecls-OutDecls, generic) :-
	is_generic_entry(Term),
	!,
	InRules = [rule(no_label, Term, LineInfo) | OutRules],
	InDecls = OutDecls.

%---------------------------------------------------------------

% Resolve an include file relative to the file currently being read.
% Grammar includes go through the Regulus search path; other types use
% the plain relative-resolution predicate.
read_regulus_include_file(IncludeFile, CurrentFile, InRules-NextRules, InDecls-NextDecls, InItemNumber-NextItemNumber, FileType) :-
	(   FileType = grammar ->
	    absolute_regulus_file_for_reading_relative_to_current(IncludeFile, CurrentFile, AbsoluteIncludeFile)
	;
	    absolute_file_for_reading_relative_to_current(IncludeFile, CurrentFile, AbsoluteIncludeFile)
	),
	read_regulus_include_file1(AbsoluteIncludeFile, InRules-NextRules, InDecls-NextDecls, InItemNumber-NextItemNumber, FileType).

% Already-loaded include files are silently skipped (no duplicate rules).
read_regulus_include_file1(AbsoluteIncludeFile, InRules-InRules, InDecls-InDecls, InItemNumber-InItemNumber, _FileType) :-
	previously_loaded_include_file(AbsoluteIncludeFile),
	!.
% Read an include file's contents into the enclosing difference lists.
read_regulus_include_file1(AbsoluteIncludeFile, InRules-NextRules, InDecls-NextDecls, InItemNumber-NextItemNumber, FileType) :-
	format('~N       [Including file ~w]~n', [AbsoluteIncludeFile]),
	% Do the assert before we start reading rather than after in case we get a recursive include.
	asserta(previously_loaded_include_file(AbsoluteIncludeFile)),
	open(AbsoluteIncludeFile, read, S),
	read_regulus_stream(S, InRules-NextRules, InDecls-NextDecls, InItemNumber-NextItemNumber, 0, AbsoluteIncludeFile, FileType),
	close(S).

%---------------------------------------------------------------

% Count lexical vs non-lexical grammar rules, threading both tallies as
% In-Out accumulator pairs.  A rule that is neither is an internal error.
count_regulus_rules([], NNonLex-NNonLex, NLex-NLex).
count_regulus_rules([F|R], NNonLexIn-NNonLexOut, NLexIn-NLexOut) :-
	is_non_lexical_regulus_rule(F),
	NNonLexNext is NNonLexIn + 1,
	!,
	count_regulus_rules(R, NNonLexNext-NNonLexOut, NLexIn-NLexOut).
count_regulus_rules([F|R], NNonLexIn-NNonLexOut, NLexIn-NLexOut) :-
	is_lexical_regulus_rule(F),
	NLexNext is NLexIn + 1,
	!,
	count_regulus_rules(R, NNonLexIn-NNonLexOut, NLexNext-NLexOut).
count_regulus_rules([F|_R], _, _) :-
	regulus_error('~NInternal error: cannot identify ~w as either lexical or non-lexical rule~n', [F]).

%---------------------------------------------------------------

% Same accumulator scheme for transfer rules vs transfer lexicon entries.
count_transfer_entries([], NNonLex-NNonLex, NLex-NLex).
count_transfer_entries([F|R], NNonLexIn-NNonLexOut, NLexIn-NLexOut) :-
	is_non_lexical_transfer_entry(F),
	NNonLexNext is NNonLexIn + 1,
	!,
	count_transfer_entries(R, NNonLexNext-NNonLexOut, NLexIn-NLexOut).
count_transfer_entries([F|R], NNonLexIn-NNonLexOut, NLexIn-NLexOut) :-
	is_lexical_transfer_entry(F),
	NLexNext is NLexIn + 1,
	!,
	count_transfer_entries(R, NNonLexIn-NNonLexOut, NLexNext-NLexOut).
count_transfer_entries([F|_R], _, _) :-
	regulus_error('~NInternal error: cannot identify ~w as either lexical or non-lexical transfer rule~n', [F]).

%---------------------------------------------------------------

% Base case: empty rule list, ignored-count unchanged.
remove_labels_and_ignored_rules([], [], IgnoredIn-IgnoredIn) :- !.
% Drop a rule when its label was declared ignore_item, or when its body
% contains the dummy-rule marker; count it as ignored.
remove_labels_and_ignored_rules([rule(Label, Term, _LineInfo) | R], R1, IgnoredIn-IgnoredOut) :-
	(   ignore_regulus_item(Label)
	;
	    dummy_rule_item(Term)
	),
	IgnoredNext is IgnoredIn + 1,
	!,
	remove_labels_and_ignored_rules(R, R1, IgnoredNext-IgnoredOut).
% Otherwise keep the rule, converting rule/3 to the unlabelled rule/2.
remove_labels_and_ignored_rules([F | R], [F1 | R1], IgnoredIn-IgnoredOut) :-
	remove_label_from_rule(F, F1),
	!,
	remove_labels_and_ignored_rules(R, R1, IgnoredIn-IgnoredOut).

remove_label_from_rule(rule(_Label, Term, LineInfo), rule(Term, LineInfo)) :- !.
remove_label_from_rule(Rule, Rule1) :-
	regulus_error('~NInternal error: bad call: ~w~n', [remove_label_from_rule(Rule, Rule1)]).

% A "dummy rule" is one mentioning any of the reserved placeholder
% functors below (currently just the atom xxx).
dummy_rule_item(Term) :-
	functor_indicating_dummy_rule(F/N),
	term_contains_functor(Term, F/N),
	!.

functor_indicating_dummy_rule('xxx'/0).

%---------------------------------------------------------------

% Collect the distinct surface vocabulary items in the rules, assert
% each one as vocabulary_item/1, and return the count.
note_and_count_vocabulary_items(Rules, NVocabulary) :-
	findall(Item, vocabulary_item_in_rule_list(Rules, Item), Items0),
	safe_remove_duplicates(Items0, Items),
	length(Items, NVocabulary),
	note_vocabulary_items(Items).

note_vocabulary_items([]).
note_vocabulary_items([F | R]) :-
	assertz(vocabulary_item(F)),
	!,
	note_vocabulary_items(R).

% Enumerate (on backtracking) vocabulary items found in rule bodies,
% warning about items that contain embedded spaces.
vocabulary_item_in_rule_list([rule((_LHS --> RHS), LineInfo) | _R], Item) :-
	vocabulary_item_in_rule_body(RHS, Item),
	warn_if_vocabulary_item_contains_spaces(Item, LineInfo).
vocabulary_item_in_rule_list([_F | R], Item) :-
	vocabulary_item_in_rule_list(R, Item).

vocabulary_item_in_rule_body(Body, Item) :-
	nonvar(Body),
	vocabulary_item_in_rule_body1(Body, Item).

% Walk conjunctions, disjunctions and optional (?) constituents; a
% non-[] atomic leaf is a vocabulary item.
vocabulary_item_in_rule_body1((P, Q), Item) :-
	!,
	(   vocabulary_item_in_rule_body(P, Item)
	;
	    vocabulary_item_in_rule_body(Q, Item)
	).
vocabulary_item_in_rule_body1((P ; Q), Item) :-
	!,
	(   vocabulary_item_in_rule_body(P, Item)
	;
	    vocabulary_item_in_rule_body(Q, Item)
	).
vocabulary_item_in_rule_body1(?P, Item) :-
	!,
	vocabulary_item_in_rule_body(P, Item).
vocabulary_item_in_rule_body1(Atom, Atom) :-
	Atom \== [],
	atomic(Atom),
	!.
% A body element annotated with +Comment: look inside the body part.
vocabulary_item_in_rule_body1(Body+_Comment, Atom) :-
	vocabulary_item_in_rule_body1(Body, Atom),
	!.

% Warn when a vocabulary atom contains a literal space, unless the
% config flag allow_spaces_in_vocabulary_items is set.
warn_if_vocabulary_item_contains_spaces(_Atom, _LineInfo) :-
	allow_spaces_in_vocabulary_items,
	!.
warn_if_vocabulary_item_contains_spaces(Atom, LineInfo) :-
	atom(Atom),
	atom_codes(Atom, Chars),
	member(0' , Chars),
	inform_about_regulus_exception(regulus_exception('Vocabulary item contains spaces: "~w". You should probably split this up into individual words.', [Atom]), LineInfo),
	!.
warn_if_vocabulary_item_contains_spaces(_Atom, _LineInfo).

allow_spaces_in_vocabulary_items :-
	current_predicate(user:regulus_config/2),
	user:regulus_config(allow_spaces_in_vocabulary_items, yes).

%---------------------------------------------------------------

% We want to internalise the declarations in the right order,
% irrespective of how they are ordered in the source file,
% so that we can produce appropriate error messages if necessary.
% Each internalise_declarations1/3 call processes only declarations with
% the named functor; the Ignored counter threads through every pass.
internalise_regulus_declarations(Declarations, IgnoredIn-IgnoredOut) :-
	internalise_declarations1(Declarations, ignore_item, IgnoredIn-IgnoredNext0),
	internalise_declarations1(Declarations, macro, IgnoredNext0-IgnoredNext1),
	internalise_declarations1(Declarations, default_macro, IgnoredNext1-IgnoredNext2),
	internalise_declarations1(Declarations, external_grammar, IgnoredNext2-IgnoredNext3),
	internalise_declarations1(Declarations, feature_value_space, IgnoredNext3-IgnoredNext4),
	mark_category_valued_feature_spaces,
	combine_feature_value_space_declarations,
	internalise_declarations1(Declarations, specialises, IgnoredNext4-IgnoredNext5),
	internalise_declarations1(Declarations, ignore_specialises, IgnoredNext5-IgnoredNext6),
	internalise_declarations1(Declarations, feature_value_space_substitution, IgnoredNext6-IgnoredNext7),
	combine_specialises_declarations,
	revise_feature_value_spaces,
	internalise_declarations1(Declarations, feature, IgnoredNext7-IgnoredNext8),
	internalise_declarations1(Declarations, lexical_feature_default, IgnoredNext8-IgnoredNext9),
	internalise_declarations1(Declarations, ignore_feature, IgnoredNext9-IgnoredNext10),
	internalise_declarations1(Declarations, feature_instantiation_schedule, IgnoredNext10-IgnoredNext11),
	check_feature_instantiation_schedule,
	internalise_declarations1(Declarations, category, IgnoredNext11-IgnoredNext12),
	add_feature_declarations_for_category_valued_features,
	revise_category_declarations_for_category_valued_features,
	revise_feature_instantiation_schedule_for_category_valued_features,
	internalise_declarations1(Declarations, top_level_category, IgnoredNext12-IgnoredOut),
	give_up_if_no_or_bad_top_level_category.

%---------------------------------------------------------------

% If we're loading a transfer file, we only have ignore_items and macros to internalise.
internalise_regulus_declarations_for_transfer(Declarations, IgnoredIn-IgnoredOut) :-
	internalise_declarations1(Declarations, ignore_item, IgnoredIn-IgnoredNext0),
	internalise_declarations1(Declarations, macro, IgnoredNext0-IgnoredOut).

%---------------------------------------------------------------

% If we're loading an LF pattern file, we only have macros to internalise.
internalise_regulus_declarations_for_lf_pattern(Declarations, IgnoredIn-IgnoredOut) :-
	internalise_declarations1(Declarations, macro, IgnoredIn-IgnoredOut).

%---------------------------------------------------------------

% If we're loading an LF rewrite file, we only have macros to internalise.
internalise_regulus_declarations_for_lf_rewrite(Declarations, IgnoredIn-IgnoredOut) :-
	internalise_declarations1(Declarations, macro, IgnoredIn-IgnoredOut).

%---------------------------------------------------------------

% Only macros in generic Regulus-related file
internalise_generic_regulus_declarations(Declarations) :-
	internalise_declarations1(Declarations, macro, 0-_NIgnored).
%---------------------------------------------------------------

% Succeed quietly if the declared top-level category exists as a
% category; otherwise raise a fatal Regulus error.
give_up_if_no_or_bad_top_level_category :-
	top_level_category(TopCat),
	category_internal(TopCat, _Feats),
	!.
give_up_if_no_or_bad_top_level_category :-
	top_level_category(TopCat),
	regulus_error('~NTop-level category ~w not defined as a category.~n', [TopCat]).
give_up_if_no_or_bad_top_level_category :-
	regulus_error('~NNo top-level category defined.~n', []).

%---------------------------------------------------------------

% Process every declaration whose functor matches Functor, skipping
% the rest; both declaration/3 (labelled) and declaration/2 wrappers
% are accepted.
internalise_declarations1([], _Functor, IgnoredIn-IgnoredIn).
internalise_declarations1([F | R], Functor, IgnoredIn-IgnoredOut) :-
	(   F = declaration(Label, Decl, LineInfo) ->
	    true
	;   F = declaration(Decl, LineInfo) ->
	    Label = '*no_label*'
	;
	    format('~N   *** Internal error: bad call: ~w~n', [internalise_declarations1([F | R], Functor)]),
	    fail
	),
	(   functor(Decl, Functor, _N) ->
	    internalise_declaration(Label, Decl, LineInfo, IgnoredIn-IgnoredNext)
	;
	    IgnoredIn = IgnoredNext
	),
	!,
	internalise_declarations1(R, Functor, IgnoredNext-IgnoredOut).

% Declarations with an ignored label just bump the counter.
internalise_declaration(Label, _Decl, _LineInfo, IgnoredIn-IgnoredOut) :-
	ignore_regulus_item(Label),
	IgnoredOut is IgnoredIn + 1,
	!.
% Otherwise internalise, converting thrown regulus exceptions into
% diagnostics with the declaration's line info.
internalise_declaration(_Label, Decl, LineInfo, IgnoredIn-IgnoredIn) :-
	on_exception(
	    Exception,
	    internalise_declaration1(Decl),
	    inform_about_regulus_exception(Exception, LineInfo)
	).

% Translate the declaration to its internal form and assert it;
% the internal form 'null' means "nothing to assert".
internalise_declaration1(Decl) :-
	internalise_declaration2(Decl, Decl1),
	(   Decl1 = null ->
	    true
	;
	    assertz(Decl1)
	),
	!.

% internalise_declaration2(+Declaration, -InternalForm)
% Each clause validates one declaration type, then maps it to the
% clause to assert.
internalise_declaration2(ignore_item(Label), ignore_regulus_item(Label)) :-
	ignore_item_decl_is_consistent(Label),
	!.
internalise_declaration2(macro(LHS, RHS), macro(LHS, RHS)) :-
	macro_decl_is_consistent(LHS, RHS),
	!.
internalise_declaration2(default_macro(LHS, RHS), default_macro(LHS, RHS)) :-
	macro_decl_is_consistent(LHS, RHS),
	!.
% external_grammar: dedup — assert only if not already present.
internalise_declaration2(external_grammar(GrammarName, Body), Result) :-
	external_grammar_decl_is_consistent(GrammarName, Body),
	(   external_grammar(GrammarName, Body) ->
	    Result = null
	;
	    Result = external_grammar(GrammarName, Body)
	),
	!.
% feature_value_space: macro-expand the space, validate, and store it
% as an ordered set under the intermediate functor feature_value_space0
% (combined into feature_value_space/2 later).
internalise_declaration2(feature_value_space(Name, SpaceIn), feature_value_space0(Name, OrderedSpace)) :-
	expand_macros_in_term(SpaceIn, Space),
	feature_value_space_decl_is_consistent(Name, Space),
	list_to_ord_set(Space, OrderedSpace),
	!.
internalise_declaration2(specialises(Val1, Val2, SpaceId), specialises(Val1, Val2, SpaceId)) :-
	specialises_decl_is_consistent(Val1, Val2, SpaceId),
	!.
internalise_declaration2(ignore_specialises(Val1, Val2, SpaceId), ignore_specialises(Val1, Val2, SpaceId)) :-
	ignore_specialises_decl_is_consistent(Val1, Val2, SpaceId),
	!.
internalise_declaration2(feature_value_space_substitution(Val1, Val2, SpaceId), feature_value_space_substitution(Val1, Val2, SpaceId)) :-
	feature_value_space_substitution_decl_is_consistent(Val1, Val2, SpaceId),
	!.
internalise_declaration2(feature(Feat, Space), feature(Feat, Space)) :-
	feature_decl_is_consistent(Feat, Space),
	!.
internalise_declaration2(ignore_feature(Feat), ignore_feature(Feat)) :-
	ignore_feature_decl_is_consistent(Feat),
	!.
internalise_declaration2(lexical_feature_default(Feat, Default), lexical_feature_default(Feat, Default)) :-
	lexical_feature_default_decl_is_consistent(Feat, Default),
	!.
internalise_declaration2(feature_instantiation_schedule(Schedule), feature_instantiation_schedule(Schedule)) :-
	feature_instantiation_schedule_decl_is_consistent(Schedule),
	!.
% category: macro-expand and flatten the feature list, validate it,
% drop ignored features, and store the rest as an ordered set under
% category_internal/2.
internalise_declaration2(category(CatName, FeatsIn), category_internal(CatName, FeatsOut)) :-
	expand_macros_in_term(FeatsIn, Feats1),
	flatten_list(Feats1, Feats2),
	category_decl_is_consistent(CatName, Feats2),
	remove_ignored_features_from_list(Feats2, Feats3),
	list_to_ord_set(Feats3, FeatsOut),
	!.
internalise_declaration2(top_level_category(Cat), top_level_category(Cat)) :-
	top_level_category_decl_is_consistent(Cat),
	!.
% Catch-all: anything not matched above is a fatal error.
internalise_declaration2( Other, _) :-
	regulus_error('~NUnable to internalise regulus declaration ~q~n', [Other]).

%---------------------------------------------------------------

% Find every feature value space that mixes plain atomic values with
% syn_term(Cat) category values, and rewrite it: the space keeps all
% values (categories unwrapped), and a category_valued_feature_space/3
% record notes the cat/non-cat split for later processing.
mark_category_valued_feature_spaces :-
	findall([Id, NonCatVals, CatVals],
		find_feature_valued_feature_space(Id, NonCatVals, CatVals),
		Triples),
	mark_category_valued_feature_spaces1(Triples).

mark_category_valued_feature_spaces1([]).
mark_category_valued_feature_spaces1([F | R]) :-
	mark_category_valued_feature_space1(F),
	mark_category_valued_feature_spaces1(R).

mark_category_valued_feature_space1([Id, NonCatVals, CatVals]) :-
	append(NonCatVals, CatVals, AllVals),
	retract(feature_value_space0(Id, _Space)),
	assertz(feature_value_space0(Id, [AllVals])),
	assertz(category_valued_feature_space(Id, NonCatVals, CatVals)).

% Succeeds for a single-dimension space containing at least one
% syn_term(Cat) value; returns the cat and non-cat (atomic) values.
find_feature_valued_feature_space(Id, NonCatVals, CatVals) :-
	feature_value_space0(Id, Space),
	Space = [Vals],
	findall(Cat, member(syn_term(Cat), Vals), CatVals),
	findall(NonCatVal,
		(   member(NonCatVal, Vals),
		    atom(NonCatVal)
		),
		NonCatVals),
	CatVals \== [].

%---------------------------------------------------------------

% For every (non-ignored) feature whose value space is category-valued,
% generate derived feature declarations for the features of each
% possible category value.
add_feature_declarations_for_category_valued_features :-
	findall(
	    [Feat, CatVals, SpaceId],
	    (   feature(Feat, SpaceId),
		\+ignore_feature(Feat),
		category_valued_feature_space(SpaceId, _NonCatVals, CatVals)
	    ),
	    Triples),
	!,
	add_feature_declarations_for_category_valued_features1(Triples).

add_feature_declarations_for_category_valued_features1([]).
add_feature_declarations_for_category_valued_features1([[Feat, CatVals, SpaceId] | R]) :-
	add_feature_declarations_for_category_valued_feature(Feat, CatVals, SpaceId),
	add_feature_declarations_for_category_valued_features1(R).

add_feature_declarations_for_category_valued_feature(_Feat, [], _SpaceId).
% Process each category value of the category-valued feature in turn.
add_feature_declarations_for_category_valued_feature(Feat, [F | R], SpaceId) :-
	add_feature_declarations_for_category_valued_feature1(Feat, F, SpaceId),
	add_feature_declarations_for_category_valued_feature(Feat, R, SpaceId).

% The category value must itself be a declared category.
add_feature_declarations_for_category_valued_feature1(Feat, Cat, SpaceId) :-
	(   category_internal(Cat, SubFeats) ->
	    add_feature_declarations_for_category_valued_feature2(Feat, Cat, SubFeats)
	;
	    regulus_error('~NUndeclared category ~w listed as possible value in value-space ~w', [Cat, SpaceId])
	).

add_feature_declarations_for_category_valued_feature2(_Feat, _Cat, []).
add_feature_declarations_for_category_valued_feature2(Feat, Cat, [F | R]) :-
	add_feature_declarations_for_category_valued_feature3(Feat, Cat, F),
	add_feature_declarations_for_category_valued_feature2(Feat, Cat, R).

% Error cases: a sub-feature that is undeclared, or that is itself
% category-valued (nesting is not supported).
add_feature_declarations_for_category_valued_feature3(Feat, Cat, SubFeat) :-
	\+ feature(SubFeat, _Space),
	!,
	regulus_error('~NCategory ~w declared as possible value of feature ~w contains undeclared feature ~w', [Cat, Feat, SubFeat]).
add_feature_declarations_for_category_valued_feature3(Feat, Cat, SubFeat) :-
	feature(SubFeat, SpaceId),
	category_valued_feature_space(SpaceId, _, _),
	!,
	regulus_error('~NCategory ~w declared as possible value of feature ~w contains feature-valued feature ~w', [Cat, Feat, SubFeat]).
% Normal case: create a derived feature named Feat_Cat_SubFeat with the
% sub-feature's value space, and record the mapping in subfeature/4.
add_feature_declarations_for_category_valued_feature3(Feat, Cat, SubFeat) :-
	feature(SubFeat, SpaceId),
	join_with_underscore([Feat, Cat, SubFeat], NewFeat),
	assertz(subfeature(Feat, Cat, SubFeat, NewFeat)),
	assertz(feature(NewFeat, SpaceId)).

%---------------------------------------------------------------

% Collect all categories that carry at least one category-valued
% feature; their declarations must be extended with the derived
% sub-features created above.
revise_category_declarations_for_category_valued_features :-
	findall(
	    Cat,
	    (   category_internal(Cat, Feats),
		member(Feat, Feats),
		feature(Feat, Space),
		category_valued_feature_space(Space, _, _)
	    ),
	    Cats),
	revise_category_declarations_for_category_valued_features1(Cats).

revise_category_declarations_for_category_valued_features1([]).
revise_category_declarations_for_category_valued_features1([F | R]) :-
	revise_category_declaration_for_category_valued_features(F),
	revise_category_declarations_for_category_valued_features1(R).

% Rebuild one category's feature list: add the derived sub-features of
% each of its category-valued features, sort, and replace the stored
% category_internal/2 fact.
revise_category_declaration_for_category_valued_features(Cat) :-
	category_internal(Cat, NormalFeats),
	findall(SubFeat,
		(   member(Feat, NormalFeats),
		    subfeature(Feat, _, _, SubFeat)
		),
		SubFeats),
	append(NormalFeats, SubFeats, NewFeats0),
	sort(NewFeats0, NewFeats),
	retract(category_internal(Cat, NormalFeats)),
	assertz(category_internal(Cat, NewFeats)).

%---------------------------------------------------------------

% Rewrite the feature instantiation schedule so that every derived
% sub-feature is scheduled in the same group as its parent feature.
revise_feature_instantiation_schedule_for_category_valued_features :-
	feature_instantiation_schedule(Schedule),
	revise_feature_instantiation_schedule_for_category_valued_features(Schedule, Schedule1),
	retractall(feature_instantiation_schedule(_)),
	assert(feature_instantiation_schedule(Schedule1)).

revise_feature_instantiation_schedule_for_category_valued_features([], []).
revise_feature_instantiation_schedule_for_category_valued_features([F | R], [F1 | R1]) :-
	revise_feature_instantiation_schedule_for_category_valued_features1(F, F1),
	revise_feature_instantiation_schedule_for_category_valued_features(R, R1).

revise_feature_instantiation_schedule_for_category_valued_features1([], []).
revise_feature_instantiation_schedule_for_category_valued_features1([Feat | R], Result) :-
	findall(SubFeat, subfeature(Feat, _, _, SubFeat), SubFeats),
	revise_feature_instantiation_schedule_for_category_valued_features1(R, R1),
	(   SubFeats \== [] ->
	    append([Feat | SubFeats], R1, Result)
	;
	    Result = [Feat | R1]
	).

%---------------------------------------------------------------

% Merge the possibly-multiple feature_value_space0/2 facts for each
% space id into a single feature_value_space/2 fact.
combine_feature_value_space_declarations :-
	all_feature_value_space_ids(Ids),
	combine_feature_value_space_declarations1(Ids).

all_feature_value_space_ids(Ids) :-
	setof(Id, Val^feature_value_space0(Id, Val), Ids),
	!.
% No spaces declared at all: warn but carry on with an empty id list.
all_feature_value_space_ids([]) :-
	format('~NREGULUS WARNING:~nNo feature_value_space declarations provided.~n', []).

combine_feature_value_space_declarations1([]).
combine_feature_value_space_declarations1([F|R]) :-
	combine_feature_value_space_declarations2(F),
	!,
	combine_feature_value_space_declarations1(R).

% Combine all feature_value_space0 facts for one id; failure to merge
% (e.g. mismatched dimensionality) is a fatal error.
combine_feature_value_space_declarations2(Id) :-
	findall(Space, feature_value_space0(Id, Space), Spaces),
	(   combine_feature_value_space_declarations3(Spaces, CombinedSpace) ->
	    assertz(feature_value_space(Id, CombinedSpace))
	;
	    regulus_error('~NUnable to combine feature_value_space declarations for ~w~n', [Id])
	).

combine_feature_value_space_declarations3([Space], Space) :- !.
combine_feature_value_space_declarations3([F|R], CombinedSpace) :-
	combine_feature_value_space_declarations3(R, CombinedSpaceR),
	combine_feature_value_space_declarations4(F, CombinedSpaceR, CombinedSpace).

% Two spaces can only be merged if they have the same number of
% dimensions (list length); merge is dimension-wise union.
combine_feature_value_space_declarations4(Space1, Space2, CombinedSpace) :-
	is_list(Space1),
	is_list(Space2),
	length(Space1, Len),
	length(Space2, Len),
	!,
	combine_feature_value_space_declarations5(Space1, Space2, CombinedSpace).

combine_feature_value_space_declarations5([], [], []).
combine_feature_value_space_declarations5([F1|R1], [F2|R2], [F3|R3]) :-
	ord_union(F1, F2, F3List),
	list_to_ord_set(F3List, F3),
	!,
	combine_feature_value_space_declarations5(R1, R2, R3).

%---------------------------------------------------------------

% For each space constrained by (uncancelled) specialises declarations,
% compile the specialisation hierarchy into substitution facts.
combine_specialises_declarations :-
	findall(SpaceId,
		uncancelled_specialises(_Val1, _Val2, SpaceId),
		ConstrainedSpaceIds),
	list_to_ord_set(ConstrainedSpaceIds, ConstrainedSpaceIdsOS),
	combine_specialises_declarations1(ConstrainedSpaceIdsOS).

combine_specialises_declarations1([]).
combine_specialises_declarations1([F | R]) :-
	combine_specialises_declarations2(F),
	combine_specialises_declarations1(R).
% Within one space, handle each distinct "super" value that has
% specialisations.
combine_specialises_declarations2(SpaceId) :-
	findall(Super,
		uncancelled_specialises(_Sub, Super, SpaceId),
		Supers),
	list_to_ord_set(Supers, SupersOS),
	combine_specialises_declarations3(SupersOS, SpaceId).

combine_specialises_declarations3([], _SpaceId).
combine_specialises_declarations3([F | R], SpaceId) :-
	combine_specialises_declarations4(F, SpaceId),
	combine_specialises_declarations3(R, SpaceId).

% Assert a substitution mapping Super to the disjunction of its most
% specific (leaf) specialisations.
combine_specialises_declarations4(Super, SpaceId) :-
	findall(Sub,
		transitively_specialises_feature_value(Super, SpaceId, Sub),
		Subs),
	list_to_ord_set(Subs, SubsOS),
	atom_list_to_disjunction(SubsOS, DisjunctionOfSubs),
	assertz(feature_value_space_substitution(Super, DisjunctionOfSubs, SpaceId)).

% Enumerate (on backtracking) the leaves of the specialisation tree
% below Super, with cycle detection via the Previous trail.
transitively_specialises_feature_value(Super, SpaceId, Sub) :-
	transitively_specialises_feature_value1(Super, SpaceId, Sub, []).

transitively_specialises_feature_value1(Super, SpaceId, Sub, Previous) :-
	uncancelled_specialises(NextSub, Super, SpaceId),
	error_if_circular_chain_of_specialisations(SpaceId, NextSub, Previous),
	(   \+ uncancelled_specialises(_LowerSub, NextSub, SpaceId) ->
	    % NextSub has no further specialisations, so it is a leaf.
	    Sub = NextSub
	;
	    transitively_specialises_feature_value1(NextSub, SpaceId, Sub, [NextSub | Previous])
	).

% Abort with the (reversed, i.e. root-first) cycle trace if Sub was
% already seen on the current descent.
error_if_circular_chain_of_specialisations(SpaceId, Sub, Previous) :-
	member(Sub, Previous),
	!,
	reverse([Sub | Previous], ReversedTrace),
	regulus_error('~NCircular chain of specialisations in ~w: ~w~n', [SpaceId, ReversedTrace]).
error_if_circular_chain_of_specialisations(_SpaceId, _Sub, _Previous).

% [a,b,c] -> a \/ (b \/ c); a singleton list is just the atom itself.
atom_list_to_disjunction([Atom], Atom) :-
	atomic(Atom),
	!.
atom_list_to_disjunction([F | R], (F\/DisjR)) :-
	atom_list_to_disjunction(R, DisjR).

% A specialises/3 fact not overridden by ignore_specialises/3.
uncancelled_specialises(Val1, Val2, SpaceId) :-
	specialises(Val1, Val2, SpaceId),
	\+ ignore_specialises(Val1, Val2, SpaceId).
%---------------------------------------------------------------

% revise_feature_value_spaces
% Re-assert every feature_value_space/2 fact with substituted values removed.
% Snapshot first, then retract, so that we never iterate over a database we
% are simultaneously modifying.
revise_feature_value_spaces :-
    findall(feature_value_space(Id, Space), feature_value_space(Id, Space), Decls),
    retractall(feature_value_space(_,_)),
    revise_feature_value_spaces1(Decls).

revise_feature_value_spaces1([]).
revise_feature_value_spaces1([F | R]) :-
    revise_feature_value_space(F),
    !,
    revise_feature_value_spaces1(R).

% Strip substituted values from one space and re-assert it.
revise_feature_value_space(feature_value_space(Id, Space)) :-
    remove_substituted_vals_from_feature_value_space(Space, Id, Space1),
    assertz(feature_value_space(Id, Space1)).

% Map the per-component filter over every component of the space.
remove_substituted_vals_from_feature_value_space([], _Id, []).
remove_substituted_vals_from_feature_value_space([F | R], Id, [F1 | R1]) :-
    remove_substituted_vals_from_feature_value_space1(F, Id, F1),
    !,
    remove_substituted_vals_from_feature_value_space(R, Id, R1).

% Drop any value for which a feature_value_space_substitution/3 fact exists.
remove_substituted_vals_from_feature_value_space1([], _Id, []).
remove_substituted_vals_from_feature_value_space1([F | R], Id, Out) :-
    (   feature_value_space_substitution(F, _SubstitutedValue, Id) ->
        Out = R1
    ;   Out = [F | R1]
    ),
    !,
    remove_substituted_vals_from_feature_value_space1(R, Id, R1).

%---------------------------------------------------------------

% check_feature_instantiation_schedule
% Ensure exactly one feature_instantiation_schedule/1 fact exists,
% synthesising a default one when none was declared.
check_feature_instantiation_schedule :-
    findall(Schedule, feature_instantiation_schedule(Schedule), Schedules),
    length(Schedules, NSchedules),
    check_feature_instantiation_schedule1(NSchedules).

% If we have exactly one feature_instantiation_schedule declaration, fine.
check_feature_instantiation_schedule1(1) :- !.
% If we have none, make up a default declaration which says that we instantiate
% all the features in one go.
check_feature_instantiation_schedule1(0) :-
    !,
    findall(Feat, feature(Feat, _), Feats),
    asserta(feature_instantiation_schedule([Feats])).
% Otherwise signal an error.
check_feature_instantiation_schedule1(_Other) :-
    regulus_error('~NMore than one feature_instantiation_schedule declaration~n', []).
%---------------------------------------------------------------

% ignore_item_decl_is_consistent(+Label)
% The argument of an ignore_item declaration must be an atom.
ignore_item_decl_is_consistent(Label) :-
    (   atom(Label) ->
        true
    ;   regulus_error('~NArgument in ignore_item must be an atom~n', [])
    ).

%---------------------------------------------------------------

% macro_decl_is_consistent(+LHS, +RHS)
% Only the LHS is checked: it may not be an unbound variable.
macro_decl_is_consistent(LHS, _RHS) :-
    (   var(LHS) ->
        regulus_error('~NLHS in macro declaration may not be a variable~n', [])
    ;   true
    ).

%---------------------------------------------------------------

% external_grammar_decl_is_consistent(+GrammarName, +Body)
% Both arguments must be macro-free atoms.
external_grammar_decl_is_consistent(GrammarName, Body) :-
    (   term_contains_functor([GrammarName, Body], '@'/1) ->
        regulus_error('~NMacros not yet permitted in external grammar declaration~n', [])
    ;   true
    ),
    (   atom(GrammarName) ->
        true
    ;   regulus_error('~NFirst arg in external_grammar declaration not an atom~n', [])
    ),
    (   atom(Body) ->
        true
    ;   regulus_error('~NSecond arg in external_grammar declaration not an atom~n', [])
    ).

% feature_value_space_decl_is_consistent(+Name, +Space)
% Name must be a macro-free atom; Space must be a list of lists of category
% values.  A plain list of values gets a targeted "did you mean" message.
feature_value_space_decl_is_consistent(Name, Space) :-
    (   term_contains_functor([Name, Space], '@'/1) ->
        regulus_error('~NMacros not yet permitted in feature value space declaration~n', [])
    ;   true
    ),
    (   atom(Name) ->
        true
    ;   regulus_error('~NFirst arg in feature_value_space declaration not an atom~n', [])
    ),
    (   is_list_of_lists_of_cat_vals(Space) ->
        true
    ;   is_list_of_cat_vals(Space) ->
        regulus_error('~NSecond arg in feature_value_space declaration should be a LIST OF LISTS of category values. You maybe want "~w" instead of "~w"?~n', [[Space], Space])
    ;   otherwise ->
        regulus_error('~NSecond arg in feature_value_space declaration not a list of lists of category values~n', [])
    ).
% specialises_decl_is_consistent(+Val1, +Val2, +SpaceId)
% Validate a specialises/3 declaration: macro-free, SpaceId an atom naming a
% declared feature value space, and both values in the same space component.
specialises_decl_is_consistent(Val1, Val2, SpaceId) :-
    (   term_contains_functor([Val1, Val2, SpaceId], '@'/1) ->
        regulus_error('~NMacros not yet permitted specialises declaration~n', [])
    ;   true
    ),
    (   atom(SpaceId) ->
        true
    ;   regulus_error('~NThird arg in "specialises" declaration not an atom~n', [])
    ),
    (   feature_value_space(SpaceId, Space) ->
        true
    ;   regulus_error('~NThird arg in "specialises" declaration not declared as feature value space~n', [])
    ),
    feature_value_space_inheritance_rel_is_consistent(Val1, Val2, Space).

% ignore_specialises_decl_is_consistent(+Val1, +Val2, +SpaceId)
% Same shape of validation as specialises_decl_is_consistent/3, but the
% final membership check only warns instead of raising an error.
ignore_specialises_decl_is_consistent(Val1, Val2, SpaceId) :-
    (   term_contains_functor([Val1, Val2, SpaceId], '@'/1) ->
        regulus_error('~NMacros not yet permitted in ignore_specialises declaration~n', [])
    ;   true
    ),
    (   atom(SpaceId) ->
        true
    ;   regulus_error('~NThird arg in "ignore_specialises" declaration not an atom~n', [])
    ),
    (   feature_value_space(SpaceId, Space) ->
        true
    ;   regulus_error('~NThird arg in "ignore_specialises" declaration not declared as feature value space~n', [])
    ),
    ignore_feature_value_space_inheritance_rel_is_consistent(Val1, Val2, Space).

% feature_value_space_substitution_decl_is_consistent(+Val1, +Val2, +SpaceId)
% Validation for explicit feature_value_space_substitution/3 declarations;
% the final membership check warns rather than errors.
feature_value_space_substitution_decl_is_consistent(Val1, Val2, SpaceId) :-
    (   term_contains_functor([Val1, Val2, SpaceId], '@'/1) ->
        regulus_error('~NMacros not yet permitted in feature_value_space_substitution declaration~n', [])
    ;   true
    ),
    (   atom(SpaceId) ->
        true
    ;   regulus_error('~NThird arg in "feature_value_space_substitution" declaration not an atom~n', [])
    ),
    (   feature_value_space(SpaceId, Space) ->
        true
    ;   regulus_error('~NThird arg in "feature_value_space_substitution" declaration not declared as feature value space~n', [])
    ),
    feature_value_space_substitution_rel_is_consistent(Val1, Val2, Space).
% feature_value_space_inheritance_rel_is_consistent(+Val1, +Val2, +Space)
% Succeeds when Val1 and Val2 occur in the same component of Space;
% otherwise the second clause raises a fatal error.
feature_value_space_inheritance_rel_is_consistent(Val1, Val2, Space) :-
    (   term_contains_functor([Val1, Val2, Space], '@'/1) ->
        regulus_error('~NMacros not yet permitted in feature_value_space_inheritance_rel declaration~n', [])
    ;   true
    ),
    member(SubSpace, Space),
    member(Val1, SubSpace),
    member(Val2, SubSpace),
    !.
feature_value_space_inheritance_rel_is_consistent(_Val1, _Val2, _Space) :-
    regulus_error('~N"specialises" declaration must be of form specialises(Val1, Val2, Space),\nwith Val1 and Val2 members of the same component of the feature value space Space~n', []).

% Same membership test for ignore_specialises declarations, but a failure
% only produces a warning (the declaration is tolerated).
ignore_feature_value_space_inheritance_rel_is_consistent(Val1, Val2, Space) :-
    (   term_contains_functor([Val1, Val2, Space], '@'/1) ->
        regulus_error('~NMacros not yet permitted in ignore_feature_value_space_inheritance_rel declaration~n', [])
    ;   true
    ),
    member(SubSpace, Space),
    member(Val1, SubSpace),
    member(Val2, SubSpace),
    !.
ignore_feature_value_space_inheritance_rel_is_consistent(Val1, Val2, Space) :-
    format('~NWARNING: not all values defined in declaration: ~w~n~n', [ignore_specialises(Val1, Val2, Space)]).

% Same membership test for substitution declarations; warns on failure.
feature_value_space_substitution_rel_is_consistent(Val1, Val2, Space) :-
    (   term_contains_functor([Val1, Val2, Space], '@'/1) ->
        regulus_error('~NMacros not yet permitted in feature_value_space_substitution_rel declaration~n', [])
    ;   true
    ),
    member(SubSpace, Space),
    member(Val1, SubSpace),
    member(Val2, SubSpace),
    !.
feature_value_space_substitution_rel_is_consistent(Val1, Val2, Space) :-
    format('~NWARNING: not all values defined in declaration: ~w~n~n', [feature_value_space_substitution(Val1, Val2, Space)]).
% feature_decl_is_consistent(+Feat, +Space)
% A feature declaration must be macro-free, both args atoms, and Space the
% name of a declared feature value space.
feature_decl_is_consistent(Feat, Space) :-
    (   term_contains_functor([Feat, Space], '@'/1) ->
        regulus_error('~NMacros not yet permitted in feature declaration~n', [])
    ;   true
    ),
    (   atom(Feat) ->
        true
    ;   regulus_error('~NFirst arg in feature declaration not an atom~n', [])
    ),
    (   atom(Space) ->
        true
    ;   regulus_error('~NSecond arg in feature declaration not an atom~n', [])
    ),
    (   feature_value_space(Space, _) ->
        true
    ;   regulus_error('~NSecond arg in feature declaration not declared as feature value space~n', [])
    ).

% ignore_feature_decl_is_consistent(+Feat)
% Feat must be an atom naming a declared feature; the built-in semantic
% features sem and gsem are also accepted.
ignore_feature_decl_is_consistent(Feat) :-
    (   term_contains_functor([Feat], '@'/1) ->
        regulus_error('~NMacros not yet permitted in ignore_feature declaration~n', [])
    ;   true
    ),
    (   atom(Feat) ->
        true
    ;   regulus_error('~NArg in ignore_feature declaration not an atom~n', [])
    ),
    (   ( feature(Feat, _) ; Feat = sem ; Feat = gsem ) ->
        true
    ;   regulus_error('~NArg in ignore_feature declaration not declared as feature~n', [])
    ).

% lexical_feature_default_decl_is_consistent(+Feat, +Default)
% Feat must be a declared feature and Default a legal value for it.
lexical_feature_default_decl_is_consistent(Feat, Default) :-
    (   term_contains_functor([Feat, Default], '@'/1) ->
        regulus_error('~NMacros not yet permitted in lexical_feature_default declaration~n', [])
    ;   true
    ),
    (   atom(Feat) ->
        true
    ;   regulus_error('~NFirst arg in lexical_feature_default declaration not an atom~n', [])
    ),
    (   feature(Feat, _SpaceId) ->
        true
    ;   regulus_error('~NFirst arg in lexical_feature_default declaration not declared as feature~n', [])
    ),
    (   is_valid_feat_val(Feat, Default) ->
        true
    ;   regulus_error('~NSecond arg in lexical_feature_default declaration not a possible value for "~w"~n', [Feat])
    ).
% feature_instantiation_schedule_decl_is_consistent(+Schedule)
% Schedule must be a macro-free list of lists of atoms; every atom must be a
% declared feature, and every declared feature must appear somewhere.
feature_instantiation_schedule_decl_is_consistent(Schedule) :-
    (   term_contains_functor(Schedule, '@'/1) ->
        regulus_error('~NMacros not yet permitted in feature_instantiation_schedule declaration~n', [])
    ;   true
    ),
    (   is_list_of_atom_lists(Schedule) ->
        true
    ;   regulus_error('~NArg in feature_instantiation_schedule declaration not a list of lists of atoms~n', [])
    ),
    findall(NonFeat, non_feat_in_schedule(NonFeat, Schedule), NonFeats),
    findall(OmittedFeat, omitted_feat_in_schedule(OmittedFeat, Schedule), OmittedFeats),
    (   NonFeats = [] ->
        true
    ;   regulus_error('~NFollowing atoms in feature_instantiation_schedule declaration not declared as features: ~w~n', [NonFeats])
    ),
    (   OmittedFeats = [] ->
        true
    ;   regulus_error('~NFollowing features not listed in feature_instantiation_schedule declaration: ~w~n', [OmittedFeats])
    ).

% An atom appearing in the schedule that is not a declared feature.
non_feat_in_schedule(Feat, Schedule) :-
    member(Stage, Schedule),
    member(Feat, Stage),
    \+ feature(Feat, _).

% A declared feature that appears in no stage of the schedule.
omitted_feat_in_schedule(Feat, Schedule) :-
    feature(Feat, _),
    \+ ( ( member(Stage, Schedule), member(Feat, Stage) ) ).

% category_decl_is_consistent(+CatName, +Feats)
% CatName must be an atom; Feats a list of declared features; sem and gsem
% are mutually exclusive.
category_decl_is_consistent(CatName, Feats) :-
    (   atom(CatName) ->
        true
    ;   regulus_error('~NFirst arg in category declaration not an atom~n', [])
    ),
    (   is_list(Feats) ->
        true
    ;   regulus_error('~NSecond arg in category declaration not a list~n', [])
    ),
    (   ( member(sem, Feats), member(gsem, Feats) ) ->
        regulus_error('~NCannot have both sem and gsem as features~n', [])
    ;   true
    ),
    (   ( undeclared_features_in_list(Feats, UndeclaredFeats), UndeclaredFeats \== [] ) ->
        regulus_error('~NUndeclared features in category declaration: ~w~n', [UndeclaredFeats])
    ;   true
    ).
% top_level_category_decl_is_consistent(+Cat)
% Cat must be a macro-free atom already declared as a category.
top_level_category_decl_is_consistent(Cat) :-
    (   term_contains_functor(Cat, '@'/1) ->
        regulus_error('~NMacros not yet permitted in top_level_category declaration~n', [])
    ;   true
    ),
    (   atom(Cat) ->
        true
    ;   regulus_error('~NFirst arg in top_level_category declaration not an atom~n', [])
    ),
    (   category_internal(Cat, _) ->
        true
    ;   regulus_error('~NFirst arg in top_level_category declaration not declared as category~n', [])
    ).

% undeclared_features_in_list(+Feats, -UndeclaredFeats)
% Collect the members of Feats that are neither built-in semantic features
% (sem/gsem), declared features, nor explicitly ignored features.
undeclared_features_in_list([], []).
undeclared_features_in_list([F | R], [F | R1]) :-
    \+ member(F, [sem, gsem]),
    \+ feature(F, _),
    \+ ignore_feature(F),
    !,
    undeclared_features_in_list(R, R1).
undeclared_features_in_list([_F | R], R1) :-
    !,
    undeclared_features_in_list(R, R1).

%---------------------------------------------------------------

% absolute_file_for_reading_relative_to_current(+File, +CurrentFile, -AbsoluteFile)
% Resolve File relative to the directory containing CurrentFile, converting
% any exception (or outright failure) into a uniform Regulus error.
absolute_file_for_reading_relative_to_current(File, CurrentFile, AbsoluteFile) :-
    on_exception(
        _Exception,
        absolute_file_for_reading_relative_to_current1(File, CurrentFile, AbsoluteFile),
        regulus_error('~NUnable to interpret ~w (included in ~w) as the name of a readable file.~n', [File, CurrentFile])
    ),
    !.
absolute_file_for_reading_relative_to_current(File, CurrentFile, _AbsoluteFile) :-
    regulus_error('~NUnable to interpret ~w (included in ~w) as the name of a readable file.~n', [File, CurrentFile]).

% Temporarily switch the working directory to CurrentFile's directory so that
% absolute_file_name/2 resolves relative paths correctly, then switch back.
absolute_file_for_reading_relative_to_current1(File, CurrentFile, AbsoluteFile) :-
    directory_and_file_for_pathname(CurrentFile, CurrentDirectory, _),
    safe_working_directory(LastDirectory, CurrentDirectory),
    absolute_file_name(File, AbsoluteFile),
    safe_working_directory(_, LastDirectory).
%---------------------------------------------------------------

% absolute_regulus_file_for_reading_relative_to_current(+File, +CurrentFile, -AbsoluteFile)
% Like absolute_file_for_reading_relative_to_current/3, but additionally
% requires/forces the ".regulus" extension.
absolute_regulus_file_for_reading_relative_to_current(File, CurrentFile, AbsoluteFile) :-
    on_exception(
        _Exception,
        absolute_regulus_file_for_reading_relative_to_current1(File, CurrentFile, AbsoluteFile),
        regulus_error('~NUnable to interpret ~w (included in ~w) as the name of a readable file with .regulus extension~n', [File, CurrentFile])
    ),
    !.
absolute_regulus_file_for_reading_relative_to_current(File, CurrentFile, _AbsoluteFile) :-
    regulus_error('~NUnable to interpret ~w (included in ~w) as the name of a readable file with .regulus extension~n', [File, CurrentFile]).

% Resolve relative to CurrentFile's directory, restoring the cwd afterwards.
absolute_regulus_file_for_reading_relative_to_current1(File, CurrentFile, AbsoluteFile) :-
    directory_and_file_for_pathname(CurrentFile, CurrentDirectory, _),
    safe_working_directory(LastDirectory, CurrentDirectory),
    absolute_regulus_file_for_reading(File, AbsoluteFile),
    safe_working_directory(_, LastDirectory).

%---------------------------------------------------------------

% absolute_regulus_file_for_reading(+File, -AbsoluteFile)
% Add the .regulus extension if missing, absolutise, and check readability.
absolute_regulus_file_for_reading(File, _AbsoluteFile) :-
    var(File),
    regulus_error('~NVariable used as file name~n', []).
absolute_regulus_file_for_reading(File, AbsoluteFile) :-
    add_regulus_extension_if_necessary(File, FileWithExtension),
    absolute_file_name(FileWithExtension, AbsoluteFile),
    safe_file_exists(AbsoluteFile),
    !.
absolute_regulus_file_for_reading(File, _AbsoluteFile) :-
    regulus_error('~NUnable to interpret ~w as the name of a readable file with .regulus extension~n', [File]).

% add_regulus_extension_if_necessary(+File, -FileWithExtension)
% Atomic name already ending in ".regulus": keep unchanged.
add_regulus_extension_if_necessary(File, FileWithExtension) :-
    atomic(File),
    atom_codes(File, Chars),
    append(_Body, ".regulus", Chars),
    !,
    File = FileWithExtension.
% Atomic name without the extension: append ".regulus".
add_regulus_extension_if_necessary(File, FileWithExtension) :-
    atomic(File),
    !,
    atom_codes(File, Chars),
    append(Chars, ".regulus", FullChars),
    atom_codes(FileWithExtension, FullChars).
% Unary compound, e.g. a library(...) style file spec: recurse on the argument.
add_regulus_extension_if_necessary(File, FileWithExtension) :-
    compound(File),
    functor(File, F, 1),
    functor(FileWithExtension, F, 1),
    arg(1, File, Arg),
    arg(1, FileWithExtension, ArgWithExtension),
    add_regulus_extension_if_necessary(Arg, ArgWithExtension).

%---------------------------------------------------------------

% read_corpus_file_printing_statistics(+File, -List)
% Read a corpus file and report how many records were loaded.
read_corpus_file_printing_statistics(File, List) :-
    safe_absolute_file_name(File, AbsFile),
    read_corpus_file(AbsFile, List),
    length(List, N),
    format('~N--- Read file (~d records) ~w~n', [N, AbsFile]),
    !.

% read_corpus_file(+File, -List)
% Load a Prolog corpus file and expand multiple_sent/N entries into
% repeated sent/N entries.
read_corpus_file(File, List) :-
    safe_absolute_file_name(File, AbsFile),
    prolog_file_to_list(AbsFile, List0),
    expand_multiple_entries_in_corpus_file(List0, List),
    !.

% sent/N records pass through unchanged.
expand_multiple_entries_in_corpus_file([], []).
expand_multiple_entries_in_corpus_file([F | R], [F | RestList]) :-
    compound(F),
    functor(F, sent, _),
    !,
    expand_multiple_entries_in_corpus_file(R, RestList).
% multiple_sent(M, ...) with M > 0 emits one sent(...) and re-queues
% multiple_sent(M-1, ...) at the head of the input.
expand_multiple_entries_in_corpus_file([F | R], [F1 | RestList]) :-
    compound(F),
    F =.. [multiple_sent, Multiplicity | Args],
    number(Multiplicity),
    Multiplicity > 0,
    F1 =.. [sent | Args],
    Multiplicity1 is Multiplicity - 1,
    F2 =.. [multiple_sent, Multiplicity1 | Args],
    !,
    expand_multiple_entries_in_corpus_file([F2 | R], RestList).
% Exhausted multiple_sent entries are dropped.
expand_multiple_entries_in_corpus_file([F | R], RestList) :-
    compound(F),
    F =.. [multiple_sent, Multiplicity | _Args],
    number(Multiplicity),
    Multiplicity =< 0,
    !,
    expand_multiple_entries_in_corpus_file(R, RestList).
% Anything else is reported and skipped.
expand_multiple_entries_in_corpus_file([F | R], RestList) :-
    format('~N*** Warning: bad entry in corpus file: ~w~n', [F]),
    !,
    expand_multiple_entries_in_corpus_file(R, RestList).

%------------------------------------------------------------------------------------

% include_closure_for_file_or_list(+FileOrList, -Closure, +Extension)
% Entry point: compute the include closure starting from the top level.
include_closure_for_file_or_list(FileOrList, Closure, Extension) :-
    include_closure_for_file_or_list(FileOrList, '*top*', Closure, Extension).
% include_closure_for_file_or_list(+FileOrList, +CurrentFile, -Closure, +Extension)
% Collect (sorted, deduplicated) the transitive closure of include/1
% references reachable from FileOrList.
include_closure_for_file_or_list(FileOrList, CurrentFile, Closure, Extension) :-
    findall(Member,
            member_of_include_closure_for_file_or_list(FileOrList, CurrentFile, Extension, Member),
            Members),
    sort(Members, Closure).

% A list of files: enumerate members of the closure of each element.
member_of_include_closure_for_file_or_list(List, CurrentFile, Extension, Member) :-
    is_list(List),
    member(File, List),
    member_of_include_closure_for_file_or_list(File, CurrentFile, Extension, Member).
% A single file: absolutise (relative to CurrentFile unless at top level),
% force the extension if missing, then yield the file itself and,
% recursively, everything it includes.
member_of_include_closure_for_file_or_list(File, CurrentFile, Extension, Member) :-
    \+ is_list(File),
    (   CurrentFile = '*top*' ->
        safe_absolute_file_name(File, AbsFile0)
    ;   otherwise ->
        absolute_file_for_reading_relative_to_current(File, CurrentFile, AbsFile0)
    ),
    (   pathname_has_extension(AbsFile0, Extension) ->
        AbsFile = AbsFile0
    ;   format_to_atom('~w.~w', [AbsFile0, Extension], AbsFile)
    ),
    safe_file_exists(AbsFile),
    (   Member = AbsFile
    ;   member_of_include_closure_for_file(AbsFile, Extension, Member)
    ).

% Enumerate closure members contributed by include/1 terms inside File.
member_of_include_closure_for_file(File, Extension, Member) :-
    prolog_file_to_list(File, List),
    member(include(IncludedFile), List),
    member_of_include_closure_for_file_or_list(IncludedFile, File, Extension, Member).

%---------------------------------------------------------------

% all_sem_features(+Rules, -SemFeats)
% Sorted list of all semantic feature names occurring under sem/gsem values
% in the rule list; falls back to [] if the findall-based clause fails.
all_sem_features(Rules, SemFeats) :-
    findall(SemFeat, sem_feature_in_rule_in_list(SemFeat, Rules), SemFeats0),
    sort(SemFeats0, SemFeats),
    !.
all_sem_features(_Rules, SemFeats) :-
    SemFeats = [].

sem_feature_in_rule_in_list(SemFeat, Rules) :-
    member(rule(Rule, _LineInfo), Rules),
    sem_feature_in_rule(SemFeat, Rule).

% Walk the rule structure (-->, conjunction, disjunction) down to categories.
sem_feature_in_rule(SemFeat, (H --> B)) :-
    !,
    ( sem_feature_in_rule(SemFeat, H) ; sem_feature_in_rule(SemFeat, B) ).
sem_feature_in_rule(SemFeat, (P, Q)) :-
    !,
    ( sem_feature_in_rule(SemFeat, P) ; sem_feature_in_rule(SemFeat, Q) ).
sem_feature_in_rule(SemFeat, (P ; Q)) :-
    !,
    ( sem_feature_in_rule(SemFeat, P) ; sem_feature_in_rule(SemFeat, Q) ).
% At a category, look inside the sem or gsem value.
sem_feature_in_rule(SemFeat, _Cat:FeaturesWithVals) :-
    !,
    (   member((sem=SemVal), FeaturesWithVals)
    ;   member((gsem=SemVal), FeaturesWithVals)
    ),
    sem_feature_in_semval(SemFeat, SemVal).

% A sem value is a list of Feature=Value pairs, possibly nested.
sem_feature_in_semval(SemFeat, SemVal) :-
    is_list(SemVal),
    member((F=V), SemVal),
    (   SemFeat = F
    ;   sem_feature_in_semval(SemFeat, V)
    ).

%---------------------------------------------------------------

% add_lexical_feature_defaults(RulesIn, RulesOut)
% Add declared default feature values to every lexical rule; non-lexical
% rules pass through unchanged.
add_lexical_feature_defaults([], []).
add_lexical_feature_defaults([F | R], [F1 | R1]) :-
    (   is_lexical_regulus_rule(F) ->
        add_lexical_feature_defaults_to_rule(F, F1)
    ;   F1 = F
    ),
    !,
    add_lexical_feature_defaults(R, R1).

% Apply defaults to one lexical rule; a Regulus exception is reported with
% the rule's line info and then converted into failure.
add_lexical_feature_defaults_to_rule(rule((CatName:FeatVals --> RHS), LineInfo),
                                     rule((CatName:FeatVals1 --> RHS1), LineInfo)) :-
    on_exception(
        Exception,
        (   add_lexical_feature_defaults_to_feats(CatName, FeatVals, FeatVals1),
            remove_annotations_from_lex_items(RHS, RHS1)
        ),
        (   inform_about_regulus_exception(Exception, LineInfo),
            fail
        )
    ),
    !.
%add_lexical_feature_defaults_to_rule(X, Y) :-
%	regulus_error('Call failed: ~w', [add_lexical_feature_defaults_to_rule(X, Y)]).

% Undeclared category: fatal error.
add_lexical_feature_defaults_to_feats(CatName, _FeatVals, _FeatVals1) :-
    \+ category_internal(CatName, _FeatsOS),
    regulus_error('Undeclared category: ~w', [CatName]).
% For each declared feature of the category not already given a value,
% append its lexical default (if any).
add_lexical_feature_defaults_to_feats(CatName, FeatVals, FeatVals1) :-
    category_internal(CatName, FeatsOS),
    feats_in_featvals(FeatVals, NamedFeats),
    list_to_ord_set(NamedFeats, NamedFeatsOS),
    ord_subtract(FeatsOS, NamedFeatsOS, UnnamedFeats),
    default_feat_vals_for_feat_list(UnnamedFeats, DefaultFeatVals),
    append(FeatVals, DefaultFeatVals, FeatVals1),
    !.
add_lexical_feature_defaults_to_feats(_CatName, _FeatVals, _FeatVals1) :-
    regulus_error('Unable to add lexical feature defaults.', []).

% Feature names mentioned in a Feature=Value list, duplicates removed.
feats_in_featvals(FeatVals, Feats) :-
    feats_in_featvals1(FeatVals, Feats0),
    safe_remove_duplicates(Feats0, Feats).

feats_in_featvals1([], []).
feats_in_featvals1([(Feat=_Val) | RestFeatVals], [Feat | RestFeats]) :-
    feats_in_featvals1(RestFeatVals, RestFeats).

% Build (Feat = Default) pairs for the features that have declared defaults;
% features without a default are silently skipped.
default_feat_vals_for_feat_list([], []).
default_feat_vals_for_feat_list([Feat | RestFeats], [(Feat = Default) | RestFeatVals]) :-
    lexical_feature_default(Feat, Default),
    !,
    default_feat_vals_for_feat_list(RestFeats, RestFeatVals).
default_feat_vals_for_feat_list([_Feat | RestFeats], RestFeatVals) :-
    !,
    default_feat_vals_for_feat_list(RestFeats, RestFeatVals).

%---------------------------------------------------------------

% remove_annotations_from_lex_items(+RHS, -RHS1)
% Strip "+Annotation" markers from lexical items in a rule body.
remove_annotations_from_lex_items(X, X1) :-
    remove_annotations_from_lex_items1(X, X1),
    !.
remove_annotations_from_lex_items(_X, _X1) :-
    regulus_error('Bad call to remove_annotations_from_lex_items/2', []).

% Fail on variables so the wrapper reports a bad call.
remove_annotations_from_lex_items1(V, _) :-
    var(V),
    !,
    fail.
remove_annotations_from_lex_items1((F, R), (F1, R1)) :-
    !,
    remove_annotations_from_lex_items1(F, F1),
    remove_annotations_from_lex_items1(R, R1).
remove_annotations_from_lex_items1((F ; R), (F1 ; R1)) :-
    !,
    remove_annotations_from_lex_items1(F, F1),
    remove_annotations_from_lex_items1(R, R1).
remove_annotations_from_lex_items1((?(F)), (?(F1))) :-
    !,
    remove_annotations_from_lex_items1(F, F1).
% Item+Annotation -> Item.
remove_annotations_from_lex_items1(X+_Annotation, X) :-
    !.
remove_annotations_from_lex_items1(Atom, Atom) :-
    atomic(Atom),
    !.

%---------------------------------------------------------------

% expand_abbreviations_in_rules(+RulesIn, -RulesOut)
% Expand multi-argument concat/strcat abbreviations in every rule.
expand_abbreviations_in_rules([], []) :- !.
expand_abbreviations_in_rules([F | R], [F1 | R1]) :-
    (   is_lexical_regulus_rule(F) ->
        %F1 = F
        expand_abbreviations_in_rule(F, F1)
    ;   expand_abbreviations_in_rule(F, F1)
    ),
    !,
    expand_abbreviations_in_rules(R, R1).
expand_abbreviations_in_rules(_RulesIn, _RulesOut) :-
    regulus_error('Bad call to expand_abbreviations_in_rules/2', []).
% expand_abbreviations_in_rule(+RuleIn, -RuleOut)
% Expand abbreviations in one rule's LHS feature values; Regulus exceptions
% are reported with line info and converted to failure.
expand_abbreviations_in_rule(rule((CatName:FeatVals --> RHS), LineInfo),
                             rule((CatName:FeatVals1 --> RHS), LineInfo)) :-
    on_exception(
        Exception,
        expand_abbreviations_in_feats(CatName, FeatVals, FeatVals1),
        (   inform_about_regulus_exception(Exception, LineInfo),
            fail
        )
    ),
    !.
expand_abbreviations_in_rule(rule(_Rule, LineInfo), _) :-
    inform_about_regulus_exception(regulus_exception('Bad call to expand_abbreviations_in_rule/2', []), LineInfo),
    fail.

% Undeclared category: fatal error.
expand_abbreviations_in_feats(CatName, _FeatVals, _FeatVals1) :-
    \+ category_internal(CatName, _FeatsOS),
    regulus_error('Undeclared category: ~w', [CatName]).
expand_abbreviations_in_feats(_CatName, FeatVals, FeatVals1) :-
    expand_abbreviations_in_feats1(FeatVals, FeatVals1).

expand_abbreviations_in_feats1([], []).
expand_abbreviations_in_feats1([F | R], [F1 | R1]) :-
    expand_abbreviations_in_featval_pair(F, F1),
    !,
    expand_abbreviations_in_feats1(R, R1).

% Only the sem/gsem values are rewritten; all other pairs pass through.
expand_abbreviations_in_featval_pair(SemFeat=Sem, SemFeat=Sem1) :-
    member(SemFeat, [sem, gsem]),
    expand_multiple_concats_in_semval(Sem, Sem1),
    !.
expand_abbreviations_in_featval_pair(Other, Other).

% expand_multiple_concats_in_semval(+Term, -Term1)
% Rewrite concat/N and strcat/N with N > 2 into nested binary applications,
% recursing through arbitrary term structure.
expand_multiple_concats_in_semval(Atom, Atom) :-
    atomic(Atom),
    !.
expand_multiple_concats_in_semval(Var, Var) :-
    var(Var),
    !.
expand_multiple_concats_in_semval(Concat, Concat1) :-
    compound(Concat),
    ( functor(Concat, concat, N) ; functor(Concat, strcat, N) ),
    N > 2,
    expand_multiple_concat(Concat, Concat1),
    !.
expand_multiple_concats_in_semval(Term, Term1) :-
    functor(Term, F, N),
    functor(Term1, F, N),
    expand_multiple_concats_in_semval_args(N, Term, Term1).

expand_multiple_concats_in_semval_args(I, _Term, _Term1) :-
    I =< 0,
    !.
expand_multiple_concats_in_semval_args(I, Term, Term1) :-
    I > 0,
    arg(I, Term, Arg),
    arg(I, Term1, Arg1),
    expand_multiple_concats_in_semval(Arg, Arg1),
    I1 is I - 1,
    !,
    expand_multiple_concats_in_semval_args(I1, Term, Term1).

% Binary concat is already in final form.
expand_multiple_concat(concat(X, Y), concat(X, Y)) :- !.
% concat(A, B, C, ...) -> concat(A', concat(B, C, ...)') right-associatively.
expand_multiple_concat(ConcatIn, ConcatOut) :-
    ConcatIn =.. [concat, FirstArg | RestArgs],
    ConcatNext =.. [concat | RestArgs],
    ConcatOut = concat(FirstArg1, ConcatNext1),
    !,
    expand_multiple_concats_in_semval(FirstArg, FirstArg1),
    expand_multiple_concat(ConcatNext, ConcatNext1).
expand_multiple_concat(strcat(X, Y), strcat(X, Y)) :- !.
expand_multiple_concat(StrcatIn, StrcatOut) :-
    StrcatIn =.. [strcat, FirstArg | RestArgs],
    StrcatNext =.. [strcat | RestArgs],
    StrcatOut = strcat(FirstArg1, StrcatNext1),
    !,
    expand_multiple_concats_in_semval(FirstArg, FirstArg1),
    expand_multiple_concat(StrcatNext, StrcatNext1).

%---------------------------------------------------------------

% expand_macros_in_rule_list(+RulesIn, -RulesOut)
% Macro-expand every rule; rules whose expansion fails are dropped
% (findall discards failing members).
expand_macros_in_rule_list(RulesIn, RulesOut) :-
    findall(RuleOut, expand_macros_in_rule_list_member(RulesIn, RuleOut), RulesOut).

expand_macros_in_rule_list_member(RulesIn, RuleOut) :-
    member(RuleIn, RulesIn),
    expand_macros_in_rule_with_line_info(RuleIn, RuleOut).

% Macro-expand one rule body; on a Regulus exception, report it with line
% info and abort compilation.
expand_macros_in_rule_with_line_info(rule(Label, RuleBodyIn, LineInfo),
                                     rule(Label, RuleBodyOut, LineInfo)) :-
    on_exception(
        Exception,
        expand_macros_in_rule_body(RuleBodyIn, RuleBodyOut),
        (   inform_about_regulus_exception(Exception, LineInfo),
            regulus_error('Fatal compilation error', [])
        )
    ).

% dynamic_lexicon entries get their own expansion path.
expand_macros_in_rule_body(dynamic_lexicon(LexIn), Result) :-
    expand_dynamic_lex_entry(dynamic_lexicon(LexIn), Result).
expand_macros_in_rule_body(RuleBodyIn, RuleBodyOut) :-
    \+ safe_subsumes_chk(dynamic_lexicon(_), RuleBodyIn),
    expand_macros_in_term(RuleBodyIn, RuleBodyNext),
    %% addition by peter ljunglöf, 4 jan 08, 11 jan 08
    apply_ground_gsl_functions(RuleBodyNext, RuleBodyNextNext),
    %% end addition
    flatten_and_check_after_macro_expansion(RuleBodyNextNext, RuleBodyOut).

expand_macros_in_term(In, Out) :-
    expand_macros_in_term(In, Out, []).

% expand_macros_in_term(+TIn, -TOut, +Previous)
% Previous is the stack of macro invocations already being expanded,
% used for cycle detection.
expand_macros_in_term(V, V, _Previous) :-
    var(V),
    !.
expand_macros_in_term(T, T, _Previous) :-
    \+ term_contains_macro_calls(T),
    !.
% NOTE: the spacing difference below is significant Prolog syntax:
% "LHS=@Macro" tokenises as the single operator =@ , whereas
% "LHS = @Macro" is =(LHS, @(Macro)).  This clause rewrites the former
% into the latter so the @/1 clause below can handle it.
expand_macros_in_term((LHS=@MacroInvocation), TOut, Previous) :-
    !,
    expand_macros_in_term((LHS = @MacroInvocation), TOut, Previous).
% @Invocation: expand the invocation's arguments first, validate, then
% expand the macro's output (pushing the invocation for cycle detection).
expand_macros_in_term(@TIn, TOut, Previous) :-
    !,
    expand_macros_in_term(TIn, TInWithExpandedArgs, Previous),
    check_ok_macro_invocation(TInWithExpandedArgs, Previous, MacroOrDefaultMacro),
    macro_or_default_macro(MacroOrDefaultMacro, TInWithExpandedArgs, TNext),
    expand_macros_in_term(TNext, TOut, [TInWithExpandedArgs | Previous]).
expand_macros_in_term(TIn, TOut, Previous) :-
    functor(TIn, F, N),
    functor(TOut, F, N),
    expand_macros_in_term_args(N, TIn, TOut, Previous).

expand_macros_in_term_args(0, _TIn, _TOut, _Previous).
expand_macros_in_term_args(I, TIn, TOut, Previous) :-
    I > 0,
    arg(I, TIn, ArgIn),
    arg(I, TOut, ArgOut),
    expand_macros_in_term(ArgIn, ArgOut, Previous),
    I1 is I - 1,
    expand_macros_in_term_args(I1, TIn, TOut, Previous).

% Dispatch on which kind of macro definition applies.
macro_or_default_macro(macro, In, Out) :-
    macro(In, Out).
macro_or_default_macro(default_macro, In, Out) :-
    default_macro(In, Out).

term_contains_macro_calls(T) :-
    (   term_contains_functor(T, '@'/1)
    ;   term_contains_functor(T, '=@'/2)
    ),
    !.

%---------------------------------------------------------------

%% addition by peter ljunglöf, 4 jan 08
%% this will concatenate all ground occurrences of strcat(..) anywhere in a rule
%% making it possible to define macros such as this:
%%   macro(regularNoun(House,sing), House).
%%   macro(regularNoun(House,plur), strcat(House,s)).
%% modification by peter ljunglöf, 11 jan 08
%% added the GSL functions neg/1, add/2, sub/2, mul/2, div/2
%% also changed the name from concatenate_strings_in_term/2

apply_ground_gsl_functions(X, X) :-
    var(X),
    !.
% strcat: concatenate when both sides reduce to atoms; otherwise push the
% strcat inward through (H, T) comma-pairs, or leave it symbolic.
apply_ground_gsl_functions(strcat(A,B), ABStr) :-
    !,
    apply_ground_gsl_functions(A, AStr),
    apply_ground_gsl_functions(B, BStr),
    (   atom(AStr), atom(BStr) ->
        atom_concat(AStr, BStr, ABStr)
    ;   compound(AStr), AStr = (AH, AT) ->
        apply_ground_gsl_functions(strcat(AT, BStr), ABStrT),
        ABStr = (AH, ABStrT)
    ;   compound(BStr), BStr = (BH, BT) ->
        apply_ground_gsl_functions(strcat(AStr, BH), ABStrH),
        ABStr = (ABStrH, BT)
    ;   otherwise ->
        ABStr = strcat(AStr, BStr)
    ).
% Arithmetic GSL functions: evaluate when the (recursively reduced)
% arguments are numbers, otherwise rebuild the term symbolically with the
% reduced arguments.  (Continues the apply_ground_gsl_functions/2 clauses
% started above with the var/strcat cases.)
apply_ground_gsl_functions(neg(A), ANeg) :-
    !,
    apply_ground_gsl_functions(A, ANum),
    (   number(ANum) ->
        ANeg is -ANum
    ;   ANeg = neg(ANum)
    ).
apply_ground_gsl_functions(add(A,B), ABSum) :-
    !,
    apply_ground_gsl_functions(A, ANum),
    apply_ground_gsl_functions(B, BNum),
    (   number(ANum), number(BNum) ->
        ABSum is ANum+BNum
    ;   ABSum = add(ANum,BNum)
    ).
apply_ground_gsl_functions(sub(A,B), ABDiff) :-
    !,
    apply_ground_gsl_functions(A, ANum),
    apply_ground_gsl_functions(B, BNum),
    (   number(ANum), number(BNum) ->
        ABDiff is ANum-BNum
    ;   ABDiff = sub(ANum,BNum)
    ).
apply_ground_gsl_functions(mul(A,B), ABProduct) :-
    !,
    apply_ground_gsl_functions(A, ANum),
    apply_ground_gsl_functions(B, BNum),
    (   number(ANum), number(BNum) ->
        ABProduct is ANum*BNum
    ;   ABProduct = mul(ANum,BNum)
    ).
apply_ground_gsl_functions(div(A,B), ABQuotient) :-
    !,
    apply_ground_gsl_functions(A, ANum),
    apply_ground_gsl_functions(B, BNum),
    (   number(ANum), number(BNum) ->
        ABQuotient is ANum/BNum
    ;   % BUG FIX: the fallback previously rebuilt add(ANum,BNum), silently
        % turning an unevaluable division into an addition; it must rebuild
        % a div/2 term, matching the neg/add/sub/mul clauses above.
        ABQuotient = div(ANum,BNum)
    ).
% Generic case: map the transformation over the arguments of any other term.
apply_ground_gsl_functions(TIn, TOut) :-
    functor(TIn, F, N),
    functor(TOut, F, N),
    apply_ground_gsl_functions_args(N, TIn, TOut).

apply_ground_gsl_functions_args(0, _TIn, _TOut) :- !.
apply_ground_gsl_functions_args(I, TIn, TOut) :-
    arg(I, TIn, ArgIn),
    arg(I, TOut, ArgOut),
    apply_ground_gsl_functions(ArgIn, ArgOut),
    I1 is I-1,
    apply_ground_gsl_functions_args(I1, TIn, TOut).

%% end addition, 4 jan 08

%---------------------------------------------------------------

% flatten_and_check_after_macro_expansion(+RuleIn, -RuleOut)
% A variable is never a legal expansion result.
flatten_and_check_after_macro_expansion(RuleIn, _RuleOut) :-
    var(RuleIn),
    !,
    regulus_error('~NResult of macro-expansion ~w not a rule~n', [RuleIn]).
% Ordinary grammar rule: flatten feature-value lists and validate.
flatten_and_check_after_macro_expansion(RuleIn, RuleOut) :-
    RuleIn = ( _LHS --> _RHS ),
    !,
    flatten_featval_lists_in_rule_body(RuleIn, RuleOut),
    check_ok_rule_after_macro_expansion(RuleOut).
% Transfer lexicon entries are validated but not flattened.
flatten_and_check_after_macro_expansion(RuleIn, RuleIn) :-
    (   RuleIn = transfer_lexicon(_LHS, _RHS)
    ;   RuleIn = reverse_transfer_lexicon(_LHS, _RHS)
    ;   RuleIn = bidirectional_transfer_lexicon(_LHS, _RHS)
    ),
    !,
    check_ok_transfer_lexicon_entry_after_macro_expansion(RuleIn).
% Unconditional transfer rules: flatten both sides, rebuild with the same
% functor (recovered via =..), and validate.
flatten_and_check_after_macro_expansion(RuleIn, RuleOut) :-
    (   RuleIn = transfer_rule(LHS, RHS)
    ;   RuleIn = reverse_transfer_rule(LHS, RHS)
    ;   RuleIn = bidirectional_transfer_rule(LHS, RHS)
    ),
    RuleIn =.. [RuleType, LHS, RHS],
    !,
    flatten_transfer_representation(LHS, LHS1),
    flatten_transfer_representation(RHS, RHS1),
    RuleOut =.. [RuleType, LHS1, RHS1],
    check_ok_transfer_rule_after_macro_expansion(RuleOut).
% Conditional transfer rules (with context conditions): one clause per
% direction, flattening both sides and keeping the conditions unchanged.
flatten_and_check_after_macro_expansion(RuleIn, RuleOut) :-
    RuleIn = ( transfer_rule(LHS, RHS) :- ContextConditions ),
    !,
    flatten_transfer_representation(LHS, LHS1),
    flatten_transfer_representation(RHS, RHS1),
    RuleOut = ( transfer_rule(LHS1, RHS1) :- ContextConditions ),
    check_ok_transfer_rule_after_macro_expansion(RuleOut).
flatten_and_check_after_macro_expansion(RuleIn, RuleOut) :-
    RuleIn = ( reverse_transfer_rule(LHS, RHS) :- ContextConditions ),
    !,
    flatten_transfer_representation(LHS, LHS1),
    flatten_transfer_representation(RHS, RHS1),
    RuleOut = ( reverse_transfer_rule(LHS1, RHS1) :- ContextConditions ),
    check_ok_transfer_rule_after_macro_expansion(RuleOut).
flatten_and_check_after_macro_expansion(RuleIn, RuleOut) :-
    RuleIn = ( bidirectional_transfer_rule(LHS, RHS) :- ContextConditions ),
    !,
    flatten_transfer_representation(LHS, LHS1),
    flatten_transfer_representation(RHS, RHS1),
    RuleOut = ( bidirectional_transfer_rule(LHS1, RHS1) :- ContextConditions ),
    check_ok_transfer_rule_after_macro_expansion(RuleOut).
% Role-transfer rules (conditional or not) pass through unflattened after
% validation.  NOTE(review): this clause has no cut, unlike its siblings —
% presumably intentional fall-through; confirm against callers.
flatten_and_check_after_macro_expansion(RuleIn, RuleIn) :-
    (   RuleIn = role_transfer_rule(LHS, RHS)
    ;   RuleIn = reverse_role_transfer_rule(LHS, RHS)
    ;   RuleIn = bidirectional_role_transfer_rule(LHS, RHS)
    ;   RuleIn = ( role_transfer_rule(LHS, RHS) :- _ContextConditions )
    ;   RuleIn = ( reverse_role_transfer_rule(LHS, RHS) :- _ContextConditions )
    ;   RuleIn = ( bidirectional_role_transfer_rule(LHS, RHS) :- _ContextConditions )
    ;   RuleIn = ( role_list_transfer_rule(_Roles) :- _ContextConditions )
    ),
    check_ok_role_transfer_rule_after_macro_expansion(RuleIn).
% LF pattern / rewrite entries pass through unchanged.
flatten_and_check_after_macro_expansion(RuleIn, RuleOut) :-
    is_lf_pattern_entry(RuleIn),
    !,
    RuleIn = RuleOut.
flatten_and_check_after_macro_expansion(RuleIn, RuleOut) :-
    is_lf_rewrite_entry(RuleIn),
    !,
    RuleIn = RuleOut.
% Anything else is passed through as-is.
flatten_and_check_after_macro_expansion(RuleIn, RuleIn).

%---------------------------------------------------------------

% flatten_list(+Term, -Flattened)
% Recursively flatten nested lists; variables and atomics pass through.
% BUG FIX: the recursive clauses previously called an external flatten/2
% instead of recursing on flatten_list/2, and the non-list-head clause ended
% with the always-true no-op goal "flatten_list(Other, Other)" over two
% fresh variables.  Made self-recursive and removed the no-op.
flatten_list(V, V) :-
    var(V),
    !.
flatten_list(A, A) :-
    atomic(A),
    !.
% List-valued head: flatten it and splice its elements into the result.
flatten_list([F | R], Result) :-
    is_list(F),
    !,
    flatten_list(F, F1),
    flatten_list(R, R1),
    append(F1, R1, Result).
% Non-list head: keep it and flatten the tail.
flatten_list([F | R], [F | R1]) :-
    !,
    flatten_list(R, R1).

%---------------------------------------------------------------

% flatten_featval_lists_in_rule_body(+In, -Out)
% Flatten nested feature-value lists in every Cat:FeatVals occurrence of a
% rule body; a failure of the worker is reported as a bad call.
flatten_featval_lists_in_rule_body(In, Out) :-
    flatten_featval_lists_in_rule_body1(In, Out),
    !.
flatten_featval_lists_in_rule_body(In, Out) :-
    regulus_error('~NBad call: ~w~n', [flatten_featval_lists_in_rule_body(In, Out)]).

flatten_featval_lists_in_rule_body1(V, V) :-
    var(V),
    !.
flatten_featval_lists_in_rule_body1(A, A) :-
    atomic(A),
    !.
% At a category, flatten its feature-value list.
flatten_featval_lists_in_rule_body1(Cat:FeatVals, Cat:FeatVals1) :-
    !,
    flatten_featval_list(FeatVals, FeatVals1).
% Otherwise map over the term's arguments.
flatten_featval_lists_in_rule_body1(T, T1) :-
    functor(T, F, N),
    functor(T1, F, N),
    flatten_featval_lists_in_rule_body1_args(N, T, T1).

flatten_featval_lists_in_rule_body1_args(0, _T, _T1).
flatten_featval_lists_in_rule_body1_args(I, T, T1) :- I > 0, arg(I, T, Arg), arg(I, T1, Arg1), flatten_featval_lists_in_rule_body1(Arg, Arg1), I1 is I - 1, !, flatten_featval_lists_in_rule_body1_args(I1, T, T1). flatten_featval_list([], []) :- !. flatten_featval_list([F | R], [F | R1]) :- nonvar(F), F = (Feat = _Val), atomic(Feat), flatten_featval_list(R, R1), !. flatten_featval_list([F | R], Result) :- is_list(F), flatten_featval_list(F, F1), flatten_featval_list(R, R1), append(F1, R1, Result), !. flatten_featval_list(Other, Out) :- regulus_error('~NBad call: ~w~n', [flatten_featval_list(Other, Out)]). %--------------------------------------------------------------- check_ok_rule_after_macro_expansion(Rule) :- is_macro_expanded_regulus_rule(Rule), !. check_ok_rule_after_macro_expansion(Rule) :- regulus_error('~NResult of macro-expansion ~w not a rule~n', [Rule]). check_ok_transfer_lexicon_entry_after_macro_expansion(Rule) :- is_lexical_transfer_entry(Rule), !. check_ok_transfer_lexicon_entry_after_macro_expansion(Rule) :- regulus_error('~NResult of macro-expansion ~w not a transfer lexicon entry~n', [Rule]). check_ok_transfer_rule_after_macro_expansion(Rule) :- is_non_lexical_transfer_entry(Rule), !. check_ok_transfer_rule_after_macro_expansion(Rule) :- regulus_error('~NResult of macro-expansion ~w not a transfer rule~n', [Rule]). check_ok_role_transfer_rule_after_macro_expansion(Rule) :- is_non_lexical_transfer_entry(Rule), !. check_ok_role_transfer_rule_after_macro_expansion(Rule) :- regulus_error('~NResult of macro-expansion ~w not a transfer rule~n', [Rule]). %--------------------------------------------------------------- is_macro_expanded_regulus_rule(Rule) :- nonvar(Rule), Rule = (LHS --> RHS), nonvar(LHS), ( current_predicate(user:regulus_config/2), user:regulus_config(prolog_semantics, yes) ; nonvar(RHS) ), is_macro_expanded_regulus_rule_head(LHS), is_macro_expanded_regulus_rule_body(RHS), !. 
% A rule head must be a macro-expanded category (CatName:FeatVals).
is_macro_expanded_regulus_rule_head(Head) :-
    is_macro_expanded_cat(Head),
    !.

% is_macro_expanded_regulus_rule_body(+Body)
% Recognises well-formed expanded rule bodies: conjunctions, disjunctions,
% optional constituents (?/1), categories, commented bodies (Body+Comment),
% and plain atoms (lexical items).  A variable body is accepted only in
% prolog_semantics mode.
is_macro_expanded_regulus_rule_body(V) :-
    var(V),
    !,
    current_predicate(user:regulus_config/2),
    user:regulus_config(prolog_semantics, yes).
is_macro_expanded_regulus_rule_body((P, Q)) :-
    is_macro_expanded_regulus_rule_body(P),
    is_macro_expanded_regulus_rule_body(Q),
    !.
is_macro_expanded_regulus_rule_body((P ; Q)) :-
    is_macro_expanded_regulus_rule_body(P),
    is_macro_expanded_regulus_rule_body(Q),
    !.
is_macro_expanded_regulus_rule_body((?(P))) :-
    is_macro_expanded_regulus_rule_body(P),
    !.
is_macro_expanded_regulus_rule_body(Cat) :-
    is_macro_expanded_cat(Cat),
    !.
is_macro_expanded_regulus_rule_body(Body+Comment) :-
    atom(Comment),
    is_macro_expanded_regulus_rule_body(Body),
    !.
is_macro_expanded_regulus_rule_body(Atom) :-
    atom(Atom),
    !.

% A macro-expanded category is CatName:FeatVals with an atomic category
% name and a proper list of feature-value pairs.
is_macro_expanded_cat(Cat) :-
    nonvar(Cat),
    Cat = CatName:FeatVals,
    atom(CatName),
    is_list(FeatVals),
    !.

%---------------------------------------------------------------

% check_ok_macro_invocation(+LHS, +Previous, -MacroOrDefaultMacro)
% Validates a macro call and decides whether it should be expanded using a
% normal macro/2 definition or a default_macro/2 definition.  Previous is
% the stack of enclosing macro LHSs, used for cycle detection.

% If there is neither a macro nor a default macro definition that matches, signal an error.
check_ok_macro_invocation(LHS, _Previous, _MacroOrDefaultMacro) :-
    \+ macro(LHS, _RHS),
    \+ default_macro(LHS, _RHS),
    !,
    regulus_error('~NNo expansion defined for macro invocation ~w~n', [LHS]).
% If we have a cyclical macro expansion, signal an error.
check_ok_macro_invocation(LHS, Previous, _MacroOrDefaultMacro) :-
    (   compound(LHS)
    ;   atom(LHS)
    ),
    id_member(LHS, Previous),
    !,
    regulus_error('~NCycle in macro invocation: ~w~n', [LHS]).
% If there is no macro definition that matches, then use a default macro.
% (We know there is one by now, since we got past the first clause).
check_ok_macro_invocation(LHS, _Previous, MacroOrDefaultMacro) :-
    \+ macro(LHS, _RHS),
    MacroOrDefaultMacro = default_macro,
    !.
% If we got this far, there is a macro that works, so use that.
check_ok_macro_invocation(_LHS, _Previous, MacroOrDefaultMacro) :-
    MacroOrDefaultMacro = macro,
    !.
%---------------------------------------------------------------
% Unused-feature warnings.  The pass walks all rules, recording every
% category and every (category, feature) pair actually referenced in the
% dynamic predicates below, then reports declared features that were never
% used.  State is global and reset on each run.

:- dynamic cat_used/1.
:- dynamic feature_used/2.

% warn_about_unused_features(+Rules)
% Entry point; second clause turns any internal failure into a hard error.
warn_about_unused_features(Rules) :-
    init_warn_about_unused_features,
    mark_used_features(Rules),
    warn_about_unused_features1,
    !.
warn_about_unused_features(_Rules) :-
    regulus_error('~NCall to warn_about_unused_features/1 failed.~n', []).

% Clear the recording predicates before a new pass.
init_warn_about_unused_features :-
    retractall(cat_used(_)),
    retractall(feature_used(_, _)).

% mark_used_features(+Term)
% Recursively record every Cat:FeatVals occurrence in Term.
mark_used_features(Var) :-
    var(Var),
    !.
mark_used_features(Atom) :-
    atomic(Atom),
    !.
mark_used_features(Cat:FeatVals) :-
    !,
    (   is_list(FeatVals) ->
        mark_used_cat(Cat),
        mark_used_features_for_cat(Cat, FeatVals)
    ;   otherwise ->
        true
    ).
mark_used_features(Term) :-
    compound(Term),
    functor(Term, _F, N),
    mark_used_features_args(N, Term),
    !.
mark_used_features(Other) :-
    regulus_error('~NBad call: ~w~n', [mark_used_features(Other)]).

% Walk the arguments of a compound term from position N down to 1.
mark_used_features_args(0, _Term).
mark_used_features_args(I, Term) :-
    I > 0,
    arg(I, Term, Arg),
    mark_used_features(Arg),
    I1 is I - 1,
    !,
    mark_used_features_args(I1, Term).

mark_used_features_for_cat(_Cat, []) :-
    !.
mark_used_features_for_cat(Cat, [F | R]) :-
    mark_used_feature_for_cat_and_featval(Cat, F),
    !,
    mark_used_features_for_cat(Cat, R).

% Record a category, at most once.
mark_used_cat(Cat) :-
    (   cat_used(Cat) ->
        true
    ;   otherwise ->
        assertz(cat_used(Cat))
    ),
    !.
mark_used_cat(Cat) :-
    regulus_error('~NBad call: ~w~n', [mark_used_cat(Cat)]).

% Record a (category, feature) pair, at most once; the value is ignored.
mark_used_feature_for_cat_and_featval(Cat, Feat=_Val) :-
    (   feature_used(Cat, Feat) ->
        true
    ;   otherwise ->
        assertz(feature_used(Cat, Feat))
    ),
    !.
mark_used_feature_for_cat_and_featval(Cat, F) :-
    regulus_error('~NBad call: ~w~n', [mark_used_feature_for_cat_and_featval(Cat, F)]).

% Failure-driven loop: print one warning per unused (category, feature)
% pair, then succeed via the second clause.
warn_about_unused_features1 :-
    unused_cat_feat_pair(Cat, Feat),
    format('~N*** Warning: feature \'~w\' in category \'~w\' is never used~n', [Feat, Cat]),
    fail.
warn_about_unused_features1.

% A pair is "unused" when the category itself was used somewhere but the
% declared feature never appeared on it.
unused_cat_feat_pair(Cat, Feat) :-
    category_internal(Cat, Feats),
    member(Feat, Feats),
    \+ feature_used(Cat, Feat),
    cat_used(Cat).
%---------------------------------------------------------------
% Rule canonicalisation: flattens nested conjunctions in rule bodies into a
% single right-associated comma-list and optionally lower-cases lexical
% items (atoms) in bodies.

canonicalise_rules([], []).
canonicalise_rules([F | R], [F1 | R1]) :-
    canonicalise_rule0(F, F1),
    !,
    canonicalise_rules(R, R1).

% canonicalise_rule0(+RuleTerm, -RuleTerm1)
% Per-rule wrapper: an exception while canonicalising is reported against
% the rule's line info; the fallback clause then leaves the rule unchanged.
canonicalise_rule0(rule(Rule, LineInfo), rule(Rule1, LineInfo)) :-
    on_exception(Exception,
                 canonicalise_rule(Rule, Rule1),
                 (   nl,
                     inform_about_regulus_exception(Exception, LineInfo),
                     nl, nl,
                     fail
                 )
                ),
    !.
canonicalise_rule0(F, F).

canonicalise_rule((Head --> Body), (Head --> Body1)) :-
    canonicalise_rule_body(Body, Body1).

% Conjunctions are flattened via list form and rebuilt; disjunctions and
% optionals are canonicalised recursively.
canonicalise_rule_body((P, Q), Result) :-
    canonicalise_rule_body(P, P1),
    canonicalise_rule_body(Q, Q1),
    comma_list_to_list(P1, P1List),
    comma_list_to_list(Q1, Q1List),
    append(P1List, Q1List, ResultList),
    list_to_comma_list(ResultList, Result),
    !.
canonicalise_rule_body((P ; Q), (P1 ; Q1)) :-
    canonicalise_rule_body(P, P1),
    canonicalise_rule_body(Q, Q1),
    !.
canonicalise_rule_body(?(P), ?(P1)) :-
    canonicalise_rule_body(P, P1),
    !.
% Atoms (lexical items) are lower-cased only when the config flag is on;
% otherwise this clause fails and the catch-all leaves the atom alone.
canonicalise_rule_body(Atom, LowerCaseAtom) :-
    atom(Atom),
    convert_lexical_items_to_lower_case,
    lowercase_atom(Atom, LowerCaseAtom).
canonicalise_rule_body(Other, Other).

% Do this in a way that allows an earlier setting to override a later one.
convert_lexical_items_to_lower_case :-
    current_predicate(user:regulus_config/2),
    user:regulus_config(convert_lexical_items_to_lower_case, Value),
    !,
    Value = yes.

%---------------------------------------------------------------
% Feature-space consistency checking.  Each rule is copied and its feature
% values are instantiated to cat_feat_space(Cat, Feat, Space) markers; a
% later unification of two values from different spaces then fails pattern
% matching and triggers an "inconsistent feature spaces" error.

warn_about_inconsistent_feature_spaces([]).
warn_about_inconsistent_feature_spaces([F | R]) :-
    warn_about_inconsistent_feature_spaces_in_rule(F),
    !,
    warn_about_inconsistent_feature_spaces(R).

% Labelled rules are reduced to the unlabelled form.
warn_about_inconsistent_feature_spaces_in_rule(rule(_Label, Rule, LineInfo)) :-
    !,
    warn_about_inconsistent_feature_spaces_in_rule(rule(Rule, LineInfo)).
% copy_term/2 so instantiation does not pollute the real rule.
warn_about_inconsistent_feature_spaces_in_rule(rule(Rule, LineInfo)) :-
    !,
    copy_term(Rule, Rule1),
    on_exception(Exception,
                 instantiate_features_to_check_consistency(Rule1),
                 (   nl,
                     inform_about_regulus_exception(Exception, LineInfo),
                     nl, nl,
                     fail
                 )
                ).
warn_about_inconsistent_feature_spaces_in_rule(_Other).

% Structural recursion over a rule; only Cat:Feats nodes do real work.
instantiate_features_to_check_consistency((Head --> Body)) :-
    instantiate_features_to_check_consistency(Head),
    instantiate_features_to_check_consistency(Body),
    !.
instantiate_features_to_check_consistency((P, Q)) :-
    instantiate_features_to_check_consistency(P),
    instantiate_features_to_check_consistency(Q),
    !.
instantiate_features_to_check_consistency((P ; Q)) :-
    instantiate_features_to_check_consistency(P),
    instantiate_features_to_check_consistency(Q),
    !.
instantiate_features_to_check_consistency(?(P)) :-
    instantiate_features_to_check_consistency(P),
    !.
instantiate_features_to_check_consistency(Cat:Feats) :-
    instantiate_features_to_check_consistency1(Feats, Cat),
    !.
instantiate_features_to_check_consistency(_Other).

instantiate_features_to_check_consistency1([], _Cat).
instantiate_features_to_check_consistency1([Feat=Val | R], Cat) :-
    instantiate_feature_to_check_consistency1(Feat, Val, Cat),
    !,
    instantiate_features_to_check_consistency1(R, Cat).

% sem/gsem values are whole semantic structures and are handled separately.
instantiate_feature_to_check_consistency1(Feat, Val, Cat) :-
    member(Feat, [sem, gsem]),
    !,
    instantiate_sem_structure_to_check_consistency(Feat, Val, Cat).
instantiate_feature_to_check_consistency1(Feat, Val, Cat) :-
    % 'true ->' here is just an explicit else-branch: unknown features are
    % reported as errors.
    (   feature(Feat, Space) ->
        true
    ;   true ->
        regulus_error('Unknown feature ~w', [Feat])
    ),
    (   var(Val) ->
        % First sighting of this variable: tag it with this feature's space.
        Val = cat_feat_space(Cat, Feat, Space)
    ;   Val = cat_feat_space(_OtherCat, _OtherFeat, Space) ->
        % Already tagged, same space: consistent.
        true
    ;   Val = cat_feat_space(OtherCat, OtherFeat, OtherSpace) ->
        % Already tagged with a DIFFERENT space: inconsistent.
        signal_inconsistent_feature_exception(cat_feat_space(OtherCat, OtherFeat, OtherSpace),
                                              cat_feat_space(Cat, Feat, Space))
    ;   otherwise ->
        true
    ).

% Same tagging scheme for semantic structures, recursing into compound
% values; the space is always 'semantic_value'.
instantiate_sem_structure_to_check_consistency(Feat, Val, Cat) :-
    (   var(Val) ->
        Val = cat_feat_space(Cat, Feat, semantic_value)
    ;   Val = cat_feat_space(_OtherCat, _OtherFeat, semantic_value) ->
        true
    ;   Val = cat_feat_space(OtherCat, OtherFeat, OtherSpace) ->
        signal_inconsistent_feature_exception(cat_feat_space(OtherCat, OtherFeat, OtherSpace),
                                              cat_feat_space(Cat, Feat, semantic_value))
    ;   compound(Val) ->
        functor(Val, _F, N),
        instantiate_sem_structure_to_check_consistency_args(N, Feat, Val, Cat)
    ;   otherwise ->
        true
    ).

instantiate_sem_structure_to_check_consistency_args(I, _Feat, _Val, _Cat) :-
    I < 1,
    !.
instantiate_sem_structure_to_check_consistency_args(I, Feat, Val, Cat) :-
    arg(I, Val, Arg),
    instantiate_sem_structure_to_check_consistency(Feat, Arg, Cat),
    I1 is I - 1,
    !,
    instantiate_sem_structure_to_check_consistency_args(I1, Feat, Val, Cat).

% Raise the user-visible error describing both conflicting feature uses.
signal_inconsistent_feature_exception(FeatUse1, FeatUse2) :-
    feature_use_description(FeatUse1, FeatUseDescription1),
    feature_use_description(FeatUse2, FeatUseDescription2),
    Format = 'Inconsistent feature spaces: ~w and ~w',
    Args = [FeatUseDescription1, FeatUseDescription2],
    regulus_error(Format, Args).

% Render a cat_feat_space/3 marker as a readable "Cat:[... F=<...> ...]".
feature_use_description(cat_feat_space(Cat, Feat, Space), FeatUseDescription) :-
    format_to_atom('~w:[... ~w=<value in ~w> ...]', [Cat, Feat, Space], FeatUseDescription),
    !.
feature_use_description(FeatUse1, FeatUseDescription1) :-
    Format = 'Bad call: ~w',
    Args = [feature_use_description(FeatUse1, FeatUseDescription1)],
    regulus_error(Format, Args).

%---------------------------------------------------------------

% Type tests for category-value lists (used for feature space values).
is_list_of_lists_of_cat_vals([]).
is_list_of_lists_of_cat_vals([F | R]) :-
    is_list_of_cat_vals(F),
    !,
    is_list_of_lists_of_cat_vals(R).

is_list_of_cat_vals([]).
is_list_of_cat_vals([F | R]) :-
    possible_cat_val(F),
    !,
    is_list_of_cat_vals(R).

% A category value is an atomic constant or a syn_term(Cat) wrapper.
possible_cat_val(Atom) :-
    atomic(Atom).
possible_cat_val(CatSpec) :-
    nonvar(CatSpec),
    CatSpec = syn_term(Cat),
    atom(Cat).
%---------------------------------------------------------------
% Collocation support.  Collocation macro declarations are stored in the
% dynamic predicate collocation_macro_internal/2 and later used to expand
% "{@macro(...)}" calls embedded in collocation rule strings.

% internalise_collocation_decls(+ReadDeclarations)
% Clear any previously stored collocation predicates, then assert each
% declaration; any failure becomes a hard error.
internalise_collocation_decls(ReadDeclarations) :-
    retract_regulus_preds_for_collocation_rules,
    internalise_collocation_decls1(ReadDeclarations),
    !.
internalise_collocation_decls(_ReadDeclarations) :-
    Format = 'Bad call: ~w',
    Args = ['internalise_collocation_decls(...)'],
    regulus_error(Format, Args).

internalise_collocation_decls1([]).
internalise_collocation_decls1([F | R]) :-
    internalise_collocation_decl(F),
    !,
    internalise_collocation_decls1(R).

% Declarations come labelled or unlabelled; both are stored the same way.
internalise_collocation_decl(declaration(_Label, collocation_macro(LHS, RHS), _LineInfo)) :-
    assertz(collocation_macro_internal(LHS, RHS)),
    !.
internalise_collocation_decl(declaration(collocation_macro(LHS, RHS), _LineInfo)) :-
    assertz(collocation_macro_internal(LHS, RHS)),
    !.
internalise_collocation_decl(Other) :-
    Format = 'Bad call: ~w',
    Args = [internalise_collocation_decl(Other)],
    regulus_error(Format, Args).

%---------------------------------------------------------------

% expand_collocation_macros_in_rules(+ReadRules, -ExpandedRules)
% Expand the macros in every collocation rule.  findall/3 means a rule
% whose expansion fails is silently dropped from the output list.
expand_collocation_macros_in_rules(ReadRules, ExpandedRules) :-
    findall(ExpandedRule,
            (   member(Rule, ReadRules),
                expand_collocation_macros_in_rule(Rule, ExpandedRule)
            ),
            ExpandedRules),
    !.
expand_collocation_macros_in_rules(_ReadRules, _ExpandedRules) :-
    Format = 'Unable to expand collocation rules',
    Args = [],
    regulus_error(Format, Args).

expand_collocation_macros_in_rule(Rule, ExpandedRule) :-
    get_line_info_lhs_and_rhs_from_collocation_rule(Rule, LineInfo, F, LHS, RHS),
    expand_collocation_macros_in_rule1(F, LHS, RHS, LineInfo, ExpandedRule).

% Decompose rule(Body, LineInfo) or rule(Label, Body, LineInfo), where Body
% is F(LHS, RHS) for some rule functor F.
get_line_info_lhs_and_rhs_from_collocation_rule(Rule, LineInfo, F, LHS, RHS) :-
    (   Rule = rule(Body, LineInfo)
    ;   Rule = rule(_Label, Body, LineInfo)
    ),
    compound(Body),
    Body =.. [F, LHS, RHS],
    !.

% Exceptions during expansion are reported against the rule's line info.
expand_collocation_macros_in_rule1(F, LHS, RHS, LineInfo, ExpandedRule) :-
    on_exception(Exception,
                 expand_collocation_macros_in_rule2(F, LHS, RHS, ExpandedRule),
                 inform_about_regulus_exception(Exception, LineInfo)
                ).
/*
Example of a collocation rule whose strings embed macro calls:

better_collocation("SAY: YOU HAVE {@number(N)} SISTER",
                   "multimedia:prompt-ask-how-many-sisters-you-have {@number(N)}").
*/

% expand_collocation_macros_in_rule2(+F, +LHS, +RHS, -ExpandedRule)
% Tokenise both sides into str/1 and macro_call/1 pieces (sharing variable
% bindings between the two sides so {@number(N)} refers to the same N),
% substitute each macro call with one of its expansions, and rebuild the
% rule term with the same functor F.
expand_collocation_macros_in_rule2(F, LHS, RHS, ExpandedRule) :-
    tokenize_collocation_rule_lhs_or_rhs(LHS, LHS1, []-LHSVars),
    tokenize_collocation_rule_lhs_or_rhs(RHS, RHS1, LHSVars-_Vars),
    substitute_from_collocation_macros(LHS1, LHS2),
    substitute_from_collocation_macros(RHS1, RHS2),
    append_list(LHS2, LHS3),
    append_list(RHS2, RHS3),
    ExpandedRule =.. [F, LHS3, RHS3].

% Tokenise a rule side (a code list) via the DCG below; a parse failure is
% a user-level error.
tokenize_collocation_rule_lhs_or_rhs(In, Out, VarsIn-VarsOut) :-
    collocation_rule_lhs_or_rhs(Out, VarsIn-VarsOut, In, []),
    !.
tokenize_collocation_rule_lhs_or_rhs(In, _Out, _Substitutions) :-
    regulus_error('~NIll-formed string "~s" in collocation rule~n', [In]).

% DCG: a rule side is a sequence of components; the accumulator pair
% VarsIn-VarsOut threads the variable-name bindings through the parse.
collocation_rule_lhs_or_rhs([F | R], VarsIn-VarsOut) -->
    collocation_rule_lhs_or_rhs_component(F, VarsIn-VarsNext),
    !,
    collocation_rule_lhs_or_rhs(R, VarsNext-VarsOut).
collocation_rule_lhs_or_rhs([], VarsIn-VarsIn) --> [].

% A component is either a "{@...}" macro call or a maximal run of
% non-curly-bracket characters.
collocation_rule_lhs_or_rhs_component(macro_call(Body), VarsIn-VarsOut) -->
    "{@",
    non_curly_bracket_string(MacroStr),
    { MacroStr \== [] },
    "}",
    { add_or_use_var_pairs(MacroStr, Body, VarsIn-VarsOut) },
    !.
collocation_rule_lhs_or_rhs_component(str(Str), VarsIn-VarsIn) -->
    non_curly_bracket_string(Str),
    { Str \== [] },
    !.

non_curly_bracket_string([F | R]) -->
    non_curly_bracket_char(F),
    !,
    non_curly_bracket_string(R).
non_curly_bracket_string([]) --> [].

non_curly_bracket_char(Char) -->
    [Char],
    { \+ is_curly_bracket_char(Char) },
    !.

is_curly_bracket_char(0'{).
is_curly_bracket_char(0'}).

% add_or_use_var_pairs(+MacroStr, -Body, +VarsIn-VarsOut)
% Read the macro-call text as a Prolog term (a trailing "." is appended so
% the reader accepts it) and merge its named variables into the running
% Name=Var association so repeated names share one variable.
add_or_use_var_pairs(MacroStr, Body, VarsIn-VarsOut) :-
    append(MacroStr, ".", MacroStrWithPeriod),
    safe_read_from_chars(MacroStrWithPeriod, Body, [variable_names(Vars)]),
    add_or_use_var_pairs1(Vars, VarsIn-VarsOut).

add_or_use_var_pairs1([], VarsIn-VarsIn).
add_or_use_var_pairs1([F | R], VarsIn-VarsOut) :-
    % member/2 unifies an existing Name=Var pair with F, reusing the
    % earlier variable; otherwise F is added as a new pair.
    (   member(F, VarsIn) ->
        VarsNext = VarsIn
    ;   otherwise ->
        VarsNext = [F | VarsIn]
    ),
    !,
    add_or_use_var_pairs1(R, VarsNext-VarsOut).

% Replace each token with its final text: str/1 tokens are kept as-is,
% macro_call/1 tokens are replaced by one of their expansions
% (nondeterministically, so alternative expansions can be enumerated).
substitute_from_collocation_macros([], []).
substitute_from_collocation_macros([F | R], [F1 | R1]) :-
    substitute_from_collocation_macros_single(F, F1),
    substitute_from_collocation_macros(R, R1).

substitute_from_collocation_macros_single(str(Str), Str) :- !.
substitute_from_collocation_macros_single(macro_call(Body), Value) :-
    expand_collocation_macro_call(Body, PossibleValues),
    member(Value, PossibleValues).

% expand_collocation_macro_call(+Body, -Expanded)
% Enumerate the stored expansions of Body.  findall/3 is used first so that
% "no expansion at all" can be reported as an error; the second call to
% collocation_macro_internal/2 re-runs the matching clause so that Body's
% variables actually get bound to the chosen expansion.
expand_collocation_macro_call(Body, Expanded) :-
    findall(SomeExpanded,
            collocation_macro_internal(Body, SomeExpanded),
            AllExpanded),
    (   AllExpanded = [] ->
        regulus_error('~NUnable to expand collocation macro call "~w" in collocation rule~n', [Body])
    ;   otherwise ->
        member(Expanded, AllExpanded),
        % Need to instantiate Body
        collocation_macro_internal(Body, Expanded)
    ).
TeamSPoon/logicmoo_workspace
packs_sys/logicmoo_nlu/ext/regulus/Prolog/regulus_read.pl
Perl
mit
91,185
#!/usr/bin/perl

use strict;
use warnings;

# This class is generated from DBIx.pm. Do not modify.
#
# DBIx::Class result class mapping the Shopify "webhook" API resource onto
# the 'shopify_webhooks' table.  Columns mirror the Shopify webhook fields
# (address, topic, format, timestamps), plus shop_id linking back to the
# owning shop row.

package WWW::Shopify::Model::DBIx::Schema::Result::Model::Webhook;
use base qw/DBIx::Class::Core/;
__PACKAGE__->load_components(qw/InflateColumn::DateTime/);
__PACKAGE__->table('shopify_webhooks');
# Column definitions; DATETIME columns are inflated to DateTime objects by
# the InflateColumn::DateTime component loaded above.
__PACKAGE__->add_columns(
	"address", { data_type => 'VARCHAR(255)', is_nullable => '1' },
	"created_at", { data_type => 'DATETIME', is_nullable => '1' },
	"id", { is_nullable => '0', data_type => 'BIGINT' },
	"updated_at", { data_type => 'DATETIME', is_nullable => '1' },
	"format", { is_nullable => '1', data_type => 'VARCHAR(255)' },
	"topic", { data_type => 'VARCHAR(255)', is_nullable => '1' },
	"shop_id", { data_type => "BIGINT" }
);
__PACKAGE__->set_primary_key('id');
# Each webhook row belongs to exactly one shop.
__PACKAGE__->belongs_to(shop => 'WWW::Shopify::Model::DBIx::Schema::Result::Model::Shop', 'shop_id');
# The non-DBIx model class this result class represents.
sub represents { return 'WWW::Shopify::Model::Webhook'; }
# Webhooks have no parent container variable in the API path.
sub parent_variable { return undef; }
1;
gitpan/WWW-Shopify
lib/WWW/Shopify/Model/DBIx/Schema/Result/Model/Webhook.pm
Perl
mit
981
#!/usr/bin/perl
# Stream filter for Routino XML tagging rules: copies stdin to stdout and,
# immediately before each closing </way> tag, injects <output> overrides that
# disable the non-motor-vehicle transport modes; before each closing
# </relation> tag it disables foot/bicycle route relations.  The injected
# block is printed first, then the original line, so the overrides land just
# inside the enclosing element.
use strict;
use warnings;

while (my $line = <STDIN>) {
    # Way rules: switch off every mode except motor vehicles.
    if ($line =~ m%</way>%) {
        print " <!-- Special case for motor vehicles -->\n";
        print "\n";
        print " <if>\n";
        print " <output k=\"foot\" v=\"no\"/>\n";
        print " <output k=\"horse\" v=\"no\"/>\n";
        print " <output k=\"wheelchair\" v=\"no\"/>\n";
        print " <output k=\"bicycle\" v=\"no\"/>\n";
        print "\n";
        print " <output k=\"bridge\" v=\"no\"/>\n";
        print " <output k=\"tunnel\" v=\"no\"/>\n";
        print " </if>\n";
        print "\n";
    }
    # Relation rules: suppress foot and bicycle route relations.
    if ($line =~ m%</relation>%) {
        print " <!-- Special case for motor vehicles -->\n";
        print "\n";
        print " <if>\n";
        print " <output k=\"footroute\" v=\"no\"/>\n";
        print " <output k=\"bicycleroute\" v=\"no\"/>\n";
        print " </if>\n";
        print "\n";
    }
    # Echo the original input line unchanged.
    print $line;
}
MinnPost/minnpost-nice-ride
data-processing/routino-2.2/xml/scripts/drive.pl
Perl
mit
907
#
# Copyright 2018 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Power-supply component checks for HP EVA arrays via the SSSU CLI.
# Covers PSUs reported on controller enclosures (psu_ctrl) and on disk
# shelves (psu_diskshelf).  Fix in this revision: corrected the
# "power suuply" typo in the disk-shelf long message.

package storage::hp::eva::cli::mode::components::psu;

use strict;
use warnings;

# Register the SSSU commands whose XML output this component parses.
sub load {
    my ($self) = @_;

    $self->{ssu_commands}->{'ls diskshelf full xml'} = 1;
    $self->{ssu_commands}->{'ls controller full xml'} = 1;
}

# Check power sources attached to controller enclosures.
sub psu_ctrl {
    my ($self) = @_;

    # Relevant XML shape:
    # <object>
    #   <objecttype>controller</objecttype>
    #   <objectname>\Hardware\Rack 1\Controller Enclosure 7\Controller B</objectname>
    #   <powersources>
    #     <powerlevel>12.32</powerlevel>
    #     <source>
    #       <type>powersupply0</type>
    #       <state>good</state>
    #     </source>
    foreach my $object (@{$self->{xml_result}->{object}}) {
        next if ($object->{objecttype} ne 'controller');

        # Backslash paths become slash-separated instance names.
        $object->{objectname} =~ s/\\/\//g;
        foreach my $result (@{$object->{powersources}->{source}}) {
            next if ($result->{type} eq '');

            my $instance = $object->{objectname} . '/' . $result->{type};

            next if ($self->check_filter(section => 'psu', instance => $instance));
            next if ($result->{state} =~ /notinstalled/i &&
                     $self->absent_problem(section => 'psu', instance => $instance));

            $self->{components}->{psu}->{total}++;

            $self->{output}->output_add(long_msg => sprintf("power supply '%s' status is '%s' [instance = %s]",
                                                            $instance, $result->{state}, $instance,
                                                            ));
            my $exit = $self->get_severity(label => 'default', section => 'psu', value => $result->{state});
            if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
                $self->{output}->output_add(severity => $exit,
                                            short_msg => sprintf("Power supply '%s' status is '%s'", $instance, $result->{state}));
            }
        }
    }
}

# Check power supplies on disk shelves, including their DC output rails.
sub psu_diskshelf {
    my ($self) = @_;

    # Relevant XML shape:
    # <object>
    #   <objecttype>diskshelf</objecttype>
    #   <objectname>\Hardware\Rack 1\Disk Enclosure 3</objectname>
    #   <powersupplies>
    #     <powersupply>
    #       <name>powersupply1</name>
    #       <operationalstate>good</operationalstate>
    #       <failurepredicted>No</failurepredicted>
    #       <vdcoutputs>
    #         <vdcoutput>
    #           <type>vdc5output</type>
    #           <voltage>5.5</voltage>
    #           <current>6.7</current>
    #         </vdcoutput>
    #         <vdcoutput>
    #           <type>vdc12output</type>
    #           <voltage>12.5</voltage>
    #           <current>4.1</current>
    #         </vdcoutput>
    #       </vdcoutputs>
    foreach my $object (@{$self->{xml_result}->{object}}) {
        next if ($object->{objecttype} ne 'diskshelf');

        $object->{objectname} =~ s/\\/\//g;
        foreach my $result (@{$object->{powersupplies}->{powersupply}}) {
            my $instance = $object->{objectname} . '/' . $result->{name};

            next if ($self->check_filter(section => 'psu', instance => $instance));
            next if ($result->{operationalstate} =~ /notinstalled/i &&
                     $self->absent_problem(section => 'psu', instance => $instance));

            $self->{components}->{psu}->{total}++;

            # Typo fix: was "power suuply".
            $self->{output}->output_add(long_msg => sprintf("power supply '%s' status is '%s' [instance = %s]",
                                                            $instance, $result->{operationalstate}, $instance,
                                                            ));
            my $exit = $self->get_severity(label => 'default', section => 'psu', value => $result->{operationalstate});
            if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
                $self->{output}->output_add(severity => $exit,
                                            short_msg => sprintf("Power supply '%s' status is '%s'", $instance, $result->{operationalstate}));
            }

            # Per-rail numeric thresholds.
            # NOTE(review): the value checked and graphed here is the
            # <current> element although the label/unit say voltage — kept
            # as-is to preserve existing perfdata series; confirm upstream.
            foreach my $voltage (@{$result->{vdcoutputs}->{vdcoutput}}) {
                next if ($voltage->{current} !~ /[0-9]/);

                my ($exit2, $warn, $crit, $checked) = $self->get_severity_numeric(section => 'psu', instance => $instance . '/' . $voltage->{type}, value => $voltage->{current});
                if (!$self->{output}->is_status(value => $exit2, compare => 'ok', litteral => 1)) {
                    $self->{output}->output_add(severity => $exit2,
                                                short_msg => sprintf("Power supply '%s' is %s V", $instance, $voltage->{current}));
                }
                $self->{output}->perfdata_add(label => 'voltage_' . $instance . '/' . $voltage->{type}, unit => 'V',
                                              value => $voltage->{current},
                                              warning => $warn,
                                              critical => $crit,
                                              );
            }
        }
    }
}

# Entry point called by the hardware framework: run both PSU checks unless
# the whole 'psu' section is filtered out.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking power supplies");
    $self->{components}->{psu} = {name => 'psus', total => 0, skip => 0};
    return if ($self->check_filter(section => 'psu'));

    psu_ctrl($self);
    psu_diskshelf($self);
}

1;
wilfriedcomte/centreon-plugins
storage/hp/eva/cli/mode/components/psu.pm
Perl
apache-2.0
6,209
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Response message for FeedMappingService.MutateFeedMappings: carries the
# per-operation results plus any partial-failure error.

package Google::Ads::GoogleAds::V10::Services::FeedMappingService::MutateFeedMappingsResponse;

use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);

use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;

# Constructor: copies the recognised response fields out of $args, prunes
# the unset ones, and blesses the result into this class.
sub new {
  my ($class, $args) = @_;

  my %fields = (
    partialFailureError => $args->{partialFailureError},
    results             => $args->{results});
  my $self = \%fields;

  # Delete the unassigned fields in this object for a more concise JSON payload
  remove_unassigned_fields($self, $args);

  return bless $self, $class;
}

1;
googleads/google-ads-perl
lib/Google/Ads/GoogleAds/V10/Services/FeedMappingService/MutateFeedMappingsResponse.pm
Perl
apache-2.0
1,125
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute # Copyright [2016-2022] EMBL-European Bioinformatics Institute # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. =head1 CONTACT Please email comments or questions to the public Ensembl developers list at <http://lists.ensembl.org/mailman/listinfo/dev>. Questions may also be sent to the Ensembl help desk at <http://www.ensembl.org/Help/Contact>. =head1 NAME Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveSelectProjectedGenes =cut =head1 DESCRIPTION HiveSelectProjectedGenes fetches the genes from the WGA and the CESAR2.0 projection output databases and it selects one of the two overlapping projected transcripts by maximum cov+pid. It stores the selected single-transcript genes into the output database together with the non-overlapping projected transcripts from each projection method. =head1 OPTIONS -dna_db Ensembl database containing the DNA sequences. -wga_db Ensembl database containing the WGA projected genes. -cesar_db Ensembl database containing the CESAR2.0 projected genes. -output_db Ensembl database where the selected projected genes will be stored. 
=cut

package Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveSelectProjectedGenes;

use strict;
use warnings;
use feature 'say';

use Bio::EnsEMBL::Analysis::Tools::Algorithms::ClusterUtils;
use Bio::EnsEMBL::Analysis::Tools::GeneBuildUtils::GeneUtils qw(empty_Gene);
use Bio::EnsEMBL::Analysis::Tools::GeneBuildUtils::TranscriptUtils qw(empty_Transcript);
use DBI qw(:sql_types);

use parent ('Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveBaseRunnableDB');

# Set up the database connections: dna_db provides sequence for the three
# gene databases (wga_db, cesar_db, output_db).
sub fetch_input {
  my $self = shift;

  my $dna_dba = $self->hrdb_get_dba($self->param('dna_db'));
  $self->hrdb_set_con($dna_dba,'dna_db');

  my $wga_dba = $self->hrdb_get_dba($self->param('wga_db'));
  $self->hrdb_set_con($wga_dba,'wga_db');
  $wga_dba->dnadb($dna_dba);

  my $cesar_dba = $self->hrdb_get_dba($self->param('cesar_db'));
  $self->hrdb_set_con($cesar_dba,'cesar_db');
  $cesar_dba->dnadb($dna_dba);

  my $output_dba = $self->hrdb_get_dba($self->param('output_db'));
  $self->hrdb_set_con($output_dba,'output_db');
  $output_dba->dnadb($dna_dba);

  return 1;
}

# For every CESAR-projected transcript, find the equivalent WGA-projected
# transcript (same versionless stable id).  The one with the higher
# coverage+percent_id is stored with its plain biotype; the loser keeps its
# transcripts too but with a '_cesar'/'_wga' biotype suffix so it can be
# distinguished downstream.  WGA transcripts with no CESAR equivalent are
# stored as-is in a second pass.
sub run {
  my $self = shift;

  my $wga_ta   = $self->hrdb_get_con('wga_db')->get_TranscriptAdaptor();
  my $wga_ga   = $self->hrdb_get_con('wga_db')->get_GeneAdaptor();
  my $cesar_ta = $self->hrdb_get_con('cesar_db')->get_TranscriptAdaptor();
  my $cesar_ga = $self->hrdb_get_con('cesar_db')->get_GeneAdaptor();

  my $output_genes;

  # Stable ids of WGA transcripts that had a CESAR equivalent.  These were
  # already handled (stored as selected or as '_wga' non-selected) in the
  # comparison loop, so the WGA-only loop below must skip them.
  # Bug fix: previously only the CESAR-wins case was recorded, so a WGA
  # transcript that WON the comparison was stored a second time by the
  # WGA-only loop, producing duplicate genes in the output database.
  my %wga_with_cesar_equivalent;

  # loop through all cesar projected transcripts
  foreach my $cesar_t (@{$cesar_ta->fetch_all_by_logic_name('cesar')}) {
    my $orig_cesar_sid = $cesar_t->stable_id();
    my $cesar_sid = $orig_cesar_sid;
    $cesar_sid =~ s/\..*//; # remove stable id version if present

    # fetch the equivalent wga projected transcript
    my $wga_t = fetch_transcript_by_stable_id_and_logic_name($wga_ta,$cesar_sid,'project_transcripts');

    if ($wga_t) {
      $wga_with_cesar_equivalent{$cesar_sid} = 1;

      my ($wga_cov,$wga_pid)     = _cov_and_pid($wga_t);
      my ($cesar_cov,$cesar_pid) = _cov_and_pid($cesar_t);

      if ($wga_cov+$wga_pid > $cesar_cov+$cesar_pid) {
        # wga wins: store it under its own biotype, cesar gets '_cesar'.
        push(@{$output_genes},
             _repackage_gene(fetch_gene_by_transcript_stable_id_and_logic_name($wga_ga,$wga_ta,$cesar_sid,'project_transcripts'),
                             $wga_t,$wga_t->biotype()));
        push(@{$output_genes},
             _repackage_gene(fetch_gene_by_transcript_stable_id_and_logic_name($cesar_ga,$cesar_ta,$orig_cesar_sid,'cesar'),
                             $cesar_t,$cesar_t->biotype().'_cesar'));
      } else {
        # cesar wins (ties go to cesar): wga gets '_wga'.
        push(@{$output_genes},
             _repackage_gene(fetch_gene_by_transcript_stable_id_and_logic_name($cesar_ga,$cesar_ta,$orig_cesar_sid,'cesar'),
                             $cesar_t,$cesar_t->biotype()));
        push(@{$output_genes},
             _repackage_gene(fetch_gene_by_transcript_stable_id_and_logic_name($wga_ga,$wga_ta,$cesar_sid,'project_transcripts'),
                             $wga_t,$wga_t->biotype().'_wga'));
      }
    } else {
      # no wga equivalent means cesar is selected
      push(@{$output_genes},
           _repackage_gene(fetch_gene_by_transcript_stable_id_and_logic_name($cesar_ga,$cesar_ta,$orig_cesar_sid,'cesar'),
                           $cesar_t,$cesar_t->biotype()));
    }
  }

  # loop through all wga projected transcripts to select the ones which did
  # not have any cesar equivalent
  foreach my $wga_t (@{$wga_ta->fetch_all_by_logic_name('project_transcripts')}) {
    my $wga_t_sid = $wga_t->stable_id();
    next if (exists $wga_with_cesar_equivalent{$wga_t_sid});

    push(@{$output_genes},
         _repackage_gene(fetch_gene_by_transcript_stable_id_and_logic_name($wga_ga,$wga_ta,$wga_t_sid,'project_transcripts'),
                         $wga_t,$wga_t->biotype()));
  }

  $self->output($output_genes);
  return 1;
}

# Store all selected genes in the output database.
sub write_output {
  my $self = shift;

  my $output_genes = $self->output();
  my $gene_adaptor = $self->hrdb_get_con('output_db')->get_GeneAdaptor();
  foreach my $output_gene (@{$output_genes}) {
    $gene_adaptor->store($output_gene);
  }
  return 1;
}

# Return (coverage, percent_id) from a transcript's first supporting
# feature; all supporting features carry the same values.  (0, 0) when the
# transcript has no supporting features.
sub _cov_and_pid {
  my ($transcript) = @_;

  my ($sf) = @{$transcript->get_all_supporting_features()};
  return (0, 0) unless ($sf);
  return ($sf->hcoverage(), $sf->percent_id());
}

# Strip a fetched gene down to a single transcript with the given biotype:
# drop its transcripts and adaptor-bound state (empty_Gene/empty_Transcript
# detach them from their source database), then attach the transcript.
# Returns the gene, ready to be stored.
sub _repackage_gene {
  my ($gene, $transcript, $biotype) = @_;

  $gene->flush_Transcripts();
  empty_Gene($gene);
  $gene->analysis($transcript->analysis());
  $gene->biotype($biotype);
  empty_Transcript($transcript);
  $gene->add_Transcript($transcript);
  return $gene;
}

# Fetch the single transcript with the given stable id produced by the
# given analysis logic_name.  Returns undef when there is no match.
sub fetch_transcript_by_stable_id_and_logic_name {
  my ($ta,$stable_id,$logic_name) = @_;

  my $constraint = 't.stable_id = ? AND t.analysis_id = (SELECT analysis_id FROM analysis WHERE logic_name = ?)';
  $ta->bind_param_generic_fetch($stable_id,SQL_VARCHAR);
  $ta->bind_param_generic_fetch($logic_name,SQL_VARCHAR);
  my ($transcript) = @{$ta->generic_fetch($constraint)};
  return $transcript;
}

# Fetch the gene containing the transcript identified by stable id and
# logic_name.  NOTE(review): dies if the transcript lookup returns undef,
# as in the original code; callers only use ids known to exist.
sub fetch_gene_by_transcript_stable_id_and_logic_name {
  my ($ga,$ta,$stable_id,$logic_name) = @_;

  my $transcript = fetch_transcript_by_stable_id_and_logic_name($ta,$stable_id,$logic_name);
  my $gene = $ga->fetch_by_transcript_id($transcript->dbID());
  return $gene;
}

1;
Ensembl/ensembl-analysis
modules/Bio/EnsEMBL/Analysis/Hive/RunnableDB/HiveSelectProjectedGenes.pm
Perl
apache-2.0
8,751
#!/usr/bin/env perl #** @file vrunTask.pl # # @brief ViewRun task. Similar to assessmentTask, but for launching viewer runs. # This code runs on a hypervisor. # @verbatim # When started via condor, the command line will contain the inputs # This script needs access to libvirt, so it should be sudo'd. # Create the input folder for the VM image. # Create the 'run.sh' from the BOG specifications with commands to manipulate mysql. # Communicate with the AgentMonitor. # Start the VM. # @end verbatim # @author Dave Boulineau (db), dboulineau@continuousassurance.org # @date 01/02/2014 09:23:48 # @copy Copyright (c) 2014 Software Assurance Marketplace, Morgridge Institute for Research #* use 5.014; use utf8; use warnings; use strict; use FindBin qw($Bin); use lib ( "$FindBin::Bin/../perl5", "$FindBin::Bin/lib" ); use sigtrap 'handler', \&taskShutdown, 'normal-signals'; use Carp qw(carp croak); use ConfigReader::Simple; use Cwd qw(getcwd abs_path); use File::Basename qw(basename dirname); use English '-no_match_vars'; use File::Copy qw(move cp); use File::Path qw(make_path remove_tree); use File::Spec qw(catfile); use Getopt::Long qw/GetOptions/; use Log::Log4perl::Level; use Log::Log4perl; use Pod::Usage qw/pod2usage/; use Storable qw(lock_retrieve); our $VERSION = '1.00'; use SWAMP::Client::AgentClient qw(configureClient removeVmID addVmID createVmID setViewerState storeviewer); use SWAMP::VRunTools qw(createrunscript copyvruninputs parseRunOut); use SWAMP::SWAMPUtils qw( diewithconfess checksumFile createDomainPIDFile getBuildNumber getDomainStateFilename getLoggingConfigString getSWAMPDir getSwampConfig loadProperties removeDomainPIDFile systemcall condor_chirp trim ); my $help = 0; my $man = 0; my $debug = 0; #** @var $debug If true, increase log level to DEBUG my $basedir = getSWAMPDir(); my $startupdir = getcwd; #** @var $agentHost The hostname on which agentMonitor is listening my $agentHost; #** @var $agentPort The agentMonitor port my $agentPort; #** @var $bogfile 
The name of our Bill Of Goods file my $bogfile; #** @var $uri of our libvirt, currently using 'undef' my $uri; #** @var $vmname Name of the Virtual Machine this script will create. my $vmname = "swamp${PID}"; #** @var $vmid Opaque id of the Virtual Machine this script will create. my $vmid; #** @var $ok Flag indicating this script should continue my $ok = 1; #** @var %bog The map that will contain our Bill Of Goods for this assessment run. my %bog; #** @var $appname Textual name for this process's logger my $appname = "vruntask_$PID"; #** @var $log Global Log::Log4perl object my $log; my $outfile; use constant { 'MAX_RESTARTS' => 3, # number of times to allow a VM to restart before giving up. 'MAX_WAITSTUCK' => 600, # number of seconds to wait for a VM to start running the assessment. }; my $start_time = time(); GetOptions( 'bog=s' => \$bogfile, 'out=s' => \$outfile, 'debug' => \$debug, 'vmname=s' => \$vmname, 'ahost=s' => \$agentHost, 'aport=s' => \$agentPort, 'libvirturi' => \$uri, 'help|?' => \$help, 'man' => \$man, ) or pod2usage(2); if ($help) { pod2usage(1); } if ($man) { pod2usage( '-verbose' => 2 ); } if ( !defined($bogfile) ) { pod2usage('--bog parameter required'); } $appname = "vruntask_$vmname"; $appname =~ s/vswamp//sxm; Log::Log4perl->init( getLoggingConfigString() ); if ( !$debug ) { Log::Log4perl->get_logger(q{})->remove_appender('Screen'); } $log = Log::Log4perl->get_logger(q{}); $log->level( $debug ? $TRACE : $INFO ); # Catch anyone who calls die. local $SIG{'__DIE__'} = \&diewithconfess; configureClients( $agentHost, $agentPort ); my $ver = "$VERSION." . getBuildNumber(); $log->info( "#### $appname v$ver running on " . `hostname -f` ); # 1. read the BOG file # 2. Create the Input disk from the DB image # 3. Create the run.sh # 4. start the VM # 4.5 inform agentmonitor that the VM has been launched # 5. wait for the run.sh to indicate success/failure # 6. 
report to agentmonitor that the viewer is running if ( !setupWorkingSpace($vmname) ) { $ok = 0; $log->error('Unable to set up working space.'); # TODO report to the AgentMonitor that things have failed. exit 1; } if ( loadProperties( $bogfile, \%bog ) == 0 ) { $ok = 0; $log->error("Unable to load BOG $bogfile"); # TODO report to the AgentMonitor that things have failed. exit 1; } condor_chirp($bog{'intent'}, "NAME", "vmname", $vmname); condor_chirp($bog{'intent'}, "ID", "execution_record_uuid", $bog{'execrunid'}); condor_chirp($bog{'intent'}, "TIME", "vrunTask_start", time()); createDomainPIDFile($PID, $vmname); $log->info("VRUNTASK TIME: Launching viewer for $bog{'project'} "); my $inputfolder = 'input'; if ( copyvruninputs( \%bog, $inputfolder ) ) { my $runscript_time = time(); $log->info("VRUNTASK TIME: copyvruninputs complete - seconds: ", $runscript_time - $start_time); if ( createrunscript( \%bog, $inputfolder , getSwampConfig()->get('vruntimeout') // 28_800 ) ) { $log->info("VRUNTASK TIME: createrunscript complete - seconds: ", time() - $runscript_time); $vmid = createVmID(); addVmID( $vmid, $bog{'execrunid'}, $vmname ); #updateRunStatus( $bog{'execrunid'}, 'Starting virtual machine' ); # Let's make a VM # launch_platform is set by copyvruninputs and is based on the version of the viewer that is needed # to properly handle user data. my $vm_time = time(); condor_chirp($bog{'intent'}, "TIME", "start_vm", time()); my ( $output, $status ) = systemcall( "PERL5LIB=$basedir/perl5 $basedir/bin/start_vm --name $vmname input $bog{'launch_platform'}" ); if ($status) { $log->error("start_vm returned: $output"); removeVmID( \$vmid ); $log->error("Unable to startVM $status"); } else { $log->info("VRUNTASK TIME: start_vm complete - seconds: ", time() - $vm_time); condor_chirp($bog{'intent'}, "TIME", "setDeadman", time()); setDeadman(); setStarts(0); # Clean slate. 
watchVM( \%bog ); removeVmID( \$vmid ); my $vm_cleanup = time(); ( $output, $status ) = systemcall("PERL5LIB=$basedir/perl5 $basedir/bin/vm_cleanup --force $vmname 2>&1"); if ($status) { $log->warn("Unable to cleanup from $vmname: $output"); } $log->info("VRUNTASK TIME: vm_cleanup complete - seconds: ", time() - $vm_cleanup); } } else { $log->error("Error creating scripts inputs"); } } else { $log->error("Error copying inputs"); } removeStateFile($vmname); cleanupWorkingSpace(); removeDomainPIDFile($PID, $vmname); $log->info("VRUNTASK TIME: exiting - total seconds: ", time() - $start_time); condor_chirp($bog{'intent'}, "TIME", "exit", time()); exit $ok ? 0 : 1; #** @function watchVM( ) # @brief Block until our VM shuts down or the deadman timer goes off. # # @return 1 if all is well, 0 if the VM is stuck. #* sub watchVM { my $bogref = shift; my $quitLoop = 0; my $lastPoll = 0; my $ret = 0; my $needUpdate = 1; while ( !$quitLoop ) { if ( !checkDeadman() ) { $quitLoop = 1; $log->info("Exiting loop because of deadMan"); $ret = 0; last; } my $rootref = localGetDomainStatus($vmname); my $currentState = $rootref->{'domainstate'}; if ( !checkState($currentState) ) { $log->warn("domain state for $vmname has been UNKNOWN for too long"); } if ( $currentState eq 'started' ) { # NB increasing the frequency of this poll will likely not # have the desired effect, the overhead of guestfish too great. # # If it's been more than 10 seconds and the VM is # running if ( time - $lastPoll > 10 ) { if ($needUpdate) { $needUpdate = sendUpdate( $currentState, $rootref, $bogref ); } else { setDeadman(); # We have communicated, all is well. } $lastPoll = time; } } elsif ( $currentState eq 'shutdown' || $currentState eq 'stopped' ) { # Tell AgentMonitor that this viewer is shutdown. 
setViewerState( 'domain' => $vmname, 'viewer' => $bogref->{'viewer'}, 'vieweruuid' => $bogref->{'viewer_uuid'}, 'urluuid' => $bogref->{'urluuid'}, 'project' => $bogref->{'project'}, 'state' => 'shutdown' ); saveViewerDatabase($bogref, $vmname); $quitLoop = 1; $ret = 1; last; } sleep 5; } return $ret; } #** @function saveViewerDatabase( \%bogref ) # @brief persist the SQL database from the VM to /swamp and then invoke storeviewer method. # # @param bogref reference to this run's Bill Of Goods # @return 0 on failure, 1 on success. #* sub saveViewerDatabase { my $bogref = shift; my $savefile = q{viewerdb.tar.gz}; # Grab the /var/lib/mysql folder from the VM my ( $output, $status ) = systemcall("sudo virt-copy-out -d $vmname /var/lib/mysql ."); if ($status) { $log->error("Cannot copy the viewer database : $output"); } # Create the viewerdb tarball containing the viewer database and the version of the viewer master image # used. The viewer master version is used to track upgrade paths. ( $output, $status ) = systemcall("cd mysql;rm -f version_*;touch version_$bogref->{'platform'};tar -czvf ../$savefile ib* aria* version_*"); if ($status) { $log->error("Cannot compress : $output"); return 0; } my $sharedfolder = File::Spec->catfile( $bogref->{'resultsfolder'}, $bogref->{'viewer_uuid'} ); make_path($sharedfolder); my %results; $results{'viewerdbchecksum'} = checksumFile( $savefile ); if (!cp($savefile, $sharedfolder)) { $log->error("Cannot copy $savefile to $sharedfolder : $OS_ERROR"); return 0; } $results{'vieweruuid'} = $bogref->{'viewer_uuid'}; $results{'viewerdbpath'} = abs_path(File::Spec->catfile($sharedfolder, q{viewerdb.tar.gz})); # MYSQL needs to own our result files folders so they can be cleaned up. my ( $uid, $gid ) = ( getpwnam('mysql') )[ 2, 3 ]; if ( chown( $uid, $gid, $sharedfolder ) != 1 ) { $log->warning("Cannot chown folder $sharedfolder to mysql user. 
$OS_ERROR" ); } if ( chown( $uid, $gid, $results{'viewerdbpath'} ) != 1 ) { $log->warning("Cannot chown file $results{'viewerdbpath'} to mysql user. $OS_ERROR" ); } storeviewer(%results); return 1; } sub sendUpdate { my $state = shift; my $ref = shift; my $bogref = shift; my $output; my $status; my $ret = 1; ( $output, $status ) = systemcall("export LIBGUESTFS_ATTACH_METHOD=libvirt; guestfish --ro --mount /dev/sdc:/mnt/out --mount /dev/sdb:/mnt/in -d $vmname -i cat /mnt/out/run.out 2>&1"); if ( !$status ) { setStarts(0); # Clean slate. haveCommunicated(); setDeadman(); # Reset the timer, we have communications. # The scalar $output will have all of the stuff in it from the VM run.out if ($output =~/ERROR:\sNO\sIP/sxm) { setViewerState( 'domain' => $vmname, 'viewer' => $bogref->{'viewer'}, 'project' => $bogref->{'project'}, 'vieweruuid' => $bogref->{'viewer_uuid'}, 'apikey' => $bogref->{'apikey'}, 'urluuid' => $bogref->{'urluuid'}, 'state' => 'shutdown' ); $log->error("VM detected no IP address and has shutdown"); return 0; } my %values = parseRunOut( $bogref, $output ); setViewerState( 'domain' => $vmname, 'viewer' => $bogref->{'viewer'}, 'project' => $values{'project'}, 'vieweruuid' => $bogref->{'viewer_uuid'}, 'ipaddress' => $values{'ipaddr'}, 'apikey' => $bogref->{'apikey'}, 'urluuid' => $bogref->{'urluuid'}, 'state' => $values{'state'} ); if ( defined( $values{'ipaddr'} ) ) { $ret = 0; # no need to send further updates } } else { $log->info("Cannot get run.out from $vmname: $status ($state) output: $output"); } return $ret; } { my $_haveCommunicated = 0; sub forgetCommunicated { $_haveCommunicated = 0; return; } sub haveCommunicated { $_haveCommunicated = 1; return; } sub getHaveCommunicated { return $_haveCommunicated; } } sub removeStateFile { my $domname = shift; my $statefile = getDomainStateFilename( $basedir, $domname ); if ( unlink($statefile) != 1 ) { $log->warn("Unable to remove state file $statefile: $OS_ERROR"); } return; } sub localGetDomainStatus { 
my $domname = shift; my $statefile = abs_path("$basedir/run/$domname.state"); # There should be a state file, but there is not. if ( !-r $statefile ) { return { 'domainstate' => 'UNKNOWN' }; } my $root = lock_retrieve($statefile); return $root; } #** @function checkState( ) # @brief Examine current VM state. Return 1 if the loop should continue, 0 if the wait loop should exit. # This is intended to be called only from the doWaitLoop. # If we are in UNKNOWN state for more than 1 minute, something # has gone wrong and we need to just exit. # # @param state Current state as reported by libvirt # @return 0 or 1 #* sub checkState { my $state = shift; state $inUnknown = time; if ( $state eq 'UNKNOWN' ) { if ( ( time - $inUnknown ) > 60 ) { return 0; } } else { # Any state other than UNKNOWN resets the counter $inUnknown = time; } return 1; } #** @function checkDeadman( ) # @brief Check the deadman timer on the VM and if it has expired, # try to restart N times. If after N failures, give up. # # @return # @see #* sub checkDeadman { my $now = time; my $ret = 0; if ( numberStarts() < main->MAX_RESTARTS ) { # If we get no response, kick the VM over. if ( abs( $now - getDeadman() ) > main->MAX_WAITSTUCK ) { $ret = restartVM(1); $log->info("Restarting VM"); } else { $ret = 1; } } return $ret; } { #** @var $numberStarts The number of times we've had to start our VM. my $nRestarts = 0; sub setStarts { $nRestarts = shift; return; } sub numberStarts { return $nRestarts; } } sub restartVM { my $needDestroy = shift; my $ret = 0; setStarts( numberStarts() + 1 ); # Let's make a VM my $output; my $status = 0; if ($needDestroy) { ( $output, $status ) = systemcall("virsh destroy $vmname"); } if ( !$status ) { ( $output, $status ) = systemcall("virsh start $vmname"); if ($status) { $log->error("Unable to start stuck VM $vmname"); } else { $ret = 1; forgetCommunicated(); setDeadman(); $log->info( "Restarted stuck VM $vmname " . numberStarts() . " times." 
); } } else { $log->error("Unable to destroy stuck VM $vmname $status: ($output)"); } return $ret; } { #** @var $vmDeadman the timer started when we launch our VM. This is part of a # deadman timer watching for feedback from the VM. No feedback is assumed to be a # failure to launch. my $vmDeadman; sub getDeadman { return $vmDeadman; } sub setDeadman { $vmDeadman = time; return; } } sub configureClients { my $aHost = shift; my $aPort = shift; my $config = getSwampConfig(); if ( !defined($aPort) ) { $aPort = int( $config->get('agentMonitorJobPort') ); } if ( !defined($aHost) ) { $aHost = $config->get('agentMonitorHost'); } if ( defined($aPort) && defined($aHost) ) { SWAMP::Client::AgentClient::configureClient( $aHost, $aPort ); } return; } { my $workingspace; #** @function setupWorkingSpace( $suffix ) # @brief Based on the desired resultsFolder in the swamp.conf, build and chdir to a temp space in the resultsFolder tree. # # @param suffix The folder we should use. This should be unique in a SWAMP instance. # @return 1 if we succeeded, 0 otherwise. The assessment will fail if we return 0. 
#* sub setupWorkingSpace { my $suffix = shift; my $config = getSwampConfig(); $workingspace = $config->get("resultsFolder") // q{.}; $workingspace = File::Spec->catfile( $workingspace, q{temp}, $suffix ); make_path( $workingspace, { 'error' => \my $err } ); if ( @{$err} ) { for my $diag ( @{$err} ) { my ( $file, $message ) = %{$diag}; if ( $file eq q{} ) { $log->error("Cannot make working folder [$workingspace]: $message"); } else { $log->error("Cannot make working folder [$workingspace]: $file $message"); } } return 0; } my ( $output, $status ) = systemcall("tar -C $workingspace -xf input*.tgz"); if ($status) { $log->error("Cannot extract input to $workingspace : $status $OS_ERROR $output"); return 0; } if (-r '.chirp.config') { cp('.chirp.config', $workingspace); } chdir $workingspace; if ( getcwd() ne abs_path($workingspace) ) { $log->error("Cannot chdir to input to $workingspace : $OS_ERROR"); return 0; } return 1; } #** @function cleanupWorkingSpace( ) # @brief Remove the working space set up by #setupWorkingSpace() #* sub cleanupWorkingSpace { if ($workingspace) { $log->info("Cleaning up $workingspace"); remove_tree($workingspace); } return; } } sub taskShutdown { if ( defined($vmid) ) { # If vmid is still defined our VM is viable. systemcall("PERL5LIB=$basedir/perl5 $basedir/bin/vm_cleanup --force $vmname 2>&1"); removeDomainPIDFile($PID, $vmname); my $statefile = getDomainStateFilename( $basedir, $vmname ) . q{died}; if ( open( my $fh, '>', $statefile ) ) { print $fh "Caught signal @_, shutting down\n"; if ( !close($fh) ) { # nothing to do, we're shutting down. } } } # Try and clean up. 
cleanupWorkingSpace(); croak "Caught signal @_, shutting down"; } sub logtag { ( my $name = $PROGRAM_NAME ) =~ s/\.pl//sxm; return basename($name); } sub logfilename { ( my $name = $appname ) =~ s/\.pl//sxm; $name = basename($name); return "$basedir/log/${name}.log"; } __END__ =pod =encoding utf8 =head1 NAME =head1 SYNOPSIS =head1 DESCRIPTION =head1 OPTIONS =over 8 =item --man Show manual page for this script =back =head1 EXAMPLES =head1 SEE ALSO =cut
OWASP/open-swamp
exec/opt/swamp/bin/vrunTask.pl
Perl
apache-2.0
19,668
#!/usr/local/bin/perl
use strict;
use warnings;
###########################################################################
##                                                                       ##
##              Carnegie Mellon University, Pittsburgh, PA               ##
##                      Copyright (c) 2004-2005                          ##
##                        All Rights Reserved.                           ##
##                                                                       ##
##  Permission is hereby granted, free of charge, to use and distribute  ##
##  this software and its documentation without restriction, including   ##
##  without limitation the rights to use, copy, modify, merge, publish,  ##
##  distribute, sublicense, and/or sell copies of this work, and to      ##
##  permit persons to whom this work is furnished to do so, subject to   ##
##  the following conditions:                                            ##
##   1. The code must retain the above copyright notice, this list of    ##
##      conditions and the following disclaimer.                         ##
##   2. Any modifications must be clearly marked as such.                ##
##   3. Original authors' names are not deleted.                         ##
##   4. The authors' names are not used to endorse or promote products   ##
##      derived from this software without specific prior written        ##
##      permission.                                                      ##
##                                                                       ##
##  CARNEGIE MELLON UNIVERSITY AND THE CONTRIBUTORS TO THIS WORK         ##
##  DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING      ##
##  ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT   ##
##  SHALL CARNEGIE MELLON UNIVERSITY NOR THE CONTRIBUTORS BE LIABLE      ##
##  FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES    ##
##  WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN   ##
##  AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,          ##
##  ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF       ##
##  THIS SOFTWARE.                                                       ##
##                                                                       ##
###########################################################################
##                                                                       ##
##  Author  : S P Kishore (skishore@cs.cmu.edu)                          ##
##  Date    : June 2005                                                  ##
##                                                                       ##
###########################################################################
##
## Split a plain-text file into paragraphs of roughly 100 words each and
## write them out as festvox-style prompt entries:
##     ( <prefix>_<nnnn> " paragraph text " )

my $nargs = $#ARGV + 1;
if ($nargs != 3) {
  print "Usage: perl file.pl <file1> <prompt-file> <prefix>\n";
  exit 1;   # was a bare `exit` (status 0); a usage error should not look like success
}
my $inF  = $ARGV[0];   # input text file
my $prmF = $ARGV[1];   # output prompt file
my $pfx  = $ARGV[2];   # prompt-id prefix

# Read the whitespace-normalised input lines and group them into paragraphs:
# a blank line terminates the current paragraph.
my @ln = Get_ProcessedLines($inF);
my @para;
my $p = 0;
for (my $i = 0; $i <= $#ln; $i++) {
  my @wrd = Get_Words($ln[$i]);
  if (scalar(@wrd) == 0) {
    $p = $p + 1;                 # blank line: start a new paragraph slot
  } else {
    $para[$p] //= '';            # guard: first line of a fresh paragraph
    $para[$p] = $para[$p] . " " . $ln[$i];
  }
}

# Drop empty paragraph slots (runs of consecutive blank lines leave holes).
my @mpara;
for (my $j = 0; $j <= $#para; $j++) {
  my @wrd = Get_Words($para[$j] // '');
  if (scalar(@wrd) > 0) {
    push(@mpara, $para[$j]);
  }
}
@para = @mpara;

# Divide long paragraphs into sub-paragraphs of about $maxW words.
# This keeps the later alignment steps (edec) fast and memory-friendly.
# Once $maxW words have accumulated, we split at the next sentence boundary:
# the current word ends in '.', has at least three characters (to skip
# initials such as "J."), and the next word starts with a capital letter.
my $k = 0;
my @modP;
my $maxW = 100;
for (my $j = 0; $j <= $#para; $j++) {
  my @wrd = Get_Words($para[$j]);
  my $nw = scalar(@wrd);
  if ($nw <= $maxW) {
    $modP[$k] = $para[$j];
    $k++;
  } else {
    $modP[$k] = $wrd[0];
    my $flag = 0;                # becomes 1 once a multiple of $maxW words is reached
    for (my $z = 1; $z < $#wrd; $z++) {
      if ($z % $maxW == 0) {
        $flag = 1;
      }
      $modP[$k] //= '';
      if ($flag == 0) {
        $modP[$k] = $modP[$k] . " " . $wrd[$z];
      } else {
        my $cw  = $wrd[$z];      # current word
        my $nx  = $wrd[$z + 1];  # next word
        my @lcw = split(//, $cw);
        my $lch = $lcw[$#lcw];   # last character of current word
        my @lnw = split(//, $nx);
        my $nch = $lnw[0];       # first character of next word
        if ($lch eq "." && $nch =~ m/[A-Z]/ && $#lcw >= 2) {
          # Sentence boundary found: close this sub-paragraph here.
          $modP[$k] = $modP[$k] . " " . $wrd[$z];
          $k++;
          $flag = 0;
        } else {
          $modP[$k] = $modP[$k] . " " . $wrd[$z];
        }
      }
    }
    # The loop stops before the final word; it always lands in the open slot.
    $modP[$k] //= '';
    $modP[$k] = $modP[$k] . " " . $wrd[$#wrd];
    $k++;
  }
}
my @para1 = @modP;

# Emit the prompt file and report word-count statistics per paragraph.
my $min = 1.0e+35;
my $max = -1.0e+35;
my $avg = 0;
open(my $fp_prm, '>', $prmF) or die "Cannot open $prmF: $!\n";  # was unchecked 2-arg open
for (my $j = 0; $j <= $#para1; $j++) {
  my @wrd = Get_Words($para1[$j]);
  my $nw = scalar(@wrd);
  print "Para $j - $nw\n";
  if ($min > $nw) { $min = $nw; }
  if ($max < $nw) { $max = $nw; }
  $avg = $avg + $nw;
  my $sid = Get_ID($j);
  my $cln = $para1[$j];
  Make_SingleSpace(\$cln);
  Handle_Quote(\$cln);
  print $fp_prm "( $pfx\_$sid \" $cln \") \n";
}
close($fp_prm) or die "Cannot close $prmF: $!\n";
# Guard against an empty input (previously: division by zero).
if (scalar(@para1) > 0) {
  $avg = int($avg / scalar(@para1));
} else {
  $avg = 0;
}
print "Min / Max: $min / $max ; Avg: $avg\n";

# Zero-pad a paragraph index to four digits (indices >= 10000 pass unpadded).
sub Get_ID {
  my $id = shift;
  my $rv = "";
  if ($id < 10) {
    $rv = "000";
  } elsif ($id < 100) {
    $rv = "00";
  } elsif ($id < 1000) {
    $rv = "0";
  }
  $rv = $rv . $id;
  return $rv;
}

# Backslash-escape double quotes in-place so the prompt remains a valid
# scheme string.
sub Handle_Quote {
  chomp(${$_[0]});
  ${$_[0]} =~ s/[\"]/\\"/g;
}

# Trim leading/trailing whitespace and collapse internal runs to single
# spaces, in-place.
sub Make_SingleSpace {
  chomp(${$_[0]});
  ${$_[0]} =~ s/[\s]+$//;
  ${$_[0]} =~ s/^[\s]+//;
  ${$_[0]} =~ s/[\s]+/ /g;
  ${$_[0]} =~ s/[\t]+/ /g;
}

# Exit (non-zero) with a message if the file does not exist.
sub Check_FileExistence {
  my $inF = shift;
  if (!(-e $inF)) {
    print "Cannot open $inF \n";
    exit 1;   # was a bare `exit` (status 0) on a fatal error
  }
  return 1;
}

# Read all raw lines of a file.
sub Get_Lines {
  my $inF = shift;
  Check_FileExistence($inF);
  open(my $fh, '<', $inF) or die "Cannot open $inF: $!\n";
  my @dat = <$fh>;
  close($fh);
  return @dat;
}

# Split a line into whitespace-separated words (empty list for blank lines).
sub Get_Words {
  my $ln = shift;
  Make_SingleSpace(\$ln);
  my @wrd = split(/ /, $ln);
  return @wrd;
}

# Read all lines of a file with whitespace normalised on each line.
sub Get_ProcessedLines {
  my $inF = shift;
  Check_FileExistence($inF);
  open(my $fh, '<', $inF) or die "Cannot open $inF: $!\n";
  my @dat = <$fh>;
  close($fh);
  my @nd;
  for (my $i = 0; $i <= $#dat; $i++) {
    my $tl = $dat[$i];
    Make_SingleSpace(\$tl);
    $nd[$i] = $tl;
  }
  return @nd;
}
saikrishnarallabandi/Festival-Speech-Synthesis-System
festvox/src/interslice/scripts/prepare_prompts_v1.pl
Perl
apache-2.0
6,685
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

package centreon::common::emc::navisphere::mode::spcomponents::psu;

use strict;
use warnings;

# A power supply is healthy only when its state is "Present" or "Valid";
# any other reported state maps to CRITICAL.
my @conditions = (
    ['^(?!(Present|Valid)$)' => 'CRITICAL'],
);

# Parse power-supply lines out of the cached naviseccli response and record
# one long_msg per PSU, raising a short_msg at the mapped severity when the
# state is unhealthy. Honours the module's exclude filters.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking power supplies");
    $self->{components}->{psu} = {name => 'psus', total => 0, skip => 0};
    return if ($self->check_exclude(section => 'psu'));

    # Lines look like one of:
    #   Enclosure SPE Power A0 State: Present
    #   Bus 0 Enclosure 0 Power A State: Present
    while ($self->{response} =~ /^(?:Bus\s+(\d+)\s+){0,1}Enclosure\s+(\S+)\s+(Power)\s+(\S+)\s+State:\s+(.*)$/mgi) {
        # Capture immediately: the instance id includes the bus number when present.
        my ($bus, $psu_state) = ($1, $5);
        my $psu_id = "$2.$3.$4";
        $psu_id = "$bus.$psu_id" if (defined($bus));

        next if ($self->check_exclude(section => 'psu', instance => $psu_id));
        $self->{components}->{psu}->{total}++;

        $self->{output}->output_add(long_msg => sprintf("Power Supply '%s' state is %s.",
                                                        $psu_id, $psu_state));
        foreach my $condition (@conditions) {
            next if ($psu_state !~ /$condition->[0]/i);
            $self->{output}->output_add(severity => $condition->[1],
                                        short_msg => sprintf("Power Supply '%s' state is %s",
                                                             $psu_id, $psu_state));
            last;
        }
    }
}

1;
golgoth31/centreon-plugins
centreon/common/emc/navisphere/mode/spcomponents/psu.pm
Perl
apache-2.0
2,218
package Google::Ads::AdWords::v201809::DistinctError::Reason;
use strict;
use warnings;

# XML namespace of the enclosing WSDL schema.
sub get_xmlns { return 'https://adwords.google.com/api/adwords/cm/v201809'; }

# derivation by restriction
use base qw(
    SOAP::WSDL::XSD::Typelib::Builtin::string);

1;

__END__

=pod

=head1 NAME

=head1 DESCRIPTION

Perl data type class for the XML Schema defined simpleType
DistinctError.Reason from the namespace https://adwords.google.com/api/adwords/cm/v201809.

The reasons for the validation error. 

This class is derived from 
   SOAP::WSDL::XSD::Typelib::Builtin::string
. SOAP::WSDL's schema implementation does not validate data, so you can use it exactly like its base type.

# Description of restrictions not implemented yet.

=head1 METHODS

=head2 new

Constructor.

=head2 get_value / set_value

Getter and setter for the simpleType's value.

=head1 OVERLOADING

Depending on the simple type's base type, the following operations are overloaded

 Stringification
 Numerification
 Boolification

Check L<SOAP::WSDL::XSD::Typelib::Builtin> for more information.

=head1 AUTHOR

Generated by SOAP::WSDL

=cut
googleads/googleads-perl-lib
lib/Google/Ads/AdWords/v201809/DistinctError/Reason.pm
Perl
apache-2.0
1,117
=head1 LICENSE

Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

=cut

# Page component shown when the web Tools services are switched off:
# renders a "Temporarily Unavailable" error panel with a configurable message.
package EnsEMBL::Web::Component::Tools::Down;

use strict;
use warnings;

use URI::Escape qw(uri_escape);
use EnsEMBL::Web::Utils::FileHandler qw(file_get_contents);
use EnsEMBL::Web::Exceptions;

use parent qw(EnsEMBL::Web::Component::Tools);

# Component setup: this panel is always rendered inline, never via AJAX.
sub _init {
  my $self = shift;
  $self->SUPER::_init;
  $self->ajaxable(0);
}

# Builds the HTML for the unavailability notice.
# The message text is read from the file named by the TOOLS_UNAVAILABLE_MESSAGE_FILE
# setting; if that read throws, the TOOLS_UNAVAILABLE_MESSAGE setting is used
# instead, and a hard-coded default covers the case where both are empty.
sub content {
  my $self = shift;
  my $sd = $self->hub->species_defs;
  my $message;

  # try/catch come from EnsEMBL::Web::Exceptions (note the required trailing
  # semicolon after the catch block).
  try {
    $message = file_get_contents($sd->TOOLS_UNAVAILABLE_MESSAGE_FILE);
  } catch {
    $message = $sd->TOOLS_UNAVAILABLE_MESSAGE;
  };

  # %2$s in the format string reuses the helpdesk email as the link text;
  # the mail subject carries the server name, URI-escaped for the mailto link.
  return $self->_error('Temporarily Unavailable', sprintf '%s Please contact <a href="mailto:%s?subject=%s">%2$s</a> for more information.',
    $message || 'Web Tools are temporarily not available.',
    $sd->ENSEMBL_HELPDESK_EMAIL,
    uri_escape(sprintf 'Tools unavailable on %s', $sd->ENSEMBL_SERVERNAME)
  );
}

1;
Ensembl/public-plugins
tools/modules/EnsEMBL/Web/Component/Tools/Down.pm
Perl
apache-2.0
1,593
# # COPYRIGHT NOTICE. Copyright (C) 2007 Siemens Corporate Research, # Inc. ("caBIG(tm) Participant"). The eXtensible Imaging Platform # (XIP) was created with NCI funding and is part of the caBIG(tm) # initiative. The software subject to this notice and license # includes both human readable source code form and machine # readable, binary, object code form (the "caBIG(tm) Software"). # # This caBIG(tm) Software License (the "License") is between # caBIG(tm) Participant and You. "You (or "Your") shall mean a # person or an entity, and all other entities that control, are # controlled by, or are under common control with the entity. # "Control" for purposes of this definition means (i) the direct or # indirect power to cause the direction or management of such # entity, whether by contract or otherwise, or (ii) ownership of # fifty percent (50%) or more of the outstanding shares, or (iii) # beneficial ownership of such entity. # # LICENSE. Provided that You agree to the conditions described # below, caBIG(tm) Participant grants You a non-exclusive, # worldwide, perpetual, fully-paid-up, no-charge, irrevocable, # transferable and royalty-free right and license in its rights in # the caBIG(tm) Software, including any copyright or patent rights # therein that may be infringed by the making, using, selling, # offering for sale, or importing of caBIG(tm) Software, to (i) # use, install, access, operate, execute, reproduce, copy, modify, # translate, market, publicly display, publicly perform, and # prepare derivative works of the caBIG(tm) Software; (ii) make, # have made, use, practice, sell, and offer for sale, and/or # otherwise dispose of caBIG(tm) Software (or portions thereof); # (iii) distribute and have distributed to and by third parties the # caBIG(tm) Software and any modifications and derivative works # thereof; and (iv) sublicense the foregoing rights set out in (i), # (ii) and (iii) to third parties, including the right to license # such rights to further 
third parties. For sake of clarity, and # not by way of limitation, caBIG(tm) Participant shall have no # right of accounting or right of payment from You or Your # sublicensees for the rights granted under this License. This # License is granted at no charge to You. Your downloading, # copying, modifying, displaying, distributing or use of caBIG(tm) # Software constitutes acceptance of all of the terms and # conditions of this Agreement. If you do not agree to such terms # and conditions, you have no right to download, copy, modify, # display, distribute or use the caBIG(tm) Software. # # 1. Your redistributions of the source code for the caBIG(tm) # Software must retain the above copyright notice, this list # of conditions and the disclaimer and limitation of # liability of Article 6 below. Your redistributions in # object code form must reproduce the above copyright notice, # this list of conditions and the disclaimer of Article 6 in # the documentation and/or other materials provided with the # distribution, if any. # 2. Your end-user documentation included with the # redistribution, if any, must include the following # acknowledgment: "This product includes software developed # by Siemens Corporate Research Inc." If You do not include # such end-user documentation, You shall include this # acknowledgment in the caBIG(tm) Software itself, wherever # such third-party acknowledgments normally appear. # 3. You may not use the names "Siemens Corporate Research, # Inc.", "The National Cancer Institute", "NCI", "Cancer # Bioinformatics Grid" or "caBIG(tm)" to endorse or promote # products derived from this caBIG(tm) Software. This # License does not authorize You to use any trademarks, # service marks, trade names, logos or product names of # either caBIG(tm) Participant, NCI or caBIG(tm), except as # required to comply with the terms of this License. # 4. 
For sake of clarity, and not by way of limitation, You may # incorporate this caBIG(tm) Software into Your proprietary # programs and into any third party proprietary programs. # However, if You incorporate the caBIG(tm) Software into # third party proprietary programs, You agree that You are # solely responsible for obtaining any permission from such # third parties required to incorporate the caBIG(tm) # Software into such third party proprietary programs and for # informing Your sublicensees, including without limitation # Your end-users, of their obligation to secure any required # permissions from such third parties before incorporating # the caBIG(tm) Software into such third party proprietary # software programs. In the event that You fail to obtain # such permissions, You agree to indemnify caBIG(tm) # Participant for any claims against caBIG(tm) Participant by # such third parties, except to the extent prohibited by law, # resulting from Your failure to obtain such permissions. # 5. For sake of clarity, and not by way of limitation, You may # add Your own copyright statement to Your modifications and # to the derivative works, and You may provide additional or # different license terms and conditions in Your sublicenses # of modifications of the caBIG(tm) Software, or any # derivative works of the caBIG(tm) Software as a whole, # provided Your use, reproduction, and distribution of the # Work otherwise complies with the conditions stated in this # License. # 6. THIS caBIG(tm) SOFTWARE IS PROVIDED "AS IS" AND ANY # EXPRESSED OR IMPLIED WARRANTIES (INCLUDING, BUT NOT LIMITED # TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, NON- # INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE) ARE # DISCLAIMED. IN NO EVENT SHALL SIEMENS CORPORATE RESEARCH # INC. 
OR ITS AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT,
#          INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
#          (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
#          GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
#          BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
#          LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#          (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
#          OF THE USE OF THIS caBIG(tm) SOFTWARE, EVEN IF ADVISED OF
#          THE POSSIBILITY OF SUCH DAMAGE.
#
###############################################################################

# Filter::Field::OutputArg -- a filter output field that is bound to a named
# argument of the wrapped filter. Inherits name()/enable() from Filter::Field.
package Filter::Field::OutputArg;

use strict;
use warnings;

require Filter::Field;
our @ISA = qw(Filter::Field);

# Constructor.
#   $name          - field id used in the serialized <output> element
#   $argument_name - name of the filter argument this output maps to
# (Empty `()` prototypes removed: they are ignored on method calls and were
# only misleading.)
sub new {
    my ( $that, $name, $argument_name ) = @_;
    my $class = ref($that) || $that;

    my $self = {
        NAME          => $name,
        ARGUMENT_NAME => $argument_name,
        OWNER         => undef,
        ENABLED       => 1
    };

    # Transform $self into an object of class $class
    return bless $self, $class;
}

# Accessor for the bound argument name.
sub argument_name {
    my $self = shift;
    return $self->{ARGUMENT_NAME};
}

# Field classification predicates (parallel to the sibling Filter::Field classes).
sub is_output   { return 1; }
sub is_arg      { return 1; }
sub is_accessor { return 0; }

# Serialize this field as an XML <output> element; name() and enable() are
# inherited from Filter::Field.
sub serialize {
    my $self = shift;
    my $enable = $self->enable() ? "" : " enabled = \"false\"";
    return "<output id = \""
        . $self->name()
        . "\" type = \"ARG( "
        . $self->argument_name()
        . " )\""
        . $enable . "/>";
}

# A Perl module must end with a true value; previously this file relied on the
# incidental truthiness of its last executed statement instead of an explicit 1;
1;
OpenXIP/xip-libraries
src/wrap/itk/modules/Filter/Field/OutputArg.pm
Perl
apache-2.0
7,655
package OpenXPKI::Server::API2::Plugin::Profile::Util;
use Moose;

# Project modules
use Data::Dumper;
use OpenXPKI::Debug;
use OpenXPKI::Server::Context qw( CTX );

=head2 get_input_elements

Read field definitions from config and return them in a HashRef.

The following config paths are queried (stops on first finding):

    realm.XXX.profile.PROFILENAME.template.FIELDNAME
    realm.XXX.profile.template.FIELDNAME
    realm.XXX.profile.template._default

B<Parameters>

=over

=item * C<profile> (Str) - profile name

=item * C<input_names> (ArrayRef) - list of input names

=back

B<Returns> an ArrayRef of HashRefs, one per requested input name, with all
keys of the config hash lower-cased. Fields of type C<select> get an extra
C<options> list, dynamic key/value fields get a normalized C<keys> list, and
fields with C<min> or C<max> set are marked C<clonable>.

=cut

sub get_input_elements {
    my ($self, $profile, $input_names) = @_;
    my $config = CTX('config');

    my @definitions;
    for my $input_name (@{$input_names}) {
        my ($input, $input_path);
        ##! 32: "Input $input_name"
        # each input name can have a local or/and a global definition,
        # we need to probe where to find it (profile-local wins)
        for my $path (['profile', $profile, 'template', $input_name], ['profile', 'template', $input_name]) {
            $input = $config->get_hash($path);
            if ($input) {
                ##! 64: "Element found at $path"
                $input_path = $path;
                last;
            }
        }

        if (not $input) {
            # check if there is a default section (only look in the profile!)
            $input = $config->get_hash(['profile', $profile, 'template' , '_default' ])
                or OpenXPKI::Exception->throw (
                    message => "I18N_OPENXPKI_SERVER_API_DEFAULT_NO_SUCH_INPUT_ELEMENT_DEFINED",
                    params => {
                        'input' => $input_name,
                        'profile' => $profile,
                    }
                );
            # got a default item, create field using default
            $input->{id} = $input_name;
            $input->{label} = $input_name;
            # NOTE(review): $input_path stays undef on this fallback path, but
            # the select/keys handling below dereferences @$input_path -
            # presumably the _default template never uses those field types.
            # TODO confirm.
        }

        # convert keys to lower case
        my %lcinput = map { lc $_ => $input->{$_} } keys %{$input};

        # if type is select, add options array ref
        if ($lcinput{type} && $lcinput{type} eq 'select') {
            ##! 32: 'type is select'
            # up to v3.1 the select fields in form elements only had a
            # list directly at option but we now want to support the
            # same syntax as in the regular workflows where option is
            # a config node. For most config layouts the data is already
            # in the lcinput hash
            my $options = $lcinput{option};
            delete $lcinput{option};
            # NOTE(review): if $options is not a ref, this assignment
            # autovivifies it into a HashRef, so the ARRAY check below fails
            # and the config-node branch runs; that branch re-reads the mode
            # itself, so the value stored here is never used.
            if (!ref $options) {
                $options->{mode} = $config->get( [ @$input_path, 'option', 'mode' ] );
            }
            ##! 64: 'Options is ' . Dumper $options
            if (ref $options eq 'ARRAY') {
                # plain list of values (pre-3.1 syntax): use each value as
                # its own label
                # WARNING - this changes the return value for an API function!
                $lcinput{options} = [ map {{ label => $_, value => $_ }} @{$options} ];
            } else {
                my $mode = $config->get( [ @$input_path, 'option', 'mode' ] ) || 'list';
                my @option;
                if ($mode eq 'keyvalue') {
                    # items already carry label/value pairs; an optional
                    # sprintf pattern in option.label reformats the labels
                    @option = $config->get_list( [ @$input_path, 'option', 'item' ] );
                    if (my $label = $config->get( [ @$input_path, 'option', 'label' ] )) {
                        @option = map { { label => sprintf($label, $_->{label}, $_->{value}), value => $_->{value} } } @option;
                    }
                } else {
                    my @item;
                    if ($mode eq 'keys' || $mode eq 'map') {
                        @item = $config->get_keys( [ @$input_path, 'option', 'item' ] );
                    } else {
                        # option.item holds the items as list, this is mandatory
                        @item = $config->get_list( [ @$input_path, 'option', 'item' ] );
                    }
                    if ($mode eq 'map') {
                        # expects that item is a link to a deeper hash structure
                        # where each hash item has a key "label" set
                        # will hide items with an empty label
                        foreach my $key (@item) {
                            my $label = $config->get( [ @$input_path, 'option', 'item', $key, 'label' ] );
                            next unless ($label);
                            push @option, { value => $key, label => $label };
                        }
                    } elsif (my $label = $config->get( [ @$input_path, 'option', 'label' ] )) {
                        # if set, we generate the values from option.label + key
                        @option = map { { value => $_, label => $label.'_'.uc($_) } } @item;
                    } else {
                        # the minimum default - use keys as labels
                        @option = map { { value => $_, label => $_ } } @item;
                    }
                }
                $lcinput{options} = \@option;
            }
        }

        # SAN use fields with dynamic key/value assignment
        # Those have a special section "keys" which is a list of hashes
        # Get size of list to iterate
        if ($lcinput{keys}) {
            my $size = $config->get_size([ @$input_path, 'keys' ]);
            my @keys;
            for (my $i=0;$i<$size;$i++) {
                my $key = $config->get_hash([ @$input_path, 'keys', $i ]);
                push @keys, { value => $key->{value}, label => $key->{label} };
            }
            $lcinput{keys} = \@keys;
        }

        # fields with a min/max cardinality are flagged clonable for the UI
        $lcinput{clonable} = 1 if ($lcinput{min} || $lcinput{max});

        push @definitions, \%lcinput;
    }
    ##! 64: 'Definitions: ' . Dumper @definitions
    return \@definitions;
}

__PACKAGE__->meta->make_immutable;
openxpki/openxpki
core/server/OpenXPKI/Server/API2/Plugin/Profile/Util.pm
Perl
apache-2.0
5,819
package Paws::Lambda::ListFunctions;
  use Moose;
  # Request parameters for the ListFunctions call; each attribute is
  # serialized into the HTTP query string under its query_name.
  has FunctionVersion => (is => 'ro', isa => 'Str', traits => ['ParamInQuery'], query_name => 'FunctionVersion');
  has Marker => (is => 'ro', isa => 'Str', traits => ['ParamInQuery'], query_name => 'Marker');
  has MasterRegion => (is => 'ro', isa => 'Str', traits => ['ParamInQuery'], query_name => 'MasterRegion');
  has MaxItems => (is => 'ro', isa => 'Int', traits => ['ParamInQuery'], query_name => 'MaxItems');

  use MooseX::ClassAttribute;

  # Paws-internal call metadata: operation name, REST endpoint, HTTP verb
  # and the class the raw response is inflated into. _result_key is left
  # unset for this operation.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'ListFunctions');
  class_has _api_uri => (isa => 'Str', is => 'ro', default => '/2015-03-31/functions/');
  class_has _api_method => (isa => 'Str', is => 'ro', default => 'GET');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::Lambda::ListFunctionsResponse');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;

### main pod documentation begin ###

=head1 NAME

Paws::Lambda::ListFunctions - Arguments for method ListFunctions on Paws::Lambda

=head1 DESCRIPTION

This class represents the parameters used for calling the method ListFunctions on the
AWS Lambda service. Use the attributes of this class
as arguments to method ListFunctions.

You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to ListFunctions.

As an example:

  $service_obj->ListFunctions(Att1 => $value1, Att2 => $value2, ...);

Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.

=head1 ATTRIBUTES

=head2 FunctionVersion => Str

Optional string. If not specified, only the unqualified functions ARNs
(Amazon Resource Names) will be returned.

Valid value:

C<ALL> _ Will return all versions, including C<$LATEST> which will have
fully qualified ARNs (Amazon Resource Names).

Valid values are: C<"ALL">

=head2 Marker => Str

Optional string. An opaque pagination token returned from a previous
C<ListFunctions> operation. If present, indicates where to continue the
listing.

=head2 MasterRegion => Str

Optional string. If not specified, will return only regular function
versions (i.e., non-replicated versions).

Valid values are:

The region from which the functions are replicated. For example, if you
specify C<us-east-1>, only functions replicated from that region will be
returned.

C<ALL> _ Will return all functions from any region. If specified, you
also must specify a valid FunctionVersion parameter.

=head2 MaxItems => Int

Optional integer. Specifies the maximum number of AWS Lambda functions
to return in response. This parameter value must be greater than 0.

=head1 SEE ALSO

This class forms part of L<Paws>, documenting arguments for method ListFunctions in L<Paws::Lambda>

=head1 BUGS and CONTRIBUTIONS

The source code is located here: https://github.com/pplu/aws-sdk-perl

Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues

=cut
ioanrogers/aws-sdk-perl
auto-lib/Paws/Lambda/ListFunctions.pm
Perl
apache-2.0
3,105
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Service stub for the Google Ads v8 CampaignBidModifier service. Both
# methods are thin wrappers that hand the HTTP verb, request path, request
# body and response class to the inherited BaseService call() method.

package Google::Ads::GoogleAds::V8::Services::CampaignBidModifierService;

use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseService);

# GET a single resource addressed by {+resourceName}; the response is
# inflated into a V8::Resources::CampaignBidModifier object.
sub get {
  my ($self, $payload) = @_;

  return $self->SUPER::call(
    'GET',
    'v8/{+resourceName}',
    $payload,
    'Google::Ads::GoogleAds::V8::Resources::CampaignBidModifier'
  );
}

# POST a mutate request for the customer's campaign bid modifiers; the
# response is inflated into a MutateCampaignBidModifiersResponse object.
sub mutate {
  my ($self, $payload) = @_;

  return $self->SUPER::call(
    'POST',
    'v8/customers/{+customerId}/campaignBidModifiers:mutate',
    $payload,
    'Google::Ads::GoogleAds::V8::Services::CampaignBidModifierService::MutateCampaignBidModifiersResponse'
  );
}

1;
googleads/google-ads-perl
lib/Google/Ads/GoogleAds/V8/Services/CampaignBidModifierService.pm
Perl
apache-2.0
1,453