code stringlengths 2 1.05M | repo_name stringlengths 5 101 | path stringlengths 4 991 | language stringclasses 3 values | license stringclasses 5 values | size int64 2 1.05M |
|---|---|---|---|---|---|
#!/usr/bin/perl
# Fetch the URL given as the first command-line argument and print the
# response body to STDOUT.  Exits non-zero when no URL is given or the
# HTTP request fails.
use strict;
use warnings;
use LWP::Simple;

# Bug fix: original used @ARGV[0], a one-element array slice, where the
# scalar element $ARGV[0] is intended.
my $url = $ARGV[0];
die "usage: $0 URL\n" unless defined $url && length $url;

my $res = get($url);    # LWP::Simple::get returns undef on any failure
if (defined $res) {
    print $res;
} else {
    exit 1;    # original 'exit -1' is reported to the shell as 255 anyway
}
} | orangetw/Tiny-URL-Fuzzer | bin/requester/get.pl | Perl | mit | 128 |
# Generated SOAP::WSDL complex-type class for the AdWords v201809 API.
# Represents one page of CampaignGroupPerformanceTarget results
# (totalNumEntries / Page.Type / entries), inheriting paging behaviour
# from the generic Page type.  Do not edit by hand: regenerated from the
# WSDL.
package Google::Ads::AdWords::v201809::CampaignGroupPerformanceTargetPage;
use strict;
use warnings;
# Elements of this type are XML-namespace qualified.
__PACKAGE__->_set_element_form_qualified(1);
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201809' };
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;
# Returns the class that models this type's XML attributes (none here).
sub __get_attr_class {
return $XML_ATTRIBUTE_CLASS;
}
use base qw(Google::Ads::AdWords::v201809::Page);
# Variety: sequence
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);
{ # BLOCK to scope variables
# Inside-out object storage; read accessors generated by :ATTR(:get<...>).
my %totalNumEntries_of :ATTR(:get<totalNumEntries>);
my %Page__Type_of :ATTR(:get<Page__Type>);
my %entries_of :ATTR(:get<entries>);
# _factory(element order, attribute storage, element classes, XML names).
__PACKAGE__->_factory(
[ qw( totalNumEntries
Page__Type
entries
) ],
{
'totalNumEntries' => \%totalNumEntries_of,
'Page__Type' => \%Page__Type_of,
'entries' => \%entries_of,
},
{
'totalNumEntries' => 'SOAP::WSDL::XSD::Typelib::Builtin::int',
'Page__Type' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
'entries' => 'Google::Ads::AdWords::v201809::CampaignGroupPerformanceTarget',
},
{
'totalNumEntries' => 'totalNumEntries',
'Page__Type' => 'Page.Type',
'entries' => 'entries',
}
);
} # end BLOCK
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201809::CampaignGroupPerformanceTargetPage
=head1 DESCRIPTION
Perl data type class for the XML Schema defined complexType
CampaignGroupPerformanceTargetPage from the namespace https://adwords.google.com/api/adwords/cm/v201809.
Contains a subset of campaign group performance targets resulting from the filtering and paging of the {CampaignGroupPerformanceTargetService#get} call.
=head2 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * entries
=back
=head1 METHODS
=head2 new
Constructor. The following data structure may be passed to new():
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| googleads/googleads-perl-lib | lib/Google/Ads/AdWords/v201809/CampaignGroupPerformanceTargetPage.pm | Perl | apache-2.0 | 2,034 |
#!/usr/bin/perl
# Manual smoke test for the LoxBerry::Log notification API: creates info
# and error notifications, queries counts and contents, then deletes
# them again.  Intended to be run interactively; output goes to
# STDOUT/STDERR.
use LoxBerry::Log;
use strict;
use warnings;
print "\nTest Notification\n";
print "=================\n";
print "Documentation of notify for Perl: http://www.loxwiki.eu:80/x/eQd7AQ\n\n";
print "All tests are using package 'test' and groupname 'testing'\n\n";
print "Notification directory is: " . $LoxBerry::Log::notification_dir . "\n\n";
print "TEST: Setting two info, one error notification\n";
my $package = "test";
my $group = "testing";
my $message;
$message = "This is the first information notification";
notify ( $package, $group, $message);
# sleep(1) between notifications so each gets a distinct timestamp.
sleep(1);
$message = "This is the second information notification";
notify ( $package, $group, $message);
sleep(1);
$message = "This is an error notification";
# Fourth argument 1 marks the notification as an error (severity flag).
notify ( $package, $group, $message, 1);
print "Notifications created\n";
# exit(0);
print "TEST: Check notification count\n";
my ($check_err, $check_ok, $check_sum) = get_notification_count( $package, $group);
print "We have $check_err errors and $check_ok infos, together $check_sum notifications.\n";
print "TEST: Get all notifications of package test with content:\n";
my @notifications = get_notifications( $package);
for my $notification (@notifications ) {
# notification_content returns (raw text, html rendering) for a key.
my ($contentraw, $contenthtml) = notification_content($notification->{KEY});
if ( $notification->{SEVERITY} ) {
print STDERR " Error at $notification->{DATESTR} in group $notification->{NAME}:\n$contentraw\n";
} else {
print STDERR " Info at $notification->{DATESTR} in group $notification->{NAME}:\n$contentraw\n";
}
}
print "TEST: Delete all but least notification\n";
# Third argument 1 keeps the most recent notification alive.
delete_notifications($package, undef, 1);
print "Re-request notifications (without content):\n";
@notifications = get_notifications($package);
for my $notification (@notifications ) {
if ( $notification->{SEVERITY} ) {
print STDERR " Error at $notification->{DATESTR} in group $notification->{NAME}.\n";
} else {
print STDERR " Info at $notification->{DATESTR} in group $notification->{NAME}.\n";
}
}
print "TEST: Delete all notifications of package test:\n";
delete_notifications($package);
print "TEST: Get notification count:\n";
($check_err, $check_ok, $check_sum) = get_notification_count( $package, $group);
print "We have $check_err errors and $check_ok infos, together $check_sum notifications.\n";
print "\nTESTS FINISHED.\n";
| mschlenstedt/Loxberry | libs/perllib/LoxBerry/testing/testnotify.pl | Perl | apache-2.0 | 2,384 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::extreme::snmp::mode::memory;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
# Emit the 'used' performance-data point (bytes), scoped per memory
# instance when several are monitored; warning/critical thresholds are
# converted from percentages to absolute byte values via
# get_perfdata_for_output(total => ..., cast_int => 1).
sub custom_usage_perfdata {
my ($self, %options) = @_;
$self->{output}->perfdata_add(
label => 'used', unit => 'B',
instances => $self->use_instances(extra_instance => $options{extra_instance}) ? $self->{result_values}->{display} : undef,
value => $self->{result_values}->{used},
warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning-' . $self->{thlabel}, total => $self->{result_values}->{total}, cast_int => 1),
critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical-' . $self->{thlabel}, total => $self->{result_values}->{total}, cast_int => 1),
min => 0, max => $self->{result_values}->{total}
);
}
# Evaluate the used-percentage against the configured thresholds and
# return the plugin exit status; the critical threshold is checked
# before the warning one, so critical wins when both match.
sub custom_usage_threshold {
    my ($self, %options) = @_;
    return $self->{perfdata}->threshold_check(
        value => $self->{result_values}->{prct_used},
        threshold => [
            { label => 'critical-' . $self->{thlabel}, exit_litteral => 'critical' },
            { label => 'warning-' . $self->{thlabel},  exit_litteral => 'warning' },
        ],
    );
}
# Build the human-readable status line, e.g.
# "Total: 1.00 GB Used: 512.00 MB (50.00%) Free: 512.00 MB (50.00%)".
sub custom_usage_output {
    my ($self, %options) = @_;
    # change_bytes() returns a (value, unit) pair; join them for display.
    my %pretty;
    for my $field (qw(total used free)) {
        $pretty{$field} = join(' ',
            $self->{perfdata}->change_bytes(value => $self->{result_values}->{$field}));
    }
    return sprintf(
        "Total: %s Used: %s (%.2f%%) Free: %s (%.2f%%)",
        $pretty{total},
        $pretty{used}, $self->{result_values}->{prct_used},
        $pretty{free}, $self->{result_values}->{prct_free}
    );
}
# Derive used/free byte counts and percentages from the raw
# '<instance>_total' / '<instance>_free' values gathered by
# manage_selection().  Returns 0 on success (counter-template contract).
sub custom_usage_calc {
    my ($self, %options) = @_;
    $self->{result_values}->{display} = $options{new_datas}->{$self->{instance} . '_display'};
    $self->{result_values}->{total} = $options{new_datas}->{$self->{instance} . '_total'};
    $self->{result_values}->{used} = $options{new_datas}->{$self->{instance} . '_total'} - $options{new_datas}->{$self->{instance} . '_free'};
    $self->{result_values}->{free} = $options{new_datas}->{$self->{instance} . '_free'};
    # Robustness fix: a zero/undefined total (possible on broken SNMP data)
    # previously made the plugin die with "Illegal division by zero".
    if (!defined($self->{result_values}->{total}) || $self->{result_values}->{total} <= 0) {
        $self->{result_values}->{prct_used} = 0;
        $self->{result_values}->{prct_free} = 0;
        return 0;
    }
    $self->{result_values}->{prct_free} = $self->{result_values}->{free} * 100 / $self->{result_values}->{total};
    $self->{result_values}->{prct_used} = $self->{result_values}->{used} * 100 / $self->{result_values}->{total};
    return 0;
}
# Declare the counter structure consumed by the templates::counter base
# class: one 'memory' counter group (type 1 => one instance per memory
# entry) with a single 'usage' counter wired to the custom calc/output/
# perfdata/threshold callbacks above.
sub set_counters {
my ($self, %options) = @_;
$self->{maps_counters_type} = [
{ name => 'memory', type => 1, cb_prefix_output => 'prefix_memory_output', message_multiple => 'All memory usages are ok' }
];
$self->{maps_counters}->{memory} = [
{ label => 'usage', set => {
key_values => [ { name => 'display' }, { name => 'free' }, { name => 'total' } ],
closure_custom_calc => $self->can('custom_usage_calc'),
closure_custom_output => $self->can('custom_usage_output'),
closure_custom_perfdata => $self->can('custom_usage_perfdata'),
closure_custom_threshold_check => $self->can('custom_usage_threshold'),
}
},
];
}
# Prefix printed for each memory instance's status line,
# e.g. "Memory '1' ".
sub prefix_memory_output {
    my ($self, %options) = @_;
    return sprintf("Memory '%s' ", $options{instance_value}->{display});
}
# Constructor: delegates to the counter-template base class and registers
# no mode-specific command-line options (warning/critical-usage come from
# the generic counter machinery).
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options);
bless $self, $class;
$options{options}->add_options(arguments =>
{
});
return $self;
}
# OID mapping for the Extreme Networks memory monitor table
# (EXTREME-SOFTWARE-MONITOR-MIB); values are reported in KB.
my $mapping = {
extremeMemoryMonitorSystemFree => { oid => '.1.3.6.1.4.1.1916.1.32.2.2.1.3' },
extremeMemoryMonitorSystemTotal => { oid => '.1.3.6.1.4.1.1916.1.32.2.2.1.2' },
};
# Walk the memory-monitor table and populate $self->{memory} with one
# entry per slot/instance, converting KB to bytes for the counters.
sub manage_selection {
my ($self, %options) = @_;
$self->{memory} = {};
my $oid_extremeMemoryMonitorSystemEntry = '.1.3.6.1.4.1.1916.1.32.2.2.1';
$self->{results} = $options{snmp}->get_table(oid => $oid_extremeMemoryMonitorSystemEntry,
nothing_quit => 1);
foreach my $oid (keys %{$self->{results}}) {
# Only iterate rows once, keyed off the 'free' column's index suffix.
next if ($oid !~ /^$mapping->{extremeMemoryMonitorSystemFree}->{oid}\.(\d+)/);
my $instance = $1;
my $result = $options{snmp}->map_instance(mapping => $mapping, results => $self->{results}, instance => $instance);
$self->{memory}->{$instance} = { display => $instance,
free => $result->{extremeMemoryMonitorSystemFree} * 1024, total => $result->{extremeMemoryMonitorSystemTotal} * 1024};
}
}
1;
__END__
=head1 MODE
Check memory usages.
=over 8
=item B<--warning-usage>
Threshold warning (in percent).
=item B<--critical-usage>
Threshold critical (in percent).
=back
=cut
| Sims24/centreon-plugins | network/extreme/snmp/mode/memory.pm | Perl | apache-2.0 | 5,872 |
#!/usr/bin/env perl
# g2sdf.pl: extract geometries and energies from Gaussian output files
# and convert them to SDF via OpenBabel.  Which geometries are emitted
# is controlled by the mutually exclusive -out* flags below.
# NOTE(review): the file enables warnings but not 'use strict', so $use
# and several other variables are package globals by design.
use warnings;
use Getopt::Long;
$use = "g2sdf.pl [-outAll|-outAllJobs|-outOptimized|-outFinal|-outE E] [-calcDeltaE] gau.out... >out.sdf\n"
." Default is -outFinal\n"
." -outE Search and output only record with the exact string-match energy\n"
." -outFinal Ouput the final gemoetry (and energy of each file\n"
." -outOptimized output the geometry of all converged optimizations\n"
." -outAll output each geometry found in the gaussain output file. Note:\n"
." Optimizations using the #T key will not produce geometries for intermediate steps.\n"
." -outAllJobs output final geometry for each job in file\n"
." -require MP2|HF|DFT will only output if the energy is of given type\n"
." -calcDeltaE find the minimum energy and also report relative energies for all entries [kcal/mol]\n"
." this does NOT distinguish entries by smiles!\n"
."\n";
my( $outAll, $outAllJobs, $outOptimized, $outFinal,$outE,$calcDeltaE ) = (0, 0, 0, 0, '', '');
# Regex restricting which energy types (MP2/HF/DFT...) may be output.
our( $eTypeRequired ) = ".*";
GetOptions("outAll" =>\$outAll, "outAllJobs"=> \$outAllJobs, "outOptimized" => \$outOptimized,
"outFinal" =>\$outFinal,
"outE=s" =>\$outE,
"calcDeltaE"=> \$calcDeltaE,
"require=s" =>\$eTypeRequired ) || die $use;
# Default to -outFinal when no output mode was selected.
if( $outAll + $outAllJobs + $outOptimized + $outFinal != 1 && ! $outE )
{ $outFinal = 1;
}
#TODO compute and output SCS MP2 = E(SCS-MP2) = 6/5*(abab) + 1/3*(aaaa + bbbb)
# In Gaussian (or any other program), you need to find the MP2 output
# and look for the alpha-alpha(aaaa), beta-beta(bbbb) and alpha-beta(abab) energies.
# Parser state: the most recently seen molecule block, job card and energy.
my($lastMol, $lastJob);
my( $lastE ) = "0";
my( $lastEType ) = "";
# Patterns matching the sections of a Gaussian log file.
#my( $stdOriPattern ) = "(Standard|Z-Matrix) orientation:";
my( $stdOriPattern ) = "(Standard|Z-Matrix|Input) orientation:";
my( $inOPattern ) = "Input orientation:";
my( $oStartPattern ) = "";
my( $hfPattern ) = "SCF Done: *E\\(([^)]+)\\) = *([^ ]+) *A\\.U\\.";
my( $mp2Pattern ) = " EUMP2 = *([^ ]+)";
my( $jobPattern ) = "^ #([PTN] | ).*";
my( $initialParamPattern ) = " ! Initial Parameters !";
my( $optimizedPattern ) = " ! Optimized Parameters !";
our( $scanFindPattern ) = "! ([^ ]+) .* (Scan|Frozen|frozen)[, ].*!";
our( $scanValuePattern ) = "! [^ ]+ +.\\(([^ ]+)\\) +([^ ]+) +(-DE|Frozen|frozen|Scan).*!";
our( @scanCoords ) = ();
# NOTE(review): $scanValues/$scanAtoms/$scanKeyMap are declared here as
# scalar hash *references*, but the rest of the script (under no strict)
# reads and writes the package *hashes* %scanValues/%scanAtoms/%scanKeyMap
# instead -- these refs appear to be dead state; verify before relying on
# them.
our( $scanValues ) = {};
our( $scanAtoms ) = {};
our( $scanKeyMap ) = {};
our( $maxScanVarNum ) = 1;
my( $celastLine ) = "";
my( $fName ) = "";
my( $lastFName ) = "###";
# Path to the OpenBabel binary; overridable via the OBABLE env variable.
my( $oBabel );
if(defined $ENV{"OBABLE"})
{ $oBabel = $ENV{"OBABLE"}
}else
{ $oBabel = "obabel";
}
# With -calcDeltaE, buffer SDF output in a temp file for post-processing.
if( $calcDeltaE )
{ open(MYOUT, ">/tmp/g2sdf.$$.sdf" ) || die("Could not open tmp file");
select(MYOUT);
}
# Main parse loop over all input files (<> / @ARGV).  State machine:
# remember the last job card, last energy and last coordinate block, and
# flush records according to the selected -out* mode.
while(<>)
{ $fName = $ARGV;
if( /$initialParamPattern/ )
{ # look for scan coodiante names
@scanCoords = ();
# NOTE(review): these assignments reset the scalar refs, not the
# %scanKeyMap/%scanValues package hashes actually used below --
# confirm whether stale scan state can leak between files.
$scanKeyMap = {};
$scanValues = {};
$maxScanVarNum = 1;
&processOptimizedParam();
}
# input file changed
if( $lastFName ne $fName && $lastMol )
{ $lastMol = &addEToMol($lastMol,$lastE,$lastEType,$lastJob,$lastFName);
$outOptimized || print($lastMol);
$lastMol = "";
$lastE = "0";
$lastEType = "";
warn "g2sdf:Starting on $fName\n";
}
$lastFName = $fName;
# new gaussian job
if( /$jobPattern/ && $lastLine =~ /^ ---+$/ )
{ if(/^ #[^T].*NoSym/i)
{ $oStartPat = $stdOriPattern;
}else
{ #$oStartPat = $inOPattern;
$oStartPat = $stdOriPattern;
}
if( $outAllJobs && $lastJob && $lastMol )
{ print &addEToMol($lastMol,$lastE,$lastEType,$lastJob,$fName);
}
$lastJob = $_;
chomp($lastJob);
if( /opt[(=]/ && ! /opt=\S+restart/i )
{ # new optimization job
# and not a restart reset scaned variable recognition
foreach my $k (keys %scanValues) { delete($scanValues{$k}); }
}
}
# SCF (HF/DFT) energy line: capture method name and energy value.
if( /$hfPattern/ )
{ $lastE = $2;
chomp($lastE);
$lastEType = $1;
if( $outE && $lastE eq $outE )
{ print &addEToMol($lastMol,$lastE,$lastEType,$lastJob,$fName);
}
}
# MP2 energy line (EUMP2) overrides the SCF energy for this geometry.
if( /$mp2Pattern/ )
{ $lastE = $1;
chomp($lastE);
$lastEType = 'MP2';
if( $outE && $lastE eq $outE )
{ print &addEToMol($lastMol,$lastE,$lastEType,$lastJob,$fName);
}
}
if( /$optimizedPattern/ )
{ &processOptimizedParam();
if( $outOptimized )
{ print &addEToMol($lastMol,$lastE,$lastEType,$lastJob,$fName);
}
}
# found coordiantes section "Orientation"
if( $oStartPat && /$oStartPat/ )
{ if( $outAll && $lastMol )
{ $lastMol = &addEToMol($lastMol,$lastE,$lastEType,$lastJob,$fName);
print $lastMol;
}
$lastMol = &processCoor();
}
$lastLine = $_;
}
# Flush the final geometry of the last file (default -outFinal mode).
if( $lastMol && ! $outOptimized && ! $outE )
{ $lastMol = &addEToMol($lastMol,$lastE,$lastEType,$lastJob,$fName);
print $lastMol;
}
# -calcDeltaE: post-process the buffered SDF with the sdf* tool chain to
# annotate each record with its energy relative to the minimum.
if( $calcDeltaE )
{ close(MYOUT) || die;
select(STDOUT);
my($com) = <<COMS;
sdfAggregator.csh -in /tmp/g2sdf.$$.sdf -outputmode all -out .sdf \\
-function 'minE=min(Energy)' \\
| sdfGroovy.csh -in .sdf -out .sdf \\
-c '\$>deltaE=String.format("%.3f",(float)(f(\$Energy)-f(\$minE))*627.509)'
COMS
#warn($com);
system($com);
}
# also called on initial parameters to set eg. @scanCoords
# Reads the "! Initial/Optimized Parameters !" table from the current
# input stream (<>), collecting scanned/frozen coordinate names into
# @scanCoords and their current values/atom indices into the package
# hashes %scanValues/%scanAtoms; %scanKeyMap numbers each scan variable.
# Returns when the second 80-dash separator line ends the table.
sub processOptimizedParam
{ my($scanC, $countBreakLine ) = ('', 0);
while(<>)
{ /-{80}/ && ++$countBreakLine == 2 && return;
# look for scan coordinates from torsion scans in input
if( /$scanFindPattern/ )
{ push(@scanCoords,$1);
}
# check if this is an input card for a scan coordinate and
# extract its value
if( /^ ! / )
{ foreach $scanC (@scanCoords)
{ if( index($_, " ! $scanC ") == 0 )
{ my($atoms,$val) = /$scanValuePattern/;
$scanValues{$scanC} = sprintf("%.1f",$val);
$scanAtoms{$scanC} = &convertToIdx($atoms);
if( !$scanKeyMap{$scanC} )
{ $scanKeyMap{$scanC} = $maxScanVarNum++;
}
}
}
}
}
}
# Read one orientation/coordinate table from the current input stream:
# four header lines followed by atom rows, up to and including the
# terminating dashed line.  Returns the raw text block.
sub processCoor
{   my $molBlock = '';
    # Four fixed header lines precede the atom table.
    $molBlock .= <> for 1 .. 4;
    while ( my $line = <> )
    {   $molBlock .= $line;
        last if $line =~ /--------/;
    }
    return $molBlock;
}
# Convert a raw Gaussian coordinate block into an SDF record: pipe a
# minimal fake Gaussian log through OpenBabel and sdfTagTool.csh (adds a
# SMILES), then append EnergyType/Job/fName/Energy and any scan-variable
# tags before the terminating $$$$.  Returns "" when the energy type does
# not match -require.  NOTE(review): depends on obabel and sdfTagTool.csh
# being on PATH; $com is an undeclared package global (file has no
# 'use strict').
sub addEToMol
{ my($molBlock, $lastE, $lastEType, $lastJob, $fName) = @_;
if( $lastEType !~ /$eTypeRequired/ ) { return "";}
# Strip the file extension; the basename becomes the molecule title.
$fName =~ s/\.[^.]+$//;
$com = "$oBabel -ig09 -osdf<<OBABLE"
."|sdfTagTool.csh -in .sdf -out .sdf -addSmi\n"
." Entering Link 1 = \n"
." #T\n"
."Standard orientation:\n"
.$molBlock
."Standard orientation:\n"
."OBABLE";
#warn "$com\n";
$molBlock = `$com`;
$molBlock =~ s/<SMI>/<SMILES>/;
$molBlock = "$fName$molBlock";
# Fortran 'D' exponents are not understood by sprintf; normalize to 'E'.
$lastE =~ s/D/E/;
$lastE = sprintf("%.6f", $lastE);
my( $tags ) = "> <EnergyType>\n$lastEType\n\n"
."> <Job>\n$lastJob\n\n"
."> <fName>\n$fName\n\n"
."> <Energy>\n$lastE\n\n";
foreach my $key (keys %scanValues)
{ $tags .= "> <ScanVar_$scanKeyMap{$key}>\n$scanValues{$key}\n\n";
$tags .= "> <ScanAtoms_$scanKeyMap{$key}>\n$scanAtoms{$key}\n\n";
}
$molBlock =~ s/^\$\$\$\$/$tags\$\$\$\$/m;
return $molBlock;
}
# Convert a comma-separated, 1-based atom list (e.g. "3,5,7") into a
# space-separated, 0-based index string (e.g. "2 4 6").
sub convertToIdx
{   my @indices = map { $_ - 1 } split(/,/, $_[0]);
    return join(" ", @indices);
}
| chemalot/chemalot | bin/g2sdf.pl | Perl | apache-2.0 | 7,551 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Centreon mode that lists RabbitMQ queues (vhost/name/state) via the
# management REST API, for human output and service discovery.
package apps::mq::rabbitmq::restapi::mode::listqueues;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
# Constructor: no mode-specific command-line options.
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options);
bless $self, $class;
$options{options}->add_options(arguments => {});
return $self;
}
# Standard option validation hook; delegates to the base class.
sub check_options {
my ($self, %options) = @_;
$self->SUPER::init(%options);
}
# Query /api/queues (restricted to the vhost,name,state columns) and
# index the results in $self->{queue} keyed by "vhost:name".
sub manage_selection {
my ($self, %options) = @_;
my $result = $options{custom}->query(url_path => '/api/queues/?columns=vhost,name,state');
$self->{queue} = {};
foreach (@$result) {
$self->{queue}->{$_->{vhost} . ':' . $_->{name}} = {
vhost => $_->{vhost},
name => $_->{name},
state => $_->{state},
};
}
}
# Human-readable listing: one long-output line per queue, OK status.
sub run {
my ($self, %options) = @_;
$self->manage_selection(%options);
foreach (sort keys %{$self->{queue}}) {
$self->{output}->output_add(long_msg => sprintf(
"[name = %s][vhost = %s][state = %s]",
$self->{queue}->{$_}->{name}, $self->{queue}->{$_}->{vhost}, $self->{queue}->{$_}->{state})
);
}
$self->{output}->output_add(
severity => 'OK',
short_msg => 'List queues:'
);
$self->{output}->display(nolabel => 1, force_ignore_perfdata => 1, force_long_output => 1);
$self->{output}->exit();
}
# Declare the attributes exposed by service discovery.
sub disco_format {
my ($self, %options) = @_;
$self->{output}->add_disco_format(elements => ['name', 'vhost', 'state']);
}
# Service-discovery output: emit one discovery entry per queue record
# (name/vhost/state hash) collected by manage_selection().
sub disco_show {
    my ($self, %options) = @_;
    $self->manage_selection(%options);
    for my $queue (values %{$self->{queue}}) {
        $self->{output}->add_disco_entry(%{$queue});
    }
}
1;
__END__
=head1 MODE
List queues.
=over 8
=back
=cut
| Sims24/centreon-plugins | apps/mq/rabbitmq/restapi/mode/listqueues.pm | Perl | apache-2.0 | 2,539 |
# Generated Paws request class for the AWS IoT Data Plane
# UpdateThingShadow call: POST /things/{thingName}/shadow with the JSON
# shadow document as the raw request body.  Do not edit by hand.
package Paws::IoTData::UpdateThingShadow;
use Moose;
# Payload: the JSON shadow state; sent as the streamed request body.
has Payload => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'payload', required => 1);
# ThingName: interpolated into the request URI.
has ThingName => (is => 'ro', isa => 'Str', traits => ['ParamInURI'], uri_name => 'thingName', required => 1);
use MooseX::ClassAttribute;
# Wiring metadata consumed by the Paws REST caller.
class_has _stream_param => (is => 'ro', default => 'Payload');
class_has _api_call => (isa => 'Str', is => 'ro', default => 'UpdateThingShadow');
class_has _api_uri => (isa => 'Str', is => 'ro', default => '/things/{thingName}/shadow');
class_has _api_method => (isa => 'Str', is => 'ro', default => 'POST');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::IoTData::UpdateThingShadowResponse');
class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::IoTData::UpdateThingShadow - Arguments for method UpdateThingShadow on Paws::IoTData
=head1 DESCRIPTION
This class represents the parameters used for calling the method UpdateThingShadow on the
AWS IoT Data Plane service. Use the attributes of this class
as arguments to method UpdateThingShadow.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to UpdateThingShadow.
As an example:
$service_obj->UpdateThingShadow(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 B<REQUIRED> Payload => Str
The state information, in JSON format.
=head2 B<REQUIRED> ThingName => Str
The name of the thing.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method UpdateThingShadow in L<Paws::IoTData>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/IoTData/UpdateThingShadow.pm | Perl | apache-2.0 | 2,090 |
% SWI-Prolog HTTP server for a spaced-repetition memorization app.
% Loads the HTTP libraries, the SM-2 style algorithm and the data files.
:- use_module(library(http/thread_httpd)).
:- use_module(library(http/http_dispatch)).
:- use_module(library(http/http_error)).
:- use_module(library(http/html_write)).
:- use_module(library(http/http_parameters)).
:- ensure_loaded("../algorithm/memorization").
:- ensure_loaded("../data/entries").
:- ensure_loaded("../data/data").
:- ensure_loaded("web_utils").
% server(+Port): start the threaded HTTP server dispatching via the
% handlers registered below.
server(Port) :-
http_server(http_dispatch, [port(Port)]).
% URL -> handler predicate routing table.
:- http_handler(/, welcome, [prefix]).
:- http_handler('/memorize.html', begin_memorization, []).
:- http_handler('/response.html', respond, []).
:- http_handler('/update.html', update,[]).
:- http_handler('/create_entries.html', add_entries,[]).
:- http_handler('/update_entries.html', update_entries,[]).
%Set the title to 'Welcome' and call the predicate "welcome_page" with the request, the welcome page redirects to the memorization page.
welcome(Request) :-
reply_html_page(
title('Welcome!'),[\welcome_page(Request)]).
%Landing point for update_entries.html.
update_entries(Request) :-
%data_key: The name of the parameter in the request (from the 'table_data' predicate, in web_utils.pl
http_parameters(Request, [data_key(Data,[ list(string) ])]), create_entry(Data), welcome(Request).
% create_entry(+Values): assert a fresh entry/5 fact for each selected
% value, with repetition count 0, easiness factor 2.5 and due date 0.
% NOTE(review): the new key is the current entry count, so after
% deletions a new key can collide with an existing one -- verify.
create_entry([]).
create_entry([Value|T]):-
findall(Key, entry(Key,_Value,_N,_EF,_Date), Entries), length(Entries,Size),
assertz(entry(Size, Value, 0, 2.5, 0)), create_entry(T).
% Landing point for 'add_entries.html'. Select entries to memorize from
% existing data. Going to add a 'categories' one day for breaking the
% data up.
add_entries(_Request) :-
findall(Value, (data(Value,_Hint,_Answer), \+ entry(_Key,Value,_N,_EF,_Date)), DataSet),
reply_html_page(title('Add Entries'), [\create_add_entries_form(DataSet)]).
%Creates the table of all the entries in the list
% DCG rule emitting the HTML form/table for selecting new entries; the
% per-row cells come from table_data//1 (defined in web_utils.pl).
create_add_entries_form(List) -->
html_begin(form(action("update_entries.html"))),
html_begin(table(border(1), align(center), width('80%'))),
html_begin(tr),
table_header("Value"),
table_header("Hint"),
table_header("Answer"),
table_header("Add Entry"),
html_end(tr),
table_data(List),
html_begin(tr),
html_begin(td(colspan(4))),
html_begin(input(type(submit),value("Submit"))),
html_end(td),
html_end(tr),
html_end(table).
%Puts the keys of entries due for today into 'KeySet'
% An entry is due when its stored due date lies in the past.
get_practice_set(KeySet) :-
get_time(CurrentTime),
findall(Key, (entry(Key,_Value,_N,_EF,Date), CurrentTime > Date), KeySet).
%Landing point for 'memorize.html'. Gets the current time, finds all entries that are due and prompts for them. Stores the result in 'entries2.pl' User logins: instead of 'entries2' 'entries_UID/USER_NAME/etc'
begin_memorization(_Request) :-
get_practice_set(Z),
create_ui(Z).
%Landing point for 'update.html'. Updates the entry based on the users response. First pulls out the headers, then it converts the remaining practice back to a list of numbers (likewise the key), and calls the 'process' method.
% process/4 (memorization.pl) applies the spaced-repetition update for
% key H with quality rating Q; store/1 persists the entry database.
update(Request) :-
http_parameters(Request, [key(Key,[]), rating(Rating,[]), practice_remaining(Practice,[])]),
atom_number(Key, H),
atom_number(Rating, Q),
get_list(Practice, RemainingList),
process(H,Q,RemainingList, NewPractice),
store('entries.pl'),
create_ui(NewPractice).
%Landing point for 'response.html'
% Shows the user's answer next to the stored answer and asks for a
% self-assessed 1-5 recall rating.
respond(Request) :-
http_parameters(Request, [key(Key,[]),answer(Answer,[]),practice_remaining(Practice,[])]),
reply_html_page(
title('Rate your response'),
[\response_form(_Request, Key, Practice, Answer)]).
%Prepare and prompt the form for the specified verses (this will be done several times)
create_ui(Z) :-
reply_html_page(
title('Huzzah!'),
[\prompt_form(_Request, Z)]).
% DCG: congratulation banner shown when the practice list is empty.
practice_end -->
html_begin(h1),
['Well done!'],
html_end(h1),
html_begin(br),
['You have completed all outstanding practice for the day!'].
% DCG: landing page with links to daily memorization and entry creation.
welcome_page(_Request) -->
html_begin(p),
['Hello and welcome! Choose an option below: '],
html_begin(ul),
html_begin(li),
html_begin(a(href('memorize.html'))),
['Daily Memorization'],
html_end(a),
html_end(li),
html_begin(li),
html_begin(a(href('create_entries.html'))),
['Add Items'],
html_end(a),
html_end(li),
html_end(ul),
html_end(p).
%This will be called once for each entry to be done eventually:
%This is passing in a hidden input the key of the item being tested (to get it out on the other side), as well as (soon) the rest of the list.
% Three clauses: more than one item left (tail carried in a hidden
% field), exactly one item left (sentinel "-1"), and the empty list
% (show the completion banner).
prompt_form(_Request, [H|T]) -->
{
entry(H, Value, _N, _EF, _Date),
data(Value, Hint, _Answer)
},
{
get_string(T, TailString)
},
html_begin(form(action('response.html'))),
['Hint: ',Hint, '<br/>','Your Answer: '],
html_begin(input(type(text), name(answer), autocomplete(off))),
html_begin(input(type(hidden), name(key), value(H))),
html_begin(input(type(hidden), name(practice_remaining), value(TailString))),
html_begin(input(type(submit), value('Submit'))).
prompt_form(_Request, [H]) -->
{
entry(H, Value, _N, _EF, _Date),
data(Value, Hint, _Answer)
},
html_begin(form(action('response.html'))),
['Hint: ', Hint, '<br/>', 'Your Answer: '],
html_begin(input(type(text), name(answer), autocomplete(off))),
html_begin(input(type(hidden), name(key), value(H))),
html_begin(input(type(hidden), name(practice_remaining), value("-1"))),
html_begin(input(type(submit), value('Submit'))).
prompt_form(_Request, []) -->
practice_end.
% get_string(+List, -String): join the list elements with commas, e.g.
% [a,b,c] -> "a,b,c".  The single-element base case is placed first and
% cut so the recursion is deterministic; the original relied on the
% recursive clause failing into the base case via backtracking.
get_string([H], H) :- !.
get_string([H|T], TailString) :-
    get_string(T, TailString2),
    string_concat(",", TailString2, TailString3),
    string_concat(H, TailString3, TailString).

% get_list(+String, -Numbers): parse "1,2,3" into [1,2,3].  The atom
% '-1' is the sentinel meaning "no practice remaining" -> [].
get_list('-1', []) :- !.
get_list(String, List) :-
    split_string(String, ",", "", AsList),
    to_number_list(AsList, List).

% to_number_list(+Strings, -Numbers): convert each string to a number.
% Rewritten to build the result head-first instead of append/3 per
% element (which was quadratic and left choice points); base case first.
to_number_list([], []).
to_number_list([H|T], [Num|Rest]) :-
    atom_string(Atom, H),
    atom_number(Atom, Num),
    to_number_list(T, Rest).
%This response form provides the feed back (1-5 score of how easily it was remembered), and then the memorization algorithm will be called through once this is submitted. (Memorization: process(H,Q,T,NewT))
% Radio buttons 1-5 (3 pre-checked); radio_button//2 and
% checked_radio_button//2 are defined in web_utils.pl.  The key and the
% remaining practice list are threaded through as hidden fields.
response_form(_Request, Key, KeyList, Answer) -->
{
atom_number(Key, H),
entry(H, Value, _N, _EF, _Date),
data(Value, _Hint, Actual)
},
html_begin(form(action('update.html'))),
['Your response was: ', Answer, '<br>The actual answer was: ', Actual, '<br>'],
radio_button(rating, 1),
radio_button(rating, 2),
checked_radio_button(rating, 3),
radio_button(rating, 4),
radio_button(rating, 5),
% html_begin(input(type(text), name(rating), autocomplete=off)),
html_begin(input(type(hidden), name(key), value(Key))),
html_begin(input(type(hidden), name(practice_remaining), value(KeyList))),
html_begin(input(type(submit), value('Submit'))).
| JustAnotherSoul/01 | server/web_server.pl | Perl | apache-2.0 | 6,850 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
# eHive pipeline configuration with a single 'copy_database' analysis.
# Extends the hive schema with 'result' and 'job_progress' tables so the
# copy job can record its output and progress messages.
package Bio::EnsEMBL::Production::Pipeline::PipeConfig::CopyDatabase_conf;
use strict;
use warnings;
use base ('Bio::EnsEMBL::Production::Pipeline::PipeConfig::Base_conf');
# SQL run once at pipeline-database creation time, in addition to the
# base-class commands; also relaxes the job table so large/duplicate
# input_ids are accepted.
sub pipeline_create_commands {
my ($self) = @_;
return [
@{$self->SUPER::pipeline_create_commands},
$self->db_cmd('CREATE TABLE result (job_id int(10), output TEXT, PRIMARY KEY (job_id))'),
$self->db_cmd('CREATE TABLE job_progress (job_progress_id int(11) NOT NULL AUTO_INCREMENT, job_id int(11) NOT NULL , message TEXT, PRIMARY KEY (job_progress_id))'),
$self->db_cmd('ALTER TABLE job_progress ADD INDEX (job_id)'),
$self->db_cmd('ALTER TABLE job DROP KEY input_id_stacks_analysis'),
$self->db_cmd('ALTER TABLE job MODIFY input_id TEXT')
];
}
# One locally-run analysis; dataflow branch 2 lands in the result table.
sub pipeline_analyses {
my ($self) = @_;
return [
{
-logic_name => 'copy_database',
-module => 'Bio::EnsEMBL::Production::Pipeline::CopyDatabases::CopyDatabaseHive',
-input_ids => [ {} ],
-max_retry_count => 0,
-meadow_type => 'LOCAL',
-flow_into => {
2 => [ '?table_name=result' ]
},
}
];
}
1;
| Ensembl/ensembl-production | modules/Bio/EnsEMBL/Production/Pipeline/PipeConfig/CopyDatabase_conf.pm | Perl | apache-2.0 | 1,894 |
# Bug fix: the original declared "package receptor_model.pm;" -- a '.'
# is not legal inside a Perl package name (the ".pm" suffix belongs to
# the file name only), so the file could not compile.
package receptor_model;

# Run the MODELLER comparative-modelling pipeline for the receptor.
# Each step executes a generated python input script named
# "<ncore>_<step>.py"; $ncore is assumed to be a package global set by
# the caller before this is invoked -- TODO confirm.
sub make_model{#consecutive output as inputs
my $modeller_inp1=$ncore."_salign.py";
my $modeller_inp2=$ncore."_align2d_mult.py";
my $modeller_inp3=$ncore."_model_mult.py";
my $modeller_inp4=$ncore."_evaluate_model.py";
my $modeller_inp5=$ncore."_plot_profiles.py";
`python $modeller_inp1`; #align templates; output: fm00495.ali
`python $modeller_inp2`; #structural alignment; input: Target.ali,fm00495.ali; output: Target-multi.ali
`python $modeller_inp3`; #build model; input: Target-mult.ali; output: Target.B9999000*.pdb
`python $modeller_inp4`; #evaluate model; input: Target.B9999000*.pdb; output: Target.B9999000*.profile
`python $modeller_inp5`; #print profiles; input: Target.B9999000*.profile; output: dope_profile.png
}
# Run the MODELLER loop-refinement steps; like make_model, each step
# shells out to a python script named "<ncore>_<step>.py" ($ncore is
# presumably a package global set by the caller -- TODO confirm).
sub make_loops{#consecutive output as inputs
my $modeller_inp6=$ncore."_loop_refine.py";
my $modeller_inp7=$ncore."_model_energies.py";
my $modeller_inp8=$ncore."_evaluate_model.py";
`python $modeller_inp6`; #model loops; output:
`python $modeller_inp7`; # input: ; output:
`python $modeller_inp8`; #input: ; output:
}
# Placeholder: model validation is not implemented yet.
sub test_model{
}
# make_receptor_inputs: write the five python scripts that drive MODELLER
# comparative modelling of the receptor plus the pylab DOPE-profile plot.
#
# Arguments:
#   $template_pdb1, $ch_template_pdb1 - first template PDB code and chain ID
#   $template_pdb2, $ch_template_pdb2 - second template PDB code and chain ID
#   $ncore                            - file-name prefix for the generated scripts
#
# The generated python text is the original's, with mechanical fixes:
#  - every line is newline-terminated (the original print statements had no
#    "\n", and many lacked semicolons, so the sub did not even parse);
#  - the MODELLER placeholder '$(LIB)' is emitted literally (in the original
#    double-quoted strings Perl interpolated the $( group variable into it);
#  - scripts are written through checked 3-arg open() with lexical handles.
sub make_receptor_inputs {
    my ( $template_pdb1, $ch_template_pdb1, $template_pdb2, $ch_template_pdb2, $ncore ) = @_;

    my $salign_py    = $ncore . "_salign.py";
    my $align2d_py   = $ncore . "_align2d_mult.py";
    my $model_py     = $ncore . "_model_mult.py";
    my $evaluate_py  = $ncore . "_evaluate_model.py";
    my $plot_py      = $ncore . "_plot_profiles.py";
    my $dope_profile = $ncore . "_dope_profile.png";

    # Local writer so this sub stands alone: create/overwrite $path with $text.
    my $spew = sub {
        my ( $path, $text ) = @_;
        open my $fh, '>', $path or die "make_receptor_inputs: cannot write '$path': $!";
        print {$fh} $text;
        close $fh or die "make_receptor_inputs: cannot close '$path': $!";
    };

    # --- 1. salign.py: structurally align the two templates -> fm00495.ali
    $spew->( $salign_py, <<"END_SALIGN" );
# Illustrates the SALIGN multiple structure/sequence alignment

from modeller import *

log.verbose()
env = environ()
env.io.atom_files_directory = './:../atom_files/'

aln = alignment(env)
for (code, chain) in (('$template_pdb1', '$ch_template_pdb1'), ('$template_pdb2', '$ch_template_pdb2')):
    mdl = model(env, file=code, model_segment=('FIRST:'+chain, 'LAST:'+chain))
    aln.append_model(mdl, atom_files=code, align_codes=code+chain)

for (weights, write_fit, whole) in (((1., 0., 0., 0., 1., 0.), False, True),
                                    ((1., 0.5, 1., 1., 1., 0.), False, True),
                                    ((1., 1., 1., 1., 1., 0.), True, False)):
    aln.salign(rms_cutoff=3.5, normalize_pp_scores=False,
               rr_file='\$(LIB)/as1.sim.mat', overhang=30,
               gap_penalties_1d=(-450, -50),
               gap_penalties_3d=(0, 3), gap_gap_score=0, gap_residue_score=0,
               dendrogram_file='fm00495.tree',
               alignment_type='tree',     # If 'progressive', the tree is not
                                          # computed and all structures will be
                                          # aligned sequentially to the first
               feature_weights=weights,   # For a multiple sequence alignment only
                                          # the first feature needs to be non-zero
               improve_alignment=True, fit=True, write_fit=write_fit,
               write_whole_pdb=whole, output='ALIGNMENT QUALITY')

#aln.write(file='fm00495.pap', alignment_format='PAP')
aln.write(file='fm00495.ali', alignment_format='PIR')

aln.salign(rms_cutoff=1.0, normalize_pp_scores=False,
           rr_file='\$(LIB)/as1.sim.mat', overhang=30,
           gap_penalties_1d=(-450, -50), gap_penalties_3d=(0, 3),
           gap_gap_score=0, gap_residue_score=0, dendrogram_file='1is3A.tree',
           alignment_type='progressive', feature_weights=[0]*6,
           improve_alignment=False, fit=False, write_fit=True,
           write_whole_pdb=False, output='QUALITY')
END_SALIGN

    # --- 2. align2d_mult.py: align the target sequence to the template
    #        alignment -> Target-multi.ali
    $spew->( $align2d_py, <<"END_ALIGN2D" );
from modeller import *

log.verbose()
env = environ()

env.libs.topology.read(file='\$(LIB)/top_heav.lib')

# Read aligned structure(s):
aln = alignment(env)
aln.append(file='fm00495.ali', align_codes='all')
aln_block = len(aln)

# Read aligned sequence(s):
aln.append(file='Target.ali', align_codes='Target')

# Structure sensitive variable gap penalty sequence-sequence alignment:
aln.salign(output='', max_gap_length=20,
           gap_function=True,   # to use structure-dependent gap penalty
           alignment_type='PAIRWISE', align_block=aln_block,
           feature_weights=(1., 0., 0., 0., 0., 0.), overhang=0,
           gap_penalties_1d=(-450, 0),
           gap_penalties_2d=(0.35, 1.2, 0.9, 1.2, 0.6, 8.6, 1.2, 0., 0.),
           similarity_flag=True)

aln.write(file='Target-multi.ali', alignment_format='PIR')
#aln.write(file='Target-multi.pap', alignment_format='PAP')
END_ALIGN2D

    # --- 3. model_mult.py: build 5 models from the multi-template alignment
    # NOTE(review): this script reads 'Target-mult.ali' while align2d_mult.py
    # writes 'Target-multi.ali' — preserved verbatim from the original;
    # confirm which name is intended before relying on an end-to-end run.
    $spew->( $model_py, <<"END_MODEL" );
from modeller import *
from modeller.automodel import *


env = environ()
a = automodel(env, alnfile='Target-mult.ali',
              knowns=('$template_pdb1.$ch_template_pdb1', '$template_pdb2.$ch_template_pdb2'), sequence='Target')
a.starting_model = 1
a.ending_model = 5
a.make()
END_MODEL

    # --- 4. evaluate_model.py: DOPE profile for each of the 5 models
    $spew->( $evaluate_py, <<'END_EVALUATE' );
from modeller import *
from modeller.scripts import complete_pdb

log.verbose()    # request verbose output
env = environ()
env.libs.topology.read(file='$(LIB)/top_heav.lib')    # read topology
env.libs.parameters.read(file='$(LIB)/par.lib')    # read parameters

# read model file
mdl1= complete_pdb(env, 'Target.B99990001.pdb')
mdl2= complete_pdb(env, 'Target.B99990002.pdb')
mdl3= complete_pdb(env, 'Target.B99990003.pdb')
mdl4= complete_pdb(env, 'Target.B99990004.pdb')
mdl5= complete_pdb(env, 'Target.B99990005.pdb')

# Assess all atoms with DOPE:
s = selection(mdl1)
s.assess_dope(output='ENERGY_PROFILE NO_REPORT', file='Target.B99990001.profile',
              normalize_profile=True, smoothing_window=15)
s = selection(mdl2)
s.assess_dope(output='ENERGY_PROFILE NO_REPORT', file='Target.B99990002.profile',
              normalize_profile=True, smoothing_window=15)
s = selection(mdl3)
s.assess_dope(output='ENERGY_PROFILE NO_REPORT', file='Target.B99990003.profile',
              normalize_profile=True, smoothing_window=15)
s = selection(mdl4)
s.assess_dope(output='ENERGY_PROFILE NO_REPORT', file='Target.B99990004.profile',
              normalize_profile=True, smoothing_window=15)
s = selection(mdl5)
s.assess_dope(output='ENERGY_PROFILE NO_REPORT', file='Target.B99990005.profile',
              normalize_profile=True, smoothing_window=15)
END_EVALUATE

    # --- 5. plot_profiles.py: overlay the five DOPE profiles with pylab
    $spew->( $plot_py, <<"END_PLOT" );
import pylab
import modeller

def get_profile(profile_file, seq):
    """Read `profile_file` into a Python array, and add gaps corresponding to
    the alignment sequence `seq`."""
    # Read all non-comment and non-blank lines from the file:
    f = file(profile_file)
    vals = []
    for line in f:
        if not line.startswith('#') and len(line) > 10:
            spl = line.split()
            vals.append(float(spl[-1]))
    # Insert gaps into the profile corresponding to those in seq:
    for n, res in enumerate(seq.residues):
        for gap in range(res.get_leading_gaps()):
            vals.insert(n, None)
    # Add a gap at position '0', so that we effectively count from 1:
    vals.insert(0, None)
    return vals

e = modeller.environ()
a = modeller.alignment(e, file='Target-mult.ali')

#template = get_profile('1bdmA.profile', a['1bdmA'])
model1 = get_profile('Target.B99990001.profile', a['Target'])
model2 = get_profile('Target.B99990002.profile', a['Target'])
model3 = get_profile('Target.B99990003.profile', a['Target'])
model4 = get_profile('Target.B99990004.profile', a['Target'])
model5 = get_profile('Target.B99990005.profile', a['Target'])

# Plot the template and model profiles in the same plot for comparison:
pylab.figure(1, figsize=(10,6))
pylab.xlabel('Alignment position')
pylab.ylabel('DOPE per-residue score')
pylab.plot(model1, color='yellow', linewidth=2, label='Model1')
pylab.plot(model2, color='orange', linewidth=2, label='Model2')
pylab.plot(model3, color='red', linewidth=2, label='Model3')
pylab.plot(model4, color='purple', linewidth=2, label='Model4')
pylab.plot(model5, color='blue', linewidth=2, label='Model5')
#pylab.plot(template, color='green', linewidth=2, label='Template')
pylab.legend()
pylab.savefig('$dope_profile', dpi=65)
END_PLOT

    return;
}
# make_loops_inputs: write the three python scripts used for MODELLER loop
# refinement of the built receptor model.
#
# Arguments (the original read the flags from undeclared globals, unpacked
# $ncore only *after* using it, and used mis-encoded string delimiters plus
# 'if ($x=1) print ...' — none of which compiles):
#   $ncore       - file-name prefix for the generated scripts
#   $normal_loop - true => base the loop class on 'loopmodel' (the default)
#   $dope_loop   - true => base the loop class on 'dope_loopmodel' instead
#   $disulfids   - true => include the special_patches() disulfide patch
#
# NOTE(review): the generated evaluate script still hard-codes the TvLDH
# tutorial file names, and its file name collides with the evaluate script
# written by make_receptor_inputs(); both quirks are preserved from the
# original — confirm before an end-to-end run.
sub make_loops_inputs {
    my ( $ncore, $normal_loop, $dope_loop, $disulfids ) = @_;

    my $loop_refine_py    = $ncore . "_loop_refine.py";
    my $model_energies_py = $ncore . "_model_energies.py";
    my $evaluate_py       = $ncore . "_evaluate_model.py";

    # $dope_loop wins when set; otherwise the normal loopmodel base is used.
    my $base_class = $dope_loop ? 'dope_loopmodel' : 'loopmodel';

    # Optional disulfide-bridge patch method, spliced into the class body.
    my $patch_block = '';
    if ($disulfids) {
        $patch_block = <<'END_PATCH';

    def special_patches(self, aln):   # A disulfide bridge between residues 71 and 159
        self.patch(residue_type='DISU', residues=(self.residues['71'], self.residues['159']))
END_PATCH
    }

    # Local writer so this sub stands alone: create/overwrite $path with $text.
    my $spew = sub {
        my ( $path, $text ) = @_;
        open my $fh, '>', $path or die "make_loops_inputs: cannot write '$path': $!";
        print {$fh} $text;
        close $fh or die "make_loops_inputs: cannot close '$path': $!";
    };

    # --- 1. loop_refine.py: refine loop region 141-175 of the initial model
    $spew->( $loop_refine_py, <<"END_LOOP" );
# Loop refinement of an existing model
from modeller import *
from modeller.automodel import *

log.verbose()
env = environ()

# directories for input atom files
env.io.atom_files_directory = './:../atom_files'

# Create a new class based on '$base_class' so that we can redefine
# select_loop_atoms (necessary)
class myloop($base_class):
    # This routine picks the residues to be refined by loop modeling
    def select_loop_atoms(self):
        # 10 residue insertion
        return selection(self.residue_range('141', '175'))
$patch_block
m = myloop(env, inimodel='multi2-Target.B99990001.pdb',   # initial model of the target
           sequence='Target')                             # code of the target

m.loop.starting_model = 1                  # index of the first loop model
m.loop.ending_model = 200                  # index of the last loop model
m.loop.md_level = refine.very_fast         # loop refinement method

m.make()
END_LOOP

    # --- 2. model_energies.py: DOPE profile for each refined loop model
    $spew->( $model_energies_py, <<'END_ENERGY' );
from modeller import *
from modeller.scripts import complete_pdb

log.verbose()    # request verbose output
env = environ()
env.libs.topology.read(file='$(LIB)/top_heav.lib')    # read topology
env.libs.parameters.read(file='$(LIB)/par.lib')    # read parameters

for i in range(1, 11):
    # read model file
    code = "Target.BL%04d0001.pdb" % i
    mdl = complete_pdb(env, code)
    s = selection(mdl)
    s.assess_dope(output='ENERGY_PROFILE NO_REPORT', file='Target.profile',
                  normalize_profile=True, smoothing_window=15)
END_ENERGY

    # --- 3. evaluate_model.py: DOPE profile of the chosen model
    $spew->( $evaluate_py, <<'END_EVALUATE2' );
from modeller import *
from modeller.scripts import complete_pdb

log.verbose()    # request verbose output
env = environ()
env.libs.topology.read(file='$(LIB)/top_heav.lib')    # read topology
env.libs.parameters.read(file='$(LIB)/par.lib')    # read parameters

# directories for input atom files
env.io.atom_files_directory = './:../atom_files'

# read model file
mdl = complete_pdb(env, 'TvLDH.BL00080001.pdb')

s = selection(mdl)
s.assess_dope(output='ENERGY_PROFILE NO_REPORT', file='TvLDH.profile',
              normalize_profile=True, smoothing_window=15)
END_EVALUATE2

    return;
}
#!/usr/bin/env perl
=head1 NAME
make_input_ids.pl
=head1 SYNOPSIS
make_input_ids.pl -p_host ecs1a -p_user user -p_pass **** -p_name pipe_mouse -set chr12-03
=head1 DESCRIPTION
This script allows to prime the pipeline database with the input_ids of the seq_region name(s) provided.
This script allows priming of the pipeline database with the input_ids of the seq_region name(s) provided.
If no values are provided for the database connection (login, password and port), the ~/.netrc file will be checked.
See Net::Netrc module for more details.
=head1 OPTIONS
-p_host (default: otterpipe1) host name for database (gets put as host= in locator)
-p_port (check the ~/.netrc file) For RDBs, what port to connect to (port= in locator)
-p_name For RDBs, what name to connect to (p_name= in locator)
-p_user (check the ~/.netrc file) For RDBs, what username to connect as (user= in locator)
-p_pass (check the ~/.netrc file) For RDBs, what password to use (pass= in locator)
-logic_name (default: SubmitContig) the logic_name of the analysis object which needs to be
associated with these entries
-set|name the seq_region name you want to prime (could be used several times)
-cs (default: chromosome) the coordinate system associated with the seq_region name
-cs_version (default: Otter) the version of the coord system you want
-target_cs (default: contig) the target coordinate system you want slices in
-target_cs_version (optional) the version of the target coord system you want
-verbose if you want more information about what the script is doing
-help Displays script documentation with PERLDOC
=head1 CONTACT
Mustapha Larbaoui B<email> ml6@sanger.ac.uk
=cut
use warnings ;
use strict;
use Getopt::Long;
use Net::Netrc;
use Bio::EnsEMBL::Pipeline::Analysis;
use Bio::EnsEMBL::Pipeline::DBSQL::Finished::DBAdaptor;
use Bio::EnsEMBL::Pipeline::Utils::InputIDFactory;
use Bio::EnsEMBL::Pipeline::DBSQL::StateInfoContainer;
use Bio::EnsEMBL::Utils::Exception qw(throw warning);
# --- Option defaults -------------------------------------------------------
# Login, password and port fall back to the user's ~/.netrc entry for $host
# when not supplied on the command line (see the Net::Netrc lookup below).
my $host = 'otterpipe1';
my $user;
my $pass;
my $port;
my $p_name;
my $logic_name = 'SubmitContig';
my $cs = 'chromosome';
my $cs_version = 'Otter';
my $target_cs = 'contig';
my $target_cs_version;
my @seqreg_name;    # one or more -set/-name values; each is primed below
my $verbose;
my $help = 0;
&GetOptions(
    'p_host:s' => \$host,
    'p_port:n' => \$port,
    'p_user:s' => \$user,
    'p_pass:s' => \$pass,
    'p_name:s' => \$p_name,
    'set|name:s' => \@seqreg_name,
    'cs:s' => \$cs,
    'cs_version:s' => \$cs_version,
    'target_cs:s' => \$target_cs,
    'target_cs_version:s' => \$target_cs_version,
    'logic_name:s' => \$logic_name,
    'verbose!' => \$verbose,
    'h|help' => \$help
);

# -help: show the POD at the top of this script and exit.
if ($help) {
    exec( 'perldoc', $0 );
}

# --- Fill in missing credentials from ~/.netrc -----------------------------
# Convention here: the netrc 'account' field holds the port number.
if ( !$user || !$pass || !$port ) {
    my $ref = Net::Netrc->lookup($host);
    throw(
        "~/.netrc file unavailable;
        need to provide missing parameter:
        user [$user]; password [$pass]; port [$port]"
    )
      unless ($ref);
    $user = $ref->login unless $user;
    $pass = $ref->password unless $pass;
    $port = $ref->account unless $port;
    throw(
        "Missing parameter in the ~/.netrc file:\n
        machine " . ( $host || 'missing' ) . "\n
        login " . ( $user || 'missing' ) . "\n
        password " . ( $pass || 'missing' ) . "\n
        account "
          . ( $port || 'missing' ) . " (should be used to set the port number)"
    )
      unless ( $user && $pass && $port );
}

# --- Mandatory arguments ---------------------------------------------------
if ( !$p_name ) {
    throw("You must specify a database name (-p_name option");
}
if ( !@seqreg_name ) {
    throw("You must at least specify one seq_region name (-set option)");
}

# --- Connect to the pipeline database and fetch the shared adaptors --------
# (Indirect-object 'new Class(...)' syntax preserved from the original.)
my $db = new Bio::EnsEMBL::Pipeline::DBSQL::Finished::DBAdaptor(
    -host => $host,
    -user => $user,
    -pass => $pass,
    -port => $port,
    -dbname => $p_name
);
my $slice_a = $db->get_SliceAdaptor;
my $analysis_a = $db->get_AnalysisAdaptor;
my $state_info_container = $db->get_StateInfoContainer;
my $ana = $analysis_a->fetch_by_logic_name($logic_name);

# --- Prime the input_ids ---------------------------------------------------
# For each requested seq_region: project it down to the target coordinate
# system and register every component slice as an input_id for $logic_name.
foreach my $name (@seqreg_name) {
    print STDOUT "Storing seq_region name $name in pipeline [$p_name]\n"
      if ($verbose);
    my $slice =
      $slice_a->fetch_by_region( $cs, $name, undef, undef, undef, $cs_version );
    if ( !$slice ) {
        warn(
"No seq_region [$name] found in database [$p_name] for coord_system [$cs] and cs_version [$cs_version]"
        );
        next;
    }
    my $target_projection = $slice->project($target_cs);
    foreach my $ct (@$target_projection) {
        # ($target_sclice is a typo for "slice", preserved from the original.)
        my $target_sclice = $ct->to_Slice();
        # Re-fetch the whole target seq_region (not just the projected
        # segment) so the stored input_id covers the full slice name.
        my $target =
          $slice_a->fetch_by_region( $target_cs,
            $target_sclice->seq_region_name,
            undef, undef, undef, $target_cs_version );
        $state_info_container->store_input_id_analysis( $target->name(), $ana,
            '' );
        print STDOUT $target->name() . "\t"
          . $ana->input_id_type . "\t"
          . $ana->logic_name
          . "\tstored\n"
          if ($verbose);
    }
}
| Ensembl/ensembl-pipeline | scripts/Finished/make_input_ids.pl | Perl | apache-2.0 | 4,840 |
# Copyright (c) 2016 Timm Murray
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
package SQL::Functional::InsertClause;
use strict;
use warnings;
use Moose;
use namespace::autoclean;
use SQL::Functional::Clause;
use SQL::Functional::TableClause;
use SQL::Functional::ValuesClause;
with 'SQL::Functional::Clause';

# Clause object representing a complete INSERT statement:
#   INSERT INTO <table> (<field>, ...) <values clause>
# 'into' is the target table clause, 'fields' the column names, and
# 'values' the clause that renders the VALUES part and carries the
# placeholder parameters.

has into => (
    is       => 'ro',
    isa      => 'SQL::Functional::TableClause',
    required => 1,
);
has fields => (
    is         => 'ro',
    isa        => 'ArrayRef[Str]',
    required   => 1,
    auto_deref => 1,    # list context yields the field names directly
);
has values => (
    is       => 'ro',
    isa      => 'SQL::Functional::Clause',
    required => 1,
);

# Render the full INSERT statement as SQL text.
sub to_string
{
    my ($self) = @_;
    my $column_list = join ', ', $self->fields;    # auto_deref gives a list
    return sprintf 'INSERT INTO %s (%s) %s',
        $self->into->to_string,
        $column_list,
        $self->values->to_string;
}

# Placeholder parameters come entirely from the VALUES clause.
sub get_params
{
    my ($self) = @_;
    my $values_clause = $self->values;
    return $values_clause->get_params;
}

no Moose;
__PACKAGE__->meta->make_immutable;
1;
__END__
| frezik/SQL-Functional | lib/SQL/Functional/InsertClause.pm | Perl | bsd-2-clause | 2,316 |
#!/usr/bin/perl
use 5.010;
use warnings;
use strict;
# Smoke test for ./rosalind-iev. $exit accumulates one count per failed
# check; 0 (all checks passed) becomes the script's exit status.
my $exit = 0;

# Known inputs must produce the exact expected stdout (newline included).
my $out = `./rosalind-iev 1 0 0 1 0 1`;
$exit++ if $out ne "3.5\n";
$out = `./rosalind-iev 17694 17964 19355 18338 19724 16114`;
$exit++ if $out ne "157257\n";
$out = `./rosalind-iev 19693 17215 19087 19450 18998 18989`;
$exit++ if $out ne "160163\n";

# Invalid invocations (too few arguments, or non-numeric arguments) must
# exit non-zero. system() returns 0 on success, so "unless system(...)"
# counts a failure when a bad invocation unexpectedly succeeds.
$exit++ unless system('./rosalind-iev');
$exit++ unless system('./rosalind-iev 0');
$exit++ unless system('./rosalind-iev 0 1');
$exit++ unless system('./rosalind-iev 0 1 2');
$exit++ unless system('./rosalind-iev 0 1 2 3');
$exit++ unless system('./rosalind-iev 0 1 2 3 4');
$exit++ unless system('./rosalind-iev a');
$exit++ unless system('./rosalind-iev 0 b');
$exit++ unless system('./rosalind-iev 0 0 c');
$exit++ unless system('./rosalind-iev 0 0 0 d');
$exit++ unless system('./rosalind-iev 0 0 0 0 e');
$exit++ unless system('./rosalind-iev 0 0 0 0 0 f');

exit $exit;
| mikeyhc/rosalind | iev/t/01_default.pl | Perl | bsd-3-clause | 897 |
package HTTP2::Draft::Framer;
use 5.008;
use strict;
use warnings FATAL => 'all';
use HTTP2::Draft::FrameStream;
use HTTP2::Draft::Frame qw ( :frames :settings :errors );
use strict;
use warnings;
use IO::Async::SSL;
use Data::Dumper;
use HTTP2::Draft;
use HTTP2::Draft::Log qw( $log );
=head1 NAME
HTTP2::Draft::Framer - Framer based on IO::Async
=head1 VERSION
Version 0.03
=cut
our $VERSION = '0.03';
my $magic_header_hex = "505249202a20485454502f322e300d0a0d0a534d0d0a0d0a";
my $magic_header = pack( "(H2)*", unpack( "(A2)*", $magic_header_hex ) );
# Establish an HTTP/2(-draft) client connection on an IO::Async::Loop.
#
# Extra named parameters (removed from %params before SSL_connect sees them):
#   on_http2_connect - called with the framer once TLS is up and the client
#                      connection preface ("magic") has been written
#   on_frame_read    - called as ($framer, $frame, $eof) for each decoded
#                      frame; $frame is undef on EOF
# All other parameters are passed through to IO::Async::Loop->SSL_connect.
sub IO::Async::Loop::HTTP2_connect
{
  my $loop = shift;
  my %params = @_;

  my $on_http2_connect = delete $params{on_http2_connect};
  my $on_frame_read = delete $params{on_frame_read};

  # Stream read callback: decode one frame per invocation.
  # NOTE(review): if several complete frames arrive in one read, only the
  # first is decoded here; this relies on IO::Async re-invoking on_read to
  # drain the buffer — confirm against IO::Async::Stream's on_read contract.
  my $on_read = sub {
    my ( $framer, $buffref, $eof ) = @_;

    # TODO: should the Conn be passed here rather than the stream?

    if ( $eof )
    {
      $on_frame_read->( $framer, undef, $eof );
      return;
    }

    my $conn = $framer->{conn};

    my ( $frame, $length ) = $conn->read_frame( $$buffref );

    if ( $frame )
    {
      # consume the used bytes on the sockets buffer
      # print( "$frame->{size}, ", length( $$buffref ), "\n" );
      $$buffref = substr( $$buffref, $frame->{size} );

      $on_frame_read->( $framer, $frame, $eof );
    }
  };

  # Once TLS is up: wrap the handle in a framer, create the client-side
  # connection state, send the connection preface, then hand off.
  my $on_connected = sub {
    my ( $handle ) = @_;

    my $framer = HTTP2::Draft::FrameStream->new( handle => $handle );
    $log->info( "NPN: ", $framer->{write_handle}->next_proto_negotiated() );

    $framer->configure( on_read => $on_read,
                        autoflush => 1,
                        write_all => 1 );

    my $conn = HTTP2::Draft::Connection->new( $framer, "client" );
    $framer->{conn} = $conn;

    # The client connection preface must be the first bytes on the wire.
    $framer->write( $magic_header );

    $on_http2_connect->( $framer );
  };

  $params{on_connected} = $on_connected;

  $loop->SSL_connect( %params );
}
# Install an HTTP/2(-draft) server listener on an IO::Async::Loop.
#
# Extra named parameters (removed from %params before SSL_listen sees them):
#   on_http2_connect - called with the framer once a TLS client is accepted
#   on_frame_read    - called as ($framer, $frame, $eof) for every decoded
#                      frame; $frame is undef on EOF
#   on_frame_error   - called as ($framer, $message, $buffer) when the
#                      client sends a bad connection preface
# All other parameters are passed through to IO::Async::Loop->SSL_listen.
sub IO::Async::Loop::HTTP2_listen
{
  my $loop = shift;
  my %params = @_;

  my $on_http2_connect = delete $params{on_http2_connect};
  my $on_frame_read = delete $params{on_frame_read};
  my $on_frame_error = delete $params{on_frame_error};

  # Stream read callback: validate the client connection preface ("magic"),
  # then decode as many complete frames as the buffer currently holds.
  my $on_read = sub {
    my ( $framer, $buffref, $eof ) = @_;

    if ( $eof )
    {
      $on_frame_read->( $framer, undef, $eof );
      return;
    }

    my $conn = $framer->{conn};

    $log->info( "Conn state == $conn->{state}" );

    # TODO: dispel the magic 1
    # Connection states:
    #   1: TLS established, expecting the client connection preface
    #   2: preface consumed, waiting for / decoding frames
    # A frame header is 8 bytes in this draft, hence the loop guard.
    while ( length( $$buffref ) > 8 ) {
      if ( $conn->{state} == 1 )
      {
        my $hlen = length( $magic_header );
        my $buflen = length( $$buffref );

        if ( $buflen < $hlen )
        {
          # Not enough data yet: accept a matching prefix and wait for more,
          # reject anything else outright.
          if ( substr( $magic_header, 0, $buflen ) eq $$buffref )
          {
            return;
          }
          else
          {
            $on_frame_error->( $framer, "Bad magic", $$buffref );
            return;
          }
        }
        else
        {
          if ( substr( $$buffref, 0, $hlen ) eq $magic_header )
          {
            $conn->{state} = 2;
            $$buffref = substr( $$buffref, $hlen );
          }
          else
          {
            # FIX: the original had no else here, so a full-length buffer
            # with a bad preface spun this while-loop forever.
            $on_frame_error->( $framer, "Bad magic", $$buffref );
            return;
          }
        }
      }
      else
      {
        my $frame = $conn->read_frame( $$buffref );

        # FIX: the original looped forever when the buffer held more than
        # 8 bytes but not yet a complete frame (read_frame returned false
        # and nothing was consumed). Wait for more data instead.
        last unless $frame;

        # Consume the bytes used by the decoded frame.
        $$buffref = substr( $$buffref, $frame->{size} );

        $on_frame_read->( $framer, $frame, $eof );
      }
    }
  };

  # Per-client accept handler: wrap the handle in a framer and create the
  # server-side connection state, then hand off to the caller.
  $params{on_accept} = sub {
    my ( $handle ) = @_;

    my $framer = HTTP2::Draft::FrameStream->new( handle => $handle );
    $log->info( "NPN: ", $framer->{write_handle}->next_proto_negotiated() );

    $framer->configure( on_read => $on_read,
                        on_read_error => sub { print "READ ERROR: " . join( ", ", @_ ), "\n" },
                        on_write_error => sub { print "WRITE ERROR\n" },
                        autoflush => 1,
                        write_all => 1 );

    my $conn = HTTP2::Draft::Connection->new( $framer, "server" );
    $framer->{conn} = $conn;
    $conn->{state} = 1;    # await the client connection preface first

    $on_http2_connect->( $framer );
  };

  $loop->SSL_listen( %params );
}
=head1 SYNOPSIS
Adds HTTP/2-draft helper methods to IO::Async::Loop: C<HTTP2_connect> for
client connections and C<HTTP2_listen> for servers. Both wrap the TLS
handle in an L<HTTP2::Draft::FrameStream> and deliver decoded frames to an
C<on_frame_read> callback.

    use HTTP2::Draft::Framer;

    $loop->HTTP2_connect(
        on_http2_connect => sub { ... },
        on_frame_read    => sub { my ($framer, $frame, $eof) = @_; ... },
        ...
    );
=head1 EXPORT
A list of functions that can be exported. You can delete this section
if you don't export anything, such as for a purely object-oriented module.
=head1 SUBROUTINES/METHODS
=head2 function1
=cut
=head1 AUTHOR
Stephen Ludin, C<< <sludin at ludin.org> >>
=head1 BUGS
Please report any bugs or feature requests to C<bug-http2-draft at rt.cpan.org>, or through
the web interface at L<http://rt.cpan.org/NoAuth/ReportBug.html?Queue=HTTP2-Draft>. I will be notified, and then you'll
automatically be notified of progress on your bug as I make changes.
=head1 SUPPORT
You can find documentation for this module with the perldoc command.
perldoc HTTP2::Draft::Framer
You can also look for information at:
=over 4
=item * RT: CPAN's request tracker (report bugs here)
L<http://rt.cpan.org/NoAuth/Bugs.html?Dist=HTTP2-Draft>
=item * AnnoCPAN: Annotated CPAN documentation
L<http://annocpan.org/dist/HTTP2-Draft>
=item * CPAN Ratings
L<http://cpanratings.perl.org/d/HTTP2-Draft>
=item * Search CPAN
L<http://search.cpan.org/dist/HTTP2-Draft/>
=back
=head1 ACKNOWLEDGEMENTS
=head1 LICENSE AND COPYRIGHT
Copyright 2013 Stephen Ludin.
This program is free software; you can redistribute it and/or modify it
under the terms of the the Artistic License (2.0). You may obtain a
copy of the full license at:
L<http://www.perlfoundation.org/artistic_license_2_0>
Any use, modification, and distribution of the Standard or Modified
Versions is governed by this Artistic License. By using, modifying or
distributing the Package, you accept this license. Do not use, modify,
or distribute the Package, if you do not accept this license.
If your Modified Version has been derived from a Modified Version made
by someone other than you, you are nevertheless required to ensure that
your Modified Version complies with the requirements of this license.
This license does not grant you the right to use any trademark, service
mark, tradename, or logo of the Copyright Holder.
This license includes the non-exclusive, worldwide, free-of-charge
patent license to make, have made, use, offer to sell, sell, import and
otherwise transfer the Package with respect to any patent claims
licensable by the Copyright Holder that are necessarily infringed by the
Package. If you institute patent litigation (including a cross-claim or
counterclaim) against any party alleging that the Package constitutes
direct or contributory patent infringement, then this Artistic License
to you shall terminate on the date that such litigation is filed.
Disclaimer of Warranty: THE PACKAGE IS PROVIDED BY THE COPYRIGHT HOLDER
AND CONTRIBUTORS "AS IS' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES.
THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE, OR NON-INFRINGEMENT ARE DISCLAIMED TO THE EXTENT PERMITTED BY
YOUR LOCAL LAW. UNLESS REQUIRED BY LAW, NO COPYRIGHT HOLDER OR
CONTRIBUTOR WILL BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, OR
CONSEQUENTIAL DAMAGES ARISING IN ANY WAY OUT OF THE USE OF THE PACKAGE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
=cut
1; # End of HTTP::Draft::Framer
| sludin/http2-perl | lib/HTTP2/Draft/Framer.pm | Perl | bsd-3-clause | 8,140 |
%:- if(( ( \+ ((current_prolog_flag(logicmoo_include,Call),Call))) )).
:- module(mpred_pttp_static,[]).
%:- endif.
:- ensure_loaded(library(pfc_lib)).
:- ensure_loaded(library('pfc2.0'/'mpred_header.pi')).
:- '$set_source_module'(baseKB).
% Operator declarations for the PTTP first-order formula syntax
% (connectives and quantifiers used by the clause translator).
% The commented-out entries are the remains of an export list that was
% folded into this directive; the trailing cut just forces the directive
% to succeed deterministically.
:-
%swi_module(mpred_pttp_statics,[
% pttp1/2,
   op(400,fy,-),    % negation
   op(500,xfy,&),   % conjunction
   op(600,xfy,v),   % disjunction
   op(650,xfy,=>),  % implication
   op(680,xfy,<=>), % equivalence
   op( 500, fy, ~),    % negation
   op( 500, fy, all), % universal quantifier
   op( 500, fy, ex),  % existential quantifier
  % op( 500,xfy, :),
 %  nnf/4,
   !.
/*
pttp_tell_wid/2,
pttp_test/2,
search/7,
do_pttp_test/1,
timed_call/2,
expand_input_proof/2,
contract_output_proof/2
]).
*/
%%% ****h* PTTP/PTTP
%%% COPYRIGHT
%%% Copyright (c) 1988-2003 Mark E. Stickel, SRI International, Menlo Park, CA 94025 USA
%%%
%%% Permission is hereby granted, free of charge, to any person obtaining a
%%% copy of this software and associated documentation files (the "Software"),
%%% to deal in the Software without restriction, including without limitation
%%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
%%% and/or sell copies of the Software, and to permit persons to whom the
%%% Software is furnished to do so, subject to the following conditions:
%%%
%%% The above copyright notice and this permission notice shall be included
%%% in all copies or substantial portions of the Software.
%%%
%%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
%%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
%%% MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
%%% IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
%%% CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
%%% TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
%%% SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
%%% DESCRIPTION
%%%
%%% A Prolog Technology Theorem Prover
%%%
%%% Mark E. Stickel
%%%
%%% Prolog is not a full theorem prover
%%% for three main reasons:
%%%
%%% It uses an unsound unification algorithm without
%%% the occurs check.
%%%
%%% Its inference system is complete for Horn
%%% clauses, but not for more general formulas.
%%%
%%% Its unbounded depth-first search strategy
%%% is incomplete.
%%%
%%% Also, it cannot display the proofs it finds.
%%%
%%% The Prolog Technology Theorem Prover (PTTP)
%%% overcomes these limitations by
%%%
%%% transforming clauses so that head literals have
%%% no repeated variables and unification without the
%%% occurs check is valid; remaining unification
%%% is done using complete unification with the
%%% occurs check in the body;
%%%
%%% adding contrapositives of clauses (so that
%%% any literal, not just a distinguished head
%%% literal, can be resolved on) and the model-
%%% elimination procedure reduction rule that
%%% matches goals with the negations of their
%%% ancestor goals;
%%%
%%% using a sequence of bounded depth-first searches
%%% to pttp prove a theorem;
%%%
%%% retaining information on what formulas are
%%% used for each inference so that the proof
%%% can be printed.
%%%
%%% This version of PTTP translates first-order
%%% predicate calculus formulas written in a Prolog-like
%%% notation into Prolog code. The resulting code
%%% is then compiled and executed.
%%%
%%% PTTP commands:
%%%
%%% pttp_assert(formula) - translates the first-order formula
%%% (normally a conjunction of formulas) into Prolog
%%% and compiles it
%%%
%%% pttp_prove(formula) - tries to prove a formula
%%%
%%% Look at the description of these functions
%%% and the examples for more details on how
%%% pttp_assert and pttp_prove should be used.
%%% For more information on PTTP, consult
%%% Stickel, M.E. A Prolog technology theorem prover:
%%% implementation by an extended Prolog compiler.
%%% Journal of Automated Reasoning 4, 4 (1988), 353-380.
%%% and
%%% Stickel, M.E. A Prolog technology theorem prover:
%%% a new exposition and implementation in Prolog.
%%% Technical Note 464, Artificial Intelligence Center,
%%% SRI International, Menlo Park, California, June 1989.
%%%
%%%
%%%
%%% Several arguments are added to each predicate:
%%% PosAncestors is list of positive ancestor goals
%%% NegAncestors is list of negative ancestor goals
%%% DepthIn is depth bound before goal is solved
%%% DepthOut will be set to remaining depth bound after goal is solved
%%% ProofIn is dotted-pair difference list of proof so far
%%% ProofOut will be set to list of steps of proof so far after goal is solved
%%%
%%%
%%%
%%% Depth-first iterative-deepening search.
%%%
%%% PTTP adds arguments DepthIn and DepthOut
%%% to each PTTP literal to control bounded depth-first
%%% search. When a literal is called,
%%% DepthIn is the current depth bound. When
%%% the literal exits, DepthOut is the new number
%%% of levels remaining after the solution of
%%% the literal (DepthIn - DepthOut is the number
%%% of levels used in the solution of the goal.)
%%%
%%% For clauses with empty bodies or bodies
%%% composed only of pttp_builtin functions,
%%% DepthIn = DepthOut.
%%%
%%% For other clauses, the depth bound is
%%% compared to the cost of the body. If the
%%% depth bound is exceeded, the clause fails.
%%% Otherwise the depth bound is reduced by
%%% the cost of the body.
%%%
%%% p :- q , r.
%%% is transformed into
%%% p(DepthIn,DepthOut) :-
%%% DepthIn >= 2, Depth1 is DepthIn - 2,
%%% q(Depth1,Depth2),
%%% r(Depth2,DepthOut).
%%%
%%% p :- q ; r.
%%% is transformed into
%%% p(DepthIn,DepthOut) :-
%%% DepthIn >= 1, Depth1 is DepthIn - 1,
%%% (q(Depth1,DepthOut) ; r(Depth1,DepthOut)).
%%%
%%%
%%%
%%% Complete inference.
%%%
%%% Model elimination reduction operation and
%%% identical ancestor goal pruning.
%%%
%%% Two arguments are added to each literal, one
%%% for all the positive ancestors, one for all
%%% the negative ancestors.
%%%
%%% Unifiable membership is checked in the list
%%% of opposite polarity to the goal
%%% for performing the reduction operation.
%%%
%%% Identity membership is checked in the list
%%% of same polarity as the goal
%%% for performing the ancestor goal pruning operation.
%%% This is not necessary for soundness or completeness,
%%% but is often effective at substantially reducing the
%%% number of inferences.
%%%
%%% The current head goal is added to the front
%%% of the appropriate ancestor list during the
%%% call on subgoals in bodies of nonunit clauses.
%%%
%%%
%%%
%%% Proof Printing.
%%%
%%% Add extra arguments to each goal so that information
%%% on what inferences were made in the proof can be printed
%%% at the end.
%%% ***
%%% ****f* PTTP/pttp
%%% DESCRIPTION
%%% pttp is the PTTP compiler top-level predicate.
%%% Its argument is a conjunction of formulas to be compiled.
%%% SOURCE
%%% ***
%%% ****if* PTTP/linearize
%%% DESCRIPTION
%%% Prolog's unification operation is unsound for first-order
%%% reasoning because it lacks the occurs check that would
%%% block binding a variable to a term that contains the
%%% variable and creating a circular term. However, Prolog's
%%% unification algorithm is sound and the occurs check is
%%% unnecessary provided the terms being unified have no
%%% variables in common and at least one of the terms has
%%% no repeated variables. A Prolog fact or rule head will
%%% not have variables in common with the goal. The linearize
%%% transformation rewrites a fact or rule so that the fact
%%% or rule head has no repeated variables and Prolog unification
%%% can be used safely. The rest of the unification can then
%%% be done in the body of the transformed clause, using
%%% the sound unify predicate.
%%%
%%% For example,
%%% p(X,Y,f(X,Y)) :- true.
%%% is transformed into
%%% p(X,Y,f(X1,Y1)) :- unify(X,X1), unify(Y,Y1).
%%% SOURCE
% linearize(+TermIn,-TermOut,+VarsIn,-VarsOut,+MatchesIn,-MatchesOut)
%   Rewrites TermIn so that TermOut contains no repeated variables:
%   every re-occurrence of a variable is replaced by a fresh variable,
%   and a residual unify/2 goal is conjoined onto MatchesIn so that
%   the binding is re-established soundly in the clause body.
%   VarsIn/VarsOut threads the list of variables already seen.
%   This makes plain Prolog head unification safe without occurs check.
linearize(TermIn,TermOut,VarsIn,VarsOut,MatchesIn,MatchesOut):- linearize(unify, TermIn,TermOut,VarsIn,VarsOut,MatchesIn,MatchesOut).
% linearize(+Pred,+TermIn,-TermOut,+VarsIn,-VarsOut,+MatchesIn,-MatchesOut)
%   As above, but Pred names the binary predicate used for the residual
%   matching goals (unify by default).
linearize(Pred, TermIn,TermOut,VarsIn,VarsOut,MatchesIn,MatchesOut) :-
% non-variable term: rebuild the skeleton and linearize each argument
% (is_ftNonvar/1 is a project helper; presumably a nonvar-style test - TODO confirm)
is_ftNonvar(TermIn) ->
functor(TermIn,F,N),
pttp_functor(TermOut,F,N),
linearize_args(Pred,TermIn,TermOut,VarsIn,VarsOut,
MatchesIn,MatchesOut,1,N);
% variable seen before: emit a fresh variable plus a residual Pred goal
identical_member_special(TermIn,VarsIn) ->
((VarsOut = VarsIn,
UNIFY =.. [Pred,TermIn,TermOut],
conjoin_pttp(MatchesIn,UNIFY,MatchesOut)));
%true ->
% first occurrence of this variable: keep it and record it as seen
(( TermOut = TermIn,
VarsOut = [TermIn|VarsIn],
MatchesOut = MatchesIn)).
% linearize_args(+Pred,+TermIn,+TermOut,+VarsIn,-VarsOut,
%                +MatchesIn,-MatchesOut,+I,+N)
%   Linearizes arguments I..N of TermIn into the corresponding argument
%   slots of TermOut, threading the seen-variables and matches lists.
linearize_args(Pred,TermIn,TermOut,VarsIn,VarsOut,MatchesIn,MatchesOut,I,N) :-
I > N ->
VarsOut = VarsIn,
MatchesOut = MatchesIn;
%true ->
arg(I,TermIn,ArgI),
linearize(Pred,ArgI,NewArgI,VarsIn,Vars1,MatchesIn,Matches1),
arg(I,TermOut,NewArgI),
I1 is I + 1,
linearize_args(Pred,TermIn,TermOut,Vars1,VarsOut,Matches1,MatchesOut,I1,N).
%%% ***
%%% ****if* PTTP/unify
%%% DESCRIPTION
%%% Prolog's unification operation is unsound for first-order
%%% reasoning because it lacks the occurs check that would
%%% block binding a variable to a term that contains the
%%% variable and creating a circular term. Thus, PTTP
%%% must provide a sound unfication algorithm with the occurs
%%% check.
%%%
%%% unify(X,Y) is similar to Prolog's X=Y, except that operations
%%% like unify(X,f(X)) fail rather than create circular terms.
%%% SOURCE
% unify(?X,?Y)
%   Sound unification: like X = Y but with the occurs check, so
%   unify(X,f(X)) fails instead of creating a circular term.
unify(X,Y) :- unify_with_occurs_check(X,Y).
% unify_cheaper(?X,?Y)
%   Like unify/2 but decomposes compound terms itself so that
%   same_functor/2 could allow "similar" functor pairs to match
%   (that extension is currently disabled; see simular_functors/2).
unify_cheaper(X,Y) :- compound(X),compound(Y),!,
functor(X,F1,N),
functor(Y,F2,N),
same_functor(F1,F2),
(N = 1 ->
arg(1,X,X1), arg(1,Y,Y1), unify(X1,Y1);
%true ->
unify_args(X,Y,N)).
unify_cheaper(X,Y) :- unify_with_occurs_check(X,Y),!.
% same_functor(+F1,+F2): identical functors, or "similar" ones.
same_functor(F1,F2):- ( F1=F2 -> true ; simular_functors(F1,F2)).
% NOTE(review): the leading fail makes this a disabled stub; "simular"
% is presumably a typo for "similar" (name kept for interface stability).
simular_functors(F1,F2):-fail,F1=F2.
% unify_args(+X,+Y,+N): soundly unify arguments N down to 1 of X and Y.
unify_args(X,Y,N) :-
N = 2 ->
arg(2,X,X2), arg(2,Y,Y2), unify(X2,Y2),
arg(1,X,X1), arg(1,Y,Y1), unify(X1,Y1);
%true ->
arg(N,X,Xn), arg(N,Y,Yn), unify(Xn,Yn),
N1 is N - 1, unify_args(X,Y,N1).
%%% ***
% constrain_args_pttp(+Pred,+Args)
%   Posts a dif/2 disequality between the final two arguments; skipped
%   when the leading argument is one of d, p or l. Catch-all succeeds.
constrain_args_pttp(_P,[AR,GS]):-!,dif(AR,GS).
constrain_args_pttp(_,[P,AR,GS]):-P\=d,P\=p,P\=l,!,dif(AR,GS).
constrain_args_pttp(_,_).
% argument_type_checking(+Functor,+HeadArgs,-Goal)
%   Yields an argument-constraint goal when a constrain_args hook
%   predicate exists (clause order gives constrain_args/2 priority),
%   otherwise true.
argument_type_checking(HF,HeadArgs,constrain_args(HF,HeadArgs)):-current_predicate(constrain_args/2).
argument_type_checking(HF,HeadArgs,constrain_args_pttp(HF,HeadArgs)):-current_predicate(constrain_args_pttp/2).
argument_type_checking(_,_,true).
:- meta_predicate pretest_call(0).
% pretest_call(:Goal): plain meta-call wrapper (hook point for pre-tests).
pretest_call(C):-call(C).
%%% ***
%%% ****if* PTTP/unifiable_member
%%% DESCRIPTION
%%% unifiable_member(X,L) succeeds each time X is unifiable with an
%%% element of the list L
%%% SOURCE
% unifiable_member(?X,+L)
%   Succeeds once for each element of L that soundly unifies with X
%   (backtracks through all unifiable elements; bindings are kept).
unifiable_member(X,[Y|L]) :- unify(X,Y); unifiable_member(X,L).
% Variant using the cheaper decomposing unifier.
unifiable_member_cheaper(X,[Y|L]) :- unify_cheaper(X,Y); unifiable_member_cheaper(X,L).
%%% ***
%%% ****if* PTTP/identical_member_special
%%% DESCRIPTION
%%% identical_member_special(X,L) succeeds iff X is an element of the list L
%%% it does not use unification during element comparisons
%%% SOURCE
% from memberchk_eq(X,L).
% Membership under unify_cheaper (despite the "identical" name).
identical_member_cheaper(X,[Y|L]) :- unify_cheaper(X,Y); identical_member_cheaper(X,L).
% Randomized check: with probability 1/9 use unifiable membership,
% otherwise strict identity membership (a heuristic loop check).
identical_member_special_loop_check(X,L):- 1 is random(9),!,unifiable_member(X,L).
identical_member_special_loop_check(X,L):-identical_member_special(X,L).
% identical_member_special(+X,+List)
%   Succeeds (at most once) iff some element of List is == to X.
%   Uses identity comparison only, so it never binds variables.
identical_member_special(Item,[Elem|_]) :-
    Item == Elem,
    !.
identical_member_special(Item,[_|Rest]) :-
    identical_member_special(Item,Rest).
%%% ***
%%% ****if* PTTP/write_proof
%%% DESCRIPTION
%%% write_proof prints the proof that PTTP finds
%%% SOURCE
% write_proof(+Proof)
%   Pretty-prints a completed PTTP proof (a difference list of steps):
%   proof length and depth, then one numbered line per inference.
%   must_det_l/1 is a project helper; presumably a deterministic
%   must-succeed wrapper around the goal list - TODO confirm.
write_proof(Proof) :-
must_det_l((
write('Proof:'),
nl,
proof_length(Proof,Len),
write('length = '),
write(Len),
write(', '),
proof_depth(Proof,Depth),
write('depth = '),
write(Depth),
nl,
write('Goal#   Wff#  Wff Instance'),
nl,
write('-----   ----  ------------'),
% prepend a query pseudo-step if one is not already present
add_proof_query_line(Proof,Proof2),
% convert raw steps into [LineNum,WffNum,Head,Depth,Subgoals] records
process_proof(Proof2,0,Proof1),
write_proof1(Proof1),
nl,
write('Proof end.'))).
% write_proof1(+Steps)
%   Writes one formatted line per processed proof step.  Each step is
%   [LineNum,WffNum,Head,Depth,Subgoals]: the goal number, the input
%   wff number, the instantiated literal, its nesting depth (used for
%   indentation) and the goal numbers of its subgoals.
write_proof1([]).
write_proof1([[LineNum,X,Head,Depth,Subgoals]|Y]) :-
    nl,
    write_indent_for_number(LineNum),
    write('['),
    write(LineNum),
    write('] '),
    write_indent_for_number(X),
    write(X),
    write(' '),
    write_proof_indent(Depth),
    write(Head),
    (Subgoals = [] ->
        true;
    %true ->
        write(' :- '),
        write_proof_subgoals(Subgoals)),
    % FIX: was write(.) - a bare solo '.' as an atom argument is not
    % valid ISO Prolog syntax; the quoted atom prints identically.
    write('.'),
    write_proof1(Y).
% write_proof_subgoals(+GoalNums)
%   Writes subgoal line numbers as "[1] , [2] , [3]".
%   First clause handles two-or-more elements, second the final one;
%   an empty list is never passed (caller guards Subgoals = []).
write_proof_subgoals([X,Y|Z]) :-
write('['),
write(X),
write('] , '),
write_proof_subgoals([Y|Z]).
write_proof_subgoals([X]) :-
write('['),
write(X),
write(']').
% write_proof_indent(+N): writes N spaces of indentation.
write_proof_indent(N) :-
N > 0,
write(' '),
N1 is N - 1,
write_proof_indent(N1).
write_proof_indent(0).
% process_proof(+ProofDiffList,+LineNum,-Steps)
%   Converts the raw proof difference list [Prf|PrfEnd] into a list of
%   [LineNum,WffNum,Head,Depth,Subgoals] records for printing.
%   Terminates when the open tail Prf is identical (==) to PrfEnd.
process_proof([Prf|PrfEnd],_LineNum,Result) :-
Prf == PrfEnd,
!,
Result = [].
process_proof([[[X,Head,PosAncestors,NegAncestors]|Y]|PrfEnd],LineNum,Result) :-
LineNum1 is LineNum + 1,
process_proof([Y|PrfEnd],LineNum1,P),
(is_query_lit(Head) ->
Depth is 0;
%true ->
list_length_pttp(PosAncestors,N1),	% compute indentation to show
list_length_pttp(NegAncestors,N2),	% level of goal nesting from
Depth is N1 + N2 + 1),			% lengths of ancestor lists
Depth1 is Depth + 1,
% children of this step are exactly the following steps one level deeper
collect_proof_subgoals(Depth1,P,Subgoals),
(X = redn ->
% reduction step: display as 'red' with the literal un-negated
X1 = red,
negated_literal(Head,Head1);
((number(X) , X < 0); X= (-(_))) ->
% negative wff number marks a contrapositive of a negated literal
isNegOf(X1,X),
negated_literal(Head,Head1);
%true ->
X1 = X,
Head1 = Head),
Result = [[LineNum,X1,Head1,Depth,Subgoals]|P].
% collect_proof_subgoals(+Depth1,+Steps,-GoalNums)
%   Collects the line numbers of consecutive following steps whose
%   depth equals Depth1 (the direct subgoals); stops at a shallower step.
collect_proof_subgoals(_Depth1,[],Result) :-
Result = [].
collect_proof_subgoals(Depth1,[[LineNum,_,_,Depth,_]|P],Result) :-
Depth = Depth1,
collect_proof_subgoals(Depth1,P,R),
Result = [LineNum|R].
collect_proof_subgoals(Depth1,[[_,_,_,Depth,_]|P],Result) :-
Depth > Depth1,
collect_proof_subgoals(Depth1,P,Result).
collect_proof_subgoals(Depth1,[[_,_,_,Depth,_]|_],Result) :-
Depth < Depth1,
Result = [].
% add_proof_query_line(+Proof,-Proof2)
%   Ensures the proof starts with a query pseudo-step [0,query,[],[]];
%   leaves the proof unchanged when one is already present.
add_proof_query_line(Proof,Proof2) :-
Proof = [Prf|_PrfEnd],
is_ftNonvar(Prf),
Prf = [[_,query,_,_]|_],
!,
Proof2 = Proof.
add_proof_query_line(Proof,Proof2) :-
Proof = [Prf|PrfEnd],
Proof2 = [[[0,query,[],[]]|Prf]|PrfEnd].
%%% ***
%%% ****if* PTTP/clauses
%%% DESCRIPTION
%%% Negation normal form to Prolog clause translation.
%%% Include a literal in the body of each clause to
%%% indicate the number of the formula the clause came from.
%%% SOURCE
% clauses(+Wff,-Clauses,+WffNum1,-WffNum2)
%   Translates a negation-normal-form formula (a conjunction of input
%   formulas) into a conjunction of Prolog clauses, one contrapositive
%   per head literal.  WffNum1/WffNum2 threads the input formula number.
clauses((A , B),L,WffNum1,WffNum2) :-
!,
clauses(A,L1,WffNum1,W),
clauses(B,L2,W,WffNum2),
conjoin_pttp(L1,L2,L).
clauses(PNF,L,WffNum1,WffNum2):-
% save_wid/3 records the formula under its id (project helper)
save_wid(WffNum1,pttp_in,PNF),
% normalize to negation normal form first
once(pttp_nnf(PNF,OUT)),
save_wid(WffNum1,pttp_nnf,OUT),
clauses1(OUT,L,WffNum1,WffNum2).
% clauses1(+NNF,-Clauses,+WffNum1,-WffNum2)
%   Echoes the numbered formula, then builds one clause per head literal.
clauses1(A,L,WffNum1,WffNum2) :-
write_clause_with_number(A,WffNum1),
head_literals(A,Lits),
clauses2(A,Lits,L,WffNum1),
kb_incr(WffNum1 ,WffNum2).
% clauses2(+Wff,+Lits,-Clauses,+WffNum)
%   For each candidate head literal, computes the matching body
%   (contrapositive); a body of false means the clause is a tautology
%   and is dropped.  An infer_by(WffNum) marker is conjoined onto each
%   body so proofs can report the source formula.
clauses2(A,[Lit|Lits],L,WffNum) :-
body_for_head_literal(Lit,A,Body1),
(Body1 == false ->
L = true;
%true ->
conjoin_pttp(infer_by(WffNum),Body1,Body),
clauses2(A,Lits,L1,WffNum),
conjoin_pttp((Lit :- Body),L1,L)).
clauses2(_,[],true,_).
% head_literals(+Wff,-Lits)
%   Collects the literals of Wff that may serve as clause heads.
head_literals(Wff,L) :-
Wff = (A :- _B) ->	% contrapositives not made for A :- ... inputs
head_literals(A,L);
Wff = (A , B) ->
(head_literals(A,L1),
head_literals(B,L2),
list_union(L1,L2,L));
Wff = (A ; B) ->
(head_literals(A,L1),
head_literals(B,L2),
list_union(L1,L2,L));
%true ->
L = [Wff].
% body_for_head_literal(+Head,+Wff,-Body)
%   Computes the body of the contrapositive of Wff with head Head.
%   Conjunction and disjunction are dualized (the head was "moved"
%   across the connective); the occurrence of Head itself becomes true;
%   a literal equal to the negation of Head makes the body false
%   (tautologous clause); any other literal is negated.
body_for_head_literal(Head,Wff,Body) :-
Wff = (A :- B) ->
(body_for_head_literal(Head,A,A1),
conjoin_pttp(A1,B,Body));
Wff = (A , B) ->
(body_for_head_literal(Head,A,A1),
body_for_head_literal(Head,B,B1),
pttp_disjoin(A1,B1,Body));
Wff = (A ; B) ->
(body_for_head_literal(Head,A,A1),
body_for_head_literal(Head,B,B1),
conjoin_pttp(A1,B1,Body));
Wff == Head ->
Body = true;
(once(negated_literal(Wff,Was)),Head=@=Was) ->
Body = false;
%true ->
negated_literal(Wff,Body).
%%% ***
%%% ****if* PTTP/predicates
%%% DESCRIPTION
%%% predicates returns a list of the predicates appearing in a formula.
%%% SOURCE
% is_functor_like_search(+F): F is a search-style wrapper functor whose
%   first argument holds the formula of interest.
is_functor_like_search(Search):-atom(Search),arg(_,vv(search,pttp_prove),Search).
% is_functor_like_firstOrder(+F): F is a first-order wrapper functor
%   (explicit list, or any holds-true/holds-false style functor).
is_functor_like_firstOrder(Search):-atom(Search),arg(_,vv(asserted_t,secondOrder,pttp_prove),Search).
is_functor_like_firstOrder(Search):-atom(Search),is_holds_true_pttp(Search).
is_functor_like_firstOrder(Search):-atom(Search),is_holds_false_pttp(Search).
% predicates(+Wff,-Preds)
%   Returns the list of [F,N] predicate indicators appearing in Wff.
%   Variables and atomic terms contribute none; lists are traversed
%   element-wise; search-style wrappers are looked through.
predicates(Wff,[]):-is_ftVar(Wff),!.
predicates(Wff,[]):-not(compound(Wff)),!.
predicates([Lw],L):- predicates(Lw,L),!.
predicates([Lw|ISTw],L):- !,
predicates(Lw,L1),
predicates(ISTw,L2),
union(L2,L1,L).
predicates(Wff,L):- functor(Wff,Search,_),is_functor_like_search(Search),arg(1,Wff,X),predicates(X,L),!.
predicates(Wff,L) :-
Wff = (A :- B) ->
predicates(A,L1),
predicates(B,L2),
union(L2,L1,L);
Wff = (A , B) ->
predicates(A,L1),
predicates(B,L2),
union(L2,L1,L);
Wff = (A ; B) ->
predicates(A,L1),
predicates(B,L2),
union(L2,L1,L);
functor(Wff,search,_) ->	% list predicates in first argument of search
arg(1,Wff,X),
predicates(X,L);
pttp_builtin(Wff) ->
L = [];
%true ->
functor(Wff,F,N),
L = [[F,N]].
% Fallback entry dispatching on functor/arity (reached on backtracking
% past the if-then-else clause above).
predicates(Wff,L) :- functor(Wff,F,A), predicates(Wff,F,A,L).
% skipped_functor/1 is disabled by the leading fail.
skipped_functor(F):- fail,is_2nd_order_holds_pttp(F).
% predicates/4: logical connectives and builtins contribute only their
%   arguments' predicates; anything else contributes [F,A] as well.
predicates(Wff,F,___,L):- logical_functor_pttp(F), Wff=..[_|ARGS], predicates(ARGS,L).
predicates(Wff,F,A, L):- pttp_builtin(F,A), Wff=..[_|ARGS], predicates(ARGS,L).
% predicates(Wff,F,___,L):- skipped_functor(F), Wff=..[_|ARGS], predicates(ARGS,L).
predicates(Wff,F,A,[[F,A]|L]):- Wff=..[_|ARGS], predicates(ARGS,L).
%%% ***
%%% ****if* PTTP/procedure
%%% DESCRIPTION
%%% procedure returns a conjunction of the clauses
%%% with head predicate P/N.
%%% SOURCE
% procedure(+P,+N,+Clauses,-Proc)
%   Proc is the conjunction of those clauses in Clauses whose head
%   predicate is P/N (true when there are none).
procedure(P,N,Clauses,Proc) :-
( (Clauses = (A , B)) ->
(procedure(P,N,A,ProcA),
procedure(P,N,B,ProcB),
conjoin_pttp(ProcA,ProcB,Proc));
((Clauses = (A :- _B) , functor(A,P,N)) ->
Proc = Clauses;
%true ->
Proc = true)).
% procedures(+Preds,+Clauses,-Procs)
%   Conjoins the procedures for every [P,N] in Preds, preserving order.
procedures([[P,N]|Preds],Clauses,Procs) :-
procedure(P,N,Clauses,Proc),
procedures(Preds,Clauses,Procs2),
conjoin_pttp(Proc,Procs2,Procs).
procedures([],_Clauses,true).
%%% ***
% head_body_was/2: no-op hook (always succeeds).
head_body_was(_,_).
:- was_export(is_holds_false_pttp/1).
% is_holds_false_pttp(+F)
%   F is a functor denoting a negative/holds-false style wrapper:
%   an explicit member of the list below, a *_false functor, or the
%   positive side of an is_p_to_n/2 pair.
is_holds_false_pttp(A):-not(atom(A)),!,fail.
is_holds_false_pttp(Prop):-member(Prop,[not,nholds,holds_f,mpred_f,aint,assertion_f,asserted_mpred_f,retraction,not_secondOrder,not_firstOrder]).
is_holds_false_pttp(F):-atom_concat(_,'_false',F).
%is_holds_false_pttp(F):-atom_concat(_,'_f',F).
is_holds_false_pttp(F):-is_p_to_n(_,F).
% is_holds_false_pttp(F):-atom_concat('imp',_,F).
:- was_export(is_holds_true_pttp/1).
% is_holds_true_pttp(+F)
%   F is a functor denoting a positive/holds-true style wrapper:
%   an explicit member of the vvv/8 list, or one of several suffix
%   conventions (_true, _t, _in), or a key of is_p_to_n/2.
is_holds_true_pttp(A):-not(atom(A)),!,fail.
is_holds_true_pttp(Prop):-arg(_,vvv(holds,holds_t,t,asserted_mpred_t,assertion_t,assertion,secondOrder,asserted_t),Prop).
is_holds_true_pttp(F):-atom_concat(_,'_true',F).
is_holds_true_pttp(F):-atom_concat(_,'_t',F).
%is_holds_true_pttp(F):-atom_concat('pos',_,F).
%is_holds_true_pttp(F):-atom_concat('is',_,F).
is_holds_true_pttp(F):-atom_concat(_,'_in',F).
is_holds_true_pttp(F):-is_p_to_n(F,_).
:- was_export(is_2nd_order_holds_pttp/1).
% is_2nd_order_holds_pttp(+Prop): Prop is either kind of holds wrapper.
% NOTE(review): by operator precedence this reads as
%   (atom(Prop), is_holds_true_pttp(Prop)) ; is_holds_false_pttp(Prop)
% so the atom/1 guard does not cover the second disjunct; harmless in
% practice because is_holds_false_pttp rejects non-atoms itself.
is_2nd_order_holds_pttp(Prop):- atom(Prop), is_holds_true_pttp(Prop) ; is_holds_false_pttp(Prop).
:- style_check(+singleton).
% do_not_wrap(+Functor)
%   Succeeds for functors that must be left unwrapped by the PTTP
%   compiler: the query functor and internal int_* functors.
%   Fails immediately (via cut-fail) for anything that is not an atom.
do_not_wrap(Fn) :- \+ atom(Fn), !, fail.
do_not_wrap(Fn) :- arg(_, vv(query), Fn).
do_not_wrap(Fn) :- atom_concat('int_', _, Fn).
:- was_export(correct_pttp/2).
%:- was_dynamic t_l:second_order_wrapper/1.
:- thread_local t_l:second_order_wrapper/1.
% Default second-order wrapper applied to bare literals.
t_l:second_order_wrapper(true_t).
% correct_pttp_head/3, correct_pttp_body/3
%   Run correct_pttp/2 with Wrapper temporarily installed as the
%   thread-local second-order wrapper (locally_tl is a project helper;
%   presumably a scoped thread-local binding - TODO confirm).
correct_pttp_head(Wrapper,B,A):- locally_tl(second_order_wrapper(Wrapper), correct_pttp(B,A)),!.
correct_pttp_body(Wrapper,B,A):- locally_tl(second_order_wrapper(Wrapper), correct_pttp(B,A)),!.
% correct_pttp(+LitIn,-LitOut)
%   Normalizes a literal into wrapped form (e.g. true_t(F,Args...)).
%   The extra first argument LC is a loop-check list of literals
%   already being corrected on this path.
correct_pttp(B,A):-must(correct_pttp([],B,A)),!.
correct_pttp(LC,B,A):-member_eq(B,LC),A=B.
% Explicit negations (-B and n(_,B)) are corrected positively and then
% re-negated, so negation is always pushed through the wrapper.
correct_pttp(LC,-B,NA):-!,must((correct_pttp([B|LC],B,A),negated_literal(A,AN),correct_pttp(LC,AN,NA))),!.
correct_pttp(LC,n(_,B),NA):-!,must((correct_pttp([B|LC],B,A),negated_literal(A,AN),correct_pttp(LC,AN,NA))),!.
correct_pttp(LC,B,A):-once(correct_pttp_0([B|LC],B,A)),B==A,!.
correct_pttp(LC,B,A):-once(correct_pttp_0([B|LC],B,A)),!. % dmsg(once(correct_pttp_0(LC,B,A))),term_variables(B,BV),term_variables(A,AV),must(AV==BV).
% correct_pttp_0: variables and non-compounds pass through unchanged;
% compounds are decomposed and dispatched to correct_pttp_1.
correct_pttp_0(_,Body,Body):-is_ftVar(Body).
correct_pttp_0(_,Body,Body):-not(compound(Body)),!.
correct_pttp_0(_,BodyIn,Body):- is_ftVar(BodyIn),trace_or_throw(var_correct_lit(BodyIn,Body)).
correct_pttp_0(LC,BodyIn,Body):- functor(BodyIn,F,A),'=..'(BodyIn,[F|List]),correct_pttp_1(LC,BodyIn,F,A,List,Body).
% correct_pttp_1: not_* and holds-false functors are handled by
% correcting the negation and negating the result back.
correct_pttp_1(LC, BodyIn,F,_,_,Body):- sanity(atom(F)), atom_concat('not_',_,F),negated_literal(BodyIn,Neg),!,
correct_pttp(LC,Neg,NegBody),
negated_literal(NegBody,Body).
correct_pttp_1(LC, BodyIn,F,_,_,Body):- is_holds_false_pttp(F),negated_literal(BodyIn,Neg),!,correct_pttp(LC,Neg,NegBody),negated_literal(NegBody,Body),!.
correct_pttp_1(LC, BodyIn,F,A,L,Body):- is_holds_false_pttp(F),trace_or_throw(correct_pttp_1(LC,BodyIn,F,A,L,Body)).
correct_pttp_1(LC,_BodyIn,F,_,[L|IST],Body):- length([L|IST],A), correct_pttp_2(LC,F,A,[L|IST],Body).
:- kb_shared(wrapper_for/2).
% correct_pttp_2 / correct_pttp_3: choose the wrapper for a literal -
% an explicit wrapper_for/2 fact, a hook (correct_pttp_4), no wrapper
% for do_not_wrap functors, call_builtin for builtins, the literal
% itself if already a holds-true wrapper, else the thread-local
% second-order wrapper (default true_t).
correct_pttp_2(_,F,_,[L|IST],Body):- wrapper_for(F,Wrapper),!, wrap_univ(Body ,[Wrapper,F,L|IST]).
correct_pttp_2(_,F,A,[L|IST],Body):- correct_pttp_4(F,A,[L|IST],Body),!.
correct_pttp_2(_LC,F,_A,[L|IST],Body):- do_not_wrap(F),!,wrap_univ(Body,[F,L|IST]).
correct_pttp_2(_LC,F,A,[L|IST],Body):- atom(F),pttp_builtin(F,A),!,dmsg(todo(warn(pttp_builtin(F,A)))),wrap_univ(Body,[call_builtin,F,L|IST]).
correct_pttp_2(LC,F,_, L,Body):- is_ftVar(F),!,trace_or_throw(correct_pttp_2(LC,F,L,Body)).
correct_pttp_2(_LC,F,_,[L|IST],Body):- is_holds_true_pttp(F),!,wrap_univ(Body,[F,L|IST]).
% uncomment (need it)
correct_pttp_2(_,infer_by,1,[L|IST],Body):- infer_by = F, wrap_univ(Body ,[F,L|IST]).
% slow for 7
correct_pttp_2(LC,F,A,[L|IST],Body):-correct_pttp_3(LC,F,A,[L|IST],Body),!.
correct_pttp_3(_,F,A,[L|IST],Body):- correct_pttp_4(F,A,[L|IST],Body),!.
correct_pttp_3(_,F,_,[L|IST],Body):- t_l:second_order_wrapper(Wrapper),!, wrap_univ(Body ,[Wrapper,F,L|IST]).
correct_pttp_3(_,F,_,[L|IST],Body):- wrap_univ(Body,[true_t,F,L|IST]).
% wrap_univ(-Body,+FunctorAndArgs)
%   Like Body =.. List, but flattens wrapper-of-compound forms
%   (Wrapper(p(Args)) -> Wrapper(p,Args...)) and collapses duplicated
%   leading functors.
wrap_univ(Body ,[WapperPred,[P]]):-is_wrapper_pred(WapperPred),compound(P),P=..F_ARGS,!,wrap_univ(Body ,[WapperPred|F_ARGS]).
wrap_univ(Body ,[WapperPred,P]):-is_wrapper_pred(WapperPred),compound(P),P=..F_ARGS,!,wrap_univ(Body ,[WapperPred|F_ARGS]).
wrap_univ(Body ,[F1,F2|ARGS]):- F1==F2,!,wrap_univ(Body ,[F1|ARGS]).
% NOTE(review): this clause is dead code - both branches end in fail;
% it only sanity-checks/warns when enabled.
wrap_univ(_Body,[F|ARGS]):- must((atom(F),is_list(ARGS))),length(ARGS,A),must(A>1),functor(P,F,A),fail,
(predicate_property(P,_)->fail;(dmsg(once(warn(no_predicate_property(P)))))),fail.
wrap_univ(Body ,[F|List]):- must((Body=..[F|List])).
% is_wrapper_pred(+F): F is one of the known wrapper functors.
is_wrapper_pred(VarPred):-is_ftVar(VarPred),!,fail.
is_wrapper_pred(not_possible_t).
is_wrapper_pred(call_builtin).
is_wrapper_pred(WapperPred):-is_p_or_not(WapperPred),!.
% correct_pttp_4(F,A,[L|IST],Body):-...
% correct_pttp_4 is a disabled hook (always fails).
correct_pttp_4(_,_,_,_):-!,fail.
%%% ***
%%% ****if* PTTP/pttp1
%%% SOURCE
%:- was_export(pttp1/2).
%pttp1(X,Y) :- must_pttp_id(ID), !, pttp1_wid(ID, X,Y).
:- was_export(pttp1_wid/3).
% pttp1_wid(+ID,+Wffs,-Prolog)
%   PTTP compilation front half: normalizes the input formulas,
%   adds the PTTP control features (depth bound, ancestors, proof
%   recording), and gathers the resulting procedures.
pttp1_wid(ID,X,Y) :-
must_det_l((
pttp1a_wid(ID,X,X0),
pttp1b_wid(ID,X0,X8),
pttp1c_wid(ID,X0,X8,IntProcs,Procs),
conjoin_pttp(Procs,IntProcs,Y))).
:- was_export(pttp1a_wid/3).
% pttp1a_wid(ID,X,XX):-pttp1a_wid_0(ID,X,XX),!.
% pttp1a_wid(+ID,+Wffs,-Clauses)
%   Runs the clause translation; a (Foo :- true) result is retried on
%   the bare head so trivial wrappers are unwrapped.
pttp1a_wid(ID,X,XX):-pttp1a_wid_0(ID,X,X0),
((X0=(FOO:-TRUE),TRUE==true)->pttp1a_wid_0(ID,FOO,XX);XX=X0).
% pttp1a_wid_0: rewrite every negation marker (~, not) to -, then
%   translate to clauses.
pttp1a_wid_0(ID,X,X0) :-
must_det_l((
subst(X , ~,-,XX1),
subst(XX1,~,-,XX2),
subst(XX2,not,-,XX3),
% write('PTTP input formulas:'),
clauses(XX3,X0,ID,_))).
% pttp1b_wid: add the PTTP runtime arguments to every clause.
pttp1b_wid(_ID,X0,X8) :- must(apply_to_conjuncts(X0,add_features,X8)).
% pttp1c_wid: collect internal (int_*) procedures from the transformed
%   clauses and build the public wrapper procedures for the originals.
pttp1c_wid(_ID,X0,X8,IntProcs,Procs) :-
must_det_l((
predicates(X8,IntPreds0),
list_reverse(IntPreds0,IntPreds1),
procedures(IntPreds1,X8,IntProcs),
predicates(X0,Preds0),
list_reverse(Preds0,Preds),
apply_to_elements(Preds,make_wrapper(IntPreds1),Procs))).
% :- ensure_loaded(dbase_i_mpred_pttp_compile_stickel_orig).
%%% ***
%%% ****if* PTTP/pttp2
%%% SOURCE
:- was_export(pttp2_wid/2).
% pttp2_wid(+ID,+Clauses)
%   PTTP compilation back half: asserts each generated clause under ID.
pttp2_wid(ID,Y) :- !, must(apply_to_conjuncts(Y,pttp_assert_int_wid_for_conjuncts(ID),_)).
/*
:- was_export(pttp2/1).
pttp2(Y) :- must_pttp_id(ID), pttp2_wid(ID,Y).
pttp2(Y) :-
% nl,
% write('PTTP output formulas:'),
% apply_to_conjuncts(Y,write_clause,_),
% nl,
nl,
tell('pttp_temp.pl'),
apply_to_conjuncts(Y,write_clause,_),
nl,
told,
compile('pttp_temp.pl'),
nl,
!.
%%% ***
%%% ****if* PTTP/expand_input_proof
%%% SOURCE
*/
:- was_export(expand_input_proof/2).
% expand_input_proof(+Numbers,-ProofSkeleton)
%   Turns a list of clause numbers into a partial proof skeleton:
%   each number N becomes an open step [N|_].  The base case leaves
%   the remainder of the skeleton unconstrained.
expand_input_proof([],_).
expand_input_proof([Num|Nums],[[Num|_]|Steps]) :-
    expand_input_proof(Nums,Steps).
%%% ***
%%% ****if* PTTP/contract_output_proof
%%% SOURCE
:- was_export(contract_output_proof/2).
% contract_output_proof(+ProofDiffList,-Numbers)
%   Inverse of expand_input_proof/2: extracts the clause number of each
%   step from the proof difference list [Prf|PrfEnd].  Terminates when
%   the open tail Prf is identical (==) to PrfEnd.
contract_output_proof([Prf|PrfEnd],Proof) :-
Prf == PrfEnd,
!,
Proof = [].
contract_output_proof([[[N,_,_,_]|L]|PrfEnd],[N|L1]) :-
contract_output_proof([L|PrfEnd],L1).
%%% ***
%%% ****if* PTTP/proof_length
%%% SOURCE
% proof_length(+ProofDiffList,-N)
%   N is the number of inference steps in the proof; the query
%   pseudo-step is not counted.
proof_length([Prf|PrfEnd],N) :-
Prf == PrfEnd,
!,
N = 0.
proof_length([[[_,X,_,_]|L]|PrfEnd],N) :-
proof_length([L|PrfEnd],N1),
(X == query -> N is N1; N is N1 + 1).
%%% ***
%%% ****if* PTTP/proof_depth
%%% SOURCE
% proof_depth(+ProofDiffList,-N)
%   N is the maximum goal-nesting depth over all steps, measured as
%   the combined length of the step's ancestor lists.
proof_depth([Prf|PrfEnd],N) :-
Prf == PrfEnd,
!,
N = 0.
proof_depth([[[_,_,PosAnc,NegAnc]|L]|PrfEnd],N) :-
proof_depth([L|PrfEnd],N1),
list_length_pttp(PosAnc,N2),
list_length_pttp(NegAnc,N3),
N4 is N2 + N3,
max(N1,N4,N).
%%% ***
%%% ****if* PTTP/pttp_functor
%%% DESCRIPTION
%%% Sometimes the `functor' predicate doesn't work as expected and
%%% a more comprehensive predicate is needed. The `pttp_functor'
%%% predicate overcomes the problem of functor(X,13,0) causing
%%% an error in Symbolics Prolog. You may need to use it if
%%% `functor' in your Prolog system fails to construct or decompose
%%% terms that are numbers or constants.
%%% SOURCE
% pttp_functor(?Term,?F,?N)
%   Portable functor/3: some Prologs (historically, Symbolics Prolog)
%   mishandle functor(X,13,0), so atomic "functors" with arity 0 are
%   special-cased in both construction and decomposition directions.
pttp_functor(Term,F,N) :-
is_ftNonvar(F),
atomic(F),
N == 0,
!,
Term = F.
pttp_functor(Term,F,N) :-
is_ftNonvar(Term),
atomic(Term),
!,
F = Term,
N = 0.
% general case: delegate to the built-in functor/3
pttp_functor(Term,F,N) :-
functor(Term,F,N).
%%% ***
%%% ****if* PTTP/list_append
%%% SOURCE
% list_append(?Front,?Back,?Whole)
%   Local copy of append/3: Whole is Front followed by Back.
%   Clause order (recursive first) preserved from the original.
list_append([Head|FrontRest],Back,[Head|WholeRest]) :-
    list_append(FrontRest,Back,WholeRest).
list_append([],Back,Back).
%%% ***
%%% ****if* PTTP/list_reverse
%%% SOURCE
% list_reverse(+List,-Reversed)
%   Accumulator-based list reversal.
list_reverse(List,Reversed) :-
    revappend(List,[],Reversed).
% revappend(+List,+Acc,-Result)
%   Result is the reverse of List prepended onto Acc.
revappend([Head|Tail],Acc,Result) :-
    revappend(Tail,[Head|Acc],Result).
revappend([],Acc,Acc).
%%% ***
%%% ****if* PTTP/list_union
%%% SOURCE
list_union([X|L1],L2,L3) :-
identical_member_special(X,L2),
!,
list_union(L1,L2,L3).
list_union([X|L1],L2,[X|L3]) :-
list_union(L1,L2,L3).
list_union([],L,L).
%%% ***
%%% ****if* PTTP/list_length_pttp
%%% SOURCE
% list_length_pttp(+List,-N)
%   N is the number of elements of the proper list List.
list_length_pttp([_|Tail],N) :-
    list_length_pttp(Tail,TailLen),
    N is TailLen + 1.
list_length_pttp([],0).
%%% ***
%%% ****if* PTTP/min
%%% SOURCE
% min(+A,+B,-Smaller)
%   Smaller is the lesser of the two numbers A and B
%   (A when they are equal).
min(A,B,Smaller) :-
    (   A =< B
    ->  Smaller = A
    ;   Smaller = B
    ).
%%% ***
%%% ****if* PTTP/max
%%% SOURCE
% max(+A,+B,-Larger)
%   Larger is the greater of the two numbers A and B
%   (B when they are equal, matching the original's tie-breaking).
max(A,B,Larger) :-
    (   A =< B
    ->  Larger = B
    ;   Larger = A
    ).
%%% ***
%%% ****if* PTTP/conjoin_pttp
%%% SOURCE
:- was_export(conjoin_pttp/3).
% conjoin_pttp(+A,+B,-C)
%   C is the simplified conjunction of formulas A and B:
%   identical conjuncts collapse, variables are wrapped in varcall/1,
%   infer_by/1 markers absorb into the other conjunct, true is the
%   identity, false annihilates (via negated_literal in one case),
%   otherwise C = (A , B).  Clause order is significant.
conjoin_pttp(A,B,C) :- A==B, !, C=A.
conjoin_pttp(A,B,C) :- var(A),!,conjoin_pttp(varcall(A),B,C).
conjoin_pttp(A,B,C) :- var(B),!,conjoin_pttp(A,varcall(B),C).
conjoin_pttp(infer_by(_),B,B) :- !.
% special-cased pair: (false , true) becomes an explicit call(false)
conjoin_pttp(false,true,call(false)).
conjoin_pttp(A,B,C) :- B==false,!,conjoin_pttp(false,A,C).
% NOTE(review): false conjoined with B yields the negation of B here,
% not plain false - apparently deliberate for contrapositive building.
conjoin_pttp(A,B,C) :- A==false,!,must(negated_literal(B,C)).
conjoin_pttp(A,B,C) :-
A == true ->
C = B;
B == true ->
C = A;
A == false ->
C = false;
B == false ->
C = false;
%true ->
C = (A , B).
%%% ***
%%% ****if* PTTP/pttp_disjoin
%%% SOURCE
% pttp_disjoin(+A,+B,-C)
%   C is the simplified disjunction of formulas A and B:
%   true absorbs, false is the identity, otherwise C = (A ; B).
%   Comparisons use == so unbound arguments fall through to (A ; B).
pttp_disjoin(F1,F2,Disj) :-
    (   F1 == true  -> Disj = true
    ;   F2 == true  -> Disj = true
    ;   F1 == false -> Disj = F2
    ;   F2 == false -> Disj = F1
    ;   Disj = (F1 ; F2)
    ).
% Tables pairing "positive" wrapper functors with their negative
% counterparts.  is_p_to_n(Pos,Neg) drives negated_functor/2 and the
% builtin/wrapper classification; fact order is the search order.
is_builtin_p_to_n('mudEquals','not_mudEquals').
%is_p_to_n_2way('answerable_t','unknown_t').
is_p_to_n_2way('askable_t','fallacy_t').
%ODD is_p_to_n(',','not_both_t').
%ODD is_p_to_n(';','not_either_t').
%ODD is_p_to_n('&','not_both_t').
%ODD is_p_to_n('v','not_either_t').
%ODD is_p_to_n('both_t','not_both_t').
%ODD is_p_to_n('not_both_t',',').
% is_p_to_n('true_t','not_possible_t').
% is_p_to_n('not_true_t','possible_t').
is_p_to_n('possible_t','not_possible_t').
is_p_to_n('not_possible_t','possible_t').
%is_p_to_n(P,N):-is_p_to_n_2way(P,N).
%is_p_to_n(P,N):-is_p_to_n_2way(N,P).
is_p_to_n('not_unknown_t','not_answerable_t').
% TODO is_p_to_n('not_true_t','possible_t').
is_p_to_n('proven_in','impossible_in').
is_p_to_n(P,N):-is_builtin_p_to_n(P,N).
is_p_to_n('isa','not_mudIsa').
% generic rule: each is_p_to_not/1 functor pairs with its not_ form
is_p_to_n(P0,N0):-is_p_to_not(P),atom_concat('not_',P,N),P0=P,N0=N.
% suffix/prefix rewriting via is_p_to_n1 is disabled (leading false).
is_p_to_n(P,N):- false,is_p_to_n1(P,N).
% Functors whose negative form is obtained by prefixing not_.
is_p_to_not('asserted_t').
is_p_to_not('possible_t').
is_p_to_not('true_t').
is_p_to_not('not_true_t').
is_p_to_not('fallacy_t').
is_p_to_not('answerable_t').
is_p_to_not('unknown_t').
is_p_to_not('askable_t').
is_p_to_not('pred_isa_t').
is_p_to_not('pred_t').
% is_p_or_not(+F): F occurs on either side of an is_p_to_n pair.
is_p_or_not(F):-is_p_to_n(P,N),(F=P;F=N).
% possible_t TODO
% is_p_to_n1: derive Pos/Neg pairs by rewriting a known suffix or
% prefix (see is_p_to_n0 table); currently unused (see above).
is_p_to_n1(P,N):-atom(P),is_p_to_n0(PF,NF),atom_concat(Root,PF,P),atom_concat(Root,NF,N).
is_p_to_n1(P,N):-atom(N),is_p_to_n0(PF,NF),atom_concat(Root,NF,N),atom_concat(Root,PF,P).
is_p_to_n1(P,N):-atom(P),is_p_to_n0(PF,NF),atom_concat(PF,Root,P),atom_concat(NF,Root,N).
is_p_to_n1(P,N):-atom(N),is_p_to_n0(PF,NF),atom_concat(NF,Root,N),atom_concat(PF,Root,P).
is_p_to_n0('_pos','_neg').
is_p_to_n0('true_','false_').
is_p_to_n0('_true','_false').
is_p_to_n0('pos_','neg_').
is_p_to_n0('when_','unless_').
is_p_to_n0('possible_','impossible_').
%is_p_simple('not_proven_not_t','possible_t').
%is_p_simple('not_possible_t','not_true_t').
%is_p_simple('not_unknown_t','answerable_t').
%is_p_simple('not_answerable_t','unknown_t').
% is_p_simple: identity simplification hook (all rewrites disabled).
is_p_simple(X,X).
%%% ***
%%% ****if* PTTP/negated_functor
%%% SOURCE
% negated_functor0/2: table-driven negation hook, currently disabled
% (cut-fail); the commented clauses show the intended table lookups.
negated_functor0(_,_):-!,fail.
%negated_functor0(true_t,not_possible_t).
%negated_functor0(not_true_t,possible_t).
%negated_functor0(F,NotF) :- is_p_to_n(F,NotF).
%negated_functor0(F,NotF) :- is_p_to_n(NotF,F).
:- was_export(negated_functor/2).
% negated_functor(+F,-NotF)
%   NotF is the negation of functor F, formed by adding or stripping a
%   not_ prefix.  int_ prefixes are preserved around the negation.
%   Throws on an unbound F and on the raw - / ~ operators; logical
%   connectives are rejected by the sanity check below.
negated_functor(F,NotF) :- var(F),!,trace_or_throw(negated_functor(F,NotF)).
%negated_functor(F,NotF) :- sanity(atom(F)),atom_concat('not_',Now,F),!,must(NotF=Now).
negated_functor(F,SNotF) :- negated_functor0(F,NotF),!,is_p_simple(NotF,SNotF).
negated_functor((-),_):-!,dtrace(negated_functor((-),_)),fail.
negated_functor((~),_):-!,dtrace(negated_functor((~),_)),fail.
% int_Foo negates to int_NotFoo (the internal prefix is kept outside)
negated_functor(F,NotF) :- atom_concat('int_',Now,F),!,negated_functor(Now,Then),atom_concat('int_',Then,NotF),!.
% main case: toggle the not_ prefix via character-list append - the
% list_append works either direction, so not_p <-> p both ways.
negated_functor(F,NotF) :- must( \+member(F,[&,(,),(;),(v),(all),(:-)])),
name(F,L),
name('not_',L1),
(list_append(L1,L2,L) ->
true;
%true ->
list_append(L1,L,L2)),
name(NotF,L2).
% Safety nets: a 2nd-order holds wrapper reaching here is an error.
negated_functor(F,NotF) :- is_2nd_order_holds_pttp(F),trace_or_throw(negated_functor(F,NotF) ).
negated_functor(F,NotF) :- is_2nd_order_holds_pttp(NotF),trace_or_throw(negated_functor(F,NotF) ).
%%% ***
%%% ****if* PTTP/negated_literal
%%% SOURCE
% negated_literal(?A,?B)
%   B is the negation of literal A (and vice versa - several clauses
%   make the relation usable in both directions).  Explicit not/1 and
%   -/1 wrappers are peeled; variables negate to -(A); compound
%   sentences (connectives) are pushed through NNF; plain literals are
%   negated by negating their functor.  Clause order is significant.
negated_literal(A,B):-var(A),!,trace_or_throw(var_negated_literal(A,B)),!.
negated_literal(not(A),A):-!.
negated_literal(-(A),A):-!.
negated_literal(A,-(A)):-is_ftVar(A),!.
negated_literal(-(A),(A)):-is_ftVar(A),!.
negated_literal(A,-(A)):-atom(A),A\=(~),A\=(-),!.
% sentence with a logical connective: negate via NNF conversion
negated_literal(A,B):- functor(A,F,_Arity),member(F,[&,(,),(;),(v),(all),(:-)]),must_det_l((as_dlog(A,AA),IN=not(AA), call((nnf('$VAR'('KB'),IN,BB),BB \=@= IN,baseKB:as_prolog(BB,B))))).
negated_literal(not(A),B):-negated_literal(A,AA),!,negated_literal_0(AA,B),!.
negated_literal(-A,B):-negated_literal(A,AA),!,negated_literal_0(AA,B),!.
negated_literal(A,B):- var(B),!,negated_literal_0(A,B),!.
% reversed-direction case: B given as the negation of -A
negated_literal(B,-A):-negated_literal(A,AA),!,negated_literal_0(AA,B),!.
negated_literal(A,B):- negated_literal_0(A,B),!.
negated_literal(A,B):- ground(B),not(ground(A)),!,negated_literal(B,A),!.
% negated_literal_0(+Lit,?NotLit)
%   Negates a single literal by negating its functor; when NotLit is
%   already bound, its arguments are checked for identity instead.
negated_literal_0(Lit,NotLit) :-
Lit =.. [F1|L1],
negated_functor(F1,F2),
(is_ftVar(NotLit) ->
wrap_univ(NotLit , [F2|L1]);
%true ->
( wrap_univ(NotLit , [F2|L2]),
L1 == L2) ).
%%% ***
%%% ****if* PTTP/is_negative_functor
%%% SOURCE
% is_negative_functor(+F)
%   F denotes a negative literal: a known holds-false wrapper, or any
%   functor whose name starts with not_.
is_negative_functor(F) :- is_holds_false_pttp(F),!.
is_negative_functor(F) :-
name(F,L),
name('not_',L1),
list_append(L1,_,L).
%%% ***
%%% ****if* PTTP/is_negative_literal
%%% SOURCE
% is_negative_literal(+Lit): Lit's functor is negative.
is_negative_literal(Lit) :-
functor(Lit,F,_),
is_negative_functor(F).
%%% ***
%%% ****if* PTTP/internal_functor
%%% SOURCE
% internal_functor(+P): P's name starts with the int_ prefix used for
%   PTTP-internal procedures.
internal_functor(P) :-
name(P,L),
name('int_',L1),
list_append(L1,_,L).
% internal_functor(+P,-IntP): IntP is P with int_ prepended.
internal_functor(P,IntP) :-
name(P,L),
name('int_',L1),
list_append(L1,L,L2),
name(IntP,L2).
%%% ***
%%% ****if* PTTP/apply_to_conjuncts
%%% SOURCE
% apply_to_conjuncts(+Wff,+P,-Wff1)
%   Maps partial goal P over every conjunct of Wff: for each leaf
%   conjunct C, calls P(C,C1) (built via univ/append) and conjoins the
%   results with simplification.
apply_to_conjuncts(Wff,P,Wff1) :-
Wff = (A , B) ->
apply_to_conjuncts(A,P,A1),
apply_to_conjuncts(B,P,B1),
conjoin_pttp(A1,B1,Wff1);
%true ->
P =.. G,
list_append(G,[Wff,Wff1],G1),
T1 =.. G1,
call(T1).
%%% ***
%%% ****if* PTTP/apply_to_elements
%%% SOURCE
% apply_to_elements(+List,+P,-Result)
%   Calls P(X,X1) for every element X of List and conjoins all X1
%   results (true for the empty list).
apply_to_elements([X|L],P,Result) :-
P =.. G,
list_append(G,[X,X1],G1),
T1 =.. G1,
call(T1),
apply_to_elements(L,P,L1),
conjoin_pttp(X1,L1,Result).
apply_to_elements([],_,true).
%%% ***
%%% ****if* PTTP/write_clause
%%% SOURCE
% write_clause(+Clause)
%   Prints Clause on a new line, terminated by a period.
write_clause(A) :-
    nl,
    write(A),
    % FIX: was write(.) - a bare solo '.' as an atom argument is not
    % valid ISO Prolog syntax; the quoted atom prints identically.
    write('.').
% write_clause(+Clause,-Ignored)
%   2-ary wrapper so write_clause can be used as an
%   apply_to_conjuncts argument.
write_clause(A,_) :-
    write_clause(A).
%%% ***
%%% ****if* PTTP/write_clause_with_number
%%% SOURCE
% write_clause_with_number(+Clause,+WffNum)
%   Prints Clause on a new line preceded by its right-aligned formula
%   number.  A copy is numbervar'd (SWI-specific attvar(bind) and
%   singletons(true) options) so variables print readably without
%   binding the original term.
write_clause_with_number(A,WffNum) :-
    nl,
    write_indent_for_number(WffNum),
    write(WffNum),
    write('  '),
    copy_term(A,AA),
    numbervars(AA,0,_,[attvar(bind),singletons(true)]),
    write(AA),
    % FIX: was write(.) - a bare solo '.' as an atom argument is not
    % valid ISO Prolog syntax; the quoted atom prints identically.
    write('.').
% write_indent_for_number(+N)
%   Pads with spaces so numbers below 100 and 10 stay right-aligned.
write_indent_for_number(N) :-
    ((number(N) , N < 100) -> write(' ') ; true),
    ((number(N) , N < 10) -> write(' ') ; true).
%%% ***
%%% ****if* PTTP/timed_call
%%% DESCRIPTION
%%% A query can be timed by timed_call(query,'Proof').
%%% NOTES
%%% assumes that statistics(cputime,T) binds T to run-time in seconds
%%% different Prolog systems have different ways to get this information
%%% SOURCE
:- was_export(timed_call/2).
:- meta_predicate(timed_call(0,+)).
% timed_call(:Goal,+Type)
%   Runs Goal once, reports its CPU time labelled with Type, and
%   succeeds only if Goal succeeded.  SWI-specific statistics/2 keys
%   are used; the commented lines show the Quintus/SICStus equivalents.
timed_call(X,Type) :-
statistics(cputime,T1),	%SWI Prolog
(call(time(X)) -> V = success ; V = failure),	%SWI Prolog
statistics(cputime,T2),	%SWI Prolog
Secs is T2 - T1,	%SWI Prolog
% statistics(runtime,[T1,_]),	%Quintus/SICStus Prolog
% (call(X) -> V = success ; V = failure),	%Quintus/SICStus Prolog
% statistics(runtime,[T2,_]),	%Quintus/SICStus Prolog
% Secs is (T2 - T1) / 1000.0,	%Quintus/SICStus Prolog
nl,
write(Type),
write(' time: '),
write(Secs),
write(' seconds'),
nl,
% fail (after reporting) when the goal failed
V = success.
%%% ***
%%% ****if* PTTP/write_search_progress
%%% SOURCE
% write_search_progress(+Level)
%   Reports the current iterative-deepening cost bound.
%   NOTE(review): output is currently disabled - the first clause
%   silently succeeds for Level > 100 and the second (cut-true)
%   succeeds for everything else, so the printing clause below is
%   unreachable.  Remove the second clause to re-enable progress dots.
write_search_progress(Level) :- Level>100,!.
write_search_progress(_Level) :- !.
write_search_progress(Level) :-
% write('cost '),
write(Level),write('.'),current_output(S),flush_output(S).
%%% ***
%%% ****if* PTTP/pttp_builtin
%%% DESCRIPTION
%%% List of pttp_builtin predicates that can appear in clause bodies.
%%% No extra arguments are added for ancestor goals or depth-first
%%% iterative-deepening search. Also, if a clause body is
%%% composed entirely of pttp_builtin goals, the head is not saved
%%% as an ancestor for use in reduction or pruning.
%%% This list can be added to as required.
%%% SOURCE
% pttp_builtin(+Term)
%   Term's predicate may appear in clause bodies without PTTP's extra
%   ancestor/depth/proof arguments.
pttp_builtin(T) :-
functor(T,F,N),
pttp_builtin(F,N).
% pttp_builtin(+F,+A)
%   Guarded fact table: cut-fail clauses explicitly EXCLUDE predicates
%   that must be compiled by PTTP (hybrid predicates, isa, query,
%   wrapper functors) even though later clauses might admit them.
%   Clause order is therefore significant.
pttp_builtin(V,A):-is_ftVar(V),!,trace_or_throw(pttp_builtin(V,A)).
pttp_builtin(!,0).
pttp_builtin(F,A):- mpred_prop(F,A,prologHybrid),!,fail.
pttp_builtin(isa,2):-!,fail.
pttp_builtin(isa,_):-!,fail.
pttp_builtin(S2,_):-is_p_to_not(S2),!,fail.
pttp_builtin(call_proof,2).
pttp_builtin(query,0):-!,fail.
pttp_builtin(true,0).
pttp_builtin(false,0).
pttp_builtin(fail,0).
pttp_builtin(succeed,0).
pttp_builtin(dtrace,0).
pttp_builtin(atom,1).
pttp_builtin(integer,1).
pttp_builtin(number,1).
pttp_builtin(F,_):-is_p_or_not(F),!,fail.
pttp_builtin(not_asserted_t,_):-!,fail.
pttp_builtin(clause_u,1).
pttp_builtin(atomic,1).
pttp_builtin(constant,1).
pttp_builtin(functor,3).
pttp_builtin(arg,3).
pttp_builtin(var,1).
%pttp_builtin(->,2).
%pttp_builtin(->,3).
pttp_builtin(nonvar,1).
pttp_builtin(call,1).
pttp_builtin(=,2).
pttp_builtin(\=,2).
pttp_builtin(==,2).
pttp_builtin(\==,2).
pttp_builtin(=\=,2).
pttp_builtin(>,2).
pttp_builtin(<,2).
pttp_builtin(>=,2).
pttp_builtin(loop_check,_).
pttp_builtin(=<,2).
pttp_builtin(is,2).
pttp_builtin(display,1).
pttp_builtin(write,1).
pttp_builtin(nl,0).
pttp_builtin(only_if_pttp,0).
% any atom of arity 0 counts as builtin
pttp_builtin(ANY,A):-atom(ANY),A==0.
% PTTP's own control literals
pttp_builtin(infer_by,_).
pttp_builtin(search_cost,_).
pttp_builtin(mudEquals,2):-!.
pttp_builtin(F,A):-functor(P,F,A),prequent(P),!.
pttp_builtin(F,A):-is_builtin_p_to_n(P,N),member(F,[P,N]),member(A,[2,3,4]).
pttp_builtin(test_and_decrement_search_cost,_).
pttp_builtin(unify,_).
pttp_builtin(identical_member_special,_).
pttp_builtin(identical_member_special_loop_check,_).
% module-qualified: strip the module and retry
pttp_builtin(M:P,A):-atom(M),!,pttp_builtin(P,A).
pttp_builtin(F,A):- (mpred_prop(F,A,prologBuiltin)),!. %,fail.
% TODO pttp_builtin(F,_):- (mpred_prop(F,A,prologDynamic)),!. %,fail.
pttp_builtin(unifiable_member,_).
% TODO pttp_builtin(t,_).
%pttp_builtin(F,A):-mpred_prop(F,A,prologPTTP),!,fail.
%pttp_builtin(F,A):-mpred_prop(F,A,prologKIF),!,fail.
% last resort: an existing predicate justified by builtin_why/4
% (logs a warning so the classification can be reviewed)
pttp_builtin(F,A):-current_predicate(F/A),functor(P,F,A),builtin_why(P,F,A,Why),!,dmsg(todo(warn(builtin_why(F,A,Why)))).
%%% ***
% builtin_why(+Pred,+F,+A,-Why)
%   Explains why an existing predicate F/A may be treated as builtin;
%   int_query/query and int_*-prefixed names are explicitly refused.
builtin_why(_,int_query,_,_):-!,fail.
builtin_why(_,query,_,_):-!,fail.
builtin_why(_,F,_,int_):- atom_concat(_,'_int',F),!.
builtin_why(_,F,_,int_):- atom_concat('int_',_,F),!,fail.
builtin_why(P,_,_,meta_predicate(P)):- predicate_property(P,meta_predicate(P)).
builtin_why(P,_,_,thread_local):- predicate_property(P,thread_local).
builtin_why(P,_,_,source_file(F)):- source_file(P,F).
builtin_why(P,_,_,built_in):- real_builtin_predicate(P).
builtin_why(P,_,_,transparent):- predicate_property(P,transparent).
% builtin_why(P,_,_,number_of_rules(N)):- predicate_property(P,number_of_rules(N)),N>0.
% NOTE(review): this arity-2 clause does not match the builtin_why/4
% calls above - it appears vestigial (never reached via pttp_builtin).
builtin_why(X,0):-atom(X).
%builtin_why(P,2,t(P,2)):-t(P,_,_),!,fail.
%builtin_why(P,3,t(P,3)):-t(P,_,_,_),!,fail.
% -----------------------------------------------------------------
% pttp_nnf(+Fml,?NNF)
%
% Fml is a first-order formula and NNF its Skolemized negation
% normal form.
%
% Syntax of Fml:
% negation: '-', disj: 'v', conj: '&', impl: '=>', eqv: '<=>',
% quant. 'all(X,<Formula>)', where 'X' is a prolog variable.
%
% Syntax of NNF: negation: '-', disj: ';', conj: ',', quant.:
% 'all(X,<Formula>)', where 'X' is a prolog variable.
%
% Example: pttp_nnf(ex(Y, all(X, (f(Y) => f(X)))),NNF).
% NNF = all(_A,(-(f(all(X,f(ex)=>f(X))));f(_A)))) ?
:- was_export(pttp_nnf/2).
% pttp_nnf(+Fml, -NNF)
% Top-level entry: distribute conversion over a conjunction, otherwise
% convert a single formula with an empty free-variable list.
pttp_nnf((A,B),(C,D)):- must(is_ftNonvar(A)), !, pttp_nnf(A,C), pttp_nnf(B,D).
pttp_nnf(Fml,NNFOUT) :- pttp_nnf(Fml,[],NNF,_),NNFOUT=NNF.
% operator declarations for the first-order connectives used below
:- op(400,fy,-), % negation
op(500,xfy,&), % conjunction
op(600,xfy,v), % disjunction
op(650,xfy,=>), % implication
op(680,xfy,<=>). % equivalence
% -----------------------------------------------------------------
% pttp_nnf(+Fml,+FreeV,-NNF,-Paths)
%
% Fml,NNF: See above.
% FreeV: List of free variables in Fml.
% Paths: Number of disjunctive paths in Fml.
% pttp_nnf_pre_clean(+Type, +Term, -Cleaned, -Vars)
% Normalizes a formula before NNF conversion: strips pttp/1 wrappers,
% maps surface functors through pttp_nnf_pre_clean_functor/3, and
% collects any variables that step introduces.  Type is 'sent' at
% sentence level and 'arg' below a corrected sentence functor.
pttp_nnf_pre_clean(_Type,Atomic,Atomic,[]):-atomic(Atomic),!.
pttp_nnf_pre_clean(_Type,Atomic,Atomic,[]):-is_ftVar(Atomic),!.
pttp_nnf_pre_clean(Type,pttp(A),AA,Vars):- !,pttp_nnf_pre_clean(Type,A,AA,Vars).
pttp_nnf_pre_clean(Type,[A|B],[AA|BB],Vars):-!,
pttp_nnf_pre_clean(Type,A,AA,Vars1),
pttp_nnf_pre_clean(Type,B,BB,Vars2),
append(Vars1,Vars2,Vars).
% logical connective: clean the functor, recurse on args at 'sent' level
pttp_nnf_pre_clean(_Type,C,CC,Vars):-
C=..[A|B],
logical_functor_pttp(A),!,
pttp_nnf_pre_clean_functor(A,AA,Vars1),!,
pttp_nnf_pre_clean(sent,B,BB,Vars2),
append(Vars1,Vars2,Vars),
CC=..[AA|BB],!.
% sentence position: run correct_pttp/2 first, args cleaned as 'arg'
pttp_nnf_pre_clean(Type,CIN,CC,Vars):-
Type == sent,
correct_pttp(CIN,C),
C=..[A|B],
pttp_nnf_pre_clean_functor(A,AA,Vars1),!,
pttp_nnf_pre_clean(arg,B,BB,Vars2),
append(Vars1,Vars2,Vars),
CC=..[AA|BB],!.
% fallback: clean functor and recurse at the same level
pttp_nnf_pre_clean(Type,C,CC,Vars):-
C=..[A|B],
pttp_nnf_pre_clean_functor(A,AA,Vars1),!,
pttp_nnf_pre_clean(Type,B,BB,Vars2),
append(Vars1,Vars2,Vars),
CC=..[AA|BB],!.
% pttp_nnf_post_clean(+Term, -Cleaned, -Vars)
% Walks the converted formula: strips pttp/1 wrappers, pushes explicit
% '-' through negated_literal/2, and rebuilds compound terms.
pttp_nnf_post_clean(Atomic,Atomic,[]):-atomic(Atomic),!.
pttp_nnf_post_clean(Atomic,Atomic,[]):-is_ftVar(Atomic),!.
pttp_nnf_post_clean(pttp(A),AA,Vars):- !,pttp_nnf_post_clean(A,AA,Vars).
pttp_nnf_post_clean(-(A),NN,Vars):- !,pttp_nnf_post_clean(A,AA,Vars),negated_literal(AA,NN).
pttp_nnf_post_clean((A,B),(AA , BB),Vars):-
pttp_nnf_post_clean(A,AA,Vars1),
pttp_nnf_post_clean(B,BB,Vars2),
append(Vars1,Vars2,Vars).
pttp_nnf_post_clean((A;B),(AA ; BB),Vars):-
pttp_nnf_post_clean(A,AA,Vars1),
pttp_nnf_post_clean(B,BB,Vars2),
append(Vars1,Vars2,Vars).
pttp_nnf_post_clean([A|B],[AA|BB],Vars):-
pttp_nnf_post_clean(A,AA,Vars1),
pttp_nnf_post_clean(B,BB,Vars2),
append(Vars1,Vars2,Vars).
% clause intentionally disabled (leading fail)
pttp_nnf_post_clean((A&B),(AA , BB),Vars):- fail,
pttp_nnf_post_clean(A,AA,Vars1),
pttp_nnf_post_clean(B,BB,Vars2),
append(Vars1,Vars2,Vars).
% clause intentionally disabled (leading fail)
pttp_nnf_post_clean((A v B),(AA ; BB),Vars):- fail,
pttp_nnf_post_clean(A,AA,Vars1),
pttp_nnf_post_clean(B,BB,Vars2),
append(Vars1,Vars2,Vars).
% generic compound: functor kept, arguments cleaned as a list
pttp_nnf_post_clean(C,CC,Vars):-
C=..[A|B],
A=AA,
pttp_nnf_post_clean(B,BB,Vars),
CC=..[AA|BB],!.
% pttp_nnf(+Fml, +FreeV, -NNF, -Paths)
% Pipeline: pre-clean the surface form, convert to NNF, post-clean.
pttp_nnf(Fml,FreeV,CleanNNF,Paths):-
pttp_nnf_pre_clean(sent,Fml,Clean,FreeV),
pttp_nnf_clean(Clean,FreeV,NNF,Paths),
pttp_nnf_post_clean(NNF,CleanNNF,FreeV).
% pttp_nnf_clean(+Fml, +FreeV, -NNF, -Paths)
% Core NNF conversion: pushes negation inward, eliminates => and <=>,
% Skolemizes existentials, and counts disjunctive paths.
pttp_nnf_clean(Atomic,_,Atomic,1):-atomic(Atomic),!.
pttp_nnf_clean(Atomic,_,Atomic,1):-is_ftVar(Atomic),!.
pttp_nnf_clean(Fml,FreeV,NNF,Paths) :-
% rewrite one connective layer, then recurse on the result
(Fml = -(-A) -> Fml1 = A;
Fml = -all(X,F) -> Fml1 = ex(X,-F);
Fml = -ex(X,F) -> Fml1 = all(X,-F);
Fml = -(A v B) -> Fml1 = (-A & -B);
Fml = -(A & B) -> Fml1 = (-A v -B);
Fml = (A => B) -> Fml1 = (-A v B);
Fml = -(A => B) -> Fml1 = A & -B;
Fml = (A <=> B) -> Fml1 = (A & B) v (-A & -B);
Fml = -(A <=> B) -> Fml1 = (A & -B) v (-A & B)),!,
pttp_nnf_clean(Fml1,FreeV,NNF,Paths).
% universal quantifier: add X to the free variables and recurse
pttp_nnf_clean(all(X,F),FreeV,all(X,NNF),Paths) :- !,
pttp_nnf_clean(F,[X|FreeV],NNF,Paths).
% existential quantifier: Skolemize X with an sk/2 term via copy_term
% (FreeV is kept shared so the Skolem term depends on the free vars)
pttp_nnf_clean(ex(X,Fml),FreeV,NNF,Paths) :- !,
copy_term((X,Fml,FreeV),(sk(X,Fml),Fml1,FreeV)),
pttp_nnf_clean(Fml1,FreeV,NNF,Paths).
% conjunction: path counts multiply
pttp_nnf_clean((A & B),FreeV,(NNF1,NNF2),Paths) :- !,
pttp_nnf_clean(A,FreeV,NNF1,Paths1),
pttp_nnf_clean(B,FreeV,NNF2,Paths2),
Paths is Paths1 * Paths2.
% disjunction: path counts add; branch with fewer paths is put first
pttp_nnf_clean((A v B),FreeV,NNF,Paths) :- !,
pttp_nnf_clean(A,FreeV,NNF1,Paths1),
pttp_nnf_clean(B,FreeV,NNF2,Paths2),
Paths is Paths1 + Paths2,
(Paths1 > Paths2 -> NNF = (NNF2;NNF1);
NNF = (NNF1;NNF2)).
% anything else is treated as a literal (one path)
pttp_nnf_clean(Lit,_,Lit,1).
% :- ensure_loaded(dbase_i_mpred_pttp_precompiled).
:- fixup_exports.
| TeamSPoon/logicmoo_base | prolog/logicmoo/pttp/dbase_i_mpred_pttp_statics.pl | Perl | mit | 43,363 |
#------------------------------------------------------------------------------
# File: Audible.pm
#
# Description: Read metadata from Audible audio books
#
# Revisions: 2015/04/05 - P. Harvey Created
#
# References: 1) https://github.com/jteeuwen/audible
# 2) https://code.google.com/p/pyaudibletags/
# 3) http://wiki.multimedia.cx/index.php?title=Audible_Audio
#------------------------------------------------------------------------------
package Image::ExifTool::Audible;
use strict;
use vars qw($VERSION);
use Image::ExifTool qw(:DataAccess :Utils);
$VERSION = '1.02';
sub ProcessAudible_meta($$$);
sub ProcessAudible_cvrx($$$);
# Main .AA tag table: known keys from the metadata dictionary (chunk 2)
# plus two tags synthesized by ProcessAA() from other chunks (6 and 11).
%Image::ExifTool::Audible::Main = (
    GROUPS => { 2 => 'Audio' },
    NOTES => q{
        ExifTool will extract any information found in the metadata dictionary of
        Audible .AA files, even if not listed in the table below.
    },
    # tags found in the metadata dictionary (chunk 2)
    pubdate => { Name => 'PublishDate', Groups => { 2 => 'Time' } },
    pub_date_start => { Name => 'PublishDateStart', Groups => { 2 => 'Time' } },
    author => { Name => 'Author', Groups => { 2 => 'Author' } },
    copyright => { Name => 'Copyright', Groups => { 2 => 'Author' } },
    # also seen (ref PH):
    # product_id, parent_id, title, provider, narrator, price, description,
    # long_description, short_title, is_aggregation, title_id, codec, HeaderSeed,
    # EncryptedBlocks, HeaderKey, license_list, CPUType, license_count, <12 hex digits>,
    # parent_short_title, parent_title, aggregation_id, short_description, user_alias
    # information extracted from other chunks
    _chapter_count => { Name => 'ChapterCount' }, # from chunk 6
    _cover_art => { # from chunk 11
        Name => 'CoverArt',
        Groups => { 2 => 'Preview' },
        Binary => 1,
    },
);
# 'tags' atoms observed in Audible .m4b audio books (ref PH)
%Image::ExifTool::Audible::tags = (
    GROUPS => { 0 => 'QuickTime', 2 => 'Audio' },
    NOTES => 'Information found in "tags" atom of Audible M4B audio books.',
    meta => {
        Name => 'Audible_meta',
        SubDirectory => { TagTable => 'Image::ExifTool::Audible::meta' },
    },
    cvrx => {
        Name => 'Audible_cvrx',
        SubDirectory => { TagTable => 'Image::ExifTool::Audible::cvrx' },
    },
    tseg => {
        Name => 'Audible_tseg',
        SubDirectory => { TagTable => 'Image::ExifTool::Audible::tseg' },
    },
);
# 'meta' information observed in Audible .m4b audio books (ref PH)
%Image::ExifTool::Audible::meta = (
    PROCESS_PROC => \&ProcessAudible_meta,
    GROUPS => { 0 => 'QuickTime', 2 => 'Audio' },
    NOTES => 'Information found in Audible M4B "meta" atom.',
    Album => 'Album',
    ALBUMARTIST => { Name => 'AlbumArtist', Groups => { 2 => 'Author' } },
    Artist => { Name => 'Artist', Groups => { 2 => 'Author' } },
    Comment => 'Comment',
    Genre => 'Genre',
    itunesmediatype => { Name => 'iTunesMediaType', Description => 'iTunes Media Type' },
    SUBTITLE => 'Subtitle',
    Title => 'Title',
    TOOL => 'CreatorTool',
    Year => { Name => 'Year', Groups => { 2 => 'Time' } },
    track => 'ChapterName', # (found in 'meta' of 'tseg' atom)
);
# 'cvrx' information observed in Audible .m4b audio books (ref PH)
%Image::ExifTool::Audible::cvrx = (
    PROCESS_PROC => \&ProcessAudible_cvrx,
    GROUPS => { 0 => 'QuickTime', 2 => 'Audio' },
    NOTES => 'Audible cover art information in M4B audio books.',
    VARS => { NO_ID => 1 },
    CoverArtType => 'CoverArtType',
    CoverArt => {
        Name => 'CoverArt',
        Groups => { 2 => 'Preview' },
        Binary => 1,
    },
);
# 'tseg' information observed in Audible .m4b audio books (ref PH)
%Image::ExifTool::Audible::tseg = (
    GROUPS => { 0 => 'QuickTime', 2 => 'Audio' },
    tshd => {
        Name => 'ChapterNumber',
        Format => 'int32u',
        ValueConv => '$val + 1', # start counting from 1
    },
    meta => {
        Name => 'Audible_meta2',
        SubDirectory => { TagTable => 'Image::ExifTool::Audible::meta' },
    },
);
#------------------------------------------------------------------------------
# Process Audible 'meta' tags from M4B files (ref PH)
# Inputs: 0) ExifTool object ref, 1) dirInfo ref, 2) tag table ref
# Returns: 1 on success
# Entry format (big-endian): int32u entry count, then per entry:
#   int8u flag (0x80 or 0x00 -- anything else terminates the scan),
#   int16u tag-name length, tag name,
#   int16u version (must be 0x0001), int32u value size, UTF-8 value
sub ProcessAudible_meta($$$)
{
    my ($et, $dirInfo, $tagTablePtr) = @_;
    my $dataPt = $$dirInfo{DataPt};
    my $dataPos = $$dirInfo{DataPos};
    my $dirLen = length $$dataPt;
    return 0 if $dirLen < 4;
    my $num = Get32u($dataPt, 0);   # number of dictionary entries
    $et->VerboseDir('Audible_meta', $num);
    my $pos = 4;
    my $index;
    for ($index=0; $index<$num; ++$index) {
        last if $pos + 3 > $dirLen;
        my $unk = Get8u($dataPt, $pos); # ? (0x80 or 0x00)
        last unless $unk == 0x80 or $unk == 0x00;
        my $len = Get16u($dataPt, $pos + 1);    # tag length
        $pos += 3;
        last if $pos + $len + 6 > $dirLen or not $len;
        my $tag = substr($$dataPt, $pos, $len); # tag ID
        my $ver = Get16u($dataPt, $pos + $len); # version?
        last unless $ver == 0x0001;
        my $size = Get32u($dataPt, $pos + $len + 2);# data size
        $pos += $len + 6;
        last if $pos + $size > $dirLen;
        my $val = $et->Decode(substr($$dataPt, $pos, $size), 'UTF8');
        # auto-create a tag table entry for any key not pre-defined above
        unless ($$tagTablePtr{$tag}) {
            my $name = Image::ExifTool::MakeTagName(($tag =~ /[a-z]/) ? $tag : lc($tag));
            AddTagToTable($tagTablePtr, $tag, { Name => $name });
        }
        $et->HandleTag($tagTablePtr, $tag, $val,
            DataPt => $dataPt,
            DataPos => $dataPos,
            Start => $pos,
            Size => $size,
            Index => $index,
        );
        $pos += $size;
    }
    return 1;
}
#------------------------------------------------------------------------------
# Process Audible 'cvrx' cover art atom from M4B files (ref PH)
# Inputs: 0) ExifTool object ref, 1) dirInfo ref, 2) tag table ref
# Returns: 1 on success
# Layout: 8-byte header, int16u type-string length at 0x08, type string,
#         2 bytes, int32u image size, image data.
sub ProcessAudible_cvrx($$$)
{
    my ($et, $dirInfo, $tagTablePtr) = @_;
    my $dataPt  = $$dirInfo{DataPt};
    my $dataPos = $$dirInfo{DataPos};
    my $dirLen  = length $$dataPt;
    my $typePos = 0x0a;             # cover-art type string starts here
    return 0 if $typePos > $dirLen;
    my $typeLen = Get16u($dataPt, 0x08);
    my $imgPos = $typePos + $typeLen + 6;
    return 0 if $imgPos > $dirLen;
    my $imgLen = Get32u($dataPt, $typePos + $typeLen + 2);
    return 0 if $imgPos + $imgLen > $dirLen;
    $et->VerboseDir('Audible_cvrx', undef, $dirLen);
    $et->HandleTag($tagTablePtr, 'CoverArtType', undef,
        DataPt  => $dataPt,
        DataPos => $dataPos,
        Start   => $typePos,
        Size    => $typeLen,
    );
    $et->HandleTag($tagTablePtr, 'CoverArt', undef,
        DataPt  => $dataPt,
        DataPos => $dataPos,
        Start   => $imgPos,
        Size    => $imgLen,
    );
    return 1;
}
#------------------------------------------------------------------------------
# Read information from an Audible .AA file
# Inputs: 0) ExifTool ref, 1) dirInfo ref
# Returns: 1 on success, 0 if this wasn't a valid AA file
# The file starts with a 16-byte header (file size, magic, TOC entry
# count), followed by a table of contents of 12-byte entries
# (chunk type, offset, length).  Only chunk types 2 (metadata
# dictionary), 6 (offset table) and 11 (cover art) are processed.
sub ProcessAA($$)
{
    my ($et, $dirInfo) = @_;
    my $raf = $$dirInfo{RAF};
    my ($buff, $toc, $entry, $i);
    # check magic number
    return 0 unless $raf->Read($buff, 16) == 16 and $buff=~/^.{4}\x57\x90\x75\x36/s;
    # check file size
    if (defined $$et{VALUE}{FileSize}) {
        # first 4 bytes of the file should be the filesize
        unpack('N', $buff) == $$et{VALUE}{FileSize} or return 0;
    }
    $et->SetFileType();
    SetByteOrder('MM');
    my $bytes = 12 * Get32u(\$buff, 8); # table of contents size in bytes
    $bytes > 0xc00 and $et->Warn('Invalid TOC'), return 1;
    # read the table of contents
    $raf->Read($toc, $bytes) == $bytes or $et->Warn('Truncated TOC'), return 1;
    my $tagTablePtr = GetTagTable('Image::ExifTool::Audible::Main');
    # parse table of contents (in $toc)
    for ($entry=0; $entry<$bytes; $entry+=12) {
        my $type = Get32u(\$toc, $entry);
        next unless $type == 2 or $type == 6 or $type == 11;
        my $offset = Get32u(\$toc, $entry + 4);
        my $length = Get32u(\$toc, $entry + 8) or next;
        $raf->Seek($offset, 0) or $et->Warn("Chunk $type seek error"), last;
        if ($type == 6) { # offset table
            next if $length < 4 or $raf->Read($buff, 4) != 4; # only read the chapter count
            $et->HandleTag($tagTablePtr, '_chapter_count', Get32u(\$buff, 0));
            next;
        }
        # read the chunk
        $length > 100000000 and $et->Warn("Chunk $type too big"), next;
        $raf->Read($buff, $length) == $length or $et->Warn("Chunk $type read error"), last;
        if ($type == 11) { # cover art
            next if $length < 8;
            # chunk holds image length and absolute file offset of the image
            my $len = Get32u(\$buff, 0);
            my $off = Get32u(\$buff, 4);
            next if $off < $offset + 8 or $off - $offset + $len > $length;
            $et->HandleTag($tagTablePtr, '_cover_art', substr($buff, $off-$offset, $len));
            next;
        }
        # parse metadata dictionary (in $buff)
        $length < 4 and $et->Warn('Bad dictionary'), next;
        my $num = Get32u(\$buff, 0);
        $num > 0x200 and $et->Warn('Bad dictionary count'), next;
        my $pos = 4; # dictionary starts immediately after count
        require Image::ExifTool::HTML; # (for UnescapeHTML)
        $et->VerboseDir('Audible Metadata', $num);
        for ($i=0; $i<$num; ++$i) {
            my $tagPos = $pos + 9; # position of tag string
            $tagPos > $length and $et->Warn('Truncated dictionary'), last;
            # (1 unknown byte ignored at start of each dictionary entry)
            my $tagLen = Get32u(\$buff, $pos + 1); # tag string length
            my $valLen = Get32u(\$buff, $pos + 5); # value string length
            my $valPos = $tagPos + $tagLen; # position of value string
            my $nxtPos = $valPos + $valLen; # position of next entry
            $nxtPos > $length and $et->Warn('Bad dictionary entry'), last;
            my $tag = substr($buff, $tagPos, $tagLen);
            my $val = substr($buff, $valPos, $valLen);
            # create table entries on the fly for unknown dictionary keys
            unless ($$tagTablePtr{$tag}) {
                my $name = Image::ExifTool::MakeTagName($tag);
                $name =~ s/_(.)/\U$1/g; # change from underscore-separated to mixed case
                AddTagToTable($tagTablePtr, $tag, { Name => $name });
            }
            # unescape HTML character references and convert from UTF-8
            $val = $et->Decode(Image::ExifTool::HTML::UnescapeHTML($val), 'UTF8');
            $et->HandleTag($tagTablePtr, $tag, $val,
                DataPos => $offset,
                DataPt => \$buff,
                Start => $valPos,
                Size => $valLen,
                Index => $i,
            );
            $pos = $nxtPos; # step to next dictionary entry
        }
    }
    return 1;
}
1; # end
__END__
=head1 NAME
Image::ExifTool::Audible - Read meta information from Audible audio books
=head1 SYNOPSIS
This module is used by Image::ExifTool
=head1 DESCRIPTION
This module contains definitions required by Image::ExifTool to read meta
information from Audible audio books.
=head1 AUTHOR
Copyright 2003-2019, Phil Harvey (phil at owl.phy.queensu.ca)
This library is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
=head1 REFERENCES
=over 4
=item L<https://github.com/jteeuwen/audible>
=item L<https://code.google.com/p/pyaudibletags/>
=item L<http://wiki.multimedia.cx/index.php?title=Audible_Audio>
=back
=head1 SEE ALSO
L<Image::ExifTool::TagNames/Audible Tags>,
L<Image::ExifTool(3pm)|Image::ExifTool>
=cut
| philmoz/Focus-Points | focuspoints.lrdevplugin/bin/exiftool/lib/Image/ExifTool/Audible.pm | Perl | apache-2.0 | 11,796 |
package Google::Ads::AdWords::v201406::SharedSetService::ResponseHeader;
use strict;
use warnings;
# Machine-generated by SOAP::WSDL (see POD below) -- regenerate from the
# WSDL rather than editing by hand.  Maps the ResponseHeader element of
# the v201406 AdWords namespace onto SoapResponseHeader.
{ # BLOCK to scope variables
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201406' }
__PACKAGE__->__set_name('ResponseHeader');
__PACKAGE__->__set_nillable();
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();
use base qw(
SOAP::WSDL::XSD::Typelib::Element
Google::Ads::AdWords::v201406::SoapResponseHeader
);
}
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201406::SharedSetService::ResponseHeader
=head1 DESCRIPTION
Perl data type class for the XML Schema defined element
ResponseHeader from the namespace https://adwords.google.com/api/adwords/cm/v201406.
=head1 METHODS
=head2 new
my $element = Google::Ads::AdWords::v201406::SharedSetService::ResponseHeader->new($data);
Constructor. The following data structure may be passed to new():
$a_reference_to, # see Google::Ads::AdWords::v201406::SoapResponseHeader
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| gitpan/GOOGLE-ADWORDS-PERL-CLIENT | lib/Google/Ads/AdWords/v201406/SharedSetService/ResponseHeader.pm | Perl | apache-2.0 | 1,044 |
package Google::Ads::AdWords::v201409::ConversionTrackerService::ResponseHeader;
use strict;
use warnings;
# Machine-generated by SOAP::WSDL (see POD below) -- regenerate from the
# WSDL rather than editing by hand.  Maps the ResponseHeader element of
# the v201409 AdWords namespace onto SoapResponseHeader.
{ # BLOCK to scope variables
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201409' }
__PACKAGE__->__set_name('ResponseHeader');
__PACKAGE__->__set_nillable();
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();
use base qw(
SOAP::WSDL::XSD::Typelib::Element
Google::Ads::AdWords::v201409::SoapResponseHeader
);
}
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201409::ConversionTrackerService::ResponseHeader
=head1 DESCRIPTION
Perl data type class for the XML Schema defined element
ResponseHeader from the namespace https://adwords.google.com/api/adwords/cm/v201409.
=head1 METHODS
=head2 new
my $element = Google::Ads::AdWords::v201409::ConversionTrackerService::ResponseHeader->new($data);
Constructor. The following data structure may be passed to new():
$a_reference_to, # see Google::Ads::AdWords::v201409::SoapResponseHeader
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| gitpan/GOOGLE-ADWORDS-PERL-CLIENT | lib/Google/Ads/AdWords/v201409/ConversionTrackerService/ResponseHeader.pm | Perl | apache-2.0 | 1,068 |
package DDG::Spice::SeatGeek::EventsByCity;
# ABSTRACT: Returns upcoming concerts in a city
use strict;
use warnings;
use DDG::Spice;
# Trigger phrases that must appear at the start of the query.
triggers start =>
    'upcoming concert in',
    'upcoming concerts in',
    'concerts in',
    'live in',
    'live shows in',
    'shows in',
    'gigs in';
# Cache successful (200/304) upstream responses for 12 hours.
spice proxy_cache_valid => "200 304 12h";
# $1 is the dash-separated city name returned by the handler below.
spice to => 'http://api.seatgeek.com/2/events?taxonomies.name=concert&per_page=20&venue.city=$1&callback={{callback}}';
# Handler gets the lower-cased remainder of the query in $_ and must
# return the city name in the form the SeatGeek API expects.
handle remainder_lc => sub {
    # Skip geolocation-style queries ("concerts in my area") -- those are
    # handled elsewhere.  Use a real regex rather than a string pattern.
    return if $_ =~ m/^my area/;
    # The API requires multi-word city names to be dash-separated.
    $_ =~ s/\s/-/g;
    return $_ if $_;
    return;
};
1;
| sagarhani/zeroclickinfo-spice | lib/DDG/Spice/SeatGeek/EventsByCity.pm | Perl | apache-2.0 | 691 |
##**************************************************************
##
## Copyright (C) 1990-2011, Condor Team, Computer Sciences Department,
## University of Wisconsin-Madison, WI.
##
## Licensed under the Apache License, Version 2.0 (the "License"); you
## may not use this file except in compliance with the License. You may
## obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
##**************************************************************
# CondorPersonal.pm - a Perl API to Condor for Personal Condors
#
# Designed to allow a flexible way to have tests and other jobs
# run in conjunction with other Condor perl modules and control
# the environment in which they run
#
# 1-6-05 Bill Taylor - vastly updated Bill Taylor 10-2-14
#
#################################################################
#
# A Personal Condor will be constructed in a subdirectory
# based on current PID and the version string passed in
# such that bin, sbin, log, condor_config and the rest
# live in:
#
# PID/PIDversion/sbin
# PID/PIDversion/bin
# PID/PIDversion/log
# PID/PIDversion/execute
# PID/PIDversion/condor_config
# ...
package CondorPersonal;
use strict;
use warnings;
use Carp;
use Cwd;
use POSIX qw/sys_wait_h strftime/;
use Socket;
use Sys::Hostname;
use CondorUtils;
use CondorTest;
my $iswindows = CondorUtils::is_windows();
my $iscygwin = CondorUtils::is_cygwin_perl();
my $iswindowsnativeperl = CondorUtils::is_windows_native_perl();
my $wininstalldir = "";
my $installdir = "";
my $masterconfig = ""; #one built by batch_test and moving to test glue
my $currentlocation;
# Initialize(%control)
# Establish the master (seed) condor_config for this test run.  Derives a
# flattened configuration from the HTCondor instance currently pointed to
# by $ENV{CONDOR_CONFIG} and records its absolute path in $masterconfig,
# converting to a native backslash path on Windows.  The %control named
# arguments are accepted for interface compatibility but unused here.
# (The former "test_glue" probe set a flag that was never read; removed.)
sub Initialize
{
	# we want to initialize by whatever we have in $ENV{CONDOR_CONFIG}
	# not something created in the test glue.
	my %control = @_;
	my $newconfig = deriveMasterConfig();
	debug("Effective config now $newconfig\n", 2);
	$currentlocation = getcwd();
	if(CondorUtils::is_windows() == 1) {
		if(is_windows_native_perl()) {
			$currentlocation =~ s/\//\\/g;
			$masterconfig = "$currentlocation" . "\\$newconfig";
		} else {
			# cygwin perl: convert to a mixed (C:/...) path first, then
			# flip the slashes so the stored path is native Windows style.
			my $tmp = `cygpath -m $currentlocation`;
			fullchomp($tmp);
			$tmp =~ s/\//\\/g;
			$currentlocation = $tmp;
			$masterconfig = "$currentlocation" . "\\$newconfig";
		}
	} else {
		$masterconfig = "$currentlocation" . "/$newconfig";
	}
}
# deriveMasterConfig()
# Ensure a flattened "derived_condor_config" file exists in the current
# directory and return its (relative) name.  When absent, the effective
# configuration of the currently-configured HTCondor -- every config file
# plus changed-vs-default values -- is dumped with
# condor_config_val -writeconfig:file, and the result is echoed to stdout
# for log visibility.  Dies if the dump fails or the file cannot be read.
sub deriveMasterConfig {
	my $derivedconfig = "derived_condor_config";
	# already generated on a previous call/run
	return($derivedconfig) if(-f $derivedconfig);
	my @outres = ();
	my $res = CondorTest::runCondorTool("condor_config_val -writeconfig:file $derivedconfig",\@outres,2,{emit_output=>0,expect_result=>\&ANY});
	if($res != 1) {
		die "Error while getting the effective current configuration\n";
	}
	# three-arg open with a lexical handle; close when done
	open(my $dr, '<', $derivedconfig) or die "Failed to open $derivedconfig: $!\n";
	while(my $line = <$dr>) {
		fullchomp($line);
		print "$line\n";
	}
	close($dr);
	return($derivedconfig);
}
# Intentionally empty compile-time block.
BEGIN {
}
# Bookkeeping hashes; not referenced in this portion of the module.
my %windows_semaphores = ();
my %framework_timers = ();
#################################################################
#
# Parameters used within parameter config file......
#
# Parameter Use Default Variable stored in
# ----------------------------------------------------------------------------------------------
# condortemplate Core config file condor_config_template $personal_template
# condorlocalsrc Name for condor local config src $personal_local_src
# daemonwait Wait for startd/schedd to be seen true $personal_startup_wait
# localpostsrc New end of local config file $personal_local_post_src
# append_condor_config Text to append to end of local config file
# secprepostsrc New security settings $personal_sec_prepost_src
# condordaemon daemon list to start contents of config template $personal_daemons
# condorconfig Name for condor config file condor_config $personal_config
# condordomain Name for domain local $condordomain
# condorlocal Name for condor local config condor_config.local $personal_local
# condor "install" or path to tarball nightlies $condordistribution
# collector Used to define COLLECTOR_HOST $collectorhost
# nameschedd Used to define SCHEDD_NAME cat(name and collector) $scheddname
# condorhost Used to define CONDOR_HOST $condorhost
# ports Select dynamic or normal ports dynamic $portchanges
# slots sets NUM_CPUS NUM_SLOTS none
# universe parallel configuration of schedd none $personal_universe
#
# Notes added 10/3/14 bt
#
# The use of param files and tests calling other scripts is over. What we have now
# are test additional knobs which are added to the end of a very basic local file.
# All platforms go with the initial CONDOR_CONFIG in the environment which simply
# put has a valid RELEASE_DIR.
#
# Notes from January 2014 bill taylor
# Clumsy and unreliable IsRunningYet causes test to fail and failures to be long in time
# for a series of tests as platforms etc vary. We are moving away from IsRunningYet
# address file based to daemon information available with condor_who -daemon -log xxxxxxxx
#
# So we can collect whodata within our presonal condor instance
#
# Se we first want a test setable trigger to start using new start up evaluation
# and get one test working this way.
#
# The new way will still block. It will have a time tolerance not a try tolerance.
# There must be functions to determine our current direction based state and a way
# to call this state evaluation repeatably.
#
# There is a large comment relative to our data storage class in CondorTest.pm above this
# pacakge: package WhoDataInstance
#
# We actually do not have test framework call backs. But we added a basic state change loop
# which will check for and call callbacks while we are looping to change state. NewIsRunningYet
# replaces IsRunningYet and KillDaemons replaces KillDaemonPids and uses NewIsDownYet(new).
# Both NewIs functions call the same state transition code which collects and examines condor_who
# data stored in the instance of the personal condor
# Map of daemon subsystem name (both upper- and lower-case forms) to the
# base name of the log file that daemon writes in the LOG directory.
my %daemon_logs =
(
	"COLLECTOR" => "CollectorLog",
	"NEGOTIATOR" => "NegotiatorLog",
	"MASTER" => "MasterLog",
	"STARTD" => "StartLog",
	"SCHEDD" => "SchedLog",
	"collector" => "CollectorLog",
	"negotiator" => "NegotiatorLog",
	"master" => "MasterLog",
	"startd" => "StartLog",
	"schedd" => "SchedLog",
);
########################################
##
## 7/28/14 bt
##
## Our normal test pass has the LOCKDIR to deep
## to use a folder for shared ports handles
## So the flag below relocates it to /tmp
## Other switch turns on shared port for
## every personal condor spun up
##
my $MOVESOCKETDIR = 0; # 1 => relocate shared-port socket dir to /tmp (see note above)
my $USESHARERPORT = 0; # 1 => turn on shared port for every personal condor
##
##
########################################
my $RunningTimeStamp = 0;
my $topleveldir = getcwd(); # per-instance install dir; reset to $home after each start
my $home = $topleveldir;
my $localdir; # install location returned by InstallPersonalCondor()
my $condorlocaldir; # $localdir converted to Windows form when applicable
my $pid = $$;
my $version = ""; # remote, middle, ....... for naming schedd "schedd . pid . version"
my $mastername = ""; # master_$verison
my $DEBUGLEVEL = 1; # nothing higher shows up
my $debuglevel = 4; # take all the ones we don't want to see
	# and allowed easy changing and remove hard
	# coded value
my @debugcollection = (); # every debug() message, replayed by debug_flush()
my $isnightly = IsThisNightly($topleveldir);
my $wrap_test;
#################################################################
#
# Debug messages get time stamped. These will start showing up
# at DEBUGLEVEL = 3 with some rather verbous at 4.
#
# For a single test which uses this module simply
# CondorPersonal::DebugOn();
# CondorPersonal::DebugLevel(3);
# .... some time later .....
# CondorPersonal::DebugLevel(2);
#
# There is no reason not to have debug always on the the level
# pretty completely controls it. All DebugOff calls
# have been removed.
#
# This is a similar debug setup as the rest of the test
# suite but I did not want to require the other condor
# modules for this one.
#
#################################################################
# Per-instance configuration state for the personal condor being built
# (see the large parameter table in the comment block above for the
# meaning and defaults of each setting).
my %personal_condor_params;
my %personal_config_changes;
my $personal_config = "condor_config";
my $personal_template = "condor_config_template";
my $personal_daemons = "";
my $personal_local = "condor_config.local";
my $personal_local_src = "";
my $personal_local_post_src = "";
my $personal_sec_prepost_src = "";
my $personal_universe = "";
my $personal_startup_wait = "true";
my $personalmaster;
my $portchanges = "dynamic";
my $collector_port = "0";
my $personal_config_file = "";
my $condordomain = "";
my $procdaddress = "";
#################################################################
#
# Main interface StartCondor
#condor
# Calls functions to parse parameters, install binaries, tune the config file
# and start the personal condor. Passes back config file location and port
# number<config_file_location:collector_port>.
#
# StartCondor(testname, paramfile, version [, nowait])
# Legacy entry point: reads startup options from a parameter FILE, folds
# them (plus the positional arguments) into %personal_condor_params, and
# defers to StartCondorWithParams().  Returns its result
# ("<config path>+<collector port>").  Dies on missing arguments or a
# nonexistent parameter file.
# NOTE(review): $mpid and $arraysz are assigned but unused here.
sub StartCondor
{
	my $mpid = "";
	my $arraysz = scalar(@_);
	my $testname = shift || die "Missing test name\n";
	my $paramfile = shift || die "Missing parameter file!\n";
	$version = shift || die "Missing parameter version!\n";
	my $nowait = shift;
	my $config_and_port = "";
	my $winpath = "";
	if(!(-f $paramfile)) {
		die "StartCondor: param file $paramfile does not exist!!\n";
	}
	CondorPersonal::ParsePersonalCondorParams($paramfile);
	if(defined $nowait) {
		#print "StartCondor: no wait option\n";
		$personal_condor_params{"no_wait"} = "TRUE";
	}
	# Insert the positional arguments into the new-style named-argument
	# hash and call the version of this function which handles it.
	$personal_condor_params{"test_name"} = $testname;
	$personal_condor_params{"condor_name"} = $version;
	$personal_condor_params{"fresh_local"} = "TRUE";
	return StartCondorWithParams(%personal_condor_params);
}
############################################
## StartCondorWithParams
##
## Starts up a personal condor that is configured as specified in
## the named arguments to this function. If you are using the
## CondorTest framework, do not call this function directly.
## Call CondorTest::StartCondorWithParams().
##
## Required Arguments:
## condor_name - a descriptive name, used when generating directory names
##
## Optional Arguments:
## test_name - name of the test that is using this personal condor
## append_condor_config - lines to be added to the (local) configuration file
## daemon_list - list of condor daemons to run
##
##
############################################
# StartCondorWithParams(%args)
# Build, configure and (unless told otherwise) start a personal condor
# described by named arguments.  Required: condor_name, test_name.
# Also honored here: owner_pid, test_glue, personaldir,
# catch_startup_tune, do_not_start.
# Returns "<config file path>+<collector port>" on success,
# "do_not_start" when asked to stop after configuration, or an error
# string when the install step fails.
sub StartCondorWithParams
{
	%personal_condor_params = @_;
	if(is_windows()) {
		$ENV{LOCAL_DIR} = undef;
	}
	Initialize(@_);
	# Make sure at the least we have an initial Config folder to seed future
	# personal condors. Test via environment variable CONDOR_CONFIG.
	#
	my $configvalid = DoInitialConfigCheck();
	if($configvalid == 1) {
		die "We expected a configured HTCondor in our environment\n";
	}
	my $condor_name = $personal_condor_params{"condor_name"};
	my $testname = $personal_condor_params{"test_name"} || die "Missing test_name\n";
	$version = $personal_condor_params{"condor_name"} || die "Missing condor_name!\n";
	my $mpid = $personal_condor_params{"owner_pid"} || $pid;
	$mpid = "pdir$mpid";
	my $config_and_port = "";
	my $winpath = "";
	# build the per-instance directory: <top>/<test>.saveme/<pid>/<pid><version>
	if(exists $personal_condor_params{"test_glue"}) {
		system("mkdir -p $topleveldir/condor_tests/$testname.saveme/$mpid/$mpid$version");
		$topleveldir = "$topleveldir/condor_tests/$testname.saveme/$mpid/$mpid$version";
	} else {
		if(is_windows() && is_windows_native_perl()) {
			CreateDir("-p $topleveldir\\$testname.saveme\\$mpid\\$mpid$version");
			$topleveldir = "$topleveldir\\$testname.saveme\\$mpid\\$mpid$version";
		} elsif(is_windows() && is_cygwin_perl()) {
			CreateDir("-p $topleveldir/$testname.saveme/$mpid/$mpid$version");
			my $tmp1 = "$topleveldir/$testname.saveme/$mpid/$mpid$version";
			$topleveldir = `cygpath -m $tmp1`;
			CondorUtils::fullchomp($topleveldir);
		} else {
			CreateDir("-p $topleveldir/$testname.saveme/$mpid/$mpid$version");
			$topleveldir = "$topleveldir/$testname.saveme/$mpid/$mpid$version";
		}
	}
	$procdaddress = $mpid . $version;
	# caller may force an explicit personal directory instead
	if(exists $personal_condor_params{"personaldir"}) {
		$topleveldir = $personal_condor_params{"personaldir"};
		debug( "SETTING $topleveldir as topleveldir\n",$debuglevel);
		system("mkdir -p $topleveldir");
	}
	# if we are wrapping tests, publish log location
	$wrap_test = $ENV{WRAP_TESTS};
	if(defined $wrap_test) {
		my $logdir = $topleveldir . "/log";
		#CondorPubLogdirs::PublishLogDir($testname,$logdir);
	}
	if(is_windows() && is_windows_native_perl()){
		$personal_config_file = $topleveldir ."\\condor_config";
	} elsif(is_windows() && is_cygwin_perl()){
		$personal_config_file = $topleveldir ."/condor_config";
	} else {
		$personal_config_file = $topleveldir ."/condor_config";
	}
	# run the install/tune phase against the seed (master) config
	$ENV{CONDOR_CONFIG} = $masterconfig;
	# we need the condor instance early for state determination
	#print "Personal: StartCondorWithParams: Creating condor instance for: $personal_config_file\n";
	my $new_condor = CondorTest::CreateAndStoreCondorInstance( $version, $personal_config_file, 0, 0 );
	$localdir = CondorPersonal::InstallPersonalCondor();
	#
	if($localdir eq "")
	{
		return("Failed to do needed Condor Install\n");
	}
	if( CondorUtils::is_windows() == 1 ){
		if(is_windows_native_perl()) {
			$localdir =~ s/\//\\/g;
			$condorlocaldir = $localdir;
		} else {
			$winpath = `cygpath -m $localdir`;
			CondorUtils::fullchomp($winpath);
			$condorlocaldir = $winpath;
		}
		if( exists $personal_condor_params{catch_startup_tune}) {
			CondorPersonal::TunePersonalCondor($condorlocaldir, $mpid, $personal_condor_params{catch_startup_tune});
		} else {
			CondorPersonal::TunePersonalCondor($condorlocaldir, $mpid);
		}
	} else {
		if( exists $personal_condor_params{catch_startup_tune}) {
			CondorPersonal::TunePersonalCondor($localdir, $mpid,$personal_condor_params{catch_startup_tune});
		} else {
			CondorPersonal::TunePersonalCondor($localdir, $mpid);
		}
	}
	# from here on the new personal condor's config is in effect
	$ENV{CONDOR_CONFIG} = $personal_config_file;
	if(exists $personal_condor_params{"do_not_start"}) {
		$topleveldir = $home;
		return("do_not_start");
	}
	$collector_port = CondorPersonal::StartPersonalCondor();
	# reset topleveldir to $home so all configs go at same level
	$topleveldir = $home;
	debug( "collector port is $collector_port\n",$debuglevel);
	if( CondorUtils::is_windows() == 1 ){
		if(is_windows_native_perl()) {
			$personal_config_file =~ s/\//\\/g;
			$config_and_port = $personal_config_file . "+" . $collector_port ;
		} else {
			$winpath = `cygpath -m $personal_config_file`;
			CondorUtils::fullchomp($winpath);
			$config_and_port = $winpath . "+" . $collector_port ;
		}
	} else {
		$config_and_port = $personal_config_file . "+" . $collector_port ;
	}
	#CondorPersonal::Reset();
	debug( "StartCondor config_and_port is --$config_and_port--\n",$debuglevel);
	debug( "Personal Condor Started\n",$debuglevel);
	return( $config_and_port );
}
# StartCondorWithParamsStart
#
# Second phase of a split personal-condor startup: assumes the instance is
# already installed and tuned, starts the master, and returns the string
# "<config path>+<collector port>" that callers use as a handle.
# Relies on module globals $personal_config_file and $collector_port.
sub StartCondorWithParamsStart
{
    my $winpath = "";
    my $config_and_port = "";

    if(is_windows()) {
        # NOTE(review): assigning undef (rather than delete) is the original
        # behavior; kept as-is in case callers depend on the empty entry.
        $ENV{LOCAL_DIR} = undef;
    }

    $collector_port = CondorPersonal::StartPersonalCondor();

    debug( "collector port is $collector_port\n",$debuglevel);

    if( CondorUtils::is_windows() == 1 ){
        if(is_windows_native_perl()) {
            # Convert forward slashes to backslashes in a lexical copy.
            # (The original used $_ as scratch, clobbering the caller's $_.)
            ($winpath = $personal_config_file) =~ s/\//\\/g;
        } else {
            $winpath = `cygpath -m $personal_config_file`;
            CondorUtils::fullchomp($winpath);
        }
        $config_and_port = $winpath . "+" . $collector_port ;
    } else {
        $config_and_port = $personal_config_file . "+" . $collector_port ;
    }

    CondorPersonal::Reset();
    debug( "StartCondor config_and_port is --$config_and_port--\n",$debuglevel);
    debug( "Personal Condor Started\n",$debuglevel);
    return( $config_and_port );
}
# debug
#
# Emit a timestamped message and record it in @debugcollection.
# Messages at or below $DEBUGLEVEL are printed immediately; noisier
# messages are only recorded (tagged with their level) for later
# replay by debug_flush().
sub debug {
    my ($string, $level) = @_;
    my $stamp = timestamp();
    $level = 0 unless defined $level;

    my $msg;
    if ($level <= $DEBUGLEVEL) {
        $msg = "$stamp $string";
        print $msg;
    } else {
        # Too chatty for the current level: save it, tagged, but stay quiet.
        $msg = "$stamp (CP$level) $string";
    }
    push @debugcollection, $msg;
}
# debug_flush
#
# Dump diagnostic state when something has gone wrong: the LOG directory
# listing, condor_who's view of the daemons, the effective configuration,
# and finally every message saved in @debugcollection.
sub debug_flush {
    print "\nDEBUG_FLUSH:\n";

    # Where is the log directory and what is in it?
    my $logdir = `condor_config_val log`;
    fullchomp($logdir);
    print "\nLOG=$logdir and contains:\n";
    List("ls -lh $logdir");

    # What daemons does condor_who see running/exited?
    print "\ncondor_who -verb says:\n";
    system("condor_who -verb");

    # What is in our config files?
    print "\ncondor_config_val -writeconfig:file says:\n";
    system("condor_config_val -writeconfig:file -");

    # Replay everything debug() recorded, including suppressed messages.
    print "\n------------Saved debug output is----------------\n";
    print $_ for @debugcollection;
}
# DebugLevel
#
# Set the module-wide debug verbosity threshold and return the
# previous value so callers can restore it later.
sub DebugLevel
{
    my ($newlevel) = @_;
    my $previous = $DEBUGLEVEL;
    $DEBUGLEVEL = $newlevel;
    return $previous;
}
# timestamp
#
# Current wall-clock time as a zero-padded "HH:MM:SS" string (24-hour).
sub timestamp {
    my ($sec, $min, $hour) = localtime;
    return sprintf("%02d:%02d:%02d", $hour, $min, $sec);
}
# Reset
#
# Restore every module-level knob to its default so the next personal
# condor is configured from a clean slate. Called between instances
# (e.g. from StartCondorWithParamsStart).
sub Reset
{
debug( "CondorPersonal RESET\n",$debuglevel);
# Forget any parameters and config edits from the previous instance.
%personal_condor_params = ();
%personal_config_changes = ();
$personal_config = "condor_config";
$personal_template = "condor_config_template";
$personal_daemons = "";
$personal_local = "condor_config.local";
$personal_local_src = "";
$personal_local_post_src = "";
$personal_sec_prepost_src = "";
$personal_universe = "";
$personal_startup_wait = "true";
$RunningTimeStamp = 0;
# Re-anchor future sandboxes at the current working directory.
$topleveldir = getcwd();
$home = $topleveldir;
# Default back to dynamic ports until a test asks otherwise.
$portchanges = "dynamic";
$collector_port = "0";
$personal_config_file = "";
$condordomain = "";
$procdaddress = "";
}
#################################################################
#
# ParsePersonalCondorParams
#
# Parses parameter file in typical condor form of NAME = VALUE
# and stores results into a hash for lookup later.
#
# ParsePersonalCondorParams
#
# Parse a submit-style parameter file of NAME = VALUE lines into the
# module-level %personal_condor_params hash (keys lowercased).
# Supports '\' line continuations, '#' comments, blank lines, and
# $ENV(NAME) substitution. Dies if the file cannot be opened; returns 1.
sub ParsePersonalCondorParams
{
    my $submit_file = shift || die "missing submit file argument";
    my $line = 0;

    # Lexical handle + 3-arg open. (The original used a bareword handle
    # with 2-arg open and had an unreachable "return 0" after die.)
    open( my $fh, '<', $submit_file ) or die "error opening \"$submit_file\": $!\n";

    debug( "reading submit file...\n" ,4);
    my $variable;
    my $value;

    while( my $cfgline = <$fh> )
    {
        CondorUtils::fullchomp($cfgline);
        $line++;

        # skip comments & blank lines
        next if $cfgline =~ /^#/ || $cfgline =~ /^\s*$/;

        # if this line is a variable assignment...
        if( $cfgline =~ /^(\w+)\s*\=\s*(.*)$/ ) {
            $variable = lc $1;
            $value = $2;

            # if line ends with a continuation ('\')...
            while( $value =~ /\\\s*$/ ) {
                # remove the continuation
                $value =~ s/\\\s*$//;
                # Read the next line and append it. Bug fix: the original
                # did "<SUBMIT_FILE> || last; $value .= $_;" but <FH> only
                # assigns $_ inside "while(<FH>)", so it re-appended the
                # current line instead of the continuation line.
                my $continuation = <$fh>;
                last unless defined $continuation;
                $line++;
                $value .= $continuation;
            }

            # compress whitespace and remove trailing newline for readability
            $value =~ s/\s+/ /g;
            CondorUtils::fullchomp($value);

            # Do proper environment substitution: FOO = $ENV(BAR) pulls BAR
            # from the current environment.
            if( $value =~ /(.*)\$ENV\((.*)\)(.*)/ ) {
                my $envlookup = $ENV{$2};
                debug( "Found $envlookup in environment \n",4);
                $value = $1.$envlookup.$3;
            }

            debug( "(CondorPersonal.pm) $variable = $value\n" ,$debuglevel);

            # save the variable/value pair
            $personal_condor_params{$variable} = $value;
        }
        # Non-assignment lines are silently skipped, as before.
    }
    close($fh);
    return 1;
}
##################################################################
#
# Run condor_config_val using the specified configuration file.
#
##################################################################
#
# CondorConfigVal
#
# Run condor_config_val for $param_name against the given configuration
# file. With a third array-ref argument, the tool's full output is placed
# in that array (and "" is returned); otherwise the first output line is
# chomped and returned as the value.
#
sub CondorConfigVal
{
    my $config_file = shift;
    my $param_name = shift;
    my $returnarrayref = shift;

    my @otherarray = ();
    my $result = "";

    # Point condor_config_val at the requested config. Using "local"
    # guarantees CONDOR_CONFIG is restored on every exit path — the
    # original's manual save/restore leaked if runCondorTool died.
    local $ENV{CONDOR_CONFIG} = $config_file;

    if (defined $returnarrayref) {
        my $res = CondorTest::runCondorTool("condor_config_val $param_name",$returnarrayref,2,{emit_output=>0,expect_result=>\&ANY});
    } else {
        my $res = CondorTest::runCondorTool("condor_config_val $param_name",\@otherarray,2,{emit_output=>0});
        my $firstline = $otherarray[0];
        # Guard: the tool may produce no output at all.
        if (defined $firstline) {
            fullchomp $firstline;
            $result = $firstline;
        }
    }

    return $result;
}
#################################################################
#
# InstallPersonalCondor
#
# We either find binaries in the environment or we install
# a particular tar ball.
#
# InstallPersonalCondor
#
# Locate (or unpack) the Condor release this personal condor will run,
# seed %personal_condor_params with the template/local config sources,
# create the runtime directories (execute/spool/log) under $topleveldir,
# and return the release root used for LOCAL_DIR. The "condor" control
# parameter selects the mode: "nightlies", "install" (use the install
# found via PATH / condor_config_val), or a path to a tarball.
# Failure paths call debug_flush() and die.
sub InstallPersonalCondor
{
# this used to be used globally but now passed
# in from StartCondorWithParams
#%personal_condor_params = @_;
my %control = %personal_condor_params;
my $master;
my $collector;
my $submit;
my $iswindows = CondorUtils::is_windows() ;
my $condorq = "";
my $sbinloc = "";
my $configline = "";
my @configfiles;
my $condordistribution;
my $tmpconfig = $ENV{CONDOR_CONFIG};
my $configdir = "";
# Find condor_q on the PATH; its location tells us where the release is.
if($iswindows) {
$condorq = Which("condor_q.exe");
} else {
$condorq = Which("condor_q");
}
# Directory portion of the current CONDOR_CONFIG (not otherwise used below).
if($tmpconfig =~ /^(.*\/)\w+$/) {
$configdir = $1;
#print "InstallPersonalCondor: CONFIG DIR:$configdir\n";
}
my $binloc = "";
$condordistribution = $control{"condor"} || "nightlies";
debug( "Install this condor --$condordistribution--\n",$debuglevel);
if( $condordistribution eq "nightlies" ) {
# test if this is really the environment we are in or
# switch it to install mode.
if(! -f "../../condor/sbin/condor_master") {
$condordistribution = "install";
}
}
if( $condordistribution eq "install" ) {
if($iswindows == 1) {
#print "condor distribution = install\n";
}
# where is the hosting condor_config file? The one assumed to be based
# on a setup with condor_configure.
my @config = ();
debug("InstallPersonalCondor getting ccv -config\n",$debuglevel);
CondorTest::runCondorTool("condor_config_val -config",\@config,2,{emit_output=>0});
debug("InstallPersonalCondor BACK FROM ccv -config\n",$debuglevel);
# Second pass over the same command, collecting the config file list
# (stderr folded in, "Configuration source" banner skipped).
open(CONFIG,"condor_config_val -config 2>&1 | ") || die "Can not find config file: $!\n";
while(<CONFIG>)
{
next if ($_ =~ /figuration source/);
CondorUtils::fullchomp($_);
$configline = $_;
push @configfiles, $configline;
}
close(CONFIG);
# The first config file reported becomes our template.
$personal_condor_params{"condortemplate"} = shift @config;
fullchomp($personal_condor_params{"condortemplate"});
#print " ****** Condortemplate set to <$personal_condor_params{condortemplate}>\n";
if(exists $personal_condor_params{fresh_local}) {
} else {
# Always start with a freshly constructed local config file
# so we know what we get bt 5/13
#$personal_condor_params{"condorlocalsrc"} = shift @configfiles;
}
debug("condor_q: $condorq\n",$debuglevel);
debug("topleveldir: $topleveldir\n",$debuglevel);
if($iswindows == 1) {
# maybe we have a dos path
if(is_windows_native_perl()) {
if($condorq =~ /[A-Za-z]:/) {
$_ = $condorq;
s/\//\\/g;
$condorq = $_;
#print "condor_q now:$condorq\n";
# Strip "\bin\condor_q" to find the release root.
if($condorq =~ /^([A-Za-z]:\\.*?)\\bin\\(.*)$/) {
#print "setting binloc:$1 \n";
$binloc = $1;
$sbinloc = $1;
}
}
} else {
# Cygwin: normalize to a mixed-mode (forward slash) path first.
my $tmp = `cygpath -m $condorq`;
fullchomp($tmp);
#print "InstallPersonalCondor:condorq:$tmp\n";
$condorq = $tmp;
if($condorq =~ /[A-Za-z]:/) {
if($condorq =~ /^([A-Za-z]:\/.*?)\/bin\/(.*)$/) {
#print "setting binloc:$1 \n";
$binloc = $1;
$sbinloc = $1;
}
}
}
}
# Unix (or windows fallthrough): derive bin location from condor_q's path.
if($binloc eq "") {
if( $condorq =~ /^(\/.*\/)(\w+)\s*$/ ) {
debug( "Root path $1 and base $2\n",$debuglevel);
$binloc = $1; # we'll get our binaries here.
} elsif(-f "../release_dir/bin/condor_status") {
#print "Bummer which condor_q failed\n";
#print "Using ../release_dir/bin(s)\n";
$binloc = "../release_dir/bin"; # we'll get our binaries here.
}
else
{
#print "which condor_q responded: $condorq! CondorPersonal Failing now\n";
debug_flush();
die "Can not seem to find a Condor install!\n";
}
}
if($sbinloc eq "") {
# NOTE(review): "s*bin" matches zero-or-more 's' characters, so this
# accepts both .../bin/ and .../sbin/ (and oddly .../ssbin/) — confirm
# that is the intent before tightening.
if( $binloc =~ /^(\/.*\/)s*bin\/\s*$/ )
{
debug( "Root path to sbin is $1\n",$debuglevel);
$sbinloc = $1; # we'll get our binaries here. # local_dir is here
}
else
{
debug_flush();
die "Can not seem to locate Condor release binaries\n";
}
}
debug( "Sandbox started rooted here: $topleveldir\n",$debuglevel);
#print "Sandbox started rooted here: $topleveldir\n";
# Create the instance's runtime directories under the sandbox root.
if(is_windows_native_perl()) {
#print "before making local dirs\n";
#CondorUtils::dir_listing("$topleveldir");
my $cwd = getcwd();
chdir ("$topleveldir");
CreateDir("execute spool log log\\tmp");
chdir ("$cwd");
#print "after making local dirs\n";
#CondorUtils::dir_listing("$topleveldir");
} else {
system("cd $topleveldir && mkdir -p execute spool log log/tmp");
}
} elsif( $condordistribution eq "nightlies" ) {
if($iswindows == 1) {
}
# we want a mechanism by which to find the condor binaries
# we are testing. But we know where they are relative to us
# ../../condor/bin etc
# That is simply the nightly test setup.... for now at least
# where is the hosting condor_config file? The one assumed to be based
# on a setup with condor_configure.
debug(" Nightlies - find environment config files\n",$debuglevel);
my @config = ();
CondorTest::runCondorTool("condor_config_val -config",\@config,2,{emit_output=>0});
# Nightlies provide both a template and a local-config seed file.
$personal_condor_params{"condortemplate"} = shift @config;
$personal_condor_params{"condorlocalsrc"} = shift @config;
fullchomp($personal_condor_params{"condortemplate"});
fullchomp($personal_condor_params{"condorlocalsrc"});
#print " ****** Case nightlies leading to <$personal_condor_params{condortemplate}> and $personal_condor_params{condorlocalsrc}\n";
debug( "My path to condor_q is $condorq and topleveldir is $topleveldir\n",$debuglevel);
if( $condorq =~ /^(\/.*\/)(\w+)\s*$/ )
{
debug( "Root path $1 and base $2\n",$debuglevel);
$binloc = $1; # we'll get our binaries here.
}
else
{
#print "which condor_q responded: $condorq! CondorPersonal Failing now\n";
debug_flush();
die "Can not seem to find a Condor install!\n";
}
if( $binloc =~ /^(\/.*)\/bin\/\s*$/ )
{
debug( "Root path to sbin is $1\n",$debuglevel);
$sbinloc = $1; # we'll get our binaries here. # local_dir is here
}
else
{
debug_flush();
die "Can not seem to locate Condor release binaries\n";
}
debug( "My path to condor_q is $binloc and topleveldir is $topleveldir\n",$debuglevel);
debug( "Sandbox started rooted here: $topleveldir\n",$debuglevel);
if(is_windows_native_perl()) {
# NOTE(review): "chdir" run via system() changes only the child's
# directory, not this process's — confirm CreateDir targets are right.
my $cwd = getcwd();
system("chdir $topleveldir");
CreateDir("execute spool log log/tmp");
system("chdir $cwd");
} else {
system("cd $topleveldir && mkdir -p execute spool log log/tmp");
}
} elsif( -e $condordistribution ) {
if($iswindows == 1) {
}
# in this option we ought to run condor_configure
# to get a current config files but we'll do this
# after getting the current condor_config from
# the environment we are in as it is supposed to
# have been generated this way in the nightly tests
# run in the NWO.
# Tarball mode: unpack the named distribution into the sandbox.
my $res = chdir "$topleveldir";
if(!$res) {
die "chdir $topleveldir failed: $!\n";
exit(1);
}
system("cd $topleveldir && mkdir -p execute spool log");
system("tar -xf $home/$condordistribution");
$sbinloc = $topleveldir; # local_dir is here
chdir "$home";
} else {
debug_flush();
die "Undiscernable install directive! (condor = $condordistribution)\n";
}
debug( "InstallPersonalCondor returning $sbinloc for LOCAL_DIR setting\n",$debuglevel);
return($sbinloc);
}
# FetchParams
#
# Hand back the current personal-condor parameter table as a
# key/value list (callers assign it into their own hash).
sub FetchParams
{
    my %params = %personal_condor_params;
    return(%params);
}
#################################################################
#
# TunePersonalCondor
#
# Most changes go into the condor_config.local file but
# some changes are done to the condor_config template.
#
# RELEASE_DIR, LOCAL_DIR and LOCAL_CONFIG_FILE are
# adjusted from the main template file and other
# changes are in the condor_config.local file.
#
#################################################################
#
# TunePersonalCondor($localdir, $mpid [, $returnarrayref])
#
# Generate this instance's condor_config (from $personal_template) and
# condor_config.local (from scratch plus any requested seed files) under
# $topleveldir, honoring the knobs in %personal_condor_params
# (condorhost, collector, condordomain, condortemplate, condorconfig,
# daemon_list, condorlocal, condorlocalsrc, secprepostsrc, localpostsrc,
# universe, ports, slots, append_condor_config[_plus]).
# Finishes by calling PostTunePersonalCondor().
sub TunePersonalCondor
{
my %control = %personal_condor_params;
my $myhost = CondorTest::getFqdnHost();
my @domainparts = split /\./, $myhost;
my $condorhost = "";
my $collectorhost = "";
my $localdir = shift;
my $mpid = shift;
my $scheddname;
my $startdname;
# NOTE(review): $minimalconfig is initialized to 0 and never set again
# in this sub — the "if($minimalconfig == 0)" branch below always runs.
my $minimalconfig = 0;
my $returnarrayref = shift;
my $iswindows = CondorUtils::is_windows();
# Default the per-instance directory suffix to "pdir<our pid>".
if(!(defined $mpid)) {
$mpid = $$;
$mpid = "pdir$mpid";
}
my $socketdir = "";
if($MOVESOCKETDIR == 1) {
# The tests get pretty long paths to LOCK_DIR making unix sockets exceed
# the max character length. So in remote_pre we create a folder to hold
# the test run's socket folder. /tmp/tds$pid. We place this name we will
# need to configure each personal with in condor_tests/SOCKETDIR and we will
# configure with our own pid. Remote_post removes this top level directory.
if( CondorUtils::is_windows() == 0 ){
# windows does not have a path length limit
if(!(-f "SOCKETDIR")) {
print "Creating SOCKETDIR?\n";
my $privatetmploc = "/tmp/tds$$";
print "tmp loc:$privatetmploc\n";
$socketdir = "SOCKETDIR";
system("mkdir $privatetmploc;ls /tmp");
open(SD,">$socketdir") or print "Failed to create:$socketdir:$!\n";
print SD "$privatetmploc\n";
close(SD);
} else {
open(SD,"<SOCKETDIR") or print "Failed to open:SOCKETDIR:$!\n";
$socketdir = (<SD>);
chomp($socketdir);
print "Fetch master SOCKETDIR:$socketdir\n";
$socketdir = "$socketdir" . "/$$";
print "This tests socketdir:$socketdir\n";
}
}
}
#print " ****** TunePersonalCondor with localdir set to <$localdir>\n";
debug( "TunePersonalCondor setting LOCAL_DIR to $localdir\n",$debuglevel);
debug( "My basic name is $myhost\n",$debuglevel);
# was a special condor host called out?
if( exists $control{"condorhost"} )
{
$condorhost = $control{"condorhost"};
}
# was a special condor collector called out?
if( exists $control{"collector"} )
{
$collectorhost = $control{"collector"};
}
# was a special domain called out?
if( exists $control{"condordomain"} )
{
$condordomain = $control{"condordomain"};
}
if( $condordomain ne "" ) {
$condorhost = $myhost . "." . $condordomain;
} else {
$condorhost = $myhost;
}
debug( "Fully qualified domain name is ************************ $condorhost ********************\n",$debuglevel);
# was a special template called out?
if( exists $control{"condortemplate"} )
{
$personal_template = $control{"condortemplate"};
}
# was a special config file called out?
if( exists $control{"condorconfig"} )
{
$personal_config = $control{"condorconfig"};
} else {
$personal_config = "condor_config";
# store this default in the personal condor params so
# other parts of the code can rely on it.
$personal_condor_params{"condorconfig"} = $personal_config;
}
# was a special daemon list called out?
if( exists $control{"daemon_list"} )
{
#print "New daemon list called out <$control{daemon_list}>\n";
$personal_daemons = $control{"daemon_list"};
}
# was a special local config file name called out?
if( exists $control{"condorlocal"} )
{
$personal_local = $control{"condorlocal"};
} else {
$personal_local = "condor_config.local";
}
# was a special local config file src called out?
if( exists $control{"condorlocalsrc"} )
{
$personal_local_src = $control{"condorlocalsrc"};
}
# was a special local config file post src called out?
if( exists $control{"secprepostsrc"} )
{
$personal_sec_prepost_src = $control{"secprepostsrc"};
}
# was a special local config file post src called out?
if( exists $control{"localpostsrc"} )
{
$personal_local_post_src = $control{"localpostsrc"};
}
# is this for a specific universe like parallel?
if( exists $control{"universe"} )
{
$personal_universe = $control{"universe"};
debug( "HMMMMMMMMMMM universe request is $personal_universe\n",$debuglevel);
}
debug( "Proto file is --$personal_template--\n",3);
$personalmaster = "$topleveldir/sbin/condor_master";
#filter fig file storing entries we set so we can test
#for completeness when we are done
# Windows wants a native (backslash or mixed) form of the sandbox path.
my $mytoppath = "";
if( CondorUtils::is_windows() == 1 ){
if(is_windows_native_perl()) {
$_ = $topleveldir;
s/\//\\/g; # convert to reverse slants
#s/\\/\\\\/g;
$mytoppath = $_;
} else {
$mytoppath = `cygpath -m $topleveldir`;
}
CondorUtils::fullchomp($mytoppath);
} else {
$mytoppath = $topleveldir;
}
debug( "HMMMMMMMMMMM personal local is $personal_local , mytoppath is $mytoppath",$debuglevel);
my $line;
# Copy the template into the new condor_config, rewriting LOCAL_DIR and
# LOCAL_CONFIG_FILE and dropping LOCAL_CONFIG_DIR as we go.
open(TEMPLATE,"<$personal_template") || die "Can not open template: $personal_template: $!\n";
debug( "want to open new config file as $topleveldir/$personal_config\n",$debuglevel);
open(NEW,">$topleveldir/$personal_config") || die "Can not open new config file: $topleveldir/$personal_config: $!\n";
# There is an interesting side effect of reading and changing the condor_config
# template and then at the end, add the new LOCAL_DIR entree. That is when the constructed
# file is inspected it looks like the LOCAL_DIR came from the environment.
# So we are going to parse for a comment only line and only drop it out if it is NOT
# the one followed by "# from <Environment>". When that is the line that follows, we will drop
# out the new LOCAL_DIR, then a blank line and THEN those two saved lines.
my $lastline = "";
my $thisline = "";
while(<TEMPLATE>)
{
CondorUtils::fullchomp($_);
$line = $_;
if( $line =~ /^LOCAL_DIR\s*=.*/ )
# this is now being added to Config/condor_config so should propagate
{
debug( "-----------$line-----------\n",4);
$personal_config_changes{"LOCAL_DIR"} = "LOCAL_DIR = $mytoppath\n";
print NEW "LOCAL_DIR = $mytoppath\n";
} elsif( $line =~ /^LOCAL_CONFIG_FILE\s*=.*/ ) {
debug( "-----------$line-----------\n",4);
if($iswindows) {
if(is_windows_native_perl()) {
#print "My toppath:$mytoppath\n";
$personal_config_changes{"LOCAL_CONFIG_FILE"} = "LOCAL_CONFIG_FILE = $mytoppath\\$personal_local\n";
print NEW "LOCAL_CONFIG_FILE = $mytoppath\\$personal_local\n";
} else {
$personal_config_changes{"LOCAL_CONFIG_FILE"} = "LOCAL_CONFIG_FILE = $mytoppath/$personal_local\n";
print NEW "LOCAL_CONFIG_FILE = $mytoppath/$personal_local\n";
}
} else {
$personal_config_changes{"LOCAL_CONFIG_FILE"} = "LOCAL_CONFIG_FILE = $mytoppath/$personal_local\n";
print NEW "LOCAL_CONFIG_FILE = $mytoppath/$personal_local\n";
}
} elsif( $line =~ /^#\s*$/ ) {
#print "save $line could be comment before environment label\n";
$lastline = $line;
} elsif( $line =~ /^#.*?Environment.*$/ ) {
$thisline = $line;
#print "TunePersonalCondor: setting LOCAL_DIR=$mytoppath\n";
print NEW "LOCAL_DIR = $mytoppath\n";
print NEW "\n";
print NEW "$lastline\n";
print NEW "$thisline\n";
} elsif( $line =~ /^#.*$/ ) {
# NOTE(review): a general comment line re-emits the saved $lastline and
# $thisline but never $line itself, so ordinary template comments are
# dropped (and stale saved lines may repeat) — confirm this is intended.
#print "Not environment label, drop both lines\n";
print NEW "$lastline\n";
print NEW "$thisline\n";
} elsif( $line =~ /^LOCAL_CONFIG_DIR\s*=.*/ ) {
# eat this entry
} else {
print NEW "$line\n";
}
}
close(TEMPLATE);
close(NEW);
# Now build the condor_config.local file from scratch.
open(NEW,">$topleveldir/$personal_local") || die "Can not open template: $!\n";
if($minimalconfig == 0) {
if( ! exists $personal_config_changes{"CONDOR_HOST"} )
{
$personal_config_changes{"CONDOR_HOST"} = "CONDOR_HOST = $condorhost\n";
}
if( exists $control{"ports"} )
{
debug( "Port Changes being Processed!!!!!!!!!!!!!!!!!!!!\n",$debuglevel);
$portchanges = $control{"ports"};
debug( "portchanges set to $portchanges\n",$debuglevel);
}
debug( "opening to write: $topleveldir/$personal_local\n",$debuglevel);
if($personal_daemons ne "")
{
# Allow the collector to run on the default and expected port as the main
# condor install on this system.
print NEW "# Adding requested daemons\n";
print NEW "DAEMON_LIST = $personal_daemons\n";
} else {
print NEW "DAEMON_LIST = MASTER STARTD SCHEDD COLLECTOR NEGOTIATOR\n";
}
if(is_windows_native_perl()) {
print NEW "NEGOTIATOR_ADDRESS_FILE = \$(LOG)\\.negotiator_address\n";
print NEW "SCHEDD_ADDRESS_FILE = \$(LOG)\\.schedd_address\n";
} else {
print NEW "NEGOTIATOR_ADDRESS_FILE = \$(LOG)/.negotiator_address\n";
print NEW "SCHEDD_ADDRESS_FILE = \$(LOG)/.schedd_address\n";
}
# Short intervals so tests see daemon activity quickly.
print NEW "UPDATE_COLLECTOR_WITH_TCP = FALSE\n";
print NEW "SCHEDD_INTERVAL = 5\n";
print NEW "UPDATE_INTERVAL = 5\n";
print NEW "NEGOTIATOR_INTERVAL = 5\n";
print NEW "CONDOR_ADMIN = \n";
print NEW "CONDOR_JOB_POLL_INTERVAL = 5\n";
print NEW "PERIODIC_EXPR_TIMESLICE = .99\n";
print NEW "JOB_START_DELAY = 0\n";
print NEW "LOCK = \$(LOG)\n";
if($iswindows == 1) {
#print NEW "PROCD_LOG = \$(LOG)/ProcLog\n";
print NEW "# Adding procd pipe for windows\n";
print NEW "PROCD_ADDRESS = \\\\.\\pipe\\$procdaddress\n";
}
my $jvm = "";
my $java_libdir = "";
my $exec_result;
my $javabinary = "";
# now we consider configuration requests
if( exists $control{"slots"} )
{
my $myslots = $control{"slots"};
debug( "Slots wanted! Number = $myslots\n",$debuglevel);
print NEW "# Adding slot request from param file\n";
print NEW "NUM_CPUS = $myslots\n";
print NEW "SLOTS = $myslots\n";
print NEW "# Done Adding slot request from param file\n";
}
# Seed the local config with the requested source file, if any.
if($personal_local_src ne "")
{
print NEW "# Requested local config: $personal_local_src\n";
#print "******************** Must seed condor_config.local <<$personal_local_src>> ************************\n";
debug( "opening to read: $personal_local_src\n",$debuglevel);
open(LOCSRC,"<$personal_local_src") || die "Can not open local config template: $!\n";
while(<LOCSRC>)
{
CondorUtils::fullchomp($_);
$line = $_;
print NEW "$line\n";
}
# now make sure we have the local dir we want after the generic .local file is seeded in
# $line = $personal_config_changes{"LOCAL_DIR"};
# print NEW "$line\n";
# # and a lock directory we like
close(LOCSRC);
}
# Append security pre/post snippet, if requested.
if($personal_sec_prepost_src ne "")
{
debug( "Adding to local config file from $personal_sec_prepost_src\n",$debuglevel);
open(SECURITY,"<$personal_sec_prepost_src") || die "Can not do local config additions: $personal_sec_prepost_src: $!\n";
print NEW "# Adding changes requested from $personal_sec_prepost_src\n";
while(<SECURITY>)
{
print NEW "$_";
}
close(SECURITY);
print NEW "# Done Adding changes requested from $personal_sec_prepost_src\n";
}
# Append the post-source snippet, if requested.
if($personal_local_post_src ne "")
{
debug("Adding to local config file from $personal_local_post_src\n",$debuglevel);
open(POST,"<$personal_local_post_src") || die "Can not do local config additions: $personal_local_post_src:$!\n";
print NEW "# Adding changes requested from $personal_local_post_src\n";
while(<POST>)
{
print NEW "$_";
}
close(POST);
print NEW "# Done Adding changes requested from $personal_local_post_src\n";
}
# Literal config text handed in by the test.
if( exists $control{append_condor_config} ) {
print NEW "# Appending from 'append_condor_config'\n";
print NEW "$control{append_condor_config}\n";
print NEW "# Done appending from 'append_condor_config'\n";
}
# assume an array reference
if( exists $control{append_condor_config_plus} ) {
print NEW "# Appending from 'append_condor_config_plus'\n";
my $arrayref = $control{append_condor_config_plus};
foreach my $line (@{$arrayref}) {
print NEW "$line\n";
}
print NEW "# Done appending from 'append_condor_config_plus'\n";
}
}
# Always override ALL_DEBUG with one that adds D_CMD to it.
print NEW "ALL_DEBUG = \$(ALL_DEBUG) D_CMD:1\n";
# we are testing. dramatically reduce MaxVacateTime
print NEW "JOB_MAX_VACATE_TIME = 15\n";
close(NEW);
if (defined $returnarrayref) {
PostTunePersonalCondor($personal_config_file,$returnarrayref);
} else {
PostTunePersonalCondor($personal_config_file);
}
}
#################################################################
#
# PostTunePersonalCondor() is called after TunePersonalCondor.
# It assumes that the configuration file is all set up and
# ready to use.
# PostTunePersonalCondor($config_file [, $outputarrayref])
#
# Runs after TunePersonalCondor, once the configuration file is fully
# written. Reads DAEMON_LIST from the finished config and performs
# daemon-specific fixups (currently: quill tests need ../pgpass copied
# to spool/.pgpass).
sub PostTunePersonalCondor
{
    my $config_file = shift;
    my $outputarrayref = shift;

    debug("PostTunePersonalCondor: getting DAEMON_LIST from $config_file\n",2);

    # If this is a quill test, then quill is within
    # $personal_daemons AND $topleveldir/../pgpass wants to be
    # $topleveldir/spool/.pgpass
    my $configured_daemon_list;
    if (defined $outputarrayref) {
        $configured_daemon_list = CondorConfigVal($config_file,"daemon_list", $outputarrayref);
    } else {
        $configured_daemon_list = CondorConfigVal($config_file,"daemon_list");
    }

    # Guard the match: CondorConfigVal can come back undef/empty when the
    # lookup fails, and the original matched without checking.
    if(defined $configured_daemon_list && $configured_daemon_list =~ m/quill/i ) {
        debug( "This is a quill test (because DAEMON_LIST=$configured_daemon_list)\n", $debuglevel );
        my $cmd = "cp $topleveldir/../pgpass $topleveldir/spool/.pgpass";
        system("$cmd");
    }
}
#################################################################
#
# StartPersonalCondor will start a personal condor which has
# been set up. If the ports are dynamic, it will look up the
# address and return the port number.
#
#################################################################
#
# StartPersonalCondor
#
# Launch the condor_master for the already-configured instance, wait for
# it to reach the "up" state (unless no_wait/daemonwait say otherwise),
# and return the collector port (or "0" when ports are not dynamic).
# Dies if an instance with this config already appears to be running.
sub StartPersonalCondor
{
my %control = %personal_condor_params;
my $personalmaster = "";
# If we start a personal Condor as root (for testing the VM universe),
# we need to change the permissions/ownership on the directories we
# made so that the master (which runs as condor) can use them.
if( ! CondorUtils::is_windows() && ( $> == 0 )) {
my $testName = $control{ 'test_name' };
# NOTE(review): this uses $pid, while directory suffixes elsewhere are
# built from $mpid ("pdir$mpid") — confirm $pid is set appropriately here.
system( "chown condor.condor $home/${testName}.saveme >& /dev/null" );
system( "chown -R condor.condor $home/${testName}.saveme/pdir$pid >& /dev/null" );
}
my $configfile = $control{"condorconfig"};
#my $fullconfig = "$topleveldir/$configfile";
# The environment's CONDOR_CONFIG is taken as the instance's config.
my $fullconfig = "$ENV{CONDOR_CONFIG}";
#print "StartPersonalCondor CONDOR_CONFIG=$fullconfig\n";
debug( "Want $configfile for config file\n",$debuglevel);
my $figpath = "";
if( CondorUtils::is_windows() == 1 ){
if(is_windows_native_perl()) {
$personalmaster = "start $localdir" . "\\bin\\condor_master.exe -f";
} else {
$figpath = `cygpath -m $fullconfig`;
CondorUtils::fullchomp($figpath);
$fullconfig = $figpath;
# note: on windows all binaries are in bin!
my $tmp = `cygpath -m $localdir`;
CondorUtils::fullchomp($tmp);
$personalmaster = $tmp . "/bin/condor_master.exe -f &";
}
} else {
# NOTE(review): no "/" is inserted between $localdir and "sbin/..." —
# this assumes $localdir already ends in a slash; verify against
# InstallPersonalCondor's return value.
$personalmaster = $localdir . "sbin/condor_master -f &";
}
# We may not want to wait for certain daemons to talk
# to each other on startup.
if( exists $control{"daemonwait"} ) {
my $waitparam = $control{"daemonwait"};
if($waitparam eq "false") {
$personal_startup_wait = "false";
}
}
# set up to use the existing generated configfile
my $condorstate = 0;
$ENV{CONDOR_CONFIG} = $fullconfig;
my $condor_instance = CondorTest::GetPersonalCondorWithConfig($fullconfig);
if($condor_instance != 0) {
$condorstate = $condor_instance->GetCondorAlive();
} else {
$condorstate = 0;
}
my $fig = $ENV{CONDOR_CONFIG};
debug( "Condor_config from environment is --$fig--\n",$debuglevel);
# At the momment we only restart/start a personal we just configured
# or reconfigured
if( $condorstate == 0 ) {
# not running with this config so treat it like a start case
debug("Condor state is off\n",$debuglevel);
debug( "start up the personal condor!--$personalmaster--\n",$debuglevel);
# when open3 is used it sits and waits forever
if( exists $control{catch_startup_start}) {
print "catch_startup_start seen. Calling runCondorTool\n";
runCondorTool("$personalmaster",$control{catch_startup_start},2,{emit_output=>1});
} else {
my $res = system("$personalmaster");
if($res != 0) {
print "Failed system call starting master\n";
}
}
sleep(2);
} else {
debug_flush();
die "Bad state for a new personal condor configuration! running :-(\n";
}
# is test opting into new condor personal status yet?
my $res = 1;
sleep(5);
if(exists $control{"no_wait"}) {
#print "use no methods here to be sure daemons are up.\n";
} else {
#print "NO_WAIT not set???????\n";
# Block until the instance reports fully up (or time out and die).
my $condor_name = $personal_condor_params{"condor_name"};
$res = NewIsRunningYet($fullconfig, $condor_name);
}
if($res == 0) {
debug_flush();
die "Can not continue because condor is not running!!!!\n";
}
# if this was a dynamic port startup, return the port which
# the collector is listening on...
if( $portchanges eq "dynamic" )
{
debug("Looking for collector port!\n",$debuglevel);
return( FindCollectorPort() );
}
else
{
debug("NOT Looking for collector port!\n",$debuglevel);
return("0");
}
}
# ProcessStateWanted($condor_config)
#
# Classify the current state of the personal condor identified by the
# given config file. Returns one of:
#   "?"            - no instance known for this config
#   "up"           - all daemons alive and the collector sees the schedd
#   "alldaemonsup" - all daemons alive, schedd not yet in the collector
#   "down"         - no live daemons (going down) or no live master
#   "hasmaster"    - master alive but not yet in a terminal state
sub ProcessStateWanted
{
    my $condor_config = shift;
    #print "ProcessStateWanted: $condor_config\n";
    my $condor_instance = CondorTest::GetPersonalCondorWithConfig($condor_config);

    # Bug fix: test for "no instance" BEFORE any method call. The original
    # invoked GetCondorDirection() first, which would die on 0 instead of
    # returning "?".
    if($condor_instance == 0) {
        return("?");
    }

    my $direction = $condor_instance->GetCondorDirection(); # up or down
    #print "in ProcessStateWanted going:$direction\n";

    # lets look for best case first
    my $scheddseen = "";
    if($direction eq "up") {
        my $alldaemons = $condor_instance->HasAllLiveDaemons();
        if($alldaemons eq "yes") {
            $scheddseen = $condor_instance->CollectorSeesSchedd();
            if($scheddseen eq "yes") {
                return("up");
            }
            return("alldaemonsup");
        }
    } else {
        my $nodaemons = $condor_instance->HasNoLiveDaemons();
        if($nodaemons eq "yes") {
            return("down");
        }
    }

    my $master = $condor_instance->HasLiveMaster();
    if($master == 0) {
        return("down");
    }

    return("hasmaster");
}
# StateChange($desiredstate, $config, $timelimit, $masterlimit)
#
# Poll once per second until the personal condor for $config reaches
# $desiredstate ("up" or "down"). $masterlimit bounds how long the master
# may take to start (going up) or exit (going down); $timelimit bounds
# the whole transition. Returns 1 on success, 0 on timeout (after dumping
# who-data). On success the instance's alive flag is updated to match.
sub StateChange
{
my $desiredstate = shift || croak "No desired state passed in\n";
my $config = shift;
my $timelimit = shift;
my $masterlimit = shift;
# Note: this lexical shadows the module-level $RunningTimeStamp.
my $RunningTimeStamp = time;
my $finaltime = 0;
my $state = "";
my $now = 0;
# we'll use this to set alive field in instance correctly
# or upon error to drop out condor_who data
my $condor_config = $ENV{CONDOR_CONFIG};
my $condor_instance = CondorTest::GetPersonalCondorWithConfig($condor_config);
my $node_name = $condor_instance->GetCondorName() || "Condor";
my $daemonlist = $condor_instance->GetDaemonList();
debug("CondorPersonal Waiting $timelimit sec for <$node_name> to be $desiredstate. MasterTime:$masterlimit\n\tDAEMON_LIST = $daemonlist\n", 1);
#print "\tCONDOR_CONFIG=$config\n";
while($state ne $desiredstate) {
#print "Waiting for state: $desiredstate current $state\n";
my $amialive = $condor_instance->HasLiveMaster();
$now = time;
# Going up: master must appear within $masterlimit seconds.
if(($amialive == 0) &&($desiredstate eq "up")) {
if(($now - $RunningTimeStamp) >= $masterlimit) {
print "StateChange: <$node_name> Master did not start in $masterlimit seconds, giving up.\n";
$condor_instance->DisplayWhoDataInstances();
return(0);
}
}
# Going down: master must be gone within $masterlimit seconds.
if(($amialive == 1) &&($desiredstate eq "down")) {
if(($now - $RunningTimeStamp) >= $masterlimit) {
print "StateChange: <$node_name> Master did not exit in $masterlimit seconds. giving up.\n";
$condor_instance->DisplayWhoDataInstances();
return(0);
}
}
# Overall deadline for reaching the desired state.
if((($now - $RunningTimeStamp) >= $timelimit) && ($timelimit >= $masterlimit)) {
print "StateChange: <$node_name>:$desiredstate not seen after $timelimit seconds. giving up\n";
$condor_instance->DisplayWhoDataInstances();
return(0);
}
#print "StateChange: again\n";
#CollectWhoData($desiredstate);
# Refresh condor_who data, then re-classify the instance's state.
CollectWhoData();
$state = ProcessStateWanted($config);
#print "StateChange: now:$state\n";
CheckNamedFWTimed();
sleep 1;
}
# Record the final liveness on the instance object.
if($desiredstate eq "up") {
$condor_instance->SetCondorAlive(1);
} elsif($desiredstate eq "down") {
$condor_instance->SetCondorAlive(0);
} else {
die "Only up/down transitions expected to be requested\n";
}
#$condor_instance->DisplayWhoDataInstances();
$now = time;
$finaltime = ($now - $RunningTimeStamp);
if($desiredstate eq "up") {
debug("Condor <$node_name> is running. ($finaltime of $timelimit seconds)\n", 1);
} elsif($desiredstate eq "down") {
debug("Condor <$node_name> is off. ($finaltime of $timelimit seconds)\n", 1);
}
return(1);
}
# NewIsRunningYet($config, $name)
#
# Wait for the personal condor identified by $config to come fully up.
# Allows 120 seconds for the complete "up" state, but the master itself
# must be alive within 30 seconds or we bail early. ($name is accepted
# for symmetry with NewIsDownYet but not used.)
sub NewIsRunningYet {
    my ($config, $name) = @_;
    #print "Checking running of: $name config:$config \n";
    return StateChange("up", $config, 120, 30);
}
# NewIsDownYet($config, $name)
#
# Wait for the personal condor identified by $config to shut down.
# Allows 120 seconds for the full "down" state and up to 160 seconds
# for the master itself to exit. ($name is informational only.)
sub NewIsDownYet {
    my ($config, $name) = @_;
    if(defined $name) {
        #print "Checking running of: $name config:$config \n";
    } else {
        #print "This config does not have a condor instance condor_name:$config\n";
    }
    return StateChange("down", $config, 120, 160);
}
#################################################################
#
# dual State thoughts which condor_who may detect
#
# turning on, Unknown
# turning on, Not Run Yet
# turning on, Coming up
# turning on, master alive
# turning on, collector alive
# turning on, collector knows xxxxx
# turning on, all daemons
# going down, all daemons
# going down, collector knows XXXXX
# going down, collector alive
# going down, master alive
# down
# $self = {
# textfile => shift,
# placeholders => { } # { }, not ( )
# };
# ...
# $self->{placeholders}->{$key} = $value;
# delete $self->{placeholders}->{$key};
# @keys = keys %{$self->{placeholders}};
# foreach my ($k,$v) each %{$self->{placeholders}} { ... }
#
#
sub CollectWhoData
{
# Poll the currently configured personal condor (via $ENV{CONDOR_CONFIG})
# with condor_who and feed each parsed output line into
# CondorTest::LoadWhoData so the framework's view of daemon state stays
# current.
#
# $desiredstate - optional "up"/"down" hint; "down" (or no known live
# master) selects the cheaper "condor_who -quick" form.
my $desiredstate = shift;
# experiment to vary behavior going up vs down -- OFF now,
# and nothing is passed in
my @whoarray;
#print "CollectWhoData for this Condor:<$ENV{CONDOR_CONFIG}>\n";
# Get condor instance for this config
#print CondorUtils::TimeStr() . " CollectWhoData start\n";
my $usequick = 1;
my $condor = CondorTest::GetPersonalCondorWithConfig($ENV{CONDOR_CONFIG});
#$condor->DisplayWhoDataInstances();
# condor_who -quick is best before master is alive
if($condor != 0) {
if(defined $desiredstate) {
if($desiredstate eq "down"){
print "going down and using quick mode\n";
} else {
# full condor_who output is only meaningful once the master is up
if($condor->HasLiveMaster() == 1) {
$usequick = 0;
}
}
} else {
my $hasLive = $condor->HasLiveMaster();
#print "HasLiveMaster says:$hasLive\n";
if($condor->HasLiveMaster() == 1) {
$usequick = 0;
}
}
} else {
die "CollectWhoData with no condor instance yet\n";
}
# normalize backslashes to forward slashes so the -log path works on windows
my $logdir = `condor_config_val log`;
$_ = $logdir;
s/\\/\//g;
$logdir = $_;
CondorUtils::fullchomp($logdir);
if($usequick == 1) {
#print "Using -quick\n";
CondorTest::runCondorTool("condor_who -quick -daemon -log \"$logdir\"",\@whoarray,2,{emit_output=>0});
foreach my $wholine (@whoarray) {
CondorUtils::fullchomp($wholine);
# print timestamp() . ": raw whodataline: $wholine\n";
# NOTE(review): the trailing (.*?) groups below can legally match empty
# strings, so only the first two captures are actually relied upon.
if($wholine =~ /(\w*)\s+(.*?)\s+(.*?)\s+(.*?)/) {
# print timestamp() . ": Who data with 4 fields:$1,$2,$3,$4\n";
#print "condor_who -quick fields. $1 daemon name $2 pid\n";
# if this is the master, is the pid real?
my $savepid = $2;
my $processstring = "";
if($1 eq "Master") {
#print "Master found\n";
# Verify the reported pid actually belongs to a condor_master
# process before marking the master alive.
if(CondorUtils::is_windows() == 1) {
my @grift = `tasklist | grep $savepid`;
foreach my $process (@grift) {
#print "consider:$process saved pid: $savepid\n";
if($process =~ /(.*?)\s+(\d+)\s+(\w+).*/) {
$processstring = $1;
if($2 eq $savepid) {
#print "Pids equal:$processstring\n";
# does this process have master in binary
if($processstring =~ /condor_master/) {
#print "Is master, thus master alive\n";
CondorTest::LoadWhoData("Master","yes",$savepid,"","","","");
}
}
}
}
} else {
#print "Master record\n";
if($savepid ne "no") {
my @psdata = `ps $savepid`;
my $pssize = @psdata;
#print "ps data on $savepid: $psdata[1]\n";
if($pssize >= 2) {
if($psdata[1] =~ /condor_master/) {
#Mark master alive
#print "Marking Master Alive*************************************************************\n";
#print "Before LoadWhoData:\n";
CondorTest::LoadWhoData("Master","yes",$savepid,"","","","");
}
}
}
}
} #else {
#print "Not Master but $1\n";
#next if $wholine =~ /^Daemon.*$/; # skip column headings
#next if $wholine =~ /^\-\-\-\-\-\-.*$/; # skip dashes
#CondorTest::LoadWhoData($1,$2,"","","","","");
#}
}
}
} else {
# Full condor_who run: parse either the 7-field (with address) or the
# 5-field variant of each daemon line.
CondorTest::runCondorTool("condor_who -daemon -log \"$logdir\"",\@whoarray,2,{emit_output=>0});
foreach my $wholine (@whoarray) {
CondorUtils::fullchomp($wholine);
next if $wholine =~ /^Daemon.*$/; # skip column headings
next if $wholine =~ /^\-\-\-\-\-\-.*$/; # skip dashes
# print timestamp() . ": rawhodataline: $wholine\n";
if($wholine =~ /(.*?)\s+(.*?)\s+(.*?)\s+(.*?)\s+(.*?)\s+<(.*)>\s+(.*)/) {
# print timestamp() . ": Who data with 7 fields:$1,$2,$3,$4,$5,$6,$7\n";
# print "Parse:$wholine\n";
# print "Before LoadWhoData: $1,$2,$3,$4,$5,$6,$7\n";
# this next call assumes we are interested in currently configed personal condor
# which means a lookup for condor instance for each daemon
CondorTest::LoadWhoData($1,$2,$3,$4,$5,$6,$7);
} elsif($wholine =~ /(.*?)\s+(.*?)\s+(.*?)\s+(.*?)\s+(.*?).*/) {
# print timestamp() . ": Who data with 5 fields:$1,$2,$3,$4,$5\n";
# print "Before LoadWhoData: $1,$2,$3,$4,$5\n";
CondorTest::LoadWhoData($1,$2,$3,$4,$5,"","");
} else {
#print "CollectWhoData: Parse Error: $wholine\n";
}
}
}
#print CondorUtils::TimeStr() . " CollectWhoData done\n";
}
sub KillDaemons
{
    # Shut down the personal condor described by $desiredconfig with a fast
    # "condor_off -master" and wait for it to be fully down.
    #
    # $desiredconfig - path to the condor config of the instance to stop
    # Returns 1 immediately when the instance was never marked alive,
    # otherwise the result of NewIsDownYet(). CONDOR_CONFIG is restored to
    # its previous value on every return path.
    my $desiredconfig = shift;
    my $oldconfig = $ENV{CONDOR_CONFIG};
    $ENV{CONDOR_CONFIG} = $desiredconfig;
    my $condor_name = $personal_condor_params{"condor_name"};
    my $condor_instance = CondorTest::GetPersonalCondorWithConfig($desiredconfig);
    my $alive = $condor_instance->GetCondorAlive();
    if($alive == 0) {
        # nothing to do since it is not marked as ever coming up.
        # fix: restore the environment before this early return -- it used
        # to stay pointed at $desiredconfig on this path.
        $ENV{CONDOR_CONFIG} = $oldconfig;
        return(1);
    }
    CondorTest::runToolNTimes("condor_off -master -fast",1,0,{expect_result=>\&ANY,emit_output=>0});
    my $res = NewIsDownYet($desiredconfig, $condor_name);
    # reset config to whatever it was.
    $ENV{CONDOR_CONFIG} = $oldconfig;
    return($res);
}
#################################################################
#
# KillDaemonPids
#
# Find the log directory via the config file passed in. Then
# open the PIDS fill and kill every pid in it for a sure kill.
#
#################################################################
sub KillDaemonPids
{
    # Historical entry point: the PIDS-file based hard kill was replaced by
    # KillDaemons(), which performs a clean condor_off and waits for the
    # daemons to exit. Kept for callers that still use the old name.
    my $cfg = shift;
    my $previousconfig = $ENV{CONDOR_CONFIG};
    KillDaemons($cfg);
    return(0);
}
#################################################################
#
# FindCollectorPort
#
# Looks for collector_address_file via condor_config_val and tries
# to parse port number out of the file.
#
sub FindCollectorAddress
{
    # Ask condor_config_val for the collector_address_file and scan it for a
    # sinful-string style "<host:port>" address.
    #
    # Returns: the "<...>" address on success, "0" when no address file is
    # configured or present, "" when the file exists but holds no address.
    my $collector_address_file = `condor_config_val collector_address_file`;
    CondorUtils::fullchomp($collector_address_file);
    debug( "Looking for collector port in file ---$collector_address_file---\n",$debuglevel);
    if($collector_address_file eq "") {
        debug( "No collector address file defined! Can not find port\n",$debuglevel);
        return("0");
    }
    if( ! -e "$collector_address_file") {
        debug( "No collector address file exists! Can not find port\n",$debuglevel);
        return("0");
    }
    # fix: three-arg open with a lexical handle (was a 2-arg bareword open)
    open(my $addrfh, '<', $collector_address_file) or die "Can not open collector address file: $!\n";
    while(my $line = <$addrfh>) {
        CondorUtils::fullchomp($line);
        if( $line =~ /^\s*(<[^>]+>)\s*$/ ) {
            my $address = $1;
            debug( "Collector address is $address\n",$debuglevel);
            # fix: the filehandle used to leak on this early return
            close($addrfh);
            return($address);
        } else {
            debug( "$line\n",$debuglevel);
        }
    }
    close($addrfh);
    debug( "No collector address found in collector address file!\n",$debuglevel);
    return("");
}
sub FindCollectorPort
{
    # Extract the TCP port number from the collector's "<ip:port>" address.
    # Returns "0" when the address cannot be parsed.
    my $collector_addr = FindCollectorAddress();
    if( my ($ip, $port) = $collector_addr =~ /^(\d+\.\d+\.\d+\.\d+):(\d+)$/ ) {
        debug( "Collector ip $ip and port $port\n",$debuglevel);
        return($port);
    }
    debug( "Failed to extract port from collector address: $collector_addr\n",$debuglevel);
    return("0");
}
#################################################################
#
# SaveMeSetup
#
# Make the saveme directory for a test, Create the pid based
# location for the current test within this saveme directory
# and then create a symbolic link to this pid directory. By doing this
# when the personal condor setup go to make a pid directory to
# run in, it ends up running within the saveme directory.
# This saveme directory allows more data to be returned during the
# nightly testing.
#
# If all is good the current pid is returned but if there
# is an error 0 is returned.
#
#################################################################
sub SaveMeSetup
{
    # Create <testname>.saveme/pdir<pid> so the personal condor runs inside
    # the saveme tree and its logs survive for nightly-test collection.
    # Returns the current pid on success, 0 on any failure.
    my $testname = shift;
    print "Into SaveMeSetup for:$testname\n";

    my $pid = $$;
    my $savedir = $testname . ".saveme";
    if( CreateDir("-p $savedir") != 0 ) {
        print "SaveMeSetup: Could not create \"saveme\" directory for test\n";
        return(0);
    }

    # There should be no matching directory here unless we are getting pid
    # recycling. Start fresh either way.
    my $piddir = "$savedir/pdir$pid";
    if( system("rm -rf $piddir") != 0 ) {
        print "SaveMeSetup: Could not remove prior pid directory in savemedir \n";
        return(0);
    }
    if( system("mkdir $piddir") != 0 ) {
        print "SaveMeSetup: Could not create pid directory in \"saveme\" directory\n";
        return(0);
    }

    # A symbolic link from the bare pid to the pid directory used to be made
    # here for the personal condor module; that step is currently disabled.
    return($pid);
}
sub PersonalSystem
{
# Run a command and return its exit code; when DUMP_CONDOR_LOGS is set in
# the environment, also dump the personal condor logs from the pdir<pid>
# directory afterwards.
my $args = shift @_;
my $dumpLogs = $ENV{DUMP_CONDOR_LOGS};
my $mypid = $$;
$mypid = "pdir$mypid";
if(defined $dumpLogs) {
print "Dump Condor Logs if things go south\n";
print "Pid dir is $mypid\n";
system("pwd");
}
# NOTE(review): the return of system() is dereferenced as a hashref with an
# 'exitcode' key. That only works if "system" here is the CondorUtils
# override rather than the Perl builtin -- confirm which is in scope.
my $hashref = system($args);
my $rc = ${$hashref}{exitcode};
if(defined $dumpLogs) {
print "Dumping Condor Logs\n";
my $savedir = getcwd();
chdir("$mypid");
system("ls");
PersonalDumpLogs($mypid);
chdir("$savedir");
system("pwd");
}
return $rc;
}
sub PersonalDumpLogs
{
    # Walk the current directory; every subdirectory is assumed to hold a
    # personal condor, whose log/ subdirectory gets dumped to stdout.
    #
    # $piddir - the pid directory being dumped (tracing only)
    my $piddir = shift;
    #print "PersonalDumpLogs for $piddir\n";
    # fix: was "opendir PD, '.' || die ..." -- || bound to '.' so the die
    # could never fire; use a lexical dirhandle and low-precedence "or".
    opendir(my $dh, ".") or die "failed to open . : $!\n";
    #print "Open worked.... listing follows.....\n";
    foreach my $file (readdir $dh)
    {
        #print "Consider: $file\n";
        next if $file =~ /^\.\.?$/; # skip . and ..
        if(-f $file ) {
            #print "F:$file\n";
        } elsif( -d $file ) {
            #print "D:$file\n";
            my $logdir = $file . "/log";
            PersonalDumpCondorLogs($logdir);
        }
    }
    # fix: was close() on a directory handle; closedir is required.
    closedir($dh);
}
sub PersonalDumpCondorLogs
{
    # Print the contents of every regular file in $logdir to stdout, each
    # preceded by a DUMP banner; restores the working directory afterwards.
    my $logdir = shift;
    my $startcwd = getcwd();
    # NOTE(review): chdir failure is not checked here (historical behavior);
    # on failure the current directory would be dumped instead of $logdir.
    chdir("$logdir");
    print "\n\n******************* DUMP $logdir ******************\n\n";
    # fix: was "opendir LD, '.' || die ..." -- || bound to '.' so the die
    # could never fire; use a lexical dirhandle and low-precedence "or".
    opendir(my $dh, ".") or die "failed to open . : $!\n";
    foreach my $file (readdir $dh)
    {
        next if $file =~ /^\.\.?$/; # skip . and ..
        if(-f $file ) {
            print "\n\n******************* DUMP $file ******************\n\n";
            # fix: three-arg open with a lexical handle (was 2-arg bareword)
            open(my $logfh, '<', $file) or die "Can not open logfile: $file: $!\n";
            while(<$logfh>){
                print "$_";
            }
            close($logfh);
        } elsif( -d $file ) {
            # subdirectories of the log dir are intentionally not recursed
        }
    }
    # fix: was close() on a directory handle; closedir is required.
    closedir($dh);
    chdir("$startcwd");
}
sub DisplayPartialLocalConfig
{
# Locate the personal condor's local config file (sibling of the log dir)
# and print the "# Requested"/"# Adding" adjustment section from it.
#
# $configloc - caller-supplied location hint (currently unused; the path is
# derived from condor_config_val log instead)
my $configloc = shift;
# normalize backslashes so the path parses the same way on windows
my $logdir = `condor_config_val log`;
$_ = $logdir;
s/\\/\//g;
$logdir = $_;
my $fullpathtolocalconfig = "";
my $line = "";
fullchomp($logdir);
if($logdir =~ /(.*\/)log/) {
#print "Config File Location <$1>\n";
# $personal_local is the module-level local-config file name
$fullpathtolocalconfig = $1 . $personal_local;
print "\nlocal config file: $fullpathtolocalconfig\n";
if( -f $fullpathtolocalconfig) {
print "\nDumping Adjustments to: $personal_local\n\n";
# once a "# Adding" marker is seen, echo every following line
my $startdumping = 0;
open(LC,"<$fullpathtolocalconfig") or die "Can not open $fullpathtolocalconfig: $!\n";
while(<LC>) {
fullchomp($_);
$line = $_;
if($line =~ /# Requested.*/) {
print "$line\n";
} elsif($line =~ /# Adding.*/) {
if($startdumping == 0) {
$startdumping = 1;
}
print "$line\n";
} else {
if($startdumping == 1) {
print "$line\n";
}
}
}
close(LC);
print "\nDONE Dumping Adjustments to: $personal_local\n\n";
}
}
}
sub IsThisNightly
{
    # Heuristic for nightly-test runs: those execute out of a path that
    # contains "/execute/". Returns 1 for such a location, 0 otherwise.
    my $location = shift;
    debug("IsThisNightly passed: $location\n",$debuglevel);
    return( ($location =~ /^.*(\/execute\/).*$/) ? 1 : 0 );
}
sub CheckNamedFWTimed
{
# Placeholder: walks the registered framework timers in name order, but the
# callback-dispatch body has not been implemented (loop is intentionally
# empty). Called once per second from the StateChange wait loop.
foreach my $key (sort keys %framework_timers) {
}
}
sub RegisterFWTimed
{
    # Register (or refresh) a named framework timer.
    # args: timer name, code ref to invoke, firing interval in seconds.
    my $timername = shift || croak "Missing fw callback name\n";
    my $callback  = shift || croak "missing callback argument\n";
    my $delta     = shift || croak "missing delta argument\n";
    my $entry = $framework_timers{$timername} ||= {};
    $entry->{name}          = $timername;
    $entry->{timer_time}    = time;      # registration timestamp
    $entry->{timedcallback} = $callback;
    $entry->{timeddelta}    = $delta;
}
sub RemoveFWTimed
{
    # Drop a previously registered framework timer by name.
    my $timername = shift
        or croak "Missing name of named callback to delete\n";
    delete $framework_timers{$timername};
}
# Placeholder configuration fragments -- both are currently just a single
# newline. Presumably stubs for a minimal bootstrap config; TODO confirm
# whether anything still references them.
my $minimalistConfig = "
";
my $WinminimalistConfigextra = "
";
sub DoInitialConfigCheck
{
    # Sanity-check the environment before any personal condor work.
    # Returns 0 when CONDOR_CONFIG is set and names an existing file,
    # 1 otherwise (after printing what is wrong).
    unless (exists $ENV{CONDOR_CONFIG}) {
        print "CONDOR_CONFIG not set\n";
        return(1);
    }
    my $config = $ENV{CONDOR_CONFIG};
    if( -f "$config") {
        #print "Our initial main config file:$config\n";
        return(0);
    }
    print "CONDOR_CONFIG defined but missing:$config\n";
    return(1);
}
1;
| neurodebian/htcondor | src/condor_scripts/CondorPersonal.pm | Perl | apache-2.0 | 63,103 |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by lib/unicore/mktables from the Unicode
# database, Version 6.2.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly.
# The name this swash is to be known by, with the format of the mappings in
# the main body of the table, and what all code points missing from this file
# map to.
$utf8::SwashInfo{'ToScx'}{'format'} = 'sw'; # string, but some elements are interpreted as a list; white space occurs only as list item separators
$utf8::SwashInfo{'ToScx'}{'missing'} = 'Unknown';
return <<'END';
0000 0040 Common
0041 005A Latin
005B 0060 Common
0061 007A Latin
007B 00A9 Common
00AA Latin
00AB 00B9 Common
00BA Latin
00BB 00BF Common
00C0 00D6 Latin
00D7 Common
00D8 00F6 Latin
00F7 Common
00F8 02B8 Latin
02B9 02DF Common
02E0 02E4 Latin
02E5 02E9 Common
02EA 02EB Bopomofo
02EC 02FF Common
0300 0341 Inherited
0342 Greek
0343 0344 Inherited
0345 Greek
0346 0362 Inherited
0363 036F Latin
0370 0373 Greek
0374 Common
0375 0377 Greek
037A 037D Greek
037E Common
0384 Greek
0385 Common
0386 Greek
0387 Common
0388 038A Greek
038C Greek
038E 03A1 Greek
03A3 03E1 Greek
03E2 03EF Coptic
03F0 03FF Greek
0400 0484 Cyrillic
0485 0486 Cyrillic Latin
0487 0527 Cyrillic
0531 0556 Armenian
0559 055F Armenian
0561 0587 Armenian
0589 Armenian Georgian
058A Armenian
058F Armenian
0591 05C7 Hebrew
05D0 05EA Hebrew
05F0 05F4 Hebrew
0600 0604 Arabic
0606 060B Arabic
060C Arabic Syriac Thaana
060D 061A Arabic
061B Arabic Syriac Thaana
061E Arabic
061F Arabic Syriac Thaana
0620 063F Arabic
0640 Arabic Mandaic Syriac
0641 064A Arabic
064B 0655 Arabic Syriac
0656 065F Arabic
0660 0669 Arabic Thaana
066A 066F Arabic
0670 Arabic Syriac
0671 06DC Arabic
06DD Common
06DE 06FF Arabic
0700 070D Syriac
070F 074A Syriac
074D 074F Syriac
0750 077F Arabic
0780 07B1 Thaana
07C0 07FA Nko
0800 082D Samaritan
0830 083E Samaritan
0840 085B Mandaic
085E Mandaic
08A0 Arabic
08A2 08AC Arabic
08E4 08FE Arabic
0900 0950 Devanagari
0951 0952 Devanagari Latin
0953 0963 Devanagari
0964 0965 Bengali Devanagari Gurmukhi Oriya Takri
0966 0977 Devanagari
0979 097F Devanagari
0981 0983 Bengali
0985 098C Bengali
098F 0990 Bengali
0993 09A8 Bengali
09AA 09B0 Bengali
09B2 Bengali
09B6 09B9 Bengali
09BC 09C4 Bengali
09C7 09C8 Bengali
09CB 09CE Bengali
09D7 Bengali
09DC 09DD Bengali
09DF 09E3 Bengali
09E6 09FB Bengali
0A01 0A03 Gurmukhi
0A05 0A0A Gurmukhi
0A0F 0A10 Gurmukhi
0A13 0A28 Gurmukhi
0A2A 0A30 Gurmukhi
0A32 0A33 Gurmukhi
0A35 0A36 Gurmukhi
0A38 0A39 Gurmukhi
0A3C Gurmukhi
0A3E 0A42 Gurmukhi
0A47 0A48 Gurmukhi
0A4B 0A4D Gurmukhi
0A51 Gurmukhi
0A59 0A5C Gurmukhi
0A5E Gurmukhi
0A66 0A75 Gurmukhi
0A81 0A83 Gujarati
0A85 0A8D Gujarati
0A8F 0A91 Gujarati
0A93 0AA8 Gujarati
0AAA 0AB0 Gujarati
0AB2 0AB3 Gujarati
0AB5 0AB9 Gujarati
0ABC 0AC5 Gujarati
0AC7 0AC9 Gujarati
0ACB 0ACD Gujarati
0AD0 Gujarati
0AE0 0AE3 Gujarati
0AE6 0AF1 Gujarati
0B01 0B03 Oriya
0B05 0B0C Oriya
0B0F 0B10 Oriya
0B13 0B28 Oriya
0B2A 0B30 Oriya
0B32 0B33 Oriya
0B35 0B39 Oriya
0B3C 0B44 Oriya
0B47 0B48 Oriya
0B4B 0B4D Oriya
0B56 0B57 Oriya
0B5C 0B5D Oriya
0B5F 0B63 Oriya
0B66 0B77 Oriya
0B82 0B83 Tamil
0B85 0B8A Tamil
0B8E 0B90 Tamil
0B92 0B95 Tamil
0B99 0B9A Tamil
0B9C Tamil
0B9E 0B9F Tamil
0BA3 0BA4 Tamil
0BA8 0BAA Tamil
0BAE 0BB9 Tamil
0BBE 0BC2 Tamil
0BC6 0BC8 Tamil
0BCA 0BCD Tamil
0BD0 Tamil
0BD7 Tamil
0BE6 0BFA Tamil
0C01 0C03 Telugu
0C05 0C0C Telugu
0C0E 0C10 Telugu
0C12 0C28 Telugu
0C2A 0C33 Telugu
0C35 0C39 Telugu
0C3D 0C44 Telugu
0C46 0C48 Telugu
0C4A 0C4D Telugu
0C55 0C56 Telugu
0C58 0C59 Telugu
0C60 0C63 Telugu
0C66 0C6F Telugu
0C78 0C7F Telugu
0C82 0C83 Kannada
0C85 0C8C Kannada
0C8E 0C90 Kannada
0C92 0CA8 Kannada
0CAA 0CB3 Kannada
0CB5 0CB9 Kannada
0CBC 0CC4 Kannada
0CC6 0CC8 Kannada
0CCA 0CCD Kannada
0CD5 0CD6 Kannada
0CDE Kannada
0CE0 0CE3 Kannada
0CE6 0CEF Kannada
0CF1 0CF2 Kannada
0D02 0D03 Malayalam
0D05 0D0C Malayalam
0D0E 0D10 Malayalam
0D12 0D3A Malayalam
0D3D 0D44 Malayalam
0D46 0D48 Malayalam
0D4A 0D4E Malayalam
0D57 Malayalam
0D60 0D63 Malayalam
0D66 0D75 Malayalam
0D79 0D7F Malayalam
0D82 0D83 Sinhala
0D85 0D96 Sinhala
0D9A 0DB1 Sinhala
0DB3 0DBB Sinhala
0DBD Sinhala
0DC0 0DC6 Sinhala
0DCA Sinhala
0DCF 0DD4 Sinhala
0DD6 Sinhala
0DD8 0DDF Sinhala
0DF2 0DF4 Sinhala
0E01 0E3A Thai
0E3F Common
0E40 0E5B Thai
0E81 0E82 Lao
0E84 Lao
0E87 0E88 Lao
0E8A Lao
0E8D Lao
0E94 0E97 Lao
0E99 0E9F Lao
0EA1 0EA3 Lao
0EA5 Lao
0EA7 Lao
0EAA 0EAB Lao
0EAD 0EB9 Lao
0EBB 0EBD Lao
0EC0 0EC4 Lao
0EC6 Lao
0EC8 0ECD Lao
0ED0 0ED9 Lao
0EDC 0EDF Lao
0F00 0F47 Tibetan
0F49 0F6C Tibetan
0F71 0F97 Tibetan
0F99 0FBC Tibetan
0FBE 0FCC Tibetan
0FCE 0FD4 Tibetan
0FD5 0FD8 Common
0FD9 0FDA Tibetan
1000 109F Myanmar
10A0 10C5 Georgian
10C7 Georgian
10CD Georgian
10D0 10FA Georgian
10FB Common
10FC 10FF Georgian
1100 11FF Hangul
1200 1248 Ethiopic
124A 124D Ethiopic
1250 1256 Ethiopic
1258 Ethiopic
125A 125D Ethiopic
1260 1288 Ethiopic
128A 128D Ethiopic
1290 12B0 Ethiopic
12B2 12B5 Ethiopic
12B8 12BE Ethiopic
12C0 Ethiopic
12C2 12C5 Ethiopic
12C8 12D6 Ethiopic
12D8 1310 Ethiopic
1312 1315 Ethiopic
1318 135A Ethiopic
135D 137C Ethiopic
1380 1399 Ethiopic
13A0 13F4 Cherokee
1400 167F Canadian_Aboriginal
1680 169C Ogham
16A0 16EA Runic
16EB 16ED Common
16EE 16F0 Runic
1700 170C Tagalog
170E 1714 Tagalog
1720 1734 Hanunoo
1735 1736 Buhid Hanunoo Tagbanwa Tagalog
1740 1753 Buhid
1760 176C Tagbanwa
176E 1770 Tagbanwa
1772 1773 Tagbanwa
1780 17DD Khmer
17E0 17E9 Khmer
17F0 17F9 Khmer
1800 1801 Mongolian
1802 1803 Mongolian Phags_Pa
1804 Mongolian
1805 Mongolian Phags_Pa
1806 180E Mongolian
1810 1819 Mongolian
1820 1877 Mongolian
1880 18AA Mongolian
18B0 18F5 Canadian_Aboriginal
1900 191C Limbu
1920 192B Limbu
1930 193B Limbu
1940 Limbu
1944 194F Limbu
1950 196D Tai_Le
1970 1974 Tai_Le
1980 19AB New_Tai_Lue
19B0 19C9 New_Tai_Lue
19D0 19DA New_Tai_Lue
19DE 19DF New_Tai_Lue
19E0 19FF Khmer
1A00 1A1B Buginese
1A1E 1A1F Buginese
1A20 1A5E Tai_Tham
1A60 1A7C Tai_Tham
1A7F 1A89 Tai_Tham
1A90 1A99 Tai_Tham
1AA0 1AAD Tai_Tham
1B00 1B4B Balinese
1B50 1B7C Balinese
1B80 1BBF Sundanese
1BC0 1BF3 Batak
1BFC 1BFF Batak
1C00 1C37 Lepcha
1C3B 1C49 Lepcha
1C4D 1C4F Lepcha
1C50 1C7F Ol_Chiki
1CC0 1CC7 Sundanese
1CD0 1CD2 Devanagari
1CD3 Common
1CD4 1CE8 Devanagari
1CE9 1CEC Common
1CED Devanagari
1CEE 1CF1 Common
1CF2 1CF4 Devanagari
1CF5 1CF6 Common
1D00 1D25 Latin
1D26 1D2A Greek
1D2B Cyrillic
1D2C 1D5C Latin
1D5D 1D61 Greek
1D62 1D65 Latin
1D66 1D6A Greek
1D6B 1D77 Latin
1D78 Cyrillic
1D79 1DBE Latin
1DBF 1DC1 Greek
1DC2 1DE6 Inherited
1DFC 1DFF Inherited
1E00 1EFF Latin
1F00 1F15 Greek
1F18 1F1D Greek
1F20 1F45 Greek
1F48 1F4D Greek
1F50 1F57 Greek
1F59 Greek
1F5B Greek
1F5D Greek
1F5F 1F7D Greek
1F80 1FB4 Greek
1FB6 1FC4 Greek
1FC6 1FD3 Greek
1FD6 1FDB Greek
1FDD 1FEF Greek
1FF2 1FF4 Greek
1FF6 1FFE Greek
2000 200B Common
200C 200D Inherited
200E 2064 Common
206A 2070 Common
2071 Latin
2074 207E Common
207F Latin
2080 208E Common
2090 209C Latin
20A0 20BA Common
20D0 20F0 Inherited
2100 2125 Common
2126 Greek
2127 2129 Common
212A 212B Latin
212C 2131 Common
2132 Latin
2133 214D Common
214E Latin
214F 215F Common
2160 2188 Latin
2189 Common
2190 23F3 Common
2400 2426 Common
2440 244A Common
2460 26FF Common
2701 27FF Common
2800 28FF Braille
2900 2B4C Common
2B50 2B59 Common
2C00 2C2E Glagolitic
2C30 2C5E Glagolitic
2C60 2C7F Latin
2C80 2CF3 Coptic
2CF9 2CFF Coptic
2D00 2D25 Georgian
2D27 Georgian
2D2D Georgian
2D30 2D67 Tifinagh
2D6F 2D70 Tifinagh
2D7F Tifinagh
2D80 2D96 Ethiopic
2DA0 2DA6 Ethiopic
2DA8 2DAE Ethiopic
2DB0 2DB6 Ethiopic
2DB8 2DBE Ethiopic
2DC0 2DC6 Ethiopic
2DC8 2DCE Ethiopic
2DD0 2DD6 Ethiopic
2DD8 2DDE Ethiopic
2DE0 2DFF Cyrillic
2E00 2E3B Common
2E80 2E99 Han
2E9B 2EF3 Han
2F00 2FD5 Han
2FF0 2FFB Common
3000 Common
3001 3002 Bopomofo Hangul Han Hiragana Katakana Yi
3003 Bopomofo Hangul Han Hiragana Katakana
3004 Common
3005 Han
3006 Han Hiragana Katakana
3007 Han
3008 3011 Bopomofo Hangul Han Hiragana Katakana Yi
3012 Common
3013 Bopomofo Hangul Han Hiragana Katakana
3014 301B Bopomofo Hangul Han Hiragana Katakana Yi
301C 301F Bopomofo Hangul Han Hiragana Katakana
3020 Common
3021 3029 Han
302A 302D Bopomofo Han
302E 302F Hangul
3030 Bopomofo Hangul Han Hiragana Katakana
3031 3035 Hiragana Katakana
3036 Common
3037 Bopomofo Hangul Han Hiragana Katakana
3038 303B Han
303C 303D Han Hiragana Katakana
303E 303F Bopomofo Hangul Han Hiragana Katakana
3041 3096 Hiragana
3099 309C Hiragana Katakana
309D 309F Hiragana
30A0 Hiragana Katakana
30A1 30FA Katakana
30FB Bopomofo Hangul Han Hiragana Katakana Yi
30FC Hiragana Katakana
30FD 30FF Katakana
3105 312D Bopomofo
3131 318E Hangul
3190 319F Han Hiragana Katakana
31A0 31BA Bopomofo
31C0 31E3 Bopomofo Hangul Han Hiragana Katakana
31F0 31FF Katakana
3200 321E Hangul
3220 3243 Bopomofo Hangul Han Hiragana Katakana
3244 325F Common
3260 327E Hangul
327F Common
3280 32B0 Bopomofo Hangul Han Hiragana Katakana
32B1 32BF Common
32C0 32CB Bopomofo Hangul Han Hiragana Katakana
32CC 32CF Common
32D0 32FE Katakana
3300 3357 Katakana
3358 3370 Bopomofo Hangul Han Hiragana Katakana
3371 337A Common
337B 337F Bopomofo Hangul Han Hiragana Katakana
3380 33DF Common
33E0 33FE Bopomofo Hangul Han Hiragana Katakana
33FF Common
3400 4DB5 Han
4DC0 4DFF Common
4E00 9FCC Han
A000 A48C Yi
A490 A4C6 Yi
A4D0 A4FF Lisu
A500 A62B Vai
A640 A697 Cyrillic
A69F Cyrillic
A6A0 A6F7 Bamum
A700 A721 Common
A722 A787 Latin
A788 A78A Common
A78B A78E Latin
A790 A793 Latin
A7A0 A7AA Latin
A7F8 A7FF Latin
A800 A82B Syloti_Nagri
A830 A839 Devanagari Gujarati Gurmukhi Kaithi Takri
A840 A877 Phags_Pa
A880 A8C4 Saurashtra
A8CE A8D9 Saurashtra
A8E0 A8FB Devanagari
A900 A92F Kayah_Li
A930 A953 Rejang
A95F Rejang
A960 A97C Hangul
A980 A9CD Javanese
A9CF A9D9 Javanese
A9DE A9DF Javanese
AA00 AA36 Cham
AA40 AA4D Cham
AA50 AA59 Cham
AA5C AA5F Cham
AA60 AA7B Myanmar
AA80 AAC2 Tai_Viet
AADB AADF Tai_Viet
AAE0 AAF6 Meetei_Mayek
AB01 AB06 Ethiopic
AB09 AB0E Ethiopic
AB11 AB16 Ethiopic
AB20 AB26 Ethiopic
AB28 AB2E Ethiopic
ABC0 ABED Meetei_Mayek
ABF0 ABF9 Meetei_Mayek
AC00 D7A3 Hangul
D7B0 D7C6 Hangul
D7CB D7FB Hangul
F900 FA6D Han
FA70 FAD9 Han
FB00 FB06 Latin
FB13 FB17 Armenian
FB1D FB36 Hebrew
FB38 FB3C Hebrew
FB3E Hebrew
FB40 FB41 Hebrew
FB43 FB44 Hebrew
FB46 FB4F Hebrew
FB50 FBC1 Arabic
FBD3 FD3D Arabic
FD3E FD3F Common
FD50 FD8F Arabic
FD92 FDC7 Arabic
FDF0 FDF1 Arabic
FDF2 Arabic Thaana
FDF3 FDFC Arabic
FDFD Arabic Thaana
FE00 FE0F Inherited
FE10 FE19 Common
FE20 FE26 Inherited
FE30 FE44 Common
FE45 FE46 Bopomofo Hangul Han Hiragana Katakana
FE47 FE52 Common
FE54 FE66 Common
FE68 FE6B Common
FE70 FE74 Arabic
FE76 FEFC Arabic
FEFF Common
FF01 FF20 Common
FF21 FF3A Latin
FF3B FF40 Common
FF41 FF5A Latin
FF5B FF60 Common
FF61 FF65 Bopomofo Hangul Han Hiragana Katakana Yi
FF66 FF6F Katakana
FF70 Hiragana Katakana
FF71 FF9D Katakana
FF9E FF9F Hiragana Katakana
FFA0 FFBE Hangul
FFC2 FFC7 Hangul
FFCA FFCF Hangul
FFD2 FFD7 Hangul
FFDA FFDC Hangul
FFE0 FFE6 Common
FFE8 FFEE Common
FFF9 FFFD Common
10000 1000B Linear_B
1000D 10026 Linear_B
10028 1003A Linear_B
1003C 1003D Linear_B
1003F 1004D Linear_B
10050 1005D Linear_B
10080 100FA Linear_B
10100 10102 Cypriot Linear_B
10107 10133 Cypriot Linear_B
10137 1013F Cypriot Linear_B
10140 1018A Greek
10190 1019B Common
101D0 101FC Common
101FD Inherited
10280 1029C Lycian
102A0 102D0 Carian
10300 1031E Old_Italic
10320 10323 Old_Italic
10330 1034A Gothic
10380 1039D Ugaritic
1039F Ugaritic
103A0 103C3 Old_Persian
103C8 103D5 Old_Persian
10400 1044F Deseret
10450 1047F Shavian
10480 1049D Osmanya
104A0 104A9 Osmanya
10800 10805 Cypriot
10808 Cypriot
1080A 10835 Cypriot
10837 10838 Cypriot
1083C Cypriot
1083F Cypriot
10840 10855 Imperial_Aramaic
10857 1085F Imperial_Aramaic
10900 1091B Phoenician
1091F Phoenician
10920 10939 Lydian
1093F Lydian
10980 1099F Meroitic_Hieroglyphs
109A0 109B7 Meroitic_Cursive
109BE 109BF Meroitic_Cursive
10A00 10A03 Kharoshthi
10A05 10A06 Kharoshthi
10A0C 10A13 Kharoshthi
10A15 10A17 Kharoshthi
10A19 10A33 Kharoshthi
10A38 10A3A Kharoshthi
10A3F 10A47 Kharoshthi
10A50 10A58 Kharoshthi
10A60 10A7F Old_South_Arabian
10B00 10B35 Avestan
10B39 10B3F Avestan
10B40 10B55 Inscriptional_Parthian
10B58 10B5F Inscriptional_Parthian
10B60 10B72 Inscriptional_Pahlavi
10B78 10B7F Inscriptional_Pahlavi
10C00 10C48 Old_Turkic
10E60 10E7E Arabic
11000 1104D Brahmi
11052 1106F Brahmi
11080 110C1 Kaithi
110D0 110E8 Sora_Sompeng
110F0 110F9 Sora_Sompeng
11100 11134 Chakma
11136 11143 Chakma
11180 111C8 Sharada
111D0 111D9 Sharada
11680 116B7 Takri
116C0 116C9 Takri
12000 1236E Cuneiform
12400 12462 Cuneiform
12470 12473 Cuneiform
13000 1342E Egyptian_Hieroglyphs
16800 16A38 Bamum
16F00 16F44 Miao
16F50 16F7E Miao
16F8F 16F9F Miao
1B000 Katakana
1B001 Hiragana
1D000 1D0F5 Common
1D100 1D126 Common
1D129 1D166 Common
1D167 1D169 Inherited
1D16A 1D17A Common
1D17B 1D182 Inherited
1D183 1D184 Common
1D185 1D18B Inherited
1D18C 1D1A9 Common
1D1AA 1D1AD Inherited
1D1AE 1D1DD Common
1D200 1D245 Greek
1D300 1D356 Common
1D360 1D371 Common
1D400 1D454 Common
1D456 1D49C Common
1D49E 1D49F Common
1D4A2 Common
1D4A5 1D4A6 Common
1D4A9 1D4AC Common
1D4AE 1D4B9 Common
1D4BB Common
1D4BD 1D4C3 Common
1D4C5 1D505 Common
1D507 1D50A Common
1D50D 1D514 Common
1D516 1D51C Common
1D51E 1D539 Common
1D53B 1D53E Common
1D540 1D544 Common
1D546 Common
1D54A 1D550 Common
1D552 1D6A5 Common
1D6A8 1D7CB Common
1D7CE 1D7FF Common
1EE00 1EE03 Arabic
1EE05 1EE1F Arabic
1EE21 1EE22 Arabic
1EE24 Arabic
1EE27 Arabic
1EE29 1EE32 Arabic
1EE34 1EE37 Arabic
1EE39 Arabic
1EE3B Arabic
1EE42 Arabic
1EE47 Arabic
1EE49 Arabic
1EE4B Arabic
1EE4D 1EE4F Arabic
1EE51 1EE52 Arabic
1EE54 Arabic
1EE57 Arabic
1EE59 Arabic
1EE5B Arabic
1EE5D Arabic
1EE5F Arabic
1EE61 1EE62 Arabic
1EE64 Arabic
1EE67 1EE6A Arabic
1EE6C 1EE72 Arabic
1EE74 1EE77 Arabic
1EE79 1EE7C Arabic
1EE7E Arabic
1EE80 1EE89 Arabic
1EE8B 1EE9B Arabic
1EEA1 1EEA3 Arabic
1EEA5 1EEA9 Arabic
1EEAB 1EEBB Arabic
1EEF0 1EEF1 Arabic
1F000 1F02B Common
1F030 1F093 Common
1F0A0 1F0AE Common
1F0B1 1F0BE Common
1F0C1 1F0CF Common
1F0D1 1F0DF Common
1F100 1F10A Common
1F110 1F12E Common
1F130 1F16B Common
1F170 1F19A Common
1F1E6 1F1FF Common
1F200 Hiragana
1F201 1F202 Common
1F210 1F23A Common
1F240 1F248 Common
1F250 1F251 Common
1F300 1F320 Common
1F330 1F335 Common
1F337 1F37C Common
1F380 1F393 Common
1F3A0 1F3C4 Common
1F3C6 1F3CA Common
1F3E0 1F3F0 Common
1F400 1F43E Common
1F440 Common
1F442 1F4F7 Common
1F4F9 1F4FC Common
1F500 1F53D Common
1F540 1F543 Common
1F550 1F567 Common
1F5FB 1F640 Common
1F645 1F64F Common
1F680 1F6C5 Common
1F700 1F773 Common
20000 2A6D6 Han
2A700 2B734 Han
2B740 2B81D Han
2F800 2FA1D Han
E0001 Common
E0020 E007F Common
E0100 E01EF Inherited
END
| Bjay1435/capstone | rootfs/usr/share/perl/5.18.2/unicore/To/Scx.pl | Perl | mit | 15,009 |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by lib/unicore/mktables from the Unicode
# database, Version 6.1.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly.
return <<'END';
0009
0020
00A0
1680
180E
2000 200A
202F
205F
3000
END
| efortuna/AndroidSDKClone | ndk_experimental/prebuilt/linux-x86_64/lib/perl5/5.16.2/unicore/lib/Perl/Blank.pl | Perl | apache-2.0 | 489 |
package Module::Build::PodParser;

use strict;
use vars qw($VERSION);
$VERSION = '0.4003';
$VERSION = eval $VERSION;
use vars qw(@ISA);

# Lightweight POD scanner used by Module::Build to pull the abstract and
# author list out of a module's documentation without loading a full POD
# parser.

sub new {
  # Construct a parser from either an open filehandle (fh => $fh) or a file
  # name (file => $path). Dies when neither is supplied or the file cannot
  # be opened.
  # Perl is so fun.
  my $package = shift;
  my $self;

  @ISA = ();
  $self = bless {have_pod_parser => 0, @_}, $package;

  unless ($self->{fh}) {
    die "No 'file' or 'fh' parameter given" unless $self->{file};
    # fix: IO::File was used without ever being loaded, relying on the
    # caller to have pulled it in already.
    require IO::File;
    $self->{fh} = IO::File->new($self->{file}) or die "Couldn't open $self->{file}: $!";
  }

  return $self;
}

sub parse_from_filehandle {
  # Scan POD for a "Name - abstract" line (also C<Name> - abstract) and,
  # afterwards, collect AUTHOR-section lines containing '@' (email
  # addresses). Populates $self->{abstract} and $self->{author}.
  my ($self, $fh) = @_;

  local $_;
  while (<$fh>) {
    next unless /^=(?!cut)/ .. /^=cut/;  # in POD
    # Accept Name - abstract or C<Name> - abstract
    last if ($self->{abstract}) = /^ (?: [a-z0-9:]+ | [BCIF] < [a-z0-9:]+ > ) \s+ - \s+ (.*\S) /ix;
  }

  my @author;
  while (<$fh>) {
    next unless /^=head1\s+AUTHORS?/i ... /^=/;
    next if /^=/;
    push @author, $_ if /\@/;  # keep only lines that look like they hold an email
  }
  return unless @author;

  s/^\s+|\s+$//g foreach @author;
  $self->{author} = \@author;

  return;
}

sub get_abstract {
  # Lazily parse and return the module abstract (undef when none found).
  my $self = shift;
  return $self->{abstract} if defined $self->{abstract};

  $self->parse_from_filehandle($self->{fh});

  return $self->{abstract};
}

sub get_author {
  # Lazily parse and return an arrayref of author lines (possibly empty).
  my $self = shift;
  return $self->{author} if defined $self->{author};

  $self->parse_from_filehandle($self->{fh});

  return $self->{author} || [];
}
| Dokaponteam/ITF_Project | xampp/perl/lib/Module/Build/PodParser.pm | Perl | mit | 1,345 |
package Mouse::Exporter;
use strict;
use warnings;
use Carp ();
# Registry of exporter specs, keyed by exporting package name; populated by
# build_import_methods().
my %SPEC;
# Precomputed hint bitmasks applied directly in import() below.
my $strict_bits;
my $warnings_extra_bits;
BEGIN{
$strict_bits = strict::bits(qw(subs refs vars));
$warnings_extra_bits = warnings::bits(FATAL => 'recursion');
}
# it must be "require", because Mouse::Util depends on Mouse::Exporter,
# which depends on Mouse::Util::import()
require Mouse::Util;
# Enable strict (subs/refs/vars) and all warnings -- with recursion fatal --
# in the caller's compile-time scope, by OR-ing the precomputed bitmasks
# straight into the hint variables instead of calling the modules' import().
sub import{
## no critic ProhibitBitwiseOperators
# strict->import;
$^H |= $strict_bits;
# warnings->import('all', FATAL => 'recursion');
${^WARNING_BITS} |= $warnings::Bits{all};
${^WARNING_BITS} |= $warnings_extra_bits;
return;
}
# Build import/unimport for the exporting package (default: the caller) and
# install them along with Moose-compatible export_to_level/export wrappers.
sub setup_import_methods{
my($class, %args) = @_;
my $exporting_package = $args{exporting_package} ||= caller();
my($import, $unimport) = $class->build_import_methods(%args);
Mouse::Util::install_subroutines($exporting_package,
import => $import,
unimport => $unimport,
# Moose-compat: export into a caller N levels up the stack
export_to_level => sub {
my($package, $level, undef, @args) = @_; # the third argument is redundant
$package->import({ into_level => $level + 1 }, @args);
},
# Moose-compat: export into an explicitly named package
export => sub {
my($package, $into, @args) = @_;
$package->import({ into => $into }, @args);
},
);
return;
}
# Canonicalize an exporter spec (%args) for the exporting package and return
# the shared import/unimport code refs. Resolves the transitive "also"
# chain, flattens all as_is exports into code refs, records which names may
# be removed by unimport, and precomputes the :all / default groups.
sub build_import_methods{
my($self, %args) = @_;
my $exporting_package = $args{exporting_package} ||= caller();
$SPEC{$exporting_package} = \%args;
# canonicalize args
my @export_from;
if($args{also}){
# breadth-first walk of the "also" chain, de-duplicated with %seen
my %seen;
my @stack = ($exporting_package);
while(my $current = shift @stack){
push @export_from, $current;
my $also = $SPEC{$current}{also} or next;
push @stack, grep{ !$seen{$_}++ } ref($also) ? @{ $also } : $also;
}
}
else{
@export_from = ($exporting_package);
}
my %exports;
my @removables;
my @all;
my @init_meta_methods;
foreach my $package(@export_from){
my $spec = $SPEC{$package} or next;
if(my $as_is = $spec->{as_is}){
foreach my $thingy (@{$as_is}){
# each as_is entry is either a code ref or a sub name in $package
my($code_package, $code_name, $code);
if(ref($thingy)){
$code = $thingy;
($code_package, $code_name) = Mouse::Util::get_code_info($code);
}
else{
$code_package = $package;
$code_name = $thingy;
no strict 'refs';
$code = \&{ $code_package . '::' . $code_name };
}
push @all, $code_name;
$exports{$code_name} = $code;
# only subs owned by the exporting package may be unimported
if($code_package eq $package){
push @removables, $code_name;
}
}
}
# collect each distinct init_meta along the "also" chain (compared by
# code address so an inherited method is only run once)
if(my $init_meta = $package->can('init_meta')){
if(!grep{ $_ == $init_meta } @init_meta_methods){
push @init_meta_methods, $init_meta;
}
}
}
$args{EXPORTS} = \%exports;
$args{REMOVABLES} = \@removables;
$args{groups}{all} ||= \@all;
if(my $default_list = $args{groups}{default}){
my %default;
foreach my $keyword(@{$default_list}){
$default{$keyword} = $exports{$keyword}
|| Carp::confess(qq{The $exporting_package package does not export "$keyword"});
}
$args{DEFAULT} = \%default;
}
else{
# no explicit default group: default to everything
$args{groups}{default} ||= \@all;
$args{DEFAULT} = $args{EXPORTS};
}
if(@init_meta_methods){
$args{INIT_META} = \@init_meta_methods;
}
return (\&do_import, \&do_unimport);
}
# the entity of general import()
# Shared import() implementation: enables strict/warnings in the
# target scope, runs any init_meta() hooks (applying -traits as
# metaroles), then installs either the explicitly requested exports
# or the default set into the target package.
sub do_import {
    my($package, @args) = @_;
    my $spec = $SPEC{$package}
        || Carp::confess("The package $package package does not use Mouse::Exporter");
    # A leading hashref carries { into => ..., into_level => ... }.
    my $into = _get_caller_package(ref($args[0]) ? shift @args : undef);
    my @exports;
    my @traits;
    while(@args){
        my $arg = shift @args;
        if($arg =~ s/^-//){
            # "-traits" is the only supported dash option.
            if($arg eq 'traits'){
                push @traits, ref($args[0]) ? @{shift(@args)} : shift(@args);
            }
            else {
                Mouse::Util::not_supported("-$arg");
            }
        }
        elsif($arg =~ s/^://){
            # ":group" expands to that group's keyword list.
            my $group = $spec->{groups}{$arg}
                || Carp::confess(qq{The $package package does not export the group "$arg"});
            push @exports, @{$group};
        }
        else{
            push @exports, $arg;
        }
    }
    # strict->import;
    $^H |= $strict_bits; ## no critic ProhibitBitwiseOperators
    # warnings->import('all', FATAL => 'recursion');
    ${^WARNING_BITS} |= $warnings::Bits{all}; ## no critic ProhibitBitwiseOperators
    ${^WARNING_BITS} |= $warnings_extra_bits; ## no critic ProhibitBitwiseOperators
    if($spec->{INIT_META}){
        my $meta;
        foreach my $init_meta(@{$spec->{INIT_META}}){
            $meta = $package->$init_meta(for_class => $into);
        }
        if(@traits){
            # Resolve trait short names against the metaclass kind and
            # apply them as class (or role) metaroles to the target.
            my $type = (split /::/, ref $meta)[-1]; # e.g. "Class" for "My::Meta::Class"
            @traits = map{
                ref($_)
                    ? $_
                    : Mouse::Util::resolve_metaclass_alias($type => $_, trait => 1)
            } @traits;
            require Mouse::Util::MetaRole;
            Mouse::Util::MetaRole::apply_metaroles(
                for => $into,
                Mouse::Util::is_a_metarole($into->meta)
                    ? (role_metaroles => { role => \@traits })
                    : (class_metaroles => { class => \@traits }),
            );
        }
    }
    elsif(@traits){
        Carp::confess("Cannot provide traits when $package does not have an init_meta() method");
    }
    if(@exports){
        # Explicit list: install only what was asked for, failing fast
        # on unknown keywords.
        my @export_table;
        foreach my $keyword(@exports){
            push @export_table,
                $keyword => ($spec->{EXPORTS}{$keyword}
                    || Carp::confess(qq{The $package package does not export "$keyword"})
                );
        }
        Mouse::Util::install_subroutines($into, @export_table);
    }
    else{
        Mouse::Util::install_subroutines($into, %{$spec->{DEFAULT}});
    }
    return;
}
# the entity of general unimport()
# Shared unimport() implementation: deletes from the target package's
# stash every removable export that still resolves to the exact sub
# we installed, leaving any since-redefined names untouched.
sub do_unimport {
    my($package, $arg) = @_;
    my $spec = $SPEC{$package}
        || Carp::confess("The package $package does not use Mouse::Exporter");
    my $from = _get_caller_package($arg);
    my $stash = do{
        no strict 'refs';
        \%{$from . '::'}
    };
    for my $keyword (@{ $spec->{REMOVABLES} }) {
        next if !exists $stash->{$keyword};
        my $gv = \$stash->{$keyword};
        # remove what is from us
        # Only delete when the slot is a real glob whose CODE entry is
        # still the sub we exported (compared by address).
        if(ref($gv) eq 'GLOB' && *{$gv}{CODE} == $spec->{EXPORTS}{$keyword}){
            delete $stash->{$keyword};
        }
    }
    return;
}
sub _get_caller_package {
    # Resolve the package to export into. $arg may be a hashref with
    # an explicit 'into' package or an 'into_level' caller offset.
    # caller(1) rather than caller(0) because this sub is invoked from
    # inside import()/unimport(), adding one level of indirection.
    my($arg) = @_;
    return scalar caller(1) unless ref $arg;
    return $arg->{into}                          if defined $arg->{into};
    return scalar caller(1 + $arg->{into_level}) if defined $arg->{into_level};
    return scalar caller(1);
}
1;
__END__
=head1 NAME
Mouse::Exporter - make an import() and unimport() just like Mouse.pm
=head1 VERSION
This document describes Mouse version 1.12
=head1 SYNOPSIS
package MyApp::Mouse;
use Mouse ();
use Mouse::Exporter;
Mouse::Exporter->setup_import_methods(
as_is => [ 'has_rw', 'other_sugar', \&Some::Random::thing ],
also => 'Mouse',
);
sub has_rw {
my $meta = caller->meta;
my ( $name, %options ) = @_;
$meta->add_attribute(
$name,
is => 'rw',
%options,
);
}
# then later ...
package MyApp::User;
use MyApp::Mouse;
has 'name';
has_rw 'size';
thing;
no MyApp::Mouse;
=head1 DESCRIPTION
This module encapsulates the exporting of sugar functions in a
C<Mouse.pm>-like manner. It does this by building custom C<import>,
C<unimport> methods for your module, based on a spec you provide.
Note that C<Mouse::Exporter> does not provide the C<with_meta> option,
but you can easily get the metaclass by C<< caller->meta >> as L</SYNOPSIS> shows.
=head1 METHODS
=head2 C<< setup_import_methods( ARGS ) >>
=head2 C<< build_import_methods( ARGS ) -> (\&import, \&unimport) >>
=head1 SEE ALSO
L<Moose::Exporter>
=cut
| volanja/isucon3-yosen-re | perl/local/lib/perl5/x86_64-linux/Mouse/Exporter.pm | Perl | mit | 8,680 |
% ----------------------------------------------------------------------
% BEGIN LICENSE BLOCK
% Version: CMPL 1.1
%
% The contents of this file are subject to the Cisco-style Mozilla Public
% License Version 1.1 (the "License"); you may not use this file except
% in compliance with the License. You may obtain a copy of the License
% at www.eclipse-clp.org/license.
%
% Software distributed under the License is distributed on an "AS IS"
% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
% the License for the specific language governing rights and limitations
% under the License.
%
% The Original Code is The ECLiPSe Constraint Logic Programming System.
% The Initial Developer of the Original Code is Cisco Systems, Inc.
% Portions created by the Initial Developer are
% Copyright (C) 1989-2006 Cisco Systems, Inc. All Rights Reserved.
%
% Contributor(s): ECRC GmbH
%
% END LICENSE BLOCK
%
% System: ECLiPSe Constraint Logic Programming System
% Version: $Id: history.pl,v 1.1 2008/06/30 17:43:46 jschimpf Exp $
% ----------------------------------------------------------------------
/*
* SEPIA PROLOG SOURCE MODULE
*/
/*
* IDENTIFICATION: history.pl
* DESCRIPTION: History package for sepia, library file.
* CONTENTS:
* REVISION HISTORY:
* AUTHOR VERSION DATE REASON
* Micha Meier 3.11.89
*/
:- module(history).
% Make sure that the important operators are ok
:- op(1100, xfy, ;),
   op(1050, xfy, ->).
% Save the current debug-compile / variable-names flags in a local
% array and compile this library without debug instrumentation; the
% saved flags are restored by the directive at the end of this file.
:-
	make_local_array(dbg),
	get_flag(debug_compile, Dbg),
	get_flag(variable_names, Var),
	setval(dbg, flags(Dbg, Var)),
	nodbgcomp.
:- export h/0.
% 'history_command' counts toplevel commands (next free number);
% 'history' holds the maximum number of remembered commands.
:- make_local_array(history_command), setval(history_command, 1).
:- setval(history, 20).
:- dynamic history_command/3.
% check_goal(_, goal(Term, List, Goal, VarList), M)
%   Handler for toplevel-goal preprocessing (event 154).
%   An integer Term recalls a history entry: negative numbers are
%   relative to the current command counter, positive ones absolute.
%   Any other goal (except 'h' and goals already recorded) is stored
%   in history_command/3; entries older than getval(history) commands
%   are pruned first.
check_goal(_, goal(Term, List, Goal, VarList), M) :-
	(integer(Term) ->
	    (Term < 0 ->
		getval(history_command, Comm),
		Number is Comm + Term
	    ;
		Number = Term
	    ),
	    (history_command(Number, Goal, VarList) ->
		% echo the recalled goal before it is executed
		write(toplevel_output, Goal),
		writeln(toplevel_output, '.')
	    ;
		% unknown history number: run a failing goal instead
		Goal = fail,
		VarList = []
	    )
	;
	    (Term == h ->
		% don't record the history-listing command itself
		true
	    ;
	    history_command(_, OldTerm, _),
	    variant(OldTerm, Term) ->
		% an identical goal is already recorded; don't duplicate
		true
	    ;
		% prune entries that have fallen out of the history window
		getval(history_command, Comm),
		getval(history, H),
		OldComm is Comm - H,
		(history_command(Oldest, _, _) ->
		    (Oldest =< OldComm ->
			remove_all(Oldest, OldComm)
		    ;
			true
		    )
		;
		    true
		),
		incval(history_command),
		assert(history_command(Comm, Term, List))
	    ),
	    Goal = Term,
	    VarList = List
	).
% history_prompt(_, M)
%   Handler for the toplevel prompt (event 153): prints a prompt of
%   the form "[Module N]: ", where N is the next history number.
history_prompt(_, M) :-
	put(toplevel_output, 0'[),
	write(toplevel_output, M),
	put(toplevel_output, 0' ),
	getval(history_command, Comm),
	write(toplevel_output, Comm),
	write(toplevel_output, ']: '),
	flush(toplevel_output).
% h
%   List all remembered commands, one "Number Goal" per line.
%   Implemented as a failure-driven loop over history_command/3;
%   the final fact makes h/0 always succeed.
h :-
	history_command(Comm, Goal, _),
	write(Comm),
	put(0' ),
	writeln(Goal),
	fail.
h.
% remove_all(+From, +To)
%   Retract every history_command/3 entry numbered From..To
%   (inclusive). Missing entries are skipped; the predicate always
%   succeeds deterministically.
remove_all(From, To) :-
	( From > To ->
	    true
	;
	    ( retract(history_command(From, _, _)) -> true ; true ),
	    Next is From + 1,
	    remove_all(Next, To)
	).
% Hook the handlers into the toplevel events:
% 153 = prompt printing, 154 = goal preprocessing.
:- set_error_handler(153, history_prompt/2),
   set_error_handler(154, check_goal/3).
% Keep the debugger out of the history-listing predicate.
:- skipped((h)/0),
   untraceable((h)/0).
% Restore the compiler flags saved at the top of this file.
:-
	getval(dbg, flags(Dbg, Var)),
	set_flag(debug_compile, Dbg),
	set_flag(variable_names, Var),
	erase_array(dbg).
| linusyang/barrelfish | usr/skb/eclipse_kernel/lib/history.pl | Perl | mit | 3,280 |
package PPI::Statement::Include::Perl6;
=pod
=head1 NAME
PPI::Statement::Include::Perl6 - Inline Perl 6 file section
=head1 SYNOPSIS
use v6-alpha;
grammar My::Grammar {
...
}
=head1 INHERITANCE
PPI::Statement::Include::Perl6
isa PPI::Statement::Include
isa PPI::Statement
isa PPI::Node
isa PPI::Element
=head1 DESCRIPTION
A C<PPI::Statement::Include::Perl6> is a special include statement that
indicates the start of a section of Perl 6 code inlined into a regular
Perl 5 code file.
The primary purpose of the class is to allow L<PPI> to provide at least
basic support for "6 in 5" modules like v6.pm.
Currently, PPI only supports starting a Perl 6 block. It does not
currently support changing back to Perl 5 again. Additionally all POD
and __DATA__ blocks and __END__ blocks will be included in the Perl 6
string and will not be parsed by PPI.
=cut
use strict;
use PPI::Statement::Include ();
# Class data: $VERSION and @ISA are set inside BEGIN so that
# inheritance is in place for the remainder of compilation.
use vars qw{$VERSION @ISA};
BEGIN {
	$VERSION = '1.215';
	@ISA = 'PPI::Statement::Include';
}
=pod
=head2 perl6
The C<perl6> method returns the block of Perl 6 code that is attached to
the "use v6...;" command.
=cut
sub perl6 {
    # Accessor: return the raw Perl 6 source text captured for this
    # "use v6...;" statement (undef if none was attached).
    my $self = shift;
    return $self->{perl6};
}
1;
=pod
=head1 TO DO
- Write specific unit tests for this package
=head1 SUPPORT
See the L<support section|PPI/SUPPORT> in the main module.
=head1 AUTHOR
Adam Kennedy E<lt>adamk@cpan.orgE<gt>
=head1 COPYRIGHT
Copyright 2001 - 2011 Adam Kennedy.
This program is free software; you can redistribute
it and/or modify it under the same terms as Perl itself.
The full text of the license can be found in the
LICENSE file included with this module.
=cut
| Dokaponteam/ITF_Project | xampp/perl/vendor/lib/PPI/Statement/Include/Perl6.pm | Perl | mit | 1,675 |
package Fixtures::Hwinfo;
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
use Moose;
extends 'DBIx::Class::EasyFixture';
use namespace::autoclean;
use Digest::SHA1 qw(sha1_hex);
# Fixture definitions keyed by fixture name. Each entry creates one
# 'Hwinfo' row; 'using' holds the column values (id, serverid,
# description, val). Consumed by DBIx::Class::EasyFixture via
# get_definition()/all_fixture_names() below.
my %definition_for = (
    hw1 => {
        new => 'Hwinfo',
        using => {
            id => 1,
            serverid => 100,
            description => 'BACKPLANE FIRMWA',
            val => '7.0.0.29',
        },
    },
    hw2 => {
        new => 'Hwinfo',
        using => {
            id => 2,
            serverid => 200,
            description => 'DRAC FIRMWA',
            val => '1.0.0.29',
        },
    },
    hw3 => {
        new => 'Hwinfo',
        using => {
            id => 3,
            serverid => 200,
            description => 'ServiceTag',
            val => 'XXX',
        },
    },
    hw4 => {
        new => 'Hwinfo',
        using => {
            id => 4,
            serverid => 200,
            description => 'Manufacturer',
            val => 'Dell Inc.',
        },
    },
    hw5 => {
        new => 'Hwinfo',
        using => {
            id => 5,
            serverid => 200,
            description => 'Model',
            val => 'Beetle',
        },
    },
);
sub get_definition {
    # Look up a single fixture spec by name; returns undef for
    # unknown fixture names.
    my $self = shift;
    my $name = shift;
    return $definition_for{$name};
}
sub all_fixture_names {
    # Order fixture names by the database value each one inserts, so
    # that insertion order is deterministic across runs.
    my $by_db_val = sub {
        $definition_for{$_[0]}{using}{val}
            cmp $definition_for{$_[1]}{using}{val};
    };
    my @names = keys %definition_for;
    return (sort { $by_db_val->($a, $b) } @names);
}
__PACKAGE__->meta->make_immutable;
1;
| rscrimojr/incubator-trafficcontrol | traffic_ops/app/lib/Fixtures/Hwinfo.pm | Perl | apache-2.0 | 1,806 |
% Treat double-quoted text as string objects rather than code lists.
:- set_prolog_flag(double_quotes, string).
:- install_constant_renamer_until_eof.
% Never apply SUMO constant renaming to this cached lexicon data.
:- set_prolog_flag(do_renames_sumo,never).
%textCached('Are-TheWord', denotation, [ 'CountNoun', 'Are-UnitOfArea']).
%textCached('Be-TheWord', denotation, [ 'BeAux', 'is-Underspecified']).
%textCached('My-TheWord', denotation, [ 'PossessivePronoun-Pre', 'PronounFn'('FirstPerson-NLAttr', 'Singular-NLAttr', 'Ungendered-NLAttr', 'PossessivePronoun-Pre')]).
%textCached('Name-TheWord', denotation, [ 'CountNoun', 'Name']).
%textCached('Name-TheWord', denotation, [ 'CountNoun', nameString]).
%textCached('Name-TheWord', denotation, [ 'Verb', 'NamingSomething']).
%textCached('You-TheWord', denotation, [ 'ObjectPronoun', 'PronounFn'('SecondPerson-NLAttr', 'Plural-NLAttr', 'Ungendered-NLAttr', 'ObjectPronoun')]).
%textCached('You-TheWord', denotation, [ 'ObjectPronoun', 'PronounFn'('SecondPerson-NLAttr', 'Singular-NLAttr', 'Ungendered-NLAttr', 'ObjectPronoun')]).
%textCached('You-TheWord', denotation, [ 'SubjectPronoun', 'PronounFn'('SecondPerson-NLAttr', 'Plural-NLAttr', 'Ungendered-NLAttr', 'SubjectPronoun')]).
%textCached('You-TheWord', denotation, [ 'SubjectPronoun', 'PronounFn'('SecondPerson-NLAttr', 'Singular-NLAttr', 'Ungendered-NLAttr', 'SubjectPronoun')]).
:- multifile(textCached/3).
:- dynamic(textCached/3).
:- style_check(-singleton).
textCached('A-TheWord',frame, ['A-TheWord', 'Determiner', 'QuantifierFrame', thereExists(':NOUN', and(':RESTR', ':SCOPE')), determinerSemTrans]).
textCached('About-TheWord',frame, ['About-TheWord', 'Adverb', 'DeterminerModifyingFrame', 'ApproximatelyFn'(':DET'), adverbSemTrans]).
textCached('About-TheWord',frame, ['About-TheWord', 'AdverbOfFrequency', 'DeterminerModifyingFrame', 'ApproximatelyFn'(':DET'), adverbSemTrans]).
textCached('About-TheWord',frame, ['About-TheWord', 'AdverbOfManner', 'DeterminerModifyingFrame', 'ApproximatelyFn'(':DET'), adverbSemTrans]).
textCached('About-TheWord',frame, ['About-TheWord', 'AdverbOfPlace', 'DeterminerModifyingFrame', 'ApproximatelyFn'(':DET'), adverbSemTrans]).
textCached('About-TheWord',frame, ['About-TheWord', 'AdverbOfTime', 'DeterminerModifyingFrame', 'ApproximatelyFn'(':DET'), adverbSemTrans]).
textCached('About-TheWord',frame, ['About-TheWord', 'Preposition', 'Post-NounPhraseModifyingFrame', implies(and(isa(':NOUN', 'InformationBearingObject'), isa(':OBLIQUE-OBJECT', 'Thing')), containsInformationAbout(':NOUN', ':OBLIQUE-OBJECT')), 'prepReln-Object']).
textCached('About-TheWord',frame, ['About-TheWord', 'Preposition', 'Post-NounPhraseModifyingFrame', implies(and(isa(':NOUN', 'OneToManyCommunication'), isa(':OBLIQUE-OBJECT', 'Thing')), and(isa(':NOUN', 'AnnouncingSomething'), topicOfInfoTransfer(':NOUN', ':OBLIQUE-OBJECT'))), 'prepReln-Object']).
textCached('About-TheWord',frame, ['About-TheWord', 'Preposition', 'Post-NounPhraseModifyingFrame', implies(and(isa(':NOUN', 'PropositionalInformationThing'), isa(':OBLIQUE-OBJECT', 'Thing')), propositionalInfoAbout(':NOUN', ':OBLIQUE-OBJECT')), 'prepReln-Object']).
textCached('About-TheWord',frame, ['About-TheWord', 'Preposition', 'Post-NounPhraseModifyingFrame', subjectOfInfo(':NOUN', ':OBJECT'), prepSemTrans]).
textCached('About-TheWord',frame, ['About-TheWord', 'Preposition', 'Post-NounPhraseModifyingFrame', topicOfInfoTransfer(':ACTION', ':OBLIQUE-OBJECT'), prepSemTrans]).
textCached('Absent-TheWord',frame, ['Absent-TheWord', 'Verb', 'PPCompFrameFn'('DitransitivePPFrameType', 'From-TheWord'), not(socialParticipants(':OBJECT', ':SUBJECT')), verbSemTrans]).
textCached('Access-TheWord',frame, ['Access-TheWord', 'CountNoun', 'GenitiveFrame', and(isa(':NOUN', 'AccessingAnIBT'), informationOrigin(':NOUN', ':POSSESSOR')), nounSemTrans]).
textCached('Access-TheWord',frame, ['Access-TheWord', 'Noun', 'GenitiveFrame', and(isa(':NOUN', 'AccessingAnIBT'), informationOrigin(':NOUN', ':POSSESSOR')), nounSemTrans]).
textCached('Access-TheWord',frame, ['Access-TheWord', 'Verb', 'TransitiveNPFrame', and(isa(':ACTION', 'AccessingAnIBT'), performedBy(':ACTION', ':SUBJECT'), informationOrigin(':ACTION', ':OBJECT')), verbSemTrans]).
textCached('Access-TheWord',frame, ['Access-TheWord', 'Verb', 'TransitiveNPFrame', thereExists(':ACTION', and(isa(':ACTION', 'AccessingAnIBT'), situationConstituents(':ACTION', ':OBJECT'), doneBy(':ACTION', ':SUBJECT'))), verbSemTransPartial]).
textCached('Access-TheWord',frame, ['Access-TheWord', 'Verb', 'TransitiveNPFrame', thereExists(':ACTION', and(situationConstituents(':ACTION', ':OBJECT'), doneBy(':ACTION', ':SUBJECT'), isa(':ACTION', 'AccessingAnIBT'))), verbSemTransPartial]).
textCached('Admit-TheWord',frame, ['Admit-TheWord', 'Verb', 'DitransitiveNP-InfinitivePhraseFrame', and(isa(':ACTION', 'AdmitToMembership'), performedBy(':ACTION', ':SUBJECT'), recipientOfInfo(':ACTION', ':OBJECT'), infoTransferred(':ACTION', A), 'ist-Information'(A, performedBy(':INF-COMP', ':OBJECT'))), verbSemTrans]).
textCached('Admit-TheWord',frame, ['Admit-TheWord', 'Verb', 'DitransitiveNP-InfinitivePhraseFrame', and(isa(':ACTION', 'PermittingEntrance'), performedBy(':ACTION', ':SUBJECT'), recipientOfInfo(':ACTION', ':OBJECT'), infoTransferred(':ACTION', A), 'ist-Information'(A, performedBy(':INF-COMP', ':OBJECT'))), verbSemTrans]).
textCached('Admit-TheWord',frame, ['Admit-TheWord', 'Verb', 'TransitiveNPFrame', thereExists(':ACTION', and(isa(':ACTION', 'AdmitToMembership'), situationConstituents(':ACTION', ':OBJECT'), doneBy(':ACTION', ':SUBJECT'))), verbSemTransPartial]).
textCached('Admit-TheWord',frame, ['Admit-TheWord', 'Verb', 'TransitiveNPFrame', thereExists(':ACTION', and(isa(':ACTION', 'PermittingEntrance'), situationConstituents(':ACTION', ':OBJECT'), doneBy(':ACTION', ':SUBJECT'))), verbSemTransPartial]).
textCached('Admit-TheWord',frame, ['Admit-TheWord', 'Verb', 'TransitiveNPFrame', thereExists(':ACTION', and(situationConstituents(':ACTION', ':OBJECT'), doneBy(':ACTION', ':SUBJECT'), isa(':ACTION', 'AdmitToMembership'))), verbSemTransPartial]).
textCached('Admit-TheWord',frame, ['Admit-TheWord', 'Verb', 'TransitiveNPFrame', thereExists(':ACTION', and(situationConstituents(':ACTION', ':OBJECT'), doneBy(':ACTION', ':SUBJECT'), isa(':ACTION', 'PermittingEntrance'))), verbSemTransPartial]).
textCached('All-TheWord',frame, ['All-TheWord', 'Determiner', 'QuantifierFrame', forAll(':NOUN', implies(':RESTR', ':SCOPE')), determinerSemTrans]).
textCached('Along-TheWord',frame, ['Along-TheWord', 'Preposition', 'Post-NounPhraseModifyingFrame', 'motionPathway-Partial'(':NOUN', ':OBJECT'), prepSemTrans]).
textCached('Along-TheWord',frame, ['Along-TheWord', 'Preposition', 'VerbPhraseModifyingFrame', 'motionPathway-Partial'(':EVENT', ':OBJECT'), prepSemTrans]).
textCached('An-TheWord',frame, ['An-TheWord', 'Determiner', 'QuantifierFrame', thereExists(':NOUN', and(':RESTR', ':SCOPE')), determinerSemTrans]).
textCached('Ancient-TheWord',frame, ['Ancient-TheWord', 'Adjective', 'RegularAdjFrame', startsDuring(':NOUN', 'Antiquity'), adjSemTrans]).
textCached('Anger-TheWord',frame, ['Anger-TheWord', 'Adjective', 'RegularAdjFrame', feelsEmotion(':NOUN', 'MediumToVeryHighAmountFn'('Anger')), adjSemTrans]).
textCached('Anger-TheWord',frame, ['Anger-TheWord', 'Adjective', 'RegularAdjFrame', feelsEmotionTypeAtLevel(':NOUN', 'MediumToVeryHighAmountFn'('Anger')), adjSemTrans]).
textCached('Arabian-TheWord',frame, ['Arabian-TheWord', 'Adjective', 'RegularAdjFrame', conceptuallyRelated(':NOUN', 'ArabianPeninsula'), adjSemTrans]).
textCached('Are-TheWord', completeSemTrans,[]).
textCached('Are-TheWord',frame, ['Are-TheWord', 'CountNoun', 'NumberFrame', equals(':NOUN', 'Are-UnitOfArea'(':NUMBER')), nounSemTrans]).
textCached('Are-TheWord',frame, ['Are-TheWord', 'Noun', 'NumberFrame', equals(':NOUN', 'Are-UnitOfArea'(':NUMBER')), nounSemTrans]).
textCached('Arrive-TheWord',frame, ['Arrive-TheWord', 'CountNoun', 'GenitiveFrame', and(isa(':NOUN', 'ArrivingAtAPlace'), doneBy(':NOUN', ':POSSESSOR')), nounSemTrans]).
textCached('Arrive-TheWord',frame, ['Arrive-TheWord', 'Noun', 'GenitiveFrame', and(isa(':NOUN', 'ArrivingAtAPlace'), doneBy(':NOUN', ':POSSESSOR')), nounSemTrans]).
textCached('Arrive-TheWord',frame, ['Arrive-TheWord', 'Verb', 'IntransitiveVerbFrame', and(isa(':ACTION', 'ArrivingAtAPlace'), doneBy(':ACTION', ':SUBJECT')), verbSemTrans]).
textCached('At-TheWord',frame, ['At-TheWord', 'Preposition', 'Post-NounPhraseModifyingFrame', 'at-UnderspecifiedLandmark'(':NOUN', ':OBJECT'), prepSemTrans]).
textCached('At-TheWord',frame, ['At-TheWord', 'Preposition', 'Post-NounPhraseModifyingFrame', implies(and(isa(':NOUN', 'Agent-PartiallyTangible'), isa(':OBLIQUE-OBJECT', 'Event')), spectators(':OBLIQUE-OBJECT', ':NOUN')), 'prepReln-Object']).
textCached('At-TheWord',frame, ['At-TheWord', 'Preposition', 'Post-NounPhraseModifyingFrame', implies(and(isa(':NOUN', 'Event'), isa(':OBLIQUE-OBJECT', 'Place')), eventOccursAt(':NOUN', ':OBLIQUE-OBJECT')), 'prepReln-Object']).
textCached('At-TheWord',frame, ['At-TheWord', 'Preposition', 'Post-NounPhraseModifyingFrame', implies(and(isa(':NOUN', 'PartiallyTangible'), isa(':OBLIQUE-OBJECT', 'SpatialThing-Localized')), objectFoundInLocation(':NOUN', ':OBLIQUE-OBJECT')), 'prepReln-Object']).
textCached('At-TheWord',frame, ['At-TheWord', 'Preposition', 'Post-NounPhraseModifyingFrame', objectFoundInLocation(':NOUN', ':OBJECT'), prepSemTrans]).
textCached('At-TheWord',frame, ['At-TheWord', 'Preposition', 'VerbPhraseModifyingFrame', 'at-UnderspecifiedLandmark'(':ACTION', ':OBJECT'), prepSemTrans]).
textCached('At-TheWord',frame, ['At-TheWord', 'Preposition', 'VerbPhraseModifyingFrame', eventOccursAt(':ACTION', ':OBJECT'), prepSemTrans]).
textCached('At-TheWord',frame, ['At-TheWord', 'Preposition', 'VerbPhraseModifyingFrame', implies(and(isa(':ACTION', 'Event'), isa(':OBLIQUE-OBJECT', 'SpatialThing')), eventOccursAt(':ACTION', ':OBLIQUE-OBJECT')), 'prepReln-Action']).
textCached('At-TheWord',frame, ['At-TheWord', 'Preposition', 'VerbPhraseModifyingFrame', implies(and(isa(':ACTION', 'Event'), isa(':OBLIQUE-OBJECT', 'TimeOfDay')), temporallySubsumes(':OBLIQUE-OBJECT', 'StartFn'(':ACTION'))), 'prepReln-Action']).
textCached('At-TheWord',frame, ['At-TheWord', 'Preposition', 'VerbPhraseModifyingFrame', implies(and(isa(':ACTION', 'GeneralizedTransfer'), isa(':OBLIQUE-OBJECT', 'PartiallyTangible')), target(':ACTION', ':OBLIQUE-OBJECT')), 'prepReln-Action']).
textCached('At-TheWord',frame, ['At-TheWord', 'Preposition', 'VerbPhraseModifyingFrame', implies(and(isa(':ACTION', 'MakingAnOralSound'), isa(':OBLIQUE-OBJECT', 'PartiallyTangible')), communicationTarget(':ACTION', ':OBLIQUE-OBJECT')), 'prepReln-Action']).
textCached('At-TheWord',frame, ['At-TheWord', 'Preposition', 'VerbPhraseModifyingFrame', temporallyIntersects(':OBJECT', 'StartFn'(':ACTION')), prepSemTrans]).
textCached('Attend-TheWord',frame, ['Attend-TheWord', 'Verb', 'TransitiveNPFrame', socialParticipants(':OBJECT', ':SUBJECT'), verbSemTrans]).
textCached('Attend-TheWord',frame, ['Attend-TheWord', 'Verb', 'TransitiveNPFrame', thereExists(':ACTION', holdsIn(':ACTION', socialParticipants(':SUBJECT', ':OBJECT'))), denotationRelatedTo]).
textCached('Babysit-TheWord',frame, ['Babysit-TheWord', 'CountNoun', 'GenitiveFrame', and(isa(':NOUN', 'Babysitting'), beneficiary(':NOUN', ':POSSESSOR')), nounSemTrans]).
textCached('Babysit-TheWord',frame, ['Babysit-TheWord', 'Noun', 'GenitiveFrame', and(isa(':NOUN', 'Babysitting'), beneficiary(':NOUN', ':POSSESSOR')), nounSemTrans]).
textCached('Babysit-TheWord',frame, ['Babysit-TheWord', 'Verb', 'TransitiveNPFrame', and(isa(':ACTION', 'Babysitting'), beneficiary(':ACTION', ':OBJECT'), performedBy(':ACTION', ':SUBJECT')), verbSemTrans]).
textCached('Bad-TheWord',frame, ['Bad-TheWord', 'Adjective', 'RegularAdjFrame', hasEvaluativeQuantity(':NOUN', 'MediumToVeryHighAmountFn'('Badness-Generic')), adjSemTrans]).
textCached('Bargain-TheWord',frame, ['Bargain-TheWord', 'Verb', 'PPCompFrameFn'('TransitivePPFrameType', 'With-TheWord'), and(socialParticipants(':ACTION', ':OBLIQUE-OBJECT'), socialParticipants(':ACTION', ':SUBJECT'), isa(':ACTION', 'Bargaining')), verbSemTrans]).
textCached('Bargain-TheWord',frame, ['Bargain-TheWord', 'Verb', 'UnderstoodReciprocalObjectFrame', and(socialParticipants(':ACTION', ':SUBJECT'), isa(':ACTION', 'Bargaining')), verbSemTrans]).
textCached('Base-TheWord',frame, ['Base-TheWord', 'Verb', 'PPCompFrameFn'('TransitivePPFrameType', 'On-TheWord'), 'relyOn-Generic'(':SUBJECT', ':OBLIQUE-OBJECT'), verbSemTrans]).
textCached('Base-TheWord',wsframe, ['Base-TheWord', 'TheList'(string("of"), string("operations")), 'GenitiveFrame', headquarters(':POSSESSOR', ':NOUN'), 'CountNoun']).
textCached('Bat-TheWord',frame, ['Bat-TheWord', 'Verb', 'IntransitiveVerbFrame', thereExists(':ACTION', and(bodilyDoer(':SUBJECT', ':ACTION'), isa(':ACTION', 'SportsEvent'), possible(isa(':SUBJECT', 'BaseballBatter')))), performsInstancesAsPartOfJob]).
textCached('Bat-TheWord',frame, ['Bat-TheWord', 'Verb', 'TransitiveNPFrame', thereExists(':ACTION', and(bodilyDoer(':SUBJECT', ':ACTION'), isa(':ACTION', 'SportsEvent'), possible(isa(':SUBJECT', 'BaseballBatter')))), performsInstancesAsPartOfJob]).
textCached('Bat-TheWord',frame, ['Bat-TheWord', 'Verb', 'TransitiveNPFrame', thereExists(':ACTION', and(isa(':ACTION', 'BaseballSwing'), situationConstituents(':ACTION', ':OBJECT'), doneBy(':ACTION', ':SUBJECT'))), verbSemTransPartial]).
textCached('Bat-TheWord',frame, ['Bat-TheWord', 'Verb', 'TransitiveNPFrame', thereExists(':ACTION', and(situationConstituents(':ACTION', ':OBJECT'), doneBy(':ACTION', ':SUBJECT'), isa(':ACTION', 'BaseballSwing'))), verbSemTransPartial]).
textCached('Be-TheWord', completeSemTrans,[]).
textCached('Be-TheWord',frame, ['Be-TheWord', 'AuxVerb', 'CopulaGenericFrame', ':COMPLEMENT', auxVerbSemTrans]).
textCached('Be-TheWord',frame, ['Be-TheWord', 'AuxVerb', 'TransitiveNPFrame', equals(':OBJECT', ':SUBJECT'), auxVerbSemTrans]).
textCached('Be-TheWord',frame, ['Be-TheWord', 'Verb', 'TransitiveNPFrame', thereExists(':ACTION', holdsIn(':ACTION', 'is-Underspecified'(':SUBJECT', ':OBJECT'))), denotation]).
textCached('Be-TheWord',wsframe, ['Be-TheWord', 'TheList'(string("able")), 'TransitiveInfinitivePhraseFrame', capableOf(':SUBJECT', ':ACTION', performedBy), 'Verb']).
textCached('Be-TheWord',wsframe, ['Be-TheWord', 'TheList'(string("made")), 'PPCompFrameFn'('TransitivePPFrameType', 'Of-TheWord'), constituents(':SUBJECT', ':OBLIQUE-OBJECT'), 'Verb']).
textCached('Be-TheWord',wsframe, ['Be-TheWord', 'TheList'(string('"able"')), 'TransitiveInfinitivePhraseFrame', capableOf(':SUBJECT', ':ACTION', performedBy), 'Verb']).
textCached('Be-TheWord',wsframe, ['Be-TheWord', 'TheList'(string('"made"')), 'PPCompFrameFn'('TransitivePPFrameType', 'Of-TheWord'), constituents(':SUBJECT', ':OBLIQUE-OBJECT'), 'Verb']).
:- include('posm_cached_data2.nldata').
| TeamSPoon/logicmoo_workspace | packs_sys/logicmoo_nlu/ext/pldata/posm_cached_data2.pl | Perl | mit | 14,829 |
package ScrapingHub::API::Client::Storage::Items;
use Moo;
extends 'ScrapingHub::API::Client';
use URI::Escape;
use ScrapingHub::API::Utils qw/flag_missing_params/;
sub get_items {
    # GET /items/<job>[/<field>] -- fetch the items scraped by a job,
    # optionally narrowed to a single item field. Any keys remaining
    # in $args after 'job'/'field' are removed are forwarded as query
    # parameters.
    #
    # $args: hashref with 'job' (required, e.g. "123456/56/78") and
    #        optional 'field', plus any extra API filter parameters.
    # Returns whatever $self->query() returns for the request.
    my ($self, $args) = @_;

    flag_missing_params($args, qw/job/);

    # delete() removes the routing params from the query-arg hash and
    # hands back their values. Unlike the previous
    # `my $x = $args->{k} and delete $args->{k}` form, this also works
    # for false-but-defined values (e.g. a field named "0"), which
    # previously leaked into the query string instead of the URL path.
    my $job   = delete $args->{job};
    my $field = delete $args->{field};

    my $url = $self->api_url . '/items/' . $job;
    $url .= '/' . URI::Escape::uri_escape_utf8($field)
        if defined $field && length $field;

    return $self->query('GET', $url, $args);
}
sub get_job_item_stats {
    # GET /items/<job>/stats -- fetch stats for the items scraped by
    # a job.
    #
    # $args: hashref with 'job' (required); remaining keys are
    #        forwarded as query parameters.
    # Returns whatever $self->query() returns for the request.
    my ($self, $args) = @_;

    flag_missing_params($args, qw/job/);

    # Capture 'job' while removing it from the forwarded query params
    # (delete returns the removed value, even for false-but-defined
    # values, unlike the old `$x = ... and delete ...` idiom).
    my $job = delete $args->{job};

    my $url = $self->api_url . '/items/' . $job . '/stats';
    return $self->query('GET', $url, $args);
}
1;
=head1 NAME
ScrapingHub Items API client interface
=head1 SYNOPSIS
my $sh_jobs = ScrapingHub::API::Client::App::Items;
my $items = $sh_jobs->get_items({ job => '123456/56/78' });
=head1 DESCRIPTION
Client interface for the Items API that falls under ScrapingHub's Scrapy API suite.
=head1 METHODS
=head2 get_items
Get the items that were scraped for a particular job.
C<< my $items = $sh_jobs->get_items({ job => '123456/56/78' }); >>
=head2 get_job_item_stats
Get the stats for the items scraped for a particular job.
C<< my $stats = $sh_jobs->get_job_item_stats({ job => '123456/56/78' }); >>
=cut
| mgoodnight/scrapinghub-scrapy-cloud-client | lib/ScrapingHub/API/Client/Storage/Items.pm | Perl | mit | 1,415 |
package #
Date::Manip::TZ::euberl00;
# Copyright (c) 2008-2015 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Wed Nov 25 11:33:42 EST 2015
# Data version: tzdata2015g
# Code version: tzcode2015g
# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz
use strict;
use warnings;
require 5.010000;
# Package globals holding this zone's precomputed transition tables,
# read by Date::Manip's timezone lookup code.
our (%Dates,%LastRule);
# Free the (large) data tables at interpreter shutdown.
END {
undef %Dates;
undef %LastRule;
}
our ($VERSION);
$VERSION='6.52';
END { undef $VERSION; }
%Dates = (
1 =>
[
[ [1,1,2,0,0,0],[1,1,2,0,53,28],'+00:53:28',[0,53,28],
'LMT',0,[1893,3,31,23,6,31],[1893,3,31,23,59,59],
'0001010200:00:00','0001010200:53:28','1893033123:06:31','1893033123:59:59' ],
],
1893 =>
[
[ [1893,3,31,23,6,32],[1893,4,1,0,6,32],'+01:00:00',[1,0,0],
'CET',0,[1916,4,30,21,59,59],[1916,4,30,22,59,59],
'1893033123:06:32','1893040100:06:32','1916043021:59:59','1916043022:59:59' ],
],
1916 =>
[
[ [1916,4,30,22,0,0],[1916,5,1,0,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1916,9,30,22,59,59],[1916,10,1,0,59,59],
'1916043022:00:00','1916050100:00:00','1916093022:59:59','1916100100:59:59' ],
[ [1916,9,30,23,0,0],[1916,10,1,0,0,0],'+01:00:00',[1,0,0],
'CET',0,[1917,4,16,0,59,59],[1917,4,16,1,59,59],
'1916093023:00:00','1916100100:00:00','1917041600:59:59','1917041601:59:59' ],
],
1917 =>
[
[ [1917,4,16,1,0,0],[1917,4,16,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1917,9,17,0,59,59],[1917,9,17,2,59,59],
'1917041601:00:00','1917041603:00:00','1917091700:59:59','1917091702:59:59' ],
[ [1917,9,17,1,0,0],[1917,9,17,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1918,4,15,0,59,59],[1918,4,15,1,59,59],
'1917091701:00:00','1917091702:00:00','1918041500:59:59','1918041501:59:59' ],
],
1918 =>
[
[ [1918,4,15,1,0,0],[1918,4,15,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1918,9,16,0,59,59],[1918,9,16,2,59,59],
'1918041501:00:00','1918041503:00:00','1918091600:59:59','1918091602:59:59' ],
[ [1918,9,16,1,0,0],[1918,9,16,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1940,4,1,0,59,59],[1940,4,1,1,59,59],
'1918091601:00:00','1918091602:00:00','1940040100:59:59','1940040101:59:59' ],
],
1940 =>
[
[ [1940,4,1,1,0,0],[1940,4,1,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1942,11,2,0,59,59],[1942,11,2,2,59,59],
'1940040101:00:00','1940040103:00:00','1942110200:59:59','1942110202:59:59' ],
],
1942 =>
[
[ [1942,11,2,1,0,0],[1942,11,2,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1943,3,29,0,59,59],[1943,3,29,1,59,59],
'1942110201:00:00','1942110202:00:00','1943032900:59:59','1943032901:59:59' ],
],
1943 =>
[
[ [1943,3,29,1,0,0],[1943,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1943,10,4,0,59,59],[1943,10,4,2,59,59],
'1943032901:00:00','1943032903:00:00','1943100400:59:59','1943100402:59:59' ],
[ [1943,10,4,1,0,0],[1943,10,4,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1944,4,3,0,59,59],[1944,4,3,1,59,59],
'1943100401:00:00','1943100402:00:00','1944040300:59:59','1944040301:59:59' ],
],
1944 =>
[
[ [1944,4,3,1,0,0],[1944,4,3,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1944,10,2,0,59,59],[1944,10,2,2,59,59],
'1944040301:00:00','1944040303:00:00','1944100200:59:59','1944100202:59:59' ],
[ [1944,10,2,1,0,0],[1944,10,2,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1945,4,2,0,59,59],[1945,4,2,1,59,59],
'1944100201:00:00','1944100202:00:00','1945040200:59:59','1945040201:59:59' ],
],
1945 =>
[
[ [1945,4,2,1,0,0],[1945,4,2,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1945,5,23,23,59,59],[1945,5,24,1,59,59],
'1945040201:00:00','1945040203:00:00','1945052323:59:59','1945052401:59:59' ],
[ [1945,5,24,0,0,0],[1945,5,24,3,0,0],'+03:00:00',[3,0,0],
'CEMT',1,[1945,9,23,23,59,59],[1945,9,24,2,59,59],
'1945052400:00:00','1945052403:00:00','1945092323:59:59','1945092402:59:59' ],
[ [1945,9,24,0,0,0],[1945,9,24,2,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1945,11,18,0,59,59],[1945,11,18,2,59,59],
'1945092400:00:00','1945092402:00:00','1945111800:59:59','1945111802:59:59' ],
[ [1945,11,18,1,0,0],[1945,11,18,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1946,4,14,0,59,59],[1946,4,14,1,59,59],
'1945111801:00:00','1945111802:00:00','1946041400:59:59','1946041401:59:59' ],
],
1946 =>
[
[ [1946,4,14,1,0,0],[1946,4,14,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1946,10,7,0,59,59],[1946,10,7,2,59,59],
'1946041401:00:00','1946041403:00:00','1946100700:59:59','1946100702:59:59' ],
[ [1946,10,7,1,0,0],[1946,10,7,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1947,4,6,1,59,59],[1947,4,6,2,59,59],
'1946100701:00:00','1946100702:00:00','1947040601:59:59','1947040602:59:59' ],
],
1947 =>
[
[ [1947,4,6,2,0,0],[1947,4,6,4,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1947,5,11,0,59,59],[1947,5,11,2,59,59],
'1947040602:00:00','1947040604:00:00','1947051100:59:59','1947051102:59:59' ],
[ [1947,5,11,1,0,0],[1947,5,11,4,0,0],'+03:00:00',[3,0,0],
'CEMT',1,[1947,6,28,23,59,59],[1947,6,29,2,59,59],
'1947051101:00:00','1947051104:00:00','1947062823:59:59','1947062902:59:59' ],
[ [1947,6,29,0,0,0],[1947,6,29,2,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1947,10,5,0,59,59],[1947,10,5,2,59,59],
'1947062900:00:00','1947062902:00:00','1947100500:59:59','1947100502:59:59' ],
[ [1947,10,5,1,0,0],[1947,10,5,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1948,4,18,0,59,59],[1948,4,18,1,59,59],
'1947100501:00:00','1947100502:00:00','1948041800:59:59','1948041801:59:59' ],
],
1948 =>
[
[ [1948,4,18,1,0,0],[1948,4,18,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1948,10,3,0,59,59],[1948,10,3,2,59,59],
'1948041801:00:00','1948041803:00:00','1948100300:59:59','1948100302:59:59' ],
[ [1948,10,3,1,0,0],[1948,10,3,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1949,4,10,0,59,59],[1949,4,10,1,59,59],
'1948100301:00:00','1948100302:00:00','1949041000:59:59','1949041001:59:59' ],
],
1949 =>
[
[ [1949,4,10,1,0,0],[1949,4,10,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1949,10,2,0,59,59],[1949,10,2,2,59,59],
'1949041001:00:00','1949041003:00:00','1949100200:59:59','1949100202:59:59' ],
[ [1949,10,2,1,0,0],[1949,10,2,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1980,4,6,0,59,59],[1980,4,6,1,59,59],
'1949100201:00:00','1949100202:00:00','1980040600:59:59','1980040601:59:59' ],
],
1980 =>
[
[ [1980,4,6,1,0,0],[1980,4,6,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1980,9,28,0,59,59],[1980,9,28,2,59,59],
'1980040601:00:00','1980040603:00:00','1980092800:59:59','1980092802:59:59' ],
[ [1980,9,28,1,0,0],[1980,9,28,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1981,3,29,0,59,59],[1981,3,29,1,59,59],
'1980092801:00:00','1980092802:00:00','1981032900:59:59','1981032901:59:59' ],
],
1981 =>
[
[ [1981,3,29,1,0,0],[1981,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1981,9,27,0,59,59],[1981,9,27,2,59,59],
'1981032901:00:00','1981032903:00:00','1981092700:59:59','1981092702:59:59' ],
[ [1981,9,27,1,0,0],[1981,9,27,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1982,3,28,0,59,59],[1982,3,28,1,59,59],
'1981092701:00:00','1981092702:00:00','1982032800:59:59','1982032801:59:59' ],
],
1982 =>
[
[ [1982,3,28,1,0,0],[1982,3,28,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1982,9,26,0,59,59],[1982,9,26,2,59,59],
'1982032801:00:00','1982032803:00:00','1982092600:59:59','1982092602:59:59' ],
[ [1982,9,26,1,0,0],[1982,9,26,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1983,3,27,0,59,59],[1983,3,27,1,59,59],
'1982092601:00:00','1982092602:00:00','1983032700:59:59','1983032701:59:59' ],
],
1983 =>
[
[ [1983,3,27,1,0,0],[1983,3,27,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1983,9,25,0,59,59],[1983,9,25,2,59,59],
'1983032701:00:00','1983032703:00:00','1983092500:59:59','1983092502:59:59' ],
[ [1983,9,25,1,0,0],[1983,9,25,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1984,3,25,0,59,59],[1984,3,25,1,59,59],
'1983092501:00:00','1983092502:00:00','1984032500:59:59','1984032501:59:59' ],
],
1984 =>
[
[ [1984,3,25,1,0,0],[1984,3,25,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1984,9,30,0,59,59],[1984,9,30,2,59,59],
'1984032501:00:00','1984032503:00:00','1984093000:59:59','1984093002:59:59' ],
[ [1984,9,30,1,0,0],[1984,9,30,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1985,3,31,0,59,59],[1985,3,31,1,59,59],
'1984093001:00:00','1984093002:00:00','1985033100:59:59','1985033101:59:59' ],
],
1985 =>
[
[ [1985,3,31,1,0,0],[1985,3,31,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1985,9,29,0,59,59],[1985,9,29,2,59,59],
'1985033101:00:00','1985033103:00:00','1985092900:59:59','1985092902:59:59' ],
[ [1985,9,29,1,0,0],[1985,9,29,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1986,3,30,0,59,59],[1986,3,30,1,59,59],
'1985092901:00:00','1985092902:00:00','1986033000:59:59','1986033001:59:59' ],
],
1986 =>
[
[ [1986,3,30,1,0,0],[1986,3,30,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1986,9,28,0,59,59],[1986,9,28,2,59,59],
'1986033001:00:00','1986033003:00:00','1986092800:59:59','1986092802:59:59' ],
[ [1986,9,28,1,0,0],[1986,9,28,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1987,3,29,0,59,59],[1987,3,29,1,59,59],
'1986092801:00:00','1986092802:00:00','1987032900:59:59','1987032901:59:59' ],
],
1987 =>
[
[ [1987,3,29,1,0,0],[1987,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1987,9,27,0,59,59],[1987,9,27,2,59,59],
'1987032901:00:00','1987032903:00:00','1987092700:59:59','1987092702:59:59' ],
[ [1987,9,27,1,0,0],[1987,9,27,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1988,3,27,0,59,59],[1988,3,27,1,59,59],
'1987092701:00:00','1987092702:00:00','1988032700:59:59','1988032701:59:59' ],
],
1988 =>
[
[ [1988,3,27,1,0,0],[1988,3,27,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1988,9,25,0,59,59],[1988,9,25,2,59,59],
'1988032701:00:00','1988032703:00:00','1988092500:59:59','1988092502:59:59' ],
[ [1988,9,25,1,0,0],[1988,9,25,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1989,3,26,0,59,59],[1989,3,26,1,59,59],
'1988092501:00:00','1988092502:00:00','1989032600:59:59','1989032601:59:59' ],
],
1989 =>
[
[ [1989,3,26,1,0,0],[1989,3,26,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1989,9,24,0,59,59],[1989,9,24,2,59,59],
'1989032601:00:00','1989032603:00:00','1989092400:59:59','1989092402:59:59' ],
[ [1989,9,24,1,0,0],[1989,9,24,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1990,3,25,0,59,59],[1990,3,25,1,59,59],
'1989092401:00:00','1989092402:00:00','1990032500:59:59','1990032501:59:59' ],
],
1990 =>
[
[ [1990,3,25,1,0,0],[1990,3,25,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1990,9,30,0,59,59],[1990,9,30,2,59,59],
'1990032501:00:00','1990032503:00:00','1990093000:59:59','1990093002:59:59' ],
[ [1990,9,30,1,0,0],[1990,9,30,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1991,3,31,0,59,59],[1991,3,31,1,59,59],
'1990093001:00:00','1990093002:00:00','1991033100:59:59','1991033101:59:59' ],
],
1991 =>
[
[ [1991,3,31,1,0,0],[1991,3,31,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1991,9,29,0,59,59],[1991,9,29,2,59,59],
'1991033101:00:00','1991033103:00:00','1991092900:59:59','1991092902:59:59' ],
[ [1991,9,29,1,0,0],[1991,9,29,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1992,3,29,0,59,59],[1992,3,29,1,59,59],
'1991092901:00:00','1991092902:00:00','1992032900:59:59','1992032901:59:59' ],
],
1992 =>
[
[ [1992,3,29,1,0,0],[1992,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1992,9,27,0,59,59],[1992,9,27,2,59,59],
'1992032901:00:00','1992032903:00:00','1992092700:59:59','1992092702:59:59' ],
[ [1992,9,27,1,0,0],[1992,9,27,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1993,3,28,0,59,59],[1993,3,28,1,59,59],
'1992092701:00:00','1992092702:00:00','1993032800:59:59','1993032801:59:59' ],
],
1993 =>
[
[ [1993,3,28,1,0,0],[1993,3,28,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1993,9,26,0,59,59],[1993,9,26,2,59,59],
'1993032801:00:00','1993032803:00:00','1993092600:59:59','1993092602:59:59' ],
[ [1993,9,26,1,0,0],[1993,9,26,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1994,3,27,0,59,59],[1994,3,27,1,59,59],
'1993092601:00:00','1993092602:00:00','1994032700:59:59','1994032701:59:59' ],
],
1994 =>
[
[ [1994,3,27,1,0,0],[1994,3,27,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1994,9,25,0,59,59],[1994,9,25,2,59,59],
'1994032701:00:00','1994032703:00:00','1994092500:59:59','1994092502:59:59' ],
[ [1994,9,25,1,0,0],[1994,9,25,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1995,3,26,0,59,59],[1995,3,26,1,59,59],
'1994092501:00:00','1994092502:00:00','1995032600:59:59','1995032601:59:59' ],
],
1995 =>
[
[ [1995,3,26,1,0,0],[1995,3,26,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1995,9,24,0,59,59],[1995,9,24,2,59,59],
'1995032601:00:00','1995032603:00:00','1995092400:59:59','1995092402:59:59' ],
[ [1995,9,24,1,0,0],[1995,9,24,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1996,3,31,0,59,59],[1996,3,31,1,59,59],
'1995092401:00:00','1995092402:00:00','1996033100:59:59','1996033101:59:59' ],
],
1996 =>
[
[ [1996,3,31,1,0,0],[1996,3,31,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1996,10,27,0,59,59],[1996,10,27,2,59,59],
'1996033101:00:00','1996033103:00:00','1996102700:59:59','1996102702:59:59' ],
[ [1996,10,27,1,0,0],[1996,10,27,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1997,3,30,0,59,59],[1997,3,30,1,59,59],
'1996102701:00:00','1996102702:00:00','1997033000:59:59','1997033001:59:59' ],
],
1997 =>
[
[ [1997,3,30,1,0,0],[1997,3,30,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1997,10,26,0,59,59],[1997,10,26,2,59,59],
'1997033001:00:00','1997033003:00:00','1997102600:59:59','1997102602:59:59' ],
[ [1997,10,26,1,0,0],[1997,10,26,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1998,3,29,0,59,59],[1998,3,29,1,59,59],
'1997102601:00:00','1997102602:00:00','1998032900:59:59','1998032901:59:59' ],
],
1998 =>
[
[ [1998,3,29,1,0,0],[1998,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1998,10,25,0,59,59],[1998,10,25,2,59,59],
'1998032901:00:00','1998032903:00:00','1998102500:59:59','1998102502:59:59' ],
[ [1998,10,25,1,0,0],[1998,10,25,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[1999,3,28,0,59,59],[1999,3,28,1,59,59],
'1998102501:00:00','1998102502:00:00','1999032800:59:59','1999032801:59:59' ],
],
1999 =>
[
[ [1999,3,28,1,0,0],[1999,3,28,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[1999,10,31,0,59,59],[1999,10,31,2,59,59],
'1999032801:00:00','1999032803:00:00','1999103100:59:59','1999103102:59:59' ],
[ [1999,10,31,1,0,0],[1999,10,31,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2000,3,26,0,59,59],[2000,3,26,1,59,59],
'1999103101:00:00','1999103102:00:00','2000032600:59:59','2000032601:59:59' ],
],
2000 =>
[
[ [2000,3,26,1,0,0],[2000,3,26,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2000,10,29,0,59,59],[2000,10,29,2,59,59],
'2000032601:00:00','2000032603:00:00','2000102900:59:59','2000102902:59:59' ],
[ [2000,10,29,1,0,0],[2000,10,29,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2001,3,25,0,59,59],[2001,3,25,1,59,59],
'2000102901:00:00','2000102902:00:00','2001032500:59:59','2001032501:59:59' ],
],
2001 =>
[
[ [2001,3,25,1,0,0],[2001,3,25,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2001,10,28,0,59,59],[2001,10,28,2,59,59],
'2001032501:00:00','2001032503:00:00','2001102800:59:59','2001102802:59:59' ],
[ [2001,10,28,1,0,0],[2001,10,28,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2002,3,31,0,59,59],[2002,3,31,1,59,59],
'2001102801:00:00','2001102802:00:00','2002033100:59:59','2002033101:59:59' ],
],
2002 =>
[
[ [2002,3,31,1,0,0],[2002,3,31,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2002,10,27,0,59,59],[2002,10,27,2,59,59],
'2002033101:00:00','2002033103:00:00','2002102700:59:59','2002102702:59:59' ],
[ [2002,10,27,1,0,0],[2002,10,27,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2003,3,30,0,59,59],[2003,3,30,1,59,59],
'2002102701:00:00','2002102702:00:00','2003033000:59:59','2003033001:59:59' ],
],
2003 =>
[
[ [2003,3,30,1,0,0],[2003,3,30,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2003,10,26,0,59,59],[2003,10,26,2,59,59],
'2003033001:00:00','2003033003:00:00','2003102600:59:59','2003102602:59:59' ],
[ [2003,10,26,1,0,0],[2003,10,26,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2004,3,28,0,59,59],[2004,3,28,1,59,59],
'2003102601:00:00','2003102602:00:00','2004032800:59:59','2004032801:59:59' ],
],
2004 =>
[
[ [2004,3,28,1,0,0],[2004,3,28,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2004,10,31,0,59,59],[2004,10,31,2,59,59],
'2004032801:00:00','2004032803:00:00','2004103100:59:59','2004103102:59:59' ],
[ [2004,10,31,1,0,0],[2004,10,31,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2005,3,27,0,59,59],[2005,3,27,1,59,59],
'2004103101:00:00','2004103102:00:00','2005032700:59:59','2005032701:59:59' ],
],
2005 =>
[
[ [2005,3,27,1,0,0],[2005,3,27,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2005,10,30,0,59,59],[2005,10,30,2,59,59],
'2005032701:00:00','2005032703:00:00','2005103000:59:59','2005103002:59:59' ],
[ [2005,10,30,1,0,0],[2005,10,30,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2006,3,26,0,59,59],[2006,3,26,1,59,59],
'2005103001:00:00','2005103002:00:00','2006032600:59:59','2006032601:59:59' ],
],
2006 =>
[
[ [2006,3,26,1,0,0],[2006,3,26,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2006,10,29,0,59,59],[2006,10,29,2,59,59],
'2006032601:00:00','2006032603:00:00','2006102900:59:59','2006102902:59:59' ],
[ [2006,10,29,1,0,0],[2006,10,29,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2007,3,25,0,59,59],[2007,3,25,1,59,59],
'2006102901:00:00','2006102902:00:00','2007032500:59:59','2007032501:59:59' ],
],
2007 =>
[
[ [2007,3,25,1,0,0],[2007,3,25,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2007,10,28,0,59,59],[2007,10,28,2,59,59],
'2007032501:00:00','2007032503:00:00','2007102800:59:59','2007102802:59:59' ],
[ [2007,10,28,1,0,0],[2007,10,28,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2008,3,30,0,59,59],[2008,3,30,1,59,59],
'2007102801:00:00','2007102802:00:00','2008033000:59:59','2008033001:59:59' ],
],
2008 =>
[
[ [2008,3,30,1,0,0],[2008,3,30,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2008,10,26,0,59,59],[2008,10,26,2,59,59],
'2008033001:00:00','2008033003:00:00','2008102600:59:59','2008102602:59:59' ],
[ [2008,10,26,1,0,0],[2008,10,26,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2009,3,29,0,59,59],[2009,3,29,1,59,59],
'2008102601:00:00','2008102602:00:00','2009032900:59:59','2009032901:59:59' ],
],
2009 =>
[
[ [2009,3,29,1,0,0],[2009,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2009,10,25,0,59,59],[2009,10,25,2,59,59],
'2009032901:00:00','2009032903:00:00','2009102500:59:59','2009102502:59:59' ],
[ [2009,10,25,1,0,0],[2009,10,25,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2010,3,28,0,59,59],[2010,3,28,1,59,59],
'2009102501:00:00','2009102502:00:00','2010032800:59:59','2010032801:59:59' ],
],
2010 =>
[
[ [2010,3,28,1,0,0],[2010,3,28,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2010,10,31,0,59,59],[2010,10,31,2,59,59],
'2010032801:00:00','2010032803:00:00','2010103100:59:59','2010103102:59:59' ],
[ [2010,10,31,1,0,0],[2010,10,31,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2011,3,27,0,59,59],[2011,3,27,1,59,59],
'2010103101:00:00','2010103102:00:00','2011032700:59:59','2011032701:59:59' ],
],
2011 =>
[
[ [2011,3,27,1,0,0],[2011,3,27,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2011,10,30,0,59,59],[2011,10,30,2,59,59],
'2011032701:00:00','2011032703:00:00','2011103000:59:59','2011103002:59:59' ],
[ [2011,10,30,1,0,0],[2011,10,30,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2012,3,25,0,59,59],[2012,3,25,1,59,59],
'2011103001:00:00','2011103002:00:00','2012032500:59:59','2012032501:59:59' ],
],
2012 =>
[
[ [2012,3,25,1,0,0],[2012,3,25,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2012,10,28,0,59,59],[2012,10,28,2,59,59],
'2012032501:00:00','2012032503:00:00','2012102800:59:59','2012102802:59:59' ],
[ [2012,10,28,1,0,0],[2012,10,28,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2013,3,31,0,59,59],[2013,3,31,1,59,59],
'2012102801:00:00','2012102802:00:00','2013033100:59:59','2013033101:59:59' ],
],
2013 =>
[
[ [2013,3,31,1,0,0],[2013,3,31,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2013,10,27,0,59,59],[2013,10,27,2,59,59],
'2013033101:00:00','2013033103:00:00','2013102700:59:59','2013102702:59:59' ],
[ [2013,10,27,1,0,0],[2013,10,27,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2014,3,30,0,59,59],[2014,3,30,1,59,59],
'2013102701:00:00','2013102702:00:00','2014033000:59:59','2014033001:59:59' ],
],
2014 =>
[
[ [2014,3,30,1,0,0],[2014,3,30,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2014,10,26,0,59,59],[2014,10,26,2,59,59],
'2014033001:00:00','2014033003:00:00','2014102600:59:59','2014102602:59:59' ],
[ [2014,10,26,1,0,0],[2014,10,26,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2015,3,29,0,59,59],[2015,3,29,1,59,59],
'2014102601:00:00','2014102602:00:00','2015032900:59:59','2015032901:59:59' ],
],
2015 =>
[
[ [2015,3,29,1,0,0],[2015,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2015,10,25,0,59,59],[2015,10,25,2,59,59],
'2015032901:00:00','2015032903:00:00','2015102500:59:59','2015102502:59:59' ],
[ [2015,10,25,1,0,0],[2015,10,25,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2016,3,27,0,59,59],[2016,3,27,1,59,59],
'2015102501:00:00','2015102502:00:00','2016032700:59:59','2016032701:59:59' ],
],
2016 =>
[
[ [2016,3,27,1,0,0],[2016,3,27,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2016,10,30,0,59,59],[2016,10,30,2,59,59],
'2016032701:00:00','2016032703:00:00','2016103000:59:59','2016103002:59:59' ],
[ [2016,10,30,1,0,0],[2016,10,30,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2017,3,26,0,59,59],[2017,3,26,1,59,59],
'2016103001:00:00','2016103002:00:00','2017032600:59:59','2017032601:59:59' ],
],
2017 =>
[
[ [2017,3,26,1,0,0],[2017,3,26,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2017,10,29,0,59,59],[2017,10,29,2,59,59],
'2017032601:00:00','2017032603:00:00','2017102900:59:59','2017102902:59:59' ],
[ [2017,10,29,1,0,0],[2017,10,29,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2018,3,25,0,59,59],[2018,3,25,1,59,59],
'2017102901:00:00','2017102902:00:00','2018032500:59:59','2018032501:59:59' ],
],
2018 =>
[
[ [2018,3,25,1,0,0],[2018,3,25,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2018,10,28,0,59,59],[2018,10,28,2,59,59],
'2018032501:00:00','2018032503:00:00','2018102800:59:59','2018102802:59:59' ],
[ [2018,10,28,1,0,0],[2018,10,28,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2019,3,31,0,59,59],[2019,3,31,1,59,59],
'2018102801:00:00','2018102802:00:00','2019033100:59:59','2019033101:59:59' ],
],
2019 =>
[
[ [2019,3,31,1,0,0],[2019,3,31,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2019,10,27,0,59,59],[2019,10,27,2,59,59],
'2019033101:00:00','2019033103:00:00','2019102700:59:59','2019102702:59:59' ],
[ [2019,10,27,1,0,0],[2019,10,27,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2020,3,29,0,59,59],[2020,3,29,1,59,59],
'2019102701:00:00','2019102702:00:00','2020032900:59:59','2020032901:59:59' ],
],
2020 =>
[
[ [2020,3,29,1,0,0],[2020,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2020,10,25,0,59,59],[2020,10,25,2,59,59],
'2020032901:00:00','2020032903:00:00','2020102500:59:59','2020102502:59:59' ],
[ [2020,10,25,1,0,0],[2020,10,25,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2021,3,28,0,59,59],[2021,3,28,1,59,59],
'2020102501:00:00','2020102502:00:00','2021032800:59:59','2021032801:59:59' ],
],
2021 =>
[
[ [2021,3,28,1,0,0],[2021,3,28,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2021,10,31,0,59,59],[2021,10,31,2,59,59],
'2021032801:00:00','2021032803:00:00','2021103100:59:59','2021103102:59:59' ],
[ [2021,10,31,1,0,0],[2021,10,31,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2022,3,27,0,59,59],[2022,3,27,1,59,59],
'2021103101:00:00','2021103102:00:00','2022032700:59:59','2022032701:59:59' ],
],
2022 =>
[
[ [2022,3,27,1,0,0],[2022,3,27,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2022,10,30,0,59,59],[2022,10,30,2,59,59],
'2022032701:00:00','2022032703:00:00','2022103000:59:59','2022103002:59:59' ],
[ [2022,10,30,1,0,0],[2022,10,30,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2023,3,26,0,59,59],[2023,3,26,1,59,59],
'2022103001:00:00','2022103002:00:00','2023032600:59:59','2023032601:59:59' ],
],
2023 =>
[
[ [2023,3,26,1,0,0],[2023,3,26,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2023,10,29,0,59,59],[2023,10,29,2,59,59],
'2023032601:00:00','2023032603:00:00','2023102900:59:59','2023102902:59:59' ],
[ [2023,10,29,1,0,0],[2023,10,29,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2024,3,31,0,59,59],[2024,3,31,1,59,59],
'2023102901:00:00','2023102902:00:00','2024033100:59:59','2024033101:59:59' ],
],
2024 =>
[
[ [2024,3,31,1,0,0],[2024,3,31,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2024,10,27,0,59,59],[2024,10,27,2,59,59],
'2024033101:00:00','2024033103:00:00','2024102700:59:59','2024102702:59:59' ],
[ [2024,10,27,1,0,0],[2024,10,27,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2025,3,30,0,59,59],[2025,3,30,1,59,59],
'2024102701:00:00','2024102702:00:00','2025033000:59:59','2025033001:59:59' ],
],
2025 =>
[
[ [2025,3,30,1,0,0],[2025,3,30,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2025,10,26,0,59,59],[2025,10,26,2,59,59],
'2025033001:00:00','2025033003:00:00','2025102600:59:59','2025102602:59:59' ],
[ [2025,10,26,1,0,0],[2025,10,26,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2026,3,29,0,59,59],[2026,3,29,1,59,59],
'2025102601:00:00','2025102602:00:00','2026032900:59:59','2026032901:59:59' ],
],
2026 =>
[
[ [2026,3,29,1,0,0],[2026,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2026,10,25,0,59,59],[2026,10,25,2,59,59],
'2026032901:00:00','2026032903:00:00','2026102500:59:59','2026102502:59:59' ],
[ [2026,10,25,1,0,0],[2026,10,25,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2027,3,28,0,59,59],[2027,3,28,1,59,59],
'2026102501:00:00','2026102502:00:00','2027032800:59:59','2027032801:59:59' ],
],
2027 =>
[
[ [2027,3,28,1,0,0],[2027,3,28,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2027,10,31,0,59,59],[2027,10,31,2,59,59],
'2027032801:00:00','2027032803:00:00','2027103100:59:59','2027103102:59:59' ],
[ [2027,10,31,1,0,0],[2027,10,31,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2028,3,26,0,59,59],[2028,3,26,1,59,59],
'2027103101:00:00','2027103102:00:00','2028032600:59:59','2028032601:59:59' ],
],
2028 =>
[
[ [2028,3,26,1,0,0],[2028,3,26,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2028,10,29,0,59,59],[2028,10,29,2,59,59],
'2028032601:00:00','2028032603:00:00','2028102900:59:59','2028102902:59:59' ],
[ [2028,10,29,1,0,0],[2028,10,29,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2029,3,25,0,59,59],[2029,3,25,1,59,59],
'2028102901:00:00','2028102902:00:00','2029032500:59:59','2029032501:59:59' ],
],
2029 =>
[
[ [2029,3,25,1,0,0],[2029,3,25,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2029,10,28,0,59,59],[2029,10,28,2,59,59],
'2029032501:00:00','2029032503:00:00','2029102800:59:59','2029102802:59:59' ],
[ [2029,10,28,1,0,0],[2029,10,28,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2030,3,31,0,59,59],[2030,3,31,1,59,59],
'2029102801:00:00','2029102802:00:00','2030033100:59:59','2030033101:59:59' ],
],
2030 =>
[
[ [2030,3,31,1,0,0],[2030,3,31,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2030,10,27,0,59,59],[2030,10,27,2,59,59],
'2030033101:00:00','2030033103:00:00','2030102700:59:59','2030102702:59:59' ],
[ [2030,10,27,1,0,0],[2030,10,27,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2031,3,30,0,59,59],[2031,3,30,1,59,59],
'2030102701:00:00','2030102702:00:00','2031033000:59:59','2031033001:59:59' ],
],
2031 =>
[
[ [2031,3,30,1,0,0],[2031,3,30,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2031,10,26,0,59,59],[2031,10,26,2,59,59],
'2031033001:00:00','2031033003:00:00','2031102600:59:59','2031102602:59:59' ],
[ [2031,10,26,1,0,0],[2031,10,26,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2032,3,28,0,59,59],[2032,3,28,1,59,59],
'2031102601:00:00','2031102602:00:00','2032032800:59:59','2032032801:59:59' ],
],
2032 =>
[
[ [2032,3,28,1,0,0],[2032,3,28,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2032,10,31,0,59,59],[2032,10,31,2,59,59],
'2032032801:00:00','2032032803:00:00','2032103100:59:59','2032103102:59:59' ],
[ [2032,10,31,1,0,0],[2032,10,31,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2033,3,27,0,59,59],[2033,3,27,1,59,59],
'2032103101:00:00','2032103102:00:00','2033032700:59:59','2033032701:59:59' ],
],
2033 =>
[
[ [2033,3,27,1,0,0],[2033,3,27,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2033,10,30,0,59,59],[2033,10,30,2,59,59],
'2033032701:00:00','2033032703:00:00','2033103000:59:59','2033103002:59:59' ],
[ [2033,10,30,1,0,0],[2033,10,30,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2034,3,26,0,59,59],[2034,3,26,1,59,59],
'2033103001:00:00','2033103002:00:00','2034032600:59:59','2034032601:59:59' ],
],
2034 =>
[
[ [2034,3,26,1,0,0],[2034,3,26,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2034,10,29,0,59,59],[2034,10,29,2,59,59],
'2034032601:00:00','2034032603:00:00','2034102900:59:59','2034102902:59:59' ],
[ [2034,10,29,1,0,0],[2034,10,29,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2035,3,25,0,59,59],[2035,3,25,1,59,59],
'2034102901:00:00','2034102902:00:00','2035032500:59:59','2035032501:59:59' ],
],
2035 =>
[
[ [2035,3,25,1,0,0],[2035,3,25,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2035,10,28,0,59,59],[2035,10,28,2,59,59],
'2035032501:00:00','2035032503:00:00','2035102800:59:59','2035102802:59:59' ],
[ [2035,10,28,1,0,0],[2035,10,28,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2036,3,30,0,59,59],[2036,3,30,1,59,59],
'2035102801:00:00','2035102802:00:00','2036033000:59:59','2036033001:59:59' ],
],
2036 =>
[
[ [2036,3,30,1,0,0],[2036,3,30,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2036,10,26,0,59,59],[2036,10,26,2,59,59],
'2036033001:00:00','2036033003:00:00','2036102600:59:59','2036102602:59:59' ],
[ [2036,10,26,1,0,0],[2036,10,26,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2037,3,29,0,59,59],[2037,3,29,1,59,59],
'2036102601:00:00','2036102602:00:00','2037032900:59:59','2037032901:59:59' ],
],
2037 =>
[
[ [2037,3,29,1,0,0],[2037,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2037,10,25,0,59,59],[2037,10,25,2,59,59],
'2037032901:00:00','2037032903:00:00','2037102500:59:59','2037102502:59:59' ],
[ [2037,10,25,1,0,0],[2037,10,25,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2038,3,28,0,59,59],[2038,3,28,1,59,59],
'2037102501:00:00','2037102502:00:00','2038032800:59:59','2038032801:59:59' ],
],
2038 =>
[
[ [2038,3,28,1,0,0],[2038,3,28,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2038,10,31,0,59,59],[2038,10,31,2,59,59],
'2038032801:00:00','2038032803:00:00','2038103100:59:59','2038103102:59:59' ],
[ [2038,10,31,1,0,0],[2038,10,31,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2039,3,27,0,59,59],[2039,3,27,1,59,59],
'2038103101:00:00','2038103102:00:00','2039032700:59:59','2039032701:59:59' ],
],
2039 =>
[
[ [2039,3,27,1,0,0],[2039,3,27,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2039,10,30,0,59,59],[2039,10,30,2,59,59],
'2039032701:00:00','2039032703:00:00','2039103000:59:59','2039103002:59:59' ],
[ [2039,10,30,1,0,0],[2039,10,30,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2040,3,25,0,59,59],[2040,3,25,1,59,59],
'2039103001:00:00','2039103002:00:00','2040032500:59:59','2040032501:59:59' ],
],
2040 =>
[
[ [2040,3,25,1,0,0],[2040,3,25,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2040,10,28,0,59,59],[2040,10,28,2,59,59],
'2040032501:00:00','2040032503:00:00','2040102800:59:59','2040102802:59:59' ],
[ [2040,10,28,1,0,0],[2040,10,28,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2041,3,31,0,59,59],[2041,3,31,1,59,59],
'2040102801:00:00','2040102802:00:00','2041033100:59:59','2041033101:59:59' ],
],
2041 =>
[
[ [2041,3,31,1,0,0],[2041,3,31,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2041,10,27,0,59,59],[2041,10,27,2,59,59],
'2041033101:00:00','2041033103:00:00','2041102700:59:59','2041102702:59:59' ],
[ [2041,10,27,1,0,0],[2041,10,27,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2042,3,30,0,59,59],[2042,3,30,1,59,59],
'2041102701:00:00','2041102702:00:00','2042033000:59:59','2042033001:59:59' ],
],
2042 =>
[
[ [2042,3,30,1,0,0],[2042,3,30,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2042,10,26,0,59,59],[2042,10,26,2,59,59],
'2042033001:00:00','2042033003:00:00','2042102600:59:59','2042102602:59:59' ],
[ [2042,10,26,1,0,0],[2042,10,26,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2043,3,29,0,59,59],[2043,3,29,1,59,59],
'2042102601:00:00','2042102602:00:00','2043032900:59:59','2043032901:59:59' ],
],
2043 =>
[
[ [2043,3,29,1,0,0],[2043,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2043,10,25,0,59,59],[2043,10,25,2,59,59],
'2043032901:00:00','2043032903:00:00','2043102500:59:59','2043102502:59:59' ],
[ [2043,10,25,1,0,0],[2043,10,25,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2044,3,27,0,59,59],[2044,3,27,1,59,59],
'2043102501:00:00','2043102502:00:00','2044032700:59:59','2044032701:59:59' ],
],
2044 =>
[
[ [2044,3,27,1,0,0],[2044,3,27,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2044,10,30,0,59,59],[2044,10,30,2,59,59],
'2044032701:00:00','2044032703:00:00','2044103000:59:59','2044103002:59:59' ],
[ [2044,10,30,1,0,0],[2044,10,30,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2045,3,26,0,59,59],[2045,3,26,1,59,59],
'2044103001:00:00','2044103002:00:00','2045032600:59:59','2045032601:59:59' ],
],
2045 =>
[
[ [2045,3,26,1,0,0],[2045,3,26,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2045,10,29,0,59,59],[2045,10,29,2,59,59],
'2045032601:00:00','2045032603:00:00','2045102900:59:59','2045102902:59:59' ],
[ [2045,10,29,1,0,0],[2045,10,29,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2046,3,25,0,59,59],[2046,3,25,1,59,59],
'2045102901:00:00','2045102902:00:00','2046032500:59:59','2046032501:59:59' ],
],
2046 =>
[
[ [2046,3,25,1,0,0],[2046,3,25,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2046,10,28,0,59,59],[2046,10,28,2,59,59],
'2046032501:00:00','2046032503:00:00','2046102800:59:59','2046102802:59:59' ],
[ [2046,10,28,1,0,0],[2046,10,28,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2047,3,31,0,59,59],[2047,3,31,1,59,59],
'2046102801:00:00','2046102802:00:00','2047033100:59:59','2047033101:59:59' ],
],
2047 =>
[
[ [2047,3,31,1,0,0],[2047,3,31,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2047,10,27,0,59,59],[2047,10,27,2,59,59],
'2047033101:00:00','2047033103:00:00','2047102700:59:59','2047102702:59:59' ],
[ [2047,10,27,1,0,0],[2047,10,27,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2048,3,29,0,59,59],[2048,3,29,1,59,59],
'2047102701:00:00','2047102702:00:00','2048032900:59:59','2048032901:59:59' ],
],
2048 =>
[
[ [2048,3,29,1,0,0],[2048,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2048,10,25,0,59,59],[2048,10,25,2,59,59],
'2048032901:00:00','2048032903:00:00','2048102500:59:59','2048102502:59:59' ],
[ [2048,10,25,1,0,0],[2048,10,25,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2049,3,28,0,59,59],[2049,3,28,1,59,59],
'2048102501:00:00','2048102502:00:00','2049032800:59:59','2049032801:59:59' ],
],
2049 =>
[
[ [2049,3,28,1,0,0],[2049,3,28,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2049,10,31,0,59,59],[2049,10,31,2,59,59],
'2049032801:00:00','2049032803:00:00','2049103100:59:59','2049103102:59:59' ],
[ [2049,10,31,1,0,0],[2049,10,31,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2050,3,27,0,59,59],[2050,3,27,1,59,59],
'2049103101:00:00','2049103102:00:00','2050032700:59:59','2050032701:59:59' ],
],
2050 =>
[
[ [2050,3,27,1,0,0],[2050,3,27,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2050,10,30,0,59,59],[2050,10,30,2,59,59],
'2050032701:00:00','2050032703:00:00','2050103000:59:59','2050103002:59:59' ],
[ [2050,10,30,1,0,0],[2050,10,30,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2051,3,26,0,59,59],[2051,3,26,1,59,59],
'2050103001:00:00','2050103002:00:00','2051032600:59:59','2051032601:59:59' ],
],
2051 =>
[
[ [2051,3,26,1,0,0],[2051,3,26,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2051,10,29,0,59,59],[2051,10,29,2,59,59],
'2051032601:00:00','2051032603:00:00','2051102900:59:59','2051102902:59:59' ],
[ [2051,10,29,1,0,0],[2051,10,29,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2052,3,31,0,59,59],[2052,3,31,1,59,59],
'2051102901:00:00','2051102902:00:00','2052033100:59:59','2052033101:59:59' ],
],
2052 =>
[
[ [2052,3,31,1,0,0],[2052,3,31,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2052,10,27,0,59,59],[2052,10,27,2,59,59],
'2052033101:00:00','2052033103:00:00','2052102700:59:59','2052102702:59:59' ],
[ [2052,10,27,1,0,0],[2052,10,27,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2053,3,30,0,59,59],[2053,3,30,1,59,59],
'2052102701:00:00','2052102702:00:00','2053033000:59:59','2053033001:59:59' ],
],
2053 =>
[
[ [2053,3,30,1,0,0],[2053,3,30,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2053,10,26,0,59,59],[2053,10,26,2,59,59],
'2053033001:00:00','2053033003:00:00','2053102600:59:59','2053102602:59:59' ],
[ [2053,10,26,1,0,0],[2053,10,26,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2054,3,29,0,59,59],[2054,3,29,1,59,59],
'2053102601:00:00','2053102602:00:00','2054032900:59:59','2054032901:59:59' ],
],
2054 =>
[
[ [2054,3,29,1,0,0],[2054,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2054,10,25,0,59,59],[2054,10,25,2,59,59],
'2054032901:00:00','2054032903:00:00','2054102500:59:59','2054102502:59:59' ],
[ [2054,10,25,1,0,0],[2054,10,25,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2055,3,28,0,59,59],[2055,3,28,1,59,59],
'2054102501:00:00','2054102502:00:00','2055032800:59:59','2055032801:59:59' ],
],
2055 =>
[
[ [2055,3,28,1,0,0],[2055,3,28,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2055,10,31,0,59,59],[2055,10,31,2,59,59],
'2055032801:00:00','2055032803:00:00','2055103100:59:59','2055103102:59:59' ],
[ [2055,10,31,1,0,0],[2055,10,31,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2056,3,26,0,59,59],[2056,3,26,1,59,59],
'2055103101:00:00','2055103102:00:00','2056032600:59:59','2056032601:59:59' ],
],
2056 =>
[
[ [2056,3,26,1,0,0],[2056,3,26,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2056,10,29,0,59,59],[2056,10,29,2,59,59],
'2056032601:00:00','2056032603:00:00','2056102900:59:59','2056102902:59:59' ],
[ [2056,10,29,1,0,0],[2056,10,29,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2057,3,25,0,59,59],[2057,3,25,1,59,59],
'2056102901:00:00','2056102902:00:00','2057032500:59:59','2057032501:59:59' ],
],
2057 =>
[
[ [2057,3,25,1,0,0],[2057,3,25,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2057,10,28,0,59,59],[2057,10,28,2,59,59],
'2057032501:00:00','2057032503:00:00','2057102800:59:59','2057102802:59:59' ],
[ [2057,10,28,1,0,0],[2057,10,28,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2058,3,31,0,59,59],[2058,3,31,1,59,59],
'2057102801:00:00','2057102802:00:00','2058033100:59:59','2058033101:59:59' ],
],
2058 =>
[
[ [2058,3,31,1,0,0],[2058,3,31,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2058,10,27,0,59,59],[2058,10,27,2,59,59],
'2058033101:00:00','2058033103:00:00','2058102700:59:59','2058102702:59:59' ],
[ [2058,10,27,1,0,0],[2058,10,27,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2059,3,30,0,59,59],[2059,3,30,1,59,59],
'2058102701:00:00','2058102702:00:00','2059033000:59:59','2059033001:59:59' ],
],
2059 =>
[
[ [2059,3,30,1,0,0],[2059,3,30,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2059,10,26,0,59,59],[2059,10,26,2,59,59],
'2059033001:00:00','2059033003:00:00','2059102600:59:59','2059102602:59:59' ],
[ [2059,10,26,1,0,0],[2059,10,26,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2060,3,28,0,59,59],[2060,3,28,1,59,59],
'2059102601:00:00','2059102602:00:00','2060032800:59:59','2060032801:59:59' ],
],
2060 =>
[
[ [2060,3,28,1,0,0],[2060,3,28,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2060,10,31,0,59,59],[2060,10,31,2,59,59],
'2060032801:00:00','2060032803:00:00','2060103100:59:59','2060103102:59:59' ],
[ [2060,10,31,1,0,0],[2060,10,31,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2061,3,27,0,59,59],[2061,3,27,1,59,59],
'2060103101:00:00','2060103102:00:00','2061032700:59:59','2061032701:59:59' ],
],
2061 =>
[
[ [2061,3,27,1,0,0],[2061,3,27,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2061,10,30,0,59,59],[2061,10,30,2,59,59],
'2061032701:00:00','2061032703:00:00','2061103000:59:59','2061103002:59:59' ],
[ [2061,10,30,1,0,0],[2061,10,30,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2062,3,26,0,59,59],[2062,3,26,1,59,59],
'2061103001:00:00','2061103002:00:00','2062032600:59:59','2062032601:59:59' ],
],
2062 =>
[
[ [2062,3,26,1,0,0],[2062,3,26,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2062,10,29,0,59,59],[2062,10,29,2,59,59],
'2062032601:00:00','2062032603:00:00','2062102900:59:59','2062102902:59:59' ],
[ [2062,10,29,1,0,0],[2062,10,29,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2063,3,25,0,59,59],[2063,3,25,1,59,59],
'2062102901:00:00','2062102902:00:00','2063032500:59:59','2063032501:59:59' ],
],
2063 =>
[
[ [2063,3,25,1,0,0],[2063,3,25,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2063,10,28,0,59,59],[2063,10,28,2,59,59],
'2063032501:00:00','2063032503:00:00','2063102800:59:59','2063102802:59:59' ],
[ [2063,10,28,1,0,0],[2063,10,28,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2064,3,30,0,59,59],[2064,3,30,1,59,59],
'2063102801:00:00','2063102802:00:00','2064033000:59:59','2064033001:59:59' ],
],
2064 =>
[
[ [2064,3,30,1,0,0],[2064,3,30,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2064,10,26,0,59,59],[2064,10,26,2,59,59],
'2064033001:00:00','2064033003:00:00','2064102600:59:59','2064102602:59:59' ],
[ [2064,10,26,1,0,0],[2064,10,26,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2065,3,29,0,59,59],[2065,3,29,1,59,59],
'2064102601:00:00','2064102602:00:00','2065032900:59:59','2065032901:59:59' ],
],
2065 =>
[
[ [2065,3,29,1,0,0],[2065,3,29,3,0,0],'+02:00:00',[2,0,0],
'CEST',1,[2065,10,25,0,59,59],[2065,10,25,2,59,59],
'2065032901:00:00','2065032903:00:00','2065102500:59:59','2065102502:59:59' ],
[ [2065,10,25,1,0,0],[2065,10,25,2,0,0],'+01:00:00',[1,0,0],
'CET',0,[2066,3,28,0,59,59],[2066,3,28,1,59,59],
'2065102501:00:00','2065102502:00:00','2066032800:59:59','2066032801:59:59' ],
],
);
# %LastRule lets Date::Manip extrapolate DST transitions for years beyond the
# precomputed per-year table above (European CET/CEST rules): switch to DST on
# the last Sunday of March and back on the last Sunday of October, both at
# 01:00 universal time. Auto-generated data -- do not edit values by hand.
%LastRule = (
   'zone' => {
     'dstoff' => '+02:00:00',
     'stdoff' => '+01:00:00',
     },
   'rules' => {
     # March: spring-forward to CEST (isdst=1) on the last (num=0) Sunday (dow=7).
     '03' => {
        'flag' => 'last',
        'dow' => '7',
        'num' => '0',
        'type' => 'u',
        'time' => '01:00:00',
        'isdst' => '1',
        'abb' => 'CEST',
        },
     # October: fall-back to CET (isdst=0) on the last Sunday.
     '10' => {
        'flag' => 'last',
        'dow' => '7',
        'num' => '0',
        'type' => 'u',
        'time' => '01:00:00',
        'isdst' => '0',
        'abb' => 'CET',
        },
     },
);
# Module must return a true value.
1;
| jkb78/extrajnm | local/lib/perl5/Date/Manip/TZ/euberl00.pm | Perl | mit | 47,658 |
#!/usr/bin/perl
use POSIX;
use strict;
use lib "/httpd/modules";
use CFG;
use ZOOVY;
my ($CFG) = CFG->new();
# Reuse the CFG instance built above instead of constructing a second,
# identical one just to read the user list.
my (@USERS) = @{$CFG->users()};
#
# perl -e 'foreach $letter ("a".."z","0".."9") { opendir $D1, "/data/users/$letter"; while (my $user = readdir($D1)) { next if (substr($user,0,1) eq "."); next if (! -d "/data/users/$letter/$user"); print "$letter/$user"; system("/bin/du -sk /data/users/$letter/$user > /data/users/$letter/$user/diskspace.txt"); } closedir $D1; }'
#
# For every known account: rotate/compress its logs, then regenerate
# diskspace.txt with the account's total disk usage in KB.
foreach my $USERNAME (@USERS) {
	my ($USERPATH) = &ZOOVY::resolve_userpath($USERNAME);
	next if (! -d "$USERPATH");
	# only process accounts living on ZFS-backed storage
	next if (! -d "$USERPATH/.zfs");
	print "$USERPATH/diskspace.txt\n";
	unlink("$USERPATH/diskspace.txt");
	# look for log files
	&cleanup_logs($USERNAME,"$USERPATH");
	print "DISK: $USERPATH\n";
	my $DU = undef;
	if ((not defined $DU) && (-f '/usr/bin/du')) { $DU = '/usr/bin/du'; }
	if ((not defined $DU) && (-f '/bin/du')) { $DU = '/bin/du'; }
	# Fall back to a PATH lookup rather than running " -sk ..." with an
	# undefined program name when du lives somewhere unexpected.
	$DU = 'du' if (not defined $DU);
	# Shell-escape the path before interpolating it into the command line;
	# the redirection requires going through the shell.
	my $QP = quotemeta($USERPATH);
	system("$DU -sk $QP > $QP/diskspace.txt");
}
##
## cleanup_logs($user, $dir)
##
## Housekeeping for one account directory: keeps the current month's log,
## renames a bare access.log into the dated scheme, gzip-compresses older
## dated logs (except access-* logs, which support needs uncompressed),
## and removes regenerable *.cdb / shiprules.bin files.
sub cleanup_logs {
	my ($user,$dir) = @_;
	# current period as YYYYMM; logs matching it are left untouched
	my $now = POSIX::strftime("%Y%m",localtime(time()));
	# months-since-2000; only used by the disabled Glacier block below
	my $nowabs = &abs_date($now);
	# Previously an opendir failure fell through and readdir warned on an
	# undefined handle; bail out explicitly instead.
	opendir(my $Duser, $dir) or do {
		warn "cleanup_logs: unable to open $dir: $!";
		return;
	};
	while (my $file = readdir($Duser) ) {
		next if (substr($file,0,1) eq '.');
		if ($file =~ /(.*?)\-$now\.log$/) {
			## current log file - leave it alone
		}
		elsif ($file eq 'access.log') {
			## rename access.log to access-201000.log
			print "RENAMING $dir/$file\n";
			# list-form system: no shell, so paths containing spaces or
			# metacharacters cannot break or hijack the command
			system("/bin/mv", "$dir/access.log", "$dir/access-201000.log");
		}
		elsif ($file =~ /^(.*?)\-[\d]{6,6}\.log$/) {
			## compress old log files
			print "COMPRESSING LOG: $dir/$file [$now]\n";
			system("/bin/gzip", "-9", "$dir/$file");
		}
		elsif ($file =~ /^access\-/) {
			## never archive access logs! support needs these.
		}
		## LONG TERM: GLACIER?
		#elsif ($file =~ /^(.*?)\-([\d]{6,6})\.log\.gz$/) {
		#	if ((abs_date($1)+3) < $nowabs) {
		#		## move to log directory with username
		#		print "ARCHIVING $dir/$file to /data/logs/$user~$file\n";
		#		system("/bin/mv $dir/$file /data/logs/$user~$file");
		#		}
		#	else {
		#		## preserve less than 3 months
		#		}
		#	}
		elsif ($file =~ /^(.*?)\.cdb$/) {
			print "REMOVING: $dir/$file\n";
			unlink("$dir/$file");
		}
		elsif ($file =~ /^shiprules\.bin$/) {
			print "REMOVING: $dir/$file\n";
			unlink("$dir/$file");
		}
		else {
			print "SYSTEM: $dir/$file\n";
		}
	}
	closedir $Duser;
	return;
}
##
## returns the number of months that have elapsed since 2000
## for a period given as a "YYYYMM" string (e.g. "200001" -> 1).
##
sub abs_date {
	my ($yyyymm) = @_;
	my $year  = int(substr($yyyymm, 0, 4));
	my $month = int(substr($yyyymm, 4, 2));
	return ((($year - 2000) * 12) + $month);
}
##############################################################################
# $URL: http://perlcritic.tigris.org/svn/perlcritic/trunk/distributions/Perl-Critic/lib/Perl/Critic/Policy/ValuesAndExpressions/ProhibitMixedBooleanOperators.pm $
# $Date: 2012-07-02 22:16:39 -0700 (Mon, 02 Jul 2012) $
# $Author: thaljef $
# $Revision: 4126 $
##############################################################################
package Perl::Critic::Policy::ValuesAndExpressions::ProhibitMixedBooleanOperators;

use 5.006001;
use strict;
use warnings;
use Readonly;

use Perl::Critic::Utils qw{ :severities :data_conversion };
use base 'Perl::Critic::Policy';

#-----------------------------------------------------------------------------

our $VERSION = '1.118';

#-----------------------------------------------------------------------------

# The two operator families that must not be mixed in one expression.
Readonly::Hash my %LOW_BOOLEANS  => hashify( qw( not or and ) );
Readonly::Hash my %HIGH_BOOLEANS => hashify( qw( ! || && ) );

# PPI::Statement subclasses that do not represent a single expression, so
# they are not examined by this policy.
Readonly::Hash my %EXEMPT_TYPES => hashify(
    qw(
        PPI::Statement::Block
        PPI::Statement::Scheduled
        PPI::Statement::Package
        PPI::Statement::Include
        PPI::Statement::Sub
        PPI::Statement::Variable
        PPI::Statement::Compound
        PPI::Statement::Data
        PPI::Statement::End
    )
);

#-----------------------------------------------------------------------------

Readonly::Scalar my $DESC => q{Mixed high and low-precedence booleans};
Readonly::Scalar my $EXPL => [ 70 ];

#-----------------------------------------------------------------------------

sub supported_parameters { return ()                  }
sub default_severity     { return $SEVERITY_HIGH      }
sub default_themes       { return qw( core bugs pbp ) }
sub applies_to           { return 'PPI::Statement'    }

#-----------------------------------------------------------------------------

sub violates {
    my ( $self, $elem, undef ) = @_;

    # PPI::Statement is the ancestor of several types of PPI elements, but
    # only those representing a single statement/expression are interesting;
    # the declarative and compound subclasses above are exempt.
    return if exists $EXEMPT_TYPES{ ref $elem };

    my $has_low  = $elem->find_first(\&_low_boolean);
    my $has_high = $elem->find_first(\&_high_boolean);
    return $self->violation( $DESC, $EXPL, $elem ) if $has_low && $has_high;

    return;    # ok!
}

#-----------------------------------------------------------------------------

# Shared matcher: true when $elem is an operator token whose text is a key
# of %{$set}; empty for nested statements so find_first keeps descending.
sub _operator_in_set {
    my ($elem, $set) = @_;
    return if $elem->isa('PPI::Statement');
    return 0 if !$elem->isa('PPI::Token::Operator');
    return exists $set->{$elem};
}

sub _low_boolean {
    my (undef, $elem) = @_;
    return _operator_in_set($elem, \%LOW_BOOLEANS);
}

sub _high_boolean {
    my (undef, $elem) = @_;
    return _operator_in_set($elem, \%HIGH_BOOLEANS);
}

1;
__END__
#-----------------------------------------------------------------------------
=pod
=head1 NAME
Perl::Critic::Policy::ValuesAndExpressions::ProhibitMixedBooleanOperators - Write C< !$foo && $bar || $baz > instead of C< not $foo && $bar or $baz>.
=head1 AFFILIATION
This Policy is part of the core L<Perl::Critic|Perl::Critic>
distribution.
=head1 DESCRIPTION
Conway advises against combining the low-precedence booleans ( C<and
or not> ) with the high-precedence boolean operators ( C<&& || !> ) in
the same expression. Unless you fully understand the differences
between the high and low-precedence operators, it is easy to
misinterpret expressions that use both. And even if you do understand
them, it is not always clear if the author actually intended it.
next if not $foo || $bar; #not ok
next if !$foo || $bar; #ok
next if !( $foo || $bar ); #ok
=head1 CONFIGURATION
This Policy is not configurable except for the standard options.
=head1 AUTHOR
Jeffrey Ryan Thalhammer <jeff@imaginative-software.com>
=head1 COPYRIGHT
Copyright (c) 2005-2011 Imaginative Software Systems. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the same terms as Perl itself. The full text of this license
can be found in the LICENSE file included with this module.
=cut
# Local Variables:
# mode: cperl
# cperl-indent-level: 4
# fill-column: 78
# indent-tabs-mode: nil
# c-indentation-style: bsd
# End:
# ex: set ts=8 sts=4 sw=4 tw=78 ft=perl expandtab shiftround :
| amidoimidazol/bio_info | Beginning Perl for Bioinformatics/lib/Perl/Critic/Policy/ValuesAndExpressions/ProhibitMixedBooleanOperators.pm | Perl | mit | 4,571 |
=head1 NAME
Mail::Message::Head::ListGroup - mailinglist related header fields
=head1 INHERITANCE
Mail::Message::Head::ListGroup
is a Mail::Message::Head::FieldGroup
is a Mail::Reporter
=head1 SYNOPSIS
my $lg = Mail::Message::Head::ListGroup->new(head => $head, ...);
$head->addListGroup($lg);
my $lg = $head->addListGroup(...);
$lg->delete;
=head1 DESCRIPTION
A I<list group> is a set of header fields which are added by mailing-list
managing software. This class knowns various details about that software.
The knowledge and test messages which are used to initially implement
this module is taken from Mail::ListDetector, written by
Michael Stevens <mailto:michael@etla.org>. The logic is redesigned to
add flexibility and use the powerful MailBox features.
=head1 METHODS
=head2 Constructors
$obj-E<gt>B<address>
=over 4
Returns a L<Mail::Message::Field::Address|Mail::Message::Field::Address> object (or C<undef>) which
defines the posting address of the mailing list.
=back
$obj-E<gt>B<clone>
=over 4
See L<Mail::Message::Head::FieldGroup/"Constructors">
=back
$obj-E<gt>B<from>(HEAD|MESSAGE)
=over 4
Create a C<Mail::Message::Head::ListGroup> based in the specified MESSAGE
or message HEAD.
=back
$obj-E<gt>B<implementedTypes>
Mail::Message::Head::ListGroup-E<gt>B<implementedTypes>
=over 4
See L<Mail::Message::Head::FieldGroup/"Constructors">
=back
$obj-E<gt>B<listname>
=over 4
Returns the name of the mailing list, which is usually a part of the
e-mail address which is used to post the messages to.
=back
Mail::Message::Head::ListGroup-E<gt>B<new>(FIELDS, OPTIONS)
=over 4
Construct an object which maintains one set of mailing list headers
Option --Defined in --Default
address undef
head Mail::Message::Head::FieldGroup undef
listname <derived from address>
log Mail::Reporter 'WARNINGS'
rfc undef
software Mail::Message::Head::FieldGroup undef
trace Mail::Reporter 'WARNINGS'
type Mail::Message::Head::FieldGroup undef
version Mail::Message::Head::FieldGroup undef
. address => STRING|OBJECT
=over 4
Address of the mailing list, which may be specified as STRING
or e-mail containing object (a L<Mail::Address|Mail::Address> or L<Mail::Identity|Mail::Identity>.
In any case, the data is converted into a L<Mail::Identity|Mail::Identity>.
=back
. head => HEAD
. listname => STRING
=over 4
A short textual representation of the mailing-list.
=back
. log => LEVEL
. rfc => 'rfc2919'|'rfc2369'
=over 4
Defines the mailing list software follows an rfc.
=back
. software => STRING
. trace => LEVEL
. type => STRING
. version => STRING
=back
$obj-E<gt>B<rfc>
=over 4
When the mailing list software follows the guidelines of one of the dedicated
RFCs, then this will be returned otherwise C<undef>. The return values can
be C<rfc2919>, C<rfc2369>, or C<undef>.
=back
=head2 The header
$obj-E<gt>B<add>((FIELD, VALUE) | OBJECT)
=over 4
See L<Mail::Message::Head::FieldGroup/"The header">
=back
$obj-E<gt>B<addFields>([FIELDNAMES])
=over 4
See L<Mail::Message::Head::FieldGroup/"The header">
=back
$obj-E<gt>B<attach>(HEAD)
=over 4
See L<Mail::Message::Head::FieldGroup/"The header">
=back
$obj-E<gt>B<delete>
=over 4
See L<Mail::Message::Head::FieldGroup/"The header">
=back
$obj-E<gt>B<fieldNames>
=over 4
See L<Mail::Message::Head::FieldGroup/"The header">
=back
$obj-E<gt>B<fields>
=over 4
See L<Mail::Message::Head::FieldGroup/"The header">
=back
$obj-E<gt>B<head>
=over 4
See L<Mail::Message::Head::FieldGroup/"The header">
=back
=head2 Access to the header
$obj-E<gt>B<isListGroupFieldName>(NAME)
Mail::Message::Head::ListGroup-E<gt>B<isListGroupFieldName>(NAME)
=over 4
=back
$obj-E<gt>B<software>
=over 4
See L<Mail::Message::Head::FieldGroup/"Access to the header">
=back
$obj-E<gt>B<type>
=over 4
See L<Mail::Message::Head::FieldGroup/"Access to the header">
=back
$obj-E<gt>B<version>
=over 4
See L<Mail::Message::Head::FieldGroup/"Access to the header">
=back
=head2 Internals
$obj-E<gt>B<collectFields>
=over 4
Scan the header for fields which are usually contained in mailing list
software. This method is automatically called when a list group is
constructed L<from()|Mail::Message::Head::ListGroup/"Constructors"> an existing header or message.
Returned are the names of the list header fields found, in scalar
context the amount. An empty list/zero indicates that this is not
a mailing list message.
Please warn the author of MailBox if you see that too few
or too many fields are included.
=back
$obj-E<gt>B<detected>(TYPE, SOFTWARE, VERSION)
=over 4
See L<Mail::Message::Head::FieldGroup/"Internals">
=back
=head2 Error handling
$obj-E<gt>B<AUTOLOAD>
=over 4
See L<Mail::Reporter/"Error handling">
=back
$obj-E<gt>B<addReport>(OBJECT)
=over 4
See L<Mail::Reporter/"Error handling">
=back
$obj-E<gt>B<defaultTrace>([LEVEL]|[LOGLEVEL, TRACELEVEL]|[LEVEL, CALLBACK])
Mail::Message::Head::ListGroup-E<gt>B<defaultTrace>([LEVEL]|[LOGLEVEL, TRACELEVEL]|[LEVEL, CALLBACK])
=over 4
See L<Mail::Reporter/"Error handling">
=back
$obj-E<gt>B<details>
=over 4
Produce information about the detected/create list group, which may be
helpful during debugging, by default to the selected file handle.
=back
$obj-E<gt>B<errors>
=over 4
See L<Mail::Reporter/"Error handling">
=back
$obj-E<gt>B<log>([LEVEL [,STRINGS]])
Mail::Message::Head::ListGroup-E<gt>B<log>([LEVEL [,STRINGS]])
=over 4
See L<Mail::Reporter/"Error handling">
=back
$obj-E<gt>B<logPriority>(LEVEL)
Mail::Message::Head::ListGroup-E<gt>B<logPriority>(LEVEL)
=over 4
See L<Mail::Reporter/"Error handling">
=back
$obj-E<gt>B<logSettings>
=over 4
See L<Mail::Reporter/"Error handling">
=back
$obj-E<gt>B<notImplemented>
=over 4
See L<Mail::Reporter/"Error handling">
=back
$obj-E<gt>B<print>([FILEHANDLE])
=over 4
See L<Mail::Message::Head::FieldGroup/"Error handling">
=back
$obj-E<gt>B<report>([LEVEL])
=over 4
See L<Mail::Reporter/"Error handling">
=back
$obj-E<gt>B<reportAll>([LEVEL])
=over 4
See L<Mail::Reporter/"Error handling">
=back
$obj-E<gt>B<trace>([LEVEL])
=over 4
See L<Mail::Reporter/"Error handling">
=back
$obj-E<gt>B<warnings>
=over 4
See L<Mail::Reporter/"Error handling">
=back
=head2 Cleanup
$obj-E<gt>B<DESTROY>
=over 4
See L<Mail::Reporter/"Cleanup">
=back
$obj-E<gt>B<inGlobalDestruction>
=over 4
See L<Mail::Reporter/"Cleanup">
=back
=head1 DETAILS
=head2 Mailing list fields
=head3 Detected lists
The L<Mail::Message::Head::ListGroup|Mail::Message::Head::ListGroup> class can detect many different
mailing lists, some of which are very popular and some of which are
rare.
Numerous fields in a header are added when the message is passed
through a mailing list server. Each list software has defined its own
fields, sometimes with conflicting definitions. There are also two
RFCs about mailing lists: C<rfc2919> and C<rfc2369>.
The following lists are currently detected. Between parenthesis is
the string returned by L<type()|Mail::Message::Head::FieldGroup/"Access to the header"> when that differs from the software
name.
=over 4
=item * CommuniGate
Legacy commercial MacOS implementation by Stalker Software Inc.
L<http://www.stalker.com/mac/default.html>
=item * CommuniGate Pro (CommuniGatePro)
Commercial rfc2919 compliant implementation by Stalker Software Inc.
L<http://www.stalker.com>
=item * Ecartis
Commercial mailing list manager, formerly known as Listar. Produced
by NodeRunner Computing. See L<http://www.ecartis.com>.
=item * Ezmlm
Open Source mailing list manager, available from L<http://www.ezmlm.org>.
=item * FML
Open Source mailing list manager, see L<http://www.fml.org>.
=item * Listar
Old name for Ecartis.
=item * Listbox
Mailing lists defined at L<http://listbox.com>.
=item * Mailman
GNU's mailing list manager, available from L<http://www.list.org>.
=item * Majordomo
Free (licenced) mailing list manager by Great Circle Associates,
available from L<http://www.greatcircle.com/majordomo/>
=item * Smartlist
Related to procmail, as expressed by their shared main page at
L<http://www.procmail.org/>.
=item * Yahoo! Groups (YahooGroups)
Mailing lists defined at L<http://groups.yahoo.com>.
=item * Listserv
Commercial mailing list manager, produced by L-Soft. See
L<http://www.lsoft.com/>.
=back
=head1 DIAGNOSTICS
Error: Cannot convert "$string" into an address object
=over 4
The L<new(address)|Mail::Message::Head::ListGroup/"Constructors"> is coerced into a L<Mail::Message::Field::Address|Mail::Message::Field::Address>,
which fails. Have a look at L<Mail::Message::Field::Address::coerce()|Mail::Message::Field::Address/"Constructors">
to see what valid arguments are.
=back
Error: Package $package does not implement $method.
=over 4
Fatal error: the specific package (or one of its superclasses) does not
implement this method where it should. This message means that some other
related classes do implement this method however the class at hand does
not. Probably you should investigate this and probably inform the author
of the package.
=back
=head1 SEE ALSO
This module is part of Mail-Box distribution version 2.082,
built on April 28, 2008. Website: F<http://perl.overmeer.net/mailbox/>
=head1 LICENSE
Copyrights 2001-2008 by Mark Overmeer. For other contributors see ChangeLog.
This program is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
See F<http://www.perl.com/perl/misc/Artistic.html>
| carlgao/lenga | images/lenny64-peon/usr/share/perl5/Mail/Message/Head/ListGroup.pod | Perl | mit | 9,638 |
package Eldhelm::AI::BehaviourTree::End;

use strict;
use parent 'Eldhelm::AI::BehaviourTree::Node';

# Terminal node of a behaviour tree branch: update() logs completion and
# reports the node's recorded status, defaulting to 'failed' when unset.

sub new {
	my ($class, %args) = @_;
	my $self = bless $class->SUPER::new(%args), $class;
	return $self;
}

sub update {
	my ($self) = @_;
	# a missing/false status counts as failure
	$self->{status} = 'failed' unless $self->{status};
	$self->logFinishUpdate;
	return $self->{status};
}
1; | wastedabuser/eldhelm-platform | lib/Eldhelm/AI/BehaviourTree/End.pm | Perl | mit | 363 |
:- module(quote, [quote/1]).
:- use_module(sandbox).
:- dynamic(quote/1).
%% quote(-Quote)
%
% On first call, loads all lines from the 'quotes' file and memoises a
% replacement quote/1 clause that picks a random member; then delegates
% to that clause. Subsequent calls hit the asserted clause directly.
quote(Quote) :-
    open('quotes', read, Stream),
    read_lines(Stream, Quotes),
    % The clause passed to asserta/1 must be wrapped in an extra pair of
    % parentheses: (:-)/2 has priority 1200, above the 999 limit for a
    % plain argument, so the unparenthesised form is a syntax error.
    asserta((quote(Q) :- random_member(Q, Quotes))),
    close(Stream),
    !, quote(Quote).
| ljos/epictetus | src/epictetus/quote.pl | Perl | mit | 248 |
# This fragment runs inside slash's extension-configure framework, which
# supplies $CFLAGS, @lib_dirs, check_lib() and needs_static_init() -- none
# of them are defined in this file (TODO confirm against the build driver).
# On Linux the MySQL client headers live under .../mysql subdirectories, so
# extend the compiler's include search path accordingly.
if($^O eq "linux") {
	$CFLAGS .= " -I/usr/include/mysql";
	foreach(@lib_dirs) {
		$CFLAGS .= " -I$_/mysql";
	}
}
# Probe for libmysqlclient by compiling and linking a minimal program that
# initialises a MYSQL handle. The heredoc body below is a string literal:
# no comment lines may be inserted before its terminator.
check_lib "mysqlclient", <<C;
#include <mysql.h>
int main() {
	MYSQL mysql;
	mysql_init(&mysql);
}
C
# Tell the framework this extension requires static initialisation.
needs_static_init;
| slash-lang/slash | ext/mysql/configure.pl | Perl | mit | 281 |
package HMF::Pipeline::Functions::Config;
use FindBin::libs;
use discipline;
use Carp;
use File::Basename;
use File::Copy::Recursive qw(rcopy);
use File::Path qw(make_path);
use File::Spec::Functions;
use FindBin;
use Getopt::Long;
use IO::Pipe;
use List::MoreUtils qw(uniq);
use POSIX qw(strftime);
use Time::HiRes qw(gettimeofday);
use HMF::Pipeline::Functions::Validate qw(parseFastqName verifyConfig verifyBam);
use HMF::Pipeline::Functions::Metadata;
use parent qw(Exporter);
# Symbols callers may import explicitly; nothing is exported by default.
our @EXPORT_OK = qw(
  parse
  validate
  createDirs
  addSubDir
  setupLogging
  addSamples
  sampleBamAndJobs
  refSampleBamAndJobs
  sampleBamsAndJobs
  sampleControlBamsAndJobs
  allRunningJobs
  recordGitVersion
  copyConfigAndScripts
);
# Reads a tab-separated key/value configuration file into %$opt.
# Dies when the file cannot be opened; line handling is delegated
# to parseFile().
sub parse {
    my ($configurationFile, $opt) = @_;

    open my $config_fh, "<", $configurationFile or die "Couldn't open $configurationFile: $!";
    parseFile($config_fh, $opt);
    close $config_fh;
    return;
}
# Parses an already-opened config handle into %$opt, one "KEY\tVALUE" pair
# per line. Blank lines and '#' comment lines are skipped.
#   INIFILE       - resolved against the pipeline checkout when relative,
#                   accumulated in an array, and recursively parsed.
#   FASTQ / BAM   - collected as path sets (path => 1).
#   anything else - plain scalar; later occurrences overwrite earlier ones.
sub parseFile {
    my ($fh, $opt) = @_;

    while (my $line = <$fh>) {
        chomp $line;
        next if $line =~ m/^#/ or not $line;
        my ($key, $val) = split /\t/, $line, 2;
        die "Key '$key' is missing a value - is it badly formatted?" if not defined $val;

        if ($key eq 'INIFILE') {
            $val = catfile(pipelinePath(), $val) unless file_name_is_absolute($val);
            push @{$opt->{$key}}, $val;
            parse($val, $opt);
        } elsif ($key eq 'FASTQ' or $key eq 'BAM') {
            $opt->{$key}->{$val} = 1;
        } else {
            $opt->{$key} = $val;
        }
    }
    return;
}
# Verifies the assembled configuration via verifyConfig(); warns each
# reported problem and dies when any exist. On success, derives RUN_NAME
# from the output directory's basename.
sub validate {
    my ($opt) = @_;

    my $errors = verifyConfig($opt);
    if (@{$errors}) {
        warn "ERROR: $_" for @{$errors};
        die "One or more options not found or invalid in config files";
    }
    $opt->{RUN_NAME} = basename($opt->{OUTPUT_DIR});
    return;
}
# Creates every directory in @dirs (including intermediate components),
# dying with a combined "path: reason" message when any creation fails.
sub makePaths {
    my (@dirs) = @_;

    my $errors;
    make_path(@dirs, {error => \$errors});
    my @failures = map { join ": ", each %{$_} } @{$errors};
    if (@failures) {
        die "Couldn't create directories: " . join(", ", @failures);
    }
    return;
}
# Builds the standard run directory layout under $output_dir (out/tmp/logs/
# jobs plus any caller-supplied named subdirectories), creates everything on
# disk, and returns a hashref of name => absolute path.
sub createDirs {
    my ($output_dir, %extra_dirs) = @_;

    # Re-anchor each extra directory underneath the run's output directory.
    my %anchored = map { $_ => catfile($output_dir, $extra_dirs{$_}) } keys %extra_dirs;
    my $dirs = {
        out => $output_dir,
        tmp => catfile($output_dir, "tmp"),
        log => catfile($output_dir, "logs"),
        job => catfile($output_dir, "jobs"),
        %anchored,
    };
    makePaths(values %{$dirs});
    return $dirs;
}
# Creates (if needed) and returns the path of $dir beneath the run's
# output directory ($dirs->{out}).
sub addSubDir {
    my ($dirs, $dir) = @_;

    my $sub_dir = catfile($dirs->{out}, $dir);
    makePaths($sub_dir);
    return $sub_dir;
}
# Redirects this process's STDOUT and STDERR through `tee` into timestamped
# submitlog_*.out/.err files under <output_dir>/logs, so everything printed
# during job submission is both shown on the console and kept on disk.
# Assumes <output_dir>/logs already exists (created by createDirs) and that
# `tee` is on PATH -- TODO confirm for non-standard environments.
sub setupLogging {
    my ($output_dir) = @_;
    # Microsecond-resolution timestamp so concurrent submissions get distinct files.
    my ($seconds, $microseconds) = gettimeofday;
    my $datetime = strftime('%Y%m%d_%H%M%S_', localtime $seconds) . sprintf('%.6d', $microseconds);
    my $output_file = catfile($output_dir, "logs", "submitlog_${datetime}.out");
    my $error_file = catfile($output_dir, "logs", "submitlog_${datetime}.err");
    # Writer pipes feed tee: output reaches the console while being logged.
    # The stderr tee re-attaches its console copy to fd 2 via ">&2".
    my $output_fh = IO::Pipe->new()->writer("tee $output_file") or die "Couldn't tee to $output_file: $!";
    my $error_fh = IO::Pipe->new()->writer("tee $error_file >&2") or die "Couldn't tee to $error_file: $!";
    # Re-point the process-wide handles at the tee pipes (duplicating them).
    open STDOUT, ">&", $output_fh or die "STDOUT redirection failed: $!";
    open STDERR, ">&", $error_fh or die "STDERR redirection failed: $!";
    ## no critic (Modules::RequireExplicitInclusion)
    # Unbuffer everything so log lines appear promptly and in order.
    STDOUT->autoflush(1);
    STDERR->autoflush(1);
    $output_fh->autoflush(1);
    ## use critic
    return;
}
# Populates $opt->{SAMPLES} (sample name => arrayref of input paths) from the
# FASTQ and BAM path sets collected during config parsing, and initialises an
# empty $opt->{RUNNING_JOBS} chain for each sample.
# FASTQ inputs may contribute several files per sample (multiple lanes); a
# BAM must introduce a sample name not already taken, otherwise we die.
# FASTQs are processed before BAMs so the duplicate check sees both sources.
sub addSamples {
    my ($opt) = @_;
    $opt->{SAMPLES} = {};
    if ($opt->{FASTQ}) {
        foreach my $input_path (sort keys %{$opt->{FASTQ}}) {
            # sample name is derived from the FASTQ file-naming convention
            my $sample_name = parseFastqName($input_path)->{sampleName};
            $opt->{SAMPLES}->{$sample_name} = [] if not exists $opt->{SAMPLES}->{$sample_name};
            push @{$opt->{SAMPLES}->{$sample_name}}, $input_path;
            @{$opt->{RUNNING_JOBS}->{$sample_name}} = ();
        }
    }
    if ($opt->{BAM}) {
        foreach my $input_path (sort keys %{$opt->{BAM}}) {
            # verifyBam validates the file and returns its embedded sample name
            my $sample_name = verifyBam($input_path, $opt);
            not exists $opt->{SAMPLES}->{$sample_name} or die "sample '$sample_name' from $input_path already used by $opt->{SAMPLES}->{$sample_name}[0]";
            $opt->{SAMPLES}->{$sample_name} = [$input_path];
            @{$opt->{RUNNING_JOBS}->{$sample_name}} = ();
        }
    }
    return;
}
# Returns ($bam_path, $jobs_arrayref) for one sample: the sample's BAM under
# <OUTPUT_DIR>/<sample>/mapping/, and its currently-pending job ids.
sub sampleBamAndJobs {
    my ($sample, $opt) = @_;

    my $bam_path = catfile($opt->{OUTPUT_DIR}, $sample, "mapping", $opt->{BAM_FILES}->{$sample});
    my $pending_jobs = $opt->{RUNNING_JOBS}->{$sample};
    return ($bam_path, $pending_jobs);
}
# Collects every sample's BAM path and pending jobs: returns a hashref of
# sample => BAM path, and a deduplicated arrayref of all pending job ids.
sub sampleBamsAndJobs {
    my ($opt) = @_;

    my %bams;
    my @jobs;
    foreach my $sample (sort keys %{$opt->{SAMPLES}}) {
        my ($bam, $sample_jobs) = sampleBamAndJobs($sample, $opt);
        $bams{$sample} = $bam;
        push @jobs, @{$sample_jobs};
    }
    return (\%bams, [ uniq @jobs ]);
}
# Resolves the reference (normal) sample from metadata and returns its name,
# BAM path, and deduplicated pending job ids. Dies when the metadata sample
# has no entry in $opt->{BAM_FILES}.
sub refSampleBamAndJobs {
    my ($opt) = @_;

    my ($ref_sample) = HMF::Pipeline::Functions::Metadata::refSampleName($opt);
    die "metadata ref_sample $ref_sample not in BAM file list: " . join(", ", keys %{$opt->{BAM_FILES}})
        unless $opt->{BAM_FILES}->{$ref_sample};
    my ($ref_bam, $ref_jobs) = sampleBamAndJobs($ref_sample, $opt);
    return ($ref_sample, $ref_bam, [ uniq @{$ref_jobs} ]);
}
# Resolves the ref/tumor sample pair from metadata and returns their names,
# BAM paths, the joint analysis name, and the combined, deduplicated list of
# pending job ids for both samples. Dies when either metadata sample has no
# entry in $opt->{BAM_FILES}.
sub sampleControlBamsAndJobs {
    my ($opt) = @_;
    my ($ref_sample, $tumor_sample, $joint_name) = HMF::Pipeline::Functions::Metadata::sampleControlNames($opt);
    $opt->{BAM_FILES}->{$ref_sample} or die "metadata ref_sample $ref_sample not in BAM file list: " . join ", ", keys %{$opt->{BAM_FILES}};
    $opt->{BAM_FILES}->{$tumor_sample} or die "metadata tumor_sample $tumor_sample not in BAM file list: " . join ", ", keys %{$opt->{BAM_FILES}};
    my ($ref_sample_bam, $ref_sample_jobs) = sampleBamAndJobs($ref_sample, $opt);
    my ($tumor_sample_bam, $tumor_sample_jobs) = sampleBamAndJobs($tumor_sample, $opt);
    return ($ref_sample, $tumor_sample, $ref_sample_bam, $tumor_sample_bam, $joint_name, [ uniq @{$ref_sample_jobs}, @{$tumor_sample_jobs} ]);
}
# Flattens every sample's pending job chain into one sorted, deduplicated
# arrayref of defined job ids.
sub allRunningJobs {
    my ($opt) = @_;

    my @all_jobs;
    foreach my $chain (values %{$opt->{RUNNING_JOBS}}) {
        next unless defined $chain;
        push @all_jobs, @{$chain};
    }
    my @unique_jobs = uniq sort grep { defined } @all_jobs;
    return \@unique_jobs;
}
# Stores the pipeline's version in $opt->{VERSION} by running
# `git describe --tags` against the checkout's .git directory.
# NOTE(review): $git_dir is interpolated unquoted into the backtick command,
# so this is only safe while the install path contains no shell
# metacharacters or whitespace -- confirm before relocating the pipeline.
sub recordGitVersion {
    my ($opt) = @_;
    my $git_dir = catfile(pipelinePath(), ".git");
    $opt->{VERSION} = qx(git --git-dir $git_dir describe --tags);
    chomp $opt->{VERSION};
    return;
}
# Snapshots everything needed to reproduce and debug this run into
# OUTPUT_DIR: the slicing/strelka/gridss settings, the scripts and QScripts
# directories, every INI file that was loaded (into both logs/ and
# settings/), and a flattened logs/final.ini containing all resolved scalar
# options. Dies on any copy failure.
sub copyConfigAndScripts {
    my ($opt) = @_;
    my $pipeline_path = pipelinePath();
    my $slice_dir = catfile($pipeline_path, "settings", "slicing");
    my $strelka_dir = catfile($pipeline_path, "settings", "strelka");
    my $gridss_dir = catfile($pipeline_path, "settings", "gridss");
    my $scripts_dir = catfile($pipeline_path, "scripts");
    my $qscript_dir = catfile($pipeline_path, "QScripts");
    rcopy $slice_dir, catfile($opt->{OUTPUT_DIR}, "settings", "slicing") or die "Failed to copy slice settings $slice_dir: $!";
    rcopy $strelka_dir, catfile($opt->{OUTPUT_DIR}, "settings", "strelka") or die "Failed to copy Strelka settings $strelka_dir: $!";
    rcopy $gridss_dir, catfile($opt->{OUTPUT_DIR}, "settings", "gridss") or die "Failed to copy Gridss settings $gridss_dir: $!";
    rcopy $scripts_dir, catfile($opt->{OUTPUT_DIR}, "scripts") or die "Failed to copy scripts directory $scripts_dir: $!";
    rcopy $qscript_dir, catfile($opt->{OUTPUT_DIR}, "QScripts") or die "Failed to copy QScripts $qscript_dir: $!";
    foreach my $ini_file (@{$opt->{INIFILE}}) {
        rcopy $ini_file, catfile($opt->{OUTPUT_DIR}, "logs") or die "Failed to copy INI file $ini_file: $!";
        rcopy $ini_file, catfile($opt->{OUTPUT_DIR}, "settings") or die "Failed to copy INI file $ini_file: $!";
    }
    # final.ini: one "KEY\tvalue" line per defined scalar option, sorted by
    # key; reference-valued options (SAMPLES, RUNNING_JOBS, ...) are skipped.
    my $final_ini = catfile($opt->{OUTPUT_DIR}, "logs", "final.ini");
    open my $fh, ">", $final_ini or die "Couldn't open $final_ini: $!";
    say $fh join "\n", map { "$_\t$opt->{$_}" } grep { defined $opt->{$_} and not ref $opt->{$_} } sort keys %{$opt};
    close $fh;
    return;
}
# SABR: do NOT depend on this from jobs
# Absolute path of the pipeline checkout: the parent directory of the
# directory containing the running script ($FindBin::Bin/..).
sub pipelinePath {
    return catfile($FindBin::Bin, updir());
}
1;
| hartwigmedical/pipeline | lib/HMF/Pipeline/Functions/Config.pm | Perl | mit | 8,374 |
#!/usr/bin/perl -w
use strict;
use warnings;
use Getopt::Long;
use Pod::Usage;
use Bio::SearchIO;
#use Test::More;

# Command-line state. Both flags start false; output mode defaults to 'first'
# (only the top hit per query) and results are appended to AlignmentOutput.txt
# unless --outfile is given.
my ($help, $man) = (0, 0);
my $output = 'first';
my $outfile = 'AlignmentOutput.txt';
GetOptions ("outformat:s" => \$output, "outfile:s" => \$outfile, "help" => \$help, "man" => \$man);
pod2usage(-exitval => 0, -verbose => 1) if $help;
pod2usage(-exitval => 0, -verbose => 2) if $man;
if (!(defined $ARGV[0])) {
print "\n***Please specify an input file***\n";
pod2usage(-exitval => 0, -verbose => 1);
}
# Three-argument open with an error check; the handle name OUTFILE is kept
# because the main loop below prints to it. Output is appended, as before.
open(OUTFILE, ">>", $outfile) or die "Cannot open output file $outfile: $!";

# File-scoped working variables used by the main result loop:
# $y/$p are loop counters, $count numbers the hits within a query,
# $previous_* track the running best-hit selection, $current is the index of
# the best hit found so far.
my$y=0;
my$p=0;
my ($count, $previous_length, $previous_added) = (0, 0, 0);
my$current='';
# Direct method call instead of indirect-object syntax ("new Bio::SearchIO").
# NOTE(review): <$ARGV[0]> glob-expands the argument before it is passed as a
# filename; kept as-is for backward compatibility — confirm it is intentional.
my $in = Bio::SearchIO->new(-format => 'blast',
-file => <$ARGV[0]>);
## Write Header ##
print OUTFILE "Query Name\tHit Name\tHit Length\tAlignment Length\t% Hit Length\tNumber Conserved\t% Hit Conserved\t% Query Length\n";
## Main driver loop: iterate over every query result in the BLAST report and
## emit one summary row per hit ('all'), per first hit ('first'), or per
## best-scoring hit ('best'). Relies on the file-scoped variables declared
## above ($count, $current, $previous_length, $previous_added, $y, $p).
OUTERLOOP: while(my $result = $in->next_result) {
$count=0;
##If query has no hits, skip it ##
if ($result->num_hits == 0) {
next;
}
my@matrix=();
## Read in each hit, add 1 to count (count marks number of hit) ##
while(my $hit = $result->next_hit) {
$count++;
## When 'first' output mode is selected, skip anything that isn't the first hit ##
if (($output eq 'first') && ($count > 1)) {
next OUTERLOOP;
}
## If hit has only 1 HSP, calculate percentage coverage of query and hit sequences, and conservation percentage of hit. Print results. ##
if ($hit->num_hsps == 1){
while(my $hsp = $hit->next_hsp) {
my$hit_length_percent = sprintf '%.5f', (($hsp->length('hit') / $hit->length) * 100);
my$hit_cons_percent = sprintf '%.5f',(($hsp->num_conserved / $hit->length) * 100);
my$query_length_percent = sprintf '%.5f',(($hsp->length('query') / $result->query_length) * 100);
##if 'first' or 'all' output selected, print the results straight away. ##
if (($output eq 'first') || ($output eq 'all')){
#cmp_ok($hit_length_percent, '<=', 100);
#cmp_ok($hit_cons_percent, '<=', 100);
#cmp_ok($query_length_percent, '<=', 100);
#cmp_ok($hit_cons_percent, '<=', $hit_length_percent);
print OUTFILE $result->query_name, "\t",
$hit->name, "\t",
$hit->length, "\t",
$hsp->length('hit'), "\t",
$hit_length_percent, "\t",
$hsp->num_conserved, "\t",
$hit_cons_percent, "\t",
$query_length_percent, "\n";
}
elsif ($output eq 'best'){
##for 'best' output, put all results for the query into 2-d array. $count distinguishes each hit from next##
$matrix[$count][0] = $hit->name;
$matrix[$count][1] = $hit->length;
$matrix[$count][2] = $hsp->length('hit');
$matrix[$count][3] = $hit_length_percent;
$matrix[$count][4] = $hsp->num_conserved;
$matrix[$count][5] = $hit_cons_percent;
$matrix[$count][6] = $query_length_percent;
##Put lowest value of length percent (either of query sequence, or more rare, of protein sequence) in another column which will be sorted later##
if ($hit_length_percent < $query_length_percent) {
$matrix[$count][7] = $hit_length_percent;
} else {
$matrix[$count][7] = $query_length_percent;
}
}
}
}
##If hit has more than 1 HSP, invoke a loop to test whether each position in query / hit sequence is covered by at least one HSP. ##
##This takes a bit longer but allows for overlaps between the HSPs ##
elsif ($hit->num_hsps > 1) {
my%hash;
my@conserved_points=(); ##array for conserved positions ##
my$conservation=0;
my@hsp_query_points=(); ##array for query positions covered in HSP ##
my@hsp_hit_points=(); ## array for hit positions covered in HSP ##
my$query_length=0;
my$hit_length=0;
##loop through all HSPs of hit, noting conserved positions, and start and end positions of hit and query sequences##
while(my $hsp = $hit->next_hsp) {
push @conserved_points, $hsp->seq_inds('hit','conserved');
push @hsp_query_points, $hsp->range('query');
push @hsp_hit_points, $hsp-> range('hit');
}
##Enter all conserved_points into hash, then we can test later which positions are 'defined' and calculate conservation score ##
foreach my$value (@conserved_points){
if (exists($hash{$value})) { ##if position is already in hash don't bother defining it again (will occur if HSPs overlap) ##
next;
} else {
$hash{$value}='yes';
}
}
## loop through every position in hit, see if it is defined in conservation hash. If so, add 1 to the conservation score ##
for (my$i=1; $i<= $hit->length; $i++){
if (exists ($hash{$i})) {
$conservation++;
}
}
## Loop through each position in query, and see if it is covered between a 'start' and 'end' position in hsp_query_points array. If so, add 1 to query length ##
## (@hsp_query_points holds flattened (start, end) pairs, hence the step of 2.) ##
LOOP1: for ($y=1; $y<= ($result->query_length); $y++){
for ($p=0; $p<= (scalar(@hsp_query_points)-1); $p=$p+2){
if (($y >= $hsp_query_points[($p)]) && ($y <= $hsp_query_points[($p+1)])){
$query_length++;
next LOOP1;
} else {
next;
}
}
}
## Same as LOOP1, but with hit positions ##
LOOP2: for ($y=1; $y<= ($hit->length); $y++){
for ($p=0; $p<= (scalar(@hsp_hit_points)-1); $p=$p+2){
if (($y >= $hsp_hit_points[($p)]) && ($y <= $hsp_hit_points[($p+1)])){
$hit_length++;
next LOOP2;
} else {
next;
}
}
}
## Calculated percentages from scores ##
my$hit_cons_percent = sprintf '%.5f',(($conservation / $hit->length) * 100);
my$query_length_percent = sprintf '%.5f',(($query_length / $result->query_length) * 100);
my$hit_length_percent = sprintf '%.5f',(($hit_length / $hit->length) * 100);
##Print results for 'all' and 'first' output format##
if (($output eq 'first') || ($output eq 'all')){
#cmp_ok($hit_length_percent, '<=', 100);
#cmp_ok($hit_cons_percent, '<=', 100);
#cmp_ok($query_length_percent, '<=', 100);
#cmp_ok($hit_cons_percent, '<=', $hit_length_percent);
print OUTFILE $result->query_name, "\t",
$hit->name, "\t",
$hit->length, "\t",
$hit_length, "\t",
$hit_length_percent, "\t",
$conservation, "\t",
$hit_cons_percent, "\t",
$query_length_percent, "\n";
}
elsif ($output eq 'best'){
##for 'best' output, put all results for the query into 2-d matrix. $count distinguishes each hit from next##
$matrix[$count][0] = $hit->name;
$matrix[$count][1] = $hit->length;
$matrix[$count][2] = $hit_length;
$matrix[$count][3] = $hit_length_percent;
$matrix[$count][4] = $conservation;
$matrix[$count][5] = $hit_cons_percent;
$matrix[$count][6] = $query_length_percent;
##Put lowest value of length percent (either of query sequence, or more rare, of protein sequence) in another column which will be sorted later##
if ($hit_length_percent < $query_length_percent) {
$matrix[$count][7] = $hit_length_percent;
} else {
$matrix[$count][7] = $query_length_percent;
}
}
}
}
if ($output eq 'best') {
##Now calculate the 'best hit'##
## NOTE(review): every hit populated a matrix row (rows 1..$count), because
## 'best' mode never skips hits — the selection loops below rely on that. ##
$previous_length = 0;
$previous_added = 0;
##loop through last column of matrix to find the longest length##
for (my$i=1; $i<=$count; $i++){
if ($matrix[$i][7] > $previous_length) {
$previous_length = $matrix[$i][7];
} else {
next;
}
}
##find all lengths that are within 5% of the longest, then find the hit with largest conservation+length out of those results##
for (my$i=1; $i<=$count; $i++){
if ((($previous_length - $matrix[$i][7]) < 5) && (($matrix[$i][5] + $matrix[$i][7]) > $previous_added)) {
$previous_added = ($matrix[$i][5] + $matrix[$i][7]);
$current = $i;
} else {
next;
}
}
##print the best result##
## NOTE(review): the commented-out cmp_ok checks below index the matrix with
## $count, but the row actually printed is $current — fix before re-enabling. ##
#cmp_ok($matrix[$count][3], '<=', 100);
#cmp_ok($matrix[$count][5], '<=', 100);
#cmp_ok($matrix[$count][6], '<=', 100);
#cmp_ok($matrix[$count][5], '<=', $matrix[$count][3]);
print OUTFILE $result -> query_name, "\t",
$matrix[$current][0], "\t",
$matrix[$current][1], "\t",
$matrix[$current][2], "\t",
$matrix[$current][3], "\t",
$matrix[$current][4], "\t",
$matrix[$current][5], "\t",
$matrix[$current][6], "\n",
## NOTE(review): the print list above ends with a trailing comma and no
## semicolon; this is legal Perl but fragile if code is added below. ##
}
}
#done_testing();
###Documentation###
__END__
=head1 NAME
SummariseBlast.pl
=head1 SYNOPSIS
perl SummariseBlast.pl <inputfile> [options]
=head1 OPTIONS
--outformat=first (default and quickest) Prints only the first hit's result.
=all Prints all BLAST alignment results.
=best Prints only the 'best' hit's result.
--outfile=<outputfile> (By default, output is written to a file named "AlignmentOutput.txt", but an alternative can be specified).
-help prints usage and options
-man prints full manual
=head1 DESCRIPTION
This Perl script takes input from a pairwise BLAST search result. It prints some summary information from the alignments, so that the user can get a quick overview of the quality and length of the alignment.
It is designed to be used on a BLAST search against a database of genes, transcripts, or proteins, and not on whole genomes. For example, it could be used to accompany a homology search, in order to
see how well genes from a transcript assembly match with mRNA from a related organism, or in functional annotation to see the matches for a set of unannotated proteins.
It takes into account multiple HSPs per hit, if present, and whether they overlap in either the query or hit sequence.
The output is the following summary information:
Query Name
Hit Name (or Accession number)
Hit Length = total length of the hit sequence
Alignment Length = length of the alignment, including all HSPs.
% Hit Length = Percentage of the hit in the alignment. (Alignment length / Hit length) * 100.
Number conserved = number of conserved amino acids.
% Hit Conserved = percentage of amino acids in hit sequence that are conserved in the alignment. (Number conserved / Hit Length) * 100.
% Query Length = percentage of query sequence participating in alignment.
About the three --outformat options:
"all" prints the above information for every hit result. This means that if there are 500 hits per query sequence, then this script will write 500 rows of information for each query sequence.
If only one result per query is needed, consider the "best" and "first" options. "First" (the default and quickest option) only considers the very first hit from the BLAST output, which has the lowest e-value. It can be used to get a quick first overview of the Blast results.
Whereas, "best" does some sorting to find the actual best hit (which is not necessarily the first hit, although often will be). It first finds the hit with the highest % Query Length score (or % hit score if this is lower),
and then gets all other hits that are within 5% of the top scorer (e.g. if top score is 45%, all results with 40% and higher will be included). These "top" hits, are then sorted based on the sum of: % conservation + % Query Length.
The highest of these is chosen as the 'best' hit, and its alignment information is printed. The rationale behind this option, is that a low e-value can come from a very short but specific alignment
(i.e. hitting only one domain of a protein) and may end up as the first hit. Whereas a longer but less specific alignment (i.e. hitting the whole length of a protein) may have a higher e-value and therefore be lower down on the list,
however many would actually consider this the better alignment, because a larger amount of the hit sequence is covered.
The output is a tab-delimited file, which can be imported easily into other programs such as Excel and R. If a query sequence had no hits in the BLAST result, it is ignored and will be missing from the output file.
This script was written by Elizabeth Sollars, Queen Mary University of London. Any bugs or friendly suggestions, please email e.sollars@qmul.ac.uk
=cut
| lsollars/Perl-Scripts | SummariseBlast.pl | Perl | mit | 12,981 |
#!/usr/bin/env perl
use strict;
use warnings;
use utf8;
use feature 'say';
# Find modules installed w/ Carton
use FindBin;
use lib "$FindBin::Bin/../local/lib/perl5";
# Actual modules the script requires
use Data::Dumper;
use Text::CSV_XS;
use Mojo::Util qw/ encode slurp spurt /;
use Mojo::Loader;
use Mojo::Template;
use Mojo::JSON;
# Read the input CSV filename from the first command-line argument (not STDIN).
my $input_file = shift @ARGV;
die 'No input file specified' unless $input_file;
my $filename = $input_file;
# Parse the whole CSV into @rows: one hashref per record, keyed by the
# column names taken from the header line.
my @rows;
my $csv
= Text::CSV_XS->new( { binary => 1, eol => $/, allow_loose_quotes => 1 }
) # should set binary attribute.
or die "Cannot use CSV: " . Text::CSV->error_diag(); # NOTE(review): error path calls Text::CSV, not Text::CSV_XS — confirm intended.
open my $fh, "<:encoding(utf8)", $filename or die "$filename: $!";
$csv->column_names( $csv->getline( $fh ) );
while ( my $row = $csv->getline_hr( $fh ) ) {
push @rows, $row;
}
# Make sure we stopped at end-of-file rather than on a parse error.
$csv->eof or $csv->error_diag();
close $fh;
# For each ward record: build a URL slug from the district name and ward id,
# render the 'ward' template from the __DATA__ section, and write the result
# as a Markdown file under _toronto_school_wards/.
for my $ward ( @rows ) {
my $name = $ward->{'fed_district_name'};
# NOTE(review): no /g flag, so only the FIRST hyphen is removed before
# slugging — confirm this is intentional.
$name =~ s/-//;
# Lower-case and replace every remaining non-word character with '-'.
( my $slug = lc( $name ) ) =~ s/\W/-/g;
$slug .= '-' . $ward->{'school_ward_id'};
$ward->{'slug'} = $slug;
my $output_path = '_toronto_school_wards/' . $slug . '.md';
# NOTE(review): Mojo::Loader->new/->data is the old Mojolicious API;
# newer releases provide the data_section() function instead — verify
# against the pinned Mojolicious version.
my $loader = Mojo::Loader->new;
my $template = $loader->data( __PACKAGE__, 'ward' );
my $mt = Mojo::Template->new;
my $output_str = $mt->render( $template, $ward);
$output_str = encode 'UTF-8', $output_str;
## Write the template output to a filehandle
spurt $output_str, $output_path;
say "Wrote $ward->{'school_ward_id'} to $output_path";
}
__DATA__
@@ ward
% my $ward = shift;
---
layout: school_ward
title: "<%= $ward->{'fed_district_name'} =%>"
wid: "<%= $ward->{'school_ward_id'} %>"
ward: "ward_<%= $ward->{'school_ward_id'} %>"
permalink: "/toronto-school-ward/<%= $ward->{'slug'} %>/"
---
| phillipadsmith/everycandidate.org | _scripts/generate_school_wards.pl | Perl | mit | 1,840 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V10::Common::KeywordAnnotations;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);
use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;
# Construct a KeywordAnnotations value object from an arguments hashref.
# Only the 'concepts' field is recognised; unassigned fields are stripped so
# the serialized JSON payload stays minimal.
sub new {
    my ($class, $args) = @_;
    my $self = { concepts => $args->{concepts} };
    # Delete the unassigned fields in this object for a more concise JSON payload
    remove_unassigned_fields($self, $args);
    return bless $self, $class;
}
1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V10/Common/KeywordAnnotations.pm | Perl | apache-2.0 | 1,023 |
=head1 LICENSE
Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=pod
=head1 NAME
Bio::EnsEMBL::Compara::Production::Projection::DisplayProjection
=head1 DESCRIPTION
Data transfer object for holding the results of a projection.
=head1 AUTHOR
Andy Yates (ayatesatebiacuk)
=head1 CONTACT
This modules is part of the EnsEMBL project (http://www.ensembl.org)
Questions can be posted to the dev mailing list: http://lists.ensembl.org/mailman/listinfo/dev
=cut
package Bio::EnsEMBL::Compara::Production::Projection::DisplayProjection;
use strict;
use warnings;
use Carp qw(confess);
use Bio::EnsEMBL::Utils::Argument qw(rearrange);
use Bio::EnsEMBL::Utils::Scalar qw(assert_ref);
use base qw(Bio::EnsEMBL::Compara::Production::Projection::Projection);
=head2 new()
Arg[-total] : required; Total number of genes the target projected to
Arg[-current_index] : required; Current position in the total
Arg[..] : See parent object for more information about other params
Description : New method used for a new instance of the given object.
Required fields are indicated accordingly. Fields are
specified using the Arguments syntax (case insensitive).
=cut
sub new {
    my ( $class, @args ) = @_;
    my $self = $class->SUPER::new(@args);
    # Extract the two attributes specific to this subclass (see POD above).
    my ( $total, $current_index ) = rearrange( [qw(total current_index)], @args );

    confess(
        'The attribute total must be specified during construction'
    ) if !defined $total;
    $self->{total} = $total;

    # BUG FIX: this message previously referred to the unrelated attribute
    # 'type'; it now names the attribute actually being validated.
    confess(
        'The attribute current_index must be specified during construction'
    ) if !defined $current_index;
    $self->{current_index} = $current_index;

    return $self;
}
=head2 total()
The current total of genes the target was mapped to (applies to 1:m)
relationships
=cut
# Read-only accessor: total number of genes the target projected to
# (relevant for 1:m relationships).
sub total {
    my ($self) = @_;
    my $value = $self->{total};
    return $value;
}
=head2 current_index()
Current position of our iteration through the total amount of genes this
target is related to
=cut
# Read-only accessor: position of this projection within the total number of
# genes the target is related to.
sub current_index {
    my ($self) = @_;
    my $value = $self->{current_index};
    return $value;
}
=head2 ignore()
Attribute which can only be set after construction since it is used
to optionally allow us to ignore an existing projection during a post
filtering stage.
=cut
# Combined getter/setter for the post-construction 'ignore' flag, used by a
# post-filtering stage to mark an existing projection as skippable.
# Note: only defined values are stored, so the flag cannot be reset to undef.
sub ignore {
    my ($self, $flag) = @_;
    if (defined $flag) {
        $self->{ignore} = $flag;
    }
    return $self->{ignore};
}
1; | dbolser-ebi/ensembl-compara | modules/Bio/EnsEMBL/Compara/Production/Projection/DisplayProjection.pm | Perl | apache-2.0 | 2,960 |
#! /usr/bin/perl
# Skeleton command-line script: parses standard --help/--man options with
# Getopt::Long/Pod::Usage, then runs main(). POD lives after __END__.
use strict;
use warnings;
use Getopt::Long;
use Pod::Usage;
my $man = 0;
my $help = 0;
# my $port = 0;
# my $host = "";
GetOptions(
'help|?' => \$help,
'man' => \$man,
# 'p|port=i' => \$port,
# 'a:s' => \$host
) or pod2usage(-verbose => 2);
pod2usage(1) if $help;
pod2usage(-verbose => 2) if $man;
# BUG FIX: call main() directly instead of the legacy '&main;' form, which
# bypasses prototypes and silently passes the caller's current @_.
main(); exit;
# Entry point: replace the body with the script's real work.
sub main {
print "hello world\n";
}
__END__
=head1 NAME
foo.pl - Write a short description of this script here
=head1 SYNOPSIS
foo.pl [options] [file ...]
Options:
-help brief help message
-man full documentation
=head1 OPTIONS
=over 2
=item B<-help>
Print a brief help message and exits.
=item B<-man>
Prints the manual page and exits.
=back
=head1 DESCRIPTION
B<This program> will read the given input file(s) and do something
useful with the contents thereof.
=head1 AUTHOR
Kosei Moriyama <cou929@gmail.com>
=cut
| mishin/presentation | 4_trans/template.pl | Perl | apache-2.0 | 955 |
=head1 LICENSE
See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=head1 NAME
Bio::EnsEMBL::Compara::RunnableDB::Flatfiles::UpdateTableFromFile
=head1 DESCRIPTION
Update a table with data from flatfiles. Files should be given as an arrayref
in the `attrib_files` parameter. File headers match fields in the table's
column names (`primary_key` is required and should be present in all files).
=cut
package Bio::EnsEMBL::Compara::RunnableDB::Flatfiles::UpdateTableFromFile;
use warnings;
use strict;
use Data::Dumper;
use Bio::EnsEMBL::Compara::Utils::FlatFile qw(map_row_to_header);
use base ('Bio::EnsEMBL::Compara::RunnableDB::SqlCmd');
sub param_defaults {
my ($self) = @_;
return {
%{$self->SUPER::param_defaults},
'dry_run' => 0,
}
}
sub fetch_input {
my $self = shift;
my $table = $self->param_required('table');
my $primary_key = $self->param_required('primary_key');
# fetch all attributes from file list
my %attribs;
my @attrib_files = @{$self->param('attrib_files')};
foreach my $f ( @attrib_files ) {
open( my $fh, '<', $f ) or die "Cannot open $f for reading";
my $header = <$fh>;
my @header_cols = split( /\s+/, $header );
die "No $primary_key found in $f - please check file header line\n" unless grep {$_ eq $primary_key} @header_cols;
while ( my $line = <$fh> ) {
my $row = map_row_to_header($line, \@header_cols);
my $primary_id = $row->{$primary_key};
die "$primary_key is empty in file $f" unless $primary_id;
delete $row->{$primary_key};
foreach my $attrib_name ( keys %$row ) {
$attribs{$primary_id}->{$attrib_name} = $row->{$attrib_name};
}
}
close $fh;
}
# generate UPDATE SQL commands
my @sql_cmds;
foreach my $id ( keys %attribs ) {
my $sql = "UPDATE $table SET ";
$sql .= join(',', map { $_ . '=' . $attribs{$id}->{$_} } keys %{$attribs{$id}});
$sql .= " WHERE $primary_key = $id";
push @sql_cmds, $sql;
}
print Dumper \@sql_cmds if $self->debug;
$self->param('db_conn', $self->compara_dba);
$self->param('sql', \@sql_cmds);
if ( $self->param('dry_run') ){
$self->input_job->autoflow(0);
$self->complete_early("Dry-run mode : exiting...");
}
}
1;
| Ensembl/ensembl-compara | modules/Bio/EnsEMBL/Compara/RunnableDB/Flatfiles/UpdateTableFromFile.pm | Perl | apache-2.0 | 2,969 |
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::ibm::fs900::snmp::mode::components::fan;
use strict;
use warnings;
# In MIB 'IBM-FLASHSYSTEM.MIB'
my $mapping = {
fanObject => { oid => '.1.3.6.1.4.1.2.6.255.1.1.6.10.1.2' },
fanPWM => { oid => '.1.3.6.1.4.1.2.6.255.1.1.6.10.1.4' },
fanTemp => { oid => '.1.3.6.1.4.1.2.6.255.1.1.6.10.1.5' },
fan0 => { oid => '.1.3.6.1.4.1.2.6.255.1.1.6.10.1.6' },
fan1 => { oid => '.1.3.6.1.4.1.2.6.255.1.1.6.10.1.7' },
fan2 => { oid => '.1.3.6.1.4.1.2.6.255.1.1.6.10.1.8' },
fan3 => { oid => '.1.3.6.1.4.1.2.6.255.1.1.6.10.1.9' },
};
my $oid_fanTableEntry = '.1.3.6.1.4.1.2.6.255.1.1.6.10.1';
# Queue the fan table root OID for the bulk SNMP request performed by the
# hardware framework before check() runs.
sub load {
my ($self) = @_;
push @{$self->{request}}, { oid => $oid_fanTableEntry };
}
# Walk the SNMP fan table: for each fan module, report PWM, temperature and
# the four individual fan speeds, apply numeric severity thresholds and emit
# perfdata. Honours the standard component filters (section/instance).
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking fan modules");
    $self->{components}->{fan} = {name => 'fans', total => 0, skip => 0};
    return if ($self->check_filter(section => 'fan'));

    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_fanTableEntry}})) {
        # Only iterate rows of the fanObject column; capture the row instance.
        next if ($oid !~ /^$mapping->{fanObject}->{oid}\.(.*)$/);
        my $instance = $1;
        my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{$oid_fanTableEntry}, instance => $instance);

        next if ($self->check_filter(section => 'fan', instance => $instance));
        $self->{components}->{fan}->{total}++;

        # fanTemp is reported in tenths of a degree, hence the /10 everywhere.
        $self->{output}->output_add(long_msg => sprintf("Fan module '%s' [instance = %s, PWM = %s%%, temperature = %sC, fan0 speed = %s%%, fan1 speed = %s%%, fan2 speed = %s%%, fan3 speed = %s%%]",
                                    $result->{fanObject}, $instance, $result->{fanPWM}, $result->{fanTemp} / 10, $result->{fan0}, $result->{fan1}, $result->{fan2}, $result->{fan3}));

        if (defined($result->{fanPWM}) && $result->{fanPWM} =~ /[0-9]/) {
            my ($exit, $warn, $crit) = $self->get_severity_numeric(section => 'fan.pwm', instance => $instance, value => $result->{fanPWM});
            if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
                # BUG FIX: the format string previously ended with a lone '%'
                # ("%s%"), an invalid sprintf conversion; '%%' emits a literal
                # percent sign.
                $self->{output}->output_add(severity => $exit,
                                            short_msg => sprintf("Fan module '%s' PWM is %s%%", $instance, $result->{fanPWM}));
            }
            $self->{output}->perfdata_add(
                label => 'fan_pwm', unit => '%',
                nlabel => 'hardware.fan.pwm.percentage',
                instances => $instance,
                value => $result->{fanPWM},
                warning => $warn,
                critical => $crit,
                min => 0, max => 100,
            );
        }

        if (defined($result->{fanTemp}) && $result->{fanTemp} =~ /[0-9]/) {
            my ($exit, $warn, $crit) = $self->get_severity_numeric(section => 'fan.temperature', instance => $instance, value => $result->{fanTemp} / 10);
            if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
                $self->{output}->output_add(severity => $exit,
                                            short_msg => sprintf("Fan module '%s' temperature is %s degree centigrade", $instance, $result->{fanTemp} / 10));
            }
            $self->{output}->perfdata_add(
                label => 'fan_temp', unit => 'C',
                nlabel => 'hardware.fan.temperature.celsius',
                instances => $instance,
                value => $result->{fanTemp} / 10,
                warning => $warn,
                critical => $crit,
                min => 0,
            );
        }

        # Each module exposes four individual fans (fan0..fan3), all in %.
        foreach my $fan ('fan0', 'fan1', 'fan2', 'fan3') {
            if (defined($result->{$fan}) && $result->{$fan} =~ /[0-9]/) {
                my ($exit, $warn, $crit) = $self->get_severity_numeric(section => 'fan.speed', instance => $instance, value => $result->{$fan});
                if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
                    $self->{output}->output_add(severity => $exit,
                                                short_msg => sprintf("Fan module '%s' fan '%s' speed is %s%%", $instance, $fan, $result->{$fan}));
                }
                $self->{output}->perfdata_add(
                    label => 'fan_speed', unit => '%',
                    nlabel => 'hardware.fan.speed.percentage',
                    instances => [$instance, $fan],
                    value => $result->{$fan},
                    warning => $warn,
                    critical => $crit,
                    min => 0, max => 100,
                );
            }
        }
    }
}
1;
| centreon/centreon-plugins | storage/ibm/fs900/snmp/mode/components/fan.pm | Perl | apache-2.0 | 5,479 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package os::windows::local::mode::cmdreturn;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
use centreon::plugins::misc;
use Time::HiRes qw(gettimeofday tv_interval);
# Constructor: registers this mode's command-line options with the plugin
# framework and initialises the exit-code => action dispatch table, which is
# populated later in check_options() from --manage-returns.
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options);
bless $self, $class;
$options{options}->add_options(arguments => {
'warning-time:s' => { name => 'warning_time' },
'critical-time:s' => { name => 'critical_time' },
'timeout:s' => { name => 'timeout', default => 30 },
'command:s' => { name => 'command' },
'command-path:s' => { name => 'command_path' },
'command-options:s' => { name => 'command_options' },
'manage-returns:s' => { name => 'manage_returns', default => '' },
});
$self->{manage_returns} = {};
return $self;
}
# Validate options: --command is mandatory; --manage-returns is parsed into
# the dispatch table as '#'-separated "exitcode,STATUS,message" triples (an
# empty exitcode defines the default action). Time thresholds are validated
# against the perfdata threshold parser. Exits with a usage error on failure.
sub check_options {
my ($self, %options) = @_;
$self->SUPER::init(%options);
if (!defined($self->{option_results}->{command})) {
$self->{output}->add_option_msg(short_msg => "Need to specify command option.");
$self->{output}->option_exit();
}
# Each entry: "<exitcode>,<severity>,<message>"; entries with an invalid
# severity label are silently skipped.
foreach my $entry (split(/#/, $self->{option_results}->{manage_returns})) {
next if (!($entry =~ /(.*?),(.*?),(.*)/));
next if (!$self->{output}->is_litteral_status(status => $2));
if ($1 ne '') {
$self->{manage_returns}->{$1} = {return => $2, msg => $3};
} else {
$self->{manage_returns}->{default} = {return => $2, msg => $3};
}
}
# At least one valid mapping is required.
if ($self->{option_results}->{manage_returns} eq '' || scalar(keys %{$self->{manage_returns}}) == 0) {
$self->{output}->add_option_msg(short_msg => "Need to specify manage-returns option correctly.");
$self->{output}->option_exit();
}
if (($self->{perfdata}->threshold_validate(label => 'warning-time', value => $self->{option_results}->{warning_time})) == 0) {
$self->{output}->add_option_msg(short_msg => "Wrong warning threshold '" . $self->{option_results}->{warning_time} . "'.");
$self->{output}->option_exit();
}
if (($self->{perfdata}->threshold_validate(label => 'critical-time', value => $self->{option_results}->{critical_time})) == 0) {
$self->{output}->add_option_msg(short_msg => "Wrong critical threshold '" . $self->{option_results}->{critical_time} . "'.");
$self->{output}->option_exit();
}
}
# Execute the configured command, map its exit code to a severity via the
# manage_returns dispatch table, check the elapsed time against the warning/
# critical thresholds, and emit 'code' and 'time' perfdata.
sub run {
    my ($self, %options) = @_;

    my $timing0 = [gettimeofday];
    my ($stdout, $exit_code) = centreon::plugins::misc::execute(
        output => $self->{output},
        options => $self->{option_results},
        command => $self->{option_results}->{command},
        command_path => $self->{option_results}->{command_path},
        command_options => $self->{option_results}->{command_options},
        no_quit => 1
    );
    my $timeelapsed = tv_interval($timing0, [gettimeofday]);

    # Pipes would break the plugin-output format, so neutralise them.
    my $long_msg = $stdout;
    $long_msg =~ s/\|/-/mg;
    $self->{output}->output_add(long_msg => $long_msg);

    if (defined($self->{manage_returns}->{$exit_code})) {
        $self->{output}->output_add(severity => $self->{manage_returns}->{$exit_code}->{return},
                                    short_msg => $self->{manage_returns}->{$exit_code}->{msg});
    } elsif (defined($self->{manage_returns}->{default})) {
        $self->{output}->output_add(severity => $self->{manage_returns}->{default}->{return},
                                    short_msg => $self->{manage_returns}->{default}->{msg});
    } else {
        # BUG FIX: severity was misspelled 'UNKNWON', which is not a valid
        # status label for the output layer.
        $self->{output}->output_add(severity => 'UNKNOWN',
                                    short_msg => 'Exit code from command');
    }
    if (defined($exit_code)) {
        $self->{output}->perfdata_add(
            label => 'code',
            value => $exit_code
        );
    }

    my $exit = $self->{perfdata}->threshold_check(value => $timeelapsed,
                                                  threshold => [ { label => 'critical-time', exit_litteral => 'critical' }, { label => 'warning-time', exit_litteral => 'warning' } ]);
    if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
        $self->{output}->output_add(severity => $exit,
                                    short_msg => sprintf("Response time %.3fs", $timeelapsed));
    }
    $self->{output}->perfdata_add(
        label => 'time', unit => 's',
        value => sprintf('%.3f', $timeelapsed),
        warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning_time'),
        critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical_time'),
        min => 0
    );

    $self->{output}->display();
    $self->{output}->exit();
}
1;
__END__
=head1 MODE
Check command returns.
=over 8
=item B<--manage-returns>
Set action according command exit code.
Example: 0,OK,File xxx exist#1,CRITICAL,File xxx not exist#,UNKNOWN,Command problem
=item B<--timeout>
Timeout in seconds for the command (Default: 30).
=item B<--command>
Command to test (Default: none).
=item B<--command-path>
Command path (Default: none).
=item B<--command-options>
Command options (Default: none).
=item B<--warning-time>
Threshold warning in seconds.
=item B<--critical-time>
Threshold critical in seconds.
=back
=cut
| Tpo76/centreon-plugins | os/windows/local/mode/cmdreturn.pm | Perl | apache-2.0 | 6,084 |
# Data-transfer object for a Service Catalog TagOption summary (see POD below).
package Paws::ServiceCatalog::TagOptionSummary;
use Moose;
# The TagOption key (tag name); optional string.
has Key => (is => 'ro', isa => 'Str');
# The values recorded for that key; optional list of strings.
has Values => (is => 'ro', isa => 'ArrayRef[Str|Undef]');
1;
### main pod documentation begin ###
=head1 NAME
Paws::ServiceCatalog::TagOptionSummary
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::ServiceCatalog::TagOptionSummary object:
$service_obj->Method(Att1 => { Key => $value, ..., Values => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::ServiceCatalog::TagOptionSummary object:
$result = $service_obj->Method(...);
$result->Att1->Key
=head1 DESCRIPTION
The TagOption summary key-value pair.
=head1 ATTRIBUTES
=head2 Key => Str
The TagOptionSummary key.
=head2 Values => ArrayRef[Str|Undef]
The TagOptionSummary value.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::ServiceCatalog>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/ServiceCatalog/TagOptionSummary.pm | Perl | apache-2.0 | 1,415 |
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package cloud::aws::cloudfront::mode::errors;
use base qw(cloud::aws::custom::mode);
use strict;
use warnings;
sub get_metrics_mapping {
    my ($self, %options) = @_;

    # Static catalogue of the CloudWatch metrics handled by this mode:
    # each entry maps an AWS metric name to its human output label, its
    # threshold label, its perfdata nlabel and its unit.
    return {
        extra_params => {
            message_multiple => 'All instances metrics are ok'
        },
        metrics => {
            TotalErrorRate => {
                output => 'Total Error Rate',
                label  => 'errorrate-total',
                nlabel => { absolute => 'cloudfront.errorrate.total.percentage' },
                unit   => '%'
            },
            '4xxErrorRate' => {
                output => '4xx Error Rate',
                label  => 'errorrate-4xx',
                nlabel => { absolute => 'cloudfront.errorrate.4xx.percentage' },
                unit   => '%'
            },
            '5xxErrorRate' => {
                output => '5xx Error Rate',
                label  => 'errorrate-5xx',
                nlabel => { absolute => 'cloudfront.errorrate.5xx.percentage' },
                unit   => '%'
            }
        }
    };
}
sub prefix_metric_output {
    my ($self, %options) = @_;

    # Short prefix printed before each metric line, e.g. "Instance 'E123' ".
    my $display = $options{instance_value}->{display};
    return "Instance '" . $display . "' ";
}
sub long_output {
    my ($self, %options) = @_;

    # Header used in extended (verbose) output for one instance.
    return sprintf("Checking Instance '%s' ", $options{instance_value}->{display});
}
# Constructor: registers the --id option (CloudFront distribution id,
# repeatable) on top of the generic AWS custom-mode options.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;
    # 'id:s@' -> string option that may be given multiple times.
    $options{options}->add_options(arguments => {
        'id:s@' => { name => 'id' }
    });
    return $self;
}
# Validate options: --id is mandatory; every non-empty id given is
# collected into $self->{aws_instance} for manage_selection().
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::check_options(%options);
    # NOTE(review): with 'id:s@' the option value is an arrayref, so the
    # "eq ''" branch can only trigger via stringification — confirm the
    # intended guard is simply "option not given".
    if (!defined($self->{option_results}->{id}) || $self->{option_results}->{id} eq '') {
        $self->{output}->add_option_msg(short_msg => "Need to specify --id option.");
        $self->{output}->option_exit();
    }
    foreach my $instance (@{$self->{option_results}->{id}}) {
        if ($instance ne '') {
            push @{$self->{aws_instance}}, $instance;
        }
    }
}
# Fetch the configured CloudWatch metrics for every requested distribution
# and populate $self->{metrics}, the structure consumed by the generic
# counter engine.  Exits with an error when nothing was collected.
sub manage_selection {
    my ($self, %options) = @_;
    my %metric_results;
    foreach my $instance (@{$self->{aws_instance}}) {
        # CloudFront metrics always live in the 'Global' region dimension.
        $metric_results{$instance} = $options{custom}->cloudwatch_get_metrics(
            namespace => 'AWS/CloudFront',
            dimensions => [ { Name => 'Region', Value => 'Global' }, { Name => 'DistributionId', Value => $instance } ],
            metrics => $self->{aws_metrics},
            statistics => $self->{aws_statistics},
            timeframe => $self->{aws_timeframe},
            period => $self->{aws_period},
        );
        foreach my $metric (@{$self->{aws_metrics}}) {
            foreach my $statistic (@{$self->{aws_statistics}}) {
                # Skip missing datapoints unless --zeroed asked to default them to 0.
                next if (!defined($metric_results{$instance}->{$metric}->{lc($statistic)}) && !defined($self->{option_results}->{zeroed}));
                $self->{metrics}->{$instance}->{display} = $instance;
                $self->{metrics}->{$instance}->{statistics}->{lc($statistic)}->{display} = $statistic;
                $self->{metrics}->{$instance}->{statistics}->{lc($statistic)}->{timeframe} = $self->{aws_timeframe};
                $self->{metrics}->{$instance}->{statistics}->{lc($statistic)}->{$metric} =
                    defined($metric_results{$instance}->{$metric}->{lc($statistic)}) ?
                    $metric_results{$instance}->{$metric}->{lc($statistic)} : 0;
            }
        }
    }
    if (scalar(keys %{$self->{metrics}}) <= 0) {
        $self->{output}->add_option_msg(short_msg => 'No metrics. Check your options or use --zeroed option to set 0 on undefined values');
        $self->{output}->option_exit();
    }
}
1;
__END__
=head1 MODE
Check CloudFront instances errors.
Example:
perl centreon_plugins.pl --plugin=cloud::aws::cloudfront::plugin --custommode=paws --mode=errors --region='eu-west-1'
--id='E8T734E1AF1L4' --statistic='average' --critical-errorrate-total='10' --verbose
See 'https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/monitoring-using-cloudwatch.html'
for more information.
Default statistic: 'average' / Valid statistic: 'average'.
=over 8
=item B<--id>
Set the instance id (Required) (Can be multiple).
=item B<--filter-metric>
Filter metrics (Can be: 'TotalErrorRate', '4xxErrorRate', '5xxErrorRate')
(Can be a regexp).
=item B<--warning-*>
Thresholds warning (Can be: 'errorrate-total',
'errorrate-4xx', 'errorrate-5xx').
=item B<--critical-*>
Thresholds critical (Can be: 'errorrate-total',
'errorrate-4xx', 'errorrate-5xx').
=back
=cut
| centreon/centreon-plugins | cloud/aws/cloudfront/mode/errors.pm | Perl | apache-2.0 | 5,534 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::digi::sarian::snmp::mode::memory;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
# Constructor: registers the --warning/--critical options (thresholds on
# the percentage of used memory).
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;
    $options{options}->add_options(arguments =>
                                {
                                  "warning:s" => { name => 'warning' },
                                  "critical:s" => { name => 'critical' },
                                });
    return $self;
}
# Validate options: both thresholds must parse as centreon threshold
# expressions, otherwise exit with a usage error.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::init(%options);
    if (($self->{perfdata}->threshold_validate(label => 'warning', value => $self->{option_results}->{warning})) == 0) {
        $self->{output}->add_option_msg(short_msg => "Wrong warning threshold '" . $self->{option_results}->{warning} . "'.");
        $self->{output}->option_exit();
    }
    if (($self->{perfdata}->threshold_validate(label => 'critical', value => $self->{option_results}->{critical})) == 0) {
        $self->{output}->add_option_msg(short_msg => "Wrong critical threshold '" . $self->{option_results}->{critical} . "'.");
        $self->{output}->option_exit();
    }
}
# Query the Sarian memory OIDs over SNMP, compute used memory (absolute
# and percent), check it against the configured thresholds, and emit the
# plugin output + perfdata.
sub run {
    my ($self, %options) = @_;
    $self->{snmp} = $options{snmp};

    # Memory counters exposed by the sarian-monitor MIB (values in KB).
    my $oid_totalMemory = '.1.3.6.1.4.1.16378.10000.3.17.0'; # in Kbytes
    my $oid_freeMemory = '.1.3.6.1.4.1.16378.10000.3.18.0'; # in Kbytes
    my $result = $self->{snmp}->get_leef(oids => [$oid_totalMemory, $oid_freeMemory],
                                         nothing_quit => 1);

    my $free = $result->{$oid_freeMemory} * 1024;
    my $total_size = $result->{$oid_totalMemory} * 1024;

    # Guard against a zero/unreported total: without this the percentage
    # computation below dies with "Illegal division by zero".
    if (!$total_size) {
        $self->{output}->add_option_msg(short_msg => "Device reported a total memory of 0, cannot compute usage.");
        $self->{output}->option_exit();
    }

    my $used = $total_size - $free;
    my $prct_used = $used * 100 / $total_size;

    my $exit = $self->{perfdata}->threshold_check(value => $prct_used, threshold => [ { label => 'critical', exit_litteral => 'critical' }, { label => 'warning', exit_litteral => 'warning' } ]);
    my ($total_value, $total_unit) = $self->{perfdata}->change_bytes(value => $total_size);
    my ($used_value, $used_unit) = $self->{perfdata}->change_bytes(value => $used);

    $self->{output}->output_add(severity => $exit,
                                short_msg => sprintf("Ram used %s (%.2f%%), Total: %s",
                                            $used_value . " " . $used_unit, $prct_used,
                                            $total_value . " " . $total_unit));
    # Perfdata in bytes; thresholds are converted from percent to bytes.
    $self->{output}->perfdata_add(label => "used", unit => 'B',
                                  value => $used,
                                  warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning', total => $total_size, cast_int => 1),
                                  critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical', total => $total_size, cast_int => 1),
                                  min => 0, max => $total_size);

    $self->{output}->display();
    $self->{output}->exit();
}
1;
__END__
=head1 MODE
Check memory usage (sarian-monitor.mib).
=over 8
=item B<--warning>
Threshold warning in percent.
=item B<--critical>
Threshold critical in percent.
=back
=cut
| Tpo76/centreon-plugins | network/digi/sarian/snmp/mode/memory.pm | Perl | apache-2.0 | 3,978 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package Bio::EnsEMBL::Production::Pipeline::RNAGeneXref::AnalysisConfiguration;
use strict;
use warnings;
use base ('Bio::EnsEMBL::Production::Pipeline::Common::Base');
use Path::Tiny;
use POSIX qw(strftime);
# Compare each configured analysis' source-file date stamp against the
# db_version stored on the corresponding core-db analysis.  Keep only the
# analyses whose file exists and that are either new (not in the db) or
# whose datestamp differs from the stored db_version.
sub run {
    my ($self) = @_;
    my $analyses = $self->param_required('analyses');
    my $aa = $self->core_dba->get_adaptor('Analysis');
    my @filtered_analyses = ();
    foreach my $analysis_config (@{$analyses}) {
        if (-e $$analysis_config{'local_file'}) {
            # Use the file's mtime, formatted YYYY-MM-DD, as the db_version.
            my $timestamp = path($$analysis_config{'local_file'})->stat->mtime;
            my $datestamp = strftime "%Y-%m-%d", localtime($timestamp);
            $$analysis_config{'db_version'} = $datestamp;
            my $logic_name = $$analysis_config{'logic_name'};
            my $analysis = $aa->fetch_by_logic_name($logic_name);
            if (defined($analysis)) {
                # Existing analysis: re-run only when the datestamp changed.
                if ($$analysis_config{'db_version'} ne $analysis->db_version) {
                    push @filtered_analyses, $analysis_config;
                }
            } else {
                # New analysis: always run.
                push @filtered_analyses, $analysis_config;
            }
        }
    }
    $self->param('filtered_analyses', \@filtered_analyses);
}
sub write_output {
    my ($self) = @_;

    # Flow one job per out-of-date analysis on branch 2; the empty job on
    # branch 3 acts as a trigger, emitted only when there is work to do.
    my $filtered = $self->param('filtered_analyses');
    if ( @{$filtered} ) {
        $self->dataflow_output_id( $filtered, 2 );
        $self->dataflow_output_id( {}, 3 );
    }
}
1;
| Ensembl/ensembl-production | modules/Bio/EnsEMBL/Production/Pipeline/RNAGeneXref/AnalysisConfiguration.pm | Perl | apache-2.0 | 2,026 |
# Licensed under the Upwork's API Terms of Use;
# you may not use this file except in compliance with the Terms.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author:: Maksym Novozhylov (mnovozhilov@upwork.com)
# Copyright:: Copyright 2015(c) Upwork.com
# License:: See LICENSE.txt and TOS - https://developers.upwork.com/api-tos.html
package Net::Upwork::API::Routers::Activities::Team;
use strict;
use warnings;
use parent "Net::Upwork::API";
use constant ENTRY_POINT => Net::Upwork::API::Client::ENTRY_POINT_API;
=pod
=head1 NAME
Net::Upwork::API::Routers::Activities::Team
=head1 FUNCTIONS
=over 4
=item new($api)
Create a new object for accessing the Team Activities API
B<Parameters>
$api
API object
=cut
sub new {
    my ($class, $api) = @_;
    # Delegate to the shared router initializer, binding this router to the
    # main API entry point.
    return Net::Upwork::API::init_router($class, $api, ENTRY_POINT);
}
=item get_list
List all oTask/Activity records within a team
B<Parameters>
$company
Company ID
$team
Team ID
B<Return value>
JSON response as a string
=cut
sub get_list {
    my ($self, $company, $team) = @_;

    # No code filter: get_by_type returns every record for the team.
    return get_by_type($self, $company, $team);
}
=item get_specific_list
List all oTask/Activity records within a Company by specified code(s)
B<Parameters>
$company
Company ID
$team
Team ID
$code
Code(s)
B<Return value>
JSON response as a string
=cut
sub get_specific_list {
    my ($self, $company, $team, $code) = @_;

    # Same lookup as get_list, restricted to the given activity code(s).
    return get_by_type($self, $company, $team, $code);
}
=item add_activity
Create an oTask/Activity record within a team
B<Parameters>
$company
Company ID
$team
Team ID
$params
Hash of params
B<Return value>
JSON response as a string
=cut
sub add_activity {
    my ($self, $company, $team, %params) = @_;

    # POST creates a new oTask/Activity record scoped to the team.
    my $uri = "/otask/v1/tasks/companies/${company}/teams/${team}/tasks";
    return $self->client()->post($uri, %params);
}
=item update_activities
Update specific oTask/Activity record within a team
B<Parameters>
$company
Company ID
$team
Team ID
$code
Code
$params
Hash of params
B<Return value>
JSON response as a string
=cut
sub update_activities {
    my ($self, $company, $team, $code, %params) = @_;

    # PUT updates the activity identified by its code within the team.
    my $uri = "/otask/v1/tasks/companies/${company}/teams/${team}/tasks/${code}";
    return $self->client()->put($uri, %params);
}
=item archive_activities
Archive specific oTask/Activity record within a team
B<Parameters>
$company
Company ID
$team
Team ID
$code
Code
B<Return value>
JSON response as a string
=cut
sub archive_activities {
    my ($self, $company, $team, $code) = @_;

    # PUT against the /archive endpoint flags the activity as archived.
    my $uri = "/otask/v1/tasks/companies/${company}/teams/${team}/archive/${code}";
    return $self->client()->put($uri);
}
=item unarchive_activities
Unarchive specific oTask/Activity record within a team
B<Parameters>
$company
Company ID
$team
Team ID
$code
Code
B<Return value>
JSON response as a string
=cut
sub unarchive_activities {
    my ($self, $company, $team, $code) = @_;

    # PUT against the /unarchive endpoint restores an archived activity.
    my $uri = "/otask/v1/tasks/companies/${company}/teams/${team}/unarchive/${code}";
    return $self->client()->put($uri);
}
=item update_batch
Update a group of oTask/Activity records within a company
B<Parameters>
$company
Company ID
$params
Hash of params
B<Return value>
JSON response as a string
=cut
sub update_batch {
    my ($self, $company, %params) = @_;

    # Batch update of activities is scoped to the company, not a team.
    return $self->client()->put("/otask/v1/tasks/companies/${company}/tasks/batch", %params);
}
=item get_by_type
Get by type
B<Parameters>
$company
Company ID
$team
Team ID
$code
Optional, code.
B<Return value>
String
=cut
sub get_by_type {
    my ($self, $company, $team, $code) = @_;

    # Append "/<code>" only when a specific activity code was requested.
    my $suffix = (defined $code && $code ne "") ? "/" . $code : "";
    return $self->client()->get("/otask/v1/tasks/companies/" . $company . "/teams/" . $team . "/tasks" . $suffix);
}
=back
=head1 AUTHOR
Maksym Novozhylov C<< <mnovozhilov@upwork.com> >>
=head1 COPYRIGHT
Copyright E<copy> Upwork Global Corp., 2015
=cut
1;
| upwork/perl-upwork | lib/Net/Upwork/API/Routers/Activities/Team.pm | Perl | apache-2.0 | 4,636 |
=head1 LICENSE
See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
# POD documentation - main docs before the code
=pod
=head1 NAME
Bio::EnsEMBL::Compara::Production::HomologySet
=cut
=head1 SYNOPSIS
An abstract data class for holding an arbitrary collection of
Hhomology objects and providing set operations and cross-reference
operations to compare to another HomologySet object.
=cut
=head1 DESCRIPTION
A 'set' object of Homology objects. Uses Homology::homology_key to identify
unique homologies and Member::stable_id to identify unique genes.
Is used for comparing HomologySet objects with each other and building comparison
matrixes.
Not really a production object, but more an abstract data type for use by
post analysis scripts. Placed in Production since I could not think of a better location.
The design of this object essentially was within the homology_diff.pl script
but has now been formalized into a proper object design.
General use is like:
$homology_set1 = new Bio::EnsEMBL::Compara::Production::HomologySet;
$homology_set1->add(@{$homologyDBA->fetch_all_by_MethodLinkSpeciesSet($mlss1)});
$homology_set2 = new Bio::EnsEMBL::Compara::Production::HomologySet;
$homology_set2->add(@{$homologyDBA->fetch_all_by_MethodLinkSpeciesSet($mlss2)});
$crossref = $homology_set1->crossref_homologies_by_type($homology_set2);
$homology_set1->print_conversion_stats($homology_set2,$crossref);
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 APPENDIX
The rest of the documentation details each of the object methods.
Internal methods are usually preceded with a _
=cut
package Bio::EnsEMBL::Compara::Production::HomologySet;
use strict;
use warnings;
use Bio::EnsEMBL::Compara::Production::GeneSet;
sub new {
    my ($class, @args) = @_;

    # Accept either a class name or an existing object as invocant, so
    # $object->new also creates a fresh, empty set.
    $class = ref($class) if ref($class);

    my $self = bless {}, $class;
    $self->clear;
    return $self;
}
# Reset all internal indexes to empty; also used by new() to initialize
# state.  After clear() the set holds no homologies, genes or type counts.
sub clear {
    my $self = shift;

    $self->{'conversion_hash'} = {};
    # Direct method call instead of the ambiguous indirect-object
    # "new Class" syntax (same behavior, parse-safe).
    $self->{'gene_set'} = Bio::EnsEMBL::Compara::Production::GeneSet->new;
    $self->{'homology_hash'} = {};
    $self->{'gene_to_homologies'} = {};
    $self->{'types'} = {};
}
# Add one or more Homology objects to the set.  A homology whose
# homology_key is already present is silently skipped, keeping the set
# duplicate-free.  Updates the shared GeneSet, the per-description type
# counters and the gene -> homologies reverse index.  Returns $self.
sub add {
    my $self = shift;
    my @homology_list = @_;
    foreach my $homology (@homology_list) {
        next if(defined($self->{'homology_hash'}->{$homology->homology_key}));
        #printf("HomologySet add: %s\n", $homology->homology_key);
        my ($gene1, $gene2) = @{$homology->gene_list};
        $self->{'homology_hash'}->{$homology->homology_key} = $homology;
        # Record both member genes in the shared GeneSet.
        $self->{'gene_set'}->add($gene1);
        $self->{'gene_set'}->add($gene2);
        my $description = $homology->description;
        if ($homology->method_link_species_set){
            # Single-genome MLSS (e.g. within-species paralogies): make the
            # type unique per genome by appending the genome_db dbID.
            if (scalar @{$homology->method_link_species_set->species_set->genome_dbs} == 1) {
                my $gdb = $homology->method_link_species_set->species_set->genome_dbs->[0];
                $description .= "_".$gdb->dbID;
            }
        }
        $self->{'types'}->{$description}++;
        # Maintain the stable_id -> [homologies] reverse index for both genes.
        $self->{'gene_to_homologies'}->{$gene1->stable_id} = []
            unless(defined($self->{'gene_to_homologies'}->{$gene1->stable_id}));
        $self->{'gene_to_homologies'}->{$gene2->stable_id} = []
            unless(defined($self->{'gene_to_homologies'}->{$gene2->stable_id}));
        push @{$self->{'gene_to_homologies'}->{$gene1->stable_id}}, $homology;
        push @{$self->{'gene_to_homologies'}->{$gene2->stable_id}}, $homology;
    }
    return $self;
}
sub merge {
    my ($self, $other_set) = @_;

    # Set union: fold every homology of the other set into this one
    # (add() already ignores duplicates).  Returns $self.
    $self->add( @{ $other_set->list } );
    return $self;
}
### homology types ie description ###
sub types {
    my $self = shift;

    # Arrayref of the distinct homology descriptions seen so far
    # (order unspecified).
    return [ keys %{ $self->{'types'} } ];
}
sub count_for_type {
    my ($self, $type) = @_;

    # Number of homologies recorded with this description; 0 when unseen.
    my $count = $self->{'types'}->{$type};
    return defined($count) ? $count : 0;
}
### homology ###
sub size {
    my $self = shift;

    # Number of unique homologies currently held in the set.
    return scalar( keys %{ $self->{'homology_hash'} } );
}
sub list {
    my $self = shift;

    # Arrayref of all stored homology objects (order unspecified).
    return [ values %{ $self->{'homology_hash'} } ];
}
sub has_homology {
    my ($self, $homology) = @_;

    # 1 when a homology with the same homology_key is stored, else 0.
    return defined( $self->{'homology_hash'}->{ $homology->homology_key } ) ? 1 : 0;
}
sub find_homology_like {
    my ($self, $homology) = @_;

    # The stored homology sharing this homology's key, or undef.
    return $self->{'homology_hash'}->{ $homology->homology_key };
}
# Build a new HomologySet restricted to the homologies that involve at
# least one gene of $gene_set.  The original set is left untouched.
sub subset_containing_genes {
    my $self = shift;
    my $gene_set = shift;

    # Direct method call instead of the ambiguous indirect-object
    # "new Class" syntax (same behavior, parse-safe).
    my $newset = Bio::EnsEMBL::Compara::Production::HomologySet->new;
    foreach my $homology (@{$self->list}) {
        foreach my $gene (@{$homology->gene_list}) {
            if ($gene_set->includes($gene)) {
                # add() ignores duplicates, so a homology whose two genes
                # both match is still stored only once.
                $newset->add($homology);
            }
        }
    }
    return $newset;
}
sub homologies_for_gene {
    my ($self, $gene) = @_;

    # All homologies this gene participates in; empty arrayref when the
    # gene is unknown to the set.
    return $self->{'gene_to_homologies'}->{ $gene->stable_id } || [];
}
# Return the single "best" homology for a gene, where "best" means the
# lowest rank in the caller-supplied type=>rank mapping.
sub best_homology_for_gene {
    my $self = shift;
    my $gene = shift;
    my $ordered_types = shift; #hashref type=>rank

    my $best_homology = undef;
    my $best_rank = undef;
    #print $gene->toString;
    foreach my $homology (@{$self->homologies_for_gene($gene)}) {
        #print $homology->toString;
        my $rank = $ordered_types->{$homology->description};
        # NOTE(review): a description missing from $ordered_types yields an
        # undef $rank; while $best_rank is undef such homologies keep
        # replacing the current best — confirm this fallback is intended.
        if(!defined($best_rank) or ($rank and ($rank<$best_rank))) {
            $best_homology = $homology;
            $best_rank = $rank;
        }
    }
    #if($best_homology) { print "BEST: ", $best_homology->toString; }
    return $best_homology;
}
### gene ###
sub gene_set {
    my $self = shift;

    # Accessor for the GeneSet holding every unique gene of this set.
    return $self->{'gene_set'};
}
### debug printing ###
# Debug/report helper: print a summary of the set to STDOUT — unique gene
# count, unique homology count and a per-type breakdown.
sub print_stats {
    my $self = shift;
    printf("%d unique genes\n", $self->gene_set->size);
    printf("%d unique homologies\n", $self->size);
    foreach my $type (@{$self->types}) {
        printf("%10d : %s\n", $self->count_for_type($type), $type);
    }
}
1;
| Ensembl/ensembl-compara | modules/Bio/EnsEMBL/Compara/Production/HomologySet.pm | Perl | apache-2.0 | 6,672 |
package API::Deliveryservice2;
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
#
# JvD Note: you always want to put Utils as the first use. Sh*t don't work if it's after the Mojo lines.
use UI::Utils;
use UI::DeliveryService;
use Mojo::Base 'Mojolicious::Controller';
use Mojolicious::Validator;
use Mojolicious::Validator::Validation;
use Email::Valid;
use Validate::Tiny ':all';
use Data::Dumper;
use Common::ReturnCodes qw(SUCCESS ERROR);
use JSON;
use MojoPlugins::Response;
use UI::DeliveryService;
use Scalar::Util qw(looks_like_number);
# Allowed cache-server tiers, keyed by lower-case name.
# NOTE(review): neither hash is referenced in the visible part of this
# module — confirm they are still used before removing.
my $valid_server_types = {
    edge => "EDGE",
    mid => "MID",
};
# this structure maps the above types to the allowed metrics below
my $valid_metric_types = {
    origin_tps => "mid",
    ooff => "mid",
};
# GET handler: list the delivery services visible to the current user,
# optionally filtered by 'id' or 'logsEnabled', serialized as an array of
# hashes.  Non-privileged users only see services assigned to them;
# requesting an unassigned id yields a 403.
sub delivery_services {
    my $self = shift;
    my $id = $self->param('id');
    my $logs_enabled = $self->param('logsEnabled');
    my $current_user = $self->current_user()->{username};
    my $rs;
    my $tm_user_id;
    my $forbidden;
    if ( defined($id) || defined($logs_enabled) ) {
        ( $forbidden, $rs, $tm_user_id ) = $self->get_delivery_service_params( $current_user, $id, $logs_enabled );
    }
    else {
        ( $rs, $tm_user_id ) = $self->get_delivery_services_by_user($current_user);
    }
    my @data;
    if ( defined($rs) ) {
        while ( my $row = $rs->next ) {
            # Collect the regex match list attached to this delivery service.
            my $re_rs = $row->deliveryservice_regexes;
            my @matchlist = ();
            while ( my $re_row = $re_rs->next ) {
                push(
                    @matchlist, {
                        type => $re_row->regex->type->name,
                        pattern => $re_row->regex->pattern,
                        setNumber => $re_row->set_number,
                    }
                );
            }
            my $cdn_domain = $self->get_cdn_domain_by_ds_id($row->id);
            my $regexp_set = &UI::DeliveryService::get_regexp_set( $self, $row->id );
            my @example_urls = &UI::DeliveryService::get_example_urls( $self, $row->id, $regexp_set, $row, $cdn_domain, $row->protocol );
            # Scalar refs (e.g. \$row->active) serialize as JSON booleans.
            push(
                @data, {
                    "active" => \$row->active,
                    "cacheurl" => $row->cacheurl,
                    "ccrDnsTtl" => $row->ccr_dns_ttl,
                    "cdnId" => $row->cdn->id,
                    "cdnName" => $row->cdn->name,
                    "checkPath" => $row->check_path,
                    "displayName" => $row->display_name,
                    "dnsBypassCname" => $row->dns_bypass_cname,
                    "dnsBypassIp" => $row->dns_bypass_ip,
                    "dnsBypassIp6" => $row->dns_bypass_ip6,
                    "dnsBypassTtl" => $row->dns_bypass_ttl,
                    "dscp" => $row->dscp,
                    "edgeHeaderRewrite" => $row->edge_header_rewrite,
                    "exampleURLs" => \@example_urls,
                    "geoLimitRedirectURL" => $row->geolimit_redirect_url,
                    "geoLimit" => $row->geo_limit,
                    "geoLimitCountries" => $row->geo_limit_countries,
                    "geoProvider" => $row->geo_provider,
                    "globalMaxMbps" => $row->global_max_mbps,
                    "globalMaxTps" => $row->global_max_tps,
                    "httpBypassFqdn" => $row->http_bypass_fqdn,
                    "id" => $row->id,
                    "infoUrl" => $row->info_url,
                    "initialDispersion" => $row->initial_dispersion,
                    "ipv6RoutingEnabled" => \$row->ipv6_routing_enabled,
                    "lastUpdated" => $row->last_updated,
                    "logsEnabled" => \$row->logs_enabled,
                    "longDesc" => $row->long_desc,
                    "longDesc1" => $row->long_desc_1,
                    "longDesc2" => $row->long_desc_2,
                    "matchList" => \@matchlist,
                    "maxDnsAnswers" => $row->max_dns_answers,
                    "midHeaderRewrite" => $row->mid_header_rewrite,
                    "missLat" => $row->miss_lat,
                    "missLong" => $row->miss_long,
                    "multiSiteOrigin" => \$row->multi_site_origin,
                    # "multiSiteOriginAlgorithm" => $row->multi_site_origin_algorithm,
                    "orgServerFqdn" => $row->org_server_fqdn,
                    "originShield" => $row->origin_shield,
                    "profileId" => $row->profile->id,
                    "profileName" => $row->profile->name,
                    "profileDescription" => $row->profile->description,
                    "protocol" => $row->protocol,
                    "qstringIgnore" => $row->qstring_ignore,
                    "rangeRequestHandling" => $row->range_request_handling,
                    "regexRemap" => $row->regex_remap,
                    "regionalGeoBlocking" => \$row->regional_geo_blocking,
                    "remapText" => $row->remap_text,
                    "signed" => \$row->signed,
                    "sslKeyVersion" => $row->ssl_key_version,
                    "trRequestHeaders" => $row->tr_request_headers,
                    "trResponseHeaders" => $row->tr_response_headers,
                    "type" => $row->type->name,
                    "typeId" => $row->type->id,
                    "xmlId" => $row->xml_id
                }
            );
        }
    }
    return defined($forbidden) ? $self->forbidden() : $self->success( \@data );
}
# Resolve the delivery-service result set for a user: privileged users see
# every DS; other users only those assigned via DeliveryserviceTmuser.
# Returns ($resultset, $tm_user_id) — $tm_user_id stays undef for
# privileged users.
sub get_delivery_services_by_user {
    my $self = shift;
    my $current_user = shift;
    my $tm_user_id;
    my $rs;
    if ( &is_privileged($self) ) {
        $rs = $self->db->resultset('Deliveryservice')->search( undef, { prefetch => [ 'cdn', 'deliveryservice_regexes' ], order_by => 'xml_id' } );
    }
    else {
        my $tm_user = $self->db->resultset('TmUser')->search( { username => $current_user } )->single();
        $tm_user_id = $tm_user->id;
        my @ds_ids = $self->db->resultset('DeliveryserviceTmuser')->search( { tm_user_id => $tm_user_id } )->get_column('deliveryservice')->all();
        $rs = $self->db->resultset('Deliveryservice')
            ->search( { 'me.id' => { -in => \@ds_ids } }, { prefetch => [ 'cdn', 'deliveryservice_regexes' ], order_by => 'xml_id' } );
    }
    return ( $rs, $tm_user_id );
}
# Resolve the result set when a filter ('id' or 'logsEnabled') was given.
# Returns ($forbidden, $resultset, $tm_user_id); $forbidden is set (and the
# other values left undef) when the user is not assigned to the requested DS.
sub get_delivery_service_params {
    my $self = shift;
    my $current_user = shift;
    my $id = shift;
    my $logs_enabled = shift;
    # Convert to 1 or 0
    $logs_enabled = $logs_enabled ? 1 : 0;
    my $tm_user_id;
    my $rs;
    my $forbidden;
    my $condition;
    if ( &is_privileged($self) ) {
        # Prefer the id filter; fall back to the logsEnabled filter.
        if ( defined($id) ) {
            $condition = ( { 'me.id' => $id } );
        }
        else {
            $condition = ( { 'me.logs_enabled' => $logs_enabled } );
        }
        $rs =
            $self->db->resultset('Deliveryservice')->search( $condition, { prefetch => [ 'cdn', 'deliveryservice_regexes' ], order_by => 'xml_id' } );
    }
    elsif ( $self->is_delivery_service_assigned($id) ) {
        # Non-privileged user: limit to the intersection of the requested id
        # and the services assigned to this user.
        my $tm_user = $self->db->resultset('TmUser')->search( { username => $current_user } )->single();
        $tm_user_id = $tm_user->id;
        my @ds_ids =
            $self->db->resultset('DeliveryserviceTmuser')->search( { tm_user_id => $tm_user_id, deliveryservice => $id } )->get_column('deliveryservice')
            ->all();
        $rs =
            $self->db->resultset('Deliveryservice')
            ->search( { 'me.id' => { -in => \@ds_ids } }, { prefetch => [ 'cdn', 'deliveryservice_regexes' ], order_by => 'xml_id' } );
    }
    elsif ( !$self->is_delivery_service_assigned($id) ) {
        $forbidden = "true";
    }
    return ( $forbidden, $rs, $tm_user_id );
}
# Propagate delivery-service level configuration (edge/mid header rewrites,
# regex remap, cacheurl) into profile parameters via the UI helpers.
sub update_profileparameter {
    my $self = shift;
    my $ds_id = shift;
    my $profile_id = shift;
    my $params = shift;
    &UI::DeliveryService::header_rewrite( $self, $ds_id, $profile_id, $params->{xmlId}, $params->{edgeHeaderRewrite}, "edge" );
    &UI::DeliveryService::header_rewrite( $self, $ds_id, $profile_id, $params->{xmlId}, $params->{midHeaderRewrite}, "mid" );
    &UI::DeliveryService::regex_remap( $self, $ds_id, $profile_id, $params->{xmlId}, $params->{regexRemap} );
    &UI::DeliveryService::cacheurl( $self, $ds_id, $profile_id, $params->{xmlId}, $params->{cacheurl} );
}
# POST handler: create a delivery service from the JSON payload.
# Requires operations privileges.  Validates the payload, rejects
# duplicate xml_ids, persists the DS plus its regex match list, pushes
# profile parameters, and generates DNSSEC keys when the CDN requires it.
sub create {
    my $self = shift;
    my $params = $self->req->json;
    if ( !&is_oper($self) ) {
        return $self->forbidden();
    }

    # Validate/normalize the payload; $err carries the first problem found.
    my ( $transformed_params, $err ) = $self->_check_params($params);
    if ( defined($err) ) {
        return $self->alert($err);
    }

    my $existing = $self->db->resultset('Deliveryservice')->search( { xml_id => $params->{xmlId} } )->get_column('xml_id')->single();
    if ( $existing ) {
        # BUG FIX: previously this alert did not return, so execution fell
        # through and created the duplicate delivery service anyway.
        return $self->alert("a delivery service with xmlId " . $params->{xmlId} . " already exists." );
    }

    my $value = $self->new_value($params, $transformed_params);
    my $insert = $self->db->resultset('Deliveryservice')->create($value);
    $insert->insert();
    my $new_id = $insert->id;
    if ( $new_id > 0 ) {
        # Persist each regex of the match list and link it to the new DS.
        my $patterns = $params->{matchList};
        foreach my $re (@$patterns) {
            my $type = $self->db->resultset('Type')->search( { name => $re->{type} } )->get_column('id')->single();
            my $regexp = $re->{pattern};
            my $insert = $self->db->resultset('Regex')->create(
                {
                    pattern => $regexp,
                    type => $type,
                }
            );
            $insert->insert();
            my $new_re_id = $insert->id;
            my $de_re_insert = $self->db->resultset('DeliveryserviceRegex')->create(
                {
                    regex => $new_re_id,
                    deliveryservice => $new_id,
                    set_number => defined($re->{setNumber}) ? $re->{setNumber} : 0,
                }
            );
            $de_re_insert->insert();
        }

        # Push header-rewrite/regex-remap/cacheurl settings to the profile.
        my $profile_id = $transformed_params->{ profile_id };
        $self->update_profileparameter($new_id, $profile_id, $params);

        # Generate DNSSEC keys when the target CDN has DNSSEC enabled.
        my $cdn_rs = $self->db->resultset('Cdn')->search( { id => $transformed_params->{cdn_id} } )->single();
        my $dnssec_enabled = $cdn_rs->dnssec_enabled;
        if ( $dnssec_enabled == 1 ) {
            $self->app->log->debug("dnssec is enabled, creating dnssec keys");
            &UI::DeliveryService::create_dnssec_keys( $self, $cdn_rs->name, $params->{xmlId}, $new_id );
        }
        &log( $self, "Create deliveryservice with xml_id: " . $params->{xmlId}, " APICHANGE" );

        my $response = $self->get_response($new_id);
        return $self->success($response, "Delivery service was created: " . $new_id);
    }

    my $r = "Create Delivery Service fail, insert to database failed.";
    return $self->alert($r);
}
# Return $v when it is defined, otherwise $default.
# BUG FIX: the previous "$v || $default" also replaced defined-but-false
# values (0, ""), which contradicts the sub's name ("no def" -> default)
# and would clobber legitimate zero values; defined-or keeps them.
sub nodef_to_default {
    my $self = shift;
    my $v = shift;
    my $default = shift;

    return $v // $default;
}
sub get_types {
    my ($self, $use_in_table) = @_;

    # Map of type name => type id for all types usable in the given table.
    # Stays undef (not an empty hashref) when no matching rows exist,
    # matching the historical contract relied on by callers.
    my $types;
    my $rs = $self->db->resultset('Type')->search( { use_in_table => $use_in_table } );
    while ( my $row = $rs->next ) {
        $types->{ $row->name } = $row->id;
    }
    return $types;
}
# Validate the create/update JSON payload.  Returns ($transformed_params,
# undef) on success — with normalized/looked-up values (type id, profile
# id, cdn id, boolean flags) — or (undef, $error_message) on the first
# problem found.  $ds_id is optional and only used for regex-conflict
# checks during updates.
sub _check_params {
    my $self = shift;
    my $params = shift;
    my $ds_id = shift;
    my $transformed_params = undef;
    if ( !defined($params) ) {
        return (undef, "parameters should be in json format, please check!");
    }
    if ( !defined($params->{xmlId}) ) {
        return (undef, "parameter xmlId is must." );
    }
    # 'active' accepts 1/0 or the strings "true"/"false".
    if (!defined($params->{active})) {
        return (undef, "parameter active is must." );
    }
    if (looks_like_number($params->{active})) {
        if ($params->{active} == 1) {
            $transformed_params->{active} = 1;
        } elsif ($params->{active} == 0) {
            $transformed_params->{active} = 0;
        } else {
            return (undef, "active must be 1|0");
        }
    } else {
        if ($params->{active} eq "true") {
            $transformed_params->{active} = 1;
        } elsif ($params->{active} eq "false") {
            $transformed_params->{active} = 0;
        } else {
            return (undef, "active must be true|false");
        }
    }
    # 'type' must name an existing deliveryservice-type row; store its id.
    if ( defined($params->{type}) ) {
        my $rs = $self->get_types("deliveryservice");
        if ( !exists $rs->{ $params->{type} } ) {
            return (undef, "type (" . $params->{type} . ") must be deliveryservice type." );
        }
        else {
            $transformed_params->{type} = $rs->{ $params->{type} };
        }
    } else {
        return (undef, "parameter type is must." );
    }
    if (!defined($params->{protocol})) {
        return (undef, "parameter protocol is must." );
    }
    my $proto_num = $params->{protocol};
    if (!looks_like_number($proto_num) || $proto_num < 0 || $proto_num > 3) {
        return (undef, "protocol must be 0|1|2|3." );
    }
    # The profile must exist and belong to the same CDN as 'cdnName'.
    if ( defined($params->{profileName}) ) {
        my $pname = $params->{profileName};
        my $profile = $self->db->resultset('Profile')->search( { 'me.name' => $pname }, { prefetch => ['cdn'] } )->single();
        if ( !defined($profile) || $profile->cdn->name ne $params->{cdnName} ) {
            return (undef, "profileName (" . $params->{profileName} . ") does not exist, or is not on the same CDN as " . $params->{cdnName} );
        }
        else {
            $transformed_params->{ profile_id } = $profile->id;
        }
    } else {
        return (undef, "parameter profileName is must." );
    }
    my $cdn_id = undef;
    if ( defined($params->{cdnName}) ) {
        $cdn_id = $self->db->resultset('Cdn')->search( { name => $params->{cdnName} } )->get_column('id')->single();
        if ( !defined $cdn_id ) {
            return (undef, "cdnName (" . $params->{cdnName} . ") does not exists." );
        } else {
            $transformed_params->{ cdn_id } = $cdn_id;
        }
    } else {
        return (undef, "parameter cdnName is must." );
    }
    # Every host regex in the match list must not collide with a regex
    # already used by another delivery service on the same CDN domain.
    if ( defined($params->{matchList}) ) {
        my $match_list = $params->{matchList};
        if ((scalar $match_list) == 0) {
            return (undef, "At least have 1 pattern in matchList.");
        }
        my $cdn_domain = undef;
        if (defined($ds_id)) {
            $cdn_domain = $self->get_cdn_domain_by_ds_id($ds_id);
        } else {
            my $profile_id = $self->get_profile_id_for_name($params->{profileName});
            $cdn_domain = $self->get_cdn_domain_by_profile_id($profile_id);
        }
        foreach my $match_item (@$match_list) {
            my $conflicting_regex = $self->find_existing_host_regex($match_item->{'type'}, $match_item->{'pattern'}, $cdn_domain, $cdn_id, $ds_id);
            if (defined($conflicting_regex)) {
                return(undef, "Another delivery service is already using host regex $conflicting_regex");
            }
        }
    } else {
        return (undef, "parameter matchList is must." );
    }
    if ( defined($params->{multiSiteOrigin}) ) {
        if ( !( ( $params->{multiSiteOrigin} eq "0" ) || ( $params->{multiSiteOrigin} eq "1" ) ) ) {
            return (undef, "multiSiteOrigin must be 0|1." );
        }
    } else {
        return (undef, "parameter multiSiteOrigin is must." );
    }
    if ( !defined($params->{displayName}) ) {
        return (undef, "parameter displayName is must." );
    }
    if ( defined($params->{orgServerFqdn}) ) {
        if ( $params->{orgServerFqdn} !~ /^https?:\/\// ) {
            return (undef, "orgServerFqdn must start with http(s)://" );
        }
    } else {
        return (undef, "parameter orgServerFqdn is must." );
    }
    # 'logsEnabled' is optional; accepts true/false or 1/0, defaults to 0.
    if ( defined($params->{logsEnabled}) ) {
        if ( $params->{logsEnabled} eq "true" || $params->{logsEnabled} == 1 ) {
            $transformed_params->{logsEnabled} = 1;
        } elsif ( $params->{logsEnabled} eq "false" || $params->{logsEnabled} == 0 ) {
            $transformed_params->{logsEnabled} = 0;
        } else {
            return (undef, "logsEnabled must be true|false." );
        }
    } else {
        $transformed_params->{logsEnabled} = 0;
    }
    return ($transformed_params, undef);
}
# Build the column-name => value hash used to update a Deliveryservice row.
# Input keys arrive in camelCase (JSON API); output keys are snake_case DB
# columns. $transformed_params carries values pre-computed by _check_params.
# Returns a hashref suitable for DBIx::Class ->update()/->create().
sub new_value {
	my ( $self, $params, $transformed_params ) = @_;

	# DB column => camelCase request key, for fields copied through verbatim.
	my %column_for = (
		xml_id                 => 'xmlId',
		display_name           => 'displayName',
		qstring_ignore         => 'qstringIgnore',
		geo_limit              => 'geoLimit',
		geo_limit_countries    => 'geoLimitCountries',
		geolimit_redirect_url  => 'geoLimitRedirectURL',
		geo_provider           => 'geoProvider',
		http_bypass_fqdn       => 'httpBypassFqdn',
		dns_bypass_ip          => 'dnsBypassIp',
		dns_bypass_ip6         => 'dnsBypassIp6',
		dns_bypass_cname       => 'dnsBypassCname',
		dns_bypass_ttl         => 'dnsBypassTtl',
		org_server_fqdn        => 'orgServerFqdn',
		multi_site_origin      => 'multiSiteOrigin',
		ccr_dns_ttl            => 'ccrDnsTtl',
		miss_lat               => 'missLat',
		miss_long              => 'missLong',
		long_desc              => 'longDesc',
		long_desc_1            => 'longDesc1',
		long_desc_2            => 'longDesc2',
		info_url               => 'infoUrl',
		check_path             => 'checkPath',
		protocol               => 'protocol',
		ipv6_routing_enabled   => 'ipv6RoutingEnabled',
		range_request_handling => 'rangeRequestHandling',
		edge_header_rewrite    => 'edgeHeaderRewrite',
		mid_header_rewrite     => 'midHeaderRewrite',
		regex_remap            => 'regexRemap',
		origin_shield          => 'originShield',
		cacheurl               => 'cacheurl',
		remap_text             => 'remapText',
		initial_dispersion     => 'initialDispersion',
		ssl_key_version        => 'sslKeyVersion',
		tr_request_headers     => 'trRequestHeaders',
		tr_response_headers    => 'trResponseHeaders',
	);
	my %value = map { $_ => $params->{ $column_for{$_} } } keys %column_for;

	# Numeric columns that fall back to 0 when the request omits them.
	my %zero_default_for = (
		dscp                  => 'dscp',
		signed                => 'signed',
		global_max_mbps       => 'globalMaxMbps',
		global_max_tps        => 'globalMaxTps',
		max_dns_answers       => 'maxDnsAnswers',
		regional_geo_blocking => 'regionalGeoBlocking',
	);
	while ( my ( $column, $key ) = each %zero_default_for ) {
		$value{$column} = $self->nodef_to_default( $params->{$key}, 0 );
	}

	# Values derived earlier by _check_params().
	$value{type}         = $transformed_params->{type};
	$value{profile}      = $transformed_params->{profile_id};
	$value{cdn_id}       = $transformed_params->{cdn_id};
	$value{active}       = $transformed_params->{active};
	$value{logs_enabled} = $transformed_params->{logsEnabled};

	return \%value;
}
# Assemble the JSON response for one delivery service: the row's columns
# (snake_case) are mapped back to the API's camelCase keys, plus the
# service's matchList of routing regexes.
# NOTE(review): if no Deliveryservice row exists for $ds_id, the returned
# hashref contains only {matchList} -- presumably callers only invoke this
# with a known-good id; confirm before relying on it.
sub get_response {
	my $self = shift;
	my $ds_id = shift;
	my $response;
	my $rs = $self->db->resultset('Deliveryservice')->find( { id => $ds_id } );
	if ( defined($rs) ) {
		# cdn_id is stored as a foreign key; the API reports the CDN's name.
		my $cdn_name = $self->db->resultset('Cdn')->search( { id => $rs->cdn_id } )->get_column('name')->single();
		$response->{id} = $rs->id;
		$response->{xmlId} = $rs->xml_id;
		# Booleans are stored as 0/1 but reported as string "true"/"false".
		$response->{active} = $rs->active==1 ? "true" : "false";
		$response->{dscp} = $rs->dscp;
		$response->{signed} = $rs->signed;
		$response->{qstringIgnore} = $rs->qstring_ignore;
		$response->{geoLimit} = $rs->geo_limit;
		$response->{geoLimitCountries} = $rs->geo_limit_countries;
		$response->{geoLimitRedirectURL} = $rs->geolimit_redirect_url;
		$response->{geoProvider} = $rs->geo_provider;
		$response->{httpBypassFqdn} = $rs->http_bypass_fqdn;
		$response->{dnsBypassIp} = $rs->dns_bypass_ip;
		$response->{dnsBypassIp6} = $rs->dns_bypass_ip6;
		$response->{dnsBypassTtl} = $rs->dns_bypass_ttl;
		$response->{orgServerFqdn} = $rs->org_server_fqdn;
		# type and profile are FK relationships; report their names.
		$response->{type} = $rs->type->name;
		$response->{profileName} = $rs->profile->name;
		$response->{cdnName} = $cdn_name;
		$response->{ccrDnsTtl} = $rs->ccr_dns_ttl;
		$response->{globalMaxMbps} = $rs->global_max_mbps;
		$response->{globalMaxTps} = $rs->global_max_tps;
		$response->{longDesc} = $rs->long_desc;
		$response->{longDesc1} = $rs->long_desc_1;
		$response->{longDesc2} = $rs->long_desc_2;
		$response->{maxDnsAnswers} = $rs->max_dns_answers;
		$response->{infoUrl} = $rs->info_url;
		$response->{missLat} = $rs->miss_lat;
		$response->{missLong} = $rs->miss_long;
		$response->{checkPath} = $rs->check_path;
		$response->{protocol} = $rs->protocol;
		$response->{sslKeyVersion} = $rs->ssl_key_version;
		$response->{ipv6RoutingEnabled} = $rs->ipv6_routing_enabled;
		$response->{rangeRequestHandling} = $rs->range_request_handling;
		$response->{edgeHeaderRewrite} = $rs->edge_header_rewrite;
		$response->{originShield} = $rs->origin_shield;
		$response->{midHeaderRewrite} = $rs->mid_header_rewrite;
		$response->{regexRemap} = $rs->regex_remap;
		$response->{cacheurl} = $rs->cacheurl;
		$response->{remapText} = $rs->remap_text;
		$response->{multiSiteOrigin} = $rs->multi_site_origin;
		$response->{displayName} = $rs->display_name;
		$response->{trResponseHeaders} = $rs->tr_response_headers;
		$response->{initialDispersion} = $rs->initial_dispersion;
		$response->{dnsBypassCname} = $rs->dns_bypass_cname;
		$response->{regionalGeoBlocking} = $rs->regional_geo_blocking;
		$response->{trRequestHeaders} = $rs->tr_request_headers;
		$response->{logsEnabled} = $rs->logs_enabled==1 ? "true" : "false";
	}
	# Collect the service's routing regexes (pattern/type/setNumber triples).
	my @pats = ();
	$rs = $self->db->resultset('DeliveryserviceRegex')->search( { deliveryservice => $ds_id } );
	while ( my $row = $rs->next ) {
		push(
			@pats, {
				'pattern' => $row->regex->pattern,
				'type' => $row->regex->type->name,
				'setNumber' => $row->set_number,
			}
		);
	}
	$response->{matchList} = \@pats;
	return $response;
}
# PUT handler: update delivery service :id from the JSON request body.
# Validates the payload, updates the Deliveryservice row, synchronizes the
# routing-regex rows with the submitted matchList, refreshes the profile
# parameters, and returns the freshly re-read service as the response.
sub update {
	my $self   = shift;
	my $id     = $self->param('id');
	my $params = $self->req->json;

	# Only operators may modify delivery services.
	if ( !&is_oper($self) ) {
		return $self->forbidden();
	}

	my $ds = $self->db->resultset('Deliveryservice')->find( { id => $id } );
	if ( !defined($ds) ) {
		return $self->not_found();
	}

	# Validate and normalize the payload; $err is set on any violation.
	my ( $transformed_params, $err ) = $self->_check_params( $params, $id );
	if ( defined($err) ) {
		return $self->alert($err);
	}

	# An xmlId may not be taken over from a different delivery service.
	my $existing = $self->db->resultset('Deliveryservice')->search( { xml_id => $params->{xmlId} } )->get_column('xml_id')->single();
	if ( $existing && $existing ne $ds->xml_id ) {
		# BUG FIX: the alert used to be raised without "return", so the
		# conflicting update was still applied below.
		return $self->alert( "a delivery service with xmlId " . $params->{xmlId} . " already exists." );
	}

	# The service type is immutable after creation.
	if ( $transformed_params->{type} != $ds->type->id ) {
		return $self->alert("delivery service type can't be changed");
	}

	my $value = $self->new_value( $params, $transformed_params );
	$ds->update($value);

	# Synchronize the stored regexes with the submitted matchList:
	# overwrite rows both lists share, insert surplus patterns, and delete
	# rows beyond the new list's length.
	if ( defined( $params->{matchList} ) ) {
		my $patterns     = $params->{matchList};
		my $patterns_len = @$patterns;
		my $rs           = $self->db->resultset('RegexesForDeliveryService')->search( {}, { bind => [$id] } );
		my $last_number  = $rs->count;
		my $row          = $rs->next;
		my $update_number;
		my $re;

		# Overwrite the regexes both lists have in common.
		for ( $update_number = 0; $update_number < $last_number && $update_number < $patterns_len; $update_number++ ) {
			$re = @$patterns[$update_number];
			my $type = $self->db->resultset('Type')->search( { name => $re->{type} } )->get_column('id')->single();
			my $update = $self->db->resultset('Regex')->find( { id => $row->id } );
			$update->update(
				{
					pattern => $re->{pattern},
					type    => $type,
				}
			);
			$update = $self->db->resultset('DeliveryserviceRegex')->find( { deliveryservice => $id, regex => $row->id } );
			$update->update( { set_number => defined( $re->{setNumber} ) ? $re->{setNumber} : 0 } );
			$row = $rs->next;
		}

		# More patterns submitted than stored: create the extras.
		if ( $patterns_len > $last_number ) {
			for ( ; $update_number < $patterns_len; $update_number++ ) {
				$re = @$patterns[$update_number];
				my $type = $self->db->resultset('Type')->search( { name => $re->{type} } )->get_column('id')->single();
				my $insert = $self->db->resultset('Regex')->create(
					{
						pattern => $re->{pattern},
						type    => $type,
					}
				);
				$insert->insert();
				my $new_re_id    = $insert->id;
				my $de_re_insert = $self->db->resultset('DeliveryserviceRegex')->create(
					{
						regex           => $new_re_id,
						deliveryservice => $id,
						set_number      => defined( $re->{setNumber} ) ? $re->{setNumber} : 0,
					}
				);
				$de_re_insert->insert();
			}
		}

		# Fewer patterns submitted than stored: remove the leftovers.
		# NOTE(review): only the Regex rows are deleted here; the
		# DeliveryserviceRegex link rows are presumably removed by a
		# cascading foreign key -- confirm against the schema.
		while ($row) {
			my $delete_re = $self->db->resultset('Regex')->search( { id => $row->id } );
			$delete_re->delete();
			$row = $rs->next;
		}
	}

	my $profile_id = $transformed_params->{profile_id};
	$self->update_profileparameter( $id, $profile_id, $params );
	&log( $self, "Update deliveryservice with xml_id: " . $params->{xmlId}, " APICHANGE" );
	my $response = $self->get_response($id);
	return $self->success( $response, "Delivery service was updated: " . $id );
}
1;
| naamashoresh/incubator-trafficcontrol | traffic_ops/app/lib/API/Deliveryservice2.pm | Perl | apache-2.0 | 24,503 |
# Copyright (c) 2015 Timm Murray
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Marker role for servers that run on board a UAV (see POD below).
# It declares no methods or attributes of its own; consuming classes simply
# compose it via Moose's "with".
package UAV::Pilot::Server;
use v5.14;
use Moose::Role;
1;
__END__
=head1 NAME
UAV::Pilot::Server
=head1 DESCRIPTION
Role for servers running on board a UAV.
=cut
| frezik/UAV-Pilot | lib/UAV/Pilot/Server.pm | Perl | bsd-2-clause | 1,546 |
#!/usr/bin/env perl
#
# ====================================================================
# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
# project. The module is, however, dual licensed under OpenSSL and
# CRYPTOGAMS licenses depending on where you obtain it. For further
# details see http://www.openssl.org/~appro/cryptogams/.
# ====================================================================
#
# March, June 2010
#
# The module implements "4-bit" GCM GHASH function and underlying
# single multiplication operation in GF(2^128). "4-bit" means that
# it uses 256 bytes per-key table [+128 bytes shared table]. GHASH
# function features so called "528B" variant utilizing additional
# 256+16 bytes of per-key storage [+512 bytes shared table].
# Performance results are for this streamed GHASH subroutine and are
# expressed in cycles per processed byte, less is better:
#
# gcc 3.4.x(*) assembler
#
# P4 28.6 14.0 +100%
# Opteron 19.3 7.7 +150%
# Core2 17.8 8.1(**) +120%
# Atom 31.6 16.8 +88%
# VIA Nano 21.8 10.1 +115%
#
# (*) comparison is not completely fair, because C results are
# for vanilla "256B" implementation, while assembler results
# are for "528B";-)
# (**) it's mystery [to me] why Core2 result is not same as for
# Opteron;
# May 2010
#
# Add PCLMULQDQ version performing at 2.02 cycles per processed byte.
# See ghash-x86.pl for background information and details about coding
# techniques.
#
# Special thanks to David Woodhouse <dwmw2@infradead.org> for
# providing access to a Westmere-based system on behalf of Intel
# Open Source Technology Centre.
# December 2012
#
# Overhaul: aggregate Karatsuba post-processing, improve ILP in
# reduction_alg9, increase reduction aggregate factor to 4x. As for
# the latter. ghash-x86.pl discusses that it makes lesser sense to
# increase aggregate factor. Then why increase here? Critical path
# consists of 3 independent pclmulqdq instructions, Karatsuba post-
# processing and reduction. "On top" of this we lay down aggregated
# multiplication operations, triplets of independent pclmulqdq's. As
# issue rate for pclmulqdq is limited, it makes lesser sense to
# aggregate more multiplications than it takes to perform remaining
# non-multiplication operations. 2x is near-optimal coefficient for
# contemporary Intel CPUs (therefore modest improvement coefficient),
# but not for Bulldozer. Latter is because logical SIMD operations
# are twice as slow in comparison to Intel, so that critical path is
# longer. A CPU with higher pclmulqdq issue rate would also benefit
# from higher aggregate factor...
#
# Westmere 1.78(+13%)
# Sandy Bridge 1.80(+8%)
# Ivy Bridge 1.80(+7%)
# Haswell 0.55(+93%) (if system doesn't support AVX)
# Broadwell 0.45(+110%)(if system doesn't support AVX)
# Bulldozer 1.49(+27%)
# Silvermont 2.88(+13%)
# March 2013
#
# ... 8x aggregate factor AVX code path is using reduction algorithm
# suggested by Shay Gueron[1]. Even though contemporary AVX-capable
# CPUs such as Sandy and Ivy Bridge can execute it, the code performs
# sub-optimally in comparison to above mentioned version. But thanks
# to Ilya Albrekht and Max Locktyukhin of Intel Corp. we knew that
# it performs in 0.41 cycles per byte on Haswell processor, and in
# 0.29 on Broadwell.
#
# [1] http://rt.openssl.org/Ticket/Display.html?id=2900&user=guest&pass=guest
# Standard perlasm preamble: flavour (elf/macosx/mingw64/nasm/...) and the
# output path may arrive in either order on the command line.
$flavour = shift;
$output = shift;
if ($flavour =~ /\./) { $output = $flavour; undef $flavour; }
# Win64 needs SEH unwind data and a different calling convention.
$win64=0; $win64=1 if ($flavour =~ /[nm]asm|mingw64/ || $output =~ /\.asm$/);
# Locate the x86_64-xlate.pl translator relative to this script.
$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
( $xlate="${dir}x86_64-xlate.pl" and -f $xlate ) or
( $xlate="${dir}../../perlasm/x86_64-xlate.pl" and -f $xlate) or
die "can't locate x86_64-xlate.pl";
# This must be kept in sync with |$avx| in aesni-gcm-x86_64.pl; otherwise tags
# will be computed incorrectly.
#
# In upstream, this is controlled by shelling out to the compiler to check
# versions, but BoringSSL is intended to be used with pre-generated perlasm
# output, so this isn't useful anyway.
$avx = 0;
# Pipe all generated code through the translator; it writes the final file.
open OUT,"| \"$^X\" $xlate $flavour $output";
*STDOUT=*OUT;
$do4xaggr=1;
# common register layout
# Perl variables naming the x86-64 registers used by the 4-bit code paths,
# interpolated into the assembly heredocs below.
$nlo="%rax";
$nhi="%rbx";
$Zlo="%r8";
$Zhi="%r9";
$tmp="%r10";
$rem_4bit = "%r11";
$Xi="%rdi";
$Htbl="%rsi";
# per-function register layout
$cnt="%rcx";
$rem="%rdx";
# Map a 64/32-bit general-purpose register name to its low-byte register,
# e.g. "%rax" -> "%al", "%esi" -> "%sil", "%rbp" -> "%bpl", "%r10d" -> "%r10b".
# Returns the input unchanged if no rule matches.
# FIX: use ${1} instead of the deprecated \1 in the replacement text
# ("\1 better written as $1" under warnings); behavior is unchanged.
sub LB() { my $r=shift; $r =~ s/%[er]([a-d])x/%${1}l/	or
			$r =~ s/%[er]([sd]i)/%${1}l/	or
			$r =~ s/%[er](bp)/%${1}l/	or
			$r =~ s/%(r[0-9]+)[d]?/%${1}b/;	$r; }
# Any call to an undefined sub (e.g. &mov, &xor) lands here and is turned
# into one line of assembly appended to $code: the sub's name becomes the
# mnemonic and the arguments (last arg first, i.e. AT&T destination-last
# order reversed) become the operands. Bare numbers get a "$" prefix.
sub AUTOLOAD()		# thunk [simplified] 32-bit style perlasm
{ my $opcode = $AUTOLOAD; $opcode =~ s/.*:://;
  my $arg = pop;
    $arg = "\$$arg" if ($arg*1 eq $arg);
  $code .= "\t$opcode\t".join(',',$arg,reverse @_)."\n";
}
# Emit one unrolled pass of the 4-bit GHASH table lookup over the 16 bytes
# at $inp. $N makes the emitted labels (.Loop$N/.Lbreak$N) unique per call.
# NOTE: the heredoc text below is emitted assembly and must not be edited
# casually -- instruction order is performance- and correctness-critical.
{ my $N;
  sub loop() {
  my $inp = shift;
	$N++;
$code.=<<___;
	xor	$nlo,$nlo
	xor	$nhi,$nhi
	mov	`&LB("$Zlo")`,`&LB("$nlo")`
	mov	`&LB("$Zlo")`,`&LB("$nhi")`
	shl	\$4,`&LB("$nlo")`
	mov	\$14,$cnt
	mov	8($Htbl,$nlo),$Zlo
	mov	($Htbl,$nlo),$Zhi
	and	\$0xf0,`&LB("$nhi")`
	mov	$Zlo,$rem
	jmp	.Loop$N
.align	16
.Loop$N:
	shr	\$4,$Zlo
	and	\$0xf,$rem
	mov	$Zhi,$tmp
	mov	($inp,$cnt),`&LB("$nlo")`
	shr	\$4,$Zhi
	xor	8($Htbl,$nhi),$Zlo
	shl	\$60,$tmp
	xor	($Htbl,$nhi),$Zhi
	mov	`&LB("$nlo")`,`&LB("$nhi")`
	xor	($rem_4bit,$rem,8),$Zhi
	mov	$Zlo,$rem
	shl	\$4,`&LB("$nlo")`
	xor	$tmp,$Zlo
	dec	$cnt
	js	.Lbreak$N
	shr	\$4,$Zlo
	and	\$0xf,$rem
	mov	$Zhi,$tmp
	shr	\$4,$Zhi
	xor	8($Htbl,$nlo),$Zlo
	shl	\$60,$tmp
	xor	($Htbl,$nlo),$Zhi
	and	\$0xf0,`&LB("$nhi")`
	xor	($rem_4bit,$rem,8),$Zhi
	mov	$Zlo,$rem
	xor	$tmp,$Zlo
	jmp	.Loop$N
.align	16
.Lbreak$N:
	shr	\$4,$Zlo
	and	\$0xf,$rem
	mov	$Zhi,$tmp
	shr	\$4,$Zhi
	xor	8($Htbl,$nlo),$Zlo
	shl	\$60,$tmp
	xor	($Htbl,$nlo),$Zhi
	and	\$0xf0,`&LB("$nhi")`
	xor	($rem_4bit,$rem,8),$Zhi
	mov	$Zlo,$rem
	xor	$tmp,$Zlo
	shr	\$4,$Zlo
	and	\$0xf,$rem
	mov	$Zhi,$tmp
	shr	\$4,$Zhi
	xor	8($Htbl,$nhi),$Zlo
	shl	\$60,$tmp
	xor	($Htbl,$nhi),$Zhi
	xor	$tmp,$Zlo
	xor	($rem_4bit,$rem,8),$Zhi
	bswap	$Zlo
	bswap	$Zhi
___
}}
# Emit gcm_gmult_4bit(u64 Xi[2], const u128 Htable[16]): one 4-bit table
# multiplication of Xi by H, using the &loop() helper above.
$code=<<___;
.text
#include <boringssl/bssl.h>
.extern	OPENSSL_ia32cap_P
.globl	gcm_gmult_4bit
.type	gcm_gmult_4bit,\@function,2
.align	16
gcm_gmult_4bit:
	push	%rbx
	push	%rbp		# %rbp and %r12 are pushed exclusively in
	push	%r12		# order to reuse Win64 exception handler...
.Lgmult_prologue:
	movzb	15($Xi),$Zlo
	lea	.Lrem_4bit(%rip),$rem_4bit
___
	&loop	($Xi);
$code.=<<___;
	mov	$Zlo,8($Xi)
	mov	$Zhi,($Xi)
	mov	16(%rsp),%rbx
	lea	24(%rsp),%rsp
.Lgmult_epilogue:
	ret
.size	gcm_gmult_4bit,.-gcm_gmult_4bit
___
# per-function register layout
$inp="%rdx";
$len="%rcx";
$rem_8bit=$rem_4bit;
# Emit gcm_ghash_4bit(Xi, Htable, inp, len): streamed "528B" GHASH. The
# prologue below builds a shifted copy of Htable on the stack, then the
# Perl loops unroll the per-byte lookups into straight-line assembly.
$code.=<<___;
.globl	gcm_ghash_4bit
.type	gcm_ghash_4bit,\@function,4
.align	16
gcm_ghash_4bit:
	push	%rbx
	push	%rbp
	push	%r12
	push	%r13
	push	%r14
	push	%r15
	sub	\$280,%rsp
.Lghash_prologue:
	mov	$inp,%r14		# reassign couple of args
	mov	$len,%r15
___
{ my $inp="%r14";
  my $dat="%edx";
  my $len="%r15";
  my @nhi=("%ebx","%ecx");
  my @rem=("%r12","%r13");
  my $Hshr4="%rbp";
	# Pre-shift the Htbl entries by 4 bits into a 16+128-byte stack area
	# ($Hshr4) so the inner loop can combine two nibble lookups per byte.
	&sub	($Htbl,-128);		# size optimization
	&lea	($Hshr4,"16+128(%rsp)");
	{ my @lo =($nlo,$nhi);
	  my @hi =($Zlo,$Zhi);
	  &xor	($dat,$dat);
	  for ($i=0,$j=-2;$i<18;$i++,$j++) {
	    &mov	("$j(%rsp)",&LB($dat))		if ($i>1);
	    &or	($lo[0],$tmp)			if ($i>1);
	    &mov	(&LB($dat),&LB($lo[1]))		if ($i>0 && $i<17);
	    &shr	($lo[1],4)			if ($i>0 && $i<17);
	    &mov	($tmp,$hi[1])			if ($i>0 && $i<17);
	    &shr	($hi[1],4)			if ($i>0 && $i<17);
	    &mov	("8*$j($Hshr4)",$hi[0])		if ($i>1);
	    &mov	($hi[0],"16*$i+0-128($Htbl)")	if ($i<16);
	    &shl	(&LB($dat),4)			if ($i>0 && $i<17);
	    &mov	("8*$j-128($Hshr4)",$lo[0])	if ($i>1);
	    &mov	($lo[0],"16*$i+8-128($Htbl)")	if ($i<16);
	    &shl	($tmp,60)			if ($i>0 && $i<17);
	    push	(@lo,shift(@lo));
	    push	(@hi,shift(@hi));
	  }
	}
	&add	($Htbl,-128);
	&mov	($Zlo,"8($Xi)");
	&mov	($Zhi,"0($Xi)");
	&add	($len,$inp);		# pointer to the end of data
	&lea	($rem_8bit,".Lrem_8bit(%rip)");
	&jmp	(".Louter_loop");
$code.=".align	16\n.Louter_loop:\n";
	# One 16-byte block per outer iteration: fold the block into Xi and
	# run the unrolled 4-bit multiply below.
	&xor	($Zhi,"($inp)");
	&mov	("%rdx","8($inp)");
	&lea	($inp,"16($inp)");
	&xor	("%rdx",$Zlo);
	&mov	("($Xi)",$Zhi);
	&mov	("8($Xi)","%rdx");
	&shr	("%rdx",32);
	&xor	($nlo,$nlo);
	&rol	($dat,8);
	&mov	(&LB($nlo),&LB($dat));
	&movz	($nhi[0],&LB($dat));
	&shl	(&LB($nlo),4);
	&shr	($nhi[0],4);
	for ($j=11,$i=0;$i<15;$i++) {
	    &rol	($dat,8);
	    &xor	($Zlo,"8($Htbl,$nlo)")		if ($i>0);
	    &xor	($Zhi,"($Htbl,$nlo)")		if ($i>0);
	    &mov	($Zlo,"8($Htbl,$nlo)")		if ($i==0);
	    &mov	($Zhi,"($Htbl,$nlo)")		if ($i==0);
	    &mov	(&LB($nlo),&LB($dat));
	    &xor	($Zlo,$tmp)			if ($i>0);
	    &movzw	($rem[1],"($rem_8bit,$rem[1],2)")	if ($i>0);
	    &movz	($nhi[1],&LB($dat));
	    &shl	(&LB($nlo),4);
	    &movzb	($rem[0],"(%rsp,$nhi[0])");
	    &shr	($nhi[1],4)			if ($i<14);
	    &and	($nhi[1],0xf0)			if ($i==14);
	    &shl	($rem[1],48)			if ($i>0);
	    &xor	($rem[0],$Zlo);
	    &mov	($tmp,$Zhi);
	    &xor	($Zhi,$rem[1])			if ($i>0);
	    &shr	($Zlo,8);
	    &movz	($rem[0],&LB($rem[0]));
	    &mov	($dat,"$j($Xi)")		if (--$j%4==0);
	    &shr	($Zhi,8);
	    &xor	($Zlo,"-128($Hshr4,$nhi[0],8)");
	    &shl	($tmp,56);
	    &xor	($Zhi,"($Hshr4,$nhi[0],8)");
	    unshift	(@nhi,pop(@nhi));		# "rotate" registers
	    unshift	(@rem,pop(@rem));
	}
	&movzw	($rem[1],"($rem_8bit,$rem[1],2)");
	&xor	($Zlo,"8($Htbl,$nlo)");
	&xor	($Zhi,"($Htbl,$nlo)");
	&shl	($rem[1],48);
	&xor	($Zlo,$tmp);
	&xor	($Zhi,$rem[1]);
	&movz	($rem[0],&LB($Zlo));
	&shr	($Zlo,4);
	&mov	($tmp,$Zhi);
	&shl	(&LB($rem[0]),4);
	&shr	($Zhi,4);
	&xor	($Zlo,"8($Htbl,$nhi[0])");
	&movzw	($rem[0],"($rem_8bit,$rem[0],2)");
	&shl	($tmp,60);
	&xor	($Zhi,"($Htbl,$nhi[0])");
	&xor	($Zlo,$tmp);
	&shl	($rem[0],48);
	&bswap	($Zlo);
	&xor	($Zhi,$rem[0]);
	&bswap	($Zhi);
	&cmp	($inp,$len);
	&jb	(".Louter_loop");
}
$code.=<<___;
	mov	$Zlo,8($Xi)
	mov	$Zhi,($Xi)
	lea	280(%rsp),%rsi
	mov	0(%rsi),%r15
	mov	8(%rsi),%r14
	mov	16(%rsi),%r13
	mov	24(%rsi),%r12
	mov	32(%rsi),%rbp
	mov	40(%rsi),%rbx
	lea	48(%rsi),%rsp
.Lghash_epilogue:
	ret
.size	gcm_ghash_4bit,.-gcm_ghash_4bit
___
######################################################################
# PCLMULQDQ version.
# Argument registers differ between the Win64 and System V ABIs.
@_4args=$win64?	("%rcx","%rdx","%r8", "%r9") :	# Win64 order
		("%rdi","%rsi","%rdx","%rcx");	# Unix order
# XMM register names used by the carry-less-multiply code paths.
($Xi,$Xhi)=("%xmm0","%xmm1");	$Hkey="%xmm2";
($T1,$T2,$T3)=("%xmm3","%xmm4","%xmm5");
# Emit a 128x128-bit carry-less multiply ($Xhi:$Xi) = $Xi * $Hkey using
# three pclmulqdq's (Karatsuba). If $HK (precomputed Hkey.lo^Hkey.hi) is
# not supplied, it is computed into $T2 first.
sub clmul64x64_T2 {	# minimal register pressure
my ($Xhi,$Xi,$Hkey,$HK)=@_;
if (!defined($HK)) {	$HK = $T2;
$code.=<<___;
	movdqa	$Xi,$Xhi		#
	pshufd	\$0b01001110,$Xi,$T1
	pshufd	\$0b01001110,$Hkey,$T2
	pxor	$Xi,$T1			#
	pxor	$Hkey,$T2
___
} else {
$code.=<<___;
	movdqa	$Xi,$Xhi		#
	pshufd	\$0b01001110,$Xi,$T1
	pxor	$Xi,$T1			#
___
}
$code.=<<___;
	pclmulqdq	\$0x00,$Hkey,$Xi	#######
	pclmulqdq	\$0x11,$Hkey,$Xhi	#######
	pclmulqdq	\$0x00,$HK,$T1		#######
	pxor	$Xi,$T1			#
	pxor	$Xhi,$T1		#
	movdqa	$T1,$T2			#
	psrldq	\$8,$T1
	pslldq	\$8,$T2			#
	pxor	$T1,$Xhi
	pxor	$T2,$Xi			#
___
}
# Emit the modular reduction of a 256-bit product ($Xhi:$Xi) back to
# 128 bits modulo the GHASH polynomial, in two shift/xor phases.
sub reduction_alg9 {	# 17/11 times faster than Intel version
my ($Xhi,$Xi) = @_;
$code.=<<___;
	# 1st phase
	movdqa	$Xi,$T2			#
	movdqa	$Xi,$T1
	psllq	\$5,$Xi
	pxor	$Xi,$T1			#
	psllq	\$1,$Xi
	pxor	$T1,$Xi			#
	psllq	\$57,$Xi		#
	movdqa	$Xi,$T1			#
	pslldq	\$8,$Xi
	psrldq	\$8,$T1			#
	pxor	$T2,$Xi
	pxor	$T1,$Xhi		#
	# 2nd phase
	movdqa	$Xi,$T2
	psrlq	\$1,$Xi
	pxor	$T2,$Xhi		#
	pxor	$Xi,$T2
	psrlq	\$5,$Xi
	pxor	$T2,$Xi			#
	psrlq	\$1,$Xi			#
	pxor	$Xhi,$Xi		#
___
}
# Emit gcm_init_clmul(Htbl, Xi): derive H<<1 from the raw hash subkey and
# precompute H, H^2 (and H^3, H^4 when $do4xaggr) plus their Karatsuba
# "salt" values into Htbl for the ghash/gmult routines below.
{ my ($Htbl,$Xip)=@_4args;
  my $HK="%xmm6";
$code.=<<___;
.globl	gcm_init_clmul
.type	gcm_init_clmul,\@abi-omnipotent
.align	16
gcm_init_clmul:
.L_init_clmul:
___
$code.=<<___ if ($win64);
.LSEH_begin_gcm_init_clmul:
	# I can't trust assembler to use specific encoding:-(
	.byte	0x48,0x83,0xec,0x18		#sub	$0x18,%rsp
	.byte	0x0f,0x29,0x34,0x24		#movaps	%xmm6,(%rsp)
___
$code.=<<___;
	movdqu	($Xip),$Hkey
	pshufd	\$0b01001110,$Hkey,$Hkey	# dword swap
	# <<1 twist
	pshufd	\$0b11111111,$Hkey,$T2	# broadcast uppermost dword
	movdqa	$Hkey,$T1
	psllq	\$1,$Hkey
	pxor	$T3,$T3			#
	psrlq	\$63,$T1
	pcmpgtd	$T2,$T3			# broadcast carry bit
	pslldq	\$8,$T1
	por	$T1,$Hkey		# H<<=1
	# magic reduction
	pand	.L0x1c2_polynomial(%rip),$T3
	pxor	$T3,$Hkey		# if(carry) H^=0x1c2_polynomial
	# calculate H^2
	pshufd	\$0b01001110,$Hkey,$HK
	movdqa	$Hkey,$Xi
	pxor	$Hkey,$HK
___
	&clmul64x64_T2	($Xhi,$Xi,$Hkey,$HK);
	&reduction_alg9	($Xhi,$Xi);
$code.=<<___;
	pshufd	\$0b01001110,$Hkey,$T1
	pshufd	\$0b01001110,$Xi,$T2
	pxor	$Hkey,$T1		# Karatsuba pre-processing
	movdqu	$Hkey,0x00($Htbl)	# save H
	pxor	$Xi,$T2			# Karatsuba pre-processing
	movdqu	$Xi,0x10($Htbl)		# save H^2
	palignr	\$8,$T1,$T2		# low part is H.lo^H.hi...
	movdqu	$T2,0x20($Htbl)		# save Karatsuba "salt"
___
if ($do4xaggr) {
	# Extend the table with H^3 and H^4 for the 4x aggregated loop.
	&clmul64x64_T2	($Xhi,$Xi,$Hkey,$HK);	# H^3
	&reduction_alg9	($Xhi,$Xi);
$code.=<<___;
	movdqa	$Xi,$T3
___
	&clmul64x64_T2	($Xhi,$Xi,$Hkey,$HK);	# H^4
	&reduction_alg9	($Xhi,$Xi);
$code.=<<___;
	pshufd	\$0b01001110,$T3,$T1
	pshufd	\$0b01001110,$Xi,$T2
	pxor	$T3,$T1			# Karatsuba pre-processing
	movdqu	$T3,0x30($Htbl)		# save H^3
	pxor	$Xi,$T2			# Karatsuba pre-processing
	movdqu	$Xi,0x40($Htbl)		# save H^4
	palignr	\$8,$T1,$T2		# low part is H^3.lo^H^3.hi...
	movdqu	$T2,0x50($Htbl)		# save Karatsuba "salt"
___
}
$code.=<<___ if ($win64);
	movaps	(%rsp),%xmm6
	lea	0x18(%rsp),%rsp
.LSEH_end_gcm_init_clmul:
___
$code.=<<___;
	ret
.size	gcm_init_clmul,.-gcm_init_clmul
___
}
# Emit gcm_gmult_clmul(Xi, Htbl): a single Xi *= H using PCLMULQDQ.
# NOTE(review): the "if (0 || (&reduction_alg9(...)&&0))" below is a
# deliberate trick -- the call appends the reduction code to $code as a
# side effect, while the heredoc guarded by the always-false condition
# (an unused experimental variant) is discarded.
{ my ($Xip,$Htbl)=@_4args;
$code.=<<___;
.globl	gcm_gmult_clmul
.type	gcm_gmult_clmul,\@abi-omnipotent
.align	16
gcm_gmult_clmul:
.L_gmult_clmul:
	movdqu	($Xip),$Xi
	movdqa	.Lbswap_mask(%rip),$T3
	movdqu	($Htbl),$Hkey
	movdqu	0x20($Htbl),$T2
	pshufb	$T3,$Xi
___
	&clmul64x64_T2	($Xhi,$Xi,$Hkey,$T2);
$code.=<<___ if (0 || (&reduction_alg9($Xhi,$Xi)&&0));
	# experimental alternative. special thing about is that there
	# no dependency between the two multiplications...
	mov	\$`0xE1<<1`,%eax
	mov	\$0xA040608020C0E000,%r10	# ((7..0)·0xE0)&0xff
	mov	\$0x07,%r11d
	movq	%rax,$T1
	movq	%r10,$T2
	movq	%r11,$T3		# borrow $T3
	pand	$Xi,$T3
	pshufb	$T3,$T2			# ($Xi&7)·0xE0
	movq	%rax,$T3
	pclmulqdq	\$0x00,$Xi,$T1	# ·(0xE1<<1)
	pxor	$Xi,$T2
	pslldq	\$15,$T2
	paddd	$T2,$T2			# <<(64+56+1)
	pxor	$T2,$Xi
	pclmulqdq	\$0x01,$T3,$Xi
	movdqa	.Lbswap_mask(%rip),$T3	# reload $T3
	psrldq	\$1,$T1
	pxor	$T1,$Xhi
	pslldq	\$7,$Xi
	pxor	$Xhi,$Xi
___
$code.=<<___;
	pshufb	$T3,$Xi
	movdqu	$Xi,($Xip)
	ret
.size	gcm_gmult_clmul,.-gcm_gmult_clmul
___
}
# Emit gcm_ghash_clmul(Xi, Htbl, inp, len): streamed GHASH with PCLMULQDQ.
# When $do4xaggr, blocks are processed four at a time using H..H^4 from
# Htbl (skipped on CPUs advertising MOVBE without XSAVE); a 2x aggregated
# loop and single-block tails handle the remainder.
# NOTE: the heredocs are emitted assembly; instruction scheduling is
# deliberate -- do not reorder.
{ my ($Xip,$Htbl,$inp,$len)=@_4args;
  my ($Xln,$Xmn,$Xhn,$Hkey2,$HK) = map("%xmm$_",(3..7));
  my ($T1,$T2,$T3)=map("%xmm$_",(8..10));
$code.=<<___;
.globl	gcm_ghash_clmul
.type	gcm_ghash_clmul,\@abi-omnipotent
.align	32
gcm_ghash_clmul:
.L_ghash_clmul:
___
$code.=<<___ if ($win64);
	lea	-0x88(%rsp),%rax
.LSEH_begin_gcm_ghash_clmul:
	# I can't trust assembler to use specific encoding:-(
	.byte	0x48,0x8d,0x60,0xe0		#lea	-0x20(%rax),%rsp
	.byte	0x0f,0x29,0x70,0xe0		#movaps	%xmm6,-0x20(%rax)
	.byte	0x0f,0x29,0x78,0xf0		#movaps	%xmm7,-0x10(%rax)
	.byte	0x44,0x0f,0x29,0x00		#movaps	%xmm8,0(%rax)
	.byte	0x44,0x0f,0x29,0x48,0x10	#movaps	%xmm9,0x10(%rax)
	.byte	0x44,0x0f,0x29,0x50,0x20	#movaps	%xmm10,0x20(%rax)
	.byte	0x44,0x0f,0x29,0x58,0x30	#movaps	%xmm11,0x30(%rax)
	.byte	0x44,0x0f,0x29,0x60,0x40	#movaps	%xmm12,0x40(%rax)
	.byte	0x44,0x0f,0x29,0x68,0x50	#movaps	%xmm13,0x50(%rax)
	.byte	0x44,0x0f,0x29,0x70,0x60	#movaps	%xmm14,0x60(%rax)
	.byte	0x44,0x0f,0x29,0x78,0x70	#movaps	%xmm15,0x70(%rax)
___
$code.=<<___;
	movdqa	.Lbswap_mask(%rip),$T3
	movdqu	($Xip),$Xi
	movdqu	($Htbl),$Hkey
	movdqu	0x20($Htbl),$HK
	pshufb	$T3,$Xi
	sub	\$0x10,$len
	jz	.Lodd_tail
	movdqu	0x10($Htbl),$Hkey2
___
if ($do4xaggr) {
my ($Xl,$Xm,$Xh,$Hkey3,$Hkey4)=map("%xmm$_",(11..15));
$code.=<<___;
	mov	OPENSSL_ia32cap_P+4(%rip),%eax
	cmp	\$0x30,$len
	jb	.Lskip4x
	and	\$`1<<26|1<<22`,%eax	# isolate MOVBE+XSAVE
	cmp	\$`1<<22`,%eax		# check for MOVBE without XSAVE
	je	.Lskip4x
	sub	\$0x30,$len
	mov	\$0xA040608020C0E000,%rax	# ((7..0)·0xE0)&0xff
	movdqu	0x30($Htbl),$Hkey3
	movdqu	0x40($Htbl),$Hkey4
	#######
	# Xi+4 =[(H*Ii+3) + (H^2*Ii+2) + (H^3*Ii+1) + H^4*(Ii+Xi)] mod P
	#
	movdqu	0x30($inp),$Xln
	movdqu	0x20($inp),$Xl
	pshufb	$T3,$Xln
	pshufb	$T3,$Xl
	movdqa	$Xln,$Xhn
	pshufd	\$0b01001110,$Xln,$Xmn
	pxor	$Xln,$Xmn
	pclmulqdq	\$0x00,$Hkey,$Xln
	pclmulqdq	\$0x11,$Hkey,$Xhn
	pclmulqdq	\$0x00,$HK,$Xmn
	movdqa	$Xl,$Xh
	pshufd	\$0b01001110,$Xl,$Xm
	pxor	$Xl,$Xm
	pclmulqdq	\$0x00,$Hkey2,$Xl
	pclmulqdq	\$0x11,$Hkey2,$Xh
	pclmulqdq	\$0x10,$HK,$Xm
	xorps	$Xl,$Xln
	xorps	$Xh,$Xhn
	movups	0x50($Htbl),$HK
	xorps	$Xm,$Xmn
	movdqu	0x10($inp),$Xl
	movdqu	0($inp),$T1
	pshufb	$T3,$Xl
	pshufb	$T3,$T1
	movdqa	$Xl,$Xh
	pshufd	\$0b01001110,$Xl,$Xm
	pxor	$T1,$Xi
	pxor	$Xl,$Xm
	pclmulqdq	\$0x00,$Hkey3,$Xl
	movdqa	$Xi,$Xhi
	pshufd	\$0b01001110,$Xi,$T1
	pxor	$Xi,$T1
	pclmulqdq	\$0x11,$Hkey3,$Xh
	pclmulqdq	\$0x00,$HK,$Xm
	xorps	$Xl,$Xln
	xorps	$Xh,$Xhn
	lea	0x40($inp),$inp
	sub	\$0x40,$len
	jc	.Ltail4x
	jmp	.Lmod4_loop
.align	32
.Lmod4_loop:
	pclmulqdq	\$0x00,$Hkey4,$Xi
	xorps	$Xm,$Xmn
	movdqu	0x30($inp),$Xl
	pshufb	$T3,$Xl
	pclmulqdq	\$0x11,$Hkey4,$Xhi
	xorps	$Xln,$Xi
	movdqu	0x20($inp),$Xln
	movdqa	$Xl,$Xh
	pclmulqdq	\$0x10,$HK,$T1
	pshufd	\$0b01001110,$Xl,$Xm
	xorps	$Xhn,$Xhi
	pxor	$Xl,$Xm
	pshufb	$T3,$Xln
	movups	0x20($Htbl),$HK
	xorps	$Xmn,$T1
	pclmulqdq	\$0x00,$Hkey,$Xl
	pshufd	\$0b01001110,$Xln,$Xmn
	pxor	$Xi,$T1			# aggregated Karatsuba post-processing
	movdqa	$Xln,$Xhn
	pxor	$Xhi,$T1		#
	pxor	$Xln,$Xmn
	movdqa	$T1,$T2			#
	pclmulqdq	\$0x11,$Hkey,$Xh
	pslldq	\$8,$T1
	psrldq	\$8,$T2			#
	pxor	$T1,$Xi
	movdqa	.L7_mask(%rip),$T1
	pxor	$T2,$Xhi		#
	movq	%rax,$T2
	pand	$Xi,$T1			# 1st phase
	pshufb	$T1,$T2			#
	pxor	$Xi,$T2			#
	pclmulqdq	\$0x00,$HK,$Xm
	psllq	\$57,$T2		#
	movdqa	$T2,$T1			#
	pslldq	\$8,$T2
	pclmulqdq	\$0x00,$Hkey2,$Xln
	psrldq	\$8,$T1			#
	pxor	$T2,$Xi
	pxor	$T1,$Xhi		#
	movdqu	0($inp),$T1
	movdqa	$Xi,$T2			# 2nd phase
	psrlq	\$1,$Xi
	pclmulqdq	\$0x11,$Hkey2,$Xhn
	xorps	$Xl,$Xln
	movdqu	0x10($inp),$Xl
	pshufb	$T3,$Xl
	pclmulqdq	\$0x10,$HK,$Xmn
	xorps	$Xh,$Xhn
	movups	0x50($Htbl),$HK
	pshufb	$T3,$T1
	pxor	$T2,$Xhi		#
	pxor	$Xi,$T2
	psrlq	\$5,$Xi
	movdqa	$Xl,$Xh
	pxor	$Xm,$Xmn
	pshufd	\$0b01001110,$Xl,$Xm
	pxor	$T2,$Xi			#
	pxor	$T1,$Xhi
	pxor	$Xl,$Xm
	pclmulqdq	\$0x00,$Hkey3,$Xl
	psrlq	\$1,$Xi			#
	pxor	$Xhi,$Xi		#
	movdqa	$Xi,$Xhi
	pclmulqdq	\$0x11,$Hkey3,$Xh
	xorps	$Xl,$Xln
	pshufd	\$0b01001110,$Xi,$T1
	pxor	$Xi,$T1
	pclmulqdq	\$0x00,$HK,$Xm
	xorps	$Xh,$Xhn
	lea	0x40($inp),$inp
	sub	\$0x40,$len
	jnc	.Lmod4_loop
.Ltail4x:
	pclmulqdq	\$0x00,$Hkey4,$Xi
	pclmulqdq	\$0x11,$Hkey4,$Xhi
	pclmulqdq	\$0x10,$HK,$T1
	xorps	$Xm,$Xmn
	xorps	$Xln,$Xi
	xorps	$Xhn,$Xhi
	pxor	$Xi,$Xhi		# aggregated Karatsuba post-processing
	pxor	$Xmn,$T1
	pxor	$Xhi,$T1		#
	pxor	$Xi,$Xhi
	movdqa	$T1,$T2			#
	psrldq	\$8,$T1
	pslldq	\$8,$T2			#
	pxor	$T1,$Xhi
	pxor	$T2,$Xi			#
___
	&reduction_alg9($Xhi,$Xi);
$code.=<<___;
	add	\$0x40,$len
	jz	.Ldone
	movdqu	0x20($Htbl),$HK
	sub	\$0x10,$len
	jz	.Lodd_tail
.Lskip4x:
___
}
$code.=<<___;
	#######
	# Xi+2 =[H*(Ii+1 + Xi+1)] mod P =
	#	[(H*Ii+1) + (H*Xi+1)] mod P =
	#	[(H*Ii+1) + H^2*(Ii+Xi)] mod P
	#
	movdqu	($inp),$T1		# Ii
	movdqu	16($inp),$Xln		# Ii+1
	pshufb	$T3,$T1
	pshufb	$T3,$Xln
	pxor	$T1,$Xi			# Ii+Xi
	movdqa	$Xln,$Xhn
	pshufd	\$0b01001110,$Xln,$Xmn
	pxor	$Xln,$Xmn
	pclmulqdq	\$0x00,$Hkey,$Xln
	pclmulqdq	\$0x11,$Hkey,$Xhn
	pclmulqdq	\$0x00,$HK,$Xmn
	lea	32($inp),$inp		# i+=2
	nop
	sub	\$0x20,$len
	jbe	.Leven_tail
	nop
	jmp	.Lmod_loop
.align	32
.Lmod_loop:
	movdqa	$Xi,$Xhi
	movdqa	$Xmn,$T1
	pshufd	\$0b01001110,$Xi,$Xmn	#
	pxor	$Xi,$Xmn		#
	pclmulqdq	\$0x00,$Hkey2,$Xi
	pclmulqdq	\$0x11,$Hkey2,$Xhi
	pclmulqdq	\$0x10,$HK,$Xmn
	pxor	$Xln,$Xi		# (H*Ii+1) + H^2*(Ii+Xi)
	pxor	$Xhn,$Xhi
	movdqu	($inp),$T2		# Ii
	pxor	$Xi,$T1			# aggregated Karatsuba post-processing
	pshufb	$T3,$T2
	movdqu	16($inp),$Xln		# Ii+1
	pxor	$Xhi,$T1
	pxor	$T2,$Xhi		# "Ii+Xi", consume early
	pxor	$T1,$Xmn
	pshufb	$T3,$Xln
	movdqa	$Xmn,$T1		#
	psrldq	\$8,$T1
	pslldq	\$8,$Xmn		#
	pxor	$T1,$Xhi
	pxor	$Xmn,$Xi		#
	movdqa	$Xln,$Xhn		#
	movdqa	$Xi,$T2			# 1st phase
	movdqa	$Xi,$T1
	psllq	\$5,$Xi
	pxor	$Xi,$T1			#
	pclmulqdq	\$0x00,$Hkey,$Xln	#######
	psllq	\$1,$Xi
	pxor	$T1,$Xi			#
	psllq	\$57,$Xi		#
	movdqa	$Xi,$T1			#
	pslldq	\$8,$Xi
	psrldq	\$8,$T1			#
	pxor	$T2,$Xi
	pshufd	\$0b01001110,$Xhn,$Xmn
	pxor	$T1,$Xhi		#
	pxor	$Xhn,$Xmn		#
	movdqa	$Xi,$T2			# 2nd phase
	psrlq	\$1,$Xi
	pclmulqdq	\$0x11,$Hkey,$Xhn	#######
	pxor	$T2,$Xhi		#
	pxor	$Xi,$T2
	psrlq	\$5,$Xi
	pxor	$T2,$Xi			#
	lea	32($inp),$inp
	psrlq	\$1,$Xi			#
	pclmulqdq	\$0x00,$HK,$Xmn		#######
	pxor	$Xhi,$Xi		#
	sub	\$0x20,$len
	ja	.Lmod_loop
.Leven_tail:
	movdqa	$Xi,$Xhi
	movdqa	$Xmn,$T1
	pshufd	\$0b01001110,$Xi,$Xmn	#
	pxor	$Xi,$Xmn		#
	pclmulqdq	\$0x00,$Hkey2,$Xi
	pclmulqdq	\$0x11,$Hkey2,$Xhi
	pclmulqdq	\$0x10,$HK,$Xmn
	pxor	$Xln,$Xi		# (H*Ii+1) + H^2*(Ii+Xi)
	pxor	$Xhn,$Xhi
	pxor	$Xi,$T1
	pxor	$Xhi,$T1
	pxor	$T1,$Xmn
	movdqa	$Xmn,$T1		#
	psrldq	\$8,$T1
	pslldq	\$8,$Xmn		#
	pxor	$T1,$Xhi
	pxor	$Xmn,$Xi		#
___
	&reduction_alg9	($Xhi,$Xi);
$code.=<<___;
	test	$len,$len
	jnz	.Ldone
.Lodd_tail:
	movdqu	($inp),$T1		# Ii
	pshufb	$T3,$T1
	pxor	$T1,$Xi			# Ii+Xi
___
	&clmul64x64_T2	($Xhi,$Xi,$Hkey,$HK);	# H*(Ii+Xi)
	&reduction_alg9	($Xhi,$Xi);
$code.=<<___;
.Ldone:
	pshufb	$T3,$Xi
	movdqu	$Xi,($Xip)
___
$code.=<<___ if ($win64);
	movaps	(%rsp),%xmm6
	movaps	0x10(%rsp),%xmm7
	movaps	0x20(%rsp),%xmm8
	movaps	0x30(%rsp),%xmm9
	movaps	0x40(%rsp),%xmm10
	movaps	0x50(%rsp),%xmm11
	movaps	0x60(%rsp),%xmm12
	movaps	0x70(%rsp),%xmm13
	movaps	0x80(%rsp),%xmm14
	movaps	0x90(%rsp),%xmm15
	lea	0xa8(%rsp),%rsp
.LSEH_end_gcm_ghash_clmul:
___
$code.=<<___;
	ret
.size	gcm_ghash_clmul,.-gcm_ghash_clmul
___
}
# Emit gcm_init_avx and gcm_gmult_avx. With $avx==0 (the BoringSSL
# default, see above) both simply tail-jump into the CLMUL versions;
# otherwise gcm_init_avx precomputes H^1..H^8 with the AVX helpers
# clmul64x64_avx/reduction_avx defined inside this region.
$code.=<<___;
.globl	gcm_init_avx
.type	gcm_init_avx,\@abi-omnipotent
.align	32
gcm_init_avx:
___
if ($avx) {
my ($Htbl,$Xip)=@_4args;
my $HK="%xmm6";
$code.=<<___ if ($win64);
.LSEH_begin_gcm_init_avx:
	# I can't trust assembler to use specific encoding:-(
	.byte	0x48,0x83,0xec,0x18		#sub	$0x18,%rsp
	.byte	0x0f,0x29,0x34,0x24		#movaps	%xmm6,(%rsp)
___
$code.=<<___;
	vzeroupper
	vmovdqu	($Xip),$Hkey
	vpshufd	\$0b01001110,$Hkey,$Hkey	# dword swap
	# <<1 twist
	vpshufd	\$0b11111111,$Hkey,$T2	# broadcast uppermost dword
	vpsrlq	\$63,$Hkey,$T1
	vpsllq	\$1,$Hkey,$Hkey
	vpxor	$T3,$T3,$T3		#
	vpcmpgtd	$T2,$T3,$T3	# broadcast carry bit
	vpslldq	\$8,$T1,$T1
	vpor	$T1,$Hkey,$Hkey		# H<<=1
	# magic reduction
	vpand	.L0x1c2_polynomial(%rip),$T3,$T3
	vpxor	$T3,$Hkey,$Hkey		# if(carry) H^=0x1c2_polynomial
	vpunpckhqdq	$Hkey,$Hkey,$HK
	vmovdqa	$Hkey,$Xi
	vpxor	$Hkey,$HK,$HK
	mov	\$4,%r10		# up to H^8
	jmp	.Linit_start_avx
___
# AVX variant of clmul64x64_T2: one 128x128 carry-less multiply via
# three vpclmulqdq's with Karatsuba combining.
sub clmul64x64_avx {
my ($Xhi,$Xi,$Hkey,$HK)=@_;
if (!defined($HK)) {	$HK = $T2;
$code.=<<___;
	vpunpckhqdq	$Xi,$Xi,$T1
	vpunpckhqdq	$Hkey,$Hkey,$T2
	vpxor	$Xi,$T1,$T1		#
	vpxor	$Hkey,$T2,$T2
___
} else {
$code.=<<___;
	vpunpckhqdq	$Xi,$Xi,$T1
	vpxor	$Xi,$T1,$T1		#
___
}
$code.=<<___;
	vpclmulqdq	\$0x11,$Hkey,$Xi,$Xhi	#######
	vpclmulqdq	\$0x00,$Hkey,$Xi,$Xi	#######
	vpclmulqdq	\$0x00,$HK,$T1,$T1	#######
	vpxor	$Xi,$Xhi,$T2		#
	vpxor	$T2,$T1,$T1		#
	vpslldq	\$8,$T1,$T2		#
	vpsrldq	\$8,$T1,$T1
	vpxor	$T2,$Xi,$Xi		#
	vpxor	$T1,$Xhi,$Xhi
___
}
# AVX variant of the two-phase modular reduction (reduction_alg9).
sub reduction_avx {
my ($Xhi,$Xi) = @_;
$code.=<<___;
	vpsllq	\$57,$Xi,$T1		# 1st phase
	vpsllq	\$62,$Xi,$T2
	vpxor	$T1,$T2,$T2		#
	vpsllq	\$63,$Xi,$T1
	vpxor	$T1,$T2,$T2		#
	vpslldq	\$8,$T2,$T1		#
	vpsrldq	\$8,$T2,$T2
	vpxor	$T1,$Xi,$Xi		#
	vpxor	$T2,$Xhi,$Xhi
	vpsrlq	\$1,$Xi,$T2		# 2nd phase
	vpxor	$Xi,$Xhi,$Xhi
	vpxor	$T2,$Xi,$Xi		#
	vpsrlq	\$5,$T2,$T2
	vpxor	$T2,$Xi,$Xi		#
	vpsrlq	\$1,$Xi,$Xi		#
	vpxor	$Xhi,$Xi,$Xi		#
___
}
$code.=<<___;
.align	32
.Linit_loop_avx:
	vpalignr	\$8,$T1,$T2,$T3		# low part is H.lo^H.hi...
	vmovdqu	$T3,-0x10($Htbl)	# save Karatsuba "salt"
___
	&clmul64x64_avx	($Xhi,$Xi,$Hkey,$HK);	# calculate H^3,5,7
	&reduction_avx	($Xhi,$Xi);
$code.=<<___;
.Linit_start_avx:
	vmovdqa	$Xi,$T3
___
	&clmul64x64_avx	($Xhi,$Xi,$Hkey,$HK);	# calculate H^2,4,6,8
	&reduction_avx	($Xhi,$Xi);
$code.=<<___;
	vpshufd	\$0b01001110,$T3,$T1
	vpshufd	\$0b01001110,$Xi,$T2
	vpxor	$T3,$T1,$T1		# Karatsuba pre-processing
	vmovdqu	$T3,0x00($Htbl)		# save H^1,3,5,7
	vpxor	$Xi,$T2,$T2		# Karatsuba pre-processing
	vmovdqu	$Xi,0x10($Htbl)		# save H^2,4,6,8
	lea	0x30($Htbl),$Htbl
	sub	\$1,%r10
	jnz	.Linit_loop_avx
	vpalignr	\$8,$T2,$T1,$T3		# last "salt" is flipped
	vmovdqu	$T3,-0x10($Htbl)
	vzeroupper
___
$code.=<<___ if ($win64);
	movaps	(%rsp),%xmm6
	lea	0x18(%rsp),%rsp
.LSEH_end_gcm_init_avx:
___
$code.=<<___;
	ret
.size	gcm_init_avx,.-gcm_init_avx
___
} else {
$code.=<<___;
	jmp	.L_init_clmul
.size	gcm_init_avx,.-gcm_init_avx
___
}
$code.=<<___;
.globl	gcm_gmult_avx
.type	gcm_gmult_avx,\@abi-omnipotent
.align	32
gcm_gmult_avx:
	jmp	.L_gmult_clmul
.size	gcm_gmult_avx,.-gcm_gmult_avx
___
$code.=<<___;
.globl gcm_ghash_avx
.type gcm_ghash_avx,\@abi-omnipotent
.align 32
gcm_ghash_avx:
___
if ($avx) {
my ($Xip,$Htbl,$inp,$len)=@_4args;
my ($Xlo,$Xhi,$Xmi,
$Zlo,$Zhi,$Zmi,
$Hkey,$HK,$T1,$T2,
$Xi,$Xo,$Tred,$bswap,$Ii,$Ij) = map("%xmm$_",(0..15));
$code.=<<___ if ($win64);
lea -0x88(%rsp),%rax
.LSEH_begin_gcm_ghash_avx:
# I can't trust assembler to use specific encoding:-(
.byte 0x48,0x8d,0x60,0xe0 #lea -0x20(%rax),%rsp
.byte 0x0f,0x29,0x70,0xe0 #movaps %xmm6,-0x20(%rax)
.byte 0x0f,0x29,0x78,0xf0 #movaps %xmm7,-0x10(%rax)
.byte 0x44,0x0f,0x29,0x00 #movaps %xmm8,0(%rax)
.byte 0x44,0x0f,0x29,0x48,0x10 #movaps %xmm9,0x10(%rax)
.byte 0x44,0x0f,0x29,0x50,0x20 #movaps %xmm10,0x20(%rax)
.byte 0x44,0x0f,0x29,0x58,0x30 #movaps %xmm11,0x30(%rax)
.byte 0x44,0x0f,0x29,0x60,0x40 #movaps %xmm12,0x40(%rax)
.byte 0x44,0x0f,0x29,0x68,0x50 #movaps %xmm13,0x50(%rax)
.byte 0x44,0x0f,0x29,0x70,0x60 #movaps %xmm14,0x60(%rax)
.byte 0x44,0x0f,0x29,0x78,0x70 #movaps %xmm15,0x70(%rax)
___
$code.=<<___;
vzeroupper
vmovdqu ($Xip),$Xi # load $Xi
lea .L0x1c2_polynomial(%rip),%r10
lea 0x40($Htbl),$Htbl # size optimization
vmovdqu .Lbswap_mask(%rip),$bswap
vpshufb $bswap,$Xi,$Xi
cmp \$0x80,$len
jb .Lshort_avx
sub \$0x80,$len
vmovdqu 0x70($inp),$Ii # I[7]
vmovdqu 0x00-0x40($Htbl),$Hkey # $Hkey^1
vpshufb $bswap,$Ii,$Ii
vmovdqu 0x20-0x40($Htbl),$HK
vpunpckhqdq $Ii,$Ii,$T2
vmovdqu 0x60($inp),$Ij # I[6]
vpclmulqdq \$0x00,$Hkey,$Ii,$Xlo
vpxor $Ii,$T2,$T2
vpshufb $bswap,$Ij,$Ij
vpclmulqdq \$0x11,$Hkey,$Ii,$Xhi
vmovdqu 0x10-0x40($Htbl),$Hkey # $Hkey^2
vpunpckhqdq $Ij,$Ij,$T1
vmovdqu 0x50($inp),$Ii # I[5]
vpclmulqdq \$0x00,$HK,$T2,$Xmi
vpxor $Ij,$T1,$T1
vpshufb $bswap,$Ii,$Ii
vpclmulqdq \$0x00,$Hkey,$Ij,$Zlo
vpunpckhqdq $Ii,$Ii,$T2
vpclmulqdq \$0x11,$Hkey,$Ij,$Zhi
vmovdqu 0x30-0x40($Htbl),$Hkey # $Hkey^3
vpxor $Ii,$T2,$T2
vmovdqu 0x40($inp),$Ij # I[4]
vpclmulqdq \$0x10,$HK,$T1,$Zmi
vmovdqu 0x50-0x40($Htbl),$HK
vpshufb $bswap,$Ij,$Ij
vpxor $Xlo,$Zlo,$Zlo
vpclmulqdq \$0x00,$Hkey,$Ii,$Xlo
vpxor $Xhi,$Zhi,$Zhi
vpunpckhqdq $Ij,$Ij,$T1
vpclmulqdq \$0x11,$Hkey,$Ii,$Xhi
vmovdqu 0x40-0x40($Htbl),$Hkey # $Hkey^4
vpxor $Xmi,$Zmi,$Zmi
vpclmulqdq \$0x00,$HK,$T2,$Xmi
vpxor $Ij,$T1,$T1
vmovdqu 0x30($inp),$Ii # I[3]
vpxor $Zlo,$Xlo,$Xlo
vpclmulqdq \$0x00,$Hkey,$Ij,$Zlo
vpxor $Zhi,$Xhi,$Xhi
vpshufb $bswap,$Ii,$Ii
vpclmulqdq \$0x11,$Hkey,$Ij,$Zhi
vmovdqu 0x60-0x40($Htbl),$Hkey # $Hkey^5
vpxor $Zmi,$Xmi,$Xmi
vpunpckhqdq $Ii,$Ii,$T2
vpclmulqdq \$0x10,$HK,$T1,$Zmi
vmovdqu 0x80-0x40($Htbl),$HK
vpxor $Ii,$T2,$T2
vmovdqu 0x20($inp),$Ij # I[2]
vpxor $Xlo,$Zlo,$Zlo
vpclmulqdq \$0x00,$Hkey,$Ii,$Xlo
vpxor $Xhi,$Zhi,$Zhi
vpshufb $bswap,$Ij,$Ij
vpclmulqdq \$0x11,$Hkey,$Ii,$Xhi
vmovdqu 0x70-0x40($Htbl),$Hkey # $Hkey^6
vpxor $Xmi,$Zmi,$Zmi
vpunpckhqdq $Ij,$Ij,$T1
vpclmulqdq \$0x00,$HK,$T2,$Xmi
vpxor $Ij,$T1,$T1
vmovdqu 0x10($inp),$Ii # I[1]
vpxor $Zlo,$Xlo,$Xlo
vpclmulqdq \$0x00,$Hkey,$Ij,$Zlo
vpxor $Zhi,$Xhi,$Xhi
vpshufb $bswap,$Ii,$Ii
vpclmulqdq \$0x11,$Hkey,$Ij,$Zhi
vmovdqu 0x90-0x40($Htbl),$Hkey # $Hkey^7
vpxor $Zmi,$Xmi,$Xmi
vpunpckhqdq $Ii,$Ii,$T2
vpclmulqdq \$0x10,$HK,$T1,$Zmi
vmovdqu 0xb0-0x40($Htbl),$HK
vpxor $Ii,$T2,$T2
vmovdqu ($inp),$Ij # I[0]
vpxor $Xlo,$Zlo,$Zlo
vpclmulqdq \$0x00,$Hkey,$Ii,$Xlo
vpxor $Xhi,$Zhi,$Zhi
vpshufb $bswap,$Ij,$Ij
vpclmulqdq \$0x11,$Hkey,$Ii,$Xhi
vmovdqu 0xa0-0x40($Htbl),$Hkey # $Hkey^8
vpxor $Xmi,$Zmi,$Zmi
vpclmulqdq \$0x10,$HK,$T2,$Xmi
lea 0x80($inp),$inp
cmp \$0x80,$len
jb .Ltail_avx
vpxor $Xi,$Ij,$Ij # accumulate $Xi
sub \$0x80,$len
jmp .Loop8x_avx
.align 32
.Loop8x_avx:
vpunpckhqdq $Ij,$Ij,$T1
vmovdqu 0x70($inp),$Ii # I[7]
vpxor $Xlo,$Zlo,$Zlo
vpxor $Ij,$T1,$T1
vpclmulqdq \$0x00,$Hkey,$Ij,$Xi
vpshufb $bswap,$Ii,$Ii
vpxor $Xhi,$Zhi,$Zhi
vpclmulqdq \$0x11,$Hkey,$Ij,$Xo
vmovdqu 0x00-0x40($Htbl),$Hkey # $Hkey^1
vpunpckhqdq $Ii,$Ii,$T2
vpxor $Xmi,$Zmi,$Zmi
vpclmulqdq \$0x00,$HK,$T1,$Tred
vmovdqu 0x20-0x40($Htbl),$HK
vpxor $Ii,$T2,$T2
vmovdqu 0x60($inp),$Ij # I[6]
vpclmulqdq \$0x00,$Hkey,$Ii,$Xlo
vpxor $Zlo,$Xi,$Xi # collect result
vpshufb $bswap,$Ij,$Ij
vpclmulqdq \$0x11,$Hkey,$Ii,$Xhi
vxorps $Zhi,$Xo,$Xo
vmovdqu 0x10-0x40($Htbl),$Hkey # $Hkey^2
vpunpckhqdq $Ij,$Ij,$T1
vpclmulqdq \$0x00,$HK, $T2,$Xmi
vpxor $Zmi,$Tred,$Tred
vxorps $Ij,$T1,$T1
vmovdqu 0x50($inp),$Ii # I[5]
vpxor $Xi,$Tred,$Tred # aggregated Karatsuba post-processing
vpclmulqdq \$0x00,$Hkey,$Ij,$Zlo
vpxor $Xo,$Tred,$Tred
vpslldq \$8,$Tred,$T2
vpxor $Xlo,$Zlo,$Zlo
vpclmulqdq \$0x11,$Hkey,$Ij,$Zhi
vpsrldq \$8,$Tred,$Tred
vpxor $T2, $Xi, $Xi
vmovdqu 0x30-0x40($Htbl),$Hkey # $Hkey^3
vpshufb $bswap,$Ii,$Ii
vxorps $Tred,$Xo, $Xo
vpxor $Xhi,$Zhi,$Zhi
vpunpckhqdq $Ii,$Ii,$T2
vpclmulqdq \$0x10,$HK, $T1,$Zmi
vmovdqu 0x50-0x40($Htbl),$HK
vpxor $Ii,$T2,$T2
vpxor $Xmi,$Zmi,$Zmi
vmovdqu 0x40($inp),$Ij # I[4]
vpalignr \$8,$Xi,$Xi,$Tred # 1st phase
vpclmulqdq \$0x00,$Hkey,$Ii,$Xlo
vpshufb $bswap,$Ij,$Ij
vpxor $Zlo,$Xlo,$Xlo
vpclmulqdq \$0x11,$Hkey,$Ii,$Xhi
vmovdqu 0x40-0x40($Htbl),$Hkey # $Hkey^4
vpunpckhqdq $Ij,$Ij,$T1
vpxor $Zhi,$Xhi,$Xhi
vpclmulqdq \$0x00,$HK, $T2,$Xmi
vxorps $Ij,$T1,$T1
vpxor $Zmi,$Xmi,$Xmi
vmovdqu 0x30($inp),$Ii # I[3]
vpclmulqdq \$0x10,(%r10),$Xi,$Xi
vpclmulqdq \$0x00,$Hkey,$Ij,$Zlo
vpshufb $bswap,$Ii,$Ii
vpxor $Xlo,$Zlo,$Zlo
vpclmulqdq \$0x11,$Hkey,$Ij,$Zhi
vmovdqu 0x60-0x40($Htbl),$Hkey # $Hkey^5
vpunpckhqdq $Ii,$Ii,$T2
vpxor $Xhi,$Zhi,$Zhi
vpclmulqdq \$0x10,$HK, $T1,$Zmi
vmovdqu 0x80-0x40($Htbl),$HK
vpxor $Ii,$T2,$T2
vpxor $Xmi,$Zmi,$Zmi
vmovdqu 0x20($inp),$Ij # I[2]
vpclmulqdq \$0x00,$Hkey,$Ii,$Xlo
vpshufb $bswap,$Ij,$Ij
vpxor $Zlo,$Xlo,$Xlo
vpclmulqdq \$0x11,$Hkey,$Ii,$Xhi
vmovdqu 0x70-0x40($Htbl),$Hkey # $Hkey^6
vpunpckhqdq $Ij,$Ij,$T1
vpxor $Zhi,$Xhi,$Xhi
vpclmulqdq \$0x00,$HK, $T2,$Xmi
vpxor $Ij,$T1,$T1
vpxor $Zmi,$Xmi,$Xmi
vxorps $Tred,$Xi,$Xi
vmovdqu 0x10($inp),$Ii # I[1]
vpalignr \$8,$Xi,$Xi,$Tred # 2nd phase
vpclmulqdq \$0x00,$Hkey,$Ij,$Zlo
vpshufb $bswap,$Ii,$Ii
vpxor $Xlo,$Zlo,$Zlo
vpclmulqdq \$0x11,$Hkey,$Ij,$Zhi
vmovdqu 0x90-0x40($Htbl),$Hkey # $Hkey^7
vpclmulqdq \$0x10,(%r10),$Xi,$Xi
vxorps $Xo,$Tred,$Tred
vpunpckhqdq $Ii,$Ii,$T2
vpxor $Xhi,$Zhi,$Zhi
vpclmulqdq \$0x10,$HK, $T1,$Zmi
vmovdqu 0xb0-0x40($Htbl),$HK
vpxor $Ii,$T2,$T2
vpxor $Xmi,$Zmi,$Zmi
vmovdqu ($inp),$Ij # I[0]
vpclmulqdq \$0x00,$Hkey,$Ii,$Xlo
vpshufb $bswap,$Ij,$Ij
vpclmulqdq \$0x11,$Hkey,$Ii,$Xhi
vmovdqu 0xa0-0x40($Htbl),$Hkey # $Hkey^8
vpxor $Tred,$Ij,$Ij
vpclmulqdq \$0x10,$HK, $T2,$Xmi
vpxor $Xi,$Ij,$Ij # accumulate $Xi
lea 0x80($inp),$inp
sub \$0x80,$len
jnc .Loop8x_avx
add \$0x80,$len
jmp .Ltail_no_xor_avx
.align 32
.Lshort_avx:
vmovdqu -0x10($inp,$len),$Ii # very last word
lea ($inp,$len),$inp
vmovdqu 0x00-0x40($Htbl),$Hkey # $Hkey^1
vmovdqu 0x20-0x40($Htbl),$HK
vpshufb $bswap,$Ii,$Ij
vmovdqa $Xlo,$Zlo # subtle way to zero $Zlo,
vmovdqa $Xhi,$Zhi # $Zhi and
vmovdqa $Xmi,$Zmi # $Zmi
sub \$0x10,$len
jz .Ltail_avx
vpunpckhqdq $Ij,$Ij,$T1
vpxor $Xlo,$Zlo,$Zlo
vpclmulqdq \$0x00,$Hkey,$Ij,$Xlo
vpxor $Ij,$T1,$T1
vmovdqu -0x20($inp),$Ii
vpxor $Xhi,$Zhi,$Zhi
vpclmulqdq \$0x11,$Hkey,$Ij,$Xhi
vmovdqu 0x10-0x40($Htbl),$Hkey # $Hkey^2
vpshufb $bswap,$Ii,$Ij
vpxor $Xmi,$Zmi,$Zmi
vpclmulqdq \$0x00,$HK,$T1,$Xmi
vpsrldq \$8,$HK,$HK
sub \$0x10,$len
jz .Ltail_avx
vpunpckhqdq $Ij,$Ij,$T1
vpxor $Xlo,$Zlo,$Zlo
vpclmulqdq \$0x00,$Hkey,$Ij,$Xlo
vpxor $Ij,$T1,$T1
vmovdqu -0x30($inp),$Ii
vpxor $Xhi,$Zhi,$Zhi
vpclmulqdq \$0x11,$Hkey,$Ij,$Xhi
vmovdqu 0x30-0x40($Htbl),$Hkey # $Hkey^3
vpshufb $bswap,$Ii,$Ij
vpxor $Xmi,$Zmi,$Zmi
vpclmulqdq \$0x00,$HK,$T1,$Xmi
vmovdqu 0x50-0x40($Htbl),$HK
sub \$0x10,$len
jz .Ltail_avx
vpunpckhqdq $Ij,$Ij,$T1
vpxor $Xlo,$Zlo,$Zlo
vpclmulqdq \$0x00,$Hkey,$Ij,$Xlo
vpxor $Ij,$T1,$T1
vmovdqu -0x40($inp),$Ii
vpxor $Xhi,$Zhi,$Zhi
vpclmulqdq \$0x11,$Hkey,$Ij,$Xhi
vmovdqu 0x40-0x40($Htbl),$Hkey # $Hkey^4
vpshufb $bswap,$Ii,$Ij
vpxor $Xmi,$Zmi,$Zmi
vpclmulqdq \$0x00,$HK,$T1,$Xmi
vpsrldq \$8,$HK,$HK
sub \$0x10,$len
jz .Ltail_avx
vpunpckhqdq $Ij,$Ij,$T1
vpxor $Xlo,$Zlo,$Zlo
vpclmulqdq \$0x00,$Hkey,$Ij,$Xlo
vpxor $Ij,$T1,$T1
vmovdqu -0x50($inp),$Ii
vpxor $Xhi,$Zhi,$Zhi
vpclmulqdq \$0x11,$Hkey,$Ij,$Xhi
vmovdqu 0x60-0x40($Htbl),$Hkey # $Hkey^5
vpshufb $bswap,$Ii,$Ij
vpxor $Xmi,$Zmi,$Zmi
vpclmulqdq \$0x00,$HK,$T1,$Xmi
vmovdqu 0x80-0x40($Htbl),$HK
sub \$0x10,$len
jz .Ltail_avx
vpunpckhqdq $Ij,$Ij,$T1
vpxor $Xlo,$Zlo,$Zlo
vpclmulqdq \$0x00,$Hkey,$Ij,$Xlo
vpxor $Ij,$T1,$T1
vmovdqu -0x60($inp),$Ii
vpxor $Xhi,$Zhi,$Zhi
vpclmulqdq \$0x11,$Hkey,$Ij,$Xhi
vmovdqu 0x70-0x40($Htbl),$Hkey # $Hkey^6
vpshufb $bswap,$Ii,$Ij
vpxor $Xmi,$Zmi,$Zmi
vpclmulqdq \$0x00,$HK,$T1,$Xmi
vpsrldq \$8,$HK,$HK
sub \$0x10,$len
jz .Ltail_avx
vpunpckhqdq $Ij,$Ij,$T1
vpxor $Xlo,$Zlo,$Zlo
vpclmulqdq \$0x00,$Hkey,$Ij,$Xlo
vpxor $Ij,$T1,$T1
vmovdqu -0x70($inp),$Ii
vpxor $Xhi,$Zhi,$Zhi
vpclmulqdq \$0x11,$Hkey,$Ij,$Xhi
vmovdqu 0x90-0x40($Htbl),$Hkey # $Hkey^7
vpshufb $bswap,$Ii,$Ij
vpxor $Xmi,$Zmi,$Zmi
vpclmulqdq \$0x00,$HK,$T1,$Xmi
vmovq 0xb8-0x40($Htbl),$HK
sub \$0x10,$len
jmp .Ltail_avx
.align 32
.Ltail_avx:
vpxor $Xi,$Ij,$Ij # accumulate $Xi
.Ltail_no_xor_avx:
vpunpckhqdq $Ij,$Ij,$T1
vpxor $Xlo,$Zlo,$Zlo
vpclmulqdq \$0x00,$Hkey,$Ij,$Xlo
vpxor $Ij,$T1,$T1
vpxor $Xhi,$Zhi,$Zhi
vpclmulqdq \$0x11,$Hkey,$Ij,$Xhi
vpxor $Xmi,$Zmi,$Zmi
vpclmulqdq \$0x00,$HK,$T1,$Xmi
vmovdqu (%r10),$Tred
vpxor $Xlo,$Zlo,$Xi
vpxor $Xhi,$Zhi,$Xo
vpxor $Xmi,$Zmi,$Zmi
vpxor $Xi, $Zmi,$Zmi # aggregated Karatsuba post-processing
vpxor $Xo, $Zmi,$Zmi
vpslldq \$8, $Zmi,$T2
vpsrldq \$8, $Zmi,$Zmi
vpxor $T2, $Xi, $Xi
vpxor $Zmi,$Xo, $Xo
vpclmulqdq \$0x10,$Tred,$Xi,$T2 # 1st phase
vpalignr \$8,$Xi,$Xi,$Xi
vpxor $T2,$Xi,$Xi
vpclmulqdq \$0x10,$Tred,$Xi,$T2 # 2nd phase
vpalignr \$8,$Xi,$Xi,$Xi
vpxor $Xo,$Xi,$Xi
vpxor $T2,$Xi,$Xi
cmp \$0,$len
jne .Lshort_avx
vpshufb $bswap,$Xi,$Xi
vmovdqu $Xi,($Xip)
vzeroupper
___
$code.=<<___ if ($win64);
movaps (%rsp),%xmm6
movaps 0x10(%rsp),%xmm7
movaps 0x20(%rsp),%xmm8
movaps 0x30(%rsp),%xmm9
movaps 0x40(%rsp),%xmm10
movaps 0x50(%rsp),%xmm11
movaps 0x60(%rsp),%xmm12
movaps 0x70(%rsp),%xmm13
movaps 0x80(%rsp),%xmm14
movaps 0x90(%rsp),%xmm15
lea 0xa8(%rsp),%rsp
.LSEH_end_gcm_ghash_avx:
___
$code.=<<___;
ret
.size gcm_ghash_avx,.-gcm_ghash_avx
___
} else {
$code.=<<___;
jmp .L_ghash_clmul
.size gcm_ghash_avx,.-gcm_ghash_avx
___
}
$code.=<<___;
.align 64
.Lbswap_mask:
.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
.L0x1c2_polynomial:
.byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2
.L7_mask:
.long 7,0,7,0
.L7_mask_poly:
.long 7,0,`0xE1<<1`,0
.align 64
.type .Lrem_4bit,\@object
.Lrem_4bit:
.long 0,`0x0000<<16`,0,`0x1C20<<16`,0,`0x3840<<16`,0,`0x2460<<16`
.long 0,`0x7080<<16`,0,`0x6CA0<<16`,0,`0x48C0<<16`,0,`0x54E0<<16`
.long 0,`0xE100<<16`,0,`0xFD20<<16`,0,`0xD940<<16`,0,`0xC560<<16`
.long 0,`0x9180<<16`,0,`0x8DA0<<16`,0,`0xA9C0<<16`,0,`0xB5E0<<16`
.type .Lrem_8bit,\@object
.Lrem_8bit:
.value 0x0000,0x01C2,0x0384,0x0246,0x0708,0x06CA,0x048C,0x054E
.value 0x0E10,0x0FD2,0x0D94,0x0C56,0x0918,0x08DA,0x0A9C,0x0B5E
.value 0x1C20,0x1DE2,0x1FA4,0x1E66,0x1B28,0x1AEA,0x18AC,0x196E
.value 0x1230,0x13F2,0x11B4,0x1076,0x1538,0x14FA,0x16BC,0x177E
.value 0x3840,0x3982,0x3BC4,0x3A06,0x3F48,0x3E8A,0x3CCC,0x3D0E
.value 0x3650,0x3792,0x35D4,0x3416,0x3158,0x309A,0x32DC,0x331E
.value 0x2460,0x25A2,0x27E4,0x2626,0x2368,0x22AA,0x20EC,0x212E
.value 0x2A70,0x2BB2,0x29F4,0x2836,0x2D78,0x2CBA,0x2EFC,0x2F3E
.value 0x7080,0x7142,0x7304,0x72C6,0x7788,0x764A,0x740C,0x75CE
.value 0x7E90,0x7F52,0x7D14,0x7CD6,0x7998,0x785A,0x7A1C,0x7BDE
.value 0x6CA0,0x6D62,0x6F24,0x6EE6,0x6BA8,0x6A6A,0x682C,0x69EE
.value 0x62B0,0x6372,0x6134,0x60F6,0x65B8,0x647A,0x663C,0x67FE
.value 0x48C0,0x4902,0x4B44,0x4A86,0x4FC8,0x4E0A,0x4C4C,0x4D8E
.value 0x46D0,0x4712,0x4554,0x4496,0x41D8,0x401A,0x425C,0x439E
.value 0x54E0,0x5522,0x5764,0x56A6,0x53E8,0x522A,0x506C,0x51AE
.value 0x5AF0,0x5B32,0x5974,0x58B6,0x5DF8,0x5C3A,0x5E7C,0x5FBE
.value 0xE100,0xE0C2,0xE284,0xE346,0xE608,0xE7CA,0xE58C,0xE44E
.value 0xEF10,0xEED2,0xEC94,0xED56,0xE818,0xE9DA,0xEB9C,0xEA5E
.value 0xFD20,0xFCE2,0xFEA4,0xFF66,0xFA28,0xFBEA,0xF9AC,0xF86E
.value 0xF330,0xF2F2,0xF0B4,0xF176,0xF438,0xF5FA,0xF7BC,0xF67E
.value 0xD940,0xD882,0xDAC4,0xDB06,0xDE48,0xDF8A,0xDDCC,0xDC0E
.value 0xD750,0xD692,0xD4D4,0xD516,0xD058,0xD19A,0xD3DC,0xD21E
.value 0xC560,0xC4A2,0xC6E4,0xC726,0xC268,0xC3AA,0xC1EC,0xC02E
.value 0xCB70,0xCAB2,0xC8F4,0xC936,0xCC78,0xCDBA,0xCFFC,0xCE3E
.value 0x9180,0x9042,0x9204,0x93C6,0x9688,0x974A,0x950C,0x94CE
.value 0x9F90,0x9E52,0x9C14,0x9DD6,0x9898,0x995A,0x9B1C,0x9ADE
.value 0x8DA0,0x8C62,0x8E24,0x8FE6,0x8AA8,0x8B6A,0x892C,0x88EE
.value 0x83B0,0x8272,0x8034,0x81F6,0x84B8,0x857A,0x873C,0x86FE
.value 0xA9C0,0xA802,0xAA44,0xAB86,0xAEC8,0xAF0A,0xAD4C,0xAC8E
.value 0xA7D0,0xA612,0xA454,0xA596,0xA0D8,0xA11A,0xA35C,0xA29E
.value 0xB5E0,0xB422,0xB664,0xB7A6,0xB2E8,0xB32A,0xB16C,0xB0AE
.value 0xBBF0,0xBA32,0xB874,0xB9B6,0xBCF8,0xBD3A,0xBF7C,0xBEBE
.asciz "GHASH for x86_64, CRYPTOGAMS by <appro\@openssl.org>"
.align 64
___
# EXCEPTION_DISPOSITION handler (EXCEPTION_RECORD *rec,ULONG64 frame,
# CONTEXT *context,DISPATCHER_CONTEXT *disp)
if ($win64) {
$rec="%rcx";
$frame="%rdx";
$context="%r8";
$disp="%r9";
$code.=<<___;
.extern __imp_RtlVirtualUnwind
.type se_handler,\@abi-omnipotent
.align 16
se_handler:
push %rsi
push %rdi
push %rbx
push %rbp
push %r12
push %r13
push %r14
push %r15
pushfq
sub \$64,%rsp
mov 120($context),%rax # pull context->Rax
mov 248($context),%rbx # pull context->Rip
mov 8($disp),%rsi # disp->ImageBase
mov 56($disp),%r11 # disp->HandlerData
mov 0(%r11),%r10d # HandlerData[0]
lea (%rsi,%r10),%r10 # prologue label
cmp %r10,%rbx # context->Rip<prologue label
jb .Lin_prologue
mov 152($context),%rax # pull context->Rsp
mov 4(%r11),%r10d # HandlerData[1]
lea (%rsi,%r10),%r10 # epilogue label
cmp %r10,%rbx # context->Rip>=epilogue label
jae .Lin_prologue
lea 24(%rax),%rax # adjust "rsp"
mov -8(%rax),%rbx
mov -16(%rax),%rbp
mov -24(%rax),%r12
mov %rbx,144($context) # restore context->Rbx
mov %rbp,160($context) # restore context->Rbp
mov %r12,216($context) # restore context->R12
.Lin_prologue:
mov 8(%rax),%rdi
mov 16(%rax),%rsi
mov %rax,152($context) # restore context->Rsp
mov %rsi,168($context) # restore context->Rsi
mov %rdi,176($context) # restore context->Rdi
mov 40($disp),%rdi # disp->ContextRecord
mov $context,%rsi # context
mov \$`1232/8`,%ecx # sizeof(CONTEXT)
.long 0xa548f3fc # cld; rep movsq
mov $disp,%rsi
xor %rcx,%rcx # arg1, UNW_FLAG_NHANDLER
mov 8(%rsi),%rdx # arg2, disp->ImageBase
mov 0(%rsi),%r8 # arg3, disp->ControlPc
mov 16(%rsi),%r9 # arg4, disp->FunctionEntry
mov 40(%rsi),%r10 # disp->ContextRecord
lea 56(%rsi),%r11 # &disp->HandlerData
lea 24(%rsi),%r12 # &disp->EstablisherFrame
mov %r10,32(%rsp) # arg5
mov %r11,40(%rsp) # arg6
mov %r12,48(%rsp) # arg7
mov %rcx,56(%rsp) # arg8, (NULL)
call *__imp_RtlVirtualUnwind(%rip)
mov \$1,%eax # ExceptionContinueSearch
add \$64,%rsp
popfq
pop %r15
pop %r14
pop %r13
pop %r12
pop %rbp
pop %rbx
pop %rdi
pop %rsi
ret
.size se_handler,.-se_handler
.section .pdata
.align 4
.rva .LSEH_begin_gcm_gmult_4bit
.rva .LSEH_end_gcm_gmult_4bit
.rva .LSEH_info_gcm_gmult_4bit
.rva .LSEH_begin_gcm_ghash_4bit
.rva .LSEH_end_gcm_ghash_4bit
.rva .LSEH_info_gcm_ghash_4bit
.rva .LSEH_begin_gcm_init_clmul
.rva .LSEH_end_gcm_init_clmul
.rva .LSEH_info_gcm_init_clmul
.rva .LSEH_begin_gcm_ghash_clmul
.rva .LSEH_end_gcm_ghash_clmul
.rva .LSEH_info_gcm_ghash_clmul
___
$code.=<<___ if ($avx);
.rva .LSEH_begin_gcm_init_avx
.rva .LSEH_end_gcm_init_avx
.rva .LSEH_info_gcm_init_clmul
.rva .LSEH_begin_gcm_ghash_avx
.rva .LSEH_end_gcm_ghash_avx
.rva .LSEH_info_gcm_ghash_clmul
___
$code.=<<___;
.section .xdata
.align 8
.LSEH_info_gcm_gmult_4bit:
.byte 9,0,0,0
.rva se_handler
.rva .Lgmult_prologue,.Lgmult_epilogue # HandlerData
.LSEH_info_gcm_ghash_4bit:
.byte 9,0,0,0
.rva se_handler
.rva .Lghash_prologue,.Lghash_epilogue # HandlerData
.LSEH_info_gcm_init_clmul:
.byte 0x01,0x08,0x03,0x00
.byte 0x08,0x68,0x00,0x00 #movaps 0x00(rsp),xmm6
.byte 0x04,0x22,0x00,0x00 #sub rsp,0x18
.LSEH_info_gcm_ghash_clmul:
.byte 0x01,0x33,0x16,0x00
.byte 0x33,0xf8,0x09,0x00 #movaps 0x90(rsp),xmm15
.byte 0x2e,0xe8,0x08,0x00 #movaps 0x80(rsp),xmm14
.byte 0x29,0xd8,0x07,0x00 #movaps 0x70(rsp),xmm13
.byte 0x24,0xc8,0x06,0x00 #movaps 0x60(rsp),xmm12
.byte 0x1f,0xb8,0x05,0x00 #movaps 0x50(rsp),xmm11
.byte 0x1a,0xa8,0x04,0x00 #movaps 0x40(rsp),xmm10
.byte 0x15,0x98,0x03,0x00 #movaps 0x30(rsp),xmm9
.byte 0x10,0x88,0x02,0x00 #movaps 0x20(rsp),xmm8
.byte 0x0c,0x78,0x01,0x00 #movaps 0x10(rsp),xmm7
.byte 0x08,0x68,0x00,0x00 #movaps 0x00(rsp),xmm6
.byte 0x04,0x01,0x15,0x00 #sub rsp,0xa8
___
}
$code =~ s/\`([^\`]*)\`/eval($1)/gem;
print $code;
close STDOUT;
| aliclark/libquic | boringssl/crypto/modes/asm/ghash-x86_64.pl | Perl | bsd-3-clause | 42,578 |
#!/usr/bin/env perl
use strict;
use warnings;
use Carp;
use Getopt::Long qw(:config no_ignore_case bundling pass_through);
use File::Basename;
use FindBin;
my $usage = <<__EOUSAGE__;
###################################################################################
#
# -K <int> define K clusters via k-means algorithm
#
# or, cut the hierarchical tree:
#
# --Ktree <int> cut tree into K clusters
#
# --Ptree <float> cut tree based on this percent of max(height) of tree
#
# -R <string> the filename for the store RData (file.all.RData)
#
# misc:
#
# --lexical_column_ordering reorder column names according to lexical ordering
# --no_column_reordering
#
###################################################################################
__EOUSAGE__
;
# --- command-line option targets (described in $usage above) ---
my $Kmeans;                  # -K <int>: partition genes via k-means into K clusters
my $Ktree;                   # --Ktree <int>: cut the hierarchical tree into K clusters
my $help_flag = 0;           # -h: print usage and exit
my $R_data_file;             # -R <string>: pre-existing R-session data (file.all.RData)
my $pct_height = 0;          # --Ptree <float>: cut tree at this percent of max tree height
my $lexically_order_columns; # --lexical_column_ordering: sort sample columns by name
my $no_column_reordering;    # --no_column_reordering: keep original column order
&GetOptions ( 'h' => \$help_flag,
'K=i' => \$Kmeans,
'Ktree=i' => \$Ktree,
'Ptree=f' => \$pct_height,
'R=s' => \$R_data_file,
'lexical_column_ordering' => \$lexically_order_columns,
'no_column_reordering' => \$no_column_reordering,
);
# -h requested: show usage and stop
if ($help_flag) {
die $usage;
}
# anything left in @ARGV after GetOptions is an unrecognized argument
if (@ARGV) {
die "Error, don't understand args: @ARGV";
}
# at least one clustering mode (-K, --Ktree or --Ptree) plus -R is mandatory
unless (($Kmeans || $Ktree || $pct_height) && $R_data_file) {
die $usage;
}
# --Ptree is a percentage (e.g. 60), not a fraction in (0,1)
if ($pct_height && $pct_height < 1) {
die "Error, specify --Ptree as percent value > 1\n\n";
}
# Main flow: write a temporary R script that (1) partitions genes into
# clusters (k-means, fixed tree cut, or percent-height tree cut), (2)
# re-draws the heatmap with a per-cluster row color bar, and (3) writes one
# expression matrix per cluster; then run R and plot each subcluster.
main: {
# refuse to run without the saved R session produced by the upstream heatmap step
unless (-s $R_data_file) {
die "Error, cannot find pre-existing R-session data as file: $R_data_file";
}
# build the throwaway R script in the current directory
my $R_script = "__tmp_define_clusters.R";
open (my $ofh, ">$R_script") or die "Error, cannot write to file $R_script";
print $ofh "library(cluster)\n";
#print $ofh "library(gplots)\n";
print $ofh "library(Biobase)\n";
print $ofh "source(\"$FindBin::Bin/R/heatmap.3.R\")\n";
# load() restores the objects referenced below: heatmap_data, hc_genes,
# hc_samples and myheatcol
print $ofh "load(\"$R_data_file\")\n";
print $ofh "data = heatmap_data\n";
my $core_filename;   # basename used for the heatmap PDF
my $outdir;          # directory that receives the per-cluster matrices
# choose the gene-partitioning strategy requested on the command line
if ($Kmeans) {
print $ofh "kmeans_clustering <- kmeans(data, centers=$Kmeans, iter.max=100, nstart=5)\n";
$core_filename = "clusters_fixed_Kmeans_${Kmeans}.heatmap";
$outdir = basename($R_data_file) . ".clusters_fixed_Kmeans_" . $Kmeans;
print $ofh "gene_partition_assignments = kmeans_clustering\$cluster\n";
}
elsif ($Ktree) {
# cut the precomputed gene dendrogram into exactly $Ktree clusters
print $ofh "gene_partition_assignments <- cutree(as.hclust(hc_genes), k=$Ktree)\n";
$core_filename = "clusters_fixed_Ktree_${Ktree}.heatmap";
$outdir = basename($R_data_file) . ".clusters_fixed_Ktree_" . $Ktree;
}
else {
# cut the gene dendrogram at $pct_height percent of its maximum height
print $ofh "gene_partition_assignments <- cutree(as.hclust(hc_genes), h=$pct_height/100*max(hc_genes\$height))\n";
$core_filename = "clusters_fixed_P_${pct_height}.heatmap";
$outdir = basename($R_data_file) . ".clusters_fixed_P_" . $pct_height;
}
print $ofh "max_cluster_count = max(gene_partition_assignments)\n";
print $ofh "outdir = \"" . $outdir . "\"\n";
print $ofh "dir.create(outdir)\n";
# make another heatmap:
# one rainbow color per cluster, used as the heatmap's row side-bar
print $ofh "partition_colors = rainbow(length(unique(gene_partition_assignments)), start=0.4, end=0.95)\n";
print $ofh "gene_colors = as.matrix(partition_colors[gene_partition_assignments])\n";
print $ofh "pdf(\"$core_filename.heatmap.pdf\")\n";
if ($lexically_order_columns) {
print $ofh "data = data[,order(colnames(data))]\n";
}
# suppress the sample (column) dendrogram whenever the column order is fixed
if ($lexically_order_columns || $no_column_reordering) {
print $ofh "heatmap.3(data, dendrogram='row', Rowv=as.dendrogram(hc_genes), Colv=F, col=myheatcol, RowSideColors=gene_colors, scale=\"none\", density.info=\"none\", trace=\"none\", key=TRUE, cexCol=1, margins=c(10,10))\n";
}
else {
print $ofh "heatmap.3(data, dendrogram='both', Rowv=as.dendrogram(hc_genes), Colv=as.dendrogram(hc_samples), col=myheatcol, RowSideColors=gene_colors, scale=\"none\", density.info=\"none\", trace=\"none\", key=TRUE, cexCol=1, margins=c(10,10))\n";
}
print $ofh "dev.off()\n";
# emit an R loop writing one log2-median-centered matrix per cluster
print $ofh "gene_names = rownames(data)\n";
print $ofh "num_cols = length(data[1,])\n";
print $ofh "for (i in 1:max_cluster_count) {\n";
print $ofh " partition_i = (gene_partition_assignments == i)\n";
print $ofh " partition_data = data[partition_i,,drop=F]\n";
# print $ofh " # if the partition involves only one row, then it returns a vector instead of a table\n";
# ;
# print $ofh " if (sum(partition_i) == 1) {\n";
# print $ofh " dim(partition_data) = c(1,num_cols)\n";
# print $ofh " colnames(partition_data) = colnames(data)\n";
# print $ofh " rownames(partition_data) = gene_names[partition_i]\n";
# print $ofh " }\n";
if ($lexically_order_columns) {
print $ofh "partition_data = partition_data[,order(colnames(partition_data)), drop=F]\n";
}
elsif (! $no_column_reordering) {
## order based on sample clustering
print $ofh "partition_data = partition_data[,hc_samples\$order, drop=F]\n";
}
print $ofh " outfile = paste(outdir, \"/subcluster_\", i, \"_log2_medianCentered_fpkm.matrix\", sep='')\n";
print $ofh " write.table(partition_data, file=outfile, quote=F, sep=\"\\t\")\n";
print $ofh "}\n";
close $ofh;
# run the generated script; process_cmd() dies on failure
&process_cmd("R --vanilla -q < $R_script");
###################################################
## Generate the expression plots for each cluster
###################################################
chdir $outdir or die "Error, cannot cd into $outdir";
# NOTE: the escaped glob is expanded by the shell inside process_cmd
my $cmd = "$FindBin::Bin/plot_expression_patterns.pl subcluster\*fpkm.matrix";
&process_cmd($cmd);
exit(0);
}
####
sub process_cmd {
    # Run a shell command, echoing it to STDERR first.
    # Dies with a decoded diagnostic when the command fails; returns
    # nothing on success.
    #
    # The raw system() return value conflates three failure modes (see
    # perlfunc "system"): -1 means the command could not be launched at
    # all, low bits carry a fatal signal number, and the exit status lives
    # in the high byte. Distinguish the first two instead of reporting a
    # misleading "ret -1"/"ret 13"-style value; a plain non-zero exit keeps
    # the original message so existing log scrapers still match.
    my ($cmd) = @_;
    print STDERR "CMD: $cmd\n";
    my $ret = system($cmd);
    if ($ret == -1) {
        # system() itself failed (e.g. the shell could not be executed)
        die "Error, cmd $cmd failed to execute: $!";
    }
    elsif ($ret & 127) {
        die "Error, cmd $cmd died with signal " . ($ret & 127);
    }
    elsif ($ret) {
        die "Error, cmd $cmd died with ret $ret";
    }
    return;
}
| ssn1306/trinityrnaseq | Analysis/DifferentialExpression/define_clusters_by_cutting_tree.pl | Perl | bsd-3-clause | 6,213 |
#!/usr/bin/perl
# crhFile.pm
#
# file subroutines
# v1.13 crh 20-jan-09 -- initial release
# dos2UnixPath($) -- converts DOS path separators to Unix
# unix2DosPath($) -- converts Unix path separators to DOS
# absPath(;$) -- returns absolute pathname
# checkDir ($) -- check directory exists
# checkFile ($) -- check file, return short form of filename if exists
# checkInfile($) -- checks input file for existence, readability, etc
# createOutfile($$;$) -- checks and creates output file
# renFile($$;$) -- checks and renames file
# getFileLine ($;$) -- return next line (trimmed), skips blank & remark lines
# fileList ($$;$) -- returns filenames array, optionally searching recursively
# checkBinaryFile ($) -- check binary input file, return 1 if successful
package crhFile;
use Exporter;
@ISA = ("Exporter");
@EXPORT = qw(&dos2UnixPath &unix2DosPath &absPath &checkInfile &createOutfile
&renFile &getFileLine &fileList &checkBinaryFile &checkDir &checkFile);
use warnings;
use strict;
use lib '../crhLib'; # crh custom packages
use File::Basename;
use Cwd 'abs_path';
use File::Find;
use POSIX;
use crhDebug;
sub dos2UnixPath($) {
    # Translate every DOS backslash separator into a Unix forward slash,
    # making path-handling code portable across platforms.
    # NOTE: $_[0] aliases the caller's scalar, so the argument is updated
    # in place as well as being returned.
    $_[0] =~ tr{\\}{/};
    return $_[0];
}
sub unix2DosPath($) {
    # Inverse of dos2UnixPath(): turn Unix '/' separators into DOS '\'.
    # Mutates the caller's scalar through the @_ alias and returns it.
    $_[0] =~ tr{/}{\\};
    return $_[0];
}
sub absPath(;$) {
    # Thin convenience wrapper around Cwd::abs_path(): with a (true)
    # argument, resolve that path to an absolute pathname; otherwise
    # resolve the current working directory.
    return $_[0] ? abs_path($_[0]) : abs_path();
}
sub checkDir ($) {
    # Validate that the argument names an existing directory.
    # Returns a two-element list: on success, the directory portion
    # reported by fileparse() plus 'directory OK'; on failure, '0' plus a
    # short reason.
    my ($path) = @_;
    my (undef, $parentDir, undef) = fileparse($path, qr/\..*/);
    return ('0', 'directory not found') unless -e $path;
    return ('0', 'not a directory')     unless -d $path;
    return ($parentDir, 'directory OK');
}
sub checkFile ($) {
    # Report whether a file exists.
    # On success returns its short form (basename with extension) plus
    # 'file OK'; otherwise ('0', 'file not found').
    my ($path) = @_;
    my ($stem, undef, $ext) = fileparse($path, qr/\..*/);
    my $shortName = $stem . $ext;
    return ('0', 'file not found') unless -e $path;
    return ($shortName, 'file OK');
}
sub checkInfile ($) {
    # Verify an input file exists and is a plain, readable text file.
    # On success: (short filename, '<short> input file checks OK');
    # on failure: ('0', <diagnostic naming the full path>).
    my ($stem, $dir, $ext) = fileparse($_[0], qr/\..*/);
    my $fullName  = $dir . $stem . $ext;   # path as reassembled by fileparse
    my $shortName = $stem . $ext;          # basename used in the OK message
    return ('0', "$fullName file not found")         unless -e $_[0];
    return ('0', "$fullName is not a plain file")    unless -f $_[0];
    return ('0', "$fullName is not a text file")     unless -T $_[0];
    return ('0', "$fullName is not a readable file") unless -r $_[0];
    return ($shortName, "$shortName input file checks OK");
}
sub createOutfile ($$;$) {
    # Derive an output filename from the input file's directory and
    # basename, the given extension (including the leading dot) and an
    # optional replacement basename, then create it as an empty file.
    # Returns (output path, '<name><ext> output file checks OK') on
    # success; ('0', <reason>) when the file already exists or cannot be
    # created.
    #
    # Fix: the original opened a bareword global filehandle (OUTF), which
    # pollutes the package namespace and is not reentrant; use a lexical
    # handle instead. Behavior and messages are unchanged.
    my ($inFile, $newExt, $newName) = @_;
    my ($name, $dir, undef) = fileparse($inFile, qr/\..*/);
    $name = $newName if $newName;   # optional basename override (truthy, as before)
    my $outFile = $dir . $name . $newExt;
    if (-e $outFile) {
        return ('0', "output file $outFile already exists");
    }
    if (open my $ofh, '>', $outFile) {
        close $ofh;
        return ($outFile, "$name$newExt output file checks OK");
    }
    return ('0', "output file $outFile cannot be created");
}
sub renFile ($$;$) {
    # Rename a file so it carries a new extension (and optionally a new
    # basename), staying in the same directory. Never clobbers an existing
    # target.
    # Returns (new path, message) on success, ('0', <reason>) on failure.
    my ($inFile, $newExt, $newName) = @_;
    my ($name, $dir, undef) = fileparse($inFile, qr/\..*/);
    $name = $newName if $newName;   # optional basename override
    my $outFile = $dir . $name . $newExt;
    return ('0', "rename failed,... $outFile already exists") if -e $outFile;
    if (rename $inFile, $outFile) {
        return ($outFile, "input file $inFile renamed to $name$newExt");
    }
    return ('0', "rename failed,... cannot rename $inFile to $name$newExt");
}
sub getFileLine ($;$) {
    # Fetch the next significant line from an open filehandle: whitespace
    # is trimmed, and blank lines (plus lines beginning with the optional
    # remark character) are skipped.
    # Returns '' once the file is exhausted; a non-empty string is never
    # returned otherwise, so '' is an unambiguous eof marker.
    my ($fh, $remChar) = @_;
    $remChar = '' unless $remChar;   # truthy check, matching the original
    while (my $line = <$fh>) {
        chomp $line;
        $line =~ s/\A\s+//;          # strip leading whitespace
        $line =~ s/\s+\z//;          # strip trailing whitespace
        next if $line eq '';
        next if $remChar ne '' && substr($line, 0, 1) eq $remChar;
        return $line;                # success :-)
    }
    return '';
}
sub fileList ($$;$) {
# returns array of sorted filenames
# does recursive directory search if required
# does case insensitive filename sort within each directory
# (should really use case folding [fc()] to work consistently with unicode)
# args: base directory, file glob, recursive
# NOTE(review): the non-recursive branch concatenates dir and glob with no
# separator ("$fileDir$fileListGlob"), so callers apparently must pass a
# base directory ending in a path separator -- TODO confirm against callers.
# Sorting is per-directory only; across directories the order is whatever
# File::Find produces.
my $fileDir = $_[0];
my $fileListGlob = $_[1];
my $recursive = 0;
my @fileListFiles = ();
if ($_[2]) { # set recursion flag, possibly
$recursive = $_[2];
}
if ($recursive) {
# File::Find invokes the callback with cwd changed into each entry's
# parent and $_ set to the entry's basename, so -d here selects
# directories; each directory's matches are globbed via the full
# $File::Find::name and appended in sorted order.
find sub {
return unless -d;
push(@fileListFiles, sort {lc($a) cmp lc($b)} glob("$File::Find::name/$fileListGlob"));
}, $fileDir;
} else { # just process the specified directory
@fileListFiles = sort {lc($a) cmp lc($b)} glob("$fileDir$fileListGlob");
}
return @fileListFiles;
}
sub checkBinaryFile ($) {
    # Run basic sanity checks on a binary input file.
    # args:    binary filename
    # returns: (status, basename, status message), where status is 1 when
    #          all checks pass and 0 on the first failed check.
    # Note: -B can be true for files that also satisfy -T (e.g. empty
    # files), hence the extra explicit text-file check.
    my ($binFile) = @_;
    my $name = basename($binFile);
    if (!-e $binFile) { return (0, $name, "$name file not found"); }
    if (!-f $binFile) { return (0, $name, "$name is a directory"); }
    if (!-r $binFile) { return (0, $name, "$name is not a readable file"); }
    if (!-B $binFile) { return (0, $name, "$name is not a binary file"); } # text files pass!
    if (-T $binFile)  { return (0, $name, "$name is a text file"); }
    return (1, $name, "$name file basic checks OK");
}
# helper subroutines, not exported
1;
| ciscrh/crhLib | crhFile.pm | Perl | mit | 7,244 |
/*****************************************************************************
* This file is part of the Prolog Development Tool (PDT)
*
* Author: Günter Kniesel
* WWW: http://sewiki.iai.uni-bonn.de/research/pdt/start
* Mail: pdt@lists.iai.uni-bonn.de
* Copyright (C): 2013, CS Dept. III, University of Bonn
*
* All rights reserved. This program is made available under the terms
* of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*
****************************************************************************/
:- module( pdt_search,
[ find_reference_to/12 % (+Functor,+Arity,?DefFile,?DefModule,?RefModule,?RefName,?RefArity,?RefFile,?RefLine,?Nth,?Kind)
, find_definitions_categorized/12 % (+EnclFile,+SelectionLine, +Term, -Functor, -Arity, -This, -DeclOrDef, -DefiningEntity, -FullPath, -Line, -Properties,-Visibility)
, find_primary_definition_visible_in/6 % (EnclFile,TermString,ReferencedModule,MainFile,FirstLine,MultifileResult)
, find_definition_contained_in/8
, find_pred/8
]).
:- use_module( split_file_path,
[ split_file_path/4 % (File,Folder,FileName,BaseName,Extension)
] ).
:- use_module( pdt_xref,
[ find_reference_to/12 % ...
] ).
:- use_module( properties,
[ properties_for_predicate/4
] ).
:- use_module( pdt_prolog_library(utils4modules),
[ module_of_file/2 % (File,FileModule)
, defined_in/4 % (SubModule,Name,Arity,DeclModule),
, defined_in_module/3 % (Module,Name,Arity),
, declared_in_file/4 % (Module,Name,Arity,Location)
, defined_in_files/4 % (Module,Name,Arity,Locations)
] ).
% TODO: Why this import?
:- user:consult(pdt_runtime_builder_analyzer('meta_pred_toplevel.pl')).
:- op(600, xfy, ::). % Logtalk message sending operator
/***********************************************************************
* Find Definitions and Declarations and categorize them by visibility *
* --------------------------------------------------------------------*
* for "Find All Declarations" (Ctrl+G) action *
***********************************************************************/
% find_definitions_categorized(+ReferencingFile,+-ReferencingLine,+ReferencingTerm,-Name,-Arity,
% ???ReferencingModule, -DefiningModule, -DeclOrDef, -Visibility, -File,-Line)
% ^^^^^^^^^^^^^^ TODO: moved to this place (two arguments forward)
% Logtalk
% Clause 1: Logtalk sources (.lgt extension) are delegated to the Logtalk
% adapter. The Term \= _:_ guard keeps module-qualified terms for the
% plain-Prolog clause below.
find_definitions_categorized(EnclFile,SelectionLine, Term, Functor, Arity, This, DeclOrDef, DefiningEntity, FullPath, Line, Properties, Visibility):-
Term \= _:_,
split_file_path(EnclFile, _Directory,_FileName,_,lgt),
!,
logtalk_adapter::find_definitions_categorized(EnclFile,SelectionLine, Term, Functor, Arity, This, DeclOrDef, DefiningEntity, FullPath, Line, Properties, Visibility).
% Clause 2: plain Prolog. Derives Functor/Arity from the selected term,
% resolves the referencing module (explicit qualification wins over the
% file's own module), then enumerates categorized sources on backtracking.
find_definitions_categorized(EnclFile,_SelectionLine,Term,Functor,Arity, ReferencingModule, DeclOrDef, DefiningModule, File,Line, PropertyList, Visibility):-
search_term_to_predicate_indicator(Term, Functor/Arity),
module_of_file(EnclFile,FileModule),
( atom(ReferencingModule)
-> true % Explicit entity reference ReferencedModule:Term (or ReferencedModule::Term
; ReferencingModule = FileModule % Implicit module reference
),
% NOTE(review): find_definition is called here with 4 arguments, but
% the 9-argument clause below looks like the intended worker -- this is
% a work-in-progress inconsistency, confirm the intended signature.
find_definition(ReferencingModule,Functor,Arity,Sources),
member(DeclOrDef-Visibility-DefiningModule-Location,Sources),
member(File-Lines,Location),
member(Line,Lines),
properties_for_predicate(ReferencingModule,Functor,Arity,PropertyList).
% Map a selected term (optionally module-qualified) to Functor/Arity.
search_term_to_predicate_indicator(_:Term, Functor/Arity) :- !, functor(Term, Functor, Arity).
search_term_to_predicate_indicator( Term, Functor/Arity) :- functor(Term, Functor, Arity).
%% find_definition(+ReferencingModule,+Name,?Arity,-Visibility,-Sources)
% NOTE(review): the header above documents 5 arguments; the first clause
% below has 4 and the second has 9. The file is marked WORK_IN_PROGRESS --
% confirm the intended arity before relying on this predicate.
% Guard clause: an unbound predicate name is a caller error.
find_definition(ReferencingModule,Name,Arity,Sources) :-
var(Name),
throw( input_argument_free(find_definition(ReferencingModule,Name,Arity,Sources)) ).
% Worker: enumerate defining module, visibility category and source
% location for Name/Arity as seen from ReferencingModule.
find_definition(Name,Arity,ReferencingModule,Module,Visibility,File,LineNr,N,Case) :-
% ( pdt_option(search, restrict_arity(true))
% ->
% ),
% ( pdt_option( search, restrict_module(true)
% ->
% ),
% NOTE(review): defined_in/3 is used here, but the import list at the
% top of the file declares defined_in/4 -- verify which one exists.
defined_in(Module,Name,Arity),
visibility(Module,Name,Arity,ReferencingModule,Visibility),
location(Module,Name,Arity,File,LineNr,N,Case).
% locations_by_file(Module,Name,Arity,File,Lines).
%
%locations_by_file(Module,Name,Arity,File,Line) :-
% setof( LineNr-N-Case,
% Module^Name^Arity^ location(Module,Name,Arity,File,LineNr,N,Case),
% Lines
% ),
% member(Line,Lines)
% .
%% location(+Module,+Name,+Arity, ?File, ?Line, ?N, ?Case)
%
% @param File The full path of the file containing the N-th clause
% @param Case = clause|declaration|foreign|dynamic
%
% * Case==clause:
% a clause of the predicate Module:Name/Arity starts at
% line Line in File.
% * Case==declaration:
% a declaration for the predicate Module:Name/Arity is
% somewhere in file. Lacking information about positions
% of declarations, we guess the Line to be 1.
% * Case==foreign
% There is no source location for Module:Name/Arity
% since it is defined by foreign language code.
% In this case File==none and Line==0.
% * Case==dynamic
% There is no source location for Module:Name/Arity
% since it is defined only by dynamic code.
% In this case File==none and Line==0.
%
% Delegates to clause_location/6 with arguments (File,Line,N).
location(Module,Name,Arity, File,Line,N,clause) :- % Clause
clause_location(Module,Name,Arity,File,Line,N).
% No clause is recorded: fall back to the module's own file, line 1.
location(Module,Name,Arity,File,Line,N,declaration) :- % Declaration
\+ clause_location(Module,Name,Arity,_,_,_),
module_property(Module, file(File)),
!,
Line=1,
N=0.
% Neither clauses nor a module file: foreign or dynamic definition.
location(Module,Name,Arity,File,Line,N,Prop) :- % No source code
\+ clause_location(Module,Name,Arity,_,_,_),
functor(Head,Name,Arity),
( (Prop = foreign, predicate_property(Module:Head,Prop))
; (Prop = (dynamic), predicate_property(Module:Head,Prop))
),
!,
File=none,
Line=0,
N=0.
%% clause_location(+Module,+Name,+Arity,?File,?Line,?N) is nondet
%
% The N-th clause of the predicate Module:Name/Arity starts at
% line Line in File.
%
% Argument order fixed: the head previously was
% clause_location(Module,Name,Arity,N,File,Line), but the only concrete
% caller (location/7, Case==clause) calls
% clause_location(Module,Name,Arity,File,Line,N), which unified File
% with the clause index. The head now matches the caller.
%
% @param File The full path of the file containing the N-th clause
clause_location(Module,Name,Arity,File,Line,N) :-
    functor(Head,Name,Arity),
    nth_clause(Module:Head,N,Ref),
    clause_property(Ref, file(File)),
    clause_property(Ref, line_count(Line)).
% Sub imports Head's predicate from Super (SWI predicate_property wrapper).
imports_pred_from(Sub,Head,Super) :-
predicate_property(Sub:Head, imported_from(Super)).
%% visibility(+ContextModule,+Name,+Arity,?DeclModule)
% NOTE(review): documented with 4 arguments but defined with 5 (the last
% argument is the visibility category) -- update header when stabilized.
% Name/Arity is declared in another module visible from the context.
visibility(ContextModule,Name,Arity,DeclModule, supermodule) :-
defined_in(ContextModule,Name,Arity,DeclModule),
ContextModule \== DeclModule.
% Declared in the context module itself.
visibility(ContextModule,Name,Arity,DeclModule, local) :-
defined_in(ContextModule,Name,Arity,DeclModule),
ContextModule == DeclModule.
% Declared in a module that itself imports from the context module.
visibility(ContextModule,Name,Arity,DeclModule, submodule) :-
defined_in(DeclModule,Name,Arity,DeclModule),
% DeclModule is a submodule of ContextModule
defined_in(DeclModule,_,_,ContextModule), % submodule
ContextModule \== DeclModule.
% Declared somewhere, but no import relationship in either direction.
visibility(ContextModule,Name,Arity,DeclModule, invisible) :-
% There is some DeclaringModule
defined_in(DeclModule,Name,Arity,DeclModule),
DeclModule \== ContextModule,
% ... but the ContextModule neither is imported to it
% nor imports from it:
functor(Head,Name,Arity),
\+ imports_pred_from(DeclModule,Head,ContextModule),
\+ imports_pred_from(ContextModule,Head,DeclModule).
/***********************************************************************
* Find Primary Definition *
* --------------------------------------------------------------------*
* for "Open Primary Declaration" (F3) action *
***********************************************************************/
%% find_primary_definition_visible_in(+EnclFile,+Name,+Arity,?ReferencedModule,?MainFile,?FirstLine,?MultifileResult)
%
% Find first line of first clause in the *primary* file defining the predicate Name/Arity
% visible in ReferencedModule. In case of multifile predicates, the primary file is either
% the file whose module is the DefiningModule or otherwise (this case only occurs
% for "magic" system modules, (e.g. 'system')) the file containing most clauses.
%
% Used for the open declaration action (F3) in
% pdt/src/org/cs3/pdt/internal/actions/FindPredicateActionDelegate.java
% Logtalk sources are delegated to the Logtalk adapter.
find_primary_definition_visible_in(EnclFile,TermString,ReferencedModule,MainFile,FirstLine,MultifileResult) :-
split_file_path(EnclFile, _Directory,_FileName,_,lgt),
!,
logtalk_adapter::find_primary_definition_visible_in(EnclFile,TermString,ReferencedModule,MainFile,FirstLine,MultifileResult).
% The second argument is just an atom containing the string representation of the term:
find_primary_definition_visible_in(EnclFile,TermString,ReferencedModule,MainFile,FirstLine,MultifileResult) :-
retrieve_term_from_atom(EnclFile, TermString, Term),
extract_name_arity(Term,Head,Name,Arity),
find_primary_definition_visible_in__(EnclFile,Head,Name,Arity,ReferencedModule,MainFile,FirstLine,MultifileResult).
% Parse TermString in the context of the file's module if it has one, so
% that operators local to that module are honoured.
retrieve_term_from_atom(EnclFile, TermString, Term) :-
( module_property(Module, file(EnclFile))
-> atom_concat(TermString, '.', TermStringWithDot),
open_chars_stream(TermStringWithDot, Stream),
read_term(Stream, Term, [module(Module)])
; atom_to_term(TermString, Term, _)
).
% Derive Name/Arity and a most-general Head from the parsed term,
% accepting Name/Arity indicators and module-qualified goals.
extract_name_arity(Term,Head,Name,Arity) :-
( var(Term)
-> throw( 'Cannot display the definition of a variable. Please select a predicate name.' )
; true
),
% Special treatment of Name/Arity terms:
( Term = Name/Arity
-> true
; ( Term = _Module:Term2
-> functor(Term2, Name, Arity)
; functor(Term,Name,Arity)
)
),
% Create most general head
functor(Head,Name,Arity).
% Now the second argument is a real term that is
% a) a file loading directive:
find_primary_definition_visible_in__(_,Term,_,_,_,File,Line,no):-
find_file(Term,File,Line).
% b) a literal (call or clause head):
find_primary_definition_visible_in__(EnclFile,Term,Name,Arity,ReferencedModule,MainFile,FirstLine,MultifileResult) :-
find_definition_visible_in(EnclFile,Term,Name,Arity,ReferencedModule,DefiningModule,Locations),
( Locations = [_,_|_]
-> MultifileResult = yes
; MultifileResult = no
),
primary_location(Locations,DefiningModule,MainFile,FirstLine).
% If Term is a loading directive, find the related file,
% resolving a FileSpec that may contain a path alias
find_file(Term,File,Line) :-
extract_file_spec(Term,FileSpec),
catch( absolute_file_name(FileSpec,[solutions(all),extensions(['.pl', '.lgt', '.ct', '.ctc'])], File),
_,
fail
),
access_file(File, read),
!,
Line=1.
% Work regardless of whether the user selected the entire consult/use_module
% statement or just the file spec. Does NOT work if he only selected a file
% name within an alias but not the complete alias.
extract_file_spec(consult(FileSpec),FileSpec) :- !.
extract_file_spec(use_module(FileSpec),FileSpec) :- !.
extract_file_spec(ensure_loaded(FileSpec),FileSpec) :- !.
extract_file_spec(Term,Term).
% Resolve the module actually defining/declaring Name/Arity as visible
% from EnclFile, and collect its source locations.
% NOTE(review): defined_in_module is called with 4 arguments here, but
% the import list declares defined_in_module/3 -- verify.
find_definition_visible_in(EnclFile,_Term,Name,Arity,ReferencedModule,DefiningModule,Locations) :-
module_of_file(EnclFile,FileModule),
( atom(ReferencedModule)
-> true % Explicit module reference
; ReferencedModule = FileModule % Implicit module reference
),
( defined_in_module(ReferencedModule,Name,Arity,DefiningModule)
-> defined_in_files(DefiningModule,Name,Arity,Locations)
; ( defined_in(ReferencedModule,Name,Arity,DeclaringModule),
defined_in_files(DeclaringModule,Name,Arity,Locations)
)
).
%% primary_location(+Locations,+DefiningModule,-File,-FirstLine)
%
% Pick the primary source location from a list of File-Lines pairs.
% Preferred: the file whose module is DefiningModule. Fallback (for
% "magic" system modules without such a file): the file containing the
% most clauses, as documented for find_primary_definition_visible_in/7.
primary_location(Locations,DefiningModule,File,FirstLine) :-
    member(File-Lines,Locations),
    module_of_file(File,DefiningModule),
    !,
    Lines = [FirstLine|_].
primary_location(Locations,_,File,FirstLine) :-
    findall( NrOfClauses-File-FirstLine,
             ( member(File-Lines,Locations),
               length(Lines,NrOfClauses),
               Lines=[FirstLine|_]
             ),
             All
           ),
    % Fix: sort descending so the head of the list is the file with the
    % MOST clauses (standard sort/2 is ascending and picked the fewest).
    sort(0, @>=, All, Sorted),
    Sorted = [ NrOfClauses-File-FirstLine |_ ].
/***********************************************************************
* Find Definitions in File *
* --------------------------------------------------------------------*
* for Outline *
***********************************************************************/
% TODO: This is meanwhile subsumed by other predicates. Integrate!
%% find_definition_contained_in(+File, -Name,-Arity,-Line,-PropertyList) is nondet.
%
% Looks up the starting line of each clause of each
% predicate Name/Arity defined in File. Core properties
% of the predicate are contained in the PropertyList.
%
% Called from PDTOutlineQuery.java
% NOTE(review): header documents 5 arguments; the clauses below have 8.
% Logtalk sources are delegated to the Logtalk adapter.
find_definition_contained_in(File, Entity, EntityKind, Functor, Arity, SearchCategory, Line, PropertyList) :-
split_file_path(File, _Directory,_FileName,_,lgt),
!,
logtalk_adapter::find_definition_contained_in(File, Entity, EntityKind, Functor, Arity, SearchCategory, Line, PropertyList).
% Plain Prolog: enumerate predicates whose clauses live in File.
find_definition_contained_in(File, Module, module, Functor, Arity, SearchCategory, Line, PropertyList) :-
% Backtrack over all predicates defined in File:
source_file(ModuleCandidate:Head, File),
% strip_module(ModuleHead,ModuleCandidate,Head),
( module_property(ModuleCandidate, file(File))
-> Module = ModuleCandidate
; Module = user
),
functor(Head, Functor, Arity),
properties_for_predicate(ModuleCandidate,Functor, Arity, PropertyList0),
% In the case of a multifile predicate, we want to find all clauses for this
% predicate, even when they occur in other files
( member(multifile, PropertyList0)
-> ( defined_in_file(ModuleCandidate, Functor, Arity, _, DeclFile, Line),
( DeclFile \= File
-> ( module_property(MultiModule, file(DeclFile)),
append([for(MultiModule), defining_file(DeclFile)], PropertyList0, PropertyList),
SearchCategory = multifile
)
; ( PropertyList = PropertyList0,
SearchCategory = definition
)
)
)
; ( PropertyList = PropertyList0,
SearchCategory = definition,
% The following backtracks over each clause of each predicate.
% Do this at the end, after the things that are deterministic:
( defined_in_file(ModuleCandidate, Functor, Arity, _, File, _)
-> Module2 = ModuleCandidate
; Module2 = Module
),
defined_in_file(Module2, Functor, Arity, _, File, Line)
)
),
\+find_blacklist(Functor,Arity,Module).
% The following clause searches for clauses inside the given file, which contribute to multifile
% predicates, defined in foreign modules.
find_definition_contained_in(File, Module, module, Functor, Arity, multifile, Line, PropertyList):-
module_property(FileModule, file(File)),
declared_in_module(Module,Head),
Module \= FileModule,
predicate_property(Module:Head, multifile),
nth_clause(Module:Head,_,Ref),
clause_property(Ref,file(File)),
clause_property(Ref,line_count(Line)),
functor(Head, Functor, Arity),
properties_for_predicate(Module, Functor, Arity, PropertyList0),
append([from(Module)], PropertyList0, PropertyList),
\+find_blacklist(Functor,Arity,Module).
%% find_blacklist(?Functor, ?Arity, ?Module) is nondet.
%
% Used to remove (internal) predicates from the results of find_definition_contained_in/8.
%
%
find_blacklist('$load_context_module',2,_).
find_blacklist('$mode',2,_).
find_blacklist('$pldoc',4,_).
/***********************************************
* FIND VISIBLE PREDICATE (FOR AUTOCOMPLETION) *
***********************************************/
%% find_pred(+EnclFile,+Prefix,-EnclModule,-Name,-Arity,-Exported,-Builtin,-Help) is nondet.
%
% Looks up all predicates with prefix Prefix defined or imported in file EnclFile.
%
% Used by the PLEditor content assist.
%
% The meaning of Arity is overloaded: -2: atom, -1 : module, >= 0 : predicate
%
% For performance reasons an empty prefix with an unspecified module
% will only bind predicates if EnclFile is specified.
%
% <EnclFile> specifies the file in which this query is triggered
% <Prefix> specifies the prefix of the predicate
% <Module> specifies the module associated to the file.
% Logtalk sources are delegated to the Logtalk adapter.
% NOTE(review): the \+ atom(EnclFile) guard is only the SECOND clause, so
% split_file_path above may already see a non-atom EnclFile -- confirm.
find_pred(EnclFile,Prefix,Module,Name,Arity,Exported,Builtin,Help) :-
split_file_path(EnclFile, _Directory,_FileName,_,lgt),
!,
logtalk_adapter::find_pred(EnclFile,Prefix,Module,Name,Arity,Exported,Builtin,Help).
find_pred(EnclFile,Prefix,Module,Name,Arity,Exported,Builtin,Help) :-
\+ atom(EnclFile),
throw( first_argument_free_in_call_to(find_pred(EnclFile,Prefix,Module,Name,Arity,Exported,Builtin,Help))).
% Main clause: collect the candidate (Name,Arity) pairs once via setof,
% then enumerate them and decorate each with module, export/builtin
% status and manual help text.
find_pred(EnclFile,Prefix,Module,Name,Arity,Exported,Builtin,Help) :-
setof(
(Name,Arity),
Prefix^Module^
( my_module_of_file(EnclFile,Module),
find_pred_(Prefix,Module,Name,Arity,true)
),
All
),
member((Name,Arity),All),
% no enclosing module specified in the code via modulename:..
get_declaring_module(EnclFile,Module,Name,Arity),
functor(Term,Name,Arity),
( predicate_property(Module:Term,exported)->
Exported=true
; Exported=false
),
( predicate_property(Module:Term,built_in)->
Builtin=true
; Builtin=false
),
predicate_manual_entry(Module,Name,Arity,Help).
% Candidate generator: current predicates in Module whose name starts
% with Prefix; built-ins are excluded when a module is given and the
% prefix is empty.
find_pred_(Prefix,Module,Name,Arity,true) :-
( var(Module)->
Prefix \== ''
; true
), % performance tweak:
current_predicate(Module:Name/Arity),
atom_concat(Prefix,_,Name),
% rule out used built-ins, like =../2, in case the enclosing module is given (in this case the prefix might be empty):
( nonvar(Module) ->
( functor(Term,Name,Arity),
(Prefix \== ''; \+ predicate_property(Term, built_in)) )
; true
).
% Resolve the module a completion candidate should be attributed to:
% the import source if the containing module imports it, else the
% containing module itself. A pre-bound Module is accepted as-is.
get_declaring_module(EnclFile,Module,Name,Arity) :-
var(Module),
my_module_of_file(EnclFile,ContainingModule),
current_predicate(ContainingModule:Name/Arity),
functor(Head,Name,Arity),
( predicate_property(ContainingModule:Head,imported_from(Module))
; Module = ContainingModule
),
!.
get_declaring_module(_EnclFile,Module,_Name,_Arity) :-
nonvar(Module),
!.
%% find_pred(+EnclFile,+Prefix,-EnclModule,-Name,-Arity,-Exported,-Builtin,-Help, -Kind) is nondet.
%
% Kind is one of predicate | module | atom; the last two use the
% overloaded Arity values -1 and -1 with pseudo help text 'nodoc'.
find_pred_for_editor_completion(EnclFile,Prefix,Module,Name,Arity,Exported,Builtin,Help,Kind) :-
\+ atom(EnclFile),
throw( first_argument_free_in_call_to(find_pred_for_editor_completion(EnclFile,Prefix,Module,Name,Arity,Exported,Builtin,Help,Kind))).
find_pred_for_editor_completion(EnclFile,Prefix,Module,Name,Arity,Exported,Builtin,Help,predicate) :-
find_pred(EnclFile,Prefix,Module,Name,Arity,Exported,Builtin,Help).
find_pred_for_editor_completion(_EnclFile,Prefix,EnclModule,Name,-1,true,false,'nodoc', module) :-
var(EnclModule),
current_module(Name),
atom_concat(Prefix,_,Name).
% TODO: Improvement Idea: use "string" Prefix instead
% of atom to avoid Prefix to be added to the set of atoms
find_pred_for_editor_completion(_EnclFile,Prefix,'',Atom,-1,fail,true,'nodoc', atom) :-
'$atom_completions'(Prefix, Atoms),
member(Atom,Atoms),
Atom \= Prefix,
garbage_collect_atoms,
\+ current_predicate(Atom/_Arity).
% Map a file to its module: an already-bound existing module is accepted;
% otherwise the module whose source is File (or user); files without a
% module fall back to user/system.
my_module_of_file(_File, Module) :-
atom(Module),
current_module(Module),
!.
my_module_of_file(File,Module):-
module_property(Module2,file(File)),
( Module = Module2
; Module = user
).
my_module_of_file(File,Module):-
atom(File),
\+ module_property(Module,file(File)),
( Module=user
; Module=system
).
| TeamSPoon/logicmoo_base | prolog/logicmoo/pdt_server/pdt.common/pl/pdt_search_WORK_IN_PROGRESS.pl | Perl | mit | 20,807 |
/* Part of Extended Libraries for SWI-Prolog
Author: Edison Mera Menendez
E-mail: efmera@gmail.com
WWW: https://github.com/edisonm/xlibrary
Copyright (C): 2017, Process Design Center, Breda, The Netherlands.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
:- module(resolve_calln,
[resolve_calln/2,
do_resolve_calln/2]).
:- use_module(library(extend_args)).
% This repeated univ-based decomposition may be slow, but it works:
%% resolve_calln(?Goal0, -Goal)
%
% Strip any number of call/N wrappers from Goal0, folding the extra
% call/N arguments onto the wrapped goal (via extend_args/3), and
% return the resulting plain goal. Module qualifications (M:G) are
% preserved and re-normalized with strip_module/3.
resolve_calln(M:CallN, A:B) :-
!,
resolve_calln(CallN, Goal),
strip_module(M:Goal, A, B).
resolve_calln(Meta, Goal) :-
do_resolve_calln(Meta, Meta2), !,
resolve_calln(Meta2, Goal).
resolve_calln(Goal, Goal).
%% do_resolve_calln(+Meta, -Meta2)
%
% Succeeds iff Meta is a call/N term; Meta2 is the wrapped goal with the
% trailing call/N arguments appended.
do_resolve_calln(Meta, Meta2) :-
Meta =.. [call, Call|Args],
extend_args(Call, Args, Meta2).
| TeamSPoon/logicmoo_workspace | packs_lib/xlibrary/prolog/resolve_calln.pl | Perl | mit | 2,103 |
#!/usr/bin/env perl
use lib qw(lib examples/lib);
use Cat;
use Rubyish;
my $oreo = Cat->new->name("Oreo");
puts $oreo->methods;
print $oreo->sound . "\n";
$oreo->play(qw(CHEESE BURGER));
print '$oreo is a ' . ref($oreo) . "\n";
print "Oreo to YAML:\n" . $oreo->to_yaml;
puts $oreo->inspect;
puts $oreo->ancestors;
| gitpan/Rubyish-Perl | examples/cat.pl | Perl | mit | 323 |
package Time::Local;
use strict;
use Carp ();
use Exporter;
our $VERSION = '1.28';
use parent 'Exporter';
our @EXPORT = qw( timegm timelocal );
our @EXPORT_OK
= qw( timegm_modern timelocal_modern timegm_nocheck timelocal_nocheck );
# Days per month in a non-leap year (February adjusted in timegm).
my @MonthDays = ( 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 );
# Determine breakpoint for rolling century
# (two-digit years within 50 years of "now" map into the current century).
my $ThisYear = ( localtime() )[5];
my $Breakpoint = ( $ThisYear + 50 ) % 100;
my $NextCentury = $ThisYear - $ThisYear % 100;
$NextCentury += 100 if $Breakpoint < 50;
my $Century = $NextCentury - 100;
# $SecOff: seconds offset applied on MacOS (epoch 1904); 0 elsewhere.
my $SecOff = 0;
# %Options: per-call flags (no_range_check / no_year_munging) localized
# by the *_nocheck / *_modern wrappers. %Cheat: month-start day cache.
my ( %Options, %Cheat );
use constant SECS_PER_MINUTE => 60;
use constant SECS_PER_HOUR => 3600;
use constant SECS_PER_DAY => 86400;
# $MaxDay: largest day number representable by the platform's time_t.
my $MaxDay;
if ( $] < 5.012000 ) {
require Config;
## no critic (Variables::ProhibitPackageVars)
my $MaxInt;
if ( $^O eq 'MacOS' ) {
# time_t is unsigned...
$MaxInt = ( 1 << ( 8 * $Config::Config{ivsize} ) )
- 1; ## no critic qw(ProhibitPackageVars)
}
else {
$MaxInt
= ( ( 1 << ( 8 * $Config::Config{ivsize} - 2 ) ) - 1 ) * 2
+ 1; ## no critic qw(ProhibitPackageVars)
}
$MaxDay = int( ( $MaxInt - ( SECS_PER_DAY / 2 ) ) / SECS_PER_DAY ) - 1;
}
else {
# recent localtime()'s limit is the year 2**31
$MaxDay = 365 * ( 2**31 );
}
# Determine the EPOC day for this machine
my $Epoc = 0;
if ( $^O eq 'vos' ) {
# work around posix-977 -- VOS doesn't handle dates in the range
# 1970-1980.
$Epoc = _daygm( 0, 0, 0, 1, 0, 70, 4, 0 );
}
elsif ( $^O eq 'MacOS' ) {
$MaxDay *= 2; # time_t unsigned ... quick hack?
# MacOS time() is seconds since 1 Jan 1904, localtime
# so we need to calculate an offset to apply later
$Epoc = 693901;
$SecOff = timelocal( localtime(0) ) - timelocal( gmtime(0) );
$Epoc += _daygm( gmtime(0) );
}
else {
$Epoc = _daygm( gmtime(0) );
}
%Cheat = (); # clear the cache as epoc has changed
# _daygm(@localtime_fields): day number (relative to $Epoc) for the date
# in fields [3..5] (mday, month, year). Month starts are memoized in
# %Cheat keyed on packed (month, year).
sub _daygm {
# This is written in such a byzantine way in order to avoid
# lexical variables and sub calls, for speed
return $_[3] + (
$Cheat{ pack( 'ss', @_[ 4, 5 ] ) } ||= do {
my $month = ( $_[4] + 10 ) % 12;
my $year = $_[5] + 1900 - int( $month / 10 );
( ( 365 * $year )
+ int( $year / 4 )
- int( $year / 100 )
+ int( $year / 400 )
+ int( ( ( $month * 306 ) + 5 ) / 10 ) ) - $Epoc;
}
);
}
# _timegm(@localtime_fields): epoch seconds for a gmtime-style field list.
sub _timegm {
my $sec
= $SecOff + $_[0]
+ ( SECS_PER_MINUTE * $_[1] )
+ ( SECS_PER_HOUR * $_[2] );
# &_daygm (no parens) deliberately re-passes this sub's @_ to _daygm.
return $sec + ( SECS_PER_DAY * &_daygm );
}
# timegm($sec, $min, $hour, $mday, $month, $year): epoch seconds for a GMT
# date. Applies the rolling-century year heuristic unless no_year_munging
# is set, and range-checks all fields unless no_range_check is set (both
# flags are localized in %Options by the wrapper subs). Croaks on invalid
# or out-of-range input.
sub timegm {
my ( $sec, $min, $hour, $mday, $month, $year ) = @_;
if ( $Options{no_year_munging} ) {
$year -= 1900;
}
else {
# year >= 1000 is an absolute year; 0..99 maps into the rolling
# century computed at load time; 100..999 is an offset from 1900.
if ( $year >= 1000 ) {
$year -= 1900;
}
elsif ( $year < 100 and $year >= 0 ) {
$year += ( $year > $Breakpoint ) ? $Century : $NextCentury;
}
}
unless ( $Options{no_range_check} ) {
Carp::croak("Month '$month' out of range 0..11")
if $month > 11
or $month < 0;
my $md = $MonthDays[$month];
++$md
if $month == 1 && _is_leap_year( $year + 1900 );
Carp::croak("Day '$mday' out of range 1..$md")
if $mday > $md or $mday < 1;
Carp::croak("Hour '$hour' out of range 0..23")
if $hour > 23 or $hour < 0;
Carp::croak("Minute '$min' out of range 0..59")
if $min > 59 or $min < 0;
Carp::croak("Second '$sec' out of range 0..59")
if $sec >= 60 or $sec < 0;
}
my $days = _daygm( undef, undef, undef, $mday, $month, $year );
# even with no range check on the fields, reject dates past time_t's limit
unless ( $Options{no_range_check} or abs($days) < $MaxDay ) {
my $msg = q{};
$msg .= "Day too big - $days > $MaxDay\n" if $days > $MaxDay;
$year += 1900;
$msg
.= "Cannot handle date ($sec, $min, $hour, $mday, $month, $year)";
Carp::croak($msg);
}
return
$sec + $SecOff
+ ( SECS_PER_MINUTE * $min )
+ ( SECS_PER_HOUR * $hour )
+ ( SECS_PER_DAY * $days );
}
# _is_leap_year($year): 1 if $year (a full Gregorian year, e.g. 2000) is
# a leap year, 0 otherwise. Rule: every 4th year, except century years,
# which are leap years only when divisible by 400.
sub _is_leap_year {
    my ($year) = @_;
    if ( $year % 400 == 0 ) {
        return 1;
    }
    if ( $year % 100 == 0 ) {
        return 0;
    }
    return $year % 4 == 0 ? 1 : 0;
}
# timegm without input range checking; local() restores the flag on exit.
# &timegm (no parens) re-passes this sub's @_ unchanged.
sub timegm_nocheck {
local $Options{no_range_check} = 1;
return &timegm;
}
# timegm that takes the year at face value (no rolling-century munging).
sub timegm_modern {
local $Options{no_year_munging} = 1;
return &timegm;
}
# timelocal(@fields): epoch seconds for a date expressed in local time.
# Computed by first treating the fields as GMT (&timegm re-passes @_),
# then correcting for the zone offset and DST transitions.
sub timelocal {
my $ref_t = &timegm;
my $loc_for_ref_t = _timegm( localtime($ref_t) );
# zero zone offset means local time == GMT; done.
my $zone_off = $loc_for_ref_t - $ref_t
or return $loc_for_ref_t;
# Adjust for timezone
my $loc_t = $ref_t - $zone_off;
# Are we close to a DST change, or are we done?
my $dst_off = $ref_t - _timegm( localtime($loc_t) );
# If this evaluates to true, it means that the value in $loc_t is
# the _second_ hour after a DST change where the local time moves
# backward.
if (
!$dst_off
&& ( ( $ref_t - SECS_PER_HOUR )
- _timegm( localtime( $loc_t - SECS_PER_HOUR ) ) < 0 )
) {
return $loc_t - SECS_PER_HOUR;
}
# Adjust for DST change
$loc_t += $dst_off;
return $loc_t if $dst_off > 0;
# If the original date was a non-existent time in a forward DST jump,
# we should now have the wrong answer - undo the DST adjustment
my ( $s, $m, $h ) = localtime($loc_t);
$loc_t -= $dst_off if $s != $_[0] || $m != $_[1] || $h != $_[2];
return $loc_t;
}
# timelocal without input range checking (see timegm_nocheck).
sub timelocal_nocheck {
local $Options{no_range_check} = 1;
return &timelocal;
}
# timelocal that takes the year at face value (no rolling-century munging).
sub timelocal_modern {
local $Options{no_year_munging} = 1;
return &timelocal;
}
1;
# ABSTRACT: Efficiently compute time from local and GMT time
__END__
=pod
=encoding UTF-8
=head1 NAME
Time::Local - Efficiently compute time from local and GMT time
=head1 VERSION
version 1.28
=head1 SYNOPSIS
use Time::Local;
my $time = timelocal( $sec, $min, $hour, $mday, $mon, $year );
my $time = timegm( $sec, $min, $hour, $mday, $mon, $year );
=head1 DESCRIPTION
This module provides functions that are the inverse of built-in perl functions
C<localtime()> and C<gmtime()>. They accept a date as a six-element array, and
return the corresponding C<time(2)> value in seconds since the system epoch
(Midnight, January 1, 1970 GMT on Unix, for example). This value can be
positive or negative, though POSIX only requires support for positive values,
so dates before the system's epoch may not work on all operating systems.
It is worth drawing particular attention to the expected ranges for the values
provided. The value for the day of the month is the actual day (i.e. 1..31),
while the month is the number of months since January (0..11). This is
consistent with the values returned from C<localtime()> and C<gmtime()>.
=head1 FUNCTIONS
=head2 C<timelocal_modern()> and C<timegm_modern()>
When C<Time::Local> was first written, it was a common practice to represent
years as a two-digit value like C<99> for C<1999> or C<1> for C<2001>. This
caused all sorts of problems (google "Y2K problem" if you're very young) and
developers eventually realized that this was a terrible idea.
The default exports of C<timelocal()> and C<timegm()> do a complicated
calculation when given a year value less than 1000. This leads to surprising
results in many cases. See L</Year Value Interpretation> for details.
The C<time*_modern()> subs do not do this year munging and simply take the
year value as provided.
While it would be nice to make this the default behavior, that would almost
certainly break a lot of code, so you must explicitly import these subs and
use them instead of the default C<timelocal()> and C<timegm()>.
You are B<strongly> encouraged to use these subs in any new code which uses
this module. It will almost certainly make your code's behavior less
surprising.
=head2 C<timelocal()> and C<timegm()>
This module exports two functions by default, C<timelocal()> and C<timegm()>.
The C<timelocal()> and C<timegm()> functions perform range checking on the
input $sec, $min, $hour, $mday, and $mon values by default.
=head2 C<timelocal_nocheck()> and C<timegm_nocheck()>
If you are working with data you know to be valid, you can speed your code up
by using the "nocheck" variants, C<timelocal_nocheck()> and
C<timegm_nocheck()>. These variants must be explicitly imported.
use Time::Local 'timelocal_nocheck';
# The 365th day of 1999
print scalar localtime timelocal_nocheck( 0, 0, 0, 365, 0, 99 );
If you supply data which is not valid (month 27, second 1,000) the results
will be unpredictable (so don't do that).
=head2 Year Value Interpretation
B<This does not apply to C<timelocal_modern> or C<timegm_modern>. Use those
exports if you want to ensure consistent behavior as your code ages.>
Strictly speaking, the year should be specified in a form consistent with
C<localtime()>, i.e. the offset from 1900. In order to make the interpretation
of the year easier for humans, however, who are more accustomed to seeing
years as two-digit or four-digit values, the following conventions are
followed:
=over 4
=item *
Years greater than 999 are interpreted as being the actual year, rather than
the offset from 1900. Thus, 1964 would indicate the year Martin Luther King
won the Nobel prize, not the year 3864.
=item *
Years in the range 100..999 are interpreted as offset from 1900, so that 112
indicates 2012. This rule also applies to years less than zero (but see note
below regarding date range).
=item *
Years in the range 0..99 are interpreted as shorthand for years in the rolling
"current century," defined as 50 years on either side of the current
year. Thus, today, in 1999, 0 would refer to 2000, and 45 to 2045, but 55
would refer to 1955. Twenty years from now, 55 would instead refer to
2055. This is messy, but matches the way people currently think about two
digit dates. Whenever possible, use an absolute four digit year instead.
=back
The scheme above allows interpretation of a wide range of dates, particularly
if 4-digit years are used.
=head2 Limits of time_t
On perl versions older than 5.12.0, the range of dates that can be actually be
handled depends on the size of C<time_t> (usually a signed integer) on the
given platform. Currently, this is 32 bits for most systems, yielding an
approximate range from Dec 1901 to Jan 2038.
Both C<timelocal()> and C<timegm()> croak if given dates outside the supported
range.
As of version 5.12.0, perl has stopped using the time implementation of the
operating system it's running on. Instead, it has its own implementation of
those routines with a safe range of at least +/- 2**52 (about 142 million
years)
=head2 Ambiguous Local Times (DST)
Because of DST changes, there are many time zones where the same local time
occurs for two different GMT times on the same day. For example, in the
"Europe/Paris" time zone, the local time of 2001-10-28 02:30:00 can represent
either 2001-10-28 00:30:00 GMT, B<or> 2001-10-28 01:30:00 GMT.
When given an ambiguous local time, the timelocal() function should always
return the epoch for the I<earlier> of the two possible GMT times.
=head2 Non-Existent Local Times (DST)
When a DST change causes a locale clock to skip one hour forward, there will
be an hour's worth of local times that don't exist. Again, for the
"Europe/Paris" time zone, the local clock jumped from 2001-03-25 01:59:59 to
2001-03-25 03:00:00.
If the C<timelocal()> function is given a non-existent local time, it will
simply return an epoch value for the time one hour later.
=head2 Negative Epoch Values
On perl version 5.12.0 and newer, negative epoch values are fully supported.
On older versions of perl, negative epoch (C<time_t>) values, which are not
officially supported by the POSIX standards, are known not to work on some
systems. These include MacOS (pre-OSX) and Win32.
On systems which do support negative epoch values, this module should be able
to cope with dates before the start of the epoch, down the minimum value of
time_t for the system.
=head1 IMPLEMENTATION
These routines are quite efficient and yet are always guaranteed to agree with
C<localtime()> and C<gmtime()>. We manage this by caching the start times of
any months we've seen before. If we know the start time of the month, we can
always calculate any time within the month. The start times are calculated
using a mathematical formula, unlike other algorithms that make multiple
calls to C<gmtime()>.
The C<timelocal()> function is implemented using the same cache. We just
assume that we're translating a GMT time, and then fudge it when we're done
for the timezone and daylight savings arguments. Note that the timezone is
evaluated for each date because countries occasionally change their official
timezones. Assuming that C<localtime()> corrects for these changes, this
routine will also be correct.
=head1 AUTHORS EMERITUS
This module is based on a Perl 4 library, timelocal.pl, that was
included with Perl 4.036, and was most likely written by Tom
Christiansen.
The current version was written by Graham Barr.
=head1 BUGS
The whole scheme for interpreting two-digit years can be considered a bug.
Bugs may be submitted at L<https://github.com/houseabsolute/Time-Local/issues>.
There is a mailing list available for users of this distribution,
L<mailto:datetime@perl.org>.
I am also usually active on IRC as 'autarch' on C<irc://irc.perl.org>.
=head1 SOURCE
The source code repository for Time-Local can be found at L<https://github.com/houseabsolute/Time-Local>.
=head1 AUTHOR
Dave Rolsky <autarch@urth.org>
=head1 CONTRIBUTORS
=for stopwords Florian Ragwitz J. Nick Koston Unknown
=over 4
=item *
Florian Ragwitz <rafl@debian.org>
=item *
J. Nick Koston <nick@cpanel.net>
=item *
Unknown <unknown@example.com>
=back
=head1 COPYRIGHT AND LICENSE
This software is copyright (c) 1997 - 2018 by Graham Barr & Dave Rolsky.
This is free software; you can redistribute it and/or modify it under
the same terms as the Perl 5 programming language system itself.
The full text of the license can be found in the
F<LICENSE> file included with this distribution.
=cut
| operepo/ope | client_tools/svc/rc/usr/share/perl5/core_perl/Time/Local.pm | Perl | mit | 14,362 |
#! /usr/bin/perl
# Generates a sorted list of fake events to be used by various SEG tests.
# Kept for posterity; a sample test data file is included in the pkg.
#
# Output format: one "SSSSS;id;state" line per event, ordered by timestamp.
use strict;
use warnings;

my $events = {};    # $events->{$timestamp}{$job_id} = $state
my $start  = 0;     # running fake clock, advanced by 0-4 per transition

for my $i (0 .. 49) {
    # Choose one of three job life-cycles.  The original code used
    # int(rand(2)), which yields only 0 or 1, leaving the third branch
    # (pending -> failed) unreachable; rand(3) makes all three live.
    my $r = int(rand(3));
    if ($r == 0) {
        # pending -> active -> done
        $events->{$start}->{$i} = "pending";
        $start += int(rand(5));
        $events->{$start}->{$i} = "active";
        $start += int(rand(5));
        $events->{$start}->{$i} = "done";
        $start += int(rand(5));
    } elsif ($r == 1) {
        # pending -> active -> failed
        $events->{$start}->{$i} = "pending";
        $start += int(rand(5));
        $events->{$start}->{$i} = "active";
        $start += int(rand(5));
        $events->{$start}->{$i} = "failed";
        $start += int(rand(5));
    } else {
        # pending -> failed (job never becomes active)
        $events->{$start}->{$i} = "pending";
        $start += int(rand(5));
        $events->{$start}->{$i} = "failed";
        $start += int(rand(5));
    }
}

# Emit events in timestamp order, timestamps zero-padded to five digits.
foreach my $stamp (sort { $a <=> $b } keys %{$events}) {
    foreach my $id (keys %{ $events->{$stamp} }) {
        printf "%05d;%s;%s\n", $stamp, $id, $events->{$stamp}{$id};
    }
}
package DateTime::TimeZone::Local::VMS;
{
    $DateTime::TimeZone::Local::VMS::VERSION = '1.57';
}

use strict;
use warnings;

use parent 'DateTime::TimeZone::Local';

# On VMS the only supported detection method is inspecting the environment.
sub Methods { return ('FromEnv') }

# Environment variables consulted, in priority order.  Note that the '$'
# characters here are literal parts of the VMS logical names.
sub EnvVars {
    return (
        'TZ',
        'SYS$TIMEZONE_RULE',
        'SYS$TIMEZONE_NAME',
        'UCX$TZ',
        'TCPIP$TZ',
    );
}

1;
# ABSTRACT: Determine the local system's time zone on VMS
__END__
=pod
=head1 NAME
DateTime::TimeZone::Local::VMS - Determine the local system's time zone on VMS
=head1 VERSION
version 1.57
=head1 SYNOPSIS
my $tz = DateTime::TimeZone->new( name => 'local' );
my $tz = DateTime::TimeZone::Local->TimeZone();
=head1 DESCRIPTION
This module provides methods for determining the local time zone on a
VMS platform.
NOTE: This is basically a stub pending an implementation by someone
who knows something about VMS.
=head1 HOW THE TIME ZONE IS DETERMINED
This class tries the following methods of determining the local time
zone:
=over 4
=item * %ENV
We check the following environment variables:
=over 8
=item * TZ
=item * SYS$TIMEZONE_RULE
=item * SYS$TIMEZONE_NAME
=item * UCX$TZ
=item * TCPIP$TZ
=back
=back
=head1 AUTHOR
Dave Rolsky <autarch@urth.org>
=head1 COPYRIGHT AND LICENSE
This software is copyright (c) 2013 by Dave Rolsky.
This is free software; you can redistribute it and/or modify it under
the same terms as the Perl 5 programming language system itself.
=cut
| Dokaponteam/ITF_Project | xampp/perl/vendor/lib/DateTime/TimeZone/Local/VMS.pm | Perl | mit | 1,404 |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by mktables from the Unicode
# database, Version 6.1.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly.
return <<'END';
10000 1000B
1000D 10026
10028 1003A
1003C 1003D
1003F 1004D
10050 1005D
10080 100FA
END
| Dokaponteam/ITF_Project | xampp/perl/lib/unicore/lib/Sc/Linb.pl | Perl | mit | 495 |
=pod
=head1 NAME
provider-kdf - The KDF library E<lt>-E<gt> provider functions
=head1 SYNOPSIS
=for openssl multiple includes
#include <openssl/core_dispatch.h>
#include <openssl/core_names.h>
/*
* None of these are actual functions, but are displayed like this for
* the function signatures for functions that are offered as function
* pointers in OSSL_DISPATCH arrays.
*/
/* Context management */
void *OSSL_FUNC_kdf_newctx(void *provctx);
void OSSL_FUNC_kdf_freectx(void *kctx);
void *OSSL_FUNC_kdf_dupctx(void *src);
/* Encryption/decryption */
int OSSL_FUNC_kdf_reset(void *kctx);
int OSSL_FUNC_kdf_derive(void *kctx, unsigned char *key, size_t keylen,
const OSSL_PARAM params[]);
/* KDF parameter descriptors */
const OSSL_PARAM *OSSL_FUNC_kdf_gettable_params(void *provctx);
const OSSL_PARAM *OSSL_FUNC_kdf_gettable_ctx_params(void *kcxt, void *provctx);
const OSSL_PARAM *OSSL_FUNC_kdf_settable_ctx_params(void *kcxt, void *provctx);
/* KDF parameters */
int OSSL_FUNC_kdf_get_params(OSSL_PARAM params[]);
int OSSL_FUNC_kdf_get_ctx_params(void *kctx, OSSL_PARAM params[]);
int OSSL_FUNC_kdf_set_ctx_params(void *kctx, const OSSL_PARAM params[]);
=head1 DESCRIPTION
This documentation is primarily aimed at provider authors. See L<provider(7)>
for further information.
The KDF operation enables providers to implement KDF algorithms and make
them available to applications via the API functions L<EVP_KDF_CTX_reset(3)>,
and L<EVP_KDF_derive(3)>.
All "functions" mentioned here are passed as function pointers between
F<libcrypto> and the provider in B<OSSL_DISPATCH> arrays via
B<OSSL_ALGORITHM> arrays that are returned by the provider's
provider_query_operation() function
(see L<provider-base(7)/Provider Functions>).
All these "functions" have a corresponding function type definition
named B<OSSL_FUNC_{name}_fn>, and a helper function to retrieve the
function pointer from an B<OSSL_DISPATCH> element named
B<OSSL_FUNC_{name}>.
For example, the "function" OSSL_FUNC_kdf_newctx() has these:
typedef void *(OSSL_OSSL_FUNC_kdf_newctx_fn)(void *provctx);
static ossl_inline OSSL_OSSL_FUNC_kdf_newctx_fn
OSSL_FUNC_kdf_newctx(const OSSL_DISPATCH *opf);
B<OSSL_DISPATCH> array entries are identified by numbers that are provided as
macros in L<openssl-core_dispatch.h(7)>, as follows:
OSSL_FUNC_kdf_newctx OSSL_FUNC_KDF_NEWCTX
OSSL_FUNC_kdf_freectx OSSL_FUNC_KDF_FREECTX
OSSL_FUNC_kdf_dupctx OSSL_FUNC_KDF_DUPCTX
OSSL_FUNC_kdf_reset OSSL_FUNC_KDF_RESET
OSSL_FUNC_kdf_derive OSSL_FUNC_KDF_DERIVE
OSSL_FUNC_kdf_get_params OSSL_FUNC_KDF_GET_PARAMS
OSSL_FUNC_kdf_get_ctx_params OSSL_FUNC_KDF_GET_CTX_PARAMS
OSSL_FUNC_kdf_set_ctx_params OSSL_FUNC_KDF_SET_CTX_PARAMS
OSSL_FUNC_kdf_gettable_params OSSL_FUNC_KDF_GETTABLE_PARAMS
OSSL_FUNC_kdf_gettable_ctx_params OSSL_FUNC_KDF_GETTABLE_CTX_PARAMS
OSSL_FUNC_kdf_settable_ctx_params OSSL_FUNC_KDF_SETTABLE_CTX_PARAMS
A KDF algorithm implementation may not implement all of these functions.
In order to be a consistent set of functions, at least the following functions
must be implemented: OSSL_FUNC_kdf_newctx(), OSSL_FUNC_kdf_freectx(),
OSSL_FUNC_kdf_set_ctx_params(), OSSL_FUNC_kdf_derive().
All other functions are optional.
=head2 Context Management Functions
OSSL_FUNC_kdf_newctx() should create and return a pointer to a provider side
structure for holding context information during a KDF operation.
A pointer to this context will be passed back in a number of the other KDF
operation function calls.
The parameter I<provctx> is the provider context generated during provider
initialisation (see L<provider(7)>).
OSSL_FUNC_kdf_freectx() is passed a pointer to the provider side KDF context in
the I<kctx> parameter.
If it receives NULL as I<kctx> value, it should not do anything other than
return.
This function should free any resources associated with that context.
OSSL_FUNC_kdf_dupctx() should duplicate the provider side KDF context in the
I<kctx> parameter and return the duplicate copy.
=head2 Encryption/Decryption Functions
OSSL_FUNC_kdf_reset() initialises a KDF operation given a provider
side KDF context in the I<kctx> parameter.
OSSL_FUNC_kdf_derive() performs the KDF operation after processing the
I<params> as per OSSL_FUNC_kdf_set_ctx_params().
The I<kctx> parameter contains a pointer to the provider side context.
The resulting key of the desired I<keylen> should be written to I<key>.
If the algorithm does not support the requested I<keylen> the function must
return error.
=head2 KDF Parameters
See L<OSSL_PARAM(3)> for further details on the parameters structure used by
these functions.
OSSL_FUNC_kdf_get_params() gets details of parameter values associated with the
provider algorithm and stores them in I<params>.
OSSL_FUNC_kdf_set_ctx_params() sets KDF parameters associated with the given
provider side KDF context I<kctx> to I<params>.
Any parameter settings are additional to any that were previously set.
Passing NULL for I<params> should return true.
OSSL_FUNC_kdf_get_ctx_params() retrieves gettable parameter values associated
with the given provider side KDF context I<kctx> and stores them in I<params>.
Passing NULL for I<params> should return true.
OSSL_FUNC_kdf_gettable_params(), OSSL_FUNC_kdf_gettable_ctx_params(),
and OSSL_FUNC_kdf_settable_ctx_params() all return constant B<OSSL_PARAM>
arrays as descriptors of the parameters that OSSL_FUNC_kdf_get_params(),
OSSL_FUNC_kdf_get_ctx_params(), and OSSL_FUNC_kdf_set_ctx_params()
can handle, respectively. OSSL_FUNC_kdf_gettable_ctx_params() and
OSSL_FUNC_kdf_settable_ctx_params() will return the parameters associated
with the provider side context I<kctx> in its current state if it is
not NULL. Otherwise, they return the parameters associated with the
provider side algorithm I<provctx>.
Parameters currently recognised by built-in KDFs are as follows. Not all
parameters are relevant to, or are understood by all KDFs:
=over 4
=item "size" (B<OSSL_KDF_PARAM_SIZE>) <unsigned integer>
Gets the output size from the associated KDF ctx.
If the algorithm produces a variable amount of output, SIZE_MAX should be
returned.
If the input parameters required to calculate the fixed output size have not yet
been supplied, 0 should be returned indicating an error.
=item "key" (B<OSSL_KDF_PARAM_KEY>) <octet string>
Sets the key in the associated KDF ctx.
=item "secret" (B<OSSL_KDF_PARAM_SECRET>) <octet string>
Sets the secret in the associated KDF ctx.
=item "pass" (B<OSSL_KDF_PARAM_PASSWORD>) <octet string>
Sets the password in the associated KDF ctx.
=item "cipher" (B<OSSL_KDF_PARAM_CIPHER>) <UTF8 string>
=item "digest" (B<OSSL_KDF_PARAM_DIGEST>) <UTF8 string>
=item "mac" (B<OSSL_KDF_PARAM_MAC>) <UTF8 string>
Sets the name of the underlying cipher, digest or MAC to be used.
It must name a suitable algorithm for the KDF that's being used.
=item "maclen" (B<OSSL_KDF_PARAM_MAC_SIZE>) <octet string>
Sets the length of the MAC in the associated KDF ctx.
=item "properties" (B<OSSL_KDF_PARAM_PROPERTIES>) <UTF8 string>
Sets the properties to be queried when trying to fetch the underlying algorithm.
This must be given together with the algorithm naming parameter to be
considered valid.
=item "iter" (B<OSSL_KDF_PARAM_ITER>) <unsigned integer>
Sets the number of iterations in the associated KDF ctx.
=item "mode" (B<OSSL_KDF_PARAM_MODE>) <UTF8 string>
Sets the mode in the associated KDF ctx.
=item "pkcs5" (B<OSSL_KDF_PARAM_PKCS5>) <integer>
Enables or disables the SP800-132 compliance checks.
A mode of 0 enables the compliance checks.
The checks performed are:
=over 4
=item - the iteration count is at least 1000.
=item - the salt length is at least 128 bits.
=item - the derived key length is at least 112 bits.
=back
=item "ukm" (B<OSSL_KDF_PARAM_UKM>) <octet string>
Sets an optional random string that is provided by the sender called
"partyAInfo". In CMS this is the user keying material.
=item "cekalg" (B<OSSL_KDF_PARAM_CEK_ALG>) <UTF8 string>
Sets the CEK wrapping algorithm name in the associated KDF ctx.
=item "n" (B<OSSL_KDF_PARAM_SCRYPT_N>) <unsigned integer>
Sets the scrypt work factor parameter N in the associated KDF ctx.
=item "r" (B<OSSL_KDF_PARAM_SCRYPT_R>) <unsigned integer>
Sets the scrypt work factor parameter r in the associated KDF ctx.
=item "p" (B<OSSL_KDF_PARAM_SCRYPT_P>) <unsigned integer>
Sets the scrypt work factor parameter p in the associated KDF ctx.
=item "maxmem_bytes" (B<OSSL_KDF_PARAM_SCRYPT_MAXMEM>) <unsigned integer>
Sets the scrypt work factor parameter maxmem in the associated KDF ctx.
=item "info" (B<OSSL_KDF_PARAM_INFO>) <octet string>
Sets the optional shared info in the associated KDF ctx.
=item "seed" (B<OSSL_KDF_PARAM_SEED>) <octet string>
Sets the IV in the associated KDF ctx.
=item "xcghash" (B<OSSL_KDF_PARAM_SSHKDF_XCGHASH>) <octet string>
Sets the xcghash in the associated KDF ctx.
=item "session_id" (B<OSSL_KDF_PARAM_SSHKDF_SESSION_ID>) <octet string>
Sets the session ID in the associated KDF ctx.
=item "type" (B<OSSL_KDF_PARAM_SSHKDF_TYPE>) <UTF8 string>
Sets the SSH KDF type parameter in the associated KDF ctx.
There are six supported types:
=over 4
=item EVP_KDF_SSHKDF_TYPE_INITIAL_IV_CLI_TO_SRV
The Initial IV from client to server.
A single char of value 65 (ASCII char 'A').
=item EVP_KDF_SSHKDF_TYPE_INITIAL_IV_SRV_TO_CLI
The Initial IV from server to client
A single char of value 66 (ASCII char 'B').
=item EVP_KDF_SSHKDF_TYPE_ENCRYPTION_KEY_CLI_TO_SRV
The Encryption Key from client to server
A single char of value 67 (ASCII char 'C').
=item EVP_KDF_SSHKDF_TYPE_ENCRYPTION_KEY_SRV_TO_CLI
The Encryption Key from server to client
A single char of value 68 (ASCII char 'D').
=item EVP_KDF_SSHKDF_TYPE_INTEGRITY_KEY_CLI_TO_SRV
The Integrity Key from client to server
A single char of value 69 (ASCII char 'E').
=item EVP_KDF_SSHKDF_TYPE_INTEGRITY_KEY_SRV_TO_CLI
The Integrity Key from server to client
A single char of value 70 (ASCII char 'F').
=back
=item "constant" (B<OSSL_KDF_PARAM_CONSTANT>) <octet string>
Sets the constant value in the associated KDF ctx.
=item "id" (B<OSSL_KDF_PARAM_PKCS12_ID>) <integer>
Sets the intended usage of the output bits in the associated KDF ctx.
It is defined as per RFC 7292 section B.3.
=back
=head1 RETURN VALUES
OSSL_FUNC_kdf_newctx() and OSSL_FUNC_kdf_dupctx() should return the newly created
provider side KDF context, or NULL on failure.
OSSL_FUNC_kdf_derive(), OSSL_FUNC_kdf_get_params(),
OSSL_FUNC_kdf_get_ctx_params() and OSSL_FUNC_kdf_set_ctx_params() should return 1 for
success or 0 on error.
OSSL_FUNC_kdf_gettable_params(), OSSL_FUNC_kdf_gettable_ctx_params() and
OSSL_FUNC_kdf_settable_ctx_params() should return a constant B<OSSL_PARAM>
array, or NULL if none is offered.
=head1 NOTES
The KDF life-cycle is described in L<life_cycle-kdf(7)>. Providers should
ensure that the various transitions listed there are supported. At some point
the EVP layer will begin enforcing the listed transitions.
=head1 SEE ALSO
L<provider(7)>, L<life_cycle-kdf(7)>, L<EVP_KDF(3)>.
=head1 HISTORY
The provider KDF interface was introduced in OpenSSL 3.0.
=head1 COPYRIGHT
Copyright 2020-2021 The OpenSSL Project Authors. All Rights Reserved.
Licensed under the Apache License 2.0 (the "License"). You may not use
this file except in compliance with the License. You can obtain a copy
in the file LICENSE in the source distribution or at
L<https://www.openssl.org/source/license.html>.
=cut
| kipid/blog | nodejs/openssl-master/doc/man7/provider-kdf.pod | Perl | mit | 11,667 |
use v5.14;
use warnings;
=head1 NAME
Attean::SimpleQueryEvaluator - Simple query evaluator
=head1 VERSION
This document describes Attean::SimpleQueryEvaluator version 0.002
=head1 SYNOPSIS
use v5.14;
use Attean;
my $e = Attean::SimpleQueryEvaluator->new( model => $model );
my $iter = $e->evaluate( $algebra, $active_graph );
=head1 DESCRIPTION
The Attean::SimpleQueryEvaluator class implements a simple query evaluator that,
given an L<Attean::API::Algebra|Attean::API::Query> and a L<Attean::API::Model>
object, evaluates the query represented by the algebra using data from the
model, and returns a query result.
=head1 ATTRIBUTES
=over 4
=cut
use Attean::Algebra;
use Attean::Expression;
package Attean::SimpleQueryEvaluator 0.001 {
use Moo;
use Attean::RDF;
use Scalar::Util qw(blessed);
use List::Util qw(all any reduce);
use Types::Standard qw(ConsumerOf);
use namespace::clean;
=item C<< model >>
The L<Attean::API::Model> object used for query evaluation.
=cut
has 'model' => (is => 'ro', isa => ConsumerOf['Attean::API::Model'], required => 1);
=item C<< default_graph >>
The L<Attean::API::IRI> object representing the default graph in the C<< model >>.
The default graph will be excluded from enumeration of graph names for query
features such as C<< GRAPH ?g {} >>.
=cut
has 'default_graph' => (is => 'ro', isa => ConsumerOf['Attean::API::IRI'], required => 1);
=back
=head1 METHODS
=over 4
=item C<< evaluate( $algebra, $active_graph ) >>
Returns an L<Attean::API::Iterator> object with results produced by evaluating
the query C<< $algebra >> against the evaluator's C<< model >>, using the
supplied C<< $active_graph >>.
=cut
# Evaluate a query algebra tree against the model, dispatching on the
# algebra node's class.  Returns an Attean::API::Iterator — of
# Attean::API::Result rows for most forms, of Attean::API::Term for ASK,
# and of Attean::API::Triple for CONSTRUCT.  Dies on unrecognized
# algebra or path types.
sub evaluate {
    my $self = shift;
    my $algebra = shift;
    my $active_graph = shift || Carp::confess "No active-graph passed to Attean::SimpleQueryEvaluator->evaluate";
    Carp::confess "No algebra passed for evaluation" unless ($algebra);
    # Expression evaluator used by Extend, Filter, OrderBy, Group, and LeftJoin.
    my $expr_eval = Attean::SimpleQueryEvaluator::ExpressionEvaluator->new( evaluator => $self );
    my @children = @{ $algebra->children };
    my ($child) = $children[0];
    if ($algebra->isa('Attean::Algebra::BGP')) {
        # Basic graph pattern: match each triple pattern, then join.
        my @triples = @{ $algebra->triples };
        if (scalar(@triples) == 0) {
            # The empty BGP matches exactly once with an empty binding set.
            my $b = Attean::Result->new( bindings => {} );
            return Attean::ListIterator->new(values => [$b], item_type => 'Attean::API::Result');
        } else {
            my @iters;
            my @new_vars;
            my %blanks;
            foreach my $t (@triples) {
                my $q = $t->as_quad_pattern($active_graph);
                my @values;
                foreach my $v ($q->values) {
                    if ($v->does('Attean::API::Blank')) {
                        # Blank nodes act as hidden variables; map each
                        # distinct blank label to one fresh variable.
                        unless (exists $blanks{$v->value}) {
                            $blanks{$v->value} = Attean::Variable->new();
                            push(@new_vars, $blanks{$v->value}->value);
                        }
                        push(@values, $blanks{$v->value});
                    } else {
                        push(@values, $v);
                    }
                }
                push(@iters, $self->model->get_bindings( @values ));
            }
            # Pairwise-join the per-triple iterators down to a single one.
            while (scalar(@iters) > 1) {
                my ($lhs, $rhs) = splice(@iters, 0, 2);
                unshift(@iters, $lhs->join($rhs));
            }
            # Project away the synthetic variables introduced for blanks.
            return shift(@iters)->map(sub { shift->project_complement(@new_vars) });
        }
    } elsif ($algebra->isa('Attean::Algebra::Distinct') or $algebra->isa('Attean::Algebra::Reduced')) {
        # Both DISTINCT and REDUCED are implemented as full duplicate
        # suppression keyed on the row's serialized form.
        my %seen;
        my $iter = $self->evaluate( $child, $active_graph );
        return $iter->grep(sub { return not($seen{ shift->as_string }++); });
    } elsif ($algebra->isa('Attean::Algebra::Extend')) {
        # Collapse a chain of nested Extend nodes into one pass, then
        # apply the collected extensions in order to each row.
        my $child = $algebra;
        my @extends;
        my %extends;
        while ($child->isa('Attean::Algebra::Extend')) {
            my $expr = $child->expression;
            my $var = $child->variable->value;
            $extends{ $var } = $expr;
            unshift(@extends, $var);
            ($child) = @{ $child->children };
        }
        return $self->evaluate( $child, $active_graph )->map(sub {
            my $r = shift;
            my %extension;
            my %row_cache;
            foreach my $var (@extends) {
                my $expr = $extends{ $var };
                my $val = $expr_eval->evaluate_expression( $expr, $r, $active_graph, \%row_cache );
#               warn "Extend error: $@" if ($@);
                # An expression error leaves this variable unbound.
                $r = Attean::Result->new( bindings => { $var => $val } )->join($r) if ($val);
            }
            return $r;
        });
    } elsif ($algebra->isa('Attean::Algebra::Filter')) {
        # TODO: Merge adjacent filter evaluation so that they can share a row_cache hash (as is done for Extend above)
        my $expr = $algebra->expression;
        my $iter = $self->evaluate( $child, $active_graph );
        return $iter->grep(sub {
            my $t = $expr_eval->evaluate_expression( $expr, shift, $active_graph, {} );
#           if ($@) { warn "Filter evaluation: $@\n" };
            # A row passes only if the expression's effective boolean
            # value (EBV) is true; errors (undef $t) filter the row out.
            return ($t ? $t->ebv : 0);
        });
    } elsif ($algebra->isa('Attean::Algebra::OrderBy')) {
        my $iter = $self->evaluate( $child, $active_graph );
        my @rows = $iter->elements;
        my @cmps = @{ $algebra->comparators };
        my @exprs = map { $_->expression } @cmps;
        my @dirs = map { $_->ascending } @cmps;
        # Schwartzian transform: pre-compute each row's sort-key terms,
        # then compare comparator-by-comparator, negating for descending.
        my @sorted = map { $_->[0] } sort {
            my ($ar, $avalues) = @$a;
            my ($br, $bvalues) = @$b;
            my $c = 0;
            foreach my $i (0 .. $#cmps) {
                my ($av, $bv) = map { $_->[$i] } ($avalues, $bvalues);
                $c = $av ? $av->compare($bv) : 1;
                $c *= -1 if ($dirs[$i] == 0);
                last unless ($c == 0);
            }
            $c
        } map { my $r = $_; [$r, [map { $expr_eval->evaluate_expression( $_, $r, $active_graph, {} ) } @exprs]] } @rows;
        return Attean::ListIterator->new( values => \@sorted, item_type => $iter->item_type);
    } elsif ($algebra->isa('Attean::Algebra::Graph')) {
        my $graph = $algebra->graph;
        # GRAPH <iri> { ... }: simply switch the active graph.
        return $self->evaluate($child, $graph) if ($graph->does('Attean::API::Term'));
        # GRAPH ?var { ... }: union over every named graph (the default
        # graph is excluded), binding the graph variable in each result.
        my @iters;
        my $graphs = $self->model->get_graphs();
        while (my $g = $graphs->next) {
            next if ($g->value eq $self->default_graph->value);
            my $gr = Attean::Result->new( bindings => { $graph->value => $g } );
            push(@iters, $self->evaluate($child, $g)->map(sub { if (my $result = shift->join($gr)) { return $result } else { return } }));
        }
        return Attean::IteratorSequence->new( iterators => \@iters, item_type => 'Attean::API::Result' );
    } elsif ($algebra->isa('Attean::Algebra::Group')) {
        my @groupby = @{ $algebra->groupby };
        my $iter = $self->evaluate($child, $active_graph);
        # Bucket rows by the space-joined serialization of their group-key
        # terms; each bucket carries its group bindings and its rows.
        my %groups;
        while (my $r = $iter->next) {
            my %vars;
            my %row_cache;
            my @group_terms = map { $expr_eval->evaluate_expression( $_, $r, $active_graph, \%row_cache ) } @groupby;
            my $key = join(' ', map { blessed($_) ? $_->as_string : '' } @group_terms);
            my %group_bindings;
            foreach my $i (0 .. $#group_terms) {
                my $v = $groupby[$i];
                if (blessed($v) and $v->isa('Attean::ValueExpression') and $v->value->does('Attean::API::Variable') and $group_terms[$i]) {
                    $group_bindings{$v->value->value} = $group_terms[$i];
                }
            }
            $groups{$key} = [Attean::Result->new( bindings => \%group_bindings ), []] unless (exists($groups{$key}));
            push(@{ $groups{$key}[1] }, $r);
        }
        my @keys = keys %groups;
        # With no input rows, aggregates still produce a single group.
        $groups{''} = [Attean::Result->new( bindings => {} ), []] if (scalar(@keys) == 0);
        my $aggs = $algebra->aggregates;
        my @results;
        foreach my $key (keys %groups) {
            my %row_cache;
            my ($binding, $rows) = @{ $groups{$key} };
            my $count = scalar(@$rows);
            my %bindings;
            foreach my $i (0 .. $#{ $aggs }) {
                my $name = $aggs->[$i]->variable->value;
                my $term = $expr_eval->evaluate_expression( $aggs->[$i], $rows, $active_graph, {} );
#               warn "AGGREGATE error: $@" if ($@);
                $bindings{ $name } = $term if ($term);
            }
            push(@results, Attean::Result->new( bindings => \%bindings )->join($binding));
        }
        return Attean::ListIterator->new(values => \@results, item_type => 'Attean::API::Result');
    } elsif ($algebra->isa('Attean::Algebra::Join')) {
        my ($lhs, $rhs) = map { $self->evaluate($_, $active_graph) } @children;
        return $lhs->join($rhs);
    } elsif ($algebra->isa('Attean::Algebra::LeftJoin')) {
        # OPTIONAL: every LHS row is kept; compatible RHS rows that also
        # satisfy the join expression are attached when present.
        my $expr = $algebra->expression;
        my ($lhs_iter, $rhs_iter) = map { $self->evaluate($_, $active_graph) } @children;
        my @rhs = $rhs_iter->elements;
        my @results;
        while (my $lhs = $lhs_iter->next) {
            my $joined = 0;
            foreach my $rhs (@rhs) {
                if (my $j = $lhs->join($rhs)) {
                    if ($expr_eval->evaluate_expression( $expr, $j, $active_graph, {} )->ebv) {
                        $joined++;
                        push(@results, $j);
                    }
                }
            }
            push(@results, $lhs) unless ($joined);
        }
        return Attean::ListIterator->new( values => \@results, item_type => 'Attean::API::Result');
    } elsif ($algebra->isa('Attean::Algebra::Minus')) {
        my ($lhsi, $rhs) = map { $self->evaluate($_, $active_graph) } @children;
        my @rhs = $rhs->elements;
        my @results;
        while (my $lhs = $lhsi->next) {
            my @compatible;
            my @disjoint;
            RHS: foreach my $rhs (@rhs) {
                if (my $j = $lhs->join($rhs)) {
                    push(@compatible, 1);
                } else {
                    push(@compatible, 0);
                }
                my $intersects = 0;
                my %lhs_dom = map { $_ => 1 } $lhs->variables;
                foreach my $rvar ($rhs->variables) {
                    if (exists $lhs_dom{$rvar}) {
                        $intersects = 1;
                    }
                }
                push(@disjoint, not($intersects));
            }
            my $count = scalar(@rhs);
            my $keep = 1;
            # Per SPARQL MINUS: keep the LHS row only if every RHS row is
            # either incompatible with it or shares no variables with it.
            foreach my $i (0 .. $#rhs) {
                $keep = 0 unless ($compatible[$i] == 0 or $disjoint[$i] == 1);
            }
            push(@results, $lhs) if ($keep);
        }
        return Attean::ListIterator->new( values => \@results, item_type => 'Attean::API::Result');
    } elsif ($algebra->isa('Attean::Algebra::Path')) {
        # Property-path evaluation, dispatched on the path node's class.
        my $s = $algebra->subject;
        my $path = $algebra->path;
        my $o = $algebra->object;
        my @children = @{ $path->children };
        my ($child) = $children[0];
        # A plain predicate path is an ordinary triple-pattern match.
        return $self->model->get_bindings( $s, $path->predicate, $o, $active_graph ) if ($path->isa('Attean::Algebra::PredicatePath'));
        if ($path->isa('Attean::Algebra::InversePath')) {
            # ^p: evaluate the inner path with subject and object swapped.
            my $path = Attean::Algebra::Path->new( subject => $o, path => $child, object => $s );
            return $self->evaluate( $path, $active_graph );
        } elsif ($path->isa('Attean::Algebra::AlternativePath')) {
            # p1|p2: union of the alternatives' results.
            my @children = @{ $path->children };
            my @algebras = map { Attean::Algebra::Path->new( subject => $s, path => $_, object => $o ) } @children;
            my @iters = map { $self->evaluate($_, $active_graph) } @algebras;
            return Attean::IteratorSequence->new( iterators => \@iters, item_type => $iters[0]->item_type );
        } elsif ($path->isa('Attean::Algebra::NegatedPropertySet')) {
            # !(p1|...|pn): any edge whose predicate is not in the set.
            my $preds = $path->predicates;
            my %preds = map { $_->value => 1 } @$preds;
            my $filter = $self->model->get_quads($s, undef, $o, $active_graph)->grep(sub {
                my $q = shift;
                my $p = $q->predicate;
                return not exists $preds{ $p->value };
            });
            my %vars;
            $vars{subject} = $s->value if ($s->does('Attean::API::Variable'));
            $vars{object} = $o->value if ($o->does('Attean::API::Variable'));
            return $filter->map(sub {
                my $q = shift;
                return unless $q;
                my %bindings = map { $vars{$_} => $q->$_() } (keys %vars);
                return Attean::Result->new( bindings => \%bindings );
            }, 'Attean::API::Result');
        } elsif ($path->isa('Attean::Algebra::SequencePath')) {
            if (scalar(@children) == 1) {
                my $path = Attean::Algebra::Path->new( subject => $s, path => $children[0], object => $o );
                return $self->evaluate($path, $active_graph);
            } else {
                # p1/p2/...: chain the steps through fresh join variables,
                # join the per-step paths, then project the temporaries away.
                my @paths;
                my $first = shift(@children);
                my $join = Attean::Variable->new();
                my @new_vars = ($join->value);
                push(@paths, Attean::Algebra::Path->new( subject => $s, path => $first, object => $join ));
                foreach my $i (0 .. $#children) {
                    my $newjoin = Attean::Variable->new();
                    my $obj = ($i == $#children) ? $o : $newjoin;
                    push(@new_vars, $newjoin->value);
                    push(@paths, Attean::Algebra::Path->new( subject => $join, path => $children[$i], object => $obj ));
                    $join = $newjoin;
                }
                while (scalar(@paths) > 1) {
                    my ($l, $r) = splice(@paths, 0, 2);
                    unshift(@paths, Attean::Algebra::Join->new( children => [$l, $r] ));
                }
                return $self->evaluate(shift(@paths), $active_graph)->map(sub { shift->project_complement(@new_vars) });
            }
        } elsif ($path->isa('Attean::Algebra::ZeroOrMorePath') or $path->isa('Attean::Algebra::OneOrMorePath')) {
            # p* / p+: transitive closure via _ALP, dispatched on which
            # endpoints are ground terms versus variables.
            if ($s->does('Attean::API::Term') and $o->does('Attean::API::Variable')) {
                my $v = {};
                if ($path->isa('Attean::Algebra::ZeroOrMorePath')) {
                    $self->_ALP($active_graph, $s, $child, $v);
                } else {
                    # p+: seed the closure with one-step successors of $s.
                    my $iter = $self->_eval($active_graph, $s, $child);
                    while (my $n = $iter->next) {
                        $self->_ALP($active_graph, $n, $child, $v);
                    }
                }
                my @results = map { Attean::Result->new( bindings => { $o->value => $_ } ) } (values %$v);
                return Attean::ListIterator->new(values => \@results, item_type => 'Attean::API::Result');
            } elsif ($s->does('Attean::API::Variable') and $o->does('Attean::API::Variable')) {
                # Var-to-var: restart the path from every node in the graph.
                my $nodes = $self->model->graph_nodes( $active_graph );
                my @results;
                while (my $t = $nodes->next) {
                    my $tr = Attean::Result->new( bindings => { $s->value => $t } );
                    my $p = Attean::Algebra::Path->new( subject => $t, path => $path, object => $o );
                    my $iter = $self->evaluate($p, $active_graph);
                    while (my $r = $iter->next) {
                        push(@results, $r->join($tr));
                    }
                }
                return Attean::ListIterator->new(values => \@results, item_type => 'Attean::API::Result');
            } elsif ($s->does('Attean::API::Variable') and $o->does('Attean::API::Term')) {
                # Var-to-term: evaluate the inverse path term-to-var.
                my $pp = Attean::Algebra::InversePath->new( children => [$child] );
                my $p = Attean::Algebra::Path->new( subject => $o, path => $pp, object => $s );
                return $self->evaluate($p, $active_graph);
            } else { # Term ZeroOrMorePath(path) Term
                # Term-to-term: true iff $o is in the closure from $s,
                # yielding a single empty result in that case.
                my $v = {};
                $self->_ALP($active_graph, $s, $child, $v);
                my @results;
                foreach my $v (values %$v) {
                    return Attean::ListIterator->new(values => [Attean::Result->new()], item_type => 'Attean::API::Result')
                        if ($v->equals($o));
                }
                return Attean::ListIterator->new(values => [], item_type => 'Attean::API::Result');
            }
        } elsif ($path->isa('Attean::Algebra::ZeroOrOnePath')) {
            # p?: de-duplicated one-step matches plus zero-length matches.
            my $path = Attean::Algebra::Path->new( subject => $s, path => $child, object => $o );
            my @iters;
            my %seen;
            push(@iters, $self->evaluate( $path, $active_graph )->grep(sub { return not($seen{shift->as_string}++); }));
            push(@iters, $self->_zeroLengthPath($s, $o, $active_graph));
            return Attean::IteratorSequence->new( iterators => \@iters, item_type => 'Attean::API::Result' );
        }
        die "Unimplemented path type: $path";
    } elsif ($algebra->isa('Attean::Algebra::Project')) {
        # Restrict each row to the projected variables; unbound
        # projection variables are simply omitted from the row.
        my $iter = $self->evaluate( $child, $active_graph );
        my @vars = map { $_->value } @{ $algebra->variables };
        return $iter->map(sub {
            my $r = shift;
            my $b = { map { my $t = $r->value($_); $t ? ($_ => $t) : () } @vars };
            return Attean::Result->new( bindings => $b );
        }); #->debug('Project result');
    } elsif ($algebra->isa('Attean::Algebra::Slice')) {
        # OFFSET/LIMIT; a negative limit means "no limit".
        my $iter = $self->evaluate( $child, $active_graph );
        $iter = $iter->offset($algebra->offset) if ($algebra->offset > 0);
        $iter = $iter->limit($algebra->limit) if ($algebra->limit >= 0);
        return $iter;
    } elsif ($algebra->isa('Attean::Algebra::Union')) {
        return Attean::IteratorSequence->new( iterators => [map { $self->evaluate($_, $active_graph) } @children], item_type => 'Attean::API::Result');
    } elsif ($algebra->isa('Attean::Algebra::Ask')) {
        # ASK: a single boolean term — true iff any result exists.
        my $iter = $self->evaluate($child, $active_graph);
        my $result = $iter->next;
        return Attean::ListIterator->new(values => [$result ? Attean::Literal->true : Attean::Literal->false], item_type => 'Attean::API::Term');
    } elsif ($algebra->isa('Attean::Algebra::Construct')) {
        # CONSTRUCT: instantiate the template triples for each solution,
        # skipping templates with unbound slots and de-duplicating output.
        my $iter = $self->evaluate($child, $active_graph);
        my $patterns = $algebra->triples;
        my %seen;
        return Attean::CodeIterator->new(
            generator => sub {
                my $r = $iter->next;
                return unless ($r);
                my %mapping = map { my $t = $r->value($_); $t ? ("?$_" => $t) : (); } ($r->variables);
                my $mapper = Attean::TermMap->rewrite_map(\%mapping);
                my @triples;
                PATTERN: foreach my $p (@$patterns) {
                    my @terms = $p->apply_map($mapper)->values;
                    next PATTERN unless all { $_->does('Attean::API::Term') } @terms;
                    push(@triples, Attean::Triple->new(@terms));
                }
                return @triples;
            },
            item_type => 'Attean::API::Triple'
        )->grep(sub { return not($seen{shift->as_string}++); });
    } elsif ($algebra->isa('Attean::Algebra::Table')) {
        # VALUES: the rows are already materialized in the algebra node.
        return Attean::ListIterator->new(values => $algebra->rows, item_type => 'Attean::API::Result');
    }
    die "Unimplemented algebra evaluation for: $algebra";
}
# Recursively collect every node reachable from $term via $path into the
# visited set $v (keyed by the node's serialization), following the
# ALP ("arbitrary length path") procedure from the SPARQL 1.1 spec.
sub _ALP {
    my ($self, $graph, $term, $path, $v) = @_;
    my $key = $term->as_string;
    # Already visited: stop, preventing cycles from recursing forever.
    return if (exists $v->{$key});
    $v->{$key} = $term;
    my $reachable = $self->_eval($graph, $term, $path);
    while (my $next = $reachable->next) {
        $self->_ALP($graph, $next, $path, $v);
    }
}
# Evaluate a single path step from $term, returning an iterator over the
# distinct terms reachable in one application of $path.
sub _eval {
    my ($self, $graph, $term, $path) = @_;
    my $step = Attean::Algebra::Path->new(
        subject => $term,
        path    => $path,
        object  => variable('o'),
    );
    my $results = $self->evaluate($step, $graph);
    my %seen;
    # Extract the ?o binding from each row and drop duplicates.
    return $results->map(sub { shift->value('o') }, 'Attean::API::Term')
                   ->grep(sub { not $seen{ shift->as_string }++ });
}
# Produce the zero-length path solutions between $subj and $obj: a node
# always reaches itself in zero steps.  Handles all four combinations of
# ground term vs. variable at the two endpoints.
sub _zeroLengthPath {
    my ($self, $subj, $obj, $graph) = @_;
    my $subj_is_term = $subj->does('Attean::API::Term');
    my $obj_is_term  = $obj->does('Attean::API::Term');
    if ($subj_is_term and $obj_is_term) {
        # Two ground terms: match (once, with no bindings) iff equal.
        my @results = $subj->equals($obj) ? (Attean::Result->new()) : ();
        return Attean::ListIterator->new(values => \@results, item_type => 'Attean::API::Result');
    }
    if ($subj_is_term) {
        # Ground subject: bind the object variable to the subject term.
        my $binding = Attean::Result->new( bindings => { $obj->value => $subj } );
        return Attean::ListIterator->new(values => [$binding], item_type => 'Attean::API::Result');
    }
    if ($obj_is_term) {
        # Ground object: bind the subject variable to the object term.
        my $binding = Attean::Result->new( bindings => { $subj->value => $obj } );
        return Attean::ListIterator->new(values => [$binding], item_type => 'Attean::API::Result');
    }
    # Both endpoints are variables: every node in the graph matches
    # itself, binding both variables to the same term.
    my @names = map { $_->value } ($subj, $obj);
    return $self->model->graph_nodes($graph)->map(sub {
        my $node = shift;
        return Attean::Result->new( bindings => { map { $_ => $node } @names } );
    }, 'Attean::API::Result');
}
}
package Attean::SimpleQueryEvaluator::ExpressionEvaluator 0.001 {
use Moo;
use Attean::RDF;
use Scalar::Util qw(blessed);
use Types::Standard qw(InstanceOf);
use URI::Escape qw(uri_escape_utf8);
use Encode qw(encode);
use POSIX qw(ceil floor);
use Digest;
use Data::UUID;
use List::MoreUtils qw(zip);
use DateTime::Format::W3CDTF;
use I18N::LangTags;
use namespace::clean;
has 'evaluator' => (is => 'ro', isa => InstanceOf['Attean::SimpleQueryEvaluator']);
# Evaluate the expression $expr against a single solution $row within
# $active_graph. $row_cache (optional hashref) carries per-row state such as
# BNODE() labels. Returns the resulting term, or undef when evaluation dies
# (SPARQL type errors are deliberately swallowed here and yield "unbound").
sub evaluate_expression {
	my ($self, $expr, $row, $active_graph, $row_cache)	= @_;
	$row_cache	||= {};
	my $code	= $self->impl($expr, $active_graph);
	return eval { $code->($row, row_cache => $row_cache) };
}
## Compile the expression $expr (for $active_graph) into a closure.
## The returned coderef takes ($row, %args) -- $row is an Attean::API::Result,
## %args may carry a 'row_cache' hashref -- and returns an Attean::API::Term,
## or dies on SPARQL type/evaluation errors (callers catch via eval).
sub impl {
	my $self	= shift;
	my $expr	= shift;
	my $active_graph	= shift;
	my $op	= $expr->operator;
	# Shared true/false literals, closed over by the compiled closures below.
	my $true	= Attean::Literal->true;
	my $false	= Attean::Literal->false;
	if ($expr->isa('Attean::ExistsExpression')) {
		# EXISTS { pattern }: join the current row (as a 1-row table) with the
		# pattern and test whether any solution comes back.
		my $pattern	= $expr->pattern;
		return sub {
			my $r	= shift;
			my $table	= Attean::Algebra::Table->new( variables => [map { variable($_) } $r->variables], rows => [$r] );
			my $join	= Attean::Algebra::Join->new( children => [$table, $pattern] );
			# TODO: substitute variables at top-level of EXISTS pattern
			my $iter	= $self->evaluator->evaluate($join, $active_graph);
			return ($iter->next) ? $true : $false;
		};
	} elsif ($expr->isa('Attean::ValueExpression')) {
		# A constant term or a variable lookup against the row.
		my $node	= $expr->value;
		if ($node->does('Attean::API::Variable')) {
			return sub { return shift->value($node->value); };
		} else {
			return sub { return $node };
		}
	} elsif ($expr->isa('Attean::UnaryExpression')) {
		my ($child)	= @{ $expr->children };
		my $impl	= $self->impl($child, $active_graph);
		if ($op eq '!') {
			return sub {
				my $term	= $impl->(@_);
				return ($term->ebv) ? $false : $true;
			}
		} elsif ($op eq '-' or $op eq '+') {
			return sub {
				my $term	= $impl->(@_);
				die "TypeError $op" unless (blessed($term) and $term->does('Attean::API::NumericLiteral'));
				my $v	= $term->numeric_value;
				# string-eval applies the unary sign to the numeric value
				return Attean::Literal->new( value => eval "$op$v", datatype => $term->datatype );
			};
		}
		die "Unimplemented UnaryExpression evaluation: " . $expr->operator;
	} elsif ($expr->isa('Attean::BinaryExpression')) {
		my ($lhs, $rhs)	= @{ $expr->children };
		my ($lhsi, $rhsi)	= map { $self->impl($_, $active_graph) } ($lhs, $rhs);
		if ($op eq '&&') {
			# SPARQL logical-and with three-valued error semantics: an error on
			# one side is tolerated if the other side already decides the result.
			return sub {
				my ($r, %args)	= @_;
				my $lbv	= eval { $lhsi->($r, %args) };
				my $rbv	= eval { $rhsi->($r, %args) };
				die "TypeError $op" unless ($lbv or $rbv);
				return $false if (not($lbv) and not($rbv->ebv));
				return $false if (not($rbv) and not($lbv->ebv));
				die "TypeError $op" unless ($lbv and $rbv);
				return ($lbv->ebv && $rbv->ebv) ? $true : $false;
			}
		} elsif ($op eq '||') {
			# SPARQL logical-or: short-circuits on a true lhs; an lhs error is
			# recoverable if the rhs is true.
			return sub {
				my ($r, %args)	= @_;
				my $lbv	= eval { $lhsi->($r, %args) };
				return $true if ($lbv and $lbv->ebv);
				my $rbv	= eval { $rhsi->($r, %args) };
				die "TypeError $op" unless ($rbv);
				return $true if ($rbv->ebv);
				return $false if ($lbv);
				die "TypeError $op";
			}
		} elsif ($op =~ m#^(?:[-+*/])$#) {	# numeric operators: - + * /
			# NOTE(review): the assignments below clobber the closed-over
			# $lhs/$rhs expression objects with term values.  It works because
			# $lhsi/$rhsi were captured above, but fresh lexicals would be safer.
			return sub {
				my ($r, %args)	= @_;
				($lhs, $rhs)	= map { $_->($r, %args) } ($lhsi, $rhsi);
				for ($lhs, $rhs) { die "TypeError $op" unless (blessed($_) and $_->does('Attean::API::NumericLiteral')); }
				my $lv	= $lhs->numeric_value;
				my $rv	= $rhs->numeric_value;
				return Attean::Literal->new( value => eval "$lv $op $rv", datatype => $lhs->binary_promotion_type($rhs, $op) );
			};
		} elsif ($op =~ /^!?=$/) {
			# term (in)equality via RDF term equality
			return sub {
				my ($r, %args)	= @_;
				($lhs, $rhs)	= map { $_->($r, %args) } ($lhsi, $rhsi);
				for ($lhs, $rhs) { die "TypeError $op" unless (blessed($_) and $_->does('Attean::API::Term')); }
				my $ok	= ($lhs->equals($rhs));
				$ok		= not($ok) if ($op eq '!=');
				return $ok ? $true : $false;
			}
		} elsif ($op =~ /^[<>]=?$/) {
			# ordering comparisons via Term->compare
			return sub {
				my ($r, %args)	= @_;
				($lhs, $rhs)	= map { $_->($r, %args) } ($lhsi, $rhsi);
				for ($lhs, $rhs) { die "TypeError $op" unless $_->does('Attean::API::Term'); }
				my $c	= ($lhs->compare($rhs));
				return $true if (($c < 0 and ($op =~ /<=?/)) or ($c > 0 and ($op =~ />=?/)) or ($c == 0 and ($op =~ /=/)));
				return $false;
			}
		}
		die "Unexpected operator evaluation: $op";
	} elsif ($expr->isa('Attean::FunctionExpression')) {
		# Built-in function calls.  Children are compiled once; the dispatch on
		# $func happens per-row inside the returned closure.
		my $func	= $expr->operator;
		my @children	= map { $self->impl($_, $active_graph) } @{ $expr->children };
		my %type_roles	= qw(URI IRI IRI IRI BLANK Blank LITERAL Literal NUMERIC NumericLiteral);
		my %type_classes	= qw(URI Attean::IRI IRI Attean::IRI STR Attean::Literal);
		return sub {
			my ($r, %args)	= @_;
			my $row_cache	= $args{row_cache} || {};
			if ($func eq 'IF') {
				# lazily evaluate only the chosen branch
				my $term	= $children[0]->( $r, %args );
				return ($term->ebv) ? $children[1]->( $r, %args ) : $children[2]->( $r, %args );
			} elsif ($func eq 'IN' or $func eq 'NOTIN') {
				# NOTE(review): this swaps the *closed-over* $true/$false on
				# every invocation of the compiled closure, so evaluating a
				# NOT IN expression an even number of times restores the
				# original polarity -- almost certainly a bug; local copies
				# should be swapped instead.
				($true, $false)	= ($false, $true) if ($func eq 'NOTIN');
				my $child	= shift(@children);
				my $term	= $child->( $r, %args );
				foreach my $c (@children) {
					if (my $value = eval { $c->( $r, %args ) }) {
						return $true if ($term->equals($value));
					}
				}
				return $false;
			} elsif ($func eq 'COALESCE') {
				# first child that evaluates without error to a true value
				foreach my $c (@children) {
					my $t	= eval { $c->( $r, %args ) };
					next if ($@);
					return $t if $t;
				}
				return;
			}
			# All remaining functions evaluate their arguments eagerly.
			my @operands	= map { $_->( $r, %args ) } @children;
			if ($func =~ /^(STR)$/) {
				return $type_classes{$1}->new($operands[0]->value);
			} elsif ($func =~ /^([UI]RI)$/) {
				my @base	= $expr->has_base ? (base => $expr->base) : ();
				return $type_classes{$1}->new(value => $operands[0]->value, @base);
			} elsif ($func eq 'BNODE') {
				# BNODE(label) returns a stable blank node per label per row
				# (memoized in the row cache); BNODE() is always fresh.
				if (scalar(@operands)) {
					my $name	= $operands[0]->value;
					if (my $b = $row_cache->{bnodes}{$name}) {
						return $b;
					} else {
						my $b	= Attean::Blank->new();
						$row_cache->{bnodes}{$name}	= $b;
						return $b;
					}
				}
				return Attean::Blank->new();
			} elsif ($func eq 'LANG') {
				die "TypeError: LANG" unless ($operands[0]->does('Attean::API::Literal'));
				return Attean::Literal->new($operands[0]->language // '');
			} elsif ($func eq 'LANGMATCHES') {
				my ($lang, $match)	= map { $_->value } @operands;
				if ($match eq '*') {
					# """A language-range of "*" matches any non-empty language-tag string."""
					return ($lang ? $true : $false);
				} else {
					return (I18N::LangTags::is_dialect_of( $lang, $match )) ? $true : $false;
				}
			} elsif ($func eq 'DATATYPE') {
				return $operands[0]->datatype;
			} elsif ($func eq 'BOUND') {
				return $operands[0] ? $true : $false;
			} elsif ($func eq 'RAND') {
				return Attean::Literal->new( value => rand(), datatype => 'http://www.w3.org/2001/XMLSchema#double' );
			} elsif ($func eq 'ABS') {
				return Attean::Literal->new( value => abs($operands[0]->value), $operands[0]->construct_args );
			} elsif ($func =~ /^(?:CEIL|FLOOR)$/) {
				my $v	= $operands[0]->value;
				return Attean::Literal->new( value => (($func eq 'CEIL') ? ceil($v) : floor($v)), $operands[0]->construct_args );
			} elsif ($func eq 'ROUND') {
				# epsilon nudges exact .5 values upward before sprintf's
				# banker's rounding can round them down
				return Attean::Literal->new( value => sprintf('%.0f', (0.000000000000001 + $operands[0]->numeric_value)), $operands[0]->construct_args );
			} elsif ($func eq 'CONCAT') {
				# Result is language-tagged only if *all* args share one tag;
				# xsd:string only if all args are xsd:string; else a plain literal.
				my $all_lang	= 1;
				my $all_str		= 1;
				my $lang;
				foreach my $n (@operands) {
					die "CONCAT called with a non-literal argument" unless ($n->does('Attean::API::Literal'));
					if ($n->datatype->value ne 'http://www.w3.org/2001/XMLSchema#string') {
						die "CONCAT called with a datatyped-literal other than xsd:string";
					} elsif ($n->language) {
						$all_str	= 0;
						if (defined($lang) and $lang ne $n->language) {
							$all_lang	= 0;
						} else {
							$lang	= $n->language;
						}
					} else {
						$all_lang	= 0;
						$all_str	= 0;
					}
				}
				my %strtype;
				if ($all_lang and $lang) {
					$strtype{language}	= $lang;
				} elsif ($all_str) {
					$strtype{datatype}	= 'http://www.w3.org/2001/XMLSchema#string'
				}
				return Attean::Literal->new( value => join('', map { $_->value } @operands), %strtype );
			} elsif ($func eq 'SUBSTR') {
				# SPARQL SUBSTR is 1-based; Perl substr is 0-based
				my $str	= shift(@operands);
				my @args	= map { $_->numeric_value } @operands;
				my $v	= scalar(@args == 1) ? substr($str->value, $args[0]-1) : substr($str->value, $args[0]-1, $args[1]);
				return Attean::Literal->new( value => $v, $str->construct_args );
			} elsif ($func eq 'STRLEN') {
				return Attean::Literal->integer(length($operands[0]->value));
			} elsif ($func eq 'REPLACE') {
				my ($node, $pat, $rep)	= @operands;
				die "TypeError: REPLACE called without a literal arg1 term" unless (blessed($node) and $node->does('Attean::API::Literal'));
				die "TypeError: REPLACE called without a literal arg2 term" unless (blessed($pat) and $pat->does('Attean::API::Literal'));
				die "TypeError: REPLACE called without a literal arg3 term" unless (blessed($rep) and $rep->does('Attean::API::Literal'));
				die "TypeError: REPLACE called with a datatyped (non-xsd:string) literal" if ($node->datatype and $node->datatype->value ne 'http://www.w3.org/2001/XMLSchema#string');
				my ($value, $pattern, $replace)	= map { $_->value } @operands;
				# refuse embedded-code regex constructs before the double-eval below
				die "EvaluationError: REPLACE called with unsafe ?{} match pattern" if (index($pattern, '(?{') != -1 or index($pattern, '(??{') != -1);
				die "EvaluationError: REPLACE called with unsafe ?{} replace pattern" if (index($replace, '(?{') != -1 or index($replace, '(??{') != -1);
				# quote the replacement into a string expression so $1-style
				# group references survive the s///ee double evaluation
				$replace	=~ s/\\/\\\\/g;
				$replace	=~ s/\$(\d+)/\$$1/g;
				$replace	=~ s/"/\\"/g;
				$replace	= qq["$replace"];
				no warnings 'uninitialized';
				$value	=~ s/$pattern/"$replace"/eeg;
				return Attean::Literal->new(value => $value, $node->construct_args);
			} elsif ($func =~ /^[UL]CASE$/) {
				return Attean::Literal->new( value => ($func eq 'UCASE' ? uc($operands[0]->value) : lc($operands[0]->value) ), $operands[0]->construct_args );
			} elsif ($func eq 'ENCODE_FOR_URI') {
				return Attean::Literal->new( uri_escape_utf8($operands[0]->value) );
			} elsif ($func eq 'CONTAINS') {
				my ($node, $pat)	= @operands;
				my ($lit, $plit)	= map { $_->value } @operands;
				die "TypeError: CONTAINS" if ($node->language and $pat->language and $node->language ne $pat->language);
				return (index($lit, $plit) >= 0) ? $true : $false;
			} elsif ($func eq 'STRSTARTS' or $func eq 'STRENDS') {
				my ($lit, $plit)	= map { $_->value } @operands;
				if ($func eq 'STRENDS') {
					my $pos	= length($lit) - length($plit);
					return (rindex($lit, $plit) == $pos) ? $true : $false;
				} else {
					return (index($lit, $plit) == 0) ? $true : $false;
				}
			} elsif ($func eq 'STRBEFORE' or $func eq 'STRAFTER') {
				my ($node, $substr)	= @operands;
				die "$func called without a literal arg1 term" unless (blessed($node) and $node->does('Attean::API::Literal'));
				die "$func called without a literal arg2 term" unless (blessed($substr) and $substr->does('Attean::API::Literal'));
				die "$func called with a datatyped (non-xsd:string) literal" if ($node->datatype and $node->datatype->value ne 'http://www.w3.org/2001/XMLSchema#string');
				# SPARQL "argument compatibility" rules for the two literals
				my $lhs_simple	= (not($node->language) and ($node->datatype->value eq 'http://www.w3.org/2001/XMLSchema#string'));
				my $rhs_simple	= (not($substr->language) and ($substr->datatype->value eq 'http://www.w3.org/2001/XMLSchema#string'));
				if ($lhs_simple and $rhs_simple) {
					# ok
				} elsif ($node->language and $substr->language and $node->language eq $substr->language) {
					# ok
				} elsif ($node->language and $rhs_simple) {
					# ok
				} else {
					die "$func called with literals that are not argument compatible";
				}
				my $value	= $node->value;
				my $match	= $substr->value;
				my $i	= index($value, $match, 0);
				if ($i < 0) {
					return Attean::Literal->new('');
				} else {
					if ($func eq 'STRBEFORE') {
						return Attean::Literal->new(value => substr($value, 0, $i), $node->construct_args);
					} else {
						return Attean::Literal->new(value => substr($value, $i+length($match)), $node->construct_args);
					}
				}
			} elsif ($func =~ /^(?:YEAR|MONTH|DAY|HOURS|MINUTES)$/) {
				# map the SPARQL accessor name onto the DateTime method name
				my $method	= lc($func =~ s/^(HOUR|MINUTE)S$/$1/r);
				my $dt	= $operands[0]->datetime;
				return Attean::Literal->integer($dt->$method());
			} elsif ($func eq 'SECONDS') {
				my $dt	= $operands[0]->datetime;
				return Attean::Literal->decimal($dt->second());
			} elsif ($func eq 'TZ' or $func eq 'TIMEZONE') {
				# TZ returns the lexical offset ("Z", "+05:30", or "");
				# TIMEZONE returns an xsd:dayTimeDuration and errors on
				# floating (timezone-less) values.
				my $dt	= $operands[0]->datetime;
				my $tz	= $dt->time_zone;
				if ($tz->is_floating) {
					return Attean::Literal->new('') if ($func eq 'TZ');
					die "TIMEZONE called with a dateTime without a timezone";
				}
				return Attean::Literal->new('Z') if ($func eq 'TZ' and $tz->is_utc);
				if ($tz) {
					my $offset	= $tz->offset_for_datetime( $dt );
					my $hours	= 0;
					my $minutes	= 0;
					my $minus	= ($func eq 'TZ') ? '+' : '';
					if ($offset < 0) {
						$minus	= '-';
						$offset	= -$offset;
					}
					my $duration	= "${minus}PT";
					if ($offset >= 60*60) {
						my $h	= int($offset / (60*60));
						$duration	.= "${h}H" if ($h > 0);
						$hours	= int($offset / (60*60));
						$offset	= $offset % (60*60);
					}
					if ($offset >= 60) {
						my $m	= int($offset / 60);
						$duration	.= "${m}M" if ($m > 0);
						$minutes	= int($offset / 60);
						$offset	= $offset % 60;
					}
					my $seconds	= int($offset);
					my $s	= int($offset);
					$duration	.= "${s}S" if ($s > 0 or $duration eq 'PT');
					return ($func eq 'TZ')
						? Attean::Literal->new(sprintf('%s%02d:%02d', $minus, $hours, $minutes))
						: Attean::Literal->new( value => $duration, datatype => "http://www.w3.org/2001/XMLSchema#dayTimeDuration");
				} else {
					return Attean::Literal->new('') if ($func eq 'TZ');
					die "TIMEZONE called without a valid dateTime";
				}
			} elsif ($func eq 'NOW') {
				my $value	= DateTime::Format::W3CDTF->new->format_datetime( DateTime->now );
				return Attean::Literal->new( value => $value, datatype => 'http://www.w3.org/2001/XMLSchema#dateTime' );
			} elsif ($func =~ /^(?:STR)?UUID$/) {
				my $u	= Data::UUID->new();
				return Attean::Literal->new($u->to_string( $u->create() )) if ($func eq 'STRUUID');
				return Attean::IRI->new('urn:uuid:' . $u->to_string( $u->create() ));
			} elsif ($func =~ /^(MD5|SHA1|SHA256|SHA384|SHA512)$/) {
				# Digest expects e.g. "SHA-256", not "SHA256"
				my $hash	= $func =~ s/SHA/SHA-/r;
				my $digest	= eval { Digest->new($hash)->add(encode('UTF-8', $operands[0]->value, Encode::FB_CROAK))->hexdigest };
				return Attean::Literal->new($digest);
			} elsif ($func eq 'STRLANG') {
				my ($str, $lang)	= @operands;
				my @values	= map { $_->value } @operands;
				die "TypeError: STRLANG must be called with two plain literals" unless (blessed($str) and $str->does('Attean::API::Literal') and blessed($lang) and $lang->does('Attean::API::Literal'));
				die "TypeError: STRLANG not called with a simple literal" unless ($str->datatype->value eq 'http://www.w3.org/2001/XMLSchema#string' and not($str->language));
				return Attean::Literal->new( value => $values[0], language => $values[1] );
			} elsif ($func eq 'STRDT') {
				die "TypeError: STRDT" unless ($operands[0]->does('Attean::API::Literal') and not($operands[0]->language));
				if (my $dt = $operands[0]->datatype) {
					die "TypeError: STRDT" unless ($dt->value eq 'http://www.w3.org/2001/XMLSchema#string');
				}
				die "TypeError: STRDT" unless ($operands[1]->does('Attean::API::IRI'));
				my @values	= map { $_->value } @operands;
				return Attean::Literal->new( value => $values[0], datatype => $values[1] );
			} elsif ($func eq 'SAMETERM') {
				# NOTE(review): $a/$b collide with sort()'s package globals;
				# also equality here is compare()-then-string-value rather than
				# strict term identity -- confirm against the sameTerm spec.
				my ($a, $b)	= @operands;
				die "TypeError: SAMETERM" unless (blessed($operands[0]) and blessed($operands[1]));
				return $false if ($a->compare($b));
				return ($a->value eq $b->value) ? $true : $false;
			} elsif ($func =~ /^IS([UI]RI|BLANK|LITERAL|NUMERIC)$/) {
				return $operands[0]->does("Attean::API::$type_roles{$1}") ? $true : $false;
			} elsif ($func eq 'REGEX') {
				# NOTE(review): a third "flags" argument, if present, is ignored;
				# the pattern is interpolated unescaped into the match.
				my ($value, $pattern)	= map { $_->value } @operands;
				return ($value =~ /$pattern/) ? $true : $false;
			}
			die "Unimplemented FunctionExpression evaluation: " . $expr->operator;
		};
	} elsif ($expr->isa('Attean::AggregateExpression')) {
		# Aggregates: the returned closure takes an ARRAY ref of rows (a group)
		# instead of a single row.
		my $agg	= $expr->operator;
		my ($child)	= @{ $expr->children };
		if ($agg eq 'COUNT') {
			if ($child) {
				my $impl	= $self->impl($child, $active_graph);
				return sub {
					my ($rows, %args)	= @_;
					my @terms	= grep { blessed($_) } map { $impl->($_, %args) } @{ $rows };
					if ($expr->distinct) {
						my %seen;
						@terms	= grep { not($seen{$_->as_string}++) } @terms;
					}
					return Attean::Literal->integer(scalar(@terms));
				};
			} else {
				# COUNT(*): just the group size
				return sub {
					my ($rows, %args)	= @_;
					return Attean::Literal->integer(scalar(@$rows));
				};
			}
		} elsif ($agg =~ /^(?:SAMPLE|MIN|MAX|SUM|AVG|GROUP_CONCAT)$/) {
			my $impl	= $self->impl($child, $active_graph);
			if ($agg eq 'SAMPLE') {
				return sub {
					my ($rows, %args)	= @_;
					return $impl->( shift(@$rows), %args )
				};
			} elsif ($agg eq 'MIN' or $agg eq 'MAX') {
				my $expect	= ($agg eq 'MIN') ? 1 : -1;
				return sub {
					my ($rows, %args)	= @_;
					my $extrema;
					foreach my $r (@$rows) {
						my $t	= $impl->( $r, %args );
						return if (not($t) and $agg eq 'MIN');	# unbound is always minimal
						next if (not($t));	# unbound need not be considered for MAX
						$extrema	= $t if (not($extrema) or $extrema->compare($t) == $expect);
					}
					return $extrema;
				};
			} elsif ($agg eq 'SUM' or $agg eq 'AVG') {
				return sub {
					my ($rows, %args)	= @_;
					my $count	= 0;
					my $sum	= Attean::Literal->integer(0);
					my %seen;
					foreach my $r (@$rows) {
						my $term	= $impl->( $r, %args );
						if ($expr->distinct) {
							next if ($seen{ $term->as_string }++);
						}
						if ($term->does('Attean::API::NumericLiteral')) {
							$count++;
							$sum	= Attean::Literal->new( value => ($sum->numeric_value + $term->numeric_value), datatype => $sum->binary_promotion_type($term, '+') );
						} else {
							die "TypeError: AVG";
						}
					}
					if ($agg eq 'AVG') {
						$sum	= not($count) ? undef : Attean::Literal->new( value => ($sum->numeric_value / $count), datatype => $sum->binary_promotion_type(Attean::Literal->integer($count), '/') );
					}
					return $sum;
				};
			} elsif ($agg eq 'GROUP_CONCAT') {
				# NOTE(review): 'seperator' [sic] -- the key must match what the
				# parser stores in scalar_vars; confirm before fixing the spelling.
				my $sep	= $expr->scalar_vars->{ 'seperator' } // ' ';
				return sub {
					my ($rows, %args)	= @_;
					my %seen;
					my @strings;
					foreach my $r (@$rows) {
						my $term	= eval { $impl->( $r, %args ) };
						if ($expr->distinct) {
							next if ($seen{ blessed($term) ? $term->as_string : '' }++);
						}
						push(@strings, $term->value // '');
					}
					# NOTE(review): values are sorted, so concatenation order is
					# lexical rather than row order.
					return Attean::Literal->new(join($sep, sort @strings));
				};
			}
		}
		die "Unimplemented AggregateExpression evaluation: " . $expr->operator;
	} elsif ($expr->isa('Attean::CastExpression')) {
		# xsd:TYPE(value) constructor-style casts
		my ($child)	= @{ $expr->children };
		my $impl	= $self->impl( $child, $active_graph );
		my $type	= $expr->datatype;
		return sub {
			my ($r, %args)	= @_;
			my $term	= $impl->($r, %args);
			# TODO: reformat syntax for xsd:double
			my $cast	= Attean::Literal->new( value => $term->value, datatype => $type );
			return $cast->canonicalized_term if ($cast->does('Attean::API::CanonicalizingLiteral'));
			return $cast;
		}
	} else {
		Carp::confess "No impl for expression " . $expr->as_string;
	}
}
}
1;
__END__
=back
=head1 BUGS
Please report any bugs or feature requests to through the GitHub web interface
at L<https://github.com/kasei/attean/issues>.
=head1 SEE ALSO
L<http://www.perlrdf.org/>
=head1 AUTHOR
Gregory Todd Williams C<< <gwilliams@cpan.org> >>
=head1 COPYRIGHT
Copyright (c) 2014 Gregory Todd Williams.
This program is free software; you can redistribute it and/or modify it under
the same terms as Perl itself.
=cut
| gitpan/Attean | lib/Attean/SimpleQueryEvaluator.pm | Perl | mit | 39,154 |
###############################################################################
# search_controller.pl
# =============================================================================
# Version: Vegetable Revolution 3.0
# Released: 1st June 2009
# Revision: $Rev$
# Copyright: James Aitken <http://loonypandora.co.uk>
###############################################################################
use strict;
##
## _show_search_results($search, $limit, $begin_time, $end_time, $board_string, $offset)
## Prepares and executes the paged full-text search over forum messages,
## leaving the executed statement handle in $vr::loop with its columns bound
## into %vr::loop for the caller to fetch.
##
## NOTE(review): $board_string is interpolated directly into the IN clause and
## must be built from trusted (numeric) values by the caller.
## NOTE(review): $begin_time is bound to the *upper* time bound and $end_time
## to the lower one -- confirm the caller passes (newest, oldest).
##
sub _show_search_results {
    my ($search, $limit, $begin_time, $end_time, $board_string, $offset) = @_;
    # Bind the LIKE pattern as a placeholder instead of interpolating a
    # quoted string into the SQL text.
    my $pattern = '%' . $search . '%';
    my $query = qq{
        SELECT DISTINCT messages.message_id, messages.thread_id, messages.message_body, messages.message_time, threads.thread_subject, threads.board_id, boards.category_id, users.user_id, users.user_name, users.display_name, users.avatar, special_groups.spec_group_color
        FROM messages
        INNER JOIN threads AS threads ON threads.thread_id = messages.thread_id
        INNER JOIN boards AS boards ON threads.board_id = boards.board_id
        INNER JOIN users AS users ON users.user_id = messages.user_id
        LEFT JOIN special_groups AS special_groups ON users.spec_group_id = special_groups.spec_group_id
        WHERE messages.message_body LIKE ?
        AND boards.category_id = 'forum'
        AND boards.vip_only != '1'
        AND boards.mods_only != '1'
        AND messages.message_deleted != '1'
        AND messages.message_time <= ? AND messages.message_time >= ?
        AND threads.board_id IN ($board_string)
        ORDER BY messages.message_time DESC
        LIMIT ?, ?
    };
    $vr::loop = $vr::dbh->prepare($query);
    $vr::loop->execute($pattern, $begin_time, $end_time, $offset, $limit);
    $vr::loop->bind_columns(\(@vr::loop{ @{ $vr::loop->{NAME_lc} } }));
}
##
## _show_search_info($search, $begin_time, $end_time, $board_string)
## Counts the distinct threads matching the same search criteria as
## _show_search_results() and binds the single total_results column into
## %vr::db for the caller.
##
## NOTE(review): $board_string is interpolated into the IN clause and must be
## trusted; keep its construction in sync with _show_search_results().
##
sub _show_search_info {
    my ($search, $begin_time, $end_time, $board_string) = @_;
    # Bind the LIKE pattern as a placeholder, matching _show_search_results().
    my $pattern = '%' . $search . '%';
    my $query = qq{
        SELECT COUNT(DISTINCT messages.thread_id) as total_results
        FROM messages
        INNER JOIN threads AS threads ON threads.thread_id = messages.thread_id
        INNER JOIN boards AS boards ON threads.board_id = boards.board_id
        WHERE messages.message_body LIKE ?
        AND boards.category_id = 'forum'
        AND boards.vip_only != '1'
        AND boards.mods_only != '1'
        AND messages.message_deleted != '1'
        AND messages.message_time <= ? AND messages.message_time >= ?
        AND threads.board_id IN ($board_string)
        LIMIT 1
    };
    my $static = $vr::dbh->prepare($query);
    $static->execute($pattern, $begin_time, $end_time);
    $static->bind_columns(\(@vr::db{ @{ $static->{NAME_lc} } }));
    $static->fetch;
}
1;
| LoonyPandora/VegRev | app/models/search_model.pl | Perl | mit | 2,509 |
#!/usr/bin/env perl
use strict;
use warnings;
use Genome;
# Read BAM file paths (one per line) from stdin/ARGV.  Each path, minus its
# ".bam" suffix, is a Genome::SoftwareResult id; print one tab-separated
# "path<TAB>sample_name" line per distinct sample that contributed
# instrument data to that result.
while (defined(my $bam_path = <>)) {
    chomp $bam_path;
    my ($sr_id) = $bam_path =~ /^(.+)\.bam$/;
    unless ($sr_id) {
        die "Unable to grab software id from bam name\n";
    }
    my $result = Genome::SoftwareResult->get($sr_id)
        or die "Unable to grab software results from $sr_id\n";
    my @instrument_data = $result->instrument_data;
    unless (@instrument_data) {
        die "Unable to grab instrument data for $bam_path\n";
    }
    # de-duplicate: several instrument-data entries may share one sample
    my %sample_seen = map { $_->sample->name => 1 } @instrument_data;
    print "$bam_path\t$_\n" for keys %sample_seen;
}
| ernfrid/oneoffs | sample_names_for_bam_ids.pl | Perl | mit | 600 |
package API::Routes::FacebookReviews;
use Dancer ':syntax';
use Dancer::Exception qw(:all);
use Dancer::Plugin::Res;
use API::Plugins::FacebookManager;
set serializer => 'JSON';
# GET /api/v1/new_reviews
# Returns (as JSON, via the route-level serializer) the Facebook reviews that
# have appeared since the last check; responds 500 with the underlying error
# message if the lookup fails.
get '/api/v1/new_reviews' => sub {
    my $revs;
    try {
        $revs = facebook_manager()->check_new_reviews;
    } catch {
        # Dancer::Exception's catch exposes the error in $_; send_error
        # aborts the route with an HTTP 500 response.
        send_error("Failed to check for new reviews : $_" => 500);
    };
    return res 200, $revs;
};
# GET /api/v1/all_reviews
# Returns (as JSON) every Facebook review known to the manager; responds 500
# with the underlying error message if the lookup fails.
get '/api/v1/all_reviews' => sub {
    my $revs;
    try {
        $revs = facebook_manager()->get_all_reviews;
    } catch {
        # error is in $_; send_error aborts the route with an HTTP 500
        send_error("Failed to check for all reviews : $_" => 500);
    };
    return res 200, $revs;
};
true;
| connoryates/facebook_review_app | lib/API/Routes/FacebookReviews.pm | Perl | mit | 656 |
package TOXML::UTIL;
use strict;
use lib "/backend/lib";
require DBINFO;
require ZOOVY;
require TOXML;
## NOTE(review): these tables are declared in the TOXML:: namespace (not
## TOXML::UTIL::) -- presumably consumed by TOXML.pm; confirm before renaming.
## Theme attributes copied between wrapper documents.
@TOXML::WRAPPER_THEME_ATTRIBS = (
	'name','pretty','content_background_color','content_font_face','content_font_size',
	'content_text_color','table_heading_background_color','table_heading_font_face',
	'table_heading_font_size','table_heading_text_color',
	'table_listing_background_color','table_listing_background_color_alternate','table_listing_font_face',
	'table_listing_font_size','table_listing_text_color','link_active_text_color','link_text_color',
	'link_visited_text_color','alert_color','disclaimer_background_color','disclaimer_font_face',
	'disclaimer_font_size','disclaimer_text_color'
	);
#%TOXML::UTIL::minilogos = (
#	'overstock'=>'//static.zoovy.com/img/proshop/W88-H31-Bffffff/zoovy/logos/overstock',
#	'ebay' => '//static.zoovy.com/img/proshop/W88-H31-Bffffff/zoovy/logos/ebay',
#	'ebaypower' => '//static.zoovy.com/img/proshop/W88-H31-Bffffff/zoovy/logos/ebay',
#	'ebaymotors' => '//static.zoovy.com/img/proshop/W88-H31-Bffffff/zoovy/logos/ebay_motors',
#	'ebaystores' => '//static.zoovy.com/img/proshop/W88-H31-Bffffff/zoovy/logos/ebay_stores',
#	);
## The following tables map bitmask values (1<<n) to human-readable labels;
## empty-string entries are reserved/retired bits and must keep their slots.
## LAYOUT
%TOXML::LAYOUT_PROPERTIES = (
	1<<0 => 'Dynamic Images / Slideshow',
	1<<1 => 'Image Categories / Image Cart',
	);
## WIZARDS
%TOXML::WIZARD_PROPERTIES = (
	1<<0 => 'Standard Fields (payment, shipping, returns, about, contact, checkout)',
	1<<1 => 'Has Header (tabs w/navigation)',
	1<<2 => 'Detailed Description',
	1<<3 => 'Contains Flash',
	);
## WRAPPER
%TOXML::BW_COLORS = (
	1<<0 => 'Black Backgrounds',
	1<<1 => 'Color Backgrounds',
	1<<2 => 'Light Backgrounds',
	1<<3 => 'Grey/Black',
	1<<4 => 'Blue',
	1<<5 => 'Red',
	1<<6 => 'Green',
	1<<7 => 'Other',
	1<<8 => '',
	);
%TOXML::BW_CATEGORIES = (
	1<<0 => 'Staff Favorites',
	1<<1 => 'Seasonal / Xmas',
	1<<2 => 'Seasonal / Valentines',
	1<<3 => 'Seasonal / Other',
	1<<4 => 'Silly Themes',
	1<<5 => 'Locations',
	1<<6 => '',
	1<<7 => 'Industry / Auto',
	1<<8 => 'Industry / Electronics',
	1<<9 => 'Industry / Sporting Goods',
	1<<10 => 'Industry / For Kids',
	1<<11 => '',
	1<<12 => '',
	1<<13 => 'Series 2001',
	1<<14 => 'Series 2002',
	1<<15 => 'Series 2003',
	1<<16 => 'Series 2006',
	1<<17 => 'Series 2007',
	);
%TOXML::BW_PROPERTIES = (
	1<<0 => 'Minicart',
	1<<1 => 'Sidebar',
	1<<2 => 'Subcats',
	1<<3 => 'Search',
	1<<4 => 'Newsletter',
	1<<5 => 'Login',
	1<<6 => 'Image Navcats',
	1<<7 => 'Flex Header',
	1<<8 => 'Web 2.0',
	1<<9 => '',
	1<<10 => '',
	1<<11 => '',
	1<<12 => 'Has Popup',
	1<<13 => 'Has Wizard',
	);
# NOTE: Make sure to add any new flow types into default_flow below also
## Map of single-letter layout codes to [ display name, description ].
## Descriptions are shown to merchants in the layout picker; typos in the
## originals ("proved", "contatc", "inform you customers") are fixed here,
## and the Search Page description no longer duplicates the About Us text.
$TOXML::UTIL::LAYOUT_STYLES = {
	'H' => [ 'Homepage', 'The homepage is the first page to appear on your site.' ],
	'A' => [ 'About Us', 'The purpose of the about us page is to inform customers about how to reach you, as well as to help establish your company identity.' ],
	'U' => [ 'Contact Us' , 'The purpose of the Contact Us page is to provide your customers with a way to get in contact with you.' ],
	'S' => [ 'Search Page' , 'The purpose of the search page is to let your customers search your store for products.' ],
	'E' => [ 'Results Page', 'The results page is displayed after a search has been performed.'],
	'Y' => [ 'Privacy Policy', 'The purpose of the privacy page is to disclose how you will use the customer information you collect.' ],
	'R' => [ 'Return Page', 'The purpose of the about us page is to inform your customers about your return policy.' ],
	'P' => [ 'Product Page', 'The purpose of the page is to feature a product.' ],
	'C' => [ 'Category Page', 'The purpose of a category page is to provide a hierarchy that makes it easier for customers to find products.' ],
	'X' => [ 'Custom Page', 'A custom page, do with it as you will.' ],
	'D' => [ 'Dynamic Page', 'A dynamic page, provides different data depending on how it is referenced.' ],
	'G' => [ 'Gallery Page', 'A listing of marketplaces and the products listed on them.' ],
	'T' => [ 'Shopping Cart', 'The cart page is displayed after a customer clicks the buy button.' ],
	'L' => [ 'Login', 'When login is required to get access to a feature, this page is displayed.' ],
	'Q' => [ 'Adult Warning', 'Adult Warning (requires ADULT be enabled on the account' ],
	'N' => [ 'Shipping Quote', 'Calculates shipping for auctions' ],
	'B' => [ 'Popup', 'Popup'],
	'I' => [ 'Email/Newsletter', 'An eMail Newsletter you can send to customers'],
	'W' => [ 'Rewards Page', 'Rewards Page Layout' ],
};
##
## FeatureBW
## 1 = Multi Image (3+)
##
##
##
##
##
## copy() -- stub; intentionally not implemented yet.
## Intended behavior (per the original notes below): for a wrapper document,
## copy its images into the custom files directory and rename them.
sub copy {
	## on a wrapper
	## copy the images to the custom files directory
	## rename the images
}
##
## for a given doc + user
## selects this docid as it's most recently "remembered" (e.g. selected)
## adds the entry to TOXML_RANKS (or updates it to selected)
## updates the RANK for the DOCID in the TOXML table
##
## STATE = 0 - only remember, do not select.
## 1 - (default) actually remembers this one as selected
## note: the STATE setting currently does nothing, but i need to make it do something -bh
##
## remember($USERNAME, $FORMAT, $DOCID, $STATE)
## Records $DOCID as the merchant's most recently selected document of the
## given format, and updates the global popularity counters on TOXML.
## NOTE(review): $STATE is defaulted to 1 but otherwise unused (matching the
## comment above: "currently does nothing").
sub remember {
	my ($USERNAME,$FORMAT,$DOCID,$STATE) = @_;
	if (not defined $STATE) { $STATE = 1; }
	my $dbh = &DBINFO::db_user_connect($USERNAME);
	my $MID = &ZOOVY::resolve_mid($USERNAME);
	my $qtFORMAT = $dbh->quote($FORMAT);
	my $qtDOCID = $dbh->quote($DOCID);
	my $qtUSERNAME = $dbh->quote($USERNAME);
	# Ensure a TOXML_RANKS row exists for this merchant/format/doc.
	my $pstmt = "select count(*) from TOXML_RANKS where MID=$MID and FORMAT=$qtFORMAT and DOCID=$qtDOCID";
	my $sth = $dbh->prepare($pstmt);
	$sth->execute();
	my ($count) = $sth->fetchrow();
	$sth->finish();
	if ($count==0) {
		$pstmt = "insert into TOXML_RANKS (CREATED_GMT,MID,MERCHANT,DOCID,FORMAT) values (".time().",$MID,$qtUSERNAME,$qtDOCID,$qtFORMAT)";
		# print STDERR $pstmt."\n";
		$dbh->do($pstmt);
	}
	# The next four statements are order-dependent:
	# 1) decrement RANK_SELECTED on whatever this merchant had selected before
	$pstmt = "update TOXML T, TOXML_RANKS TR set T.RANK_SELECTED=T.RANK_SELECTED-1 where TR.SELECTED=1 and TR.DOCID=T.DOCID and TR.FORMAT=T.FORMAT and TR.MID=$MID";
	# print STDERR $pstmt."\n";
	$dbh->do($pstmt);
	# 2) clear the merchant's selected flag for this format
	$pstmt = "update TOXML_RANKS set SELECTED=0 where FORMAT=$qtFORMAT and MID=$MID";
	# print STDERR $pstmt."\n";
	$dbh->do($pstmt);
	# 3) mark the new document as selected
	$pstmt = "update TOXML_RANKS set SELECTED=1 where FORMAT=$qtFORMAT and DOCID=$qtDOCID and MID=$MID";
	# print STDERR $pstmt."\n";
	$dbh->do($pstmt);
	# 4) bump the document's global selected/remember counters
	$pstmt = "update TOXML set RANK_SELECTED=RANK_SELECTED+1,RANK_REMEMBER=RANK_REMEMBER+1 where FORMAT=$qtFORMAT and DOCID=$qtDOCID";
	# print STDERR $pstmt."\n";
	$dbh->do($pstmt);
	&DBINFO::db_user_close();
}
##
##
##
## forget($USERNAME, $FORMAT, $DOCID)
## Removes the merchant's "remembered" entry for $DOCID in $FORMAT and
## decrements the document's global remember counter.
sub forget {
	my ($USERNAME,$FORMAT,$DOCID) = @_;
	my $dbh = &DBINFO::db_user_connect($USERNAME);
	my $qtDOCID = $dbh->quote($DOCID);
	my $qtFORMAT = $dbh->quote($FORMAT);
	my $MID = &ZOOVY::resolve_mid($USERNAME);
	my $pstmt = "delete from TOXML_RANKS where MID=$MID and FORMAT=$qtFORMAT and DOCID=$qtDOCID";
	$dbh->do($pstmt);
	## BUGFIX: this previously decremented only FORMAT='WRAPPER' rows, while
	## remember() increments RANK_REMEMBER for *any* format -- so counters
	## drifted upward for non-wrapper documents.  Decrement the same format
	## that was forgotten.
	$pstmt = "update TOXML set RANK_REMEMBER=RANK_REMEMBER-1 where FORMAT=$qtFORMAT and MID in (0,$MID) and DOCID=$qtDOCID";
	$dbh->do($pstmt);
	&DBINFO::db_user_close();
}
##
## sub updateFILE
##
## updateFILE($toxml)
## Persists summary metadata for a system (MID 0) template document into the
## per-format Storable index /httpd/static/TOXML_<FORMAT>.bin, creating the
## file if needed.  The index maps DOCID => metadata hash and is what
## listDocs() reads to enumerate system templates.
sub updateFILE {
	my ($toxml) = @_;
	my $BINFILE = sprintf("/httpd/static/TOXML_%s.bin",$toxml->format());
	my $ref = {};
	if (-f $BINFILE) {
		$ref = Storable::retrieve("$BINFILE");
	}
	my $SUBTYPE = '';
	# NOTE(review): if the document has no CONFIG element, $el is undef; the
	# rvalue derefs below then yield undef/0 rather than dying.
	my ($el) = $toxml->findElements('CONFIG');
	if (defined $el) { $SUBTYPE = $el->{'SUBTYPE'}; }
	# CONFIG may carry an undef SUBTYPE; '_' is the catch-all bucket.
	if (not defined $SUBTYPE) { $SUBTYPE = '_'; }
	my $TITLE = (defined $el->{'TITLE'})?$el->{'TITLE'}:'';
	my $cat = int($el->{'CATEGORIES'});
	my $col = int($el->{'COLORS'});
	$ref->{$toxml->{'_ID'}} = {
		'MID'=>0,
		'FORMAT'=>$toxml->{'_FORMAT'},
		'SUBTYPE'=>$SUBTYPE,
		'TITLE'=>$TITLE,
		'PROPERTIES'=> int($el->{'PROPERTIES'}),
		'WRAPPER_CATEGORIES'=>$cat,
		'WRAPPER_COLORS'=>$col,
		};
	Storable::store($ref,$BINFILE);
}
##
## valid OPTIONS:
## SUBTYPE
## [bitwise] DETAIL=> 0 (default) not supplied FORMAT, MID, SUBTYPE, DIGEST, UPDATED_GMT, TEMPLATE
## 1 (returns config element)
## 2 (filter results to only returns non-MID 0 files)
## SORT => 1 (sorts by placing favorites first)
## DEREPCATED=>1 (include deprecated documents)
##
## returns:
## an array of hashes, each hash has:
## DOCID,SUBTYPE,FORMAT,DIGEST,UPDATED_GMT,TITLE,MID
##
## listDocs($USERNAME, $FORMAT, %options)
## Enumerates template documents of $FORMAT visible to the merchant: system
## templates from the per-format Storable index, plus the merchant's own
## documents from <userpath>/TOXML (DOCIDs prefixed '~').  Returns an ARRAY
## ref of metadata hashes; see the option list in the comment above.
sub listDocs {
	my ($USERNAME,$FORMAT,%options) = @_;
	# legacy alias: callers may still pass EMAIL for the ZEMAIL format
	if ($FORMAT eq 'EMAIL') { $FORMAT = 'ZEMAIL'; }
	if (not defined $options{'DETAIL'}) { $options{'DETAIL'} = 0; }
	my @AR = ();
	my $dbh = &DBINFO::db_user_connect($USERNAME);
	my $MID = &ZOOVY::resolve_mid($USERNAME);
	my %RANKS = ();
	## Load system templates
	if (-f "/httpd/static/TOXML_$FORMAT.bin") {
		require Storable;
		my $REFS = Storable::retrieve("/httpd/static/TOXML_$FORMAT.bin");
		foreach my $ID (keys %{$REFS}) {
			my $ref = $REFS->{$ID};
			# NOTE(review): this *skips* deprecated (CREATED_GMT==0) docs when
			# the DEPRECATED option is passed -- the opposite of the header
			# comment ("include deprecated documents"); confirm intent.
			next if ((defined $options{'DEPRECATED'}) && ($ref->{'CREATED_GMT'} == 0));
			next if ((defined $options{'SUBTYPE'}) && ($ref->{'SUBTYPE'} ne $options{'SUBTYPE'}));
			# backfill a fixed timestamp (2010-12-24) for legacy entries
			if ($ref->{'CREATED_GMT'}==0) { $ref->{'CREATED_GMT'} = 1293234232; }
			$ref->{'DOCID'} = "$ID";
			$ref->{'UPDATED_GMT'} = 0;
			push @AR, $ref;
		}
	}
	# Load the merchant's own documents from disk (FORMAT+DOCID.bin files).
	if ($USERNAME ne '') {
		my $userpath = &ZOOVY::resolve_userpath($USERNAME).'/TOXML';
		opendir TDIR, "$userpath";
		while (my $file = readdir(TDIR)) {
			my %INFO = ();
			next if (substr($file,0,1) eq '.');
			next unless ($file =~ m/^([A-Z]+)\+(.*)\.bin$/i);
			$INFO{'FORMAT'} = $1;
			# user-owned documents are distinguished by a '~' DOCID prefix
			$INFO{'DOCID'} = '~'.$2;
			next if ($INFO{'FORMAT'} eq '');
			next if ($INFO{'FORMAT'} eq 'DEFINITION');
			next if ($INFO{'DOCID'} eq '');
			next if (($FORMAT ne '') && ($FORMAT ne $INFO{'FORMAT'}));
			$INFO{'MID'} = $MID;
			$INFO{'UPDATED_GMT'} = time();
			$INFO{'ID'} = -1;
			# user docs get a fixed high rating so they outrank system docs
			$INFO{'STARS'} = 10.5;
			$RANKS{ $INFO{'DOCID'} } = $INFO{'STARS'};
			# NOTE(review): $INFO{'SUBTYPE'} is never set in this loop, so this
			# filter can never exclude a user document.
			next if ((defined $options{'SUBTYPE'}) && ($INFO{'SUBTYPE'} ne '') && ($options{'SUBTYPE'} ne $INFO{'SUBTYPE'}));
			push @AR, \%INFO;
		}
		closedir TDIR;
	}
	if ($options{'SELECTED'} ne '') {
		## if one is selected, make sure it appears in the list.
		my $selected_found = 0;
		foreach my $inforef (@AR) {
			if ($inforef->{'DOCID'} eq $options{'SELECTED'}) { $selected_found++; }
		}
		if (not $selected_found) {
			$RANKS{$options{'SELECTED'}} = 11;
			unshift @AR, { DOCID=>$options{'SELECTED'}, FORMAT=>$FORMAT, TITLE=>$options{'SELECTED'} };
		}
	}
	# DETAIL bit 1: open each document and merge its CONFIG element in.
	# NOTE(review): DETAIL bit 2 ("filter to non-MID-0 files") is documented
	# in the header comment but not implemented anywhere below.
	if (($options{'DETAIL'}&1)==1) {
		my $x = scalar(@AR);
		for (my $i =0;$i<$x;$i++) {
			next if ($AR[$i]->{'DOCID'} eq ''); ## corrupt
			next if ($AR[$i]->{'FORMAT'} eq 'DEFINITION'); ## ignore
			my ($t) = TOXML->new($AR[$i]->{'FORMAT'},$AR[$i]->{'DOCID'},USERNAME=>$USERNAME,MID=>$MID);
			next if (not defined $t);
			my ($CONFIGEL) = $t->findElements('CONFIG');
			if (defined $CONFIGEL) {
				delete $AR[$i]->{'TITLE'}; # delete TITLE so we can reset it in the next step.
				foreach my $k (keys %{$CONFIGEL}) {
					next if (defined $AR[$i]->{$k}); # never override properties which have already been set. (ex: ID)
					$AR[$i]->{$k} = $CONFIGEL->{$k};
				}
			}
		}
	}
	## sort the items based on popularity
	# NOTE(review): despite the comment, this sorts by DOCID *descending*;
	# user docs (prefixed '~', high ASCII) therefore float to the front.
	# %RANKS is computed above but never consulted here.
	if ($options{'SORT'}==1) {
		## step1: convert our current @AR into %H (key = DOCID, value = ref)
		my %H = ();
		foreach my $ref (@AR) {
			$H{$ref->{'DOCID'}} = $ref;
		}
		@AR = (); # everything is stored in %H so this is safe.
		foreach my $docid (reverse sort keys %H) { push @AR, $H{$docid}; }
		undef %H;
	}
	return(\@AR);
}
##
## Format: P (product)
##
sub favoriteDocs {
	## Returns the list of DOCIDs ranked as favorites for one merchant/format.
	## Parameters: $USERNAME - merchant username (used to pick the user db)
	##             $FORMAT   - TOXML format code (ex: 'P' for product)
	##             $SUBTYPE  - accepted for interface compatibility; the
	##                         SUBTYPE filter is not enabled yet (see below).
	## Returns: list of DOCID strings, ordered by DOCID.
	my ($USERNAME, $FORMAT, $SUBTYPE) = @_;

	my $dbh = &DBINFO::db_user_connect($USERNAME);
	my $MID = &ZOOVY::resolve_mid($USERNAME);

	## Use bound placeholders instead of string-built SQL so FORMAT (and any
	## future SUBTYPE filter) can never break or inject into the statement.
	my $pstmt = "select DOCID from TOXML_RANKS where MID=? and FORMAT=?";
	# $pstmt .= " and SUBTYPE=?";	## NOTE(review): $SUBTYPE intentionally unused for now
	$pstmt .= " order by DOCID";
	my $sth = $dbh->prepare($pstmt);
	$sth->execute($MID,$FORMAT);

	my @docs = ();
	while ( my ($docid) = $sth->fetchrow() ) {
		push @docs, $docid;
	}
	$sth->finish();
	&DBINFO::db_user_close();
	return(@docs);
}
1;
| CommerceRack/backend | lib/TOXML/UTIL.pm | Perl | mit | 12,090 |
package MIP::Recipes::Analysis::Glnexus;
use 5.026;
use Carp;
use charnames qw{ :full :short };
use English qw{ -no_match_vars };
use File::Basename qw{ dirname };
use File::Spec::Functions qw{ catdir catfile devnull };
use open qw{ :encoding(UTF-8) :std };
use Params::Check qw{ allow check last_error };
use utf8;
use warnings;
use warnings qw{ FATAL utf8 };
## CPANM
use autodie qw{ :all };
use Readonly;
## MIPs lib/
use MIP::Constants qw{ $DASH $LOG_NAME $NEWLINE $PIPE $SPACE };
BEGIN {

    require Exporter;
    use base qw{ Exporter };

    # Functions and variables which can be optionally exported.
    # analysis_glnexus is the single public entry point of this recipe module.
    our @EXPORT_OK = qw{ analysis_glnexus };

}
sub analysis_glnexus {

## Function : Merges gvcfs from DeepVariant to generate a cohort vcf.
## Returns  : 1 on completion
## Arguments: $active_parameter_href => Active parameters for this analysis hash {REF}
##          : $case_id               => Family id
##          : $file_info_href        => File_info hash {REF}
##          : $job_id_href           => Job id hash {REF}
##          : $parameter_href        => Parameter hash {REF}
##          : $profile_base_command  => Submission profile base command
##          : $recipe_name           => Recipe name
##          : $sample_info_href      => Info on samples and case hash {REF}

    my ($arg_href) = @_;

    ## Flatten argument(s)
    my $active_parameter_href;
    my $file_info_href;
    my $job_id_href;
    my $parameter_href;
    my $recipe_name;
    my $sample_info_href;

    ## Default(s)
    my $case_id;
    my $profile_base_command;

    my $tmpl = {
        active_parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$active_parameter_href,
            strict_type => 1,
        },
        case_id => {
            default     => $arg_href->{active_parameter_href}{case_id},
            store       => \$case_id,
            strict_type => 1,
        },
        file_info_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$file_info_href,
            strict_type => 1,
        },
        job_id_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$job_id_href,
            strict_type => 1,
        },
        parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$parameter_href,
            strict_type => 1,
        },
        profile_base_command => {
            default     => q{sbatch},
            store       => \$profile_base_command,
            strict_type => 1,
        },
        recipe_name => {
            defined     => 1,
            required    => 1,
            store       => \$recipe_name,
            strict_type => 1,
        },
        sample_info_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$sample_info_href,
            strict_type => 1,
        },
    };

    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};

    use MIP::File_info qw{ get_io_files parse_io_outfiles };
    use MIP::Program::Bcftools qw{ bcftools_norm };
    use MIP::Program::Glnexus qw{ glnexus_merge };
    use MIP::Program::Htslib qw{ htslib_tabix };
    use MIP::Processmanagement::Processes qw{ submit_recipe };
    use MIP::Recipe qw{ parse_recipe_prerequisites };
    use MIP::Sample_info qw{ set_file_path_to_store set_recipe_outfile_in_sample_info };
    use MIP::Script::Setup_script qw{ setup_script };

    ### PREPROCESSING:

    ## Retrieve logger object
    my $log = Log::Log4perl->get_logger($LOG_NAME);

    my %recipe = parse_recipe_prerequisites(
        {
            active_parameter_href => $active_parameter_href,
            parameter_href        => $parameter_href,
            recipe_name           => $recipe_name,
        }
    );
    my $core_number = $recipe{core_number};
    my $memory      = $recipe{memory};

    ## Collect one gvcf infile path per sample for the cohort merge
    my @genotype_infile_paths;

  SAMPLE_ID:
    foreach my $sample_id ( @{ $active_parameter_href->{sample_ids} } ) {

        ## Get the io infiles per chain and id
        my %sample_io = get_io_files(
            {
                id             => $sample_id,
                file_info_href => $file_info_href,
                parameter_href => $parameter_href,
                recipe_name    => $recipe_name,
                stream         => q{in},
            }
        );
        push @genotype_infile_paths, $sample_io{in}{file_path};
    }

    my %io = parse_io_outfiles(
        {
            chain_id               => $recipe{job_id_chain},
            id                     => $case_id,
            file_info_href         => $file_info_href,
            file_name_prefixes_ref => [$case_id],
            outdata_dir            => $active_parameter_href->{outdata_dir},
            parameter_href         => $parameter_href,
            recipe_name            => $recipe_name,
        }
    );
    my $outfile_path = $io{out}{file_path};

    ## Filehandles
    # Create anonymous filehandle
    my $filehandle = IO::Handle->new();

    ## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header
    my ( $recipe_file_path, $recipe_info_path ) = setup_script(
        {
            active_parameter_href => $active_parameter_href,
            core_number           => $core_number,
            directory_id          => $case_id,
            filehandle            => $filehandle,
            job_id_href           => $job_id_href,
            memory_allocation     => $memory,
            process_time          => $recipe{time},
            recipe_directory      => $recipe_name,
            recipe_name           => $recipe_name,
        }
    );

    ### SHELL:

    say {$filehandle} q{## } . $recipe_name;

    ## Joint genotyping of the per-sample gvcfs; output is streamed to stdout
    glnexus_merge(
        {
            config           => q{DeepVariant_unfiltered},
            dir              => catdir( $active_parameter_href->{temp_directory}, q{glnexus} ),
            filehandle       => $filehandle,
            infile_paths_ref => \@genotype_infile_paths,
            memory           => $memory,
            threads          => $core_number,
        }
    );
    print {$filehandle} $PIPE . $SPACE;

    ## Normalize (split multiallelics)
    bcftools_norm(
        {
            filehandle     => $filehandle,
            infile_path    => $DASH,
            multiallelic   => q{-},
            output_type    => q{u},
            reference_path => $active_parameter_href->{human_genome_reference},
            threads        => $core_number,
        }
    );
    print {$filehandle} $PIPE . $SPACE;

    ## Remove duplicates and write the bgzipped cohort vcf
    bcftools_norm(
        {
            filehandle        => $filehandle,
            infile_path       => $DASH,
            outfile_path      => $outfile_path,
            output_type       => q{z},
            reference_path    => $active_parameter_href->{human_genome_reference},
            remove_duplicates => 1,
            threads           => $core_number,
        }
    );
    say {$filehandle} $NEWLINE;

    ## Index the cohort vcf.
    ## FIX: call with an explicit hashref instead of the previous
    ## "htslib_tabix { ( ... ) }" form, which relies on Perl's ambiguous
    ## block-vs-anonymous-hash disambiguation.
    htslib_tabix(
        {
            filehandle  => $filehandle,
            infile_path => $outfile_path,
        }
    );

    ## Close filehandle
    close $filehandle or $log->logcroak(q{Could not close filehandle});

    if ( $recipe{mode} == 1 ) {

        set_recipe_outfile_in_sample_info(
            {
                path             => $outfile_path,
                recipe_name      => $recipe_name,
                sample_info_href => $sample_info_href,
            }
        );

        set_file_path_to_store(
            {
                format           => q{vcf},
                id               => $case_id,
                path             => $outfile_path,
                path_index       => $outfile_path . q{.tbi},
                recipe_name      => $recipe_name,
                sample_info_href => $sample_info_href,
            }
        );

        submit_recipe(
            {
                base_command                      => $profile_base_command,
                case_id                           => $case_id,
                dependency_method                 => q{sample_to_case},
                log                               => $log,
                job_id_chain                      => $recipe{job_id_chain},
                job_id_href                       => $job_id_href,
                job_reservation_name              => $active_parameter_href->{job_reservation_name},
                max_parallel_processes_count_href =>
                  $file_info_href->{max_parallel_processes_count},
                recipe_file_path                  => $recipe_file_path,
                sample_ids_ref                    => \@{ $active_parameter_href->{sample_ids} },
                submission_profile                => $active_parameter_href->{submission_profile},
            }
        );
    }
    return 1;
}
1;
| henrikstranneheim/MIP | lib/MIP/Recipes/Analysis/Glnexus.pm | Perl | mit | 9,101 |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by lib/unicore/mktables from the Unicode
# database, Version 12.1.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly. Use Unicode::UCD to access the Unicode character data
# base.
# Returns the raw property data as a heredoc string for Unicode::UCD's
# internal consumption.  NOTE(review): the payload looks like an inversion
# list ("V120" presumably the element count, then alternating range
# start / one-past-end code points) -- confirm against lib/unicore/mktables.
# The data block below is machine-generated; never hand-edit it.
return <<'END';
V120
48
58
1632
1642
1643
1645
1776
1786
1984
1994
2406
2416
2534
2544
2662
2672
2790
2800
2918
2928
3046
3056
3174
3184
3302
3312
3430
3440
3558
3568
3664
3674
3792
3802
3872
3882
4160
4170
4240
4250
6112
6122
6160
6170
6470
6480
6608
6618
6784
6794
6800
6810
6992
7002
7088
7098
7232
7242
7248
7258
42528
42538
43216
43226
43264
43274
43472
43482
43504
43514
43600
43610
44016
44026
65296
65306
66720
66730
68912
68922
69734
69744
69872
69882
69942
69952
70096
70106
70384
70394
70736
70746
70864
70874
71248
71258
71360
71370
71472
71482
71904
71914
72784
72794
73040
73050
73120
73130
92768
92778
93008
93018
120782
120832
123200
123210
123632
123642
125264
125274
END
| operepo/ope | client_tools/svc/rc/usr/share/perl5/core_perl/unicore/lib/SB/NU.pl | Perl | mit | 1,156 |
# NOTE(review): deliberately obfuscated Perl golf (a terminal animation).
# Kept byte-for-byte: it abuses punctuation variables ($=, $-, $|, $') and
# self-matching s/// chains, so any reformatting would change behavior.
sub'v{$==!$-+rand pop}[s/\d+/$|--?($-=$&-1)+v$-%$c?3:2:$&-print||`stty size`-
print$'?"\e[?25l\e[2J":y!Oo*.! !r/ge?s/(?<=f|(.))...B.\K./$1||chr 32>>$=/ge..
4E4:s/|/o*... //s//\e[B\b /g/s//\e[1;${\v$c=`tput cols`-2}fO/]while*_=_.v-42#
| Minizarbi/nudist_platypus_gold | nudist_platypus_gold.pl | Perl | mit | 234 |
use v5.14;
use FindBin;
use JSON::PP;
use HTTP::Tiny;
use File::Path qw<make_path>;
use File::Slurp qw<write_file read_file>;

# Snapshot every watched page and commit the results to git.
#
# Usage: run.pl [KEY ...]
#   With no arguments every key in watchlist.json is fetched; otherwise only
#   the named keys.  Each fetch is stored under
#   data/<key>/<page>/<UTC-timestamp>/ as the full HTTP::Tiny response dump
#   plus (on success) the page body itself.

my $json      = JSON::PP->new->canonical->pretty;
my $watchlist = $json->decode( scalar read_file("${FindBin::Bin}/watchlist.json") );
my $http      = HTTP::Tiny->new;

my @grabs = @ARGV > 0 ? @ARGV : (keys %$watchlist);

for my $k (@grabs) {
    my $urls = $watchlist->{$k};

    # Guard against command-line keys that are not in the watchlist.
    unless (defined $urls) {
        warn "Unknown watchlist key: $k\n";
        next;
    }

    # A watchlist value may be a single URL or an array of URLs.
    $urls = [$urls] unless ref($urls);

    for my $url (@$urls) {
        my ($fn) = $url =~ m{/([^/]+)\.html$};

        # Without a ".html" basename we would build a malformed output path.
        unless (defined $fn) {
            warn "Skipping URL without a .html basename: $url\n";
            next;
        }

        # Use a single timestamp for the directory name (previously $now was
        # captured but gmtime(time) was called a second time).
        my $now = time;
        my ($sec, $min, $hour, $mday, $mon, $year) = gmtime($now);
        $year += 1900;
        $mon  += 1;

        my $output_dir = sprintf('data/%s/%s/%04d%02d%02d%02d%02d%02d',
            $k, $fn, $year, $mon, $mday, $hour, $min, $sec);
        make_path($output_dir) unless -d $output_dir;

        say "$k => $url => $output_dir";

        my $res      = $http->get($url);
        my $res_dump = $json->encode($res);

        # Always keep the full response dump; keep the body only on success.
        write_file "${output_dir}/http-response.json", $res_dump;
        if ($res->{success}) {
            write_file "${output_dir}/page.html", $res->{content};
        }
    }
}

chdir($FindBin::Bin);
system("git add -A data");
system(q< git commit --author 'nobody <nobody@nowhere>' --allow-empty-message -m '' >);
| gugod/vote-watch-2016 | run.pl | Perl | cc0-1.0 | 1,286 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V8::Services::FeedItemSetLinkService::GetFeedItemSetLinkRequest;

use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);

use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;

# Request message for FeedItemSetLinkService.Get: carries the resource name
# of the feed item set link to retrieve.
sub new {
  my ($class, $args) = @_;

  my $self = {resourceName => $args->{resourceName}};

  # Drop fields that were never assigned so the serialized JSON stays concise.
  remove_unassigned_fields($self, $args);

  return bless $self, $class;
}

1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V8/Services/FeedItemSetLinkService/GetFeedItemSetLinkRequest.pm | Perl | apache-2.0 | 1,063 |
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::juniper::mag::plugin;

use strict;
use warnings;

use base qw(centreon::plugins::script_snmp);

# SNMP plugin entry point for Juniper MAG appliances: registers the mode
# table mapping mode names to their implementing packages.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';

    my %mode_table = (
        'blade-temperature' => 'network::juniper::mag::mode::bladetemperature',
        'cpu'               => 'network::juniper::common::ive::mode::cpu',
        'disk'              => 'network::juniper::common::ive::mode::disk',
        'interfaces'        => 'snmp_standard::mode::interfaces',
        'list-interfaces'   => 'snmp_standard::mode::listinterfaces',
        'logfile'           => 'network::juniper::common::ive::mode::logfile',
        'users'             => 'network::juniper::common::ive::mode::users',
        'memory'            => 'snmp_standard::mode::memory',
        'swap'              => 'snmp_standard::mode::swap',
    );
    # Fill the (possibly pre-existing) modes hash in place.
    %{$self->{modes}} = %mode_table;

    return $self;
}

1;
__END__
=head1 PLUGIN DESCRIPTION
Check Juniper MAG in SNMP.
=cut
| centreon/centreon-plugins | network/juniper/mag/plugin.pm | Perl | apache-2.0 | 1,839 |
package Google::Ads::AdWords::v201809::CampaignSharedSetService::getResponse;
use strict;
use warnings;
{ # BLOCK to scope variables
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201809' }
__PACKAGE__->__set_name('getResponse');
__PACKAGE__->__set_nillable();
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();
use base qw(
SOAP::WSDL::XSD::Typelib::Element
Google::Ads::SOAP::Typelib::ComplexType
);
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;
# Returns the class handling XML attributes for this element; undef here
# means the generated getResponse element carries no XML attributes.
sub __get_attr_class {
return $XML_ATTRIBUTE_CLASS;
}
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);
{ # BLOCK to scope variables
my %rval_of :ATTR(:get<rval>);
__PACKAGE__->_factory(
[ qw( rval
) ],
{
'rval' => \%rval_of,
},
{
'rval' => 'Google::Ads::AdWords::v201809::CampaignSharedSetPage',
},
{
'rval' => 'rval',
}
);
} # end BLOCK
} # end of BLOCK
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201809::CampaignSharedSetService::getResponse
=head1 DESCRIPTION
Perl data type class for the XML Schema defined element
getResponse from the namespace https://adwords.google.com/api/adwords/cm/v201809.
=head1 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * rval
$element->set_rval($data);
$element->get_rval();
=back
=head1 METHODS
=head2 new
my $element = Google::Ads::AdWords::v201809::CampaignSharedSetService::getResponse->new($data);
Constructor. The following data structure may be passed to new():
{
rval => $a_reference_to, # see Google::Ads::AdWords::v201809::CampaignSharedSetPage
},
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| googleads/googleads-perl-lib | lib/Google/Ads/AdWords/v201809/CampaignSharedSetService/getResponse.pm | Perl | apache-2.0 | 1,803 |
package ACDwide::DB::Schema::Result::OperatorsFreeOnline;
# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE
use strict;
use warnings;
use base 'DBIx::Class::Core';
=head1 NAME
ACDwide::DB::Schema::Result::OperatorsFreeOnline
=cut
__PACKAGE__->table("operators_free_online");
=head1 ACCESSORS
=head2 operator_id
data_type: 'integer'
is_nullable: 1
=head2 weight
data_type: 'integer'
is_nullable: 1
=head2 timestamp
data_type: 'integer'
is_nullable: 1
=head2 id
data_type: 'integer'
is_nullable: 1
=cut
__PACKAGE__->add_columns(
"operator_id",
{ data_type => "integer", is_nullable => 1 },
"weight",
{ data_type => "integer", is_nullable => 1 },
"timestamp",
{ data_type => "integer", is_nullable => 1 },
"id",
{ data_type => "integer", is_nullable => 1 },
);
# Created by DBIx::Class::Schema::Loader v0.07010 @ 2015-06-23 15:13:25
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:isX5PouZ+jauis3SwGunzw
# You can replace this text with custom code or comments, and it will be preserved on regeneration

# NOTE(review): this result class declares columns only -- no primary key or
# relationships are set up above.  Confirm whether a
# __PACKAGE__->set_primary_key(...) call belongs in this custom section
# before relying on find()/update() through DBIx::Class.
1;
| ivanoff/ACDwide | opt/ACDwide/DB/Schema/Result/OperatorsFreeOnline.pm | Perl | apache-2.0 | 1,092 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V8::Services::CampaignCriterionSimulationService;

use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseService);

# Fetches a CampaignCriterionSimulation resource by resource name via a GET
# request against the v8 REST endpoint; delegates transport to BaseService.
sub get {
  my ($self, $request_body) = @_;

  my $http_method   = 'GET';
  my $request_path  = 'v8/{+resourceName}';
  my $response_type =
    'Google::Ads::GoogleAds::V8::Resources::CampaignCriterionSimulation';

  return $self->SUPER::call($http_method, $request_path, $request_body,
    $response_type);
}

1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V8/Services/CampaignCriterionSimulationService.pm | Perl | apache-2.0 | 1,069 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::emc::symmetrix::dmx34::local::mode::components::disk;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
# ------------[ Device Information ]-------------
#
# RAID6 Configured: NO
# RAID5 Configured: NO
# RDF Configured: NO_RDF
# Verify Volume Status
# There are 16 local devices are not ready (device(DA)):- (0 if there is no problem)
# 10000(01a) 10072(01a) 20086(01a) 1009A(01a) 200AE(01a) 100C2(01a) 100EA(01a)
# 10112(01a) 20075(01d) 10089(01d) 2009D(01d) 100B1(01d) 100C9(01d) 100F1(01d)
# 10119(01d) 20061(01d)
#
# No local devices have invalid tracks
#
# Deferred disk service is NOT enabled
#
# 8 hot spares are configured, 1 are invoked, none are not ready ("none" invoked if there is no problem)
#
# HotSpare 16d:D5 is invoked against 1d:D4 Time: MAR/24/16 04:48:49 (line present only when there is a problem)
#
# No DAs have any volumes with Not Ready bit set
#
# All DAs have Write Optimize enabled
#
# No devices have TimeFinder Lock
#
# No Devices Found in Transient State
sub check {
    # Parse the "[ Device Information ]" section of a Symmetrix DMX3/4 health
    # report and raise alerts on invalid tracks, not-ready devices and low
    # spare-disk counts.  Reads $self->{content_file_health}; writes results
    # through $self->{output} and $self->{components}.
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking disks");
    # Disks are checked as a single aggregate component, hence "1 means all".
    $self->{components}->{disk} = {name => 'disks (1 means all)', total => 0, skip => 0};
    return if ($self->check_filter(section => 'disk'));

    # Capture everything between the Device Information header and the next
    # section header; bail out quietly if the section is absent.
    if ($self->{content_file_health} !~ /----\[ Device Information(.*?)----\[/msi) {
        $self->{output}->output_add(long_msg => 'skipping: cannot find devices');
        return ;
    }
    my $content = $1;

    $self->{components}->{disk}->{total}++;

    # Error if not present:
    # No local devices have invalid tracks
    if ($content !~ /No local devices have invalid tracks/msi) {
        $self->{output}->output_add(severity => 'CRITICAL',
                                    short_msg => sprintf("problem of invalid tracks on disks"));
    } else {
        $self->{output}->output_add(long_msg => sprintf("no invalid tracks on disks"));
    }

    # Error if not present:
    # All local devices are ready
    if ($content !~ /All local devices are ready/msi) {
        # Pull the device count and the (possibly multi-line) device list,
        # then collapse the list onto one line for the long output.
        $content =~ /There are\s+(\S+)\s+local devices are not ready.*?\n(.*?)\n\s*\n/msi;
        my ($num, $disks) = ($1, $2);
        $disks =~ s/\n/ /msg;
        $disks =~ s/\s+/ /msg;
        $disks =~ s/^\s+//;
        $self->{output}->output_add(long_msg => sprintf("problem on following disks '%s'", $disks));
        $self->{output}->output_add(severity => 'CRITICAL',
                                    short_msg => sprintf("problem on '%s' disks", $num));
    } else {
        $self->{output}->output_add(long_msg => sprintf("all devices are ready"));
    }

    # Hot spare accounting: "<total> hot spares are configured, <used> are
    # invoked, <not_ready> are not ready"; counts may read "none".
    return if ($content !~ /(\S+) hot spares are configured,\s*(\S+)\s+are invoked,\s*(\S+)\s+are not ready/msi);
    my ($total, $used, $not_ready) = ($1, $2, $3);
    $used = 0 if ($used =~ /none/i);
    $not_ready = 0 if ($not_ready =~ /none/i);

    # Available spares = configured - invoked - not ready; severity comes
    # from the user-configurable numeric thresholds for section 'disk'.
    my ($exit, $warn, $crit, $checked) = $self->get_severity_numeric(section => 'disk', instance => '1', value => $total - $used - $not_ready);
    $self->{output}->output_add(long_msg => sprintf("'%s' spare disk availables on '%s'",
                                                    $total - $used - $not_ready, $total));
    if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
        $self->{output}->output_add(severity => $exit,
                                    short_msg => sprintf("'%s' spare disk availables on '%s'",
                                                         $total - $used - $not_ready, $total));
    }
    $self->{output}->perfdata_add(
        label => "disk_spare_available",
        nlabel => 'hardware.disk.spare.available.count',
        value => $total - $used - $not_ready,
        warning => $warn,
        critical => $crit, min => 0, max => $total
    );
}
1;
| Sims24/centreon-plugins | storage/emc/symmetrix/dmx34/local/mode/components/disk.pm | Perl | apache-2.0 | 4,624 |
% prolog solution for number 3
% Name: Oscar Kurniawan Manule
% NPM: 0706272080
% Collaboration with Denvil
% deloneelem(+X, +List, -Rest): Rest is List with one occurrence of X
% removed (List is returned unchanged when X does not occur).
% FIX: the base-case clause was misspelled "delonelem", so deletion from the
% empty list (i.e. when X is absent) could never succeed.
deloneelem(_, [], []).
deloneelem(X,[X|T1],T1).
deloneelem(X,[H|T1],[H|T2]) :- deloneelem(X,T1,T2),!.
| misugijunz/codingexercise | algorithm/logic_programming/sourcecode/PR2/no3.pl | Perl | apache-2.0 | 217 |
#!/usr/bin/perl
use strict;
use warnings;

# Print the cross product of two lists, one pair per line:
#   1 a / 1 b / 2 a / 2 b
# Lexical loop variables are used instead of the original globals $a and $b,
# which are Perl's reserved sort() comparison variables.
foreach my $outer ('1', '2') {
    foreach my $inner ('a', 'b') {
        print "$outer $inner\n";
    }
}
| mileiio/pyforsysadmin | nestedforloop/nested_for_loop.pl | Perl | apache-2.0 | 130 |
#!/usr/bin/perl
#
# Copyright 2016 Georgia Institute of Technology
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#use 5.012;
use strict;
use Config::General;
use FindBin qw( $RealBin );
use lib "$RealBin/../lib";
use PuNDIT::Central::Localization::TrReceiver;
# debug. Remove this for production
use Data::Dumper;
=pod
=head1 DESCRIPTION
This is a test library for verifying TrReceiver behaviour
=cut
my $configFile = $RealBin . "/../etc/pundit_central.conf";
my %cfgHash = Config::General::ParseConfig($configFile);
my $fedName = 'federation1';
my $trRcv;
my $delay = 120; # time to delay by, in seconds
sub main
{
    # Give the receiver a moment to start collecting before the first query.
    sleep 10;

    my $window_start = time - ($delay + 60);

    # Poll forever: fetch each 65-second traceroute window, dump it, then
    # slide the window forward by 5 seconds.
    while (1)
    {
        my $trMatrix = $trRcv->getTrMatrix($window_start, $window_start + 5 + 60);

        print "TR Matrix ";
        print Dumper($trMatrix);

        $window_start += 5;    # advance window by 5

        # Block until the (delay-shifted) wall clock catches up.
        sleep(5) while ((time - $delay) < $window_start);
    }
}
$trRcv = new PuNDIT::Central::Localization::TrReceiver(\%cfgHash, $fedName);
main(); | pundit-project/pundit | pundit-central/tests/TrReceiverTest.pl | Perl | apache-2.0 | 1,688 |
package Paws::Route53::ListResourceRecordSetsResponse;
  use Moose;
  # True when more record sets remain beyond this page (see NextRecord* cursors).
  has IsTruncated => (is => 'ro', isa => 'Bool', required => 1);
  # Maximum number of records requested.
  has MaxItems => (is => 'ro', isa => 'Str', required => 1);
  # Pagination cursors for the follow-up request; present only when truncated.
  has NextRecordIdentifier => (is => 'ro', isa => 'Str');
  has NextRecordName => (is => 'ro', isa => 'Str');
  has NextRecordType => (is => 'ro', isa => 'Str');
  # The resource record sets returned on this page.
  has ResourceRecordSets => (is => 'ro', isa => 'ArrayRef[Paws::Route53::ResourceRecordSet]', required => 1);
  # AWS request id, filled in by the Paws call layer.
  has _request_id => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::Route53::ListResourceRecordSetsResponse
=head1 ATTRIBUTES
=head2 B<REQUIRED> IsTruncated => Bool
A flag that indicates whether more resource record sets remain to be
listed. If your results were truncated, you can make a follow-up
pagination request by using the C<NextRecordName> element.
=head2 B<REQUIRED> MaxItems => Str
The maximum number of records you requested.
=head2 NextRecordIdentifier => Str
I<Weighted, latency, geolocation, and failover resource record sets
only>: If results were truncated for a given DNS name and type, the
value of C<SetIdentifier> for the next resource record set that has the
current DNS name and type.
=head2 NextRecordName => Str
If the results were truncated, the name of the next record in the list.
This element is present only if C<IsTruncated> is true.
=head2 NextRecordType => Str
If the results were truncated, the type of the next record in the list.
This element is present only if C<IsTruncated> is true.
Valid values are: C<"SOA">, C<"A">, C<"TXT">, C<"NS">, C<"CNAME">, C<"MX">, C<"NAPTR">, C<"PTR">, C<"SRV">, C<"SPF">, C<"AAAA">, C<"CAA">
=head2 B<REQUIRED> ResourceRecordSets => ArrayRef[L<Paws::Route53::ResourceRecordSet>]
Information about multiple resource record sets.
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/Route53/ListResourceRecordSetsResponse.pm | Perl | apache-2.0 | 1,841 |
=head1 LICENSE
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
# Copyright [2016-2018] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::Analysis::RunnableDB::AlignmentChains -
=head1 SYNOPSIS
my $db = Bio::EnsEMBL::DBAdaptor->new($locator);
my $genscan = Bio::EnsEMBL::Analysis::RunnableDB::AlignmentChains->new (
-db => $db,
-input_id => $input_id
-analysis => $analysis );
$genscan->fetch_input();
$genscan->run();
$genscan->write_output(); #writes to DB
=head1 DESCRIPTION
Given an compara MethodLinkSpeciesSet identifer, and a reference genomic
slice identifer, fetches the GenomicAlignBlocks from the given compara
database, forms them into sets of alignment chains, and writes the result
back to the database.
This module (at least for now) relies heavily on Jim Kent\'s Axt tools.
=head1 METHODS
=cut
package Bio::EnsEMBL::Analysis::RunnableDB::AlignmentChains;
use warnings ;
use vars qw(@ISA);
use strict;
use Bio::EnsEMBL::Analysis::RunnableDB;
use Bio::EnsEMBL::Analysis::Config::General;
use Bio::EnsEMBL::Analysis::RunnableDB::AlignmentFilter;
use Bio::EnsEMBL::Analysis::Config::AlignmentFilter;
use Bio::EnsEMBL::Analysis::Runnable::AlignmentChains;
use Bio::EnsEMBL::Compara::DBSQL::DBAdaptor;
use Bio::EnsEMBL::DBSQL::DBAdaptor;
use Bio::EnsEMBL::DnaDnaAlignFeature;
use Bio::EnsEMBL::Utils::Exception qw(throw warning);
use Bio::EnsEMBL::Utils::Argument qw( rearrange );
@ISA = qw(Bio::EnsEMBL::Analysis::RunnableDB::AlignmentFilter);
############################################################
sub new {
  my $class = shift;
  my $self  = $class->SUPER::new(@_);

  # Load and validate the per-logic-name chain configuration for this
  # analysis before the object is handed back.
  $self->read_and_check_config($CHAIN_CONFIG_BY_LOGIC);

  return $self;
}
=head2 fetch_input
Title : fetch_input
Usage : $self->fetch_input
Returns : nothing
Args : none
=cut
sub fetch_input {
my( $self) = @_;
throw("No input id") unless defined($self->input_id);
my ($seq_name, $target_chunk_total, $target_chunk_id);
if ($self->input_id =~ /^([^:]+):(\d+):(\d+)$/) {
($seq_name, $target_chunk_total, $target_chunk_id) = ($1, $2, $3);
} elsif ($self->input_id =~ /(\S+)/) {
($seq_name, $target_chunk_total, $target_chunk_id) = ($1, 1, 1);
} else {
throw("Input id could not be parsed: ", $self->input_id);
}
$self->OUTPUT_GROUP_TYPE("chain") unless (defined $self->OUTPUT_GROUP_TYPE);
my $q_dbh = Bio::EnsEMBL::DBSQL::DBAdaptor->new(%{$self->QUERY_CORE_DB});
my $t_dbh = Bio::EnsEMBL::DBSQL::DBAdaptor->new(%{$self->TARGET_CORE_DB});
my $compara_dbh = Bio::EnsEMBL::Compara::DBSQL::DBAdaptor->new(%{$self->COMPARA_DB});
my $query_species = $q_dbh->get_MetaContainerAdaptor->get_Species->binomial;
my $target_species = $t_dbh->get_MetaContainerAdaptor->get_Species->binomial;
my $q_gdb = $compara_dbh->get_GenomeDBAdaptor->fetch_by_name_assembly($query_species);
my $t_gdb = $compara_dbh->get_GenomeDBAdaptor->fetch_by_name_assembly($target_species);
################################################
# check that the default assembly for the query and target agrees with that
# for the method_link_species_set GenomeDBs
#################################################
my ($q_assembly_version, $t_assembly_version);
eval {
$q_assembly_version = $q_dbh->get_CoordSystemAdaptor->fetch_by_name
('toplevel',
$q_gdb->assembly);
$t_assembly_version = $t_dbh->get_CoordSystemAdaptor->fetch_by_name
('toplevel',
$t_gdb->assembly);
};
$@ and do {
throw("Had trouble fetching coord systems for ".
$q_gdb->assembly . " and " .
$t_gdb->assembly . " from core dbs: $@");
};
#############
# query Slice
#############
# since compara only stores toplevel DNA fragments, we assume
# supplied slice identifer corresponds to a top-level region.
my $ref_slice = $q_dbh->get_SliceAdaptor->fetch_by_region('toplevel',
$seq_name,
undef,
undef,
undef,
$q_assembly_version);
throw("Could not fetch top level query slice for '$seq_name'") if not defined $ref_slice;
################################################################
# get the compara data: MethodLinkSpeciesSet, reference DnaFrag,
# and all GenomicAlignBlocks
################################################################
my $mlss = $compara_dbh->get_MethodLinkSpeciesSetAdaptor
->fetch_by_method_link_type_GenomeDBs($self->INPUT_METHOD_LINK_TYPE,
[$q_gdb, $t_gdb]);
throw("No MethodLinkSpeciesSet for :\n" .
$self->INPUT_METHOD_LINK_TYPE . "\n" .
$query_species . "\n" .
$target_species)
if not $mlss;
my $out_mlss = $compara_dbh->get_MethodLinkSpeciesSetAdaptor
->fetch_by_method_link_type_GenomeDBs($self->OUTPUT_METHOD_LINK_TYPE,
[$q_gdb, $t_gdb]);
throw("No MethodLinkSpeciesSet for :\n" .
$self->OUTPUT_METHOD_LINK_TYPE . "\n" .
$query_species . "\n" .
$target_species)
if not $out_mlss;
######## needed for output####################
$self->output_MethodLinkSpeciesSet($out_mlss);
my $ref_dnafrag = $compara_dbh->get_DnaFragAdaptor->fetch_by_GenomeDB_and_name($q_gdb,
$seq_name);
print STDERR "Fetching all GenomicAlignBlocks and sorting them by target...\n";
my %blocks_by_target;
foreach my $block (@{$compara_dbh->get_GenomicAlignBlockAdaptor
->fetch_all_by_MethodLinkSpeciesSet_DnaFrag($mlss, $ref_dnafrag)}) {
#my ($qy_al) = $block->reference_genomic_align;
my ($tg_al) = @{$block->get_all_non_reference_genomic_aligns};
my $tg_name = $tg_al->dnafrag->name;
# the following awful hack releases the contained genomic_aligns;
# since we wmay only be using a fraction of the total set of blocks,
# the tome lost in having to requery for the relevant genomic_aligns
# later is more than offset by a big memory saving
$block->{'genomic_align_array'} = undef;
push @{$blocks_by_target{$tg_name}}, $block;
}
print STDERR "Gathering sequence converting features for id group $target_chunk_id of $target_chunk_total...\n";
###################################################################
# Fetch slices and make features for blocks involving only the
# designated subset of target sequences
###################################################################
my @all_target_names = sort keys %blocks_by_target;
my (%target_slices, %features_by_target);
for(my $i = $target_chunk_id - 1;
$i < @all_target_names;
$i += $target_chunk_total) {
my $target_name = $all_target_names[$i];
$target_slices{$target_name} =
$t_dbh->get_SliceAdaptor->fetch_by_region('toplevel',
$target_name,
undef,
undef,
undef,
$t_assembly_version);
foreach my $target_block (@{$blocks_by_target{$target_name}}) {
my ($qy_al) = $target_block->reference_genomic_align;
my ($tg_al) = @{$target_block->get_all_non_reference_genomic_aligns};
if (not exists($self->query_DnaFrag_hash->{$qy_al->dnafrag->name})) {
######### needed for output ######################################
$self->query_DnaFrag_hash->{$qy_al->dnafrag->name} = $qy_al->dnafrag;
}
if (not exists($self->target_DnaFrag_hash->{$target_name})) {
######### needed for output #######################################
$self->target_DnaFrag_hash->{$tg_al->dnafrag->name} = $tg_al->dnafrag;
}
my $daf_cigar = $self->daf_cigar_from_compara_cigars($qy_al->cigar_line,
$tg_al->cigar_line);
if (defined $daf_cigar) {
my $daf = Bio::EnsEMBL::DnaDnaAlignFeature->new
(-seqname => $qy_al->dnafrag->name,
-start => $qy_al->dnafrag_start,
-end => $qy_al->dnafrag_end,
-strand => $qy_al->dnafrag_strand,
-hseqname => $tg_al->dnafrag->name,
-hstart => $tg_al->dnafrag_start,
-hend => $tg_al->dnafrag_end,
-hstrand => $tg_al->dnafrag_strand,
-cigar_string => $daf_cigar);
push @{$features_by_target{$tg_al->dnafrag->name}}, $daf;
}
}
}
print STDERR "Sorting resulting features into batches...\n";
######################################################################
# each runnable comprises blocks from a subset of the target sequences
######################################################################
my $target_batches = $self->form_target_batches(\%target_slices);
foreach my $targets (@$target_batches) {
my (%these_target_slices, @features);
foreach my $t_id (@$targets) {
$these_target_slices{$t_id} = $target_slices{$t_id};
push @features, @{$features_by_target{$t_id}};
}
printf(STDERR "Making runnable with %d targets (%d features)\n", scalar(@$targets), scalar(@features));
my %parameters = (-analysis => $self->analysis,
-query_slice => $ref_slice,
-target_slices => \%these_target_slices,
-query_nib_dir => $self->QUERY_NIB_DIR,
-target_nib_dir => $self->TARGET_NIB_DIR,
-min_chain_score => $self->MIN_CHAIN_SCORE,
-features => \@features);
foreach my $program (qw(faToNib lavToAxt axtChain)) {
$parameters{'-' . $program} = $BIN_DIR . "/" . $program;
}
my $runnable = Bio::EnsEMBL::Analysis::Runnable::AlignmentChains->new(%parameters);
$self->runnable($runnable);
}
}
###########################################
sub form_target_batches {
    # Partition the target slice names into batches of at most 1000
    # entries; each batch is later turned into a single runnable.
    #
    # Arg [1]   : hashref of target-sequence name => Bio::EnsEMBL::Slice
    # Returns   : arrayref of arrayrefs of target-sequence names
    #
    # NOTE: hash-key iteration order is not deterministic, so batch
    # membership can differ between runs (same as the original code).
    my ($self, $t_slices) = @_;

    my $max_batch_size = 1000;

    my @batches;
    my $batch_index = 0;
    my $batch_count = 0;   # entries accumulated in the current batch

    foreach my $hname (keys %{$t_slices}) {
        push @{$batches[$batch_index]}, $hname;
        # start a fresh batch once the current one is full
        if (++$batch_count >= $max_batch_size) {
            $batch_count = 0;
            $batch_index++;
        }
    }

    return \@batches;
}
####################################
# config variable holders
####################################
# Read the analysis configuration via the parent class and then verify
# that every mandatory configuration variable is set for this logic name
# (or via the DEFAULT entry); throws otherwise.
sub read_and_check_config {
my ($self, $hash) = @_;
$self->SUPER::read_and_check_config($hash);
my $logic = $self->analysis->logic_name;
# each of these names is also an accessor method on $self, hence $self->$var
foreach my $var (qw(INPUT_METHOD_LINK_TYPE
OUTPUT_METHOD_LINK_TYPE
QUERY_CORE_DB
TARGET_CORE_DB
COMPARA_DB)) {
throw("You must define $var in config for logic '$logic'" .
" or in the DEFAULT entry")
if not $self->$var;
}
}
# Combined getter/setter for the query-species core database settings.
sub QUERY_CORE_DB {
    my ($self, $db) = @_;

    $self->{_query_core_db} = $db if defined $db;
    return $self->{_query_core_db};
}
# Combined getter/setter for the target-species core database settings.
sub TARGET_CORE_DB {
    my ($self, $db) = @_;

    $self->{_target_core_db} = $db if defined $db;
    return $self->{_target_core_db};
}
# Combined getter/setter for the directory holding query-sequence .nib files.
sub QUERY_NIB_DIR {
    my ($self, $dir) = @_;

    $self->{_query_nib_dir} = $dir if defined $dir;
    return $self->{_query_nib_dir};
}
# Combined getter/setter for the directory holding target-sequence .nib files.
sub TARGET_NIB_DIR {
    my ($self, $dir) = @_;

    $self->{_target_nib_dir} = $dir if defined $dir;
    return $self->{_target_nib_dir};
}
1;
| kiwiroy/ensembl-analysis | modules/Bio/EnsEMBL/Analysis/RunnableDB/AlignmentChains.pm | Perl | apache-2.0 | 13,023 |
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package apps::pineapp::securemail::snmp::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_snmp);
# Constructor: build on the generic SNMP-script base class and register
# the modes this plugin supports.
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options);
bless $self, $class;
$self->{version} = '1.0';
# assign into the existing hash (rather than replacing the ref) so any
# hashref installed by the base class keeps its identity
%{$self->{modes}} = (
'system' => 'apps::pineapp::securemail::snmp::mode::system',
);
return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check PineApp SecureMail in SNMP.
=cut
| centreon/centreon-plugins | apps/pineapp/securemail/snmp/plugin.pm | Perl | apache-2.0 | 1,253 |
package LHBclass;
use strict;
use warnings;
require Exporter;
use vars qw(@ISA @EXPORT $VERSION);
use HingeAlchemy::ConstHAlch;
our @ISA = qw(Exporter);
our @EXPORT = qw(vrclass sumvrclass);
$VERSION=1.0;
# Classify residues of structural motifs into 9 (R, V) classes and
# accumulate per-class counts and probabilities separately for loops
# (type 'L'), helices ('H') and beta strands ('B').
#
# Positional arguments:
#   $ns      - number of standard deviations defining each class window
#   $mnr     - index of the last motif to consider
#   $mlen    - array ref of motif lengths
#   $mottyp  - array ref of motif types ('L', 'H' or 'B')
#   $minl    - minimum motif length for a motif to be counted
#   $motif   - ref to a 3-D array; $$motif[$k][$z][3] / [4] are assumed
#              to be the R and V values of residue $z of motif $k
#              (TODO confirm against the caller)
#   $lsum, $bsum, $hsum            - scalar refs: total residue counts
#   $l_pcl, $h_pcl, $b_pcl         - array refs: per-class probabilities (out)
#   $l_totccl, $h_totccl, $b_totccl - array refs: cumulative counts (out)
sub vrclass{
my (@Ra1_r,@Ra1_l,@Va1_r,@Va1_l,@l_ccl,@b_ccl,@h_ccl,@l_pcl,@b_pcl,@h_pcl,$R,$V);
# NOTE(review): @Ra1_1..@Ra1_9 and @Va1_1..@Va1_9 are declared as
# lexicals here but never assigned, so they shadow any identically
# named package variables that HingeAlchemy::ConstHAlch may export;
# all class boundaries below are then computed from undef (treated as
# 0). Verify whether these two 'my' declarations should be removed.
my (@Ra1_1,@Ra1_2,@Ra1_3,@Ra1_4,@Ra1_5,@Ra1_6,@Ra1_7,@Ra1_8,@Ra1_9);
my (@Va1_1,@Va1_2,@Va1_3,@Va1_4,@Va1_5,@Va1_6,@Va1_7,@Va1_8,@Va1_9);
my ($k,$l,$z);
my ($ns,$mnr,$mlen,$mottyp,$minl,$motif,$lsum,$bsum,$hsum,$l_pcl,$h_pcl,$b_pcl,$l_totccl,$h_totccl,$b_totccl)=@_;
# Class windows for each of the 9 classes: centre ([0]) +/- $ns
# standard deviations ([1]) in both the R and the V dimension.
$Ra1_l[1]=$Ra1_1[0]-$ns*$Ra1_1[1];
$Ra1_r[1]=$Ra1_1[0]+$ns*$Ra1_1[1];
$Ra1_l[2]=$Ra1_2[0]-$ns*$Ra1_2[1];
$Ra1_r[2]=$Ra1_2[0]+$ns*$Ra1_2[1];
$Ra1_l[3]=$Ra1_3[0]-$ns*$Ra1_3[1];
$Ra1_r[3]=$Ra1_3[0]+$ns*$Ra1_3[1];
$Ra1_l[4]=$Ra1_4[0]-$ns*$Ra1_4[1];
$Ra1_r[4]=$Ra1_4[0]+$ns*$Ra1_4[1];
$Ra1_l[5]=$Ra1_5[0]-$ns*$Ra1_5[1];
$Ra1_r[5]=$Ra1_5[0]+$ns*$Ra1_5[1];
$Ra1_l[6]=$Ra1_6[0]-$ns*$Ra1_6[1];
$Ra1_r[6]=$Ra1_6[0]+$ns*$Ra1_6[1];
$Ra1_l[7]=$Ra1_7[0]-$ns*$Ra1_7[1];
$Ra1_r[7]=$Ra1_7[0]+$ns*$Ra1_7[1];
$Ra1_l[8]=$Ra1_8[0]-$ns*$Ra1_8[1];
$Ra1_r[8]=$Ra1_8[0]+$ns*$Ra1_8[1];
$Ra1_l[9]=$Ra1_9[0]-$ns*$Ra1_9[1];
$Ra1_r[9]=$Ra1_9[0]+$ns*$Ra1_9[1];
$Va1_l[1]=$Va1_1[0]-$ns*$Va1_1[1];
$Va1_r[1]=$Va1_1[0]+$ns*$Va1_1[1];
$Va1_l[2]=$Va1_2[0]-$ns*$Va1_2[1];
$Va1_r[2]=$Va1_2[0]+$ns*$Va1_2[1];
$Va1_l[3]=$Va1_3[0]-$ns*$Va1_3[1];
$Va1_r[3]=$Va1_3[0]+$ns*$Va1_3[1];
$Va1_l[4]=$Va1_4[0]-$ns*$Va1_4[1];
$Va1_r[4]=$Va1_4[0]+$ns*$Va1_4[1];
$Va1_l[5]=$Va1_5[0]-$ns*$Va1_5[1];
$Va1_r[5]=$Va1_5[0]+$ns*$Va1_5[1];
$Va1_l[6]=$Va1_6[0]-$ns*$Va1_6[1];
$Va1_r[6]=$Va1_6[0]+$ns*$Va1_6[1];
$Va1_l[7]=$Va1_7[0]-$ns*$Va1_7[1];
$Va1_r[7]=$Va1_7[0]+$ns*$Va1_7[1];
$Va1_l[8]=$Va1_8[0]-$ns*$Va1_8[1];
$Va1_r[8]=$Va1_8[0]+$ns*$Va1_8[1];
$Va1_l[9]=$Va1_9[0]-$ns*$Va1_9[1];
$Va1_r[9]=$Va1_9[0]+$ns*$Va1_9[1];
# Reset the per-call counters. NOTE(review): $l_pcl[$l] etc. address
# the *lexical* arrays declared at the top, not the @$l_pcl output
# refs received as arguments, so output probability slots for classes
# with a zero sum are left untouched (undef) below — verify intended.
for $l (1..9){
$l_ccl[$l]=0;
$h_ccl[$l]=0;
$b_ccl[$l]=0;
$l_pcl[$l]=0;
$h_pcl[$l]=0;
$b_pcl[$l]=0;
}
# Count, per motif type, how many residues fall inside each class
# window (a residue may match several overlapping windows).
for $k (0..$mnr){
if($$mlen[$k]>=$minl){
if($$mottyp[$k] eq 'L'){#loops
for $z (1..$$mlen[$k]){
$R=$$motif[$k][$z][3];
$V=$$motif[$k][$z][4];
for $l (1..9){
# print "$R $V $Ra1_l[$l] $Ra1_r[$l] $Va1_l[$l] $Va1_r[$l]\n";
if($R >= $Ra1_l[$l] and $R <= $Ra1_r[$l] and $V >= $Va1_l[$l] and $V <= $Va1_r[$l]){$l_ccl[$l]++;}
}
}
}
if($$mottyp[$k] eq 'H'){#helices
for $z (1..$$mlen[$k]){
$R=$$motif[$k][$z][3];
$V=$$motif[$k][$z][4];
for $l (1..9){
if($R >= $Ra1_l[$l] and $R <= $Ra1_r[$l] and $V >= $Va1_l[$l] and $V <= $Va1_r[$l]){$h_ccl[$l]++;}
}
}
}
if($$mottyp[$k] eq 'B'){#betas
for $z (1..$$mlen[$k]){
$R=$$motif[$k][$z][3];
$V=$$motif[$k][$z][4];
for $l (1..9){
if($R >= $Ra1_l[$l] and $R <= $Ra1_r[$l] and $V >= $Va1_l[$l] and $V <= $Va1_r[$l]){$b_ccl[$l]++;}
}
}
}
}
}
#counts probabilities
for $l (1..9){
$$l_totccl[$l]=0;
$$h_totccl[$l]=0;
$$b_totccl[$l]=0;
}
# Probability = class count / total residue count of that motif type;
# also fold the per-call counts into the cumulative count arrays.
for $l (1..9){
if($$lsum>0){
$$l_pcl[$l]=$l_ccl[$l]/$$lsum;}
if($$hsum>0){
$$h_pcl[$l]=$h_ccl[$l]/$$hsum;}
if($$bsum>0){
$$b_pcl[$l]=$b_ccl[$l]/$$bsum;}
$$l_totccl[$l]=$$l_totccl[$l]+$l_ccl[$l];
$$h_totccl[$l]=$$h_totccl[$l]+$h_ccl[$l];
$$b_totccl[$l]=$$b_totccl[$l]+$b_ccl[$l];
}
}
# Convert the accumulated per-class counts into overall probabilities
# for loops, helices and beta strands (classes 1..9).
#
# All arguments are references: three scalar refs with the grand totals
# and six array refs (cumulative counts in, probabilities out). When a
# total is zero the corresponding probabilities are left at zero.
sub sumvrclass{
my ($lsumtot, $hsumtot, $bsumtot,
    $l_totccl, $h_totccl, $b_totccl,
    $l_totpcl, $h_totpcl, $b_totpcl) = @_;

for my $class (1 .. 9) {
    $$l_totpcl[$class] = $$lsumtot > 0 ? $$l_totccl[$class] / $$lsumtot : 0;
    $$h_totpcl[$class] = $$hsumtot > 0 ? $$h_totccl[$class] / $$hsumtot : 0;
    $$b_totpcl[$class] = $$bsumtot > 0 ? $$b_totccl[$class] / $$bsumtot : 0;
}
}
1; | wjurkowski/hint | modules/LHBclass.pm | Perl | apache-2.0 | 3,573 |
%
% Checks if structure is a binary tree.
% Example: tree(8,tree(4,tree(1,void,void),tree(5,void,void)),tree(12,void,void))
%
% A term is a binary tree if it is the atom 'void' or a tree/3 term
% whose left and right subtrees are themselves binary trees.
% The node value is not inspected, so it is written as the anonymous
% variable '_' (the original named it X, triggering a singleton warning).
is_binary_tree(void).
is_binary_tree(tree(_,L,R)) :- is_binary_tree(L), is_binary_tree(R).
| vlad1m1r990/PrologSamples | binary_tree/is_binary_tree.pl | Perl | apache-2.0 | 215 |
package Paws::Budgets::Subscriber;
# A budget-notification subscriber: a delivery address plus the channel
# it should be reached on. Both attributes are mandatory, read-only
# strings (see the POD below for usage).
use Moose;

has 'Address'          => (is => 'ro', isa => 'Str', required => 1);
has 'SubscriptionType' => (is => 'ro', isa => 'Str', required => 1);

1;
### main pod documentation begin ###
=head1 NAME
Paws::Budgets::Subscriber
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::Budgets::Subscriber object:
$service_obj->Method(Att1 => { Address => $value, ..., SubscriptionType => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be a Paws::Budgets::Subscriber object:
$result = $service_obj->Method(...);
$result->Att1->Address
=head1 DESCRIPTION
Subscriber model. Each notification may contain multiple subscribers
with different addresses.
=head1 ATTRIBUTES
=head2 B<REQUIRED> Address => Str
=head2 B<REQUIRED> SubscriptionType => Str
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::Budgets>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/Budgets/Subscriber.pm | Perl | apache-2.0 | 1,429 |
#
# Copyright 2018 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::purestorage::restapi::custom::api;
use strict;
use warnings;
use centreon::plugins::http;
use JSON;
# Constructor for the Pure Storage REST API custom mode.
#
# Required %options:
#   output  - centreon output object (hard failure, exit 3, if missing)
#   options - option parser; the mode's CLI arguments are registered on it
# Optional:
#   noptions - when set, skip registering the CLI arguments
#   mode     - name of the calling mode (stored for set_defaults)
sub new {
my ($class, %options) = @_;
my $self = {};
bless $self, $class;
if (!defined($options{output})) {
# no output object yet, so report on stdout and exit UNKNOWN
print "Class Custom: Need to specify 'output' argument.\n";
exit 3;
}
if (!defined($options{options})) {
$options{output}->add_option_msg(short_msg => "Class Custom: Need to specify 'options' argument.");
$options{output}->option_exit();
}
if (!defined($options{noptions})) {
$options{options}->add_options(arguments =>
{
"hostname:s" => { name => 'hostname' },
"username:s" => { name => 'username' },
"password:s" => { name => 'password' },
"proxyurl:s" => { name => 'proxyurl' },
"timeout:s" => { name => 'timeout' },
"ssl-opt:s@" => { name => 'ssl_opt' },
"api-path:s" => { name => 'api_path' },
});
}
$options{options}->add_help(package => __PACKAGE__, sections => 'REST API OPTIONS', once => 1);
$self->{output} = $options{output};
$self->{mode} = $options{mode};
# dedicated HTTP client used for all API calls
$self->{http} = centreon::plugins::http->new(output => $self->{output});
return $self;
}
# Store the parsed command-line option values for later validation by
# check_options().
sub set_options {
    my ($self, %options) = @_;

    $self->{option_results} = $options{option_results};
}
# Fill in default option values for the currently selected mode.
#
# $options{default} maps a mode name to an array of hashes; for the
# matching mode, each option at position $i is defaulted only when the
# user did not supply a value at that position.
sub set_defaults {
    my ($self, %options) = @_;

    foreach my $mode (keys %{$options{default}}) {
        next if $mode ne $self->{mode};

        my $defaults = $options{default}->{$mode};
        for my $i (0 .. $#{$defaults}) {
            while (my ($opt, $value) = each %{$defaults->[$i]}) {
                $self->{option_results}->{$opt}[$i] = $value
                    unless defined $self->{option_results}->{$opt}[$i];
            }
        }
    }
}
# Validate mandatory connection options and apply defaults.
#
# hostname, username and password are required (missing ones trigger
# add_option_msg + option_exit, in that order of checking); timeout
# defaults to 10 seconds and the API base path to '/api/1.11'.
# Returns 0 on success.
sub check_options {
    my ($self, %options) = @_;

    my $opts = $self->{option_results};
    $self->{hostname} = defined($opts->{hostname}) ? $opts->{hostname} : undef;
    $self->{username} = defined($opts->{username}) ? $opts->{username} : undef;
    $self->{password} = defined($opts->{password}) ? $opts->{password} : undef;
    $self->{timeout}  = defined($opts->{timeout})  ? $opts->{timeout}  : 10;
    $self->{proxyurl} = defined($opts->{proxyurl}) ? $opts->{proxyurl} : undef;
    $self->{api_path} = defined($opts->{api_path}) ? $opts->{api_path} : '/api/1.11';

    foreach my $required (qw(hostname username password)) {
        next if defined $self->{$required};
        $self->{output}->add_option_msg(short_msg => "Need to specify $required option.");
        $self->{output}->option_exit();
    }

    return 0;
}
# Unique identifier for this connection, "<hostname>_<port>", used by
# the framework as a cache key.
sub get_connection_infos {
    my ($self, %options) = @_;

    return join('_', $self->{hostname}, $self->{http}->get_port());
}
# Map this custom mode's settings onto the option hash later consumed by
# centreon::plugins::http; the Pure Storage API is always HTTPS on 443.
sub build_options_for_httplib {
    my ($self, %options) = @_;

    my %http_options = (
        hostname => $self->{hostname},
        timeout  => $self->{timeout},
        port     => 443,
        proto    => 'https',
        proxyurl => $self->{proxyurl},
    );
    $self->{option_results}->{$_} = $http_options{$_} for keys %http_options;
}
# Prepare the HTTP client for the next request: JSON headers, the
# session cookie when already authenticated, and the connection options.
sub settings {
my ($self, %options) = @_;
$self->build_options_for_httplib();
$self->{http}->add_header(key => 'Accept', value => 'application/json');
$self->{http}->add_header(key => 'Content-Type', value => 'application/json');
# only present after connect() succeeded
if (defined($self->{session_id})) {
$self->{http}->add_header(key => 'Cookie', value => 'session=' . $self->{session_id});
}
$self->{http}->set_options(%{$self->{option_results}});
}
# Perform one HTTP request against the API and return the decoded JSON.
#
# %options: method, url_path and (for POSTs) query_form_post.
# Exits the plugin when the body is not valid JSON or when the HTTP
# status is not 200.
sub request_api {
my ($self, %options) = @_;
# statuses are blanked so the http helper never exits on its own;
# error handling is done here on the decoded payload instead
my $content = $self->{http}->request(method => $options{method}, url_path => $options{url_path}, query_form_post => $options{query_form_post},
critical_status => '', warning_status => '', unknown_status => '');
my $response = $self->{http}->get_response();
my $decoded;
eval {
$decoded = decode_json($content);
};
if ($@) {
# keep the raw body in debug output to help diagnose the failure
$self->{output}->output_add(long_msg => $content, debug => 1);
$self->{output}->add_option_msg(short_msg => "Cannot decode json response");
$self->{output}->option_exit();
}
if ($response->code() != 200) {
# NOTE(review): assumes error payloads carry a 'msg' field — the
# message will show 'undef'-ish emptiness otherwise; verify with API
$self->{output}->add_option_msg(short_msg => "Connection issue: " . $decoded->{msg});
$self->{output}->option_exit();
}
return $decoded;
}
# Exchange the configured username/password for an API token
# (POST <api_path>/auth/apitoken). Exits the plugin on failure.
sub get_api_token {
my ($self, %options) = @_;
my $json_request = { username => $self->{username}, password => $self->{password} };
my $encoded;
eval {
$encoded = encode_json($json_request);
};
if ($@) {
$self->{output}->add_option_msg(short_msg => "Cannot encode json request");
$self->{output}->option_exit();
}
$self->settings();
my $decoded = $self->request_api(method => 'POST', url_path => $self->{api_path} . '/auth/apitoken', query_form_post => $encoded);
if (!defined($decoded->{api_token})) {
$self->{output}->add_option_msg(short_msg => "Cannot get api token");
$self->{output}->option_exit();
}
return $decoded->{api_token};
}
# Open an API session using a previously obtained token
# (POST <api_path>/auth/session) and return the session id extracted
# from the Set-Cookie response header. Exits the plugin on failure.
sub get_session {
my ($self, %options) = @_;
my $json_request = { api_token => $options{api_token} };
my $encoded;
eval {
$encoded = encode_json($json_request);
};
if ($@) {
$self->{output}->add_option_msg(short_msg => "Cannot encode json request");
$self->{output}->option_exit();
}
$self->settings();
my $decoded = $self->request_api(method => 'POST', url_path => $self->{api_path} . '/auth/session', query_form_post => $encoded);
my $headers = $self->{http}->get_header();
my $cookie = $headers->header('Set-Cookie');
if (!defined($cookie)) {
$self->{output}->add_option_msg(short_msg => "Cannot get session");
$self->{output}->option_exit();
}
# NOTE(review): the unanchored greedy match assumes the cookie looks
# like 'session=<id>;...'; if it ever fails to match, $1 keeps a stale
# value from a previous match — consider checking the match result.
$cookie =~ /session=(.*);/;
return $1;
}
# Full authentication sequence: fetch an API token, then open a session
# and remember its id for subsequent requests (used as a cookie).
sub connect {
my ($self, %options) = @_;
my $api_token = $self->get_api_token();
$self->{session_id} = $self->get_session(api_token => $api_token);
}
# Perform an authenticated GET on the Pure Storage REST API.
#
# %options: path - relative URL, appended to the API base path.
# Returns the decoded JSON response.
sub get_object {
    my ($self, %options) = @_;

    # Bug fix: authentication state lives in $self->{session_id}
    # (connect() never sets an 'api_token' key), so the old check on
    # $self->{api_token} was always false and forced a full
    # re-authentication round-trip on every single call.
    if (!defined($self->{session_id})) {
        $self->connect();
    }
    $self->settings();
    return $self->request_api(method => 'GET', url_path => $self->{api_path} . $options{path});
}
# Destructor: politely close the API session (DELETE /auth/session)
# when one was opened during this run.
sub DESTROY {
my $self = shift;
if (defined($self->{session_id})) {
$self->request_api(method => 'DELETE', url_path => $self->{api_path} . '/auth/session');
}
}
1;
__END__
=head1 NAME
Pure Storage REST API
=head1 SYNOPSIS
Pure Storage Rest API custom mode
=head1 REST API OPTIONS
=over 8
=item B<--hostname>
Pure Storage hostname.
=item B<--username>
Pure Storage username.
=item B<--password>
Pure Storage password.
=item B<--proxyurl>
Proxy URL if any.
=item B<--timeout>
Set HTTP timeout in seconds (Default: '10').
=item B<--ssl-opt>
Set SSL Options (--ssl-opt="SSL_version => TLSv1" --ssl-opt="SSL_verify_mode => SSL_VERIFY_NONE").
=item B<--api-path>
API base url path (Default: '/api/1.11').
=back
=head1 DESCRIPTION
B<custom>.
=cut
| wilfriedcomte/centreon-plugins | storage/purestorage/restapi/custom/api.pm | Perl | apache-2.0 | 8,606 |
# please insert nothing before this line: -*- mode: cperl; cperl-indent-level: 4; cperl-continued-statement-offset: 4; indent-tabs-mode: nil -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package ModPerl::BuildMM;
use strict;
use warnings;
use ExtUtils::MakeMaker ();
use Cwd ();
use File::Spec::Functions qw(catdir catfile splitdir);
use File::Basename;
use File::Find;
use Apache2::Build ();
use ModPerl::MM;
use constant WIN32 => Apache2::Build::WIN32;
use constant CYGWIN => Apache2::Build::CYGWIN;
our %PM; #add files to installation
# MM methods that this package overrides
no strict 'refs';
my $stash = \%{__PACKAGE__ . '::MY::'};
my @methods = grep *{$stash->{$_}}{CODE}, keys %$stash;
ModPerl::MM::override_eu_mm_mv_all_methods(@methods);
use strict 'refs';
my $apache_test_dir = catdir Cwd::getcwd(), "Apache-Test", "lib";
#to override MakeMaker MOD_INSTALL macro
sub mod_install {
q{$(PERL) -I$(INST_LIB) -I$(PERL_LIB) \\}."\n" .
qq{-I$apache_test_dir -MModPerl::BuildMM \\}."\n" .
q{-e "ExtUtils::Install::install({@ARGV},'$(VERBINST)',0,'$(UNINST)');"}."\n";
}
my $build;
sub build_config {
    # Lazily load and cache the Apache2::Build configuration object.
    # With no argument the whole object is returned; with a key, the
    # corresponding configuration value.
    my ($key) = @_;

    $build ||= Apache2::Build->build_config;
    return $key ? $build->{$key} : $build;
}
#the parent WriteMakefile moves MY:: methods into a different class
#so alias them each time WriteMakefile is called in a subdir
#the parent WriteMakefile moves MY:: methods into a different class
#so alias them each time WriteMakefile is called in a subdir
sub my_import {
no strict 'refs';
# walk this package's MY:: sub-stash and alias every code symbol
# into the caller-visible MY:: package
my $stash = \%{__PACKAGE__ . '::MY::'};
for my $sym (keys %$stash) {
next unless *{$stash->{$sym}}{CODE};
my $name = "MY::$sym";
# drop any previous alias first so redefinition warnings are avoided
undef &$name if defined &$name;
*$name = *{$stash->{$sym}}{CODE};
}
}
# Wrapper around ExtUtils::MakeMaker::WriteMakefile that injects the
# mod_perl build flags (includes, libraries, compiler/linker options,
# typemaps) before delegating. Note: %args is passed *after* @opts in
# the final call, so caller-supplied keys still override the computed
# defaults.
sub WriteMakefile {
my %args = @_;
$build ||= build_config();
ModPerl::MM::my_import(__PACKAGE__);
my $inc = $args{INC} || '';
# NOTE(review): this line is redundant — $inc already holds $args{INC}
$inc = $args{INC} if $args{INC};
$inc .= " " . $build->inc;
if (my $glue_inc = $build->{MP_XS_GLUE_DIR}) {
for (split /\s+/, $glue_inc) {
$inc .= " -I$_";
}
}
my $libs;
my @libs = ();
push @libs, $args{LIBS} if $args{LIBS};
if (Apache2::Build::BUILD_APREXT) {
# in order to decouple APR/APR::* from mod_perl.so,
# link these modules against the static MP_APR_LIB lib,
# rather than the mod_perl lib (which would demand mod_perl.so
# be available). For other modules, use mod_perl.lib as
# usual. This is done for APR in xs/APR/APR/Makefile.PL.
my $name = $args{NAME};
if ($name =~ /^APR::\w+$/) {
# For cygwin compatibility, the order of the libs should be
# <mod_perl libs> <apache libs>
@libs = ($build->mp_apr_lib, $build->apache_libs);
}
else {
@libs = ($build->modperl_libs, $build->apache_libs);
}
}
else {
@libs = ($build->modperl_libs, $build->apache_libs);
}
$libs = join ' ', @libs;
# NOTE(review): for CCFLAGS/OPTIMIZE/LDDLFLAGS the value taken from
# %args is immediately overwritten by the build-config value; this is
# harmless only because trailing %args wins in the delegated call
# below — verify that is the intent.
my $ccflags;
$ccflags = $args{CCFLAGS} if $args{CCFLAGS};
$ccflags = " " . $build->perl_ccopts . $build->ap_ccopts;
my $optimize;
$optimize = $args{OPTIMIZE} if $args{OPTIMIZE};
$optimize = " " . $build->perl_config('optimize');
my $lddlflags;
$lddlflags = $args{LDDLFLAGS} if $args{LDDLFLAGS};
$lddlflags = " " . $build->perl_config('lddlflags');
my %dynamic_lib;
%dynamic_lib = %{ $args{dynamic_lib}||{} } if $args{dynamic_lib};
$dynamic_lib{OTHERLDFLAGS} = $build->otherldflags;
my @opts = (
INC => $inc,
CCFLAGS => $ccflags,
OPTIMIZE => $optimize,
LDDLFLAGS => $lddlflags,
LIBS => $libs,
dynamic_lib => \%dynamic_lib,
);
# collect typemaps: caller-supplied first, then xs/, the current
# directory and its parent
my @typemaps;
push @typemaps, $args{TYPEMAPS} if $args{TYPEMAPS};
my $pwd = Cwd::fastcwd();
for ('xs', $pwd, "$pwd/..") {
my $typemap = $build->file_path("$_/typemap");
if (-e $typemap) {
push @typemaps, $typemap;
}
}
push @opts, TYPEMAPS => \@typemaps if @typemaps;
my $clean_files = (exists $args{clean} && exists $args{clean}{FILES}) ?
$args{clean}{FILES} : '';
$clean_files .= " glue_pods"; # cleanup the dependency target
$args{clean}{FILES} = $clean_files;
ExtUtils::MakeMaker::WriteMakefile(@opts, %args);
}
my %always_dynamic = map { $_, 1 }
qw(ModPerl::Const Apache2::Const APR::Const APR APR::PerlIO);
# MakeMaker 'constants' override: when building with MP_STATIC_EXTS,
# suppress the dynamic .xs -> .so link for this module and register its
# .xs file with the saved Apache2::Build config instead, so it gets
# linked statically into src/modules/perl (except for the modules in
# %always_dynamic, which must stay dynamic).
sub ModPerl::BuildMM::MY::constants {
my $self = shift;
$build ||= build_config();
#"discover" xs modules. since there is no list hardwired
#any module can be unpacked in the mod_perl-2.xx directory
#and built static
#this stunt also make it possible to leave .xs files where
#they are, unlike 1.xx where *.xs live in src/modules/perl
#and are copied to subdir/ if DYNAMIC=1
if ($build->{MP_STATIC_EXTS}) {
#skip .xs -> .so if we are linking static
my $name = $self->{NAME};
unless ($always_dynamic{$name}) {
if (my ($xs) = keys %{ $self->{XS} }) {
$self->{HAS_LINK_CODE} = 0;
print "$name will be linked static\n";
#propagate static xs module to src/modules/perl/Makefile
$build->{XS}->{$name} =
join '/', Cwd::fastcwd(), $xs;
$build->save;
}
}
}
$self->MM::constants;
}
sub ModPerl::BuildMM::MY::top_targets {
    # Delegate straight to the inherited MakeMaker implementation; the
    # override exists only as a hook point for future customisation.
    my $self = shift;
    return $self->MM::top_targets;
}
# MakeMaker 'postamble' override: emit a 'glue_pods' target that appends
# the matching .pod from docs/api to every .pm copied into blib (and,
# on platforms that support it, manifies the pod). When the docs tree
# is absent the target degrades to a no-op.
sub ModPerl::BuildMM::MY::postamble {
my $self = shift;
my $doc_root = catdir Cwd::getcwd(), "docs", "api";
my @targets = ();
# reasons for glueing pods to the respective .pm files:
# - manpages will get installed over the mp1 manpages and vice
# versa. glueing pods avoids creation of manpages, but may be we
# could just tell make to skip manpages creation?
# if pods are installed directly they need to be also redirected,
# some into Apache2/ others (e.g. Apache2) not
# add the code to glue the existing pods to the .pm files in blib.
# create a dependency on pm_to_blib subdirs linkext targets to
# allow 'make -j'
require ExtUtils::MakeMaker;
my $mm_ver = $ExtUtils::MakeMaker::VERSION;
$mm_ver =~ s/_.*//; # handle dev versions like 6.30_01
# MakeMaker 6.22..6.25 used a timestamp file for the pm_to_blib target
my $pm_to_blib = ($mm_ver >= 6.22 && $mm_ver <= 6.25)
? "pm_to_blib.ts"
: "pm_to_blib";
my @target = ("glue_pods: $pm_to_blib subdirs linkext");
if (-d $doc_root) {
my $build = build_config();
# those living in modperl-2.0/lib are already nicely mapped
my %pms = %{ $self->{PM} };
my $cwd = Cwd::getcwd();
my $blib_dir = catdir qw(blib lib);
# those autogenerated under WrapXS/
# those living under xs/
# those living under ModPerl-Registry/lib/
my @src = ('WrapXS', 'xs', catdir(qw(ModPerl-Registry lib)));
for my $base (@src) {
chdir $base;
my @files = ();
find({ no_chdir => 1,
wanted => sub { push @files, $_ if /.pm$/ },
}, ".");
chdir $cwd;
# work out where each discovered .pm ends up under blib/lib
for (@files) {
my $pm = catfile $base, $_;
my $blib;
if ($base =~ /^(xs|WrapXS)/) {
my @segm = splitdir $_;
splice @segm, -2, 1; # xs/APR/Const/Const.pm
splice @segm, -2, 1 if /APR.pm/; # odd case
$blib = catfile $blib_dir, @segm;
}
else {
$blib = catfile $blib_dir, $_;
}
$pms{$pm} = $blib;
}
}
while (my ($pm, $blib) = each %pms) {
$pm =~ s|/\./|/|g; # clean the path
$blib =~ s|/\./|/|g; # clean the path
my @segm = splitdir $blib;
# look for the pod under docs/api using the last one or two path
# components of the blib target
for my $i (1..2) {
# try APR.pm and APR/Bucket.pm
my $pod = catdir(@segm[-$i .. -1]);
$pod =~ s/\.pm/\.pod/;
my $podpath = catfile $doc_root, $pod;
next unless -r $podpath;
push @target,
'$(FULLPERL) -I$(INST_LIB) ' .
"-I$apache_test_dir -MModPerl::BuildMM " .
"-e ModPerl::BuildMM::glue_pod $pm $podpath $blib";
# Win32 doesn't normally install man pages
# and Cygwin doesn't allow '::' in file names
next if WIN32 || CYGWIN;
# manify while we're at it
my (undef, $man, undef) = $blib =~ m!(blib/lib/)(.*)(\.pm)!;
$man =~ s!/!::!g;
push @target,
'$(NOECHO) $(POD2MAN_EXE) --section=3 ' .
"$podpath \$(INST_MAN3DIR)/$man.\$(MAN3EXT)"
}
}
push @target, $self->{NOECHO} . '$(TOUCH) $@';
}
else {
# we don't have the docs sub-cvs repository extracted, skip
# the docs gluing
push @target, $self->{NOECHO} . '$(NOOP)';
}
push @targets, join "\n\t", @target;
# # next target: cleanup the dependency file
# @target = ('glue_pods_clean:');
# push @target, '$(RM_F) glue_pods';
# push @targets, join "\n\t", @target;
return join "\n\n", @targets, '';
}
sub glue_pod {
    # Append the content of a .pod file to the blib copy of a .pm file,
    # restoring the original permissions afterwards. Invoked as a
    # one-liner from the generated Makefile with three paths in @ARGV:
    # the source .pm, the .pod to glue, and the destination (blib) .pm.
    die "expecting 3 arguments: pm, pod, dst" unless @ARGV == 3;
    my ($pm, $pod, $dst) = @ARGV;

    # it's possible that the .pm file is not existing
    # (e.g. ThreadMutex.pm is not created unless
    # $apr_config->{HAS_THREADS})
    return unless -e $pm && -e $dst;

    # have we already glued the doc? (the glued copy is bigger than the
    # pristine source, so unequal sizes mean nothing left to do)
    exit 0 unless -s $pm == -s $dst;

    # ExtUtils::Install::pm_to_blib removes the 'w' perms, so we can't
    # just append the doc there
    my $orig_mode = (stat $dst)[2];
    my $rw_mode = 0666;
    chmod $rw_mode, $dst or die "Can't chmod $rw_mode $dst: $!";

    # three-argument open with lexical handles (the original used the
    # unsafe two-argument form, which misparses unusual filenames)
    open my $pod_fh, '<', $pod or die "Can't open $pod: $!";
    open my $dst_fh, '>>', $dst or die "Can't open $dst: $!";
    print $dst_fh "\n"; # must add one line separation
    print $dst_fh (<$pod_fh>);
    close $pod_fh;
    close $dst_fh or die "Can't close $dst: $!";

    # restore the perms
    chmod $orig_mode, $dst or die "Can't chmod $orig_mode $dst: $!";
}
# MakeMaker 'post_initialize' override: merge the extra files collected
# in the file-level %PM hash into the install map (skipping entries that
# don't exist on disk) and redirect lib/typemap under auto/Apache2/ so
# it cannot be picked up by non-mod_perl modules.
sub ModPerl::BuildMM::MY::post_initialize {
my $self = shift;
$build ||= build_config();
my $pm = $self->{PM};
while (my ($k, $v) = each %PM) {
if (-e $k) {
$pm->{$k} = $v;
}
}
# prefix typemap with Apache2/ so when installed in the
# perl-lib-tree it won't be picked by non-mod_perl modules
if (exists $pm->{'lib/typemap'} ) {
$pm->{'lib/typemap'} = '$(INST_ARCHLIB)/auto/Apache2/typemap';
}
# MakeMaker expects a string of extra Makefile text; none is needed
'';
}
my $apr_config;
# MakeMaker 'libscan' override: decide whether a scanned file should be
# installed. Returns the path to keep it, or a false value to skip it
# (thread-mutex modules on unthreaded APR, helper/backup/SVN files).
sub ModPerl::BuildMM::MY::libscan {
my ($self, $path) = @_;
$apr_config ||= $build->get_apr_config();
if ($path =~ m/(Thread|Global)(Mutex|RWLock)/) {
# NOTE(review): this bare 'return' yields undef/empty list while the
# other skip branches return '' — both are false to MakeMaker, but
# the asymmetry looks unintentional
return unless $apr_config->{HAS_THREADS};
}
return '' if $path =~ /DummyVersions.pm/;
return '' if $path =~ m/\.pl$/;
return '' if $path =~ m/~$/;
return '' if $path =~ /\B\.svn\b/;
$path;
}
1;
| dreamhost/dpkg-ndn-perl-mod-perl | lib/ModPerl/BuildMM.pm | Perl | apache-2.0 | 11,897 |
#!/usr/bin/perl
# ------------------------------------------------------------
# projector_turn_on.pl
# ------------------------------------------------------------
use feature qw{say} ;
use utf8 ;
use Net::PJLink qw{ :RESPONSES } ;
# These projectors are behind a firewall, inaccessable directly
my $pass = 'admin' ;
my $projectors = [ '172.17.4.10' ] ;
my $prj = Net::PJLink->new( host => $projectors , keep_alive => 1 ) ;
$prj->set_auth_password( $pass ) ;
$prj->set_power( 0 ) ;
$prj->close_all_connections;
exit;
| GreaterLaLa/ProjectorControl | projector_off.pl | Perl | apache-2.0 | 521 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2022] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=head1 NAME
Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveProcessAssemblyReport
=head1 SYNOPSIS
=head1 DESCRIPTION
=cut
package Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveProcessAssemblyReport;
use strict;
use warnings;
use Net::FTP;
use Time::Piece;
use File::Fetch;
use File::Temp;
use File::Spec::Functions qw(catfile splitpath catdir);
use File::Path qw(make_path);
use Digest::MD5;
use IO::Uncompress::AnyUncompress qw(anyuncompress $AnyUncompressError) ;
use Bio::EnsEMBL::Hive::Utils qw(destringify);
use Bio::EnsEMBL::IO::Parser::Fasta;
use Bio::EnsEMBL::Slice;
use Bio::EnsEMBL::CoordSystem;
use Bio::EnsEMBL::Attribute;
use parent ('Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveBaseRunnableDB');
=head2 param_defaults
Arg [1] : None
Description: Default parameters
toplevel_as_sequence_levels => 1,
_report_name => '#assembly_accession#_#assembly_name#_assembly_report.txt',
_md5checksum_name => 'md5checksums.txt',
_genome_file_name => '#assembly_accession#_#assembly_name#_genomic.fna',
_genome_zip_ext => '.gz',
_molecule_relations => {
'assembled-molecule' => 'chromosome',
'unplaced-scaffold' => 'scaffold',
'unlocalized-scaffold' => 'scaffold',
'alt-scaffold' => 'scaffold',
},
ftp_user => 'anonymous',
ftp_pass => undef,
load_non_nuclear => 0,
_agp_branch => 3,
_coord_systems => {},
Returntype : Hashref
Exceptions : None
=cut
sub param_defaults {
  my ($self) = @_;
  return {
    %{$self->SUPER::param_defaults},
    # if any DNA slice sequence length is greater than _MAX_SLICE_LENGTH base pairs,
    # all DNA slice sequences will be cut into _SUBSLICE_LENGTH-base-pair long sequences
    _MAX_SLICE_LENGTH => 950000000, # 950 million base pairs
    _SUBSLICE_LENGTH => 10000000, # 10 million base pairs
    _exceeded_max_slice_length => 0, # it will be set to 1 when any DNA slice sequence length is greater than _MAX_SLICE_LENGTH
    # When true, toplevel sequences are also the sequence level and their
    # DNA is loaded from the downloaded genome FASTA file.
    toplevel_as_sequence_levels => 1,
    # File names on the FTP site; '#...#' placeholders refer to other
    # parameters and are substituted by the pipeline.
    _report_name => '#assembly_accession#_#assembly_name#_assembly_report.txt',
    _md5checksum_name => 'md5checksums.txt',
    _genome_file_name => '#assembly_accession#_#assembly_name#_genomic.fna',
    _genome_zip_ext => '.gz',
    # Maps the 'Sequence-Role' column of the assembly report to the name
    # of the coordinate system the sequence is loaded into.
    _molecule_relations => {
      'assembled-molecule' => 'chromosome',
      'unplaced-scaffold' => 'scaffold',
      'unlocalized-scaffold' => 'scaffold',
      'alt-scaffold' => 'scaffold',
    },
    ftp_user => 'anonymous', # anonymous FTP login by default
    ftp_pass => undef,
    load_non_nuclear => 0,   # skip non-nuclear sequences (e.g. MT) by default
    _agp_branch => 3,        # dataflow branch used for AGP file jobs
    _coord_systems => {},    # cache of Bio::EnsEMBL::CoordSystem objects, keyed by name
  }
}
=head2 fetch_input
Arg [1] : None
Description: Retrieve external db ids for INSDC, RefSeq and UCSC. Retrieve the
attribute information for 'toplevel' and 'karyotype_rank'.
Download the report file using 'full_ftp_path' and '_report_name'
Returntype : None
Exceptions : Throws if 'target_db' is not set
Throws if 'assembly_accession' is not set
Throws if 'assembly_name' is not set
Throws if 'full_ftp_path' is not set
=cut
sub fetch_input {
  my ($self) = @_;

  my $db = $self->get_database_by_name('target_db');
  $self->hrdb_set_con($db, 'target_db');
  my $assembly_accession = $self->param_required('assembly_accession');
  my $assembly_name = $self->param_required('assembly_name');

  # Cache the external_db ids needed later to attach seq_region synonyms.
  my %external_db_ids;
  my $external_db_adaptor = $db->get_DBEntryAdaptor;
  foreach my $external_db ('INSDC', 'RefSeq_genomic', 'UCSC') {
    $external_db_ids{$external_db} = $external_db_adaptor->get_external_db_id($external_db, undef, 1);
  }
  $self->param('_external_db_ids', \%external_db_ids);

  # Prepare the 'toplevel' attribute stored on every slice.
  my ($id, $code, $name, $desc) = @{$db->get_AttributeAdaptor->fetch_by_code('toplevel')};
  $self->param('toplevel_attribute', Bio::EnsEMBL::Attribute->new(
    -code => $code,
    -name => $name,
    -description => $desc,
    -value => 1,
  ));
  # Only the attribute definition is cached for 'karyotype_rank'; the
  # per-chromosome value (the rank) is filled in later, in run().
  ($id, $code, $name, $desc) = @{$db->get_AttributeAdaptor->fetch_by_code('karyotype_rank')};
  $self->param('karyotype_rank', {
    -code => $code,
    -name => $name,
    -description => $desc,
  });

  # Use the provided output directory or fall back to a temporary one.
  my $report_dir;
  if ($self->param_is_defined('output_path')) {
    $report_dir = $self->param('output_path');
  }
  else {
    $report_dir = File::Temp->newdir;
    $self->param('output_path', $report_dir);
  }
  if (!-d $report_dir) {
    make_path($report_dir);
  }

  # Download the assembly report and the md5 checksum file.
  # File::Fetch::fetch returns false on failure, so abort immediately with
  # the fetch error instead of failing later with a cryptic open() error.
  my $fetcher = File::Fetch->new(uri => $self->param_required('full_ftp_path').'/'.$self->param('_report_name'));
  $fetcher->fetch(to => $report_dir)
    or $self->throw('Could not fetch '.$fetcher->uri.': '.($fetcher->error || 'unknown error'));
  $fetcher = File::Fetch->new(uri => $self->param_required('full_ftp_path').'/'.$self->param('_md5checksum_name'));
  $fetcher->fetch(to => $report_dir)
    or $self->throw('Could not fetch '.$fetcher->uri.': '.($fetcher->error || 'unknown error'));
  # The whole-genome FASTA is only needed when toplevel sequences are
  # loaded directly as the sequence level.
  if ($self->param('toplevel_as_sequence_levels')) {
    $fetcher = File::Fetch->new(uri => $self->param_required('full_ftp_path').'/'.$self->param('_genome_file_name').$self->param('_genome_zip_ext'));
    $fetcher->fetch(to => $report_dir)
      or $self->throw('Could not fetch '.$fetcher->uri.': '.($fetcher->error || 'unknown error'));
  }
}
=head2 run
Arg [1] : None
Description: Process the Genbank report file of the assembly to know which sequences to
load in the database and retrieve some meta data attached
Returntype : None
Exceptions : None
=cut
sub run {
  my ($self) = @_;

  my $file = catfile($self->param_required('output_path'), $self->param('_report_name'));
  my $one_system = $self->param('toplevel_as_sequence_levels');
  my $synonym_id = $self->param('_external_db_ids');
  my $molecule_matcher = $self->param('_molecule_relations');
  my $load_non_nuclear = $self->param('load_non_nuclear');
  my $karyotype_rank_data = $self->param('karyotype_rank');
  my $no_chromosome = 0;
  my $common_name;
  my $strain;
  my @slices;       # one Bio::EnsEMBL::Slice per toplevel sequence to load
  my @dirs;         # assembly-unit sub-directories to visit on the FTP site
  my @chromosomes;  # names of assembled molecules, used to locate FASTA files
  my @agp_files;    # AGP files to dataflow on the AGP branch
  my @fastas;       # FASTA files to dataflow for checking
  my $karyotype_rank = 0;

  # First pass: parse the NCBI assembly report.
  open(RH, $file) || $self->throw("Could not open $file");
  while (my $line = <RH>) {
    $line =~ s/\R$//; # strip any kind of line terminator
    if (!$one_system and $line =~ /^##/) {
      # '##' lines list the assembly units; collect the sub-directory names
      # (spaces replaced by underscores) for the later FTP walk.
      my @data = split("\t", substr($line, 3));
      if ($data[2]) {
        if ($data[0] =~ /^GC/) {
          if ($load_non_nuclear and $data[2] eq 'non-nuclear') {
            push(@dirs, 'non-nuclear');
          }
          elsif ($data[2] ne 'non-nuclear') {
            $data[2] =~ s/ /_/;
            push(@dirs, $data[2]);
          }
        }
      }
    }
    elsif ($line =~ /^#\s+([^:]+):\s+(.+)/) {
      # '# Key: value' header lines carry the assembly meta data.
      if ($1 eq 'Assembly level') {
        if ($2 eq 'Chromosome') {
          $self->param('chromosomes_present', 1);
        }
        else {
          $no_chromosome = 1;
        }
        if ($2 eq 'Contig') {
          # A contig-only assembly is loaded as a single coordinate system.
          $self->param('toplevel_as_sequence_levels', 1);
          $one_system = 1;
        }
      }
      elsif ($1 eq 'WGS project') {
        $self->param('wgs_project', $2);
      }
      elsif ($1 eq 'Date') {
        # Keep only the "YYYY-MM" part of the release date.
        my $assembly_date = $2;
        $self->param('assembly_date', sprintf("%d-%02d", $assembly_date =~ /(\d+)-(\d+)/));
      }
      elsif ($1 eq 'Taxid') {
        $self->param('taxon_id', $2);
      }
      elsif ($1 eq 'Organism name') {
        # "Scientific name (common name)": the parenthesised part becomes
        # the common name, the rest the scientific name.
        $common_name = $2;
        $self->param('common_name', $1) if ($common_name =~ /\(([^)]+)\)/);
        $common_name =~ s/\s+\(.+//;
        $self->param('scientific_name', $common_name);
      }
      elsif ($1 eq 'Infraspecific name') {
        # "type=value", e.g. "strain=C57BL/6J": value is the strain name,
        # the part before '=' the strain type.
        $strain = $2;
        $self->param('strain', $1) if ($strain =~ /[a-zA-Z]+\=(.+)/);
        $strain =~ s/\=.+//;
        $self->param('strain_type', $strain);
      }
      elsif ($1 eq 'RefSeq assembly accession') {
        if ($self->param_is_defined('assembly_refseq_accession')) {
          $self->warning('RefSeq assembly accession are different, using the one from the file')
            if ($2 ne $self->param('assembly_refseq_accession'));
        }
        $self->param('assembly_refseq_accession', $2);
      }
    }
    elsif ($line =~ /^\w/) {
      # Data lines: one toplevel sequence each. Columns used here:
      # 0 sequence-name, 1 sequence-role, 2 assigned-molecule,
      # 4 GenBank accession, 6 RefSeq accession, 7 assembly-unit,
      # 8 sequence-length, 9 UCSC name.
      my @data = split("\t", $line);
      if (!$load_non_nuclear and $data[7] eq 'non-nuclear') {
        # Non-nuclear sequences are skipped, but an assembled molecule with
        # a RefSeq NC_ accession is remembered as the MT accession.
        if ($data[1] eq 'assembled-molecule' and $data[6] =~ /^NC/) {
          if ($self->param_is_defined('mt_accession')) {
            $self->warning('MT accession are different, using the one from the file')
              if ($data[6] ne $self->param('mt_accession'));
          }
          $self->param('mt_accession', $data[6]);
        }
        else {
          $self->warning($data[0].' is a non nuclear sequence');
        }
      }
      else {
        # Default seq_region name is the GenBank accession; chromosomes use
        # the assigned molecule name, non-primary units the sequence name.
        my $seq_region_name = $data[4];
        my $karyotype_attribute;
        my $coord_system;
        if ($data[1] eq 'assembled-molecule') {
          $seq_region_name = $data[2];
          push(@chromosomes, $seq_region_name);
          # Chromosomes get an incrementing karyotype rank, in file order.
          $karyotype_attribute = Bio::EnsEMBL::Attribute->new(
            %$karyotype_rank_data,
            -value => ++$karyotype_rank,
          );
        }
        elsif ($data[7] ne 'Primary Assembly') {
          $seq_region_name = $data[0];
        }
        if ($one_system) {
          $coord_system = $self->get_coord_system('primary_assembly');
        }
        else {
          $coord_system = $self->get_coord_system($molecule_matcher->{$data[1]}, $no_chromosome);
        }
        # Length 'na' is recorded as 1 and corrected later, in
        # write_output(), from the actual sequence.
        my $slice = Bio::EnsEMBL::Slice->new(
          -seq_region_name => $seq_region_name,
          -start => 1,
          -end => $data[8] eq 'na' ? 1 : $data[8],
          -coord_system => $coord_system,
        );
        # This is not great but the easiest
        $slice->{karyotype_rank} = $karyotype_attribute if ($karyotype_attribute);
        $slice->{_gb_insdc_name} = $data[4]; # used to find the sequence in the FASTA file
        # Record synonyms: sequence name (no external db), INSDC accession,
        # RefSeq accession and UCSC name when available.
        if ($data[4] eq $seq_region_name) {
          if ($data[0] ne 'na') {
            push(@{$slice->{add_synonym}}, [$data[0], undef]);
          }
        }
        else {
          push(@{$slice->{add_synonym}}, [$data[4], $synonym_id->{INSDC}]);
        }
        if ($data[6] ne 'na') {
          push(@{$slice->{add_synonym}}, [$data[6], $synonym_id->{RefSeq_genomic}]);
        }
        if ($data[9] ne 'na') {
          push(@{$slice->{add_synonym}}, [$data[9], $synonym_id->{UCSC}]);
        }
        # if the maximum slice length is exceeded for any slice, all the slices will be cut later on
        # and an internal coord_system will be used
        if ($slice->length() > $self->param('_MAX_SLICE_LENGTH')) {
          $self->param('_exceeded_max_slice_length',1);
        }
        push(@slices, $slice);
      }
    }
  }
  close(RH) || $self->throw("Could not close $file");

  # Load the md5 checksums, keyed by file basename.
  open(MH, catfile($self->param('output_path'), $self->param('_md5checksum_name'))) || $self->throw('Cannot open md5checksum file');
  my %checksum;
  while(my $line = <MH>) {
    $line =~ s/\R$//;
    $line =~ /^(\S+)\s+\S+\/([^\/]+)$/;
    $checksum{$2} = $1;
  }
  close(MH) || $self->throw('Cannot close md5checksum file');

  if ($one_system) {
    # Verify the downloaded genome file against its md5 then uncompress it.
    $self->say_with_header('Checking genome file');
    my $file = catfile($self->param('output_path'), $self->param('_genome_file_name').$self->param('_genome_zip_ext'));
    my $digest = Digest::MD5->new();
    open(my $fh, $file) || $self->throw('Could not open '.$file);
    binmode $fh;
    my $md5sum = $digest->addfile($fh)->hexdigest;
    close($fh) || $self->throw('Could not close '.$file);
    $self->throw('MD5 not as expected '.$checksum{$self->param('_genome_file_name').$self->param('_genome_zip_ext')}.' but '.$md5sum)
      unless ($md5sum eq $checksum{$self->param('_genome_file_name').$self->param('_genome_zip_ext')});
    my ($output) = $file =~ /^(\S+)\.\w+$/; # target file name without the .gz extension
    anyuncompress $file => $output
      or $self->throw("anyuncompress failed: $AnyUncompressError");
    unlink $file;
  }
  else {
    # Walk the FTP assembly structure directory to collect the AGP and
    # FASTA files needed to build the multi-level assembly.
    # NOTE(review): the loops below never cwd back up after a successful
    # cwd($dir)/cwd($inode); this looks like it only works when each level
    # matches a single directory — verify against the FTP layout.
    if (@dirs) {
      my $wgs_project = $self->param_is_defined('wgs_project') ? $self->param('wgs_project') : undef;
      my ($type, $hostname, $base_ftp_dir) = $self->param('full_ftp_path') =~ /^(\w+:\/\/)([^\/]+)\/(\S+)/;
      my $client = Net::FTP->new($hostname);
      $client->login($self->param('ftp_user'), $self->param('ftp_pass'));
      my $structure_dir = $base_ftp_dir.'/'.$self->param('assembly_accession').'_'.$self->param('assembly_name').'_assembly_structure';
      my $base_url = "$type$hostname/$structure_dir";
      if ($client->cwd($structure_dir)) {
        foreach my $dir (@dirs) {
          $client->cwd($dir) || $self->throw("Could not go into $dir");
          foreach my $inode ($client->ls) {
            if ($client->cwd($inode)) {
              # Collect every AGP file of this assembly unit.
              foreach my $agp ($client->ls('AGP')) {
                $agp =~ s/AGP\///;
                push(@agp_files, {
                  url => "$base_url/$dir/$inode/AGP/$agp",
                  md5sum => $checksum{$agp},
                  output_dir => catdir('#output_path#', $dir),
                  wgs_project => $wgs_project,
                });
              }
              # Per-chromosome FASTA files, when present.
              if (@chromosomes) {
                if ($client->ls("FASTA/chr".$chromosomes[0].'.fna.gz')) {
                  foreach my $fasta ($client->ls('FASTA')) {
                    $fasta =~ s/FASTA\///;
                    push(@fastas, {
                      url => "$base_url/$dir/$inode/FASTA/$fasta",
                      md5sum => $checksum{$fasta},
                      output_dir => catdir('#output_path#', 'FASTA'),
                    });
                  }
                  # Allow one extra file (e.g. unplaced scaffolds).
                  # NOTE(review): 'gat' typo in the message below is kept
                  # byte-for-byte as it is a runtime string.
                  $self->throw('Could not gat all the fasta files') unless (@chromosomes == @fastas or @chromosomes+1 == @fastas);
                }
              }
              if ($client->ls('FASTA/unplaced.scaffold.fna.gz')) {
                push(@fastas, {
                  url => "$base_url/$dir/$inode/FASTA/unplaced.scaffold.fna.gz",
                  md5sum => $checksum{'unplaced.scaffold.fna.gz'},
                  output_dir => catdir('#output_path#', 'FASTA'),
                });
              }
            }
            # Accession mapping files sit next to the unit directories.
            if ($inode eq 'component_localID2acc' or $inode eq 'scaffold_localID2acc') {
              push(@agp_files, {
                url => "$base_url/$dir/$inode",
                md5sum => $checksum{$inode},
                output_dir => catdir('#output_path#', $dir),
                uncompress => 0,
              });
            }
          }
        }
      }
      else {
        $self->warning('No assembly structure directory found in ftp');
      }
    }
    if (@agp_files) {
      $self->param('agp_files', \@agp_files);
    }
    else {
      $self->throw('Could not find any AGP files');
    }
    if (@fastas) {
      $self->param('fasta_files', \@fastas);
    }
    else {
      $self->throw('Could not find any FASTA files for checking');
    }
  }
  $self->output(\@slices);
}
=head2 write_output
Arg [1] : None
Description: Write the Bio::EnsEMBL::Slice object corresponding to the toplevel
sequences to the database.It will also add some meta data from the
report file when they are present
On branch '_branch_to_flow_to' (2), send FASTA files to be downloaded
if needed
Returntype : None
Exceptions : None
=cut
sub write_output {
  my ($self) = @_;

  my $db = $self->hrdb_get_con('target_db');
  if ($self->param('_exceeded_max_slice_length')) {
    # the assembly.mapping meta_key is required to be able to fetch any sequence
    my $mc = $db->get_MetaContainer();
    $mc->store_key_value('assembly.mapping','primary_assembly:'.$self->param('assembly_name').'|ensembl_internal:'.$self->param('assembly_name'));
    # Create the internal sequence-level coord system and demote
    # primary_assembly so the DNA is attached to the subslices instead.
    $self->get_coord_system('ensembl_internal');
    $self->get_coord_system('primary_assembly')->{sequence_level} = 0;
  }
  # Store every coord system created during run().
  my $coord_system_adaptor = $db->get_CoordSystemAdaptor;
  foreach my $coord_system (values %{$self->param('_coord_systems')}) {
    $coord_system_adaptor->store($coord_system);
  }
  my $slice_adaptor = $db->get_SliceAdaptor;
  my $attribute_adaptor = $db->get_AttributeAdaptor;
  my $toplevel_attribute = $self->param('toplevel_attribute');
  my $toplevel_as_sequence_levels = $self->param('toplevel_as_sequence_levels');
  my $genome_file;
  if ($toplevel_as_sequence_levels) {
    # The genome FASTA was uncompressed in run(); sequences are read from
    # it as slices are stored.
    $genome_file = Bio::EnsEMBL::IO::Parser::Fasta->open(catfile($self->param('output_path'), $self->param('_genome_file_name')));
  }
  # Sequences read ahead of their slice are parked here, keyed by the
  # first word of their FASTA header (the INSDC accession).
  my %sequences;
  foreach my $slice (@{$self->output}) {
    if ($toplevel_as_sequence_levels) {
      my $insdc_name = $slice->{_gb_insdc_name};
      my $seq;
      if (exists $sequences{$insdc_name}) {
        $seq = $sequences{$insdc_name};
        delete $sequences{$insdc_name};
      } else {
        # Scan forward through the FASTA file until this slice's sequence
        # is found, caching every other sequence met on the way.
        while ($genome_file->next) {
          $genome_file->getHeader =~ /^(\S+)/;
          if ($insdc_name eq $1) {
            $seq = uc($genome_file->getSequence);
            last;
          } else {
            $sequences{$1} = uc($genome_file->getSequence);
          }
        }
      }
      # Slices whose length was 'na' in the report were created with
      # end == 1; fix them now from the actual sequence length.
      if ($slice->length <= 1) {
        my %new_slice = %$slice;
        $new_slice{end} = length($seq);
        $new_slice{seq_region_length} = $new_slice{end};
        $slice = Bio::EnsEMBL::Slice->new_fast(\%new_slice);
      }
      if ($self->param('_exceeded_max_slice_length')) {
        # Store the slice without DNA, then store its DNA as subslices of
        # the internal sequence-level coord system.
        $slice_adaptor->store($slice);
        $self->cut_and_store_subslices($slice_adaptor,$slice,\$seq);
      } else {
        $slice_adaptor->store($slice,\$seq);
      }
    } else {
      $slice_adaptor->store($slice);
    }
    # Synonyms were collected on the slice in run().
    if (exists $slice->{add_synonym}) {
      foreach my $synonym_data (@{$slice->{add_synonym}}) {
        $slice->add_synonym(@$synonym_data);
      }
      $slice_adaptor->update($slice);
    }
    if (exists $slice->{karyotype_rank}) {
      $attribute_adaptor->store_on_Slice($slice,[$slice->{karyotype_rank}]);
    }
    $attribute_adaptor->store_on_Slice($slice,[$toplevel_attribute]);
  }

  # Store the species/assembly meta data gathered from the report.
  my $display_name;
  my $common_name = $self->param('common_name');
  my $meta_adaptor = $db->get_MetaContainerAdaptor;
  # e.g. '2023-05-Ensembl' (Time::Piece localtime + literal suffix).
  my $date = localtime->strftime('%Y-%m-Ensembl');
  $meta_adaptor->store_key_value('genebuild.start_date', $date);
  $meta_adaptor->store_key_value('assembly.date', $self->param('assembly_date'));
  $meta_adaptor->store_key_value('species.scientific_name', $self->param('scientific_name'));
  $display_name = $self->param('scientific_name');
  $display_name =~ s/^(\w)/\U$1/; # capitalise the first letter
  # NOTE(review): 'common_name' is only set in run() when the organism name
  # contains a parenthesised part; this can store undef — confirm intended.
  $meta_adaptor->store_key_value('species.common_name', $self->param('common_name'));
  $meta_adaptor->store_key_value('species.taxonomy_id', $self->param('taxon_id'));
  $meta_adaptor->store_key_value('assembly.accession', $self->param('assembly_accession'));
  $meta_adaptor->store_key_value('assembly.default', $self->param('assembly_name'));
  $meta_adaptor->store_key_value('assembly.name', $self->param('assembly_name'));
  $meta_adaptor->store_key_value('assembly.web_accession_source', 'NCBI');
  $meta_adaptor->store_key_value('assembly.web_accession_type', 'INSDC Assembly ID');
  $common_name =~ s/^(\w)/\U$1/;
  if ($self->param_is_defined('strain')) {
    $meta_adaptor->store_key_value('species.strain', $self->param('strain'));
    $meta_adaptor->store_key_value('strain.type', $self->param('strain_type'));
    $display_name .= ' ('.$self->param('strain').')';
  }
  else {
    $meta_adaptor->store_key_value('species.strain', 'reference');
    $meta_adaptor->store_key_value('strain.type', 'strain');
    $display_name .= ' ('.$common_name.')';
  }
  $display_name .= ' - '.$self->param('assembly_accession');
  $meta_adaptor->store_key_value('species.display_name', $display_name);
  $meta_adaptor->store_key_value('species.full_display_name', $display_name);

  # Not sure it will properly add the new values, hopefully it will and not cause problems
  # Forward the input id, augmented with accessions discovered in the report.
  my $job_params = destringify($self->input_job->input_id);
  if ($self->param_is_defined('mt_accession')) {
    $job_params->{mt_accession} = $self->param('mt_accession');
  }
  if ($self->param_is_defined('assembly_refseq_accession')) {
    $job_params->{assembly_refseq_accession} = $self->param('assembly_refseq_accession');
  }
  if (!$toplevel_as_sequence_levels) {
    # Multi-level assemblies: dataflow the AGP and FASTA file jobs.
    $self->dataflow_output_id($self->param('agp_files'), $self->param('_agp_branch'));
    $self->dataflow_output_id($self->param('fasta_files'), $self->param('_branch_to_flow_to'));
  }
  $self->dataflow_output_id($job_params, 'MAIN');
}
=head2 get_coord_system
Arg [1] : String type, should be either 'primary_assembly', 'chromosome', 'scaffold' or 'ensembl_internal'.
'ensembl_internal' represents the sequence level which is created to deal with the assemblies having
any sequence exceeding the maximum slice length.
Arg [2] : Boolean, false by default, set to true if scaffold is the highest coordinate system
Description: Create the Bio::EnsEMBL::CoordSystem object based on Arg[1], cache the object to avoid
duplication
Returntype : Bio::EnsEMBL::CoordSystem
Exceptions : None
=cut
sub get_coord_system {
  my ($self, $type, $no_chromosome) = @_;

  if (!exists $self->param('_coord_systems')->{$type}) {
    # 'chromosome'/'primary_assembly' sit at rank 1; 'scaffold' and the
    # internal sequence level drop to rank 2 unless scaffold is the
    # highest coordinate system of the assembly.
    my $rank = 1;
    $rank = 2 if (($type eq 'scaffold' or $type eq 'ensembl_internal') and !$no_chromosome);
    my %args = (
      -name => $type,
      -rank => $rank,
      -default => 1,
    );
    if ($type eq 'ensembl_internal') {
      # The internal coord system is always the sequence level and has no
      # version as it is not part of the official assembly.
      $args{-sequence_level} = 1;
    }
    else {
      $args{-version} = $self->param('assembly_name');
      $args{-sequence_level} = $self->param('toplevel_as_sequence_levels');
    }
    $self->param('_coord_systems')->{$type} = Bio::EnsEMBL::CoordSystem->new(%args);
  }
  return $self->param('_coord_systems')->{$type};
}
=head2 cut_and_store_subslices
Arg [1] : SliceAdaptor type.
Arg [2] : Slice type.
Arg [3] : string ref.
Description: Cut the slice given in Arg [2] into _SUBSLICE_LENGTH-base-pair long subslices and store them all into the seq_region, assembly and dna tables through the SliceAdaptor given in Arg [1].
It returns the seq_region_id assigned to the slice.
Returntype : int
Exceptions : None
=cut
sub cut_and_store_subslices {
  my ($self,$slice_adaptor,$slice,$seq) = @_;

  # seq_region_id the whole slice was stored under; it is the 'asm' side
  # of every assembly row inserted below, and the documented return value.
  my $slice_seq_region_id = $slice_adaptor->get_seq_region_id($slice);
  # store all subslices with dna
  my $i = $slice->sub_Slice_Iterator($self->param('_SUBSLICE_LENGTH'));
  my $subslice_index = 0;
  while ($i->has_next()) {
    $subslice_index++;
    my $subslice = $i->next();
    # DNA for this chunk, taken directly from the full sequence string.
    my $subseq = substr($$seq,$subslice->start()-1,$self->param('_SUBSLICE_LENGTH'));
    my $subseq_length = length($subseq);
    # Each chunk becomes its own seq_region on the internal sequence-level
    # coord system, named "<region>_<index>".
    my $new_subslice = Bio::EnsEMBL::Slice->new(-coord_system => $self->get_coord_system('ensembl_internal'),
                                                -start => 1,
                                                -end => $subseq_length,
                                                -strand => 1,
                                                -seq_region_name => $subslice->seq_region_name()."_".$subslice_index,
                                                -seq_region_length => $subseq_length,
                                                -adaptor => $slice_adaptor);
    my $new_subslice_seq_region_id = $slice_adaptor->store($new_subslice,\$subseq);
    # store the subslice as a component of the whole slice
    my $sql = "INSERT INTO assembly(asm_seq_region_id, asm_start, asm_end, cmp_seq_region_id, cmp_start, cmp_end, ori) VALUES(?, ?, ?, ?, ?, ?, ?)";
    my $sth = $slice_adaptor->dbc()->prepare($sql);
    my $new_subslice_start_on_slice = ($subslice_index-1)*$self->param('_SUBSLICE_LENGTH')+1;
    $sth->execute($slice_seq_region_id,
                  $new_subslice_start_on_slice,
                  $new_subslice_start_on_slice+$subseq_length-1,
                  $new_subslice_seq_region_id,
                  1,
                  $subseq_length,
                  1);
  }
  # The POD documents an int return: the seq_region_id assigned to the
  # whole slice. Return it explicitly instead of the last loop value.
  return $slice_seq_region_id;
}
1;
| Ensembl/ensembl-analysis | modules/Bio/EnsEMBL/Analysis/Hive/RunnableDB/HiveProcessAssemblyReport.pm | Perl | apache-2.0 | 25,011 |
=head1 LICENSE
Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package XrefParser::CeleraTranscriptParser;

use strict;
use warnings;

use base qw( XrefParser::CeleraParser );

# This parser only differs from the generic Celera parser in the sequence
# type it reports; see XrefParser::CeleraParser for the parsing logic.

=head2 get_sequence_type

  Description: Sequence type handled by this parser.
  Returntype : String, always 'dna'

=cut

sub get_sequence_type {
  return 'dna';
}

1;
| willmclaren/ensembl | misc-scripts/xref_mapping/XrefParser/CeleraTranscriptParser.pm | Perl | apache-2.0 | 830 |
#
# Copyright 2015 Electric Cloud, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#########################
## createcfg.pl
#########################
use strict;
use warnings;

use ElectricCommander;
use ElectricCommander::PropDB;
use ElectricCommander::PropMod;
#*****************************************************************************
# Exit codes for this step.
use constant {
    SUCCESS => 0,
    ERROR   => 1,
};

# Procedure parameters, filled in from the job details below.
my $opts;

# Placeholders substituted at plugin build/promote time.
my $PROJECT_NAME = '$[/myProject/projectName]';
my $PLUGIN_NAME  = '@PLUGIN_NAME@';
my $PLUGIN_KEY   = '@PLUGIN_KEY@';
#*****************************************************************************
# get an EC object
my $ec = ElectricCommander->new();
$ec->abortOnError(0);    # handle API errors ourselves instead of dying

# Load the shared logger code stored as a project property.
ElectricCommander::PropMod::loadPerlCodeFromProperty($ec, '/myProject/jboss_driver/EC::Logger');
my $logger = EC::Logger->new(log_level_old_api_value => '$[log_level]');
#*****************************************************************************
# Record a configuration-creation failure on the job and job step, log it,
# and mark the step outcome as 'error'. Returns nothing; the caller is
# expected to exit afterwards.
sub configurationError {
    my ($message) = @_;

    # Surface the error on the job and the current job step.
    $ec->setProperty('/myJob/configError',  $message);
    $ec->setProperty('/myJobStep/summary',  $message);
    $ec->setProperty('/myJobStep/outcome',  'error');

    # Detailed diagnostic plus a short error line in the step log.
    $logger->diagnostic_error("Create Configuration failed.\n\n$message");
    $logger->error('Create configuration failed');

    return;
}
#*****************************************************************************
# load option list from procedure parameters
my $x = $ec->getJobDetails($ENV{COMMANDER_JOBID});
my $nodeset = $x->find("//actualParameter");
foreach my $node ($nodeset->get_nodelist) {
    my $parm = $node->findvalue("actualParameterName");
    my $val = $node->findvalue("value");
    $opts->{$parm} = "$val";
}

# The configuration name is mandatory.
if (!defined $opts->{config} || "$opts->{config}" eq '') {
    configurationError("Config parameter must exist and be non-blank");
    exit(ERROR);
}

# check to see if a config with this name already exists before we do anything else
# (abortOnError is off, so a missing property just yields an empty result).
my $xpath = $ec->getProperty("/myProject/jboss_cfgs/$opts->{config}");
my $property = $xpath->findvalue("//response/property/propertyName");
if (defined $property && "$property" ne "") {
    configurationError("A configuration named '$opts->{config}' already exists.");
    exit(ERROR);
}

my $cfg = ElectricCommander::PropDB->new($ec, "/myProject/jboss_cfgs");

# add all the options as properties
# ('config' itself becomes the property-sheet name, not a column).
foreach my $key (keys %{$opts}) {
    if ($key eq 'config') {
        next;
    }
    $cfg->setCol("$opts->{config}", $key, "$opts->{$key}");
}
$logger->info("Configuration \"$opts->{config}\" created.\n");
exit(SUCCESS);
| electric-cloud/EC-JBoss | src/main/resources/project/conf/createcfg.pl | Perl | apache-2.0 | 3,038 |
package API::Cachegroup;
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
#
# a note about locations and cachegroups. This used to be "Location", before we had physical locations in 12M. Very confusing.
# What used to be called a location is now called a "cache group" and location is now a physical address, not a group of caches working together.
#
# JvD Note: you always want to put Utils as the first use. Sh*t don't work if it's after the Mojo lines.
use UI::Utils;
use Mojo::Base 'Mojolicious::Controller';
use Data::Dumper;
use JSON;
use MojoPlugins::Response;
use Validate::Tiny ':all';
# GET handler: list every cachegroup with parent/type names resolved.
# Sort field comes from the 'orderby' query parameter (default 'name').
sub index {
    my $self = shift;
    my @data;
    my %idnames;
    my $orderby = $self->param('orderby') || "name";

    # Map cachegroup id -> name once, so parent ids can be resolved to
    # names without an extra query per row.
    my $rs_idnames = $self->db->resultset("Cachegroup")->search( undef, { columns => [qw/id name/] } );
    while ( my $row = $rs_idnames->next ) {
        $idnames{ $row->id } = $row->name;
    }

    # NOTE: this row-to-hash mapping is duplicated in show(); keep the two
    # in sync when adding fields.
    my $rs_data = $self->db->resultset("Cachegroup")->search( undef, { prefetch => [ { 'type' => undef, } ], order_by => 'me.' . $orderby } );
    while ( my $row = $rs_data->next ) {
        push(
            @data, {
                "id" => $row->id,
                "name" => $row->name,
                "shortName" => $row->short_name,
                "latitude" => $row->latitude,
                "longitude" => $row->longitude,
                "lastUpdated" => $row->last_updated,
                "parentCachegroupId" => $row->parent_cachegroup_id,
                "parentCachegroupName" => ( defined $row->parent_cachegroup_id ) ? $idnames{ $row->parent_cachegroup_id } : undef,
                "secondaryParentCachegroupId" => $row->secondary_parent_cachegroup_id,
                "secondaryParentCachegroupName" => ( defined $row->secondary_parent_cachegroup_id )
                ? $idnames{ $row->secondary_parent_cachegroup_id }
                : undef,
                "typeId" => $row->type->id,
                "typeName" => $row->type->name
            }
        );
    }
    $self->success( \@data );
}
# GET handler: list only the cachegroup names, ordered by the 'orderby'
# query parameter (default 'name').
sub index_trimmed {
    my $self = shift;

    my $sort_field = $self->param('orderby') || "name";
    my $rows = $self->db->resultset("Cachegroup")
        ->search( undef, { prefetch => [ { 'type' => undef, } ], order_by => 'me.' . $sort_field } );

    my @results;
    while ( my $cachegroup = $rows->next ) {
        push @results, { "name" => $cachegroup->name };
    }

    $self->success( \@results );
}
# GET handler: return a single cachegroup (by the 'id' route parameter)
# with parent/type names resolved. The response is still a list, matching
# the shape produced by index().
sub show {
    my $self = shift;
    my $id = $self->param('id');
    my $rs_data = $self->db->resultset("Cachegroup")->search( { id => $id } );
    my @data = ();
    my %idnames;

    # Map cachegroup id -> name so parent ids can be resolved to names.
    my $rs_idnames = $self->db->resultset("Cachegroup")->search( undef, { columns => [qw/id name/] } );
    while ( my $row = $rs_idnames->next ) {
        $idnames{ $row->id } = $row->name;
    }

    # NOTE: this row-to-hash mapping is duplicated in index(); keep the two
    # in sync when adding fields.
    while ( my $row = $rs_data->next ) {
        push(
            @data, {
                "id" => $row->id,
                "name" => $row->name,
                "shortName" => $row->short_name,
                "latitude" => $row->latitude,
                "longitude" => $row->longitude,
                "lastUpdated" => $row->last_updated,
                "parentCachegroupId" => $row->parent_cachegroup_id,
                "parentCachegroupName" => ( defined $row->parent_cachegroup_id ) ? $idnames{ $row->parent_cachegroup_id } : undef,
                "secondaryParentCachegroupId" => $row->secondary_parent_cachegroup_id,
                "secondaryParentCachegroupName" => ( defined $row->secondary_parent_cachegroup_id )
                ? $idnames{ $row->secondary_parent_cachegroup_id }
                : undef,
                "typeId" => $row->type->id,
                "typeName" => $row->type->name
            }
        );
    }
    $self->success( \@data );
}
# PUT handler: update the cachegroup identified by the 'id' route
# parameter from the JSON request body. Requires operator privileges.
# Name and short name must stay unique across cachegroups.
sub update {
    my $self = shift;
    my $id = $self->param('id');
    my $params = $self->req->json;

    if ( !&is_oper($self) ) {
        return $self->forbidden();
    }

    # Validate the payload before touching the database.
    my ( $is_valid, $result ) = $self->is_cachegroup_valid($params);
    if ( !$is_valid ) {
        return $self->alert($result);
    }

    my $cachegroup = $self->db->resultset('Cachegroup')->find( { id => $id } );
    if ( !defined($cachegroup) ) {
        return $self->not_found();
    }

    # Uniqueness checks only when the value actually changes.
    my $name = $params->{name};
    if ( $cachegroup->name ne $name ) {
        my $existing = $self->db->resultset('Cachegroup')->find( { name => $name } );
        if ($existing) {
            return $self->alert( "A cachegroup with name " . $name . " already exists." );
        }
    }

    my $short_name = $params->{shortName};
    if ( $cachegroup->short_name ne $short_name ) {
        my $existing = $self->db->resultset('Cachegroup')->find( { short_name => $short_name } );
        if ($existing) {
            return $self->alert( "A cachegroup with short_name " . $short_name . " already exists." );
        }
    }

    # Map JSON camelCase keys to database column names.
    my $values = {
        name => $params->{name},
        short_name => $params->{shortName},
        latitude => $params->{latitude},
        longitude => $params->{longitude},
        parent_cachegroup_id => $params->{parentCachegroupId},
        secondary_parent_cachegroup_id => $params->{secondaryParentCachegroupId},
        type => $params->{typeId}
    };

    my $rs = $cachegroup->update($values);
    if ($rs) {
        # Build the response in the same shape as index()/show(), with
        # parent ids resolved to names.
        my %idnames;
        my $response;
        my $rs_idnames = $self->db->resultset("Cachegroup")->search( undef, { columns => [qw/id name/] } );
        while ( my $row = $rs_idnames->next ) {
            $idnames{ $row->id } = $row->name;
        }
        $response->{id} = $rs->id;
        $response->{name} = $rs->name;
        $response->{shortName} = $rs->short_name;
        $response->{latitude} = $rs->latitude;
        $response->{longitude} = $rs->longitude;
        $response->{lastUpdated} = $rs->last_updated;
        $response->{parentCachegroupId} = $rs->parent_cachegroup_id;
        $response->{parentCachegroupName} =
            ( defined $rs->parent_cachegroup_id )
            ? $idnames{ $rs->parent_cachegroup_id }
            : undef;
        $response->{secondaryParentCachegroupId} = $rs->secondary_parent_cachegroup_id;
        $response->{secondaryParentCachegroupName} =
            ( defined $rs->secondary_parent_cachegroup_id )
            ? $idnames{ $rs->secondary_parent_cachegroup_id }
            : undef;
        $response->{typeId} = $rs->type->id;
        $response->{typeName} = $rs->type->name;
        &log( $self, "Updated Cachegroup name '" . $rs->name . "' for id: " . $rs->id, "APICHANGE" );
        return $self->success( $response, "Cachegroup update was successful." );
    }
    else {
        return $self->alert("Cachegroup update failed.");
    }
}
# POST handler: create a cachegroup from the JSON request body. Requires
# operator privileges. Name and short name must be unique.
sub create {
    my $self = shift;
    my $params = $self->req->json;

    if ( !&is_oper($self) ) {
        return $self->forbidden();
    }

    # Validate the payload before touching the database.
    my ( $is_valid, $result ) = $self->is_cachegroup_valid($params);
    if ( !$is_valid ) {
        return $self->alert($result);
    }

    # Uniqueness checks on name and short name.
    my $name = $params->{name};
    my $existing = $self->db->resultset('Cachegroup')->find( { name => $name } );
    if ($existing) {
        return $self->alert( "A cachegroup with name " . $name . " already exists." );
    }

    my $short_name = $params->{shortName};
    $existing = $self->db->resultset('Cachegroup')->find( { short_name => $short_name } );
    if ($existing) {
        return $self->alert( "A cachegroup with short_name " . $short_name . " already exists." );
    }

    # Map JSON camelCase keys to database column names.
    my $values = {
        name => $params->{name},
        short_name => $params->{shortName},
        latitude => $params->{latitude},
        longitude => $params->{longitude},
        parent_cachegroup_id => $params->{parentCachegroupId},
        secondary_parent_cachegroup_id => $params->{secondaryParentCachegroupId},
        type => $params->{typeId}
    };

    # NOTE(review): resultset create() already inserts the row; the extra
    # insert() call looks redundant (a no-op for a row already in storage)
    # — confirm against DBIx::Class::Row::insert.
    my $insert = $self->db->resultset('Cachegroup')->create($values);
    my $rs = $insert->insert();
    if ($rs) {
        # Build the response in the same shape as index()/show(), with
        # parent ids resolved to names.
        my %idnames;
        my $response;
        my $rs_idnames = $self->db->resultset("Cachegroup")->search( undef, { columns => [qw/id name/] } );
        while ( my $row = $rs_idnames->next ) {
            $idnames{ $row->id } = $row->name;
        }
        $response->{id} = $rs->id;
        $response->{name} = $rs->name;
        $response->{shortName} = $rs->short_name;
        $response->{latitude} = $rs->latitude;
        $response->{longitude} = $rs->longitude;
        $response->{lastUpdated} = $rs->last_updated;
        $response->{parentCachegroupId} = $rs->parent_cachegroup_id;
        $response->{parentCachegroupName} =
            ( defined $rs->parent_cachegroup_id )
            ? $idnames{ $rs->parent_cachegroup_id }
            : undef;
        $response->{secondaryParentCachegroupId} = $rs->secondary_parent_cachegroup_id;
        $response->{secondaryParentCachegroupName} =
            ( defined $rs->secondary_parent_cachegroup_id )
            ? $idnames{ $rs->secondary_parent_cachegroup_id }
            : undef;
        $response->{typeId} = $rs->type->id;
        $response->{typeName} = $rs->type->name;
        &log( $self, "Updated Cachegroup name '" . $rs->name . "' for id: " . $rs->id, "APICHANGE" );
        return $self->success( $response, "Cachegroup creation was successful." );
    }
    else {
        return $self->alert("Cachegroup creation failed.");
    }
}
# Delete the cachegroup identified by route parameter 'id'.
# Requires operator privileges.  Deletion is refused while the cachegroup
# is still referenced by servers, by other cachegroups (as primary or
# secondary parent), or by ASNs.
sub delete {
my $self = shift;
my $id = $self->param('id');
if ( !&is_oper($self) ) {
return $self->forbidden();
}
my $cg = $self->db->resultset('Cachegroup')->find( { id => $id } );
if ( !defined($cg) ) {
return $self->not_found();
}
# Referential checks: find() returns a row (or undef); any hit blocks deletion.
my $servers = $self->db->resultset('Server')->find( { cachegroup => $cg->id } );
if ( defined($servers) ) {
return $self->alert("This cachegroup is currently used by servers.");
}
my $parent_cgs = $self->db->resultset('Cachegroup')->find( { parent_cachegroup_id => $cg->id } );
if ( defined($parent_cgs) ) {
return $self->alert("This cachegroup is currently used as a parent cachegroup.");
}
my $secondary_parent_cgs = $self->db->resultset('Cachegroup')->find( { secondary_parent_cachegroup_id => $cg->id } );
if ( defined($secondary_parent_cgs) ) {
return $self->alert("This cachegroup is currently used as a secondary parent cachegroup.");
}
my $asns = $self->db->resultset('Asn')->find( { cachegroup => $cg->id } );
if ( defined($asns) ) {
return $self->alert("This cachegroup is currently used by one or more ASNs.");
}
# All references are clear -- remove the row.
my $rs = $cg->delete();
if ($rs) {
return $self->success_message("Cachegroup deleted.");
} else {
return $self->alert( "Cachegroup delete failed." );
}
}
# List cachegroups (id/name pairs) that are NOT yet linked to the given
# parameter id (route param 'paramid'), ordered by cachegroup name.
# Response shape: { cachegroups => [ { id => ..., name => ... }, ... ] }
sub by_parameter_id {
    my $self     = shift;
    my $param_id = $self->param('paramid');

    # Mark every cachegroup already associated with this parameter.
    my %linked;
    my $assoc_rs = $self->db->resultset("CachegroupParameter")->search( { 'parameter' => $param_id } );
    while ( my $assoc = $assoc_rs->next ) {
        $linked{ $assoc->cachegroup->id } = 1;
    }

    # Collect the remaining cachegroups, ordered by name.
    my @available;
    my $cg_rs = $self->db->resultset("Cachegroup")->search( undef, { order_by => "name" } );
    while ( my $cg = $cg_rs->next ) {
        next if defined $linked{ $cg->id };
        push @available, { "id" => $cg->id, "name" => $cg->name };
    }

    $self->success( { cachegroups => \@available } );
}
# List cachegroups (id/name pairs) that are NOT yet linked to the given
# parameter id, as a bare array.  Same logic as by_parameter_id() but with
# a different response shape (array instead of { cachegroups => ... }).
sub available_for_parameter {
my $self = shift;
my @data;
my $paramid = $self->param('paramid');
my %dsids;
my %in_use;
# Get a list of all cachegroup id's associated with this param id
# (the original comment said "profile id's" -- the code clearly keys on
# $row->cachegroup->id).
my $rs_in_use = $self->db->resultset("CachegroupParameter")->search( { 'parameter' => $paramid } );
while ( my $row = $rs_in_use->next ) {
$in_use{ $row->cachegroup->id } = 1;
}
# Add remaining cachegroup ids to @data
my $rs_links = $self->db->resultset("Cachegroup")->search( undef, { order_by => "name" } );
while ( my $row = $rs_links->next ) {
if ( !defined( $in_use{ $row->id } ) ) {
push( @data, { "id" => $row->id, "name" => $row->name } );
}
}
$self->success( \@data );
}
# Queue or dequeue server updates (the upd_pending flag) for all servers
# in the cachegroup (route param 'id') whose profile is used within the
# CDN named in the JSON body: { cdn => <name>, action => "queue"|"dequeue" }.
# Returns the affected host names plus the echoed request context.
sub postupdatequeue {
    my $self   = shift;
    my $params = $self->req->json;
    if ( !&is_oper($self) ) {
        # Bug fix: "Insufficent" -> "Insufficient" in the error message.
        return $self->forbidden("Forbidden. Insufficient privileges.");
    }
    # The cachegroup must exist.
    my $name;
    my $id = $self->param('id');
    $name = $self->db->resultset('Cachegroup')->search( { id => $id } )->get_column('name')->single();
    if ( !defined($name) ) {
        return $self->alert( "cachegroup id[" . $id . "] does not exist." );
    }
    # The CDN must exist.
    my $cdn = $params->{cdn};
    my $cdn_id = $self->db->resultset('Cdn')->search( { name => $cdn } )->get_column('id')->single();
    if ( !defined($cdn_id) ) {
        return $self->alert( "cdn " . $cdn . " does not exist." );
    }
    # Translate the action into the upd_pending flag value (1 = queued).
    my $setqueue = $params->{action};
    if ( !defined($setqueue) ) {
        return $self->alert("action needed, should be queue or dequeue.");
    }
    if ( $setqueue eq "queue" ) {
        $setqueue = 1;
    }
    elsif ( $setqueue eq "dequeue" ) {
        $setqueue = 0;
    }
    else {
        return $self->alert("action should be queue or dequeue.");
    }
    # Profiles in use by any server of this CDN; the update below is
    # restricted to servers in this cachegroup with one of those profiles.
    my @profiles;
    @profiles = $self->db->resultset('Server')->search(
        { 'cdn.name' => $cdn },
        {
            prefetch => 'cdn',
            select   => 'me.profile',
            distinct => 1
        }
    )->get_column('profile')->all();
    my $update = $self->db->resultset('Server')->search(
        {
            -and => [
                cachegroup => $id,
                profile    => { -in => \@profiles }
            ]
        }
    );
    my $response;
    my @svrs = ();
    if ( $update->count() > 0 ) {
        $update->update( { upd_pending => $setqueue } );
        # Report the host name of every server that was touched.
        my @row = $update->get_column('host_name')->all();
        foreach my $svr (@row) {
            push( @svrs, $svr );
        }
    }
    $response->{serverNames}    = \@svrs;
    $response->{action}         = ( $setqueue == 1 ) ? "queue" : "dequeue";
    $response->{cdn}            = $cdn;
    $response->{cachegroupName} = $name;
    $response->{cachegroupId}   = $id;
    return $self->success($response);
}
# Validate a cachegroup payload.
# Returns (1, cleaned-data) on success or (0, error) on failure.
# The type id is checked against the Type table first; the remaining
# fields go through the declarative rules below.
sub is_cachegroup_valid {
my $self = shift;
my $params = shift;
if (!$self->is_valid_cachegroup_type($params->{typeId})) {
return ( 0, "Invalid cachegroup type" );
}
my $rules = {
fields => [ qw/name shortName latitude longitude parentCachegroupId secondaryParentCachegroupId typeId/ ],
# Validation checks to perform
checks => [
name => [ is_required("is required"), \&is_alphanumeric ],
shortName => [ is_required("is required"), \&is_alphanumeric ],
typeId => [ is_required("is required") ],
latitude => [ \&is_valid_lat ],
longitude => [ \&is_valid_long ]
]
};
# Validate the input against the rules
my $result = validate( $params, $rules );
if ( $result->{success} ) {
return ( 1, $result->{data} );
}
else {
return ( 0, $result->{error} );
}
}
# Validation callback: returns undef (no error) when $value is empty or
# consists only of [0-9A-Za-z_.-]; otherwise returns an error string.
sub is_alphanumeric {
    my ( $value, $params ) = @_;

    # Empty or missing values are left to the 'required' check.
    return undef if !defined $value or $value eq '';

    # Any character outside the allowed set makes the value invalid.
    return "invalid. Use alphanumeric . or _ ." unless $value =~ /^[0-9a-zA-Z_\.\-]+$/;

    return undef;
}
# Validation callback for latitude values.
# Returns undef when $value is empty/undefined or a valid latitude in
# [-90, 90]; otherwise returns an error string.
sub is_valid_lat {
    my ( $value, $params ) = @_;
    # Empty or missing values are left to the 'required' check.
    if ( !defined $value or $value eq '' ) {
        return undef;
    }
    # Anchored numeric pattern: optional sign, digits, optional fraction.
    # Bug fix: the previous pattern /^[-]*[0-9]+[.]*[0-9]*/ was unanchored,
    # so values like "12abc", "1.2.3" or "--12" slipped through.
    if ( !( $value =~ /^-?[0-9]+(?:\.[0-9]*)?$/ ) ) {
        return "invalid. Must be a float number.";
    }
    if ( abs($value) > 90 ) {
        return "invalid. May not exceed +- 90.0.";
    }
    return undef;
}
# Validation callback for longitude values.
# Returns undef when $value is empty/undefined or a valid longitude in
# [-180, 180]; otherwise returns an error string.
sub is_valid_long {
    my ( $value, $params ) = @_;
    # Empty or missing values are left to the 'required' check.
    if ( !defined $value or $value eq '' ) {
        return undef;
    }
    # Anchored numeric pattern: optional sign, digits, optional fraction.
    # Bug fix: the previous pattern /^[-]*[0-9]+[.]*[0-9]*/ was unanchored,
    # so values like "12abc", "1.2.3" or "--12" slipped through.
    if ( !( $value =~ /^-?[0-9]+(?:\.[0-9]*)?$/ ) ) {
        return "invalid. Must be a float number.";
    }
    if ( abs($value) > 180 ) {
        return "invalid. May not exceed +- 180.0.";
    }
    return undef;
}
# Return 1 when $type_id names a Type row whose use_in_table column is
# "cachegroup"; 0 for unknown ids or types meant for other tables.
sub is_valid_cachegroup_type {
    my ( $self, $type_id ) = @_;

    my $type_row = $self->db->resultset("Type")->find( { id => $type_id } );
    return 0 unless defined $type_row;
    return ( $type_row->use_in_table eq "cachegroup" ) ? 1 : 0;
}
1;
| knutsel/traffic_control-1 | traffic_ops/app/lib/API/Cachegroup.pm | Perl | apache-2.0 | 15,757 |
#!/usr/bin/env perl
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
# Copyright [2016-2021] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
use warnings;
use strict;
use Getopt::Long;
use Carp;
use Data::Dumper;
use Bio::EnsEMBL::Production::Utils::CopyDatabase
qw/copy_database/;
use Log::Log4perl qw/:easy/;
# Parse command-line options into a hash ref.  The two database URIs are
# mandatory and checked below; everything else is optional.
my $opts = {};
GetOptions( $opts, 'source_db_uri=s',
'target_db_uri=s', 'only_tables=s',
'skip_tables=s', 'update|u',
'drop|d', 'convert_innodb|c', 'skip_optimize|k', 'verbose' );
# Verbose mode raises the log level from INFO to DEBUG.
if ( $opts->{verbose} ) {
Log::Log4perl->easy_init($DEBUG);
}
else {
Log::Log4perl->easy_init($INFO);
}
my $logger = get_logger;
# Both database URIs are required; bail out with a usage message otherwise.
if ( !defined $opts->{source_db_uri} || !defined $opts->{target_db_uri} ) {
croak "Usage: copy_database.pl -source_db_uri <mysql://user:password\@host:port/db_name> -target_db_uri <mysql://user:password\@host:port/db_name> [-only_tables=table1,table2] [-skip_tables=table1,table2] [-update] [-drop] [-convert_innodb] [-skip_optimize] [-verbose]";
}
$logger->debug("Copying $opts->{source_db_uri} to $opts->{target_db_uri}");
# Delegate the actual work to the library; arguments are passed positionally.
copy_database($opts->{source_db_uri}, $opts->{target_db_uri}, $opts->{only_tables}, $opts->{skip_tables}, $opts->{update}, $opts->{drop}, $opts->{convert_innodb}, $opts->{skip_optimize}, $opts->{verbose});
| Ensembl/ensembl-production | scripts/copy_database.pl | Perl | apache-2.0 | 1,920 |
# VMOMI complex type wrapping an array of HostPlugStoreTopologyPath values.
# Generated-style boilerplate: member metadata is exposed via the two
# class methods below for consumption by the ComplexType machinery.
package VMOMI::ArrayOfHostPlugStoreTopologyPath;
use parent 'VMOMI::ComplexType';
use strict;
use warnings;
# No ancestors beyond the ComplexType base class.
our @class_ancestors = ( );
# Member descriptor tuples: [name, type, flag, flag] -- exact flag meaning
# is defined by VMOMI::ComplexType; presumably array/optional markers --
# TODO confirm against the base class.
our @class_members = (
['HostPlugStoreTopologyPath', 'HostPlugStoreTopologyPath', 1, 1],
);
# Return this type's ancestor list (empty here).
sub get_class_ancestors {
return @class_ancestors;
}
# Return inherited members followed by this type's own member descriptors.
sub get_class_members {
my $class = shift;
my @super_members = $class->SUPER::get_class_members();
return (@super_members, @class_members);
}
1;
| stumpr/p5-vmomi | lib/VMOMI/ArrayOfHostPlugStoreTopologyPath.pm | Perl | apache-2.0 | 453 |
#!/usr/bin/env perl
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
# Copyright [2016-2020] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
=head1 NAME
make_bsubs.pl
=head1 SYNOPSIS
make_bsubs.pl
Makes bsub entries for run_blat.pl, etc...
bsubs can be submitted using submit.pl - they\'re not automatically
done from here as it\'s better to submit a few and check they come
back OK before sending a genome worth.
Makes sure all the various scratch subdirectories needed are in place,
and makes them if necessary.
=head1 DESCRIPTION
=head1 OPTIONS
=cut
use warnings ;
use strict;
use Getopt::Long;
use Bio::EnsEMBL::DBSQL::DBAdaptor;
use Bio::EnsEMBL::Pipeline::Config::cDNAs_ESTs::Exonerate qw (
EST_TMPDIR
EST_REFDBHOST
EST_REFDBUSER
EST_REFDBNAME
EST_DBHOST
LSF_OPTIONS
EST_SCRIPTDIR
EST_TMPDIR
EST_RUNNER
EST_EXONERATE_BSUBS
EST_EXONERATE_RUNNABLE
EST_EXONERATE_ANALYSIS
EST_EXONERATE_OPTIONS
EST_CHUNKNUMBER
EST_CHUNKDIR
EST_FILE
);
# NOTE(review): %chrhash is declared but never used anywhere in this
# script -- candidate for removal once confirmed nothing else needs it.
my %chrhash;
# declare these here so we can refer to them later
my $exonerate_bsubdir = "exonerate_results";
# make output directories
&make_directories();
# create jobs file for Exonerate
&make_exonerate_bsubs();
############################################################
# Create the scratch directory tree that will hold the bsub job output:
# $EST_TMPDIR/exonerate_results/ plus stderr/ and stdout/ subdirectories.
sub make_directories {
    my $base = $EST_TMPDIR . "/" . $exonerate_bsubdir . "/";

    # Parent directory first, then the per-stream subdirectories.
    makedir($_) for ( $base, $base . "stderr/", $base . "stdout/" );
}
############################################################
# Write one bsub command line per EST chunk into $EST_EXONERATE_BSUBS.
# Each command runs run_exonerate.pl on one chunk file, with LSF resource
# constraints keeping load on the reference and EST database hosts down.
sub make_exonerate_bsubs {
my $jobfile = $EST_EXONERATE_BSUBS;
open (OUT, ">$jobfile") or die ("Can't open $jobfile for writing: $!");
my $lsf_options = $LSF_OPTIONS;
my $refdb_host = $EST_REFDBHOST;
my $est_host = $EST_DBHOST;
#$lsf_options .= " -R\"select[$refdb_host < 440 && $est_host < 440] rusage[$refdb_host=10]\" ";
# Throttle jobs when either DB host is loaded; reserve rusage tokens.
$lsf_options .= " -R\"select[$refdb_host < 440 && $est_host < 440] rusage[$refdb_host=10:duration=2:decay=1:$est_host=10:duration=2:decay=1]\"";
my $scriptdir = $EST_SCRIPTDIR;
my $check = $scriptdir . "check_node.pl";
my $exonerate = $scriptdir . "run_exonerate.pl";
my $bsuberr = $EST_TMPDIR . "/" . $exonerate_bsubdir . "/stderr/";
my $bsubout = $EST_TMPDIR . "/" . $exonerate_bsubdir . "/stdout/";
my $runnable_db = $EST_EXONERATE_RUNNABLE;
my $analysis = $EST_EXONERATE_ANALYSIS;
my $estfile = $EST_FILE; # may be a full path
# Reduce the EST file path to its basename and build the chunk-name stem,
# e.g. "ests.fa" -> "ests_chunk_".
my @path = split /\//, $estfile;
$estfile = $path[$#path];
$estfile =~ s/\.fa$//i;
$estfile .= "_chunk_";
my $numchunks = $EST_CHUNKNUMBER;
for(my $i = 0; $i < $numchunks; $i++){
# Chunk numbers are zero-padded to 7 digits to match the chunk files.
my $num = $i;
while (length($num) < 7){
$num = "0" . $num;
}
my $chunk = $estfile . $num;
my $outfile = $bsubout . $chunk;
my $errfile = $bsuberr . $chunk;
my $chunk_file = $EST_CHUNKDIR."/".$chunk;
my $query_chunk = "/tmp/".$chunk;
#my $command = "bsub $lsf_options -f \" $chunk_file > $query_chunk \" -o $outfile -e $errfile -E \"$check $query_chunk\" $exonerate -runnable $runnable_db -analysis $analysis -query_seq $query_chunk -write";
my $command = "bsub $lsf_options -o $outfile -e $errfile -E \"$check $chunk\" $exonerate -runnable $runnable_db -analysis $analysis -query_seq $chunk_file -write";
print OUT "$command\n";
}
close (OUT) or die (" Error closing $jobfile: $!");
}
############################################################
# Create $dir if it does not already exist; die on failure.
# Uses Perl's built-in mkdir instead of shelling out, which avoids
# interpolating the path into a shell command (spaces, metacharacters)
# and the cost of a subprocess; -d replaces the bareword DIR handle.
sub makedir{
    my ($dir) = @_;

    # Nothing to do when the directory is already present.
    return if -d $dir;

    mkdir $dir or die "error creating $dir\n";
}
############################################################
| Ensembl/ensembl-pipeline | scripts/EST/make_exonerate_bsubs.pl | Perl | apache-2.0 | 4,604 |
package Parakeet::Dependency::Item;
use strict;
use warnings;
use utf8;
use v5.12;
use Misc::DmUtil::Data qw(dumperSelective isEmpty notEmpty toStr);
use Misc::DmUtil::Log qw(:level);
use Data::Dumper;
use Time::HiRes qw(gettimeofday tv_interval);
use overload
'<=>' => \&cmp,
'""' => \&asString,
'cmp' => \&cmp;
# Render this item as "type,subject,subjectStage,modified" where the
# modified time is ISO-8601 or the literal "null" when unknown (< 0).
# Used by the "" overload and by cmp().
sub asString
{
    my $this = shift;

    my $m = $this->modified();

    # Explicit arrow call: the previous "new Class(...)->method()" form used
    # indirect object syntax, whose parse is notoriously fragile in Perl.
    my $mod = ( $m >= 0 ? Misc::DmUtil::DateTime->new($m)->toIso8601() : "null" );

    return $this->type() . "," . $this->subject()->asString() . "," . $this->subjectStage() . ",$mod";
} # end of 'asString()'
# Overload handler for 'cmp' and '<=>': order two items by their string
# rendering.  The third argument is set by the overload mechanism when the
# operands arrived swapped, in which case the result must be negated --
# the previous version ignored it (and shadowed the sort globals $a/$b).
sub cmp
{
    my ( $left, $right, $swapped ) = @_;

    my $result = $left->asString() cmp $right->asString();
    return $swapped ? -$result : $result;
} # end of 'cmp()'
# Return 1 when the dependency's recorded modified time still matches the
# subject's current modified time, 0 when the subject changed or can no
# longer be fetched.  "content" items compare against the subject
# Content's highest modified time for the subject stage; "file" items
# compare against the file's mtime (-1 when the file is missing).
sub isCurrent
{
my $this = shift;
my $log = $this->{log};
my $debug = $log->isLoggable(LOG_DEBUG);
my $info = $log->isLoggable(LOG_INFO);
$log->debug("Start") if $debug;
$log->debug("type=[".$this->{type}."], subject=[".$this->subject()."]") if $debug;
if ($this->type() eq "content")
{
my $address = $this->subjectAddress();
my $depContent = $this->subjectContent();
# Content that cannot be fetched counts as stale.
unless ($depContent)
{
$log->info("Failed to get Content [$address]") if $info;
return 0;
}
my $modified = $depContent->modifiedHighest($this->subjectStage());
unless ($modified->{epoch} == $this->modified())
{
$log->info(
"Modified time difference, saw [".
new Misc::DmUtil::DateTime($modified->{epoch})->toIso8601().
"] from [$modified->{file}] and expected [".
new Misc::DmUtil::DateTime($this->modified())->toIso8601().
"]") if $info;
return 0;
}
}
elsif ($this->type() eq "file")
{
# A missing file maps to -1, matching the "unknown" sentinel used by
# modified().
my $modified = $this->subjectFile()->modifiedTime() // -1;
$log->debug("modified=[".toStr($modified)."], this.modified=[".$this->modified()."]") if $debug;
unless ($modified == $this->modified())
{
my $old = ($this->modified() >= 0 ? new Misc::DmUtil::DateTime($this->modified())->toIso8601() : "missing");
my $new = ($modified >= 0 ? new Misc::DmUtil::DateTime($modified)->toIso8601() : "missing");
$log->info("Modified time difference, saw [$new] and expected [$old]") if $info;
return 0;
}
}
$log->info("All OK") if $info;
return 1;
} # end of 'isCurrent()'
# Return 1 when this dependency is ready for a currency check: the
# relevant content's subject stage has already been run (or the subject
# is gone / a generic file, which never becomes "more ready").
sub isReady
{
my $this = shift;
my $log = $this->{log};
my $debug = $log->isLoggable(LOG_DEBUG);
my $info = $log->isLoggable(LOG_INFO);
my $content;
if ($this->type() eq "content")
{
$content = $this->subjectContent();
# Failure to get content counts as ready as it means the content
# has been deleted so will never become ready
unless ($content)
{
$log->info("Failed to get content") if $info;
return 1;
}
}
elsif ($this->type() eq "file")
{
# Files with a stage attached are a readiness check versus self
if (notEmpty($this->subjectStage()))
{
$content = $this->observerContent();
}
else
{
$log->info("Generic file, presume ready") if $info;
return 1;
}
}
else
{
$log->fatal("Unknown type");
}
# At this point $content is set; check whether the subject stage ran.
my $stage = $this->subjectStage();
unless ($content->stageRun($stage))
{
$log->info("Stage [$stage] on [".$content->address()."] not run yet") if $info;
return 0;
}
$log->info("Ready") if $info;
return 1;
} # end of 'isReady()'
# Lazily build and cache this item's unique key:
# "type/subject/subjectStage" (subject stringifies via its "" overload).
sub key
{
    my $this = shift;

    $this->{key} //= join( "/", $this->type(), $this->subject(), $this->subjectStage() );
    return $this->{key};
} # end of 'key()'
# Return the cached modified time (epoch seconds) of the subject,
# computing it on first call.  For "content" items this is the highest
# modified time for the subject stage; for "file" items the file's mtime.
# -1 is the sentinel for "unknown / missing".
sub modified
{
my $this = shift;
my $log = $this->{log};
unless (defined $this->{modified})
{
if ($this->type() eq "content")
{
my $content = $this->subjectContent();
if ($content)
{
my $highest = $content->modifiedHighest($this->subjectStage());
if ($highest)
{
$log->info(
"Seen highest time of [".
new Misc::DmUtil::DateTime($highest->{epoch}).
"] from [".
$highest->{file}.
"]") if $log->isLoggable(LOG_INFO);
$this->{modified} = $highest->{epoch};
}
}
}
elsif ($this->type() eq "file")
{
$this->{modified} = $this->{subjectFile}->modifiedTime();
}
else
{
$log->fatal("Unknown type");
}
# Fall back to the "unknown" sentinel when nothing above set a value.
$this->{modified} //= -1;
}
return $this->{modified};
} # end of 'modified()'
# Construction.  Accepted keyword arguments:
#   subjectContent / subjectFile / subject (+ type) -- exactly one way to
#     name the subject; "subject" is disambiguated by the "type" argument
#     ("content" or "file").
#   observerStage (required), observerContent, subjectStage, modified --
#     copied verbatim.
# Dies (via log fatal) on missing/ambiguous input.
sub new
{
my $class = shift;
my %arg = @_;
my $this = {};
bless $this, $class;
# create log
my $log = Misc::DmUtil::Log::find(%arg);
# straight copies
foreach my $i (qw(modified observerContent observerStage subjectStage type))
{
$this->{$i} = $arg{$i};
}
# observerStage is the only strictly required plain argument.
foreach my $i (qw(observerStage))
{
unless (notEmpty($this->{$i}))
{
$log->fatal("No [$i] supplied");
}
}
# Determine the subject; the first matching form wins.
if ($arg{subjectContent})
{
$this->{subjectAddress} = $arg{subjectContent}->address();
$this->{subjectContent} = $arg{subjectContent};
$this->{type} = "content";
}
elsif ($arg{subjectFile})
{
$this->{subjectFile} = Misc::DmUtil::FilePath::toObject($arg{subjectFile});
$this->{type} = "file";
}
elsif ($arg{subject})
{
# Bare "subject" needs "type" to decide how to interpret it.
my $t = $arg{type} // "";
if ($t eq "content")
{
$this->{subjectAddress} = Parakeet::Address::toObject($arg{subject});
$this->{type} = "content";
}
elsif ($t eq "file")
{
$this->{subjectFile} = Misc::DmUtil::FilePath::toObject($arg{subject});
$this->{type} = "file";
}
else
{
$log->fatal("Unknown subject type [$t]");
}
}
else
{
$log->fatal("No inputs\n".Dumper(\%arg));
}
# Per-item log whose id includes the subject for traceability.
$this->{log} = new Misc::DmUtil::Log(id => $log->id().",".$this->subject());
return $this;
} # end of 'new()'
# Accessor: the observing Content object supplied at construction.
sub observerContent
{
    my $this = shift;
    return $this->{observerContent};
} # end of 'observerContent()'
# Accessor: the observer stage name supplied at construction.
sub observerStage
{
    my $this = shift;
    return $this->{observerStage};
} # end of 'observerStage()'
# Return the subject of this dependency: the file-path object for "file"
# items, the address object for everything else.
sub subject
{
    my $this = shift;

    return $this->type() eq "file" ? $this->subjectFile() : $this->subjectAddress();
} # end of 'subject()'
# Accessor: the subject's address object (content-type items).
sub subjectAddress
{
    my $this = shift;
    return $this->{subjectAddress};
} # end of 'subjectAddress()'
# Lazily fetch and cache the subject Content object.  Only valid on
# "content" items (fatal otherwise).  A failed, non-fatal fetch is cached
# as 0 so that callers get a consistent false value on every call.
sub subjectContent
{
my $this = shift;
my $log = $this->{log};
if ($this->type() ne "content")
{
$log->fatal("Can't call subjectContent() on non-content dependency");
}
unless (defined $this->{subjectContent})
{
$this->{subjectContent} = Parakeet::Content::get($this->subjectAddress(), fatal => 0);
}
# Normalize "fetch failed" (undef) to 0 so the cache slot is defined.
$this->{subjectContent} //= 0;
return $this->{subjectContent};
} # end of 'subjectContent()'
# Accessor: the subject's file-path object (file-type items).
sub subjectFile
{
    my $this = shift;
    return $this->{subjectFile};
} # end of 'subjectFile()'
# Return the subject stage, computing it lazily for content items as the
# stage preceding the observer stage.  Defaults to "" when no stage can
# be determined (e.g. the content could not be fetched, or file items
# with no explicit stage).
sub subjectStage
{
my $this = shift;
unless (defined $this->{subjectStage})
{
if ($this->type() eq "content")
{
my $content = $this->subjectContent();
if ($content)
{
$this->{subjectStage} = $content->stagePrev($this->observerStage());
}
}
# Empty string marks "no stage" and keeps the cache slot defined.
$this->{subjectStage} //= "";
}
return $this->{subjectStage};
} # end of 'subjectStage()'
# Accessor: the dependency type, either "content" or "file" (set in new()).
sub type
{
    my $this = shift;
    return $this->{type};
} # end of 'type()'
1;
| duncanmartin/parakeet | lib/Parakeet/Dependency/Item.pm | Perl | bsd-2-clause | 7,790 |
#!/usr/bin/env perl -w
##-----------------------------------------------------------------------------
## This file is part of ctan-o-mat.
## This program is distributed under BSD-like license. See file LICENSE
##
## (c) 2016-2017 Gerd Neugebauer
##
## Net: gene@gerd-neugebauer.de
##
## This program is free software; you can redistribute it and/or modify it
## under the terms of a 3-clause BSD-like license as stated in the file
## LICENSE contained in this distribution.
##
## You should have received a copy of the LICENSE along with this program; if
## not, see the repository under https://github.com/ge-ne/ctan-o-mat.
##
##-----------------------------------------------------------------------------
=head1 NAME
ctan-o-mat - Validate and upload a package for CTAN
=head1 SYNOPSIS
ctan-o-mat [options] [<package configuration>]
=head1 DESCRIPTION
This program can be used to automate the upload of a package to the
Comprehensive TeX Archive Network (https://www.ctan.org). The description
of the package is taken from a configuration file. Thus it can be updated
easily without the need to fill a Web form with the same old information
again and again.
The provided information is validated in any case. If the validation
succeeds and not only the validation is requested then the provided
archive file is placed in the incoming area of the CTAN for further
processing by the CTAN team.
In any case any finding during the validation is reported at the end
of the processing. Note that validation is the default; an official
submission has to be requested with an appropriate command-line
option.
B<ctan-o-mat> requires an Internet connection to the CTAN server. Even the
validation retrieves the known attributes and the basic constraints
from the server.
=head1 CONFIGURATION
The default configuration is read from a file with the same name as
the current directory an the extension .pkg. This file name can be
overwritten on the command line.
The configuration depends on the features currently supported by the
CTAN server.
Since these features can change over time the configuration is not
hard-coded in B<ctan-o-mat>. You can request an empty template of the
configuration via the command line parameter C<--init>.
=head1 OPTIONS
=over 4
=item -h
=item --help
Print this short summary about the usage and exit the program.
=item -i
=item --init
Create an empty template for a configuration on stdout.
=item --list licenses
List the known licenses of CTAN to the standard output stream.
Each license is represented as one line. The line contains the fields
key, name, free indicator. Those fields are separated by tab characters.
Afterwards the program terminates without processing any further arguments.
=item --config <package configuration>
=item --pkg <package configuration>
=item --package <package configuration>
Set the package configuration file.
=item -s
=item --submit
Upload the submission, validate it and officially submit it to CTAN it the
validation succeeds.
=item -v
=item --verbose
Print some more information during the processing (verbose mode).
=item --version
Print the version number of this program and exit.
=item --validate
=item -n
=item --noaction
Do not perform the final upload. The package is validated and the
resulting messages are printed.
=item <package>
This parameter is the name of a package configuration
(see section CONFIGURATION) contained in a file.
If not set otherwise the package configuration defaults to the
name of the current directory with C<.pkg> appended.
=back
=head1 ENVIRONMENT
The following environment variables are recognized by B<ctan-o-mat>.
=over 4
=item CTAN_O_MAT_URL
The value is the URL prefix for the CTAN server to be contacted. The default
is C<https://ctan.org/submit>. The complete URL is constructed by appending
C<validate>, C<upload>, or C<fields> to use the respective CTAN REST API.
=back
=head1 CONNECTING VIA PROXIES
If you need to connect to the Internet via a proxy then this can be achieved
by setting some environment variables before running B<ctan-o-mat>.
To redirect the request via the proxy simply define an environment variable
C<http_proxy> to point to the proxy host -- including protocol and port as
required. Note that the name of the environment variable is supposed to be in
B<lower> case.
=head1 AUTHOR
Gerd Neugebauer (gene@gerd-neugebauer.de)
=head1 BUGS
=over 4
=item *
The program can not be used without a working connection to the
Internet.
=back
=cut
use strict;
use FileHandle;
use File::Basename;
use Cwd;
use constant VERSION => '1.2';
#------------------------------------------------------------------------------
# Function: usage
# Arguments: none
# Returns: nothing (exits the program with status 0)
# Description: Render this script's own POD to stderr and exit.
#
sub usage {
use Pod::Text;
# NOTE(review): "new FileHandle(...)" is indirect object syntax; prefer
# FileHandle->new($0, 'r') if this is ever touched.
Pod::Text->new()
->parse_from_filehandle( new FileHandle( $0, 'r' ), \*STDERR );
exit(0);
}
#------------------------------------------------------------------------------
# Variable: $verbose
# Description: The verbosity indicator.
#
my $verbose = 0;
#------------------------------------------------------------------------------
# Variable: $debug
# Description: The debug indicator.
#
my $debug = 0;
#------------------------------------------------------------------------------
# Variable: $submit
# Description: The validation or submit indicator.
#
my $submit = undef;
#------------------------------------------------------------------------------
# Variable: $cfg
# Description: The name of the configuration file.
#
my $cfg = undef;
#------------------------------------------------------------------------------
# Variable: $CTAN_URL
# Description: The base URL for requesting information from the CTAN server.
#
my $CTAN_URL = $ENV{'CTAN_O_MAT_URL'} || 'https://ctan.org';
$CTAN_URL .= '/' if not $CTAN_URL =~ m/\/$/;
use Getopt::Long;
# Command-line handling.  Several options carry inline handlers that do
# their work and exit immediately (--init, --list, --version); the rest
# just set the flags/config declared above.
GetOptions(
"config=s" => \$cfg,
"debug" => \$debug,
"h|help" => \&usage,
# --init [NAME]: write an empty configuration template to stdout and exit.
"i|init:s" => sub {
local $_ = pkg_name_or_fallback( $_[1], '' );
( new CTAN::Pkg() )->add( pkg => $_ )
->write( new CTAN::Upload::Fields() );
exit(0);
},
# --list licenses: dump the known CTAN licenses and exit.
"list=s" => sub {
if ( $_[1] eq 'licenses' ) {
new CTAN::Licenses()->print();
}
else {
print STDERR "*** Unknown entity $_[1]\n";
}
exit(0);
},
"n|noaction" => sub { $submit = undef; },
"pkg=s" => \$cfg,
"package=s" => \$cfg,
"submit|upload" => sub { $submit = 1; },
"validate" => sub { $submit = undef; },
"v|verbose" => \$verbose,
"version" => sub {
print STDOUT VERSION, "\n";
exit(0);
},
);
# Main action: read the package configuration (positional argument,
# --config value, or directory-name fallback) and upload/validate it.
# NOTE(review): "new CTAN::Pkg()" is indirect object syntax.
new CTAN::Pkg()
->read( pkg_name_or_fallback( $ARGV[0] || $cfg, '.pkg' ) )
->upload($submit);
#------------------------------------------------------------------------------
# Function: pkg_name_or_fallback
# Arguments: $value the candidate value
#            $ext the extension appended to the fallback
# Description: Return $value when it is a defined, non-empty string;
#              otherwise derive a fallback from the basename of the
#              current working directory with $ext appended.
#
sub pkg_name_or_fallback {
    my ( $value, $ext ) = @_;

    return $value if defined $value and $value ne '';

    # Fallback: last path component of the current directory plus extension.
    my $fallback = cwd();
    $fallback =~ s|.*[/\\]||;
    return $fallback . $ext;
}
###############################################################################
package JSON::Parser;
#------------------------------------------------------------------------------
# Constructor: new
# Description: Construct an empty parser instance (no state is carried
#              between parse() calls).
#
sub new {
    my $proto = shift;
    my $class = ref($proto) || $proto;

    return bless {}, $class;
}
#------------------------------------------------------------------------------
# Method: parse
# Arguments:
# $json the JSON string to parse
# Returns: the Perl data structure (array ref / hash ref / scalar)
# Description: Parse the input string as a single JSON value and die when
#              trailing, unconsumed text remains after the value.
#
sub parse {
my ( $this, $json ) = @_;
my ( $result, $remainder ) = $this->scan($json);
chomp $remainder;
if ( $remainder ne '' ) {
die "*** Unprocessed JSON: $remainder\n";
}
return $result;
}
#------------------------------------------------------------------------------
# Method: scan
# Arguments:
# $json the JSON text still to be consumed
# Returns: ($value, $remainder) -- the next parsed JSON value and the
#          unconsumed rest of the input
# Description: Recursive-descent scanner for one JSON value: arrays,
#              objects, double-quoted strings (with the common backslash
#              escapes) and bare numbers/words.  Dies on malformed input.
#              Note: works on a local copy of the argument via $_.
#
sub scan {
my ( $this, $json ) = @_;
local $_ = $json;
s/^\s+//;
# Array: consume elements separated by commas until the closing bracket.
if ( m/^\[\s*/ ) {
my @a = ();
$_ = $';
while ( not m/^\]/ ) {
my ( $el, $remainder ) = $this->scan($_);
push @a, $el;
$_ = $remainder;
s/^\s*,\s*//;
}
# Drop the closing "]".
$_ = substr( $_, 1 );
return ( \@a, $_ );
}
# Object: consume key ":" value pairs until the closing brace.
elsif ( m/^\{\s*/ ) {
my %a = ();
$_ = $';
while ( not m/^\}/ ) {
my ( $key, $remainder ) = $this->scan($_);
$_ = $remainder;
s/^\s*:\s*//;
my ( $val, $remainder2 ) = $this->scan($_);
$_ = $remainder2;
$a{$key} = $val;
s/^\s*,\s*//;
}
# Drop the closing "}".
$_ = substr( $_, 1 );
return ( \%a, $_ );
}
# String: collect characters up to the closing quote, decoding the
# escapes \n \" \t \\ \r \b; any other backslash sequence keeps the
# backslash literally.
elsif ( $_ =~ m/^"/ ) {
$_ = $';
my $s = '';
while ( m/(\\.|")/ ) {
$s .= $`;
$_ = $';
if ( $& eq '"' ) {
return ( $s, $_ );
}
if ( $& eq '\\n' ) {
$s .= "\n";
}
elsif ( $& eq '\\"' ) {
$s .= '"';
}
elsif ( $& eq '\\t' ) {
$s .= "\t";
}
elsif ( $& eq '\\\\' ) {
$s .= "\\";
}
elsif ( $& eq '\\r' ) {
$s .= "\r";
}
elsif ( $& eq '\\b' ) {
$s .= "\b";
}
else {
$s .= "\\";
}
}
die "*** Missing end of string\n";
}
# Bare token: a run of digits or letters (numbers, true/false/null).
elsif ( m/^([0-9]+|[a-z]+)/i ) {
$_ = $';
$_ = $&;
return ( $_, $_ );
}
die "*** Parse error at: $_\n";
}
###############################################################################
package CTAN::Upload::Fields;
use LWP::UserAgent;
use LWP::Protocol::https;
use HTTP::Request::Common;
#------------------------------------------------------------------------------
# Variable: @parameter
# Description: The list of fields.
#
my @parameter = (); # FIXME
#------------------------------------------------------------------------------
# Constructor: new
# Description: Construct the field container and immediately populate it
#              from the CTAN server (see _load()).
#
sub new {
    my $proto = shift;
    my $class = ref($proto) || $proto;

    my $this = bless {}, $class;
    return $this->_load();
}
#------------------------------------------------------------------------------
# Method: _load
# Arguments: none
# Returns: $this, with one attribute-hash entry per field and the field
#          names appended to @CTAN::Upload::Fields::parameter
# Description: Retrieve the list of currently supported submission fields
#              from the CTAN server and parse the JSON response with
#              regexes (one top-level object per field).
#
sub _load {
my $this = shift;
my $url = $CTAN_URL . 'submit/fields';
print STDERR "--- Retrieving fields from CTAN..." if $::verbose;
print STDERR $url, "\n" if $debug;
my $response;
eval {
my $ua = LWP::UserAgent->new();
my $request = GET $url;
print STDERR "done\n" if $::verbose;
$response = $ua->request($request);
};
# NOTE(review): if the eval above died, $response is undef and the
# method calls below blow up with "Can't call method on undefined
# value" instead of the intended error message -- $@ is never checked.
die CTAN::ErrorHandler::format(
$response->decoded_content, $response->status_line
),
"\n"
if not $response->is_success;
local $_ = $response->decoded_content;
print STDERR $response->decoded_content, "\n\n" if $debug;
# One iteration per field object: capture the field name and its flat
# attribute list, then split the attributes into a hash.
while (m/\"([a-z0-9]+)\":\{([^{}]*)\}/i) {
my $f = $1;
my %a = ();
$_ = $';
my $attr = $2;
while ( $attr =~ m/\"([a-z0-9]+)\":([a-z0-9]+|"[^"]*")/i ) {
$attr = $';
$a{$1} = $2;
$a{$1} =~ s/(^"|"$)//g;
}
$this->{$f} = \%a;
push @CTAN::Upload::Fields::parameter, $f;
}
return $this;
}
###############################################################################
package CTAN::Licenses;
use LWP::UserAgent;
use LWP::Protocol::https;
use HTTP::Request::Common;
use Data::Dumper;
#------------------------------------------------------------------------------
# Constructor: new
# Description: Create a CTAN::Licenses instance and fill it with the
#    license list retrieved from the CTAN server.
#
sub new {
    my ($proto) = @_;
    my $self = bless [], ref($proto) || $proto;
    return $self->_load();
}
#------------------------------------------------------------------------------
# Method: _load
# Arguments: none
# Description: Retrieve a list of currently supported licenses from the
#    CTAN server and store the parsed JSON structure in $this->[0].
#    Dies when the server cannot be reached, returns an error status, or
#    delivers unparsable JSON.
#
sub _load {
    my $this = shift;
    my $url  = $CTAN_URL . 'json/1.3/licenses';
    print STDERR "--- Retrieving licenses from CTAN..." if $verbose;
    print STDERR $url, "\t" if $debug;
    my $response;
    eval {
        my $ua      = LWP::UserAgent->new();
        my $request = GET $url;
        $response = $ua->request($request);
    };
    # FIX: when the eval died, $response is undef; the old code skipped the
    # formatted die (guarded by "not $@") and then crashed on the undefined
    # $response below. Surface the real error instead. This also drops the
    # premature "done" that used to be printed inside the eval *before* the
    # request had completed, which produced a duplicate "done" line.
    die $@ if $@;
    die CTAN::ErrorHandler::format(
        $response->decoded_content, $response->status_line
        ),
        "\n"
        if not $response->is_success;
    print STDERR "done\n" if $verbose;
    local $_ = $response->decoded_content;
    eval {
        $this->[0] = new JSON::Parser()->parse($_);
    };
    if ($@) {
        # NOTE(review): this edits $_ (the response body), not $@ --
        # presumably to strip a leading numeric code; confirm intent.
        s/^[0-9]+ */*** /;
        die $_;
    }
    return $this;
}
#------------------------------------------------------------------------------
# Method: print
# Arguments: none
# Description: Print the licenses to stdout, one per line as
#    "<key>\t<name>\t<free|non-free>".
#
sub print {
    my $this = shift;
    foreach my $license ( @{ $this->[0] } ) {
        my $freeness = $license->{free} eq 'true' ? 'free' : 'non-free';
        print join( "\t", $license->{key}, $license->{name}, $freeness ), "\n";
    }
}
###############################################################################
package CTAN::ErrorHandler;
#------------------------------------------------------------------------------
# Method: format
# Arguments:
#   $json      the JSON list with the messages
#   $fallback  the fallback message if the first parameter is empty
# Description: Turn the server's JSON error response into a printable,
#   newline-separated string. Each JSON entry is expected to be a small
#   list (e.g. ["ERROR", "some text"]) whose parts are joined with ": ".
#
sub format {
    local $_ = shift;
    if ( $_ eq '' ) {
        # An empty body carries no message: fall back to the second
        # argument (typically the HTTP status line).
        return shift;
    }
    if (m/^(<!DOCTYPE html>|<html)/i) {
        # An HTML page instead of JSON means we are not talking to the
        # expected CTAN REST endpoint.
        return "Unexpected HTML response found under $CTAN_URL";
    }
    my $json;
    eval {
        $json = new JSON::Parser()->parse($_);
    };
    if ($@) {
        # NOTE(review): on a parse failure this rewrites $_ (the response
        # body), not $@ -- apparently to replace a leading numeric code
        # with "*** "; confirm that this is the intended message.
        s/^[0-9]+ */*** /;
        die $_;
    }
    # Join the parts of each message with ": " and the messages with
    # newlines.
    return join( "\n", map { join( ': ', @$_ ) } @$json );
}
###############################################################################
package CTAN::Pkg;
use LWP::UserAgent;
use LWP::Protocol::https;
use HTTP::Request::Common;
#------------------------------------------------------------------------------
# Constructor: new
# Description: Create an empty CTAN::Pkg object; the underlying storage
#    is a list of alternating key/value entries (see method add).
#
sub new {
    my ($proto) = @_;
    return bless [], ref($proto) || $proto;
}
#------------------------------------------------------------------------------
# Method: add
# Arguments: arbitrary many key/value pairs
# Description:
#   Append key/value pairs to the object's parameter list. A value for
#   the key 'file' is wrapped in an array reference (the form expected
#   by HTTP::Request::Common for file uploads). Processing stops at the
#   first undefined key or value.
#
sub add {
    my $this = shift;
    while (@_) {
        my $key = shift;
        my $val = shift;
        last if not defined $key or not defined $val;
        push @$this, $key => ( $key eq 'file' ? [$val] : $val );
    }
    return $this;
}
#------------------------------------------------------------------------------
# Method: read
# Arguments: $file the file name to be read
# Description:
#   This function parses a configuration file in (La)TeX form and stores
#   the key/value pairs found in this object (via method add). Simple
#   macros take their value in a brace group; environments
#   (\begin{x}...\end{x}) collect everything up to the matching \end as
#   value. The legal keywords are the fields advertised by the CTAN
#   server. Dies on syntax errors or unknown keywords.
#
sub read {
    my ( $this, $file ) = @_;
    die "*** Configuration file missing.\n" if not defined $file;
    # Fetch the list of legal keywords from the live CTAN server.
    my $fields = new CTAN::Upload::Fields();
    my $fd = new FileHandle($file)
        || die "*** Configuration file `$file' could not be read.\n";
    local $_;
    while (<$fd>) {
        # Strip TeX comments: whole comment lines, then anything after an
        # unescaped % (an escaped \% survives).
        s/^[ \t]*%.*//;
        s/([^\\])%.*/$1/;
        # Process each macro \keyword on the line; assigning the postmatch
        # $' back to $_ advances the cursor through the line.
        while (m/\\([a-z]+)/i) {
            $_ = $';
            my $keyword = $1;
            if ( $keyword eq 'begin' ) {
                die "$file:$.: missing {environment} instead of $_\n"
                    if not m/^[ \t]*\{([a-z]*)\}/i;
                $keyword = $1;
                my $val = '';
                $_ = $';
                # Accumulate input lines until the matching \end{...}.
                while ( not m/\\end\{$keyword\}/ ) {
                    $val .= $_;
                    $_ = <$fd>;
                    die "$file:$.: "
                        . "unexpected end of file while searching end of $keyword\n"
                        if not defined $_;
                }
                # Split the final line at \end{...}: the prematch $` still
                # belongs to the value; the postmatch $' is processed
                # further as ordinary input.
                m/\\end\{$keyword\}/;
                $_ = $';
                $val .= $`;
                # Trim leading and trailing whitespace of the value.
                $val =~ s/^[ \t\n\r]*//m;
                $val =~ s/[ \t\n\r]*$//m;
                $this->add( $keyword => $val );
            }
            elsif ( $keyword eq 'endinput' ) {
                # \endinput stops macro processing for the current line.
                last;
            }
            elsif ( defined $fields->{$keyword} ) {
                # A known simple field: its value is the next brace group.
                die "$file:$.: missing {environment} instead of $_\n"
                    if not m/^[ \t]*\{([^{}]*)\}/i;
                $_ = $';
                $this->add( $keyword => $1 );
            }
            else {
                die "$file:$.: undefined keyword $keyword\n";
            }
            # Drop a comment that may start the remainder of the line.
            s/^[ \t]*%.*//;
        }
    }
    $fd->close();
    return $this;
}
#------------------------------------------------------------------------------
# Method: upload
# Arguments: $submit  true to submit the package for publication,
#                     false to merely validate it
# Description: Connect to the CTAN server to upload or validate the
#    package. The collected key/value pairs (including the archive file)
#    are sent as a multipart/form-data POST request.
#
sub upload {
    my ( $this, $submit ) = @_;
    # Pick the progress message and the service endpoint for this mode.
    my ( $action, $service_url ) =
        $submit
        ? ( 'Sending to CTAN for submission',   $CTAN_URL . 'submit/upload' )
        : ( 'Uploading to CTAN for validation', $CTAN_URL . 'submit/validate' );
    print STDERR "--- $action..." if $verbose;
    my $ua       = LWP::UserAgent->new();
    my $response = $ua->request(
        POST(
            $service_url,
            'Content_Type' => 'multipart/form-data',
            'Content'      => $this
        )
    );
    print STDERR "done\n" if $verbose;
    die CTAN::ErrorHandler::format( $response->decoded_content,
        $response->status_line )
        . "\n"
        if not $response->is_success;
    if ( not $submit and $response->decoded_content eq '[]' ) {
        # An empty JSON list from the validation service means success.
        print "ok\n";
        print STDERR "--- The validation has succeeded.\n",
            "--- You can now submit your package to CTAN for publication.\n"
            if $verbose;
    }
    else {
        print CTAN::ErrorHandler::format( $response->decoded_content, 'ok' ),
            "\n";
    }
    return $this;
}
#------------------------------------------------------------------------------
# Method: write
# Arguments: $fields  the CTAN::Upload::Fields object describing the
#                     currently supported fields
# Description: Write a new configuration to stdout. For each field a
#    commented description and either a simple macro (\field{...}) or an
#    environment (\begin{field}...\end{field}) is emitted, pre-filled
#    with any value already stored in this object.
#
sub write {
    my $this   = shift;
    my %this   = @$this;
    my $fields = shift;
    print <<__EOF__;
% This is a description file for ctan-o-mat.
% It manages uploads of a package to
% CTAN -- the Comprehensive TeX Archive Network.
%
% The syntax is roughly oriented towards (La)TeX.
% Two form of the macros are used. The simple macros take one argument
% in braces. Here the argument may not contain embedded macros.
%
% The second form uses an environment enclosed in \\begin{}/\\end{}.
% In the long text fields logo macros can be used.
%
% You should enter your values between the begin and the end of the
% named type.
__EOF__
    local $_;
    foreach (@CTAN::Upload::Fields::parameter) {
        print <<__EOF__;
% -------------------------------------------------------------------------
% This field contains the $fields->{$_}->{'text'}.
__EOF__
        if ( defined $fields->{$_}->{'nullable'} ) {
            print "% The value is optional.\n";
        }
        if ( defined $fields->{$_}->{'url'} ) {
            print "% The value is a URL.\n";
        }
        if ( defined $fields->{$_}->{'email'} ) {
            print "% The value is an email address.\n";
        }
        if ( defined $fields->{$_}->{'file'} ) {
            print
                "% The value is the file name of the archive to be uploaded.\n";
            print "% It may have a relative or absolute directory.\n";
        }
        if ( defined $fields->{$_}->{'maxsize'} ) {
            print "% The value is restricted to ", $fields->{$_}->{'maxsize'},
                " characters.\n";
        }
        if ( defined $fields->{$_}->{'list'} ) {
            print "% Multiple values are allowed.\n\\$_\{}\n";
        }
        elsif ( defined $fields->{$_}->{'maxsize'}
            and $fields->{$_}->{'maxsize'} ne 'null'
            and $fields->{$_}->{'maxsize'} < 256 )
        {
            # Short fields are written as a one-line macro.
            my $v = $this{$_};
            $v = '' if not defined $v;
            print "\\$_\{$v\}\n";
        }
        elsif ( defined $fields->{$_}->{'file'}
            and $fields->{$_}->{'file'} eq 'true' )
        {
            my $v = $this{$_};
            $v = '' if not defined $v;
            print "\\$_\{$v\}\n";
        }
        else {
            # Long fields are written as an environment.
            my $v = $this{$_};
            if ( defined $v ) {
                # FIX: string concatenation must use '.'; the original '+'
                # numified both strings and reduced the value to 0.
                $v = "\n " . $v . "\n";
            }
            else {
                $v = '';
            }
            print "\\begin{$_}$v\\end{$_}\n";
        }
    }
}
#------------------------------------------------------------------------------
# Local Variables:
# mode: perl
# End:
| ge-ne/ctan-o-mat | ctan-o-mat.pl | Perl | bsd-3-clause | 19,694 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.