code stringlengths 2 1.05M | repo_name stringlengths 5 101 | path stringlengths 4 991 | language stringclasses 3 values | license stringclasses 5 values | size int64 2 1.05M |
|---|---|---|---|---|---|
#
# Copyright 2018 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::storagetek::sl::snmp::mode::components::psu;

use strict;
use warnings;
use storage::storagetek::sl::snmp::mode::components::resources qw($map_operational);

# SNMP column mapping for the power-supply table rows.
my $mapping = {
    slHdwSupplyName        => { oid => '.1.3.6.1.4.1.1211.1.15.4.8.1.2' },                          # supply label
    slHdwSupplyOperational => { oid => '.1.3.6.1.4.1.1211.1.15.4.8.1.4', map => $map_operational }, # status code mapped to a readable value
};
# Table entry OID: walked once so every supply row is fetched in a single request.
my $oid_slHdwSupplyEntry = '.1.3.6.1.4.1.1211.1.15.4.8.1';
# Queue the whole power-supply table for the shared SNMP bulk request.
sub load {
    my $self = shift;

    push @{$self->{request}}, { oid => $oid_slHdwSupplyEntry };
}
# Walk the fetched power-supply rows, report each supply's status and raise
# the plugin severity when a supply is not in an OK operational state.
sub check {
    my $self = shift;

    $self->{output}->output_add(long_msg => "Checking power supplies");
    $self->{components}->{psu} = { name => 'psus', total => 0, skip => 0 };
    return if ($self->check_filter(section => 'psu'));

    my $status_oid = $mapping->{slHdwSupplyOperational}->{oid};
    my $results    = $self->{results}->{$oid_slHdwSupplyEntry};
    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %$results)) {
        # Only iterate rows of the operational-status column.
        next unless ($oid =~ /^$status_oid\.(.*)$/);
        my $instance = $1;
        my $result = $self->{snmp}->map_instance(
            mapping  => $mapping,
            results  => $results,
            instance => $instance
        );

        next if ($self->check_filter(section => 'psu', instance => $instance));
        $self->{components}->{psu}->{total}++;

        $self->{output}->output_add(
            long_msg => sprintf(
                "power supply '%s' status is '%s' [instance: %s].",
                $result->{slHdwSupplyName}, $result->{slHdwSupplyOperational}, $instance
            )
        );

        # Escalate to short output only when the mapped severity is not OK.
        my $exit = $self->get_severity(label => 'operational', section => 'psu', value => $result->{slHdwSupplyOperational});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(
                severity  => $exit,
                short_msg => sprintf(
                    "Power supply '%s' status is '%s'",
                    $result->{slHdwSupplyName}, $result->{slHdwSupplyOperational}
                )
            );
        }
    }
}
1; | wilfriedcomte/centreon-plugins | storage/storagetek/sl/snmp/mode/components/psu.pm | Perl | apache-2.0 | 2,835 |
#
# Copyright 2015 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package centreon::common::radlan::mode::cpu;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
# Constructor: registers the mode options.
# Thresholds are comma-separated triplets "1sec,1min,5min" (see check_options).
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';

    my %arguments = (
        "warning:s"  => { name => 'warning', default => '' },
        "critical:s" => { name => 'critical', default => '' },
    );
    $options{options}->add_options(arguments => \%arguments);

    return $self;
}
# Split the comma-separated warning/critical options into per-interval
# thresholds (1sec,1min,5min) and validate each one, exiting with a usage
# error on the first invalid threshold.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::init(%options);

    ($self->{warn1s}, $self->{warn1m}, $self->{warn5m}) = split /,/, $self->{option_results}->{warning};
    ($self->{crit1s}, $self->{crit1m}, $self->{crit5m}) = split /,/, $self->{option_results}->{critical};

    # Validate in the original order; label/description pairs keep the
    # error messages byte-identical to the previous implementation.
    # Bug fix: the 5min critical threshold was previously validated against
    # the undefined key 'crit5' instead of 'crit5m', so an invalid 5min
    # critical value was never rejected.
    my @checks = (
        [ 'warn1s', 'warning (1sec)'  ],
        [ 'warn1m', 'warning (1min)'  ],
        [ 'warn5m', 'warning (5min)'  ],
        [ 'crit1s', 'critical (1sec)' ],
        [ 'crit1m', 'critical (1min)' ],
        [ 'crit5m', 'critical (5min)' ],
    );
    foreach my $check (@checks) {
        my ($label, $desc) = @$check;
        if (($self->{perfdata}->threshold_validate(label => $label, value => $self->{$label})) == 0) {
            $self->{output}->add_option_msg(short_msg => "Wrong $desc threshold '" . $self->{$label} . "'.");
            $self->{output}->option_exit();
        }
    }
}
# Fetch the RADLAN CPU utilisation scalars, check each interval's value
# against its thresholds, and emit output plus perfdata. Reports UNKNOWN
# when CPU measurement is disabled on the device.
sub run {
    my ($self, %options) = @_;
    # $options{snmp} = snmp object
    $self->{snmp} = $options{snmp};

    my $oid_rlCpuUtilEnable             = '.1.3.6.1.4.1.89.1.6.0';
    my $oid_rlCpuUtilDuringLastSecond   = '.1.3.6.1.4.1.89.1.7.0';
    my $oid_rlCpuUtilDuringLastMinute   = '.1.3.6.1.4.1.89.1.8.0';
    my $oid_rlCpuUtilDuringLast5Minutes = '.1.3.6.1.4.1.89.1.9.0';

    $self->{result} = $self->{snmp}->get_leef(
        oids => [
            $oid_rlCpuUtilEnable, $oid_rlCpuUtilDuringLastSecond,
            $oid_rlCpuUtilDuringLastMinute, $oid_rlCpuUtilDuringLast5Minutes
        ],
        nothing_quit => 1
    );

    if (defined($self->{result}->{$oid_rlCpuUtilEnable}) && $self->{result}->{$oid_rlCpuUtilEnable} == 1) {
        my $cpu1sec = $self->{result}->{$oid_rlCpuUtilDuringLastSecond};
        my $cpu1min = $self->{result}->{$oid_rlCpuUtilDuringLastMinute};
        my $cpu5min = $self->{result}->{$oid_rlCpuUtilDuringLast5Minutes};

        # Data-driven handling of the three intervals:
        # [ value, warning label, critical label, perfdata label ].
        my @metrics = (
            [ $cpu1sec, 'warn1s', 'crit1s', 'cpu_1s' ],
            [ $cpu1min, 'warn1m', 'crit1m', 'cpu_1m' ],
            [ $cpu5min, 'warn5m', 'crit5m', 'cpu_5m' ],
        );

        my @exits;
        foreach my $metric (@metrics) {
            my ($value, $warn_label, $crit_label) = @$metric;
            push @exits, $self->{perfdata}->threshold_check(
                value     => $value,
                threshold => [
                    { label => $crit_label, exit_litteral => 'critical' },
                    { label => $warn_label, exit_litteral => 'warning' }
                ]
            );
        }
        my $exit = $self->{output}->get_most_critical(status => \@exits);

        $self->{output}->output_add(severity => $exit,
                                    short_msg => sprintf("CPU Usage: %.2f%% (1sec), %.2f%% (1min), %.2f%% (5min)",
                                                         $cpu1sec, $cpu1min, $cpu5min));

        foreach my $metric (@metrics) {
            my ($value, $warn_label, $crit_label, $perf_label) = @$metric;
            $self->{output}->perfdata_add(label => $perf_label, unit => '%',
                                          value => $value,
                                          warning => $self->{perfdata}->get_perfdata_for_output(label => $warn_label),
                                          critical => $self->{perfdata}->get_perfdata_for_output(label => $crit_label),
                                          min => 0, max => 100);
        }
    } else {
        $self->{output}->output_add(severity => 'UNKNOWN',
                                    short_msg => sprintf("CPU measurement is not enabled."));
    }

    $self->{output}->display();
    $self->{output}->exit();
}
1;
__END__
=head1 MODE
Check cpu usage (RADLAN-rndMng).
=over 8
=item B<--warning>
Threshold warning in percent (1s,1min,5min).
=item B<--critical>
Threshold critical in percent (1s,1min,5min).
=back
=cut | s-duret/centreon-plugins | centreon/common/radlan/mode/cpu.pm | Perl | apache-2.0 | 6,957 |
# Auto-generated Paws request class: arguments for the Pinpoint
# UpdateApnsChannel API call (PUT on the APNS channel of an application).
package Paws::Pinpoint::UpdateApnsChannel;
  use Moose;
  # Request body: the APNS channel settings to apply.
  has APNSChannelRequest => (is => 'ro', isa => 'Paws::Pinpoint::APNSChannelRequest', required => 1);
  # Pinpoint application (project) ID; interpolated into the request URI.
  has ApplicationId => (is => 'ro', isa => 'Str', traits => ['ParamInURI'], uri_name => 'application-id', required => 1);

  use MooseX::ClassAttribute;

  # Class-level call metadata consumed by the Paws request machinery.
  class_has _stream_param => (is => 'ro', default => 'APNSChannelRequest');
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'UpdateApnsChannel');
  class_has _api_uri => (isa => 'Str', is => 'ro', default => '/v1/apps/{application-id}/channels/apns');
  class_has _api_method => (isa => 'Str', is => 'ro', default => 'PUT');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::Pinpoint::UpdateApnsChannelResponse');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::Pinpoint::UpdateApnsChannel - Arguments for method UpdateApnsChannel on Paws::Pinpoint
=head1 DESCRIPTION
This class represents the parameters used for calling the method UpdateApnsChannel on the
Amazon Pinpoint service. Use the attributes of this class
as arguments to method UpdateApnsChannel.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to UpdateApnsChannel.
As an example:
$service_obj->UpdateApnsChannel(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 B<REQUIRED> APNSChannelRequest => L<Paws::Pinpoint::APNSChannelRequest>
=head2 B<REQUIRED> ApplicationId => Str
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method UpdateApnsChannel in L<Paws::Pinpoint>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/Pinpoint/UpdateApnsChannel.pm | Perl | apache-2.0 | 2,099 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2019] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::Utils::Collector
=head1 SYNOPSIS
# Inherit this base module in your feature specific Collector
# instance:
package Bio::EnsEMBL::Funcgen::Collector::ResultFeature;
use base('Bio::EnsEMBL::Utils::Collector');
# ... and define package config variables
$Bio::EnsEMBL::Funcgen::Collector::bin_model = 'SIMPLE';
$Bio::EnsEMBL::Funcgen::Collector::window_sizes =
[ 30, 65, 130, 260, 450, 648, 950, 1296 ];
# Could replace 30 with 0 here for low density data at natural resolution
$Bio::EnsEMBL::Utils::Collector::bin_method =
'count'; # only used by collector
$Bio::EnsEMBL::Utils::Collector::packed_size = 2;
# ... or simply use this module in a script either defining package
# config variables, or passing as parameters to the constructor:
my $collector =
Bio::EnsEMBL::Utils::BaseCollector->new( -pack_template => 'v' );
$Bio::EnsEMBL::Funcgen::Collector::pack_template = 'v';
# Config variables can also be over-ridden by passing a config hash to
# the store_window_bins_by_Slice() method:
$collector->store_window_bins_by_Slice( $slice, (
-pack_template => 'v',
-packed_size => 2 ) );
# NOTE: Over-riding default config variables can cause problems when
# storing or fetching data. e.g. Fetch may revert to using defaults or
# table partitions may not match window sizes.
=head1 DESCRIPTION
This package is the base Collector class which contains generic
getter/setter methods along with the main 'collecting' methods which
perform the majority of the work in generating compressed data
collections optimised for web display. The bins produced are aimed at
storage in a BLOB representing an entire seq_region i.e. even bins with
no features/null data are encoded as a 0 score. Non-BLOB collections
are currently not supported.
If your Collection class defines a Bio::EnsEMBL::Feature, then its
adaptor should inherit from the relevant Collection class.
The minimum prerequisites of the input features/data are that they have
a start() and end() method. For instance a Bio::EnsEMBL::Features
generated from a database or parsed from a flat file.
NOTE: This Collector does not have a lightweight mode previously used
for dynamic/on the fly collecting i.e. it does not take advantage of
bypassing object creation via the related BaseFeatureAdaptor method.
=cut
package Bio::EnsEMBL::Utils::Collector;

use strict;
use warnings;

use Bio::EnsEMBL::Utils::Argument ('rearrange');
use Bio::EnsEMBL::Utils::Exception ('throw', 'deprecate');

### Global package config vars

# Defaults
our $max_view_width = 1000000; # Max bp width in location/detailed view

our $max_data_type_size = 16777216; # Default is 16MB for long BLOB
# This is really a guide value as this should be set in the inheriting
# Collector class by deducting the rest of the row size from this value.
# It is up to the inheritor to handle checking whether this size has been
# exceeded.

# NOTE: Theoretically the min window size is: slice_length/(16777216/2)
# So for human chr1: 249,250,621/(16777216/2) = 29.7 => 30. However,
# this size does not seem to directly translate to the MySQL
# max_allowed_packet_size. Increasing max_allowed_packet_size to 64MB
# solves this issue, and substr operation doesn't appear to incur any of
# the potential memory(4*) usage issues.

# Other global package variables which are set in the inheriting
# Collector class.
our ( $bin_model, $bin_method, $pack_template,
      $packed_size, $window_sizes );
=head2 new
Deprecated.
Args : None
Example :
my $collector = Bio::EnsEMBL::XXX::Collector::FEATURE->new();
$collector->store_windows_by_Slice($slice);
# Where XXX is, e.g. Compara, FuncGen etc.
Description: Simple new method to enable use of collector
when not inherited by a descendant of
Bio::EnsEMBL::DBSQL::BaseFeatureAdaptor
Returntype : Bio::EnsEMBL::XXX::Collector
Exceptions : None
Caller : Collector script
Status : At Risk
=cut
sub new {
  deprecate("new is deprecated and will be removed in e100.");

  # Bless an empty hash. Nothing is initialised here on purpose: this
  # class is not guaranteed to be first in @ISA for FeatureAdaptors, so
  # new() may never be called for them.
  my $class = shift;
  return bless {}, $class;
}
=head2 new_assembly
Deprecated.
Args : optional - string assembly version e.g. GRCh37
Example : $collector->new_assembly('GRCh37');
Description: Getter/Setter for new assembly version which should be
used to project only 0 wsize Collections.
Returntype : string
Exceptions : None
Caller : store_window_bins_by_Slice() or
write_collection() in inheriting Collector class.
Status : At Risk
=cut
sub new_assembly {
  my ( $self, $new_asm ) = @_;
  deprecate("new_assembly is deprecated and will be removed in e100.");

  # Setter when an assembly version is supplied; always acts as getter.
  $self->{'new_assembly'} = $new_asm if defined $new_asm;

  return $self->{'new_assembly'};
}
### Setter/Getter methods for basic/mandatory config
# Can also be set using package variables in the inheriting
# Collector/Adaptor or run script. Allows over-riding of defaults set
# in Adaptor/Collector.
# Package variables used here instead of attrs to enable easy
# default config in inheriting class/script method. Provided
# for easy/standardised fetch access outside of this package
# i.e. Collectors/Adaptors
=head2 max_data_type_size
Deprecated.
Args : optional - int Maximum size of collection in bytes
Example : $collector->max_data_type_size($new_max_size);
Description: Getter/Setter for max_data_type_size. The default is
currently set in this class as 16777216 (16MB), for
long BLOB. This is used by the write_collection()
method to determine when to build and store a compressed
collection.
Returntype : int
Exceptions : None
Caller : bins_per_record() and
write_collection() in inheriting Collector class.
Status : At Risk
=cut
sub max_data_type_size {
  my ( $self, $size ) = @_;
  deprecate("max_data_type_size is deprecated and will be removed in e100.");

  if ( defined $size ) {
    # Only a plain integer byte count is acceptable.
    throw("max_data_type_size must be a integer of bytes, not $size")
      unless $size =~ /^\d+$/;
    $max_data_type_size = $size;
  }
  elsif ( !defined $max_data_type_size ) {
    # Unreachable in practice: a default is set in this module.
    throw(   'You must define a '
           . '$Bio::EnsEMBL::Utils::Collector::max_data_type_size '
           . 'or pass -max_data_type_size config' );
  }

  return $max_data_type_size;
}
=head2 max_view_width
Deprecated.
Args : optional - int Maximum width of view
Example : $collector->max_view_width($new_max_width);
Description: Getter/Setter for max_view_width. The default is currently
set in this class as 1000000bp, for the maximum level of
zoom permitted by location view.
Returntype : int
Exceptions : None
Caller : general
Status : At Risk
=cut
# Getter/Setter for the maximum view width (bp). Validates that any new
# value is a plain integer.
sub max_view_width {
  my ( $self, $size ) = @_;
  # Bug fix: the deprecation warning previously named the wrong method
  # ('max_data_type_size' instead of 'max_view_width').
  deprecate("max_view_width is deprecated and will be removed in e100.");

  # Validate is sensible integer
  if ( defined($size) ) {
    if ( $size !~ /^\d+$/ ) {
      throw("max_view_width must be a integer, not $size");
    }
    $max_view_width = $size;
  } elsif ( !defined $max_view_width ) {
    # This should never happen as we have defaults in this module.
    throw( 'You must define a '
         . '$Bio::EnsEMBL::Utils::Collector::max_view_width '
         . 'or pass -max_view_width config' );
  }

  return $max_view_width;
}
=head2 bin_method
Deprecated.
Args[0] : optional - string name of bin method e.g. 'max_magnitude'
Args[1] : optional - Bio::EnsEMBL::Funcgen::Parsers::InputSet
Example : my $bin_method = $self->bin_method();
Description: Getter/Setter for bin_method, default is normally set in
the inheriting Collector class either by package variable
or by passing a config hash via the store methods.
Returntype : string
Exceptions : Throws if cannot set by package variable
Caller : general
Status : At Risk
=cut
sub bin_method {
  my ( $self, $bmethod, $config ) = @_;
  deprecate("bin_method is deprecated and will be removed in e100.");

  $bin_method = $bmethod if defined $bmethod;

  if ( !defined $bin_method ) {
    throw(   'You must define a '
           . '$Bio::EnsEMBL::Utils::Collector::bin_method '
           . 'or pass -bin_method config' );
  }

  # The binning implementation must exist as _calculate_<method>.
  throw("$bin_method is not a valid/available binning method")
    unless $self->can( "_calculate_" . $bin_method );

  # Run optional method-specific initialisation when provided.
  my $set_up_method = "_set_up_" . $bin_method;
  $self->$set_up_method($config) if $self->can($set_up_method);

  return $bin_method;
}
=head2 bin_model
Deprecated.
Args : optional - string bin model e.g. SIMPLE or COMPLEX
Example : my $bin_model = $self->bin_model;
Description: Getter/Setter for bin_model, default should be set in
inheriting Collector class. Currently only supports
'SIMPLE' bin model.
Returntype : string
Exceptions : Throws if bin_model is not SIMPLE
Caller : general
Status : At Risk
=cut
sub bin_model {
  my ( $self, $bmodel ) = @_;
  deprecate("bin_model is deprecated and will be removed in e100.");

  $bin_model = $bmodel if defined $bmodel;

  if ( !defined $bin_model ) {
    throw(   'You must define a '
           . '$Bio::EnsEMBL::Utils::Collector::bin_model '
           . 'or pass -bin_model config' );
  }

  # Only the SIMPLE bin model is implemented at present.
  if ( $bin_model ne 'SIMPLE' ) {
    throw(   'Bio::EnsEMBL::Utils::Collector does not yet support '
           . 'non-SIMPLE bin models' );
  }

  return $bin_model;
}
=head2 window_sizes
Deprecated.
Args : optional - arrayref of window sizes
Example :
foreach my $wsize ( @{ $collector->window_sizes } )
{ # Do some collecting
}
Description: Getter/Setter for window_sizes. Default should be set
in inheriting Collector (if the config is dynamic),
FeatureAdaptor class or script using package variable or
this method.
NOTE: Redefining these may cause a mismatch with the
table partition definition.
Returntype : arrayref of ints
Exceptions : Throws if cannot set a valid array of int window sizes
Caller : general
Status : At Risk - rename bin_sizes?
=cut
sub window_sizes {
  my ( $self, $sizes ) = @_;
  deprecate("window_sizes is deprecated and will be removed in e100.");

  $window_sizes = $sizes if defined $sizes;

  # The package variable must always hold a non-empty array ref,
  # whether or not a new value was passed just now.
  my $is_valid = ref($window_sizes)
    && ref($window_sizes) eq 'ARRAY'
    && scalar(@$window_sizes) > 0;

  if ( !$is_valid ) {
    throw('Must pass -windows_sizes in the config '
          . 'or define $Bio::EnsEMBL::Utils::Collector::window_sizes '
          . 'in your Collector as an array ref of integer window_sizes' );
  }

  return $window_sizes;
}
=head2 has_window_size
Deprecated.
Args : int - window size to validate
Example : if( $collector->has_window_size('30') ){
#Do something wrt to 30bp window size
}
Description: Simple utility method to validate whether this Collector
has a given window_size
Returntype : Boolean
Exceptions : Throws if window size not specified
Caller : general
Status : At Risk
=cut
# Returns true when $size is one of the configured window sizes.
sub has_window_size{
  my ( $self, $size ) = @_;
  deprecate("has_window_size is deprecated and will be removed in e100.");

  if(! defined $size){
    throw('You must pass a window size to validate');
  }

  # Bug fix: the previous implementation used an unanchored regex match
  # (grep(/$size/, ...)), so e.g. validating size 3 wrongly matched a
  # configured window size of 30. Window sizes are integers, so compare
  # numerically and exactly.
  return grep { $_ == $size } @$window_sizes;
}
### Getter/Setters for BLOB collection config
# NOTE: Overriding the defaults here may cause a mismatch when the data
# is retrieved.
=head2 pack_template
Deprecated.
Args : optional - string perl 'pack' template
Example : $self->pack_template('v');
Description: Getter/Setter for pack_template. Default should be set
in inheriting Collector (if the config is dynamic),
FeatureAdaptor class or script using package variable or
this method.
Returntype : string
Exceptions : Throws if cannot set pack_template from package variable
Caller : FeatureAdaptor::_obj_from_sth
Status : At Risk
=cut
sub pack_template {
  my ( $self, $template ) = @_;
  deprecate("pack_template is deprecated and will be removed in e100.");

  # Override the package-level template when one is supplied.
  $pack_template = $template if defined $template;

  throw(   'Must pass a per score '
         . '-pack_template in the config '
         . 'or define $Bio::EnsEMBL::Utils::Collector::pack_template '
         . 'in your Collector' ) unless defined $pack_template;

  return $pack_template;
}
=head2 packed_size
Deprecated.
Args : optional - int size of perl 'pack' template in bytes
Example : $self->packed_size(2);
Description: Getter/Setter for packed_size. Default should be set
in inheriting Collector (if the config is dynamic),
FeatureAdaptor class or script using package variable or
this method.
Returntype : string
Exceptions : Throws if cannot set pack_template from pacakge variable
Caller : current_packed_size() and
FeatureAdaptor::_obj_from_sth()
Status : At Risk
=cut
sub packed_size {
  my ( $self, $size ) = @_;
  deprecate("packed_size is deprecated and will be removed in e100.");

  $packed_size = $size if defined $size;

  if ( !defined $packed_size ) {
    throw(   'Must pass -packed_size(wrt to pack_template) config '
           . 'or define $Bio::EnsEMBL::Utils::Collector::packed_size '
           . 'in your Collector' );
  }

  # Must be an integer byte count matching the current pack_template.
  unless ( $packed_size =~ /^\d+$/ ) {
    throw(   "$packed_size is not an integer, "
           . "must pass a size integer for packed_size "
           . "which specifies size of pack_template:\t"
           . $pack_template );
  }

  return $packed_size;
}
=head2 bins_per_record
Deprecated.
Example : my $bin_per_records = $self->bin_per_record
Description: Simple method to calculate the max number of bins
allowed per record given the current config.
Returntype : int
Exceptions : None
Caller :
Status : At Risk
=cut
sub bins_per_record {
  deprecate("bins_per_record is deprecated and will be removed in e100.");
  # Whole number of packed scores that fit into one record.
  return int( $max_data_type_size / $packed_size );
}
=head2 current_packed_size
Deprecated.
Arg[0] : int - window size
Example : my $cps = $self->current_packed_size($wsize);
Description: Simple method to calculate the max number of bins
allowed per record given the current config.
Returntype : int
Exceptions : None
Caller :
Status : At Risk
=cut
sub current_packed_size {
  my ( $self, $wsize ) = @_;
  deprecate("current_packed_size is deprecated and will be removed in e100.");

  # Bytes currently cached for this window size.
  my $num_scores = scalar @{ $self->score_cache($wsize) };
  return $num_scores * $packed_size;
}
=head2 score_cache
Deprecated.
Arg[0] : int - window size
Example : my $cps = $self->current_packed_size($wsize);
Description: Handles caching of bin scores for each window size
Returntype : arrayref
Exceptions : Throws if no window size defined
Caller : current_packed_size() and store_collection()
methods
Status : At Risk
=cut
sub score_cache {
  my ( $self, $wsize, $scores ) = @_;
  deprecate("score_cache is deprecated and will be removed in e100.");

  throw('Must pass a window size argument') unless defined $wsize;

  # Lazily create the per-window cache; append any new scores.
  my $cache = $self->{'score_cache'}{$wsize} ||= [];
  push( @$cache, @$scores ) if defined $scores;

  return $cache;
}
=head2 collection_start
Deprecated.
Arg[0] : int - window_size
Arg[1] : optional int - seq_region_start
Example : my $coll_start->(150);
Description: Getter/Setter collection seq_region_start
Returntype : int
Exceptions : Throws if no window size defined
Caller : store_window_bin_by_Slice() and write_collection()
Status : At Risk
=cut
sub collection_start {
  my ( $self, $wsize, $sr_start ) = @_;
  deprecate("collection_start is deprecated and will be removed in e100.");

  throw('Must pass a window size argument') unless defined $wsize;

  $self->{'collection_start'}{$wsize} = $sr_start if defined $sr_start;

  return $self->{'collection_start'}{$wsize};
}
=head2 collection_end
Deprecated.
Arg[0] : int - window_size
Arg[1] : optional int - seq_region_end
Example : my $coll_end->(150);
Description: Getter/Setter collection seq_region_end
Returntype : int
Exceptions : Throws if no window size defined
Caller : inheriting Collector write_collection method
Status : At Risk
=cut
sub collection_end{
  my ( $self, $wsize, $sr_end ) = @_;
  deprecate("collection_end is deprecated and will be removed in e100.");

  throw('Must pass a window size argument') unless defined $wsize;

  if ( defined $sr_end ) {
    # Setter branch: store the new end coordinate.
    $self->{'collection_end'}{$wsize} = $sr_end;
  }
  else {
    return $self->{'collection_end'}{$wsize};
  }
}
=head2 collection_strand
Deprecated.
Arg[0] : int - window_size
Arg[1] : optional int - seq_region_strand
Example : my $coll_start->(0);
Description: Getter/Setter collection seq_region_strand
Returntype : int
Exceptions : Throws if no window size defined
Caller : inheriting Collector write_collection method
Status : At Risk - Collections are currently strandless
=cut
sub collection_strand {
  my ( $self, $wsize, $strand ) = @_;
  deprecate("collection_strand is deprecated and will be removed in e100.");

  throw('Must pass a window size argument') unless defined $wsize;

  # NOTE: Collections are currently strandless; the value is stored only.
  $self->{'collection_strand'}{$wsize} = $strand if defined $strand;

  return $self->{'collection_strand'}{$wsize};
}
### Here follows the actual working methods
=head2 _get_Slice_chunks
Deprecated.
Description: Defines the optimal set of slice chunks to use for
generating collections such that redundant fetches
are minimized.
Returntype : hashref of window_size chunk size pairs
Exceptions : Throws if no window sizes or max_view_width defined
Caller : store_window_bin_by_Slice()
Status : At Risk
=cut
# Determine the optimal set of slice chunk lengths for collection building
# such that every configured window size divides its chunk exactly and
# redundant fetches are minimised. Returns (and caches) a hashref of
# { chunk_length => [window_sizes] }.
sub _get_Slice_chunks {
  my $self = shift;
  deprecate("_get_Slice_chunks is deprecated and will be removed in e100.");

  if ( !defined($window_sizes) || !defined($max_view_width) ) {
    throw( 'You must pass both a window_size array ref '
         . 'and max_view_width arguments' );
  }

  if ( !defined( $self->{'_slice_chunks'} ) ) {
    # Calculate sensible slice length based on window sizes
    my @wsizes = sort { $a <=> $b } @$window_sizes;

    # Handle calculating only 0 wsize
    if ( scalar(@wsizes) == 1
         && $wsizes[0] == 0 )
    {
      return { $max_view_width => [0] };
    }

    my $multiplier    = int( $max_view_width/$wsizes[$#wsizes] );
    my $chunk_length  = $multiplier*$wsizes[$#wsizes];
    my $not_divisible = 1;

    my %chunk_windows; # Registry of chunk lengths to run with windows
    my %workable_chunks = map { $_ => {} } @wsizes;

    # get rid of natural resolution as this will always work
    delete $workable_chunks{'0'};

    while ( $not_divisible && $chunk_length != 0 ) {
      $not_divisible = 0;

      foreach my $wsize (@wsizes) {
        if ( $wsize == 0 ) {
          # Special wsize for normal data
          next;
        }

        # Set not divisible if modulus is true
        if ( $chunk_length % $wsize ) {
          $not_divisible = 1;
        } else {
          $workable_chunks{$wsize}{$chunk_length} = [];
        }
      }

      # Gradually shrink the length until we find a workable slice
      # length for all windows.
      if ($not_divisible) {
        $chunk_length -= $wsizes[$#wsizes];
      }
    }

    my %chunk_sets;

    if ( $chunk_length == 0 ) {
      print "Could not find chunk length "
        . "for all window sizes, "
        . "attempting to subset windows "
        . "using alternate slice length\n";

      foreach my $wsize ( keys(%workable_chunks) ) {
        # Loop through windows, seeing if they are workable in the other
        # windows.
        foreach my $chunk ( keys( %{ $workable_chunks{$wsize} } ) ) {
          foreach my $other_wsize ( keys %workable_chunks ) {
            next if $wsize == $other_wsize;

            if ( exists( $workable_chunks{$other_wsize}{$chunk} ) ) {
              # only push it onto the other wsize, as we will do the
              # reverse later
              $chunk_sets{$chunk}{$wsize} = undef;
            }
          }
        }
      }

      # %chunk_sets represents co-occurrence of wsizes with respect to
      # chunks. Take the set which has the most windows and the longest
      # chunk. Then get the largest which handles the rest.

      # define possible set lengths
      my $i = 0;
      my %set_lengths;
      map { $set_lengths{$i} = []; $i++ } @wsizes;

      # get rid of natural resolution as this will always work
      delete $set_lengths{'0'};

      # Store chunks lengths for each set size
      foreach my $chunk ( keys(%chunk_sets) ) {
        my $set_size = scalar( values( %{ $chunk_sets{$chunk} } ) );
        push( @{ $set_lengths{$set_size} }, $chunk );
      }

      # Get the biggest set with the longest length;
      # Scalar here as we are disregarding natural resolution of 0 in
      # loop.
      my $largest_size      = scalar(@wsizes);
      my $found_largest_set = 0;

      while ( !$found_largest_set ) {
        $largest_size--;

        if ( scalar( @{ $set_lengths{$largest_size} } ) > 0 ) {
          $found_largest_set = 1;
        }
      }

      my ($largest_chunk) =
        sort { $b <=> $a } @{ $set_lengths{$largest_size} };

      my @largest_windows = keys %{ $chunk_sets{$largest_chunk} };
      @{ $chunk_windows{$largest_chunk} } = @largest_windows;

      print "Largest chunk $largest_chunk($largest_size) "
        . "contains windows: @largest_windows\n";

      my %remaining_windows = map { $_ => {} } @wsizes;

      # get rid of natural resolution as this will always work
      delete $remaining_windows{'0'};
      map { delete $remaining_windows{$_} } @largest_windows;

      my $remaining_set_size = scalar( keys(%remaining_windows) );

      # Use array here for practicality, would need to maintain hash if
      # we need to iterate.
      my @rwindows = keys(%remaining_windows);

      # Could be one window, but this will not be in the co-occurrence
      # hash %chunk_sets.
      my $next_chunk;

      if ( scalar(@rwindows) == 1 ) {
        my ($last_window) = @rwindows;

        # Find a suitably large chunk for this one window.
        $multiplier = int( 500000/$last_window );
        $next_chunk = $multiplier*$last_window;
      } else {
        foreach my $chunk ( sort { $b <=> $a }
                            @{ $set_lengths{$remaining_set_size} } )
        {
          my $seen_count = 0;

          foreach my $rwindow (@rwindows) {
            # NOTE(review): grepping over values() (which are all undef
            # here, see the `= undef` assignment above) looks suspect —
            # keys() may have been intended; confirm before changing.
            if ( grep /$rwindow/,
                 ( values( %{ $chunk_sets{$chunk} } ) ) )
            {
              $seen_count++;
            }
          }

          if ( $seen_count == $remaining_set_size ) {
            $next_chunk = $chunk;
            last;
          }
        }
      }

      # Bug fix: the registry assignment previously ran *before* the
      # defined() check, so a failed search autovivified an entry under
      # an undef key (with a warning) just before throwing.
      if ( defined($next_chunk) ) {
        @{ $chunk_windows{$next_chunk} } = @rwindows;

        print "Found next chunk length $next_chunk "
          . "contains remaining windows:\t@rwindows\n";
      } else {
        warn "Need to write iterative method for set definition";
        throw( 'Could not find workable slice length '
             . 'for remaining windows: '
             . join( ', ', @rwindows ) );
      }
    } else {
      @{ $chunk_windows{$chunk_length} } = keys(%workable_chunks);

      print "Found workable chunk length $chunk_length "
        . "for all window sizes:\t"
        . join( ' ', @{ $chunk_windows{$chunk_length} } ) . "\n";
    }

    $self->{'_slice_chunks'} = \%chunk_windows;
  } ## end if ( !defined( $self->...))

  return $self->{'_slice_chunks'};
} ## end sub _get_Slice_chunks
=head2 set_config
Deprecated.
Arg[0] : optional hash - parameter hash(see above methods for more info):
WINDOW_SIZES => array ref - subset of defined window
sizes
BIN_METHOD => string
MAX_VIEW_WIDTH => int
MAX_DATA_TYPE_SIZE => int
PACK_TEMPLATE => string
PACKED_SIZE => int
BIN_MODEL => string
NEW_ASSEMBLY => string
METHOD_CONFIG => hash of method specific config params
SKIP_ZERO_WINDOW => boolean - skips generation of 0 wsize
this is used if already generated
from an assembly projection.
NOTE: Over-riding any of the default config may cause
problems when storing or retrieving Collection data,
except sub sets of default window sizes.
Description: This method replaces the constructor as new will not be
called for Adaptor based Collectors.
Separating this from the store method is currently
redundant as jobs are normally submitted in Slice based
jobs. However, this will be required if the store method
is further separated into fetch/generate and store methods
Returntype : None
Exceptions : Throws if no window sizes or max_view_width defined
Caller : Inheritor Collector e.g. Bio::EnsEMBL::Funcgen:Collector::ResultFeature
or script.
Status : At Risk
=cut
sub set_config {
    my ( $self, %config ) = @_;
    deprecate("set_config is deprecated and will be removed in e100.");

    # Unpack the -KEY style parameter hash (see the POD above for the
    # meaning of each parameter).
    my ( $wsizes, $bmethod, $mv_width,
         $md_type_size, $template, $psize,
         $bmodel, $new_assm, $skip_zero_window,
         $method_config )
        = rearrange( [ 'WINDOW_SIZES', 'BIN_METHOD',
                       'MAX_VIEW_WIDTH', 'MAX_DATA_TYPE_SIZE',
                       'PACK_TEMPLATE', 'PACKED_SIZE',
                       'BIN_MODEL', 'NEW_ASSEMBLY',
                       'SKIP_ZERO_WINDOW', 'METHOD_CONFIG' ],
                     %config );

    ### VAILDATE/SET VARS/CONFIG

    # Attrs used in this method
    $self->bin_method( $bmethod, $method_config );
    $self->bin_model($bmodel);
    $self->window_sizes($wsizes);

    # Set to undef if we have empty array? To change this we need to
    # pass the config hash -window_sizes conditionally
    # This currently overwrite the defaults!
    # if ( ref($window_sizes) eq 'ARRAY'
    #    && scalar( @{$window_sizes} ) == 0 )
    # {
    #   $window_sizes = undef;
    # }

    # Attrs used in other (store) methods
    $self->pack_template($template);
    $self->packed_size($psize);
    $self->max_data_type_size($md_type_size);
    $self->max_view_width($mv_width);

    # Other vars
    $self->new_assembly($new_assm);
    $self->{'_only_natural'} = 0;

    # NOTE(review): $window_sizes below is never declared in this sub;
    # it is presumably a package-level variable (distinct from the
    # caller-supplied $wsizes unpacked above) -- confirm it is declared
    # at file scope before relying on that distinction.
    $self->{'_store_natural'} = grep /^0$/, @$window_sizes;

    ### Set window_sizes

    if ( $self->new_assembly() ) {
        # Assembly projection only works from window_size 0, so force it.
        print "Assembly projection may cause problems "
            . "for large Collections, "
            . "defaulting to window_sizes = (0)\n";

        if ( $skip_zero_window ) {
            throw( "You cannot -skip_zero_window or "
                 . "omit 0 from -window_sizes "
                 . "when projecting to a new assembly($new_assm) "
                 . "which should only be generated using window_size=0" );
        }

        # Then build the bins on the projected 0 level single Features
        # Test we haven't explicitly set window_sizes to be something else
        if ( defined($wsizes)
             && !( scalar(@$wsizes) == 1 && $wsizes->[0] == 0 ) )
        {
            throw( "You have set window_sizes config "
                 . "which are not safe when projecting to "
                 . "a new assembly($new_assm), "
                 . "please omit window_sizes config or set to 0" );
        }
        $self->window_sizes( [0] );
    } else {
        if ( $wsizes && $skip_zero_window &&
             ( grep /^0$/, @$wsizes )) {
            #Only test passed params not default config
            throw( "You have specied skip_zero_window "
                 . "and window_size 0 in your parameters, "
                 . "please remove one of these" );
        }
        elsif ( defined($window_sizes) && !grep /^0$/, @$window_sizes ) {
            $skip_zero_window = 1;
            # re-add 0 window as we need this to build the collections
            # see ...
            unshift( @{$window_sizes}, 0 );
        }
    }

    # Natural resolution alone means no binned windows at all.
    if ( $self->{'_store_natural'} && scalar( @{$window_sizes} ) == 1 ) {
        $self->{'_only_natural'} = 1;
    }
    if ($skip_zero_window) {
        $self->{'_store_natural'} = 0;
    }
    return;
} ## end sub set_config
=head2 store_window_bins_by_Slice
Deprecated.
Arg[0] : Bio::EnsEMBL::Slice
Example : $collector->store_window_bins_by_Slice($slice);
Description: This is the main run method, it loops through
optimal slice chunks from _define_window_chunks,
calls _bin_features_by_Slice as appropriate and
calls write_collection in the inheriting Collector
class/script.
Returntype : None
Exceptions : Throws if Bio::EnsEMBL::Slice is not defined
Caller : store methods in inheriting Collector class/script
Status : At Risk
=cut
sub store_window_bins_by_Slice {
    my ( $self, $slice ) = @_;
    deprecate("store_window_bins_by_Slice is deprecated and will be removed in e100.");

    warn "Need to be careful here "
       . "about cleaning start end strand caches between "
       . "serially run slices";

    if ( !( defined($slice)
            && ref($slice)
            && $slice->isa('Bio::EnsEMBL::Slice') ) )
    {
        throw('You must pass a valid Bio::EnsEMBL::Slice');
    }

    # Rollback previously stored features.
    # Change 'can' to empty method stubb with pod ???
    if ( $self->can('rollback_Features_by_Slice') ) {
        $self->rollback_Features_by_Slice($slice);
    } else {
        warn ref($self)
           . " cannot rollback_Features_by_Slice. "
           . "This may result in storage failure "
           . "or duplicate Collections if there is pre-existing data";
    }

    ### PROCESS CHUNKS
    # chunk length => arrayref of window sizes built within that chunk,
    # as computed by _get_Slice_chunks.
    my %chunk_windows = %{ $self->_get_Slice_chunks };
    my (%counts);
    my $store_natural = $self->{'_store_natural'};
    my $only_natural = $self->{'_only_natural'};
    $counts{0} = 0;    # Set natural res count to 0
    my $slice_end = $slice->end;
    my $orig_start = $slice->start;
    my $region = $slice->coord_system_name;
    my $version = $slice->coord_system->version;
    my $seq_region_name = $slice->seq_region_name;
    my $strand = $slice->strand;

    # Warn if this is not a full slice. Version needed in case we are
    # projecting from a non-default version slice
    my $full_slice =
        $slice->adaptor->fetch_by_region( $region, $seq_region_name, undef,
                                          undef, undef, $version );
    if ( ( $full_slice->start() != $orig_start )
         || ( $full_slice->end() != $slice_end ) )
    {
        warn "Generating collections using sub-Slices "
           . "can result in data issues/artifacts";
        # Last chunk might not be the correct window length. Test
        # slices less than chunk length can cause failures in
        # _bin_features_by_window_sizes others?
    }

    # Set the initial collection_start to orig_start. This is not the
    # case for 0 wsize where it must always be the true feature start.
    # NOTE(review): $window_sizes here is not declared in this sub;
    # presumably the package-level list of configured window sizes --
    # confirm at file scope.
    for my $wsize (@$window_sizes) {
        if ( $wsize == 0 ) { next }
        $self->collection_start( $wsize, $orig_start );
        # Also reset collection end and score cache in case we are running
        # serially.
        $self->{collection_end}{$wsize} = undef;
        $self->{'score_cache'}{$wsize} = [];
    }

    my $first_chunk_length = 1;

    foreach my $chunk_length ( sort keys %chunk_windows ) {
        print "Processing windows "
            . join( ', ', @{ $chunk_windows{$chunk_length} } )
            . " with chunk length $chunk_length\n";

        # Set window counts to 0
        map $counts{$_} = 0, @{ $chunk_windows{$chunk_length} };

        # May need to reset flat file parser handle or other caches via
        # inheriting Collector
        if ( !$first_chunk_length ) {
            # Change 'can' to empty method stubb with pod???
            if ( $self->can('reinitialise_input') ) {
                $self->reinitialise_input();
            }
        }
        $first_chunk_length = 0;

        # Now walk through slice using slice length chunks and build all
        # windows in each chunk.
        my $in_slice = 1;
        my $start_adj = 0;
        my ( $sub_slice, $sub_end, $features, $bins );
        my $sub_start = 1;
        my $slice_length = $slice->length();

        # Always create in local coords for fetch
        # Then change to seq_region coords for store if required
        while ($in_slice) {
            $sub_start += $start_adj;
            $sub_end = $sub_start + $chunk_length - 1;
            if ( $sub_end >= $slice_length ) {
                # Surplus bins are removed in store/write_collection in caller
                $in_slice = 0;
            }
            $sub_slice =
                $slice->adaptor->fetch_by_region( $region, $seq_region_name,
                                                  $sub_start + $orig_start - 1,
                                                  $sub_end + $orig_start - 1,
                                                  $strand, $version );
            # Can't subslice as this will not clip if we go over the length of
            # the slice, unlike normal slice fetching. Will clipping the end
            # to the slice end cause any problems here? How will this affect
            # bin clipping?

            ### Grab features and shift chunk coords
            $features = $self->get_Features_by_Slice($sub_slice);

            # warn "Binning "
            #   . scalar(@$features)
            #   . " Features for chunk length $chunk_length, on Slice "
            #   . $sub_slice->name;

            # Guard: refuse to build Collections from already-binned
            # (non-zero window size) Collection input.
            if ( ( @{$features} )
                 && ref( $features->[0] ) =~ /Bio::EnsEMBL::Utils::Collection/ )
            {
                # Would need to create base module with generic methods:
                # window_size, ...
                # Check that the returned feature/collections support window_size.
                # All Collections should be able to
                if ( $features->[0]->can('window_size') ) {
                    if ( $features->[0]->window_size != 0 ) {
                        throw( "You are trying to generated Collections from "
                             . "a non-zero window sized Collection:\t"
                             . $features->[1]->{'window_size'} );
                    }
                    # This should never happen
                    # if ( !$skip_zero_window ) {
                    #   throw( 'You have retrieved data from a Collection '
                    #        . 'which without using -skip_zero_window '
                    #        . 'i.e. you are trying to generate overwrite '
                    #        . 'the data you are generating the Collections from' );
                    # }
                } else {
                    throw( 'Something is wrong, '
                         . 'the Collection you have retrieved '
                         . 'does not support the method window_size' );
                }
            } ## end if ( ( @{$features} ) ...)

            # Set collection start here for 0 window_size
            if ( @{$features}
                 && $store_natural
                 && !defined( $self->collection_start(0) ) )
            {
                $self->collection_start( 0,
                                         $features->[0]->start + $sub_start );
            }

            if ($in_slice) {
                $start_adj = $chunk_length;
            }

            # Collect features into wsize bins
            if ( !$only_natural ) {
                # Get hashref of wsize=>bin array pairs
                $bins =
                    $self->_bin_features_by_Slice_window_sizes(
                        -slice => $sub_slice,
                        -window_sizes => $chunk_windows{$chunk_length},
                        -features => $features, );
            }

            # Handle 0 wsize
            # NOTE(review): $bin_model is not declared in this sub;
            # presumably a package-level variable.
            if ($store_natural) {
                foreach my $feature ( @{$features} ) {
                    $counts{0}++;
                    if ( $bin_model eq 'SIMPLE' ) {
                        $self->collection_start( 0, $feature->start + $sub_start );
                        $self->write_collection(
                            0,
                            $slice,    # Pass Slice to sub-slice when storing
                            $feature->end + $sub_start,
                            $feature->strand,    # Need to pass strand for 0 resolution
                            $feature->scores, );
                    }
                }
                print "Window size 0 (natural resolution) has "
                    . scalar( @{$features} )
                    . " feature bins for:\t"
                    . $sub_slice->name . "\n";
            }

            # Now store collections for wsizes >0
            my $num_bins;
            foreach my $wsize ( sort keys( %{$bins} ) ) {
                $num_bins = scalar( @{ $bins->{$wsize} } );
                $counts{$wsize} += $num_bins;
                if ( $bin_model eq 'SIMPLE' ) {
                    $self->write_collection(
                        $wsize,
                        $slice,
                        #$sub_start,
                        $sub_end,
                        $slice->strand,    # This is most likely 1!
                        # Override this with 0 in descendant Collector if required.
                        $bins->{$wsize}, );
                } else {
                    throw( 'Bio::EnsEMBL::Utils::Collector '
                         . 'does not yet support non-SIMPLE bin models' );
                    # i.e. More than one score
                }
            }
        } ## end while ($in_slice)

        # Turn off storing of natural resolution for next chunk length sets
        $store_natural = 0;
    } ## end foreach my $chunk_length ( ...)

    # Write last collections for each wsize
    foreach my $wsize (@$window_sizes) {
        if ( ( $wsize == 0 && !$store_natural )
             || ( $wsize != 0 && $only_natural ) )
        {
            next;
        }
        print "Writing final $wsize window_size collection, "
            . "this may result in slightly different "
            . "bin numbers from counts due to removing "
            . "overhanging bins past end of slice\n";
        $self->write_collection( $wsize, $slice );
    }

    # Print some counts
    foreach my $wsize ( sort ( keys %counts ) ) {
        print "Generated "
            . $counts{$wsize}
            . " bins for window size $wsize for "
            . $slice->name . "\n";
        # Some may have failed to store if we are projecting to a new
        # assembly.
    }

    return;
} ## end sub store_window_bins_by_Slice
=head2 _bin_features_by_Slice_window_sizes
Deprecated.
Args[0] : Bio::EnsEMBL::Slice
Args[1] : ARRAYREF of window sizes
Args[2] : ARRAYREF of features with start and end method
e.g. Bio::EnsEMBL::Features
Example :
$bins =
$self->_bin_features_by_window_sizes(
-slice => $slice,
-window_sizes => $chunk_windows{$chunk_length},
-features => $features, );
Description: Bins feature scores for a given list of window sizes and
predefined method.
Returntype : HASHREF of scores per bin per window size
Exceptions : None
Caller : store_window_bins_by_Slice
Status : At Risk
=cut
sub _bin_features_by_Slice_window_sizes {
    my ( $self, @args ) = @_;
    deprecate("_bin_features_by_Slice_window_sizes is deprecated and will be removed in e100.");
    my ( $slice, $wsizes, $features ) =
        rearrange( [ 'SLICE', 'WINDOW_SIZES', 'FEATURES' ], @args );

    # Generate these once in caller?
    # NOTE(review): $bin_method is not declared in this sub; presumably a
    # package-level variable set via bin_method() -- confirm at file scope.
    my $calc_method = '_calculate_' . $bin_method;
    my $post_method = '_post_process_' . $bin_method;

    # Do this conditional on the Collection type i.e. is
    # collection seq_region blob then no else yes Would need
    # $Bio::EnsEMBL::Utils::Collector::collection_format=BLOB|STANDARD
    # if ( !defined($features) || !@{$features} ) { return {} }

    # Set up some hashes to store data by window_size
    my ( %bins, %nbins, %bin_counts );
    my $slice_start = $slice->start();
    my $slice_length = $slice->length();

    # Set up some bin data for the windows
    foreach my $wsize (@$wsizes) {
        $nbins{$wsize} = int( $slice_length/$wsize );    # int rounds down
        # nbins is index of the bin not the 'number'
        # Unless $slice_length is a multiple!
        if ( !( $slice_length % $wsize ) ) { $nbins{$wsize}-- }

        # Create default bins with 0
        $bins{$wsize} = [];
        map { $bins{$wsize}->[$_] = 0 } ( 0 .. $nbins{$wsize} );

        # Set bin counts to 0 for each bin
        $bin_counts{$wsize} = [];
        # This is adding an undef to the start of the array!?
        # NOTE(review): the map below iterates over the bin *values* (all
        # 0 at this point), so it only ever writes index 0; the foreach
        # that follows repeats the same work. Looks redundant -- confirm
        # before removing.
        map { $bin_counts{$wsize}->[ ($_) ] = 0 } @{ $bins{$wsize} };
        foreach my $bin ( @{ $bins{$wsize} } ) {
            $bin_counts{$wsize}->[$bin] = 0;
        }
    }

    my $feature_index = 0;
    my ( $bin_index, @bin_masks );

    # Dispatch each feature to the configured bin method for every
    # requested window size, clamping the end bin to the last bin index.
    foreach my $feature ( @{$features} ) {
        # Set up the bins for each window size
        foreach my $wsize (@$wsizes) {
            my $start_bin = int( ( $feature->start )/$wsize );
            my $end_bin = int( ( $feature->end )/$wsize );
            if ( $end_bin > $nbins{$wsize} ) {
                $end_bin = $nbins{$wsize};
            }
            $self->$calc_method( $feature, $start_bin, $end_bin,
                                 $wsize, \%bins, \%bin_counts );
        }
    }

    # Now do post processing of bins if required
    if ( $self->can($post_method) ) {
        $self->$post_method( \%bins, \%bin_counts );
    }

    return \%bins;
} ## end sub _bin_features_by_Slice_window_sizes
# end sub _bin_features_by_Slice
### Here follows the bin methods
# These may also be defined in the inheriting Collector class. No tests
# as these are internal and require speed.
=head2 _calculate_count
Deprecated.
Args[0] : feature e.g. Bio::EnsEMBL::Feature
Args[1] : int - start bin
Args[2] : int - end bin
Args[3] : int - window_size
Args[4] : hashref - score bins
Example : $self->$calc_method
Description: Adds count to bins which this feature overlaps
Returntype : None
Exceptions : None
Caller : _bin_features_by_window_sizes
Status : At Risk
=cut
sub _calculate_count {
    my ( $self, $feature, $first_bin, $last_bin, $wsize, $bins_ref ) = @_;
    deprecate("_calculate_count is deprecated and will be removed in e100.");
    # Increment the count of every bin this feature overlaps.
    foreach my $idx ( $first_bin .. $last_bin ) {
        $bins_ref->{$wsize}[$idx]++;
    }
    return;
}
=head2 _calculate_average_score
Deprecated.
Args[0] : feature e.g. Bio::EnsEMBL::Feature
Args[1] : int - start bin
Args[2] : int - end bin
Args[3] : int - window_size
Args[4] : hashref - score bins
Example : $self->$calc_method
Description: Adds score to bins which this feature overlaps
Returntype : None
Exceptions : None
Caller : _bin_features_by_window_sizes
Status : At Risk
=cut
sub _calculate_average_score {
    my ( $self, $feature, $first_bin, $last_bin, $wsize, $bins_ref,
         $bin_counts_ref ) = @_;
    deprecate("_calculate_average_score is deprecated and will be removed in e100.");
    # Unweighted accumulation: every bin the feature overlaps receives
    # the feature's full score plus a count increment; the division into
    # an average happens later in _post_process_average_score.
    my $score = $self->get_score_by_Feature($feature);
    foreach my $idx ( $first_bin .. $last_bin ) {
        $bins_ref->{$wsize}[$idx] += $score;
        $bin_counts_ref->{$wsize}[$idx]++;
    }
    return;
}
=head2 _post_process_average_score
Deprecated.
Args[0] : hashref - score bins
Args[1] : hashref - count bins
Example : $self->$post_method
Description: Post processes bins to calculate average score
Returntype : None
Exceptions : None
Caller : _bin_features_by_window_sizes
Status : At Risk
=cut
sub _post_process_average_score {
    my ( $self, $bins_ref, $bin_counts_ref ) = @_;
    deprecate("_post_process_average_score is deprecated and will be removed in e100.");
    # Divide each accumulated score by its contributing feature count.
    # Bins with a zero count are left untouched (they hold the default 0).
    while ( my ( $wsize, $bins ) = each %{$bins_ref} ) {
        for my $idx ( 0 .. $#{$bins} ) {
            my $n = $bin_counts_ref->{$wsize}[$idx];
            $bins->[$idx] /= $n if $n;
        }
    }
    return;
}
=head2 _calculate_max_magnitude
Deprecated.
Args[0] : feature e.g. Bio::EnsEMBL::Feature
Args[1] : int - start bin
Args[2] : int - end bin
Args[3] : int - window_size
Args[4] : hashref - score bins
Example : $self->$calc_method
Description: Sets max +/-ve scores for bins which this feature overlaps
Returntype : None
Exceptions : None
Caller : _bin_features_by_window_sizes
Status : At Risk
=cut
sub _calculate_max_magnitude {
    my ( $self, $feature, $first_bin, $last_bin, $wsize, $bins_ref ) = @_;
    deprecate("_calculate_max_magnitude is deprecated and will be removed in e100.");
    my $score = $self->get_score_by_Feature($feature);
    # Each bin keeps a [most negative, most positive] pair; the final
    # choice between the two is made in _post_process_max_magnitude.
    foreach my $idx ( $first_bin .. $last_bin ) {
        my $pair = $bins_ref->{$wsize}[$idx] ||= [ 0, 0 ];    # [-ve, +ve]
        if    ( $score < $pair->[0] ) { $pair->[0] = $score }
        elsif ( $score > $pair->[1] ) { $pair->[1] = $score }
    }
    return;
} ## end sub _calculate_max_magnitude
=head2 _post_process_max_magnitude
Deprecated.
Args[0] : hashref - score bins
Args[1] : hashref - count bins
Example : $self->$post_method
Description: Post processes bins to pick largest +ve or -ve score
Returntype : None
Exceptions : None
Caller : _bin_features_by_window_sizes
Status : At Risk
=cut
sub _post_process_max_magnitude {
    my ( $self, $bins_ref ) = @_;
    deprecate("_post_process_max_magnitude is deprecated and will be removed in e100.");
    # Collapse each [min,max] pair to whichever value has the greater
    # magnitude. Bins never touched by _calculate_max_magnitude still
    # hold the scalar default 0 (no listref) and are skipped.
    foreach my $wsize ( keys %{$bins_ref} ) {
        my $bins = $bins_ref->{$wsize};
        for my $idx ( 0 .. $#{$bins} ) {
            my $pair = $bins->[$idx] or next;
            $bins->[$idx] =
                ( -$pair->[0] > $pair->[1] ) ? $pair->[0] : $pair->[1];
        }
    }
    return;
} ## end sub _post_process_max_magnitude
=head2 _calculate_RPKM
Deprecated.
Args[0] : feature e.g. Bio::EnsEMBL::Feature
Args[1] : int - start bin
Args[2] : int - end bin
Args[3] : int - window_size
Args[4] : hashref - score bins
Example : $self->$calc_method
Description: Stores counts to calculate Read Per Kb per Million(RPKM)
Returntype : None
Exceptions : None
Caller : _bin_features_by_window_sizes
Status : At Risk
=cut
sub _calculate_RPKM {
    my ( $self, $feature, $first_bin, $last_bin, $wsize, $bins_ref ) = @_;
    deprecate("_calculate_RPKM is deprecated and will be removed in e100.");
    # RPKM needs raw per-bin read counts first; the normalisation step
    # happens in _post_process_RPKM.
    $self->_calculate_count( $feature, $first_bin, $last_bin, $wsize,
                             $bins_ref );
    return;
}
=head2 _post_process_RPKM
Deprecated.
Args[0] : hashref - score bins
Args[1] : hashref - count bins
Example : $self->$post_method
Description: Post processes bins to calculate average score
Returntype : None
Exceptions : None
Caller : _bin_features_by_window_sizes
Status : At Risk
=cut
sub _post_process_RPKM {
    my ( $self, $bins_ref ) = @_;
    deprecate("_post_process_RPKM is deprecated and will be removed in e100.");
    # RPKM ~ 10^9 * C / (N G B), with C the reads overlapping the bin and
    # the remainder pre-computed: _RPKM_factor($wsize) was set up in
    # _set_up_RPKM and is multiplied by the bin width $wsize here.
    # (Gender/non-ref/HAP differences are considered negligible.)
    foreach my $wsize ( keys %{$bins_ref} ) {
        # The factor is constant per window size, so compute it once.
        my $denominator = $self->_RPKM_factor($wsize) * $wsize;
        for my $idx ( 0 .. $#{ $bins_ref->{$wsize} } ) {
            $bins_ref->{$wsize}[$idx] =
                ( ( 10**9 ) * $bins_ref->{$wsize}[$idx] ) / $denominator;
        }
    }
    return;
}
=head2 _set_up_RPKM
Deprecated.
Args[0] : hashref - method config e.g
{
DNADB => Bio::EnsEMBL::DBSQL::DBAdaptor,
TOTAL_FEATURES => $total_feature_count,
}
Example : $self->$set_up_method($config);
Description: Sets the RPKM factor
Returntype : None
Exceptions : Throws is required config params are not set
Caller : bin_method
Status : At Risk
=cut
sub _set_up_RPKM {
    my ( $self, $config ) = @_;
    deprecate("_set_up_RPKM is deprecated and will be removed in e100.");
    my ( $dnadb, $total_features ) =
        rearrange( [ 'DNADB', 'TOTAL_FEATURES' ], %{$config} );

    # Validate explicitly so the errors point at the method config hash.
    if ( !$total_features ) {
        throw(  "For RPKM you must pass a valid 'total_features' "
              . "as part of the method config hash." );
    }
    if ( !$dnadb ) {
        throw("For RPKM you must pass 'dnadb' as part of the method config hash.");
    }

    # Pre-compute the per-window normalisation factor. Window size 0 is
    # never expected here.
    foreach my $wsize ( @{ $self->window_sizes } ) {
        $self->_RPKM_factor( $wsize, $wsize * $total_features );
        warn "setting $wsize RPKM factor($wsize * $total_features) to "
           . $self->_RPKM_factor($wsize);
    }
    return;
} ## end sub _set_up_RPKM
=head2 _RPKM_factor
Deprecated.
Args[0] : int - RPKM factor i.e. (Total reads in the experiment *
Genome length)
Example : $self->_RPKM_factor($wsize, $factor);
Description: Gets/Sets the RPKM factor
Returntype : int
Exceptions : None
Caller : _set_up_RPKM, _post_process_RPKM
Status : At Risk
=cut
sub _RPKM_factor {
    my ( $self, $wsize, $factor ) = @_;
    deprecate("_RPKM_factor is deprecated and will be removed in e100.");
    throw("You must pass at least window_size to get or set the RPKM factor")
        unless defined $wsize;

    if ( defined $factor ) {
        # Setter: record the factor for this window size.
        $self->{'RPKM_factor'}{$wsize} = $factor;
    }
    elsif ( !exists $self->{'RPKM_factor'}{$wsize} ) {
        # Only possible if the window sizes are redefined after
        # initialisation.
        throw(  "You have requested an RPKM factor for a window_size"
              . " which has not been set:\t$wsize" );
    }
    return $self->{'RPKM_factor'}{$wsize};
}
=head2 get_diploid_genome_length_by_gender
Deprecated.
Args[0] : string - RPKM factor i.e. (Total reads in the experiment *
Genome length)
Args[1] : string - gender e.g. male or female
Example :
my $glength =
$self->get_diploid_genome_length_by_gender( $dnadb, $gender );
Description: Gets the gender specific diploid genome length,
including non-ref but not including haplotypes. Only
handles species with X/Y sex chromosomes.
Returntype : int
Exceptions : None
Caller : _set_up_RPKM, _post_process_RPKM
Status : At Risk - Move to and export from generic Utils Slice module???
=cut
sub get_diploid_genome_length_by_gender {
    my ( $dnadb, $gender ) = @_;
    deprecate("get_diploid_genome_length_by_gender is deprecated and will be removed in e100.");

    # Sex chromosome => the gender that carries it as a single copy
    # relative to the code below: Y is male-only; X falls through to the
    # diploid (2x) addition for females.
    my %sex_chrs = ( 'Y' => 'male',
                     'X' => 'female', );
    my $dip_length = 0;

    # Bug fix: was $dnadb->grou(), which is not a DBAdaptor method; the
    # accessor is group().
    if (!(( ref($dnadb) && $dnadb->isa('Bio::EnsEMBL::DBSQL::DBAdaptor') )
          && $dnadb->group() eq 'core'
          && ( defined $gender && $gender =~ /(male|female)/ ) ) )
    {
        throw( "Must provide valid "
             . "Bio::EnsEMBL::DBSQL::DBAdaptor($dnadb) and "
             . "gender ($gender) arguments" );
    }

    # (Removed an earlier fetch_all('toplevel') call whose result was
    # discarded -- a redundant, potentially expensive DB round trip.)

    # Include non-ref(unassembled), but omit haps/lrgs(i.e. redundant)
    foreach my $slice (
        @{ $dnadb->get_SliceAdaptor->fetch_all( 'toplevel', undef, 1, 1 ) }
      )
    {
        # Skip haps/lrgs; include duplicated regions for true diploid length
        if ( ( $slice->coord_system->name() eq 'chromosome'
               && !$slice->is_reference() )
             || $slice->coord_system->name() eq 'lrg' )
        {
            next;
        }

        if ( exists( $sex_chrs{ $slice->seq_region_name() } ) ) {
            if ( $gender eq 'male' ) {
                # Males are XY: count each sex chromosome exactly once.
                # Bug fix: previously this fell through to the diploid
                # (2x) addition below as well, counting X and Y three
                # times each.
                $dip_length += $slice->length;
                next;
            } elsif ( $sex_chrs{ $slice->seq_region_name } eq 'male' ) {
                # Females carry no Y.
                next;
            }
        }

        # Autosomes (and X for females): two copies.
        $dip_length += 2*$slice->length;
    }

    return $dip_length;
} ## end sub get_diploid_genome_length_by_gender
1;
| muffato/ensembl | modules/Bio/EnsEMBL/Utils/Collector.pm | Perl | apache-2.0 | 54,184 |
#!/usr/bin/perl
#
# 14:13 2009/6/12
# Jonathan Tsai
# Ver 1.02
#
# monitor service and restart it
# Usage : mon_service.pl <config_file>
#   * <config_file> : default is mon_service.conf
#
# 1.00 (2008/10/24) First Version Release
#
use strict;
use warnings;

my $prgname = substr($0, rindex($0,'/')+1);
my $ver     = "1.02 (2009/6/12)";

# Config file: first CLI argument, or the packaged default.
# (Fixed a dangling "$t_conffile =" that chained into this assignment.)
my $p_config = !defined($ARGV[0]) ? "/opt/trysrvtool/mon_service.conf" : $ARGV[0];

# Read the config, dropping comment (#) and empty lines.  Each remaining
# line is tab separated:
#   service_name, check_ip, check_port, input_cmd, expected_keyword, run_cmd
my @arr_config;
if (-e $p_config) {
    foreach my $line (split(/\n/, `/bin/cat $p_config | /bin/grep -v "#"`)) {
        push(@arr_config, $line) if length($line) > 0;
    }
}
# Nothing to monitor -> nothing to do.
exit if !@arr_config;

my $g_msg = "# $prgname Ver $ver \n";
my $v_msg = "";

foreach my $conf_line (@arr_config) {
    my ($service_name, $check_ip, $check_port, $input_cmd,
        $except_msg_keyword, $run_cmd) = split(/\t/, $conf_line);
    # Probe the service: send the probe command via netcat and look for
    # the expected keyword in the reply.
    # NOTE(review): config fields are interpolated into shell commands;
    # the config file must be trusted (root-owned, not world-writable).
    my $reply = `echo $input_cmd | nc $check_ip $check_port`;
    if (index($reply, $except_msg_keyword) < 0) {
        # Keyword missing -> service looks down; run the restart command
        # and log what happened.
        my $now    = `date +"%Y-%m-%d %H:%M:%S"`;
        my $result = `$run_cmd`;
        $v_msg .= $now." Run:[".$run_cmd."]\n";
        $v_msg .= $result;
    }
}

# Only emit output when at least one restart was attempted, so cron
# stays quiet when everything is healthy.
if (length($v_msg) > 0) {
    print($g_msg);
    print($v_msg);
    print("-----\n");
}
| tryweb/trysrvtool | mon_service.pl | Perl | apache-2.0 | 1,229 |
use strict;
use warnings;
package Grace::Toolset;
use File::Spec;
use Carp;
use Grace::Util;
my %_drivers;
sub register ($$$) {
    my ($driver, $chain, $config) = @_;
    # Warn (but proceed) when a driver re-registers a toolchain name;
    # the newest registration always wins.
    my $previous = $_drivers{$driver}{toolchain}{$chain};
    if ($previous) {
        carp(__PACKAGE__.": Driver '$driver' already registered '$chain'");
        carp(__PACKAGE__.": Old Rootdir: $previous->{rootdir}");
        carp(__PACKAGE__.": New Rootdir: $config->{rootdir}");
        carp(__PACKAGE__.": Replacing toolchain '$chain'");
    }
    $_drivers{$driver}{toolchain}{$chain} = $config;
    return $config;
}
# Compile-time driver discovery: scan every @INC entry for
# Grace/Toolchain/*.pm, require each module found (which is expected to
# call register() above), and remember the source file per driver.
BEGIN {
    # NOTE: the scalar $drv and the hash %drv are distinct variables;
    # $drv{$drv} maps module name => source file path.
    my ($inc, $pth, $dir, $fil, %drv, $drv);
    # Trace @INC to search for Grace::Toolchain drivers.
    foreach $inc (@INC) {
        $pth = $inc; # Copy to $pth, to avoid altering @INC.
        if (! File::Spec->file_name_is_absolute($pth)) {
            # Make an absolute path out of a relative one.
            $pth = File::Spec->catdir(File::Spec->curdir(), $pth);
        }
        # Inspect Grace/Toolchain/*.
        $pth = File::Spec->catdir($pth, 'Grace', 'Toolchain');
        next if (! -d $pth);
        if (! opendir($dir, $pth)) {
            carp("Path '$pth': $!\n");
            next;
        }
        # Pick up *.pm from Grace/Toolchain/...
        foreach $drv (grep { m{^.*\.pm$}io } readdir($dir)) {
            $fil = File::Spec->catdir($pth, $drv);
            # Turn "Foo.pm" into the loadable name "Grace::Toolchain::Foo".
            $drv =~ s{^(.*)\.pm$}{Grace::Toolchain::$1}io;
            $drv{$drv} = $fil;
        }
        closedir($dir);
    }
    # Doctor found driver filenames into something loadable.
    # A failed require is reported but does not abort discovery.
    foreach $drv (keys(%drv)) {
        eval "require $drv" or do {
            carp("Could not load driver '$drv': $@\n");
            next;
        };
        $_drivers{$drv}{drivermod} = $drv{$drv};
    }
}
sub drivers () {
    # Names of all driver modules discovered at load time.
    my @known = keys(%_drivers);
    return @known;
}
sub toolchains (@) {
    shift;    # discard class/instance invocant
    my @wanted = @_;
    print(STDERR __PACKAGE__."->toolchains([@wanted])\n");
    # With no explicit driver list, report chains from every known driver.
    unless (@wanted) {
        print(STDERR "->toolchains(): no drivers specified, probe all.\n");
        @wanted = keys(%_drivers);
        print(STDERR "->toolchains(): probe [@wanted]\n");
    }
    my @found;
    foreach my $driver (@wanted) {
        foreach my $chain (keys(%{$_drivers{$driver}{toolchain}})) {
            push(@found, "$driver/$chain");
        }
    }
    return @found;
}
sub toolchain ($) {
    shift;    # discard class/instance invocant
    my $wanted = shift;
    print(STDERR __PACKAGE__."->toolchain($wanted)\n");
    # "driver/chain" selects directly; a bare chain name searches every
    # driver and returns the first truthy match.
    my ($drv, $chn) = split(m{/+}o, $wanted, 2);
    if (defined($chn)) {
        return $_drivers{$drv}{toolchain}{$chn};
    }
    $chn = $drv;
    foreach my $candidate (keys(%_drivers)) {
        my $cfg = $_drivers{$candidate}{toolchain}{$chn};
        return $cfg if $cfg;
    }
    return undef;
}
# Probe Grace/Toolchain/* for appropriate toolchain drivers.
# Attempt to load each toolchain driver, in turn.
# Each toolchain driver will attempt to auto-discover toolchains.
# Each toolchain driver may present multiple toolchains.
# Each individual toolchain is individually selectable.
1;
| coreybrenner/grace | Grace/Toolset.pm | Perl | bsd-2-clause | 2,965 |
package Tapper::Schema::TestrunDB::ResultSet::Precondition;
BEGIN {
$Tapper::Schema::TestrunDB::ResultSet::Precondition::AUTHORITY = 'cpan:TAPPER';
}
{
$Tapper::Schema::TestrunDB::ResultSet::Precondition::VERSION = '4.1.3';
}
use strict;
use warnings;
use parent 'DBIx::Class::ResultSet';
use YAML::Syck;
sub add {
    my ($self, $preconditions) = @_;

    # Persist each precondition hash as a YAML blob and collect the new
    # row ids in input order.
    my @created_ids;
    foreach my $precond (@{$preconditions}) {
        # (XXX) decide how to handle empty preconditions
        next unless ref($precond) eq 'HASH';

        my $row = $self->result_source->schema->resultset('Precondition')->new
            ({
              shortname    => $precond->{shortname} || '',
              precondition => Dump($precond),
              timeout      => $precond->{timeout},
             });
        $row->insert;
        push @created_ids, $row->id;
    }
    return @created_ids;
}
1;
__END__
=pod
=encoding utf-8
=head1 NAME
Tapper::Schema::TestrunDB::ResultSet::Precondition
=head2 add
Create (add) a list of preconditions and return them with their now
associated db data (eg. ID).
=head1 AUTHOR
AMD OSRC Tapper Team <tapper@amd64.org>
=head1 COPYRIGHT AND LICENSE
This software is Copyright (c) 2013 by Advanced Micro Devices, Inc..
This is free software, licensed under:
The (two-clause) FreeBSD License
=cut
| gitpan/Tapper-Schema | lib/Tapper/Schema/TestrunDB/ResultSet/Precondition.pm | Perl | bsd-2-clause | 1,625 |
#!/usr/bin/perl -w
# MGEL
# Surya Saha 3/15/07
# reading cmd line input .out file which is sorted on the start position
# and finds the relationship among images and families
# Relationship types:
# Upstream: u1 (0-500 bases),u2 (500-1000 bases),u3 (1000-5000 bases), u4 (5000-10000 bases), u5 (10000-15000 bases)
# Downstream: d1 (0-500 bases),d2 (500-1000 bases),d3 (1000-5000 bases), d4 (5000-10000 bases), d5 (10000-15000 bases)
# In: Location of fam2 is entirely within fam1 (IN)
# Contains: Location of fam2 is entirely within fam1 (Cont)
# Overlap:
# single linkage algo so consider overlap if > 10% of either
# 10% to 30% (Ovlap-10to30)
# 30% to 70% (Ovlap-30to70)
# 70% + (Ovlap>70)
# Creating the frequent itemsets in the format
# fam1, fam1-count, fam1-avglen, fam2, fam2-count, fam2-avglen, Occurence, Strand, Category
# v3: Removed all duplicate counting
# v3: Counts all relationships
# v4: Optimized the code to avoid recording itemsets with 0 count
# v4: Check for function call with large parameters
# v5: count relations for images INSTEAD of families
# v5: Use the strand information to calculate the relationships (See rLog)
# v6: Optimize the code (remove duplicates)
# v6: Fixed the bug where false relations were being counted for 'B' strand because of missing ELSE
# v7: Better progress messages
# v7: Hangs with chr12.con.out
# v8 : 07/01/07
# v8: Reducing the number of loops
# v8: No pruning.
# v8 : F1 O F2 is equal to F2 O F1 if F1==F2
# v8 : Huge improvement in complexity (8+ hours to 36 mins for chr12.con.out)
# v8 : Both the sub_fam/img and ref_fam/img will not take part in relationships with ref_fam and
# sub_fam respec. in the future
# v9 : Added a reciprocal relationship for IN called CONTAINS to handle the new confidence calulation
# v10: Writing out the copy information for each relationship
# v10: Writing out the copy information for each relationship separately for each strand
# v10.1: Introduced a flag to prevent writing out copies file (for noise files)
# v11: Modified get_index to use a hash instead of iterating thru an array
# v11: Improved runtime on chr12 from 25 mins to 2 mins
# v11: Fixed it so no information is recorded for copies unless required
use strict;
use warnings;
use POSIX;
# --- Argument handling and output-file setup --------------------------------
# Usage: <input .out file> <write copies?? (0/1)>
# The second argument gates creation of the three per-strand copies files.
unless (@ARGV == 2){
    print "USAGE: $0 <input .out file> <write copies??(0/1)>\n";
    exit;
}
# File-scoped working variables used throughout the rest of the script.
my ($ifname,$rec,@temp,%temphash,$ctr,$i,$j,$copy_file_flag);
my (@table,@famnames,@counts,%counts_index,$ups_ctr,$dns_ctr, $ref_img,
$ref_fam,$ref_start,$ref_end,$ref_strand,$sub_fam,$sub_img,$sub_start,
$sub_end,$sub_strand,%pos_relationships, %comp_relationships, %both_relationships,
%pos_rel_history, %comp_rel_history, %both_rel_history, $tot_fams,
$tot_recs,$user_t,$system_t, $cuser_t,$csystem_t);
my ($pos_copy_ctr,$comp_copy_ctr, $both_copy_ctr,
@pos_copies,@comp_copies,@both_copies);
$ifname=$ARGV[0];
chomp $ifname;
$copy_file_flag=$ARGV[1];
chomp $copy_file_flag;
# flag must be exactly 0 or 1
if(!($copy_file_flag == 0 || $copy_file_flag == 1) ){ print STDERR "flag can be only 0 or 1\nExiting..\n"; exit;}
# Three-arg open prevents mode characters in the filename from being
# interpreted (2-arg open would treat a leading '>' etc. as a mode).
# Bareword handles are kept because the rest of the file prints to them.
unless(open(INFILEDATA, '<', $ifname)){print "not able to open ".$ifname."\n\n";exit;}
unless(open(OUTFILEDATA, '>', "$ifname.f_itemsets.tab")){print "not able to open ".$ifname.".f_itemsets.tab\n\n";exit;}
if($copy_file_flag){
    unless(open(OUTFILECOPIESPOS, '>', "$ifname.pos.copies.tab")){print "not able to open ".$ifname.".pos.copies.tab \n\n";exit;}
    unless(open(OUTFILECOPIESCOMP, '>', "$ifname.comp.copies.tab")){print "not able to open ".$ifname.".comp.copies.tab \n\n";exit;}
    unless(open(OUTFILECOPIESBOTH, '>', "$ifname.both.copies.tab")){print "not able to open ".$ifname.".both.copies.tab \n\n";exit;}
}
# debugging
# unless(open(ERRFILE,">ERRFILE")){print "not able to open ERRFILE \n\n";exit;}
#to get the index position of a family in the @counts array
#it might be faster to just get info from the @counts array
#once we have the index pos
#params: $fam
# sub get_index{
# my ($fam,$ctr);
# $fam=$_[0];
# $fam=~ s/\s*//g;
# $ctr=0;
# foreach (@counts){
# $_->[0] =~ s/\s*//g;
# if ($_->[0] eq $fam){
# return $ctr;
# last;
# }
# else{
# $ctr++;
# }
# }
# }
# reimplementing the subroutine to use a hash to return the location of
# the family, help in speedup??
#params: $fam
# Look up the row index of a family in @counts via the %counts_index hash
# (the hash replaces the old linear scan of @counts; see v11 notes above).
# NOTE: strips ALL whitespace from the caller's variable IN PLACE, because
# @_ aliases the caller's arguments — callers rely on this cleanup.
# params: $fam  (modified in place)
sub get_index{
    for ($_[0]) {
        s/\s*//g;
    }
    return $counts_index{ $_[0] };
}
# SLURPING IN THE WHOLE .OUT REPORT FILE
# Each kept record becomes one row of @table (whitespace-split fields).
$ctr=0;
while($rec=<INFILEDATA>){
    next if $rec =~ /#/;        # skip header/comment lines
    next if length($rec) < 10;  # skip the short trailing line
    push @table, [ split ' ', $rec ];
    $ctr++;
}
# record tot recs
$tot_recs = $ctr;
# Version and timestamp banners.  The three copies files all receive the
# same banner, so they are written via one loop over glob refs; output
# order (POS, COMP, BOTH) matches the original statement order.
print OUTFILEDATA "\# Version: 11\n";
if($copy_file_flag){
    for my $fh (\*OUTFILECOPIESPOS, \*OUTFILECOPIESCOMP, \*OUTFILECOPIESBOTH) {
        print {$fh} "\# Version: 10.1\n";
    }
}
$i=localtime();
print OUTFILEDATA "\# Time: $i\n";
if($copy_file_flag){
    for my $fh (\*OUTFILECOPIESPOS, \*OUTFILECOPIESCOMP, \*OUTFILECOPIESBOTH) {
        print {$fh} "\# Time: $i\n";
    }
}
# Timing checkpoint after reading the input file.
($user_t,$system_t,$cuser_t,$csystem_t) = times;
print OUTFILEDATA "\# Runtime details after reading in the file: \n";
print OUTFILEDATA "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILEDATA "\# User time for process: ",ceil($user_t/60)," mins\n";
print OUTFILEDATA "\n";
print STDERR "\# Runtime details after reading $tot_recs from file: \n";
print STDERR "\# System time for process: ",ceil($system_t/60)," mins\n";
print STDERR "\# User time for process: ",ceil($user_t/60)," mins\n";
#@table
#1935 10.6 0.0 2.8 chr12 8936 9225 27748096 C R=286 Unknown (0) 283 2
#0 1 2 3 4 5 6 7 8 9 10 11 12 13
# FIND THE NUMBER OF OCCURENCES OF EACH FAMILY
# get family names (column 9 of every record)
@famnames = map { $_->[9] } @table;
# Deduplicate then sort.  The original code sorted before hashing as well,
# but hash keys discard order, so a single sort of the unique keys is
# equivalent and cheaper.
%temphash = map { $_ => 1 } @famnames;
@famnames = sort keys %temphash;
# INITIALIZING THE @COUNTS 2D ARRAY
# @counts row layout: [ fam, occurrences, avg-len, image-counter ]
$ctr=0;
foreach(@famnames){
    $counts[$ctr][0]=$_;
    # hash entry for fast family -> row lookup (used by get_index)
    $counts_index{"$_"} = $ctr;
    # initialize all counters to 0
    $counts[$ctr][1]=0;   # occurrences
    $counts[$ctr][2]=0;   # total, later average, length
    $counts[$ctr++][3]=0; # number of images (mined till now)
}
$tot_fams=$ctr;
# populating the @counts array
# Count the occurrences and total length of each family in ONE pass over
# @table, looking each record's family row up through %counts_index.
# (The original nested loop was O(families x records); this is O(records).)
foreach $j (@table){
    $i = $counts[ $counts_index{ $j->[9] } ];
    $i->[1]++;                          # occurrences
    $i->[2] += ($j->[6] - $j->[5]);     # total length so far
}
# Convert accumulated totals to average lengths.  Every family in @counts
# came from @table, so occurrences is always >= 1 (no divide-by-zero).
foreach $i (@counts){
    $i->[2] = floor($i->[2] / $i->[1]); # avg length
}
# Add a field to end of @table
# where @table[][14]=image number
foreach (@table){
    # since @counts[][3] is initialized to 0
    $_->[14] = 1 + $counts[ get_index($_->[9]) ][3]++;
}
# Timing checkpoint after preparing @counts and appending @table.
# The report text is identical on both handles, so one loop writes the
# full OUTFILEDATA report first, then the STDERR copy (same order as before).
($user_t,$system_t,$cuser_t,$csystem_t) = times;
for my $fh (\*OUTFILEDATA, \*STDERR) {
    print {$fh} "\# Runtime details after preparing \@counts and appending \@table: \n";
    print {$fh} "\# System time for process: ",ceil($system_t/60)," mins\n";
    print {$fh} "\# User time for process: ",ceil($user_t/60)," mins\n";
    print {$fh} "\n";
}
# FINDING ALL RELATIONS
# @table sorted on start position
# 1935 10.6 0.0 2.8 chr12 8936 9225 27748096 C R=286 Unknown (0) 283 2 3
# 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
# @count: fam occurences avg-len imagenum
# finding the relationships
# %pos_relationships : [fam1 fam2 category] = count
# %comp_relationships : [fam1 fam2 category] = count
# %both_relationships : [fam1 fam2 category] = count
# %pos_rel_history : [fam1 fam1-img category] = fam2
# %comp_rel_history : [fam1 fam1-img category] = fam2
# %both_rel_history : [fam1 fam1-img category] = fam2
# counters for upstream/downstream candidate comparisons made in the scan below
$ups_ctr=$dns_ctr=0;
if($copy_file_flag){
# next free index into @pos_copies / @comp_copies / @both_copies respectively
$pos_copy_ctr=$comp_copy_ctr=$both_copy_ctr=0;
}
for $i (0 .. $#table){
$ref_start=$table[$i][5]; $ref_end=$table[$i][6];
$ref_strand=$table[$i][8]; $ref_fam=$table[$i][9];
$ref_img=$table[$i][14];
# cleaning up
$ref_start=~ s/\s//g; $ref_end=~ s/\s//g;
$ref_strand=~ s/\s//g; $ref_fam=~ s/\s//g;
$ref_img=~ s/\s//g;
print STDERR '.';
$j=$i;
# only look for relationships with images located before it and
# ending within 15k bases before ref_start or anytime after it
while(($j!=0) && ($table[$j-1][6] > $ref_start-15000)) {
$ups_ctr++;
$j--;
$sub_start=$table[$j][5]; $sub_end=$table[$j][6];
$sub_strand=$table[$j][8]; $sub_fam=$table[$j][9];
$sub_img=$table[$j][14];
# cleaning up
$sub_start=~ s/\s//g; $sub_end=~ s/\s//g;
$sub_strand=~ s/\s//g; $sub_fam=~ s/\s//g;
$sub_img=~ s/\s//g;
# Note: since all relationship are exclusive, I have used elsif
# In: Location of ref fam is entirely within sub fam (IN,CONT)
# IN should be first bcos if sub start is near the ref start, it will
# be listed right before the ref record in the list
if(($sub_start <= $ref_start) && ($sub_end >= $ref_end)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam IN"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam IN"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam IN"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam IN"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam IN"} =$sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam IN"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$ref_fam $sub_fam IN"}) {
$pos_relationships{"$ref_fam $sub_fam IN"} = 1;
}
else{
$pos_relationships{"$ref_fam $sub_fam IN"}++;
}
# add record for IN relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$ref_fam; $pos_copies[$pos_copy_ctr][1]="IN";
$pos_copies[$pos_copy_ctr][2]=$sub_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$ref_start; $pos_copies[$pos_copy_ctr][5]=$ref_end;
$pos_copies[$pos_copy_ctr][6]=$sub_start; $pos_copies[$pos_copy_ctr++][7]=$sub_end;
}
# increment reciprocal relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam CONT"}) {
$pos_relationships{"$sub_fam $ref_fam CONT"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam CONT"}++;
}
if($copy_file_flag){
# add record for CONT relationship
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="CONT";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam IN"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam IN"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam IN"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam IN"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam IN"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam IN"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam IN"}) {
$comp_relationships{"$ref_fam $sub_fam IN"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam IN"}++;
}
# add record for IN relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="IN";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
# increment reciprocal relationship count or create relationship entry
if (!exists $comp_relationships{"$sub_fam $ref_fam CONT"}) {
$comp_relationships{"$sub_fam $ref_fam CONT"} = 1;
}
else{
$comp_relationships{"$sub_fam $ref_fam CONT"}++;
}
# add record for CONT relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="CONT";
$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam IN"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam IN"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam IN"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam IN"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam IN"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam IN"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$ref_fam $sub_fam IN"}) {
$both_relationships{"$ref_fam $sub_fam IN"} = 1;
}
else{
$both_relationships{"$ref_fam $sub_fam IN"}++;
}
# add record for IN relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$ref_fam; $both_copies[$both_copy_ctr][1]="IN";
$both_copies[$both_copy_ctr][2]=$sub_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$ref_start; $both_copies[$both_copy_ctr][5]=$ref_end;
$both_copies[$both_copy_ctr][6]=$sub_start; $both_copies[$both_copy_ctr++][7]=$sub_end;
}
# increment reciprocal relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam CONT"}) {
$both_relationships{"$sub_fam $ref_fam CONT"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam CONT"}++;
}
# add record for CONT relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="CONT";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}# IN end
# Overlap: If overlap is more than 10% of length of either family (Ovlap)
# now if subject fam ends within the reference fam
elsif (($sub_end > $ref_start) && ($sub_end < $ref_end)) {
my ($ovlap, $ref_ovlap, $sub_ovlap);
$ovlap = $sub_end - $ref_start;
$ref_ovlap = ($ovlap / ($ref_end - $ref_start)) * 100;
$sub_ovlap = ($ovlap / ($sub_end - $sub_start)) * 100;
# Overlap :10% to 30% (Ovlap-10to30)
if ((($ref_ovlap > 10.00) && ($ref_ovlap <= 30.00)) ||
(($sub_ovlap > 10.00) && ($sub_ovlap <= 30.00))) {
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam Ovlap-10to30"}) {
$pos_relationships{"$sub_fam $ref_fam Ovlap-10to30"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam Ovlap-10to30"}++;
}
# add record for Ovlap-10to30 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="Ovlap-10to30";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$sub_fam $ref_fam Ovlap-10to30"}) {
$comp_relationships{"$sub_fam $ref_fam Ovlap-10to30"} = 1;
}
else{
$comp_relationships{"$sub_fam $ref_fam Ovlap-10to30"}++;
}
# add record for Ovlap-10to30 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="Ovlap-10to30";
$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam Ovlap-10to30"}) {
$both_relationships{"$sub_fam $ref_fam Ovlap-10to30"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam Ovlap-10to30"}++;
}
# add record for Ovlap-10to30 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="Ovlap-10to30";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Overlap :30% to 70% (Ovlap-30to70)
elsif ((($ref_ovlap > 30.00) && ($ref_ovlap <= 70.00)) ||
(($sub_ovlap > 30.00) && ($sub_ovlap <= 70.00))) {
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam Ovlap-30to70"}) {
$pos_relationships{"$sub_fam $ref_fam Ovlap-30to70"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam Ovlap-30to70"}++;
}
# add record for Ovlap-30to70 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="Ovlap-30to70";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$sub_fam $ref_fam Ovlap-30to70"}) {
$comp_relationships{"$sub_fam $ref_fam Ovlap-30to70"} = 1;
}
else{
$comp_relationships{"$sub_fam $ref_fam Ovlap-30to70"}++;
}
# add record for Ovlap-30to70 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="Ovlap-30to70";
$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam Ovlap-30to70"}) {
$both_relationships{"$sub_fam $ref_fam Ovlap-30to70"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam Ovlap-30to70"}++;
}
# add record for Ovlap-30to70 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="Ovlap-30to70";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Overlap : >70% (Ovlap-70plus)
elsif (($ref_ovlap > 70.00) || ($sub_ovlap > 70.00)) {
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam Ovlap-70plus"}) {
$pos_relationships{"$sub_fam $ref_fam Ovlap-70plus"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam Ovlap-70plus"}++;
}
# add record for Ovlap-70plus relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="Ovlap-70plus";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$sub_fam $ref_fam Ovlap-70plus"}) {
$comp_relationships{"$sub_fam $ref_fam Ovlap-70plus"} = 1;
}
else{
$comp_relationships{"$sub_fam $ref_fam Ovlap-70plus"}++;
}
# add record for Ovlap-70plus relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="Ovlap-70plus";
$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam Ovlap-70plus"}) {
$both_relationships{"$sub_fam $ref_fam Ovlap-70plus"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam Ovlap-70plus"}++;
}
# add record for Ovlap-70plus relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="Ovlap-70plus";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
}# overlap end
# Upstream: u1 (0-500 bases)
elsif(($sub_end <= $ref_start) && ($sub_end > $ref_start-500)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam u1"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam u1"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam u1"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam u1"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam u1"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam u1"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam u1"}) {
$pos_relationships{"$sub_fam $ref_fam u1"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam u1"}++;
}
# add record for u1 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="u1";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam u1"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam u1"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam u1"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam u1"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam u1"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam u1"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam u1"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam u1"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam u1"}++;
}
# add record for u1 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="u1";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam u1"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam u1"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam u1"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam u1"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam u1"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam u1"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam u1"}) {
$both_relationships{"$sub_fam $ref_fam u1"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam u1"}++;
}
# add record for u1 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="u1";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Upstream: u2 (500-1000 bases)
elsif(($sub_end <= $ref_start-500) && ($sub_end > $ref_start-1000)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam u2"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam u2"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam u2"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam u2"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam u2"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam u2"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam u2"}) {
$pos_relationships{"$sub_fam $ref_fam u2"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam u2"}++;
}
# add record for u2 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="u2";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam u2"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam u2"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam u2"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam u2"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam u2"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam u2"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam u2"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam u2"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam u2"}++;
}
# add record for u2 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="u2";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam u2"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam u2"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam u2"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam u2"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam u2"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam u2"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam u2"}) {
$both_relationships{"$sub_fam $ref_fam u2"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam u2"}++;
}
# add record for u2 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="u2";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Upstream: u3 (1000-5000 bases)
elsif(($sub_end <= $ref_start-1000) && ($sub_end > $ref_start-5000)){
# Upstream bucket u3: the subject image ends 1,000-5,000 bases upstream of
# the reference image's start.  Three parallel tallies are maintained:
# %pos_* when both images lie on '+', %comp_* when both lie on 'C', and
# %both_* regardless of strand.  The *_rel_history hashes record each
# (family, image, family, relation) pair in both directions so one image
# pair contributes at most once to a given relation count.
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam u3"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam u3"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam u3"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam u3"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam u3"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam u3"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam u3"}) {
$pos_relationships{"$sub_fam $ref_fam u3"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam u3"}++;
}
# add record for u3 relationship
# copy record layout: [0]=upstream fam, [1]=relation, [2]=downstream fam,
# [3]=strand tag ('+'/'C'/'B'), [4..5]=upstream start/end, [6..7]=downstream start/end
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="u3";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam u3"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam u3"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam u3"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam u3"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam u3"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam u3"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam u3"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam u3"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam u3"}++;
}
# add record for u3 relationship
# note the ref/sub swap relative to the '+' branch: on the complement
# strand coordinates run right-to-left, so ref is the upstream family
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="u3";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam u3"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam u3"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam u3"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam u3"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam u3"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam u3"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam u3"}) {
$both_relationships{"$sub_fam $ref_fam u3"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam u3"}++;
}
# add record for u3 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="u3";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Upstream: u4 (5000-10000 bases)
elsif(($sub_end <= $ref_start-5000) && ($sub_end > $ref_start-10000)){
# Upstream bucket u4: the subject image ends 5,000-10,000 bases upstream of
# the reference image's start.  Structure mirrors the u1-u3 branches:
# %pos_* for '+'/'+' pairs, %comp_* for 'C'/'C' pairs, %both_* irrespective
# of strand, with *_rel_history de-duplicating each image pair per relation.
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam u4"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam u4"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam u4"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam u4"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam u4"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam u4"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam u4"}) {
$pos_relationships{"$sub_fam $ref_fam u4"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam u4"}++;
}
# add record for u4 relationship
# copy record layout: [0]=upstream fam, [1]=relation, [2]=downstream fam,
# [3]=strand tag, [4..5]=upstream start/end, [6..7]=downstream start/end
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="u4";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam u4"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam u4"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam u4"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam u4"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam u4"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam u4"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam u4"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam u4"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam u4"}++;
}
# add record for u4 relationship
# ref/sub swapped vs. the '+' branch: complement-strand coordinates run
# right-to-left, so ref is the upstream family here
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="u4";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam u4"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam u4"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam u4"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam u4"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam u4"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam u4"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam u4"}) {
$both_relationships{"$sub_fam $ref_fam u4"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam u4"}++;
}
# add record for u4 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="u4";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Upstream: u5 (10000-15000 bases)
elsif(($sub_end <= $ref_start-10000) && ($sub_end > $ref_start-15000)){
# Upstream bucket u5: the subject image ends 10,000-15,000 bases upstream
# of the reference image's start.  Structure mirrors the u1-u4 branches:
# %pos_* for '+'/'+' pairs, %comp_* for 'C'/'C' pairs, %both_* irrespective
# of strand, with *_rel_history de-duplicating each image pair per relation.
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam u5"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam u5"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam u5"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam u5"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam u5"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam u5"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam u5"}) {
$pos_relationships{"$sub_fam $ref_fam u5"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam u5"}++;
}
# add record for u5 relationship
# copy record layout: [0]=upstream fam, [1]=relation, [2]=downstream fam,
# [3]=strand tag, [4..5]=upstream start/end, [6..7]=downstream start/end
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="u5";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam u5"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam u5"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam u5"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam u5"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam u5"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam u5"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam u5"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam u5"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam u5"}++;
}
# add record for u5 relationship
# BUGFIX: the strand tag below was 'B'; every other comp-strand copy
# record in this chain (u1-u4, d1, overlap branches) stores 'C', and
# 'B' is reserved for the strand-independent @both_copies records.
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="u5";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam u5"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam u5"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam u5"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam u5"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam u5"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam u5"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam u5"}) {
$both_relationships{"$sub_fam $ref_fam u5"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam u5"}++;
}
# add record for u5 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="u5";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# if($copy_file_flag){
# # temporary fix to reduce memory consumption when
# # copies are not needed
# @pos_copies=();
# @comp_copies=();
# @both_copies=();# deallocating memory
# $pos_copy_ctr=$comp_copy_ctr=$both_copy_ctr=0;# resetting counters
# }
# print STDERR '.';
}# end while
$j=$i;
# only look for relationships with images located after it
# and starting within 15k bases after ref_end (enforced by condition above)
# or anytime after ref_start (enforced by sorting the list on start pos)
while(($j!=$#table) && ($table[$j+1][5] < $ref_end+15000)){
$dns_ctr++;
$j++;
$sub_start=$table[$j][5]; $sub_end=$table[$j][6];
$sub_strand=$table[$j][8]; $sub_fam=$table[$j][9];
$sub_img=$table[$j][14];
# cleaning up
$sub_start=~ s/\s//g; $sub_end=~ s/\s//g;
$sub_strand=~ s/\s//g; $sub_fam=~ s/\s//g;
$sub_img=~ s/\s//g;
# Note: since all relationship are exclusive, I have used elsif
# In: Location of ref fam is entirely within sub fam (IN)
# IN should be first bcos if sub start is near the ref start, it will
# be listed right after the ref record in the list
if(($sub_start == $ref_start) && ($sub_end >= $ref_end)){
# Containment: the reference image lies entirely within the subject image
# (shared start coordinate, subject extends at least as far).  Two
# reciprocal relations are recorded in one pass: "IN" (ref inside sub)
# and "CONT" (sub contains ref), each with its own copy record, so the
# copy counter advances twice per detected pair.
# NOTE(review): containment is only detected when both images share the
# exact same start coordinate ($sub_start == $ref_start); a subject
# starting strictly inside the reference falls through to the overlap
# branch instead — confirm this is intended.
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam IN"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam IN"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam IN"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam IN"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam IN"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam IN"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$ref_fam $sub_fam IN"}) {
$pos_relationships{"$ref_fam $sub_fam IN"} = 1;
}
else{
$pos_relationships{"$ref_fam $sub_fam IN"}++;
}
# add record for IN relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$ref_fam; $pos_copies[$pos_copy_ctr][1]="IN";
$pos_copies[$pos_copy_ctr][2]=$sub_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$ref_start; $pos_copies[$pos_copy_ctr][5]=$ref_end;
$pos_copies[$pos_copy_ctr][6]=$sub_start; $pos_copies[$pos_copy_ctr++][7]=$sub_end;
}
# increment reciprocal relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam CONT"}) {
$pos_relationships{"$sub_fam $ref_fam CONT"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam CONT"}++;
}
# add record for CONT relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="CONT";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam IN"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam IN"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam IN"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam IN"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam IN"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam IN"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam IN"}) {
$comp_relationships{"$ref_fam $sub_fam IN"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam IN"}++;
}
# add record for IN relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="IN";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
# increment reciprocal relationship count or create relationship entry
if (!exists $comp_relationships{"$sub_fam $ref_fam CONT"}) {
$comp_relationships{"$sub_fam $ref_fam CONT"} = 1;
}
else{
$comp_relationships{"$sub_fam $ref_fam CONT"}++;
}
# add record for CONT relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="CONT";
$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam IN"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam IN"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam IN"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam IN"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam IN"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam IN"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$ref_fam $sub_fam IN"}) {
$both_relationships{"$ref_fam $sub_fam IN"} = 1;
}
else{
$both_relationships{"$ref_fam $sub_fam IN"}++;
}
# add record for IN relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$ref_fam; $both_copies[$both_copy_ctr][1]="IN";
$both_copies[$both_copy_ctr][2]=$sub_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$ref_start; $both_copies[$both_copy_ctr][5]=$ref_end;
$both_copies[$both_copy_ctr][6]=$sub_start; $both_copies[$both_copy_ctr++][7]=$sub_end;
}
# increment reciprocal relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam CONT"}) {
$both_relationships{"$sub_fam $ref_fam CONT"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam CONT"}++;
}
# add record for CONT relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="CONT";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}# IN end
# Overlap: If overlap is more than 10% of length of either family (Ovlap)
# now if subject fam ends within the reference fam
elsif (($sub_start > $ref_start) && ($sub_start < $ref_end)) {
# Overlap: the subject image starts strictly inside the reference image.
# The shared bases are expressed as a percentage of each image's length
# and routed into one of three mutually exclusive buckets:
# Ovlap-10to30, Ovlap-30to70, Ovlap-70plus (overlaps <=10% are ignored).
my ($ovlap, $ref_ovlap, $sub_ovlap);
# NOTE(review): this formula assumes the subject extends past $ref_end;
# when the subject is nested inside the reference ($sub_end < $ref_end,
# not caught by the IN branch above because starts differ) the overlap is
# overstated and $sub_ovlap can exceed 100% — confirm intended.
# NOTE(review): a zero-length image would divide by zero below —
# presumably such records are excluded upstream; verify.
$ovlap = $ref_end - $sub_start;
$ref_ovlap = ($ovlap / ($ref_end - $ref_start)) * 100;
$sub_ovlap = ($ovlap / ($sub_end - $sub_start)) * 100;
# Overlap :10% to 30% (Ovlap-10to30)
if ((($ref_ovlap > 10.00) && ($ref_ovlap <= 30.00)) ||
(($sub_ovlap > 10.00) && ($sub_ovlap <= 30.00))) {
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam Ovlap-10to30"}) {
$pos_relationships{"$sub_fam $ref_fam Ovlap-10to30"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam Ovlap-10to30"}++;
}
# add record for Ovlap-10to30 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="Ovlap-10to30";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$sub_fam $ref_fam Ovlap-10to30"}) {
$comp_relationships{"$sub_fam $ref_fam Ovlap-10to30"} = 1;
}
else{
$comp_relationships{"$sub_fam $ref_fam Ovlap-10to30"}++;
}
# add record for Ovlap-10to30 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="Ovlap-10to30";
$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-10to30"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-10to30"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam Ovlap-10to30"}) {
$both_relationships{"$sub_fam $ref_fam Ovlap-10to30"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam Ovlap-10to30"}++;
}
# add record for Ovlap-10to30 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="Ovlap-10to30";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Overlap :30% to 70% (Ovlap-30to70)
elsif ((($ref_ovlap > 30.00) && ($ref_ovlap <= 70.00)) ||
(($sub_ovlap > 30.00) && ($sub_ovlap <= 70.00))) {
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam Ovlap-30to70"}) {
$pos_relationships{"$sub_fam $ref_fam Ovlap-30to70"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam Ovlap-30to70"}++;
}
# add record for Ovlap-30to70 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="Ovlap-30to70";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$sub_fam $ref_fam Ovlap-30to70"}) {
$comp_relationships{"$sub_fam $ref_fam Ovlap-30to70"} = 1;
}
else{
$comp_relationships{"$sub_fam $ref_fam Ovlap-30to70"}++;
}
# add record for Ovlap-30to70 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="Ovlap-30to70";
$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-30to70"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-30to70"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam Ovlap-30to70"}) {
$both_relationships{"$sub_fam $ref_fam Ovlap-30to70"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam Ovlap-30to70"}++;
}
# add record for Ovlap-30to70 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="Ovlap-30to70";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Overlap : >70% (Ovlap-70plus)
elsif (($ref_ovlap > 70.00) || ($sub_ovlap > 70.00)) {
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam Ovlap-70plus"}) {
$pos_relationships{"$sub_fam $ref_fam Ovlap-70plus"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam Ovlap-70plus"}++;
}
# add record for Ovlap-70plus relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="Ovlap-70plus";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$sub_fam $ref_fam Ovlap-70plus"}) {
$comp_relationships{"$sub_fam $ref_fam Ovlap-70plus"} = 1;
}
else{
$comp_relationships{"$sub_fam $ref_fam Ovlap-70plus"}++;
}
# add record for Ovlap-70plus relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$sub_fam; $comp_copies[$comp_copy_ctr][1]="Ovlap-70plus";
$comp_copies[$comp_copy_ctr][2]=$ref_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$sub_start; $comp_copies[$comp_copy_ctr][5]=$sub_end;
$comp_copies[$comp_copy_ctr][6]=$ref_start; $comp_copies[$comp_copy_ctr++][7]=$ref_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam Ovlap-70plus"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam Ovlap-70plus"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam Ovlap-70plus"}) {
$both_relationships{"$sub_fam $ref_fam Ovlap-70plus"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam Ovlap-70plus"}++;
}
# add record for Ovlap-70plus relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="Ovlap-70plus";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
}# overlap end
# Downstream: d1 (0-500 bases)
elsif(($sub_start >= $ref_end) && ($sub_start < $ref_end+500)){
# Downstream bucket d1: the subject image starts 0-500 bases downstream of
# the reference image's end.  Same three-way tally as the upstream buckets:
# %pos_* for '+'/'+' pairs, %comp_* for 'C'/'C' pairs, %both_* irrespective
# of strand, with *_rel_history de-duplicating each image pair per relation.
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam d1"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam d1"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam d1"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam d1"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam d1"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam d1"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam d1"}) {
$pos_relationships{"$sub_fam $ref_fam d1"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam d1"}++;
}
# add record for d1 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="d1";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam d1"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam d1"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam d1"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam d1"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam d1"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam d1"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam d1"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam d1"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam d1"}++;
}
# add record for d1 relationship
# ref/sub swapped vs. the '+' branch: complement-strand coordinates run
# right-to-left, so ref is the leading family in the record
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="d1";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam d1"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam d1"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam d1"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam d1"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam d1"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam d1"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam d1"}) {
$both_relationships{"$sub_fam $ref_fam d1"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam d1"}++;
}
# add record for d1 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="d1";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Downstream: d2 (500-1000 bases)
elsif(($sub_start >= $ref_end+500) && ($sub_start < $ref_end+1000)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam d2"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam d2"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam d2"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam d2"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam d2"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam d2"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam d2"}) {
$pos_relationships{"$sub_fam $ref_fam d2"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam d2"}++;
}
# add record for d2 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="d2";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam d2"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam d2"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam d2"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam d2"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam d2"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam d2"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam d2"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam d2"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam d2"}++;
}
# add record for d2 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="d2";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam d2"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam d2"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam d2"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam d2"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam d2"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam d2"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam d2"}) {
$both_relationships{"$sub_fam $ref_fam d2"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam d2"}++;
}
# add record for d2 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="d2";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Downstream: d3 (1000-5000 bases)
elsif(($sub_start >= $ref_end+1000) && ($sub_start < $ref_end+5000)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam d3"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam d3"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam d3"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam d3"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam d3"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam d3"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam d3"}) {
$pos_relationships{"$sub_fam $ref_fam d3"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam d3"}++;
}
# add record for d3 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="d3";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam d3"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam d3"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam d3"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam d3"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam d3"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam d3"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam d3"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam d3"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam d3"}++;
}
# add record for d3 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="d3";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam d3"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam d3"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam d3"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam d3"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam d3"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam d3"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam d3"}) {
$both_relationships{"$sub_fam $ref_fam d3"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam d3"}++;
}
# add record for d3 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="d3";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Downstream: d4 (5000-10000 bases)
elsif(($sub_start >= $ref_end+5000) && ($sub_start < $ref_end+10000)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam d4"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam d4"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam d4"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam d4"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam d4"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam d4"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam d4"}) {
$pos_relationships{"$sub_fam $ref_fam d4"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam d4"}++;
}
# add record for d4 relationship
# NOTE(fix): store the subject family first, matching the d1/d2/d3/d5
# pos-strand records and the "fam1 rel fam2 Strand fam1-st f1-end f2-st
# f2-end" copy-file layout; the original stored ref-first here only,
# inconsistent with every other distance bucket on this strand.
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="d4";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam d4"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam d4"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam d4"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam d4"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam d4"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam d4"} = $ref_fam;
# # debugging
# if($ref_fam eq "R=759" && $sub_fam eq "R=759"){
# print ERRFILE "\nREF image data:\n";
# foreach(0..14){ print ERRFILE $table[$i][$_],' ';}
# print ERRFILE "\n";
# print ERRFILE "SUB image data:\n";
# foreach(0..14){ print ERRFILE $table[$j][$_],' ';}
# print ERRFILE "\n\n";
# }
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam d4"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam d4"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam d4"}++;
}
# add record for d4 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="d4";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam d4"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam d4"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam d4"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam d4"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam d4"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam d4"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam d4"}) {
$both_relationships{"$sub_fam $ref_fam d4"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam d4"}++;
}
# add record for d4 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="d4";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# Downstream: d5 (10000-15000 bases)
elsif(($sub_start >= $ref_end+10000) && ($sub_start < $ref_end+15000)){
if ($ref_strand eq $sub_strand){
if($ref_strand eq '+'){# pos strand
# check if the ref image has this relation with this family already
if ((!(exists $pos_rel_history{"$ref_fam $ref_img $sub_fam d5"}) || ($pos_rel_history{"$ref_fam $ref_img $sub_fam d5"} ne $sub_fam)) && (!(exists $pos_rel_history{"$sub_fam $sub_img $ref_fam d5"}) || ($pos_rel_history{"$sub_fam $sub_img $ref_fam d5"} ne $ref_fam))) {
# create history entry
$pos_rel_history{"$ref_fam $ref_img $sub_fam d5"} = $sub_fam;
$pos_rel_history{"$sub_fam $sub_img $ref_fam d5"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $pos_relationships{"$sub_fam $ref_fam d5"}) {
$pos_relationships{"$sub_fam $ref_fam d5"} = 1;
}
else{
$pos_relationships{"$sub_fam $ref_fam d5"}++;
}
# add record for d5 relationship
if($copy_file_flag){
$pos_copies[$pos_copy_ctr][0]=$sub_fam; $pos_copies[$pos_copy_ctr][1]="d5";
$pos_copies[$pos_copy_ctr][2]=$ref_fam; $pos_copies[$pos_copy_ctr][3]='+';
$pos_copies[$pos_copy_ctr][4]=$sub_start; $pos_copies[$pos_copy_ctr][5]=$sub_end;
$pos_copies[$pos_copy_ctr][6]=$ref_start; $pos_copies[$pos_copy_ctr++][7]=$ref_end;
}
}
}
elsif($ref_strand eq 'C'){# comp strand
# check if the ref image has this relation with this family already
if ((!(exists $comp_rel_history{"$ref_fam $ref_img $sub_fam d5"}) || ($comp_rel_history{"$ref_fam $ref_img $sub_fam d5"} ne $sub_fam)) && (!(exists $comp_rel_history{"$sub_fam $sub_img $ref_fam d5"}) || ($comp_rel_history{"$sub_fam $sub_img $ref_fam d5"} ne $ref_fam))) {
# create history entry
$comp_rel_history{"$ref_fam $ref_img $sub_fam d5"} = $sub_fam;
$comp_rel_history{"$sub_fam $sub_img $ref_fam d5"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $comp_relationships{"$ref_fam $sub_fam d5"}) {
# now ref fam is upstream of sub fam as we are
# counting from right
$comp_relationships{"$ref_fam $sub_fam d5"} = 1;
}
else{
$comp_relationships{"$ref_fam $sub_fam d5"}++;
}
# add record for d5 relationship
if($copy_file_flag){
$comp_copies[$comp_copy_ctr][0]=$ref_fam; $comp_copies[$comp_copy_ctr][1]="d5";
$comp_copies[$comp_copy_ctr][2]=$sub_fam; $comp_copies[$comp_copy_ctr][3]='C';
$comp_copies[$comp_copy_ctr][4]=$ref_start; $comp_copies[$comp_copy_ctr][5]=$ref_end;
$comp_copies[$comp_copy_ctr][6]=$sub_start; $comp_copies[$comp_copy_ctr++][7]=$sub_end;
}
}
}
}
# irrespective of strand
# check if the ref image has this relation with this family already
if ((!(exists $both_rel_history{"$ref_fam $ref_img $sub_fam d5"}) || ($both_rel_history{"$ref_fam $ref_img $sub_fam d5"} ne $sub_fam)) && (!(exists $both_rel_history{"$sub_fam $sub_img $ref_fam d5"}) || ($both_rel_history{"$sub_fam $sub_img $ref_fam d5"} ne $ref_fam))) {
# create history entry
$both_rel_history{"$ref_fam $ref_img $sub_fam d5"} = $sub_fam;
$both_rel_history{"$sub_fam $sub_img $ref_fam d5"} = $ref_fam;
# increment relationship count or create relationship entry
if (!exists $both_relationships{"$sub_fam $ref_fam d5"}) {
$both_relationships{"$sub_fam $ref_fam d5"} = 1;
}
else{
$both_relationships{"$sub_fam $ref_fam d5"}++;
}
# add record for d5 relationship
if($copy_file_flag){
$both_copies[$both_copy_ctr][0]=$sub_fam; $both_copies[$both_copy_ctr][1]="d5";
$both_copies[$both_copy_ctr][2]=$ref_fam; $both_copies[$both_copy_ctr][3]='B';
$both_copies[$both_copy_ctr][4]=$sub_start; $both_copies[$both_copy_ctr][5]=$sub_end;
$both_copies[$both_copy_ctr][6]=$ref_start; $both_copies[$both_copy_ctr++][7]=$ref_end;
}
}
}
# if($copy_file_flag){
# # temporary fix to reduce memory consumption when
# # copies are not needed
# @pos_copies=();
# @comp_copies=();
# @both_copies=();# deallocating memory
# $pos_copy_ctr=$comp_copy_ctr=$both_copy_ctr=0;# resetting counters
# }
# print STDERR '.';
}#end while
}# end relationship finding
#calculating time taken
($user_t,$system_t,$cuser_t,$csystem_t) = times;
print OUTFILEDATA "\n\# Runtime details after finding relationships: \n";
print OUTFILEDATA "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILEDATA "\# User time for process: ",ceil($user_t/60)," mins\n\n\n";
print STDERR "\n\# Runtime details after finding relationships: \n";
print STDERR "\# System time for process: ",ceil($system_t/60)," mins\n";
print STDERR "\# User time for process: ",ceil($user_t/60)," mins\n\n";
# PRINTING THE ITEMSETS
# %pos_relationships : [fam1 fam2 category] = count
# %comp_relationships : [fam1 fam2 category] = count
# %both_relationships : [fam1 fam2 category] = count
# @count: fam occurences avg-len imagenum
# Creating the frequent itemsets in the format
# fam1, fam1-count, fam1-avglen, fam2, fam2-count, fam2-avglen, Occurence, Strand, Category
print OUTFILEDATA "\# Total records in OUT file: $tot_recs\n";
print OUTFILEDATA "\# Total number of families: $tot_fams\n\n";
print OUTFILEDATA "\# Note: If dns rec ~ ups rec, then the regions were located uniformly\n";
print OUTFILEDATA "\# Average number of upstream OUT records processed per image: ".ceil($ups_ctr/$tot_recs)."\n";
print OUTFILEDATA "\# Average number of downstream OUT records processed per image: ".ceil($dns_ctr/$tot_recs)."\n";
print OUTFILEDATA "\# Average number of OUT records processed per image: ".ceil(($ups_ctr+$dns_ctr)/$tot_recs)."\n\n";
print OUTFILEDATA "\# Total relationships on pos strand:".keys(%pos_relationships)."\n";
if($copy_file_flag){ print OUTFILEDATA "\# Total copies/clusters on pos strand:".$pos_copy_ctr."\n";}
print OUTFILEDATA "\# Total relationships on comp strand:".keys(%comp_relationships)."\n";
if($copy_file_flag){ print OUTFILEDATA "\# Total copies/clusters on comp strand:".$comp_copy_ctr."\n";}
print OUTFILEDATA "\# Total relationships on both strands:".keys(%both_relationships)."\n";
if($copy_file_flag){ print OUTFILEDATA "\# Total copies/clusters on both strands:".$both_copy_ctr."\n\n\n";}
else{print OUTFILEDATA "\n\n";}
# TESTING
# relationships on the positive strand
# while( ($i,$j) = each %pos_relationships){
# @temp=split(' ',$i);
# print OUTFILEDATA "$temp[0]\t$temp[2]\t$temp[1]\t$j\t+\n";
# }
#
# # relationships on the comp strand
# while( ($i,$j) = each %comp_relationships){
# @temp=split(' ',$i);
# print OUTFILEDATA "$temp[0]\t$temp[2]\t$temp[1]\t$j\tC\n";
# }
# relationships on the positive strand
# Each key of %pos_relationships is "fam1 fam2 category" and the value is
# the co-occurrence count.  For each family, look up its stats row via
# get_index() into @counts (column 1 = occurrence count, column 2 =
# average length — TODO confirm column meanings against @counts builder),
# then emit one tab-separated record per the header at the top of this
# section: fam1, fam1-count, fam1-avglen, fam2, fam2-count, fam2-avglen,
# count, strand (+), category.
while( ($i,$j) = each %pos_relationships){
@temp=split(' ',$i);
$rec=&get_index($temp[0]);
print OUTFILEDATA "$temp[0]\t$counts[$rec][1]\t$counts[$rec][2]\t";
$rec=&get_index($temp[1]);
print OUTFILEDATA "$temp[1]\t$counts[$rec][1]\t$counts[$rec][2]\t";
print OUTFILEDATA "$j\t+\t$temp[2]\n";
}
#calculating time taken
($user_t,$system_t,$cuser_t,$csystem_t) = times;
print OUTFILEDATA "\n\# Runtime details after printing positive itemsets: \n";
print OUTFILEDATA "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILEDATA "\# User time for process: ",ceil($user_t/60)," mins\n";
print STDERR "\n\# Runtime details after printing positive itemsets: \n";
print STDERR "\# System time for process: ",ceil($system_t/60)," mins\n";
print STDERR "\# User time for process: ",ceil($user_t/60)," mins\n";
# relationships on the comp strand
while( ($i,$j) = each %comp_relationships){
@temp=split(' ',$i);
$rec=&get_index($temp[0]);
print OUTFILEDATA "$temp[0]\t$counts[$rec][1]\t$counts[$rec][2]\t";
$rec=&get_index($temp[1]);
print OUTFILEDATA "$temp[1]\t$counts[$rec][1]\t$counts[$rec][2]\t";
print OUTFILEDATA "$j\tC\t$temp[2]\n";
}
#calculating time taken
($user_t,$system_t,$cuser_t,$csystem_t) = times;
print OUTFILEDATA "\n\# Runtime details after printing negative itemsets: \n";
print OUTFILEDATA "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILEDATA "\# User time for process: ",ceil($user_t/60)," mins\n";
print STDERR "\n\# Runtime details after printing negative itemsets: \n";
print STDERR "\# System time for process: ",ceil($system_t/60)," mins\n";
print STDERR "\# User time for process: ",ceil($user_t/60)," mins\n";
# relationships on both strands
while( ($i,$j) = each %both_relationships){
@temp=split(' ',$i);
$rec=&get_index($temp[0]);
print OUTFILEDATA "$temp[0]\t$counts[$rec][1]\t$counts[$rec][2]\t";
$rec=&get_index($temp[1]);
print OUTFILEDATA "$temp[1]\t$counts[$rec][1]\t$counts[$rec][2]\t";
print OUTFILEDATA "$j\tB\t$temp[2]\n";
}
#calculating time taken
($user_t,$system_t,$cuser_t,$csystem_t) = times;
print OUTFILEDATA "\n\# Runtime details after printing both itemsets: \n";
print OUTFILEDATA "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILEDATA "\# User time for process: ",ceil($user_t/60)," mins\n";
print STDERR "\n\# Runtime details after printing both itemsets: \n";
print STDERR "\# System time for process: ",ceil($system_t/60)," mins\n";
print STDERR "\# User time for process: ",ceil($user_t/60)," mins\n";
# PRINTING @copies
# @copies : fam1 rel fam2 Strand fam1_st fam1_end fam2_st fam2_end
if($copy_file_flag){
print OUTFILECOPIESPOS "#fam1\trel\tfam2\tStrand\tfam1-st\tf1-end\tf2-st\tf2-end\n";
foreach $i (@pos_copies){
print OUTFILECOPIESPOS "$i->[0]\t$i->[1]\t$i->[2]\t$i->[3]\t$i->[4]\t$i->[5]\t$i->[6]\t$i->[7]\n";
}
print OUTFILECOPIESCOMP "#fam1\trel\tfam2\tStrand\tfam1-st\tf1-end\tf2-st\tf2-end\n";
foreach $i (@comp_copies){
print OUTFILECOPIESCOMP "$i->[0]\t$i->[1]\t$i->[2]\t$i->[3]\t$i->[4]\t$i->[5]\t$i->[6]\t$i->[7]\n";
}
print OUTFILECOPIESBOTH "#fam1\trel\tfam2\tStrand\tfam1-st\tf1-end\tf2-st\tf2-end\n";
foreach $i (@both_copies){
print OUTFILECOPIESBOTH "$i->[0]\t$i->[1]\t$i->[2]\t$i->[3]\t$i->[4]\t$i->[5]\t$i->[6]\t$i->[7]\n";
}
#calculating time taken
($user_t,$system_t,$cuser_t,$csystem_t) = times;
print OUTFILECOPIESPOS "\n\# Runtime details after printing copy info: \n";
print OUTFILECOPIESPOS "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILECOPIESPOS "\# User time for process: ",ceil($user_t/60)," mins\n";
print OUTFILECOPIESCOMP "\n\# Runtime details after printing copy info: \n";
print OUTFILECOPIESCOMP "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILECOPIESCOMP "\# User time for process: ",ceil($user_t/60)," mins\n";
print OUTFILECOPIESBOTH "\n\# Runtime details after printing copy info: \n";
print OUTFILECOPIESBOTH "\# System time for process: ",ceil($system_t/60)," mins\n";
print OUTFILECOPIESBOTH "\# User time for process: ",ceil($user_t/60)," mins\n";
print STDERR "\n\# Runtime details after printing copy info: \n";
print STDERR "\# System time for process: ",ceil($system_t/60)," mins\n";
print STDERR "\# User time for process: ",ceil($user_t/60)," mins\n";
}
close (INFILEDATA);
close (OUTFILEDATA);
if($copy_file_flag){
close (OUTFILECOPIESPOS);
close (OUTFILECOPIESCOMP);
close (OUTFILECOPIESBOTH);
}
# # debugging
# close (ERRFILE);
exit;
| suryasaha/ProxMiner | archive/miner.out2f_itemsets.v11.pl | Perl | bsd-2-clause | 97,224 |
# Copyright (c) 2015, BROCADE COMMUNICATIONS SYSTEMS, INC
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
=head1 NAME
Brocade::BSC::Node
=head1 DESCRIPTION
A I<Brocade::BSC::Node> object is used to model, query, and configure
network devices via Brocade's OpenDaylight-based Software-Defined
Networking controller.
=cut
package Brocade::BSC::Node;
use strict;
use warnings;
use YAML;
use JSON -convert_blessed_universally;
=head1 METHODS
=cut
# Constructor ==========================================================
#
=over 4
=item B<new>
Creates a new I<Brocade::BSC::Node> object and populates fields with
values from argument hash, if present, or YAML configuration file.
### parameters:
# + cfgfile - path to YAML configuration file specifying node attributes
# + ctrl - reference to Brocade::BSC controller object (required)
# + name - name of controlled node
#
### YAML configuration file labels and default values
#
# parameter hash | YAML label | default value
# -------------- | ----------- | -------------
# name | nodeName |
Returns new I<Brocade::BSC::Node> object.
=cut
sub new {
    my $class  = shift;
    my %params = @_;

    # Load the optional YAML config file when a path was given and exists.
    my $yamlcfg;
    if ($params{cfgfile} && ( -e $params{cfgfile})) {
        $yamlcfg = YAML::LoadFile($params{cfgfile});
    }
    # Defaults; ctrl is the Brocade::BSC controller reference (required by
    # callers, but not validated here — preserved behavior).
    my $self = {
        ctrl => $params{ctrl},
        name => ''
    };
    # Precedence: YAML value overrides the default, and an explicit
    # constructor argument overrides the YAML value.
    if ($yamlcfg) {
        $yamlcfg->{nodeName}
            && ($self->{name} = $yamlcfg->{nodeName});
    }
    $params{name} && ($self->{name} = $params{name});
    bless ($self, $class);
    # Explicit return per best practice (bless already returns $self, but
    # relying on the last expression is fragile if code is appended later).
    return $self;
}
# Method ===============================================================
#
=item B<as_json>
# Returns : Returns pretty-printed JSON string representing netconf node.
=cut
sub as_json {
    my $self = shift;
    # Build the encoder with direct method-call syntax.  The original
    # "new JSON->canonical->..." used indirect object syntax, which is
    # deprecated and parsed ambiguously (the 'new' can bind to the result
    # of the chained calls rather than to the JSON class).
    my $json = JSON->new->canonical->allow_blessed->convert_blessed;
    # Pretty-printed, key-sorted JSON rendering of this node object.
    return $json->pretty->encode($self);
}
# Method ===============================================================
#
=item B<ctrl_req>
# Parameters: $method (string, req) HTTP verb
# : $urlpath (string, req) path for REST request
# : $data (string, opt)
# : $headerref (hash ref, opt)
# Returns : HTTP::Response
=cut
sub ctrl_req {
    my ($self, @request_args) = @_;
    # Thin delegation: forward the HTTP verb, URL path, and any optional
    # payload/header arguments straight to the controller's private
    # request helper, returning its HTTP::Response unchanged.
    return $self->{ctrl}->_http_req(@request_args);
}
# Module ===============================================================
1;
=back
=head1 COPYRIGHT
Copyright (c) 2015, BROCADE COMMUNICATIONS SYSTEMS, INC
All rights reserved.
| BRCDcomm/perlbsc | Brocade-BSC/lib/Brocade/BSC/Node.pm | Perl | bsd-3-clause | 4,002 |
#!/usr/local/bin/perl
# VC-32.pl - unified script for Microsoft Visual C++, covering Win32,
# Win64 and WinCE [follow $FLAVOR variable to trace the differences].
#
$ssl= "ssleay32";
if ($fips && !$shlib)
{
$crypto="libeayfips32";
$crypto_compat = "libeaycompat32.lib";
}
else
{
$crypto="libeay32";
}
if ($fipscanisterbuild)
{
$fips_canister_path = "\$(LIB_D)\\fipscanister.lib";
}
$o='\\';
$cp='$(PERL) util/copy.pl';
$mkdir='$(PERL) util/mkdir-p.pl';
$rm='del /Q';
$zlib_lib="zlib1.lib";
# Sanitize -L options for ms link: quoted paths first (keeping the quotes,
# so paths containing spaces survive), then bare whitespace-free paths.
# NOTE(fix): the original pattern ("\[^"]+") escaped the bracket, turning
# the intended character class into literal text, so quoted -L paths never
# matched and fell through to the \S+ rule, which splits them at spaces.
$l_flags =~ s/-L("[^"]+")/\/libpath:$1/g;
$l_flags =~ s/-L(\S+)/\/libpath:$1/g;
# C compiler stuff
$cc='cl';
if ($FLAVOR =~ /WIN64/)
{
# Note that we currently don't have /WX on Win64! There is a lot of
# warnings, but only of two types:
#
# C4344: conversion from '__int64' to 'int/long', possible loss of data
# C4267: conversion from 'size_t' to 'int/long', possible loss of data
#
# Amount of latter type is minimized by aliasing strlen to function of
# own design and limiting its return value to 2GB-1 (see e_os.h). As
# per 0.9.8 release remaining warnings were explicitly examined and
# considered safe to ignore.
#
$base_cflags=' /W3 /Gs0 /GF /Gy /nologo -DWIN32_LEAN_AND_MEAN -DL_ENDIAN -DDSO_WIN32 -DOPENSSL_SYSNAME_WIN32 -DOPENSSL_SYSNAME_WINNT -DUNICODE -D_UNICODE';
$base_cflags.=' -D_CRT_SECURE_NO_DEPRECATE'; # shut up VC8
$base_cflags.=' -D_CRT_NONSTDC_NO_DEPRECATE'; # shut up VC8
my $f = $shlib || $fips ?' /MD':' /MT';
$lib_cflag='/Zl' if (!$shlib); # remove /DEFAULTLIBs from static lib
$opt_cflags=$f.' /Ox';
$dbg_cflags=$f.'d /Od -DDEBUG -D_DEBUG';
$lflags="/nologo /subsystem:console /opt:ref";
}
elsif ($FLAVOR =~ /CE/)
{
# sanity check
die '%OSVERSION% is not defined' if (!defined($ENV{'OSVERSION'}));
die '%PLATFORM% is not defined' if (!defined($ENV{'PLATFORM'}));
die '%TARGETCPU% is not defined' if (!defined($ENV{'TARGETCPU'}));
#
# Idea behind this is to mimic flags set by eVC++ IDE...
#
$wcevers = $ENV{'OSVERSION'}; # WCENNN
die '%OSVERSION% value is insane' if ($wcevers !~ /^WCE([1-9])([0-9]{2})$/);
$wcecdefs = "-D_WIN32_WCE=$1$2 -DUNDER_CE=$1$2"; # -D_WIN32_WCE=NNN
$wcelflag = "/subsystem:windowsce,$1.$2"; # ...,N.NN
$wceplatf = $ENV{'PLATFORM'};
$wceplatf =~ tr/a-z0-9 /A-Z0-9_/d;
$wcecdefs .= " -DWCE_PLATFORM_$wceplatf";
$wcetgt = $ENV{'TARGETCPU'}; # just shorter name...
SWITCH: for($wcetgt) {
/^X86/ && do { $wcecdefs.=" -Dx86 -D_X86_ -D_i386_ -Di_386_";
$wcelflag.=" /machine:IX86"; last; };
/^ARMV4[IT]/ && do { $wcecdefs.=" -DARM -D_ARM_ -D$wcetgt";
$wcecdefs.=" -DTHUMB -D_THUMB_" if($wcetgt=~/T$/);
$wcecdefs.=" -QRarch4T -QRinterwork-return";
$wcelflag.=" /machine:THUMB"; last; };
/^ARM/ && do { $wcecdefs.=" -DARM -D_ARM_ -D$wcetgt";
$wcelflag.=" /machine:ARM"; last; };
/^MIPSIV/ && do { $wcecdefs.=" -DMIPS -D_MIPS_ -DR4000 -D$wcetgt";
$wcecdefs.=" -D_MIPS64 -QMmips4 -QMn32";
$wcelflag.=" /machine:MIPSFPU"; last; };
/^MIPS16/ && do { $wcecdefs.=" -DMIPS -D_MIPS_ -DR4000 -D$wcetgt";
$wcecdefs.=" -DMIPSII -QMmips16";
$wcelflag.=" /machine:MIPS16"; last; };
/^MIPSII/ && do { $wcecdefs.=" -DMIPS -D_MIPS_ -DR4000 -D$wcetgt";
$wcecdefs.=" -QMmips2";
$wcelflag.=" /machine:MIPS"; last; };
/^R4[0-9]{3}/ && do { $wcecdefs.=" -DMIPS -D_MIPS_ -DR4000";
$wcelflag.=" /machine:MIPS"; last; };
/^SH[0-9]/ && do { $wcecdefs.=" -D$wcetgt -D_$wcetgt_ -DSHx";
$wcecdefs.=" -Qsh4" if ($wcetgt =~ /^SH4/);
$wcelflag.=" /machine:$wcetgt"; last; };
{ $wcecdefs.=" -D$wcetgt -D_$wcetgt_";
$wcelflag.=" /machine:$wcetgt"; last; };
}
$cc='$(CC)';
$base_cflags=' /W3 /WX /GF /Gy /nologo -DUNICODE -D_UNICODE -DOPENSSL_SYSNAME_WINCE -DWIN32_LEAN_AND_MEAN -DL_ENDIAN -DDSO_WIN32 -DNO_CHMOD -I$(WCECOMPAT)/include -DOPENSSL_SMALL_FOOTPRINT';
$base_cflags.=" $wcecdefs";
$opt_cflags=' /MC /O1i'; # optimize for space, but with intrinsics...
# NOTE(fix): was misspelled "$dbg_clfags", so WinCE debug builds silently
# lost their debug C flags ($dbg_cflags is the variable the shared
# debug/release branch below actually reads).
$dbg_cflags=' /MC /Od -DDEBUG -D_DEBUG';
$lflags="/nologo /opt:ref $wcelflag";
}
else # Win32
{
$base_cflags=' /W3 /WX /Gs0 /GF /Gy /nologo -DOPENSSL_SYSNAME_WIN32 -DWIN32_LEAN_AND_MEAN -DL_ENDIAN -DDSO_WIN32';
$base_cflags.=' -D_CRT_SECURE_NO_DEPRECATE'; # shut up VC8
$base_cflags.=' -D_CRT_NONSTDC_NO_DEPRECATE'; # shut up VC8
my $f = $shlib || $fips ?' /MD':' /MT';
$lib_cflag='/Zl' if (!$shlib); # remove /DEFAULTLIBs from static lib
$opt_cflags=$f.' /Ox /O2 /Ob2';
$dbg_cflags=$f.'d /Od -DDEBUG -D_DEBUG';
$lflags="/nologo /subsystem:console /opt:ref";
}
$mlflags='';
$out_def="out32"; $out_def.='_$(TARGETCPU)' if ($FLAVOR =~ /CE/);
$tmp_def="tmp32"; $tmp_def.='_$(TARGETCPU)' if ($FLAVOR =~ /CE/);
$inc_def="inc32";
if ($debug)
{
$cflags=$dbg_cflags.$base_cflags.' /Zi';
$lflags.=" /debug";
$mlflags.=' /debug';
}
else
{
$cflags=$opt_cflags.$base_cflags;
}
$obj='.obj';
$ofile="/Fo";
# EXE linking stuff
$link="link";
$rsc="rc";
$efile="/out:";
$exep='.exe';
if ($no_sock) { $ex_libs=''; }
elsif ($FLAVOR =~ /CE/) { $ex_libs='winsock.lib'; }
else { $ex_libs='wsock32.lib'; }
my $oflow;
if ($FLAVOR =~ /WIN64/ and `cl 2>&1` =~ /14\.00\.4[0-9]{4}\./)
{
$oflow=' bufferoverflowu.lib';
}
else
{
$oflow="";
}
if ($FLAVOR =~ /CE/)
{
$ex_libs.=' $(WCECOMPAT)/lib/wcecompatex.lib';
$ex_libs.=' /nodefaultlib:oldnames.lib coredll.lib corelibc.lib' if ($ENV{'TARGETCPU'} eq "X86");
}
else
{
$ex_libs.=' gdi32.lib crypt32.lib advapi32.lib user32.lib';
$ex_libs.= $oflow;
}
# As native NT API is pure UNICODE, our WIN-NT build defaults to UNICODE,
# but gets linked with unicows.lib to ensure backward compatibility.
if ($FLAVOR =~ /NT/)
{
$cflags.=" -DOPENSSL_SYSNAME_WINNT -DUNICODE -D_UNICODE";
$ex_libs="unicows.lib $ex_libs";
}
# static library stuff
$mklib='lib /nologo';
$ranlib='';
$plib="";
$libp=".lib";
$shlibp=($shlib)?".dll":".lib";
$lfile='/out:';
$shlib_ex_obj="";
$app_ex_obj="setargv.obj" if ($FLAVOR !~ /CE/);
if ($nasm) {
my $ver=`nasm -v 2>NUL`;
my $vew=`nasmw -v 2>NUL`;
# pick newest version
$asm=($ver gt $vew?"nasm":"nasmw")." -f win32";
$afile='-o ';
} elsif ($ml64) {
$asm='ml64 /c /Cp /Cx';
$asm.=' /Zi' if $debug;
$afile='/Fo';
} else {
$asm='ml /nologo /Cp /coff /c /Cx';
$asm.=" /Zi" if $debug;
$afile='/Fo';
}
$aes_asm_obj='';
$bn_asm_obj='';
$bn_asm_src='';
$des_enc_obj='';
$des_enc_src='';
$bf_enc_obj='';
$bf_enc_src='';
if (!$no_asm)
{
if ($FLAVOR =~ "WIN32")
{
$aes_asm_obj='crypto\aes\asm\a_win32.obj';
$aes_asm_src='crypto\aes\asm\a_win32.asm';
$bn_asm_obj='crypto\bn\asm\bn_win32.obj crypto\bn\asm\mt_win32.obj';
$bn_asm_src='crypto\bn\asm\bn_win32.asm crypto\bn\asm\mt_win32.asm';
$bnco_asm_obj='crypto\bn\asm\co_win32.obj';
$bnco_asm_src='crypto\bn\asm\co_win32.asm';
$des_enc_obj='crypto\des\asm\d_win32.obj crypto\des\asm\y_win32.obj';
$des_enc_src='crypto\des\asm\d_win32.asm crypto\des\asm\y_win32.asm';
$bf_enc_obj='crypto\bf\asm\b_win32.obj';
$bf_enc_src='crypto\bf\asm\b_win32.asm';
$cast_enc_obj='crypto\cast\asm\c_win32.obj';
$cast_enc_src='crypto\cast\asm\c_win32.asm';
$rc4_enc_obj='crypto\rc4\asm\r4_win32.obj';
$rc4_enc_src='crypto\rc4\asm\r4_win32.asm';
$rc5_enc_obj='crypto\rc5\asm\r5_win32.obj';
$rc5_enc_src='crypto\rc5\asm\r5_win32.asm';
$md5_asm_obj='crypto\md5\asm\m5_win32.obj';
$md5_asm_src='crypto\md5\asm\m5_win32.asm';
$sha1_asm_obj='crypto\sha\asm\s1_win32.obj crypto\sha\asm\sha512-sse2.obj';
$sha1_asm_src='crypto\sha\asm\s1_win32.asm crypto\sha\asm\sha512-sse2.asm';
$rmd160_asm_obj='crypto\ripemd\asm\rm_win32.obj';
$rmd160_asm_src='crypto\ripemd\asm\rm_win32.asm';
$cpuid_asm_obj='crypto\cpu_win32.obj';
$cpuid_asm_src='crypto\cpu_win32.asm';
$cflags.=" -DOPENSSL_CPUID_OBJ -DOPENSSL_IA32_SSE2 -DAES_ASM -DBN_ASM -DOPENSSL_BN_ASM_PART_WORDS -DOPENSSL_BN_ASM_MONT -DMD5_ASM -DSHA1_ASM -DRMD160_ASM";
}
elsif ($FLAVOR =~ "WIN64A")
{
$aes_asm_obj='$(OBJ_D)\aes-x86_64.obj';
$aes_asm_src='crypto\aes\asm\aes-x86_64.asm';
$bn_asm_obj='$(OBJ_D)\x86_64-mont.obj $(OBJ_D)\bn_asm.obj';
$bn_asm_src='crypto\bn\asm\x86_64-mont.asm';
$sha1_asm_obj='$(OBJ_D)\sha1-x86_64.obj $(OBJ_D)\sha256-x86_64.obj $(OBJ_D)\sha512-x86_64.obj';
$sha1_asm_src='crypto\sha\asm\sha1-x86_64.asm crypto\sha\asm\sha256-x86_64.asm crypto\sha\asm\sha512-x86_64.asm';
$cpuid_asm_obj='$(OBJ_D)\cpuid-x86_64.obj';
$cpuid_asm_src='crypto\cpuid-x86_64.asm';
$cflags.=" -DOPENSSL_CPUID_OBJ -DAES_ASM -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DSHA512_ASM";
}
}
# Shared-library (DLL) build settings.  The non-CE branch also wires in
# the Applink glue (ms\applink.c / ms\uplink.c) so applications and the
# DLL can exchange FILE handles across CRT boundaries.
if ($shlib && $FLAVOR !~ /CE/)
{
$mlflags.=" $lflags /dll";
# $cflags =~ s| /MD| /MT|;
$lib_cflag=" -D_WINDLL";
$out_def="out32dll";
$tmp_def="tmp32dll";
#
# Engage Applink...
#
$app_ex_obj.=" \$(OBJ_D)\\applink.obj /implib:\$(TMP_D)\\junk.lib";
$cflags.=" -DOPENSSL_USE_APPLINK -I.";
# I'm open for better suggestions than overriding $banner...
# NOTE: the heredoc below is emitted verbatim into the generated
# makefile; it adds the build rules for the applink/uplink objects.
$banner=<<'___';
@echo Building OpenSSL
$(OBJ_D)\applink.obj: ms\applink.c
$(CC) /Fo$(OBJ_D)\applink.obj $(APP_CFLAGS) -c ms\applink.c
$(OBJ_D)\uplink.obj: ms\uplink.c ms\applink.c
$(CC) /Fo$(OBJ_D)\uplink.obj $(SHLIB_CFLAGS) -c ms\uplink.c
$(INCO_D)\applink.c: ms\applink.c
$(CP) ms\applink.c $(INCO_D)\applink.c
EXHEADER= $(EXHEADER) $(INCO_D)\applink.c
LIBS_DEP=$(LIBS_DEP) $(OBJ_D)\applink.obj
___
$banner .= "CRYPTOOBJ=\$(OBJ_D)\\uplink.obj \$(CRYPTOOBJ)\n";
# 64-bit flavors additionally need the uplink thunk table.
$banner.=<<'___' if ($FLAVOR =~ /WIN64/);
CRYPTOOBJ=ms\uptable.obj $(CRYPTOOBJ)
___
}
elsif ($shlib && $FLAVOR =~ /CE/)
{
$mlflags.=" $lflags /dll";
$lib_cflag=" -D_WINDLL -D_DLL";
# Windows CE keeps per-CPU output/temp directories.
$out_def='out32dll_$(TARGETCPU)';
$tmp_def='tmp32dll_$(TARGETCPU)';
}
# Put the compiler's .pdb debug database next to the build output.
$cflags.=" /Fd$out_def";
# do_lib_rule($objs, $target, $name, $shlib, $ign, $base_addr)
#
# Emit the nmake rule(s) that build either a static library or a DLL
# from the given object list.
#
#   $objs      - space-separated object files the target depends on
#   $target    - output library/DLL path (may contain makefile macros)
#   $name      - base name used to locate the ms/<NAME>.def export file
#   $shlib     - true when building a shared library (DLL)
#   $ign       - unused; kept for call-site compatibility
#   $base_addr - optional /base: load address for the DLL, '' for none
#
# Relies on file-level globals set earlier ($o, $lfile, $efile, $oflow,
# $mwex, $fips, $fipsdso, $fipscanisterbuild, $zlib_opt, $zlib_lib,
# $FLAVOR).  Returns the rule text to append to the generated makefile.
sub do_lib_rule
    {
    my ($objs, $target, $name, $shlib, $ign, $base_addr) = @_;
    my $ret = '';
    # BUGFIX: was "$taget" (typo), so the separator conversion was
    # silently applied to an unrelated, empty variable.
    $target =~ s/\//$o/g if $o ne '/';
    my $base_arg;
    if ($base_addr ne "")
        {
        $base_arg = " /base:$base_addr";
        }
    else
        {
        $base_arg = "";
        }
    # Pick the module-definition (.def) file for the linker.
    if ($target =~ /O_CRYPTO/ && $fipsdso)
        {
        $name = "/def:ms/libeayfips.def";
        }
    elsif ($name ne "")
        {
        $name =~ tr/a-z/A-Z/;
        $name = "/def:ms/${name}.def";
        }
    if (!$shlib)
        {
        # Static library: a single MKLIB invocation over the object list.
        my $ex = ' ';   # BUGFIX: was an undeclared package-global assignment
        $ret .= "$target: $objs\n";
        $ret .= "\t\$(MKLIB) $lfile$target @<<\n $objs $ex\n<<\n";
        }
    else
        {
        # DLL: accumulate the extra import libraries in $ex.
        my $ex = "";
        if ($target !~ /O_CRYPTO/)
            {
            $ex .= " \$(L_CRYPTO)";
            #$ex .= " \$(L_FIPS)" if $fipsdso;
            }
        my $fipstarget;
        if ($fipsdso)
            {
            $fipstarget = "O_FIPS";
            }
        else
            {
            $fipstarget = "O_CRYPTO";
            }
        if ($name eq "")
            {
            $ex .= $oflow;
            if ($target =~ /capi/)
                {
                $ex .= ' crypt32.lib advapi32.lib';
                }
            }
        elsif ($FLAVOR =~ /CE/)
            {
            $ex .= ' winsock.lib $(WCECOMPAT)/lib/wcecompatex.lib';
            }
        else
            {
            $ex .= ' unicows.lib' if ($FLAVOR =~ /NT/);
            $ex .= ' wsock32.lib gdi32.lib advapi32.lib user32.lib';
            $ex .= ' crypt32.lib';
            $ex .= $oflow;
            }
        $ex .= " $zlib_lib" if $zlib_opt == 1 && $target =~ /O_CRYPTO/;
        if ($fips && $target =~ /$fipstarget/)
            {
            # FIPS build: the real link is driven by $(FIPSLINK)
            # (fipslink.pl), parameterized through environment variables.
            $ex .= $mwex unless $fipscanisterbuild;
            $ret .= "$target: $objs \$(PREMAIN_DSO_EXE)";
            if ($fipsdso)
                {
                $ex .= " \$(OBJ_D)\\\$(LIBFIPS).res";
                $ret .= " \$(OBJ_D)\\\$(LIBFIPS).res";
                $ret .= " ms/\$(LIBFIPS).def";
                }
            $ret .= "\n\tSET FIPS_LINK=\$(LINK)\n";
            $ret .= "\tSET FIPS_CC=\$(CC)\n";
            $ret .= "\tSET FIPS_CC_ARGS=/Fo\$(OBJ_D)${o}fips_premain.obj \$(SHLIB_CFLAGS) -c\n";
            $ret .= "\tSET PREMAIN_DSO_EXE=\$(PREMAIN_DSO_EXE)\n";
            $ret .= "\tSET FIPS_SHA1_EXE=\$(FIPS_SHA1_EXE)\n";
            $ret .= "\tSET FIPS_TARGET=$target\n";
            $ret .= "\tSET FIPSLIB_D=\$(FIPSLIB_D)\n";
            $ret .= "\t\$(FIPSLINK) \$(MLFLAGS) /fixed /map $base_arg $efile$target ";
            $ret .= "$name @<<\n \$(SHLIB_EX_OBJ) $objs ";
            $ret .= "\$(OBJ_D)${o}fips_premain.obj $ex\n<<\n";
            }
        else
            {
            $ret .= "$target: $objs";
            if ($target =~ /O_CRYPTO/ && $fipsdso)
                {
                $ret .= " \$(O_FIPS)";
                $ex .= " \$(L_FIPS)";
                }
            $ret .= "\n\t\$(LINK) \$(MLFLAGS) $efile$target $name @<<\n \$(SHLIB_EX_OBJ) $objs $ex\n<<\n";
            }
        # Embed the side-by-side manifest into the DLL (resource id 2).
        $ret .= "\tIF EXIST \$@.manifest mt -nologo -manifest \$@.manifest -outputresource:\$@;2\n\n";
        }
    $ret .= "\n";
    return($ret);
    }
# do_link_rule($target, $files, $dep_libs, $libs, $standalone)
#
# Emit the nmake rule that links an executable.
#
#   $target     - output .exe path
#   $files      - object files to link
#   $dep_libs   - libraries listed as prerequisites of the target
#   $libs       - libraries passed to the linker
#   $standalone - 0: normal app link with $(APP_EX_OBJ);
#                 1: standalone link (no app objects);
#                 2: FIPS-integrity link driven by $(FIPSLINK)
#
# Relies on file-level globals ($o, $efile, $fipscanisterbuild) and the
# bname() helper defined elsewhere in this file.  Returns the rule text.
sub do_link_rule
    {
    my ($target, $files, $dep_libs, $libs, $standalone) = @_;
    my $ret = '';
    # BUGFIX: was "$file =~ ..." (typo), so path separators in the object
    # list were never converted for this platform.
    $files =~ s/\//$o/g if $o ne '/';
    # BUGFIX: was "&bname($targer)" (typo for $target).
    $n = &bname($target);
    $ret .= "$target: $files $dep_libs\n";
    if ($standalone == 1)
        {
        $ret .= " \$(LINK) \$(LFLAGS) $efile$target @<<\n\t";
        $ret .= "\$(EX_LIBS) " if ($files =~ /O_FIPSCANISTER/ && !$fipscanisterbuild);
        $ret .= "$files $libs\n<<\n";
        }
    elsif ($standalone == 2)
        {
        # FIPS integrity link: fipslink.pl reads its parameters from the
        # environment variables SET just before the $(FIPSLINK) call.
        $ret .= "\tSET FIPS_LINK=\$(LINK)\n";
        $ret .= "\tSET FIPS_CC=\$(CC)\n";
        $ret .= "\tSET FIPS_CC_ARGS=/Fo\$(OBJ_D)${o}fips_premain.obj \$(SHLIB_CFLAGS) -c\n";
        $ret .= "\tSET PREMAIN_DSO_EXE=\n";
        $ret .= "\tSET FIPS_TARGET=$target\n";
        $ret .= "\tSET FIPS_SHA1_EXE=\$(FIPS_SHA1_EXE)\n";
        $ret .= "\tSET FIPSLIB_D=\$(FIPSLIB_D)\n";
        $ret .= "\t\$(FIPSLINK) \$(LFLAGS) /fixed /map $efile$target @<<\n";
        $ret .= "\t\$(APP_EX_OBJ) $files \$(OBJ_D)${o}fips_premain.obj $libs\n<<\n";
        }
    else
        {
        $ret .= "\t\$(LINK) \$(LFLAGS) $efile$target @<<\n";
        $ret .= "\t\$(APP_EX_OBJ) $files $libs\n<<\n";
        }
    # Embed the side-by-side manifest into the executable (resource id 1).
    $ret .= "\tIF EXIST \$@.manifest mt -nologo -manifest \$@.manifest -outputresource:\$@;1\n\n";
    return($ret);
    }
# do_rlink_rule($target, $rl_start, $rl_mid, $rl_end, $dep_libs, $libs)
#
# Emit the rule that assembles the FIPS canister: renames the link
# segments of the three object groups with segrenam.pl, archives them
# with MKLIB, records the library's SHA1 fingerprint, and stages
# fips_premain.c (plus its .sha1) for later integrity-checked links.
# Relies on file-level globals ($o, $lfile) and bname().
sub do_rlink_rule
    {
    my ($target, $rl_start, $rl_mid, $rl_end, $dep_libs, $libs) = @_;
    my $ret = '';
    my $files = "$rl_start $rl_mid $rl_end";
    # BUGFIX: was "$file =~ ..." (typo); convert separators in the list.
    $files =~ s/\//$o/g if $o ne '/';
    # BUGFIX: was "&bname($targer)" (typo for $target).
    $n = &bname($target);
    $ret .= "$target: $files $dep_libs \$(FIPS_SHA1_EXE)\n";
    $ret .= "\t\$(PERL) ms\\segrenam.pl \$\$a $rl_start\n";
    $ret .= "\t\$(PERL) ms\\segrenam.pl \$\$b $rl_mid\n";
    $ret .= "\t\$(PERL) ms\\segrenam.pl \$\$c $rl_end\n";
    $ret .= "\t\$(MKLIB) $lfile$target @<<\n\t$files\n<<\n";
    $ret .= "\t\$(FIPS_SHA1_EXE) $target > ${target}.sha1\n";
    $ret .= "\t\$(PERL) util${o}copy.pl -stripcr fips${o}fips_premain.c \$(LIB_D)${o}fips_premain.c\n";
    $ret .= "\t\$(CP) fips${o}fips_premain.c.sha1 \$(LIB_D)${o}fips_premain.c.sha1\n";
    $ret .= "\n";
    return($ret);
    }
# Emit the rule that regenerates ms/$(LIBFIPS).def from the FIPS
# canister object via util/mksdef.pl.  Takes no arguments; returns the
# rule text to append to the generated makefile.
sub do_sdef_rule
    {
    my @rule = (
        "ms/\$(LIBFIPS).def: \$(O_FIPSCANISTER)\n",
        "\t\$(PERL) util/mksdef.pl \$(MLFLAGS) /out:dummy.dll /def:ms/libeay32.def @<<\n \$(O_FIPSCANISTER)\n<<\n",
        "\n",
    );
    return join('', @rule);
    }
1;
| GaloisInc/hacrypto | src/C/openssl/openssl-0.9.8zh/util/pl/VC-32.pl | Perl | bsd-3-clause | 14,335 |
% Package metadata for the SWI-Prolog `smtp' pack, read by the pack manager.
name(smtp).
title('An (E)SMTP client for sending mail').
% Pack version as a 'Major.Minor.Patch' atom.
version('1.0.0').
keywords([smtp, mail, sendmail]).
author('Jan Wielemaker', 'J.Wielemaker@vu.nl').
home('https://github.com/JanWielemaker/smtp').
% Wildcard pattern matching downloadable release archives.
download('https://github.com/JanWielemaker/smtp/releases/*.zip').
| TeamSPoon/logicmoo_workspace | packs_web/swish/pack/smtp/pack.pl | Perl | mit | 270 |
#
# Copyright 2017 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Hardware component checker for D-Link DGS-3100 power supplies.
# load() registers the SNMP table to fetch; check() walks the results
# and reports each supply's operational state.
package network::dlink::dgs3100::snmp::mode::components::psu;
use strict;
use warnings;
# Numeric rlEnvMonSupplyState values -> human-readable states.
my %map_states = (
1 => 'normal',
2 => 'warning',
3 => 'critical',
4 => 'shutdown',
5 => 'notPresent',
6 => 'notFunctioning',
);
# In MIB 'env_mib.mib'
my $mapping = {
rlEnvMonSupplyStatusDescr => { oid => '.1.3.6.1.4.1.171.10.94.89.89.83.1.2.1.2' },
rlEnvMonSupplyState => { oid => '.1.3.6.1.4.1.171.10.94.89.89.83.1.2.1.3', map => \%map_states },
};
my $oid_rlEnvMonSupplyStatusEntry = '.1.3.6.1.4.1.171.10.94.89.89.83.1.2.1';
# Queue the whole supply-status table for the bulk SNMP request.
sub load {
my ($self) = @_;
push @{$self->{request}}, { oid => $oid_rlEnvMonSupplyStatusEntry };
}
# Walk the fetched table, honoring user filters, and emit one long
# message per supply plus a short alert when severity is not 'ok'.
sub check {
my ($self) = @_;
$self->{output}->output_add(long_msg => "Checking power supplies");
$self->{components}->{psu} = {name => 'psus', total => 0, skip => 0};
return if ($self->check_filter(section => 'psu'));
# Iterate description OIDs only; the instance index is the OID suffix.
foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_rlEnvMonSupplyStatusEntry}})) {
next if ($oid !~ /^$mapping->{rlEnvMonSupplyStatusDescr}->{oid}\.(.*)$/);
my $instance = $1;
my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{$oid_rlEnvMonSupplyStatusEntry}, instance => $instance);
next if ($self->check_filter(section => 'psu', instance => $result->{rlEnvMonSupplyStatusDescr}));
# Skip absent supplies unless the user asked to treat absence as a problem.
next if ($result->{rlEnvMonSupplyState} eq 'notPresent' &&
$self->absent_problem(section => 'psu', instance => $result->{rlEnvMonSupplyStatusDescr}));
$self->{components}->{psu}->{total}++;
$self->{output}->output_add(long_msg => sprintf("Power supply '%s' status is %s.",
$result->{rlEnvMonSupplyStatusDescr}, $result->{rlEnvMonSupplyState}
));
my $exit = $self->get_severity(section => 'psu', value => $result->{rlEnvMonSupplyState});
if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
$self->{output}->output_add(severity => $exit,
short_msg => sprintf("Power supply '%s' status is %s",
$result->{rlEnvMonSupplyStatusDescr}, $result->{rlEnvMonSupplyState}));
}
}
}
1; | Shini31/centreon-plugins | network/dlink/dgs3100/snmp/mode/components/psu.pm | Perl | apache-2.0 | 3,112 |
package AnyDBM_File;
use warnings;
use strict;
use 5.006_001;
our $VERSION = '1.01';
# Default backend preference order; callers may pre-set @ISA before
# loading this module to choose a different DBM implementation.
our @ISA = qw(NDBM_File DB_File GDBM_File SDBM_File ODBM_File) unless @ISA;
my $mod;
# Probe each candidate backend in order and inherit from the first one
# that loads successfully.
for $mod (@ISA) {
if (eval "require $mod") {
@ISA = ($mod); # if we leave @ISA alone, warnings abound
# A file-scope "return" is legal here: this file runs under
# require/use, so returning 1 signals a successful module load.
return 1;
}
}
die "No DBM package was successfully found or installed";
__END__
=head1 NAME
AnyDBM_File - provide framework for multiple DBMs
NDBM_File, DB_File, GDBM_File, SDBM_File, ODBM_File - various DBM implementations
=head1 SYNOPSIS
use AnyDBM_File;
=head1 DESCRIPTION
This module is a "pure virtual base class"--it has nothing of its own.
It's just there to inherit from one of the various DBM packages. It
prefers ndbm for compatibility reasons with Perl 4, then Berkeley DB (See
L<DB_File>), GDBM, SDBM (which is always there--it comes with Perl), and
finally ODBM. This way old programs that used to use NDBM via dbmopen()
can still do so, but new ones can reorder @ISA:
BEGIN { @AnyDBM_File::ISA = qw(DB_File GDBM_File NDBM_File) }
use AnyDBM_File;
Having multiple DBM implementations makes it trivial to copy database formats:
use Fcntl; use NDBM_File; use DB_File;
tie %newhash, 'DB_File', $new_filename, O_CREAT|O_RDWR;
tie %oldhash, 'NDBM_File', $old_filename, 1, 0;
%newhash = %oldhash;
=head2 DBM Comparisons
Here's a partial table of features the different packages offer:
odbm ndbm sdbm gdbm bsd-db
---- ---- ---- ---- ------
Linkage comes w/ perl yes yes yes yes yes
Src comes w/ perl no no yes no no
Comes w/ many unix os yes yes[0] no no no
Builds ok on !unix ? ? yes yes ?
Code Size ? ? small big big
Database Size ? ? small big? ok[1]
Speed ? ? slow ok fast
FTPable no no yes yes yes
Easy to build N/A N/A yes yes ok[2]
Size limits 1k 4k 1k[3] none none
Byte-order independent no no no no yes
Licensing restrictions ? ? no yes no
=over 4
=item [0]
on mixed universe machines, may be in the bsd compat library,
which is often shunned.
=item [1]
Can be trimmed if you compile for one access method.
=item [2]
See L<DB_File>.
Requires symbolic links.
=item [3]
By default, but can be redefined.
=back
=head1 SEE ALSO
dbm(3), ndbm(3), DB_File(3), L<perldbmfilter>
=cut
| operepo/ope | client_tools/svc/rc/usr/share/perl5/core_perl/AnyDBM_File.pm | Perl | mit | 2,611 |
#!/usr/bin/env perl
########################################################################
# Authors: Christopher Henry, Scott Devoid, Paul Frybarger
# Contact email: chenry@mcs.anl.gov
# Development location: Mathematics and Computer Science Division, Argonne National Lab
########################################################################
use strict;
use warnings;
use Bio::KBase::workspace::ScriptHelpers qw( get_ws_client workspace workspaceURL parseObjectMeta parseWorkspaceMeta printObjectMeta);
use Bio::KBase::fbaModelServices::ScriptHelpers qw(fbaws printJobData get_fba_client runFBACommand universalFBAScriptCode );
#Defining globals describing behavior
# Positional CLI arguments, server RPC name, and script name.
my $primaryArgs = ["Model ID","Phenotype set"];
my $servercommand = "queue_reconciliation_sensitivity_analysis";
my $script = "fba-phenosensitivity";
# Maps CLI option names to the top-level RPC parameter names.
my $translation = {
"Model ID" => "model",
"Phenotype set" => "phenotypeSet",
modelws => "model_workspace",
phenows => "phenotypeSet_workspace",
workspace => "workspace",
auth => "auth",
overwrite => "overwrite",
nosubmit => "donot_submit_job",
};
# Maps CLI option names to keys inside the FBA formulation sub-structure.
my $fbaTranslation = {
objfraction => "objfraction",
allrev => "allreversible",
maximize => "maximizeObjective",
defaultmaxflux => "defaultmaxflux",
defaultminuptake => "defaultminuptake",
defaultmaxuptake => "defaultmaxuptake",
simplethermo => "simplethermoconst",
thermoconst => "thermoconst",
nothermoerror => "nothermoerror",
minthermoerror => "minthermoerror"
};
#Defining usage and options
# Getopt::Long::Descriptive-style spec list: [option-spec, description, defaults].
my $specs = [
[ 'phenows:s', 'Workspace with phenotype data object' ],
[ 'modelws:s', 'Workspace with model object' ],
[ 'maximize:s', 'Maximize objective', { "default" => 1 } ],
[ 'gapfills:s@', 'List of gapfillings to assess' ],
[ 'gapgens:s@', 'List of gapgenerations to assess' ],
[ 'objterms:s@', 'Objective terms' ],
[ 'geneko:s@', 'List of gene KO (; delimiter)' ],
[ 'rxnko:s@', 'List of reaction KO (; delimiter)' ],
[ 'bounds:s@', 'Custom bounds' ],
[ 'constraints:s@', 'Custom constraints' ],
[ 'defaultmaxflux:s', 'Default maximum reaction flux' ],
[ 'defaultminuptake:s', 'Default minimum nutrient uptake' ],
[ 'defaultmaxuptake:s', 'Default maximum nutrient uptake' ],
[ 'uptakelim:s@', 'Atom uptake limits' ],
[ 'simplethermo', 'Use simple thermodynamic constraints' ],
[ 'thermoconst', 'Use full thermodynamic constraints' ],
[ 'nothermoerror', 'No uncertainty in thermodynamic constraints' ],
[ 'minthermoerror', 'Minimize uncertainty in thermodynamic constraints' ],
[ 'allrev', 'Treat all reactions as reversible', { "default" => 0 } ],
[ 'objfraction:s', 'Fraction of objective for follow on analysis', { "default" => 0.1 }],
[ 'notes:s', 'Notes for flux balance analysis' ],
[ 'nosubmit', 'Do not submit job to cluster', { "default" => 0 } ],
[ 'workspace|w:s', 'Workspace to save FBA results', { "default" => fbaws() } ],
[ 'overwrite|o', 'Overwrite any existing FBA with same name' ]
];
# Parse the command line; $opt holds parsed options, $params the
# partially-built RPC parameter structure.
my ($opt,$params) = universalFBAScriptCode($specs,$script,$primaryArgs,$translation);
# Split each semicolon-delimited --gapfills/--gapgens option value into
# individual IDs and collect them into the request parameters.
if (defined($opt->{gapfills})) {
    push @{$params->{gapFills}}, split(/;/, $_) for @{$opt->{gapfills}};
}
if (defined($opt->{gapgens})) {
    push @{$params->{gapGens}}, split(/;/, $_) for @{$opt->{gapgens}};
}
# Initialize the FBA formulation sub-structure with empty collections.
$params->{formulation} = {
geneko => [],
rxnko => [],
bounds => [],
constraints => [],
uptakelim => {},
additionalcpds => []
};
# Copy simple scalar options straight into the formulation using the
# CLI-name -> formulation-key translation table.
foreach my $key (keys(%{$fbaTranslation})) {
if (defined($opt->{$key})) {
$params->{formulation}->{$fbaTranslation->{$key}} = $opt->{$key};
}
}
# Objective terms: each option value is ';'-separated terms, each term
# ':'-separated fields; only terms with at least 3 fields are kept.
if (defined($opt->{objterms})) {
foreach my $terms (@{$opt->{objterms}}) {
my $array = [split(/;/,$terms)];
foreach my $term (@{$array}) {
my $termArray = [split(/:/,$term)];
if (defined($termArray->[2])) {
push(@{$params->{formulation}->{objectiveTerms}},$termArray);
}
}
}
}
# Gene knockouts: ';'-separated gene IDs per option occurrence.
if (defined($opt->{geneko})) {
foreach my $gene (@{$opt->{geneko}}) {
push(@{$params->{formulation}->{geneko}},split(/;/,$gene));
}
}
# Reaction knockouts: ';'-separated reaction IDs per option occurrence.
if (defined($opt->{rxnko})) {
foreach my $rxn (@{$opt->{rxnko}}) {
push(@{$params->{formulation}->{rxnko}},split(/;/,$rxn));
}
}
# Custom bounds: ';'-separated entries of 4 ':'-separated fields.
if (defined($opt->{bounds})) {
foreach my $terms (@{$opt->{bounds}}) {
my $array = [split(/;/,$terms)];
foreach my $term (@{$array}) {
my $termArray = [split(/:/,$term)];
if (defined($termArray->[3])) {
push(@{$params->{formulation}->{bounds}},$termArray);
}
}
}
}
# Custom constraints: "rhs;sign;term;term;..." where each term has at
# least 3 ':'-separated fields; each constraint gets a generated name.
if (defined($opt->{constraints})) {
my $count = 0;
foreach my $constraint (@{$opt->{constraints}}) {
my $array = [split(/;/,$constraint)];
my $rhs = shift(@{$array});
my $sign = shift(@{$array});
my $terms = [];
foreach my $term (@{$array}) {
my $termArray = [split(/:/,$term)];
if (defined($termArray->[2])) {
push(@{$terms},$termArray)
}
}
push(@{$params->{formulation}->{constraints}},[$rhs,$sign,$terms,"Constraint ".$count]);
$count++;
}
}
# Atom uptake limits: values of the form "Atom:limit[;Atom:limit...]"
# parsed into the formulation's uptakelim hash.
if (defined($opt->{uptakelim})) {
    # BUGFIX: this loop previously iterated @{$opt->{rxnko}} (copy/paste
    # from the reaction-KO block above), so --uptakelim was never parsed.
    foreach my $uplims (@{$opt->{uptakelim}}) {
        my $array = [split(/;/,$uplims)];
        foreach my $uplim (@{$array}) {
            my $pair = [split(/:/,$uplim)];
            if (defined($pair->[1])) {
                $params->{formulation}->{uptakelim}->{$pair->[0]} = $pair->[1];
            }
        }
    }
}
#Calling the server
# Submit the assembled parameters to the RPC endpoint; returns the job
# data structure, or undef on failure.
my $output = runFBACommand($params,$servercommand,$opt);
#Checking output and report results
if (!defined($output)) {
print "Phenotype sensitivity analysis failed!\n";
} else {
print "Phenotype sensitivity analysis successful:\n";
printJobData($output);
}
| kbase/KBaseFBAModeling | scripts/fba-phenosensitivity.pl | Perl | mit | 5,501 |
=head1 NAME
Apache2::FilterRec - Perl API for manipulating the Apache filter record
=head1 Synopsis
use Apache2::Filter ();
use Apache2::FilterRec ();
my $frec = $filter->frec;
print "filter name is:", $frec->name;
=head1 Description
C<Apache2::FilterRec> provides access to the filter record
structure.
The C<Apache2::FilterRec> object is retrieved by calling
C<L<frec()|docs::2.0::api::Apache2::Filter/C_frec_>>:
$frec = $filter->frec;
=head1 API
C<Apache2::FilterRec> provides the following functions and/or methods:
=head2 C<name>
The registered name for this filter
$name = $frec->name();
=over 4
=item obj: C<$frec>
( C<L<Apache2::FilterRec object|docs::2.0::api::Apache2::FilterRec>> )
=item ret: C<$name> (string)
=item since: 2.0.00
=back
mod_perl filters have four names:
modperl_request_output
modperl_request_input
modperl_connection_output
modperl_connection_input
You can see the names of the non-mod_perl filters as well. By calling
C<L<$filter-E<gt>next-E<gt>frec-E<gt>name|docs::2.0::api::Apache2::Filter/C_next_>>
you can get the name of the next filter in the chain.
Example:
Let's print the name of the current and the filter that follows it:
use Apache2::Filter ();
use Apache2::FilterRec ();
for my $frec ($filter->frec, $filter->next->frec) {
print "Name: ", $frec->name;
}
=head1 See Also
L<mod_perl 2.0 documentation|docs::2.0::index>.
=head1 Copyright
mod_perl 2.0 and its core modules are copyrighted under
The Apache Software License, Version 2.0.
=head1 Authors
L<The mod_perl development team and numerous
contributors|about::contributors::people>.
=cut
| Distrotech/mod_perl | docs/src/docs/2.0/api/Apache2/FilterRec.pod | Perl | apache-2.0 | 1,679 |
=head1 foobar-lib.pl
Functions for the Foobar Web Server. This is an example Webmin module for a
simple fictional webserver.
=cut
use WebminCore;
init_config();
=head2 list_foobar_websites()
Returns a list of all websites served by the Foobar webserver, as hash
references with C<domain> and C<directory> keys.
=cut
# list_foobar_websites()
#
# Parses the Foobar config file named by $config{'foobar_conf'} and
# returns a list of hash refs with 'domain', 'directory' and 'line'
# (the 0-based config-file line number, used later by modify/delete).
# Comment text after '#' is stripped; malformed lines are skipped.
sub list_foobar_websites
{
my @rv;
my $lnum = 0;
# BUGFIX: was an unchecked 2-arg open on a bareword handle; use a
# lexical handle and 3-arg open, returning the (empty) list if the
# file cannot be read, matching the old best-effort behavior.
open(my $conf, '<', $config{'foobar_conf'}) || return @rv;
while(<$conf>) {
s/\r|\n//g;
s/#.*$//;
my ($dom, $dir) = split(/\s+/, $_);
if ($dom && $dir) {
push(@rv, { 'domain' => $dom,
'directory' => $dir,
'line' => $lnum });
}
$lnum++;
}
close($conf);
return @rv;
}
=head2 create_foobar_website(&site)
Adds a new website, specified by the C<site> hash reference parameter, which
must contain C<domain> and C<directory> keys.
=cut
sub create_foobar_website
{
# $site must contain 'domain' and 'directory' keys.  The new entry is
# appended via Webmin's open_tempfile/print_tempfile/close_tempfile
# safe-write helpers.
my ($site) = @_;
open_tempfile(CONF, ">>$config{'foobar_conf'}");
print_tempfile(CONF, $site->{'domain'}." ".$site->{'directory'}."\n");
close_tempfile(CONF);
}
=head2 modify_foobar_website(&site)
Updates a website specified by the C<site> hash reference parameter, which
must be a modified entry returned from the C<list_foobar_websites> function.
=cut
# Rewrite the config line for an existing website.  $website must be an
# entry previously returned by list_foobar_websites; its 'line' field
# identifies which line of the config file to replace.
sub modify_foobar_website
{
my ($website) = @_;
my $lines = read_file_lines($config{'foobar_conf'});
my $entry = join(" ", $website->{'domain'}, $website->{'directory'});
$lines->[$website->{'line'}] = $entry;
flush_file_lines($config{'foobar_conf'});
}
=head2 delete_foobar_website(&site)
Deletes a website, specified by the C<site> hash reference parameter, which
must have been one of the elements returned by C<list_foobar_websites>
=cut
# Remove a website's line from the config file.  $website must be an
# entry previously returned by list_foobar_websites.
sub delete_foobar_website
{
my ($website) = @_;
my $lines = read_file_lines($config{'foobar_conf'});
# Drop exactly the single line recorded for this entry.
splice(@$lines, $website->{'line'}, 1);
flush_file_lines($config{'foobar_conf'});
}
=head2 apply_configuration()
Signal the Foobar webserver process to re-read it's configuration files.
=cut
sub apply_configuration
{
# Send SIGHUP (via Webmin's logged kill-by-name helper) so the foobard
# daemon re-reads its configuration files.
kill_byname_logged('HUP', 'foobard');
}
1;
| BangL/webmin | foobar/foobar-lib.pl | Perl | bsd-3-clause | 1,949 |
#!/usr/bin/perl -w
# -*- Mode: Perl; tab-width: 4; indent-tabs-mode: nil; -*-
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is Mozilla JavaScript Testing Utilities
#
# The Initial Developer of the Original Code is
# Mozilla Corporation.
# Portions created by the Initial Developer are Copyright (C) 2008
# the Initial Developer. All Rights Reserved.
#
# Contributor(s): Bob Clary <bclary@bclary.com>
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
# usage: pattern-expander.pl knownfailures > knownfailures.expanded
#
# pattern-expander.pl reads the specified knownfailures file and
# writes to stdout an expanded set of failures where the wildcards
# ".*" are replaced with the set of possible values specified in the
# universe.data file.
use lib $ENV{TEST_DIR} . "/tests/mozilla.org/js";
use Patterns;
package Patterns;
processfile();
# processfile
#
# Reads knownfailures records from ARGV (one per line), expands every
# field whose value is the wildcard ".*" into one record per concrete
# value taken from the universe data, then sorts and dumps the expanded
# record set.  Relies on helpers from the Patterns package
# (copyreference, getuniversekey, getuniverse, recordtostring,
# sortrecords, dumprecords, dbg) and on its package globals
# ($record, $item1, $item2, @records, @universefields, $DEBUG).
sub processfile
{
my ($i, $j);
while (<ARGV>) {
chomp;
$record = {};
# Parse one comma-separated "TEST_*=value" record into its fields.
my ($test_id, $test_branch, $test_repo, $test_buildtype, $test_type, $test_os, $test_kernel, $test_processortype, $test_memory, $test_timezone, $test_options, $test_result, $test_exitstatus, $test_description) = $_ =~
/TEST_ID=([^,]*), TEST_BRANCH=([^,]*), TEST_REPO=([^,]*), TEST_BUILDTYPE=([^,]*), TEST_TYPE=([^,]*), TEST_OS=([^,]*), TEST_KERNEL=([^,]*), TEST_PROCESSORTYPE=([^,]*), TEST_MEMORY=([^,]*), TEST_TIMEZONE=([^,]*), TEST_OPTIONS=([^,]*), TEST_RESULT=([^,]*), TEST_EXITSTATUS=([^,]*), TEST_DESCRIPTION=(.*)/;
$record->{TEST_ID} = $test_id;
$record->{TEST_BRANCH} = $test_branch;
$record->{TEST_REPO} = $test_repo;
$record->{TEST_BUILDTYPE} = $test_buildtype;
$record->{TEST_TYPE} = $test_type;
$record->{TEST_OS} = $test_os;
$record->{TEST_KERNEL} = $test_kernel;
$record->{TEST_PROCESSORTYPE} = $test_processortype;
$record->{TEST_MEMORY} = $test_memory;
$record->{TEST_TIMEZONE} = $test_timezone;
$record->{TEST_OPTIONS} = $test_options;
$record->{TEST_RESULT} = $test_result;
$record->{TEST_EXITSTATUS} = $test_exitstatus;
$record->{TEST_DESCRIPTION} = $test_description;
if ($DEBUG) {
dbg("processfile: \$_=$_");
}
# list1 holds the partially-expanded records for the fields handled
# so far; list2 accumulates the next expansion round.
my @list1 = ();
my @list2 = ();
my $iuniversefield;
my $universefield;
$item1 = copyreference($record);
if ($DEBUG) {
dbg("processfile: check copyreference");
dbg("processfile: \$record=" . recordtostring($record));
dbg("processfile: \$item1=" . recordtostring($item1));
}
push @list1, ($item1);
# Expand one universe field at a time over every record gathered so far.
for ($iuniversefield = 0; $iuniversefield < @universefields; $iuniversefield++)
{
$universefield = $universefields[$iuniversefield];
if ($DEBUG) {
dbg("processfile: \$universefields[$iuniversefield]=$universefield, \$record->{$universefield}=$record->{$universefield}");
}
for ($j = 0; $j < @list1; $j++)
{
$item1 = $list1[$j];
if ($DEBUG) {
dbg("processfile: item1 \$list1[$j]=" . recordtostring($item1));
}
# create a reference to a copy of the hash referenced by $item1
if ($item1->{$universefield} ne '.*')
{
# Literal value: carry the record forward unchanged.
if ($DEBUG) {
dbg("processfile: literal value");
}
$item2 = copyreference($item1);
if ($DEBUG) {
dbg("processfile: check copyreference");
dbg("processfile: \$item1=" . recordtostring($item1));
dbg("processfile: \$item2=" . recordtostring($item2));
dbg("processfile: pushing existing record to list 2: " . recordtostring($item2));
}
push @list2, ($item2);
}
else
{
# Wildcard: emit one copy of the record per possible value of
# this field in the relevant universe.
if ($DEBUG) {
dbg("processfile: wildcard value");
}
$keyfielduniversekey = getuniversekey($item1, $universefield);
@keyfielduniverse = getuniverse($keyfielduniversekey, $universefield);
if ($DEBUG) {
dbg("processfile: \$keyfielduniversekey=$keyfielduniversekey, \@keyfielduniverse=" . join(',', @keyfielduniverse));
}
for ($i = 0; $i < @keyfielduniverse; $i++)
{
$item2 = copyreference($item1);
if ($DEBUG) {
dbg("processfile: check copyreference");
dbg("processfile: \$item1=" . recordtostring($item1));
dbg("processfile: \$item2=" . recordtostring($item2));
}
$item2->{$universefield} = $keyfielduniverse[$i];
if ($DEBUG) {
dbg("processfile: pushing new record to list 2 " . recordtostring($item2));
}
push @list2, ($item2);
}
}
if ($DEBUG) {
for ($i = 0; $i < @list1; $i++)
{
dbg("processfile: \$list1[$i]=" . recordtostring($list1[$i]));
}
for ($i = 0; $i < @list2; $i++)
{
dbg("processfile: \$list2[$i]=" . recordtostring($list2[$i]));
}
}
}
# This round's output becomes the next round's input.
@list1 = @list2;
@list2 = ();
}
# All fields expanded: collect the fully-concrete records.
for ($j = 0; $j < @list1; $j++)
{
$item1 = $list1[$j];
push @records, ($item1);
}
}
@records = sort sortrecords @records;
dumprecords();
}
| glycerine/vj | src/js-1.8.5/js/src/tests/pattern-expander.pl | Perl | apache-2.0 | 7,393 |
package IPC::Open2;
use strict;
our ($VERSION, @ISA, @EXPORT);
require 5.000;
require Exporter;
$VERSION = 1.04;
@ISA = qw(Exporter);
@EXPORT = qw(open2);
=head1 NAME
IPC::Open2 - open a process for both reading and writing using open2()
=head1 SYNOPSIS
use IPC::Open2;
$pid = open2(\*CHLD_OUT, \*CHLD_IN, 'some cmd and args');
# or without using the shell
$pid = open2(\*CHLD_OUT, \*CHLD_IN, 'some', 'cmd', 'and', 'args');
# or with handle autovivification
my($chld_out, $chld_in);
$pid = open2($chld_out, $chld_in, 'some cmd and args');
# or without using the shell
$pid = open2($chld_out, $chld_in, 'some', 'cmd', 'and', 'args');
waitpid( $pid, 0 );
my $child_exit_status = $? >> 8;
=head1 DESCRIPTION
The open2() function runs the given $cmd and connects $chld_out for
reading and $chld_in for writing. It's what you think should work
when you try
$pid = open(HANDLE, "|cmd args|");
The write filehandle will have autoflush turned on.
If $chld_out is a string (that is, a bareword filehandle rather than a glob
or a reference) and it begins with C<< >& >>, then the child will send output
directly to that file handle. If $chld_in is a string that begins with
C<< <& >>, then $chld_in will be closed in the parent, and the child will
read from it directly. In both cases, there will be a dup(2) instead of a
pipe(2) made.
If either reader or writer is the null string, this will be replaced
by an autogenerated filehandle. If so, you must pass a valid lvalue
in the parameter slot so it can be overwritten in the caller, or
an exception will be raised.
open2() returns the process ID of the child process. It doesn't return on
failure: it just raises an exception matching C</^open2:/>. However,
C<exec> failures in the child are not detected. You'll have to
trap SIGPIPE yourself.
open2() does not wait for and reap the child process after it exits.
Except for short programs where it's acceptable to let the operating system
take care of this, you need to do this yourself. This is normally as
simple as calling C<waitpid $pid, 0> when you're done with the process.
Failing to do this can result in an accumulation of defunct or "zombie"
processes. See L<perlfunc/waitpid> for more information.
This whole affair is quite dangerous, as you may block forever. It
assumes it's going to talk to something like B<bc>, both writing
to it and reading from it. This is presumably safe because you
"know" that commands like B<bc> will read a line at a time and
output a line at a time. Programs like B<sort> that read their
entire input stream first, however, are quite apt to cause deadlock.
The big problem with this approach is that if you don't have control
over source code being run in the child process, you can't control
what it does with pipe buffering. Thus you can't just open a pipe to
C<cat -v> and continually read and write a line from it.
The IO::Pty and Expect modules from CPAN can help with this, as they
provide a real tty (well, a pseudo-tty, actually), which gets you
back to line buffering in the invoked command again.
=head1 WARNING
The order of arguments differs from that of open3().
=head1 SEE ALSO
See L<IPC::Open3> for an alternative that handles STDERR as well. This
function is really just a wrapper around open3().
=cut
# &open2: tom christiansen, <tchrist@convex.com>
#
# usage: $pid = open2('rdr', 'wtr', 'some cmd and args');
# or $pid = open2('rdr', 'wtr', 'some', 'cmd', 'and', 'args');
#
# spawn the given $cmd and connect $rdr for
# reading and $wtr for writing. return pid
# of child, or 0 on failure.
#
# WARNING: this is dangerous, as you may block forever
# unless you are very careful.
#
# $wtr is left unbuffered.
#
# abort program if
# rdr or wtr are null
# a system call fails
require IPC::Open3;
# open2($chld_out, $chld_in, @cmd) is a thin wrapper around
# IPC::Open3's internal _open3(): the child's stderr stays attached to
# the caller's STDERR, and the reader/writer argument order is swapped
# relative to open3().
sub open2 {
# Raise CarpLevel so errors are reported at the caller's location,
# not inside this wrapper.
local $Carp::CarpLevel = $Carp::CarpLevel + 1;
return IPC::Open3::_open3('open2', $_[1], $_[0], '>&STDERR', @_[2 .. $#_]);
}
1
| Dokaponteam/ITF_Project | xampp/perl/lib/IPC/Open2.pm | Perl | mit | 4,004 |
:- module(util, [
new_database/0,
set_default_username/1, % +Username
set_no_auth/0,
default_user_id/1, % -Id
request_get/2, % +Path, -Dict
request_put/3, % +Path, +DictIn, -DictOut
request_del/2, % +Path, -Dict
request_post/3, % +Path, +DictIn, -DictOut
request_get_content/2, % +Path, -String
is_invalid_data/1 % +Response
]).
/** <module> Test utilities
The module contains utility predicates
for unit/integration testing.
*/
:- use_module(library(http/json)).
:- use_module(library(http/http_open)).
:- use_module(library(http/http_json)).
:- use_module(library(http/http_client)).
:- use_module(library(docstore)).
:- use_module(prolog/bc/bc_data).
:- use_module(prolog/bc/bc_data_user).
:- use_module(prolog/bc/bc_data_comment).
:- use_module(prolog/bc/bc_data_entry).
:- dynamic(default_username/1).
:- dynamic(no_auth/0).
% Recreates the test database.
% This also runs the initial migrations.
new_database:-
bc_data_close,
( exists_file('test.docstore')
-> delete_file('test.docstore')
; true),
bc_data_open('test.docstore'),
retractall(default_username(_)),
asserta(default_username('admin@example.com')),
retractall(no_auth).
% Sets default username.
% Call in the middle of test to
% set the user.
set_default_username(Username):-
retractall(default_username(_)),
asserta(default_username(Username)).
% Disables authentication for API calls.
set_no_auth:-
asserta(no_auth).
% Retrieves the default test user id.
default_user_id(UserId):-
default_username(Username),
ds_find(user, username=Username, [User]),
User.'$id' = UserId.
% Auth key for the test user.
test_auth_key(Key):-
default_username(Username),
ds_find(user, username=Username, [key], [User]),
User.key = Key.
% Performs a GET request against the local test server and
% parses the JSON response body into a dict.
request_get(Path, Dict):-
request_options(Options),
atom_concat('http://localhost:18008', Path, Url),
http_open(Url, Stream, Options),
json_read_dict(Stream, Dict),
close(Stream).
% POSTs the dict In as JSON; Out is the parsed JSON response.
request_post(Path, In, Out):-
request_options(BaseOptions),
Options = [ post(json(In)) | BaseOptions ],
atom_concat('http://localhost:18008', Path, Url),
http_open(Url, Stream, Options),
json_read_dict(Stream, Out),
close(Stream).
% PUTs the dict In as JSON; Out is the parsed JSON response.
request_put(Path, In, Out):-
request_options(BaseOptions),
Options = [ post(json(In)), method(put) | BaseOptions ],
atom_concat('http://localhost:18008', Path, Url),
http_open(Url, Stream, Options),
json_read_dict(Stream, Out),
close(Stream).
% DELETEs the given path; Dict is the parsed JSON response.
request_del(Path, Dict):-
request_options(BaseOptions),
Options = [ method(delete) | BaseOptions ],
atom_concat('http://localhost:18008', Path, Url),
http_open(Url, Stream, Options),
json_read_dict(Stream, Dict),
close(Stream).
% Common http_open/3 options: adds the X-Key auth header unless
% authentication was disabled with set_no_auth/0.
request_options(Options):-
( no_auth
-> Options = []
; test_auth_key(Key),
Options = [ request_header('X-Key'=Key) ]).
% Fetches a path and returns the raw response body as a string,
% accepting any HTTP status code.
request_get_content(Path, String):-
atom_concat('http://localhost:18008', Path, Url),
http_open(Url, Stream, [ status_code(_) ]),
read_string(Stream, _, String),
close(Stream).
% FIXME rename to is_response_invalid_data
% Succeeds when Response is an error response whose message
% starts with "Invalid input".
is_invalid_data(Response):-
Response.status = "error",
sub_string(Response.message, 0, _, _, "Invalid input").
| kalatestimine/blog-core | tests/util/util.pl | Perl | mit | 3,341 |
#!/usr/bin/perl
use strict;
use warnings;
use diagnostics;
# This works for IMGUI 1.50 WIP and does not get all functions
#
# to use ./generate_imgui_bindings.pl <imgui.h >imgui_iterator.cpp
# and define macros properly as in example imgui_lua_bindings.cpp
#
# check imgui_iterator for explanations of why some functions are not supported yet
# Functions excluded from binding generation (unsupported or unsafe to
# expose through the Lua bindings).
my %bannedNames = (
    "GetColorU32" => "banned",
    "NewFrame" => "banned",
    "Shutdown" => "banned",
);
# define bannedNames with keys of functions to exclude them
# EXAMPLE:
# my %bannedNames = (
#  "NewFrame" => "banned",
#  "Render" => "banned",
#  "Shutdown" => "banned" );

# This is only useful for ENABLE_IM_LUA_END_STACK
# We hold a list of different 'things' that can be pushed to the stack
# i.e. Group for BeginGroup
# It usually works like this BeginBlah EndBlah
# We have to redefine stuff when it doesn't work so cleanly

# Begin-side functions whose "end type" cannot be derived from their name.
my %beginN = (
    "TreeNode" => "Tree",
    "TreePush" => "Tree"
);
# End types whose closing function is not simply "End<type>".
my %changeN = (
    "Tree" => "TreePop"
);
# End-side functions whose "end type" cannot be derived from their name.
my %endN = (
    "TreePop" => "Tree"
);
# Begin-side end types that all map onto a shared closer (EndPopup).
my %endOverride = (
    "PopupModal" => "Popup",
    "PopupContextItem" => "Popup",
    "PopupContextWindow" => "Popup",
    "PopupContextVoid" => "Popup" );

# Running counters, reported on STDERR at the end of the run.
my $numSupported = 0;
my $numUnsupported = 0;
my $line;
# Counts emitted overloads per function name (used to suffix duplicates).
my %funcNames;
# Maps an end-type name to its integer id; @endTypes is the reverse map.
my %endTypeToInt;
my @endTypes;
# Main driver: read imgui.h from STDIN line by line, emit one
# IMGUI_FUNCTION(...) wrapper per supported IMGUI_API declaration, and
# echo every declaration as a comment so unsupported ones are visible.
while ($line = <STDIN>) {
    # replace ImVec2(x, y) with ImVec2 x y so it's easier for regex
    $line =~ s/ImVec2\(([^,]*),([^\)]*)\)/ImVec2 $1 $2/g;
    $line =~ s/ImVec4\(([^,]*),([^,]*),([^,]*),([^\)]*)\)/ImVec4 $1 $2 $3 $4/g;
    # delete this so it's easier for regexes
    $line =~ s/\s+IM_FMTARGS\(.\);/;/g;
    $line =~ s/\s+IM_FMTLIST\(.\);/;/g;
    # $1 = return type, $2 = function name, $3 = raw argument list
    if ($line =~ m/ *IMGUI_API *(const char*\*|[^ ]+) *([^\(]+)\(([^\;]*)\);/) {
        print "//" . $line;
        # this will be set to 0 if something is not supported yet
        my $shouldPrint = 1;
        my @args = split(',', $3);
        # things to do before calling real c++ function
        my @before;
        # arguments to real c++ function
        my @funcArgs;
        # things to do after calling real c++ function
        my @after;
        # real c++ function name
        my $funcName = $2;
        if (defined($bannedNames{$funcName})) {
            print "//Not allowed to use this function\n";
            $shouldPrint = 0;
        }
        # c++ type of return value
        my $retLine = $1;
        my $retType;
        # macro used for calling function
        my $callMacro;
        # if it has a return value (yes I know this is not the cleanest code)
        my $hasRet = 1;
        if ($retLine =~ /^void$/) {
            $callMacro = "CALL_FUNCTION_NO_RET";
            $hasRet = 0;
        } elsif ($retLine =~ /^bool$/) {
            $callMacro = "CALL_FUNCTION";
            push(@funcArgs, "bool");
            push(@after, "PUSH_BOOL(ret)");
        } elsif ($retLine =~ /^float$/) {
            $callMacro = "CALL_FUNCTION";
            push(@funcArgs, "float");
            push(@after, "PUSH_NUMBER(ret)");
        } elsif ($retLine =~ /^ImVec2$/) {
            $callMacro = "CALL_FUNCTION";
            push(@funcArgs, "ImVec2");
            push(@after, "PUSH_NUMBER(ret.x)");
            push(@after, "PUSH_NUMBER(ret.y)");
        } elsif ($retLine =~ /^ImVec4$/) {
            $callMacro = "CALL_FUNCTION";
            push(@funcArgs, "ImVec4");
            push(@after, "PUSH_NUMBER(ret.x)");
            push(@after, "PUSH_NUMBER(ret.y)");
            push(@after, "PUSH_NUMBER(ret.z)");
            push(@after, "PUSH_NUMBER(ret.w)");
        } elsif ($retLine =~ /^(unsigned int|ImGuiID|ImU32)$/) {
            $callMacro = "CALL_FUNCTION";
            push(@funcArgs, "unsigned int");
            push(@after, "PUSH_NUMBER(ret)");
        } elsif ($retLine =~ /^(ImGuiMouseCursor)$/) { # Enums
            $callMacro = "CALL_FUNCTION";
            push(@funcArgs, "int");
            push(@after, "PUSH_NUMBER(ret)");
        } elsif ($retLine =~ /^int$/) {
            $callMacro = "CALL_FUNCTION";
            push(@funcArgs, "int");
            push(@after, "PUSH_NUMBER(ret)");
        # NOTE(review): "char*" in this pattern is "cha" + "r"-quantifier;
        # it happens to match "const char*" but is fragile.
        } elsif ($retLine =~ /^const char*\*$/) {
            $callMacro = "CALL_FUNCTION";
            push(@funcArgs, "const char*");
            push(@after, "PUSH_STRING(ret)");
        } else {
            print "// Unsupported return type $1\n";
            $shouldPrint = 0;
        }
        # Translate each C++ parameter into a *_ARG macro (before the call),
        # the argument expression itself, and any END_* macro (after).
        for (my $i = 0; $i < @args; $i++) {
            # bool * x = NULL or bool * x
            if ($args[$i] =~ m/^ *bool *\* *([^ =\[]*)( = NULL|) *$/) {
                my $name = $1;
                if ($2 =~ m/^ = NULL$/) {
                    push(@before, "OPTIONAL_BOOL_POINTER_ARG($name)");
                } else {
                    push(@before, "BOOL_POINTER_ARG($name)");
                }
                push(@funcArgs, $name);
                push(@after, "END_BOOL_POINTER($name)");
            # float * x
            } elsif ($args[$i] =~ m/^ *float *\* *([^ =\[]*)$/) {
                my $name = $1;
                push(@before, "FLOAT_POINTER_ARG($name)");
                push(@funcArgs, $name);
                push(@after, "END_FLOAT_POINTER($name)");
            # const float * x
            } elsif ($args[$i] =~ m/^ *const float *\* *([^ =\[]*)$/) {
                my $name = $1;
                push(@before, "FLOAT_ARRAY_ARG($name)");
                push(@funcArgs, $name);
            # float a or float a = number
            } elsif ($args[$i] =~ m/^ *float *([^ =\[]*)( *= *[^ ]*|)$/) {
                my $name = $1;
                if ($2 =~ m/^ *= *([^ ]*)$/) {
                    push(@before, "OPTIONAL_NUMBER_ARG($name, $1)");
                } else {
                    push(@before, "NUMBER_ARG($name)");
                }
                push(@funcArgs, $name);
            # const char* a or const char* a = NULL or "blah"
            } elsif ($args[$i] =~ m/^ *const char\* *([^ =\[]*)( *= *(NULL|".*")|) *$/) {
                my $name = $1;
                if ($2 =~ m/^ *= *(NULL|".*") *$/) {
                    push(@before, "OPTIONAL_LABEL_ARG($name, $1)");
                } else {
                    push(@before, "LABEL_ARG($name)");
                }
                push(@funcArgs, $name);
            # char * (assumed to be a writable buffer followed by buf_size)
            } elsif ($args[$i] =~ m/^ *char *\* *([^ =\[]*)$/) {
                my $name = $1;
                push(@before, "LABEL_POINTER_ARG($name)");
                push(@funcArgs, $name);
                push(@funcArgs, "buf_size");
                push(@after, "END_LABEL_POINTER($name)");
                # skip next argument
                $i = $i + 1;
            # const char** a
            } elsif ($args[$i] =~ m/^ *const char\*\* *([^ =\[]*) *$/ or $args[$i] =~ m/^ *const char\* const\* *([^ =\[]*) *$/ or $args[$i] =~ m/^ *const char\* const *([^ =\[]*)\[\] *$/) {
                my $name = $1;
                push(@before, "LABEL_ARRAY_ARG($name)");
                push(@funcArgs, $name);
            # const ImVec2& size with or without default value of ImVec(0,0)
            } elsif ($args[$i] =~ m/^ *(const)? ImVec2&? ([^ ]*) *(= * ImVec2 .* .*|) *$/) {
                my $name = $2;
                if ($3 =~ m/^= * ImVec2 (.*) (.*)$/) {
                    push(@before, "OPTIONAL_IM_VEC_2_ARG($name, $1, $2)");
                } else {
                    push(@before, "IM_VEC_2_ARG($name)");
                }
                push(@funcArgs, $name);
            # const ImVec4& size with or without default value of ImVec(0,0)
            } elsif ($args[$i] =~ m/^ *const ImVec4& ([^ ]*) *(= * ImVec4 .* .* .* .*|) *$/) {
                my $name = $1;
                if ($2 =~ m/^= * ImVec4 (.*) (.*) (.*) (.*)$/) {
                    push(@before, "OPTIONAL_IM_VEC_4_ARG($name, $1, $2, $3, $4)");
                } else {
                    push(@before, "IM_VEC_4_ARG($name)");
                }
                push(@funcArgs, $name);
            # one of the various enums
            # we are handling these as ints
            } elsif ($args[$i] =~ m/^ *(ImGuiWindowFlags|ImGuiCol|ImGuiStyleVar|ImGuiKey|ImGuiAlign|ImGuiColorEditMode|ImGuiMouseCursor|ImGuiSetCond|ImGuiInputTextFlags|ImGuiSelectableFlags|ImGuiTreeNodeFlags|ImGuiComboFlags|ImGuiFocusedFlags|ImGuiHoveredFlags|ImGuiDragDropFlags|ImGuiColorEditFlags|ImGuiCond|ImGuiStyle) ([^ ]*)( = 0|) *$/) {
                # These are ints
                my $name = $2;
                if ($3 =~ m/^ = 0$/) {
                    push(@before, "OPTIONAL_ENUM_ARG($name, 0)");
                } else {
                    push(@before, "ENUM_ARG($name)");
                }
                push(@funcArgs, $name);
            # int with default value or not
            } elsif ($args[$i] =~ m/^ *int ([^ =\[]*)( = [^ ]*|) *$/) {
                my $name = $1;
                if ($2 =~ m/^ = ([^ ]*)$/) {
                    push(@before, "OPTIONAL_INT_ARG($name, $1)");
                } else {
                    push(@before, "INT_ARG($name)");
                }
                push(@funcArgs, $name);
            # unsigned int with default value or not
            } elsif ($args[$i] =~ m/^ *(unsigned +int|ImGuiID|ImU32) ([^ =\[]*)( = [^ ]*|) *$/) {
                my $name = $2;
                # FIXME(review): this should test $3 (the default clause),
                # not $2 (the parameter name) — as written the condition can
                # never match, so OPTIONAL_UINT_ARG is never emitted.
                # Compare the int branch above, which correctly tests $2 of
                # a two-group pattern.
                if ($2 =~ m/^ = ([^ ]*)$/) {
                    push(@before, "OPTIONAL_UINT_ARG($name, $1)");
                } else {
                    push(@before, "UINT_ARG($name)");
                }
                push(@funcArgs, $name);
            # bool with default value or not
            } elsif ($args[$i] =~ m/^ *bool ([^ =\[]*)( *= *true| *= *false|) *$/) {
                my $name = $1;
                if ($2 =~ m/^ *= *([^ ]*)$/) {
                    push(@before, "OPTIONAL_BOOL_ARG($name, $1)");
                } else {
                    push(@before, "BOOL_ARG($name)");
                }
                push(@funcArgs, $name);
            # int * x  ("current_item" gets a dedicated combo-box macro)
            } elsif ($args[$i] =~ m/^ *int *\* *([^ =\[]*)$/) {
                my $name = $1;
                if ($name eq "current_item") {
                    push(@before, "INT_CURRENT_ITEM_POINTER_ARG($name)");
                    push(@funcArgs, $name);
                    push(@after, "END_INT_CURRENT_ITEM_POINTER($name)");
                } else {
                    push(@before, "INT_POINTER_ARG($name)");
                    push(@funcArgs, $name);
                    push(@after, "END_INT_POINTER($name)");
                }
            # unsigned int * x
            } elsif ($args[$i] =~ m/^ *unsigned +int *\* *([^ =\[]*)$/) {
                my $name = $1;
                push(@before, "UINT_POINTER_ARG($name)");
                push(@funcArgs, $name);
                push(@after, "END_UINT_POINTER($name)");
            # float x[N] (single-digit fixed-size array)
            } elsif ($args[$i] =~ m/^ *float *([^ =\[]*)\[(.)\]$/) {
                my $name = $1;
                push(@before, "FLOAT_ARRAY$2_ARG($name)");
                push(@funcArgs, $name);
                push(@after, "END_FLOAT_ARRAY$2($name)");
            # int x[N]
            } elsif ($args[$i] =~ m/^ *int *([^ =\[]*)\[(.)\]$/) {
                my $name = $1;
                push(@before, "INT_ARRAY$2_ARG($name)");
                push(@funcArgs, $name);
                push(@after, "END_INT_ARRAY$2($name)");
            # ImTextureID
            } elsif ($args[$i] =~ m/^ *ImTextureID ([^ =\[]*) *$/) {
                my $name = $1;
                push(@before, "TEXTURE_ARG($name)");
                push(@funcArgs, $name);
            # catch-all: any "type name = default" argument
            } elsif ($args[$i] =~ m/^ *(.*) (.*) = (.*)$/) {
                my $type = $1;
                my $name = $2;
                my $value = $3;
                push(@before, "DEFAULT_ARG($type, $name, $value)");
                push(@funcArgs, $name);
            # FIXME(review): dead branch — this pattern is identical to the
            # "int * x" branch above, so it can never match here, and its
            # body is empty anyway. Safe to delete.
            } elsif ($args[$i] =~ m/^ *int *\* *([^ =\[]*)$/) {
            # we don't support variadic functions yet but we let you use it without extra variables
            } elsif ($args[$i] =~ m/^ *\.\.\. *$/) {
                print "// Variadic functions aren't suppported but here it is anyway\n";
            } else {
                print "// Unsupported arg type " . $args[$i] . "\n";
                $shouldPrint = 0;
            }
        }
        # Second return-type pass: queue the PUSH_LAST_* variants after all
        # argument END_* macros (both PUSH_* and PUSH_LAST_* end up in @after).
        if ($retLine =~ /^bool$/) {
            push(@after, "PUSH_LAST_BOOL(ret)");
        } elsif ($retLine =~ /^float$/) {
            push(@after, "PUSH_LAST_NUMBER(ret)");
        } elsif ($retLine =~ /^ImVec2$/) {
            push(@after, "PUSH_LAST_NUMBER(ret.x)");
            push(@after, "PUSH_LAST_NUMBER(ret.y)");
        } elsif ($retLine =~ /^ImVec4$/) {
            push(@after, "PUSH_LAST_NUMBER(ret.x)");
            push(@after, "PUSH_LAST_NUMBER(ret.y)");
            push(@after, "PUSH_LAST_NUMBER(ret.z)");
            push(@after, "PUSH_LAST_NUMBER(ret.w)");
        } elsif ($retLine =~ /^(unsigned int|ImGuiID|ImU32)$/) {
            push(@after, "PUSH_LAST_NUMBER(ret)");
        } elsif ($retLine =~ /^(ImGuiMouseCursor)$/) { # Enums
            push(@after, "PUSH_LAST_NUMBER(ret)");
        } elsif ($retLine =~ /^int$/) {
            push(@after, "PUSH_LAST_NUMBER(ret)");
        } elsif ($retLine =~ /^const char*\*$/) {
            push(@after, "PUSH_LAST_STRING(ret)");
        }
        my $luaFunc = $funcName;
        # Stupid way of implementing overriding: suffix repeats with _2, _3...
        if ($funcNames{$luaFunc}) {
            $funcNames{$luaFunc} = $funcNames{$luaFunc} + 1;
            $luaFunc .= "_" . $funcNames{$luaFunc};
        } else {
            $funcNames{$luaFunc} = 1;
        }
        if ($shouldPrint != 0) {
            print "IMGUI_FUNCTION($luaFunc)\n";
            for (my $i = 0; $i < @before; $i++) {
                print $before[$i] . "\n";
            }
            print $callMacro . "($funcName";
            for (my $i = 0; $i < @funcArgs; $i++) {
                print ", " . $funcArgs[$i];
            }
            print ")\n";
            # for begin and end stack stuff
            if ($funcName =~ m/^Begin(.*)$/ || defined($beginN{$funcName})) {
                my $curEndType;
                if (defined($beginN{$funcName})) {
                    $curEndType = $beginN{$funcName};
                } else {
                    $curEndType = $1;
                }
                if (defined($endOverride{$curEndType})) {
                    $curEndType = $endOverride{$curEndType};
                }
                # Assign each end type a stable integer id on first sight.
                if (!defined($endTypeToInt{$curEndType})) {
                    $endTypeToInt{$curEndType} = scalar(@endTypes);
                    push(@endTypes, $curEndType);
                }
                my $curEndTypeInt = $endTypeToInt{$curEndType};
                if ($hasRet) {
                    print "IF_RET_ADD_END_STACK($curEndTypeInt)\n";
                } else {
                    print "ADD_END_STACK($curEndTypeInt)\n";
                }
            } elsif ($funcName =~ m/^End(.*)$/ || defined($endN{$funcName})) {
                my $curEndType;
                if (defined($endN{$funcName})) {
                    $curEndType = $endN{$funcName};
                } else {
                    $curEndType = $1;
                }
                if (defined($endOverride{$curEndType})) {
                    $curEndType = $endOverride{$curEndType};
                }
                if (!defined($endTypeToInt{$curEndType})) {
                    $endTypeToInt{$curEndType} = scalar(@endTypes);
                    push(@endTypes, $curEndType);
                }
                my $curEndTypeInt = $endTypeToInt{$curEndType};
                print "POP_END_STACK($curEndTypeInt)\n"
            }
            for (my $i = 0; $i < @after; $i++) {
                print $after[$i] . "\n";
            }
            print "END_IMGUI_FUNC\n";
            $numSupported += 1;
        } else {
            $numUnsupported += 1;
        }
    # Stop at the end of the ImGui namespace.
    } elsif ($line =~ m/^} \/\/ namespace ImGui$/) {
        last;
    }
}
# Emit the END_STACK dispatch table used by ENABLE_IM_LUA_END_STACK:
# one END_STACK_OPTION per discovered end type, resolving irregular
# closer names through %changeN (e.g. Tree -> TreePop instead of EndTree).
print "END_STACK_START\n";
for my $idx (0 .. $#endTypes) {
    my $closer = defined $changeN{ $endTypes[$idx] }
        ? $changeN{ $endTypes[$idx] }
        : "End" . $endTypes[$idx];
    print "END_STACK_OPTION($idx, " . $closer . ")\n";
}
print "END_STACK_END\n";

# Summary counters go to STDERR so they don't pollute the generated code.
print STDERR "Supported: $numSupported Unsupported: $numUnsupported\n";
| slages/love-imgui | generate_imgui_bindings.pl | Perl | mit | 14,170 |
package Amazon::S3::Bucket;
use strict;
use warnings;
use Carp;
use File::stat;
use base qw(Class::Accessor::Fast);
__PACKAGE__->mk_accessors(qw(bucket creation_date account));
# Construct a bucket object. Both the 'bucket' (name) and 'account'
# (owning Amazon::S3 object) attributes are mandatory; croaks when
# either is missing. Does not check that the bucket exists on S3.
sub new {
    my ($class, @args) = @_;
    my $self = $class->SUPER::new(@args);
    for my $required (qw(bucket account)) {
        croak "no $required" unless $self->$required;
    }
    return $self;
}
# Build the request path for this bucket, optionally for a specific key.
# The key is URL-encoded via the owning account object; without a key
# the path addresses the bucket itself.
sub _uri {
    my ($self, $key) = @_;
    my $path = $self->bucket . "/";
    if ($key) {
        $path .= $self->account->_urlencode($key);
    }
    return $path;
}
# returns bool
# Upload $value under $key in this bucket.
# $value may be a plain string, or a SCALAR ref holding a filename whose
# contents are streamed instead of loaded into memory (see add_key_filename).
# $conf carries HTTP headers; the pseudo-header 'acl_short' is validated
# and translated into the real 'x-amz-acl' header before sending.
sub add_key {
    my ($self, $key, $value, $conf) = @_;
    croak 'must specify key' unless $key && length $key;

    if ($conf->{acl_short}) {
        $self->account->_validate_acl_short($conf->{acl_short});
        $conf->{'x-amz-acl'} = $conf->{acl_short};
        delete $conf->{acl_short};
    }

    if (ref($value) eq 'SCALAR') {
        # $$value is a filename here: size the request from the file and
        # replace $value with a streaming closure.
        $conf->{'Content-Length'} ||= -s $$value;
        $value = _content_sub($$value);
    } else {
        $conf->{'Content-Length'} ||= length $value;
    }

    # If we're pushing to a bucket that's under DNS flux, we might get a 307
    # Since LWP doesn't support actually waiting for a 100 Continue response,
    # we'll just send a HEAD first to see what's going on
    if (ref($value)) {
        return
          $self->account->_send_request_expect_nothing_probed('PUT',
            $self->_uri($key), $conf, $value);
    } else {
        return
          $self->account->_send_request_expect_nothing('PUT', $self->_uri($key),
            $conf, $value);
    }
}
# Like add_key, but $value names a local file whose contents are streamed
# to S3 (passed to add_key as a SCALAR ref, which triggers streaming).
sub add_key_filename {
    my ($self, $key, $filename, $conf) = @_;
    my $filename_ref = \$filename;
    return $self->add_key($key, $filename_ref, $conf);
}
# Fetch only the metadata for $key (HTTP HEAD); undef when absent.
sub head_key {
    my $self = shift;
    my $key  = shift;
    return $self->get_key($key, "HEAD");
}
# Fetch $key from S3 using $method (defaults to GET; HEAD for metadata only).
# When $filename is given (or a ref to one) the response body is written to
# that file instead of being kept in memory.
# Returns undef if the key does not exist (404); croaks on other errors.
# On success returns a hashref with content_length, content_type, etag,
# value, plus any x-amz-meta-* response headers (keys lower-cased).
sub get_key {
    my ($self, $key, $method, $filename) = @_;
    $method ||= "GET";
    # get_key_filename passes a reference to the filename; unwrap it.
    $filename = $$filename if ref $filename;
    my $acct = $self->account;

    my $request = $acct->_make_request($method, $self->_uri($key), {});
    my $response = $acct->_do_http($request, $filename);

    if ($response->code == 404) {
        return undef;
    }

    $acct->_croak_if_response_error($response);

    my $etag = $response->header('ETag');
    if ($etag) {
        # Strip the double quotes S3 wraps around ETag values.
        $etag =~ s/^"//;
        $etag =~ s/"$//;
    }

    my $return = {
        content_length => $response->content_length || 0,
        content_type   => $response->content_type,
        etag           => $etag,
        value          => $response->content,
    };

    # Pass through any user metadata headers.
    foreach my $header ($response->headers->header_field_names) {
        next unless $header =~ /x-amz-meta-/i;
        $return->{lc $header} = $response->header($header);
    }

    return $return;
}
# Like get_key, but write the response body to $filename on disk
# (passed down as a reference so get_key can tell it apart).
sub get_key_filename {
    my $self     = shift;
    my $key      = shift;
    my $method   = shift;
    my $filename = shift;
    return $self->get_key($key, $method, \$filename);
}
# returns bool
# Permanently remove $key from the bucket; croaks when no key is given.
sub delete_key {
    my ($self, $key) = @_;
    croak 'must specify key' unless $key && length $key;
    my $account = $self->account;
    return $account->_send_request_expect_nothing('DELETE', $self->_uri($key), {});
}
# Permanently remove this (empty) bucket from S3. Takes no arguments;
# croaks if any are passed. Alias for $account->delete_bucket($bucket).
sub delete_bucket {
    my ($self, @extra) = @_;
    croak "Unexpected arguments" if @extra;
    return $self->account->delete_bucket($self);
}
# List keys in this bucket (single page). Optional $conf hashref is
# forwarded to the account's list_bucket with the bucket name filled in.
sub list {
    my ($self, $conf) = @_;
    $conf ||= {};
    $conf->{bucket} = $self->bucket;
    return $self->account->list_bucket($conf);
}
# List every key in this bucket, following 'marker' pagination internally.
# May issue multiple S3 requests under the hood.
sub list_all {
    my ($self, $conf) = @_;
    $conf ||= {};
    $conf->{bucket} = $self->bucket;
    return $self->account->list_bucket_all($conf);
}
# Fetch the ACL XML document for $key, or for the bucket itself when no
# key is given. Returns undef on 404; croaks on other server errors.
sub get_acl {
    my ($self, $key) = @_;
    my $account  = $self->account;
    my $request  = $account->_make_request('GET', $self->_uri($key) . '?acl', {});
    my $response = $account->_do_http($request);
    return undef if $response->code == 404;
    $account->_croak_if_response_error($response);
    return $response->content;
}
# Set the ACL for $conf->{key}, or for the bucket when no key is given.
# Exactly one of acl_xml (full ACL document, sent as the request body)
# or acl_short (canned ACL, sent as the x-amz-acl header) is required.
# Returns a boolean indicating success.
sub set_acl {
    my ($self, $conf) = @_;
    $conf ||= {};

    my $has_xml   = $conf->{acl_xml};
    my $has_short = $conf->{acl_short};
    unless ($has_xml || $has_short) {
        croak "need either acl_xml or acl_short";
    }
    if ($has_xml && $has_short) {
        croak "cannot provide both acl_xml and acl_short";
    }

    my $path    = $self->_uri($conf->{key}) . '?acl';
    my $headers = $has_short ? {'x-amz-acl' => $conf->{acl_short}} : {};
    my $xml     = $conf->{acl_xml} || '';

    return $self->account->_send_request_expect_nothing('PUT', $path, $headers, $xml);
}
# Return the bucket's location constraint (region), or undef when the
# request fails, reports an error, or the constraint is the empty string
# (which S3 uses for the default region).
sub get_location_constraint {
    my ($self) = @_;
    my $xpc = $self->account->_send_request('GET', $self->bucket . '/?location');
    return undef unless $xpc;
    return undef if $self->account->_remember_errors($xpc);
    my $constraint = $xpc->{content};
    if (defined $constraint && $constraint eq '') {
        $constraint = undef;
    }
    return $constraint;
}
# Proxy the error accessors up to the owning account object.
sub err    { return $_[0]->account->err }
sub errstr { return $_[0]->account->errstr }
# Return a closure that streams the contents of $filename in blksize
# chunks, suitable as an LWP request-content provider. The closure
# returns one chunk per call and an empty string at EOF (LWP's
# end-of-content convention). Croaks if the file is unreadable or
# empty, or if a read/close fails mid-upload.
#
# Fix: the original used the bareword filehandle DATA with a 2-arg
# open. DATA is Perl's special __DATA__ handle, and a bareword handle
# is a package global shared by every closure this function returns —
# two concurrent uploads would clobber each other. A lexical handle
# captured by the closure fixes both problems; 3-arg open avoids mode
# injection via the filename.
sub _content_sub {
    my $filename  = shift;
    my $stat      = stat($filename);
    my $remaining = $stat->size;
    my $blksize   = $stat->blksize || 4096;

    croak "$filename not a readable file with fixed size"
      unless -r $filename
      and $remaining;

    open my $fh, '<', $filename
      or croak "Could not open $filename: $!";
    # Raw bytes: uploads must not be subject to CRLF translation.
    binmode $fh;

    return sub {
        my $buffer;

        unless (my $read = read($fh, $buffer, $blksize)) {
            croak
"Error while reading upload content $filename ($remaining remaining) $!"
              if $! and $remaining;

            # otherwise, we found EOF
            close $fh
              or croak "close of upload content $filename failed: $!";

            # LWP expects an empty string on finish, read returns 0
            $buffer ||= '';
        }
        $remaining -= length($buffer);
        return $buffer;
    };
}
1;
__END__
=head1 NAME
Amazon::S3::Bucket - A container class for a S3 bucket and its contents.
=head1 SYNOPSIS
use Amazon::S3;
# creates bucket object (no "bucket exists" check)
my $bucket = $s3->bucket("foo");
# create resource with meta data (attributes)
my $keyname = 'testing.txt';
my $value = 'T';
$bucket->add_key(
$keyname, $value,
{ content_type => 'text/plain',
'x-amz-meta-colour' => 'orange',
}
);
# list keys in the bucket
$response = $bucket->list
or die $s3->err . ": " . $s3->errstr;
print $response->{bucket}."\n";
for my $key (@{ $response->{keys} }) {
print "\t".$key->{key}."\n";
}
# check if resource exists.
print "$keyname exists\n" if $bucket->head_key($keyname);
# delete key from bucket
$bucket->delete_key($keyname);
=head1 METHODS
=head2 new
Instantiates a new bucket object.
Requires a hash containing two arguments:
=over
=item bucket
The name (identifier) of the bucket.
=item account
The L<S3::Amazon> object (representing the S3 account) this
bucket is associated with.
=back
NOTE: This method does not check if a bucket actually
exists. It simply instantiates the bucket.
Typically a developer will not call this method directly,
but work through the interface in L<S3::Amazon> that will
handle their creation.
=head2 add_key
Takes three positional parameters:
=over
=item key
A string identifier for the resource in this bucket
=item value
A SCALAR string representing the contents of the resource.
=item configuration
A HASHREF of configuration data for this key. The configuration
is generally the HTTP headers you want to pass the S3
service. The client library will add all necessary headers.
Adding them to the configuration hash will override what the
library would send and add headers that are not typically
required for S3 interactions.
In addition to extra and overridden HTTP headers, this
HASHREF can have a C<acl_short> key to set the permissions
(access) of the resource without a separate call via
C<add_acl> or in the form of an XML document. See the
documentation in C<add_acl> for the values and usage.
=back
Returns a boolean indicating its success. Check C<err> and
C<errstr> for error message if this operation fails.
=head2 add_key_filename
The method works like C<add_key> except the value is assumed
to be a filename on the local file system. The file will
be streamed rather than loaded into memory in one big chunk.
=head2 head_key $key_name
Returns a configuration HASH of the given key. If a key does
not exist in the bucket C<undef> will be returned.
=head2 get_key $key_name, [$method]
Takes a key and an optional HTTP method and fetches it from
S3. The default HTTP method is GET.
The method returns C<undef> if the key does not exist in the
bucket and throws an exception (dies) on server errors.
On success, the method returns a HASHREF containing:
=over
=item content_type
=item etag
=item value
=item @meta
=back
=head2 get_key_filename $key_name, $method, $filename
This method works like C<get_key>, but takes an added
filename that the S3 resource will be written to.
=head2 delete_key $key_name
Permanently removes C<$key_name> from the bucket. Returns a
boolean value indicating the operations success.
=head2 delete_bucket
Permanently removes the bucket from the server. A bucket
cannot be removed if it contains any keys (contents).
This is an alias for C<$s3->delete_bucket($bucket)>.
=head2 list
List all keys in this bucket.
See L<Amazon::S3/list_bucket> for documentation of this
method.
=head2 list_all
List all keys in this bucket without having to worry about
'marker'. This may make multiple requests to S3 under the
hood.
See L<Amazon::S3/list_bucket_all> for documentation of this
method.
=head2 get_acl
Retrieves the Access Control List (ACL) for the bucket or
resource as an XML document.
=over
=item key
The key of the stored resource to fetch. This parameter is
optional. By default the method returns the ACL for the
bucket itself.
=back
=head2 set_acl $conf
Retrieves the Access Control List (ACL) for the bucket or
resource. Requires a HASHREF argument with one of the following keys:
=over
=item acl_xml
An XML string which contains access control information
which matches Amazon's published schema.
=item acl_short
Alternative shorthand notation for common types of ACLs that
can be used in place of a ACL XML document.
According to the Amazon S3 API documentation the following recognized acl_short
types are defined as follows:
=over
=item private
Owner gets FULL_CONTROL. No one else has any access rights.
This is the default.
=item public-read
Owner gets FULL_CONTROL and the anonymous principal is
granted READ access. If this policy is used on an object, it
can be read from a browser with no authentication.
=item public-read-write
Owner gets FULL_CONTROL, the anonymous principal is granted
READ and WRITE access. This is a useful policy to apply to a
bucket, if you intend for any anonymous user to PUT objects
into the bucket.
=item authenticated-read
Owner gets FULL_CONTROL, and any principal authenticated as
a registered Amazon S3 user is granted READ access.
=back
=item key
The key name to apply the permissions. If the key is not
provided the bucket ACL will be set.
=back
Returns a boolean indicating the operations success.
=head2 get_location_constraint
Returns the location constraint data on a bucket.
For more information on location constraints, refer to the
Amazon S3 Developer Guide.
=head2 err
The S3 error code for the last error the account encountered.
=head2 errstr
A human readable error string for the last error the account encountered.
=head1 SEE ALSO
L<Amazon::S3>
=head1 AUTHOR & COPYRIGHT
Please see the L<Amazon::S3> manpage for author, copyright, and
license information.
| sharkhack/AmazonS3 | PERL/Amazon/S3/Bucket.pm | Perl | mit | 11,958 |
package TheOtherOne::Command::client;
use strict;
use warnings;
use feature ':5.14';
use List::Util qw(none);
use TheOtherOne -command;
use TheOtherOne::Client;
my $socket_key = lc TheOtherOne::Client::SOCKET;
my $http_key = lc TheOtherOne::Client::HTTP;
# One-line summary shown in the App::Cmd command listing.
sub abstract {
    return 'connect to too server';
}
# Command-line option specification (App::Cmd format): transport
# selection flags plus host/port and verbosity.
sub opt_spec {
    my @spec = (
        [ "$socket_key|s", 'use socket' ],
        [ "$http_key|h",   'use http' ],
        [ 'host|t=s',      'socket|http host', { default => '127.0.0.1' } ],
        [ 'port|p=s',      'socket|http port', { default => '7777' } ],
        [ 'verbose|v',     'speaks verbosely', { default => 0 } ],
    );
    return @spec;
}
# Default to the socket transport when neither --socket nor --http
# was given on the command line.
sub validate_args {
    my ($self, $opt, $args) = @_;
    my @transports = ($socket_key, $http_key);
    if ( none { $opt->{$_} } @transports ) {
        $opt->{$socket_key} = 1;
    }
}
# Dispatch to the selected transport client and run it against the
# configured host/port with the first positional argument as payload.
# FIXME(review): the HTTP branch is an empty stub — selecting --http
# currently does nothing, silently.
sub execute {
    my ($self, $opt, $args) = @_;
    if ( $opt->{$socket_key} ) {
        TheOtherOne::Client->get($socket_key)->run($opt->{host}, $opt->{port}, $args->[0], $opt->{verbose});
    }
    elsif ( $opt->{$http_key} ) {
        # HTTP transport not implemented yet.
    }
    else {
        # Neither transport flag set (validate_args should prevent this).
        print $self->usage;
    }
}
1;
| oogatta/TheOtherOne | lib/TheOtherOne/Command/client.pm | Perl | mit | 1,039 |
package MIP::Recipes::Analysis::Sv_combinevariantcallsets;
use 5.026;
use Carp;
use charnames qw{ :full :short };
use English qw{ -no_match_vars };
use File::Basename qw{ dirname };
use File::Spec::Functions qw{ catfile splitpath };
use open qw{ :encoding(UTF-8) :std };
use Params::Check qw{ allow check last_error };
use utf8;
use warnings;
use warnings qw{ FATAL utf8 };
## CPANM
use autodie qw{ :all };
use List::MoreUtils qw { any };
use Readonly;
## MIPs lib/
use MIP::Constants qw{ $ASTERISK $COLON $DOT $EMPTY_STR $LOG_NAME $NEWLINE $UNDERSCORE };
BEGIN {
require Exporter;
use base qw{ Exporter };
# Functions and variables which can be optionally exported
our @EXPORT_OK = qw{ analysis_sv_combinevariantcallsets };
}
sub analysis_sv_combinevariantcallsets {

## Function : CombineVariants to combine all structural variants call from different callers.
##          : Builds and submits a shell recipe that: merges per-sample caller
##          : VCFs to case level, merges the per-caller case VCFs with SVDB in
##          : the configured priority order, optionally decomposes multiallelic
##          : records with bcftools norm, and optionally writes an indexed BCF.
## Returns  : 1 on successful submission setup
## Arguments: $active_parameter_href => Active parameters for this analysis hash {REF}
##          : $case_id               => Family id
##          : $file_info_href        => File info hash {REF}
##          : $job_id_href           => Job id hash {REF}
##          : $parameter_href        => Parameter hash {REF}
##          : $profile_base_command  => Submission profile base command
##          : $recipe_name           => Program name
##          : $reference_dir         => MIP reference directory
##          : $sample_info_href      => Info on samples and case hash {REF}
##          : $temp_directory        => Temporary directory {REF}

    my ($arg_href) = @_;

    ## Flatten argument(s)
    my $active_parameter_href;
    my $file_info_href;
    my $job_id_href;
    my $parameter_href;
    my $recipe_name;
    my $sample_info_href;

    ## Default(s)
    my $case_id;
    my $profile_base_command;
    # NOTE(review): $reference_dir is unpacked and validated below but never
    # used in this body — possibly kept for interface symmetry; confirm.
    my $reference_dir;
    my $temp_directory;

    my $tmpl = {
        active_parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$active_parameter_href,
            strict_type => 1,
        },
        case_id => {
            default     => $arg_href->{active_parameter_href}{case_id},
            store       => \$case_id,
            strict_type => 1,
        },
        file_info_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$file_info_href,
            strict_type => 1,
        },
        job_id_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$job_id_href,
            strict_type => 1,
        },
        parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$parameter_href,
            strict_type => 1,
        },
        profile_base_command => {
            default     => q{sbatch},
            store       => \$profile_base_command,
            strict_type => 1,
        },
        recipe_name => {
            defined     => 1,
            required    => 1,
            store       => \$recipe_name,
            strict_type => 1,
        },
        sample_info_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$sample_info_href,
            strict_type => 1,
        },
        reference_dir => {
            default     => $arg_href->{active_parameter_href}{reference_dir},
            store       => \$reference_dir,
            strict_type => 1,
        },
        temp_directory => {
            default     => $arg_href->{active_parameter_href}{temp_directory},
            store       => \$temp_directory,
            strict_type => 1,
        },
    };

    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};

    use MIP::File_info qw{ get_io_files parse_io_outfiles };
    use MIP::Recipe qw{ parse_recipe_prerequisites };
    use MIP::Program::Gnu::Coreutils qw{ gnu_mv };
    use MIP::Processmanagement::Processes qw{ submit_recipe };
    use MIP::Program::Bcftools
      qw{ bcftools_merge bcftools_norm bcftools_view bcftools_view_and_index_vcf };
    use MIP::Program::Svdb qw{ svdb_merge };
    use MIP::Sample_info qw{ set_file_path_to_store
      set_recipe_outfile_in_sample_info
      set_recipe_metafile_in_sample_info };
    use MIP::Script::Setup_script qw{ setup_script };

    ### PREPROCESSING:

    ## Stores the parallel chains that job ids should be inherited from
    my @parallel_chains;

    ## Retrieve logger object
    my $log = Log::Log4perl->get_logger($LOG_NAME);

    ## Unpack parameters
    my @structural_variant_callers;

    ## Only process active callers
    foreach
      my $structural_variant_caller ( @{ $parameter_href->{cache}{structural_variant_callers} } )
    {
        if ( $active_parameter_href->{$structural_variant_caller} ) {
            push @structural_variant_callers, $structural_variant_caller;
        }
    }

    my %recipe = parse_recipe_prerequisites(
        {
            active_parameter_href => $active_parameter_href,
            parameter_href        => $parameter_href,
            recipe_name           => $recipe_name,
        }
    );

    ## Set and get the io files per chain, id and stream
    my %io = parse_io_outfiles(
        {
            chain_id               => $recipe{job_id_chain},
            id                     => $case_id,
            file_info_href         => $file_info_href,
            file_name_prefixes_ref => [$case_id],
            outdata_dir            => $active_parameter_href->{outdata_dir},
            parameter_href         => $parameter_href,
            recipe_name            => $recipe_name,
        }
    );

    my $outdir_path_prefix  = $io{out}{dir_path_prefix};
    my $outfile_path        = $io{out}{file_path};
    my $outfile_path_prefix = $io{out}{file_path_prefix};
    my $outfile_suffix      = $io{out}{file_suffix};

    ## Filehandles
    # Create anonymous filehandle
    my $filehandle = IO::Handle->new();

    ## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header
    my ( $recipe_file_path, $recipe_info_path ) = setup_script(
        {
            active_parameter_href => $active_parameter_href,
            core_number           => $recipe{core_number},
            directory_id          => $case_id,
            filehandle            => $filehandle,
            job_id_href           => $job_id_href,
            memory_allocation     => $recipe{memory},
            process_time          => $recipe{time},
            recipe_directory      => $recipe_name,
            recipe_name           => $recipe_name,
            temp_directory        => $temp_directory,
        }
    );

    ## Split to enable submission to &sample_info_qc later
    # NOTE(review): only $stderr_file appears intended for later use; $volume
    # and $directory are discarded.
    my ( $volume, $directory, $stderr_file ) =
      splitpath( $recipe_info_path . $DOT . q{stderr.txt} );

    ### SHELL:

    ## Collect infiles for all sample_ids for programs that do not do joint calling to enable migration to temporary directory
    # Paths for structural variant callers to be merged
    my %file_path;

    _preprocess_single_callers_file(
        {
            active_parameter_href          => $active_parameter_href,
            filehandle                     => $filehandle,
            file_info_href                 => $file_info_href,
            file_path_href                 => \%file_path,
            parallel_chains_ref            => \@parallel_chains,
            parameter_href                 => $parameter_href,
            structural_variant_callers_ref => \@structural_variant_callers,
        }
    );

    ## Merged sample files to one case file (samples > 1) else reformat to standardise
    _merge_or_reformat_single_callers_file(
        {
            active_parameter_href          => $active_parameter_href,
            filehandle                     => $filehandle,
            file_path_href                 => \%file_path,
            outdir_path_prefix             => $outdir_path_prefix,
            outfile_suffix                 => $outfile_suffix,
            parameter_href                 => $parameter_href,
            recipe_info_path               => $recipe_info_path,
            structural_variant_callers_ref => \@structural_variant_callers,
        }
    );

    ## Migrate joint calling per case callers like Manta and Delly
    _preprocess_joint_callers_file(
        {
            active_parameter_href          => $active_parameter_href,
            filehandle                     => $filehandle,
            file_info_href                 => $file_info_href,
            file_path_href                 => \%file_path,
            outdir_path_prefix             => $outdir_path_prefix,
            outfile_suffix                 => $outfile_suffix,
            parallel_chains_ref            => \@parallel_chains,
            parameter_href                 => $parameter_href,
            structural_variant_callers_ref => \@structural_variant_callers,
        }
    );

    ## Merge structural variant caller's case vcf files
    say {$filehandle} q{## Merge structural variant caller's case vcf files};

    ## Get parameters
    # Each SVDB infile is suffixed ":<tag>" where <tag> is the first
    # underscore-delimited part of the caller name, matching the tags used
    # in sv_svdb_merge_prioritize.
    my @svdb_infile_paths;

  STRUCTURAL_CALLER:
    foreach my $structural_variant_caller (@structural_variant_callers) {

        ## Only use first part of name
        my ($variant_caller_prio_tag) = split /_/sxm, $structural_variant_caller;
        push @svdb_infile_paths,
          catfile( $outdir_path_prefix,
                $case_id
              . $UNDERSCORE
              . $structural_variant_caller
              . $outfile_suffix
              . $COLON
              . $variant_caller_prio_tag );
    }

    svdb_merge(
        {
            filehandle       => $filehandle,
            infile_paths_ref => \@svdb_infile_paths,
            priority         => $active_parameter_href->{sv_svdb_merge_prioritize},
            same_order       => 1,
            stdoutfile_path  => $outfile_path,
        }
    );
    say {$filehandle} $NEWLINE;

    ## Alternative file tag
    my $alt_file_tag = $EMPTY_STR;

    if ( $active_parameter_href->{sv_decompose} ) {

        ## Update file tag
        $alt_file_tag = $UNDERSCORE . q{decompose};

        ## Split multiallelic variants
        say {$filehandle} q{## Split multiallelic variants};
        bcftools_norm(
            {
                filehandle   => $filehandle,
                infile_path  => $outfile_path,
                multiallelic => q{-},
                outfile_path => $outfile_path_prefix . $alt_file_tag . $outfile_suffix,
            }
        );
        say {$filehandle} $NEWLINE;

        # Replace the merged file with its decomposed version so downstream
        # steps always read $outfile_path.
        gnu_mv(
            {
                filehandle   => $filehandle,
                force        => 1,
                infile_path  => $outfile_path_prefix . $alt_file_tag . $outfile_suffix,
                outfile_path => $outfile_path,
            }
        );
        say {$filehandle} $NEWLINE;
    }

    if ( $active_parameter_href->{sv_combinevariantcallsets_bcf_file} ) {

        ## Reformat variant calling file and index
        bcftools_view_and_index_vcf(
            {
                filehandle          => $filehandle,
                infile_path         => $outfile_path,
                index_type          => q{csi},
                outfile_path_prefix => $outfile_path_prefix,
                output_type         => q{b},
            }
        );
    }

    close $filehandle or $log->logcroak(q{Could not close filehandle});

    # Recipe mode 1 == active: record outfiles in sample_info and submit.
    if ( $recipe{mode} == 1 ) {

        set_recipe_outfile_in_sample_info(
            {
                path             => $outfile_path,
                recipe_name      => $recipe_name,
                sample_info_href => $sample_info_href,
            }
        );

        $sample_info_href->{sv_vcf_file}{ready_vcf}{path} = $outfile_path;

        if ( $active_parameter_href->{sv_combinevariantcallsets_bcf_file} ) {

            my $sv_bcf_file_path = $outfile_path_prefix . $DOT . q{bcf};
            set_recipe_metafile_in_sample_info(
                {
                    metafile_tag     => q{sv_bcf_file},
                    path             => $sv_bcf_file_path,
                    recipe_name      => $recipe_name,
                    sample_info_href => $sample_info_href,
                }
            );

            set_file_path_to_store(
                {
                    format           => q{bcf},
                    id               => $case_id,
                    path             => $sv_bcf_file_path,
                    path_index       => $sv_bcf_file_path . $DOT . q{csi},
                    recipe_name      => $recipe_name,
                    sample_info_href => $sample_info_href,
                }
            );
        }

        submit_recipe(
            {
                base_command         => $profile_base_command,
                case_id              => $case_id,
                dependency_method    => q{sample_to_case},
                job_id_chain         => $recipe{job_id_chain},
                job_id_href          => $job_id_href,
                job_reservation_name => $active_parameter_href->{job_reservation_name},
                log                  => $log,
                max_parallel_processes_count_href =>
                  $file_info_href->{max_parallel_processes_count},
                parallel_chains_ref => \@parallel_chains,
                recipe_file_path    => $recipe_file_path,
                sample_ids_ref      => \@{ $active_parameter_href->{sample_ids} },
                submission_profile  => $active_parameter_href->{submission_profile},
            }
        );
    }
    return 1;
}
sub _add_to_parallel_chain {

## Function : Append a structural variant caller's chain id to the list of
##          : parallel chains, unless it is already present (set semantics
##          : over an array, preserving insertion order).
## Returns  :
## Arguments: $parallel_chains_ref             => Store structural variant caller parallel chain
##          : $structural_variant_caller_chain => Chain of structural variant caller

    my ($arg_href) = @_;

    ## Flatten argument(s)
    my $parallel_chains_ref;
    my $structural_variant_caller_chain;

    my $tmpl = {
        parallel_chains_ref => {
            default     => [],
            defined     => 1,
            required    => 1,
            store       => \$parallel_chains_ref,
            strict_type => 1,
        },
        structural_variant_caller_chain => {
            defined     => 1,
            required    => 1,
            store       => \$structural_variant_caller_chain,
            strict_type => 1,
        },
    };

    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};

    my $already_present =
      any { $_ eq $structural_variant_caller_chain } @{$parallel_chains_ref};

    push @{$parallel_chains_ref}, $structural_variant_caller_chain
      if not $already_present;

    return;
}
sub _preprocess_joint_callers_file {

## Function : Preprocess joint calling per case callers like Manta and Delly. And store merged outfile per caller
## Returns  :
## Arguments: $active_parameter_href          => Active parameters for this analysis hash {REF}
##          : $case_id                        => Family id
##          : $filehandle                     => Filehandle to write to
##          : $file_info_href                 => File info hash {REF}
##          : $file_path_href                 => Store file path prefix {REF}
##          : $outdir_path_prefix             => Outdir path prefix
##          : $outfile_suffix                 => Outfile suffix
##          : $parallel_chains_ref            => Store structural variant caller parallel chain
##          : $parameter_href                 => Parameter hash {REF}
##          : $structural_variant_callers_ref => Structural variant callers that do not use joint calling

    my ($arg_href) = @_;

    ## Flatten argument(s)
    my $active_parameter_href;
    my $filehandle;
    my $file_info_href;
    my $file_path_href;
    my $outdir_path_prefix;
    my $outfile_suffix;
    my $parallel_chains_ref;
    my $parameter_href;
    my $structural_variant_callers_ref;

    ## Default(s)
    my $case_id;

    ## Params::Check validation template; each entry stores the flattened
    ## argument into the corresponding lexical above
    my $tmpl = {
        active_parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$active_parameter_href,
            strict_type => 1,
        },
        case_id => {
            ## Defaults to the case id from the active parameters
            default     => $arg_href->{active_parameter_href}{case_id},
            store       => \$case_id,
            strict_type => 1,
        },
        filehandle     => { required => 1, store => \$filehandle, },
        file_info_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$file_info_href,
            strict_type => 1,
        },
        file_path_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$file_path_href,
            strict_type => 1,
        },
        outdir_path_prefix => {
            defined     => 1,
            required    => 1,
            store       => \$outdir_path_prefix,
            strict_type => 1,
        },
        outfile_suffix => {
            defined     => 1,
            required    => 1,
            store       => \$outfile_suffix,
            strict_type => 1,
        },
        parallel_chains_ref => {
            default     => [],
            defined     => 1,
            required    => 1,
            store       => \$parallel_chains_ref,
            strict_type => 1,
        },
        parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$parameter_href,
            strict_type => 1,
        },
        structural_variant_callers_ref => {
            default     => [],
            defined     => 1,
            required    => 1,
            store       => \$structural_variant_callers_ref,
            strict_type => 1,
        },
    };

    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};

    use MIP::File_info qw{ get_io_files };

    ## Alternation of the callers that are joint called per case; under /x
    ## the spaces are ignored, so this matches manta|delly_reformat|tiddit
    my $joint_caller = q{manta | delly_reformat | tiddit};
    my $stream       = q{out};

  STRUCTURAL_CALLER:
    foreach my $structural_variant_caller ( @{$structural_variant_callers_ref} ) {

        ## Only the joint-calling callers are handled here
        next STRUCTURAL_CALLER
          if ( $structural_variant_caller !~ / $joint_caller /xsm );

        ## Expect vcf. Special case: manta, delly, tiddit are processed by joint calling and per case

        ## Get the io infiles per chain and id
        my %sample_io = get_io_files(
            {
                id             => $case_id,
                file_info_href => $file_info_href,
                parameter_href => $parameter_href,
                recipe_name    => $structural_variant_caller,
                stream         => $stream,
            }
        );
        my $infile_path_prefix = $sample_io{$stream}{file_path_prefix};
        my $infile_suffix      = $sample_io{$stream}{file_suffix};
        my $infile_path        = $infile_path_prefix . $infile_suffix;

        ## Record the caller's chain so the recipe can depend on it
        _add_to_parallel_chain(
            {
                parallel_chains_ref => $parallel_chains_ref,
                structural_variant_caller_chain =>
                  $parameter_href->{$structural_variant_caller}{chain},
            }
        );

        ## Outfile: <outdir>/<case_id>_<caller><suffix>
        my $decompose_outfile_path = catfile( $outdir_path_prefix,
            $case_id . $UNDERSCORE . $structural_variant_caller . $outfile_suffix );

        ## Store merged outfile per caller
        push @{ $file_path_href->{$structural_variant_caller} }, $decompose_outfile_path;

        ## NOTE(review): when sv_decompose is off, the outfile path is still
        ## registered above but no command writes it here - presumably a
        ## downstream step produces it; confirm with caller
        if ( $active_parameter_href->{sv_decompose} ) {

            ## Split multiallelic variants
            say {$filehandle} q{## Split multiallelic variants};
            bcftools_norm(
                {
                    filehandle   => $filehandle,
                    infile_path  => $infile_path,
                    multiallelic => q{-},
                    outfile_path => $decompose_outfile_path,
                }
            );
            say {$filehandle} $NEWLINE;
        }
    }
    return;
}
sub _preprocess_single_callers_file {

## Function : Collect infiles for all sample_ids for programs that do not do joint calling. Add chain of structural variant caller to parallel chains
## Returns  :
## Arguments: $active_parameter_href          => Active parameters for this analysis hash {REF}
##          : $filehandle                     => Filehandle to write to
##          : $file_info_href                 => File info hash {REF}
##          : $file_path_href                 => Store file path prefix {REF}
##          : $parallel_chains_ref            => Store structural variant caller parallel chain
##          : $parameter_href                 => Parameter hash {REF}
##          : $structural_variant_callers_ref => Structural variant callers that do not use joint calling
##          : $temp_directory                 => Temporary directory

    my ($arg_href) = @_;

    ## Flatten argument(s)
    my $active_parameter_href;
    my $filehandle;
    my $file_info_href;
    my $file_path_href;
    my $parallel_chains_ref;
    my $parameter_href;
    my $structural_variant_callers_ref;

    ## Default(s)
    ## NOTE(review): $temp_directory is validated below but never used in the
    ## body - possibly a leftover from an earlier implementation
    my $temp_directory;

    ## Params::Check validation template
    my $tmpl = {
        active_parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$active_parameter_href,
            strict_type => 1,
        },
        filehandle     => { required => 1, store => \$filehandle, },
        file_info_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$file_info_href,
            strict_type => 1,
        },
        file_path_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$file_path_href,
            strict_type => 1,
        },
        parallel_chains_ref => {
            default     => [],
            defined     => 1,
            required    => 1,
            store       => \$parallel_chains_ref,
            strict_type => 1,
        },
        parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$parameter_href,
            strict_type => 1,
        },
        structural_variant_callers_ref => {
            default     => [],
            defined     => 1,
            required    => 1,
            store       => \$structural_variant_callers_ref,
            strict_type => 1,
        },
        temp_directory => {
            default     => $arg_href->{active_parameter_href}{temp_directory},
            store       => \$temp_directory,
            strict_type => 1,
        },
    };

    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};

    use MIP::File_info qw{ get_io_files };

    ## Alternation of joint-calling callers; spaces are ignored under /x
    my $joint_caller = q{manta | delly_reformat | tiddit};
    my $stream       = q{out};

  SAMPLE_ID:
    foreach my $sample_id ( @{ $active_parameter_href->{sample_ids} } ) {

      STRUCTURAL_CALLER:
        foreach my $structural_variant_caller ( @{$structural_variant_callers_ref} ) {

            ## Skip joint callers - they are handled per case elsewhere
            next STRUCTURAL_CALLER
              if ( $structural_variant_caller =~ / $joint_caller /xsm );

            ## Expect vcf. Special case: manta, delly and tiddit are processed by joint calling and per case

            ## Get the io infiles per chain and id
            my %sample_io = get_io_files(
                {
                    id             => $sample_id,
                    file_info_href => $file_info_href,
                    parameter_href => $parameter_href,
                    recipe_name    => $structural_variant_caller,
                    stream         => $stream,
                }
            );
            my $infile_path_prefix = $sample_io{$stream}{file_path_prefix};
            my $infile_suffix      = $sample_io{$stream}{file_suffix};
            my $infile_path        = $infile_path_prefix . $infile_suffix;

            ## Record the bgzipped path that bcftools_view_and_index_vcf below
            ## will produce (output_type z)
            push @{ $file_path_href->{$structural_variant_caller} }, $infile_path . $DOT . q{gz};

            _add_to_parallel_chain(
                {
                    parallel_chains_ref => $parallel_chains_ref,
                    structural_variant_caller_chain =>
                      $parameter_href->{$structural_variant_caller}{chain},
                }
            );

            ## Reformat variant calling file and index
            bcftools_view_and_index_vcf(
                {
                    infile_path         => $infile_path,
                    outfile_path_prefix => $infile_path_prefix,
                    output_type         => q{z},
                    filehandle          => $filehandle,
                }
            );
        }
    }
    return;
}
sub _merge_or_reformat_single_callers_file {

## Function : Merged sample files to one case file (samples > 1) else reformat to standardise
## Returns  :
## Arguments: $active_parameter_href          => Active parameters for this analysis hash {REF}
##          : $case_id                        => Family ID
##          : $filehandle                     => Filehandle to write to
##          : $file_path_href                 => Store file path prefix {REF}
##          : $outdir_path_prefix             => Outdir path prefix
##          : $outfile_suffix                 => Outfile suffix
##          : $parameter_href                 => Parameter hash {REF}
##          : $recipe_info_path               => Program info path
##          : $structural_variant_callers_ref => Structural variant callers that do not use joint calling

    my ($arg_href) = @_;

    ## Flatten argument(s)
    my $active_parameter_href;
    my $filehandle;
    my $file_path_href;
    my $outdir_path_prefix;
    my $outfile_suffix;
    my $parameter_href;
    my $recipe_info_path;
    my $structural_variant_callers_ref;

    ## Default(s)
    my $case_id;

    ## Params::Check validation template
    my $tmpl = {
        active_parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$active_parameter_href,
            strict_type => 1,
        },
        case_id => {
            default     => $arg_href->{active_parameter_href}{case_id},
            store       => \$case_id,
            strict_type => 1,
        },
        filehandle     => { required => 1, store => \$filehandle, },
        file_path_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$file_path_href,
            strict_type => 1,
        },
        outdir_path_prefix => {
            defined     => 1,
            required    => 1,
            store       => \$outdir_path_prefix,
            strict_type => 1,
        },
        outfile_suffix => {
            defined     => 1,
            required    => 1,
            store       => \$outfile_suffix,
            strict_type => 1,
        },
        parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$parameter_href,
            strict_type => 1,
        },
        recipe_info_path => {
            defined     => 1,
            required    => 1,
            store       => \$recipe_info_path,
            strict_type => 1,
        },
        structural_variant_callers_ref => {
            default     => [],
            defined     => 1,
            required    => 1,
            store       => \$structural_variant_callers_ref,
            strict_type => 1,
        },
    };

    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};

    ## Alternation of joint-calling callers; spaces are ignored under /x
    my $joint_caller = q{manta | delly_reformat | tiddit};

  STRUCTURAL_CALLER:
    foreach my $structural_variant_caller ( @{$structural_variant_callers_ref} ) {

        ## Joint callers are handled elsewhere
        next STRUCTURAL_CALLER
          if ( $structural_variant_caller =~ / $joint_caller /xsm );

        ## Expect vcf. Special case: joint calling and per case

        ## Assemble file paths by adding file ending
        my @merge_infile_paths = @{ $file_path_href->{$structural_variant_caller} };
        my $merge_outfile_path = catfile( $outdir_path_prefix,
            $case_id . $UNDERSCORE . $structural_variant_caller . $outfile_suffix );

        ## Store merged outfile per caller
        push @{ $file_path_href->{$structural_variant_caller} }, $merge_outfile_path;

        if ( scalar @{ $active_parameter_href->{sample_ids} } > 1 ) {

            ## Merge all structural variant caller's vcf files per sample_id
            say {$filehandle} q{## Merge all structural variant caller's vcf files per sample_id};
            bcftools_merge(
                {
                    filehandle       => $filehandle,
                    infile_paths_ref => \@merge_infile_paths,
                    outfile_path     => $merge_outfile_path,
                    output_type      => q{v},
                    stderrfile_path  => $recipe_info_path
                      . $UNDERSCORE
                      . $structural_variant_caller
                      . $UNDERSCORE
                      . q{merge.stderr.txt},
                }
            );
            say {$filehandle} $NEWLINE;
        }
        else {

            ## Reformat all structural variant caller's vcf files per sample_id
            say {$filehandle}
              q{## Reformat all structural variant caller's vcf files per sample_id};
            bcftools_view(
                {
                    filehandle      => $filehandle,
                    infile_path     => $merge_infile_paths[0],    # Can be only one
                    outfile_path    => $merge_outfile_path,
                    output_type     => q{v},
                    ## NOTE(review): stderr file is also named "merge" in the
                    ## reformat branch - intentional reuse or copy-paste?
                    stderrfile_path => $recipe_info_path
                      . $UNDERSCORE
                      . $structural_variant_caller
                      . $UNDERSCORE
                      . q{merge.stderr.txt},
                }
            );
            say {$filehandle} $NEWLINE;
        }
    }
    return;
}
1;
| henrikstranneheim/MIP | lib/MIP/Recipes/Analysis/Sv_combinevariantcallsets.pm | Perl | mit | 30,538 |
#!/usr/bin/env perl
use v5.020;
use strict;
use warnings;
use utf8;
binmode STDOUT, ":utf8";
binmode STDERR, ":utf8";
binmode STDIN, ":encoding(UTF-8)";
use Data::Printer;
use FindBin;
use Getopt::Long;
use Template;
use Text::CSV_XS 'csv';
use lib "$FindBin::Bin/../lib";
use DartSense::Match;
use DartSense::Player;
use experimental 'signatures';
no warnings "experimental::signatures";
## Player registry keyed by name; @players preserves first-seen order and
## @matches holds every parsed match record.
my $players = {};
my @matches;
my @players;

my $options = { file => '', };

GetOptions( "file=s" => \$options->{file}, );

## Read the match CSV; the first row is the header.
my $aoa = csv( in => $options->{file} );

my $headers = shift @{$aoa};

## Number of rating iterations over all matches (repeated passes let the
## ratings settle).
my $passes = 5;

for ( @{$aoa} ) {
    my %args;

    ## Zip header names with the row's values into named constructor args.
    ## Column names are Dutch: speler1/speler2 = player1/player2.
    for ( my $i = 0; $i < @{$_}; $i++ ) {
        $args{ $headers->[$i] } = $_->[$i];
    }

    my $player1 = getPlayer( $args{speler1} );
    my $player2 = getPlayer( $args{speler2} );

    push @matches,
        DartSense::Match->new(
        { %args, player1 => $player1, player2 => $player2, } );
}

## Sort matches chronologically by sequence value.
## NOTE(review): cmp is a string comparison - assumes get_seq values sort
## correctly lexically; confirm if they are plain numbers.
@matches = sort { $a->get_seq cmp $b->get_seq } @matches;

for ( 1 .. $passes ) {
    for my $player (@players) {
        $player->resetPass;
    }
    for my $match (@matches) {
        $match->calcratings;
    }
}

## Rank players by final rating, highest first.
@players = sort { $b->{rating} <=> $a->{rating} } @players;

# p @players;

my $template
    = Template->new( { INCLUDE_PATH => "$FindBin::Bin/../var/templates" } );

## Render the overall ranking page.
$template->process(
    "ranking.tt",
    { players => \@players },
    "$FindBin::Bin/../var/www/index.html"
) || die $template->error();

## Render one page per player.
for (@players) {
    $template->process(
        "player.tt",
        { player => $_ },
        "$FindBin::Bin/../var/www/player_" . $_->get_name . ".html"
    ) || die $template->error();
}

# p @matches;
# # p $aoa;
## Return the player object for $name, creating and registering a new one
## on first sight (both in the %$players lookup and the ordered @players).
sub getPlayer {
    my ($name) = @_;

    ## Cache hit - reuse the existing player object.
    if ( my $cached_player = $players->{$name} ) {
        return $cached_player;
    }

    my $new_player = DartSense::Player->new( { name => $name } );
    $players->{$name} = $new_player;
    push @players, $new_player;

    return $new_player;
}
| basbloemsaat/dartsense | bin/old/genSite.pl | Perl | mit | 1,953 |
=head1 NAME
GeodSolve -- perform geodesic calculations
=head1 SYNOPSIS
B<GeodSolve> [ B<-i> | B<-l> I<lat1> I<lon1> I<azi1> ] [ B<-a> ]
[ B<-e> I<a> I<f> ] B<-u> ]
[ B<-d> | B<-:> ] [ B<-w> ] [ B<-b> ] [ B<-f> ] [ B<-p> I<prec> ] [ B<-E> ]
[ B<--comment-delimiter> I<commentdelim> ]
[ B<--version> | B<-h> | B<--help> ]
[ B<--input-file> I<infile> | B<--input-string> I<instring> ]
[ B<--line-separator> I<linesep> ]
[ B<--output-file> I<outfile> ]
=head1 DESCRIPTION
The shortest path between two points on the ellipsoid at (I<lat1>,
I<lon1>) and (I<lat2>, I<lon2>) is called the geodesic. Its length is
I<s12> and the geodesic from point 1 to point 2 has forward azimuths
I<azi1> and I<azi2> at the two end points.
B<GeodSolve> operates in one of three modes:
=over
=item 1.
By default, B<GeodSolve> accepts lines on the standard input containing
I<lat1> I<lon1> I<azi1> I<s12> and prints I<lat2> I<lon2> I<azi2>
on standard output. This is the direct geodesic calculation.
=item 2.
Command line arguments B<-l> I<lat1> I<lon1> I<azi1> specify a geodesic
line. B<GeodSolve> then accepts a sequence of I<s12> values (one per
line) on standard input and prints I<lat2> I<lon2> I<azi2> for each.
This generates a sequence of points on a single geodesic.
=item 3.
With the B<-i> command line argument, B<GeodSolve> performs the inverse
geodesic calculation. It reads lines containing I<lat1> I<lon1> I<lat2>
I<lon2> and prints the corresponding values of I<azi1> I<azi2> I<s12>.
=back
=head1 OPTIONS
=over
=item B<-i>
perform an inverse geodesic calculation (see 3 above).
=item B<-l>
line mode (see 2 above); generate a sequence of points along the
geodesic specified by I<lat1> I<lon1> I<azi1>. The B<-w> flag can be
used to swap the default order of the 2 geographic coordinates, provided
that it appears before B<-l>.
=item B<-a>
arc mode; on input I<and> output I<s12> is replaced by I<a12> the arc
length (in degrees) on the auxiliary sphere. See L</AUXILIARY SPHERE>.
=item B<-e>
specify the ellipsoid via I<a> I<f>; the equatorial radius is I<a> and
the flattening is I<f>. Setting I<f> = 0 results in a sphere. Specify
I<f> E<lt> 0 for a prolate ellipsoid. A simple fraction, e.g., 1/297,
is allowed for I<f>. (Also, if I<f> E<gt> 1, the flattening is set to
1/I<f>.) By default, the WGS84 ellipsoid is used, I<a> = 6378137 m,
I<f> = 1/298.257223563.
=item B<-u>
unroll the longitude. Normally, on output longitudes are reduced to lie
in [-180deg,180deg). However with this option, the returned longitude
I<lon2> is "unrolled" so that I<lon2> - I<lon1> indicates how often and
in what sense the geodesic has encircled the earth. Use the B<-f>
option, to get both longitudes printed.
=item B<-d>
output angles as degrees, minutes, seconds instead of decimal degrees.
=item B<-:>
like B<-d>, except use : as a separator instead of the d, ', and "
delimiters.
=item B<-w>
on input and output, longitude precedes latitude (except that, on input,
this can be overridden by a hemisphere designator, I<N>, I<S>, I<E>,
I<W>).
=item B<-b>
report the I<back> azimuth at point 2 instead of the forward azimuth.
=item B<-f>
full output; each line of output consists of 12 quantities: I<lat1>
I<lon1> I<azi1> I<lat2> I<lon2> I<azi2> I<s12> I<a12> I<m12> I<M12>
I<M21> I<S12>. I<a12> is described in L</AUXILIARY SPHERE>. The four
quantities I<m12>, I<M12>, I<M21>, and I<S12> are described in
L</ADDITIONAL QUANTITIES>.
=item B<-p>
set the output precision to I<prec> (default 3); I<prec> is the
precision relative to 1 m. See L</PRECISION>.
=item B<-E>
use "exact" algorithms (based on elliptic integrals) for the geodesic
calculations. These are more accurate than the (default) series
expansions for |I<f>| E<gt> 0.02.
=item B<--comment-delimiter>
set the comment delimiter to I<commentdelim> (e.g., "#" or "//"). If
set, the input lines will be scanned for this delimiter and, if found,
the delimiter and the rest of the line will be removed prior to
processing and subsequently appended to the output line (separated by a
space).
=item B<--version>
print version and exit.
=item B<-h>
print usage and exit.
=item B<--help>
print full documentation and exit.
=item B<--input-file>
read input from the file I<infile> instead of from standard input; a file
name of "-" stands for standard input.
=item B<--input-string>
read input from the string I<instring> instead of from standard input.
All occurrences of the line separator character (default is a semicolon)
in I<instring> are converted to newlines before the reading begins.
=item B<--line-separator>
set the line separator character to I<linesep>. By default this is a
semicolon.
=item B<--output-file>
write output to the file I<outfile> instead of to standard output; a
file name of "-" stands for standard output.
=back
=head1 INPUT
B<GeodSolve> measures all angles in degrees and all lengths (I<s12>) in
meters, and all areas (I<S12>) in meters^2. On input angles (latitude,
longitude, azimuth, arc length) can be as decimal degrees or degrees,
minutes, seconds. For example, C<40d30>, C<40d30'>, C<40:30>, C<40.5d>,
and C<40.5> are all equivalent. By default, latitude precedes longitude
for each point (the B<-w> flag switches this convention); however on
input either may be given first by appending (or prepending) I<N> or
I<S> to the latitude and I<E> or I<W> to the longitude. Azimuths are
measured clockwise from north; however this may be overridden with I<E>
or I<W>.
For details on the allowed formats for angles, see the C<GEOGRAPHIC
COORDINATES> section of GeoConvert(1).
=head1 AUXILIARY SPHERE
Geodesics on the ellipsoid can be transferred to the I<auxiliary sphere>
on which the distance is measured in terms of the arc length I<a12>
(measured in degrees) instead of I<s12>. In terms of I<a12>, 180
degrees is the distance from one equator crossing to the next or from
the minimum latitude to the maximum latitude. Geodesics with I<a12>
E<gt> 180 degrees do not correspond to shortest paths. With the B<-a>
flag, I<s12> (on both input and output) is replaced by I<a12>. The
B<-a> flag does I<not> affect the full output given by the B<-f> flag
(which always includes both I<s12> and I<a12>).
=head1 ADDITIONAL QUANTITIES
The B<-f> flag reports four additional quantities.
The reduced length of the geodesic, I<m12>, is defined such that if the
initial azimuth is perturbed by dI<azi1> (radians) then the second point
is displaced by I<m12> dI<azi1> in the direction perpendicular to the
geodesic. I<m12> is given in meters. On a curved surface the
reduced length obeys a symmetry relation, I<m12> + I<m21> = 0. On a
flat surface, we have I<m12> = I<s12>.
I<M12> and I<M21> are geodesic scales. If two geodesics are parallel at
point 1 and separated by a small distance I<dt>, then they are separated
by a distance I<M12> I<dt> at point 2. I<M21> is defined similarly
(with the geodesics being parallel to one another at point 2). I<M12>
and I<M21> are dimensionless quantities. On a flat surface, we have
I<M12> = I<M21> = 1.
If points 1, 2, and 3 lie on a single geodesic, then the following
addition rules hold:
s13 = s12 + s23,
a13 = a12 + a23,
S13 = S12 + S23,
m13 = m12 M23 + m23 M21,
M13 = M12 M23 - (1 - M12 M21) m23 / m12,
M31 = M32 M21 - (1 - M23 M32) m12 / m23.
Finally, I<S12> is the area between the geodesic from point 1 to point 2
and the equator; i.e., it is the area, measured counter-clockwise, of
the geodesic quadrilateral with corners (I<lat1>,I<lon1>), (0,I<lon1>),
(0,I<lon2>), and (I<lat2>,I<lon2>). It is given in meters^2.
=head1 PRECISION
I<prec> gives precision of the output with I<prec> = 0 giving 1 m
precision, I<prec> = 3 giving 1 mm precision, etc. I<prec> is the
number of digits after the decimal point for lengths. For decimal
degrees, the number of digits after the decimal point is I<prec> + 5.
For DMS (degree, minute, seconds) output, the number of digits after the
decimal point in the seconds component is I<prec> + 1. The minimum
value of I<prec> is 0 and the maximum is 10.
=head1 ERRORS
An illegal line of input will print an error message to standard output
beginning with C<ERROR:> and causes B<GeodSolve> to return an exit code
of 1. However, an error does not cause B<GeodSolve> to terminate;
following lines will be converted.
=head1 ACCURACY
Using the (default) series solution, GeodSolve is accurate to about 15
nm (15 nanometers) for the WGS84 ellipsoid. The approximate maximum
error (expressed as a distance) for an ellipsoid with the same major
radius as the WGS84 ellipsoid and different values of the flattening is
|f| error
0.01 25 nm
0.02 30 nm
0.05 10 um
0.1 1.5 mm
0.2 300 mm
If B<-E> is specified, GeodSolve is accurate to about 40 nm (40
nanometers) for the WGS84 ellipsoid. The approximate maximum error
(expressed as a distance) for an ellipsoid with a quarter meridian of
10000 km and different values of the I<a/b> = 1 - I<f> is
1-f error (nm)
1/128 387
1/64 345
1/32 269
1/16 210
1/8 115
1/4 69
1/2 36
1 15
2 25
4 96
8 318
16 985
32 2352
64 6008
128 19024
=head1 MULTIPLE SOLUTIONS
The shortest distance returned for the inverse problem is (obviously)
uniquely defined. However, in a few special cases there are multiple
azimuths which yield the same shortest distance. Here is a catalog of
those cases:
=over
=item I<lat1> = -I<lat2> (with neither point at a pole)
If I<azi1> = I<azi2>, the geodesic is unique. Otherwise there are two
geodesics and the second one is obtained by setting [I<azi1>,I<azi2>] =
[I<azi2>,I<azi1>], [I<M12>,I<M21>] = [I<M21>,I<M12>], I<S12> = -I<S12>.
(This occurs when the longitude difference is near +/-180 for oblate
ellipsoids.)
=item I<lon2> = I<lon1> +/- 180 (with neither point at a pole)
If I<azi1> = 0 or +/-180, the geodesic is unique. Otherwise there are
two geodesics and the second one is obtained by setting
[I<azi1>,I<azi2>] = [-I<azi1>,-I<azi2>], I<S12> = -I<S12>. (This occurs
when I<lat2> is near -I<lat1> for prolate ellipsoids.)
=item Points 1 and 2 at opposite poles
There are infinitely many geodesics which can be generated by setting
[I<azi1>,I<azi2>] = [I<azi1>,I<azi2>] + [I<d>,-I<d>], for arbitrary
I<d>. (For spheres, this prescription applies when points 1 and 2 are
antipodal.)
=item I<s12> = 0 (coincident points)
There are infinitely many geodesics which can be generated by setting
[I<azi1>,I<azi2>] = [I<azi1>,I<azi2>] + [I<d>,I<d>], for arbitrary I<d>.
=back
=head1 EXAMPLES
Route from JFK Airport to Singapore Changi Airport:
echo 40:38:23N 073:46:44W 01:21:33N 103:59:22E |
GeodSolve -i -: -p 0
003:18:29.9 177:29:09.2 15347628
Waypoints on the route at intervals of 2000km:
for ((i = 0; i <= 16; i += 2)); do echo ${i}000000;done |
GeodSolve -l 40:38:23N 073:46:44W 003:18:29.9 -: -p 0
40:38:23.0N 073:46:44.0W 003:18:29.9
58:34:45.1N 071:49:36.7W 004:48:48.8
76:22:28.4N 065:32:17.8W 010:41:38.4
84:50:28.0N 075:04:39.2E 150:55:00.9
67:26:20.3N 098:00:51.2E 173:27:20.3
49:33:03.2N 101:06:52.6E 176:07:54.3
31:34:16.5N 102:30:46.3E 177:03:08.4
13:31:56.0N 103:26:50.7E 177:24:55.0
04:32:05.7S 104:14:48.7E 177:28:43.6
=head1 SEE ALSO
GeoConvert(1).
An online version of this utility is available at
L<http://geographiclib.sourceforge.net/cgi-bin/GeodSolve>.
The algorithms are described in C. F. F. Karney,
I<Algorithms for geodesics>, J. Geodesy 87, 43-55 (2013); DOI:
L<https://dx.doi.org/10.1007/s00190-012-0578-z>;
addenda: L<http://geographiclib.sf.net/geod-addenda.html>.
The Wikipedia page, Geodesics on an ellipsoid,
L<https://en.wikipedia.org/wiki/Geodesics_on_an_ellipsoid>.
=head1 AUTHOR
B<GeodSolve> was written by Charles Karney.
=head1 HISTORY
B<GeodSolve> was added to GeographicLib, L<http://geographiclib.sf.net>,
in 2009-03. Prior to version 1.30, it was called B<Geod>. (The name
was changed to avoid a conflict with the B<geod> utility in I<proj.4>.)
| ObjSal/GeographicLib | man/GeodSolve.pod | Perl | mit | 12,153 |
#!/usr/bin/env perl
######################################################
#Created by Sarah Schmedes
#Will run entire strainphlan workflow, including images
#Designate -clade flag
#designated clade when prompted in STDIN
#Date Released: 08/01/2017
#####################################################
use strict;
use warnings;
use Genomix qw(:constants :meta);
use Getopt::Long;
#select samples to run through strainphlan
## Runtime configuration: metadata file plus the five command-line flags.
my $metafile = $OH_METAFILE;
my ( $bodysite, $samplelist, $sampath, $straindir, $straindbpath );

## Set flags.
## FIX: the original was missing the comma after \$straindir, which was a
## compile-time syntax error; the die message also misspelt "samplelist"
## and omitted the straindb_path flag.
GetOptions(
    'bodysite=s'      => \$bodysite,
    'samplelist=s'    => \$samplelist,
    'sampath=s'       => \$sampath,
    'output=s'        => \$straindir,
    'straindb_path=s' => \$straindbpath,
) or die "Must use bodysite, samplelist, sampath, output, and straindb_path flags\n";
#collect all SRSs for a particular body site
## queryDB/samplename come from the project Genomix module; they map the
## body-site SRS ids to human-readable sample names.
my @allbodySRS = queryDB($metafile, $bodysite, "bodysiteID", "srsID");
my $samplename_ref = samplename(@allbodySRS);
my %bodysamplename = %$samplename_ref;
## Read the revised sample list and collect the sample names belonging to
## the requested body site.
## FIX: replaced the unsafe 2-arg open with a bareword handle by a 3-arg
## open on a lexical filehandle, and check close for buffered I/O errors.
open my $samples_fh, '<', $samplelist
    or die "Could not open revised sample list for reading! $!\n";
my %samplenames;
while ( my $line = <$samples_fh> ) {
    chomp $line;

    ## Skip the header row.
    next if $line =~ /^Bodysite/;

    my ( $body, $ind, $time, @data ) = split /\t/, $line;
    if ( $body eq $bodysite ) {
        ## Sample names follow the <individual>_<bodysite>_<timepoint> convention.
        my $samplename = "$ind\_$body\_$time";
        $samplenames{$samplename} = 1;
    }
}
close $samples_fh
    or die "Could not close $samplelist: $!\n";
## Resolve each selected sample name back to its SRS id, preserving the
## sorted sample-name order.
my @sortednames = sort {$a cmp $b} keys %samplenames;
my @SRSlist;
foreach my $samplename (@sortednames) {
    foreach my $key (keys %bodysamplename) {
        if ($bodysamplename{$key} eq $samplename) {
            push(@SRSlist, $key);
        }
    }
}
#run strainphlan on all samples
#all metaphlan2/strainphlan scripts must be in the environmental path
foreach my $sample (@SRSlist) {
    #Generate a marker file for each sample.
    #The marker file contains the consensus of unique marker genes for each species found in the sample
    #This marker file will be used for SNP profiling
    ## Skip samples whose marker file already exists (resume support).
    if (! -f "$straindir/$bodysite/consensus_markers/$sample\.markers") {
        ## system() returns non-zero on failure, hence the truthy check.
        if (system("sample2markers.py --ifn_samples $sampath/$sample\.sam.bz2 --input_type sam --output_dir $straindir/$bodysite/consensus_markers --nprocs 20 1> $straindir/$bodysite/consensus_markers/$sample\_log.txt 2> $straindir/$bodysite/consensus_markers/$sample\_error.txt"))
        {
            die "Strainphlan sample2markers.py ERROR: $!\n";
        }
    }
}
#Run strainphlan to identify clades that were detected in all samples
#providing the marker files generated in the prior step
#to see which clades can be SNP-profiled
if (! -f "$straindir/$bodysite/clades/$bodysite\_clades.txt") {
    if (system("strainphlan.py --ifn_samples $straindir/$bodysite/consensus_markers/*.markers --output_dir $straindir/$bodysite/clades --print_clades_only --nprocs_main 20 1> $straindir/$bodysite/clades/$bodysite\_clades.txt 2> $straindir/$bodysite/clades/$bodysite\_errorlog.txt"))
    {
        die "strainphlan.py ERROR: $!\n";
    }
}
## Interactive step: the operator picks a clade and reference genome from
## the clade report generated above.
print "Clade to analyze\n";
my $clade = <STDIN>;
chomp $clade;
print "Please specify reference genome NAME (NAME.fna.bz2)\n";
my $refgen = <STDIN>;
chomp $refgen;
if (! -f "$straindir/db_markers/$clade\.markers.fasta") {
    #Build reference database for the designated clade
    #This step only needs to be done once for each species for all projects
    if (system("extract_markers.py --mpa_pkl $straindbpath/mpa_v20_m200.pkl --ifn_markers $straindir/db_markers/all_markers.fasta --clade $clade --ofn_markers $straindir/db_markers/$clade\.markers.fasta"))
    {
        die "Strainphlan extract_markers.py ERROR: $!\n";
    }
}
else {
    ## NOTE(review): "exits" is a typo for "exists" in this user message.
    print "$clade\.markers.fasta already exits\n";
}
#Build the multiple sequence alignment and phylogenetic tree
#Will align and clean sample-reconstructed strains (stored in .markers)
#and reference-genome-reconstructed strains (from clade.markers.fasta)
#Builds tree using RAxML
#If a reference genome is not specified or if no clade is specified then
#strainphlan.py will build the tree for all species it can detect
if (system("strainphlan.py --mpa_pkl $straindbpath/mpa_v20_m200.pkl --ifn_samples $straindir/$bodysite/consensus_markers/*.markers --ifn_markers $straindir/db_markers/$clade\.markers.fasta --ifn_ref_genomes $straindir/reference_genomes/$refgen\.fna.bz2 --output_dir $straindir/$bodysite/output --relaxed_parameters2 --nprocs_main 5 --clades $clade 1> $straindir/$bodysite/output/log_full.txt 2> $straindir/$bodysite/output/error_full.txt"))
{
    die "strainphlan.py ERROR: $!\n";
}
#Add metadata to the tree
#must of metadata file in strainphlan group directory
#multiple trees and multiple metadata files can be used (space separated, and wild card can be used)
#metadata file (tab separated, can have multiple columns)
my $metadata = "SubjectID"; #change based on what metadata you want listed on the tree
if (system("add_metadata_tree.py --ifn_trees $straindir/$bodysite/output/RAxML_bestTree.$clade\.tree --ifn_metadatas $straindir/$bodysite/$bodysite\_metadata.txt --metadatas $metadata"))
{
    die "Strainphlan add_metadata_tree.py ERROR: $!\n";
}
#Plot tree using Graphlan (graphlan scripts must be in path)
if (system("plot_tree_graphlan.py --ifn_tree $straindir/$bodysite/output/RAxML_bestTree.$clade\.tree.metadata --colorized_metadata $metadata --leaf_marker_size 60 --legend_marker_size 60"))
{
    die "Graphlan plot_tree_graphlan.py ERROR: $!\n";
}
#Create dendrogram using ggtree script
#breadcrumbs directory must be in path
if (system("strainphlan_ggtree_Mod.R $straindir/$bodysite/output/RAxML_bestTree.$clade\.tree $straindir/$bodysite/$bodysite\_metadata.txt $straindir/$bodysite/output/$clade\.fasta $straindir/$bodysite/output/$bodysite\_$clade\_ggtree_1.png $straindir/$bodysite/output/$bodysite\_$clade\_ggtree_2.png"))
{
    die "strainphlan_ggtree_Mod.R ERROR: $!\n";
}
#Create a distance matrix
if (system("distmat -sequence $straindir/$bodysite/output/$clade\.fasta -nucmethod 2 -outfile $straindir/$bodysite/output/$clade\.distmat"))
{
    die "distmat ERROR: $!\n";
}
if (system("strainphlan_ordination_Mod.R $straindir/$bodysite/output/$clade\.distmat $straindir/$bodysite/$bodysite\_metadata.txt $straindir/$bodysite/output/$bodysite\_$clade\_strainord.png"))
{
    die "strainphlan_ordination_Mod.R ERROR: $!\n";
}
| SESchmedes/HIDskinmicrobiome | strainID.pl | Perl | mit | 6,271 |
use strict;
use warnings;
use 5.018;

# Slurp every input line and strip trailing newlines.
chomp( my @input_lines = <> );

# Echo each line that contains at least one literal dot.
foreach my $input_line (@input_lines) {
    say $input_line if $input_line =~ /[.]/;
}
| moosan63/LearningPerl | chapter7/ex7-3.pl | Perl | mit | 118 |
#!/usr/bin/perl
#
# Convert erlang source into markdown
#
# 1/ non-comment lines become quoted blocks (github)
# 2/ comment lines are treated as straight markdown
#
# Pipe-only, usage:
# emdoc.pl < foo.erl > foo.erl.md
#
# The markdown can then be converted by other tools
# [Might be nice if we could do syntax coloring of the source.]
#
# UNFINISHED
# - accept some options to improve script usage
# - improve output, see below
use strict;
use warnings;
## Convert the lines in a straightforward fashion.
## NB: Tested by pasting into dillinger.io.
##
## When we switch text <-> code block, we insert an
## empty line. This seems to fix the formatting hijinks.
## 1 if emitting codeblock, 0 otherwise
## used to emit blank separation lines
my $code_mode = 0;
while (my $line = <>) {
if ($line =~ m{^%}) {
## Comment line, drop leading comment chars and an optional space
## (the result is processed as markdown)
##
## Emit extra blank line if previously emitting code block
if ($code_mode) {
print "\n";
}
$code_mode = 0; ## (or do it in the if)
$line =~ s{^%+\s?}{};
print $line;
} elsif ($line =~ m{^\s*$}) {
## Blank lines left untreated
print $line;
} else {
## Code line, put in verbatim block (= prepend 4 spaces)
##
## Emit extra blank line if previously emitting text
if (!$code_mode) {
print "\n";
}
$code_mode = 1; ## (or do it in the if)
print " ".$line;
}
}
| thomasl/emdoc | bin/emdoc.pl | Perl | mit | 1,435 |
package Thingiverse::SizedImage;
# Represents one resized rendition of a Thingiverse image: a URL plus the
# rendition's type and size label (the API serves 15 such renditions per image).
use strict;
use warnings;
use Moose;
use Moose::Util::TypeConstraints;
use Data::Dumper;
use Carp;
use Thingiverse::Types;
extends('Thingiverse');
# All three attributes are optional read-only fields filled from the API's
# "sizes" entries; the type/size constraints come from Thingiverse::Types.
has url => ( isa => 'Str', is => 'ro', required => 0, );
has image_type => ( isa => 'ThingiverseImageType', is => 'ro', required => 0, );
has image_size => ( isa => 'ThingiverseImageSize', is => 'ro', required => 0, );
# __PACKAGE__->thingiverse_attributes(
# {
# api_base => "/thing/%d/images/%d",
# pk => { id => { isa => 'ID' } },
# fields => {
# name => { isa => 'Str' },
# url => { isa => 'Str' },
# thing_id => { isa => 'ID' },
# }
# }
# );
# ABSTRACT: Images uploaded to Thingverse are resized to 15 different "standard" sizes, each with a url, a type and a size.
=head1 SYNOPSIS
...
=head1 SEE ALSO
=for :list
* L<Thingiverse>
* L<Thingiverse::User>
* L<Thingiverse::User::List>
* L<Thingiverse::Cache>
* L<Thingiverse::Thing>
* L<Thingiverse::Thing::List>
* L<Thingiverse::Tag>
* L<Thingiverse::Tag::List>
* L<Thingiverse::Category>
* L<Thingiverse::Collection>
* L<Thingiverse::Collection::List>
* L<Thingiverse::Comment>
* L<Thingiverse::File>
* L<Thingiverse::File::List>
* L<Thingiverse::Image>
* L<Thingiverse::Copy>
* L<Thingiverse::Pagination>
* L<Thingiverse::Cache>
* L<Thingiverse::Group>
=cut
# NOTE(review): everything after the __END__ marker below (a "sizes"
# attribute, a builder sub and a BUILDARGS wrapper) is never compiled by
# Perl — confirm whether that code was meant to live before __END__.
no Moose;
__PACKAGE__->meta->make_immutable;
1;
__END__
from images attribute of Thingiverse::Image
sizes: [15]
0: {
type: "thumb"
size: "large"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_thumb_large.jpg"
}-
1: {
type: "thumb"
size: "medium"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_thumb_medium.jpg"
;-
has sizes => (
traits => ['Array'],
is => 'ro',
isa => 'ArrayRef[Thingiverse::SizedImage]',
required => 0,
handles => {
all_sizes => 'elements',
add_sizes => 'push',
map_sizes => 'map',
filter_sizes => 'grep',
find_sizes => 'grep',
get_sizes => 'get',
join_sizes => 'join',
count_sizes => 'count',
has_sizes => 'count',
has_no_sizes => 'is_empty',
sorted_sizes => 'sort',
},
builder => '_get_sized_versions_of_this_image',
);
sub _get_sized_versions_of_this_image {
my $self = shift;
my $sizes = $self->{sizes};
if ( ref($sizes) eq 'ARRAY' ) {
foreach ( @{$sizes} ) {
$_ = Thingiverse::SizedImage->new($_) if (ref($_) eq 'HASH' );;
}
}
return $self;
};
# need both the thing_id and the image_id to make an API call
# around BUILDARGS => sub {
# my $orig = shift;
# my $class = shift;
# my $thing_id;
# my $image_id;
# my $json;
# my $hash;
# if ( @_ == 1 && ref $_[0] eq 'HASH' && ${$_[0]}{'id'} && ${$_[0]}{'thing_id'} && not exists ${$_[0]}{'sizes'} ) {
# $thing_id = ${$_[0]}{'thing_id'};
# $image_id = ${$_[0]}{'id'};
# } elsif ( @_ == 2 ) { # passed two id's, thing_id and image_id
# $thing_id = $_[0];
# $image_id = $_[1];
# } else {
# my $return = $class->$orig(@_); # almost all Thingiverse::Image creations will be from a predefined hash from a Thingiverse::Thing or Thingiverse::Collection.
# $return = _get_sized_versions_of_this_image($return);;
# return $return;
# }
# $json = _get_from_thingi_given_id($image_id,$thing_id);
# $hash = decode_json($json);
# $hash->{_original_json} = $json;
# return $hash;
# };
no Moose;
__PACKAGE__->meta->make_immutable;
1;
__END__
{
id: 880611
name: "rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0.jpg"
url: "https://api.thingiverse.com/things/316754/images/880611"
sizes: [15]
0: {
type: "thumb"
size: "large"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_thumb_large.jpg"
}-
1: {
type: "thumb"
size: "medium"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_thumb_medium.jpg"
}-
2: {
type: "thumb"
size: "small"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_thumb_small.jpg"
}-
3: {
type: "thumb"
size: "tiny"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_thumb_tiny.jpg"
}-
4: {
type: "preview"
size: "featured"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_preview_featured.jpg"
}-
5: {
type: "preview"
size: "card"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_preview_card.jpg"
}-
6: {
type: "preview"
size: "large"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_preview_large.jpg"
}-
7: {
type: "preview"
size: "medium"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_preview_medium.jpg"
}-
8: {
type: "preview"
size: "small"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_preview_small.jpg"
}-
9: {
type: "preview"
size: "birdwing"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_preview_birdwing.jpg"
}-
10: {
type: "preview"
size: "tiny"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_preview_tiny.jpg"
}-
11: {
type: "preview"
size: "tinycard"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_preview_tinycard.jpg"
}-
12: {
type: "display"
size: "large"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_display_large.jpg"
}-
13: {
type: "display"
size: "medium"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_display_medium.jpg"
}-
14: {
type: "display"
size: "small"
url: "https://thingiverse-production.s3.amazonaws.com/renders/e4/61/5c/c6/82/rounded_rectangular_parallelepiped_20140501-27286-2xdd64-0_display_small.jpg"
}
}
| perlygatekeeper/perlythingkeeper | lib/Thingiverse/SizedImage.pm | Perl | cc0-1.0 | 7,056 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V10::Resources::QualityInfo;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);
use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;
sub new {
  my ($class, $args) = @_;

  # Copy each QualityInfo field this resource supports from the argument hash.
  my $self = {};
  $self->{$_} = $args->{$_}
    for qw(creativeQualityScore postClickQualityScore qualityScore
    searchPredictedCtr);

  # Drop any fields that were never assigned so the JSON payload stays concise.
  remove_unassigned_fields($self, $args);

  return bless $self, $class;
}
1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V10/Resources/QualityInfo.pm | Perl | apache-2.0 | 1,220 |
%% these imports are added to all gx files
%% Portability shim for varset/2: Ciao Prolog supplies it via
%% library(terms_vars); on every other dialect we define it ourselves
%% in terms of the ISO built-in term_variables/2.
:- if(current_prolog_flag(dialect, ciao)).
:- use_module(library(terms_vars)).
:- else.
%% varset(+Term, -Vars): Vars is the list of variables occurring in Term.
varset(A,B) :- term_variables(A,B).
:- endif.
| leuschel/logen | gxmodules.pl | Perl | apache-2.0 | 177 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Web::JSONServer::Tools::AlleleFrequency;
# JSON endpoint for the Allele Frequency tool; all request handling is
# inherited unchanged from the generic Tools JSON server parent class.
use strict;
use warnings;
use parent qw(EnsEMBL::Web::JSONServer::Tools);
# Identifies the Tools object type served by this endpoint.
sub object_type { 'AlleleFrequency' }
1;
| muffato/public-plugins | tools/modules/EnsEMBL/Web/JSONServer/Tools/AlleleFrequency.pm | Perl | apache-2.0 | 888 |
=head1 NAME
History
=head1 Description
Since mod_perl's inception in 1996, many things have changed, and it's
interesting to look at mod_perl's evolution and related events during
the time from then to now.
Based on the mod_perl I<Changes> file and talks with mod_perl
developer's, we have here reconstructed the important steps in the
development of this successful Open Source project.
=head1 Beginnings
The first version of mod_perl was written by Gisle Aas and released on
March 25, 1996. The effort was instantly recognized by Doug MacEachern
and Andreas Koenig; the former had been working on Perl
embedding. They picked up the torch and brought the project we all
love to what it has become today, thanks to the help of countless
contributors.
An extract from I<Changes> (the first one):
=item March 25, 1996
Initial version of mod_perl.c and perl_glue.xs
by Gisle Aas <aas (at) oslonett.no>
Thanks for getting this started Gisle!
Andreas Koenig tells us about how things happened:
"It was a time when FastCGI was still considered cool. But making
FastCGI work required a patched perl, since tied file handles were
still in their infancy.
"PHP was also around already, and was an embarrassing witness to Perl's
greatest weakness for server-side programming: that embedding Perl was
a pain. Although the hooks were there for embedding Perl, they were
both undocumented and buggy.
"Doug MacEachern first got involved by addressing these problems. He
wrote documentation for embedding Perl (the C<perlembed> manpage) and
fixed a couple of bugs. Then one day, Gisle Aas posted on
perl5-porters that he had built Apache with an embedded Perl
interpreter as a proof-of-concept. However, Gisle lacked the time for
further work.
"That announcement was like a lightning bolt for at least two guys:
Doug and me. While Doug shuffled the C code, I wrote the first working
I<Makefile.PL>, or at least I smoothed the build process to reduce the
error rate resulting from silly mistakes during installation. Doug was
working on HP-UX and I was using IRIX, so Doug wrote
C<ExtUtils::Embed> to generate the command line arguments for I<gcc>
that tie things together for embedded applications.
"Early versions of mod_perl needed to be recompiled to add a single
CGI application. To get over that, I invented something without a name
that mapped filenames to perl package names. When Doug received it, he
called it C<Apache::Registry>, as noted in I<Changes>:
=item release 0.75a1 - July 21, 1996
added Apache::Registry module contributed by
Andreas Koenig <andreas.koenig (at) franz.ww.tu-berlin.de>
"From that moment in July 1996, we could count on a number of
interested developers on the mailing list to test the nascent mod_perl.
The I<Changes> file mentions a few of them: Salvador Ortiz, Alan
Beale, and John Detloff. Rob Hartill of IMDB fame joined us in
July. (See L<contributors|about::contributors::people> for more
information.)
In August 1996, time was ripe to let a production server run mod_perl,
and PAUSE (the Perl Authors Upload Server) was the first such
server. We still had to use C<$r-E<gt>print>, and couldn't C<"use
CGI">, but we could add and remove scripts without recompiling and we
were happy. Being unable to use the popular C<CGI.pm> module turned
out to be a pain for us, so I wrote a complete C<CGI.pm> clone,
C<CGI::XA> and hoped that Lincoln would pick up the ball once there
was a working alternative implementation. Eventually, Lincoln (with
the encouragement of Mike Stok) made C<CGI.pm> compatible with
mod_perl starting with C<CGI.pm> 2.32, and in March 1997, C<CGI::XA>
was removed from the mod_perl distribution. This was one of the most
important entries into the Changes file:
=item 0.95 - 03/20/97
******************************************
*** CGI-XA/* removed from distribution ***
CGI.pm-2.32 is now mod_perl compatible, and
now ships with CGI::Switch and CGI::Apache.
******************************************
Can you feel the relief it was for Doug to write that? I think this
was the greatest day of the whole development. One year of very tough
work got the reward it deserved.
After that, mod_perl started to get attention from an increasing
number of users. Doug worked like mad on fixing bugs and inventing one
thing after another, just as he still does today. Things started
flowing and people sent in patches, so Doug got the impression that
the bazaar model was beginning to work. (From Eric Raymond's essay
"The Cathedral and the Bazaar," the unofficial manifesto of the Open
Source movement.)
I remember one day Doug got a confidential message from a Sun
employee. They had made an investigation on "where the web is
heading", and they had come to the conclusion that "mod_perl will have
an impact on the whole Web"."
=head1 Up to 1.0
The first public release after Gisle's proof-of-concept happened on
May 1, 1996: release 0.50a1 of mod_perl, with a long list of changes.
In 0.50a2, an alternative implementation was provided, mod_perl_fast,
which became the standard in 0.83_10.
Another probably important change was the possibility of using
C<print> instead of C<$r-E<gt>print>, greatly facilitating output
generation:
=item release 0.80 - September 06, 1996
[...]
we now take advantage of Perl's new IO abstraction so STDIN and
STDOUT are hooked up to the client.
Thanks to Sven Verdoolaege <skimo@breughel.ufsia.ac.be> for the
initial patch
With 0.85 came the start of the test suite!
=item 0.85
added the start of a mod_perl test suite
Another interesting feature was added just before 1.0: stacked
handlers!
=item 0.95_02
introduced experimental "stacked handlers" mechanism, allowing more
than one Perl*Handler to be defined and run during each stage of the
request.
Perl*Handler directives can now define any number of subroutines, e.g.
PerlTransHandler OneTrans TwoTrans RedTrans BlueTrans
with a new method, Apache->push_handlers can add to the stack by
scripts at runtime
And just after that, our beloved C<E<lt>PerlE<gt>> sections!
=item 0.95_03
[...]
added <Perl> config section (see httpd.conf.pl)
(needs 'perl Makefile.PL PERL_SECTIONS=1')
Finally, more than one year after Doug's original 0.50a1, 1.0 was
released on July 28 1997:
=item 1.00 - 07/28/97
It primarily began adapting to the 1.3 series of Apache.
=head1 1.x development
=head2 Core
During 1.x development, there has generally been many bug fixes and
adaptions to Apache and Perl versions, striving to remain
compatible. Some parts stand out as pretty important.
In v1.12, the important APACI-support was added thanks to Ralf
S. Engelschall:
=item 1.12 - June 14, 1998
added new (but still optional!) Apache 1.3 support via the new Apache
Autoconf-style Interface (APACI): The option USE_APACI=1 triggers a new
build-environment (from local apaci/) for the APACHE_SRC/src/modules/perl/
which provides a clean way (i.e. without patching anything inside APACHE_SRC,
not even the Configuration file) of configuring mod_perl via a
APACHE_SRC/src/modules/perl/mod_perl.config file. The complete configuration
is enabled by just using the APACI command "configure
--activate-module=src/modules/perl/libperl.a"
[Ralf S. Engelschall <rse@engelschall.com>]
And with new versions of Perl come new fixes to mod_perl of course.
=item 1.22 - March 22, 2000
compile fixes for 5.6 + -Duse5005threads
[Lincoln Stein <lstein@cshl.org>]
But the most important happenings weren't the bug fixes in the mod_perl
core, but all that happened around it. The L<usage
statistics|outstanding::stats::netcraft> show an incredible boom in
mod_perl deployment, which has been accompanied by the release of very
interesting applications and frameworks for mod_perl.
=head2 Related events
Maybe even more interesting are the things happening around mod_perl,
mainly concerning Perl and Apache. The reason is that this impacts as
much on mod_perl users as the changes to mod_perl itself; for example,
a bug fix in Perl will help a lot to someone writing Perl handlers,
and a security fix in Apache is of immense benefit to I<anyone>
running an Apache server.
I<Writing Apache Modules with Perl and C> (http://www.modperl.com/),
by Lincoln Stein and Doug MacEachern, for a long time considered the
best resource for mod_perl programmers, was published in March 1999 by
O'Reilly & Associates. While not the only book on the subject, it is
still a must-have for any serious mod_perl programmer.
At ApacheCon in Orlando in 1999, mod_perl officially became an Apache
Software Foundation project, and Ask Bjørn Hansen, Eric Cholet and
Stas Bekman were voted in as ASF members in addition to Doug
MacEachern. Together they formed L<the mod_perl PMC|about::pmc>.
In March 2000, Perl 5.6.0 was released, bringing many new features to
Perl and mod_perl programmers the world over.
In October 2000, Take23 (http://www.take23.org/) was created as an
alternative site for mod_perl, because of the back-and-forth
discussions about re-designing the I<perl.apache.org> site weren't
going anywhere at that time. It collected news and articles about
mod_perl and also related issues such as other interesting Apache
modules. It wasn't maintained for several years, and somewhere in 2003
it has disappeared.
Also in October 2000, Geoffrey Young got the idea to begin a mod_perl
mailing list digest (see
http://marc.theaimsgroup.com/?l=apache-modperl-dev&m=97051473628623&w=2
and
http://marc.theaimsgroup.com/?l=apache-modperl&m=97059662005378&w=2 ),
which he kept up regularly (weekly, then biweekly) up until late 2001,
when James G. Smith took over and has been running it since then.
The I<mod_perl Pocket Reference>
(http://www.oreilly.com/catalog/modperlpr/), by Andrew Ford, was
published by O'Reilly and Associates in December 2000. It summarizes
the whole mod_perl API as well as configuration directives and some
other tips in an easy-to-use format.
In January 2002, the I<mod_perl Developer's Cookbook>
(http://www.modperlcookbook.org/), authored by Geoffrey Young, Paul
Lindner and Randy Kobes, was published by Sams Publishing. It presents
the mod_perl API by example, teaching a programmer all the facets of
mod_perl installation, programming and configuration, and is a
valuable resource to everyone.
META:
- mailing list creations
- beginnings of new site
- conferences w/ mod_perl present
- when Doug and Stas funded? Stas: August 2001; end 2002
=head1 The arrival of 2.0
...
=head1 Future directions for mod_perl
mod_perl has clearly shown its strength as an Open Source project and
application development platform. mod_perl 2.0 has been a jump
forward, but with the arrival of Perl 6 we might expect another new
version of mod_perl. If the developers are still interested, that
is. There has been started development on mod_parrot
(http://svn.perl.org/parrot-modules/mod_parrot), but Perl 6 is far from
ready, so we don't really know what will be needed. The future holds
great things for us.
I will quote Stas Bekman's commentary in the L<contributors
list|about::contributors::people/stas-bekman>:
I<"He is now thinking about mod_perl 3.0's architecture, hopefully to
be implemented solely with AND and OR gates, driven by the Perl 6.0
chipset for the best performance. Don't be surprised when you get
offered a shiny Bluetooth mod_perl 3.0 PCI card when you shop for your
new server machine.">
Who knows?
=head1 See Also
=over
=item *
CPAST: Comprehensive Perl Arcana Society Tapestry,
http://history.perl.org/
=item *
About the Apache HTTP Server Project,
http://httpd.apache.org/ABOUT_APACHE.html
=item *
The I<perlhist> manpage, containing records of all perl versions, and
the I<perl*delta> manpages relating changes in the respective
versions.
=back
=head1 Maintainers
The maintainer is the person you should contact with updates,
corrections and patches.
=over
=item *
Per Einar Ellefsen E<lt>pereinar (at) oslo.online.noE<gt>
=back
=head1 Authors
=over
=item *
Per Einar Ellefsen E<lt>pereinar (at) oslo.online.noE<gt>
=back
Only the major authors are listed above. For contributors see the
Changes file.
=cut
| Distrotech/mod_perl | docs/src/about/history.pod | Perl | apache-2.0 | 12,251 |
#!/usr/bin/perl -w
## Raquel Norel (rnorel@us.ibm.com) 01/2015
#validate format of predictions for DREAM9.5 Olfactory Prediciton Challenge (sub challenge 1)
#not using gold standard here, hardcode or build needed info
use strict;
use Cwd qw(abs_path cwd);
## command line arguments: prediction file, report file, phase flag (L or F)
if (@ARGV != 3) {
	print "\nUSAGE:perl $0 <input file to validate> <output file> <flag L for Leaderboard or F for final submission> \n\n";
	print "Format validation for DREAM9.5 Olfactory Prediction Sub Challenge 1\n";
	print "example: perl $0 my_prediction.txt errors.txt L\n";
	exit;
}
##PARAMETERS
my @header = ('#oID','individual','descriptor','value');#just in case, since values for different columns are quite different
# 71001 expected lines = 69 odors x 49 individuals x 21 descriptors
my $DATA_POINTS = 71001; # number of entries to get
my $Ncols = 4; # #oID descriptor value sigma
#global variables, populated by generate_ids() and used by the check_* subs
my %oIDs = ();
my %descriptors = ();
my %individuals = ();
my $dir = cwd();
#prediction file
my $file = $ARGV[0]; #input file, predictions
my $out_fn = "$dir/$ARGV[1]"; #output file, results from validation script
my $phase = $ARGV[2]; #use F for Final scoring and L for Leaderboard
if (($phase ne 'F') && ($phase ne 'L')){
	die "phase has to be either L or F not $phase. Bye...\n";
}
print STDERR "reading $file\n";
#generate expected ids; %val tracks which (oID, individual, descriptor)
#triples have been seen (-1 = unseen, 1 = seen)
my ($ref_val) = generate_ids($phase);
my %val = %$ref_val;
# Slurp the whole prediction file into one string.
my $lines;
{
	open my $fh, "<", $file or die $!;
	local $/; # enable localized slurp mode
	$lines = <$fh>;
	close $fh;
}
my @all = split(/[\r\n]+/,$lines); #Mac, Unix and DOS files
my $valid_data_lines=0; #how many valid data lines have been seen
my $check_header_flag=0; #counts header lines; must end up exactly 1
my $errors = ''; #keep all errors to be reported to user
#while (<IN>) {
my $ln=1;
foreach (@all){
	#print STDERR "processing $_";
	my $line = $_;
	if ($line =~ /^\s*$/) {next;} ## skip blank lines
	#need to check for columns number before other check to avoid warning if less columns than expected
	my @tmp = split ("\t", $line); #split on tabs; separate only once for all the tests
	$tmp[2] =~ s/\s//g; #remove spaces from the descriptor; detected on CHEMICAL
	my $n_cols = scalar @tmp; #number of columns
	# Validation stops at the first error: each check_* sub returns 0
	# after appending its message to $errors, which breaks the loop.
	if (!check_column_number($n_cols,$ln)){last;} #correct number of columns?
	if (/^#/) { ## check header, assume is 1st line
		$check_header_flag++ ; #header detected
		# Compare each header column against the expected names.
		for (my $i=0; $i< scalar(@header); $i++){
			if ($tmp[$i] ne $header[$i]) {
				$errors .= "\n".'ERROR in the header ';
				my $tmpi = $i + 1;
				$errors .= "Column $tmpi is $tmp[$i] and should be $header[$i]. Error at input line # $ln.\n";
				last;
			}
		}
	}
	else{
		if (!check_format_labels($tmp[0],$tmp[1],$tmp[2],$ln)){last;} #correct "labels", is it repeated?
		if (!check_format_cols4($tmp[3],$ln)){last;} #correct format of col 4; numeric between 0 and 100
		$valid_data_lines++;
	}
	$ln++;
}
if ($check_header_flag != 1) { $errors .= "We didn't detect the correct header in the prediction file.\n";}
#error reporting: first line of the report is OK / NOT_OK, details follow
open (FH, "> $out_fn") or die "cannot open $out_fn (why? $!) \n";
if (($errors eq '' ) && ($valid_data_lines == $DATA_POINTS)) {print FH "OK\nValid data file\n";} #all good; still need to check for header count
elsif (($errors eq '' ) && ($valid_data_lines < $DATA_POINTS)){
	check_missing(); #only check for missing prediction if no other errors are found, since quitting at 1st error
	print FH "NOT_OK\nYou have the following error(s): $errors\n";
	print FH "Please see the template file and resubmit the updated file.\n";
}
else {
	print FH "NOT_OK\nYou have the following error(s): $errors\n";
	print FH "Please see the template file and resubmit the updated file.\n";
}
close FH;
###########subroutines##############
# Report every (odor, individual, descriptor) combination that never got a
# prediction line.  Reads the file-level %oIDs, %descriptors and %val hashes
# (slots still at -1 are unseen) and appends one message per gap to $errors.
sub check_missing{
	for my $oid (sort keys %oIDs){
		for my $individual (1 .. 49){
			for my $desc (sort keys %descriptors){
				next if ($val{$oid}{$individual}{$desc} >= 0);
				$errors .= "Missing predictions for $oid $individual $desc entry\n";
			}
		}
	}
	return(1);
}
# Validate the three label columns of one data line: the odor ID, the
# individual and the descriptor must each be known (lookups in the
# file-level %oIDs, %individuals and %descriptors hashes), and the
# (odor, individual, descriptor) triple must not have appeared before.
# On success the triple is marked as seen in %val and 1 is returned; on
# the first failure a message is appended to $errors and 0 is returned.
sub check_format_labels{
	my ($oid, $ind, $des, $ln) = @_;
	if (!defined($oIDs{$oid})) { $errors .= "$oid is not a valid odor ID. Error at input line # $ln.\n"; return(0);} #failed test
	if (!defined($descriptors{$des})) { $errors .= "$des is not a valid odor descriptor. Error at input line # $ln.\n"; return(0);} #failed test
	if (!defined($individuals{$ind})) { $errors .= "$ind is not a valid individual id. Error at input line # $ln.\n"; return(0);} #failed test
	# Slots start at -1 in generate_ids(); flipping to 1 marks them seen.
	if($val{$oid}{$ind}{$des} == -1){
		$val{$oid}{$ind}{$des} = 1;
	}
	else {$errors .= "$oid, $ind with $des is a duplicated entry. Error at input line # $ln.\n"; return(0);} #failed test
	return(1);
}
# Validate the prediction value (4th column): it must be a non-negative
# float (plain or scientific notation, optional leading '+') no greater
# than 100.  Returns 1 when valid; otherwise appends a message to the
# file-level $errors and returns 0.
sub check_format_cols4{ #is numeric? is it between 0 and 100?
#	if (( $val =~ /^([+-]?)(?=\d|\.\d)\d*(\.\d*)?([Ee]([+-]?\d+))?$/) && ($val >= 0) && ($val <= 1) || ($val == 1) || ($val==0)){
	my ($val,$ln) = @_;
	# The pattern rejects a leading '-', so only the upper bound needs a
	# numeric comparison; the lookahead requires at least one digit.
	if (( $val =~ /^([+]?)(?=\d|\.\d)\d*(\.\d*)?([Ee]([+-]?\d+))?$/) && ($val <= 100)){ #force to be positive
		return(1);
	} #test ok;
	#test failed
	$errors .= "Value must be a positive float number, less or equal to 100. Got $val, which is incorrect.\nError at input line # $ln.\n";
	return(0);#failed test
}
# Since no gold-standard file is read, build the full set of IDs expected
# in a prediction file for the given phase ('L' = Leaderboard, 'F' = Final):
# 69 odor IDs (a different set per phase), 21 perceptual descriptors and
# individuals 1..49.  Populates the file-level lookup hashes %oIDs,
# %descriptors and %individuals, and returns a reference to %val, a
# three-level hash keyed $val{oID}{individual}{descriptor} with every slot
# initialised to -1 ("no prediction seen yet").
sub generate_ids{
	my ($phase) = @_;
	my @Ldescriptors = qw(ACID AMMONIA/URINOUS BAKERY BURNT CHEMICAL COLD DECAYED FISH FLOWER FRUIT GARLIC GRASS INTENSITY/STRENGTH MUSKY SOUR SPICES SWEATY SWEET VALENCE/PLEASANTNESS WARM WOOD);
	my @LoIDs;
	if ($phase eq 'F'){
		# Odor IDs scored in the final round (split across three lists
		# purely for source readability).
		@LoIDs = qw(1031 10857465 10886 11567 12020 12265 12377 1549025 15654 16537 17898 180 18467 21363 251531 262 264 2733294 27440 3102 31219 31276 31283 323 3314 440917 5281167 5281168 5318599);
		my @tmp1 = qw(5352837 5364231 5371102 5862 5962 60998 61523 62089 62351 62465 6274 6322 637758 6506 6544 6561 6669 702 7092 7137 7302 7476 750 753 7559);
		my @tmp2 = qw(7657 7770 7793 7797 8025 8049 8094 8419 8438 8468 853433 8815 8878 9012 962);
		push (@LoIDs, @tmp1);
		push (@LoIDs, @tmp2);
	}
	elsif ($phase eq 'L'){
		# Odor IDs scored on the leaderboard.
		@LoIDs = qw(1030 1060 10722 11124 11419 12206 12348 12748 13187 13204 13216 13436 14328 1549778 1550470 15606 16220109 22386 24020 243 24473 2682 31210);
		my @tmp1 = qw(31266 33032 454 520108 5352539 5355850 5363233 5367698 61024 6114390 61151 61155 61177 61252 6137 61771 62572 638024 6386 641256 679 6826 6989);
		my @tmp2 = qw(7047 7409 7601 7632 778574 7792 7820 7826 8038 8048 8051 8078 8137 8159 8163 8175 8180 8205 8294 8452 8467 8615 9024);
		push (@LoIDs, @tmp1);
		push (@LoIDs, @tmp2);
	}
	my $No = scalar (@LoIDs);
	my $Nd = scalar(@Ldescriptors);
	my %val = ();
	# Convenient to have the IDs as hashes as well, for O(1) membership checks.
	for (my $i=0; $i<$No; $i++){
		$oIDs{$LoIDs[$i]} = 1;
	}
	for (my $i=0; $i<$Nd; $i++){
		$descriptors{$Ldescriptors[$i]} = 1;
	}
	for (my $i=1; $i<50; $i++){
		$individuals{$i} = 1;
	}
	# Pre-mark every expected (oID, individual, descriptor) slot as unseen.
	for (my $i=0; $i<$No; $i++){
		for (my $k=1; $k<50; $k++){
			for (my $j=0; $j<$Nd;$j++){
				$val{$LoIDs[$i]}{$k}{$Ldescriptors[$j]} = -1;
			}
		}
	}
	# Returning %val alone is enough: the column count is checked separately.
	return(\%val);
}
# Verify that a line carries exactly the expected number of tab-separated
# columns ($Ncols, file-level).  Returns 1 when correct; otherwise appends
# a message to the file-level $errors and returns 0.
sub check_column_number{
	my ($n_cols, $ln) = @_;
	return(1) if ($n_cols == $Ncols); #test ok
	$errors .= "Please note that the numbers of expected columns is $Ncols not $n_cols. Error at input line # $ln.\n";
	return(0);#failed test
}
| Sage-Bionetworks/OlfactionDREAMChallenge | src/main/resources/DREAM_Olfactory_S1_validation.pl | Perl | apache-2.0 | 7,965 |
#!/usr/bin/perl -w
#
# Copyright 2017, Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example adds a portfolio bidding strategy and uses it to
# construct a campaign.
use strict;
use lib "../../../lib";
use utf8;
use Google::Ads::AdWords::Client;
use Google::Ads::AdWords::Logging;
use Google::Ads::AdWords::v201809::SharedBiddingStrategy;
use Google::Ads::AdWords::v201809::TargetSpendBiddingScheme;
use Google::Ads::AdWords::v201809::Money;
use Google::Ads::AdWords::v201809::BiddingStrategyOperation;
use Google::Ads::AdWords::v201809::BudgetOperation;
use Google::Ads::AdWords::v201809::NetworkSetting;
use Google::Ads::AdWords::v201809::CampaignOperation;
use Cwd qw(abs_path);
use Data::Uniqid qw(uniqid);
# Replace with valid values of your account.
my $budget_id = 0;
# Example main subroutine.
sub use_portfolio_bidding_strategy {
  my ($client, $budget_id) = @_;

  # A campaign can only reference an already-created portfolio strategy.
  my $strategy = create_bidding_strategy($client);
  return 0 unless $strategy;

  # Fall back to a freshly created budget when the caller supplied none.
  unless ($budget_id) {
    my $budget = create_shared_budget($client);
    return 0 unless $budget;
    $budget_id = $budget->get_budgetId();
  }

  create_campaign_with_bidding_strategy($client, $strategy->get_id(),
    $budget_id);
  return 1;
}
# Creates a portfolio bidding strategy of type TARGET_SPEND (Maximize
# Clicks) via the BiddingStrategyService and returns the created strategy
# object, or 0 if the mutate call added nothing.
sub create_bidding_strategy {
  my $client = shift;
  my @operations = ();
  # Create a portfolio bidding strategy.
  my $bidding_strategy =
    Google::Ads::AdWords::v201809::SharedBiddingStrategy->new({
      # uniqid() keeps the name unique across repeated example runs.
      name => "Maximize Clicks " . uniqid(),
      type => "TARGET_SPEND",
      # Create the bidding scheme.
      biddingScheme =>
        Google::Ads::AdWords::v201809::TargetSpendBiddingScheme->new({
          # Optionally set additional bidding scheme parameters.
          # Amounts are in micros (1,000,000 micros = 1 currency unit).
          bidCeiling => Google::Ads::AdWords::v201809::Money->new(
            {microAmount => 2000000,}
          ),
          spendTarget => Google::Ads::AdWords::v201809::Money->new(
            {microAmount => 20000000,})})});
  # Create operation.
  my $operation = Google::Ads::AdWords::v201809::BiddingStrategyOperation->new({
    operator => "ADD",
    operand  => $bidding_strategy
  });
  push @operations, $operation;
  my $result =
    $client->BiddingStrategyService()->mutate({operations => \@operations});
  if ($result->get_value()) {
    my $strategy = $result->get_value()->[0];
    printf "Portfolio bidding strategy with name \"%s\" and ID %d of type %s " .
      "was created.\n", $strategy->get_name(), $strategy->get_id(),
      $strategy->get_biddingScheme()->get_BiddingScheme__Type();
    return $strategy;
  } else {
    print "No portfolio bidding strategies were added.\n";
    return 0;
  }
}
# Creates an explicit budget to be used only to create the campaign.
# Creates an explicit (non-shared) budget used only by the example
# campaign. Returns the created Budget object, or 0 if nothing was added.
# NOTE(review): Google::Ads::AdWords::v201809::Budget is not use'd at the
# top of this file — confirm it is loaded transitively by the other
# v201809 modules.
sub create_shared_budget {
  my ($client) = @_;

  my $budget = Google::Ads::AdWords::v201809::Budget->new({
    amount =>
      Google::Ads::AdWords::v201809::Money->new({microAmount => 50000000}),
    deliveryMethod     => 'STANDARD',
    isExplicitlyShared => 0
  });

  # Make the mutate request.
  my $result = $client->BudgetService()->mutate({
    operations => [
      Google::Ads::AdWords::v201809::BudgetOperation->new({
        operator => 'ADD',
        operand  => $budget
      })]});

  if ($result->get_value()) {
    return $result->get_value()->[0];
  }
  print "No budgets were added.\n";
  return 0;
}
# Create a campaign with a portfolio bidding strategy.
# Creates a PAUSED search campaign that references the given portfolio
# bidding strategy and budget by ID. Returns the new campaign object,
# or 0 on failure.
sub create_campaign_with_bidding_strategy {
  my ($client, $bidding_strategy_id, $budget_id) = @_;

  my $campaign = Google::Ads::AdWords::v201809::Campaign->new({
    name => 'Interplanetary Cruise #' . uniqid(),
    budget =>
      Google::Ads::AdWords::v201809::Budget->new({budgetId => $budget_id}),
    # Required: reference the portfolio bidding strategy by ID.
    biddingStrategyConfiguration =>
      Google::Ads::AdWords::v201809::BiddingStrategyConfiguration->new(
      {biddingStrategyId => $bidding_strategy_id}),
    # Required: advertising channel type.
    advertisingChannelType => 'SEARCH',
    # Recommended: network targeting.
    networkSetting => Google::Ads::AdWords::v201809::NetworkSetting->new({
      targetGoogleSearch   => 1,
      targetSearchNetwork  => 1,
      targetContentNetwork => 1
    }),
    # PAUSED so the ads don't serve before targeting is in place.
    status => "PAUSED"
  });

  my $result = $client->CampaignService()->mutate({
    operations => [
      Google::Ads::AdWords::v201809::CampaignOperation->new({
        operator => 'ADD',
        operand  => $campaign
      })]});

  unless ($result->get_value()) {
    print "No campaigns were added.\n";
    return 0;
  }

  my $new_campaign = $result->get_value()->[0];
  printf "Campaign with name \"%s\", ID %d and bidding strategy ID %d was " .
    "created.\n", $new_campaign->get_name(), $new_campaign->get_id(),
    $new_campaign->get_biddingStrategyConfiguration()
    ->get_biddingStrategyId();
  return $new_campaign;
}
# Guard: when this file is loaded via require/do (e.g. from a test
# harness) rather than executed directly, return a true value instead
# of running the example. `return` outside a sub is valid in a
# require'd file.
if (abs_path($0) ne abs_path(__FILE__)) {
  return 1;
}
# Log SOAP XML request, response and API errors.
Google::Ads::AdWords::Logging::enable_all_logging();
# Get AdWords Client, credentials will be read from ~/adwords.properties.
my $client = Google::Ads::AdWords::Client->new({version => "v201809"});
# By default examples are set to die on any server returned fault.
$client->set_die_on_faults(1);
# Call the example with the (possibly zero) budget ID configured above;
# zero makes the example create a budget itself.
use_portfolio_bidding_strategy($client, $budget_id);
| googleads/googleads-perl-lib | examples/v201809/advanced_operations/use_portfolio_bidding_strategy.pl | Perl | apache-2.0 | 6,548 |
=head1 LICENSE
Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::IdMapping::SyntenyRegion - object representing syntenic regions
=head1 SYNOPSIS
# create a new SyntenyRegion from a source and a target gene
my $sr = Bio::EnsEMBL::IdMapping::SyntenyRegion->new_fast( [
$source_gene->start, $source_gene->end,
$source_gene->strand, $source_gene->seq_region_name,
$target_gene->start, $target_gene->end,
$target_gene->strand, $target_gene->seq_region_name,
$entry->score,
] );
# merge with another SyntenyRegion
my $merged_sr = $sr->merge($sr1);
# score a gene pair against this SyntenyRegion
my $score =
$sr->score_location_relationship( $source_gene1, $target_gene1 );
=head1 DESCRIPTION
This object represents a synteny between a source and a target location.
SyntenyRegions are built from mapped genes, and their score is
defined as the score of the gene mapping. For merged SyntenyRegions,
scores are combined.
=head1 METHODS
new_fast
source_start
source_end
source_strand
source_seq_region_name
target_start
target_end
target_strand
target_seq_region_name
score
merge
stretch
score_location_relationship
to_string
=cut
package Bio::EnsEMBL::IdMapping::SyntenyRegion;
use strict;
use warnings;
no warnings 'uninitialized';
use Bio::EnsEMBL::Utils::Exception qw(throw warning);
=head2 new_fast
Arg[1] : Arrayref $array_ref - the arrayref to bless into the
SyntenyRegion object
Example : my $sr = Bio::EnsEMBL::IdMapping::SyntenyRegion->new_fast([
]);
Description : Constructor. On instantiation, source and target regions are
reverse complemented so that source is always on forward strand.
Return type : a Bio::EnsEMBL::IdMapping::SyntenyRegion object
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Constructor: blesses the supplied arrayref directly (no copying).
# Normalises orientation so the source is always on the forward strand:
# if the source strand (slot 2) is -1, both strands are flipped. This
# makes later merging and comparison operations simpler.
sub new_fast {
  my ($class, $array_ref) = @_;

  if ($array_ref->[2] == -1) {
    $array_ref->[2] = 1;
    $array_ref->[6] *= -1;
  }

  return bless $array_ref, $class;
}
=head2 source_start
Arg[1] : (optional) Int - source location start coordinate
Description : Getter/setter for source location start coordinate.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Getter/setter: start coordinate of the source region (slot 0).
sub source_start {
  my ($self, @new) = @_;
  $self->[0] = $new[0] if @new;
  return $self->[0];
}
=head2 source_end
Arg[1] : (optional) Int - source location end coordinate
Description : Getter/setter for source location end coordinate.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Getter/setter: end coordinate of the source region (slot 1).
sub source_end {
  my ($self, @new) = @_;
  $self->[1] = $new[0] if @new;
  return $self->[1];
}
=head2 source_strand
Arg[1] : (optional) Int - source location strand
Description : Getter/setter for source location strand.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Getter/setter: strand of the source region (slot 2); always 1 after
# new_fast() normalisation.
sub source_strand {
  my ($self, @new) = @_;
  $self->[2] = $new[0] if @new;
  return $self->[2];
}
=head2 source_seq_region_name
Arg[1] : (optional) String - source location seq_region name
Description : Getter/setter for source location seq_region name.
Return type : String
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Getter/setter: seq_region name of the source region (slot 3).
sub source_seq_region_name {
  my ($self, @new) = @_;
  $self->[3] = $new[0] if @new;
  return $self->[3];
}
=head2 target_start
Arg[1] : (optional) Int - target location start coordinate
Description : Getter/setter for target location start coordinate.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Getter/setter: start coordinate of the target region (slot 4).
sub target_start {
  my ($self, @new) = @_;
  $self->[4] = $new[0] if @new;
  return $self->[4];
}
=head2 target_end
Arg[1] : (optional) Int - target location end coordinate
Description : Getter/setter for target location end coordinate.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Getter/setter: end coordinate of the target region (slot 5).
sub target_end {
  my ($self, @new) = @_;
  $self->[5] = $new[0] if @new;
  return $self->[5];
}
=head2 target_strand
Arg[1] : (optional) Int - target location strand
Description : Getter/setter for target location strand.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Getter/setter: strand of the target region (slot 6); carries the
# relative orientation, since the source is normalised to forward.
sub target_strand {
  my ($self, @new) = @_;
  $self->[6] = $new[0] if @new;
  return $self->[6];
}
=head2 target_seq_region_name
Arg[1] : (optional) String - target location seq_region name
Description : Getter/setter for target location seq_region name.
Return type : String
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Getter/setter: seq_region name of the target region (slot 7).
sub target_seq_region_name {
  my ($self, @new) = @_;
  $self->[7] = $new[0] if @new;
  return $self->[7];
}
=head2 score
Arg[1] : (optional) Float - score
Description : Getter/setter for the score between source and target location.
Return type : Int
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Getter/setter: synteny score between source and target (slot 8).
sub score {
  my ($self, @new) = @_;
  $self->[8] = $new[0] if @new;
  return $self->[8];
}
=head2 merge
Arg[1] : Bio::EnsEMBL::IdMapping::SyntenyRegion $sr - another
SyntenyRegion
Example : $merged_sr = $sr->merge($other_sr);
Description : Merges two overlapping SyntenyRegions if they meet certain
criteria (see documentation in the code for details). Score is
calculated as a combined distance score. If the two
SyntenyRegions aren't mergeable, this method returns undef.
Return type : Bio::EnsEMBL::IdMapping::SyntenyRegion or undef
Exceptions : warns on bad scores
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Merge another (nearby) SyntenyRegion into this one. On success $self
# is MUTATED in place (extended to cover both regions, score combined)
# and returned. On failure 0 is returned.
# NOTE(review): the POD above says undef is returned when the regions
# are not mergeable, but the code returns 0 — confirm which is the
# intended contract.
sub merge {
  my ($self, $sr) = @_;
  # must be on same seq_region on both the source and the target side
  if ($self->source_seq_region_name ne $sr->source_seq_region_name or
      $self->target_seq_region_name ne $sr->target_seq_region_name) {
    return 0;
  }
  # target must be on same strand (sources are always forward, see new_fast)
  return 0 unless ($self->target_strand == $sr->target_strand);
  # find the distance of source and target pair and compare; for a
  # reverse-strand target the distance is measured from the high end
  my $source_dist = $sr->source_start - $self->source_start;
  my $target_dist;
  if ($self->target_strand == 1) {
    $target_dist = $sr->target_start - $self->target_start;
  } else {
    $target_dist = $self->target_end - $sr->target_end;
  }
  # prevent division by zero error
  if ($source_dist == 0 or $target_dist == 0) {
    warn("WARNING: source_dist ($source_dist) and/or target_dist ($target_dist) is zero.\n");
    return 0;
  }
  # calculate a distance score: 1 - |d|/|source_dist| - |d|/|target_dist|
  # where d is the difference between the two distances; 1.0 means the
  # two regions are perfectly collinear, smaller means less consistent
  my $dist = $source_dist - $target_dist;
  $dist = -$dist if ($dist < 0);
  my $d1 = $dist/$source_dist;
  $d1 = -$d1 if ($d1 < 0);
  my $d2 = $dist/$target_dist;
  $d2 = -$d2 if ($d2 < 0);
  my $dist_score = 1 - $d1 - $d2;
  # distance score must be more than 50%
  return 0 if ($dist_score < 0.5);
  # combined score: distance score times the average of both region scores
  # (computed BEFORE extending the coordinates below — order matters)
  my $new_score = $dist_score * ($sr->score + $self->score)/2;
  if ($new_score > 1) {
    warn("WARNING: Bad merge score: $new_score\n");
  }
  # extend SyntenyRegion to cover both sources and targets, set merged score
  # and return
  if ($sr->source_start < $self->source_start) {
    $self->source_start($sr->source_start);
  }
  if ($sr->source_end > $self->source_end) {
    $self->source_end($sr->source_end);
  }
  if ($sr->target_start < $self->target_start) {
    $self->target_start($sr->target_start);
  }
  if ($sr->target_end > $self->target_end) {
    $self->target_end($sr->target_end);
  }
  $self->score($new_score);
  return $self;
}
=head2 stretch
Arg[1] : Float $factor - stretching factor
Example : $stretched_sr = $sr->stretch(2);
Description : Extends this SyntenyRegion to span a $factor * $score more area.
Return type : Bio::EnsEMBL::IdMapping::SyntenyRegion
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Symmetrically widen both the source and the target region by
# int(length * $factor * score) on each side, and return $self.
# Higher-scoring regions are therefore stretched further.
sub stretch {
  my ($self, $factor) = @_;

  my $src_pad = int(($self->source_end - $self->source_start + 1) *
    $factor * $self->score);
  $self->source_start($self->source_start - $src_pad);
  $self->source_end($self->source_end + $src_pad);

  my $tgt_pad = int(($self->target_end - $self->target_start + 1) *
    $factor * $self->score);
  $self->target_start($self->target_start - $tgt_pad);
  $self->target_end($self->target_end + $tgt_pad);

  return $self;
}
=head2 score_location_relationship
Arg[1] : Bio::EnsEMBL::IdMapping::TinyGene $source_gene - source gene
Arg[2] : Bio::EnsEMBL::IdMapping::TinyGene $target_gene - target gene
Example : my $score = $sr->score_location_relationship($source_gene,
$target_gene);
Description : This function calculates how well the given source location
interpolates on given target location inside this SyntenyRegion.
Scoring is done the following way: Source and target location
are normalized with respect to this Regions source and target.
Source range will then be somewhere close to 0.0-1.0 and target
range anything around that.
The extend of the covered area between source and target range
is a measurement of how well they agree (smaller extend is
better). The extend (actually 2*extend) is reduced by the size
of the regions. This will result in 0.0 if they overlap
perfectly and bigger values if they dont.
This is substracted from 1.0 to give the score. The score is
likely to be below zero, but is cut off at 0.0f.
Finally, the score is multiplied with the score of the synteny
itself.
Return type : Float
Exceptions : warns if score out of range
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Score how well a (source gene, target gene) pair agrees with this
# synteny region: both genes are normalised to coordinates relative to
# the region's source/target spans; the closer the two relative ranges
# coincide, the higher the score. The result is scaled by this region's
# own score and floored at 0.
sub score_location_relationship {
  my ($self, $source_gene, $target_gene) = @_;
  # must be on same seq_region
  if (($self->source_seq_region_name ne $source_gene->seq_region_name) or
      ($self->target_seq_region_name ne $target_gene->seq_region_name)) {
    return 0;
  }
  # strand relationship must be the same (use logical XOR to find out)
  if (($self->source_strand == $source_gene->strand) xor
      ($self->target_strand == $target_gene->strand)) {
    return 0;
  }
  # normalise source location to [0,1] relative to the region's source span
  my $source_rel_start = ($source_gene->start - $self->source_start) /
    ($self->source_end - $self->source_start + 1);
  my $source_rel_end = ($source_gene->end - $self->source_start + 1) /
    ($self->source_end - $self->source_start + 1);
  #warn " aaa ".$self->to_string."\n";
  #warn sprintf(" bbb %.6f %.6f\n", $source_rel_start, $source_rel_end);
  # cut off if the source location is completely outside (10% tolerance)
  return 0 if ($source_rel_start > 1.1 or $source_rel_end < -0.1);
  # normalise target location; on the reverse strand, measure from the
  # high end so that both ranges run in the same direction
  my ($target_rel_start, $target_rel_end);
  my $t_length = $self->target_end - $self->target_start + 1;
  if ($self->target_strand == 1) {
    $target_rel_start = ($target_gene->start - $self->target_start) / $t_length;
    $target_rel_end = ($target_gene->end - $self->target_start + 1) / $t_length;
  } else {
    $target_rel_start = ($self->target_end - $target_gene->end) / $t_length;
    $target_rel_end = ($self->target_end - $target_gene->start + 1) / $t_length;
  }
  # extent of the union of the two relative ranges: equals the sum of
  # the two range lengths when they coincide perfectly, grows as they
  # diverge
  my $added_range = (($target_rel_end > $source_rel_end) ? $target_rel_end :
    $source_rel_end) -
    (($target_rel_start < $source_rel_start) ? $target_rel_start :
    $source_rel_start);
  # 1 - (2*union - len(target) - len(source)) scaled by the region score
  my $score = $self->score * (1 - (2 * $added_range - $target_rel_end -
    $source_rel_end + $target_rel_start + $source_rel_start));
  #warn " ccc ".sprintf("%.6f:%.6f:%.6f:%.6f:%.6f\n", $added_range,
  # $source_rel_start, $source_rel_end, $target_rel_start, $target_rel_end);
  $score = 0 if ($score < 0);
  # sanity check
  if ($score > 1) {
    warn "Out of range score ($score) for ".$source_gene->id.":".
      $target_gene->id."\n";
  }
  return $score;
}
=head2 to_string
Example : print LOG $sr->to_string, "\n";
Description : Returns a string representation of the SyntenyRegion object.
Useful for debugging and logging.
Return type : String
Exceptions : none
Caller : Bio::EnsEMBL::IdMapping::SyntenyFramework
Status : At Risk
: under development
=cut
# Human-readable one-line representation:
# "src_name:start-end:strand tgt_name:start-end:strand score"
# (for debugging and log output).
sub to_string {
  my $self = shift;
  return sprintf(
    "%s:%s-%s:%s %s:%s-%s:%s %.6f",
    $self->source_seq_region_name, $self->source_start,
    $self->source_end,             $self->source_strand,
    $self->target_seq_region_name, $self->target_start,
    $self->target_end,             $self->target_strand,
    $self->score
  );
}
1;
| willmclaren/ensembl | modules/Bio/EnsEMBL/IdMapping/SyntenyRegion.pm | Perl | apache-2.0 | 14,970 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Web::Query::GlyphSet::Marker;
use strict;
use warnings;
use parent qw(EnsEMBL::Web::Query::Generic::GlyphSet);
our $VERSION = 15;
# Build the link parameters (ZMenu/page URL) for one marker feature:
# species from the query args, the 'Marker' page type, and the marker's
# precomputed drawing ID.
sub href {
  my ($self, $feature, $args) = @_;

  return {
    species => $args->{'species'},
    type    => 'Marker',
    m       => $feature->{'drawing_id'}
  };
}
sub colour_key { return lc $_[1]->marker->type; }
# Precache specification for this query: build the 'markers' dataset in
# 10 parts, looping over every (species, genome) pair, with no extra
# arguments.
sub precache {
  return {
    markers => {
      parts => 10,
      loop  => ['species', 'genome'],
      args  => {}
    }
  };
}
# Configure the generic post-processing pipeline applied to the hashes
# produced by get(): attach slices and locations, deduplicate, resolve
# hrefs and colours, then delegate to the parent fixup.
sub fixup {
  my ($self) = @_;
  # Slice context keyed by 'species'; 1000000 is presumably the cache
  # chunk size in bp — TODO confirm against fixup_slice().
  $self->fixup_slice('slice','species',1000000);
  # 'start'/'end' are slice-relative; the third argument distinguishes
  # the start edge (0) from the end edge (1).
  $self->fixup_location('start','slice',0);
  $self->fixup_location('end','slice',1);
  $self->fixup_unique('_unique');
  $self->fixup_href('href');
  # 'magenta' is the fallback when a colour key is not in the colourmap.
  $self->fixup_colour('colour','magenta');
  $self->fixup_colour('label_colour','magenta');
  $self->SUPER::fixup();
}
# Fetch marker features overlapping $args->{slice} and convert them to
# plain drawable hashes (position, colour, label, href). The raw values
# are post-processed by fixup() before being returned to the caller.
sub get {
  my ($self,$args) = @_;

  my $slice = $args->{'slice'};
  # NOTE(review): $length is never used below — confirm before removing.
  my $length = $slice->length;
  my $data = [];

  # Get them
  my @features;
  if($args->{'text_export'}) {
    # Text export wants every marker, unfiltered.
    @features = @{$slice->get_all_MarkerFeatures};
  } else {
    # Drawing: restrict by analysis logic name, priority and map weight.
    my $map_weight = 2;
    @features = @{$slice->get_all_MarkerFeatures($args->{'logic_name'},
                                                 $args->{'priority'},
                                                 $map_weight)};
    # Force add marker with our id if missed out above
    if($args->{'marker_id'} and
       !grep {$_->display_id eq $args->{'marker_id'}} @features) {
      my $m = $slice->get_MarkerFeatures_by_Name($args->{'marker_id'});
      push @features,@$m;
    }
  }

  # Determine drawing_id for each marker: prefer the display synonym's
  # name, fall back to the first synonym whose name isn't '-'.
  foreach my $f (@features) {
    my $ms = $f->marker->display_MarkerSynonym;
    my $id = $ms ? $ms->name : '';
    ($id) = grep $_ ne '-', map $_->name, @{$f->marker->get_all_MarkerSynonyms || []} if $id eq '-' || $id eq '';
    $f->{'drawing_id'} = $id;
  }

  # Build output, ordered by genomic start position.
  foreach my $f (sort { $a->seq_region_start <=> $b->seq_region_start } @features) {
    my $id = $f->{'drawing_id'};
    my $feature_colour = $self->colour_key($f);
    push @$data, {
      # '_unique' lets fixup_unique() drop duplicate features.
      '_unique' => join(':',$id,$f->start,$f->end),
      'start' => $f->start,
      'end' => $f->end,
      'colour' => $feature_colour,
      'label' => $id,
      'label_colour' => $feature_colour,
      'href' => $self->href($f,$args),
    };
  }
  return $data;
}
1;
| Ensembl/ensembl-webcode | modules/EnsEMBL/Web/Query/GlyphSet/Marker.pm | Perl | apache-2.0 | 3,216 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::citrix::netscaler::snmp::mode::vserverstatus;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
use Digest::MD5 qw(md5_hex);
# Declare the counter tree used by the counter template: one instanced
# group ('vservers', type => 1 means one counter set per vserver) with a
# status check plus health/traffic/connection metrics.
sub set_counters {
  my ($self, %options) = @_;

  $self->{maps_counters_type} = [
    { name => 'vservers', type => 1, cb_prefix_output => 'prefix_vservers_output', message_multiple => 'All Virtual Servers are ok' }
  ];

  $self->{maps_counters}->{vservers} = [
    # Operational state: custom output/threshold callbacks, no perfdata
    # (threshold => 0 disables the generic warning/critical handling).
    { label => 'status', threshold => 0, set => {
        key_values => [ { name => 'state' } ],
        closure_custom_output => $self->can('custom_status_output'),
        closure_custom_perfdata => sub { return 0; },
        closure_custom_threshold_check => $self->can('custom_threshold_output')
      }
    },
    # Health percentage (0-100) per vserver.
    { label => 'health', nlabel => 'vserver.health.percentage', set => {
        key_values => [ { name => 'health' }, { name => 'display' } ],
        output_template => 'Health: %.2f %%', output_error_template => 'Health: %s',
        perfdatas => [
          { label => 'health', template => '%.2f',
            unit => '%', min => 0, max => 100, label_extra_instance => 1, instance_use => 'display' }
        ]
      }
    },
    # Inbound traffic, derived per-second from the raw byte counter.
    { label => 'in-traffic', nlabel => 'vserver.traffic.in.bitspersecond', set => {
        key_values => [ { name => 'in', per_second => 1 }, { name => 'display' } ],
        output_template => 'Traffic In: %s %s/s',
        output_change_bytes => 2,
        perfdatas => [
          { label => 'traffic_in', template => '%.2f',
            min => 0, unit => 'b/s', label_extra_instance => 1, instance_use => 'display' }
        ]
      }
    },
    # Outbound traffic, derived per-second from the raw byte counter.
    { label => 'out-traffic', nlabel => 'vserver.traffic.out.bitspersecond', set => {
        key_values => [ { name => 'out', per_second => 1 }, { name => 'display' } ],
        output_template => 'Traffic Out: %s %s/s',
        output_change_bytes => 2,
        perfdatas => [
          { label => 'traffic_out', template => '%.2f',
            min => 0, unit => 'b/s', label_extra_instance => 1, instance_use => 'display' }
        ]
      }
    },
    # Client connections: 'diff' means delta since the previous run
    # (hence the statefile enabled in new()).
    { label => 'clients', nlabel => 'vserver.connections.client.count', set => {
        key_values => [ { name => 'clients', diff => 1 }, { name => 'display' } ],
        output_template => 'Total Client Connections : %s',
        perfdatas => [
          { label => 'clients', template => '%s',
            min => 0, label_extra_instance => 1, instance_use => 'display' }
        ]
      }
    },
    # Server connections, same delta handling as clients.
    { label => 'servers', nlabel => 'vserver.connections.server.count', set => {
        key_values => [ { name => 'servers', diff => 1 }, { name => 'display' } ],
        output_template => 'Total Server Connections : %s',
        perfdatas => [
          { label => 'servers', template => '%s',
            min => 0, label_extra_instance => 1, instance_use => 'display' }
        ]
      }
    }
  ];
}
# Prefix printed before each vserver's counter output, e.g.
# "Virtual Server 'foo' ".
sub prefix_vservers_output {
  my ($self, %options) = @_;
  return sprintf("Virtual Server '%s' ", $options{instance_value}->{display});
}
# User-supplied severity overrides, populated from --threshold-overload
# in check_options(): { section => [ { filter => regexp, status => SEV } ] }.
# Consulted before the default table in get_severity().
my $overload_th = {};
# Default state-to-severity mapping for virtual servers. The first
# case-insensitively matching regexp wins in get_severity().
my $thresholds = {
  vs => [
    ['unknown', 'UNKNOWN'],
    ['down|outOfService|transitionToOutOfService|transitionToOutOfServiceDown', 'CRITICAL'],
    ['up', 'OK'],
  ],
};
# Map a state string (options: section, value) onto a plugin severity.
# User overrides from --threshold-overload are consulted first, then the
# built-in table; the first case-insensitive regexp match wins.
# Returns 'UNKNOWN' when nothing matches.
sub get_severity {
  my (%options) = @_;

  if (defined($overload_th->{$options{section}})) {
    foreach my $rule (@{$overload_th->{$options{section}}}) {
      return $rule->{status} if ($options{value} =~ /$rule->{filter}/i);
    }
  }

  foreach my $entry (@{$thresholds->{$options{section}}}) {
    return $entry->[1] if ($options{value} =~ /$entry->[0]/i);
  }

  return 'UNKNOWN';
}
# Threshold callback for the 'status' counter: severity comes from the
# shared state table ('vs' section).
sub custom_threshold_output {
  my ($self, %options) = @_;

  my $state = $self->{result_values}->{state};
  return get_severity(section => 'vs', value => $state);
}
# Output callback for the 'status' counter: textual state line.
sub custom_status_output {
  my ($self, %options) = @_;
  return sprintf('State : %s', $self->{result_values}->{state});
}
# Constructor: registers the mode's command-line options and enables
# the statefile (required by the 'diff' and 'per_second' counters
# declared in set_counters()).
sub new {
  my ($class, %options) = @_;
  my $self = $class->SUPER::new(package => __PACKAGE__, %options, statefile => 1);
  bless $self, $class;
  $options{options}->add_options(arguments => {
    'filter-name:s' => { name => 'filter_name' },
    'filter-type:s' => { name => 'filter_type' },
    'force-counters64' => { name => 'force_counters64' },
    # May be given multiple times ('s@'); parsed in check_options().
    'threshold-overload:s@' => { name => 'threshold_overload' }
  });
  return $self;
}
# Validate options. Each --threshold-overload entry must look like
# 'status,regexp'; valid entries are pushed onto the file-level
# $overload_th table under the 'vs' section. Invalid entries abort
# the plugin via option_exit().
sub check_options {
  my ($self, %options) = @_;
  $self->SUPER::check_options(%options);

  foreach my $val (@{$self->{option_results}->{threshold_overload}}) {
    unless ($val =~ /^(.*?),(.*)$/) {
      $self->{output}->add_option_msg(short_msg => "Wrong threshold-overload option '" . $val . "'.");
      $self->{output}->option_exit();
    }
    my ($section, $status, $filter) = ('vs', $1, $2);
    if ($self->{output}->is_litteral_status(status => $status) == 0) {
      $self->{output}->add_option_msg(short_msg => "Wrong threshold-overload status '" . $val . "'.");
      $self->{output}->option_exit();
    }
    $overload_th->{$section} = [] unless (defined($overload_th->{$section}));
    push @{$overload_th->{$section}}, { filter => $filter, status => $status };
  }
}
# Integer-to-name mapping for the vserver entity type SNMP value.
my %map_vs_type = (
  0 => 'unknown',
  1 => 'loadbalancing',
  2 => 'loadbalancinggroup',
  3 => 'sslvpn',
  4 => 'contentswitching',
  5 => 'cacheredirection',
);
# Integer-to-name mapping for the vserver operational state SNMP value.
# (6 is presumably unused by the device MIB — confirm against the MIB.)
my %map_vs_status = (
  1 => 'down',
  2 => 'unknown',
  3 => 'busy',
  4 => 'outOfService',
  5 => 'transitionToOutOfService',
  7 => 'up',
  8 => 'transitionToOutOfServiceDown',
);
# First-pass OIDs: name, state and type of each vserver.
my $mapping = {
  vsvrState => { oid => '.1.3.6.1.4.1.5951.4.1.3.1.1.5', map => \%map_vs_status },
  vsvrFullName => { oid => '.1.3.6.1.4.1.5951.4.1.3.1.1.59' },
  vsvrEntityType => { oid => '.1.3.6.1.4.1.5951.4.1.3.1.1.64', map => \%map_vs_type },
};
# Second-pass OIDs: traffic/health/connection counters. The *Low/*High
# pairs are 32-bit halves of the 64-bit byte counters, recombined in
# manage_selection() when the plain 64-bit OIDs are not used.
my $mapping2 = {
  vsvrTotalRequestBytesLow => { oid => '.1.3.6.1.4.1.5951.4.1.3.1.1.13' },
  vsvrTotalRequestBytesHigh => { oid => '.1.3.6.1.4.1.5951.4.1.3.1.1.14' },
  vsvrTotalResponseBytesLow => { oid => '.1.3.6.1.4.1.5951.4.1.3.1.1.17' },
  vsvrTotalResponseBytesHigh => { oid => '.1.3.6.1.4.1.5951.4.1.3.1.1.18' },
  vsvrTotalRequestBytes => { oid => '.1.3.6.1.4.1.5951.4.1.3.1.1.31' },
  vsvrTotalResponseBytes => { oid => '.1.3.6.1.4.1.5951.4.1.3.1.1.33' },
  vsvrTotalClients => { oid => '.1.3.6.1.4.1.5951.4.1.3.1.1.56' },
  vsvrHealth => { oid => '.1.3.6.1.4.1.5951.4.1.3.1.1.62' },
  vsvrTotalServers => { oid => '.1.3.6.1.4.1.5951.4.1.3.1.1.65' },
};
# Discover vservers via SNMP, apply the --filter-name/--filter-type
# filters, then fetch the per-vserver counters and populate
# $self->{vservers} for the counter template.
sub manage_selection {
  my ($self, %options) = @_;

  # First pass: walk name, state and type tables in one request.
  my $snmp_result = $options{snmp}->get_multiple_table(
    oids => [
      { oid => $mapping->{vsvrFullName}->{oid} },
      { oid => $mapping->{vsvrState}->{oid} },
      { oid => $mapping->{vsvrEntityType}->{oid} }
    ],
    return_type => 1,
    nothing_quit => 1
  );

  $self->{vservers} = {};
  foreach my $oid (keys %{$snmp_result}) {
    # Iterate on the name OID only; state/type share the same instance.
    next if ($oid !~ /^$mapping->{vsvrFullName}->{oid}\.(.*)$/);
    my $instance = $1;
    my $result = $options{snmp}->map_instance(mapping => $mapping, results => $snmp_result, instance => $instance);
    if (defined($self->{option_results}->{filter_type}) && $self->{option_results}->{filter_type} ne '' &&
        $result->{vsvrEntityType} !~ /$self->{option_results}->{filter_type}/) {
      $self->{output}->output_add(long_msg => "skipping Virtual Server '" . $result->{vsvrFullName} . "'.", debug => 1);
      next;
    }
    if (defined($self->{option_results}->{filter_name}) && $self->{option_results}->{filter_name} ne '' &&
        $result->{vsvrFullName} !~ /$self->{option_results}->{filter_name}/) {
      $self->{output}->output_add(long_msg => "skipping Virtual Server '" . $result->{vsvrFullName} . "'.", debug => 1);
      next;
    }
    $self->{vservers}->{$instance} = { display => $result->{vsvrFullName}, state => $result->{vsvrState} };
  }

  if (scalar(keys %{$self->{vservers}}) <= 0) {
    $self->{output}->add_option_msg(short_msg => "No virtual server found.");
    $self->{output}->option_exit();
  }

  # Second pass: load the counters for the retained instances only.
  # Without --force-counters64, also request the 32-bit Low/High halves
  # as a fallback for devices lacking the 64-bit OIDs.
  $options{snmp}->load(
    oids => defined($self->{option_results}->{force_counters64}) ? [
      $mapping2->{vsvrTotalRequestBytes}->{oid}, $mapping2->{vsvrTotalResponseBytes}->{oid},
      $mapping2->{vsvrTotalClients}->{oid}, $mapping2->{vsvrHealth}->{oid}, $mapping2->{vsvrTotalServers}->{oid}
    ] : [
      $mapping2->{vsvrTotalRequestBytesLow}->{oid}, $mapping2->{vsvrTotalRequestBytesHigh}->{oid},
      $mapping2->{vsvrTotalResponseBytesLow}->{oid}, $mapping2->{vsvrTotalResponseBytesHigh}->{oid},
      $mapping2->{vsvrTotalRequestBytes}->{oid}, $mapping2->{vsvrTotalResponseBytes}->{oid},
      $mapping2->{vsvrTotalClients}->{oid}, $mapping2->{vsvrHealth}->{oid}, $mapping2->{vsvrTotalServers}->{oid}
    ],
    instances => [keys %{$self->{vservers}}], instance_regexp => '^(.*)$'
  );
  $snmp_result = $options{snmp}->get_leef(nothing_quit => 1);

  foreach (keys %{$self->{vservers}}) {
    my $result = $options{snmp}->map_instance(mapping => $mapping2, results => $snmp_result, instance => $_);
    # Prefer the 64-bit counter; otherwise recombine (High << 32) + Low.
    # Bytes are converted to bits (* 8) for the traffic counters.
    $self->{vservers}->{$_}->{out} = defined($result->{vsvrTotalResponseBytes}) ? $result->{vsvrTotalResponseBytes} * 8 :
      (($result->{vsvrTotalResponseBytesHigh} << 32) + $result->{vsvrTotalResponseBytesLow}) * 8;
    $self->{vservers}->{$_}->{in} = defined($result->{vsvrTotalRequestBytes}) ? $result->{vsvrTotalRequestBytes} * 8 :
      (($result->{vsvrTotalRequestBytesHigh} << 32) + $result->{vsvrTotalRequestBytesLow}) * 8;
    $self->{vservers}->{$_}->{health} = $result->{vsvrHealth};
    $self->{vservers}->{$_}->{clients} = $result->{vsvrTotalClients};
    $self->{vservers}->{$_}->{servers} = $result->{vsvrTotalServers};
  }

  # Statefile key: include host/port and the filters so different
  # invocations do not share cached counter values.
  $self->{cache_name} = "citrix_netscaler_" . $self->{mode} . '_' . $options{snmp}->get_hostname() . '_' . $options{snmp}->get_port() . '_' .
    (defined($self->{option_results}->{filter_counters}) ? md5_hex($self->{option_results}->{filter_counters}) : md5_hex('all')) . '_' .
    (defined($self->{option_results}->{filter_name}) ? md5_hex($self->{option_results}->{filter_name}) : md5_hex('all')) . '_' .
    (defined($self->{option_results}->{filter_type}) ? md5_hex($self->{option_results}->{filter_type}) : md5_hex('all'));
}
1;
__END__
=head1 MODE
Check vservers status and health.
=over 8
=item B<--warning-*>
Threshold warning.
Can be: 'in-traffic', 'out-traffic', 'health' (%),
'clients', 'servers'.
=item B<--critical-*>
Threshold critical.
Can be: 'in-traffic', 'out-traffic', 'health' (%),
'clients', 'servers'.
=item B<--filter-name>
Filter by virtual server name (can be a regexp).
=item B<--filter-type>
Filter which type of vserver (can be a regexp).
=item B<--force-counters64>
Force to use 64 bits counters only. Can be used to improve performance,
or to solve a missing counters bug.
=item B<--threshold-overload>
Set to overload default threshold values (syntax: status,regexp)
It is used before the default thresholds (order is preserved).
Example: --threshold-overload='CRITICAL,^(?!(green)$)'
=back
=cut
| Tpo76/centreon-plugins | network/citrix/netscaler/snmp/mode/vserverstatus.pm | Perl | apache-2.0 | 12,826 |
# Moose value object mirroring the Elastic Transcoder CreateJobOutput
# API structure; each read-only attribute corresponds to one field of
# the request/response shape (see the POD below for field semantics).
package Paws::ElasticTranscoder::CreateJobOutput;
use Moose;
has AlbumArt => (is => 'ro', isa => 'Paws::ElasticTranscoder::JobAlbumArt');
has Captions => (is => 'ro', isa => 'Paws::ElasticTranscoder::Captions');
has Composition => (is => 'ro', isa => 'ArrayRef[Paws::ElasticTranscoder::Clip]');
has Encryption => (is => 'ro', isa => 'Paws::ElasticTranscoder::Encryption');
has Key => (is => 'ro', isa => 'Str');
has PresetId => (is => 'ro', isa => 'Str');
has Rotate => (is => 'ro', isa => 'Str');
has SegmentDuration => (is => 'ro', isa => 'Str');
has ThumbnailEncryption => (is => 'ro', isa => 'Paws::ElasticTranscoder::Encryption');
has ThumbnailPattern => (is => 'ro', isa => 'Str');
has Watermarks => (is => 'ro', isa => 'ArrayRef[Paws::ElasticTranscoder::JobWatermark]');
1;
### main pod documentation begin ###
=head1 NAME
Paws::ElasticTranscoder::CreateJobOutput
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::ElasticTranscoder::CreateJobOutput object:
$service_obj->Method(Att1 => { AlbumArt => $value, ..., Watermarks => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::ElasticTranscoder::CreateJobOutput object:
$result = $service_obj->Method(...);
$result->Att1->AlbumArt
=head1 DESCRIPTION
The C<CreateJobOutput> structure.
=head1 ATTRIBUTES
=head2 AlbumArt => L<Paws::ElasticTranscoder::JobAlbumArt>
Information about the album art that you want Elastic Transcoder to add
to the file during transcoding. You can specify up to twenty album
artworks for each output. Settings for each artwork must be defined in
the job for the current output.
=head2 Captions => L<Paws::ElasticTranscoder::Captions>
You can configure Elastic Transcoder to transcode captions, or
subtitles, from one format to another. All captions must be in UTF-8.
Elastic Transcoder supports two types of captions:
=over
=item *
B<Embedded:> Embedded captions are included in the same file as the
audio and video. Elastic Transcoder supports only one embedded caption
per language, to a maximum of 300 embedded captions per file.
Valid input values include: C<CEA-608 (EIA-608>, first non-empty
channel only), C<CEA-708 (EIA-708>, first non-empty channel only), and
C<mov-text>
Valid outputs include: C<mov-text>
Elastic Transcoder supports a maximum of one embedded format per
output.
=item *
B<Sidecar:> Sidecar captions are kept in a separate metadata file from
the audio and video data. Sidecar captions require a player that is
capable of understanding the relationship between the video file and
the sidecar file. Elastic Transcoder supports only one sidecar caption
per language, to a maximum of 20 sidecar captions per file.
Valid input values include: C<dfxp> (first div element only),
C<ebu-tt>, C<scc>, C<smpt>, C<srt>, C<ttml> (first div element only),
and C<webvtt>
Valid outputs include: C<dfxp> (first div element only), C<scc>,
C<srt>, and C<webvtt>.
=back
If you want ttml or smpte-tt compatible captions, specify dfxp as your
output format.
Elastic Transcoder does not support OCR (Optical Character
Recognition), does not accept pictures as a valid input for captions,
and is not available for audio-only transcoding. Elastic Transcoder
does not preserve text formatting (for example, italics) during the
transcoding process.
To remove captions or leave the captions empty, set C<Captions> to
null. To pass through existing captions unchanged, set the
C<MergePolicy> to C<MergeRetain>, and pass in a null C<CaptionSources>
array.
For more information on embedded files, see the Subtitles Wikipedia
page.
For more information on sidecar files, see the Extensible Metadata
Platform and Sidecar file Wikipedia pages.
=head2 Composition => ArrayRef[L<Paws::ElasticTranscoder::Clip>]
You can create an output file that contains an excerpt from the input
file. This excerpt, called a clip, can come from the beginning, middle,
or end of the file. The Composition object contains settings for the
clips that make up an output file. For the current release, you can
only specify settings for a single clip per output file. The
Composition object cannot be null.
=head2 Encryption => L<Paws::ElasticTranscoder::Encryption>
You can specify encryption settings for any output files that you want
to use for a transcoding job. This includes the output file and any
watermarks, thumbnails, album art, or captions that you want to use.
You must specify encryption settings for each file individually.
=head2 Key => Str
The name to assign to the transcoded file. Elastic Transcoder saves the
file in the Amazon S3 bucket specified by the C<OutputBucket> object in
the pipeline that is specified by the pipeline ID. If a file with the
specified name already exists in the output bucket, the job fails.
=head2 PresetId => Str
The C<Id> of the preset to use for this job. The preset determines the
audio, video, and thumbnail settings that Elastic Transcoder uses for
transcoding.
=head2 Rotate => Str
The number of degrees clockwise by which you want Elastic Transcoder to
rotate the output relative to the input. Enter one of the following
values: C<auto>, C<0>, C<90>, C<180>, C<270>. The value C<auto>
generally works only if the file that you're transcoding contains
rotation metadata.
=head2 SegmentDuration => Str
(Outputs in Fragmented MP4 or MPEG-TS format only.)
If you specify a preset in C<PresetId> for which the value of
C<Container> is C<fmp4> (Fragmented MP4) or C<ts> (MPEG-TS),
C<SegmentDuration> is the target maximum duration of each segment in
seconds. For C<HLSv3> format playlists, each media segment is stored in
a separate C<.ts> file. For C<HLSv4> and C<Smooth> playlists, all media
segments for an output are stored in a single file. Each segment is
approximately the length of the C<SegmentDuration>, though individual
segments might be shorter or longer.
The range of valid values is 1 to 60 seconds. If the duration of the
video is not evenly divisible by C<SegmentDuration>, the duration of
the last segment is the remainder of total length/SegmentDuration.
Elastic Transcoder creates an output-specific playlist for each
C<HLS> output that you specify in OutputKeys. To add an output to the
master playlist for this job, include it in the C<OutputKeys> of the
associated playlist.
=head2 ThumbnailEncryption => L<Paws::ElasticTranscoder::Encryption>
The encryption settings, if any, that you want Elastic Transcoder to
apply to your thumbnail.
=head2 ThumbnailPattern => Str
Whether you want Elastic Transcoder to create thumbnails for your
videos and, if so, how you want Elastic Transcoder to name the files.
If you don't want Elastic Transcoder to create thumbnails, specify "".
If you do want Elastic Transcoder to create thumbnails, specify the
information that you want to include in the file name for each
thumbnail. You can specify the following values in any sequence:
=over
=item *
B<C<{count}> (Required)>: If you want to create thumbnails, you must
include C<{count}> in the C<ThumbnailPattern> object. Wherever you
specify C<{count}>, Elastic Transcoder adds a five-digit sequence
number (beginning with B<00001>) to thumbnail file names. The number
indicates where a given thumbnail appears in the sequence of thumbnails
for a transcoded file.
If you specify a literal value and/or C<{resolution}> but you omit
C<{count}>, Elastic Transcoder returns a validation error and does not
create the job.
=item *
B<Literal values (Optional)>: You can specify literal values anywhere
in the C<ThumbnailPattern> object. For example, you can include them as
a file name prefix or as a delimiter between C<{resolution}> and
C<{count}>.
=item *
B<C<{resolution}> (Optional)>: If you want Elastic Transcoder to
include the resolution in the file name, include C<{resolution}> in the
C<ThumbnailPattern> object.
=back
When creating thumbnails, Elastic Transcoder automatically saves the
files in the format (.jpg or .png) that appears in the preset that you
specified in the C<PresetID> value of C<CreateJobOutput>. Elastic
Transcoder also appends the applicable file name extension.
=head2 Watermarks => ArrayRef[L<Paws::ElasticTranscoder::JobWatermark>]
Information about the watermarks that you want Elastic Transcoder to
add to the video during transcoding. You can specify up to four
watermarks for each output. Settings for each watermark must be defined
in the preset for the current output.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::ElasticTranscoder>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/ElasticTranscoder/CreateJobOutput.pm | Perl | apache-2.0 | 9,105 |
package Web::Summarizer::Sequence::Featurizer;

# Featurizer implementation that maps a sentence object onto a sparse
# surface-unigram vector representation.

use strict;
use warnings;

use Moose;
use namespace::autoclean;

# Identifier for this featurizer: currently just the package name.
# (See the parent-class TODO about composing ids recursively.)
sub _id {
    my ($self) = @_;
    return __PACKAGE__;
}

# Vectorize the given sentence object by requesting its surface unigrams as
# a sparse vector. Producing representations richer than unigrams remains a
# known TODO.
sub run {
    my ($self, $sentence) = @_;
    my $vectorized = $sentence->get_ngrams( 1 , return_vector => 1 , surface_only => 1 );
    return $vectorized;
}

with( 'Featurizer' );

__PACKAGE__->meta->make_immutable;

1;
| ypetinot/web-summarization | src/perl/Web/Summarizer/Sequence/Featurizer.pm | Perl | apache-2.0 | 787 |
package Unison::Utilities::Cluster;
use CBT::debug;
CBT::debug::identify_file() if ($CBT::debug::trace_uses);
use strict;
use warnings;
use base 'Exporter';
our @EXPORT = ();
our @EXPORT_OK = qw( cluster_data);
use Algorithm::Cluster qw/kcluster/;
## method: new(parameters)
## Construct a Cluster helper: seed the clustering parameters with sane
## defaults, then override them with any key => value pairs supplied by the
## caller (via setParam). Missing nclusters/algorithm fall back to warned
## defaults.
##
## NOTE: designed to be called as a plain function
## (Unison::Utilities::Cluster::new(%params)); a Class->new(...) method call
## would leak the class name into the parameter list passed to setParam.
sub new {
    my $self = {};
    bless $self;

    # Defaults for Algorithm::Cluster::kcluster; see its documentation for
    # the meaning of npass/transpose/method/dist. (A duplicated npass
    # assignment was removed here.)
    $self->{param}->{npass}     = 10;
    $self->{param}->{transpose} = 0;
    $self->{param}->{method}    = 'a';
    $self->{param}->{dist}      = 'e';

    # Caller-supplied parameters override the defaults.
    $self->setParam(@_);

    if ( !defined( $self->{param}->{nclusters} ) ) {
        warn "number of clusters not set, will use default value of 4";
        $self->{param}->{nclusters} = 4;
    }
    if ( !defined( $self->{param}->{algorithm} ) ) {
        warn "Algorithm not set, will use default k-cluster";
        $self->{param}->{algorithm} = "kclust";
    }
    return $self;
}
## method: setParam(parameters)
## Merge key => value clustering parameters into $self->{param}.
## Undefined or empty-string values are ignored rather than stored.
sub setParam {
    my ( $self, %new_params ) = @_;
    while ( my ( $name, $value ) = each %new_params ) {
        next unless defined $value;
        next if $value eq "";
        $self->{param}->{$name} = $value;
    }
}
##
## cluster_2dhash($scores)
## Flatten the values of a two-level hash ($scores->{$i}{$j}) into a list of
## one-dimensional data points and partition them into k clusters with
## Algorithm::Cluster::kcluster. Stores (in $self->{cluster_arr}) and
## returns an array ref of per-cluster value ranges, each sorted ascending
## so its first/last elements are the cluster's min/max.
##
sub cluster_2dhash {
    my ( $self, $scores ) = @_;
    my ( @data, @mask, @weight );
    my ( $clusters, $centroids, $error, $found, $cluster_arr );
    my (%data_by_cluster);
    my $k = 0;
    # Collect every defined leaf value as a single-column data row; the
    # parallel mask marks each datum as present for kcluster.
    foreach my $i ( keys %$scores ) {
        foreach my $j ( keys %{ $$scores{$i} } ) {
            if ( defined( $$scores{$i}{$j} ) ) {
                $data[$k] = [ $$scores{$i}{$j} ];
                ${ $mask[$k] }[0] = 1;
                $k++;
            }
        }
    }
    # Single dimension => single unit weight.
    @weight = (1.0);
    #------------------
    # Define the params we want to pass to kcluster
    my %params = (
        nclusters => $self->{param}->{nclusters},
        transpose => $self->{param}->{transpose},
        npass => $self->{param}->{npass},
        method => $self->{param}->{method},
        dist => $self->{param}->{dist},
        data => \@data,
        mask => \@mask,
        weight => \@weight,
    );
    ( $clusters, $centroids, $error, $found ) = kcluster(%params);
    # Group the raw values by the cluster index assigned to each data row.
    my $i = 0;
    foreach ( @{$clusters} ) {
        push @{ $data_by_cluster{$_} }, @{ $data[$i] };
        ++$i;
    }
    # One sorted range per cluster (iterating centroids gives one entry per
    # cluster index).
    $i = 0;
    foreach ( @{$centroids} ) {
        my @min_max = sort { $a <=> $b } @{ $data_by_cluster{$i} };
        push @$cluster_arr, [@min_max];
        ++$i;
    }
    $self->{cluster_arr} = $cluster_arr;
    return $cluster_arr;
}
##
## get_association($score, $order)
## Sort the previously computed cluster ranges (descending when $order is
## -1, ascending otherwise) and return the label from param->{associate}
## whose position matches the range containing $score. When no range
## contains the score, the extra label at index nclusters is returned.
##
sub get_association {
    my ( $self, $score, $order ) = @_;

    my $descending = ( $order == -1 );
    my @ranges = sort {
        $descending
            ? $$b[0] <=> $$a[0]
            : $$a[0] <=> $$b[0]
    } @{ $self->{cluster_arr} };

    my $labels    = $self->{param}->{associate};
    my $nclusters = $self->{param}->{nclusters};
    for my $idx ( 0 .. $nclusters - 1 ) {
        my $range = $ranges[$idx];
        if ( $score >= $range->[0] and $score <= $range->[-1] ) {
            return $labels->[$idx];
        }
    }
    return $labels->[$nclusters];
}

1;
| unison/unison | perl5/Unison/Utilities/Cluster.pm | Perl | apache-2.0 | 3,268 |
package Paws::Pinpoint::GetCampaignVersions;
# Generated request class for the Amazon Pinpoint GetCampaignVersions API
# call; attributes map onto the call's URI and query-string parameters.
  use Moose;
  # URI path parameters (both required).
  has ApplicationId => (is => 'ro', isa => 'Str', traits => ['ParamInURI'], uri_name => 'application-id', required => 1);
  has CampaignId => (is => 'ro', isa => 'Str', traits => ['ParamInURI'], uri_name => 'campaign-id', required => 1);
  # Optional pagination query-string parameters.
  has PageSize => (is => 'ro', isa => 'Str', traits => ['ParamInQuery'], query_name => 'page-size');
  has Token => (is => 'ro', isa => 'Str', traits => ['ParamInQuery'], query_name => 'token');

  use MooseX::ClassAttribute;
  # Wire-level call metadata consumed by the Paws caller machinery.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'GetCampaignVersions');
  class_has _api_uri => (isa => 'Str', is => 'ro', default => '/v1/apps/{application-id}/campaigns/{campaign-id}/versions');
  class_has _api_method => (isa => 'Str', is => 'ro', default => 'GET');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::Pinpoint::GetCampaignVersionsResponse');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::Pinpoint::GetCampaignVersions - Arguments for method GetCampaignVersions on Paws::Pinpoint
=head1 DESCRIPTION
This class represents the parameters used for calling the method GetCampaignVersions on the
Amazon Pinpoint service. Use the attributes of this class
as arguments to method GetCampaignVersions.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to GetCampaignVersions.
As an example:
$service_obj->GetCampaignVersions(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 B<REQUIRED> ApplicationId => Str
=head2 B<REQUIRED> CampaignId => Str
=head2 PageSize => Str
The number of entries you want on each page in the response.
=head2 Token => Str
The NextToken string returned on a previous page that you use to get
the next page of results in a paginated response.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method GetCampaignVersions in L<Paws::Pinpoint>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/Pinpoint/GetCampaignVersions.pm | Perl | apache-2.0 | 2,461 |
package WordGraph::EdgeFeature;

# Base class for features computed on word-graph edges. compute() assembles
# node-level and edge-level values (presumably provided by the 'Feature'
# role or by subclasses — the value_node/value_edge methods are not defined
# in this file) into a flat feature hash keyed by domain
# ("source" / "sink" / "edge").

use Moose;
use namespace::autoclean;

# Unique identifier for this feature; used in cache keys and feature keys.
has 'id' => ( is => 'ro' , isa => 'Str' , required => 1 );

with('Feature');

# Free-form feature parameters (empty hash by default).
has 'params' => ( is => 'ro' , isa => 'HashRef' , default => sub { {} } );
# Build a cache key for this feature's value on the given edge: the two
# edge endpoints joined with the instance id.
# NOTE(review): assumes $edge is a two-element arrayref and $instance has an
# id() accessor — TODO confirm. The graph is not yet part of the key (see
# TODO below), so cached values could collide across graphs.
sub cache_key {
    my $this = shift;
    my $instance = shift;
    my $edge = shift;
    my $graph = shift;
    # TODO: include graph id asap
    my $cache_key = join( "::" , $edge->[0] , $edge->[1] , $instance->id );
    return $cache_key;
}
# value
# Compute all feature values for one edge. Collects shared resources once,
# then gathers node-level features for both endpoints ("source" / "sink")
# and edge-level features ("edge"), flattening everything into a single
# hash via _update_features.
#
# NOTE(review): the arguments are unpacked here as (instance, edge, graph)
# but forwarded below as ( $graph , $edge , $instance ) — i.e. reversed —
# and get_resources (below) unpacks them reversed once more, cancelling the
# swap. The local names are therefore misleading; confirm the intended
# argument order before relying on these names.
sub compute {
    my $this = shift;
    my $instance = shift;
    my $edge = shift;
    my $graph = shift;
    my %features;
    my $feature_key = $this->id();
    # 1 - collect common resources
    my $common_resources = $this->get_resources( $graph , $edge , $instance );
    # 2 - compute node-level features (if required)
    # The trailing 0/1 flag selects the source (0) or sink (1) endpoint.
    my $source_features = $this->value_node( $graph , $edge , $instance , $common_resources , 0 );
    if ( $source_features ) {
        $this->_update_features( "source" , \%features , $source_features );
    }
    my $sink_features = $this->value_node( $graph , $edge , $instance , $common_resources , 1 );
    if ( $sink_features ) {
        $this->_update_features( "sink" , \%features , $sink_features );
    }
    # 3 - compute edge-level features (if required)
    my $edge_features = $this->value_edge( $graph , $edge , $instance , $common_resources , $source_features , $sink_features );
    if ( $edge_features ) {
        $this->_update_features( "edge" , \%features , $edge_features );
    }
    return \%features;
}
# default implementation of get_resources
# NOTE(review): the parameters are unpacked as (instance, edge, graph) but
# the caller (compute) passes ( $graph , $edge , $instance ), and the
# arguments are forwarded to _get_resources reversed once more. The net
# effect preserves the original order, but the local names here are
# swapped — worth untangling.
sub get_resources {
    my $this = shift;
    my $instance = shift;
    my $edge = shift;
    my $graph = shift;
    return $this->_get_resources( $graph , $edge , $instance );
}
# CURRENT : feature generation code as role applied onto Category::UrlData instances ?
# default implementation of _get_resources
# Base implementation: no shared resources. Subclasses may override this to
# return a hashref of precomputed data reused across value_node/value_edge.
sub _get_resources {
    my $this = shift;
    my $instance = shift;
    my $edge = shift;
    my $graph = shift;
    # nothing
    return {};
}
# Merge newly computed feature values into the accumulator hash, namespacing
# every entry under this feature's key for the given domain. A hashref of
# values is merged entry by entry; any other (scalar) value is stored under
# the bare domain key.
sub _update_features {
    my ( $self, $domain, $accumulator, $new_features ) = @_;
    unless ( ref( $new_features ) ) {
        $accumulator->{ $self->key( $domain ) } = $new_features;
        return;
    }
    while ( my ( $feature_id, $value ) = each %{ $new_features } ) {
        $accumulator->{ $self->key( $domain , $feature_id ) } = $value;
    }
}
# feature key
# Build the namespaced feature key for this feature instance: the domain,
# then the feature id, then any extra qualifiers.
sub key {
    my ( $self, $domain, @qualifiers ) = @_;
    return feature_key( $domain , $self->id() , @qualifiers );
}
# Join key components with '::'; array-ref components contribute only their
# first element.
sub feature_key {
    return join( "::" , map { ref( $_ ) ? $_->[ 0 ] : $_ } @_ );
}
# Default feature normalizer: no normalization (constant factor of 1).
sub _normalizer {
    my ( $self, $graph, $instance ) = @_;
    return 1;
}
# Finalize the class: make the Moose metaclass immutable for faster method
# dispatch, and return a true value as required of Perl modules.
__PACKAGE__->meta->make_immutable;

1;
| ypetinot/web-summarization | summarizers/graph-summarizer-4/src/WordGraph/EdgeFeature.pm | Perl | apache-2.0 | 3,024 |
#!/usr/bin/env perl
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
## script to create a table of HGVS stings and variation_ids for search index building
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<helpdesk.org>.
=cut
use strict;
use warnings;
use FileHandle;
use Getopt::Long;
# Holds all parsed command-line options plus state shared between stages.
my $config = {};

GetOptions(
    $config,
    'version=i',
    'script_dir=s',
    'vep_input=s',
    'cache_dir=s',
    'no_cleanup',
    'help!',
) or die "Error: Failed to parse command line arguments\n";

if ($config->{help}) {
    usage();
    exit(0);
}

# A release version is mandatory; it is part of every cache directory name.
if (!$config->{version}) {
    die "Version is missing. Set version with --version.";
}

my $version = $config->{version};

# The human cache flavours to test (value = extra VEP switch) and the two
# assemblies each flavour must cover.
my @assemblies = ('GRCh37', 'GRCh38');
my $cache_dirs = {
    'homo_sapiens' => '',
    'homo_sapiens_merged' => '--merged',
    'homo_sapiens_refseq' => '--refseq',
};

# Both directory options must be set and must exist.
foreach my $dir_name (qw/script_dir cache_dir/) {
    if (! $config->{$dir_name}) {
        die "Parameter ($dir_name) is not set.";
    }
    if (!(-d $config->{$dir_name})) {
        die "$dir_name: ", $config->{$dir_name}, " is not a directory.";
    }
}

my $script_dir = $config->{script_dir};
die "script_dir is missing file variant_effect_predictor.pl" unless (-f "$script_dir/variant_effect_predictor.pl");

# Fall back to the bundled test input and the default ~/.vep cache root.
$config->{vep_input} ||= "$script_dir/t/testdata/test_vep_input.txt.gz";
die "vep_input: ", $config->{vep_input}, " is not a file." unless (-f $config->{vep_input});
$config->{cache_dir} ||= $ENV{HOME} . '/.vep';

# Pipeline: (optionally verify caches,) run VEP for every flavour/assembly,
# check the outputs, then remove the work files unless --no_cleanup.
#all_cache_files_are_installed($config);
run_vep($config);
tests($config);
cleanup($config) unless($config->{no_cleanup});
# Verify that every expected VEP cache directory (each species flavour x
# each assembly, for the configured release version) exists under the cache
# root. Dies on the first missing directory; returns 1 when all are found.
# (Currently commented out at the call site.)
sub all_cache_files_are_installed {
    my ($config) = @_;
    my $cache_root = $config->{cache_dir};
    for my $species_dir (keys %$cache_dirs) {
        for my $assembly (@assemblies) {
            my $expected = "$cache_root/$species_dir/$version\_$assembly";
            next if -d $expected;
            die "$expected is not a directory. Cache files for $species_dir/$version\_$assembly are missing.";
        }
    }
    return 1;
}
# For every cache flavour (default/refseq/merged) and assembly, run the VEP
# with the test input against the local cache, capturing stdout, stderr and
# the annotated output into per-run files. Records the file names in
# $config->{output_files} for the later test/cleanup stages.
sub run_vep {
    my $config = shift;
    my $script_dir = $config->{script_dir};
    my $input = $config->{vep_input};
    my $root_cache_dir = $config->{cache_dir};
    my $output_files = {};
    foreach my $cache_dir (keys %$cache_dirs) {
        foreach my $assembly (@assemblies) {
            my $vep_run_name = "$cache_dir\_$version\_$assembly";
            # Flavour switch (--merged/--refseq/empty) plus the assembly.
            my $params = $cache_dirs->{$cache_dir} . " --assembly $assembly";
            my $output = "$script_dir/test_vep_output_$vep_run_name";
            my $err_file = "$script_dir/err_$vep_run_name";
            my $out_file = "$script_dir/out_$vep_run_name";
            $output_files->{$vep_run_name}->{vep_output} = $output;
            $output_files->{$vep_run_name}->{err} = $err_file;
            $output_files->{$vep_run_name}->{out} = $out_file;
            # -cache_version
            my $cmd = "perl $script_dir/variant_effect_predictor.pl --cache --offline --dir $root_cache_dir -i $input -o $output --force_overwrite --no_stats --regulatory --sift b --polyphen b $params";
            run_cmd("$cmd 1>$out_file 2>$err_file");
        }
    }
    $config->{output_files} = $output_files;
}
# Sanity-check each VEP output file: every human chromosome (1-22, X, Y, MT)
# must appear in the results, and each expected annotation type (SIFT,
# PolyPhen, MotifFeature, RegulatoryFeature) must occur at least once.
# Dies on the first failure; returns 1 when all runs look complete.
sub tests {
    my $config = shift;
    my $output_files = $config->{output_files};
    my @annotations = qw/SIFT PolyPhen MotifFeature RegulatoryFeature/;
    foreach my $vep_run_name (keys %$output_files) {
        my $vep_out_file = $output_files->{$vep_run_name}->{vep_output};
        my $fh = FileHandle->new($vep_out_file, 'r');
        my $covered_chroms = {};
        my $has_annotation = {};
        while (<$fh>) {
            chomp;
            next if /^#/;
            # Columns: variant name, "chrom:pos" location, then the rest of
            # the annotation fields.
            my ($name, $location, $rest) = split("\t", $_, 3);
            my ($chrom, $position) = split(':', $location);
            $covered_chroms->{$chrom} = 1;
            foreach my $annotation (@annotations) {
                if ($rest =~ /$annotation/) {
                    $has_annotation->{$annotation} = 1;
                }
            }
        }
        $fh->close();
        foreach my $chrom (1..22, 'X', 'Y', 'MT') {
            if (!$covered_chroms->{$chrom}) {
                die "Chrom $chrom is missing from VEP output $vep_out_file. Need to check cache files are dumped correctly.";
            }
        }
        print STDOUT "All chromosomes are covered in $vep_out_file\n";
        foreach my $annotation (@annotations) {
            if (!$has_annotation->{$annotation}) {
                die "Annotation: $annotation is missing from VEP output $vep_out_file. Need to check cache files are dumped correctly.";
            }
        }
        print STDOUT "Annotations (", join(', ', @annotations), ") are contained in $vep_out_file\n";
    }
    return 1;
}
# Run a shell command via system(); on a non-zero status, die with the
# command and its exit code (the raw status shifted right 8 bits).
sub run_cmd {
    my ($cmd) = @_;
    my $status = system($cmd);
    if ($status) {
        my $exit_code = $status >> 8;
        die "system($cmd) failed: $exit_code";
    }
}
# Remove the per-run annotated-output, stderr and stdout files created by
# run_vep.
sub cleanup {
    my $config = shift;
    my $output_files = $config->{output_files};
    foreach my $vep_run_name (keys %$output_files) {
        foreach my $file_type (qw/vep_output err out/) {
            my $file = $output_files->{$vep_run_name}->{$file_type};
            run_cmd("rm $file");
        }
    }
}
# Print the command-line usage/help text to STDOUT.
sub usage {
    my $usage =<<END;
Usage:
perl test_chrom_coverage_in_cache_files.pl [arguments]
The script runs for human only. It checks that all human chromosomes have been dumped to the
cache files (default, refseq, merged) for GRCh37 and GRCh38.
The script should be run after the cache file generation. Copy and unpack all the human cache files
to the cache file directory (--cache_dir).
bsub -J test_chrom_coverage -o out -e err -R"select[mem>2500] rusage[mem=2500]" -M2500 perl test_chrom_coverage_in_cache_files.pl -version 78 -cache_dir /lustre/scratch110/ensembl/at7/vep/ -script_dir ~/DEV/ensembl-tools/scripts/variant_effect_predictor/
Options
=======
--help Display this message and quit
--version Set the version for the new release
--script_dir Location of variant_effect_predictor.pl script
--cache_dir Cache file directory
--no_cleanup Don't clean up err, out, vep_output files
END
    print $usage;
}
| dbolser/ensembl-variation | scripts/misc/test_chrom_coverage_in_cache_files.pl | Perl | apache-2.0 | 6,591 |
package VMOMI::HostRuntimeInfo;
# Generated binding for the vSphere API "HostRuntimeInfo" data object.
use parent 'VMOMI::DynamicData';

use strict;
use warnings;

# Ancestor class names, walked by the (de)serialization machinery.
our @class_ancestors = (
    'DynamicData',
);

# Member table rows: [name, VMOMI type (undef => primitive/unspecified),
# is-array flag, is-optional flag].
our @class_members = (
    ['connectionState', 'HostSystemConnectionState', 0, ],
    ['powerState', 'HostSystemPowerState', 0, ],
    ['standbyMode', undef, 0, 1],
    ['inMaintenanceMode', 'boolean', 0, ],
    ['inQuarantineMode', 'boolean', 0, 1],
    ['bootTime', undef, 0, 1],
    ['healthSystemRuntime', 'HealthSystemRuntime', 0, 1],
    ['dasHostState', 'ClusterDasFdmHostState', 0, 1],
    ['tpmPcrValues', 'HostTpmDigestInfo', 1, 1],
    ['vsanRuntimeInfo', 'VsanHostRuntimeInfo', 0, 1],
    ['networkRuntimeInfo', 'HostRuntimeInfoNetworkRuntimeInfo', 0, 1],
    ['vFlashResourceRuntimeInfo', 'HostVFlashManagerVFlashResourceRunTimeInfo', 0, 1],
    ['hostMaxVirtualDiskCapacity', undef, 0, 1],
    ['cryptoState', undef, 0, 1],
    ['cryptoKeyId', 'CryptoKeyId', 0, 1],
);

# Return the list of ancestor class names for this type.
sub get_class_ancestors {
    return @class_ancestors;
}

# Return this class's member table appended to all inherited members.
sub get_class_members {
    my $class = shift;
    my @super_members = $class->SUPER::get_class_members();
    return (@super_members, @class_members);
}

1;
| stumpr/p5-vmomi | lib/VMOMI/HostRuntimeInfo.pm | Perl | apache-2.0 | 1,143 |
:- use_package(persdb).
:- use_module(library('pillow/http_server')).
:- use_module(library(sockets)).
:- use_module(library('sockets/sockets_io'),[serve_socket/3]).
:- use_module(library(strings),[write_string/2]).
:- use_module(library(soap),[soap_message/2]).
:- use_module(library(write)).
% Persistent predicates
% Received messages and detected errors are asserted into these predicates.
% Every asserted fact is also written to a file:
% ./user/cached_1.pl and ./user/received_message_2.pl
% Very useful for "spying" on what is going on...
persistent_dir(db,'./ser').
:- persistent(received_message/2,db).
:- persistent(cached/1,db).
% Main entry point
% Invoking the executable as "server NUMBER" starts the server process
% listening on port NUMBER. For every new client request the predicate
% server/1 is invoked via serve_socket/3; every error is routed to
% catcher/1.
main(N):-
    %atom_codes(S,Codes),
    %number_codes(N,Codes),
    set_prolog_flag(write_strings,on),
    bind_socket(N,5,Socket),
    serve_socket(Socket,server,catcher).
% Request handler
% Reads a request, processes it, returns the answer and closes the
% connection. For HTTP this is enough. For other protocols one could keep
% writing and reading on the same Stream.
server(Stream):-
    http_serve_fetch(Stream,serve(Stream)).

% NOTE(review): the cut in this first clause makes serve/3 always display
% the request and answer "good", so the second clause below (which records
% the message persistently and calls process/2) is unreachable dead code.
% This looks like leftover debugging; confirm before removing either clause.
serve(Message,Stream,Answer):-
    !,
    display(Message),nl,
    display(Stream),nl,
    http_answer(good,Answer).
serve(Message,Stream,Answer):-
    writeq(received_message(Stream,Message)), nl,
    assertz_fact(received_message(Stream,Message)),
    process(Message,Answer).
% Error handler
% NOTE(review): the cut in the first clause means every error is only
% displayed; the second clause, which would record the error in the
% persistent cached/1 predicate, is unreachable dead code — likely leftover
% debugging.
catcher(Error):-
    !,
    display('Error: '),
    display(Error),nl.
catcher(Error):-
    assertz_fact(cached(Error)),
    writeq(Error), nl.
% Application processing
% Parses the HTTP message extracting the SOAP payload, then parses that to
% obtain the content: an XML term. Answers 200 on success, 400 on any
% parsing failure.
process(Message,[Answer]):-
    http_message(Message,SoapStr),
    soap_message(SoapStr,XmlTerm), !,
    http_answer(good,Answer),
    writeq(xmlterm(XmlTerm)), nl.
process(_Message,[Answer]):-
    http_answer(bad,Answer).
% HTTP status terms used as answers.
http_answer(bad,status(request_error,400,"ERROR")).
http_answer(good,status(success,200,"OK")).

% Extract the SOAP payload string from a parsed HTTP message: either the
% content of a POST request or, failing that, the raw document.
http_message(Message,SoapStr):-
    member(post,Message), !,
    member(content(SoapStr),Message).
http_message(Message,SoapStr):-
    member(document(SoapStr),Message).

% If the application protocol were synchronous, the SOAP reply message
% should go in Answer. Here Answer is used as an acknowledgement of correct
% reception; the SOAP reply would be sent using the client side:
% see client.pl
| leuschel/ecce | www/CiaoDE/ciao/library/connections/soap/server2.pl | Perl | apache-2.0 | 2,624 |
#! /usr/bin/env perl
#
# Grep Crawl vault files for some string and output the match preceded by
# vault name. Don't search things like SUBST lines or the MAP.
#
use File::Find;
# Derive the program's display name from $0: strip the directory part and
# any file extension.
my $myname = $0;
$myname =~ s,.*/,,;
$myname =~ s/(.)\..*$/$1/;

# probably pointless extensibility...
# Search roots and the file suffixes to scan within them.
my @path = ($ENV{HOME} . '/Sources/crawl/crawl-ref/source/dat/des');
my @sfx = ('des');

# Searchable "sections": .des keyword prefixes selected by --monster etc.
my %sel = (monster => [qr/^MONS:/, qr/^KMONS:/]
          ,item => [qr/^ITEM:/, qr/^KITEM:/]
          ,feature => [qr/^FEAT:/, qr/^KFEAT:/]
          );
# Extra filters: keyword lines that restrict which vaults are searched.
my %extra = (property => [qr/^PROP:/, qr/^KPROP:/, qr/^TAGS:/, qr/^LTAGS:/]
            ,branch => [qr/^PLACE:/]
            );

# Runtime state: selected sections (%which), active extra-filter patterns
# (%also), error flag, and whether multiple patterns must all match
# (--and, default) or any may match (--or).
my (%which, %also);
my $err = 0;
my $and = 1;
# Parse command-line switches. Section switches (--monster/--item/--feature
# or -m/-i/-f) choose which keyword classes to search; --property/--branch
# (and -b) add extra filters; --and/--or choose how multiple patterns
# combine. Remaining arguments are the search patterns.
while (@ARGV) {
    my $arg = shift @ARGV;
    if ($arg eq '--all') {
        %which = map {$_ => 1} keys %sel;
    }
    elsif ($arg =~ /^--(\w+)$/ and exists $sel{$1}) {
        $which{$1} = 1;
    }
    elsif ($arg =~ /^--no-(\w+)$/ and exists $sel{$1}) {
        # --no-X with no prior selection means "everything except X".
        if (!keys %which) {
            %which = map {$_ => 1} keys %sel;
        }
        delete $which{$1};
    }
    elsif ($arg =~ /^--(\w+)$/ and exists $extra{$1}) {
        push @{$also{$1}}, shift @ARGV;
    }
    elsif ($arg =~ /^--(\w+)=(.*)$/ and exists $extra{$1}) {
        push @{$also{$1}}, $2;
    }
    # @@@ should perhaps accept --any|--all, except see above...
    elsif ($arg eq '--and' or $arg eq '-a') {
        $and = 1;
    }
    elsif ($arg eq '--or' or $arg eq '-o') {
        $and = 0;
    }
    elsif ($arg eq '--help' or $arg eq '-h') {
        $err = 1;
        last;
    }
    # @@@ do this via mapping somehow instead of breaking abstraction :/
    elsif ($arg =~ /^-b(.*)$/) {
        push @{$also{branch}}, $1;
    }
    elsif ($arg eq '-b') {
        push @{$also{branch}}, shift @ARGV;
    }
    elsif ($arg eq '-m') {
        $which{monster} = 1;
    }
    elsif ($arg eq '-i') {
        $which{item} = 1;
    }
    elsif ($arg eq '-f') {
        $which{feature} = 1;
    }
    elsif ($arg eq '--') {
        last;
    }
    elsif ($arg =~ /^-/) {
        print STDERR "$myname: unknown switch $arg\n";
        $err = 1;
    }
    else {
        # First non-switch argument: stop here, leaving it and everything
        # after it in @ARGV as the search patterns.
        unshift @ARGV, $arg;
        last;
    }
}
# On a parse error, --help, or missing patterns: print a usage synopsis
# built from the section and extra tables, then exit.
if ($err or !@ARGV) {
    print STDERR "usage: $myname [--and|--or] [--all";
    for (keys %sel) {
        print STDERR "|--[no-]$_";
    }
    print STDERR "] [";
    # $err is reused here as a "need a | separator" flag.
    $err = 0;
    for (keys %extra) {
        $err and print STDERR '|';
        print STDERR "--$_=pattern";
        $err = 1;
    }
    print STDERR "] pattern...\n";
    exit 1;
}

# Default to searching every section when none was explicitly selected.
keys %which or %which = map {$_ => 1} keys %sel;

# Walk the vault tree, grepping each candidate file; clean() makes the
# reported directory relative to the search root.
find(sub {vgrep(clean($File::Find::dir, @path), $_)}
    ,@path
    );
###############################################################################
# Strip the first matching search-path prefix (plus any trailing slash) from
# a directory path, returning the path relative to that search root; if no
# prefix matches, the path is returned unchanged.
#
# Fix: the prefix is now matched literally (\Q...\E) so that regex
# metacharacters in a search path (e.g. '.', '+') cannot cause false or
# partial matches.
sub clean {
    my ($dir, @prefixes) = @_;
    # @@@ after allowing for multiple paths, we make it useless...
    for my $prefix (@prefixes) {
        return $dir if $dir =~ s,^\Q$prefix\E($|/),,;
    }
    return $dir;
}
# File::Find callback body: grep one candidate vault file. $dir is the
# cleaned (root-relative) directory, $name the file name; File::Find also
# supplies the file name in $_. Skips non-files and files without a known
# suffix, then scans line by line, tracking parser state: inside a MAP
# block, inside {{ }} lua, and the current vault NAME. Keyword lines in
# enabled sections are matched against the command-line patterns and hits
# are printed as "dir/file:line: [vault] line".
sub vgrep {
    my ($dir, $name) = @_;
    -f $_ or return;
    my $ok = 0;
    for my $sfx (@sfx) {
        if (/\.$sfx$/i) {
            $ok = 1;
            last;
        }
    }
    $ok or return;
    # it's presumably a .des file; munch it
    # NOTE(review): $f is an undeclared package-global filehandle and this
    # is a two-arg open; the script runs without 'use strict', but a
    # lexical three-arg open would be safer.
    open($f, $_) or return;
    my $ln;
    my $map = 0;          # inside MAP ... ENDMAP
    my $lua = 0;          # inside {{ ... }}
    my $cur = undef;      # current vault name (undef = outside a vault)
    my $lno = 0;          # current line number (for reporting)
    my $doing = -1;       # branch filter: -1 undecided, 0 excluded, 1 included
    my $dd = undef;       # most recent default-depth: value
    my $ldd = undef;      # current vault's DEPTH: value, if any
    while (defined ($ln = <$f>)) {
        $lno++;
        chomp $ln;
        # Skip blank lines and comments.
        $ln =~ /^\s*($|#)/ and next;
        # Splice backslash-continued lines into one logical line.
        while ($ln =~ s/\\$//) {
            my $l2 = <$f>;
            unless (defined $l2) {
                print STDERR "$dir/$_:$lno: warning: end of file in continued line\n";
                $l2 = '';
            }
            $lno++;
            chomp $l2;
            $l2 =~ s/^\s+//;
            $ln .= $l2;
        }
        if (defined $cur and !$map and !$lua and $ln =~ /^MAP$/) {
            $map = 1;
            next;
        }
        elsif (!$map and !$lua and $ln =~ /^:/) {
            # one-liner lua
            next;
        }
        elsif (!$map and !$lua and $ln =~ /^(?:lua\s*)?\{\{$/) {
            $lua = 1;
            next;
        }
        elsif ($lua and $ln =~ /^\}\}$/) {
            $lua = 0;
            next;
        }
        elsif ($map and $ln =~ /^ENDMAP$/) {
            $cur = undef;
            $map = 0;
            next;
        }
        elsif ($map or $lua) {
            next;
        }
        elsif ($ln =~ /^NAME:\s*(\S+)\s*$/) {
            # @@@ serial vaults don't have maps in the main vaults
            # @@@ check default depth vs. branch here to set $doing!
            # @@@@ except that's wrong if it sets DEPTH:
            # if (defined $cur) {
            # print STDERR "$dir/$_:$lno: warning: already in $cur: $ln\n";
            # }
            $cur = $1;
            $doing = -1;
            $ldd = undef;
            next;
        }
        # this is allowed outside of any definition
        elsif (!defined $cur and $ln =~ /^default-depth:\s*(.*)$/) {
            $dd = $1;
            next;
        }
        elsif (!defined $cur) {
            print STDERR "$dir/$_:$lno: warning: not in a definition: $ln\n";
            next;
        }
        elsif ($ln =~ /^DEPTH:\s*(.*)$/) {
            $ldd = $1;
        }
        else {
            # look for extras matches
            $ok = 0;
            my $rok = 0;
            for my $extra (keys %also) {
                next if $extra eq 'branch'; # @@@@@@@@@@
                # does this line match a selector?
                for my $kw (@{$extra{$extra}}) {
                    if ($ln =~ $kw) {
                        $rok = 1;
                        for my $pat (@{$also{$extra}}) {
                            if ($ln =~ /$pat/) {
                                $ok = 1;
                                last;
                            }
                        }
                        # An extra keyword that fails its filter excludes
                        # the rest of this vault.
                        $ok or $doing = 0;
                        last;
                    }
                }
            }
            # if we matched any extra keyword then it can't be a section keyword
            $rok and next;
            # is section enabled?
            for my $sect (keys %which) {
                # does the line match a selector?
                for my $kw (@{$sel{$sect}}) {
                    if ($ln =~ $kw) {
                        $ok = 1;
                        last;
                    }
                }
                $ok or next;
                # figure out if we are in a selected branch
                # @@@ and pray DEPTH: doesn't occur *after* MONS etc.
                if ($doing == -1) {
                    if (!exists $also{branch}) {
                        $doing = 1;
                    }
                    elsif (defined $dd or defined $ldd) {
                        defined $ldd or $ldd = $dd;
                        $doing = 0;
                        for my $pat (map {split(',', $_)} @{$also{branch}}) {
                            if ($ldd =~ /(?:^|,\s*)$pat(?:,|:|$)/i) {
                                $doing = 1;
                            }
                        }
                    }
                }
                $doing or next;
                # try matching against all the patterns.
                # @@@ AND / OR expressions?
                # @@@ for that matter, and/or sections... right now always OR
                # --and counts down from the number of patterns; --or sets
                # a flag on the first hit.
                $ok = $and ? @ARGV : 0;
                for my $pat (@ARGV) {
                    # @@@ might want to delete prefixes for those keywords that have them
                    if ($ln =~ /$pat/) {
                        if ($and) {
                            $ok--;
                        } else {
                            $ok = 1;
                        }
                    }
                }
                if (($and and !$ok) or (!$and and $ok)) {
                    print "$dir/$_:${lno}: [$cur] $ln\n";
                }
            }
        }
    }
}
| geekosaur/vaultgrep | vaultgrep.pl | Perl | bsd-2-clause | 6,218 |
# Copyright (c) 2015, Mitchell Cooper
package F::Operation;
use warnings;
use strict;
use 5.010;
use parent qw(F::NodeExpression);
use Scalar::Util 'blessed';
# The left-hand operand of an operation is its first child node.
sub left_side {
    my $self = shift;
    return $self->first_child;
}
# handle signs.
#
# Custom child adoption that normalises unary sign operators (+/-) into
# ordinary two-operand operations before handing off to the superclass:
#   - a leading sign gets a synthetic zero as its left operand, and the
#     operator token is rewritten to its "super" form (OP_SSUB/OP_SADD);
#   - an operator directly following another operator is treated as a
#     sign: "- -" collapses into an addition, a "+" is simply dropped,
#     and anything else is reported via ->unexpected().
sub adopt {
my ($op, $maybe) = (shift, @_);
my $before = $op->last_child;
# first element is an operator.
# don't worry about anything except signs (+/-)
# because the constructor will not allow that to happen.
if (!$before && is_op($maybe)) {
# no left operand yet: insert a synthetic zero so the sign becomes
# a normal binary operation (marked zero => 1 so it can be spotted).
$before = F::new('Number', value => 0, zero => 1);
$op->SUPER::adopt($before);
$maybe->{token} = 'OP_S'.uc($maybe->op_type); # super sub
}
# two operators in a row.
elsif (is_op($before) && is_op($maybe)) {
# it could be a negation.
if (is_op($maybe, 'sub')) {
# two negatives = positive.
#
# note how this calls ->adopt again, rather than SUPER::adopt.
# this is because the below check is_op($maybe, 'add')
# may ignore it altogether.
#
my $super = is_op($before, 'ssub');
if (is_op($before, 'sub') || $super) {
# drop the previous minus and re-adopt a plus in its place,
# keeping the "super" flavour if the previous one had it.
$op->abandon($before);
return $op->adopt(F::new('Operator', token =>
$super ? 'OP_SADD' : 'OP_ADD' # super add
));
}
# otherwise it's just a normal negation.
$op->SUPER::adopt(F::new('Number', value => 0, zero => 1));
$maybe->{token} = 'OP_S'.uc($maybe->op_type); # super sub
return $op->SUPER::adopt(@_);
}
# it's just a positive; do nothing.
if (is_op($maybe, 'add')) {
return;
}
# otherwise, not allowed.
return $maybe->unexpected();
}
return $op->SUPER::adopt(@_);
}
# Reduce the flat child list (operand, operator, operand, ...) into nested
# [ op_type, operand, ... ] array references, honouring precedence: operator
# names earlier in the qw// list below bind tighter.  Consecutive uses of
# the same operator are merged into one n-ary node.  The result is memoised
# in $op->{compiled} and returned as a list.
sub compile {
my $op = shift;
return @{ $op->{compiled} } if $op->{compiled};
my @children = $op->children;
# for each operator, while there are instances of that operator
foreach my $op_type (qw/
range pow
ssub sadd
mod mul div
sub add
less less_e
gr8r gr8r_e
equal_i nequal_i
equal nequal
sim nsim
band xor bor
and or
/) {
while (grep { is_op($_, $op_type) } @children) {
my ($i, $left, $right) = -1;
# for each operator of this type...
foreach my $child (@children) { $i++;
is_op($child, $op_type) or next;
# replace the left side, operator, and right side with
# an array reference to represent the operation.
my $a = [ $op_type ];
($left, undef, $right) = splice @children, $i - 1, 3, $a;
# FIXME: do something proper if there is no $right
die 'no right side' if !$right;
# if the last is the same type of operation, combine.
# e.g. [add, a, b] followed by "+ c" becomes [add, a, b, c].
if (ref $left eq 'ARRAY' && $left->[0] eq $op_type) {
push @$a, @$left[1..$#$left], $right;
}
else {
push @$a, $left, $right;
}
# restart the inner scan: @children just changed under us.
last;
}
}}
$op->{compiled} = \@children;
return @children;
}
# Translate parser-level operator names to the runtime operation names
# used by the generated code; names not listed pass through unchanged.
my %map = (
'sub' => '_sub',
'ssub' => '_sub',
'sadd' => 'add',
'or' => 'any_true',
'and' => 'all_true',
'equal_i' => 'refs_equal',
'nequal_i' => 'refs_nequal'
);
# these are wrapped with sub because they are evaluated only as needed
# (short-circuiting: later operands must not run unless required).
my %wrap_sub = map { $_ => 1 } qw(
any_true
all_true
);
# Render one compiled operation (an array ref of [ op_name, @operands ])
# as the key/value pair consumed by the code generator.  Side effect:
# records the operation in the main document's required_operations set.
# Operands that are themselves compiled operations are formatted
# recursively; for short-circuiting operations each operand's code is
# wrapped in "sub { ... }" so it is only evaluated on demand.
sub op_fmt {
my ($op, $op_name, @items) = (shift, @{ +shift });
$op_name = $map{$op_name} || $op_name;
my $main = $op->main;
$main->{required_operations}{$op_name}++;
# consider return when wrapped with sub{}?
return operation => {
operation => $op_name,
pos => $op->{create_pos},
items => join ', ', map {
my $fmt = ref $_ eq 'ARRAY' ?
F::get_perl_fmt($op->op_fmt($_)) :
$_->perl_fmt_do;
$wrap_sub{$op_name} ? "sub { $fmt }" : $fmt
} @items
};
}
# Compile this operation tree, then render the compiled form as Perl.
sub perl_fmt {
    my $self = shift;
    my @compiled = $self->compile;
    return $self->op_fmt(@compiled);
}
# True if $candidate is a blessed Operator node; when a non-empty $wanted
# type is given, the node's op_type must match it as well.  Returns 1 on
# success, nothing otherwise.
sub is_op {
    my ($candidate, $wanted) = @_;
    return unless blessed $candidate;
    return unless $candidate->type eq 'Operator';
    if (defined $wanted and length $wanted) {
        return unless $candidate->op_type eq $wanted;
    }
    return 1;
}
1
| cooper/ferret | lib/F/Operation.pm | Perl | bsd-3-clause | 4,498 |
package Bio::WGS2NCBI::MrnaFeature;
use Bio::WGS2NCBI::Logger;
use Bio::WGS2NCBI::StrandedFeature;
use base 'Bio::WGS2NCBI::StrandedFeature';
=head1 NAME
Bio::WGS2NCBI::MrnaFeature - mRNA feature
=head1 DESCRIPTION
Objects of this class represent an mRNA feature for a gene.
=head1 SEE ALSO
L<Bio::WGS2NCBI::StrandedFeature>
=head1 METHODS
=over
=item new()
Returns a new mRNA feature. Requires the arguments 'product', 'protein_id' and
'transcript_id', for example:
my $mrna = Bio::WGS2NCBI::MrnaFeature->new(
product => $product,
protein_id => $protein_id,
transcript_id => $transcript_id,
);
=cut
# Constructs a new mRNA feature.  The qualifiers 'product', 'protein_id'
# and 'transcript_id' are expected; a missing one is only reported at
# DEBUG level and construction still proceeds (lenient by design).
sub new {
my $class = shift;
my %args = @_;
if ( not $args{'product'} or not $args{'protein_id'} or not $args{'transcript_id'} ) {
# message fixed: the former trailing ", product_id" named a
# qualifier this class does not use (see qualifiers() below)
DEBUG "need product, protein_id and transcript_id";
}
$class->SUPER::new(%args);
}
=item qualifiers()
Returns the feature qualifiers for mRNA features, i.e. 'product', 'protein_id' and
'transcript_id'
=cut
# The feature qualifiers emitted for mRNA features, in output order.
sub qualifiers {
    return ( 'product', 'protein_id', 'transcript_id' );
}
# Read/write accessor for the mRNA product description.
sub product {
    my ( $self, @new ) = @_;
    $self->{'product'} = $new[0] if @new;
    return $self->{'product'};
}

# Read/write accessor for the protein identifier.
sub protein_id {
    my ( $self, @new ) = @_;
    $self->{'protein_id'} = $new[0] if @new;
    return $self->{'protein_id'};
}

# Read/write accessor for the transcript identifier.
sub transcript_id {
    my ( $self, @new ) = @_;
    $self->{'transcript_id'} = $new[0] if @new;
    return $self->{'transcript_id'};
}
=back
=cut
1; | naturalis/wgs2ncbi | lib/Bio/WGS2NCBI/MrnaFeature.pm | Perl | bsd-3-clause | 1,414 |
=pod
=head1 NAME
BIO_s_accept, BIO_set_accept_port, BIO_get_accept_port,
BIO_set_nbio_accept, BIO_set_accept_bios, BIO_set_bind_mode,
BIO_get_bind_mode, BIO_do_accept - accept BIO
=head1 SYNOPSIS
#include <openssl/bio.h>
BIO_METHOD *BIO_s_accept(void);
long BIO_set_accept_port(BIO *b, char *name);
char *BIO_get_accept_port(BIO *b);
BIO *BIO_new_accept(char *host_port);
long BIO_set_nbio_accept(BIO *b, int n);
long BIO_set_accept_bios(BIO *b, char *bio);
long BIO_set_bind_mode(BIO *b, long mode);
long BIO_get_bind_mode(BIO *b, long dummy);
#define BIO_BIND_NORMAL 0
#define BIO_BIND_REUSEADDR_IF_UNUSED 1
#define BIO_BIND_REUSEADDR 2
int BIO_do_accept(BIO *b);
=head1 DESCRIPTION
BIO_s_accept() returns the accept BIO method. This is a wrapper
round the platform's TCP/IP socket accept routines.
Using accept BIOs, TCP/IP connections can be accepted and data
transferred using only BIO routines. In this way any platform
specific operations are hidden by the BIO abstraction.
Read and write operations on an accept BIO will perform I/O
on the underlying connection. If no connection is established
and the port (see below) is set up properly then the BIO
waits for an incoming connection.
Accept BIOs support BIO_puts() but not BIO_gets().
If the close flag is set on an accept BIO then any active
connection on that chain is shutdown and the socket closed when
the BIO is freed.
Calling BIO_reset() on a accept BIO will close any active
connection and reset the BIO into a state where it awaits another
incoming connection.
BIO_get_fd() and BIO_set_fd() can be called to retrieve or set
the accept socket. See L<BIO_s_fd(3)|BIO_s_fd(3)>
BIO_set_accept_port() uses the string B<name> to set the accept
port. The port is represented as a string of the form "host:port",
where "host" is the interface to use and "port" is the port.
Either or both values can be "*" which is interpreted as meaning
any interface or port respectively. "port" has the same syntax
as the port specified in BIO_set_conn_port() for connect BIOs,
that is it can be a numerical port string or a string to lookup
using getservbyname() and a string table.
BIO_new_accept() combines BIO_new() and BIO_set_accept_port() into
a single call: that is it creates a new accept BIO with port
B<host_port>.
BIO_set_nbio_accept() sets the accept socket to blocking mode
(the default) if B<n> is 0 or non blocking mode if B<n> is 1.
BIO_set_accept_bios() can be used to set a chain of BIOs which
will be duplicated and prepended to the chain when an incoming
connection is received. This is useful if, for example, a
buffering or SSL BIO is required for each connection. The
chain of BIOs must not be freed after this call, they will
be automatically freed when the accept BIO is freed.
BIO_set_bind_mode() and BIO_get_bind_mode() set and retrieve
the current bind mode. If BIO_BIND_NORMAL (the default) is set
then another socket cannot be bound to the same port. If
BIO_BIND_REUSEADDR is set then other sockets can bind to the
same port. If BIO_BIND_REUSEADDR_IF_UNUSED is set then and
attempt is first made to use BIO_BIN_NORMAL, if this fails
and the port is not in use then a second attempt is made
using BIO_BIND_REUSEADDR.
BIO_do_accept() serves two functions. When it is first
called, after the accept BIO has been setup, it will attempt
to create the accept socket and bind an address to it. Second
and subsequent calls to BIO_do_accept() will await an incoming
connection, or request a retry in non blocking mode.
=head1 NOTES
When an accept BIO is at the end of a chain it will await an
incoming connection before processing I/O calls. When an accept
BIO is not at the end of a chain it passes I/O calls to the next
BIO in the chain.
When a connection is established a new socket BIO is created for
the connection and appended to the chain. That is the chain is now
accept->socket. This effectively means that attempting I/O on
an initial accept socket will await an incoming connection then
perform I/O on it.
If any additional BIOs have been set using BIO_set_accept_bios()
then they are placed between the socket and the accept BIO,
that is the chain will be accept->otherbios->socket.
If a server wishes to process multiple connections (as is normally
the case) then the accept BIO must be made available for further
incoming connections. This can be done by waiting for a connection and
then calling:
connection = BIO_pop(accept);
After this call B<connection> will contain a BIO for the recently
established connection and B<accept> will now be a single BIO
again which can be used to await further incoming connections.
If no further connections will be accepted the B<accept> can
be freed using BIO_free().
If only a single connection will be processed it is possible to
perform I/O using the accept BIO itself. This is often undesirable
however because the accept BIO will still accept additional incoming
connections. This can be resolved by using BIO_pop() (see above)
and freeing up the accept BIO after the initial connection.
If the underlying accept socket is non-blocking and BIO_do_accept() is
called to await an incoming connection it is possible for
BIO_should_io_special() to be true with the reason BIO_RR_ACCEPT. If this happens
then it is an indication that an accept attempt would block: the application
should take appropriate action to wait until the underlying socket has
accepted a connection and retry the call.
BIO_set_accept_port(), BIO_get_accept_port(), BIO_set_nbio_accept(),
BIO_set_accept_bios(), BIO_set_bind_mode(), BIO_get_bind_mode() and
BIO_do_accept() are macros.
=head1 RETURN VALUES
TBA
=head1 EXAMPLE
This example accepts two connections on port 4444, sends messages
down each and finally closes both down.
BIO *abio, *cbio, *cbio2;
ERR_load_crypto_strings();
abio = BIO_new_accept("4444");
/* First call to BIO_accept() sets up accept BIO */
if(BIO_do_accept(abio) <= 0) {
fprintf(stderr, "Error setting up accept\n");
ERR_print_errors_fp(stderr);
exit(0);
}
/* Wait for incoming connection */
if(BIO_do_accept(abio) <= 0) {
fprintf(stderr, "Error accepting connection\n");
ERR_print_errors_fp(stderr);
exit(0);
}
fprintf(stderr, "Connection 1 established\n");
/* Retrieve BIO for connection */
cbio = BIO_pop(abio);
BIO_puts(cbio, "Connection 1: Sending out Data on initial connection\n");
fprintf(stderr, "Sent out data on connection 1\n");
/* Wait for another connection */
if(BIO_do_accept(abio) <= 0) {
fprintf(stderr, "Error accepting connection\n");
ERR_print_errors_fp(stderr);
exit(0);
}
fprintf(stderr, "Connection 2 established\n");
/* Close accept BIO to refuse further connections */
cbio2 = BIO_pop(abio);
BIO_free(abio);
BIO_puts(cbio2, "Connection 2: Sending out Data on second\n");
fprintf(stderr, "Sent out data on connection 2\n");
BIO_puts(cbio, "Connection 1: Second connection established\n");
/* Close the two established connections */
BIO_free(cbio);
BIO_free(cbio2);
=head1 SEE ALSO
TBA
| GaloisInc/hacrypto | src/C/libssl/HEAD/src/doc/crypto/BIO_s_accept.pod | Perl | bsd-3-clause | 7,022 |
/*****************************************************************************
* This file is part of the Prolog Development Tool (PDT)
*
* WWW: http://sewiki.iai.uni-bonn.de/research/pdt/start
* Mail: pdt@lists.iai.uni-bonn.de
* Copyright (C): 2004-2012, CS Dept. III, University of Bonn
*
* All rights reserved. This program is made available under the terms
* of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*
****************************************************************************/
:- module(parse_util, [ generate_facts/1,
update_facts/2,
assert_new_node/4,
cleanup_nodes/0,
cleanup_nodes/1,
cleanup_computed_facts/0]).
:- reexport('pdt_factbase.pl').
:- ensure_loaded('util/walking_prolog_files.pl').
:- use_module(preparser).
:- use_module(predicates).
:- use_module(load_graph).
:- use_module(modules_and_visibility).
:- use_module(literal_parser).
:- use_module(cross_reference_builder).
% generate_facts(+Project) is det
%
% Rebuilds the whole factbase for Project from scratch: clears all stored
% nodes, parses every file, then derives the load graph, predicates,
% directive collections, predicate properties, the visibility graph,
% the literal bodies and finally the edges.
generate_facts(Project):-
cleanup_nodes,
walking_file_list(Project,parse,1),
build_load_graph,
derive_all_predicates,
derive_directive_collections,
compute_all_predicate_properties,
compute_visibility_graph,
parse_bodies,
derive_edges.
%generate_facts(Project):-
% writeln('cleaning up'),
% cleanup_nodes,
% writeln('start parsing clauses'),
% time(walking_file_list(Project,parse,1)),
% writeln('generating loadgraph'),
% time(build_load_graph),
% writeln('generating predicates'),
% time(derive_all_predicates),
% writeln('genereating directive collections'),
% time(derive_directive_collections),
% writeln('compute_predicate_properties'),
% time(compute_all_predicate_properties),
% writeln('compute_visibilities'),
% time(compute_visibility_graph),
% writeln('parse literals'),
% time(parse_bodies),
% writeln('generate edges'),
% time(derive_edges).
% update_facts(+File, +Project) is det
%
% Incrementally refreshes the factbase after File changed: retracts the
% facts belonging to File plus all computed (derived) facts, re-parses
% File together with the rest of Project, and re-derives the dependent
% information.
update_facts(File, Project):-
cleanup_nodes(File),
cleanup_computed_facts,
walking_file_list([File|Project],parse,1),
build_load_graph,
derive_predicates_of_files([File|Project]),
derive_directive_collection_of_files([File|Project]),
compute_predicate_properties_for_files([File|Project]),
compute_visibility_graph,
parse_bodies,
derive_edges.
%update_facts(File, Project):-
% format('cleaning up facts for ~w~n',File),
% cleanup_nodes(File),
% cleanup_computed_facts,
% writeln('start parsing clauses'),
% time(walking_file_list(Project,parse,1)),
% writeln('generating loadgraph'),
% time(build_load_graph),
% writeln('generating predicates'),
% time(derive_all_predicates),
% writeln('genereating directive collections'),
% time(derive_onloads),
% writeln('compute_predicate_properties'),
% time(compute_all_predicate_properties),
% writeln('compute_visibilities'),
% time(compute_visibility_graph),
% writeln('parse literals'),
% time(parse_bodies),
% writeln('generate edges'),
% time(derive_edges).
/*
* cleanup_nodes/0 is det
*
* Retracts everything a former run of parse_util:generate_facts/1 could
* have asserted (base facts, reverse indexes and derived facts) and
* resets the node id counter.
**/
cleanup_nodes:-
retractall(fileT(_,_,_)),
retractall(literalT(_,_,_,_,_,_)),
retractall(metaT(_,_,_,_,_,_)),
retractall(headT(_,_,_,_,_)),
retractall(clauseT(_,_,_,_,_)),
retractall(directiveT(_,_,_)),
retractall(operatorT(_,_,_,_,_,_,_,_)),
retractall(predicateT(_,_,_,_,_)),
retractall(onloadT(_,_,_)),
retractall(dynamicT(_,_)),
retractall(load_dir(_,_,_)),
retractall(import_dir(_,_)),
retractall(export_dir(_,_)),
retractall(library_dir(_,_,_)),
retractall(property_dir(_,_,_)),
retractall(transparentT(_,_)),
retractall(multifileT(_,_)),
retractall(meta_predT(_,_)),
retractall(termT(_,_)),
retractall(filePosT(_,_,_)),
retractall(literalT_ri(_,_,_,_)),
retractall(fileT_ri(_,_)),
retractall(predicateT_ri(_,_,_,_)),
retractall(pred_edge(_,_)),
retractall(onload_edge(_,_)),
retractall(pos_and_vars(_,_,_)),
retractall(error(_,_,_)),
retractall(warning(_,_,_)),
cleanup_computed_facts,
ctc_id_init_pdt.
% cleanup_computed_facts is det
%
% Retracts only the derived (computed) facts -- call/load edges, built-in
% call records and discovered meta-predicates -- leaving the parsed base
% facts intact.
cleanup_computed_facts:-
retractall(call_edge(_,_)),
retractall(load_edge(_,_,_,_)),
retractall(call_built_in(_,_,_,_)),
retractall(meta_predT(_,found)).
% cleanup_nodes(+File) is det
%
% Retracts all facts belonging to File (looked up via fileT_ri/2).
% The second clause lets the predicate succeed silently for files that
% were never parsed.
cleanup_nodes(File):-
fileT_ri(File,Id), !,
clean_file_entries(Id),
retractall(fileT_ri(File,Id)),
retractall(fileT(Id,_,_)),
retractall(error(_,_,Id)). %TODO: possibly move this into clean_general_references_to/1
cleanup_nodes(_).
% clean_file_entries(+FileId) is det
%
% Failure-driven loops: each clause enumerates one kind of entry owned by
% the file (directives, clauses, predicates, onload nodes), retracts it
% together with its references, then fails to backtrack into the next
% entry.  The final catch-all clause terminates the loops successfully.
clean_file_entries(FileId):-
directiveT(DirId,FileId,_),
termT(DirId,_),
clean_directives(DirId),
% retractall(directiveT(DirId,_,_)),
% retractall(import_dir(_,DirId)),
% retractall(export_dir(_,DirId)),
% retractall(load_dir(DirId,_,_)),
clean_general_references_to(DirId),
retractall(directiveT(DirId,_,_)),
fail.
clean_file_entries(FileId):-
clauseT(ClauseId,FileId,_,_,_),
clean_clause_references(ClauseId),
clean_general_references_to(ClauseId),
retractall(clauseT(ClauseId,_,_,_,_)),
fail.
clean_file_entries(FileId):-
predicateT(PredId,FileId,_,_,_),
retractall(predicateT(PredId,_,_,_,_)),
retractall(predicateT_ri(_,_,_,PredId)),
fail.
clean_file_entries(FileId):-
onloadT(Id,FileId,_),
retractall(onloadT(Id,_,_)),
fail.
clean_file_entries(_).
% clean_clause_references(+ClauseId) is det
%
% Failure-driven loops removing all facts hanging off a clause: its head
% terms, its (meta) literals with their reverse-index entries, and the
% pred_edge/2 links of the clause.  The final clause terminates the
% loops successfully.
clean_clause_references(ClauseId):-
headT(HeadId,ClauseId,_,_,_),
clean_clause_references(HeadId),
% headT is a 5-ary fact (see cleanup_nodes/0 and the goal above); the
% former 6-argument retractall could never match, so head facts leaked.
retractall(headT(HeadId,_,_,_,_)),
fail.
clean_clause_references(ClauseId):-
literalT(LitId,_,ClauseId,_,_,_),
clean_clause_references(LitId),
% Note: retractall/1 does not bind M, F, A -- they stay unbound, so the
% literalT_ri retract below removes every index entry for LitId.
retractall(literalT(LitId,_,ClauseId,M,F,A)),
retractall(literalT_ri(F,A,M,LitId)),
retractall(metaT(LitId,_,ClauseId,_,_,_)),
retractall(pred_edge(ClauseId,_)),
fail.
clean_clause_references(_).
% clean_directives(+DirectiveId) is det
%
% Retracts every directive-related fact that refers to DirectiveId.
% NOTE(review): meta_pred/2 below does not match the meta_predT/2 functor
% used elsewhere in this file -- confirm which name the factbase asserts.
clean_directives(DirectiveId):-
retractall(import_dir(_,DirectiveId)),
retractall(export_dir(_,DirectiveId)),
retractall(load_dir(DirectiveId,_,_)),
retractall(property_dir(DirectiveId,_,_)),
retractall(library_dir(_,_,DirectiveId)),
retractall(meta_pred(_,DirectiveId)),
retractall(onload_edge(DirectiveId,_)).
% clean_general_references_to(+Id) is det
%
% Retracts the generic per-node facts (term, file position, warnings,
% position/variable info) attached to any node id.
clean_general_references_to(Id):-
retractall(termT(Id,_)),
retractall(filePosT(Id,_,_)),
retractall(warning(Id,_,_)),
retractall(pos_and_vars(Id,_,_)).
/*
* assert_new_node(+Term,+From,+To,-Id)
* Creates a new node identity Id and asserts a termT/2 fact for the
* source term plus a filePosT/3 fact for its position, stored as the
* start offset From together with the length (To - From).
*/
assert_new_node(Term,From,To,Id):-
new_node_id_pdt(Id),
assert(termT(Id,Term)),
Length is To - From,
assert(filePosT(Id,From,Length)).
| TeamSPoon/logicmoo_base | prolog/logicmoo/pdt_server/pdt.builder/prolog-src/parse_util.pl | Perl | mit | 6,653 |
package Google::Ads::AdWords::v201406::IdeaType;

# Auto-generated SOAP::WSDL class for the AdWords v201406 IdeaType
# simpleType (see the POD below); a restriction of xsd:string.
use strict;
use warnings;
# XML namespace of the defining schema.
sub get_xmlns { 'https://adwords.google.com/api/adwords/o/v201406'};
# derivation by restriction
use base qw(
SOAP::WSDL::XSD::Typelib::Builtin::string);
1;
__END__
=pod
=head1 NAME
=head1 DESCRIPTION
Perl data type class for the XML Schema defined simpleType
IdeaType from the namespace https://adwords.google.com/api/adwords/o/v201406.
Represents the type of idea. <span class="constraint AdxEnabled">This is disabled for AdX.</span>
This class is derived from
SOAP::WSDL::XSD::Typelib::Builtin::string
. SOAP::WSDL's schema implementation does not validate data, so you can use it exactly
like its base type.
# Description of restrictions not implemented yet.
=head1 METHODS
=head2 new
Constructor.
=head2 get_value / set_value
Getter and setter for the simpleType's value.
=head1 OVERLOADING
Depending on the simple type's base type, the following operations are overloaded
Stringification
Numerification
Boolification
Check L<SOAP::WSDL::XSD::Typelib::Builtin> for more information.
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| gitpan/GOOGLE-ADWORDS-PERL-CLIENT | lib/Google/Ads/AdWords/v201406/IdeaType.pm | Perl | apache-2.0 | 1,150 |
package # hide package from pause
DBIx::Class::PK::Auto::Pg;
use strict;
use warnings;
# Deprecated compatibility shim (see POD below): auto-increment PKs for
# Pg are handled by Storage now; loading PK::Auto is all that remains.
use base qw/DBIx::Class/;
__PACKAGE__->load_components(qw/PK::Auto/);
1;
__END__
=head1 NAME
DBIx::Class::PK::Auto::Pg - (DEPRECATED) Automatic primary key class for Pg
=head1 SYNOPSIS
Just load PK::Auto instead; auto-inc is now handled by Storage.
=head1 FURTHER QUESTIONS?
Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
=head1 COPYRIGHT AND LICENSE
This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
redistribute it and/or modify it under the same terms as the
L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
| ray66rus/vndrv | local/lib/perl5/DBIx/Class/PK/Auto/Pg.pm | Perl | apache-2.0 | 748 |
package Paws::OpsWorks::ListTagsResult;
use Moose;
# Pagination token for the next ListTags request; null/undef when the
# previous request returned all remaining results (see POD below).
has NextToken => (is => 'ro', isa => 'Str');
# Key/value tag pairs attached to the stack or layer.
has Tags => (is => 'ro', isa => 'Paws::OpsWorks::Tags');
has _request_id => (is => 'ro', isa => 'Str');
### main pod documentation begin ###
=head1 NAME
Paws::OpsWorks::ListTagsResult
=head1 ATTRIBUTES
=head2 NextToken => Str
If a paginated request does not return all of the remaining results,
this parameter is set to a token that you can assign to the request
object's C<NextToken> parameter to get the next set of results. If the
previous paginated request returned all of the remaining results, this
parameter is set to C<null>.
=head2 Tags => L<Paws::OpsWorks::Tags>
A set of key-value pairs that contain tag keys and tag values that are
attached to a stack or layer.
=head2 _request_id => Str
=cut
1; | ioanrogers/aws-sdk-perl | auto-lib/Paws/OpsWorks/ListTagsResult.pm | Perl | apache-2.0 | 828 |
use v5.18;

$_ = <<'HERE';
Out "Top 'Middle "Bottom" Middle' Out"
HERE

my @matches;

# Recursive match for (possibly nested) single/double quoted strings
# using the (?(DEFINE) ...) idiom: QUOTE matches an opening mark, any
# mix of non-mark runs and nested QUOTEs, then the *same* mark again
# (\g{quote}).  Each completed QUOTE pushes its text ($^N, the most
# recently closed capture) onto @matches via the embedded (?{ }) block.
#
# Restored from extraction damage: "(?&QUOTE..." had been HTML-entity
# decoded to '(?"E...' (&quot -> "), and the lone '|' alternation line
# was lost; as corrupted the pattern did not even compile.
say "Matched!" if m/
    (?(DEFINE)
        (?<QUOTE_MARK> ['"])
        (?<NOT_QUOTE_MARK> [^'"])
        (?<QUOTE>
            (
                (?<quote>(?&QUOTE_MARK))
                (?:
                    (?&NOT_QUOTE_MARK)++
                    |
                    (?&QUOTE)
                )*
                \g{quote}
            )
            (?{ push @matches, $^N })
        )
    )
    (?&QUOTE)
/x;
| mishin/presentation | regex_recursion/amelia_9.pl | Perl | apache-2.0 | 589 |
=pod
=head1 NAME
SSL_set_session - set a TLS/SSL session to be used during TLS/SSL connect
=head1 SYNOPSIS
#include <openssl/ssl.h>
int SSL_set_session(SSL *ssl, SSL_SESSION *session);
=head1 DESCRIPTION
SSL_set_session() sets B<session> to be used when the TLS/SSL connection
is to be established. SSL_set_session() is only useful for TLS/SSL clients.
When the session is set, the reference count of B<session> is incremented
by 1. If the session is not reused, the reference count is decremented
again during SSL_connect(). Whether the session was reused can be queried
with the L<SSL_session_reused(3)|SSL_session_reused(3)> call.
If there is already a session set inside B<ssl> (because it was set with
SSL_set_session() before or because the same B<ssl> was already used for
a connection), SSL_SESSION_free() will be called for that session.
=head1 NOTES
SSL_SESSION objects keep internal link information about the session cache
list, when being inserted into one SSL_CTX object's session cache.
One SSL_SESSION object, regardless of its reference count, must therefore
only be used with one SSL_CTX object (and the SSL objects created
from this SSL_CTX object).
=head1 RETURN VALUES
The following return values can occur:
=over 4
=item 0
The operation failed; check the error stack to find out the reason.
=item 1
The operation succeeded.
=back
=head1 SEE ALSO
L<ssl(3)|ssl(3)>, L<SSL_SESSION_free(3)|SSL_SESSION_free(3)>,
L<SSL_get_session(3)|SSL_get_session(3)>,
L<SSL_session_reused(3)|SSL_session_reused(3)>,
L<SSL_CTX_set_session_cache_mode(3)|SSL_CTX_set_session_cache_mode(3)>
=cut
| caidongyun/nginx-openresty-windows | nginx/objs/lib/openssl-1.0.1g/doc/ssl/SSL_set_session.pod | Perl | bsd-2-clause | 1,676 |
#! /usr/bin/env perl
# Copyright 2005-2021 The OpenSSL Project Authors. All Rights Reserved.
#
# Licensed under the Apache License 2.0 (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.openssl.org/source/license.html
# ====================================================================
# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
# project. The module is, however, dual licensed under OpenSSL and
# CRYPTOGAMS licenses depending on where you obtain it. For further
# details see http://www.openssl.org/~appro/cryptogams/.
# ====================================================================
# December 2005
#
# Pure SPARCv9/8+ and IALU-only bn_mul_mont implementation. The reasons
# for undertaken effort are multiple. First of all, UltraSPARC is not
# the whole SPARCv9 universe and other VIS-free implementations deserve
# optimized code as much. Secondly, newly introduced UltraSPARC T1,
# a.k.a. Niagara, has shared FPU and concurrent FPU-intensive paths,
# such as sparcv9a-mont, will simply sink it. Yes, T1 is equipped with
# several integrated RSA/DSA accelerator circuits accessible through
# kernel driver [only(*)], but having decent user-land software
# implementation is important too. Finally, reasons like desire to
# experiment with dedicated squaring procedure. Yes, this module
# implements one, because it was easiest to draft it in SPARCv9
# instructions...
# (*) Engine accessing the driver in question is on my TODO list.
# For reference, accelerator is estimated to give 6 to 10 times
# improvement on single-threaded RSA sign. It should be noted
# that 6-10x improvement coefficient does not actually mean
# something extraordinary in terms of absolute [single-threaded]
# performance, as SPARCv9 instruction set is by all means least
# suitable for high performance crypto among other 64 bit
# platforms. 6-10x factor simply places T1 in same performance
# domain as say AMD64 and IA-64. Improvement of RSA verify don't
# appear impressive at all, but it's the sign operation which is
# far more critical/interesting.
# You might notice that inner loops are modulo-scheduled:-) This has
# essentially negligible impact on UltraSPARC performance, it's
# Fujitsu SPARC64 V users who should notice and hopefully appreciate
# the advantage... Currently this module surpasses sparcv9a-mont.pl
# by ~20% on UltraSPARC-III and later cores, but recall that sparcv9a
# module still have hidden potential [see TODO list there], which is
# estimated to be larger than 20%...
# The last command-line argument (if any) names the output file; redirect
# STDOUT to it.  Three-arg open avoids interpreting characters in the
# filename as an open mode, and a failed redirect now dies instead of
# silently writing assembly to the terminal.
$output = pop;
if ($output) {
    open STDOUT, '>', $output or die "can't open $output: $!";
}
# Register allocation for the bn_mul_mont(rp, ap, bp, np, n0, num) entry
# point: %i0-%i5 carry the C arguments; the %o/%g/%l registers below hold
# the working state of the multiplication/reduction loops.
# int bn_mul_mont(
$rp="%i0"; # BN_ULONG *rp,
$ap="%i1"; # const BN_ULONG *ap,
$bp="%i2"; # const BN_ULONG *bp,
$np="%i3"; # const BN_ULONG *np,
$n0="%i4"; # const BN_ULONG *n0,
$num="%i5"; # int num);
$frame="STACK_FRAME";
$bias="STACK_BIAS";
# carry accumulators for the a*b and n*m1 product streams
$car0="%o0";
$car1="%o1";
$car2="%o2"; # 1 bit
$acc0="%o3";
$acc1="%o4";
$mask="%g1"; # 32 bits, what a waste...
$tmp0="%g4";
$tmp1="%g5";
# loop counters and per-iteration pointers/operands
$i="%l0";
$j="%l1";
$mul0="%l2";
$mul1="%l3";
$tp="%l4";
$apj="%l5";
$npj="%l6";
$tpj="%l7";
$fname="bn_mul_mont_int";
# Generic multiplication path: schoolbook multiply with interleaved
# Montgomery reduction (.L1st handles the first b-word, .Louter/.Linner
# the rest), followed by the final conditional subtraction (.Lsub) and
# constant-time copy-back (.Lcopy).  The heredoc body is emitted verbatim
# as assembly ('!' starts an asm comment), so no comments are added inside.
$code=<<___;
#ifndef __ASSEMBLER__
# define __ASSEMBLER__ 1
#endif
#include "crypto/sparc_arch.h"
.section ".text",#alloc,#execinstr
.global $fname
.align 32
$fname:
cmp %o5,4 ! 128 bits minimum
bge,pt %icc,.Lenter
sethi %hi(0xffffffff),$mask
retl
clr %o0
.align 32
.Lenter:
save %sp,-$frame,%sp
sll $num,2,$num ! num*=4
or $mask,%lo(0xffffffff),$mask
ld [$n0],$n0
cmp $ap,$bp
and $num,$mask,$num
ld [$bp],$mul0 ! bp[0]
nop
add %sp,$bias,%o7 ! real top of stack
ld [$ap],$car0 ! ap[0] ! redundant in squaring context
sub %o7,$num,%o7
ld [$ap+4],$apj ! ap[1]
and %o7,-1024,%o7
ld [$np],$car1 ! np[0]
sub %o7,$bias,%sp ! alloca
ld [$np+4],$npj ! np[1]
be,pt SIZE_T_CC,.Lbn_sqr_mont
mov 12,$j
mulx $car0,$mul0,$car0 ! ap[0]*bp[0]
mulx $apj,$mul0,$tmp0 !prologue! ap[1]*bp[0]
and $car0,$mask,$acc0
add %sp,$bias+$frame,$tp
ld [$ap+8],$apj !prologue!
mulx $n0,$acc0,$mul1 ! "t[0]"*n0
and $mul1,$mask,$mul1
mulx $car1,$mul1,$car1 ! np[0]*"t[0]"*n0
mulx $npj,$mul1,$acc1 !prologue! np[1]*"t[0]"*n0
srlx $car0,32,$car0
add $acc0,$car1,$car1
ld [$np+8],$npj !prologue!
srlx $car1,32,$car1
mov $tmp0,$acc0 !prologue!
.L1st:
mulx $apj,$mul0,$tmp0
mulx $npj,$mul1,$tmp1
add $acc0,$car0,$car0
ld [$ap+$j],$apj ! ap[j]
and $car0,$mask,$acc0
add $acc1,$car1,$car1
ld [$np+$j],$npj ! np[j]
srlx $car0,32,$car0
add $acc0,$car1,$car1
add $j,4,$j ! j++
mov $tmp0,$acc0
st $car1,[$tp]
cmp $j,$num
mov $tmp1,$acc1
srlx $car1,32,$car1
bl %icc,.L1st
add $tp,4,$tp ! tp++
!.L1st
mulx $apj,$mul0,$tmp0 !epilogue!
mulx $npj,$mul1,$tmp1
add $acc0,$car0,$car0
and $car0,$mask,$acc0
add $acc1,$car1,$car1
srlx $car0,32,$car0
add $acc0,$car1,$car1
st $car1,[$tp]
srlx $car1,32,$car1
add $tmp0,$car0,$car0
and $car0,$mask,$acc0
add $tmp1,$car1,$car1
srlx $car0,32,$car0
add $acc0,$car1,$car1
st $car1,[$tp+4]
srlx $car1,32,$car1
add $car0,$car1,$car1
st $car1,[$tp+8]
srlx $car1,32,$car2
mov 4,$i ! i++
ld [$bp+4],$mul0 ! bp[1]
.Louter:
add %sp,$bias+$frame,$tp
ld [$ap],$car0 ! ap[0]
ld [$ap+4],$apj ! ap[1]
ld [$np],$car1 ! np[0]
ld [$np+4],$npj ! np[1]
ld [$tp],$tmp1 ! tp[0]
ld [$tp+4],$tpj ! tp[1]
mov 12,$j
mulx $car0,$mul0,$car0
mulx $apj,$mul0,$tmp0 !prologue!
add $tmp1,$car0,$car0
ld [$ap+8],$apj !prologue!
and $car0,$mask,$acc0
mulx $n0,$acc0,$mul1
and $mul1,$mask,$mul1
mulx $car1,$mul1,$car1
mulx $npj,$mul1,$acc1 !prologue!
srlx $car0,32,$car0
add $acc0,$car1,$car1
ld [$np+8],$npj !prologue!
srlx $car1,32,$car1
mov $tmp0,$acc0 !prologue!
.Linner:
mulx $apj,$mul0,$tmp0
mulx $npj,$mul1,$tmp1
add $tpj,$car0,$car0
ld [$ap+$j],$apj ! ap[j]
add $acc0,$car0,$car0
add $acc1,$car1,$car1
ld [$np+$j],$npj ! np[j]
and $car0,$mask,$acc0
ld [$tp+8],$tpj ! tp[j]
srlx $car0,32,$car0
add $acc0,$car1,$car1
add $j,4,$j ! j++
mov $tmp0,$acc0
st $car1,[$tp] ! tp[j-1]
srlx $car1,32,$car1
mov $tmp1,$acc1
cmp $j,$num
bl %icc,.Linner
add $tp,4,$tp ! tp++
!.Linner
mulx $apj,$mul0,$tmp0 !epilogue!
mulx $npj,$mul1,$tmp1
add $tpj,$car0,$car0
add $acc0,$car0,$car0
ld [$tp+8],$tpj ! tp[j]
and $car0,$mask,$acc0
add $acc1,$car1,$car1
srlx $car0,32,$car0
add $acc0,$car1,$car1
st $car1,[$tp] ! tp[j-1]
srlx $car1,32,$car1
add $tpj,$car0,$car0
add $tmp0,$car0,$car0
and $car0,$mask,$acc0
add $tmp1,$car1,$car1
add $acc0,$car1,$car1
st $car1,[$tp+4] ! tp[j-1]
srlx $car0,32,$car0
add $i,4,$i ! i++
srlx $car1,32,$car1
add $car0,$car1,$car1
cmp $i,$num
add $car2,$car1,$car1
st $car1,[$tp+8]
srlx $car1,32,$car2
bl,a %icc,.Louter
ld [$bp+$i],$mul0 ! bp[i]
!.Louter
add $tp,12,$tp
.Ltail:
add $np,$num,$np
add $rp,$num,$rp
sub %g0,$num,%o7 ! k=-num
ba .Lsub
subcc %g0,%g0,%g0 ! clear %icc.c
.align 16
.Lsub:
ld [$tp+%o7],%o0
ld [$np+%o7],%o1
subccc %o0,%o1,%o1 ! tp[j]-np[j]
add $rp,%o7,$i
add %o7,4,%o7
brnz %o7,.Lsub
st %o1,[$i]
subccc $car2,0,$car2 ! handle upmost overflow bit
sub %g0,$num,%o7
.Lcopy:
ld [$tp+%o7],%o1 ! conditional copy
ld [$rp+%o7],%o0
st %g0,[$tp+%o7] ! zap tp
movcs %icc,%o1,%o0
st %o0,[$rp+%o7]
add %o7,4,%o7
brnz %o7,.Lcopy
nop
mov 1,%i0
ret
restore
___
########
######## .Lbn_sqr_mont gives up to 20% *overall* improvement over
######## code without following dedicated squaring procedure.
########
# presumably the bit shifted out when doubling cross-products in the
# squaring path (see the and/srlx and add/or $sbit sequences below)
$sbit="%o5";
$code.=<<___;
.align 32
.Lbn_sqr_mont:
mulx $mul0,$mul0,$car0 ! ap[0]*ap[0]
mulx $apj,$mul0,$tmp0 !prologue!
and $car0,$mask,$acc0
add %sp,$bias+$frame,$tp
ld [$ap+8],$apj !prologue!
mulx $n0,$acc0,$mul1 ! "t[0]"*n0
srlx $car0,32,$car0
and $mul1,$mask,$mul1
mulx $car1,$mul1,$car1 ! np[0]*"t[0]"*n0
mulx $npj,$mul1,$acc1 !prologue!
and $car0,1,$sbit
ld [$np+8],$npj !prologue!
srlx $car0,1,$car0
add $acc0,$car1,$car1
srlx $car1,32,$car1
mov $tmp0,$acc0 !prologue!
.Lsqr_1st:
mulx $apj,$mul0,$tmp0
mulx $npj,$mul1,$tmp1
add $acc0,$car0,$car0 ! ap[j]*a0+c0
add $acc1,$car1,$car1
ld [$ap+$j],$apj ! ap[j]
and $car0,$mask,$acc0
ld [$np+$j],$npj ! np[j]
srlx $car0,32,$car0
add $acc0,$acc0,$acc0
or $sbit,$acc0,$acc0
mov $tmp1,$acc1
srlx $acc0,32,$sbit
add $j,4,$j ! j++
and $acc0,$mask,$acc0
cmp $j,$num
add $acc0,$car1,$car1
st $car1,[$tp]
mov $tmp0,$acc0
srlx $car1,32,$car1
bl %icc,.Lsqr_1st
add $tp,4,$tp ! tp++
!.Lsqr_1st
mulx $apj,$mul0,$tmp0 ! epilogue
mulx $npj,$mul1,$tmp1
add $acc0,$car0,$car0 ! ap[j]*a0+c0
add $acc1,$car1,$car1
and $car0,$mask,$acc0
srlx $car0,32,$car0
add $acc0,$acc0,$acc0
or $sbit,$acc0,$acc0
srlx $acc0,32,$sbit
and $acc0,$mask,$acc0
add $acc0,$car1,$car1
st $car1,[$tp]
srlx $car1,32,$car1
add $tmp0,$car0,$car0 ! ap[j]*a0+c0
add $tmp1,$car1,$car1
and $car0,$mask,$acc0
srlx $car0,32,$car0
add $acc0,$acc0,$acc0
or $sbit,$acc0,$acc0
srlx $acc0,32,$sbit
and $acc0,$mask,$acc0
add $acc0,$car1,$car1
st $car1,[$tp+4]
srlx $car1,32,$car1
add $car0,$car0,$car0
or $sbit,$car0,$car0
add $car0,$car1,$car1
st $car1,[$tp+8]
srlx $car1,32,$car2
ld [%sp+$bias+$frame],$tmp0 ! tp[0]
ld [%sp+$bias+$frame+4],$tmp1 ! tp[1]
ld [%sp+$bias+$frame+8],$tpj ! tp[2]
ld [$ap+4],$mul0 ! ap[1]
ld [$ap+8],$apj ! ap[2]
ld [$np],$car1 ! np[0]
ld [$np+4],$npj ! np[1]
mulx $n0,$tmp0,$mul1
mulx $mul0,$mul0,$car0
and $mul1,$mask,$mul1
mulx $car1,$mul1,$car1
mulx $npj,$mul1,$acc1
add $tmp0,$car1,$car1
and $car0,$mask,$acc0
ld [$np+8],$npj ! np[2]
srlx $car1,32,$car1
add $tmp1,$car1,$car1
srlx $car0,32,$car0
add $acc0,$car1,$car1
and $car0,1,$sbit
add $acc1,$car1,$car1
srlx $car0,1,$car0
mov 12,$j
st $car1,[%sp+$bias+$frame] ! tp[0]=
srlx $car1,32,$car1
add %sp,$bias+$frame+4,$tp
.Lsqr_2nd:
mulx $apj,$mul0,$acc0
mulx $npj,$mul1,$acc1
add $acc0,$car0,$car0
add $tpj,$sbit,$sbit
ld [$ap+$j],$apj ! ap[j]
and $car0,$mask,$acc0
ld [$np+$j],$npj ! np[j]
srlx $car0,32,$car0
add $acc1,$car1,$car1
ld [$tp+8],$tpj ! tp[j]
add $acc0,$acc0,$acc0
add $j,4,$j ! j++
add $sbit,$acc0,$acc0
srlx $acc0,32,$sbit
and $acc0,$mask,$acc0
cmp $j,$num
add $acc0,$car1,$car1
st $car1,[$tp] ! tp[j-1]
srlx $car1,32,$car1
bl %icc,.Lsqr_2nd
add $tp,4,$tp ! tp++
!.Lsqr_2nd
mulx $apj,$mul0,$acc0
mulx $npj,$mul1,$acc1
add $acc0,$car0,$car0
add $tpj,$sbit,$sbit
and $car0,$mask,$acc0
srlx $car0,32,$car0
add $acc1,$car1,$car1
add $acc0,$acc0,$acc0
add $sbit,$acc0,$acc0
srlx $acc0,32,$sbit
and $acc0,$mask,$acc0
add $acc0,$car1,$car1
st $car1,[$tp] ! tp[j-1]
srlx $car1,32,$car1
add $car0,$car0,$car0
add $sbit,$car0,$car0
add $car0,$car1,$car1
add $car2,$car1,$car1
st $car1,[$tp+4]
srlx $car1,32,$car2
ld [%sp+$bias+$frame],$tmp1 ! tp[0]
ld [%sp+$bias+$frame+4],$tpj ! tp[1]
ld [$ap+8],$mul0 ! ap[2]
ld [$np],$car1 ! np[0]
ld [$np+4],$npj ! np[1]
mulx $n0,$tmp1,$mul1
and $mul1,$mask,$mul1
mov 8,$i
mulx $mul0,$mul0,$car0
mulx $car1,$mul1,$car1
and $car0,$mask,$acc0
add $tmp1,$car1,$car1
srlx $car0,32,$car0
add %sp,$bias+$frame,$tp
srlx $car1,32,$car1
and $car0,1,$sbit
srlx $car0,1,$car0
mov 4,$j
.Lsqr_outer:
.Lsqr_inner1:
mulx $npj,$mul1,$acc1
add $tpj,$car1,$car1
add $j,4,$j
ld [$tp+8],$tpj
cmp $j,$i
add $acc1,$car1,$car1
ld [$np+$j],$npj
st $car1,[$tp]
srlx $car1,32,$car1
bl %icc,.Lsqr_inner1
add $tp,4,$tp
!.Lsqr_inner1
add $j,4,$j
ld [$ap+$j],$apj ! ap[j]
mulx $npj,$mul1,$acc1
add $tpj,$car1,$car1
ld [$np+$j],$npj ! np[j]
srlx $car1,32,$tmp0
and $car1,$mask,$car1
add $tmp0,$sbit,$sbit
add $acc0,$car1,$car1
ld [$tp+8],$tpj ! tp[j]
add $acc1,$car1,$car1
st $car1,[$tp]
srlx $car1,32,$car1
add $j,4,$j
cmp $j,$num
be,pn %icc,.Lsqr_no_inner2
add $tp,4,$tp
.Lsqr_inner2:
mulx $apj,$mul0,$acc0
mulx $npj,$mul1,$acc1
add $tpj,$sbit,$sbit
add $acc0,$car0,$car0
ld [$ap+$j],$apj ! ap[j]
and $car0,$mask,$acc0
ld [$np+$j],$npj ! np[j]
srlx $car0,32,$car0
add $acc0,$acc0,$acc0
ld [$tp+8],$tpj ! tp[j]
add $sbit,$acc0,$acc0
add $j,4,$j ! j++
srlx $acc0,32,$sbit
and $acc0,$mask,$acc0
cmp $j,$num
add $acc0,$car1,$car1
add $acc1,$car1,$car1
st $car1,[$tp] ! tp[j-1]
srlx $car1,32,$car1
bl %icc,.Lsqr_inner2
add $tp,4,$tp ! tp++
.Lsqr_no_inner2:
mulx $apj,$mul0,$acc0
mulx $npj,$mul1,$acc1
add $tpj,$sbit,$sbit
add $acc0,$car0,$car0
and $car0,$mask,$acc0
srlx $car0,32,$car0
add $acc0,$acc0,$acc0
add $sbit,$acc0,$acc0
srlx $acc0,32,$sbit
and $acc0,$mask,$acc0
add $acc0,$car1,$car1
add $acc1,$car1,$car1
st $car1,[$tp] ! tp[j-1]
srlx $car1,32,$car1
add $car0,$car0,$car0
add $sbit,$car0,$car0
add $car0,$car1,$car1
add $car2,$car1,$car1
st $car1,[$tp+4]
srlx $car1,32,$car2
add $i,4,$i ! i++
ld [%sp+$bias+$frame],$tmp1 ! tp[0]
ld [%sp+$bias+$frame+4],$tpj ! tp[1]
ld [$ap+$i],$mul0 ! ap[j]
ld [$np],$car1 ! np[0]
ld [$np+4],$npj ! np[1]
mulx $n0,$tmp1,$mul1
and $mul1,$mask,$mul1
add $i,4,$tmp0
mulx $mul0,$mul0,$car0
mulx $car1,$mul1,$car1
and $car0,$mask,$acc0
add $tmp1,$car1,$car1
srlx $car0,32,$car0
add %sp,$bias+$frame,$tp
srlx $car1,32,$car1
and $car0,1,$sbit
srlx $car0,1,$car0
cmp $tmp0,$num ! i<num-1
bl %icc,.Lsqr_outer
mov 4,$j
.Lsqr_last:
mulx $npj,$mul1,$acc1
add $tpj,$car1,$car1
add $j,4,$j
ld [$tp+8],$tpj
cmp $j,$i
add $acc1,$car1,$car1
ld [$np+$j],$npj
st $car1,[$tp]
srlx $car1,32,$car1
bl %icc,.Lsqr_last
add $tp,4,$tp
!.Lsqr_last
mulx $npj,$mul1,$acc1
add $tpj,$acc0,$acc0
srlx $acc0,32,$tmp0
and $acc0,$mask,$acc0
add $tmp0,$sbit,$sbit
add $acc0,$car1,$car1
add $acc1,$car1,$car1
st $car1,[$tp]
srlx $car1,32,$car1
add $car0,$car0,$car0 ! recover $car0
add $sbit,$car0,$car0
add $car0,$car1,$car1
add $car2,$car1,$car1
st $car1,[$tp+4]
srlx $car1,32,$car2
ba .Ltail
add $tp,8,$tp
.type $fname,#function
.size $fname,(.-$fname)
.asciz "Montgomery Multiplication for SPARCv9, CRYPTOGAMS by <appro\@openssl.org>"
.align 32
___
# expand compile-time expressions enclosed in backticks (evaluated in the
# current Perl context) before emitting the generated assembly
$code =~ s/\`([^\`]*)\`/eval($1)/gem;
print $code;
# close explicitly so buffered-write errors on the output are detected
close STDOUT or die "error closing STDOUT: $!";
| jens-maus/amissl | openssl/crypto/bn/asm/sparcv9-mont.pl | Perl | bsd-3-clause | 14,168 |
package HTTP::Proxy::Engine::Threaded;
use strict;
use warnings;
use HTTP::Proxy;
use IO::Select;    # start() builds an IO::Select; load it explicitly rather
                   # than relying on HTTP::Proxy pulling it in indirectly
use threads;

# A massive hack of Engine::Fork to use the threads stuff.
# Basically created to work under win32 so that the filters
# can share global caches among themselves.
# Angelos Karageorgiou angelos@unix.gr

our @ISA = qw( HTTP::Proxy::Engine );
our %defaults = (
    max_clients => 60,
);

__PACKAGE__->make_accessors( qw( kids select ), keys %defaults );

# Initialize the engine: reset the kid list and watch the proxy's
# listening socket for incoming connections.
sub start {
    my $self = shift;
    $self->kids( [] );
    $self->select( IO::Select->new( $self->proxy->daemon ) );
}

# One iteration of the proxy main loop: accept any pending connection
# and hand it to a detached worker thread.
sub run {
    my $self = shift;
    my $proxy = $self->proxy;

    # check for new connections (1 second timeout)
    my @ready = $self->select->can_read(1);
    for my $fh (@ready) {    # there's only one, anyway

        # accept the new connection
        my $conn  = $fh->accept;
        my $child = threads->new( \&worker, $proxy, $conn );
        if ( !defined $child ) {
            $conn->close;
            $proxy->log( HTTP::Proxy::ERROR, "PROCESS", "Cannot spawn thread" );
            next;
        }

        # detached threads clean up after themselves; no join required
        $child->detach();
    }
}

# Stop the engine. Worker threads are detached, so there is nothing to reap.
sub stop {
    my $self = shift;
}

# Thread entry point: serve a single client connection, then close it.
sub worker {
    my ( $proxy, $conn ) = @_;
    $proxy->serve_connections($conn);
    $conn->close();
    return;
}

1;
__END__
=head1 NAME
HTTP::Proxy::Engine::Threaded - A threaded HTTP::Proxy engine
=head1 SYNOPSIS
my $proxy = HTTP::Proxy->new( engine => 'Threaded' );
=head1 DESCRIPTION
This module provides a threaded engine to HTTP::Proxy.
=head1 METHODS
The module defines the following methods, used by HTTP::Proxy main loop:
=over 4
=item start()
Initialize the engine.
=item run()
Implements the threading logic: a new detached thread is spawned for each
new incoming TCP connection.
=item stop()
Stop the engine. Worker threads are detached, so there is nothing to reap.
=back
=head1 SEE ALSO
L<HTTP::Proxy>, L<HTTP::Proxy::Engine>.
=head1 AUTHOR
Angelos Karageorgiou C<< <angelos@unix.gr> >>. (Actual code)
Philippe "BooK" Bruhat, C<< <book@cpan.org> >>. (Documentation)
=head1 COPYRIGHT
Copyright 2010, Philippe Bruhat.
=head1 LICENSE
This module is free software; you can redistribute it or modify it under
the same terms as Perl itself.
=cut
| btovar/cvmfs | test/mock_services/HTTP/Proxy/Engine/Threaded.pm | Perl | bsd-3-clause | 2,285 |
=pod
=for comment openssl_manual_section:7
=head1 NAME
evp - high-level cryptographic functions
=head1 SYNOPSIS
#include <openssl/evp.h>
=head1 DESCRIPTION
The EVP library provides a high-level interface to cryptographic
functions.
L<B<EVP_Seal>I<...>|EVP_SealInit(3)> and L<B<EVP_Open>I<...>|EVP_OpenInit(3)>
provide public key encryption and decryption to implement digital "envelopes".
The L<B<EVP_DigestSign>I<...>|EVP_DigestSignInit(3)> and
L<B<EVP_DigestVerify>I<...>|EVP_DigestVerifyInit(3)> functions implement
digital signatures and Message Authentication Codes (MACs). Also see the older
L<B<EVP_Sign>I<...>|EVP_SignInit(3)> and L<B<EVP_Verify>I<...>|EVP_VerifyInit(3)>
functions.
Symmetric encryption is available with the L<B<EVP_Encrypt>I<...>|EVP_EncryptInit(3)>
functions. The L<B<EVP_Digest>I<...>|EVP_DigestInit(3)> functions provide message digests.
The B<EVP_PKEY>I<...> functions provide a high level interface to
asymmetric algorithms. To create a new EVP_PKEY see
L<EVP_PKEY_new(3)>. EVP_PKEYs can be associated
with a private key of a particular algorithm by using the functions
described on the L<EVP_PKEY_set1_RSA(3)> page, or
new keys can be generated using L<EVP_PKEY_keygen(3)>.
EVP_PKEYs can be compared using L<EVP_PKEY_cmp(3)>, or printed using
L<EVP_PKEY_print_private(3)>.
The EVP_PKEY functions support the full range of asymmetric algorithm operations:
=over
=item For key agreement see L<EVP_PKEY_derive(3)>
=item For signing and verifying see L<EVP_PKEY_sign(3)>,
L<EVP_PKEY_verify(3)> and L<EVP_PKEY_verify_recover(3)>.
However, note that
these functions do not perform a digest of the data to be signed. Therefore
normally you would use the L<EVP_DigestSignInit(3)>
functions for this purpose.
=item For encryption and decryption see L<EVP_PKEY_encrypt(3)>
and L<EVP_PKEY_decrypt(3)> respectively. However, note that
these functions perform encryption and decryption only. As public key
encryption is an expensive operation, normally you would wrap
an encrypted message in a "digital envelope" using the L<EVP_SealInit(3)> and
L<EVP_OpenInit(3)> functions.
=back
The L<EVP_BytesToKey(3)> function provides some limited support for password
based encryption. Careful selection of the parameters will provide a PKCS#5 PBKDF1 compatible
implementation. However, new applications should not typically use this (preferring, for example,
PBKDF2 from PKCS#5).
The L<B<EVP_Encode>I<...>|EVP_EncodeInit(3)> and
L<B<EVP_Decode>I<...>|EVP_EncodeInit(3)> functions implement base 64 encoding
and decoding.
All the symmetric algorithms (ciphers), digests and asymmetric algorithms
(public key algorithms) can be replaced by L<engine(3)> modules providing alternative
implementations. If ENGINE implementations of ciphers or digests are registered
as defaults, then the various EVP functions will automatically use those
implementations in preference to built-in software
implementations. For more information, consult the engine(3) man page.
Although low level algorithm specific functions exist for many algorithms
their use is discouraged. They cannot be used with an ENGINE and ENGINE
versions of new algorithms cannot be accessed using the low level functions.
They also make code harder to adapt to new algorithms, some options are not
cleanly supported at the low level, and some operations are more efficient
using the high-level interface.
=head1 SEE ALSO
L<EVP_DigestInit(3)>,
L<EVP_EncryptInit(3)>,
L<EVP_OpenInit(3)>,
L<EVP_SealInit(3)>,
L<EVP_DigestSignInit(3)>,
L<EVP_SignInit(3)>,
L<EVP_VerifyInit(3)>,
L<EVP_EncodeInit(3)>,
L<EVP_PKEY_new(3)>,
L<EVP_PKEY_set1_RSA(3)>,
L<EVP_PKEY_keygen(3)>,
L<EVP_PKEY_print_private(3)>,
L<EVP_PKEY_decrypt(3)>,
L<EVP_PKEY_encrypt(3)>,
L<EVP_PKEY_sign(3)>,
L<EVP_PKEY_verify(3)>,
L<EVP_PKEY_verify_recover(3)>,
L<EVP_PKEY_derive(3)>,
L<EVP_BytesToKey(3)>,
L<engine(3)>
=head1 COPYRIGHT
Copyright 2000-2016 The OpenSSL Project Authors. All Rights Reserved.
Licensed under the OpenSSL license (the "License"). You may not use
this file except in compliance with the License. You can obtain a copy
in the file LICENSE in the source distribution or at
L<https://www.openssl.org/source/license.html>.
=cut
| openweave/openweave-core | third_party/openssl/openssl/doc/crypto/evp.pod | Perl | apache-2.0 | 4,219 |
#------------------------------------------------------------------------------
# File: ASF.pm
#
# Description: Read ASF/WMA/WMV meta information
#
# Revisions: 12/23/2005 - P. Harvey Created
#
# References: 1) http://www.microsoft.com/windows/windowsmedia/format/asfspec.aspx
# 2) http://www.adobe.com/devnet/xmp/pdfs/XMPSpecificationPart3.pdf (Oct 2008)
#------------------------------------------------------------------------------
package Image::ExifTool::ASF;
use strict;
use vars qw($VERSION);
use Image::ExifTool qw(:DataAccess :Utils);
use Image::ExifTool::Exif;
use Image::ExifTool::RIFF;
$VERSION = '1.10';
sub ProcessMetadata($$$);
sub ProcessContentDescription($$$);
sub ProcessPreview($$$);
sub ProcessCodecList($$$);
# GUID definitions
my %errorCorrection = (
'20FB5700-5B55-11CF-A8FD-00805F5C442B' => 'No Error Correction',
'BFC3CD50-618F-11CF-8BB2-00AA00B4E220' => 'Audio Spread',
);
my %streamType = (
'F8699E40-5B4D-11CF-A8FD-00805F5C442B' => 'Audio',
'BC19EFC0-5B4D-11CF-A8FD-00805F5C442B' => 'Video',
'59DACFC0-59E6-11D0-A3AC-00A0C90348F6' => 'Command',
'B61BE100-5B4E-11CF-A8FD-00805F5C442B' => 'JFIF',
'35907DE0-E415-11CF-A917-00805F5C442B' => 'Degradable JPEG',
'91BD222C-F21C-497A-8B6D-5AA86BFC0185' => 'File Transfer',
'3AFB65E2-47EF-40F2-AC2C-70A90D71D343' => 'Binary',
);
my %mutex = (
'D6E22A00-35DA-11D1-9034-00A0C90349BE' => 'MutexLanguage',
'D6E22A01-35DA-11D1-9034-00A0C90349BE' => 'MutexBitrate',
'D6E22A02-35DA-11D1-9034-00A0C90349BE' => 'MutexUnknown',
);
my %bandwidthSharing = (
'AF6060AA-5197-11D2-B6AF-00C04FD908E9' => 'SharingExclusive',
'AF6060AB-5197-11D2-B6AF-00C04FD908E9' => 'SharingPartial',
);
my %typeSpecific = (
'776257D4-C627-41CB-8F81-7AC7FF1C40CC' => 'WebStreamMediaSubtype',
'DA1E6B13-8359-4050-B398-388E965BF00C' => 'WebStreamFormat',
);
my %advancedContentEncryption = (
'7A079BB6-DAA4-4e12-A5CA-91D38DC11A8D' => 'DRMNetworkDevices',
);
# ASF top level objects
%Image::ExifTool::ASF::Main = (
PROCESS_PROC => \&Image::ExifTool::ASF::ProcessASF,
NOTES => q{
The ASF format is used by Windows WMA and WMV files, and DIVX videos. Tag
ID's aren't listed because they are huge 128-bit GUID's that would ruin the
formatting of this table.
},
'75B22630-668E-11CF-A6D9-00AA0062CE6C' => {
Name => 'Header',
SubDirectory => { TagTable => 'Image::ExifTool::ASF::Header', Size => 6 },
},
'75B22636-668E-11CF-A6D9-00AA0062CE6C' => 'Data',
'33000890-E5B1-11CF-89F4-00A0C90349CB' => 'SimpleIndex',
'D6E229D3-35DA-11D1-9034-00A0C90349BE' => 'Index',
'FEB103F8-12AD-4C64-840F-2A1D2F7AD48C' => 'MediaIndex',
'3CB73FD0-0C4A-4803-953D-EDF7B6228F0C' => 'TimecodeIndex',
'BE7ACFCB-97A9-42E8-9C71-999491E3AFAC' => { #2
Name => 'XMP',
SubDirectory => { TagTable => 'Image::ExifTool::XMP::Main' },
},
);
# ASF header objects
%Image::ExifTool::ASF::Header = (
PROCESS_PROC => \&Image::ExifTool::ASF::ProcessASF,
'8CABDCA1-A947-11CF-8EE4-00C00C205365' => {
Name => 'FileProperties',
SubDirectory => { TagTable => 'Image::ExifTool::ASF::FileProperties' },
},
'B7DC0791-A9B7-11CF-8EE6-00C00C205365' => {
Name => 'StreamProperties',
SubDirectory => { TagTable => 'Image::ExifTool::ASF::StreamProperties' },
},
'5FBF03B5-A92E-11CF-8EE3-00C00C205365' => {
Name => 'HeaderExtension',
SubDirectory => { TagTable => 'Image::ExifTool::ASF::HeaderExtension', Size => 22 },
},
'86D15240-311D-11D0-A3A4-00A0C90348F6' => {
Name => 'CodecList',
SubDirectory => { TagTable => 'Image::ExifTool::ASF::CodecList' },
},
'1EFB1A30-0B62-11D0-A39B-00A0C90348F6' => 'ScriptCommand',
'F487CD01-A951-11CF-8EE6-00C00C205365' => 'Marker',
'D6E229DC-35DA-11D1-9034-00A0C90349BE' => 'BitrateMutualExclusion',
'75B22635-668E-11CF-A6D9-00AA0062CE6C' => 'ErrorCorrection',
'75B22633-668E-11CF-A6D9-00AA0062CE6C' => {
Name => 'ContentDescription',
SubDirectory => { TagTable => 'Image::ExifTool::ASF::ContentDescr' },
},
'2211B3FA-BD23-11D2-B4B7-00A0C955FC6E' => {
Name => 'ContentBranding',
SubDirectory => { TagTable => 'Image::ExifTool::ASF::ContentBranding' },
},
'D2D0A440-E307-11D2-97F0-00A0C95EA850' => {
Name => 'ExtendedContentDescr',
SubDirectory => { TagTable => 'Image::ExifTool::ASF::ExtendedDescr' },
},
'7BF875CE-468D-11D1-8D82-006097C9A2B2' => 'StreamBitrateProps',
'2211B3FB-BD23-11D2-B4B7-00A0C955FC6E' => 'ContentEncryption',
'298AE614-2622-4C17-B935-DAE07EE9289C' => 'ExtendedContentEncryption',
'2211B3FC-BD23-11D2-B4B7-00A0C955FC6E' => 'DigitalSignature',
'1806D474-CADF-4509-A4BA-9AABCB96AAE8' => 'Padding',
);
# ASF Content Description object: five fixed fields stored as length-prefixed
# Unicode strings, decoded positionally by ProcessContentDescription
%Image::ExifTool::ASF::ContentDescr = (
    PROCESS_PROC => \&ProcessContentDescription,
    GROUPS => { 2 => 'Video' },
    0 => 'Title',
    1 => { Name => 'Author', Groups => { 2 => 'Author' } },
    2 => { Name => 'Copyright', Groups => { 2 => 'Author' } },
    3 => 'Description',
    4 => 'Rating',
);
%Image::ExifTool::ASF::ContentBranding = (
PROCESS_PROC => \&ProcessContentBranding,
GROUPS => { 2 => 'Author' },
0 => {
Name => 'BannerImageType',
PrintConv => {
0 => 'None',
1 => 'Bitmap',
2 => 'JPEG',
3 => 'GIF',
},
},
1 => { Name => 'BannerImage', Binary => 1 },
2 => 'BannerImageURL',
3 => 'CopyrightURL',
);
%Image::ExifTool::ASF::ExtendedDescr = (
PROCESS_PROC => \&ProcessExtendedContentDescription,
GROUPS => { 2 => 'Video' },
ASFLeakyBucketPairs => { Binary => 1 },
AspectRatioX => {},
AspectRatioY => {},
Author => { Groups => { 2 => 'Author' } },
AverageLevel => {},
BannerImageData => {},
BannerImageType => {},
BannerImageURL => {},
Bitrate => {},
Broadcast => {},
BufferAverage => {},
Can_Skip_Backward => {},
Can_Skip_Forward => {},
Copyright => { Groups => { 2 => 'Author' } },
CopyrightURL => { Groups => { 2 => 'Author' } },
CurrentBitrate => {},
Description => {},
DRM_ContentID => {},
DRM_DRMHeader_ContentDistributor => {},
DRM_DRMHeader_ContentID => {},
DRM_DRMHeader_IndividualizedVersion => {},
DRM_DRMHeader_KeyID => {},
DRM_DRMHeader_LicenseAcqURL => {},
DRM_DRMHeader_SubscriptionContentID => {},
DRM_DRMHeader => {},
DRM_IndividualizedVersion => {},
DRM_KeyID => {},
DRM_LASignatureCert => {},
DRM_LASignatureLicSrvCert => {},
DRM_LASignaturePrivKey => {},
DRM_LASignatureRootCert => {},
DRM_LicenseAcqURL => {},
DRM_V1LicenseAcqURL => {},
Duration => { PrintConv => 'ConvertDuration($val)' },
FileSize => {},
HasArbitraryDataStream => {},
HasAttachedImages => {},
HasAudio => {},
HasFileTransferStream => {},
HasImage => {},
HasScript => {},
HasVideo => {},
Is_Protected => {},
Is_Trusted => {},
IsVBR => {},
NSC_Address => {},
NSC_Description => {},
NSC_Email => {},
NSC_Name => {},
NSC_Phone => {},
NumberOfFrames => {},
OptimalBitrate => {},
PeakValue => {},
Rating => {},
Seekable => {},
Signature_Name => {},
Stridable => {},
Title => {},
VBRPeak => {},
# "WM/" tags...
AlbumArtist => {},
AlbumCoverURL => {},
AlbumTitle => {},
ASFPacketCount => {},
ASFSecurityObjectsSize => {},
AudioFileURL => {},
AudioSourceURL => {},
AuthorURL => { Groups => { 2 => 'Author' } },
BeatsPerMinute => {},
Category => {},
Codec => {},
Composer => {},
Conductor => {},
ContainerFormat => {},
ContentDistributor => {},
ContentGroupDescription => {},
Director => {},
DRM => {},
DVDID => {},
EncodedBy => {},
EncodingSettings => {},
EncodingTime => { Groups => { 2 => 'Time' } },
Genre => {},
GenreID => {},
InitialKey => {},
ISRC => {},
Language => {},
Lyrics => {},
Lyrics_Synchronised => {},
MCDI => {},
MediaClassPrimaryID => {},
MediaClassSecondaryID => {},
MediaCredits => {},
MediaIsDelay => {},
MediaIsFinale => {},
MediaIsLive => {},
MediaIsPremiere => {},
MediaIsRepeat => {},
MediaIsSAP => {},
MediaIsStereo => {},
MediaIsSubtitled => {},
MediaIsTape => {},
MediaNetworkAffiliation => {},
MediaOriginalBroadcastDateTime => { Groups => { 2 => 'Time' } },
MediaOriginalChannel => {},
MediaStationCallSign => {},
MediaStationName => {},
ModifiedBy => {},
Mood => {},
OriginalAlbumTitle => {},
OriginalArtist => {},
OriginalFilename => {},
OriginalLyricist => {},
OriginalReleaseTime => { Groups => { 2 => 'Time' } },
OriginalReleaseYear => { Groups => { 2 => 'Time' } },
ParentalRating => {},
ParentalRatingReason => {},
PartOfSet => {},
PeakBitrate => {},
Period => {},
Picture => {
SubDirectory => {
TagTable => 'Image::ExifTool::ASF::Preview',
},
},
PlaylistDelay => {},
Producer => {},
PromotionURL => {},
ProtectionType => {},
Provider => {},
ProviderCopyright => {},
ProviderRating => {},
ProviderStyle => {},
Publisher => {},
RadioStationName => {},
RadioStationOwner => {},
SharedUserRating => {},
StreamTypeInfo => {},
SubscriptionContentID => {},
SubTitle => {},
SubTitleDescription => {},
Text => {},
ToolName => {},
ToolVersion => {},
Track => {},
TrackNumber => {},
UniqueFileIdentifier => {},
UserWebURL => {},
VideoClosedCaptioning => {},
VideoFrameRate => {},
VideoHeight => {},
VideoWidth => {},
WMADRCAverageReference => {},
WMADRCAverageTarget => {},
WMADRCPeakReference => {},
WMADRCPeakTarget => {},
WMCollectionGroupID => {},
WMCollectionID => {},
WMContentID => {},
Writer => { Groups => { 2 => 'Author' } },
Year => { Groups => { 2 => 'Time' } },
);
%Image::ExifTool::ASF::Preview = (
PROCESS_PROC => \&ProcessPreview,
GROUPS => { 2 => 'Video' },
0 => {
Name => 'PreviewType',
PrintConv => {
0 => 'Other picture type',
1 => '32x32 PNG file icon',
2 => 'Other file icon',
3 => 'Front album cover',
4 => 'Back album cover',
5 => 'Leaflet page',
6 => 'Media label',
7 => 'Lead artist, performer, or soloist',
8 => 'Artists or performers',
9 => 'Conductor',
10 => 'Band or orchestra',
11 => 'Composer',
12 => 'Lyricist or writer',
13 => 'Recording studio or location',
14 => 'Recording session',
15 => 'Performance',
16 => 'Capture from movie or video',
17 => 'A bright colored fish',
18 => 'Illustration',
19 => 'Band or artist logo',
20 => 'Publisher or studio logo',
},
},
1 => 'PreviewMimeType',
2 => 'PreviewDescription',
3 => {
Name => 'PreviewImage',
RawConv => '$self->ValidateImage(\$val,$tag)',
},
);
%Image::ExifTool::ASF::FileProperties = (
PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData,
GROUPS => { 2 => 'Video' },
0 => {
Name => 'FileID',
Format => 'binary[16]',
ValueConv => 'Image::ExifTool::ASF::GetGUID($val)',
},
16 => { Name => 'FileSize', Format => 'int64u' },
24 => {
Name => 'CreationDate',
Format => 'int64u',
Groups => { 2 => 'Time' },
# time is in 100 ns intervals since 0:00 UTC Jan 1, 1601
ValueConv => q{ # (89 leap years between 1601 and 1970)
my $t = $val / 1e7 - (((1970-1601)*365+89)*24*3600);
return Image::ExifTool::ConvertUnixTime($t) . 'Z';
}
},
32 => { Name => 'DataPackets', Format => 'int64u' },
40 => {
Name => 'PlayDuration',
Format => 'int64u',
ValueConv => '$val / 1e7',
PrintConv => 'ConvertDuration($val)',
},
48 => {
Name => 'SendDuration',
Format => 'int64u',
ValueConv => '$val / 1e7',
PrintConv => 'ConvertDuration($val)',
},
56 => { Name => 'Preroll', Format => 'int64u' },
64 => { Name => 'Flags', Format => 'int32u' },
68 => { Name => 'MinPacketSize',Format => 'int32u' },
72 => { Name => 'MaxPacketSize',Format => 'int32u' },
76 => { Name => 'MaxBitrate', Format => 'int32u' },
);
%Image::ExifTool::ASF::StreamProperties = (
PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData,
GROUPS => { 2 => 'Video' },
NOTES => 'Tags with index 54 and greater are conditional based on the StreamType.',
0 => {
Name => 'StreamType',
Format => 'binary[16]',
RawConv => sub { # set ASF_STREAM_TYPE for use in conditional tags
my ($val, $exifTool) = @_;
$exifTool->{ASF_STREAM_TYPE} = $streamType{GetGUID($val)} || '';
return $val;
},
ValueConv => 'Image::ExifTool::ASF::GetGUID($val)',
PrintConv => \%streamType,
},
16 => {
Name => 'ErrorCorrectionType',
Format => 'binary[16]',
ValueConv => 'Image::ExifTool::ASF::GetGUID($val)',
PrintConv => \%errorCorrection,
},
32 => {
Name => 'TimeOffset',
Format => 'int64u',
ValueConv => '$val / 1e7',
PrintConv => '"$val s"',
},
48 => {
Name => 'StreamNumber',
Format => 'int16u',
PrintConv => '($val & 0x7f) . ($val & 0x8000 ? " (encrypted)" : "")',
},
54 => [
{
Condition => '$self->{ASF_STREAM_TYPE} eq "Audio"',
Name => 'AudioCodecID',
Format => 'int16u',
PrintHex => 1,
SeparateTable => 'RIFF AudioEncoding',
PrintConv => \%Image::ExifTool::RIFF::audioEncoding,
},
{
Condition => '$self->{ASF_STREAM_TYPE} =~ /^(Video|JFIF|Degradable JPEG)$/',
Name => 'ImageWidth',
Format => 'int32u',
},
],
56 => {
Condition => '$self->{ASF_STREAM_TYPE} eq "Audio"',
Name => 'AudioChannels',
Format => 'int16u',
},
58 => [
{
Condition => '$self->{ASF_STREAM_TYPE} eq "Audio"',
Name => 'AudioSampleRate',
Format => 'int32u',
},
{
Condition => '$self->{ASF_STREAM_TYPE} =~ /^(Video|JFIF|Degradable JPEG)$/',
Name => 'ImageHeight',
Format => 'int32u',
},
],
);
%Image::ExifTool::ASF::HeaderExtension = (
PROCESS_PROC => \&Image::ExifTool::ASF::ProcessASF,
'14E6A5CB-C672-4332-8399-A96952065B5A' => 'ExtendedStreamProps',
'A08649CF-4775-4670-8A16-6E35357566CD' => 'AdvancedMutualExcl',
'D1465A40-5A79-4338-B71B-E36B8FD6C249' => 'GroupMutualExclusion',
'D4FED15B-88D3-454F-81F0-ED5C45999E24' => 'StreamPrioritization',
'A69609E6-517B-11D2-B6AF-00C04FD908E9' => 'BandwidthSharing',
'7C4346A9-EFE0-4BFC-B229-393EDE415C85' => 'LanguageList',
'C5F8CBEA-5BAF-4877-8467-AA8C44FA4CCA' => {
Name => 'Metadata',
SubDirectory => { TagTable => 'Image::ExifTool::ASF::Metadata' },
},
'44231C94-9498-49D1-A141-1D134E457054' => {
Name => 'MetadataLibrary',
SubDirectory => { TagTable => 'Image::ExifTool::ASF::Metadata' },
},
'D6E229DF-35DA-11D1-9034-00A0C90349BE' => 'IndexParameters',
'6B203BAD-3F11-48E4-ACA8-D7613DE2CFA7' => 'TimecodeIndexParms',
'75B22630-668E-11CF-A6D9-00AA0062CE6C' => 'Compatibility',
'43058533-6981-49E6-9B74-AD12CB86D58C' => 'AdvancedContentEncryption',
'ABD3D211-A9BA-11cf-8EE6-00C00C205365' => 'Reserved1',
);
# ASF Metadata / MetadataLibrary objects: tags are created dynamically
# (presumably by ProcessMetadata, declared above and defined later in the
# file -- confirm)
%Image::ExifTool::ASF::Metadata = (
    PROCESS_PROC => \&Image::ExifTool::ASF::ProcessMetadata,
);

# ASF Codec List object: which of these tags gets filled in is decided by
# ProcessCodecList (defined later in the file) based on the codec type
%Image::ExifTool::ASF::CodecList = (
    PROCESS_PROC => \&ProcessCodecList,
    VideoCodecName => {},
    VideoCodecDescription => {},
    AudioCodecName => {},
    AudioCodecDescription => {},
    OtherCodecName => {},
    OtherCodecDescription => {},
);
#------------------------------------------------------------------------------
# Generate GUID string from 16 bytes of binary data
# Inputs: 0) data
# Returns: GUID (upper case, 8-4-4-4-12 format)
sub GetGUID($)
{
    # the first three GUID fields are stored little-endian (int32u, int16u,
    # int16u); the final 8 bytes are emitted in storage order
    my ($lo32, $mid16, $hi16, @tail) = unpack('VvvC8', $_[0]);
    return uc sprintf('%08x-%04x-%04x-%02x%02x-%02x%02x%02x%02x%02x%02x',
                      $lo32, $mid16, $hi16, @tail);
}
#------------------------------------------------------------------------------
# Process ASF content description
# Inputs: 0) ExifTool object reference, 1) dirInfo ref, 2) tag table reference
# Returns: 1 on success, 0 on format error
sub ProcessContentDescription($$$)
{
    my ($exifTool, $dirInfo, $tagTablePtr) = @_;
    my $dataPt = $$dirInfo{DataPt};
    my $dirLen = $$dirInfo{DirLen};
    # directory begins with five int16u lengths (Title, Author, Copyright,
    # Description, Rating), each followed in turn by that many bytes of text
    return 0 if $dirLen < 10;
    my @len = unpack('v5', $$dataPt);
    my $pos = 10;
    my $tag;
    foreach $tag (0..4) {
        my $len = shift @len;
        next unless $len;           # zero length means the field is absent
        return 0 if $pos + $len > $dirLen;  # truncated entry
        # strings are little-endian Unicode; convert to the ExifTool charset
        my $val = $exifTool->Unicode2Charset(substr($$dataPt,$pos,$len),'II');
        $exifTool->HandleTag($tagTablePtr, $tag, $val);
        $pos += $len;
    }
    return 1;
}
#------------------------------------------------------------------------------
# Process ASF content branding
# Inputs: 0) ExifTool object reference, 1) dirInfo ref, 2) tag table reference
# Returns: 1 on success, 0 on format error
sub ProcessContentBranding($$$)
{
    my ($exifTool, $dirInfo, $tagTablePtr) = @_;
    my $dataPt = $$dirInfo{DataPt};
    my $dirLen = $$dirInfo{DirLen};
    return 0 if $dirLen < 40;
    # decode banner image type (int32u at the start of the directory)
    $exifTool->HandleTag($tagTablePtr, 0, unpack('V', $$dataPt));
    # followed by three int32u-size-prefixed fields:
    # banner image, banner image URL, and copyright URL
    my $pos = 4;
    my $tag;
    foreach $tag (1..3) {
        return 0 if $pos + 4 > $dirLen;     # no room for the size word
        my $size = unpack("x${pos}V", $$dataPt);
        $pos += 4;
        next unless $size;                  # empty field
        return 0 if $pos + $size > $dirLen; # truncated field
        my $val = substr($$dataPt, $pos, $size);
        $exifTool->HandleTag($tagTablePtr, $tag, $val);
        $pos += $size;
    }
    return 1;
}
#------------------------------------------------------------------------------
# Read ASF value
# Inputs: 0) ExifTool object ref, 1) data reference, 2) value offset,
#         3) format number, 4) size
# Returns: converted value
sub ReadASF($$$$$)
{
    my ($exifTool, $dataPt, $valPos, $fmt, $size) = @_;
    # format 0 is a little-endian Unicode string
    return $exifTool->Unicode2Charset(substr($$dataPt, $valPos, $size), 'II')
        if $fmt == 0;
    # formats 2-5 are fixed-size unsigned integers (2 is a 4-byte boolean)
    my %typeOf = ( 2 => 'int32u', 3 => 'int32u', 4 => 'int64u', 5 => 'int16u' );
    my $type = $typeOf{$fmt}
        # any other format (including 1, byte array) is returned raw
        or return substr($$dataPt, $valPos, $size);
    my @vals = ReadValue($dataPt, $valPos, $type, undef, $size);
    @vals = map { $_ ? 'True' : 'False' } @vals if $fmt == 2;
    return join ' ', @vals;
}
#------------------------------------------------------------------------------
# Process extended content description
# Inputs: 0) ExifTool object reference, 1) dirInfo ref, 2) tag table reference
# Returns: 1 on success
# Process extended content description
# Inputs: 0) ExifTool object reference, 1) dirInfo ref, 2) tag table reference
# Returns: 1 on success, 0 if the directory is truncated or inconsistent
sub ProcessExtendedContentDescription($$$)
{
    my ($exifTool, $dirInfo, $tagTablePtr) = @_;
    my $verbose = $exifTool->Options('Verbose');
    my $dataPt = $$dirInfo{DataPt};
    my $dirLen = $$dirInfo{DirLen};
    return 0 if $dirLen < 2;
    my $count = Get16u($dataPt, 0);
    $exifTool->VerboseDir($dirInfo, $count);
    my $pos = 2;
    my $i;
    for ($i=0; $i<$count; ++$i) {
        # minimum entry: 2 (name len) + 2 (data type) + 2 (data len)
        return 0 if $pos + 6 > $dirLen;
        my $nameLen = unpack("x${pos}v", $$dataPt);
        $pos += 2;
        return 0 if $pos + $nameLen + 4 > $dirLen;
        my $tag = Image::ExifTool::Unicode2Latin(substr($$dataPt,$pos,$nameLen),'v');
        $tag =~ s/^WM\///; # remove leading "WM/"
        $pos += $nameLen;
        my ($dType, $dLen) = unpack("x${pos}v2", $$dataPt);
        # guard against a corrupt data length that would read past the end
        # of the directory (matches the validation done in ProcessMetadata)
        return 0 if $pos + 4 + $dLen > $dirLen;
        my $val = ReadASF($exifTool,$dataPt,$pos+4,$dType,$dLen);
        $exifTool->HandleTag($tagTablePtr, $tag, $val,
            DataPt => $dataPt,
            Start => $pos + 4,
            Size => $dLen,
        );
        $pos += 4 + $dLen;
    }
    return 1;
}
#------------------------------------------------------------------------------
# Process WM/Picture preview
# Inputs: 0) ExifTool object reference, 1) dirInfo ref, 2) tag table reference
# Returns: 1 on success
sub ProcessPreview($$$)
{
my ($exifTool, $dirInfo, $tagTablePtr) = @_;
my $verbose = $exifTool->Options('Verbose');
my $dataPt = $$dirInfo{DataPt};
my $dirStart = $$dirInfo{DirStart};
my $dirLen = $$dirInfo{DirLen};
# need at least the 5-byte header (type + length) plus string data
return 0 unless $dirLen > 9;
# extract picture type and length
my ($type, $picLen) = unpack("x${dirStart}CV", $$dataPt);
$exifTool->HandleTag($tagTablePtr, 0, $type);
# extract mime type and description strings (null-terminated unicode strings)
# $n = bytes between header and picture data; must be even (UTF-16 code
# units) and large enough for the two 2-byte terminators
my $n = $dirLen - 5 - $picLen;
return 0 if $n & 0x01 or $n < 4;
my $str = substr($$dataPt, $dirStart+5, $n);
# each "\0\0" terminates one 2-byte-per-character string
if ($str =~ /^((?:..)*?)\0\0((?:..)*?)\0\0/) {
my ($mime, $desc) = ($1, $2);
$exifTool->HandleTag($tagTablePtr, 1, $exifTool->Unicode2Charset($mime,'II'));
$exifTool->HandleTag($tagTablePtr, 2, $exifTool->Unicode2Charset($desc,'II')) if length $desc;
}
# the picture data itself follows the two strings
$exifTool->HandleTag($tagTablePtr, 3, substr($$dataPt, $dirStart+5+$n, $picLen));
return 1;
}
#------------------------------------------------------------------------------
# Process codec list
# Inputs: 0) ExifTool object reference, 1) dirInfo ref, 2) tag table reference
# Returns: 1 on success
sub ProcessCodecList($$$)
{
my ($exifTool, $dirInfo, $tagTablePtr) = @_;
my $verbose = $exifTool->Options('Verbose');
my $dataPt = $$dirInfo{DataPt};
my $dirLen = $$dirInfo{DirLen};
# 16 reserved bytes followed by a 4-byte entry count
return 0 if $dirLen < 20;
my $count = Get32u($dataPt, 16);
$exifTool->VerboseDir($dirInfo, $count);
my $pos = 20;
my $i;
my %codecType = ( 1 => 'Video', 2 => 'Audio' );
for ($i=0; $i<$count; ++$i) {
# each entry starts with int16u type and int16u name length
return 0 if $pos + 8 > $dirLen;
my $type = ($codecType{Get16u($dataPt, $pos)} || 'Other') . 'Codec';
# stupid Windows programmers: these lengths are in characters (others are in bytes)
my $nameLen = Get16u($dataPt, $pos + 2) * 2;
$pos += 4;
return 0 if $pos + $nameLen + 2 > $dirLen;
my $name = $exifTool->Unicode2Charset(substr($$dataPt,$pos,$nameLen),'II');
$exifTool->HandleTag($tagTablePtr, "${type}Name", $name);
my $descLen = Get16u($dataPt, $pos + $nameLen) * 2;
$pos += $nameLen + 2;
return 0 if $pos + $descLen + 2 > $dirLen;
my $desc = $exifTool->Unicode2Charset(substr($$dataPt,$pos,$descLen),'II');
$exifTool->HandleTag($tagTablePtr, "${type}Description", $desc);
# the codec-specific information blob is skipped, not extracted
my $infoLen = Get16u($dataPt, $pos + $descLen);
$pos += $descLen + 2 + $infoLen;
}
return 1;
}
#------------------------------------------------------------------------------
# Process ASF metadata library
# Inputs: 0) ExifTool object reference, 1) dirInfo ref, 2) tag table reference
# Returns: 1 on success
sub ProcessMetadata($$$)
{
my ($exifTool, $dirInfo, $tagTablePtr) = @_;
my $verbose = $exifTool->Options('Verbose');
my $dataPt = $$dirInfo{DataPt};
my $dirLen = $$dirInfo{DirLen};
return 0 if $dirLen < 2;
my $count = Get16u($dataPt, 0);
$exifTool->VerboseDir($dirInfo, $count);
my $pos = 2;
my $i;
for ($i=0; $i<$count; ++$i) {
# fixed 12-byte entry header: index, stream, name length, data type
# (int16u each) then a 32-bit value length
return 0 if $pos + 12 > $dirLen;
my ($index, $stream, $nameLen, $dType, $dLen) = unpack("x${pos}v4V", $$dataPt);
$pos += 12;
return 0 if $pos + $nameLen + $dLen > $dirLen;
my $tag = Image::ExifTool::Unicode2Latin(substr($$dataPt,$pos,$nameLen),'v');
my $val = ReadASF($exifTool,$dataPt,$pos+$nameLen,$dType,$dLen);
# NOTE(review): Start points at the name, but the value actually
# begins at $pos + $nameLen -- confirm this offset is intended
$exifTool->HandleTag($tagTablePtr, $tag, $val,
DataPt => $dataPt,
Start => $pos,
Size => $dLen,
);
$pos += $nameLen + $dLen;
}
return 1;
}
#------------------------------------------------------------------------------
# Extract information from an ASF file
# Inputs: 0) ExifTool object reference, 1) dirInfo reference, 2) tag table ref
# Returns: 1 on success, 0 if this wasn't a valid ASF file
sub ProcessASF($$;$)
{
my ($exifTool, $dirInfo, $tagTablePtr) = @_;
my $raf = $$dirInfo{RAF};
my $verbose = $exifTool->Options('Verbose');
my $rtnVal = 0;
my $pos = 0;
my ($buff, $err, @parentTable, @childEnd);
# each ASF object begins with a 16-byte GUID plus a 64-bit total size
for (;;) {
last unless $raf->Read($buff, 24) == 24;
$pos += 24;
my $tag = GetGUID($buff);
unless ($tagTablePtr) {
# verify this is a valid ASF file
last unless $tag eq '75B22630-668E-11CF-A6D9-00AA0062CE6C';
my $fileType = $exifTool->{FILE_EXT};
$fileType = 'ASF' unless $fileType and $fileType =~ /^(ASF|WMV|WMA|DIVX)$/;
$exifTool->SetFileType($fileType);
SetByteOrder('II');
$tagTablePtr = GetTagTable('Image::ExifTool::ASF::Main');
$rtnVal = 1;
}
# the declared size includes the 24-byte header we already consumed
my $size = Image::ExifTool::Get64u(\$buff, 16) - 24;
if ($size < 0) {
$err = 'Invalid ASF object size';
last;
}
if ($size > 0x7fffffff) {
$err = 'Large ASF objects not supported';
last;
}
# go back to parent tag table if done with previous children
if (@childEnd and $pos >= $childEnd[-1]) {
pop @childEnd;
$tagTablePtr = pop @parentTable;
$exifTool->{INDENT} = substr($exifTool->{INDENT},0,-2);
}
my $tagInfo = $exifTool->GetTagInfo($tagTablePtr, $tag);
$verbose and $exifTool->VerboseInfo($tag, $tagInfo);
if ($tagInfo) {
my $subdir = $$tagInfo{SubDirectory};
if ($subdir) {
my $subTable = GetTagTable($$subdir{TagTable});
if ($$subTable{PROCESS_PROC} eq \&ProcessASF) {
# child objects are parsed linearly by this same loop;
# only the fixed-size sub-header needs to be skipped here
if (defined $$subdir{Size}) {
my $s = $$subdir{Size};
if ($verbose > 2) {
$raf->Read($buff, $s) == $s or $err = 'Truncated file', last;
$exifTool->VerboseDump(\$buff);
} elsif (not $raf->Seek($s, 1)) {
$err = 'Seek error';
last;
}
# continue processing linearly using subTable
push @parentTable, $tagTablePtr;
push @childEnd, $pos + $size;
$tagTablePtr = $subTable;
$pos += $$subdir{Size};
if ($verbose) {
$exifTool->{INDENT} .= '| ';
$exifTool->VerboseDir($$tagInfo{Name});
}
next;
}
} elsif ($raf->Read($buff, $size) == $size) {
# non-ASF subdirectory: slurp the whole object into memory and
# hand it to the table's own ProcessXxx routine
my %subdirInfo = (
DataPt => \$buff,
DirStart => 0,
DirLen => $size,
DirName => $$tagInfo{Name},
);
$exifTool->VerboseDump(\$buff) if $verbose > 2;
unless ($exifTool->ProcessDirectory(\%subdirInfo, $subTable)) {
$exifTool->Warn("Error processing $$tagInfo{Name} directory");
}
$pos += $size;
next;
} else {
$err = 'Unexpected end of file';
last;
}
}
}
# unhandled object (or no subdirectory): dump it in verbose mode,
# otherwise just seek past its data
if ($verbose > 2) {
$raf->Read($buff, $size) == $size or $err = 'Truncated file', last;
$exifTool->VerboseDump(\$buff);
} elsif (not $raf->Seek($size, 1)) { # skip the block
$err = 'Seek error';
last;
}
$pos += $size;
}
$err and $exifTool->Warn($err);
return $rtnVal;
}
1; # end
__END__
=head1 NAME
Image::ExifTool::ASF - Read ASF/WMA/WMV meta information
=head1 SYNOPSIS
This module is used by Image::ExifTool
=head1 DESCRIPTION
This module contains routines required by Image::ExifTool to extract
information from Microsoft Advanced Systems Format (ASF) files, including
Windows Media Audio (WMA) and Windows Media Video (WMV) files.
=head1 AUTHOR
Copyright 2003-2009, Phil Harvey (phil at owl.phy.queensu.ca)
This library is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
=head1 REFERENCES
=over 4
=item L<http://www.microsoft.com/windows/windowsmedia/format/asfspec.aspx>
=back
=head1 SEE ALSO
L<Image::ExifTool::TagNames/ASF Tags>,
L<Image::ExifTool(3pm)|Image::ExifTool>
=cut
| opf-attic/ref | tools/fits/0.6.1/tools/exiftool/perl/lib/Image/ExifTool/ASF.pm | Perl | apache-2.0 | 30,495 |
use strict;
package Test::Tester;
BEGIN
{
if (*Test::Builder::new{CODE})
{
warn "You should load Test::Tester before Test::Builder (or anything that loads Test::Builder)"
}
}
use Test::Builder;
use Test::Tester::CaptureRunner;
use Test::Tester::Delegate;
require Exporter;
use vars qw( @ISA @EXPORT $VERSION );
$VERSION = "0.108";
@EXPORT = qw( run_tests check_tests check_test cmp_results show_space );
@ISA = qw( Exporter );
my $Test = Test::Builder->new;
my $Capture = Test::Tester::Capture->new;
my $Delegator = Test::Tester::Delegate->new;
$Delegator->{Object} = $Test;
my $runner = Test::Tester::CaptureRunner->new;
my $want_space = $ENV{TESTTESTERSPACE};
# Enable \x{..} escaping of tricky characters in diag comparisons
# (see "SPACES AND TABS" in the POD).
sub show_space
{
$want_space = 1;
}
my $colour = '';
my $reset = '';
# Accept either spelling of the environment variable; if both are set the
# British spelling wins, as documented in the COLOUR section of the POD.
# (The duplicate TESTTESTERCOLOUR check here previously meant that the
# TESTTESTERCOLOR spelling never worked.)
if (my $want_colour = $ENV{TESTTESTERCOLOUR} || $ENV{TESTTESTERCOLOR})
{
    if (eval "require Term::ANSIColor")
    {
        my ($f, $b) = split(",", $want_colour);
        $colour = Term::ANSIColor::color($f).Term::ANSIColor::color("on_$b");
        $reset = Term::ANSIColor::color("reset");
    }
}
# Replacement for Test::Builder::new -- always hands back the delegate,
# which forwards to either the real builder or the capture object.
sub new_new
{
return $Delegator;
}
# Return a fresh capture object (legacy "hard way" interface).
sub capture
{
return Test::Tester::Capture->new;
}
# Switch to the filehandle-based runner and return the real builder.
sub fh
{
# experiment with capturing output, I don't like it
$runner = Test::Tester::FHRunner->new;
return $Test;
}
sub find_run_tests
{
    # Walk up the call stack looking for Test::Tester::run_tests and
    # return the index one past the frame where it was found (or one
    # past the top of the stack if it never appears).
    my $depth = 1;
    my $found = 0;
    while (!$found)
    {
        my @frame = caller($depth);
        last unless @frame;    # ran out of stack
        $found = ($frame[3] eq "Test::Tester::run_tests");
        $depth++;
    }
    # die "Didn't find 'run_tests' in caller stack" unless $found;
    return $depth;
}
# Run the given test sub with all Test::Builder traffic captured.
# Returns any premature diagnostics plus the list of result hashes.
sub run_tests
{
# point the delegate at the capture object for the duration
local($Delegator->{Object}) = $Capture;
$runner->run_tests(@_);
return ($runner->get_premature, $runner->get_results);
}
sub check_test
{
    # Convenience wrapper: promote the single expected result to a
    # one-element list and tail-call check_tests() via goto so the
    # caller-depth calculations there are unaffected.
    my ($test, $expect, $name) = @_;
    $name = "" unless defined $name;
    @_ = ($test, [$expect], $name);
    goto &check_tests;
}
# Run the tests, verify they completed without dying and produced no
# premature diagnostics, then compare every captured result to @$expects.
sub check_tests
{
my $test = shift;
my $expects = shift;
my $name = shift;
$name = "" unless defined($name);
my ($prem, @results) = eval { run_tests($test, $name) };
$Test->ok(! $@, "Test '$name' completed") || $Test->diag($@);
$Test->ok(! length($prem), "Test '$name' no premature diagnostication") ||
$Test->diag("Before any testing anything, your tests said\n$prem");
# bump Level so any comparison failures report the caller's location
local $Test::Builder::Level = $Test::Builder::Level + 1;
cmp_results(\@results, $expects, $name);
return ($prem, @results);
}
# Compare one field of a captured result against the expectation,
# skipping the comparison entirely when no expectation was supplied.
sub cmp_field
{
my ($result, $expect, $field, $desc) = @_;
if (defined $expect->{$field})
{
$Test->is_eq($result->{$field}, $expect->{$field},
"$desc compare $field");
}
}
# Compare one captured test result hash against the expected values,
# emitting diagnostics (optionally escaped/coloured) on any mismatch.
sub cmp_result
{
    my ($result, $expect, $name) = @_;
    my $sub_name = $result->{name};
    # default the subtest name when the result carries none
    # (this previously tested $name by mistake, leaving $sub_name undef)
    $sub_name = "" unless defined($sub_name);
    my $desc = "subtest '$sub_name' of '$name'";
    {
        local $Test::Builder::Level = $Test::Builder::Level + 1;
        cmp_field($result, $expect, "ok", $desc);
        cmp_field($result, $expect, "actual_ok", $desc);
        cmp_field($result, $expect, "type", $desc);
        cmp_field($result, $expect, "reason", $desc);
        cmp_field($result, $expect, "name", $desc);
    }
    # if we got no depth then default to 1
    my $depth = 1;
    if (exists $expect->{depth})
    {
        $depth = $expect->{depth};
    }
    # if depth was explicitly undef then don't test it
    if (defined $depth)
    {
        $Test->is_eq($result->{depth}, $depth, "checking depth") ||
            $Test->diag('You need to change $Test::Builder::Level');
    }
    if (defined(my $exp = $expect->{diag}))
    {
        # if there actually is some diag then put a \n on the end if it's not
        # there already
        $exp .= "\n" if (length($exp) and $exp !~ /\n$/);
        if (not $Test->ok($result->{diag} eq $exp,
            "subtest '$sub_name' of '$name' compare diag")
        )
        {
            my $got = $result->{diag};
            my $glen = length($got);
            my $elen = length($exp);
            # make whitespace visible: either escape tricky characters
            # or quote/colour each line
            for ($got, $exp)
            {
                my @lines = split("\n", $_);
                $_ = join("\n", map {
                    if ($want_space)
                    {
                        $_ = $colour.escape($_).$reset;
                    }
                    else
                    {
                        "'$colour$_$reset'"
                    }
                } @lines);
            }
            $Test->diag(<<EOM);
Got diag ($glen bytes):
$got
Expected diag ($elen bytes):
$exp
EOM
        }
    }
}
sub escape
{
    # Render a string with every "tricky" byte (anything outside the
    # printable ASCII range 33..124, except newline) as a \x{hh} escape.
    my $str = shift;
    return join '', map {
        my $code = ord $_;
        (($code > 32 && $code < 125) || $code == 10)
            ? $_
            : sprintf('\x{%x}', $code);
    } split //, $str;
}
# Check the number of captured results matches the expectation list,
# then compare each result/expectation pair in order.
sub cmp_results
{
my ($results, $expects, $name) = @_;
$Test->is_num(scalar @$results, scalar @$expects, "Test '$name' result count");
for (my $i = 0; $i < @$expects; $i++)
{
my $expect = $expects->[$i];
my $result = $results->[$i];
# report failures from the caller's perspective
local $Test::Builder::Level = $Test::Builder::Level + 1;
cmp_result($result, $expect, $name);
}
}
######## nicked from Test::More
# Declare the test plan on behalf of the importing package, honouring an
# optional "import => [...]" pair in the argument list (as Test::More does).
sub plan {
my(@plan) = @_;
my $caller = caller;
$Test->exported_to($caller);
my @imports = ();
foreach my $idx (0..$#plan) {
if( $plan[$idx] eq 'import' ) {
# remove the pair from the plan before handing it to Test::Builder
my($tag, $imports) = splice @plan, $idx, 2;
@imports = @$imports;
last;
}
}
$Test->plan(@plan);
__PACKAGE__->_export_to_level(1, __PACKAGE__, @imports);
}
# Loading Test::Tester hijacks Test::Builder::new so that everything
# loaded afterwards talks to our delegate; the remaining import arguments
# are treated as a plan (goto keeps caller() depth correct for plan()).
sub import {
my($class) = shift;
{
no warnings 'redefine';
*Test::Builder::new = \&new_new;
}
goto &plan;
}
sub _export_to_level
{
    # Compatibility shim for Exporter::export_to_level (some old
    # Exporters lack it); the third argument is ignored.
    my ($pkg, $level, $ignored, @symbols) = @_;
    my $callpkg = caller($level);
    $pkg->export($callpkg, @symbols);
}
############
1;
__END__
=head1 NAME
Test::Tester - Ease testing test modules built with Test::Builder
=head1 SYNOPSIS
use Test::Tester tests => 6;
use Test::MyStyle;
check_test(
sub {
is_mystyle_eq("this", "that", "not eq");
},
{
ok => 0, # expect this to fail
name => "not eq",
diag => "Expected: 'this'\nGot: 'that'",
}
);
or
use Test::Tester;
use Test::More tests => 3;
use Test::MyStyle;
my ($premature, @results) = run_tests(
sub {
is_database_alive("dbname");
}
);
# now use Test::More::like to check the diagnostic output
like($results[0]->{diag}, qr/^Database ping took \d+ seconds$/, "diag");
=head1 DESCRIPTION
If you have written a test module based on Test::Builder then Test::Tester
allows you to test it with the minimum of effort.
=head1 HOW TO USE (THE EASY WAY)
From version 0.08 Test::Tester no longer requires you to included anything
special in your test modules. All you need to do is
use Test::Tester;
in your test script B<before> any other Test::Builder based modules and away
you go.
Other modules based on Test::Builder can be used to help with the
testing. In fact you can even use functions from your module to test
other functions from the same module (while this is possible it is
probably not a good idea, if your module has bugs, then
using it to test itself may give the wrong answers).
The easiest way to test is to do something like
check_test(
sub { is_mystyle_eq("this", "that", "not eq") },
{
ok => 0, # we expect the test to fail
name => "not eq",
diag => "Expected: 'this'\nGot: 'that'",
}
);
this will execute the is_mystyle_eq test, capturing its results and
checking that they are what was expected.
You may need to examine the test results in a more flexible way, for
example, the diagnostic output may be quite long or complex or it may involve
something that you cannot predict in advance like a timestamp. In this case
you can get direct access to the test results:
my ($premature, @results) = run_tests(
sub {
is_database_alive("dbname");
}
);
like($results[0]->{diag}, qr/^Database ping took \d+ seconds$/, "diag");
We cannot predict how long the database ping will take so we use
Test::More's like() test to check that the diagnostic string is of the right
form.
=head1 HOW TO USE (THE HARD WAY)
I<This is here for backwards compatibility only>
Make your module use the Test::Tester::Capture object instead of the
Test::Builder one. How to do this depends on your module but assuming that
your module holds the Test::Builder object in $Test and that all your test
routines access it through $Test then providing a function something like this
sub set_builder
{
$Test = shift;
}
should allow your test scripts to do
Test::YourModule::set_builder(Test::Tester->capture);
and after that any tests inside your module will be captured.
=head1 TEST RESULTS
The result of each test is captured in a hash. These hashes are the same as
the hashes returned by Test::Builder->details but with a couple of extra
fields.
These fields are documented in L<Test::Builder> in the details() function
=over 2
=item ok
Did the test pass?
=item actual_ok
Did the test really pass? That is, did the pass come from
Test::Builder->ok() or did it pass because it was a TODO test?
=item name
The name supplied for the test.
=item type
What kind of test? Possibilities include, skip, todo etc. See
L<Test::Builder> for more details.
=item reason
The reason for the skip, todo etc. See L<Test::Builder> for more details.
=back
These fields are exclusive to Test::Tester.
=over 2
=item diag
Any diagnostics that were output for the test. This only includes
diagnostics output B<after> the test result is declared.
Note that Test::Builder ensures that any diagnostics end in a \n and
in earlier versions of Test::Tester it was essential that you have
the final \n in your expected diagnostics. From version 0.10 onwards,
Test::Tester will add the \n if you forgot it. It will not add a \n if
you are expecting no diagnostics. See below for help tracking down
hard to find space and tab related problems.
=item depth
This allows you to check that your test module is setting the correct value
for $Test::Builder::Level and thus giving the correct file and line number
when a test fails. It is calculated by looking at caller() and
$Test::Builder::Level. It should count how many subroutines there are before
jumping into the function you are testing. So for example in
run_tests( sub { my_test_function("a", "b") } );
the depth should be 1 and in
sub deeper { my_test_function("a", "b") }
run_tests(sub { deeper() });
depth should be 2, that is 1 for the sub {} and one for deeper(). This
might seem a little complex but if your tests look like the simple
examples in this doc then you don't need to worry as the depth will
always be 1 and that's what Test::Tester expects by default.
B<Note>: if you do not specify a value for depth in check_test() then it
automatically compares it against 1, if you really want to skip the depth
test then pass in undef.
B<Note>: depth will not be correctly calculated for tests that run from a
signal handler or an END block or anywhere else that hides the call stack.
=back
Some of Test::Tester's functions return arrays of these hashes, just
like Test::Builder->details. That is, the hash for the first test will
be array element 1 (not 0). Element 0 will not be a hash it will be a
string which contains any diagnostic output that came before the first
test. This should usually be empty, if it's not, it means something
output diagnostics before any test results showed up.
=head1 SPACES AND TABS
Appearances can be deceptive, especially when it comes to emptiness. If you
are scratching your head trying to work out why Test::Tester is saying that
your diagnostics are wrong when they look perfectly right then the answer is
probably whitespace. From version 0.10 on, Test::Tester surrounds the
expected and got diag values with single quotes to make it easier to spot
trailing whitespace. So in this example
# Got diag (5 bytes):
# 'abcd '
# Expected diag (4 bytes):
# 'abcd'
it is quite clear that there is a space at the end of the first string.
Another way to solve this problem is to use colour and inverse video on an
ANSI terminal; see COLOUR below if you want this.
Unfortunately this is sometimes not enough, neither colour nor quotes will
help you with problems involving tabs, other non-printing characters and
certain kinds of problems inherent in Unicode. To deal with this, you can
switch Test::Tester into a mode whereby all "tricky" characters are shown as
\{xx}. Tricky characters are those with ASCII code less than 33 or higher
than 126. This makes the output more difficult to read but much easier to
find subtle differences between strings. To turn on this mode either call
show_space() in your test script or set the TESTTESTERSPACE environment
variable to be a true value. The example above would then look like
# Got diag (5 bytes):
# abcd\x{20}
# Expected diag (4 bytes):
# abcd
=head1 COLOUR
If you prefer to use colour as a means of finding tricky whitespace
characters then you can set the TESTTESTCOLOUR environment variable to a
comma separated pair of colours, the first for the foreground, the second
for the background. For example "white,red" will print white text on a red
background. This requires the Term::ANSIColor module. You can specify any
colour that would be acceptable to the Term::ANSIColor::color function.
If you spell colour differently, that's no problem. The TESTTESTERCOLOR
variable also works (if both are set then the British spelling wins out).
=head1 EXPORTED FUNCTIONS
=head3 ($premature, @results) = run_tests(\&test_sub)
\&test_sub is a reference to a subroutine.
run_tests runs the subroutine in $test_sub and captures the results of any
tests inside it. You can run more than 1 test inside this subroutine if you
like.
$premature is a string containing any diagnostic output from before
the first test.
@results is an array of test result hashes.
=head3 cmp_result(\%result, \%expect, $name)
\%result is a ref to a test result hash.
\%expect is a ref to a hash of expected values for the test result.
cmp_result compares the result with the expected values. If any differences
are found it outputs diagnostics. You may leave out any field from the
expected result and cmp_result will not do the comparison of that field.
=head3 cmp_results(\@results, \@expects, $name)
\@results is a ref to an array of test results.
\@expects is a ref to an array of hash refs.
cmp_results checks that the results match the expected results and if any
differences are found it outputs diagnostics. It first checks that the
number of elements in \@results and \@expects is the same. Then it goes
through each result checking it against the expected result as in
cmp_result() above.
=head3 ($premature, @results) = check_tests(\&test_sub, \@expects, $name)
\&test_sub is a reference to a subroutine.
\@expect is a ref to an array of hash refs which are expected test results.
check_tests combines run_tests and cmp_tests into a single call. It also
checks if the tests died at any stage.
It returns the same values as run_tests, so you can further examine the test
results if you need to.
=head3 ($premature, @results) = check_test(\&test_sub, \%expect, $name)
\&test_sub is a reference to a subroutine.
\%expect is a ref to an hash of expected values for the test result.
check_test is a wrapper around check_tests. It combines run_tests and
cmp_tests into a single call, checking if the test died. It assumes
that only a single test is run inside \&test_sub and includes a test to
make sure this is true.
It returns the same values as run_tests, so you can further examine the test
results if you need to.
=head3 show_space()
Turn on the escaping of characters as described in the SPACES AND TABS
section.
=head1 HOW IT WORKS
Normally, a test module (let's call it Test::MyStyle) calls
Test::Builder->new to get the Test::Builder object. Test::MyStyle calls
methods on this object to record information about test results. When
Test::Tester is loaded, it replaces Test::Builder's new() method with one
which returns a Test::Tester::Delegate object. Most of the time this object
behaves as the real Test::Builder object. Any methods that are called are
delegated to the real Test::Builder object so everything works perfectly.
However once we go into test mode, the method calls are no longer passed to
the real Test::Builder object, instead they go to the Test::Tester::Capture
object. This object seems exactly like the real Test::Builder object,
except, instead of outputting test results and diagnostics, it just records
all the information for later analysis.
=head1 SEE ALSO
L<Test::Builder> the source of testing goodness. L<Test::Builder::Tester>
for an alternative approach to the problem tackled by Test::Tester -
captures the strings output by Test::Builder. This means you cannot get
separate access to the individual pieces of information and you must predict
B<exactly> what your test will output.
=head1 AUTHOR
This module is copyright 2005 Fergal Daly <fergal@esatclear.ie>, some parts
are based on other people's work.
Plan handling lifted from Test::More. written by Michael G Schwern
<schwern@pobox.com>.
Test::Tester::Capture is a cut down and hacked up version of Test::Builder.
Test::Builder was written by chromatic <chromatic@wgz.org> and Michael G
Schwern <schwern@pobox.com>.
=head1 LICENSE
Under the same license as Perl itself
See http://www.perl.com/perl/misc/Artistic.html
=cut
| liuyangning/WX_web | xampp/perl/vendor/lib/Test/Tester.pm | Perl | mit | 17,068 |
#line 1
package Module::Install::Base;
use strict 'vars';
use vars qw{$VERSION};
BEGIN {
$VERSION = '1.12';
}
# Suspend handler for "redefined" warnings
BEGIN {
my $w = $SIG{__WARN__};
# NOTE(review): this handler *returns* the saved handler rather than
# calling it, silencing warnings until the matching BEGIN block at the
# end of the file calls it to restore the original -- confirm intended.
$SIG{__WARN__} = sub { $w };
}
#line 42
# Constructor: the first time a subclass is instantiated, install default
# call() and load() methods into it (forwarding to the top-level object),
# then bless the remaining key/value arguments as the object.
sub new {
my $class = shift;
# symbolic references are allowed: only 'vars' strictness is in force
unless ( defined &{"${class}::call"} ) {
*{"${class}::call"} = sub { shift->_top->call(@_) };
}
unless ( defined &{"${class}::load"} ) {
*{"${class}::load"} = sub { shift->_top->load(@_) };
}
bless { @_ }, $class;
}
#line 61
# Delegate any unknown method to the top-level object's autoload handler;
# goto preserves @_ and the caller's frame. Returns silently if no
# handler is available.
sub AUTOLOAD {
local $@;
my $func = eval { shift->_top->autoload } or return;
goto &$func;
}
#line 75
# Accessor for the top-level Module::Install object stored in the hash.
sub _top {
    my ($self) = @_;
    return $self->{_top};
}
#line 90
# Return the admin object held by the top-level object, or a do-nothing
# FakeAdmin stand-in when none is present.
sub admin {
$_[0]->_top->{admin}
or
Module::Install::Base::FakeAdmin->new;
}
#line 106
# True when a real admin object (not the FakeAdmin stand-in) exists.
sub is_admin {
! $_[0]->admin->isa('Module::Install::Base::FakeAdmin');
}
sub DESTROY {}
# Null-object stand-in used when no real admin object exists: every
# method call is accepted and silently ignored via AUTOLOAD.
package Module::Install::Base::FakeAdmin;
use vars qw{$VERSION};
BEGIN {
$VERSION = $Module::Install::Base::VERSION;
}
# a single shared instance is cached here
my $fake;
sub new {
$fake ||= bless(\@_, $_[0]);
}
sub AUTOLOAD {}
sub DESTROY {}
# Restore warning handler
BEGIN {
# calling the stand-in handler yields the handler saved at file top
$SIG{__WARN__} = $SIG{__WARN__}->();
}
1;
#line 159
| gitpan/WWW-Search-KacurCZ | inc/Module/Install/Base.pm | Perl | bsd-2-clause | 1,127 |
package Env;
our $VERSION = '1.04';
=head1 NAME
Env - perl module that imports environment variables as scalars or arrays
=head1 SYNOPSIS
use Env;
use Env qw(PATH HOME TERM);
use Env qw($SHELL @LD_LIBRARY_PATH);
=head1 DESCRIPTION
Perl maintains environment variables in a special hash named C<%ENV>. For
when this access method is inconvenient, the Perl module C<Env> allows
environment variables to be treated as scalar or array variables.
The C<Env::import()> function ties environment variables with suitable
names to global Perl variables with the same names. By default it
ties all existing environment variables (C<keys %ENV>) to scalars. If
the C<import> function receives arguments, it takes them to be a list of
variables to tie; it's okay if they don't yet exist. The scalar type
prefix '$' is inferred for any element of this list not prefixed by '$'
or '@'. Arrays are implemented in terms of C<split> and C<join>, using
C<$Config::Config{path_sep}> as the delimiter.
After an environment variable is tied, merely use it like a normal variable.
You may access its value
@path = split(/:/, $PATH);
print join("\n", @LD_LIBRARY_PATH), "\n";
or modify it
$PATH .= ":.";
push @LD_LIBRARY_PATH, $dir;
however you'd like. Bear in mind, however, that each access to a tied array
variable requires splitting the environment variable's string anew.
The code:
use Env qw(@PATH);
push @PATH, '.';
is equivalent to:
use Env qw(PATH);
$PATH .= ":.";
except that if C<$ENV{PATH}> started out empty, the second approach leaves
it with the (odd) value "C<:.>", but the first approach leaves it with "C<.>".
To remove a tied environment variable from
the environment, assign it the undefined value
undef $PATH;
undef @LD_LIBRARY_PATH;
=head1 LIMITATIONS
On VMS systems, arrays tied to environment variables are read-only. Attempting
to change anything will cause a warning.
=head1 AUTHOR
Chip Salzenberg E<lt>F<chip@fin.uucp>E<gt>
and
Gregor N. Purdy E<lt>F<gregor@focusresearch.com>E<gt>
=cut
# Tie each requested name (default: every existing %ENV key) in the
# caller's package: '$NAME' becomes a tied scalar, '@NAME' a tied array
# split on the OS path separator.
sub import {
my ($callpack) = caller(0);
my $pack = shift;
# accept only sane identifiers, with an optional $ or @ sigil
my @vars = grep /^[\$\@]?[A-Za-z_]\w*$/, (@_ ? @_ : keys(%ENV));
return unless @vars;
# the scalar sigil is inferred when none was given
@vars = map { m/^[\$\@]/ ? $_ : '$'.$_ } @vars;
# declare the package variables so strict-enabled callers are happy
eval "package $callpack; use vars qw(" . join(' ', @vars) . ")";
die $@ if $@;
foreach (@vars) {
my ($type, $name) = m/^([\$\@])(.*)$/;
if ($type eq '$') {
tie ${"${callpack}::$name"}, Env, $name;
} else {
# VMS logical-name arrays need the read-only variant
if ($^O eq 'VMS') {
tie @{"${callpack}::$name"}, Env::Array::VMS, $name;
} else {
tie @{"${callpack}::$name"}, Env::Array, $name;
}
}
}
}
# Tie constructor: the tied scalar is a blessed ref to the variable name.
sub TIESCALAR {
bless \($_[1]);
}
sub FETCH {
    # Read the environment variable whose name this tied scalar wraps.
    my ($self) = @_;
    return $ENV{$$self};
}
sub STORE {
    # Assigning a defined value sets the variable; assigning undef
    # removes it from the environment entirely.
    my ($self, $value) = @_;
    defined($value) ? ($ENV{$$self} = $value) : delete($ENV{$$self});
}
######################################################################
package Env::Array;
use Config;
use Tie::Array;
@ISA = qw(Tie::Array);
my $sep = $Config::Config{path_sep};
# Tie constructor: blessed ref to the environment variable's name.
sub TIEARRAY {
bless \($_[1]);
}
sub FETCHSIZE {
my ($self) = @_;
# element count = separator count + 1 (even an empty string is one element)
return 1 + scalar(() = $ENV{$$self} =~ /\Q$sep\E/g);
}
# Truncate (or extend) the list to exactly $size elements.
sub STORESIZE {
my ($self, $size) = @_;
my @temp = split($sep, $ENV{$$self});
$#temp = $size - 1;
$ENV{$$self} = join($sep, @temp);
}
sub CLEAR {
    # Tied-array CLEAR handler: an empty list is stored as the empty string.
    $ENV{ ${ $_[0] } } = '';
}
# Return element $index; the string is split afresh on every access.
sub FETCH {
my ($self, $index) = @_;
return (split($sep, $ENV{$$self}))[$index];
}
# Store $value at $index, rebuilding the joined string.
sub STORE {
my ($self, $index, $value) = @_;
my @temp = split($sep, $ENV{$$self});
$temp[$index] = $value;
$ENV{$$self} = join($sep, @temp);
return $value;
}
# An index "exists" simply when it is within the current length.
sub EXISTS {
my ($self, $index) = @_;
return $index < $self->FETCHSIZE;
}
# Remove element $index (shifting later elements down); returns the
# removed value.
sub DELETE {
my ($self, $index) = @_;
my @temp = split($sep, $ENV{$$self});
my $value = splice(@temp, $index, 1, ());
$ENV{$$self} = join($sep, @temp);
return $value;
}
sub PUSH {
    # Append the new values and return the resulting element count,
    # mirroring the builtin push.
    my ($self, @extra) = @_;
    my @elems = split($sep, $ENV{$$self});
    push @elems, @extra;
    $ENV{$$self} = join($sep, @elems);
    return scalar(@elems);
}
# Remove and return the last element.
sub POP {
my ($self) = @_;
my @temp = split($sep, $ENV{$$self});
my $result = pop @temp;
$ENV{$$self} = join($sep, @temp);
return $result;
}
# Prepend values; returns whatever the builtin unshift returns.
sub UNSHIFT {
my $self = shift;
my @temp = split($sep, $ENV{$$self});
my $result = unshift @temp, @_;
$ENV{$$self} = join($sep, @temp);
return $result;
}
# Remove and return the first element.
sub SHIFT {
my ($self) = @_;
my @temp = split($sep, $ENV{$$self});
my $result = shift @temp;
$ENV{$$self} = join($sep, @temp);
return $result;
}
# splice() equivalent on the underlying list; list vs scalar context is
# propagated to the inner splice so the return value mimics the builtin.
sub SPLICE {
my $self = shift;
my $offset = shift;
my $length = shift;
my @temp = split($sep, $ENV{$$self});
if (wantarray) {
my @result = splice @temp, $offset, $length, @_;
$ENV{$$self} = join($sep, @temp);
return @result;
} else {
my $result = scalar splice @temp, $offset, $length, @_;
$ENV{$$self} = join($sep, @temp);
return $result;
}
}
######################################################################
package Env::Array::VMS;
use Tie::Array;
@ISA = qw(Tie::Array);
# Tie constructor for the read-only VMS variant.
sub TIEARRAY {
bless \($_[1]);
}
sub FETCHSIZE {
my ($self) = @_;
my $i = 0;
# VMS entries are indexed "NAME;0", "NAME;1", ... with a hard cap of 127
while ($i < 127 and defined $ENV{$$self . ';' . $i}) { $i++; };
return $i;
}
# Element $index lives in the "NAME;index" environment entry.
sub FETCH {
my ($self, $index) = @_;
return $ENV{$$self . ';' . $index};
}
# An index "exists" when it is within the current length.
sub EXISTS {
my ($self, $index) = @_;
return $index < $self->FETCHSIZE;
}
# VMS arrays are read-only: deletion is silently ignored.
sub DELETE { }
1;
| Dokaponteam/ITF_Project | xampp/perl/lib/Env.pm | Perl | mit | 5,524 |
#!/usr/bin/perl
# for example:
# ./add-require.pl `find . -name '*.js'`
#
# will iterate through all files passed in, open 'em up, read 'em, rewrite 'em, and write back to the same spot
# If it thinks it failed, it will not change them
# Slurp each file whole.
local $/ = undef;
foreach my $file (@ARGV) {
    # report unreadable files instead of silently processing undef
    open my $fh, '<', $file or do {
        warn "Cannot read $file: $!";
        next;
    };
    my $widget = <$fh>;
    close $fh;
    # capture via list assignment: "my $x = $1 if COND" has undefined
    # behaviour when COND is false (the variable can retain a stale
    # value from a previous loop iteration)
    my ($name) = $widget =~ /KBWidget[\s\r]*\([\s\r]*{[[\s\r]*name\s*:\s*['"](\w+)/;
    # collect every .kbaseXxx widget referenced, plus the declared parent
    my @deps = ();
    while ($widget =~ /\.(kbase[a-zA-Z]+)/g) {
        push @deps, $1;
    }
    if ($widget =~ /parent\s*:\s*['"](kbase[a-zA-Z]+)/) {
        push @deps, $1;
    }
    # de-duplicate (excluding the widget itself), then prepend the two
    # dependencies every widget needs
    my %seen = ($name => 1);
    @deps = grep {! $seen{$_}++} @deps;
    unshift @deps, 'jquery', 'kbwidget';
    if ($widget =~ /define\s*\(\s*\[/) {
        warn "Already has define : $file";
        next;
    }
    # rewrite the jQuery IIFE wrapper into an AMD define() wrapper
    my $s1 = $widget =~ s/\s*\(\s*function\s*\(\s*\$\s*(,\s*undefined)?\s*\)\s*{/rewrite($name, @deps)/e;
    my $s2 = $widget =~ s/}\s*[()]?\s*\(\s*jQuery\s*\)\s*\)?\s*;?/});/;
    if ($name && $s1 && $s2) {
        open my $out, '>', $file or do {
            warn "Cannot write $file: $!";
            next;
        };
        print $out $widget;
        close $out;
    }
    else {
        if ($widget !~ /KBWidget/) {
            warn "Not a KBWidget! ($file)";
        }
        elsif (! $s1) {
            warn "Could not rewire jquery function def for $file";
        }
        elsif (! $s2) {
            warn "Could not rewire jquery function arg for $file";
        }
        elsif (! $name) {
            warn "Cannot process widget! No name known! ($file)";
        }
    }
}
sub rewrite {
    # Build the AMD define() header text that replaces the jQuery IIFE opener.
    # $name - widget name, used as the AMD module id
    # @deps - dependency module names (already de-duplicated by the caller)
    my ($name, @deps) = @_;
    # One quoted dependency per line, comma+tab separated.
    my $deps = join(",\n\t", map {"'$_'"} @deps);
    return <<"eDef";
define('$name',
[
$deps
],
function (\$) {
eDef
}
| eapearson/ui-common-dos | tools/add-require.pl | Perl | mit | 1,775 |
% Toy DCG grammar: sentence = noun phrase + verb phrase.
s --> np,vp.
np --> det,n.
vp --> tv,np.
vp --> v.
det --> [the].
det --> [a].
det --> [every].
n --> [man].
n --> [woman].
n --> [park].
% NOTE(review): 'loves' appears both as a noun and a transitive verb below --
% possibly a deliberate ambiguity demo, possibly a typo. Confirm.
n --> [loves].
tv --> [loves].
tv --> [likes].
v --> [walks].
% State helpers: the hidden DCG difference-list pair is (ab)used here as a
% threaded state (a list of Key=Value counters), not as a token list.
=(X,L,L) :-            % peek: unify X with the current state, state unchanged
	member(X,L).
+(X,L,[X|L]).          % push X onto the state
++(X,L0,[X=Val|L]) :-  % increment counter X in the state ...
	less1(L0,X=Val0,L),!,
	Val is Val0 + 1.
++(X,L,[X=1|L]).       % ... or create it with value 1
less1([A|B], A, B).    % select/3-style: remove one element from a list
less1([A|B], C, [A|D]) :-
	less1(B, C, D).
fail(_,_) :-           % always-failing DCG goal
	fail.
print(X,L,L) :-        % side effect: print X; state passes through unchanged
	print(X),nl.
% Walk a word list, counting words (and animals) in the threaded state,
% then print the final state.
main(Words) :- go(Words, [],_).
go(W) --> reset,run(W), report.
reset --> +(words=0).
run([]) --> [].
run([H|T]) --> ++(words),run1(H), run(T).
run1(X) --> animal(X),!,++(animal).
run1(_) --> [].
animal(bird) --> [].
animal(dog) --> [].
report --> =(X), print(X),fail.  % print the state, then backtrack to succeed
report --> [].
:- main([s]).
| timm/timmnix | pro/dcg.pl | Perl | mit | 750 |
#!/usr/bin/perl
#
# pwgen 1.4
#
# Usage: pwgen [length] [specials]
#
# length - an optional argument indicating the length of the password
# specials - use '1' to force use of special characters
#
# This will generate random passwords of the specified or default length.
# Requires the Perl package Math::Random::Secure to produce
# cyptographically secure passwords.
#
# Copyright (C) 2013 - Paul E. Jones <paulej@packetizer.com>
# Permission to use, copy, modify and distribute this software is granted.
#
use strict;
use Math::Random::Secure qw(irand);
# Define the default password length
$main::default_password_length = 16;
#
# GeneratePassword
#
# Description
# This routine will generate a password and return it as a string.
# By default, it will not utilize special characters like "~" in
# passwords, but if the second argument is a 1, it will. Note that
# use of special characters provides only minimum additional strenth,
# yet they are not very friendly for humans. For details, visit
# https://secure.packetizer.com/pwgen/.
#
# Parameters
# length [in]
# The length of the password
# special [in]
# Indicates whether to use special characters other than
# the letters A-Z, a-z, and digits 0-9.
#
# Returns
# A string containing the password, or an empty string if there
# was an error producing the password.
#
sub GeneratePassword
{
    # Produce a random password of $length characters.
    # $special == 1 draws from the full 93-character pool (alphanumerics plus
    # punctuation); otherwise only the first 62 entries (0-9, a-z, A-Z) are used.
    # Returns the password string (empty for a non-positive length).
    my ($length, $special) = @_;

    # Pool layout: indices 0..61 are alphanumeric, 62..92 are specials.
    my @pwchars = (
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd',
        'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r',
        's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F',
        'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T',
        'U', 'V', 'W', 'X', 'Y', 'Z', '~', '`', '!', '@', '#', '$', '%', '^',
        '&', '*', '(', ')', '_', '+', '=', '-', '{', '}', '|', '\\', ']', '[',
        ':', '"', '\'', ';', '<', '>', '?', '/', '.'
    );

    # Decide the pool size once instead of re-testing inside the loop.
    my $pool_size = ($special == 1) ? 93 : 62;

    my $password = "";
    for (1 .. $length) {
        $password .= $pwchars[irand($pool_size)];
    }
    return $password;
}
#
# MAIN
#
# Parse optional [length] and [specials] command-line arguments, then print
# one generated password followed by a newline.
{
    my $password_length;
    my $use_special_characters = 0;
    #
    # Grab the requested password length from the command-line
    #
    if ($#ARGV >= 0)
    {
        $password_length = $ARGV[0];
        # Fall back to the default for zero, negative, or non-numeric input.
        # NOTE(review): a non-numeric argument would warn under "use warnings";
        # the silent fallback looks intentional -- confirm.
        if (!($password_length > 0))
        {
            $password_length = $main::default_password_length;
        }
    }
    else
    {
        $password_length = $main::default_password_length;
    }
    #
    # Use special characters?
    #
    if ($#ARGV >= 1)
    {
        if ($ARGV[1] == 1)
        {
            $use_special_characters = 1;
        }
    }
    # We will not utilize special char
    print GeneratePassword($password_length,$use_special_characters) . "\n";
}
| ximenpo/simple-cpp | inc/simple/_third/pwgen/pwgen.pl | Perl | mit | 3,065 |
%Source: Ports of programs found in TPDB/C/AProVE_numeric
%query:test_fun(g,g).
% source: http://cl2-informatik.uibk.ac.at/mercurial.cgi/TPDB/file/72cccd64ec42/C/AProVE_numeric/svcomp_c.02.c
% Entry point: start the nested-loop port with step counter C = 0.
test_fun(X, Y) :- loop1(X, Y, 0).
% Outer loop: while X >= 0, bump X by 1, reset Y to 1, run the inner loop.
loop1(X, Y, C) :- X >= 0, X1 is X + 1, Y1 is 1, loop2(X1, Y1, C).
loop1(X, Y, C) :- X < 0.
% Inner loop: count Y up toward X (tracking steps in C); once Y catches up,
% drop X by 2 and return to the outer loop.
loop2(X, Y, C) :- X > Y, Y1 is Y + 1, C1 is C + 1, loop2(X, Y1, C1).
loop2(X, Y, C) :- X =< Y, X1 is X - 2, loop1(X1, Y, C).
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Prolog/AProVE_numeric/svcomp-c-02-c.pl | Perl | mit | 444 |
/* * <module>
% This module defines the way we lay out 2-D grids into room
%
% Logicmoo Project PrologMUD: A MUD server written in Prolog
% Maintainer: Douglas Miles
% Dec 13,2035
%
*/
% :-swi_module(world_2d,[]).
:-export(((
check_for_fall/3,
dir_offset/5,
doorLocation/5,
grid_size/4,
in_grid/2,
in_grid_rnd/2,
in_world_move/3,
is_3d/1,
loc_to_xy/4,
move_dir_target/3,
number_to_dir/3,
reverse_dir/2,
round_loc/8,
round_loc_target/8,
to_3d/2))).
:- include(prologmud(mud_header)).
:- do_gc.
grid_dist(L1,L2,Dist):- to_3d(L1,L13D),to_3d(L2,L23D),dist(L13D,L23D,Dist),!.
dist(_,_,5).
==> prologHybrid(pathBetween_call(tRegion,vtDirection,tRegion)).
% pathBetween_call(From,DirS,To):-string(DirS),!,atom_string(Dir,DirS),!,any_to_dir(Dir,Dir2),pathDirLeadsTo(From,Dir2,To),same(Dir,Dir2).
pathBetween_call_0(From,Dir,To):-any_to_dir(Dir,Dir2),is_asserted(pathDirLeadsTo(From,Dir2,To)),same(Dir,Dir2).
pathBetween_call(From,Dir,To):-pathBetween_call_0(From,DirS,To),same(Dir,DirS).
% 5x5 rooms are average
%% to_3d(L1,L13D):-compound(L1)->L13D=L1; room_center(L1,X,Y,Z),L13D = xyz(L1,X,Y,Z).
to_3d(xyzFn(L1,X,Y,Z),xyzFn(L1,X,Y,Z)):- nonvar(L1),!.
to_3d(L1,xyzFn(L1,X,Y,Z)):-room_center(L1,X,Y,Z),!.
center_xyz(MaxX,MidX):- MidX is MaxX div 2 + MaxX mod 2.
room_center(Region,X,Y,Z):-
grid_size(Region,MaxX,MaxY,MaxZ),
center_xyz(MaxX,X),
center_xyz(MaxY,Y),
center_xyz(MaxZ,Z),!,
dmsg(todo("get room size and calc center ",Region)).
loc_to_xy(LOC,X,Y,xyzFn(Region,X,Y,1)):- locationToRegion(LOC,Region),!.
loc_to_xy(Region,X,Y,xyzFn(Region,X,Y,1)).
is_3d(LOC):- compound(LOC).
% Quintus random(1,MaxX,X) and random(1,MaxY,Y)
grid_size(Room,MaxX,MaxY,MaxZ):- var(Room),!,tRegion(Room),grid_size(Room,MaxX,MaxY,MaxZ).
grid_size(Region,MaxX,MaxY,MaxZ):- fail,
typeGrid(What,1,L),length(L,MaxX),isaOrEq(Region,What),!,
maxZ(MaxZ),findall(1,typeGrid(What,_,_),LL),length(LL,MaxY),!.
grid_size(Room,MaxX,MaxY,MaxZ):- nonvar(Room), MaxX = 5 ,MaxY = 5 ,maxZ(MaxZ).
maxZ(2).
isaOrEq(Region,What):- isa(Region,What).
isaOrEq(Region,What):- =@=(Region,What).
in_grid(LocName,Var):-var(LocName),!,no_repeats_old([LocName,Var],(tRegion(LocName),in_grid_rnd(LocName,Var))).
in_grid(LocName,Var):-var(Var),!,(in_grid_rnd(LocName,Var);in_grid_rnd(LocName,Var);in_grid_rnd(LocName,Var)).
in_grid(LocName,Var):-in_grid_no_rnd(LocName,Var).
in_grid_no_rnd(xyzFn(LocName,X,Y,Z),xyzFn(LocName,X,Y,Z)) :- nonvar(X),!.
in_grid_no_rnd(LocName,xyzFn(LocName,X,Y,Z)) :- !,
grid_size(LocName,MaxX,MaxY,MaxZ),!,between(1,MaxX,X),between(1,MaxY,Y),between(1,MaxZ,Z).
in_grid_no_rnd(LocName,LocName).
in_grid_rnd(LocName,xyzFn(LocName,X,Y,1)) :-
grid_size(LocName,MaxX,MaxY,_MaxZ),
between(1,100,_),
X is (1 + random(MaxX-2)),
Y is (1 + random(MaxY-2)).
% in_grid_rnd(LocName,xyzFn(LocName,1,1,1)).
% for now not useing grids
init_location_grid(LocName):-
isa(LocName,LocType),
init_location_grid(LocName,LocType),!.
init_location_grid(LocName,LocType):-
isa(LocName,LocType),
init2(LocName,LocType,1,1).
% process map file (world.map.pl)
init2(LocName,LocType,Y,1) :-
call_u(gridValue(LocName,1,Y,L)),
!,
init3(LocName,LocType,xyzFn(LocName,1,Y,_),L).
init2(_LocName,_LocType,_,_).
init3(LocName,LocType,xyzFn(LocName,_,Y,1),[]) :-
!,
X is Y + 1,
init2(LocName,LocType,X,1).
init3(LocName,LocType,xyzFn(LocName,X,Y,1),[O|T]) :-
typeHasGlyph(Type, O),
rez_loc_object(xyzFn(LocName,X,Y,1),Type),
K is X + 1,
init3(LocName,LocType,xyzFn(LocName,K,Y,1),T).
% rez_loc_object(_,0):-!.
rez_loc_object(XY,Type):-
gensym(Type,Name2),
Name = xyN(XY,Name2),
assert_isa(Name,Type),
ain(mudAtLoc(Name,XY)),!,
find_and_call(add_missing_instance_defaults(Name)).
%prologDynamic(mudNearbyObjs(tObj,tObj)).
%prologDynamic(mudNearbyObjs(tObj,tObj)).
%predicateConventionMt(mudNearbyObjs(tObj,tObj),user).
mudNearbyObjs(X,Y):-mudAtLoc(X,L1),mudAtLoc(Y,L2),mudNearbyLocs(L1,L2).
is_location(Obj):-var(Obj),!,fail.
is_location(xyzFn(_,_,_,_)):-!.
is_location(Obj):-!,isa(Obj,tRegion),!.
locationToRegion(Obj,RegionIn):-locationToRegion_0(Obj,Region)->sanity((nonvar(Region),tRegion(Region))),!,RegionIn=Region.
locationToRegion_0(Obj,Obj):-var(Obj),dmsg(warn(var_locationToRegion(Obj,Obj))),!.
locationToRegion_0(xyzFn(Region,_,_,_),Region2):-nonvar(Region),!,locationToRegion_0(Region,Region2).
locationToRegion_0(Obj,Obj):-nonvar(Obj),!,isa(Obj,tRegion),!.
locationToRegion_0(Obj,Region):-nonvar(Obj),must(localityOfObject(Obj,Location)),!,
locationToRegion_0(Location,Region).
locationToRegion_0(Obj,Obj):-dmsg(warn(locationToRegion(Obj,Obj))),!.
:-export(mudNearbyLocs/2).
mudNearbyLocs(L1,L2):- var(L1),nonvar(L2),!,mudNearbyLocs(L2,L1).
mudNearbyLocs(L1,L2):- nonvar(L1),nonvar(L2),L2=xyzFn(_,_,_,_),locationToRegion(L1,R),!,lc_tcall(locs_near_i(R,L2)).
mudNearbyLocs(L1,L2):- nonvar(L1),nonvar(L2),locationToRegion(L1,R1),locationToRegion(L2,R2),!,mudNearbyRegions(R1,R2).
mudNearbyLocs(L1,L2):- must((quietly(mudNearbyRegions(R1,R2)),in_grid_no_rnd(R1,L1),in_grid_no_rnd(R2,L2))).
% :- decl_not_mpred(locs_near_i,2).
:-export(locs_near_i/2).
locs_near_i(L1,L2):- locationToRegion(L1,R),in_grid_no_rnd(R,L2).
locs_near_i(L1,L2):- locationToRegion(L1,R),pathBetween_call(R,_,R2),in_grid_no_rnd(R2,L2).
mudNearbyRegions(R1,R2):-pathBetween_call(R1,_,R2).
mudNearbyRegions(R1,R1).
% 345345 instTypeProps(OfAgent,agent,[facing(F),atloc(L)]):- dfsdfd ignore((nonvar(OfAgent),create_someval(facing,OfAgent,F),create_someval(atloc,OfAgent,L))).
% CANT transitive_other(mudAtLoc,1,Obj,What):-mudInsideOf(Obj,What).
is_at(Obj,Where):-localityOfObject(Obj,Where).
is_at(Obj,Where):-mudAtLoc(Obj,Where).
is_at(Obj,Where):-mudSubPart(What,Obj),is_at(What,Where).
% ((tObj(Obj), ~(mudPossess(_,Obj)))==>spatialInRegion(Obj)).
tPathway(Obj)==>spatialInRegion(Obj).
localityOfObject(Obj,Region),tRegion(Region)==> inRegion(Obj,Region).
mudAtLoc(Obj,LOC),{locationToRegion(LOC,Region)},tRegion(Region)==> inRegion(Obj,Region).
prologHybrid(mudInsideOf/2).
% :-sanity(( requires_storage((mudInsideOf(_G3775190, _G3775191):-is_asserted(mudStowing(_G3775191, _G3775190)))) )).
mudInsideOf(Inner,Outer):-loop_check(mudStowing(Outer,Inner)).
mudInsideOf(Inner,Outer):-loop_check(mudContains(Outer,Inner)).
moves_with(Obj1,Obj2):-nonvar(Obj2),!,moves_with(Obj2,Where),localityOfObject(Where,Obj1).
moves_with(Obj1,Obj2):-moves_with_sym(Obj1,Obj2).
moves_with(Obj1,Obj2):-moves_with_sym(Obj2,Obj1).
moves_with_sym(Obj1,Obj2):-localityOfObject(Where,Obj1),moves_with(Obj2,Where).
mudLocOnSurface(Clothes,Agent):-loop_check(wearsClothing(Agent,Clothes),fail).
:-export(same_regions/2).
same_regions(Agent,Obj):-inRegion(Agent,Where1),dif(Agent,Obj),inRegion(Obj,Where2),Where1=Where2.
==>(prologHybrid(inRegion(tObj,tRegion))).
%prologPTTP(localityOfObject(tObj,tSpatialthing)).
%:- ensure_universal_stub(prologPTTP,inRegion/2).
%:- ensure_universal_stub(prologPTTP,mudTestAgentWearing/2).
==>(prologHybrid(mudAtLoc/2)).
==>(meta_argtypes(mudAtLoc(tObj,tSpatialThing))).
% compute the most specific location description
mostSpecificLocalityOfObject(Obj,Where):-
one_must(is_asserted(mudAtLoc(Obj,Where)),one_must(is_asserted(localityOfObject(Obj,Where)),is_asserted(inRegion(Obj,Where)))).
% :- (rtrace,trace).
% objects can be two places x,y,z's at once
((spatialInRegion(Obj),mudAtLoc(Obj,NewLoc),
{(mudAtLoc(Obj,OldLoc), OldLoc\==NewLoc)})
==>
~mudAtLoc(Obj,OldLoc)).
% objects are placed by default in center of region
((spatialInRegion(Obj), inRegion(Obj,Region), {
\+ tPathway(Obj), \+ lookup_u(mudAtLoc(Obj,xyzFn(Region,_,_,_)))},
{in_grid_rnd(Region,LOC)})
==>
mudAtLoc(Obj,LOC)).
% objects cannot be in two localities (Regions?) at once
((spatialInRegion(Obj),localityOfObject(Obj,NewLoc),
{(localityOfObject(Obj,OldLoc), OldLoc\==NewLoc)})
==>
~localityOfObject(Obj,OldLoc)).
% if something leaves a room get rid of old location
((spatialInRegion(Obj),inRegion(Obj,NewRegion),
{(mudAtLoc(Obj,OldLoc), OldLoc\=xyzFn(NewRegion,_,_,_))})
==>
~mudAtLoc(Obj,OldLoc)).
% if something leaves a room get rid of old inRegion/2
((spatialInRegion(Obj),inRegion(Obj,NewRegion),
{dif(NewRegion,OldLoc),(inRegion(Obj,OldLoc), OldLoc\=NewRegion)})
==>
~inRegion(Obj,OldLoc)).
:- ain((inRegion(Obj,Region)==> {ain((spatialInRegion(Obj),tRegion(Region)))})).
% create pathway objects and place them in world
/*
(pathDirLeadsTo(Region,Dir,R2)/ground(pathDirLeadsTo(Region,Dir,R2)),
{ mudExitAtLoc(Region,Dir,LOC), Obj = apathFn_BAD1(Region,Dir) }) ==>
(tPathway(Obj),localityOfObject(Obj,Region),mudAtLoc(Obj,LOC)).
*/
:- ain(tPathway(apathFn(Region,Dir)) ==> mudDoorwayDir(Region,apathFnAA(Region,Dir),Dir)).
mudExitAtLoc(Region,Dir,xyzFn(Region,X,Y,Z)):- call_u(calc_from_center_xyz(Region,Dir,2,X,Y,Z)).
% :-kif_tell(localityOfObject(A,B) & localityOfObject(B,C) ==> localityOfObject(A,C)).
:- kb_shared(mudSubPart/2).
:- kb_shared(predInterArgIsa/1).
:- kb_shared(relationAllExists/3).
==>singleValuedInArgDefault(localityOfObject, 2, isMissing).
mudAtLoc(Who,xyzFn(Loc,_,_,_))==>localityOfObject(Who,Loc).
genls(tHominid,tAgent).
genls(tHumanBody,tBodyPart).
predInterArgIsa(mudSubPart(tBodyPart,tBodyPart)).
/* TODO Re-Enable
relationAllExists(mudSubPart,tHominid,tHumanBody).
relationAllExists(mudSubPart,tHumanBody,tBodyPart).
relationAllExists(mudSubPart,tHumanBody,isEach(tHumanHead,tHumanNeck,tHumanUpperTorso,tHumanLowerTorso,tHumanPelvis,tHumanArms,tHumanLegs)).
relationAllExists(mudSubPart,tHumanHead,isEach(tHumanFace,tHumanHair)).
*/
predPredicateToFunction(Pred,SubjT,ObjT,FullNameFnO):-
is_asserted(predPredicateToFunction(Pred,SubjT,ObjT,FullNameFn)) *-> FullNameFnO=FullNameFn ;
(i_name('i',ObjT,Obj),i_name(Obj,Pred,ObjPred),i_name('Of',SubjT,OfSubj),concat_atom([ObjPred,OfSubj,'Fn'],FullNameFn)),simplifyFullName(FullNameFn,FullNameFnO).
simplifyFullName(FullNameFn,FullNameFn).
find_instance_of(Pred,Subj,Obj):-
relationAllExists(Pred,SubjT,ObjT),
isa(Subj,SubjT),
((is_asserted(t(Pred,Subj,Obj)),isa(Obj,ObjT)) *-> true ; (predPredicateToFunction(Pred,SubjT,ObjT,PredFn), Obj =.. [PredFn,Subj])).
mudInsideOf(Inner,Outer)==>mudSubPart(Outer,Inner).
wearsClothing(Agent,Clothes)==>mudSubPart(Agent,Clothes).
% mudSubPart(Subj,Obj):- (nonvar(Subj);nonvar(Obj)),!,test_tl(infThirdOrder), find_instance_of(mudSubPart,Subj,Obj).
% mudSubPart(face,isEach(eyes,nose,mouth)).
% mudSubPart([upper_torso,arms,left_arm,left_hand,left_digits]).
% mudSubPart([upper_torso,arms,right_arm,right_hand,right_digits]).
% mudSubPart([pelvis,legs,left_leg,left_foot,left_toes]).
% mudSubPart([pelvis,legs,right_leg,right_foot,right_toes]).
is_in_world(Var):- is_ftVar(Var),!,trace_or_throw(var_is_in_world(Var)).
is_in_world(apathFn(A,B)):- ground(apathFn(A,B)),!.
is_in_world(Obj):-isa_asserted(Obj,tRegion),!.
is_in_world(Obj):-lookup_u(mudAtLoc(Obj,_)),!.
is_in_world(Obj):-lookup_u(mudStowing(Who,Obj)),!,is_in_world(Who).
is_in_world(Obj):-lookup_u(mudSubPart(Who,Obj)),!,is_in_world(Who).
put_in_world(Obj):- mudAtLoc(Obj,_XYZFn),!.
put_in_world(Obj):- is_in_world(Obj),!.
put_in_world(Obj):- localityOfObject(Obj,Loc),
in_grid(Loc,XYZFn),unoccupied(Obj,XYZFn),!,
ain(mudAtLoc(Obj,XYZFn)),
ain(mudNeedsLook(Obj,vTrue)).
put_in_world(Obj):- random_xyzFn(LOC),ain(mudAtLoc(Obj,LOC)),
ain(mudNeedsLook(Obj,vTrue)).
/*
% :-export decl_database_hook/2. action_info
:-export(deduce_facts/2).
:-export(create_random_fact/1).
:-export( hooked_random_instance/3).
%:-export fact_always_true/1.
:-export( fact_maybe_deduced/1).
:-export( fact_is_false/2).
:-dynamic fact_is_false/2.
*/
prologHybrid(mudInsideOf(tObj,tObj)).
% facts that cant be true
%fact_is_false(mudAtLoc(Obj,_LOC),mudInsideOf(Obj,What)) :- nonvar(Obj),is_asserted(mudInsideOf(Obj,What)),not(isa(What,tRegion)).
%fact_is_false(mudAtLoc(Obj,LOC),mudInsideOf(Obj,What)) :- nonvar(Obj),(mudInsideOf(Obj,What)),not(mudAtLoc(What,LOC)).
%fact_is_false(localityOfObject(Obj,_LOC),mudInsideOf(Obj,What)) :- nonvar(Obj),(mudInsideOf(Obj,What)),!.
% facts that must be true
% suggest a deducable fact that is always defiantely true but not maybe asserted
%TODO USE EVER? fact_always_true(localityOfObject(apathFn(Region,Dir),Region)):-is_asserted(pathDirLeadsTo(Region,Dir,_)).
fact_always_true(localityOfObject(Obj,Region)):- is_asserted(mudAtLoc(Obj,LOC)),locationToRegion(LOC,Region),!.
(((localityOfObject(_,_),{localityOfObject(apathFn(Region,Dir),Region)},
\+ pathDirLeadsTo(Region, Dir, _) ) ==>
\+ localityOfObject(apathFn(Region,Dir),Region))).
% suggest a deducable fact that is probably true but not already asserted
%TODO USE EVER? fact_maybe_deduced(localityOfObject(Obj,Region)):- is_asserted(mudAtLoc(Obj,LOC)),locationToRegion(LOC,Region),!.
%TODO USE EVER? fact_maybe_deduced(localityOfObject(apathFn(Region,Dir),Region)):-is_asserted(pathDirLeadsTo(Region,Dir,_)).
% create_and_assert_random_fact(_):- t_l:noDBaseHOOKS(_),!.
create_and_assert_random_fact(Fact):- fail,must(create_random_fact(Fact)),aina(Fact).
% suggest a random fact that is probably is not already true
create_random_fact(G) :- into_functor_form(t,G,MPred),G\=@=MPred,!,create_random_fact(MPred).
create_random_fact(G) :- is_asserted(G),!,dmsg((create_random_fact(G) :- is_asserted(G))).
create_random_fact(t(mudAtLoc,Obj,LOC)) :- !,nonvar(Obj),is_asserted(localityOfObject(Obj,Region)),!,((in_grid(Region,LOC),unoccupied(Obj,LOC),
\+ ( ~ mudAtLoc(Obj,LOC)))).
create_random_fact(t(localityOfObject,Obj,Region)) :- !, nonvar(Obj),not_asserted((localityOfObject(Obj,_))),
if_defined(asserted_or_deduced(localityOfObject(Obj,Region))).
create_random_fact(t(Other,Obj,Default)) :- nonvar(Obj),argIsa(Other,2,Type),random_instance_no_throw(Type,Default,ground(Default)),!.
% suggest random values
hooked_random_instance(vtDirection,Dir,Test) :- my_random_member(Dir,[vNorth,vSouth,vEast,vWest,vNE,vNW,vSE,vSW]),Test,!.
hooked_random_instance(ftInt,3,Test):-call(Test),dmsg(random_instance(ftInt,3,Test)),dmsg(hooked_random_instance(ftInt,3,Test)),!,fail.
random_region(LOC):- var(LOC),findall(O,isa(O,tRegion),LOCS),my_random_member(LOC,LOCS).
random_xyzFn(LOC):-
must_det(random_instance(tRegion,Region,true)),
in_grid_rnd(Region,LOC),!.
random_xyzFn(xyzFn('Area1000',1,1,1)):- fail, dmsg(trace_or_throw(mpred_not_loaded)).
unoccupied(_,Loc):- not_asserted((mudAtLoc(_,Loc))),!.
unoccupied(_,_):-!.
unoccupied(Obj,Loc):- loop_check(unoccupied_ilc(Obj,Loc),not_asserted((mudAtLoc(_,Loc)))),!.
unoccupied_ilc(Obj,Loc):- is_occupied(Loc,What),!,What=Obj.
unoccupied_ilc(_,_).
is_occupied(Loc,What):- is_asserted(mudAtLoc(What,Loc)),!.
is_occupied(Loc,What):- locationToRegion(Loc,Region),localityOfObject(What,Region),put_in_world(What),mudAtLoc(What,Loc),!.
% Used all over the place
% Transforms location based on cardinal direction given
calc_xyz(Region1,Dir,force(X1,Y1,Z1),X2,Y2,Z2):-
to_3d(Region1,xyzFn(_,X,Y,Z)),
get_dir_offset(Dir,1,OX,OY,OZ),
X2 is X+ (OX*X1),Y2 is Y+OY*Y1,Z2 is Z+OZ*Z1.
calc_from_center_xyz(Region1,Dir,R,X2,Y2,Z2):-
room_center(Region1,X,Y,Z),
get_dir_offset(Dir,R,OX,OY,_),
X2 is X+ OX ,Y2 is Y+ OY, Z2 is Z.
prologBuiltin(random_path_dir/1).
system:sys_random_path_dir(Dir):- random_path_dir(Dir).
% random_path_dir(Dir):- nonvar(Dir),random_path_dir(Dir0),Dir=Dir0,!.
random_path_dir(Dir):- no_repeats(random_path_dir0(Dir)).
random_travel_dir(Dir):- no_repeats(random_path_dir1(Dir)).
random_path_dir0(Dir):- call(call,random_instance(vtBasicDir,Dir,true)).
random_path_dir1(Dir):- call(call,random_instance(vtBasicDirPlusUpDown,Dir,true)).
random_path_dir1(Dir):- call(call,random_instance(vtDirection,Dir,true)).
from_dir_target(LOC,Dir,XXYY):- is_3d(LOC),!,
move_dir_target(LOC,Dir,XXYY).
from_dir_target(Agent,Dir,XXYY):-
mudAtLoc(Agent,RegionXYZ),
move_dir_target(RegionXYZ,Dir,XXYY).
move_dir_target(RegionXYZ,Dir,XXYY):-
move_dir_target(RegionXYZ,Dir,1,XXYY).
move_dir_target(RegionXYZ,DirS,Force,XXYY):-
any_to_atom(DirS,Dir),
once(((calc_xyz(RegionXYZ,Dir,force(Force,Force,Force),X,Y,Z)),
(locationToRegion(RegionXYZ,Region1)),
(round_loc_target(Region1,X,Y,Z,Region2,X2,Y2,Z2)),
XXYY = xyzFn(Region2,X2,Y2,Z2),
sanity(ground(XXYY)))),
check_ahead_for_ground(XXYY),!.
move_dir_target(RegionXYZ,Dir,_Force,XXYY):-
any_to_string(Dir,DirS),
locationToRegion(RegionXYZ,Region1),!,
pathBetween_call(Region1,DirS,Region2),
in_grid_rnd(Region2,XXYY),
XXYY = xyzFn(Region2,_X2,_Y2,_Z2),
sanity(ground(XXYY)),!,
check_ahead_for_ground(XXYY),!.
round_loc_target(Region1,X,Y,Z,Region3,X3,Y3,Z3):-
round_loc(Region1,X,Y,Z,Region2,X2,Y2,Z2),!,
Region2=Region3,X2=X3,Y2=Y3,Z2=Z3.
round_loc(Region1,X,Y,Z,Region2,X2,Y2,Z2):-
compute_dir(Region1,X,Y,Z,Dir),!,
round_loc_dir(Region1,X,Y,Z,Dir,Region2,X2,Y2,Z2),!.
round_loc_dir(Region1,X,Y,Z,'',Region2,X2,Y2,Z2):-!,
X2=X,Y2=Y,Z2=Z,Region2=Region1.
round_loc_dir(Region1,X,Y,Z,Dir,Region2,X2,Y2,Z2):-
any_to_dir(Dir,DirLong),
pathBetween_call(Region1,DirLong,Region2),!,
grid_size(Region1,X1,Y1,Z1),
calc_xyz(xyzFn(Region2,X,Y,Z),Dir,force(-X1,-Y1,-Z1),X2,Y2,Z2),!.
round_loc_dir(Region1,X,Y,Z,_Dir,Region2,X2,Y2,Z2):-Region2=Region1,X2=X,Y2=Y,Z2=Z.
prologBuiltin(compute_dir/5).
compute_dir(Region1,X,Y,Z,Dir):-
grid_size(Region1,MaxX,MaxY,MaxZ),
((X<1 -> EW=vWest ; X > MaxX -> EW=vEast ; EW= ''),
(Y<1 -> NS=vNorth ; Y > MaxY -> NS=vSouth ; NS= ''),
(Z<1 -> UD=vDown ; Z > MaxZ -> UD=vUp ; UD= '')),
atomic_list_concat_catch([NS,EW,UD],'',Dir),!.
prologBuiltin(get_dir_offset/5).
get_dir_offset(Dir,F,OX,OY,OZ):- sanity(nonvar(Dir)),
dir_offset(Dir,F,OX,OY,OZ),!.
get_dir_offset(reverseOf(Dir),F,OX,OY,OZ):- !,get_dir_offset((Dir),F,X,Y,Z),!, OX is -X, OY is -Y, OZ is -Z.
get_dir_offset(Dir,F,OX,OY,OZ):- any_to_atom(Dir,DirA),
dir_offset(DirA,F,OX,OY,OZ),!.
get_dir_offset(Dir,F,OX,OY,OZ):- any_to_string(Dir,DirS),
dir_offset(DirS,F,OX,OY,OZ),!.
p2c_dir2('s','vSouth').
p2c_dir2('w','vWest').
p2c_dir2('u','vUp').
p2c_dir2('d','vDown').
p2c_dir2('e','vEast').
p2c_dir2('n','vNorth').
:-export(is_any_dir/1).
is_any_dir(Dir):-var(Dir),!,fail.
is_any_dir(Dir):-any_to_dir(Dir,_).
:-export(any_to_dir/2).
any_to_dir(D,D):-var(D),!.
any_to_dir(S,D):-string(S),string_to_atom(S,A),any_to_dir(A,D),!.
any_to_dir(D,D):-dir_offset(D,_,_,_,_),!.
any_to_dir(A,D):-p2c_dir2(D,A),!.
any_to_dir(D,O):-atom(D),sub_atom(D, 0, 1, _, S),toLowercase(S,L),p2c_dir2(L,O),!.
any_to_dir(D,D):-pathDirLeadsTo(_,D,_),!.
:-export(dir_offset/5).
% prologHybrid(dir_offset(term,int,int,int,int)).
% :-mpred_trace_all.
prologBuiltin(dir_offset/5).
% dir_offset(Dir, Force, DX, DY, DZ): unit offsets scaled by Force for each
% compass/vertical direction. Per these facts: X grows to the east, Y grows
% to the south (vNorth is -F on Y), Z grows upward; vHere moves nowhere.
dir_offset(vUp,F,0,0,F).
dir_offset(vDown,F,0,0,-F).
dir_offset(vNorth,F,0,-F,0).
dir_offset(vSouth,F,0,F,0).
dir_offset(vEast,F,F,0,0).
dir_offset(vWest,F,-F,0,0).
dir_offset(vNE,F,F,-F,0).
dir_offset(vSW,F,-F,F,0).
dir_offset(vSE,F,F,F,0).
dir_offset(vNW,F,-F,-F,0).
dir_offset(vHere,_,0,0,0).
% :-mpred_no_spy_all. with_pfa
% MergedNess -1,0,1 = contacting_at,inside,outside_near_on
with_offset(detatched,F,X,Y,Z):-dir_offset(vHere,F,X,Y,Z).
with_offset(absolute_with,F,X,Y,Z):-dir_offset(vUp,F,X,Y,Z).
with_offset(relative_from,F,X,Y,Z):-dir_offset(vDown,F,X,Y,Z).
with_offset(surrounding,F,X,Y,Z):-dir_offset(vNorth,F,X,Y,Z).
with_offset(mudInsideOf,F,X,Y,Z):-dir_offset(vSouth,F,X,Y,Z).
with_offset(on,F,X,Y,Z):-dir_offset(vEast,F,X,Y,Z).
with_offset(tPartofObj,F,X,Y,Z):-dir_offset(vWest,F,X,Y,Z).
facing_offset(at,F,X,Y,Z):-dir_offset(vHere,F,X,Y,Z).
facing_offset(above,F,X,Y,Z):-dir_offset(vUp,F,X,Y,Z).
facing_offset(below,F,X,Y,Z):-dir_offset(vDown,F,X,Y,Z).
facing_offset(left,F,X,Y,Z):-dir_offset(vWest,F,X,Y,Z).
facing_offset(right,F,X,Y,Z):-dir_offset(vEast,F,X,Y,Z).
facing_offset(behind,F,X,Y,Z):-dir_offset(vSouth,F,X,Y,Z).
facing_offset(front,F,X,Y,Z):-dir_offset(vNorth,F,X,Y,Z).
% baseKB:decl_database_hook(clause( retract,_),mudAtLoc(Agent,_)):-padd(Agent,mudNeedsLook(vTrue)).
% mudAtLoc(Agent,_)==> mudNeedsLook(Agent,vTrue).
mudAtLoc(Agent,_)==>{padd(Agent,mudNeedsLook(vTrue))}.
% dir_mult(X,Y,Z,X1,Y1,Z1,X2,Y2,Z2):- X2 is X * X1,Y2 is Y * Y1,Z2 is Z * Z1.
% Used in move.pl,push.pl and climb.pl
% Move agent (usually). Used to relocate agent'vSouth location.
in_world_move(LOC,Agent,DirS) :-
string_to_atom(DirS,Dir),
ignore(is_asserted(mudAtLoc(Agent,LOC))),
must_det((locally(t_l:infAssertedOnly(mudAtLoc),in_world_move0(LOC,Agent,Dir)),
is_asserted(mudAtLoc(Agent,LOC2)),
LOC2 \== LOC)),!.
can_world_move(LOC,_Agent,Dir) :- check_behind_for_ground(LOC),move_dir_target(LOC,Dir,_).
in_world_move0(LOC,Agent,Dir) :-
any_to_dir(Dir,DirS),
% rtrace(padd(Agent,mudFacing(DirS))),
% must((
ain(mudFacing(Agent,DirS)),
% call_u(mudFacing(Agent,DirOther)),
%DirOther==DirS)),
sanity((is_asserted(mudAtLoc(Agent,LOC)))),
check_behind_for_ground(LOC),
move_dir_target(LOC,Dir,XXYY),!,
must_det_l_pred(show_call,(
dmsg(move_dir_target(LOC,DirS,XXYY)),
locationToRegion(LOC,Region1),
locationToRegion(XXYY,Region2),
((expire_dont_add, clr(mudAtLoc(Agent,LOC)))),
%rtrace,
call((expire_dont_add, ain_expanded(mudAtLoc(Agent,XXYY)),
%nortrace,
sanity((is_asserted(mudAtLoc(Agent,XXYY)))),
sanity((clause_u(mudAtLoc(Agent,LOC2)),LOC2 \== LOC)))),
ifThen(( Region1\==Region2) ,raise_location_event(LOC,actNotice(reciever,actLeave(Agent,Region1,to(Dir))))),
reverse_dir(Dir,Rev),
ifThen(( Region1\==Region2) ,raise_location_event(XXYY,actNotice(reciever,actEnter(Agent,Region2,from(Rev))))),!,
check_for_fall(LOC,XXYY,Agent))).
check_behind_for_ground(LOC):-nonvar(LOC).
check_ahead_for_ground(XXYY):-nonvar(XXYY),
to_3d(XXYY,xyzFn(L1,X,Y,Z)),
grid_size(L1,MX,MY,MZ),
inside_grid(L1,X,Y,Z,MX,MY,MZ).
inside_grid(_L1,X,Y,Z,MX,MY,MZ):-is_between(1,MX,X),is_between(1,MY,Y),is_between(1,MZ,Z).
is_between(L,H,V):- H >= V,L =< V.
% Used for every move
% Does the agent take a header off a high object?
% check_for_fall(+FromLoc, +ToLoc, +Agent): after a move, charge the agent a
% "fall" if the height drop from the object at FromLoc to the object at ToLoc
% exceeds 1. The second clause makes the check always succeed overall.
check_for_fall(LOC,XXYY,Agent) :-
	mudAtLoc(HighObj,LOC),
	props(HighObj,mudHeight(Hh)),
	% if nothing is there pretend it is 1
	% NOTE(review): in the empty-destination branch Hl is bound to 1 but
	% LowObj stays unbound, so the next props/2 goal enumerates arbitrary
	% objects of height 1 rather than using the pretend floor -- looks like
	% a bug; confirm intended behavior.
	(\+ (mudAtLoc(_,XXYY)) -> Hl = 1; mudAtLoc(LowObj,XXYY)),
	props(LowObj,mudHeight(Hl)),
	Hd is Hh - Hl,
	Hd > 1,
	call_update_stats(Agent,fall).
check_for_fall(_,_,_).
% Reverses the direction returned by number_to_direction
% Used for fleeing
:- export(reverse_dir/2).
:- public(reverse_dir/2).
% reverse_dir(+Dir, -Rev): opposite of a direction term. Strings are converted
% to atoms, reversed, and converted back; atoms use the reverse_dir0/2 table;
% anything else falls through to the structural reverse_dir1/2 rules.
reverse_dir(W,R):-string(W),atom_string(A,W),!,reverse_dir0(A,RA),atom_string(RA,R),!.
reverse_dir(A,R):-reverse_dir0(A,R)*->true;reverse_dir1(A,R).
% Structural cases: unwrap reverseOf/1, swap the endpoints of a path term,
% or wrap an unknown direction in reverseOf/1.
reverse_dir1(reverseOf(Was),RWas):-nonvar(Was),!,RWas=Was.
reverse_dir1(skPathFn(Direction,R2,R1),skPathFn(Direction,R1,R2)):-nonvar(Direction),!.
reverse_dir1(Was,reverseOf(Was)):-nonvar(Was),!.
% Opposite-direction lookup table (stored in both orders for O(1) matching).
reverse_dir0(vSouth,vNorth).
reverse_dir0(vEast,vWest).
reverse_dir0(vNorth,vSouth).
reverse_dir0(vWest,vEast).
reverse_dir0(vUp,vDown).
reverse_dir0(vDown,vUp).
reverse_dir0(vNW,vSE).
reverse_dir0(vNE,vSW).
reverse_dir0(vSW,vNE).
reverse_dir0(vSE,vNW).
% Yet another hash table to covert numbers into aDirectionsFn (or the reverse).
num_near_reverse(1,vNW,vHere).
num_near_reverse(2,vNorth,vHere).
num_near_reverse(3,vNE,vHere).
num_near_reverse(4,vWest,vHere).
num_near_reverse(6,vEast,vHere).
num_near_reverse(7,vSW,vHere).
num_near_reverse(8,vSouth,vHere).
num_near_reverse(9,vSE,vHere).
num_near_reverse(0,vDown,vHere).
num_near_reverse(5,vUp,vHere).
% Translates numbers returned from scan_lists_aux/3 (the number of the location)
% into thier relative aDirectionsFn.
number_to_dir(1,vNW,vNW).
number_to_dir(2,vNorth,vNW).
number_to_dir(3,vNorth,vNorth).
number_to_dir(4,vNorth,vNE).
number_to_dir(5,vNE,vNE).
number_to_dir(6,vWest,vNW).
number_to_dir(7,vNW,vHere).
number_to_dir(8,vNorth,vHere).
number_to_dir(9,vNE,vHere).
number_to_dir(10,vEast,vNE).
number_to_dir(11,vWest,vWest).
number_to_dir(12,vWest,vHere).
number_to_dir(14,vEast,vHere).
number_to_dir(15,vEast,vEast).
number_to_dir(16,vWest,vSW).
number_to_dir(17,vSW,vHere).
number_to_dir(18,vSouth,vHere).
number_to_dir(19,vSE,vHere).
number_to_dir(20,vEast,vSE).
number_to_dir(21,vSW,vSW).
number_to_dir(22,vSouth,vSW).
number_to_dir(23,vSouth,vSouth).
number_to_dir(24,vSouth,vSE).
number_to_dir(25,vSE,vSE).
% Scans through list of perceptions (as returned by look_percepts(Agent,L) or look_all(NearAgt,_,_,_,L,_))
% for an object,returns the direction in which the object lies.
list_object_dir_sensed(_,List,Type,Dir) :-
!,
scan_lists_aux(List,Type,1,N),
number_to_dir(N,Dir,_).
list_object_dir_near(List,Type,Dir) :-
!,
scan_lists_aux(List,Type,1,N),
num_near_reverse(N,Dir,_).
scan_lists_aux([Loc|_],Type,N,N) :-
member(Obj,Loc),
isa(Obj,Type),
!.
scan_lists_aux([_|Rest],Type,M,N) :-
Mtemp is M + 1,
!,
scan_lists_aux(Rest,Type,Mtemp,N).
doorLocation(_Room,3,0,_Z,vNorth).
doorLocation(_Room,2,0,_Z,vNorth).
doorLocation(_Room,4,0,_Z,vNorth).
doorLocation(_Room,3,6,_Z,vSouth).
doorLocation(_Room,2,6,_Z,vSouth).
doorLocation(_Room,4,6,_Z,vSouth).
doorLocation(_Room,0,2,_Z,vWest).
doorLocation(_Room,0,3,_Z,vWest).
doorLocation(_Room,0,4,_Z,vWest).
doorLocation(_Room,6,2,_Z,vEast).
doorLocation(_Room,6,3,_Z,vEast).
doorLocation(_Room,6,4,_Z,vEast).
doorLocation(_Room,6,0,_Z,vNE).
doorLocation(_Room,6,6,_Z,vSE).
doorLocation(_Room,0,0,_Z,vNW).
doorLocation(_Room,0,6,_Z,vSW).
doorLocation(_Room,_X,_Y,_Z,_Dir):-!,fail.
| TeamSPoon/logicmoo_workspace | packs_sys/prologmud/prolog/prologmud/vworld/world_2d.pfc.pl | Perl | mit | 25,813 |
#! /usr/bin/perl
###############################################################################
# Copyright (C) 1994 - 2006, Performance Dynamics Company #
# #
# This software is licensed as described in the file COPYING, which #
# you should have received as part of this distribution. The terms #
# are also available at http://www.perfdynamics.com/Tools/copyright.html. #
# #
# You may opt to use, copy, modify, merge, publish, distribute and/or sell #
# copies of the Software, and permit persons to whom the Software is #
# furnished to do so, under the terms of the COPYING file. #
# #
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY #
# KIND, either express or implied. #
###############################################################################
# mm1.pl
# Updated by NJG on Sat, Apr 8, 2006 per erratum for p. 220
use pdq;
## INPUTS ##
# Measured parameters
$MeasurePeriod = 3600; # seconds
$ArrivalCount = 1800;
$ServiceVisits = 10;
# Derived parameters
$ArrivalRate = $ArrivalCount / $MeasurePeriod;
$ServiceTime = 0.10; # seconds
$ServiceDemand = $ServiceVisits * $ServiceTime; # seconds
# Check the queue meets stability condition (arrival rate < 1/demand)
$ServiceCap = 1 / $ServiceDemand;
if($ArrivalRate >= $ServiceCap) {
    print "Error: Arrival rate $ArrivalRate ";
    # BUGFIX: the message previously printed the literal word "ServiceCap"
    # (missing '$' sigil) instead of the computed capacity value.
    print "exceeds service capacity $ServiceCap !!\n";
    exit;
}
$NodeName = "FIFO";
$WorkName = "Work";
# Initialize PDQ internal variables
pdq::Init("FIFO Example");
# Define the FIFO queue
$pdq::nodes = pdq::CreateNode($NodeName, $pdq::CEN, $pdq::FCFS);
# Define the queueing circuit type and workload
$pdq::streams = pdq::CreateOpen($WorkName, $ArrivalRate);
# Change the units used by PDQ::Report()
pdq::SetWUnit("Requests");
pdq::SetTUnit("Seconds");
# Define service demand due to the workload at FIFO
pdq::SetDemand($NodeName, $WorkName, $ServiceDemand);
# Solve the PDQ model
pdq::Solve($pdq::CANON);
# NOTE: Must use CANON-ical method since this is an open circuit
## OUTPUTS ##
# Generate a report
pdq::Report();
| peterlharding/PDQ | examples/ppdq_2005/pdq_models/mm1.pl | Perl | mit | 2,453 |
#!/usr/bin/perl -w
#
# Copyright (c) AB_Life 2011
# Writer: xuxiong <xuxiong19880610@163.com>
# Program Date: 2011.
# Modifier: xuxiong <xuxiong19880610@163.com>
# Last Modified: 2011.
my $ver="1.0.0";
use strict;
use Getopt::Long;
use Data::Dumper;
use FindBin qw($Bin $Script);
use File::Basename qw(basename dirname);
#Before writing your programme, you must write the detailed time, descriptions, parameters and their explanations. Meanwhile, annotate your programme in English if possible.
# Parse command-line options: -q <fastq file> is mandatory.
my %opts;
GetOptions(\%opts,"q=s" );
if ( !defined($opts{q}) ) {
	print <<"USAGE_END";
Description: This programme is used for filtering uniq reads from bed file after alignment with bowtie
	Version: $ver
Usage: perl $0
	-q           fq file                        must be given
USAGE_END
	exit;
}
###############Time_start##########
my $Time_Start = sub_format_datetime(localtime(time()));
#print "\nStart Time :[$Time_Start]\n\n";
###################################
my $fq          = $opts{q};
my $total_count = 0;    # number of FASTQ records read
my $uniq_tag    = 0;    # number of distinct sequences seen
my %reads       = ();   # sequence => 1, for uniqueness tracking
open(FQ, $fq) or die $!;
while (<FQ>) {
	# A FASTQ record is 4 lines: @id / sequence / + / quality.
	# The while-condition consumed the @id line; read the sequence next.
	my $seq = <FQ>;
	last unless defined $seq;    # guard against a truncated final record
	chomp($seq);
	$uniq_tag++ if not defined($reads{$seq});
	$reads{$seq} = 1;
	# Skip the '+' separator and the quality line.
	<FQ>; <FQ>;
	$total_count++;
}
close FQ;
print "$fq\t";
print "$total_count\t";
if ($total_count > 0) {
	print $uniq_tag, "(", sprintf("%3.2f%%", $uniq_tag / $total_count * 100), ")\n";
}
else {
	# Empty input: avoid division by zero in the percentage.
	print $uniq_tag, "(0.00%)\n";
}
###############Time_end###########
my $Time_End = sub_format_datetime(localtime(time()));
#print "\nEnd Time :[$Time_End]\n\n";
###############Sub_format_datetime
# Format a localtime()-style time list as "YYYY-MM-DD hh:mm:ss".
# Accepts the full 9-element list returned by localtime(); the
# weekday/yearday/DST fields are simply ignored.
sub sub_format_datetime {
	my ($sec, $min, $hour, $mday, $mon, $year) = @_;
	return sprintf(
		"%4d-%02d-%02d %02d:%02d:%02d",
		$year + 1900, $mon + 1, $mday, $hour, $min, $sec
	);
}
| ablifedev/ABLIRC | ABLIRC/bin/Dataclean/stat_uniq_tag.pl | Perl | mit | 1,796 |
#!/usr/bin/perl -w
use 5.010;
use strict;

# Read two values from STDIN and print their product, or a "wrong
# input" note when either value is not numeric.
chomp( my $num1 = <STDIN> );
chomp( my $num2 = <STDIN> );

my $res = 0;
my $error_msg = "wrong input, ";
# Rewritten from a comma-operator one-liner with a postfix 'if'
# (and a stray comma before the 'if') into a plain conditional block.
if ( is_numeric($num1) && is_numeric($num2) ) {
    $res       = $num1 * $num2;
    $error_msg = "";
}
say $error_msg, "\$res is $res";
# True when the argument looks like a decimal number: optional '-',
# at least one digit, optional '.', optional fractional digits
# (so "5." matches, "." and "" do not).
# Bug fix: the original assigned to $_, clobbering the caller's $_;
# use a lexical instead and return an explicit boolean.
sub is_numeric{
    my ($value) = @_;
    return ( defined $value and $value =~ /^-?\d+\.?\d*$/ );
}
| OldSix/Learning-Perl | chapter02/02.pl | Perl | mit | 343 |
###############################################################################
#Comment.pm
#Last Change: 2009-01-19
#Copyright (c) 2009 Marc-Sebastian "Maluku" Lucksch
#Version 0.8
####################
#This file is part of the Dotiac::DTL project.
#http://search.cpan.org/perldoc?Dotiac::DTL
#
#Comment.pm is published under the terms of the MIT license, which basically
#means "Do with it whatever you want". For more information, see the
#license.txt file that should be enclosed with libsofu distributions. A copy of
#the license is (at the time of writing) also available at
#http://www.opensource.org/licenses/mit-license.php .
###############################################################################
package Dotiac::DTL::Comment;
use strict;
use warnings;
use base qw/Dotiac::DTL::Tag/;
our $VERSION = 0.8;

# Node object for a {# ... #} Django template comment.
# Holds the literal text preceding the tag (p), the comment body
# (comment) and the next node in the template chain (n).
sub new {
	my ($class, $text, $comment, $next) = @_;
	my $self = bless { p => $text }, $class;
	$self->{comment} = $comment;
	$self->{n}       = $next;
	return $self;
}

# Render to STDOUT: the comment itself produces no output, only the
# preceding literal text, then delegate to the next node.
sub print {
	my ($self, @args) = @_;
	print $self->{p};
	$self->{n}->print(@args);
}

# Render to a string: preceding text plus the rest of the chain.
sub string {
	my ($self, @args) = @_;
	return $self->{p} . $self->{n}->string(@args);
}

# Compile step: dump the literal text as "my $textN = ...;" into the
# generated perl file, then continue with the next node.
# NOTE(review): relies on Data::Dumper being loaded by the framework
# (e.g. via Dotiac::DTL::Tag) — it is not use'd in this module.
sub perl {
	my ($self, $fh, $id, @rest) = @_;
	print $fh "my ";
	print $fh Data::Dumper->Dump([ $self->{p} ], ["\$text$id"]);
	return $self->{n}->perl($fh, $id + 1, @rest);
}

# Compile step: nothing to initialize for a comment; pass through.
sub perlinit {
	my ($self, $fh, $id, @rest) = @_;
	return $self->{n}->perlinit($fh, $id + 1, @rest);
}

# Compile step for print-style output: emit the stored text and the
# original template comment as perl '#' comments.
sub perlprint {
	my ($self, $fh, $id, $level, @rest) = @_;
	my $indent = "\t" x $level;
	print $fh $indent, "print \$text$id;\n";
	my $body = $self->{comment};
	$body =~ s/\n/\n$indent#/g;    # keep multi-line comments commented
	print $fh "$indent# $body\n";
	return $self->{n}->perlprint($fh, $id + 1, $level, @rest);
}

# Compile step for string-building output; same as perlprint but
# appends to $r instead of printing.
sub perlstring {
	my ($self, $fh, $id, $level, @rest) = @_;
	my $indent = "\t" x $level;
	print $fh $indent, "\$r.=\$text$id;\n";
	my $body = $self->{comment};
	$body =~ s/\n/\n$indent#/g;
	print $fh "$indent# $body\n";
	return $self->{n}->perlstring($fh, $id + 1, $level, @rest);
}

# Compile step for eval'd variables: comments contribute nothing.
sub perleval {
	my ($self, $fh, $id, @rest) = @_;
	return $self->{n}->perleval($fh, $id + 1, @rest);
}

# Count compiled text slots: one per comment node.
sub perlcount {
	my ($self, $id) = @_;
	return $self->{n}->perlcount($id + 1);
}

# Runtime evaluation: a comment evaluates to nothing.
sub eval {
	return;
}
1;
__END__
=head1 NAME
Dotiac::DTL::Comment - Stores a Django template comment tag.
=head1 SYNOPSIS
Template file
Some text....
{# A comment, #}
Some other Text.{# Another comment
over multiple lines #} Some more text.
=head1 DESCRIPTION
Everything between the starting {# and the next #} is skipped while generating
the output of the template.
During Template compiling it is converted to normal perl comment tags:
Template code...
{# Some comment
more comment #}
Template code..
Will be converted to:
Perl code...
# Some comment
# more comment
Perl code...
The module itself has no real use, it's just used by the Dotiac::DTL
parser to store those comments.
=head1 SEE ALSO
L<http://www.djangoproject.com>, L<Dotiac::DTL>
=head1 BUGS
If you find a bug, please report it.
=head1 LEGAL
Dotiac::DTL was built according to http://docs.djangoproject.com/en/dev/ref/templates/builtins/.
=head1 AUTHOR
Marc-Sebastian Lucksch
perl@marc-s.de
=cut
| gitpan/Dotiac | lib/Dotiac/DTL/Comment.pm | Perl | mit | 3,281 |
% Test module: wraps an rdf_meta predicate with rdet determinism checking.
:- module(wrap_rdf, [
    wrap_rdf/0
]).

:- use_module(library(semweb/rdf_db)).
:- use_module(prolog/rdet).

% Register the example namespace prefix used below.
:- rdf_register_prefix(ex, 'http://example.com/').

% Declare rdf_test/1 as checked-deterministic (rdet) and have rdf_meta
% expand its argument as an RDF resource.
:- rdet(rdf_test/1).
:- rdf_meta(rdf_test(r)).

% True when S has rdf:type ex:something in the RDF store.
rdf_test(S):-
    rdf(S, rdf:type, ex:something).

% Entry point: triggers the wrapped, determinism-checked call.
wrap_rdf:-
    rdf_test(ex:thing).
| rla/rdet | tests/wrap_rdf.pl | Perl | mit | 297 |
#!/usr/local/bin/perl
#-------------------------------------
# DTA Finder,
# (C)1997-2000 Harvard University
#
# W. S. Lane/M. A. Baker
#
# v3.1a
#
# licensed to Finnigan
#-------------------------------------
##
## For muchem-specific definitions and cgilib routines
##
################################################
# find and read in standard include file
{
my $path = $0;
$path =~ s!\\!/!g;
$path =~ s!^(.*)/[^/]+/.*$!$1/etc!;
unshift (@INC, "$path");
require "microchem_include.pl";
}
################################################
# Parse the CGI request into %FORM (helper from microchem_include.pl).
&cgi_receive();
# Emit the standard MS page header with title and color.
&MS_pages_header ("DTA Finder", "#00009C");
print "<HR><P>\n";
# Debugging aid: dump all submitted form fields to the server error log.
#while (($k, $v) = each %FORM) { print STDERR ("$k=$v\n"); }
##
## Pull the search parameters out of the submitted form.
##
$directories = $FORM{"directory"};

# Mass-filter inputs, whitespace stripped.
$givenmass = $FORM{"mass"};
$givenmass =~ s/\s//g;
$minmass = $FORM{"minmass"};
$minmass =~ s/\s//g;
$maxmass = $FORM{"maxmass"};
$maxmass =~ s/\s//g;

# Mass filtering is requested when any of the three mass fields is set.
# Bug fix: the last clause used to test "defined $minmass" instead of
# "defined $maxmass", so searching by a maximum mass alone was ignored.
$by_mass = (defined $givenmass and $givenmass ne "")
    || (defined $minmass and $minmass ne "") || (defined $maxmass and $maxmass ne "");

# Ion-filter inputs: expected N-/C-terminal sequences, uppercased.
$nterm = $FORM{"Nterm"};
$nterm =~ s/\s//g;
$nterm =~ tr/a-z/A-Z/;
$cterm = $FORM{"Cterm"};
$cterm =~ s/\s//g;
$cterm =~ tr/a-z/A-Z/;
$by_seq = (defined $nterm and $nterm ne "") || (defined $cterm and $cterm ne "");

if ($by_seq) {
    $use_mono  = $FORM{"MassType"};
    $ion_tol   = $FORM{"ion_tolerance"};
    $grep_outs = $FORM{"grep_outs"}; # if this is defined, we scan ".out" files
    $dtascan   = $FORM{"dtascan"};   # if this is defined, we scan .dta files for ions
    # A sequence search needs at least one scan mode enabled.
    if ((!$grep_outs) && (!$dtascan)) {
        undef $by_seq;
    }
}

# Without directories or any usable filter, just (re)display the form.
if ((!defined $directories) || (!$by_seq && !$by_mass)){
    &get_alldirs();
    &output_form();
    exit;
}
##
## otherwise, we will scan each directory for matching dta files
##
@dirs = split(", ", $directories);

# Compute the effective mass window.  With a target MH+ the window is
# mass +/- tolerance, clipped against the optional min/max bounds.
if ($by_mass) {
    $mass_tol = $FORM{"mass_tolerance"};
    $floor    = defined $givenmass ? &max ($givenmass - $mass_tol, $minmass) : $minmass;
    $ceiling  = defined $givenmass ? &min ($givenmass + $mass_tol, $maxmass) : $maxmass;
}

##
## here we read in all the DTAs, and exclude
## by mass if asked.
##
foreach $dir (@dirs) {
    opendir (DIR, "$seqdir/$dir") || next;
    @dtas = grep { m!\.dta$! } readdir (DIR);
    closedir DIR;
    @matching_dtas = ();
    foreach $dta (@dtas) {
        # The first line of a .dta file holds "MH+ charge".
        open (DTA, "$seqdir/$dir/$dta") || next;
        $line = <DTA>;
        close DTA;
        ($mass, $charge) = split (' ', $line);
        # Drop spectra outside the requested mass window.
        next if ($floor and $mass < $floor);
        next if ($ceiling and $mass > $ceiling);
        # Remember mass/charge keyed by "dir/file" for later display.
        $mass{"$dir/$dta"} = $mass;
        $charge{"$dir/$dta"} = $charge;
        push (@matching_dtas, $dta);
    }
    # %matching maps directory -> comma-joined list of surviving DTAs.
    $matching{$dir} = join (", ", @matching_dtas);
}

##
## exclude and score by presence of ion if asked.
##
if ($by_seq) {
    &ion_check();
}

# Tally the total number of matching DTAs across all directories.
$totalnum = 0;
foreach $dir (@dirs) {
    @matching_dtas = split (", ", $matching{$dir});
    $totalnum += $#matching_dtas + 1;
}
##
## organize this data for display
##
print "<div>\n";
if ($by_mass) {
    print ("Searching for MH+ from $floor to $ceiling.<br>\n");
}
print "</div>\n";

# CODE ADDED FOR THE DTA VCR BUTTON
# Every listed DTA URL is also emitted as a hidden form field so the
# "DTA VCR" tool can step through the whole result set.
$vcr_count = 0;
print <<EOF;
<form action="$dtavcr" method="post" target="_blank">
<INPUT TYPE=SUBMIT CLASS=button VALUE="DTA VCR">
<INPUT TYPE=hidden NAME="DTAVCR:conserve_space" VALUE=1>
EOF
print "<br><br><div>\n";
if ($by_seq) {
    print ("Searching for ");
    if ($nterm and $cterm) {
        print ("starting N-term sequence of $nterm, and ending C-term sequence of $cterm.<br>\n");
    } elsif ($nterm) {
        print ("starting N-term sequence of $nterm.<br>\n");
    } elsif ($cterm) {
        print ("ending C-term sequence of $cterm.<br>\n");
    }
}
print ("<b>$totalnum</b> found.<p>\n");
print ("<ul>\n");
# One <ul> section per directory, one <li> per matching DTA.
foreach $dir (@dirs) {
    @matching_dtas = split (", ", $matching{$dir});
    $num = $#matching_dtas + 1;
    print qq(<li><a href="$webseqdir/$dir/" target=_blank>$dir</a>: <b>$num</b> found.\n);
    print ("<ul>\n");
    foreach $dta (@matching_dtas) {
        $url = &urlize ("$dir/$dta");
        $mass = $mass{"$dir/$dta"};
        $charge = $charge{"$dir/$dta"};
        $score = $score{"$dir/$dta"} if $by_seq;
        print qq(<li><a href="$url" target=_blank>$dta</a>: MH+: $mass, z: $charge);
        if ($by_seq) {
            # Show the ion-scan score; the best score is bolded.
            if ($score != 0) {
                print (", ");
                print ("<b>") if ($score == $maxscore);
                print qq(Score: $score);
                print ("</b>") if ($score == $maxscore);
            }
            # Link to the .out file when SuperGrep found the sequence.
            $str = "";
            if ($ntermsupergrepmatch{"$dir/$dta"}) {
                $str = "Nterm";
            }
            if ($ctermsupergrepmatch{"$dir/$dta"}) {
                $str .= " and " if ($str ne "");
                $str .= "Cterm";
            }
            if ($str ne "") {
                $url = &out_urlize("$dir/$dta");
                print qq(, <b><a href="$url">$str SuperGrep match</a></b>);
            }
        }
        print "\n";
        # added by cmw for DTA VCR (8/27/99):
        print qq(<input type=hidden name="DTAVCR:link$vcr_count" value="$url">\n);
        $vcr_count++;
    }
    print ("</ul>\n");
}
print ("</ul>\n");
print "</div>\n";
print("</form>");
#END of DTA VCR BUTTON CODE
# converts an .out file to an URL for display
sub out_urlize {
    # Map a "dir/file.dta" path to the URL of the Sequest .out viewer
    # for the same spectrum (relies on file globals $showout, $seqdir).
    my ($dta_path) = @_;
    (my $out_path = $dta_path) =~ s!\.dta$!.out!;
    return "$showout?OutFile=$seqdir/$out_path";
}
# this takes a DTA filename and converts it to a URL for display
sub urlize {
    # Build the fuzzyions viewer URL for a DTA file, carrying the user's
    # N-/C-terminal sequences along as query parameters (relies on file
    # globals $fuzzyions, $seqdir, $nterm, $cterm and url_encode()).
    my ($dta_path) = @_;
    my $has_nterm = (defined $nterm and $nterm ne "");
    my $has_cterm = (defined $cterm and $cterm ne "");

    my $link = "$fuzzyions?Dta=" . &url_encode("$seqdir/$dta_path")
             . "&numaxis=1"
             . "&Ntermspace=$nterm"
             . "&Ctermspace=$cterm";

    # When only one terminus was supplied, tell the viewer which side
    # of the peptide to walk.
    if ($has_cterm and not $has_nterm) {
        $link .= "&side_to_walk=Cterm";
    }
    elsif ($has_nterm and not $has_cterm) {
        $link .= "&side_to_walk=Nterm";
    }
    return $link;
}
# Render the DTA Finder search form: directory picker, mass filter and
# ion filter.  Defaults come from %DEFS_DTA_FINDER; the directory list
# comes from @ordered_names/%fancyname (filled by get_alldirs()).
sub output_form {
    # Pre-compute " CHECKED" attributes for the radio/checkbox defaults.
    $checked{$DEFS_DTA_FINDER{"Mono/Avg"}} = " CHECKED";
    $checked{"ionscan"} = " CHECKED" if ($DEFS_DTA_FINDER{"DTA ion scan"} eq "yes");
    $checked{"supergrep"} = " CHECKED" if ($DEFS_DTA_FINDER{"Long shot *.out SuperGrep"} eq "yes");
    print <<EOM;
<FORM ACTION="$ourname" METHOD=POST>
<TABLE CELLSPACING=5 BORDER=0>
<TR>
<TD>
Pick a list of directories in which to search:<br>
<span class="dropbox"><SELECT SIZE=15 MULTIPLE NAME="directory">
EOM
    # One <OPTION> per sequest directory, labelled with its fancy name.
    foreach $dir (@ordered_names) {
        print qq(<OPTION VALUE="$dir">$fancyname{$dir}\n);
    }
    print <<EOM;
</SELECT></span>
</TD>
<TD>
<h4>Mass Filter:</h4>
<p>
MH+:
<INPUT NAME="mass" SIZE=6 VALUE="$DEFS_DTA_FINDER{"MH+"}">
+/-:
<INPUT NAME="mass_tolerance" VALUE="$DEFS_DTA_FINDER{"MH+ +/-"}" SIZE=4>
<p>
<CENTER>or</CENTER>
<p>
Min:
<INPUT NAME="minmass" SIZE=6 VALUE="$DEFS_DTA_FINDER{"Min"}">
Max:
<INPUT NAME="maxmass" SIZE=6 VALUE="$DEFS_DTA_FINDER{"Max"}">
<p>
<INPUT TYPE=SUBMIT CLASS=button VALUE="Show">
<INPUT TYPE=RESET CLASS=button VALUE="Clear">
</TD>
<TD width=20></TD>
<TD>
<h4>Ion Filter:</h4>
<p>
Enter a short expected sequence:
<p>
Nterm:
<INPUT NAME="Nterm" SIZE=5 VALUE="$DEFS_DTA_FINDER{"Nterm"}">
<SPACER TYPE=HORIZONTAL SIZE=20>
Cterm:
<INPUT NAME="Cterm" SIZE=5 VALUE="$DEFS_DTA_FINDER{"Cterm"}">
<p>
Tolerance:
<INPUT NAME="ion_tolerance" SIZE=5 VALUE="$DEFS_DTA_FINDER{"Tolerance"}">
<p>
<INPUT NAME="MassType" TYPE=RADIO VALUE=1$checked{"Mono"}>Mono
<INPUT NAME="MassType" TYPE=RADIO VALUE=0$checked{"Avg"}>Avg
<p>
<INPUT TYPE="CHECKBOX" NAME="dtascan"$checked{"ionscan"}>DTA ion scan
<br>
<INPUT TYPE="CHECKBOX" NAME="grep_outs"$checked{"supergrep"}>Long shot *.out SuperGrep
</TD>
</TR>
</TABLE>
</FORM>
<span style="color:#00009C" class="largetimes"><B><I>Instructions:</I></B></span>
<p>
You can input an MH+ with tolerance to limit the DTAs by MH+,
input N-term or C-term sequences to scan the heavy ions for, or do both.
<p>
The two filters operate on a logical AND basis: DTAs shown will fit the mass limits
entered and contain ions matching the sequences.
<p>The SuperGrep feature checks to see if, by crazy coincidence, Sequest matched
the Nterm or Cterm sequences in one of its output files.
EOM
}
# Scan the mass-filtered DTAs for fragment ions matching the user's
# N-/C-terminal sequences ($nterm/$cterm), and optionally SuperGrep the
# Sequest .out files for literal sequence matches.  Narrows %matching
# in place and fills %score, $maxscore, %ntermsupergrepmatch and
# %ctermsupergrepmatch for the display code.
#
# Bug fix: $sum was declared but never accumulated, so the intensity
# threshold ($sum / (2 * $numions), i.e. half the mean intensity) was
# always 0 and every matched ion counted regardless of intensity.
# $sum is now accumulated per spectrum.  Unused locals ($middle,
# $subroutine) were removed.
sub ion_check {
    my (@residues, @ions, @our_matching, %matched);

    ##
    ## calculate array of masses to subtract from MH+
    ##
    if ($dtascan) {
        # y-ion series for the N-term sequence: cumulative residue masses.
        my $m = 0.0;
        foreach my $letter (split ("", $nterm)) {
            $m += $use_mono ? $Mono_mass{$letter} : $Average_mass{$letter};
            push (@residues, $m);
        }
        # b-ion series for the C-term sequence: offset by water (18.0).
        $m = 18.0;
        foreach my $letter (split ("", $cterm)) {
            $m += $use_mono ? $Mono_mass{$letter} : $Average_mass{$letter};
            push (@residues, $m);
        }
    } # $dtascan

    foreach my $dir (@dirs) {
        next unless $matching{$dir};
        my @candidates = split (", ", $matching{$dir});
        @our_matching = ();
        foreach my $dta (@candidates) {
            if ($dtascan) {
                # Expected fragment ion m/z values for this spectrum.
                @ions = ();
                my $mh   = $mass{"$dir/$dta"};
                my $charge = $charge{"$dir/$dta"};
                my $precursor = ($mh - 1.0) / $charge + 1.0;
                foreach my $res (@residues) {
                    my $ion = $mh - $res;
                    # For multiply charged spectra, ignore ions at or
                    # below the precursor m/z.
                    next if (($ion <= $precursor) && ($charge != 1));
                    push (@ions, $ion);
                }
                # don't bother if we don't have any ions
                if (@ions) {
                    my @score   = ();
                    my $numions = 0;
                    my $sum     = 0;
                    open (DTA, "$seqdir/$dir/$dta");
                    my $header = <DTA>; # skip the MH+/charge header line
                    while (<DTA>) {
                        my ($mz, $inten) = split;
                        $numions++;
                        $sum += $inten;   # accumulate for the threshold
                        my $i = 0;
                        foreach my $ion (@ions) {
                            # Track the strongest intensity near each expected ion.
                            $score[$i] = &max($inten, $score[$i]) if (abs ($mz - $ion) < $ion_tol);
                            $i++;
                        }
                    }
                    close DTA;
                    # Threshold: half the mean intensity of the spectrum.
                    my $threshold = $numions ? $sum / ($numions * 2) : 0;
                    my $score = 0;
                    foreach my $peak (@score) {
                        $score++ if ($peak > $threshold);
                    }
                    if ($score > 0) {
                        $score{"$dir/$dta"}   = $score;
                        $matched{"$dir/$dta"} = 1;
                        push (@our_matching, $dta);
                    }
                    $maxscore = &max($maxscore, $score);
                }
            } # $dtascan

            if ($grep_outs) {
                # Long-shot check: did Sequest itself match the given
                # terminal sequences in its .out file for this spectrum?
                (my $out = $dta) =~ s!\.dta$!.out!;
                open (OUT, "$seqdir/$dir/$out") || next;
                while (<OUT>) {
                    s!#!!g; # eliminate the "#" marks for oxidized methionines
                    if ($nterm and (m!\s\(.\)$nterm\S*$!o || m!\s.\.$nterm\S*\..$!o)) { # updated for SequestC2 format OUTfiles
                        $ntermsupergrepmatch{"$dir/$dta"} = "1";
                        if (!$matched{"$dir/$dta"}) {
                            $matched{"$dir/$dta"} = 1;
                            push (@our_matching, $dta);
                        }
                    }
                    if ($cterm and (m!$cterm$!o || m!\s.\.\S*$cterm\..$!o)) { # updated for SequestC2 format OUTfiles
                        $ctermsupergrepmatch{"$dir/$dta"} = 1;
                        if (!$matched{"$dir/$dta"}) {
                            $matched{"$dir/$dta"} = 1;
                            push (@our_matching, $dta);
                        }
                    }
                }
                close OUT;
            } # $grep_outs
        } # foreach $dta ...
        $matching{$dir} = join (", ", @our_matching);
    } # foreach $dir ...
}
| wangchulab/CIMAGE | cravatt_web/cgi-bin/flicka/dtafinder.pl | Perl | mit | 10,909 |
=head1 LICENSE
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 MODIFICATIONS
Copyright [2014-2015] University of Edinburgh
All modifications licensed under the Apache License, Version 2.0, as above.
=cut
package EnsEMBL::Web::Document::Element::Logo;

use strict;

## BEGIN LEPBASE MODIFICATIONS...
# Renders the masthead logo.  On Help pages the logo is shown bare;
# on every other page it links back to the site home page.
# Dead code removed: the mobile "e" logo markup ($e_logo, built from
# ENSEMBL_SITE_NAME_SHORT and MOBILE_LOGO_WIDTH/HEIGHT) was assigned
# but never used once the LepBase changes switched to logo_img.
sub content {
  my $self = shift;

  # Link target: explicit href when configured, else the home URL.
  my $url  = $self->href || $self->home_url;
  my $type = $self->hub->type;

  if ($type eq 'Help') {
    # Help pages: logo without the home link.
    return sprintf( '%s%s', $self->logo_img, $self->logo_print );
  }

  # All other pages: logo wrapped in a home link.
  # (site_menu output was already disabled upstream.)
  return sprintf( '<a href="%s">%s</a>%s',
    $url, $self->logo_img, $self->logo_print
  );
}
## ...END LEPBASE MODIFICATIONS

1;
| lepbase/lepbase-ensembl | modules/EnsEMBL/Web/Document/Element/Logo.pm | Perl | apache-2.0 | 1,735 |
#!/usr/bin/env perl
## emb_genome_map.pl -- genome features as a Unison embeddable page
use strict;
use warnings;
use FindBin;
use lib "$FindBin::RealBin/../perl5", "$FindBin::RealBin/../perl5-ext";
use Unison::WWW;
use Unison::WWW::EmbPage;
use Unison;
use Unison::Exceptions;
use Unison::Utilities::genome_features;
# Build the embeddable page object; Vars() returns the CGI parameters.
my $p = new Unison::WWW::EmbPage;
my $u = $p->{unison};
my $v = $p->Vars();

# verify parameters: need genasm_id, params_id, and either a
# chr/gstart/gstop region or a pseq_id.
if (
    !(
        defined $v->{genasm_id}
        && (
            (
                defined $v->{chr} && defined $v->{gstart} && defined $v->{gstop}
            )
            || ( defined $v->{pseq_id} )
        )
        && ( defined $v->{params_id} )
    )
  ) {
    $p->die(&usage);
}

# merge defaults and options
my %opts = ( %Unison::Utilities::genome_features::opts, %$v );

# get tempfiles for the genome-feature png and imagemap
my ( $png_fh, $png_fn, $png_urn ) = $p->tempfile( SUFFIX => '.png' );

my $imagemap = '';

try {
    my $panel =
      Unison::Utilities::genome_features::genome_features_panel( $u, %opts );

    # write the png to the temp file
    $png_fh->print( $panel->gd()->png() );
    $png_fh->close();

    # assemble the imagemap as a string: one clickable <AREA> per
    # feature box drawn on the panel.
    foreach my $box ( $panel->boxes() ) {
        my ( $feature, $x1, $y1, $x2, $y2 ) = @$box;
        my $fstart = $feature->start;    # should be unique
        my $fname  = $feature->name;     # should be unique
        next if not defined $fname;
        if ( my ($pseq_id) = $fname =~ m/^Unison:(\d+)/ ) {
            # Unison sequence feature: link to the sequence summary,
            # with the best annotation (preferring HUMAN) as tooltip.
            my $ba = $u->best_annotation( $pseq_id, 'HUMAN' )
              || $u->best_annotation( $pseq_id );
            $imagemap .= qq(<AREA SHAPE="RECT" COORDS="$x1,$y1,$x2,$y2");
            $imagemap .= qq( TOOLTIP="$ba") if defined $ba;
            $imagemap .= qq( HREF="pseq_summary.pl?pseq_id=$pseq_id">\n);
        } else {
            # Otherwise assume a microarray probe named "chip:probe".
            my ( $chip, $probe ) = split( /:/, $fname );
            $imagemap .= qq(<AREA SHAPE="RECT" COORDS="$x1,$y1,$x2,$y2");
            $imagemap .= qq( TOOLTIP="$chip:$probe") if defined $chip and defined $probe;
            # NOTE(review): on non-Genentech instances the <AREA> tag is
            # never closed with '>' — verify whether this is intentional.
            if ( $p->is_genentech_instance() ) {
                $imagemap .= qq( HREF="http://research/maprofile/cgi-bin/maprofile.cgi?probeid=$probe">\n);
            }
        }
    }
}
catch Unison::Exception with {
    $p->die(shift);
};

# Emit the page: the image plus its client-side imagemap.
print $p->render(
    "Genome Map",
    "<center><img src=\"$png_urn\" usemap=\"#GENOME_MAP\"></center>",
    "<MAP NAME=\"GENOME_MAP\">\n",
    $imagemap, "</MAP>\n"
);
#-------------------------------------------------------------------------------
# NAME: usage
# PURPOSE: return usage string
#-------------------------------------------------------------------------------
sub usage {
    # Assemble the one-line usage message shown when required CGI
    # parameters are missing.
    my $message = "USAGE: emb_genome_map.pl ? genasm_id=<gensam_id> params_id=<params_id> "
        . "[(chr=<chr> ; gstart=<gstart> ; gstop=<gstop> "
        . "|| pseq_id=<pseq_id>]";
    return ($message);
}
| unison/unison | www/emb_genome_map.pl | Perl | apache-2.0 | 2,701 |
# OpenXPKI::Server::Workflow::Activity::Tools::ParsePKCS10
# Copyright (c) 2014 by The OpenXPKI Project
# Largely copied from SCEPv2::ExtractCSR
package OpenXPKI::Server::Workflow::Activity::Tools::ParsePKCS10;
use strict;
use base qw( OpenXPKI::Server::Workflow::Activity );
use OpenXPKI::Server::Context qw( CTX );
use OpenXPKI::Exception;
use OpenXPKI::Debug;
use English;
use OpenXPKI::DN;
use Crypt::PKCS10;
use OpenXPKI::Serialization::Simple;
use Data::Dumper;
use Template;
use Digest::SHA qw(sha1_hex);
# Workflow activity entry point: parse the PKCS#10 request, optionally
# verify its signature, and write subject/SAN/key/attribute/extension
# information into the workflow context (see POD below for the full
# list of context keys).  Returns 1 on success; throws an
# OpenXPKI::Exception when the PKCS#10 structure cannot be parsed.
sub execute {
    ##! 1: 'execute'
    my $self = shift;
    my $workflow = shift;

    my $serializer = OpenXPKI::Serialization::Simple->new();

    my $context = $workflow->context();
    my $param = {}; # hash to receive the context updates
    my $config = CTX('config');

    # Activity parameter takes precedence over the context value.
    my $pkcs10 = $self->param('pkcs10');
    $pkcs10 = $context->param('pkcs10') unless($pkcs10);

    my $subject_prefix = $self->param('subject_prefix') || 'cert_';
    my $verify_signature = $self->param('verify_signature') ? 1 : 0;

    # Cleanup any existing values
    $context->param({
        'csr_subject' => '',
        'csr_subject_key_identifier' => '',
        'csr_signature_valid' => undef,
        $subject_prefix.'subject_parts' => '',
        $subject_prefix.'san_parts' => '',
        $subject_prefix.'subject_alt_name' => '',
        $subject_prefix.'info' => '',
    });

    # Source hash: records where each context value originated.
    my $source_ref = {};
    my $ctx_source = $context->param('sources');
    if ($ctx_source) {
        $source_ref = $serializer->deserialize( $ctx_source );
    }

    # extract subject from CSR and add a context entry for it
    Crypt::PKCS10->setAPIversion(1);
    my $decoded = Crypt::PKCS10->new( $pkcs10, ignoreNonBase64 => 1, verifySignature => $verify_signature);

    # if decoded is not set we have either a parse error or a signature problem
    if (!$decoded) {
        my $error = Crypt::PKCS10->error;
        # if this works now it is the signature
        $decoded = Crypt::PKCS10->new($pkcs10,
            ignoreNonBase64 => 1,
            verifySignature => 0 );

        # if its still undef the structure is broken
        OpenXPKI::Exception->throw(
            message => 'PKCS10 structure can not be parsed: ' . $error
        ) unless ($decoded);

        CTX('log')->application()->warn("PKCS#10 signature invalid ($error)");
        $param->{'csr_signature_valid'} = 0;
    } elsif ($verify_signature) {
        $param->{'csr_signature_valid'} = 1;
        CTX('log')->application()->debug("PKCS#10 signature valid");
    }

    # Parse the subject into an RFC2253 string plus a hashed DN.
    my %hashed_dn;
    my $csr_subject = $decoded->subject();
    if ($csr_subject) {
        # TODO - extend Crypt::PKCS10 to return RFC compliant subject
        my $dn = OpenXPKI::DN->new( $csr_subject );
        %hashed_dn = $dn->get_hashed_content();
        $param->{csr_subject} = $dn->get_rfc_2253_dn();
        ##! 32: 'Subject DN ' . Dumper \%hashed_dn
    }

    # Public key details: either a full parameter hash (key_params set)
    # or just the algorithm name derived from the OID.
    if ($self->param('key_params')) {
        $param->{csr_key_alg} = 'unsupported';
        $param->{csr_key_params} = {};
        # subjectPublicKeyParams may die (e.g. ECC without Crypt::PK::ECC),
        # hence the eval with a logged fallback.
        eval {
            my $key_param = $decoded->subjectPublicKeyParams();
            if ($key_param->{keytype} eq 'RSA') {
                $param->{csr_key_alg} = 'rsa';
                $param->{csr_key_params} = { key_length => $key_param->{keylen} };
            } elsif ($key_param->{keytype} eq 'DSA') {
                $param->{csr_key_alg} = 'dsa';
                $param->{csr_key_params} = { key_length => $key_param->{keylen} };
            } elsif ($key_param->{keytype} eq 'ECC') {
                $param->{csr_key_alg} = 'ec';
                $param->{csr_key_params} = { key_length => $key_param->{keylen}, curve_name => $key_param->{curve} };
            }
        };
        if ($EVAL_ERROR) {
            CTX('log')->application()->warn("Unable to handle public key");
            CTX('log')->application()->debug($EVAL_ERROR);
        }
    } else {
        my $key_alg = $decoded->pkAlgorithm || '';
        if( $key_alg eq 'rsaEncryption' ) {
            $param->{csr_key_alg} = 'rsa';
        } elsif( $key_alg eq 'ecPublicKey' ) {
            $param->{csr_key_alg} = 'ec';
        } elsif( $key_alg eq 'dsa' ) {
            $param->{csr_key_alg} = 'dsa';
        } else {
            $param->{csr_key_alg} = 'unsupported';
        }
    }

    # Extract the digest algorithm (md5/shaNNN) from the signature
    # algorithm name, e.g. "sha256WithRSAEncryption" or "ecdsa-with-SHA1".
    my @t = $decoded->signatureAlgorithm() =~ m{ (with-?(md5|sha\d+))|((md5|sha\d+)with) }ix;
    my ($csr_digest) = lc($t[1] || $t[3] || 'unknown');
    $param->{csr_digest_alg} = $csr_digest;

    # SHA1 of the raw public key, formatted AA:BB:... (matches the
    # format used in the certificates table).
    $param->{csr_subject_key_identifier} =
        uc( join ':', ( unpack '(A2)*', sha1_hex(
            $decoded->{certificationRequestInfo}{subjectPKInfo}{subjectPublicKey}[0]
        )));

    # Get the profile name and style - required for templating
    my $cert_profile = $self->param('cert_profile');
    $cert_profile = $context->param('cert_profile') unless($cert_profile);

    my $cert_subject_style = $self->param('cert_subject_style');
    $cert_subject_style = $context->param('cert_subject_style') unless($cert_subject_style);

    # Map SAN keys from ASN1 names to openssl format (all uppercased)
    # TODO this should go to a central location
    my $san_map = {
        otherName => 'otherName',
        rfc822Name => 'email',
        dNSName => 'DNS',
        x400Address => '', # not supported by openssl
        directoryName => 'dirName',
        ediPartyName => '', # not supported by openssl
        uniformResourceIdentifier => 'URI',
        iPAddress => 'IP',
        registeredID => 'RID',
    };

    my $csr_san = {};
    my @san_list;

    # Retrieve the registered SAN property names
    my @san_names = $decoded->subjectAltName();

    # Walk all san keys
    foreach my $san (@san_names) {

        my $san_type = $san_map->{$san};
        if (!$san_type) {
            # type is not supported
            next;
        }

        my @items = $decoded->subjectAltName( $san );

        # san hash
        $csr_san->{ $san_type } = \@items;

        # merge into dn, uppercase key name
        $hashed_dn{'SAN_'.uc($san_type)} = \@items;

        # push items to @san_list in the nested array format as required by
        # the csr persister
        foreach my $value (@items) {
            push @san_list, [ $san_type, $value ] if ($value);
        }
    }

    ##! 32: 'Extracted SAN ' . Dumper $csr_san
    ##! 32: 'Merged DN ' . Dumper \%hashed_dn

    # Attributes, must be a list of OIDs, separated by comma/blank
    my $attr = $self->param('req_attributes');
    my $req_attr = {};
    if ($attr) {
        my @attr = split /[\s,]+/, $attr;
        foreach my $oid (@attr) {
            my $val = $decoded->attributes($oid);
            if ($val) {
                $req_attr->{$oid} =$val;
            }
        }
        $param->{req_attributes} = $req_attr;
        $source_ref->{req_attributes} = 'PKCS10';
    }

    # Extensions, must be a list of OIDs, separated by comma/blank
    my $ext = $self->param('req_extensions');
    my $req_ext = {};
    if ($ext) {
        my @ext = split /[\s,]+/, $ext;
        foreach my $oid (@ext) {
            if ($decoded->extensionPresent($oid)) {
                $req_ext->{$oid} = $decoded->extensionValue($oid);
            }
        }
        $param->{req_extensions} = $req_ext;
        $source_ref->{req_extensions} = 'PKCS10';
    }

    # If the profile has NO ui section, we write the parsed hash and the SANs "as is" to the context
    if (!$cert_profile or !$cert_subject_style or !$config->exists(['profile', $cert_profile, 'style', $cert_subject_style, 'ui' ])) {

        $param->{$subject_prefix.'subject_parts'} = $serializer->serialize( \%hashed_dn ) ;
        $source_ref->{$subject_prefix.'subject_parts'} = 'PKCS10';

        if (scalar @san_list) {
            $param->{$subject_prefix.'subject_alt_name'} = $serializer->serialize( \@san_list );
            $source_ref->{$subject_prefix.'subject_alt_name'} = 'PKCS10';
        }

    } else {

        # Profile has a UI definition: run the profile's preset
        # templates against the parsed DN to prefill the UI fields.
        my $userinfo = CTX('session')->data->userinfo || {};

        my $cert_subject_parts = CTX('api2')->preset_subject_parts_from_profile(
            profile => $cert_profile,
            style => $cert_subject_style,
            section => 'subject',
            preset => { %hashed_dn, ( userinfo => $userinfo ) },
        );

        $param->{$subject_prefix.'subject_parts'} = $serializer->serialize( $cert_subject_parts );
        $source_ref->{$subject_prefix.'subject_parts'} = 'Parser';

        # Load the field spec for the san
        # FIXME: this implies that the id of the field matches the san types name
        # Evaluate: Replace with data from hashed_dn and preset?
        if ($csr_san) {

            my $san_names = CTX('api2')->list_supported_san();
            my $fields = CTX('api2')->get_field_definition( profile => $cert_profile, style => $cert_subject_style, section => 'san' );
            ##! 16: 'san ui definition:' . Dumper $fields
            my $cert_san_parts;
            # Get all allowed san types
            foreach my $field (@{$fields}) {
                my $keys = ref $field->{keys} ? $field->{keys} : [ $field->{id} ];
                ##! 16: 'testing keys:' . join "-", @{$keys}
                foreach my $key (@{$keys}) {
                    # hash items are mixed case
                    # user might also use wrong camelcasing
                    # the target hash is all lowercased
                    $key = lc($key);
                    my $case_key = $san_names->{$key};
                    if ($csr_san->{$case_key}) {
                        # check if it is a clonable field
                        if ($field->{clonable}) {
                            $cert_san_parts->{$key} = $csr_san->{$case_key};
                        } else {
                            $cert_san_parts->{$key} = $csr_san->{$case_key}->[0];
                        }
                    }
                }
            }
            ##! 16: 'san preset:' . Dumper $cert_san_parts
            if ($cert_san_parts) {
                $param->{$subject_prefix.'san_parts'} = $serializer->serialize( $cert_san_parts );
                $source_ref->{$subject_prefix.'san_parts'} = 'Parser';
            }
        }

        # call preset on cert_info block with userinfo from session
        my $cert_info = CTX('api2')->preset_subject_parts_from_profile(
            profile => $cert_profile,
            style => $cert_subject_style,
            section => 'info',
            preset => { userinfo => $userinfo },
        );

        if ($cert_info) {
            $param->{$subject_prefix.'info'} = $cert_info;
            $source_ref->{$subject_prefix.'info'} = 'Parser';
        }

    }

    ##! 64: 'Params to set ' . Dumper $param
    # Write all collected values plus the updated source map back.
    $context->param( $param );
    $context->param('sources' => $serializer->serialize( $source_ref) );

    return 1;
}
1;
__END__
=head1 Name
OpenXPKI::Server::Workflow::Activity::Tools::ParsePKCS10
=head1 Description
Take a pkcs10 container and extract information to the context. If a
profile name and style are given and the profile has a ui section, the
data extracted from the CSR is used to prefill the profile ui fields.
Otherwise the extracted subject and san information is put "as is" into
the context. Output definition is given below.
To get extra information from the CSR, add parameters key_params,
req_attributes and req_extensions to your activity configuration.
=head1 Configuration
=head2 Activity Parameters
=over
=item pkcs10
The PEM formatted pkcs10 request, has priority over context key.
=item cert_profile
Determines the used profile, has priority over context key.
=item cert_subject_style
Determines the used profile substyle, has priority over context key.
=item key_params
If set to a true value, details of the used public key are available
in the I<key_params> context entry. Requires Crypt::PK::ECC if ECC keys
need to be handled.
=item verify_signature
If set to a true value, the signature of the PKCS#10 container is checked
and the boolean result is written to csr_signature_valid. If not set, the
parameter is deleted from the context. It is recommended to check the
PCKS#10 container on upload already using the validator. Note that at least
the default backend will refuse broken signatures on the request to issue,
so you B<MUST> handle this.
=item subject_prefix
Prefix for context output keys to write the subject information into
(cert_subject_parts, cert_san_parts, cert_subject_alt_name).
Default is I<cert_>.
=back
=head2 Expected context values
=over
=item pkcs10
Context key to read the PKCS#10 request from when it is not set via the activity parameter.
=item cert_profile
Context key to read the certificate profile from when it is not set via the activity parameter.
=item cert_subject_style
Context key to read the certificate subject style from when it is not set via the activity parameter.
=item req_extensions
List of OIDs (or names) of request extensions; multiple items must be
separated by spaces. For each extension that is found in the request,
an item in the req_extensions context entry is created. The key is the given
name, the content is the raw data as returned by Crypt::PKCS10 and depends
on the extension.
=item req_attributes
List of OIDs (or names) of request attributes, similar to req_extensions.
=back
=head2 Context value to be written
Prefix I<cert_> can be changed by setting I<subject_prefix>.
=over
=item csr_subject
The extracted subject as string (comma seperated)
=item cert_subject_parts
If a valid profile is given, contains the preset values for all fields given
in the profiles subject section. The values are determined by running the
appropriate template string for each field with the data extracted from the
csr.
In plain mode, it contains the parsed DN as key-value pairs where the key
is the shortname of the component (e.g: OU) and the value is an array of
values found. Note that any component is an array even if it has only one
item. All items found in the SAN part are also added with a prefix "SAN_"
and all uppercased names as used by openssl (SAN_OTHERNAME, SAN_EMAIL,
SAN_DNS, SAN_DIRNAME, SAN_URI, SAN_IP, SAN_RID)
=item cert_san_parts
Only in profile mode. Contains the preset values for all fields
given in the profiles san section. The values are determined by running the
appropriate template string for each field with the data extracted from the
csr.
=item cert_subject_alt_name
Only in plain mode. All SAN items as nested array list. Each item of the
list is a two item array with name and value of one SAN item. The names
are given as required to build then openssl extension file (otherName,
email, DNS, dirName, URI, IP, RID).
=item csr_key_alg
Algorithm of the public key, one of rsa, dsa, ec, unsupported
=item csr_digest_alg
The digest algorithm used to create the signature request (e.g. md5, sha1).
=item csr_key_params
Hash holding additional information on the used public key, only present
if key_params is set. Keys depend on the type of the key.
=over
=item key_length
Size of the used public key (RSA/DSA)/curve (ECC) in bits
=item curve
ec keys only, name of the curve - can be empty if curve is not known to
the current openssl version or if custom parameters have been used.
=back
=item csr_subject_key_identifier
The key identifier of the used public key, Hex with uppercased letters.
The format is identical to the return value of the API method
get_key_identifier_from_data and the format used in the certificates table.
=item csr_signature_valid
Boolean, set only if I<verify_signature> is set; receives a literal
0/1 indicating whether the PKCS#10 container's signature could be validated.
=back
| openxpki/openxpki | core/server/OpenXPKI/Server/Workflow/Activity/Tools/ParsePKCS10.pm | Perl | apache-2.0 | 15,659 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V8::Enums::SimulationModificationMethodEnum;

# Enum module for the Google Ads API (v8) SimulationModificationMethod type.
# Generated-style module: each enum value is exported as a constant whose
# value is its own name, matching the API's JSON enum representation.
# UNSPECIFIED/UNKNOWN are the API's sentinel values; UNIFORM, DEFAULT and
# SCALING are the concrete modification methods (see the Google Ads API
# reference for their exact semantics).

use strict;
use warnings;

# Const::Exporter installs every name below as an importable constant.
use Const::Exporter enums => [
  UNSPECIFIED => "UNSPECIFIED",
  UNKNOWN => "UNKNOWN",
  UNIFORM => "UNIFORM",
  DEFAULT => "DEFAULT",
  SCALING => "SCALING"
];

1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V8/Enums/SimulationModificationMethodEnum.pm | Perl | apache-2.0 | 862 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::fortinet::fortimanager::snmp::mode::memory;
use base qw(centreon::plugins::templates::counter);
use strict;
use warnings;
sub custom_usage_perfdata {
    my ($self, %options) = @_;

    # Emit the 'used' metric in bytes. The warning/critical thresholds are
    # looked up by label and rendered against the total (cast to integers);
    # NOTE(review): presumably this converts percentage thresholds into
    # absolute byte values -- confirm against get_perfdata_for_output.
    my $total_bytes = $self->{result_values}->{total};
    $self->{output}->perfdata_add(
        label    => 'used', unit => 'B',
        value    => $self->{result_values}->{used},
        warning  => $self->{perfdata}->get_perfdata_for_output(label => 'warning-' . $self->{label}, total => $total_bytes, cast_int => 1),
        critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical-' . $self->{label}, total => $total_bytes, cast_int => 1),
        min      => 0, max => $total_bytes
    );
}
sub custom_usage_threshold {
    my ($self, %options) = @_;

    # Compare the used-percentage against the critical threshold first,
    # then warning, and return the resulting exit status label.
    # (Scalar assignment kept on purpose so threshold_check is evaluated
    # in scalar context regardless of the caller's context.)
    my $status = $self->{perfdata}->threshold_check(
        value     => $self->{result_values}->{prct_used},
        threshold => [
            { label => 'critical-' . $self->{label}, exit_litteral => 'critical' },
            { label => 'warning-' . $self->{label},  exit_litteral => 'warning' }
        ]
    );
    return $status;
}
sub custom_usage_output {
    my ($self, %options) = @_;

    # Render total/used/free as human-readable sizes plus percentages.
    my ($size_val, $size_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{total});
    my ($used_val, $used_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{used});
    my ($free_val, $free_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{free});

    return sprintf(
        "Memory Usage Total: %s Used: %s (%.2f%%) Free: %s (%.2f%%)",
        "$size_val $size_unit",
        "$used_val $used_unit", $self->{result_values}->{prct_used},
        "$free_val $free_unit", $self->{result_values}->{prct_free}
    );
}
sub custom_usage_calc {
    my ($self, %options) = @_;

    # Compute absolute and percentage memory usage for this instance from
    # the raw '<instance>_total' / '<instance>_used' values collected by
    # manage_selection(). Always returns 0 (calculation succeeded).
    $self->{result_values}->{total} = $options{new_datas}->{$self->{instance} . '_total'};
    $self->{result_values}->{used}  = $options{new_datas}->{$self->{instance} . '_used'};
    $self->{result_values}->{free}  = $self->{result_values}->{total} - $self->{result_values}->{used};

    if (defined($self->{result_values}->{total}) && $self->{result_values}->{total} > 0) {
        $self->{result_values}->{prct_free} = $self->{result_values}->{free} * 100 / $self->{result_values}->{total};
        $self->{result_values}->{prct_used} = $self->{result_values}->{used} * 100 / $self->{result_values}->{total};
    } else {
        # Guard: a device reporting a total of 0 (or missing data) would
        # otherwise raise "Illegal division by zero" and abort the plugin.
        $self->{result_values}->{prct_free} = 0;
        $self->{result_values}->{prct_used} = 0;
    }
    return 0;
}
sub set_counters {
    my ($self, %options) = @_;

    # Declare a single global (type 0) counter group named 'memory'.
    $self->{maps_counters_type} = [
        { name => 'memory', type => 0 }
    ];
    # One 'usage' counter fed by the raw 'used'/'total' values collected in
    # manage_selection(); percentage computation, output formatting, perfdata
    # and threshold checks are delegated to the custom_* callbacks above.
    $self->{maps_counters}->{memory} = [
        { label => 'usage', set => {
                key_values => [ { name => 'used' }, { name => 'total' } ],
                closure_custom_calc => $self->can('custom_usage_calc'),
                closure_custom_output => $self->can('custom_usage_output'),
                closure_custom_perfdata => $self->can('custom_usage_perfdata'),
                closure_custom_threshold_check => $self->can('custom_usage_threshold'),
            }
        },
    ];
}
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    # This mode adds no extra CLI options of its own; the generic
    # 'warning-usage' / 'critical-usage' thresholds come from the
    # counter template base class.
    $options{options}->add_options(arguments => {});

    return $self;
}
sub manage_selection {
    my ($self, %options) = @_;

    # Fortinet FortiManager memory OIDs; both values are reported in KB.
    my $oid_capacity = '.1.3.6.1.4.1.12356.103.2.1.3.0'; # fmSysMemCapacity (KB)
    my $oid_used     = '.1.3.6.1.4.1.12356.103.2.1.2.0'; # fmSysMemUsed (KB)

    my $snmp_result = $options{snmp}->get_leef(
        oids => [$oid_capacity, $oid_used],
        nothing_quit => 1
    );

    # Convert KB to bytes for the counter callbacks.
    $self->{memory} = {
        used  => $snmp_result->{$oid_used} * 1024,
        total => $snmp_result->{$oid_capacity} * 1024
    };
}
1;
__END__
=head1 MODE
Check memory usage.
=over 8
=item B<--warning-usage>
Threshold warning (in percent).
=item B<--critical-usage>
Threshold critical (in percent).
=back
=cut | Sims24/centreon-plugins | network/fortinet/fortimanager/snmp/mode/memory.pm | Perl | apache-2.0 | 4,933 |
package Paws::STS;
# Generated Paws service class for AWS Security Token Service (STS).
# The consumed roles below wire it to the AWS Query protocol with
# SigV4-signed requests and XML responses.
use Moose;
# Endpoint/service prefix and API version used when building requests.
sub service { 'sts' }
sub version { '2011-06-15' }
sub flattened_arrays { 0 }
# Retry policy: up to 5 attempts with randomized exponential backoff.
has max_attempts => (is => 'ro', isa => 'Int', default => 5);
has retry => (is => 'ro', isa => 'HashRef', default => sub {
{ base => 'rand', type => 'exponential', growth_factor => 2 }
});
# No service-specific error codes are marked retriable for STS.
has retriables => (is => 'ro', isa => 'ArrayRef', default => sub { [
] });
with 'Paws::API::Caller', 'Paws::API::EndpointResolver', 'Paws::Net::V4Signature', 'Paws::Net::QueryCaller', 'Paws::Net::XMLResponse';
has '+region_rules' => (default => sub {
    my $regioninfo;
    # Endpoint resolution rules, evaluated in order:
    #   1. China partition (cn-*)  -> *.amazonaws.com.cn
    #   2. GovCloud (us-gov*)      -> standard regional endpoint
    #   3. no region configured    -> global https://sts.amazonaws.com,
    #      with credentials scoped to us-east-1
    #   4. fallback                -> standard regional endpoint
      $regioninfo = [
    {
      constraints => [
        [
          'region',
          'startsWith',
          'cn-'
        ]
      ],
      uri => '{scheme}://{service}.{region}.amazonaws.com.cn'
    },
    {
      constraints => [
        [
          'region',
          'startsWith',
          'us-gov'
        ]
      ],
      uri => 'https://{service}.{region}.amazonaws.com'
    },
    {
      constraints => [
        [
          'region',
          'equals',
          undef
        ]
      ],
      properties => {
        credentialScope => {
          region => 'us-east-1'
        }
      },
      uri => 'https://sts.amazonaws.com'
    },
    {
      uri => 'https://{service}.{region}.amazonaws.com'
    }
  ];
  return $regioninfo;
});
sub AssumeRole {
    my $self = shift;
    # Coerce the caller's arguments into the typed request object, then
    # dispatch it through the configured caller backend.
    my $request = $self->new_with_coercions('Paws::STS::AssumeRole', @_);
    return $self->caller->do_call($self, $request);
}
sub AssumeRoleWithSAML {
    my $self = shift;
    # Coerce the caller's arguments into the typed request object, then
    # dispatch it through the configured caller backend.
    my $request = $self->new_with_coercions('Paws::STS::AssumeRoleWithSAML', @_);
    return $self->caller->do_call($self, $request);
}
sub AssumeRoleWithWebIdentity {
    my $self = shift;
    # Coerce the caller's arguments into the typed request object, then
    # dispatch it through the configured caller backend.
    my $request = $self->new_with_coercions('Paws::STS::AssumeRoleWithWebIdentity', @_);
    return $self->caller->do_call($self, $request);
}
sub DecodeAuthorizationMessage {
    my $self = shift;
    # Coerce the caller's arguments into the typed request object, then
    # dispatch it through the configured caller backend.
    my $request = $self->new_with_coercions('Paws::STS::DecodeAuthorizationMessage', @_);
    return $self->caller->do_call($self, $request);
}
sub GetCallerIdentity {
    my $self = shift;
    # Coerce the caller's arguments into the typed request object, then
    # dispatch it through the configured caller backend.
    my $request = $self->new_with_coercions('Paws::STS::GetCallerIdentity', @_);
    return $self->caller->do_call($self, $request);
}
sub GetFederationToken {
    my $self = shift;
    # Coerce the caller's arguments into the typed request object, then
    # dispatch it through the configured caller backend.
    my $request = $self->new_with_coercions('Paws::STS::GetFederationToken', @_);
    return $self->caller->do_call($self, $request);
}
sub GetSessionToken {
    my $self = shift;
    # Coerce the caller's arguments into the typed request object, then
    # dispatch it through the configured caller backend.
    my $request = $self->new_with_coercions('Paws::STS::GetSessionToken', @_);
    return $self->caller->do_call($self, $request);
}
sub operations { qw/AssumeRole AssumeRoleWithSAML AssumeRoleWithWebIdentity DecodeAuthorizationMessage GetCallerIdentity GetFederationToken GetSessionToken / }
1;
### main pod documentation begin ###
=head1 NAME
Paws::STS - Perl Interface to AWS AWS Security Token Service
=head1 SYNOPSIS
use Paws;
my $obj = Paws->service('STS');
my $res = $obj->Method(
Arg1 => $val1,
Arg2 => [ 'V1', 'V2' ],
# if Arg3 is an object, the HashRef will be used as arguments to the constructor
# of the arguments type
Arg3 => { Att1 => 'Val1' },
# if Arg4 is an array of objects, the HashRefs will be passed as arguments to
# the constructor of the arguments type
Arg4 => [ { Att1 => 'Val1' }, { Att1 => 'Val2' } ],
);
=head1 DESCRIPTION
AWS Security Token Service
The AWS Security Token Service (STS) is a web service that enables you
to request temporary, limited-privilege credentials for AWS Identity
and Access Management (IAM) users or for users that you authenticate
(federated users). This guide provides descriptions of the STS API. For
more detailed information about using this service, go to Temporary
Security Credentials.
As an alternative to using the API, you can use one of the AWS SDKs,
which consist of libraries and sample code for various programming
languages and platforms (Java, Ruby, .NET, iOS, Android, etc.). The
SDKs provide a convenient way to create programmatic access to STS. For
example, the SDKs take care of cryptographically signing requests,
managing errors, and retrying requests automatically. For information
about the AWS SDKs, including how to download and install them, see the
Tools for Amazon Web Services page.
For information about setting up signatures and authorization through
the API, go to Signing AWS API Requests in the I<AWS General
Reference>. For general information about the Query API, go to Making
Query Requests in I<Using IAM>. For information about using security
tokens with other AWS products, go to AWS Services That Work with IAM
in the I<IAM User Guide>.
If you're new to AWS and need additional technical information about a
specific AWS product, you can find the product's technical
documentation at http://aws.amazon.com/documentation/.
B<Endpoints>
The AWS Security Token Service (STS) has a default endpoint of
https://sts.amazonaws.com that maps to the US East (N. Virginia)
region. Additional regions are available and are activated by default.
For more information, see Activating and Deactivating AWS STS in an AWS
Region in the I<IAM User Guide>.
For information about STS endpoints, see Regions and Endpoints in the
I<AWS General Reference>.
B<Recording API requests>
STS supports AWS CloudTrail, which is a service that records AWS calls
for your AWS account and delivers log files to an Amazon S3 bucket. By
using information collected by CloudTrail, you can determine what
requests were successfully made to STS, who made the request, when it
was made, and so on. To learn more about CloudTrail, including how to
turn it on and find your log files, see the AWS CloudTrail User Guide.
=head1 METHODS
=head2 AssumeRole(RoleArn => Str, RoleSessionName => Str, [DurationSeconds => Int, ExternalId => Str, Policy => Str, SerialNumber => Str, TokenCode => Str])
Each argument is described in detail in: L<Paws::STS::AssumeRole>
Returns: a L<Paws::STS::AssumeRoleResponse> instance
Returns a set of temporary security credentials (consisting of an
access key ID, a secret access key, and a security token) that you can
use to access AWS resources that you might not normally have access to.
Typically, you use C<AssumeRole> for cross-account access or
federation. For a comparison of C<AssumeRole> with the other APIs that
produce temporary credentials, see Requesting Temporary Security
Credentials and Comparing the AWS STS APIs in the I<IAM User Guide>.
B<Important:> You cannot call C<AssumeRole> by using AWS root account
credentials; access is denied. You must use credentials for an IAM user
or an IAM role to call C<AssumeRole>.
For cross-account access, imagine that you own multiple accounts and
need to access resources in each account. You could create long-term
credentials in each account to access those resources. However,
managing all those credentials and remembering which one can access
which account can be time consuming. Instead, you can create one set of
long-term credentials in one account and then use temporary security
credentials to access all the other accounts by assuming roles in those
accounts. For more information about roles, see IAM Roles (Delegation
and Federation) in the I<IAM User Guide>.
For federation, you can, for example, grant single sign-on access to
the AWS Management Console. If you already have an identity and
authentication system in your corporate network, you don't have to
recreate user identities in AWS in order to grant those user identities
access to AWS. Instead, after a user has been authenticated, you call
C<AssumeRole> (and specify the role with the appropriate permissions)
to get temporary security credentials for that user. With those
temporary security credentials, you construct a sign-in URL that users
can use to access the console. For more information, see Common
Scenarios for Temporary Credentials in the I<IAM User Guide>.
The temporary security credentials are valid for the duration that you
specified when calling C<AssumeRole>, which can be from 900 seconds (15
minutes) to a maximum of 3600 seconds (1 hour). The default is 1 hour.
The temporary security credentials created by C<AssumeRole> can be used
to make API calls to any AWS service with the following exception: you
cannot call the STS service's C<GetFederationToken> or
C<GetSessionToken> APIs.
Optionally, you can pass an IAM access policy to this operation. If you
choose not to pass a policy, the temporary security credentials that
are returned by the operation have the permissions that are defined in
the access policy of the role that is being assumed. If you pass a
policy to this operation, the temporary security credentials that are
returned by the operation have the permissions that are allowed by both
the access policy of the role that is being assumed, I< B<and> > the
policy that you pass. This gives you a way to further restrict the
permissions for the resulting temporary security credentials. You
cannot use the passed policy to grant permissions that are in excess of
those allowed by the access policy of the role that is being assumed.
For more information, see Permissions for AssumeRole,
AssumeRoleWithSAML, and AssumeRoleWithWebIdentity in the I<IAM User
Guide>.
To assume a role, your AWS account must be trusted by the role. The
trust relationship is defined in the role's trust policy when the role
is created. That trust policy states which accounts are allowed to
delegate access to this account's role.
The user who wants to access the role must also have permissions
delegated from the role's administrator. If the user is in a different
account than the role, then the user's administrator must attach a
policy that allows the user to call AssumeRole on the ARN of the role
in the other account. If the user is in the same account as the role,
then you can either attach a policy to the user (identical to the
previous different account user), or you can add the user as a
principal directly in the role's trust policy.
B<Using MFA with AssumeRole>
You can optionally include multi-factor authentication (MFA)
information when you call C<AssumeRole>. This is useful for
cross-account scenarios in which you want to make sure that the user
who is assuming the role has been authenticated using an AWS MFA
device. In that scenario, the trust policy of the role being assumed
includes a condition that tests for MFA authentication; if the caller
does not include valid MFA information, the request to assume the role
is denied. The condition in a trust policy that tests for MFA
authentication might look like the following example.
C<"Condition": {"Bool": {"aws:MultiFactorAuthPresent": true}}>
For more information, see Configuring MFA-Protected API Access in the
I<IAM User Guide> guide.
To use MFA with C<AssumeRole>, you pass values for the C<SerialNumber>
and C<TokenCode> parameters. The C<SerialNumber> value identifies the
user's hardware or virtual MFA device. The C<TokenCode> is the
time-based one-time password (TOTP) that the MFA device produces.
=head2 AssumeRoleWithSAML(PrincipalArn => Str, RoleArn => Str, SAMLAssertion => Str, [DurationSeconds => Int, Policy => Str])
Each argument is described in detail in: L<Paws::STS::AssumeRoleWithSAML>
Returns: a L<Paws::STS::AssumeRoleWithSAMLResponse> instance
Returns a set of temporary security credentials for users who have been
authenticated via a SAML authentication response. This operation
provides a mechanism for tying an enterprise identity store or
directory to role-based AWS access without user-specific credentials or
configuration. For a comparison of C<AssumeRoleWithSAML> with the other
APIs that produce temporary credentials, see Requesting Temporary
Security Credentials and Comparing the AWS STS APIs in the I<IAM User
Guide>.
The temporary security credentials returned by this operation consist
of an access key ID, a secret access key, and a security token.
Applications can use these temporary security credentials to sign calls
to AWS services.
The temporary security credentials are valid for the duration that you
specified when calling C<AssumeRole>, or until the time specified in
the SAML authentication response's C<SessionNotOnOrAfter> value,
whichever is shorter. The duration can be from 900 seconds (15 minutes)
to a maximum of 3600 seconds (1 hour). The default is 1 hour.
The temporary security credentials created by C<AssumeRoleWithSAML> can
be used to make API calls to any AWS service with the following
exception: you cannot call the STS service's C<GetFederationToken> or
C<GetSessionToken> APIs.
Optionally, you can pass an IAM access policy to this operation. If you
choose not to pass a policy, the temporary security credentials that
are returned by the operation have the permissions that are defined in
the access policy of the role that is being assumed. If you pass a
policy to this operation, the temporary security credentials that are
returned by the operation have the permissions that are allowed by the
intersection of both the access policy of the role that is being
assumed, I< B<and> > the policy that you pass. This means that both
policies must grant the permission for the action to be allowed. This
gives you a way to further restrict the permissions for the resulting
temporary security credentials. You cannot use the passed policy to
grant permissions that are in excess of those allowed by the access
policy of the role that is being assumed. For more information, see
Permissions for AssumeRole, AssumeRoleWithSAML, and
AssumeRoleWithWebIdentity in the I<IAM User Guide>.
Before your application can call C<AssumeRoleWithSAML>, you must
configure your SAML identity provider (IdP) to issue the claims
required by AWS. Additionally, you must use AWS Identity and Access
Management (IAM) to create a SAML provider entity in your AWS account
that represents your identity provider, and create an IAM role that
specifies this SAML provider in its trust policy.
Calling C<AssumeRoleWithSAML> does not require the use of AWS security
credentials. The identity of the caller is validated by using keys in
the metadata document that is uploaded for the SAML provider entity for
your identity provider.
Calling C<AssumeRoleWithSAML> can result in an entry in your AWS
CloudTrail logs. The entry includes the value in the C<NameID> element
of the SAML assertion. We recommend that you use a NameIDType that is
not associated with any personally identifiable information (PII). For
example, you could instead use the Persistent Identifier
(C<urn:oasis:names:tc:SAML:2.0:nameid-format:persistent>).
For more information, see the following resources:
=over
=item *
About SAML 2.0-based Federation in the I<IAM User Guide>.
=item *
Creating SAML Identity Providers in the I<IAM User Guide>.
=item *
Configuring a Relying Party and Claims in the I<IAM User Guide>.
=item *
Creating a Role for SAML 2.0 Federation in the I<IAM User Guide>.
=back
=head2 AssumeRoleWithWebIdentity(RoleArn => Str, RoleSessionName => Str, WebIdentityToken => Str, [DurationSeconds => Int, Policy => Str, ProviderId => Str])
Each argument is described in detail in: L<Paws::STS::AssumeRoleWithWebIdentity>
Returns: a L<Paws::STS::AssumeRoleWithWebIdentityResponse> instance
Returns a set of temporary security credentials for users who have been
authenticated in a mobile or web application with a web identity
provider, such as Amazon Cognito, Login with Amazon, Facebook, Google,
or any OpenID Connect-compatible identity provider.
For mobile applications, we recommend that you use Amazon Cognito. You
can use Amazon Cognito with the AWS SDK for iOS and the AWS SDK for
Android to uniquely identify a user and supply the user with a
consistent identity throughout the lifetime of an application.
To learn more about Amazon Cognito, see Amazon Cognito Overview in the
I<AWS SDK for Android Developer Guide> guide and Amazon Cognito
Overview in the I<AWS SDK for iOS Developer Guide>.
Calling C<AssumeRoleWithWebIdentity> does not require the use of AWS
security credentials. Therefore, you can distribute an application (for
example, on mobile devices) that requests temporary security
credentials without including long-term AWS credentials in the
application, and without deploying server-based proxy services that use
long-term AWS credentials. Instead, the identity of the caller is
validated by using a token from the web identity provider. For a
comparison of C<AssumeRoleWithWebIdentity> with the other APIs that
produce temporary credentials, see Requesting Temporary Security
Credentials and Comparing the AWS STS APIs in the I<IAM User Guide>.
The temporary security credentials returned by this API consist of an
access key ID, a secret access key, and a security token. Applications
can use these temporary security credentials to sign calls to AWS
service APIs.
The credentials are valid for the duration that you specified when
calling C<AssumeRoleWithWebIdentity>, which can be from 900 seconds (15
minutes) to a maximum of 3600 seconds (1 hour). The default is 1 hour.
The temporary security credentials created by
C<AssumeRoleWithWebIdentity> can be used to make API calls to any AWS
service with the following exception: you cannot call the STS service's
C<GetFederationToken> or C<GetSessionToken> APIs.
Optionally, you can pass an IAM access policy to this operation. If you
choose not to pass a policy, the temporary security credentials that
are returned by the operation have the permissions that are defined in
the access policy of the role that is being assumed. If you pass a
policy to this operation, the temporary security credentials that are
returned by the operation have the permissions that are allowed by both
the access policy of the role that is being assumed, I< B<and> > the
policy that you pass. This gives you a way to further restrict the
permissions for the resulting temporary security credentials. You
cannot use the passed policy to grant permissions that are in excess of
those allowed by the access policy of the role that is being assumed.
For more information, see Permissions for AssumeRole,
AssumeRoleWithSAML, and AssumeRoleWithWebIdentity in the I<IAM User
Guide>.
Before your application can call C<AssumeRoleWithWebIdentity>, you must
have an identity token from a supported identity provider and create a
role that the application can assume. The role that your application
assumes must trust the identity provider that is associated with the
identity token. In other words, the identity provider must be specified
in the role's trust policy.
Calling C<AssumeRoleWithWebIdentity> can result in an entry in your AWS
CloudTrail logs. The entry includes the Subject of the provided Web
Identity Token. We recommend that you avoid using any personally
identifiable information (PII) in this field. For example, you could
instead use a GUID or a pairwise identifier, as suggested in the OIDC
specification.
For more information about how to use web identity federation and the
C<AssumeRoleWithWebIdentity> API, see the following resources:
=over
=item *
Using Web Identity Federation APIs for Mobile Apps and Federation
Through a Web-based Identity Provider.
=item *
Web Identity Federation Playground. This interactive website lets you
walk through the process of authenticating via Login with Amazon,
Facebook, or Google, getting temporary security credentials, and then
using those credentials to make a request to AWS.
=item *
AWS SDK for iOS and AWS SDK for Android. These toolkits contain sample
apps that show how to invoke the identity providers, and then how to
use the information from these providers to get and use temporary
security credentials.
=item *
Web Identity Federation with Mobile Applications. This article
discusses web identity federation and shows an example of how to use
web identity federation to get access to content in Amazon S3.
=back
=head2 DecodeAuthorizationMessage(EncodedMessage => Str)
Each argument is described in detail in: L<Paws::STS::DecodeAuthorizationMessage>
Returns: a L<Paws::STS::DecodeAuthorizationMessageResponse> instance
Decodes additional information about the authorization status of a
request from an encoded message returned in response to an AWS request.
For example, if a user is not authorized to perform an action that he
or she has requested, the request returns a
C<Client.UnauthorizedOperation> response (an HTTP 403 response). Some
AWS actions additionally return an encoded message that can provide
details about this authorization failure.
Only certain AWS actions return an encoded authorization message. The
documentation for an individual action indicates whether that action
returns an encoded message in addition to returning an HTTP code.
The message is encoded because the details of the authorization status
can constitute privileged information that the user who requested the
action should not see. To decode an authorization status message, a
user must be granted permissions via an IAM policy to request the
C<DecodeAuthorizationMessage> (C<sts:DecodeAuthorizationMessage>)
action.
The decoded message includes the following type of information:
=over
=item *
Whether the request was denied due to an explicit deny or due to the
absence of an explicit allow. For more information, see Determining
Whether a Request is Allowed or Denied in the I<IAM User Guide>.
=item *
The principal who made the request.
=item *
The requested action.
=item *
The requested resource.
=item *
The values of condition keys in the context of the user's request.
=back
=head2 GetCallerIdentity()
Each argument is described in detail in: L<Paws::STS::GetCallerIdentity>
Returns: a L<Paws::STS::GetCallerIdentityResponse> instance
Returns details about the IAM identity whose credentials are used to
call the API.
=head2 GetFederationToken(Name => Str, [DurationSeconds => Int, Policy => Str])
Each argument is described in detail in: L<Paws::STS::GetFederationToken>
Returns: a L<Paws::STS::GetFederationTokenResponse> instance
Returns a set of temporary security credentials (consisting of an
access key ID, a secret access key, and a security token) for a
federated user. A typical use is in a proxy application that gets
temporary security credentials on behalf of distributed applications
inside a corporate network. Because you must call the
C<GetFederationToken> action using the long-term security credentials
of an IAM user, this call is appropriate in contexts where those
credentials can be safely stored, usually in a server-based
application. For a comparison of C<GetFederationToken> with the other
APIs that produce temporary credentials, see Requesting Temporary
Security Credentials and Comparing the AWS STS APIs in the I<IAM User
Guide>.
If you are creating a mobile-based or browser-based app that can
authenticate users using a web identity provider like Login with
Amazon, Facebook, Google, or an OpenID Connect-compatible identity
provider, we recommend that you use Amazon Cognito or
C<AssumeRoleWithWebIdentity>. For more information, see Federation
Through a Web-based Identity Provider.
The C<GetFederationToken> action must be called by using the long-term
AWS security credentials of an IAM user. You can also call
C<GetFederationToken> using the security credentials of an AWS root
account, but we do not recommend it. Instead, we recommend that you
create an IAM user for the purpose of the proxy application and then
attach a policy to the IAM user that limits federated users to only the
actions and resources that they need access to. For more information,
see IAM Best Practices in the I<IAM User Guide>.
The temporary security credentials that are obtained by using the
long-term credentials of an IAM user are valid for the specified
duration, from 900 seconds (15 minutes) up to a maximum of 129600
seconds (36 hours). The default is 43200 seconds (12 hours). Temporary
credentials that are obtained by using AWS root account credentials
have a maximum duration of 3600 seconds (1 hour).
The temporary security credentials created by C<GetFederationToken> can
be used to make API calls to any AWS service with the following
exceptions:
=over
=item *
You cannot use these credentials to call any IAM APIs.
=item *
You cannot call any STS APIs except C<GetCallerIdentity>.
=back
B<Permissions>
The permissions for the temporary security credentials returned by
C<GetFederationToken> are determined by a combination of the following:
=over
=item *
The policy or policies that are attached to the IAM user whose
credentials are used to call C<GetFederationToken>.
=item *
The policy that is passed as a parameter in the call.
=back
The passed policy is attached to the temporary security credentials
that result from the C<GetFederationToken> API call--that is, to the
I<federated user>. When the federated user makes an AWS request, AWS
evaluates the policy attached to the federated user in combination with
the policy or policies attached to the IAM user whose credentials were
used to call C<GetFederationToken>. AWS allows the federated user's
request only when both the federated user I< B<and> > the IAM user are
explicitly allowed to perform the requested action. The passed policy
cannot grant more permissions than those that are defined in the IAM
user policy.
A typical use case is that the permissions of the IAM user whose
credentials are used to call C<GetFederationToken> are designed to
allow access to all the actions and resources that any federated user
will need. Then, for individual users, you pass a policy to the
operation that scopes down the permissions to a level that's
appropriate to that individual user, using a policy that allows only a
subset of permissions that are granted to the IAM user.
If you do not pass a policy, the resulting temporary security
credentials have no effective permissions. The only exception is when
the temporary security credentials are used to access a resource that
has a resource-based policy that specifically allows the federated user
to access the resource.
For more information about how permissions work, see Permissions for
GetFederationToken. For information about using C<GetFederationToken>
to create temporary security credentials, see
GetFederationTokenE<mdash>Federation Through a Custom Identity Broker.
=head2 GetSessionToken([DurationSeconds => Int, SerialNumber => Str, TokenCode => Str])
Each argument is described in detail in: L<Paws::STS::GetSessionToken>
Returns: a L<Paws::STS::GetSessionTokenResponse> instance
Returns a set of temporary credentials for an AWS account or IAM user.
The credentials consist of an access key ID, a secret access key, and a
security token. Typically, you use C<GetSessionToken> if you want to
use MFA to protect programmatic calls to specific AWS APIs like Amazon
EC2 C<StopInstances>. MFA-enabled IAM users would need to call
C<GetSessionToken> and submit an MFA code that is associated with their
MFA device. Using the temporary security credentials that are returned
from the call, IAM users can then make programmatic calls to APIs that
require MFA authentication. If you do not supply a correct MFA code,
then the API returns an access denied error. For a comparison of
C<GetSessionToken> with the other APIs that produce temporary
credentials, see Requesting Temporary Security Credentials and
Comparing the AWS STS APIs in the I<IAM User Guide>.
The C<GetSessionToken> action must be called by using the long-term AWS
security credentials of the AWS account or an IAM user. Credentials
that are created by IAM users are valid for the duration that you
specify, from 900 seconds (15 minutes) up to a maximum of 129600
seconds (36 hours), with a default of 43200 seconds (12 hours);
credentials that are created by using account credentials can range
from 900 seconds (15 minutes) up to a maximum of 3600 seconds (1 hour),
with a default of 1 hour.
The temporary security credentials created by C<GetSessionToken> can be
used to make API calls to any AWS service with the following
exceptions:
=over
=item *
You cannot call any IAM APIs unless MFA authentication information is
included in the request.
=item *
You cannot call any STS API I<except> C<AssumeRole> or
C<GetCallerIdentity>.
=back
We recommend that you do not call C<GetSessionToken> with root account
credentials. Instead, follow our best practices by creating one or more
IAM users, giving them the necessary permissions, and using IAM users
for everyday interaction with AWS.
The permissions associated with the temporary security credentials
returned by C<GetSessionToken> are based on the permissions associated
with account or IAM user whose credentials are used to call the action.
If C<GetSessionToken> is called using root account credentials, the
temporary credentials have root account permissions. Similarly, if
C<GetSessionToken> is called using the credentials of an IAM user, the
temporary credentials have the same permissions as the IAM user.
For more information about using C<GetSessionToken> to create temporary
credentials, go to Temporary Credentials for Users in Untrusted
Environments in the I<IAM User Guide>.
=head1 PAGINATORS
Paginator methods are helpers that repetitively call methods that return partial results
=head1 SEE ALSO
This service class forms part of L<Paws>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/STS.pm | Perl | apache-2.0 | 29,797 |
package Paws::IAM::ListAttachedUserPoliciesResponse;
  use Moose;
  # Managed policies attached to the requested IAM user (see POD below).
  has AttachedPolicies => (is => 'ro', isa => 'ArrayRef[Paws::IAM::AttachedPolicy]');
  # True when the result list was truncated; re-issue the request with Marker.
  has IsTruncated => (is => 'ro', isa => 'Bool');
  # Pagination cursor; present when IsTruncated is true.
  has Marker => (is => 'ro', isa => 'Str');
  # AWS request id for this response (diagnostic/support use).
  has _request_id => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::IAM::ListAttachedUserPoliciesResponse
=head1 ATTRIBUTES
=head2 AttachedPolicies => ArrayRef[L<Paws::IAM::AttachedPolicy>]
A list of the attached policies.
=head2 IsTruncated => Bool
A flag that indicates whether there are more items to return. If your
results were truncated, you can make a subsequent pagination request
using the C<Marker> request parameter to retrieve more items. Note that
IAM might return fewer than the C<MaxItems> number of results even when
there are more results available. We recommend that you check
C<IsTruncated> after every call to ensure that you receive all of your
results.
=head2 Marker => Str
When C<IsTruncated> is C<true>, this element is present and contains
the value to use for the C<Marker> parameter in a subsequent pagination
request.
=head2 _request_id => Str
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/IAM/ListAttachedUserPoliciesResponse.pm | Perl | apache-2.0 | 1,182 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Web::Component::Location::ViewTop;
use strict;
use warnings;
use previous qw(content);
use EnsEMBL::Web::Component::Location::Genoverse;
sub new {
  ## @override
  ## Returns the self object re-blessed into the Genoverse class if it is
  ## confirmed that we need a Genoverse (dynamic) image instead of a static one
  my $self = shift->SUPER::new(@_);
  my $hub = $self->hub;

  # if genoverse request is confirmed, re-bless the object so all later
  # method calls resolve to the Genoverse component (module is loaded above)
  if (!$self->force_static && $hub->param('genoverse')) {
    $self = bless $self, 'EnsEMBL::Web::Component::Location::Genoverse';
    $self->_init;
  }

  return $self;
}
sub force_static {
  ## Decides (once, then caches) whether a static ViewTop image is required:
  ## either the user saved an 'image_type' session record asking for static,
  ## or this is an export request.
  ## @return Boolean
  my $self = shift;
  my $hub  = $self->hub;

  unless (defined $self->{'_force_static'}) {
    my $record = $hub->session->get_record_data({type => 'image_type', code => $self->id});
    $self->{'_force_static'} = $record->{'static'} || $hub->param('export') || 0;
  }

  return $self->{'_force_static'};
}
sub content {
  ## @override
  ## Renders the default (static) ViewTop panel when a static image is
  ## required; otherwise emits a small JS panel that tests whether the
  ## browser can run Genoverse. This method does NOT get called at all if
  ## the constructor already decided on a Genoverse image.
  my $self = shift;

  if ($self->force_static) {
    return $self->PREV::content(@_);
  }

  return q(<div class="js_panel"><input type="hidden" class="panel_type" value="GenoverseTest" /></div>);
}
1;
| Ensembl/public-plugins | genoverse/modules/EnsEMBL/Web/Component/Location/ViewTop.pm | Perl | apache-2.0 | 2,046 |
package Paws::CloudWatchEvents::ListRuleNamesByTarget;
  use Moose;
  # Maximum number of rule names to return per call.
  has Limit => (is => 'ro', isa => 'Int');
  # Pagination token returned by a previous ListRuleNamesByTarget call.
  has NextToken => (is => 'ro', isa => 'Str');
  # ARN of the target whose associated rules are listed (mandatory).
  has TargetArn => (is => 'ro', isa => 'Str', required => 1);

  use MooseX::ClassAttribute;

  # Wiring consumed by the Paws request machinery: the API operation name
  # and the class the raw response is inflated into.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'ListRuleNamesByTarget');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::CloudWatchEvents::ListRuleNamesByTargetResponse');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::CloudWatchEvents::ListRuleNamesByTarget - Arguments for method ListRuleNamesByTarget on Paws::CloudWatchEvents
=head1 DESCRIPTION
This class represents the parameters used for calling the method ListRuleNamesByTarget on the
Amazon CloudWatch Events service. Use the attributes of this class
as arguments to method ListRuleNamesByTarget.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to ListRuleNamesByTarget.
As an example:
$service_obj->ListRuleNamesByTarget(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 Limit => Int
The maximum number of results to return.
=head2 NextToken => Str
The token returned by a previous call to retrieve the next set of
results.
=head2 B<REQUIRED> TargetArn => Str
The Amazon Resource Name (ARN) of the target resource.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method ListRuleNamesByTarget in L<Paws::CloudWatchEvents>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/CloudWatchEvents/ListRuleNamesByTarget.pm | Perl | apache-2.0 | 1,994 |
package Tapper::Cmd::DbDeploy;
BEGIN {
$Tapper::Cmd::DbDeploy::AUTHORITY = 'cpan:TAPPER';
}
{
$Tapper::Cmd::DbDeploy::VERSION = '4.1.8';
}
# ABSTRACT: Tapper - Backend functions for DB deployment
use 5.010;
use strict;
use warnings;
use Moose;
use Tapper::Config;
use Tapper::Schema::TestrunDB;
use Tapper::Schema::ReportsDB;
extends 'Tapper::Cmd';
sub insert_initial_values
{
        my ($schema, $db) = @_;

        # Only the TestrunDB schema ships with seed data.
        return unless $db eq 'TestrunDB';

        # ---------- Topic ----------
        # official topics, taken from the Topic result class itself
        my %topic_description = %Tapper::Schema::TestrunDB::Result::Topic::topic_description;
        while (my ($topic_name, $topic_text) = each %topic_description) {
                $schema->resultset('Topic')->new
                    ({ name        => $topic_name,
                       description => $topic_text,
                     })->insert;
        }

        # default queue for ad-hoc test runs
        my $queue = $schema->resultset('Queue')->new
            ({ name     => 'AdHoc',
               priority => 1000,
               active   => 1,
             });
        $queue->insert;
}
sub dbdeploy
{
        # Interactively (re)deploys the given schema ('TestrunDB' or
        # 'ReportsDB') from scratch: prompts for confirmation on STDIN,
        # removes any existing SQLite file, deploys the DDL and seeds the
        # initial rows. Connection info comes from Tapper::Config.
        my ($self, $db) = @_;

        local $| =1;  # unbuffered STDOUT so the prompt appears before <STDIN>

        my $dsn = Tapper::Config->subconfig->{database}{$db}{dsn};
        my $user = Tapper::Config->subconfig->{database}{$db}{username};
        my $pw = Tapper::Config->subconfig->{database}{$db}{password};
        my $answer;  # NOTE(review): unused; kept for interface/diff stability

        # ----- really? -----
        print "dsn: $dsn\n";
        print " Really delete all existing content and initialize from scratch (y/N)? ";
        if ( lc substr(<STDIN>, 0, 1) ne 'y') {
                say "Skip.";
                return;
        }

        # ----- delete sqlite file -----
        # A fresh SQLite deployment is easiest done on a brand new file.
        if ($dsn =~ /dbi:SQLite:dbname/) {
                my ($tmpfname) = $dsn =~ m,dbi:SQLite:dbname=([\w./]+),i;
                unlink $tmpfname;
        }

        my $stderr = '';
        {
                # capture known errors to hide them from printing
                # (STDERR is redirected into $stderr only inside this block)
                local *STDERR;
                open STDERR, '>', \$stderr;

                my $schema;
                $schema = Tapper::Schema::TestrunDB->connect ($dsn, $user, $pw) if $db eq 'TestrunDB';
                $schema = Tapper::Schema::ReportsDB->connect ($dsn, $user, $pw) if $db eq 'ReportsDB';
                $schema->deploy({ add_drop_table => 1 }); # may fail, does not provide correct order to drop tables
                insert_initial_values($schema, $db);
        }
        # Re-emit captured errors unless it is the known, harmless upgrade notice.
        say STDERR $stderr if $stderr && $stderr !~ /Please call upgrade on your schema/;
}
1;
__END__
=pod
=encoding utf-8
=head1 NAME
Tapper::Cmd::DbDeploy - Tapper - Backend functions for DB deployment
=head1 SYNOPSIS
This module provides functions to initially set up Tapper in C<$HOME/.tapper/>.
use Tapper::Cmd::DbDeploy;
my $cmd = Tapper::Cmd::DbDeploy->new;
$cmd->dbdeploy("ReportsDB");
$cmd->dbdeploy("TestrunDB");
=head1 NAME
Tapper::Cmd::DbDeploy - Tapper - Backend functions for deploying databases
=head1 METHODS
=head2 insert_initial_values($schema, $db)
Insert required minimal set of values.
=head2 $self->dbdeploy($db)
Deploy a schema into DB.
$db can be "TestrunDB" or "ReportsDB";
Connection info is determined via Tapper::Config.
TODO: still an interactive tool but interactivity should be migrated back into Tapper::CLI::*.
=head1 AUTHOR
AMD OSRC Tapper Team <tapper@amd64.org>
=head1 COPYRIGHT AND LICENSE
This software is Copyright (c) 2013 by Advanced Micro Devices, Inc..
This is free software, licensed under:
The (two-clause) FreeBSD License
=cut
| gitpan/Tapper-Cmd | lib/Tapper/Cmd/DbDeploy.pm | Perl | bsd-2-clause | 3,736 |
#!/usr/bin/perl
use strict;
use warnings;
use Switch 'Perl6';
use LWP::UserAgent;
package xkcdbot;
use base qw ( Bot::BasicBot );
sub said {
    my ($self, $message) = @_;
    # Bot::BasicBot callback: every incoming channel/private message is
    # handed straight to the command parser.
    return parse($self, $message);
}
sub parse {
    my ($self, $message) = @_;

    # Dispatch with plain regex tests instead of the given/when keywords
    # injected by Switch.pm's 'Perl6' source filter: that filter is
    # deprecated, fragile, and breaks under modern perls.
    # Comic Search: ".xkcd <terms>"
    if ($message->{body} =~ /\.xkcd (.+?)$/) {
        my $result = search($self, $message);
        if ($message->{body} =~ /\| (.+?)$/) {
            # Name Piping: $1 is the nick the result should be piped to.
            # FIXME(review): talk() silently drops any message whose $who is
            # empty, so this branch currently sends nothing at all; confirm
            # the intended recipient (probably $1) before relying on piping.
            talk($self, $message->{channel}, "$1, $result", "");
        } else {
            # Normal Send, addressed back to the requester.
            talk($self, $message->{channel}, "$message->{who}, $result", "$message->{who}");
        }
    }
}
sub talk {
    # Say $message to $channel, addressed to $who.
    # NOTE: an empty $who suppresses the message entirely -- callers that
    # pass "" (e.g. the name-piping branch in parse()) send nothing.
    my ($self,$channel,$message,$who) = @_;
    if($who eq "") { return; }
    $self->say(
        who => "$who",
        channel => "$channel",
        body => "$message",
    );
}
sub search {
    # Google site-search xkcd.com for the requested terms and return the
    # first comic URL found, or an apology string.
    my ($self, $message) = @_;

    # Use only the actual search terms: the raw body still contains the
    # ".xkcd " command prefix and, when piping, a trailing " | nick", both
    # of which would pollute the Google query.
    my $query = $message->{body};
    if ($query =~ /\.xkcd (.+?)(?:\s*\|.*)?$/) {
        $query = $1;
    }

    my $ua = LWP::UserAgent->new();
    # Pretend to be a browser; Google blocks obvious robots.
    $ua->agent('Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.5) Gecko/20091102 Firefox/3.5.5 (.NET CLR 3.5.30729)');
    my $response = $ua->get("http://www.google.com/search?hl=en&sitesearch=xkcd.com&safe=off&num=1&q=%22link to this comic%22 -forum $query");
    my $content = $response->content;

    # First xkcd comic link in the result page wins.
    if($content =~ /http:\/\/xkcd\.com\/(\d+?)\//g) {
        return "http://xkcd.com/$1/";
    }
    return "Sorry but I can't seem to find a comic related to what you searched for.";
}
# Construct and start the IRC bot. All connection details are blank
# placeholders; fill in nick/username/name/server/channels before running.
# NOTE(review): run() presumably blocks inside the IRC event loop -- confirm
# against Bot::BasicBot before adding code after this statement.
my $bot = xkcdbot->new(
    nick => "",
    username => "",
    name => "",
    server => "",
    port => "6667",
    channels => [ '' ],
)->run();
| directionalpad/xkcdbot | xkcd_bot.pl | Perl | bsd-3-clause | 1,510 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package XrefParser::Mim2GeneParser;
use strict;
use warnings;
use Carp;
use POSIX qw(strftime);
use File::Basename;
use base qw( XrefParser::BaseParser );
sub run {
  # Parses a mim2gene file and stores OMIM (MIM_GENE / MIM_MORBID) xrefs as
  # dependents of the matching EntrezGene xrefs for this species.
  # Returns 0 on success, 1 on error (file could not be opened).
  my ($self, $ref_arg) = @_;
  my $general_source_id = $ref_arg->{source_id};
  my $species_id        = $ref_arg->{species_id};
  my $files             = $ref_arg->{files};
  my $verbose           = $ref_arg->{verbose};

  if((!defined $general_source_id) or (!defined $species_id) or (!defined $files) ){
    croak "Need to pass source_id, species_id and files as pairs";
  }
  $verbose |=0;

  my $file = @{$files}[0];

  my $eg_io = $self->get_filehandle($file);
  if ( !defined $eg_io ) {
    print STDERR "ERROR: Could not open $file\n";
    return 1;    # 1 is an error
  }

  my $entrez_source_id =
    $self->get_source_id_for_source_name(
        'EntrezGene');

  # Already-stored xref ids for this species, keyed by primary accession.
  my (%mim_gene)   = %{$self->get_valid_codes("MIM_GENE",$species_id)};
  my (%mim_morbid) = %{$self->get_valid_codes("MIM_MORBID",$species_id)};
  my (%entrez)     = %{$self->get_valid_codes("EntrezGene",$species_id)};

  my $dbi = $self->dbi();
  # linkage_source_id is constant for the whole run, so it is inlined.
  my $add_dependent_xref_sth = $dbi->prepare("INSERT INTO dependent_xref (master_xref_id,dependent_xref_id, linkage_source_id) VALUES (?,?, $entrez_source_id)");

  my $missed_entrez = 0;
  my $missed_omim   = 0;
  my $diff_type     = 0;
  my $count         = 0;  # was left undef before; now safe for empty files

  $eg_io->getline();    # skip the header line
  while ( $_ = $eg_io->getline() ) {
    $count++;
    chomp;
    my ($omim_id, $type, $entrez_id, $other) = split;

    # Both sides of the link must already exist as xrefs.
    if(!defined($entrez{$entrez_id})){
      $missed_entrez++;
      next;
    }
    if((!defined $mim_gene{$omim_id} ) and (!defined $mim_morbid{$omim_id} ) ){
      $missed_omim++;
      next;
    }

    # Prefer the OMIM set matching the declared type; fall back to the other
    # set (counting the mismatch) when the preferred one is absent. The
    # guard above guarantees the fallback set is defined.
    if($type eq "gene" || $type eq 'gene/phenotype'){
      if(defined($mim_gene{$omim_id})){
        _add_dependent_xrefs($add_dependent_xref_sth, $entrez{$entrez_id}, $mim_gene{$omim_id});
      }
      else{
        $diff_type++;
        _add_dependent_xrefs($add_dependent_xref_sth, $entrez{$entrez_id}, $mim_morbid{$omim_id});
      }
    }
    elsif($type eq "phenotype"){
      if(defined($mim_morbid{$omim_id})){
        _add_dependent_xrefs($add_dependent_xref_sth, $entrez{$entrez_id}, $mim_morbid{$omim_id});
      }
      else{
        $diff_type++;
        _add_dependent_xrefs($add_dependent_xref_sth, $entrez{$entrez_id}, $mim_gene{$omim_id});
      }
    }
    else{
      print "WARNING unknown type $type\n";
      next;
    }
  }
  $add_dependent_xref_sth->finish;

  print $missed_entrez." EntrezGene entries could not be found.\n";
  print $missed_omim." Omim entries could not be found.\n";
  print $diff_type." had different types out of $count Entries.\n";

  return 0;
}

# Insert one dependent_xref row for every (master, dependent) xref id pair.
# Replaces four copies of the same nested loop in the original run().
sub _add_dependent_xrefs {
  my ($sth, $master_xref_ids, $dependent_xref_ids) = @_;
  foreach my $master_id (@{$master_xref_ids}){
    foreach my $dependent_id (@{$dependent_xref_ids}){
      $sth->execute($master_id, $dependent_id);
    }
  }
  return;
}
1;
| mjg17/ensembl | misc-scripts/xref_mapping/XrefParser/Mim2GeneParser.pm | Perl | apache-2.0 | 3,911 |
#
# Copyright 2017 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::acmepacket::snmp::mode::components::fan;
use strict;
use warnings;
use network::acmepacket::snmp::mode::components::resources qw($map_status);
# Columns fetched per fan instance from the Acme Packet environment-monitor
# fan status table; apEnvMonFanState values are translated via $map_status
# (imported from resources.pm above).
my $mapping = {
    apEnvMonFanStatusDescr => { oid => '.1.3.6.1.4.1.9148.3.3.1.4.1.1.3' }, # human-readable fan name
    apEnvMonFanStatusValue => { oid => '.1.3.6.1.4.1.9148.3.3.1.4.1.1.4' }, # fan speed value (reported as % in check())
    apEnvMonFanState => { oid => '.1.3.6.1.4.1.9148.3.3.1.4.1.1.5', map => $map_status },
};
# Table root walked once by load().
my $oid_apEnvMonFanStatusEntry = '.1.3.6.1.4.1.9148.3.3.1.4.1.1';
sub load {
    my $self = shift;

    # Queue a single walk of the whole fan status table; results arrive in
    # $self->{results} before check() runs.
    push @{$self->{request}}, { oid => $oid_apEnvMonFanStatusEntry };
}
sub check {
    my ($self) = @_;

    # One pass over the SNMP fan status table: per-fan state severity,
    # numeric speed thresholds, and perfdata output (same structure as the
    # sibling psu component module).
    $self->{output}->output_add(long_msg => "Checking fans");
    $self->{components}->{fan} = {name => 'fans', total => 0, skip => 0};
    return if ($self->check_filter(section => 'fan'));

    my ($exit, $warn, $crit, $checked);
    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_apEnvMonFanStatusEntry}})) {
        # Key the iteration on the state column so each fan is visited once.
        next if ($oid !~ /^$mapping->{apEnvMonFanState}->{oid}\.(.*)$/);
        my $instance = $1;
        my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{$oid_apEnvMonFanStatusEntry}, instance => $instance);

        # Honour user filters; optionally treat 'notPresent' fans as absent.
        next if ($self->check_filter(section => 'fan', instance => $instance));
        next if ($result->{apEnvMonFanState} =~ /notPresent/i &&
                 $self->absent_problem(section => 'fan', instance => $instance));

        $self->{components}->{fan}->{total}++;
        $self->{output}->output_add(long_msg => sprintf("fan '%s' status is '%s' [instance = %s, speed = %s]",
                                    $result->{apEnvMonFanStatusDescr}, $result->{apEnvMonFanState}, $instance, defined($result->{apEnvMonFanStatusValue}) ? $result->{apEnvMonFanStatusValue} : 'unknown'));

        # Severity from the mapped fan state.
        $exit = $self->get_severity(label => 'default', section => 'fan', value => $result->{apEnvMonFanState});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("Fan '%s' status is '%s'", $result->{apEnvMonFanStatusDescr}, $result->{apEnvMonFanState}));
        }

        # Severity from the numeric speed value against configured thresholds.
        ($exit, $warn, $crit, $checked) = $self->get_severity_numeric(section => 'fan', instance => $instance, value => $result->{apEnvMonFanStatusValue});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("Fan '%s' is '%s' %%", $result->{apEnvMonFanStatusDescr}, $result->{apEnvMonFanStatusValue}));
        }

        $self->{output}->perfdata_add(label => 'fan_' . $result->{apEnvMonFanStatusDescr}, unit => '%',
                                      value => $result->{apEnvMonFanStatusValue},
                                      warning => $warn,
                                      critical => $crit, min => 0, max => 100
                                      );
    }
}
1;
| nichols-356/centreon-plugins | network/acmepacket/snmp/mode/components/fan.pm | Perl | apache-2.0 | 3,920 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 NAME
Bio::EnsEMBL::Compara::AlignSlice::Exon - Description
=head1 INHERITANCE
This module inherits attributes and methods from Bio::EnsEMBL::Exon module
=head1 SYNOPSIS
use Bio::EnsEMBL::Compara::AlignSlice::Exon;
my $exon = new Bio::EnsEMBL::Compara::AlignSlice::Exon(
);
SET VALUES
GET VALUES
=head1 OBJECT ATTRIBUTES
=over
=item exon
original Bio::EnsEMBL::Exon object
=item slice
Bio::EnsEMBL::Slice object on which this Bio::EnsEMBL::Compara::AlignSlice::Exon is defined
=item cigar_line
A string describing the mapping of this exon on the slice
=item phase
This exon results from the mapping of a real exon. It may suffer indels and duplications
during the process which makes this mapped exon unreadable by a translation machinery.
The phase is set to -1 by default.
=item end_phase
This exon results from the mapping of a real exon. It may suffer indels and duplications
during the process which makes this mapped exon unreadable by a translation machinery.
The end_phase is set to -1 by default.
=back
=head1 AUTHORS
Javier Herrero
=head1 CONTACT
This modules is part of the EnsEMBL project (http://www.ensembl.org)
Questions can be posted to the ensembl-dev mailing list:
http://lists.ensembl.org/mailman/listinfo/dev
=head1 APPENDIX
The rest of the documentation details each of the object methods. Internal methods are usually preceded with a _
=cut
# Let the code begin...
package Bio::EnsEMBL::Compara::AlignSlice::Exon;
use strict;
use Bio::EnsEMBL::Exon;
use Bio::EnsEMBL::Utils::Argument qw(rearrange);
use Bio::EnsEMBL::Utils::Exception qw(throw warning info);
our @ISA = qw(Bio::EnsEMBL::Exon);
=head2 new (CONSTRUCTOR)
Arg[1] : a reference to a hash where keys can be:
-exon
-adaptor
-reference_slice
Example : my $align_slice =
new Bio::EnsEMBL::Compara::AlignSlice(
-exon => $original_exon
-reference_slice => $reference_slice,
);
Description: Creates a new Bio::EnsEMBL::AlignSlice::Exon object
Returntype : Bio::EnsEMBL::Compara::AlignSlice::Exon object
Exceptions : return an object with no start, end nor strand if the
exon cannot be mapped on the reference Slice.
=cut
sub new {
  my ($class, @args) = @_;

  my $self = {};
  bless($self, $class);

  # Named-argument unpacking (EnsEMBL convention, see POD above)
  my ($exon, $align_slice, $from_mapper, $to_mapper, $original_rank) =
      rearrange([qw(
          EXON ALIGN_SLICE FROM_MAPPER TO_MAPPER ORIGINAL_RANK
      )], @args);

  $self->exon($exon) if (defined ($exon));
#   $self->genomic_align($genomic_align) if (defined($genomic_align));
#   $self->from_genomic_align_id($from_genomic_align_id) if (defined($from_genomic_align_id));
#   $self->to_genomic_align_id($to_genomic_align_id) if (defined($to_genomic_align_id));
  $self->slice($align_slice) if (defined($align_slice));
  $self->original_rank($original_rank) if (defined($original_rank));

  # Mapped exons may acquire indels/duplications, so phases are reset to -1
  # by default (see class-level POD).
  $self->phase(-1);
  $self->end_phase(-1);

  # May return an object with undefined start/end/strand when the exon
  # cannot be mapped onto the reference slice.
  return $self->map_Exon_on_Slice($from_mapper, $to_mapper);
}
=head2 copy (CONSTRUCTOR)
Arg[1] : none
Example : my $new_align_slice = $old_align_slice->copy()
Description: Creates a new Bio::EnsEMBL::AlignSlice::Exon object which
is an exact copy of the calling object
Returntype : Bio::EnsEMBL::Compara::AlignSlice::Exon object
Exceptions :
Caller : $obeject->methodname
=cut
sub copy {
  my ($self) = @_;

  # Shallow-copy every attribute into a fresh hash and re-bless it into the
  # same class as the calling object.
  my $copy = { %{$self} };
  bless($copy, ref($self));

  return $copy;
}
=head2 slice
Arg[1] : (optional) Bio::EnsEMBL::Slice $reference_slice
Example : $align_exon->slice($reference_slice);
Example : $reference_slice = $align_exon->slice();
Description: Get/set the attribute slice. This method is overloaded in order
to map original coordinates onto the reference Slice.
Returntype : Bio::EnsEMBL::Slice object
Exceptions :
=cut
sub slice {
  my ($self, $new_slice) = @_;

  # Plain get/set accessor for the slice this mapped exon is defined on.
  # An undef argument leaves any stored value untouched.
  $self->{'slice'} = $new_slice if (defined $new_slice);

  return $self->{'slice'};
}
=head2 original_rank
Arg[1] : (optional) integer $original_rank
Example : $align_exon->original_rank(5);
Example : $original_rank = $align_exon->original_rank();
Description: Get/set the attribute original_rank. The orignal_rank
is the position of the orginal Exon in the original
Transcript
Returntype : integer
Exceptions :
=cut
sub original_rank {
  my $self = shift;

  # Get/set the position the original exon had in the original transcript.
  # An undef argument leaves any stored value untouched.
  if (@_ and defined $_[0]) {
    $self->{'original_rank'} = shift;
  }

  return $self->{'original_rank'};
}
=head2 map_Exon_on_Slice
Arg[1] : Bio::EnsEMBL::Mapper $from_mapper
Arg[2] : Bio::EnsEMBL::Mapper $to_mapper
Example : $align_exon->map_Exon_on_Slice($from_mapper, $to_mapper);
Description: This function takes the original exon and maps it on the slice
using the mappers.
Returntype : Bio::EnsEMBL::Compara::AlignSlice::Exon object
Exceptions : returns undef if not enough information is provided
Exceptions : returns undef if no piece of the original exon can be mapped.
Caller : new
=cut
sub map_Exon_on_Slice {
  my ($self, $from_mapper, $to_mapper) = @_;

  my $original_exon = $self->exon;
  my $slice = $self->slice;
  # Without an exon, a target slice and a mapper there is nothing to map.
  if (!defined($slice) or !defined($original_exon) or !defined($from_mapper)) {
    return $self;
  }

  # First mapping step: original exon (genomic coordinates) -> alignment.
  my @alignment_coords = $from_mapper->map_coordinates(
          "sequence", # $self->genomic_align->dbID,
          $original_exon->slice->start + $original_exon->start - 1,
          $original_exon->slice->start + $original_exon->end - 1,
          $original_exon->strand,
          "sequence" # $from_mapper->from
      );

  my $aligned_start;
  my $aligned_end;
  my $aligned_strand = 0;    # stays 0 if nothing maps at all
  my $aligned_sequence = "";
  my $aligned_cigar = "";
  my $global_alignment_coord_start;
  my $global_alignment_coord_end;
  my $last_alignment_coord_end;
  my $last_alignment_coord_start;

  foreach my $alignment_coord (@alignment_coords) {
    ## $alignment_coord refer to genomic_align_block: (1 to genomic_align_block->length) [+]
    if ($alignment_coord->isa("Bio::EnsEMBL::Mapper::Coordinate")) {
      if ($alignment_coord->strand == 1) {
        if ($last_alignment_coord_end) {
          # Consider gap between this piece and the previous as a deletion
          my $length = $alignment_coord->start - $last_alignment_coord_end - 1;
          $aligned_cigar .= $length if ($length>1);
          $aligned_cigar .= "D" if ($length);
        }
        $last_alignment_coord_end = $alignment_coord->end;
        $global_alignment_coord_start = $alignment_coord->start if (!$global_alignment_coord_start);
        $global_alignment_coord_end = $alignment_coord->end;
      } else {
        if ($last_alignment_coord_start) {
          # Consider gap between this piece and the previous as a deletion
          my $length = $last_alignment_coord_start - $alignment_coord->end - 1;
          $aligned_cigar .= $length if ($length>1);
          $aligned_cigar .= "D" if ($length);
        }
        $last_alignment_coord_start = $alignment_coord->start;
        $global_alignment_coord_end = $alignment_coord->end if (!$global_alignment_coord_end);
        $global_alignment_coord_start = $alignment_coord->start;
      }
    } else {
      # This piece is outside of the alignment -> consider as an insertion
      my $length = $alignment_coord->length;
      $aligned_cigar .= $length if ($length>1);
      $aligned_cigar .= "I" if ($length);
      next;
    }

    if (!defined($to_mapper)) {
      ## Mapping on the alignment (expanded mode)
      if ($alignment_coord->strand == 1) {
        $aligned_strand = 1;
        $aligned_start = $alignment_coord->start if (!$aligned_start);
        $aligned_end = $alignment_coord->end;
      } else {
        $aligned_strand = -1;
        $aligned_start = $alignment_coord->start;
        $aligned_end = $alignment_coord->end if (!$aligned_end);
      }
      my $num = $alignment_coord->end - $alignment_coord->start + 1;
      $aligned_cigar .= $num if ($num > 1);
      $aligned_cigar .= "M" if ($num);
    } else {
      ## Mapping on the reference_Slice (collapsed mode): second mapping
      ## step, alignment -> reference slice.
      my @mapped_coords = $to_mapper->map_coordinates(
              "alignment", # $self->genomic_align->genomic_align_block->dbID,
              $alignment_coord->start,
              $alignment_coord->end,
              $alignment_coord->strand,
              "alignment" # $to_mapper->to
          );
      foreach my $mapped_coord (@mapped_coords) {
        ## $mapped_coord refer to reference_slice
        if ($mapped_coord->isa("Bio::EnsEMBL::Mapper::Coordinate")) {
          if ($alignment_coord->strand == 1) {
            $aligned_strand = 1;
            $aligned_start = $mapped_coord->start if (!$aligned_start);
            $aligned_end = $mapped_coord->end;
          } else {
            $aligned_strand = -1;
            $aligned_start = $mapped_coord->start;
            $aligned_end = $mapped_coord->end if (!$aligned_end);
          }
          my $num = $mapped_coord->end - $mapped_coord->start + 1;
          $aligned_cigar .= $num if ($num > 1);
          $aligned_cigar .= "M" if ($num);
        } else {
          # Falls in a gap of the reference slice -> insertion
          my $num = $mapped_coord->end - $mapped_coord->start + 1;
          $aligned_cigar .= $num if ($num > 1);
          $aligned_cigar .= "I" if ($num);
        }
      }
    }
  }

  if ($aligned_strand == 0) {
    ## the whole sequence maps on a gap: leave coordinates undefined
    $self->{start} = undef;
    $self->{end} = undef;
    $self->{strand} = undef;
    return $self;
  }

  ## Set coordinates on "slice" coordinates
  $self->start($aligned_start - $slice->start + 1);
  $self->end($aligned_end - $slice->start + 1);
  $self->strand($aligned_strand);
  $self->cigar_line($aligned_cigar);
  # Discard mappings that fall entirely outside the slice boundaries.
  if ($self->start > $slice->length or $self->end < 1) {
    $self->{start} = undef;
    $self->{end} = undef;
    $self->{strand} = undef;
    return $self;
  }

  return $self;
}
=head2 exon
Arg[1] : (optional) Bio::EnsEMBL::Exon $original_exon
Example : $align_exon->exon($original_exon);
Example : $start = $align_exon->start();
Description: Get/set the attribute start. This method is overloaded in order to
return the starting postion on the AlignSlice instead of the
original one. Original starting position may be retrieved using
the SUPER::start() method or the orginal_start() method
Returntype : Bio::EnsEMBL::Exon object
Exceptions :
=cut
sub exon {
  my $self = shift;

  # Get/set the original Bio::EnsEMBL::Exon this mapped exon derives from.
  # Setting it also propagates the original exon's stable_id (when present)
  # onto this object.
  if (@_ and defined $_[0]) {
    my $original_exon = shift;
    $self->{'exon'} = $original_exon;
    my $stable_id = $original_exon->stable_id;
    $self->stable_id($stable_id) if (defined $stable_id);
  }

  return $self->{'exon'};
}
=head2 cigar_line
Arg[1] : (optional) string $cigar_line
Example : $align_exon->cigar_line($cigar_line);
Example : $cigar_line = $align_exon->cigar_line();
Description: Get/set the attribute cigar_line.
Returntype : string
Exceptions : none
Caller : object->methodname
=cut
sub cigar_line {
  my $self = shift;

  # Get/set the cigar string (M/D/I/G elements; a count of 1 is implicit)
  # describing how the original exon maps onto the slice.
  my $new_cigar = shift;
  $self->{'cigar_line'} = $new_cigar if (defined $new_cigar);

  return $self->{'cigar_line'};
}
sub get_aligned_start {
  # Returns the position (1-based, in original-exon coordinates) of the
  # first aligned residue, or undef when no cigar line is set. A leading
  # insertion ("I") element means the first cigCount bases of the original
  # exon fall outside the alignment, so the aligned part starts after them.
  my ($self) = @_;

  my $cigar_line = $self->cigar_line;
  return undef if (!defined($cigar_line));

  my @cig = ( $cigar_line =~ /(\d*[GMDI])/g );
  return undef if (!@cig);

  my $cigType = substr( $cig[0], -1, 1 );
  my $cigCount = substr( $cig[0], 0 ,-1 );
  $cigCount = 1 unless ($cigCount =~ /^\d+$/);
  # Bug fix: the original code executed "next" here when $cigCount was 0;
  # "next" is invalid outside a loop and would die at run time. A
  # zero-length leading element simply contributes nothing to the offset.
  if ($cigType eq "I") {
    return (1 + $cigCount);
  }
  return 1;
}
sub get_aligned_end {
  # Returns the position (1-based, in original-exon coordinates) of the
  # last aligned residue, or undef when no cigar line is set. A trailing
  # insertion ("I") element means the last cigCount bases of the original
  # exon fall outside the alignment.
  my ($self) = @_;

  my $cigar_line = $self->cigar_line;
  return undef if (!defined($cigar_line));

  my @cig = ( $cigar_line =~ /(\d*[GMDI])/g );
  return undef if (!@cig);

  my $cigType = substr( $cig[-1], -1, 1 );
  my $cigCount = substr( $cig[-1], 0 ,-1 );
  $cigCount = 1 unless ($cigCount =~ /^\d+$/);
  # Bug fix: the original code executed "next" here when $cigCount was 0;
  # "next" is invalid outside a loop and would die at run time. A
  # zero-length trailing element simply contributes nothing to the offset.
  my $exon_length = $self->exon->end - $self->exon->start + 1;
  if ($cigType eq "I") {
    return ($exon_length - $cigCount);
  }
  return $exon_length;
}
=head2 seq
Arg [1] : none
Example : my $seq_str = $exon->seq->seq;
Description: Retrieves the dna sequence of this Exon.
Returned in a Bio::Seq object. Note that the sequence may
include UTRs (or even be entirely UTR).
Returntype : Bio::Seq or undef
Exceptions : warning if argument passed,
warning if exon does not have attatched slice
warning if exon strand is not defined (or 0)
Caller : general
=cut
sub seq {
  my ($self, $seq) = @_;

  # Setter: cache the raw sequence string of the supplied Bio::Seq object.
  if (defined($seq)) {
    $self->{'_seq_cache'} = $seq->seq();
  }

  ## Use _template_seq if defined. It is a concatenation of several original
  ## exon sequences and is produced during the merging of align_exons.
  # Lazily build the gapped sequence from the original exon sequence and
  # the cigar line ("ref" mode: insertions are dropped, not shown as gaps).
  if(!defined($self->{'_seq_cache'})) {
    my $seq = &_get_aligned_sequence_from_original_sequence_and_cigar_line(
        ($self->{'_template_seq'} or $self->exon->seq->seq),
        $self->cigar_line,
        "ref"
    );
    $self->{'_seq_cache'} = $seq;
  }

  # Always hand back a fresh Bio::Seq wrapper around the cached string.
  return Bio::Seq->new(
          -seq => $self->{'_seq_cache'},
          -id => $self->stable_id,
          -moltype => 'dna',
          -alphabet => 'dna',
      );
}
=head2 append_Exon
Arg [1] :
Example :
Description:
Returntype :
Exceptions :
Caller :
=cut
sub append_Exon {
  # Merge another mapped exon AFTER this one, separated by $gap_length
  # alignment gaps; updates both the cached sequence and the cigar line.
  my ($self, $exon, $gap_length) = @_;

  $self->seq(new Bio::Seq(-seq =>
          $self->seq->seq.("-"x$gap_length).$exon->seq->seq));

  ## As it is possible to merge two partially repeated parts of an Exon,
  ## the merging is done by concatenating both cigar_lines with the right
  ## number of gaps in the middle. The underlying sequence must be lengthened
  ## accordingly. This is stored in the _template_seq private attribute
  if (defined($self->{'_template_seq'})) {
    $self->{'_template_seq'} .= $self->exon->seq->seq
  } else {
    # NOTE(review): doubling the original sequence assumes at most one
    # extra repeat is merged in this step -- confirm against callers.
    $self->{'_template_seq'} = $self->exon->seq->seq x 2;
  }

  if ($gap_length) {
    $gap_length = "" if ($gap_length == 1);  # cigar convention: a count of 1 is implicit
    $self->cigar_line(
        $self->cigar_line.
        $gap_length."D".
        $exon->cigar_line);
  } else {
    $self->cigar_line(
        $self->cigar_line.
        $exon->cigar_line);
  }

  return $self;
}
=head2 prepend_Exon
Arg [1] :
Example :
Description:
Returntype :
Exceptions :
Caller :
=cut
sub prepend_Exon {
  # Merge another mapped exon BEFORE this one, separated by $gap_length
  # alignment gaps; mirror image of append_Exon().
  my ($self, $exon, $gap_length) = @_;

  $self->seq(new Bio::Seq(-seq =>
          $exon->seq->seq.("-"x$gap_length).$self->seq->seq));

  ## As it is possible to merge two partially repeated parts of an Exon,
  ## the merging is done by concatenating both cigar_lines with the right
  ## number of gaps in the middle. The underlying sequence must be lengthened
  ## accordingly. This is stored in the _template_seq private attribute
  if (defined($self->{'_template_seq'})) {
    $self->{'_template_seq'} .= $self->exon->seq->seq
  } else {
    # NOTE(review): doubling the original sequence assumes at most one
    # extra repeat is merged in this step -- confirm against callers.
    $self->{'_template_seq'} = $self->exon->seq->seq x 2;
  }

  if ($gap_length) {
    $gap_length = "" if ($gap_length == 1);  # cigar convention: a count of 1 is implicit
    $self->cigar_line(
        $exon->cigar_line.
        $gap_length."D".
        $self->cigar_line);
  } else {
    $self->cigar_line(
        $exon->cigar_line.
        $self->cigar_line);
  }

  return $self;
}
=head2 _get_aligned_sequence_from_original_sequence_and_cigar_line
Arg [1] : string $original_sequence
Arg [1] : string $cigar_line
Example : $aligned_sequence = _get_aligned_sequence_from_original_sequence_and_cigar_line(
"CGTAACTGATGTTA", "3MD8M2D3M")
Description: get gapped sequence from original one and cigar line
Returntype : string $aligned_sequence
Exceptions : thrown if cigar_line does not match sequence length
Caller : methodname
=cut
sub _get_aligned_sequence_from_original_sequence_and_cigar_line {
  # Expands an ungapped sequence into its gapped (aligned) form according
  # to a cigar line. In "ref" mode, inserted ("I") residues are dropped
  # instead of being rendered as dashes. Returns undef on empty input;
  # throws when the cigar line does not consume the whole sequence.
  my ($original_sequence, $cigar_line, $mode) = @_;
  my $aligned_sequence = "";
  $mode ||= "";

  return undef if (!$original_sequence or !$cigar_line);

  my $seq_pos = 0;

  my @cig = ( $cigar_line =~ /(\d*[GMDI])/g );
  for my $cigElem ( @cig ) {
    my $cigType = substr( $cigElem, -1, 1 );
    my $cigCount = substr( $cigElem, 0 ,-1 );
    $cigCount = 1 unless ($cigCount =~ /^\d+$/);  # a bare letter means count 1

    if( $cigType eq "M" ) {
      # Match: copy residues straight through from the original sequence
      $aligned_sequence .= substr($original_sequence, $seq_pos, $cigCount);
      $seq_pos += $cigCount;
    } elsif( $cigType eq "G" || $cigType eq "D") {
      # Gap/deletion: pad with dashes, consuming no original sequence
      $aligned_sequence .= "-" x $cigCount;
    } elsif( $cigType eq "I") {
      # Insertion: skip original residues; shown as dashes unless "ref" mode
      $aligned_sequence .= "-" x $cigCount if ($mode ne "ref");
      $seq_pos += $cigCount;
    }
  }
  # Bug fix: error message previously read "lenght".
  throw("Cigar line ($seq_pos) does not match sequence length (".length($original_sequence).")") if ($seq_pos != length($original_sequence));

  return $aligned_sequence;
}
1;
| ckongEbi/ensembl-compara | modules/Bio/EnsEMBL/Compara/AlignSlice/Exon.pm | Perl | apache-2.0 | 17,693 |
package Crypt::HSXKPasswd::Dictionary::Basic;
use parent Crypt::HSXKPasswd::Dictionary;
# import required modules
use strict;
use warnings;
use Carp; # for nicer 'exception' handling for users of the module
use Fatal qw( :void open close binmode ); # make builtins throw exceptions on failure
use English qw(-no_match_vars); # for more readable code
use Readonly; # for truly constant constants
use Type::Params qw( compile multisig ); # for parameter validation with Type::Tiny objects
use Types::Standard qw( :types ); # for basic type checking (Int Str etc.)
use Crypt::HSXKPasswd::Types qw( :types ); # for custom type checking
use Crypt::HSXKPasswd::Helper; # exports utility functions like _error & _warn
# set things up for using UTF-8
use 5.016; # min Perl for good UTF-8 support, implies feature 'unicode_strings'
use Encode qw(encode decode);
use utf8;
binmode STDOUT, ':encoding(UTF-8)';
# Copyright (c) 2015, Bart Busschots T/A Bartificer Web Solutions All rights
# reserved.
#
# Code released under the FreeBSD license (included in the POD at the bottom of
# HSXKPasswd.pm)
#
# --- Constants ---------------------------------------------------------------
#
# version info
use version; our $VERSION = qv('1.2');
# utility variables
Readonly my $_CLASS => __PACKAGE__;
#
# --- Constructor -------------------------------------------------------------
#
#####-SUB-#####################################################################
# Type : CONSTRUCTOR (CLASS)
# Returns : An object of type Crypt::HSXKPasswd::Dictionary::Basic
# Arguments : 1) a string representing a file path to a dictionary file
# -- OR --
# an array ref containing a list of words
# 2) OPTIONAL - the encoding to import the file with. The default
# is UTF-8 (ignored if the first argument is not a file path).
# Throws : Croaks on invalid invocation and invalid args.
# Notes :
# See Also :
sub new{
    my ($class, @args) = @_;
    _force_class($class);

    # validate args: either a file path with an optional encoding,
    # or an array ref containing the word list
    state $args_check = multisig(
        [NonEmptyString, Optional[Maybe[NonEmptyString]]],
        [ArrayRef[Str]],
    );
    my ($dict_source, $encoding) = $args_check->(@args);

    # default the encoding if none was specified
    $encoding = 'UTF-8' unless $encoding;

    # build and bless a blank instance
    my $self = bless {
        words => [],
        sources => {
            files => [],
            num_arrays => 0,
        },
    }, $class;

    # populate the word list from the given source
    $self->add_words($dict_source, $encoding);

    # return the assembled object
    return $self;
}
#
# --- Public Instance functions -----------------------------------------------
#
#####-SUB-######################################################################
# Type : INSTANCE
# Purpose : Override clone() from the parent class and return a clone of
# self.
# Returns : An object of type Crypt::HSXKPasswd::Dictionary::Basic
# Arguments : NONE
# Throws : Croaks on invalid invocation
# Notes :
# See Also :
sub clone{
    my $self = shift;
    _force_instance($self);

    # build a new structure containing shallow copies of the word list
    # and the source metadata
    my $copy = {
        words => [ @{ $self->{words} } ],
        sources => {
            files => [ @{ $self->{sources}->{files} } ],
            num_arrays => $self->{sources}->{num_arrays},
        },
    };

    # bless the copy into this class and return it
    return bless $copy, $_CLASS;
}
#####-SUB-#####################################################################
# Type : INSTANCE
# Purpose : Override word_list() from the parent class and return the word
# list.
# Returns : An array reference.
# Arguments : NONE
# Throws : NOTHING
# Notes :
# See Also :
sub word_list{
    my $self = shift;
    _force_instance($self);
    # NOTE: this is a reference to the live internal list, not a copy
    return $self->{words};
}
#####-SUB-#####################################################################
# Type : INSTANCE
# Purpose : Override source() from the parent class and return information
# about the word sources.
# Returns : An array reference.
# Arguments : NONE
# Throws : NOTHING
# Notes :
# See Also :
sub source{
    my $self = shift;
    _force_instance($self);

    # start with the description from the parent class
    my $description = $self->SUPER::source();

    my $num_arrays = $self->{sources}->{num_arrays};
    my @files = @{$self->{sources}->{files}};

    # append details of where the words were loaded from, if anything
    # has been loaded at all
    if($num_arrays || scalar @files){
        my @parts = ();
        push @parts, $num_arrays.' array refs' if $num_arrays;
        push @parts, 'the file(s) '.(join q{, }, @files) if scalar @files;
        $description .= ' (loaded from: '.(join ' and ', @parts).')';
    }

    return $description;
}
#####-SUB-#####################################################################
# Type : INSTANCE
# Purpose : Blank the loaded word list.
# Returns : A reference to self to facilitate function chaining
# Arguments : NONE
# Throws : Croaks on invalid invocation
# Notes :
# See Also :
sub empty{
    my $self = shift;
    _force_instance($self);

    # discard the loaded words and forget every recorded source by
    # installing fresh, empty structures
    $self->{words} = [];
    $self->{sources} = { files => [], num_arrays => 0 };

    # return self to facilitate chaining
    return $self;
}
#####-SUB-#####################################################################
# Type : INSTANCE
# Purpose : Load words from a file or array ref, appending them to the word
# list.
# Returns : a reference to self to facilitate function chaining
# Arguments : 1) the path to the file to load words from
# --OR--
# a reference to an array of words
# 2) OPTIONAL - the encoding to import the file with. The default
# is UTF-8 (ignored if the first argument is not a file path).
# Throws : Croaks on invalid invocation or invalid args. Carps on invalid
# invalid word.
# Notes :
# See Also :
# Load words from a file or an array ref, appending them to the word list.
#   Arg [1] : file path (string) OR array ref of words
#   Arg [2] : OPTIONAL encoding for file import (default 'UTF-8'; ignored
#             when Arg [1] is an array ref)
# Returns a reference to self to facilitate chaining; errors via _error().
sub add_words{
    my @args = @_;
    my $self = shift @args;
    _force_instance($self);

    # validate args.
    # BUGFIX: Optional[Item] must sit *inside* the second signature's array
    # ref. As a separate list element it registered as a third signature
    # alternative, so calls like add_words($array_ref, $encoding) made by
    # new() were not matched by the intended [ArrayRef[Str]] signature.
    state $args_check = multisig(
        [NonEmptyString, Optional[Maybe[NonEmptyString]]],
        [ArrayRef[Str], Optional[Item]],
    );
    my ($dict_source, $encoding) = $args_check->(@args);

    # set defaults
    $encoding = 'UTF-8' unless $encoding;

    # try load the words from the relevant source
    my @new_words = ();
    if(ref $dict_source eq 'ARRAY'){
        # load valid words from the referenced array
        @new_words = @{$dict_source};

        # increase the array source count
        $self->{sources}->{num_arrays}++;
    }else{
        # load the words from a file path

        # make sure the file path is valid
        unless(-f $dict_source){
            _error("file $dict_source not found");
        }

        # try load and parse the contents of the file
        open my $WORD_FILE_FH, "<:encoding($encoding)", $dict_source or _error("Failed to open $dict_source with error: $OS_ERROR");
        my $word_file_contents = do{local $/ = undef; <$WORD_FILE_FH>};
        close $WORD_FILE_FH;
        LINE:
        foreach my $line (split /\n/sx, $word_file_contents){
            # skip empty lines
            next LINE if $line =~ m/^\s*$/sx;

            # skip comment lines
            next LINE if $line =~ m/^[#]/sx;

            # if we got here, store the word
            push @new_words, $line;
        }

        # make sure we got at least one word!
        unless(scalar @new_words){
            _error("file $dict_source contained no valid words");
        }

        # add the file to the list of loaded files
        push @{$self->{sources}->{files}}, $dict_source;
    }

    # merge with existing words and save into the instance
    # (a fresh array ref is installed on purpose - see word_list())
    my @updated_words = (@{$self->{words}}, @new_words);
    $self->{words} = [@updated_words];

    # return a reference to self
    return $self;
}
1; # because Perl is just a little bit odd :) | bbusschots/xkpasswd.pm | lib/Crypt/HSXKPasswd/Dictionary/Basic.pm | Perl | bsd-2-clause | 8,575 |
#!/usr/bin/perl
#
# Alignment driver script: builds HMM network (hmmnet) files for the
# training recipe and then Viterbi-aligns the training data against the
# initial model, submitting the work as batch jobs via ClusterManager.
# All inputs are taken from TRAIN_* environment variables.
#
# NOTE(review): consider adding 'use warnings;' alongside 'use strict;'.
use locale;
use strict;
use ClusterManager;
## Model name ##
my $BASE_ID = $ENV{'TRAIN_NAME'};
defined($BASE_ID) || die("TRAIN_NAME environment variable needs to be set.");
## Path settings ##
my $BINDIR = $ENV{'TRAIN_BINDIR'};
defined($BINDIR) || die("TRAIN_BINDIR environment variable needs to be set.");
my $SCRIPTDIR = $ENV{'TRAIN_SCRIPTDIR'};
defined($SCRIPTDIR) || die("TRAIN_SCRIPTDIR environment variable needs to be set.");
my $WORKDIR = $ENV{'TRAIN_DIR'};
defined($WORKDIR) || die("TRAIN_DIR environment variable needs to be set.");
## Training file list ##
my $RECIPE = $ENV{'TRAIN_RECIPE'};
defined($RECIPE) || die("TRAIN_RECIPE environment variable needs to be set.");
## Initial model names ##
my $init_model = $ENV{'TRAIN_IM'};
defined($init_model) || die("TRAIN_IM environment variable needs to be set.");
## Batch settings ##
# Number of batches, maximum number of parallel jobs
my $NUM_BATCHES = $ENV{'TRAIN_BATCHES'};
$NUM_BATCHES = 20 if !defined($NUM_BATCHES);
my $BATCH_PRIORITY = 0; # Not used currently.
my $BATCH_MAX_MEM = 2000; # In megabytes
# Note that you may need memory if the training data contains
# e.g. long utterances! If too little memory is reserved, unexpected
# termination of the training may occur.
## Train/Baum-Welch settings ##
my $USE_HMMNETS = 1; # If 0, the script must call align appropriately
my $FORWARD_BEAM = 15;
my $BACKWARD_BEAM = 200;
my $AC_SCALE = 1; # Acoustic scaling (For ML 1, for MMI 1/(LMSCALE/lne(10)))
my $ML_STATS_MODE = "--ml";
my $ML_ESTIMATE_MODE = "--ml";
## HMMNET options ##
my $MORPH_HMMNETS = 0; # True (1) if HMMNETs are not based on words
my $LEX_FILE=$ENV{'TRAIN_LEX'};
defined($LEX_FILE) || die("TRAIN_LEX environment variable needs to be set.");
my $TRN_FILE=$ENV{'TRAIN_TRN'};
defined($TRN_FILE) || die("TRAIN_TRN environment variable needs to be set.");
## Alignment settings ##
my $ALIGN_WINDOW = 4000;
my $ALIGN_BEAM = 600;
my $ALIGN_SBEAM = 100;
## Misc settings ##
my $VERBOSITY = 1;
# NOTE: If you plan to recompile executables at the same time as running
# them, it is a good idea to copy the old binaries to a different directory.
my $COPY_BINARY_TO_WORK = 1;
## Ignore some nodes if SLURM_EXCLUDE_NODES environment variable is set ##
my $EXCLUDE_NODES = $ENV{'SLURM_EXCLUDE_NODES'};
$EXCLUDE_NODES = '' if !defined($EXCLUDE_NODES);
######################################################################
# Alignment script begins
######################################################################
# Create own working directory
my $tempdir = "$WORKDIR/temp";
mkdir $WORKDIR;
mkdir $tempdir;
chdir $tempdir || die("Could not chdir to $tempdir");
# Optionally snapshot the executables so a concurrent recompile cannot
# affect this run (copy_binary_to_work also repoints $BINDIR at the copies).
if ($COPY_BINARY_TO_WORK > 0) {
copy_binary_to_work($BINDIR, "$tempdir/bin");
}
# Build the hmmnet files, then run the alignment itself.
generate_hmmnet_files($init_model, $tempdir);
if ($USE_HMMNETS) {
align_hmmnets($tempdir, $init_model, $RECIPE);
} else {
align($tempdir, $init_model, $RECIPE);
}
# Copy the training executables from $orig_bin_dir into a private bin
# directory so the run is unaffected if the originals are recompiled
# mid-training. As a side effect, repoints the global $BINDIR at the
# copies so all subsequent tool invocations use them.
sub copy_binary_to_work {
    my ($orig_bin_dir, $new_bin_dir) = @_;
    mkdir $new_bin_dir;

    # One loop instead of eleven duplicated system("cp ...") lines; a
    # failed copy is reported but, as before, does not abort the script.
    my @binaries = qw(estimate align tie stats vtln dur_est gconvert
                      gcluster phone_probs mllr combine_stats);
    foreach my $bin (@binaries) {
        system("cp ${orig_bin_dir}/${bin} ${new_bin_dir}") == 0
            or warn("Could not copy ${orig_bin_dir}/${bin} to ${new_bin_dir}\n");
    }

    # Redirect subsequent tool invocations to the copied binaries.
    $BINDIR = $new_bin_dir;
}
# Build the hmmnet files for every utterance in the recipe.
# Args: initial model path, temp directory. Creates $temp_dir/hmmnets,
# builds the helper FSTs there, submits create_hmmnets.pl as a (possibly
# batched) cluster job, and finally chdirs back to $temp_dir.
sub generate_hmmnet_files {
my $im = shift(@_);
my $temp_dir = shift(@_);
my $new_temp_dir = "$temp_dir/hmmnets";
mkdir $new_temp_dir;
chdir $new_temp_dir || die("Could not chdir to $new_temp_dir");
# Configure the cluster job: run and log in the hmmnets directory.
my $cm = ClusterManager->new;
$cm->{"identifier"} = "hmmnets_${BASE_ID}";
$cm->{"run_dir"} = $new_temp_dir;
$cm->{"log_dir"} = $new_temp_dir;
$cm->{"run_time"} = 2000;
$cm->{"mem_req"} = 1000;
if ($NUM_BATCHES > 0) {
$cm->{"first_batch"} = 1;
$cm->{"last_batch"} = $NUM_BATCHES;
$cm->{"run_time"} = 239; # shorter per-batch time limit when batched
}
$cm->{"failed_batch_retry_count"} = 1;
$cm->{"exclude_nodes"} = $EXCLUDE_NODES;
my $batch_options = "";
$batch_options = "-B $NUM_BATCHES -I \$BATCH" if ($NUM_BATCHES > 0);
# Create hmmnets from TRN transcripts with a lexicon and
# alternative paths e.g. for silences:
my $morph_switch = "";
if ($MORPH_HMMNETS > 0) {
$morph_switch = "-m ${LEX_FILE}.voc";
}
my $trn_switch = "-t $TRN_FILE";
# Construct helper FSTs (L.fst, C.fst, H.fst, optional_silence.fst and
# end_mark.fst) and vocabulary file.
# Assumes that the current directory is $temp_dir!
# NOTE(review): the cwd at this point is $new_temp_dir (see chdir above),
# not $temp_dir as the comment claims - confirm which directory
# build_helper_fsts.sh actually expects.
if ($MORPH_HMMNETS > 0) {
$morph_switch = "-m"; # replaces the "-m ${LEX_FILE}.voc" value set above
}
system("$SCRIPTDIR/build_helper_fsts.sh $morph_switch -s $SCRIPTDIR $LEX_FILE $im.ph");
# Use real FST processing in create_hmmnets.pl to create hmmnets
# with alternative paths for pronunciations and silences
$cm->submit("$SCRIPTDIR/create_hmmnets.pl -n -r $RECIPE $morph_switch $trn_switch -T $new_temp_dir -F $new_temp_dir -D $BINDIR -s $SCRIPTDIR $batch_options\n", "");
chdir($temp_dir);
}
# Viterbi-align the training data with the "align" tool (used when
# $USE_HMMNETS is 0). Submits the work as a (possibly batched) cluster
# job and restores the cwd to $temp_dir afterwards.
sub align {
    my ($temp_dir, $model, $recipe, $spkc_file) = @_;

    # optional speaker configuration switch
    my $spkc_switch = "";
    $spkc_switch = "-S $spkc_file" if ($spkc_file ne "");

    my $work_dir = "$temp_dir/align";
    mkdir $work_dir;
    chdir $work_dir || die("Could not chdir to $work_dir");

    # describe the cluster job
    my $cm = ClusterManager->new;
    $cm->{"identifier"} = "align_${BASE_ID}";
    $cm->{"run_dir"} = $work_dir;
    $cm->{"log_dir"} = $work_dir;
    $cm->{"mem_req"} = $BATCH_MAX_MEM;
    $cm->{"priority"} = $BATCH_PRIORITY;
    $cm->{"failed_batch_retry_count"} = 1;
    $cm->{"exclude_nodes"} = $EXCLUDE_NODES;

    my $batch_options = "";
    if ($NUM_BATCHES > 0) {
        $cm->{"first_batch"} = 1;
        $cm->{"last_batch"} = $NUM_BATCHES;
        $batch_options = "-B $NUM_BATCHES -I \$BATCH";
    }

    # launch the alignment tool
    $cm->submit("$BINDIR/align -b $model -c $model.cfg -r $recipe --swins $ALIGN_WINDOW --beam $ALIGN_BEAM --sbeam $ALIGN_SBEAM $spkc_switch $batch_options -i $VERBOSITY\n", "");
    chdir($temp_dir);
}
# Viterbi-align the training data against the hmmnet files by running the
# "stats" tool in alignment mode (-M vit) with hmmnets enabled (-H) and
# the statistics output discarded (-o /dev/null).
sub align_hmmnets {
my $temp_dir = shift(@_);
my $model = shift(@_);
my $recipe = shift(@_);
my $spkc_file = shift(@_); # optional speaker configuration file
my $batch_options = "";
my $spkc_switch = "";
$spkc_switch = "-S $spkc_file" if ($spkc_file ne "");
my $new_temp_dir = "$temp_dir/align";
mkdir $new_temp_dir;
chdir $new_temp_dir || die("Could not chdir to $new_temp_dir");
my $cm = ClusterManager->new;
$cm->{"identifier"} = "align_${BASE_ID}";
$cm->{"run_dir"} = $new_temp_dir;
$cm->{"log_dir"} = $new_temp_dir;
$cm->{"run_time"} = 2000;
$cm->{"mem_req"} = $BATCH_MAX_MEM;
if ($NUM_BATCHES > 0) {
$cm->{"first_batch"} = 1;
$cm->{"last_batch"} = $NUM_BATCHES;
$cm->{"run_time"} = 239; # shorter per-batch time limit when batched
}
$cm->{"priority"} = $BATCH_PRIORITY;
$cm->{"failed_batch_retry_count"} = 1;
$cm->{"exclude_nodes"} = $EXCLUDE_NODES;
$batch_options = "-B $NUM_BATCHES -I \$BATCH" if ($NUM_BATCHES > 0);
$cm->submit("$BINDIR/stats -b $model -c $model.cfg -r $recipe -H --ml -M vit -a -n -o /dev/null $spkc_switch $batch_options -i $VERBOSITY\n", "");
# NOTE(review): unlike align(), this sub does not chdir back to $temp_dir
# after submitting - confirm whether anything relies on the cwd afterwards.
}
| lingsoft/AaltoASR | aku/scripts/align.pl | Perl | bsd-3-clause | 7,468 |
#!/usr/bin/perl
#
#//===----------------------------------------------------------------------===//
#//
#// The LLVM Compiler Infrastructure
#//
#// This file is dual licensed under the MIT and the University of Illinois Open
#// Source Licenses. See LICENSE.txt for details.
#//
#//===----------------------------------------------------------------------===//
#
# Pragmas.
use strict;
use warnings;
# Standard modules.
use Data::Dumper; # Not actually used, but useful for debugging dumps.
# Enable `libomp/tools/lib/' module directory.
use FindBin;
use lib "$FindBin::Bin/lib";
# LIBOMP modules.
use Build;
use LibOMP;
use Platform ":vars";
use Uname;
use tools;
our $VERSION = "0.017";
# --------------------------------------------------------------------------------------------------
# Important variables.
# --------------------------------------------------------------------------------------------------
my $root_dir = $ENV{ LIBOMP_WORK };
# Makefiles of the buildable targets; the keys double as the legal values
# of the "target" option below.
my %makefiles = (
rtl => cat_file( $root_dir, "src", "makefile.mk" ),
timelimit => cat_file( $root_dir, "tools", "src", "timelimit", "makefile.mk" ),
);
# --------------------------------------------------------------------------------------------------
# Parse command line.
# --------------------------------------------------------------------------------------------------
# Possible options.
# * targets: comma separated list of targets the option has meaning for. For example,
# "version" option (4 or 5) has a meaning only for "rtl" target, while "mode" option has
# meaning for all targets.
# * base: If base is true this is a base option. All the possible values of base options are
# iterated if "--all" option is specified. If base is 0, this is an extra option.
# * parms: A hash of possible option values. "*" denotes default option value. For example,
# if "version" option is not specified, "--version=5" will be used implicitly.
# * suffix: Only for extra options. Subroutine returning suffix for build and output
# directories.
my $opts = {
"target" => { targets => "", base => 1, parms => { map( ( $_ => "" ), keys( %makefiles ) ), rtl => "*" }, },
"version" => { targets => "rtl", base => 1, parms => { 5 => "*", 4 => "" }, },
"lib-type" => { targets => "rtl", base => 1, parms => { normal => "*", stubs => "" }, },
"link-type" => { targets => "rtl", base => 1, parms => { dynamic => "*", static => "" }, },
"target-compiler" => { targets => "rtl,dsl", base => 0, parms => { 12 => "*", 11 => "" }, suffix => sub { $_[ 0 ]; } },
"mode" => { targets => "rtl,dsl,timelimit", base => 0, parms => { release => "*", diag => "", debug => "" }, suffix => sub { substr( $_[ 0 ], 0, 3 ); } },
"omp-version" => { targets => "rtl", base => 0, parms => { 40 => "*", 30 => "", 25 => "" }, suffix => sub { $_[ 0 ]; } },
"coverage" => { targets => "rtl", base => 0, parms => { off => "*", on => "" }, suffix => sub { $_[ 0 ] eq "on" ? "c1" : "c0"; } },
"tcheck" => { targets => "rtl", base => 0, parms => { 0 => "*", 1 => "", 2 => "" }, suffix => sub { "t" . $_[ 0 ]; } },
"mic-arch" => { targets => "rtl", base => 0, parms => { knf => "*", knc => "", knl => "" }, suffix => sub { $_[ 0 ]; } },
"mic-os" => { targets => "rtl", base => 0, parms => { bsd => "*", lin => "" }, suffix => sub { $_[ 0 ]; } },
"mic-comp" => { targets => "rtl", base => 0, parms => { native => "*", offload => "" }, suffix => sub { substr( $_[ 0 ], 0, 3 ); } },
};
# Alternative spellings accepted on the command line for some shortcuts.
my $synonyms = {
"debug" => [ qw{ dbg debg } ],
};
# This array specifies order of options to process, so it cannot be initialized with keys( %$opts ).
my @all_opts = qw{ target version lib-type link-type target-compiler mode omp-version coverage tcheck mic-arch mic-os mic-comp };
# This is the list of base options.
my @base_opts = grep( $opts->{ $_ }->{ base } == 1, @all_opts );
# This is the list of extra options.
my @extra_opts = grep( $opts->{ $_ }->{ base } == 0, @all_opts );
sub suffix($$$) {
    my ( $opt, $value, $skip_if_default ) = @_;
    # Map an option value to its build-directory suffix via the option's
    # "suffix" callback in $opts. When $skip_if_default is true and the
    # value is the option's default, the suffix is suppressed.
    if ( $skip_if_default and $value eq $opts->{ $opt }->{ dflt } ) {
        return "";
    }; # if
    return $opts->{ $opt }->{ suffix }->( $value );
}; # sub suffix
# Build $scuts, mapping bare shortcut names (e.g. "debug", "stubs") back to
# their option spec in $opts, and record each option's default in "dflt".
my $scuts = {}; # Shortcuts. Will help to locate proper item in $opts.
foreach my $opt ( keys( %$opts ) ) {
foreach my $parm ( keys( %{ $opts->{ $opt }->{ parms } } ) ) {
# Values like "0", "1", "2", "on", "off" are ambiguous between options,
# so they do not become shortcuts.
if ( $parm !~ m{\A(?:[012]|on|off)\z} ) {
$scuts->{ $parm } = $opts->{ $opt };
}; # if
if ( $opts->{ $opt }->{ parms }->{ $parm } eq "*" ) {
$opts->{ $opt }->{ dflt } = $parm;
}; # if
}; # foreach $parm
}; # foreach $opt
# Getopt::Long callback: record a requested option value by setting the
# matching entry in the global $opts (or, for bare shortcuts, $scuts) to 1.
# "--all" enables every value of the base options; "--ALL" of all options.
sub parse_option(@) {
# This function is called to process every option. $name is option name, $value is option value.
# For boolean options $value is either 1 or 0,
my ( $name, $value ) = @_;
if ( $name eq "all" or $name eq "ALL" ) {
foreach my $opt ( keys( %$opts ) ) {
if ( $opts->{ $opt }->{ base } or $name eq "ALL" ) {
foreach my $parm ( keys( %{ $opts->{ $opt }->{ parms } } ) ) {
$opts->{ $opt }->{ parms }->{ $parm } = 1;
}; # foreach $parm
}; # if
}; # foreach $opt
return;
}; # if
if ( exists( $opts->{ $name } ) ) {
# Suppose it is option with explicit value, like "target=normal".
if ( $value eq "all" ) {
foreach my $parm ( keys( %{ $opts->{ $name }->{ parms } } ) ) {
$opts->{ $name }->{ parms }->{ $parm } = 1;
}; # foreach
return;
} elsif ( exists( $opts->{ $name }->{ parms }->{ $value } ) ) {
$opts->{ $name }->{ parms }->{ $value } = 1;
return;
} elsif ( $value eq "" and exists( $opts->{ $name }->{ parms }->{ on } ) ) {
$opts->{ $name }->{ parms }->{ on } = 1;
return;
} else {
cmdline_error( "Illegal value of \"$name\" option: \"$value\"" );
}; # if
}; # if
# Ok, it is not an option with explicit value. Try to treat is as a boolean option.
if ( exists( $scuts->{ $name } ) ) {
( $value eq "1" or $value eq "0" ) or die "Internal error; stopped";
$scuts->{ $name }->{ parms }->{ $name } = $value;
return;
}; # if
# No, it is not a valid option at all.
cmdline_error( "Illegal option: \"$name\"" );
}; # sub parse_option
# Command-line driven flags and the list of make goals to build.
my $clean = 0;
my $clean_common = 0;
my $clobber = 0;
my $test_deps = 1;
my $test_touch = 1;
my @goals;
sub synonyms($) {
    my ( $opt ) = @_;
    # Getopt::Long alias fragment ("|alias1|alias2") for the option, or an
    # empty string when no synonyms are registered for it.
    my $aliases = $synonyms->{ $opt };
    return $aliases ? "|" . join( "|", @{ $aliases } ) : "";
}; # sub synonyms
# Build the Getopt::Long specs: every option in $opts takes a string value
# ("=s"), every shortcut in $scuts is a negatable boolean ("!"); both are
# routed through parse_option.
my @specs = (
map( ( "$_" . synonyms( $_ ) . "=s" => \&parse_option ), keys( %$opts ) ),
map( ( "$_" . synonyms( $_ ) . "!" => \&parse_option ), keys( %$scuts ) ),
);
my $answer;
get_options(
@specs,
Platform::target_options(),
"all" => \&parse_option,
"ALL" => \&parse_option,
"answer=s" => \$answer,
"test-deps!" => \$test_deps,
"test-touch!" => \$test_touch,
"version|ver:s" =>
sub {
# It is a tricky option. It specifies library version to build and it is also a standard
# option to request tool version.
if ( $_[ 1 ] eq "" ) {
# No arguments => version request.
print( "$tool version $VERSION\n" );
exit( 0 );
} else {
# Arguments => version to build.
parse_option( @_ )
};
},
);
# Remaining arguments are make goals; "clobber" and "clean" also set flags.
@goals = @ARGV;
if ( grep( $_ eq "clobber", @goals ) ) {
$clobber = 1;
}; # if
if ( grep( $_ eq "clean", @goals ) ) {
$clean = 1;
}; # if
# Ok, now $opts is filled with 0, 1 (explicitly set by the user) and "" and "*" (original
# values). In each option at least one 1 should be present (otherwise there is nothing to build).
foreach my $opt ( keys( %$opts ) ) {
if ( not grep( $_ eq "1", values( %{ $opts->{ $opt }->{ parms } } ) ) ) {
# No explicit "1" found. Enable default choice by replacing "*" with "1".
foreach my $parm ( keys( %{ $opts->{ $opt }->{ parms } } ) ) {
if ( $opts->{ $opt }->{ parms }->{ $parm } eq "*" ) {
$opts->{ $opt }->{ parms }->{ $parm } = 1;
}; # if
}; # foreach $parm
}; # if
}; # foreach $opt
# Clear $opts. Leave only "1".
foreach my $opt ( keys( %$opts ) ) {
foreach my $parm ( keys( %{ $opts->{ $opt }->{ parms } } ) ) {
if ( $opts->{ $opt }->{ parms }->{ $parm } ne "1" ) {
delete( $opts->{ $opt }->{ parms }->{ $parm } );
}; # if
}; # foreach $parm
}; # foreach $opt
# --------------------------------------------------------------------------------------------------
# Fill job queue.
# --------------------------------------------------------------------------------------------------
# Forward declaration - enqueue_jobs recurses.
sub enqueue_jobs($$@);
# Recursively expand the cartesian product of all enabled option values.
# $jobs: accumulator array ref; $set: option values fixed so far; @rest:
# option names still to expand. At each leaf one job description
# (makefile, make arguments, build directory) is pushed onto @$jobs.
sub enqueue_jobs($$@) {
my ( $jobs, $set, @rest ) = @_;
if ( @rest ) {
my $opt = shift( @rest );
if (
exists( $set->{ target } )
and
$opts->{ $opt }->{ targets } !~ m{(?:\A|,)$set->{ target }(?:,|\z)}
) {
# This option does not have meaning for the target,
# do not iterate, just use default value.
enqueue_jobs( $jobs, { $opt => $opts->{ $opt }->{ dflt }, %$set }, @rest );
} else {
foreach my $parm ( sort( keys( %{ $opts->{ $opt }->{ parms } } ) ) ) {
enqueue_jobs( $jobs, { $opt => $parm, %$set }, @rest );
}; # foreach $parm
}; # if
} else {
my $makefile = $makefiles{ $set->{ target } };
my @base = map( substr( $set->{ $_ }, 0, 3 ), @base_opts );
my @extra = map( suffix( $_, $set->{ $_ }, 0 ), @extra_opts );
my @ex = grep( $_ ne "", map( suffix( $_, $set->{ $_ }, 1 ), @extra_opts ) );
# Shortened version of @extra -- only non-default values.
my $suffix = ( @extra ? "." . join( ".", @extra ) : "" );
# Drop the Intel(R) MIC ("kn*") part of the suffix on non-"lrb" platforms.
my $knights = index( $suffix, "kn" ) - 1;
if ( $target_platform !~ "lrb" and $knights > 0 ) {
$suffix = substr( $suffix, 0, $knights );
}
my $suf = ( @ex ? "." . join( ".", @ex ) : "" );
# Shortened version of $suffix -- only non-default values.
my $build_dir = join( "-", $target_platform, join( "_", @base ) . $suffix, Uname::host_name() );
# NOTE(review): $out_arch_dir and $out_cmn_dir are computed but not used
# anywhere in this sub - confirm whether they can be removed.
my $out_arch_dir = cat_dir( $ENV{ LIBOMP_EXPORTS }, $target_platform . $suf );
my $out_cmn_dir = cat_dir( $ENV{ LIBOMP_EXPORTS }, "common" );
push(
@$jobs,
{
makefile => $makefile,
make_args => [
"os=" . $target_os,
"arch=" . $target_arch,
"MIC_OS=" . $set->{ "mic-os" },
"MIC_ARCH=" . $set->{ "mic-arch" },
"MIC_COMP=" . $set->{ "mic-comp" },
"date=" . Build::tstr( $Build::start ),
"TEST_DEPS=" . ( $test_deps ? "on" : "off" ),
"TEST_TOUCH=" . ( $test_touch ? "on" : "off" ),
"CPLUSPLUS=on",
"COVERAGE=" . $set->{ coverage },
# Option "mode" controls 3 make flags:
# debug => Full debugging : diagnostics, debug info, no optimization.
# diag => Only diagnostics : diagnostics, debug info, optimization.
# release => Production build : no diagnostics, no debug info, optimization.
"DEBUG_INFO=" . ( $set->{ mode } ne "release" ? "on" : "off" ),
"DIAG=" . ( $set->{ mode } ne "release" ? "on" : "off" ),
"OPTIMIZATION=" . ( $set->{ mode } ne "debug" ? "on" : "off" ),
"LIB_TYPE=" . substr( $set->{ "lib-type" }, 0, 4 ),
"LINK_TYPE=" . substr( $set->{ "link-type" }, 0, 4 ),
"OMP_VERSION=" . $set->{ "omp-version" },
"USE_TCHECK=" . $set->{ tcheck },
"VERSION=" . $set->{ version },
"TARGET_COMPILER=" . $set->{ "target-compiler" },
"suffix=" . $suf,
@goals,
],
build_dir => $build_dir
}
); # push
}; # if
}; # sub enqueue_jobs
# Expand the selected configurations into a flat list of jobs.
my @jobs;
enqueue_jobs( \@jobs, {}, @all_opts );
# --------------------------------------------------------------------------------------------------
# Do the work.
# --------------------------------------------------------------------------------------------------
my $exit = 0;
Build::init();
if ( $clobber ) {
# Clobber mode: wipe tmp, exports and tools/bin after user confirmation.
my @dirs = ( $ENV{ LIBOMP_TMP }, $ENV{ LIBOMP_EXPORTS }, cat_dir( $root_dir, "tools", "bin" ) );
my $rc = 0;
question(
"Clobber " . join( ", ", map( "\"" . Build::shorter( $_ ) . "\"", @dirs ) ) . " dirs? ",
$answer,
qr{\A(y|yes|n|no)\z}i
);
if ( $answer =~ m{\Ay}i ) {
info( "Clobbering..." );
$rc = Build::clean( @dirs );
info( Build::rstr( $rc ) );
}; # if
if ( $rc != 0 ) {
$exit = 3;
}; # if
} else { # Build or clean.
if ( @jobs ) {
my $total = @jobs; # Total number of jobs.
my $n = 0; # Current job number.
Build::progress( "", "" ); # Output empty line to log file.
my $goals = join( " ", @goals );
Build::progress( "Goals", $goals eq "" ? "(all)" : $goals );
Build::progress( "Configurations", scalar( @jobs ) );
foreach my $job ( @jobs ) {
++ $n;
my $base = get_file( $job->{ build_dir } );
Build::progress( "Making", "%3d of %3d : %s", $n, $total, $base );
$job->{ rc } = Build::make( $job, $clean, sprintf( "%d/%d", $n, $total ) );
}; # my $job
my $failures = Build::summary();
if ( $failures > 0 ) {
$exit = 3;
}; # if
} else {
info( "Nothing to do." );
}; # if
}; # if
# And exit.
exit( $exit );
__END__
=pod
=head1 NAME
B<build.pl> -- Build one or more configurations of OMP RTL libraries.
=head1 SYNOPSIS
B<build.pl> I<option>... [B<-->] I<make-option>... I<variable>... I<goal>...
=head1 OPTIONS
=over
=item B<--all>
Build all base configurations.
=item B<--ALL>
Build really all configurations, including extra ones.
=item B<--answer=>I<str>
Use specified string as default answer to all questions.
=item B<--architecture=>I<arch>
Specify target architecture to build. Default is architecture of host machine. I<arch> can be C<32>,
C<32e>, or one of known aliases like C<IA32>.
If architecture is not specified explicitly, value of LIBOMP_ARCH environment variable is used.
If LIBOMP_ARCH is not defined, host architecture detected.
=item B<--os=>I<os>
Specify target OS. Default is OS of host machine. I<os> can be C<lin>, C<lrb>, C<mac>, C<win>,
or one of known aliases like C<Linux>, C<WinNT>, etc.
=item B<--mic-os=>I<os>
Specify OS on Intel(R) Many Integrated Core Architecture card. Default is C<bsd>. I<os> can be C<bsd>, C<lin>.
=item B<--mic-arch=>I<arch>
Specify architecture of Intel(R) Many Integrated Core Architecture card. Default is C<knf>. I<arch> can be C<knf>, C<knc>, C<knl>.
=item B<--mic-comp=>I<compiler-type>
Specify whether the Intel(R) Many Integrated Core Compiler is native or offload. Default is C<native>.
I<compiler-type> can be C<native> or C<offload>.
=item B<-->[B<no->]B<test-deps>
Enable or disable C<test-deps>. The test runs in any case, but result of disabled test is ignored.
By default, test is enabled.
=item B<-->[B<no->]B<test-touch>
Enable or disable C<test-touch>. The test runs in any case, but result of disabled test is ignored.
By default, test is enabled.
=item Base Configuration Selection Options
=over
=item B<--target=>I<target>
Build specified target, either C<rtl> (OMP Runtime Library; default),
or C<timelimit> (program used in testing), or C<all>.
=item B<--lib-type=>I<lib>
Build specified library, either C<normal> (default), or C<stubs>, or C<all>.
=item B<--link-type=>I<type>
Build specified link type, either C<dynamic> (default) or C<all>.
=back
=item Extra Configuration Selection Options
=over
=item B<--cover=>I<switch>
Build for code coverage data collection. I<switch> can be C<off> (default), C<on>
or C<all>.
=item B<--mode=>I<mode>
Build library of specified I<mode>, either C<debug>, C<diag>, C<release> (default), or C<all>.
Mode controls 3 features:
---------------------------------------------------
feature/mode debug diag release
---------------------------------------------------
debug info o o
diagnostics (asserts, traces) o o
code optimization o o
---------------------------------------------------
=item B<--target-compiler=>I<version>
Build files for specified target compiler, C<11> or C<12>.
=back
=item Shortcuts
If option with C<no> prefix is used, corresponding configuration will B<not> be built.
Useful for excluding some configurations if one or more other options specified with C<all>
value (see Examples).
=over
=item B<-->[B<no>]B<11>
Build files for compiler C<11>.
=item B<-->[B<no>]B<12>
Build files for compiler C<12>.
=item B<-->[B<no>]B<debug>
=item B<-->[B<no>]B<debg>
=item B<-->[B<no>]B<dbg>
Build debuggable library.
=item B<-->[B<no>]B<diag>
Build library with diagnostics enabled.
=item B<-->[B<no>]B<dynamic>
Build dynamic library (default).
=item B<-->[B<no>]B<normal>
Build normal library (default).
=item B<-->[B<no>]B<release>
Build release library (default).
=item B<-->[B<no>]B<rtl>
Build OMP RTL (default).
=item B<-->[B<no>]B<stubs>
Build stubs library.
=item B<-->[B<no>]B<timelimit>
Build timelimit utility program.
=back
=item Standard Options
=over
=item B<--doc>
=item B<--manual>
Print full help message and exit.
=item B<--help>
Print short help message and exit.
=item B<--usage>
Print very short usage message and exit.
=item B<--verbose>
Do print informational messages.
=item B<--version>
Print program version and exit.
=item B<--quiet>
Work quiet, do not print informational messages.
=back
=back
=head1 ARGUMENTS
=over
=item I<make-option>
Any option for makefile, for example C<-k> or C<-n>. If you pass some options to makefile, C<-->
delimiter is mandatory, otherwise C<build.pl> processes all the options internally.
=item I<variable>
Define makefile variable in form I<name>B<=>I<value>. Most makefile capabilities are
accessible through C<build.pl> options, so there is no need in defining make variables in command
line.
=item I<goal>
Makefile goal to build (or clean).
=over
=item B<all>
Build C<lib>, C<tests>, C<inc>.
=item B<common>
Build common (architecture-independent) files. Common files are not configuration-dependent, so
there is no point in building them for more than one configuration (though it is harmless).
However, do not build common files on many machines simultaneously.
=item B<clean>
Delete the export files and clean build directory of configuration(s) specified by options. Note
that C<clean> goal cannot be mixed with other goals (except for C<clean-common>).
=item B<clean-common>
Delete the common files in F<exports/> directory.
=item B<clobber>
Clean F<export/> and F<tmp/> directories. If C<clobber> is specified, other goals and/or options
do not matter.
Note: Clobbering is a potentially dangerous operation, because it deletes the content of the
directory pointed to by the C<LIBOMP_TMP> environment variable, so C<build.pl> asks for confirmation before
clobbering. To suppress the question, use option C<--answer=yes>.
=item B<fat>
C<mac_32e> only: Build fat libraries for both mac_32 and mac_32e. Should be run when C<lib>
goal is built on both C<mac_32> and C<mac_32e>.
=item I<file.o>
(Windows* OS: I<file.obj>) Build specified object file only.
=item I<file.i>
Create preprocessed source file.
=item B<force-tests>
Force performing tests.
=item B<force-test-deps>
Force performing test-deps.
=item B<force-test-instr>
Force performing test-instr.
=item B<force-test-relo>
Force performing test-relo.
=item B<force-test-touch>
Force performing test-touch.
=item B<inc>
Build Fortran include files, omp_lib.h, omp_lib.mod and omp_lib_kinds.mod.
=item B<lib>
Build library (on Windows* OS in case of dynamic linking, it also builds import library).
=item B<tests>
Perform tests: C<test-deps>, C<test-instr>, C<test-relo>, and C<test-touch>.
=item B<test-deps>
Check the library dependencies.
=item B<test-instr>
Intel(R) Many Integrated Core Architecture only: check the library does not contain undesired instructions.
=item B<test-relo>
Linux* OS with dynamic linking only: check the library does not contain position-dependent
code.
=item B<test-touch>
Build a very simple application with native compiler (GNU on Linux* OS and OS X*, MS
on Windows* OS), check it does not depend on C<libirc> library, and run it.
=back
=back
=head1 DESCRIPTION
C<build.pl> constructs the name of a build directory, creates the directory if it
does not exist, changes to it, and runs make to build the goals in specified configuration.
If more than one configuration is specified in the command line, C<build.pl> builds them all.
Being run with C<clean> goal, C<build.pl> does not build but deletes export files and
cleans build directories of configuration specified by other options. For example,
C<build.pl --all clean> means "clean build directories for all configurations",
it does B<not> mean "clean then build all".
The C<clean-common> goal deletes common files in the F<exports/> directory.
Since common files are really common and not architecture and/or configuration dependent,
there is not much meaning in combining C<clean-common> with configuration selection options.
For example, C<build.pl --all clean-common> deletes the same files 13 times.
However, it does not hurt and can be used in conjunction with the C<clean> goal.
C<clobber> goal instructs C<build.pl> to clean exports and all build
directories, e. g. clean everything under F<exports/> and F<tmp/> directories.
Logs are saved automatically; there is no need for explicit output redirection.
Log file for each particular configuration is named F<build.log> and located in build directory.
Summary log file (just result of each configuration) is saved in F<tmp/> directory.
Log files are never overwritten. C<build.pl> always appends output to log files.
However (obviously), C<clean> deletes the log file for cleaned configurations,
and C<clobber> deletes all summary log files.
=head2 Environment Variables
=over
=item B<LIBOMP_ARCH>
Specifies target architecture. If not present, host architecture is used. Environment variable may
be overridden by the C<--architecture> command line option.
=item B<LIBOMP_EXPORTS>
Specifies the directory for output files. If not set, C<$LIBOMP_WORK/exports/> is used by default.
=item B<LIBOMP_OS>
Specifies target OS. If not present, host OS is used. Environment variable may
be overridden by the C<--os> command line option.
=item B<LIBOMP_TMP>
Directory for temporary files. C<build.pl> creates build directories there. If not set,
C<$LIBOMP_WORK/tmp/> is used by default.
On Windows* OS F<tmp/> directory on local drive speeds up the build process.
=item B<LIBOMP_WORK>
Root of libomp directory tree, contains F<src/>, F<tools/>, and F<exports/> subdirs.
If not set, C<build.pl> guesses the root dir (it is a parent of dir containing C<build.pl>).
Note: Guessing is not reliable. Please set the C<LIBOMP_WORK> environment variable appropriately.
=back
=head1 EXAMPLES
=head2 Development
Build normal (performance) dynamic library for debugging:
$ build.pl --debug
Build all libraries (normal, stub; dynamic RTL) for debugging:
$ build.pl --all --debug
Do a clean build for all:
$ build.pl --all --debug clean && build.pl --all --debug
Debugging libraries are saved in F<exports/I<platform>.deb/>.
=head2 Promotion
=over
=item 1
Clobber everything; on one machine:
$ build.pl clobber
=item 2
Build common headers, on one machine:
$ build.pl common
=item 3
Build all platform-dependent files, on all machines:
$ build.pl --all
=item 4
Build OS X* universal (fat) libraries, on C<mac_32e>:
$ build.pl fat
=back
=cut
# end of file #
| HackLinux/goblin-core | riscv/riscv-omp/runtime/tools/build.pl | Perl | bsd-3-clause | 25,320 |
sa(200001740,1,200004227,3).
sa(200001740,1,200005041,3).
sa(200005815,1,200006238,3).
sa(200005815,1,200006238,2).
sa(200005815,1,202200341,1).
sa(200010435,1,202122522,1).
sa(200014742,1,200015806,2).
sa(200014742,1,200015806,1).
sa(200014742,1,201177314,2).
sa(200014742,1,201177505,2).
sa(200015303,3,200017282,7).
sa(200016380,1,200017282,8).
sa(200024649,2,200024814,4).
sa(200031820,1,200802136,1).
sa(200031820,1,200802136,2).
sa(200031820,1,200851933,5).
sa(200033955,1,200742149,1).
sa(200036362,1,200025034,1).
sa(200044149,2,200046534,1).
sa(200046534,1,200044149,2).
sa(200046534,1,200045346,1).
sa(200067545,1,200046022,1).
sa(200101956,1,200006238,4).
sa(200101956,1,202200341,3).
sa(200102974,2,201827745,1).
sa(200102974,5,201827745,2).
sa(200135857,1,200429060,8).
sa(200135857,1,202539788,3).
sa(200138508,2,201510399,2).
sa(200138508,2,201510576,3).
sa(200146138,2,200205598,1).
sa(200146138,2,201808626,1).
sa(200147815,0,202537642,0).
sa(200147815,0,202530003,0).
sa(200147815,0,201997862,0).
sa(200147815,0,200334186,0).
sa(200147815,0,200397405,0).
sa(200147815,0,200204391,0).
sa(200147815,0,201113806,0).
sa(200149583,3,200248659,5).
sa(200149583,3,200248659,6).
sa(200149583,3,201108627,2).
sa(200149583,3,201111028,6).
sa(200173338,2,200641252,3).
sa(200181664,2,200181559,1).
sa(200181664,2,200181559,2).
sa(200217152,1,200751131,3).
sa(200230746,1,202540347,1).
sa(200246217,1,201139623,2).
sa(200262703,1,200764902,2).
sa(200277935,1,201463520,3).
sa(200277935,1,200621058,8).
sa(200292507,1,200778275,4).
sa(200298556,1,201994288,2).
sa(200298896,1,202717831,3).
sa(200298896,1,200407848,1).
sa(200298896,1,201643657,5).
sa(200298896,1,200677683,3).
sa(200298896,1,200724150,4).
sa(200298896,1,200851239,3).
sa(200305109,1,200437449,4).
sa(200311113,3,202516255,4).
sa(200314272,1,202676496,1).
sa(200319214,1,202153387,1).
sa(200328802,1,200436404,2).
sa(200334186,1,200209174,2).
sa(200339464,1,200345508,2).
sa(200339464,1,200426749,1).
sa(200358431,1,200427683,1).
sa(200358431,1,200427683,2).
sa(200358431,1,200242026,2).
sa(200358431,1,200224168,1).
sa(200392960,2,200305417,1).
sa(200392960,2,202683671,1).
sa(200394813,3,200620379,2).
sa(200416705,1,202672540,5).
sa(200416705,1,202672540,6).
sa(200416705,1,202672540,7).
sa(200421408,1,200156485,3).
sa(200452512,1,201695567,2).
sa(200452512,1,201194938,4).
sa(200461493,1,202148109,4).
sa(200473322,1,200236592,4).
sa(200473322,1,200429060,3).
sa(200473322,1,200472671,1).
sa(200481941,3,200486703,2).
sa(200508032,1,200234725,1).
sa(200508032,1,200234725,2).
sa(200519056,1,201763101,1).
sa(200607780,7,200611256,2).
sa(200617748,3,200618057,2).
sa(200622384,5,200621653,1).
sa(200622384,5,200634906,4).
sa(200623151,2,202660819,2).
sa(200628491,1,200630380,2).
sa(200628491,1,200629889,1).
sa(200628491,1,201634142,1).
sa(200637259,6,200712708,6).
sa(200637259,7,200634906,3).
sa(200640828,1,202645007,3).
sa(200672433,1,200672017,1).
sa(200672433,1,200673766,2).
sa(200704249,2,202640226,1).
sa(200715868,1,202629390,2).
sa(200721437,2,200598954,6).
sa(200752493,1,201469770,2).
sa(200752493,1,202384686,2).
sa(200752493,1,202384686,3).
sa(200752493,1,202486693,1).
sa(200759269,1,200894221,2).
sa(200779601,1,200779360,1).
sa(200792471,1,201629958,11).
sa(200792471,1,201646866,3).
sa(200792471,1,201097960,1).
sa(200853633,2,200851933,6).
sa(200889947,1,200884814,1).
sa(200913065,3,200912048,3).
sa(200913065,4,200975584,1).
sa(200918580,1,201139104,1).
sa(200941990,2,200916123,1).
sa(200941990,4,201051956,2).
sa(200947857,1,200947717,1).
sa(200947857,1,201101313,1).
sa(200952524,1,200825648,1).
sa(200954422,1,202193194,8).
sa(200962447,2,202542141,1).
sa(200996485,1,200966152,2).
sa(200996485,1,202343816,1).
sa(200996485,1,202343816,2).
sa(201002481,1,201002618,1).
sa(201040550,1,201860620,1).
sa(201072949,1,201081652,1).
sa(201078783,2,201084048,1).
sa(201086103,2,201081505,1).
sa(201090335,2,201131197,3).
sa(201130607,1,201389942,1).
sa(201130607,1,201389942,2).
sa(201136614,1,201136964,1).
sa(201156834,4,201197980,4).
sa(201158572,1,201157517,3).
sa(201166351,1,201167537,1).
sa(201166351,1,201167640,1).
sa(201166351,1,201196802,1).
sa(201167981,1,201167537,2).
sa(201167981,1,201167640,2).
sa(201169704,1,201197980,3).
sa(201170052,1,201175937,2).
sa(201170052,1,201202374,7).
sa(201173933,1,201174099,2).
sa(201206218,1,201979702,1).
sa(201212230,4,201392237,2).
sa(201212230,4,200968211,11).
sa(201214265,1,202492584,1).
sa(201214265,1,202569630,3).
sa(201214265,1,201312371,5).
sa(201214265,1,201434278,1).
sa(201214265,1,201214786,1).
sa(201214265,1,201572510,2).
sa(201235769,1,201240308,4).
sa(201238640,1,201909978,5).
sa(201238640,1,201412346,4).
sa(201238640,1,201414088,2).
sa(201238640,1,201414088,1).
sa(201239619,1,201240308,2).
sa(201239619,2,201240308,5).
sa(201239619,2,201417705,2).
sa(201249724,1,201254013,4).
sa(201249724,1,201254013,5).
sa(201249724,1,201232738,2).
sa(201249724,1,201548718,2).
sa(201252971,1,202516255,3).
sa(201255967,1,202467662,6).
sa(201256600,2,201259458,1).
sa(201257173,1,201257507,1).
sa(201257173,1,201258091,2).
sa(201257173,1,201299268,2).
sa(201259458,1,201256600,2).
sa(201260428,1,201260685,2).
sa(201261018,1,200677445,2).
sa(201261018,4,200677445,1).
sa(201269521,1,202158034,2).
sa(201281611,1,201281343,1).
sa(201285440,1,201286913,1).
sa(201285440,1,201286913,2).
sa(201289155,1,200768630,1).
sa(201290255,1,202705535,2).
sa(201290422,1,202716165,1).
sa(201306853,1,202284662,1).
sa(201306853,1,202290029,1).
sa(201309143,1,201549719,1).
sa(201309701,1,201313923,2).
sa(201309701,1,201173057,2).
sa(201309701,1,201312261,1).
sa(201312810,1,201174099,3).
sa(201312810,1,202290029,2).
sa(201314738,1,201314440,1).
sa(201323958,1,201327582,2).
sa(201339294,2,202172534,1).
sa(201340283,1,200715541,2).
sa(201345109,2,201347678,5).
sa(201345109,2,201587062,4).
sa(201348174,1,201347678,7).
sa(201348174,1,201348452,1).
sa(201349130,1,201349318,1).
sa(201350449,1,201281611,2).
sa(201350449,1,201375637,2).
sa(201350449,1,201468058,2).
sa(201350449,1,201506157,4).
sa(201357429,1,201328513,3).
sa(201357831,1,200715541,3).
sa(201360571,1,201360715,1).
sa(201365549,1,201366426,1).
sa(201369346,1,201259691,4).
sa(201369346,1,201259691,5).
sa(201369346,1,201562061,2).
sa(201369346,1,201562061,3).
sa(201369346,2,201254324,1).
sa(201369346,2,201254324,2).
sa(201369346,2,201552390,3).
sa(201369758,1,201657977,4).
sa(201369758,1,201657977,5).
sa(201369758,1,201259458,4).
sa(201369758,1,201560984,2).
sa(201369758,1,200643473,3).
sa(201369758,2,201566185,1).
sa(201370561,1,202401809,5).
sa(201370561,1,201371651,1).
sa(201370561,1,201586738,1).
sa(201370561,1,201586738,2).
sa(201372556,1,202401809,4).
sa(201378556,1,202060141,3).
sa(201378556,1,202077148,2).
sa(201378556,1,202028994,4).
sa(201378556,1,201360715,2).
sa(201380638,1,201976089,3).
sa(201382083,1,202107248,1).
sa(201382083,1,202117955,2).
sa(201382083,1,201957107,1).
sa(201382083,1,201592456,4).
sa(201382083,1,200677544,2).
sa(201382083,1,200598954,5).
sa(201382083,3,200677203,1).
sa(201391806,2,201391946,1).
sa(201392237,1,201392918,1).
sa(201392237,1,201392918,2).
sa(201392237,1,201548718,5).
sa(201392237,1,201157517,8).
sa(201392237,2,201212230,4).
sa(201394200,1,201274254,1).
sa(201410223,1,201258302,3).
sa(201417868,2,201666002,1).
sa(201437254,1,200949974,3).
sa(201438902,1,201295275,2).
sa(201438902,1,200967625,2).
sa(201439190,1,201998793,1).
sa(201439745,3,202304648,1).
sa(201440139,1,200744572,1).
sa(201445932,1,201445756,2).
sa(201447868,1,201675780,2).
sa(201447868,1,201349318,2).
sa(201447868,1,201375637,3).
sa(201448100,1,201661804,7).
sa(201448100,1,201592456,3).
sa(201448100,1,201351170,4).
sa(201448100,1,201351170,2).
sa(201448100,1,201453718,3).
sa(201448100,1,201505254,3).
sa(201448100,1,201449053,1).
sa(201449796,1,200804476,2).
sa(201453433,1,202677797,6).
sa(201460029,2,200677544,1).
sa(201483779,2,201955009,1).
sa(201486151,1,202291258,3).
sa(201493741,1,201790739,4).
sa(201493741,1,201800422,5).
sa(201493741,1,201819387,2).
sa(201493741,1,201569017,2).
sa(201493741,1,200021065,6).
sa(201493741,1,200021065,5).
sa(201493741,1,201327133,1).
sa(201493741,1,201355906,1).
sa(201493741,1,201327133,2).
sa(201493741,1,200213794,3).
sa(201493741,1,200486018,6).
sa(201493741,1,202760495,2).
sa(201494310,1,202642814,4).
sa(201494310,1,201308381,2).
sa(201494310,1,202702674,3).
sa(201494310,1,202642814,10).
sa(201494310,1,201981036,2).
sa(201494310,1,202494356,9).
sa(201494310,1,201656788,3).
sa(201494310,1,201363648,1).
sa(201494310,1,201570403,2).
sa(201494310,1,201544692,2).
sa(201494310,1,200742320,5).
sa(201494310,2,201489465,3).
sa(201494310,2,202702674,1).
sa(201494310,2,202573127,2).
sa(201494310,2,202578008,4).
sa(201494310,2,201979241,3).
sa(201494310,2,201656788,4).
sa(201494310,2,201661243,4).
sa(201494310,2,201463963,2).
sa(201494310,2,201474209,4).
sa(201494310,6,201474209,3).
sa(201503404,1,201504298,3).
sa(201503404,2,201504298,1).
sa(201503404,3,201504298,2).
sa(201503952,1,202281093,6).
sa(201507143,3,201304716,2).
sa(201516534,2,201352996,2).
sa(201516534,3,200606471,3).
sa(201522276,2,200435688,2).
sa(201522276,3,200435688,1).
sa(201524523,2,202007680,1).
sa(201528069,3,202713372,1).
sa(201539063,2,201282142,1).
sa(201539063,7,201505254,5).
sa(201539063,7,201282142,2).
sa(201543123,1,200670179,1).
sa(201543123,1,201984902,1).
sa(201544692,1,201469263,3).
sa(201544692,1,202305856,4).
sa(201544692,1,202265979,2).
sa(201544692,2,201494310,1).
sa(201544692,3,202663643,1).
sa(201545883,1,201590007,2).
sa(201545883,1,202663643,2).
sa(201546111,1,201983264,5).
sa(201552519,1,201553987,1).
sa(201552519,1,201554799,1).
sa(201552519,1,201255967,2).
sa(201552519,1,201309701,3).
sa(201552519,1,201259458,3).
sa(201552519,1,201549719,2).
sa(201573515,1,201573891,2).
sa(201573515,1,200846344,2).
sa(201573515,1,201661804,5).
sa(201586278,1,200711715,3).
sa(201588493,3,202320078,1).
sa(201588493,3,202319824,1).
sa(201594978,4,202587239,3).
sa(201617192,1,201619725,4).
sa(201617192,1,201634424,4).
sa(201628449,1,202627666,1).
sa(201629589,1,201644050,2).
sa(201629589,1,202522864,3).
sa(201629589,1,201629958,9).
sa(201637633,1,201634142,3).
sa(201652139,1,202663340,3).
sa(201682582,1,202516255,1).
sa(201682582,1,202148109,2).
sa(201690816,1,200706804,3).
sa(201698271,1,201754576,1).
sa(201727684,1,201050565,1).
sa(201761706,5,202585050,3).
sa(201770802,2,201761120,2).
sa(201771535,1,201821996,1).
sa(201779165,1,201785748,5).
sa(201779165,1,201785748,7).
sa(201822936,1,202550296,1).
sa(201822936,1,201776214,1).
sa(201831531,1,201848465,1).
sa(201834213,1,201848465,4).
sa(201835496,2,201992503,6).
sa(201835496,2,201921964,4).
sa(201835496,2,202072849,7).
sa(201835496,2,202073714,6).
sa(201835496,2,202667558,1).
sa(201835496,2,202051694,5).
sa(201835496,2,201968569,5).
sa(201835496,2,201989873,3).
sa(201835496,2,201970646,2).
sa(201835496,2,201970826,3).
sa(201835496,2,201989873,4).
sa(201835496,2,200781000,2).
sa(201835496,2,200969873,1).
sa(201835496,3,201992503,4).
sa(201835496,3,201856211,1).
sa(201835496,3,201994442,8).
sa(201849221,1,202716165,3).
sa(201849221,1,202524171,3).
sa(201849221,1,202667419,1).
sa(201849221,1,202619924,4).
sa(201849221,1,201970348,2).
sa(201849221,1,201990281,2).
sa(201852701,1,202676496,2).
sa(201854132,1,201630532,5).
sa(201860107,1,201860485,1).
sa(201866192,1,201966706,2).
sa(201866192,1,201867816,1).
sa(201869563,1,202522581,4).
sa(201869563,2,202522581,5).
sa(201870275,2,200618057,2).
sa(201871979,2,202013840,1).
sa(201871979,2,202056466,3).
sa(201874424,1,202623346,1).
sa(201886488,2,201988886,2).
sa(201886488,2,202072849,8).
sa(201887576,1,202072849,5).
sa(201888295,1,202076027,2).
sa(201888295,2,202072849,6).
sa(201894520,1,202522581,1).
sa(201899262,1,201996574,2).
sa(201904930,1,201910965,3).
sa(201904930,1,201910965,2).
sa(201907258,1,202004528,2).
sa(201907258,1,202034300,3).
sa(201907258,1,200897564,2).
sa(201911888,1,202076027,3).
sa(201911888,1,202076027,5).
sa(201919391,1,201992503,5).
sa(201920048,1,201920220,1).
sa(201920048,1,201920594,1).
sa(201921964,1,201923414,6).
sa(201921964,1,201978576,2).
sa(201926311,1,201883716,11).
sa(201926311,1,202075049,6).
sa(201928838,1,202016401,1).
sa(201950798,2,201955508,2).
sa(201950798,2,201955127,3).
sa(201951480,1,202402825,7).
sa(201951480,1,202465939,3).
sa(201963942,1,201923414,5).
sa(201963942,2,202674564,1).
sa(201966861,1,201923414,1).
sa(201966861,2,200616498,4).
sa(201968569,1,201990281,3).
sa(201970826,2,201989053,2).
sa(201974062,2,201976089,2).
sa(201989873,1,201457954,2).
sa(201996574,1,200780191,6).
sa(201997119,1,201997680,1).
sa(201997119,1,201997680,2).
sa(201997119,1,201997680,3).
sa(201997119,1,201997376,1).
sa(202001858,1,202002720,1).
sa(202001858,5,202027030,1).
sa(202014165,2,202345647,3).
sa(202016523,1,202212646,1).
sa(202016523,7,201855982,1).
sa(202018524,1,202019716,1).
sa(202038357,1,201590007,3).
sa(202038357,1,201590007,1).
sa(202038357,3,201909978,2).
sa(202038357,3,201909397,3).
sa(202040273,1,202063610,2).
sa(202045043,1,202676496,3).
sa(202050132,1,200616498,2).
sa(202050132,1,201915365,5).
sa(202050132,1,201912159,5).
sa(202055267,2,200432572,1).
sa(202055649,2,200438178,2).
sa(202056971,1,202056466,2).
sa(202056971,1,202002720,2).
sa(202056971,1,202002720,6).
sa(202056971,1,202002720,4).
sa(202058590,1,202640226,2).
sa(202058994,5,200438178,2).
sa(202061495,5,202004701,2).
sa(202062632,2,200909134,1).
sa(202066939,1,202072159,4).
sa(202066939,2,202634567,2).
sa(202069551,1,201202374,5).
sa(202072501,1,202017937,1).
sa(202076280,1,202072849,4).
sa(202076280,1,202076027,1).
sa(202077656,1,201993926,2).
sa(202077656,1,201981036,3).
sa(202077656,1,201973125,5).
sa(202077656,1,201974062,5).
sa(202077656,1,202157100,2).
sa(202077656,1,202157100,3).
sa(202123672,1,202126022,2).
sa(202124106,2,202126022,1).
sa(202124106,2,202126022,3).
sa(202125032,1,202125460,1).
sa(202125223,1,202125460,2).
sa(202130524,1,202132099,1).
sa(202130524,1,202132263,1).
sa(202130524,1,202167435,1).
sa(202130524,1,202128653,2).
sa(202130524,1,201827858,2).
sa(202130524,1,201315613,3).
sa(202130524,1,200661824,3).
sa(202130524,1,200696414,2).
sa(202130524,1,200877083,3).
sa(202137132,1,202141973,3).
sa(202169119,4,202125460,4).
sa(202169891,1,202189714,1).
sa(202175057,1,200945648,2).
sa(202175057,1,200945648,1).
sa(202175057,1,201037498,4).
sa(202179518,1,201651110,1).
sa(202180898,1,202175861,1).
sa(202182342,1,200780191,1).
sa(202199590,1,202284951,4).
sa(202202384,1,202213690,2).
sa(202202384,1,202283324,3).
sa(202205272,1,202311387,3).
sa(202205272,1,202205887,1).
sa(202205272,1,202492584,1).
sa(202207206,1,202323286,2).
sa(202207206,1,202274299,3).
sa(202207206,1,202274299,2).
sa(202208118,1,202311387,3).
sa(202208903,3,202209499,2).
sa(202218443,1,202209499,3).
sa(202230772,1,202589013,2).
sa(202230772,1,202230247,1).
sa(202230772,1,202296153,4).
sa(202230772,1,200742320,2).
sa(202230772,2,202201644,3).
sa(202230772,2,202230615,1).
sa(202230772,2,202293321,1).
sa(202236124,2,202216560,2).
sa(202236124,2,202301825,2).
sa(202236124,2,202346724,4).
sa(202236124,2,202656995,1).
sa(202244956,1,202294436,9).
sa(202244956,1,202260362,1).
sa(202251743,1,202215506,1).
sa(202251743,1,202352019,2).
sa(202251743,1,202256853,2).
sa(202251743,1,202284803,2).
sa(202251743,1,202301502,2).
sa(202253456,1,201153947,1).
sa(202253456,1,201153947,2).
sa(202256354,1,202279513,1).
sa(202306087,1,202281093,2).
sa(202306087,1,202305856,5).
sa(202350175,1,202629256,2).
sa(202367363,1,202376429,3).
sa(202367363,1,202517655,1).
sa(202367363,1,202536557,2).
sa(202384940,1,202486693,2).
sa(202399331,2,202482425,5).
sa(202406916,4,201993549,2).
sa(202425462,1,201645421,2).
sa(202461314,1,202400637,1).
sa(202461314,1,202473688,5).
sa(202516437,1,202516255,5).
sa(202519991,2,202672540,1).
sa(202530167,1,202531625,4).
sa(202534307,1,202534062,6).
sa(202558172,6,202537812,1).
sa(202571251,1,202570062,1).
sa(202576921,5,201634424,3).
sa(202611373,1,202699497,2).
sa(202611373,1,202674564,4).
sa(202611373,1,202673965,2).
sa(202611373,1,202726385,1).
sa(202611373,1,202638630,1).
sa(202611373,1,201116585,4).
sa(202617798,1,202660819,5).
sa(202623194,1,202623346,2).
sa(202645839,2,202678663,1).
sa(202649830,3,201177314,1).
sa(202649830,3,201177505,1).
sa(202651424,1,202648639,3).
sa(202660442,3,202660819,3).
sa(202681795,1,202496388,1).
sa(202681795,1,202449340,2).
sa(202681795,1,201189823,1).
sa(202681795,1,202679530,3).
sa(202681795,1,200811720,1).
sa(202681795,1,200235918,2).
sa(202681795,1,200495808,3).
sa(202681795,1,201113975,1).
sa(202681795,3,201131473,6).
sa(202681795,3,201116585,2).
sa(202681795,3,202682424,1).
sa(202681795,3,202618149,8).
sa(202681795,3,200459776,3).
sa(202685665,1,202690093,3).
sa(202685665,1,202690299,1).
sa(202685951,1,202703539,2).
sa(202685951,5,202685665,2).
sa(202704349,1,202679012,2).
sa(202710673,2,202703952,1).
sa(202714974,1,202715923,2).
sa(202715279,1,202715923,1).
sa(202762981,2,202762806,2).
sa(202764438,1,202762806,3).
sa(202770717,2,202771020,2).
sa(202770717,2,202771020,3).
sa(300004413,0,301442186,0).
sa(300009046,0,301299888,0).
sa(300009978,0,301297315,0).
sa(300011757,0,302391867,0).
sa(300011757,0,301980250,0).
sa(300013160,0,302391455,0).
sa(300013160,0,301834304,0).
sa(300013160,0,301932973,0).
sa(300013887,0,300105746,0).
sa(300013887,0,302021905,0).
sa(300016756,0,300106456,0).
sa(300017782,0,302080577,0).
sa(300018584,0,302082218,0).
sa(300018584,0,302540236,0).
sa(300019131,0,300604617,0).
sa(300020787,0,301133876,0).
sa(300020787,0,301195536,0).
sa(300020787,0,301372049,0).
sa(300021766,0,300631391,0).
sa(300021766,0,300914421,0).
sa(300021766,0,301837744,0).
sa(300023383,0,301839417,0).
sa(300023383,0,300632438,0).
sa(300023383,0,300915787,0).
sa(300027247,0,301375174,0).
sa(300028672,0,301376894,0).
sa(300031974,0,300873603,0).
sa(300031974,0,300804695,0).
sa(300033574,0,300875712,0).
sa(300041361,0,300099290,0).
sa(300051045,0,302080577,0).
sa(300051696,0,302082218,0).
sa(300056002,0,300453726,0).
sa(300056002,0,301794340,0).
sa(300060397,0,301791911,0).
sa(300060397,0,300457598,0).
sa(300061262,0,300147734,0).
sa(300063277,0,301140514,0).
sa(300064479,0,300931555,0).
sa(300064479,0,301660994,0).
sa(300064479,0,301870889,0).
sa(300065488,0,300932367,0).
sa(300065488,0,301160031,0).
sa(300065488,0,301661914,0).
sa(300065791,0,300249721,0).
sa(300065791,0,300262792,0).
sa(300065791,0,300326436,0).
sa(300066800,0,300325281,0).
sa(300066800,0,300251809,0).
sa(300067038,0,301898129,0).
sa(300067038,0,302569130,0).
sa(300067767,0,302570282,0).
sa(300067767,0,301899360,0).
sa(300067966,0,302569130,0).
sa(300068278,0,302570282,0).
sa(300068278,0,301899360,0).
sa(300070939,0,301559903,0).
sa(300071992,0,301560320,0).
sa(300073048,0,301798828,0).
sa(300074346,0,301799035,0).
sa(300075135,0,300995775,0).
sa(300075135,0,301817500,0).
sa(300075515,0,301293158,0).
sa(300075515,0,300996448,0).
sa(300077645,0,300264776,0).
sa(300077645,0,300251809,0).
sa(300081671,0,300249721,0).
sa(300081671,0,300262792,0).
sa(300082241,0,300156101,0).
sa(300082241,0,301244410,0).
sa(300082241,0,301628946,0).
sa(300084956,0,301740892,0).
sa(300084956,0,300156839,0).
sa(300085264,0,300531342,0).
sa(300085264,0,300919542,0).
sa(300085264,0,301737241,0).
sa(300085264,0,302405326,0).
sa(300085264,0,301923391,0).
sa(300085264,0,302303077,0).
sa(300087152,0,300529266,0).
sa(300087152,0,302531422,0).
sa(300087152,0,301922763,0).
sa(300087152,0,302407603,0).
sa(300087152,0,302301560,0).
sa(300087152,0,300920881,0).
sa(300091311,0,300186616,0).
sa(300091311,0,300804695,0).
sa(300091311,0,300162386,0).
sa(300092551,0,300162990,0).
sa(300093810,0,301348258,0).
sa(300094448,0,300118066,0).
sa(300099290,0,300040325,0).
sa(300099874,0,300041051,0).
sa(300101609,0,302099019,0).
sa(300101800,0,302098325,0).
sa(300102201,0,300895442,0).
sa(300102201,0,300430191,0).
sa(300103696,0,300428404,0).
sa(300103696,0,300896555,0).
sa(300104051,0,300884778,0).
sa(300105023,0,300885415,0).
sa(300105746,0,300013887,0).
sa(300105746,0,302335828,0).
sa(300106456,0,302336449,0).
sa(300106456,0,301496021,0).
sa(300106456,0,300016756,0).
sa(300110853,0,300138912,0).
sa(300110853,0,301296474,0).
sa(300110853,0,301430111,0).
sa(300111129,0,300139126,0).
sa(300111129,0,301296270,0).
sa(300111129,0,301430111,0).
sa(300119875,0,300804695,0).
sa(300121865,0,300125711,0).
sa(300122626,0,300127296,0).
sa(300125711,0,300121865,0).
sa(300127296,0,300122626,0).
sa(300130778,0,300198147,0).
sa(300131426,0,300197576,0).
sa(300133417,0,301716227,0).
sa(300133417,0,302395115,0).
sa(300133851,0,302395115,0).
sa(300133851,0,301716491,0).
sa(300134701,0,300138169,0).
sa(300134701,0,301878466,0).
sa(300135718,0,301880531,0).
sa(300135718,0,300138622,0).
sa(300138169,0,300134701,0).
sa(300138622,0,300135718,0).
sa(300138912,0,300110853,0).
sa(300138912,0,301296474,0).
sa(300139126,0,301296270,0).
sa(300139126,0,300111129,0).
sa(300147734,0,300061262,0).
sa(300149861,0,301569965,0).
sa(300150202,0,300494907,0).
sa(300150202,0,302283703,0).
sa(300151105,0,300500569,0).
sa(300153898,0,301743217,0).
sa(300154965,0,301743909,0).
sa(300156101,0,300082241,0).
sa(300156101,0,300712419,0).
sa(300156839,0,300788821,0).
sa(300156839,0,300251809,0).
sa(300156839,0,300084956,0).
sa(300157925,0,301463965,0).
sa(300163592,0,300543603,0).
sa(300164863,0,300311663,0).
sa(300164863,0,300165943,0).
sa(300164863,0,300754873,0).
sa(300166146,0,300217728,0).
sa(300166146,0,301358363,0).
sa(300166146,0,301807219,0).
sa(300166146,0,302097268,0).
sa(300169164,0,300220956,0).
sa(300169164,0,301359035,0).
sa(300169164,0,302098136,0).
sa(300173764,0,301452593,0).
sa(300173764,0,301746605,0).
sa(300174379,0,301748318,0).
sa(300174379,0,301918984,0).
sa(300176150,0,300177186,0).
sa(300177186,0,300176150,0).
sa(300177186,0,300995119,0).
sa(300177963,0,300995468,0).
sa(300178575,0,301395617,0).
sa(300178575,0,301406640,0).
sa(300178575,0,301632537,0).
sa(300179486,0,301407465,0).
sa(300179486,0,301633591,0).
sa(300181476,0,301499686,0).
sa(300186616,0,300091311,0).
sa(300186616,0,300190115,0).
sa(300186616,0,300570590,0).
sa(300187736,0,300191603,0).
sa(300187736,0,300571643,0).
sa(300190115,0,300186616,0).
sa(300190115,0,300570590,0).
sa(300190115,0,302102484,0).
sa(300190115,0,300192756,0).
sa(300191603,0,300187736,0).
sa(300191603,0,302102796,0).
sa(300191603,0,300571643,0).
sa(300191603,0,300192880,0).
sa(300192756,0,300190115,0).
sa(300192880,0,300191603,0).
sa(300196934,0,300866471,0).
sa(300197576,0,301033371,0).
sa(300197576,0,300131426,0).
sa(300198147,0,300130778,0).
sa(300198147,0,301010862,0).
sa(300198147,0,301033081,0).
sa(300201354,0,301876957,0).
sa(300201354,0,301884539,0).
sa(300203237,0,301033081,0).
sa(300203237,0,301876006,0).
sa(300204077,0,300251809,0).
sa(300204491,0,300249721,0).
sa(300217728,0,300166146,0).
sa(300217728,0,301139352,0).
sa(300217728,0,301807219,0).
sa(300220956,0,301808822,0).
sa(300220956,0,300169164,0).
sa(300220956,0,301140514,0).
sa(300223802,0,301372049,0).
sa(300224166,0,300227003,0).
sa(300226618,0,301159655,0).
sa(300226618,0,301372049,0).
sa(300227003,0,301160031,0).
sa(300227003,0,300224166,0).
sa(300227003,0,301373728,0).
sa(300227507,0,301123148,0).
sa(300227507,0,300504592,0).
sa(300227507,0,302341266,0).
sa(300227507,0,302439949,0).
sa(300229630,0,301125429,0).
sa(300229630,0,302345272,0).
sa(300245952,0,300679147,0).
sa(300245952,0,301752167,0).
sa(300247013,0,301749320,0).
sa(300247013,0,300681094,0).
sa(300247439,0,301507402,0).
sa(300249721,0,300065791,0).
sa(300249721,0,300262792,0).
sa(300249721,0,300204491,0).
sa(300249721,0,300081671,0).
sa(300251809,0,300077645,0).
sa(300251809,0,300204077,0).
sa(300251809,0,300264776,0).
sa(300251809,0,300066800,0).
sa(300251809,0,300156839,0).
sa(300252954,0,301064286,0).
sa(300254152,0,301061489,0).
sa(300257742,0,301006566,0).
sa(300259568,0,301592642,0).
sa(300262792,0,300065791,0).
sa(300262792,0,300249721,0).
sa(300262792,0,301989669,0).
sa(300262792,0,300081671,0).
sa(300264776,0,300077645,0).
sa(300264776,0,301589217,0).
sa(300264776,0,300251809,0).
sa(300267871,0,302509292,0).
sa(300283703,0,301812846,0).
sa(300286214,0,301985247,0).
sa(300287640,0,302561888,0).
sa(300289082,0,300679147,0).
sa(300289082,0,301752167,0).
sa(300289082,0,301317954,0).
sa(300289082,0,302274253,0).
sa(300289365,0,301749320,0).
sa(300289365,0,302273326,0).
sa(300289365,0,300681094,0).
sa(300289365,0,301319182,0).
sa(300289983,0,300596769,0).
sa(300290923,0,300594413,0).
sa(300292937,0,300754107,0).
sa(300292937,0,300863946,0).
sa(300294175,0,300835609,0).
sa(300294175,0,300864693,0).
sa(300298041,0,302030935,0).
sa(300298767,0,301919931,0).
sa(300301187,0,300738593,0).
sa(300301777,0,300739273,0).
sa(300302761,0,301740892,0).
sa(300303727,0,300438567,0).
sa(300303727,0,301742296,0).
sa(300306314,0,300510050,0).
sa(300307474,0,300511214,0).
sa(300309021,0,300325281,0).
sa(300309021,0,300754107,0).
sa(300309021,0,301898129,0).
sa(300311663,0,301899360,0).
sa(300311663,0,300164863,0).
sa(300311663,0,300326436,0).
sa(300311663,0,300754873,0).
sa(300311663,0,301873406,0).
sa(300311663,0,300545015,0).
sa(300325281,0,300309021,0).
sa(300325281,0,300066800,0).
sa(300326436,0,300065791,0).
sa(300326436,0,300311663,0).
sa(300336168,0,300740336,0).
sa(300340239,0,301841544,0).
sa(300341405,0,301842001,0).
sa(300342250,0,300786291,0).
sa(300342250,0,300178575,0).
sa(300344125,0,300917613,0).
sa(300344125,0,300583990,0).
sa(300346991,0,300583239,0).
sa(300346991,0,301754421,0).
sa(300355258,0,302507968,0).
sa(300356926,0,302468635,0).
sa(300357983,0,302469928,0).
sa(300359459,0,301111016,0).
sa(300359862,0,301115081,0).
sa(300359862,0,301112573,0).
sa(300360650,0,301548193,0).
sa(300360650,0,301904845,0).
sa(300360650,0,302513269,0).
sa(300361509,0,301549291,0).
sa(300361509,0,301905377,0).
sa(300362467,0,301361414,0).
sa(300362467,0,301148283,0).
sa(300364479,0,301368192,0).
sa(300364479,0,301149494,0).
sa(300366691,0,300373209,0).
sa(300366691,0,300394562,0).
sa(300386392,0,300404202,0).
sa(300392812,0,300409440,0).
sa(300393105,0,300408660,0).
sa(300401650,0,300386392,0).
sa(300402419,0,300366691,0).
sa(300404202,0,300386392,0).
sa(300408031,0,300806512,0).
sa(300408660,0,300393105,0).
sa(300409440,0,300392812,0).
sa(300411886,0,300829745,0).
sa(300411886,0,301947266,0).
sa(300412567,0,300830717,0).
sa(300417413,0,302115324,0).
sa(300417413,0,302422685,0).
sa(300419289,0,302424254,0).
sa(300424008,0,300682932,0).
sa(300424370,0,300683185,0).
sa(300427331,0,300417413,0).
sa(300427331,0,301905653,0).
sa(300427786,0,301908039,0).
sa(300428404,0,300532892,0).
sa(300428404,0,300700451,0).
sa(300428404,0,300779374,0).
sa(300428404,0,300103696,0).
sa(300430191,0,300102201,0).
sa(300430191,0,300701479,0).
sa(300430191,0,300533851,0).
sa(300430191,0,300781168,0).
sa(300430191,0,300433115,0).
sa(300433115,0,300430191,0).
sa(300435492,0,301765643,0).
sa(300437539,0,301507134,0).
sa(300437852,0,301507402,0).
sa(300438166,0,302402559,0).
sa(300438567,0,302403030,0).
sa(300438707,0,301334398,0).
sa(300439588,0,301336587,0).
sa(300441781,0,300742879,0).
sa(300442057,0,300743435,0).
sa(300445548,0,300442361,0).
sa(300453726,0,300056002,0).
sa(300457598,0,300060397,0).
sa(300464513,0,301430111,0).
sa(300464513,0,301925372,0).
sa(300464962,0,301430847,0).
sa(300464962,0,301926376,0).
sa(300466808,0,301330986,0).
sa(300466808,0,302111684,0).
sa(300466808,0,302475855,0).
sa(300467913,0,302478749,0).
sa(300467913,0,300539793,0).
sa(300472336,0,301330986,0).
sa(300479330,0,300822115,0).
sa(300479933,0,300822449,0).
sa(300481222,0,300889831,0).
sa(300481855,0,300892379,0).
sa(300485711,0,300489108,0).
sa(300485711,0,301672607,0).
sa(300487653,0,301675190,0).
sa(300487653,0,300490035,0).
sa(300489108,0,300485711,0).
sa(300490035,0,300487653,0).
sa(300492677,0,302111684,0).
sa(300493460,0,302109678,0).
sa(300494907,0,300150202,0).
sa(300494907,0,302383831,0).
sa(300500569,0,300151105,0).
sa(300500569,0,302383380,0).
sa(300502180,0,300538565,0).
sa(300503982,0,300889831,0).
sa(300503982,0,301718158,0).
sa(300504592,0,300227507,0).
sa(300504592,0,301675190,0).
sa(300506299,0,301261867,0).
sa(300506299,0,301507134,0).
sa(300506299,0,302374914,0).
sa(300507053,0,301507402,0).
sa(300507053,0,302375828,0).
sa(300507464,0,300560586,0).
sa(300507464,0,301163083,0).
sa(300507464,0,301486489,0).
sa(300508192,0,302064745,0).
sa(300508192,0,301164072,0).
sa(300508192,0,301487201,0).
sa(300508192,0,300561036,0).
sa(300510050,0,300306314,0).
sa(300510050,0,300839619,0).
sa(300510050,0,301911053,0).
sa(300510050,0,302225510,0).
sa(300511214,0,300840902,0).
sa(300511214,0,301911415,0).
sa(300511214,0,302228335,0).
sa(300514278,0,301735736,0).
sa(300514884,0,300520214,0).
sa(300516539,0,300523978,0).
sa(300520214,0,300525453,0).
sa(300520214,0,300514884,0).
sa(300523978,0,300516539,0).
sa(300525453,0,300520214,0).
sa(300525453,0,301101391,0).
sa(300525453,0,302560548,0).
sa(300529266,0,300751525,0).
sa(300529266,0,300087152,0).
sa(300531342,0,300085264,0).
sa(300531342,0,300752392,0).
sa(300532892,0,300428404,0).
sa(300532892,0,300938801,0).
sa(300532892,0,301336926,0).
sa(300533851,0,300939105,0).
sa(300533851,0,300430191,0).
sa(300533851,0,301337121,0).
sa(300535452,0,301353014,0).
sa(300537339,0,301353226,0).
sa(300538565,0,300502180,0).
sa(300539793,0,300467913,0).
sa(300543603,0,300163592,0).
sa(300543603,0,302455297,0).
sa(300545015,0,300311663,0).
sa(300545015,0,302459109,0).
sa(300546646,0,302383380,0).
sa(300548781,0,302383831,0).
sa(300550282,0,300684480,0).
sa(300550777,0,300685483,0).
sa(300550777,0,300740336,0).
sa(300555325,0,301913200,0).
sa(300556709,0,301913715,0).
sa(300558373,0,302002227,0).
sa(300558373,0,301064286,0).
sa(300559425,0,301061489,0).
sa(300560586,0,300507464,0).
sa(300560586,0,301074650,0).
sa(300560586,0,302374914,0).
sa(300561036,0,300508192,0).
sa(300561036,0,301076793,0).
sa(300561036,0,302375828,0).
sa(300562116,0,300134701,0).
sa(300567593,0,301974740,0).
sa(300569090,0,302356820,0).
sa(300569090,0,302523275,0).
sa(300569568,0,302357115,0).
sa(300569568,0,302525206,0).
sa(300570590,0,300186616,0).
sa(300570590,0,300190115,0).
sa(300570590,0,302102484,0).
sa(300570590,0,302520219,0).
sa(300571643,0,300187736,0).
sa(300571643,0,302102796,0).
sa(300571643,0,302521353,0).
sa(300571643,0,300191603,0).
sa(300573225,0,302053818,0).
sa(300573225,0,302055062,0).
sa(300574422,0,301689880,0).
sa(300574422,0,302030080,0).
sa(300575230,0,302030935,0).
sa(300576680,0,302505716,0).
sa(300577920,0,302504131,0).
sa(300579084,0,300773109,0).
sa(300583239,0,300958880,0).
sa(300583239,0,302505716,0).
sa(300583239,0,302290998,0).
sa(300583239,0,300346991,0).
sa(300583990,0,300344125,0).
sa(300583990,0,300959731,0).
sa(300583990,0,302291843,0).
sa(300583990,0,302504131,0).
sa(300583990,0,302519555,0).
sa(300584820,0,301817500,0).
sa(300586183,0,301160031,0).
sa(300586183,0,301818234,0).
sa(300588797,0,301148283,0).
sa(300588797,0,301805157,0).
sa(300589624,0,301805889,0).
sa(300589624,0,301149494,0).
sa(300590390,0,301916229,0).
sa(300590923,0,301918184,0).
sa(300593374,0,300596769,0).
sa(300593374,0,301067193,0).
sa(300594413,0,300290923,0).
sa(300596769,0,300593374,0).
sa(300596769,0,300289983,0).
sa(300598679,0,302000118,0).
sa(300599821,0,302000680,0).
sa(300604617,0,300931555,0).
sa(300604617,0,300019131,0).
sa(300605516,0,301041916,0).
sa(300605516,0,301688271,0).
sa(300606602,0,301686439,0).
sa(300612114,0,301718867,0).
sa(300612652,0,301718867,0).
sa(300614990,0,301769843,0).
sa(300615457,0,300645493,0).
sa(300619972,0,301195536,0).
sa(300620585,0,301613463,0).
sa(300620585,0,301198019,0).
sa(300620731,0,301549291,0).
sa(300624026,0,301382086,0).
sa(300625393,0,300626800,0).
sa(300625774,0,300627410,0).
sa(300626800,0,300625393,0).
sa(300627410,0,300625774,0).
sa(300627849,0,300629997,0).
sa(300629146,0,300630802,0).
sa(300629997,0,300627849,0).
sa(300630802,0,300629146,0).
sa(300631391,0,300021766,0).
sa(300631391,0,301878466,0).
sa(300631391,0,302460502,0).
sa(300632438,0,302461723,0).
sa(300632438,0,301880531,0).
sa(300632438,0,300023383,0).
sa(300635456,0,301613463,0).
sa(300638981,0,301372049,0).
sa(300638981,0,302384383,0).
sa(300638981,0,302099019,0).
sa(300639356,0,302098325,0).
sa(300639356,0,302384843,0).
sa(300639356,0,302420530,0).
sa(300639842,0,301993940,0).
sa(300640283,0,301994602,0).
sa(300643250,0,301686439,0).
sa(300643250,0,301865197,0).
sa(300644372,0,301688271,0).
sa(300644372,0,301866535,0).
sa(300644839,0,301799457,0).
sa(300644839,0,302417895,0).
sa(300645493,0,301799957,0).
sa(300645493,0,300647070,0).
sa(300645493,0,300615457,0).
sa(300645493,0,302418538,0).
sa(300646413,0,302271544,0).
sa(300646413,0,302462619,0).
sa(300647070,0,302463154,0).
sa(300647070,0,300645493,0).
sa(300647542,0,302079029,0).
sa(300647542,0,300996448,0).
sa(300649586,0,300771373,0).
sa(300649586,0,302083615,0).
sa(300650351,0,302084538,0).
sa(300650577,0,300655779,0).
sa(300655779,0,300650577,0).
sa(300655779,0,300684480,0).
sa(300655779,0,300900616,0).
sa(300664449,0,300878829,0).
sa(300666058,0,301535709,0).
sa(300666058,0,301640850,0).
sa(300668571,0,301638438,0).
sa(300668571,0,301727926,0).
sa(300679147,0,300245952,0).
sa(300679147,0,300289082,0).
sa(300679147,0,300734318,0).
sa(300679147,0,301274261,0).
sa(300679147,0,301317954,0).
sa(300681094,0,300247013,0).
sa(300681094,0,300289365,0).
sa(300681094,0,301275395,0).
sa(300681094,0,301319182,0).
sa(300682932,0,300424008,0).
sa(300682932,0,300688947,0).
sa(300682932,0,301878466,0).
sa(300683185,0,300424370,0).
sa(300683185,0,301880531,0).
sa(300683185,0,300689471,0).
sa(300684480,0,300550282,0).
sa(300684480,0,300655779,0).
sa(300685483,0,300550777,0).
sa(300685483,0,301992149,0).
sa(300685638,0,301989669,0).
sa(300686081,0,301992149,0).
sa(300688947,0,301878466,0).
sa(300689471,0,301880531,0).
sa(300689471,0,300683185,0).
sa(300690058,0,300994882,0).
sa(300693743,0,301932973,0).
sa(300694608,0,302333453,0).
sa(300695024,0,302331262,0).
sa(300695523,0,301613463,0).
sa(300695523,0,302330336,0).
sa(300695523,0,302451951,0).
sa(300695523,0,301475282,0).
sa(300695523,0,302566015,0).
sa(300696518,0,301474513,0).
sa(300696518,0,301612053,0).
sa(300696518,0,302451113,0).
sa(300696518,0,302564986,0).
sa(300697188,0,300700451,0).
sa(300697389,0,300701479,0).
sa(300700451,0,300428404,0).
sa(300700451,0,300697188,0).
sa(300700451,0,300779374,0).
sa(300700451,0,300940437,0).
sa(300701479,0,300781168,0).
sa(300701479,0,300430191,0).
sa(300701479,0,300697389,0).
sa(300703109,0,300909363,0).
sa(300703109,0,301149494,0).
sa(300704609,0,300908929,0).
sa(300704609,0,301148283,0).
sa(300704609,0,301366718,0).
sa(300705891,0,300708017,0).
sa(300705891,0,302040049,0).
sa(300705891,0,302324397,0).
sa(300707366,0,302037708,0).
sa(300707366,0,302321009,0).
sa(300707366,0,302447344,0).
sa(300710260,0,300744916,0).
sa(300711528,0,300749230,0).
sa(300712419,0,300156101,0).
sa(300715140,0,300846052,0).
sa(300720296,0,301988166,0).
sa(300720913,0,301987341,0).
sa(300721505,0,301918184,0).
sa(300722110,0,300940437,0).
sa(300723163,0,300941485,0).
sa(300724081,0,302464693,0).
sa(300724861,0,302466111,0).
sa(300725772,0,301064286,0).
sa(300727564,0,301061489,0).
sa(300727564,0,302109678,0).
sa(300732960,0,301459422,0).
sa(300732960,0,302526925,0).
sa(300733905,0,301460421,0).
sa(300733905,0,302527734,0).
sa(300734318,0,300679147,0).
sa(300738593,0,300301187,0).
sa(300739273,0,300301777,0).
sa(300739273,0,300740336,0).
sa(300740336,0,300550777,0).
sa(300740336,0,300739273,0).
sa(300740336,0,300336168,0).
sa(300742879,0,300441781,0).
sa(300742879,0,302028612,0).
sa(300743435,0,300442057,0).
sa(300743435,0,302029167,0).
sa(300744916,0,300710260,0).
sa(300744916,0,300836277,0).
sa(300744916,0,301155354,0).
sa(300749230,0,300838296,0).
sa(300749230,0,302174896,0).
sa(300749230,0,301156112,0).
sa(300749230,0,300711528,0).
sa(300751525,0,300529266,0).
sa(300751525,0,300849357,0).
sa(300752392,0,300531342,0).
sa(300752392,0,300851103,0).
sa(300753093,0,300758459,0).
sa(300753093,0,300753504,0).
sa(300753378,0,300753659,0).
sa(300753504,0,300753093,0).
sa(300753659,0,300753378,0).
sa(300754107,0,300292937,0).
sa(300754107,0,300309021,0).
sa(300754873,0,300311663,0).
sa(300754873,0,300164863,0).
sa(300758459,0,301813400,0).
sa(300760916,0,302310895,0).
sa(300762355,0,302311544,0).
sa(300763901,0,301222360,0).
sa(300771373,0,300649586,0).
sa(300771373,0,300774323,0).
sa(300772360,0,300773759,0).
sa(300773579,0,300774323,0).
sa(300773759,0,301101391,0).
sa(300773759,0,300775900,0).
sa(300774323,0,300771373,0).
sa(300774323,0,300773579,0).
sa(300775900,0,300773759,0).
sa(300779374,0,300428404,0).
sa(300779374,0,300700451,0).
sa(300779374,0,301837744,0).
sa(300781168,0,300701479,0).
sa(300781168,0,300430191,0).
sa(300783129,0,301198737,0).
sa(300783675,0,301199751,0).
sa(300787136,0,301539642,0).
sa(300788821,0,301892953,0).
sa(300788821,0,301612053,0).
sa(300788821,0,302329355,0).
sa(300788821,0,300156839,0).
sa(300791227,0,302338615,0).
sa(300792991,0,302340213,0).
sa(300792991,0,302329355,0).
sa(300800826,0,301809655,0).
sa(300804695,0,300091311,0).
sa(300804695,0,300118567,0).
sa(300804695,0,300873603,0).
sa(300804695,0,302434605,0).
sa(300804695,0,302278939,0).
sa(300806512,0,300408031,0).
sa(300806512,0,302281325,0).
sa(300806512,0,300119409,0).
sa(300808191,0,300873603,0).
sa(300810916,0,300885695,0).
sa(300812952,0,301010862,0).
sa(300822115,0,300479330,0).
sa(300822115,0,302407603,0).
sa(300822449,0,302405326,0).
sa(300822449,0,300479933,0).
sa(300822449,0,301923391,0).
sa(300827923,0,300988232,0).
sa(300828175,0,300986027,0).
sa(300828779,0,301182024,0).
sa(300828779,0,301716227,0).
sa(300828779,0,302446651,0).
sa(300829496,0,301182747,0).
sa(300829496,0,302445636,0).
sa(300829496,0,301716491,0).
sa(300829745,0,300411886,0).
sa(300829745,0,300884007,0).
sa(300829745,0,301306273,0).
sa(300829745,0,301332386,0).
sa(300829745,0,301421602,0).
sa(300829745,0,300832075,0).
sa(300830717,0,301421679,0).
sa(300830717,0,300832200,0).
sa(300830717,0,300412567,0).
sa(300830717,0,300884286,0).
sa(300830717,0,301308425,0).
sa(300832075,0,300829745,0).
sa(300834198,0,300838856,0).
sa(300834198,0,300839619,0).
sa(300834198,0,301824244,0).
sa(300834198,0,301825671,0).
sa(300834198,0,302495922,0).
sa(300835609,0,300294175,0).
sa(300835609,0,300839411,0).
sa(300835609,0,300840902,0).
sa(300835609,0,301827535,0).
sa(300835609,0,302497141,0).
sa(300836277,0,300744916,0).
sa(300838296,0,300749230,0).
sa(300838856,0,300834198,0).
sa(300839411,0,300835609,0).
sa(300839619,0,300510050,0).
sa(300839619,0,300834198,0).
sa(300840902,0,300511214,0).
sa(300840902,0,300835609,0).
sa(300843146,0,301022064,0).
sa(300845528,0,301023289,0).
sa(300848074,0,301580050,0).
sa(300849357,0,300751525,0).
sa(300849357,0,301139352,0).
sa(300849357,0,301947266,0).
sa(300849357,0,302270342,0).
sa(300849357,0,302392878,0).
sa(300851103,0,302384843,0).
sa(300851103,0,302393401,0).
sa(300851103,0,300752392,0).
sa(300851103,0,301949149,0).
sa(300851744,0,301911053,0).
sa(300852988,0,301911415,0).
sa(300853776,0,301559903,0).
sa(300853776,0,301560513,0).
sa(300853776,0,301725712,0).
sa(300853776,0,302530861,0).
sa(300856860,0,302531422,0).
sa(300856860,0,301727439,0).
sa(300856860,0,301560320,0).
sa(300856860,0,301561564,0).
sa(300863946,0,300292937,0).
sa(300864693,0,300294175,0).
sa(300866047,0,302270342,0).
sa(300866471,0,301195536,0).
sa(300866471,0,301228530,0).
sa(300866471,0,300196934,0).
sa(300867213,0,301770903,0).
sa(300867213,0,301229020,0).
sa(300867213,0,301198019,0).
sa(300871255,0,301291937,0).
sa(300872195,0,300872510,0).
sa(300872347,0,300872906,0).
sa(300872510,0,300872195,0).
sa(300872906,0,300872347,0).
sa(300873603,0,300031974,0).
sa(300873603,0,300808191,0).
sa(300873603,0,300884778,0).
sa(300873603,0,300804695,0).
sa(300873603,0,302278939,0).
sa(300875712,0,300033574,0).
sa(300878829,0,300664449,0).
sa(300884007,0,300829745,0).
sa(300884007,0,301306273,0).
sa(300884286,0,300830717,0).
sa(300884286,0,301308425,0).
sa(300884778,0,300065791,0).
sa(300884778,0,300104051,0).
sa(300884778,0,300873603,0).
sa(300885415,0,300066800,0).
sa(300885415,0,300105023,0).
sa(300885695,0,301725712,0).
sa(300885695,0,302278939,0).
sa(300887062,0,301727439,0).
sa(300887062,0,302281325,0).
sa(300889831,0,300481222,0).
sa(300889831,0,300503982,0).
sa(300889831,0,302062670,0).
sa(300892379,0,300481855,0).
sa(300895442,0,300102201,0).
sa(300896555,0,300103696,0).
sa(300898289,0,301754421,0).
sa(300898963,0,301858094,0).
sa(300900478,0,301861205,0).
sa(300900616,0,300655779,0).
sa(300900616,0,301275562,0).
sa(300900616,0,300903668,0).
sa(300900616,0,301348258,0).
sa(300900616,0,301580050,0).
sa(300900616,0,301852174,0).
sa(300900616,0,300933032,0).
sa(300902652,0,300903449,0).
sa(300902652,0,300932695,0).
sa(300902652,0,301349041,0).
sa(300902652,0,301853461,0).
sa(300902652,0,301279978,0).
sa(300902652,0,301581115,0).
sa(300903449,0,301581115,0).
sa(300903668,0,301580050,0).
sa(300904163,0,302584981,0).
sa(300904548,0,301589217,0).
sa(300904548,0,302588099,0).
sa(300905386,0,302034828,0).
sa(300905728,0,302035337,0).
sa(300905905,0,300995775,0).
sa(300906455,0,300996448,0).
sa(300908929,0,300704609,0).
sa(300908929,0,301148283,0).
sa(300909363,0,300703109,0).
sa(300909363,0,301149494,0).
sa(300909545,0,302236842,0).
sa(300909545,0,302301560,0).
sa(300911327,0,302238462,0).
sa(300911327,0,302303077,0).
sa(300914421,0,300021766,0).
sa(300914421,0,301418789,0).
sa(300914421,0,301749320,0).
sa(300914421,0,301837744,0).
sa(300915787,0,301839417,0).
sa(300915787,0,300023383,0).
sa(300919542,0,300085264,0).
sa(300920881,0,300087152,0).
sa(300921014,0,301343918,0).
sa(300921014,0,301896478,0).
sa(300921014,0,302131072,0).
sa(300921014,0,302306288,0).
sa(300922594,0,301344963,0).
sa(300922594,0,301897787,0).
sa(300922594,0,302307367,0).
sa(300923321,0,301319874,0).
sa(300923993,0,301320988,0).
sa(300927017,0,300928525,0).
sa(300927578,0,300041051,0).
sa(300928525,0,300927017,0).
sa(300929164,0,300927578,0).
sa(300929567,0,302360351,0).
sa(300930290,0,302359464,0).
sa(300931555,0,300064479,0).
sa(300931555,0,300604617,0).
sa(300931555,0,301813400,0).
sa(300931555,0,302495922,0).
sa(300932367,0,300065488,0).
sa(300932367,0,301814085,0).
sa(300932695,0,301978395,0).
sa(300933032,0,300900616,0).
sa(300933032,0,301978894,0).
sa(300933154,0,302500884,0).
sa(300935500,0,302225510,0).
sa(300935500,0,302447344,0).
sa(300936740,0,302228335,0).
sa(300936740,0,302271544,0).
sa(300938801,0,300532892,0).
sa(300938801,0,302266145,0).
sa(300939105,0,300533851,0).
sa(300939105,0,302266600,0).
sa(300940437,0,300700451,0).
sa(300940437,0,300722110,0).
sa(300940437,0,301704761,0).
sa(300940437,0,300896555,0).
sa(300941485,0,300723163,0).
sa(300941485,0,301705655,0).
sa(300948103,0,301692969,0).
sa(300948670,0,301693799,0).
sa(300950272,0,300329831,0).
sa(300950706,0,301873406,0).
sa(300952395,0,301692222,0).
sa(300952867,0,301692786,0).
sa(300956131,0,301723308,0).
sa(300956131,0,301369663,0).
sa(300956131,0,301943406,0).
sa(300957176,0,301722965,0).
sa(300957176,0,301370590,0).
sa(300958151,0,300956131,0).
sa(300958151,0,301369663,0).
sa(300958151,0,301723308,0).
sa(300958880,0,300583239,0).
sa(300958880,0,302460502,0).
sa(300958880,0,302464693,0).
sa(300959731,0,300583990,0).
sa(300959731,0,302466111,0).
sa(300961392,0,301740207,0).
sa(300962634,0,301740630,0).
sa(300965606,0,301375174,0).
sa(300966477,0,301034457,0).
sa(300966477,0,301376894,0).
sa(300973844,0,301638438,0).
sa(300976508,0,301270175,0).
sa(300976508,0,301143279,0).
sa(300980527,0,301142804,0).
sa(300980527,0,301271700,0).
sa(300983862,0,301947266,0).
sa(300983862,0,302422685,0).
sa(300985387,0,301949149,0).
sa(300985387,0,302424254,0).
sa(300986027,0,300828175,0).
sa(300986027,0,302043898,0).
sa(300986027,0,302410393,0).
sa(300988232,0,300827923,0).
sa(300988232,0,302412164,0).
sa(300994882,0,300690058,0).
sa(300995119,0,300177186,0).
sa(300995468,0,300177963,0).
sa(300995775,0,300075135,0).
sa(300995775,0,300905905,0).
sa(300995775,0,301123148,0).
sa(300996448,0,301125429,0).
sa(300996448,0,300647542,0).
sa(300996448,0,301244410,0).
sa(300996448,0,300075515,0).
sa(300996448,0,300906455,0).
sa(300999817,0,301148283,0).
sa(301000881,0,301149494,0).
sa(301001689,0,301001945,0).
sa(301001689,0,301080297,0).
sa(301001689,0,301824244,0).
sa(301001689,0,301865197,0).
sa(301002377,0,301082535,0).
sa(301002377,0,301824751,0).
sa(301002377,0,301866535,0).
sa(301005063,0,301713373,0).
sa(301005063,0,301951684,0).
sa(301005063,0,302236842,0).
sa(301005675,0,302238462,0).
sa(301005675,0,301713925,0).
sa(301005675,0,301952405,0).
sa(301006566,0,301557614,0).
sa(301007354,0,301557903,0).
sa(301010862,0,300812952,0).
sa(301010862,0,300198147,0).
sa(301010862,0,301686439,0).
sa(301010862,0,301852174,0).
sa(301017161,0,301170243,0).
sa(301017161,0,302273326,0).
sa(301017161,0,302540578,0).
sa(301017738,0,302541302,0).
sa(301017738,0,301172889,0).
sa(301017738,0,302274253,0).
sa(301020393,0,300134701,0).
sa(301022064,0,300843146,0).
sa(301023289,0,300845528,0).
sa(301033081,0,300203237,0).
sa(301033081,0,300198147,0).
sa(301033371,0,300197576,0).
sa(301037540,0,301568375,0).
sa(301038102,0,301567862,0).
sa(301041916,0,300605516,0).
sa(301041916,0,301045216,0).
sa(301044240,0,301045711,0).
sa(301045216,0,301041916,0).
sa(301045216,0,302016535,0).
sa(301045711,0,301044240,0).
sa(301045711,0,302018486,0).
sa(301047874,0,301468097,0).
sa(301047874,0,301864123,0).
sa(301047874,0,302331262,0).
sa(301049462,0,301864471,0).
sa(301049462,0,301468682,0).
sa(301049462,0,302333453,0).
sa(301059400,0,302095311,0).
sa(301060785,0,302095786,0).
sa(301061489,0,301065694,0).
sa(301061489,0,300727564,0).
sa(301061489,0,300254152,0).
sa(301061489,0,300559425,0).
sa(301061489,0,302000680,0).
sa(301061489,0,302002814,0).
sa(301064286,0,300252954,0).
sa(301064286,0,300558373,0).
sa(301064286,0,300725772,0).
sa(301064286,0,302002227,0).
sa(301065694,0,301061489,0).
sa(301066070,0,301064286,0).
sa(301067193,0,300593374,0).
sa(301067694,0,301640850,0).
sa(301068726,0,301638438,0).
sa(301068726,0,301688271,0).
sa(301073822,0,302395115,0).
sa(301074650,0,301246579,0).
sa(301074650,0,300560586,0).
sa(301074650,0,301141743,0).
sa(301074650,0,301243825,0).
sa(301074650,0,302257141,0).
sa(301074650,0,302248349,0).
sa(301074650,0,302530861,0).
sa(301076793,0,302531422,0).
sa(301076793,0,301244410,0).
sa(301076793,0,301244195,0).
sa(301076793,0,300561036,0).
sa(301076793,0,302258600,0).
sa(301078302,0,301251128,0).
sa(301078302,0,301506770,0).
sa(301079683,0,301505991,0).
sa(301080297,0,301001689,0).
sa(301080297,0,301865197,0).
sa(301082535,0,301002377,0).
sa(301090308,0,302123812,0).
sa(301090993,0,302124654,0).
sa(301101391,0,300525453,0).
sa(301101391,0,301839417,0).
sa(301101391,0,300773759,0).
sa(301101391,0,302278337,0).
sa(301103021,0,301837744,0).
sa(301103021,0,302277607,0).
sa(301111016,0,300359459,0).
sa(301111016,0,301114434,0).
sa(301111016,0,302099019,0).
sa(301112573,0,302098325,0).
sa(301112573,0,300359862,0).
sa(301112573,0,301115081,0).
sa(301114434,0,301111016,0).
sa(301115081,0,301112573,0).
sa(301115349,0,301222360,0).
sa(301115349,0,301932973,0).
sa(301115349,0,302179279,0).
sa(301115349,0,302460502,0).
sa(301116380,0,301571363,0).
sa(301116380,0,302461723,0).
sa(301116380,0,302180797,0).
sa(301116380,0,301934554,0).
sa(301120925,0,301375174,0).
sa(301122411,0,301227137,0).
sa(301122411,0,301376894,0).
sa(301123148,0,300227507,0).
sa(301123148,0,300230335,0).
sa(301123148,0,300995775,0).
sa(301123148,0,301129977,0).
sa(301123148,0,301612053,0).
sa(301123148,0,301993140,0).
sa(301125429,0,301613463,0).
sa(301125429,0,301131043,0).
sa(301125429,0,300996448,0).
sa(301125429,0,300231252,0).
sa(301125429,0,300229630,0).
sa(301129977,0,301123148,0).
sa(301129977,0,301548193,0).
sa(301129977,0,302034828,0).
sa(301129977,0,302036578,0).
sa(301129977,0,302513269,0).
sa(301129977,0,302584981,0).
sa(301131043,0,301125429,0).
sa(301131043,0,301549291,0).
sa(301131043,0,301624633,0).
sa(301131043,0,302037272,0).
sa(301131043,0,302513740,0).
sa(301131043,0,302035337,0).
sa(301133876,0,301372049,0).
sa(301133876,0,301800349,0).
sa(301133876,0,300089051,0).
sa(301134769,0,301801600,0).
sa(301139352,0,300217728,0).
sa(301139352,0,300849357,0).
sa(301140514,0,300063277,0).
sa(301140514,0,300220956,0).
sa(301141743,0,301074650,0).
sa(301141743,0,301947266,0).
sa(301142349,0,301949149,0).
sa(301142804,0,300980527,0).
sa(301143279,0,300976508,0).
sa(301144359,0,301234167,0).
sa(301144887,0,301233347,0).
sa(301148283,0,300362467,0).
sa(301148283,0,300588797,0).
sa(301148283,0,301361414,0).
sa(301148283,0,300704609,0).
sa(301148283,0,300908929,0).
sa(301148283,0,300999817,0).
sa(301148283,0,301363613,0).
sa(301148283,0,301366718,0).
sa(301149494,0,300703109,0).
sa(301149494,0,300589624,0).
sa(301149494,0,300909363,0).
sa(301149494,0,301000881,0).
sa(301149494,0,301368192,0).
sa(301149494,0,301361863,0).
sa(301149494,0,301364008,0).
sa(301149494,0,300364479,0).
sa(301150475,0,301743217,0).
sa(301155354,0,300744916,0).
sa(301155354,0,302106761,0).
sa(301155354,0,301507402,0).
sa(301155354,0,302448437,0).
sa(301156112,0,302448889,0).
sa(301156112,0,302533313,0).
sa(301159655,0,300226618,0).
sa(301159655,0,301611839,0).
sa(301159655,0,302450640,0).
sa(301159655,0,301713095,0).
sa(301159655,0,302057829,0).
sa(301160031,0,300586183,0).
sa(301160031,0,300065488,0).
sa(301160031,0,300227003,0).
sa(301160031,0,301611067,0).
sa(301160031,0,301628302,0).
sa(301160031,0,301711071,0).
sa(301160031,0,302449430,0).
sa(301160031,0,302559180,0).
sa(301163083,0,300507464,0).
sa(301164072,0,300508192,0).
sa(301164763,0,302115324,0).
sa(301164763,0,302557357,0).
sa(301167817,0,302113827,0).
sa(301167817,0,302449430,0).
sa(301167817,0,302559180,0).
sa(301170243,0,301017161,0).
sa(301170243,0,302037708,0).
sa(301170243,0,302540578,0).
sa(301170243,0,302557357,0).
sa(301172889,0,302040049,0).
sa(301172889,0,302541302,0).
sa(301172889,0,301017738,0).
sa(301172889,0,302559180,0).
sa(301178974,0,301557903,0).
sa(301178974,0,302055062,0).
sa(301178974,0,301304374,0).
sa(301180695,0,301557614,0).
sa(301180695,0,302056880,0).
sa(301180695,0,302577061,0).
sa(301182024,0,300828779,0).
sa(301182747,0,300829496,0).
sa(301195536,0,300020787,0).
sa(301195536,0,300619972,0).
sa(301195536,0,300866471,0).
sa(301195536,0,302495922,0).
sa(301198019,0,300867213,0).
sa(301198019,0,300620585,0).
sa(301198737,0,302064745,0).
sa(301198737,0,300783129,0).
sa(301198737,0,302506555,0).
sa(301199751,0,302062670,0).
sa(301199751,0,300783675,0).
sa(301199751,0,301966488,0).
sa(301204557,0,302385102,0).
sa(301204557,0,302439949,0).
sa(301204557,0,302490170,0).
sa(301206474,0,302491961,0).
sa(301206474,0,302340213,0).
sa(301206474,0,302386612,0).
sa(301210854,0,302338615,0).
sa(301212469,0,302340213,0).
sa(301222360,0,300763901,0).
sa(301222360,0,301115349,0).
sa(301222360,0,301226240,0).
sa(301222360,0,301309991,0).
sa(301222360,0,302179279,0).
sa(301222360,0,302318464,0).
sa(301222360,0,302460502,0).
sa(301222360,0,302464693,0).
sa(301222884,0,302319129,0).
sa(301222884,0,301227137,0).
sa(301222884,0,302461723,0).
sa(301222884,0,302180797,0).
sa(301222884,0,302466111,0).
sa(301225398,0,301222360,0).
sa(301225898,0,301222884,0).
sa(301226240,0,301222360,0).
sa(301226240,0,301369663,0).
sa(301226240,0,301548193,0).
sa(301226240,0,301588172,0).
sa(301226240,0,301982646,0).
sa(301226240,0,302584981,0).
sa(301227137,0,301222884,0).
sa(301227137,0,301983797,0).
sa(301227137,0,301589217,0).
sa(301227137,0,301122411,0).
sa(301227137,0,301370590,0).
sa(301227137,0,302588099,0).
sa(301228530,0,300866471,0).
sa(301228530,0,301663571,0).
sa(301229020,0,300867213,0).
sa(301229020,0,301823092,0).
sa(301229020,0,301664581,0).
sa(301233347,0,301144887,0).
sa(301233347,0,302310895,0).
sa(301234167,0,301144359,0).
sa(301234167,0,301718867,0).
sa(301243825,0,301074650,0).
sa(301244195,0,301076793,0).
sa(301244410,0,300082241,0).
sa(301244410,0,301460421,0).
sa(301244410,0,301628946,0).
sa(301244410,0,301076793,0).
sa(301244410,0,301742296,0).
sa(301244410,0,302510879,0).
sa(301246579,0,301074650,0).
sa(301246579,0,301459422,0).
sa(301246579,0,301740892,0).
sa(301247240,0,302529264,0).
sa(301251128,0,302529945,0).
sa(301251128,0,301078302,0).
sa(301256332,0,301725712,0).
sa(301257612,0,302531422,0).
sa(301257612,0,301727439,0).
sa(301261867,0,300411886,0).
sa(301261867,0,300506299,0).
sa(301261867,0,301258617,0).
sa(301261867,0,301507134,0).
sa(301262611,0,301711071,0).
sa(301264336,0,301807219,0).
sa(301270175,0,300976508,0).
sa(301274261,0,300679147,0).
sa(301274261,0,301317954,0).
sa(301275395,0,300681094,0).
sa(301275395,0,301319182,0).
sa(301275562,0,300900616,0).
sa(301275562,0,302161432,0).
sa(301275562,0,302500884,0).
sa(301279978,0,301497736,0).
sa(301279978,0,300902652,0).
sa(301279978,0,302164402,0).
sa(301282014,0,301560513,0).
sa(301286539,0,301892953,0).
sa(301289701,0,302575008,0).
sa(301290333,0,302572823,0).
sa(301291937,0,301904156,0).
sa(301291937,0,300871255,0).
sa(301292128,0,302564986,0).
sa(301293158,0,300075515,0).
sa(301293158,0,302566015,0).
sa(301294300,0,301732270,0).
sa(301295443,0,301727926,0).
sa(301296270,0,300139126,0).
sa(301296270,0,300111129,0).
sa(301296474,0,300138912,0).
sa(301296474,0,300110853,0).
sa(301297315,0,300009978,0).
sa(301299888,0,300009046,0).
sa(301301624,0,301876006,0).
sa(301303776,0,301131043,0).
sa(301304374,0,301178974,0).
sa(301304570,0,301323449,0).
sa(301306273,0,300829745,0).
sa(301306273,0,300884007,0).
sa(301306273,0,302270342,0).
sa(301308425,0,302271544,0).
sa(301308425,0,300830717,0).
sa(301308425,0,300884286,0).
sa(301309991,0,302271544,0).
sa(301309991,0,302179279,0).
sa(301310685,0,302180797,0).
sa(301316222,0,301987341,0).
sa(301316222,0,302000118,0).
sa(301317012,0,301988166,0).
sa(301317012,0,302000680,0).
sa(301317954,0,300289082,0).
sa(301317954,0,300679147,0).
sa(301317954,0,301274261,0).
sa(301317954,0,302274253,0).
sa(301319182,0,302057829,0).
sa(301319182,0,302273326,0).
sa(301319182,0,300289365,0).
sa(301319182,0,300681094,0).
sa(301319182,0,301275395,0).
sa(301319874,0,300923321,0).
sa(301319874,0,302036578,0).
sa(301320988,0,300923993,0).
sa(301320988,0,302037272,0).
sa(301323449,0,301304570,0).
sa(301326148,0,301330986,0).
sa(301326148,0,302111684,0).
sa(301326148,0,302475855,0).
sa(301326917,0,302478749,0).
sa(301326917,0,302109678,0).
sa(301330986,0,300466808,0).
sa(301330986,0,300472336,0).
sa(301330986,0,301326148,0).
sa(301332386,0,300829745,0).
sa(301332386,0,301779986,0).
sa(301332386,0,302083615,0).
sa(301333718,0,302084538,0).
sa(301334398,0,301839829,0).
sa(301334398,0,300438707,0).
sa(301336587,0,301840366,0).
sa(301336587,0,300439588,0).
sa(301336926,0,300532892,0).
sa(301337121,0,300533851,0).
sa(301337486,0,301558385,0).
sa(301337486,0,301797148,0).
sa(301337486,0,301842763,0).
sa(301338730,0,301558912,0).
sa(301338730,0,301797862,0).
sa(301343918,0,300921014,0).
sa(301343918,0,302306288,0).
sa(301344963,0,300922594,0).
sa(301344963,0,302307367,0).
sa(301348258,0,300900616,0).
sa(301348258,0,300093810,0).
sa(301349041,0,300902652,0).
sa(301351021,0,302258600,0).
sa(301351391,0,302257141,0).
sa(301353014,0,300535452,0).
sa(301353226,0,300537339,0).
sa(301356683,0,302306288,0).
sa(301358363,0,300166146,0).
sa(301359035,0,300169164,0).
sa(301359543,0,302236842,0).
sa(301359861,0,302238462,0).
sa(301360413,0,302243806,0).
sa(301360692,0,302244472,0).
sa(301361414,0,300362467,0).
sa(301363613,0,301148283,0).
sa(301363613,0,301366718,0).
sa(301364008,0,301368192,0).
sa(301364008,0,301149494,0).
sa(301366718,0,300704609,0).
sa(301366718,0,301148283,0).
sa(301366718,0,301363613,0).
sa(301368192,0,300364479,0).
sa(301368192,0,301364008,0).
sa(301368192,0,301149494,0).
sa(301369663,0,300956131,0).
sa(301369663,0,301226240,0).
sa(301369663,0,302034828,0).
sa(301369663,0,302036578,0).
sa(301370590,0,301227137,0).
sa(301370590,0,300957176,0).
sa(301370590,0,302037272,0).
sa(301370590,0,302035337,0).
sa(301372049,0,300638981,0).
sa(301372049,0,300226618,0).
sa(301372049,0,301133876,0).
sa(301372049,0,301507134,0).
sa(301372049,0,301156112,0).
sa(301373728,0,300227003,0).
sa(301373728,0,301507402,0).
sa(301375174,0,300027247,0).
sa(301375174,0,300965606,0).
sa(301375174,0,301120925,0).
sa(301376894,0,301122411,0).
sa(301376894,0,300028672,0).
sa(301376894,0,300966477,0).
sa(301395617,0,301400562,0).
sa(301395617,0,302318464,0).
sa(301396047,0,302319129,0).
sa(301396047,0,301401854,0).
sa(301397385,0,301772032,0).
sa(301397385,0,302088086,0).
sa(301398199,0,301772609,0).
sa(301398199,0,302087956,0).
sa(301398199,0,301447302,0).
sa(301400562,0,301395617,0).
sa(301400562,0,301406640,0).
sa(301401854,0,301407465,0).
sa(301401854,0,301960656,0).
sa(301401854,0,301396047,0).
sa(301406640,0,300178575,0).
sa(301406640,0,301400562,0).
sa(301406640,0,302498708,0).
sa(301407465,0,301401854,0).
sa(301407465,0,302499750,0).
sa(301407465,0,300179486,0).
sa(301409581,0,302062670,0).
sa(301411065,0,300889831,0).
sa(301411291,0,300892379,0).
sa(301411451,0,301821266,0).
sa(301415021,0,302561888,0).
sa(301416508,0,301971237,0).
sa(301417228,0,301971237,0).
sa(301418789,0,300914421,0).
sa(301418789,0,302018486,0).
sa(301419149,0,302016535,0).
sa(301421077,0,300829745,0).
sa(301421077,0,302270342,0).
sa(301421679,0,300830717,0).
sa(301430111,0,300110853,0).
sa(301430111,0,300464513,0).
sa(301430111,0,300111129,0).
sa(301430111,0,301925372,0).
sa(301430111,0,301943406,0).
sa(301430847,0,300464962,0).
sa(301430847,0,301926376,0).
sa(301430847,0,301944660,0).
sa(301446749,0,302403671,0).
sa(301447302,0,302403206,0).
sa(301447302,0,301398199,0).
sa(301449564,0,301450969,0).
sa(301450713,0,301451498,0).
sa(301450969,0,300669478,0).
sa(301450969,0,301449564,0).
sa(301450969,0,301957454,0).
sa(301451498,0,300670741,0).
sa(301451498,0,301450713,0).
sa(301451498,0,301956964,0).
sa(301452593,0,300173764,0).
sa(301452593,0,301919931,0).
sa(301454636,0,301918984,0).
sa(301459422,0,301246579,0).
sa(301459422,0,300732960,0).
sa(301459422,0,301463965,0).
sa(301460421,0,301244410,0).
sa(301460421,0,301624633,0).
sa(301460421,0,300733905,0).
sa(301461822,0,302526925,0).
sa(301463137,0,302527734,0).
sa(301463965,0,300157925,0).
sa(301463965,0,301459422,0).
sa(301463965,0,301725712,0).
sa(301463965,0,302533313,0).
sa(301466593,0,302533540,0).
sa(301467298,0,301474324,0).
sa(301467700,0,301473996,0).
sa(301468097,0,301047874,0).
sa(301468682,0,301049462,0).
sa(301473996,0,301467700,0).
sa(301474324,0,301467298,0).
sa(301474513,0,300696518,0).
sa(301474513,0,301612053,0).
sa(301474513,0,302451113,0).
sa(301475282,0,300695523,0).
sa(301475282,0,301613463,0).
sa(301475282,0,302451951,0).
sa(301475831,0,301483324,0).
sa(301476685,0,301483324,0).
sa(301477806,0,301484083,0).
sa(301481612,0,301482956,0).
sa(301482228,0,301483143,0).
sa(301482956,0,301481612,0).
sa(301482956,0,301486489,0).
sa(301483143,0,301482228,0).
sa(301483324,0,301476685,0).
sa(301483324,0,301475831,0).
sa(301483324,0,301486084,0).
sa(301484083,0,301477806,0).
sa(301484083,0,301486197,0).
sa(301486084,0,301483324,0).
sa(301486197,0,301484083,0).
sa(301486489,0,300507464,0).
sa(301486489,0,301482956,0).
sa(301487201,0,300508192,0).
sa(301488245,0,301643620,0).
sa(301489722,0,301495535,0).
sa(301489722,0,301646941,0).
sa(301495381,0,301488245,0).
sa(301495535,0,301489722,0).
sa(301496976,0,301909890,0).
sa(301496976,0,302161432,0).
sa(301497736,0,302164402,0).
sa(301497736,0,301910488,0).
sa(301497736,0,301279978,0).
sa(301505991,0,301079683,0).
sa(301506770,0,301078302,0).
sa(301507134,0,300437539,0).
sa(301507134,0,300506299,0).
sa(301507134,0,301261867,0).
sa(301507134,0,301372049,0).
sa(301507134,0,301156112,0).
sa(301507402,0,300247439,0).
sa(301507402,0,301155354,0).
sa(301507402,0,300437852,0).
sa(301507402,0,301785180,0).
sa(301507402,0,300507053,0).
sa(301507402,0,301373728,0).
sa(301508719,0,301531375,0).
sa(301508719,0,302401590,0).
sa(301509527,0,301533120,0).
sa(301509527,0,302402268,0).
sa(301509527,0,301872745,0).
sa(301522376,0,301561771,0).
sa(301525116,0,301564073,0).
sa(301531375,0,301508719,0).
sa(301531375,0,302401590,0).
sa(301533120,0,302402268,0).
sa(301533120,0,301509527,0).
sa(301533120,0,301944660,0).
sa(301535709,0,300666058,0).
sa(301535709,0,300816481,0).
sa(301535709,0,301640850,0).
sa(301536911,0,301638438,0).
sa(301536911,0,301727926,0).
sa(301537885,0,300682932,0).
sa(301538858,0,300683185,0).
sa(301539225,0,301892953,0).
sa(301539642,0,301994602,0).
sa(301539642,0,300787136,0).
sa(301539642,0,301889256,0).
sa(301540365,0,301913200,0).
sa(301540365,0,302506555,0).
sa(301541013,0,301913715,0).
sa(301548193,0,300360650,0).
sa(301548193,0,301129977,0).
sa(301548193,0,301226240,0).
sa(301548193,0,302036578,0).
sa(301548193,0,302513269,0).
sa(301549291,0,301131043,0).
sa(301549291,0,300361509,0).
sa(301549291,0,302037272,0).
sa(301549291,0,302513740,0).
sa(301551130,0,302085449,0).
sa(301551461,0,302085898,0).
sa(301551633,0,301556355,0).
sa(301551633,0,302267308,0).
sa(301552885,0,301556616,0).
sa(301552885,0,302267308,0).
sa(301553629,0,301555133,0).
sa(301554510,0,301555416,0).
sa(301555133,0,301553629,0).
sa(301555416,0,301554510,0).
sa(301556355,0,301551633,0).
sa(301556355,0,301555133,0).
sa(301556616,0,301552885,0).
sa(301556616,0,301555416,0).
sa(301557614,0,301180695,0).
sa(301557614,0,301006566,0).
sa(301557903,0,301178974,0).
sa(301557903,0,301007354,0).
sa(301558385,0,301337486,0).
sa(301558912,0,301338730,0).
sa(301559903,0,300070939,0).
sa(301559903,0,300853776,0).
sa(301560320,0,300071992,0).
sa(301560320,0,300856860,0).
sa(301560513,0,300853776,0).
sa(301560513,0,301282014,0).
sa(301560513,0,302306288,0).
sa(301561564,0,300856860,0).
sa(301561564,0,302307367,0).
sa(301561771,0,301522376,0).
sa(301564073,0,301525116,0).
sa(301567862,0,301038102,0).
sa(301568375,0,301037540,0).
sa(301569965,0,301570892,0).
sa(301569965,0,301574036,0).
sa(301570470,0,300070939,0).
sa(301570470,0,301599532,0).
sa(301570470,0,301574446,0).
sa(301570892,0,301569965,0).
sa(301571363,0,301116380,0).
sa(301571363,0,301934554,0).
sa(301574446,0,301934554,0).
sa(301580050,0,300900616,0).
sa(301580050,0,300903668,0).
sa(301580050,0,300848074,0).
sa(301581115,0,300902652,0).
sa(301586342,0,301800349,0).
sa(301587077,0,301801600,0).
sa(301588172,0,301226240,0).
sa(301589217,0,300904548,0).
sa(301589217,0,300264776,0).
sa(301589217,0,301227137,0).
sa(301592642,0,300259568,0).
sa(301593649,0,302073443,0).
sa(301593649,0,302295998,0).
sa(301595596,0,302074092,0).
sa(301599532,0,302108827,0).
sa(301599532,0,301570470,0).
sa(301611067,0,301160031,0).
sa(301611067,0,302449430,0).
sa(301611067,0,302559180,0).
sa(301611839,0,301159655,0).
sa(301612053,0,300696518,0).
sa(301612053,0,302328659,0).
sa(301612053,0,301123148,0).
sa(301612053,0,301474513,0).
sa(301612053,0,300788821,0).
sa(301612053,0,302451113,0).
sa(301613463,0,301125429,0).
sa(301613463,0,300695523,0).
sa(301613463,0,300635456,0).
sa(301613463,0,302451951,0).
sa(301613463,0,302326695,0).
sa(301613463,0,301475282,0).
sa(301616474,0,301996377,0).
sa(301618053,0,301704761,0).
sa(301620509,0,301653538,0).
sa(301624633,0,301131043,0).
sa(301624633,0,301460421,0).
sa(301624633,0,301716491,0).
sa(301624633,0,301801600,0).
sa(301628302,0,301994602,0).
sa(301628302,0,301160031,0).
sa(301628946,0,300082241,0).
sa(301628946,0,301244410,0).
sa(301630117,0,301887076,0).
sa(301632537,0,300178575,0).
sa(301633591,0,300179486,0).
sa(301638438,0,300668571,0).
sa(301638438,0,301536911,0).
sa(301638438,0,301727926,0).
sa(301638438,0,301068726,0).
sa(301638438,0,302580449,0).
sa(301640850,0,300666058,0).
sa(301640850,0,301067694,0).
sa(301640850,0,301535709,0).
sa(301643620,0,300935500,0).
sa(301643620,0,301488245,0).
sa(301643620,0,302099774,0).
sa(301646941,0,301489722,0).
sa(301646941,0,302100709,0).
sa(301646941,0,301640850,0).
sa(301652380,0,301652902,0).
sa(301652782,0,301653538,0).
sa(301652902,0,301652380,0).
sa(301652902,0,302086991,0).
sa(301653538,0,301620509,0).
sa(301653538,0,302086667,0).
sa(301653538,0,301652782,0).
sa(301660994,0,300064479,0).
sa(301661914,0,300065488,0).
sa(301663571,0,301228530,0).
sa(301663571,0,301817500,0).
sa(301664581,0,301229020,0).
sa(301664581,0,301818234,0).
sa(301668340,0,302382934,0).
sa(301668858,0,302383151,0).
sa(301670221,0,301671732,0).
sa(301671095,0,301671881,0).
sa(301671732,0,301670221,0).
sa(301671881,0,301671095,0).
sa(301672607,0,300485711,0).
sa(301672607,0,300489108,0).
sa(301675190,0,300504592,0).
sa(301675190,0,300487653,0).
sa(301675190,0,300490035,0).
sa(301675190,0,302341266,0).
sa(301675190,0,301282014,0).
sa(301682229,0,301684426,0).
sa(301683667,0,301685377,0).
sa(301684426,0,301682229,0).
sa(301685377,0,301683667,0).
sa(301686439,0,300643250,0).
sa(301686439,0,301010862,0).
sa(301686439,0,301852174,0).
sa(301686439,0,300606602,0).
sa(301688271,0,300605516,0).
sa(301688271,0,301853461,0).
sa(301688271,0,301068726,0).
sa(301688271,0,300644372,0).
sa(301689880,0,300574422,0).
sa(301689880,0,302294777,0).
sa(301692222,0,300952395,0).
sa(301692786,0,300952867,0).
sa(301692969,0,300948103,0).
sa(301693799,0,300948670,0).
sa(301704761,0,300940437,0).
sa(301704761,0,302090341,0).
sa(301704761,0,301861205,0).
sa(301705655,0,300941485,0).
sa(301705655,0,302517265,0).
sa(301705655,0,302088404,0).
sa(301709815,0,300136354,0).
sa(301711071,0,301160031,0).
sa(301711071,0,301262611,0).
sa(301713095,0,301159655,0).
sa(301713373,0,301005063,0).
sa(301713925,0,301005675,0).
sa(301716227,0,300133417,0).
sa(301716227,0,300828779,0).
sa(301716227,0,302395115,0).
sa(301716491,0,300829496,0).
sa(301716491,0,301624633,0).
sa(301716491,0,302399399,0).
sa(301716491,0,300133851,0).
sa(301716491,0,301801600,0).
sa(301717117,0,301746605,0).
sa(301717669,0,301748318,0).
sa(301718158,0,300503982,0).
sa(301718158,0,302371718,0).
sa(301718867,0,300612114,0).
sa(301718867,0,300612652,0).
sa(301718867,0,301234167,0).
sa(301722965,0,300957176,0).
sa(301723308,0,300956131,0).
sa(301725712,0,300853776,0).
sa(301725712,0,300885695,0).
sa(301725712,0,301256332,0).
sa(301725712,0,301463965,0).
sa(301727439,0,301257612,0).
sa(301727439,0,300856860,0).
sa(301727439,0,300887062,0).
sa(301727926,0,300668571,0).
sa(301727926,0,301638438,0).
sa(301727926,0,301295443,0).
sa(301732270,0,301294300,0).
sa(301732270,0,301883898,0).
sa(301734436,0,301737825,0).
sa(301734884,0,301738335,0).
sa(301735736,0,302436341,0).
sa(301735736,0,300514278,0).
sa(301737241,0,300085264,0).
sa(301737825,0,301734436,0).
sa(301738335,0,301734884,0).
sa(301740207,0,300961392,0).
sa(301740630,0,300962634,0).
sa(301740892,0,301246579,0).
sa(301740892,0,302512922,0).
sa(301740892,0,301922763,0).
sa(301740892,0,300084956,0).
sa(301742296,0,301244410,0).
sa(301742296,0,300303727,0).
sa(301742296,0,302510879,0).
sa(301743217,0,300153898,0).
sa(301743217,0,301150475,0).
sa(301743909,0,300154965,0).
sa(301743909,0,301957454,0).
sa(301746605,0,300173764,0).
sa(301746605,0,302515341,0).
sa(301748318,0,301717669,0).
sa(301748318,0,300174379,0).
sa(301748318,0,302517265,0).
sa(301749320,0,300914421,0).
sa(301749320,0,300247013,0).
sa(301749320,0,300289365,0).
sa(301749320,0,302497938,0).
sa(301752167,0,300245952,0).
sa(301752167,0,300289082,0).
sa(301753365,0,300737973,0).
sa(301754421,0,300898289,0).
sa(301754421,0,302290998,0).
sa(301754421,0,300346991,0).
sa(301755627,0,302291843,0).
sa(301760944,0,302435026,0).
sa(301761186,0,302435671,0).
sa(301763813,0,301887076,0).
sa(301765643,0,300435492,0).
sa(301767329,0,301858094,0).
sa(301769843,0,300614990,0).
sa(301770903,0,300867213,0).
sa(301772032,0,301397385,0).
sa(301772032,0,301773234,0).
sa(301772609,0,301773665,0).
sa(301772609,0,301398199,0).
sa(301773234,0,301772032,0).
sa(301773665,0,301772609,0).
sa(301781478,0,301783158,0).
sa(301781478,0,302055062,0).
sa(301781478,0,302578235,0).
sa(301781478,0,302513269,0).
sa(301782519,0,301784017,0).
sa(301782519,0,302056880,0).
sa(301782519,0,302513740,0).
sa(301783158,0,301781478,0).
sa(301784017,0,301782519,0).
sa(301785180,0,301507402,0).
sa(301791911,0,302174896,0).
sa(301791911,0,300060397,0).
sa(301791911,0,302018486,0).
sa(301791911,0,301851523,0).
sa(301794340,0,300056002,0).
sa(301794340,0,302016535,0).
sa(301797148,0,301337486,0).
sa(301797148,0,301842763,0).
sa(301797862,0,301338730,0).
sa(301798828,0,300073048,0).
sa(301799035,0,300074346,0).
sa(301799457,0,300644839,0).
sa(301799957,0,300645493,0).
sa(301800349,0,301133876,0).
sa(301800349,0,301586342,0).
sa(301800349,0,301807219,0).
sa(301801600,0,301808822,0).
sa(301801600,0,301134769,0).
sa(301801600,0,301587077,0).
sa(301801600,0,301624633,0).
sa(301801600,0,301716491,0).
sa(301805157,0,300588797,0).
sa(301805889,0,300589624,0).
sa(301807219,0,300166146,0).
sa(301807219,0,300217728,0).
sa(301807219,0,301264336,0).
sa(301807219,0,301800349,0).
sa(301808822,0,300220956,0).
sa(301808822,0,301801600,0).
sa(301809655,0,302047807,0).
sa(301809655,0,300800826,0).
sa(301811997,0,300278551,0).
sa(301812846,0,300283703,0).
sa(301812846,0,301949149,0).
sa(301813400,0,300758459,0).
sa(301814085,0,302570282,0).
sa(301814085,0,300932367,0).
sa(301817500,0,300075135,0).
sa(301817500,0,300584820,0).
sa(301817500,0,301663571,0).
sa(301817500,0,302354537,0).
sa(301818234,0,300586183,0).
sa(301818234,0,301664581,0).
sa(301818234,0,302356430,0).
sa(301821266,0,301411451,0).
sa(301821266,0,301834304,0).
sa(301821266,0,301939984,0).
sa(301821266,0,302417895,0).
sa(301823092,0,301229020,0).
sa(301823092,0,301836350,0).
sa(301823092,0,302357115,0).
sa(301823092,0,301941999,0).
sa(301823092,0,302418538,0).
sa(301824244,0,300834198,0).
sa(301824244,0,301001689,0).
sa(301824244,0,301825671,0).
sa(301824751,0,301827535,0).
sa(301824751,0,301002377,0).
sa(301825671,0,300834198,0).
sa(301825671,0,301824244,0).
sa(301825671,0,302321009,0).
sa(301827535,0,301824751,0).
sa(301827535,0,300835609,0).
sa(301827535,0,302324397,0).
sa(301834304,0,300013160,0).
sa(301834304,0,301821266,0).
sa(301834304,0,301939984,0).
sa(301834304,0,302123812,0).
sa(301836350,0,301823092,0).
sa(301836350,0,301941999,0).
sa(301837744,0,300021766,0).
sa(301837744,0,300779374,0).
sa(301837744,0,300914421,0).
sa(301837744,0,301103021,0).
sa(301839417,0,301101391,0).
sa(301839417,0,300023383,0).
sa(301839417,0,300915787,0).
sa(301839829,0,301334398,0).
sa(301840366,0,301336587,0).
sa(301841544,0,300340239,0).
sa(301842001,0,300341405,0).
sa(301842763,0,301337486,0).
sa(301842763,0,301797148,0).
sa(301843092,0,301797862,0).
sa(301843380,0,301930512,0).
sa(301845160,0,301932234,0).
sa(301849288,0,302393401,0).
sa(301851523,0,301791911,0).
sa(301851523,0,302392878,0).
sa(301852174,0,300900616,0).
sa(301852174,0,301010862,0).
sa(301852174,0,301686439,0).
sa(301853461,0,300902652,0).
sa(301853461,0,301688271,0).
sa(301858094,0,300898963,0).
sa(301858094,0,301767329,0).
sa(301861205,0,300900478,0).
sa(301861205,0,301704761,0).
sa(301864123,0,301047874,0).
sa(301864123,0,302021905,0).
sa(301864471,0,302022953,0).
sa(301864471,0,301049462,0).
sa(301865197,0,300643250,0).
sa(301865197,0,301001689,0).
sa(301865197,0,301080297,0).
sa(301865197,0,301870889,0).
sa(301865197,0,302331262,0).
sa(301866535,0,301002377,0).
sa(301866535,0,300644372,0).
sa(301866535,0,301871949,0).
sa(301870889,0,300064479,0).
sa(301870889,0,301865197,0).
sa(301870889,0,302495922,0).
sa(301871949,0,301866535,0).
sa(301871949,0,302333453,0).
sa(301871949,0,302497141,0).
sa(301872745,0,301509527,0).
sa(301872745,0,302083615,0).
sa(301873406,0,300311663,0).
sa(301873406,0,302120458,0).
sa(301873406,0,300950706,0).
sa(301876006,0,300203237,0).
sa(301876006,0,301301624,0).
sa(301876957,0,300201354,0).
sa(301878466,0,300134701,0).
sa(301878466,0,300631391,0).
sa(301878466,0,300682932,0).
sa(301878466,0,300688947,0).
sa(301878466,0,302034828,0).
sa(301880531,0,300135718,0).
sa(301880531,0,300683185,0).
sa(301880531,0,300689471,0).
sa(301880531,0,301021301,0).
sa(301880531,0,302035337,0).
sa(301883898,0,301732270,0).
sa(301884539,0,300201354,0).
sa(301884930,0,302525206,0).
sa(301884930,0,302093888,0).
sa(301886407,0,302094755,0).
sa(301886407,0,302360448,0).
sa(301886407,0,302523275,0).
sa(301887076,0,301630117,0).
sa(301887076,0,301763813,0).
sa(301889256,0,301539642,0).
sa(301892953,0,301539225,0).
sa(301894758,0,302157399,0).
sa(301894758,0,301898129,0).
sa(301894758,0,302421158,0).
sa(301895837,0,301899360,0).
sa(301895837,0,302422068,0).
sa(301896478,0,300921014,0).
sa(301897787,0,302512922,0).
sa(301897787,0,300922594,0).
sa(301898129,0,300309021,0).
sa(301898129,0,301894758,0).
sa(301898129,0,301996377,0).
sa(301898129,0,302569130,0).
sa(301899360,0,300311663,0).
sa(301899360,0,302570282,0).
sa(301899360,0,301895837,0).
sa(301904156,0,301291937,0).
sa(301904845,0,300360650,0).
sa(301905377,0,300361509,0).
sa(301905653,0,300427331,0).
sa(301905653,0,301951684,0).
sa(301908039,0,300427786,0).
sa(301908039,0,301952405,0).
sa(301909890,0,301496976,0).
sa(301909890,0,301989669,0).
sa(301910488,0,301497736,0).
sa(301911053,0,300510050,0).
sa(301911053,0,300851744,0).
sa(301911415,0,300511214,0).
sa(301911415,0,300852988,0).
sa(301913200,0,300555325,0).
sa(301913200,0,301540365,0).
sa(301913715,0,300556709,0).
sa(301913715,0,301541013,0).
sa(301916229,0,300590390,0).
sa(301918184,0,300590923,0).
sa(301918184,0,300721505,0).
sa(301918984,0,300174379,0).
sa(301918984,0,301454636,0).
sa(301919931,0,300298767,0).
sa(301919931,0,301452593,0).
sa(301922763,0,301740892,0).
sa(301922763,0,302389946,0).
sa(301922763,0,300087152,0).
sa(301923391,0,300085264,0).
sa(301923391,0,300822449,0).
sa(301923391,0,302390335,0).
sa(301925372,0,301430111,0).
sa(301925372,0,301943406,0).
sa(301925372,0,302073443,0).
sa(301926376,0,301430847,0).
sa(301926376,0,300464962,0).
sa(301926376,0,301944660,0).
sa(301930512,0,301843380,0).
sa(301930512,0,302564986,0).
sa(301932234,0,301845160,0).
sa(301932973,0,300013160,0).
sa(301932973,0,301115349,0).
sa(301932973,0,301939984,0).
sa(301932973,0,302179279,0).
sa(301934554,0,301571363,0).
sa(301934554,0,301116380,0).
sa(301934554,0,302180797,0).
sa(301934554,0,301574446,0).
sa(301934554,0,301941999,0).
sa(301939984,0,301821266,0).
sa(301939984,0,301834304,0).
sa(301939984,0,301932973,0).
sa(301941999,0,301823092,0).
sa(301941999,0,301836350,0).
sa(301941999,0,301934554,0).
sa(301943406,0,300956131,0).
sa(301943406,0,301430111,0).
sa(301943406,0,301925372,0).
sa(301943406,0,302498708,0).
sa(301944660,0,301430847,0).
sa(301944660,0,301533120,0).
sa(301944660,0,301926376,0).
sa(301947266,0,300411886,0).
sa(301947266,0,300849357,0).
sa(301947266,0,300983862,0).
sa(301947266,0,301141743,0).
sa(301947266,0,301811997,0).
sa(301949149,0,300851103,0).
sa(301949149,0,300985387,0).
sa(301949149,0,301142349,0).
sa(301949149,0,301812846,0).
sa(301951684,0,301005063,0).
sa(301951684,0,301905653,0).
sa(301951684,0,301953635,0).
sa(301952405,0,301908039,0).
sa(301952405,0,301005675,0).
sa(301952405,0,301954781,0).
sa(301953635,0,301951684,0).
sa(301954781,0,301952405,0).
sa(301956964,0,301451498,0).
sa(301957454,0,301450969,0).
sa(301957454,0,301743909,0).
sa(301959294,0,302019021,0).
sa(301959294,0,302371718,0).
sa(301959294,0,302382934,0).
sa(301960656,0,302373599,0).
sa(301960656,0,301401854,0).
sa(301960656,0,302238462,0).
sa(301960656,0,302021050,0).
sa(301960656,0,302303077,0).
sa(301960656,0,302383151,0).
sa(301971046,0,300730731,0).
sa(301971237,0,301417228,0).
sa(301971237,0,301416508,0).
sa(301978395,0,300932695,0).
sa(301978894,0,300933032,0).
sa(301980250,0,300011757,0).
sa(301982646,0,301226240,0).
sa(301982646,0,301993140,0).
sa(301983797,0,301227137,0).
sa(301983797,0,301993843,0).
sa(301987341,0,300204077,0).
sa(301987341,0,301316222,0).
sa(301987341,0,302000118,0).
sa(301987341,0,300720913,0).
sa(301988166,0,300720296,0).
sa(301988166,0,301317012,0).
sa(301988166,0,302000680,0).
sa(301989669,0,300262792,0).
sa(301989669,0,300684480,0).
sa(301989669,0,301909890,0).
sa(301992149,0,300685483,0).
sa(301993140,0,301123148,0).
sa(301993140,0,301982646,0).
sa(301993140,0,302584981,0).
sa(301993940,0,300639842,0).
sa(301993940,0,301892953,0).
sa(301993940,0,302011810,0).
sa(301994602,0,301539642,0).
sa(301994602,0,300640283,0).
sa(301994602,0,302012504,0).
sa(301994602,0,301628302,0).
sa(301994602,0,302079029,0).
sa(301996377,0,301616474,0).
sa(301996377,0,301898129,0).
sa(301996377,0,302464693,0).
sa(301999306,0,302103481,0).
sa(301999306,0,302105990,0).
sa(301999766,0,302105375,0).
sa(301999766,0,302106761,0).
sa(302000118,0,300598679,0).
sa(302000118,0,301316222,0).
sa(302000118,0,301987341,0).
sa(302000680,0,301061489,0).
sa(302000680,0,300599821,0).
sa(302000680,0,301317012,0).
sa(302000680,0,301988166,0).
sa(302002227,0,300415228,0).
sa(302002227,0,300558373,0).
sa(302002227,0,301064286,0).
sa(302002814,0,301061489,0).
sa(302005756,0,301977155,0).
sa(302006031,0,301978003,0).
sa(302011810,0,301993940,0).
sa(302012504,0,301994602,0).
sa(302015571,0,301807219,0).
sa(302015571,0,301870889,0).
sa(302016535,0,301794340,0).
sa(302016535,0,301419149,0).
sa(302018486,0,301045711,0).
sa(302018486,0,301418789,0).
sa(302018486,0,301791911,0).
sa(302019021,0,301959294,0).
sa(302021050,0,301960656,0).
sa(302021905,0,300013887,0).
sa(302021905,0,301864123,0).
sa(302022953,0,301864471,0).
sa(302028612,0,300742879,0).
sa(302028612,0,302031986,0).
sa(302029167,0,300743435,0).
sa(302029167,0,302032953,0).
sa(302030080,0,300574422,0).
sa(302030935,0,300575230,0).
sa(302030935,0,300298041,0).
sa(302031986,0,302028612,0).
sa(302032953,0,302029167,0).
sa(302034828,0,300905386,0).
sa(302034828,0,301129977,0).
sa(302034828,0,301369663,0).
sa(302034828,0,301878466,0).
sa(302035337,0,301131043,0).
sa(302035337,0,301880531,0).
sa(302035337,0,301549291,0).
sa(302035337,0,300905728,0).
sa(302035337,0,301370590,0).
sa(302035337,0,302513740,0).
sa(302036578,0,301129977,0).
sa(302036578,0,301319874,0).
sa(302036578,0,301369663,0).
sa(302036578,0,301548193,0).
sa(302036578,0,302513269,0).
sa(302036578,0,302584981,0).
sa(302037272,0,301131043,0).
sa(302037272,0,301320988,0).
sa(302037272,0,301549291,0).
sa(302037272,0,301370590,0).
sa(302037272,0,302588099,0).
sa(302037272,0,302513740,0).
sa(302037708,0,301170243,0).
sa(302037708,0,300707366,0).
sa(302037708,0,302321009,0).
sa(302040049,0,300705891,0).
sa(302040049,0,302324397,0).
sa(302040652,0,302043898,0).
sa(302043217,0,302047807,0).
sa(302043898,0,302317327,0).
sa(302043898,0,300986027,0).
sa(302043898,0,302049918,0).
sa(302043898,0,302050116,0).
sa(302043898,0,302040652,0).
sa(302047807,0,301809655,0).
sa(302047807,0,302043217,0).
sa(302049918,0,302043898,0).
sa(302050116,0,302043898,0).
sa(302053818,0,300573225,0).
sa(302053818,0,302055062,0).
sa(302055062,0,300573225,0).
sa(302055062,0,301178974,0).
sa(302055062,0,301781478,0).
sa(302056880,0,301180695,0).
sa(302056880,0,301782519,0).
sa(302056880,0,302577061,0).
sa(302057829,0,301159655,0).
sa(302057829,0,302525206,0).
sa(302057829,0,302093888,0).
sa(302057829,0,301319182,0).
sa(302058794,0,302094755,0).
sa(302058794,0,302523275,0).
sa(302062670,0,300889831,0).
sa(302062670,0,301409581,0).
sa(302062670,0,302071420,0).
sa(302062670,0,302507968,0).
sa(302064745,0,302073113,0).
sa(302064745,0,300508192,0).
sa(302064745,0,301410363,0).
sa(302064745,0,302506555,0).
sa(302069355,0,302109678,0).
sa(302071420,0,302062670,0).
sa(302073443,0,301925372,0).
sa(302074092,0,301926376,0).
sa(302074092,0,301944660,0).
sa(302079029,0,300647542,0).
sa(302079029,0,301994602,0).
sa(302080577,0,300017782,0).
sa(302080577,0,300051045,0).
sa(302082218,0,300051696,0).
sa(302082218,0,300018584,0).
sa(302083615,0,300649586,0).
sa(302083615,0,301332386,0).
sa(302083615,0,301872745,0).
sa(302084538,0,301333718,0).
sa(302085449,0,301551130,0).
sa(302085898,0,301551461,0).
sa(302086667,0,301653538,0).
sa(302086991,0,301652902,0).
sa(302087956,0,301398199,0).
sa(302088086,0,301397385,0).
sa(302088404,0,301705655,0).
sa(302090341,0,301704761,0).
sa(302093888,0,302525206,0).
sa(302093888,0,301884930,0).
sa(302093888,0,302057829,0).
sa(302094755,0,302058794,0).
sa(302094755,0,301886407,0).
sa(302094755,0,302523275,0).
sa(302095311,0,301059400,0).
sa(302095786,0,301060785,0).
sa(302097268,0,300166146,0).
sa(302097268,0,302131072,0).
sa(302098136,0,300169164,0).
sa(302098325,0,300101800,0).
sa(302098325,0,300639356,0).
sa(302098325,0,301112573,0).
sa(302099019,0,300101609,0).
sa(302099019,0,300638981,0).
sa(302099019,0,301111016,0).
sa(302099774,0,301643620,0).
sa(302100709,0,301646941,0).
sa(302100709,0,302329355,0).
sa(302102484,0,300190115,0).
sa(302102484,0,300570590,0).
sa(302102484,0,302103481,0).
sa(302102796,0,302105375,0).
sa(302102796,0,302106761,0).
sa(302102796,0,300191603,0).
sa(302102796,0,300571643,0).
sa(302103481,0,301999306,0).
sa(302103481,0,302102484,0).
sa(302103481,0,302360448,0).
sa(302105375,0,302102796,0).
sa(302105375,0,301999766,0).
sa(302105375,0,302363358,0).
sa(302105990,0,301999306,0).
sa(302106761,0,301155354,0).
sa(302106761,0,302102796,0).
sa(302106761,0,302448437,0).
sa(302106761,0,301999766,0).
sa(302108827,0,301599532,0).
sa(302109678,0,302478749,0).
sa(302109678,0,300727564,0).
sa(302109678,0,300493460,0).
sa(302109678,0,302069355,0).
sa(302109678,0,301326917,0).
sa(302109678,0,302152848,0).
sa(302111684,0,300466808,0).
sa(302111684,0,300492677,0).
sa(302111684,0,301326148,0).
sa(302111684,0,302152216,0).
sa(302111684,0,302475855,0).
sa(302113827,0,302116934,0).
sa(302113827,0,301167817,0).
sa(302115324,0,300417413,0).
sa(302115324,0,302116618,0).
sa(302115324,0,301164763,0).
sa(302116618,0,302115324,0).
sa(302116934,0,302113827,0).
sa(302120458,0,301873406,0).
sa(302123812,0,301090308,0).
sa(302123812,0,301834304,0).
sa(302123812,0,302495922,0).
sa(302124654,0,301090993,0).
sa(302124654,0,302497141,0).
sa(302131072,0,300921014,0).
sa(302131072,0,302097268,0).
sa(302140492,0,302373599,0).
sa(302152216,0,302111684,0).
sa(302152848,0,302109678,0).
sa(302161432,0,301275562,0).
sa(302161432,0,301496976,0).
sa(302164402,0,301497736,0).
sa(302164402,0,301279978,0).
sa(302166346,0,302174896,0).
sa(302166346,0,302244619,0).
sa(302171024,0,302176178,0).
sa(302171024,0,302245403,0).
sa(302171024,0,302244619,0).
sa(302174896,0,300749230,0).
sa(302174896,0,301791911,0).
sa(302174896,0,302166346,0).
sa(302176178,0,302171024,0).
sa(302176178,0,300744916,0).
sa(302179279,0,301115349,0).
sa(302179279,0,301222360,0).
sa(302179279,0,301309991,0).
sa(302179279,0,301932973,0).
sa(302179279,0,302460502,0).
sa(302180797,0,301116380,0).
sa(302180797,0,301222884,0).
sa(302180797,0,301310685,0).
sa(302180797,0,302461723,0).
sa(302180797,0,301934554,0).
sa(302225510,0,300510050,0).
sa(302225510,0,300935500,0).
sa(302228335,0,300511214,0).
sa(302228335,0,300936740,0).
sa(302230990,0,302238462,0).
sa(302232251,0,302236842,0).
sa(302234781,0,302236842,0).
sa(302236271,0,302238462,0).
sa(302236842,0,300909545,0).
sa(302236842,0,302232251,0).
sa(302236842,0,301359543,0).
sa(302236842,0,302234781,0).
sa(302238462,0,302230990,0).
sa(302238462,0,301960656,0).
sa(302238462,0,302236271,0).
sa(302238462,0,300911327,0).
sa(302238462,0,301359861,0).
sa(302238462,0,301812846,0).
sa(302244619,0,302166346,0).
sa(302244619,0,302171024,0).
sa(302245403,0,302166346,0).
sa(302245403,0,302171024,0).
sa(302248349,0,301074650,0).
sa(302248349,0,302252634,0).
sa(302248349,0,302257141,0).
sa(302249441,0,302253065,0).
sa(302249441,0,302258600,0).
sa(302252634,0,302248349,0).
sa(302253065,0,302249441,0).
sa(302257141,0,301351391,0).
sa(302257141,0,301074650,0).
sa(302257141,0,302248349,0).
sa(302258600,0,301076793,0).
sa(302258600,0,302249441,0).
sa(302260035,0,301150915,0).
sa(302263982,0,301086545,0).
sa(302266145,0,300938801,0).
sa(302266600,0,300939105,0).
sa(302267308,0,301552885,0).
sa(302267308,0,301551633,0).
sa(302270342,0,301106614,0).
sa(302270342,0,300866047,0).
sa(302270342,0,300849357,0).
sa(302270342,0,301306273,0).
sa(302270342,0,301421602,0).
sa(302270342,0,302577061,0).
sa(302271544,0,300646413,0).
sa(302271544,0,301308425,0).
sa(302271544,0,302578235,0).
sa(302273326,0,301017161,0).
sa(302273326,0,301170243,0).
sa(302273326,0,300289365,0).
sa(302273326,0,300681094,0).
sa(302273326,0,301319182,0).
sa(302273326,0,302557357,0).
sa(302274253,0,300289082,0).
sa(302274253,0,300679147,0).
sa(302274253,0,301317954,0).
sa(302274253,0,301017738,0).
sa(302274253,0,301172889,0).
sa(302274253,0,302559180,0).
sa(302277607,0,301103021,0).
sa(302278337,0,301101391,0).
sa(302278939,0,300873603,0).
sa(302278939,0,300885695,0).
sa(302278939,0,300804695,0).
sa(302281325,0,300806512,0).
sa(302281325,0,300887062,0).
sa(302283703,0,300150202,0).
sa(302284578,0,302213270,0).
sa(302290998,0,300583239,0).
sa(302290998,0,301754421,0).
sa(302290998,0,302301560,0).
sa(302291843,0,301755627,0).
sa(302291843,0,300583990,0).
sa(302291843,0,302303077,0).
sa(302294777,0,301689880,0).
sa(302295998,0,301593649,0).
sa(302301560,0,300909545,0).
sa(302301560,0,302290998,0).
sa(302301560,0,300087152,0).
sa(302303077,0,300085264,0).
sa(302303077,0,301960656,0).
sa(302303077,0,300911327,0).
sa(302303077,0,302291843,0).
sa(302306288,0,300921014,0).
sa(302306288,0,301343918,0).
sa(302306288,0,301356683,0).
sa(302306288,0,301560513,0).
sa(302306288,0,302309341,0).
sa(302307367,0,300922594,0).
sa(302307367,0,301344963,0).
sa(302307367,0,301561564,0).
sa(302309341,0,302306288,0).
sa(302310895,0,300760916,0).
sa(302310895,0,301233347,0).
sa(302311544,0,302317327,0).
sa(302311544,0,300762355,0).
sa(302318464,0,301222360,0).
sa(302318464,0,301395617,0).
sa(302319129,0,301222884,0).
sa(302319129,0,301396047,0).
sa(302321009,0,301825671,0).
sa(302321009,0,302037708,0).
sa(302321009,0,300707366,0).
sa(302321009,0,302447344,0).
sa(302324397,0,300705891,0).
sa(302324397,0,302448166,0).
sa(302324397,0,301827535,0).
sa(302326695,0,301613463,0).
sa(302326695,0,302451951,0).
sa(302326695,0,301025913,0).
sa(302328659,0,301612053,0).
sa(302328659,0,302451113,0).
sa(302329355,0,300788821,0).
sa(302329355,0,300792991,0).
sa(302330336,0,300695523,0).
sa(302331262,0,301047874,0).
sa(302331262,0,301865197,0).
sa(302331262,0,300695024,0).
sa(302333453,0,300694608,0).
sa(302333453,0,301049462,0).
sa(302333453,0,301871949,0).
sa(302335828,0,300105746,0).
sa(302336449,0,300106456,0).
sa(302337667,0,302367604,0).
sa(302337667,0,302368336,0).
sa(302338327,0,302367785,0).
sa(302338615,0,300791227,0).
sa(302338615,0,301210854,0).
sa(302340213,0,301206474,0).
sa(302340213,0,301212469,0).
sa(302340213,0,300792991,0).
sa(302341266,0,300227507,0).
sa(302345272,0,300229630,0).
sa(302354537,0,301817500,0).
sa(302356430,0,301818234,0).
sa(302356820,0,300569090,0).
sa(302357115,0,301823092,0).
sa(302357115,0,300569568,0).
sa(302359464,0,300930290,0).
sa(302360351,0,300929567,0).
sa(302360448,0,302103481,0).
sa(302360448,0,301886407,0).
sa(302360448,0,302523275,0).
sa(302363358,0,302105375,0).
sa(302367604,0,302337667,0).
sa(302367604,0,302368336,0).
sa(302367785,0,302368787,0).
sa(302367785,0,302338327,0).
sa(302368336,0,302337667,0).
sa(302368336,0,302367604,0).
sa(302368336,0,302395115,0).
sa(302368787,0,302367785,0).
sa(302368787,0,302369763,0).
sa(302368787,0,302395115,0).
sa(302369763,0,302368787,0).
sa(302371718,0,301718158,0).
sa(302371718,0,301959294,0).
sa(302373599,0,301960656,0).
sa(302374914,0,300506299,0).
sa(302374914,0,300560586,0).
sa(302374914,0,301372049,0).
sa(302375828,0,300507053,0).
sa(302375828,0,300561036,0).
sa(302375828,0,301999766,0).
sa(302377186,0,302377651,0).
sa(302377651,0,302377186,0).
sa(302382934,0,301668340,0).
sa(302382934,0,301959294,0).
sa(302383151,0,301668858,0).
sa(302383151,0,301960656,0).
sa(302383380,0,300546646,0).
sa(302383380,0,300500569,0).
sa(302383831,0,300494907,0).
sa(302383831,0,300548781,0).
sa(302384383,0,300638981,0).
sa(302384843,0,300639356,0).
sa(302384843,0,300851103,0).
sa(302385102,0,301382086,0).
sa(302385102,0,301204557,0).
sa(302386612,0,301206474,0).
sa(302388145,0,302451113,0).
sa(302389220,0,302451951,0).
sa(302389946,0,301922763,0).
sa(302390335,0,301923391,0).
sa(302391455,0,300013160,0).
sa(302391867,0,300011757,0).
sa(302392878,0,300849357,0).
sa(302392878,0,301851523,0).
sa(302393401,0,300851103,0).
sa(302393401,0,301849288,0).
sa(302395115,0,300133417,0).
sa(302395115,0,301716227,0).
sa(302395115,0,301073822,0).
sa(302395115,0,302368787,0).
sa(302395115,0,302368336,0).
sa(302399399,0,300133851,0).
sa(302399399,0,301716491,0).
sa(302400218,0,302366200,0).
sa(302400929,0,302365397,0).
sa(302401590,0,301508719,0).
sa(302401590,0,301531375,0).
sa(302402268,0,301533120,0).
sa(302402268,0,301509527,0).
sa(302402559,0,300438166,0).
sa(302403030,0,300438567,0).
sa(302403206,0,301447302,0).
sa(302403671,0,301446749,0).
sa(302405326,0,300085264,0).
sa(302405326,0,300822449,0).
sa(302407603,0,300822115,0).
sa(302407603,0,300087152,0).
sa(302410393,0,300986027,0).
sa(302410393,0,302560548,0).
sa(302412164,0,302561888,0).
sa(302412164,0,300988232,0).
sa(302417895,0,300644839,0).
sa(302417895,0,301821266,0).
sa(302418538,0,301823092,0).
sa(302418538,0,300645493,0).
sa(302418872,0,300638981,0).
sa(302420530,0,300639356,0).
sa(302421158,0,301894758,0).
sa(302422068,0,301895837,0).
sa(302422685,0,300417413,0).
sa(302422685,0,300983862,0).
sa(302422685,0,302427087,0).
sa(302424254,0,300419289,0).
sa(302424254,0,300985387,0).
sa(302424254,0,302428377,0).
sa(302427087,0,302422685,0).
sa(302428377,0,302424254,0).
sa(302434605,0,300804695,0).
sa(302435026,0,301760944,0).
sa(302435671,0,301761186,0).
sa(302436341,0,300359459,0).
sa(302436341,0,301735736,0).
sa(302436622,0,300359862,0).
sa(302439949,0,300227507,0).
sa(302439949,0,301204557,0).
sa(302445636,0,300829496,0).
sa(302446651,0,300828779,0).
sa(302447344,0,300935500,0).
sa(302447344,0,300707366,0).
sa(302447344,0,302321009,0).
sa(302448166,0,302324397,0).
sa(302448437,0,301155354,0).
sa(302448437,0,302106761,0).
sa(302448889,0,301156112,0).
sa(302449430,0,301160031,0).
sa(302449430,0,301611067,0).
sa(302449430,0,301167817,0).
sa(302450640,0,301159655,0).
sa(302451113,0,300696518,0).
sa(302451113,0,302328659,0).
sa(302451113,0,301474513,0).
sa(302451113,0,301612053,0).
sa(302451113,0,302388145,0).
sa(302451951,0,300695523,0).
sa(302451951,0,301613463,0).
sa(302451951,0,302326695,0).
sa(302451951,0,301475282,0).
sa(302451951,0,302389220,0).
sa(302455297,0,300543603,0).
sa(302455297,0,301923391,0).
sa(302459109,0,301922763,0).
sa(302459109,0,300545015,0).
sa(302460502,0,300631391,0).
sa(302460502,0,300958880,0).
sa(302460502,0,301115349,0).
sa(302460502,0,301222360,0).
sa(302460502,0,302179279,0).
sa(302461723,0,301222884,0).
sa(302461723,0,301116380,0).
sa(302461723,0,300632438,0).
sa(302461723,0,302180797,0).
sa(302462619,0,300646413,0).
sa(302463154,0,300647070,0).
sa(302464693,0,300958880,0).
sa(302464693,0,301222360,0).
sa(302464693,0,300724081,0).
sa(302464693,0,301996377,0).
sa(302466111,0,301222884,0).
sa(302466111,0,300959731,0).
sa(302466111,0,300724861,0).
sa(302468635,0,300356926,0).
sa(302469928,0,301595596,0).
sa(302469928,0,300357983,0).
sa(302475855,0,300466808,0).
sa(302475855,0,301326148,0).
sa(302475855,0,302111684,0).
sa(302478749,0,300467913,0).
sa(302478749,0,301326917,0).
sa(302478749,0,302109678,0).
sa(302484208,0,302490170,0).
sa(302485650,0,302491961,0).
sa(302490170,0,302484208,0).
sa(302490170,0,301204557,0).
sa(302491961,0,302485650,0).
sa(302491961,0,301206474,0).
sa(302495922,0,300834198,0).
sa(302495922,0,300931555,0).
sa(302495922,0,301195536,0).
sa(302495922,0,301870889,0).
sa(302495922,0,302123812,0).
sa(302497141,0,300835609,0).
sa(302497141,0,301871949,0).
sa(302497141,0,302124654,0).
sa(302497938,0,301749320,0).
sa(302498708,0,301406640,0).
sa(302498708,0,301943406,0).
sa(302499750,0,301407465,0).
sa(302500884,0,300933154,0).
sa(302500884,0,301275562,0).
sa(302500884,0,302584981,0).
sa(302502163,0,302588099,0).
sa(302504131,0,300577920,0).
sa(302504131,0,300583990,0).
sa(302505716,0,300576680,0).
sa(302505716,0,300583239,0).
sa(302506555,0,302064745,0).
sa(302506555,0,301198737,0).
sa(302506555,0,301540365,0).
sa(302507968,0,302062670,0).
sa(302509292,0,300267871,0).
sa(302510879,0,301244410,0).
sa(302510879,0,301742296,0).
sa(302512922,0,301740892,0).
sa(302512922,0,301897787,0).
sa(302513269,0,300360650,0).
sa(302513269,0,301129977,0).
sa(302513269,0,301548193,0).
sa(302513269,0,301781478,0).
sa(302513269,0,302036578,0).
sa(302513740,0,301131043,0).
sa(302513740,0,301549291,0).
sa(302513740,0,301782519,0).
sa(302513740,0,302037272,0).
sa(302513740,0,302035337,0).
sa(302515341,0,301746605,0).
sa(302517265,0,301748318,0).
sa(302517265,0,301705655,0).
sa(302519555,0,300583990,0).
sa(302520219,0,300570590,0).
sa(302520219,0,302564986,0).
sa(302520219,0,301337486,0).
sa(302521353,0,300571643,0).
sa(302521353,0,302566015,0).
sa(302523275,0,300569090,0).
sa(302523275,0,302058794,0).
sa(302523275,0,302094755,0).
sa(302523275,0,302360448,0).
sa(302523275,0,301886407,0).
sa(302525206,0,301884930,0).
sa(302525206,0,302057829,0).
sa(302525206,0,302093888,0).
sa(302525206,0,300569568,0).
sa(302526925,0,300732960,0).
sa(302526925,0,301461822,0).
sa(302526925,0,302539968,0).
sa(302527734,0,300733905,0).
sa(302527734,0,301463137,0).
sa(302527734,0,302540236,0).
sa(302529264,0,301247240,0).
sa(302529945,0,301251128,0).
sa(302530861,0,300853776,0).
sa(302530861,0,301074650,0).
sa(302531422,0,300087152,0).
sa(302531422,0,301257612,0).
sa(302531422,0,300856860,0).
sa(302531422,0,301076793,0).
sa(302533313,0,301463965,0).
sa(302533540,0,301466593,0).
sa(302539968,0,302526925,0).
sa(302540236,0,302527734,0).
sa(302540578,0,301017161,0).
sa(302540578,0,301170243,0).
sa(302541302,0,301172889,0).
sa(302541302,0,301017738,0).
sa(302555954,0,302225510,0).
sa(302556720,0,302228335,0).
sa(302557357,0,301164763,0).
sa(302557357,0,301170243,0).
sa(302557357,0,302273326,0).
sa(302559180,0,301160031,0).
sa(302559180,0,301611067,0).
sa(302559180,0,301167817,0).
sa(302559180,0,301172889,0).
sa(302559180,0,302274253,0).
sa(302560548,0,300525453,0).
sa(302560548,0,302410393,0).
sa(302561888,0,301415021,0).
sa(302561888,0,300287640,0).
sa(302561888,0,302412164,0).
sa(302564986,0,300696518,0).
sa(302564986,0,301292128,0).
sa(302564986,0,301930512,0).
sa(302564986,0,302520219,0).
sa(302566015,0,300695523,0).
sa(302566015,0,301293158,0).
sa(302566015,0,302521353,0).
sa(302569130,0,300067038,0).
sa(302569130,0,300067966,0).
sa(302569130,0,301813400,0).
sa(302569130,0,301898129,0).
sa(302570282,0,301814085,0).
sa(302570282,0,301899360,0).
sa(302570282,0,300067767,0).
sa(302570282,0,300068278,0).
sa(302572823,0,301290333,0).
sa(302575008,0,301289701,0).
sa(302577061,0,301180695,0).
sa(302577061,0,302056880,0).
sa(302577061,0,302270342,0).
sa(302578235,0,302271544,0).
sa(302578235,0,301781478,0).
sa(302580449,0,301638438,0).
sa(302584981,0,300904163,0).
sa(302584981,0,301129977,0).
sa(302584981,0,301226240,0).
sa(302584981,0,302036578,0).
sa(302584981,0,302500884,0).
sa(302588099,0,300904548,0).
sa(302588099,0,301227137,0).
sa(302588099,0,302037272,0).
sa(302588099,0,302502163,0).
| kraison/nlp | prolog/wn_sa.pl | Perl | mit | 94,892 |
package HTTP::AppServer::Plugin::HTTPAuth;
# Plugin for HTTP::AppServer uses HTTP authentication to
# authenticate a client. The authentication works based
# on a certain handler regex.
# 2010 by Tom Kirchner
#use 5.010000;
use strict;
use warnings;
use MIME::Base64;
use HTTP::AppServer::Plugin;
use base qw(HTTP::AppServer::Plugin);
our $VERSION = '0.01';
my $Logins = {};
my $URLs = [];
# called by the server when the plugin is installed
# to determine which routes are handled by the plugin
# Called by the server when the plugin is installed.
# Reads the plugin options (Logins, URLs, LoginsFile), publishes them as
# server properties, and returns the URL handler table for this plugin.
#
# Returns a list of (regex => handler) pairs; this plugin installs a
# single match-all handler so it can inspect every requested URL.
sub init
{
	my ($class, $server, %options) = @_;

	$Logins = $options{'Logins'} if exists $options{'Logins'};
	$URLs   = $options{'URLs'}   if exists $options{'URLs'};

	if (exists $options{'LoginsFile'}) {
		my $filename = $options{'LoginsFile'};
		# Three-arg open with a lexical handle; the old code used a global
		# bareword handle and kept reading even after the open had failed.
		if (open(my $fh, '<', $filename)) {
			while (my $line = <$fh>) {
				chomp $line;
				# Split on the first colon only, so the password field may
				# itself contain ':' (htpasswd-style "username:password").
				my ($username, $password) = split /:/, $line, 2;
				$Logins->{$username} = $password
					if defined $username && length $username && defined $password;
			}
			close $fh;
		}
		else {
			print STDERR "HTTPAuth: Failed to open loginsfile '$filename': $! $@\n";
		}
	}

	# hash that contains all active http login accounts
	$server->set('httpauth_logins', $Logins);
	# list of restricted area URL patterns
	$server->set('httpauth_urls', $URLs);

	# the plugin installs a match-all URL handler
	return (
		'^(.*)$' => \&_auth,
	);
}
# Match-all URL handler.
# Performs HTTP Basic authentication for URLs matching any configured
# restricted-area pattern.  Returns 1 when the request was fully handled
# (a 401 challenge was sent), 0 to let other handlers process the URL.
sub _auth
{
	my ($server, $cgi, $url) = @_;

	# Only guard URLs matching one of the configured restricted patterns.
	if (scalar grep { ($url =~ /$_/) == 1 } @{$server->httpauth_urls()}) {
		my $auth = $cgi->http('Authorization');
		my $authorized = 0;
		if (defined $auth) {
			# Header form: "Basic <base64(username:password)>".
			my ($prefix, $encoded) = split /\s/, $auth;
			if (defined $encoded) {
				# Split on the first ':' only so passwords may contain colons;
				# guard against malformed/empty credentials before comparing.
				my ($username, $password) = split /:/, decode_base64($encoded), 2;
				if (defined $username && defined $password
					&& exists $server->httpauth_logins()->{$username}
					&& $server->httpauth_logins()->{$username} eq $password) {
					$authorized = 1;
				}
			}
		}
		unless ($authorized) {
			# challenge the client to authenticate itself
			print
				"HTTP/1.0 401 Unauthorized\r\n".
				$cgi->header(
					-WWW_Authenticate => 'Basic realm="MySite"',
				);
			return 1;
		}
	}
	return 0;
}
1;
__END__
=head1 NAME
HTTP::AppServer::Plugin::HTTPAuth - Plugin for HTTP::AppServer uses HTTP authentication to authenticate a client. The authentication works based on a certain handler regex.
=head1 SYNOPSIS
use HTTP::AppServer;
my $server = HTTP::AppServer->new();
$server->plugin('HTTPAuth', Logins => {guest => '', mrx => 'pass'}, URLs => ['^\/admin']);
=head1 DESCRIPTION
Plugin for HTTP::AppServer uses HTTP authentication to authenticate a client.
The authentication works based on a certain handler regex.
=head2 Plugin configuration
=head3 Logins => I<hash>
A hash containing the available accounts that are allowed to
access the restricted URLs, e.g.:
..., Logins => {guest => '', mrx => 'pass'}, ...
=head3 URLs => I<array>
This is a list of restricted URLs. When an URL is accessed that matches
any regular expression in this list, an HTTP authorization is performed.
If the authorization fails an error page is returned. In all other
cases (URL not restricted or authorization was successful) other
handlers are allowed to process the URL.
=head3 LoginsFile => I<filename>
This can be supplied additionally to the Logins option.
The account information is then read from a file that has the
format of normal .htpasswd files, e.g.
username1:password
username2:password
...
where I<password> is a Base64-encoded password.
=head2 Installed URL handlers
HTTPAuth installs a binding to the URL '^(.*)$', which means
it matches everything. It allows for further processing after
that if the URL is not restricted (is not contained in the URLs
option when loading the plugin).
=head2 Installed server properties
=head3 httpauth_logins
This is a reference to the Logins that are configured when loading the plugin.
=head3 httpauth_urls
This is a reference to the URLs that are configured when loading the plugin.
=head2 Installed server methods
None.
=head1 SEE ALSO
HTTP::AppServer, HTTP::AppServer::Plugin
=head1 AUTHOR
Tom Kirchner, E<lt>tom@tkirchner.comE<gt>
=head1 COPYRIGHT AND LICENSE
Copyright (C) 2010 by Tom Kirchner
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself, either Perl version 5.10.0 or,
at your option, any later version of Perl 5 you may have available.
=cut
| btovar/cvmfs | test/mock_services/HTTP/AppServer/Plugin/HTTPAuth.pm | Perl | bsd-3-clause | 4,437 |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by mktables from the Unicode
# database, Version 6.1.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly.
return <<'END';
19E0 19FF
END
| liuyangning/WX_web | xampp/perl/lib/unicore/lib/Blk/KhmerSym.pl | Perl | mit | 421 |
#-----------------------------------------------------------
# legacy_tln.pl
#
#
# Change history
# 20120620 - modified legacy.pl to legacy_tln.pl
# 20090429 - legacy.pl created
#
# Reference: http://support.microsoft.com/kb/310592
#
#
# Analysis Tip:
# The keys of interested begin with LEGACY_<servicename>, for example,
# "LEGACY_EVENTSYSTEM". The LastWrite time on this key seems to indicate
# the first time that the serivce was launched. The LastWrite time on
# keys named, for example, "LEGACY_EVENTSYSTEM\0000", appear to indicate
# the most recent time that the service was launched. One example to look
# for is services related to malware/lateral movement, such as PSExec.
#
# copyright 2012 Quantum Analytics Research, LLC
# Author: H. Carvey, keydet89@yahoo.com
#-----------------------------------------------------------
package legacy_tln;
# Plugin metadata consumed by the RegRipper framework.
my %config = (
    hive          => "System",
    hasShortDescr => 1,
    hasDescr      => 0,
    hasRefs       => 0,
    osmask        => 22,
    version       => 20120620,
);

# Standard RegRipper plugin accessors.
sub getConfig     { return %config }
sub getShortDescr { return "Lists LEGACY_* entries in Enum\\Root key in TLN format" }
sub getDescr      { }
sub getRefs       { }
sub getHive       { return $config{hive} }
sub getVersion    { return $config{version} }

my $VERSION = getVersion();
# Entry point called by the RegRipper framework with the path to a SYSTEM hive.
# Walks <CurrentControlSet>\Enum\Root, collects the LastWrite timestamps of
# every LEGACY_* service key (and of its numbered subkeys, with DeviceDesc
# when readable) and reports them in pipe-delimited TLN (timeline) format.
sub pluginmain {
my $class = shift;
my $hive = shift;
my $reg = Parse::Win32Registry->new($hive);
my $root_key = $reg->get_root_key();
# First thing to do is get the ControlSet00x marked current...this is
# going to be used over and over again in plugins that access the system
# file
my $current;
my $key_path = 'Select';
my $key;
if ($key = $root_key->get_subkey($key_path)) {
$current = $key->get_value("Current")->get_data();
my $ccs = "ControlSet00".$current;
my $root_path = $ccs."\\Enum\\Root";
# %legacy maps epoch timestamp -> list of key names last written at that time
my %legacy;
if (my $root = $root_key->get_subkey($root_path)) {
my @sk = $root->get_list_of_subkeys();
if (scalar(@sk) > 0) {
foreach my $s (@sk) {
my $name = $s->get_name();
# only LEGACY_* keys are of interest for program-execution evidence
next unless ($name =~ m/^LEGACY_/);
push(@{$legacy{$s->get_timestamp()}},$name);
# eval guards against subkeys/values that cannot be read
eval {
my @s_sk = $s->get_list_of_subkeys();
if (scalar(@s_sk) > 0) {
foreach my $s_s (@s_sk) {
my $desc;
eval {
$desc = $s_s->get_value("DeviceDesc")->get_data();
push(@{$legacy{$s_s->get_timestamp()}},$name."\\".$s_s->get_name()." - ".$desc);
};
# no readable DeviceDesc value: record the subkey name alone
push(@{$legacy{$s_s->get_timestamp()}},$name."\\".$s_s->get_name()) if ($@);
}
}
};
}
}
else {
::rptMsg($root_path." has no subkeys.");
}
# emit newest-first, one TLN line per recorded key name
foreach my $t (reverse sort {$a <=> $b} keys %legacy) {
foreach my $item (@{$legacy{$t}}) {
::rptMsg($t."|REG|||[Program Execution] - $item");
}
# ::rptMsg(gmtime($t)." (UTC)");
# foreach my $item (@{$legacy{$t}}) {
# ::rptMsg(" ".$item);
# }
}
}
else {
::rptMsg($root_path." not found.");
}
}
else {
::rptMsg($key_path." not found.");
}
}
1; | APriestman/autopsy | thirdparty/rr-full/plugins/legacy_tln.pl | Perl | apache-2.0 | 3,065 |
# The documentation is at the __END__
package Win32::OLE::Enum;
1;
# everything is pure XS in Win32::OLE::Enum
# - new
# - DESTROY
#
# - All
# - Clone
# - Next
# - Reset
# - Skip
__END__
=head1 NAME
Win32::OLE::Enum - OLE Automation Collection Objects
=head1 SYNOPSIS
my $Sheets = $Excel->Workbooks(1)->Worksheets;
my $Enum = Win32::OLE::Enum->new($Sheets);
my @Sheets = $Enum->All;
while (defined(my $Sheet = $Enum->Next)) { ... }
=head1 DESCRIPTION
This module provides an interface to OLE collection objects from
Perl. It defines an enumerator object closely mirroring the
functionality of the IEnumVARIANT interface.
Please note that the Reset() method is not available in all implementations
of OLE collections (like Excel 7). In that case the Enum object is good
only for a single walk through of the collection.
=head2 Functions/Methods
=over 8
=item Win32::OLE::Enum->new($object)
Creates an enumerator for $object, which must be a valid OLE collection
object. Note that correctly implemented collection objects must support
the C<Count> and C<Item> methods, so creating an enumerator is not always
necessary.
=item $Enum->All()
Returns a list of all objects in the collection. You have to call
$Enum->Reset() before the enumerator can be used again. The previous
position in the collection is lost.
This method can also be called as a class method:
my @list = Win32::OLE::Enum->All($Collection);
=item $Enum->Clone()
Returns a clone of the enumerator maintaining the current position within
the collection (if possible). Note that the C<Clone> method is often not
implemented. Use $Enum->Clone() in an eval block to avoid dying if you
are not sure that Clone is supported.
=item $Enum->Next( [$count] )
Returns the next element of the collection. In a list context the optional
$count argument specifies the number of objects to be returned. In a scalar
context only the last of at most $count retrieved objects is returned. The
default for $count is 1.
=item $Enum->Reset()
Resets the enumeration sequence to the beginning. There is no guarantee that
the exact same set of objects will be enumerated again (e.g. when enumerating
files in a directory). The methods return value indicates the success of the
operation. (Note that the Reset() method seems to be unimplemented in some
applications like Excel 7. Use it in an eval block to avoid dying.)
=item $Enum->Skip( [$count] )
Skip the next $count elements of the enumeration. The default for $count is 1.
The function returns TRUE if at least $count elements could be skipped. It
returns FALSE if not enough elements were left.
=back
=head1 AUTHORS/COPYRIGHT
This module is part of the Win32::OLE distribution.
=cut
| Dokaponteam/ITF_Project | xampp/perl/vendor/lib/Win32/OLE/Enum.pm | Perl | mit | 2,744 |
# Copyright (c) 2010 - Action Without Borders
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
package IF::StatusMessage;

use strict;

########################################################################
# Class methods

# Convenience constructors, one per message severity.
sub newInfoMessage         { return $_[0]->_newMessage("INFO",         $_[1]) }
sub newWarningMessage      { return $_[0]->_newMessage("WARNING",      $_[1]) }
sub newErrorMessage        { return $_[0]->_newMessage("ERROR",        $_[1]) }
sub newConfirmationMessage { return $_[0]->_newMessage("CONFIRMATION", $_[1]) }

# Shared constructor: blesses a fresh hash and stores the type and text.
sub _newMessage {
    my ($className, $type, $text) = @_;
    my $self = bless {}, $className;
    $self->setType($type);
    $self->setText($text);
    return $self;
}

########################################################################
# Instance methods

# CSS class matching the message type; anything unrecognized falls back
# to the info style.
sub cssClass {
    my ($self) = @_;
    my %class_for = (
        ERROR        => "status-messages-error",
        WARNING      => "status-messages-warning",
        CONFIRMATION => "status-messages-confirmation",
    );
    return $class_for{$self->type()} || "status-messages-info";
}

############################################

# Accessors for the message type and text.
sub type    { return $_[0]->{type} }
sub setType { $_[0]->{type} = $_[1] }

sub typeIsError        { return $_[0]->type() eq 'ERROR' }
sub typeIsWarning      { return $_[0]->type() eq 'WARNING' }
sub typeIsInfo         { return $_[0]->type() eq 'INFO' }
sub typeIsConfirmation { return $_[0]->type() eq 'CONFIRMATION' }

sub text    { return $_[0]->{text} }
sub setText { $_[0]->{text} = $_[1] }

1;
| quile/if-framework | framework/lib/IF/StatusMessage.pm | Perl | mit | 2,950 |
use v6;
# Make the local lib/ directory searchable so the MPD module loads.
use lib 'lib';
use MPD;
# Manual smoke test: connect to an MPD daemon on localhost:6600, then
# print its state and the current song.
my $a = MPD.new('localhost', 6600);
say "State: {$a.state}";
say $a.current-song;
| tadzik/perl6-MPD | test.pl | Perl | mit | 115 |
% SWI-Prolog pack manifest for the prologmud_samples pack.
name(prologmud_samples).
version('2.0.3').
title('Online text adventure game - Sample').
% Authors and current packager/maintainer of the pack.
author('Douglas R. Miles','logicmoo@gmail.com').
author('Douglas Miles', 'http://www.linkedin.com/in/logicmoo' ).
packager('logicmoo', 'https://github.com/logicmoo/' ).
maintainer('logicmoo', 'https://github.com/logicmoo/' ).
home('https://github.com/logicmoo/prologmud_samples.git' ).
download( 'https://github.com/logicmoo/prologmud_samples/release/*.zip' ).
% Packs that must be installed alongside this one.
requires(prologmud).
requires(wam_common_lisp).
| TeamSPoon/logicmoo_workspace | packs_sys/prologmud_samples/pack.pl | Perl | mit | 499 |
package # This is JSON::backportPP
JSON::PP;
# JSON-2.0
use 5.005;
use strict;
use base qw(Exporter);
use overload ();
use Carp ();
use B ();
#use Devel::Peek;
use vars qw($VERSION);
$VERSION = '2.27204';
@JSON::PP::EXPORT = qw(encode_json decode_json from_json to_json);
# instead of hash-access, i tried index-access for speed.
# but this method is not faster than what i expected. so it will be changed.
use constant P_ASCII => 0;
use constant P_LATIN1 => 1;
use constant P_UTF8 => 2;
use constant P_INDENT => 3;
use constant P_CANONICAL => 4;
use constant P_SPACE_BEFORE => 5;
use constant P_SPACE_AFTER => 6;
use constant P_ALLOW_NONREF => 7;
use constant P_SHRINK => 8;
use constant P_ALLOW_BLESSED => 9;
use constant P_CONVERT_BLESSED => 10;
use constant P_RELAXED => 11;
use constant P_LOOSE => 12;
use constant P_ALLOW_BIGNUM => 13;
use constant P_ALLOW_BAREKEY => 14;
use constant P_ALLOW_SINGLEQUOTE => 15;
use constant P_ESCAPE_SLASH => 16;
use constant P_AS_NONBLESSED => 17;
use constant P_ALLOW_UNKNOWN => 18;
use constant OLD_PERL => $] < 5.008 ? 1 : 0;
# Generate the boolean option accessors at compile time.  For each property
# name two methods are created via string eval: a chainable setter (e.g.
# ascii(), utf8()) that flips the matching P_* bit in $self->{PROPS}, and a
# get_* reader returning 1 or ''.
BEGIN {
my @xs_compati_bit_properties = qw(
latin1 ascii utf8 indent canonical space_before space_after allow_nonref shrink
allow_blessed convert_blessed relaxed allow_unknown
);
my @pp_bit_properties = qw(
allow_singlequote allow_bignum loose
allow_barekey escape_slash as_nonblessed
);
# On perls older than 5.8, load a compatibility helper for Unicode handling.
# Helper module sets @JSON::PP::_properties.
if ($] < 5.008 ) {
my $helper = $] >= 5.006 ? 'JSON::backportPP::Compat5006' : 'JSON::backportPP::Compat5005';
eval qq| require $helper |;
if ($@) { Carp::croak $@; }
}
for my $name (@xs_compati_bit_properties, @pp_bit_properties) {
my $flag_name = 'P_' . uc($name);
# Sigils are backslash-escaped so they survive qq// interpolation; only
# $name and $flag_name are interpolated into the generated code.
eval qq/
sub $name {
my \$enable = defined \$_[1] ? \$_[1] : 1;
if (\$enable) {
\$_[0]->{PROPS}->[$flag_name] = 1;
}
else {
\$_[0]->{PROPS}->[$flag_name] = 0;
}
\$_[0];
}
sub get_$name {
\$_[0]->{PROPS}->[$flag_name] ? 1 : '';
}
/;
}
}
# Functions

# Membership sets of option names (built via map); retained here for the
# functional interface.
my %encode_allow_method
= map {($_ => 1)} qw/utf8 pretty allow_nonref latin1 self_encode escape_slash
allow_blessed convert_blessed indent indent_length allow_bignum
as_nonblessed
/;
my %decode_allow_method
= map {($_ => 1)} qw/utf8 allow_nonref loose allow_singlequote allow_bignum
allow_barekey max_size relaxed/;
my $JSON; # cache

# Functional interface: encode/decode through a shared, lazily created,
# UTF-8 enabled JSON::PP instance.
sub encode_json ($) { # encode
($JSON ||= __PACKAGE__->new->utf8)->encode(@_);
}
sub decode_json { # decode
($JSON ||= __PACKAGE__->new->utf8)->decode(@_);
}

# Obsoleted
sub to_json($) {
Carp::croak ("JSON::PP::to_json has been renamed to encode_json.");
}
sub from_json($) {
Carp::croak ("JSON::PP::from_json has been renamed to decode_json.");
}
# Methods
# Constructor: returns a fresh JSON::PP object with default settings.
# max_depth guards against runaway nesting; indent_length matches JSON::XS
# output; fallback is invoked for values no other encoder branch handles.
sub new {
    my ($class) = @_;
    my %defaults = (
        max_depth     => 512,
        max_size      => 0,
        indent        => 0,
        FLAGS         => 0,
        fallback      => sub { encode_error('Invalid value. JSON can only reference.') },
        indent_length => 3,
    );
    return bless { %defaults }, $class;
}
# OO interface: encode/decode delegate to the pure-Perl engine below.
sub encode {
return $_[0]->PP_encode_json($_[1]);
}
sub decode {
return $_[0]->PP_decode_json($_[1], 0x00000000);
}
# As decode(), but also returns the number of characters consumed,
# permitting trailing garbage after the JSON text.
sub decode_prefix {
return $_[0]->PP_decode_json($_[1], 0x00000001);
}
# accessor
# pretty printing
# Convenience toggle: enable (or, with a false argument, disable)
# indentation and separator spacing in one call.
sub pretty {
my ($self, $v) = @_;
my $enable = defined $v ? $v : 1;
if ($enable) { # indent_length(3) for JSON::XS compatibility
$self->indent(1)->indent_length(3)->space_before(1)->space_after(1);
}
else {
$self->indent(0)->space_before(0)->space_after(0);
}
$self;
}
# etc
# Chainable setter for the maximum accepted nesting depth; an undef
# argument resets it to the (effectively unlimited) 0x80000000.
sub max_depth {
my $max = defined $_[1] ? $_[1] : 0x80000000;
$_[0]->{max_depth} = $max;
$_[0];
}
sub get_max_depth { $_[0]->{max_depth}; }
# Chainable setter for the maximum accepted JSON text size in bytes
# (0 = no limit).
sub max_size {
my $max = defined $_[1] ? $_[1] : 0;
$_[0]->{max_size} = $max;
$_[0];
}
sub get_max_size { $_[0]->{max_size}; }
# Install a callback invoked for decoded JSON objects; F_HOOK caches
# whether any filter callback is active.
sub filter_json_object {
$_[0]->{cb_object} = defined $_[1] ? $_[1] : 0;
$_[0]->{F_HOOK} = ($_[0]->{cb_object} or $_[0]->{cb_sk_object}) ? 1 : 0;
$_[0];
}
# Install a callback for decoded single-key objects with the given key.
sub filter_json_single_key_object {
if (@_ > 1) {
$_[0]->{cb_sk_object}->{$_[1]} = $_[2];
}
$_[0]->{F_HOOK} = ($_[0]->{cb_object} or $_[0]->{cb_sk_object}) ? 1 : 0;
$_[0];
}
# Spaces per indent level (accepted range 0..15); out-of-range or undef
# values only warn and leave the setting unchanged.
sub indent_length {
if (!defined $_[1] or $_[1] > 15 or $_[1] < 0) {
Carp::carp "The acceptable range of indent_length() is 0 to 15.";
}
else {
$_[0]->{indent_length} = $_[1];
}
$_[0];
}
sub get_indent_length {
$_[0]->{indent_length};
}
# Key sort order used by PP_encode_json: a code ref, a sort-sub name
# (any non-numeric string), or any other true value for plain cmp.
sub sort_by {
$_[0]->{sort_by} = defined $_[1] ? $_[1] : 1;
$_[0];
}
sub allow_bigint {
Carp::carp("allow_bigint() is obsoleted. use allow_bignum() insted.");
}
###############################
###
### Perl => JSON
###
{ # Convert
my $max_depth;
my $indent;
my $ascii;
my $latin1;
my $utf8;
my $space_before;
my $space_after;
my $canonical;
my $allow_blessed;
my $convert_blessed;
my $indent_length;
my $escape_slash;
my $bignum;
my $as_nonblessed;
my $depth;
my $indent_count;
my $keysort;
# Top-level encoder: snapshots the object's option flags into the
# file-scoped lexicals shared by the encoder subs, then serializes $obj.
sub PP_encode_json {
my $self = shift;
my $obj = shift;
$indent_count = 0;
$depth = 0;
my $idx = $self->{PROPS};
($ascii, $latin1, $utf8, $indent, $canonical, $space_before, $space_after, $allow_blessed,
$convert_blessed, $escape_slash, $bignum, $as_nonblessed)
= @{$idx}[P_ASCII .. P_SPACE_AFTER, P_ALLOW_BLESSED, P_CONVERT_BLESSED,
P_ESCAPE_SLASH, P_ALLOW_BIGNUM, P_AS_NONBLESSED];
($max_depth, $indent_length) = @{$self}{qw/max_depth indent_length/};
# canonical mode sorts keys with cmp unless sort_by supplies a code ref
# or a sort-sub name (any non-numeric string)
$keysort = $canonical ? sub { $a cmp $b } : undef;
if ($self->{sort_by}) {
$keysort = ref($self->{sort_by}) eq 'CODE' ? $self->{sort_by}
: $self->{sort_by} =~ /\D+/ ? $self->{sort_by}
: sub { $a cmp $b };
}
encode_error("hash- or arrayref expected (not a simple scalar, use allow_nonref to allow this)")
if(!ref $obj and !$idx->[ P_ALLOW_NONREF ]);
my $str = $self->object_to_json($obj);
$str .= "\n" if ( $indent ); # JSON::XS 2.26 compatible
# with no explicit output encoding requested, return a character string
unless ($ascii or $latin1 or $utf8) {
utf8::upgrade($str);
}
if ($idx->[ P_SHRINK ]) {
utf8::downgrade($str, 1);
}
return $str;
}
# Dispatch on the type of $obj: hashes and arrays recurse, blessed objects
# honor the convert_blessed/allow_blessed/bignum settings, anything else is
# treated as a leaf value.
sub object_to_json {
my ($self, $obj) = @_;
my $type = ref($obj);
if($type eq 'HASH'){
return $self->hash_to_json($obj);
}
elsif($type eq 'ARRAY'){
return $self->array_to_json($obj);
}
elsif ($type) { # blessed object?
if (blessed($obj)) {
return $self->value_to_json($obj) if ( $obj->isa('JSON::PP::Boolean') );
if ( $convert_blessed and $obj->can('TO_JSON') ) {
my $result = $obj->TO_JSON();
if ( defined $result and ref( $result ) ) {
# TO_JSON must build a new structure, not return the object
# itself, or the recursion below would never terminate
if ( refaddr( $obj ) eq refaddr( $result ) ) {
encode_error( sprintf(
"%s::TO_JSON method returned same object as was passed instead of a new one",
ref $obj
) );
}
}
return $self->object_to_json( $result );
}
return "$obj" if ( $bignum and _is_bignum($obj) );
return $self->blessed_to_json($obj) if ($allow_blessed and $as_nonblessed); # will be removed.
encode_error( sprintf("encountered object '%s', but neither allow_blessed "
. "nor convert_blessed settings are enabled", $obj)
) unless ($allow_blessed);
return 'null';
}
else {
return $self->value_to_json($obj);
}
}
else{
return $self->value_to_json($obj);
}
}
# Serialize a hash ref as a JSON object, honoring the nesting-depth limit,
# key sorting (_sort) and the space_before/space_after separator options.
sub hash_to_json {
my ($self, $obj) = @_;
my @res;
encode_error("json text or perl structure exceeds maximum nesting level (max_depth set too low?)")
if (++$depth > $max_depth);
my ($pre, $post) = $indent ? $self->_up_indent() : ('', '');
my $del = ($space_before ? ' ' : '') . ':' . ($space_after ? ' ' : '');
for my $k ( _sort( $obj ) ) {
if ( OLD_PERL ) { utf8::decode($k) } # key for Perl 5.6 / be optimized
push @res, string_to_json( $self, $k )
. $del
. ( $self->object_to_json( $obj->{$k} ) || $self->value_to_json( $obj->{$k} ) );
}
--$depth;
$self->_down_indent() if ($indent);
return '{' . ( @res ? $pre : '' ) . ( @res ? join( ",$pre", @res ) . $post : '' ) . '}';
}
# Serialize an array ref as a JSON array, with the same depth accounting
# and indentation handling as hash_to_json.
sub array_to_json {
my ($self, $obj) = @_;
my @res;
encode_error("json text or perl structure exceeds maximum nesting level (max_depth set too low?)")
if (++$depth > $max_depth);
my ($pre, $post) = $indent ? $self->_up_indent() : ('', '');
for my $v (@$obj){
push @res, $self->object_to_json($v) || $self->value_to_json($v);
}
--$depth;
$self->_down_indent() if ($indent);
return '[' . ( @res ? $pre : '' ) . ( @res ? join( ",$pre", @res ) . $post : '' ) . ']';
}
# Serialize a leaf value: undef becomes null, purely numeric scalars pass
# through unquoted, plain strings get escaped and quoted, booleans and
# scalar refs get their JSON spellings; other refs obey allow_unknown.
sub value_to_json {
my ($self, $value) = @_;
return 'null' if(!defined $value);
my $b_obj = B::svref_2object(\$value); # for round trip problem
my $flags = $b_obj->FLAGS;
# emit as a bare number only when the scalar carries a numeric slot and
# no string slot, so "123" (a string) stays quoted
return $value # as is
if $flags & ( B::SVp_IOK | B::SVp_NOK ) and !( $flags & B::SVp_POK ); # SvTYPE is IV or NV?
my $type = ref($value);
if(!$type){
return string_to_json($self, $value);
}
elsif( blessed($value) and $value->isa('JSON::PP::Boolean') ){
return $$value == 1 ? 'true' : 'false';
}
elsif ($type) {
# blessed ref (StrVal form "Class=TYPE(addr)"): encode its stringification
if ((overload::StrVal($value) =~ /=(\w+)/)[0]) {
return $self->value_to_json("$value");
}
# \1 and \0 are the conventional spellings of true/false
if ($type eq 'SCALAR' and defined $$value) {
return $$value eq '1' ? 'true'
: $$value eq '0' ? 'false'
: $self->{PROPS}->[ P_ALLOW_UNKNOWN ] ? 'null'
: encode_error("cannot encode reference to scalar");
}
if ( $self->{PROPS}->[ P_ALLOW_UNKNOWN ] ) {
return 'null';
}
else {
if ( $type eq 'SCALAR' or $type eq 'REF' ) {
encode_error("cannot encode reference to scalar");
}
else {
encode_error("encountered $value, but JSON can only represent references to arrays or hashes");
}
}
}
else {
return $self->{fallback}->($value)
if ($self->{fallback} and ref($self->{fallback}) eq 'CODE');
return 'null';
}
}
# Characters that must (or may, like the single quote) be escaped in a JSON
# string, mapped to their backslash sequences.
my %esc = (
"\n" => '\n',
"\r" => '\r',
"\t" => '\t',
"\f" => '\f',
"\b" => '\b',
"\"" => '\"',
"\\" => '\\\\',
"\'" => '\\\'',
);
# Escape and quote a Perl string as a JSON string literal, then apply the
# configured ascii/latin1/utf8 output transformation.
sub string_to_json {
my ($self, $arg) = @_;
$arg =~ s/([\x22\x5c\n\r\t\f\b])/$esc{$1}/g;
$arg =~ s/\//\\\//g if ($escape_slash);
# remaining C0 control characters become \u00XX escapes
$arg =~ s/([\x00-\x08\x0b\x0e-\x1f])/'\\u00' . unpack('H2', $1)/eg;
if ($ascii) {
$arg = JSON_PP_encode_ascii($arg);
}
if ($latin1) {
$arg = JSON_PP_encode_latin1($arg);
}
if ($utf8) {
utf8::encode($arg);
}
return '"' . $arg . '"';
}
# Encode a blessed ref as plain data (as_nonblessed mode): hash- and
# array-based objects recurse, everything else becomes null.
sub blessed_to_json {
my $reftype = reftype($_[1]) || '';
if ($reftype eq 'HASH') {
return $_[0]->hash_to_json($_[1]);
}
elsif ($reftype eq 'ARRAY') {
return $_[0]->array_to_json($_[1]);
}
else {
return 'null';
}
}
# Abort encoding by croaking with the given message.
sub encode_error {
my $error = shift;
Carp::croak "$error";
}
# Hash keys in $keysort order when canonical/sort_by is active, raw order otherwise.
sub _sort {
defined $keysort ? (sort $keysort (keys %{$_[0]})) : keys %{$_[0]};
}
# Step one indent level deeper; returns the (prefix, postfix) whitespace
# for the structure being opened.
sub _up_indent {
my $self = shift;
my $space = ' ' x $indent_length;
my ($pre,$post) = ('','');
$post = "\n" . $space x $indent_count;
$indent_count++;
$pre = "\n" . $space x $indent_count;
return ($pre,$post);
}
sub _down_indent { $indent_count--; }
# Snapshot of the encoder's mutable state (depth and indent level).
sub PP_encode_box {
{
depth => $depth,
indent_count => $indent_count,
};
}
} # Convert
# Escape every code point above 0x7F as \uXXXX (a surrogate pair for
# characters beyond the BMP), leaving plain ASCII characters untouched.
sub _encode_ascii {
    my @out;
    for my $cp (unpack('U*', $_[0])) {
        if ($cp <= 127) {
            push @out, chr($cp);
        }
        elsif ($cp <= 65535) {
            push @out, sprintf('\u%04x', $cp);
        }
        else {
            push @out, sprintf('\u%x\u%x', _encode_surrogates($cp));
        }
    }
    return join('', @out);
}

# As _encode_ascii, but code points up to 0xFF pass through unescaped so
# the result remains valid Latin-1.
sub _encode_latin1 {
    my @out;
    for my $cp (unpack('U*', $_[0])) {
        if ($cp <= 255) {
            push @out, chr($cp);
        }
        elsif ($cp <= 65535) {
            push @out, sprintf('\u%04x', $cp);
        }
        else {
            push @out, sprintf('\u%x\u%x', _encode_surrogates($cp));
        }
    }
    return join('', @out);
}

# Split a supplementary-plane code point into its UTF-16 surrogate halves
# (algorithm from perlunicode; callers format with %x, which truncates the
# fractional part of the division).
sub _encode_surrogates {
    my $offset = $_[0] - 0x10000;
    return ($offset / 0x400 + 0xD800, $offset % 0x400 + 0xDC00);
}

# True when the value is a Math::BigInt or Math::BigFloat instance.
sub _is_bignum {
    $_[0]->isa('Math::BigInt') or $_[0]->isa('Math::BigFloat');
}
#
# JSON => Perl
#
# Determine at compile time how many decimal digits an integer literal can
# have on this perl before it degrades to floating point, detected by the
# appearance of scientific "e" notation in its stringification.
my $max_intsize;
BEGIN {
my $checkint = 1111;
for my $d (5..64) {
$checkint .= 1;
my $int = eval qq| $checkint |;
if ($int =~ /[eE]/) {
$max_intsize = $d - 1;
last;
}
}
}
{ # PARSE
my %escapes = ( # by Jeremy Muhlich <jmuhlich [at] bitflood.org>
b => "\x8",
t => "\x9",
n => "\xA",
f => "\xC",
r => "\xD",
'\\' => '\\',
'"' => '"',
'/' => '/',
);
my $text; # json data
my $at; # offset
my $ch; # 1chracter
my $len; # text length (changed according to UTF8 or NON UTF8)
# INTERNAL
my $depth; # nest counter
my $encoding; # json text encoding
my $is_valid_utf8; # temp variable
my $utf8_len; # utf8 byte length
# FLAGS
my $utf8; # must be utf8
my $max_depth; # max nest number of objects and arrays
my $max_size;
my $relaxed;
my $cb_object;
my $cb_sk_object;
my $F_HOOK;
my $allow_bigint; # using Math::BigInt
my $singlequote; # loosely quoting
my $loose; #
my $allow_barekey; # bareKey
# $opt flag
# 0x00000001 .... decode_prefix
# 0x10000000 .... incr_parse
sub PP_decode_json {
my ($self, $opt); # $opt is an effective flag during this decode_json.
($self, $text, $opt) = @_;
($at, $ch, $depth) = (0, '', 0);
if ( !defined $text or ref $text ) {
decode_error("malformed JSON string, neither array, object, number, string or atom");
}
my $idx = $self->{PROPS};
($utf8, $relaxed, $loose, $allow_bigint, $allow_barekey, $singlequote)
= @{$idx}[P_UTF8, P_RELAXED, P_LOOSE .. P_ALLOW_SINGLEQUOTE];
if ( $utf8 ) {
utf8::downgrade( $text, 1 ) or Carp::croak("Wide character in subroutine entry");
}
else {
utf8::upgrade( $text );
}
$len = length $text;
($max_depth, $max_size, $cb_object, $cb_sk_object, $F_HOOK)
= @{$self}{qw/max_depth max_size cb_object cb_sk_object F_HOOK/};
if ($max_size > 1) {
use bytes;
my $bytes = length $text;
decode_error(
sprintf("attempted decode of JSON text of %s bytes size, but max_size is set to %s"
, $bytes, $max_size), 1
) if ($bytes > $max_size);
}
# Currently no effect
# should use regexp
my @octets = unpack('C4', $text);
$encoding = ( $octets[0] and $octets[1]) ? 'UTF-8'
: (!$octets[0] and $octets[1]) ? 'UTF-16BE'
: (!$octets[0] and !$octets[1]) ? 'UTF-32BE'
: ( $octets[2] ) ? 'UTF-16LE'
: (!$octets[2] ) ? 'UTF-32LE'
: 'unknown';
white(); # remove head white space
my $valid_start = defined $ch; # Is there a first character for JSON structure?
my $result = value();
return undef if ( !$result && ( $opt & 0x10000000 ) ); # for incr_parse
decode_error("malformed JSON string, neither array, object, number, string or atom") unless $valid_start;
if ( !$idx->[ P_ALLOW_NONREF ] and !ref $result ) {
decode_error(
'JSON text must be an object or array (but found number, string, true, false or null,'
. ' use allow_nonref to allow this)', 1);
}
Carp::croak('something wrong.') if $len < $at; # we won't arrive here.
my $consumed = defined $ch ? $at - 1 : $at; # consumed JSON text length
white(); # remove tail white space
if ( $ch ) {
return ( $result, $consumed ) if ($opt & 0x00000001); # all right if decode_prefix
decode_error("garbage after JSON object");
}
( $opt & 0x00000001 ) ? ( $result, $consumed ) : $result;
}
sub next_chr {
return $ch = undef if($at >= $len);
$ch = substr($text, $at++, 1);
}
sub value {
white();
return if(!defined $ch);
return object() if($ch eq '{');
return array() if($ch eq '[');
return string() if($ch eq '"' or ($singlequote and $ch eq "'"));
return number() if($ch =~ /[0-9]/ or $ch eq '-');
return word();
}
sub string {
my ($i, $s, $t, $u);
my $utf16;
my $is_utf8;
($is_valid_utf8, $utf8_len) = ('', 0);
$s = ''; # basically UTF8 flag on
if($ch eq '"' or ($singlequote and $ch eq "'")){
my $boundChar = $ch;
OUTER: while( defined(next_chr()) ){
if($ch eq $boundChar){
next_chr();
if ($utf16) {
decode_error("missing low surrogate character in surrogate pair");
}
utf8::decode($s) if($is_utf8);
return $s;
}
elsif($ch eq '\\'){
next_chr();
if(exists $escapes{$ch}){
$s .= $escapes{$ch};
}
elsif($ch eq 'u'){ # UNICODE handling
my $u = '';
for(1..4){
$ch = next_chr();
last OUTER if($ch !~ /[0-9a-fA-F]/);
$u .= $ch;
}
# U+D800 - U+DBFF
if ($u =~ /^[dD][89abAB][0-9a-fA-F]{2}/) { # UTF-16 high surrogate?
$utf16 = $u;
}
# U+DC00 - U+DFFF
elsif ($u =~ /^[dD][c-fC-F][0-9a-fA-F]{2}/) { # UTF-16 low surrogate?
unless (defined $utf16) {
decode_error("missing high surrogate character in surrogate pair");
}
$is_utf8 = 1;
$s .= JSON_PP_decode_surrogates($utf16, $u) || next;
$utf16 = undef;
}
else {
if (defined $utf16) {
decode_error("surrogate pair expected");
}
if ( ( my $hex = hex( $u ) ) > 127 ) {
$is_utf8 = 1;
$s .= JSON_PP_decode_unicode($u) || next;
}
else {
$s .= chr $hex;
}
}
}
else{
unless ($loose) {
$at -= 2;
decode_error('illegal backslash escape sequence in string');
}
$s .= $ch;
}
}
else{
if ( ord $ch > 127 ) {
if ( $utf8 ) {
unless( $ch = is_valid_utf8($ch) ) {
$at -= 1;
decode_error("malformed UTF-8 character in JSON string");
}
else {
$at += $utf8_len - 1;
}
}
else {
utf8::encode( $ch );
}
$is_utf8 = 1;
}
if (!$loose) {
if ($ch =~ /[\x00-\x1f\x22\x5c]/) { # '/' ok
$at--;
decode_error('invalid character encountered while parsing JSON string');
}
}
$s .= $ch;
}
}
}
decode_error("unexpected end of string while parsing JSON string");
}
sub white {
while( defined $ch ){
if($ch le ' '){
next_chr();
}
elsif($ch eq '/'){
next_chr();
if(defined $ch and $ch eq '/'){
1 while(defined(next_chr()) and $ch ne "\n" and $ch ne "\r");
}
elsif(defined $ch and $ch eq '*'){
next_chr();
while(1){
if(defined $ch){
if($ch eq '*'){
if(defined(next_chr()) and $ch eq '/'){
next_chr();
last;
}
}
else{
next_chr();
}
}
else{
decode_error("Unterminated comment");
}
}
next;
}
else{
$at--;
decode_error("malformed JSON string, neither array, object, number, string or atom");
}
}
else{
if ($relaxed and $ch eq '#') { # correctly?
pos($text) = $at;
$text =~ /\G([^\n]*(?:\r\n|\r|\n|$))/g;
$at = pos($text);
next_chr;
next;
}
last;
}
}
}
sub array {
my $a = $_[0] || []; # you can use this code to use another array ref object.
decode_error('json text or perl structure exceeds maximum nesting level (max_depth set too low?)')
if (++$depth > $max_depth);
next_chr();
white();
if(defined $ch and $ch eq ']'){
--$depth;
next_chr();
return $a;
}
else {
while(defined($ch)){
push @$a, value();
white();
if (!defined $ch) {
last;
}
if($ch eq ']'){
--$depth;
next_chr();
return $a;
}
if($ch ne ','){
last;
}
next_chr();
white();
if ($relaxed and $ch eq ']') {
--$depth;
next_chr();
return $a;
}
}
}
decode_error(", or ] expected while parsing array");
}
# Parse a JSON object starting at the current '{' and return a
# hashref (possibly filtered through _json_object_hook when a
# filter callback is installed, signalled by $F_HOOK).  Enforces
# $max_depth; under $relaxed, a trailing comma is tolerated; under
# $allow_barekey, unquoted keys are accepted.
sub object {
my $o = $_[0] || {}; # you can use this code to use another hash ref object.
my $k;
decode_error('json text or perl structure exceeds maximum nesting level (max_depth set too low?)')
if (++$depth > $max_depth);
next_chr();
white();
# empty object: '}' immediately follows '{'
if(defined $ch and $ch eq '}'){
--$depth;
next_chr();
if ($F_HOOK) {
return _json_object_hook($o);
}
return $o;
}
else {
while (defined $ch) {
# bare (unquoted) keys only when the extension is enabled
$k = ($allow_barekey and $ch ne '"' and $ch ne "'") ? bareKey() : string();
white();
if(!defined $ch or $ch ne ':'){
$at--;
decode_error("':' expected");
}
next_chr();
$o->{$k} = value();
white();
last if (!defined $ch);
if($ch eq '}'){
--$depth;
next_chr();
if ($F_HOOK) {
return _json_object_hook($o);
}
return $o;
}
# pairs must be separated by commas
if($ch ne ','){
last;
}
next_chr();
white();
# relaxed extension: trailing comma before '}'
if ($relaxed and $ch eq '}') {
--$depth;
next_chr();
if ($F_HOOK) {
return _json_object_hook($o);
}
return $o;
}
}
}
$at--;
decode_error(", or } expected while parsing object/hash");
}
# Read an unquoted object key at the current position (used when the
# allow_barekey extension is enabled).  Consumes every character
# outside the JSON punctuation/control ranges; as the original
# author noted, this does not strictly follow Standard ECMA-262
# 3rd Edition.  Returns undef (not '') when nothing was consumed.
sub bareKey {
my $name;
for (;;) {
last if $ch !~ /[^\x00-\x23\x25-\x2F\x3A-\x40\x5B-\x5E\x60\x7B-\x7F]/;
$name .= $ch;
next_chr();
}
return $name;
}
# Parse one of the JSON atoms true/false/null starting at the char
# already loaded into $ch (so the 4-char peek begins at $at-1).
# 'false' is 5 chars, hence the extra one-char check for 'fals'.
sub word {
my $word = substr($text,$at-1,4);
if($word eq 'true'){
$at += 3;
next_chr;
return $JSON::PP::true;
}
elsif($word eq 'null'){
$at += 3;
next_chr;
return undef;
}
elsif($word eq 'fals'){
$at += 3;
if(substr($text,$at,1) eq 'e'){
$at++;
next_chr;
return $JSON::PP::false;
}
}
$at--; # for decode_error report
# tailor the error to whichever atom the input looked like
decode_error("'null' expected") if ($word =~ /^n/);
decode_error("'true' expected") if ($word =~ /^t/);
decode_error("'false' expected") if ($word =~ /^f/);
decode_error("malformed JSON string, neither array, object, number, string or atom");
}
# Parse a JSON number at the current position and return it as a
# Perl numeric scalar, as a string when the integer exceeds
# $max_intsize digits, or as Math::BigInt/Math::BigFloat when
# $allow_bigint is set.  Hex/oct forms are rejected per RFC4627
# (decode_error is raised before the capture lines below run).
sub number {
my $n = '';
my $v;
# According to RFC4627, hex or oct digits are invalid.
if($ch eq '0'){
my $peek = substr($text,$at,1);
my $hex = $peek =~ /[xX]/; # 0 or 1
if($hex){
decode_error("malformed number (leading zero must not be followed by another digit)");
($n) = ( substr($text, $at+1) =~ /^([0-9a-fA-F]+)/);
}
else{ # oct
($n) = ( substr($text, $at) =~ /^([0-7]+)/);
if (defined $n and length $n > 1) {
decode_error("malformed number (leading zero must not be followed by another digit)");
}
}
if(defined $n and length($n)){
if (!$hex and length($n) == 1) {
decode_error("malformed number (leading zero must not be followed by another digit)");
}
$at += length($n) + $hex;
next_chr;
return $hex ? hex($n) : oct($n);
}
}
# optional leading minus; a bare '-' with no digit is an error
if($ch eq '-'){
$n = '-';
next_chr;
if (!defined $ch or $ch !~ /\d/) {
decode_error("malformed number (no digits after initial minus)");
}
}
# integer part
while(defined $ch and $ch =~ /\d/){
$n .= $ch;
next_chr;
}
# optional fractional part: '.' must be followed by a digit
if(defined $ch and $ch eq '.'){
$n .= '.';
next_chr;
if (!defined $ch or $ch !~ /\d/) {
decode_error("malformed number (no digits after decimal point)");
}
else {
$n .= $ch;
}
while(defined(next_chr) and $ch =~ /\d/){
$n .= $ch;
}
}
# optional exponent: e/E, optional sign, then digits
if(defined $ch and ($ch eq 'e' or $ch eq 'E')){
$n .= $ch;
next_chr;
if(defined($ch) and ($ch eq '+' or $ch eq '-')){
$n .= $ch;
next_chr;
if (!defined $ch or $ch =~ /\D/) {
decode_error("malformed number (no digits after exp sign)");
}
$n .= $ch;
}
elsif(defined($ch) and $ch =~ /\d/){
$n .= $ch;
}
else {
decode_error("malformed number (no digits after exp sign)");
}
while(defined(next_chr) and $ch =~ /\d/){
$n .= $ch;
}
}
$v .= $n;
# integers with more than $max_intsize digits would lose precision
# in a native IV, so keep them as a string (or promote to BigInt)
if ($v !~ /[.eE]/ and length $v > $max_intsize) {
if ($allow_bigint) { # from Adam Sussman
require Math::BigInt;
return Math::BigInt->new($v);
}
else {
return "$v";
}
}
elsif ($allow_bigint) {
require Math::BigFloat;
return Math::BigFloat->new($v);
}
return 0+$v;
}
# Given the lead byte of a candidate UTF-8 sequence ($_[0]), read
# the full sequence out of $text (starting at $at-1) and return it
# if it is well-formed UTF-8 per the RFC 3629 table (overlong forms
# and surrogate ranges rejected), '' if malformed, or an empty
# return for a byte that cannot start a sequence.
# Side effect: sets the parser-scope $utf8_len to the expected length.
sub is_valid_utf8 {
$utf8_len = $_[0] =~ /[\x00-\x7F]/ ? 1
: $_[0] =~ /[\xC2-\xDF]/ ? 2
: $_[0] =~ /[\xE0-\xEF]/ ? 3
: $_[0] =~ /[\xF0-\xF4]/ ? 4
: 0
;
return unless $utf8_len;
my $is_valid_utf8 = substr($text, $at - 1, $utf8_len);
return ( $is_valid_utf8 =~ /^(?:
[\x00-\x7F]
|[\xC2-\xDF][\x80-\xBF]
|[\xE0][\xA0-\xBF][\x80-\xBF]
|[\xE1-\xEC][\x80-\xBF][\x80-\xBF]
|[\xED][\x80-\x9F][\x80-\xBF]
|[\xEE-\xEF][\x80-\xBF][\x80-\xBF]
|[\xF0][\x90-\xBF][\x80-\xBF][\x80-\xBF]
|[\xF1-\xF3][\x80-\xBF][\x80-\xBF][\x80-\xBF]
|[\xF4][\x80-\x8F][\x80-\xBF][\x80-\xBF]
)$/x ) ? $is_valid_utf8 : '';
}
# Croak with $error plus (unless $no_rep is true) a short printable
# excerpt of the text remaining at offset $at, escaping control and
# non-ASCII characters roughly like perl's pv_uni_display().
sub decode_error {
my $error = shift;
my $no_rep = shift;
my $str = defined $text ? substr($text, $at) : '';
my $mess = '';
# choose an unpack template matching how $str is stored on this perl
my $type = $] >= 5.008 ? 'U*'
: $] < 5.006 ? 'C*'
: utf8::is_utf8( $str ) ? 'U*' # 5.6
: 'C*'
;
for my $c ( unpack( $type, $str ) ) { # emulate pv_uni_display() ?
$mess .= $c == 0x07 ? '\a'
: $c == 0x09 ? '\t'
: $c == 0x0a ? '\n'
: $c == 0x0d ? '\r'
: $c == 0x0c ? '\f'
: $c < 0x20 ? sprintf('\x{%x}', $c)
: $c == 0x5c ? '\\\\'
: $c < 0x80 ? chr($c)
: sprintf('\x{%x}', $c)
;
# cap the excerpt at ~20 chars to keep the message readable
if ( length $mess >= 20 ) {
$mess .= '...';
last;
}
}
unless ( length $mess ) {
$mess = '(end of string)';
}
Carp::croak (
$no_rep ? "$error" : "$error, at character offset $at (before \"$mess\")"
);
}
# Apply the filter_json_single_key_object / filter_json_object
# callbacks to a freshly decoded hashref $o.  The single-key
# callback (if registered for the object's sole key) runs first;
# if it returns exactly one value, that value replaces the object.
# Otherwise the generic object callback runs; it replaces the
# object only when it returns exactly one value.
sub _json_object_hook {
my $o = $_[0];
my @ks = keys %{$o};
if ( $cb_sk_object and @ks == 1 and exists $cb_sk_object->{ $ks[0] } and ref $cb_sk_object->{ $ks[0] } ) {
my @val = $cb_sk_object->{ $ks[0] }->( $o->{$ks[0]} );
if (@val == 1) {
return $val[0];
}
}
# FIX: 'my @val = ... if COND' relies on undefined behavior
# (perlsyn forbids conditional 'my'); declare first, then assign.
my @val;
@val = $cb_object->($o) if ($cb_object);
if (@val == 0 or @val > 1) {
return $o;
}
else {
return $val[0];
}
}
# Snapshot the decoder's closure state into a plain hashref so the
# incremental parser can inspect positions after a decode call.
sub PP_decode_box {
{
text => $text,
at => $at,
ch => $ch,
len => $len,
depth => $depth,
encoding => $encoding,
is_valid_utf8 => $is_valid_utf8,
};
}
} # PARSE
# Combine a UTF-16 surrogate pair - two 4-hex-digit strings
# ($hi in D800..DBFF, $lo in DC00..DFFF) - into the supplementary
# code point they encode, and return its UTF-8 byte string
# (algorithm from perlunicode).
sub _decode_surrogates {
my ($hi, $lo) = @_;
my $codepoint = 0x10000 + ((hex($hi) - 0xD800) << 10) + (hex($lo) - 0xDC00);
my $bytes = pack('U*', $codepoint);
utf8::encode($bytes);
return $bytes;
}
# Turn a 4-hex-digit \uXXXX escape (BMP code point, passed as the
# hex string) into its UTF-8 encoded byte string.
sub _decode_unicode {
my $codepoint = hex(shift);
my $bytes = pack('U', $codepoint);
utf8::encode($bytes);
return $bytes;
}
#
# Setup for various Perl versions (the code from JSON::PP58)
#
# Version-dependent setup (originally JSON::PP58): install the
# pure-Perl encode/decode helpers as the JSON_PP_* entry points on
# 5.8+, work around the broken join() in 5.8.0-5.8.2, and define
# the incr_* proxy methods that delegate to a lazily created
# JSON::PP::IncrParser instance.
BEGIN {
# older perls lack utf8::is_utf8; borrow Encode's implementation
unless ( defined &utf8::is_utf8 ) {
require Encode;
*utf8::is_utf8 = *Encode::is_utf8;
}
if ( $] >= 5.008 ) {
*JSON::PP::JSON_PP_encode_ascii = \&_encode_ascii;
*JSON::PP::JSON_PP_encode_latin1 = \&_encode_latin1;
*JSON::PP::JSON_PP_decode_surrogates = \&_decode_surrogates;
*JSON::PP::JSON_PP_decode_unicode = \&_decode_unicode;
}
if ($] >= 5.008 and $] < 5.008003) { # join() in 5.8.0 - 5.8.2 is broken.
package # hide from PAUSE
JSON::PP;
require subs;
subs->import('join');
eval q|
sub join {
return '' if (@_ < 2);
my $j = shift;
my $str = shift;
for (@_) { $str .= $j . $_; }
return $str;
}
|;
}
# incremental-parsing front ends: each creates the IncrParser on
# first use and forwards the call
sub JSON::PP::incr_parse {
local $Carp::CarpLevel = 1;
( $_[0]->{_incr_parser} ||= JSON::PP::IncrParser->new )->incr_parse( @_ );
}
sub JSON::PP::incr_skip {
( $_[0]->{_incr_parser} ||= JSON::PP::IncrParser->new )->incr_skip;
}
sub JSON::PP::incr_reset {
( $_[0]->{_incr_parser} ||= JSON::PP::IncrParser->new )->incr_reset;
}
# the :lvalue attribute is only compiled on 5.6+, hence the eval
eval q{
sub JSON::PP::incr_text : lvalue {
$_[0]->{_incr_parser} ||= JSON::PP::IncrParser->new;
if ( $_[0]->{_incr_parser}->{incr_parsing} ) {
Carp::croak("incr_text can not be called when the incremental parser already started parsing");
}
$_[0]->{_incr_parser}->{incr_text};
}
} if ( $] >= 5.006 );
} # Setup for various Perl versions (the code from JSON::PP58)
###############################
# Utilities
#
# Utility setup: alias blessed/reftype/refaddr from Scalar::Util
# when available, otherwise install pure-Perl fallbacks copied from
# Scalar::Util itself.
BEGIN {
eval 'require Scalar::Util';
unless($@){
*JSON::PP::blessed = \&Scalar::Util::blessed;
*JSON::PP::reftype = \&Scalar::Util::reftype;
*JSON::PP::refaddr = \&Scalar::Util::refaddr;
}
else{ # This code is from Scalar::Util.
# warn $@;
# UNIVERSAL method so calling it on any blessed ref returns its ref type
eval 'sub UNIVERSAL::a_sub_not_likely_to_be_here { ref($_[0]) }';
*JSON::PP::blessed = sub {
local($@, $SIG{__DIE__}, $SIG{__WARN__});
ref($_[0]) ? eval { $_[0]->a_sub_not_likely_to_be_here } : undef;
};
# map B:: internal classes to reftype names
my %tmap = qw(
B::NULL SCALAR
B::HV HASH
B::AV ARRAY
B::CV CODE
B::IO IO
B::GV GLOB
B::REGEXP REGEXP
);
*JSON::PP::reftype = sub {
my $r = shift;
return undef unless length(ref($r));
my $t = ref(B::svref_2object($r));
return
exists $tmap{$t} ? $tmap{$t}
: length(ref($$r)) ? 'REF'
: 'SCALAR';
};
# derive the address from the ref's stringification (re-blessing
# temporarily so overloaded stringification can't interfere)
*JSON::PP::refaddr = sub {
return undef unless length(ref($_[0]));
my $addr;
if(defined(my $pkg = blessed($_[0]))) {
$addr .= bless $_[0], 'Scalar::Util::Fake';
bless $_[0], $pkg;
}
else {
$addr .= $_[0]
}
$addr =~ /0x(\w+)/;
local $^W;
#no warnings 'portable';
hex($1);
}
}
}
# shamelessly copied and modified from JSON::XS code.
# Define JSON::PP::Boolean (overloaded so true/false numify as 1/0)
# unless the real JSON::PP is loaded and already provides it, then
# create the two singleton boolean objects and their accessors.
unless ( $INC{'JSON/PP.pm'} ) {
eval q|
package
JSON::PP::Boolean;
use overload (
"0+" => sub { ${$_[0]} },
"++" => sub { $_[0] = ${$_[0]} + 1 },
"--" => sub { $_[0] = ${$_[0]} - 1 },
fallback => 1,
);
|;
}
$JSON::PP::true = do { bless \(my $dummy = 1), "JSON::PP::Boolean" };
$JSON::PP::false = do { bless \(my $dummy = 0), "JSON::PP::Boolean" };
# is_bool: true only for the blessed boolean singletons, not plain 1/0
sub is_bool { defined $_[0] and UNIVERSAL::isa($_[0], "JSON::PP::Boolean"); }
sub true { $JSON::PP::true }
sub false { $JSON::PP::false }
sub null { undef; }
###############################
###############################
package # hide from PAUSE
JSON::PP::IncrParser;
use strict;
use constant INCR_M_WS => 0; # initial whitespace skipping
use constant INCR_M_STR => 1; # inside string
use constant INCR_M_BS => 2; # inside backslash
use constant INCR_M_JSON => 3; # outside anything, count nesting
use constant INCR_M_C0 => 4;
use constant INCR_M_C1 => 5;
use vars qw($VERSION);
$VERSION = '1.01';
my $unpack_format = $] < 5.006 ? 'C*' : 'U*';
# Construct a fresh incremental-parser state: no buffered text,
# zero bracket-nesting depth, not currently parsing, and the scan
# position (incr_p) at offset 0.
sub new {
my $class = shift;
my %state = (
incr_nest => 0,
incr_text => undef,
incr_parsing => 0,
incr_p => 0,
);
return bless \%state, $class;
}
# Append $text (if given) to the buffered JSON fragment, then -
# depending on the caller's context - extract nothing (void), one
# object (scalar), or every complete object (list) from the buffer
# via _incr_parse.  Upgrades the buffer to character semantics when
# mixing utf8 and byte input.
sub incr_parse {
my ( $self, $coder, $text ) = @_;
$self->{incr_text} = '' unless ( defined $self->{incr_text} );
if ( defined $text ) {
# keep the buffer's utf8 flag consistent with the new chunk
if ( utf8::is_utf8( $text ) and !utf8::is_utf8( $self->{incr_text} ) ) {
utf8::upgrade( $self->{incr_text} ) ;
utf8::decode( $self->{incr_text} ) ;
}
$self->{incr_text} .= $text;
}
my $max_size = $coder->get_max_size;
# void context: only buffer the text, never parse
if ( defined wantarray ) {
$self->{incr_mode} = INCR_M_WS unless defined $self->{incr_mode};
if ( wantarray ) {
# list context: keep extracting until the scanner has
# consumed everything buffered so far
my @ret;
$self->{incr_parsing} = 1;
do {
push @ret, $self->_incr_parse( $coder, $self->{incr_text} );
unless ( !$self->{incr_nest} and $self->{incr_mode} == INCR_M_JSON ) {
$self->{incr_mode} = INCR_M_WS if $self->{incr_mode} != INCR_M_STR;
}
} until ( length $self->{incr_text} >= $self->{incr_p} );
$self->{incr_parsing} = 0;
return @ret;
}
else { # in scalar context
$self->{incr_parsing} = 1;
my $obj = $self->_incr_parse( $coder, $self->{incr_text} );
$self->{incr_parsing} = 0 if defined $obj; # pointed by Martin J. Evans
return $obj ? $obj : undef; # $obj is an empty string, parsing was completed.
}
}
}
# Core incremental scanner: advance incr_p through $text counting
# bracket nesting (INCR_M_JSON), skipping string contents
# (INCR_M_STR) and '#' comments, until one complete top-level JSON
# value has been seen.  When complete, decode that prefix with the
# real parser and trim it off the buffer; otherwise return empty to
# wait for more input.
sub _incr_parse {
my ( $self, $coder, $text, $skip ) = @_;
my $p = $self->{incr_p};
my $restore = $p;
my @obj;
my $len = length $text;
# initial mode: skip leading whitespace (bytes <= 0x20)
if ( $self->{incr_mode} == INCR_M_WS ) {
while ( $len > $p ) {
my $s = substr( $text, $p, 1 );
$p++ and next if ( 0x20 >= unpack($unpack_format, $s) );
$self->{incr_mode} = INCR_M_JSON;
last;
}
}
while ( $len > $p ) {
my $s = substr( $text, $p++, 1 );
if ( $s eq '"' ) {
# a quote preceded by a backslash does not toggle string mode
if (substr( $text, $p - 2, 1 ) eq '\\' ) {
next;
}
if ( $self->{incr_mode} != INCR_M_STR ) {
$self->{incr_mode} = INCR_M_STR;
}
else {
$self->{incr_mode} = INCR_M_JSON;
# a bare string at nesting 0 is itself a complete value
unless ( $self->{incr_nest} ) {
last;
}
}
}
if ( $self->{incr_mode} == INCR_M_JSON ) {
if ( $s eq '[' or $s eq '{' ) {
if ( ++$self->{incr_nest} > $coder->get_max_depth ) {
Carp::croak('json text or perl structure exceeds maximum nesting level (max_depth set too low?)');
}
}
elsif ( $s eq ']' or $s eq '}' ) {
# closing the outermost bracket completes the value
last if ( --$self->{incr_nest} <= 0 );
}
elsif ( $s eq '#' ) {
# skip '#' comment to end of line
while ( $len > $p ) {
last if substr( $text, $p++, 1 ) eq "\n";
}
}
}
}
$self->{incr_p} = $p;
# still inside a string, or brackets still open: need more input
return if ( $self->{incr_mode} == INCR_M_STR and not $self->{incr_nest} );
return if ( $self->{incr_mode} == INCR_M_JSON and $self->{incr_nest} > 0 );
return '' unless ( length substr( $self->{incr_text}, 0, $p ) );
local $Carp::CarpLevel = 2;
$self->{incr_p} = $restore;
$self->{incr_c} = $p;
# decode exactly the completed prefix, then drop it from the buffer
my ( $obj, $tail ) = $coder->PP_decode_json( substr( $self->{incr_text}, 0, $p ), 0x10000001 );
$self->{incr_text} = substr( $self->{incr_text}, $p );
$self->{incr_p} = 0;
return $obj || '';
}
# Accessor for the buffered, not-yet-parsed JSON fragment.  Refuses
# to run while a parse is in progress, since the buffer is then in
# an intermediate state.
sub incr_text {
my $self = $_[0];
Carp::croak("incr_text can not be called when the incremental parser already started parsing")
if $self->{incr_parsing};
return $self->{incr_text};
}
# After a parse error, discard the offending prefix: keep only the
# text beyond the last scanned position (incr_c) and rewind the
# scan offset to the start of what remains.
sub incr_skip {
my ($self) = @_;
my $keep_from = $self->{incr_c};
$self->{incr_text} = substr( $self->{incr_text}, $keep_from );
$self->{incr_p} = 0;
}
# Throw away all incremental-parser state, returning the object to
# its freshly constructed condition (incr_mode 0 == INCR_M_WS, the
# initial whitespace-skipping mode).
sub incr_reset {
my ($self) = @_;
$self->{incr_text}    = undef;
$self->{incr_p}       = 0;
$self->{incr_mode}    = 0;
$self->{incr_nest}    = 0;
$self->{incr_parsing} = 0;
}
###############################
1;
__END__
=pod
=head1 NAME
JSON::PP - JSON::XS compatible pure-Perl module.
=head1 SYNOPSIS
use JSON::PP;
# exported functions, they croak on error
# and expect/generate UTF-8
$utf8_encoded_json_text = encode_json $perl_hash_or_arrayref;
$perl_hash_or_arrayref = decode_json $utf8_encoded_json_text;
# OO-interface
$json = JSON::PP->new->ascii->pretty->allow_nonref;
$json_text = $json->encode( $perl_scalar );
$perl_scalar = $json->decode( $json_text );
$pretty_printed = $json->pretty->encode( $perl_scalar ); # pretty-printing
# Note that JSON version 2.0 and above will automatically use
# JSON::XS or JSON::PP, so you should be able to just:
use JSON;
=head1 VERSION
2.27200
L<JSON::XS> 2.27 (~2.30) compatible.
=head1 DESCRIPTION
This module is L<JSON::XS> compatible pure Perl module.
(Perl 5.8 or later is recommended)
JSON::XS is the fastest and most proper JSON module on CPAN.
It is written by Marc Lehmann in C, so must be compiled and
installed in the used environment.
JSON::PP is a pure-Perl module and has compatibility to JSON::XS.
=head2 FEATURES
=over
=item * correct unicode handling
This module knows how to handle Unicode (depending on Perl version).
See to L<JSON::XS/A FEW NOTES ON UNICODE AND PERL> and
L<UNICODE HANDLING ON PERLS>.
=item * round-trip integrity
When you serialise a perl data structure using only data types
supported by JSON and Perl, the deserialised data structure is
identical on the Perl level. (e.g. the string "2.0" doesn't suddenly
become "2" just because it looks like a number). There I<are> minor
exceptions to this, read the MAPPING section below to learn about
those.
=item * strict checking of JSON correctness
There is no guessing, no generating of illegal JSON texts by default,
and only JSON is accepted as input by default (the latter is a
security feature). But when some options are set, loose checking
features are available.
=back
=head1 FUNCTIONAL INTERFACE
Some documents are copied and modified from L<JSON::XS/FUNCTIONAL INTERFACE>.
=head2 encode_json
$json_text = encode_json $perl_scalar
Converts the given Perl data structure to a UTF-8 encoded, binary string.
This function call is functionally identical to:
$json_text = JSON::PP->new->utf8->encode($perl_scalar)
=head2 decode_json
$perl_scalar = decode_json $json_text
The opposite of C<encode_json>: expects an UTF-8 (binary) string and tries
to parse that as an UTF-8 encoded JSON text, returning the resulting
reference.
This function call is functionally identical to:
$perl_scalar = JSON::PP->new->utf8->decode($json_text)
=head2 JSON::PP::is_bool
$is_boolean = JSON::PP::is_bool($scalar)
Returns true if the passed scalar represents either JSON::PP::true or
JSON::PP::false, two constants that act like C<1> and C<0> respectively
and are also used to represent JSON C<true> and C<false> in Perl strings.
=head2 JSON::PP::true
Returns JSON true value which is blessed object.
It C<isa> JSON::PP::Boolean object.
=head2 JSON::PP::false
Returns JSON false value which is blessed object.
It C<isa> JSON::PP::Boolean object.
=head2 JSON::PP::null
Returns C<undef>.
See L<MAPPING>, below, for more information on how JSON values are mapped to
Perl.
=head1 HOW DO I DECODE A DATA FROM OUTER AND ENCODE TO OUTER
This section supposes that your perl version is 5.8 or later.
If you know a JSON text from an outer world - a network, a file content, and so on,
is encoded in UTF-8, you should use C<decode_json> or C<JSON> module object
with C<utf8> enable. And the decoded result will contain UNICODE characters.
# from network
my $json = JSON::PP->new->utf8;
my $json_text = CGI->new->param( 'json_data' );
my $perl_scalar = $json->decode( $json_text );
# from file content
local $/;
open( my $fh, '<', 'json.data' );
$json_text = <$fh>;
$perl_scalar = decode_json( $json_text );
If an outer data is not encoded in UTF-8, firstly you should C<decode> it.
use Encode;
local $/;
open( my $fh, '<', 'json.data' );
my $encoding = 'cp932';
my $unicode_json_text = decode( $encoding, <$fh> ); # UNICODE
# or you can write the below code.
#
# open( my $fh, "<:encoding($encoding)", 'json.data' );
# $unicode_json_text = <$fh>;
In this case, C<$unicode_json_text> is of course UNICODE string.
So you B<cannot> use C<decode_json> nor C<JSON> module object with C<utf8> enable.
Instead of them, you use C<JSON> module object with C<utf8> disable.
$perl_scalar = $json->utf8(0)->decode( $unicode_json_text );
Or C<encode 'utf8'> and C<decode_json>:
$perl_scalar = decode_json( encode( 'utf8', $unicode_json_text ) );
# this way is not efficient.
And now, you want to convert your C<$perl_scalar> into JSON data and
send it to an outer world - a network or a file content, and so on.
Your data usually contains UNICODE strings and you want the converted data to be encoded
in UTF-8, you should use C<encode_json> or C<JSON> module object with C<utf8> enable.
print encode_json( $perl_scalar ); # to a network? file? or display?
# or
print $json->utf8->encode( $perl_scalar );
If C<$perl_scalar> does not contain UNICODE but C<$encoding>-encoded strings
for some reason, then its characters are regarded as B<latin1> for perl
(because it does not concern with your $encoding).
You B<cannot> use C<encode_json> nor C<JSON> module object with C<utf8> enable.
Instead of them, you use C<JSON> module object with C<utf8> disable.
Note that the resulted text is a UNICODE string but no problem to print it.
# $perl_scalar contains $encoding encoded string values
$unicode_json_text = $json->utf8(0)->encode( $perl_scalar );
# $unicode_json_text consists of characters less than 0x100
print $unicode_json_text;
Or C<decode $encoding> all string values and C<encode_json>:
$perl_scalar->{ foo } = decode( $encoding, $perl_scalar->{ foo } );
# ... do it to each string values, then encode_json
$json_text = encode_json( $perl_scalar );
This method is a proper way but probably not efficient.
See to L<Encode>, L<perluniintro>.
=head1 METHODS
Basically, check to L<JSON> or L<JSON::XS>.
=head2 new
$json = JSON::PP->new
Returns a new JSON::PP object that can be used to de/encode JSON
strings.
All boolean flags described below are by default I<disabled>.
The mutators for flags all return the JSON object again and thus calls can
be chained:
my $json = JSON::PP->new->utf8->space_after->encode({a => [1,2]})
=> {"a": [1, 2]}
=head2 ascii
$json = $json->ascii([$enable])
$enabled = $json->get_ascii
If $enable is true (or missing), then the encode method will not generate characters outside
the code range 0..127. Any Unicode characters outside that range will be escaped using either
a single \uXXXX or a double \uHHHH\uLLLL escape sequence, as per RFC4627.
(See to L<JSON::XS/OBJECT-ORIENTED INTERFACE>).
In Perl 5.005, there is no character having high value (more than 255).
See to L<UNICODE HANDLING ON PERLS>.
If $enable is false, then the encode method will not escape Unicode characters unless
required by the JSON syntax or other flags. This results in a faster and more compact format.
JSON::PP->new->ascii(1)->encode([chr 0x10401])
=> ["\ud801\udc01"]
=head2 latin1
$json = $json->latin1([$enable])
$enabled = $json->get_latin1
If $enable is true (or missing), then the encode method will encode the resulting JSON
text as latin1 (or iso-8859-1), escaping any characters outside the code range 0..255.
If $enable is false, then the encode method will not escape Unicode characters
unless required by the JSON syntax or other flags.
JSON::XS->new->latin1->encode (["\x{89}\x{abc}"])
=> ["\x{89}\\u0abc"] # (perl syntax, U+abc escaped, U+89 not)
See to L<UNICODE HANDLING ON PERLS>.
=head2 utf8
$json = $json->utf8([$enable])
$enabled = $json->get_utf8
If $enable is true (or missing), then the encode method will encode the JSON result
into UTF-8, as required by many protocols, while the decode method expects to be handled
an UTF-8-encoded string. Please note that UTF-8-encoded strings do not contain any
characters outside the range 0..255, they are thus useful for bytewise/binary I/O.
(In Perl 5.005, any character outside the range 0..255 does not exist.
See to L<UNICODE HANDLING ON PERLS>.)
In future versions, enabling this option might enable autodetection of the UTF-16 and UTF-32
encoding families, as described in RFC4627.
If $enable is false, then the encode method will return the JSON string as a (non-encoded)
Unicode string, while decode expects thus a Unicode string. Any decoding or encoding
(e.g. to UTF-8 or UTF-16) needs to be done yourself, e.g. using the Encode module.
Example, output UTF-16BE-encoded JSON:
use Encode;
$jsontext = encode "UTF-16BE", JSON::PP->new->encode ($object);
Example, decode UTF-32LE-encoded JSON:
use Encode;
$object = JSON::PP->new->decode (decode "UTF-32LE", $jsontext);
=head2 pretty
$json = $json->pretty([$enable])
This enables (or disables) all of the C<indent>, C<space_before> and
C<space_after> flags in one call to generate the most readable
(or most compact) form possible.
Equivalent to:
$json->indent->space_before->space_after
=head2 indent
$json = $json->indent([$enable])
$enabled = $json->get_indent
The default indent space length is three.
You can use C<indent_length> to change the length.
=head2 space_before
$json = $json->space_before([$enable])
$enabled = $json->get_space_before
If C<$enable> is true (or missing), then the C<encode> method will add an extra
optional space before the C<:> separating keys from values in JSON objects.
If C<$enable> is false, then the C<encode> method will not add any extra
space at those places.
This setting has no effect when decoding JSON texts.
Example, space_before enabled, space_after and indent disabled:
{"key" :"value"}
=head2 space_after
$json = $json->space_after([$enable])
$enabled = $json->get_space_after
If C<$enable> is true (or missing), then the C<encode> method will add an extra
optional space after the C<:> separating keys from values in JSON objects
and extra whitespace after the C<,> separating key-value pairs and array
members.
If C<$enable> is false, then the C<encode> method will not add any extra
space at those places.
This setting has no effect when decoding JSON texts.
Example, space_before and indent disabled, space_after enabled:
{"key": "value"}
=head2 relaxed
$json = $json->relaxed([$enable])
$enabled = $json->get_relaxed
If C<$enable> is true (or missing), then C<decode> will accept some
extensions to normal JSON syntax (see below). C<encode> will not be
affected in anyway. I<Be aware that this option makes you accept invalid
JSON texts as if they were valid!>. I suggest only to use this option to
parse application-specific files written by humans (configuration files,
resource files etc.)
If C<$enable> is false (the default), then C<decode> will only accept
valid JSON texts.
Currently accepted extensions are:
=over 4
=item * list items can have an end-comma
JSON I<separates> array elements and key-value pairs with commas. This
can be annoying if you write JSON texts manually and want to be able to
quickly append elements, so this extension accepts comma at the end of
such items not just between them:
[
1,
2, <- this comma not normally allowed
]
{
"k1": "v1",
"k2": "v2", <- this comma not normally allowed
}
=item * shell-style '#'-comments
Whenever JSON allows whitespace, shell-style comments are additionally
allowed. They are terminated by the first carriage-return or line-feed
character, after which more white-space and comments are allowed.
[
1, # this comment not allowed in JSON
# neither this one...
]
=back
=head2 canonical
$json = $json->canonical([$enable])
$enabled = $json->get_canonical
If C<$enable> is true (or missing), then the C<encode> method will output JSON objects
by sorting their keys. This is adding a comparatively high overhead.
If C<$enable> is false, then the C<encode> method will output key-value
pairs in the order Perl stores them (which will likely change between runs
of the same script).
This option is useful if you want the same data structure to be encoded as
the same JSON text (given the same overall settings). If it is disabled,
the same hash might be encoded differently even if contains the same data,
as key-value pairs have no inherent ordering in Perl.
This setting has no effect when decoding JSON texts.
If you want your own sorting routine, you can give a code reference
or a subroutine name to C<sort_by>. See to C<JSON::PP OWN METHODS>.
=head2 allow_nonref
$json = $json->allow_nonref([$enable])
$enabled = $json->get_allow_nonref
If C<$enable> is true (or missing), then the C<encode> method can convert a
non-reference into its corresponding string, number or null JSON value,
which is an extension to RFC4627. Likewise, C<decode> will accept those JSON
values instead of croaking.
If C<$enable> is false, then the C<encode> method will croak if it isn't
passed an arrayref or hashref, as JSON texts must either be an object
or array. Likewise, C<decode> will croak if given something that is not a
JSON object or array.
JSON::PP->new->allow_nonref->encode ("Hello, World!")
=> "Hello, World!"
=head2 allow_unknown
$json = $json->allow_unknown ([$enable])
$enabled = $json->get_allow_unknown
If $enable is true (or missing), then "encode" will *not* throw an
exception when it encounters values it cannot represent in JSON (for
example, filehandles) but instead will encode a JSON "null" value.
Note that blessed objects are not included here and are handled
separately by c<allow_nonref>.
If $enable is false (the default), then "encode" will throw an
exception when it encounters anything it cannot encode as JSON.
This option does not affect "decode" in any way, and it is
recommended to leave it off unless you know your communications
partner.
=head2 allow_blessed
$json = $json->allow_blessed([$enable])
$enabled = $json->get_allow_blessed
If C<$enable> is true (or missing), then the C<encode> method will not
barf when it encounters a blessed reference. Instead, the value of the
B<convert_blessed> option will decide whether C<null> (C<convert_blessed>
disabled or no C<TO_JSON> method found) or a representation of the
object (C<convert_blessed> enabled and C<TO_JSON> method found) is being
encoded. Has no effect on C<decode>.
If C<$enable> is false (the default), then C<encode> will throw an
exception when it encounters a blessed object.
=head2 convert_blessed
$json = $json->convert_blessed([$enable])
$enabled = $json->get_convert_blessed
If C<$enable> is true (or missing), then C<encode>, upon encountering a
blessed object, will check for the availability of the C<TO_JSON> method
on the object's class. If found, it will be called in scalar context
and the resulting scalar will be encoded instead of the object. If no
C<TO_JSON> method is found, the value of C<allow_blessed> will decide what
to do.
The C<TO_JSON> method may safely call die if it wants. If C<TO_JSON>
returns other blessed objects, those will be handled in the same
way. C<TO_JSON> must take care of not causing an endless recursion cycle
(== crash) in this case. The name of C<TO_JSON> was chosen because other
methods called by the Perl core (== not by the user of the object) are
usually in upper case letters and to avoid collisions with the C<to_json>
function or method.
This setting does not yet influence C<decode> in any way.
If C<$enable> is false, then the C<allow_blessed> setting will decide what
to do when a blessed object is found.
=head2 filter_json_object
$json = $json->filter_json_object([$coderef])
When C<$coderef> is specified, it will be called from C<decode> each
time it decodes a JSON object. The only argument passed to the coderef
is a reference to the newly-created hash. If the code references returns
a single scalar (which need not be a reference), this value
(i.e. a copy of that scalar to avoid aliasing) is inserted into the
deserialised data structure. If it returns an empty list
(NOTE: I<not> C<undef>, which is a valid scalar), the original deserialised
hash will be inserted. This setting can slow down decoding considerably.
When C<$coderef> is omitted or undefined, any existing callback will
be removed and C<decode> will not change the deserialised hash in any
way.
Example, convert all JSON objects into the integer 5:
my $js = JSON::PP->new->filter_json_object (sub { 5 });
# returns [5]
$js->decode ('[{}]'); # the given subroutine takes a hash reference.
# throw an exception because allow_nonref is not enabled
# so a lone 5 is not allowed.
$js->decode ('{"a":1, "b":2}');
=head2 filter_json_single_key_object
$json = $json->filter_json_single_key_object($key [=> $coderef])
Works remotely similar to C<filter_json_object>, but is only called for
JSON objects having a single key named C<$key>.
This C<$coderef> is called before the one specified via
C<filter_json_object>, if any. It gets passed the single value in the JSON
object. If it returns a single value, it will be inserted into the data
structure. If it returns nothing (not even C<undef> but the empty list),
the callback from C<filter_json_object> will be called next, as if no
single-key callback were specified.
If C<$coderef> is omitted or undefined, the corresponding callback will be
disabled. There can only ever be one callback for a given key.
As this callback gets called less often than the C<filter_json_object>
one, decoding speed will not usually suffer as much. Therefore, single-key
objects make excellent targets to serialise Perl objects into, especially
as single-key JSON objects are as close to the type-tagged value concept
as JSON gets (it's basically an ID/VALUE tuple). Of course, JSON does not
support this in any way, so you need to make sure your data never looks
like a serialised Perl hash.
Typical names for the single object key are C<__class_whatever__>, or
C<$__dollars_are_rarely_used__$> or C<}ugly_brace_placement>, or even
things like C<__class_md5sum(classname)__>, to reduce the risk of clashing
with real hashes.
Example, decode JSON objects of the form C<< { "__widget__" => <id> } >>
into the corresponding C<< $WIDGET{<id>} >> object:
# return whatever is in $WIDGET{5}:
JSON::PP
->new
->filter_json_single_key_object (__widget__ => sub {
$WIDGET{ $_[0] }
})
->decode ('{"__widget__": 5}')
# this can be used with a TO_JSON method in some "widget" class
# for serialisation to json:
sub WidgetBase::TO_JSON {
my ($self) = @_;
unless ($self->{id}) {
$self->{id} = ..get..some..id..;
$WIDGET{$self->{id}} = $self;
}
{ __widget__ => $self->{id} }
}
=head2 shrink
$json = $json->shrink([$enable])
$enabled = $json->get_shrink
In JSON::XS, this flag resizes strings generated by either
C<encode> or C<decode> to their minimum size possible.
It will also try to downgrade any strings to octet-form if possible.
In JSON::PP, it is noop about resizing strings but tries
C<utf8::downgrade> to the returned string by C<encode>.
See to L<utf8>.
See to L<JSON::XS/OBJECT-ORIENTED INTERFACE>
=head2 max_depth
$json = $json->max_depth([$maximum_nesting_depth])
$max_depth = $json->get_max_depth
Sets the maximum nesting level (default C<512>) accepted while encoding
or decoding. If a higher nesting level is detected in JSON text or a Perl
data structure, then the encoder and decoder will stop and croak at that
point.
Nesting level is defined by number of hash- or arrayrefs that the encoder
needs to traverse to reach a given point or the number of C<{> or C<[>
characters without their matching closing parenthesis crossed to reach a
given character in a string.
If no argument is given, the highest possible setting will be used, which
is rarely useful.
See L<JSON::XS/SECURITY CONSIDERATIONS> for more info on why this is useful.
When a large value (100 or more) was set and it de/encodes a deep nested object/text,
it may raise a warning 'Deep recursion on subroutine' at the perl runtime phase.
=head2 max_size
$json = $json->max_size([$maximum_string_size])
$max_size = $json->get_max_size
Set the maximum length a JSON text may have (in bytes) where decoding is
being attempted. The default is C<0>, meaning no limit. When C<decode>
is called on a string that is longer than this many bytes, it will not
attempt to decode the string but throw an exception. This setting has no
effect on C<encode> (yet).
If no argument is given, the limit check will be deactivated (same as when
C<0> is specified).
See L<JSON::XS/SECURITY CONSIDERATIONS> for more info on why this is useful.
=head2 encode
$json_text = $json->encode($perl_scalar)
Converts the given Perl data structure (a simple scalar or a reference
to a hash or array) to its JSON representation. Simple scalars will be
converted into JSON string or number sequences, while references to arrays
become JSON arrays and references to hashes become JSON objects. Undefined
Perl values (e.g. C<undef>) become JSON C<null> values.
References to the integers C<0> and C<1> are converted into C<true> and C<false>.
=head2 decode
$perl_scalar = $json->decode($json_text)
The opposite of C<encode>: expects a JSON text and tries to parse it,
returning the resulting simple scalar or reference. Croaks on error.
JSON numbers and strings become simple Perl scalars. JSON arrays become
Perl arrayrefs and JSON objects become Perl hashrefs. C<true> becomes
C<1> (C<JSON::true>), C<false> becomes C<0> (C<JSON::false>) and
C<null> becomes C<undef>.
=head2 decode_prefix
($perl_scalar, $characters) = $json->decode_prefix($json_text)
This works like the C<decode> method, but instead of raising an exception
when there is trailing garbage after the first JSON object, it will
silently stop parsing there and return the number of characters consumed
so far.
JSON->new->decode_prefix ("[1] the tail")
=> ([], 3)
=head1 INCREMENTAL PARSING
Most of this section are copied and modified from L<JSON::XS/INCREMENTAL PARSING>.
In some cases, there is the need for incremental parsing of JSON texts.
This module does allow you to parse a JSON stream incrementally.
It does so by accumulating text until it has a full JSON object, which
it then can decode. This process is similar to using C<decode_prefix>
to see if a full JSON object is available, but is much more efficient
(and can be implemented with a minimum of method calls).
This module will only attempt to parse the JSON text once it is sure it
has enough text to get a decisive result, using a very simple but
truly incremental parser. This means that it sometimes won't stop as
early as the full parser, for example, it doesn't detect parenthesis
mismatches. The only thing it guarantees is that it starts decoding as
soon as a syntactically valid JSON text has been seen. This means you need
to set resource limits (e.g. C<max_size>) to ensure the parser will stop
parsing in the presence of syntax errors.
The following methods implement this incremental parser.
=head2 incr_parse
$json->incr_parse( [$string] ) # void context
$obj_or_undef = $json->incr_parse( [$string] ) # scalar context
@obj_or_empty = $json->incr_parse( [$string] ) # list context
This is the central parsing function. It can both append new text and
extract objects from the stream accumulated so far (both of these
functions are optional).
If C<$string> is given, then this string is appended to the already
existing JSON fragment stored in the C<$json> object.
After that, if the function is called in void context, it will simply
return without doing anything further. This can be used to add more text
in as many chunks as you want.
If the method is called in scalar context, then it will try to extract
exactly I<one> JSON object. If that is successful, it will return this
object, otherwise it will return C<undef>. If there is a parse error,
this method will croak just as C<decode> would do (one can then use
C<incr_skip> to skip the erroneous part). This is the most common way of
using the method.
And finally, in list context, it will try to extract as many objects
from the stream as it can find and return them, or the empty list
otherwise. For this to work, there must be no separators between the JSON
objects or arrays, instead they must be concatenated back-to-back. If
an error occurs, an exception will be raised as in the scalar context
case. Note that in this case, any previously-parsed JSON texts will be
lost.
Example: Parse some JSON arrays/objects in a given string and return them.
my @objs = JSON->new->incr_parse ("[5][7][1,2]");
=head2 incr_text
$lvalue_string = $json->incr_text
This method returns the currently stored JSON fragment as an lvalue, that
is, you can manipulate it. This I<only> works when a preceding call to
C<incr_parse> in I<scalar context> successfully returned an object. Under
all other circumstances you must not call this function (I mean it.
although in simple tests it might actually work, it I<will> fail under
real world conditions). As a special exception, you can also call this
method before having parsed anything.
This function is useful in two cases: a) finding the trailing text after a
JSON object or b) parsing multiple JSON objects separated by non-JSON text
(such as commas).
$json->incr_text =~ s/\s*,\s*//;
In Perl 5.005, C<lvalue> attribute is not available.
You must write codes like the below:
$string = $json->incr_text;
$string =~ s/\s*,\s*//;
$json->incr_text( $string );
=head2 incr_skip
$json->incr_skip
This will reset the state of the incremental parser and will remove the
parsed text from the input buffer. This is useful after C<incr_parse>
died, in which case the input buffer and incremental parser state is left
unchanged, to skip the text parsed so far and to reset the parse state.
=head2 incr_reset
$json->incr_reset
This completely resets the incremental parser, that is, after this call,
it will be as if the parser had never parsed anything.
This is useful if you want to repeatedly parse JSON objects and want to
ignore any trailing data, which means you have to reset the parser after
each successful decode.
See to L<JSON::XS/INCREMENTAL PARSING> for examples.
=head1 JSON::PP OWN METHODS
=head2 allow_singlequote
$json = $json->allow_singlequote([$enable])
If C<$enable> is true (or missing), then C<decode> will accept
JSON strings quoted by single quotations that are invalid JSON
format.
$json->allow_singlequote->decode({"foo":'bar'});
$json->allow_singlequote->decode({'foo':"bar"});
$json->allow_singlequote->decode({'foo':'bar'});
As with the C<relaxed> option, this option may be used to parse
application-specific files written by humans.
=head2 allow_barekey
$json = $json->allow_barekey([$enable])
If C<$enable> is true (or missing), then C<decode> will accept
bare keys of JSON object that are invalid JSON format.
As with the C<relaxed> option, this option may be used to parse
application-specific files written by humans.
$json->allow_barekey->decode('{foo:"bar"}');
=head2 allow_bignum
$json = $json->allow_bignum([$enable])
If C<$enable> is true (or missing), then C<decode> will convert
the big integer Perl cannot handle as integer into a L<Math::BigInt>
object and convert a floating number (any) into a L<Math::BigFloat>.
On the contrary, C<encode> converts C<Math::BigInt> objects and C<Math::BigFloat>
objects into JSON numbers with C<allow_blessed> enabled.
$json->allow_nonref->allow_blessed->allow_bignum;
$bigfloat = $json->decode('2.000000000000000000000000001');
print $json->encode($bigfloat);
# => 2.000000000000000000000000001
See to L<JSON::XS/MAPPING> about the normal conversion of JSON number.
=head2 loose
$json = $json->loose([$enable])
The unescaped [\x00-\x1f\x22\x2f\x5c] strings are invalid in JSON strings
and the module doesn't allow one to C<decode> to these (except for \x2f).
If C<$enable> is true (or missing), then C<decode> will accept these
unescaped strings.
$json->loose->decode(qq|["abc
def"]|);
See L<JSON::XS/SECURITY CONSIDERATIONS>.
=head2 escape_slash
$json = $json->escape_slash([$enable])
According to JSON Grammar, I<slash> (U+002F) is escaped. But default
JSON::PP (as same as JSON::XS) encodes strings without escaping slash.
If C<$enable> is true (or missing), then C<encode> will escape slashes.
=head2 indent_length
$json = $json->indent_length($length)
JSON::XS indent space length is 3 and cannot be changed.
JSON::PP set the indent space length with the given $length.
The default is 3. The acceptable range is 0 to 15.
=head2 sort_by
$json = $json->sort_by($function_name)
$json = $json->sort_by($subroutine_ref)
If $function_name or $subroutine_ref is set, that sort routine is used
when encoding JSON objects.
$js = $pc->sort_by(sub { $JSON::PP::a cmp $JSON::PP::b })->encode($obj);
# is($js, q|{"a":1,"b":2,"c":3,"d":4,"e":5,"f":6,"g":7,"h":8,"i":9}|);
$js = $pc->sort_by('own_sort')->encode($obj);
# is($js, q|{"a":1,"b":2,"c":3,"d":4,"e":5,"f":6,"g":7,"h":8,"i":9}|);
sub JSON::PP::own_sort { $JSON::PP::a cmp $JSON::PP::b }
As the sorting routine runs in the JSON::PP scope, the given
subroutine name and the special variables C<$a>, C<$b> will begin
with 'JSON::PP::'.
If $integer is set, then the effect is same as C<canonical> on.
=head1 INTERNAL
For developers.
=over
=item PP_encode_box
Returns
{
depth => $depth,
indent_count => $indent_count,
}
=item PP_decode_box
Returns
{
text => $text,
at => $at,
ch => $ch,
len => $len,
depth => $depth,
encoding => $encoding,
is_valid_utf8 => $is_valid_utf8,
};
=back
=head1 MAPPING
This section is copied from JSON::XS and modified to C<JSON::PP>.
JSON::XS and JSON::PP mapping mechanisms are almost equivalent.
See to L<JSON::XS/MAPPING>.
=head2 JSON -> PERL
=over 4
=item object
A JSON object becomes a reference to a hash in Perl. No ordering of object
keys is preserved (JSON does not preserve object key ordering itself).
=item array
A JSON array becomes a reference to an array in Perl.
=item string
A JSON string becomes a string scalar in Perl - Unicode codepoints in JSON
are represented by the same codepoints in the Perl string, so no manual
decoding is necessary.
=item number
A JSON number becomes either an integer, numeric (floating point) or
string scalar in perl, depending on its range and any fractional parts. On
the Perl level, there is no difference between those as Perl handles all
the conversion details, but an integer may take slightly less memory and
might represent more values exactly than floating point numbers.
If the number consists of digits only, C<JSON> will try to represent
it as an integer value. If that fails, it will try to represent it as
a numeric (floating point) value if that is possible without loss of
precision. Otherwise it will preserve the number as a string value (in
which case you lose roundtripping ability, as the JSON number will be
re-encoded to a JSON string).
Numbers containing a fractional or exponential part will always be
represented as numeric (floating point) values, possibly at a loss of
precision (in which case you might lose perfect roundtripping ability, but
the JSON number will still be re-encoded as a JSON number).
Note that precision is not accuracy - binary floating point values cannot
represent most decimal fractions exactly, and when converting from and to
floating point, C<JSON> only guarantees precision up to but not including
the least significant bit.
When C<allow_bignum> is enabled, big integers
and numeric values can be optionally converted into L<Math::BigInt> and
L<Math::BigFloat> objects.
=item true, false
These JSON atoms become C<JSON::PP::true> and C<JSON::PP::false>,
respectively. They are overloaded to act almost exactly like the numbers
C<1> and C<0>. You can check whether a scalar is a JSON boolean by using
the C<JSON::is_bool> function.
print JSON::PP::true . "\n";
=> true
print JSON::PP::true + 1;
=> 1
ok(JSON::true eq '1');
ok(JSON::true == 1);
C<JSON> will install these missing overloading features to the backend modules.
=item null
A JSON null atom becomes C<undef> in Perl.
C<JSON::PP::null> returns C<undef>.
=back
=head2 PERL -> JSON
The mapping from Perl to JSON is slightly more difficult, as Perl is a
truly typeless language, so we can only guess which JSON type is meant by
a Perl value.
=over 4
=item hash references
Perl hash references become JSON objects. As there is no inherent ordering
in hash keys (or JSON objects), they will usually be encoded in a
pseudo-random order that can change between runs of the same program but
stays generally the same within a single run of a program. C<JSON>
optionally sorts the hash keys (determined by the I<canonical> flag), so
the same data structure will serialise to the same JSON text (given same
settings and version of JSON::XS), but this incurs a runtime overhead
and is only rarely useful, e.g. when you want to compare some JSON text
against another for equality.
=item array references
Perl array references become JSON arrays.
=item other references
Other unblessed references are generally not allowed and will cause an
exception to be thrown, except for references to the integers C<0> and
C<1>, which get turned into C<false> and C<true> atoms in JSON. You can
also use C<JSON::false> and C<JSON::true> to improve readability.
to_json [\0,JSON::PP::true] # yields [false,true]
=item JSON::PP::true, JSON::PP::false, JSON::PP::null
These special values become JSON true and JSON false values,
respectively. You can also use C<\1> and C<\0> directly if you want.
JSON::PP::null returns C<undef>.
=item blessed objects
Blessed objects are not directly representable in JSON. See the
C<allow_blessed> and C<convert_blessed> methods on various options on
how to deal with this: basically, you can choose between throwing an
exception, encoding the reference as if it weren't blessed, or provide
your own serialiser method.
See to L<convert_blessed>.
=item simple scalars
Simple Perl scalars (any scalar that is not a reference) are the most
difficult objects to encode: JSON::XS and JSON::PP will encode undefined scalars as
JSON C<null> values, scalars that have last been used in a string context
before encoding as JSON strings, and anything else as number value:
# dump as number
encode_json [2] # yields [2]
encode_json [-3.0e17] # yields [-3e+17]
my $value = 5; encode_json [$value] # yields [5]
# used as string, so dump as string
print $value;
encode_json [$value] # yields ["5"]
# undef becomes null
encode_json [undef] # yields [null]
You can force the type to be a string by stringifying it:
my $x = 3.1; # some variable containing a number
"$x"; # stringified
$x .= ""; # another, more awkward way to stringify
print $x; # perl does it for you, too, quite often
You can force the type to be a number by numifying it:
my $x = "3"; # some variable containing a string
$x += 0; # numify it, ensuring it will be dumped as a number
$x *= 1; # same thing, the choice is yours.
You can not currently force the type in other, less obscure, ways.
Note that numerical precision has the same meaning as under Perl (so
binary to decimal conversion follows the same rules as in Perl, which
can differ to other languages). Also, your perl interpreter might expose
extensions to the floating point numbers of your platform, such as
infinities or NaN's - these cannot be represented in JSON, and it is an
error to pass those in.
=item Big Number
When C<allow_bignum> is enabled,
C<encode> converts C<Math::BigInt> objects and C<Math::BigFloat>
objects into JSON numbers.
=back
=head1 UNICODE HANDLING ON PERLS
If you do not know about Unicode on Perl well,
please check L<JSON::XS/A FEW NOTES ON UNICODE AND PERL>.
=head2 Perl 5.8 and later
Perl can handle Unicode and the JSON::PP de/encode methods also work properly.
$json->allow_nonref->encode(chr hex 3042);
$json->allow_nonref->encode(chr hex 12345);
Returns C<"\u3042"> and C<"\ud808\udf45"> respectively.
$json->allow_nonref->decode('"\u3042"');
$json->allow_nonref->decode('"\ud808\udf45"');
Returns UTF-8 encoded strings with UTF8 flag, regarded as C<U+3042> and C<U+12345>.
Note that in the versions from Perl 5.8.0 to 5.8.2, the Perl built-in C<join> was broken,
so JSON::PP wraps C<join> with a subroutine. Thus JSON::PP works slowly in those versions.
=head2 Perl 5.6
Perl can handle Unicode and the JSON::PP de/encode methods also work.
=head2 Perl 5.005
Perl 5.005 is a byte semantics world -- all strings are sequences of bytes.
That means the unicode handling is not available.
In encoding,
$json->allow_nonref->encode(chr hex 3042); # hex 3042 is 12354.
$json->allow_nonref->encode(chr hex 12345); # hex 12345 is 74565.
Returns C<B> and C<E>, as C<chr> takes a value more than 255, it treats
as C<$value % 256>, so the above codes are equivalent to :
$json->allow_nonref->encode(chr 66);
$json->allow_nonref->encode(chr 69);
In decoding,
$json->decode('"\u00e3\u0081\u0082"');
The returned is a byte sequence C<0xE3 0x81 0x82> for UTF-8 encoded
japanese character (C<HIRAGANA LETTER A>).
And if it is represented in Unicode code point, C<U+3042>.
Next,
$json->decode('"\u3042"');
We ordinarily expect the returned value to be a Unicode character C<U+3042>.
But here is 5.005 world. This is C<0xE3 0x81 0x82>.
$json->decode('"\ud808\udf45"');
This is not a character C<U+12345> but bytes - C<0xf0 0x92 0x8d 0x85>.
=head1 TODO
=over
=item speed
=item memory saving
=back
=head1 SEE ALSO
Most of the document are copied and modified from JSON::XS doc.
L<JSON::XS>
RFC4627 (L<http://www.ietf.org/rfc/rfc4627.txt>)
=head1 AUTHOR
Makamaka Hannyaharamitu, E<lt>makamaka[at]cpan.orgE<gt>
=head1 COPYRIGHT AND LICENSE
Copyright 2007-2012 by Makamaka Hannyaharamitu
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself.
=cut
| jcubic/leash | cgi-bin/JSON/backportPP.pm | Perl | mit | 82,128 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.