code
stringlengths
2
1.05M
repo_name
stringlengths
5
101
path
stringlengths
4
991
language
stringclasses
3 values
license
stringclasses
5 values
size
int64
2
1.05M
# VMOMI::VmClonedEvent - vSphere event type raised when a VM clone completes.
package VMOMI::VmClonedEvent;
use parent 'VMOMI::VmCloneEvent';

use strict;
use warnings;

# Ancestor chain of this VMOMI type, nearest parent first.
our @class_ancestors = qw(
    VmCloneEvent
    VmEvent
    Event
    DynamicData
);

# Members declared at this level: [ name, VMOMI type, is-array flag ].
our @class_members = (
    [ 'sourceVm', 'VmEventArgument', 0 ],
);

# Return the list of ancestor class names for this type.
sub get_class_ancestors {
    return @class_ancestors;
}

# Return the inherited members followed by the members declared here.
sub get_class_members {
    my $class = shift;
    return ($class->SUPER::get_class_members(), @class_members);
}

1;
stumpr/p5-vmomi
lib/VMOMI/VmClonedEvent.pm
Perl
apache-2.0
475
#!/usr/bin/perl
#
# Copyright © 2013 Inria.  All rights reserved.
# Copyright (c) 2013-2014 University of Wisconsin-La Crosse.
#                         All rights reserved.
#
# See COPYING in top-level directory.
#
# $HEADER$
#
# Find per-subnet raw InfiniBand dumps under --raw-dir (an
# ib-subnet-<guid>.txt file plus a matching ibroutes-<guid>/ directory)
# and run netloc_reader_ib on each one, writing netloc data to --out-dir.

use strict;
use warnings;    # was missing; the script only enabled strict
use Getopt::Long;

my $rawdir = "./ib-raw";
my $outdir = "./netloc";
my $verbose;
my $help;

Getopt::Long::Configure("bundling");
my $ok = Getopt::Long::GetOptions(
    "out-dir|o=s" => \$outdir,
    "raw-dir|r=s" => \$rawdir,
    "verbose|v"   => \$verbose,
    "help|h"      => \$help,
);

if (!$ok or !defined $rawdir or defined $help) {
    print "Input directory with raw IB data must be specified with\n";
    print "  --raw-dir <dir> (default is ./ib-raw)\n";
    print "Output directory for netloc data can be specified with\n";
    print "  --out-dir <dir> (default is ./netloc)\n";
    print "Verbose\n";
    print "  --verbose | -v\n";
    print "Help\n";
    print "  --help | -h\n";
    exit -1;
}

my @subnets;
my %ibnetdiscover_files;
my %ibroutes_files;

# Collect the subnets that have both an ibnetdiscover dump and a routes dir.
# Lexical dirhandle + error check instead of the old bareword DIR handle.
opendir my $dh, $rawdir
    or die "Failed to open raw IB input directory $rawdir\n";
while (my $file = readdir($dh)) {
    if ($file =~ /^ib-subnet-([0-9a-fA-F:]{19}).txt$/) {
        my $subnet = $1;
        next unless -f "$rawdir/$file";
        next unless -d "$rawdir/ibroutes-$subnet";
        push @subnets, $subnet;
        $ibnetdiscover_files{$subnet} = "$rawdir/$file";
        $ibroutes_files{$subnet}      = "$rawdir/ibroutes-$subnet/";
    }
}
closedir $dh;

if (!-d $outdir) {
    mkdir $outdir or die "Cannot create output directory ($!)";
}

###############################################################################

foreach my $subnet (@subnets) {
    print "-" x 70 . "\n";
    print "Processing Subnet: " . $subnet . "\n";
    print "-" x 70 . "\n";
    print "-" x 15 . " General Network Information\n";

    my $cmd = "netloc_reader_ib"
            . " --subnet "   . $subnet
            . " --outdir "   . $outdir
            . " --file "     . $ibnetdiscover_files{$subnet}
            . " --routedir " . $ibroutes_files{$subnet};

    # Quiet unless --verbose was given; -p asks the reader for progress output.
    if (!defined $verbose) {
        $cmd .= " 1>/dev/null";
    }
    else {
        $cmd .= " -p ";
    }

    # Previously the exit status was silently ignored; report failures.
    system($cmd) == 0
        or warn "netloc_reader_ib failed for subnet $subnet\n";
}

exit 0;
open-mpi/netloc
tools/gather_ib/netloc_ib_extract_dats.pl
Perl
bsd-3-clause
2,139
###############################################################################
## Copyright 2005-2016 OCSInventory-NG/OCSInventory-Server contributors.
## See the Contributors file for more details about them.
##
## This file is part of OCSInventory-NG/OCSInventory-ocsreports.
##
## OCSInventory-NG/OCSInventory-Server is free software: you can redistribute
## it and/or modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation, either version 2 of the License,
## or (at your option) any later version.
##
## OCSInventory-NG/OCSInventory-Server is distributed in the hope that it
## will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
## of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with OCSInventory-NG/OCSInventory-ocsreports. If not, write to the
## Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
## MA 02110-1301, USA.
################################################################################
# Apache::Ocsinventory::Server::Modperl1
#
# Thin compatibility layer exposing one API over mod_perl 1 and mod_perl 2
# (the latter through Apache::compat).  Exports the HTTP status constants
# and the header helper wrappers used by the OCS Inventory server code.
################################################################################
package Apache::Ocsinventory::Server::Modperl1;

use mod_perl;

# True when running under mod_perl 2 (its $VERSION jumped to the 1.99 betas).
use constant MP2 => $mod_perl::VERSION >= 1.99;

require Exporter;
our @ISA    = qw(Exporter);
our @EXPORT = qw(
    APACHE_SERVER_ERROR
    APACHE_FORBIDDEN
    APACHE_OK
    APACHE_BAD_REQUEST
    _set_http_header
    _set_http_content_type
    _get_http_header
    _send_http_headers
);

# Load the constants module that matches the running mod_perl generation.
BEGIN {
    if (MP2) {
        require Apache::compat;
        Apache::compat->import();
        require Apache::Const;
        Apache::Const->import(-compile => qw(:common :http));
    }
    else {
        require Apache::Constants;
        Apache::Constants->import(qw(:common :response));
    }
}

# Re-export the Apache status codes under version-independent names.
use constant APACHE_SERVER_ERROR => MP2 ? Apache::SERVER_ERROR : Apache::Constants::SERVER_ERROR;
use constant APACHE_FORBIDDEN    => MP2 ? Apache::FORBIDDEN    : Apache::Constants::FORBIDDEN;
use constant APACHE_OK           => MP2 ? Apache::OK           : Apache::Constants::OK;
use constant APACHE_BAD_REQUEST  => MP2 ? Apache::BAD_REQUEST  : Apache::Constants::BAD_REQUEST;

# Set an outgoing HTTP response header on the request object.
sub _set_http_header {
    my ($header, $value, $r) = @_;
    $r->header_out($header => $value);
    return 0;
}

# Set the response Content-Type.
sub _set_http_content_type {
    my ($type, $r) = @_;
    $r->content_type($type);
}

# Read a header from the incoming request.
sub _get_http_header {
    my ($header, $r) = @_;
    return $r->headers_in->{$header};
}

# Flush the response headers to the client.
sub _send_http_headers {
    my $r = shift;
    $r->send_http_header;
}

1;
himynameismax/codeigniter
ocsinventory-server/perl/Apache/Ocsinventory/Server/Modperl1.pm
Perl
mit
2,552
#!/usr/bin/perl -w
# libz.pm - small teaching library of DNA helper routines
# (Beginning Perl for Bioinformatics exercises).

use strict;
use warnings;

############################################################################
# 01 - egyesit: concatenate two DNA strings and return the joined sequence.
############################################################################
sub egyesit {
    my ($DNA1, $DNA2) = @_;
    # The original assigned the concatenation back into a two-element list
    # (leaving the second slot undef) and had an unreachable second return;
    # a single return of the joined string is what every caller receives.
    return $DNA1 . $DNA2;
}

############################################################################
# Internal helper: percentage of bases in $DNA matching the character
# class $chars (e.g. 'Aa').  Returns 0 for an empty sequence instead of
# dying with a division by zero.
############################################################################
sub _nukleotid_szazalek {
    my ($DNA, $chars) = @_;
    my $ossz = length $DNA;
    return 0 unless $ossz;                     # empty sequence guard
    my $count = () = $DNA =~ /[$chars]/g;      # count matching bases
    return $count / $ossz * 100;
}

############################################################################
# 02 - Adenine percentage (prints it).
############################################################################
sub szazalek_a {
    my ($DNA) = @_;
    my $szazalek = _nukleotid_szazalek($DNA, 'Aa');
    print "ADENIN : $szazalek %\n";
}

############################################################################
# 03 - Guanine percentage (prints it).
############################################################################
sub szazalek_g {
    my ($DNA) = @_;
    my $szazalek = _nukleotid_szazalek($DNA, 'Gg');
    print "GUANIN : $szazalek %\n";
}

############################################################################
# 04 - Cytosine percentage (prints it).
############################################################################
sub szazalek_c {
    my ($DNA) = @_;
    my $szazalek = _nukleotid_szazalek($DNA, 'Cc');
    print "CITOZIN : $szazalek %\n";
}

############################################################################
# 05 - Thymine percentage (prints it).
############################################################################
sub szazalek_t {
    my ($DNA) = @_;
    my $szazalek = _nukleotid_szazalek($DNA, 'Tt');
    print "Timin : $szazalek %\n";
}

############################################################################
# 07 - Print the percentage of all four nucleotides.
############################################################################
sub szazalek_osszes {
    my ($DNA) = @_;
    my $aszazalek = _nukleotid_szazalek($DNA, 'Aa');
    my $gszazalek = _nukleotid_szazalek($DNA, 'Gg');
    my $cszazalek = _nukleotid_szazalek($DNA, 'Cc');
    my $tszazalek = _nukleotid_szazalek($DNA, 'Tt');
    print "ADENIN : $aszazalek %\n";
    print "GUANIN : $gszazalek %\n";
    print "CITOZIN : $cszazalek %\n";
    print "TIMIN : $tszazalek %\n";
}

############################################################################
# 08 - Prompt on stdin and answer (demo of interactive input).
############################################################################
sub kerdes {
    print "Irj be valamit!\n";
    my $i = <STDIN>;
    $i = '' unless defined $i;   # EOF on stdin would make chomp warn
    chomp $i;
    print "es a gep valoszol";
}

############################################################################
# 09 - Scan command-line arguments for a help flag; prints the usage hint
# and returns true when one of -h / -help / --help is present.
#
# The original matched the whole array in scalar context (its element
# count) against the patterns, which could never succeed, and returned an
# undeclared $i - a compile error under strict.  Scan the individual
# arguments instead.
############################################################################
sub help {
    my (@args) = @_;
    my $wants_help =
        grep { $_ eq '-h' or $_ eq '-help' or $_ eq '--help' } @args;
    if ($wants_help) {
        print "Olvasd el a program dokumentációját";
    }
    return $wants_help;
}

1;
amidoimidazol/bio_info
Beginning Perl for Bioinformatics/libz/libz.pm
Perl
mit
4,503
# OpenXPKI::Server::Workflow::Activity::Reports::CertExport::GenerateExportFile
# Written by Oliver Welter for the OpenXPKI project 2013
# Copyright (c) 2013 by The OpenXPKI Project
package OpenXPKI::Server::Workflow::Activity::Reports::CertExport::GenerateExportFile;

use strict;
use base qw( OpenXPKI::Server::Workflow::Activity );

use OpenXPKI::Server::Context qw( CTX );
use OpenXPKI::Exception;
use OpenXPKI::Debug;
use English;
use OpenXPKI::Serialization::Simple;
use MIME::Base64;
use XML::Simple;
use OpenXPKI::FileUtils;
use File::Temp;
use DateTime;    # fix: DateTime->now is called below but was never loaded
use Data::Dumper;

# Build one XML export file for the next batch of certificates queued in the
# datapool; see the POD at the end of this file for the context contract.
sub execute {
    ##! 1: 'execute'
    my $self = shift;
    my $workflow = shift;
    my $context = $workflow->context();
    my $ser = OpenXPKI::Serialization::Simple->new();

    my $key_namespace = $context->param( 'key_namespace' );

    # Clear the target params (just in case we missed it)
    $context->param( 'exported_cert_ids' , '');
    $context->param( 'xml_filename' , '' );
    $context->param( 'xml_targetname' , '' );

    # Step 1 - find exportable certificates.
    # We load the list of certificates from the datapool.
    my $dp_cert_to_export = CTX('api')->list_data_pool_entries({
        'NAMESPACE' => $context->param( 'queue_namespace' ),
        'LIMIT'     => $context->param( 'max_records' )
    });

    # Nothing to do
    if (! scalar @{$dp_cert_to_export}) {
        CTX('log')->application()->info('Certificate export - nothing to do');
        return 1;
    }

    ##! 16: 'cert to export ' . Dumper $dp_cert_to_export

    # Step 2 - load encryption certs and prepare token
    my $enc_cert_ids = $context->param( 'enc_cert_ids' );
    if ($enc_cert_ids) {
        $enc_cert_ids = $ser->deserialize( $enc_cert_ids ); # Array of identifiers
    }
    ##! 16: 'enc_cert_ids ' . Dumper $enc_cert_ids

    my $token = CTX('api')->get_default_token();
    my @enc_certs;
    if ($enc_cert_ids) {
        foreach my $ident (@{$enc_cert_ids}) {
            my $cert = CTX('api')->get_cert ({ IDENTIFIER => $ident, FORMAT => 'PEM'});
            push @enc_certs, $cert;
        }
    } else {
        CTX('log')->application()->info('No encryption targets given, wont search for private keys');
    }

    my @xmlout;
    my @exported;

    # Step 3 - iterate over export list
    foreach my $cert (@{$dp_cert_to_export}) {

        my $cert_identifier = $cert->{KEY};
        ##! 16: ' Exporting cert ' . $cert_identifier

        # Step 3a - fetch PEM
        my $cert_pem = CTX('api')->get_cert ({ IDENTIFIER => $cert_identifier, FORMAT => 'PEM'});

        my $cert_xml = { id => $cert_identifier };

        # Attributes for the certificate tag are taken from the datapool.
        # NOTE(review): the whole datapool entry hashref is passed through
        # here - confirm get_data_pool_entry accepts this form.
        my $attributes = CTX('api')->get_data_pool_entry( $cert );
        if ($attributes && $attributes->{VALUE}) {
            $attributes = $ser->deserialize( $attributes->{VALUE} );
            if (ref $attributes eq "HASH") {
                $cert_xml = $attributes;
            }
            # Something like { certType => "sig", EmployeeID => "sb2130", email => "user\@company.com" };
        }
        ##! 32: 'Basic attributes ' . Dumper $cert_xml

        # If no export targets are available we do not export the p12.
        if (@enc_certs) {
            # Step 3b - look for key
            my $msg = CTX('api')->get_data_pool_entry({
                NAMESPACE => $key_namespace,
                KEY       => $cert_identifier
            });

            # Block for escrow certs
            if ($msg && $msg->{VALUE}) {
                ##! 8: 'Escrow key found'

                # Step 3c - create p12 using random challenge
                my $p12_key = $token->command({
                    COMMAND       => 'create_random',
                    RANDOM_LENGTH => 32,
                });

                my $escrow_key = $msg->{VALUE};
                my $p12 = $token->command({
                    COMMAND       => 'create_pkcs12',
                    PASSWD        => 'OpenXPKI',
                    KEY           => $escrow_key,
                    CERT          => $cert_pem,
                    PKCS12_PASSWD => $p12_key
                });
                ##! 32: 'Created P12 ' . encode_base64( $p12 )

                # Step 3d - encrypt random challenge with target certificates
                ##! 16: 'Encrypting challenge using cert ' . $enc_certs[0]
                my $p7_secrets = $token->command({
                    COMMAND => 'pkcs7_encrypt',
                    CONTENT => $p12_key,
                    CERT    => \@enc_certs,
                    OUTFORM => 'DER'
                });
                ##! 32: 'Created P7 Keyfile ' . encode_base64( $p7_secrets )

                $cert_xml->{'pkcs12-container'} = {
                    "enc-password" => encode_base64( $p7_secrets, ''),
                    content        => encode_base64( $p12, '')
                };

                CTX('log')->application()->info("added private key to export for $cert_identifier");
                CTX('log')->audit('key')->info("private key export", { certid => $cert_identifier });
            }
        }

        # Strip envelope and line feeds from PEM encoding
        $cert_pem =~ s/\r\n//g;
        $cert_pem =~ s/-----\w+ CERTIFICATE-----//g;
        $cert_xml->{'x509-certificate'} = { format => "PEM", content => $cert_pem };

        push @xmlout, $cert_xml;
        push @exported, $cert_identifier;
    }

    # Write the assembled structure to a temp file kept after the process
    # ends (UNLINK => 0) so a later workflow step can pick it up.
    my $fh = File::Temp->new( UNLINK => 0, DIR => $context->param( 'tmpfile_tmpdir' ) );
    my $xs = XML::Simple->new(RootName => 'certificates', ContentKey => '-content', OutputFile => $fh);
    my $xml = $xs->XMLout( { certificate => \@xmlout } );

    # Change mode of the file if requested
    my $umask = $context->param( 'tmpfile_umask' );
    if ($umask) {
        ##! 32: 'Change umask ' . $umask
        chmod oct($umask), $fh->filename ;
    }

    # Put list of exported id in context to tag them later
    $context->param( 'exported_cert_ids' , $ser->serialize( \@exported ) );

    # Name of the xml file
    $context->param( 'xml_filename' , $fh->filename );

    # Target name - according to user spec
    my $date = DateTime->now( time_zone => 'UTC' );
    $context->param( 'xml_targetname' , $date->strftime("export_%Y%m%d_%H%M%S.xml") );

    CTX('log')->application()->info('Certificate export file has been generated: ' . $fh->filename);

    return 1;
}

1;

=head1 Name

OpenXPKI::Server::Workflow::Activity::Reports::CertExport::GenerateExportFile

=head1 Description

Use the config from the context to load the next batch of certs to be
exported. All certificates are written to one large XML file for transfer,
for certificates having an escrow key, a PKCS12 container with key and cert
is created and protected with a random passphrase. The passphrase is
exported as PKCS7 container, encrypted with the given target identifiers.

=head1 Configuration

The activity pulls its configuration from the workflow context:

=over

=item * max_records

max. number of certificates to put into one file

=item * key_namespace

The namespace in the datapool to search for a matching escrowed key

=item * queue_namespace

The namespace in the datapool to get the identifiers to be exported

=item * enc_cert_ids

Array of certificate identifiers to encrypt the p12 passwords to

=back

=head1 Return values

The following parameters are written to the context

=over

=item * exported_cert_ids

Array of certificate identifiers contained in the xml

=item * xml_filename

Name of the xml file on the local system (tempfile)

=item * xml_targetname

Expected name of the file on the target system after transfer

=back
stefanomarty/openxpki
core/server/OpenXPKI/Server/Workflow/Activity/Reports/CertExport/GenerateExportFile.pm
Perl
apache-2.0
7,652
=head1 SHCrossPowerDensityL =over 23 =item SHCrossPowerDensityL - Compute the spectral cross-power density of two real functions for a single degree L. =back =head1 SYNOPSIS =over 39 =item REAL*8 FUNCTION SHCrossPowerDensityL ( C1, C2, L ) =back =over 4 =over 10 =item REAL*8 C1(2, L+1, L+1), C2(2, L+1, L+1) =item INTEGER L =back =back =head1 DESCRIPTION I<SHCrossPowerDensityL> will calculate the spectral cross-power density of two functions expressed in real spherical harmonics for a single degree L. This is explicitly calculated as: Sum_{i=1}^2 Sum_{m=0}^L C1(i, L+1, m+1) * C2(i, L+1, m+1) / (2L + 1). =head1 ARGUMENTS =over 10 =item C1 (input) REAL*8, DIMENSION (2, L+1, L+1) The first function expressed in real spherical harmonics. =item C2 (input) REAL*8, DIMENSION (2, L+1, L+1) The second function expressed in real spherical harmonics. =item L (input) INTEGER The spherical harmonic degree for which the cross power density will be calculated. =back =head1 SEE ALSO shpowerl(1), shpowerdensityl(1), shcrosspowerl(1), shpowerspectrum(1), shpowerspectrumdensity(1), shcrosspowerspectrum(1), shcrosspowerspectrumdensity(1), shadmitcorr(1) L<http://www.ipgp.jussieu.fr/~wieczor/SHTOOLS/SHTOOLS.html> =head1 COPYRIGHT AND LICENSE Copyright 2005-2008 by Mark Wieczorek <wieczor@ipgp.fr>. This is free software; you can distribute and modify it under the terms of the revised BSD license.
pletnes/SHTOOLS
src/doc/shcrosspowerdensityl.pod
Perl
bsd-3-clause
1,440
#!/usr/bin/env perl
# lsb_reverse.pl - recover a message hidden in the least significant bits of
# a BMP file: one payload bit per colour byte, MSB first, with every 4th byte
# of each pixel skipped.
#
# Usage: ./lsb_reverse.pl file.bmp

use strict;      # was missing entirely
use warnings;
use Math::BigInt;    # kept from the original (unused here)

my $mask = 0x01;

# Exactly one argument: the BMP file to read.
if ($#ARGV != 0) {
    print("\nUsage: ./mysteg.pl file.bmp\n");
    exit;
}

# Lexical handle, 3-arg open with error check (was a bareword 2-arg open),
# and binmode since BMP is binary data.
open my $bmp, '<', $ARGV[0] or die "Cannot open $ARGV[0]: $!\n";
binmode $bmp;

# Skip the 54-byte BMP header.
read($bmp, my $head, 54);

# Counter used to skip the unused 4th byte of each pixel.
my $rgb = 4;
# Weight of the payload bit currently being assembled (MSB first).
my $nbit = 128;
# Accumulator for the byte being rebuilt.
my $buffer = 0;
# Recovered message.
my $message = '';
# Sanity counters: pixel bytes read / characters decoded.
my $count1 = 0;
my $count2 = 0;

while (read($bmp, $byte_read(), 1)) {
}
AnyMaster/Ctoollhu
lsb_reverse.pl
Perl
mit
1,420
package DDG::Spice::What3Words; use DDG::Spice; use Net::Domain::TLD qw(tld_exists); use strict; spice is_cached => 1; spice proxy_cache_valid => "200 1d"; spice from => "([^/]+)/([^/]+)/([^/]+)"; spice to => 'https://api.what3words.com/v2/$1?key={{ENV{DDG_SPICE_W3W_APIKEY}}}&$2=$3&callback={{callback}}'; spice alt_to => { what3words_arcgis => { to => 'http://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/reverseGeocode?location=$1&distance=5000&f=json' } }; my $w3w_re = qr/(?:(?:what ?(?:3|three) ?words|w3w)\s)/; my $coord_re = qr/[+-]?[0-9]+(?:\.\d{1,6})?/; # Handles queries like: # what3words | w3w | what three words word.word.word triggers query_lc => qr/^(?:$w3w_re)(\p{L}{4,}+\.\p{L}{4,}+\.\p{L}{1,}+)$/i; # Handles queries like: # what3words | w3w | what three words +/-##.####, +/-###.##### triggers query_lc => qr/^(?:$w3w_re)($coord_re), ?($coord_re)$/i; handle matches => sub { my ($direction, $param, $remainder); # handle three word, forward geocode queries if (scalar @_ == 1){ $remainder = lc shift; my $end = pop @{[split(/\./, $remainder)]}; #split string into list, cast into array, pop last element return if tld_exists($end); #prevent searches for websites $direction = "forward"; $param = "addr"; } # handle lat/lon, reverse geocode queries else { my $lat = shift; my $lon = shift; return if abs $lat > 90 || abs $lon > 180; $remainder = "$lat,$lon"; $direction = "reverse"; $param = "coords"; } return $direction, $param, $remainder; }; 1;
lernae/zeroclickinfo-spice
lib/DDG/Spice/What3Words.pm
Perl
apache-2.0
1,640
# # Copyright 2017 Centreon (http://www.centreon.com/) # # Centreon is a full-fledged industry-strength solution that meets # the needs in IT infrastructure and application monitoring for # service performance. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # package cloud::nutanix::snmp::mode::clusterusage; use base qw(centreon::plugins::templates::counter); use strict; use warnings; use centreon::plugins::misc; my $instance_mode; my $cluster_name = ''; sub custom_status_threshold { my ($self, %options) = @_; my $status = 'ok'; my $message; eval { local $SIG{__WARN__} = sub { $message = $_[0]; }; local $SIG{__DIE__} = sub { $message = $_[0]; }; if (defined($instance_mode->{option_results}->{critical_status}) && $instance_mode->{option_results}->{critical_status} ne '' && eval "$instance_mode->{option_results}->{critical_status}") { $status = 'critical'; } elsif (defined($instance_mode->{option_results}->{warning_status}) && $instance_mode->{option_results}->{warning_status} ne '' && eval "$instance_mode->{option_results}->{warning_status}") { $status = 'warning'; } }; if (defined($message)) { $self->{output}->output_add(long_msg => 'filter status issue: ' . $message); } return $status; } sub custom_status_output { my ($self, %options) = @_; my $msg = 'status : ' . $self->{result_values}->{status}; return $msg; } sub custom_status_calc { my ($self, %options) = @_; $self->{result_values}->{status} = $options{new_datas}->{$self->{instance} . 
'_clusterStatus'}; $self->{result_values}->{display} = $options{new_datas}->{$self->{instance} . '_display'}; return 0; } sub custom_usage_perfdata { my ($self, %options) = @_; my $label = 'used'; my $value_perf = $self->{result_values}->{used}; if (defined($instance_mode->{option_results}->{free})) { $label = 'free'; $value_perf = $self->{result_values}->{free}; } my %total_options = (); if ($instance_mode->{option_results}->{units} eq '%') { $total_options{total} = $self->{result_values}->{total}; $total_options{cast_int} = 1; } $self->{output}->perfdata_add(label => $label, unit => 'B', value => $value_perf, warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning-' . $self->{label}, %total_options), critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical-' . $self->{label}, %total_options), min => 0, max => $self->{result_values}->{total}); } sub custom_usage_threshold { my ($self, %options) = @_; my ($exit, $threshold_value); $threshold_value = $self->{result_values}->{used}; $threshold_value = $self->{result_values}->{free} if (defined($instance_mode->{option_results}->{free})); if ($instance_mode->{option_results}->{units} eq '%') { $threshold_value = $self->{result_values}->{prct_used}; $threshold_value = $self->{result_values}->{prct_free} if (defined($instance_mode->{option_results}->{free})); } $exit = $self->{perfdata}->threshold_check(value => $threshold_value, threshold => [ { label => 'critical-' . $self->{label}, exit_litteral => 'critical' }, { label => 'warning-'. 
$self->{label}, exit_litteral => 'warning' } ]); return $exit; } sub custom_usage_output { my ($self, %options) = @_; my ($total_size_value, $total_size_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{total}); my ($total_used_value, $total_used_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{used}); my ($total_free_value, $total_free_unit) = $self->{perfdata}->change_bytes(value => $self->{result_values}->{free}); my $msg = sprintf("Usage Total: %s Used: %s (%.2f%%) Free: %s (%.2f%%)", $total_size_value . " " . $total_size_unit, $total_used_value . " " . $total_used_unit, $self->{result_values}->{prct_used}, $total_free_value . " " . $total_free_unit, $self->{result_values}->{prct_free}); return $msg; } sub custom_usage_calc { my ($self, %options) = @_; $self->{result_values}->{display} = $options{new_datas}->{$self->{instance} . '_display'}; $self->{result_values}->{total} = $options{new_datas}->{$self->{instance} . '_clusterTotalStorageCapacity'}; $self->{result_values}->{used} = $options{new_datas}->{$self->{instance} . 
'_clusterUsedStorageCapacity'}; $self->{result_values}->{free} = $self->{result_values}->{total} - $self->{result_values}->{used}; $self->{result_values}->{prct_used} = $self->{result_values}->{used} * 100 / $self->{result_values}->{total}; $self->{result_values}->{prct_free} = 100 - $self->{result_values}->{prct_used}; return 0; } sub set_counters { my ($self, %options) = @_; $self->{maps_counters_type} = [ { name => 'cluster', type => 0, cb_prefix_output => 'prefix_cluster_output', skipped_code => { -10 => 1 } }, ]; $self->{maps_counters}->{cluster} = [ { label => 'status', threshold => 0, set => { key_values => [ { name => 'clusterStatus' } ], closure_custom_calc => $self->can('custom_status_calc'), closure_custom_output => $self->can('custom_status_output'), closure_custom_perfdata => sub { return 0; }, closure_custom_threshold_check => $self->can('custom_status_threshold'), } }, { label => 'usage', set => { key_values => [ { name => 'clusterTotalStorageCapacity' }, { name => 'clusterUsedStorageCapacity' } ], closure_custom_calc => $self->can('custom_usage_calc'), closure_custom_output => $self->can('custom_usage_output'), closure_custom_perfdata => $self->can('custom_usage_perfdata'), closure_custom_threshold_check => $self->can('custom_usage_threshold'), } }, { label => 'avg-latency', set => { key_values => [ { name => 'clusterLatency' } ], output_template => 'Average Latency : %s µs', perfdatas => [ { label => 'avg_latency', value => 'clusterLatency_absolute', template => '%s', unit => 'µs', min => 0 }, ], } }, { label => 'iops', set => { key_values => [ { name => 'clusterIops' } ], output_template => 'IOPs : %s', perfdatas => [ { label => 'iops', value => 'clusterIops_absolute', template => '%s', unit => 'iops', min => 0 }, ], } }, ]; } sub new { my ($class, %options) = @_; my $self = $class->SUPER::new(package => __PACKAGE__, %options); bless $self, $class; $self->{version} = '1.0'; $options{options}->add_options(arguments => { "warning-status:s" => { name 
=> 'warning_status', default => '' }, "critical-status:s" => { name => 'critical_status', default => '' }, "units:s" => { name => 'units', default => '%' }, "free" => { name => 'free' }, }); return $self; } sub check_options { my ($self, %options) = @_; $self->SUPER::check_options(%options); $instance_mode = $self; $self->change_macros(); } sub change_macros { my ($self, %options) = @_; foreach (('warning_status', 'critical_status')) { if (defined($self->{option_results}->{$_})) { $self->{option_results}->{$_} =~ s/%\{(.*?)\}/\$self->{result_values}->{$1}/g; } } } sub prefix_cluster_output { my ($self, %options) = @_; return "Cluster '" . $cluster_name . "' "; } my $mapping = { clusterName => { oid => '.1.3.6.1.4.1.41263.501' }, clusterStatus => { oid => '.1.3.6.1.4.1.41263.503' }, clusterTotalStorageCapacity => { oid => '.1.3.6.1.4.1.41263.504' }, clusterUsedStorageCapacity => { oid => '.1.3.6.1.4.1.41263.505' }, clusterIops => { oid => '.1.3.6.1.4.1.41263.506' }, clusterLatency => { oid => '.1.3.6.1.4.1.41263.507' }, }; my $oid_nutanix = '.1.3.6.1.4.1.41263'; sub manage_selection { my ($self, %options) = @_; if ($options{snmp}->is_snmpv1()) { $self->{output}->add_option_msg(short_msg => "Need to use SNMP v2c or v3."); $self->{output}->option_exit(); } my $snmp_result = $options{snmp}->get_table(oid => $oid_nutanix, start => $mapping->{clusterName}->{oid}, end => $mapping->{clusterLatency}->{oid}, nothing_quit => 1); my $result = $options{snmp}->map_instance(mapping => $mapping, results => $snmp_result, instance => '0'); $self->{cluster} = { %$result }; $cluster_name = centreon::plugins::misc::trim($result->{clusterName}); } 1; __END__ =head1 MODE Check cluster usage. =over 8 =item B<--filter-counters> Only display some counters (regexp can be used). Example: --filter-counters='^usage$' =item B<--warning-status> Set warning threshold for status. Can used special variables like: %{status}, %{display} =item B<--critical-status> Set critical threshold for status. 
Can used special variables like: %{status}, %{display} =item B<--warning-*> Threshold warning. Can be: 'usage', 'avg-latency', 'iops'. =item B<--critical-*> Threshold critical. Can be: 'usage', 'avg-latency', 'iops'. =item B<--units> Units of thresholds (Default: '%') ('%', 'B'). =item B<--free> Thresholds are on free space left. =back =cut
Shini31/centreon-plugins
cloud/nutanix/snmp/mode/clusterusage.pm
Perl
apache-2.0
10,547
package platform::Windows::cppbuilder;

use strict;
use warnings;

# Inherit all behaviour from the MSVC platform description; C++Builder only
# differs in its debug-info file extension and its export-name mangling.
# (require + our @ISA replaces the obsolete `use vars` idiom; behaviour is
# identical.)
require platform::Windows::MSVC;
our @ISA = qw(platform::Windows::MSVC);

# C++Builder writes debug information to Turbo Debugger Symbol (.tds) files
# instead of MSVC-style .pdb files.
sub pdbext { '.tds' }

# C++Builder's Clang-based compilers prepend an underscore to __cdecl-convention
# C functions, and the linker needs those as the InternalName in the .def file.
sub export2internal {
    my ($self, $name) = @_;
    return "_$name";
}

1;
jens-maus/amissl
openssl/Configurations/platform/Windows/cppbuilder.pm
Perl
bsd-3-clause
376
#!/usr/bin/env perl

# run-test-suites.pl
#
# This file is part of mbed TLS (https://tls.mbed.org)
#
# Copyright (c) 2015-2018, ARM Limited, All Rights Reserved

=head1 SYNOPSIS

Execute all the test suites and print a summary of the results.

 run-test-suites.pl [[-v|--verbose] [VERBOSITY]] [--skip=SUITE[...]]

Options:
  -v|--verbose        Print detailed failure information.
  -v 2|--verbose=2    Print detailed failure information and summary messages.
  -v 3|--verbose=3    Print detailed information about every test case.
  --skip=SUITE[,SUITE...]
                      Skip the specified SUITE(s). This option can be used
                      multiple times.

=cut

use warnings;
use strict;

use utf8;
use open qw(:std utf8);

use Getopt::Long qw(:config auto_help gnu_compat);
use Pod::Usage;

# --verbose is optional-valued: bare -v means 1; --skip may be repeated and
# each occurrence is accumulated into @skip_patterns.
my $verbose = 0;
my @skip_patterns = ();
GetOptions(
           'skip=s' => \@skip_patterns,
           'verbose|v:1' => \$verbose,
          ) or die;

# All test suites = executable files, excluding source files, debug
# and profiling information, etc. We can't just grep {! /\./} because
# some of our test cases' base names contain a dot.
my @suites = grep { -x $_ || /\.exe$/ } glob 'test_suite_*';
@suites = grep { !/\.c$/ && !/\.data$/ && -f } @suites;
die "$0: no test suite found\n" unless @suites;

# "foo" as a skip pattern skips "test_suite_foo" and "test_suite_foo.bar"
# but not "test_suite_foobar".
# NOTE(review): the second substitution below replaces '.' with '.', which is
# a no-op; the comment suggests the intent was s/\./\\./g (escape literal
# dots so they do not act as regex wildcards) -- confirm against upstream.
my $skip_re =
    ( '\Atest_suite_(' .
      join('|', map {
          s/[ ,;]/|/g; # allow any of " ,;|" as separators
          s/\./\./g; # "." in the input means ".", not "any character"
          $_
      } @skip_patterns) .
      ')(\z|\.)' );

# in case test suites are linked dynamically
$ENV{'LD_LIBRARY_PATH'} = '../library';
$ENV{'DYLD_LIBRARY_PATH'} = '../library';

# On Windows the suite name is directly runnable; elsewhere prefix './'.
my $prefix = $^O eq "MSWin32" ? '' : './';

# Per-suite and cumulative counters; per-suite values are recomputed on each
# loop iteration, totals accumulate across all suites.
my ($failed_suites, $total_tests_run, $failed, $suite_cases_passed,
    $suite_cases_failed, $suite_cases_skipped, $total_cases_passed,
    $total_cases_failed, $total_cases_skipped );
my $suites_skipped = 0;

# Print $string centered in a field of $width columns, padded with $padchar.
sub pad_print_center {
    my( $width, $padchar, $string ) = @_;
    my $padlen = ( $width - length( $string ) - 2 ) / 2;
    print $padchar x( $padlen ), " $string ", $padchar x( $padlen ), "\n";
}

for my $suite (@suites)
{
    print "$suite ", "." x ( 72 - length($suite) - 2 - 4 ), " ";
    if( $suite =~ /$skip_re/o ) {
        print "SKIP\n";
        ++$suites_skipped;
        next;
    }

    my $command = "$prefix$suite";
    if( $verbose ) {
        $command .= ' -v';
    }
    # Capture the suite's full stdout; the counters below are derived from it.
    my $result = `$command`;

    # Count matches via list assignment in scalar context: `() = m//g` yields
    # the number of matches.
    $suite_cases_passed = () = $result =~ /.. PASS/g;
    $suite_cases_failed = () = $result =~ /.. FAILED/g;
    $suite_cases_skipped = () = $result =~ /.. ----/g;

    if( $result =~ /PASSED/ ) {
        print "PASS\n";
        if( $verbose > 2 ) {
            pad_print_center( 72, '-', "Begin $suite" );
            print $result;
            pad_print_center( 72, '-', "End $suite" );
        }
    } else {
        $failed_suites++;
        print "FAIL\n";
        if( $verbose ) {
            pad_print_center( 72, '-', "Begin $suite" );
            print $result;
            pad_print_center( 72, '-', "End $suite" );
        }
    }

    # Parse the suite's own "N / M tests ... K skipped" summary line.
    my ($passed, $tests, $skipped) =
        $result =~ /([0-9]*) \/ ([0-9]*) tests.*?([0-9]*) skipped/;
    $total_tests_run += $tests - $skipped;

    if( $verbose > 1 ) {
        print "(test cases passed:", $suite_cases_passed,
              " failed:", $suite_cases_failed,
              " skipped:", $suite_cases_skipped,
              " of total:", ($suite_cases_passed + $suite_cases_failed +
                             $suite_cases_skipped),
              ")\n"
    }

    $total_cases_passed += $suite_cases_passed;
    $total_cases_failed += $suite_cases_failed;
    $total_cases_skipped += $suite_cases_skipped;
}

print "-" x 72, "\n";
print $failed_suites ? "FAILED" : "PASSED";
printf( " (%d suites, %d tests run%s)\n",
        scalar(@suites) - $suites_skipped,
        $total_tests_run,
        $suites_skipped ? ", $suites_skipped suites skipped" : "" );

if( $verbose > 1 ) {
    print " test cases passed :", $total_cases_passed, "\n";
    print " failed :", $total_cases_failed, "\n";
    print " skipped :", $total_cases_skipped, "\n";
    print " of tests executed :", ( $total_cases_passed + $total_cases_failed ),
        "\n";
    print " of available tests :",
        ( $total_cases_passed + $total_cases_failed + $total_cases_skipped ),
        "\n";
    if( $suites_skipped != 0 ) {
        print "Note: $suites_skipped suites were skipped.\n";
    }
}

# Exit status reflects whether any suite failed (suites skipped do not fail).
exit( $failed_suites ? 1 : 0 );
shenki/skiboot
libstb/crypto/mbedtls/tests/scripts/run-test-suites.pl
Perl
apache-2.0
4,674
# just copy modules
# TODO: copy tests and try to run them...
# this file may be used as example on how to use comp.pl
#
# Copies the pure-Perl core modules listed in @files (populated at compile
# time by the BEGIN sub at the bottom of this file) from the host's ..\lib
# onto a Windows CE device, using miniperl + Cross + comp.pl as the transport.

my @files;
my %dirs;

# mk($relpath): ensure directory [p]\lib\$relpath exists on the CE device.
# Recursively creates parent directories first; %dirs memoizes paths already
# created so each cemkdir is issued at most once.
sub mk {
  my $r = shift;
  return if exists $dirs{$r};
  if ($r=~/\//) {
    $r=~/^(.*)\/[^\/]*?$/;
    mk($1);
  }
  print STDERR "..\\miniperl.exe -MCross comp.pl --do cemkdir [p]\\lib\\$r\n";
  system("..\\miniperl.exe -I..\\lib -MCross comp.pl --do cemkdir [p]\\lib\\$r");
  $dirs{$r}++;
}

# @files is already populated here: the BEGIN sub below runs at compile time,
# before this top-level loop executes.
for (@files) {
  # Make sure the target subdirectory exists before copying a nested module.
  if (/\//) {
    /^(.*)\/[^\/]*?$/;
    mk($1);
  }
  # currently no stripping POD
  system("..\\miniperl.exe -I..\\lib -MCross comp.pl --copy pc:..\\lib\\$_ ce:[p]\\lib\\$_");
}

# `sub BEGIN` is equivalent to a BEGIN block: it runs as soon as it is
# compiled, so @files is filled in before the runtime loop above starts.
sub BEGIN {
  @files = qw(
    attributes.pm AutoLoader.pm AutoSplit.pm autouse.pm base.pm Benchmark.pm
    bigint.pm bignum.pm bigrat.pm blib.pm bytes.pm Carp.pm charnames.pm
    Config.pm constant.pm Cwd.pm DB.pm diagnostics.pm Digest.pm DirHandle.pm
    Dumpvalue.pm DynaLoader.pm English.pm Env.pm Exporter.pm Fatal.pm
    fields.pm FileCache.pm FileHandle.pm filetest.pm FindBin.pm if.pm
    integer.pm less.pm locale.pm Memoize.pm NEXT.pm open.pm overload.pm
    PerlIO.pm re.pm SelectSaver.pm SelfLoader.pm Shell.pm sigtrap.pm sort.pm
    strict.pm subs.pm Switch.pm Symbol.pm Test.pm UNIVERSAL.pm utf8.pm
    vars.pm vmsish.pm warnings.pm XSLoader.pm
    warnings/register.pm
    Unicode/Collate.pm Unicode/UCD.pm
    Time/gmtime.pm Time/Local.pm Time/localtime.pm Time/tm.pm
    Tie/Array.pm Tie/File.pm Tie/Handle.pm Tie/Hash.pm Tie/Memoize.pm
    Tie/RefHash.pm Tie/Scalar.pm Tie/SubstrHash.pm
    Text/Abbrev.pm Text/Balanced.pm Text/ParseWords.pm Text/Soundex.pm
    Text/Tabs.pm Text/Wrap.pm
    Test/Builder.pm Test/Harness.pm Test/More.pm Test/Simple.pm
    Test/Harness/Assert.pm Test/Harness/Iterator.pm Test/Harness/Straps.pm
    Term/ANSIColor.pm Term/Cap.pm Term/Complete.pm Term/ReadLine.pm
    Search/Dict.pm
    Pod/Checker.pm Pod/Find.pm Pod/Functions.pm Pod/Html.pm
    Pod/InputObjects.pm Pod/LaTeX.pm Pod/Man.pm Pod/ParseLink.pm
    Pod/Parser.pm Pod/ParseUtils.pm Pod/Plainer.pm Pod/Select.pm
    Pod/Text.pm Pod/Usage.pm
    Pod/Text/Color.pm Pod/Text/Overstrike.pm Pod/Text/Termcap.pm
    Math/BigFloat.pm Math/BigInt.pm Math/BigRat.pm Math/Complex.pm
    Math/Trig.pm
    Math/BigInt/Calc.pm Math/BigInt/Trace.pm
    Math/BigFloat/Trace.pm
    Locale/Constants.pm Locale/Country.pm Locale/Currency.pm
    Locale/Language.pm Locale/Maketext.pm Locale/Script.pm
    IPC/Open2.pm IPC/Open3.pm
    I18N/Collate.pm I18N/LangTags.pm
    I18N/LangTags/List.pm
    Hash/Util.pm
    Getopt/Long.pm Getopt/Std.pm
    Filter/Simple.pm
    File/Basename.pm File/CheckTree.pm File/Compare.pm File/Copy.pm
    File/DosGlob.pm File/Find.pm File/Path.pm File/Spec.pm File/stat.pm
    File/Temp.pm
    File/Spec/Functions.pm File/Spec/Mac.pm File/Spec/Unix.pm
    File/Spec/Win32.pm
    ExtUtils/Command.pm ExtUtils/Constant.pm ExtUtils/Embed.pm
    ExtUtils/Install.pm ExtUtils/Installed.pm ExtUtils/Liblist.pm
    ExtUtils/MakeMaker.pm ExtUtils/Manifest.pm ExtUtils/Miniperl.pm
    ExtUtils/Mkbootstrap.pm ExtUtils/Mksymlists.pm ExtUtils/MM.pm
    ExtUtils/MM_Any.pm ExtUtils/MM_DOS.pm ExtUtils/MM_Unix.pm
    ExtUtils/MM_UWIN.pm ExtUtils/MM_Win32.pm ExtUtils/MM_Win95.pm
    ExtUtils/MY.pm ExtUtils/Packlist.pm ExtUtils/testlib.pm
    ExtUtils/Liblist/Kid.pm
    ExtUtils/Command/MM.pm
    Exporter/Heavy.pm
    Devel/SelfStubber.pm
    Class/ISA.pm Class/Struct.pm
    Carp/Heavy.pm
    Attribute/Handlers.pm
    Attribute/Handlers/demo/Demo.pm
    Attribute/Handlers/demo/Descriptions.pm
    Attribute/Handlers/demo/MyClass.pm
  );
}
Lh4cKg/sl4a
perl/src/win32/ce-helpers/cecopy-lib.pl
Perl
apache-2.0
4,054
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

package AI::MXNet::Gluon::Utils;
use strict;
use warnings;
use AI::MXNet::Base;
use AI::MXNet::Function::Parameters;
use Digest::SHA qw(sha1_hex);
use File::Path qw(make_path);
use HTTP::Tiny;
use Exporter;
use base qw(Exporter);
# Only download() is exported on request; the other helpers are class methods.
@AI::MXNet::Gluon::Utils::EXPORT_OK = qw(download);

=head1 NAME

    AI::MXNet::Gluon::Utils

=cut

=head1 DESCRIPTION

    Miscellaneous utilities.

=cut

=head2 split_data

    Splits an NDArray into `num_slice` slices along `batch_axis`.
    Usually used for data parallelism where each slices is sent
    to one device (i.e. GPU).

    Parameters
    ----------
    $data : NDArray
        A batch of data.
    $num_slice : int
        Number of desired slices.
    $batch_axis=0 : int, default 0
        The axis along which to slice.
    :$even_split=1 : bool, default True
        Whether to force all slices to have the same number of elements.
        If `True`, an error will be raised when `num_slice` does not evenly
        divide `data.shape[batch_axis]`.

    Returns
    -------
    array ref of NDArray
        Return value is a array ref even if `num_slice` is 1.

=cut

method split_data(AI::MXNet::NDArray $data, Int $num_slice, Int $batch_axis=0, Bool :$even_split=1)
{
    my $size = $data->shape->[$batch_axis];
    if($size < $num_slice)
    {
        Carp::confess(
            sprintf(
                "Too many slices for data with shape (%s). Arguments are ".
                "num_slice=%d and batch_axis=%d.",
                join(',', @{ $data->shape }), $num_slice, $batch_axis
            )
        );
    }
    if($even_split and $size % $num_slice != 0)
    {
        Carp::confess(
            sprintf(
                "data with shape %s cannot be evenly split into %d slices along axis %d. ".
                "Use a batch size that's multiple of %d or set even_split=False to allow ".
                "uneven partitioning of data.",
                join(',', @{ $data->shape }), $num_slice, $batch_axis, $num_slice
            )
        );
    }
    # Floor division; with even_split the remainder is zero, otherwise the
    # last slice absorbs the leftover elements.
    my $step = int($size/$num_slice);
    my $slices = [];
    if($batch_axis == 0)
    {
        # Fast path: plain slicing along the leading axis.
        for my $i (0 .. $num_slice-1)
        {
            if($i < $num_slice-1)
            {
                push @$slices, $data->slice([$i*$step, ($i+1)*$step-1]);
            }
            else
            {
                # Last slice runs to the end (may be larger when uneven).
                push @$slices, $data->slice([$i*$step, $size-1]);
            }
        }
    }
    elsif($even_split)
    {
        # Even split along a non-leading axis: let the engine do it in one op.
        $slices = AI::MXNet::NDArray->split($data, num_outputs => $num_slice, axis => $batch_axis);
    }
    else
    {
        # Uneven split along a non-leading axis: slice_axis per chunk.
        for my $i (0 .. $num_slice-1)
        {
            if($i < $num_slice-1)
            {
                push @$slices, $data->slice_axis($batch_axis, $i*$step, ($i+1)*$step);
            }
            else
            {
                push @$slices, $data->slice_axis($batch_axis, $i*$step, $size);
            }
        }
    }
    return $slices;
}

=head2 split_and_load

    Splits an NDArray into `len(ctx_list)` slices along `batch_axis` and loads
    each slice to one context in `ctx_list`.

    Parameters
    ----------
    $data : AcceptableInput
        A batch of data.
    :$ctx_list : list of Context
        A list of Contexts.
    :$batch_axis : int, default 0
        The axis along which to slice.
    :$even_split : bool, default True
        Whether to force all slices to have the same number of elements.

    Returns
    -------
    list of NDArray
        Each corresponds to a context in `ctx_list`.

=cut

method split_and_load(
    PDL|PDL::Matrix|ArrayRef|AI::MXNet::NDArray $data, ArrayRef[AI::MXNet::Context] :$ctx_list,
    Int :$batch_axis=0, Bool :$even_split=1
)
{
    # Coerce plain Perl / PDL input into an NDArray on the first context.
    if(not (blessed $data and $data->isa('AI::MXNet::NDArray')))
    {
        $data = AI::MXNet::NDArray->array($data, ctx => $ctx_list->[0])
    }
    # Single context: no splitting, just make sure the data lives there.
    if(@{ $ctx_list } == 1)
    {
        return [$data->as_in_context($ctx_list->[0])];
    }
    my $slices = __PACKAGE__->split_data($data, scalar(@$ctx_list), $batch_axis, $even_split);
    my @ret;
    for(zip($slices, $ctx_list)) {
        my ($i, $ctx) = @$_;
        push @ret, $i->as_in_context($ctx);
    }
    return \@ret;
}

=head2 clip_global_norm

    Rescales NDArrays so that the sum of their 2-norm is smaller than `max_norm`.

=cut

method clip_global_norm(ArrayRef[AI::MXNet::NDArray] $arrays, Num $max_norm)
{
    assert(@$arrays > 0);
    my $total_norm = 0;
    for my $arr (@$arrays)
    {
        # Flatten so dot($arr, $arr) yields the sum of squared elements.
        $arr = $arr->reshape([-1]);
        $total_norm += AI::MXNet::NDArray->dot($arr, $arr);
    }
    $total_norm = sqrt($total_norm->asscalar);
    # 1e-8 guards against division by zero for all-zero arrays.
    my $scale = $max_norm / ($total_norm + 1e-8);
    if($scale < 1)
    {
        # In-place rescale of the caller's arrays (intentional side effect).
        $_ *= $scale for @{ $arrays };
    }
    return $total_norm
}

=head2 check_sha1

    Check whether the sha1 hash of the file content matches the expected hash.

    Parameters
    ----------
    filename : str
        Path to the file.
    sha1_hash : str
        Expected sha1 hash in hexadecimal digits.

    Returns
    -------
    bool
        Whether the file content matches the expected hash.

=cut

func check_sha1(Str $filename, Str $sha1_hash)
{
    # Slurp the whole file: undef $/ disables line splitting for this scope.
    local($/) = undef;
    # NOTE(review): 2-arg open with a bareword filehandle, and no binmode()
    # before hashing -- on Windows the CRLF text layer could alter the bytes
    # being hashed. Consider 3-arg open + binmode; confirm before changing.
    open(F, $filename) or Carp::confess("can't open $filename $!");
    my $data = <F>;
    close(F);
    return sha1_hex($data) eq $sha1_hash;
}

=head2 download

    Download an given URL

    Parameters
    ----------
    $url : str
        URL to download
    :$path : str, optional
        Destination path to store downloaded file. By default stores to the
        current directory with same name as in url.
    :$overwrite : bool, optional
        Whether to overwrite destination file if already exists.
    :$sha1_hash : str, optional
        Expected sha1 hash in hexadecimal digits. Will ignore existing file
        when hash is specified but doesn't match.

    Returns
    -------
    str
        The file path of the downloaded file.

=cut

func download(Str $url, Maybe[Str] :$path=, Bool :$overwrite=0, Maybe[Str] :$sha1_hash=)
{
    my $fname;
    # Expand a leading '~' in the destination to $HOME.
    $path =~ s/~/$ENV{HOME}/ if defined $path;
    if(not defined $path)
    {
        # No path given: use the URL's basename in the current directory.
        $fname = (split(m[/], $url))[-1];
    }
    elsif(-d $path)
    {
        $fname = join('/', $path, (split(m[/], $url))[-1]);
    }
    else
    {
        $fname = $path;
    }
    # (Re)download when forced, when the file is missing, or when an expected
    # hash is given and the existing file does not match it.
    if($overwrite or not -f $fname or ($sha1_hash and not check_sha1($fname, $sha1_hash)))
    {
        $fname =~ s/~/$ENV{HOME}/;
        my $dirname = $fname;
        $dirname =~ s/[^\/]+$//;
        if(not -d $dirname)
        {
            make_path($dirname);
        }
        warn "Downloading $fname from $url ...\n";
        my $response = HTTP::Tiny->new->get($url);
        Carp::confess("download of url failed! ($response->{status} $response->{reason})\n")
            unless $response->{success};
        # NOTE(review): 2-arg open with interpolated filename and no binmode;
        # binary payloads could be corrupted on Windows. Confirm before fixing.
        open(F, ">$fname") or Carp::confess("can't open $fname: $!");
        print F $response->{content};
        close(F);
    }
    return $fname
}

1;
crazy-cat/incubator-mxnet
perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Utils.pm
Perl
apache-2.0
7,730
# Demonstrates Perl report formats: prints a fixed-width table row for each
# employee using the EMPLOYEE format below and write().
#
# Picture lines: '@<<<' fields are left-justified text; '@#####.##' is a
# right-justified number with two decimal places.  Every line between
# "format ... =" and the lone "." is emitted verbatim by write(), so no
# comments may appear inside the format body itself.
format EMPLOYEE=
=========================
@<<<<<<<<<<<<<<<<<< @<<
$name $age
@#####.##
$salary
=========================
.

select(STDOUT);   # make STDOUT the currently selected output handle
$~ = EMPLOYEE;    # bind the EMPLOYEE format to write() on that handle

# Parallel arrays of sample data: names, ages and salaries.
@n = ("Ali", "Runoob", "Linkerist");
@a = (20, 30, 40);
@s = (2000.00, 2500.00, 4000.00);

$i = 0;
foreach (@n) {
    # The format references the package globals $name, $age and $salary,
    # so they must be populated before each write().
    $name = $_;
    $age = $a[$i];
    $salary = $s[$i++];
    write;
}
doc-cloud/perl
format/width.pl
Perl
mit
331
use strict;
use Data::Dumper;
use Carp;

#
# This is a SAS Component
#
# Generated-style SEED/KBase CDMI dump script: streams every ContigSequence
# entity from the CDMI service to stdout as tab-separated rows.

=head1 NAME

all_entities_ContigSequence

=head1 SYNOPSIS

all_entities_ContigSequence [-a] [--fields fieldlist] > entity-data

=head1 DESCRIPTION

Return all instances of the ContigSequence entity.

ContigSequences are strings of DNA. Contigs have an associated
genome, but ContigSequences do not. We can think of random samples
of DNA as a set of ContigSequences. There are no length constraints
imposed on ContigSequences -- they can be either very short or very
long. The basic unit of data that is moved to/from the database
is the ContigChunk, from which ContigSequences are formed. The key
of a ContigSequence is the sequence's MD5 identifier.

Example:

    all_entities_ContigSequence -a

would retrieve all entities of type ContigSequence and include all fields
in the entities in the output.

=head2 Related entities

The ContigSequence entity has the following relationship links:

=over 4

=item HasSection ContigChunk

=item IsAlignedDNAComponentOf AlignmentRow

=item IsSequenceOf Contig

=back

=head1 COMMAND-LINE OPTIONS

Usage: all_entities_ContigSequence [arguments] > entity.data

    --fields list   Choose a set of fields to return. List is a comma-separated list of strings.
    -a              Return all available fields.
    --show-fields   List the available fields.

The following fields are available:

=over 4

=item length

number of base pairs in the contig

=back

=head1 AUTHORS

L<The SEED Project|http://www.theseed.org>

=cut

use Bio::KBase::CDMI::CDMIClient;
use Getopt::Long;

#Default fields

my @all_fields = ( 'length' );
# Lookup set used to validate user-supplied --fields values.
my %all_fields = map { $_ => 1 } @all_fields;

our $usage = <<'END';
Usage: all_entities_ContigSequence [arguments] > entity.data

    --fields list   Choose a set of fields to return. List is a comma-separated list of strings.
    -a              Return all available fields.
    --show-fields   List the available fields.

The following fields are available:

    length
        number of base pairs in the contig
END

my $a;
my $f;
my @fields;
my $show_fields;
my $help;
# The CDMI helper parses shared options and returns a client for this script.
my $geO = Bio::KBase::CDMI::CDMIClient->new_get_entity_for_script(
    "a"           => \$a,
    "show-fields" => \$show_fields,
    "h"           => \$help,
    "fields=s"    => \$f
);

if ($help)
{
    print $usage;
    exit 0;
}

if ($show_fields)
{
    print "Available fields:\n";
    print "\t$_\n" foreach @all_fields;
    exit 0;
}

# -a (all fields) and --fields are mutually exclusive; no positional args.
if (@ARGV != 0 || ($a && $f))
{
    print STDERR $usage, "\n";
    exit 1;
}

if ($a)
{
    @fields = @all_fields;
}
elsif ($f) {
    # Validate the requested field names, collecting unknown ones.
    my @err;
    for my $field (split(",", $f))
    {
        if (!$all_fields{$field})
        {
            push(@err, $field);
        }
        else
        {
            push(@fields, $field);
        }
    }
    if (@err)
    {
        print STDERR "all_entities_ContigSequence: unknown fields @err. Valid fields are: @all_fields\n";
        exit 1;
    }
}

# Page through the entity set one million ids at a time until the service
# returns an empty hash.
my $start = 0;
my $count = 1_000_000;

my $h = $geO->all_entities_ContigSequence($start, $count, \@fields );

while (%$h)
{
    while (my($k, $v) = each %$h)
    {
        # Array-valued fields are flattened with commas into a single column.
        print join("\t", $k, map { ref($_) eq 'ARRAY' ? join(",", @$_) : $_ } @$v{@fields}), "\n";
    }
    $start += $count;
    $h = $geO->all_entities_ContigSequence($start, $count, \@fields);
}
kbase/kb_seed
scripts/all_entities_ContigSequence.pl
Perl
mit
3,230
#!/usr/bin/perl -w
use strict;

# ROSALIND "SPLC": read a DNA string plus a list of introns, splice the
# introns out, transcribe the remaining exons to RNA, and print the
# translated protein string.
#
# Standard RNA codon table.  The stop codons (UAA, UAG, UGA) map to the
# empty string, so they contribute nothing to the protein.
my %tab = (
    UUU => 'F', CUU => 'L', AUU => 'I', GUU => 'V',
    UUC => 'F', CUC => 'L', AUC => 'I', GUC => 'V',
    UUA => 'L', CUA => 'L', AUA => 'I', GUA => 'V',
    UUG => 'L', CUG => 'L', AUG => 'M', GUG => 'V',
    UCU => 'S', CCU => 'P', ACU => 'T', GCU => 'A',
    UCC => 'S', CCC => 'P', ACC => 'T', GCC => 'A',
    UCA => 'S', CCA => 'P', ACA => 'T', GCA => 'A',
    UCG => 'S', CCG => 'P', ACG => 'T', GCG => 'A',
    UAU => 'Y', CAU => 'H', AAU => 'N', GAU => 'D',
    UAC => 'Y', CAC => 'H', AAC => 'N', GAC => 'D',
    UAA => '',  CAA => 'Q', AAA => 'K', GAA => 'E',
    UAG => '',  CAG => 'Q', AAG => 'K', GAG => 'E',
    UGU => 'C', CGU => 'R', AGU => 'S', GGU => 'G',
    UGC => 'C', CGC => 'R', AGC => 'S', GGC => 'G',
    UGA => '',  CGA => 'R', AGA => 'R', GGA => 'G',
    UGG => 'W', CGG => 'R', AGG => 'R', GGG => 'G'
);

# Input file: first line is the DNA string, each subsequent line an intron.
die "No file given\n" unless (@ARGV);
open my $in, '<', $ARGV[0] or die "$ARGV[0]: $!";
my ($dna, @introns) = <$in>;
close $in;

$dna =~ s/[ \r\n]+$//;    # strip trailing whitespace/newline from the DNA

# Build a single alternation of all introns, removing any embedded
# whitespace (the trailing newlines of each input line).
my $intron_str = join '|', @introns;
$intron_str =~ s/\s//gs;

# Splice out every intron occurrence, leaving only the exons.
# Guard the empty-pattern case: with no introns, $intron_str is '' and
# s//...//g would silently reuse the LAST successful pattern (a classic
# Perl gotcha), corrupting the sequence.
my $exons = $dna;
$exons =~ s/$intron_str//g if length $intron_str;

$exons =~ tr/T/U/;        # transcribe DNA -> RNA
my $prot = r2p($exons);
print "$prot\n";

# r2p($rna): translate an RNA string to a protein string, one codon (three
# bases) at a time.  Trailing bases that do not form a full codon are
# ignored; codons missing from %tab (e.g. containing ambiguity codes)
# contribute nothing instead of raising an uninitialized-value warning.
sub r2p {
    my $rna = shift;
    return join('',
                map { defined $tab{$_} ? $tab{$_} : '' }
                $rna =~ m/([A-Z]{3})/g);
}
tsnowlan/rosalind
splc.pl
Perl
mit
1,528
/**	<module> parse_conf
 *
 *	Build an editable AST out of a set of markup configuration files.
 *	See https://collectd.org/documentation/manpages/collectd.conf.5.shtml
 *
 *	This config file controls how the system statistics collection daemon
 *	collectd behaves. The most significant option is LoadPlugin, which
 *	controls which plugins to load. These plugins ultimately define
 *	collectd's behavior. If the AutoLoadPlugin option has been enabled,
 *	the explicit LoadPlugin lines may be omitted for all plugins with a
 *	configuration block, i.e. a <Plugin...> block.
 *	--------
 *
 *	source file /home/carlo/develop/work/karsten/loadavgd/configEditor/proto/pl/parse_conf.pl
 *	created at mer set 16 06:59:56 2015
 *
 *	License : MIT
 *	Copyright (c) 2015,2016 Sputnik7
 */

:- module(parse_conf,
	[parse_conf/0
	,parse_conf_cs/2
	,unquoted//1
	]).
:- use_module(library(dcg/basics)).

%%	parse_conf is det.
%
%	test reading & dumping default main collectd configuration file.
%
parse_conf :-
	parse_conf(AST),
	maplist(indented(0), AST).

%%	indented(+Off, +T) is det.
%
%	Output a readable report of parsed AST.
%
%	One clause per AST node kind; the last clause is a catch-all that
%	prints any node shape not matched above.
indented(Off, comment(_,_,C)) :-
	iformat(Off, '~w~n', C).
indented(Off, key_value(_,_,K-V)) :-
	iformat(Off, '~w~n', K-V).
indented(Off, xml_like(_,_,xml(Tag, Attrs, Body))) :-
	iformat(Off, '~s ~w [~n', [Tag, Attrs]),
	indented(Off + 1, Body),
	iformat(Off, ']~n', _).
indented(Off, C) :-
	iformat(Off, '~w~n', [C]).

% iformat/3: indent by Off columns, then emit Fmt with Args.
iformat(Off, Fmt, Args) :-
	indent(Off), format(Fmt, Args).
% indent/1: Off may be an arithmetic expression (e.g. Off + 1 above).
indent(Off) :-
	N is Off,
	forall(between(0, N, _), write(' ')).

/*
$ locate collectd.conf
/etc/collectd/collectd.conf
/etc/collectd/collectd.conf.d
/etc/collectd/collectd.conf.d/filters.conf
/etc/collectd/collectd.conf.d/thresholds.conf
/usr/share/doc/collectd-core/examples/collectd.conf
/usr/share/man/man5/collectd.conf.5.gz
/var/lib/dpkg/info/collectd.conffiles
*/
%config_main('/etc/collectd/collectd.conf.d/filters.conf').
%config_main('/etc/collectd/collectd.conf').
config_main('/home/carlo/develop/work/karsten/loadavgd/configEditor/proto/pl/x.conf').

%%	parse_conf(+AST) is det.
%
%	The syntax of this config file is similar to the config file of the
%	famous Apache webserver. Each line contains either an option (a key and
%	a list of one or more values) or a section-start or -end. Empty lines
%	and everything after a non-quoted hash-symbol (#) is ignored. Keys are
%	unquoted strings, consisting only of alphanumeric characters and the
%	underscore (_) character. Keys are handled case insensitive by collectd
%	itself and all plugins included with it. Values can either be an
%	unquoted string, a quoted string (enclosed in double-quotes) a number
%	or a boolean expression. Unquoted strings consist of only alphanumeric
%	characters and underscores (_) and do not need to be quoted. Quoted
%	strings are enclosed in double quotes ("). You can use the backslash
%	character (\) to include double quotes as part of the string. Numbers
%	can be specified in decimal and floating point format (using a dot . as
%	decimal separator), hexadecimal when using the 0x prefix and octal with
%	a leading zero (0). Boolean values are either true or false.
%
parse_conf(AST) :-
	config_main(CFG),
	read_file_to_codes(CFG, Cs, []),
	parse_conf_cs(Cs, AST).

% text_length/1 stores the total code count so pos//1 can report absolute
% character offsets from the start of the file.
:- dynamic text_length/1.

parse_conf_cs(Cs, AST) :-
	length(Cs, TL),
	retractall(text_length(_)),
	assert(text_length(TL)),
	phrase(cfg(AST), Cs).

%%	cfg(-List)// is det.
%
%	Build a list of nested elements with positions.
%	Parsing is deterministic, committed after each element.
%
cfg([]) --> [].
cfg([Comment|Ls]) --> comment(Comment), !, cfg(Ls).
cfg([KeyValue|Ls]) --> key_value(KeyValue), !, cfg(Ls).
cfg([XmlTree|Ls]) --> xml_like(false, XmlTree), !, cfg(Ls).
cfg([Skip|Ls]) --> skip(Skip), !, cfg(Ls).
cfg(Ls) --> [_], !, cfg(Ls).

:- meta_predicate xml_like(0, +, +,-).

% A comment may itself contain a commented-out section, a commented-out
% key/value pair, or free text (tried in that order, committed by !).
comment(comment(X, Y, C)) -->
	commline(X), xml_like(true, C), pos(Y), !.
comment(comment(X, Y, C)) -->
	commline(X), key_value(C), pos(Y), !.
comment(comment(X, Y, C)) -->
	commline(X), string(S), eol(Y), !, {atom_codes(C, S)}.

commline(X) --> pos(X), whites, "#", whites.

key_value(pair(X, Y, K-value(Xv, Yv, V))) -->
	pos(X), whites, key(K), whites, pos(Xv), value(V), pos(Yv), whites, eol(Y).

key(key(Xk, Yk, K)) -->
	pos(Xk), unquoted([C|Cs]), pos(Yk), {atom_codes(K, [C|Cs])}.

skip(skip(X, Y, C)) -->
	pos(X), string(S), eol(Y), {atom_codes(C, S)}.

%%	xml_like(+InComment, -XmlLike)// is det.
%
%	Multiline describe complex configuration options.
%	Parse markup with an XML like syntax.
%
xml_like(InComment, xml(tag(Tag,X,Y,U,V), Attrs, Body)) -->
	whites, "<", tag(TagC,X,Y), whites, string(AttrsC), whites, ">", !,
	body(InComment, TagC, Body, U,V),
	{maplist(atom_codes, [Tag, Attrs], [TagC, AttrsC])}.

% Closing tag; when parsing inside a comment each line is itself prefixed
% with '#'.
body(InComment, Tag, [], U,V) -->
	( {InComment} -> blanks, "#" ; [] ), blanks, "</", tag(Tag, U,V), whites, ">".
body(InComment, Tag, [], U,V) -->	% there is a bug in my collectd.conf
	( {InComment} -> blanks, "#" ; [] ), blanks, "<", tag(Tag, U,V), whites, ">".
body(InComment, Tag, [C|Cs], U,V) -->
	comment(C), body(InComment, Tag, Cs, U,V).
body(InComment, Tag, [C|Cs], U,V) -->
	key_value(C), body(InComment, Tag, Cs, U,V).
body(InComment, Tag, [C|Cs], U,V) -->
	xml_like(false, C), body(InComment, Tag, Cs, U,V).
body(InComment, Tag, Cs, U,V) -->
	blank, body(InComment, Tag, Cs, U,V).

% Value alternatives, tried in order: quoted string, decimal int, float,
% hex (0x...), octal (leading 0), booleans, finally a bare word.
value(quoted(V)) --> quoted(V).
value(number(int, V)) --> integer(V).
value(number(float, V)) --> float(V).
value(number(hex, V)) --> xinteger(V).
value(number(oct, V)) --> [0'0], octals(Ds), {digits_octal(Ds, V)}.
value(boolean(true)) --> "true", !.
value(boolean(false)) --> "false", !.
value(unquoted(V)) --> unquoted(V).

octals([D|Ds]) --> [C], {code_type(C, digit(D)), D >= 0, D < 8}, octals(Ds).
octals([]) --> [].

%%	unquoted(-Symbol)// is det.
%
%
unquoted([C|Cs]) --> [C], {C = 0'_ ; code_type(C, alnum)}, unquoted(Cs).
unquoted([]) --> [].

%%	quoted(-V)// is det.
%
%	Get all text between quotes.
%	Handle quotes inside and multiline (line continuation)
%
quoted(V) --> "\"", quoted_(Cs), {atom_codes(V, Cs)}.
quoted_([]) --> "\"".
quoted_([0'\n|Cs]) --> "\\\n", quoted_(Cs).
quoted_([0'"|Cs]) --> "\\\"", quoted_(Cs).
quoted_([C|Cs]) --> [C], quoted_(Cs).

%%	tag(?T, -X, -Y)// is det.
%
%	Start/Stop tokens for XML like entries.
%	Maybe this should restrict somewhat the allowed text.
%
tag(T, X, Y) --> pos(X), unquoted(T), pos(Y).

%%	pos(-C, +P, -P) is det.
%
%	capture offset from end of stream
%
%pos(C, P, P) :- length(P, C).
pos(C, P, P) :- text_length(L), length(P, Q), C is L - Q.

eol(P) --> pos(P), eol.
eol --> "\n" ; eos.

%%	digits_octal(+Ds, -Oct) is det
%
%	sequence of numbers in range 0..7 (octal) converted to integer
%
digits_octal(Ds, Oct) :-
	digits_number(Ds, 8, Oct, _).

%%	digits_number(+Digits, +Base, -Number, -Factor) is det.
%
%	convert a sequence of numbers applying a base
%
digits_number([Num], _Base, Num, 0).
digits_number([D|Ds], Base, Num, F1) :-
	digits_number(Ds, Base, Right, F),
	F1 is F + 1,
	Num is Right + D * Base ** F1.

% Debug helper: print the first 20 codes of the remaining input, then fail
% so parsing backtracks past it unchanged.
show_dcg(P, _) :-
	length(S, 20),
	append(S, _, P),
	format('[~s]~n', [S]),
	fail.
loadavg/collectdcp
swipl-plgi/parse_conf.pl
Perl
mit
7,180
# This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.07) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
#
# Generated from debian/tzdata/africa. Olson data version 2008c
#
# Do not edit this file directly.
#
package DateTime::TimeZone::Africa::Nairobi;

use strict;

use Class::Singleton;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;

@DateTime::TimeZone::Africa::Nairobi::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );

# Observance spans for Africa/Nairobi, consumed by DateTime::TimeZone's
# _init().  Each span appears to follow the OlsonDB layout
# [utc_start, utc_end, local_start, local_end, offset_seconds, is_dst,
# abbreviation] -- verify against DateTime::TimeZone::OlsonDB before relying
# on field order.
my $spans =
[
    [
     DateTime::TimeZone::NEG_INFINITY,
     60825936764,
     DateTime::TimeZone::NEG_INFINITY,
     60825945600,
     8836,
     0,
     'LMT'
    ],
    [
     60825936764,
     60873368400,
     60825947564,
     60873379200,
     10800,
     0,
     'EAT'
    ],
    [
     60873368400,
     61188903000,
     60873377400,
     61188912000,
     9000,
     0,
     'BEAT'
    ],
    [
     61188903000,
     61820054115,
     61188912885,
     61820064000,
     9885,
     0,
     'BEAUT'
    ],
    [
     61820054115,
     DateTime::TimeZone::INFINITY,
     61820064915,
     DateTime::TimeZone::INFINITY,
     10800,
     0,
     'EAT'
    ],
];

# Version of the Olson database this file was generated from.
sub olson_version { '2008c' }

# Nairobi has never observed daylight saving in this dataset.
sub has_dst_changes { 0 }

sub _max_year { 2018 }

sub _new_instance
{
    return shift->_init( @_, spans => $spans );
}

1;
carlgao/lenga
images/lenny64-peon/usr/share/perl5/DateTime/TimeZone/Africa/Nairobi.pm
Perl
mit
1,185
use utf8;
package Weather::Schema::Result::Measurement;

# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE

=head1 NAME

Weather::Schema::Result::Measurement

=cut

use strict;
use warnings;

use base 'DBIx::Class::Core';

=head1 TABLE: C<measurement>

=cut

__PACKAGE__->table("measurement");

=head1 ACCESSORS

=head2 measurement_id

  data_type: 'integer'
  is_auto_increment: 1
  is_nullable: 0
  sequence: 'measurement_measurement_id_seq'

=head2 time

  data_type: 'timestamp'
  is_nullable: 1

=head2 type_id

  data_type: 'bigint'
  is_foreign_key: 1
  is_nullable: 1

=head2 value

  data_type: 'real'
  is_nullable: 1

=head2 file_id

  data_type: 'bigint'
  is_foreign_key: 1
  is_nullable: 1

=head2 sensor_id

  data_type: 'bigint'
  is_foreign_key: 1
  is_nullable: 1

=cut

__PACKAGE__->add_columns(
  "measurement_id",
  {
    data_type         => "integer",
    is_auto_increment => 1,
    is_nullable       => 0,
    sequence          => "measurement_measurement_id_seq",
  },
  "time",
  { data_type => "timestamp", is_nullable => 1 },
  "type_id",
  { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 },
  "value",
  { data_type => "real", is_nullable => 1 },
  "file_id",
  { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 },
  "sensor_id",
  { data_type => "bigint", is_foreign_key => 1, is_nullable => 1 },
);

=head1 PRIMARY KEY

=over 4

=item * L</measurement_id>

=back

=cut

__PACKAGE__->set_primary_key("measurement_id");

=head1 RELATIONS

=head2 file

Type: belongs_to

Related object: L<Weather::Schema::Result::File>

=cut

__PACKAGE__->belongs_to(
  "file",
  "Weather::Schema::Result::File",
  { file_id => "file_id" },
  {
    is_deferrable => 0,
    join_type     => "LEFT",
    on_delete     => "NO ACTION",
    on_update     => "NO ACTION",
  },
);

=head2 sensor

Type: belongs_to

Related object: L<Weather::Schema::Result::Sensor>

=cut

__PACKAGE__->belongs_to(
  "sensor",
  "Weather::Schema::Result::Sensor",
  { sensor_id => "sensor_id" },
  {
    is_deferrable => 0,
    join_type     => "LEFT",
    on_delete     => "NO ACTION",
    on_update     => "NO ACTION",
  },
);

=head2 type

Type: belongs_to

Related object: L<Weather::Schema::Result::Cvterm>

=cut

__PACKAGE__->belongs_to(
  "type",
  "Weather::Schema::Result::Cvterm",
  { cvterm_id => "type_id" },
  {
    is_deferrable => 0,
    join_type     => "LEFT",
    on_delete     => "NO ACTION",
    on_update     => "NO ACTION",
  },
);


# Created by DBIx::Class::Schema::Loader v0.07033 @ 2016-01-26 14:56:49
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:RXNMoinG32XBGtmmQ1yZpA

# You can replace this text with custom code or comments, and it will be preserved on regeneration
#
# Result class for the `measurement` table: one row per recorded sensor
# reading, with optional links to the source file (file), the recording
# sensor (sensor) and the measurement type controlled-vocabulary term
# (type).  All relationships are LEFT joins since the foreign keys are
# nullable.  Custom code belongs below the md5sum marker above so that
# Schema::Loader regeneration preserves it.
1;
solgenomics/Weather
lib/Weather/Schema/Result/Measurement.pm
Perl
mit
2,754
use strict;
use Data::Dumper;
use Carp;

#
# This is a SAS Component
#

=head1 NAME

get_relationship_OperonContains

=head1 SYNOPSIS

get_relationship_OperonContains [-c N] [-a] [--fields field-list] < ids > table.with.fields.added

=head1 DESCRIPTION

Example:

    get_relationship_OperonContains -a < ids > table.with.fields.added

would read in a file of ids and add a column for each field in the relationship.

The standard input should be a tab-separated table (i.e., each line
is a tab-separated set of fields).  Normally, the last field in each
line would contain the id. If some other column contains the id,
use

    -c N

where N is the column (from 1) that contains the id.

This is a pipe command. The input is taken from the standard input,
and the output is to the standard output.

=head1 COMMAND-LINE OPTIONS

Usage: get_relationship_OperonContains [arguments] < ids > table.with.fields.added

=over 4

=item -c num

Select the identifier from column num

=item -from field-list

Choose a set of fields from the Operon
entity to return. Field-list is a comma-separated list of
strings. The following fields are available:

=over 4

=item id

=back

=item -rel field-list

Choose a set of fields from the relationship to return. Field-list
is a comma-separated list of strings. The following fields are
available:

=over 4

=item from_link

=item to_link

=item rank

=back

=item -to field-list

Choose a set of fields from the Feature
entity to return. Field-list is a comma-separated list of
strings. The following fields are available:

=over 4

=item id

=item feature_type

=item source_id

=item sequence_length

=item function

=item alias

=back

=back

=head1 AUTHORS

L<The SEED Project|http://www.theseed.org>

=cut

use Bio::KBase::Utilities::ScriptThing;
use Bio::KBase::CDMI::CDMIClient;
use Getopt::Long;

# Default fields
# Whitelists of fields that may be requested from the "from" entity
# (Operon), the relationship itself, and the "to" entity (Feature).
my @all_from_fields = ( 'id', );
my @all_rel_fields = ( 'from_link', 'to_link', 'rank' );
my @all_to_fields = ( 'id', 'feature_type', 'source_id', 'sequence_length', 'function', 'alias' );

# Lookup hashes used by check_fields() to validate user-supplied names.
my %all_from_fields = map { $_ => 1 } @all_from_fields;
my %all_rel_fields = map { $_ => 1 } @all_rel_fields;
my %all_to_fields = map { $_ => 1 } @all_to_fields;

# NOTE(review): these defaults use hyphens ('from-link') while the result
# hashes below are keyed with underscores ('from_link'); verify the CDMI
# server accepts/normalizes the hyphenated spelling, otherwise the default
# columns would come back undef.
my @default_fields = ('from-link', 'to-link');

# Fields actually requested for this invocation (filled from options below).
my @from_fields;
my @rel_fields;
my @to_fields;

our $usage = <<'END';
Usage: get_relationship_OperonContains [arguments] < ids > table.with.fields.added

--show-fields
    List the available fields.

-c num
    Select the identifier from column num

--from field-list
    Choose a set of fields from the Operon entity to return. Field-list is a comma-separated list of strings. The following fields are available:
        id

--rel field-list
    Choose a set of fields from the relationship to return. Field-list is a comma-separated list of strings. The following fields are available:
        from_link
        to_link
        rank

--to field-list
    Choose a set of fields from the Feature entity to return. Field-list is a comma-separated list of strings. The following fields are available:
        id
        feature_type
        source_id
        sequence_length
        function
        alias
END

my $column;
# NOTE(review): $input_file is declared and tested below but never assigned
# by any option ('i=s' fills $i instead), so the open() branch is dead code
# and input always comes from STDIN — confirm whether '-i' was meant to
# populate this.
my $input_file;
# NOTE(review): lexical $a ('-a' = "all fields" flag) masks the package
# variable sort() uses; harmless here since no sort block appears in scope,
# but a risky name.
my $a;
my $f;
my $r;
my $t;
my $help;
my $show_fields;
my $i = "-";

# Parses the remaining options and returns a client object whose
# get_relationship_OperonContains method is called per batch below.
my $geO = Bio::KBase::CDMI::CDMIClient->new_get_entity_for_script("c=i"		 => \$column,
								  "h"		 => \$help,
								  "show-fields"	 => \$show_fields,
								  "a"		 => \$a,
								  "from=s"	 => \$f,
								  "rel=s"	 => \$r,
								  "to=s"	 => \$t,
								  'i=s'		 => \$i);
if ($help)
{
    print $usage;
    exit 0;
}

if ($show_fields)
{
    print "from fields:\n";
    print "  $_\n" foreach @all_from_fields;
    print "relation fields:\n";
    print "  $_\n" foreach @all_rel_fields;
    print "to fields:\n";
    print "  $_\n" foreach @all_to_fields;
    exit 0;
}

# '-a' (all fields) is mutually exclusive with the explicit field options.
if ($a && ($f || $r || $t)) {die $usage};

if ($a)
{
    @from_fields = @all_from_fields;
    @rel_fields = @all_rel_fields;
    @to_fields = @all_to_fields;
}
elsif ($f || $t || $r) {
    # Validate each user-supplied list against the corresponding whitelist;
    # exit non-zero if any unknown field was named.
    my $err = 0;
    if ($f) {
	@from_fields = split(",", $f);
	$err += check_fields(\@from_fields, %all_from_fields);
    }
    if ($r) {
	@rel_fields = split(",", $r);
	$err += check_fields(\@rel_fields, %all_rel_fields);
    }
    if ($t) {
	@to_fields = split(",", $t);
	$err += check_fields(\@to_fields, %all_to_fields);
    }
    if ($err) {exit 1;}
}
else
{
    @rel_fields = @default_fields;
}

my $ih;
if ($input_file)
{
    open $ih, "<", $input_file or die "Cannot open input file $input_file: $!";
}
else
{
    $ih = \*STDIN;
}

# Stream the input table in batches of ids; for each batch, query the
# relationship and emit one output line per (input line, matching result).
while (my @tuples = Bio::KBase::Utilities::ScriptThing::GetBatch($ih, undef, $column)) {
    my @h = map { $_->[0] } @tuples;
    my $h = $geO->get_relationship_OperonContains(\@h, \@from_fields, \@rel_fields, \@to_fields);
    # Group result rows by their from_link id so they can be matched back
    # to the input lines below.
    my %results;
    for my $result (@$h)
    {
	my @from;
	my @rel;
	my @to;
	my $from_id;
	# Each result is a triple: [from-entity hash, relationship hash,
	# to-entity hash]; collect the requested fields from each in order.
	my $res = $result->[0];
	for my $key (@from_fields)
	{
	    push (@from,$res->{$key});
	}
	$res = $result->[1];
	$from_id = $res->{'from_link'};
	for my $key (@rel_fields)
	{
	    push (@rel,$res->{$key});
	}
	$res = $result->[2];
	for my $key (@to_fields)
	{
	    push (@to,$res->{$key});
	}
	if ($from_id)
	{
	    push @{$results{$from_id}}, [@from, @rel, @to];
	}
    }
    # Echo each original input line once per matching result, with the
    # requested columns appended (lines with no match produce no output).
    for my $tuple (@tuples)
    {
	my($id, $line) = @$tuple;
	my $resultsForId = $results{$id};
	if ($resultsForId)
	{
	    for my $result (@$resultsForId)
	    {
		print join("\t", $line, @$result) . "\n";
	    }
	}
    }
}

# check_fields(\@requested, %whitelist)
# Returns 0 if every requested field is in the whitelist; otherwise prints
# a diagnostic listing the unknown and valid fields and returns 1.
sub check_fields {
    my ($fields, %all_fields) = @_;
    my @err;
    for my $field (@$fields)
    {
	if (!$all_fields{$field})
	{
	    push(@err, $field);
	}
    }
    if (@err)
    {
	my @f = keys %all_fields;
	print STDERR "get_relationship_OperonContains: unknown fields @err. Valid fields are @f\n";
	return 1;
    }
    return 0;
}
kbase/kb_seed
scripts/get_relationship_OperonContains.pl
Perl
mit
5,887
split(X, N, L, R) :- split(X, N, 0, L, R). split(X, N, N, [], X). split([H|T], N, M, L, R) :- K is (M + 1), split(T, N, K, Ls, R), append([H], Ls, L).
dvberkel/99-prolog-problems
Prolog-Lists/17.pl
Perl
mit
152
package Experiments::NAACL_HLT_2015::ReferenceRanking;

# TODO : paper as full programs ?

use strict;
use warnings;

use Moose;
use namespace::autoclean;

extends( 'Experiment::Table' );
with( 'Experiments::EMNLP_2015' );

# system entries builder
#
# Produces the full list of systems to appear in the table, as an arrayref
# of [ system_label , system_id ] pairs. Each "core system" toggles which
# configuration dimensions (retrieval field, ranker class, similarity
# field) are expanded into a cross-product of variants.
sub _system_entries_builder {

    my $this = shift;

    # systems (configurations keys)
    # TODO : these could be coming from a top-level configuration file but not necessary at all (again config files are really hidden scripts)
    my $retrieval_mode_prefix = '@reference-collector-params@index-query-field';
    my @retrieval_modes = ( [ 's' , 'description' ] , [ 'c' , 'content-rendered' ] , [ 't' , 'title' ] );

    my $ranking_mode_prefix = '@reference-ranker-class';
    my @ranking_modes = ( [ 'k' , 'WordGraph::ReferenceRanker::ReferenceTargetJointProbability' ] ,
			  [ 'f' , 'WordGraph::ReferenceRanker::SymmetricTargetSimilarity' ] );

    my $similarity_mode_prefix = '@reference-ranker-params@similarity-field';
    my @similarity_modes = ( [ 'c' , 'content' ] , [ 't' , 'title' ] , [ 'a' , 'anchortext' ] , [ 'u' , 'url' ] );

    # [ core-system-id , variable-retriever? , variable-ranker? , variable-similarity-field? ]
    my @core_systems;
    push @core_systems , [ 'wg-baseline-ranking-max' , 1 , 0 , 0 ];
    push @core_systems , [ 'graph4-baseline-ranking' , 1 , 1 , 1 ];
    push @core_systems , [ 'wg-baseline-retrieval' , 1 , 0 , 0 ];
    push @core_systems , [ 'title' , 0 , 0 , 0 ];
    push @core_systems , [ 'wg-baseline-ranking-min' , 1 , 0 , 0 ];

    my @systems;
    foreach my $core_system (@core_systems) {

	my $core_system_id            = $core_system->[ 0 ];
	my $variable_retriever        = $core_system->[ 1 ];
	my $variable_ranker           = $core_system->[ 2 ];
	my $variable_similarity_field = $core_system->[ 3 ];

	if ( ! $variable_retriever ) {
	    # Fixed system: no configuration dimension to expand.
	    # TODO : can we avoid writing the same variable twice ?
	    push @systems , [ $core_system_id , $core_system_id ];
	    next;
	}

	foreach my $retrieval_mode (@retrieval_modes) {

	    my $retrieval_mode_label = $retrieval_mode->[ 0 ];
	    my $retrieval_mode_key   = $retrieval_mode->[ 1 ];
	    my $retrieval_mode_parameter_key =
		$this->_generate_parameter_key_value_string( $retrieval_mode_prefix , $retrieval_mode_key );

	    if ( ! $variable_ranker ) {
		# Note : generate-summarizers-list will only include parameters that are being scanned => this seems like the expected behavior
		push @systems , [ "$core_system_id\[$retrieval_mode_label\]" ,
				  $this->_generate_system_id( $core_system_id , $retrieval_mode_parameter_key ) ];
		next;
	    }

	    foreach my $ranking_mode (@ranking_modes) {

		my $ranking_mode_label = $ranking_mode->[ 0 ];
		my $ranking_mode_key   = $ranking_mode->[ 1 ];
		my $ranking_mode_parameter_key =
		    $this->_generate_parameter_key_value_string( $ranking_mode_prefix , $ranking_mode_key );

		# TODO : can we do better ?
		if ( $variable_similarity_field ) {
		    foreach my $similarity_mode (@similarity_modes) {
			my $similarity_mode_label = $similarity_mode->[ 0 ];
			my $similarity_mode_key   = $similarity_mode->[ 1 ];
			my $similarity_mode_parameter_key =
			    $this->_generate_parameter_key_value_string( $similarity_mode_prefix , $similarity_mode_key );
			my $system_label = "retrieval[$retrieval_mode_label]+ranking[$ranking_mode_label-$similarity_mode_label]";
			push @systems , [ $system_label ,
					  $this->_generate_system_id( $core_system_id , $retrieval_mode_parameter_key ,
								      $ranking_mode_parameter_key , $similarity_mode_parameter_key ) ];
		    }
		}
		else {
		    push @systems , [ "retrieval[$retrieval_mode_label]+$core_system_id\[$ranking_mode_label\]" ,
				      $this->_generate_system_id( $core_system_id , $ranking_mode_parameter_key ,
								  $retrieval_mode_parameter_key ) ];
		}

	    }

	}

    }

    return \@systems;

}

# build the table - this is necessarily a custom function
#
# Currently a stub: iterates over the system entries but does not yet fill
# any cells (returns an empty arrayref).
sub table_builder {

    my $this = shift;

    # => yes, use the definition to produce the list of cells (some of them being units) => then fill individual units
    # => problem, how do we get the table definition ? => coded
    # => take the table and make it a template

    # Note : store cells in row format
    # TODO : use CPAN module instead ?
    my @cells;

    # CURRENT : assuming I get generate-summarizers-list to produce the unit group key ... what else is needed ?
    # => get experiment driver to produce the list ... => can the table be produced from the meta configuration ?
    # BUGFIX : @systems was previously referenced here as a bare array, but it
    # is only declared lexically inside _system_entries_builder, so this did
    # not compile under 'use strict'. Fetch the entries through the builder.
    my $systems = $this->_system_entries_builder;
    foreach my $system ( @{ $systems } ) {

	my $system_label = $system->[ 0 ];
	my $system_id    = $system->[ 1 ];
	# TODO my $system_params = $system->[ 2 ];

=pod
	# 1 - load system configuration
	my $configuration = Config::JSON->new( $system_configuration )->config;
=cut

    }

    return \@cells;

}

# TODO : how to mark cells that ask as references ? how to mark cells for which significance is to be computed ?

__PACKAGE__->meta->make_immutable;

1;
ypetinot/web-summarization
evaluation/src/Experiments/NAACL_HLT_2015/ReferenceRanking.pm
Perl
apache-2.0
4,979
#------------------------------------------------------------------------------ # File: Shortcuts.pm # # Description: ExifTool shortcut tags # # Revisions: 02/07/2004 - PH Moved out of Exif.pm # 09/15/2004 - PH Added D70Boring from Greg Troxel # 01/11/2005 - PH Added Canon20D from Christian Koller # 03/03/2005 - PH Added user defined shortcuts # 03/26/2005 - PH Added Nikon from Tom Christiansen # 02/28/2007 - PH Removed model-dependent shortcuts # --> this is what UserDefined::Shortcuts is for # 02/25/2009 - PH Added Unsafe # 07/03/2010 - PH Added CommonIFD0 #------------------------------------------------------------------------------ package Image::ExifTool::Shortcuts; use strict; use vars qw($VERSION); $VERSION = '1.48'; # this is a special table used to define command-line shortcuts # (documentation Notes may be added for these via %shortcutNotes in BuildTagLookup.pm) %Image::ExifTool::Shortcuts::Main = ( # this shortcut allows the three common date/time tags to be shifted at once AllDates => [ 'DateTimeOriginal', 'CreateDate', 'ModifyDate', ], # This is a shortcut to some common information which is useful in most images Common => [ 'FileName', 'FileSize', 'Model', 'DateTimeOriginal', 'ImageSize', 'Quality', 'FocalLength', 'ShutterSpeed', 'Aperture', 'ISO', 'WhiteBalance', 'Flash', ], # This shortcut provides the same information as the Canon utilities Canon => [ 'FileName', 'Model', 'DateTimeOriginal', 'ShootingMode', 'ShutterSpeed', 'Aperture', 'MeteringMode', 'ExposureCompensation', 'ISO', 'Lens', 'FocalLength', 'ImageSize', 'Quality', 'Flash', 'FlashType', 'ConditionalFEC', 'RedEyeReduction', 'ShutterCurtainHack', 'WhiteBalance', 'FocusMode', 'Contrast', 'Sharpness', 'Saturation', 'ColorTone', 'ColorSpace', 'LongExposureNoiseReduction', 'FileSize', 'FileNumber', 'DriveMode', 'OwnerName', 'SerialNumber', ], Nikon => [ 'Model', 'SubSecDateTimeOriginal', 'ShutterCount', 'LensSpec', 'FocalLength', 'ImageSize', 'ShutterSpeed', 'Aperture', 'ISO', 
'NoiseReduction', 'ExposureProgram', 'ExposureCompensation', 'WhiteBalance', 'WhiteBalanceFineTune', 'ShootingMode', 'Quality', 'MeteringMode', 'FocusMode', 'ImageOptimization', 'ToneComp', 'ColorHue', 'ColorSpace', 'HueAdjustment', 'Saturation', 'Sharpness', 'Flash', 'FlashMode', 'FlashExposureComp', ], # This shortcut may be useful when copying tags between files to either # copy the maker notes as a block or prevent it from being copied MakerNotes => [ 'MakerNotes', # (for RIFF MakerNotes) 'MakerNoteApple', 'MakerNoteCanon', 'MakerNoteCasio', 'MakerNoteCasio2', 'MakerNoteFLIR', 'MakerNoteFujiFilm', 'MakerNoteGE', 'MakerNoteGE2', 'MakerNoteHasselblad', 'MakerNoteHP', 'MakerNoteHP2', 'MakerNoteHP4', 'MakerNoteHP6', 'MakerNoteISL', 'MakerNoteJVC', 'MakerNoteJVCText', 'MakerNoteKodak1a', 'MakerNoteKodak1b', 'MakerNoteKodak2', 'MakerNoteKodak3', 'MakerNoteKodak4', 'MakerNoteKodak5', 'MakerNoteKodak6a', 'MakerNoteKodak6b', 'MakerNoteKodak7', 'MakerNoteKodak8a', 'MakerNoteKodak8b', 'MakerNoteKodak8c', 'MakerNoteKodak9', 'MakerNoteKodak10', 'MakerNoteKodakUnknown', 'MakerNoteKyocera', 'MakerNoteMinolta', 'MakerNoteMinolta2', 'MakerNoteMinolta3', 'MakerNoteNikon', 'MakerNoteNikon2', 'MakerNoteNikon3', 'MakerNoteOlympus', 'MakerNoteOlympus2', 'MakerNoteLeica', 'MakerNoteLeica2', 'MakerNoteLeica3', 'MakerNoteLeica4', 'MakerNoteLeica5', 'MakerNoteLeica6', 'MakerNotePanasonic', 'MakerNotePanasonic2', 'MakerNotePentax', 'MakerNotePentax2', 'MakerNotePentax3', 'MakerNotePentax4', 'MakerNotePentax5', 'MakerNotePentax6', 'MakerNotePhaseOne', 'MakerNoteReconyx', 'MakerNoteRicoh', 'MakerNoteRicohText', 'MakerNoteSamsung1a', 'MakerNoteSamsung1b', 'MakerNoteSamsung2', 'MakerNoteSanyo', 'MakerNoteSanyoC4', 'MakerNoteSanyoPatch', 'MakerNoteSigma', 'MakerNoteSony', 'MakerNoteSony2', 'MakerNoteSony3', 'MakerNoteSony4', 'MakerNoteSony5', 'MakerNoteSonyEricsson', 'MakerNoteSonySRF', 'MakerNoteUnknownText', 'MakerNoteUnknownBinary', 'MakerNoteUnknown', ], # "unsafe" tags we normally don't 
copy in JPEG images, defined # as a shortcut to use when rebuilding JPEG EXIF from scratch Unsafe => [ 'IFD0:YCbCrPositioning', 'IFD0:YCbCrCoefficients', 'IFD0:TransferFunction', 'ExifIFD:ComponentsConfiguration', 'ExifIFD:CompressedBitsPerPixel', 'InteropIFD:InteropIndex', 'InteropIFD:InteropVersion', 'InteropIFD:RelatedImageWidth', 'InteropIFD:RelatedImageHeight', ], # common metadata tags found in IFD0 of TIFF images CommonIFD0 => [ # standard EXIF 'IFD0:ImageDescription', 'IFD0:Make', 'IFD0:Model', 'IFD0:Software', 'IFD0:ModifyDate', 'IFD0:Artist', 'IFD0:Copyright', # other TIFF tags 'IFD0:Rating', 'IFD0:RatingPercent', 'IFD0:DNGLensInfo', 'IFD0:PanasonicTitle', 'IFD0:PanasonicTitle2', 'IFD0:XPTitle', 'IFD0:XPComment', 'IFD0:XPAuthor', 'IFD0:XPKeywords', 'IFD0:XPSubject', ], ); #------------------------------------------------------------------------------ # load user-defined shortcuts if available # Inputs: reference to user-defined shortcut hash sub LoadShortcuts($) { my $shortcuts = shift; my $shortcut; foreach $shortcut (keys %$shortcuts) { my $val = $$shortcuts{$shortcut}; # also allow simple aliases $val = [ $val ] unless ref $val eq 'ARRAY'; # save the user-defined shortcut or alias $Image::ExifTool::Shortcuts::Main{$shortcut} = $val; } } # (for backward compatibility, renamed in ExifTool 7.75) if (%Image::ExifTool::Shortcuts::UserDefined) { LoadShortcuts(\%Image::ExifTool::Shortcuts::UserDefined); } if (%Image::ExifTool::UserDefined::Shortcuts) { LoadShortcuts(\%Image::ExifTool::UserDefined::Shortcuts); } 1; # end __END__ =head1 NAME Image::ExifTool::Shortcuts - ExifTool shortcut tags =head1 SYNOPSIS This module is required by Image::ExifTool. =head1 DESCRIPTION This module contains definitions for tag name shortcuts used by Image::ExifTool. You can customize this file to add your own shortcuts. 
Individual users may also add their own shortcuts to the .ExifTool_config file in their home directory (or the directory specified by the EXIFTOOL_HOME environment variable). The shortcuts are defined in a hash called %Image::ExifTool::UserDefined::Shortcuts. The keys of the hash are the shortcut names, and the elements are either tag names or references to lists of tag names. An example shortcut definition in .ExifTool_config: %Image::ExifTool::UserDefined::Shortcuts = ( MyShortcut => ['createdate','exif:exposuretime','aperture'], MyAlias => 'FocalLengthIn35mmFormat', ); In this example, MyShortcut is a shortcut for the CreateDate, EXIF:ExposureTime and Aperture tags, and MyAlias is a shortcut for FocalLengthIn35mmFormat. =head1 AUTHOR Copyright 2003-2014, Phil Harvey (phil at owl.phy.queensu.ca) This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =head1 SEE ALSO L<Image::ExifTool(3pm)|Image::ExifTool> =cut
pericles-project/pet
nativeTools/exiftool_OSX/lib/Image/ExifTool/Shortcuts.pm
Perl
apache-2.0
8,569
use warnings; use strict; my $i; for($i = 0; $i < 300; $i = $i + 1) { print "$i annotated\n"; }
jmmut/eva-v2
eva-pipeline/src/test/resources/mockvep.pl
Perl
apache-2.0
101
package Paws::IAM::DeleteSAMLProvider; use Moose; has SAMLProviderArn => (is => 'ro', isa => 'Str', required => 1); use MooseX::ClassAttribute; class_has _api_call => (isa => 'Str', is => 'ro', default => 'DeleteSAMLProvider'); class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::API::Response'); class_has _result_key => (isa => 'Str', is => 'ro'); 1; ### main pod documentation begin ### =head1 NAME Paws::IAM::DeleteSAMLProvider - Arguments for method DeleteSAMLProvider on Paws::IAM =head1 DESCRIPTION This class represents the parameters used for calling the method DeleteSAMLProvider on the AWS Identity and Access Management service. Use the attributes of this class as arguments to method DeleteSAMLProvider. You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to DeleteSAMLProvider. As an example: $service_obj->DeleteSAMLProvider(Att1 => $value1, Att2 => $value2, ...); Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object. =head1 ATTRIBUTES =head2 B<REQUIRED> SAMLProviderArn => Str The Amazon Resource Name (ARN) of the SAML provider to delete. =head1 SEE ALSO This class forms part of L<Paws>, documenting arguments for method DeleteSAMLProvider in L<Paws::IAM> =head1 BUGS and CONTRIBUTIONS The source code is located here: https://github.com/pplu/aws-sdk-perl Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues =cut
ioanrogers/aws-sdk-perl
auto-lib/Paws/IAM/DeleteSAMLProvider.pm
Perl
apache-2.0
1,653
=head1 LICENSE

Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

=cut

=head1 CONTACT

  Please email comments or questions to the public Ensembl
  developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.

  Questions may also be sent to the Ensembl help desk at
  <http://www.ensembl.org/Help/Contact>.

=cut

=head1 NAME

Bio::EnsEMBL::Utils::ConversionSupport - Utility module for Vega release and
schema conversion scripts

=head1 SYNOPSIS

  my $serverroot = '/path/to/ensembl';
  my $support = new Bio::EnsEMBL::Utils::ConversionSupport($serverroot);

  # parse common options
  $support->parse_common_options;

  # parse extra options for your script
  $support->parse_extra_options( 'string_opt=s', 'numeric_opt=n' );

  # ask the user if they want to run the script with these parameters
  $support->confirm_params;

  # see individual method documentation for more stuff

=head1 DESCRIPTION

This module is a collection of common methods and provides helper
functions for the Vega release and schema conversion scripts. Amongst
others, it reads options from a config file, parses commandline options
and does logging.
=head1 METHODS =cut package Bio::EnsEMBL::Utils::Logger; use strict; use warnings; no warnings 'uninitialized'; use FindBin qw($Bin $Script); use POSIX qw(strftime); use Bio::EnsEMBL::Utils::Argument qw(rearrange); use Bio::EnsEMBL::Utils::Exception qw(throw); use Bio::EnsEMBL::Utils::ScriptUtils qw(parse_bytes); my %level_defs = ( 'error' => 1, 'warn' => 2, 'warning' => 2, 'info' => 3, 'debug' => 4, 'verbose' => 4, ); my @reverse_level_defs = (undef, qw(error warning info debug)); =head2 new Arg[1] : String $serverroot - root directory of your ensembl sandbox Example : my $support = new Bio::EnsEMBL::Utils::ConversionSupport( '/path/to/ensembl'); Description : constructor Return type : Bio::EnsEMBL::Utils::ConversionSupport object Exceptions : thrown on invalid loglevel Caller : general =cut sub new { my $caller = shift; my $class = ref($caller) || $caller; my ($logfile, $logauto, $logautobase, $logautoid, $logpath, $logappend, $loglevel, $is_component) = rearrange( ['LOGFILE', 'LOGAUTO', 'LOGAUTOBASE', 'LOGAUTOID', 'LOGPATH', 'LOGAPPEND', 'LOGLEVEL', 'IS_COMPONENT'], @_); my $self = { '_warnings' => 0, }; bless ($self, $class); # initialise $self->logfile($logfile); $self->logpath($logpath); $self->logappend($logappend); $self->is_component($is_component); # automatic logfile creation $self->logauto($logauto); $logautoid ||= strftime("%Y%m%d-%H%M%S", localtime); $self->log_auto_id($logautoid); $self->create_auto_logfile($logautobase); $loglevel ||= 'info'; if ($loglevel =~ /^\d+$/ and $loglevel > 0 and $loglevel < 5) { $self->{'loglevel'} = $loglevel; } elsif ($level_defs{lc($loglevel)}) { $self->{'loglevel'} = $level_defs{lc($loglevel)}; } else { throw('Unknown loglevel: $loglevel.'); } return $self; } =head2 log_generic Arg[1] : String $txt - the text to log Arg[2] : Int $indent - indentation level for log message Example : my $log = $support->log_filehandle; $support->log('Log foo.\n', 1); Description : Logs a message to the filehandle initialised by calling 
$self->log_filehandle(). You can supply an indentation level to get nice hierarchical log messages. Return type : true on success Exceptions : thrown when no filehandle can be obtained Caller : general =cut sub log_generic { my ($self, $txt, $indent, $stamped) = @_; $indent ||= 0; my $fh = $self->log_filehandle; # append timestamp and memory usage to log text if requested if ($stamped) { $txt =~ s/^(\n*)(.*)(\n*)$/$2/; $txt = sprintf("%-60s%20s", $txt, $self->time_and_mem); $txt = $1.$txt.$3; } # strip off leading linebreaks so that indenting doesn't break $txt =~ s/^(\n*)//; # indent $txt = $1." "x$indent . $txt; print $fh "$txt"; return(1); } =head2 error Arg[1] : String $txt - the error text to log Arg[2] : Int $indent - indentation level for log message Example : my $log = $support->log_filehandle; $support->log_error('Log foo.\n', 1); Description : Logs a message via $self->log and exits the script. Return type : none Exceptions : none Caller : general =cut sub error { my ($self, $txt, $indent, $stamped) = @_; return(0) unless ($self->{'loglevel'} >= 1); $txt = "ERROR: ".$txt; $self->log_generic($txt, $indent, $stamped); $self->log_generic("\nExiting prematurely.\n\n"); $self->log_generic("Runtime: ".$self->runtime." ".$self->date_and_mem."\n\n"); exit(1); } =head2 warning Arg[1] : String $txt - the warning text to log Arg[2] : Int $indent - indentation level for log message Example : my $log = $support->log_filehandle; $support->log_warning('Log foo.\n', 1); Description : Logs a message via $self->log and increases the warning counter. Return type : true on success Exceptions : none Caller : general =cut sub warning { my ($self, $txt, $indent, $stamped) = @_; return(0) unless ($self->{'loglevel'} >= 2); $txt = "WARNING: " . 
$txt; $self->log_generic($txt, $indent, $stamped); $self->{'_warnings'}++; return(1); } sub info { my ($self, $txt, $indent, $stamped) = @_; return(0) unless ($self->{'loglevel'} >= 3); $self->log_generic($txt, $indent, $stamped); return(1); } =head2 debug Arg[1] : String $txt - the warning text to log Arg[2] : Int $indent - indentation level for log message Example : my $log = $support->log_filehandle; $support->log_verbose('Log this verbose message.\n', 1); Description : Logs a message via $self->log if --verbose option was used Return type : TRUE on success, FALSE if not verbose Exceptions : none Caller : general =cut sub debug { my ($self, $txt, $indent, $stamped) = @_; return(0) unless ($self->{'loglevel'} >= 4); $self->log_generic($txt, $indent, $stamped); return(1); } sub log_progress { my $self = shift; my $name = shift; my $curr = shift; my $indent = shift; throw("You must provide a name and the current value for your progress bar") unless ($name and $curr); # return if we haven't reached the next increment return if ($curr < int($self->{'_progress'}->{$name}->{'next'})); my $index = $self->{'_progress'}->{$name}->{'index'}; my $num_bins = $self->{'_progress'}->{$name}->{'numbins'}; my $percent = $index/$num_bins*100; my $log_str; $log_str .= ' 'x$indent if ($index == 0); $log_str .= "\b"x4; $log_str .= sprintf("%3s%%", $percent); $log_str .= "\n" if ($curr == $self->{'_progress'}->{$name}->{'max_val'}); $self->info($log_str); # increment counters $self->{'_progress'}->{$name}->{'index'}++; $self->{'_progress'}->{$name}->{'next'} += $self->{'_progress'}->{$name}->{'binsize'}; } sub log_progressbar { my $self = shift; my $name = shift; my $curr = shift; my $indent = shift; throw("You must provide a name and the current value for your progress bar") unless ($name and $curr); # return if we haven't reached the next increment return if ($curr < int($self->{'_progress'}->{$name}->{'next'})); my $index = $self->{'_progress'}->{$name}->{'index'}; my $num_bins = 
$self->{'_progress'}->{$name}->{'numbins'}; my $percent = $index/$num_bins*100; my $log_str = "\r".(' 'x$indent)."[".('='x$index).(' 'x($num_bins-$index))."] ${percent}\%"; $log_str .= "\n" if ($curr == $self->{'_progress'}->{$name}->{'max_val'}); $self->info($log_str); # increment counters $self->{'_progress'}->{$name}->{'index'}++; $self->{'_progress'}->{$name}->{'next'} += $self->{'_progress'}->{$name}->{'binsize'}; } sub init_progress { my $self = shift; my $max = shift; my $num_bins = shift || 50; throw("You must provide the maximum value for your progress bar") unless (defined($max)); # auto-generate a unique name for your progressbar my $name = time . '_' . int(rand(1000)); # calculate bin size; we will use 50 bins (2% increments) my $binsize = $max/$num_bins; $self->{'_progress'}->{$name}->{'max_val'} = $max; $self->{'_progress'}->{$name}->{'binsize'} = $binsize; $self->{'_progress'}->{$name}->{'numbins'} = $num_bins; $self->{'_progress'}->{$name}->{'next'} = 0; $self->{'_progress'}->{$name}->{'index'} = 0; return $name; } =head2 log_filehandle Arg[1] : (optional) String $mode - file access mode Example : my $log = $support->log_filehandle; # print to the filehandle print $log 'Lets start logging...\n'; # log via the wrapper $self->log() $support->log('Another log message.\n'); Description : Returns a filehandle for logging (STDERR by default, logfile if set from config or commandline). You can use the filehandle directly to print to, or use the smart wrapper $self->log(). 
Logging mode (truncate or append) can be set by passing the mode as an argument to log_filehandle(), or with the --logappend commandline option (default: truncate) Return type : Filehandle - the filehandle to log to Exceptions : thrown if logfile can't be opened Caller : general =cut sub log_filehandle { my ($self, $mode) = @_; unless ($self->{'_log_filehandle'}) { $mode ||= '>'; $mode = '>>' if ($self->logappend); my $fh = \*STDERR; if (my $logfile = $self->logfile) { if (my $logpath = $self->logpath) { unless (-e $logpath) { system("mkdir -p $logpath") == 0 or throw("Can't create log dir $logpath: $!\n"); } $logfile = "$logpath/".$self->logfile; } open($fh, "$mode", $logfile) or throw("Unable to open $logfile for writing: $!"); } $self->{'_log_filehandle'} = $fh; } return $self->{'_log_filehandle'}; } =head2 extract_log_identifier Arg[1] : Example : Description : Return type : Exceptions : Caller : Status : =cut sub extract_log_identifier { my $self = shift; if (my $logfile = $self->logfile) { $logfile =~ /.+\.([^\.]+)\.log/; return $1; } else { return undef; } } =head2 init_log Example : $support->init_log; Description : Opens a filehandle to the logfile and prints some header information to this file. This includes script name, date, user running the script and parameters the script will be running with. Return type : Filehandle - the log filehandle Exceptions : none Caller : general =cut sub init_log { my $self = shift; my $params = shift; # get a log filehandle my $log = $self->log_filehandle; # remember start time $self->{'_start_time'} = time; # don't log parameters if this script is run by another one if ($self->logauto or ! 
$self->is_component) { # print script name, date, user who is running it my $hostname = `hostname`; chomp $hostname; my $script = "$hostname:$Bin/$Script"; my $user = `whoami`; chomp $user; $self->info("Script: $script\nDate: ".$self->date."\nUser: $user\n"); # print parameters the script is running with if ($params) { $self->info("Parameters:\n\n"); $self->info($params); } } return $log; } =head2 finish_log Example : $support->finish_log; Description : Writes footer information to a logfile. This includes the number of logged warnings, timestamp and memory footprint. Return type : TRUE on success Exceptions : none Caller : general =cut sub finish_log { my $self = shift; $self->info("\nAll done for $Script.\n"); $self->info($self->warning_count." warnings. "); $self->info("Runtime: ".$self->runtime." ".$self->date_and_mem."\n\n"); return(1); } sub runtime { my $self = shift; my $runtime = "n/a"; if ($self->{'_start_time'}) { my $diff = time - $self->{'_start_time'}; my $sec = $diff % 60; $diff = ($diff - $sec) / 60; my $min = $diff % 60; my $hours = ($diff - $min) / 60; $runtime = "${hours}h ${min}min ${sec}sec"; } return $runtime; } =head2 date_and_mem Example : print LOG "Time, memory usage: ".$support->date_and_mem."\n"; Description : Prints a timestamp and the memory usage of your script. Return type : String - timestamp and memory usage Exceptions : none Caller : general =cut sub date_and_mem { my $date = strftime "%Y-%m-%d %T", localtime; my $mem = `ps -p $$ -o vsz |tail -1`; chomp $mem; $mem = parse_bytes($mem*1000); return "[$date, mem $mem]"; } sub time_and_mem { my $date = strftime "%T", localtime; my $mem = `ps -p $$ -o vsz |tail -1`; chomp $mem; $mem = parse_bytes($mem*1000); $mem =~ s/ //; return "[$date|$mem]"; } =head2 date Example : print "Date: " . $support->date . 
"\n"; Description : Prints a nicely formatted timestamp (YYYY-DD-MM hh:mm:ss) Return type : String - the timestamp Exceptions : none Caller : general =cut sub date { return strftime "%Y-%m-%d %T", localtime; } =head2 mem Example : print "Memory usage: " . $support->mem . "\n"; Description : Prints the memory used by your script. Not sure about platform dependence of this call ... Return type : String - memory usage Exceptions : none Caller : general =cut sub mem { my $mem = `ps -p $$ -o vsz |tail -1`; chomp $mem; return $mem; } =head2 warning_count Example : print LOG "There were ".$support->warnings." warnings.\n"; Description : Returns the number of warnings encountered while running the script (the warning counter is increased by $self->log_warning). Return type : Int - number of warnings Exceptions : none Caller : general =cut sub warning_count { my $self = shift; return $self->{'_warnings'}; } =head2 logfile Arg[1] : Example : Description : Getter and setter for the logfile Return type : Exceptions : Caller : Status : =cut sub logfile { my $self = shift; $self->{'_logfile'} = shift if (@_); return $self->{'_logfile'}; } =head2 log_auto_id Arg[1] : Example : Description : Return type : Exceptions : Caller : Status : =cut sub log_auto_id { my $self = shift; $self->{'_log_auto_id'} = shift if (@_); return $self->{'_log_auto_id'}; } sub logauto { my $self = shift; $self->{'_log_auto'} = shift if (@_); return $self->{'_log_auto'}; } =head2 create_auto_logfile Arg[1] : Example : Description : Return type : Exceptions : Caller : Status : At Risk : under development =cut sub create_auto_logfile { my $self = shift; my $logautobase = shift; # do nothing if automatic logfile generation isn't set return unless ($self->logauto); # an explicit logfile name overrides LOGAUTO return if ($self->logfile); # argument check unless ($logautobase) { throw('Need a base logfile name for auto-generating logfile.'); } # create a logfile name 
$self->logfile("${logautobase}_".$self->log_auto_id.".log"); } =head2 logpath Arg[1] : Example : Description : Return type : Exceptions : Caller : Status : =cut sub logpath { my $self = shift; $self->{'_logpath'} = shift if (@_); return $self->{'_logpath'}; } =head2 logappend Arg[1] : Example : Description : Return type : Exceptions : Caller : Status : =cut sub logappend { my $self = shift; $self->{'_logappend'} = shift if (@_); return $self->{'_logappend'}; } =head2 is_component Arg[1] : Example : Description : Return type : Exceptions : Caller : Status : =cut sub is_component { my $self = shift; $self->{'_is_component'} = shift if (@_); return $self->{'_is_component'}; } sub loglevel { my $self = shift; return $reverse_level_defs[$self->{'loglevel'}]; } # # deprecated methods (left here for backwards compatibility # sub log_error { return $_[0]->error(@_); } sub log_warning { return $_[0]->warning(@_); } sub log { return $_[0]->info(@_); } sub log_verbose { return $_[0]->debug(@_); } sub log_stamped { return $_[0]->log(@_, 1); } 1;
willmclaren/ensembl
modules/Bio/EnsEMBL/Utils/Logger.pm
Perl
apache-2.0
17,336
package XrefParser::AedesCAPParser;

use strict;
use warnings;
use File::Basename;

use base qw( XrefParser::BaseParser );

# Parser for Aedes CAP (Community Annotation Project) database dumps in
# FASTA format.  Header lines are pipe-delimited, e.g. (Anopheles example):
#
#   >ANXB10B|Annexin B10B
#   MSWYYTPHPTVVPAEDFDASADANALRKAMKGFGTDEQAIIDILCARSNGQRQEIAEAFKRELGRDLIDDLKSELGGKFEDVILGLMLRPEAYLCKQLHKAMDGIGTDEKSLIEII
#   CPQTNDQIRAIVDCYEEMYSRPLAEHLCSETSGSFRRLLTMIIVGSRDPQGTVDPELAVEQAKQLYDAGEGKLGTDEEVFYKILAHASFDQLEIVFEEYKSLSGRTIEQALKAELS
#   GELYDALSAIVECVQMAPHFFAKRLHKAMDGVGTDDATLIRIIVSRSEIDLQNIKDEFEQMYNKTLVSAVRSETSGDYKRALCALIGNA

# run
#   Args    : source_id, species_id, arrayref of input file names,
#             release file (unused here), verbose flag
#   Returns : 0 on success, 1 if the input file cannot be opened.
#   Side effects : uploads the parsed xrefs via upload_xref_object_graphs.
sub run {

  # Historical calling-convention hack: drop $self only when invoked as a
  # method.  NOTE(review): "my $x = shift if COND" is a deprecated Perl
  # construct, but later code relies on $self being set, so it is kept.
  my $self = shift if (defined(caller(1)));

  my $source_id    = shift;
  my $species_id   = shift;
  my $files        = shift;
  my $release_file = shift;
  my $verbose      = shift;

  my $file = @{$files}[0];

  my @xrefs;

  # Read the FASTA file one '>'-delimited record at a time.
  local $/ = "\n>";

  my $file_io = $self->get_filehandle($file);
  if ( !defined $file_io ) {
    print STDERR "Could not open $file\n";
    return 1;
  }

  while ( $_ = $file_io->getline() ) {

    # BUGFIX: the original code had "next if (/^File:/);" *outside* this
    # loop, where $_ is undefined and "next" would exit the subroutine
    # instead of skipping a record.  Skip header records here instead.
    next if (/^File:/);

    my $xref;

    my ($header, $sequence) = $_ =~ /^>?(.+?)\n([^>]*)/s
      or warn("Can't parse FASTA entry: $_\n");

    # deconstruct header - just use first part
    my ($accession, $symbol, $description, $chr, $start, $end)
      = split /\|/, $header;

    # Fall back to the accession when no symbol field is present
    # (guard against undef as well as the empty string).
    if (!defined $symbol || $symbol eq "") {
      $symbol = "$accession";
    }

    # make sequence into one long string
    $sequence =~ s/\n//g;

    # build the xref object and store it
    $xref->{ACCESSION}     = $accession;
    $xref->{LABEL}         = $symbol;
    $xref->{DESCRIPTION}   = $description;
    $xref->{SEQUENCE}      = $sequence;
    $xref->{SOURCE_ID}     = $source_id;
    $xref->{SPECIES_ID}    = $species_id;
    $xref->{SEQUENCE_TYPE} = 'peptide';
    $xref->{STATUS}        = 'manual annotation';

    push @xrefs, $xref;

  }

  $file_io->close();

  XrefParser::BaseParser->upload_xref_object_graphs(\@xrefs);

  print scalar(@xrefs) . " Aedes CAP xrefs successfully parsed\n" if ($verbose);

  return 0;
}

1;
adamsardar/perl-libs-custom
EnsemblAPI/ensembl/misc-scripts/xref_mapping/XrefParser/AedesCAPParser.pm
Perl
apache-2.0
1,976
#!/usr/bin/perl =head1 NAME lod-transcode.pl - Materialize the files necessary to host slash-based linked data. =head1 SYNOPSIS lod-transcode.pl [OPTIONS] data.rdf http://base /path/to/www =head1 DESCRIPTION This script will materialize the necessary files for serving static linked data. Given an input file data.rdf, this script will find all triples that use a URI as subject or object that contains the supplied base URI, and serialize the matching triples to the appropriate files for serving as linked data. For example, using the input RDF: @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> . @prefix db: <http://dbpedia.org/resource/> . @prefix prop: <http://dbpedia.org/property/> . @prefix dbo: <http://dbpedia.org/ontology/> . db:Berlin a dbo:City ; rdfs:label "Berlin"@en ; prop:population 3431700 . db:Deutsche_Bahn dbo:locationCity db:Berlin . Invoking this command: lod-transcode.pl -i=turtle data.ttl http://dbpedia.org /var/www Will produce the files: /var/www/data/Berlin.rdf /var/www/data/Berlin.ttl /var/www/data/Deutsche_Bahn.rdf /var/www/data/Deutsche_Bahn.ttl The process of mapping URIs to files on disk can be configured using the command line OPTIONS 'uripattern' and 'filepattern': lod-materialize.pl --uripattern="/resource/(.*)" --filepattern="/page/\\1" data.rdf http://dbpedia.org /var/www This will create the files: /var/www/page/Berlin.rdf /var/www/page/Berlin.ttl /var/www/page/Deutsche_Bahn.rdf /var/www/page/Deutsche_Bahn.ttl =head1 OPTIONS Valid command line options are: =over 4 =item * -in=FORMAT =item * -i=FORMAT Specify the name of the RDF format used by the input file. Defaults to "ntriples". =item * -out=FORMAT,FORMAT =item * -o=FORMAT,FORMAT Specify a comma-seperated list of RDF formats used for serializing the output files. Defaults to "rdfxml,turtle,ntriples". =item * --define ns=URI =item * -D ns=URI Specify a namespace mapping used by the serializers. =item * --verbose Print information about file modifications to STDERR. 
=item * -n Perform a dry-run without modifying any files on disk. =item * --progress[=N] Prints out periodic progress of the materialization process. If specified, the frequency argument N is used to only print the progress information on every Nth triple. =item * --concurrency=N Performs the transcoding of materialized files into secondary RDF formats using the specified number of threads. =item * --uripattern=PATTERN Specifies the URI pattern to match against URIs used in the input RDF. URIs in the input RDF are matched against this pattern appended to the base URI (http://base above). =item * --filepattern=PATTERN Specifies the path template to use in constructing data filenames. This pattern will be used to construct an absolute filename by interpreting it relative to the path specified for the document root (/path/to/www above). =item * --directoryindex=FILE If specified, will look for any files created that share a base name with a created directory (e.g. ./foo.rdf and ./foo/), move the file into the directory, and rename it to the specified directoryindex FILE name with its original file extension intact (e.g. ./foo/index.rdf). This will allow Apache's MultiViews mechanism to properly serve the data. =item * --apache Print the Apache configuration needed to serve the produced RDF files as linked data. This includes setting Multiview for content negotiation, the media type registration for RDF files and mod_rewrite rules for giving 303 redirects from resource URIs to the content negotiated data URIs. =item * --buffer-size=TRIPLES Specifies the number of output triples to buffer before flushing data to disk. This can dramatically improve performance as writes to commonly used files can be aggregated into a single large IO ops instead of many small IO ops. 
=back =cut use strict; use warnings; use threads; use FindBin qw($Bin); use File::Copy; use Fcntl qw(LOCK_EX LOCK_UN); use File::Spec; use File::Find; use File::Path 2.06 qw(make_path); use Getopt::Long; use Data::Dumper; use List::MoreUtils qw(part); $| = 1; my %namespaces; my $in = 'ntriples'; my $out = 'rdfxml,turtle,ntriples'; my $matchre = q</resource/(.*)>; my $outre = '/data/$1'; my $dryrun = 0; my $debug = 0; my $apache = 0; my $count = 0; my $threads = 1; my $cache_size = 1; my $files_per_dir = 0; my $dir_index; my $result = GetOptions ( "in=s" => \$in, "out=s" => \$out, "define=s" => \%namespaces, "D=s" => \%namespaces, "uripattern=s" => \$matchre, "filepattern=s" => \$outre, "verbose+" => \$debug, "n" => \$dryrun, "progress:1" => \$count, "apache" => \$apache, "concurrency|j=s" => \$threads, "filelimit|L=i" => \$files_per_dir, "directoryindex=s" => \$dir_index, "buffer-size|S=i" => \$cache_size, ); if ($in ne 'ntriples') { warn "Input for materialization must be ntriples but '$in' requested\n"; exit(2); } unless (@ARGV) { print <<"END"; Usage: $0 [OPTIONS] data.rdf http://base /path/to/www/ END exit(1); } my $file = shift or die "An RDF filename must be given"; my $url = shift or die "A URL base must be given"; my $base = shift or die "A path to the base URL must be given"; my @out = split(',', $out); my %files; my %paths; if ($url =~ m<[/]$>) { chop($url); } if ($debug) { warn "Input file : $file\n"; warn "Input format : $in\n"; warn "Output formats : " . join(', ', @out) . "\n"; warn "URL Pattern : $matchre\n"; warn "File Pattern : $outre\n"; warn "Output path : " . File::Spec->rel2abs($base) . "\n"; warn "File Limit per Directory : $files_per_dir\n" if ($files_per_dir); } my %ext = ( rdfxml => 'rdf', 'rdfxml-abbrev' => 'rdf', turtle => 'ttl', ntriples => 'nt' ); my @new_formats = grep { $_ ne 'ntriples' } @out; my $format_string = '' . 
join(' ', map {qq[-f 'xmlns:$_="$namespaces{$_}"']} (keys %namespaces)); if (@new_formats) { my $i = 0; my @files; find( { no_chdir => 1, wanted => sub { local($/) = undef; return unless ($File::Find::name =~ /[.]nt$/); my $input = File::Spec->rel2abs( $File::Find::name ); push(@files, $input); } }, $base ); if ($threads == 1) { transcode_file( 1, \@files ); } else { my @partitions = part { $i++ % $threads } @files; my @threads; foreach my $pnum (0 .. $#partitions) { my $t = threads->create( \&transcode_files, $pnum, $partitions[ $pnum ] ); push(@threads, $t); } $_->join() for (@threads); } } sub transcode_files { my $process = shift; my $files = shift; my $total = scalar(@$files); foreach my $i (0 .. $#{ $files }) { my $filename = $files->[ $i ]; if ($dir_index) { my ($dir) = ($filename =~ /^(.*)[.]nt$/); if (-d $dir) { my $newfilename = File::Spec->catfile($dir, "${dir_index}.nt"); # warn "*** SHOULD RENAME $filename to $newfilename\n"; rename($filename, $newfilename); $filename = $newfilename; } } if ($count) { my $num = $i+1; my $perc = ($num/$total) * 100; printf("\rProcess $process transcoding file $num / $total (%3.1f%%)\t\t", $perc); } foreach my $format (@new_formats) { my $ext = $ext{ $format }; my $outfile = $filename; $outfile =~ s/[.]nt/.$ext/; if (-r $outfile) { my $in_mtime = (stat($filename))[9]; my $out_mtime = (stat($outfile))[9]; if ($out_mtime > $in_mtime) { # warn "*** $filename seems to already have been transcoded to $format\n"; next; } } warn "Creating file $outfile ...\n" if ($debug > 1); unless ($dryrun) { my $cmd = "rapper -q -i ntriples -o $format $format_string $filename"; open(my $fh, "$cmd|") or do { warn $!; next; }; open(my $tfh, '>', $outfile) or do { warn $!; next }; print {$tfh} <$fh>; } } } printf("\n"); }
timrdf/csv2rdf4lod-automation
bin/lod-materialize/c/lod-transcode.pl
Perl
apache-2.0
7,829
#!/usr/bin/env perl =head1 LICENSE Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute Copyright [2016-2020] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =head1 CONTACT Please email comments or questions to the public Ensembl developers list at <http://lists.ensembl.org/mailman/listinfo/dev>. Questions may also be sent to the Ensembl help desk at <http://www.ensembl.org/Help/Contact>. =head1 NAME run_binding_site_import_pipeline.pl -- deletes previous data and runs the binding site import =head1 DESCRIPTION Deletes all motif features for matrices that we're importing and sets up the import pipeline for import =head1 OPTIONS =over =item B<help> Give short help =item B<-dbhost> Host where the database is =item B<-dbuser> User of the database =item B<-dbpass> Password for the database user Needs writing access =item B<-dbport> Port of the host where the database is =item B<-dbname> Name of the database =item B<-dnadbhost> Host of the specific core database to use =item B<-dnadbuser> User of the specific core database =item B<-dnadbport> Port of the host where the specific core database to use is =item B<-dnadbname> Name of the specific core database to use =item B<-workdir> Folder where the data is found =item B<-output_dir> Folder to output the results of the pipeline =item B<-slices> List of slices to be imported, separated by ; eg. 
1;2;X;23 =item B<-continue> If set, it will assume a pipeline db already exists =back =cut use strict; use warnings; use Getopt::Long; use Pod::Usage; use Bio::EnsEMBL::Utils::Exception qw(throw); use Bio::EnsEMBL::DBSQL::DBAdaptor; use Bio::EnsEMBL::Funcgen::DBSQL::DBAdaptor; my ($host, $port, $user, $pass, $dbname); my ($dnadb_host, $dnadb_port, $dnadb_user, $dnadb_name); my ($help, $workdir, $output_dir, @slices); #get command line options print "$0 @ARGV\n"; GetOptions ( 'dnadb_host=s' => \$dnadb_host, 'dnadb_user=s' => \$dnadb_user, 'dnadb_port=i' => \$dnadb_port, 'dnadb_name=s' => \$dnadb_name, 'dbhost=s' => \$host, 'dbuser=s' => \$user, 'dbport=i' => \$port, 'dbpass=s' => \$pass, 'dbname=s' => \$dbname, 'workdir=s' => \$workdir, 'output_dir=s' => \$output_dir, 'slices=s{,}' => \@slices, "help|h" => \$help, ) or pod2usage( -exitval => 1 ); #Catch unknown opts pod2usage(1) if ($help); if(!$host || !$port || !$user || !$dbname ) { print "Missing connection parameters for efg db\n"; pod2usage(0); } if(!$dnadb_host || !$dnadb_port || !$dnadb_user || !$dnadb_name ) { print "Missing connection parameters for core db\n"; pod2usage(0); } if(!$workdir) { print "Missing working folder(s)\n"; pod2usage(0); } if(!$output_dir) { print "Missing output folder(s)\n"; pod2usage(0); } my $coredb = Bio::EnsEMBL::DBSQL::DBAdaptor->new ('-host' => $dnadb_host, '-user' => $dnadb_user, '-port' => $dnadb_port, '-dbname' => $dnadb_name ); my $efgdb = Bio::EnsEMBL::Funcgen::DBSQL::DBAdaptor->new (-host => $host, -port => $port, -user => $user, -dbname => $dbname, -pass => $pass, -dnadb => $coredb ); # Test connection $efgdb->dbc->db_handle; my $first = 1; #For omitting -job_topup my $bma = $efgdb->get_BindingMatrixAdaptor; opendir(DIR, $workdir) || throw("Could not opendir:\t$workdir"); my @files = readdir(DIR); closedir DIR; foreach my $file (@files){ next if $file !~ /^(.*)\.filtered\.bed$/; my $matrix = $1; print "Matrix: ".$matrix."\n"; my @bms = @{ 
$bma->fetch_all_by_name($matrix) }; if(scalar(@bms) != 1){ die("Failed to find unique matrix with name $matrix.". "(Probably because it is present in two sources/analyses)") } my $bm = $bms[0]; my $mf_sql = 'motif_feature where binding_matrix_id='.$bm->dbID; if(scalar(@slices)>0){ warn "Restricting to requested ".scalar(@slices)." slices"; $mf_sql = $mf_sql." and seq_region_id in (select seq_region_id from seq_region where name in ('".join("','",@slices)."'))"; } my $sql = "delete from associated_motif_feature where motif_feature_id in (select motif_feature_id from $mf_sql )"; warn "Deleting $matrix data(".$bm->dbID.") - regulatory_attribute/associated_motif_feature records will need regenerating\n"; #warn $sql; $efgdb->dbc->do($sql); $sql = "delete from $mf_sql"; #warn $sql; $efgdb->dbc->do($sql); my $cmd="init_pipeline.pl Bio::EnsEMBL::Funcgen::HiveConfig::ImportMotifFeatures_conf -dnadb_host $dnadb_host -dnadb_port $dnadb_port -dnadb_user $dnadb_user -dnadb_name $dnadb_name -host $host -port $port -user $user -pass $pass -dbname $dbname -output_dir $output_dir -efg_src $ENV{SRC}/ensembl-funcgen/ -file ${workdir}/${file} -matrix $matrix ". (scalar(@slices)>0 ? " -slices ".join(",",@slices) : "")." ".($first ? '' : " -job_topup"); print $cmd."\n"; system($cmd); $first = 0; }
Ensembl/ensembl-funcgen
scripts/pwm_mappings/run_binding_site_import_pipeline.pl
Perl
apache-2.0
5,596
package Paws::CloudFormation::StackInstance;
  # Thin Moose data class describing one CloudFormation stack instance: a
  # reference to an attempted or actual stack in a given account and region,
  # belonging to exactly one stack set (see DESCRIPTION in the POD below).
  # All attributes are optional read-only strings mapped to/from the
  # CloudFormation API wire format.
  use Moose;
  has Account => (is => 'ro', isa => 'Str');        # AWS account ID
  has Region => (is => 'ro', isa => 'Str');         # AWS region name
  has StackId => (is => 'ro', isa => 'Str');        # ID of the concrete stack
  has StackSetId => (is => 'ro', isa => 'Str');     # owning stack set
  has Status => (is => 'ro', isa => 'Str');         # INOPERABLE | OUTDATED | CURRENT (see POD)
  has StatusReason => (is => 'ro', isa => 'Str');   # free-text explanation for Status
1;

### main pod documentation begin ###

=head1 NAME

Paws::CloudFormation::StackInstance

=head1 USAGE

This class represents one of two things:

=head3 Arguments in a call to a service

Use the attributes of this class as arguments to methods. You shouldn't
make instances of this class. Each attribute should be used as a named
argument in the calls that expect this type of object.

As an example, if Att1 is expected to be a Paws::CloudFormation::StackInstance object:

  $service_obj->Method(Att1 => { Account => $value, ..., StatusReason => $value });

=head3 Results returned from an API call

Use accessors for each attribute. If Att1 is expected to be a
Paws::CloudFormation::StackInstance object:

  $result = $service_obj->Method(...);
  $result->Att1->Account

=head1 DESCRIPTION

An AWS CloudFormation stack, in a specific account and region, that's
part of a stack set operation. A stack instance is a reference to an
attempted or actual stack in a given account within a given region. A
stack instance can exist without a stackE<mdash>for example, if the stack
couldn't be created for some reason. A stack instance is associated with
only one stack set. Each stack instance contains the ID of its associated
stack set, as well as the ID of the actual stack and the stack status.

=head1 ATTRIBUTES

=head2 Account => Str

The name of the AWS account that the stack instance is associated with.

=head2 Region => Str

The name of the AWS region that the stack instance is associated with.

=head2 StackId => Str

The ID of the stack instance.

=head2 StackSetId => Str

The name or unique ID of the stack set that the stack instance is
associated with.

=head2 Status => Str

The status of the stack instance, in terms of its synchronization with
its associated stack set.

=over

=item *

C<INOPERABLE>: A C<DeleteStackInstances> operation has failed and left
the stack in an unstable state. Stacks in this state are excluded from
further C<UpdateStackSet> operations. You might need to perform a
C<DeleteStackInstances> operation, with C<RetainStacks> set to C<true>,
to delete the stack instance, and then delete the stack manually.

=item *

C<OUTDATED>: The stack isn't currently up to date with the stack set
because:

=over

=item *

The associated stack failed during a C<CreateStackSet> or
C<UpdateStackSet> operation.

=item *

The stack was part of a C<CreateStackSet> or C<UpdateStackSet> operation
that failed or was stopped before the stack was created or updated.

=back

=item *

C<CURRENT>: The stack is currently up to date with the stack set.

=back

=head2 StatusReason => Str

The explanation for the specific status code that is assigned to this
stack instance.

=head1 SEE ALSO

This class forms part of L<Paws>, describing an object used in L<Paws::CloudFormation>

=head1 BUGS and CONTRIBUTIONS

The source code is located here: https://github.com/pplu/aws-sdk-perl

Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues

=cut
ioanrogers/aws-sdk-perl
auto-lib/Paws/CloudFormation/StackInstance.pm
Perl
apache-2.0
3,340
=head1 LICENSE

Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

=cut

=pod

=head1 NAME

Bio::EnsEMBL::Compara::RunnableDB::MakeNTSpeciesTree::PhylofitFactory

=cut

=head1 SYNOPSIS

Hive factory runnable: selects genomic_align_block ids from a previous
compara database that contain alignments for every genome in the MSA
method_link_species_set, and fans them out on branch 2 for downstream
phyloFit jobs.

=cut

package Bio::EnsEMBL::Compara::RunnableDB::MakeNTSpeciesTree::PhylofitFactory;

use strict;
use warnings;

use Bio::EnsEMBL::Registry;
use Bio::EnsEMBL::Compara::DBSQL::DBAdaptor;
use Data::Dumper;

use base('Bio::EnsEMBL::Compara::RunnableDB::BaseRunnable');

# fetch_input
#   Reads params: 'previous_compara_db' (connection hash) and 'msa_mlssid'.
#   Stores in param 'gab_ids' an arrayref of { block_id, tree_mlss_id }
#   for every alignment block that covers the full species set (ancestral
#   sequence blocks are skipped).
sub fetch_input {
  my $self = shift @_;

  # Connect to the previous release's compara database, which holds the
  # multiple alignment we are sampling blocks from.
  my $prev_compara_dba = Bio::EnsEMBL::Compara::DBSQL::DBAdaptor->new(
    %{ $self->param('previous_compara_db') } );
  my $gab_a = $prev_compara_dba->get_GenomicAlignBlockAdaptor;

  my @genomic_align_block_ids;
  my $mlss_id = $self->param('msa_mlssid');

  # Number of genomes in the species set of this MLSS.
  my $sql1 = "SELECT COUNT(*) FROM species_set ss ".
    "INNER JOIN method_link_species_set ".
    "mlss ON mlss.species_set_id = ss.species_set_id ".
    "WHERE mlss.method_link_species_set_id = ?";
  my $sth1 = $gab_a->dbc->prepare($sql1);
  $sth1->execute($mlss_id);
  my $count = $sth1->fetchall_arrayref()->[0]->[0];

  # only use alignment blocks that contain every genome in the species set
  my $sql2 = "SELECT gab.genomic_align_block_id FROM genomic_align_block gab 
    INNER JOIN genomic_align ga ON ga.genomic_align_block_id = gab.genomic_align_block_id
    INNER JOIN dnafrag df ON df.dnafrag_id = ga.dnafrag_id 
    WHERE ga.method_link_species_set_id = ? 
    GROUP BY gab.genomic_align_block_id HAVING COUNT(distinct(df.genome_db_id)) = ?";
  my $sth2 = $gab_a->dbc->prepare($sql2);
  $sth2->execute($mlss_id, $count);

  # List assignment (rather than "my $id = fetchrow_array") avoids relying
  # on DBI's scalar-context behaviour and terminates cleanly on exhaustion.
  while (my ($genomic_align_block_id) = $sth2->fetchrow_array) {
    my $genomic_align_block = $gab_a->fetch_by_dbID($genomic_align_block_id);
    # if the alignments consist of ancestral sequences - skip these
    next if $genomic_align_block->genomic_align_array->[0]->dnafrag->genome_db->name eq "ancestral_sequences";
    push @genomic_align_block_ids,
      { 'block_id' => $genomic_align_block->dbID, 'tree_mlss_id' => $mlss_id };
  }

  $self->param('gab_ids', \@genomic_align_block_ids);
}

# write_output
#   Flows each collected { block_id, tree_mlss_id } hash down branch 2.
sub write_output {
  my $self = shift @_;
  $self->dataflow_output_id($self->param('gab_ids'), 2);
}

1;
dbolser-ebi/ensembl-compara
modules/Bio/EnsEMBL/Compara/RunnableDB/MakeNTSpeciesTree/PhylofitFactory.pm
Perl
apache-2.0
2,869
# # Copyright 2015 Centreon (http://www.centreon.com/) # # Centreon is a full-fledged industry-strength solution that meets # the needs in IT infrastructure and application monitoring for # service performance. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # package hardware::ups::mge::snmp::mode::inputlines; use base qw(centreon::plugins::mode); use strict; use warnings; use centreon::plugins::values; my %map_input_status = ( 1 => 'no', 2 => 'outoftolvolt', 3 => 'outoftolfreq', 4 => 'utilityoff', ); my $maps_counters = { voltage => { class => 'centreon::plugins::values', obj => undef, set => { key_values => [ { name => 'voltage', no_value => 0, }, ], output_template => 'Voltage: %.2f V', output_error_template => 'Voltage: %s', perfdatas => [ { value => 'voltage_absolute', label => 'voltage', template => '%.2f', unit => 'V', min => 0, label_extra_instance => 1 }, ], } }, current => { class => 'centreon::plugins::values', obj => undef, set => { key_values => [ { name => 'current', no_value => 0 }, ], output_template => 'Current: %.2f A', output_error_template => 'Current: %s', perfdatas => [ { value => 'current_absolute', label => 'current', template => '%.2f', unit => 'A', min => 0, label_extra_instance => 1 }, ], } }, frequence => { class => 'centreon::plugins::values', obj => undef, set => { key_values => [ { name => 'frequence', no_value => 0, }, ], output_template => 'Frequence: %.2f Hz', output_error_template => 'Frequence: %s', perfdatas => [ { value => 
'frequence_absolute', label => 'frequence', template => '%.2f', unit => 'Hz', min => 0 }, ], } }, }; my $oid_upsmgInputPhaseNumEntry = '.1.3.6.1.4.1.705.1.6.1'; my $oid_mginputVoltageEntry = '.1.3.6.1.4.1.705.1.6.2.1.2'; # in dV my $oid_mginputFrequencyEntry = '.1.3.6.1.4.1.705.1.6.2.1.3'; # in dHz my $oid_mginputCurrentEntry = '.1.3.6.1.4.1.705.1.6.2.1.6'; # in dA my $oid_upsmgInputBadStatusEntry = '.1.3.6.1.4.1.705.1.6.3'; my $oid_upsmgInputLineFailCauseEntry = '.1.3.6.1.4.1.705.1.6.4'; sub new { my ($class, %options) = @_; my $self = $class->SUPER::new(package => __PACKAGE__, %options); bless $self, $class; $self->{version} = '1.0'; $options{options}->add_options(arguments => { }); $self->{instance_selected} = {}; foreach (keys %{$maps_counters}) { $options{options}->add_options(arguments => { 'warning-' . $_ . ':s' => { name => 'warning-' . $_ }, 'critical-' . $_ . ':s' => { name => 'critical-' . $_ }, }); my $class = $maps_counters->{$_}->{class}; $maps_counters->{$_}->{obj} = $class->new(output => $self->{output}, perfdata => $self->{perfdata}, label => $_); $maps_counters->{$_}->{obj}->set(%{$maps_counters->{$_}->{set}}); } return $self; } sub check_options { my ($self, %options) = @_; $self->SUPER::init(%options); foreach (keys %{$maps_counters}) { $maps_counters->{$_}->{obj}->init(option_results => $self->{option_results}); } } sub manage_counters { my ($self, %options) = @_; my ($short_msg, $short_msg_append, $long_msg, $long_msg_append) = ('', '', '', ''); my @exits; foreach (sort keys %{$options{maps_counters}}) { $options{maps_counters}->{$_}->{obj}->set(instance => $options{instance}); my ($value_check) = $options{maps_counters}->{$_}->{obj}->execute(values => $self->{instance_selected}->{$options{instance}}); # We don't want to display no value next if ($value_check == -10); if ($value_check != 0) { $long_msg .= $long_msg_append . 
$options{maps_counters}->{$_}->{obj}->output_error(); $long_msg_append = ', '; next; } my $exit2 = $options{maps_counters}->{$_}->{obj}->threshold_check(); push @exits, $exit2; my $output = $options{maps_counters}->{$_}->{obj}->output(); $long_msg .= $long_msg_append . $output; $long_msg_append = ', '; if (!$self->{output}->is_status(litteral => 1, value => $exit2, compare => 'ok')) { $short_msg .= $short_msg_append . $output; $short_msg_append = ', '; } $options{maps_counters}->{$_}->{obj}->perfdata(extra_instance => $self->{multiple}); } $self->{output}->output_add(long_msg => $options{label} . " " . $long_msg); my $exit = $self->{output}->get_most_critical(status => [ @exits ]); if (!$self->{output}->is_status(litteral => 1, value => $exit, compare => 'ok')) { $self->{output}->output_add(severity => $exit, short_msg => $options{label} . " " . $short_msg ); } if ($self->{multiple} == 0) { $self->{output}->output_add(short_msg => $options{label} . " " . $long_msg); } } sub run { my ($self, %options) = @_; # $options{snmp} = snmp object $self->{snmp} = $options{snmp}; $self->manage_selection(); $self->{output}->output_add(severity => 'OK', short_msg => 'Input Line(s) status is ok'); if (defined($self->{results}->{$oid_upsmgInputBadStatusEntry}->{$oid_upsmgInputBadStatusEntry . '.0'}) && $self->{results}->{$oid_upsmgInputBadStatusEntry}->{$oid_upsmgInputBadStatusEntry . '.0'} == 1) { $self->{output}->output_add(severity => 'CRITICAL', short_msg => sprintf("Input Line(s) status is '%s'", $map_input_status{$self->{results}->{$oid_upsmgInputLineFailCauseEntry}->{$oid_upsmgInputLineFailCauseEntry . 
'.0'}})); } $self->{multiple} = 1; if (scalar(keys %{$self->{instance_selected}}) == 1) { $self->{multiple} = 0; } if ($self->{multiple} == 1) { $self->{output}->output_add(severity => 'OK', short_msg => 'Input Lines are ok'); } foreach my $id (sort keys %{$self->{instance_selected}}) { $self->manage_counters(instance => $id, maps_counters => $maps_counters, label => "Input Line '" . $id . "'"); } $self->{output}->display(); $self->{output}->exit(); } sub add_result { my ($self, %options) = @_; $self->{instance_selected}->{$options{instance}} = {} if (!defined($self->{instance_selected}->{$options{instance}})); $self->{instance_selected}->{$options{instance}}->{$options{name}} = $self->{results}->{$options{oid}}->{$options{oid} . '.' . $options{instance2}} * 0.1; } sub manage_selection { my ($self, %options) = @_; $self->{results} = $self->{snmp}->get_multiple_table(oids => [ { oid => $oid_upsmgInputPhaseNumEntry }, { oid => $oid_mginputVoltageEntry }, { oid => $oid_mginputFrequencyEntry }, { oid => $oid_mginputCurrentEntry }, { oid => $oid_upsmgInputBadStatusEntry }, { oid => $oid_upsmgInputLineFailCauseEntry }, ], , nothing_quit => 1); if (!defined($self->{results}->{$oid_upsmgInputPhaseNumEntry}->{$oid_upsmgInputPhaseNumEntry . '.0'}) || $self->{results}->{$oid_upsmgInputPhaseNumEntry}->{$oid_upsmgInputPhaseNumEntry . '.0'} == 0) { $self->{output}->add_option_msg(short_msg => "No input lines found."); $self->{output}->option_exit(); } my %instances = (); # can be 'xxx.1' or 'xxx.1.0' (cannot respect MIB :) foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_mginputVoltageEntry}})) { $oid =~ /^$oid_mginputVoltageEntry\.((\d+).*)/; if (scalar(keys %instances) < $self->{results}->{$oid_upsmgInputPhaseNumEntry}->{$oid_upsmgInputPhaseNumEntry . 
'.0'}) { $instances{$2} = 1; $self->add_result(instance => $2, instance2 => $1, name => 'voltage', oid => $oid_mginputVoltageEntry); } } %instances = (); foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_mginputCurrentEntry}})) { $oid =~ /^$oid_mginputCurrentEntry\.((\d+).*)/; if (scalar(keys %instances) < $self->{results}->{$oid_upsmgInputPhaseNumEntry}->{$oid_upsmgInputPhaseNumEntry . '.0'}) { $instances{$2} = 1; $self->add_result(instance => $2, instance2 => $1, name => 'current', oid => $oid_mginputCurrentEntry); } } %instances = (); foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_mginputFrequencyEntry}})) { $oid =~ /^$oid_mginputFrequencyEntry\.((\d+).*)/; if (scalar(keys %instances) < $self->{results}->{$oid_upsmgInputPhaseNumEntry}->{$oid_upsmgInputPhaseNumEntry . '.0'}) { $instances{$2} = 1; $self->add_result(instance => $2, instance2 => $1, name => 'frequence', oid => $oid_mginputFrequencyEntry); } } if (scalar(keys %{$self->{instance_selected}}) <= 0) { $self->{output}->add_option_msg(short_msg => "No input lines found."); $self->{output}->option_exit(); } } 1; __END__ =head1 MODE Check Input lines metrics (frequence, voltage, current). =over 8 =item B<--warning-*> Threshold warning. Can be: 'frequence', 'voltage', 'current'. =item B<--critical-*> Threshold critical. Can be: 'frequence', 'voltage', 'current'. =back =cut
s-duret/centreon-plugins
hardware/ups/mge/snmp/mode/inputlines.pm
Perl
apache-2.0
11,358
=head1 LICENSE Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute Copyright [2016-2022] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =head1 CONTACT Please email comments or questions to the public Ensembl developers list at <http://lists.ensembl.org/mailman/listinfo/dev>. Questions may also be sent to the Ensembl help desk at <http://www.ensembl.org/Help/Contact>. =cut package HiveRNASeq_conf; use strict; use warnings; use File::Spec::Functions qw(catfile catdir file_name_is_absolute); use Bio::EnsEMBL::Analysis::Tools::Utilities qw (get_analysis_settings); use parent ('Bio::EnsEMBL::Analysis::Hive::Config::HiveBaseConfig_conf'); use Bio::EnsEMBL::ApiVersion qw/software_version/; sub default_options { my ($self) = @_; return { # inherit other stuff from the base class %{ $self->SUPER::default_options() }, ########################################################################## # # # CHANGE STUFF HERE # # # ########################################################################## 'dbowner' => '', 'release' => '', 'base_dir' => '', # base_output_dir is created with <base_dir>/<species>/<assembly_accession> 'species' => '', # It MUST be the scientific name so the analyses are created correctly 'assembly_name' => '', 'assembly_accession' => '', 'email' => '', # Add your email so you can be notified when a bam file is removed 'pipeline_name' => '', 'dna_db_name' => '', 'pipe_db_server' => '', 'dna_db_server' => '', 
'data_db_server' => '', # Server for the blast, refine and rough DBs, or you can set them down below 'pipe_db_port' => '', 'dna_db_port' => '', 'data_db_port' => '', # Port for the blast, refine and rough DBs, or you can set them down below 'user' => '', 'password' => '', 'user_r' => '', # You have the choice between: # * using a csv file you already created # * using a study_accession like PRJEB19386 # * using the taxon_id of your species # 'rnaseq_summary_file' should always be set. If 'taxon_id' or 'study_accession' are not undef # they will be used to retrieve the information from ENA and to create the csv file. In this case, # 'file_columns' and 'summary_file_delimiter' should not be changed unless you know what you are doing 'taxon_id' => '', 'study_accession' => '', 'rnaseq_summary_file' => '', 'uniprotdb' => '/hps/nobackup2/production/ensembl/genebuild/blastdb/uniprot/uniprot_2018_04/PE12_vertebrata', # Blast database for comparing the final models to. 'uniprotindex' => '/hps/nobackup2/production/ensembl/genebuild/blastdb/uniprot/uniprot_2018_04/PE12_vertebrata_index/', # Indicate Index for the blast database. 
        'max_reads_per_split' => 2500000, # This sets the number of reads to split the fastq files on
        'max_total_reads' => 200000000, # This is the total number of reads to allow from a single, unsplit file

        'summary_file_delimiter' => '\t', # Use this option to change the delimiter for your summary data file
        'summary_csv_table' => 'csv_data',
        'read_length_table' => 'read_length',
        'rnaseq_data_provider' => 'ENA', # It will be set during the pipeline or it will use this value

        'genome_file' => 'genome/genome.fa', # Leave this as genome/genome.fa to automatically dump the genome
        # Directory layout under <species>/<assembly_accession>/rnaseq
        'base_output_dir' => catdir($self->o('species'), $self->o('assembly_accession'), 'rnaseq'),
        'input_dir' => catdir($self->o('base_output_dir'),'input'),
        'output_dir' => catdir($self->o('base_output_dir'),'output'),
        'merge_dir' => catdir($self->o('base_output_dir'),'merge'),
        'sam_dir' => catdir($self->o('base_output_dir'),'sams'),

        # Database names follow the <dbowner>_<species>_<role>_<release> convention.
        'pipe_db_name' => $self->o('dbowner').'_'.$self->o('species').'_hive_'.$self->o('release'),
        'blast_db_name' => $self->o('dbowner').'_'.$self->o('species').'_blast_'.$self->o('release'),
        'refine_db_name' => $self->o('dbowner').'_'.$self->o('species').'_refine_'.$self->o('release'),
        'rough_db_name' => $self->o('dbowner').'_'.$self->o('species').'_rough_'.$self->o('release'),

        'blast_db_server' => $self->o('data_db_server'),
        'refine_db_server' => $self->o('data_db_server'),
        'rough_db_server' => $self->o('data_db_server'),

        'rnaseq_ftp_base' => 'ftp://ftp.sra.ebi.ac.uk/vol1/fastq/',

        'use_ucsc_naming' => 0,

        'sequence_dump_script' => catfile($self->o('enscode_root_dir'),'ensembl-analysis', 'scripts', 'sequence_dump.pl'),
        'create_type' => 'clone',

        'samtools' => catfile($self->o('binary_base'), 'samtools'), # You may need to specify the full path to the samtools binary
        'picard_lib_jar' => catfile($self->o('software_base_path'), 'Cellar', 'picard-tools', '2.6.0', 'libexec', 'picard.jar'), # You need to specify the full path to the picard library
        'short_read_aligner' =>
            catfile($self->o('software_base_path'), 'opt', 'bwa-051mt', 'bin', 'bwa'), # You may need to specify the full path to the bwa binary
        'refine_ccode_exe' => catfile($self->o('binary_base'), 'RefineSolexaGenes'), # You may need to specify the full path to the RefineSolexaGenes binary
        blastp => catfile($self->o('binary_base'), 'blastp'), # You may need to specify the full path to the blastp binary
        # blast used, it can be either ncbi or wu, it is overriding the -type value from BLAST_PARAMS
        blast_type => 'ncbi',
        splicing_aligner => catfile($self->o('software_base_path'), 'opt', 'exonerate09', 'bin', 'exonerate'), # You may need to specify the full path to the exonerate binary version 0.9.0

        # If your reads are unpaired you may want to run on slices to avoid
        # making overlong rough models. If you want to do this, specify a
        # slice length here otherwise it will default to whole chromosomes.
        slice_length => 10000000,

        # Regular expression to allow FastQ files to be correctly paired,
        # for example: file_1.fastq and file_2.fastq could be paired using
        # the expression "\S+_(\d)\.\S+". Need to identify the read number
        # in brackets; the name the read number (1, 2) and the
        # extension.
        pairing_regex => '\S+_(\d)\.\S+',
        paired => 1,

        # Do you want to make models for the each individual sample as well
        # as for the pooled samples (1/0)?
        single_tissue => 1,

        # What Read group tag would you like to group your samples
        # by?
        # Default = ID
        read_group_tag => 'SM',
        read_id_tag => 'ID',

        use_threads => 3,
        read_min_paired => 50,
        read_min_mapped => 50,
        other_isoforms => 'other', # If you don't want isoforms, set this to undef
        normal_queue => 'production-rh7', # If LSF/other system has submission queues with multiple run time allowed, you might want to change this
        long_queue => 'production-rh7', # If LSF/other system has submission queues with multiple run time allowed, you might want to change this

        # Please assign some or all columns from the summary file to the
        # some or all of the following categories. Multiple values can be
        # separated with commas. ID, SM, DS, CN, is_paired, filename, read_length, is_13plus,
        # is_mate_1 are required. If pairing_regex can work for you, set is_mate_1 to -1.
        # You can use any other tag specified in the SAM specification:
        # http://samtools.github.io/hts-specs/SAMv1.pdf

        ####################################################################
        # This is just an example based on the file snippet shown below. It
        # will vary depending on how your data looks.
        ####################################################################
        file_columns => ['SM', 'ID', 'is_paired', 'filename', 'is_mate_1', 'read_length', 'is_13plus', 'CN', 'PL', 'DS'],

        ##########################################################################
        #                                                                        #
        #                      MOSTLY STAYS CONSTANT, MOSTLY                     #
        #                                                                        #
        ##########################################################################

        maxintron => 200000,

        'port' => '4533',

        blast_db_port => $self->o('data_db_port'),
        blast_db_user => $self->o('user'),
        blast_db_password => $self->o('password'),
        blast_db_driver => $self->o('hive_driver'),

        refine_db_port => $self->o('data_db_port'),
        refine_db_user => $self->o('user'),
        refine_db_password => $self->o('password'),
        refine_db_driver => $self->o('hive_driver'),

        rough_db_port => $self->o('data_db_port'),
        rough_db_user => $self->o('user'),
        rough_db_password => $self->o('password'),
        rough_db_driver => $self->o('hive_driver'),

        # These three databases are created by the pipeline and removed on -drop.
        databases_to_delete => ['blast_db', 'refine_db', 'rough_db'],

        # Connection details for the database holding the blast results.
        'blast_db' => {
            -dbname => $self->o('blast_db_name'),
            -host => $self->o('blast_db_server'),
            -port => $self->o('blast_db_port'),
            -user => $self->o('blast_db_user'),
            -pass => $self->o('blast_db_password'),
            -driver => $self->o('blast_db_driver'),
        },

        # Connection details for the database holding the refined gene models.
        'refine_db' => {
            -dbname => $self->o('refine_db_name'),
            -host => $self->o('refine_db_server'),
            -port => $self->o('refine_db_port'),
            -user => $self->o('refine_db_user'),
            -pass => $self->o('refine_db_password'),
            -driver => $self->o('refine_db_driver'),
        },

        # Connection details for the database holding the rough transcript models.
        'rough_db' => {
            -dbname => $self->o('rough_db_name'),
            -host => $self->o('rough_db_server'),
            -port => $self->o('rough_db_port'),
            -user => $self->o('rough_db_user'),
            -pass => $self->o('rough_db_password),
            -driver => $self->o('rough_db_driver'),
        },
    };
}

# Parameters made visible to every analysis in the pipeline (the wide_* keys).
# Directories and files default to locations derived from output_dir/input_dir
# when the corresponding option is left unset.
sub pipeline_wide_parameters {
    my ($self) = @_;

    my $output_sam_dir = $self->o('sam_dir') ? $self->o('sam_dir') : catdir($self->o('output_dir'), 'SAM');
    my $merge_dir = $self->o('merge_dir') ?
        $self->o('merge_dir') : catdir($self->o('output_dir'), 'merge_out');
    my $genome_file = file_name_is_absolute($self->o('genome_file')) ? $self->o('genome_file') : catdir($self->o('input_dir'), $self->o('genome_file'));
    my $rnaseq_summary_file = $self->o('rnaseq_summary_file') ? $self->o('rnaseq_summary_file') : catfile($self->o('input_dir'), $self->o('pipeline_name').'.csv');

    return {
        %{ $self->SUPER::pipeline_wide_parameters() }, # inherit other stuff from the base class
        wide_rnaseq_summary_file => $rnaseq_summary_file,
        wide_genome_file => $genome_file,
        wide_input_dir => $self->o('input_dir'),
        wide_output_dir => $self->o('output_dir'),
        wide_merge_dir => $merge_dir,
        wide_short_read_aligner => $self->o('short_read_aligner'),
        wide_samtools => $self->o('samtools'),
        wide_output_sam_dir => $output_sam_dir,
        wide_species => $self->o('species'),
        wide_use_ucsc_naming => $self->o('use_ucsc_naming'),
        wide_intron_bam_file => catfile($self->o('output_dir'), 'introns'),
    };
}


=head2 pipeline_create_commands

 Arg [1]    : None
 Description: Add a table named with 'summary_csv_table' to store information about the reads
              The columns are defined by 'file_columns'
 Returntype : Arrayref String, commands to create/delete databases and/or tables
 Exceptions : None

=cut

sub pipeline_create_commands {
    my ($self) = @_;

    my $tables;
    # Columns that hold small integer flags/counts rather than free text.
    my %small_columns = (
        paired => 1,
        read_length => 1,
        is_13plus => 1,
        is_mate_1 => 1,
    );
    # We need to store the values of the csv file to easily process it. It will be used at different stages
    foreach my $key (@{$self->default_options->{'file_columns'}}) {
        if (exists $small_columns{$key}) {
            $tables .= $key.' SMALLINT UNSIGNED NOT NULL,'
        }
        elsif ($key eq 'DS') {
            # DS (description) can be long, so give it a wider column.
            $tables .= $key.' VARCHAR(255) NOT NULL,'
        }
        else {
            $tables .= $key.' VARCHAR(50) NOT NULL,'
        }
    }
    # Index on sample name (SM) and sample id (ID) as these drive the per-sample jobs.
    $tables .= ' KEY(SM), KEY(ID)';

    return [
        # inheriting database and hive tables' creation
        @{$self->SUPER::pipeline_create_commands},

        $self->db_cmd('CREATE TABLE '.$self->o('summary_csv_table')."
($tables)"),
        $self->db_cmd('CREATE TABLE '.$self->o('read_length_table').' ('.
            'fastq varchar(50) NOT NULL,'.
            'read_length int(50) NOT NULL,'.
            'PRIMARY KEY (fastq))'),
    ];
}


=head2 create_header_line

 Arg [1]    : Arrayref String, it will contain the values of 'file_columns'
 Example    : create_header_line($self->o('file_columns'));
 Description: It will create a RG line using only the keys present in your csv file
 Returntype : String representing the RG line in a BAM file
 Exceptions : None

=cut

sub create_header_line {
    my ($items) = shift;

    # Read-group tags from the SAM specification that we know how to fill in.
    my @read_tags = qw(ID SM DS CN DT FO KS LB PG PI PL PM PU);
    my $read_line = '@RG';
    foreach my $rt (@read_tags) {
        # Only emit a #tag# placeholder for tags actually present in the csv columns.
        $read_line .= "\t$rt:#$rt#" if (grep($rt eq $_, @$items));
    }
    return $read_line."\n";
}

## See diagram for pipeline structure

# Defines every analysis in the pipeline; see the flow arrows (-flow_into)
# for the overall structure.
sub pipeline_analyses {
    my ($self) = @_;

    # Template @RG header line built from the configured csv columns.
    my $header_line = create_header_line($self->default_options->{'file_columns'});

    my @analysis = (
        {
            # Sanity-check that all required binaries are on PATH and create the working directories.
            -logic_name => 'checking_file_path',
            -module => 'Bio::EnsEMBL::Hive::RunnableDB::SystemCmd',
            -rc_name => '1GB',
            -parameters => {
                cmd => 'EXIT_CODE=0; for F in #wide_short_read_aligner# #wide_samtools# '.join (' ', $self->o('splicing_aligner'), $self->o('sequence_dump_script'), $self->o('blastp')).'; do which "$F"; if [ "$?"
== 1 ]; then EXIT_CODE=1;fi; done; for D in #wide_output_dir# #wide_input_dir# #wide_merge_dir# #wide_output_sam_dir# `dirname #wide_genome_file#`; do mkdir -p "$D"; done; exit $EXIT_CODE', }, -input_ids => [{ alignment_bam_file => catfile('#wide_merge_dir#', '#assembly_name#.#rnaseq_data_provider#.merged.1.bam'), assembly_name => $self->o('assembly_name'), inputfile => '#wide_rnaseq_summary_file#', }], -flow_into => { 1 => ['downloading_csv'], }, }, { -logic_name => 'downloading_csv', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveDownloadCsvENA', -rc_name => '1GB', -parameters => { study_accession => $self->o('study_accession'), taxon_id => $self->o('taxon_id'), }, -flow_into => { '1->A' => ['create_rnaseq_genome_file', 'create_fastq_download_jobs'], 'A->1' => ['create_rough_db'], }, }, { -logic_name => 'create_rnaseq_genome_file', -module => 'Bio::EnsEMBL::Hive::RunnableDB::SystemCmd', -rc_name => '1GB', -parameters => { cmd => 'if [ ! -s "#wide_genome_file#" ]; then perl '.$self->o('sequence_dump_script').' -dbhost '.$self->o('dna_db_server').' -dbuser '.$self->o('dna_db_user').' -dbport '.$self->o('dna_db_port').' -dbname '.$self->o('dna_db_name').' -coord_system_name '.$self->o('assembly_name').' -toplevel -onefile -header rnaseq -filename #wide_genome_file#;fi', }, -flow_into => { 1 => [ 'index_genome_file'], }, }, { -logic_name => 'index_genome_file', -module => 'Bio::EnsEMBL::Hive::RunnableDB::SystemCmd', -rc_name => '5GB', -parameters => { cmd => 'if [ ! 
-e "#wide_genome_file#.ann" ]; then #wide_short_read_aligner# index -a bwtsw #wide_genome_file#;fi', }, }, { -logic_name => 'create_fastq_download_jobs', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveCreateFastqDownloadJobs', -parameters => { inputfile => '#wide_rnaseq_summary_file#', }, -flow_into => { 2 => {'download_RNASeq_fastqs' => {'iid' => '#iid#'}}, }, }, { -logic_name => 'download_RNASeq_fastqs', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveDownloadRNASeqFastqs', -parameters =>{ ftp_base_url => $self->o('rnaseq_ftp_base'), input_dir => $self->o('input_dir'), }, -flow_into => { 1 => ['get_read_lengths'], }, }, { -logic_name => 'get_read_lengths', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveCalculateReadLength', -parameters =>{ input_dir => $self->o('input_dir'), read_length_table => $self->o('read_length_table'), }, -flow_into => { 1 => ['split_fastq_files'], }, }, { -logic_name => 'split_fastq_files', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::SplitFastQFiles', -parameters => { 'max_reads_per_split' => $self->o('max_reads_per_split'), 'max_total_reads' => $self->o('max_total_reads'), 'rnaseq_summary_file' => '#wide_rnaseq_summary_file#', 'fastq_dir' => $self->o('input_dir'), }, }, { -logic_name => 'create_rough_db', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveCreateDatabase', -parameters => { source_db => $self->o('dna_db'), target_db => $self->o('rough_db'), create_type => $self->o('create_type'), }, -rc_name => '1GB', -flow_into => { 1 => ['backup_original_csv'], }, }, { -logic_name => 'backup_original_csv', -module => 'Bio::EnsEMBL::Hive::RunnableDB::SystemCmd', -parameters => { cmd => 'cp #wide_rnaseq_summary_file# #wide_rnaseq_summary_file#_orig_bak', }, -flow_into => { '1' => ['create_updated_csv'], }, }, { -logic_name => 'create_updated_csv', -module => 'Bio::EnsEMBL::Hive::RunnableDB::SystemCmd', -parameters => { cmd => 'cat '.$self->o('input_dir').'/*_new.csv > 
#wide_rnaseq_summary_file#', }, -flow_into => { '1' => ['parse_summary_file'], }, }, { -logic_name => 'parse_summary_file', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveParseCsvIntoTable', -rc_name => '1GB', -parameters => { column_names => $self->o('file_columns'), sample_column => $self->o('read_group_tag'), inputfile => '#wide_rnaseq_summary_file#', delimiter => $self->o('summary_file_delimiter'), csvfile_table => $self->o('summary_csv_table'), pairing_regex => $self->o('pairing_regex'), read_length_table => $self->o('read_length_table'), }, -flow_into => { '2->A' => [ 'create_tissue_jobs'], 'A->1' => [ 'merged_bam_file' ], }, }, { -logic_name => 'create_tissue_jobs', -module => 'Bio::EnsEMBL::Hive::RunnableDB::JobFactory', -parameters => { inputquery => join(' ', 'SELECT', $self->o('read_group_tag'), ',', $self->o('read_id_tag'), ', is_paired, CN', 'FROM', $self->o('summary_csv_table'), 'WHERE', $self->o('read_group_tag'), '= "#sample_name#"'), column_names => [$self->o('read_group_tag'), $self->o('read_id_tag'), 'is_paired', 'rnaseq_data_provider'], }, -rc_name => '1GB', -flow_into => { '2->A' => ['create_bwa_jobs'], 'A->1' => ['merged_tissue_file'], }, }, { -logic_name => 'create_bwa_jobs', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveCreateBWAJobs', -parameters => { sample_column => $self->o('read_group_tag'), sample_id_column => $self->o('read_id_tag'), csvfile_table => $self->o('summary_csv_table'), column_names => $self->o('file_columns'), use_threading => $self->o('use_threads'), }, -rc_name => '1GB', -flow_into => { '2->A' => ['bwa', 'create_header_files'], 'A->1' => ['bwa2bam'], }, }, { -logic_name => 'create_header_files', -module => 'Bio::EnsEMBL::Hive::RunnableDB::SystemCmd', -rc_name => '1GB', -parameters => { cmd => 'if [ ! 
-e "#wide_output_dir#/#'.$self->o('read_id_tag').'#_header.h" ]; then printf "'.$header_line.'" > #wide_output_dir#/#'.$self->o('read_id_tag').'#_header.h; fi', }, }, { -logic_name => 'bwa', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveBWA', -parameters => { disconnect_jobs => 1, }, -flow_into => { 1 => [ ':////accu?fastq=[]' ], -1 => [ 'bwa_20GB' ], -2 => [ 'bwa_20GB' ], }, -rc_name => '10GB_multithread', }, { -logic_name => 'bwa_20GB', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveBWA', -can_be_empty => 1, -parameters => { disconnect_jobs => 1, }, -flow_into => { 1 => [ ':////accu?fastq=[]' ], }, -rc_name => '20GB_multithread', }, { -logic_name => 'bwa2bam', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveBWA2BAM', -parameters => { sampe_options => '-A -a '.$self->o('maxintron'), samse_options => '', min_paired => $self->o('read_min_paired'), min_mapped => $self->o('read_min_mapped'), header_file => '#wide_output_dir#/#'.$self->o('read_id_tag').'#_header.h', bam_prefix => $self->o('read_id_tag'), email => $self->o('email_address'), disconnect_jobs => 1, }, -flow_into => { 1 => [ ':////accu?filename=[]' ], -1 => {'bwa2bam_20GB' => { fastq => '#fastq#', is_paired => '#is_paired#', $self->o('read_id_tag') => '#'.$self->o('read_id_tag').'#'}}, -2 => {'bwa2bam_20GB' => { fastq => '#fastq#', is_paired => '#is_paired#', $self->o('read_id_tag') => '#'.$self->o('read_id_tag').'#'}}, }, -rc_name => '10GB', }, { -logic_name => 'bwa2bam_20GB', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveBWA2BAM', -can_be_empty => 1, -parameters => { sampe_options => '-A -a '.$self->o('maxintron'), samse_options => '', min_paired => $self->o('read_min_paired'), min_mapped => $self->o('read_min_mapped'), header_file => '#wide_output_dir#/#'.$self->o('read_id_tag').'#_header.h', bam_prefix => $self->o('read_id_tag'), email => $self->o('email_address'), disconnect_jobs => 1, }, -flow_into => { 1 => [ ':////accu?filename=[]' ], }, -rc_name => 
'20GB', }, { -logic_name => 'merged_tissue_file', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveMergeBamFiles', -parameters => { java => 'java', java_options => '-Xmx2g', # If 0, do not use multithreading, faster but can use more memory. # If > 0, tells how many cpu to use for samtools or just to use multiple cpus for picard use_threading => $self->o('use_threads'), # Path to MergeSamFiles.jar picard_lib => $self->o('picard_lib_jar'), # Use this default options for Picard: 'MAX_RECORDS_IN_RAM=20000000 CREATE_INDEX=true SORT_ORDER=coordinate ASSUME_SORTED=true VALIDATION_STRINGENCY=LENIENT' # You will need to change the options if you want to use samtools for merging options => 'MAX_RECORDS_IN_RAM=20000000 CREATE_INDEX=true SORT_ORDER=coordinate ASSUME_SORTED=true VALIDATION_STRINGENCY=LENIENT', # target_db is the database where we will write the files in the data_file table # You can use store_datafile => 0, if you don't want to store the output file target_db => $self->o('rough_db'), assembly_name => $self->o('assembly_name'), rnaseq_data_provider => $self->o('rnaseq_data_provider'), disconnect_jobs => 1, }, -rc_name => '3GB_multithread', -flow_into => { 1 => ['create_analyses_type_job', '?accu_name=filename&accu_address=[]&accu_input_variable=alignment_bam_file' ], }, }, { -logic_name => 'create_analyses_type_job', -module => 'Bio::EnsEMBL::Hive::RunnableDB::JobFactory', -rc_name => '1GB', -parameters => { inputlist => ['gene', 'daf', 'ise'], column_names => ['type'], species => $self->o('species'), }, -flow_into => { 2 => {'create_analyses' => {analyses => [{'-logic_name' => '#species#_#sample_name#_rnaseq_#type#'}]}}, }, }, { -logic_name => 'create_analyses', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveAddAnalyses', -rc_name => '1GB', -parameters => { source_type => 'list', target_db => $self->o('rough_db'), }, }, { -logic_name => 'merged_bam_file', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveMergeBamFiles', -parameters 
=> { java => 'java', java_options => '-Xmx2g', # If 0, do not use multithreading, faster but can use more memory. # If > 0, tells how many cpu to use for samtools or just to use multiple cpus for picard use_threading => $self->o('use_threads'), # Path to MergeSamFiles.jar picard_lib => $self->o('picard_lib_jar'), # Use this default options for Picard: 'MAX_RECORDS_IN_RAM=20000000 CREATE_INDEX=true SORT_ORDER=coordinate ASSUME_SORTED=true VALIDATION_STRINGENCY=LENIENT' # You will need to change the options if you want to use samtools for merging options => 'MAX_RECORDS_IN_RAM=20000000 CREATE_INDEX=true SORT_ORDER=coordinate ASSUME_SORTED=true VALIDATION_STRINGENCY=LENIENT', # target_db is the database where we will write the files in the data_file table # You can use store_datafile => 0, if you don't want to store the output file target_db => $self->o('rough_db'), assembly_name => $self->o('assembly_name'), rnaseq_data_provider => $self->o('rnaseq_data_provider'), disconnect_jobs => 1, }, -rc_name => '5GB_merged_multithread', -flow_into => { 2 => ['create_header_intron'], }, }, { -logic_name => 'create_header_intron', -module => 'Bio::EnsEMBL::Hive::RunnableDB::SystemCmd', -rc_name => '1GB', -parameters => { cmd => '#wide_samtools# view -H #filename# | grep -v @SQ | grep -v @HD > #wide_output_dir#/merged_header.h', }, -flow_into => { '1->A' => [ 'create_toplevel_input_ids'], 'A->1' => ['sam2bam'], }, }, { -logic_name => 'create_toplevel_input_ids', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveSubmitAnalysis', -rc_name => '1GB', -parameters => { iid_type => 'slice', coord_system_name => 'toplevel', slice => 1, batch_slice_ids => 1, batch_target_size => 1000000, include_non_reference => 0, top_level => 1, slice_size => 5000000, slice_overlaps => 2500000, target_db => $self->o('rough_db'), }, -flow_into => { 2 => {'split_on_low_coverage' => {'iid' => '#iid#', alignment_bam_file => '#filename#'}}, }, }, { -logic_name => 'split_on_low_coverage', -module => 
'Bio::EnsEMBL::Analysis::Hive::RunnableDB::CalculateLowCoverageSlices', -parameters => { 'disconnect_jobs' => 1, 'dna_db' => $self->o('dna_db'), }, -rc_name => '10GB', -flow_into => { 2 => ['rough_transcripts'], -1 => ['split_on_low_coverage_20GB'], }, }, { -logic_name => 'split_on_low_coverage_20GB', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::CalculateLowCoverageSlices', -parameters => { 'disconnect_jobs' => 1, 'dna_db' => $self->o('dna_db'), }, -rc_name => '20GB', -flow_into => { 2 => ['rough_transcripts'], }, }, { -logic_name => 'rough_transcripts', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveBam2Genes', -parameters => { logic_name => 'rough_transcripts', output_db => $self->o('rough_db'), dna_db => $self->o('dna_db'), min_length => 300, min_exons => 1, max_intron_length => $self->o('maxintron'), min_single_exon_length => 1000, min_span => 1.5, paired => $self->o('paired'), pairing_regex => $self->o('pairing_regex'), }, -rc_name => '2GB_rough', -flow_into => { 1 => ['create_bam2introns_input_ids'], -1 => {'rough_transcripts_5GB' => {'iid' => '#iid#', alignment_bam_file => '#alignment_bam_file#'}}, -2 => {'rough_transcripts_5GB' => {'iid' => '#iid#', alignment_bam_file => '#alignment_bam_file#'}}, }, }, { -logic_name => 'rough_transcripts_5GB', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveBam2Genes', -can_be_empty => 1, -parameters => { logic_name => 'rough_transcripts', output_db => $self->o('rough_db'), dna_db => $self->o('dna_db'), min_length => 300, min_exons => 1, max_intron_length => $self->o('maxintron'), min_single_exon_length => 1000, min_span => 1.5, paired => $self->o('paired'), pairing_regex => $self->o('pairing_regex'), }, -rc_name => '5GB_rough', -flow_into => { 1 => ['create_bam2introns_input_ids'], -1 => {'rough_transcripts_15GB' => {'iid' => '#iid#', alignment_bam_file => '#alignment_bam_file#'}}, -2 => {'rough_transcripts_15GB' => {'iid' => '#iid#', alignment_bam_file => '#alignment_bam_file#'}}, }, }, { 
-logic_name => 'rough_transcripts_15GB', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveBam2Genes', -can_be_empty => 1, -parameters => { logic_name => 'rough_transcripts', output_db => $self->o('rough_db'), dna_db => $self->o('dna_db'), min_length => 300, min_exons => 1, max_intron_length => $self->o('maxintron'), min_single_exon_length => 1000, min_span => 1.5, paired => $self->o('paired'), pairing_regex => $self->o('pairing_regex'), }, -rc_name => '15GB_rough', -flow_into => { 1 => ['create_bam2introns_input_ids'], }, }, { -logic_name => 'create_bam2introns_input_ids', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveSubmitAnalysis', -parameters => { iid_type => 'slice_to_feature_ids', target_db => $self->o('rough_db'), feature_type => 'gene', logic_name => ['rough_transcripts'], use_stable_ids => 1, create_stable_ids => 1, stable_id_prefix => 'RNASEQ', }, -rc_name => '1GB_rough', -batch_size => 100, -flow_into => { 2 => {'bam2introns' => {iid => '#iid#', bam_file => '#alignment_bam_file#'}}, }, }, { -logic_name => 'bam2introns', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveBam2Introns', -parameters => { program_file => $self->o('splicing_aligner'), input_db => $self->o('rough_db'), dna_db => $self->o('dna_db'), missmatch => 6, word_length => 10, saturate_threshold => 10000, mask => 1, percent_id => 97, coverage => 90, fullseq => 1, max_transcript => 1000000, batch_size => 10000, maxintron => $self->o('maxintron'), }, -rc_name => '2GB_introns', -analysis_capacity => 500, -batch_size => 100, -flow_into => { 1 => [':////accu?filename=[]'], 2 => {'bam2introns' => {iid => '#iid#', bam_file => '#bam_file#'}}, -1 => {'bam2introns_20GB' => {iid => '#iid#', bam_file => '#bam_file#'}}, -2 => {'bam2introns_20GB' => {iid => '#iid#', bam_file => '#bam_file#'}}, }, }, { -logic_name => 'bam2introns_20GB', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveBam2Introns', -can_be_empty => 1, -parameters => { program_file => 
$self->o('splicing_aligner'), input_db => $self->o('rough_db'), dna_db => $self->o('dna_db'), missmatch => 6, word_length => 10, saturate_threshold => 10000, mask => 1, percent_id => 97, coverage => 90, fullseq => 1, max_transcript => 1000000, batch_size => 10000, maxintron => $self->o('maxintron'), }, -rc_name => '10GB_introns', -analysis_capacity => 500, -flow_into => { 1 => [':////accu?filename=[]'], 2 => {'bam2introns' => {iid => '#iid#', bam_file => '#bam_file#'}}, -1 => {'bam2introns_50GB' => {iid => '#iid#', bam_file => '#bam_file#'}}, -2 => {'bam2introns_50GB' => {iid => '#iid#', bam_file => '#bam_file#'}}, }, }, { -logic_name => 'bam2introns_50GB', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveBam2Introns', -can_be_empty => 1, -parameters => { program_file => $self->o('splicing_aligner'), input_db => $self->o('rough_db'), dna_db => $self->o('dna_db'), missmatch => 6, word_length => 10, saturate_threshold => 10000, mask => 1, percent_id => 97, coverage => 90, fullseq => 1, max_transcript => 1000000, batch_size => 10000, maxintron => $self->o('maxintron'), }, -rc_name => '50GB_introns', -analysis_capacity => 500, -flow_into => { 1 => [':////accu?filename=[]'], 2 => {'bam2introns' => {iid => '#iid#', bam_file => '#bam_file#'}}, }, }, { -logic_name => 'sam2bam', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveSam2Bam', -parameters => { regex => '.sam', headerfile => '#wide_output_dir#/merged_header.h', disconnect_jobs => 1, }, -rc_name => '5GB', -flow_into => ['check_and_delete_broken_duplicated'], }, { -logic_name => 'check_and_delete_broken_duplicated', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveRemoveBrokenAndDuplicatedObjects', -parameters => { target_db => $self->o('rough_db'), check_support => 0, }, -rc_name => '5GB', -flow_into => ['create_refine_db'], }, { -logic_name => 'create_refine_db', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveCreateDatabase', -parameters => { source_db => $self->o('rough_db'), 
target_db => $self->o('refine_db'), create_type => $self->o('create_type'), extra_data_tables => ['data_file'], }, -rc_name => '1GB', -flow_into => ['create_blast_db'], }, { -logic_name => 'create_blast_db', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveCreateDatabase', -parameters => { source_db => $self->o('refine_db'), target_db => $self->o('blast_db'), create_type => $self->o('create_type'), extra_data_tables => ['data_file'], }, -rc_name => '1GB', -flow_into => ['create_ccode_config'], }, { -logic_name => 'create_ccode_config', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveGenerateRefineConfig', -parameters => { single_tissue => $self->o('single_tissue'), sample_column => $self->o('read_group_tag'), sample_id_column => $self->o('read_id_tag'), csvfile_table => $self->o('summary_csv_table'), input_db => $self->o('rough_db'), dna_db => $self->o('dna_db'), output_db => $self->o('refine_db'), target_db => $self->o('refine_db'), # write the intron features into the OUTPUT_DB along with the models write_introns => 1, # maximum number of times to loop when building all possible paths through the transcript max_recursions => 10000000000000, # analysis logic_name for the dna_align_features to fetch from the INTRON_DB # If left blank all features will be fetched logicname => [], # logic name of the gene models to fetch model_ln => '', # penalty for removing a retined intron retained_intron_penalty => 2, #Remove introns that overlap X introns filter_on_overlap => 0, # minimum size for an intron min_intron_size => 30, max_intron_size => $self->o('maxintron'), # biotype to give to single exon models if left blank single exons are ignored # minimum single exon size (bp) min_single_exon => 1000, # minimum percentage of single exon length that is coding single_exon_cds => 66, # Intron with most support determines the splice sites for an internal exon # lower scoring introns with different splice sites are rejected strict_internal_splice_sites => 1, # 
In some species alternate splice sites for end exons seem to be common strict_internal_end_exon_splice_sites => 1, # biotypes to give gene models if left blank these models will not get written to the output database # best score - model with most supporting intron features # all other possible models # max number of other models to make - blank = all other_num => '10', # max number of other models to process - blank = all max_num => '1000', other_isoforms => $self->o('other_isoforms'), # biotype to label bad models ( otherwise they are not written ) # do you want to trim UTR trim_utr => 1, # config for trimming UTR max_3prime_exons => 2, max_3prime_length => 5000, max_5prime_exons => 3, max_5prime_length => 1000, # % of average intron score that a UTR intron must have reject_intron_cutoff => 5, }, -rc_name => '1GB', -flow_into => { 2 => ['create_ccode_input_ids'], }, }, { -logic_name => 'create_ccode_input_ids', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveSubmitAnalysis', -rc_name => '1GB', -parameters => { iid_type => 'slice', coord_system_name => 'toplevel', slice => 1, include_non_reference => 0, top_level => 1, feature_constraint => 1, feature_type => 'gene', target_db => $self->o('rough_db'), }, -flow_into => { 2 => {'refine_genes' => {iid => '#iid#', logic_name => '#logic_name#', config_file => '#config_file#'}}, }, }, { -logic_name => 'refine_genes', -module => 'Bio::EnsEMBL::Hive::RunnableDB::SystemCmd', -parameters => { cmd => $self->o('refine_ccode_exe').($self->o('use_ucsc_naming') ? ' -u ' : ' ').($self->o('use_threads') ? ' -t '.$self->o('use_threads').' 
' : ' ').'-c #config_file# -i #iid# -l #logic_name# -v 0', return_codes_2_branches => { 42 => 2, }, }, -rc_name => '2GB_refine', -flow_into => { 1 => ['create_gene_id_input_ids'], -1 => {'refine_genes_20GB' => {iid => '#iid#', config_file => '#config_file#', logic_name => '#logic_name#'}}, }, }, { -logic_name => 'refine_genes_20GB', -module => 'Bio::EnsEMBL::Hive::RunnableDB::SystemCmd', -parameters => { cmd => $self->o('refine_ccode_exe').($self->o('use_ucsc_naming') ? ' -u ' : ' ').($self->o('use_threads') ? ' -t '.$self->o('use_threads').' ' : ' ').'-c #config_file# -i #iid# -l #logic_name# -v 0', return_codes_2_branches => { 42 => 2, }, }, -rc_name => '20GB_refine', -flow_into => { 1 => ['create_gene_id_input_ids'], }, }, { -logic_name => 'create_gene_id_input_ids', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveSubmitAnalysis', -rc_name => '1GB', -parameters => { iid_type => 'feature_id', coord_system_name => 'toplevel', target_db => $self->o('refine_db'), feature_logic_names => ['#logic_name#'], feature_type => 'gene', batch_size => 50, }, -flow_into => { 2 => {'blast_rnaseq' => {iid => '#iid#', logic_name => '#logic_name#'}}, }, }, { -logic_name => 'blast_rnaseq', -module => 'Bio::EnsEMBL::Analysis::Hive::RunnableDB::HiveBlastRNASeqPep', -parameters => { input_db => $self->o('refine_db'), output_db => $self->o('blast_db'), dna_db => $self->o('dna_db'), iid_type => 'object_id', # path to index to fetch the sequence of the blast hit to calculate % coverage indicate_index => $self->o('uniprotindex'), uniprot_index => [$self->o('uniprotdb')], blast_program => $self->o('blastp'), %{get_analysis_settings('Bio::EnsEMBL::Analysis::Hive::Config::BlastStatic','BlastGenscanPep', {BLAST_PARAMS => {-type => $self->o('blast_type')}})}, commandline_params => $self->o('blast_type') eq 'wu' ? '-cpus='.$self->o('use_threads').' -hitdist=40' : '-num_threads '.$self->o('use_threads').' 
-window_size 40', }, -rc_name => '2GB_blast', }, ); foreach my $analyses (@analysis) { $analyses->{-max_retry_count} = 1 unless (exists $analyses->{-max_retry_count}); } return \@analysis; } sub resource_classes { my $self = shift; return { %{ $self->SUPER::resource_classes() }, # inherit other stuff from the base class '1GB' => { LSF => $self->lsf_resource_builder($self->default_options->{normal_queue}, 1000, [$self->default_options->{'pipe_db_server'}])}, '1GB_rough' => { LSF => $self->lsf_resource_builder($self->default_options->{normal_queue}, 1000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'rough_db_server'}])}, '2GB_rough' => { LSF => $self->lsf_resource_builder($self->default_options->{normal_queue}, 2000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'rough_db_server'}])}, '5GB_rough' => { LSF => $self->lsf_resource_builder($self->default_options->{long_queue}, 5000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'rough_db_server'}])}, '15GB_rough' => { LSF => $self->lsf_resource_builder($self->default_options->{long_queue}, 15000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'rough_db_server'}])}, '2GB_blast' => { LSF => $self->lsf_resource_builder($self->default_options->{normal_queue}, 2000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'refine_db_server'}, $self->default_options->{'blast_db_server'}], undef, ($self->default_options->{'use_threads'}+1))}, '2GB' => { LSF => $self->lsf_resource_builder($self->default_options->{normal_queue}, 2000, [$self->default_options->{'pipe_db_server'}])}, '4GB' => { LSF => $self->lsf_resource_builder($self->default_options->{normal_queue}, 4000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'dna_db_server'}])}, '5GB' => { LSF => $self->lsf_resource_builder($self->default_options->{normal_queue}, 5000, [$self->default_options->{'pipe_db_server'}])}, '10GB' => { LSF => 
$self->lsf_resource_builder($self->default_options->{long_queue}, 10000, [$self->default_options->{'pipe_db_server'}])}, '20GB' => { LSF => $self->lsf_resource_builder($self->default_options->{long_queue}, 20000, [$self->default_options->{'pipe_db_server'}])}, '2GB_introns' => { LSF => $self->lsf_resource_builder($self->default_options->{normal_queue}, 2000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'rough_db_server'}, $self->default_options->{'dna_db_server'}])}, '2GB_refine' => { LSF => $self->lsf_resource_builder($self->default_options->{normal_queue}, 2000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'rough_db_server'}, $self->default_options->{'dna_db_server'}, $self->default_options->{'refine_db_server'}])}, '5GB_introns' => { LSF => $self->lsf_resource_builder($self->default_options->{long_queue}, 5000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'rough_db_server'}, $self->default_options->{'dna_db_server'}])}, '10GB_introns' => { LSF => $self->lsf_resource_builder($self->default_options->{long_queue}, 10000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'rough_db_server'}, $self->default_options->{'dna_db_server'}])}, '20GB_introns' => { LSF => $self->lsf_resource_builder($self->default_options->{long_queue}, 20000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'rough_db_server'}, $self->default_options->{'dna_db_server'}])}, '50GB_introns' => { LSF => $self->lsf_resource_builder($self->default_options->{long_queue}, 50000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'rough_db_server'}, $self->default_options->{'dna_db_server'}])}, '3GB_multithread' => { LSF => $self->lsf_resource_builder($self->default_options->{long_queue}, 3000, [$self->default_options->{'pipe_db_server'}], undef, $self->default_options->{'use_threads'})}, '5GB_merged_multithread' => { LSF => 
$self->lsf_resource_builder($self->default_options->{normal_queue}, 5000, [$self->default_options->{'pipe_db_server'}], undef, ($self->default_options->{'use_threads'}))}, '5GB_multithread' => { LSF => $self->lsf_resource_builder($self->default_options->{normal_queue}, 5000, [$self->default_options->{'pipe_db_server'}], undef, ($self->default_options->{'use_threads'}+1))}, '10GB_multithread' => { LSF => $self->lsf_resource_builder($self->default_options->{long_queue}, 10000, [$self->default_options->{'pipe_db_server'}], undef, ($self->default_options->{'use_threads'}+1))}, '20GB_multithread' => { LSF => $self->lsf_resource_builder($self->default_options->{long_queue}, 20000, [$self->default_options->{'pipe_db_server'}], undef, ($self->default_options->{'use_threads'}+1))}, '5GB_refine' => { LSF => $self->lsf_resource_builder($self->default_options->{normal_queue}, 5000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'rough_db_server'}, $self->default_options->{'dna_db_server'}, $self->default_options->{'refine_db_server'}])}, '20GB_refine' => { LSF => $self->lsf_resource_builder($self->default_options->{normal_queue}, 20000, [$self->default_options->{'pipe_db_server'}, $self->default_options->{'rough_db_server'}, $self->default_options->{'dna_db_server'}, $self->default_options->{'refine_db_server'}])}, }; } 1;
Ensembl/ensembl-analysis
modules/Bio/EnsEMBL/Analysis/Hive/Config/HiveRNASeq_conf.pm
Perl
apache-2.0
54,928
package VMOMI::VmCreatedEvent;

# Event data object emitted when a virtual machine has been created.
# Declares no members of its own; everything is inherited from VmEvent.

use parent 'VMOMI::VmEvent';

use strict;
use warnings;

# Ancestor chain, nearest parent first, as used by the (de)serializer.
our @class_ancestors = qw(
    VmEvent
    Event
    DynamicData
);

# No members are declared at this level of the hierarchy.
our @class_members = ();

# Return the list of ancestor class names for this type.
sub get_class_ancestors {
    return @class_ancestors;
}

# Return all inherited members followed by members declared here.
sub get_class_members {
    my $class = shift;
    return ($class->SUPER::get_class_members(), @class_members);
}

1;
stumpr/p5-vmomi
lib/VMOMI/VmCreatedEvent.pm
Perl
apache-2.0
408
package App::Netdisco::Web;

# Top-level Dancer web application for Netdisco: pulls in all route handler
# modules and configured web plugins, then installs template, CSV and
# redirect hooks.  NOTE: statement order in this file is load-order
# sensitive (plugins must load before the template path and hooks below).

use Dancer ':syntax';
use Dancer::Plugin::Ajax;
use Dancer::Plugin::DBIC;
use Dancer::Plugin::Auth::Extensible;

use Socket6 ();         # to ensure dependency is met
use HTML::Entities ();  # to ensure dependency is met
use URI::QueryParam (); # part of URI, to add helper methods
use Path::Class 'dir';
use Module::Find ();
use Module::Load ();

use App::Netdisco::Util::Web 'interval_to_daterange';

# can override splats only by loading first
Module::Find::usesub 'App::NetdiscoE::Web';

use App::Netdisco::Web::AuthN;
use App::Netdisco::Web::Static;
use App::Netdisco::Web::Search;
use App::Netdisco::Web::Device;
use App::Netdisco::Web::Report;
use App::Netdisco::Web::AdminTask;
use App::Netdisco::Web::TypeAhead;
use App::Netdisco::Web::PortControl;
use App::Netdisco::Web::Statistics;
use App::Netdisco::Web::Password;
use App::Netdisco::Web::GenericReport;

# Load each named web plugin module.  Short names are expanded:
#   "X::Foo"  -> App::NetdiscoX::Web::Plugin::Foo
#   "Foo"     -> App::Netdisco::Web::Plugin::Foo
#   "+Full::Package::Name" is used verbatim (leading "+" stripped).
sub _load_web_plugins {
  my $plugin_list = shift;

  foreach my $plugin (@$plugin_list) {
      $plugin =~ s/^X::/+App::NetdiscoX::Web::Plugin::/;
      $plugin = 'App::Netdisco::Web::Plugin::'. $plugin
        if $plugin !~ m/^\+/;
      $plugin =~ s/^\+//;

      debug "loading Netdisco plugin $plugin";
      Module::Load::load $plugin;
  }
}

# standard plugin list from the main configuration
if (setting('web_plugins') and ref [] eq ref setting('web_plugins')) {
    _load_web_plugins( setting('web_plugins') );
}

# site-local plugins may live under <NETDISCO_HOME>/site_plugins
if (setting('extra_web_plugins') and ref [] eq ref setting('extra_web_plugins')) {
    unshift @INC,
      dir(($ENV{NETDISCO_HOME} || $ENV{HOME}), 'site_plugins')->stringify;
    _load_web_plugins( setting('extra_web_plugins') );
}

# after plugins are loaded, add our own template path
push @{ config->{engines}->{netdisco_template_toolkit}->{INCLUDE_PATH} },
     setting('views');

# workaround for https://github.com/PerlDancer/Dancer/issues/935
hook after_error_render => sub { setting('layout' => 'main') };

# this hook should be loaded _after_ all plugins
hook 'before_template' => sub {
  my $tokens = shift;

  # allow portable static content
  $tokens->{uri_base} = request->base->path
    if request->base->path ne '/';

  # allow portable dynamic content
  $tokens->{uri_for} = sub { uri_for(@_)->path_query };

  # access to logged in user's roles
  $tokens->{user_has_role} = sub { user_has_role(@_) };

  # create date ranges from within templates
  $tokens->{to_daterange} = sub { interval_to_daterange(@_) };

  # data structure for DataTables records per page menu
  $tokens->{table_showrecordsmenu} =
    to_json( setting('table_showrecordsmenu') );

  # fix Plugin Template Variables to be only path+query
  $tokens->{$_} = $tokens->{$_}->path_query
    for qw/search_node search_device device_ports/;

  # allow very long lists of ports
  $Template::Directive::WHILE_MAX = 10_000;

  # allow hash keys with leading underscores
  $Template::Stash::PRIVATE = undef;
};

# remove empty lines from CSV response
# this makes writing templates much more straightforward!
hook 'after' => sub {
  my $r = shift; # a Dancer::Response

  if ($r->content_type and $r->content_type eq 'text/comma-separated-values') {
      my @newlines = ();
      my @lines = split m/\n/, $r->content;

      foreach my $line (@lines) {
          push @newlines, $line if $line !~ m/^\s*$/;
      }

      $r->content(join "\n", @newlines);
  }
};

# catch-all route: any unmatched request renders the index page as a 404
any qr{.*} => sub {
    var('notfound' => true);
    status 'not_found';
    template 'index';
};

# monkeypatch Dancer's redirect so the Location header is set without
# mangling the destination
# https://github.com/PerlDancer/Dancer/issues/967
{
  no warnings 'redefine';
  *Dancer::_redirect = sub {
      my ($destination, $status) = @_;

      my $response = Dancer::SharedData->response;
      $response->status($status || 302);
      $response->headers('Location' => $destination);
  };
}

true;
gitpan/App-Netdisco
lib/App/Netdisco/Web.pm
Perl
bsd-3-clause
3,768
#!/usr/bin/perl

# This software code is made available "AS IS" without warranties of any
# kind.  You may copy, display, modify and redistribute the software
# code either by itself or as incorporated into your code; provided that
# you do not remove any proprietary notices.  Your use of this software
# code is at your own risk and you waive any claim against Amazon
# Digital Services, Inc. or its affiliates with respect to your use of
# this software code. (c) 2006-2007 Amazon Digital Services, Inc. or its
# affiliates.

# Helper routines for talking to the Amazon S3 REST API: URL construction,
# canonical-string generation and HMAC-SHA1 request signing (AWS signature
# version 1/2 style), plus header merging for object metadata.
package S3;

use base qw(Exporter);
@EXPORT_OK = qw(canonical_string encode merge_meta $DEFAULT_HOST
                $PORTS_BY_SECURITY $AMAZON_HEADER_PREFIX $METADATA_PREFIX
                $CALLING_FORMATS urlencode);

use strict;
use warnings;

use Carp;
use Digest::HMAC_SHA1;
use MIME::Base64 qw(encode_base64);
use URI::Escape;
use LWP::UserAgent;
use HTTP::Request;

our $DEFAULT_HOST = 's3.amazonaws.com';
our $PORTS_BY_SECURITY = { 0 => 80, 1 => 443 };
our $AMAZON_HEADER_PREFIX = 'x-amz-';
our $METADATA_PREFIX = 'x-amz-meta-';
our $CALLING_FORMATS = [
    "SUBDOMAIN",  # http://bucket.s3.amazonaws.com/key
    "PATH",       # http://s3.amazonaws.com/bucket/key
    "VANITY",     # http://<vanity_domain>/key -- vanity_domain resolves to s3.amazonaws.com
];

# The result of this subroutine is a string to which the resource path (sans
# bucket, including a leading '/') can be appended.  Examples include:
# - "http://s3.amazonaws.com/bucket"
# - "http://bucket.s3.amazonaws.com"
# - "http://<vanity_domain>" -- vanity_domain resolves to s3.amazonaws.com
#
# parameters:
# - protocol - ex: "http" or "https"
# - server   - ex: "s3.amazonaws.com"
# - port     - ex: "80"
# - bucket   - ex: "my_bucket"
# - format   - ex: "SUBDOMAIN"
sub build_url_base {
    my ($protocol, $server, $port, $bucket, $format) = @_;

    my $buf = "$protocol://";
    if ($bucket eq '') {
        # no bucket: address the service root
        $buf .= "$server:$port";
    } elsif ($format eq "PATH") {
        $buf .= "$server:$port/$bucket";
    } elsif ($format eq "SUBDOMAIN") {
        $buf .= "$bucket.$server:$port";
    } elsif ($format eq "VANITY") {
        # the bucket name itself is the (CNAME'd) hostname here
        $buf .= "$bucket:$port";
    } else {
        croak "unknown or unhandled CALLING_FORMAT";
    }
    return $buf;
}

# Strip leading and trailing whitespace from a header value.
sub trim {
    my ($value) = @_;
    $value =~ s/^\s+//;
    $value =~ s/\s+$//;
    return $value;
}

# Generate a canonical string for the given parameters. Expires is optional and is
# only used by query string authentication. $path is the resource NOT INCLUDING
# THE BUCKET.
sub canonical_string {
    my ($method, $bucket, $path, $path_args, $headers, $expires) = @_;
    # collect only the headers that participate in the signature
    my %interesting_headers = ();
    while (my ($key, $value) = each %$headers) {
        my $lk = lc $key;
        if (
            $lk eq 'content-md5' or
            $lk eq 'content-type' or
            $lk eq 'date' or
            $lk =~ /^$AMAZON_HEADER_PREFIX/
        ) {
            $interesting_headers{$lk} = trim($value);
        }
    }

    # these keys get empty strings if they don't exist
    $interesting_headers{'content-type'} ||= '';
    $interesting_headers{'content-md5'} ||= '';

    # just in case someone used this. it's not necessary in this lib.
    # x-amz-date takes precedence over the plain date header.
    $interesting_headers{'date'} = '' if $interesting_headers{'x-amz-date'};

    # if you're using expires for query string auth, then it trumps date
    # (and x-amz-date)
    $interesting_headers{'date'} = $expires if $expires;

    my $buf = "$method\n";
    foreach my $key (sort keys %interesting_headers) {
        if ($key =~ /^$AMAZON_HEADER_PREFIX/) {
            # amazon headers appear as "name:value" lines
            $buf .= "$key:$interesting_headers{$key}\n";
        } else {
            # standard headers contribute only their value, in sorted key order
            $buf .= "$interesting_headers{$key}\n";
        }
    }

    if ($bucket) { $buf .= "/$bucket" }
    $buf .= "/$path";

    # at most one of these sub-resources may be signed into the string
    my @special_arg_list = qw/acl location logging torrent/;
    my @found_special_args = grep { exists $path_args->{$_} } @special_arg_list;
    if (@found_special_args > 1) {
        croak "more than one special query-string argument found: " . join(",", @found_special_args);
    } elsif (@found_special_args == 1) {
        $buf .= "?$found_special_args[0]";
    }

    return $buf;
}

# finds the hmac-sha1 hash of the canonical string and the aws secret access key and then
# base64 encodes the result (optionally urlencoding after that).
sub encode {
    my ($aws_secret_access_key, $str, $urlencode) = @_;
    my $hmac = Digest::HMAC_SHA1->new($aws_secret_access_key);
    $hmac->add($str);
    my $b64 = encode_base64($hmac->digest, '');
    if ($urlencode) {
        return urlencode($b64);
    } else {
        return $b64;
    }
}

# URL-escape everything except unreserved characters (letters, digits, _ and -).
sub urlencode {
    my ($unencoded) = @_;
    return uri_escape($unencoded, '^A-Za-z0-9_-');
}

# generates an HTTP::Headers objects given one hash that represents http
# headers to set and another hash that represents an object's metadata.
# Metadata keys are prefixed with $METADATA_PREFIX ("x-amz-meta-").
sub merge_meta {
    my ($headers, $metadata) = @_;
    $headers ||= {};
    $metadata ||= {};

    my $http_header = HTTP::Headers->new;
    while (my ($k, $v) = each %$headers) {
        $http_header->header($k => $v);
    }
    while (my ($k, $v) = each %$metadata) {
        $http_header->header("$METADATA_PREFIX$k" => $v);
    }

    return $http_header;
}

# Build a URL arguments string from the path_args hash ref.
# Returns a string like: "key1=val1&key2=val2&key3=val3"
# with URL escaping applied as needed.
# Build a URL query string from the path_args hash ref.
# Returns a string like: "key1=val1&key2=val2&key3"
# Values are URL-escaped via S3::urlencode; keys whose value is undef or
# the empty string are emitted bare (e.g. "acl"), which is how S3
# sub-resource requests are addressed.
sub path_args_hash_to_string {
    my ($path_args) = @_;

    my @pairs;
    foreach my $key (keys %$path_args) {
        my $value = $path_args->{$key};
        # BUG FIX: the original tested truthiness ("if ($value)"), which
        # silently dropped legitimate false values such as "0" from the
        # query string.  Test defined-ness and length instead.
        if (defined $value && length $value) {
            push @pairs, $key . "=" . S3::urlencode($value);
        }
        else {
            push @pairs, $key;
        }
    }

    # join() naturally yields '' for an empty hash and avoids the
    # trailing-'&' chop of the original implementation.
    return join '&', @pairs;
}

1;
zenfactory/portfolio
perl/amazonS3/S3.pm
Perl
mit
5,763
#!/usr/bin/env perl
# author: oxnz
# coding: utf-8
#
# Smoke test for the Interpreter design-pattern example: feeds a mixed
# digit/letter string to the interpreter and lets it process the tokens.

use strict;
use warnings;

use Interpreter;

# Build an interpreter instance and run it over the sample input.
my $interpreter = Interpreter->new;
$interpreter->execute("12345abcde");
PoisonBOx/design-patterns
src/interpreter/perl/test.pl
Perl
mit
170
package OpenXPKI::Server::Workflow::Activity::Tools::GetCertificateIdentifier;

use strict;
use base qw( OpenXPKI::Server::Workflow::Activity );

use OpenXPKI::Server::Context qw( CTX );
use OpenXPKI::Exception;
use OpenXPKI::Debug;
use Workflow::Exception qw(configuration_error);

# Compute the identifier of a PEM certificate and store it in the
# workflow context.
sub execute {
    my ($self, $workflow) = @_;
    my $ctx = $workflow->context();

    # The activity parameter wins; otherwise fall back to the context value.
    my $pem = $self->param('certificate') // $ctx->param('certificate');
    return unless $pem;

    my $identifier = CTX('api2')->get_cert_identifier(cert => $pem);

    CTX('log')->application()->debug('Identifier of certificate is ' . $identifier);

    # Write the identifier to the configured target key (default below).
    my $key = $self->param('target_key') || 'cert_identifier';
    $ctx->param($key => $identifier);

    return 1;
}

1;

__END__

=head1 Name

OpenXPKI::Server::Workflow::Activity::Tools::GetCertificateIdentifier

=head1 Description

Calculate the certificate's identifier

=head1 Parameters

=over

=item certificate

the PEM encoded certificate, default is context value of certificate

=item target_key

Context parameter to use for certificate identifier output
(default: cert_identifier)

=back
oliwel/openxpki
core/server/OpenXPKI/Server/Workflow/Activity/Tools/GetCertificateIdentifier.pm
Perl
apache-2.0
1,219
#
# Copyright 2017 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Monitoring mode for the Zixi broadcaster REST API: checks per-output
# status, in/out traffic and dropped-packet counters.
package apps::video::zixi::restapi::mode::broadcasteroutputusage;

use base qw(centreon::plugins::templates::counter);

use strict;
use warnings;
use Digest::MD5 qw(md5_hex);

# Package-level handle on the mode object so the status closures below can
# reach the parsed --warning-status/--critical-status expressions.
my $instance_mode;

# Evaluate the user-supplied status expressions (already rewritten by
# change_macros) against the current result values and map the outcome to
# 'ok' / 'warning' / 'critical'.  Any warn/die inside the string eval is
# captured and reported as a long message instead of aborting the check.
sub custom_status_threshold {
    my ($self, %options) = @_;
    my $status = 'ok';
    my $message;

    eval {
        local $SIG{__WARN__} = sub { $message = $_[0]; };
        local $SIG{__DIE__} = sub { $message = $_[0]; };

        if (defined($instance_mode->{option_results}->{critical_status}) && $instance_mode->{option_results}->{critical_status} ne '' &&
            eval "$instance_mode->{option_results}->{critical_status}") {
            $status = 'critical';
        } elsif (defined($instance_mode->{option_results}->{warning_status}) && $instance_mode->{option_results}->{warning_status} ne '' &&
                 eval "$instance_mode->{option_results}->{warning_status}") {
            $status = 'warning';
        }
    };
    if (defined($message)) {
        $self->{output}->output_add(long_msg => 'filter status issue: ' . $message);
    }

    return $status;
}

# Human-readable status line for one output.
sub custom_status_output {
    my ($self, %options) = @_;

    my $msg = 'status : ' . $self->{result_values}->{status} . ' [error: ' . $self->{result_values}->{error} . ']';
    return $msg;
}

# Copy the per-instance datas into result_values for the status closures.
sub custom_status_calc {
    my ($self, %options) = @_;

    $self->{result_values}->{status} = $options{new_datas}->{$self->{instance} . '_status'};
    $self->{result_values}->{name} = $options{new_datas}->{$self->{instance} . '_name'};
    $self->{result_values}->{error} = $options{new_datas}->{$self->{instance} . '_error'};
    return 0;
}

# Declare the counter layout consumed by the centreon counter template:
# one multi-instance group 'output_stream' with status, traffic and
# dropped-packet counters.
sub set_counters {
    my ($self, %options) = @_;

    $self->{maps_counters_type} = [
        { name => 'output_stream', type => 1, cb_prefix_output => 'prefix_output_output', message_multiple => 'All outputs are ok', skipped_code => { -11 => 1 } },
    ];

    $self->{maps_counters}->{output_stream} = [
        # status counter: no numeric threshold, handled by the closures above
        { label => 'status', threshold => 0, set => {
                key_values => [ { name => 'status' }, { name => 'name' }, { name => 'error' } ],
                closure_custom_calc => $self->can('custom_status_calc'),
                closure_custom_output => $self->can('custom_status_output'),
                closure_custom_perfdata => sub { return 0; },
                closure_custom_threshold_check => $self->can('custom_status_threshold'),
            }
        },
        # traffic counters: raw byte totals are diffed and rated per second
        { label => 'traffic-in', set => {
                key_values => [ { name => 'traffic_in', diff => 1 }, { name => 'name' } ],
                per_second => 1, output_change_bytes => 2,
                output_template => 'Traffic In : %s %s/s',
                perfdatas => [
                    { label => 'traffic_in', value => 'traffic_in_per_second', template => '%.2f',
                      min => 0, unit => 'b/s', label_extra_instance => 1, instance_use => 'name_absolute' },
                ],
            }
        },
        { label => 'traffic-out', set => {
                key_values => [ { name => 'traffic_out', diff => 1 }, { name => 'name' } ],
                per_second => 1, output_change_bytes => 2,
                output_template => 'Traffic Out : %s %s/s',
                perfdatas => [
                    { label => 'traffic_out', value => 'traffic_out_per_second', template => '%.2f',
                      min => 0, unit => 'b/s', label_extra_instance => 1, instance_use => 'name_absolute' },
                ],
            }
        },
        { label => 'dropped-in', set => {
                key_values => [ { name => 'dropped_in', diff => 1 }, { name => 'name' } ],
                output_template => 'Packets Dropped In : %s',
                perfdatas => [
                    { label => 'dropped_in', value => 'dropped_in_absolute', template => '%.2f',
                      min => 0, label_extra_instance => 1, instance_use => 'name_absolute' },
                ],
            }
        },
    ];
}

# Constructor: registers the mode's command-line options (statefile => 1
# because the diff'd counters need a previous run to compare against).
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options, statefile => 1);
    bless $self, $class;

    $self->{version} = '1.0';
    $options{options}->add_options(arguments =>
                                {
                                "filter-name:s"       => { name => 'filter_name' },
                                "warning-status:s"    => { name => 'warning_status' },
                                "critical-status:s"   => { name => 'critical_status', default => '%{status} !~ /Connecting|Connected/i || %{error} !~ /none/i' },
                                });

    return $self;
}

# Stash the mode object for the closures and rewrite the %{...} macros in
# the status expressions.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::check_options(%options);

    $instance_mode = $self;
    $self->change_macros();
}

# Prefix printed before each per-output message.
sub prefix_output_output {
    my ($self, %options) = @_;

    return "Output '" . $options{instance_value}->{name} . "' ";
}

# Turn user macros such as %{status} into $self->{result_values}->{status}
# so they can be string-eval'ed by custom_status_threshold.
sub change_macros {
    my ($self, %options) = @_;

    foreach (('warning_status', 'critical_status')) {
        if (defined($self->{option_results}->{$_})) {
            $self->{option_results}->{$_} =~ s/%\{(.*?)\}/\$self->{result_values}->{$1}/g;
        }
    }
}

# Zixi numeric error codes -> human-readable error strings.
my %mapping_output_status = (
    0 => 'none', 1 => 'unknown', 2 => 'resolve error', 3 => 'timeout',
    4 => 'network error', 5 => 'protocol error', 6 => 'server is full',
    7 => 'connection rejected', 8 => 'authentication error', 9 => 'license error',
    10 => 'end of file', 11 => 'flood error', 12 => 'redirect', 13 => 'stopped',
    14 => 'limit', 15 => 'not found', 16 => 'not supported',
    17 => 'local file system error', 18 => 'remote file system error',
    19 => 'stream replaced', 20 => 'p2p abort', 21 => 'compression error',
    22 => 'source collision error', 23 => 'adaptive', 24 => 'tcp connection error',
    25 => 'rtmp connection error', 26 => 'rtmp handshake error',
    27 => 'tcp connection closed', 28 => 'rtmp stream error',
    29 => 'rtmp publish error', 30 => 'rtmp stream closed', 31 => 'rtmp play error',
    32 => 'rtmp protocol error', 33 => 'rtmp analyze timeout', 34 => 'busy',
    35 => 'encryption error', 36 => 'transcoder error',
    37 => 'error in invocation a transcoder subprocess',
    38 => 'error communicating with a transcoder subprocess',
    39 => 'error in RTMP Akamai authentication',
    40 => 'maximum outputs for the source reached', 41 => 'generic error',
    42 => 'zero bitrate warning', 43 => 'low bitrate warning',
    44 => 'multicast join failed',
);

# Query the broadcaster REST API and build the output_stream instances.
# Byte counters are converted to bits (* 8) for the traffic counters.
sub manage_selection {
    my ($self, %options) = @_;

    $self->{output_stream} = {};
    my $result = $options{custom}->get(path => '/zixi/outputs.json?complete=1');
    foreach my $entry (@{$result->{outputs}}) {
        my $name = $entry->{name} . '/' . $entry->{requested_stream_id};
        if (defined($self->{option_results}->{filter_name}) && $self->{option_results}->{filter_name} ne '' &&
            $name !~ /$self->{option_results}->{filter_name}/) {
            $self->{output}->output_add(long_msg => "skipping '" . $name . "': no matching filter.", debug => 1);
            next;
        }

        $self->{output_stream}->{$entry->{id}} = {
            name => $name,
            status => $entry->{status},
            error => $mapping_output_status{$entry->{error_code}},
            traffic_in => $entry->{stats}->{net_recv}->{bytes} * 8,
            traffic_out => $entry->{stats}->{net_send}->{bytes} * 8,
            dropped_in => $entry->{stats}->{net_recv}->{dropped},
        };
    }

    if (scalar(keys %{$self->{output_stream}}) <= 0) {
        $self->{output}->add_option_msg(short_msg => "No output found.");
        $self->{output}->option_exit();
    }

    # cache key for the statefile: one state per host/port/filter combination
    $self->{cache_name} = "zixi_" . $self->{mode} . '_' . $options{custom}->{hostname} . '_' . $options{custom}->{port} . '_' .
        (defined($self->{option_results}->{filter_counters}) ? md5_hex($self->{option_results}->{filter_counters}) : md5_hex('all')) . '_' .
        (defined($self->{option_results}->{filter_name}) ? md5_hex($self->{option_results}->{filter_name}) : md5_hex('all'));
}

1;

__END__

=head1 MODE

Check output usage.

=over 8

=item B<--filter-name>

Filter name (can be a regexp).

=item B<--filter-counters>

Only display some counters (regexp can be used).
Example: --filter-counters='^status$'

=item B<--warning-*>

Threshold warning.
Can be: 'traffic-in', 'traffic-out', 'dropped-in'.

=item B<--critical-*>

Threshold critical.
Can be: 'traffic-in', 'traffic-out', 'dropped-in'.

=item B<--warning-status>

Set warning threshold for status (Default: -)
Can used special variables like: %{name}, %{status}, %{error}.

=item B<--critical-status>

Set critical threshold for status (Default: '%{status} !~ /Connecting|Connected/i || %{error} !~ /none/i').
Can used special variables like: %{name}, %{status}, %{error}.

=back

=cut
nichols-356/centreon-plugins
apps/video/zixi/restapi/mode/broadcasteroutputusage.pm
Perl
apache-2.0
9,712
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Check the health of ESX host services through the VMware connector.
package apps::vmware::connector::mode::servicehost;

use base qw(centreon::plugins::mode);

use strict;
use warnings;

# Constructor: declare the command-line options accepted by this mode.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';

    my %arguments = (
        "esx-hostname:s"      => { name => 'esx_hostname' },
        "filter"              => { name => 'filter' },
        "scope-datacenter:s"  => { name => 'scope_datacenter' },
        "scope-cluster:s"     => { name => 'scope_cluster' },
        "disconnect-status:s" => { name => 'disconnect_status', default => 'unknown' },
        "filter-services:s"   => { name => 'filter_services' },
    );
    $options{options}->add_options(arguments => \%arguments);

    return $self;
}

# Validate options: --disconnect-status must be a known literal status.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::init(%options);

    my $disconnect = $self->{option_results}->{disconnect_status};
    if ($self->{output}->is_litteral_status(status => $disconnect) == 0) {
        $self->{output}->add_option_msg(short_msg => "Wrong disconnect-status status option '" . $disconnect . "'.");
        $self->{output}->option_exit();
    }
}

# Forward the request to the connector daemon and let it do the work.
sub run {
    my ($self, %options) = @_;

    my $connector = $options{custom};
    $self->{connector} = $connector;
    $connector->add_params(params => $self->{option_results}, command => 'servicehost');
    $connector->run();
}

1;

__END__

=head1 MODE

Check ESX services.

=over 8

=item B<--esx-hostname>

ESX hostname to check.
If not set, we check all ESX.

=item B<--filter>

ESX hostname is a regexp.

=item B<--scope-datacenter>

Search in following datacenter(s) (can be a regexp).

=item B<--scope-cluster>

Search in following cluster(s) (can be a regexp).

=item B<--disconnect-status>

Status if ESX host disconnected (default: 'unknown').

=item B<--filter-services>

Filter services you want to check (can be a regexp).

=back

=cut
bcournaud/centreon-plugins
apps/vmware/connector/mode/servicehost.pm
Perl
apache-2.0
2,930
# NOTE: this file is machine-generated by SOAP::WSDL from the AdWords
# v201409 WSDL.  Regenerate rather than hand-editing; see POD below.
package Google::Ads::AdWords::v201409::AdwordsUserListService::getResponse;

use strict;
use warnings;

{ # BLOCK to scope variables

# XML namespace of the rm (remarketing) service for this API version.
sub get_xmlns { 'https://adwords.google.com/api/adwords/rm/v201409' }

__PACKAGE__->__set_name('getResponse');
__PACKAGE__->__set_nillable();
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();

use base qw(
    SOAP::WSDL::XSD::Typelib::Element
    Google::Ads::SOAP::Typelib::ComplexType
);

# No XML attributes are defined for this element.
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;

sub __get_attr_class {
    return $XML_ATTRIBUTE_CLASS;
}

use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);

{ # BLOCK to scope variables

# inside-out attribute storage for the single 'rval' property
my %rval_of :ATTR(:get<rval>);

__PACKAGE__->_factory(
    [ qw(        rval

    ) ],
    {
        'rval' => \%rval_of,
    },
    {
        'rval' => 'Google::Ads::AdWords::v201409::UserListPage',
    },
    {

        'rval' => 'rval',
    }
);

} # end BLOCK

} # end of BLOCK

1;

=pod

=head1 NAME

Google::Ads::AdWords::v201409::AdwordsUserListService::getResponse

=head1 DESCRIPTION

Perl data type class for the XML Schema defined element
getResponse
from the namespace https://adwords.google.com/api/adwords/rm/v201409.

=head1 PROPERTIES

The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:

=over

=item * rval

 $element->set_rval($data);
 $element->get_rval();

=back

=head1 METHODS

=head2 new

 my $element = Google::Ads::AdWords::v201409::AdwordsUserListService::getResponse->new($data);

Constructor. The following data structure may be passed to new():

 {
   rval =>  $a_reference_to, # see Google::Ads::AdWords::v201409::UserListPage
 },

=head1 AUTHOR

Generated by SOAP::WSDL

=cut
gitpan/GOOGLE-ADWORDS-PERL-CLIENT
lib/Google/Ads/AdWords/v201409/AdwordsUserListService/getResponse.pm
Perl
apache-2.0
1,779
#!/usr/bin/perl
# Read t.txt line by line and print each line followed by its first
# tab-separated field (same output order as before: grep result, then
# first split field).
use strict;
use warnings;

# Three-arg open with a lexical handle; include $! for diagnostics.
open(my $parse_fd, '<', 't.txt') or die "can't open t.txt: $!";

while (my $line = <$parse_fd>) {
    #chop;
    # BUG FIX: the original "split/'\t'/" split on the literal three-character
    # sequence quote-TAB-quote, so @fl was almost always the whole line.
    # A plain /\t/ splits on the TAB character as intended.
    my @fl = split /\t/, $line;

    # /:*/ (zero or more colons) matches every line, so @foo is simply the
    # line itself -- preserved for output compatibility; presumably a colon
    # match was intended here.
    my @foo = grep { /:*/ } $line;

    print $foo[0];
    print $fl[0];
}

close $parse_fd or die "can't close t.txt: $!";
ctsidev/SecureWise
wise/archived_files/email_scheduler/edu/ucla/wise/client/t.pl
Perl
bsd-3-clause
184
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!! # This file is machine-generated by lib/unicore/mktables from the Unicode # database, Version 6.2.0. Any changes made here will be lost! # !!!!!!! INTERNAL PERL USE ONLY !!!!!!! # This file is for internal use by core Perl only. The format and even the # name or existence of this file are subject to change without notice. Don't # use it directly. return <<'END'; 2D80 2DDF END
Bjay1435/capstone
rootfs/usr/share/perl/5.18.2/unicore/lib/Blk/Ethiopi2.pl
Perl
mit
433
package Data::UUID;
#--------CUSTOM AMENDMENTS ------
#
# a portable version (binary perl dist, PAR::Packer , PerlApp)
# can't have a hard coded path

# Helper package: computes per-user scratch locations for the UUID
# generator's persistent state files instead of Data::UUID's hard-coded path.
package Data::UUID::LOADER;
use strict;
use File::Spec;

# Return (creating it if necessary) the directory used to persist UUID state:
#   Windows:            <tmpdir>\custom_data_uuid
#   recent darwin:      <per-user tmpdir>/custom_data_uuid
#   linux/old darwin:   <tmpdir>/<username>/custom_data_uuid
sub _get_cava_uuid_dir {
    # Clear the umask so the directories really get mode 0777 and can be
    # shared between local users.
    my $oldmask = umask 0000;
    my $folder = File::Spec->tmpdir;
    if($^O =~ /^mswin/i) {
        $folder .= qq(\\custom_data_uuid);
    }
    elsif(($^O =~ /^darwin/i) && ($folder ne '/tmp')) {
        # recent darwin has user specific temp
        $folder .= '/custom_data_uuid';
    }
    else {
        # linux && old darwin need username
        my $usename = (getpwuid($<))[0];
        $usename ||= 'system_data_uuid';
        $usename =~ s/[^A-Za-z0-9\_]/-/g;
        $folder .= '/' . $usename;
        mkdir($folder, 0777) if(!-d $folder);
        $folder .= '/custom_data_uuid';
    }
    mkdir($folder, 0777) if(!-d $folder);
    # BUG FIX: $oldmask was saved but never restored, so every call
    # permanently set the process umask to 0 for the rest of the program.
    umask $oldmask;
    return $folder;
}

# Path of the file holding the generator's node id (made world-writable).
sub _perl_get_nodeid_store {
    my $gdir = _get_cava_uuid_dir();
    $gdir .= '/CUSTOMUUID_NODEID_STORE';
    $gdir =~ s/\//\\/g if($^O =~ /^MSWin/);
    chmod(0666, $gdir);   # no-op until the file exists
    return $gdir;
}

# Path of the file holding the generator's non-volatile state.
sub _perl_get_state_store {
    my $gdir = _get_cava_uuid_dir();
    $gdir .= '/CUSTOMUUID_STATE_NV_STORE';
    $gdir =~ s/\//\\/g if($^O =~ /^MSWin/);
    chmod(0666, $gdir);   # no-op until the file exists
    return $gdir;
}

#------------END CUSTOM AMENDMENTS

package Data::UUID;

use strict;
use Carp;
use vars qw($VERSION @ISA @EXPORT @EXPORT_OK);

require Exporter;
require DynaLoader;
require Digest::MD5;

@ISA = qw(Exporter DynaLoader);

# Items to export into callers namespace by default. Note: do not export
# names by default without a very good reason. Use EXPORT_OK instead.
# Do not simply export all your public functions/methods/constants.
@EXPORT = qw(
   NameSpace_DNS
   NameSpace_OID
   NameSpace_URL
   NameSpace_X500
);
$VERSION = '1.217_001';

# Load the XS implementation that provides create(), compare(), etc.
bootstrap Data::UUID $VERSION;

1;
__END__

=head1 NAME

Data::UUID - Perl extension for generating Globally/Universally Unique Identifiers (GUIDs/UUIDs). 

=head1 SYNOPSIS

  use Data::UUID;

  $ug    = new Data::UUID;
  $uuid1 = $ug->create();
  $uuid2 = $ug->create_from_name(<namespace>, <name>);

  $res   = $ug->compare($uuid1, $uuid2);

  $str   = $ug->to_string( $uuid );
  $uuid  = $ug->from_string( $str );

=head1 DESCRIPTION

This module provides a framework for generating v3 UUIDs (Universally Unique
Identifiers, also known as GUIDs (Globally Unique Identifiers). A UUID is 128
bits long, and is guaranteed to be different from all other UUIDs/GUIDs
generated until 3400 CE.

UUIDs were originally used in the Network Computing System (NCS) and later in
the Open Software Foundation's (OSF) Distributed Computing Environment.
Currently many different technologies rely on UUIDs to provide unique identity
for various software components. Microsoft COM/DCOM for instance, uses GUIDs
very extensively to uniquely identify classes, applications and components
across network-connected systems.

The algorithm for UUID generation, used by this extension, is described in the
Internet Draft "UUIDs and GUIDs" by Paul J. Leach and Rich Salz.  (See RFC
4122.)  It provides reasonably efficient and reliable framework for generating
UUIDs and supports fairly high allocation rates -- 10 million per second per
machine -- and therefore is suitable for identifying both extremely
short-lived and very persistent objects on a given system as well as across
the network.

This module provides several methods to create a UUID:

   # creates binary (16 byte long binary value) UUID.
   $ug->create();
   $ug->create_bin();

   # creates binary (16-byte long binary value) UUID based on particular
   # namespace and name string. 

   $ug->create_from_name(<namespace>, <name>);
   $ug->create_from_name_bin(<namespace>, <name>);

   # creates UUID string, using conventional UUID string format,
   # such as: 4162F712-1DD2-11B2-B17E-C09EFE1DC403
   $ug->create_str();
   $ug->create_from_name_str(<namespace>, <name>);

   # creates UUID string as a hex string,
   # such as: 0x4162F7121DD211B2B17EC09EFE1DC403
   $ug->create_hex();
   $ug->create_from_name_hex(<namespace>, <name>);

   # creates UUID string as a Base64-encoded string
   $ug->create_b64();
   $ug->create_from_name_b64(<namespace>, <name>);

Binary UUIDs can be converted to printable strings using following methods:

   # convert to conventional string representation
   $ug->to_string(<uuid>);

   # convert to hex string
   $ug->to_hexstring(<uuid>);

   # convert to Base64-encoded string
   $ug->to_b64string(<uuid>);

Conversely, string UUIDs can be converted back to binary form:

   # recreate binary UUID from string
   $ug->from_string(<uuid>);
   $ug->from_hexstring(<uuid>);

   # recreate binary UUID from Base64-encoded string
   $ug->from_b64string(<uuid>);

Finally, two binary UUIDs can be compared using the following method:

   # returns -1, 0 or 1 depending on whether uuid1 less
   # than, equals to, or greater than uuid2
   $ug->compare(<uuid1>, <uuid2>);

Examples:

   use Data::UUID;

   # this creates a new UUID in string form, based on the standard namespace
   # UUID NameSpace_URL and name "www.mycompany.com"

   $ug = new Data::UUID;
   print $ug->create_from_name_str(NameSpace_URL, "www.mycompany.com");

=head2 EXPORT

The module allows exporting of several standard namespace UUIDs:

=over

=item NameSpace_DNS

=item NameSpace_URL

=item NameSpace_OID

=item NameSpace_X500

=back

=head1 AUTHOR

Alexander Golomshtok <agolomsh@cpan.org>

=head1 SEE ALSO

The Internet Draft "UUIDs and GUIDs" by Paul J. Leach and Rich Salz (RFC 4122)

=cut
thomaspreece10/STLExtract
Slic3r/Linux/lib/std/Data/UUID.pm
Perl
mit
5,593
########################################################################
# Bio::KBase::ObjectAPI::KBaseExpression::DB::GPL - This is the moose object corresponding to the KBaseExpression.GPL object
# Authors: Christopher Henry, Scott Devoid, Paul Frybarger
# Contact email: chenry@mcs.anl.gov
# Development location: Mathematics and Computer Science Division, Argonne National Lab
########################################################################
# Auto-generated Moose DB object describing a GEO platform (GPL) record:
# five string attributes plus the framework's parent/uuid/_reference plumbing.
package Bio::KBase::ObjectAPI::KBaseExpression::DB::GPL;
use Bio::KBase::ObjectAPI::BaseObject;
use Moose;
use namespace::autoclean;
extends 'Bio::KBase::ObjectAPI::BaseObject';


# PARENT:
# Weak back-reference to the containing object; not serialized as data.
has parent => (is => 'rw', isa => 'Ref', weak_ref => 1, type => 'parent', metaclass => 'Typed');

# ATTRIBUTES:
# uuid/_reference are lazily derived from the parent's reference (builders below).
has uuid => (is => 'rw', lazy => 1, isa => 'Str', type => 'msdata', metaclass => 'Typed',builder => '_build_uuid');
has _reference => (is => 'rw', lazy => 1, isa => 'Str', type => 'msdata', metaclass => 'Typed',builder => '_build_reference');
# GEO platform metadata fields (all plain strings).
has gpl_organism => (is => 'rw', isa => 'Str', printOrder => '-1', type => 'attribute', metaclass => 'Typed');
has gpl_technology => (is => 'rw', isa => 'Str', printOrder => '-1', type => 'attribute', metaclass => 'Typed');
has gpl_title => (is => 'rw', isa => 'Str', printOrder => '-1', type => 'attribute', metaclass => 'Typed');
has gpl_id => (is => 'rw', isa => 'Str', printOrder => '-1', type => 'attribute', metaclass => 'Typed');
has gpl_tax_id => (is => 'rw', isa => 'Str', printOrder => '-1', type => 'attribute', metaclass => 'Typed');


# LINKS:


# BUILDERS:
# Reference is the parent's reference plus this object's path segment.
# NOTE(review): relies on an id() accessor that is not defined in this file —
# presumably inherited from BaseObject or the framework; verify.
sub _build_reference { my ($self) = @_;return $self->parent()->_reference().'/gsm_platform/id/'.$self->id(); }
sub _build_uuid { my ($self) = @_;return $self->_reference(); }


# CONSTANTS:
sub _type { return 'KBaseExpression.GPL'; }
sub _module { return 'KBaseExpression'; }
sub _class { return 'GPL'; }
sub _top { return 0; }

# Attribute metadata table consumed by the framework's generic (de)serializers.
my $attributes = [
          {
            'req' => 0,
            'printOrder' => -1,
            'name' => 'gpl_organism',
            'type' => 'Str',
            'perm' => 'rw'
          },
          {
            'req' => 0,
            'printOrder' => -1,
            'name' => 'gpl_technology',
            'type' => 'Str',
            'perm' => 'rw'
          },
          {
            'req' => 0,
            'printOrder' => -1,
            'name' => 'gpl_title',
            'type' => 'Str',
            'perm' => 'rw'
          },
          {
            'req' => 0,
            'printOrder' => -1,
            'name' => 'gpl_id',
            'type' => 'Str',
            'perm' => 'rw'
          },
          {
            'req' => 0,
            'printOrder' => -1,
            'name' => 'gpl_tax_id',
            'type' => 'Str',
            'perm' => 'rw'
          }
        ];

my $attribute_map = {gpl_organism => 0, gpl_technology => 1, gpl_title => 2, gpl_id => 3, gpl_tax_id => 4};
# Lookup: with a key, return that attribute's metadata (or undef if unknown);
# without a key, return the full metadata arrayref.
sub _attributes {
	 my ($self, $key) = @_;
	 if (defined($key)) {
	 	 my $ind = $attribute_map->{$key};
	 	 if (defined($ind)) {
	 	 	 return $attributes->[$ind];
	 	 } else {
	 	 	 return;
	 	 }
	 } else {
	 	 return $attributes;
	 }
}

# This class defines no links or subobjects; the tables below are empty but
# kept so the generic accessors behave uniformly across generated classes.
my $links = [];

my $link_map = {};
sub _links {
	 my ($self, $key) = @_;
	 if (defined($key)) {
	 	 my $ind = $link_map->{$key};
	 	 if (defined($ind)) {
	 	 	 return $links->[$ind];
	 	 } else {
	 	 	 return;
	 	 }
	 } else {
	 	 return $links;
	 }
}

my $subobjects = [];

my $subobject_map = {};
sub _subobjects {
	 my ($self, $key) = @_;
	 if (defined($key)) {
	 	 my $ind = $subobject_map->{$key};
	 	 if (defined($ind)) {
	 	 	 return $subobjects->[$ind];
	 	 } else {
	 	 	 return;
	 	 }
	 } else {
	 	 return $subobjects;
	 }
}
__PACKAGE__->meta->make_immutable;
1;
kbase/KBaseFBAModeling
lib/Bio/KBase/ObjectAPI/KBaseExpression/DB/GPL.pm
Perl
mit
3,715
package Perun::beans::TaskResult;

=head1 NAME

Perun::beans::TaskResult - bean describing the outcome of one propagation
task on one destination (messages, return code, status, destination/service).

=cut

use strict;
use warnings;

use Perun::Common;

# Objects stringify through toString (e.g. when printed or interpolated).
use overload '""' => \&toString;

# Human-readable one-line summary; only fields with true values are included.
sub toString {
	my $self = shift;

	my $id = $self->{_id};
	my $taskId = $self->{_taskId};
	my $destinationId = $self->{_destinationId};
	my $errorMessage = $self->{_errorMessage};
	my $standardMessage = $self->{_standardMessage};
	my $returnCode = $self->{_returnCode};
	my $timestamp = $self->{_timestamp};
	my $status = $self->{_status};
	my $destination = $self->{_destination};
	my $service = $self->{_service};

	my $str = 'TaskResult (';
	$str .= "id: $id, " if ($id);
	$str .= "taskId: $taskId, " if ($taskId);
	$str .= "destinationId: $destinationId, " if ($destinationId);
	$str .= "errorMessage: $errorMessage, " if ($errorMessage);
	$str .= "standardMessage: $standardMessage, " if ($standardMessage);
	$str .= "returnCode: $returnCode, " if ($returnCode);
	$str .= "timestamp: $timestamp, " if ($timestamp);
	# BUG FIX: status and destination were appended without the ", "
	# separator used by every other field, yielding e.g.
	# "status: DONEdestination: ...".
	$str .= "status: $status, " if ($status);
	$str .= "destination: $destination, " if ($destination);
	$str .= "service: $service" if ($service);
	$str .= ')';

	return $str;
}

# Constructor.
# FIX: the original used one-argument bless({}), which always blessed into
# this package and ignored the invocant, breaking subclassing.  Accept the
# class name, but keep working when called as a plain function too.
sub new {
	my $class = shift || __PACKAGE__;
	return bless {}, ref($class) || $class;
}

sub fromHash {
	return Perun::Common::fromHash(@_);
}

# Hook for the JSON encoder.  Numeric fields default to 0 when unset;
# string fields default to undef (JSON null).
sub TO_JSON {
	my $self = shift;

	my $id            = defined($self->{_id})            ? $self->{_id}*1            : 0;
	my $taskId        = defined($self->{_taskId})        ? $self->{_taskId}*1        : 0;
	my $destinationId = defined($self->{_destinationId}) ? $self->{_destinationId}*1 : 0;
	my $returnCode    = defined($self->{_returnCode})    ? $self->{_returnCode}*1    : 0;

	my $errorMessage    = defined($self->{_errorMessage})    ? "$self->{_errorMessage}"    : undef;
	my $standardMessage = defined($self->{_standardMessage}) ? "$self->{_standardMessage}" : undef;
	my $service         = defined($self->{_service})         ? $self->{_service}           : undef;

	my $timestamp = $self->{_timestamp};
	my $status = $self->{_status};
	my $destination = $self->{_destination};

	return {id => $id, taskId => $taskId, destinationId => $destinationId,
		errorMessage => $errorMessage, standardMessage => $standardMessage,
		returnCode => $returnCode, timestamp => $timestamp, status => $status,
		destination => $destination, service => $service};
}

# Tabular representation for the CLI printers; column order matches
# getCommonArrayRepresentationHeading below.
sub getCommonArrayRepresentation {
	my $self = shift;
	return ($self->{_id}, $self->{_taskId}, $self->{_destinationId},
		$self->{_errorMessage}, $self->{_standardMessage}, $self->{_returnCode},
		$self->{_timestamp}, $self->{_status},
		$self->{_destination}->{destination}, $self->{_service}->{name});
}

sub getCommonArrayRepresentationHeading {
	return ('ID', 'TaskId', 'DestinationId', 'stderr', 'stdout', 'ReturnCode',
		'Timestamp', 'Status', 'Destination', 'Service');
}

# --- simple accessors -------------------------------------------------------

sub getId { my $self = shift; return $self->{_id}; }
sub setId { my $self = shift; $self->{_id} = shift; return; }

sub getTaskId { my $self = shift; return $self->{_taskId}; }
sub setTaskId { my $self = shift; $self->{_taskId} = shift; return; }

sub getDestinationId { my $self = shift; return $self->{_destinationId}; }
sub setDestinationId { my $self = shift; $self->{_destinationId} = shift; return; }

sub getErrorMessage { my $self = shift; return $self->{_errorMessage}; }
sub setErrorMessage { my $self = shift; $self->{_errorMessage} = shift; return; }

sub getStandardMessage { my $self = shift; return $self->{_standardMessage}; }
sub setStandardMessage { my $self = shift; $self->{_standardMessage} = shift; return; }

sub getReturnCode { my $self = shift; return $self->{_returnCode}; }
sub setReturnCode { my $self = shift; $self->{_returnCode} = shift; return; }

sub getTimestamp { my $self = shift; return $self->{_timestamp}; }
sub setTimestamp { my $self = shift; $self->{_timestamp} = shift; return; }

sub getStatus { my $self = shift; return $self->{_status}; }
sub setStatus { my $self = shift; $self->{_status} = shift; return; }

sub getDestination { my $self = shift; return $self->{_destination}; }
sub setDestination { my $self = shift; $self->{_destination} = shift; return; }

# Convenience: the destination's name out of the nested destination hash.
sub getDestinationName {
	my $destination = shift->{_destination};
	return $destination->{destination};
}

# Added for symmetry with setService (the array representation reads it too).
sub getService { my $self = shift; return $self->{_service}; }
sub setService { my $self = shift; $self->{_service} = shift; return; }

sub getServiceName {
	my $service = shift->{_service};
	# FIX: getCommonArrayRepresentation reads the service name from the
	# 'name' key; the original read the non-existent 'service' key and so
	# always returned undef.
	return $service->{name};
}

1;
Laliska/perun
perun-rpc/src/main/perl/Perun/beans/TaskResult.pm
Perl
bsd-2-clause
4,614
# This file is auto-generated by the Perl DateTime Suite time zone # code generator (0.07) This code generator comes with the # DateTime::TimeZone module distribution in the tools/ directory # # Generated from /tmp/ympzZnp0Uq/northamerica. Olson data version 2012c # # Do not edit this file directly. # package DateTime::TimeZone::America::Los_Angeles; { $DateTime::TimeZone::America::Los_Angeles::VERSION = '1.46'; } use strict; use Class::Singleton 1.03; use DateTime::TimeZone; use DateTime::TimeZone::OlsonDB; @DateTime::TimeZone::America::Los_Angeles::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' ); my $spans = [ [ DateTime::TimeZone::NEG_INFINITY, 59418043200, DateTime::TimeZone::NEG_INFINITY, 59418014822, -28378, 0, 'LMT' ], [ 59418043200, 60502413600, 59418014400, 60502384800, -28800, 0, 'PST' ], [ 60502413600, 60520554000, 60502388400, 60520528800, -25200, 1, 'PDT' ], [ 60520554000, 60533863200, 60520525200, 60533834400, -28800, 0, 'PST' ], [ 60533863200, 60552003600, 60533838000, 60551978400, -25200, 1, 'PDT' ], [ 60552003600, 61255476000, 60551974800, 61255447200, -28800, 0, 'PST' ], [ 61255476000, 61366287600, 61255450800, 61366262400, -25200, 1, 'PWT' ], [ 61366287600, 61370298000, 61366262400, 61370272800, -25200, 1, 'PPT' ], [ 61370298000, 61378329600, 61370269200, 61378300800, -28800, 0, 'PST' ], [ 61378329600, 61447716000, 61378300800, 61447687200, -28800, 0, 'PST' ], [ 61447716000, 61473027600, 61447690800, 61473002400, -25200, 1, 'PDT' ], [ 61473027600, 61514848800, 61472998800, 61514820000, -28800, 0, 'PST' ], [ 61514848800, 61527546000, 61514823600, 61527520800, -25200, 1, 'PDT' ], [ 61527546000, 61546298400, 61527517200, 61546269600, -28800, 0, 'PST' ], [ 61546298400, 61559600400, 61546273200, 61559575200, -25200, 1, 'PDT' ], [ 61559600400, 61577748000, 61559571600, 61577719200, -28800, 0, 'PST' ], [ 61577748000, 61591050000, 61577722800, 61591024800, -25200, 1, 'PDT' ], [ 61591050000, 61609197600, 61591021200, 61609168800, -28800, 0, 'PST' ], [ 
61609197600, 61622499600, 61609172400, 61622474400, -25200, 1, 'PDT' ], [ 61622499600, 61640647200, 61622470800, 61640618400, -28800, 0, 'PST' ], [ 61640647200, 61653949200, 61640622000, 61653924000, -25200, 1, 'PDT' ], [ 61653949200, 61672096800, 61653920400, 61672068000, -28800, 0, 'PST' ], [ 61672096800, 61685398800, 61672071600, 61685373600, -25200, 1, 'PDT' ], [ 61685398800, 61704151200, 61685370000, 61704122400, -28800, 0, 'PST' ], [ 61704151200, 61717453200, 61704126000, 61717428000, -25200, 1, 'PDT' ], [ 61717453200, 61735600800, 61717424400, 61735572000, -28800, 0, 'PST' ], [ 61735600800, 61748902800, 61735575600, 61748877600, -25200, 1, 'PDT' ], [ 61748902800, 61767050400, 61748874000, 61767021600, -28800, 0, 'PST' ], [ 61767050400, 61780352400, 61767025200, 61780327200, -25200, 1, 'PDT' ], [ 61780352400, 61798500000, 61780323600, 61798471200, -28800, 0, 'PST' ], [ 61798500000, 61811802000, 61798474800, 61811776800, -25200, 1, 'PDT' ], [ 61811802000, 61829949600, 61811773200, 61829920800, -28800, 0, 'PST' ], [ 61829949600, 61843251600, 61829924400, 61843226400, -25200, 1, 'PDT' ], [ 61843251600, 61862004000, 61843222800, 61861975200, -28800, 0, 'PST' ], [ 61862004000, 61874701200, 61861978800, 61874676000, -25200, 1, 'PDT' ], [ 61874701200, 61893453600, 61874672400, 61893424800, -28800, 0, 'PST' ], [ 61893453600, 61909174800, 61893428400, 61909149600, -25200, 1, 'PDT' ], [ 61909174800, 61924903200, 61909146000, 61924874400, -28800, 0, 'PST' ], [ 61924903200, 61940624400, 61924878000, 61940599200, -25200, 1, 'PDT' ], [ 61940624400, 61956352800, 61940595600, 61956324000, -28800, 0, 'PST' ], [ 61956352800, 61972074000, 61956327600, 61972048800, -25200, 1, 'PDT' ], [ 61972074000, 61987802400, 61972045200, 61987773600, -28800, 0, 'PST' ], [ 61987802400, 62004128400, 61987777200, 62004103200, -25200, 1, 'PDT' ], [ 62004128400, 62019252000, 62004099600, 62019223200, -28800, 0, 'PST' ], [ 62019252000, 62035578000, 62019226800, 62035552800, -25200, 1, 'PDT' ], [ 
62035578000, 62041017600, 62035549200, 62040988800, -28800, 0, 'PST' ], [ 62041017600, 62051306400, 62040988800, 62051277600, -28800, 0, 'PST' ], [ 62051306400, 62067027600, 62051281200, 62067002400, -25200, 1, 'PDT' ], [ 62067027600, 62082756000, 62066998800, 62082727200, -28800, 0, 'PST' ], [ 62082756000, 62098477200, 62082730800, 62098452000, -25200, 1, 'PDT' ], [ 62098477200, 62114205600, 62098448400, 62114176800, -28800, 0, 'PST' ], [ 62114205600, 62129926800, 62114180400, 62129901600, -25200, 1, 'PDT' ], [ 62129926800, 62145655200, 62129898000, 62145626400, -28800, 0, 'PST' ], [ 62145655200, 62161376400, 62145630000, 62161351200, -25200, 1, 'PDT' ], [ 62161376400, 62177104800, 62161347600, 62177076000, -28800, 0, 'PST' ], [ 62177104800, 62193430800, 62177079600, 62193405600, -25200, 1, 'PDT' ], [ 62193430800, 62209159200, 62193402000, 62209130400, -28800, 0, 'PST' ], [ 62209159200, 62224880400, 62209134000, 62224855200, -25200, 1, 'PDT' ], [ 62224880400, 62240608800, 62224851600, 62240580000, -28800, 0, 'PST' ], [ 62240608800, 62256330000, 62240583600, 62256304800, -25200, 1, 'PDT' ], [ 62256330000, 62262381600, 62256301200, 62262352800, -28800, 0, 'PST' ], [ 62262381600, 62287779600, 62262356400, 62287754400, -25200, 1, 'PDT' ], [ 62287779600, 62298064800, 62287750800, 62298036000, -28800, 0, 'PST' ], [ 62298064800, 62319229200, 62298039600, 62319204000, -25200, 1, 'PDT' ], [ 62319229200, 62334957600, 62319200400, 62334928800, -28800, 0, 'PST' ], [ 62334957600, 62351283600, 62334932400, 62351258400, -25200, 1, 'PDT' ], [ 62351283600, 62366407200, 62351254800, 62366378400, -28800, 0, 'PST' ], [ 62366407200, 62382733200, 62366382000, 62382708000, -25200, 1, 'PDT' ], [ 62382733200, 62398461600, 62382704400, 62398432800, -28800, 0, 'PST' ], [ 62398461600, 62414182800, 62398436400, 62414157600, -25200, 1, 'PDT' ], [ 62414182800, 62429911200, 62414154000, 62429882400, -28800, 0, 'PST' ], [ 62429911200, 62445632400, 62429886000, 62445607200, -25200, 1, 'PDT' ], [ 
62445632400, 62461360800, 62445603600, 62461332000, -28800, 0, 'PST' ], [ 62461360800, 62477082000, 62461335600, 62477056800, -25200, 1, 'PDT' ], [ 62477082000, 62492810400, 62477053200, 62492781600, -28800, 0, 'PST' ], [ 62492810400, 62508531600, 62492785200, 62508506400, -25200, 1, 'PDT' ], [ 62508531600, 62524260000, 62508502800, 62524231200, -28800, 0, 'PST' ], [ 62524260000, 62540586000, 62524234800, 62540560800, -25200, 1, 'PDT' ], [ 62540586000, 62555709600, 62540557200, 62555680800, -28800, 0, 'PST' ], [ 62555709600, 62572035600, 62555684400, 62572010400, -25200, 1, 'PDT' ], [ 62572035600, 62587764000, 62572006800, 62587735200, -28800, 0, 'PST' ], [ 62587764000, 62603485200, 62587738800, 62603460000, -25200, 1, 'PDT' ], [ 62603485200, 62619213600, 62603456400, 62619184800, -28800, 0, 'PST' ], [ 62619213600, 62634934800, 62619188400, 62634909600, -25200, 1, 'PDT' ], [ 62634934800, 62650663200, 62634906000, 62650634400, -28800, 0, 'PST' ], [ 62650663200, 62666384400, 62650638000, 62666359200, -25200, 1, 'PDT' ], [ 62666384400, 62680298400, 62666355600, 62680269600, -28800, 0, 'PST' ], [ 62680298400, 62697834000, 62680273200, 62697808800, -25200, 1, 'PDT' ], [ 62697834000, 62711748000, 62697805200, 62711719200, -28800, 0, 'PST' ], [ 62711748000, 62729888400, 62711722800, 62729863200, -25200, 1, 'PDT' ], [ 62729888400, 62743197600, 62729859600, 62743168800, -28800, 0, 'PST' ], [ 62743197600, 62761338000, 62743172400, 62761312800, -25200, 1, 'PDT' ], [ 62761338000, 62774647200, 62761309200, 62774618400, -28800, 0, 'PST' ], [ 62774647200, 62792787600, 62774622000, 62792762400, -25200, 1, 'PDT' ], [ 62792787600, 62806701600, 62792758800, 62806672800, -28800, 0, 'PST' ], [ 62806701600, 62824237200, 62806676400, 62824212000, -25200, 1, 'PDT' ], [ 62824237200, 62838151200, 62824208400, 62838122400, -28800, 0, 'PST' ], [ 62838151200, 62855686800, 62838126000, 62855661600, -25200, 1, 'PDT' ], [ 62855686800, 62869600800, 62855658000, 62869572000, -28800, 0, 'PST' ], [ 
62869600800, 62887741200, 62869575600, 62887716000, -25200, 1, 'PDT' ], [ 62887741200, 62901050400, 62887712400, 62901021600, -28800, 0, 'PST' ], [ 62901050400, 62919190800, 62901025200, 62919165600, -25200, 1, 'PDT' ], [ 62919190800, 62932500000, 62919162000, 62932471200, -28800, 0, 'PST' ], [ 62932500000, 62950640400, 62932474800, 62950615200, -25200, 1, 'PDT' ], [ 62950640400, 62964554400, 62950611600, 62964525600, -28800, 0, 'PST' ], [ 62964554400, 62982090000, 62964529200, 62982064800, -25200, 1, 'PDT' ], [ 62982090000, 62996004000, 62982061200, 62995975200, -28800, 0, 'PST' ], [ 62996004000, 63013539600, 62995978800, 63013514400, -25200, 1, 'PDT' ], [ 63013539600, 63027453600, 63013510800, 63027424800, -28800, 0, 'PST' ], [ 63027453600, 63044989200, 63027428400, 63044964000, -25200, 1, 'PDT' ], [ 63044989200, 63058903200, 63044960400, 63058874400, -28800, 0, 'PST' ], [ 63058903200, 63077043600, 63058878000, 63077018400, -25200, 1, 'PDT' ], [ 63077043600, 63090352800, 63077014800, 63090324000, -28800, 0, 'PST' ], [ 63090352800, 63108493200, 63090327600, 63108468000, -25200, 1, 'PDT' ], [ 63108493200, 63121802400, 63108464400, 63121773600, -28800, 0, 'PST' ], [ 63121802400, 63139942800, 63121777200, 63139917600, -25200, 1, 'PDT' ], [ 63139942800, 63153856800, 63139914000, 63153828000, -28800, 0, 'PST' ], [ 63153856800, 63171392400, 63153831600, 63171367200, -25200, 1, 'PDT' ], [ 63171392400, 63185306400, 63171363600, 63185277600, -28800, 0, 'PST' ], [ 63185306400, 63202842000, 63185281200, 63202816800, -25200, 1, 'PDT' ], [ 63202842000, 63216756000, 63202813200, 63216727200, -28800, 0, 'PST' ], [ 63216756000, 63234896400, 63216730800, 63234871200, -25200, 1, 'PDT' ], [ 63234896400, 63248205600, 63234867600, 63248176800, -28800, 0, 'PST' ], [ 63248205600, 63266346000, 63248180400, 63266320800, -25200, 1, 'PDT' ], [ 63266346000, 63279655200, 63266317200, 63279626400, -28800, 0, 'PST' ], [ 63279655200, 63297795600, 63279630000, 63297770400, -25200, 1, 'PDT' ], [ 
63297795600, 63309290400, 63297766800, 63309261600, -28800, 0, 'PST' ], [ 63309290400, 63329850000, 63309265200, 63329824800, -25200, 1, 'PDT' ], [ 63329850000, 63340740000, 63329821200, 63340711200, -28800, 0, 'PST' ], [ 63340740000, 63361299600, 63340714800, 63361274400, -25200, 1, 'PDT' ], [ 63361299600, 63372189600, 63361270800, 63372160800, -28800, 0, 'PST' ], [ 63372189600, 63392749200, 63372164400, 63392724000, -25200, 1, 'PDT' ], [ 63392749200, 63404244000, 63392720400, 63404215200, -28800, 0, 'PST' ], [ 63404244000, 63424803600, 63404218800, 63424778400, -25200, 1, 'PDT' ], [ 63424803600, 63435693600, 63424774800, 63435664800, -28800, 0, 'PST' ], [ 63435693600, 63456253200, 63435668400, 63456228000, -25200, 1, 'PDT' ], [ 63456253200, 63467143200, 63456224400, 63467114400, -28800, 0, 'PST' ], [ 63467143200, 63487702800, 63467118000, 63487677600, -25200, 1, 'PDT' ], [ 63487702800, 63498592800, 63487674000, 63498564000, -28800, 0, 'PST' ], [ 63498592800, 63519152400, 63498567600, 63519127200, -25200, 1, 'PDT' ], [ 63519152400, 63530042400, 63519123600, 63530013600, -28800, 0, 'PST' ], [ 63530042400, 63550602000, 63530017200, 63550576800, -25200, 1, 'PDT' ], [ 63550602000, 63561492000, 63550573200, 63561463200, -28800, 0, 'PST' ], [ 63561492000, 63582051600, 63561466800, 63582026400, -25200, 1, 'PDT' ], [ 63582051600, 63593546400, 63582022800, 63593517600, -28800, 0, 'PST' ], [ 63593546400, 63614106000, 63593521200, 63614080800, -25200, 1, 'PDT' ], [ 63614106000, 63624996000, 63614077200, 63624967200, -28800, 0, 'PST' ], [ 63624996000, 63645555600, 63624970800, 63645530400, -25200, 1, 'PDT' ], [ 63645555600, 63656445600, 63645526800, 63656416800, -28800, 0, 'PST' ], [ 63656445600, 63677005200, 63656420400, 63676980000, -25200, 1, 'PDT' ], [ 63677005200, 63687895200, 63676976400, 63687866400, -28800, 0, 'PST' ], [ 63687895200, 63708454800, 63687870000, 63708429600, -25200, 1, 'PDT' ], [ 63708454800, 63719344800, 63708426000, 63719316000, -28800, 0, 'PST' ], [ 
63719344800, 63739904400, 63719319600, 63739879200, -25200, 1, 'PDT' ], [ 63739904400, 63751399200, 63739875600, 63751370400, -28800, 0, 'PST' ], [ 63751399200, 63771958800, 63751374000, 63771933600, -25200, 1, 'PDT' ], [ 63771958800, 63782848800, 63771930000, 63782820000, -28800, 0, 'PST' ], [ 63782848800, 63803408400, 63782823600, 63803383200, -25200, 1, 'PDT' ], [ 63803408400, 63814298400, 63803379600, 63814269600, -28800, 0, 'PST' ], [ 63814298400, 63834858000, 63814273200, 63834832800, -25200, 1, 'PDT' ], ]; sub olson_version { '2012c' } sub has_dst_changes { 79 } sub _max_year { 2022 } sub _new_instance { return shift->_init( @_, spans => $spans ); } sub _last_offset { -28800 } my $last_observance = bless( { 'format' => 'P%sT', 'gmtoff' => '-8:00', 'local_start_datetime' => bless( { 'formatter' => undef, 'local_rd_days' => 718067, 'local_rd_secs' => 0, 'offset_modifier' => 0, 'rd_nanosecs' => 0, 'tz' => bless( { 'name' => 'floating', 'offset' => 0 }, 'DateTime::TimeZone::Floating' ), 'utc_rd_days' => 718067, 'utc_rd_secs' => 0, 'utc_year' => 1968 }, 'DateTime' ), 'offset_from_std' => 0, 'offset_from_utc' => -28800, 'until' => [], 'utc_start_datetime' => bless( { 'formatter' => undef, 'local_rd_days' => 718067, 'local_rd_secs' => 28800, 'offset_modifier' => 0, 'rd_nanosecs' => 0, 'tz' => bless( { 'name' => 'floating', 'offset' => 0 }, 'DateTime::TimeZone::Floating' ), 'utc_rd_days' => 718067, 'utc_rd_secs' => 28800, 'utc_year' => 1968 }, 'DateTime' ) }, 'DateTime::TimeZone::OlsonDB::Observance' ) ; sub _last_observance { $last_observance } my $rules = [ bless( { 'at' => '2:00', 'from' => '2007', 'in' => 'Nov', 'letter' => 'S', 'name' => 'US', 'offset_from_std' => 0, 'on' => 'Sun>=1', 'save' => '0', 'to' => 'max', 'type' => undef }, 'DateTime::TimeZone::OlsonDB::Rule' ), bless( { 'at' => '2:00', 'from' => '2007', 'in' => 'Mar', 'letter' => 'D', 'name' => 'US', 'offset_from_std' => 3600, 'on' => 'Sun>=8', 'save' => '1:00', 'to' => 'max', 'type' => undef }, 
'DateTime::TimeZone::OlsonDB::Rule' ) ] ; sub _rules { $rules } 1;
leighpauls/k2cro4
third_party/perl/perl/vendor/lib/DateTime/TimeZone/America/Los_Angeles.pm
Perl
bsd-3-clause
15,562
package LibrangeCentcom;

# libcrange function provider backed by the Centcom host database.  Each
# exported function expands a range of query values into the matching hosts.
# Centcom is loaded lazily; if it is not installed, every lookup returns ().

use strict;
use warnings;

# Names of the range functions this module contributes to libcrange.
sub functions_provided {
    return qw/centcom_environment centcom_services centcom_facility centcom_status/;
}

# Shared implementation for all four lookups: query Centcom->findHosts once
# per value in @$range, keyed on $field, and collect the results.
# Returns the empty list when the Centcom module is unavailable.
sub _find_hosts_by {
    my ( $field, $range ) = @_;
    eval "require Centcom;";
    return () if ($@);

    my $centcom = Centcom->new;
    my @ret;
    for my $value (@$range) {
        push @ret, $centcom->findHosts( { $field => $value } );
    }
    return @ret;
}

# The four public entry points differ only in the Centcom query field; the
# duplicated bodies of the original are collapsed into _find_hosts_by.
# ($rr is the libcrange request object; unused here but part of the API.)
sub centcom_status      { my ( $rr, $range ) = @_; return _find_hosts_by( 'status',      $range ); }
sub centcom_facility    { my ( $rr, $range ) = @_; return _find_hosts_by( 'facility',    $range ); }
sub centcom_services    { my ( $rr, $range ) = @_; return _find_hosts_by( 'services',    $range ); }
sub centcom_environment { my ( $rr, $range ) = @_; return _find_hosts_by( 'environment', $range ); }

1;
square/libcrange
root/var/libcrange/perl/LibrangeCentcom.pm
Perl
bsd-3-clause
1,224
#!perl -w
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# Reads an NSS certdata.txt-style object description from ARGV/stdin and
# emits the corresponding generated C source for the builtins PKCS#11
# module (types, items and object tables included by builtins.h).

use strict;

my %constants;          # CK_* constant name -> C "static const" declaration
my $count = 0;          # number of objects seen (a new CKA_CLASS starts one)
my $o;
my @objects = ();       # $objects[$i][$j] = [attr-type, value-expr, size-expr]
my @objsize;            # $objsize[$i] = attribute count of object $i

# Constants that are always emitted, whether or not the input mentions them.
$constants{CKO_DATA} = "static const CK_OBJECT_CLASS cko_data = CKO_DATA;\n";
$constants{CK_TRUE} = "static const CK_BBOOL ck_true = CK_TRUE;\n";
$constants{CK_FALSE} = "static const CK_BBOOL ck_false = CK_FALSE;\n";

while(<>) {
  my @fields = ();
  my $size;

  # Strip trailing comments, but not '#' inside double-quoted strings.
  s/^((?:[^"#]+|"[^"]*")*)(\s*#.*$)/$1/;
  next if (/^\s*$/);

  # Tokenize into whitespace-separated fields, keeping quoted phrases intact.
  # This was taken from the perl faq #4.
  my $text = $_;
  push(@fields, $+) while $text =~ m{
      "([^\"\\]*(?:\\.[^\"\\]*)*)"\s?  # groups the phrase inside the quotes
    | ([^\s]+)\s?
    | \s
  }gx;
  push(@fields, undef) if substr($text,-1,1) eq '\s';

  # Skip the data-section marker line.
  if( $fields[0] =~ /BEGINDATA/ ) {
    next;
  }

  # MULTILINE values continue on following lines until a lone END.
  if( $fields[1] =~ /MULTILINE/ ) {
    $fields[2] = "";
    while(<>) {
      last if /END/;
      chomp;
      $fields[2] .= "\"$_\"\n";
    }
  }

  # UTF8 values: size is the decoded string length plus the terminating NUL.
  if( $fields[1] =~ /UTF8/ ) {
    if( $fields[2] =~ /^"/ ) {
      ;
    } else {
      $fields[2] = "\"" . $fields[2] . "\"";
    }

    # NOTE(review): string eval decodes the quoted/escaped literal; input is
    # assumed trusted (build-time data file).
    my $scratch = eval($fields[2]);

    $size = length($scratch) + 1; # null terminate
  }

  # OCTAL values: a string of \ooo escapes; size = number of backslashes.
  if( $fields[1] =~ /OCTAL/ ) {
    if( $fields[2] =~ /^"/ ) {
      ;
    } else {
      $fields[2] = "\"" . $fields[2] . "\"";
    }

    my $scratch = $fields[2];
    $size = $scratch =~ tr/\\//; # no null termination
  }

  # CK_* typed values: emit a lowercase static constant and reference it.
  if( $fields[1] =~ /^CK_/ ) {
    my $lcv = $fields[2];
    $lcv =~ tr/A-Z/a-z/;
    if( !defined($constants{$fields[2]}) ) {
      $constants{$fields[2]} = "static const $fields[1] $lcv = $fields[2];\n";
    }

    $size = "sizeof($fields[1])";
    $fields[2] = "&$lcv";
  }

  # Each CKA_CLASS attribute begins a new object.
  if( $fields[0] =~ /CKA_CLASS/ ) {
    $count++;
    $objsize[$count] = 0;
  }

  # Record the (attribute, value, size) triple on the current object.
  @{$objects[$count][$objsize[$count]++]} = ( "$fields[0]", $fields[2], "$size" );

  # print "$fields[0] | $fields[1] | $size | $fields[2]\n";
}

doprint();

# Debug helper: dump the parsed object table.  Not invoked in normal runs
# (only doprint() is called above).
sub dudump {
my $i;
  for( $i = 1; $i <= $count; $i++ ) {
    print "\n";
    $o = $objects[$i];
    my @ob = @{$o};
    my $l;
    my $j;
    for( $j = 0; $j < @ob; $j++ ) {
      $l = $ob[$j];
      my @a = @{$l};
      print "$a[0] ! $a[1] ! $a[2]\n";
    }
  }
}

# Emit the generated C file: header, constants, per-object type/item arrays,
# and the combined nss_builtins_data table.
sub doprint {
my $i;

  print <<EOD
/* THIS IS A GENERATED FILE */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 */

#ifndef BUILTINS_H
#include "builtins.h"
#endif /* BUILTINS_H */

EOD
    ;

  # NOTE(review): $b here is the package global also used by sort(); it works
  # because the sort block is empty-free here, but a lexical would be safer.
  foreach $b (sort values(%constants)) {
    print $b;
  }

  for( $i = 1; $i <= $count; $i++ ) {
    print "static const CK_ATTRIBUTE_TYPE nss_builtins_types_$i [] = {\n";
    $o = $objects[$i];
    my @ob = @{$o};
    my $j;
    for( $j = 0; $j < @ob; $j++ ) {
      my $l = $ob[$j];
      my @a = @{$l};
      print " $a[0]";
      if( $j+1 != @ob ) {
        print ", ";
      }
    }
    print "\n};\n";
  }

  for( $i = 1; $i <= $count; $i++ ) {
    print "static const NSSItem nss_builtins_items_$i [] = {\n";
    $o = $objects[$i];
    my @ob = @{$o};
    my $j;
    for( $j = 0; $j < @ob; $j++ ) {
      my $l = $ob[$j];
      my @a = @{$l};
      print "  { (void *)$a[1], (PRUint32)$a[2] }";
      if( $j+1 != @ob ) {
        print ",\n";
      } else {
        print "\n";
      }
    }
    print "};\n";
  }

  print "\nbuiltinsInternalObject\n";
  print "nss_builtins_data[] = {\n";

  for( $i = 1; $i <= $count; $i++ ) {
    print "  { $objsize[$i], nss_builtins_types_$i, nss_builtins_items_$i, {NULL} }";
    if( $i == $count ) {
      print "\n";
    } else {
      print ",\n";
    }
  }

  print "};\n";

  print "const PRUint32\n";
  print "nss_builtins_nObjects = $count;\n";
}
GaloisInc/hacrypto
src/C/nss/nss-3.16.1/nss/lib/ckfw/builtins/certdata.perl
Perl
bsd-3-clause
3,758
package Fixtures::ProfileParameter;
#
# Copyright 2015 Comcast Cable Communications Management, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
use Moose;
extends 'DBIx::Class::EasyFixture';
use namespace::autoclean;
use Digest::SHA1 qw(sha1_hex);

# Parameter links shared by the edge1 (profile 1) and mid1 (profile 2)
# cache profiles.  Each entry is [ fixture-name suffix, parameter id ];
# the fixture key is "<profile prefix><suffix>" except for parameter 30,
# which historically carried no prefix (see the note in the loop below).
my @cache_profile_links = (
    ( map { [ "key$_", $_ + 7 ] } 0 .. 15 ),
    [ 'url_sig_cdl-c2.config_location',                 23 ],
    [ 'error_url',                                      24 ],
    [ 'CONFIG-proxy.config.allocator.debug_filter',     25 ],
    [ 'CONFIG-proxy.config.allocator.enable_reclaim',   26 ],
    [ 'CONFIG-proxy.config.allocator.max_overage',      27 ],
    [ 'CONFIG-proxy.config.diags.show_location',        28 ],
    [ 'CONFIG-proxy.config.http.cache.allow_empty_doc', 29 ],
    [ 'LOCAL-proxy.config.cache.interim.storage',       30 ],
    [ 'CONFIG-proxy.config.http.parent_proxy.file',     31 ],
    [ '12M_location',                                   32 ],
    [ 'cacheurl_location',                              33 ],
    [ 'ip_allow_location',                              34 ],
    [ 'astats_over_http.so',                            35 ],
    [ 'crontab_root_location',                          36 ],
    [ 'hdr_rw_cdl-c2.config_location',                  37 ],
    [ '50-ats.rules_location',                          38 ],
    [ 'parent.config_location',                         39 ],
    [ 'remap.config_location',                          40 ],
    [ 'drop_qstring.config_location',                   41 ],
    [ 'LogFormat.Format',                               42 ],
    [ 'LogFormat.Name',                                 43 ],
    [ 'LogObject.Format',                               44 ],
    [ 'LogObject.Filename',                             45 ],
    [ 'cache.config_location',                          46 ],
    [ 'CONFIG-proxy.config.cache.control.filename',     47 ],
    [ 'regex_revalidate.so',                            48 ],
    [ 'regex_revalidate.config_location',               49 ],
    [ 'hosting.config_location',                        50 ],
    [ 'volume.config_location',                         51 ],
    [ 'allow_ip',                                       52 ],
    [ 'allow_ip6',                                      53 ],
    [ 'record_types',                                   54 ],
    [ 'astats.config_location',                         55 ],
    [ 'astats.config_path',                             56 ],
    [ 'storage.config_location',                        57 ],
    [ 'Drive_Prefix',                                   58 ],
    [ 'Drive_Letters',                                  59 ],
    [ 'Disk_Volume',                                    60 ],
    [ 'CONFIG-proxy.config.hostdb.storage_size',        61 ],
);

my %definition_for;

# Register one ProfileParameter fixture.  A lexical code ref (not a named
# sub) so the class's method set is unchanged.
my $add = sub {
    my ( $fixture_name, $profile, $parameter ) = @_;
    $definition_for{$fixture_name} = {
        new   => 'ProfileParameter',
        using => { profile => $profile, parameter => $parameter },
    };
};

# domain_name fixtures: parameter 3 attached to several profiles.
$add->( 'domain_name', 1, 3 );
$add->( "domain_name$_", $_, 3 ) for 2, 3, 5, 6, 7;

# rascal_properties fixtures (profile 2).
# NOTE(review): the original table declared the key "rascal_properties2"
# twice (parameter 5, then parameter 6); the later entry won, so there is
# no fixture for parameter 5.  That effective behavior is preserved here.
$add->( 'rascal_properties1', 2, 4 );
$add->( 'rascal_properties2', 2, 6 );

# edge1 (profile 1) and mid1 (profile 2) cache-profile parameters.
# NOTE(review): the parameter-30 fixture name has no profile prefix, so
# the mid1 pass overwrites the edge1 entry — matching the original table,
# where the duplicated 'LOCAL-proxy.config.cache.interim.storage' key left
# only the profile-2 fixture visible.
for my $profile_spec ( [ 'edge1_', 1 ], [ 'mid1_', 2 ] ) {
    my ( $prefix, $profile ) = @{$profile_spec};
    for my $link (@cache_profile_links) {
        my ( $suffix, $parameter ) = @{$link};
        my $fixture_name = $parameter == 30 ? $suffix : $prefix . $suffix;
        $add->( $fixture_name, $profile, $parameter );
    }
}

# Look up a single fixture definition by name (DBIx::Class::EasyFixture hook).
sub get_definition {
    my ( $self, $name ) = @_;

    return $definition_for{$name};
}

# All known fixture names, in no particular order.
sub all_fixture_names {
    return keys %definition_for;
}

__PACKAGE__->meta->make_immutable;

1;
smalenfant/traffic_control
traffic_ops/app/lib/Fixtures/ProfileParameter.pm
Perl
apache-2.0
20,607
=pod =head1 NAME DH_new, DH_free - allocate and free DH objects =head1 SYNOPSIS #include <openssl/dh.h> DH* DH_new(void); void DH_free(DH *dh); =head1 DESCRIPTION DH_new() allocates and initializes a B<DH> structure. DH_free() frees the B<DH> structure and its components. The values are erased before the memory is returned to the system. If B<dh> is NULL nothing is done. =head1 RETURN VALUES If the allocation fails, DH_new() returns B<NULL> and sets an error code that can be obtained by L<ERR_get_error(3)>. Otherwise it returns a pointer to the newly allocated structure. DH_free() returns no value. =head1 SEE ALSO L<dh(3)>, L<ERR_get_error(3)>, L<DH_generate_parameters(3)>, L<DH_generate_key(3)> =head1 COPYRIGHT Copyright 2000-2016 The OpenSSL Project Authors. All Rights Reserved. Licensed under the OpenSSL license (the "License"). You may not use this file except in compliance with the License. You can obtain a copy in the file LICENSE in the source distribution or at L<https://www.openssl.org/source/license.html>. =cut
openweave/openweave-core
third_party/openssl/openssl/doc/crypto/DH_new.pod
Perl
apache-2.0
1,060
#!/usr/bin/perl
#
# Reads one number from STDIN and prints its Fibonacci number, then reads
# a second number and prints its factorial.  Non-numeric input is skipped
# silently; negative input prints a warning instead of a value.

use 5.010;
use strict;
use warnings;

use Scalar::Util qw(looks_like_number);

# Print "Fibonacci $n = F(n)" where F(0)=0, F(1)=F(2)=1.
# Negative input prints a warning and returns without a value.
sub fibonacci {
    my ($n) = @_;

    if ( $n < 0 ) {
        # Fix: original message had the typo "Cann't".
        print "Warning! Can't count";
        return;
    }

    # Iterate the pair (F(k-1), F(k)) up from (F(0), F(1)) = (0, 1).
    my ( $prev, $curr ) = ( 0, 1 );
    for ( 2 .. $n ) {
        ( $prev, $curr ) = ( $curr, $prev + $curr );
    }
    $curr = 0 if $n == 0;    # F(0) = 0, not the loop's seed value

    print "Fibonacci $n = $curr";
    return;
}

# Print "Factorial $n = n!" (0! == 1 falls out of the empty loop).
# Negative input prints a warning and returns without a value.
sub factorial {
    my ($n) = @_;

    if ( $n < 0 ) {
        # Fix: original message had the typo "Cann't".
        print "Warning! Can't count";
        return;
    }

    my $result = 1;
    $result *= $_ for 1 .. $n;

    print "Factorial $n = $result";
    return;
}

# Entry point: prompt for the two inputs and dispatch to the helpers.
sub Main {
    print "Fibonacci: ";
    chomp( my $fib_input = <STDIN> );
    fibonacci($fib_input) if looks_like_number($fib_input);
    print "\n";

    print "Factorial: ";
    chomp( my $fact_input = <STDIN> );
    factorial($fact_input) if looks_like_number($fact_input);
    print "\n";
}

Main();

__END__
dimir2/hse12pi2-scripts
KolchinE/Perl/Hw1.pl
Perl
mit
1,002
# Empty subclass marker: a Municipality is a Territory with no behavior of
# its own — everything is inherited from O2CMS::Obj::Territory.
package O2CMS::Obj::Territory::Municipality;

use strict;

use base 'O2CMS::Obj::Territory';

1;
haakonsk/O2-CMS
lib/O2CMS/Obj/Territory/Municipality.pm
Perl
mit
96
package treesequal;

# Takes one file as a command line argument, which contains two newick strings
# ASSUMES TREES ARE ALL BINARY! Dies otherwise
# Uses the data structures of Day 1985 for O(n) strict consensus.
# The two tree structures, t1 and t2, are package globals and are accessed by various functions.
#
# NOTE(review): the module relies on many undeclared package globals
# ($name, @tokens, $tokens_ix, @XL, @XR, @taxaAll, @taxaT, $gNodeIX,
# $gLeafIX, $tok, $nodeRef); enabling "use strict" would require declaring
# them all with "our".  Left as-is here — comments only.

#use strict;

# The two parsed trees (hash refs built by treeInit).  File-scoped so the
# recursive helpers can reach them without parameter passing.
my ($t1,$t2);

sub treesequal # reads two newick strings passed as args
{
# Parse tree 1, compute its leaf-interval labels, and build the X table
# (Day 1985, eq. 1) that encodes its clusters.
$t1 = treeInit($_[0]);
min_maxLeafID($t1->{ROOT});
setupX($t1->{ROOT});
# Parse tree 2 and compare it against tree 1's X table.  All mismatch
# checks die inside the eval; $@ carries the reason.
$t2 = treeInit($_[1]);
eval {
    min_maxLeafID_OTHER($t2->{ROOT}); # this will throw an exception if there is a label mismatch between trees! so that's good!
    if ($t1->{NLEAVES} != $t2->{NLEAVES} ) {die "Trees have different numbers of taxa\n"};
    compareTrees($t2->{ROOT});
};
# Tear down both trees before reporting, success or failure.
deleteTree($t1);
deleteTree($t2);
if ($@) { return "Trees are different: $@\n"; }
else { return "Trees are the same\n" }
}

####################################################################
# Walk tree 2 and check that every internal node's cluster also occurs in
# tree 1, using tree 1's X table.  Dies on the first mismatch.
sub compareTrees
{
my ($n)=@_;
if (isLeaf($n)) { return };
# The cluster of $n, expressed as an interval of tree-1 leaf IDs.
my $L2 = $n->{minDescID};
my $R2 = $n->{maxDescID};
die "Mismatched cluster size\n" if ($n->{NLEAVES} != $R2 - $L2 + 1) ; # Exception! Mismatch based on cluster size
# OK, might be a matching cluster. If so, this pair, (L2,R2) will be stored in either row
# L2 or row R2 of the X matrix. Check for this and throw exception if not. See Day, p. 17.
die "Mismatched clade composition\n"
    if ( ! ( ( $XL[$L2] == $L2 && $XR[$L2] == $R2 ) || ( $XL[$R2] == $L2 && $XR[$R2] == $R2 ) ) );
for my $child (@{$n->{DESC}}) { compareTrees($child); }
return;
}

####################################################################
# Fill the X table (@XL/@XR) for tree 1: each internal node's leaf-ID
# interval is stored at row g(j), following Day 1985 eq. (1).
sub setupX
{
my ($n) = @_;
if (isLeaf($n)) {return}; # leave undefined for leaves
my $g;
if (isRoot($n)) # g(j) from eq(1) of Day 1985.
    { $g = $t1->{NLEAVES}; }
else
    {
    my $thisIX = $n->{ID};
    my $nextIX = $thisIX + 1; # f(j) + 1
    my $nextNode = $t1->{NODES}[$nextIX];
    if (isLeaf($nextNode)) { $g = $n->{maxDescID} } # select R of eq 1
    else { $g = $n->{minDescID} } # select R of eq 1
    }
$XL[$g]=$n->{minDescID};
$XR[$g]=$n->{maxDescID};
for my $child (@{$n->{DESC}}) { setupX($child); }
}

####################################################################
# Parse one newick string into the tree hash used everywhere else:
#   ROOT    - root node ref
#   LEAVES  - array ref of leaf nodes (unordered)
#   NODES   - array ref of all nodes indexed by postorder ID
#   LEAFH   - hash ref: leaf name -> leaf node
#   NLEAVES - leaf count
# Dies if the string is malformed or the tree is not binary.
sub treeInit
{
my ($newick)=@_;
my ($root,%treeH);
$name = '[\w\d\_\.]+|\'.*?\''; # notice the non-greedy match needed in the '...' format
@tokens = ($newick=~/($name|\,|\(|\)|\;)/g);
$tokens_ix=0;
parseCheck();
$tokens_ix=0;
#print "@tokens\n";
my $tok = next_tok();
if ($tok =~ /\(/) { $root = make_group(); }
else {die "First token is incorrect: $tok\n";}
initIDTree(1,1,$root);
initNLEAVES($root);
#min_maxLeafID($root);
undef @taxaAll; #used in descLeafNodes to return an array of all nodes indexed by the postorder node index $n->{ID}
my @tt = descLeafNodes($root); # an array of all the leaf nodes in no particular order
my %leafH = makeLeafHash(@tt); # keys are leaf names and values are nodes of the tree
#foreach (keys %leafH) {print "$_\n"}
$treeH{ROOT}=$root;
$treeH{LEAVES}=[@tt];
$treeH{NODES}=[@taxaAll]; # has size n+m+1 because we index from 1..n+m
$treeH{LEAFH}={%leafH};
$treeH{NLEAVES}= $root->{NLEAVES};
die ("Fatal error: tree is NOT binary\n") if (!isBinaryTree($root));
return \%treeH;
}

# Assign IDs over the whole tree starting from the given counters.
sub initIDTree # sets up postorder ids for all nodes [0,..,M-1], and leaf IDs on [0..N-1]. M nodes, N leaves
{
my ($startNodeIX,$startLeafIX,$root)=@_;
$gNodeIX=$startNodeIX;
$gLeafIX=$startLeafIX;
recurseIndexTree($root);
}

# Postorder walk that stamps each node with {ID} (all nodes) and {leafID}
# (leaves only), consuming the $gNodeIX/$gLeafIX counters.
sub recurseIndexTree
{
my ($nodeRef)=@_;
if (isLeaf($nodeRef)) {$nodeRef->{leafID}=$gLeafIX++}
for my $child (@{$nodeRef->{DESC}}) { recurseIndexTree($child); }
$nodeRef->{ID}=$gNodeIX++; # these are the post order ids
return ;
}

# Set {NLEAVES} on every node to the number of leaves in its subtree;
# returns that count for the node it was called on.
sub initNLEAVES
{
my ($nodeRef)=@_;
my $sum;
if (isLeaf($nodeRef)) {$sum=1} else {$sum=0};
for my $child (@{$nodeRef->{DESC}}) { $sum += initNLEAVES($child); }
$nodeRef->{NLEAVES}=$sum;
return $sum;
}

####################################################################
sub min_maxLeafID # sets the min and max leaf Id for all the leaves descended from this internal node
{
my ($n)=@_;
if (isLeaf($n))
    {
    my $id = $n->{leafID};
    $n->{minDescID}=$id;
    $n->{maxDescID}=$id;
    return ($id,$id);
    }
# Sentinels: any real leaf ID is below +10000000 and above -1.
my $tMin = +10000000;
my $tMax = -1;
for my $child (@{$n->{DESC}})
    {
    my ($min,$max) = min_maxLeafID($child);
    if ($min < $tMin) {$tMin = $min}
    if ($max > $tMax) {$tMax = $max}
    }
$n->{minDescID}=$tMin;
$n->{maxDescID}=$tMax;
return ($tMin,$tMax);
}

####################################################################
sub min_maxLeafID_OTHER # sets the min and max leaf Id for tree but using TREE1's leaf ID's
{
my ($n)=@_;
if (isLeaf($n))
    {
    my $thisName = $n->{NAME}; # name of this node on tree 2
    my $t1Node = $t1->{LEAFH}{$thisName}; # the corresponding node on tree 1
    if (!defined $t1Node) {die "Name not found on tree1 (trees different because of different label sets)\n"}
    my $id = $t1Node->{leafID}; #...and its leaf ID
    $n->{minDescID}=$id;
    $n->{maxDescID}=$id;
    return ($id,$id);
    }
my $tMin = +10000000;
my $tMax = -1;
for my $child (@{$n->{DESC}})
    {
    my ($min,$max) = min_maxLeafID_OTHER($child);
    if ($min < $tMin) {$tMin = $min}
    if ($max > $tMax) {$tMax = $max}
    }
$n->{minDescID}=$tMin;
$n->{maxDescID}=$tMax;
return ($tMin,$tMax);
}

####################################################################
sub makeLeafHash # a hash where keys are leaf names and values are nodes
{
my (%leafH)=();
foreach (@_) {$leafH{$_->{NAME}}=$_}
return %leafH;
}

####################################################################
# Names of all leaves below a node.
# NOTE(review): @_[0] is a one-element array slice; $_[0] is the intended
# form (same value here, but warns under "use warnings").
sub descLeafNames
{
return map {$_->{NAME}} descLeafNodes(@_[0]);
}

# All leaf nodes below $root; as a side effect, recursePush also fills the
# global @taxaAll indexed by postorder node ID (used by treeInit).
sub descLeafNodes
{
my ($root)=@_;
undef @taxaT;
recursePush($root);
return @taxaT;
}

sub recursePush
{
my ($nodeRef)=@_;
$taxaAll[$nodeRef->{ID}] = $nodeRef;
if (isLeaf($nodeRef)) {push @taxaT, $nodeRef;}
for my $child (@{$nodeRef->{DESC}}) { recursePush($child); }
return ;
}

####################################################################
# Recursive-descent parse of one parenthesized newick group, consuming
# tokens via next_tok until the matching ')'.
sub make_group
{
my $rootRef=nodeNew("");
while (my $tok = next_tok())
    {
    if ($tok =~ /$name/)
        {
        my $nodeRef = nodeNew($tok);
        addChild($rootRef,$nodeRef);
        }
    elsif ($tok =~ /\(/)
        {
        # NOTE(review): $nodeRef here is undeclared in this branch and so
        # is a package global under "no strict"; a "my" was likely intended.
        $nodeRef = make_group();
        addChild($rootRef,$nodeRef);
        }
    elsif ($tok =~ /\)/)
        {
        return $rootRef;
        }
    elsif ($tok =~ /,/)
        { next; }
    }
}

# **********************************************************
# Return the next token, or 0 when exhausted.
# NOTE(review): the test is ">= $#tokens" (last index), so the final token
# — the trailing ';' — is never returned; presumably intentional, verify.
sub next_tok
{
if ($tokens_ix >= $#tokens) {return 0}
return ($tokens[$tokens_ix++])
}

# **********************************************************
# Fresh node record; -1 marks "not yet assigned" fields, ANC == -1 marks root.
sub nodeNew
{
my ($name)=@_;
return {ID=>-1,leafID=>-1,minDescID=>-1,maxDescID=>-1,NAME=>$name,DESC=>[],ANC=>-1,NLEAVES=>-1};
}

# **********************************************************
sub addChild
{
my ($nodeRef,$childRef)=@_;
$childRef->{ANC}=$nodeRef;
push @{ ${$nodeRef}{DESC} },$childRef;
}

# **********************************************************
# True iff every internal node of the subtree has exactly two children.
sub isBinaryTree
{
my ($nodeRef)=@_;
if (isLeaf($nodeRef)) {return 1};
if (scalar @{$nodeRef->{DESC}} != 2) {return 0};
for my $child (@{$nodeRef->{DESC}})
    {
    if (!isBinaryTree($child)) {return 0};
    }
return 1 ;
}

# **********************************************************
# Debugging aid: dump every node's bookkeeping fields, preorder.
sub recursePrint
{
my ($n)=@_;
print "$n->{ID}: num leaves:$n->{NLEAVES}:taxon name = $n->{NAME}\tLeaf ID = $n->{leafID} MinDescID = $n->{minDescID} MaxDescId = $n->{maxDescID}\n";
for my $child (@{$n->{DESC}}) { recursePrint($child); }
return ;
}

# **********************************************************
sub isLeaf
{
my ($nodeRef)=@_;
if (scalar @{$nodeRef->{DESC}} == 0) {return 1} else {return 0}
}

# A root is a node whose ANC is still the -1 sentinel from nodeNew.
# NOTE(review): non-root ANC is a hash ref, compared numerically against -1
# (the ref numifies to its address); works in practice but is fragile.
sub isRoot
{
my ($nodeRef)=@_;
if ($nodeRef->{ANC} == -1) {return 1} else {return 0}
}

#**********************************************************
# Sanity-check the token stream: dies on unbalanced parentheses.
sub parseCheck
{
my ($left,$right,$commas,$nTax);
for $tok (@tokens)
    {
    if ($tok =~ /\(/) {$left++};
    if ($tok =~ /\)/) {$right++};
    if ($tok =~ /$name/) {$nTax++};
    if ($tok =~ /\,/) {$commas++};
    }
#print "ntaxa=$nTax,left=$left, right=$right,commas=$commas\n";
die "Unmatched parens in newick string\n" if ($left != $right);
}

# **********************************************************
# Free a whole tree hash: tear down the node structure, then the arrays
# and hashes that hold references into it.
sub deleteTree
{
my ($TreeRef)=@_;
deleteTreeStructure($TreeRef->{ROOT});
undef @{$TreeRef->{LEAVES}};
undef @{$TreeRef->{NODES}};
undef %{$TreeRef->{LEAFH}};
undef $TreeRef->{ROOT};
undef $TreeRef;
}

# Postorder teardown of every node's fields (breaks parent/child ref cycles
# so the nodes can actually be garbage collected).
sub deleteTreeStructure
{
my ($nodeRef)=@_;
for my $child (@{$nodeRef->{DESC}}) { deleteTreeStructure($child); }
undef $nodeRef->{DESC};
undef $nodeRef->{ID};
undef $nodeRef->{leafID};
undef $nodeRef->{minDescID};
undef $nodeRef->{maxDescID};
undef $nodeRef->{ANC};
undef $nodeRef->{NAME};
undef $nodeRef->{NLEAVES};
undef $nodeRef;
return ;
}

# **********************************************************
1;
zwickl/terraphy
examples/perlScripts/treesequal.pm
Perl
mit
8,838
package JSON::WebToken::Crypt::RSA;

use strict;
use warnings;
use parent 'JSON::WebToken::Crypt';

use Crypt::OpenSSL::RSA ();

# Maps a JWT "alg" value to the Crypt::OpenSSL::RSA configuration method
# that selects the matching digest (or, for RSA1_5, the padding scheme).
our $ALGORITHM2SIGNING_METHOD_MAP = {
    RS256  => 'use_sha256_hash',
    RS384  => 'use_sha384_hash',
    RS512  => 'use_sha512_hash',
    RSA1_5 => 'use_pkcs1_padding',
};

# Sign $message with the PEM-encoded private key in $key, configured for
# $algorithm.  Returns the raw binary signature.
sub sign {
    my ( $class, $algorithm, $message, $key ) = @_;

    my $rsa       = Crypt::OpenSSL::RSA->new_private_key($key);
    my $configure = $ALGORITHM2SIGNING_METHOD_MAP->{$algorithm};
    $rsa->$configure;

    return $rsa->sign($message);
}

# Check $signature over $message against the PEM-encoded public key in
# $key, configured for $algorithm.  Returns 1 when valid, 0 otherwise.
sub verify {
    my ( $class, $algorithm, $message, $key, $signature ) = @_;

    my $rsa       = Crypt::OpenSSL::RSA->new_public_key($key);
    my $configure = $ALGORITHM2SIGNING_METHOD_MAP->{$algorithm};
    $rsa->$configure;

    return $rsa->verify( $message, $signature ) ? 1 : 0;
}

1;
__END__
movabletype/mt-plugin-google-openid-connect
plugins/GoogleOpenIDConnect/extlib/JSON/WebToken/Crypt/RSA.pm
Perl
mit
870
=head1 NAME SGN::Controller::AJAX::Image - image ajax requests =head1 DESCRIPTION Implements the following endpoints: GET /ajax/image/<image_id> GET /ajax/image/<image_id>/stock/<stock_id>/display_order POST /ajax/image/<image_id>/stock/<stock_id>/display_order/<display_order> GET /ajax/image/<image_id>/locus/<locus_id>/display_order POST /ajax/image/<image_id>/locus/<locus_id>/display_order/<display_order> =head1 AUTHOR Lukas Mueller <lam87@cornell.edu> =cut package SGN::Controller::AJAX::Image; use Moose; BEGIN { extends 'Catalyst::Controller::REST' }; __PACKAGE__->config( default => 'application/json', stash_key => 'rest', map => { 'application/json' => 'JSON' }, ); # parse /ajax/image/<image_id> # sub basic_ajax_image :Chained('/') PathPart('ajax/image') CaptureArgs(1) ActionClass('REST') { } sub basic_ajax_image_GET { my $self = shift; my $c = shift; $c->stash->{image_id} = shift; $c->stash->{image} = SGN::Image->new($c->dbc->dbh(), $c->stash->{image_id}); } sub basic_ajax_image_POST { my $self = shift; my $c = shift; $c->stash->{image_id} = shift; $c->stash->{image} = SGN::Image->new($c->dbc->dbh(), $c->stash->{image_id}); } # endpoint /ajax/image/<image_id> # sub image_info :Chained('basic_ajax_image') PathPart('') Args(0) ActionClass('REST') {} sub image_info_GET { my $self = shift; my $c = shift; my @display_order_info = $c->stash->{image}->get_display_order_info(); my $response = { thumbnail => $c->stash->{image}->get_image_url("thumbnail"), small => $c->stash->{image}->get_image_url("small"), medium => $c->stash->{image}->get_image_url("medium"), large => $c->stash->{image}->get_image_url("large"), sp_person_id => $c->stash->{image}->get_sp_person_id(), md5sum => $c->stash->{image}->get_md5sum(), display_order => \@display_order_info }; $c->stash->{rest} = $response; } # parse /ajax/image/<image_id>/stock/<stock_id> # sub image_stock_connection :Chained('basic_ajax_image') PathPart('stock') CaptureArgs(1) ActionClass('REST') { } sub 
image_stock_connection_GET { my $self = shift; my $c = shift; my $stock_id = shift; $self->image_stock_connection_POST($c, $stock_id); } sub image_stock_connection_POST { my $self = shift; my $c = shift; $c->stash->{stock_id} = shift; } # GET endpoint /ajax/image/<image_id>/stock/<stock_id>/display_order # sub get_image_stock_display_order : Chained('image_stock_connection') PathPart('display_order') Args(0) ActionClass('REST') { } sub get_image_stock_display_order_GET { my $self = shift; my $c = shift; my $do = $c->stash->{image}->get_stock_page_display_order($c->stash->{stock_id}); $c->stash->{rest} = { stock_id => $c->stash->{stock_id}, image_id => $c->stash->{image_id}, display_order => $do, }; } # POST endpoint /ajax/image/<image_id>/stock/<stock_id>/display_order/<display_order> # sub add_image_stock_display_order :Chained('image_stock_connection') PathPart('display_order') Args(1) ActionClass('REST') { } sub add_image_stock_display_order_GET { my $self = shift; my $c = shift; my $display_order = shift; $self->add_image_stock_display_order_POST($c, $display_order); } sub add_image_stock_display_order_POST { my $self = shift; my $c = shift; my $display_order = shift; if (!$c->user()) { $c->stash->{rest} = { error => "you need to be logged in to modify the display order of images"}; return; } if (!$c->user()->check_roles("curator") && $c->stash->{image}->get_sp_person_id() != $c->user()->get_object()->get_sp_person_id()) { $c->stash->{rest} = { error => "You cannot modify an image that you don't own.\n" }; return; } my $error = $c->stash->{image}->set_stock_page_display_order($c->stash->{stock_id}, $display_order); if ($error) { $c->stash->{rest} = { error => $error }; } else { $c->stash->{rest} = { success => 1 }; } } # parse /ajax/image/<image_id>/locus/<locus_id> # sub image_locus_connection :Chained('basic_ajax_image') PathPart('locus') CaptureArgs(1) ActionClass('REST') { } sub image_locus_connection_GET { my $self = shift; my $c = shift; my $stock_id = 
shift; $self->image_locus_connection_POST($c, $stock_id); } sub image_locus_connection_POST { my $self = shift; my $c = shift; $c->stash->{locus_id} = shift; if (!$c->user()) { $c->stash->{rest} = { error => "you need to be logged in to modify the display order of images"}; return; } if (!$c->user()->check_roles("curator") && $c->stash->{image}->get_sp_person_id() != $c->user()->get_object()->get_sp_person_id()) { $c->stash->{rest} = { error => "You cannot modify an image that you don't own.\n" }; return; } } # GET endpoint /ajax/image/<image_id>/locus/<locus_id>/display_order # sub get_image_locus_display_order :Chained('image_locus_connection') PathPart('display_order') Args(0) ActionClass('REST') { } sub get_image_locus_display_order_GET { my $self = shift; my $c = shift; my $do = $c->stash->{image}->get_locus_page_display_order($c->stash->{locus_id}); $c->stash->{rest} = { locus_id => $c->stash->{locus_id}, image_id => $c->stash->{image_id}, display_order => $do, }; } # POST endpoint /ajax/image/<image_id>/locus/<locus_id>/display_order/<display_order> # sub add_image_locus_display_order :Chained('image_locus_connection') PathPart('display_order') Args(1) ActionClass('REST') { } sub add_image_locus_display_order_GET { my $self = shift; my $c = shift; my $display_order = shift; $self->add_image_locus_display_order_POST($c, $display_order); } sub add_image_locus_display_order_POST { my $self = shift; my $c = shift; my $display_order = shift; my $error = $c->stash->{image}->set_locus_page_display_order($c->stash->{image_id}, $display_order); if ($error) { $c->stash->{rest} = { error => $error }; } else { $c->stash->{rest} = { success => 1 }; } } sub image_metadata_store { my $self = shift; my $params = shift; my $image_dir = shift; my $user_id = shift; my $user_type = shift; my $image_id = shift; my $page_size = $self->page_size; my $page = $self->page; my $status = $self->status; my $dbh = $self->bcs_schema()->storage()->dbh(); my $imageName = 
$params->{imageName} ? $params->{imageName}[0] : ""; my $description = $params->{description} ? $params->{description}[0] : ""; my $imageFileName = $params->{imageFileName} ? $params->{imageFileName}[0] : ""; my $mimeType = $params->{mimeType} ? $params->{mimeType}[0] : undef; my $observationUnitDbId = $params->{observationUnitDbId} ? $params->{observationUnitDbId}[0] : undef; my $descriptiveOntologyTerms_arrayref = $params->{descriptiveOntologyTerms} || (); my $observationDbIds_arrayref = $params->{observationDbIds} || (); # metadata store for the rest not yet implemented my $imageFileSize = $params->{imageFileSize} ? $params->{imageFileSize}[0] : undef; my $imageHeight = $params->{imageHeight} ? $params->{imageHeight}[0] : (); my $imageWidth = $params->{imageWidth} ? $params->{imageWidth}[0] : (); my $copyright = $params->{copyright} || ""; my $imageTimeStamp = $params->{imageTimeStamp} || ""; my $imageLocation_hashref = $params->{imageLocation} || (); my $additionalInfo_hashref = $params->{additionalInfo} || (); # Prechecks before storing # Check that our observation unit db id exists. If not return error. if ($observationUnitDbId) { my $stock = $self->bcs_schema()->resultset("Stock::Stock")->find({ stock_id => $observationUnitDbId }); if (! defined $stock) { return CXGN::BrAPI::JSONResponse->return_error($self->status, 'Stock id is not valid. Cannot generate image metadata'); } } # Check that the cvterms are valid before continuing my @cvterm_ids; foreach (@$descriptiveOntologyTerms_arrayref) { my $cvterm_id; # If is like number, search for id if (looks_like_number($_)) { # Check if the trait exists $cvterm_id = SGN::Model::Cvterm->find_trait_by_id($self->bcs_schema(), $_); } else { # else search for string $cvterm_id = SGN::Model::Cvterm->find_trait_by_name($self->bcs_schema(), $_); } if (!defined $cvterm_id) { return CXGN::BrAPI::JSONResponse->return_error($self->status, sprintf('Descriptive ontology term %s not found. 
Cannot generate image metadata', $_)); } push(@cvterm_ids, $cvterm_id); } # Check that the image type they want to pass in is supported. # If it is not converted, and is the same after _get_extension, it is not supported. my $extension_type = _get_extension($mimeType); if ($extension_type eq $mimeType) { return CXGN::BrAPI::JSONResponse->return_error($self->status, sprintf('Mime type %s is not supported.', $mimeType)); } # Check if an image id was passed in, and if that image exists my $image_obj = CXGN::Image->new( dbh=>$dbh, image_dir => $image_dir, image_id => $image_id); if ($image_id && ! defined $image_obj->get_create_date()) { return CXGN::BrAPI::JSONResponse->return_error($self->status, sprintf('Image with id of %s, does not exist', $image_id)); } # Check that the observationDbIds they passed exists foreach (@$observationDbIds_arrayref) { my $phenotype = $self->bcs_schema()->resultset("Phenotype::Phenotype")->find({ phenotype_id => $_ }); if (! defined $phenotype) { return CXGN::BrAPI::JSONResponse->return_error($self->status, sprintf('Observation with id of %s, does not exist', $_)); } } # End of prechecks # Assign image properties unless ($image_id) { $image_obj->set_sp_person_id($user_id); } $image_obj->set_name($imageName); $image_obj->set_description($description); $image_obj->set_original_filename($imageFileName); $image_obj->set_file_ext($extension_type); # Save the image to the db $image_id = $image_obj->store(); my $image = SGN::Image->new($self->bcs_schema()->storage->dbh(), $image_id); # Remove cvterms so we can reassign them later my @prev_cvterms = $image->get_cvterms(); foreach (@prev_cvterms) { $image->remove_associated_cvterm($_->cvterm_id); } # Store desceriptiveOntologyTerms in the cvterm after finding the cvterm here. foreach (@cvterm_ids) { $image->associate_cvterm($_); } # Clear previously associated stocks. 
my @stocks = $image->get_stocks(); foreach(@stocks){ $image->remove_stock($_->stock_id); } # Associate our stock with the image, if a stock_id was provided. if ($observationUnitDbId) { my $person = CXGN::People::Person->new($dbh, $user_id); my $user_name = $person->get_username; $image->associate_stock($observationUnitDbId, $user_name); } # Clear previously associated phenotypes $image->remove_associated_phenotypes(); # Associate the image with the observations specified foreach (@$observationDbIds_arrayref) { my $nd_experiment_phenotype = $self->bcs_schema()->resultset("NaturalDiversity::NdExperimentPhenotype")->find({ phenotype_id => $_ }); if ($nd_experiment_phenotype) { my %image_hash = ($nd_experiment_phenotype->nd_experiment_id => $image_id); $image->associate_phenotype(\%image_hash); } else { return CXGN::BrAPI::JSONResponse->return_error($self->status, sprintf('Cannot find experiment associated with observation with id of %s, does not exist', $_)); } } my $url = ""; my @image_ids; push @image_ids, $image_id; my $image_search = CXGN::Image::Search->new({ bcs_schema=>$self->bcs_schema(), people_schema=>$self->people_schema(), phenome_schema=>$self->phenome_schema(), image_id_list=>\@image_ids }); my ($search_result, $total_count) = $image_search->search(); my %result; foreach (@$search_result) { # Get the cv terms assigned my $image = SGN::Image->new($self->bcs_schema()->storage->dbh(), $_->{'image_id'}); my @cvterms = $image->get_cvterms(); # Process cvterms my @cvterm_names; foreach (@cvterms) { if ($_->name) { push(@cvterm_names, $_->name); } } # Get the observation variable db ids my @observationDbIds; my $observations_array = $_->{'observations_array'}; foreach (@$observations_array) { my $observationDbId = $_->{'phenotype_id'}; push @observationDbIds, $observationDbId } # Construct the response %result = ( additionalInfo => { observationLevel => $_->{'stock_type_name'}, observationUnitName => $_->{'stock_uniquename'}, }, copyright => 
$_->{'image_username'} . " " . substr($_->{'image_modified_date'},0,4), description => $_->{'image_description'}, descriptiveOntologyTerms => \@cvterm_names, imageDbId => $_->{'image_id'}, imageFileName => $_->{'image_original_filename'}, # Since breedbase doesn't care what file size is saved when the actual saving happens, # just return what the user passes in. imageFileSize => $imageFileSize, imageHeight => $imageHeight, imageWidth => $imageWidth, imageName => $_->{'image_name'}, imageTimeStamp => $_->{'image_modified_date'}, imageURL => $url, mimeType => _get_mimetype($_->{'image_file_ext'}), observationUnitDbId => $_->{'stock_id'}, # location and linked phenotypes are not yet available for images in the db imageLocation => { geometry => { coordinates => [], type=> '', }, type => '', }, observationDbIds => [@observationDbIds], ); } my $total_count = 1; my $pagination = CXGN::BrAPI::Pagination->pagination_response($total_count,$page_size,$page); return CXGN::BrAPI::JSONResponse->return_success( \%result, $pagination, undef, $self->status()); } sub image_data_store { my $self = shift; my $image_dir = shift; my $image_id = shift; my $inputs = shift; my $content_type = shift; print STDERR "Image ID: $image_id. inputs to image metadata store: ".Dumper($inputs); # Get our image file extension type from the database my @image_ids; push @image_ids, $image_id; my $image_search = CXGN::Image::Search->new({ bcs_schema=>$self->bcs_schema(), people_schema=>$self->people_schema(), phenome_schema=>$self->phenome_schema(), image_id_list=>\@image_ids }); my ($search_result, $total_count) = $image_search->search(); my $file_extension = @$search_result[0]->{'image_file_ext'}; if (! 
defined $file_extension) { return CXGN::BrAPI::JSONResponse->return_error($self->status, sprintf('Unsupported image type, %s', $file_extension)); } my $tempfile = $inputs->filename(); my $file_with_extension = $tempfile.$file_extension; rename($tempfile, $file_with_extension); print STDERR "TEMP FILE : $tempfile\n"; # process image data through CXGN::Image... # my $cxgn_img = CXGN::Image->new(dbh=>$self->bcs_schema()->storage()->dbh(), image_dir => $image_dir, image_id => $image_id); eval { $cxgn_img->process_image($file_with_extension); }; if ($@) { print STDERR "An error occurred during image processing... $@\n"; } else { print STDERR "Image processed successfully.\n"; } my %result = ( image_id => $image_id); foreach (@$search_result) { my $sgn_image = SGN::Image->new($self->bcs_schema()->storage->dbh(), $_->{'image_id'}); my $page_obj = CXGN::Page->new(); my $hostname = $page_obj->get_hostname(); my $url = $hostname . $sgn_image->get_image_url('medium'); my $filename = $sgn_image->get_filename(); my $size = (stat($filename))[7]; my ($width, $height) = imgsize($filename); # Get the observation variable db ids my @observationDbIds; my $observations_array = $_->{'observations_array'}; foreach (@$observations_array) { my $observationDbId = $_->{'phenotype_id'}; push @observationDbIds, $observationDbId } %result = ( additionalInfo => { observationLevel => $_->{'stock_type_name'}, observationUnitName => $_->{'stock_uniquename'}, }, copyright => $_->{'image_username'} . " " . 
substr($_->{'image_modified_date'},0,4), description => $_->{'image_description'}, imageDbId => $_->{'image_id'}, imageFileName => $_->{'image_original_filename'}, imageFileSize => $size, imageHeight => $height, imageWidth => $width, imageName => $_->{'image_name'}, imageTimeStamp => $_->{'image_modified_date'}, imageURL => $url, mimeType => _get_mimetype($_->{'image_file_ext'}), observationUnitDbId => $_->{'stock_id'}, # location and linked phenotypes are not yet available for images in the db imageLocation => { geometry => { coordinates => [], type=> '', }, type => '', }, observationDbIds => [@observationDbIds], ); } my $pagination = CXGN::BrAPI::Pagination->pagination_response(1, 10, 0); return CXGN::BrAPI::JSONResponse->return_success( \%result, $pagination, [], $self->status(), 'Image data store successful'); } sub _get_mimetype { my $extension = shift; my %mimetypes = ( '.jpg' => 'image/jpeg', '.JPG' => 'image/jpeg', '.jpeg' => 'image/jpeg', '.png' => 'image/png', '.gif' => 'image/gif', '.svg' => 'image/svg+xml', '.pdf' => 'application/pdf', '.ps' => 'application/postscript', ); if ( defined $mimetypes{$extension} ) { return $mimetypes{$extension}; } else { return $extension; } } sub _get_extension { my $mimetype = shift; my %extensions = ( 'image/jpeg' => '.jpg', 'image/png' => '.png', 'image/gif' => '.gif', 'image/svg+xml' => '.svg', 'application/pdf' => '.pdf', 'application/postscript' => '.ps' ); if ( defined $extensions{$mimetype} ) { return $extensions{$mimetype}; } else { return $mimetype; } } 1;
solgenomics/sgn
lib/SGN/Controller/AJAX/Image.pm
Perl
mit
19,245
#!/usr/bin/perl
# Demonstration script for Perl loop quirks.  Each section prints a label
# followed by observed values.  NOTE: the script pauses twice on a bare <>;
# press Enter to continue past each pause.

# 1) The foreach loop variable is implicitly localized: $a is restored to
#    its pre-loop value (1) after the loop finishes.
print "\$a value restore after loop : ";
$a = 1;
print $a;
foreach $a(2, 3, 4) {
    print $a;
}
print $a;

# 2) The loop variable is an alias for each element, so assigning to it
#    modifies the underlying array in place (the 2 becomes 4).
print "\nchange loop variable value, result in value change : ";
@a = (1, 2, 3);
print @a;
foreach $c(@a) {
    $c = 4 if $c == 2;
}
print @a;

# 3) foreach can iterate over an array slice (elements 1 and 2 only).
print "\nable to access part of array : ";
@a = (1, 2, 3);
print @a;
foreach $c(@a[1, 2]) {
    print $c;
}

# 4) With no explicit loop variable, foreach implicitly uses $_.
print "\ndefault value of loop variable \$_:";
@a = (1, 2, 3);
foreach (@a) {
    print "$_";
}

# 5) 'for' and 'foreach' are interchangeable keywords: here a C-style
#    loop is written with the 'foreach' keyword.
print "\nfor and foreach are in common use : ";
foreach($i = 1; $i < 5; $i++) {
    print $i;
}
<>;

# 6) A label on the outer loop lets 'last LABEL' exit both nested loops
#    at once (triggered when $i reaches 3).
print "use label : ";
LABEL:for($i = 1; $i < 5; $i++) {
    for($j = 1; $j < 5; $j++) {
        print "$j ";
        last LABEL if $i == 3;
    }
}
print "\$i = $i";
<>;

# 7) A 'continue' block runs after every iteration, including one ended
#    by 'next'; $f marks the iteration that was skipped ($a == 5) so the
#    continue block can report it.
print "continue:";
$a = 0;
while($a++ < 10) {
    ($f = 1, next) if $a == 5;
    print "$a ";
}
continue {
    (print (":$a in continue : "), $f = 0) if ($f==1);
}
doc-cloud/perl
control-structure/control-structure.pl
Perl
mit
870
package SQL::Statement::Util;

# Base class for SQL::Statement helper objects (parsed columns and
# function calls).  Provides runtime type identification for both
# subclasses defined below.
use strict;
use warnings;

# type() - return 'function' or 'column' according to the subclass.
# Falls through (returning a false value) for any other class, as the
# original implementation did.
sub type {
    my ($self) = @_;
    return 'function' if $self->isa('SQL::Statement::Util::Function');
    return 'column'   if $self->isa('SQL::Statement::Util::Column');
}

package SQL::Statement::Util::Column;
use base 'SQL::Statement::Util';

# new($class, $col_name, \@tables [, $display_name, $function])
#
# Represents one column reference.  $col_name may be a plain name, a
# qualified "table.column" string, or a hashref of the form
# { table => ..., column => ... }.  When exactly one table is in scope
# an unqualified column is attributed to it; otherwise the table is
# left undefined.
sub new {
    my $class        = shift;
    my $col_name     = shift;
    my $tables       = shift;
    my $display_name = shift || $col_name;
    my $function     = shift;

    # Keep the raw (possibly qualified) name before any unpacking, so
    # the qualified-name split below sees the original argument.
    my $table_name = $col_name;

    # Hashref form carries the table and column explicitly.
    if (ref $col_name eq 'HASH') {
        $tables   = [ $col_name->{"table"} ];
        $col_name = $col_name->{"column"};
    }

    my $num_tables = scalar @{ $tables };

    # Split a qualified name ("quoted"."col" or table.col) into table
    # and column parts; otherwise fall back to the single table in
    # scope, or no table at all.
    if ($table_name
        && (   $table_name =~ /^(".+")\.(.*)$/
            or $table_name =~ /^([^.]*)\.(.*)$/)) {
        $table_name = $1;
        $col_name   = $2;
    }
    elsif ($num_tables == 1) {
        $table_name = $tables->[0];
    }
    else {
        undef $table_name;
    }

    my $self = {
        name         => $col_name,
        table        => $table_name,
        display_name => $display_name,
        function     => $function,
    };
    return bless $self, $class;
}

# Read-only accessors.
sub function     { shift->{"function"} }
sub display_name { shift->{"display_name"} }
sub name         { shift->{"name"} }
sub table        { shift->{"table"} }

package SQL::Statement::Util::Function;
use base 'SQL::Statement::Util';

# new($class, $name, $sub_name, \@args)
#
# Represents a SQL function call.  $sub_name is the implementing Perl
# subroutine, optionally package-qualified; unqualified names (and the
# bare '::' prefix) default to package 'main'.
sub new {
    my ($class, $name, $sub_name, $args) = @_;

    my ($pkg, $sub) = $sub_name =~ /^(.*::)([^:]+$)/;
    if (!$sub) {
        $pkg = 'main';
        $sub = $sub_name;
    }
    $pkg = 'main' if $pkg eq '::';
    $pkg =~ s/::$//;

    my %newfunc = (
        name     => $name,
        sub_name => $sub,
        pkg_name => $pkg,
        args     => $args,
        type     => 'function',
    );
    return bless \%newfunc, $class;
}

# Read-only accessors.
sub name     { shift->{name} }
sub pkg_name { shift->{pkg_name} }
sub sub_name { shift->{sub_name} }
sub args     { shift->{args} }

# validate() - load the implementing module (unless it is one that is
# guaranteed to be present already) and confirm the subroutine exists.
# Dies with a diagnostic on failure; returns 1 on success.
sub validate {
    my ($self) = @_;
    my $pkg = $self->pkg_name;
    my $sub = $self->sub_name;
    $pkg =~ s~::~/~g;
    eval { require "$pkg.pm" }
        unless $pkg eq 'SQL/Statement/Functions'
            or $pkg eq 'main';
    die $@ if $@;
    $pkg =~ s~/~::~g;
    die "Can't find subroutine $pkg" . "::$sub\n" unless $pkg->can($sub);
    return 1;
}

# run(@args) - invoke the implementing subroutine as a class method of
# its package, passing through the caller's remaining arguments.
sub run {
    use SQL::Statement::Functions;
    my ($self) = shift;
    my $sub = $self->sub_name;
    my $pkg = $self->pkg_name;
    return $pkg->$sub(@_);
}
1;
carlgao/lenga
images/lenny64-peon/usr/share/perl5/SQL/Statement/Util.pm
Perl
mit
2,623
=head1 NAME WWW::Mechanize::Examples - Sample programs that use WWW::Mechanize =head1 SYNOPSIS Plenty of people have learned WWW::Mechanize, and now, you can too! Following are user-supplied samples of WWW::Mechanize in action. If you have samples you'd like to contribute, please send 'em to C<< <andy@petdance.com> >>. You can also look at the F<t/*.t> files in the distribution. Please note that these examples are not intended to do any specific task. For all I know, they're no longer functional because the sites they hit have changed. They're here to give examples of how people have used WWW::Mechanize. Note that the examples are in reverse order of my having received them, so the freshest examples are always at the top. =head2 Starbucks Density Calculator, by Nat Torkington Here's a pair of scripts from Nat Torkington, editor for O'Reilly Media and co-author of the I<Perl Cookbook>. =over 4 Rael [Dornfest] discovered that you can easily find out how many Starbucks there are in an area by searching for "Starbucks". So I wrote a silly scraper for some old census data and came up with some Starbucks density figures. There's no meaning to these numbers thanks to errors from using old census data coupled with false positives in Yahoo search (e.g., "Dodie Starbuck-Your Style Desgn" in Portland OR). But it was fun to waste a night on. Here are the top twenty cities in descending order of population, with the amount of territory each Starbucks has. E.g., A New York NY Starbucks covers 1.7 square miles of ground. 
New York, NY 1.7 Los Angeles, CA 1.2 Chicago, IL 1.0 Houston, TX 4.6 Philadelphia, PA 6.8 San Diego, CA 2.7 Detroit, MI 19.9 Dallas, TX 2.7 Phoenix, AZ 4.1 San Antonio, TX 12.3 San Jose, CA 1.1 Baltimore, MD 3.9 Indianapolis, IN 12.1 San Francisco, CA 0.5 Jacksonville, FL 39.9 Columbus, OH 7.3 Milwaukee, WI 5.1 Memphis, TN 15.1 Washington, DC 1.4 Boston, MA 0.5 =back C<get_pop_data> #!/usr/bin/perl -w use WWW::Mechanize; use Storable; $url = 'http://www.census.gov/population/www/documentation/twps0027.html'; $m = WWW::Mechanize->new(); $m->get($url); $c = $m->content; $c =~ m{<A NAME=.tabA.>(.*?)</TABLE>}s or die "Can't find the population table\n"; $t = $1; @outer = $t =~ m{<TR.*?>(.*?)</TR>}gs; shift @outer; foreach $r (@outer) { @bits = $r =~ m{<TD.*?>(.*?)</TD>}gs; for ($x = 0; $x < @bits; $x++) { $b = $bits[$x]; @v = split /\s*<BR>\s*/, $b; foreach (@v) { s/^\s+//; s/\s+$// } push @{$data[$x]}, @v; } } for ($y = 0; $y < @{$data[0]}; $y++) { $data{$data[1][$y]} = { NAME => $data[1][$y], RANK => $data[0][$y], POP => comma_free($data[2][$y]), AREA => comma_free($data[3][$y]), DENS => comma_free($data[4][$y]), }; } store(\%data, "cities.dat"); sub comma_free { my $n = shift; $n =~ s/,//; return $n; } C<plague_of_coffee> #!/usr/bin/perl -w use WWW::Mechanize; use strict; use Storable; $SIG{__WARN__} = sub {} ; # ssssssh my $Cities = retrieve("cities.dat"); my $m = WWW::Mechanize->new(); $m->get("http://local.yahoo.com/"); my @cities = sort { $Cities->{$a}{RANK} <=> $Cities->{$b}{RANK} } keys %$Cities; foreach my $c ( @cities ) { my $fields = { 'stx' => "starbucks", 'csz' => $c, }; my $r = $m->submit_form(form_number => 2, fields => $fields); die "Couldn't submit form" unless $r->is_success; my $hits = number_of_hits($r); # my $ppl = sprintf("%d", 1000 * $Cities->{$c}{POP} / $hits); # print "$c has $hits Starbucks. 
That's one for every $ppl people.\n"; my $density = sprintf("%.1f", $Cities->{$c}{AREA} / $hits); print "$c : $density\n"; } sub number_of_hits { my $r = shift; my $c = $r->content; if ($c =~ m{\d+ out of <b>(\d+)</b> total results for}) { return $1; } if ($c =~ m{Sorry, no .*? found in or near}) { return 0; } if ($c =~ m{Your search matched multiple cities}) { warn "Your search matched multiple cities\n"; return 0; } if ($c =~ m{Sorry we couldn.t find that location}) { warn "No cities\n"; return 0; } if ($c =~ m{Could not find.*?, showing results for}) { warn "No matches\n"; return 0; } die "Unknown response\n$c\n"; } =head2 pb-upload, by John Beppu This script takes filenames of images from the command line and uploads them to a www.photobucket.com folder. John Beppu, the author, says: =over 4 I had 92 pictures I wanted to upload, and doing it through a browser would've been torture. But thanks to mech, all I had to do was `./pb.upload *.jpg` and watch it do its thing. It felt good. If I had more time, I'd implement WWW::Photobucket on top of WWW::Mechanize. =back #!/usr/bin/perl -w -T use strict; use WWW::Mechanize; my $login = "login_name"; my $password = "password"; my $folder = "folder"; my $url = "http://img78.photobucket.com/albums/v281/$login/$folder/"; # login to your photobucket.com account my $mech = WWW::Mechanize->new(); $mech->get($url); $mech->submit_form( form_number => 1, fields => { password => $password }, ); die unless ($mech->success); # upload image files specified on command line foreach (@ARGV) { print "$_\n"; $mech->form_number(2); $mech->field('the_file[]' => $_); $mech->submit(); } =head2 listmod, by Ian Langworth Ian Langworth contributes this little gem that will bring joy to beleagured mailing list admins. It discards spam messages through mailman's web interface. 
#!/arch/unix/bin/perl use strict; use warnings; # # listmod - fast alternative to mailman list interface # # usage: listmod crew XXXXXXXX # die "usage: $0 <listname> <password>\n" unless @ARGV == 2; my ($listname, $password) = @ARGV; use CGI qw(unescape); use WWW::Mechanize; my $m = WWW::Mechanize->new( autocheck => 1 ); use Term::ReadLine; my $term = Term::ReadLine->new($0); # submit the form, get the cookie, go to the list admin page $m->get("https://lists.ccs.neu.edu/bin/admindb/$listname"); $m->set_visible( $password ); $m->click; # exit if nothing to do print "There are no pending requests.\n" and exit if $m->content =~ /There are no pending requests/; # select the first form and examine its contents $m->form_number(1); my $f = $m->current_form or die "Couldn't get first form!\n"; # get me the base form element for each email item my @items = map {m/^.+?-(.+)/} grep {m/senderbanp/} $f->param or die "Couldn't get items in first form!\n"; # iterate through items, prompt user, commit actions foreach my $item (@items) { # show item info my $sender = unescape($item); my ($subject) = [$f->find_input("senderbanp-$item")->value_names]->[1] =~ /Subject:\s+(.+?)\s+Size:/g; # prompt user my $choice = ''; while ( $choice !~ /^[DAX]$/ ) { print "$sender\: '$subject'\n"; $choice = uc $term->readline("Action: defer/accept/discard [dax]: "); print "\n\n"; } # set button $m->field("senderaction-$item" => {D=>0,A=>1,X=>3}->{$choice}); } # submit actions $m->click; =head2 ccdl, by Andy Lester Steve McConnell, author of the landmark I<Code Complete> has put up the chapters for the 2nd edition in PDF format on his website. I needed to download them to take to Kinko's to have printed. This little script did it for me. 
#!/usr/bin/perl -w use strict; use WWW::Mechanize; my $start = "http://www.stevemcconnell.com/cc2/cc.htm"; my $mech = WWW::Mechanize->new( autocheck => 1 ); $mech->get( $start ); my @links = $mech->find_all_links( url_regex => qr/\d+.+\.pdf$/ ); for my $link ( @links ) { my $url = $link->url_abs; my $filename = $url; $filename =~ s[^.+/][]; print "Fetching $url"; $mech->get( $url, ':content_file' => $filename ); print " ", -s $filename, " bytes\n"; } =head2 quotes.pl, by Andy Lester This was a script that was going to get a hack in I<Spidering Hacks>, but got cut at the last minute, probably because it's against IMDB's TOS to scrape from it. I present it here as an example, not a suggestion that you break their TOS. Last I checked, it didn't work because their HTML didn't match, but it's still good as sample code. #!/usr/bin/perl -w use strict; use WWW::Mechanize; use Getopt::Long; use Text::Wrap; my $match = undef; my $random = undef; GetOptions( "match=s" => \$match, "random" => \$random, ) or exit 1; my $movie = shift @ARGV or die "Must specify a movie\n"; my $quotes_page = get_quotes_page( $movie ); my @quotes = extract_quotes( $quotes_page ); if ( $match ) { $match = quotemeta($match); @quotes = grep /$match/i, @quotes; } if ( $random ) { print $quotes[rand @quotes]; } else { print join( "\n", @quotes ); } sub get_quotes_page { my $movie = shift; my $mech = new WWW::Mechanize; $mech->get( "http://www.imdb.com/search" ); $mech->success or die "Can't get the search page"; $mech->submit_form( form_number => 2, fields => { title => $movie, restrict => "Movies only", }, ); my @links = $mech->find_all_links( url_regex => qr[^/Title] ) or die "No matches for \"$movie\" were found.\n"; # Use the first link my ( $url, $title ) = @{$links[0]}; warn "Checking $title...\n"; $mech->get( $url ); my $link = $mech->find_link( text_regex => qr/Memorable Quotes/i ) or die qq{"$title" has no quotes in IMDB!\n}; warn "Fetching quotes...\n\n"; $mech->get( $link->[0] ); return 
$mech->content; } sub extract_quotes { my $page = shift; # Nibble away at the unwanted HTML at the beginnning... $page =~ s/.+Memorable Quotes//si; $page =~ s/.+?(<a name)/$1/si; # ... and the end of the page $page =~ s/Browse titles in the movie quotes.+$//si; $page =~ s/<p.+$//g; # Quotes separated by an <HR> tag my @quotes = split( /<hr.+?>/, $page ); for my $quote ( @quotes ) { my @lines = split( /<br>/, $quote ); for ( @lines ) { s/<[^>]+>//g; # Strip HTML tags s/\s+/ /g; # Squash whitespace s/^ //; # Strip leading space s/ $//; # Strip trailing space s/&#34;/"/g; # Replace HTML entity quotes # Word-wrap to fit in 72 columns $Text::Wrap::columns = 72; $_ = wrap( '', ' ', $_ ); } $quote = join( "\n", @lines ); } return @quotes; } =head2 cpansearch.pl, by Ed Silva A quick little utility to search the CPAN and fire up a browser with a results page. #!/usr/bin/perl # turn on perl's safety features use strict; use warnings; # work out the name of the module we're looking for my $module_name = $ARGV[0] or die "Must specify module name on command line"; # create a new browser use WWW::Mechanize; my $browser = WWW::Mechanize->new(); # tell it to get the main page $browser->get("http://search.cpan.org/"); # okay, fill in the box with the name of the # module we want to look up $browser->form_number(1); $browser->field("query", $module_name); $browser->click(); # click on the link that matches the module name $browser->follow_link( text_regex => $module_name ); my $url = $browser->uri; # launch a browser... system('galeon', $url); exit(0); =head2 lj_friends.cgi, by Matt Cashner #!/usr/bin/perl # Provides an rss feed of a paid user's LiveJournal friends list # Full entries, protected entries, etc. 
# Add to your favorite rss reader as # http://your.site.com/cgi-bin/lj_friends.cgi?user=USER&password=PASSWORD use warnings; use strict; use WWW::Mechanize; use CGI; my $cgi = CGI->new(); my $form = $cgi->Vars; my $agent = WWW::Mechanize->new(); $agent->get('http://www.livejournal.com/login.bml'); $agent->form_number('3'); $agent->field('user',$form->{user}); $agent->field('password',$form->{password}); $agent->submit(); $agent->get('http://www.livejournal.com/customview.cgi?user='.$form->{user}.'&styleid=225596&checkcookies=1'); print "Content-type: text/plain\n\n"; print $agent->content(); =head2 Hacking Movable Type, by Dan Rinzel use WWW::Mechanize; # a tool to automatically post entries to a moveable type weblog, and set arbitary creation dates my $mech = WWW::Mechanize->new(); my %entry; $entry->{title} = "Test AutoEntry Title"; $entry->{btext} = "Test AutoEntry Body"; $entry->{date} = '2002-04-15 14:18:00'; my $start = qq|http://my.blog.site/mt.cgi|; $mech->get($start); $mech->field('username','und3f1n3d'); $mech->field('password','obscur3d'); $mech->submit(); # to get login cookie $mech->get(qq|$start?__mode=view&_type=entry&blog_id=1|); $mech->form_name('entry_form'); $mech->field('title',$entry->{title}); $mech->field('category_id',1); # adjust as needed $mech->field('text',$entry->{btext}); $mech->field('status',2); # publish, or 1 = draft $results = $mech->submit(); # if we're ok with this entry being datestamped "NOW" (no {date} in %entry) # we're done. Otherwise, time to be tricksy # MT returns a 302 redirect from this form. 
the redirect itself contains a <body onload=""> handler # which takes the user to an editable version of the form where the create date can be edited # MT date format of YYYY-MM-DD HH:MI:SS is the only one that won't error out if ($entry->{date} && $entry->{date} =~ /^\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2}/) { # travel the redirect $results = $mech->get($results->{_headers}->{location}); $results->{_content} =~ /<body onLoad="([^\"]+)"/is; my $js = $1; $js =~ /\'([^']+)\'/; $results = $mech->get($start.$1); $mech->form_name('entry_form'); $mech->field('created_on_manual',$entry->{date}); $mech->submit(); } =head2 get-despair, by Randal Schwartz Randal submitted this bot that walks the despair.com site sucking down all the pictures. use strict; $|++; use WWW::Mechanize; use File::Basename; my $m = WWW::Mechanize->new; $m->get("http://www.despair.com/indem.html"); my @top_links = @{$m->links}; for my $top_link_num (0..$#top_links) { next unless $top_links[$top_link_num][0] =~ /^http:/; $m->follow_link( n=>$top_link_num ) or die "can't follow $top_link_num"; print $m->uri, "\n"; for my $image (grep m{^http://store4}, map $_->[0], @{$m->links}) { my $local = basename $image; print " $image...", $m->mirror($image, $local)->message, "\n" } $m->back or die "can't go back"; }
carlgao/lenga
images/lenny64-peon/usr/share/perl5/WWW/Mechanize/Examples.pod
Perl
mit
15,830
#!/usr/bin/env perl # ------------------------------------------------------------------------------ ##The MIT License (MIT) ## ##Copyright (c) 2016 Jordi Abante ## ##Permission is hereby granted, free of charge, to any person obtaining a copy ##of this software and associated documentation files (the "Software"), to deal ##in the Software without restriction, including without limitation the rights ##to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ##copies of the Software, and to permit persons to whom the Software is ##furnished to do so, subject to the following conditions: ## ##The above copyright notice and this permission notice shall be included in all ##copies or substantial portions of the Software. ## ##THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ##IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ##FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ##AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ##LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, ##OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE ##SOFTWARE. 
# ------------------------------------------------------------------------------
# Libraries
use strict;
use warnings;

# Read arguments
my $scriptname = $0;         # Script name (used in the usage message)
my $n_seqs     = $ARGV[0];   # Total number of sequences in the FASTA on STDIN
my $n_files    = $ARGV[1];   # Number of output files to split into
my $outprefix  = $ARGV[2];   # Prefix for the output files

# Validate arguments before creating any output file.
die "Usage: $scriptname n_seqs n_files out_prefix < input.fa\n"
    unless defined $n_seqs && defined $n_files && defined $outprefix
        && $n_files > 0;

################################ Main #########################################
# Sequences per output file; any remainder from uneven division ends up
# in the last file, matching the original behaviour.
my $n_per_file = int($n_seqs / $n_files);

my $n       = 0;    # Sequences written to the current output file
my $file_id = 1;    # Index of the current output file

# Open the first output file.
my $outfile = "${outprefix}_${file_id}.fa";
open my $out, '>', $outfile or die "Can't open file '${outfile}' $!";

# Stream the FASTA from STDIN, rolling over to a new output file each
# time a new record header would exceed the per-file quota.
while (my $line = <STDIN>) {
    chomp $line;

    # Count FASTA records.  NOTE: matches '>' anywhere in the line, as
    # in the original; assumes '>' only appears in header lines.
    $n++ if $line =~ />/;

    if ($n == $n_per_file + 1) {
        close $out or die "Can't close file '${outfile}' $!";
        $n = 1;
        $file_id++;
        $outfile = "${outprefix}_${file_id}.fa";
        open $out, '>', $outfile or die "Can't open file '${outfile}' $!";
    }
    print {$out} "$line\n";
}

# Flush and close the last output file (buffered write errors surface here).
close $out or die "Can't close file '${outfile}' $!";
##############################################################################
jordiabante/myutils
src/fasta_split/perl/fasta_split.pl
Perl
mit
2,955
=head1 NAME Mail::Message::Field::Address - One e-mail address =head1 INHERITANCE Mail::Message::Field::Address is a Mail::Identity is a User::Identity::Item =head1 SYNOPSIS my $addr = Mail::Message::Field::Address->new(...); my $ui = User::Identity->new(...); my $addr = Mail::Message::Field::Address->coerce($ui); my $mi = Mail::Identity->new(...); my $addr = Mail::Message::Field::Address->coerce($mi); print $addr->address; print $addr->fullName; # possibly unicode! print $addr->domain; =head1 DESCRIPTION Many header fields can contain e-mail addresses. Each e-mail address can be represented by an object of this class. These objects will handle interpretation and character set encoding and decoding for you. =head1 OVERLOADED overload: B<boolean> =over 4 The object used as boolean will always return C<true> =back overload: B<stringification> =over 4 When the object is used in string context, it will return the encoded representation of the e-mail address, just like L<string()|Mail::Message::Field::Address/"Access to the content"> does. =back =head1 METHODS =head2 Constructors $obj-E<gt>B<coerce>(STRING|OBJECT, OPTIONS) =over 4 Try to coerce the OBJECT into a C<Mail::Message::Field::Address>. In case of a STRING, it is interpreted as an email address. The OPTIONS are passed to the object creation, and overrule the values found in the OBJECT. The result may be C<undef> or a newly created object. If the OBJECT is already of the correct type, it is returned unmodified. The OBJECT may currently be a L<Mail::Address|Mail::Address>, a L<Mail::Identity|Mail::Identity>, or a L<User::Identity|User::Identity>. In case of the latter, one of the user's addresses is chosen at random. =back $obj-E<gt>B<from>(OBJECT) =over 4 See L<Mail::Identity/"Constructors"> =back Mail::Message::Field::Address-E<gt>B<new>([NAME], OPTIONS) =over 4 See L<Mail::Identity/"METHODS"> =back $obj-E<gt>B<parse>(STRING) =over 4 Parse the string for an address. 
You never know whether one or more addresses are specified on a line (often applications are wrong), therefore, the STRING is first parsed for as many addresses as possible and then the one is taken at random. =back =head2 Attributes $obj-E<gt>B<address> =over 4 See L<Mail::Identity/"Attributes"> =back $obj-E<gt>B<charset> =over 4 See L<Mail::Identity/"Attributes"> =back $obj-E<gt>B<comment>([STRING]) =over 4 See L<Mail::Identity/"Attributes"> =back $obj-E<gt>B<description> =over 4 See L<User::Identity::Item/"Attributes"> =back $obj-E<gt>B<domain> =over 4 See L<Mail::Identity/"Attributes"> =back $obj-E<gt>B<language> =over 4 See L<Mail::Identity/"Attributes"> =back $obj-E<gt>B<location> =over 4 See L<Mail::Identity/"Attributes"> =back $obj-E<gt>B<name>([NEWNAME]) =over 4 See L<User::Identity::Item/"Attributes"> =back $obj-E<gt>B<organization> =over 4 See L<Mail::Identity/"Attributes"> =back $obj-E<gt>B<phrase> =over 4 See L<Mail::Identity/"Attributes"> =back $obj-E<gt>B<username> =over 4 See L<Mail::Identity/"Attributes"> =back =head2 Collections $obj-E<gt>B<add>(COLLECTION, ROLE) =over 4 See L<User::Identity::Item/"Collections"> =back $obj-E<gt>B<addCollection>(OBJECT | ([TYPE], OPTIONS)) =over 4 See L<User::Identity::Item/"Collections"> =back $obj-E<gt>B<collection>(NAME) =over 4 See L<User::Identity::Item/"Collections"> =back $obj-E<gt>B<find>(COLLECTION, ROLE) =over 4 See L<User::Identity::Item/"Collections"> =back $obj-E<gt>B<parent>([PARENT]) =over 4 See L<User::Identity::Item/"Collections"> =back $obj-E<gt>B<removeCollection>(OBJECT|NAME) =over 4 See L<User::Identity::Item/"Collections"> =back $obj-E<gt>B<type> Mail::Message::Field::Address-E<gt>B<type> =over 4 See L<User::Identity::Item/"Collections"> =back $obj-E<gt>B<user> =over 4 See L<User::Identity::Item/"Collections"> =back =head2 Access to the content $obj-E<gt>B<string> =over 4 Returns an RFC compliant e-mail address, which will have character set encoding if needed. 
The objects are also overloaded to call this method in string context. example: print $address->string; print $address; # via overloading =back =head1 DIAGNOSTICS Error: $object is not a collection. =over 4 The first argument is an object, but not of a class which extends L<User::Identity::Collection|User::Identity::Collection>. =back Error: Cannot coerce a $type into a Mail::Message::Field::Address =over 4 When addresses are specified to be included in header fields, they may be coerced into L<Mail::Message::Field::Address|Mail::Message::Field::Address> objects first. What you specify is not accepted as address specification. This may be an internal error. =back Error: Cannot load collection module for $type ($class). =over 4 Either the specified $type does not exist, or that module named $class returns compilation errors. If the type as specified in the warning is not the name of a package, you specified a nickname which was not defined. Maybe you forgot the 'require' the package which defines the nickname. =back Error: Creation of a collection via $class failed. =over 4 The $class did compile, but it was not possible to create an object of that class using the options you specified. =back Error: Don't know what type of collection you want to add. =over 4 If you add a collection, it must either by a collection object or a list of options which can be used to create a collection object. In the latter case, the type of collection must be specified. =back Warning: No collection $name =over 4 The collection with $name does not exist and can not be created. =back =head1 SEE ALSO This module is part of Mail-Box distribution version 2.082, built on April 28, 2008. Website: F<http://perl.overmeer.net/mailbox/> =head1 LICENSE Copyrights 2001-2008 by Mark Overmeer. For other contributors see ChangeLog. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself. See F<http://www.perl.com/perl/misc/Artistic.html>
carlgao/lenga
images/lenny64-peon/usr/share/perl5/Mail/Message/Field/Address.pod
Perl
mit
6,107
#!/usr/bin/perl -w
use strict;

# Run rseq expression analysis for every sample directory found in the
# current working directory.  A sample directory is any subdirectory that
# contains "<sample>.fastq" and has not already been processed (i.e. has
# no HsRs/ output directory yet).  Results are moved from Hs4Rs/ into
# HsRs/ inside each sample directory.

my $genomeRseq = '/opt/index/hs/genes_longName.fa';   # rseq reference index
my $indir      = 'Hs4Rs';                             # alignment input dir
my $infile     = 'aligned.sam';                       # alignment file name
my $outdir     = 'HsRs';                              # model output dir

# Lexical dirhandle + closedir (the original used a bareword handle and
# never closed it).
opendir(my $dh, '.') or die "opendir .: $!\n";
while (my $subdir = readdir($dh)) {
    next unless -d $subdir;
    next if $subdir eq '.' or $subdir eq '..';
    next if -e "$subdir/$outdir/";            # already processed

    my $read1 = $subdir . ".fastq";
    next unless -e "$subdir/$read1";          # not a sample directory

    print "Analyzing $subdir\n";

    # An unchecked chdir would silently run everything in the wrong
    # directory and then break the walk on the chdir('..') below.
    unless (chdir($subdir)) {
        warn "cannot chdir to $subdir: $!\n";
        next;
    }
    mkdir $outdir or warn "mkdir $outdir: $!\n";

    # All path components are fixed constants, so the shell invocation is
    # safe; the second command relies on shell globbing of $infile.*
    system("rseq expression_analysis $genomeRseq $indir/$infile") == 0
        or warn "rseq failed for $subdir: $?\n";
    system("mv $indir/$infile.* $outdir/") == 0
        or warn "mv failed for $subdir: $?\n";

    chdir('..') or die "cannot chdir back out of $subdir: $!\n";
}
closedir($dh);
cckim47/kimlab
rnaseq/alignAndModel/model_HsRs.pl
Perl
mit
626
#! /usr/bin/perl -w # # colorgcc # # Version: 2.0.5 # # $Id: colorgcc,v 1.10 1999/04/29 17:15:52 jamoyers Exp $ # # A wrapper to colorize the output from compilers whose messages # match the "gcc" format. # # Requires the ANSIColor module from CPAN. # # Usage: # # In a directory that occurs in your PATH _before_ the directory # where the compiler lives, create a softlink to colorgcc for # each compiler you want to colorize: # # g++ -> colorgcc # gcc -> colorgcc # cc -> colorgcc # etc. # # That's it. When "g++" is invoked, colorgcc is run instead. # colorgcc looks at the program name to figure out which compiler to run. # # The default settings can be overridden with ~/.colorgccrc. # See the comments in the sample .colorgccrc for more information. # # Note: # # colorgcc will only emit color codes if: # # (1) Its STDOUT is a tty and # (2) the value of $TERM is not listed in the "nocolor" option. # # If colorgcc colorizes the output, the compiler's STDERR will be # combined with STDOUT. Otherwise, colorgcc just passes the output from # the compiler through without modification. # # Author: Jamie Moyers <jmoyers@geeks.com> # Started: April 20, 1999 # Licence: GNU Public License # # Credits: # # I got the idea for this from a script called "color_cvs": # color_cvs .03 Adrian Likins <adrian@gimp.org> <adrian@redhat.com> # # <seh4@ix.netcom.com> (Scott Harrington) # Much improved handling of compiler command line arguments. # exec compiler when not colorizing to preserve STDOUT, STDERR. # Fixed my STDIN kludge. # # <ecarotti@athena.polito.it> (Elias S. G. Carotti) # Corrected handling of text like -DPACKAGE=\"Package\" # Spotted return code bug. # # <erwin@erwin.andreasen.org> (Erwin S. Andreasen) # <schurchi@ucsd.edu> (Steve Churchill) # Return code bug fixes. # # <rik@kde.org> (Rik Hemsley) # Found STDIN bug. # # Changes: # # 1.3.2 Better handling of command line arguments to compiler. 
#
#	If we aren't colorizing output, we just exec the compiler which
#	preserves the original STDOUT and STDERR.
#
#	Removed STDIN kludge. STDIN being passed correctly now.
#
# 1.3.1 Added kludge to copy STDIN to the compiler's STDIN.
#
# 1.3.0 Now correctly returns (I hope) the return code of the compiler
#       process as its own.
#
# 1.2.1 Applied patch to handle text similar to -DPACKAGE=\"Package\".
#
# 1.2.0 Added tty check. If STDOUT is not a tty, don't do color.
#
# 1.1.0 Added the "nocolor" option to turn off the color if the terminal type
#       ($TERM) is listed.
#
# 1.0.0 Initial Version

use strict;
use Term::ANSIColor;
use IPC::Open3;
use Cwd 'abs_path';

my(%nocolor, %colors, %compilerPaths);
my($unfinishedQuote, $previousColor);

# Populate %nocolor and %colors with the built-in defaults; a user
# config file (loadPreferences) may override any of these.
sub initDefaults
{
    $nocolor{"dumb"} = "true";

    $colors{"srcColor"} = color("cyan");
    $colors{"introFileNameColor"} = color("blue");
    $colors{"introMessageColor"}  = color("blue");

    $colors{"warningFileNameColor"} = color("yellow");
    $colors{"warningNumberColor"}   = color("yellow");
    $colors{"warningMessageColor"}  = color("yellow");

    $colors{"errorFileNameColor"} = color("bold red");
    $colors{"errorNumberColor"}   = color("bold red");
    $colors{"errorMessageColor"}  = color("bold red");

    $colors{"noteFileNameColor"} = color("green");
    $colors{"noteNumberColor"}   = color("green");
    $colors{"noteMessageColor"}  = color("green");
}

# Parse a "option: value" style config file.  Options containing "Color"
# set entries in %colors, "nocolor" lists terminal types to skip, and
# anything else maps a program name to an explicit compiler path.
sub loadPreferences
{
    # Usage: loadPreferences("filename");
    my($filename) = @_;

    # Three-argument open with a lexical handle (the original used an
    # injectable 2-arg open on a bareword).  A missing/unreadable file is
    # silently ignored, as before.
    open(my $prefs, '<', $filename) or return;

    while (<$prefs>)
    {
        next if (m/^\#.*/);          # It's a comment.
        next if (!m/(.*):\s*(.*)/);  # It's not of the form "foo: bar".

        my $option = $1;
        my $value  = $2;

        if ($option =~ m/Color/)
        {
            $colors{$option} = color($value);
        }
        elsif ($option eq "nocolor")
        {
            # The nocolor option lists terminal types, separated by
            # spaces, not to do color on.
            foreach my $term (split(' ', $value))
            {
                $nocolor{$term} = 1;
            }
        }
        else
        {
            $compilerPaths{$option} = $value;
        }
    }
    close($prefs);
}

# Colorize one line of compiler output, highlighting text between
# ` and ' in srcColor.  Keeps state ($unfinishedQuote, $previousColor)
# across calls so quotes spanning several lines are handled.
sub srcscan
{
    # Usage: srcscan($text, $normalColor)
    #    $text -- the text to colorize
    #    $normalColor -- The escape sequence to use for non-source text.

    my($line, $normalColor) = @_;

    if (defined $normalColor)
    {
        $previousColor = $normalColor;
    }
    else
    {
        $normalColor = $previousColor;
    }

    my($srcon)  = color("reset") . $colors{"srcColor"};
    my($srcoff) = color("reset") . $normalColor;

    $line = ($unfinishedQuote ? $srcon : $normalColor) . $line;

    # These substitutions replace `foo' with `AfooB' where A is the escape
    # sequence that turns on the desired source color, and B is the
    # escape sequence that returns to $normalColor.

    # Handle multi-line quotes.
    if ($unfinishedQuote)
    {
        if ($line =~ s/^([^\`]*?)\'/$1$srcoff\'/)
        {
            $unfinishedQuote = 0;
        }
    }
    if ($line =~ s/\`([^\']*?)$/\`$srcon$1/)
    {
        $unfinishedQuote = 1;
    }

    # Single line quoting.
    $line =~ s/[\`\'](.*?)\'/\`$srcon$1$srcoff\'/g;

    print($line, color("reset"));
}

#
# Main program
#

# Set up default values for colors and compilers.
initDefaults();

# Read the configuration file, if there is one.
# BUGFIX: the original used "~/.colorgccrc", but Perl's -f does no tilde
# expansion, so the per-user file was never found.  Build the path from
# $HOME instead (fall back to the system-wide file when $HOME is unset).
my $configFile = ($ENV{'HOME'} || '') . "/.colorgccrc";
if ($ENV{'HOME'} && -f $configFile)
{
    loadPreferences($configFile);
}
elsif (-f '/etc/colorgcc/colorgccrc')
{
    loadPreferences('/etc/colorgcc/colorgccrc');
}

# Set our default output color. This presumes that any unrecognized output
# is an error.
$previousColor = $colors{"errorMessageColor"};

# Figure out which compiler to invoke based on our program name.
$0 =~ m%.*/(.*)$%;
my $progName = $1 || $0;
my $compiler_pid;

# If called as "colorgcc", just filter STDIN to STDOUT.
if ($progName =~ 'colorgcc')
{
    open(GCCOUT, "<&STDIN");
}
else
{
    # See if the user asked for a specific compiler.
    my $compiler;
    if (!defined($compiler = $compilerPaths{$progName}))
    {
        # Find our wrapper dir on the PATH and tweak the PATH to remove
        # everything up-to and including our wrapper dir.
        if ($0 =~ m#(.*)/#)
        {
            # We were called with an explicit path, so trim that off the PATH.
            my $find = $1;
            $find = abs_path($1) unless $find =~ m#^/#;
            $ENV{'PATH'} =~ s#.*(^|:)\Q$find\E(:|$)##;
        }
        else
        {
            my(@dirs) = split(/:/, $ENV{'PATH'});
            while (defined($_ = shift @dirs))
            {
                if (-x "$_/$progName")
                {
                    $ENV{'PATH'} = join(':', @dirs);
                    last;
                }
            }
        }
        $compiler = $progName;
    }

    # Get the terminal type.
    my $terminal = $ENV{"TERM"} || "dumb";

    # If it's in the list of terminal types not to color, or if
    # we're writing to something that's not a tty, don't do color.
    if (! -t STDOUT || $nocolor{$terminal})
    {
        exec $compiler, @ARGV
            or die("Couldn't exec");
    }

    # Keep the pid of the compiler process so we can get its return
    # code and use that as our return code.
    $compiler_pid = open3('<&STDIN', \*GCCOUT, \*GCCOUT, $compiler, @ARGV);
}

# Colorize the output from the compiler.
if ($progName eq "gcc" || $progName eq "g++" || $progName =~ 'colorgcc')
{
    while(<GCCOUT>)
    {
        if (m#^(.+?\.[^:/ ]+:)(\d+:)(.*)$#) # filename:lineno:message
        {
            my $field1 = $1 || "";
            my $field2 = $2 || "";
            my $field3 = $3 || "";

            if ($field3 =~ m/\s+warning:.*/)
            {
                print($colors{"warningFileNameColor"}, "$field1", color("reset"));
                print($colors{"warningNumberColor"}, "$field2", color("reset"));
                srcscan("$field3\n", $colors{"warningMessageColor"});
            }
            # main.cpp:145:2: #error print() is not implemented for this system
            # gcc produces such "undefined reference" messages when debugging info
            # is available.
            # util.h:92: undefined reference to `util::read(int*, char const*);
            elsif ($field3 =~ m/\s+error:.*/
                   || $field3 =~ m/\d+:.*?#error.*/
                   || $field3 =~ m/undefined reference/)
            {
                print($colors{"errorFileNameColor"}, "$field1", color("reset"));
                print($colors{"errorNumberColor"}, "$field2", color("reset"));
                srcscan("$field3\n", $colors{"errorMessageColor"});
            }
            else
            {
                print($colors{"noteFileNameColor"}, "$field1", color("reset"));
                print($colors{"noteNumberColor"}, "$field2", color("reset"));
                srcscan("$field3\n", $colors{"noteMessageColor"});
            }
        }
        # gcc produces such "undefined reference" messages w/o debugging info.
        # main.cpp:(.text+0x13): undefined reference to `floor(double)'
        elsif (m/^(.+?:)(\(.+?\):)( undefined reference to `.+?')$/)
        {
            my $field1 = $1 || "";
            my $field2 = $2 || "";
            my $field3 = $3 || "";
            print($colors{"errorFileNameColor"}, "$field1", color("reset"));
            print($colors{"errorNumberColor"}, "$field2", color("reset"));
            srcscan("$field3\n", $colors{"errorMessageColor"});
        }
        elsif (m/^(.*from)(.+?:)(\d+[,:])$/) # message:filename:line
        {
            my $field1 = $1 || "";
            my $field2 = $2 || "";
            my $field3 = $3 || "";
            # from /home/dgoncharov/src/main.cpp:63,
            print($colors{"noteMessageColor"}, "$field1", color("reset"));
            print($colors{"noteFileNameColor"}, "$field2", color("reset"));
            print($colors{"noteNumberColor"}, "$field3\n", color("reset"));
        }
        # node.cpp:(.text+0x3a86): warning: the use of `mktemp' is
        # dangerous, better use `mkstemp'
        elsif (m/^(.+?:)(\(.+?\):)( warning: .+?)$/)
        {
            my $field1 = $1 || "";
            my $field2 = $2 || "";
            my $field3 = $3 || "";
            print($colors{"warningFileNameColor"}, "$field1", color("reset"));
            print($colors{"warningNumberColor"}, "$field2", color("reset"));
            srcscan("$field3\n", $colors{"warningMessageColor"});
        }
        elsif (m/^:.+`.*'$/) # filename:message:
        {
            srcscan($_, $colors{"warningMessageColor"});
        }
        elsif (m/^(.*?:)(.+:)$/) # filename:message:
        {
            my $field1 = $1 || "";
            my $field2 = $2 || "";
            # No line number, treat as an "introductory" line of text.
            print($colors{"introFileNameColor"}, "$field1", color("reset"));
            srcscan("$field2\n", $colors{"introMessageColor"});
        }
        else # Anything else.
        {
            # Doesn't seem to be a warning or an error. Print normally.
            print(color("reset"), $_);
        }
    }
}
elsif ($progName eq "CC" || $progName =~ "^sun[cC]" || $^O eq "solaris")
{
    # Colorize the output from the compiler.
    while(<GCCOUT>)
    {
        # "main.cpp", line 8: Warning: Implicit int is not supported in C++.
        # "main.cpp", line 9: Error: The function "bar" must have a prototype.
        # "main.cpp":8: Warning: Implicit int is not supported in C++.
        # "main.cpp":9: Error: The function "bar" must have a prototype.
        # CC: Warning: "-g0" overrides "-g", debugging information will be
        # produced. Note, inlining will not be disabled.
        # "m.cpp", line 92: Warning (Anachronism): Formal argument 2 of type
        if (m/^(\".+?\",)( line [0-9]+:)(.+)$/) # filename:lineno:message
        {
            my $field1 = $1 || "";
            my $field2 = $2 || "";
            my $field3 = $3 || "";

            if ($field3 =~ m/Warning/)
            {
                print($colors{"warningFileNameColor"}, "$field1", color("reset"));
                print($colors{"warningNumberColor"}, "$field2", color("reset"));
                print($colors{"warningMessageColor"}, "$field3\n", color("reset"));
            }
            # "sstream.h", line 345: Warning, hidef: std::basic_stringstream
            # <char, std::char_traits<char>, std::allocator<char>>::rdbuf hides
            # the function std::ios::rdbuf(std::streambuf *).
            # "main.cpp", line 740: Where, temwhilespec: While
            # specializing "std::basic_stringstream<char, std::char_traits<char>,
            # std::allocator<char>>".
            elsif ($field3 =~ m/Where,/)
            {
                print($colors{"noteFileNameColor"}, "$field1", color("reset"));
                print($colors{"noteNumberColor"}, "$field2", color("reset"));
                print($colors{"noteMessageColor"}, "$field3\n", color("reset"));
            }
            else
            {
                print($colors{"errorFileNameColor"}, "$field1", color("reset"));
                print($colors{"errorNumberColor"}, "$field2", color("reset"));
                print($colors{"errorMessageColor"}, "$field3\n", color("reset"));
            }
        }
        # ld: fatal: file test.mk: unknown file type
        # ld: fatal: File processing errors. No output written to a.out
        elsif (m/^ld: fatal: .+$/)
        {
            print($colors{"errorMessageColor"}, $_, color("reset"));
        }
        elsif (m/^CC: Warning: (.+)$/)
        {
            print($colors{"warningMessageColor"}, $_, color("reset"));
        }
        else # Anything else.
        {
            # Doesn't seem to be a warning or an error. Print normally.
            print(color("reset"), $_);
        }
    }
}
elsif ($progName =~ "^g?xl[cC].*" || $^O eq "aix")
{
    # Colorize the output from the compiler.
    while(<GCCOUT>)
    {
        # "test.cpp", line 9.12: 1540-0274 (S) The name lookup for "bar" did not
        # find a declaration.
        # "test.cpp", line 15.1: 1540-1101 (W) A return value of type "int" is
        # expected.
        # "test.cpp", line 22.17: 1540-0256 (S) A parameter of type "const char
        # &" cannot be initialized with an expression of type "const char [5]".
        # "test.cpp", line 22.17: 1540-1205 (I) The error occurred while
        # converting to parameter 1 of "std::vector<char,std::allocator<char>
        # >::push_back(const char &)".
        if (m/^(\".+?\",)( line [0-9\.]+:)( [-0-9]+.+)$/) # filename:lineno:message
        {
            my $field1 = $1 || "";
            my $field2 = $2 || "";
            my $field3 = $3 || "";

            if ($field3 =~ m/\((W|I)\)/)
            {
                print($colors{"warningFileNameColor"}, "$field1", color("reset"));
                print($colors{"warningNumberColor"}, "$field2", color("reset"));
                print($colors{"warningMessageColor"}, "$field3\n", color("reset"));
            }
            else
            {
                print($colors{"errorFileNameColor"}, "$field1", color("reset"));
                print($colors{"errorNumberColor"}, "$field2", color("reset"));
                print($colors{"errorMessageColor"}, "$field3\n", color("reset"));
            }
        }
        # /bb/util/version10-082009/usr/vacpp/bin/.orig/xlC_r: 1501-218 (W) file
        # test.mk contains an incorrect file suffix
        elsif (m/^.+?: [-0-9]+ \(W\).+$/)
        {
            print($colors{"warningMessageColor"}, $_, color("reset"));
        }
        # ld: 0711-715 ERROR: File test.mk cannot be processed.
        # The file must be an object file, an import file, or an archive.
        elsif (m/^ld: [-0-9]+ ERROR: .+$/)
        {
            print($colors{"errorMessageColor"}, $_, color("reset"));
        }
        elsif (m/The file must be an object file, an import file, or an/)
        {
            print($colors{"errorMessageColor"}, $_, color("reset"));
        }
        elsif (m/\(I\) INFORMATION:/)
        {
            print($colors{"noteMessageColor"}, "$_", color("reset"));
        }
        else # Anything else.
        {
            # Doesn't seem to be a warning or an error. Print normally.
            print(color("reset"), $_);
        }
    }
}
elsif ($progName eq "aCC" || $^O eq "hpux")
{
    # Colorize the output from the compiler.
    while(<GCCOUT>)
    {
        # "test.cpp", line 7: warning #2837-D: omission of explicit type is
        # nonstandard
        # "test.c", line 8: remark #2177-D: variable "str" was
        # declared but never referenced
        # "test.c", line 9: error #2077-D: this declaration has no
        # storage class or type specifier
        if (m/^(\".+?\",)( line [0-9]+:)(.+)$/) # filename:lineno:message
        {
            my $field1 = $1 || "";
            my $field2 = $2 || "";
            my $field3 = $3 || "";

            if ($field3 =~ m/error #/)
            {
                print($colors{"errorFileNameColor"}, "$field1", color("reset"));
                print($colors{"errorNumberColor"}, "$field2", color("reset"));
                print($colors{"errorMessageColor"}, "$field3\n", color("reset"));
                # aCC prints the offending source line plus a caret on the
                # following lines, terminated by a blank line.
                while(<GCCOUT>)
                {
                    if (m/^$/)
                    {
                        last;
                    }
                    print($colors{"srcColor"}, $_, color("reset"));
                }
            }
            else
            {
                print($colors{"warningFileNameColor"}, "$field1", color("reset"));
                print($colors{"warningNumberColor"}, "$field2", color("reset"));
                print($colors{"warningMessageColor"}, "$field3\n", color("reset"));
                while(<GCCOUT>)
                {
                    if (m/^$/)
                    {
                        last;
                    }
                    print($colors{"srcColor"}, $_, color("reset"));
                }
            }
            print("\n");
        }
        # 5 errors detected in the compilation of "test.cpp".
        elsif (m/^[0-9]+ errors detected in the compilation of .+$/)
        {
            print($colors{"errorMessageColor"}, $_, color("reset"));
        }
        # ld: Mismatched ABI (not an ELF file) for test.mk
        # Fatal error.
        elsif (m/ld: Mismatched ABI/)
        {
            print($colors{"errorMessageColor"}, $_, color("reset"));
        }
        elsif (m/Fatal error/)
        {
            print($colors{"errorMessageColor"}, $_, color("reset"));
        }
        # aCC: warning 930: the +Orelaxed_debug option is not valid at this
        # opt_level.
        elsif (m/^aCC: warning \d+: .+$/)
        {
            print($colors{"warningMessageColor"}, $_, color("reset"));
        }
        else # Anything else.
        {
            # Doesn't seem to be a warning or an error. Print normally.
            print(color("reset"), $_);
        }
    }
}
elsif ($progName eq "icc" || $progName eq "icpc")
{
    # Colorize the output from the compiler.
    while(<GCCOUT>)
    {
        # icc on linux uses gnu ld.
        # The following are gnu ld messages
        # /tmp/iccYHwrGF.o: In function `ff()':
        # 4.cpp:(.text+0x34): undefined reference to `gg()'
        if (m/^(.+?:)( .+?`.+?':)$/)
        {
            my $field1 = $1 || "";
            my $field2 = $2 || "";
            srcscan($field1, $colors{"noteFileNameColor"});
            srcscan("$field2\n", $colors{"noteMessageColor"});
        }
        # test.cpp(6): error: identifier "c" is undefined
        elsif (m/^(.+?)(\(\d+\):)(.+)$/) # filename:lineno:message
        {
            my $field1 = $1 || "";
            my $field2 = $2 || "";
            my $field3 = $3 || "";

            if ($field3 =~ m/remark/)
            {
                print($colors{"warningFileNameColor"}, "$field1", color("reset"));
                print($colors{"warningNumberColor"}, "$field2", color("reset"));
                print($colors{"warningMessageColor"}, "$field3\n", color("reset"));
                # icc echoes the source line and a caret, ending with a
                # blank line.
                while(<GCCOUT>)
                {
                    if (m/^$/)
                    {
                        last;
                    }
                    print($colors{"introMessageColor"}, $_, color("reset"));
                }
            }
            else
            {
                print($colors{"errorFileNameColor"}, "$field1", color("reset"));
                print($colors{"errorNumberColor"}, "$field2", color("reset"));
                print($colors{"errorMessageColor"}, "$field3\n", color("reset"));
                while(<GCCOUT>)
                {
                    if (m/^$/)
                    {
                        last;
                    }
                    print($colors{"introMessageColor"}, $_, color("reset"));
                }
            }
            print("\n");
        }
        # icc: error #10236: File not found: '4.cpp'
        elsif (m/^(icc|icpc): error #\d+: .+$/)
        {
            print($colors{"errorMessageColor"}, "$_", color("reset"));
        }
        elsif (m/^(.+?:)(.+)$/)
        {
            my $field1 = $1 || "";
            my $field2 = $2 || "";
            srcscan($field1, $colors{"errorFileNameColor"});
            srcscan("$field2\n", $colors{"errorMessageColor"});
        }
        else # Anything else.
        {
            # Doesn't seem to be a warning or an error. Print normally.
            print(color("reset"), $_);
        }
    }
}
else
{
    # Unknown compiler: pass everything through uncolored.
    while(<GCCOUT>)
    {
        print(color("reset"), $_);
    }
}

if ($compiler_pid)
{
    # Get the return code of the compiler and exit with that.
    waitpid($compiler_pid, 0);
    exit ($? >> 8);
}
stephensolis/modmap-generator-cpp
external/colorgcc.pl
Perl
mit
20,854
# <@LICENSE>
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to you under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at:
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# </@LICENSE>

=head1 NAME

Mail::SpamAssassin::Plugin::WhiteListSubject - whitelist by Subject header

=head1 SYNOPSIS

 loadplugin Mail::SpamAssassin::Plugin::WhiteListSubject

 header SUBJECT_IN_WHITELIST eval:check_subject_in_whitelist()
 header SUBJECT_IN_BLACKLIST eval:check_subject_in_blacklist()

 score SUBJECT_IN_WHITELIST -100
 score SUBJECT_IN_BLACKLIST 100

 whitelist_subject [Bug *]
 blacklist_subject Make Money Fast

=head1 DESCRIPTION

This SpamAssassin plugin module provides eval tests for whitelisting and
blacklisting particular strings in the Subject header.  The value for
whitelist_subject or blacklist_subject are strings which may contain
file-glob-style patterns, similar to the other whitelist_* config options.
=cut

package Mail::SpamAssassin::Plugin::WhiteListSubject;

use Mail::SpamAssassin::Plugin;
use strict;
use warnings;
use bytes;
use re 'taint';

use vars qw(@ISA);
@ISA = qw(Mail::SpamAssassin::Plugin);

# _glob_to_re($value)
#
# Convert an (already lowercased) whitelist/blacklist entry written with
# file-glob-style wildcards ("?" and "*") into an equivalent regular
# expression string.  Shared by the whitelist_subject and
# blacklist_subject handlers, which previously duplicated this logic.
sub _glob_to_re {
  my ($value) = @_;

  my $re = $value;
  $re =~ s/[\000\\\(]/_/gs;		# paranoia
  $re =~ s/([^\*\?_a-zA-Z0-9])/\\$1/g;	# escape any possible metachars
  $re =~ tr/?/./;			# "?" -> "."
  $re =~ s/\*+/\.\*/g;			# "*" -> "any string"
  return $re;
}

# constructor: register the eval rules and config settings
sub new {
  my $class = shift;
  my $mailsaobject = shift;

  $class = ref($class) || $class;
  my $self = $class->SUPER::new($mailsaobject);
  bless ($self, $class);

  $self->register_eval_rule ("check_subject_in_whitelist");
  $self->register_eval_rule ("check_subject_in_blacklist");

  $self->set_config($mailsaobject->{conf});

  return $self;
}

# Register the whitelist_subject/blacklist_subject configuration
# settings.  Each configured entry is stored as glob => regex-string in
# the configuration hash for later matching by _check_subject().
sub set_config {
  my ($self, $conf) = @_;
  my @cmds;

  push(@cmds, {
	setting => 'whitelist_subject',
	default => {},
	type => $Mail::SpamAssassin::Conf::CONF_TYPE_ADDRLIST,
	code => sub {
	  my ($self, $key, $value, $line) = @_;

	  $value = lc $value;
	  $conf->{$key}->{$value} = _glob_to_re($value);
	}});

  push(@cmds, {
	setting => 'blacklist_subject',
	default => {},
	type => $Mail::SpamAssassin::Conf::CONF_TYPE_ADDRLIST,
	code => sub {
	  my ($self, $key, $value, $line) = @_;

	  $value = lc $value;
	  $conf->{$key}->{$value} = _glob_to_re($value);
	}});

  $conf->{parser}->register_commands(\@cmds);
}

# eval rule: true when the Subject matches any whitelist_subject entry
sub check_subject_in_whitelist {
  my ($self, $permsgstatus) = @_;

  my $subject = $permsgstatus->get('Subject');

  return 0 unless $subject ne '';
  return $self->_check_subject($permsgstatus->{conf}->{whitelist_subject}, $subject);
}

# eval rule: true when the Subject matches any blacklist_subject entry
sub check_subject_in_blacklist {
  my ($self, $permsgstatus) = @_;

  my $subject = $permsgstatus->get('Subject');

  return 0 unless $subject ne '';
  return $self->_check_subject($permsgstatus->{conf}->{blacklist_subject}, $subject);
}

# Check $subject against the glob => regex map built by set_config().
# An exact (lowercased) literal match short-circuits; otherwise each
# stored regex is tried case-insensitively.  (The original also called
# study(), a no-op since perl 5.16.0 — removed.)
sub _check_subject {
  my ($self, $list, $subject) = @_;

  $subject = lc $subject;

  return 1 if defined($list->{$subject});

  foreach my $regexp (values %{$list}) {
    if ($subject =~ qr/$regexp/i) {
      return 1;
    }
  }

  return 0;
}

1;
gitpan/Mail-SpamAssassin
lib/Mail/SpamAssassin/Plugin/WhiteListSubject.pm
Perl
apache-2.0
4,221
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Centreon plugin mode: reports the overall hardware status of an HP
# ProLiant server via SNMP (cpqHeMibCondition).
package hardware::server::hp::proliant::snmp::mode::globalstatus;

use base qw(centreon::plugins::mode);

use strict;
use warnings;

# Default mapping from device-reported status word to Nagios-style
# severity; entries are tried in order, first regex match wins
# (see get_severity).  May be overridden with --threshold-overload.
my $thresholds = {
    global => [
        ['other', 'UNKNOWN'],
        ['ok', 'OK'],
        ['degraded', 'WARNING'],
        ['failed', 'CRITICAL'],
    ],
};

# SNMP integer value of cpqHeMibCondition -> human-readable status word.
my %map_status = (
    1 => 'other',
    2 => 'ok',
    3 => 'degraded',
    4 => 'failed',
);

# Constructor: registers the mode's command-line options with the
# framework's option parser.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';
    # --threshold-overload may be given several times (Getopt ':s@' spec),
    # each value of the form "status,regexp".
    $options{options}->add_options(arguments =>
                                {
                                  "threshold-overload:s@" => { name => 'threshold_overload' },
                                });

    return $self;
}

# Validate and index the --threshold-overload values into
# $self->{overload_th}->{global} = [ { filter => ..., status => ... }, ... ].
# Exits with a usage error on a malformed value or an unknown status.
# NOTE(review): assumes the framework initializes threshold_overload to
# an array ref when the option is absent — confirm against
# centreon::plugins::mode.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::init(%options);

    $self->{overload_th} = {};
    foreach my $val (@{$self->{option_results}->{threshold_overload}}) {
        # Each value must be "status,regexp"; the regexp part may itself
        # contain commas (non-greedy first capture).
        if ($val !~ /^(.*?),(.*)$/) {
            $self->{output}->add_option_msg(short_msg => "Wrong threshold-overload option '" . $val . "'.");
            $self->{output}->option_exit();
        }
        my ($section, $status, $filter) = ('global', $1, $2);
        # is_litteral_status() returns 0 when $status is not one of the
        # recognized severity literals (framework API).
        if ($self->{output}->is_litteral_status(status => $status) == 0) {
            $self->{output}->add_option_msg(short_msg => "Wrong threshold-overload status '" . $val . "'.");
            $self->{output}->option_exit();
        }
        $self->{overload_th}->{$section} = [] if (!defined($self->{overload_th}->{$section}));
        push @{$self->{overload_th}->{$section}}, {filter => $filter, status => $status};
    }
}

# Map a status word to a severity: user-supplied overload filters are
# consulted first, then the built-in $thresholds table; first
# case-insensitive regex match wins.  Falls back to 'UNKNOWN'.
sub get_severity {
    my ($self, %options) = @_;
    my $status = 'UNKNOWN'; # default

    if (defined($self->{overload_th}->{$options{section}})) {
        foreach (@{$self->{overload_th}->{$options{section}}}) {
            if ($options{value} =~ /$_->{filter}/i) {
                $status = $_->{status};
                return $status;
            }
        }
    }
    foreach (@{$thresholds->{$options{section}}}) {
        if ($options{value} =~ /$$_[0]/i) {
            $status = $$_[1];
            return $status;
        }
    }

    return $status;
}

# Entry point: query cpqHeMibCondition over SNMP, translate the value to
# a status word, derive the severity and emit the plugin output.
sub run {
    my ($self, %options) = @_;
    # $options{snmp} = snmp object
    $self->{snmp} = $options{snmp};

    # cpqHeMibCondition: overall condition of the server (CPQHLTH-MIB).
    my $oid_cpqHeMibCondition = '.1.3.6.1.4.1.232.6.1.3.0';
    # nothing_quit => 1: the framework aborts if the OID is missing.
    my $result = $self->{snmp}->get_leef(oids => [$oid_cpqHeMibCondition], nothing_quit => 1);

    my $exit = $self->get_severity(section => 'global', value => $map_status{$result->{$oid_cpqHeMibCondition}});
    $self->{output}->output_add(severity => $exit,
                                short_msg => sprintf("Overall global status is '%s'",
                                                     $map_status{$result->{$oid_cpqHeMibCondition}}));

    $self->{output}->display();
    $self->{output}->exit();
}

1;

__END__

=head1 MODE

Check the overall status of the server.

=over 8

=item B<--threshold-overload>

Set to overload default threshold values (syntax: status,regexp)
It used before default thresholds (order stays).
Example: --threshold-overload='CRITICAL,^(?!(ok)$)'

=back

=cut
golgoth31/centreon-plugins
hardware/server/hp/proliant/snmp/mode/globalstatus.pm
Perl
apache-2.0
4,080
## OpenXPKI::Crypto::X509
## Rewritten 2005 by Michael Bell for the OpenXPKI project
## (C) Copyright 2003-2006 by The OpenXPKI Project

use strict;
use warnings;

package OpenXPKI::Crypto::X509;

use OpenXPKI::Debug;
use OpenXPKI::DN;
use Math::BigInt;
use Digest::SHA qw(sha1_base64);
use OpenXPKI::DateTime;

use base qw(OpenXPKI::Crypto::Object);

use English;

# use Smart::Comments;

# Constructor.  Named parameters:
#   DATA  - the certificate (PEM, possibly wrapped in an OpenXPKI header)
#   TOKEN - a crypto token object from the token manager
# Throws an OpenXPKI::Exception if either is missing; parses the
# certificate immediately via __init().
sub new
{
    my $self = shift;
    my $class = ref($self) || $self;
    $self = {};
    bless $self, $class;

    my $keys = { @_ };
    $self->{DATA}  = $keys->{DATA};
    $self->{TOKEN} = $keys->{TOKEN};

    if (not $self->{DATA})
    {
        OpenXPKI::Exception->throw (
            message => "I18N_OPENXPKI_CRYPTO_X509_NEW_MISSING_DATA");
    }
    if (not $self->{TOKEN})
    {
        OpenXPKI::Exception->throw (
            message => "I18N_OPENXPKI_CRYPTO_X509_NEW_MISSING_TOKEN");
    }

    $self->__init();

    return $self;
}

# Private initializer: parses the certificate with the crypto token,
# computes the SHA1-based identifier, and fills $self->{PARSED} with the
# certificate attributes, subject/email data and decoded extensions.
sub __init
{
    my $self = shift;
    ##! 1: "start"

    ##########################
    ##     init objects     ##
    ##########################

    $self->{header} = OpenXPKI::Crypto::Header->new (DATA => $self->{DATA});
    eval
    {
        $self->{x509} = $self->{TOKEN}->get_object({DATA => $self->{header}->get_body(),
                                                    TYPE => "X509"});
    };
    if (my $exc = OpenXPKI::Exception->caught())
    {
        # re-wrap token-level parse failures with our own message
        OpenXPKI::Exception->throw (
            message  => "I18N_OPENXPKI_CRYPTO_X509_INIT_OBJECT_FAILED",
            children => [ $exc ]);
    } elsif ($EVAL_ERROR) {
        $EVAL_ERROR->rethrow();
    }

    ###############################################
    ##  compute SHA1 hash of DER representation  ##
    ###############################################
    # The SHA1 of the DER encoding serves as the certificate identifier.
    my $cert_der = $self->{TOKEN}->command({
        COMMAND => 'convert_cert',
        DATA    => $self->get_body(),
        OUT     => 'DER',
    });
    $self->{SHA1} = sha1_base64($cert_der);
    ## RFC 3548 URL and filename safe base64
    $self->{SHA1} =~ tr/+\//-_/;

    ##########################
    ##     core parsing     ##
    ##########################

    $self->{PARSED}->{HEADER} = $self->{header}->get_parsed();
    # Pull each certificate attribute out of the token's X509 object and
    # store it under its uppercased name in PARSED->{BODY}.
    foreach my $attr ( "serial", "subject", "issuer", "notbefore", "notafter",
                       "alias", "modulus", "pubkey", "fingerprint", "emailaddress",
                       "version", "pubkey_hash", "pubkey_algorithm",
                       "signature_algorithm", "exponent", "keysize",
                       "extensions", "openssl_subject" )
    {
        $self->{PARSED}->{BODY}->{uc($attr)} = $self->{TOKEN}->get_object_function (
        {
            OBJECT   => $self->{x509},
            FUNCTION => $attr,
        });
        if ($attr eq 'serial') {
            # add serial in hex as well so clients do not have to convert
            # it themselves
            my $serial = Math::BigInt->new($self->{PARSED}->{BODY}->{SERIAL});
            $self->{PARSED}->{BODY}->{SERIAL_HEX} = $serial->as_hex();
            $self->{PARSED}->{BODY}->{SERIAL_HEX} =~ s{\A 0x}{}xms;
        }
    }
    $self->{TOKEN}->free_object ($self->{x509});
    delete $self->{x509};
    ##! 2: "loaded cert attributes"
    my $ret = $self->{PARSED}->{BODY};
    ### parsed body: $ret

    ###########################
    ##     parse subject     ##
    ###########################

    ## load the differnt parts of the DN into SUBJECT_HASH
    my $obj = OpenXPKI::DN->new ($ret->{SUBJECT});
    %{$ret->{SUBJECT_HASH}} = $obj->get_hashed_content();

    ## FIXME: the following comment and code are wrong
    ## FIXME: the second equal sign has not to be escaped because
    ## FIXME: because it is no special character
    ## FIXME: if there is a problem with a DN parser then this
    ## FIXME: parser has a bug
    #    ## OpenSSL includes a bug in -nameopt RFC2253
    #    ## = signs are not escaped if they are normal values
    #    my $i = 0;
    #    my $now = "name";
    #    while ($i < length ($ret->{DN}))
    #    {
    #        if (substr ($ret->{DN}, $i, 1) =~ /\\/)
    #        {
    #            $i++;
    #        } elsif (substr ($ret->{DN}, $i, 1) =~ /=/) {
    #            if ($now =~ /value/)
    #            {
    #                ## OpenSSL forgets to escape =
    #                $ret->{DN} = substr ($ret->{DN}, 0, $i)."\\".substr ($ret->{DN}, $i);
    #                $i++;
    #            } else {
    #                $now = "value";
    #            }
    #        } elsif (substr ($ret->{DN}, $i, 1) =~ /[,+]/) {
    #            $now = "name";
    #        }
    #        $i++;
    #    }

    ##################################
    ##     parse emailaddresses     ##
    ##################################

    # EMAILADDRESS may contain several newline-separated addresses;
    # normalize to EMAILADDRESSES (arrayref) with EMAILADDRESS = first one.
    if ($ret->{EMAILADDRESS})
    {
        if (index ($ret->{EMAILADDRESS}, "\n") < 0 )
        {
            $ret->{EMAILADDRESSES}->[0] = $ret->{EMAILADDRESS};
        } else {
            my @harray = split /\n/, $ret->{EMAILADDRESS};
            $ret->{EMAILADDRESSES} = \@harray;
            $ret->{EMAILADDRESS} = $ret->{EMAILADDRESSES}->[0];
        }
    }
    # OpenSSL's get_email has a bug so we must add rfc822Mailbox by ourselves
    if (not $ret->{EMAILADDRESS} and
        exists $ret->{SUBJECT_HASH}->{MAIL} and
        $ret->{SUBJECT_HASH}->{MAIL}[0])
    {
        $ret->{EMAILADDRESS}   = $ret->{SUBJECT_HASH}->{MAIL}[0];
        $ret->{EMAILADDRESSES} = \@{$ret->{SUBJECT_HASH}->{MAIL}};
    }

    ###############################
    ##     extension parsing     ##
    ###############################

    ## load all extensions
    # PLAIN_EXTENSIONS keeps OpenSSL's raw text dump; EXTENSIONS is rebuilt
    # below as a structured hash.
    $ret->{PLAIN_EXTENSIONS} = $ret->{EXTENSIONS} || '';
    delete $ret->{EXTENSIONS};
    $ret->{OPENSSL_EXTENSIONS} = {};

    my ($val, $key);
    my @lines = split(/\n/, $ret->{PLAIN_EXTENSIONS});

    # Walk the dump: a "Name:[ critical]" line opens a new extension, the
    # following indented lines are its values.
    my $i = 0;
    while($i < @lines)
    {
        if ($lines[$i] =~ /^\s*([^:]+):\s*(?:critical|)\s*$/i)
        {
            $key = $1;
            $ret->{OPENSSL_EXTENSIONS}->{$key} = [];
            $i++;
            while(exists $lines[$i] and $lines[$i] !~ /^\s*[^:]+:\s*(?:critical|)\s*$/ && $i < @lines)
            {
                $val = $lines[$i];
                $val =~ s/^\s+//;
                $val =~ s/\s+$//;
                $i++;
                next if $val =~ /^$/;
                if ($key eq 'X509v3 Subject Alternative Name') {
                    # when OpenSSL encounters CSR IP Subject Alternative Names
                    # the parsed output contains "IP Address:d.d.d.d", however
                    # OpenSSL expects "IP:d.d.d.d" in a config file for
                    # certificate issuance if you intend to issue a certificate
                    # we hereby declare that "IP" is the canonical identifier
                    # for an IP Subject Alternative Name
                    $val =~ s{ \A IP\ Address: }{IP:}xms;
                }
                push(@{$ret->{OPENSSL_EXTENSIONS}->{$key}}, $val);
            }
        } else {
            ## FIXME: can this ever happen?
            $i++;
        }
    }

    ##! 2: "show all extensions and their values"
    while(($key, $val) = each(%{$ret->{OPENSSL_EXTENSIONS}}))
    {
        ##! 4: "found extension: $key"
        foreach(@{$val})
        {
            ##! 8: "with value(s): $_"
        }
    }

    ## signal CA certificate
    # Derive IS_CA from the (whitespace-stripped) Basic Constraints value.
    my $h = $ret->{OPENSSL_EXTENSIONS}->{"X509v3 Basic Constraints"}[0];
    $h ||= "";
    $h =~ s/\s//g;
    if ($h =~ /CA:TRUE/i)
    {
        $ret->{IS_CA} = 1;
        $ret->{EXTENSIONS}->{BASIC_CONSTRAINTS}->{CA} = 1;
    }
    else
    {
        $ret->{IS_CA} = 0;
        $ret->{EXTENSIONS}->{BASIC_CONSTRAINTS}->{CA} = 0;
    }

    # add keyusage extensions as arrayref
    my $keyusage = $ret->{OPENSSL_EXTENSIONS}->{'X509v3 Key Usage'}->[0];
    my @keyusages = ();
    if ($keyusage) {
        @keyusages = split /, /, $keyusage;
    }
    $ret->{'EXTENSIONS'}->{'KEYUSAGE'} = \@keyusages;

    ## add extensions for chain tracking
    # Authority Key Identifier lines look like "keyid:...", "DirName:..."
    # or "serial:xx:xx:..."; the serial is converted from colon-separated
    # hex to a decimal string via Math::BigInt.
    foreach my $item (@{$ret->{OPENSSL_EXTENSIONS}->{"X509v3 Authority Key Identifier"}})
    {
        next if (not defined $item or not length ($item));
        my ($value) = ($item =~ /^[^:]+:(.*)$/);
        $ret->{EXTENSIONS}->{AUTHORITY_KEY_IDENTIFIER}->{CA_KEYID} = $value
            if ($item =~ /^keyid:/);
        $ret->{EXTENSIONS}->{AUTHORITY_KEY_IDENTIFIER}->{CA_ISSUER_NAME} = $value
            if ($item =~ /^DirName:/);
        if ($item =~ /^serial:/)
        {
            $value =~ s/://g;
            $value = "0x$value";
            $value = Math::BigInt->new ($value);
            $value = $value->bstr();
            $ret->{EXTENSIONS}->{AUTHORITY_KEY_IDENTIFIER}->{CA_ISSUER_SERIAL} = $value;
        }
    }
    if ($ret->{OPENSSL_EXTENSIONS}->{"X509v3 Subject Key Identifier"}[0])
    {
        $ret->{EXTENSIONS}->{SUBJECT_KEY_IDENTIFIER} =
            $ret->{OPENSSL_EXTENSIONS}->{"X509v3 Subject Key Identifier"}[0];
        $ret->{EXTENSIONS}->{SUBJECT_KEY_IDENTIFIER} =~ s/^\s*//;
    }

    ## make them visible for the database interface
    foreach $key (keys %{$ret->{EXTENSIONS}->{AUTHORITY_KEY_IDENTIFIER}})
    {
        next if (not $key);
        $ret->{$key} = $ret->{EXTENSIONS}->{AUTHORITY_KEY_IDENTIFIER}->{$key};
    }
    $ret->{KEYID} = $ret->{EXTENSIONS}->{SUBJECT_KEY_IDENTIFIER}
        if (exists $ret->{EXTENSIONS}->{SUBJECT_KEY_IDENTIFIER});

    return 1;
}

# Return the certificate converted to the requested FORMAT
# ("PEM", "DER", "TXT" or "PKCS7"); throws on a missing or unknown
# format.  PEM is returned directly; other formats go through the token.
sub get_converted
{
    my $self   = shift;
    my $format = shift;

    if (not $format)
    {
        OpenXPKI::Exception->throw (
            message => "I18N_OPENXPKI_CRYPTO_X509_GET_CONVERTED_MISSING_FORMAT");
    }
    if ($format ne "PEM" and $format ne "DER" and $format ne "TXT" and $format ne "PKCS7")
    {
        OpenXPKI::Exception->throw (
            message => "I18N_OPENXPKI_CRYPTO_X509_GET_CONVERTED_WRONG_FORMAT",
            params  => {"FORMAT" => $format});
    }

    if ($format eq 'PEM' ) {
        return $self->get_body();
    }
    else {
        my $result = eval {$self->{TOKEN}->command ({COMMAND => "convert_cert",
                                                     DATA    => $self->get_body(),
                                                     OUT     => $format})};
        if (my $exc = OpenXPKI::Exception->caught())
        {
            OpenXPKI::Exception->throw (
                message  => "I18N_OPENXPKI_CRYPTO_X509_GET_CONVERTED_CONVERSION_FAILED",
                children => [ $exc ]);
        } elsif ($EVAL_ERROR) {
            $EVAL_ERROR->rethrow();
        }
        return $result;
    }
}

# Return the URL-safe base64 SHA1 of the DER encoding (computed in
# __init); this is the certificate's unique identifier.
sub get_identifier
{
    my $self = shift;

    if (! exists $self->{SHA1}) {
        OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPKI_CRYPTO_X509_GET_IDENTIFIER_NOT_INITIALIZED',
        );
    }
    return $self->{SHA1};
}

# Return the parsed subject DN string; throws if parsing never happened.
sub get_subject
{
    my $self = shift;

    if (! exists $self->{PARSED}->{BODY}->{SUBJECT}) {
        OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPKI_CRYPTO_X509_GET_SUBJECT_NOT_INITIALIZED',
        );
    }
    return $self->{PARSED}->{BODY}->{SUBJECT};
}

# Accessor for the externally assigned certificate status (see
# set_status); throws if no status has been set yet.
sub get_status
{
    my $self = shift;

    if (not exists $self->{STATUS})
    {
        OpenXPKI::Exception->throw (
            message => "I18N_OPENXPKI_CRYPTO_X509_GET_STATUS_NOT_INITIALIZED");
    }
    return $self->{STATUS};
}

# Set the certificate status and return it.
sub set_status
{
    my $self = shift;
    $self->{STATUS} = shift;
    return $self->get_status();
}

# Return the Subject Key Identifier extension value, or undef when the
# certificate carries no such extension.
sub get_subject_key_id
{
    my $self = shift;

    if (exists $self->{PARSED}->{BODY}->{EXTENSIONS}->{SUBJECT_KEY_IDENTIFIER}) {
        return $self->{PARSED}->{BODY}->{EXTENSIONS}->{SUBJECT_KEY_IDENTIFIER};
    }
    else {
        return undef;
    }
}

# TODO Fix return values (can be Scalar or HashRef!)
sub get_authority_key_id { my $self = shift; my $authkeyid = $self->{PARSED}->{BODY}->{EXTENSIONS}->{AUTHORITY_KEY_IDENTIFIER}; if (exists $authkeyid->{CA_KEYID}) { return $authkeyid->{CA_KEYID}; } elsif (exists $authkeyid->{CA_ISSUER_NAME} && exists $authkeyid->{CA_ISSUER_SERIAL}) { my $return_hashref; $return_hashref->{ISSUER_NAME} = $authkeyid->{CA_ISSUER_NAME}; $return_hashref->{ISSUER_SERIAL} = $authkeyid->{CA_ISSUER_SERIAL}; return $return_hashref; } else { return undef; } } sub to_db_hash { my $self = shift; my %insert_hash; $insert_hash{CERTIFICATE_SERIAL} = $self->get_serial(); $insert_hash{IDENTIFIER} = $self->get_identifier(); $insert_hash{DATA} = $self->{DATA}; $insert_hash{SUBJECT} = $self->{PARSED}->{BODY}->{SUBJECT}; $insert_hash{ISSUER_DN} = $self->{PARSED}->{BODY}->{ISSUER}; $insert_hash{PUBKEY} = $self->{PARSED}->{BODY}->{PUBKEY}; # set subject key id and authority key id, if defined. if (defined $self->get_subject_key_id()) { $insert_hash{SUBJECT_KEY_IDENTIFIER} = $self->get_subject_key_id(); } if (defined $self->get_authority_key_id() && ref $self->get_authority_key_id() eq '') { # TODO: do we save if authority key id is hash, and if # yes, in which format? $insert_hash{AUTHORITY_KEY_IDENTIFIER} = $self->get_authority_key_id(); } $insert_hash{NOTAFTER} = OpenXPKI::DateTime::convert_date({ DATE => $self->{PARSED}->{BODY}->{NOTAFTER}, OUTFORMAT => 'epoch', }); $insert_hash{NOTBEFORE} = OpenXPKI::DateTime::convert_date({ DATE => $self->{PARSED}->{BODY}->{NOTBEFORE}, OUTFORMAT => 'epoch', }); return %insert_hash; } 1; __END__ =head1 Name OpenXPKI::Crypto::X509 =head1 Description This class is used for the handling of X.509v3 certificates. All functions of OpenXPKI::Crypto::Object are supported. All functions which differ from the base class OpenXPKI::Crypto::Object are described below. =head1 Functions =head2 new The constructor supports two options - TOKEN and DATA. TOKEN must be a crypto token from the token manager. 
This is necessary to extract some informations from the data. The parameter DATA must contain a PEM encoded certificate. This is the base of the object. =head2 get_converted expects only one value - the requested format of the certificate. PEM, TXT, PKCS7 and DER are supported. TXT is a plain text representation which can be directly displayed to the user. =head2 get_identifier returns the base64-encoded SHA1 hash of the DER representation of the certificate, which is used as an identifier in the database =head2 set_status sets the certificate status, i.e. ISSUED, SUSPENDED, REVOKED =head2 get_status gets the certificate status =head2 get_subject_key_id gets the subject key identifier from the extension, if present. If not, returns undef. =head2 get_authority_key_id gets the authority key identifier from the extension, if present. Returns either the key identifier as a string or a hash reference containing the ISSUER_NAME and ISSUER_SERIAL field, if the key identifier is not present. If none of the above are available, returns undef. =head2 to_db_hash returns the certificate data in a format that can be inserted into the database table 'CERTIFICATE'.
stefanomarty/openxpki
core/server/OpenXPKI/Crypto/X509.pm
Perl
apache-2.0
15,212
%% %% Class sun.io.CharToByteConverter %% NOTICE: Do not edit this file. This file has been generated from: %% java.awt.event.ActionEvent %% :- class('CharToByteConverter'). :- use_package(objects). ojo> es publico :- export('CharToByteConverter'/0). :- export(byteOff/1). ojo> es publico :- export(canConvert/2). ojo> es publico :- export(convert/7). ojo> es publico :- export(convertAll/2). ojo> es publico :- export(convertAny/7). ojo> es publico :- export(flush/4). ojo> es publico :- export(flushAny/4). ojo> es publico :- export(getBadInputLength/1). ojo> es publico :- export(getCharacterEncoding/1). ojo> es publico :- export(getConverter/2). ojo> es publico :- export(getDefault/1). ojo> es publico :- export(getMaxBytesPerChar/1). ojo> es publico :- export(nextByteIndex/1). ojo> es publico :- export(nextCharIndex/1). ojo> es publico :- export(reset/0). ojo> es publico :- export(setSubstitutionBytes/1). ojo> es publico :- export(setSubstitutionMode/1). :- export(subBytes/1). ojo> es publico :- export(toString/1). %%-------------------------------------------------- %% Interface Information. %%-------------------------------------------------- %%-------------------------------------------------- %% Inheritance information. %%-------------------------------------------------- :- inherit_class(library('javaobs/java/lang/Object')). %%-------------------------------------------------- %% Declared classes. %%-------------------------------------------------- :- use_class(library('javaobs/java/lang/String')). %%-------------------------------------------------- %% Miscelanea. %%-------------------------------------------------- :- redefining(_). :- set_prolog_flag(multi_arity_warnings,off). :- discontiguous java_assert/2. :- use_module(library(lists)). :- public(java_constructor/1). :- public(java_invoke_method/1). :- public(java_get_value/1). :- public(java_set_value/1). :- public(java_add_listener/2). :- public(java_remove_listener/2). :- public(java_delete_object/0). 
:- public(get_java_id/1). :- use_class(library('javaobs/java_obj')). %%-------------------------------------------------- %% Destructor. %%-------------------------------------------------- destructor :- java_delete_object. %%-------------------------------------------------- %% Java fields. %%-------------------------------------------------- %%-------------------------------------------------- %% Constructors. %%-------------------------------------------------- 'CharToByteConverter' :- java_constructor('sun.io.CharToByteConverter'). %%-------------------------------------------------- %% Methods. %%-------------------------------------------------- canConvert(_V0, Result) :- var(Result), integer(_V0), java_invoke_method(canConvert(_V0, Result)). convertAll(_V0, Result) :- var(Result), list(_V0), java_invoke_method(convertAll(_V0, Result)). convertAny(_V0, _V1, _V2, _V3, _V4, _V5, Result) :- var(Result), list(_V0), integer(_V1), integer(_V2), list(_V3), integer(_V4), integer(_V5), java_invoke_method(convertAny(_V0, _V1, _V2, _V3, _V4, _V5, Result)). flushAny(_V0, _V1, _V2, Result) :- var(Result), list(_V0), integer(_V1), integer(_V2), java_invoke_method(flushAny(_V0, _V1, _V2, Result)). getBadInputLength(Result) :- var(Result), java_invoke_method(getBadInputLength(Result)). getConverter(_V0, Result) :- var(Result), atom(_V0), java_invoke_method(getConverter(_V0, Result)). getDefault(Result) :- var(Result), java_invoke_method(getDefault(Result)). nextByteIndex(Result) :- var(Result), java_invoke_method(nextByteIndex(Result)). nextCharIndex(Result) :- var(Result), java_invoke_method(nextCharIndex(Result)). setSubstitutionBytes(_V0) :- list(_V0), java_invoke_method(setSubstitutionBytes(_V0, _)). setSubstitutionMode(_V0) :- interface(_V0, boolean), interface(_V0, java_obj), _V0:get_java_id(_V0OBJ), java_invoke_method(setSubstitutionMode(_V0OBJ, _)). toString(Result) :- var(Result), java_invoke_method(toString(Result)). :- set_prolog_flag(multi_arity_warnings,on).
leuschel/ecce
www/CiaoDE/ciao/library/javaobs/sun/io/CharToByteConverter.pl
Perl
apache-2.0
4,182
package SentenceSentenceFactorType; use strict; use warnings; use Feature::CosineSimilarity; use Moose; use namespace::autoclean; # feature definitions has 'feature_definitions' => ( is => 'rw' , isa => 'ArrayRef' , lazy => 1 , builder => '_sentence_sentence_feature_definitions_builder' ); sub _sentence_sentence_feature_definitions_builder { my $this = shift; my @sentence_sentence_features; # We instantiate object-object features # TODO: provide list of features through system configuration #push @sentence_sentence_features , new Feature::CosineSimilarity( object1 => $this->object1 , object2 => $this->object2 ); push @sentence_sentence_features , new Feature::CosineSimilarity(); # 2 - KL divergence between word distributions # TODO return \@sentence_sentence_features; } with('PairwiseFactorType'); __PACKAGE__->meta->make_immutable; 1;
ypetinot/web-summarization
summarizers/graph-summarizer-4/src/SentenceSentenceFactorType.pm
Perl
apache-2.0
897
# # Copyright 2021 Centreon (http://www.centreon.com/) # # Centreon is a full-fledged industry-strength solution that meets # the needs in IT infrastructure and application monitoring for # service performance. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # package cloud::azure::database::redis::mode::load; use base qw(cloud::azure::custom::mode); use strict; use warnings; sub get_metrics_mapping { my ($self, %options) = @_; my $metrics_mapping = { 'serverload' => { 'output' => 'Server Load', 'label' => 'server-load-percentage', 'nlabel' => 'redis.cache.server.load.percentage', 'unit' => '%', 'min' => '0', 'max' => '100' } }; return $metrics_mapping; } sub new { my ($class, %options) = @_; my $self = $class->SUPER::new(package => __PACKAGE__, %options, force_new_perfdata => 1); bless $self, $class; $options{options}->add_options(arguments => { 'filter-metric:s' => { name => 'filter_metric' }, 'resource:s' => { name => 'resource' }, 'resource-group:s' => { name => 'resource_group' } }); return $self; } sub check_options { my ($self, %options) = @_; $self->SUPER::check_options(%options); if (!defined($self->{option_results}->{resource}) || $self->{option_results}->{resource} eq '') { $self->{output}->add_option_msg(short_msg => 'Need to specify either --resource <name> with --resource-group option or --resource <id>.'); $self->{output}->option_exit(); } my $resource = $self->{option_results}->{resource}; my $resource_group = defined($self->{option_results}->{resource_group}) ? 
$self->{option_results}->{resource_group} : ''; if ($resource =~ /^\/subscriptions\/.*\/resourceGroups\/(.*)\/providers\/Microsoft\.Cache\/Redis\/(.*)$/) { $resource_group = $1; $resource = $2; } $self->{az_resource} = $resource; $self->{az_resource_group} = $resource_group; $self->{az_resource_type} = 'Redis'; $self->{az_resource_namespace} = 'Microsoft.Cache'; $self->{az_timeframe} = defined($self->{option_results}->{timeframe}) ? $self->{option_results}->{timeframe} : 900; $self->{az_interval} = defined($self->{option_results}->{interval}) ? $self->{option_results}->{interval} : 'PT5M'; $self->{az_aggregations} = ['Maximum']; if (defined($self->{option_results}->{aggregation})) { $self->{az_aggregations} = []; foreach my $stat (@{$self->{option_results}->{aggregation}}) { if ($stat ne '') { push @{$self->{az_aggregations}}, ucfirst(lc($stat)); } } } foreach my $metric (keys %{$self->{metrics_mapping}}) { next if (defined($self->{option_results}->{filter_metric}) && $self->{option_results}->{filter_metric} ne '' && $metric !~ /$self->{option_results}->{filter_metric}/); push @{$self->{az_metrics}}, $metric; } } 1; __END__ =head1 MODE Check Azure Redis server load statistics. Example: Using resource name : perl centreon_plugins.pl --plugin=cloud::azure::database::redis::plugin --mode=load --custommode=api --resource=<redis_id> --resource-group=<resourcegroup_id> --aggregation='maximum' --warning-server-load-percentage='80' --critical-server-load-percentage='90' Using resource id : perl centreon_plugins.pl --plugin=cloud::azure::database::redis::plugin --mode=load --custommode=api --resource='/subscriptions/<subscription_id>/resourceGroups/<resourcegroup_id>/providers/Microsoft.Cache/Redis/<redis_id>' --aggregation='maximum' --warning-server-load-percentage='80' --critical-server-load-percentage='90' Default aggregation: 'maximum' / 'minimum', 'maximum' and 'average' are valid. =over 8 =item B<--resource> Set resource name or id (Required). 
=item B<--resource-group> Set resource group (Required if resource's name is used). =item B<--warning-server-load-percentage> Warning threshold. =item B<--critical-server-load-percentage> Critical threshold. =back =cut
Tpo76/centreon-plugins
cloud/azure/database/redis/mode/load.pm
Perl
apache-2.0
4,598
# # Copyright 2018 Centreon (http://www.centreon.com/) # # Centreon is a full-fledged industry-strength solution that meets # the needs in IT infrastructure and application monitoring for # service performance. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # package hardware::server::sun::mgmt_cards::mode::showenvironment; use base qw(centreon::plugins::mode); use strict; use warnings; use centreon::plugins::misc; use hardware::server::sun::mgmt_cards::components::showenvironment::resources qw($thresholds); use hardware::server::sun::mgmt_cards::components::showenvironment::psu; use hardware::server::sun::mgmt_cards::components::showenvironment::fan; use hardware::server::sun::mgmt_cards::components::showenvironment::temperature; use hardware::server::sun::mgmt_cards::components::showenvironment::sensors; use hardware::server::sun::mgmt_cards::components::showenvironment::voltage; use hardware::server::sun::mgmt_cards::components::showenvironment::si; use hardware::server::sun::mgmt_cards::components::showenvironment::disk; sub new { my ($class, %options) = @_; my $self = $class->SUPER::new(package => __PACKAGE__, %options); bless $self, $class; $self->{version} = '1.0'; $options{options}->add_options(arguments => { "hostname:s" => { name => 'hostname' }, "port:s" => { name => 'port', default => 23 }, "username:s" => { name => 'username' }, "password:s" => { name => 'password' }, "timeout:s" => { name => 'timeout', default => 30 }, "command-plink:s" => { name => 'command_plink', 
default => 'plink' }, "ssh" => { name => 'ssh' }, "exclude:s" => { name => 'exclude' }, "component:s" => { name => 'component', default => 'all' }, "no-component:s" => { name => 'no_component' }, "threshold-overload:s@" => { name => 'threshold_overload' }, }); $self->{components} = {}; $self->{no_components} = undef; return $self; } sub check_options { my ($self, %options) = @_; $self->SUPER::init(%options); if (!defined($self->{option_results}->{hostname})) { $self->{output}->add_option_msg(short_msg => "Need to specify a hostname."); $self->{output}->option_exit(); } if (!defined($self->{option_results}->{username})) { $self->{output}->add_option_msg(short_msg => "Need to specify a username."); $self->{output}->option_exit(); } if (!defined($self->{option_results}->{password})) { $self->{output}->add_option_msg(short_msg => "Need to specify a password."); $self->{output}->option_exit(); } if (!defined($self->{option_results}->{ssh})) { require hardware::server::sun::mgmt_cards::lib::telnet; } if (defined($self->{option_results}->{no_component})) { if ($self->{option_results}->{no_component} ne '') { $self->{no_components} = $self->{option_results}->{no_component}; } else { $self->{no_components} = 'critical'; } } $self->{overload_th} = {}; foreach my $val (@{$self->{option_results}->{threshold_overload}}) { if ($val !~ /^(.*?),(.*?),(.*)$/) { $self->{output}->add_option_msg(short_msg => "Wrong threshold-overload option '" . $val . "'."); $self->{output}->option_exit(); } my ($section, $status, $filter) = ($1, $2, $3); if ($self->{output}->is_litteral_status(status => $status) == 0) { $self->{output}->add_option_msg(short_msg => "Wrong threshold-overload status '" . $val . 
"'."); $self->{output}->option_exit(); } $self->{overload_th}->{$section} = [] if (!defined($self->{overload_th}->{$section})); push @{$self->{overload_th}->{$section}}, {filter => $filter, status => $status}; } } sub ssh_command { my ($self, %options) = @_; my $cmd_in = $self->{option_results}->{username} . '\n' . $self->{option_results}->{password} . '\nshowenvironment\nlogout\n'; my $cmd = "echo -e '$cmd_in' | " . $self->{option_results}->{command_plink} . " -batch " . $self->{option_results}->{hostname} . " 2>&1"; my ($lerror, $stdout, $exit_code) = centreon::plugins::misc::backtick( command => $cmd, timeout => $self->{option_results}->{timeout}, wait_exit => 1 ); $stdout =~ s/\r//g; if ($lerror <= -1000) { $self->{output}->output_add(severity => 'UNKNOWN', short_msg => $stdout); $self->{output}->display(); $self->{output}->exit(); } if ($exit_code != 0) { $stdout =~ s/\n/ - /g; $self->{output}->output_add(severity => 'UNKNOWN', short_msg => "Command error: $stdout"); $self->{output}->display(); $self->{output}->exit(); } if ($stdout !~ /Environmental Status/mi) { $self->{output}->output_add(long_msg => $stdout); $self->{output}->output_add(severity => 'UNKNOWN', short_msg => "Command 'showenvironment' problems (see additional info)."); $self->{output}->display(); $self->{output}->exit(); } return $stdout; } sub global { my ($self, %options) = @_; hardware::server::sun::mgmt_cards::components::showenvironment::psu::check($self); hardware::server::sun::mgmt_cards::components::showenvironment::fan::check($self); hardware::server::sun::mgmt_cards::components::showenvironment::temperature::check($self); hardware::server::sun::mgmt_cards::components::showenvironment::sensors::check($self); hardware::server::sun::mgmt_cards::components::showenvironment::voltage::check($self); hardware::server::sun::mgmt_cards::components::showenvironment::si::check($self); hardware::server::sun::mgmt_cards::components::showenvironment::disk::check($self); } sub component { my ($self, 
%options) = @_; if ($self->{option_results}->{component} eq 'si') { hardware::server::sun::mgmt_cards::components::showenvironment::si::check($self); } elsif ($self->{option_results}->{component} eq 'psu') { hardware::server::sun::mgmt_cards::components::showenvironment::psu::check($self); } elsif ($self->{option_results}->{component} eq 'fan') { hardware::server::sun::mgmt_cards::components::showenvironment::fan::check($self); } elsif ($self->{option_results}->{component} eq 'temperature') { hardware::server::sun::mgmt_cards::components::showenvironment::temperature::check($self); } elsif ($self->{option_results}->{component} eq 'sensors') { hardware::server::sun::mgmt_cards::components::showenvironment::sensors::check($self); } elsif ($self->{option_results}->{component} eq 'voltage') { hardware::server::sun::mgmt_cards::components::showenvironment::voltage::check($self); } elsif ($self->{option_results}->{component} eq 'disk') { hardware::server::sun::mgmt_cards::components::showenvironment::disk::check($self); } else { $self->{output}->add_option_msg(short_msg => "Wrong option. Cannot find component '" . $self->{option_results}->{component} . 
"'."); $self->{output}->option_exit(); } } sub run { my ($self, %options) = @_; if (defined($self->{option_results}->{ssh})) { $self->{stdout} = $self->ssh_command(); } else { my $telnet_handle = hardware::server::sun::mgmt_cards::lib::telnet::connect( username => $self->{option_results}->{username}, password => $self->{option_results}->{password}, hostname => $self->{option_results}->{hostname}, port => $self->{option_results}->{port}, timeout => $self->{option_results}->{timeout}, output => $self->{output}); my @lines = $telnet_handle->cmd("showenvironment"); $self->{stdout} = join("", @lines); } $self->{stdout} =~ s/\r//msg; if ($self->{option_results}->{component} eq 'all') { $self->global(); } else { $self->component(); } my $total_components = 0; my $display_by_component = ''; my $display_by_component_append = ''; foreach my $comp (sort(keys %{$self->{components}})) { # Skipping short msg when no components next if ($self->{components}->{$comp}->{total} == 0 && $self->{components}->{$comp}->{skip} == 0); $total_components += $self->{components}->{$comp}->{total} + $self->{components}->{$comp}->{skip}; $display_by_component .= $display_by_component_append . $self->{components}->{$comp}->{total} . '/' . $self->{components}->{$comp}->{skip} . ' ' . 
$self->{components}->{$comp}->{name}; $display_by_component_append = ', '; } $self->{output}->output_add(severity => 'OK', short_msg => sprintf("All %s components [%s] are ok.", $total_components, $display_by_component) ); if (defined($self->{option_results}->{no_component}) && $total_components == 0) { $self->{output}->output_add(severity => $self->{no_components}, short_msg => 'No components are checked.'); } $self->{output}->display(); $self->{output}->exit(); } sub check_exclude { my ($self, %options) = @_; if (defined($options{instance})) { if (defined($self->{option_results}->{exclude}) && $self->{option_results}->{exclude} =~ /(^|\s|,)${options{section}}[^,]*#\Q$options{instance}\E#/) { $self->{components}->{$options{section}}->{skip}++; $self->{output}->output_add(long_msg => sprintf("Skipping $options{section} section $options{instance} instance.")); return 1; } } elsif (defined($self->{option_results}->{exclude}) && $self->{option_results}->{exclude} =~ /(^|\s|,)$options{section}(\s|,|$)/) { $self->{output}->output_add(long_msg => sprintf("Skipping $options{section} section.")); return 1; } return 0; } sub get_severity { my ($self, %options) = @_; my $status = 'UNKNOWN'; # default if (defined($self->{overload_th}->{$options{section}})) { foreach (@{$self->{overload_th}->{$options{section}}}) { if ($options{value} =~ /$_->{filter}/i) { $status = $_->{status}; return $status; } } } foreach (@{$thresholds->{$options{section}}}) { if ($options{value} =~ /$$_[0]/i) { $status = $$_[1]; return $status; } } return $status; } 1; __END__ =head1 MODE Check Sun vXXX (v240, v440, v245,...) Hardware (through ALOM). =over 8 =item B<--hostname> Hostname to query. =item B<--port> telnet port (Default: 23). =item B<--username> telnet username. =item B<--password> telnet password. =item B<--timeout> Timeout in seconds for the command (Default: 30). =item B<--command-plink> Plink command (default: plink). Use to set a path. 
=item B<--ssh> Use ssh (with plink) instead of telnet. =item B<--component> Which component to check (Default: 'all'). Can be: 'temperature', 'si', 'disk', 'fan', 'voltage', 'psu', 'sensors'. =item B<--exclude> Exclude some parts (comma seperated list) (Example: --exclude=fan) Can also exclude specific instance: --exclude=fan#F1.RS# =item B<--no-component> Return an error if no compenents are checked. If total (with skipped) is 0. (Default: 'critical' returns). =item B<--threshold-overload> Set to overload default threshold values (syntax: section,status,regexp) It used before default thresholds (order stays). Example: --threshold-overload='fan,CRITICAL,^(?!(OK|NOT PRESENT)$)' =back =cut
wilfriedcomte/centreon-plugins
hardware/server/sun/mgmt_cards/mode/showenvironment.pm
Perl
apache-2.0
13,156
# # Copyright 2021 Centreon (http://www.centreon.com/) # # Centreon is a full-fledged industry-strength solution that meets # the needs in IT infrastructure and application monitoring for # service performance. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # package cloud::aws::ec2::mode::discovery; use base qw(centreon::plugins::mode); use strict; use warnings; use JSON::XS; sub new { my ($class, %options) = @_; my $self = $class->SUPER::new(package => __PACKAGE__, %options); bless $self, $class; $options{options}->add_options(arguments => { "prettify" => { name => 'prettify' }, "filter-type:s" => { name => 'filter_type' }, }); return $self; } sub check_options { my ($self, %options) = @_; $self->SUPER::init(%options); } sub run { my ($self, %options) = @_; my @disco_data; my $disco_stats; $disco_stats->{start_time} = time(); my %asgs; my $instances = $options{custom}->discovery( service => 'ec2', command => 'describe-instances' ); foreach my $reservation (@{$instances->{Reservations}}) { foreach my $instance (@{$reservation->{Instances}}) { next if (!defined($instance->{InstanceId})); my %asg; $asg{type} = "asg"; my %ec2; $ec2{type} = "ec2"; $ec2{id} = $instance->{InstanceId}; $ec2{state} = $instance->{State}->{Name}; $ec2{key_name} = $instance->{KeyName}; $ec2{private_ip} = $instance->{PrivateIpAddress}; $ec2{private_dns_name} = $instance->{PrivateDnsName}; $ec2{public_dns_name} = $instance->{PublicDnsName}; $ec2{instance_type} = $instance->{InstanceType}; $ec2{vpc_id} = 
$instance->{VpcId}; foreach my $tag (@{$instance->{Tags}}) { if ($tag->{Key} eq "aws:autoscaling:groupName" && defined($tag->{Value})) { $ec2{asg} = $tag->{Value}; next if (defined($asgs{$tag->{Value}})); $asg{name} = $tag->{Value}; $asgs{$tag->{Value}} = 1; } if ($tag->{Key} eq "Name" && defined($tag->{Value})) { $ec2{name} = $tag->{Value}; } push @{$ec2{tags}}, { key => $tag->{Key}, value => $tag->{Value} }; } push @disco_data, \%ec2 unless (defined($self->{option_results}->{filter_type}) && $ec2{type} !~ /$self->{option_results}->{filter_type}/); push @disco_data, \%asg unless ((defined($self->{option_results}->{filter_type}) && $asg{type} !~ /$self->{option_results}->{filter_type}/) || !defined($asg{name}) || $asg{name} eq ''); } } $disco_stats->{end_time} = time(); $disco_stats->{duration} = $disco_stats->{end_time} - $disco_stats->{start_time}; $disco_stats->{discovered_items} = @disco_data; $disco_stats->{results} = \@disco_data; my $encoded_data; eval { if (defined($self->{option_results}->{prettify})) { $encoded_data = JSON::XS->new->utf8->pretty->encode($disco_stats); } else { $encoded_data = JSON::XS->new->utf8->encode($disco_stats); } }; if ($@) { $encoded_data = '{"code":"encode_error","message":"Cannot encode discovered data into JSON format"}'; } $self->{output}->output_add(short_msg => $encoded_data); $self->{output}->display(nolabel => 1, force_ignore_perfdata => 1); $self->{output}->exit(); } 1; __END__ =head1 MODE EC2/ASG discovery. =over 8 =item B<--filter-type> Filter type. =item B<--prettify> Prettify JSON output. =back =cut
Tpo76/centreon-plugins
cloud/aws/ec2/mode/discovery.pm
Perl
apache-2.0
4,264
=head1 TITLE C<my Dog $spot> is just an assertion =head1 VERSION Maintainer: Piers Cawley <pdcawley@bofh.org.uk> Date: 13 Sep 2000 Last Modified: 25 Sep 2000 Mailing List: perl6-language-objects@perl.org Number: 218 Version: 2 Status: Frozen =head1 ABSTRACT The behaviour of the <my Dog $spot> syntax should simply be an assertion of the invariant: (!defined($spot) || (ref($spot) && $spot->isa('Dog))) =head1 NOTES ON THE FREEZE The original version of this attracted very little comment and what there was was positive. Therefore I've frozen it as it stands. =head1 DESCRIPTION The syntax my Dog $spot = Dog->new(); currently carries little weight with Perl, often failing to do what one expects: $ perl -wle 'my Dog::$spot; print "ok"' No such class Dog at -e line 1, near "my Dog" Execution of -e aborted due to compilation errors. $ perl -wle 'sub Dog::new; my Dog $spot; print "ok"' ok $ perl -wle 'sub Dog::new; my Dog $spot = 1' ok The first example is obvious, as is the second. The third one is I<weird>. I therefore propose that C<my Dog $spot> comes to mean that C<$spot> is restricted to being either undefined or a reference to a C<Dog> object (or any subclasses of Dog). Simply having this implicit assertion can be useful to the programmer, but I would argue that its main advantage is that the compiler knows the object's interface at compile time and can potentially use this fact to speed up method dispatch. =head2 Examples In class methods: package Dog; sub speak { my Dog $self = shift; # Sadly 'my __PACKAGE__ $self' doesn't work print $self->name, " barks!\n"; } Admittedly, this makes little sense unless there is some optimization available, but it can still be useful as a notational convenience. Or, consider the case where you have an C<AnimalShelter> object and you're looking to get a Dog from there. 
my AnimalShelter $shelter = RSPCA->find_nearest_shelter; my Dog $stray; try { PET: while (!defined($stray)) { $stray = $shelter->next_animal; } } catch Exception::WrongClass { next PET; } $stray->bark; Admittedly this makes some assumptions about the possibility of loops within try blocks, but I think the point is still valid. My main concern with this proposal is to make it possible to use the C<my Dog $spot> syntax along with it's associated (posited) optimizations and assertions wherever it's appropriate in user code. =head1 IMPLEMENTATION I've not really looked into using source filters, but if C<my Dog $spot> can be mapped to C<tie my $spot, Tie::Invariant, 'Dog'> then Tie::Invariant can look something like: package Tie::Invariant; use carp; sub TIESCALAR { my $self = bless {value => undef}, shift; $self->{class} = shift; return $self; } sub FETCH { my $self = shift; return $self->value; } sub STORE { my($self,$newval) = @_; if (!defined($newval) || (ref($newval) && UNIVERSAL::isa($newval, "UNIVERSAL") && $newval->isa($self->{class}))) { croak "Value must be 'undef' or be a $self->{class}" } $self->{value} = $newval; } Note that the above is merely a sample implementation written in Perl5. I would hope that the 'real' code would be within the perl5 interpreter and compiler. And all the faster for that. =head1 MIGRATION Migration issues should be minor, the only problem arising when people have assigned things that aren't objects of the appropriate type to typed variables, but they deserve to lose anyway. =head1 REFERENCES RFC 171: my Dog $spot should call a constructor implicitly This RFC is a counter RFC to RFC 171. See my forthcoming 'new pragma: use package' RFC for something that addresses one of the concerns of RFC 171. RFC 137: Overview: Perl OO should I<not> be fundamentally changed My guiding principle.
autarch/perlweb
docs/dev/perl6/rfc/218.pod
Perl
apache-2.0
4,098
# VMOMI::DiagnosticPartitionType
#
# Marker class for the vSphere API DiagnosticPartitionType enumerated
# value; carries no behaviour of its own — everything is inherited
# from VMOMI::SimpleType.
package VMOMI::DiagnosticPartitionType;

use strict;
use warnings;

use parent 'VMOMI::SimpleType';

1;
stumpr/p5-vmomi
lib/VMOMI/DiagnosticPartitionType.pm
Perl
apache-2.0
103
=head1 NAME

mod_perl Tutorials and Presentations

=head1 Description

Over the years, mod_perl talks have been held at several conferences
about Perl, Apache or Open Source. Many of those giving talks put some
information online, and you can learn a lot from that.

If you know of any other mod_perl tutorials and presentations not
listed on this page please let us know at the L<users|maillist::modperl>
mailing list.

=head1 Tutorials and Presentations

=over

=item *

Geoffrey Young gives several wonderful tutorials every year, if you
haven't been to his talks you definitely should. He has put his
materials online at: http://www.modperlcookbook.org/~geoff/slides/

=item *

I<Industrial Strength Publishing>, by Ian Callen, Salon.com.
http://www.salonmag.com/contact/staff/idk/print.html. Talk held at the
O'Reilly Open Source Software Convention in Monterey, California on
07/20/2000.

=item *

I<Introduction to mod_perl> by Nathan Torkington.
http://prometheus.frii.com/~gnat/mod_perl/ (PPT and PDF formats).

=item *

Stas Bekman has put all his talks online: http://stason.org/talks/

=item *

I<Real World Performance Tuning for mod_perl> by Ask Bjørn Hansen.
http://develooper.com/modperl/ (held at The Perl Conference 5, San
Diego, July 2001).

=item *

Eric Cholet has a collection of talks he has given at
http://www.logilune.com/eric/

=item *

Thomas Klausner gave a tutorial titled "Web Application Development
using mod_perl and CPAN" at the German Perl Workshop 2003. The slides
(in German) are available from here:
http://domm.zsi.at/talks/modperl_pws2003/

=item *

Mike Fletcher has two introductory talks on mod_perl:
http://phydeaux.org/perl/yapc/ and http://phydeaux.org/perl/mptalk/ .

=item *

Michael Parker's 1998 talk on Performance Tuning:
http://www.perl.org/oscon/1998/Perl_and_Apache/Performance%20Tuning/

=item *

I<Apache, mod_perl and Embperl> by Malcolm Beattie.
http://web.archive.org/web/20010605114950/http://users.ox.ac.uk/~mbeattie/oxlug-oct99/.
Held at OxLUG, 3 October 1999. =item * Enrico Sorcinelli gave the talk I<20 things you may not know about mod_perl> at the 2nd Italian Perl Workshop on June 2005: http://www.perl.it/documenti/talks/ipw/2005/bepi/mod_perl_20_things-eng/img0.html =back META: Move this to Conferences part if we make one. Would be nice to be able to have a standard presentation format to put them all online. =cut
Distrotech/mod_perl
docs/src/docs/offsite/presentations.pod
Perl
apache-2.0
2,403
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% This file is part of VivoMind Prolog Unicode Resources
% SPDX-License-Identifier: CC0-1.0
%
% VivoMind Prolog Unicode Resources is free software distributed using the
% Creative Commons CC0 1.0 Universal (CC0 1.0) - Public Domain Dedication
% license
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% Last modified: March 29, 2012

% unicode_category_(?CodePoint, ?Category)
%
% One fact per code point mapping it to its Unicode general category;
% every fact in this table uses category 'Mn' (Mark, nonspacing).
% The table appears machine-generated from Unicode Character Database
% data — do not edit individual facts by hand; regenerate instead.
% NOTE(review): facts for this category continue beyond this section.

% U+0300..U+032E — combining diacritical marks (start of the 'Mn' table)
unicode_category_(0x0300, 'Mn').
unicode_category_(0x0301, 'Mn').
unicode_category_(0x0302, 'Mn').
unicode_category_(0x0303, 'Mn').
unicode_category_(0x0304, 'Mn').
unicode_category_(0x0305, 'Mn').
unicode_category_(0x0306, 'Mn').
unicode_category_(0x0307, 'Mn').
unicode_category_(0x0308, 'Mn').
unicode_category_(0x0309, 'Mn').
unicode_category_(0x030A, 'Mn').
unicode_category_(0x030B, 'Mn').
unicode_category_(0x030C, 'Mn').
unicode_category_(0x030D, 'Mn').
unicode_category_(0x030E, 'Mn').
unicode_category_(0x030F, 'Mn').
unicode_category_(0x0310, 'Mn').
unicode_category_(0x0311, 'Mn').
unicode_category_(0x0312, 'Mn').
unicode_category_(0x0313, 'Mn').
unicode_category_(0x0314, 'Mn').
unicode_category_(0x0315, 'Mn').
unicode_category_(0x0316, 'Mn').
unicode_category_(0x0317, 'Mn').
unicode_category_(0x0318, 'Mn').
unicode_category_(0x0319, 'Mn').
unicode_category_(0x031A, 'Mn').
unicode_category_(0x031B, 'Mn').
unicode_category_(0x031C, 'Mn').
unicode_category_(0x031D, 'Mn').
unicode_category_(0x031E, 'Mn').
unicode_category_(0x031F, 'Mn').
unicode_category_(0x0320, 'Mn').
unicode_category_(0x0321, 'Mn').
unicode_category_(0x0322, 'Mn').
unicode_category_(0x0323, 'Mn').
unicode_category_(0x0324, 'Mn').
unicode_category_(0x0325, 'Mn').
unicode_category_(0x0326, 'Mn').
unicode_category_(0x0327, 'Mn').
unicode_category_(0x0328, 'Mn').
unicode_category_(0x0329, 'Mn').
unicode_category_(0x032A, 'Mn').
unicode_category_(0x032B, 'Mn').
unicode_category_(0x032C, 'Mn').
unicode_category_(0x032D, 'Mn').
unicode_category_(0x032E, 'Mn').
unicode_category_(0x032F, 'Mn'). unicode_category_(0x0330, 'Mn'). unicode_category_(0x0331, 'Mn'). unicode_category_(0x0332, 'Mn'). unicode_category_(0x0333, 'Mn'). unicode_category_(0x0334, 'Mn'). unicode_category_(0x0335, 'Mn'). unicode_category_(0x0336, 'Mn'). unicode_category_(0x0337, 'Mn'). unicode_category_(0x0338, 'Mn'). unicode_category_(0x0339, 'Mn'). unicode_category_(0x033A, 'Mn'). unicode_category_(0x033B, 'Mn'). unicode_category_(0x033C, 'Mn'). unicode_category_(0x033D, 'Mn'). unicode_category_(0x033E, 'Mn'). unicode_category_(0x033F, 'Mn'). unicode_category_(0x0340, 'Mn'). unicode_category_(0x0341, 'Mn'). unicode_category_(0x0342, 'Mn'). unicode_category_(0x0343, 'Mn'). unicode_category_(0x0344, 'Mn'). unicode_category_(0x0345, 'Mn'). unicode_category_(0x0346, 'Mn'). unicode_category_(0x0347, 'Mn'). unicode_category_(0x0348, 'Mn'). unicode_category_(0x0349, 'Mn'). unicode_category_(0x034A, 'Mn'). unicode_category_(0x034B, 'Mn'). unicode_category_(0x034C, 'Mn'). unicode_category_(0x034D, 'Mn'). unicode_category_(0x034E, 'Mn'). unicode_category_(0x034F, 'Mn'). unicode_category_(0x0350, 'Mn'). unicode_category_(0x0351, 'Mn'). unicode_category_(0x0352, 'Mn'). unicode_category_(0x0353, 'Mn'). unicode_category_(0x0354, 'Mn'). unicode_category_(0x0355, 'Mn'). unicode_category_(0x0356, 'Mn'). unicode_category_(0x0357, 'Mn'). unicode_category_(0x0358, 'Mn'). unicode_category_(0x0359, 'Mn'). unicode_category_(0x035A, 'Mn'). unicode_category_(0x035B, 'Mn'). unicode_category_(0x035C, 'Mn'). unicode_category_(0x035D, 'Mn'). unicode_category_(0x035E, 'Mn'). unicode_category_(0x035F, 'Mn'). unicode_category_(0x0360, 'Mn'). unicode_category_(0x0361, 'Mn'). unicode_category_(0x0362, 'Mn'). unicode_category_(0x0363, 'Mn'). unicode_category_(0x0364, 'Mn'). unicode_category_(0x0365, 'Mn'). unicode_category_(0x0366, 'Mn'). unicode_category_(0x0367, 'Mn'). unicode_category_(0x0368, 'Mn'). unicode_category_(0x0369, 'Mn'). unicode_category_(0x036A, 'Mn'). 
unicode_category_(0x036B, 'Mn'). unicode_category_(0x036C, 'Mn'). unicode_category_(0x036D, 'Mn'). unicode_category_(0x036E, 'Mn'). unicode_category_(0x036F, 'Mn'). unicode_category_(0x0483, 'Mn'). unicode_category_(0x0484, 'Mn'). unicode_category_(0x0485, 'Mn'). unicode_category_(0x0486, 'Mn'). unicode_category_(0x0487, 'Mn'). unicode_category_(0x0591, 'Mn'). unicode_category_(0x0592, 'Mn'). unicode_category_(0x0593, 'Mn'). unicode_category_(0x0594, 'Mn'). unicode_category_(0x0595, 'Mn'). unicode_category_(0x0596, 'Mn'). unicode_category_(0x0597, 'Mn'). unicode_category_(0x0598, 'Mn'). unicode_category_(0x0599, 'Mn'). unicode_category_(0x059A, 'Mn'). unicode_category_(0x059B, 'Mn'). unicode_category_(0x059C, 'Mn'). unicode_category_(0x059D, 'Mn'). unicode_category_(0x059E, 'Mn'). unicode_category_(0x059F, 'Mn'). unicode_category_(0x05A0, 'Mn'). unicode_category_(0x05A1, 'Mn'). unicode_category_(0x05A2, 'Mn'). unicode_category_(0x05A3, 'Mn'). unicode_category_(0x05A4, 'Mn'). unicode_category_(0x05A5, 'Mn'). unicode_category_(0x05A6, 'Mn'). unicode_category_(0x05A7, 'Mn'). unicode_category_(0x05A8, 'Mn'). unicode_category_(0x05A9, 'Mn'). unicode_category_(0x05AA, 'Mn'). unicode_category_(0x05AB, 'Mn'). unicode_category_(0x05AC, 'Mn'). unicode_category_(0x05AD, 'Mn'). unicode_category_(0x05AE, 'Mn'). unicode_category_(0x05AF, 'Mn'). unicode_category_(0x05B0, 'Mn'). unicode_category_(0x05B1, 'Mn'). unicode_category_(0x05B2, 'Mn'). unicode_category_(0x05B3, 'Mn'). unicode_category_(0x05B4, 'Mn'). unicode_category_(0x05B5, 'Mn'). unicode_category_(0x05B6, 'Mn'). unicode_category_(0x05B7, 'Mn'). unicode_category_(0x05B8, 'Mn'). unicode_category_(0x05B9, 'Mn'). unicode_category_(0x05BA, 'Mn'). unicode_category_(0x05BB, 'Mn'). unicode_category_(0x05BC, 'Mn'). unicode_category_(0x05BD, 'Mn'). unicode_category_(0x05BF, 'Mn'). unicode_category_(0x05C1, 'Mn'). unicode_category_(0x05C2, 'Mn'). unicode_category_(0x05C4, 'Mn'). unicode_category_(0x05C5, 'Mn'). 
unicode_category_(0x05C7, 'Mn'). unicode_category_(0x0610, 'Mn'). unicode_category_(0x0611, 'Mn'). unicode_category_(0x0612, 'Mn'). unicode_category_(0x0613, 'Mn'). unicode_category_(0x0614, 'Mn'). unicode_category_(0x0615, 'Mn'). unicode_category_(0x0616, 'Mn'). unicode_category_(0x0617, 'Mn'). unicode_category_(0x0618, 'Mn'). unicode_category_(0x0619, 'Mn'). unicode_category_(0x061A, 'Mn'). unicode_category_(0x064B, 'Mn'). unicode_category_(0x064C, 'Mn'). unicode_category_(0x064D, 'Mn'). unicode_category_(0x064E, 'Mn'). unicode_category_(0x064F, 'Mn'). unicode_category_(0x0650, 'Mn'). unicode_category_(0x0651, 'Mn'). unicode_category_(0x0652, 'Mn'). unicode_category_(0x0653, 'Mn'). unicode_category_(0x0654, 'Mn'). unicode_category_(0x0655, 'Mn'). unicode_category_(0x0656, 'Mn'). unicode_category_(0x0657, 'Mn'). unicode_category_(0x0658, 'Mn'). unicode_category_(0x0659, 'Mn'). unicode_category_(0x065A, 'Mn'). unicode_category_(0x065B, 'Mn'). unicode_category_(0x065C, 'Mn'). unicode_category_(0x065D, 'Mn'). unicode_category_(0x065E, 'Mn'). unicode_category_(0x065F, 'Mn'). unicode_category_(0x0670, 'Mn'). unicode_category_(0x06D6, 'Mn'). unicode_category_(0x06D7, 'Mn'). unicode_category_(0x06D8, 'Mn'). unicode_category_(0x06D9, 'Mn'). unicode_category_(0x06DA, 'Mn'). unicode_category_(0x06DB, 'Mn'). unicode_category_(0x06DC, 'Mn'). unicode_category_(0x06DF, 'Mn'). unicode_category_(0x06E0, 'Mn'). unicode_category_(0x06E1, 'Mn'). unicode_category_(0x06E2, 'Mn'). unicode_category_(0x06E3, 'Mn'). unicode_category_(0x06E4, 'Mn'). unicode_category_(0x06E7, 'Mn'). unicode_category_(0x06E8, 'Mn'). unicode_category_(0x06EA, 'Mn'). unicode_category_(0x06EB, 'Mn'). unicode_category_(0x06EC, 'Mn'). unicode_category_(0x06ED, 'Mn'). unicode_category_(0x0711, 'Mn'). unicode_category_(0x0730, 'Mn'). unicode_category_(0x0731, 'Mn'). unicode_category_(0x0732, 'Mn'). unicode_category_(0x0733, 'Mn'). unicode_category_(0x0734, 'Mn'). unicode_category_(0x0735, 'Mn'). 
unicode_category_(0x0736, 'Mn'). unicode_category_(0x0737, 'Mn'). unicode_category_(0x0738, 'Mn'). unicode_category_(0x0739, 'Mn'). unicode_category_(0x073A, 'Mn'). unicode_category_(0x073B, 'Mn'). unicode_category_(0x073C, 'Mn'). unicode_category_(0x073D, 'Mn'). unicode_category_(0x073E, 'Mn'). unicode_category_(0x073F, 'Mn'). unicode_category_(0x0740, 'Mn'). unicode_category_(0x0741, 'Mn'). unicode_category_(0x0742, 'Mn'). unicode_category_(0x0743, 'Mn'). unicode_category_(0x0744, 'Mn'). unicode_category_(0x0745, 'Mn'). unicode_category_(0x0746, 'Mn'). unicode_category_(0x0747, 'Mn'). unicode_category_(0x0748, 'Mn'). unicode_category_(0x0749, 'Mn'). unicode_category_(0x074A, 'Mn'). unicode_category_(0x07A6, 'Mn'). unicode_category_(0x07A7, 'Mn'). unicode_category_(0x07A8, 'Mn'). unicode_category_(0x07A9, 'Mn'). unicode_category_(0x07AA, 'Mn'). unicode_category_(0x07AB, 'Mn'). unicode_category_(0x07AC, 'Mn'). unicode_category_(0x07AD, 'Mn'). unicode_category_(0x07AE, 'Mn'). unicode_category_(0x07AF, 'Mn'). unicode_category_(0x07B0, 'Mn'). unicode_category_(0x07EB, 'Mn'). unicode_category_(0x07EC, 'Mn'). unicode_category_(0x07ED, 'Mn'). unicode_category_(0x07EE, 'Mn'). unicode_category_(0x07EF, 'Mn'). unicode_category_(0x07F0, 'Mn'). unicode_category_(0x07F1, 'Mn'). unicode_category_(0x07F2, 'Mn'). unicode_category_(0x07F3, 'Mn'). unicode_category_(0x0816, 'Mn'). unicode_category_(0x0817, 'Mn'). unicode_category_(0x0818, 'Mn'). unicode_category_(0x0819, 'Mn'). unicode_category_(0x081B, 'Mn'). unicode_category_(0x081C, 'Mn'). unicode_category_(0x081D, 'Mn'). unicode_category_(0x081E, 'Mn'). unicode_category_(0x081F, 'Mn'). unicode_category_(0x0820, 'Mn'). unicode_category_(0x0821, 'Mn'). unicode_category_(0x0822, 'Mn'). unicode_category_(0x0823, 'Mn'). unicode_category_(0x0825, 'Mn'). unicode_category_(0x0826, 'Mn'). unicode_category_(0x0827, 'Mn'). unicode_category_(0x0829, 'Mn'). unicode_category_(0x082A, 'Mn'). unicode_category_(0x082B, 'Mn'). 
unicode_category_(0x082C, 'Mn'). unicode_category_(0x082D, 'Mn'). unicode_category_(0x0859, 'Mn'). unicode_category_(0x085A, 'Mn'). unicode_category_(0x085B, 'Mn'). unicode_category_(0x08E4, 'Mn'). unicode_category_(0x08E5, 'Mn'). unicode_category_(0x08E6, 'Mn'). unicode_category_(0x08E7, 'Mn'). unicode_category_(0x08E8, 'Mn'). unicode_category_(0x08E9, 'Mn'). unicode_category_(0x08EA, 'Mn'). unicode_category_(0x08EB, 'Mn'). unicode_category_(0x08EC, 'Mn'). unicode_category_(0x08ED, 'Mn'). unicode_category_(0x08EE, 'Mn'). unicode_category_(0x08EF, 'Mn'). unicode_category_(0x08F0, 'Mn'). unicode_category_(0x08F1, 'Mn'). unicode_category_(0x08F2, 'Mn'). unicode_category_(0x08F3, 'Mn'). unicode_category_(0x08F4, 'Mn'). unicode_category_(0x08F5, 'Mn'). unicode_category_(0x08F6, 'Mn'). unicode_category_(0x08F7, 'Mn'). unicode_category_(0x08F8, 'Mn'). unicode_category_(0x08F9, 'Mn'). unicode_category_(0x08FA, 'Mn'). unicode_category_(0x08FB, 'Mn'). unicode_category_(0x08FC, 'Mn'). unicode_category_(0x08FD, 'Mn'). unicode_category_(0x08FE, 'Mn'). unicode_category_(0x0900, 'Mn'). unicode_category_(0x0901, 'Mn'). unicode_category_(0x0902, 'Mn'). unicode_category_(0x093A, 'Mn'). unicode_category_(0x093C, 'Mn'). unicode_category_(0x0941, 'Mn'). unicode_category_(0x0942, 'Mn'). unicode_category_(0x0943, 'Mn'). unicode_category_(0x0944, 'Mn'). unicode_category_(0x0945, 'Mn'). unicode_category_(0x0946, 'Mn'). unicode_category_(0x0947, 'Mn'). unicode_category_(0x0948, 'Mn'). unicode_category_(0x094D, 'Mn'). unicode_category_(0x0951, 'Mn'). unicode_category_(0x0952, 'Mn'). unicode_category_(0x0953, 'Mn'). unicode_category_(0x0954, 'Mn'). unicode_category_(0x0955, 'Mn'). unicode_category_(0x0956, 'Mn'). unicode_category_(0x0957, 'Mn'). unicode_category_(0x0962, 'Mn'). unicode_category_(0x0963, 'Mn'). unicode_category_(0x0981, 'Mn'). unicode_category_(0x09BC, 'Mn'). unicode_category_(0x09C1, 'Mn'). unicode_category_(0x09C2, 'Mn'). unicode_category_(0x09C3, 'Mn'). 
unicode_category_(0x09C4, 'Mn'). unicode_category_(0x09CD, 'Mn'). unicode_category_(0x09E2, 'Mn'). unicode_category_(0x09E3, 'Mn'). unicode_category_(0x0A01, 'Mn'). unicode_category_(0x0A02, 'Mn'). unicode_category_(0x0A3C, 'Mn'). unicode_category_(0x0A41, 'Mn'). unicode_category_(0x0A42, 'Mn'). unicode_category_(0x0A47, 'Mn'). unicode_category_(0x0A48, 'Mn'). unicode_category_(0x0A4B, 'Mn'). unicode_category_(0x0A4C, 'Mn'). unicode_category_(0x0A4D, 'Mn'). unicode_category_(0x0A51, 'Mn'). unicode_category_(0x0A70, 'Mn'). unicode_category_(0x0A71, 'Mn'). unicode_category_(0x0A75, 'Mn'). unicode_category_(0x0A81, 'Mn'). unicode_category_(0x0A82, 'Mn'). unicode_category_(0x0ABC, 'Mn'). unicode_category_(0x0AC1, 'Mn'). unicode_category_(0x0AC2, 'Mn'). unicode_category_(0x0AC3, 'Mn'). unicode_category_(0x0AC4, 'Mn'). unicode_category_(0x0AC5, 'Mn'). unicode_category_(0x0AC7, 'Mn'). unicode_category_(0x0AC8, 'Mn'). unicode_category_(0x0ACD, 'Mn'). unicode_category_(0x0AE2, 'Mn'). unicode_category_(0x0AE3, 'Mn'). unicode_category_(0x0B01, 'Mn'). unicode_category_(0x0B3C, 'Mn'). unicode_category_(0x0B3F, 'Mn'). unicode_category_(0x0B41, 'Mn'). unicode_category_(0x0B42, 'Mn'). unicode_category_(0x0B43, 'Mn'). unicode_category_(0x0B44, 'Mn'). unicode_category_(0x0B4D, 'Mn'). unicode_category_(0x0B56, 'Mn'). unicode_category_(0x0B62, 'Mn'). unicode_category_(0x0B63, 'Mn'). unicode_category_(0x0B82, 'Mn'). unicode_category_(0x0BC0, 'Mn'). unicode_category_(0x0BCD, 'Mn'). unicode_category_(0x0C3E, 'Mn'). unicode_category_(0x0C3F, 'Mn'). unicode_category_(0x0C40, 'Mn'). unicode_category_(0x0C46, 'Mn'). unicode_category_(0x0C47, 'Mn'). unicode_category_(0x0C48, 'Mn'). unicode_category_(0x0C4A, 'Mn'). unicode_category_(0x0C4B, 'Mn'). unicode_category_(0x0C4C, 'Mn'). unicode_category_(0x0C4D, 'Mn'). unicode_category_(0x0C55, 'Mn'). unicode_category_(0x0C56, 'Mn'). unicode_category_(0x0C62, 'Mn'). unicode_category_(0x0C63, 'Mn'). unicode_category_(0x0CBC, 'Mn'). 
unicode_category_(0x0CBF, 'Mn'). unicode_category_(0x0CC6, 'Mn'). unicode_category_(0x0CCC, 'Mn'). unicode_category_(0x0CCD, 'Mn'). unicode_category_(0x0CE2, 'Mn'). unicode_category_(0x0CE3, 'Mn'). unicode_category_(0x0D41, 'Mn'). unicode_category_(0x0D42, 'Mn'). unicode_category_(0x0D43, 'Mn'). unicode_category_(0x0D44, 'Mn'). unicode_category_(0x0D4D, 'Mn'). unicode_category_(0x0D62, 'Mn'). unicode_category_(0x0D63, 'Mn'). unicode_category_(0x0DCA, 'Mn'). unicode_category_(0x0DD2, 'Mn'). unicode_category_(0x0DD3, 'Mn'). unicode_category_(0x0DD4, 'Mn'). unicode_category_(0x0DD6, 'Mn'). unicode_category_(0x0E31, 'Mn'). unicode_category_(0x0E34, 'Mn'). unicode_category_(0x0E35, 'Mn'). unicode_category_(0x0E36, 'Mn'). unicode_category_(0x0E37, 'Mn'). unicode_category_(0x0E38, 'Mn'). unicode_category_(0x0E39, 'Mn'). unicode_category_(0x0E3A, 'Mn'). unicode_category_(0x0E47, 'Mn'). unicode_category_(0x0E48, 'Mn'). unicode_category_(0x0E49, 'Mn'). unicode_category_(0x0E4A, 'Mn'). unicode_category_(0x0E4B, 'Mn'). unicode_category_(0x0E4C, 'Mn'). unicode_category_(0x0E4D, 'Mn'). unicode_category_(0x0E4E, 'Mn'). unicode_category_(0x0EB1, 'Mn'). unicode_category_(0x0EB4, 'Mn'). unicode_category_(0x0EB5, 'Mn'). unicode_category_(0x0EB6, 'Mn'). unicode_category_(0x0EB7, 'Mn'). unicode_category_(0x0EB8, 'Mn'). unicode_category_(0x0EB9, 'Mn'). unicode_category_(0x0EBB, 'Mn'). unicode_category_(0x0EBC, 'Mn'). unicode_category_(0x0EC8, 'Mn'). unicode_category_(0x0EC9, 'Mn'). unicode_category_(0x0ECA, 'Mn'). unicode_category_(0x0ECB, 'Mn'). unicode_category_(0x0ECC, 'Mn'). unicode_category_(0x0ECD, 'Mn'). unicode_category_(0x0F18, 'Mn'). unicode_category_(0x0F19, 'Mn'). unicode_category_(0x0F35, 'Mn'). unicode_category_(0x0F37, 'Mn'). unicode_category_(0x0F39, 'Mn'). unicode_category_(0x0F71, 'Mn'). unicode_category_(0x0F72, 'Mn'). unicode_category_(0x0F73, 'Mn'). unicode_category_(0x0F74, 'Mn'). unicode_category_(0x0F75, 'Mn'). unicode_category_(0x0F76, 'Mn'). 
unicode_category_(0x0F77, 'Mn'). unicode_category_(0x0F78, 'Mn'). unicode_category_(0x0F79, 'Mn'). unicode_category_(0x0F7A, 'Mn'). unicode_category_(0x0F7B, 'Mn'). unicode_category_(0x0F7C, 'Mn'). unicode_category_(0x0F7D, 'Mn'). unicode_category_(0x0F7E, 'Mn'). unicode_category_(0x0F80, 'Mn'). unicode_category_(0x0F81, 'Mn'). unicode_category_(0x0F82, 'Mn'). unicode_category_(0x0F83, 'Mn'). unicode_category_(0x0F84, 'Mn'). unicode_category_(0x0F86, 'Mn'). unicode_category_(0x0F87, 'Mn'). unicode_category_(0x0F8D, 'Mn'). unicode_category_(0x0F8E, 'Mn'). unicode_category_(0x0F8F, 'Mn'). unicode_category_(0x0F90, 'Mn'). unicode_category_(0x0F91, 'Mn'). unicode_category_(0x0F92, 'Mn'). unicode_category_(0x0F93, 'Mn'). unicode_category_(0x0F94, 'Mn'). unicode_category_(0x0F95, 'Mn'). unicode_category_(0x0F96, 'Mn'). unicode_category_(0x0F97, 'Mn'). unicode_category_(0x0F99, 'Mn'). unicode_category_(0x0F9A, 'Mn'). unicode_category_(0x0F9B, 'Mn'). unicode_category_(0x0F9C, 'Mn'). unicode_category_(0x0F9D, 'Mn'). unicode_category_(0x0F9E, 'Mn'). unicode_category_(0x0F9F, 'Mn'). unicode_category_(0x0FA0, 'Mn'). unicode_category_(0x0FA1, 'Mn'). unicode_category_(0x0FA2, 'Mn'). unicode_category_(0x0FA3, 'Mn'). unicode_category_(0x0FA4, 'Mn'). unicode_category_(0x0FA5, 'Mn'). unicode_category_(0x0FA6, 'Mn'). unicode_category_(0x0FA7, 'Mn'). unicode_category_(0x0FA8, 'Mn'). unicode_category_(0x0FA9, 'Mn'). unicode_category_(0x0FAA, 'Mn'). unicode_category_(0x0FAB, 'Mn'). unicode_category_(0x0FAC, 'Mn'). unicode_category_(0x0FAD, 'Mn'). unicode_category_(0x0FAE, 'Mn'). unicode_category_(0x0FAF, 'Mn'). unicode_category_(0x0FB0, 'Mn'). unicode_category_(0x0FB1, 'Mn'). unicode_category_(0x0FB2, 'Mn'). unicode_category_(0x0FB3, 'Mn'). unicode_category_(0x0FB4, 'Mn'). unicode_category_(0x0FB5, 'Mn'). unicode_category_(0x0FB6, 'Mn'). unicode_category_(0x0FB7, 'Mn'). unicode_category_(0x0FB8, 'Mn'). unicode_category_(0x0FB9, 'Mn'). unicode_category_(0x0FBA, 'Mn'). 
unicode_category_(0x0FBB, 'Mn'). unicode_category_(0x0FBC, 'Mn'). unicode_category_(0x0FC6, 'Mn'). unicode_category_(0x102D, 'Mn'). unicode_category_(0x102E, 'Mn'). unicode_category_(0x102F, 'Mn'). unicode_category_(0x1030, 'Mn'). unicode_category_(0x1032, 'Mn'). unicode_category_(0x1033, 'Mn'). unicode_category_(0x1034, 'Mn'). unicode_category_(0x1035, 'Mn'). unicode_category_(0x1036, 'Mn'). unicode_category_(0x1037, 'Mn'). unicode_category_(0x1039, 'Mn'). unicode_category_(0x103A, 'Mn'). unicode_category_(0x103D, 'Mn'). unicode_category_(0x103E, 'Mn'). unicode_category_(0x1058, 'Mn'). unicode_category_(0x1059, 'Mn'). unicode_category_(0x105E, 'Mn'). unicode_category_(0x105F, 'Mn'). unicode_category_(0x1060, 'Mn'). unicode_category_(0x1071, 'Mn'). unicode_category_(0x1072, 'Mn'). unicode_category_(0x1073, 'Mn'). unicode_category_(0x1074, 'Mn'). unicode_category_(0x1082, 'Mn'). unicode_category_(0x1085, 'Mn'). unicode_category_(0x1086, 'Mn'). unicode_category_(0x108D, 'Mn'). unicode_category_(0x109D, 'Mn'). unicode_category_(0x135D, 'Mn'). unicode_category_(0x135E, 'Mn'). unicode_category_(0x135F, 'Mn'). unicode_category_(0x1712, 'Mn'). unicode_category_(0x1713, 'Mn'). unicode_category_(0x1714, 'Mn'). unicode_category_(0x1732, 'Mn'). unicode_category_(0x1733, 'Mn'). unicode_category_(0x1734, 'Mn'). unicode_category_(0x1752, 'Mn'). unicode_category_(0x1753, 'Mn'). unicode_category_(0x1772, 'Mn'). unicode_category_(0x1773, 'Mn'). unicode_category_(0x17B4, 'Mn'). unicode_category_(0x17B5, 'Mn'). unicode_category_(0x17B7, 'Mn'). unicode_category_(0x17B8, 'Mn'). unicode_category_(0x17B9, 'Mn'). unicode_category_(0x17BA, 'Mn'). unicode_category_(0x17BB, 'Mn'). unicode_category_(0x17BC, 'Mn'). unicode_category_(0x17BD, 'Mn'). unicode_category_(0x17C6, 'Mn'). unicode_category_(0x17C9, 'Mn'). unicode_category_(0x17CA, 'Mn'). unicode_category_(0x17CB, 'Mn'). unicode_category_(0x17CC, 'Mn'). unicode_category_(0x17CD, 'Mn'). unicode_category_(0x17CE, 'Mn'). 
unicode_category_(0x17CF, 'Mn'). unicode_category_(0x17D0, 'Mn'). unicode_category_(0x17D1, 'Mn'). unicode_category_(0x17D2, 'Mn'). unicode_category_(0x17D3, 'Mn'). unicode_category_(0x17DD, 'Mn'). unicode_category_(0x180B, 'Mn'). unicode_category_(0x180C, 'Mn'). unicode_category_(0x180D, 'Mn'). unicode_category_(0x18A9, 'Mn'). unicode_category_(0x1920, 'Mn'). unicode_category_(0x1921, 'Mn'). unicode_category_(0x1922, 'Mn'). unicode_category_(0x1927, 'Mn'). unicode_category_(0x1928, 'Mn'). unicode_category_(0x1932, 'Mn'). unicode_category_(0x1939, 'Mn'). unicode_category_(0x193A, 'Mn'). unicode_category_(0x193B, 'Mn'). unicode_category_(0x1A17, 'Mn'). unicode_category_(0x1A18, 'Mn'). unicode_category_(0x1A56, 'Mn'). unicode_category_(0x1A58, 'Mn'). unicode_category_(0x1A59, 'Mn'). unicode_category_(0x1A5A, 'Mn'). unicode_category_(0x1A5B, 'Mn'). unicode_category_(0x1A5C, 'Mn'). unicode_category_(0x1A5D, 'Mn'). unicode_category_(0x1A5E, 'Mn'). unicode_category_(0x1A60, 'Mn'). unicode_category_(0x1A62, 'Mn'). unicode_category_(0x1A65, 'Mn'). unicode_category_(0x1A66, 'Mn'). unicode_category_(0x1A67, 'Mn'). unicode_category_(0x1A68, 'Mn'). unicode_category_(0x1A69, 'Mn'). unicode_category_(0x1A6A, 'Mn'). unicode_category_(0x1A6B, 'Mn'). unicode_category_(0x1A6C, 'Mn'). unicode_category_(0x1A73, 'Mn'). unicode_category_(0x1A74, 'Mn'). unicode_category_(0x1A75, 'Mn'). unicode_category_(0x1A76, 'Mn'). unicode_category_(0x1A77, 'Mn'). unicode_category_(0x1A78, 'Mn'). unicode_category_(0x1A79, 'Mn'). unicode_category_(0x1A7A, 'Mn'). unicode_category_(0x1A7B, 'Mn'). unicode_category_(0x1A7C, 'Mn'). unicode_category_(0x1A7F, 'Mn'). unicode_category_(0x1B00, 'Mn'). unicode_category_(0x1B01, 'Mn'). unicode_category_(0x1B02, 'Mn'). unicode_category_(0x1B03, 'Mn'). unicode_category_(0x1B34, 'Mn'). unicode_category_(0x1B36, 'Mn'). unicode_category_(0x1B37, 'Mn'). unicode_category_(0x1B38, 'Mn'). unicode_category_(0x1B39, 'Mn'). unicode_category_(0x1B3A, 'Mn'). 
unicode_category_(0x1B3C, 'Mn'). unicode_category_(0x1B42, 'Mn'). unicode_category_(0x1B6B, 'Mn'). unicode_category_(0x1B6C, 'Mn'). unicode_category_(0x1B6D, 'Mn'). unicode_category_(0x1B6E, 'Mn'). unicode_category_(0x1B6F, 'Mn'). unicode_category_(0x1B70, 'Mn'). unicode_category_(0x1B71, 'Mn'). unicode_category_(0x1B72, 'Mn'). unicode_category_(0x1B73, 'Mn'). unicode_category_(0x1B80, 'Mn'). unicode_category_(0x1B81, 'Mn'). unicode_category_(0x1BA2, 'Mn'). unicode_category_(0x1BA3, 'Mn'). unicode_category_(0x1BA4, 'Mn'). unicode_category_(0x1BA5, 'Mn'). unicode_category_(0x1BA8, 'Mn'). unicode_category_(0x1BA9, 'Mn'). unicode_category_(0x1BAB, 'Mn'). unicode_category_(0x1BE6, 'Mn'). unicode_category_(0x1BE8, 'Mn'). unicode_category_(0x1BE9, 'Mn'). unicode_category_(0x1BED, 'Mn'). unicode_category_(0x1BEF, 'Mn'). unicode_category_(0x1BF0, 'Mn'). unicode_category_(0x1BF1, 'Mn'). unicode_category_(0x1C2C, 'Mn'). unicode_category_(0x1C2D, 'Mn'). unicode_category_(0x1C2E, 'Mn'). unicode_category_(0x1C2F, 'Mn'). unicode_category_(0x1C30, 'Mn'). unicode_category_(0x1C31, 'Mn'). unicode_category_(0x1C32, 'Mn'). unicode_category_(0x1C33, 'Mn'). unicode_category_(0x1C36, 'Mn'). unicode_category_(0x1C37, 'Mn'). unicode_category_(0x1CD0, 'Mn'). unicode_category_(0x1CD1, 'Mn'). unicode_category_(0x1CD2, 'Mn'). unicode_category_(0x1CD4, 'Mn'). unicode_category_(0x1CD5, 'Mn'). unicode_category_(0x1CD6, 'Mn'). unicode_category_(0x1CD7, 'Mn'). unicode_category_(0x1CD8, 'Mn'). unicode_category_(0x1CD9, 'Mn'). unicode_category_(0x1CDA, 'Mn'). unicode_category_(0x1CDB, 'Mn'). unicode_category_(0x1CDC, 'Mn'). unicode_category_(0x1CDD, 'Mn'). unicode_category_(0x1CDE, 'Mn'). unicode_category_(0x1CDF, 'Mn'). unicode_category_(0x1CE0, 'Mn'). unicode_category_(0x1CE2, 'Mn'). unicode_category_(0x1CE3, 'Mn'). unicode_category_(0x1CE4, 'Mn'). unicode_category_(0x1CE5, 'Mn'). unicode_category_(0x1CE6, 'Mn'). unicode_category_(0x1CE7, 'Mn'). unicode_category_(0x1CE8, 'Mn'). 
unicode_category_(0x1CED, 'Mn'). unicode_category_(0x1CF4, 'Mn'). unicode_category_(0x1DC0, 'Mn'). unicode_category_(0x1DC1, 'Mn'). unicode_category_(0x1DC2, 'Mn'). unicode_category_(0x1DC3, 'Mn'). unicode_category_(0x1DC4, 'Mn'). unicode_category_(0x1DC5, 'Mn'). unicode_category_(0x1DC6, 'Mn'). unicode_category_(0x1DC7, 'Mn'). unicode_category_(0x1DC8, 'Mn'). unicode_category_(0x1DC9, 'Mn'). unicode_category_(0x1DCA, 'Mn'). unicode_category_(0x1DCB, 'Mn'). unicode_category_(0x1DCC, 'Mn'). unicode_category_(0x1DCD, 'Mn'). unicode_category_(0x1DCE, 'Mn'). unicode_category_(0x1DCF, 'Mn'). unicode_category_(0x1DD0, 'Mn'). unicode_category_(0x1DD1, 'Mn'). unicode_category_(0x1DD2, 'Mn'). unicode_category_(0x1DD3, 'Mn'). unicode_category_(0x1DD4, 'Mn'). unicode_category_(0x1DD5, 'Mn'). unicode_category_(0x1DD6, 'Mn'). unicode_category_(0x1DD7, 'Mn'). unicode_category_(0x1DD8, 'Mn'). unicode_category_(0x1DD9, 'Mn'). unicode_category_(0x1DDA, 'Mn'). unicode_category_(0x1DDB, 'Mn'). unicode_category_(0x1DDC, 'Mn'). unicode_category_(0x1DDD, 'Mn'). unicode_category_(0x1DDE, 'Mn'). unicode_category_(0x1DDF, 'Mn'). unicode_category_(0x1DE0, 'Mn'). unicode_category_(0x1DE1, 'Mn'). unicode_category_(0x1DE2, 'Mn'). unicode_category_(0x1DE3, 'Mn'). unicode_category_(0x1DE4, 'Mn'). unicode_category_(0x1DE5, 'Mn'). unicode_category_(0x1DE6, 'Mn'). unicode_category_(0x1DFC, 'Mn'). unicode_category_(0x1DFD, 'Mn'). unicode_category_(0x1DFE, 'Mn'). unicode_category_(0x1DFF, 'Mn'). unicode_category_(0x20D0, 'Mn'). unicode_category_(0x20D1, 'Mn'). unicode_category_(0x20D2, 'Mn'). unicode_category_(0x20D3, 'Mn'). unicode_category_(0x20D4, 'Mn'). unicode_category_(0x20D5, 'Mn'). unicode_category_(0x20D6, 'Mn'). unicode_category_(0x20D7, 'Mn'). unicode_category_(0x20D8, 'Mn'). unicode_category_(0x20D9, 'Mn'). unicode_category_(0x20DA, 'Mn'). unicode_category_(0x20DB, 'Mn'). unicode_category_(0x20DC, 'Mn'). unicode_category_(0x20E1, 'Mn'). unicode_category_(0x20E5, 'Mn'). 
unicode_category_(0x20E6, 'Mn'). unicode_category_(0x20E7, 'Mn'). unicode_category_(0x20E8, 'Mn'). unicode_category_(0x20E9, 'Mn'). unicode_category_(0x20EA, 'Mn'). unicode_category_(0x20EB, 'Mn'). unicode_category_(0x20EC, 'Mn'). unicode_category_(0x20ED, 'Mn'). unicode_category_(0x20EE, 'Mn'). unicode_category_(0x20EF, 'Mn'). unicode_category_(0x20F0, 'Mn'). unicode_category_(0x2CEF, 'Mn'). unicode_category_(0x2CF0, 'Mn'). unicode_category_(0x2CF1, 'Mn'). unicode_category_(0x2D7F, 'Mn'). unicode_category_(0x2DE0, 'Mn'). unicode_category_(0x2DE1, 'Mn'). unicode_category_(0x2DE2, 'Mn'). unicode_category_(0x2DE3, 'Mn'). unicode_category_(0x2DE4, 'Mn'). unicode_category_(0x2DE5, 'Mn'). unicode_category_(0x2DE6, 'Mn'). unicode_category_(0x2DE7, 'Mn'). unicode_category_(0x2DE8, 'Mn'). unicode_category_(0x2DE9, 'Mn'). unicode_category_(0x2DEA, 'Mn'). unicode_category_(0x2DEB, 'Mn'). unicode_category_(0x2DEC, 'Mn'). unicode_category_(0x2DED, 'Mn'). unicode_category_(0x2DEE, 'Mn'). unicode_category_(0x2DEF, 'Mn'). unicode_category_(0x2DF0, 'Mn'). unicode_category_(0x2DF1, 'Mn'). unicode_category_(0x2DF2, 'Mn'). unicode_category_(0x2DF3, 'Mn'). unicode_category_(0x2DF4, 'Mn'). unicode_category_(0x2DF5, 'Mn'). unicode_category_(0x2DF6, 'Mn'). unicode_category_(0x2DF7, 'Mn'). unicode_category_(0x2DF8, 'Mn'). unicode_category_(0x2DF9, 'Mn'). unicode_category_(0x2DFA, 'Mn'). unicode_category_(0x2DFB, 'Mn'). unicode_category_(0x2DFC, 'Mn'). unicode_category_(0x2DFD, 'Mn'). unicode_category_(0x2DFE, 'Mn'). unicode_category_(0x2DFF, 'Mn'). unicode_category_(0x302A, 'Mn'). unicode_category_(0x302B, 'Mn'). unicode_category_(0x302C, 'Mn'). unicode_category_(0x302D, 'Mn'). unicode_category_(0x3099, 'Mn'). unicode_category_(0x309A, 'Mn'). unicode_category_(0xA66F, 'Mn'). unicode_category_(0xA674, 'Mn'). unicode_category_(0xA675, 'Mn'). unicode_category_(0xA676, 'Mn'). unicode_category_(0xA677, 'Mn'). unicode_category_(0xA678, 'Mn'). unicode_category_(0xA679, 'Mn'). 
unicode_category_(0xA67A, 'Mn'). unicode_category_(0xA67B, 'Mn'). unicode_category_(0xA67C, 'Mn'). unicode_category_(0xA67D, 'Mn'). unicode_category_(0xA69F, 'Mn'). unicode_category_(0xA6F0, 'Mn'). unicode_category_(0xA6F1, 'Mn'). unicode_category_(0xA802, 'Mn'). unicode_category_(0xA806, 'Mn'). unicode_category_(0xA80B, 'Mn'). unicode_category_(0xA825, 'Mn'). unicode_category_(0xA826, 'Mn'). unicode_category_(0xA8C4, 'Mn'). unicode_category_(0xA8E0, 'Mn'). unicode_category_(0xA8E1, 'Mn'). unicode_category_(0xA8E2, 'Mn'). unicode_category_(0xA8E3, 'Mn'). unicode_category_(0xA8E4, 'Mn'). unicode_category_(0xA8E5, 'Mn'). unicode_category_(0xA8E6, 'Mn'). unicode_category_(0xA8E7, 'Mn'). unicode_category_(0xA8E8, 'Mn'). unicode_category_(0xA8E9, 'Mn'). unicode_category_(0xA8EA, 'Mn'). unicode_category_(0xA8EB, 'Mn'). unicode_category_(0xA8EC, 'Mn'). unicode_category_(0xA8ED, 'Mn'). unicode_category_(0xA8EE, 'Mn'). unicode_category_(0xA8EF, 'Mn'). unicode_category_(0xA8F0, 'Mn'). unicode_category_(0xA8F1, 'Mn'). unicode_category_(0xA926, 'Mn'). unicode_category_(0xA927, 'Mn'). unicode_category_(0xA928, 'Mn'). unicode_category_(0xA929, 'Mn'). unicode_category_(0xA92A, 'Mn'). unicode_category_(0xA92B, 'Mn'). unicode_category_(0xA92C, 'Mn'). unicode_category_(0xA92D, 'Mn'). unicode_category_(0xA947, 'Mn'). unicode_category_(0xA948, 'Mn'). unicode_category_(0xA949, 'Mn'). unicode_category_(0xA94A, 'Mn'). unicode_category_(0xA94B, 'Mn'). unicode_category_(0xA94C, 'Mn'). unicode_category_(0xA94D, 'Mn'). unicode_category_(0xA94E, 'Mn'). unicode_category_(0xA94F, 'Mn'). unicode_category_(0xA950, 'Mn'). unicode_category_(0xA951, 'Mn'). unicode_category_(0xA980, 'Mn'). unicode_category_(0xA981, 'Mn'). unicode_category_(0xA982, 'Mn'). unicode_category_(0xA9B3, 'Mn'). unicode_category_(0xA9B6, 'Mn'). unicode_category_(0xA9B7, 'Mn'). unicode_category_(0xA9B8, 'Mn'). unicode_category_(0xA9B9, 'Mn'). unicode_category_(0xA9BC, 'Mn'). unicode_category_(0xAA29, 'Mn'). 
unicode_category_(0xAA2A, 'Mn'). unicode_category_(0xAA2B, 'Mn'). unicode_category_(0xAA2C, 'Mn'). unicode_category_(0xAA2D, 'Mn'). unicode_category_(0xAA2E, 'Mn'). unicode_category_(0xAA31, 'Mn'). unicode_category_(0xAA32, 'Mn'). unicode_category_(0xAA35, 'Mn'). unicode_category_(0xAA36, 'Mn'). unicode_category_(0xAA43, 'Mn'). unicode_category_(0xAA4C, 'Mn'). unicode_category_(0xAAB0, 'Mn'). unicode_category_(0xAAB2, 'Mn'). unicode_category_(0xAAB3, 'Mn'). unicode_category_(0xAAB4, 'Mn'). unicode_category_(0xAAB7, 'Mn'). unicode_category_(0xAAB8, 'Mn'). unicode_category_(0xAABE, 'Mn'). unicode_category_(0xAABF, 'Mn'). unicode_category_(0xAAC1, 'Mn'). unicode_category_(0xAAEC, 'Mn'). unicode_category_(0xAAED, 'Mn'). unicode_category_(0xAAF6, 'Mn'). unicode_category_(0xABE5, 'Mn'). unicode_category_(0xABE8, 'Mn'). unicode_category_(0xABED, 'Mn'). unicode_category_(0xFB1E, 'Mn'). unicode_category_(0xFE00, 'Mn'). unicode_category_(0xFE01, 'Mn'). unicode_category_(0xFE02, 'Mn'). unicode_category_(0xFE03, 'Mn'). unicode_category_(0xFE04, 'Mn'). unicode_category_(0xFE05, 'Mn'). unicode_category_(0xFE06, 'Mn'). unicode_category_(0xFE07, 'Mn'). unicode_category_(0xFE08, 'Mn'). unicode_category_(0xFE09, 'Mn'). unicode_category_(0xFE0A, 'Mn'). unicode_category_(0xFE0B, 'Mn'). unicode_category_(0xFE0C, 'Mn'). unicode_category_(0xFE0D, 'Mn'). unicode_category_(0xFE0E, 'Mn'). unicode_category_(0xFE0F, 'Mn'). unicode_category_(0xFE20, 'Mn'). unicode_category_(0xFE21, 'Mn'). unicode_category_(0xFE22, 'Mn'). unicode_category_(0xFE23, 'Mn'). unicode_category_(0xFE24, 'Mn'). unicode_category_(0xFE25, 'Mn'). unicode_category_(0xFE26, 'Mn'). unicode_category_(0x101FD, 'Mn'). unicode_category_(0x10A01, 'Mn'). unicode_category_(0x10A02, 'Mn'). unicode_category_(0x10A03, 'Mn'). unicode_category_(0x10A05, 'Mn'). unicode_category_(0x10A06, 'Mn'). unicode_category_(0x10A0C, 'Mn'). unicode_category_(0x10A0D, 'Mn'). unicode_category_(0x10A0E, 'Mn'). unicode_category_(0x10A0F, 'Mn'). 
unicode_category_(0x10A38, 'Mn'). unicode_category_(0x10A39, 'Mn'). unicode_category_(0x10A3A, 'Mn'). unicode_category_(0x10A3F, 'Mn'). unicode_category_(0x11001, 'Mn'). unicode_category_(0x11038, 'Mn'). unicode_category_(0x11039, 'Mn'). unicode_category_(0x1103A, 'Mn'). unicode_category_(0x1103B, 'Mn'). unicode_category_(0x1103C, 'Mn'). unicode_category_(0x1103D, 'Mn'). unicode_category_(0x1103E, 'Mn'). unicode_category_(0x1103F, 'Mn'). unicode_category_(0x11040, 'Mn'). unicode_category_(0x11041, 'Mn'). unicode_category_(0x11042, 'Mn'). unicode_category_(0x11043, 'Mn'). unicode_category_(0x11044, 'Mn'). unicode_category_(0x11045, 'Mn'). unicode_category_(0x11046, 'Mn'). unicode_category_(0x11080, 'Mn'). unicode_category_(0x11081, 'Mn'). unicode_category_(0x110B3, 'Mn'). unicode_category_(0x110B4, 'Mn'). unicode_category_(0x110B5, 'Mn'). unicode_category_(0x110B6, 'Mn'). unicode_category_(0x110B9, 'Mn'). unicode_category_(0x110BA, 'Mn'). unicode_category_(0x11100, 'Mn'). unicode_category_(0x11101, 'Mn'). unicode_category_(0x11102, 'Mn'). unicode_category_(0x11127, 'Mn'). unicode_category_(0x11128, 'Mn'). unicode_category_(0x11129, 'Mn'). unicode_category_(0x1112A, 'Mn'). unicode_category_(0x1112B, 'Mn'). unicode_category_(0x1112D, 'Mn'). unicode_category_(0x1112E, 'Mn'). unicode_category_(0x1112F, 'Mn'). unicode_category_(0x11130, 'Mn'). unicode_category_(0x11131, 'Mn'). unicode_category_(0x11132, 'Mn'). unicode_category_(0x11133, 'Mn'). unicode_category_(0x11134, 'Mn'). unicode_category_(0x11180, 'Mn'). unicode_category_(0x11181, 'Mn'). unicode_category_(0x111B6, 'Mn'). unicode_category_(0x111B7, 'Mn'). unicode_category_(0x111B8, 'Mn'). unicode_category_(0x111B9, 'Mn'). unicode_category_(0x111BA, 'Mn'). unicode_category_(0x111BB, 'Mn'). unicode_category_(0x111BC, 'Mn'). unicode_category_(0x111BD, 'Mn'). unicode_category_(0x111BE, 'Mn'). unicode_category_(0x116AB, 'Mn'). unicode_category_(0x116AD, 'Mn'). unicode_category_(0x116B0, 'Mn'). 
unicode_category_(0x116B1, 'Mn'). unicode_category_(0x116B2, 'Mn'). unicode_category_(0x116B3, 'Mn'). unicode_category_(0x116B4, 'Mn'). unicode_category_(0x116B5, 'Mn'). unicode_category_(0x116B7, 'Mn'). unicode_category_(0x16F8F, 'Mn'). unicode_category_(0x16F90, 'Mn'). unicode_category_(0x16F91, 'Mn'). unicode_category_(0x16F92, 'Mn'). unicode_category_(0x1D167, 'Mn'). unicode_category_(0x1D168, 'Mn'). unicode_category_(0x1D169, 'Mn'). unicode_category_(0x1D17B, 'Mn'). unicode_category_(0x1D17C, 'Mn'). unicode_category_(0x1D17D, 'Mn'). unicode_category_(0x1D17E, 'Mn'). unicode_category_(0x1D17F, 'Mn'). unicode_category_(0x1D180, 'Mn'). unicode_category_(0x1D181, 'Mn'). unicode_category_(0x1D182, 'Mn'). unicode_category_(0x1D185, 'Mn'). unicode_category_(0x1D186, 'Mn'). unicode_category_(0x1D187, 'Mn'). unicode_category_(0x1D188, 'Mn'). unicode_category_(0x1D189, 'Mn'). unicode_category_(0x1D18A, 'Mn'). unicode_category_(0x1D18B, 'Mn'). unicode_category_(0x1D1AA, 'Mn'). unicode_category_(0x1D1AB, 'Mn'). unicode_category_(0x1D1AC, 'Mn'). unicode_category_(0x1D1AD, 'Mn'). unicode_category_(0x1D242, 'Mn'). unicode_category_(0x1D243, 'Mn'). unicode_category_(0x1D244, 'Mn'). unicode_category_(0xE0100, 'Mn'). unicode_category_(0xE0101, 'Mn'). unicode_category_(0xE0102, 'Mn'). unicode_category_(0xE0103, 'Mn'). unicode_category_(0xE0104, 'Mn'). unicode_category_(0xE0105, 'Mn'). unicode_category_(0xE0106, 'Mn'). unicode_category_(0xE0107, 'Mn'). unicode_category_(0xE0108, 'Mn'). unicode_category_(0xE0109, 'Mn'). unicode_category_(0xE010A, 'Mn'). unicode_category_(0xE010B, 'Mn'). unicode_category_(0xE010C, 'Mn'). unicode_category_(0xE010D, 'Mn'). unicode_category_(0xE010E, 'Mn'). unicode_category_(0xE010F, 'Mn'). unicode_category_(0xE0110, 'Mn'). unicode_category_(0xE0111, 'Mn'). unicode_category_(0xE0112, 'Mn'). unicode_category_(0xE0113, 'Mn'). unicode_category_(0xE0114, 'Mn'). unicode_category_(0xE0115, 'Mn'). unicode_category_(0xE0116, 'Mn'). 
unicode_category_(0xE0117, 'Mn'). unicode_category_(0xE0118, 'Mn'). unicode_category_(0xE0119, 'Mn'). unicode_category_(0xE011A, 'Mn'). unicode_category_(0xE011B, 'Mn'). unicode_category_(0xE011C, 'Mn'). unicode_category_(0xE011D, 'Mn'). unicode_category_(0xE011E, 'Mn'). unicode_category_(0xE011F, 'Mn'). unicode_category_(0xE0120, 'Mn'). unicode_category_(0xE0121, 'Mn'). unicode_category_(0xE0122, 'Mn'). unicode_category_(0xE0123, 'Mn'). unicode_category_(0xE0124, 'Mn'). unicode_category_(0xE0125, 'Mn'). unicode_category_(0xE0126, 'Mn'). unicode_category_(0xE0127, 'Mn'). unicode_category_(0xE0128, 'Mn'). unicode_category_(0xE0129, 'Mn'). unicode_category_(0xE012A, 'Mn'). unicode_category_(0xE012B, 'Mn'). unicode_category_(0xE012C, 'Mn'). unicode_category_(0xE012D, 'Mn'). unicode_category_(0xE012E, 'Mn'). unicode_category_(0xE012F, 'Mn'). unicode_category_(0xE0130, 'Mn'). unicode_category_(0xE0131, 'Mn'). unicode_category_(0xE0132, 'Mn'). unicode_category_(0xE0133, 'Mn'). unicode_category_(0xE0134, 'Mn'). unicode_category_(0xE0135, 'Mn'). unicode_category_(0xE0136, 'Mn'). unicode_category_(0xE0137, 'Mn'). unicode_category_(0xE0138, 'Mn'). unicode_category_(0xE0139, 'Mn'). unicode_category_(0xE013A, 'Mn'). unicode_category_(0xE013B, 'Mn'). unicode_category_(0xE013C, 'Mn'). unicode_category_(0xE013D, 'Mn'). unicode_category_(0xE013E, 'Mn'). unicode_category_(0xE013F, 'Mn'). unicode_category_(0xE0140, 'Mn'). unicode_category_(0xE0141, 'Mn'). unicode_category_(0xE0142, 'Mn'). unicode_category_(0xE0143, 'Mn'). unicode_category_(0xE0144, 'Mn'). unicode_category_(0xE0145, 'Mn'). unicode_category_(0xE0146, 'Mn'). unicode_category_(0xE0147, 'Mn'). unicode_category_(0xE0148, 'Mn'). unicode_category_(0xE0149, 'Mn'). unicode_category_(0xE014A, 'Mn'). unicode_category_(0xE014B, 'Mn'). unicode_category_(0xE014C, 'Mn'). unicode_category_(0xE014D, 'Mn'). unicode_category_(0xE014E, 'Mn'). unicode_category_(0xE014F, 'Mn'). unicode_category_(0xE0150, 'Mn'). 
unicode_category_(0xE0151, 'Mn'). unicode_category_(0xE0152, 'Mn'). unicode_category_(0xE0153, 'Mn'). unicode_category_(0xE0154, 'Mn'). unicode_category_(0xE0155, 'Mn'). unicode_category_(0xE0156, 'Mn'). unicode_category_(0xE0157, 'Mn'). unicode_category_(0xE0158, 'Mn'). unicode_category_(0xE0159, 'Mn'). unicode_category_(0xE015A, 'Mn'). unicode_category_(0xE015B, 'Mn'). unicode_category_(0xE015C, 'Mn'). unicode_category_(0xE015D, 'Mn'). unicode_category_(0xE015E, 'Mn'). unicode_category_(0xE015F, 'Mn'). unicode_category_(0xE0160, 'Mn'). unicode_category_(0xE0161, 'Mn'). unicode_category_(0xE0162, 'Mn'). unicode_category_(0xE0163, 'Mn'). unicode_category_(0xE0164, 'Mn'). unicode_category_(0xE0165, 'Mn'). unicode_category_(0xE0166, 'Mn'). unicode_category_(0xE0167, 'Mn'). unicode_category_(0xE0168, 'Mn'). unicode_category_(0xE0169, 'Mn'). unicode_category_(0xE016A, 'Mn'). unicode_category_(0xE016B, 'Mn'). unicode_category_(0xE016C, 'Mn'). unicode_category_(0xE016D, 'Mn'). unicode_category_(0xE016E, 'Mn'). unicode_category_(0xE016F, 'Mn'). unicode_category_(0xE0170, 'Mn'). unicode_category_(0xE0171, 'Mn'). unicode_category_(0xE0172, 'Mn'). unicode_category_(0xE0173, 'Mn'). unicode_category_(0xE0174, 'Mn'). unicode_category_(0xE0175, 'Mn'). unicode_category_(0xE0176, 'Mn'). unicode_category_(0xE0177, 'Mn'). unicode_category_(0xE0178, 'Mn'). unicode_category_(0xE0179, 'Mn'). unicode_category_(0xE017A, 'Mn'). unicode_category_(0xE017B, 'Mn'). unicode_category_(0xE017C, 'Mn'). unicode_category_(0xE017D, 'Mn'). unicode_category_(0xE017E, 'Mn'). unicode_category_(0xE017F, 'Mn'). unicode_category_(0xE0180, 'Mn'). unicode_category_(0xE0181, 'Mn'). unicode_category_(0xE0182, 'Mn'). unicode_category_(0xE0183, 'Mn'). unicode_category_(0xE0184, 'Mn'). unicode_category_(0xE0185, 'Mn'). unicode_category_(0xE0186, 'Mn'). unicode_category_(0xE0187, 'Mn'). unicode_category_(0xE0188, 'Mn'). unicode_category_(0xE0189, 'Mn'). unicode_category_(0xE018A, 'Mn'). 
unicode_category_(0xE018B, 'Mn'). unicode_category_(0xE018C, 'Mn'). unicode_category_(0xE018D, 'Mn'). unicode_category_(0xE018E, 'Mn'). unicode_category_(0xE018F, 'Mn'). unicode_category_(0xE0190, 'Mn'). unicode_category_(0xE0191, 'Mn'). unicode_category_(0xE0192, 'Mn'). unicode_category_(0xE0193, 'Mn'). unicode_category_(0xE0194, 'Mn'). unicode_category_(0xE0195, 'Mn'). unicode_category_(0xE0196, 'Mn'). unicode_category_(0xE0197, 'Mn'). unicode_category_(0xE0198, 'Mn'). unicode_category_(0xE0199, 'Mn'). unicode_category_(0xE019A, 'Mn'). unicode_category_(0xE019B, 'Mn'). unicode_category_(0xE019C, 'Mn'). unicode_category_(0xE019D, 'Mn'). unicode_category_(0xE019E, 'Mn'). unicode_category_(0xE019F, 'Mn'). unicode_category_(0xE01A0, 'Mn'). unicode_category_(0xE01A1, 'Mn'). unicode_category_(0xE01A2, 'Mn'). unicode_category_(0xE01A3, 'Mn'). unicode_category_(0xE01A4, 'Mn'). unicode_category_(0xE01A5, 'Mn'). unicode_category_(0xE01A6, 'Mn'). unicode_category_(0xE01A7, 'Mn'). unicode_category_(0xE01A8, 'Mn'). unicode_category_(0xE01A9, 'Mn'). unicode_category_(0xE01AA, 'Mn'). unicode_category_(0xE01AB, 'Mn'). unicode_category_(0xE01AC, 'Mn'). unicode_category_(0xE01AD, 'Mn'). unicode_category_(0xE01AE, 'Mn'). unicode_category_(0xE01AF, 'Mn'). unicode_category_(0xE01B0, 'Mn'). unicode_category_(0xE01B1, 'Mn'). unicode_category_(0xE01B2, 'Mn'). unicode_category_(0xE01B3, 'Mn'). unicode_category_(0xE01B4, 'Mn'). unicode_category_(0xE01B5, 'Mn'). unicode_category_(0xE01B6, 'Mn'). unicode_category_(0xE01B7, 'Mn'). unicode_category_(0xE01B8, 'Mn'). unicode_category_(0xE01B9, 'Mn'). unicode_category_(0xE01BA, 'Mn'). unicode_category_(0xE01BB, 'Mn'). unicode_category_(0xE01BC, 'Mn'). unicode_category_(0xE01BD, 'Mn'). unicode_category_(0xE01BE, 'Mn'). unicode_category_(0xE01BF, 'Mn'). unicode_category_(0xE01C0, 'Mn'). unicode_category_(0xE01C1, 'Mn'). unicode_category_(0xE01C2, 'Mn'). unicode_category_(0xE01C3, 'Mn'). unicode_category_(0xE01C4, 'Mn'). 
unicode_category_(0xE01C5, 'Mn'). unicode_category_(0xE01C6, 'Mn'). unicode_category_(0xE01C7, 'Mn'). unicode_category_(0xE01C8, 'Mn'). unicode_category_(0xE01C9, 'Mn'). unicode_category_(0xE01CA, 'Mn'). unicode_category_(0xE01CB, 'Mn'). unicode_category_(0xE01CC, 'Mn'). unicode_category_(0xE01CD, 'Mn'). unicode_category_(0xE01CE, 'Mn'). unicode_category_(0xE01CF, 'Mn'). unicode_category_(0xE01D0, 'Mn'). unicode_category_(0xE01D1, 'Mn'). unicode_category_(0xE01D2, 'Mn'). unicode_category_(0xE01D3, 'Mn'). unicode_category_(0xE01D4, 'Mn'). unicode_category_(0xE01D5, 'Mn'). unicode_category_(0xE01D6, 'Mn'). unicode_category_(0xE01D7, 'Mn'). unicode_category_(0xE01D8, 'Mn'). unicode_category_(0xE01D9, 'Mn'). unicode_category_(0xE01DA, 'Mn'). unicode_category_(0xE01DB, 'Mn'). unicode_category_(0xE01DC, 'Mn'). unicode_category_(0xE01DD, 'Mn'). unicode_category_(0xE01DE, 'Mn'). unicode_category_(0xE01DF, 'Mn'). unicode_category_(0xE01E0, 'Mn'). unicode_category_(0xE01E1, 'Mn'). unicode_category_(0xE01E2, 'Mn'). unicode_category_(0xE01E3, 'Mn'). unicode_category_(0xE01E4, 'Mn'). unicode_category_(0xE01E5, 'Mn'). unicode_category_(0xE01E6, 'Mn'). unicode_category_(0xE01E7, 'Mn'). unicode_category_(0xE01E8, 'Mn'). unicode_category_(0xE01E9, 'Mn'). unicode_category_(0xE01EA, 'Mn'). unicode_category_(0xE01EB, 'Mn'). unicode_category_(0xE01EC, 'Mn'). unicode_category_(0xE01ED, 'Mn'). unicode_category_(0xE01EE, 'Mn'). unicode_category_(0xE01EF, 'Mn').
LogtalkDotOrg/logtalk3
library/unicode_data/unicode_categories/unicode_category_mn_mark_nonspacing.pl
Perl
apache-2.0
43,038
#!/usr/bin/perl

# FastCGI wrapper: a small standalone daemon that listens for FastCGI
# requests on a local TCP socket and runs the requested CGI program as a
# child process, relaying the POST body to its stdin and its output back
# to the web server verbatim.
#
# This file was retrieved from
# https://www.ruby-forum.com/attachment/1583/fastcgi-wrapper.pl
# on May 31, 2014 and modified slightly for use in haskell.org infra.
#
# NOTE(review): this script predates `use strict`/`use warnings`; it relies
# on package globals ($socket, $request, %req_params) shared between subs.

use FCGI;
use Socket;
use POSIX qw(setsid);

# syscall.ph provides the numeric syscall constants (SYS_dup2) used below.
require 'syscall.ph';

# NOTE(review): &daemonize is called before its definition with the & form,
# which also bypasses the (empty) prototype declared on the sub.
&daemonize;

# This keeps the program alive or something after exec'ing perl scripts
END() { }
BEGIN() { }

# Override the global exit() so that code run inside this process cannot
# terminate the daemon; the replacement dies with a recognizable "fakeexit"
# message instead, which the eval/if below verifies works as intended.
*CORE::GLOBAL::exit = sub { die "fakeexit\nrc=".shift()."\n"; };
eval q{exit};
if ($@) {
    exit unless $@ =~ /^fakeexit/;
};

&main;

# Detach from the controlling terminal and become a daemon: chdir to /,
# fork and let the parent exit, start a new session, clear the umask.
sub daemonize() {
    chdir '/'               or die "Can't chdir to /: $!";
    defined(my $pid = fork) or die "Can't fork: $!";
    exit if $pid;
    setsid                  or die "Can't start a new session: $!";
    umask 0;
}

# Open the FastCGI listen socket, service requests until Accept() fails,
# then close the socket. All state is kept in package globals so that
# request_loop() can see it.
sub main {
    $socket = FCGI::OpenSocket( "127.0.0.1:8999", 10 ); #use IP sockets
    $request = FCGI::Request( \*STDIN, \*STDOUT, \*STDERR, \%req_params, $socket );
    if ($request) { request_loop()};
    FCGI::CloseSocket( $socket );
}

# Accept loop: for each FastCGI request, buffer any POST body sent by the
# web server, then fork and exec the CGI program named by SCRIPT_FILENAME,
# feeding the body to its stdin and streaming its stdout back.
sub request_loop {
    while( $request->Accept() >= 0 ) {

        #processing any STDIN input from WebServer (for CGI-POST actions)
        $stdin_passthrough ='';
        # NOTE(review): CONTENT_LENGTH may be absent for non-POST requests;
        # the `0 +` coercion then warns under `use warnings` (not enabled here).
        $req_len = 0 + $req_params{'CONTENT_LENGTH'};
        if (($req_params{'REQUEST_METHOD'} eq 'POST') && ($req_len != 0) ){
            my $bytes_read = 0;
            # read() may return short counts, so loop until the full
            # CONTENT_LENGTH has arrived (or EOF/error ends the loop).
            while ($bytes_read < $req_len) {
                my $data = '';
                my $bytes = read(STDIN, $data, ($req_len - $bytes_read));
                last if ($bytes == 0 || !defined($bytes));
                $stdin_passthrough .= $data;
                $bytes_read += $bytes;
            }
        }

        #running the cgi app
        if ( (-x $req_params{SCRIPT_FILENAME}) &&  #can I execute this?
             (-s $req_params{SCRIPT_FILENAME}) &&  #Is this file empty?
             (-r $req_params{SCRIPT_FILENAME})     #can I read this file?
        ){
            # Pipe used to feed the buffered POST body to the child's stdin.
            # NOTE(review): pipe()'s return value is not checked.
            pipe(CHILD_RD, PARENT_WR);
            # open(..., "-|") forks; the parent receives a read handle on
            # the child's stdout, the child continues with $pid == 0.
            my $pid = open(KID_TO_READ, "-|");
            unless(defined($pid)) {
                print("Content-type: text/plain\r\n\r\n");
                print "Error: CGI app returned no output - Executing $req_params{SCRIPT_FILENAME} failed !\n";
                next;
            }
            if ($pid > 0) {
                # Parent: send the request body, then relay the child's
                # output to the web server and reap the child.
                close(CHILD_RD);
                print PARENT_WR $stdin_passthrough;
                close(PARENT_WR);
                while(my $s = <KID_TO_READ>) { print $s; }
                close KID_TO_READ;
                waitpid($pid, 0);
            } else {
                # Child: export the FastCGI parameters as the CGI
                # environment expected by the target program.
                foreach $key ( keys %req_params){
                    $ENV{$key} = $req_params{$key};
                }

                # cd to the script's local directory
                if ($req_params{SCRIPT_FILENAME} =~ /^(.*)\/[^\/]+$/) {
                    chdir $1;
                }

                close(PARENT_WR);
                close(STDIN);
                # Make the pipe's read end the child's fd 0 (stdin) via a
                # raw dup2 syscall; the commented alternatives were the
                # portable ways of doing the same thing.
                #fcntl(CHILD_RD, F_DUPFD, 0);
                syscall(&SYS_dup2, fileno(CHILD_RD), 0);
                #open(STDIN, "<&CHILD_RD");
                # exec with a single scalar argument; since SCRIPT_FILENAME
                # comes from the web server config this avoids a shell only
                # when the path contains no shell metacharacters — confirm
                # upstream that nginx passes a vetted absolute path here.
                exec($req_params{SCRIPT_FILENAME});
                die("exec failed");
            }
        } else {
            print("Content-type: text/plain\r\n\r\n");
            print "Error: No such CGI app - $req_params{SCRIPT_FILENAME} may not exist or is not executable by this process.\n";
        }
    }
}
haskell-infra/ansible
centos/roles/nagios/files/fastcgi-wrapper.pl
Perl
bsd-2-clause
3,241
package Sisimai::Reason::Suspend;
use feature ':5.10';
use strict;
use warnings;

# Reason name and a short human-readable explanation for this bounce class.
sub text        { 'suspend' }
sub description { 'Email rejected due to a recipient account is being suspended' }

sub match {
    # Decide whether the given error message text indicates a suspended
    # recipient account.
    # @param [String] argv1 Error message string (expected lower-cased)
    # @return [Integer] 1: one of the known substrings appears
    #                   0: no known substring appears
    # @since v4.0.0
    my ($class, $argv1) = @_;
    return undef unless defined $argv1;

    # Substrings collected from real bounce messages of various providers.
    state $index = [
        ' is currently suspended',
        ' temporary locked',
        'boite du destinataire archivee',
        'email account that you tried to reach is disabled',
        'invalid/inactive user',
        # http://service.mail.qq.com/cgi-bin/help?subtype=1&&id=20022&&no=1000742
        'is a deactivated mailbox',
        'mailbox currently suspended',
        'mailbox unavailable or access denied',
        'recipient rejected: temporarily inactive',
        'recipient suspend the service',
        'this account has been disabled or discontinued',
        'user suspended',   # http://mail.163.com/help/help_spam_16.htm
        'vdelivermail: account is locked email bounced',
    ];

    for my $word ( @$index ) {
        return 1 if index($argv1, $word) > -1;
    }
    return 0;
}

sub true {
    # Check whether the envelope recipient's mailbox is suspended.
    # @param [Sisimai::Data] argvs Object to be detected the reason
    # @return [Integer] 1: mailbox is suspended
    #                   0: mailbox is not suspended
    # @since v4.0.0
    # @see http://www.ietf.org/rfc/rfc2822.txt
    my ($class, $argvs) = @_;
    return undef unless defined $argvs;
    return undef unless $argvs->deliverystatus;

    return 1 if $argvs->reason eq 'suspend';
    return __PACKAGE__->match(lc $argvs->diagnosticcode) ? 1 : 0;
}

=encoding utf-8

=head1 NAME

Sisimai::Reason::Suspend - Bounce reason is C<suspend> or not.

=head1 SYNOPSIS

    use Sisimai::Reason::Suspend;
    print Sisimai::Reason::Suspend->match('recipient suspend the service'); # 1

=head1 DESCRIPTION

Sisimai::Reason::Suspend checks the bounce reason is C<suspend> or not.
This class is called only Sisimai::Reason class.

This is the error that a recipient account is being suspended due to
unpaid or other reasons.

=head1 CLASS METHODS

=head2 C<B<text()>>

C<text()> returns string: C<suspend>.

    print Sisimai::Reason::Suspend->text; # suspend

=head2 C<B<match(I<string>)>>

C<match()> returns 1 if the argument matched with patterns defined in this
class.

    print Sisimai::Reason::Suspend->match('recipient suspend the service'); # 1

=head2 C<B<true(I<Sisimai::Data>)>>

C<true()> returns 1 if the bounce reason is C<suspend>. The argument must be
Sisimai::Data object and this method is called only from Sisimai::Reason
class.

=head1 AUTHOR

azumakuniyuki

=head1 COPYRIGHT

Copyright (C) 2014-2018 azumakuniyuki, All rights reserved.

=head1 LICENSE

This software is distributed under The BSD 2-Clause License.

=cut

1;
azumakuniyuki/p5-Sisimai
lib/Sisimai/Reason/Suspend.pm
Perl
bsd-2-clause
3,064
package POE::API::Peek;
{
  $POE::API::Peek::VERSION = '2.20';
}

# ABSTRACT: Peek into the internals of a running POE environment

use 5.006001;
use warnings;
use strict;

BEGIN {
    use POE;
    my $ver = $POE::VERSION;
    $ver =~ s/_.+$//;
    if($ver < '1.300') {
        die(__PACKAGE__." is only certified for POE version 1.300 and up and you are running POE version " . $ver . ". Check CPAN for an appropriate version of ".__PACKAGE__.".");
    }
}

use POE;
use Devel::Size qw(total_size);
$Devel::Size::warn = 0;

use Carp;
our @CARP_NOT = qw(__PACKAGE__);

# new {{{
# Constructor. Takes no parameters; returns a blessed hash.
sub new {
    my $class = shift;
    my $self = {
        broken_event_queue_bitch => 0,
    };
    return bless $self, $class;
}
# }}}

# id() {{{
# Unique id of the running kernel.
sub id { return $poe_kernel->ID }
# }}}

# Kernel fun {{{

# is_kernel_running {{{
# Returns 1 if POE::Kernel->run() has been called, 0 otherwise.
sub is_kernel_running {
    my $kr_run_warning = ${ $poe_kernel->[ POE::Kernel::KR_RUN() ] };

    # BUGFIX: the original tested "$kr_run_warning |= KR_RUN_CALLED()", which
    # unconditionally sets the bit in the local copy and therefore always
    # evaluates true (KR_RUN_CALLED is a non-zero flag). A bitwise AND is
    # needed to actually test whether the flag is set.
    if($kr_run_warning & POE::Kernel::KR_RUN_CALLED()) {
        return 1;
    } else {
        return 0;
    }
}
#}}}

# active_event {{{
# Name of the event currently being dispatched.
sub active_event {
    return ${ $poe_kernel->[ POE::Kernel::KR_ACTIVE_EVENT() ] };
}
#}}}

# kernel_memory_size {{{
# Devel::Size footprint of the whole kernel structure.
sub kernel_memory_size {
    return total_size($poe_kernel);
}
# }}}

# event_list {{{
# Map of session id => sorted list of its registered event (state) names.
# The kernel's own pseudo-session is skipped.
sub event_list {
    my $self = shift;
    my %events;

    foreach my $session_ref (keys %{ $poe_kernel->[ &POE::Kernel::KR_SESSIONS() ] }) {
        my $session = $poe_kernel->[ &POE::Kernel::KR_SESSIONS() ]->{ $session_ref }->[ &POE::Kernel::SS_SESSION() ];
        next if $session->isa('POE::Kernel');
        my $id = $session->ID;
        my @events = sort keys %{ $session->[ &POE::Session::SE_STATES() ] };
        $events{ $id } = \@events;
    }

    return \%events;
}
# }}}

# which_loop {{{
# Name of the event-loop bridge POE selected.
sub which_loop {
    return POE::Kernel::poe_kernel_loop();
}
#}}}

# }}}

# Session fun {{{

# current_session {{{
# the value of KR_ACTIVE_SESSION is a ref to a scalar. so we deref it before
# handing it to the user.
sub current_session { return ${ $poe_kernel->[POE::Kernel::KR_ACTIVE_SESSION] } }
# }}}

# get_session_children {{{
# Children of a session (object or id); defaults to the current session.
sub get_session_children {
    my $self = shift;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    return $poe_kernel->_data_ses_get_children($sid);
}
# }}}

# is_session_child {{{
# True if the second session (default: current) is a child of the first.
sub is_session_child {
    my $self = shift;
    my $parent = shift or return undef;
    my $psid = ref $parent ? $parent->ID : $parent;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    return $poe_kernel->_data_ses_is_child($psid, $sid);
}
# }}}

# get_session_parent {{{
# Parent of a session (object or id); defaults to the current session.
sub get_session_parent {
    my $self = shift;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    return $poe_kernel->_data_ses_get_parent($sid);
}
# }}}

# resolve_session_to_ref {{{
# Session id -> POE::Session object (undef on failure).
sub resolve_session_to_ref {
    my $self = shift;
    my $sid = shift || $self->current_session()->ID;
    return $poe_kernel->_data_sid_resolve($sid);
}
# }}}

# resolve_session_to_id {{{
# POE::Session object -> session id (undef on failure).
sub resolve_session_to_id {
    my $self = shift;
    my $session = shift || $self->current_session();
    return $poe_kernel->_data_ses_resolve_to_id($session);
}
# }}}

# get_session_refcount {{{
# Kernel-side reference count of a session.
sub get_session_refcount {
    my $self = shift;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    return $poe_kernel->_data_ses_refcount($sid);
}
# }}}

# session_count {{{
# Total number of sessions (the kernel itself counts as one).
sub session_count {
    return $poe_kernel->_data_ses_count();
}
# }}}

# session_list {{{
# All sessions known to the kernel, with the kernel itself filtered out.
sub session_list {
    my @sessions;
    my $kr_sessions = $POE::Kernel::poe_kernel->[POE::Kernel::KR_SESSIONS];
    foreach my $key ( keys %$kr_sessions ) {
        next if $key =~ /POE::Kernel/;
        push @sessions, $kr_sessions->{$key}->[0];
    }
    return @sessions;
}
# }}}

# session_memory_size {{{
# Devel::Size footprint of one session; defaults to the current session.
sub session_memory_size {
    my $self = shift;
    my $session = shift || $self->current_session();
    $session = $poe_kernel->_data_sid_resolve($session) unless ref $session;
    return total_size($session);
}
# }}}

# session_event_list {{{
# Sorted event names of one session; list in list context, arrayref otherwise.
sub session_event_list {
    my $self = shift;
    my $session = shift || $self->current_session();
    my @events = sort keys %{ $session->[ &POE::Session::SE_STATES() ] };
    if(wantarray) {
        return @events;
    } else {
        return \@events;
    }
}
# }}}

# }}}

# Alias fun {{{

# resolve_alias {{{
# Alias string -> POE::Session object (undef on failure).
sub resolve_alias {
    my $self = shift;
    my $alias = shift or return undef;
    return $poe_kernel->_data_alias_resolve($alias);
}
# }}}

# session_alias_list {{{
# All aliases of a session; defaults to the current session.
sub session_alias_list {
    my $self = shift;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    return $poe_kernel->_data_alias_list($sid);
}
# }}}

# session_alias_count {{{
# Number of aliases a session holds.
sub session_alias_count {
    my $self = shift;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    return $poe_kernel->_data_alias_count_ses($sid);
}
# }}}

# session_id_loggable {{{
# Human-loggable "session N (alias, ...)" string for a session.
sub session_id_loggable {
    my $self = shift;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    return $poe_kernel->_data_alias_loggable($sid);
}
# }}}

# }}}

# Event fun {{{

# event_count_to {{{
# Number of pending events destined for a session.
sub event_count_to {
    my $self = shift;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    return $poe_kernel->_data_ev_get_count_to($sid);
}
#}}}

# event_count_from {{{
# Number of pending events generated by a session.
sub event_count_from {
    my $self = shift;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    return $poe_kernel->_data_ev_get_count_from($sid);
}
#}}}

# event_queue {{{
# Raw POE::Queue::Array object backing the kernel's event queue.
sub event_queue {
    return $poe_kernel->[POE::Kernel::KR_QUEUE]
}
# }}}

# event_queue_dump {{{
# Flatten the event queue into a list of hashes describing each pending
# event: ID, index, priority, event name, source, destination, and a
# human-readable type derived from the EV_TYPE bit flags.
sub event_queue_dump {
    my $self = shift;
    my $queue = $self->event_queue;
    my @happy_queue;
    my @queue = $queue->peek_items(sub { return 1; });

    my $i = 0;
    foreach my $qitem (@queue) {
        my $item = {};
        my ($priority, $id, $payload) = @$qitem;

        $item->{ID} = $id;
        $item->{index} = $i++;
        $item->{priority} = $priority;

        my $ev_name = $payload->[POE::Kernel::EV_NAME()];
        $item->{event} = $ev_name;
        $item->{source} = $payload->[POE::Kernel::EV_SOURCE];
        $item->{destination} = $payload->[POE::Kernel::EV_SESSION];

        my $type = $payload->[POE::Kernel::EV_TYPE()];
        my $type_str;
        if ($type & POE::Kernel::ET_START()) {
            $type_str = '_start';
        } elsif ($type & POE::Kernel::ET_STOP()) {
            $type_str = '_stop';
        } elsif ($type & POE::Kernel::ET_SIGNAL()) {
            $type_str = '_signal';
        } elsif ($type & POE::Kernel::ET_GC()) {
            $type_str = '_garbage_collect';
        } elsif ($type & POE::Kernel::ET_PARENT()) {
            $type_str = '_parent';
        } elsif ($type & POE::Kernel::ET_CHILD()) {
            $type_str = '_child';
        } elsif ($type & POE::Kernel::ET_SCPOLL()) {
            $type_str = '_sigchld_poll';
        } elsif ($type & POE::Kernel::ET_ALARM()) {
            $type_str = 'Alarm';
        } elsif ($type & POE::Kernel::ET_SELECT()) {
            $type_str = 'File Activity';
        } else {
            # Catch-all bucket for user-generated event types.
            if($type & POE::Kernel::ET_POST()) {
                $type_str = 'User';
            } elsif ($type & POE::Kernel::ET_CALL()) {
                $type_str = 'User (not enqueued)';
            } else {
                $type_str = 'Unknown';
            }
        }
        $item->{type} = $type_str;
        push @happy_queue, $item;
    }

    return @happy_queue;
}
#}}}

# }}}

# Extref fun {{{

# extref_count {{{
# Count of sessions holding extra references.
sub extref_count {
    return $poe_kernel->_data_extref_count();
}
# }}}

# get_session_extref_count {{{
# Number of extra references a session holds.
sub get_session_extref_count {
    my $self = shift;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    return $poe_kernel->_data_extref_count_ses($sid);
}
# }}}

# }}}

# Filehandles Fun {{{

# is_handle_tracked {{{
# True if the kernel is watching $handle in $mode.
sub is_handle_tracked {
    my($self, $handle, $mode) = @_;
    return $poe_kernel->_data_handle_is_good($handle, $mode);
}
# }}}

# handle_count {{{
# Total number of filehandles the kernel is tracking.
sub handle_count {
    return $poe_kernel->_data_handle_count();
}
# }}}

# session_handle_count {{{
# Number of filehandles a session is watching.
sub session_handle_count {
    my $self = shift;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    return $poe_kernel->_data_handle_count_ses($sid);
}
# }}}

# }}}

# PID Fun {{{

# session_pid_count {{{
# Deprecated: only works with POE < 1.350, which dropped _data_sig_pids_ses.
sub session_pid_count {
    my $self = shift;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    my $ver = $POE::VERSION;
    $ver =~ s/_.+$//;
    if($ver < '1.350') {
        return $poe_kernel->_data_sig_pids_ses($sid);
    }
    carp "session_pid_count() is not available for POE 1.350 and above\n";
    return;
}
# }}}

# }}}

# Signals Fun {{{

# get_safe_signals {{{
# Signals POE considers safe to manipulate.
sub get_safe_signals {
    return $poe_kernel->_data_sig_get_safe_signals();
}
# }}}

# get_signal_type {{{
# Type of a signal (BENIGN / TERMINAL / NONMASKABLE constants).
sub get_signal_type {
    my $self = shift;
    my $sig = shift or return undef;
    return $poe_kernel->_data_sig_type($sig);
}
# }}}

# is_signal_watched {{{
# True if any session explicitly watches the signal.
sub is_signal_watched {
    my $self = shift;
    my $sig = shift or return undef;
    return $poe_kernel->_data_sig_explicitly_watched($sig);
}
# }}}

# signals_watched_by_session {{{
# Hash of signal name => event name watched by a session. Since POE 1.293
# the stored value is an arrayref (event name plus continuation args), so
# only element [0] (the event name) is exposed here.
sub signals_watched_by_session {
    my $self = shift;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    my %sigs = $poe_kernel->_data_sig_watched_by_session($sid);
    my %ret;
    foreach my $k (keys %sigs) {
        my $ev = $sigs{$k}[0];
        $ret{$k} = $ev;
    }
    return %ret;
}
# }}}

# signal_watchers {{{
# Hash of session (resolved from its id) => event name, for one signal.
sub signal_watchers {
    my $self = shift;
    my $sig = shift or return undef;
    my %sigs = $poe_kernel->_data_sig_watchers($sig);
    my %ret;
    foreach my $k (keys %sigs) {
        my $ev = $sigs{$k}[0];
        $ret{$poe_kernel->alias_resolve($k)} = $ev;
    }
    return %ret;
}
# }}}

# is_signal_watched_by_session {{{
# True if the given session explicitly watches the given signal.
sub is_signal_watched_by_session {
    my $self = shift;
    my $signal = shift or return undef;
    my $session = shift || $self->current_session();
    my $sid = ref $session ? $session->ID : $session;
    return $poe_kernel->_data_sig_is_watched_by_session($signal, $sid);
}
# }}}

# }}}

1;

=pod

=head1 NAME

POE::API::Peek - Peek into the internals of a running POE environment

=head1 VERSION

version 2.20

=head1 DESCRIPTION

POE::API::Peek extends the POE::Kernel interface to provide clean access to
Kernel internals in a cross-version compatible manner. Other calculated data
is also available.

My intention is to provide massive amounts of internal data for use in POE
debugging.

=head1 WARNING

B<This version of this module is certified against POE version 1.300 and
above. It will fail on any other POE version.>

B<Further, this module requires perl v5.6.1 or above.>

=head1 METHODS

=cut

=pod

=head2 new

  my $api = POE::API::Peek->new();

Returns a blessed reference. Takes no parameters.

=cut

=pod

=head2 id

  my $foo = $api->id();

Obtain the unique id for the kernel. Takes no parameters. Returns a scalar
containing a string.

=cut

=pod

=head1 KERNEL UTILITIES

=cut

=pod

=head2 is_kernel_running

  if($api->is_kernel_running) {
      # do stuff...
  }

Tell if the POE Kernel is running and active. Returns 1 if the Kernel is
running and 0 if it is not.

=cut

=pod

=head2 active_event

  my $event = $api->active_event();

Get the active event name. Returns a string containing the event name.
=cut =pod =head2 kernel_memory_size my $size = $api->kernel_memory_size(); Get the memory footprint of the kernel and consequently the entire POE environment. See the Devel::Size documentation for several caveats involved in this metric. =cut =pod =head2 event_list my $events = $api->event_list(); Gets the list of events for the whole POE environment. Returns a hash with the session IDs as the keys and a list of events as the values. =cut =pod =head2 which_loop my $loop_name = $api->which_loop(); Tell which Loop POE has decided to use. Returns the string name of the Loop module. =cut =pod =head1 SESSION UTILITIES =cut =pod =head2 current_session my $foo = $api->current_session(); Get the POE::Session object for the currently active session. Takes no parameters. Returns a scalar containing a reference. =cut =pod =head2 get_session_children my @children = $api->get_session_children($session_id); my @children = $api->get_session_children(); Get the children (if any) for a given session. Takes one optional parameter, a POE::Session object or ID. If this parameter is not provided, the method defaults to the currently active session. Returns a list of POE::Session objects. =cut =pod =head2 is_session_child if($api->is_session_child($parent, $session_id)) { } if($api->is_session_child($parent, $session)) { } if($api->is_session_child($parent)) { } Determine if POE::Session A is a child of POE::Session B. Takes one mandatory parameter, a POE::Session object which is the potential parent session this method will interrogate. Takes one optional parameter, a POE::Session object which is the session whose parentage this method will determine. If this parameter is not specified, it will default to the currently active session. Returns a boolean. =cut =pod =head2 get_session_parent my $parent = $api->get_session_parent($session_id); my $parent = $api->get_session_parent($session); my $parent = $api->get_session_parent(); Get the parent for a given session. 
Takes one optional parameter, a POE::Session object or ID. If this parameter is not provided, the method defaults to the currently active session. Returns a POE::Session object. =cut =pod =head2 resolve_session_to_ref my $session = $api->resolve_session_to_ref($session_id); my $session = $api->resolve_session_to_ref(); Obtain a reference to a session given its ID. Takes one optional parameter, a POE::Session ID. If this parameter is not specified, it will default to the currently active session. Returns a reference to a POE::Session object on success; undef on failure. =cut =pod =head2 resolve_session_to_id my $session_id = $api->resolve_session_to_id($session); my $session_id = $api->resolve_session_to_id(); Obtain the session id for a given POE::Session object. Takes one optional parameter, a POE::Session object or ID. If this parameter is not specified, it will default to the currently active session. Returns an integer on success and undef on failure. =cut =pod =head2 get_session_refcount my $count = $api->get_session_refcount($session_id); my $count = $api->get_session_refcount($session); my $count = $api->get_session_refcount(); Obtain the reference count for a given POE::Session. Takes one optional parameter, a POE::Session object or ID. If this parameter is not specified, it will default to the currently active session. Returns an integer. =cut =pod =head2 session_count my $count = $api->session_count(); Obtain a count of how many sessions exist. Takes no parameters. Returns an integer. Note: for various reasons, the Kernel counts as a session. =cut =pod =head2 session_list my @sessions = $api->session_list(); Obtain a list of all the sessions that exist. Takes no parameters. Returns a list populated with POE::Session objects. Note: While the Kernel counts as a session, it has been extracted from this list. 
=cut =pod =head2 session_memory_size my $size = $api->session_memory_size(); my $size = $api->session_memory_size($session); my $size = $api->session_memory_size($session_id); Get the memory footprint of a session. If no session is provided, the current session is used. See the Devel::Size documentation for several caveats involved in this metric. =cut =pod =head2 session_event_list my @events = $api->session_event_list(); my $events = $api->session_event_list(); my @events = $api->session_event_list($session); my $events = $api->session_event_list($session); my @events = $api->session_event_list($session_id); my $events = $api->session_event_list($session_id); Get the list of events for a session. If no session is provided, the current session is used. =cut =pod =head1 ALIAS UTILITIES =cut =pod =head2 resolve_alias my $session = $api->resolve_alias($session_alias); Resolve a session alias into a POE::Session object. Takes one mandatory parameter, a session alias. Returns a POE::Session object on success or undef on failure. =cut =pod =head2 session_alias_list my @aliases = $api->session_alias_list($session_id); my @aliases = $api->session_alias_list($session); my @aliases = $api->session_alias_list(); Obtain a list of aliases for a POE::Session object. Takes one optional parameter, a POE::Session object or ID. If this parameter is not specified, it will default to the currently active session. Returns a list of strings. =cut =pod =head2 session_alias_count my $count = $api->session_alias_count($session_id); my $count = $api->session_alias_count($session); my $count = $api->session_alias_count(); Obtain the count of how many aliases a session has. Takes one optional parameter, a POE::Session object or ID. If this parameter is not specified, it will default to the currently active session. Returns an integer. 
=cut

=pod

=head2 session_id_loggable

  my $str = $api->session_id_loggable($session_id);
  my $str = $api->session_id_loggable($session);
  my $str = $api->session_id_loggable();

Obtain a loggable version of a session id. Takes one optional parameter, a
POE::Session object or ID. If this parameter is not specified, it will
default to the currently active session. Returns a string.

=cut

=pod

=head1 EVENT UTILITIES

# event_count_to {{{

=head2 event_count_to

  my $count = $api->event_count_to($session_id);
  my $count = $api->event_count_to($session);
  my $count = $api->event_count_to();

Get the number of events heading toward a particular session. Takes one
parameter, a POE::Session object or ID. If none is provided, defaults to the
current session. Returns an integer.

=cut

=pod

=head2 event_count_from

  my $count = $api->event_count_from($session_id);
  my $count = $api->event_count_from($session);
  my $count = $api->event_count_from();

Get the number of events heading out from a particular session. Takes one
parameter, a POE::Session object or ID. If none is provided, defaults to the
current session. Returns an integer.

=cut

=pod

=head2 event_queue

  my $foo = $api->event_queue();

Access the internal event queue. Takes no parameters. Returns a scalar
containing a reference to a POE::Queue::Array object.

=cut

=pod

=head2 event_queue_dump

  my @queue = $api->event_queue_dump();

Dump the contents of the event queue in a nice understandable fashion. Takes
no parameters. Returns a list of queue items. Each item is a hash containing
the following entries:

=over 4

=item * ID

The id number that POE's queue identifies this entry as.

=item * index

The index into the POE::Queue::Array which holds this entry.

=item * priority

The priority level this entry has.

=item * event

The name of this event

=item * source

What caused this event. Usually a POE::Session.

=item * destination

Where this event is headed. Usually a POE::Session.

=item * type

The type of event this is.
May have the value User, _start, _stop, _signal, _garbage_collect, _parent, _child, _sigchld_poll, Alarm, File Activity, or Unknown. =back =cut =pod =head1 EXTREF UTILITIES =cut =pod =head2 extref_count my $count = $api->extref_count(); Obtain a count of sessions with extra references. Takes no parameters. Returns an integer. =cut =pod =head2 get_session_extref_count my $count = $api->get_session_extref_count($session_id); my $count = $api->get_session_extref_count($session); my $count = $api->get_session_extref_count(); Obtain the number of extra references a session has. Takes one optional parameter, a POE::Session object or ID. If this parameter is not specified, it will default to the currently active session. Returns an integer. =cut =pod =head1 FILEHANDLE UTILITIES =cut =pod =head2 is_handle_tracked if($api->is_handle_tracked($handle, $mode)) { } Determine if POE is tracking a handle. Takes two mandatory parameters, a filehandle and a mode indicator. Returns a boolean. =cut =pod =head2 handle_count my $count = $api->handle_count(); Obtain a count of how many handles POE is tracking. Takes no parameters. Returns an integer. =cut =pod =head2 session_handle_count my $count = $api->session_handle_count($session_id); my $count = $api->session_handle_count($session); my $count = $api->session_handle_count(); Obtain a count of the active handles for a given session. Takes one optional parameter, a POE::Session object or ID. If this parameter is not supplied, it will default to the currently active session. =cut =pod =head1 PID UTILITIES =cut =pod =head2 session_pid_count my $count = $api->session_pid_count($session_id); my $count = $api->session_pid_count($session); my $count = $api->session_pid_count(); Obtain a count of the process IDs being watched by a session. Takes one optional parameter, a POE::Session object or ID. If this parameter is not supplied, it will default to the currently active session. 
Since 1.350 of L<POE> it is no longer possible to query the number of processes a session is watching. This method is deprecated and will be removed in a future version. =cut =pod =head1 SIGNAL UTILITIES POTENTIAL BREAKAGE NOTE: In POE v1.293 (in particular: svn rev 2916) changed the structure of signals. Previously, the data portion of a signal was simply the name of the event to be called. Now it contains a data portion, continuation style arguments that may be passed on to the signal handler. See the L<POE::Kernel> documentation for more info. =cut =pod =head2 get_safe_signals my @safe_signals = $api->get_safe_signals(); Obtain a list of signals which it is safe for POE to manipulate. Takes no parameters. Returns a list of strings. =cut =pod =head2 get_signal_type my $type = $api->get_signal_type($signal_name); Figure out which type of signal this is. Signals can be one of three types, BENIGN, TERMINAL, NONMASKABLE. The type value returned here, corresponds to subroutine constants SIGTYPE_BENIGN, SIGTYPE_TERMINAL, and SIGTYPE_NONMASKABLE in POE::Kernel's namespace. Takes one mandatory parameter, a signal name. =cut =pod =head2 is_signal_watched if($api->is_signal_watched($signal_name)) { } Determine if a signal is being explicitly watched. Takes one mandatory parameter, a signal name. Returns a boolean. =cut =pod =head2 signals_watched_by_session my %signals = $api->signals_watched_by_session($session); my %signals = $api->signals_watched_by_session(); Get the signals watched by a session and the events they generate. Takes one optional parameter, a POE::Session object or ID. If this parameter is not supplied, it will default to the currently active session. Returns a hash, with a signal name as the key and the event the session generates as the value. =cut =pod =head2 signal_watchers my %watchers = $api->signal_watchers($signal_name); Get a list of the sessions watching a particular signal. Takes one mandatory parameter, a signal name. 
Returns a hash, keyed by session reference with an event name as the value. =cut =pod =head2 is_signal_watched_by_session if($api->is_signal_watched_by_session($signal_name, $session_id)) { } if($api->is_signal_watched_by_session($signal_name, $session)) { } if($api->is_signal_watched_by_session($signal_name)) { } Determine if a given session is explicitly watching a signal. Takes one mandatory parameter, a signal name. Takes one optional parameter, a POE::Session object or ID. If this parameter is not provided, it will default to the currently active session. Returns a boolean. =head1 AUTHORS sungo <sungo@sungo.us> Yuval Kogman <nothingmuch@woobling.org> Chris 'BinGOs' Williams <bingos@cpan.org> Philip Gwyn <gwyn@cpan.org> =head1 COPYRIGHT AND LICENSE This software is Copyright (c) 2012 by Matt Cashner (sungo). This is free software, licensed under: The (three-clause) BSD License =cut __END__ # sungo // vim: ts=4 sw=4 noet
gitpan/POE-API-Peek
lib/POE/API/Peek.pm
Perl
bsd-3-clause
24,157
#!%PERL% -w
# Vhffs::Panel - base class for the VHFFS web control panel. Wraps a Vhffs
# backend handle and a CGI request, and carries per-request state (user,
# session, language, theme, accumulated errors/infos/cookies).

package Vhffs::Panel;

require Exporter;
@ISA = qw(Exporter);
@EXPORT = qw( display );

use strict;
use utf8;
use POSIX qw(locale_h);
use locale;
use Locale::gettext;
use CGI::Session;
use File::Spec;
use Encode;
use Template;

use Vhffs;
use Vhffs::User;
use Vhffs::Group;
use Vhffs::Functions;
use Vhffs::Constants;

=pod

=head1 NAME

Vhffs::Panel - Provides access to common VHFFS functionalities from Vhffs Panel.

=head1 SYNOPSIS

TODO

=head1 METHODS

=cut

=pod

=head2 get_config

$panel->get_config;

Returns panel configuration.

=cut

# Shortcut to the <panel/> section of the VHFFS configuration.
sub get_config {
    my $panel = shift;
    return $panel->{vhffs}->get_config->get_panel;
}

=head2 check_public

$panel->check_public;

Checks that public area is available, if it's not the case show a message and returns.

=cut

sub check_public {
    my $panel = shift;
    return $panel->get_config->{'use_public'};
}

=pod

=head2 is_open

$panel->is_open;

Return 1 if panel is open, else return 0

=cut

sub is_open {
    my $panel = shift;
    return $panel->get_config->{'open'};
}

=pod

=head2 is_public

$panel->is_public;

Return 1 if public part is enabled, else return 0

=cut

sub is_public {
    my $panel = shift;
    return $panel->get_config->{'use_public'};
}

=pod

=head2 use_avatars

$panel->use_avatars;

Return 1 if either or both users or groups avatars are enabled, else return 0

=cut

sub use_avatars {
    my $panel = shift;
    return ( $panel->get_config->{'users_avatars'} or $panel->get_config->{'groups_avatars'} );
}

=pod

=head2 use_users_avatars

$panel->use_users_avatars;

Return 1 if users avatars are enabled, else return 0

=cut

sub use_users_avatars {
    my $panel = shift;
    return $panel->get_config->{'users_avatars'};
}

=pod

=head2 use_groups_avatars

$panel->use_groups_avatars;

Return 1 if groups avatars are enabled, else return 0

=cut

sub use_groups_avatars {
    my $panel = shift;
    return $panel->get_config->{'groups_avatars'};
}

=pod

=head2 check_modo

$panel->check_modo

Checks that logged in user is admin or moderator. If it is not the case, show a message and returns.

=cut

# Renders an "Access denied" page and returns 0 unless the logged-in user is
# a moderator or an admin; otherwise enables the admin menu and returns 1.
sub check_modo {
    my $panel = shift;
    my $user = $panel->{user};
    unless($user->is_moderator or $user->is_admin) {
        $panel->set_title( gettext('Access denied') );
        $panel->render('misc/message.tt', { message => gettext('You are not allowed to access this page') });
        return 0;
    }
    $panel->{display_admin_menu} = 1;
    return 1;
}

=head2 $panel->check_admin

Check that logged in user is an admin. If it is not the case, show a message and returns.

=cut

# Same as check_modo() but restricted to admins only.
sub check_admin {
    my $panel = shift;
    my $user = $panel->{user};
    unless($user->is_admin) {
        $panel->set_title( gettext('Access denied') );
        $panel->render('misc/message.tt', { message => gettext('You are not allowed to access this page') });
        return 0;
    }
    $panel->{display_admin_menu} = 1;
    return 1;
}

# Lists subdirectories of the configured themes directory (each one is a
# selectable theme). Returns an empty list if the directory cannot be read.
sub get_available_themes {
    my $panel = shift;
    my @themes;
    my $path = $panel->get_config->{'themesdir'};
    opendir( my $dir, $path ) or return;
    my @files = readdir( $dir );
    foreach( @files ) {
        next if /^\./;                      # skip hidden entries and . / ..
        next unless -d $path.'/'.$_;        # themes are directories only
        push @themes, $_;
    }
    closedir( $dir );
    return @themes;
}

# Constructor. Takes a Vhffs backend object and a CGI request object.
# Negotiates language and theme (request param, then cookie, then configured
# default), checks that the panel is open and the database is reachable, and
# returns undef (after rendering the "closed" page) when it is not.
sub new {
    my $class = ref($_[0]) || $_[0];
    my $vhffs = $_[1];
    my $cgi = $_[2];
    return undef unless( defined $vhffs and defined $cgi );

    $vhffs->clear_current_user;
    $cgi->charset('UTF-8');

    my $self = {};
    bless( $self, $class );
    $self->{errors} = [];
    $self->{infos} = [];
    $self->{cookies} = [];
    $self->{vhffs} = $vhffs;
    $self->{cgi} = $cgi;
    $self->{url} = $cgi->url();
    $self->{display_admin_menu} = 0;

    # FIXME: maybe we should move templatedir to <panel/> configuration ?
    my $templatedir = $vhffs->get_config->get_templatedir;
    $self->{templatedir} = $templatedir;

    # lang cookie: request parameter wins (and is persisted for 10 years),
    # then the cookie, then the configured default, then en_US.
    my $lang = $cgi->param('lang');
    $self->add_cookie( $cgi->cookie( -name=>'language', -value=>$lang, -expires=>'+10y' ) ) if defined $lang;
    $lang = $cgi->cookie('language') unless defined $lang;
    $lang = $vhffs->get_config->get_default_language unless( defined $lang and grep { $_ eq $lang } $vhffs->get_config->get_available_languages );
    $lang = 'en_US' unless defined $lang;
    $self->{lang} = $lang;
    setlocale(LC_ALL, $lang );
    bindtextdomain('vhffs', '%localedir%');
    textdomain('vhffs');

    # theme cookie: same negotiation as the language, validated by checking
    # that the theme directory actually contains a main.css.
    my $theme = $cgi->param('theme');
    $self->add_cookie( $cgi->cookie( -name=>'theme', -value=>$theme, -expires=>'+10y' ) ) if defined $theme;
    $theme = $cgi->cookie('theme') unless defined $theme;
    $theme = $self->get_config->{'default_theme'} unless defined $theme;
    $theme = 'vhffs' unless( defined $theme and -f $self->get_config->{'themesdir'}.'/'.$theme.'/main.css' );
    # theme feature is more or less deprecated since we never had more than one theme working, let me force to the current theme
    $self->{theme} = 'light-grey';

    unless( $vhffs->reconnect() and $self->get_config->{'open'} ) {
        $self->render('misc/closed.tt', undef, 'anonymous.tt');
        undef $self;
        return undef;
    }

    $self->{is_ajax_request} = (defined $self->{cgi}->http('X-Requested-With') and $self->{cgi}->http('X-Requested-With') eq 'XMLHttpRequest');
    return $self;
}

# Restores the CGI::Session identified by the session cookie and loads the
# matching user. On any failure (missing cookie, unreadable session file,
# expired session, unknown or non-activated user) an error is queued, the
# login page is displayed and undef is returned. On success, stores the
# session and user on the panel and returns the session.
sub get_session {
    my $panel = shift;
    my $vhffs = $panel->{vhffs};
    my $cgi = $panel->{cgi};
    require Vhffs::Panel::Auth;

    my $sid = $cgi->cookie( CGI::Session::name() );
    unless( defined $sid ) {
        $panel->add_error( gettext('No cookie found, please accept the cookie and then please login again !') );
        Vhffs::Panel::Auth::display_login( $panel );
        return;
    }

    my $session = new CGI::Session('driver:File', $sid, { Directory => '/tmp' } );
    unless( defined $session ) {
        $panel->add_error( gettext('Cannot fetch session file, please check that /tmp is readable and writeable') );
        Vhffs::Panel::Auth::display_login( $panel );
        return;
    }

    my $uid = $session->param('uid');
    unless( defined $uid ) {
        $panel->add_error( gettext('Expired session ! Please login again') );
        $session->delete();
        $session->flush(); # Recommended practice says use flush() after delete().
        Vhffs::Panel::Auth::display_login( $panel );
        return;
    }

    my $user = Vhffs::User::get_by_uid($vhffs, $uid);
    unless ( defined $user ) {
        $panel->add_error( gettext('User does not exist') );
        $session->delete();
        $session->flush(); # Recommended practice says use flush() after delete().
        Vhffs::Panel::Auth::display_login( $panel );
        return;
    }

    unless( $user->get_status == Vhffs::Constants::ACTIVATED ) {
        $panel->add_error( gettext('You\'re are not allowed to browse panel') );
        $session->delete();
        $session->flush(); # Recommended practice says use flush() after delete().
        Vhffs::Panel::Auth::display_login( $panel );
        return;
    }

    $panel->{session} = $session;
    $panel->{user} = $user;
    $vhffs->set_current_user( $user );
    return $session;
}

# Attaches a Vhffs::Group to the current request.
sub set_group {
    my $panel = shift;
    $panel->{group} = shift;
}

# True if at least one error has been queued with add_error().
sub has_errors {
    my $panel = shift;
    return (@{$panel->{errors}} > 0);
}

# Sets the page title used by the layout template.
sub set_title {
    my ($panel, $title) = @_;
    $panel->{title} = $title;
}

# Queues an error message for display.
sub add_error {
    my ($panel, $error) = @_;
    # TODO Do not use anonymous hash when Template::Toolkit transition is over.
    push(@{$panel->{errors}}, {msg => $error});
}

# Queues an informational message for display.
sub add_info {
    my ($panel, $info) = @_;
    # TODO Do not use anonymous hash when Template::Toolkit transition is over.
    push(@{$panel->{infos}}, {msg => $info});
}

# Queues a cookie to be sent with the response headers.
sub add_cookie {
    my ($panel, $cookie) = @_;
    push(@{$panel->{cookies}}, $cookie);
}

# Drops all queued informational messages.
sub clear_infos {
    my $panel = shift;
    $panel->{infos} = [];
}

# Negotiated language for this request (e.g. 'en_US').
sub get_lang {
    my $panel = shift;
    return $panel->{lang};
}

# Theme selected for this request.
sub get_theme {
    my $panel = shift;
    return $panel->{theme};
}

=head2 $panel->render($file, $vars, $layout)

Renders given template with substitution variables C<$vars>.
If no C<$layout> is provided, C<layouts/panel.tt> will be used, otherwise
C<$layout> should be the name of the layout relative to the C<layouts>
folder.

If request was made through Ajax, no layout will be processed.

B<This function never returns a meaningful value>.

=cut

# Render the given template with substitution variables $vars and send the
# complete HTTP response (headers + body) to the client.
sub render {
    my ($self, $file, $vars, $layout, $include_path) = @_;
    my $vhffs = $self->{vhffs};
    my $cgi = $self->{cgi};

    # Make sure the current user does not leak into the next request served
    # by this (possibly persistent, FCGI) interpreter.
    $vhffs->clear_current_user;

    # TODO Should be in parent class when Template::Toolkit switch is over
    my $create_vars = {
        INCLUDE_PATH => $self->{templatedir}.(defined $include_path ? '/'.$include_path.'/' : '/panel/'),
        # Constants exported to every template.
        CONSTANTS => {
            vhffs => {
                VERSION => Vhffs::Constants::VHFFS_VERSION,
                RELEASE_NAME => Vhffs::Constants::VHFFS_RELEASE_NAME,
            },
            object_statuses => {
                WAITING_FOR_VALIDATION => Vhffs::Constants::WAITING_FOR_VALIDATION,
                VALIDATION_REFUSED => Vhffs::Constants::VALIDATION_REFUSED,
                WAITING_FOR_CREATION => Vhffs::Constants::WAITING_FOR_CREATION,
                CREATION_ERROR => Vhffs::Constants::CREATION_ERROR,
                WAITING_FOR_ACTIVATION => Vhffs::Constants::WAITING_FOR_ACTIVATION,
                ACTIVATED => Vhffs::Constants::ACTIVATED,
                ACTIVATION_ERROR => Vhffs::Constants::ACTIVATION_ERROR,
                WAITING_FOR_SUSPENSION => Vhffs::Constants::WAITING_FOR_SUSPENSION,
                SUSPENDED => Vhffs::Constants::SUSPENDED,
                SUSPENSION_ERROR => Vhffs::Constants::SUSPENSION_ERROR,
                WAITING_FOR_MODIFICATION => Vhffs::Constants::WAITING_FOR_MODIFICATION,
                MODIFICATION_ERROR => Vhffs::Constants::MODIFICATION_ERROR,
                WAITING_FOR_DELETION => Vhffs::Constants::WAITING_FOR_DELETION,
                DELETION_ERROR => Vhffs::Constants::DELETION_ERROR
            },
            user_permissions => {
                NORMAL => Vhffs::Constants::USER_NORMAL,
                MODERATOR => Vhffs::Constants::USER_MODERATOR,
                ADMIN => Vhffs::Constants::USER_ADMIN
            },
            acl => {
                UNDEFINED => Vhffs::Constants::ACL_UNDEFINED,
                DENIED => Vhffs::Constants::ACL_DENIED,
                VIEW => Vhffs::Constants::ACL_VIEW,
                MODIFY => Vhffs::Constants::ACL_MODIFY,
                MANAGEACL => Vhffs::Constants::ACL_MANAGEACL,
                DELETE => Vhffs::Constants::ACL_DELETE
            },
            mailinglist => {
                SUBSCRIBE_NO_APPROVAL_REQUIRED => Vhffs::Constants::ML_SUBSCRIBE_NO_APPROVAL_REQUIRED,
                SUBSCRIBE_APPROVAL_REQUIRED => Vhffs::Constants::ML_SUBSCRIBE_APPROVAL_REQUIRED,
                SUBSCRIBE_CLOSED => Vhffs::Constants::ML_SUBSCRIBE_CLOSED,
                POSTING_OPEN_ALL => Vhffs::Constants::ML_POSTING_OPEN_ALL,
                POSTING_MODERATED_ALL => Vhffs::Constants::ML_POSTING_MODERATED_ALL,
                POSTING_OPEN_MEMBERS_MODERATED_OTHERS => Vhffs::Constants::ML_POSTING_OPEN_MEMBERS_MODERATED_OTHERS,
                POSTING_MEMBERS_ONLY => Vhffs::Constants::ML_POSTING_MEMBERS_ONLY,
                POSTING_MEMBERS_ONLY_MODERATED => Vhffs::Constants::ML_POSTING_MEMBERS_ONLY_MODERATED,
                POSTING_ADMINS_ONLY => Vhffs::Constants::ML_POSTING_ADMINS_ONLY,
                RIGHT_SUB_WAITING_FOR_REPLY => Vhffs::Constants::ML_RIGHT_SUB_WAITING_FOR_REPLY,
                RIGHT_SUB_WAITING_FOR_VALIDATION => Vhffs::Constants::ML_RIGHT_SUB_WAITING_FOR_VALIDATION,
                RIGHT_SUB => Vhffs::Constants::ML_RIGHT_SUB,
                RIGHT_SUB_WAITING_FOR_DEL => Vhffs::Constants::ML_RIGHT_SUB_WAITING_FOR_DEL,
                RIGHT_ADMIN => Vhffs::Constants::ML_RIGHT_ADMIN
            }
        },
        FILTERS => {
            i18n => \&gettext,
            mail => sub { return Vhffs::Functions::obfuscate_email($vhffs, $_[0]); },
            # Format filter accept only one argument
            # pretty_print can 'sprintf' anything, use it as
            # [% '%s is $%d' | pretty_print(article, price)]
            pretty_print => [sub {
                my $context = shift;
                my @args = @_;
                return sub {
                    my $format = shift;
                    return sprintf($format, @args);
                }
            }, 1],
            stringify_status => sub { return Vhffs::Functions::status_string_from_status_id( $_[0] ); },
            stringify_type => sub { Vhffs::Functions::type_string_from_type_id( $_[0] ); },
            idn_to_unicode => sub { require Net::LibIDN; Encode::decode_utf8( Net::LibIDN::idn_to_unicode( $_[0] , 'utf-8') ); },
        },
        PRE_CHOMP => 2
    };

    $vars = {} unless(defined $vars);

    # Rebuild the query string, dropping the lang and theme parameters so
    # that language/theme switch links can append their own values.
    my $query_string = '';
    foreach ( $cgi->url_param ) {
        my $p = $cgi->url_param($_);
        $query_string .= $_.'='.$p.';' if defined $p and $_ ne 'lang' and $_ ne 'theme';
    }
    chop $query_string;    # drop the trailing ';' (no-op on an empty string)
    undef $query_string unless $query_string;

    $vars->{do} = $cgi->url_param('do');
    $vars->{query_string} = $query_string;
    $vars->{theme} = $self->{theme};
    $vars->{panel_url} = $self->get_config->{url};
    $vars->{title} = sprintf( gettext( '%s\'s Panel' ), $vhffs->get_config->get_host_name );
    $vars->{page_title} = $self->{title};
    $vars->{public_url} = $self->get_config->{'url_public'} if $self->is_public;
    $vars->{msg} = Encode::decode_utf8($self->{cgi}->param('msg')) if defined $self->{cgi}->param('msg');
    my @langs = $vhffs->get_config->get_available_languages;
    $vars->{languages} = \@langs;
    $vars->{language} = $self->{lang};
    $vars->{errors} = $self->{errors};
    $vars->{infos} = $self->{infos};
    $vars->{current_user} = $self->{user};
    $vars->{current_group} = $self->{group};

    # Handling ajax stuff: Ajax responses are bare fragments, no layout.
    if($self->{is_ajax_request}) {
        delete $create_vars->{PROCESS};
    } else {
        if(defined $layout) {
            $create_vars->{PROCESS} = 'layouts/'.$layout;
        } else {
            $create_vars->{PROCESS} = 'layouts/panel.tt';
            $vars->{panel_header} = {
                help_url => $self->get_config->{'url_help'} || 'http://www.vhffs.org/',
                admin_menu => $self->{display_admin_menu},
                available_services => $vhffs->get_config->get_available_services
            };
        }
    }

    # Direct method call instead of indirect object syntax ("new Template").
    my $template = Template->new($create_vars);
    my $http_accept = ( $cgi->http('HTTP_ACCEPT') or '' );
    # Serve XHTML only to clients that explicitly accept it.
    print $cgi->header( -cookie=>[ @{$self->{cookies}} ], -type=>( $http_accept =~ /application\/xhtml\+xml/ ? 'application/xhtml+xml' : 'text/html' ), -charset=>'utf-8' );
    my $data;
    unless( $template->process($file, $vars, \$data) ) {
        warn 'Error while processing template: '.$template->error();
        return;
    }
    # FCGI does not handle UTF8
    print Encode::encode_utf8( $data );
}

=pod

=head2 redirect

$panel->redirect($dest);

Issues a redirection header sending to $dest.

=cut

sub redirect {
    my ($panel, $dest) = @_;
    my $vhffs = $panel->{vhffs};
    my $cgi = $panel->{cgi};

    # Same hygiene as render(): no current user left behind for the next
    # request served by this interpreter.
    $vhffs->clear_current_user;
    print $cgi->redirect( -uri=>Encode::encode_utf8($dest), -cookie=>[ @{$panel->{cookies}} ] );
}

1;
najamelan/vhffs-4.5
vhffs-api/src/Vhffs/Panel.pm
Perl
bsd-3-clause
14,589
#
# ServiceClass.pm : part of the Mace toolkit for building distributed systems
#
# Copyright (c) 2011, Charles Killian, James W. Anderson, Adolfo Rodriguez, Dejan Kostic
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
#    * Redistributions of source code must retain the above copyright
#      notice, this list of conditions and the following disclaimer.
#    * Redistributions in binary form must reproduce the above copyright
#      notice, this list of conditions and the following disclaimer in the
#      documentation and/or other materials provided with the distribution.
#    * Neither the names of the contributors, nor their associated universities
#      or organizations may be used to endorse or promote products derived from
#      this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# ----END-OF-LEGAL-STUFF----

# In-memory model of a parsed Mace service class / handler: its name, its
# methods, constructors, destructor, superclasses and assorted flags.
package Mace::Compiler::ServiceClass;

use strict;

use Mace::Compiler::Method;
use Mace::Compiler::AutoType;

# Accessors are generated declaratively by Class::MakeMethods.
use Class::MakeMethods::Template::Hash
    (
     'new' => 'new',
     'string' => "name",
     'array_of_objects' => ["methods" => { class => "Mace::Compiler::Method" }],
     'array_of_objects' => ["constructors" => { class => "Mace::Compiler::Method" }],
     'array_of_objects' => ["auto_types" => { class => "Mace::Compiler::AutoType" }],
     'object' => ["destructor" => { class => "Mace::Compiler::Method" }],
     'array' => "superclasses",
     'array' => "handlers",
     'boolean' => "isServiceClass",
     'boolean' => "isHandler",
     'boolean' => "isAutoType",
     'boolean' => "isNamespace",
     'string --get_concat' => "pre",
     'string --get_concat' => "post",
     # NOTE(review): "__get_concat" (double underscore) differs from the
     # "--get_concat" form used just above -- confirm this is intentional.
     'string __get_concat' => "maceLiteral",
     'array' => "literals",
    );

# C++ class name: the bare name, suffixed "Handler" for handlers and
# "ServiceClass" for service classes ("Handler" wins when both are set).
sub className {
    my $self = shift;
    my $suffix =
          $self->isHandler        ? "Handler"
        : $self->isServiceClass() ? "ServiceClass"
        :                           "";
    return $self->name() . $suffix;
} # className

# All callable entities of the class, in declaration-group order:
# constructors, then destructor (if any), then regular methods.
sub allMethods {
    my $self = shift;
    my @all;
    push @all, @{ $self->constructors() } if $self->count_constructors();
    push @all, $self->destructor()        if $self->hasDestructor();
    push @all, @{ $self->methods() }      if $self->count_methods();
    return @all;
} # allMethods

# True when a destructor has been attached to this class.
sub hasDestructor {
    my $self = shift;
    return defined $self->destructor();
} # hasDestructor

# Human-readable dump of the class: header line with kind markers and
# superclasses, followed by one line per method.
sub toString {
    my $self = shift;
    my $text = "class " . $self->name();
    if ($self->isServiceClass or $self->isHandler()) {
        $text .= " [";
        $text .= "ServiceClass" if $self->isServiceClass();
        $text .= "Handler"      if $self->isHandler();
        $text .= "]";
    }
    $text .= " extends " . join(", ", $self->superclasses())
        if $self->count_superclasses();
    $text .= "\n";
    $text .= $_->toString() . "\n" for $self->allMethods();
    return $text;
} # toString

1;
jojochuang/eventwave
perl5/Mace/Compiler/ServiceClass.pm
Perl
bsd-3-clause
3,921
package Bio::KBase::DataStoreInterface::DataStoreInterfaceImpl;
use strict;
use Bio::KBase::Exceptions;
# Use Semantic Versioning (2.0.0-rc.1)
# http://semver.org
our $VERSION = "0.1.0";

=head1 NAME

AbstractHandle

=head1 DESCRIPTION

The AbstractHandle module provides a programmatic access to a remote file store.

=cut

#BEGIN_HEADER
# read the config file into this package.
use Data::Dumper;
use Config::Simple;
use IPC::System::Simple qw(capture);
use JSON;
use Log::Log4perl qw(:easy);
Log::Log4perl->easy_init($INFO);

# Module-level configuration shared by all instances, loaded once at
# compile time from the deployment config file.
our $cfg = {};
our $default_shock;

if (defined $ENV{KB_DEPLOYMENT_CONFIG} && -e $ENV{KB_DEPLOYMENT_CONFIG}) {
    # Direct method call instead of indirect object syntax
    # ("new Config::Simple").
    $cfg = Config::Simple->new($ENV{KB_DEPLOYMENT_CONFIG}) or
        die "could not construct new Config::Simple object";
    $default_shock = $cfg->param('handle_service.default-shock-server');
    INFO "$$ reading config from $ENV{KB_DEPLOYMENT_CONFIG}";
    INFO "$$ using $default_shock as the default shock server";
}
else {
    die "could not find KB_DEPLOYMENT_CONFIG";
}
#END_HEADER

sub new
{
    my($class, @args) = @_;
    my $self = {
    };
    bless $self, $class;
    #BEGIN_CONSTRUCTOR

    # TODO need to solve this.
    $self->{registry} = {};

    # Fail fast at construction time if curl is missing; the shock
    # interactions below shell out to curl.
    system("curl -h > /dev/null 2>&1") == 0 or
        die "curl not found, maybe you need to install it";

    #END_CONSTRUCTOR

    if ($self->can('_init_instance'))
    {
        $self->_init_instance();
    }
    return $self;
}

=head1 METHODS

=head2 new_handle

  $h = $obj->new_handle()

new_handle returns a Handle object with a url and a node id. C<$h> is a
Handle: a reference to a hash where the keys file_name, id, type, url,
remote_md5 and remote_sha1 each hold a string value (see L</Handle>).

=cut

sub new_handle
{
    my $self = shift;

    my $ctx = $Bio::KBase::DataStoreInterface::Service::CallContext;
    my($h);
    #BEGIN new_handle
    # Start from an empty handle, point it at a data store, then create the
    # remote node that gives it an id.
    $h->{file_name} = undef;
    $h->{id} = undef;
    $h = $self->localize_handle(ref $self, $h);
    $h = $self->initialize_handle($h);
    #END new_handle
    my @_bad_returns;
    (ref($h) eq 'HASH') or push(@_bad_returns, "Invalid type for return variable \"h\" (value was \"$h\")");
    if (@_bad_returns) {
        my $msg = "Invalid returns passed to new_handle:\n" . join("", map { "\t$_\n" } @_bad_returns);
        Bio::KBase::Exceptions::ArgumentValidationError->throw(error => $msg,
                                                               method_name => 'new_handle');
    }
    return($h);
}

=head2 localize_handle

  $h2 = $obj->localize_handle($service_name, $h1)

The localize_handle function attempts to locate a shock server near the
service. The localize_handle function must be called before the Handle is
initialized becuase when the handle is initialized, it is given a node id
that maps to the shock server where the node was created. C<$service_name>
is a string; C<$h1> and C<$h2> are Handles (see L</Handle>).

=cut

sub localize_handle
{
    my $self = shift;
    my($service_name, $h1) = @_;

    my @_bad_arguments;
    (!ref($service_name)) or push(@_bad_arguments, "Invalid type for argument \"service_name\" (value was \"$service_name\")");
    (ref($h1) eq 'HASH') or push(@_bad_arguments, "Invalid type for argument \"h1\" (value was \"$h1\")");
    if (@_bad_arguments) {
        my $msg = "Invalid arguments passed to localize_handle:\n" . join("", map { "\t$_\n" } @_bad_arguments);
        Bio::KBase::Exceptions::ArgumentValidationError->throw(error => $msg,
                                                               method_name => 'localize_handle');
    }

    my $ctx = $Bio::KBase::DataStoreInterface::Service::CallContext;
    my($h2);
    #BEGIN localize_handle
    $h2 = $h1;
    my ($url, $type);
    my $registry = $self->{registry};
    # A service may have registered its own nearby store; otherwise fall
    # back to the deployment-wide default shock server.
    if (exists $registry->{$service_name}) {
        $type = $registry->{$service_name}->{type};
        $url = $registry->{$service_name}->{url};
    }
    else {
        $type = 'shock';
        $url = $default_shock;
    }
    # Do not clobber a handle that already points at a store.
    unless (defined $h2->{url}) {
        $h2->{url} = $url;
        $h2->{type} = $type;
    }
    #END localize_handle
    my @_bad_returns;
    (ref($h2) eq 'HASH') or push(@_bad_returns, "Invalid type for return variable \"h2\" (value was \"$h2\")");
    if (@_bad_returns) {
        my $msg = "Invalid returns passed to localize_handle:\n" . join("", map { "\t$_\n" } @_bad_returns);
        Bio::KBase::Exceptions::ArgumentValidationError->throw(error => $msg,
                                                               method_name => 'localize_handle');
    }
    return($h2);
}

=head2 initialize_handle

  $h2 = $obj->initialize_handle($h1)

initialize_handle returns a Handle object with an ID. C<$h1> and C<$h2>
are Handles (see L</Handle>).

=cut

sub initialize_handle
{
    my $self = shift;
    my($h1) = @_;

    my @_bad_arguments;
    (ref($h1) eq 'HASH') or push(@_bad_arguments, "Invalid type for argument \"h1\" (value was \"$h1\")");
    if (@_bad_arguments) {
        my $msg = "Invalid arguments passed to initialize_handle:\n" . join("", map { "\t$_\n" } @_bad_arguments);
        Bio::KBase::Exceptions::ArgumentValidationError->throw(error => $msg,
                                                               method_name => 'initialize_handle');
    }

    my $ctx = $Bio::KBase::DataStoreInterface::Service::CallContext;
    my($h2);
    #BEGIN initialize_handle
    $h2 = $h1;
    # Create an empty node on the default shock server. Use the list form
    # of capture() so the auth token and URL are passed as individual argv
    # elements and are never interpreted by a shell (avoids quoting and
    # injection problems with unusual token contents).
    my $json_node = capture('curl', '-s',
                            '-H', 'Authorization: OAuth ' . $ctx->{token},
                            '-X', 'POST', "$default_shock/node");
    my $ref = decode_json $json_node;
    $h2->{id} = $ref->{data}->{id} or die "could not find node id in $json_node";
    #END initialize_handle
    my @_bad_returns;
    (ref($h2) eq 'HASH') or push(@_bad_returns, "Invalid type for return variable \"h2\" (value was \"$h2\")");
    if (@_bad_returns) {
        my $msg = "Invalid returns passed to initialize_handle:\n" . join("", map { "\t$_\n" } @_bad_returns);
        Bio::KBase::Exceptions::ArgumentValidationError->throw(error => $msg,
                                                               method_name => 'initialize_handle');
    }
    return($h2);
}

=head2 version

  $return = $obj->version()

Return the module version as a string. This is a Semantic Versioning number.

=cut

sub version {
    return $VERSION;
}

=head1 TYPES

=head2 Handle

Handle provides a unique reference that enables access to the data files
through functions provided as part of the DSI. In the case of using shock,
the id is the node id. In the case of using shock the value of type is
shock. In the future these values should enumerated. The value of url is
the http address of the shock server, including the protocol (http or
https) and if necessary the port. The values of remote_md5 and remote_sha1
are those computed on the file in the remote data store. These can be used
to verify uploads and downloads.

A Handle is a reference to a hash where the following keys are defined,
each with a string value: file_name, id, type, url, remote_md5, remote_sha1.

=cut

1;
kbase/handle_service
lib/Bio/KBase/DataStoreInterface/DataStoreInterfaceImpl.pm
Perl
mit
9,638
########################################################################### # # This file is partially auto-generated by the DateTime::Locale generator # tools (v0.10). This code generator comes with the DateTime::Locale # distribution in the tools/ directory, and is called generate-modules. # # This file was generated from the CLDR JSON locale data. See the LICENSE.cldr # file included in this distribution for license details. # # Do not edit this file directly unless you are sure the part you are editing # is not created by the generator. # ########################################################################### =pod =encoding UTF-8 =head1 NAME DateTime::Locale::en_IO - Locale data examples for the en-IO locale. =head1 DESCRIPTION This pod file contains examples of the locale data available for the English British Indian Ocean Territory locale. =head2 Days =head3 Wide (format) Monday Tuesday Wednesday Thursday Friday Saturday Sunday =head3 Abbreviated (format) Mon Tue Wed Thu Fri Sat Sun =head3 Narrow (format) M T W T F S S =head3 Wide (stand-alone) Monday Tuesday Wednesday Thursday Friday Saturday Sunday =head3 Abbreviated (stand-alone) Mon Tue Wed Thu Fri Sat Sun =head3 Narrow (stand-alone) M T W T F S S =head2 Months =head3 Wide (format) January February March April May June July August September October November December =head3 Abbreviated (format) Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec =head3 Narrow (format) J F M A M J J A S O N D =head3 Wide (stand-alone) January February March April May June July August September October November December =head3 Abbreviated (stand-alone) Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec =head3 Narrow (stand-alone) J F M A M J J A S O N D =head2 Quarters =head3 Wide (format) 1st quarter 2nd quarter 3rd quarter 4th quarter =head3 Abbreviated (format) Q1 Q2 Q3 Q4 =head3 Narrow (format) 1 2 3 4 =head3 Wide (stand-alone) 1st quarter 2nd quarter 3rd quarter 4th quarter =head3 Abbreviated (stand-alone) Q1 Q2 Q3 Q4 
=head3 Narrow (stand-alone) 1 2 3 4 =head2 Eras =head3 Wide (format) Before Christ Anno Domini =head3 Abbreviated (format) BC AD =head3 Narrow (format) B A =head2 Date Formats =head3 Full 2008-02-05T18:30:30 = Tuesday, 5 February 2008 1995-12-22T09:05:02 = Friday, 22 December 1995 -0010-09-15T04:44:23 = Saturday, 15 September -10 =head3 Long 2008-02-05T18:30:30 = 5 February 2008 1995-12-22T09:05:02 = 22 December 1995 -0010-09-15T04:44:23 = 15 September -10 =head3 Medium 2008-02-05T18:30:30 = 5 Feb 2008 1995-12-22T09:05:02 = 22 Dec 1995 -0010-09-15T04:44:23 = 15 Sep -10 =head3 Short 2008-02-05T18:30:30 = 05/02/2008 1995-12-22T09:05:02 = 22/12/1995 -0010-09-15T04:44:23 = 15/09/-10 =head2 Time Formats =head3 Full 2008-02-05T18:30:30 = 18:30:30 UTC 1995-12-22T09:05:02 = 09:05:02 UTC -0010-09-15T04:44:23 = 04:44:23 UTC =head3 Long 2008-02-05T18:30:30 = 18:30:30 UTC 1995-12-22T09:05:02 = 09:05:02 UTC -0010-09-15T04:44:23 = 04:44:23 UTC =head3 Medium 2008-02-05T18:30:30 = 18:30:30 1995-12-22T09:05:02 = 09:05:02 -0010-09-15T04:44:23 = 04:44:23 =head3 Short 2008-02-05T18:30:30 = 18:30 1995-12-22T09:05:02 = 09:05 -0010-09-15T04:44:23 = 04:44 =head2 Datetime Formats =head3 Full 2008-02-05T18:30:30 = Tuesday, 5 February 2008 at 18:30:30 UTC 1995-12-22T09:05:02 = Friday, 22 December 1995 at 09:05:02 UTC -0010-09-15T04:44:23 = Saturday, 15 September -10 at 04:44:23 UTC =head3 Long 2008-02-05T18:30:30 = 5 February 2008 at 18:30:30 UTC 1995-12-22T09:05:02 = 22 December 1995 at 09:05:02 UTC -0010-09-15T04:44:23 = 15 September -10 at 04:44:23 UTC =head3 Medium 2008-02-05T18:30:30 = 5 Feb 2008, 18:30:30 1995-12-22T09:05:02 = 22 Dec 1995, 09:05:02 -0010-09-15T04:44:23 = 15 Sep -10, 04:44:23 =head3 Short 2008-02-05T18:30:30 = 05/02/2008, 18:30 1995-12-22T09:05:02 = 22/12/1995, 09:05 -0010-09-15T04:44:23 = 15/09/-10, 04:44 =head2 Available Formats =head3 E (ccc) 2008-02-05T18:30:30 = Tue 1995-12-22T09:05:02 = Fri -0010-09-15T04:44:23 = Sat =head3 EHm (E HH:mm) 2008-02-05T18:30:30 = Tue 
18:30 1995-12-22T09:05:02 = Fri 09:05 -0010-09-15T04:44:23 = Sat 04:44 =head3 EHms (E HH:mm:ss) 2008-02-05T18:30:30 = Tue 18:30:30 1995-12-22T09:05:02 = Fri 09:05:02 -0010-09-15T04:44:23 = Sat 04:44:23 =head3 Ed (E d) 2008-02-05T18:30:30 = Tue 5 1995-12-22T09:05:02 = Fri 22 -0010-09-15T04:44:23 = Sat 15 =head3 Ehm (E h:mm a) 2008-02-05T18:30:30 = Tue 6:30 PM 1995-12-22T09:05:02 = Fri 9:05 AM -0010-09-15T04:44:23 = Sat 4:44 AM =head3 Ehms (E h:mm:ss a) 2008-02-05T18:30:30 = Tue 6:30:30 PM 1995-12-22T09:05:02 = Fri 9:05:02 AM -0010-09-15T04:44:23 = Sat 4:44:23 AM =head3 Gy (y G) 2008-02-05T18:30:30 = 2008 AD 1995-12-22T09:05:02 = 1995 AD -0010-09-15T04:44:23 = -10 BC =head3 GyMMM (MMM y G) 2008-02-05T18:30:30 = Feb 2008 AD 1995-12-22T09:05:02 = Dec 1995 AD -0010-09-15T04:44:23 = Sep -10 BC =head3 GyMMMEd (E, d MMM y G) 2008-02-05T18:30:30 = Tue, 5 Feb 2008 AD 1995-12-22T09:05:02 = Fri, 22 Dec 1995 AD -0010-09-15T04:44:23 = Sat, 15 Sep -10 BC =head3 GyMMMd (d MMM y G) 2008-02-05T18:30:30 = 5 Feb 2008 AD 1995-12-22T09:05:02 = 22 Dec 1995 AD -0010-09-15T04:44:23 = 15 Sep -10 BC =head3 H (HH) 2008-02-05T18:30:30 = 18 1995-12-22T09:05:02 = 09 -0010-09-15T04:44:23 = 04 =head3 Hm (HH:mm) 2008-02-05T18:30:30 = 18:30 1995-12-22T09:05:02 = 09:05 -0010-09-15T04:44:23 = 04:44 =head3 Hms (HH:mm:ss) 2008-02-05T18:30:30 = 18:30:30 1995-12-22T09:05:02 = 09:05:02 -0010-09-15T04:44:23 = 04:44:23 =head3 Hmsv (HH:mm:ss v) 2008-02-05T18:30:30 = 18:30:30 UTC 1995-12-22T09:05:02 = 09:05:02 UTC -0010-09-15T04:44:23 = 04:44:23 UTC =head3 Hmv (HH:mm v) 2008-02-05T18:30:30 = 18:30 UTC 1995-12-22T09:05:02 = 09:05 UTC -0010-09-15T04:44:23 = 04:44 UTC =head3 M (L) 2008-02-05T18:30:30 = 2 1995-12-22T09:05:02 = 12 -0010-09-15T04:44:23 = 9 =head3 MEd (E, dd/MM) 2008-02-05T18:30:30 = Tue, 05/02 1995-12-22T09:05:02 = Fri, 22/12 -0010-09-15T04:44:23 = Sat, 15/09 =head3 MMM (LLL) 2008-02-05T18:30:30 = Feb 1995-12-22T09:05:02 = Dec -0010-09-15T04:44:23 = Sep =head3 MMMEd (E, d MMM) 2008-02-05T18:30:30 = 
Tue, 5 Feb 1995-12-22T09:05:02 = Fri, 22 Dec -0010-09-15T04:44:23 = Sat, 15 Sep =head3 MMMMd (d MMMM) 2008-02-05T18:30:30 = 5 February 1995-12-22T09:05:02 = 22 December -0010-09-15T04:44:23 = 15 September =head3 MMMd (d MMM) 2008-02-05T18:30:30 = 5 Feb 1995-12-22T09:05:02 = 22 Dec -0010-09-15T04:44:23 = 15 Sep =head3 MMdd (dd/MM) 2008-02-05T18:30:30 = 05/02 1995-12-22T09:05:02 = 22/12 -0010-09-15T04:44:23 = 15/09 =head3 Md (dd/MM) 2008-02-05T18:30:30 = 05/02 1995-12-22T09:05:02 = 22/12 -0010-09-15T04:44:23 = 15/09 =head3 d (d) 2008-02-05T18:30:30 = 5 1995-12-22T09:05:02 = 22 -0010-09-15T04:44:23 = 15 =head3 h (h a) 2008-02-05T18:30:30 = 6 PM 1995-12-22T09:05:02 = 9 AM -0010-09-15T04:44:23 = 4 AM =head3 hm (h:mm a) 2008-02-05T18:30:30 = 6:30 PM 1995-12-22T09:05:02 = 9:05 AM -0010-09-15T04:44:23 = 4:44 AM =head3 hms (h:mm:ss a) 2008-02-05T18:30:30 = 6:30:30 PM 1995-12-22T09:05:02 = 9:05:02 AM -0010-09-15T04:44:23 = 4:44:23 AM =head3 hmsv (h:mm:ss a v) 2008-02-05T18:30:30 = 6:30:30 PM UTC 1995-12-22T09:05:02 = 9:05:02 AM UTC -0010-09-15T04:44:23 = 4:44:23 AM UTC =head3 hmv (h:mm a v) 2008-02-05T18:30:30 = 6:30 PM UTC 1995-12-22T09:05:02 = 9:05 AM UTC -0010-09-15T04:44:23 = 4:44 AM UTC =head3 ms (mm:ss) 2008-02-05T18:30:30 = 30:30 1995-12-22T09:05:02 = 05:02 -0010-09-15T04:44:23 = 44:23 =head3 y (y) 2008-02-05T18:30:30 = 2008 1995-12-22T09:05:02 = 1995 -0010-09-15T04:44:23 = -10 =head3 yM (MM/y) 2008-02-05T18:30:30 = 02/2008 1995-12-22T09:05:02 = 12/1995 -0010-09-15T04:44:23 = 09/-10 =head3 yMEd (E, dd/MM/y) 2008-02-05T18:30:30 = Tue, 05/02/2008 1995-12-22T09:05:02 = Fri, 22/12/1995 -0010-09-15T04:44:23 = Sat, 15/09/-10 =head3 yMMM (MMM y) 2008-02-05T18:30:30 = Feb 2008 1995-12-22T09:05:02 = Dec 1995 -0010-09-15T04:44:23 = Sep -10 =head3 yMMMEd (E, d MMM y) 2008-02-05T18:30:30 = Tue, 5 Feb 2008 1995-12-22T09:05:02 = Fri, 22 Dec 1995 -0010-09-15T04:44:23 = Sat, 15 Sep -10 =head3 yMMMM (MMMM y) 2008-02-05T18:30:30 = February 2008 1995-12-22T09:05:02 = December 1995 
-0010-09-15T04:44:23 = September -10 =head3 yMMMd (d MMM y) 2008-02-05T18:30:30 = 5 Feb 2008 1995-12-22T09:05:02 = 22 Dec 1995 -0010-09-15T04:44:23 = 15 Sep -10 =head3 yMd (dd/MM/y) 2008-02-05T18:30:30 = 05/02/2008 1995-12-22T09:05:02 = 22/12/1995 -0010-09-15T04:44:23 = 15/09/-10 =head3 yQQQ (QQQ y) 2008-02-05T18:30:30 = Q1 2008 1995-12-22T09:05:02 = Q4 1995 -0010-09-15T04:44:23 = Q3 -10 =head3 yQQQQ (QQQQ y) 2008-02-05T18:30:30 = 1st quarter 2008 1995-12-22T09:05:02 = 4th quarter 1995 -0010-09-15T04:44:23 = 3rd quarter -10 =head2 Miscellaneous =head3 Prefers 24 hour time? Yes =head3 Local first day of the week 1 (Monday) =head1 SUPPORT See L<DateTime::Locale>. =cut
jkb78/extrajnm
local/lib/perl5/DateTime/Locale/en_IO.pod
Perl
mit
9,542
package WebService::Telnic;

use warnings;
use strict;

# This package is a documentation-only entry point for the distribution;
# the actual client implementation lives in WebService::Telnic::Client
# (see SEE ALSO below).

=head1 NAME

WebService::Telnic - Interface to Telnic's SOAP APIs

=head1 VERSION

Version 0.2

=cut

our $VERSION = '0.2';

=head1 AUTHOR

Peter Makholm, C<< <peter at makholm.net> >>

=head1 BUGS

Please report any bugs or feature requests to
C<bug-webservice-telnic at rt.cpan.org>, or through the web interface at
L<http://rt.cpan.org/NoAuth/ReportBug.html?Queue=WebService-Telnic>.
I will be notified, and then you'll automatically be notified of progress on
your bug as I make changes.

=head1 SEE ALSO

L<WebService::Telnic::Client>

=head1 SUPPORT

You can also look for information at:

=over 4

=item * github: Public version control system

L<http://github.com/pmakholm/telnic-perl/tree>

=item * RT: CPAN's request tracker

L<http://rt.cpan.org/NoAuth/Bugs.html?Dist=WebService-Telnic>

=item * AnnoCPAN: Annotated CPAN documentation

L<http://annocpan.org/dist/WebService-Telnic>

=item * CPAN Ratings

L<http://cpanratings.perl.org/d/WebService-Telnic>

=item * Search CPAN

L<http://search.cpan.org/dist/WebService-Telnic>

=back

=head1 ACKNOWLEDGEMENTS

=head1 COPYRIGHT & LICENSE

Copyright 2009 Peter Makholm, all rights reserved.

This software is released under the MIT license cited below.

=head1 The "MIT" License

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

=head1 DISCLAIMER OF WARRANTY

BECAUSE THIS SOFTWARE IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE SOFTWARE, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE SOFTWARE "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER
EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE
ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE SOFTWARE IS WITH
YOU. SHOULD THE SOFTWARE PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL
NECESSARY SERVICING, REPAIR, OR CORRECTION.

IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE SOFTWARE AS PERMITTED BY THE ABOVE LICENCE, BE
LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL,
OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE
THE SOFTWARE (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
FAILURE OF THE SOFTWARE TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.

=cut

1; # End of WebService::Telnic
pmakholm/telnic-perl
lib/WebService/Telnic.pm
Perl
mit
3,624
# please insert nothing before this line: -*- mode: cperl; cperl-indent-level: 4; cperl-continued-statement-offset: 4; indent-tabs-mode: nil -*-

package TestFilter::out_str_subreq_modperl;

use strict;
use warnings FATAL => 'all';

use Apache::Test;
use Apache::TestUtil;

use Apache2::RequestRec ();
use Apache2::RequestIO ();
use Apache2::SubRequest ();
use Apache2::Filter ();

use Apache2::Const -compile => qw(OK);

# include the contents of a subrequest
# in the filter, a la mod_include's
# <!--#include virtual="/subrequest" -->

# Output filter: passes the response through unchanged, except that any
# buffer equal to "<tag>\n" is replaced by the output of the
# /modperl_subrequest subrequest, and "filter\n" is appended once the
# end of the stream has been seen.
sub include {
    my $filter = shift;

    unless ($filter->ctx) {
        # First invocation for this request: the response length will
        # change, so don't forget to remove the C-L header.
        $filter->r->headers_out->unset('Content-Length');
        $filter->ctx(1);
    }

    while ($filter->read(my $buffer, 1024)) {
        if ($buffer eq "<tag>\n") {
            # Run the subrequest; its output is injected into the filter
            # stream in place of the marker.
            my $sub = $filter->r->lookup_uri('/modperl_subrequest');
            $sub->run;    # was: my $rc = $sub->run; -- $rc was never used
        }
        else {
            # send all other data along unaltered
            $filter->print($buffer);
        }
    }

    # add our own at the end
    if ($filter->seen_eos) {
        $filter->print("filter\n");
        # NOTE(review): ctx was already set on the first invocation above,
        # so this call looks redundant -- kept to preserve behavior.
        $filter->ctx(1);
    }

    return Apache2::Const::OK;
}

# Response handler for the injected subrequest.
sub subrequest {
    my $r = shift;
    $r->content_type('text/plain');
    $r->print("modperl subrequest\n");
    return Apache2::Const::OK;
}

# Main response handler: emits content with a "<tag>\n" marker for the
# filter to replace. Flushes after each print -- presumably so the marker
# reaches the filter as a standalone buffer, since the filter compares
# whole buffers against "<tag>\n" (TODO confirm).
sub response {
    my $r = shift;
    $r->content_type('text/plain');

    $r->print("content\n");
    $r->rflush;

    $r->print("<tag>\n");
    $r->rflush;

    $r->print("more content\n");

    Apache2::Const::OK;
}

1;
__DATA__
SetHandler modperl
PerlModule TestFilter::out_str_subreq_modperl
PerlResponseHandler TestFilter::out_str_subreq_modperl::response
PerlOutputFilterHandler TestFilter::out_str_subreq_modperl::include
<Location /modperl_subrequest>
    SetHandler modperl
    PerlResponseHandler TestFilter::out_str_subreq_modperl::subrequest
</Location>
dreamhost/dpkg-ndn-perl-mod-perl
t/filter/TestFilter/out_str_subreq_modperl.pm
Perl
apache-2.0
1,947
package Paws::MobileHub::ExportBundleResult; use Moose; has DownloadUrl => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'downloadUrl'); has _request_id => (is => 'ro', isa => 'Str'); 1; ### main pod documentation begin ### =head1 NAME Paws::MobileHub::ExportBundleResult =head1 ATTRIBUTES =head2 DownloadUrl => Str URL which contains the custom-generated SDK and tool packages used to integrate the client mobile app or web app with the AWS resources created by the AWS Mobile Hub project. =head2 _request_id => Str =cut
ioanrogers/aws-sdk-perl
auto-lib/Paws/MobileHub/ExportBundleResult.pm
Perl
apache-2.0
568
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. package AI::MXNet::Symbol; =head1 NAME AI::MXNet::Symbol - Symbolic interface of MXNet. =cut use strict; use warnings; use AI::MXNet::Base; use AI::MXNet::Symbol::Base; use AI::MXNet::Symbol::Random; use AI::MXNet::Types; use Mouse; use AI::MXNet::Function::Parameters; use overload '""' => \&stringify, '+' => \&add, '-' => \&subtract, '*' => \&multiply, '/' => \&divide, '/=' => \&idivide, '**' => \&power, '%' => \&mod, '==' => \&equal, '!=' => \&not_equal, '>' => \&greater, '>=' => \&greater_equal, '<' => \&lesser, '<=' => \&lesser_equal, '&{}' => sub { my $self = shift; sub { $self->call(@_) } }, '@{}' => sub { my $self = shift; [map { $self->slice($_) } @{ $self->list_outputs }] }; extends 'AI::MXNet::Symbol::Base'; has 'handle' => (is => 'rw', isa => 'SymbolHandle', required => 1); sub DEMOLISH { check_call(AI::NNVMCAPI::SymbolFree(shift->handle)); } method STORABLE_freeze($cloning) { return $self->tojson(); } method STORABLE_thaw($cloning, $json) { my $handle = check_call( AI::MXNetCAPI::SymbolCreateFromJSON( $json ) ); $self->handle($handle); } method stringify($other=, $reverse=) { my $name = $self->name; sprintf( "<%s %s%s>", ref($self), $name ? 
($name, '') : ('group [', join(', ', map { $_->name } @{ $self }) . ']') ); } method add(AI::MXNet::Symbol|Num $other, $reverse=) { return _ufunc_helper( $self, $other, qw/_Plus _PlusScalar/ ); } method subtract(AI::MXNet::Symbol|Num $other, $reverse=) { return _ufunc_helper( $self, $other, qw/_Minus _MinusScalar _RMinusScalar/, $reverse ); } method multiply(AI::MXNet::Symbol|Num $other, $reverse=) { return _ufunc_helper( $self, $other, qw/_Mul _MulScalar/ ); } method divide(AI::MXNet::Symbol|Num $other, $reverse=) { return _ufunc_helper( $self, $other, qw/_Div _DivScalar _RDivScalar/, $reverse ); } method power(AI::MXNet::Symbol|Num $other, $reverse=) { return _ufunc_helper( $self, $other, qw/_Power _PowerScalar _RPowerScalar/, $reverse ); } method equal(AI::MXNet::Symbol|Num $other, $reverse=) { return _ufunc_helper( $self, $other, qw/_equal _equal_scalar/ ); } method not_equal(AI::MXNet::Symbol|Num $other, $reverse=) { return _ufunc_helper( $self, $other, qw/_not_equal _not_equal_scalar/ ); } method greater(AI::MXNet::Symbol|Num $other, $reverse=) { return _ufunc_helper( $self, $other, qw/_greater _greater_scalar _lesser_scalar/, $reverse ); } method greater_equal(AI::MXNet::Symbol|Num $other, $reverse=) { return _ufunc_helper( $self, $other, qw/_greater_equal _greater_equal_scalar _lesser_equal_scalar/, $reverse ); } method lesser(AI::MXNet::Symbol|Num $other, $reverse=) { return _ufunc_helper( $self, $other, qw/_lesser _lesser_scalar _greater_scalar/, $reverse ); } method lesser_equal(AI::MXNet::Symbol|Num $other, $reverse=) { return _ufunc_helper( $self, $other, qw/_lesser_equal _lesser_equal_scalar _greater_equal_scalar/, $reverse ); } method true_divide(AI::MXNet::Symbol|Num $other, $reverse=) { return $self->divide($other, $reverse); } method mod(AI::MXNet::Symbol|Num $other, $reverse=) { return _ufunc_helper( $self, $other, qw/_Mod _ModScalar _RModScalar/, $reverse ); } method maximum(AI::MXNet::Symbol|Num $other) { return _ufunc_helper( $self, $other, 
qw/_Maximum _MaximumScalar/ ); } method minimum(AI::MXNet::Symbol|Num $other) { return _ufunc_helper( $self, $other, qw/_Minimum _MinimumScalar/ ); } method hypot(AI::MXNet::Symbol|Num $other) { return _ufunc_helper( $self, $other, qw/_Hypot _HypotScalar/ ); } method deepcopy() { my $handle = check_call(AI::MXNetCAPI::SymbolCopy($self->handle)); return __PACKAGE__->new(handle => $handle); } method call(@args) { my $s = $self->deepcopy(); $s->_compose(@args); return $s; } method slice(Str|Index $index) { ## __getitem__ tie needs to die if(not find_type_constraint('Index')->check($index)) { my $i = 0; my $idx; for my $name (@{ $self->list_outputs() }) { if($name eq $index) { if(defined $idx) { confess(qq/There are multiple outputs with name "$index"/); } $idx = $i; } $i++; } confess(qq/Cannot find output that matches name "$index"/) unless defined $idx; $index = $idx; } elsif($index >= @{ $self->list_outputs() }) { confess("Index: [$index] is outside of the range of the symbol: $self outputs"); } my $handle = check_call(AI::MXNetCAPI::SymbolGetOutput($self->handle, $index)); return __PACKAGE__->new(handle => $handle); } =head2 name Get name string from the symbol, this function only works for non-grouped symbol. Returns ------- value : str The name of this symbol, returns None for grouped symbol. =cut method name() { my ($name, $success) = check_call(AI::MXNetCAPI::SymbolGetName($self->handle)); return $success ? $name : undef; } =head2 attr Get an attribute string from the symbol, this function only works for non-grouped symbol. Parameters ---------- key : str The key to get attribute from. Returns ------- value : str The attribute value of the key, returns None if attribute do not exist. =cut method attr(Str $key) { my ($attr, $success) = check_call( AI::MXNetCAPI::SymbolGetAttr($self->handle, $key) ); return $success ? $attr : undef; } =head2 list_attr Get all attributes from the symbol. 
Returns ------- ret : hash ref of str to str a dicitonary mapping attribute keys to values =cut method list_attr() { my %ret; my @attrs = @{ check_call(AI::MXNetCAPI::SymbolListAttrShallow($self->handle)) }; while(@attrs) { my $k = shift(@attrs); my $v = shift(@attrs); $ret{ $k } = $v; } return \%ret; } =head2 attr_dict Recursively get all attributes from the symbol and its childrens Returns ------- ret : hash ref of str to hash ref. Returns a dict whose keys are names of the symbol and its children. Values of the returned dict are dictionaries that map attribute keys to values. =cut method attr_dict() { my %ret; my @attrs = @{ check_call(AI::MXNetCAPI::SymbolListAttr($self->handle)) }; my $size = @attrs/2; for (my $i = 0; $i < $size; $i++) { my ($name, $key) = split(/\$/, $attrs[$i*2]); my $val = $attrs[$i*2+1]; $ret{ $name }{ $key } = $val; } return \%ret; } method _set_attr(Str @args) { my %kwargs = @args; while(my ($key, $val) = each(%kwargs)) { check_call( AI::MXNetCAPI::SymbolSetAttr( $self->handle, $key, $val ) ); } } =head2 get_internals Get a new grouped symbol whose output contains all the internal outputs of this symbol. Returns ------- sgroup : AI::MXNet::Symbol The internal symbol of the symbol. =cut method get_internals() { my $handle = check_call(AI::MXNetCAPI::SymbolGetInternals($self->handle)); return __PACKAGE__->new(handle => $handle); } =head2 get_children Get a new grouped symbol whose output contains inputs to output nodes of the original symbol Returns ------- sgroup : Symbol or undef The children of the head node. If the symbol has no inputs undef will be returned. =cut method get_children() { my $handle = check_call(AI::MXNetCAPI::SymbolGetChildren($self->handle)); my $ret = __PACKAGE__->new(handle => $handle); return undef unless @{ $ret->list_outputs }; return $ret; } =head2 list_arguments List all the arguments in the symbol. 
Returns ------- args : array ref of strings =cut method list_arguments() { return scalar(check_call(AI::MXNetCAPI::SymbolListArguments($self->handle))); } =head2 list_outputs() List all outputs in the symbol. Returns ------- $out : array ref of strings. =cut method list_outputs() { return scalar(check_call(AI::MXNetCAPI::SymbolListOutputs($self->handle))); } =head2 list_auxiliary_states() List all auxiliary states in the symbol. Returns ------- aux_states : array ref of string List the names of the auxiliary states. Notes ----- Auxiliary states are special states of symbols that do not corresponds to an argument, and do not have gradient. But still be useful for the specific operations. A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. Most operators do not have Auxiliary states. =cut method list_auxiliary_states() { return scalar(check_call(AI::MXNetCAPI::SymbolListAuxiliaryStates($self->handle))); } =head2 list_inputs Lists all arguments and auxiliary states of this Symbol. Returns ------- inputs : array ref of str List of all inputs. Examples -------- >>> my $bn = mx->sym->BatchNorm(name=>'bn'); =cut method list_inputs() { return scalar(check_call(AI::NNVMCAPI::SymbolListInputNames($self->handle, 0))); } =head2 infer_type Infer the type of outputs and arguments of given known types of arguments. User can either pass in the known types in positional way or keyword argument way. Tuple of Nones is returned if there is not enough information passed in. An error will be raised if there is inconsistency found in the known types passed in. Parameters ---------- args : Array Provide type of arguments in a positional way. Unknown type can be marked as None kwargs : Hash ref, must ne ssupplied as as sole argument to the method. Provide keyword arguments of known types. Returns ------- arg_types : array ref of Dtype or undef List of types of arguments. 
The order is in the same order as list_arguments() out_types : array ref of Dtype or undef List of types of outputs. The order is in the same order as list_outputs() aux_types : array ref of Dtype or undef List of types of outputs. The order is in the same order as list_auxiliary() =cut method infer_type(Str|Undef @args) { my ($positional_arguments, $kwargs, $kwargs_order) = _parse_arguments("Dtype", @args); my $sdata = []; my $keys = []; if(@$positional_arguments) { @{ $sdata } = map { defined($_) ? DTYPE_STR_TO_MX->{ $_ } : -1 } @{ $positional_arguments }; } else { @{ $keys } = @{ $kwargs_order }; @{ $sdata } = map { DTYPE_STR_TO_MX->{ $_ } } @{ $kwargs }{ @{ $kwargs_order } }; } my ($arg_type, $out_type, $aux_type, $complete) = check_call(AI::MXNetCAPI::SymbolInferType( $self->handle, scalar(@{ $sdata }), $keys, $sdata ) ); if($complete) { return ( [ map { DTYPE_MX_TO_STR->{ $_ } } @{ $arg_type }], [ map { DTYPE_MX_TO_STR->{ $_ } } @{ $out_type }], [ map { DTYPE_MX_TO_STR->{ $_ } } @{ $aux_type }] ); } else { return (undef, undef, undef); } } =head2 infer_shape Infer the shape of outputs and arguments of given known shapes of arguments. User can either pass in the known shapes in positional way or keyword argument way. Tuple of Nones is returned if there is not enough information passed in. An error will be raised if there is inconsistency found in the known shapes passed in. Parameters ---------- *args : Provide shape of arguments in a positional way. Unknown shape can be marked as undef **kwargs : Provide keyword arguments of known shapes. Returns ------- arg_shapes : array ref of Shape or undef List of shapes of arguments. The order is in the same order as list_arguments() out_shapes : array ref of Shape or undef List of shapes of outputs. The order is in the same order as list_outputs() aux_shapes : array ref of Shape or undef List of shapes of outputs. 
The order is in the same order as list_auxiliary() =cut method infer_shape(Maybe[Str|Shape] @args) { my @res = $self->_infer_shape_impl(0, @args); if(not defined $res[1]) { my ($arg_shapes) = $self->_infer_shape_impl(1, @args); my $arg_names = $self->list_arguments; my @unknowns; zip(sub { my ($name, $shape) = @_; if(not ref $shape or not @$shape or not product(@$shape)) { if(@unknowns >= 10) { $unknowns[10] = '...'; } else { my @shape = eval { @$shape }; push @unknowns, "$name @shape"; } } }, $arg_names, $arg_shapes); AI::MXNet::Logging->warning( "Cannot decide shape for the following arguments " ."(0s in shape means unknown dimensions). " ."Consider providing them as input:\n\t" ."\n\t" .join(", ", @unknowns) ); } return @res; } =head2 infer_shape_partial Partially infer the shape. The same as infer_shape, except that the partial results can be returned. =cut method infer_shape_partial(Maybe[Str|Shape] @args) { $self->_infer_shape_impl(1, @args) } # The actual implementation for calling shape inference API. method _infer_shape_impl(Maybe[Str|Shape] @args) { my $partial = shift(@args); my ($positional_arguments, $kwargs, $kwargs_order) = _parse_arguments("Shape", @args); my $sdata = []; my $indptr = [0]; my $keys = []; if(@{ $positional_arguments }) { for my $shape (grep { defined } @{ $positional_arguments }) { push @{ $sdata }, @{ $shape }; push @{ $indptr }, scalar(@{ $sdata }); } } { for my $k (@{ $kwargs_order }) { push @{ $keys }, $k; push @{ $sdata }, @{ $kwargs->{ $k } }; push @{ $indptr }, scalar(@{ $sdata }); } } my $infer_func = $partial ? \&AI::MXNetCAPI::SymbolInferShapePartial : \&AI::MXNetCAPI::SymbolInferShape; my ($arg_shapes, $out_shapes, $aux_shapes, $complete) = check_call( $infer_func->( $self->handle, scalar(@{ $indptr }) - 1, $keys, $indptr, $sdata, ) ); if($complete) { return $arg_shapes, $out_shapes, $aux_shapes; } else { return (undef, undef, undef); } } =head2 debug_str The debug string. 
Returns ------- debug_str : string Debug string of the symbol. =cut method debug_str() { return scalar(check_call(AI::MXNetCAPI::SymbolPrint($self->handle))); } =head2 save Save the symbol into a file. You can also use Storable to do the job if you only work with Perl. The advantage of load/save is the file is language agnostic. This means the file saved using save can be loaded by other language binding of mxnet. You also get the benefit being able to directly load/save from cloud storage(S3, HDFS) Parameters ---------- fname : str The name of the file - s3://my-bucket/path/my-s3-symbol - hdfs://my-bucket/path/my-hdfs-symbol - /path-to/my-local-symbol See Also -------- load : Used to load symbol from file. =cut method save(Str $fname) { check_call(AI::MXNetCAPI::SymbolSaveToFile($self->handle, $fname)); } =head2 tojson Save the symbol into a JSON string. See Also -------- load_json : Used to load symbol from JSON string. =cut method tojson() { return scalar(check_call(AI::MXNetCAPI::SymbolSaveToJSON($self->handle))); } method _get_ndarray_inputs( Str $arg_key, HashRef[AI::MXNet::NDArray]|ArrayRef[AI::MXNet::NDArray] $args, ArrayRef[Str] $arg_names, Bool $allow_missing=0 ) { my ($arg_handles, $arg_arrays) = ([], []); if(ref $args eq 'ARRAY') { confess("Length of $arg_key do not match number of arguments") unless @$args == @$arg_names; @{ $arg_handles } = map { $_->handle } @{ $args }; $arg_arrays = $args; } else { my %tmp = ((map { $_ => undef } @$arg_names), %$args); if(not $allow_missing and grep { not defined } values %tmp) { my ($missing) = grep { not defined $tmp{ $_ } } (keys %tmp); confess("key $missing is missing in $arg_key"); } for my $name (@$arg_names) { push @$arg_handles, defined($tmp{ $name }) ? $tmp{ $name }->handle : undef; push @$arg_arrays, defined($tmp{ $name }) ? $tmp{ $name } : undef; } } return ($arg_handles, $arg_arrays); } =head2 simple_bind Bind current symbol to get an executor, allocate all the ndarrays needed. 
Allows specifying data types. This function will ask user to pass in ndarray of position they like to bind to, and it will automatically allocate the ndarray for arguments and auxiliary states that user did not specify explicitly. Parameters ---------- :$ctx : AI::MXNet::Context The device context the generated executor to run on. :$grad_req: string {'write', 'add', 'null'}, or list of str or dict of str to str, optional Specifies how we should update the gradient to the args_grad. - 'write' means everytime gradient is write to specified args_grad NDArray. - 'add' means everytime gradient is add to the specified NDArray. - 'null' means no action is taken, the gradient may not be calculated. :$type_dict : hash ref of str->Dtype Input type map, name->dtype :$group2ctx : hash ref of string to AI::MXNet::Context The mapping of the ctx_group attribute to the context assignment. :$shapes : hash ref of str->Shape Input shape map, name->shape :$shared_arg_names : Maybe[ArrayRef[Str]] The argument names whose 'NDArray' of shared_exec can be reused for initializing the current executor. :$shared_exec : Maybe[AI::MXNet::Executor] The executor whose arg_arrays, arg_arrays, grad_arrays, and aux_arrays can be reused for initializing the current executor. :$shared_buffer : Maybe[HashRef[AI::MXNet::NDArray]] The dict mapping argument names to the `NDArray` that can be reused for initializing the current executor. This buffer will be checked for reuse if one argument name of the current executor is not found in `shared_arg_names`. 
Returns ------- $executor : AI::MXNet::Executor The generated Executor =cut method simple_bind( AI::MXNet::Context :$ctx=AI::MXNet::Context->current_ctx, GradReq|ArrayRef[GradReq]|HashRef[GradReq] :$grad_req='write', Maybe[HashRef[Shape]] :$shapes=, Maybe[HashRef[Dtype]] :$type_dict=, Maybe[HashRef[AI::MXNet::Context]] :$group2ctx=, Maybe[ArrayRef[Str]] :$shared_arg_names=, Maybe[AI::MXNet::Executor] :$shared_exec=, Maybe[HashRef[AI::MXNet::NDArray]] :$shared_buffer= ) { my $num_provided_arg_types; my @provided_arg_type_names; my @provided_arg_type_data; if(defined $type_dict) { while(my ($k, $v) = each %{ $type_dict }) { push @provided_arg_type_names, $k; push @provided_arg_type_data, DTYPE_STR_TO_MX->{$v}; } $num_provided_arg_types = @provided_arg_type_names; } my @provided_arg_shape_data; # argument shape index in sdata, # e.g. [sdata[indptr[0]], sdata[indptr[1]]) is the shape of the first arg my @provided_arg_shape_idx = (0); my @provided_arg_shape_names; while(my ($k, $v) = each %{ $shapes//{} }) { push @provided_arg_shape_names, $k; push @provided_arg_shape_data, @{ $v }; push @provided_arg_shape_idx, scalar(@provided_arg_shape_data); } $num_provided_arg_types = @provided_arg_type_names; my $provided_req_type_list_len = 0; my @provided_grad_req_types; my @provided_grad_req_names; if(defined $grad_req) { if(not ref $grad_req) { push @provided_grad_req_types, $grad_req; } elsif(ref $grad_req eq 'ARRAY') { assert((@{ $grad_req } != 0), 'grad_req in simple_bind cannot be an empty list'); @provided_grad_req_types = @{ $grad_req }; $provided_req_type_list_len = @provided_grad_req_types; } elsif(ref $grad_req eq 'HASH') { assert((keys %{ $grad_req } != 0), 'grad_req in simple_bind cannot be an empty hash'); while(my ($k, $v) = each %{ $grad_req }) { push @provided_grad_req_names, $k; push @provided_grad_req_types, $v; } $provided_req_type_list_len = @provided_grad_req_types; } } my $num_ctx_map_keys = 0; my @ctx_map_keys; my @ctx_map_dev_types; my @ctx_map_dev_ids; 
if(defined $group2ctx) { while(my ($k, $v) = each %{ $group2ctx }) { push @ctx_map_keys, $k; push @ctx_map_dev_types, $v->device_type_id; push @ctx_map_dev_ids, $v->device_id; } $num_ctx_map_keys = @ctx_map_keys; } my @shared_arg_name_list; if(defined $shared_arg_names) { @shared_arg_name_list = @{ $shared_arg_names }; } my %shared_data; if(defined $shared_buffer) { while(my ($k, $v) = each %{ $shared_buffer }) { $shared_data{$k} = $v->handle; } } my $shared_exec_handle = defined $shared_exec ? $shared_exec->handle : undef; my ( $updated_shared_data, $in_arg_handles, $arg_grad_handles, $aux_state_handles, $exe_handle ); eval { ($updated_shared_data, $in_arg_handles, $arg_grad_handles, $aux_state_handles, $exe_handle) = check_call( AI::MXNetCAPI::ExecutorSimpleBind( $self->handle, $ctx->device_type_id, $ctx->device_id, $num_ctx_map_keys, \@ctx_map_keys, \@ctx_map_dev_types, \@ctx_map_dev_ids, $provided_req_type_list_len, \@provided_grad_req_names, \@provided_grad_req_types, scalar(@provided_arg_shape_names), \@provided_arg_shape_names, \@provided_arg_shape_data, \@provided_arg_shape_idx, $num_provided_arg_types, \@provided_arg_type_names, \@provided_arg_type_data, scalar(@shared_arg_name_list), \@shared_arg_name_list, defined $shared_buffer ? \%shared_data : undef, $shared_exec_handle ) ); }; if($@) { confess( "simple_bind failed: Error: $@; Arguments: ". Data::Dumper->new( [$shapes//{}] )->Purity(1)->Deepcopy(1)->Terse(1)->Dump ); } if(defined $shared_buffer) { while(my ($k, $v) = each %{ $updated_shared_data }) { $shared_buffer->{$k} = AI::MXNet::NDArray->new(handle => $v); } } my @arg_arrays = map { AI::MXNet::NDArray->new(handle => $_) } @{ $in_arg_handles }; my @grad_arrays = map { defined $_ ? 
AI::MXNet::NDArray->new(handle => $_) : undef } @{ $arg_grad_handles }; my @aux_arrays = map { AI::MXNet::NDArray->new(handle => $_) } @{ $aux_state_handles }; my $executor = AI::MXNet::Executor->new( handle => $exe_handle, symbol => $self, ctx => $ctx, grad_req => $grad_req, group2ctx => $group2ctx ); $executor->arg_arrays(\@arg_arrays); $executor->grad_arrays(\@grad_arrays); $executor->aux_arrays(\@aux_arrays); return $executor; } =head2 bind Bind current symbol to get an executor. Parameters ---------- :$ctx : AI::MXNet::Context The device context the generated executor to run on. :$args : HashRef[AI::MXNet::NDArray]|ArrayRef[AI::MXNet::NDArray] Input arguments to the symbol. - If type is array ref of NDArray, the position is in the same order of list_arguments. - If type is hash ref of str to NDArray, then it maps the name of arguments to the corresponding NDArray. - In either case, all the arguments must be provided. :$args_grad : Maybe[HashRef[AI::MXNet::NDArray]|ArrayRef[AI::MXNet::NDArray]] When specified, args_grad provide NDArrays to hold the result of gradient value in backward. - If type is array ref of NDArray, the position is in the same order of list_arguments. - If type is hash ref of str to NDArray, then it maps the name of arguments to the corresponding NDArray. - When the type is hash ref of str to NDArray, users only need to provide the dict for needed argument gradient. Only the specified argument gradient will be calculated. :$grad_req : {'write', 'add', 'null'}, or array ref of str or hash ref of str to str, optional Specifies how we should update the gradient to the args_grad. - 'write' means everytime gradient is write to specified args_grad NDArray. - 'add' means everytime gradient is add to the specified NDArray. - 'null' means no action is taken, the gradient may not be calculated. 
:$aux_states : array ref of NDArray, or hash ref of str to NDArray, optional Input auxiliary states to the symbol, only need to specify when list_auxiliary_states is not empty. - If type is array ref of NDArray, the position is in the same order of list_auxiliary_states - If type is hash ref of str to NDArray, then it maps the name of auxiliary_states to the corresponding NDArray, - In either case, all the auxiliary_states need to be provided. :$group2ctx : hash ref of string to AI::MXNet::Context The mapping of the ctx_group attribute to the context assignment. :$shared_exec : AI::MXNet::Executor Executor to share memory with. This is intended for runtime reshaping, variable length sequences, etc. The returned executor shares state with shared_exec, and should not be used in parallel with it. Returns ------- $executor : AI::MXNet::Executor The generated Executor Notes ----- Auxiliary states are special states of symbols that do not corresponds to an argument, and do not have gradient. But still be useful for the specific operations. A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. Most operators do not have auxiliary states and this parameter can be safely ignored. User can give up gradient by using a hash ref in args_grad and only specify the gradient they're interested in. 
=cut method bind( AI::MXNet::Context :$ctx, HashRef[AI::MXNet::NDArray]|ArrayRef[AI::MXNet::NDArray] :$args, Maybe[HashRef[AI::MXNet::NDArray]|ArrayRef[AI::MXNet::NDArray]] :$args_grad=, Str|HashRef[Str]|ArrayRef[Str] :$grad_req='write', Maybe[HashRef[AI::MXNet::NDArray]|ArrayRef[AI::MXNet::NDArray]] :$aux_states=, Maybe[HashRef[AI::MXNet::Context]] :$group2ctx=, Maybe[AI::MXNet::Executor] :$shared_exec= ) { $grad_req //= 'write'; my $listed_arguments = $self->list_arguments(); my ($args_handle, $args_grad_handle, $aux_args_handle) = ([], [], []); ($args_handle, $args) = $self->_get_ndarray_inputs('args', $args, $listed_arguments); if(not defined $args_grad) { @$args_grad_handle = ((undef) x (@$args)); } else { ($args_grad_handle, $args_grad) = $self->_get_ndarray_inputs( 'args_grad', $args_grad, $listed_arguments, 1 ); } if(not defined $aux_states) { $aux_states = []; } ($aux_args_handle, $aux_states) = $self->_get_ndarray_inputs( 'aux_states', $aux_states, $self->list_auxiliary_states() ); # setup requirements my $req_map = { null => 0, write => 1, add => 3 }; my $req_array = []; if(not ref $grad_req) { confess('grad_req must be one of "null,write,add"') unless exists $req_map->{ $grad_req }; @{ $req_array } = (($req_map->{ $grad_req }) x @{ $listed_arguments }); } elsif(ref $grad_req eq 'ARRAY') { @{ $req_array } = map { $req_map->{ $_ } } @{ $grad_req }; } else { for my $name (@{ $listed_arguments }) { if(exists $grad_req->{ $name }) { push @{ $req_array }, $req_map->{ $grad_req->{ $name } }; } else { push @{ $req_array }, 0; } } } my $ctx_map_keys = []; my $ctx_map_dev_types = []; my $ctx_map_dev_ids = []; if(defined $group2ctx) { while(my ($key, $val) = each %{ $group2ctx }) { push @{ $ctx_map_keys } , $key; push @{ $ctx_map_dev_types }, $val->device_type_id; push @{ $ctx_map_dev_ids }, $val->device_id; } } my $shared_handle = $shared_exec->handle if $shared_exec; my $handle = check_call(AI::MXNetCAPI::ExecutorBindEX( $self->handle, $ctx->device_type_id, 
$ctx->device_id, scalar(@{ $ctx_map_keys }), $ctx_map_keys, $ctx_map_dev_types, $ctx_map_dev_ids, scalar(@{ $args }), $args_handle, $args_grad_handle, $req_array, scalar(@{ $aux_states }), $aux_args_handle, $shared_handle ) ); my $executor = AI::MXNet::Executor->new( handle => $handle, symbol => $self, ctx => $ctx, grad_req => $grad_req, group2ctx => $group2ctx ); $executor->arg_arrays($args); $executor->grad_arrays($args_grad); $executor->aux_arrays($aux_states); return $executor; } =head2 eval Evaluate a symbol given arguments The `eval` method combines a call to `bind` (which returns an executor) with a call to `forward` (executor method). For the common use case, where you might repeatedly evaluate with same arguments, eval is slow. In that case, you should call `bind` once and then repeatedly call forward. Eval allows simpler syntax for less cumbersome introspection. Parameters ---------- :$ctx : Context The device context the generated executor to run on. Optional, defaults to cpu(0) :$args array ref of NDArray or hash ref of NDArray - If the type is an array ref of NDArray, the position is in the same order of list_arguments. - If the type is a hash of str to NDArray, then it maps the name of the argument to the corresponding NDArray. - In either case, all arguments must be provided. Returns ---------- result : an array ref of NDArrays corresponding to the values taken by each symbol when evaluated on given args. When called on a single symbol (not a group), the result will be an array ref with one element. Examples: my $result = $symbol->eval(ctx => mx->gpu, args => {data => mx->nd->ones([5,5])}); my $result = $symbol->eval(args => {data => mx->nd->ones([5,5])}); =cut method eval(:$ctx=AI::MXNet::Context->cpu, HashRef[AI::MXNet::NDArray]|ArrayRef[AI::MXNet::NDArray] :$args) { return $self->bind(ctx => $ctx, args => $args)->forward; } =head2 grad Get the autodiff of current symbol. This function can only be used if current symbol is a loss function. 
Parameters ---------- $wrt : Array of String keyword arguments of the symbol that the gradients are taken. Returns ------- grad : AI::MXNet::Symbol A gradient Symbol with returns to be the corresponding gradients. =cut method grad(ArrayRef[Str] $wrt) { my $handle = check_call(AI::MXNetCAPI::SymbolGrad( $self->handle, scalar(@$wrt), $wrt ) ); return __PACKAGE__->new(handle => $handle); } =head2 Variable Create a symbolic variable with specified name. Parameters ---------- name : str Name of the variable. attr : hash ref of string -> string Additional attributes to set on the variable. shape : array ref of positive integers Optionally, one can specify the shape of a variable. This will be used during shape inference. If user specified a different shape for this variable using keyword argument when calling shape inference, this shape information will be ignored. lr_mult : float Specify learning rate muliplier for this variable. wd_mult : float Specify weight decay muliplier for this variable. dtype : Dtype Similar to shape, we can specify dtype for this variable. init : initializer (mx->init->*) Specify initializer for this variable to override the default initializer kwargs : hash ref other additional attribute variables Returns ------- variable : Symbol The created variable symbol. 
=cut method Variable( Str $name, HashRef[Str] :$attr={}, Maybe[Shape] :$shape=, Maybe[Num] :$lr_mult=, Maybe[Num] :$wd_mult=, Maybe[Dtype] :$dtype=, Maybe[Initializer] :$init=, HashRef[Str] :$kwargs={}, Maybe[Str] :$__layout__= ) { my $handle = check_call(AI::MXNetCAPI::SymbolCreateVariable($name)); my $ret = __PACKAGE__->new(handle => $handle); $attr = AI::MXNet::Symbol::AttrScope->current->get($attr); $attr->{__shape__} = "(".join(',', @{ $shape }).")" if $shape; $attr->{__lr_mult__} = $lr_mult if defined $lr_mult; $attr->{__wd_mult__} = $wd_mult if defined $wd_mult; $attr->{__dtype__} = DTYPE_STR_TO_MX->{ $dtype } if $dtype; $attr->{__init__} = "$init" if defined $init; $attr->{__layout__} = $__layout__ if defined $__layout__; while(my ($k, $v) = each %{ $kwargs }) { if($k =~ /^__/ and $k =~ /__$/) { $attr->{$k} = "$v"; } else { confess("Attribute name=$k is not supported.". ' Additional attributes must start and end with double underscores,'. ' e.g, __yourattr__' ); } } $ret->_set_attr(%{ $attr }); return $ret; } =head2 var A synonym to Variable. =cut *var = \&Variable; =head2 Group Create a symbol that groups symbols together. Parameters ---------- symbols : array ref List of symbols to be grouped. Returns ------- sym : Symbol The created group symbol. =cut method Group(ArrayRef[AI::MXNet::Symbol] $symbols) { my @handles = map { $_->handle } @{ $symbols }; my $handle = check_call(AI::MXNetCAPI::SymbolCreateGroup(scalar(@handles), \@handles)); return __PACKAGE__->new(handle => $handle); } =head2 load Load symbol from a JSON file. You can also use Storable to do the job if you only work with Perl. The advantage of load/save is the file is language agnostic. This means the file saved using save can be loaded by other language binding of mxnet. 
You also get the benefit being able to directly load/save from cloud storage(S3, HDFS) Parameters ---------- fname : str The name of the file, examples: - `s3://my-bucket/path/my-s3-symbol` - `hdfs://my-bucket/path/my-hdfs-symbol` - `/path-to/my-local-symbol` Returns ------- sym : Symbol The loaded symbol. See Also -------- AI::MXNet::Symbol->save : Used to save symbol into file. =cut method load(Str $fname) { my $handle = check_call(AI::MXNetCAPI::SymbolCreateFromFile($fname)); return __PACKAGE__->new(handle => $handle); } =head2 load_json Load symbol from json string. Parameters ---------- json_str : str A json string. Returns ------- sym : Symbol The loaded symbol. See Also -------- AI::MXNet::Symbol->tojson : Used to save symbol into json string. =cut method load_json(Str $json) { my $handle = check_call(AI::MXNetCAPI::SymbolCreateFromJSON($json)); return __PACKAGE__->new(handle => $handle); } method zeros(Shape :$shape, Dtype :$dtype='float32', Maybe[Str] :$name=, Maybe[Str] :$__layout__=) { return __PACKAGE__->_zeros({ shape => $shape, dtype => $dtype, name => $name, ($__layout__ ? (__layout__ => $__layout__) : ()) }); } method ones(Shape :$shape, Dtype :$dtype='float32', Maybe[Str] :$name=, Maybe[Str] :$__layout__=) { return __PACKAGE__->_ones({ shape => $shape, dtype => $dtype, name => $name, ($__layout__ ? (__layout__ => $__layout__) : ()) }); } =head2 arange Simlar function in the MXNet ndarray as numpy.arange See Also https://docs.scipy.org/doc/numpy/reference/generated/numpy.arange.html. Parameters ---------- start : number Start of interval. The interval includes this value. The default start value is 0. stop : number, optional End of interval. The interval does not include this value. step : number, optional Spacing between values repeat : int, optional "The repeating time of all elements. E.g repeat=3, the element a will be repeated three times --> a, a, a. 
dtype : type, optional The value type of the NDArray, default to np.float32 Returns ------- out : Symbol The created Symbol =cut method arange(Index :$start=0, Index :$stop=, Num :$step=1.0, Index :$repeat=1, Maybe[Str] :$name=, Dtype :$dtype='float32') { return __PACKAGE__->_arange({ start => $start, (defined $stop ? (stop => $stop) : ()), step => $step, repeat => $repeat, name => $name, dtype => $dtype }); } sub _parse_arguments { my $type = shift; my @args = @_; my $type_c = find_type_constraint($type); my $str_c = find_type_constraint("Str"); my @positional_arguments; my %kwargs; my @kwargs_order; my $only_dtypes_and_undefs = (@args == grep { not defined($_) or $type_c->check($_) } @args); my $only_dtypes_and_strs = (@args == grep { $type_c->check($_) or $str_c->check($_) } @args); if(@args % 2 and $only_dtypes_and_undefs) { @positional_arguments = @args; } else { if($only_dtypes_and_undefs) { @positional_arguments = @args; } elsif($only_dtypes_and_strs) { my %tmp = @args; if(values(%tmp) == grep { $type_c->check($_) } values(%tmp)) { %kwargs = %tmp; my $i = 0; @kwargs_order = grep { $i ^= 1 } @args; } else { confess("Argument need to be of type $type"); } } else { confess("Argument need to be one type $type"); } } return (\@positional_arguments, \%kwargs, \@kwargs_order); } sub _ufunc_helper { my ($lhs, $rhs, $fn_symbol, $lfn_scalar, $rfn_scalar, $reverse) = @_; ($rhs, $lhs) = ($lhs, $rhs) if $reverse and $rfn_scalar; if(not ref $lhs) { if(not $rfn_scalar) { return __PACKAGE__->can($lfn_scalar)->(__PACKAGE__, $rhs, { "scalar" => $lhs }); } else { return __PACKAGE__->can($rfn_scalar)->(__PACKAGE__, $rhs, { "scalar" => $lhs }); } } elsif(not ref $rhs) { return __PACKAGE__->can($lfn_scalar)->(__PACKAGE__, $lhs, { "scalar" => $rhs }); } else { return __PACKAGE__->can($fn_symbol)->(__PACKAGE__, $lhs, $rhs); } } sub contrib { 'AI::MXNet::Contrib::Symbol' } sub random { 'AI::MXNet::Symbol::Random' } 1;
Mega-DatA-Lab/mxnet
perl-package/AI-MXNet/lib/AI/MXNet/Symbol.pm
Perl
apache-2.0
42,975
# Action::itemGroupReview -- PSGI action that renders a single item for
# group review and lets the current user add/update a review comment.
#
# NOTE(review): state is shared with print_item() through 'our' package
# globals ($q, $dbh, %in, $user, $item, %comment_list, ...); the two subs
# must be kept in sync when any of these names change.
package Action::itemGroupReview;

use ItemConstants;
use Item;
use Session;

# run($q, $dbh) -- action entry point.
#
# $q   - CGI::PSGI-style request object (provides param(), env, psgi_header)
# $dbh - connected DBI database handle
#
# Returns a PSGI-style response: [ header-list, [ html-body ] ].
sub run {
    our $q = shift;
    our $dbh = shift;
    # Flatten all request parameters into %in for convenient access.
    our %in = map { $_ => $q->param($_) } $q->param;
    our $user = Session::getUser($q->env, $dbh);
    our $debug = 1;
    our $sth;
    our $sql;
    # %review_type_map / Item come from ItemConstants / Item (imported above).
    our $userType = $review_type_map{$user->{reviewType}};
    our $isAdmin = $user->{adminType} ? 1 : 0;
    our $item = new Item( $dbh, $in{itemId} );
    # commentViewState: 0 = none, 2 = lead reviewer (sees all comments).
    our $commentViewState = $in{commentViewState} || 0;

    # this will hold a list of all comments, if used
    our %comment_list = ();

    # add a comment
    if($in{myAction} eq 'addComment') {
        &addItemComment($dbh, $in{itemId}, $user->{id}, $commentViewState,
            $item->{devState}, $in{itemRating}, $in{commentText});
        $in{message} = 'Comment Updated.';
    }

    # look for an existing comment from this user, in case we've already commented on it
    # (the %d conversions force numeric interpolation here)
    $sql = sprintf('SELECT * FROM item_comment WHERE i_id=%d AND u_id=%d AND ic_dev_state=%d AND ic_type=%d',
        $in{itemId}, $user->{id}, $item->{devState}, $commentViewState);
    $sth = $dbh->prepare($sql);
    $sth->execute();
    if(my $row=$sth->fetchrow_hashref) {
        # Pre-populate the form with the previously saved rating/comment.
        $in{itemRating} = $row->{ic_rating};
        $in{commentText} = $row->{ic_comment};
    }

    # if this is a lead, then also assemble the list of comments already made
    if($commentViewState == 2) {
        # NOTE(review): $in{itemId} is interpolated into SQL unquoted here,
        # unlike the sprintf('%d') query above -- looks injectable; confirm
        # upstream validation or switch to placeholders.
        $sql = <<SQL;
SELECT ic.*, u.u_first_name, u.u_last_name
FROM item_comment AS ic, user AS u
WHERE ic.i_id=$in{itemId}
AND ic.ic_dev_state=$item->{devState}
AND ic.ic_type=1
AND ic.u_id=u.u_id
SQL
        $sth = $dbh->prepare($sql);
        $sth->execute();
        while(my $row = $sth->fetchrow_hashref) {
            # Key the collected comments by comment id; values are display-ready.
            my $key = $row->{ic_id};
            $comment_list{$key}{userId} = $row->{u_id};
            $comment_list{$key}{userName} = $row->{u_last_name} . ', ' . $row->{u_first_name};
            $comment_list{$key}{rating} = $item_rating{$row->{ic_rating}};
            $comment_list{$key}{comment} = $row->{ic_comment};
            $comment_list{$key}{timestamp} = $row->{ic_timestamp};
        }
        $sth->finish;
    }

    # PSGI response: headers plus the rendered page body.
    return [ $q->psgi_header('text/html'), [ &print_item(\%in) ]];
}

### ALL DONE!
###

# print_item(\%params) -- render the item-review HTML page.
#
# \%params - request parameters (normally \%in from run()); reads
#            doCommentView / commentViewState / commentText / itemRating.
#
# Returns the complete HTML document as a string.
#
# NOTE(review): besides $params this sub depends on package globals set up
# by run() ($item, %in) and on names exported by ItemConstants
# (%item_formats, %difficulty_levels, %dev_states, @ctypes, @labels, @const,
# %item_rating, $orcaUrl, $commonUrl, $instance_name, hashToSelect) -- it
# cannot be called standalone.
#
# Fixes in this revision:
#   * passage/rubric loops iterated "(%$hash)" -- the flattened key/value
#     list -- so each value hashref was also treated as a key (stringified
#     ref), autovivifying junk entries via the defined-check.  Now iterates
#     "keys %$hash" only.
#   * the "View Metafiles" <input> tag was never closed, emitting malformed
#     HTML; it now ends with "/>".
sub print_item {
    my $psgi_out = '';
    my $params = shift;
    my $c = $item->getDisplayContent();
    my $gle = $item->getGLE();
    my $documentReadyFunction = $c->{documentReadyFunction};
    my $formatName = $item_formats{ $item->{format} } || '';
    my $difficultyName = $difficulty_levels{ $item->{difficulty} } || '';
    my $itemApprover = $item->getApprover();
    my $itemWriter = $item->{authorName} || '';
    my $devStateName = $dev_states{ $item->{devState} };
    my $doCommentView = $params->{doCommentView} || 0;
    my $commentViewState = $params->{commentViewState} || 0;

    # GLE (grade-level expectation) display text; strip the "GLE" prefix
    # and convert newlines to <br /> for HTML.
    my $gleName = ( defined $gle->{name} ? $gle->{name} : '' );
    $gleName =~ s/GLE//;
    my $gleText = ( defined $gle->{text} ? $gle->{text} : '' );
    $gleText =~ s/\r?\n/<br \/>/g;

    # Build one table row per item characteristic type.
    my $charDisplay = "";
    foreach (@ctypes) {
        $charDisplay .= '<tr><td>' . ( $labels[$_] || '' ) . '</td>'
            . '<td><b>' . ( $const[$_]->{ $item->{$_} || '' } || '' ) . '</b></td></tr>';
    }

    my $passages = $item->getPassages();
    my $rubrics = $item->getRubrics();
    my $metafiles = $item->getMetafiles();
    my $displayRatingHtml = &hashToSelect( 'itemRating', \%item_rating, $in{itemRating} || 0 );

    #my $mediaAssets = &getMediaAssetAttributes($dbh, $item->{id});
    #if (scalar(@{$mediaAssets})>0) {
    #    $documentReadyFunction .= '$("#noMediaMessage").hide();';
    #}

    # Leads get a sortable comment table; wire up tablesorter on page load.
    if($commentViewState == 2) {
        $documentReadyFunction .= "\$(\"#commentTable\").tablesorter();\n";
    }

    my $msg = ($in{message} eq '') ? '' : '<p style="color:blue;">' . $in{message} . '</p>';

    $psgi_out .= <<END_HERE;
<!DOCTYPE HTML>
<html>
<head>
<title>Item Viewer</title>
<link href="${orcaUrl}style/text.css" rel="stylesheet" type="text/css">
<link href="${orcaUrl}style/item-style.css" rel="stylesheet" type="text/css">
<link rel="stylesheet" href="${orcaUrl}style/tablesorter/style.css" type="text/css" />
<link href="${commonUrl}style/jplayer/jplayer.blue.monday.css" rel="stylesheet" type="text/css">
<script type="text/javascript" src="${commonUrl}js/jquery-1.4.2.min.js"></script>
<script type="text/javascript" src="${commonUrl}js/jquery.jplayer.min.js"></script>
<script type="text/javascript" src="${commonUrl}js/jquery.tablesorter.min.js"></script>
<script type="text/javascript">
\$(document).ready(function() {
${documentReadyFunction}
});
</script>
<style type="text/css">
td { vertical-align: middle; }
</style>
<script language="JavaScript">
function myOpen(name,url,w,h) {
var myWin = window.open(url,name,'width='+w+',height='+h+',resizable=yes,scrollbars=yes,left=250,top=100,toolbar=no,location=no,directories=no,status=no,menubar=no,copyhistory=no');
return true;
}
function doCommentSubmit() {
document.itemForm.myAction.value = 'addComment';
document.itemForm.submit();
}
</script>
</head>
<body>
<form name="itemForm" action="itemGroupReview.pl" method="POST">
<input type="hidden" name="myAction" value="" />
<input type="hidden" name="itemId" value="$in{itemId}" />
${msg}
<br />
<p>Item:&nbsp;&nbsp;<b>$item->{name}</b>&nbsp;&nbsp;&lt;$item->{bankName}&gt;<br />
Description:&nbsp;&nbsp;$item->{description}</p>
END_HERE

    if ( $gleName ne '' ) {
        $psgi_out .= <<END_HERE;
<table class="no-style" border="1" cellspacing="3" cellpadding="3" width="455">
<tr><th align="center">GLE ${gleName}</th></tr>
<tr><td>${gleText}</td></tr>
</table>
<br />
END_HERE
    }

    if($doCommentView) {
        # Comment entry form (all reviewers).
        $psgi_out .= <<END_HERE;
<input type="hidden" name="doCommentView" value="${doCommentView}" />
<input type="hidden" name="commentViewState" value="${commentViewState}" />
<table border="1" cellspacing="2" cellpadding="2">
<tr>
<td>Comment</td>
<td><textarea name="commentText" rows="4" cols="50">$in{commentText}</textarea></td>
</tr>
<tr>
<td>Rating</td>
<td>${displayRatingHtml}</td>
</tr>
<tr>
<td colspan="2"><input type="button" value="Add/Update Comment" onClick="doCommentSubmit();" /></td>
</tr>
</table>
</form>
<br />
END_HERE
        # Leads also see every reviewer's comment, newest first.
        if($commentViewState == 2) {
            $psgi_out .= <<END_HERE;
<p>Reviewer Comments:</p>
<table id="commentTable" class="tablesorter" border="1" cellspacing="2" cellpadding="2">
<thead>
<tr>
<th width="15%">Rating</th>
<th width="45%">Comment</th>
<th width="15%">User</th>
<th width="15%">Time</th>
</tr>
</thead>
<tbody>
END_HERE
            foreach my $key (sort { $comment_list{$b}{timestamp} cmp $comment_list{$a}{timestamp} } keys %comment_list) {
                my $data = $comment_list{$key};
                $psgi_out .= <<END_HERE;
<tr>
<td>$data->{rating}</td>
<td>$data->{comment}</td>
<td>$data->{userName}</td>
<td>$data->{timestamp}</td>
</tr>
END_HERE
            }
            $psgi_out .= '</tbody></table><br />';
        }
    }

    $psgi_out .= <<END_HERE;
<p><a href="${orcaUrl}cgi-bin/cde.pl?action=displayPublicationHistory&item_id=$in{itemId}&instance_name=${instance_name}" target="_blank">View Item Publication History</a></p>
<br />
$c->{itemBody}
END_HERE

    #$psgi_out .= "<br />".&getMediaTableHtml($mediaAssets);

    $psgi_out .= <<END_HERE;
<br />
$c->{distractorRationale}
<br />
$c->{correctResponse}
<br />
<table border=1 cellspacing=3 cellpadding=2>
<tr><td>Item Format:</td><td><b>${formatName}</b></td></tr>
${charDisplay}
<tr><td>Dev State:</td><td><b>${devStateName}</b></td></tr>
<tr><td>Difficulty:</td><td><b>${difficultyName}</b></td></tr>
END_HERE

    if (%$passages) {
        $psgi_out .= '<tr><td>Linked Passages:</td><td>';
        # Iterate keys only (iterating %$passages directly would flatten
        # the hash into key,value,key,value and corrupt the lookup).
        foreach my $pkey (keys %$passages) {
            next unless defined $passages->{$pkey}->{name};
            $psgi_out .= '<div><a href="#" onClick="myOpen(\'passageWin\',\''
                . $orcaUrl . 'cgi-bin/passageView.pl?passageId=' . $pkey
                . '\',500,600);">' . $passages->{$pkey}->{name} . '</a></div>';
        }
        $psgi_out .= '</td></tr>';
    }

    if (%$rubrics) {
        $psgi_out .= '<tr><td>Linked Rubrics:</td><td>';
        # Same keys-only fix as the passages loop above.
        foreach my $key (keys %$rubrics) {
            next unless defined $rubrics->{$key}->{name};
            $psgi_out .= '<div><a href="' . $orcaUrl
                . 'cgi-bin/rubricView.pl?rubricId=' . $key
                . '" target="_blank">' . $rubrics->{$key}->{name} . '</a></div>';
        }
        $psgi_out .= '</td></tr>';
    }

    if (%$metafiles) {
        $psgi_out .= <<END_HERE;
<tr>
<td>Metafiles:</td>
<td><input type="button" value="View Metafiles" onClick="myOpen('itemMetafileWindow','${orcaUrl}cgi-bin/itemMetafiles.pl?itemId=$item->{id}',600,550);" /></td>
</tr>
END_HERE
    }

    unless ( $item->{sourceDoc} eq '' ) {
        $psgi_out .= <<END_HERE;
<tr>
<td>Source Doc:</td><td>$item->{sourceDoc}</td>
</tr>
END_HERE
    }

    $psgi_out .= <<END_HERE;
<tr>
<td>Writer:</td><td>${itemWriter}</td>
</tr>
<tr>
<td>Reviewer:</td><td>${itemApprover}</td>
</tr>
</table>
END_HERE

    $psgi_out .= <<END_HERE;
</body>
</html>
END_HERE

    return $psgi_out;
}

1;
SmarterApp/ItemAuthoring
sbac-iaip/perl/Action/itemGroupReview.pm
Perl
apache-2.0
10,032
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Pure-Perl stub for the XS-backed Lucy::Store::RAMFileHandle class: the
# implementation lives in the compiled Lucy core loaded by "use Lucy";
# this file only declares the package and its version for the installer.
package Lucy::Store::RAMFileHandle;
use Lucy;
# Keep the quoted string assignment on its own line so static version
# parsers (e.g. ExtUtils::MakeMaker) can find it; the eval numifies the
# value for runtime version comparisons.
our $VERSION = '0.006000';
$VERSION = eval $VERSION;

1;

__END__
apache/lucy
perl/lib/Lucy/Store/RAMFileHandle.pm
Perl
apache-2.0
896
#------------------------------------------------------------------------------ # File: FLIR.pm # # Description: Read FLIR meta information # # Revisions: 2013/03/28 - P. Harvey Created # # References: 1) http://u88.n24.queensu.ca/exiftool/forum/index.php/topic,4898.0.html # 2) http://www.nuage.ch/site/flir-i7-some-analysis/ # 3) http://www.workswell.cz/manuals/flir/hardware/A3xx_and_A6xx_models/Streaming_format_ThermoVision.pdf # 4) http://support.flir.com/DocDownload/Assets/62/English/1557488%24A.pdf # 5) http://code.google.com/p/dvelib/source/browse/trunk/flirPublicFormat/fpfConverter/Fpfimg.h?spec=svn3&r=3 # 6) http://u88.n24.queensu.ca/exiftool/forum/index.php/topic,5538.0.html # JD) Jens Duttke private communication # # Glossary: FLIR = Forward Looking Infra Red #------------------------------------------------------------------------------ package Image::ExifTool::FLIR; use strict; use vars qw($VERSION); use Image::ExifTool qw(:DataAccess :Utils); use Image::ExifTool::Exif; use Image::ExifTool::GPS; $VERSION = '1.14'; sub ProcessFLIR($$;$); sub ProcessFLIRText($$$); sub ProcessMeasInfo($$$); sub GetImageType($$$); my %temperatureInfo = ( Writable => 'rational64u', Format => 'rational64s', # (have seen negative values) ); # tag information for floating point Kelvin tag my %floatKelvin = ( Format => 'float', ValueConv => '$val - 273.15', PrintConv => 'sprintf("%.1f C",$val)', ); # commonly used tag information elements my %float1f = ( Format => 'float', PrintConv => 'sprintf("%.1f",$val)' ); my %float2f = ( Format => 'float', PrintConv => 'sprintf("%.2f",$val)' ); my %float6f = ( Format => 'float', PrintConv => 'sprintf("%.6f",$val)' ); my %float8g = ( Format => 'float', PrintConv => 'sprintf("%.8g",$val)' ); # FLIR makernotes tags (ref PH) %Image::ExifTool::FLIR::Main = ( WRITE_PROC => \&Image::ExifTool::Exif::WriteExif, CHECK_PROC => \&Image::ExifTool::Exif::CheckExif, GROUPS => { 0 => 'MakerNotes', 2 => 'Camera' }, WRITABLE => 1, PRIORITY => 0, # 
(unreliable) NOTES => q{ Information extracted from the maker notes of JPEG images from thermal imaging cameras by FLIR Systems Inc. }, 0x01 => { #2 Name => 'ImageTemperatureMax', %temperatureInfo, Notes => q{ these temperatures may be in Celcius, Kelvin or Fahrenheit, but there is no way to tell which }, }, 0x02 => { Name => 'ImageTemperatureMin', %temperatureInfo }, #2 0x03 => { #1 Name => 'Emissivity', Writable => 'rational64u', PrintConv => 'sprintf("%.2f",$val)', PrintConvInv => '$val', }, # 0x04 does not change with temperature units; often 238, 250 or 457 0x04 => { Name => 'UnknownTemperature', %temperatureInfo, Unknown => 1 }, # 0x05,0x06 are unreliable. As written by FLIR tools, these are the # CameraTemperatureRangeMax/Min, but the units vary depending on the # options settings. But as written by some cameras, the values are different. 0x05 => { Name => 'CameraTemperatureRangeMax', %temperatureInfo, Unknown => 1 }, 0x06 => { Name => 'CameraTemperatureRangeMin', %temperatureInfo, Unknown => 1 }, # 0x07 - string[33] (some sort of image ID?) # 0x08 - string[33] # 0x09 - undef (tool info) # 0x0a - int32u: 1 # 0x0f - rational64u: 0/1000 # 0x10,0x11,0x12 - int32u: 0 # 0x13 - rational64u: 0/1000 ); # FLIR FFF tag table (ref PH) %Image::ExifTool::FLIR::FFF = ( GROUPS => { 0 => 'APP1', 2 => 'Image' }, PROCESS_PROC => \&ProcessFLIR, VARS => { ALPHA_FIRST => 1 }, NOTES => q{ Information extracted from FLIR FFF images and the FLIR APP1 segment of JPEG images. These tags may also be extracted from the first frame of an FLIR SEQ file. 
}, "_header" => { Name => 'FFFHeader', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::Header' }, }, # 0 = free (ref 3) 0x01 => { Name => 'RawData', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::RawData' }, }, # 2 = GainMap (ref 3) # 3 = OffsMap (ref 3) # 4 = DeadMap (ref 3) 0x05 => { #6 Name => 'GainDeadData', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::GainDeadData' }, }, 0x06 => { #6 Name => 'CoarseData', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::CoarseData' }, }, # 7 = ImageMap (ref 3) 0x0e => { Name => 'EmbeddedImage', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::EmbeddedImage' }, }, 0x20 => { Name => 'CameraInfo', # (BasicData - ref 3) SubDirectory => { TagTable => 'Image::ExifTool::FLIR::CameraInfo' }, }, 0x21 => { #6 Name => 'MeasurementInfo', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::MeasInfo' }, }, 0x22 => { Name => 'PaletteInfo', # (ColorPal - ref 3) SubDirectory => { TagTable => 'Image::ExifTool::FLIR::PaletteInfo' }, }, 0x23 => { Name => 'TextInfo', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::TextInfo' }, }, 0x24 => { Name => 'EmbeddedAudioFile', # (sometimes has an unknown 8-byte header) RawConv => q{ return \$val if $val =~ s/^.{0,16}?RIFF/RIFF/s; $self->Warn('Unknown EmbeddedAudioFile format'); return undef; }, }, # 0x27: 01 00 08 00 10 00 00 00 0x28 => { Name => 'PaintData', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::PaintData' }, }, 0x2a => { Name => 'PiP', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::PiP', ByteOrder => 'LittleEndian', }, }, 0x2b => { Name => 'GPSInfo', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::GPSInfo' }, }, 0x2c => { Name => 'MeterLink', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::MeterLink' , ByteOrder => 'LittleEndian' }, }, 0x2e => { Name => 'ParameterInfo', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::ParamInfo' }, }, ); # FFF file header (ref PH) %Image::ExifTool::FLIR::Header = ( GROUPS => { 0 => 'APP1', 2 
=> 'Image' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FIRST_ENTRY => 0, NOTES => 'Tags extracted from the FLIR FFF/AFF header.', 4 => { Name => 'CreatorSoftware', Format => 'string[16]' }, ); # FLIR raw data record (ref PH) %Image::ExifTool::FLIR::RawData = ( GROUPS => { 0 => 'APP1', 2 => 'Image' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FORMAT => 'int16u', FIRST_ENTRY => 0, NOTES => q{ The thermal image data may be stored either as raw data, or in PNG format. If stored as raw data, ExifTool adds a TIFF header to allow the data to be viewed as a TIFF image. If stored in PNG format, the PNG image is extracted as-is. Note that most FLIR cameras using the PNG format seem to write the 16-bit raw image data in the wrong byte order. }, 0x00 => { # use this tag only to determine the byte order of the raw data # (the value should be 0x0002 if the byte order is correct) # - always "II" when RawThermalImageType is "TIFF" # - seen both "II" and "MM" when RawThermalImageType is "PNG" Name => 'RawDataByteOrder', Hidden => 1, RawConv => 'ToggleByteOrder() if $val >= 0x0100; undef', }, 0x01 => { Name => 'RawThermalImageWidth', RawConv => '$$self{RawThermalImageWidth} = $val', }, 0x02 => { Name => 'RawThermalImageHeight', RawConv => '$$self{RawThermalImageHeight} = $val', }, # 0x03-0x05: 0 # 0x06: raw image width - 1 # 0x07: 0 # 0x08: raw image height - 1 # 0x09: 0,15,16 # 0x0a: 0,2,3,11,12,13,30 # 0x0b: 0,2 # 0x0c: 0 or a large number # 0x0d: 0,3,4,6 # 0x0e-0x0f: 0 16 => { Name => 'RawThermalImageType', Format => 'undef[$size-0x20]', RawConv => 'Image::ExifTool::FLIR::GetImageType($self, $val, "RawThermalImage")', }, 16.1 => { Name => 'RawThermalImage', Groups => { 2 => 'Preview' }, RawConv => '\$$self{RawThermalImage}', }, ); # GainDeadMap record (ref 6) (see RawData above) %Image::ExifTool::FLIR::GainDeadData = ( GROUPS => { 0 => 'APP1', 2 => 'Image' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FORMAT => 'int16u', FIRST_ENTRY => 0, NOTES 
=> 'Information found in FFF-format .GAN calibration image files.', 0x00 => { Name => 'GainDeadMapByteOrder', Hidden => 1, RawConv => 'ToggleByteOrder() if $val >= 0x0100; undef', }, 0x01 => { Name => 'GainDeadMapImageWidth', RawConv => '$$self{GainDeadMapImageWidth} = $val', }, 0x02 => { Name => 'GainDeadMapImageHeight', RawConv => '$$self{GainDeadMapImageHeight} = $val', }, 16 => { Name => 'GainDeadMapImageType', Format => 'undef[$size-0x20]', RawConv => 'Image::ExifTool::FLIR::GetImageType($self, $val, "GainDeadMapImage")', }, 16.1 => { Name => 'GainDeadMapImage', RawConv => '\$$self{GainDeadMapImage}', }, ); # CoarseMap record (ref 6) (see RawData above) %Image::ExifTool::FLIR::CoarseData = ( GROUPS => { 0 => 'APP1', 2 => 'Image' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FORMAT => 'int16u', FIRST_ENTRY => 0, NOTES => 'Information found in FFF-format .CRS correction image files.', 0x00 => { Name => 'CoarseMapByteOrder', Hidden => 1, RawConv => 'ToggleByteOrder() if $val >= 0x0100; undef', }, 0x01 => { Name => 'CoarseMapImageWidth', RawConv => '$$self{CoarseMapImageWidth} = $val', }, 0x02 => { Name => 'CoarseMapImageHeight', RawConv => '$$self{CoarseMapImageHeight} = $val', }, 16 => { Name => 'CoarseMapImageType', Format => 'undef[$size-0x20]', RawConv => 'Image::ExifTool::FLIR::GetImageType($self, $val, "CoarseMapImage")', }, 16.1 => { Name => 'CoarseMapImage', RawConv => '\$$self{CoarseMapImage}', }, ); # "Paint colors" record (ref PH) %Image::ExifTool::FLIR::PaintData = ( GROUPS => { 0 => 'APP1', 2 => 'Image' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FORMAT => 'int16u', FIRST_ENTRY => 0, NOTES => 'Information generated by FLIR Tools "Paint colors" tool.', 0x01 => { Name => 'PaintByteOrder', Hidden => 1, RawConv => 'ToggleByteOrder() if $val >= 0x0100; undef', }, 0x05 => { Name => 'PaintImageWidth', RawConv => '$$self{PaintImageWidth} = $val', }, 0x06 => { Name => 'PaintImageHeight', RawConv => '$$self{PaintImageHeight} = $val', }, 
20 => { Name => 'PaintImageType', Format => 'undef[$size-0x28]', RawConv => 'Image::ExifTool::FLIR::GetImageType($self, $val, "PaintImage")', }, 20.1 => { Name => 'PaintImage', RawConv => '\$$self{PaintImage}', }, ); # FLIR embedded image (ref 1) %Image::ExifTool::FLIR::EmbeddedImage = ( GROUPS => { 0 => 'APP1', 2 => 'Image' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FORMAT => 'int16u', FIRST_ENTRY => 0, 0 => { # use this tag only to determine the byte order # (the value should be 0x0003 if the byte order is correct) Name => 'EmbeddedImageByteOrder', Format => 'int16u', Hidden => 1, RawConv => 'ToggleByteOrder() if $val >= 0x0100; undef', }, 1 => 'EmbeddedImageWidth', 2 => 'EmbeddedImageHeight', 16 => { Name => 'EmbeddedImageType', Format => 'undef[4]', RawConv => '$val =~ /^\x89PNG/s ? "PNG" : ($val =~ /^\xff\xd8\xff/ ? "JPG" : "DAT")', Notes => q{ "PNG" for PNG image in Y Cb Cr colors, "JPG" for a JPEG image, or "DAT" for other image data }, }, 16.1 => { Name => 'EmbeddedImage', Groups => { 2 => 'Preview' }, Format => 'undef[$size-0x20]', Binary => 1, }, ); # FLIR camera record (ref PH) %Image::ExifTool::FLIR::CameraInfo = ( GROUPS => { 0 => 'APP1', 2 => 'Camera' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FIRST_ENTRY => 0, NOTES => q{ FLIR camera information. The Planck tags are variables used in the temperature calculation. See L<http://u88.n24.queensu.ca/exiftool/forum/index.php?topic=4898.msg23972#msg23972> for details. 
}, 0x00 => { # use this tag only to determine the byte order # (the value should be 0x0002 if the byte order is correct) Name => 'CameraInfoByteOrder', Format => 'int16u', Hidden => 1, RawConv => 'ToggleByteOrder() if $val >= 0x0100; undef', }, # 0x02 - int16u: image width # 0x04 - int16u: image height # 0x0c - int32u: image width - 1 # 0x10 - int32u: image height - 1 0x20 => { Name => 'Emissivity', %float2f }, 0x24 => { Name => 'ObjectDistance', Format => 'float', PrintConv => 'sprintf("%.2f m",$val)' }, 0x28 => { Name => 'ReflectedApparentTemperature', %floatKelvin }, 0x2c => { Name => 'AtmosphericTemperature', %floatKelvin }, 0x30 => { Name => 'IRWindowTemperature', %floatKelvin }, 0x34 => { Name => 'IRWindowTransmission', %float2f }, # 0x38: 0 0x3c => { Name => 'RelativeHumidity', Format => 'float', ValueConv => '$val > 2 ? $val / 100 : $val', # have seen value expressed as percent in FFF file PrintConv => 'sprintf("%.1f %%",$val*100)', }, # 0x40 - float: 0,6 # 0x44,0x48,0x4c: 0 # 0x50 - int32u: 1 # 0x54: 0 0x58 => { Name => 'PlanckR1', %float8g }, #1 0x5c => { Name => 'PlanckB', %float8g }, #1 0x60 => { Name => 'PlanckF', %float8g }, #1 # 0x64,0x68,0x6c: 0 0x070 => { Name => 'AtmosphericTransAlpha1', %float6f }, #1 (value: 0.006569) 0x074 => { Name => 'AtmosphericTransAlpha2', %float6f }, #1 (value: 0.012620) 0x078 => { Name => 'AtmosphericTransBeta1', %float6f }, #1 (value: -0.002276) 0x07c => { Name => 'AtmosphericTransBeta2', %float6f }, #1 (value: -0.006670) 0x080 => { Name => 'AtmosphericTransX', %float6f }, #1 (value: 1.900000) # 0x84,0x88: 0 # 0x8c - float: 0,4,6 0x90 => { Name => 'CameraTemperatureRangeMax', %floatKelvin }, 0x94 => { Name => 'CameraTemperatureRangeMin', %floatKelvin }, 0x98 => { Name => 'CameraTemperatureMaxClip', %floatKelvin }, # 50 degrees over camera max 0x9c => { Name => 'CameraTemperatureMinClip', %floatKelvin }, # usually 10 or 20 degrees below camera min 0xa0 => { Name => 'CameraTemperatureMaxWarn', %floatKelvin }, # same as 
camera max 0xa4 => { Name => 'CameraTemperatureMinWarn', %floatKelvin }, # same as camera min 0xa8 => { Name => 'CameraTemperatureMaxSaturated', %floatKelvin }, # usually 50 or 88 degrees over camera max 0xac => { Name => 'CameraTemperatureMinSaturated', %floatKelvin }, # usually 10, 20 or 40 degrees below camera min 0xd4 => { Name => 'CameraModel', Format => 'string[32]' }, 0xf4 => { Name => 'CameraPartNumber', Format => 'string[16]' }, #1 0x104 => { Name => 'CameraSerialNumber',Format => 'string[16]' }, #1 0x114 => { Name => 'CameraSoftware', Format => 'string[16]' }, #1/PH (NC) 0x170 => { Name => 'LensModel', Format => 'string[32]' }, # note: it seems that FLIR updated their lenses at some point, so lenses with the same # name may have different part numbers (eg. the FOL38 is either 1196456 or T197089) 0x190 => { Name => 'LensPartNumber', Format => 'string[16]' }, 0x1a0 => { Name => 'LensSerialNumber', Format => 'string[16]' }, 0x1b4 => { Name => 'FieldOfView', Format => 'float', PrintConv => 'sprintf("%.1f deg", $val)' }, #1 # 0x1d0 - int16u: 0,12,24,25,46 # 0x1d2 - int16u: 170,180,190,380,760,52320 0x1ec => { Name => 'FilterModel', Format => 'string[16]' }, 0x1fc => { Name => 'FilterPartNumber', Format => 'string[32]' }, 0x21c => { Name => 'FilterSerialNumber',Format => 'string[32]' }, 0x308 => { Name => 'PlanckO', Format => 'int32s' }, #1 0x30c => { Name => 'PlanckR2', %float8g }, #1 0x338 => { Name => 'RawValueMedian', Format => 'int16u', Groups => { 2 => 'Image' } }, 0x33c => { Name => 'RawValueRange', Format => 'int16u', Groups => { 2 => 'Image' } }, 0x384 => { Name => 'DateTimeOriginal', Description => 'Date/Time Original', Format => 'undef[10]', Groups => { 2 => 'Time' }, RawConv => q{ my $tm = Get32u(\$val, 0); my $ss = Get32u(\$val, 4) & 0xffff; my $tz = Get16s(\$val, 8); ConvertUnixTime($tm - $tz * 60) . sprintf('.%.3d', $ss) . 
TimeZoneString(-$tz); }, PrintConv => '$self->ConvertDateTime($val)', }, 0x390 => { Name => 'FocusStepCount', Format => 'int16u' }, 0x45c => { Name => 'FocusDistance', Format => 'float', PrintConv => 'sprintf("%.1f m",$val)' }, # 0x43c - string: either "Live" or the file name ); # FLIR measurement tools record (ref 6) %Image::ExifTool::FLIR::MeasInfo = ( GROUPS => { 0 => 'APP1', 2 => 'Image' }, PROCESS_PROC => \&ProcessMeasInfo, FORMAT => 'int16u', VARS => { NO_ID => 1 }, NOTES => q{ Tags listed below are only for the first measurement tool, however multiple measurements may be added, and information is extracted for all of them. Tags for subsequent measurements are generated as required with the prefixes "Meas2", "Meas3", etc. }, Meas1Type => { PrintConv => { 1 => 'Spot', 2 => 'Area', 3 => 'Ellipse', 4 => 'Line', 5 => 'Endpoint', #PH (NC, FLIR Tools v2.0 for Mac generates an empty one of these after each Line) 6 => 'Alarm', #PH seen params: "0 1 0 1 9142 0 9142 0" (called "Isotherm" by Mac version) 7 => 'Unused', #PH (NC) (or maybe "Free"?) 
8 => 'Difference', }, }, Meas1Params => { Notes => 'Spot=X,Y; Area=X1,Y1,W,H; Ellipse=XC,YC,X1,Y1,X2,Y2; Line=X1,Y1,X2,Y2', }, Meas1Label => { }, ); # FLIR palette record (ref PH/JD) %Image::ExifTool::FLIR::PaletteInfo = ( GROUPS => { 0 => 'APP1', 2 => 'Image' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FIRST_ENTRY => 0, 0x00 => { #JD Name => 'PaletteColors', RawConv => '$$self{PaletteColors} = $val', }, 0x06 => { Name => 'AboveColor', Format => 'int8u[3]', Notes => 'Y Cr Cb color components' }, #JD 0x09 => { Name => 'BelowColor', Format => 'int8u[3]' }, #JD 0x0c => { Name => 'OverflowColor', Format => 'int8u[3]' }, #JD 0x0f => { Name => 'UnderflowColor', Format => 'int8u[3]' }, #JD 0x12 => { Name => 'Isotherm1Color', Format => 'int8u[3]' }, #JD 0x15 => { Name => 'Isotherm2Color', Format => 'int8u[3]' }, #JD 0x1a => { Name => 'PaletteMethod' }, #JD 0x1b => { Name => 'PaletteStretch' }, #JD 0x30 => { Name => 'PaletteFileName', Format => 'string[32]', # (not valid for all images) RawConv => q{ $val =~ s/\0.*//; $val =~ /^[\x20-\x7e]{3,31}$/ ? $val : undef; }, }, 0x50 => { Name => 'PaletteName', Format => 'string[32]', # (not valid for all images) RawConv => q{ $val =~ s/\0.*//; $val =~ /^[\x20-\x7e]{3,31}$/ ? 
$val : undef; }, }, 0x70 => { Name => 'Palette', Format => 'undef[3*$$self{PaletteColors}]', Notes => 'Y Cr Cb byte values for each palette color', Binary => 1, }, ); # FLIR text information record (ref PH) %Image::ExifTool::FLIR::TextInfo = ( GROUPS => { 0 => 'APP1', 2 => 'Image' }, PROCESS_PROC => \&ProcessFLIRText, VARS => { NO_ID => 1 }, Label0 => { }, Value0 => { }, Label1 => { }, Value1 => { }, Label2 => { }, Value2 => { }, Label3 => { }, Value3 => { }, # (there could be more, and we will generate these on the fly if necessary) ); # FLIR parameter information record (ref PH) %Image::ExifTool::FLIR::ParamInfo = ( GROUPS => { 0 => 'APP1', 2 => 'Image' }, PROCESS_PROC => \&ProcessFLIRText, VARS => { NO_ID => 1 }, Generated => { Name => 'DateTimeGenerated', Description => 'Date/Time Generated', Groups => { 2 => 'Time' }, ValueConv => '$val =~ tr/-/:/; $val', PrintConv => '$self->ConvertDateTime($val)', }, Param0 => { }, Param1 => { }, Param2 => { }, Param3 => { }, # (there could be more, and we will generate these on the fly if necessary) ); # FLIR Picture in Picture record (ref 1) %Image::ExifTool::FLIR::PiP = ( GROUPS => { 0 => 'APP1', 2 => 'Image' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FIRST_ENTRY => 0, NOTES => 'FLIR Picture in Picture tags.', FORMAT => 'int16s', 0x00 => { Name => 'Real2IR', Format => 'float', }, 2 => { Name => 'OffsetX', Notes => 'offset from of insertion point from center', PrintConv => 'sprintf("%+d",$val)', # (add sign for direct use with IM convert) }, 3 => { Name => 'OffsetY', PrintConv => 'sprintf("%+d",$val)', }, 4 => { Name => 'PiPX1', Description => 'PiP X1', Notes => 'crop size for radiometric image', }, 5 => { Name => 'PiPX2', Description => 'PiP X2' }, 6 => { Name => 'PiPY1', Description => 'PiP Y1' }, 7 => { Name => 'PiPY2', Description => 'PiP Y2' }, ); # FLIR GPS record (ref PH/JD) %Image::ExifTool::FLIR::GPSInfo = ( GROUPS => { 0 => 'APP1', 2 => 'Image' }, PROCESS_PROC => 
\&Image::ExifTool::ProcessBinaryData, FIRST_ENTRY => 0, 0x58 => { Name => 'GPSMapDatum', Format => 'string[16]', }, ); # humidity meter information # (ref http://u88.n24.queensu.ca/exiftool/forum/index.php/topic,5325.0.html) # The %Image::ExifTool::UserDefined hash defines new tags to be added to existing tables. %Image::ExifTool::FLIR::MeterLink = ( GROUPS => { 0 => 'APP1', 2 => 'Image' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FIRST_ENTRY => 0, NOTES => 'Tags containing Meterlink humidity meter information.', 26 => { Name => 'Reading1Units', DataMember => 'Reading1Units', RawConv => '$$self{Reading1Units} = $val', PrintHex => 1, PrintConv => { 0x0d => 'C', 0x1b => '%', 0x1d => 'Relative', 0x24 => 'g/kg', }, }, 28 => { Name => 'Reading1Description', DataMember => 'Reading1Description', RawConv => '$$self{Reading1Description} = $val', PrintConv => { 0 => 'Humidity', 3 => 'Moisture', # Pinless Moisture Readings with INTernal sensor 7 => 'Dew Point', 8 => 'Air Temperature', 9 => 'IR Temperature', 11 => 'Difference Temperature', # Difference Temp: IR-Temp and DewPoint }, }, 32 => { Name => 'Reading1Device', Format => 'string[16]', }, 96 => { Name => 'Reading1Value', Format => 'double', # convert Kelvin -> Celcius and kg/kg -> g/kg ValueConv => q{ return $val - 273.15 if $$self{Reading1Units} == 0x0d and $$self{Reading1Description} != 11; return $val *= 1000 if $$self{Reading1Units} == 0x24; return $val; }, }, # add 100 for subsequent readings 126 => { Name => 'Reading2Units', DataMember => 'Reading2Units', RawConv => '$$self{Reading2Units} = $val', PrintHex => 1, PrintConv => { 0x0d => 'C', 0x1b => '%', 0x1d => 'rel', 0x24 => 'g/kg', }, }, 128 => { Name => 'Reading2Description', DataMember => 'Reading2Description', RawConv => '$$self{Reading2Description} = $val', PrintConv => { 0 => 'Humidity', 3 => 'Moisture', 7 => 'Dew Point', 8 => 'Air Temperature', 9 => 'IR Temperature', 11 => 'Difference Temperature', # Difference Temp: IR-Temp and DewPoint }, }, 
132 => { Name => 'Reading2Device', Format => 'string[16]', }, 196 => { Name => 'Reading2Value', Format => 'double', # convert Kelvin -> Celcius and kg/kg -> g/kg ValueConv => q{ return $val - 273.15 if $$self{Reading2Units} == 0x0d and $$self{Reading2Description} != 11; return $val *= 1000 if $$self{Reading2Units} == 0x24; return $val; }, }, 226 => { Name => 'Reading3Units', DataMember => 'Reading3Units', RawConv => '$$self{Reading3Units} = $val', PrintHex => 1, PrintConv => { 0x0d => 'C', 0x1b => '%', 0x1d => 'rel', 0x24 => 'g/kg', }, }, 228 => { Name => 'Reading3Description', DataMember => 'Reading3Description', RawConv => '$$self{Reading3Description} = $val', PrintConv => { 0 => 'Humidity', 3 => 'Moisture', 7 => 'Dew Point', 8 => 'Air Temperature', 9 => 'IR Temperature', 11 => 'Difference Temperature', # Difference Temp: IR-Temp and DewPoint }, }, 232 => { Name => 'Reading3Device', Format => 'string[16]', }, 296 => { Name => 'Reading3Value', Format => 'double', # convert Kelvin -> Celcius and kg/kg -> g/kg ValueConv => q{ return $val - 273.15 if $$self{Reading3Units} == 0x0d and $$self{Reading3Description} != 11; return $val *= 1000 if $$self{Reading3Units} == 0x24; return $val; }, }, 326 => { Name => 'Reading4Units', DataMember => 'Reading4Units', RawConv => '$$self{Reading4Units} = $val', PrintHex => 1, PrintConv => { 0x0d => 'C', 0x1b => '%', 0x1d => 'rel', 0x24 => 'g/kg', }, }, 328 => { Name => 'Reading4Description', DataMember => 'Reading4Description', RawConv => '$$self{Reading4Description} = $val', PrintConv => { 0 => 'Humidity', 3 => 'Moisture', 7 => 'Dew Point', 8 => 'Air Temperature', 9 => 'IR Temperature', 11 => 'Difference Temperature', # Difference Temp: IR-Temp and DewPoint }, }, 332 => { Name => 'Reading4Device', Format => 'string[16]', }, 396 => { Name => 'Reading4Value', Format => 'double', # convert Kelvin -> Celcius and kg/kg -> g/kg ValueConv => q{ return $val - 273.15 if $$self{Reading4Units} == 0x0d and $$self{Reading4Description} != 11; 
return $val *= 1000 if $$self{Reading4Units} == 0x24; return $val; }, }, ); # FLIR public image format (ref 4/5) %Image::ExifTool::FLIR::FPF = ( GROUPS => { 0 => 'FLIR', 2 => 'Image' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, NOTES => 'Tags extracted from FLIR Public image Format (FPF) files.', 0x20 => { Name => 'FPFVersion', Format => 'int32u' }, 0x24 => { Name => 'ImageDataOffset', Format => 'int32u' }, 0x28 => { Name => 'ImageType', Format => 'int16u', PrintConv => { 0 => 'Temperature', 1 => 'Temperature Difference', 2 => 'Object Signal', 3 => 'Object Signal Difference', }, }, 0x2a => { Name => 'ImagePixelFormat', Format => 'int16u', PrintConv => { 0 => '2-byte short integer', 1 => '4-byte long integer', 2 => '4-byte float', 3 => '8-byte double', }, }, 0x2c => { Name => 'ImageWidth', Format => 'int16u' }, 0x2e => { Name => 'ImageHeight', Format => 'int16u' }, 0x30 => { Name => 'ExternalTriggerCount',Format => 'int32u' }, 0x34 => { Name => 'SequenceFrameNumber',Format => 'int32u' }, 0x78 => { Name => 'CameraModel', Format => 'string[32]', Groups => { 2 => 'Camera' } }, 0x98 => { Name => 'CameraPartNumber', Format => 'string[32]', Groups => { 2 => 'Camera' } }, 0xb8 => { Name => 'CameraSerialNumber', Format => 'string[32]', Groups => { 2 => 'Camera' } }, 0xd8 => { Name => 'CameraTemperatureRangeMin', %floatKelvin, Groups => { 2 => 'Camera' } }, 0xdc => { Name => 'CameraTemperatureRangeMax', %floatKelvin, Groups => { 2 => 'Camera' } }, 0xe0 => { Name => 'LensModel', Format => 'string[32]', Groups => { 2 => 'Camera' } }, 0x100 => { Name => 'LensPartNumber', Format => 'string[32]', Groups => { 2 => 'Camera' } }, 0x120 => { Name => 'LensSerialNumber', Format => 'string[32]', Groups => { 2 => 'Camera' } }, 0x140 => { Name => 'FilterModel', Format => 'string[32]', Groups => { 2 => 'Camera' } }, 0x150 => { Name => 'FilterPartNumber', Format => 'string[32]', Groups => { 2 => 'Camera' } }, 0x180 => { Name => 'FilterSerialNumber',Format => 'string[32]', Groups 
=> { 2 => 'Camera' } }, 0x1e0 => { Name => 'Emissivity', %float2f }, 0x1e4 => { Name => 'ObjectDistance', Format => 'float', PrintConv => 'sprintf("%.2f m",$val)' }, 0x1e8 => { Name => 'ReflectedApparentTemperature', %floatKelvin }, 0x1ec => { Name => 'AtmosphericTemperature', %floatKelvin }, 0x1f0 => { Name => 'RelativeHumidity', Format => 'float', PrintConv => 'sprintf("%.1f %%",$val*100)' }, 0x1f4 => { Name => 'ComputedAtmosphericTrans', %float2f }, 0x1f8 => { Name => 'EstimatedAtmosphericTrans',%float2f }, 0x1fc => { Name => 'ReferenceTemperature', %floatKelvin }, 0x200 => { Name => 'IRWindowTemperature', %floatKelvin, Groups => { 2 => 'Camera' } }, 0x204 => { Name => 'IRWindowTransmission', %float2f, Groups => { 2 => 'Camera' } }, 0x248 => { Name => 'DateTimeOriginal', Description => 'Date/Time Original', Groups => { 2 => 'Time' }, Format => 'int32u[7]', ValueConv => 'sprintf("%.4d:%.2d:%.2d %.2d:%.2d:%.2d.%.3d",split(" ",$val))', PrintConv => '$self->ConvertDateTime($val)', }, # Notes (based on ref 4): # 1) The above date/time structure is documented to be 32 bytes for FPFVersion 1, but in # fact it is only 28. Maybe this is why the full header length of my FPFVersion 2 # sample is 892 bytes instead of 896. If this was a documentation error, we are OK, # but if the alignment was really different in version 1, then the temperatures below # will be mis-aligned. I don't have any version 1 samples to check this. 
# 2) The following temperatures may not always be in Kelvin 0x2a4 => { Name => 'CameraScaleMin', %float1f }, 0x2a8 => { Name => 'CameraScaleMax', %float1f }, 0x2ac => { Name => 'CalculatedScaleMin',%float1f }, 0x2b0 => { Name => 'CalculatedScaleMax',%float1f }, 0x2b4 => { Name => 'ActualScaleMin', %float1f }, 0x2b8 => { Name => 'ActualScaleMax', %float1f }, ); # top-level user data written by FLIR cameras in MP4 videos %Image::ExifTool::FLIR::UserData = ( GROUPS => { 1 => 'FLIR', 2 => 'Camera' }, NOTES => q{ Tags written by some FLIR cameras in a top-level (!) "udta" atom of MP4 videos. }, uuid => [ { Name => 'FLIR_Parts', Condition => '$$valPt=~/^\x43\xc3\x99\x3b\x0f\x94\x42\x4b\x82\x05\x6b\x66\x51\x3f\x48\x5d/s', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::Parts', Start => 16, }, }, { Name => 'FLIR_Serial', Condition => '$$valPt=~/^\x57\xf5\xb9\x3e\x51\xe4\x48\xaf\xa0\xd9\xc3\xef\x1b\x37\xf7\x12/s', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::SerialNums', Start => 16, }, }, { Name => 'FLIR_Params', Condition => '$$valPt=~/^\x41\xe5\xdc\xf9\xe8\x0a\x41\xce\xad\xfe\x7f\x0c\x58\x08\x2c\x19/s', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::Params', Start => 16, }, }, { Name => 'FLIR_UnknownUUID', Condition => '$$valPt=~/^\x57\x45\x20\x50\x2c\xbb\x44\xad\xae\x54\x15\xe9\xb8\x39\xd9\x03/s', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::UnknownUUID', Start => 16, }, }, { Name => 'FLIR_GPS', Condition => '$$valPt=~/^\x7f\x2e\x21\x00\x8b\x46\x49\x18\xaf\xb1\xde\x70\x9a\x74\xf6\xf5/s', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::GPS_UUID', Start => 16, }, }, { Name => 'FLIR_MoreInfo', Condition => '$$valPt=~/^\x2b\x45\x2f\xdc\x74\x35\x40\x94\xba\xee\x22\xa6\xb2\x3a\x7c\xf8/s', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::MoreInfo', Start => 16, }, }, { Name => 'SoftwareComponents', Condition => '$$valPt=~/^\x78\x3f\xc7\x83\x0c\x95\x4b\x00\x8c\xc7\xac\xf1\xec\xb4\xd3\x9a/s', Unknown => 1, ValueConv => 'join " ", unpack 
"x20N4xZ*", $val', }, { Name => 'FLIR_Unknown', Condition => '$$valPt=~/^\x52\xae\xda\x45\x17\x1e\x48\xb1\x92\x47\x93\xa4\x21\x4e\x43\xf5/s', Unknown => 1, ValueConv => 'unpack "x20C*", $val', }, { Name => 'Units', Condition => '$$valPt=~/^\xf8\xab\x72\x1e\x84\x73\x44\xa0\xb8\xc8\x1b\x04\x82\x6e\x07\x24/s', List => 1, RawConv => 'my @a = split "\0", substr($val, 20); \@a', }, { Name => 'ThumbnailImage', Groups => { 2 => 'Preview' }, Condition => '$$valPt=~/^\x91\xaf\x9b\x93\x45\x9b\x44\x56\x98\xd1\x5e\x76\xea\x01\x04\xac....\xff\xd8\xff/s', RawConv => 'substr($val, 20)', Binary => 1, }, ], ); # uuid 43c3993b0f94424b82056b66513f485d box of MP4 videos (ref PH) %Image::ExifTool::FLIR::Parts = ( GROUPS => { 0 => 'MakerNotes', 1 => 'FLIR', 2 => 'Camera' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FORMAT => 'undef', NOTES => q{ Tags extracted from the "uuid" box with ID 43c3993b0f94424b82056b66513f485d in FLIR MP4 videos. }, 4 => [ { Name => 'BAHPVer', Condition => '$$valPt =~ /^bahpver\0/', Format => 'undef[$size]', RawConv => 'join " ", split "\0", substr($val, 8)', }, { Name => 'BALPVer', Condition => '$$valPt =~ /^balpver\0/', Format => 'undef[$size]', ValueConv => 'join " ", split "\0", substr($val, 8)', }, { Name => 'Battery', Condition => '$$valPt =~ /^battery\0/', Format => 'undef[$size]', ValueConv => 'join " ", split "\0", substr($val, 8)', }, { Name => 'BAVPVer', Condition => '$$valPt =~ /^bavpver\0/', Format => 'undef[$size]', ValueConv => 'join " ", split "\0", substr($val, 8)', # (the first string corresponds with a lens part number) }, { Name => 'CamCore', Condition => '$$valPt =~ /^camcore\0/', Format => 'undef[$size]', ValueConv => 'join " ", split "\0", substr($val, 8)', }, { Name => 'DetectorBoard', Condition => '$$valPt =~ /^det_board\0/', Format => 'undef[$size]', ValueConv => 'join " ", split "\0", substr($val, 10)', }, { Name => 'Detector', Condition => '$$valPt =~ /^detector\0/', Format => 'undef[$size]', ValueConv => 'join " ", split 
"\0", substr($val, 9)', }, { Name => 'GIDCVer', Condition => '$$valPt =~ /^gidcver\0/', Format => 'undef[$size]', ValueConv => 'join " ", split "\0", substr($val, 8)', }, { Name => 'GIDPVer', Condition => '$$valPt =~ /^gidpver\0/', Format => 'undef[$size]', ValueConv => 'join " ", split "\0", substr($val, 8)', }, { Name => 'GIPC_CPLD', Condition => '$$valPt =~ /^gipccpld\0/', Format => 'undef[$size]', ValueConv => 'join " ", split "\0", substr($val, 9)', }, { Name => 'GIPCVer', Condition => '$$valPt =~ /^gipcver\0/', Format => 'undef[$size]', ValueConv => 'join " ", split "\0", substr($val, 8)', }, { Name => 'GIXIVer', Condition => '$$valPt =~ /^gixiver\0/', Format => 'undef[$size]', ValueConv => 'join " ", split "\0", substr($val, 8)', }, { Name => 'MainBoard', Condition => '$$valPt =~ /^mainboard\0/', Format => 'undef[$size]', ValueConv => 'join " ", split "\0", substr($val, 10)', }, { Name => 'Optics', Condition => '$$valPt =~ /^optics\0/', Format => 'undef[$size]', ValueConv => 'join " ", split "\0", substr($val, 7)', }, { Name => 'PartNumber', Format => 'undef[$size]', ValueConv => 'join " ", split "\0", $val', }, ], ); # uuid 57f5b93e51e448afa0d9c3ef1b37f712 box of MP4 videos (ref PH) %Image::ExifTool::FLIR::SerialNums = ( GROUPS => { 0 => 'MakerNotes', 1 => 'FLIR', 2 => 'Camera' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FIRST_ENTRY => 0, NOTES => q{ Tags extracted from the "uuid" box with ID 57f5b93e51e448afa0d9c3ef1b37f712 in FLIR MP4 videos. 
}, # (not sure if these offsets are constant) 0x0c => { Name => 'UnknownSerial1', Format => 'string[33]', Unknown => 1 }, 0x2d => { Name => 'UnknownSerial2', Format => 'string[33]', Unknown => 1 }, 0x4e => { Name => 'UnknownSerial3', Format => 'string[33]', Unknown => 1 }, 0x6f => { Name => 'UnknownSerial4', Format => 'string[11]', Unknown => 1 }, 0x7b => { Name => 'UnknownNumber', Format => 'string[3]', Unknown => 1 }, 0x7e => { Name => 'CameraSerialNumber', Format => 'string[9]' }, ); # uuid 41e5dcf9e80a41ceadfe7f0c58082c19 box of MP4 videos (ref PH) %Image::ExifTool::FLIR::Params = ( GROUPS => { 0 => 'MakerNotes', 1 => 'FLIR', 2 => 'Camera' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FORMAT => 'float', FIRST_ENTRY => 0, NOTES => q{ Tags extracted from the "uuid" box with ID 41e5dcf9e80a41ceadfe7f0c58082c19 in FLIR MP4 videos. }, 1 => { Name => 'ReflectedApparentTemperature', %floatKelvin }, 2 => { Name => 'AtmosphericTemperature', %floatKelvin }, 3 => { Name => 'Emissivity', %float2f }, 4 => { Name => 'ObjectDistance', PrintConv => 'sprintf("%.2f m",$val)' }, 5 => { Name => 'RelativeHumidity', PrintConv => 'sprintf("%.1f %%",$val*100)' }, 6 => { Name => 'EstimatedAtmosphericTrans', %float2f }, 7 => { Name => 'IRWindowTemperature', %floatKelvin }, 8 => { Name => 'IRWindowTransmission', %float2f }, ); # uuid 574520502cbb44adae5415e9b839d903 box of MP4 videos (ref PH) %Image::ExifTool::FLIR::UnknownUUID = ( GROUPS => { 0 => 'MakerNotes', 1 => 'FLIR', 2 => 'Camera' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FORMAT => 'float', FIRST_ENTRY => 0, NOTES => q{ Tags extracted from the "uuid" box with ID 574520502cbb44adae5415e9b839d903 in FLIR MP4 videos. 
}, # 1 - 1 # 2 - 0 # 3 - 0 ); # uuid 7f2e21008b464918afb1de709a74f6f5 box of MP4 videos (ref PH) %Image::ExifTool::FLIR::GPS_UUID = ( GROUPS => { 0 => 'MakerNotes', 1 => 'FLIR', 2 => 'Location' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FORMAT => 'float', FIRST_ENTRY => 0, NOTES => q{ Tags extracted from the "uuid" box with ID 7f2e21008b464918afb1de709a74f6f5 in FLIR MP4 videos. }, 1 => { Name => 'GPSLatitude', PrintConv => 'Image::ExifTool::GPS::ToDMS($self, $val, 1, "N")', }, 2 => { Name => 'GPSLongitude', PrintConv => 'Image::ExifTool::GPS::ToDMS($self, $val, 1, "E")', }, 3 => { Name => 'GPSAltitude', PrintConv => '$val=int($val*100+0.5)/100;"$val m"', }, # 4 - int32u: 0x0001bf74 # 5 - int32u: 0 # 6 - int32u: 1 ); # uuid 2b452fdc74354094baee22a6b23a7cf8 box of MP4 videos (ref PH) %Image::ExifTool::FLIR::MoreInfo = ( GROUPS => { 0 => 'MakerNotes', 1 => 'FLIR', 2 => 'Camera' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FIRST_ENTRY => 0, NOTES => q{ Tags extracted from the "uuid" box with ID 2b452fdc74354094baee22a6b23a7cf8 in FLIR MP4 videos. 
}, 5 => { Name => 'LensModel', Format => 'string[6]' }, 11 => { Name => 'UnknownTemperature1', %floatKelvin, Unknown => 1 }, # (-14.9 C) 15 => { Name => 'UnknownTemperature2', %floatKelvin, Unknown => 1 }, # (60.0 C) ); # FLIR AFF tag table (ref PH) %Image::ExifTool::FLIR::AFF = ( GROUPS => { 0 => 'FLIR', 1 => 'FLIR', 2 => 'Image' }, NOTES => 'Tags extracted from FLIR "AFF" SEQ images.', VARS => { ALPHA_FIRST => 1 }, "_header" => { Name => 'AFFHeader', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::Header' }, }, 0x01 => { Name => 'AFF1', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::AFF1' }, }, 0x05 => { Name => 'AFF5', SubDirectory => { TagTable => 'Image::ExifTool::FLIR::AFF5' }, }, ); # AFF record type 1 (ref forum?topic=4898.msg27627) %Image::ExifTool::FLIR::AFF1 = ( GROUPS => { 0 => 'FLIR', 1 => 'FLIR', 2 => 'Camera' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FORMAT => 'int16u', FIRST_ENTRY => 0, 0x00 => { # use this tag only to determine the byte order of the raw data # (the value should be 0x0002 if the byte order is correct) Name => 'RawDataByteOrder', Hidden => 1, RawConv => 'ToggleByteOrder() if $val >= 0x0100; undef', }, 0x01 => { Name => 'SensorWidth', Format => 'int16u' }, 0x02 => { Name => 'SensorHeight', Format => 'int16u' }, ); # AFF record type 5 (ref forum?topic=4898.msg27628) %Image::ExifTool::FLIR::AFF5 = ( GROUPS => { 0 => 'FLIR', 1 => 'FLIR', 2 => 'Camera' }, PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData, FORMAT => 'int16u', FIRST_ENTRY => 0, 0x12 => { # use this tag only to determine the byte order of the raw data # (the value should be 0x0002 if the byte order is correct) Name => 'RawDataByteOrder', Hidden => 1, RawConv => 'ToggleByteOrder() if $val >= 0x0100; undef', }, 0x13 => { Name => 'SensorWidth', Format => 'int16u' }, 0x14 => { Name => 'SensorHeight', Format => 'int16u' }, ); # FLIR composite tags (ref 1) %Image::ExifTool::FLIR::Composite = ( GROUPS => { 1 => 'FLIR', 2 => 'Camera' }, 
PeakSpectralSensitivity => { Require => 'FLIR:PlanckB', ValueConv => '14387.6515/$val', PrintConv => 'sprintf("%.1f um", $val)', }, ); # add our composite tags Image::ExifTool::AddCompositeTags('Image::ExifTool::FLIR'); #------------------------------------------------------------------------------ # Get image type from raw image data # Inputs: 0) ExifTool ref, 1) image data, 2) tag name # Returns: image type (PNG, JPG, TIFF or undef) # - image itself is stored in $$et{$tag} sub GetImageType($$$) { my ($et, $val, $tag) = @_; my ($w, $h) = @$et{"${tag}Width","${tag}Height"}; my $type = 'DAT'; # add TIFF header only if this looks like 16-bit raw data # (note: MakeTiffHeader currently works only for little-endian, # and I haven't seen any big-endian samples, but check anwyay) if ($val =~ /^\x89PNG\r\n\x1a\n/) { $type = 'PNG'; } elsif ($val =~ /^\xff\xd8\xff/) { # (haven't seen this, but just in case - PH) $type = 'JPG'; } elsif (length $val != $w * $h * 2) { $et->Warn("Unrecognized FLIR $tag data format"); } elsif (GetByteOrder() eq 'II') { require Image::ExifTool::Sony; $val = Image::ExifTool::Sony::MakeTiffHeader($w,$h,1,16) . $val; $type = 'TIFF'; } else { $et->Warn("Don't yet support big-endian TIFF $tag"); } # save image data $$et{$tag} = $val; return $type; } #------------------------------------------------------------------------------ # Unescape FLIR Unicode character # Inputs: 0) escaped character code # Returns: UTF8 character sub UnescapeFLIR($) { my $char = shift; return $char unless length $char eq 4; # escaped ASCII char (eg. 
'\\') my $val = hex $char; return chr($val) if $val < 0x80; # simple ASCII return pack('C0U', $val) if $] >= 5.006001; return Image::ExifTool::PackUTF8($val); } #------------------------------------------------------------------------------ # Process FLIR text info record (ref PH) # Inputs: 0) ExifTool ref, 1) dirInfo ref, 2) tag table ref # Returns: 1 on success sub ProcessFLIRText($$$) { my ($et, $dirInfo, $tagTablePtr) = @_; my $dataPt = $$dirInfo{DataPt}; my $dirStart = $$dirInfo{DirStart} || 0; my $dirLen = $$dirInfo{DirLen}; return 0 if $dirLen < 12; $et->VerboseDir('FLIR Text'); my $dat = substr($$dataPt, $dirStart+12, $dirLen-12); $dat =~ s/\0.*//s; # truncate at null # the parameter text contains an additional header entry... if ($tagTablePtr eq \%Image::ExifTool::FLIR::ParamInfo and $dat =~ /# (Generated) at (.*?)[\n\r]/) { $et->HandleTag($tagTablePtr, $1, $2); } for (;;) { $dat =~ /.(\d+).(label|value|param) (unicode|text) "(.*)"/g or last; my ($tag, $val) = (ucfirst($2) . $1, $4); if ($3 eq 'unicode' and $val =~ /\\/) { # convert escaped Unicode characters (backslash followed by 4 hex digits) $val =~ s/\\([0-9a-fA-F]{4}|.)/UnescapeFLIR($1)/sge; $et->Decode($val, 'UTF8'); } $$tagTablePtr{$tag} or AddTagToTable($tagTablePtr, $tag, { Name => $tag }); $et->HandleTag($tagTablePtr, $tag, $val); } return 1; } #------------------------------------------------------------------------------ # Process FLIR measurement tool record (ref 6) # Inputs: 0) ExifTool ref, 1) dirInfo ref, 2) tag table ref # Returns: 1 on success # (code-driven decoding isn't pretty, but sometimes it is necessary) sub ProcessMeasInfo($$$) { my ($et, $dirInfo, $tagTablePtr) = @_; my $dataPt = $$dirInfo{DataPt}; my $dirStart = $$dirInfo{DirStart} || 0; my $dataPos = $$dirInfo{DataPos}; my $dirEnd = $dirStart + $$dirInfo{DirLen}; my $verbose = $et->Options('Verbose'); my $pos = $dirStart + 12; return 0 if $pos > $dirEnd; ToggleByteOrder() if Get16u($dataPt, $dirStart) >= 0x100; my ($i, $t, 
$p); for ($i=1; ; ++$i) { last if $pos + 2 > $dirEnd; my $recLen = Get16u($dataPt, $pos); last if $recLen < 0x28 or $pos + $recLen > $dirEnd; my $pre = 'Meas' . $i; $et->VerboseDir("MeasInfo $i", undef, $recLen); if ($verbose > 2) { HexDump($dataPt, $recLen, Start=>$pos, Prefix=>$$et{INDENT}, DataPos=>$dataPos); } my $coordLen = Get16u($dataPt, $pos+4); # generate tag table entries for this tool if necessary foreach $t ('Type', 'Params', 'Label') { my $tag = $pre . $t; last if $$tagTablePtr{$tag}; my $tagInfo = { Name => $tag }; $$tagInfo{PrintConv} = $$tagTablePtr{"Meas1$t"}{PrintConv}; AddTagToTable($tagTablePtr, $tag, $tagInfo); } # extract measurement tool type $et->HandleTag($tagTablePtr, "${pre}Type", undef, DataPt=>$dataPt, DataPos=>$dataPos, Start=>$pos+0x0a, Size=>2); last if $pos + 0x24 + $coordLen > $dirEnd; # extract measurement parameters $et->HandleTag($tagTablePtr, "${pre}Params", undef, DataPt=>$dataPt, DataPos=>$dataPos, Start=>$pos+0x24, Size=>$coordLen); my @uni; # extract label (sometimes-null-terminated Unicode) for ($p=0x24+$coordLen; $p<$recLen-1; $p+=2) { my $ch = Get16u($dataPt, $p+$pos); # FLIR Tools v2.0 for Mac doesn't properly null-terminate these strings, # so end the string at any funny character last if $ch < 0x20 or $ch > 0x7f; push @uni, $ch; } # convert to the ExifTool character set require Image::ExifTool::Charset; my $val = Image::ExifTool::Charset::Recompose($et, \@uni); $et->HandleTag($tagTablePtr, "${pre}Label", $val, DataPt=>$dataPt, DataPos=>$dataPos, Start=>$pos+0x24+$coordLen, Size=>2*scalar(@uni)); $pos += $recLen; # step to next record } return 1; } #------------------------------------------------------------------------------ # Process FLIR FFF record (ref PH/1/3) # Inputs: 0) ExifTool ref, 1) dirInfo ref, 2) tag table ref # Returns: 1 if this was a valid FFF record sub ProcessFLIR($$;$) { my ($et, $dirInfo, $tagTablePtr) = @_; my $raf = $$dirInfo{RAF} || new File::RandomAccess($$dirInfo{DataPt}); my $verbose = 
$et->Options('Verbose'); my $out = $et->Options('TextOut'); my ($i, $hdr, $buff, $rec); # read and verify FFF header $raf->Read($hdr, 0x40) == 0x40 and $hdr =~ /^([AF]FF)\0/ or return 0; my $type = $1; # set file type if reading from FFF or SEQ file ($tagTablePtr will not be defined) $et->SetFileType($type eq 'FFF' ? 'FLIR' : 'SEQ') unless $tagTablePtr; # FLIR file header (ref 3) # 0x00 - string[4] file format ID = "FFF\0" # 0x04 - string[16] file creator: seen "\0","MTX IR\0","CAMCTRL\0" # 0x14 - int32u file format version = 100 # 0x18 - int32u offset to record directory # 0x1c - int32u number of entries in record directory # 0x20 - int32u next free index ID = 2 # 0x24 - int16u swap pattern = 0 (?) # 0x28 - int16u[7] spares # 0x34 - int32u[2] reserved # 0x3c - int32u checksum # determine byte ordering by validating version number # (in my samples FLIR APP1 is big-endian, FFF files are little-endian) for ($i=0; ; ++$i) { my $ver = Get32u(\$hdr, 0x14); last if $ver >= 100 and $ver < 200; # (have seen 100 and 101 - PH) ToggleByteOrder(); $i and $et->Warn("Unsupported FLIR $type version"), return 1; } # read the FLIR record directory my $pos = Get32u(\$hdr, 0x18); my $num = Get32u(\$hdr, 0x1c); unless ($raf->Seek($pos) and $raf->Read($buff, $num * 0x20) == $num * 0x20) { $et->Warn('Truncated FLIR FFF directory'); return 1; } unless ($tagTablePtr) { $tagTablePtr = GetTagTable("Image::ExifTool::FLIR::$type"); $$et{SET_GROUP0} = 'FLIR'; # (set group 0 to 'FLIR' for FFF files) } # process the header data $et->HandleTag($tagTablePtr, '_header', $hdr); my $oldIndent = $$et{INDENT}; $$et{INDENT} .= '| '; $et->VerboseDir($type, $num); for ($i=0; $i<$num; ++$i) { # FLIR record entry (ref 3): # 0x00 - int16u record type # 0x02 - int16u record subtype: RawData 1=BE, 2=LE, 3=PNG; 1 for other record types # 0x04 - int32u record version: seen 0x64,0x66,0x67,0x68,0x6f,0x104 # 0x08 - int32u index id = 1 # 0x0c - int32u record offset from start of FLIR data # 0x10 - int32u record 
length # 0x14 - int32u parent = 0 (?) # 0x18 - int32u object number = 0 (?) # 0x1c - int32u checksum: 0 for no checksum my $entry = $i * 0x20; my $recType = Get16u(\$buff, $entry); next if $recType == 0; # ignore free records my $recPos = Get32u(\$buff, $entry + 0x0c); my $recLen = Get32u(\$buff, $entry + 0x10); $verbose and printf $out "%sFLIR Record 0x%.2x, offset 0x%.4x, length 0x%.4x\n", $$et{INDENT}, $recType, $recPos, $recLen; unless ($raf->Seek($recPos) and $raf->Read($rec, $recLen) == $recLen) { $et->Warn('Invalid FLIR record'); last; } if ($$tagTablePtr{$recType}) { $et->HandleTag($tagTablePtr, $recType, undef, DataPt => \$rec, DataPos => $recPos, Start => 0, Size => $recLen, Index => $i, ); } elsif ($verbose > 2) { my %parms = ( DataPos => $recPos, Prefix => $$et{INDENT} ); $parms{MaxLen} = 96 if $verbose < 4; HexDump(\$rec, $recLen, %parms); } } delete $$et{SET_GROUP0}; $$et{INDENT} = $oldIndent; return 1; } #------------------------------------------------------------------------------ # Process FLIR public image format (FPF) file (ref PH/4) # Inputs: 0) ExifTool ref, 1) dirInfo ref, 2) tag table ref # Returns: 1 if this was a valid FFF file sub ProcessFPF($$) { my ($et, $dirInfo) = @_; my $raf = $$dirInfo{RAF}; my $buff; $raf->Read($buff, 892) == 892 and $buff =~ /^FPF Public Image Format\0/ or return 0; # I think these are always little-endian, but check FPFVersion just in case SetByteOrder('II'); ToggleByteOrder() unless Get32u(\$buff, 0x20) & 0xffff; my $tagTablePtr = GetTagTable('Image::ExifTool::FLIR::FPF'); $et->SetFileType(); $et->ProcessDirectory( { DataPt => \$buff, Parent => 'FPF' }, $tagTablePtr); return 1; } 1; # end __END__ =head1 NAME Image::ExifTool::FLIR - Read FLIR meta information =head1 SYNOPSIS This module is loaded automatically by Image::ExifTool when required. =head1 DESCRIPTION This module contains the definitions to read meta information from FLIR Systems Inc. thermal image files (FFF, FPF and JPEG format). 
=head1 AUTHOR Copyright 2003-2016, Phil Harvey (phil at owl.phy.queensu.ca) This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =head1 REFERENCES =over 4 =item L<http://u88.n24.queensu.ca/exiftool/forum/index.php/topic,4898.0.html> =item L<http://www.nuage.ch/site/flir-i7-some-analysis/> =item L<http://www.workswell.cz/manuals/flir/hardware/A3xx_and_A6xx_models/Streaming_format_ThermoVision.pdf> =item L<http://support.flir.com/DocDownload/Assets/62/English/1557488%24A.pdf> =item L<http://code.google.com/p/dvelib/source/browse/trunk/flirPublicFormat/fpfConverter/Fpfimg.h?spec=svn3&r=3> =item L<http://u88.n24.queensu.ca/exiftool/forum/index.php/topic,5538.0.html> =back =head1 ACKNOWLEDGEMENTS Thanks to Tomas for his hard work in decoding much of this information, and to Jens Duttke for getting me started on this format. =head1 SEE ALSO L<Image::ExifTool::TagNames/FLIR Tags>, L<Image::ExifTool(3pm)|Image::ExifTool> =cut
project802/Focus-Points
focuspoints.lrdevplugin/bin/exiftool/lib/Image/ExifTool/FLIR.pm
Perl
apache-2.0
56,492
# input check functions

# checkReportName -- validate a user-supplied report name.
#
# Parameters:
#   $userName     - owner of the report (used to locate the report directory)
#   $reportNameID - the report's current/original name (renaming a report
#                   to its own existing name is allowed)
#   $reportName   - the requested report name
#
# Returns:
#   an error message string when the name is empty or already in use,
#   or a false value (empty list / undef) when the name is acceptable.
#
# NOTE(review): relies on the global $perfhome being set by the including
# script -- TODO confirm it is always defined before this is called.
sub checkReportName {
    my ($userName, $reportNameID, $reportName) = @_;

    if (length($reportName) == 0) {
        return("Please enter a report name");
    }

    # Only check for collisions when the name actually changes, so that
    # saving a report under its existing name is not reported as "taken".
    if ($reportNameID ne $reportName) {
        if (-e "$perfhome/var/db/users/$userName/reports/$reportName.ser") {
            return("Report Name is already taken");
        }
    }

    # Explicit success value (false) instead of falling off the end and
    # returning the value of the last evaluated statement.
    return;
}

1;
ktenzer/perfstat
ui/reportMonitor/content/layoutReport/lib_inputCheck.pl
Perl
apache-2.0
324
#!/usr/bin/perl -w
# $Id: smcontrol.pl,v 8.8 2008/07/21 21:31:43 ca Exp $
#
# smcontrol -- query and control a running sendmail daemon through its
# local control socket.  Supported commands: status, shutdown, restart,
# start, memdump, help, mstat.

use strict;
use Getopt::Std;
use FileHandle;
use Socket;

# command line used by start_daemon() to launch a new daemon
my $sendmailDaemon = "/usr/sbin/sendmail -q30m -bd";

##########################################################################
#
# &get_controlname -- read ControlSocketName option from sendmail.cf
#
# Parameters:
#	none.
#
# Returns:
#	control socket filename, undef if not found
#

sub get_controlname
{
	my $cn = undef;
	my $qd = undef;

	open(CF, "</etc/mail/sendmail.cf") or return $cn;
	while (<CF>)
	{
		chomp;
		# long-form option syntax: "O ControlSocketName = path"
		if (/^O ControlSocketName\s*=\s*([^#]+)$/o)
		{
			$cn = $1;
		}
		if (/^O QueueDirectory\s*=\s*([^#]+)$/o)
		{
			$qd = $1;
		}
		# old single-letter option syntax: "OQpath"
		if (/^OQ([^#]+)$/o)
		{
			$qd = $1;
		}
	}
	close(CF);
	if (not defined $cn)
	{
		return undef;
	}
	# a relative socket name is taken relative to the queue directory
	if ($cn !~ /^\//o)
	{
		return undef if (not defined $qd);
		$cn = $qd . "/" . $cn;
	}
	return $cn;
}

##########################################################################
#
# &do_command -- send command to sendmail daemon view control socket
#
# Parameters:
#	controlsocket -- filename for socket
#	command -- command to send
#
# Returns:
#	reply from sendmail daemon
#

sub do_command
{
	my $controlsocket = shift;
	my $command = shift;
	my $proto = getprotobyname('ip');
	my @reply;
	my $i;

	socket(SOCK, PF_UNIX, SOCK_STREAM, $proto) or return undef;
	# retry the connect a few times in case the daemon is busy
	for ($i = 0; $i < 4; $i++)
	{
		if (!connect(SOCK, sockaddr_un($controlsocket)))
		{
			if ($i == 3)
			{
				close(SOCK);
				return undef;
			}
			sleep 1;
			next;
		}
		last;
	}
	# flush immediately so the command reaches the daemon right away
	autoflush SOCK 1;
	print SOCK "$command\n";
	@reply = <SOCK>;
	close(SOCK);
	return join '', @reply;
}

##########################################################################
#
# &sendmail_running -- check if sendmail is running via SMTP
#
# Parameters:
#	none
#
# Returns:
#	1 if running, undef otherwise
#

sub sendmail_running
{
	my $port = getservbyname("smtp", "tcp") || 25;
	my $proto = getprotobyname("tcp");
	my $iaddr = inet_aton("localhost");
	my $paddr = sockaddr_in($port, $iaddr);

	socket(SOCK, PF_INET, SOCK_STREAM, $proto) or return undef;
	if (!connect(SOCK, $paddr))
	{
		close(SOCK);
		return undef;
	}
	autoflush SOCK 1;
	# read the SMTP greeting; expect code 220, and keep reading while
	# the separator is '-' (multi-line reply), stop on ' ' (last line)
	while (<SOCK>)
	{
		if (/^(\d{3})([ -])/)
		{
			if ($1 != 220)
			{
				close(SOCK);
				return undef;
			}
		}
		else
		{
			close(SOCK);
			return undef;
		}
		last if ($2 eq " ");
	}
	print SOCK "QUIT\n";
	# drain the (possibly multi-line) reply to QUIT
	while (<SOCK>)
	{
		last if (/^\d{3} /);
	}
	close(SOCK);
	return 1;
}

##########################################################################
#
# &munge_status -- turn machine readable status into human readable text
#
# Parameters:
#	raw -- raw results from sendmail daemon STATUS query
#
# Returns:
#	human readable text
#

sub munge_status
{
	my $raw = shift;
	my $cooked = "";
	my $daemonStatus = "";

	# first line: children/maxchildren/freediskblocks/loadavg
	if ($raw =~ /^(\d+)\/(\d+)\/(\d+)\/(\d+)/mg)
	{
		$cooked .= "Current number of children: $1";
		if ($2 > 0)
		{
			$cooked .= " (maximum $2)";
		}
		$cooked .= "\n";
		$cooked .= "QueueDir free disk space (in blocks): $3\n";
		$cooked .= "Load average: $4\n";
	}
	# remaining lines: "<pid> <status text>"; the first one is the
	# daemon itself, the rest are its children
	while ($raw =~ /^(\d+) (.*)$/mg)
	{
		if (not $daemonStatus)
		{
			$daemonStatus = "(process $1) " . ucfirst($2) . "\n";
		}
		else
		{
			$cooked .= "Child Process $1 Status: $2\n";
		}
	}
	return ($daemonStatus, $cooked);
}

##########################################################################
#
# &start_daemon -- fork off a sendmail daemon
#
# Parameters:
#	control -- control socket name
#
# Returns:
#	Error message or "OK" if successful
#

sub start_daemon
{
	my $control = shift;
	my $pid;

	if ($pid = fork)
	{
		# parent: wait for the child and inspect its exit status
		my $exitstat;

		waitpid $pid, 0 or return "Could not get status of created process: $!\n";
		# high byte of $? is the exit value
		$exitstat = $? / 256;
		if ($exitstat != 0)
		{
			return "sendmail daemon startup exited with exit value $exitstat";
		}
	}
	elsif (defined $pid)
	{
		# child: replace ourselves with the daemon
		exec($sendmailDaemon);
		die "Unable to start sendmail daemon: $!.\n";
	}
	else
	{
		return "Could not create new process: $!\n";
	}
	return "OK\n";
}

##########################################################################
#
# &stop_daemon -- stop the sendmail daemon using control socket
#
# Parameters:
#	control -- control socket name
#
# Returns:
#	Error message or status message
#

sub stop_daemon
{
	my $control = shift;
	my $status;	# (unused)

	if (not defined $control)
	{
		return "The control socket is not configured so the daemon can not be stopped.\n";
	}
	return &do_command($control, "SHUTDOWN");
}

##########################################################################
#
# &restart_daemon -- restart the sendmail daemon using control socket
#
# Parameters:
#	control -- control socket name
#
# Returns:
#	Error message or status message
#

sub restart_daemon
{
	my $control = shift;
	my $status;	# (unused)

	if (not defined $control)
	{
		return "The control socket is not configured so the daemon can not be restarted.";
	}
	return &do_command($control, "RESTART");
}

##########################################################################
#
# &memdump -- get memdump from the daemon using the control socket
#
# Parameters:
#	control -- control socket name
#
# Returns:
#	Error message or status message
#

sub memdump
{
	my $control = shift;
	my $status;	# (unused)

	if (not defined $control)
	{
		return "The control socket is not configured so the daemon can not be queried for memdump.";
	}
	return &do_command($control, "MEMDUMP");
}

##########################################################################
#
# &help -- get help from the daemon using the control socket
#
# Parameters:
#	control -- control socket name
#
# Returns:
#	Error message or status message
#

sub help
{
	my $control = shift;
	my $status;	# (unused)

	if (not defined $control)
	{
		return "The control socket is not configured so the daemon can not be queried for help.";
	}
	return &do_command($control, "HELP");
}

# ---------------------------------------------------------------------
# main: parse options, find the control socket, dispatch the command
# ---------------------------------------------------------------------

my $status = undef;
my $daemonStatus = undef;
my $opts = {};

getopts('f:', $opts) || die "Usage: $0 [-f /path/to/control/socket] command\n";

# -f overrides the socket path from sendmail.cf
my $control = $opts->{f} || &get_controlname;
my $command = shift;

if (not defined $control)
{
	die "No control socket available.\n";
}
if (not defined $command)
{
	die "Usage: $0 [-f /path/to/control/socket] command\n";
}
if ($command eq "status")
{
	$status = &do_command($control, "STATUS");
	if (not defined $status)
	{
		# Not responding on control channel, query via SMTP
		if (&sendmail_running)
		{
			$daemonStatus = "Sendmail is running but not answering status queries.";
		}
		else
		{
			$daemonStatus = "Sendmail does not appear to be running.";
		}
	}
	else
	{
		# Munge control channel output
		($daemonStatus, $status) = &munge_status($status);
	}
}
elsif (lc($command) eq "shutdown")
{
	$status = &stop_daemon($control);
}
elsif (lc($command) eq "restart")
{
	$status = &restart_daemon($control);
}
elsif (lc($command) eq "start")
{
	$status = &start_daemon($control);
}
elsif (lc($command) eq "memdump")
{
	$status = &memdump($control);
}
elsif (lc($command) eq "help")
{
	$status = &help($control);
}
elsif (lc($command) eq "mstat")
{
	$status = &do_command($control, "mstat");
	if (not defined $status)
	{
		# Not responding on control channel, query via SMTP
		if (&sendmail_running)
		{
			$daemonStatus = "Sendmail is running but not answering status queries.";
		}
		else
		{
			$daemonStatus = "Sendmail does not appear to be running.";
		}
	}
}
else
{
	die "Unrecognized command $command\n";
}
if (defined $daemonStatus)
{
	print "Daemon Status: $daemonStatus\n";
}
if (defined $status)
{
	print "$status\n";
}
else
{
	die "No response\n";
}
dplbsd/soc2013
head/contrib/sendmail/contrib/smcontrol.pl
Perl
bsd-2-clause
7,581
package Fastq_reader;

use strict;
use warnings;
use PerlIO::gzip;

####
# Streaming reader for FASTQ files (plain or gzip-compressed).
# new() accepts either a filename or an already-open IO::Handle.
sub new {
    my ($packagename, $fastqFile) = @_;

    ## note: fastqFile can be a filename or an IO::Handle
    my $self = { fastqFile  => undef,
                 fileHandle => undef };

    bless($self, $packagename);

    ## create filehandle
    my $filehandle = undef;
    if (ref $fastqFile eq 'IO::Handle') {
        $filehandle = $fastqFile;
    }
    else {
        if ($fastqFile =~ /\.gz$/) {
            open($filehandle, "<:gzip", $fastqFile) or die "Error: Couldn't open compressed $fastqFile\n";
        }
        else {
            # three-arg open with an explicit read mode (original used 2-arg open)
            open($filehandle, '<', $fastqFile) or die "Error: Couldn't open $fastqFile\n";
        }
        $self->{fastqFile} = $fastqFile;
    }

    $self->{fileHandle} = $filehandle;

    return ($self);
}

#### next() fetches the next Fastq_record object, or undef at end of file.
sub next {
    my $self = shift;

    my $filehandle = $self->{fileHandle};

    my $next_text_input = "";
    if (! eof($filehandle)) {
        # a FASTQ record is exactly four lines
        for (1 .. 4) {
            $next_text_input .= <$filehandle>;
        }
    }

    my $read_obj = undef;
    if ($next_text_input) {
        $read_obj = Fastq_record->new($next_text_input);
    }

    return ($read_obj);    # returns undef when the stream is exhausted
}

#### finish() closes the open filehandle to the query database.
sub finish {
    my $self = shift;
    my $filehandle = $self->{fileHandle};
    close $filehandle;
    $self->{fileHandle} = undef;
}

##############################################

package Fastq_record;

use strict;
use warnings;
use Carp;

# Parse one 4-line FASTQ record.  Recognizes both the old "name/1" suffix
# style and the Casava 1.8+ "name 1:..." comment style of pair encoding.
sub new {
    my $packagename = shift;
    my ($text_lines) = @_;

    my @split_text = split(/\n/, $text_lines);
    unless (scalar @split_text == 4) {
        confess "Error, fastQ entry doesn't have 4 lines: " . $text_lines;
    }

    my ($name_line, $seq_line, $plus, $qual_line) = @split_text;
    unless ($name_line =~ /^\@/) {
        confess "Error, cannot identify first line as read name line: " . $text_lines;
    }

    my ($read_name, $rest) = split(/\s+/, $name_line);
    $read_name =~ s/^\@//;

    my $pair_dir = 0;    # assume single
    if ($read_name =~ /^(\S+)\/([12])$/) {
        # old-style "/1" or "/2" suffix on the read name itself
        $read_name = $1;
        $pair_dir  = $2;
    }
    elsif (defined($rest) && $rest =~ /^([12]):/) {
        # Casava 1.8+ style: pair number leads the comment field
        $pair_dir = $1;
    }

    my $self = {
        core_read_name => $read_name,
        pair_dir       => $pair_dir,     # (0, 1, or 2), with 0 = unpaired.
        sequence       => $seq_line,
        quals          => $qual_line,
        record         => $text_lines,
    };

    bless($self, $packagename);
    return ($self);
}

#### core read name, without any /1 or /2 pairing suffix
sub get_core_read_name {
    my $self = shift;
    return ($self->{core_read_name});
}

#### full read name; includes "/1" or "/2" for paired reads
sub get_full_read_name {
    my $self = shift;
    my $read_name = $self->{core_read_name};
    if ($self->{pair_dir}) {
        return (join("/", $read_name, $self->{pair_dir}));
    }
    # Bug fix: unpaired reads previously fell through and returned
    # nothing (undef / empty list); return the core name instead.
    return ($read_name);
}

#### raw sequence line
sub get_sequence {
    my $self = shift;
    return ($self->{sequence});
}

#### raw quality line
sub get_quals {
    my $self = shift;
    return ($self->{quals});
}

#### the original 4-line record text
sub get_fastq_record {
    my $self = shift;
    return ($self->{record});
}

1; #EOM
vipints/oqtans
oqtans_tools/Trinity/r2013_08_14/PerlLib/Fastq_reader.pm
Perl
bsd-3-clause
3,183
package API::Asn;
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
#

# JvD Note: you always want to put Utils as the first use. Sh*t don't work if it's after the Mojo lines.
use UI::Utils;

use Mojo::Base 'Mojolicious::Controller';
use Data::Dumper;

# Index: list all ASNs, optionally filtered by ?cachegroup=<id>,
# ordered by ?orderby (default "asn").
sub index {
	my $self  = shift;
	my $cg_id = $self->param('cachegroup');

	my %criteria;
	if ( defined $cg_id ) {
		$criteria{'cachegroup'} = $cg_id;
	}

	my @data;
	my $orderby = $self->param('orderby') || "asn";
	my $rs_data = $self->db->resultset("Asn")->search( \%criteria, { prefetch => [ { 'cachegroup' => undef } ], order_by => "me." . $orderby } );
	while ( my $row = $rs_data->next ) {
		push(
			@data, {
				"id"           => $row->id,
				"asn"          => $row->asn,
				"cachegroupId" => $row->cachegroup->id,
				"cachegroup"   => $row->cachegroup->name,
				"lastUpdated"  => $row->last_updated
			}
		);
	}
	$self->success( \@data );
}

# Legacy v1.1 listing: same data, wrapped in { asns => [...] } and
# without the cachegroupId field.
sub index_v11 {
	my $self = shift;
	my @data;
	my $orderby = $self->param('orderby') || "asn";
	my $rs_data = $self->db->resultset("Asn")->search( undef, { prefetch => [ { 'cachegroup' => undef } ], order_by => "me." . $orderby } );
	while ( my $row = $rs_data->next ) {
		push(
			@data, {
				"id"          => $row->id,
				"asn"         => $row->asn,
				"cachegroup"  => $row->cachegroup->name,
				"lastUpdated" => $row->last_updated,
			}
		);
	}
	$self->success( { "asns" => \@data } );
}

# Show a single ASN by id.
sub show {
	my $self = shift;
	my $id   = $self->param('id');

	my $rs_data = $self->db->resultset("Asn")->search( { 'me.id' => $id }, { prefetch => ['cachegroup'] } );
	my @data = ();
	while ( my $row = $rs_data->next ) {
		push(
			@data, {
				"id"           => $row->id,
				"asn"          => $row->asn,
				"cachegroupId" => $row->cachegroup->id,
				"cachegroup"   => $row->cachegroup->name,
				"lastUpdated"  => $row->last_updated
			}
		);
	}
	$self->success( \@data );
}

# Update an existing ASN.  Requires operator privileges and a JSON body
# with both "asn" and "cachegroupId".
sub update {
	my $self   = shift;
	my $id     = $self->param('id');
	my $params = $self->req->json;

	if ( !&is_oper($self) ) {
		return $self->forbidden();
	}

	my $asn = $self->db->resultset('Asn')->find( { id => $id } );
	if ( !defined($asn) ) {
		return $self->not_found();
	}

	if ( !defined($params) ) {
		return $self->alert("parameters must be in JSON format.");
	}
	if ( !defined( $params->{asn} ) ) {
		return $self->alert("ASN is required.");
	}
	if ( !defined( $params->{cachegroupId} ) ) {
		return $self->alert("Cachegroup is required.");
	}

	my $values = {
		asn        => $params->{asn},
		cachegroup => $params->{cachegroupId}
	};

	my $rs = $asn->update($values);
	if ($rs) {
		my $response;
		$response->{id}           = $rs->id;
		$response->{asn}          = $rs->asn;
		$response->{cachegroupId} = $rs->cachegroup->id;
		$response->{cachegroup}   = $rs->cachegroup->name;
		$response->{lastUpdated}  = $rs->last_updated;
		&log( $self, "Updated ASN name '" . $rs->asn . "' for id: " . $rs->id, "APICHANGE" );
		return $self->success( $response, "ASN update was successful." );
	}
	else {
		return $self->alert("ASN update failed.");
	}
}

# Create a new ASN.  Requires operator privileges and a JSON body with
# both "asn" and "cachegroupId".
sub create {
	my $self   = shift;
	my $params = $self->req->json;

	if ( !&is_oper($self) ) {
		return $self->forbidden();
	}

	if ( !defined( $params->{asn} ) ) {
		return $self->alert("ASN is required.");
	}
	if ( !defined( $params->{cachegroupId} ) ) {
		return $self->alert("Cachegroup Id is required.");
	}

	my $values = {
		asn        => $params->{asn},
		cachegroup => $params->{cachegroupId}
	};

	# DBIx::Class's create() already inserts the row; the original code
	# called ->insert() on the result a second time, which is redundant.
	my $rs = $self->db->resultset('Asn')->create($values);
	if ($rs) {
		my $response;
		$response->{id}           = $rs->id;
		$response->{asn}          = $rs->asn;
		$response->{cachegroupId} = $rs->cachegroup->id;
		$response->{cachegroup}   = $rs->cachegroup->name;
		$response->{lastUpdated}  = $rs->last_updated;
		&log( $self, "Created ASN name '" . $rs->asn . "' for id: " . $rs->id, "APICHANGE" );
		return $self->success( $response, "ASN create was successful." );
	}
	else {
		return $self->alert("ASN create failed.");
	}
}

# Delete an ASN by id.  Requires operator privileges.
sub delete {
	my $self = shift;
	my $id   = $self->param('id');

	if ( !&is_oper($self) ) {
		return $self->forbidden();
	}

	my $asn = $self->db->resultset('Asn')->find( { id => $id } );
	if ( !defined($asn) ) {
		return $self->not_found();
	}

	my $rs = $asn->delete();
	if ($rs) {
		return $self->success_message("ASN deleted.");
	}
	else {
		return $self->alert("ASN delete failed.");
	}
}

1;
serDrem/incubator-trafficcontrol
traffic_ops/app/lib/API/Asn.pm
Perl
apache-2.0
4,945
#! c:\perl\bin\perl.exe
#-----------------------------------------------------------
# muicache_tln.pl
# Plugin for Registry Ripper, NTUSER.DAT edition - gets the
# MUICache values
#
# Change history
#   20130425 - added alertMsg() functionality
#   20120522 - updated to collect info from Win7 USRCLASS.DAT
#
#
# copyright 2013 Quantum Research Analytics, LLC
# Author: H. Carvey, keydet89@yahoo.com
#-----------------------------------------------------------
package muicache_tln;
use strict;

my %config = (hive          => "NTUSER\.DAT,USRCLASS\.DAT",
              hasShortDescr => 1,
              hasDescr      => 0,
              hasRefs       => 0,
              osmask        => 22,
              version       => 20130425);

sub getConfig{return %config}

sub getShortDescr {
	return "Gets EXEs from user's MUICache key (TLN)";
}
sub getDescr{}
sub getRefs {}
sub getHive {return $config{hive};}
sub getVersion {return $config{version};}

my $VERSION = getVersion();

# Entry point invoked by RegRipper.  Examines both the XP-era
# (NTUSER.DAT) and the Win7+ (USRCLASS.DAT) MUICache key locations.
# The original duplicated the scan code for each path (re-declaring
# $key_path/$key in the same scope); it is factored into one helper.
sub pluginmain {
	my $class = shift;
	my $ntuser = shift;
	::logMsg("Launching muicache_tln v.".$VERSION);
	my $reg = Parse::Win32Registry->new($ntuser);
	my $root_key = $reg->get_root_key;

	# XP and earlier (NTUSER.DAT)
	_check_muicache_key($root_key, 'Software\\Microsoft\\Windows\\ShellNoRoam\\MUICache');
	# Windows 7 and later (USRCLASS.DAT)
	_check_muicache_key($root_key, 'Local Settings\\Software\\Microsoft\\Windows\\Shell\\MUICache');
}

# Scan one MUICache key and emit a TLN alert for every value whose
# *name* (the executable path in MUICache) contains "Temp"/"temp".
# The value data (a friendly description) is appended for context.
sub _check_muicache_key {
	my ($root_key, $key_path) = @_;

	my $key = $root_key->get_subkey($key_path);
	return unless (defined $key);

	my $lw = $key->get_timestamp();
	foreach my $v ($key->get_list_of_values()) {
		my $name = $v->get_name();
		next if ($name =~ m/^@/ || $name eq "LangID");
		my $data = $v->get_data();
		::alertMsg($lw."|ALERT|||HKCU\\".$key_path." ".$name." has \"Temp\" in path: ".$data)
			if ($name =~ m/[Tt]emp/);
	}
}

1;
mhmdfy/autopsy
RecentActivity/release/rr-full/plugins/muicache_tln.pl
Perl
apache-2.0
2,701
#------------------------------------------------------------------------ # Copyright 2008-2009 (c) Jeff Brown <spadix@users.sourceforge.net> # # This file is part of the ZBar Bar Code Reader. # # The ZBar Bar Code Reader is free software; you can redistribute it # and/or modify it under the terms of the GNU Lesser Public License as # published by the Free Software Foundation; either version 2.1 of # the License, or (at your option) any later version. # # The ZBar Bar Code Reader is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty # of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser Public License for more details. # # You should have received a copy of the GNU Lesser Public License # along with the ZBar Bar Code Reader; if not, write to the Free # Software Foundation, Inc., 51 Franklin St, Fifth Floor, # Boston, MA 02110-1301 USA # # http://sourceforge.net/projects/zbar #------------------------------------------------------------------------ =pod =head1 NAME Barcode::ZBar::Image - image object to scan for bar codes =head1 SYNOPSIS use Barcode::ZBar; my $image = Barcode::ZBar::Image->new(); $image->set_format('422P'); $image->set_size(114, 80); $image->set_data($raw_bits); my @symbols = $image->get_symbols(); =head1 DESCRIPTION Barcode::ZBar::Image is used to pass images to the bar code scanner. It wraps raw image data with the meta-data required to interpret it (size, pixel format, etc) =head2 Image Formats Image data formats are represented by (relatively) standard "Four Character Codes" (fourcc), represented by four character strings in Perl. A list of supported formats is available on the project wiki. 
Examples:

=over 2

=item *

'GREY' - single 8bpp intensity plane

=item *

'BGR3' - 24bpp packed RGB component format

=item *

'YUYV' - 16bpp packed luminance/chrominance (YCbCr) format

=back

=head1 REFERENCE

=head2 Methods

=over 4

=item new()

Create a new Barcode::ZBar::Image object.  The size, pixel format and
data must be defined before the object may be used.

=item get_format()

=item set_format(I<format>)

Return/specify the fourcc code corresponding to the image pixel format.

=item get_sequence()

=item set_sequence(I<seq_num>)

Return/specify the video frame or page number associated with the
image.

=item get_size()

=item set_size(I<width>, I<height>)

Return/specify the (I<width>, I<height>) image size tuple.

=item get_data()

=item set_data(I<raw>)

Return/specify the raw image data as a binary string.

=item get_symbols()

Return a list of scanned Barcode::ZBar::Symbol results attached to
this image.

=item convert(I<format>)

Return a new Barcode::ZBar::Image object converted to the indicated
fourcc format.  Returns C<undef> if the conversion is not supported.
Conversion complexity ranges from CPU intensive to trivial depending
on the formats involved.  Note that only a few conversions retain
color information.

=back

=head1 SEE ALSO

Barcode::ZBar, Barcode::ZBar::Symbol

zbarimg(1), zbarcam(1)

http://zbar.sf.net

=head1 AUTHOR

Jeff Brown, E<lt>spadix@users.sourceforge.netE<gt>

=head1 COPYRIGHT AND LICENSE

Copyright 2008-2009 (c) Jeff Brown E<lt>spadix@users.sourceforge.netE<gt>

The ZBar Bar Code Reader is free software; you can redistribute it
and/or modify it under the terms of the GNU Lesser Public License as
published by the Free Software Foundation; either version 2.1 of
the License, or (at your option) any later version.

=cut
krunalsoni01/yocto-zbar
zbar-0.8/perl/ZBar/Image.pod
Perl
mit
3,600
#Copyright 2009, 2010 Daniel Gaston, andrew Roger Lab
#This code is copyrighted under the GNU General Public License Version 3.0
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.

package RAxML;

use 5.010000;
use strict;
use warnings;

require Exporter;

our @ISA = qw(Exporter);

# Items to export into callers namespace by default. Note: do not export
# names by default without a very good reason. Use EXPORT_OK instead.
our %EXPORT_TAGS = ( 'all' => [ qw() ] );
our @EXPORT_OK   = ( @{ $EXPORT_TAGS{'all'} } );
our @EXPORT      = qw(callRAxML callParallelRAxML getRAxMLSiteLH);

our $VERSION = '0.1';

# Run the serial raxmlHPC binary with the supplied argument string.
sub callRAxML {
    my $string = shift;
    print "Calling raxmlHPC with the following: $string\n";
    `raxmlHPC $string`;
}

# Run the pthreads/SSE3 RAxML binary with the supplied argument string.
sub callParallelRAxML {
    my $string = shift;
    print "Calling raxmlHPC-PTHREADS-SSE3 with the following: $string\n";
    `raxmlHPC-PTHREADS-SSE3 $string`;
}

# Parse per-site log-likelihoods from a RAxML site-likelihood file.
# The values live on the file's second line: a tree/label token followed
# by whitespace-separated numbers.  Returns the list of numbers.
sub getRAxMLSiteLH {
    my $file = shift;
    # three-arg open with a lexical handle (original used the two-arg,
    # bareword-handle form)
    open(my $in, '<', $file) or die "Could not open $file. Exiting...\n\n";
    my @data = <$in>;
    close($in);
    my @lhs = split /\s+/, $data[1];
    shift @lhs;    # discard the leading label token
    return @lhs;
}

1;
__END__

=head1 NAME

RAxML - Perl extension for calling RAxML and parsing output files

=head1 SYNOPSIS

  use RAxML;

  RAxML::callRAxML($argument_string);
  my @site_lhs = RAxML::getRAxMLSiteLH($site_lh_file);

=head1 DESCRIPTION

This set of functions are useful for calling the phylogenetic program
RAxML as well as parsing the output files.

=head2 EXPORT

None by default.

=head1 SEE ALSO

The RAxML documentation (https://cme.h-its.org/exelixis/web/software/raxml/).

=head1 AUTHOR

Daniel Gaston (daniel.gaston@dal.ca) for the Andrew Roger Lab, Dalhousie
University

Permanent Contact: Andrew.Roger@dal.ca

=head1 COPYRIGHT AND LICENSE

Copyright (C) 2009 by Daniel Gaston

This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself, either Perl version 5.10.0 or,
at your option, any later version of Perl 5 you may have available.

=cut
GastonLab/fundi-dev
RAxML.pm
Perl
mit
3,082
#!/usr/bin/env perl

use strict;
use warnings;

use Fcntl;
use Storable qw(nstore);    # hoisted from mid-file; 'use' runs at compile time anyway

### Initialization Section
use Log::Log4perl qw(:easy);
Log::Log4perl->easy_init($INFO);

use Getopt::Long;

# (currently unused) progress-report interval, kept for compatibility
my $linenumberreport = 10000000;

# output files (Storable network-order format)
my $ranksfileout = "ranks.bin";
my $nodesfileout = "nodes.bin";

my @files2download = qw(ftp://ftp.ncbi.nlm.nih.gov/pub/taxonomy/taxcat.tar.gz
                        ftp://ftp.ncbi.nlm.nih.gov/pub/taxonomy/taxdump.tar.gz);

my $overwrite = 0;    # accepted but not (yet) acted upon
my $download  = 1;
my $quiet     = 0;

my $getopt_result = GetOptions(
    'overwrite!' => \$overwrite,
    'download!'  => \$download,
    'quiet'      => \$quiet,
);

### check if quiet is requested
if ($quiet != 0) {
    # set the logger to FATALs only
    Log::Log4perl->easy_init($FATAL);
}

### Get a logger
my $logger = get_logger();

$logger->info("Started update process...");

# should I download the files?
if ($download) {
    require File::Basename;

    foreach my $file (@files2download) {
        my $basename = File::Basename::basename($file, (".gz", ".tar.gz"));

        # remove any stale copy before downloading
        if (-e $basename) {
            unlink($basename) or $logger->logdie("Unable to delete file '$basename'");
        }

        my $cmd = "wget -O - '$file' 2>/dev/null";
        if ($file =~ /\.tar\.gz$/) {
            $cmd .= "| tar xzf -";
        } elsif ($file =~ /\.gz$/) {
            $cmd .= "| gunzip > $basename";
        }

        $logger->info("Running command '$cmd'...");
        qx($cmd);
        $logger->info("Running command '$cmd' finished.");
    }
}

$logger->info("Starting import of information about merged taxids");
my $merged = getmergedimported();
$logger->info("Finished import of information about merged taxids");

$logger->info("Starting import of information about names for taxids");
my $names = getnamesimported();
$logger->info("Finished import of information about names for taxids");

$logger->info("Starting import of information about nodes for taxids");
my ($nodes, $ranks) = getnodesimported();
$logger->info("Finished import of information about nodes for taxids");

$logger->info("Starting extraction and storage of used ranks");
my @ranks_used = keys %{$ranks};
nstore(\@ranks_used, $ranksfileout)
    or $logger->logdie("Unable to store information about the used ranks in file '$ranksfileout'");
$logger->info("Finished extraction and storage of used ranks");

$logger->info("Starting combining node and names information");
foreach my $act_taxid (0 .. @{$nodes} - 1) {
    if (ref $nodes->[$act_taxid]) {
        $nodes->[$act_taxid]->{sciname} = $names->{$act_taxid};
        $nodes->[$act_taxid]->{taxid}   = $act_taxid;
    }
}
$logger->info("Finished combining node and names information");

$logger->info("Started adding merged taxid information");
foreach my $merged_taxid (keys %{$merged}) {
    # a merged taxid records its replacement and inherits its data
    $nodes->[$merged_taxid]->{merged_with} = $merged->{$merged_taxid};
    $nodes->[$merged_taxid]->{taxid}       = $merged_taxid;
    foreach (qw(ancestor rank sciname)) {
        $nodes->[$merged_taxid]->{$_} = $nodes->[$nodes->[$merged_taxid]->{merged_with}]->{$_};
    }
}
$logger->info("Finished adding merged taxid information");

$logger->info("Started storing node information in binary format");
nstore($nodes, $nodesfileout)
    or $logger->logdie("Unable to store node information in file '$nodesfileout'");
$logger->info("Finished storing node information in binary format");

$logger->info("Update process finished");

### functions are located here

# Parse nodes.dmp from the current directory.
# Returns (\@nodes indexed by taxid with {ancestor, rank}, \%rank_counts).
sub getnodesimported {
    my $nodesfileinput = "nodes.dmp";

    $logger->info("Started import of nodes.dmp from file '$nodesfileinput'");

    my @nodes = ();
    my %ranks = ();

    open(my $fh, "<", $nodesfileinput) or $logger->logdie("Unable to open file '$nodesfileinput'");
    while (my $line = <$fh>) {
        my @tmp = split(/\t\|\t/, $line);
        unless (defined $tmp[0] && defined $tmp[1] && defined $tmp[2]) {
            # skip the line if the elements 0..2 in @tmp are not defined!
            $logger->debug("Found undefined values for the line '$line'");
            next;
        }
        $nodes[$tmp[0]] = { ancestor => int($tmp[1]), rank => $tmp[2] };
        $ranks{$tmp[2]}++;
    }
    close($fh) or $logger->logdie("Unable to close file '$nodesfileinput'");

    $logger->info("Finished import of nodes.dmp from file '$nodesfileinput'");

    return \@nodes, \%ranks;
}

# Parse names.dmp from the current directory.
# Returns a hashref mapping taxid -> scientific name.
sub getnamesimported {
    my $namesfileinput = "names.dmp";

    $logger->info("Started import of names.dmp from file '$namesfileinput'");

    my %names_by_taxid = ();

    open(my $fh, "<", $namesfileinput) or $logger->logdie("Unable to open file '$namesfileinput'");
    while (my $line = <$fh>) {
        my @tmp = split(/\t\|\t/, $line);
        # only keep the "scientific name" entries (guard avoids an
        # uninitialized-value warning on malformed lines)
        next unless (defined $tmp[3] && $tmp[3] =~ /scientific name/);
        print STDERR "Doppelbelegung von $tmp[0]" if (defined $names_by_taxid{$tmp[0]});
        $names_by_taxid{$tmp[0]} = $tmp[1];
    }
    close($fh) or $logger->logdie("Unable to close file '$namesfileinput'");

    $logger->info("Finished import of names.dmp from file '$namesfileinput'");

    return \%names_by_taxid;
}

# Parse merged.dmp from the current directory.
# Returns a hashref mapping old taxid -> new (replacement) taxid.
sub getmergedimported {
    my $mergedfileinput = "merged.dmp";

    $logger->info("Started import of merged.dmp from file '$mergedfileinput'");

    my %merged_by_taxid = ();

    open(my $fh, "<", $mergedfileinput) or $logger->logdie("Unable to open file '$mergedfileinput'");
    while (my $line = <$fh>) {
        my @tmp = split(/[\s\|]+/, $line);
        print STDERR "Doppelbelegung von $tmp[0]" if (defined $merged_by_taxid{$tmp[0]});
        $merged_by_taxid{$tmp[0]} = $tmp[1];
    }
    close($fh) or $logger->logdie("Unable to close file '$mergedfileinput'");

    $logger->info("Finished import of merged.dmp from file '$mergedfileinput'");

    return \%merged_by_taxid;
}

exit;

# NOTE(review): the unreachable gi->taxid index-building prototype that
# previously lived after __END__ has been removed; it was never compiled
# or executed.
greatfireball/NCBI-Taxonomy
make_taxid_indizes.pl
Perl
mit
7,004
# Shared helper routines for the shader build scripts: path munging,
# Perforce integration, and source/.vcs CRC comparison.  Subs live in
# package main and are pulled in via require/do by the build scripts.

# Convert backslashes to forward slashes.
sub BackToForwardSlash
{
	my ($path) = shift;
	$path =~ s,\\,/,g;
	return $path;
}

# Strip the trailing filename component, leaving the directory part.
sub RemoveFileName
{
	my ($in) = shift;
	$in = &BackToForwardSlash($in);
	$in =~ s,/[^/]*$,,;
	return $in;
}

# Strip the directory part, leaving just the trailing filename.
sub RemovePath
{
	my ($in) = shift;
	$in = &BackToForwardSlash($in);
	$in =~ s,^(.*)/([^/]*)$,$2,;
	return $in;
}

# mkdir -p equivalent: create every missing directory along the path.
sub MakeDirHier
{
	my ($in) = shift;
	$in = &BackToForwardSlash($in);
	my @path;
	while ($in =~ m,/,)	# while $in still has a slash
	{
		my $end = &RemovePath($in);
		push @path, $end;
		$in = &RemoveFileName($in);
	}
	my $numelems = scalar(@path);
	my $curpath;
	for (my $i = $numelems - 1; $i >= 0; $i--)
	{
		$curpath .= "/" . $path[$i];
		my $dir = $in . $curpath;
		if (!stat $dir)
		{
			mkdir $dir, 0777;
		}
	}
}

# True when the file can be stat'ed (i.e. exists).
sub FileExists
{
	my $filename = shift;
	my @statresult = stat $filename;
	return @statresult != 0;
}

# Make an existing file world-writable.
sub MakeFileWritable
{
	my $filename = shift;
	if (&FileExists($filename))
	{
		# original "chmod 0666, $filename || die" could never die: the ||
		# bound to $filename.  Check chmod's result properly.
		chmod(0666, $filename) or die "chmod 0666 $filename failed";
	}
}

# Make a file read-only.
sub MakeFileReadOnly
{
	my $filename = shift;
	chmod(0444, $filename) or die "chmod 0444 $filename failed";
}

# Run a command and return its combined stdout+stderr as a list of lines.
sub RunCommand
{
	my $cmd = shift;
	# The command's exit status is deliberately ignored; callers parse the
	# captured output instead.  (The original "system ... || die" could
	# never die anyway due to || precedence.)
	system "$cmd > cmdout.txt 2>&1";
	open(my $outfh, '<', "cmdout.txt") or die "can't read cmdout.txt: $!";
	my @output = <$outfh>;
	close $outfh;
	unlink("cmdout.txt") or die "can't delete cmdout.txt: $!";
	return @output;
}

# Open a file for edit (or add) in Perforce.  NOTE: intentionally disabled
# via the early return below; the remaining logic is unreachable but kept
# so it can be re-enabled.
sub PerforceEditOrAdd
{
	return;

	my $filename = shift;
	my $changelistarg = shift;

	# Is the file on the client?
	my $fstatcmd = "p4 fstat \"$filename\"";
	my @fstatout = &RunCommand($fstatcmd);
	my $fstatstr = join "", @fstatout;
	if ($fstatstr =~ m/no such file/)
	{
		# not on client -> add it
		my $addcmd = "p4 add $changelistarg $filename";
		my @addout = &RunCommand($addcmd);
		my $addstr = join "", @addout;
		if ($addstr =~ m/opened for add/)
		{
			print $addstr;
			return;
		}
		print "ERROR: $addstr";
		return;
	}

	# The file is known to be on the client at this point.
	# Is it open for edit?
	if ($fstatstr =~ m/action edit/)
	{
		# "p4 diff -sr" lists opened files that do NOT differ from the
		# depot; non-empty output means the edit is superfluous -> revert.
		my @diffout = &RunCommand("p4 diff -sr \"$filename\"");
		my $diffstr = join "", @diffout;
		if (!($diffstr =~ m/^\s*$/))
		{
			my @revertout = &RunCommand("p4 revert \"$filename\"");
			print join("", @revertout);
			return;
		}
	}

	# "p4 diff -se" lists unopened files that differ from the depot;
	# empty output means the file is unchanged.
	my @checkout = &RunCommand("p4 diff -se \"$filename\"");
	my $checkstr = join "", @checkout;
	if ($checkstr =~ m/^\s*$/)
	{
		&MakeFileReadOnly($filename);
		return;
	}

	# The file is known to be different here; open it for edit.
	my @editout = &RunCommand("p4 edit $changelistarg \"$filename\"");
	foreach my $line (@editout)
	{
		if ($line =~ m/not on client/)
		{
			print "ERROR: @editout\n";
			return;
		}
		if ($line =~ m/currently opened for edit/)
		{
			return;
		}
		if ($line =~ m/opened for edit/)
		{
			print $line;
		}
	}
}

# True when the file's "other write" permission bit is set.
sub FileIsWritable
{
	my ($filename) = shift;
	my @statresult = stat $filename;
	# stat's mode field is already numeric; the original passed it through
	# oct(), which re-read the decimal digits as octal and gave wrong
	# answers for many modes (e.g. 0600 files looked world-writable).
	my $mode = $statresult[2];
	return ($mode & 2) != 0;
}

# Create the file if missing, then set its atime/mtime to now.
sub TouchFile
{
	my $filename = shift;
	if (!&FileExists($filename))
	{
		open(my $fh, '>', $filename) or die "can't create $filename";
		close $fh;
	}
	my $now = time;
	utime $now, $now, $filename;
}

# True when Perforce knows about the file.
sub FileExistsInPerforce
{
	my $filename = shift;
	my @output = &RunCommand("p4 fstat $filename");
	foreach my $line (@output)
	{
		return 0 if ($line =~ m/no such file/);
	}
	return 1;
}

# Write $filecontents to $filename, making the target writable first.
sub WriteFile
{
	my $filename = shift;
	my $filecontents = shift;
	&MakeFileWritable($filename);
	open(my $fp, '>', $filename) or die "can't write $filename";
	print $fp $filecontents;
	close $fp;
}

# Identical behavior to WriteFile; kept as a separate entry point for
# existing callers (the two bodies were duplicated in the original).
sub PerforceWriteFile
{
	my ($filename, $filecontents) = @_;
	&WriteFile($filename, $filecontents);
}

# Echo p4 output lines, suppressing routine noise.
sub PrintCleanPerforceOutput
{
	my $line;
	while ($line = shift)
	{
		next if ($line =~ m/currently opened/i);
		next if ($line =~ m/already opened for edit/i);
		next if ($line =~ m/also opened/i);
		next if ($line =~ m/add of existing file/i);
		print $line;
	}
}

# HACK!!!! Need to pass something in to do this rather than hard coding.
sub NormalizePerforceFilename
{
	my $line = shift;
	# remove newlines.
	$line =~ s/\n//;
	# downcase.  (The original tr/[A-Z]/[a-z]/ included the brackets in
	# both sets; they mapped to themselves, so behavior is unchanged.)
	$line =~ tr/A-Z/a-z/;
	# backslash to forwardslash
	$line =~ s,\\,/,g;
	# for inc files HACK!
	$line =~ s/^.*(fxctmp9.*)/$1/i;
	$line =~ s/^.*(vshtmp9.*)/$1/i;
	# for vcs files. HACK!
	$line =~ s,^.*game/platform/shaders/,,i;
	return $line;
}

# Die unless $filename exists (when $testexists) and is world-writable
# (when $testwrite).
sub MakeSureFileExists
{
	my ($filename, $testexists, $testwrite) = @_;
	my @statresult = stat $filename;
	if (!@statresult && $testexists)
	{
		die "$filename doesn't exist!\n";
	}
	# same numeric-mode fix as FileIsWritable (original used oct())
	my $iswritable = @statresult && (($statresult[2] & 2) != 0);
	if (!$iswritable && $testwrite)
	{
		die "$filename isn't writable!\n";
	}
}

# Classify a shader filename by extension: vsh, psh or fxc.
sub LoadShaderListFile_GetShaderType
{
	my $shadername = shift;
	return "vsh" if ($shadername =~ m/\.vsh/i);
	return "psh" if ($shadername =~ m/\.psh/i);
	return "fxc" if ($shadername =~ m/\.fxc/i);
	die "unknown shader type for '$shadername'";
}

# "src-----base" entries: return the source-file half (or the whole name).
sub LoadShaderListFile_GetShaderSrc
{
	my $shadername = shift;
	return $1 if ($shadername =~ m/^(.*)-----/i);
	return $shadername;
}

# "src-----base" entries: return the base half; otherwise the name with
# its type extension stripped.
sub LoadShaderListFile_GetShaderBase
{
	my $shadername = shift;
	if ($shadername =~ m/-----(.*)$/i)
	{
		return $1;
	}
	my $shadertype = &LoadShaderListFile_GetShaderType($shadername);
	$shadername =~ s/\.$shadertype//i;
	return $shadername;
}

# Read "$inputbase.txt" and return a list of "srcfile-----targetbase"
# entries, expanding _ps2x/_vsxx shaders to concrete shader models
# (everything is forced to model 30 when DIRECTX_FORCE_MODEL=30).
sub LoadShaderListFile
{
	my $inputbase = shift;
	my @srcfiles;
	&MakeSureFileExists("$inputbase.txt", 1, 0);
	open(my $listfh, '<', "$inputbase.txt") or die "can't open $inputbase.txt";
	while (my $line = <$listfh>)
	{
		$line =~ s/\/\/.*$//;	# remove comments "//..."
		$line =~ s/^\s*//;	# trim leading whitespace
		$line =~ s/\s*$//;	# trim trailing whitespace
		next if ($line =~ m/^\s*$/);
		next unless ($line =~ m/\.fxc/ || $line =~ m/\.vsh/ || $line =~ m/\.psh/);

		my $shaderbase = &LoadShaderListFile_GetShaderBase($line);
		if ($ENV{"DIRECTX_FORCE_MODEL"} =~ m/^30$/i)	# forcing all shaders to be ver. 30
		{
			my $targetbase = $shaderbase;
			$targetbase =~ s/_ps2x/_ps30/i;
			$targetbase =~ s/_ps20b/_ps30/i;
			$targetbase =~ s/_ps20/_ps30/i;
			$targetbase =~ s/_vs20/_vs30/i;
			$targetbase =~ s/_vsxx/_vs30/i;
			push @srcfiles, $line . "-----" . $targetbase;
		}
		elsif ($shaderbase =~ m/_ps2x/i)
		{
			my $targetbase = $shaderbase;	# original leaked this as a global
			$targetbase =~ s/_ps2x/_ps20b/i;
			push @srcfiles, $line . "-----" . $targetbase;
		}
		elsif ($shaderbase =~ m/_vsxx/i)
		{
			my $targetbase = $shaderbase;
			$targetbase =~ s/_vsxx/_vs20/i;
			push @srcfiles, $line . "-----" . $targetbase;
		}
		else
		{
			push @srcfiles, $line . "-----" . $shaderbase;
		}
	}
	close $listfh;
	return @srcfiles;
}

# Return the contents of $filename with #include "..." directives
# recursively inlined.
sub ReadInputFileWithIncludes
{
	my ($filename) = shift;
	# original 'open INPUT, "<$filename" || die' could never die (|| bound
	# to the string); use a checked three-arg open instead.
	open(my $input, '<', $filename) or die "can't open $filename";
	my $output = '';
	while (my $line = <$input>)
	{
		if ($line =~ m/\#include\s+\"(.*)\"/i)
		{
			$output .= ReadInputFileWithIncludes($1);
		}
		else
		{
			$output .= $line;
		}
	}
	close $input;
	return $output;
}

# CRC32 of a source file with includes expanded.  Relies on crc32()
# (String::CRC32) having been loaded by the calling script.
sub GetCRCFromSourceFile
{
	my $filename = shift;
	my $data = &ReadInputFileWithIncludes($filename);
	my $crc = crc32($data);
	return $crc;
}

# Extract the source-CRC field from a compiled .vcs shader file header.
# Returns 0 (forcing a mismatch upstream) when the header can't be read
# or the version is unsupported.
sub GetCRCFromVCSFile
{
	my $filename = shift;
	open(my $fp, '<', $filename) or die "GetCRCFromVCSFile: can't open file $filename\n";
	binmode($fp);

	# unpack arguments; .360. (Xbox 360) files are big endian
	my $sInt = "i";
	my $uInt = "I";
	if ($filename =~ m/\.360\./)
	{
		# Change arguments to "big endian long"
		$sInt = "N";
		$uInt = "N";
	}

	my $header;
	my $headerlen = 7 * 4;
	# (original "read ... 7 * 4 || die" could never die: || bound to 28)
	unless (defined(read($fp, $header, $headerlen)) && length($header) == $headerlen)
	{
		print STDERR "updateshaders.pl:GetCRCFromVCSFile: can't read header for $filename\n";
		close($fp);
		return 0;
	}

	my ($version, $numCombos, $numDynamicCombos, $flags, $centroidMask, $refSize, $crc) =
		unpack "$sInt$sInt$sInt$uInt$uInt$uInt$uInt", $header;
	unless ($version == 4 || $version == 5 || $version == 6)
	{
		print STDERR "ERROR: GetCRCFromVCSFile: $filename is version $version\n";
		close($fp);	# original leaked the handle on this path
		return 0;
	}
	close($fp);
	return $crc;
}

# Compare the CRC embedded in a compiled .vcs file against the CRC of its
# source (with includes inlined).  Returns true when they match.  When
# $warn is set, explains missing files and mismatches on stdout.
sub CheckCRCAgainstTarget
{
	my $srcFileName = shift;
	my $vcsFileName = shift;
	my $warn = shift;

	if (!(-e $srcFileName))
	{
		print "$srcFileName missing\n" if ($warn);
		return 0;
	}
	if (!(-e $vcsFileName))
	{
		print "$vcsFileName missing\n" if ($warn);
		return 0;
	}

	my $vcsCRC = &GetCRCFromVCSFile($vcsFileName);
	my $srcCRC = &GetCRCFromSourceFile($srcFileName);
	if ($warn && ($vcsCRC != $srcCRC))
	{
		print "$vcsFileName checksum ($vcsCRC) != $srcFileName checksum: ($srcCRC)\n";
	}
	return $vcsCRC == $srcCRC;
}

1;
SCell555/hl2-asw-port
src/devtools/bin/valve_perl_helpers.pl
Perl
mit
10,926
:- ensure_loaded('$REGULUS/PrologLib/compatibility').

:- module(ebl_grammar_probs,
	  [sents_file_or_files_to_grammar_probs_data_file/3,
	   sents_file_to_grammar_probs_data_file/3]
	 ).

/*
sents_file_to_grammar_probs_data_file(+InFile, +GrammarName, +OutFile)

1. InFile	Annotated wavfiles file.
2. GrammarName	Prolog atom
3. OutFile	File suitable for use in PFSG training. Each line is of form

<GrammarName> <Transcription>
*/

:- use_module('$REGULUS/Prolog/regulus_utilities').
:- use_module('$REGULUS/PrologLib/utilities').

:- use_module(library(lists)).

%--------------------------------------------------------------------------------------------

% Convenience wrapper: process a single sents file by delegating to the
% list-of-files version.
sents_file_to_grammar_probs_data_file(InFile, BaseGrammarName, OutFile) :-
	sents_file_or_files_to_grammar_probs_data_file([InFile], BaseGrammarName, OutFile).

% Open the output file, stream every input file through it, close it, and
% report the total record count.  The count is threaded through the helper
% predicates as a CountIn-CountOut accumulator pair, starting from 0.
sents_file_or_files_to_grammar_probs_data_file(InFiles, BaseGrammarName, OutFile) :-
	absolute_file_name(OutFile, AbsOutFile),
	open(AbsOutFile, write, SOut),
	sents_file_or_files_to_grammar_probs_data_stream(InFiles, BaseGrammarName, SOut, 0-Count),
	close(SOut),
	format('~N~n-- Grammar probs data file ~w written (~d records)~n', [AbsOutFile, Count]),
	!.
% Fallback clause: reached only if the clause above failed; report the bad
% call and fail.
sents_file_or_files_to_grammar_probs_data_file(InFiles, BaseGrammarName, OutFile) :-
	format2error('~N*** Error: bad call: ~w~n',
		     [sents_file_or_files_to_grammar_probs_data_file(InFiles, BaseGrammarName, OutFile)]),
	fail.

% No more files: count is passed through unchanged.
sents_file_or_files_to_grammar_probs_data_stream([], _BaseGrammarName, _SOut, CountIn-CountIn) :-
	!.
% A single non-list argument is treated as one file: wrap it and retry.
sents_file_or_files_to_grammar_probs_data_stream(File, BaseGrammarName, SOut, CountIn-CountOut) :-
	\+ is_list(File),
	!,
	sents_file_or_files_to_grammar_probs_data_stream([File], BaseGrammarName, SOut, CountIn-CountOut).
% Process the head file, then recurse on the tail, threading the count.
sents_file_or_files_to_grammar_probs_data_stream([F | R], BaseGrammarName, SOut, CountIn-CountOut) :-
	single_sents_file_to_grammar_probs_data_stream(F, BaseGrammarName, SOut, CountIn-CountNext),
	!,
	sents_file_or_files_to_grammar_probs_data_stream(R, BaseGrammarName, SOut, CountNext-CountOut).
% Open one sents file for reading, stream its records to SOut, close it.
single_sents_file_to_grammar_probs_data_stream(InFile, BaseGrammarName, SOut, CountIn-CountOut) :-
	absolute_file_name(InFile, AbsInFile),
	format('~N~n-- Reading sents file ~w~n', [AbsInFile]),
	open(AbsInFile, read, SIn),
	sents_file_to_grammar_probs_data_stream(SIn, BaseGrammarName, SOut, CountIn-CountOut),
	close(SIn),
	!.

% Read the next Prolog term from SIn and dispatch on its form.
sents_file_to_grammar_probs_data_stream(SIn, BaseGrammarName, SOut, CIn-COut) :-
	read(SIn, T),
	sents_file_to_grammar_probs_data_stream1(T, SIn, BaseGrammarName, SOut, CIn-COut).

% End of file: stop; count is passed through unchanged.
sents_file_to_grammar_probs_data_stream1(end_of_file, _SIn, _BaseGrammarName, _SOut, CIn-CIn) :-
	!.
% Records recognised by fake_sent_record/1 are skipped (not counted).
sents_file_to_grammar_probs_data_stream1(Record, SIn, BaseGrammarName, SOut, CIn-COut) :-
	fake_sent_record(Record),
	!,
	sents_file_to_grammar_probs_data_stream(SIn, BaseGrammarName, SOut, CIn-COut).
% A sent/N record, N >= 1: arg 1 is the transcription atom; arg 2, when
% present, is the list of grammar IDs (defaulting to [default]).  Emit one
% output line per grammar ID, then continue with the next term.
sents_file_to_grammar_probs_data_stream1(Record, SIn, BaseGrammarName, SOut, CIn-COut) :-
	functor(Record, sent, N),
	N >= 1,
	arg(1, Record, TranscriptionAtom),
	(   N = 1 ->
	    GrammarIDs = [default]
	;
	    arg(2, Record, GrammarIDs)
	),
	sents_file_to_grammar_probs_data_stream2(GrammarIDs, TranscriptionAtom, BaseGrammarName, SOut, CIn-CNext),
	!,
	sents_file_to_grammar_probs_data_stream(SIn, BaseGrammarName, SOut, CNext-COut).
% Fallback clause: a malformed record; report it and fail.
sents_file_to_grammar_probs_data_stream1(Record, SIn, BaseGrammarName, SOut, C) :-
	format2error('~N*** Error: bad call: ~w~n',
		     [sents_file_to_grammar_probs_data_stream1(Record, SIn, BaseGrammarName, SOut, C)]),
	fail.

% Emit one output line per grammar ID, threading the record count.
sents_file_to_grammar_probs_data_stream2([], _TranscriptionAtom, _BaseGrammarName, _SOut, CIn-CIn) :-
	!.
sents_file_to_grammar_probs_data_stream2([GrammarID | GrammarIDs], TranscriptionAtom, BaseGrammarName, SOut, CIn-COut) :-
	sents_file_to_grammar_probs_data_stream3(GrammarID, TranscriptionAtom, BaseGrammarName, SOut, CIn-CNext),
	!,
	sents_file_to_grammar_probs_data_stream2(GrammarIDs, TranscriptionAtom, BaseGrammarName, SOut, CNext-COut).
% Write one training line and increment the record count by one.
% For the 'default' grammar the line is "<BaseGrammarName> <Transcription>";
% otherwise the grammar ID is appended to the name with a double underscore:
% "<BaseGrammarName>__<GrammarID> <Transcription>".
sents_file_to_grammar_probs_data_stream3(GrammarID, TranscriptionAtom, BaseGrammarName, SOut, CIn-COut) :-
	(   GrammarID = default ->
	    format(SOut, '~N~w ~w~n', [BaseGrammarName, TranscriptionAtom])
	;
	    format(SOut, '~N~w__~w ~w~n', [BaseGrammarName, GrammarID, TranscriptionAtom])
	),
	COut is CIn + 1,
	!.
TeamSPoon/logicmoo_workspace
packs_sys/logicmoo_nlu/ext/regulus/Prolog/ebl_grammar_probs.pl
Perl
mit
4,292
#!/usr/bin/env perl
#
# Merge per-library BAM percent-identity summary files (one per library,
# named <lib>.pid_summary.txt, each a TSV with header "id  reads
# ave_identity") into a single combined TSV on STDOUT with one row per
# contig id and one reads_<lib> and pid_<lib> column per library.
use strict;
use warnings;

my @pidfiles = @ARGV;

my %alllibs;        # library name -> 1 (set of all libraries seen)
my %ctglibreads;    # contig id -> { library -> read count }
my %ctglibpid;      # contig id -> { library -> average identity }

unless(scalar(@pidfiles) > 1) {
    die "Usage: $0 [BAM pid summary files...]\n";
}

foreach my $file (@pidfiles) {
    # Library name = basename minus the .pid_summary.txt suffix.
    my $lib = $file;
    $lib =~ s/.+\///g;
    $lib =~ s/\.pid_summary\.txt$//;
    $alllibs{$lib} = 1;

    # Three-arg open with a lexical handle (was a 2-arg open on a
    # bareword handle, which is vulnerable to mode injection via the
    # filename).
    open my $in, '<', $file or die "Unable to open file $file\n";
    my $fl = 1;    # true only while reading the first (header) line
    while(my $line = <$in>) {
        chomp $line;
        if($fl) {
            # Validate the expected header exactly.
            unless($line eq join("\t", ('id','reads','ave_identity'))) {
                die "Unrecognized format for BAM pid summary file: $file\n";
            }
        } else {
            my ($id, $reads, $pid) = split(/\t/, $line);
            $ctglibreads{$id}{$lib} = $reads;
            $ctglibpid{$id}{$lib} = $pid;
        }
        $fl = 0;
    }
    close $in;
}

my @liblist = sort keys %alllibs;

# Header row: id, then reads_<lib> columns, then pid_<lib> columns.
print "id";
foreach my $lib (@liblist) {
    print "\treads_".$lib;
}
foreach my $lib (@liblist) {
    print "\tpid_".$lib;
}
print "\n";

# One row per contig; contigs absent from a library get empty fields
# (previously these printed undef, which warns under 'use warnings').
foreach my $id (sort keys %ctglibreads) {
    print $id;
    foreach my $lib (@liblist) {
        my $reads = $ctglibreads{$id}{$lib};
        print "\t".(defined $reads ? $reads : '');
    }
    foreach my $lib (@liblist) {
        my $pid = $ctglibpid{$id}{$lib};
        print "\t".(defined $pid ? $pid : '');
    }
    print "\n";
}
mccrowjp/utilities
bam_pid_summary_merge.pl
Perl
mit
1,275