code stringlengths 2 1.05M | repo_name stringlengths 5 101 | path stringlengths 4 991 | language stringclasses 3 values | license stringclasses 5 values | size int64 2 1.05M |
|---|---|---|---|---|---|
# test/cms-test.pl
# Written by Dr Stephen N Henson (steve@openssl.org) for the OpenSSL
# project.
#
# ====================================================================
# Copyright (c) 2008 The OpenSSL Project. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# 3. All advertising materials mentioning features or use of this
# software must display the following acknowledgment:
# "This product includes software developed by the OpenSSL Project
# for use in the OpenSSL Toolkit. (http://www.OpenSSL.org/)"
#
# 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
# endorse or promote products derived from this software without
# prior written permission. For written permission, please contact
# licensing@OpenSSL.org.
#
# 5. Products derived from this software may not be called "OpenSSL"
# nor may "OpenSSL" appear in their names without prior written
# permission of the OpenSSL Project.
#
# 6. Redistributions of any form whatsoever must retain the following
# acknowledgment:
# "This product includes software developed by the OpenSSL Project
# for use in the OpenSSL Toolkit (http://www.OpenSSL.org/)"
#
# THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
# EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
# ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
# ====================================================================
# CMS, PKCS7 consistency test script. Run extensive tests on
# OpenSSL PKCS#7 and CMS implementations.
# Locate the openssl binary for the current platform and build the command
# prefixes used by every test below.  All command output is redirected to
# cms.out/cms.err via $redir.
my $ossl_path;
my $redir = " 2> cms.err > cms.out";
# Make VMS work
if ( $^O eq "VMS" && -f "OSSLX:openssl.exe" ) {
$ossl_path = "pipe mcr OSSLX:openssl";
}
# Make MSYS work
elsif ( $^O eq "MSWin32" && -f "../apps/openssl.exe" ) {
$ossl_path = "cmd /c ..\\apps\\openssl";
}
elsif ( -f "../apps/openssl$ENV{EXE_EXT}" ) {
$ossl_path = "../util/shlib_wrap.sh ../apps/openssl";
}
elsif ( -f "..\\out32dll\\openssl.exe" ) {
$ossl_path = "..\\out32dll\\openssl.exe";
}
elsif ( -f "..\\out32\\openssl.exe" ) {
$ossl_path = "..\\out32\\openssl.exe";
}
else {
die "Can't find OpenSSL executable";
}
# Command prefixes for the two apps under test (note trailing space).
my $pk7cmd = "$ossl_path smime ";
my $cmscmd = "$ossl_path cms ";
# Directory holding the test certificates/keys.
my $smdir = "smime-certs";
# Abort the whole run on the first failure.
my $halt_err = 1;
# Failure counter, incremented by run_smime_tests().
my $badcmd = 0;
# True when testing the OpenSSL 0.9.8 cms backport (reduced feature set).
my $ossl8 = `$ossl_path version -v` =~ /0\.9\.8/;
# Each test case is a 3-element array ref:
#   [ description, args for the generating command, args for the verifying
#     command ]; run_smime_tests() prepends the "openssl cms"/"openssl smime"
# command prefix to the arg strings.
# Tests runnable by both the cms and smime apps (used for cross-checks):
my @smime_pkcs7_tests = (
[
"signed content DER format, RSA key",
"-sign -in smcont.txt -outform \"DER\" -nodetach"
. " -certfile $smdir/smroot.pem"
. " -signer $smdir/smrsa1.pem -out test.cms",
"-verify -in test.cms -inform \"DER\" "
. " \"-CAfile\" $smdir/smroot.pem -out smtst.txt"
],
[
"signed detached content DER format, RSA key",
"-sign -in smcont.txt -outform \"DER\""
. " -signer $smdir/smrsa1.pem -out test.cms",
"-verify -in test.cms -inform \"DER\" "
. " \"-CAfile\" $smdir/smroot.pem -out smtst.txt -content smcont.txt"
],
[
"signed content test streaming BER format, RSA",
"-sign -in smcont.txt -outform \"DER\" -nodetach"
. " -stream -signer $smdir/smrsa1.pem -out test.cms",
"-verify -in test.cms -inform \"DER\" "
. " \"-CAfile\" $smdir/smroot.pem -out smtst.txt"
],
[
"signed content DER format, DSA key",
"-sign -in smcont.txt -outform \"DER\" -nodetach"
. " -signer $smdir/smdsa1.pem -out test.cms",
"-verify -in test.cms -inform \"DER\" "
. " \"-CAfile\" $smdir/smroot.pem -out smtst.txt"
],
[
"signed detached content DER format, DSA key",
"-sign -in smcont.txt -outform \"DER\""
. " -signer $smdir/smdsa1.pem -out test.cms",
"-verify -in test.cms -inform \"DER\" "
. " \"-CAfile\" $smdir/smroot.pem -out smtst.txt -content smcont.txt"
],
[
"signed detached content DER format, add RSA signer",
"-resign -inform \"DER\" -in test.cms -outform \"DER\""
. " -signer $smdir/smrsa1.pem -out test2.cms",
"-verify -in test2.cms -inform \"DER\" "
. " \"-CAfile\" $smdir/smroot.pem -out smtst.txt -content smcont.txt"
],
[
"signed content test streaming BER format, DSA key",
"-sign -in smcont.txt -outform \"DER\" -nodetach"
. " -stream -signer $smdir/smdsa1.pem -out test.cms",
"-verify -in test.cms -inform \"DER\" "
. " \"-CAfile\" $smdir/smroot.pem -out smtst.txt"
],
[
"signed content test streaming BER format, 2 DSA and 2 RSA keys",
"-sign -in smcont.txt -outform \"DER\" -nodetach"
. " -signer $smdir/smrsa1.pem -signer $smdir/smrsa2.pem"
. " -signer $smdir/smdsa1.pem -signer $smdir/smdsa2.pem"
. " -stream -out test.cms",
"-verify -in test.cms -inform \"DER\" "
. " \"-CAfile\" $smdir/smroot.pem -out smtst.txt"
],
[
"signed content test streaming BER format, 2 DSA and 2 RSA keys, no attributes",
"-sign -in smcont.txt -outform \"DER\" -noattr -nodetach"
. " -signer $smdir/smrsa1.pem -signer $smdir/smrsa2.pem"
. " -signer $smdir/smdsa1.pem -signer $smdir/smdsa2.pem"
. " -stream -out test.cms",
"-verify -in test.cms -inform \"DER\" "
. " \"-CAfile\" $smdir/smroot.pem -out smtst.txt"
],
[
"signed content test streaming S/MIME format, 2 DSA and 2 RSA keys",
"-sign -in smcont.txt -nodetach"
. " -signer $smdir/smrsa1.pem -signer $smdir/smrsa2.pem"
. " -signer $smdir/smdsa1.pem -signer $smdir/smdsa2.pem"
. " -stream -out test.cms",
"-verify -in test.cms " . " \"-CAfile\" $smdir/smroot.pem -out smtst.txt"
],
[
"signed content test streaming multipart S/MIME format, 2 DSA and 2 RSA keys",
"-sign -in smcont.txt"
. " -signer $smdir/smrsa1.pem -signer $smdir/smrsa2.pem"
. " -signer $smdir/smdsa1.pem -signer $smdir/smdsa2.pem"
. " -stream -out test.cms",
"-verify -in test.cms " . " \"-CAfile\" $smdir/smroot.pem -out smtst.txt"
],
[
"enveloped content test streaming S/MIME format, 3 recipients",
"-encrypt -in smcont.txt"
. " -stream -out test.cms"
. " $smdir/smrsa1.pem $smdir/smrsa2.pem $smdir/smrsa3.pem ",
"-decrypt -recip $smdir/smrsa1.pem -in test.cms -out smtst.txt"
],
[
"enveloped content test streaming S/MIME format, 3 recipients, 3rd used",
"-encrypt -in smcont.txt"
. " -stream -out test.cms"
. " $smdir/smrsa1.pem $smdir/smrsa2.pem $smdir/smrsa3.pem ",
"-decrypt -recip $smdir/smrsa3.pem -in test.cms -out smtst.txt"
],
[
"enveloped content test streaming S/MIME format, 3 recipients, key only used",
"-encrypt -in smcont.txt"
. " -stream -out test.cms"
. " $smdir/smrsa1.pem $smdir/smrsa2.pem $smdir/smrsa3.pem ",
"-decrypt -inkey $smdir/smrsa3.pem -in test.cms -out smtst.txt"
],
[
"enveloped content test streaming S/MIME format, AES-256 cipher, 3 recipients",
"-encrypt -in smcont.txt"
. " -aes256 -stream -out test.cms"
. " $smdir/smrsa1.pem $smdir/smrsa2.pem $smdir/smrsa3.pem ",
"-decrypt -recip $smdir/smrsa1.pem -in test.cms -out smtst.txt"
],
);
# Tests only supported by the cms app (keyid, receipts, KEK, EncryptedData,
# data content type) -- run cms against cms only:
my @smime_cms_tests = (
[
"signed content test streaming BER format, 2 DSA and 2 RSA keys, keyid",
"-sign -in smcont.txt -outform \"DER\" -nodetach -keyid"
. " -signer $smdir/smrsa1.pem -signer $smdir/smrsa2.pem"
. " -signer $smdir/smdsa1.pem -signer $smdir/smdsa2.pem"
. " -stream -out test.cms",
"-verify -in test.cms -inform \"DER\" "
. " \"-CAfile\" $smdir/smroot.pem -out smtst.txt"
],
[
"signed content test streaming PEM format, 2 DSA and 2 RSA keys",
"-sign -in smcont.txt -outform PEM -nodetach"
. " -signer $smdir/smrsa1.pem -signer $smdir/smrsa2.pem"
. " -signer $smdir/smdsa1.pem -signer $smdir/smdsa2.pem"
. " -stream -out test.cms",
"-verify -in test.cms -inform PEM "
. " \"-CAfile\" $smdir/smroot.pem -out smtst.txt"
],
[
"signed content MIME format, RSA key, signed receipt request",
"-sign -in smcont.txt -signer $smdir/smrsa1.pem -nodetach"
. " -receipt_request_to test\@openssl.org -receipt_request_all"
. " -out test.cms",
"-verify -in test.cms "
. " \"-CAfile\" $smdir/smroot.pem -out smtst.txt"
],
[
"signed receipt MIME format, RSA key",
"-sign_receipt -in test.cms"
. " -signer $smdir/smrsa2.pem"
. " -out test2.cms",
"-verify_receipt test2.cms -in test.cms"
. " \"-CAfile\" $smdir/smroot.pem"
],
[
"enveloped content test streaming S/MIME format, 3 recipients, keyid",
"-encrypt -in smcont.txt"
. " -stream -out test.cms -keyid"
. " $smdir/smrsa1.pem $smdir/smrsa2.pem $smdir/smrsa3.pem ",
"-decrypt -recip $smdir/smrsa1.pem -in test.cms -out smtst.txt"
],
[
"enveloped content test streaming PEM format, KEK",
"-encrypt -in smcont.txt -outform PEM -aes128"
. " -stream -out test.cms "
. " -secretkey 000102030405060708090A0B0C0D0E0F "
. " -secretkeyid C0FEE0",
"-decrypt -in test.cms -out smtst.txt -inform PEM"
. " -secretkey 000102030405060708090A0B0C0D0E0F "
. " -secretkeyid C0FEE0"
],
[
"enveloped content test streaming PEM format, KEK, key only",
"-encrypt -in smcont.txt -outform PEM -aes128"
. " -stream -out test.cms "
. " -secretkey 000102030405060708090A0B0C0D0E0F "
. " -secretkeyid C0FEE0",
"-decrypt -in test.cms -out smtst.txt -inform PEM"
. " -secretkey 000102030405060708090A0B0C0D0E0F "
],
[
"data content test streaming PEM format",
"-data_create -in smcont.txt -outform PEM -nodetach"
. " -stream -out test.cms",
"-data_out -in test.cms -inform PEM -out smtst.txt"
],
[
"encrypted content test streaming PEM format, 128 bit RC2 key",
"\"-EncryptedData_encrypt\" -in smcont.txt -outform PEM"
. " -rc2 -secretkey 000102030405060708090A0B0C0D0E0F"
. " -stream -out test.cms",
"\"-EncryptedData_decrypt\" -in test.cms -inform PEM "
. " -secretkey 000102030405060708090A0B0C0D0E0F -out smtst.txt"
],
[
"encrypted content test streaming PEM format, 40 bit RC2 key",
"\"-EncryptedData_encrypt\" -in smcont.txt -outform PEM"
. " -rc2 -secretkey 0001020304"
. " -stream -out test.cms",
"\"-EncryptedData_decrypt\" -in test.cms -inform PEM "
. " -secretkey 0001020304 -out smtst.txt"
],
[
"encrypted content test streaming PEM format, triple DES key",
"\"-EncryptedData_encrypt\" -in smcont.txt -outform PEM"
. " -des3 -secretkey 000102030405060708090A0B0C0D0E0F1011121314151617"
. " -stream -out test.cms",
"\"-EncryptedData_decrypt\" -in test.cms -inform PEM "
. " -secretkey 000102030405060708090A0B0C0D0E0F1011121314151617"
. " -out smtst.txt"
],
[
"encrypted content test streaming PEM format, 128 bit AES key",
"\"-EncryptedData_encrypt\" -in smcont.txt -outform PEM"
. " -aes128 -secretkey 000102030405060708090A0B0C0D0E0F"
. " -stream -out test.cms",
"\"-EncryptedData_decrypt\" -in test.cms -inform PEM "
. " -secretkey 000102030405060708090A0B0C0D0E0F -out smtst.txt"
],
);
# Compression tests; run only when openssl is built with zlib support.
my @smime_cms_comp_tests = (
[
"compressed content test streaming PEM format",
"-compress -in smcont.txt -outform PEM -nodetach"
. " -stream -out test.cms",
"-uncompress -in test.cms -inform PEM -out smtst.txt"
]
);
# Cross-check the two implementations in both directions, then check the
# cms app against itself (including cms-only features).
print "CMS => PKCS#7 compatibility tests\n";
run_smime_tests( \$badcmd, \@smime_pkcs7_tests, $cmscmd, $pk7cmd );
print "CMS <= PKCS#7 compatibility tests\n";
run_smime_tests( \$badcmd, \@smime_pkcs7_tests, $pk7cmd, $cmscmd );
print "CMS <=> CMS consistency tests\n";
run_smime_tests( \$badcmd, \@smime_pkcs7_tests, $cmscmd, $cmscmd );
run_smime_tests( \$badcmd, \@smime_cms_tests, $cmscmd, $cmscmd );
# Compression tests need a zlib-enabled build ("openssl version -f").
if ( `$ossl_path version -f` =~ /ZLIB/ ) {
run_smime_tests( \$badcmd, \@smime_cms_comp_tests, $cmscmd, $cmscmd );
}
else {
print "Zlib not supported: compression tests skipped\n";
}
print "Running modified tests for OpenSSL 0.9.8 cms backport\n" if($ossl8);
if ($badcmd) {
print "$badcmd TESTS FAILED!!\n";
}
else {
print "ALL TESTS SUCCESSFUL.\n";
}
# Clean up test artifacts.
unlink "test.cms";
unlink "test2.cms";
unlink "smtst.txt";
unlink "cms.out";
unlink "cms.err";
# Run a list of S/MIME test cases.  Each case is a 3-element array ref:
# [ test name, options for the generating command, options for the
#   verifying command ].  $scmd/$vcmd are full command prefixes such as
# "openssl cms " or "openssl smime "; $rv is a reference to the failure
# counter.  Command output goes through the global $redir; the globals
# $ossl8 and $halt_err control 0.9.8 work-arounds and early abort.
sub run_smime_tests {
    my ( $rv, $aref, $scmd, $vcmd ) = @_;
    # Lexical loop variable (the original used an implicit global $smtst).
    foreach my $smtst (@$aref) {
        my ( $tnam, $rscmd, $rvcmd ) = @$smtst;
        if ($ossl8)
        {
            # Skip smime resign: 0.9.8 smime doesn't support -resign
            next if ($scmd =~ /smime/ && $rscmd =~ /-resign/);
            # Disable streaming: option not supported in 0.9.8
            $tnam =~ s/streaming//;
            $rscmd =~ s/-stream//;
            $rvcmd =~ s/-stream//;
        }
        # Generate the CMS/PKCS#7 structure.
        system("$scmd$rscmd$redir");
        if ($?) {
            print "$tnam: generation error\n";
            $$rv++;
            exit 1 if $halt_err;
            next;
        }
        # Verify/decrypt it with the counterpart command.
        system("$vcmd$rvcmd$redir");
        if ($?) {
            print "$tnam: verify error\n";
            $$rv++;
            exit 1 if $halt_err;
            next;
        }
        # The recovered payload must match the original content exactly.
        if (!cmp_files("smtst.txt", "smcont.txt")) {
            print "$tnam: content verify error\n";
            $$rv++;
            exit 1 if $halt_err;
            next;
        }
        print "$tnam: OK\n";
    }
}
# Byte-wise comparison of two files.  Returns 1 if their contents are
# identical, 0 on any difference or if either file cannot be opened or
# read.
sub cmp_files {
    my ( $f1, $f2 ) = @_;
    my ( $fp1, $fp2 );
    my ( $rd1, $rd2 );
    # Three-argument open with lexical handles (was 2-arg "<$f1", which
    # misparses filenames containing mode characters).
    if ( !open( $fp1, '<', $f1 ) ) {
        print STDERR "Can't Open file $f1\n";
        return 0;
    }
    if ( !open( $fp2, '<', $f2 ) ) {
        print STDERR "Can't Open file $f2\n";
        close $fp1;    # don't leak the first handle
        return 0;
    }
    binmode $fp1;
    binmode $fp2;
    my $ret = 0;
    for ( ; ; ) {
        # my-declared read counts (were implicit globals).  sysread
        # returns undef on a read error; treat that as a mismatch.
        my $n1 = sysread $fp1, $rd1, 4096;
        my $n2 = sysread $fp2, $rd2, 4096;
        last if ( !defined $n1 || !defined $n2 || $n1 != $n2 );
        last if ( $rd1 ne $rd2 );
        if ( $n1 == 0 ) {    # simultaneous EOF: files are identical
            $ret = 1;
            last;
        }
    }
    close $fp1;
    close $fp2;
    return $ret;
}
| jiangzhu1212/oooii | Ouroboros/External/OpenSSL/openssl-1.0.0e/test/cms-test.pl | Perl | mit | 16,073 |
#!/usr/bin/perl
# (shebang fixed: was "#!usr/bin/perl", missing the leading slash, so the
# script could not be executed directly)
# Two files need to be generated in order to analyze the two strains of
# Prevotella Intermedia: one is a list of scores between protein-ORF pairs;
# the other is a list of indels and mismatches between the two strains.
# The scores list would contain the ids, total length, aligned length,
# unaligned length, identities, mismatches, and gaps.
# The indels list would contain the ids, location of indels,
# and the nucleic acid on each strain.
# This script would only generate the scores list.
use strict;
use warnings;
# Return every offset at which $needle occurs within $haystack, counting
# overlapping occurrences (the search resumes one character past each
# hit, not past its end).
sub find_index {
    my ($haystack, $needle) = @_;
    my @positions;
    my $from = 0;
    for ( ; ; ) {
        my $hit = index $haystack, $needle, $from;
        last if $hit < 0;
        push @positions, $hit;
        $from = $hit + 1;
    }
    return @positions;
}
# The ids of ORF-protein pairs should be stored for later reference.
# Locations of each sequence should be stored to pinpoint the location
# of a certain indel.
# Length will be calculated using the raw data.
# The aligned portion and the unaligned portion will be separated so that
# the length of each portion can be calculated.
my %pairs;            # shotgun (PI25611) id -> reference (PI17) gi number
my $shot_id;          # id of the shotgun ORF currently being parsed
my $ref_id;           # gi number of the current reference protein
my %shot_lens;        # shotgun id -> total sequence length
my %ref_lens;         # reference id -> total sequence length
my $flag = 0;         # parser state: which line of the 5-line record is next
my $shot_orf;         # current shotgun sequence (used to classify gaps)
my $align_result;     # current Needleman-Wunsch alignment row
my %alens;            # shotgun id -> aligned-region length
my %shot_unlens;      # shotgun id -> unaligned length on the shotgun side
my %ref_unlens;       # reference id -> unaligned length on the reference side
my %iden_counts;      # shotgun id -> number of identities ('|') in alignment
my %mis_counts;       # shotgun id -> number of mismatches ('*') in alignment
my %gap_counts_shot;  # shotgun id -> gaps ('-') on the shotgun strand
my %gap_counts_ref;   # shotgun id -> gaps on the reference strand
my @mismatches;       # indices of '*' in the current alignment row
my @gaps;             # indices of ' ' in the current alignment row
# Parse the alignment report: records are five lines each --
#   (1) shotgun (PI25611) ORF header, (2) reference (PI17) header,
#   (3) ORF sequence, (4) Needleman-Wunsch alignment row
#   ('|' identity, '*' mismatch, ' ' gap/unaligned), (5) reference sequence.
# $flag tracks which line of the record we are on (-1 = skip separator).
open my $prot_fh, '<', 'amino.global.aligned.txt'
    or die "Can't open amino.global.aligned.txt: $!";
while (<$prot_fh>) {
    chomp;
    my $line = $_;
    if ($flag == -1) {
        $flag = 0;
    # the first line marks PI25611 and its id and location
    } elsif (/>(?<sid>pint\d+_c_\d+_\d+) [^\[]+\[(?<sloc>\d+) - (?<eloc>\d+)/) {
        $shot_id = $+{sid};
    # the second line marks PI17 and its id
    } elsif (/>gi\|(?<refid>\d+)/) {
        $ref_id = $+{refid};
        $pairs{$shot_id} = $ref_id;
    # the third line contains the contig sequences,
    # which should be stored for later use
    } elsif ($flag == 0) {
        $shot_orf = $line;
        $shot_lens{$shot_id} = length $line;
        $flag = 1;
    # the fourth line contains the result of Needleman-Wunsch
    } elsif ($flag == 1) {
        # protein length cannot be measured by nucleotidal locations;
        # mismatch and gap indices will later be used for listing indels
        @mismatches = find_index($line, '*');
        @gaps       = find_index($line, ' ');
        $flag = 2;
        $align_result = $line;
        my $align = $line;
        $align =~ s/^ +| +$//g;    # trim the unaligned flanks
        $alens{$shot_id}       = length $align;
        $shot_unlens{$shot_id} = $shot_lens{$shot_id} - length($align);
        # s///g in scalar context returns the number of substitutions,
        # i.e. the number of '|' / '*' characters.
        $iden_counts{$shot_id} = ($align =~ s/\|/\|/g) || 0;
        $mis_counts{$shot_id}  = ($align =~ s/\*/\*/g) || 0;
    # the fifth line contains the reference sequences, which is needed to
    # complete the indel lists.
    } else {
        $ref_lens{$ref_id}   = length $line;
        $ref_unlens{$ref_id} = $ref_lens{$ref_id} - $alens{$shot_id};
        # Width of the unaligned flanks.  Test the match result directly so
        # a failed match yields 0: the original "defined $1" test could pick
        # up a stale $1 left over from an earlier successful regexp.
        my $front_sp = $align_result =~ m/(^ +)/ ? length $1 : 0;
        my $tail_sp  = $align_result =~ m/( +$)/ ? length $1 : 0;
        $gap_counts_shot{$shot_id} = 0;
        $gap_counts_ref{$shot_id}  = 0;
        # Classify each gap inside the aligned region by which strand
        # carries the '-' character.
        foreach my $gap (@gaps) {
            if ($gap >= $front_sp && ($gap < ((length $align_result) - $tail_sp))) {
                if (substr($shot_orf, $gap, 1) eq '-') {
                    $gap_counts_shot{$shot_id}++;
                } else {
                    $gap_counts_ref{$shot_id}++;
                }
            }
        }
        $flag = -1;
    }
}
close $prot_fh;
# Write the tab-separated scores table.
# NOTE(review): the first four header labels (PI17, PI25611, PI17 Length,
# PI25611 Length) appear swapped relative to the printed values ($sid is
# the PI25611 ORF id and $rkey the PI17 gi number).  Kept as-is pending
# confirmation, since downstream consumers may rely on the current order.
open my $scores_fh, '>', 'PI.amino.scores.txt'
    or die "Can't open PI.amino.scores.txt: $!";
print {$scores_fh} "PI17\tPI25611\tPI17 Length\tPI25611 Length\tAligned\t";
print {$scores_fh} "PI17 Nonaligned\tPI25611 Nonaligned\tIdentities\t";
print {$scores_fh} "Mismatches\tPI17 Gaps\tPI25611 Gaps\n";
foreach my $sid (sort keys %pairs) {
    my $rkey = $pairs{$sid};
    print {$scores_fh} "$sid\t$rkey\t$shot_lens{$sid}\t$ref_lens{$rkey}\t";
    print {$scores_fh} "$alens{$sid}\t$ref_unlens{$rkey}\t$shot_unlens{$sid}\t";
    # Percentages are relative to the aligned-region length.
    my $iden_per    = ($iden_counts{$sid} / $alens{$sid} * 100);
    my $mis_per     = ($mis_counts{$sid} / $alens{$sid} * 100);
    my $gap_shotper = ($gap_counts_shot{$sid} / $alens{$sid} * 100);
    my $gap_refper  = ($gap_counts_ref{$sid} / $alens{$sid} * 100);
    my $format = "%.2f\t%.2f\t%.2f\t%.2f\n";
    printf {$scores_fh} $format, $iden_per, $mis_per, $gap_refper, $gap_shotper;
}
# Buffered-write errors only surface at close, so check it.
close $scores_fh or die "Can't close PI.amino.scores.txt: $!";
| gtblack/DNA.analysis | scripts/ascores_indel.pl | Perl | mit | 4,153 |
# Interhack message-recoloring rules for NetHack output.  Each
# "recolor qr/pattern/ => color" call tells Interhack to display any
# message matching the pattern in the given color.  Rough palette:
# darkgray = ignorable noise; green/blue/cyan/purple = good news
# (resists, intrinsics, skills, levels, wishes); yellow = noteworthy;
# red = danger; bred (bright red) = immediately life-threatening.
# The {{{ / }}} markers are vim fold delimiters.
# messages to ignore {{{
# boulders {{{
recolor qr/With great effort you move the boulder./ => "darkgray";
recolor qr/You try to move the boulder, but in vain./ => "darkgray";
# }}}
# pets {{{
recolor qr/You displaced (?:your )?[^.?!]*./ => "darkgray";
recolor qr/You stop[^.?!]*. (?:Your )?.*? is in the way!/ => "darkgray";
# }}}
# fountains {{{
recolor qr/You hear water falling on coins./ => "darkgray";
recolor qr/You hear bubbling water./ => "darkgray";
recolor qr/You hear the splashing of a naiad./ => "darkgray";
# }}}
# vaults {{{
recolor qr/You hear someone counting money./ => "darkgray";
recolor qr/You hear the footsteps of a guard on patrol./ => "darkgray";
# }}}
# misc {{{
recolor qr/Unknown command '[^']+'./ => "darkgray";
recolor qr/--More--/ => "darkgray";
# }}}
# }}}
# good messages {{{
# gain ability/level {{{
recolor qr/You feel (?:strong|agile|tough|smart|wise|charismatic)!/ => "purple";
recolor qr/Welcome to experience level \d+./ => "purple";
# }}}
# wishing {{{
recolor qr/For what do you wish\?/ => "purple";
# }}}
# resists {{{
recolor qr/You feel (?:(?:especially )?healthy|hardy)./ => "green";
recolor qr/You feel (?:full of hot air|warm)\./ => "green";
recolor qr/You (?:feel a momentary chill|feel cool|be chillin')\./ => "green";
recolor qr/You feel (?:wide )?awake./ => "green";
recolor qr/You feel (?:very firm|totally together, man)./ => "green";
recolor qr/Your health currently feels amplified!/ => "green";
recolor qr/You feel (?:insulated|grounded in reality)./ => "green";
# }}}
# other intrinsics {{{
recolor qr/You feel (?:very jumpy|diffuse)./ => "blue";
recolor qr/You feel (?:in control of yourself|centered in your personal space)./ => "blue";
recolor qr/You feel controlled/ => "blue";
recolor qr/You feel (?:a strange mental acuity|in touch with the cosmos)./ => "blue";
recolor qr/You feel (?:hidden|perceptive|stealthy|sensitive)./ => "blue";
recolor qr/You feel (?:very self-conscious|transparent)./ => "blue";
recolor qr/You see an image of someone stalking you./ => "blue";
recolor qr/Your vision becomes clear./ => "blue";
recolor qr/You (?:seem faster|feel quick)./ => "blue";
# }}}
# skills {{{
recolor qr/You feel more confident in your.*? skills./ => "cyan";
recolor qr/You feel you could be more dangerous!/ => "cyan";
# }}}
# misc {{{
recolor qr/You feel a mild buzz./ => "bblue";
# }}}
# }}}
# dangerous messages {{{
# attack effects {{{
recolor qr/Goodbye level \d+./ => "red";
recolor qr/The [^.!\e]*? grabs you[^,]/ => "red";
# }}}
# pets {{{
recolor qr/The [^.!\e]*? yowls!/ => "red";
recolor qr/You have a (?:sad|peculiar) feeling for a moment, then it passes\./ => "red";
# }}}
# traps {{{
# had an effect {{{
# arrow trap {{{
recolor qr/An arrow shoots out at you!/ => "red";
# }}}
# dart trap {{{
recolor qr/A little dart shoots out at you!/ => "red";
# }}}
# rock trap {{{
recolor qr/A trap door in .*? opens and .*? falls on your [^!]*!/ => "red";
# }}}
# squeaky board {{{
recolor qr/A board beneath you squeaks loudly./ => "red";
# }}}
# bear trap {{{
recolor qr/\e\[H\S+ bear trap closes on [^!]*!/ => "red";
# }}}
# rust trap
# pit
# hole
# trapdoor
# teleporter
# magic trap {{{
recolor qr/You are caught in a magical explosion!/ => "red";
recolor qr/You are momentarily blinded by a flash of light!/ => "red";
recolor qr/You hear a deafening roar!/ => "red";
recolor qr/You see a flash of light!/ => "red";
# }}}
# antimagic trap {{{
recolor qr/You feel your magical energy drain away./ => "red";
# }}}
# rolling boulder
# sleeping gas trap {{{
recolor qr/A cloud of gas puts you to sleep./ => "red";
# }}}
# levporter
# spiked pit
# landmine
# web
# statue trap
# polytrap
# fire trap {{{
recolor qr/A tower of flame (bursts|erupts) from(?: the \w+)?(?! under the[^!]*)!/ => "red";
# }}}
# }}}
# no effect {{{
# arrow trap {{{
recolor qr/You hear a loud click./ => "darkgray";
# }}}
# dart trap {{{
recolor qr/You hear a soft click./ => "darkgray";
# }}}
# rock trap {{{
recolor qr/A trap door in .*? opens, but nothing falls out!/ => "darkgray";
# }}}
# squeaky board {{{
recolor qr/You notice a loose board below you./ => "darkgray";
recolor qr/You notice a crease in the linoleum./ => "darkgray";
# }}}
# bear trap {{{
recolor qr/\w+ bear trap closes harmlessly (through|over) you./ => "darkgray";
# }}}
# rust trap
# pit
# hole
# trapdoor
# teleporter
# magic trap {{{
recolor qr/A shiver runs up and down your spine!/ => "darkgray";
recolor qr/You hear a distant howling./ => "darkgray";
recolor qr/You hear the moon howling at you./ => "darkgray";
recolor qr/Your pack shakes violently!/ => "darkgray";
recolor qr/You smell charred flesh./ => "darkgray";
recolor qr/You smell hamburgers./ => "darkgray";
recolor qr/You feel tired./ => "darkgray";
recolor qr/You suddenly yearn for your (?:distant|nearby) homeland./ => "darkgray";
recolor qr/You suddenly yearn for Cleveland./ => "darkgray";
recolor qr/You feel (?:oddly )?like the prodigal son./ => "darkgray";
# }}}
# antimagic trap {{{
recolor qr/You feel momentarily lethargic./ => "darkgray";
# }}}
# rolling boulder
# sleeping gas trap {{{
# NOTE(review): colored "red" although it sits in the "no effect" section;
# every other no-effect message here is "darkgray" -- confirm intent.
recolor qr/You are enveloped in a cloud of gas./ => "red";
# }}}
# levporter
# spiked pit
# landmine
# web
# statue trap
# polytrap
# fire trap
# }}}
# }}}
# negative status effects {{{
recolor qr/Oh wow! Great stuff!/ => "red";
recolor qr/You (reel|stagger)\.\.\./ => "red";
recolor qr/You feel somewhat dizzy./ => "red";
recolor qr/You feel feverish./ => "red";
recolor qr/You can't see in here./ => "red";
recolor qr/Everything suddenly goes dark./ => "red";
recolor qr/The world spins and goes dark./ => "red";
recolor qr/It suddenly gets dark./ => "red";
recolor qr/The [^.!\e]*? gaze confuses you./ => "red";
recolor qr/The [^.!\e]*? blinds you./ => "red";
# }}}
# monster spells {{{
recolor qr/You feel a malignant aura surround you./ => "red";
# }}}
# losing resists {{{
recolor qr/You feel (?:warmer|a little sick|cooler|tired|conductive)./ => "red";
# }}}
# losing intrinsics {{{
recolor qr/You seem slower./ => "red";
recolor qr/You slow down./ => "red";
recolor qr/You feel (?:slow|slower)\./ => "red";
recolor qr/You feel (?:less attractive|paranoid|vulnerable|clumsy)./ => "red";
recolor qr/You feel (?:uncontrolled|less jumpy)./ => "red";
recolor qr/You (?:thought you saw something|tawt you taw a puttie tat)./ => "red";
recolor qr/Your senses fail./ => "red";
# }}}
# hunger {{{
recolor qr/You are beginning to feel hungry./ => "red";
recolor qr/You are beginning to feel weak./ => "red";
recolor qr/(?:Wizard|Valkyrie|Elf) needs food, badly!/ => "red";
# }}}
# encumbrance {{{
recolor qr/Your movements are slowed slightly because of your load\./ => "red";
recolor qr/You rebalance your load\. Movement is difficult\./ => "bred";
recolor qr/You stagger under your heavy load\. Movement is very hard\./ => "bred";
recolor qr/You can barely move a handspan with this load!/ => "bred";
recolor qr/You can't even move a handspan with this load!/ => "bred";
# }}}
# serious life threatening issues {{{
recolor qr/You faint from lack of food./ => "bred";
recolor qr/Stop eating\?/ => "bred";
recolor qr/You are slowing down./ => "bred";
recolor qr/Your limbs are stiffening./ => "bred";
recolor qr/You (?:don't feel very well|are turning a little green)./ => "bred";
recolor qr/Your (?:limbs are getting oozy|skin begins to peel away)./ => "bred";
recolor qr/You are turning into a green slime./ => "bred";
recolor qr/You feel deathly sick./ => "bred";
recolor qr/You feel (?:much|even) worse./ => "bred";
recolor qr/The [^.!\e]*? swings itself around you!/ => "bred";
recolor qr/Really quit\?/ => "bred";
# }}}
# Rodders {{{
recolor qr/\e\[(?:1;\d+)?H"So thou thought thou couldst kill me, fool\."/ => "bred";
recolor qr/\e\[(?:1;\d+)?HDouble Trouble\.\.\./ => "bred";
# }}}
# }}}
# useful messages {{{
# shops {{{
recolor qr/You hear someone cursing shoplifters./ => "yellow";
recolor qr/You hear the chime of a cash register./ => "yellow";
# }}}
# }}}
# plot {{{
recolor qr/[Tt]he high priest(?:ess)? of (?!Moloch)\S+/ => "yellow";
recolor qr/You feel a strange vibration [^.?!]*./ => "yellow";
# }}}
# misc {{{
recolor qr/Elbereth/i => "purple";
# }}}
| TAEB/Interhack | plugins/doy-colors-messages.pl | Perl | mit | 8,165 |
#!/usr/bin/env perl
######################################
# $URL: http://mishin.narod.ru $
# $Date: 2011-12-23 19:53:20 +0300 (Web, 14 Sep 2011) $
# $Author: mishin nikolay $
# $Revision: 1.02 $
# $Source: convert_var_to_yaml.pl $
# $Description: convert perl variables to yaml format $
##############################################################################
use YAML::Tiny;
# Slurp the sample records from the __DATA__ section at the end of the file.
my @input_data = <DATA>;
# Five comma-separated, double-quoted fields on one line.
my $RGX_PERL_VAR = qr{"([^"]+)","([^"]+)","([^"]+)","([^"]+)","([^"]+)"}smo;
# Extracts a quoted "YYYY-mm-dd hh:mm:ss" timestamp from a line.
my $RGX_DATE_FULL = qr{.*"(\d{4}-\w{2}-\d{2} \d{2}:\d{2}:\d{2})".*}smo;
# NOTE(review): this grep calls extract_time() and extract_four() and uses
# $start_date, $end_date and @list, none of which are defined in this file
# (only extract_perl_variables() is defined below).  No "use strict" /
# "use warnings" is in effect, so this compiles but will fail at run time.
# The smartmatch operator ~~ is also deprecated (experimental since 5.18).
# Confirm the missing pieces against the original script before use.
my @res =
grep {
extract_time($_) >= $start_date
and extract_time($_) <= $end_date
and ( extract_four($_) ~~ @list )
} @input_data;
# Reduce a record line to its quoted "YYYY-mm-dd hh:mm:ss" timestamp (via
# $RGX_DATE_FULL) and convert it to epoch seconds.
# NOTE(review): str2time() is not imported anywhere in this file (it lives
# in Date::Parse); also the grep above calls extract_time(), not this sub.
# Confirm the intended names/imports before relying on this.
sub extract_perl_variables {
my ($search_str) = @_;
$search_str =~ s/$RGX_DATE_FULL/$1/sm;
return str2time($search_str);
}
__DATA__
my $count_xml = 10000;
my $test_file = 'test_message.xml';
my $orig_idx_file = 'orig_test_message.xml.idx';
my $commit_size = 1000;
| mishin/presentation | my_scripts/test_perl_variable.pl | Perl | apache-2.0 | 1,019 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package XrefMapper::sus_scrofa;

use strict;
use warnings;

use XrefMapper::BasicMapper;
use XrefMapper::SubmitMapper;

# Pig-specific xref mapper: inherits everything not overridden below from
# the generic BasicMapper.  (Obsolete "use vars '@ISA'" modernised to
# a lexically-scoped "our" declaration.)
our @ISA = qw( XrefMapper::BasicMapper );
# Return the list of regexp strings (compiled by the caller) that match
# uninformative gene descriptions -- clone identifiers, hypothetical/
# unknown-protein boilerplate, KIAA/FLJ/LOC placeholders and the like --
# which should be filtered out rather than attached to a gene.
sub gene_description_filter_regexps {
    return (
        # "(NOVEL PROTEIN)" clone names and bare locus/clone ids
        '^BA\S+\s+\(NOVEL PROTEIN\)\.?',
        '^DJ\S+\s+\(NOVEL PROTEIN\)\.?',
        '^LOC\d+\s*(PROTEIN)?\.?',
        '^Putative uncharacterized protein.*',
        '^ORF.*',
        '^PROTEIN C\d+ORF\d+\.*',
        '\(CLONE \S+\)\s+',
        '^BC\d+\_\d+\.?',
        '^CGI\-\d+ PROTEIN\.?\;?',
        # RIKEN / KIAA / clone-library identifiers
        '[0-9A-Z]{10}RIK PROTEIN[ \.]',
        'R\d{5}_\d[ \.,].*',
        'PROTEIN KIAA\d+[ \.].*',
        'RIKEN CDNA [0-9A-Z]{10}[ \.]',
        # hypothetical / unknown / placeholder descriptions
        '^\(*HYPOTHETICAL\s+.*',
        '^UNKNOWN\s+.*',
        '^DKFZP[A-Z0-9]+\s+PROTEIN[\.;]?.*',
        '^CHROMOSOME\s+\d+\s+OPEN\s+READING\s+FRAME\s+\d+\.?.*',
        '^FKSG\d+\.?.*',
        '^HSPC\d+\s+PROTEIN\.?.*',
        '^KIAA\d+\s+PROTEIN\.?.*',
        '^KIAA\d+\s+GENE\s+PRODUCT\.?.*',
        '^HSPC\d+.*',
        '^PRO\d+\s+PROTEIN\.?.*',
        '^PRO\d+\.?.*',
        '^FLJ\d+\s+PROTEIN.*',
        '^PRED\d+\s+PROTEIN.*',
        '^WUGSC:.*\s+PROTEIN\.?.*',
        # "SIMILAR TO ..." placeholders
        '^SIMILAR TO GENE.*',
        '^SIMILAR TO PUTATIVE[ \.]',
        '^SIMILAR TO HYPOTHETICAL.*',
        '^SIMILAR TO (KIAA|LOC).*',
        '^SIMILAR TO\s+$',
        '^WUGSC:H_.*',
        # empty "(PROTEIN)" / "(FRAGMENT)" / "(GENE)" shells
        '^\s*\(?PROTEIN\)?\.?\s*$',
        '^\s*\(?FRAGMENT\)?\.?\s*$',
        '^\s*\(?GENE\)?\.?\s*$',
        '^\s*\(\s*\)\s*$',
        '^\s*\(\d*\)\s*[ \.]$'
    );
}
# Name of the xref source that provides the "official" gene names for pig.
sub get_official_name {
    return 'PIGGY';
}
# Canonical naming source for pig; identical to the official source here.
sub get_canonical_name {
    return 'PIGGY';
}
# Not running transcript_names_from_gene for merged species
# as this is already being done in the OfficialNaming mapper.
# Deliberate no-op override of the BasicMapper behaviour; bare "return"
# yields undef/empty list depending on the caller's context.
sub transcript_names_from_gene {
return;
}
# For merged species the display xrefs have already been chosen by the
# OfficialNaming mapper and stored in the stable table, so simply copy
# them from there rather than recomputing.
sub set_display_xrefs {
    my ($self) = @_;
    XrefMapper::DisplayXrefs->new($self)->set_display_xrefs_from_stable_table();
}

1;    # module must return a true value
| at7/ensembl | misc-scripts/xref_mapping/XrefMapper/sus_scrofa.pm | Perl | apache-2.0 | 2,530 |
# The package statement is split across two lines -- presumably the usual
# trick to keep the PAUSE/CPAN indexer from indexing this internal data
# module (confirm against Date::Manip packaging conventions).
package #
Date::Manip::TZ::assama00;
# Copyright (c) 2008-2014 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Fri Nov 21 10:41:46 EST 2014
# Data version: tzdata2014j
# Code version: tzcode2014j
# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz
use strict;
use warnings;
require 5.010000;
our (%Dates,%LastRule);
# END blocks release the (potentially large) data tables at interpreter
# shutdown.
END {
undef %Dates;
undef %LastRule;
}
our ($VERSION);
$VERSION='6.48';
END { undef $VERSION; }
%Dates = (
1 =>
[
[ [1,1,2,0,0,0],[1,1,2,4,27,53],'+04:27:53',[4,27,53],
'LMT',0,[1924,5,1,19,32,6],[1924,5,1,23,59,59],
'0001010200:00:00','0001010204:27:53','1924050119:32:06','1924050123:59:59' ],
],
1924 =>
[
[ [1924,5,1,19,32,7],[1924,5,1,23,32,7],'+04:00:00',[4,0,0],
'SAMT',0,[1930,6,20,19,59,59],[1930,6,20,23,59,59],
'1924050119:32:07','1924050123:32:07','1930062019:59:59','1930062023:59:59' ],
],
1930 =>
[
[ [1930,6,20,20,0,0],[1930,6,21,1,0,0],'+05:00:00',[5,0,0],
'SAMT',0,[1981,3,31,18,59,59],[1981,3,31,23,59,59],
'1930062020:00:00','1930062101:00:00','1981033118:59:59','1981033123:59:59' ],
],
1981 =>
[
[ [1981,3,31,19,0,0],[1981,4,1,1,0,0],'+06:00:00',[6,0,0],
'SAMST',1,[1981,9,30,17,59,59],[1981,9,30,23,59,59],
'1981033119:00:00','1981040101:00:00','1981093017:59:59','1981093023:59:59' ],
[ [1981,9,30,18,0,0],[1981,10,1,0,0,0],'+06:00:00',[6,0,0],
'TAST',0,[1982,3,31,17,59,59],[1982,3,31,23,59,59],
'1981093018:00:00','1981100100:00:00','1982033117:59:59','1982033123:59:59' ],
],
1982 =>
[
[ [1982,3,31,18,0,0],[1982,4,1,0,0,0],'+06:00:00',[6,0,0],
'SAMST',1,[1982,9,30,17,59,59],[1982,9,30,23,59,59],
'1982033118:00:00','1982040100:00:00','1982093017:59:59','1982093023:59:59' ],
[ [1982,9,30,18,0,0],[1982,9,30,23,0,0],'+05:00:00',[5,0,0],
'SAMT',0,[1983,3,31,18,59,59],[1983,3,31,23,59,59],
'1982093018:00:00','1982093023:00:00','1983033118:59:59','1983033123:59:59' ],
],
1983 =>
[
[ [1983,3,31,19,0,0],[1983,4,1,1,0,0],'+06:00:00',[6,0,0],
'SAMST',1,[1983,9,30,17,59,59],[1983,9,30,23,59,59],
'1983033119:00:00','1983040101:00:00','1983093017:59:59','1983093023:59:59' ],
[ [1983,9,30,18,0,0],[1983,9,30,23,0,0],'+05:00:00',[5,0,0],
'SAMT',0,[1984,3,31,18,59,59],[1984,3,31,23,59,59],
'1983093018:00:00','1983093023:00:00','1984033118:59:59','1984033123:59:59' ],
],
1984 =>
[
[ [1984,3,31,19,0,0],[1984,4,1,1,0,0],'+06:00:00',[6,0,0],
'SAMST',1,[1984,9,29,20,59,59],[1984,9,30,2,59,59],
'1984033119:00:00','1984040101:00:00','1984092920:59:59','1984093002:59:59' ],
[ [1984,9,29,21,0,0],[1984,9,30,2,0,0],'+05:00:00',[5,0,0],
'SAMT',0,[1985,3,30,20,59,59],[1985,3,31,1,59,59],
'1984092921:00:00','1984093002:00:00','1985033020:59:59','1985033101:59:59' ],
],
1985 =>
[
[ [1985,3,30,21,0,0],[1985,3,31,3,0,0],'+06:00:00',[6,0,0],
'SAMST',1,[1985,9,28,20,59,59],[1985,9,29,2,59,59],
'1985033021:00:00','1985033103:00:00','1985092820:59:59','1985092902:59:59' ],
[ [1985,9,28,21,0,0],[1985,9,29,2,0,0],'+05:00:00',[5,0,0],
'SAMT',0,[1986,3,29,20,59,59],[1986,3,30,1,59,59],
'1985092821:00:00','1985092902:00:00','1986032920:59:59','1986033001:59:59' ],
],
1986 =>
[
[ [1986,3,29,21,0,0],[1986,3,30,3,0,0],'+06:00:00',[6,0,0],
'SAMST',1,[1986,9,27,20,59,59],[1986,9,28,2,59,59],
'1986032921:00:00','1986033003:00:00','1986092720:59:59','1986092802:59:59' ],
[ [1986,9,27,21,0,0],[1986,9,28,2,0,0],'+05:00:00',[5,0,0],
'SAMT',0,[1987,3,28,20,59,59],[1987,3,29,1,59,59],
'1986092721:00:00','1986092802:00:00','1987032820:59:59','1987032901:59:59' ],
],
1987 =>
[
[ [1987,3,28,21,0,0],[1987,3,29,3,0,0],'+06:00:00',[6,0,0],
'SAMST',1,[1987,9,26,20,59,59],[1987,9,27,2,59,59],
'1987032821:00:00','1987032903:00:00','1987092620:59:59','1987092702:59:59' ],
[ [1987,9,26,21,0,0],[1987,9,27,2,0,0],'+05:00:00',[5,0,0],
'SAMT',0,[1988,3,26,20,59,59],[1988,3,27,1,59,59],
'1987092621:00:00','1987092702:00:00','1988032620:59:59','1988032701:59:59' ],
],
1988 =>
[
[ [1988,3,26,21,0,0],[1988,3,27,3,0,0],'+06:00:00',[6,0,0],
'SAMST',1,[1988,9,24,20,59,59],[1988,9,25,2,59,59],
'1988032621:00:00','1988032703:00:00','1988092420:59:59','1988092502:59:59' ],
[ [1988,9,24,21,0,0],[1988,9,25,2,0,0],'+05:00:00',[5,0,0],
'SAMT',0,[1989,3,25,20,59,59],[1989,3,26,1,59,59],
'1988092421:00:00','1988092502:00:00','1989032520:59:59','1989032601:59:59' ],
],
1989 =>
[
[ [1989,3,25,21,0,0],[1989,3,26,3,0,0],'+06:00:00',[6,0,0],
'SAMST',1,[1989,9,23,20,59,59],[1989,9,24,2,59,59],
'1989032521:00:00','1989032603:00:00','1989092320:59:59','1989092402:59:59' ],
[ [1989,9,23,21,0,0],[1989,9,24,2,0,0],'+05:00:00',[5,0,0],
'SAMT',0,[1990,3,24,20,59,59],[1990,3,25,1,59,59],
'1989092321:00:00','1989092402:00:00','1990032420:59:59','1990032501:59:59' ],
],
1990 =>
[
[ [1990,3,24,21,0,0],[1990,3,25,3,0,0],'+06:00:00',[6,0,0],
'SAMST',1,[1990,9,29,20,59,59],[1990,9,30,2,59,59],
'1990032421:00:00','1990032503:00:00','1990092920:59:59','1990093002:59:59' ],
[ [1990,9,29,21,0,0],[1990,9,30,2,0,0],'+05:00:00',[5,0,0],
'SAMT',0,[1991,3,30,20,59,59],[1991,3,31,1,59,59],
'1990092921:00:00','1990093002:00:00','1991033020:59:59','1991033101:59:59' ],
],
1991 =>
[
[ [1991,3,30,21,0,0],[1991,3,31,3,0,0],'+06:00:00',[6,0,0],
'SAMST',1,[1991,8,31,17,59,59],[1991,8,31,23,59,59],
'1991033021:00:00','1991033103:00:00','1991083117:59:59','1991083123:59:59' ],
[ [1991,8,31,18,0,0],[1991,9,1,0,0,0],'+06:00:00',[6,0,0],
'UZST',1,[1991,9,28,20,59,59],[1991,9,29,2,59,59],
'1991083118:00:00','1991090100:00:00','1991092820:59:59','1991092902:59:59' ],
[ [1991,9,28,21,0,0],[1991,9,29,2,0,0],'+05:00:00',[5,0,0],
'UZT',0,[9999,12,31,0,0,0],[9999,12,31,5,0,0],
'1991092821:00:00','1991092902:00:00','9999123100:00:00','9999123105:00:00' ],
],
);
%LastRule = (
);
1;
| nriley/Pester | Source/Manip/TZ/assama00.pm | Perl | bsd-2-clause | 6,840 |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by lib/unicore/mktables from the Unicode
# database, Version 8.0.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly. Use Unicode::UCD to access the Unicode character data
# base.
return <<'END';
V34
173
174
1536
1542
1564
1565
1757
1758
1807
1808
6158
6159
8203
8208
8234
8239
8288
8293
8294
8304
65279
65280
65529
65532
69821
69822
113824
113828
119155
119163
917505
917506
917536
917632
END
| operepo/ope | bin/usr/share/perl5/core_perl/unicore/lib/Gc/Cf.pl | Perl | mit | 680 |
# For internal Module::CoreList use only.
package Module::CoreList::TieHashDelta;
use strict;
use vars qw($VERSION);
$VERSION = "2.83";
# Construct the tied-hash backing object.  $changed holds this release's
# delta entries, $removed the keys deleted in this release, and $parent
# the (possibly tied) hash for the previous release.
sub TIEHASH {
    my ($class, $changed, $removed, $parent) = @_;
    my %state = (
        changed       => $changed,
        removed       => $removed,
        parent        => $parent,
        keys_inflated => 0,
    );
    return bless \%state, $class;
}
# Look a key up in the delta: a changed entry wins, a removed key reads
# as undef, otherwise fall through to the parent delta (if any).
sub FETCH {
    my ($self, $key) = @_;
    return $self->{changed}{$key} if exists $self->{changed}{$key};
    return undef if exists $self->{removed}{$key};
    return $self->{parent}{$key} if defined $self->{parent};
    return undef;
}
# Key-existence check with the same precedence as FETCH: changed keys
# exist, removed keys do not, anything else defers to the parent.
sub EXISTS {
    my ($self, $key) = @_;
    return 1 if exists $self->{changed}{$key};
    return '' if exists $self->{removed}{$key};
    return exists $self->{parent}{$key} if defined $self->{parent};
    return '';
}
# Begin hash iteration (backs `keys`/`each` on the tied hash).  On first
# use, the parent's surviving keys are merged ("inflated") into {changed}
# so that subsequent NEXTKEY calls are a plain `each` over one hash
# instead of a cascade of tied lookups up the delta chain.
sub FIRSTKEY {
    my ($self) = @_;
    if (not $self->{keys_inflated}) {
        # This inflates the whole set of hashes... Somewhat expensive, but saves
        # many tied hash calls later.
        my @parent_keys;
        if (defined $self->{parent}) {
            @parent_keys = keys %{$self->{parent}};
        }
        # Keys removed in this delta must not reappear via the parent.
        @parent_keys = grep !exists $self->{removed}{$_}, @parent_keys;
        for my $key (@parent_keys) {
            # A locally-changed entry already shadows the parent's value.
            next if exists $self->{changed}->{$key};
            $self->{changed}{$key} = $self->{parent}{$key};
        }
        $self->{keys_inflated} = 1;
    }
    keys %{$self->{changed}}; # reset each
    $self->NEXTKEY;
}
# Advance the iterator started by FIRSTKEY; propagates each()'s
# list/scalar context to the caller.
sub NEXTKEY {
    my ($self) = @_;
    return each %{ $self->{changed} };
}
1;
| Dokaponteam/ITF_Project | xampp/perl/lib/Module/CoreList/TieHashDelta.pm | Perl | mit | 1,655 |
# orgcard2txt.pl - a script to generate orgcard.txt from orgcard.tex
# Copyright (C) 2010, 2013 Osamu OKANO
#
# Version: 0.1
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Usage:
# ======
# perl orgcard2txt.pl orgcard.tex > orgcard.txt
use strict;
use warnings;
# Convert the TeX markup used in orgcard.tex into plain text.
# NOTE: the substitution order matters (e.g. the \^{} form must be
# rewritten before the bare \^ escape), so do not reorder these.
sub rep_esc{
    my $text = shift @_;
    $text =~ s/\\kbd{([^}]+)}/$1/g;             # \kbd{X} -> X
    $text =~ s/\$\^([0-9])\$/[$1]/g;            # footnote marker $^N$ -> [N]
    $text =~ s/\\rm //g;
    $text =~ s/\\\///g;
    $text =~ s/\\\^{}/^/g;
    $text =~ s/\\}/}/g;
    $text =~ s/\\{/{/g;
    $text =~ s/\\\#/#/g;
    $text =~ s/\\\^/^/g;
    $text =~ s/\\\%/%/g;
    $text =~ s/\\\_/_/g;
    $text =~ s/\\\&/&/g;
    $text =~ s/\\\$/\$/g;
    $text =~ s/\$\\leftrightarrow\$/<->/g;
    $text =~ s/\$\\pm 1\$/±1/g;
    $text =~ s/``{\\tt ([^}]+)}''/`$1'/g;       # ``{\tt X}'' -> `X'
    return $text;
}
my $page=0;
my $orgversionnumber;

# First pass (up to the first form feed): pull the Org version number out
# of the TeX \def so it can be interpolated into the header below.
# Fixed: use a checked three-argument open with a lexical filehandle
# instead of the unsafe, unchecked two-argument bareword open(IN, ...).
open(my $tex, '<', $ARGV[0]) or die "Cannot open $ARGV[0]: $!\n";
while (<$tex>) {
    last if /\f/;   # stop at the first page break
    $orgversionnumber = $1 if /\\def\\orgversionnumber{([^}]+)}/;
}
close($tex);
# Avoid an "uninitialized value" warning if the \def was never found.
$orgversionnumber = '' unless defined $orgversionnumber;

print <<HEAD;
================================================================================
Org-Mode Reference Card (for version $orgversionnumber)
================================================================================
HEAD
# Two-column output row: $key and $value are filled in by the main loop
# below and flushed to STDOUT with write().  (Comments cannot appear
# inside the format picture itself.)
my $key;
my $value;
format STDOUT =
@<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< @<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
$key,$value
.
# Second pass: render the portion of the card after the first form feed
# ($page == 1) as plain text.
# Fixed: checked three-argument open with a lexical filehandle instead of
# the unsafe two-argument bareword form; `&rep_esc` call syntax replaced
# with a plain call (the `&` form bypasses prototypes and is discouraged).
open(my $card, '<', $ARGV[0]) or die "Cannot open $ARGV[0]: $!\n";
while (<$card>) {
    # Form feeds separate the "pages" of the card; count them.
    if (/\f/) {
        $page = $page + 1;
        next;
    }
    next if ($page != 1);
    next if (/^%/);     # whole-line TeX comments
    next if /Org-Mode Reference Card \([12]\/2\)/;
    next if /\\centerline{\(for version \\orgversionnumber\)}/;
    next if /\(for version \)/;
    next if /\\newcolumn/;
    next if /\\copyrightnotice/;
    next if /\\bye/;
    next if /\\title{([^}]+)}/;
    chomp;
    # print "b:$_\n";
    s/([^\\])\%.+$/$1/;     # strip trailing TeX comments (but keep escaped \%)
    # print "a:$_\n";
    if (/\\section{(.+)}/) {
        my $sec = rep_esc($1);
        print "================================================================================\n";
        print "$sec\n";
        print "================================================================================\n";
        next;
    }
    if (/{\\bf (.+)}/) {
        my $bf = rep_esc($1);
        print "--------------------------------------------------------------------------------\n";
        print "$bf\n";
        print "--------------------------------------------------------------------------------\n";
        next;
    }
    if (/^{\\it (.+)}/) {
        my $it = rep_esc($1);
        print "--------------------------------------------------------------------------------\n";
        print "$it\n";
        print "--------------------------------------------------------------------------------\n";
        next;
    }
    # \key{...}{...} and \metax{...}{...} lines become two-column rows
    # emitted through the STDOUT format via write().
    if (/^\\key{(.+)}\s*$/ || /^\\metax{(.+)}\s*$/) {
        my ($k, $v) = split(/}{/, $1);
        my $k2 = rep_esc($k);
        my $v2 = rep_esc($v);
        # print "$k2\t$v2\n";
        ($key, $value) = ($k2, $v2);
        write;
        next;
    }
    # Anything else: de-TeX and print verbatim.
    my $line = rep_esc($_);
    $line =~ s/{\\it ([^}]+)}/$1/g;
    $line =~ s/{\\tt ([^}]+)}/$1/g;
    print "$line\n";
}
close($card);
| nuppt/.emacs.d | packages/org-mode/mk/orgcard2txt.pl | Perl | mit | 3,429 |
##############################################################################
# $URL: http://perlcritic.tigris.org/svn/perlcritic/trunk/distributions/Perl-Critic/lib/Perl/Critic/Policy/InputOutput/RequireBriefOpen.pm $
# $Date: 2012-07-02 22:16:39 -0700 (Mon, 02 Jul 2012) $
# $Author: thaljef $
# $Revision: 4126 $
##############################################################################
package Perl::Critic::Policy::InputOutput::RequireBriefOpen;
use 5.006001;
use strict;
use warnings;
use Readonly;
use List::MoreUtils qw(any);
use Perl::Critic::Utils qw{ :severities :classification :booleans
hashify parse_arg_list
};
use base 'Perl::Critic::Policy';
our $VERSION = '1.118';
#-----------------------------------------------------------------------------
# User-visible description of the policy and the PBP page reference ([209]).
Readonly::Scalar my $DESC => q<Close filehandles as soon as possible after opening them..>;
Readonly::Scalar my $EXPL => [209];
# Sigils used when classifying the filehandle expression passed to open().
Readonly::Scalar my $SCALAR_SIGIL => q<$>;
Readonly::Scalar my $GLOB_SIGIL => q<*>;
# Identify the builtins that are equivalent to 'open' and 'close'. Note that
# 'return' is considered equivalent to 'close'.
Readonly::Hash my %CLOSE_BUILTIN => hashify( qw{
    close
    CORE::close
    CORE::GLOBAL::close
    return
} );
Readonly::Hash my %OPEN_BUILTIN => hashify( qw{
    open
    CORE::open
    CORE::GLOBAL::open
} );
# Possible values for $is_lexical
Readonly::Scalar my $NOT_LEXICAL => 0; # Guaranteed only false value
Readonly::Scalar my $LOCAL_LEXICAL => 1;
Readonly::Scalar my $NON_LOCAL_LEXICAL => 2;
# Index of the last element of a PPI node's child list.
Readonly::Scalar my $LAST_ELEMENT => -1;
#-----------------------------------------------------------------------------
# Single configuration knob: "lines", the maximum allowed distance (in
# source lines) between an open() and its matching close().
sub supported_parameters {
    my %lines_param = (
        name            => 'lines',
        description     => 'The maximum number of lines between an open() and a close().',
        default_string  => '9',
        behavior        => 'integer',
        integer_minimum => 1,
    );
    return ( \%lines_param );
}
# Perl::Critic policy metadata: severity, theme tags, and the PPI node
# type this policy is invoked on.
sub default_severity { return $SEVERITY_HIGH; }
sub default_themes   { return qw( core pbp maintenance ); }
sub applies_to       { return 'PPI::Token::Word'; }
#-----------------------------------------------------------------------------
# Main policy entry point, called for every PPI::Token::Word.  Emits a
# violation when an open() call has no matching close() -- or return of
# the opened handle -- within the configured number of lines.
sub violates {
    my ( $self, $elem, undef ) = @_;
    # Is it a call to open?
    $OPEN_BUILTIN{$elem->content()} or return;
    return if ! is_function_call($elem);
    my @open_args = parse_arg_list($elem);
    return if 2 > @open_args; # not a valid call to open()
    # Classify the opened filehandle expression and whether it is a
    # locally declared lexical ('open my $fh, ...').
    my ($is_lexical, $fh) = _get_opened_fh($open_args[0]);
    return if not $fh;
    # The standard handles are exempt from this policy.
    return if $fh =~ m< \A [*]? STD (?: IN|OUT|ERR ) \z >xms;
    for my $close_token ( $self->_find_close_invocations_or_return(
            $elem, $is_lexical ) ) {
        # The $close_token might be a close() or a return()
        # It doesn't matter which -- both satisfy this policy
        if (is_function_call($close_token)) {
            my @close_args = parse_arg_list($close_token);
            my $close_parameter = $close_args[0];
            if ('ARRAY' eq ref $close_parameter) {
                $close_parameter = ${$close_parameter}[0];
            }
            if ( $close_parameter ) {
                $close_parameter = "$close_parameter";
                return if $fh eq $close_parameter;
                # Compare glob forms with and without the '*' sigil, so
                # 'open FH' is matched by 'close *FH' and vice versa.
                if ( any { m< \A [*] >xms } ($fh, $close_parameter) ) {
                    (my $stripped_fh = $fh) =~ s< \A [*] ><>xms;
                    (my $stripped_parameter = $close_parameter) =~
                        s< \A [*] ><>xms;
                    return if $stripped_fh eq $stripped_parameter;
                }
            }
        }
        elsif ($is_lexical && is_method_call($close_token)) {
            # Method-style '$fh->close': the handle token sits two
            # significant siblings before the 'close' word ($fh -> close).
            my $tok = $close_token->sprevious_sibling->sprevious_sibling;
            return if $fh eq $tok;
        }
    }
    return $self->violation( $DESC, $EXPL, $elem );
}
# Collect every close()/return builtin token that appears after the
# open() call within the relevant scope and, when a line budget is
# configured, no further than _lines lines below it.
sub _find_close_invocations_or_return {
    my ($self, $elem, $is_lexical) = @_;
    my $parent = $self->_get_scope( $elem, $is_lexical );
    return if !$parent; # I can't think of a scenario where this would happen
    my $open_loc = $elem->location;
    # we don't actually allow _lines to be zero or undef, but maybe we will
    my $end_line = $self->{_lines} ? $open_loc->[0] + $self->{_lines} : undef;
    my $closes = $parent->find(sub {
        ##no critic (ProhibitExplicitReturnUndef)
        my ($parent, $candidate) = @_; ## no critic(Variables::ProhibitReusedNames)
        # NOTE(review): `return undef` here appears intended to prune whole
        # subtrees (nested subs, out-of-range nodes) during PPI's find()
        # traversal -- confirm against the PPI::Node::find documentation.
        return undef if $candidate->isa('PPI::Statement::Sub');
        my $candidate_loc = $candidate->location;
        return undef if !defined $candidate_loc->[0];
        # Ignore anything at or before the open() call itself.
        return 0 if $candidate_loc->[0] < $open_loc->[0];
        return 0 if $candidate_loc->[0] == $open_loc->[0] && $candidate_loc->[1] <= $open_loc->[1];
        return undef if defined $end_line && $candidate_loc->[0] > $end_line;
        return 0 if !$candidate->isa('PPI::Token::Word');
        # Keep only close/CORE::close/CORE::GLOBAL::close/return words.
        return $CLOSE_BUILTIN{ $candidate->content() } || 0;
    });
    return @{$closes || []};
}
# Walk up the PPI tree from the open() token to find the scope in which
# the matching close() must be sought.  For a locally-declared lexical
# handle the innermost enclosing scope suffices; otherwise keep climbing
# until the scope is large enough to contain the whole allowed line range.
sub _get_scope {
    my ( $self, $elem, $is_lexical ) = @_;
    my $open_loc = $elem->location;
    my $end_line = ( $self->{_lines} && defined $open_loc->[0] ) ?
        $open_loc->[0] + $self->{_lines} :
        undef;
    while ( my $dad = $elem->parent) {
        $elem = $dad;
        next if not $elem->scope;
        # If we are analyzing something like 'open my $fh ...', the
        # most-local scope suffices. RT #64437
        return $elem if $LOCAL_LEXICAL == $is_lexical;
        next if not defined $end_line; # Presume search everywhere
        # If we are analyzing something like 'open $fh ...', 'open FH
        # ...', or 'open *FH ...' we need to use a scope that includes
        # the end of the legal range. We just give up and return the
        # current scope if we can not determine any of the locations
        # involved. RT #64437
        return $elem if not $open_loc;
        my $elem_loc = $elem->location
            or return $elem;
        my $last_kid = $elem->child( $LAST_ELEMENT )
            or return $elem; # What? no children?
        my $last_kid_loc = $last_kid->location
            or return $elem;
        # At this point, the scope we have, even if it is not the
        # correct scope for the file handle, is big enough that if the
        # corresponding close() is outside it, it must be a violation.
        # RT #64437
        return $elem if $last_kid_loc->[0] > $end_line;
    }
    return $elem; # Whatever the top-level PPI::Node was.
}
# Examine the first argument of an open() call and return a two-element
# list: a lexicality classification ($LOCAL_LEXICAL for 'open my $fh',
# $NON_LOCAL_LEXICAL for 'open $fh', $NOT_LEXICAL for globs/barewords,
# or undef for unrecognized forms) and the filehandle token itself.
sub _get_opened_fh {
    my ($tokens) = shift;
    my $is_lexical;
    my $fh;
    if ( 2 == @{$tokens} ) {
        # Two tokens: must be 'my $fh' to be recognized.
        if ('my' eq $tokens->[0] &&
            $tokens->[1]->isa('PPI::Token::Symbol') &&
            $SCALAR_SIGIL eq $tokens->[1]->raw_type) {
            $is_lexical = $LOCAL_LEXICAL;
            $fh = $tokens->[1];
        }
    }
    elsif (1 == @{$tokens}) {
        # One token: a symbol ($fh / *FH), possibly wrapped in a block,
        # or an ALL-CAPS bareword filehandle.
        my $argument = _unwrap_block( $tokens->[0] );
        if ( $argument->isa('PPI::Token::Symbol') ) {
            my $sigil = $argument->raw_type();
            if ($SCALAR_SIGIL eq $sigil) {
                $is_lexical = $NON_LOCAL_LEXICAL; # We need to
                                                  # distinguish between
                                                  # 'open my $fh ...' and
                                                  # 'open $fh ...'. RT #64437
                $fh = $argument;
            }
            elsif ($GLOB_SIGIL eq $sigil) {
                $is_lexical = $NOT_LEXICAL;
                $fh = $argument;
            }
        }
        elsif ($argument->isa('PPI::Token::Word') && $argument eq uc $argument) {
            $is_lexical = $NOT_LEXICAL;
            $fh = $argument;
        }
    }
    return ($is_lexical, $fh);
}
# If $element is a PPI block containing exactly one statement with exactly
# one significant child, return that child; otherwise return the input
# unchanged.  Lets 'open {$fh} ...' arguments be treated like plain symbols.
sub _unwrap_block {
    my ($element) = @_;
    return $element unless $element->isa('PPI::Structure::Block');
    my @statements = $element->schildren();
    return $element unless 1 == @statements;
    my $only_statement = $statements[0];
    return $only_statement unless $only_statement->isa('PPI::Statement');
    my @parts = $only_statement->schildren();
    return $element unless 1 == @parts;
    return $parts[0];
}
1;
__END__
#-----------------------------------------------------------------------------
=pod
=for stopwords redeclared
=head1 NAME
Perl::Critic::Policy::InputOutput::RequireBriefOpen - Close filehandles as soon as possible after opening them.
=head1 AFFILIATION
This Policy is part of the core L<Perl::Critic|Perl::Critic>
distribution.
=head1 DESCRIPTION
One way that production systems fail unexpectedly is by running out of
filehandles. Filehandles are a finite resource on every operating
system that I'm aware of, and running out of them is virtually
impossible to recover from. The solution is to not run out in the
first place. What causes programs to run out of filehandles?
Usually, it's leaks: you open a filehandle and forget to close it, or
just wait a really long time before closing it.
This problem is rarely exposed by test systems, because the tests
rarely run long enough or have enough load to hit the filehandle
limit. So, the best way to avoid the problem is 1) always close all
filehandles that you open and 2) close them as soon as is practical.
This policy takes note of calls to C<open()> where there is no
matching C<close()> call within C<N> lines of code. If you really
need to do a lot of processing on an open filehandle, then you can
move that processing to another method like this:
sub process_data_file {
my ($self, $filename) = @_;
open my $fh, '<', $filename
or croak 'Failed to read datafile ' . $filename . '; ' . $OS_ERROR;
$self->_parse_input_data($fh);
close $fh;
return;
}
sub _parse_input_data {
my ($self, $fh) = @_;
while (my $line = <$fh>) {
...
}
return;
}
As a special case, this policy also allows code to return the
filehandle after the C<open> instead of closing it. Just like the
close, however, that C<return> has to be within the right number of
lines. From there, you're on your own to figure out whether the code
is promptly closing the filehandle.
The STDIN, STDOUT, and STDERR handles are exempt from this policy.
=head1 CONFIGURATION
This policy allows C<close()> invocations to be up to C<N> lines after
their corresponding C<open()> calls, where C<N> defaults to 9. You
can override this to set it to a different number with the C<lines>
setting. To do this, put entries in a F<.perlcriticrc> file like
this:
[InputOutput::RequireBriefOpen]
lines = 5
=head1 CAVEATS
=head2 C<IO::File-E<gt>new>
This policy only looks for explicit C<open> calls. It does not detect
calls to C<CORE::open> or C<IO::File-E<gt>new> or the like.
=head2 Is it the right lexical?
We don't currently check for redeclared filehandles. So the following
code is false negative, for example, because the outer scoped
filehandle is not closed:
open my $fh, '<', $file1 or croak;
if (open my $fh, '<', $file2) {
print <$fh>;
close $fh;
}
This is a contrived example, but it isn't uncommon for people to use
C<$fh> for the name of the filehandle every time. Perhaps it's time
to think of better variable names...
=head1 CREDITS
Initial development of this policy was supported by a grant from the
Perl Foundation.
=head1 AUTHOR
Chris Dolan <cdolan@cpan.org>
=head1 COPYRIGHT
Copyright (c) 2007-2011 Chris Dolan. Many rights reserved.
This program is free software; you can redistribute it and/or modify
it under the same terms as Perl itself. The full text of this license
can be found in the LICENSE file included with this module
=cut
# Local Variables:
# mode: cperl
# cperl-indent-level: 4
# fill-column: 78
# indent-tabs-mode: nil
# c-indentation-style: bsd
# End:
# ex: set ts=8 sts=4 sw=4 tw=78 ft=perl expandtab shiftround :
| amidoimidazol/bio_info | Beginning Perl for Bioinformatics/lib/Perl/Critic/Policy/InputOutput/RequireBriefOpen.pm | Perl | mit | 11,993 |
=pod
=head1 NAME
ERR_put_error, ERR_add_error_data - record an error
=head1 SYNOPSIS
#include <openssl/err.h>
void ERR_put_error(int lib, int func, int reason, const char *file,
int line);
void ERR_add_error_data(int num, ...);
=head1 DESCRIPTION
ERR_put_error() adds an error code to the thread's error queue. It
signals that the error of reason code B<reason> occurred in function
B<func> of library B<lib>, in line number B<line> of B<file>.
This function is usually called by a macro.
ERR_add_error_data() associates the concatenation of its B<num> string
arguments with the error code added last.
L<ERR_load_strings(3)> can be used to register
error strings so that the application can generate human-readable
error messages for the error code.
=head1 RETURN VALUES
ERR_put_error() and ERR_add_error_data() return
no values.
=head1 SEE ALSO
L<err(3)>, L<ERR_load_strings(3)>
=head1 HISTORY
ERR_put_error() is available in all versions of SSLeay and OpenSSL.
ERR_add_error_data() was added in SSLeay 0.9.0.
=cut
| vbloodv/blood | extern/openssl.orig/doc/crypto/ERR_put_error.pod | Perl | mit | 1,047 |
/* Part of LogicMOO Base An Implementation a MUD server in SWI-Prolog
% ===================================================================
% File 'dcg_meta.pl'
% Purpose: An Implementation in SWI-Prolog of certain debugging tools
% Maintainer: Douglas Miles
% Contact: $Author: dmiles $@users.sourceforge.net ;
% Version: 'logicmoo_util_bugger.pl' 1.0.0
% Revision: $Revision: 1.1 $
% Revised At: $Date: 2002/07/11 21:57:28 $
% ===================================================================
*/
:- module(dcg_must,[
dcg_peek/3]).
:- set_module(class(library)).
:- use_module(library(logicmoo_common)).
%dcg_must_each_det(G, S, E):- phrase(G, S, E), !.
quietly(DCG, S, E):- setup_call_cleanup(quietly(phrase(DCG, S, E)),true,true).
% quietly(DCG,S,E):- quietly(phrase(DCG,S,E)).
notrace(DCG,S,E):- quietly(DCG,S,E). %notrace(phrase(DCG,S,E)).
must(DCG,S,E):- must(phrase(DCG,S,E)).
ignore_must(DCG,S,E):- ignore_must(phrase(DCG,S,E)).
dcg_if_defined(DCG,S,E):- catch(phrase(DCG,S,E),error(existence_error(procedure,_),context(_,_47656)),fail).
% dcg_peek(+DCG, ?S, ?S): succeed if DCG matches at S without consuming input.
dcg_peek(DCG,S,S):- phrase(DCG,S,_).
dcg_must_each_det(_, S, _):- S == [], !, fail.
dcg_must_each_det((G1, G2), S, E):- !, must(phrase(G1, S, M)), !, dcg_must_each_det(G2, M, E), !.
dcg_must_each_det(G, S, E):- !, must(phrase(G, S, E)), !.
dcg_and(DCG1, DCG2, S, E) :- dcg_condition(DCG1, S, E), phrase(DCG2, S, E), !.
dcg_unless(DCG1, DCG2, S, E) :- \+ dcg_condition(DCG1, S, _), !, phrase(DCG2, S, E).
dcg_when(DCG1, DCG2, S, E) :- dcg_condition(DCG1, S, _),!, phrase(DCG2, S, E).
dcg_length(Len,S,E):- \+ var(Len) -> (length(L,Len), append(L,E,S));
(length(S,Full),between(Full,0,Len),length(L,Len), append(L,E,S)).
dcg_from_right(DCG1, DCG2, S, E) :- length(S,Full), between(Full,0,Start), dcg_scan(DCG1,Start,DCG2,S,E).
dcg_from_left(DCG1, DCG2, S, E) :- length(S,Full), between(0,Full,Start), dcg_scan(DCG1,Start,DCG2,S,E).
% dcg_scan(+DCG1, +Start, +DCG2, +S, -E)
% Split S so that the first Start elements are consumed entirely by DCG1
% and the remainder is parsed by DCG2.  DCG2 is first tried inside \+ \+
% as a cheap lookahead, so DCG1 only runs when the split can succeed.
dcg_scan(DCG1,Start2,DCG2,S,E):-
 length(Before,Start2), append(Before,Mid,S), \+ \+ phrase(DCG2, Mid, _),
 phrase(DCG1, Before, []), phrase(DCG2, Mid, E).
dcg_condition([], S, _):- S \== [], !, fail.
dcg_condition(DCG, S, E):- phrase(DCG, S, E).
% Push a new term onto DCG stack.
% If the argument is a proper list, its elements are prepended to the
% token stream; any other term is pushed as a single element.
dcg_push(List, S, ListS):- is_list(List), !, =(List,ListO), append(ListO, S, ListS).
dcg_push(A, S, [B|S]):- =(A,B).
:- fixup_exports.
| TeamSPoon/logicmoo_workspace | packs_sys/logicmoo_utils/prolog/logicmoo/dcg_must.pl | Perl | mit | 2,319 |
=head1 NAME
AtteanX::Parser::SPARQLJSON - SPARQL JSON Parser
=head1 VERSION
This document describes AtteanX::Parser::SPARQLJSON version 0.002
=head1 SYNOPSIS
use Attean;
my $parser = Attean->get_parser('SPARQLJSON')->new();
$parser->parse_list_from_io( $fh );
=head1 DESCRIPTION
...
=head1 METHODS
=over 4
=cut
use v5.14;
use warnings;
package AtteanX::Parser::SPARQLJSON 0.001 {
	use Attean;
	use Moo;
	use JSON;
	use Encode qw(decode);
	use Data::Dumper;	# Fixed: Dumper is used in the error path below but was never loaded

=item C<< canonical_media_type >>

Returns the canonical media type for SPARQL JSON results:
application/sparql-results+json.

=cut

	sub canonical_media_type { return "application/sparql-results+json" }

=item C<< media_types >>

Returns a list of media types that may be parsed with the SPARQL JSON parser:
application/sparql-results+json.

=cut

	sub media_types {
		return [qw(application/sparql-results+json)];
	}

=item C<< file_extensions >>

Returns a list of file extensions that may be parsed with the parser.

=cut

	sub file_extensions { return [qw(srj)] }

	with 'Attean::API::ResultOrTermParser';
	with 'Attean::API::AtOnceParser';

	# Slurp the entire handle and delegate to parse_list_from_bytes.
	sub parse_list_from_io {
		my $self	= shift;
		my $io		= shift;
		my $data	= do { local($/) = undef; <$io> };
		return $self->parse_list_from_bytes($data);
	}

	# Decode a SPARQL JSON results document from raw octets.  Returns a
	# single boolean term (Attean::Literal true/false) for ASK results,
	# or a list of Attean::Result bindings for SELECT results.  Dies on
	# an unrecognized RDF term type in the bindings.
	sub parse_list_from_bytes {
		my $self	= shift;
		my $octets	= shift;
		my $json	= decode('UTF-8', $octets, Encode::FB_CROAK);
		my $data	= from_json($json, {utf8 => 1});
		my $head	= $data->{head};
		my $vars	= $head->{vars};
		my $res		= $data->{results};
		if (defined(my $bool = $data->{boolean})) {
			return ($bool) ? Attean::Literal->true : Attean::Literal->false;
		} elsif (my $binds = $res->{bindings}) {
			my @results;
			foreach my $b (@$binds) {
				my %data;
				foreach my $v (@$vars) {
					if (defined(my $value = $b->{ $v })) {
						my $type	= $value->{type};
						if ($type eq 'uri') {
							my $data	= $value->{value};
							$data{ $v }	= Attean::IRI->new( $data );
						} elsif ($type eq 'bnode') {
							my $data	= $value->{value};
							$data{ $v }	= Attean::Blank->new( $data );
						} elsif ($type eq 'literal') {
							my $data	= $value->{value};
							if (my $lang = $value->{'xml:lang'}) {
								$data{ $v }	= Attean::Literal->new( value => $data, language => $lang );
							} else {
								$data{ $v }	= Attean::Literal->new( $data );
							}
						} elsif ($type eq 'typed-literal') {
							my $data	= $value->{value};
							my $dt	= $value->{datatype};
							$data{ $v }	= Attean::Literal->new( value => $data, datatype => $dt );
						} else {
							# Diagnostic dump of the whole document and the
							# offending binding before dying.
							warn Dumper($data, $b);
							die "Unknown node type $type during parsing of SPARQL JSON Results";
						}
					}
				}
				push(@results, Attean::Result->new( bindings => \%data ));
			}
			return @results;
		}
	}
}
1;
__END__
=back
=head1 BUGS
Please report any bugs or feature requests to through the GitHub web interface
at L<https://github.com/kasei/perlrdf/issues>.
=head1 AUTHOR
Gregory Todd Williams C<< <gwilliams@cpan.org> >>
=head1 COPYRIGHT
Copyright (c) 2006-2012 Gregory Todd Williams. This
program is free software; you can redistribute it and/or modify it under
the same terms as Perl itself.
=cut
| gitpan/Attean | lib/AtteanX/Parser/SPARQLJSON.pm | Perl | mit | 3,187 |
#!/usr/bin/env perl
use strict;
use warnings;

# Requires samtools: http://samtools.sourceforge.net/
my $samtools = "samtools";
# Echo a shell command to STDERR, then execute it through the shell.
# Returns the raw system() status (0 on success); warns, but does not
# die, on failure, so callers that ignore the result keep the original
# best-effort behavior. (Prototype removed: Perl prototypes alter
# parsing, not argument validation, and are best avoided.)
sub run {
    my $exe = shift;
    chomp $exe;
    print STDERR $exe."\n";
    my $status = system($exe);
    warn "Command failed (status $status): $exe\n" if $status != 0;
    return $status;
}
# For each BAM/SAM file given on the command line, write a sibling
# <basename>_mapped.bam containing only the mapped reads (-F 4).
my @bamfiles = @ARGV;
unless (@bamfiles) {
    die "Usage: $0 [BAM file(s)]\n";
}
foreach my $bamfile (@bamfiles) {
    unless (-e $bamfile) {
        print STDERR "Unable to find BAM file: $bamfile\n";
        next;
    }
    # Output name: input basename with any .bam/.sam extension stripped.
    my $basefile = $bamfile;
    $basefile =~ s{.+/}{}g;
    $basefile =~ s{\.(bam|sam)$}{}i;
    my $mapped = $basefile . "_mapped.bam";
    # NOTE(review): file names are interpolated into a shell command;
    # names containing spaces or shell metacharacters will break this.
    run("$samtools view -F 4 -b $bamfile > $mapped");
}
| mccrowjp/utilities | bam_mapped.pl | Perl | mit | 665 |
=encoding UTF-8
=head1 Name
sqitchtutorial-oracle - A tutorial introduction to Sqitch change management on Oracle
=head1 Synopsis
sqitch *
=head1 Description
This tutorial explains how to create a sqitch-enabled Oracle project, use a
VCS for deployment planning, and work with other developers to make sure
changes remain in sync and in the proper order.
We'll start by creating new project from scratch, a fictional antisocial
networking site called Flipr. All examples use L<Git|http://git-scm.com/> as
the VCS and L<Oracle|http://www.oracle.com/us/products/database/> as the
storage engine. Note that you will need to set
L<C<$ORACLE_HOME>|http://www.orafaq.com/wiki/ORACLE_HOME> so that all the
database connections will work.
If you'd like to manage a PostgreSQL database, see L<sqitchtutorial>.
If you'd like to manage an SQLite database, see L<sqitchtutorial-sqlite>.
If you'd like to manage a MySQL database, see L<sqitchtutorial-mysql>.
If you'd like to manage a Firebird database, see L<sqitchtutorial-firebird>.
If you'd like to manage a Vertica database, see L<sqitchtutorial-vertica>.
=head2 VM Configuration
Some instructions for setting up a VM for following along in this tutorial.
=over
=item *
See F<t/oracle.t> for instructions on downloading, installing, and configuring
the Oracle developer days VM.
=item *
Connect as the DBA via SQL*Plus:
sqlplus sys/oracle@localhost/ORCL as sysdba
=item *
Give user C<scott> the access it needs:
ALTER USER scott IDENTIFIED BY tiger;
GRANT ALL PRIVILEGES TO scott;
=item *
Add this entry to F<tnsnames.ora>:
FLIPR_TEST =
(DESCRIPTION =
(ADDRESS = (PROTOCOL = TCP)(HOST = localhost)(PORT = 1521))
(CONNECT_DATA =
(SERVER = DEDICATED)
(SERVICE_NAME = orcl)
)
)
=back
=head1 Starting a New Project
Usually the first thing to do when starting a new project is to create a
source code repository. So let's do that with Git:
> mkdir flipr
> cd flipr
> git init .
Initialized empty Git repository in /flipr/.git/
> touch README.md
> git add .
> git commit -am 'Initialize project, add README.'
[master (root-commit) 1bd134b] Initialize project, add README.
1 file changed, 38 insertions(+)
create mode 100644 README.md
If you're a Git user and want to follow along the history, the repository used
in these examples is L<on GitHub|https://github.com/theory/sqitch-oracle-intro>.
Now that we have a repository, let's get started with Sqitch. Every Sqitch
project must have a name associated with it, and, optionally, a unique URI. We
recommend including the URI, as it increases the uniqueness of object
identifiers internally, so let's specify one when we initialize Sqitch:
> sqitch --engine oracle init flipr --uri https://github.com/theory/sqitch-oracle-intro/
Created sqitch.conf
Created sqitch.plan
Created deploy/
Created revert/
Created verify/
Let's have a look at F<sqitch.conf>:
> cat sqitch.conf
[core]
engine = oracle
# plan_file = sqitch.plan
# top_dir = .
# deploy_dir = deploy
# revert_dir = revert
# verify_dir = verify
# extension = sql
# [engine "oracle"]
# target = db:oracle:
# registry =
# client = /usr/local/instantclient_11_2/sqlplus
Good, it picked up on the fact that we're creating changes for the Oracle
engine, thanks to the C<--engine oracle> option, and saved it to the file.
Furthermore, it wrote a commented-out C<[engine "oracle"]> section with all the
available Oracle engine-specific settings commented out and ready to be edited
as appropriate. This includes the path to
L<SQL*Plus|http://www.orafaq.com/wiki/SQL*Plus> in my C<$ORACLE_HOME>.
By default, Sqitch will read F<sqitch.conf> in the current directory for
settings. But it will also read F<~/.sqitch/sqitch.conf> for user-specific
settings. Let's tell it who we are, since this data will be used in all of our
projects:
> sqitch config --user user.name 'Marge N. O’Vera'
> sqitch config --user user.email 'marge@example.com'
Have a look at F<~/.sqitch/sqitch.conf> and you'll see this:
> cat ~/.sqitch/sqitch.conf
[user]
name = Marge N. O’Vera
email = marge@example.com
Which means that Sqitch will always properly identify us when planning and
committing changes. Back to the repository. Have a look at the plan file,
F<sqitch.plan>:
> cat sqitch.plan
%syntax-version=1.0.0-b2
%project=flipr
%uri=https://github.com/theory/sqitch-oracle-intro/
Note that it has picked up on the name and URI of the app we're building.
Sqitch uses this data to manage cross-project dependencies. The
C<%syntax-version> pragma is always set by Sqitch, so that it always knows how
to parse the plan, even if the format changes in the future.
Let's commit these changes and start creating the database changes.
> git add .
> git commit -am 'Initialize Sqitch configuration.'
[master bd82f41] Initialize Sqitch configuration.
2 files changed, 19 insertions(+)
create mode 100644 sqitch.conf
create mode 100644 sqitch.plan
=head1 Our First Change
First, our project will need an Oracle user and accompanying schema. This
creates a nice namespace for all of the objects that will be part of the flipr
app. Run this command:
> sqitch add appschema -n 'App user and schema for all flipr objects.'
Created deploy/appschema.sql
Created revert/appschema.sql
Created verify/appschema.sql
Added "appschema" to sqitch.plan
The L<C<add>|sqitch-add> command adds a database change to the plan and writes
deploy, revert, and verify scripts that represent the change. Now we edit
these files. The C<deploy> script's job is to create the user. So we add
this to F<deploy/appschema.sql>:
CREATE USER flipr IDENTIFIED BY whatever;
The C<revert> script's job is to precisely revert the change to the deploy
script, so we add this to F<revert/appschema.sql>:
DROP USER flipr;
Now we can try deploying this change. We'll assume you have an Oracle SID
named C<flipr_test> set up in your
C<F<TNSNAMES.ORA>|http://www.orafaq.com/wiki/Tnsnames.ora> file:
> sqitch deploy db:oracle://scott:tiger@/flipr_test
Adding registry tables to db:oracle://scott:@/flipr_test
Deploying changes to db:oracle://scott:@/flipr_test
+ appschema .. ok
First Sqitch created the registry tables used to track database changes. The
structure and name of the registry varies between databases, but in Oracle
they are simply stored in the current schema -- that is, the schema with the
same name as the user you've connected as. In this example, that schema is
C<scott>. Ideally, only Sqitch data will be stored in this schema, so it
probably makes the most sense to create a superuser named C<sqitch> or
something similar and use it to deploy changes.
If you'd like it to use a different database as the registry database, use
C<sqitch engine add oracle $name> to configure it (or via the
L<C<target> command|sqitch-target>; more L<below|/On Target>). This will be
useful if you don't want to use the same registry database to manage multiple
databases on the same server.
Next, Sqitch deploys changes to the target database, which we specified on the
command-line. We only have one change so far; the C<+> reinforces the idea
that the change is being I<added> to the database.
With this change deployed, if you connect to the database, you'll be able to
see the schema:
> echo "SELECT username FROM all_users WHERE username = 'FLIPR';" \
| sqlplus -S scott/tiger@flipr_test
USERNAME
------------------------------
FLIPR
=head2 Trust, But Verify
But that's too much work. Do you really want to do something like that after
every deploy?
Here's where the C<verify> script comes in. Its job is to test that the deploy
did what it was supposed to. It should do so without regard to any data that
might be in the database, and should throw an error if the deploy was not
successful. In Oracle, the simplest way to do so for schema is probably to
simply create an object in the schema. Put this SQL into
F<verify/appschema.sql>:
CREATE TABLE flipr.verify__ (id int);
DROP TABLE flipr.verify__;
In truth, you can use I<any> query that generates an SQL error if the schema
doesn't exist. This works because Sqitch configures SQL*Plus so that SQL
errors cause it to exit with the error code (more on that below). Another
handy way to do that is to divide by zero if an object doesn't exist. For
example, to throw an error when the C<flipr> schema does not exist, you could
do something like this:
SELECT 1/COUNT(*) FROM sys.all_users WHERE username = 'FLIPR';
Either way, run the C<verify> script with the L<C<verify>|sqitch-verify>
command:
> sqitch verify db:oracle://scott:tiger@/flipr_test
Verifying db:oracle://scott:@/flipr_test
* appschema .. ok
Verify successful
Looks good! If you want to make sure that the verify script correctly dies if
the schema doesn't exist, temporarily change the schema name in the script to
something that doesn't exist, something like:
CREATE TABLE nonesuch.verify__ (id int);
Then L<C<verify>|sqitch-verify> again:
> sqitch verify db:oracle://scott:tiger@/flipr_test
Verifying db:oracle://scott:@/flipr_test
* appschema .. CREATE TABLE nonesuch.verify__ (id int)
*
ERROR at line 1:
ORA-01918: user 'NONESUCH' does not exist
# Verify script "verify/appschema.sql" failed.
not ok
Verify Summary Report
---------------------
Changes: 1
Errors: 1
Verify failed
It's even nice enough to tell us what the problem is. Or, for the
divide-by-zero example, change the schema name:
SELECT 1/COUNT(*) FROM sys.all_users WHERE username = 'NONESUCH';
Then the verify will look something like:
> sqitch verify db:oracle://scott:tiger@/flipr_test
Verifying db:oracle://scott:@/flipr_test
* appschema .. SELECT 1/COUNT(*) FROM sys.all_users WHERE username = 'NONESUCH'
*
ERROR at line 1:
ORA-01476: divisor is equal to zero
# Verify script "verify/appschema.sql" failed.
not ok
Verify Summary Report
---------------------
Changes: 1
Errors: 1
Verify failed
Less useful error output, but enough to alert us that something has gone
wrong.
Don't forget to change the schema name back before continuing!
=head2 Status, Revert, Log, Repeat
For purely informational purposes, we can always see how a deployment was
recorded via the L<C<status>|sqitch-status> command, which reads the registry
tables from the database:
> sqitch status db:oracle://scott:tiger@/flipr_test
# On database db:oracle://scott:@/flipr_test
# Project: flipr
# Change: c59e700589fc03568e8f35f592c0d9b7c638cbdd
# Name: appschema
# Deployed: 2013-12-31 15:25:23 -0800
# By: Marge N. O’Vera <marge@example.com>
#
Nothing to deploy (up-to-date)
Let's make sure that we can revert the change:
> sqitch revert db:oracle://scott:tiger@/flipr_test
Revert all changes from db:oracle://scott:@/flipr_test? [Yes]
- appschema .. ok
The L<C<revert>|sqitch-revert> command first prompts to make sure that we
really do want to revert. This is to prevent unnecessary accidents. You can
pass the C<-y> option to disable the prompt. Also, notice the C<-> before the
change name in the output, which reinforces that the change is being
I<removed> from the database. And now the schema should be gone:
> echo "SELECT username FROM all_users WHERE username = 'FLIPR';" \
| sqlplus -S scott/tiger@flipr_test
no rows selected
And the status message should reflect as much:
> sqitch status db:oracle://scott:tiger@/flipr_test
# On database db:oracle://scott:@/flipr_test
No changes deployed
Of course, since nothing is deployed, the L<C<verify>|sqitch-verify> command
has nothing to verify:
> sqitch verify db:oracle://scott:tiger@/flipr_test
Verifying db:oracle://scott:@/flipr_test
No changes deployed
However, we still have a record that the change happened, visible via the
L<C<log>|sqitch-log> command:
> sqitch log db:oracle://scott:tiger@/flipr_test
On database db:oracle://scott:@/flipr_test
Revert c59e700589fc03568e8f35f592c0d9b7c638cbdd
Name: appschema
Committer: Marge N. O’Vera <marge@example.com>
Date: 2013-12-31 16:19:38 -0800
App user and schema for all flipr objects.
Deploy c59e700589fc03568e8f35f592c0d9b7c638cbdd
Name: appschema
Committer: Marge N. O’Vera <marge@example.com>
Date: 2013-12-31 15:25:23 -0800
App user and schema for all flipr objects.
Note that the actions we took are shown in reverse chronological order, with
the revert first and then the deploy.
Cool. Now let's commit it.
> git add .
> git commit -m 'Add flipr schema.'
[master e0e0b11] Add flipr schema.
4 files changed, 11 insertions(+)
create mode 100644 deploy/appschema.sql
create mode 100644 revert/appschema.sql
create mode 100644 verify/appschema.sql
And then deploy again. This time, let's use the C<--verify> option, so that
the C<verify> script is applied when the change is deployed:
> sqitch deploy --verify db:oracle://scott:tiger@/flipr_test
Deploying changes to db:oracle://scott:@/flipr_test
+ appschema .. ok
And now the schema should be back:
> echo "SELECT username FROM all_users WHERE username = 'FLIPR';" \
| sqlplus -S scott/tiger@flipr_test
USERNAME
------------------------------
FLIPR
When we look at the status, the deployment will be there:
> sqitch status db:oracle://scott:tiger@/flipr_test
# On database db:oracle://scott:@/flipr_test
# Project: flipr
# Change: c59e700589fc03568e8f35f592c0d9b7c638cbdd
# Name: appschema
# Deployed: 2013-12-31 16:22:01 -0800
# By: Marge N. O’Vera <marge@example.com>
#
Nothing to deploy (up-to-date)
=head1 On Target
I'm getting a little tired of always having to type
C<db:oracle://scott:tiger@/flipr_test>, aren't you? This
L<database connection URI|https://github.com/theory/uri-db/> tells Sqitch how
to connect to the deployment target, but we don't have to keep using the URI.
We can name the target:
> sqitch target add flipr_test db:oracle://scott:tiger@/flipr_test
The L<C<target>|sqitch-target> command, inspired by
L<C<git-remote>|http://git-scm.com/docs/git-remote>, allows management of one
or more named deployment targets. We've just added a target named
C<flipr_test>, which means we can use the string C<flipr_test> for the target,
rather than the URI. But since we're doing so much testing, we can also tell
Sqitch to deploy to the C<flipr_test> target by default:
> sqitch engine add oracle flipr_test
Now we can omit the target argument altogether, unless we need to deploy to
another database. Which we will, eventually, but at least our examples will be
simpler from here on in, e.g.:
> sqitch status
# On database flipr_test
# Project: flipr
# Change: c59e700589fc03568e8f35f592c0d9b7c638cbdd
# Name: appschema
# Deployed: 2013-12-31 16:22:01 -0800
# By: Marge N. O’Vera <marge@example.com>
#
Nothing to deploy (up-to-date)
Yay, that allows things to be a little more concise. Let's also make sure that
changes are verified after deploying them:
> sqitch config --bool deploy.verify true
> sqitch config --bool rebase.verify true
We'll see the L<C<rebase>|sqitch-rebase> command a bit later. In the meantime,
let's commit the new configuration and make some more changes!
> git commit -am 'Set default target and always verify.'
[master c4a308a] Set default target and always verify.
1 file changed, 8 insertions(+)
=head1 Deploy with Dependency
Let's add another change, this time to create a table. Our app will need
users, of course, so we'll create a table for them. First, add the new change:
> sqitch add users --requires appschema -n 'Creates table to track our users.'
Created deploy/users.sql
Created revert/users.sql
Created verify/users.sql
Added "users [appschema]" to sqitch.plan
Note that we're requiring the C<appschema> change as a dependency of the new
C<users> change. Although that change has already been added to the plan and
therefore should always be applied before the C<users> change, it's a good
idea to be explicit about dependencies.
Now edit the scripts. When you're done, F<deploy/users.sql> should look like
this:
-- Deploy users
-- requires: appschema
CREATE TABLE flipr.users (
nickname VARCHAR2(512 CHAR) PRIMARY KEY,
password VARCHAR2(512 CHAR) NOT NULL,
timestamp TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP NOT NULL
);
A few things to notice here. On the second line, the dependence on the
C<appschema> change has been listed in a comment. This doesn't do anything,
but the default Oracle C<deploy> template lists it here for your reference
while editing the file. Useful, right?
The table itself will be created in the C<flipr> schema. This is why we need
to require the C<appschema> change.
Notice that we've done nothing about error handling. Sqitch needs SQL*Plus
to return failure when a script experiences an error, so one might expect that
each script would need to start with lines like these:
WHENEVER OSERROR EXIT 9
WHENEVER SQLERROR EXIT SQL.SQLCODE
However, Sqitch always sets these error handling parameters before it executes
your scripts, so you don't have to.
Now for the verify script. The simplest way to check that the table was
created and has the expected columns without touching the data? Just select
from the table with a false C<WHERE> clause. Add this to F<verify/users.sql>:
SELECT nickname, password, timestamp
FROM flipr.users
WHERE 1 = 1;
Note that, as mentioned above, we do not need to set the SQL*Plus error
handling ourselves: Sqitch configures it before running every script.
Now for the revert script: all we have to do is drop the table. Add this to
F<revert/users.sql>:
DROP TABLE flipr.users;
Couldn't be much simpler, right? Let's deploy this bad boy:
> sqitch deploy
Deploying changes to flipr_test
+ users .. ok
We know, since verification is enabled, that the table must have been created.
But for the purposes of visibility, let's have a quick look:
> echo "DESCRIBE flipr.users;" | sqlplus -S scott/tiger@flipr_test
Name Null? Type
----------------------------------------- -------- ----------------------------
NICKNAME NOT NULL VARCHAR2(512 CHAR)
PASSWORD NOT NULL VARCHAR2(512 CHAR)
TIMESTAMP NOT NULL TIMESTAMP(6) WITH TIME ZONE
We can also verify all currently deployed changes with the
L<C<verify>|sqitch-verify> command:
> sqitch verify
Verifying flipr_test
* appschema .. ok
* users ...... ok
Verify successful
Now have a look at the status:
> sqitch status
# On database flipr_test
# Project: flipr
# Change: 6840dc13beb0cd716b8bd3979b03a259c1e94405
# Name: users
# Deployed: 2013-12-31 16:32:31 -0800
# By: Marge N. O’Vera <marge@example.com>
#
Nothing to deploy (up-to-date)
Success! Let's make sure we can revert the change, as well:
> sqitch revert --to @HEAD^ -y
Reverting changes to appschema from flipr_test
- users .. ok
Note that we've used the C<--to> option to specify the change to revert to.
And what do we revert to? The symbolic tag C<@HEAD>, when passed to
L<C<revert>|sqitch-revert>, always refers to the last change deployed to the
database. (For other commands, it refers to the last change in the plan.)
Appending the caret (C<^>) tells Sqitch to select the change I<prior> to the
last deployed change. So we revert to C<appschema>, the penultimate change.
The other potentially useful symbolic tag is C<@ROOT>, which refers to the
first change deployed to the database (or in the plan, depending on the
command).
Back to the database. The C<users> table should be gone but the C<flipr> schema
should still be around:
> echo "DESCRIBE flipr.users;" | sqlplus -S scott/tiger@flipr_test
ERROR:
ORA-04043: object flipr.users does not exist
The L<C<status>|sqitch-status> command politely informs us that we have
undeployed changes:
> sqitch status
# On database flipr_test
# Project: flipr
# Change: c59e700589fc03568e8f35f592c0d9b7c638cbdd
# Name: appschema
# Deployed: 2013-12-31 16:22:01 -0800
# By: Marge N. O’Vera <marge@example.com>
#
Undeployed change:
* users
As does the L<C<verify>|sqitch-verify> command:
> sqitch verify
Verifying flipr_test
* appschema .. ok
Undeployed change:
* users
Verify successful
Note that the verify is successful, because all currently-deployed changes are
verified. The list of undeployed changes (just "users" here) reminds us about
the current state.
Okay, let's commit and deploy again:
> git add .
> git commit -am 'Add users table.'
[master 2506312] Add users table.
4 files changed, 17 insertions(+)
create mode 100644 deploy/users.sql
create mode 100644 revert/users.sql
create mode 100644 verify/users.sql
> sqitch deploy
Deploying changes to flipr_test
+ users .. ok
Looks good. Check the status:
> sqitch status
# On database flipr_test
# Project: flipr
# Change: 6840dc13beb0cd716b8bd3979b03a259c1e94405
# Name: users
# Deployed: 2013-12-31 16:34:28 -0800
# By: Marge N. O’Vera <marge@example.com>
#
Nothing to deploy (up-to-date)
Excellent. Let's do some more!
=head1 Add Two at Once
Let's add a couple more changes to add functions for managing users.
> sqitch add insert_user --requires users --requires appschema \
-n 'Creates a function to insert a user.'
Created deploy/insert_user.sql
Created revert/insert_user.sql
Created verify/insert_user.sql
Added "insert_user [users appschema]" to sqitch.plan
> sqitch add change_pass --requires users --requires appschema \
-n 'Creates a function to change a user password.'
Created deploy/change_pass.sql
Created revert/change_pass.sql
Created verify/change_pass.sql
Added "change_pass [users appschema]" to sqitch.plan
Now might be a good time to have a look at the deployment plan:
> cat sqitch.plan
%syntax-version=1.0.0-b2
%project=flipr
%uri=https://github.com/theory/sqitch-oracle-intro/
appschema 2013-12-31T22:34:42Z Marge N. O’Vera <marge@example.com> # App user and schema for all flipr objects.
users [appschema] 2014-01-01T00:31:20Z Marge N. O’Vera <marge@example.com> # Creates table to track our users.
insert_user [users appschema] 2014-01-01T00:35:21Z Marge N. O’Vera <marge@example.com> # Creates a function to insert a user.
change_pass [users appschema] 2014-01-01T00:35:28Z Marge N. O’Vera <marge@example.com> # Creates a function to change a user password.
Each change appears on a single line with the name of the change, a bracketed
list of dependencies, a timestamp, the name and email address of the user who
planned the change, and a note.
Let's write the code for the new changes. Here's what
F<deploy/insert_user.sql> should look like:
-- Deploy insert_user
-- requires: users
-- requires: appschema
CREATE OR REPLACE PROCEDURE flipr.insert_user(
nickname VARCHAR2,
password VARCHAR2
) AS
BEGIN
INSERT INTO flipr.users VALUES(
nickname,
LOWER( RAWTOHEX( UTL_RAW.CAST_TO_RAW(
sys.dbms_obfuscation_toolkit.md5(input_string => password)
) ) ),
DEFAULT
);
END;
/
SHOW ERRORS;
-- Drop and die on error.
DECLARE
l_err_count INTEGER;
BEGIN
SELECT COUNT(*)
INTO l_err_count
FROM all_errors
WHERE owner = 'FLIPR'
AND name = 'INSERT_USER';
IF l_err_count > 0 THEN
EXECUTE IMMEDIATE 'DROP PROCEDURE flipr.insert_user';
raise_application_error(-20001, 'Errors in FLIPR.INSERT_USER');
END IF;
END;
/
The C<DECLARE> PL/SQL block is to catch compilation warnings, which are not
normally fatal. It's admittedly
L<a bit convoluted|http://stackoverflow.com/a/16429231/79202>, but ensures that
errors propagate and a broken function get dropped.
Here's what F<verify/insert_user.sql> might look like:
-- Verify insert_user
DESCRIBE flipr.insert_user;
We simply take advantage of the fact that C<DESCRIBE> throws an exception if
the specified function does not exist.
And F<revert/insert_user.sql> should look something like this:
-- Revert insert_user
DROP PROCEDURE flipr.insert_user;
Now for C<change_pass>; F<deploy/change_pass.sql> might look like this:
-- Deploy change_pass
-- requires: users
-- requires: appschema
CREATE OR REPLACE PROCEDURE flipr.change_pass(
nick VARCHAR2,
oldpass VARCHAR2,
newpass VARCHAR2
) IS
flipr_auth_failed EXCEPTION;
BEGIN
UPDATE flipr.users
SET password = LOWER( RAWTOHEX( UTL_RAW.CAST_TO_RAW(
sys.dbms_obfuscation_toolkit.md5(input_string => newpass)
) ) )
WHERE nickname = nick
AND password = LOWER( RAWTOHEX( UTL_RAW.CAST_TO_RAW(
sys.dbms_obfuscation_toolkit.md5(input_string => oldpass)
) ) );
IF SQL%ROWCOUNT = 0 THEN RAISE flipr_auth_failed; END IF;
END;
/
SHOW ERRORS;
-- Drop and die on error.
DECLARE
l_err_count INTEGER;
BEGIN
SELECT COUNT(*)
INTO l_err_count
FROM all_errors
WHERE owner = 'FLIPR'
AND name = 'CHANGE_PASS';
IF l_err_count > 0 THEN
EXECUTE IMMEDIATE 'DROP PROCEDURE flipr.CHANGE_PASS';
raise_application_error(-20001, 'Errors in FLIPR.CHANGE_PASS');
END IF;
END;
/
We again need the C<DECLARE> PL/SQL block to detect compilation warnings and
make the script die. Use C<DESCRIBE> in F<verify/change_pass.sql> again:
-- Verify change_pass
DESCRIBE flipr.change_pass;
And of course, its C<revert> script, F<revert/change_pass.sql>, should look
something like:
-- Revert change_pass
DROP PROCEDURE flipr.change_pass;
Try em out!
> sqitch deploy
Deploying changes to flipr_test
+ insert_user .. No errors.
ok
+ change_pass .. No errors.
ok
Looks good. The "No errors" notices come from the C<SHOW ERRORS> SQL*Plus
command. It's not very useful here, but very useful if there are compilation
errors. If it bothers you, you can drop the C<SHOW ERRORS> line and select the
error for display in the C<DECLARE> block, instead.
Now, do we have the functions? Of course we do, they were verified. Still,
have a look:
> echo "DESCRIBE flipr.insert_user;\nDESCRIBE flipr.change_pass;" \
| sqlplus -S scott/tiger@flipr_test
PROCEDURE flipr.insert_user
Argument Name Type In/Out Default?
------------------------------ ----------------------- ------ --------
NICKNAME VARCHAR2 IN
PASSWORD VARCHAR2 IN
PROCEDURE flipr.change_pass
Argument Name Type In/Out Default?
------------------------------ ----------------------- ------ --------
NICK VARCHAR2 IN
OLDPASS VARCHAR2 IN
NEWPASS VARCHAR2 IN
And what's the status?
> sqitch status
# On database flipr_test
# Project: flipr
# Change: e1c9df6a95da835769eb560790588c16174f78df
# Name: change_pass
# Deployed: 2013-12-31 16:37:22 -0800
# By: Marge N. O’Vera <marge@example.com>
#
Nothing to deploy (up-to-date)
Looks good. Let's make sure revert works:
> sqitch revert -y --to @HEAD^^
Reverting changes to users from flipr_test
- change_pass .. ok
- insert_user .. ok
> echo "DESCRIBE flipr.insert_user;\nDESCRIBE flipr.change_pass;" \
| sqlplus -S dwheeler/dwheeler@flipr_test
ERROR:
ORA-04043: object flipr.insert_user does not exist
ERROR:
ORA-04043: object flipr.change_pass does not exist
Note the use of C<@HEAD^^> to specify that the revert be to two changes prior
the last deployed change. Looks good. Let's do the commit and re-deploy dance:
> git add .
> git commit -m 'Add `insert_user()` and `change_pass()`.'
[master 6b6797e] Add `insert_user()` and `change_pass()`.
7 files changed, 92 insertions(+)
create mode 100644 deploy/change_pass.sql
create mode 100644 deploy/insert_user.sql
create mode 100644 revert/change_pass.sql
create mode 100644 revert/insert_user.sql
create mode 100644 verify/change_pass.sql
create mode 100644 verify/insert_user.sql
> sqitch deploy
Deploying changes to flipr_test
+ insert_user .. No errors.
ok
+ change_pass .. No errors.
ok
> sqitch status
# On database flipr_test
# Project: flipr
# Change: e1c9df6a95da835769eb560790588c16174f78df
# Name: change_pass
# Deployed: 2013-12-31 16:38:46 -0800
# By: Marge N. O’Vera <marge@example.com>
#
Nothing to deploy (up-to-date)
> sqitch verify
Verifying flipr_test
* appschema .... ok
* users ........ ok
* insert_user .. ok
* change_pass .. ok
Verify successful
Great, we're fully up-to-date!
=head1 Ship It!
Let's do a first release of our app. Let's call it C<1.0.0-dev1> Since we want
to have it go out with deployments tied to the release, let's tag it:
> sqitch tag v1.0.0-dev1 -n 'Tag v1.0.0-dev1.'
Tagged "change_pass" with @v1.0.0-dev1
> git commit -am 'Tag the database with v1.0.0-dev1.'
[master eae5f71] Tag the database with v1.0.0-dev1.
1 file changed, 1 insertion(+)
> git tag v1.0.0-dev1 -am 'Tag v1.0.0-dev1'
We can try deploying to make sure the tag gets picked up by deploying to a new
database, like so (assuming you have an Oracle SID named C<flipr_dev> that
points to a different database):
> sqitch deploy db:oracle://scott:tiger@/flipr_dev
Adding registry tables to db:oracle://scott:@/flipr_dev
Deploying changes to db:oracle://scott:@/flipr_dev
+ appschema ................. ok
+ users ..................... ok
+ insert_user ............... No errors.
ok
+ change_pass @v1.0.0-dev1 .. No errors.
ok
Great, all four changes were deployed and C<change_pass> was tagged with
C<@v1.0.0-dev1>. Let's have a look at the status:
> sqitch status db:oracle://scott:tiger@/flipr_dev
# On database db:oracle://scott:tiger@/flipr_dev
# Project: flipr
# Change: e1c9df6a95da835769eb560790588c16174f78df
# Name: change_pass
# Deployed: 2013-12-31 16:40:02 -0800
# By: Marge N. O’Vera <marge@example.com>
#
Nothing to deploy (up-to-date)
Note the listing of the tag as part of the status message. Now let's bundle
everything up for release:
> sqitch bundle
Bundling into bundle/
Writing config
Writing plan
Writing scripts
+ appschema
+ users
+ insert_user
+ change_pass @v1.0.0-dev1
Now we can package the F<bundle> directory and distribute it. When it gets
installed somewhere, users can use Sqitch to deploy to the database. Let's try
deploying it to yet another database (again, assuming you have a SID named C<flipr_prod>):
> cd bundle
> sqitch deploy db:oracle://scott:tiger@/flipr_prod
Adding registry tables to db:oracle://scott:@/flipr_prod
Deploying changes to flipr_prod
+ appschema ................. ok
+ users ..................... ok
+ insert_user ............... ok
+ change_pass @v1.0.0-dev1 .. ok
Looks much the same as before, eh? Package it up and ship it!
=head1 Flip Out
Now that we've got the basics of user management done, let's get to work on
the core of our product, the "flip." Since other folks are working on other
tasks in the repository, we'll work on a branch, so we can all stay out of
each other's way. So let's branch:
> git checkout -b flips
Switched to a new branch 'flips'
Now we can add a new change to create a table for our flips.
> sqitch add flips -r appschema -r users -n 'Adds table for storing flips.'
Created deploy/flips.sql
Created revert/flips.sql
Created verify/flips.sql
Added "flips [appschema users]" to sqitch.plan
You know the drill by now. Edit F<deploy/flips.sql>:
-- Deploy flips
-- requires: appschema
-- requires: users
CREATE TABLE flipr.flips (
id INTEGER PRIMARY KEY,
nickname VARCHAR2(512 CHAR) NOT NULL REFERENCES flipr.users(nickname),
body VARCHAR2(180 CHAR) NOT NULL,
timestamp TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP NOT NULL
);
CREATE SEQUENCE flipr.flip_id_seq START WITH 1 INCREMENT BY 1 NOCACHE;
CREATE OR REPLACE TRIGGER flipr.flip_pk BEFORE INSERT ON flipr.flips
FOR EACH ROW WHEN (NEW.id IS NULL)
DECLARE
v_id flipr.flips.id%TYPE;
BEGIN
SELECT flipr.flip_id_seq.nextval INTO v_id FROM DUAL;
:new.id := v_id;
END;
/
Edit F<verify/flips.sql>:
-- Verify flips
DESCRIBE flipr.flips;
And edit F<revert/flips.sql>:
-- Revert flips
DROP TRIGGER flipr.flip_pk;
DROP SEQUENCE flipr.flip_id_seq;
DROP TABLE flipr.flips;
And give it a whirl:
> sqitch deploy
Deploying changes to flipr_test
+ flips .. ok
Look good?
> sqitch status --show-tags
# On database flipr_test
# Project: flipr
# Change: 8e1573bb5ce5dfc239d5370c33d6e10820234aad
# Name: flips
# Deployed: 2013-12-31 16:51:54 -0800
# By: Marge N. O’Vera <marge@example.com>
#
# Tag:
# @v1.0.0-dev1 - 2013-12-31 16:44:00 -0800 - Marge N. O’Vera <marge@example.com>
#
Nothing to deploy (up-to-date)
Note the use of C<--show-tags> to show all the deployed tags. Now make it so:
> git add .
> git commit -am 'Add flips table.'
[flips bbea131] Add flips table.
4 files changed, 32 insertions(+)
create mode 100644 deploy/flips.sql
create mode 100644 revert/flips.sql
create mode 100644 verify/flips.sql
=head1 Wash, Rinse, Repeat
Now comes the time to add functions to manage flips. I'm sure you have things
nailed down now. Go ahead and add C<insert_flip> and C<delete_flip> changes
and commit them. The C<insert_flip> deploy script might look something like:
-- Deploy insert_flip
-- requires: flips
-- requires: appschema
CREATE OR REPLACE PROCEDURE flipr.insert_flip(
nickname VARCHAR2,
body VARCHAR2
) AS
BEGIN
INSERT INTO flipr.flips (nickname, body)
VALUES (nickname, body);
END;
/
SHOW ERRORS;
-- Drop and die on error.
DECLARE
l_err_count INTEGER;
BEGIN
SELECT COUNT(*)
INTO l_err_count
FROM all_errors
WHERE owner = 'FLIPR'
AND name = 'INSERT_FLIP';
IF l_err_count > 0 THEN
EXECUTE IMMEDIATE 'DROP PROCEDURE flipr.insert_flip';
raise_application_error(-20001, 'Errors in FLIPR.INSERT_FLIP');
END IF;
END;
/
And the C<delete_flip> deploy script might look something like:
-- Deploy delete_flip
-- requires: flips
-- requires: appschema
CREATE OR REPLACE PROCEDURE flipr.delete_flip(
flip_id INTEGER
) IS
flipr_flip_delete_failed EXCEPTION;
BEGIN
DELETE FROM flipr.flips WHERE id = flip_id;
IF SQL%ROWCOUNT = 0 THEN RAISE flipr_flip_delete_failed; END IF;
END;
/
SHOW ERRORS;
-- Drop and die on error.
DECLARE
l_err_count INTEGER;
BEGIN
SELECT COUNT(*)
INTO l_err_count
FROM all_errors
WHERE owner = 'FLIPR'
AND name = 'DELETE_FLIP';
IF l_err_count > 0 THEN
EXECUTE IMMEDIATE 'DROP PROCEDURE flipr.delete_flip';
raise_application_error(-20001, 'Errors in FLIPR.DELETE_FLIP');
END IF;
END;
/
The C<verify> scripts are:
-- Verify insert_flip
DESCRIBE flipr.insert_flip;
And:
-- Verify delete_flip
DESCRIBE flipr.delete_flip;
The C<revert> scripts are:
-- Revert insert_flip
DROP PROCEDURE flipr.insert_flip;
And:
-- Revert delete_flip
DROP PROCEDURE flipr.delete_flip;
Check the L<example git repository|https://github.com/theory/sqitch-oracle-intro> for
the complete details. Test L<C<deploy>|sqitch-deploy> and
L<C<revert>|sqitch-revert>, then commit it to the repository. The status
should end up looking something like this:
> sqitch status --show-tags
# On database flipr_test
# Project: flipr
# Change: a47be5a474eaad1a28546666eadeb0eba3ac12dc
# Name: delete_flip
# Deployed: 2013-12-31 16:54:31 -0800
# By: Marge N. O’Vera <marge@example.com>
#
# Tag:
# @v1.0.0-dev1 - 2013-12-31 16:44:00 -0800 - Marge N. O’Vera <marge@example.com>
#
Nothing to deploy (up-to-date)
Good, we've finished this feature. Time to merge back into C<master>.
=head2 Emergency
Let's do it:
> git checkout master
Switched to branch 'master'
> git pull
Updating eae5f71..a16f97c
Fast-forward
deploy/delete_list.sql | 35 +++++++++++++++++++++++++++++++++++
deploy/insert_list.sql | 33 +++++++++++++++++++++++++++++++++
deploy/lists.sql | 10 ++++++++++
revert/delete_list.sql | 3 +++
revert/insert_list.sql | 3 +++
revert/lists.sql | 3 +++
sqitch.plan | 4 ++++
verify/delete_list.sql | 3 +++
verify/insert_list.sql | 3 +++
verify/lists.sql | 5 +++++
10 files changed, 102 insertions(+)
create mode 100644 deploy/delete_list.sql
create mode 100644 deploy/insert_list.sql
create mode 100644 deploy/lists.sql
create mode 100644 revert/delete_list.sql
create mode 100644 revert/insert_list.sql
create mode 100644 revert/lists.sql
create mode 100644 verify/delete_list.sql
create mode 100644 verify/insert_list.sql
create mode 100644 verify/lists.sql
Hrm, that's interesting. Looks like someone made some changes to C<master>.
They added list support. Well, let's see what happens when we merge our
changes.
> git merge --no-ff flips
Auto-merging sqitch.plan
CONFLICT (content): Merge conflict in sqitch.plan
Automatic merge failed; fix conflicts and then commit the result.
Oh, a conflict in F<sqitch.plan>. Not too surprising, since both the merged
C<lists> branch and our C<flips> branch added changes to the plan. Let's try a
different approach.
The truth is, we got lazy. Those changes when we pulled master from the origin
should have raised a red flag. It's considered a bad practice not to look at
what's changed in C<master> before merging in a branch. What one I<should> do
is either:
=over
=item *
Rebase the C<flips> branch from master before merging. This "rewinds" the
branch changes, pulls from C<master>, and then replays the changes back on top
of the pulled changes.
=item *
Create a patch and apply I<that> to master. This is the sort of thing you
might have to do if you're sending changes to another user, especially if the
VCS is not Git.
=back
So let's restore things to how they were at master:
> git reset --hard HEAD
HEAD is now at a16f97c Merge branch 'lists'
That throws out our botched merge. Now let's go back to our branch and rebase
it on C<master>:
> git checkout flips
Switched to branch 'flips'
> git rebase master
First, rewinding head to replay your work on top of it...
Applying: Add flips table.
Using index info to reconstruct a base tree...
M sqitch.plan
Falling back to patching base and 3-way merge...
Auto-merging sqitch.plan
CONFLICT (content): Merge conflict in sqitch.plan
Failed to merge in the changes.
Patch failed at 0001 Add flips table.
The copy of the patch that failed is found in:
.git/rebase-apply/patch
When you have resolved this problem, run "git rebase --continue".
If you prefer to skip this patch, run "git rebase --skip" instead.
To check out the original branch and stop rebasing, run "git rebase --abort".
Oy, that's kind of a pain. It seems like no matter what we do, we'll need to
resolve conflicts in that file. Except in Git. Fortunately for us, we can tell
Git to resolve conflicts in F<sqitch.plan> differently. Because we only ever
append lines to the file, we can have it use the "union" merge driver, which,
according to L<its
docs|http://git-scm.com/docs/gitattributes#_built-in_merge_drivers>:
=over
Run 3-way file level merge for text files, but take lines from both versions,
instead of leaving conflict markers. This tends to leave the added lines in
the resulting file in random order and the user should verify the result. Do
not use this if you do not understand the implications.
=back
This has the effect of appending lines from all the merging files, which is
exactly what we need. So let's give it a try. First, back out the botched
rebase:
> git rebase --abort
Now add the union merge driver to F<.gitattributes> for F<sqitch.plan>
and rebase again:
> echo sqitch.plan merge=union > .gitattributes
> git rebase master
First, rewinding head to replay your work on top of it...
Applying: Add flips table.
Using index info to reconstruct a base tree...
M sqitch.plan
Falling back to patching base and 3-way merge...
Auto-merging sqitch.plan
Applying: Add functions to insert and delete flips.
Using index info to reconstruct a base tree...
M sqitch.plan
Falling back to patching base and 3-way merge...
Auto-merging sqitch.plan
Ah, that looks a bit better. Let's have a look at the plan:
> cat sqitch.plan
%syntax-version=1.0.0-b2
%project=flipr
%uri=https://github.com/theory/sqitch-oracle-intro/
appschema 2013-12-31T22:34:42Z Marge N. O’Vera <marge@example.com> # App user and schema for all flipr objects.
users [appschema] 2014-01-01T00:31:20Z Marge N. O’Vera <marge@example.com> # Creates table to track our users.
insert_user [users appschema] 2014-01-01T00:35:21Z Marge N. O’Vera <marge@example.com> # Creates a function to insert a user.
change_pass [users appschema] 2014-01-01T00:35:28Z Marge N. O’Vera <marge@example.com> # Creates a function to change a user password.
@v1.0.0-dev1 2014-01-01T00:39:35Z Marge N. O’Vera <marge@example.com> # Tag v1.0.0-dev1.
lists [appschema users] 2014-01-01T00:43:46Z Marge N. O’Vera <marge@example.com> # Adds table for storing lists.
insert_list [lists appschema] 2014-01-01T00:45:24Z Marge N. O’Vera <marge@example.com> # Creates a function to insert a list.
delete_list [lists appschema] 2014-01-01T00:45:43Z Marge N. O’Vera <marge@example.com> # Creates a function to delete a list.
flips [appschema users] 2014-01-01T00:51:15Z Marge N. O’Vera <marge@example.com> # Adds table for storing flips.
insert_flip [flips appschema] 2014-01-01T00:53:00Z Marge N. O’Vera <marge@example.com> # Creates a function to insert a flip.
delete_flip [flips appschema] 2014-01-01T00:53:16Z Marge N. O’Vera <marge@example.com> # Creates a function to delete a flip.
Note that it has appended the changes from the merged "lists" branch, and then
merged the changes from our "flips" branch. Test it to make sure it works as
expected:
> sqitch rebase -y
Reverting all changes from flipr_test
- delete_flip ............... ok
- insert_flip ............... ok
- flips ..................... ok
- change_pass @v1.0.0-dev1 .. ok
- insert_user ............... ok
- users ..................... ok
- appschema ................. ok
Deploying changes to flipr_test
+ appschema ................. ok
+ users ..................... ok
+ insert_user ............... No errors.
ok
+ change_pass @v1.0.0-dev1 .. No errors.
ok
+ lists ..................... ok
+ insert_list ............... No errors.
ok
+ delete_list ............... No errors.
ok
+ flips ..................... ok
+ insert_flip ............... No errors.
ok
+ delete_flip ............... No errors.
ok
Note the use of L<C<rebase>|sqitch-rebase>, which combines a
L<C<revert>|sqitch-revert> and a L<C<deploy>|sqitch-deploy> into a single
command. Handy, right? It correctly reverted our changes, and then deployed
them all again in the proper order. So let's commit F<.gitattributes>; seems
worthwhile to keep that change:
> git add .
> git commit -m 'Add `.gitattributes` with union merge for `sqitch.plan`.'
[flips 383691f] Add `.gitattributes` with union merge for `sqitch.plan`.
1 file changed, 1 insertion(+)
create mode 100644 .gitattributes
=head2 Merges Mastered
And now, finally, we can merge into C<master>:
> git checkout master
Switched to branch 'master'
> git merge --no-ff flips -m "Merge branch 'flips'"
Merge made by the 'recursive' strategy.
.gitattributes | 1 +
deploy/delete_flip.sql | 32 ++++++++++++++++++++++++++++++++
deploy/flips.sql | 22 ++++++++++++++++++++++
deploy/insert_flip.sql | 32 ++++++++++++++++++++++++++++++++
revert/delete_flip.sql | 3 +++
revert/flips.sql | 5 +++++
revert/insert_flip.sql | 3 +++
sqitch.plan | 3 +++
verify/delete_flip.sql | 3 +++
verify/flips.sql | 3 +++
verify/insert_flip.sql | 3 +++
11 files changed, 110 insertions(+)
create mode 100644 .gitattributes
create mode 100644 deploy/delete_flip.sql
create mode 100644 deploy/flips.sql
create mode 100644 deploy/insert_flip.sql
create mode 100644 revert/delete_flip.sql
create mode 100644 revert/flips.sql
create mode 100644 revert/insert_flip.sql
create mode 100644 verify/delete_flip.sql
create mode 100644 verify/flips.sql
create mode 100644 verify/insert_flip.sql
And double-check our work:
> cat sqitch.plan
%syntax-version=1.0.0-b2
%project=flipr
%uri=https://github.com/theory/sqitch-oracle-intro/
appschema 2013-12-31T22:34:42Z Marge N. O’Vera <marge@example.com> # App user and schema for all flipr objects.
users [appschema] 2014-01-01T00:31:20Z Marge N. O’Vera <marge@example.com> # Creates table to track our users.
insert_user [users appschema] 2014-01-01T00:35:21Z Marge N. O’Vera <marge@example.com> # Creates a function to insert a user.
change_pass [users appschema] 2014-01-01T00:35:28Z Marge N. O’Vera <marge@example.com> # Creates a function to change a user password.
@v1.0.0-dev1 2014-01-01T00:39:35Z Marge N. O’Vera <marge@example.com> # Tag v1.0.0-dev1.
lists [appschema users] 2014-01-01T00:43:46Z Marge N. O’Vera <marge@example.com> # Adds table for storing lists.
insert_list [lists appschema] 2014-01-01T00:45:24Z Marge N. O’Vera <marge@example.com> # Creates a function to insert a list.
delete_list [lists appschema] 2014-01-01T00:45:43Z Marge N. O’Vera <marge@example.com> # Creates a function to delete a list.
flips [appschema users] 2014-01-01T00:51:15Z Marge N. O’Vera <marge@example.com> # Adds table for storing flips.
insert_flip [flips appschema] 2014-01-01T00:53:00Z Marge N. O’Vera <marge@example.com> # Creates a function to insert a flip.
delete_flip [flips appschema] 2014-01-01T00:53:16Z Marge N. O’Vera <marge@example.com> # Creates a function to delete a flip.
Much much better, a nice clean master now. And because it is now identical to
the "flips" branch, we can just carry on. Go ahead and tag it, bundle, and
release:
> sqitch tag v1.0.0-dev2 -n 'Tag v1.0.0-dev2.'
Tagged "delete_flip" with @v1.0.0-dev2
> git commit -am 'Tag the database with v1.0.0-dev2.'
[master 5427456] Tag the database with v1.0.0-dev2.
1 file changed, 1 insertion(+)
> git tag v1.0.0-dev2 -am 'Tag v1.0.0-dev2'
> sqitch bundle --dest-dir flipr-1.0.0-dev2
Bundling into flipr-1.0.0-dev2
Writing config
Writing plan
Writing scripts
+ appschema
+ users
+ insert_user
+ change_pass @v1.0.0-dev1
+ lists
+ insert_list
+ delete_list
+ flips
+ insert_flip
+ delete_flip @v1.0.0-dev2
Note the use of the C<--dest-dir> option to C<sqitch bundle>. Just a nicer way
to create the top-level directory name so we don't have to rename it from
F<bundle>.
=head1 In Place Changes
Uh-oh, someone just noticed that MD5 hashing is not particularly secure. Why?
Have a look at this:
=begin comment
If you get this error:
ORA-01950: no privileges on tablespace 'USERS'
Then connect as sysdba and grant unlimited quota to flipr:
ALTER USER flipr QUOTA UNLIMITED ON USERS;
=end comment
> echo "
DELETE FROM flipr.users;
EXECUTE flipr.insert_user('foo', 's3cr3t');
EXECUTE flipr.insert_user('bar', 's3cr3t');
SELECT nickname, password FROM flipr.users;
" | sqlplus -S scott/tiger@flipr_test
PL/SQL procedure successfully completed.
PL/SQL procedure successfully completed.
NICKNAME
--------------------------------------------------------------------------------
PASSWORD
--------------------------------------------------------------------------------
foo
a4d80eac9ab26a4a2da04125bc2c096a
bar
a4d80eac9ab26a4a2da04125bc2c096a
If user "foo" ever got access to the database, she could quickly discover that
user "bar" has the same password and thus be able to exploit the account. Not
a great idea. So we need to modify the C<insert_user()> and C<change_pass()>
functions to fix that. How?
We'll create a function that encrypts passwords using a
L<cryptographic salt|https://en.wikipedia.org/wiki/Salt_(cryptography)>. This
will allow the password hashes to be stored with random salts. So we'll need
to add the function. The deploy script should be:
-- Deploy crypt
-- requires: appschema
CREATE OR REPLACE FUNCTION flipr.crypt(
password VARCHAR2,
salt VARCHAR2
) RETURN VARCHAR2 IS
salted CHAR(10) := SUBSTR(salt, 0, 10);
BEGIN
RETURN salted || LOWER( RAWTOHEX( UTL_RAW.CAST_TO_RAW(
sys.dbms_obfuscation_toolkit.md5(input_string => password || salted)
) ) );
END;
/
SHOW ERRORS;
-- Drop and die on error.
DECLARE
l_err_count INTEGER;
BEGIN
SELECT COUNT(*)
INTO l_err_count
FROM all_errors
WHERE owner = 'FLIPR'
AND name = 'CRYPT';
IF l_err_count > 0 THEN
EXECUTE IMMEDIATE 'DROP FUNCTION flipr.crypt';
raise_application_error(-20001, 'Errors in FLIPR.CRYPT');
END IF;
END;
/
And the revert script should be:
-- Revert crypt.
DROP FUNCTION flipr.crypt;
And, as usual, the verify script should just use C<DESCRIBE>:
-- Verify crypt
DESCRIBE flipr.crypt;
With that change in place and committed, we're ready to make use of the
improved encryption. But how to deploy the changes to C<insert_user()> and
C<change_pass()>?
Normally, modifying functions in database changes is a
L<PITA|http://www.urbandictionary.com/define.php?term=pita>. You have to make
changes like these:
=over
=item 1.
Copy F<deploy/insert_user.sql> to F<deploy/insert_user_crypt.sql>.
=item 2.
Edit F<deploy/insert_user_crypt.sql> to switch from
C<sys.dbms_obfuscation_toolkit.md5()> to C<flipr.crypt()> and to add a
dependency on the C<crypt> change.
=item 3.
Copy F<deploy/insert_user.sql> to F<revert/insert_user_crypt.sql>.
Yes, copy the original change script to the new revert change.
=item 4.
Copy F<verify/insert_user.sql> to F<verify/insert_user_crypt.sql>.
=item 5.
Edit F<verify/insert_user_crypt.sql> to test that the function now properly
uses C<flipr.crypt()>.
=item 6.
Test the changes to make sure you can deploy and revert the
C<insert_user_crypt> change.
=item 7.
Now do the same for the C<change_pass> scripts.
=back
But you can have Sqitch do it for you. The only requirement is that a tag
appear between the two instances of a change we want to modify. In general,
you're going to make a change like this after a release, which you've tagged
anyway, right? Well we have, with C<@v1.0.0-dev2> added in the previous
section. With that, we can let Sqitch do most of the hard work for us, thanks
to the L<C<rework>|sqitch-rework> command, which is similar to
L<C<add>|sqitch-add>, including support for the C<--requires> option:
> sqitch rework insert_user --requires crypt -n 'Change insert_user to use crypt.'
Added "insert_user [insert_user@v1.0.0-dev2 crypt]" to sqitch.plan.
Modify these files as appropriate:
* deploy/insert_user.sql
* revert/insert_user.sql
* verify/insert_user.sql
Oh, so we can edit those files in place. Nice! How does Sqitch do it? Well, in
point of fact, it has copied the files to stand in for the previous instance
of the C<insert_user> change, which we can see via C<git status>:
> git status
# On branch master
# Your branch is ahead of 'origin/master' by 2 commits.
# (use "git push" to publish your local commits)
#
# Changes not staged for commit:
# (use "git add <file>..." to update what will be committed)
# (use "git checkout -- <file>..." to discard changes in working directory)
#
# modified: revert/insert_user.sql
# modified: sqitch.plan
#
# Untracked files:
# (use "git add <file>..." to include in what will be committed)
#
# deploy/insert_user@v1.0.0-dev2.sql
# revert/insert_user@v1.0.0-dev2.sql
# verify/insert_user@v1.0.0-dev2.sql
no changes added to commit (use "git add" and/or "git commit -a")
The "untracked files" part of the output is the first thing to notice. They
are all named C<insert_user@v1.0.0-dev2.sql>. What that means is: "the
C<insert_user> change as it was implemented as of the C<@v1.0.0-dev2> tag."
These are copies of the original scripts, and thereafter Sqitch will find them
when it needs to run scripts for the first instance of the C<insert_user>
change. As such, it's important not to change them again. But hey, if you're
reworking the change, you shouldn't need to.
The other thing to notice is that F<revert/insert_user.sql> has changed.
Sqitch replaced it with the original deploy script. As of now,
F<deploy/insert_user.sql> and F<revert/insert_user.sql> are identical. This is
on the assumption that the deploy script will be changed (we're reworking it,
remember?), and that the revert script should actually change things back to
how they were before. Of course, the original deploy script may not be
L<idempotent|http://en.wikipedia.org/wiki/Idempotence> -- that is, able to be
applied multiple times without changing the result beyond the initial
application. If it's not, you will likely need to modify it so that it
properly restores things to how they were after the original deploy script was
deployed. Or, more simply, it should revert changes back to how they were
as-of the deployment of F<deploy/insert_user@v1.0.0-dev2.sql>.
Fortunately, our function deploy scripts are already idempotent, thanks to the
use of the C<OR REPLACE> expression. No matter how many times a deployment
script is run, the end result will be the same instance of the function, with
no duplicates or errors.
As a result, there is no need to explicitly add changes. So go ahead. Modify the
script to switch to C<crypt()>. Make this change to
F<deploy/insert_user.sql>:
@@ -1,6 +1,7 @@
-- Deploy insert_user
-- requires: users
-- requires: appschema
+-- requires: crypt
CREATE OR REPLACE PROCEDURE flipr.insert_user(
nickname VARCHAR2,
@@ -9,9 +10,7 @@ CREATE OR REPLACE PROCEDURE flipr.insert_user(
BEGIN
INSERT INTO flipr.users VALUES(
nickname,
- LOWER( RAWTOHEX( UTL_RAW.CAST_TO_RAW(
- sys.dbms_obfuscation_toolkit.md5(input_string => password)
- ) ) ),
+ flipr.crypt(password, DBMS_RANDOM.STRING('p', 10)),
DEFAULT
);
END;
Go ahead and rework the C<change_pass> change, too:
> sqitch rework change_pass --requires crypt -n 'Change change_pass to use crypt.'
Added "change_pass [change_pass@v1.0.0-dev2 crypt]" to sqitch.plan.
Modify these files as appropriate:
* deploy/change_pass.sql
* revert/change_pass.sql
* verify/change_pass.sql
And make this change to F<deploy/change_pass.sql>:
@@ -1,6 +1,7 @@
-- Deploy change_pass
-- requires: users
-- requires: appschema
+-- requires: crypt
CREATE OR REPLACE PROCEDURE flipr.change_pass(
nick VARCHAR2,
@@ -10,13 +11,9 @@ CREATE OR REPLACE PROCEDURE flipr.change_pass(
flipr_auth_failed EXCEPTION;
BEGIN
UPDATE flipr.users
- SET password = LOWER( RAWTOHEX( UTL_RAW.CAST_TO_RAW(
- sys.dbms_obfuscation_toolkit.md5(input_string => newpass)
- ) ) )
+ SET password = flipr.crypt(newpass, DBMS_RANDOM.STRING('p', 10))
WHERE nickname = nick
- AND password = LOWER( RAWTOHEX( UTL_RAW.CAST_TO_RAW(
- sys.dbms_obfuscation_toolkit.md5(input_string => oldpass)
- ) ) );
+ AND password = flipr.crypt(oldpass, password);
IF SQL%ROWCOUNT = 0 THEN RAISE flipr_auth_failed; END IF;
END;
/
And then try a deployment:
> sqitch deploy
Deploying changes to flipr_test
+ insert_user .. No errors.
ok
+ change_pass .. No errors.
ok
So, are the changes deployed?
> echo "
DELETE FROM flipr.users;
EXECUTE flipr.insert_user('foo', 's3cr3t');
EXECUTE flipr.insert_user('bar', 's3cr3t');
SELECT nickname, password FROM flipr.users;
" | sqlplus -S scott/tiger@flipr_test
PL/SQL procedure successfully completed.
PL/SQL procedure successfully completed.
NICKNAME
--------------------------------------------------------------------------------
PASSWORD
--------------------------------------------------------------------------------
foo
cP?.eR!V[pf3d91ce9b7dcfe9260c6f4bb94ed0b22
bar
Z+l"_W_JiSefb62b789c0ff114cddcccc69c422e78
Awesome, the stored passwords are different now. But can we revert, even
though we haven't written any reversion scripts?
> sqitch revert --to @HEAD^^ -y
Reverting changes to crypt from flipr_test
- change_pass .. No errors.
ok
- insert_user .. No errors.
ok
Did that work, are the MD5 passwords back?
> echo "
DELETE FROM flipr.users;
EXECUTE flipr.insert_user('foo', 's3cr3t');
EXECUTE flipr.insert_user('bar', 's3cr3t');
SELECT nickname, password FROM flipr.users;
" | sqlplus -S scott/tiger@flipr_test
PL/SQL procedure successfully completed.
PL/SQL procedure successfully completed.
NICKNAME
--------------------------------------------------------------------------------
PASSWORD
--------------------------------------------------------------------------------
foo
a4d80eac9ab26a4a2da04125bc2c096a
bar
a4d80eac9ab26a4a2da04125bc2c096a
Yes, it works! Sqitch properly finds the original instances of these changes
in the new script files that include tags.
But what about the verify script? How can we verify that the functions have
been modified to use C<crypt()>? I think the simplest thing to do is to
examine the body of the function by querying the
L<C<all_source>|http://docs.oracle.com/cd/B19306_01/server.102/b14237/statviews_2063.htm>
view. So the C<insert_user> verify script looks like this:
-- Verify insert_user
DESCRIBE flipr.insert_user;
SELECT 1/COUNT(*)
FROM all_source
WHERE type = 'PROCEDURE'
AND name = 'INSERT_USER'
AND text LIKE '%flipr.crypt(password, DBMS_RANDOM.STRING(''p'', 10))%';
And the C<change_pass> verify script looks like this:
-- Verify change_pass
DESCRIBE flipr.change_pass;
SELECT 1/COUNT(*)
FROM all_source
WHERE type = 'PROCEDURE'
AND name = 'CHANGE_PASS'
AND text LIKE '%password = flipr.crypt(newpass, DBMS_RANDOM.STRING(''p'', 10))%';
Make sure these pass by re-deploying:
> sqitch deploy
Deploying changes to flipr_test
+ insert_user .. No errors.
ok
+ change_pass .. No errors.
ok
Excellent. Let's go ahead and commit these changes:
> git add .
> git commit -m 'Use crypt to encrypt passwords.'
[master be46175] Use crypt to encrypt passwords.
13 files changed, 181 insertions(+), 15 deletions(-)
create mode 100644 deploy/change_pass@v1.0.0-dev2.sql
create mode 100644 deploy/insert_user@v1.0.0-dev2.sql
rewrite revert/change_pass.sql (98%)
rename revert/{change_pass.sql => change_pass@v1.0.0-dev2.sql} (100%)
rewrite revert/insert_user.sql (98%)
rename revert/{insert_user.sql => insert_user@v1.0.0-dev2.sql} (100%)
create mode 100644 verify/change_pass@v1.0.0-dev2.sql
create mode 100644 verify/insert_user@v1.0.0-dev2.sql
> sqitch status
# On database flipr_test
# Project: flipr
# Change: 8367dc3bff7a563ec27f145421a1ffdf724cb6de
# Name: change_pass
# Deployed: 2013-12-31 17:18:28 -0800
# By: Marge N. O’Vera <marge@example.com>
#
Nothing to deploy (up-to-date)
=head1 More to Come
Sqitch is a work in progress. Better integration with version control systems
is planned to make managing idempotent reworkings even easier. Stay tuned.
=head1 Author
David E. Wheeler <david@justatheory.com>
=head1 License
Copyright (c) 2012-2014 iovation Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=cut
| gitpan/App-Sqitch | lib/sqitchtutorial-oracle.pod | Perl | mit | 61,607 |
#!/usr/bin/env perl

# Init-script wrapper that daemonizes `http_this` to serve the Maximus
# documentation directory over HTTP on port 3002. Intended to be installed
# as an LSB init.d service inside the Vagrant VM.

use strict;
use warnings;
use local::lib;
use Daemon::Control;

# The service environment may not set HOME; http_this and local::lib expect
# one, so point it at the Vagrant shared directory.
$ENV{HOME} = '/vagrant';

Daemon::Control->new(
    { name         => 'Maximus-Docs',
      path         => '/vagrant/script/init.d/maximus-docs.pl',
      directory    => '/vagrant/docs',              # serve the docs tree
      program      => 'http_this',
      program_args => ['--port', 3002],
      user         => 'vagrant',
      group        => 'vagrant',
      fork         => 2,                            # full double-fork daemon
      pid_file     => '/tmp/maximus-docs.pid',
      stdout_file  => '/vagrant/maximus-docs.log',
      stderr_file  => '/vagrant/maximus-docs.log',  # merge stderr into same log
      lsb_start    => '$syslog $network',
      lsb_stop     => '$syslog',
      lsb_sdesc    => 'Maximus-Docs script',
      lsb_desc     => 'Maximus-Docs script',
    }
)->run;
| maximos/maximus-web | puppet/modules/maximus/files/maximus_docs.pl | Perl | mit | 784 |
package PlasmidFinder::FinderStats;
# Statistics helpers for PlasmidFinder results.
# NOTE(review): this module is a skeleton — the only sub, calc_num_devs(),
# has an empty body and the "Internal methods" banner sits *inside* its
# braces, which looks like an editing artifact. The imports (mean/stddev
# from Statistics::Basic) suggest the intended implementation, but confirm
# before relying on this package.
use strict;
use warnings;
use List::MoreUtils qw(any);
use Class::Std::Utils;
use Statistics::Basic qw(mean stddev);
# #
## ##
###Getter Methods###
## ##
# #
# Presumably meant to compute how many standard deviations a value lies from
# the mean — TODO: implement. As written it returns an empty list/undef.
sub calc_num_devs {
# #
## ##
###Internal methods###
## ##
# #
}
1;
| hunter-cameron/Bioinformatics | perl/PlasmidFinder/FinderStats.pm | Perl | mit | 471 |
=head1 LICENSE
See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=head1 NAME
Bio::EnsEMBL::Compara::DBSQL::GeneTreeAdaptor
=head1 DESCRIPTION
Adaptor for a GeneTree object (individual nodes will be internally retrieved
with the GeneTreeNodeAdaptor).
=head1 INHERITANCE TREE
Bio::EnsEMBL::Compara::DBSQL::GeneTreeAdaptor
+- Bio::EnsEMBL::Compara::DBSQL::BaseAdaptor
`- Bio::EnsEMBL::Compara::DBSQL::TagAdaptor
=head1 AUTHORSHIP
Ensembl Team. Individual contributions can be found in the GIT log.
=head1 APPENDIX
The rest of the documentation details each of the object methods.
Internal methods are usually preceded with an underscore (_)
=cut
package Bio::EnsEMBL::Compara::DBSQL::GeneTreeAdaptor;
use strict;
use warnings;
use Data::Dumper;
use Bio::EnsEMBL::Utils::Argument qw(rearrange);
use Bio::EnsEMBL::Utils::Scalar qw(:assert);
use Bio::EnsEMBL::Compara::Utils::Scalar qw(:assert);
use Bio::EnsEMBL::Compara::GeneTree;
use DBI qw(:sql_types);
use base ('Bio::EnsEMBL::Compara::DBSQL::BaseAdaptor', 'Bio::EnsEMBL::Compara::DBSQL::TagAdaptor');
#
# FETCH methods
###########################
=head2 fetch_all
Arg [-TREE_TYPE] (opt)
: string: the type of trees that have to be fetched
Currently one of 'clusterset', 'supertree', 'tree'
Arg [-MEMBER_TYPE] (opt)
: string: the type of the members that are part of the tree
Currently 'protein' or 'ncrna'
Arg [-METHOD_LINK_SPECIES_SET] (opt)
: MethodLinkSpeciesSet or int: either the object or its dbID
NB: It currently gives the same partition of the data as member_type
Arg [-CLUSTERSET_ID] (opt)
: string: the name of the clusterset (use "default" to get the default
trees). Currently, there is a clusterset for the default trees, one for
each phylogenetic model used in the protein tree pipeline
Example : $all_trees = $genetree_adaptor->fetch_all();
Description: Fetches from the database all the gene trees
Returntype : arrayref of Bio::EnsEMBL::Compara::GeneTree
Exceptions : none
Caller : general
=cut
sub fetch_all {
    my ($self, @args) = @_;

    my ($clusterset_id, $mlss, $tree_type, $member_type)
        = rearrange([qw(CLUSTERSET_ID METHOD_LINK_SPECIES_SET TREE_TYPE MEMBER_TYPE)], @args);

    # Build the WHERE clause incrementally.  NOTE: bind_param_generic_fetch()
    # queues bind values in call order, so each push onto @constraint must be
    # immediately followed by its matching bind call to keep placeholders and
    # values aligned.
    my @constraint = ();

    if (defined $tree_type) {
        push @constraint, '(gtr.tree_type = ?)';
        $self->bind_param_generic_fetch($tree_type, SQL_VARCHAR);
    }
    if (defined $member_type) {
        push @constraint, '(gtr.member_type = ?)';
        $self->bind_param_generic_fetch($member_type, SQL_VARCHAR);
    }
    if (defined $mlss) {
        # Accept either a MethodLinkSpeciesSet object or its raw dbID.
        assert_ref_or_dbID($mlss, 'Bio::EnsEMBL::Compara::MethodLinkSpeciesSet', 'mlss');
        my $mlss_id = (ref($mlss) ? $mlss->dbID : $mlss);
        push @constraint, '(gtr.method_link_species_set_id = ?)';
        $self->bind_param_generic_fetch($mlss_id, SQL_INTEGER);
    }
    if (defined $clusterset_id) {
        push @constraint, '(gtr.clusterset_id = ?)';
        $self->bind_param_generic_fetch($clusterset_id, SQL_VARCHAR);
    }

    # With no filters @constraint is empty, join() yields '', and every tree
    # is returned.
    return $self->generic_fetch(join(' AND ', @constraint));
}
=head2 fetch_by_stable_id
Arg[1] : string $tree_stable_id
Example : $tree = $genetree_adaptor->fetch_by_stable_id("ENSGT00590000083078");
Description: Fetches from the database the gene tree for that stable ID
Returntype : Bio::EnsEMBL::Compara::GeneTree
Exceptions : returns undef if $stable_id is not found.
Caller : general
=cut
sub fetch_by_stable_id {
    my ($self, $stable_id) = @_;

    # Bind first, then fetch: generic_fetch_one() consumes the queued bind value.
    $self->bind_param_generic_fetch($stable_id, SQL_VARCHAR);
    return $self->generic_fetch_one('gtr.stable_id = ?');
}
=head2 fetch_by_root_id
Arg[1] : int $tree_root_id
Example : $tree = $genetree_adaptor->fetch_by_root_id(3);
Description: Fetches from the database the gene tree for that root ID
This is equivalent to fetch_by_dbID
Returntype : Bio::EnsEMBL::Compara::GeneTree
Exceptions : returns undef if $root_id is not found.
Caller : general
=cut
sub fetch_by_root_id {
    my ($self, $root_id) = @_;

    # Gene trees are keyed by the node_id of their root node.
    $self->bind_param_generic_fetch($root_id, SQL_INTEGER);
    return $self->generic_fetch_one('gtr.root_id = ?');
}
=head2 fetch_by_dbID
Arg[1] : int $tree_root_id
Example : $tree = $genetree_adaptor->fetch_by_dbID(3);
Description: Fetches from the database the gene tree for that root ID
This is equivalent to fetch_by_root_id
Returntype : Bio::EnsEMBL::Compara::GeneTree
Exceptions : returns undef if $root_id is not found.
Caller : general
=cut
sub fetch_by_dbID {
    my $self = shift;
    # A gene tree's dbID *is* its root node's ID (see POD above), so delegate
    # to fetch_by_root_id() instead of duplicating the query logic.
    return $self->fetch_by_root_id(@_);
}
=head2 fetch_by_node_id
Arg[1] : int $tree_node_id
Example : $tree = $genetree_adaptor->fetch_by_node_id(3);
Description: Fetches from the database the gene tree that contains
this node
Returntype : Bio::EnsEMBL::Compara::GeneTree
Exceptions : returns undef if $node_id is not found.
Caller : general
=cut
sub fetch_by_node_id {
    my ($self, $node_id) = @_;

    $self->bind_param_generic_fetch($node_id, SQL_INTEGER);
    # Join gene_tree_node so any node of a tree maps back to the tree's root row.
    my $join = [[['gene_tree_node', 'gtn'], 'gtn.root_id = gtr.root_id']];
    return $self->generic_fetch_one('gtn.node_id = ?', $join);
}
=head2 fetch_all_by_Member
Arg[1] : GeneMember, SeqMember or seq_member_id
Arg [-METHOD_LINK_SPECIES_SET] (opt)
: MethodLinkSpeciesSet or int: either the object or its dbID
Arg [-CLUSTERSET_ID] (opt)
: string: the name of the clusterset (default is "default")
Example : $all_trees = $genetree_adaptor->fetch_all_by_Member($member);
Description: Fetches from the database all the gene trees that contains this member
If the member is a non-canonical SeqMember, returns an empty list
Returntype : arrayref of Bio::EnsEMBL::Compara::GeneTree
Exceptions : none
Caller : general
=cut
sub fetch_all_by_Member {
    my ($self, $member, @args) = @_;

    my ($clusterset_id, $mlss) = rearrange([qw(CLUSTERSET_ID METHOD_LINK_SPECIES_SET)], @args);

    assert_ref_or_dbID($member, 'Bio::EnsEMBL::Compara::Member', 'member');

    # A member belongs to a tree through its nodes, hence the join.
    my $join = [[['gene_tree_node', 'gtn'], 'gtn.root_id = gtr.root_id']];
    my $constraint = '(gtn.seq_member_id = ?)';

    # A GeneMember is resolved to its canonical SeqMember id; a SeqMember to
    # its own dbID; a raw number is assumed to already be a seq_member_id.  A
    # non-canonical SeqMember simply matches no rows (documented behaviour).
    my $seq_member_id = (ref($member) ? ($member->isa('Bio::EnsEMBL::Compara::GeneMember') ? $member->canonical_member_id : $member->dbID) : $member);
    $self->bind_param_generic_fetch($seq_member_id, SQL_INTEGER);

    # Optional filters; binds must stay in the same order as the '?'
    # placeholders appended to $constraint.
    if (defined $mlss) {
        assert_ref_or_dbID($mlss, 'Bio::EnsEMBL::Compara::MethodLinkSpeciesSet', 'mlss');
        my $mlss_id = (ref($mlss) ? $mlss->dbID : $mlss);
        $constraint .= ' AND (gtr.method_link_species_set_id = ?)';
        $self->bind_param_generic_fetch($mlss_id, SQL_INTEGER);
    }

    if (defined $clusterset_id) {
        $constraint .= ' AND (gtr.clusterset_id = ?)';
        $self->bind_param_generic_fetch($clusterset_id, SQL_VARCHAR);
    }

    return $self->generic_fetch($constraint, $join);
}
=head2 fetch_default_for_Member
Arg[1] : GeneMember, SeqMember or seq_member_id
Arg[2] : (optional) clusterset_id (example values: "default", "murinae")
Example : $trees = $genetree_adaptor->fetch_default_for_Member($member);
Description: Fetches from the database the default gene tree that contains this member
If the member is a non-canonical SeqMember, returns undef
Returntype : Bio::EnsEMBL::Compara::GeneTree
Exceptions : none
Caller : general
=cut
sub fetch_default_for_Member {
    my ($self, $member, $clusterset_id) = @_;

    my $all_trees = $self->fetch_all_by_Member($member, -CLUSTERSET_ID => $clusterset_id);
    # Single hit: unambiguous, return it directly.
    return $all_trees->[0] if scalar(@$all_trees) == 1;

    # Several hits: keep only reference trees (no ref_root_id) and pick the
    # lowest root_id for determinism.  Returns undef when nothing matches.
    my @sorted_trees = sort {$a->root_id <=> $b->root_id} grep {!$_->ref_root_id} @$all_trees;
    return $sorted_trees[0];
}
=head2 fetch_by_Gene
Arg[1] : Bio::EnsEMBL::Gene $gene
Example : $tree = $genetree_adaptor->fetch_by_Gene($gene);
Description: Fetches from the database the default gene tree that contains this gene
Returntype : Bio::EnsEMBL::Compara::GeneTree
Exceptions : none
Caller : general
=cut
sub fetch_by_Gene {
    my ($self, $gene) = @_;

    assert_ref($gene, 'Bio::EnsEMBL::Gene', 'gene');

    # Map the core Gene onto its Compara GeneMember, then reuse the standard
    # default-tree lookup.
    my $gene_member = $self->db->get_GeneMemberAdaptor->fetch_by_Gene($gene);
    return undef unless $gene_member;

    return $self->fetch_default_for_Member($gene_member);
}
=head2 fetch_parent_tree
Arg[1] : GeneTree $tree or its root_id
Example : $parent = $genetree_adaptor->fetch_parent_tree($tree);
Description: Fetches from the database the parent (tree) of the argument tree
Returntype : Bio::EnsEMBL::Compara::GeneTree
Exceptions : returns undef if called on a 'clusterset' tree
Caller : general
=cut
sub fetch_parent_tree {
    my ($self, $tree) = @_;

    assert_ref_or_dbID($tree, 'Bio::EnsEMBL::Compara::GeneTree', 'tree');
    my $tree_id = (ref($tree) ? $tree->root_id : $tree);

    # gtn1: nodes of the candidate parent tree; gtn2: their children.  If a
    # child is the root node of the queried tree, gtn1's tree is the parent.
    my $join = [[['gene_tree_node', 'gtn1'], 'gtn1.root_id = gtr.root_id'], [['gene_tree_node', 'gtn2'], 'gtn1.node_id = gtn2.parent_id']];
    # "gtn2.root_id = gtn2.node_id" restricts gtn2 to root nodes only.
    my $constraint = "(gtn2.root_id = gtn2.node_id) AND (gtn2.root_id = ?)";

    $self->bind_param_generic_fetch($tree_id, SQL_INTEGER);
    return $self->generic_fetch_one($constraint, $join);
}
=head2 fetch_subtrees
Arg[1] : GeneTree $tree or its root_id
Example : $subtrees = $genetree_adaptor->fetch_subtrees($tree);
Description: Fetches from the database the trees that are children of the argument tree
Returntype : arrayref of Bio::EnsEMBL::Compara::GeneTree
Exceptions : the array is empty if called on a 'tree' tree
Caller : general
=cut
sub fetch_subtrees {
    my ($self, $tree) = @_;

    assert_ref_or_dbID($tree, 'Bio::EnsEMBL::Compara::GeneTree', 'tree');
    my $tree_id = (ref($tree) ? $tree->root_id : $tree);

    # gtn2: root nodes of candidate subtrees (their parent_id is also exported
    # onto the fetched objects as _parent_id); gtn1: the parent nodes, which
    # must belong to the queried tree.
    my $join = [[['gene_tree_node', 'gtn2'], 'gtn2.node_id = gtr.root_id', {'gtn2.parent_id' => '_parent_id'}], [['gene_tree_node', 'gtn1'], 'gtn1.node_id = gtn2.parent_id']];
    # The second clause excludes the queried tree itself.
    my $constraint = "(gtn1.root_id = ?) AND (gtn2.root_id != gtn1.root_id)";

    $self->bind_param_generic_fetch($tree_id, SQL_INTEGER);
    return $self->generic_fetch($constraint, $join);
}
=head2 fetch_all_linked_trees
Arg[1] : GeneTree $tree or its root_id
Example : $othertrees = $genetree_adaptor->fetch_all_linked_trees($tree);
Description: Fetches from the database all trees that are associated to the argument tree.
The other trees generally contain the same members, but are either build
with a different phylogenetic model, or have a different multiple alignment.
Returntype : arrayref of Bio::EnsEMBL::Compara::GeneTree
Caller : general
=cut
sub fetch_all_linked_trees {
    my ($self, $tree) = @_;

    # Unlike the other fetchers, this method dereferences $tree directly, so
    # a plain dbID is not acceptable; assert early for a clear error message
    # (consistent with the argument checks used throughout this adaptor).
    assert_ref($tree, 'Bio::EnsEMBL::Compara::GeneTree', 'tree');

    # Currently, all linked trees are accessible in 1 hop
    if ($tree->ref_root_id) {
        # $tree is itself an alternative tree: its siblings share the same
        # ref_root_id (excluding $tree itself), and the reference tree is the
        # one whose root_id equals that ref_root_id.
        $self->bind_param_generic_fetch($tree->ref_root_id, SQL_INTEGER);
        $self->bind_param_generic_fetch($tree->root_id, SQL_INTEGER);
        $self->bind_param_generic_fetch($tree->ref_root_id, SQL_INTEGER);
        return $self->generic_fetch('(ref_root_id = ? AND root_id != ?) OR (root_id = ?)');
    } else {
        # The given tree is the reference: all linked trees point back at it.
        $self->bind_param_generic_fetch($tree->root_id, SQL_INTEGER);
        return $self->generic_fetch('ref_root_id = ?');
    }
}
=head2 fetch_all_removed_seq_member_ids_by_root_id
Arg[1] : int: root_id: ID of the root node of the tree
Example : $all_removed_members = $genetree_adaptor->fetch_all_Removed_Members_by_root_id($root_id);
Description: Gets all the removed members of the given tree.
Returntype : arrayref of seq_member_ids
Exceptions : none
Caller : general
=cut
sub fetch_all_removed_seq_member_ids_by_root_id {
    my ( $self, $root_id ) = @_;

    # Plain SQL (no object mapping needed): flat arrayref of the
    # seq_member_id values flagged as removed in the tree's backup table.
    return $self->dbc->db_handle->selectcol_arrayref( 'SELECT seq_member_id FROM gene_tree_backup WHERE is_removed = 1 AND root_id = ? ;', undef, $root_id );
}
#
# STORE/DELETE methods
###########################
# Stores (inserts or updates) a GeneTree and all its nodes.
# Returns the root_id of the stored tree.
sub store {
    my ($self, $tree) = @_;

    # Firstly, store the nodes.  The presence of _root_id tells us whether the
    # tree was already in the database (update) or is new (insert).
    my $has_root_id = (exists $tree->{'_root_id'} ? 1 : 0);
    my $root_id = $self->db->get_GeneTreeNodeAdaptor->store_nodes_rec($tree->root);
    $tree->{'_root_id'} = $root_id;

    # Secondly, the tree itself.
    my $sth;
    # Make sure that the columns of both statements stay in the same order:
    # the single execute() below serves both.
    # (The UPDATE branch originally ended in a comma — the comma operator —
    # instead of a semicolon; fixed to a proper statement terminator.)
    if ($has_root_id) {
        $sth = $self->prepare('UPDATE gene_tree_root SET tree_type=?, member_type=?, clusterset_id=?, gene_align_id=?, method_link_species_set_id=?, species_tree_root_id=?, stable_id=?, version=?, ref_root_id=? WHERE root_id=?');
    } else {
        $sth = $self->prepare('INSERT INTO gene_tree_root (tree_type, member_type, clusterset_id, gene_align_id, method_link_species_set_id, species_tree_root_id, stable_id, version, ref_root_id, root_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)');
    }
    $sth->execute($tree->tree_type, $tree->member_type, $tree->clusterset_id, $tree->gene_align_id, $tree->method_link_species_set_id, $tree->species_tree_root_id, $tree->stable_id, $tree->version, $tree->ref_root_id, $root_id);

    $tree->adaptor($self);

    return $root_id;
}
# Deletes a GeneTree, its nodes, its linked (alternative) trees, its
# alignments, its CAFE data and its HMM profile.  Also keeps the supertree
# hierarchy and per-gene statistics consistent.  NOTE: recursive (linked
# trees and degenerate supertrees are deleted through delete_tree() itself).
sub delete_tree {
    my ($self, $tree) = @_;

    assert_ref($tree, 'Bio::EnsEMBL::Compara::GeneTree', 'tree');

    # Make sure the tags are loaded (so that we can access the tags that
    # link to alternative alignments)
    $tree->_load_tags;

    my $root_id = $tree->root->node_id;

    # Query to reset gene_member_hom_stats for every gene in the tree.
    my $gene_member_hom_stats_sql = 'UPDATE gene_member_hom_stats SET gene_trees = 0, orthologues = 0, paralogues = 0, homoeologues = 0 WHERE gene_member_id = ?';
    for my $leaf (@{$tree->get_all_leaves}) {
        if ($leaf->isa('Bio::EnsEMBL::Compara::GeneTreeMember')) {
            $self->dbc->do($gene_member_hom_stats_sql, undef, $leaf->gene_member_id);
        }
    }

    # Delete any associated CAFE data
    my $cafe_adaptor = $self->db->get_CAFEGeneFamilyAdaptor;
    my $cafe_gene_family = $cafe_adaptor->fetch_by_GeneTree($tree);
    $cafe_adaptor->delete($cafe_gene_family) if ( $cafe_gene_family );

    # Remove all the nodes but the root.  The root row must survive until the
    # very end: linked trees and the supertree cleanup still reference it.
    my $gene_tree_node_Adaptor = $self->db->get_GeneTreeNodeAdaptor;
    for my $node (@{$tree->get_all_nodes}) {
        next if ($node->node_id() == $root_id);
        $gene_tree_node_Adaptor->delete_node($node);
    }

    # List of all the gene_align_ids that need deleting
    my %gene_align_ids;
    $gene_align_ids{$tree->gene_align_id} = 1 if $tree->gene_align_id;

    # Only for "default" trees
    unless ($tree->ref_root_id) {
        # Linked trees must be removed as well as they refer to the default
        # tree (recursive call).
        foreach my $other_tree (@{$self->fetch_all_linked_trees($tree)}) {
            $gene_align_ids{$other_tree->gene_align_id} = 1 if $other_tree->gene_align_id;
            $other_tree->preload();
            $self->delete_tree($other_tree);
            $other_tree->release_tree();
        }
    }

    # Propagate the removed gene count up the tree hierarchy so the parents'
    # 'gene_count' tags stay consistent.  Stops at the first ancestor that
    # has no gene_count tag.
    if (my $gene_count = $tree->get_value_for_tag('gene_count')) {
        my $current_tree = $tree;
        while (my $parent_tree = $self->fetch_parent_tree($current_tree)) {
            if (my $parent_gene_count = $parent_tree->get_value_for_tag('gene_count')) {
                $parent_tree->store_tag('gene_count', $parent_gene_count-$gene_count);
                $current_tree = $parent_tree;
            } else {
                last;
            }
        }
    }

    # Is this a subtree of a supertree? If so, clean up the supertree too
    if ( $tree->root->parent->tree->tree_type eq 'supertree' ) {
        $self->_clean_supertree($tree->root);
    }

    # Finally remove the root node
    $gene_tree_node_Adaptor->delete_node($tree->root) if $tree->root;

    # Only for "default" trees
    unless ($tree->ref_root_id) {
        # Register more alignments (score / filtered alignments are stored as tags)
        foreach my $gene_align_id_tag (qw(mcoffee_scores_gene_align_id filtered_gene_align_id)) {
            if (my $gene_align_id = $tree->get_value_for_tag($gene_align_id_tag)) {
                $gene_align_ids{$gene_align_id} = 1;
            }
        }
        # Delete all the alignments (no foreign key problems since all the
        # trees have been removed by now)
        foreach my $gene_align_id (keys %gene_align_ids) {
            $self->db->get_GeneAlignAdaptor->delete($gene_align_id);
        }
        # The HMM profile
        $self->dbc->do('DELETE FROM hmm_profile WHERE model_id = ?', undef, $root_id);
    }
}
# Internal helper of delete_tree().  Detaches a deleted subtree from its
# supertree, then either deletes the whole supertree (if it would degenerate
# to fewer than two leaves) or minimizes it in place.
sub _clean_supertree {
    my ($self, $subtree_root) = @_;

    my $gtn_adaptor = $self->db->get_GeneTreeNodeAdaptor;

    my $supertree_leaf = $subtree_root->parent;
    my $supertree_root = $supertree_leaf->root;
    my $supertree_gene_align_id = $supertree_root->tree->gene_align_id;

    my @supertree_leaves = @{$supertree_root->tree->get_all_leaves};

    if ( scalar(@supertree_leaves) < 3 ) {
        # removing a node from this supertree results in a single-leaf tree:
        # delete the whole supertree, leaving the other subtree intact
        foreach my $supertree_leaf ( @supertree_leaves ) {
            # link the subtree to the supertree's parent
            # this is usually a clusterset, but may be another supertree
            # (there is no easy way to do this using API calls in place of raw SQL)
            my $unlink_subtree_sql = "UPDATE gene_tree_node SET parent_id = ? WHERE parent_id = ?";
            my $sth = $self->prepare($unlink_subtree_sql);
            $sth->execute($supertree_root->parent->node_id, $supertree_leaf->node_id);
        }
        $self->delete_tree($supertree_root->tree);
    } else {
        # remove the deleted subtree's parent node and minimize the supertree
        # (i.e. clean up single-child nodes)
        $gtn_adaptor->delete_node($supertree_leaf);
        my $pruned_supertree = $supertree_root->tree;
        my @orig_child_nodes = map {$_->node_id} @{$pruned_supertree->root->get_all_nodes()};

        # clean the tree, update the indexes, update the tree in the db
        my $minimized_tree = $pruned_supertree->root->minimize_tree;
        # sometimes minimize_tree removes the old root - replace the original root_id
        $minimized_tree->root->node_id($supertree_root->node_id);

        # delete any nodes that have been removed in the minimization process from db
        my %minimized_child_nodes = map {$_->node_id => 1} @{$minimized_tree->get_all_nodes()};
        foreach my $orig_child_id ( @orig_child_nodes ) {
            next if $minimized_child_nodes{$orig_child_id};
            $gtn_adaptor->delete_node($gtn_adaptor->fetch_by_dbID($orig_child_id));
        }

        $minimized_tree->root->build_leftright_indexing();
        $gtn_adaptor->update_subtree($minimized_tree);
        $supertree_root = $minimized_tree->root;

        # now remove subtree's members from the supertree alignment
        # (reuse the fetched member list; the original fetched it twice and
        # left $subtree_members unused)
        my $subtree_members = $subtree_root->tree->get_all_Members;
        $self->db->get_GeneAlignAdaptor->delete_members($supertree_gene_align_id, $subtree_members);

        # memory management
        $pruned_supertree->release_tree;
        $minimized_tree->release_tree;
    }
}
# Moves a gene tree into a different clusterset: re-parents the tree's
# attachment node under the target clusterset's root and updates the tree row.
# (The original terminated three statements with the comma operator instead of
# a semicolon, and typo'd the local variable name "leave" for "leaf".)
sub change_clusterset {
    my ($self, $tree, $target_clusterset) = @_;

    # Find the root node of the target clusterset.
    my $sth = $self->prepare('SELECT root_id FROM gene_tree_root WHERE tree_type = "clusterset" AND clusterset_id = ? ;');
    $sth->execute($target_clusterset);
    my $target_clusterset_root_id = $sth->fetchrow();
    $sth->finish();

    # The tree hangs off its clusterset via its root's parent node: re-parent
    # that node (an internal node, hence "seq_member_id IS NULL") under the
    # target clusterset's root.
    my $clusterset_leaf = $tree->root->parent;
    $sth = $self->prepare('UPDATE gene_tree_node SET parent_id=?, root_id=? WHERE node_id=? and seq_member_id IS NULL');
    $sth->execute($target_clusterset_root_id, $target_clusterset_root_id, $clusterset_leaf->node_id);
    $sth->finish();

    # Record the new clusterset on the tree itself.
    $sth = $self->prepare('UPDATE gene_tree_root SET clusterset_id=? WHERE root_id=?');
    $sth->execute($target_clusterset, $tree->root->node_id);
    $sth->finish();
}
#
# Virtual methods from TagAdaptor
###################################
sub _tag_capabilities {
    # Hooks this adaptor into the generic TagAdaptor machinery: tag table,
    # attribute table, key columns, and tag/value column names (see
    # Bio::EnsEMBL::Compara::DBSQL::TagAdaptor for the expected order).
    return ('gene_tree_root_tag', 'gene_tree_root_attr', 'root_id', 'root_id', 'tag', 'value');
}
#
# Virtual methods from BaseAdaptor
####################################
sub _tables {
    # gene_tree_root is the primary table; gene_align is attached via the
    # LEFT JOIN declared in _left_join(), so its columns may be NULL.
    return (['gene_tree_root', 'gtr'], ['gene_align', 'ga'])
}
sub _left_join {
    # LEFT JOIN: trees without an alignment are still fetched.
    return (
        ['gene_align', 'gtr.gene_align_id = ga.gene_align_id'],
    );
}
sub _columns {
    # Columns fetched by the generic queries.  Order must stay aligned with
    # the attribute list in _objs_from_sth().
    return qw (
        gtr.root_id
        gtr.tree_type
        gtr.member_type
        gtr.clusterset_id
        gtr.gene_align_id
        gtr.method_link_species_set_id
        gtr.species_tree_root_id
        gtr.stable_id
        gtr.version
        gtr.ref_root_id
        ga.seq_type
        ga.aln_length
        ga.aln_method
    );
}
sub _objs_from_sth {
    my ($self, $sth) = @_;

    # Maps each fetched column (in _columns() order) onto the corresponding
    # GeneTree internal attribute.
    return $self->generic_objs_from_sth($sth, 'Bio::EnsEMBL::Compara::GeneTree', [
        '_root_id',
        '_tree_type',
        '_member_type',
        '_clusterset_id',
        '_gene_align_id',
        '_method_link_species_set_id',
        '_species_tree_root_id',
        '_stable_id',
        '_version',
        '_ref_root_id',
        '_seq_type',
        '_aln_length',
        '_aln_method',
    ] );
}
1;
| Ensembl/ensembl-compara | modules/Bio/EnsEMBL/Compara/DBSQL/GeneTreeAdaptor.pm | Perl | apache-2.0 | 22,566 |
#!/usr/bin/env perl
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
# Copyright [2016-2017] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
#
use strict;
use warnings;

# Fix: this line previously read "Use DBH;" (capital U).  Perl is
# case-sensitive, so the module was never loaded at compile time; the line
# was parsed as indirect method-call syntax and would die at runtime.
use DBH;

use Bio::EnsEMBL::Registry;
use Bio::EnsEMBL::DBSQL::DBAdaptor;
use Bio::EnsEMBL::Variation::DBSQL::DBAdaptor;
use Bio::EnsEMBL::Utils::Exception qw(verbose throw warning);
use Bio::EnsEMBL::Utils::Argument qw( rearrange );
use Data::Dumper;
use Bio::SeqIO;
use FindBin qw( $Bin );
use Getopt::Long;
use ImportUtils qw(dumpSQL debug create_and_load load);

our ($species, $seq_region_id, $chr_name, $TMP_DIR, $TMP_FILE);

GetOptions('species=s'       => \$species,
           'seq_region_id=i' => \$seq_region_id,
           'tmpdir=s'        => \$ImportUtils::TMP_DIR,
           'tmpfile=s'       => \$ImportUtils::TMP_FILE,
);

# ("my $x ||= ..." always assigns since a fresh lexical is undef; plain '='
# states the same thing without the misleading operator.)
my $registry_file = $Bin . "/ensembl.registry";

$TMP_DIR  = $ImportUtils::TMP_DIR;
$TMP_FILE = $ImportUtils::TMP_FILE;
$species ||= 'mouse';

Bio::EnsEMBL::Registry->load_all( $registry_file );
my $cdb = Bio::EnsEMBL::Registry->get_DBAdaptor($species,'core');
my $vdb = Bio::EnsEMBL::Registry->get_DBAdaptor($species,'variation');

print "core_db is ",$cdb->dbc->dbname, " and var_db is ",$vdb->dbc->dbname," and seq_region_id is $seq_region_id\n";

my $dbCore = $cdb->dbc->db_handle;
my $dbVar  = $vdb->dbc->db_handle;

# Names of the variations found to overlap repeat-masked sequence.
my %rec_repeat;
my $slice_adaptor = $cdb->get_SliceAdaptor();

run();
# Scans one seq_region for variations whose position falls on repeat-masked
# ('N') sequence, populating the file-level %rec_repeat hash.
# (Removed the unused %rec_start declaration and the dead $count counter.)
sub run {
    # seq_region_start of every source_id=2 variation on this region,
    # keyed by variation name.
    my %rec_name;
    my ($variation_name, $seq_start);

    my $sthv = $dbVar->prepare(qq{select variation_name,seq_region_start
                                  from variation_feature
                                  where seq_region_id = $seq_region_id
                                  and source_id=2
                                 });
    $sthv->execute();
    $sthv->bind_columns(\$variation_name, \$seq_start);
    while ($sthv->fetch()) {
        $rec_name{$variation_name} = $seq_start;
    }

    my $slice = $slice_adaptor->fetch_by_seq_region_id($seq_region_id);

    # Walk the region in 10 kb windows so the repeat-masked sequence of a
    # whole chromosome is never held in memory at once.
    my $slice_size = 10000;
    my $start = 1;
    my $end   = $start + $slice_size - 1;
    $end = $slice->length if ($end > $slice->length);

    while ($start <= $slice->length) {
        my $sub_slice = $slice->sub_Slice($start, $end);
        my @variation_features = @{$sub_slice->get_all_VariationFeatures};

        # Keep only the features that belong to the source selected above.
        my @vfs;
        foreach my $vf (@variation_features) {
            if ($rec_name{$vf->variation_name}) {
                push @vfs, $vf;
            }
        }

        # Sub-slice-relative start position => variation name.
        my %rec_starts = map {$_->start, $_->variation_name} @vfs;

        my $repeat_masked_slice = $sub_slice->get_repeatmasked_seq();
        my $dna = $repeat_masked_slice->seq();
        my @dnas = split "", $dna;
        #warn `ps -p $$ -o vsz |tail -1`;

        # A variation whose first base is masked ('N') lies in a repeat.
        foreach my $var_start (sort {$a <=> $b} keys %rec_starts) {
            if ($dnas[$var_start - 1] eq 'N') {
                my $variation_name = $rec_starts{$var_start};
                $rec_repeat{$variation_name} = 1;
            }
        }

        $start = $end + 1;
        $end   = $start + $slice_size - 1;
        $end = $slice->length if ($end > $slice->length);
    }
}
# Dump the names of all repeat-overlapping variations, one per line, to a
# per-seq_region file.  (Replaced the bareword filehandle and unchecked
# two-argument open with a checked three-argument open on a lexical handle.)
{
    my $out_file = "$TMP_DIR/$TMP_FILE\_$seq_region_id";
    open my $out_fh, '>', $out_file
        or die "Cannot open $out_file: $!";
    foreach my $variation_name (keys %rec_repeat) {
        print {$out_fh} "$variation_name\n";
    }
    close $out_fh or die "Cannot close $out_file: $!";
}
| willmclaren/ensembl-variation | scripts/import/repeats_filter.pl | Perl | apache-2.0 | 3,889 |
package Google::Ads::AdWords::v201809::PolicySummaryDenormalizedStatus;

# Auto-generated by SOAP::WSDL (see POD below); avoid hand-editing.
use strict;
use warnings;

# XML namespace of the simpleType this class represents.
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201809'};

# derivation by restriction: behaves exactly like the builtin string type
# (SOAP::WSDL does not enforce the enumeration restriction).
use base qw(
SOAP::WSDL::XSD::Typelib::Builtin::string);

1;
__END__
=pod
=head1 NAME
=head1 DESCRIPTION
Perl data type class for the XML Schema defined simpleType
PolicySummaryDenormalizedStatus from the namespace https://adwords.google.com/api/adwords/cm/v201809.
The denormalized status of a reviewable, calculated based on the status of its individual policy entries.
This class is derived from
SOAP::WSDL::XSD::Typelib::Builtin::string
. SOAP::WSDL's schema implementation does not validate data, so you can use it exactly
like its base type.
# Description of restrictions not implemented yet.
=head1 METHODS
=head2 new
Constructor.
=head2 get_value / set_value
Getter and setter for the simpleType's value.
=head1 OVERLOADING
Depending on the simple type's base type, the following operations are overloaded
Stringification
Numerification
Boolification
Check L<SOAP::WSDL::XSD::Typelib::Builtin> for more information.
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| googleads/googleads-perl-lib | lib/Google/Ads/AdWords/v201809/PolicySummaryDenormalizedStatus.pm | Perl | apache-2.0 | 1,206 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package hardware::server::hp::ilo::xmlapi::mode::components::battery;
use strict;
use warnings;
sub load { }
# Checks the status of every smart-storage battery reported by the iLO XML
# API, adding per-battery long output and raising the severity when a
# battery is not OK.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking batteries");
    $self->{components}->{battery} = {name => 'battery', total => 0, skip => 0};
    return if ($self->check_filter(section => 'battery'));
    # Not all iLO firmwares report smart-storage batteries.
    return if (!defined($self->{xml_result}->{GET_EMBEDDED_HEALTH_DATA}->{POWER_SUPPLIES}->{SMART_STORAGE_BATTERY}));

    # Sample payload:
    #<POWER_SUPPLIES>
    # <SMART_STORAGE_BATTERY>
    # <LABEL VALUE = "Battery 1"/>
    # <PRESENT VALUE = "Yes"/>
    # <STATUS VALUE = "OK"/>
    # <MODEL VALUE = "727258-B21"/>
    # <SPARE VALUE = "815983-001"/>
    # <SERIAL_NUMBER VALUE = "6EZBN0FB230885"/>
    # <CAPACITY VALUE = "96 Watts"/>
    # <FIRMWARE_VERSION VALUE = "1.1"/>
    # </SMART_STORAGE_BATTERY>

    # NOTE(review): assumes SMART_STORAGE_BATTERY is always an arrayref; with
    # a single battery some XML deserializers return a hashref instead —
    # confirm the parser's ForceArray configuration upstream.
    foreach my $result (@{$self->{xml_result}->{GET_EMBEDDED_HEALTH_DATA}->{POWER_SUPPLIES}->{SMART_STORAGE_BATTERY}}) {
        my $instance = $result->{LABEL}->{VALUE};

        next if ($self->check_filter(section => 'battery', instance => $instance));
        # Skip absent units (and optionally alert on them via absent_problem).
        next if ($result->{STATUS}->{VALUE} =~ /not installed|n\/a|not present|not applicable/i &&
                 $self->absent_problem(section => 'battery', instance => $instance));
        $self->{components}->{battery}->{total}++;

        $self->{output}->output_add(long_msg => sprintf("battery '%s' status is '%s' [instance = %s]",
                                    $result->{LABEL}->{VALUE}, $result->{STATUS}->{VALUE}, $instance));
        # Map the vendor status onto a plugin severity; only non-OK statuses
        # contribute to the short output.
        my $exit = $self->get_severity(label => 'default', section => 'battery', value => $result->{STATUS}->{VALUE});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("Battery '%s' status is '%s'", $result->{LABEL}->{VALUE}, $result->{STATUS}->{VALUE}));
        }
    }
}
1; | Sims24/centreon-plugins | hardware/server/hp/ilo/xmlapi/mode/components/battery.pm | Perl | apache-2.0 | 2,830 |
package Google::Ads::AdWords::v201809::CampaignError::Reason;

# Auto-generated by SOAP::WSDL (see POD below); avoid hand-editing.
use strict;
use warnings;

# XML namespace of the simpleType this class represents.
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201809'};

# derivation by restriction: behaves exactly like the builtin string type
# (SOAP::WSDL does not enforce the enumeration restriction).
use base qw(
SOAP::WSDL::XSD::Typelib::Builtin::string);

1;
__END__
=pod
=head1 NAME
=head1 DESCRIPTION
Perl data type class for the XML Schema defined simpleType
CampaignError.Reason from the namespace https://adwords.google.com/api/adwords/cm/v201809.
The reasons for the target error.
This class is derived from
SOAP::WSDL::XSD::Typelib::Builtin::string
. SOAP::WSDL's schema implementation does not validate data, so you can use it exactly
like its base type.
# Description of restrictions not implemented yet.
=head1 METHODS
=head2 new
Constructor.
=head2 get_value / set_value
Getter and setter for the simpleType's value.
=head1 OVERLOADING
Depending on the simple type's base type, the following operations are overloaded
Stringification
Numerification
Boolification
Check L<SOAP::WSDL::XSD::Typelib::Builtin> for more information.
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| googleads/googleads-perl-lib | lib/Google/Ads/AdWords/v201809/CampaignError/Reason.pm | Perl | apache-2.0 | 1,113 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::alcatel::omniswitch::snmp::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_snmp);
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    # Plugin version and the modes it exposes on the command line.
    $self->{version} = '0.1';
    $self->{modes} = {
        'cpu'                 => 'network::alcatel::omniswitch::snmp::mode::cpu',
        'hardware'            => 'network::alcatel::omniswitch::snmp::mode::hardware',
        'interfaces'          => 'snmp_standard::mode::interfaces',
        'list-interfaces'     => 'snmp_standard::mode::listinterfaces',
        'list-spanning-trees' => 'snmp_standard::mode::listspanningtrees',
        'flash-memory'        => 'network::alcatel::omniswitch::snmp::mode::flashmemory',
        'memory'              => 'network::alcatel::omniswitch::snmp::mode::memory',
        'spanning-tree'       => 'snmp_standard::mode::spanningtree',
    };

    return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check Alcatel Omniswitch in SNMP.
=cut
| Sims24/centreon-plugins | network/alcatel/omniswitch/snmp/plugin.pm | Perl | apache-2.0 | 1,979 |
package DDG::Goodie::MarkdownReference;
# ABSTRACT: Provide a cheatsheet for common Markdown syntax
use strict;
use DDG::Goodie;
use JSON;
zci answer_type => 'markdown_reference';
zci is_cached => 1;
triggers startend => (
'markdown', 'md',
);
my $json = share('synonyms.json')->slurp();
my $synonyms = decode_json($json);
# Resolves $name to a canonical element type: either $name is itself an
# element type, or it appears in that type's synonym table.
# Returns the element type, or nothing if no match.
# (The original copied the entire %$synonyms hash on every call just to
# drive each(); iterating the keys of the ref avoids the copy, and the
# explicit "return;" replaces the implicit fall-off-the-end return.)
sub get_element_from_alias {
    my $name = shift;
    foreach my $elt_type (keys %{$synonyms}) {
        return $elt_type if $name eq $elt_type;
        return $elt_type if defined $synonyms->{$elt_type}{$name};
    }
    return;
}
sub get_element_type {
    my $query = shift;
    # Thin wrapper kept for readability at the call site in get_results().
    return get_element_from_alias $query;
}
# Turns a canonical element type (e.g. 'header') into its display
# subtitle (e.g. 'Header') by capitalizing the first character.
sub element_to_subtitle {
    my ($element) = @_;
    return ucfirst($element);
}
sub get_results {
    my $query = shift;

    # Resolve the query (possibly a synonym) to a canonical element type;
    # bail out (empty return) when the query names no known element.
    my $element_type = get_element_type $query or return;

    # Template flags: every known element off, then switch on the matched one.
    my %elements = map { $_ => 0 } (
        'header',
        'list',
        'emphasis',
        'bold',
        'blockquote',
        'image',
        'link',
    );
    $elements{$element_type} = 1;

    my $subtitle = element_to_subtitle $element_type;
    return ($subtitle, \%elements);
}
# Main query handler: the remainder is the user's query with the
# 'markdown'/'md' trigger words stripped.  Returns nothing (no answer) for
# empty or unrecognized queries.
handle remainder => sub {
    my $query = shift;
    return unless $query;

    my ($subtitle, $elements) = get_results $query or return;

    return $subtitle,
        structured_answer => {
            id => 'markdown_reference',
            name => 'Answer',
            data => {
                # Per-element booleans consumed by the front-end template.
                elements => $elements,
                element_type => $subtitle,
                title => "Markdown Reference",
            },
            meta => {
                sourceName => 'Daring Fireball',
                sourceUrl => 'https://daringfireball.net/projects/markdown/syntax',
            },
            templates => {
                group => 'text',
                options => {
                    subtitle_content => 'DDH.markdown_reference.content',
                    moreAt => 1,
                },
            },
        };
};
1;
| mohan08p/zeroclickinfo-goodies | lib/DDG/Goodie/MarkdownReference.pm | Perl | apache-2.0 | 2,020 |
package Paws::DAX::DeleteSubnetGroup;
  # Auto-generated Paws request class (see POD below): one required string
  # attribute plus class-level call metadata used by the service dispatcher.
  use Moose;
  has SubnetGroupName => (is => 'ro', isa => 'Str', required => 1);

  use MooseX::ClassAttribute;

  # API operation name, response class, and result key for this request.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'DeleteSubnetGroup');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::DAX::DeleteSubnetGroupResponse');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::DAX::DeleteSubnetGroup - Arguments for method DeleteSubnetGroup on Paws::DAX
=head1 DESCRIPTION
This class represents the parameters used for calling the method DeleteSubnetGroup on the
Amazon DynamoDB Accelerator (DAX) service. Use the attributes of this class
as arguments to method DeleteSubnetGroup.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to DeleteSubnetGroup.
As an example:
$service_obj->DeleteSubnetGroup(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instantiate the underlying object.
=head1 ATTRIBUTES
=head2 B<REQUIRED> SubnetGroupName => Str
The name of the subnet group to delete.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method DeleteSubnetGroup in L<Paws::DAX>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/DAX/DeleteSubnetGroup.pm | Perl | apache-2.0 | 1,637 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package cloud::azure::custom::azcli;
use strict;
use warnings;
use DateTime;
use JSON::XS;
sub new {
    # Constructor for the Azure CLI ('az') custom backend.
    # Declares the command-line options shared by every Azure mode and
    # keeps references to the centreon output/mode objects.
    my ($class, %options) = @_;
    my $self = {};
    bless $self, $class;

    # 'output' and 'options' are mandatory: without them we can neither
    # report errors nor register our options. exit 3 == UNKNOWN.
    if (!defined($options{output})) {
        print "Class Custom: Need to specify 'output' argument.\n";
        exit 3;
    }
    if (!defined($options{options})) {
        $options{output}->add_option_msg(short_msg => "Class Custom: Need to specify 'options' argument.");
        $options{output}->option_exit();
    }

    # noptions lets a caller suppress option registration (e.g. tests).
    if (!defined($options{noptions})) {
        $options{options}->add_options(arguments => {
            "subscription:s" => { name => 'subscription' },
            "tenant:s" => { name => 'tenant' },
            "client-id:s" => { name => 'client_id' },
            "client-secret:s" => { name => 'client_secret' },
            "timeframe:s" => { name => 'timeframe' },
            "interval:s" => { name => 'interval' },
            "aggregation:s@" => { name => 'aggregation' },
            "zeroed" => { name => 'zeroed' },
            "timeout:s" => { name => 'timeout', default => 50 },
            "sudo" => { name => 'sudo' },
            "command:s" => { name => 'command', default => 'az' },
            "command-path:s" => { name => 'command_path' },
            "command-options:s" => { name => 'command_options', default => '' },
            "proxyurl:s" => { name => 'proxyurl' },
        });
    }
    $options{options}->add_help(package => __PACKAGE__, sections => 'AZCLI OPTIONS', once => 1);

    $self->{output} = $options{output};
    $self->{mode} = $options{mode};

    return $self;
}
sub set_options {
    # Keep a reference to the parsed command-line options so that
    # check_options() and the azure_* helpers can read them later.
    my $self = shift;
    my %options = @_;
    $self->{option_results} = $options{option_results};
}
sub set_defaults {
    # Apply per-mode default option values supplied by the calling mode.
    # $options{default} maps a mode name to an array of hashes; each hash
    # holds option-name => default-value pairs for the matching array slot.
    # Only fills values the user did not provide on the command line.
    my ($self, %options) = @_;

    foreach (keys %{$options{default}}) {
        # Only the entry matching the currently running mode applies.
        if ($_ eq $self->{mode}) {
            for (my $i = 0; $i < scalar(@{$options{default}->{$_}}); $i++) {
                foreach my $opt (keys %{$options{default}->{$_}[$i]}) {
                    # NOTE(review): option_results values are treated as
                    # array refs indexed by $i here — presumably multi-value
                    # options; confirm against the option parser.
                    if (!defined($self->{option_results}->{$opt}[$i])) {
                        $self->{option_results}->{$opt}[$i] = $options{default}->{$_}[$i]->{$opt};
                    }
                }
            }
        }
    }
}
sub check_options {
    # Validate parsed options: export proxy settings to the environment
    # (the az CLI honours HTTP_PROXY/HTTPS_PROXY), reject unsupported
    # aggregation names, and cache the subscription id. Returns 0 so the
    # framework does not re-enter this method.
    my ($self, %options) = @_;

    if (defined($self->{option_results}->{proxyurl}) && $self->{option_results}->{proxyurl} ne '') {
        $ENV{HTTP_PROXY} = $self->{option_results}->{proxyurl};
        $ENV{HTTPS_PROXY} = $self->{option_results}->{proxyurl};
    }

    # Only the four aggregations supported by 'az monitor metrics' are allowed.
    if (defined($self->{option_results}->{aggregation})) {
        foreach my $aggregation (@{$self->{option_results}->{aggregation}}) {
            if ($aggregation !~ /average|maximum|minimum|total/i) {
                $self->{output}->add_option_msg(short_msg => "Aggregation '" . $aggregation . "' is not handled");
                $self->{output}->option_exit();
            }
        }
    }

    $self->{subscription} = (defined($self->{option_results}->{subscription})) ? $self->{option_results}->{subscription} : undef;

    return 0;
}
sub execute {
    # Run the az CLI with the given $options{cmd_options}, decode its JSON
    # output, and return the decoded structure. Exits with an error if the
    # response is not valid JSON.
    # NOTE(review): centreon::plugins::misc is not use'd in this file —
    # presumably loaded by the plugin framework; confirm.
    my ($self, %options) = @_;

    $self->{output}->output_add(long_msg => "Command line: '" . $self->{option_results}->{command} . " " . $options{cmd_options} . "'", debug => 1);

    my ($response) = centreon::plugins::misc::execute(
        output => $self->{output},
        options => $self->{option_results},
        sudo => $self->{option_results}->{sudo},
        command => $self->{option_results}->{command},
        command_path => $self->{option_results}->{command_path},
        command_options => $options{cmd_options});

    my $raw_results;

    eval {
        $raw_results = JSON::XS->new->utf8->decode($response);
    };
    if ($@) {
        # Keep the raw response in debug output so the user can inspect
        # what the CLI actually returned.
        $self->{output}->output_add(long_msg => $response, debug => 1);
        $self->{output}->add_option_msg(short_msg => "Cannot decode response (add --debug option to display returned content)");
        $self->{output}->option_exit();
    }

    return $raw_results;
}
sub convert_duration {
    # Convert a duration string returned by Azure into seconds.
    # Supported formats:
    #   - ISO8601 durations ending in seconds (e.g. 'PT5M30S')
    #   - 'HH:MM:SS.fraction' clock-style strings
    # Returns undef when the input matches neither format.
    #
    # Fix: the second branch's error message named
    # 'DateTime::Format::Duration' while the code loads 'DateTime::Duration'.
    my ($self, %options) = @_;
    my $duration;
    if ($options{time_string} =~ /^P.*S$/) {
        centreon::plugins::misc::mymodule_load(output => $self->{output}, module => 'DateTime::Format::Duration::ISO8601',
            error_msg => "Cannot load module 'DateTime::Format::Duration::ISO8601'.");
        my $format = DateTime::Format::Duration::ISO8601->new;
        my $d = $format->parse_duration($options{time_string});
        # DateTime::Duration normalizes hours into minutes, so
        # minutes*60 + seconds covers hour-bearing durations too.
        $duration = $d->minutes * 60 + $d->seconds;
    } elsif ($options{time_string} =~ /^(\d+):(\d+):(\d+)\.\d+$/) {
        centreon::plugins::misc::mymodule_load(output => $self->{output}, module => 'DateTime::Duration',
            error_msg => "Cannot load module 'DateTime::Duration'.");
        my $d = DateTime::Duration->new(hours => $1, minutes => $2, seconds => $3);
        $duration = $d->minutes * 60 + $d->seconds;
    }

    return $duration;
}
sub azure_get_metrics_set_cmd {
    # Build the 'az monitor metrics list' argument string for
    # azure_get_metrics(). Returns undef when the user supplied
    # --command-options, which then takes precedence entirely.
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    # Metrics and aggregations are passed as individually quoted arguments.
    my $cmd_options = "monitor metrics list --metrics '" . join('\' \'', @{$options{metrics}}) . "' --start-time $options{start_time} --end-time $options{end_time} " .
        "--interval $options{interval} --aggregation '" . join('\' \'', @{$options{aggregations}}) . "' --output json --resource '$options{resource}' " .
        "--resource-group '$options{resource_group}' --resource-type '$options{resource_type}' --resource-namespace '$options{resource_namespace}'";
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}
sub azure_get_metrics {
    # Query Azure Monitor for metric values over the last
    # $options{timeframe} seconds and fold the per-point data into one
    # summary per metric: averaged average, min of minimums, max of
    # maximums, summed totals. Returns (summary hashref, raw CLI output).
    my ($self, %options) = @_;

    my $results = {};
    my $start_time = DateTime->now->subtract(seconds => $options{timeframe})->iso8601.'Z';
    my $end_time = DateTime->now->iso8601.'Z';

    my $cmd_options = $self->azure_get_metrics_set_cmd(%options, start_time => $start_time, end_time => $end_time);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    foreach my $metric (@{$raw_results->{value}}) {
        # Normalize the metric name to a lowercase, underscore-separated key.
        my $metric_name = lc($metric->{name}->{value});
        $metric_name =~ s/ /_/g;

        $results->{$metric_name} = { points => 0, name => $metric->{name}->{localizedValue} };

        foreach my $timeserie (@{$metric->{timeseries}}) {
            foreach my $point (@{$timeserie->{data}}) {
                # 'points' counts contributing data points; note it is
                # shared by the average and total accumulators below.
                if (defined($point->{average})) {
                    $results->{$metric_name}->{average} = 0 if (!defined($results->{$metric_name}->{average}));
                    $results->{$metric_name}->{average} += $point->{average};
                    $results->{$metric_name}->{points}++;
                }
                if (defined($point->{minimum})) {
                    $results->{$metric_name}->{minimum} = $point->{minimum}
                        if (!defined($results->{$metric_name}->{minimum}) || $point->{minimum} < $results->{$metric_name}->{minimum});
                }
                if (defined($point->{maximum})) {
                    $results->{$metric_name}->{maximum} = $point->{maximum}
                        if (!defined($results->{$metric_name}->{maximum}) || $point->{maximum} > $results->{$metric_name}->{maximum});
                }
                if (defined($point->{total})) {
                    $results->{$metric_name}->{total} = 0 if (!defined($results->{$metric_name}->{total}));
                    $results->{$metric_name}->{total} += $point->{total};
                    $results->{$metric_name}->{points}++;
                }
            }
        }

        # Turn the accumulated sum of averages into a mean over all points.
        if (defined($results->{$metric_name}->{average})) {
            $results->{$metric_name}->{average} /= $results->{$metric_name}->{points};
        }
    }

    return $results, $raw_results;
}
# ----------------------------------------------------------------------------
# 'az ... list' helpers.
# Each azure_list_X() entry point builds its CLI arguments through a matching
# azure_list_X_set_cmd() sub and returns the decoded JSON from execute().
# Every *_set_cmd sub:
#   - returns undef when the user supplied --command-options (those then
#     take precedence entirely), and
#   - appends --subscription when one was configured in check_options().
# ----------------------------------------------------------------------------

# az resource list, optionally filtered by namespace/type/location/group.
sub azure_list_resources_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "resource list --output json";
    $cmd_options .= " --namespace '$options{namespace}'" if (defined($options{namespace}) && $options{namespace} ne '');
    $cmd_options .= " --resource-type '$options{resource_type}'" if (defined($options{resource_type}) && $options{resource_type} ne '');
    $cmd_options .= " --location '$options{location}'" if (defined($options{location}) && $options{location} ne '');
    $cmd_options .= " --resource-group '$options{resource_group}'" if (defined($options{resource_group}) && $options{resource_group} ne '');
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_resources {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_resources_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}

# az vm list-sizes for a given (required) location.
sub azure_list_vm_sizes_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "vm list-sizes --location '$options{location}' --output json";
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_vm_sizes {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_vm_sizes_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}

# az vm list; --show-details adds power state and public IPs.
sub azure_list_vms_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "vm list --output json";
    $cmd_options .= " --resource-group '$options{resource_group}'" if (defined($options{resource_group}) && $options{resource_group} ne '');
    $cmd_options .= " --show-details" if (defined($options{show_details}));
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_vms {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_vms_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}

# az group list (resource groups of the subscription).
sub azure_list_groups_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "group list --output json";
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_groups {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_groups_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}

# az group deployment list for a (required) resource group.
sub azure_list_deployments_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "group deployment list --resource-group '$options{resource_group}' --output json";
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_deployments {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_deployments_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}

# az backup vault list, optionally restricted to a resource group.
sub azure_list_vaults_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "backup vault list --output json";
    $cmd_options .= " --resource-group '$options{resource_group}'" if (defined($options{resource_group}) && $options{resource_group} ne '');
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_vaults {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_vaults_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}

# az backup job list for a (required) resource group and vault.
sub azure_list_backup_jobs_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "backup job list --resource-group '$options{resource_group}' --vault-name '$options{vault_name}' --output json";
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_backup_jobs {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_backup_jobs_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}

# az backup item list for a (required) resource group and vault.
sub azure_list_backup_items_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "backup item list --resource-group '$options{resource_group}' --vault-name '$options{vault_name}' --output json";
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_backup_items {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_backup_items_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}

# az network express-route list, optionally restricted to a resource group.
sub azure_list_expressroute_circuits_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "network express-route list --output json";
    $cmd_options .= " --resource-group '$options{resource_group}'" if (defined($options{resource_group}) && $options{resource_group} ne '');
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_expressroute_circuits {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_expressroute_circuits_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}

# az network vnet-gateway list for a (required) resource group.
sub azure_list_vpn_gateways_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "network vnet-gateway list --resource-group '$options{resource_group}' --output json";
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_vpn_gateways {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_vpn_gateways_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}

# az network vnet list, optionally restricted to a resource group.
sub azure_list_virtualnetworks_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "network vnet list --output json";
    $cmd_options .= " --resource-group '$options{resource_group}'" if (defined($options{resource_group}) && $options{resource_group} ne '');
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_virtualnetworks {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_virtualnetworks_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}

# az network vnet peering list for a (required) resource group and vnet.
sub azure_list_vnet_peerings_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "network vnet peering list --resource-group '$options{resource_group}' --vnet-name '$options{vnet_name}' --output json";
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_vnet_peerings {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_vnet_peerings_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}

# az sql server list, optionally restricted to a resource group.
sub azure_list_sqlservers_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "sql server list --output json";
    $cmd_options .= " --resource-group '$options{resource_group}'" if (defined($options{resource_group}) && $options{resource_group} ne '');
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_sqlservers {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_sqlservers_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}

# az sql db list for a (required) resource group and server.
sub azure_list_sqldatabases_set_cmd {
    my ($self, %options) = @_;

    return if (defined($self->{option_results}->{command_options}) && $self->{option_results}->{command_options} ne '');

    my $cmd_options = "sql db list --resource-group '$options{resource_group}' --server '$options{server}' --output json";
    $cmd_options .= " --subscription '$self->{subscription}'" if (defined($self->{subscription}) && $self->{subscription} ne '');

    return $cmd_options;
}

sub azure_list_sqldatabases {
    my ($self, %options) = @_;

    my $cmd_options = $self->azure_list_sqldatabases_set_cmd(%options);
    my $raw_results = $self->execute(cmd_options => $cmd_options);

    return $raw_results;
}
1;
__END__
=head1 NAME
Microsoft Azure CLI
=head1 AZCLI OPTIONS
Microsoft Azure CLI 2.0
To install the Azure CLI 2.0 in a CentOS/RedHat environment :
(As root)
# rpm --import https://packages.microsoft.com/keys/microsoft.asc
# sh -c 'echo -e "[azure-cli]\nname=Azure CLI\nbaseurl=https://packages.microsoft.com/yumrepos/azure-cli\nenabled=1\ngpgcheck=1\ngpgkey=https://packages.microsoft.com/keys/microsoft.asc" > /etc/yum.repos.d/azure-cli.repo'
# yum install azure-cli
(As centreon-engine)
# az login
Go to https://aka.ms/devicelogin and enter the code given by the last command.
For further information, visit https://docs.microsoft.com/en-us/cli/azure/install-azure-cli?view=azure-cli-latest.
=over 8
=item B<--subscription>
Set Azure subscription (Required if logged to several subscriptions).
=item B<--timeframe>
Set timeframe in seconds (i.e. 3600 to check last hour).
=item B<--interval>
Set interval of the metric query (Can be : PT1M, PT5M, PT15M, PT30M, PT1H, PT6H, PT12H, PT24H).
=item B<--aggregation>
Set monitor aggregation (Can be multiple, Can be: 'minimum', 'maximum', 'average', 'total').
=item B<--zeroed>
Set metrics value to 0 if none. Useful when Monitor
does not return any value when the metric is not defined.
=item B<--timeout>
Set timeout in seconds (Default: 50).
=item B<--sudo>
Use 'sudo' to execute the command.
=item B<--command>
Command to get information (Default: 'az').
Can be changed if you have output in a file.
=item B<--command-path>
Command path (Default: none).
=item B<--command-options>
Command options (Default: none).
=item B<--proxyurl>
Proxy URL if any
=back
=head1 DESCRIPTION
B<custom>.
=cut
| Sims24/centreon-plugins | cloud/azure/custom/azcli.pm | Perl | apache-2.0 | 21,319 |
#
# Copyright 2015 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package os::windows::local::mode::ntp;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
use centreon::plugins::misc;
use Net::NTP;
# Need to patch Net::NTP for windows and comment:
# IO::Socket::INET6
# Otherwise, we have a "cannot determine peer address" error.
sub new {
    # Constructor for the Windows NTP-offset check mode.
    # Registers mode options: the NTP server to query (auto-detected from
    # the Windows time service when omitted), its port, thresholds on the
    # offset, and the w32tm command timeout.
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';
    $options{options}->add_options(arguments =>
                                {
                                  "ntp-hostname:s"  => { name => 'ntp_hostname' },
                                  "ntp-port:s"      => { name => 'ntp_port', default => 123 },
                                  "warning:s"       => { name => 'warning' },
                                  "critical:s"      => { name => 'critical' },
                                  "timeout:s"       => { name => 'timeout', default => 30 },
                                });
    return $self;
}
sub check_options {
    # Validate the warning/critical threshold expressions; abort with an
    # explicit message when either is malformed.
    my ($self, %options) = @_;
    $self->SUPER::init(%options);

    foreach my $label ('warning', 'critical') {
        next if ($self->{perfdata}->threshold_validate(label => $label, value => $self->{option_results}->{$label}) != 0);
        $self->{output}->add_option_msg(short_msg => "Wrong $label threshold '" . $self->{option_results}->{$label} . "'.");
        $self->{output}->option_exit();
    }
}
sub run {
    # Measure the local clock's offset against an NTP server and check it
    # against the thresholds. If no --ntp-hostname was given, the server is
    # auto-discovered from the Windows time service registry via w32tm.
    my ($self, %options) = @_;

    my $ntp_hostname = $self->{option_results}->{ntp_hostname};
    if (!defined($ntp_hostname)) {
        # Dump the W32Time 'parameters' registry subkey to find the
        # configured sync type and NTP server.
        my ($stdout) = centreon::plugins::misc::windows_execute(output => $self->{output},
                                                                timeout => $self->{option_results}->{timeout},
                                                                command => 'w32tm /dumpreg /subkey:parameters',
                                                                command_path => undef,
                                                                command_options => undef);
        my ($type, $ntp_server);

        $stdout =~ /^Type\s+\S+\s+(\S+)/mi;
        $type = $1;
        if ($stdout =~ /^NtpServer\s+\S+\s+(\S+)/mi) {
            # NtpServer values look like 'host,flags' — keep the host part.
            ($ntp_server, my $flag) = split /,/, $1;
        }

        # type can be:
        # NoSync: The client does not synchronize time)
        # NTP: The client synchronizes time from an external time source
        # NT5DS: The client is configured to use the domain hierarchy for its time synchronization
        # AllSync: The client synchronizes time from any available time source, including domain hierarchy and external time sources
        if ($type =~ /NoSync/i) {
            $self->{output}->output_add(severity => 'UNKNOWN',
                                        short_msg => sprintf("No ntp configuration set. Please use --ntp-hostname or set windows ntp configuration."));
            $self->{output}->display();
            $self->{output}->exit();
        }
        # NT5DS/AllSync without an explicit NtpServer: the source comes
        # from Active Directory and cannot be derived from the registry.
        if (!defined($ntp_server)) {
            $self->{output}->output_add(severity => 'UNKNOWN',
                                        short_msg => sprintf("Cannot get ntp source configuration (it uses AD). Please use --ntp-hostname."));
            $self->{output}->display();
            $self->{output}->exit();
        }

        $ntp_hostname = $ntp_server;
    }

    # Query the NTP server; get_ntp_response dies on network failure.
    my %ntp;
    eval {
        %ntp = Net::NTP::get_ntp_response($ntp_hostname, $self->{option_results}->{ntp_port});
    };
    if ($@) {
        $self->{output}->output_add(severity => 'UNKNOWN',
                                    short_msg => "Couldn't connect to ntp server ($ntp_hostname): " . $@);
        $self->{output}->display();
        $self->{output}->exit();
    }

    # Offset in seconds (positive = local clock ahead of the server).
    my $diff = $ntp{Offset};
    my $exit = $self->{perfdata}->threshold_check(value => $diff,
                                                  threshold => [ { label => 'critical', exit_litteral => 'critical' }, { label => 'warning', exit_litteral => 'warning' } ]);

    $self->{output}->output_add(severity => $exit,
                                short_msg => sprintf("Time offset %.3f second(s)", $diff));
    $self->{output}->perfdata_add(label => 'offset', unit => 's',
                                  value => sprintf("%.3f", $diff),
                                  warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning'),
                                  critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical'),
                                  );

    $self->{output}->display();
    $self->{output}->exit();
}
1;
__END__
=head1 MODE
Check time offset of server with ntp server.
=over 8
=item B<--warning>
Threshold warning.
=item B<--critical>
Threshold critical.
=item B<--ntp-hostname>
Set the ntp hostname (if not set, we try to find it with w32tm command).
=item B<--ntp-port>
Set the ntp port (Default: 123).
=item B<--timeout>
Set timeout time for 'w32tm' command execution (Default: 30 sec)
=back
=cut
| s-duret/centreon-plugins | os/windows/local/mode/ntp.pm | Perl | apache-2.0 | 6,108 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 NAME
Bio::EnsEMBL::Compara::PipeConfig::Lastz_conf
=head1 SYNOPSIS
#1. Update ensembl-hive, ensembl and ensembl-compara GIT repositories before each new release
#3. Check all default_options in PairAligner_conf.pm, especially:
release
pipeline_db (-host)
resource_classes
#4. Check all default_options below, especially
ref_species (if not homo_sapiens)
default_chunks (especially if the reference is not human, since the masking_option_file option will have to be changed)
pair_aligner_options
#5. Run init_pipeline.pl script:
Using command line arguments:
init_pipeline.pl Bio::EnsEMBL::Compara::PipeConfig::Lastz_conf --dbname hsap_btau_lastz_64 --password <your password> --mlss_id 534 --pipeline_db -host=compara1 --ref_species homo_sapiens --pipeline_name LASTZ_hs_bt_64
#5. Run the "beekeeper.pl ... -loop" command suggested by init_pipeline.pl
=head1 DESCRIPTION
This configuration file gives defaults specific to the lastz net pipeline. It inherits from PairAligner_conf.pm and parameters here will over-ride the parameters in PairAligner_conf.pm.
Please see PairAligner_conf.pm for general details of the pipeline.
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
package Bio::EnsEMBL::Compara::PipeConfig::Example::EGLastz_conf;
use strict;
use warnings;
use base ('Bio::EnsEMBL::Compara::PipeConfig::Example::EGPairAligner_conf'); # Inherit from base PairAligner class
sub default_options {
    # LASTZ-specific defaults layered on top of the generic EGPairAligner
    # configuration. Values here override the parent's defaults; exact
    # chunk sizes and aligner options are tuned and must not be changed
    # without re-validating against the pair-aligner pipeline.
    my ($self) = @_;
    return {
	%{$self->SUPER::default_options},   # inherit the generic ones

	'pipeline_name'         => 'LASTZ_'.$self->o('rel_with_suffix'),   # name the pipeline to differentiate the submitted processes
        'exe_dir' => '/nfs/panda/ensemblgenomes/production/compara/binaries',

	#Define location of core databases separately (over-ride curr_core_sources_locs in Pairwise_conf.pm)
	#'reference' => {
	#	-host           => "host_name",
	#	-port           => port,
	#	-user           => "user_name",
	#	-dbname         => "my_human_database",
	#	-species        => "homo_sapiens"
	#    },
        #'non_reference' => {
	#	    -host           => "host_name",
	#	    -port           => port,
	#	    -user           => "user_name",
	#	    -dbname         => "my_bushbaby_database",
	#	    -species        => "otolemur_garnettii"
	#	  },

	#'curr_core_dbs_locs'    => [ $self->o('reference'), $self->o('non_reference') ],
	#'curr_core_sources_locs'=> '',

	#Reference species
#	'ref_species' => 'homo_sapiens',
	'ref_species' => '',

	#Define chunking
	'default_chunks' => {#human example
			     # 'reference'   => {'chunk_size' => 30000000,
			     # 		       'overlap'    => 0,
			     # 		       'include_non_reference' => 0, #1  => include non_reference regions (eg human assembly patches)
			     # 						     #0  => do not include non_reference regions
			     # 						     #-1 => auto-detect (only include non_reference regions if the non-reference species is high-coverage
			     # 						     #ie has chromosomes since these analyses are the only ones we keep up-to-date with the patches-pipeline)
			     # 		       #Should use this for human vs non-primate
			     # 		       'masking_options_file' => $self->o('ensembl_cvs_root_dir') . "/ensembl-compara/scripts/pipeline/human36.spec"},
			     #non human example
			      'reference'     => {'chunk_size'      => 10000000,
			     			'overlap'         => 0,
			     			'masking_options' => '{default_soft_masking => 1}'},
			     'non_reference' => {'chunk_size'      => 10100000,
			     			 'group_set_size'  => 10100000,
			     			 'overlap'         => 100000,
			     			 'masking_options' => '{default_soft_masking => 1}'},
			    },

	#Location of executables
	# 'pair_aligner_exe' => '/software/ensembl/compara/bin/lastz',
        'pair_aligner_exe' => $self->o('exe_dir').'/lastz',

	#
	#Default pair_aligner
	#
   	'pair_aligner_method_link' => [1001, 'LASTZ_RAW'],
	'pair_aligner_logic_name' => 'LastZ',
	'pair_aligner_module' => 'Bio::EnsEMBL::Compara::RunnableDB::PairAligner::LastZ',
	'pair_aligner_options' => 'T=1 L=3000 H=2200 O=400 E=30 --ambiguous=iupac',
#	'pair_aligner_options' => 'T=1 L=3000 H=2200 O=400 E=30 --ambiguous=iupac --identity=80..100 --matchcount=1000', # Tomato Vs Potato

	#
	#Default chain
	#
	'chain_input_method_link' => [1001, 'LASTZ_RAW'],
	'chain_output_method_link' => [1002, 'LASTZ_CHAIN'],

	'linear_gap' => 'medium',

	#
	#Default net
	#
	'net_input_method_link' => [1002, 'LASTZ_CHAIN'],
	'net_output_method_link' => [16, 'LASTZ_NET'],

    };
}
1;
| danstaines/ensembl-compara | modules/Bio/EnsEMBL/Compara/PipeConfig/Example/EGLastz_conf.pm | Perl | apache-2.0 | 5,732 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Users::Command::Account::Bookmark::Share;
### Command module to save a shared bookmark as a group record
### @author hr5
use strict;
use EnsEMBL::Users::Messages qw(
MESSAGE_NO_GROUP_SELECTED
MESSAGE_GROUP_NOT_FOUND
MESSAGE_NO_BOOKMARK_SELECTED
MESSAGE_BOOKMARK_NOT_FOUND
);
use parent qw(EnsEMBL::Users::Command::Account);
sub process {
  ## Clone the user's selected bookmark records into the chosen group so all
  ## group members can see them, then redirect to the group's View page.
  ## On any validation failure, redirect back to the share form with an
  ## error code in the 'err' parameter instead.
  my $self = shift;
  my $object = $self->object; # NOTE(review): unused in this method
  my $hub = $self->hub;
  my $user = $hub->user;
  my $group_id = $hub->param('group');
  my @bookmark_ids = $hub->param('id'); # multi-valued param: one record id per selected bookmark
  my $err;
  if (@bookmark_ids) {
    my $bookmarks = $user->records({'record_id' => \@bookmark_ids});
    if ($bookmarks->count) {
      if ($group_id) {
        my $group = $user->group($group_id);
        my $new_bookmarks;
        if ($group) {
          ## Clone each bookmark (clone_and_reset clears the record id so a
          ## fresh group record is created) and collect the saved records.
          for (@$bookmarks) { # assumes the result set can be dereferenced as an array - TODO confirm
            my $bookmark_data = $_->clone_and_reset->data;
            $bookmark_data->{'click'} = 0; # shared copy starts with a zero click count
            if (!$new_bookmarks) {
              $new_bookmarks = $group->add_record({'data' => $bookmark_data, 'type' => 'bookmark'});
            } else {
              $new_bookmarks->add($group->add_record({'data' => $bookmark_data, 'type' => 'bookmark'}));
            }
          }
          $new_bookmarks->save({'user' => $user});
          $user->has_changes(1);
          ## notify members if needed
          $self->send_group_sharing_notification_email($group, $new_bookmarks);
          return $self->ajax_redirect($hub->url({'action' => 'Groups', 'function' => 'View', 'id' => $group->group_id}));
        } else {
          $err = MESSAGE_GROUP_NOT_FOUND;
        }
      } else {
        $err = MESSAGE_NO_GROUP_SELECTED;
      }
    } else {
      $err = MESSAGE_BOOKMARK_NOT_FOUND;
    }
  } else {
    $err = MESSAGE_NO_BOOKMARK_SELECTED;
  }
  ## Fall-through: some validation above failed - return to the share form.
  return $self->ajax_redirect($hub->url({'species' => '', 'type' => 'Account', 'action' => 'Share', 'function' => 'Bookmark', 'err' => $err}));
}
1;
| muffato/public-plugins | users/modules/EnsEMBL/Users/Command/Account/Bookmark/Share.pm | Perl | apache-2.0 | 2,719 |
:- use_module(library(lists),[append/3]).
% APPLY CHANGES BELOW
% Application
% set_app_search_paths/0
%
% Registers the directory containing this file under the 'app' file-search
% alias, adds the application root and its Resources subdirectory to the
% library search path, and loads the shared GODIS search paths.
%
% The directory is recovered by stripping the trailing "/app_search_paths.pl"
% from this file's absolute name: append/3 is run "backwards" to find the
% prefix (assumes double-quoted strings are character lists - TODO confirm
% the double_quotes flag in this Prolog system).
set_app_search_paths:-
absolute_file_name('app_search_paths.pl',Abs),
atom_chars(Abs,AbsChars),
append(PathChars,"/app_search_paths.pl",AbsChars),
atom_chars(Path,PathChars),
assert(user:file_search_path(app,Path)),
assert(user:library_directory(app(''))),
assert(user:library_directory(app('Resources'))),
ensure_loaded('$GODIS/search_paths_aod').
% assert(user:library_directory(app('ResourceInterfaces'))),
% assert(user:library_directory(app('Modules'))),
% assert(user:library_directory(app('General'))).
% Executed at load time so the search paths are in place immediately.
:-set_app_search_paths.
| TeamSPoon/logicmoo_workspace | packs_sys/logicmoo_nlu/ext/SIRIDUS/UGOT-D31/godis-apps/domain-player/app_search_paths.pl | Perl | mit | 636 |
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package apps::github::mode::stats;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
use centreon::plugins::http;
use JSON;
use Data::Dumper;
sub new {
    # Constructor: build on the generic centreon mode, register this mode's
    # command-line options and attach an HTTP client for the GitHub API.
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';

    # Options understood by this mode; defaults target the public GitHub
    # REST API over HTTPS.
    my %arguments = (
        "hostname:s"   => { name => 'hostname', default => 'api.github.com' },
        "port:s"       => { name => 'port', default => '443'},
        "proto:s"      => { name => 'proto', default => 'https' },
        "credentials"  => { name => 'credentials' },
        "username:s"   => { name => 'username' },
        "password:s"   => { name => 'password' },
        "timeout:s"    => { name => 'timeout' },
        "owner:s"      => { name => 'owner' },
        "repository:s" => { name => 'repository' },
    );
    $options{options}->add_options(arguments => \%arguments);

    $self->{http} = centreon::plugins::http->new(output => $self->{output});
    return $self;
}
sub check_options {
    # Validate mandatory options and derive the API URL path
    # (/repos/<owner>/<repository>) before configuring the HTTP client.
    my ($self, %options) = @_;
    $self->SUPER::init(%options);

    # Check in this order so the error messages match historical behaviour.
    foreach my $mandatory (qw(repository owner)) {
        if (!defined($self->{option_results}->{$mandatory})) {
            $self->{output}->add_option_msg(short_msg => "Please set the $mandatory option");
            $self->{output}->option_exit();
        }
    }

    $self->{option_results}->{url_path} = sprintf("/repos/%s/%s",
        $self->{option_results}->{owner},
        $self->{option_results}->{repository});
    $self->{http}->set_options(%{$self->{option_results}});
}
sub run {
    # Fetch repository metadata from the GitHub API, emit an OK status line
    # with fork/watcher/star counts and matching perfdata, then exit.
    my ($self, %options) = @_;

    my $jsoncontent = $self->{http}->request();

    my $json = JSON->new;
    my $webcontent;
    eval {
        $webcontent = $json->decode($jsoncontent);
    };
    if ($@) {
        $self->{output}->add_option_msg(short_msg => "Cannot decode json response: $@");
        # BUG FIX: Dumper() was previously called with no argument, so the
        # debug log never contained the offending payload. Dump the raw,
        # undecodable response instead.
        $self->{output}->output_add(long_msg => Data::Dumper::Dumper($jsoncontent), debug => 1);
        $self->{output}->option_exit();
    }

    # GitHub API naming is historical: 'subscribers_count' holds the
    # watchers, while 'watchers_count' actually holds the stargazers.
    my $watchers = $webcontent->{subscribers_count};
    my $forks = $webcontent->{forks_count};
    my $stars = $webcontent->{watchers_count};

    $self->{output}->output_add(severity => "OK",
                                short_msg => sprintf("%d forks - %d watchers - %d stars", $forks, $watchers, $stars));
    $self->{output}->perfdata_add(label => 'forks',
                                  value => $forks,
                                  min => 0);
    $self->{output}->perfdata_add(label => 'watchers',
                                  value => $watchers,
                                  min => 0);
    $self->{output}->perfdata_add(label => 'stars',
                                  value => $stars,
                                  min => 0);
    $self->{output}->display();
    $self->{output}->exit();
}
1;
__END__
=head1 MODE
Check GitHub's statistics
=over 8
=item B<--hostname>
IP Addr/FQDN of the GitHub API server (Default: api.github.com)
=item B<--port>
Port used by GitHub's status website (Default: '443')
=item B<--proto>
Specify https if needed (Default: 'https')
=item B<--urlpath>
Set path to get GitHub repository information (Default: '/repos/:owner/:repository')
=item B<--credentials>
Specify this option if you access the webpage over basic authentication
=item B<--username>
Specify username
=item B<--password>
Specify password
=item B<--timeout>
Threshold for HTTP timeout (Default: 5)
=back
=cut
| bcournaud/centreon-plugins | apps/github/mode/stats.pm | Perl | apache-2.0 | 4,423 |
#! /usr/bin/perl -w
#
# Copyright (c) 2004 Liviu Daia <Liviu.Daia@imar.ro>
# All rights reserved.
#
# Revision
# Id
# Source
#
use HTML::Parser;
use strict;
use Carp ();
# Promote every warning to a full stack trace to aid debugging.
local $SIG{__WARN__} = \&Carp::cluck;
# $p  - shared HTML::Parser instance
# $fn - file currently being parsed (read by the start handler below)
# %a  - anchor name => list of files that define it
my ($p, $fn, %a);
# Start-tag handler: record the defining file of every named <a> anchor.
sub
html_parse_start ($$)
{
    my ($t, $attr) = @_;
    push @{$a{$attr->{name}}}, $fn
        if ($t eq 'a' and defined $attr->{name});
}
$p = HTML::Parser->new(api_version => 3);
$p->strict_comment (0);
$p->report_tags (qw(a));                # only <a> tags are of interest
$p->ignore_elements (qw(script style)); # skip non-content markup
$p->handler (start => \&html_parse_start, 'tagname, attr');
# Parse every HTML file named on the command line.
while ($fn = shift)
{
    $p->parse_file ($fn);
    $p->eof;
}
# Report anchors defined in more than one file, and anchors whose name is
# purely numerical (digits and dots).
for (keys %a)
{
    print "$_\t\tdefined in ", (join ', ', @{$a{$_}}), "\n"
        if (@{$a{$_}} > 1);
    print "$_\t\tnumerical in ", (join ', ', @{$a{$_}}), "\n"
        if (m/^[\d.]+$/o);
}
| execunix/vinos | external/ibm-public/postfix/dist/mantools/get_anchors.pl | Perl | apache-2.0 | 803 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Lucy::Analysis::EasyAnalyzer;
# Thin Perl-side stub: the implementation is provided by the Lucy core.
use Lucy;
# Standard CPAN idiom: the eval numifies the version string (and would
# normalize a dev release such as '0.004_000' to a plain number).
our $VERSION = '0.004000';
$VERSION = eval $VERSION;
1;
__END__
| kidaa/lucy | perl/lib/Lucy/Analysis/EasyAnalyzer.pm | Perl | apache-2.0 | 898 |
#!/usr/bin/env perl
# Rotate each sequence in a FASTA file so that a chosen base becomes
# position 1 (for circular genomes). Output goes to a sibling file with
# ".newstartN" inserted before the extension.
use strict;
# NOTE(review): 'use warnings' is missing; consider adding it.
my $usage = <<USAGE;
Function: Reset start position for circular genome.
Usage: reset_start_position_for_circular_genome <fasta file> <new start>
Example:
1. Set the 100th base as the new start position
reset_start_position_for_circular_genome seq.fa 100
Author: Wei Shen <shenwei356#gmail.com> <http://shenwei.me>
Change history:
- 2014-04-30 rewrite.
- 2011 first edition.
USAGE
die $usage unless @ARGV == 2;
my ( $infile, $newstart, $head, $seq, $newseq, $buffer, $outfile );
$infile = shift;
$newstart = shift;
die "newstart should be integer greater than 0, you input $newstart.\n"
    unless $newstart =~ /^\d+$/ and $newstart > 0;
$buffer = '';
# NOTE(review): bareword handle and 2-arg open; 3-arg lexical open preferred.
open IN, $infile or die "fail to open sequence file $infile!\n";
# Read FASTA records: '>' as the input record separator yields one record
# per sequence; the first read discards the empty leading record.
local $/ = '>';
<IN>;
while (<IN>) {
    s/>$//;
    ( $head, $seq ) = split "\r?\n", $_, 2;
    $seq =~ s/\s+//g;
    # Rotate the circular sequence so base $newstart becomes base 1.
    $newseq = substr( $seq, $newstart - 1 ) . substr( $seq, 0, $newstart - 1 );
    $buffer .= ">$head (start position move to $newstart)\n"
        . format_seq( $newseq, 70 ) . "\n";
}
close IN;
$/ = "\n";
# Derive output name by inserting ".newstartN" before the file extension,
# falling back to appending when no extension is found.
$outfile = "$infile.newstart$newstart.fa";
if ( $infile =~ /(.+)\.(.+?)$/ ) {
    $outfile = "$1.newstart$newstart.$2";
}
open OUT, ">", $outfile or die "failed to open file $outfile\n";
print OUT $buffer;
close OUT;
# format_seq($seq, $n) - wrap a string into lines of at most $n characters
# and return the wrapped string. Matches the historical behaviour exactly:
# a final partial chunk carries no trailing newline, an exact multiple of
# $n keeps its trailing newline, and input shorter than $n is unchanged.
# $n must be a positive integer.
# The old ($$) prototype was dropped: it never took effect (the sub is
# defined after its call site) and Perl prototypes are a known pitfall.
sub format_seq {
    my ( $s, $n ) = @_;
    # Insert a newline after every complete $n-character chunk; /s makes
    # '.' match any byte, mirroring the old substr-based arithmetic.
    $s =~ s/(.{$n})/$1\n/gs;
    return $s;
}
| shenwei356/bio_scripts | sequence/fasta_reset_start_position_for_circular_genome.pl | Perl | mit | 1,608 |
%% ---
%% Excerpted from "Seven Languages in Seven Weeks",
%% published by The Pragmatic Bookshelf.
%% Copyrights apply to this code. It may not be used to create training material,
%% courses, books, articles, and the like. Contact us if you are in doubt.
%% We make no guarantees that this code is fit for any purpose.
%% Visit http://www.pragmaticprogrammer.com/titles/btlang for more book information.
%%---
% concatenate(?List1, ?List2, ?Result)
% Step 2 of an incremental tutorial build: only the empty list and a
% one-element first list are handled; the general recursive clause for
% longer lists is deliberately not yet added.
concatenate([], List, List).
concatenate([Head|[]], List, [Head|List]).
| nmcl/scratch | prolog/concat_step_2.pl | Perl | apache-2.0 | 493 |
#!/usr/bin/perl
#
# Copyright (c) 2010 Kungliga Tekniska Högskolan
# (Royal Institute of Technology, Stockholm, Sweden).
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Institute nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
# Generate C source/header declarations for GSS-API OIDs from a simple
# input format of 'oid', 'desc' and '#' comment lines.
# Options (legacy getopts.pl):
#   -b <base>  only emit OIDs whose base symbol matches <base>
#   -h         emit the header file (default: emit the C source file)
require 'getopts.pl';
my $output; # NOTE(review): unused
my $CFILE, $HFILE; # NOTE(review): 'my' binds only $CFILE here; both are unused anyway
my $onlybase;
my $header = 0;
Getopts('b:h') || die "foo";
if($opt_b) {
    $onlybase = $opt_b;
}
$header = 1 if ($opt_h);
printf "/* Generated file */\n";
if ($header) {
    printf "#ifndef GSSAPI_GSSAPI_OID\n";
    printf "#define GSSAPI_GSSAPI_OID 1\n\n";
} else {
    printf "#include \"mech_locl.h\"\n\n";
}
my %tables; # oid symbol => { type, oid, short, long } from 'desc' lines
my %types;  # set of table types seen in 'desc' lines
# Main loop: each input line is a comment (echoed into the header), an
# 'oid' definition, or a 'desc' name-table entry.
while(<>) {
    if (/^\w*#(.*)/) {
	my $comment = $1;
	if ($header) {
	    printf("$comment\n");
	}
    } elsif (/^oid\s+([\w\.]+)\s+(\w+)\s+([\w\.]+)/) {
	my ($base, $name, $oid) = ($1, $2, $3);
	next if (defined $onlybase and $onlybase ne $base);
	my $store = "__" . lc($name) . "_oid_desc";
	# encode oid
	# DER encoding: arcs after the first two are emitted base-128,
	# most-significant group first, with the high bit set on every
	# continuation byte; built back-to-front below.
	my @array = split(/\./, $oid);
	my $length = 0;
	my $data = "";
	my $num;
	$n = $#array; # NOTE(review): $n is an undeclared global (file has no 'use strict')
	while ($n > 1) {
	    $num = $array[$n];
	    my $p = int($num % 128);
	    $data = sprintf("\\x%02x", $p) . $data;
	    $num = int($num / 128);
	    $length += 1;
	    while ($num > 0) {
		$p = int($num % 128) + 128;
		$num = int($num / 128);
		$data = sprintf("\\x%02x", $p) . $data;
		$length += 1;
	    }
	    $n--;
	}
	# First encoded byte combines the first two arcs: 40*arc1 + arc2.
	$num = int($array[0] * 40 + $array[1]);
	$data = sprintf("\\x%x", $num) . $data;
	$length += 1;
	if ($header) {
	    printf "extern GSSAPI_LIB_VARIABLE gss_OID_desc $store;\n";
	    printf "#define $name (&$store)\n\n";
	} else {
	    printf "/* $name - $oid */\n";
	    printf "gss_OID_desc GSSAPI_LIB_VARIABLE $store = { $length, rk_UNCONST(\"$data\") };\n\n";
	}
    } elsif (/^desc\s+([\w]+)\s+(\w+)\s+(\"[^\"]*\")\s+(\"[^\"]*\")/) {
	my ($type, $oid, $short, $long) = ($1, $2, $3, $4);
	my $object = { type=> $type, oid => $oid, short => $short, long => $long };
	$tables{$oid} = \$object;
	$types{$type} = 1;
    }
}
# Emit one C name table per 'desc' type (source file only).
foreach my $k (keys %types) {
    if (!$header) {
	print "struct _gss_oid_name_table _gss_ont_" . $k . "[] = {\n";
	foreach my $m (values %tables) {
	    if ($$m->{type} eq $k) {
		printf "  { %s, \"%s\", %s, %s },\n", $$m->{oid}, $$m->{oid}, $$m->{short}, $$m->{long};
	    }
	}
	printf "  { NULL }\n";
	printf "};\n\n";
    }
}
if ($header) {
    printf "#endif /* GSSAPI_GSSAPI_OID */\n";
}
| GaloisInc/hacrypto | src/C/NetBSD-7.0/crypto/external/bsd/heimdal/dist/lib/gssapi/gen-oid.pl | Perl | bsd-3-clause | 3,785 |
package Agua::Common::Volume;
use Moose::Role;
use Method::Signatures::Simple;
method automount () {
=head2
**** DEPRECATED: automount.py STARCLUSTER PLUGIN TAKES CARE OF THIS **
SUBROUTINE automount
PURPOSE
MAKE SURE THAT THE NODES ARE FULLY OPERATIONAL BEFORE THE MOUNTS ARE ATTEMPTED
=cut
	#### OPEN NFS AND SGE PORTS IN SECURITY GROUP
	my $cluster = $self->cluster();
	$self->logDebug("cluster", $cluster);
	$self->logDebug("Doing self->openPorts()");
	$self->openPorts("\@sc-$cluster");
	#### SETUP SHARES FROM HEAD
	$self->addNfsMounts();
	#### MOUNT SHARES ON MASTER AND ALL NEW NODES
	#### NOTE(review): mountShares() is declared with an empty signature
	#### below, yet is called here with six arguments - confirm intent.
	$self->mountShares($self->privatekey(), $self->publiccert(), $self->username(), $self->keyname(), $self->cluster(), $self->nodes());
	##### **** DEPRECATED: sge.py STARCLUSTER PLUGIN TAKES CARE OF THIS **
	##### SET THE DEFAULT QUEUE ON MASTER
	#$self->setQueue("default");
	#
	###### SET threaded PARALLEL ENVIRONMENT ON MASTER
	#$self->logDebug("Doing self->setPE('threaded')");
	#$self->setPE("threaded", "default");
}
method monitorAddedNodes ($outputfile) {
=head2
#############################################################
####
#### DEPRECATED DEPRECATED DEPRECATED
####
#### automount.py STARCLUSTER PLUGIN TAKES CARE OF THIS
####
####
#############################################################
SUBROUTINE monitorAddedNodes
PURPOSE
MONITOR ADDED STARCLUSTER NODES BY TRACKING LOAD
BALANCER OUTPUT, E.G:
...
>>> *** ADDING 1 NODES at 2011-02-09 02:42:52.871015.
>>> Launching node(s): node002
...
NOTES
1. PERIODICALLY CHECK FOR '*** ADDING 1 NODES ' IN
<cluster>-starcluster.out FILE
2. FOR EACH FOUND, GET NODE NAME (E.G., 'node002') AND
USE starcluster listclusters <cluster> TO RETRIEVE
INTERNAL IP
3. CHECK AGAINST LIST OF COMPLETED NODES
4. IF NOT COMPLETED, SET NFS EXPORTS ON THE HEAD NODE
FOR THIS NEWLY ADDED NODE
5. ADD NODE TO LIST OF COMPLETED NODES
=cut
	#### SET KEYPAIR FILE
	my $keypairfile = $self->keypairfile();
	$self->logDebug("keypairfile", $keypairfile);
	#### SET CONFIG FILE
	my $configfile = $self->configfile();
	$self->logDebug("StarCluster::monitorAddedNodes(username, keyname, cluster)");
	my $sleepinterval = $self->sleepinterval();
	$self->logDebug("sleepinterval", $sleepinterval);
	#### 1. PERIODICALLY CHECK FOR '*** ADDING 1 NODES ' IN
	#### <cluster>-starcluster.out FILE
	my $completed_nodes = [];
	#### NOTE(review): infinite polling loop - this method never returns.
	while ( 1 )
	{
	$self->logDebug("in loop");
	#### NOTE(review): bareword handle and global $/ assignment (never
	#### restored) - $/ stays "*** ADDING" for the caller afterwards.
	open(FILE, $outputfile) or die "Can't open outputfile: $outputfile\n";
	$/ = "*** ADDING";
	#### 2. FOR EACH FOUND, GET NODE NAME (E.G., 'node002') AND
	#### USE starcluster listclusters <cluster> TO RETRIEVE
	#### INTERNAL IP
	my @nodes = <FILE>;
	close(FILE);
	shift @nodes;
	#### 3. CHECK AGAINST LIST OF COMPLETED NODES
	#### NOTE(review): @nodes holds raw text chunks (strings), yet each
	#### $node is dereferenced as a hashref ({internalip}, {name}) below -
	#### this looks broken; the method is marked DEPRECATED above.
	foreach my $node ( @nodes )
	{
	my $completed = 0;
	foreach my $completed_node ( @$completed_nodes )
	{
	$completed = 1 if $completed_node->{internalip} eq $node->{internalip};
	last if $completed;
	}
	#### 4. IF NOT COMPLETED, SET NFS EXPORTS FOR THIS NODE ON HEAD
	#### NOTE(review): runs unconditionally - the 'if not completed'
	#### guard described above is missing.
	$self->setNfsExports($self->sourcedirs(), [$node->{internalip}]);
	#### 5. ADD NODE TO LIST OF COMPLETED NODES
	$self->logDebug("completed node", $node->{name}) and next if $completed;
	push @$completed_nodes, $node;
	}
	sleep($self->sleepinterval());
	}
}
method setNfsExports ($volumes, $internalips) {
	#### Publish the given volumes to the given client IPs via /etc/exports,
	#### then bounce the NFS daemons so the new exports take effect.
	$self->addToExports($volumes, $internalips);
	#### RESTART PORTMAP AND NFS DAEMONS
	$self->restartDaemons();
}
method addToExports ($volumes, $recipientips, $remotehost, $keypairfile) {
=head2
SUBROUTINE addToExports
PURPOSE
ON MASTER, SET UP EXPORT TO HEAD INSTANCE IN /etc/exports:
/home ip-10-124-247-224.ec2.internal(async,no_root_squash,no_subtree_check,rw)
/opt/sge6 ip-10-124-247-224.ec2.internal(async,no_root_squash,no_subtree_check,rw)
/data ip-10-124-247-224.ec2.internal(async,no_root_squash,no_subtree_check,rw)
*** /data ip-10-127-158-202.ec2.internal(async,no_root_squash,no_subtree_check,rw)
=cut
	#### sourceip IS THE HOST DOING THE SHARING
	#### remotehost IS THE HOST MOUNTING THE SHARE
	$self->logDebug("StarCluster::addToExports(volumes, recipientips, remotehost, keypairfile)");
	$self->logDebug("username: " . $self->username());
	$self->logDebug("recipientips: @$recipientips");
	$self->logDebug("volume: @$volumes");
	$self->logDebug("keypairfile", $keypairfile) if defined $keypairfile;
	$self->logDebug("remotehost", $remotehost) if defined $remotehost;
	#### SET CONFIG FILE
	my $configfile = $self->configfile();
	#### GET CONTENTS OF /etc/exports
	my $exportsfile = "/etc/exports";
	my ($exports) = $self->remoteCommand({
	remotehost => $remotehost,
	command => "cat $exportsfile"
	});
	$exports =~ s/\s+$//;
	#### REMOVE EXISTING ENTRY FOR THESE VOLUMES
	#### NOTE(review): $volume/$recipientip are interpolated unescaped into
	#### the match below - regex metachars in paths would misfire (\Q...\E).
	my @lines = split "\n", $exports;
	foreach my $volume ( @$volumes )
	{
	foreach my $recipientip ( @$recipientips )
	{
	for ( my $i = 0; $i < $#lines + 1; $i++ )
	{
	if ( $lines[$i] =~ /^$volume\s+$recipientip/ )
	{
	splice @lines, $i, 1;
	$i--;
	}
	}
	}
	}
	#### Append one fresh export line per volume/recipient pair.
	foreach my $volume ( @$volumes )
	{
	foreach my $recipientip ( @$recipientips )
	{
	push @$lines, "$volume $recipientip(async,no_root_squash,no_subtree_check,rw)" if 0; # (placeholder - see next line)
	push @lines, "$volume $recipientip(async,no_root_squash,no_subtree_check,rw)";
	}
	}
	my $output = join "\n", @lines;
	#### Back up the live exports file before replacing it.
	$self->remoteCommand({
	remotehost => $remotehost,
	command => "mv -f $exportsfile $exportsfile.bkp"
	});
	#### WRITE TEMP FILE TO USER-OWNED /tmp DIRECTORY
	#### IN PREPARATION FOR COPY AS ROOT TO REMOTE HOST
	my $tempdir = "/tmp/" . $self->username();
	File::Path::mkpath($tempdir) if not -d $tempdir;
	my $tempfile = "/$tempdir/exports"; # NOTE(review): leading '/' yields a double slash (harmless on POSIX)
	open(OUT, ">$tempfile") or die "Can't open tempfile: $tempfile\n";
	print OUT $output;
	close(OUT) or die "Can't close tempfile: $tempfile\n";
	my $result;
	if ( defined $keypairfile and $keypairfile )
	{
	#### NOTE(review): uses remoteCommand() with source/target keys - other
	#### methods use remoteCopy() for file transfer; confirm which is intended.
	($result) = $self->remoteCommand({
	remotehost => $remotehost,
	source => $tempfile,
	target => $exportsfile
	});
	}
	else {
	$result = `cp $tempfile $exportsfile`;
	}
	$self->logDebug("result", $result);
}
method autoMount () {
=head2
SUBROUTINE autoMount
PURPOSE
MAKE REMOTE CALLS TO NODES TO SET UP NFS MOUNTS
#############################################################
####
#### DEPRECATED DEPRECATED DEPRECATED
####
#### automount.py STARCLUSTER PLUGIN TAKES CARE OF THIS
####
####
#############################################################
=cut
	#### SET KEYPAIR FILE
	my $keypairfile = $self->keypairfile();
	$self->logDebug("keypairfile", $keypairfile);
	### GET INTERNAL IP OF HEAD NODE
	my ($externalip, $headip) = $self->getLocalIps();
	$self->logDebug("headip", $headip);
	#### GET INTERNAL IPS OF ALL NODES
	my $nodeips = $self->getInternalIps();
	#### NOTE(review): @$nodeips dereferences before the defined check on the
	#### next line - this would die if getInternalIps() returned undef.
	$self->logDebug("nodeips: @$nodeips");
	return if not defined $nodeips;
	#### MOUNT ON MASTER AND EXEC NODES
	#### For each node: mount every share, then rewrite its /etc/fstab so
	#### the mounts survive a reboot.
	foreach my $nodeip ( @$nodeips )
	{
	my $inserts = [];
	my $removex = [];
	for ( my $i = 0; $i < @{$self->sourcedirs()}; $i++ )
	{
	my $sourcedir = ${$self->sourcedirs()}[$i];
	my $mountpoint = ${$self->mountpoints()}[$i];
	$self->mountNfs($sourcedir, $headip, $mountpoint, $keypairfile, $nodeip);
	push @$inserts, "$headip:$sourcedir $mountpoint nfs nfsvers=3,defaults 0 0";
	push @$removex, "$headip:$sourcedir";
	}
	$self->addFstab($removex, $inserts, $keypairfile, $nodeip);
	}
}
method addFstab ($removex, $inserts, $keypairfile, $remotehost) {
=head2
SUBROUTINE addFstab
PURPOSE
ADD ENTRIES TO /etc/fstab TO AUTOMOUNT NFS IMPORTS, I.E.:
/dev/sdh /data nfs rw,vers=3,rsize=32768,wsize=32768,hard,proto=tcp 0 0
/dev/sdi /nethome nfs rw,vers=3,rsize=32768,wsize=32768,hard,proto=tcp 0 0
=cut
	#### remotehost IS THE HOST MOUNTING THE SHARE
	$self->logDebug("removex", $removex);
	$self->logDebug("inserts: @$inserts");
	$self->logDebug("keypairfile", $keypairfile);
	$self->logDebug("remotehost", $remotehost);
	#### SET SSH
	if ( not defined $self->ssh() ) {
	$self->_setSsh("root", $remotehost, $keypairfile);
	}
	else {
	$self->ssh()->keyfile($keypairfile);
	$self->ssh()->remotehost($remotehost);
	$self->ssh()->remoteuser("root");
	}
	#### SET CONFIG FILE
	my $configfile = $self->configfile();
	#### GET CONTENTS OF /etc/exports
	my $fstabfile = "/etc/fstab";
	#### NOTE(review): SSH() (upper case) differs from ssh() used above -
	#### confirm which accessor actually exists; likely a typo.
	my ($exports) = $self->SSH()->remoteCommand("cat $fstabfile"); # $exports actually holds fstab contents
	$exports =~ s/\s+$//;
	#### REMOVE EXISTING ENTRY FOR THESE VOLUMES
	my @lines = split "\n", $exports;
	for ( my $i = 0; $i < $#lines + 1; $i++ )
	{
	foreach my $remove ( @$removex )
	{
	if ( $lines[$i] =~ /^$remove/ )
	{
	splice @lines, $i, 1;
	$i--;
	}
	}
	}
	foreach my $insert ( @$inserts ) {
	push @lines, $insert;
	}
	my $output = join "\n", @lines;
	#### NOTE(review): string argument here, hashref elsewhere - confirm
	#### remoteCommand() accepts both calling conventions.
	$self->remoteCommand("mv -f $fstabfile $fstabfile.bkp");
	#### WRITE TEMP FILE TO USER-OWNED /tmp DIRECTORY
	#### IN PREPARATION FOR COPY AS ROOT TO REMOTE HOST
	my $tempdir = "/tmp/" . $self->username();
	File::Path::mkpath($tempdir) if not -d $tempdir;
	my $tempfile = "$tempdir/exports";
	open(OUT, ">$tempfile") or die "Can't open tempfile: $tempfile\n";
	print OUT $output;
	close(OUT) or die "Can't close tempfile: $tempfile\n";
	my $result;
	#### NOTE(review): branch looks inverted relative to sibling methods -
	#### scpPut runs when remotehost is NOT defined; verify intent.
	if ( not defined $remotehost ) {
	$result = $self->ssh()->scpPut($tempfile, $fstabfile);
	}
	else {
	$result = `cp $tempfile $fstabfile`;
	}
}
method mountShares () {
=head2
#############################################################
####
#### DEPRECATED DEPRECATED DEPRECATED
####
#### automount.py STARCLUSTER PLUGIN TAKES CARE OF THIS
####
####
#############################################################
=cut
	#### SET KEYPAIR FILE
	#### NOTE(review): declared with an empty signature but called with six
	#### arguments from automount() - extra args are silently discarded.
	my $keypairfile = $self->keypairfile();
	$self->logDebug("keypairfile", $keypairfile);
	#### SET CONFIG FILE
	my $configfile = $self->configfile();
	#### WHILE CLUSTER IS STARTING UP, SEARCH FOR IPS OF NODES
	#### AND ADD THEM TO /etc/exports
	my $completed_nodes = [];
	while ( scalar(@$completed_nodes) < $self->nodes() )
	{
	#### GET INTERNAL IPS, ETC. OF ALL NODES IN CLUSTER
	my $launched_nodes = $self->getLaunchedNodes();
	#### IGNORE ALREADY COMPLETED NODES
	foreach my $launched_node ( @$launched_nodes )
	{
	my $completed = 0;
	foreach my $completed_node ( @$completed_nodes )
	{
	$completed = 1 if $completed_node->{internalip} eq $launched_node->{internalip};
	last if $completed;
	}
	next if $completed;
	push @$completed_nodes, $launched_node;
	if ( $launched_node->{name} eq "master" )
	{
	##### AUTOMATE RESTART sge MASTER AND EXECD AFTER REBOOT MASTER
	my $masterip = $launched_node->{internalip};
	#### NOTE(review): setSgeStartup() is declared with one parameter
	#### ($remotehost) but called here with three - confirm intent.
	$self->setSgeStartup($self->username(), $masterip, $keypairfile);
	#### EXCLUDE MASTER NODE FROM EXEC NODES LIST
	$self->excludeMasterHost($masterip);
	}
	#### ADD ENTRIES TO /etc/exports
	#### NOTE(review): setNfsExports() iterates its second argument as an
	#### arrayref, but a plain scalar IP is passed here - confirm.
	$self->setNfsExports($self->sourcedirs, $launched_node->{internalip});
	}
	}
}
method setSgeStartup ($remotehost) {
=head2
SUBROUTINE setSgeStartup
PURPOSE
ADD CALL TO START SGE TO /etc/init.d/rc.local
=cut
	#### Lines to (re)install in rc.local; also used as match patterns for
	#### removing any previous copies.
	my $removex = [
	"",
	"/etc/init.d/sgemaster.starcluster start",
	"echo 'sgemaster.starcluster started'",
	"/etc/init.d/sgeexecd.starcluster start",
	"echo 'sgeexecd.starcluster started'"
	];
	my $inserts;
	@$inserts = @$removex;
	#### remotehost IS THE HOST MOUNTING THE SHARE
	$self->logDebug("StarCluster::setSgeStartup(username, keypairfile, remotehost, sourceip, volume)");
	$self->logDebug("username", $self->username());
	$self->logDebug("removex", $removex);
	$self->logDebug("inserts: @$inserts");
	$self->logDebug("remotehost", $remotehost);
	#### GET CONTENTS OF /etc/exports
	my $startupfile = "/etc/init.d/rc.local";
	my ($contents) = $self->remoteCommand({
	remotehost => $remotehost,
	command => "cat $startupfile"
	});
	$contents =~ s/\s+$//;
	#### REMOVE EXISTING ENTRY FOR THESE VOLUMES
	#### NOTE(review): the empty string "" in removex makes /^$remove/ match
	#### EVERY line, so this loop strips all existing rc.local content -
	#### almost certainly a bug (/^$/ was probably intended).
	my @lines = split "\n", $contents;
	for ( my $i = 0; $i < $#lines + 1; $i++ )
	{
	foreach my $remove ( @$removex )
	{
	if ( $lines[$i] =~ /^$remove/ )
	{
	splice @lines, $i, 1;
	$i--;
	}
	}
	}
	foreach my $insert ( @$inserts )
	{
	push @lines, $insert;
	}
	my $output = join "\n", @lines;
	#### Back up the live startup file before replacing it.
	$self->remoteCommand({
	remotehost => $remotehost,
	command => "mv -f $startupfile $startupfile.bkp"
	});
	#### WRITE TEMP FILE TO USER-OWNED /tmp DIRECTORY
	#### IN PREPARATION FOR COPY AS ROOT TO REMOTE HOST
	my $tempdir = "/tmp/" . $self->username();
	File::Path::mkpath($tempdir) if not -d $tempdir;
	my $tempfile = "$tempdir/startup";
	open(OUT, ">$tempfile") or die "Can't open tempfile: $tempfile\n";
	print OUT $output;
	close(OUT) or die "Can't close tempfile: $tempfile\n";
	my $result;
	if ( defined $remotehost and $remotehost ) {
	$result = $self->remoteCopy({
	remotehost => $remotehost,
	source => $tempfile,
	target => $startupfile
	});
	}
	else {
	$result = `cp $tempfile $startupfile`;
	}
	$self->logDebug("result", $result);
	#### Read the file back for debug confirmation.
	my ($catnew) = $self->remoteCommand({
	remotehost => $remotehost,
	command => "cat $startupfile"
	});
	$self->logDebug("catnew", $catnew);
}
method excludeMasterHost ($remotehost) {
=head2
SUBROUTINE excludeMasterHost
PURPOSE
1. REMOVE MASTER FROM EXEC HOST LIST
2. REMOVE MASTER FROM CONFIGURATION LIST:
3. REMOVE MASTER FROM HOST LIST
4. RESTART sgemaster DAEMON
NOTES
MUST BE RUN AS root
=cut
	#### remotehost IS THE HOST MOUNTING THE SHARE
	$self->logDebug("StarCluster::excludeMasterHost(remotehost)");
	$self->logDebug("remotehost", $remotehost);
	#### SET CONFIG FILE
	my $configfile = $self->configfile();
	my $keypairfile = $self->keypairfile();
	#### SET QCONF
	my $conf = $self->conf();
	my $sge_root = $conf->getKey("cluster", "SGE_ROOT");
	my $sge_qmaster_port= $conf->getKey("cluster", "SGE_QMASTER_PORT");
	my $qconf = $conf->getKey("cluster", "QCONF");
	#### 1. REMOVE MASTER FROM EXEC HOST LIST
	# Host object "ip-10-124-245-118.ec2.internal" is still referenced in cluster queue "all.q"
	my $exec = "export SGE_ROOT=$sge_root; export SGE_QMASTER_PORT=$sge_qmaster_port; $qconf -de $remotehost";
	$self->logDebug("\n$exec\n");
	my ($exec_result) = $self->remoteCommand({
	remotehost => $remotehost,
	command => $exec
	});
	$self->logDebug("exec_result", $exec_result);
	#### 2. REMOVE MASTER FROM CONFIGURATION LIST:
	# root@ip-10-124-245-118.ec2.internal removed "ip-10-124-245-118.ec2.internal" from configuration list
	my $config = "export SGE_ROOT=$sge_root; export SGE_QMASTER_PORT=$sge_qmaster_port; $qconf -dconf $remotehost";
	$self->logDebug("config command:\n$config\n");
	my ($config_result) = $self->remoteCommand({
	remotehost => $remotehost,
	command => $config
	});
	$self->logDebug("config_result", $config_result);
	#### 3. REMOVE MASTER FROM HOST LIST
	#### GET CURRENT GROUP CONFIG
	# group_name @allhosts
	# hostlist ip-10-124-245-118.ec2.internal ip-10-124-247-224.ec2.internal
	my ($hostlist) = $self->remoteCommand({
	remotehost => $remotehost,
	command => "qconf -shgrp \@allhosts"
	});
	$hostlist =~ s/\s+$//;
	$self->logDebug("BEFORE hostlist", $hostlist);
	#### NOTE(review): $hostlist is written back below WITHOUT removing
	#### $remotehost from it - step 3 ("remove master from host list")
	#### does not actually happen.
	#### WRITE TEMP FILE TO USER-OWNED /tmp DIRECTORY
	#### IN PREPARATION FOR COPY AS ROOT TO REMOTE HOST
	my $tempdir = "/tmp/" . $self->username();
	File::Path::mkpath($tempdir) if not -d $tempdir;
	my $groupfile = "allhosts.group";
	open(OUT, ">/$tempdir/$groupfile") or die "Can't open groupfile: $groupfile\n";
	print OUT $hostlist;
	close(OUT) or die "Can't close groupfile: /$tempdir/$groupfile\n";
	#### COPY GROUP CONFIG FILE TO '~' ON TARGET HOST
	#### NOTE(review): dead call - $copy is immediately reassigned below.
	my $copy = $self->remoteCopy({
	});
	$copy = qq{scp -i $keypairfile /$tempdir/$groupfile root\@$remotehost:$groupfile} if defined $keypairfile;
	$copy = qq{cp /$tempdir/$groupfile ~/$groupfile} if not defined $keypairfile;
	$self->logDebug("copy", $copy);
	my $result = `$copy`;
	$self->logDebug("result", $result);
	#### SET GROUP CONFIG FROM FILE ON TARGET HOST
	my ($qconf_result) = $self->remoteCommand({
	remotehost => $remotehost,
	command => "export SGE_ROOT=$sge_root; export SGE_QMASTER_PORT=$sge_qmaster_port; $qconf -Mhgrp ~/$groupfile"
	});
	# root@ip-10-124-245-118.ec2.internal modified "@allhosts" in host group list
	$self->logDebug("qconf_result", $qconf_result);
	#### RESTART sgemaster
	#### SET GROUP CONFIG FROM FILE ON TARGET HOST
	my ($restart_result) = $self->remoteCommand({
	remotehost => $remotehost,
	command => "/etc/init.d/sgemaster.starcluster stop; /etc/init.d/sgemaster.starcluster start"
	});
	$self->logDebug("restart_result", $restart_result);
}
method getLaunchedNodes () {
	#### Ask StarCluster for the members of the current cluster and return
	#### them as an arrayref of { name, status, instanceid, internalip }
	#### hashes. Typical 'listclusters' output:
	####   Cluster nodes:
	####        master running i-9b3e5ff7 ec2-50-17-20-70.compute-1.amazonaws.com
	####        node001 running i-953e5ff9 ec2-67-202-10-108.compute-1.amazonaws.com
	####   Total nodes: 2
	my $command = "starcluster -c " . $self->configfile . " listclusters ". $self->cluster();
	$self->logDebug("command", $command);
	my $list = `$command`;
	$self->logDebug("list result not defined or empty")
	and return if not defined $list or not $list;
	$self->logDebug("list", $list);

	#### Everything after the 'Cluster nodes:' banner is node rows.
	my ($nodelist) = $list =~ /Cluster nodes:\s*\n(.+)$/ms;

	my $nodes = [];
	foreach my $row ( grep { !/^\s*$/ } split "\n", $nodelist ) {
		#### Whitespace-separated columns; fields stay undef when a row
		#### does not match the expected four-column layout.
		my ($name, $status, $instanceid, $internalip) =
			$row =~ /^\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)/;
		push @$nodes, {
			name       => $name,
			status     => $status,
			instanceid => $instanceid,
			internalip => $internalip
		};
	}

	return $nodes;
}
method addNfsMounts () {
	#### CONFIGURE NFS SHARES ON HEAD NODE (I.E., NOT MASTER)
	#### CHECK INPUTS
	#### NOTE(review): exiting hinges on logDebug() returning true ('and
	#### exit') - if logDebug ever returns false the process continues.
	$self->logDebug("privatekey not defined")and exit if not $self->privatekey();
	$self->logDebug("publiccert not defined")and exit if not $self->publiccert();
	$self->logDebug("username not defined")and exit if not $self->username();
	$self->logDebug("cluster not defined")and exit if not $self->cluster();
	$self->logDebug("keyname not defined")and exit if not $self->keyname();
	$self->logDebug("sourcedirs not defined")and exit if not $self->sourcedirs();
	$self->logDebug("devices not defined")and exit if not $self->devices();
	##### SET FIXED PORT FOR HEAD NODE NFS
	$self->setMountdPort(undef, undef);
	#### ADD ENTRIES TO /etc/fstab
	#### Pair each source directory with its device; build fstab insert
	#### lines and the matching removal patterns (regex-escaped \s+).
	my $inserts = [];
	my $removes = [];
	for ( my $i = 0; $i < @{$self->sourcedirs()}; $i++ )
	{
	my $sourcedir = ${$self->sourcedirs()}[$i];
	my $device = ${$self->devices()}[$i];
	push @$inserts, "$device $sourcedir nfs rw,vers=3,rsize=32768,wsize=32768,hard,proto=tcp 0 0";
	push @$removes, "$device\\s+$sourcedir\\s+nfs";
	}
	#### RESTART PORTMAP AND NFS DAEMONS
	$self->restartDaemons(undef, undef);
	#### SET /etc/fstab ENTRIES FOR SHARES
	$self->addFstab($removes, $inserts, undef, undef);
}
method setMountdPort ($keypairfile, $remotehost) {
=head2
SUBROUTINE setMountdPort
PURPOSE
ADD FIXED PORT FOR mountd IN /etc/default/nfs-kernel-server
- ON REMOTE HOST IF KEYPAIRFILE AND IP PROVIDED
- OTHERWISE, ON LOCAL HOST
=cut
	$self->logDebug("StarCluster::setMountdPort(keypairfile, remotehost)");
	#### GET UNAME
	#### Used below to distinguish Ubuntu (nfs-kernel-server) from
	#### RedHat-style (/etc/sysconfig/nfs) layouts.
	my ($uname) = $self->remoteCommand({
	remotehost => $remotehost,
	command => "uname -a"
	});
	$self->logDebug("uname", $uname);
	my $conf = $self->conf();
	my $mountdport = $conf->getKey("starcluster:nfs", "MOUNTDPORT");
	$self->logDebug("mountdport", $mountdport);
	#### NOTE(review): non-Ubuntu default hard-codes port 32767 instead of
	#### using $mountdport from the config (the Ubuntu branch uses it) -
	#### looks like an inconsistency.
	my $insert = qq{MOUNTD_PORT=32767};
	my $filter = qq{MOUNTD_PORT};
	my $nfsconfigfile = "/etc/sysconfig/nfs";
	if ( $uname =~ /ubuntu/i )
	{
	$insert = qq{RPCMOUNTDOPTS="--port $mountdport --manage-gids"};
	$filter = qq{RPCMOUNTDOPTS};
	$nfsconfigfile = "/etc/default/nfs-kernel-server";
	}
	$self->logDebug("insert", $insert);
	$self->logDebug("filter", $filter);
	$self->logDebug("nfsconfigfile", $nfsconfigfile);
	#### GET NFS CONFIG FILE CONTENTS
	my ($nfsconfig) = $self->remoteCommand({
	remotehost => $remotehost,
	command => "cat $nfsconfigfile"
	});
	$self->logDebug("BEFORE nfsconfig", $nfsconfig);
	#### BACKUP NFS CONFIG FILE
	print $self->remoteCommand({
	remotehost => $remotehost,
	command => "mv -f $nfsconfigfile $nfsconfigfile.bkp"
	});
	#### COMMENT OUT EXISTING RPCMOUNTOPTS LINES
	#### (actually removes them rather than commenting them out)
	my @lines = split "\n", $nfsconfig;
	for ( my $i = 0; $i < $#lines + 1; $i++ )
	{
	if ( $lines[$i] =~ /^$filter/ )
	{
	splice @lines, $i, 1;
	$i--;
	}
	}
	#### ADD NEW RPCMOUNTOPTS LINE
	push @lines, "$insert\n";
	#### WRITE TEMP FILE TO USER-OWNED /tmp DIRECTORY
	#### IN PREPARATION FOR COPY AS ROOT TO REMOTE HOST
	my $tempdir = "/tmp/" . $self->username();
	File::Path::mkpath($tempdir) if not -d $tempdir;
	my $tempfile = "$tempdir/nfs-kernel-server";
	open(OUT, ">$tempfile") or die "Can't open tempfile: $tempfile\n";
	my $content = join "\n", @lines;
	print OUT $content;
	close(OUT) or die "Can't close tempfile: $tempfile\n";
	my $result;
	if ( defined $keypairfile and $keypairfile ) {
	$result = $self->remoteCopy({
	remotehost => $remotehost,
	source => $tempfile,
	target => $nfsconfigfile
	});
	}
	else {
	$result = `cp $tempfile $nfsconfigfile`;
	}
	$self->logDebug("result", $result);
}
method mountNfs ($source, $sourceip, $mountpoint, $keypairfile, $remotehost) {
=head2
SUBROUTINE mountNfs
PURPOSE
MOUNT EXPORTED NFS volume FROM MASTER ON AQ-7
NOTES
CHECK IF MASTER'S MOUNT IS SEEN BY AQ-7:
showmount -e ip-10-124-245-118
Export list for ip-10-124-245-118:
/data ip-10-127-158-202.ec2.internal,ip-10-124-247-224.ec2.internal
/opt/sge6 ip-10-124-247-224.ec2.internal
/home ip-10-124-247-224.ec2.internal
=cut
    #### remotehost IS THE HOST MOUNTING THE SHARE
    $self->logDebug("StarCluster::mountNfs(source, sourceip, mountpoint, keypairfile, remotehost)");
    $self->logDebug("source", $source);
    $self->logDebug("sourceip", $sourceip);
    $self->logDebug("mountpoint", $mountpoint);
    $self->logDebug("keypairfile", $keypairfile);
    $self->logDebug("remotehost", $remotehost);

    #### ON THE REMOTE HOST: CREATE THE MOUNTPOINT DIRECTORY,
    #### THEN MOUNT THE NFS SHARE ONTO IT
    my @steps = (
        "mkdir -p $mountpoint",
        "mount -t nfs $sourceip:$source $mountpoint"
    );
    for my $step ( @steps ) {
        print $self->remoteCommand({
            remotehost => $remotehost,
            command    => $step
        });
    }
}
method restartDaemons ($keypairfile, $remotehost) {
=head2
SUBROUTINE restartDaemons
PURPOSE
ON MASTER, RESTART NFS
=cut
    #### CHECK LINUX FLAVOUR TO PICK THE SERVICE-CONTROL COMMANDS
    my ($uname) = $self->remoteCommand({
        remotehost => $remotehost,
        command    => "uname -a"
    });

    #### UBUNTU USES init.d SCRIPTS; OTHERWISE USE 'service'
    my $isubuntu      = ( $uname =~ /ubuntu/i );
    my $portmapbinary = $isubuntu ? "/etc/init.d/portmap" : "service portmap";
    my $nfsbinary     = $isubuntu ? "/etc/init.d/nfs"     : "service nfs";

    #### RESTART portmap FIRST, THEN nfs
    for my $binary ( $portmapbinary, $nfsbinary ) {
        $self->remoteCommand({
            remotehost => $remotehost,
            command    => "$binary restart"
        });
    }
}
method getLocalIps () {
    #### RETURN THE 4th AND 5th WHITESPACE-SEPARATED FIELDS OF THE
    #### INSTANCE DESCRIPTION LINE (as reported by getInstanceInfo)
    my $instance = $self->getInstanceInfo();
    my @fields = split " ", $instance->{instance};
    return @fields[3, 4];
}
method openPorts ($group) {
=head2
SUBROUTINE openPorts
PURPOSE
OPEN NFS PORTS FOR THE GIVEN GROUP ON EC2
=cut
    $self->logDebug("StarCluster::openPorts(privatekey, publiccert, group)");
    $self->logError("group not defined or empty") and exit if not defined $group or not $group;

    #### GATHER ALL PORT NUMBERS FROM CONFIG
    my $conf      = $self->conf();
    my $portmap   = $conf->getKey("starcluster:nfs", "PORTMAPPORT");
    my $nfs       = $conf->getKey("starcluster:nfs", "NFSPORT");
    my $mountd    = $conf->getKey("starcluster:nfs", "MOUNTDPORT");
    my $sge       = $conf->getKey("cluster", "SGEQMASTERPORT");
    my $ec2       = $conf->getKey("applications:aquarius-8", "EC2");
    my $java_home = $conf->getKey("aws", "JAVAHOME");
    $self->logError("ec2 not defined") and exit if not defined $ec2;

    #### SET EC2_HOME AND JAVA_HOME ENVIRONMENT VARIABLES FOR THE EC2 TOOLS
    my $ec2_home = $ec2;
    $ec2_home =~ s/\/bin$//;
    $ENV{'EC2_HOME'}  = $ec2_home;
    $ENV{'JAVA_HOME'} = $java_home;

    ##### CREATE SECURITY GROUP
    #my $creategroup = "$ec2/ec2-add-group $group -d 'StarCluster group'";

    #### EACH (port, protocol) PAIR TO AUTHORIZE, IN THE SAME ORDER AS BEFORE:
    #### PORTMAP, NFS, MOUNTD, SGE_QMASTER_PORT
    my @portprotocols = (
        [ $portmap, "tcp" ], [ $portmap, "udp" ],
        [ $nfs,     "udp" ], [ $nfs,     "tcp" ],
        [ $mountd,  "udp" ], [ $mountd,  "tcp" ],
        [ $sge,     "udp" ], [ $sge,     "tcp" ]
    );

    #### AUTHORIZE EACH PORT/PROTOCOL FOR THE GROUP
    foreach my $portprotocol ( @portprotocols )
    {
        my ($port, $protocol) = @$portprotocol;
        my $command = qq{$ec2/ec2-authorize \\
-K } . $self->privatekey() . qq{ \\
-C } . $self->publiccert() . qq{ \\
$group -p $port -P $protocol\n};
        print $command;
        print `$command`;
    }
}
method removeNfsMounts () {
=head2
SUBROUTINE removeNfsMounts
PURPOSE
REMOVE NFS MOUNT INFORMATION FROM SYSTEM FILES
AND RESTART NFS DAEMONS
=cut
    $self->logDebug("privatekey: " . $self->privatekey());
    $self->logDebug("publiccert: " . $self->publiccert());
    $self->logDebug("username: " . $self->username());
    $self->logDebug("cluster: " . $self->cluster());
    $self->logDebug("keyname: " . $self->keyname());

    #### CHECK INPUTS
    $self->logDebug("privatekey not defined")and exit if not defined $self->privatekey();
    $self->logDebug("publiccert not defined")and exit if not defined $self->publiccert();
    $self->logDebug("username not defined")and exit if not defined $self->username();
    $self->logDebug("cluster not defined")and exit if not defined $self->cluster();
    $self->logDebug("keyname not defined")and exit if not defined $self->keyname();

    #### SET DEFAULT KEYNAME
    my $keypairfile = $self->keypairfile();
    $self->logDebug("keypairfile", $keypairfile);

    #### GET INTERNAL IPS OF ALL NODES IN CLUSTER
    my $nodeips = $self->getInternalIps($self->username(), $self->cluster(), $self->privatekey(), $self->publiccert());
    #my $nodeips = [ "ip-10-124-241-66.ec2.internal" ];

    #### BUGFIX: check for undef BEFORE dereferencing - @$nodeips on undef
    #### dies under strict refs, so the old order could never reach the return
    return if not defined $nodeips;
    $self->logDebug("nodeips: @$nodeips");

    #### REMOVE ENTRIES FROM /etc/exports
    my $volumes = [ "/agua", "/data", "/nethome" ];
    #### BUGFIX: match removeExports' signature ($volumes, $recipientips,
    #### $remotehost) - previously username() was passed as the volumes
    #### argument, shifting every parameter by one
    $self->removeExports($volumes, $nodeips, undef);
}
method removeExports ($volumes, $recipientips, $remotehost) {
=head2
SUBROUTINE removeExports
PURPOSE
ON MASTER, REMOVE /etc/exports ENTRIES FOR THE GIVEN VOLUMES AND
RECIPIENT HOSTS, E.G. LINES OF THE FORM:
/home ip-10-124-247-224.ec2.internal(async,no_root_squash,no_subtree_check,rw)
/opt/sge6 ip-10-124-247-224.ec2.internal(async,no_root_squash,no_subtree_check,rw)
/data ip-10-124-247-224.ec2.internal(async,no_root_squash,no_subtree_check,rw)
=cut
    #### remotehost IS THE HOST MOUNTING THE SHARE
    $self->logDebug("StarCluster::removeExports(volumes, recipientips, remotehost)");
    $self->logDebug("recipientips: @$recipientips");
    $self->logDebug("volume: @$volumes");
    $self->logDebug("remotehost", $remotehost) if defined $remotehost;

    #### SET CONFIG FILE
    my $configfile  = $self->configfile();
    my $keypairfile = $self->keypairfile();

    #### GET CONTENTS OF /etc/exports
    my $exportsfile = "/etc/exports";
    my ($exports) = $self->remoteCommand({
        remotehost => $remotehost,
        command    => "cat $exportsfile"
    });
    $exports =~ s/\s+$//;

    #### REMOVE EXISTING ENTRIES FOR THESE VOLUMES/RECIPIENTS.
    #### \Q...\E quotes regex metacharacters so e.g. '.' in a hostname
    #### only matches a literal dot
    my @lines = split "\n", $exports;
    foreach my $volume ( @$volumes )
    {
        foreach my $recipientip ( @$recipientips )
        {
            @lines = grep { $_ !~ /^\Q$volume\E\s+\Q$recipientip\E/ } @lines;
        }
    }
    my $output = join "\n", @lines;

    #### BACKUP THE EXPORTS FILE.
    #### BUGFIX: capture the command result in its own variable - previously
    #### this re-declared $output and clobbered the cleaned exports content,
    #### so the mv command's output was written to /etc/exports below
    my ($moveresult) = $self->remoteCommand({
        remotehost => $remotehost,
        command    => "mv -f $exportsfile $exportsfile.bkp"
    });
    $self->logDebug("output", $moveresult);

    #### WRITE TEMP FILE TO USER-OWNED /tmp DIRECTORY
    #### IN PREPARATION FOR COPY AS ROOT TO REMOTE HOST
    my $tempdir = "/tmp/" . $self->username();
    File::Path::mkpath($tempdir) if not -d $tempdir;
    my $tempfile = "$tempdir/exports";    # BUGFIX: removed spurious leading '/'
    open(my $OUT, '>', $tempfile) or die "Can't open tempfile: $tempfile\n";
    print {$OUT} $output;
    close($OUT) or die "Can't close tempfile: $tempfile\n";

    #### INSTALL NEW EXPORTS FILE: remoteCopy IF A KEYPAIR WAS GIVEN,
    #### LOCAL cp OTHERWISE
    my $result;
    if ( defined $keypairfile and $keypairfile ) {
        $result = $self->remoteCopy({
            remotehost => $remotehost,
            source     => $tempfile,
            target     => $exportsfile
        });
    }
    else {
        $result = `cp $tempfile $exportsfile`;
    }
    $self->logDebug("result", $result);
}
| aguadev/aguadev | t/unit/bin/Agua/Uml/Uml/inputs/lib/Agua/Common/Volume.pm | Perl | mit | 28,510 |
#==========================================================================
# Copyright (c) 1995-1998 Martien Verbruggen
#--------------------------------------------------------------------------
#
# Name:
# GD::Graph::colour.pm
#
# Description:
# Package of colour manipulation routines, to be used
# with GD::Graph.
#
# $Id: colour.pm,v 1.10 2005/12/14 04:09:40 ben Exp $
#
#==========================================================================
package GD::Graph::colour;
($GD::Graph::colour::VERSION) = '$Revision: 1.10 $' =~ /\s([\d.]+)/;
=head1 NAME
GD::Graph::colour - Colour manipulation routines for use with GD::Graph
=head1 SYNOPSIS
use GD::Graph::colour qw(:colours :lists :files :convert);
=head1 DESCRIPTION
The B<GD::Graph::colour> package provides a few routines to work with
colours. The functionality of this package is mainly defined by what is
needed, now and historically, by the GD::Graph modules.
=cut
use vars qw( @EXPORT_OK %EXPORT_TAGS );
use strict;
require Exporter;
use Carp;
@GD::Graph::colour::ISA = qw( Exporter );
@EXPORT_OK = qw(
_rgb _luminance _hue add_colour
colour_list sorted_colour_list
read_rgb
hex2rgb rgb2hex
);
%EXPORT_TAGS = (
colours => [qw( add_colour _rgb _luminance _hue )],
lists => [qw( colour_list sorted_colour_list )],
files => [qw( read_rgb )],
convert => [qw( hex2rgb rgb2hex )],
);
my %RGB = (
white => [0xFF,0xFF,0xFF],
lgray => [0xBF,0xBF,0xBF],
gray => [0x7F,0x7F,0x7F],
dgray => [0x3F,0x3F,0x3F],
black => [0x00,0x00,0x00],
lblue => [0x00,0x00,0xFF],
blue => [0x00,0x00,0xBF],
dblue => [0x00,0x00,0x7F],
gold => [0xFF,0xD7,0x00],
lyellow => [0xFF,0xFF,0x00],
yellow => [0xBF,0xBF,0x00],
dyellow => [0x7F,0x7F,0x00],
lgreen => [0x00,0xFF,0x00],
green => [0x00,0xBF,0x00],
dgreen => [0x00,0x7F,0x00],
lred => [0xFF,0x00,0x00],
red => [0xBF,0x00,0x00],
dred => [0x7F,0x00,0x00],
lpurple => [0xFF,0x00,0xFF],
purple => [0xBF,0x00,0xBF],
dpurple => [0x7F,0x00,0x7F],
lorange => [0xFF,0xB7,0x00],
orange => [0xFF,0x7F,0x00],
pink => [0xFF,0xB7,0xC1],
dpink => [0xFF,0x69,0xB4],
marine => [0x7F,0x7F,0xFF],
cyan => [0x00,0xFF,0xFF],
lbrown => [0xD2,0xB4,0x8C],
dbrown => [0xA5,0x2A,0x2A],
);
=head1 FUNCTIONS
=head2 colour_list( I<number of colours> )
Returns a list of I<number of colours> colour names known to the package.
Exported with the :lists tag.
=cut
# Return a list of up to $n colour names known to the package.
# With no (or a false) argument, all known colours are returned.
sub colour_list
{
    my $n = ( $_[0] ) ? $_[0] : keys %RGB;
    # Clamp to the number of known colours: previously asking for more
    # colours than exist produced trailing undef entries in the slice
    my $known = keys %RGB;
    $n = $known if $n > $known;
    return (keys %RGB)[0 .. $n-1];
}
=head2 sorted_colour_list( I<number of colours> )
Returns a list of I<number of colours> colour names known to the package,
sorted by luminance or hue.
B<NB.> Right now it always sorts by luminance. Will add an option in a later
stage to decide sorting method at run time.
Exported with the :lists tag.
=cut
sub sorted_colour_list
{
# Number of colours requested; defaults to all known colours when the
# argument is missing or zero
my $n = $_[0] ? $_[0] : keys %RGB;
# NB: always sorts by decreasing luminance for now (see POD above);
# the by_hue comparator is retained for a future run-time option
return (sort by_luminance keys %RGB)[0 .. $n-1];
# return (sort by_hue keys %rgb)[0..$n-1];
# Although lexically nested, these named comparators are ordinary
# package-level subs; $a/$b are the package sort variables
sub by_luminance { _luminance(@{$RGB{$b}}) <=> _luminance(@{$RGB{$a}}) }
sub by_hue { _hue(@{$RGB{$b}}) <=> _hue(@{$RGB{$a}}) }
}
=head2 _rgb( I<colour name> )
Returns a list of the RGB values of I<colour name>. if the colour name
is a string of the form that is acceptable to the hex2rgb sub, then the
colour will be added to the list dynamically.
Exported with the :colours tag.
=cut
my %warned_clrs = ();

# return the RGB values of the colour name
sub _rgb
{
    my $clr = shift or return;

    # A parseable spec (e.g. '#rrggbb') is registered on first use
    add_colour($clr) unless exists $RGB{$clr};

    my $triplet = $RGB{$clr};
    unless (defined $triplet)
    {
        # Unknown colour: fall back to black, warning once per name
        $triplet = $RGB{'black'};
        carp "Colour $clr is not defined, reverting to black"
            unless $warned_clrs{$clr}++;
    }
    return @{$triplet};
}
=head2 _hue( I<R,G,B> )
Returns the hue of the colour with the specified RGB values.
Exported with the :colours tag.
=head2 _luminance( I<R,G,B> )
Returns the luminance of the colour with the specified RGB values.
Exported with the :colours tag.
=cut
# return the luminance of the colour (RGB), normalised to [0, 1]
sub _luminance
{
    my ($r, $g, $b) = @_;
    return (0.212671 * $r + 0.715160 * $g + 0.072169 * $b) / 0xFF;
}
# return the "hue" of the colour (RGB): the mean channel value in [0, 1]
sub _hue
{
    my ($r, $g, $b) = @_;
    return ($r + $g + $b) / (3 * 0xFF);
}
=head2 add_colour(colourname => [$r, $g, $b]) or
add_colour('#7fe310')
Self-explanatory.
Exported with the :colours tag.
=cut
# Register a colour, either as (name => [r, g, b]) or as a lone
# '#rrggbb' string that serves as both name and value.
# Returns the name on success, nothing on failure.
sub add_colour
{
    my ($name, $val) = @_;

    # No explicit value: try to parse the name itself as '#rrggbb'
    unless (defined $val)
    {
        my @rgb = hex2rgb($name) or return;
        $val = [@rgb];
    }

    return unless ref $val eq 'ARRAY';
    $RGB{$name} = [@{$val}];
    return $name;
}
=head2 rgb2hex($red, $green, $blue)
=head2 hex2rgb('#7fe310')
These functions translate a list of RGB values into a hexadecimal
string, as is commonly used in HTML and the Image::Magick API, and vice
versa.
Exported with the :convert tag.
=cut
# Color translation: (r, g, b) -> '#rrggbb'; returns nothing unless
# exactly three values are supplied
sub rgb2hex
{
    return unless @_ == 3;
    return '#' . join '', map { sprintf '%02x', $_ } @_;
}
# Translate a '#rrggbb' string (case-insensitive) into a list of three
# numeric RGB values; returns nothing when the string does not parse
sub hex2rgb
{
    my ($spec) = @_;
    my @rgb = $spec =~ /^#([a-f0-9]{2})([a-f0-9]{2})([a-f0-9]{2})$/i;
    return unless @rgb;
    return map { hex } @rgb;
}
=head2 read_rgb( F<file name> )
Reads in colours from a rgb file as used by the X11 system.
Doing something like:
use GD::Graph::bars;
use GD::Graph::colour;
GD::Graph::colour::read_rgb("rgb.txt") or die "cannot read colours";
Will allow you to use any colours defined in rgb.txt in your graph.
Exported with the :files tag.
=cut
#
# Read a rgb.txt file (X11)
#
# Expected format of the file:
#
# R G B colour name
#
# Fields can be separated by any number of whitespace
# Lines starting with an exclamation mark (!) are comment and
# will be ignored.
#
# returns number of colours read
sub read_rgb($) # (filename)
{
    my $fn = shift;
    my $n  = 0;

    # Three-argument open with a lexical handle (was a bareword 2-arg
    # open); keep the documented "0 on failure" behaviour
    open(my $rgb_fh, '<', $fn) or return 0;
    while (defined(my $line = <$rgb_fh>))
    {
        # BUGFIX: only lines *starting* with '!' (after optional leading
        # whitespace) are comments. The old pattern /\s*!/ skipped any
        # line merely containing a '!' anywhere
        next if $line =~ /^\s*!/;
        chomp($line);
        # remove leading white space
        $line =~ s/^\s+//;
        # get the colours
        my ($r, $g, $b, $name) = split(/\s+/, $line, 4);
        # Ignore bad lines
        next unless (defined $name);
        $RGB{$name} = [$r, $g, $b];
        $n++;
    }
    close($rgb_fh);
    return $n;
}
# Return the package version string (parsed from the RCS Revision at the top)
sub version { $GD::Graph::colour::VERSION }
# Print up to $max colours (all when no argument) with their RGB values,
# one per line, sorted by luminance.
sub dump_colours
{
    my $max = $_[0] ? $_[0] : keys %RGB;
    my $n   = 0;

    foreach my $clr (sorted_colour_list($max))
    {
        # NOTE(review): $n is never incremented, so this guard never fires;
        # the list above is already limited to $max entries - preserved as-is
        last if $n > $max;
        my ($r, $g, $b) = @{$RGB{$clr}};
        print "colour: $clr, $r, $g, $b\n"
    }
}
"Just another true value";
__END__
=head1 PREDEFINED COLOUR NAMES
white,
lgray,
gray,
dgray,
black,
lblue,
blue,
dblue,
gold,
lyellow,
yellow,
dyellow,
lgreen,
green,
dgreen,
lred,
red,
dred,
lpurple,
purple,
dpurple,
lorange,
orange,
pink,
dpink,
marine,
cyan,
lbrown,
dbrown.
=head1 AUTHOR
Martien Verbruggen E<lt>mgjv@tradingpost.com.auE<gt>
=head2 Copyright
GIFgraph: Copyright (c) 1995-1999 Martien Verbruggen.
Chart::PNGgraph: Copyright (c) 1999 Steve Bonds.
GD::Graph: Copyright (c) 1999 Martien Verbruggen.
All rights reserved. This package is free software; you can redistribute
it and/or modify it under the same terms as Perl itself.
=head1 SEE ALSO
L<GD::Graph>,
L<GD::Graph::FAQ>
| carlgao/lenga | images/lenny64-peon/usr/share/perl5/GD/Graph/colour.pm | Perl | mit | 7,756 |
=pod
=head1 NAME
SSL_clear - reset SSL object to allow another connection
=head1 SYNOPSIS
#include <openssl/ssl.h>
int SSL_clear(SSL *ssl);
=head1 DESCRIPTION
Reset B<ssl> to allow another connection. All settings (method, ciphers,
BIOs) are kept.
=head1 NOTES
SSL_clear is used to prepare an SSL object for a new connection. While all
settings are kept, a side effect is the handling of the current SSL session.
If a session is still B<open>, it is considered bad and will be removed
from the session cache, as required by RFC2246. A session is considered open,
if L<SSL_shutdown(3)> was not called for the connection
or at least L<SSL_set_shutdown(3)> was used to
set the SSL_SENT_SHUTDOWN state.
If a session was closed cleanly, the session object and all corresponding
settings will be kept. This explicitly means that e.g. the special method
used during the session will be kept for the next handshake. So if the
session was a TLSv1 session, a SSL client object will use a TLSv1 client
method for the next handshake and a SSL server object will use a TLSv1
server method, even if TLS_*_methods were chosen on startup. This
might lead to connection failures; see L<SSL_new(3)>
for a description of the method's properties.
=head1 WARNINGS
SSL_clear() resets the SSL object to allow for another connection. The
reset operation however keeps several settings of the last sessions
(some of these settings were made automatically during the last
handshake). It only makes sense for a new connection with the exact
same peer that shares these settings, and may fail if that peer
changes its settings between connections. Use the sequence
L<SSL_get_session(3)>;
L<SSL_new(3)>;
L<SSL_set_session(3)>;
L<SSL_free(3)>
instead to avoid such failures
(or simply L<SSL_free(3)>; L<SSL_new(3)>
if session reuse is not desired).
=head1 RETURN VALUES
The following return values can occur:
=over 4
=item Z<>0
The SSL_clear() operation could not be performed. Check the error stack to
find out the reason.
=item Z<>1
The SSL_clear() operation was successful.
=back
=head1 SEE ALSO

L<SSL_new(3)>, L<SSL_free(3)>,
L<SSL_shutdown(3)>, L<SSL_set_shutdown(3)>,
L<SSL_CTX_set_options(3)>, L<ssl(3)>,
L<SSL_CTX_set_client_cert_cb(3)>
=head1 COPYRIGHT
Copyright 2000-2016 The OpenSSL Project Authors. All Rights Reserved.
Licensed under the OpenSSL license (the "License"). You may not use
this file except in compliance with the License. You can obtain a copy
in the file LICENSE in the source distribution or at
L<https://www.openssl.org/source/license.html>.
=cut
| openweave/openweave-core | third_party/openssl/openssl/doc/ssl/SSL_clear.pod | Perl | apache-2.0 | 2,567 |
# $Id: Subs.pm,v 1.1 2003/07/27 16:07:49 matt Exp $
package XML::Parser::Style::Subs;
# Start-tag handler: dispatch to a sub named after the element in the
# configured package (Pkg option). A missing handler is silently ignored
# (the eval deliberately swallows the "undefined subroutine" error).
sub Start {
  no strict 'refs';
  my ($expat, $tag) = (shift, shift);
  my $handler = $expat->{Pkg} . "::$tag";
  eval { $handler->($expat, $tag, @_) };
}
# End-tag handler: dispatch to "<element>_" in the configured package.
# As with Start, a missing handler is silently ignored via eval.
sub End {
  no strict 'refs';
  my ($expat, $tag) = (shift, shift);
  my $handler = $expat->{Pkg} . "::${tag}_";
  eval { $handler->($expat, $tag) };
}
1;
__END__
=head1 NAME
XML::Parser::Style::Subs
=head1 SYNOPSIS
use XML::Parser;
my $p = XML::Parser->new(Style => 'Subs', Pkg => 'MySubs');
$p->parsefile('foo.xml');
{
package MySubs;
sub foo {
# start of foo tag
}
sub foo_ {
# end of foo tag
}
}
=head1 DESCRIPTION
Each time an element starts, a sub by that name in the package specified
by the Pkg option is called with the same parameters that the Start
handler gets called with.
Each time an element ends, a sub with that name appended with an underscore
("_"), is called with the same parameters that the End handler gets called
with.
Nothing special is returned by parse.
=cut | leighpauls/k2cro4 | third_party/cygwin/lib/perl5/vendor_perl/5.10/i686-cygwin/XML/Parser/Style/Subs.pm | Perl | bsd-3-clause | 1,080 |
package O2::Template::Node::HtmlComment;
# Template-tree node representing an HTML comment in a parsed template.
# Adds no behaviour of its own - everything is inherited from O2::Template::Node.
use strict;
use base 'O2::Template::Node';
1;
| haakonsk/O2-Framework | lib/O2/Template/Node/HtmlComment.pm | Perl | mit | 90 |
package MIP::Recipes::Download::Giab;
use 5.026;
use Carp;
use charnames qw{ :full :short };
use English qw{ -no_match_vars };
use File::Basename qw{ dirname };
use File::Spec::Functions qw{ catfile };
use open qw{ :encoding(UTF-8) :std };
use Params::Check qw{ allow check last_error };
use utf8;
use warnings;
use warnings qw{ FATAL utf8 };
## CPANM
use autodie qw{ :all };
use Readonly;
## MIPs lib/
use MIP::Constants qw{ $NEWLINE $SPACE $UNDERSCORE };
BEGIN {
require Exporter;
use base qw{ Exporter };
# Functions and variables which can be optionally exported
our @EXPORT_OK = qw{ download_giab };
}
sub download_giab {

## Function : Download giab reference files and write/submit the recipe script
## Returns  : 1
## Arguments: $active_parameter_href => Active parameters for this download hash {REF}
##          : $genome_version        => Human genome version
##          : $job_id_href           => The job_id hash {REF}
##          : $profile_base_command  => Submission profile base command
##          : $recipe_name           => Recipe name
##          : $reference_href        => Reference hash {REF}
##          : $reference_version     => Reference version
##          : $quiet                 => Quiet (no output)
##          : $temp_directory        => Temporary directory for recipe
##          : $verbose               => Verbosity

    my ($arg_href) = @_;

    ## Flatten argument(s)
    my $active_parameter_href;
    my $genome_version;
    my $job_id_href;
    my $recipe_name;
    my $reference_href;
    my $reference_version;

    ## Default(s)
    my $profile_base_command;
    my $quiet;
    my $temp_directory;
    my $verbose;

    my $tmpl = {
        active_parameter_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$active_parameter_href,
            strict_type => 1,
        },
        genome_version => {
            store       => \$genome_version,
            strict_type => 1,
        },
        job_id_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$job_id_href,
            strict_type => 1,
        },
        profile_base_command => {
            default     => q{sbatch},
            store       => \$profile_base_command,
            strict_type => 1,
        },
        recipe_name => {
            defined     => 1,
            required    => 1,
            store       => \$recipe_name,
            strict_type => 1,
        },
        reference_href => {
            default     => {},
            defined     => 1,
            required    => 1,
            store       => \$reference_href,
            strict_type => 1,
        },
        reference_version => {
            defined     => 1,
            required    => 1,
            store       => \$reference_version,
            strict_type => 1,
        },
        quiet => {
            allow       => [ undef, 0, 1 ],
            default     => 1,
            store       => \$quiet,
            strict_type => 1,
        },
        temp_directory => {
            store       => \$temp_directory,
            strict_type => 1,
        },
        ## BUGFIX: $verbose was declared, documented above and passed to
        ## get_reference, but had no tmpl entry, so Params::Check never
        ## populated (or accepted) it
        verbose => {
            allow       => [ undef, 0, 1 ],
            default     => 0,
            store       => \$verbose,
            strict_type => 1,
        },
    };

    check( $tmpl, $arg_href, 1 ) or croak q{Could not parse arguments!};

    use MIP::Processmanagement::Slurm_processes qw{ slurm_submit_job_no_dependency_dead_end };
    use MIP::Recipe qw{ parse_recipe_prerequisites };
    use MIP::Recipes::Download::Get_reference qw{ get_reference };
    use MIP::Script::Setup_script qw{ setup_script };

    ### PREPROCESSING:

    ## Retrieve logger object
    my $log = Log::Log4perl->get_logger( uc q{mip_download} );

    ## Unpack parameters
    my $reference_dir = $active_parameter_href->{reference_dir};

    my %recipe = parse_recipe_prerequisites(
        {
            active_parameter_href => $active_parameter_href,
            recipe_name           => $recipe_name,
        }
    );

    ## Filehandle(s)
    # Create anonymous filehandle
    my $filehandle = IO::Handle->new();

    ## Creates recipe directories (info & data & script), recipe script filenames and writes sbatch header
    my ( $recipe_file_path, $recipe_info_path ) = setup_script(
        {
            active_parameter_href           => $active_parameter_href,
            core_number                     => $recipe{core_number},
            directory_id                    => q{mip_download},
            filehandle                      => $filehandle,
            info_file_id                    => $genome_version . $UNDERSCORE . $reference_version,
            job_id_href                     => $job_id_href,
            memory_allocation               => $recipe{memory},
            outdata_dir                     => $reference_dir,
            outscript_dir                   => $reference_dir,
            process_time                    => $recipe{time},
            recipe_data_directory_path      => $active_parameter_href->{reference_dir},
            recipe_directory                => $recipe_name . $UNDERSCORE . $reference_version,
            recipe_name                     => $recipe_name,
            source_environment_commands_ref => $recipe{load_env_ref},
        }
    );

    ### SHELL:

    say {$filehandle} q{## } . $recipe_name;

    ## Write the download command(s) for this reference to the script
    get_reference(
        {
            filehandle    => $filehandle,
            recipe_name   => $recipe_name,
            reference_dir => $reference_dir,
            reference_href => $reference_href,
            quiet          => $quiet,
            verbose        => $verbose,
        }
    );

    ## Close filehandleS
    close $filehandle or $log->logcroak(q{Could not close filehandle});

    if ( $recipe{mode} == 1 ) {

        ## No upstream or downstream dependencies
        slurm_submit_job_no_dependency_dead_end(
            {
                base_command     => $profile_base_command,
                job_id_href      => $job_id_href,
                log              => $log,
                sbatch_file_name => $recipe_file_path,
            }
        );
    }
    return 1;
}
1;
| henrikstranneheim/MIP | lib/MIP/Recipes/Download/Giab.pm | Perl | mit | 6,081 |
#!/usr/bin/perl -w
# Copyright 2001, 20002 Rob Edwards
# For updates, more information, or to discuss the scripts
# please contact Rob Edwards at redwards@utmem.edu or via http://www.salmonella.org/
#
# This file is part of The Phage Proteome Scripts developed by Rob Edwards.
#
# These scripts are free software; you can redistribute and/or modify
# them under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# They are distributed in the hope that they will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# in the file (COPYING) along with these scripts; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#
# combinprotdists.pl
# modified to work with bacterial assemblies!!
# new version. This will assign a distance of $penalty to any sequence that does not match, and to all empty
# spaces. There is a good rationale for this. The Dayhoff PAM matrix scoring system returns a percent of
# the amino acids that are likely to have changed. Therefore a 100% score means that they have all changed.
# We will make an average, and include the number of scores used to calculate the average. Then we can fitch
# it with the subreplicas option.
# the subreplicate number will be added if the protein appears, but is not similar. (i.e. an average score of 100 2
# means that two proteins were found to be similar but were too distant for a score. But an average score of
# 100 0 means that no proteins were found!
# in this version you can select -n for no padding of missing proteins. Padding runs through the genome and if
# no match to another genome is found it increments the score by 100 for each protein in the query (line) genome
use DBI;
use strict;
my $usage = "combineprotdists.pl <dir of prot dists> <number of genomes used> <options>\nOPTIONS\n";
$usage .= "\t-n DON'T pad missing proteins with worst score (supplied with the -p option)\n\t-m print out all protein matches";
$usage .= "\n\t-p # penalty for being bad Rob. Default is 100\n";
my $dbh=DBI->connect("DBI:mysql:bacteria", "SQLUSER", "SQLPASSWORD") or die "Can't connect to database\n";
my $dir= shift || &niceexit($usage);
my $nogenomes = shift || &niceexit($usage);
my $args= join (" ", @ARGV);
my $pad=1; my $penalty =100;
if ($args =~ /-n/) {$pad=0}
if ($args =~ /-p\s+(\d+)/) {$penalty=$1}
print STDERR "Using PENALTY of $penalty and PAD of $pad\n";
my %noorfs;
&getnoorfs;
my $filecount; my $oldtime=$^T;
my @matrix;
my @count;
my @proteinmatches; my @genematches;
my %linegenomecount;
# read each file one at a time, and add the data to an array
opendir(DIR, $dir) || &niceexit("Can't open $dir");
while (my $file=readdir(DIR)) {
next if ($file =~ /^\./);
open (IN, "$dir/$file") || &niceexit("Can't open $dir/$file");
$filecount++; unless ($filecount % 1000) {print STDERR "$filecount\t", time-$^T, "\t", time-$oldtime, "\n"; $oldtime = time}
my @genomes = ('0');
my @genes = ('0');
my @dists;
my %genomeshash;
my %checkdupgenomes;
my %genegenome;
# we need to know all the genomes before we can store the data. Therefore
# read and store each line in @dists
# then get all the genome numbers and store them in an array
while (my $line = <IN>) {
chomp($line);
my @line = split (/\s+/, $line);
next unless ($#line);
next if ($line =~ /^\s+/);
# added loop to get the genome number from the database
unless ($line[0] =~ /_/) {
unless ($genegenome{$line[0]}) {
my $exc = $dbh->prepare("select organism from protein where count like '$line[0]'");
$exc->execute or die $dbh->errstr;
my @retrieved = $exc->fetchrow_array;
$genegenome{$line[0]} = $retrieved[0];
}
$line[0] .= "_".$genegenome{$line[0]};
$line = join (" ", @line); # this just corrects $line if we change it
}
push (@dists, $line);
my ($gene, $genome) = split (/_/, $line[0]);
unless ($gene && $genome) {&niceexit("Can't parse $line in $file\n")}
push (@genes, $gene);
push (@genomes, $genome);
$checkdupgenomes{$genome}++;
}
# now we loop through all the lines, and split them on white space.
# then we add each value to the pre-existing value in the matrix
# note that because the genomes are represented as numbers we can just
# use these numbers for the position in the matrix.
# we are going to also count the number of times that we save each data
# point for the final average.
# Finally, we only do this in one direction because the input
# matrices are complete (and identical) on both halves.
# note that column zero of the matrix is empty (there is no genome 0)
foreach my $z (0 .. $#dists) {
#print STDERR "\nSKIPPING: ";
my @line = split (/\s+/, $dists[$z]);
unless ($#line == $#genomes) {
my $x; foreach my $y (0 .. $#dists) {if ($dists[$y] eq $dists[$z]) {$x = $y}}
&niceexit("PROBLEM WITH \n@line AND \n@genomes\n\nIN FILE: $file\n\nBECAUSE $#line AND $#genomes AT LINE $x\n");
}
my ($gene, $linegenome) = split (/_/, $line[0]);
unless ($gene && $linegenome) {&niceexit("CAN'T PARSE @line SECOND TIME AROUND\n")}
$linegenomecount{$linegenome}++;
my @seengenome;
foreach my $x (1 .. $#genomes) {
#if ($x <= $z+1) {print STDERR " $line[$x]"; next}
next if ($x <= $z+1);
# If we are padding the table with 100s where there is no match, we
# need to convert the -1's to 100. Otherwise we will ignore it.
if ($line[$x] == -1) {if ($pad) {$line[$x] = $penalty} else {next}}
#if it is itself, we want to make it zero. Otherwise, we'll save the protein numbers that match
# note that we can store all the pairwise protein matches, but we need the gene matches
# so that we can pad out missing genes correctly
if ($genomes[$x] == $linegenome) {$line[$x] = '0.000'}
# else {
# my $genematch;
# save the protein matches, but I only want to save them one way around
# to make it easier
# if ($gene <$genes[$x]) {$genematch = $gene.",".$genes[$x].";".$line[$x]}
# else {$genematch = $genes[$x].",".$gene.";".$line[$x]}
# protein match is a two dimensional array where each element is an array.
# but it is called with an array! 4 dimensions?
# ${$proteinmatches[$linegenome][$genomes[$x]]}{$genematch} =1;
# gene matches is all the genes from $linegenome that match genome. This will
# # be used to calculate the penalty for ORFs that are missed.
# }
$matrix[$linegenome][$genomes[$x]] += $line[$x];
$count[$linegenome][$genomes[$x]] ++;
$seengenome[$linegenome][$genomes[$x]] ++;
${$genematches[$linegenome][$genomes[$x]]}{$gene} =1;
}
# now we need to pad out all the missing genomes with 100's
if ($pad) {
foreach my $x (1 .. $nogenomes) {
next if ($checkdupgenomes{$x});
next if ($seengenome[$linegenome][$x]);
$matrix[$linegenome][$x] += $penalty;
$count[$linegenome][$x] ++;
}
}
}
}
{
# Penalize genome pairs that share only a few matches: for each ordered
# pair in the matrix, add $penalty once per ORF of the first genome that
# found no partner in the second, and bump the pair's count accordingly.
# A per-pair report is written to missing.seqs.txt.
#
# NOTE: this whole pass is permanently disabled by the `unless (1)` guard
# below -- the same padding penalty is now applied incrementally while the
# distance files are parsed (see the $pad handling above). Kept only as a
# reference implementation (dead code).
unless (1) {
if ($pad) {
open (MISS, ">missing.seqs.txt") || &niceexit("Can't open missing.seqs.txt\n");
print MISS "Original\t#ORFs\tCompared to\t#ORFS\t# similar\t# different\tcurr. score\tcurr. count\tpenalty\n";
foreach my $y (0 .. $#genematches) {
next unless (exists $noorfs{$y}); # this just checks we have orfs for genome $y
foreach my $x (1 .. $#{$matrix[$y]}) {
next unless (exists $noorfs{$x});
next if ($y == $x);
# genes of genome $y that hit genome $x; $#similar+1 == scalar @similar
my @similar = keys %{$genematches[$y][$x]};
my $difference = $noorfs{$y} - ($#similar+1);
print MISS "$y\t$noorfs{$y}\t$x\t$noorfs{$x}\t",$#similar+1, "\t$difference\t$matrix[$y][$x]\t$count[$y][$x]\t",($penalty * $difference),"\n";
next unless ($difference);
$matrix[$y][$x] += ($penalty * $difference);
$count[$y][$x] += $difference;
}
}
}
}
}
{
my %seen;
# Symmetrize the score matrix, then average each cell by its count.
# Only one half of the matrix was filled during parsing, so for every
# unordered pair {x,y} (tracked via %seen so it is processed once):
#   - both halves set but unequal  -> sum scores and counts into both;
#   - only one half set            -> mirror it into the other half;
#   - neither half set             -> fatal inconsistency.
foreach my $y (0 .. $#matrix) {
next unless ($matrix[$y]);
foreach my $x (1 .. $#{$matrix[$y]}) {
next unless ($count[$y][$x] && $matrix[$y][$x]);
my $temp = $x."+".$y; my $temp1 = $y."+".$x;
next if ($seen{$temp} || $seen{$temp1});
$seen{$temp} = $seen{$temp1} =1;
# because we are only looking at one half of the matrix (see above)
# we need to be sure that both halves are the same.
# this loop will take care of that.
if ($matrix[$y][$x] && $matrix[$x][$y]) {
unless ($matrix[$y][$x] == $matrix[$x][$y]) {
#print STDERR "$matrix[$y][$x] and $matrix[$x][$y] (counts: $count[$y][$x] AND $count[$x][$y]) ";
$matrix[$y][$x] = $matrix[$x][$y] = $matrix[$y][$x] + $matrix[$x][$y];
$count[$y][$x] = $count[$x][$y] = $count[$y][$x] + $count[$x][$y];
print STDERR " $matrix[$y][$x] and $matrix[$x][$y] (counts: $count[$y][$x] AND $count[$x][$y]) Fixed at 1\n";
}
}
elsif ($matrix[$y][$x]) {
unless ($matrix[$x][$y]) {
$matrix[$x][$y] = $matrix[$y][$x];
$count[$x][$y] = $count[$y][$x];
#print STDERR "Fixed at 2 ";
}
else {&niceexit("Can't figure out matrix $y and $x ($matrix[$x][$y] and $matrix[$y][$x] when merging 1\n")}
}
elsif ($matrix[$x][$y]) {
unless ($matrix[$y][$x]) {
$matrix[$y][$x] = $matrix[$x][$y];
$count[$y][$x] = $count[$x][$y];
#print STDERR "Fixed at 3 ";
}
else {&niceexit("Can't figure out matrix $y and $x ($matrix[$x][$y] and $matrix[$y][$x] when merging 2\n")}
}
else {&niceexit("Can't figure out matrix $y and $x ($matrix[$x][$y] and $matrix[$y][$x] when merging 3\n")}
# finally, take the average: both mirrored cells get score/count.
$matrix[$x][$y] = $matrix[$y][$x] = $matrix[$y][$x]/$count[$y][$x];
#print STDERR "AVERAGES : $matrix[$x][$y] AND $matrix[$y][$x]\n";
}
}
}
# Now we have all the data; print the matrix in a PHYLIP-like layout:
# a count line, then one row per genome with a 10-character padded label
# followed by "score count" cell pairs (diagonal prints "0 <#ORFs>").
print $#matrix, "\n";
#foreach my $y (1 .. $#matrix) {print STDERR "\t$y"}
#print STDERR "\n";
foreach my $y (1 .. $#matrix) {
my $tempstring = "genome".$y;
if (length($tempstring) > 10) {print STDERR "$tempstring is too long\n"}
my $spacestoadd = " " x (10 - length($tempstring));
print $tempstring,$spacestoadd;
foreach my $x (1 .. $#matrix) {
if ($y == $x) {print "0 $noorfs{$x} "; next}
# cell never filled: emit the padding penalty with a zero count
unless (defined $matrix[$y][$x]) {print "$penalty 0 "; next}
unless ($matrix[$y][$x]) {
print "0 ";
if ($count[$y][$x]) {print "$count[$y][$x] "}
else {print "0 "}
next;
}
print $matrix[$y][$x], " ", $count[$y][$x], " ";
}
print "\n";
}
# With -m, also dump the raw pairwise protein matches, one tab-separated
# cell per genome pair, merging both orientations of each pair.
# NOTE(review): legacy 2-arg open with a bareword handle; works, but
# 3-arg open with a lexical handle would be safer.
if ($args=~ /-m/) {
open (PROT, ">$dir.protein.matches") || &niceexit("Can't open $dir.protein.matches for writing\n");
#print out all the protein matches
foreach my $y (1 .. $nogenomes) {
my $tempstring = "genome".$y;
if (length($tempstring) > 10) {print STDERR "$tempstring is too long\n"}
my $spacestoadd = " " x (10 - length($tempstring));
print PROT $tempstring,$spacestoadd, "\t";
foreach my $x (1 .. $nogenomes) {
unless (defined $proteinmatches[$y][$x]) {print PROT "\t"; next}
unless ($proteinmatches[$y][$x]) {print PROT "\t"; next}
my @allmatches = (keys %{$proteinmatches[$y][$x]}, keys %{$proteinmatches[$x][$y]});
my %allmatches;
@allmatches{@allmatches}=1;
@allmatches = sort keys %allmatches;
print PROT join (" ", sort @allmatches), "\t";
}
print PROT "\n";
}
}
&niceexit(0);
# Count the ORFs per organism: tally every row of the protein table into
# the file-level %noorfs hash, keyed by the organism identifier.
sub getnoorfs {
    my $sth = $dbh->prepare("select organism from protein");
    $sth->execute or die $dbh->errstr;
    while (my ($organism) = $sth->fetchrow_array) {
        $noorfs{$organism}++;
    }
}
# Disconnect from the database and terminate. A true $reason is printed
# to STDERR and the script exits with -1; otherwise it exits cleanly.
sub niceexit {
    my ($reason) = @_;
    $dbh->disconnect;
    unless ($reason) {
        exit(0);
    }
    print STDERR $reason;
    exit(-1);
}
| linsalrob/bioinformatics | phage_tree/bact2combineprotdists.pl | Perl | mit | 11,991 |
/** <module> BSON document manipulation.
*
* Most of these predicates run in O(n), but that may change.
*/
:- module(_, [
doc_is_valid/1,
doc_empty/1,
doc_get/3,
doc_get_strict/3,
doc_put/4,
doc_delete/3,
doc_keys/2,
doc_values/2,
doc_keys_values/3
]).
:- include(bson(include/common)).
%% doc_is_valid(+Doc) is semidet.
%
% True if Doc is a valid BSON document.
%
% NOTE: Right now, this is accomplished by serializing the document:
% bson:doc_bytes/2 throws bson_error/2 on malformed input, which we
% catch and turn into plain failure. This can probably be done
% more efficiently than a full conversion to bytes.
doc_is_valid(Doc) :-
catch(
bson:doc_bytes(Doc, _Bytes),
bson_error(_Description, _EnvList),
fail).
%% doc_empty(?Doc) is semidet.
%
% True if Doc is an empty BSON document (the empty key-value pair list).
doc_empty([]).
%% doc_get(+Doc, +Key, ?Value) is semidet.
%
% True if Value is the value associated with Key in Doc
% or +null if Key cannot be found. This means that there
% is no way of knowing if Value actually was +null or not found.
%
% Caveat: if Key is present but a bound Value does not unify with the
% stored value, head unification skips that pair (the cut is never
% reached) and the call eventually fails rather than yielding +null.
doc_get([], _, +null).
doc_get([K-V|_], K, V) :- !.
doc_get([_|Pairs], K, V) :-
doc_get(Pairs, K, V).
%% doc_get_strict(+Doc, +Key, ?Value) is semidet.
%
% True if Value is the value associated with Key in Doc,
% or fails if Key is not found or does not match Value.
% Unlike doc_get/3, an empty document simply fails (no +null fallback).
doc_get_strict([K-V|_], K, V) :- !.
doc_get_strict([_|Pairs], K, V) :-
doc_get_strict(Pairs, K, V).
%% doc_put(+Doc, +Key, +Value, ?NewDoc) is semidet.
%
% True if NewDoc is Doc with the addition or update of the
% association Key-Value. The first occurrence of Key is replaced
% in place; if Key is absent, Key-Value is appended at the end.
doc_put([], K, V, [K-V]).
doc_put([K-_|Pairs], K, V, [K-V|Pairs]) :- !.
doc_put([Other|Pairs], K, V, [Other|Pairs1]) :-
doc_put(Pairs, K, V, Pairs1).
%% doc_delete(+Doc, +Key, ?NewDoc) is semidet.
%
% True if NewDoc is Doc with the association removed that has
% Key as key. At most one association (the first) is removed.
% No change if Key is not found.
doc_delete([], _, []).
doc_delete([K-_|Pairs], K, Pairs) :- !.
doc_delete([Other|Pairs], K, [Other|Pairs1]) :-
doc_delete(Pairs, K, Pairs1).
%% doc_keys(+Doc, ?Keys) is semidet.
%
% True if Keys is the list of keys of the associations in Doc,
% in document order. Delegates to doc_keys_values/3.
doc_keys(Doc, Keys) :-
doc_keys_values(Doc, Keys, _Values).
%% doc_values(+Doc, ?Values) is semidet.
%
% True if Values is the list of values of the associations in Doc,
% in document order. Delegates to doc_keys_values/3.
doc_values(Doc, Values) :-
doc_keys_values(Doc, _Keys, Values).
%% doc_keys_values(+Doc, ?Keys, ?Values) is semidet.
%% doc_keys_values(?Doc, +Keys, +Values) is semidet.
%
% True if Doc is the list of successive associations of
% Keys and Values. Works in both directions: it can split a
% document into parallel key/value lists or zip them back together.
doc_keys_values([], [], []).
doc_keys_values([K-V|Pairs], [K|Keys], [V|Values]) :-
doc_keys_values(Pairs, Keys, Values).
| khueue/prolog-bson | src/bson_doc.pl | Perl | mit | 2,710 |
use strict;
use warnings;
no warnings ('uninitialized', 'substr');
use Getopt::Long;
use Cwd;
use IPC::Cmd qw[can_run run];
use Bio::SeqIO;
use Bio::Root::Exception;
use Scalar::Util qw(looks_like_number);
use File::Path qw( make_path );

# For every allele id in the combined FASTA, scan each sample's FASTA and
# write one tab-separated row per allele to the .samples.log file listing
# the samples that carry a sequence for that allele.
#
# Usage: checkSamplesOverlapPCR.pl <samplesheet> <work_dir> <prefix> <prefix1> <prefix2>
my $samplesheet = shift;
my $work_dir    = shift;
my $prefix      = shift;
my $prefix1     = shift;
my $prefix2     = shift;

my %alleles = ();

open (my $log, '>', "$work_dir/$prefix.$prefix1.$prefix2.samples.log")
    or die "Cannot open $work_dir/$prefix.$prefix1.$prefix2.samples.log\n";

# Load the combined FASTA, keeping the first sequence seen for each id.
# BUG FIX: the original attached `or die "Cannot open ... .fast"` to this
# plain string assignment -- a non-empty string is always true, so the die
# could never fire (and there was nothing being opened). Removed.
my $fasta = "$work_dir/$prefix.$prefix1.$prefix2.fasta";
if (-e $fasta){
    my $seqio = Bio::SeqIO->new(-file => "$fasta", -format => "fasta");
    while (my $seq = $seqio->next_seq) {
        if (! exists $alleles{$seq->id}){
            $alleles{$seq->id} = $seq->seq();
        }
    }
}

foreach my $allele (keys %alleles){
    print {$log} "$allele";
    print "checking samples for $allele\n";
    # Re-read the sample sheet for each allele so every LOG row gets one
    # tab-separated column per sample, in sample-sheet order.
    open (my $sheet, '<', $samplesheet)
        or die "Cannot open uploaded $samplesheet. Try again.\n";
    while (<$sheet>) {
        print {$log} "\t";
        my @words  = split("\t", $_);
        my $sample = $words[0];
        my $sample_fasta = "$work_dir/$sample/$sample.$prefix1.$prefix2.fasta";
        if (-e $sample_fasta){
            my $seqio = Bio::SeqIO->new(-file => "$sample_fasta", -format => "fasta");
            while (my $seq = $seqio->next_seq) {
                # Sequence ids look like "<allele>:<rest>"; compare the allele part.
                my @ids = split(":", $seq->id());
                if ($ids[0] eq $allele){
                    print {$log} "$sample";
                    print "Found in $sample\n";
                }
            }
        }
        else {
            print "Cannot open $work_dir/$sample/$sample.$prefix1.$prefix2.fasta\n\n";
        }
    }
    close $sheet;
    print {$log} "\n";
}
#!/usr/bin/perl -w
# Copyright 2001, 20002 Rob Edwards
# For updates, more information, or to discuss the scripts
# please contact Rob Edwards at redwards@utmem.edu or via http://www.salmonella.org/
#
# This file is part of The Phage Proteome Scripts developed by Rob Edwards.
#
# Tnese scripts are free software; you can redistribute and/or modify
# them under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# They are distributed in the hope that they will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# in the file (COPYING) along with these scripts; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#
# Randomize the matrix file: read a genome distance matrix and write
# random.matrices containing the original matrix followed by <randtimes>
# row/column-shuffled copies (the genome *order* is shuffled; cell values
# stay attached to their genome pair).
use strict;

my $usage = "randommatrix.pl <file> <Number of times to randomize> <-s for subreplicates>\n";
my $file      = shift || die $usage;
my $randtimes = shift || die $usage;

# FIX: $args defaulted to undef, so every `$args =~ /-s/` match below
# emitted an "uninitialized value" warning under -w. Default to ''.
my $args = '';
if (@ARGV) {$args = join (" ", @ARGV)}
else {print STDERR "No subreplicates in file\n"}

my @matrix;
my %genomenumber;

# Parse rows of the form "genomeN v1 v2 ..."; column i+1 of genome N is
# the i-th value. With -s each cell is a "value count" pair, so glue
# consecutive fields together to keep a cell as one unit.
open (my $in, '<', $file) || die "Can't open $file\n";
while (<$in>) {
    next unless (/^genome/);
    chomp;
    s/genome//;
    my @line = split /\s+/;
    my $genome = shift @line;
    if ($args =~ /-s/) {
        my @paired;
        for (my $i = 0; $i <= $#line; $i += 2) {
            push @paired, join (" ", $line[$i], $line[$i+1]);
        }
        @line = @paired;
    }
    foreach my $i (0 .. $#line) {$matrix[$genome][$i+1] = $line[$i]}
    $genomenumber{$genome} = 1;
}
close $in;

open (my $out, '>', "random.matrices") || die "can't open random.matrices\n";
my @genomes = sort {$a <=> $b} keys %genomenumber;
# Iteration 0 emits the original ordering; iterations 1..$randtimes emit
# shuffled orderings (randomize() shuffles @genomes in place, so each
# shuffle starts from the previous ordering).
foreach my $i (0 .. $randtimes) {
    my $order = $i ? &randomize(\@genomes) : \@genomes;
    print {$out} $#genomes+1, "\n";
    foreach my $x (@$order) {
        my $genomestring = "genome".$x;
        # pad the label to a fixed 10-character column
        print {$out} $genomestring, " " x (10 - length($genomestring));
        foreach my $y (@$order) {
            if ($matrix[$x][$y]) {print {$out} $matrix[$x][$y], " "}
            else {print {$out} "0 "}
        }
        print {$out} "\n";
    }
}
close $out;
exit (0);
# In-place Fisher-Yates shuffle: walk the array from the last index down
# to 1, swapping each element with a uniformly chosen element at or below
# its position. Returns the same array reference it was given.
sub randomize {
    my ($array) = @_;
    my $i = scalar @$array;
    while (--$i) {
        my $j = int rand($i + 1);
        unless ($j == $i) {
            ($array->[$i], $array->[$j]) = ($array->[$j], $array->[$i]);
        }
    }
    return $array;
}
| linsalrob/bioinformatics | phage_tree/randommatrices.pl | Perl | mit | 2,665 |
use strict;
use warnings;
use 5.018;

# Learning Perl ex. 8-5: for each input line, report any word ending in
# 'a' (named capture 'word'), plus up to five word characters that
# immediately follow it (named capture 'after').
while (<>) {
    chomp;
    # BUG FIX: the original pattern used (?<after>\w{1,5})* -- repeating a
    # capture group consumes ANY number of trailing word characters and
    # leaves only the *last* repetition in $+{after}. A single optional
    # group captures at most five characters, as intended.
    if (/(?<word>\b\w*a)(?<after>\w{1,5})?/) {
        print "'word' contains '$+{word}' \n";
        if (defined $+{after}) {
            print "and 'after' contains '$+{after}'\n";
        }
    }
    else {
        print "No match: |$_| \n";
    }
}
| moosan63/LearningPerl | chapter8/ex8-5.pl | Perl | mit | 310 |
# $Id$
package ExtUtils::MakeMaker;
use strict;
BEGIN {require 5.006;}
require Exporter;
use ExtUtils::MakeMaker::Config;
use Carp;
use File::Path;
# Package-level state shared across the module.
our $Verbose = 0; # exported
our @Parent; # needs to be localized
our @Get_from_Config; # referenced by MM_Unix
our @MM_Sections;
our @Overridable;
my @Prepend_parent;
my %Recognized_Att_Keys;
our $VERSION = '6.66';
# Stringy eval normalizes dev releases like '6.66_01' to a plain number.
$VERSION = eval $VERSION; ## no critic [BuiltinFunctions::ProhibitStringyEval]
# Emulate something resembling CVS $Revision$
(our $Revision = $VERSION) =~ s{_}{};
$Revision = int $Revision * 10000;
our $Filename = __FILE__; # referenced outside MakeMaker
our @ISA = qw(Exporter);
our @EXPORT = qw(&WriteMakefile &writeMakefile $Verbose &prompt);
our @EXPORT_OK = qw($VERSION &neatvalue &mkbootstrap &mksymlists
&WriteEmptyMakefile);
# These will go away once the last of the Win32 & VMS specific code is
# purged.
my $Is_VMS = $^O eq 'VMS';
my $Is_Win32 = $^O eq 'MSWin32';
# Populate the section/attribute tables before anything else runs.
full_setup();
require ExtUtils::MM; # Things like CPAN assume loading ExtUtils::MakeMaker
# will give them MM.
require ExtUtils::MY; # XXX pre-5.8 versions of ExtUtils::Embed expect
# loading ExtUtils::MakeMaker will give them MY.
# This will go when Embed is its own CPAN module.
# Public entry point: validate the key/value argument list, normalize
# compatibility spellings, build an MM object from the attributes and
# write the Makefile out. Returns the MM object.
sub WriteMakefile {
    unless (@_ % 2 == 0) {
        croak "WriteMakefile: Need even number of args";
    }
    require ExtUtils::MY;

    my %att = @_;
    _convert_compat_attrs(\%att);
    _verify_att(\%att);

    my $makemaker = MM->new(\%att);
    $makemaker->flush;

    return $makemaker;
}
# Basic signatures of the attributes WriteMakefile takes. Each is the
# reference type. Empty value indicate it takes a non-reference
# scalar. A two-element list like ['ARRAY',''] means either form is
# accepted (e.g. LIBS may be an array ref or a plain string).
my %Att_Sigs;
my %Special_Sigs = (
AUTHOR => 'ARRAY',
C => 'ARRAY',
CONFIG => 'ARRAY',
CONFIGURE => 'CODE',
DIR => 'ARRAY',
DL_FUNCS => 'HASH',
DL_VARS => 'ARRAY',
EXCLUDE_EXT => 'ARRAY',
EXE_FILES => 'ARRAY',
FUNCLIST => 'ARRAY',
H => 'ARRAY',
IMPORTS => 'HASH',
INCLUDE_EXT => 'ARRAY',
LIBS => ['ARRAY',''],
MAN1PODS => 'HASH',
MAN3PODS => 'HASH',
META_ADD => 'HASH',
META_MERGE => 'HASH',
PL_FILES => 'HASH',
PM => 'HASH',
PMLIBDIRS => 'ARRAY',
PMLIBPARENTDIRS => 'ARRAY',
PREREQ_PM => 'HASH',
BUILD_REQUIRES => 'HASH',
CONFIGURE_REQUIRES => 'HASH',
TEST_REQUIRES => 'HASH',
SKIP => 'ARRAY',
TYPEMAPS => 'ARRAY',
XS => 'HASH',
VERSION => ['version',''],
_KEEP_AFTER_FLUSH => '',
clean => 'HASH',
depend => 'HASH',
dist => 'HASH',
dynamic_lib=> 'HASH',
linkext => 'HASH',
macro => 'HASH',
postamble => 'HASH',
realclean => 'HASH',
test => 'HASH',
tool_autosplit => 'HASH',
);
# Default every recognized attribute to "plain scalar", then overlay the
# special (reference-taking) signatures from %Special_Sigs.
@Att_Sigs{keys %Recognized_Att_Keys} = ('') x keys %Recognized_Att_Keys;
@Att_Sigs{keys %Special_Sigs} = values %Special_Sigs;
# Normalize backwards-compatible attribute spellings in place. Running it
# several times must be a no-op. Currently only AUTHOR is handled: a
# plain scalar becomes a one-element array ref, a false value becomes [],
# and an existing reference is left untouched.
sub _convert_compat_attrs {
    my ($att) = @_;

    return unless exists $att->{AUTHOR};

    my $author = $att->{AUTHOR};
    if (!$author) {
        $att->{AUTHOR} = [];
    }
    elsif (!ref $author) {
        $att->{AUTHOR} = [$author];
    }
}
# Warn (never die) about WriteMakefile attributes that are unknown or
# that carry the wrong reference type, according to %Att_Sigs.
sub _verify_att {
    my ($att) = @_;

    for my $key (keys %$att) {
        my $sig = $Att_Sigs{$key};
        if (!defined $sig) {
            warn "WARNING: $key is not a known parameter.\n";
            next;
        }

        my $val      = $att->{$key};
        my @wanted   = ref $sig ? @$sig : ($sig);
        my $got_type = ref $val;

        # Accept the value if it matches any of the allowed signatures.
        next if grep { _is_of_type($val, $_) } @wanted;

        my $takes = join " or ", map { _format_att($_) } @wanted;
        my $has   = _format_att($got_type);
        warn "WARNING: $key takes a $takes not a $has.\n".
             " Please inform the author.\n";
    }
}
# True when $thing is a reference whose ref() equals $type, or an object
# (or class name) whose isa($type) holds. The local __DIE__ suppression
# lets the isa() probe fail quietly inside the eval for non-objects.
sub _is_of_type {
    my ($thing, $type) = @_;

    if (ref($thing) eq $type) {
        return 1;
    }

    local $SIG{__DIE__};
    my $isa_ok = eval { $thing->isa($type) };
    return $isa_ok ? 1 : 0;
}
# Human-readable description of a ref() result for warning messages:
# '' => "string/number"; an all-caps core type (HASH, ARRAY, CODE, ...)
# => "X reference"; anything else (a class name) => "X object".
sub _format_att {
    my ($given) = @_;

    if ($given eq '') {
        return "string/number";
    }
    if (uc($given) eq $given) {
        return "$given reference";
    }
    return "$given object";
}
# Interactive prompt with a default. Prints "$mess [$def] " and reads one
# line from STDIN; returns the default when the user just hits return,
# when PERL_MM_USE_DEFAULT is set, or when STDIN is non-interactive and
# already at EOF. The prototype is historical and kept for compatibility.
sub prompt ($;$) { ## no critic
my($mess, $def) = @_;
confess("prompt function called without an argument")
unless defined $mess;
# Heuristic: treat the session as interactive when STDIN is a tty and
# STDOUT is either a tty or something that is not a plain file/chardev.
my $isa_tty = -t STDIN && (-t STDOUT || !(-f STDOUT || -c STDOUT)) ;
my $dispdef = defined $def ? "[$def] " : " ";
$def = defined $def ? $def : "";
# Unbuffer output for the prompt; reset $\ so print adds no terminator.
local $|=1;
local $\;
print "$mess $dispdef";
my $ans;
if ($ENV{PERL_MM_USE_DEFAULT} || (!$isa_tty && eof STDIN)) {
# Non-interactive (or forced): echo the default and use it.
print "$def\n";
}
else {
$ans = <STDIN>;
if( defined $ans ) {
chomp $ans;
}
else { # user hit ctrl-D
print "\n";
}
}
# Empty or absent answer falls back to the default.
return (!defined $ans || $ans eq '') ? $def : $ans;
}
# Run the Makefile.PL of every subdirectory listed in $self->{DIR}.
# @INC is localized to absolute paths (plus '.') so the chdir into each
# subdirectory does not break module loading. Stops at the first failure,
# restores the original cwd, then re-throws the error.
sub eval_in_subdirs {
my($self) = @_;
use Cwd qw(cwd abs_path);
my $pwd = cwd() || die "Can't figure out your cwd!";
# abs_path only for entries that exist; keep others (e.g. code refs) as-is.
local @INC = map eval {abs_path($_) if -e} || $_, @INC;
push @INC, '.'; # '.' has to always be at the end of @INC
foreach my $dir (@{$self->{DIR}}){
my($abs) = $self->catdir($pwd,$dir);
eval { $self->eval_in_x($abs); };
last if $@;
}
chdir $pwd;
die $@ if $@;
}
# chdir into $dir and execute its Makefile.PL in package main.
# Dies (with context) if the Makefile.PL raises an error; a chdir
# failure is only carped, matching historical behaviour.
sub eval_in_x {
my($self,$dir) = @_;
chdir $dir or carp("Couldn't change to directory $dir: $!");
{
package main;
do './Makefile.PL';
};
if ($@) {
# if ($@ =~ /prerequisites/) {
# die "MakeMaker WARNING: $@";
# } else {
# warn "WARNING from evaluation of $dir/Makefile.PL: $@";
# }
die "ERROR from evaluation of $dir/Makefile.PL: $@";
}
}
# package name for the classes into which the first object will be blessed
# (incremented per WriteMakefile call to give each object its own package)
my $PACKNAME = 'PACK000';
# One-time initialization of the module's configuration tables:
# recognized attributes, Makefile section order, overridable methods,
# %Config values to import, and keys inherited by child Makefile.PLs.
sub full_setup {
$Verbose ||= 0;
my @attrib_help = qw/
AUTHOR ABSTRACT ABSTRACT_FROM BINARY_LOCATION
C CAPI CCFLAGS CONFIG CONFIGURE DEFINE DIR DISTNAME DISTVNAME
DL_FUNCS DL_VARS
EXCLUDE_EXT EXE_FILES FIRST_MAKEFILE
FULLPERL FULLPERLRUN FULLPERLRUNINST
FUNCLIST H IMPORTS
INST_ARCHLIB INST_SCRIPT INST_BIN INST_LIB INST_MAN1DIR INST_MAN3DIR
INSTALLDIRS
DESTDIR PREFIX INSTALL_BASE
PERLPREFIX SITEPREFIX VENDORPREFIX
INSTALLPRIVLIB INSTALLSITELIB INSTALLVENDORLIB
INSTALLARCHLIB INSTALLSITEARCH INSTALLVENDORARCH
INSTALLBIN INSTALLSITEBIN INSTALLVENDORBIN
INSTALLMAN1DIR INSTALLMAN3DIR
INSTALLSITEMAN1DIR INSTALLSITEMAN3DIR
INSTALLVENDORMAN1DIR INSTALLVENDORMAN3DIR
INSTALLSCRIPT INSTALLSITESCRIPT INSTALLVENDORSCRIPT
PERL_LIB PERL_ARCHLIB
SITELIBEXP SITEARCHEXP
INC INCLUDE_EXT LDFROM LIB LIBPERL_A LIBS LICENSE
LINKTYPE MAKE MAKEAPERL MAKEFILE MAKEFILE_OLD MAN1PODS MAN3PODS MAP_TARGET
META_ADD META_MERGE MIN_PERL_VERSION BUILD_REQUIRES CONFIGURE_REQUIRES
MYEXTLIB NAME NEEDS_LINKING NOECHO NO_META NO_MYMETA
NORECURS NO_VC OBJECT OPTIMIZE PERL_MALLOC_OK PERL PERLMAINCC PERLRUN
PERLRUNINST PERL_CORE
PERL_SRC PERM_DIR PERM_RW PERM_RWX
PL_FILES PM PM_FILTER PMLIBDIRS PMLIBPARENTDIRS POLLUTE PPM_INSTALL_EXEC
PPM_INSTALL_SCRIPT PREREQ_FATAL PREREQ_PM PREREQ_PRINT PRINT_PREREQ
SIGN SKIP TEST_REQUIRES TYPEMAPS VERSION VERSION_FROM XS XSOPT XSPROTOARG
XS_VERSION clean depend dist dynamic_lib linkext macro realclean
tool_autosplit
MACPERL_SRC MACPERL_LIB MACLIBS_68K MACLIBS_PPC MACLIBS_SC MACLIBS_MRC
MACLIBS_ALL_68K MACLIBS_ALL_PPC MACLIBS_SHARED
/;
# IMPORTS is used under OS/2 and Win32
# @Overridable is close to @MM_Sections but not identical. The
# order is important. Many subroutines declare macros. These
# depend on each other. Let's try to collect the macros up front,
# then pasthru, then the rules.
# MM_Sections are the sections we have to call explicitly
# in Overridable we have subroutines that are used indirectly
@MM_Sections =
qw(
post_initialize const_config constants platform_constants
tool_autosplit tool_xsubpp tools_other
makemakerdflt
dist macro depend cflags const_loadlibs const_cccmd
post_constants
pasthru
special_targets
c_o xs_c xs_o
top_targets blibdirs linkext dlsyms dynamic dynamic_bs
dynamic_lib static static_lib manifypods processPL
installbin subdirs
clean_subdirs clean realclean_subdirs realclean
metafile signature
dist_basics dist_core distdir dist_test dist_ci distmeta distsignature
install force perldepend makefile staticmake test ppd
); # loses section ordering
@Overridable = @MM_Sections;
push @Overridable, qw[
libscan makeaperl needs_linking
subdir_x test_via_harness test_via_script
init_VERSION init_dist init_INST init_INSTALL init_DEST init_dirscan
init_PM init_MANPODS init_xs init_PERL init_DIRFILESEP init_linker
];
push @MM_Sections, qw[
pm_to_blib selfdocument
];
# Postamble needs to be the last that was always the case
push @MM_Sections, "postamble";
push @Overridable, "postamble";
# All sections are valid keys.
@Recognized_Att_Keys{@MM_Sections} = (1) x @MM_Sections;
# we will use all these variables in the Makefile
@Get_from_Config =
qw(
ar cc cccdlflags ccdlflags dlext dlsrc exe_ext full_ar ld
lddlflags ldflags libc lib_ext obj_ext osname osvers ranlib
sitelibexp sitearchexp so
);
# 5.5.3 doesn't have any concept of vendor libs
push @Get_from_Config, qw( vendorarchexp vendorlibexp ) if $] >= 5.006;
foreach my $item (@attrib_help){
$Recognized_Att_Keys{$item} = 1;
}
# Upper-cased %Config names are also accepted as attributes and carry
# the %Config value as their default.
foreach my $item (@Get_from_Config) {
$Recognized_Att_Keys{uc $item} = $Config{$item};
print "Attribute '\U$item\E' => '$Config{$item}'\n"
if ($Verbose >= 2);
}
#
# When we eval a Makefile.PL in a subdirectory, that one will ask
# us (the parent) for the values and will prepend "..", so that
# all files to be installed end up below OUR ./blib
#
@Prepend_parent = qw(
INST_BIN INST_LIB INST_ARCHLIB INST_SCRIPT
MAP_TARGET INST_MAN1DIR INST_MAN3DIR PERL_SRC
PERL FULLPERL
);
}
# Fatal stub for the pre-October-1994 lowercase API: any extension still
# calling writeMakefile() (instead of WriteMakefile) dies with an
# explanation of how to upgrade.
sub writeMakefile {
die <<END;
The extension you are trying to build apparently is rather old and
most probably outdated. We detect that from the fact, that a
subroutine "writeMakefile" is called, and this subroutine is not
supported anymore since about October 1994.
Please contact the author or look into CPAN (details about CPAN can be
found in the FAQ and at http:/www.perl.com) for a more recent version
of the extension. If you're really desperate, you can try to change
the subroutine name from writeMakefile to WriteMakefile and rerun
'perl Makefile.PL', but you're most probably left alone, when you do
so.
The MakeMaker team
END
}
# Constructor for the MakeMaker object driving Makefile generation.
# Phases: record/normalize arguments, check perl version and prereqs,
# run CONFIGURE, bless into a fresh per-object package, inherit from a
# parent Makefile.PL if nested, run all init_* methods, then render every
# Makefile section into @{$self->{RESULT}}.
sub new {
my($class,$self) = @_;
my($key);
_convert_compat_attrs($self) if defined $self && $self;
# Store the original args passed to WriteMakefile()
foreach my $k (keys %$self) {
$self->{ARGS}{$k} = $self->{$k};
}
$self = {} unless defined $self;
# Temporarily bless it into MM so it can be used as an
# object. It will be blessed into a temp package later.
bless $self, "MM";
# Cleanup all the module requirement bits
for my $key (qw(PREREQ_PM BUILD_REQUIRES CONFIGURE_REQUIRES TEST_REQUIRES)) {
$self->{$key} ||= {};
$self->clean_versions( $key );
}
if ("@ARGV" =~ /\bPREREQ_PRINT\b/) {
$self->_PREREQ_PRINT;
}
# PRINT_PREREQ is RedHatism.
if ("@ARGV" =~ /\bPRINT_PREREQ\b/) {
$self->_PRINT_PREREQ;
}
print "MakeMaker (v$VERSION)\n" if $Verbose;
if (-f "MANIFEST" && ! -f "Makefile" && ! $ENV{PERL_CORE}){
check_manifest();
}
check_hints($self);
# Translate X.Y.Z to X.00Y00Z
if( defined $self->{MIN_PERL_VERSION} ) {
$self->{MIN_PERL_VERSION} =~ s{ ^ (\d+) \. (\d+) \. (\d+) $ }
{sprintf "%d.%03d%03d", $1, $2, $3}ex;
}
# Compare MIN_PERL_VERSION against $]; warnings from a malformed value
# are promoted to errors so we can distinguish "bad format" from "too old".
my $perl_version_ok = eval {
local $SIG{__WARN__} = sub {
# simulate "use warnings FATAL => 'all'" for vintage perls
die @_;
};
!$self->{MIN_PERL_VERSION} or $self->{MIN_PERL_VERSION} <= $]
};
if (!$perl_version_ok) {
if (!defined $perl_version_ok) {
die <<'END';
Warning: MIN_PERL_VERSION is not in a recognized format.
Recommended is a quoted numerical value like '5.005' or '5.008001'.
END
}
elsif ($self->{PREREQ_FATAL}) {
die sprintf <<"END", $self->{MIN_PERL_VERSION}, $];
MakeMaker FATAL: perl version too low for this distribution.
Required is %s. We run %s.
END
}
else {
warn sprintf
"Warning: Perl version %s or higher required. We run %s.\n",
$self->{MIN_PERL_VERSION}, $];
}
}
my %configure_att; # record &{$self->{CONFIGURE}} attributes
my(%initial_att) = %$self; # record initial attributes
my(%unsatisfied) = ();
# Check every declared prerequisite against what is installed; collect
# missing/outdated modules in %unsatisfied.
my $prereqs = $self->_all_prereqs;
foreach my $prereq (sort keys %$prereqs) {
my $required_version = $prereqs->{$prereq};
my $installed_file = MM->_installed_file_for_module($prereq);
my $pr_version = 0;
$pr_version = MM->parse_version($installed_file) if $installed_file;
$pr_version = 0 if $pr_version eq 'undef';
# convert X.Y_Z alpha version #s to X.YZ for easier comparisons
$pr_version =~ s/(\d+)\.(\d+)_(\d+)/$1.$2$3/;
if (!$installed_file) {
warn sprintf "Warning: prerequisite %s %s not found.\n",
$prereq, $required_version
unless $self->{PREREQ_FATAL}
or $ENV{PERL_CORE};
$unsatisfied{$prereq} = 'not installed';
}
elsif ($pr_version < $required_version ){
warn sprintf "Warning: prerequisite %s %s not found. We have %s.\n",
$prereq, $required_version, ($pr_version || 'unknown version')
unless $self->{PREREQ_FATAL}
or $ENV{PERL_CORE};
$unsatisfied{$prereq} = $required_version ? $required_version : 'unknown version' ;
}
}
if (%unsatisfied && $self->{PREREQ_FATAL}){
my $failedprereqs = join "\n", map {" $_ $unsatisfied{$_}"}
sort { $a cmp $b } keys %unsatisfied;
die <<"END";
MakeMaker FATAL: prerequisites not found.
$failedprereqs
Please install these modules first and rerun 'perl Makefile.PL'.
END
}
# A CONFIGURE code ref may return extra attributes that are merged in.
if (defined $self->{CONFIGURE}) {
if (ref $self->{CONFIGURE} eq 'CODE') {
%configure_att = %{&{$self->{CONFIGURE}}};
_convert_compat_attrs(\%configure_att);
$self = { %$self, %configure_att };
} else {
croak "Attribute 'CONFIGURE' to WriteMakefile() not a code reference\n";
}
}
# This is for old Makefiles written pre 5.00, will go away
if ( Carp::longmess("") =~ /runsubdirpl/s ){
carp("WARNING: Please rerun 'perl Makefile.PL' to regenerate your Makefiles\n");
}
# Bless into a fresh package (PACK001, PACK002, ...) so per-Makefile.PL
# MY:: overrides can be moved in without clashing between objects.
my $newclass = ++$PACKNAME;
local @Parent = @Parent; # Protect against non-local exits
{
print "Blessing Object into class [$newclass]\n" if $Verbose>=2;
mv_all_methods("MY",$newclass);
bless $self, $newclass;
push @Parent, $self;
require ExtUtils::MY;
no strict 'refs'; ## no critic;
@{"$newclass\:\:ISA"} = 'MM';
}
# If nested below another Makefile.PL, inherit selected paths from the
# parent (prefixed with "..") so installs land in the parent's blib.
if (defined $Parent[-2]){
$self->{PARENT} = $Parent[-2];
for my $key (@Prepend_parent) {
next unless defined $self->{PARENT}{$key};
# Don't stomp on WriteMakefile() args.
next if defined $self->{ARGS}{$key} and
$self->{ARGS}{$key} eq $self->{$key};
$self->{$key} = $self->{PARENT}{$key};
unless ($Is_VMS && $key =~ /PERL$/) {
$self->{$key} = $self->catdir("..",$self->{$key})
unless $self->file_name_is_absolute($self->{$key});
} else {
# PERL or FULLPERL will be a command verb or even a
# command with an argument instead of a full file
# specification under VMS. So, don't turn the command
# into a filespec, but do add a level to the path of
# the argument if not already absolute.
my @cmd = split /\s+/, $self->{$key};
$cmd[1] = $self->catfile('[-]',$cmd[1])
unless (@cmd < 2) || $self->file_name_is_absolute($cmd[1]);
$self->{$key} = join(' ', @cmd);
}
}
if ($self->{PARENT}) {
$self->{PARENT}->{CHILDREN}->{$newclass} = $self;
foreach my $opt (qw(POLLUTE PERL_CORE LINKTYPE)) {
if (exists $self->{PARENT}->{$opt}
and not exists $self->{$opt})
{
# inherit, but only if already unspecified
$self->{$opt} = $self->{PARENT}->{$opt};
}
}
}
my @fm = grep /^FIRST_MAKEFILE=/, @ARGV;
parse_args($self,@fm) if @fm;
} else {
parse_args($self,split(' ', $ENV{PERL_MM_OPT} || ''),@ARGV);
}
$self->{NAME} ||= $self->guess_name;
($self->{NAME_SYM} = $self->{NAME}) =~ s/\W+/_/g;
# Run every initialization method; order matters as later init_* calls
# read values established by earlier ones.
$self->init_MAKE;
$self->init_main;
$self->init_VERSION;
$self->init_dist;
$self->init_INST;
$self->init_INSTALL;
$self->init_DEST;
$self->init_dirscan;
$self->init_PM;
$self->init_MANPODS;
$self->init_xs;
$self->init_PERL;
$self->init_DIRFILESEP;
$self->init_linker;
$self->init_ABSTRACT;
$self->arch_check(
$INC{'Config.pm'},
$self->catfile($Config{'archlibexp'}, "Config.pm")
);
$self->init_tools();
$self->init_others();
$self->init_platform();
$self->init_PERM();
my($argv) = neatvalue(\@ARGV);
$argv =~ s/^\[/(/;
$argv =~ s/\]$/)/;
push @{$self->{RESULT}}, <<END;
# This Makefile is for the $self->{NAME} extension to perl.
#
# It was generated automatically by MakeMaker version
# $VERSION (Revision: $Revision) from the contents of
# Makefile.PL. Don't edit this file, edit Makefile.PL instead.
#
# ANY CHANGES MADE HERE WILL BE LOST!
#
# MakeMaker ARGV: $argv
#
END
push @{$self->{RESULT}}, $self->_MakeMaker_Parameters_section(\%initial_att);
if (defined $self->{CONFIGURE}) {
push @{$self->{RESULT}}, <<END;
# MakeMaker 'CONFIGURE' Parameters:
END
if (scalar(keys %configure_att) > 0) {
foreach my $key (sort keys %configure_att){
next if $key eq 'ARGS';
my($v) = neatvalue($configure_att{$key});
$v =~ s/(CODE|HASH|ARRAY|SCALAR)\([\dxa-f]+\)/$1\(...\)/;
$v =~ tr/\n/ /s;
push @{$self->{RESULT}}, "# $key => $v";
}
}
else
{
push @{$self->{RESULT}}, "# no values returned";
}
undef %configure_att; # free memory
}
# turn the SKIP array into a SKIPHASH hash
for my $skip (@{$self->{SKIP} || []}) {
$self->{SKIPHASH}{$skip} = 1;
}
delete $self->{SKIP}; # free memory
if ($self->{PARENT}) {
for (qw/install dist dist_basics dist_core distdir dist_test dist_ci/) {
$self->{SKIPHASH}{$_} = 1;
}
}
# We run all the subdirectories now. They don't have much to query
# from the parent, but the parent has to query them: if they need linking!
unless ($self->{NORECURS}) {
$self->eval_in_subdirs if @{$self->{DIR}};
}
# Finally render each Makefile section (or note it as skipped).
foreach my $section ( @MM_Sections ){
# Support for new foo_target() methods.
my $method = $section;
$method .= '_target' unless $self->can($method);
print "Processing Makefile '$section' section\n" if ($Verbose >= 2);
my($skipit) = $self->skipcheck($section);
if ($skipit){
push @{$self->{RESULT}}, "\n# --- MakeMaker $section section $skipit.";
} else {
my(%a) = %{$self->{$section} || {}};
push @{$self->{RESULT}}, "\n# --- MakeMaker $section section:";
push @{$self->{RESULT}}, "# " . join ", ", %a if $Verbose && %a;
push @{$self->{RESULT}}, $self->maketext_filter(
$self->$method( %a )
);
}
}
push @{$self->{RESULT}}, "\n# End.";
$self;
}
# Write a stub Makefile whose standard targets (all, clean, install,
# makemakerdflt, test) are all no-ops. The previous Makefile, if any, is
# rotated to MAKEFILE_OLD first.
sub WriteEmptyMakefile {
croak "WriteEmptyMakefile: Need an even number of args" if @_ % 2;
my %att = @_;
my $self = MM->new(\%att);
my $new = $self->{MAKEFILE};
my $old = $self->{MAKEFILE_OLD};
if (-f $old) {
_unlink($old) or warn "unlink $old: $!";
}
if ( -f $new ) {
_rename($new, $old) or warn "rename $new => $old: $!"
}
open my $mfh, '>', $new or die "open $new for write: $!";
print $mfh <<'EOP';
all :
clean :
install :
makemakerdflt :
test :
EOP
close $mfh or die "close $new for write: $!";
}
=begin private
=head3 _installed_file_for_module
my $file = MM->_installed_file_for_module($module);
Return the first installed .pm $file associated with the $module. The
one which will show up when you C<use $module>.
$module is something like "strict" or "Test::More".
=end private
=cut
# Return the path of the first readable .pm file in @INC implementing
# $prereq (e.g. "Test::More"), i.e. the file `use $prereq` would load.
# Returns undef when the module is not installed. $class is unused.
sub _installed_file_for_module {
    my ($class, $prereq) = @_;

    (my $relative = "$prereq.pm") =~ s{::}{/}g;

    my $path;
    DIR: for my $dir (@INC) {
        my $candidate = File::Spec->catfile($dir, $relative);
        if (-r $candidate) {
            $path = $candidate;
            last DIR;
        }
    }

    return $path;
}
# Extracted from MakeMaker->new so we can test it
# Render the "# MakeMaker Parameters:" comment section for the generated
# Makefile: one "# KEY => value" line per attribute, sorted by key.
# PREREQ_PM is special-cased to also fold in BUILD_REQUIRES and
# TEST_REQUIRES, because CPAN.pm reads prereqs only from that field.
# FIX: the original computed `my ($v) = neatvalue($att->{$key});` up
# front, but both branches of the if/else below unconditionally
# overwrite $v -- the initial call was dead work and has been removed.
sub _MakeMaker_Parameters_section {
    my $self = shift;
    my $att  = shift;

    my @result = <<'END';
# MakeMaker Parameters:
END

    foreach my $key (sort keys %$att){
        next if $key eq 'ARGS';

        my $v;
        if ($key eq 'PREREQ_PM') {
            # CPAN.pm takes prereqs from this field in 'Makefile'
            # and does not know about BUILD_REQUIRES
            $v = neatvalue({
                %{ $att->{PREREQ_PM}      || {} },
                %{ $att->{BUILD_REQUIRES} || {} },
                %{ $att->{TEST_REQUIRES}  || {} },
            });
        } else {
            $v = neatvalue($att->{$key});
        }

        # Hide memory addresses and flatten newlines so the output is stable.
        $v =~ s/(CODE|HASH|ARRAY|SCALAR)\([\dxa-f]+\)/$1\(...\)/;
        $v =~ tr/\n/ /s;
        push @result, "# $key => $v";
    }

    return @result;
}
# Compare the distribution against its MANIFEST and report -- without
# failing -- any files that MANIFEST lists but are absent on disk.
sub check_manifest {
    print "Checking if your kit is complete...\n";
    require ExtUtils::Manifest;
    # The doubled assignment also silences "used only once" warnings.
    $ExtUtils::Manifest::Quiet = $ExtUtils::Manifest::Quiet = 1;

    my @missed = ExtUtils::Manifest::manicheck();
    unless (@missed) {
        print "Looks good\n";
        return;
    }

    print "Warning: the following files are missing in your kit:\n";
    print "\t", join "\n\t", @missed;
    print "\n";
    print "Please inform the author.\n";
}
# Parse command-line style NAME=value arguments into $self (and record
# them in $self->{ARGS}). Performs tilde expansion on values, migrates
# several obsolete attribute spellings (potential_libs, ARMAYBE,
# LDTARGET) to their modern equivalents, splits colon/space separated
# list attributes given as strings, and warns about unknown keys.
sub parse_args{
my($self, @args) = @_;
foreach (@args) {
unless (m/(.*?)=(.*)/) {
# A bare word starting with "verb" bumps verbosity.
++$Verbose if m/^verb/;
next;
}
my($name, $value) = ($1, $2);
if ($value =~ m/^~(\w+)?/) { # tilde with optional username
# ~user -> user's home dir (or literal ~user if unknown); ~ -> our home.
$value =~ s [^~(\w*)]
[$1 ?
((getpwnam($1))[7] || "~$1") :
(getpwuid($>))[7]
]ex;
}
# Remember the original args passed it. It will be useful later.
$self->{ARGS}{uc $name} = $self->{uc $name} = $value;
}
# catch old-style 'potential_libs' and inform user how to 'upgrade'
if (defined $self->{potential_libs}){
my($msg)="'potential_libs' => '$self->{potential_libs}' should be";
if ($self->{potential_libs}){
print "$msg changed to:\n\t'LIBS' => ['$self->{potential_libs}']\n";
} else {
print "$msg deleted.\n";
}
$self->{LIBS} = [$self->{potential_libs}];
delete $self->{potential_libs};
}
# catch old-style 'ARMAYBE' and inform user how to 'upgrade'
if (defined $self->{ARMAYBE}){
my($armaybe) = $self->{ARMAYBE};
print "ARMAYBE => '$armaybe' should be changed to:\n",
"\t'dynamic_lib' => {ARMAYBE => '$armaybe'}\n";
my(%dl) = %{$self->{dynamic_lib} || {}};
$self->{dynamic_lib} = { %dl, ARMAYBE => $armaybe};
delete $self->{ARMAYBE};
}
if (defined $self->{LDTARGET}){
print "LDTARGET should be changed to LDFROM\n";
$self->{LDFROM} = $self->{LDTARGET};
delete $self->{LDTARGET};
}
# Turn a DIR argument on the command line into an array
if (defined $self->{DIR} && ref \$self->{DIR} eq 'SCALAR') {
# So they can choose from the command line, which extensions they want
# the grep enables them to have some colons too much in case they
# have to build a list with the shell
$self->{DIR} = [grep $_, split ":", $self->{DIR}];
}
# Turn a INCLUDE_EXT argument on the command line into an array
if (defined $self->{INCLUDE_EXT} && ref \$self->{INCLUDE_EXT} eq 'SCALAR') {
$self->{INCLUDE_EXT} = [grep $_, split '\s+', $self->{INCLUDE_EXT}];
}
# Turn a EXCLUDE_EXT argument on the command line into an array
if (defined $self->{EXCLUDE_EXT} && ref \$self->{EXCLUDE_EXT} eq 'SCALAR') {
$self->{EXCLUDE_EXT} = [grep $_, split '\s+', $self->{EXCLUDE_EXT}];
}
# Last pass: warn about any key that is not a recognized attribute.
foreach my $mmkey (sort keys %$self){
next if $mmkey eq 'ARGS';
print " $mmkey => ", neatvalue($self->{$mmkey}), "\n" if $Verbose;
print "'$mmkey' is not a known MakeMaker parameter name.\n"
unless exists $Recognized_Att_Keys{$mmkey};
}
$| = 1 if $Verbose;
}
# Look for the best-matching platform hints file under ./hints and run
# it. The candidate name starts as "<os>_<osvers>" with dots mapped to
# underscores; trailing "_component" parts are stripped one at a time
# until a matching hints/<name>.pl exists or nothing is left to strip.
sub check_hints {
my($self) = @_;
# We allow extension-specific hints files.
require File::Spec;
my $curdir = File::Spec->curdir;
my $hint_dir = File::Spec->catdir($curdir, "hints");
return unless -d $hint_dir;
# First we look for the best hintsfile we have
my($hint)="${^O}_$Config{osvers}";
$hint =~ s/\./_/g;
$hint =~ s/_$//;
return unless $hint;
# Also try without trailing minor version numbers.
while (1) {
last if -f File::Spec->catfile($hint_dir, "$hint.pl"); # found
} continue {
last unless $hint =~ s/_[^_]*$//; # nothing to cut off
}
my $hint_file = File::Spec->catfile($hint_dir, "$hint.pl");
return unless -f $hint_file; # really there
_run_hintfile($self, $hint_file);
}
# Execute a hints file via do(), exposing the MakeMaker object to it as
# the package variable $self (hence the `local our`). A failing hints
# file only warns; it never aborts the Makefile.PL run.
sub _run_hintfile {
our $self;
local($self) = shift; # make $self available to the hint file.
my($hint_file) = shift;
local($@, $!);
warn "Processing hints file $hint_file\n";
# Just in case the ./ isn't on the hint file, which File::Spec can
# often strip off, we bung the curdir into @INC
local @INC = (File::Spec->curdir, @INC);
my $ret = do $hint_file;
if( !defined $ret ) {
my $error = $@ || $!;
warn $error;
}
}
# Move every overridable MY:: method out of package $from into package
# $to (the per-object PACKnnn class), leaving behind a SUPER-delegating
# stub so code that still pokes at %MY:: keeps working.
sub mv_all_methods {
my($from,$to) = @_;
# Here you see the *current* list of methods that are overridable
# from Makefile.PL via MY:: subroutines. As of VERSION 5.07 I'm
# still trying to reduce the list to some reasonable minimum --
# because I want to make it easier for the user. A.K.
local $SIG{__WARN__} = sub {
# can't use 'no warnings redefined', 5.6 only
warn @_ unless $_[0] =~ /^Subroutine .* redefined/
};
foreach my $method (@Overridable) {
# We cannot say "next" here. Nick might call MY->makeaperl
# which isn't defined right now
# Above statement was written at 4.23 time when Tk-b8 was
# around. As Tk-b9 only builds with 5.002something and MM 5 is
# standard, we try to enable the next line again. It was
# commented out until MM 5.23
next unless defined &{"${from}::$method"};
{
no strict 'refs'; ## no critic
*{"${to}::$method"} = \&{"${from}::$method"};
# If we delete a method, then it will be undefined and cannot
# be called. But as long as we have Makefile.PLs that rely on
# %MY:: being intact, we have to fill the hole with an
# inheriting method:
{
package MY;
my $super = "SUPER::".$method;
*{$method} = sub {
shift->$super(@_);
};
}
}
}
# We have to clean out %INC also, because the current directory is
# changed frequently and Graham Barr prefers to get his version
# out of a History.pl file which is "required" so woudn't get
# loaded again in another extension requiring a History.pl
# With perl5.002_01 the deletion of entries in %INC caused Tk-b11
# to core dump in the middle of a require statement. The required
# file was Tk/MMutil.pm. The consequence is, we have to be
# extremely careful when we try to give perl a reason to reload a
# library with same name. The workaround prefers to drop nothing
# from %INC and teach the writers not to use such libraries.
# my $inc;
# foreach $inc (keys %INC) {
# #warn "***$inc*** deleted";
# delete $INC{$inc};
# }
}
# Report whether a Makefile section is skipped ('skipped' or '').  When
# verbose, warn about sections whose prerequisite sections are skipped.
sub skipcheck {
    my ($self, $section) = @_;

    # Map a section to its printable target label and the prerequisite
    # sections whose skipping would leave its targets dangling.
    my %label_and_deps = (
        dynamic     => [ q{'dynamic'},         qw(dynamic_bs dynamic_lib) ],
        dynamic_lib => [ q{'$(INST_DYNAMIC)'}, qw(dynamic_bs) ],
        static      => [ q{'static'},          qw(static_lib) ],
    );

    if (my $info = $label_and_deps{$section}) {
        my ($label, @deps) = @$info;
        for my $dep (@deps) {
            print "Warning (non-fatal): Target $label depends on targets ",
              "in skipped section '$dep'\n"
                if $self->{SKIPHASH}{$dep} && $Verbose;
        }
    }

    return $self->{SKIPHASH}{$section} ? 'skipped' : '';
}
# Write the accumulated Makefile text ($self->{RESULT}) to disk via a
# temp file + rename, optionally emit MYMETA files, then free most of
# the object's keys to save memory.
sub flush {
my $self = shift;
my $finalname = $self->{MAKEFILE};
print "Writing $finalname for $self->{NAME}\n";
# Remove stale outputs first; on VMS the Makefile is 'Descrip.MMS'.
unlink($finalname, "MakeMaker.tmp", $Is_VMS ? 'Descrip.MMS' : ());
open(my $fh,">", "MakeMaker.tmp")
or die "Unable to open MakeMaker.tmp: $!";
for my $chunk (@{$self->{RESULT}}) {
print $fh "$chunk\n"
or die "Can't write to MakeMaker.tmp: $!";
}
# close() can surface buffered write errors, so it is checked too.
close $fh
or die "Can't write to MakeMaker.tmp: $!";
# Rename over the final name so an aborted run never leaves a
# half-written Makefile behind.
_rename("MakeMaker.tmp", $finalname) or
warn "rename MakeMaker.tmp => $finalname: $!";
chmod 0644, $finalname unless $Is_VMS;
unless ($self->{NO_MYMETA}) {
# Write MYMETA.yml to communicate metadata up to the CPAN clients
if ( $self->write_mymeta( $self->mymeta ) ) {
print "Writing MYMETA.yml and MYMETA.json\n";
}
}
# For child objects, drop everything except the keys that later
# linking decisions still need.
my %keep = map { ($_ => 1) } qw(NEEDS_LINKING HAS_LINK_CODE);
if ($self->{PARENT} && !$self->{_KEEP_AFTER_FLUSH}) {
foreach (keys %$self) { # safe memory
delete $self->{$_} unless $keep{$_};
}
}
# $Config{eunicefix} is ":" (a no-op) on most systems; otherwise it
# post-processes the generated file.
system("$Config::Config{eunicefix} $finalname") unless $Config::Config{eunicefix} eq ":";
}
# This is a rename for OS's where the target must be unlinked first.
# Rename $src over $dest for OS's where the target must be unlinked
# first: make any existing target writable, remove it, then rename.
sub _rename {
    my ($from, $to) = @_;
    chmod 0666, $to;    # best-effort: target may not exist yet
    unlink $to;
    return rename $from, $to;
}
# This is an unlink for OS's where the target must be writable first.
# Unlink for OS's where the target must be writable first; returns the
# number of files removed, like unlink().
sub _unlink {
    my @targets = @_;
    chmod 0666, @targets;
    return unlink @targets;
}
# The following mkbootstrap() is only for installations that are calling
# the pre-4.1 mkbootstrap() from their old Makefiles. This MakeMaker
# writes Makefiles, that use ExtUtils::Mkbootstrap directly.
# Only reachable from pre-4.1 Makefiles that call mkbootstrap() directly;
# modern MakeMaker-generated Makefiles use ExtUtils::Mkbootstrap instead.
# Always fatal: such a Makefile must be regenerated.
sub mkbootstrap {
    my $message = <<'END';
!!! Your Makefile has been built such a long time ago, !!!
!!! that is unlikely to work with current MakeMaker. !!!
!!! Please rebuild your Makefile !!!
END
    die $message;
}
# Ditto for mksymlists() as of MakeMaker 5.17
# Only reachable from Makefiles older than MakeMaker 5.17 that call
# mksymlists() directly.  Always fatal: the Makefile must be rebuilt.
sub mksymlists {
    my $message = <<'END';
!!! Your Makefile has been built such a long time ago, !!!
!!! that is unlikely to work with current MakeMaker. !!!
!!! Please rebuild your Makefile !!!
END
    die $message;
}
# Render a value as Perl-ish source text for inclusion in generated
# files: undef => "undef", plain scalars => q[...], array refs => [...],
# hash refs => { key=>..., ... }; any other ref is stringified.
#
# Fix: array elements that are themselves references are now rendered
# recursively, matching the HASH branch, instead of stringifying to a
# nondeterministic address like q[ARRAY(0x...)].  Output for non-ref
# elements is unchanged.
sub neatvalue {
    my ($v) = @_;
    return "undef" unless defined $v;
    my $t = ref $v;
    return "q[$v]" unless $t;
    if ($t eq 'ARRAY') {
        my @neat = map { ref $_ ? neatvalue($_) : "q[$_]" } @$v;
        return "[" . join(", ", @neat) . "]";
    }
    return "$v" unless $t eq 'HASH';
    my @m;
    while (my ($key, $val) = each %$v) {
        last unless defined $key; # cautious programming in case (undef,undef) is true
        push @m, "$key=>" . neatvalue($val);
    }
    return "{ " . join(', ', @m) . " }";
}
# Look for weird version numbers, warn about them and set them to 0
# before CPAN::Meta chokes.
# Look for weird version numbers in the prereq hash $self->{$key}, warn
# about them, and set them to 0 before CPAN::Meta chokes on them.
#
# Fixes: (1) an undef version no longer interpolates into the carp
# message (which itself raised an "uninitialized value" warning);
# (2) a missing $key no longer crashes on dereferencing undef.
sub clean_versions {
    my ($self, $key) = @_;
    my $reqs = $self->{$key};
    return unless $reqs;    # nothing to clean if the key is absent
    for my $module (keys %$reqs) {
        my $version = $reqs->{$module};
        if ( !defined $version or $version !~ /^[\d_\.]+$/ ) {
            my $shown = defined $version ? $version : 'undef';
            carp "Unparsable version '$shown' for prerequisite $module";
            $reqs->{$module} = 0;
        }
    }
}
# In verbose mode, return a commented listing of every MakeMaker
# attribute and its neatvalue()-rendered value (one "# KEY => value"
# line each); returns an empty string when not verbose.
sub selfdocument {
    my ($self) = @_;
    my @lines;
    if ($Verbose) {
        push @lines, "\n# Full list of MakeMaker attribute values:";
        for my $attr (sort keys %$self) {
            # Skip the generated output and mixed-case internal keys.
            next if $attr eq 'RESULT' or $attr =~ /^[A-Z][a-z]/;
            my $shown = neatvalue($self->{$attr});
            # Hide memory addresses so the output is stable.
            $shown =~ s/(CODE|HASH|ARRAY|SCALAR)\([\dxa-f]+\)/$1\(...\)/;
            # Collapse newlines so each attribute stays on one line.
            $shown =~ tr/\n/ /s;
            push @lines, "# $attr => $shown";
        }
    }
    return join "\n", @lines;
}
1;
__END__
=head1 NAME
ExtUtils::MakeMaker - Create a module Makefile
=head1 SYNOPSIS
use ExtUtils::MakeMaker;
WriteMakefile(
NAME => "Foo::Bar",
VERSION_FROM => "lib/Foo/Bar.pm",
);
=head1 DESCRIPTION
This utility is designed to write a Makefile for an extension module
from a Makefile.PL. It is based on the Makefile.SH model provided by
Andy Dougherty and the perl5-porters.
It splits the task of generating the Makefile into several subroutines
that can be individually overridden. Each subroutine returns the text
it wishes to have written to the Makefile.
As there are various Make programs with incompatible syntax, which
use operating system shells, again with incompatible syntax, it is
important for users of this module to know which flavour of Make
a Makefile has been written for so they'll use the correct one and
won't have to face the possibly bewildering errors resulting from
using the wrong one.
On POSIX systems, that program will likely be GNU Make; on Microsoft
Windows, it will be either Microsoft NMake or DMake. Note that this
module does not support generating Makefiles for GNU Make on Windows.
See the section on the L</"MAKE"> parameter for details.
MakeMaker is object oriented. Each directory below the current
directory that contains a Makefile.PL is treated as a separate
object. This makes it possible to write an unlimited number of
Makefiles with a single invocation of WriteMakefile().
=head2 How To Write A Makefile.PL
See L<ExtUtils::MakeMaker::Tutorial>.
The long answer is the rest of the manpage :-)
=head2 Default Makefile Behaviour
The generated Makefile enables the user of the extension to invoke
perl Makefile.PL # optionally "perl Makefile.PL verbose"
make
make test # optionally set TEST_VERBOSE=1
make install # See below
The Makefile to be produced may be altered by adding arguments of the
form C<KEY=VALUE>. E.g.
perl Makefile.PL INSTALL_BASE=~
Other interesting targets in the generated Makefile are
make config # to check if the Makefile is up-to-date
make clean # delete local temp files (Makefile gets renamed)
make realclean # delete derived files (including ./blib)
make ci # check in all the files in the MANIFEST file
make dist # see below the Distribution Support section
=head2 make test
MakeMaker checks for the existence of a file named F<test.pl> in the
current directory, and if it exists it executes the script with the
proper set of perl C<-I> options.
MakeMaker also checks for any files matching glob("t/*.t"). It will
execute all matching files in alphabetical order via the
L<Test::Harness> module with the C<-I> switches set correctly.
If you'd like to see the raw output of your tests, set the
C<TEST_VERBOSE> variable to true.
make test TEST_VERBOSE=1
=head2 make testdb
A useful variation of the above is the target C<testdb>. It runs the
test under the Perl debugger (see L<perldebug>). If the file
F<test.pl> exists in the current directory, it is used for the test.
If you want to debug some other testfile, set the C<TEST_FILE> variable
thusly:
make testdb TEST_FILE=t/mytest.t
By default the debugger is called using C<-d> option to perl. If you
want to specify some other option, set the C<TESTDB_SW> variable:
make testdb TESTDB_SW=-Dx
=head2 make install
make alone puts all relevant files into directories that are named by
the macros INST_LIB, INST_ARCHLIB, INST_SCRIPT, INST_MAN1DIR and
INST_MAN3DIR. All these default to something below ./blib if you are
I<not> building below the perl source directory. If you I<are>
building below the perl source, INST_LIB and INST_ARCHLIB default to
../../lib, and INST_SCRIPT is not defined.
The I<install> target of the generated Makefile copies the files found
below each of the INST_* directories to their INSTALL*
counterparts. Which counterparts are chosen depends on the setting of
INSTALLDIRS according to the following table:
INSTALLDIRS set to
perl site vendor
PERLPREFIX SITEPREFIX VENDORPREFIX
INST_ARCHLIB INSTALLARCHLIB INSTALLSITEARCH INSTALLVENDORARCH
INST_LIB INSTALLPRIVLIB INSTALLSITELIB INSTALLVENDORLIB
INST_BIN INSTALLBIN INSTALLSITEBIN INSTALLVENDORBIN
INST_SCRIPT INSTALLSCRIPT INSTALLSITESCRIPT INSTALLVENDORSCRIPT
INST_MAN1DIR INSTALLMAN1DIR INSTALLSITEMAN1DIR INSTALLVENDORMAN1DIR
INST_MAN3DIR INSTALLMAN3DIR INSTALLSITEMAN3DIR INSTALLVENDORMAN3DIR
The INSTALL... macros in turn default to their %Config
($Config{installprivlib}, $Config{installarchlib}, etc.) counterparts.
You can check the values of these variables on your system with
perl '-V:install.*'
And to check the sequence in which the library directories are
searched by perl, run
perl -le 'print join $/, @INC'
Sometimes older versions of the module you're installing live in other
directories in @INC. Because Perl loads the first version of a module it
finds, not the newest, you might accidentally get one of these older
versions even after installing a brand new version. To delete I<all other
versions of the module you're installing> (not simply older ones) set the
C<UNINST> variable.
make install UNINST=1
=head2 INSTALL_BASE
INSTALL_BASE can be passed into Makefile.PL to change where your
module will be installed. INSTALL_BASE is more like what everyone
else calls "prefix" than PREFIX is.
To have everything installed in your home directory, do the following.
# Unix users, INSTALL_BASE=~ works fine
perl Makefile.PL INSTALL_BASE=/path/to/your/home/dir
Like PREFIX, it sets several INSTALL* attributes at once. Unlike
PREFIX it is easy to predict where the module will end up. The
installation pattern looks like this:
INSTALLARCHLIB INSTALL_BASE/lib/perl5/$Config{archname}
INSTALLPRIVLIB INSTALL_BASE/lib/perl5
INSTALLBIN INSTALL_BASE/bin
INSTALLSCRIPT INSTALL_BASE/bin
INSTALLMAN1DIR INSTALL_BASE/man/man1
INSTALLMAN3DIR INSTALL_BASE/man/man3
INSTALL_BASE in MakeMaker and C<--install_base> in Module::Build (as
of 0.28) install to the same location. If you want MakeMaker and
Module::Build to install to the same location simply set INSTALL_BASE
and C<--install_base> to the same location.
INSTALL_BASE was added in 6.31.
=head2 PREFIX and LIB attribute
PREFIX and LIB can be used to set several INSTALL* attributes in one
go. Here's an example for installing into your home directory.
# Unix users, PREFIX=~ works fine
perl Makefile.PL PREFIX=/path/to/your/home/dir
This will install all files in the module under your home directory,
with man pages and libraries going into an appropriate place (usually
~/man and ~/lib). How the exact location is determined is complicated
and depends on how your Perl was configured. INSTALL_BASE works more
like what other build systems call "prefix" than PREFIX and we
recommend you use that instead.
Another way to specify many INSTALL directories with a single
parameter is LIB.
perl Makefile.PL LIB=~/lib
This will install the module's architecture-independent files into
~/lib, the architecture-dependent files into ~/lib/$archname.
Note, that in both cases the tilde expansion is done by MakeMaker, not
by perl by default, nor by make.
Conflicts between parameters LIB, PREFIX and the various INSTALL*
arguments are resolved so that:
=over 4
=item *
setting LIB overrides any setting of INSTALLPRIVLIB, INSTALLARCHLIB,
INSTALLSITELIB, INSTALLSITEARCH (and they are not affected by PREFIX);
=item *
without LIB, setting PREFIX replaces the initial C<$Config{prefix}>
part of those INSTALL* arguments, even if the latter are explicitly
set (but are set to still start with C<$Config{prefix}>).
=back
If the user has superuser privileges, and is not working on AFS or
relatives, then the defaults for INSTALLPRIVLIB, INSTALLARCHLIB,
INSTALLSCRIPT, etc. will be appropriate, and this incantation will be
the best:
perl Makefile.PL;
make;
make test
make install
make install by default writes some documentation of what has been
done into the file C<$(INSTALLARCHLIB)/perllocal.pod>. This feature
can be bypassed by calling make pure_install.
=head2 AFS users
will have to specify the installation directories as these most
probably have changed since perl itself has been installed. They will
have to do this by calling
perl Makefile.PL INSTALLSITELIB=/afs/here/today \
INSTALLSCRIPT=/afs/there/now INSTALLMAN3DIR=/afs/for/manpages
make
Be careful to repeat this procedure every time you recompile an
extension, unless you are sure the AFS installation directories are
still valid.
=head2 Static Linking of a new Perl Binary
An extension that is built with the above steps is ready to use on
systems supporting dynamic loading. On systems that do not support
dynamic loading, any newly created extension has to be linked together
with the available resources. MakeMaker supports the linking process
by creating appropriate targets in the Makefile whenever an extension
is built. You can invoke the corresponding section of the makefile with
make perl
That produces a new perl binary in the current directory with all
extensions linked in that can be found in INST_ARCHLIB, SITELIBEXP,
and PERL_ARCHLIB. To do that, MakeMaker writes a new Makefile, on
UNIX, this is called F<Makefile.aperl> (may be system dependent). If you
want to force the creation of a new perl, it is recommended that you
delete this F<Makefile.aperl>, so the directories are searched through
for linkable libraries again.
The binary can be installed into the directory where perl normally
resides on your machine with
make inst_perl
To produce a perl binary with a different name than C<perl>, either say
perl Makefile.PL MAP_TARGET=myperl
make myperl
make inst_perl
or say
perl Makefile.PL
make myperl MAP_TARGET=myperl
make inst_perl MAP_TARGET=myperl
In any case you will be prompted with the correct invocation of the
C<inst_perl> target that installs the new binary into INSTALLBIN.
make inst_perl by default writes some documentation of what has been
done into the file C<$(INSTALLARCHLIB)/perllocal.pod>. This
can be bypassed by calling make pure_inst_perl.
Warning: the inst_perl: target will most probably overwrite your
existing perl binary. Use with care!
Sometimes you might want to build a statically linked perl although
your system supports dynamic loading. In this case you may explicitly
set the linktype with the invocation of the Makefile.PL or make:
perl Makefile.PL LINKTYPE=static # recommended
or
make LINKTYPE=static # works on most systems
=head2 Determination of Perl Library and Installation Locations
MakeMaker needs to know, or to guess, where certain things are
located. Especially INST_LIB and INST_ARCHLIB (where to put the files
during the make(1) run), PERL_LIB and PERL_ARCHLIB (where to read
existing modules from), and PERL_INC (header files and C<libperl*.*>).
Extensions may be built either using the contents of the perl source
directory tree or from the installed perl library. The recommended way
is to build extensions after you have run 'make install' on perl
itself. You can do that in any directory on your hard disk that is not
below the perl source tree. The support for extensions below the ext
directory of the perl distribution is only good for the standard
extensions that come with perl.
If an extension is being built below the C<ext/> directory of the perl
source then MakeMaker will set PERL_SRC automatically (e.g.,
C<../..>). If PERL_SRC is defined and the extension is recognized as
a standard extension, then other variables default to the following:
PERL_INC = PERL_SRC
PERL_LIB = PERL_SRC/lib
PERL_ARCHLIB = PERL_SRC/lib
INST_LIB = PERL_LIB
INST_ARCHLIB = PERL_ARCHLIB
If an extension is being built away from the perl source then MakeMaker
will leave PERL_SRC undefined and default to using the installed copy
of the perl library. The other variables default to the following:
PERL_INC = $archlibexp/CORE
PERL_LIB = $privlibexp
PERL_ARCHLIB = $archlibexp
INST_LIB = ./blib/lib
INST_ARCHLIB = ./blib/arch
If perl has not yet been installed then PERL_SRC can be defined on the
command line as shown in the previous section.
=head2 Which architecture dependent directory?
If you don't want to keep the defaults for the INSTALL* macros,
MakeMaker helps you to minimize the typing needed: the usual
relationship between INSTALLPRIVLIB and INSTALLARCHLIB is determined
by Configure at perl compilation time. MakeMaker supports the user who
sets INSTALLPRIVLIB. If INSTALLPRIVLIB is set, but INSTALLARCHLIB not,
then MakeMaker defaults the latter to be the same subdirectory of
INSTALLPRIVLIB as Configure decided for the counterparts in %Config,
otherwise it defaults to INSTALLPRIVLIB. The same relationship holds
for INSTALLSITELIB and INSTALLSITEARCH.
MakeMaker gives you much more freedom than needed to configure
internal variables and get different results. It is worth mentioning
that make(1) also lets you configure most of the variables that are
used in the Makefile. But in the majority of situations this will not
be necessary, and should only be done if the author of a package
recommends it (or you know what you're doing).
=head2 Using Attributes and Parameters
The following attributes may be specified as arguments to WriteMakefile()
or as NAME=VALUE pairs on the command line.
=over 2
=item ABSTRACT
One line description of the module. Will be included in PPD file.
=item ABSTRACT_FROM
Name of the file that contains the package description. MakeMaker looks
for a line in the POD matching /^($package\s-\s)(.*)/. This is typically
the first line in the "=head1 NAME" section. $2 becomes the abstract.
=item AUTHOR
Array of strings containing name (and email address) of package author(s).
Is used in CPAN Meta files (META.yml or META.json) and PPD
(Perl Package Description) files for PPM (Perl Package Manager).
=item BINARY_LOCATION
Used when creating PPD files for binary packages. It can be set to a
full or relative path or URL to the binary archive for a particular
architecture. For example:
perl Makefile.PL BINARY_LOCATION=x86/Agent.tar.gz
builds a PPD package that references a binary of the C<Agent> package,
located in the C<x86> directory relative to the PPD itself.
=item BUILD_REQUIRES
A hash of modules that are needed to build your module but not run it.
This will go into the C<build_requires> field of your CPAN Meta file.
(F<META.yml> or F<META.json>).
The format is the same as PREREQ_PM.
=item C
Ref to array of *.c file names. Initialised from a directory scan
and the values portion of the XS attribute hash. This is not
currently used by MakeMaker but may be handy in Makefile.PLs.
=item CCFLAGS
String that will be included in the compiler call command line between
the arguments INC and OPTIMIZE.
The default value is taken from $Config{ccflags}. When overriding
CCFLAGS, make sure to include the $Config{ccflags} settings to avoid
binary incompatibilities.
=item CONFIG
Arrayref. E.g. [qw(archname manext)] defines ARCHNAME & MANEXT from
config.sh. MakeMaker will add to CONFIG the following values anyway:
ar
cc
cccdlflags
ccdlflags
dlext
dlsrc
ld
lddlflags
ldflags
libc
lib_ext
obj_ext
ranlib
sitelibexp
sitearchexp
so
=item CONFIGURE
CODE reference. The subroutine should return a hash reference. The
hash may contain further attributes, e.g. {LIBS =E<gt> ...}, that have to
be determined by some evaluation method.
=item CONFIGURE_REQUIRES
A hash of modules that are required to run Makefile.PL itself, but not
to run your distribution.
This will go into the C<configure_requires> field of your CPAN Meta file
(F<META.yml> or F<META.json>)
Defaults to C<<< { "ExtUtils::MakeMaker" => 0 } >>>
The format is the same as PREREQ_PM.
=item DEFINE
Something like C<"-DHAVE_UNISTD_H">
=item DESTDIR
This is the root directory into which the code will be installed. It
I<prepends itself to the normal prefix>. For example, if your code
would normally go into F</usr/local/lib/perl> you could set DESTDIR=~/tmp/
and installation would go into F<~/tmp/usr/local/lib/perl>.
This is primarily of use for people who repackage Perl modules.
NOTE: Due to the nature of make, it is important that you put the trailing
slash on your DESTDIR. F<~/tmp/> not F<~/tmp>.
=item DIR
Ref to array of subdirectories containing Makefile.PLs e.g. ['sdbm']
in ext/SDBM_File
=item DISTNAME
A safe filename for the package.
Defaults to NAME below but with :: replaced with -.
For example, Foo::Bar becomes Foo-Bar.
=item DISTVNAME
Your name for distributing the package with the version number
included. This is used by 'make dist' to name the resulting archive
file.
Defaults to DISTNAME-VERSION.
For example, version 1.04 of Foo::Bar becomes Foo-Bar-1.04.
On some OS's where . has special meaning VERSION_SYM may be used in
place of VERSION.
=item DL_FUNCS
Hashref of symbol names for routines to be made available as universal
symbols. Each key/value pair consists of the package name and an
array of routine names in that package. Used only under AIX, OS/2,
VMS and Win32 at present. The routine names supplied will be expanded
in the same way as XSUB names are expanded by the XS() macro.
Defaults to
{"$(NAME)" => ["boot_$(NAME)" ] }
e.g.
{"RPC" => [qw( boot_rpcb rpcb_gettime getnetconfigent )],
"NetconfigPtr" => [ 'DESTROY'] }
Please see the L<ExtUtils::Mksymlists> documentation for more information
about the DL_FUNCS, DL_VARS and FUNCLIST attributes.
=item DL_VARS
Array of symbol names for variables to be made available as universal symbols.
Used only under AIX, OS/2, VMS and Win32 at present. Defaults to [].
(e.g. [ qw(Foo_version Foo_numstreams Foo_tree ) ])
=item EXCLUDE_EXT
Array of extension names to exclude when doing a static build. This
is ignored if INCLUDE_EXT is present. Consult INCLUDE_EXT for more
details. (e.g. [ qw( Socket POSIX ) ] )
This attribute may be most useful when specified as a string on the
command line: perl Makefile.PL EXCLUDE_EXT='Socket Safe'
=item EXE_FILES
Ref to array of executable files. The files will be copied to the
INST_SCRIPT directory. Make realclean will delete them from there
again.
If your executables start with something like #!perl or
#!/usr/bin/perl MakeMaker will change this to the path of the perl
'Makefile.PL' was invoked with so the programs will be sure to run
properly even if perl is not in /usr/bin/perl.
=item FIRST_MAKEFILE
The name of the Makefile to be produced. This is used for the second
Makefile that will be produced for the MAP_TARGET.
Defaults to 'Makefile' or 'Descrip.MMS' on VMS.
(Note: we couldn't use MAKEFILE because dmake uses this for something
else).
=item FULLPERL
Perl binary able to run this extension, load XS modules, etc...
=item FULLPERLRUN
Like PERLRUN, except it uses FULLPERL.
=item FULLPERLRUNINST
Like PERLRUNINST, except it uses FULLPERL.
=item FUNCLIST
This provides an alternate means to specify function names to be
exported from the extension. Its value is a reference to an
array of function names to be exported by the extension. These
names are passed through unaltered to the linker options file.
=item H
Ref to array of *.h file names. Similar to C.
=item IMPORTS
This attribute is used to specify names to be imported into the
extension. Takes a hash ref.
It is only used on OS/2 and Win32.
=item INC
Include file dirs eg: C<"-I/usr/5include -I/path/to/inc">
=item INCLUDE_EXT
Array of extension names to be included when doing a static build.
MakeMaker will normally build with all of the installed extensions when
doing a static build, and that is usually the desired behavior. If
INCLUDE_EXT is present then MakeMaker will build only with those extensions
which are explicitly mentioned. (e.g. [ qw( Socket POSIX ) ])
It is not necessary to mention DynaLoader or the current extension when
filling in INCLUDE_EXT. If the INCLUDE_EXT is mentioned but is empty then
only DynaLoader and the current extension will be included in the build.
This attribute may be most useful when specified as a string on the
command line: perl Makefile.PL INCLUDE_EXT='POSIX Socket Devel::Peek'
=item INSTALLARCHLIB
Used by 'make install', which copies files from INST_ARCHLIB to this
directory if INSTALLDIRS is set to perl.
=item INSTALLBIN
Directory to install binary files (e.g. tkperl) into if
INSTALLDIRS=perl.
=item INSTALLDIRS
Determines which of the sets of installation directories to choose:
perl, site or vendor. Defaults to site.
=item INSTALLMAN1DIR
=item INSTALLMAN3DIR
These directories get the man pages at 'make install' time if
INSTALLDIRS=perl. Defaults to $Config{installman*dir}.
If set to 'none', no man pages will be installed.
=item INSTALLPRIVLIB
Used by 'make install', which copies files from INST_LIB to this
directory if INSTALLDIRS is set to perl.
Defaults to $Config{installprivlib}.
=item INSTALLSCRIPT
Used by 'make install' which copies files from INST_SCRIPT to this
directory if INSTALLDIRS=perl.
=item INSTALLSITEARCH
Used by 'make install', which copies files from INST_ARCHLIB to this
directory if INSTALLDIRS is set to site (default).
=item INSTALLSITEBIN
Used by 'make install', which copies files from INST_BIN to this
directory if INSTALLDIRS is set to site (default).
=item INSTALLSITELIB
Used by 'make install', which copies files from INST_LIB to this
directory if INSTALLDIRS is set to site (default).
=item INSTALLSITEMAN1DIR
=item INSTALLSITEMAN3DIR
These directories get the man pages at 'make install' time if
INSTALLDIRS=site (default). Defaults to
$(SITEPREFIX)/man/man$(MAN*EXT).
If set to 'none', no man pages will be installed.
=item INSTALLSITESCRIPT
Used by 'make install' which copies files from INST_SCRIPT to this
directory if INSTALLDIRS is set to site (default).
=item INSTALLVENDORARCH
Used by 'make install', which copies files from INST_ARCHLIB to this
directory if INSTALLDIRS is set to vendor.
=item INSTALLVENDORBIN
Used by 'make install', which copies files from INST_BIN to this
directory if INSTALLDIRS is set to vendor.
=item INSTALLVENDORLIB
Used by 'make install', which copies files from INST_LIB to this
directory if INSTALLDIRS is set to vendor.
=item INSTALLVENDORMAN1DIR
=item INSTALLVENDORMAN3DIR
These directories get the man pages at 'make install' time if
INSTALLDIRS=vendor. Defaults to $(VENDORPREFIX)/man/man$(MAN*EXT).
If set to 'none', no man pages will be installed.
=item INSTALLVENDORSCRIPT
Used by 'make install' which copies files from INST_SCRIPT to this
directory if INSTALLDIRS is set to vendor.
=item INST_ARCHLIB
Same as INST_LIB for architecture dependent files.
=item INST_BIN
Directory to put real binary files during 'make'. These will be copied
to INSTALLBIN during 'make install'
=item INST_LIB
Directory where we put library files of this extension while building
it.
=item INST_MAN1DIR
Directory to hold the man pages at 'make' time
=item INST_MAN3DIR
Directory to hold the man pages at 'make' time
=item INST_SCRIPT
Directory where executable files should be installed during
'make'. Defaults to "./blib/script", just to have a dummy location during
testing. make install will copy the files in INST_SCRIPT to
INSTALLSCRIPT.
=item LD
Program to be used to link libraries for dynamic loading.
Defaults to $Config{ld}.
=item LDDLFLAGS
Any special flags that might need to be passed to ld to create a
shared library suitable for dynamic loading. It is up to the makefile
to use it. (See L<Config/lddlflags>)
Defaults to $Config{lddlflags}.
=item LDFROM
Defaults to "$(OBJECT)" and is used in the ld command to specify
what files to link/load from (also see dynamic_lib below for how to
specify ld flags)
=item LIB
LIB should only be set at C<perl Makefile.PL> time but is allowed as a
MakeMaker argument. It has the effect of setting both INSTALLPRIVLIB
and INSTALLSITELIB to that value regardless of any explicit setting of
those arguments (or of PREFIX). INSTALLARCHLIB and INSTALLSITEARCH
are set to the corresponding architecture subdirectory.
=item LIBPERL_A
The filename of the perl library that will be used together with this
extension. Defaults to libperl.a.
=item LIBS
An anonymous array of alternative library
specifications to be searched for (in order) until
at least one library is found. E.g.
'LIBS' => ["-lgdbm", "-ldbm -lfoo", "-L/path -ldbm.nfs"]
Mind, that any element of the array
contains a complete set of arguments for the ld
command. So do not specify
'LIBS' => ["-ltcl", "-ltk", "-lX11"]
See ODBM_File/Makefile.PL for an example, where an array is needed. If
you specify a scalar as in
'LIBS' => "-ltcl -ltk -lX11"
MakeMaker will turn it into an array with one element.
=item LICENSE
The licensing terms of your distribution. Generally it's "perl" for the
same license as Perl itself.
See L<Module::Build::API> for the list of options.
Defaults to "unknown".
=item LINKTYPE
'static' or 'dynamic' (default unless usedl=undef in
config.sh). Should only be used to force static linking (also see
linkext below).
=item MAKE
Variant of make you intend to run the generated Makefile with. This
parameter lets Makefile.PL know what make quirks to account for when
generating the Makefile.
MakeMaker also honors the MAKE environment variable. This parameter
takes precedence.
Currently the only significant values are 'dmake' and 'nmake' for Windows
users, instructing MakeMaker to generate a Makefile in the flavour of
DMake ("Dennis Vadura's Make") or Microsoft NMake respectively.
Defaults to $Config{make}, which may go looking for a Make program
in your environment.
How are you supposed to know what flavour of Make a Makefile has
been generated for if you didn't specify a value explicitly? Search
the generated Makefile for the definition of the MAKE variable,
which is used to recursively invoke the Make utility. That will tell
you what Make you're supposed to invoke the Makefile with.
=item MAKEAPERL
Boolean which tells MakeMaker that it should include the rules to
make a perl. This is handled automatically as a switch by
MakeMaker. The user normally does not need it.
=item MAKEFILE_OLD
When 'make clean' or similar is run, the $(FIRST_MAKEFILE) will be
backed up at this location.
Defaults to $(FIRST_MAKEFILE).old or $(FIRST_MAKEFILE)_old on VMS.
=item MAN1PODS
Hashref of pod-containing files. MakeMaker will default this to all
EXE_FILES files that include POD directives. The files listed
here will be converted to man pages and installed as was requested
at Configure time.
This hash should map POD files (or scripts containing POD) to the
man file names under the C<blib/man1/> directory, as in the following
example:
MAN1PODS => {
'doc/command.pod' => 'blib/man1/command.1',
'scripts/script.pl' => 'blib/man1/script.1',
}
=item MAN3PODS
Hashref that assigns to *.pm and *.pod files the files into which the
manpages are to be written. MakeMaker parses all *.pod and *.pm files
for POD directives. Files that contain POD will be the default keys of
the MAN3PODS hashref. These will then be converted to man pages during
C<make> and will be installed during C<make install>.
Example similar to MAN1PODS.
=item MAP_TARGET
If it is intended that a new perl binary be produced, this variable
may hold a name for that binary. Defaults to perl
=item META_ADD
=item META_MERGE
A hashref of items to add to the CPAN Meta file (F<META.yml> or
F<META.json>).
They differ in how they behave if they have the same key as the
default metadata. META_ADD will override the default value with its
own. META_MERGE will merge its value with the default.
Unless you want to override the defaults, prefer META_MERGE so as to
get the advantage of any future defaults.
=item MIN_PERL_VERSION
The minimum required version of Perl for this distribution.
Either the 5.006001 or the 5.6.1 format is acceptable.
=item MYEXTLIB
If the extension links to a library that it builds, set this to the
name of the library (see SDBM_File)
=item NAME
The package representing the distribution. For example, C<Test::More>
or C<ExtUtils::MakeMaker>. It will be used to derive information about
the distribution such as the L<DISTNAME>, installation locations
within the Perl library and where XS files will be looked for by
default (see L<XS>).
C<NAME> I<must> be a valid Perl package name and it I<must> have an
associated C<.pm> file. For example, C<Foo::Bar> is a valid C<NAME>
and there must exist F<Foo/Bar.pm>. Any XS code should be in
F<Bar.xs> unless stated otherwise.
Your distribution B<must> have a C<NAME>.
=item NEEDS_LINKING
MakeMaker will figure out if an extension contains linkable code
anywhere down the directory tree, and will set this variable
accordingly, but you can speed it up a very little bit if you define
this boolean variable yourself.
=item NOECHO
Command so make does not print the literal commands it's running.
By setting it to an empty string you can generate a Makefile that
prints all commands. Mainly used in debugging MakeMaker itself.
Defaults to C<@>.
=item NORECURS
Boolean. Attribute to inhibit descending into subdirectories.
=item NO_META
When true, suppresses the generation and addition to the MANIFEST of
the META.yml and META.json module meta-data files during 'make distdir'.
Defaults to false.
=item NO_MYMETA
When true, suppresses the generation of MYMETA.yml and MYMETA.json module
meta-data files during 'perl Makefile.PL'.
Defaults to false.
=item NO_VC
In general, any generated Makefile checks for the current version of
MakeMaker and the version the Makefile was built under. If NO_VC is
set, the version check is neglected. Do not write this into your
Makefile.PL, use it interactively instead.
=item OBJECT
List of object files, defaults to '$(BASEEXT)$(OBJ_EXT)', but can be a long
string containing all object files, e.g. "tkpBind.o
tkpButton.o tkpCanvas.o"
(Where BASEEXT is the last component of NAME, and OBJ_EXT is $Config{obj_ext}.)
=item OPTIMIZE
Defaults to C<-O>. Set it to C<-g> to turn debugging on. The flag is
passed to subdirectory makes.
=item PERL
Perl binary for tasks that can be done by miniperl.
=item PERL_CORE
Set only when MakeMaker is building the extensions of the Perl core
distribution.
=item PERLMAINCC
The call to the program that is able to compile perlmain.c. Defaults
to $(CC).
=item PERL_ARCHLIB
Same as for PERL_LIB, but for architecture dependent files.
Used only when MakeMaker is building the extensions of the Perl core
distribution (because normally $(PERL_ARCHLIB) is automatically in @INC,
and adding it would get in the way of PERL5LIB).
=item PERL_LIB
Directory containing the Perl library to use.
Used only when MakeMaker is building the extensions of the Perl core
distribution (because normally $(PERL_LIB) is automatically in @INC,
and adding it would get in the way of PERL5LIB).
=item PERL_MALLOC_OK
defaults to 0. Should be set to TRUE if the extension can work with
the memory allocation routines substituted by the Perl malloc() subsystem.
This should be applicable to most extensions with exceptions of those
=over 4
=item *
with bugs in memory allocations which are caught by Perl's malloc();
=item *
which interact with the memory allocator in other ways than via
malloc(), realloc(), free(), calloc(), sbrk() and brk();
=item *
which rely on special alignment which is not provided by Perl's malloc().
=back
B<NOTE.> Neglecting to set this flag in I<any one> of the loaded extensions
nullifies many advantages of Perl's malloc(), such as better usage of
system resources, error detection, memory usage reporting, catchable failure
of memory allocations, etc.
=item PERLPREFIX
Directory under which core modules are to be installed.
Defaults to $Config{installprefixexp}, falling back to
$Config{installprefix}, $Config{prefixexp} or $Config{prefix} should
$Config{installprefixexp} not exist.
Overridden by PREFIX.
=item PERLRUN
Use this instead of $(PERL) when you wish to run perl. It will set up
extra necessary flags for you.
=item PERLRUNINST
Use this instead of $(PERL) when you wish to run perl to work with
modules. It will add things like -I$(INST_ARCH) and other necessary
flags so perl can see the modules you're about to install.
=item PERL_SRC
Directory containing the Perl source code (use of this should be
avoided, it may be undefined)
=item PERM_DIR
Desired permission for directories. Defaults to C<755>.
=item PERM_RW
Desired permission for read/writable files. Defaults to C<644>.
=item PERM_RWX
Desired permission for executable files. Defaults to C<755>.
=item PL_FILES
MakeMaker can run programs to generate files for you at build time.
By default any file named *.PL (except Makefile.PL and Build.PL) in
the top level directory will be assumed to be a Perl program and run
passing its own basename in as an argument. For example...
perl foo.PL foo
This behavior can be overridden by supplying your own set of files to
search. PL_FILES accepts a hash ref, the key being the file to run
and the value is passed in as the first argument when the PL file is run.
PL_FILES => {'bin/foobar.PL' => 'bin/foobar'}
Would run bin/foobar.PL like this:
perl bin/foobar.PL bin/foobar
If multiple files from one program are desired an array ref can be used.
PL_FILES => {'bin/foobar.PL' => [qw(bin/foobar1 bin/foobar2)]}
In this case the program will be run multiple times using each target file.
perl bin/foobar.PL bin/foobar1
perl bin/foobar.PL bin/foobar2
PL files are normally run B<after> pm_to_blib and include INST_LIB and
INST_ARCH in their C<@INC>, so the just built modules can be
accessed... unless the PL file is making a module (or anything else in
PM) in which case it is run B<before> pm_to_blib and does not include
INST_LIB and INST_ARCH in its C<@INC>. This apparently odd behavior
is there for backwards compatibility (and it's somewhat DWIM).
=item PM
Hashref of .pm files and *.pl files to be installed. e.g.
{'name_of_file.pm' => '$(INST_LIBDIR)/install_as.pm'}
By default this will include *.pm and *.pl and the files found in
the PMLIBDIRS directories. Defining PM in the
Makefile.PL will override PMLIBDIRS.
=item PMLIBDIRS
Ref to array of subdirectories containing library files. Defaults to
[ 'lib', $(BASEEXT) ]. The directories will be scanned and I<any> files
they contain will be installed in the corresponding location in the
library. A libscan() method can be used to alter the behaviour.
Defining PM in the Makefile.PL will override PMLIBDIRS.
(Where BASEEXT is the last component of NAME.)
=item PM_FILTER
A filter program, in the traditional Unix sense (input from stdin, output
to stdout) that is passed on each .pm file during the build (in the
pm_to_blib() phase). It is empty by default, meaning no filtering is done.
Great care is necessary when defining the command if quoting needs to be
done. For instance, you would need to say:
{'PM_FILTER' => 'grep -v \\"^\\#\\"'}
to remove all the leading comments on the fly during the build. The
extra \\ are necessary, unfortunately, because this variable is interpolated
within the context of a Perl program built on the command line, and double
quotes are what is used with the -e switch to build that command line. The
# is escaped for the Makefile, since what is going to be generated will then
be:
PM_FILTER = grep -v \"^\#\"
Without the \\ before the #, we'd have the start of a Makefile comment,
and the macro would be incorrectly defined.
=item POLLUTE
Release 5.005 grandfathered old global symbol names by providing preprocessor
macros for extension source compatibility. As of release 5.6, these
preprocessor definitions are not available by default. The POLLUTE flag
specifies that the old names should still be defined:
perl Makefile.PL POLLUTE=1
Please inform the module author if this is necessary to successfully install
a module under 5.6 or later.
=item PPM_INSTALL_EXEC
Name of the executable used to run C<PPM_INSTALL_SCRIPT> below. (e.g. perl)
=item PPM_INSTALL_SCRIPT
Name of the script that gets executed by the Perl Package Manager after
the installation of a package.
=item PREFIX
This overrides all the default install locations. Man pages,
libraries, scripts, etc... MakeMaker will try to make an educated
guess about where to place things under the new PREFIX based on your
Config defaults. Failing that, it will fall back to a structure
which should be sensible for your platform.
If you specify LIB or any INSTALL* variables they will not be affected
by the PREFIX.
=item PREREQ_FATAL
Bool. If this parameter is true, failing to have the required modules
(or the right versions thereof) will be fatal. C<perl Makefile.PL>
will C<die> instead of simply informing the user of the missing dependencies.
It is I<extremely> rare to have to use C<PREREQ_FATAL>. Its use by module
authors is I<strongly discouraged> and should never be used lightly.
Module installation tools have ways of resolving unmet dependencies but
to do that they need a F<Makefile>. Using C<PREREQ_FATAL> breaks this.
That's bad.
Assuming you have good test coverage, your tests should fail with
missing dependencies informing the user more strongly that something
is wrong. You can write a F<t/00compile.t> test which will simply
check that your code compiles and stop "make test" prematurely if it
doesn't. See L<Test::More/BAIL_OUT> for more details.
=item PREREQ_PM
A hash of modules that are needed to run your module. The keys are
the module names ie. Test::More, and the minimum version is the
value. If the required version number is 0 any version will do.
This will go into the C<requires> field of your CPAN Meta file
(F<META.yml> or F<META.json>).
PREREQ_PM => {
# Require Test::More at least 0.47
"Test::More" => "0.47",
# Require any version of Acme::Buffy
"Acme::Buffy" => 0,
}
=item PREREQ_PRINT
Bool. If this parameter is true, the prerequisites will be printed to
stdout and MakeMaker will exit. The output format is an evalable hash
ref.
$PREREQ_PM = {
'A::B' => Vers1,
'C::D' => Vers2,
...
};
If a distribution defines a minimal required perl version, this is
added to the output as an additional line of the form:
$MIN_PERL_VERSION = '5.008001';
If BUILD_REQUIRES is not empty, it will be dumped as $BUILD_REQUIRES hashref.
=item PRINT_PREREQ
RedHatism for C<PREREQ_PRINT>. The output format is different, though:
perl(A::B)>=Vers1 perl(C::D)>=Vers2 ...
A minimal required perl version, if present, will look like this:
perl(perl)>=5.008001
=item SITEPREFIX
Like PERLPREFIX, but only for the site install locations.
Defaults to $Config{siteprefixexp}. Perls prior to 5.6.0 didn't have
an explicit siteprefix in the Config. In those cases
$Config{installprefix} will be used.
Overridable by PREFIX
=item SIGN
When true, perform the generation and addition to the MANIFEST of the
SIGNATURE file in the distdir during 'make distdir', via 'cpansign
-s'.
Note that you need to install the Module::Signature module to
perform this operation.
Defaults to false.
=item SKIP
Arrayref. E.g. [qw(name1 name2)] skip (do not write) sections of the
Makefile. Caution! Do not use the SKIP attribute for the negligible
speedup. It may seriously damage the resulting Makefile. Only use it
if you really need it.
=item TEST_REQUIRES
A hash of modules that are needed to test your module but not run or
build it.
This will go into the C<test_requires> field of your CPAN Meta file.
(F<META.yml> or F<META.json>).
The format is the same as PREREQ_PM.
=item TYPEMAPS
Ref to array of typemap file names. Use this when the typemaps are
in some directory other than the current directory or when they are
not named B<typemap>. The last typemap in the list takes
precedence. A typemap in the current directory has highest
precedence, even if it isn't listed in TYPEMAPS. The default system
typemap has lowest precedence.
=item VENDORPREFIX
Like PERLPREFIX, but only for the vendor install locations.
Defaults to $Config{vendorprefixexp}.
Overridable by PREFIX
=item VERBINST
If true, make install will be verbose
=item VERSION
Your version number for distributing the package. This defaults to
0.1.
=item VERSION_FROM
Instead of specifying the VERSION in the Makefile.PL you can let
MakeMaker parse a file to determine the version number. The parsing
routine requires that the file named by VERSION_FROM contains one
single line to compute the version number. The first line in the file
that contains something like a $VERSION assignment or C<package Name
VERSION> will be used. The following lines will be parsed o.k.:
# Good
package Foo::Bar 1.23; # 1.23
$VERSION = '1.00'; # 1.00
*VERSION = \'1.01'; # 1.01
($VERSION) = q$Revision$ =~ /(\d+)/g; # The digits in $Revision$
$FOO::VERSION = '1.10'; # 1.10
*FOO::VERSION = \'1.11'; # 1.11
but these will fail:
# Bad
my $VERSION = '1.01';
local $VERSION = '1.02';
local $FOO::VERSION = '1.30';
"Version strings" are incompatible and should not be used.
# Bad
$VERSION = 1.2.3;
$VERSION = v1.2.3;
L<version> objects are fine. As of MakeMaker 6.35 version.pm will be
automatically loaded, but you must declare the dependency on version.pm.
For compatibility with older MakeMaker you should load it on the same line
as $VERSION is declared.
# All on one line
use version; our $VERSION = qv(1.2.3);
(Putting C<my> or C<local> on the preceding line will work o.k.)
The file named in VERSION_FROM is not added as a dependency to
Makefile. This is not really correct, but it would be a major pain
during development to have to rewrite the Makefile for any smallish
change in that file. If you want to make sure that the Makefile
contains the correct VERSION macro after any change of the file, you
would have to do something like
depend => { Makefile => '$(VERSION_FROM)' }
See attribute C<depend> below.
=item VERSION_SYM
A sanitized VERSION with . replaced by _. For places where . has
special meaning (some filesystems, RCS labels, etc...)
=item XS
Hashref of .xs files. MakeMaker will default this. e.g.
{'name_of_file.xs' => 'name_of_file.c'}
The .c files will automatically be included in the list of files
deleted by a make clean.
=item XSOPT
String of options to pass to xsubpp. This might include C<-C++> or
C<-extern>. Do not include typemaps here; the TYPEMAP parameter exists for
that purpose.
=item XSPROTOARG
May be set to an empty string, which is identical to C<-prototypes>, or
C<-noprototypes>. See the xsubpp documentation for details. MakeMaker
defaults to the empty string.
=item XS_VERSION
Your version number for the .xs file of this package. This defaults
to the value of the VERSION attribute.
=back
=head2 Additional lowercase attributes
can be used to pass parameters to the methods which implement that
part of the Makefile. Parameters are specified as a hash ref but are
passed to the method as a hash.
=over 2
=item clean
{FILES => "*.xyz foo"}
=item depend
{ANY_TARGET => ANY_DEPENDENCY, ...}
(ANY_TARGET must not be given a double-colon rule by MakeMaker.)
=item dist
{TARFLAGS => 'cvfF', COMPRESS => 'gzip', SUFFIX => '.gz',
SHAR => 'shar -m', DIST_CP => 'ln', ZIP => '/bin/zip',
ZIPFLAGS => '-rl', DIST_DEFAULT => 'private tardist' }
If you specify COMPRESS, then SUFFIX should also be altered, as it is
needed to tell make the target file of the compression. Setting
DIST_CP to ln can be useful, if you need to preserve the timestamps on
your files. DIST_CP can take the values 'cp', which copies the file,
'ln', which links the file, and 'best' which copies symbolic links and
links the rest. Default is 'best'.
=item dynamic_lib
{ARMAYBE => 'ar', OTHERLDFLAGS => '...', INST_DYNAMIC_DEP => '...'}
=item linkext
{LINKTYPE => 'static', 'dynamic' or ''}
NB: Extensions that have nothing but *.pm files had to say
{LINKTYPE => ''}
with Pre-5.0 MakeMakers. Since version 5.00 of MakeMaker such a line
can be deleted safely. MakeMaker recognizes when there's nothing to
be linked.
=item macro
{ANY_MACRO => ANY_VALUE, ...}
=item postamble
Anything put here will be passed to MY::postamble() if you have one.
=item realclean
{FILES => '$(INST_ARCHAUTODIR)/*.xyz'}
=item test
{TESTS => 't/*.t'}
=item tool_autosplit
{MAXLEN => 8}
=back
=head2 Overriding MakeMaker Methods
If you cannot achieve the desired Makefile behaviour by specifying
attributes you may define private subroutines in the Makefile.PL.
Each subroutine returns the text it wishes to have written to
the Makefile. To override a section of the Makefile you can
either say:
sub MY::c_o { "new literal text" }
or you can edit the default by saying something like:
package MY; # so that "SUPER" works right
sub c_o {
my $inherited = shift->SUPER::c_o(@_);
$inherited =~ s/old text/new text/;
$inherited;
}
If you are running experiments with embedding perl as a library into
other applications, you might find MakeMaker is not sufficient. You'd
better have a look at ExtUtils::Embed which is a collection of utilities
for embedding.
If you still need a different solution, try to develop another
subroutine that fits your needs and submit the diffs to
C<makemaker@perl.org>
For a complete description of all MakeMaker methods see
L<ExtUtils::MM_Unix>.
Here is a simple example of how to add a new target to the generated
Makefile:
sub MY::postamble {
return <<'MAKE_FRAG';
$(MYEXTLIB): sdbm/Makefile
cd sdbm && $(MAKE) all
MAKE_FRAG
}
=head2 The End Of Cargo Cult Programming
WriteMakefile() now does some basic sanity checks on its parameters to
protect against typos and malformatted values. This means some things
which happened to work in the past will now throw warnings and
possibly produce internal errors.
Some of the most common mistakes:
=over 2
=item C<< MAN3PODS => ' ' >>
This is commonly used to suppress the creation of man pages. MAN3PODS
takes a hash ref not a string, but the above worked by accident in old
versions of MakeMaker.
The correct code is C<< MAN3PODS => { } >>.
=back
=head2 Hintsfile support
MakeMaker.pm uses the architecture-specific information from
Config.pm. In addition it evaluates architecture specific hints files
in a C<hints/> directory. The hints files are expected to be named
like their counterparts in C<PERL_SRC/hints>, but with an C<.pl> file
name extension (eg. C<next_3_2.pl>). They are simply C<eval>ed by
MakeMaker within the WriteMakefile() subroutine, and can be used to
execute commands as well as to include special variables. The rules
which hintsfile is chosen are the same as in Configure.
The hintsfile is eval()ed immediately after the arguments given to
WriteMakefile are stuffed into a hash reference $self but before this
reference becomes blessed. So if you want to do the equivalent to
override or create an attribute you would say something like
$self->{LIBS} = ['-ldbm -lucb -lc'];
=head2 Distribution Support
For authors of extensions MakeMaker provides several Makefile
targets. Most of the support comes from the ExtUtils::Manifest module,
where additional documentation can be found.
=over 4
=item make distcheck
reports which files are below the build directory but not in the
MANIFEST file and vice versa. (See ExtUtils::Manifest::fullcheck() for
details)
=item make skipcheck
reports which files are skipped due to the entries in the
C<MANIFEST.SKIP> file (See ExtUtils::Manifest::skipcheck() for
details)
=item make distclean
does a realclean first and then the distcheck. Note that this is not
needed to build a new distribution as long as you are sure that the
MANIFEST file is ok.
=item make manifest
rewrites the MANIFEST file, adding all remaining files found (See
ExtUtils::Manifest::mkmanifest() for details)
=item make distdir
Copies all the files that are in the MANIFEST file to a newly created
directory with the name C<$(DISTNAME)-$(VERSION)>. If that directory
exists, it will be removed first.
Additionally, it will create META.yml and META.json module meta-data file
in the distdir and add this to the distdir's MANIFEST. You can shut this
behavior off with the NO_META flag.
=item make disttest
Makes a distdir first, and runs a C<perl Makefile.PL>, a make, and
a make test in that directory.
=item make tardist
First does a distdir. Then a command $(PREOP) which defaults to a null
command, followed by $(TO_UNIX), which defaults to a null command under
UNIX, and will convert files in distribution directory to UNIX format
otherwise. Next it runs C<tar> on that directory into a tarfile and
deletes the directory. Finishes with a command $(POSTOP) which
defaults to a null command.
=item make dist
Defaults to $(DIST_DEFAULT) which in turn defaults to tardist.
=item make uutardist
Runs a tardist first and uuencodes the tarfile.
=item make shdist
First does a distdir. Then a command $(PREOP) which defaults to a null
command. Next it runs C<shar> on that directory into a sharfile and
deletes the intermediate directory again. Finishes with a command
$(POSTOP) which defaults to a null command. Note: For shdist to work
properly a C<shar> program that can handle directories is mandatory.
=item make zipdist
First does a distdir. Then a command $(PREOP) which defaults to a null
command. Runs C<$(ZIP) $(ZIPFLAGS)> on that directory into a
zipfile. Then deletes that directory. Finishes with a command
$(POSTOP) which defaults to a null command.
=item make ci
Does a $(CI) and a $(RCS_LABEL) on all files in the MANIFEST file.
=back
Customization of the dist targets can be done by specifying a hash
reference to the dist attribute of the WriteMakefile call. The
following parameters are recognized:
CI ('ci -u')
COMPRESS ('gzip --best')
POSTOP ('@ :')
PREOP ('@ :')
TO_UNIX (depends on the system)
RCS_LABEL ('rcs -q -Nv$(VERSION_SYM):')
SHAR ('shar')
SUFFIX ('.gz')
TAR ('tar')
TARFLAGS ('cvf')
ZIP ('zip')
ZIPFLAGS ('-r')
An example:
WriteMakefile(
...other options...
dist => {
COMPRESS => "bzip2",
SUFFIX => ".bz2"
}
);
=head2 Module Meta-Data (META and MYMETA)
Long plaguing users of MakeMaker based modules has been the problem of
getting basic information about the module out of the sources
I<without> running the F<Makefile.PL> and doing a bunch of messy
heuristics on the resulting F<Makefile>. Over the years, it has become
standard to keep this information in one or more CPAN Meta files
distributed with each distribution.
The original format of CPAN Meta files was L<YAML> and the corresponding
file was called F<META.yml>. In 2010, version 2 of the L<CPAN::Meta::Spec>
was released, which mandates JSON format for the metadata in order to
overcome certain compatibility issues between YAML serializers and to
avoid breaking older clients unable to handle a new version of the spec.
The L<CPAN::Meta> library is now standard for accessing old and new-style
Meta files.
If L<CPAN::Meta> is installed, MakeMaker will automatically generate
F<META.json> and F<META.yml> files for you and add them to your F<MANIFEST> as
part of the 'distdir' target (and thus the 'dist' target). This is intended to
seamlessly and rapidly populate CPAN with module meta-data. If you wish to
shut this feature off, set the C<NO_META> C<WriteMakefile()> flag to true.
At the 2008 QA Hackathon in Oslo, Perl module toolchain maintainers agreed
to use the CPAN Meta format to communicate post-configuration requirements
between toolchain components. These files, F<MYMETA.json> and F<MYMETA.yml>,
are generated when F<Makefile.PL> generates a F<Makefile> (if L<CPAN::Meta>
is installed). Clients like L<CPAN> or L<CPANPLUS> will read these
files to see what prerequisites must be fulfilled before building or testing
the distribution. If you wish to shut this feature off, set the C<NO_MYMETA>
C<WriteMakefile()> flag to true.
=head2 Disabling an extension
If some events detected in F<Makefile.PL> imply that there is no way
to create the Module, but this is a normal state of things, then you
can create a F<Makefile> which does nothing, but succeeds on all the
"usual" build targets. To do so, use
use ExtUtils::MakeMaker qw(WriteEmptyMakefile);
WriteEmptyMakefile();
instead of WriteMakefile().
This may be useful if other modules expect this module to be I<built>
OK, as opposed to I<work> OK (say, this system-dependent module builds
in a subdirectory of some other distribution, or is listed as a
dependency in a CPAN::Bundle, but the functionality is supported by
different means on the current architecture).
=head2 Other Handy Functions
=over 4
=item prompt
my $value = prompt($message);
my $value = prompt($message, $default);
The C<prompt()> function provides an easy way to request user input
used to write a makefile. It displays the $message as a prompt for
input. If a $default is provided it will be used as a default. The
function returns the $value selected by the user.
If C<prompt()> detects that it is not running interactively and there
is nothing on STDIN or if the PERL_MM_USE_DEFAULT environment variable
is set to true, the $default will be used without prompting. This
prevents automated processes from blocking on user input.
If no $default is provided an empty string will be used instead.
=back
=head1 ENVIRONMENT
=over 4
=item PERL_MM_OPT
Command line options used by C<MakeMaker-E<gt>new()>, and thus by
C<WriteMakefile()>. The string is split on whitespace, and the result
is processed before any actual command line arguments are processed.
=item PERL_MM_USE_DEFAULT
If set to a true value then MakeMaker's prompt function will
always return the default without waiting for user input.
=item PERL_CORE
Same as the PERL_CORE parameter. The parameter overrides this.
=back
=head1 SEE ALSO
L<Module::Build> is a pure-Perl alternative to MakeMaker which does
not rely on make or any other external utility. It is easier to
extend to suit your needs.
L<Module::Install> is a wrapper around MakeMaker which adds features
not normally available.
L<ExtUtils::ModuleMaker> and L<Module::Starter> are both modules to
help you setup your distribution.
L<CPAN::Meta> and L<CPAN::Meta::Spec> explain CPAN Meta files in detail.
=head1 AUTHORS
Andy Dougherty C<doughera@lafayette.edu>, Andreas KE<ouml>nig
C<andreas.koenig@mind.de>, Tim Bunce C<timb@cpan.org>. VMS
support by Charles Bailey C<bailey@newman.upenn.edu>. OS/2 support
by Ilya Zakharevich C<ilya@math.ohio-state.edu>.
Currently maintained by Michael G Schwern C<schwern@pobox.com>
Send patches and ideas to C<makemaker@perl.org>.
Send bug reports via http://rt.cpan.org/. Please send your
generated Makefile along with your report.
For more up-to-date information, see L<http://www.makemaker.org>.
Repository available at L<https://github.com/Perl-Toolchain-Gang/ExtUtils-MakeMaker>.
=head1 LICENSE
This program is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.
See L<http://www.perl.com/perl/misc/Artistic.html>
=cut
| Bjay1435/capstone | rootfs/usr/share/perl/5.18.2/ExtUtils/MakeMaker.pm | Perl | mit | 91,102 |
#!/usr/bin/env perl
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2012, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
###########################################################################
# Experimental hooks are available to run tests remotely on machines that
# are able to run curl but are unable to run the test harness.
# The following sections need to be modified:
#
# $HOSTIP, $HOST6IP - Set to the address of the host running the test suite
# $CLIENTIP, $CLIENT6IP - Set to the address of the host running curl
# runclient, runclientoutput - Modify to copy all the files in the log/
# directory to the system running curl, run the given command remotely
# and save the return code or returned stdout (respectively), then
# copy all the files from the remote system's log/ directory back to
# the host running the test suite. This can be done a few ways, such
# as using scp & ssh, rsync & telnet, or using a NFS shared directory
# and ssh.
#
# 'make && make test' needs to be done on both machines before making the
# above changes and running runtests.pl manually. In the shared NFS case,
# the contents of the tests/server/ directory must be from the host
# running the test suite, while the rest must be from the host running curl.
#
# Note that even with these changes a number of tests will still fail (mainly
# to do with cookies, those that set environment variables, or those that
# do more than touch the file system in a <precheck> or <postcheck>
# section). These can be added to the $TESTCASES line below,
# e.g. $TESTCASES="!8 !31 !63 !cookies..."
#
# Finally, to properly support -g and -n, checktestcmd needs to change
# to check the remote system's PATH, and the places in the code where
# the curl binary is read directly to determine its type also need to be
# fixed. As long as the -g option is never given, and the -n is always
# given, this won't be a problem.
# These should be the only variables that might be needed to get edited:
# Compile-time setup: extend @INC so the harness's helper modules
# (serverhelp, sshhelp, getpart.pm, ...) are found both in the build
# directory (".") and in $srcdir for out-of-tree builds. Must run
# before the "use" statements below are compiled.
BEGIN {
@INC=(@INC, $ENV{'srcdir'}, ".");
# run time statistics needs Time::HiRes
# If Time::HiRes is unavailable the eval swallows the failure and the
# core 1-second-resolution time() builtin is used instead.
eval {
no warnings "all";
require Time::HiRes;
import Time::HiRes qw( time );
}
}
use strict;
use warnings;
use Cwd;
# Subs imported from serverhelp module
use serverhelp qw(
serverfactors
servername_id
servername_str
servername_canon
server_pidfilename
server_logfilename
);
# Variables and subs imported from sshhelp module
use sshhelp qw(
$sshdexe
$sshexe
$sftpexe
$sshconfig
$sftpconfig
$sshdlog
$sshlog
$sftplog
$sftpcmds
display_sshdconfig
display_sshconfig
display_sftpconfig
display_sshdlog
display_sshlog
display_sftplog
exe_ext
find_sshd
find_ssh
find_sftp
find_httptlssrv
sshversioninfo
);
require "getpart.pm"; # array functions
require "valgrind.pm"; # valgrind report parser
require "ftp.pm";
my $HOSTIP="127.0.0.1"; # address on which the test server listens
my $HOST6IP="[::1]"; # address on which the test server listens
my $CLIENTIP="127.0.0.1"; # address which curl uses for incoming connections
my $CLIENT6IP="[::1]"; # address which curl uses for incoming connections
my $base = 8990; # base port number
my $HTTPPORT; # HTTP server port
my $HTTP6PORT; # HTTP IPv6 server port
my $HTTPSPORT; # HTTPS (stunnel) server port
my $FTPPORT; # FTP server port
my $FTP2PORT; # FTP server 2 port
my $FTPSPORT; # FTPS (stunnel) server port
my $FTP6PORT; # FTP IPv6 server port
my $TFTPPORT; # TFTP
my $TFTP6PORT; # TFTP
my $SSHPORT; # SCP/SFTP
my $SOCKSPORT; # SOCKS4/5 port
my $POP3PORT; # POP3
my $POP36PORT; # POP3 IPv6 server port
my $IMAPPORT; # IMAP
my $IMAP6PORT; # IMAP IPv6 server port
my $SMTPPORT; # SMTP
my $SMTP6PORT; # SMTP IPv6 server port
my $RTSPPORT; # RTSP
my $RTSP6PORT; # RTSP IPv6 server port
my $GOPHERPORT; # Gopher
my $GOPHER6PORT; # Gopher IPv6 server port
my $HTTPTLSPORT; # HTTP TLS (non-stunnel) server port
my $HTTPTLS6PORT; # HTTP TLS (non-stunnel) IPv6 server port
my $HTTPPROXYPORT; # HTTP proxy port, when using CONNECT
my $srcdir = $ENV{'srcdir'} || '.';
my $CURL="../src/curl".exe_ext(); # what curl executable to run on the tests
my $VCURL=$CURL; # what curl binary to use to verify the servers with
# VCURL is handy to set to the system one when the one you
# just built hangs or crashes and thus prevent verification
my $DBGCURL=$CURL; #"../src/.libs/curl"; # alternative for debugging
my $LOGDIR="log";
my $TESTDIR="$srcdir/data";
my $LIBDIR="./libtest";
my $UNITDIR="./unit";
# TODO: change this to use server_inputfilename()
my $SERVERIN="$LOGDIR/server.input"; # what curl sent the server
my $SERVER2IN="$LOGDIR/server2.input"; # what curl sent the second server
my $PROXYIN="$LOGDIR/proxy.input"; # what curl sent the proxy
my $CURLLOG="$LOGDIR/curl.log"; # all command lines run
my $FTPDCMD="$LOGDIR/ftpserver.cmd"; # copy ftp server instructions here
my $SERVERLOGS_LOCK="$LOGDIR/serverlogs.lock"; # server logs advisor read lock
my $CURLCONFIG="../curl-config"; # curl-config from current build
# Normally, all test cases should be run, but at times it is handy to
# simply run a particular one:
my $TESTCASES="all";
# To run specific test cases, set them like:
# $TESTCASES="1 2 3 7 8";
#######################################################################
# No variables below this point should need to be modified
#
# invoke perl like this:
my $perl="perl -I$srcdir";
my $server_response_maxtime=13;
my $debug_build=0; # curl built with --enable-debug
my $curl_debug=0; # curl built with --enable-curldebug (memory tracking)
my $libtool;
# name of the file that the memory debugging creates:
my $memdump="$LOGDIR/memdump";
# the path to the script that analyzes the memory debug output file:
my $memanalyze="$perl $srcdir/memanalyze.pl";
my $pwd = getcwd(); # current working directory
my $start;
my $ftpchecktime=1; # time it took to verify our test FTP server
my $stunnel = checkcmd("stunnel4") || checkcmd("stunnel");
my $valgrind = checktestcmd("valgrind");
my $valgrind_logfile="--logfile";
my $valgrind_tool;
my $gdb = checktestcmd("gdb");
my $httptlssrv = find_httptlssrv();
my $ssl_version; # set if libcurl is built with SSL support
my $large_file; # set if libcurl is built with large file support
my $has_idn; # set if libcurl is built with IDN support
my $http_ipv6; # set if HTTP server has IPv6 support
my $ftp_ipv6; # set if FTP server has IPv6 support
my $tftp_ipv6; # set if TFTP server has IPv6 support
my $gopher_ipv6; # set if Gopher server has IPv6 support
my $has_ipv6; # set if libcurl is built with IPv6 support
my $has_libz; # set if libcurl is built with libz support
my $has_getrlimit; # set if system has getrlimit()
my $has_ntlm; # set if libcurl is built with NTLM support
my $has_ntlm_wb; # set if libcurl is built with NTLM delegation to winbind
my $has_charconv;# set if libcurl is built with CharConv support
my $has_tls_srp; # set if libcurl is built with TLS-SRP support
my $has_metalink;# set if curl is built with Metalink support
my $has_openssl; # built with a lib using an OpenSSL-like API
my $has_gnutls; # built with GnuTLS
my $has_nss; # built with NSS
my $has_yassl; # built with yassl
my $has_polarssl;# built with polarssl
my $has_axtls; # built with axTLS
my $has_winssl; # built with WinSSL (Schannel/SSPI)
my $has_shared = "unknown"; # built shared
my $ssllib; # name of the lib we use (for human presentation)
my $has_crypto; # set if libcurl is built with cryptographic support
my $has_textaware; # set if running on a system that has a text mode concept
# on files. Windows for example
my @protocols; # array of lowercase supported protocol servers
my $skipped=0; # number of tests skipped; reported in main loop
my %skipped; # skipped{reason}=counter, reasons for skip
my @teststat; # teststat[testnum]=reason, reasons for skip
my %disabled_keywords; # key words of tests to skip
my %enabled_keywords; # key words of tests to run
my %disabled; # disabled test cases
my $sshdid; # for socks server, ssh daemon version id
my $sshdvernum; # for socks server, ssh daemon version number
my $sshdverstr; # for socks server, ssh daemon version string
my $sshderror; # for socks server, ssh daemon version error
my $defserverlogslocktimeout = 20; # timeout to await server logs lock removal
my $defpostcommanddelay = 0; # delay between command and postcheck sections
my $timestats; # time stamping and stats generation
my $fullstats; # show time stats for every single test
my %timeprepini; # timestamp for each test preparation start
my %timesrvrini; # timestamp for each test required servers verification start
my %timesrvrend; # timestamp for each test required servers verification end
my %timetoolini; # timestamp for each test command run starting
my %timetoolend; # timestamp for each test command run stopping
my %timesrvrlog; # timestamp for each test server logs lock removal
my %timevrfyend; # timestamp for each test result verification end
my $testnumcheck; # test number, set in singletest sub.
my %oldenv;
#######################################################################
# variables that command line options may set
#
my $short;
my $verbose;
my $debugprotocol;
my $anyway;
my $gdbthis; # run test case with gdb debugger
my $gdbxwin; # use windowed gdb when using gdb
my $keepoutfiles; # keep stdout and stderr files after tests
my $listonly; # only list the tests
my $postmortem; # display detailed info about failed tests
my %run; # running server
my %doesntrun; # servers that don't work, identified by pidfile
my %serverpidfile;# all server pid file names, identified by server id
my %runcert; # cert file currently in use by an ssl running server
# torture test variables
my $torture;
my $tortnum;
my $tortalloc;
#######################################################################
# logmsg is our general message logging subroutine.
#
sub logmsg {
    # Print each argument verbatim on stdout; no separators and no
    # trailing newline are added, callers supply their own "\n".
    print $_ for (@_);
}
# get the name of the current user
# Checked in this order since different platforms populate different vars.
my $USER = $ENV{USER};          # Linux
if (!$USER) {
    $USER = $ENV{USERNAME};     # Windows
    if (!$USER) {
        $USER = $ENV{LOGNAME};  # Some UNIX (I think)
    }
}

# enable memory debugging if curl is compiled with it
$ENV{'CURL_MEMDEBUG'} = $memdump;
# NOTE(review): HOME is pointed at the test directory, presumably so the
# user's own dotfiles (e.g. ~/.curlrc) cannot influence test runs — confirm.
$ENV{'HOME'}=$pwd;
# Signal handler for INT/TERM: stop all running test servers before
# aborting so no stray daemons are left holding ports.
sub catch_zap {
    my $signame = shift;
    logmsg "runtests.pl received SIG$signame, exiting\n";
    stopservers($verbose);
    die "Somebody sent me a SIG$signame";
}
# Install the cleanup handler for the usual interactive/kill signals.
$SIG{INT} = \&catch_zap;
$SIG{TERM} = \&catch_zap;

##########################################################################
# Clear all possible '*_proxy' environment variables for various protocols
# to prevent them to interfere with our testing!

my $protocol;
foreach $protocol (('ftp', 'http', 'ftps', 'https', 'no', 'all')) {
    my $proxy = "${protocol}_proxy";
    # clear lowercase version
    delete $ENV{$proxy} if($ENV{$proxy});
    # clear uppercase version
    delete $ENV{uc($proxy)} if($ENV{uc($proxy)});
}

# make sure we don't get affected by other variables that control our
# behaviour
delete $ENV{'SSL_CERT_DIR'} if($ENV{'SSL_CERT_DIR'});
delete $ENV{'SSL_CERT_PATH'} if($ENV{'SSL_CERT_PATH'});
delete $ENV{'CURL_CA_BUNDLE'} if($ENV{'CURL_CA_BUNDLE'});
#######################################################################
# Load serverpidfile hash with pidfile names for all possible servers.
#
sub init_serverpidfile_hash {
    # Pre-compute the pidfile name for every server this harness could
    # possibly start, keyed by the server id string. Protocols in the
    # first group also exist in an ssl ("s") flavor; the second group
    # does not.
    foreach my $proto (qw(ftp http imap pop3 smtp)) {
        foreach my $ssl ('', 's') {
            foreach my $ipvnum (4, 6) {
                foreach my $idnum (1, 2) {
                    my $serv = servername_id("$proto$ssl", $ipvnum, $idnum);
                    $serverpidfile{$serv} =
                        server_pidfilename("$proto$ssl", $ipvnum, $idnum);
                }
            }
        }
    }
    foreach my $proto (qw(tftp sftp socks ssh rtsp gopher httptls)) {
        foreach my $ipvnum (4, 6) {
            foreach my $idnum (1, 2) {
                my $serv = servername_id($proto, $ipvnum, $idnum);
                $serverpidfile{$serv} =
                    server_pidfilename($proto, $ipvnum, $idnum);
            }
        }
    }
}
#######################################################################
# Check if a given child process has just died. Reaps it if so.
#
#######################################################################
# Check if a given child process has just died. Reaps it if so.
# Returns 1 when $pid was reaped, 0 otherwise (including invalid pids).
#
sub checkdied {
    use POSIX ":sys_wait_h";
    my $pid = $_[0];
    # Reject undef/zero/negative pids up front. The previous condition
    # used low-precedence "not", which parsed as
    #   not(defined $pid || $pid <= 0)
    # and therefore never returned early; an invalid pid could reach
    # waitpid(), and waitpid(-1) would reap ANY child of this process.
    if((not defined $pid) || ($pid <= 0)) {
        return 0;
    }
    # WNOHANG: poll without blocking; waitpid returns the pid only when
    # that exact child has exited and was reaped here.
    my $rc = waitpid($pid, &WNOHANG);
    return ($rc == $pid)?1:0;
}
#######################################################################
# Start a new thread/process and run the given command line in there.
# Return the pids (yes plural) of the new child process to the parent.
#
# Fork and run $cmd in a child, then wait up to roughly $timeout seconds
# for the server's pidfile to appear and hold a live pid. When $fake is
# set, this parent writes the pidfile itself (for tools like the ssh
# client and gnutls-serv that cannot write one). Returns ($child, $pid2):
# the forked pid and the pid read from the pidfile; (-1,-1) on failure.
sub startnew {
    my ($cmd, $pidfile, $timeout, $fake)=@_;

    logmsg "startnew: $cmd\n" if ($verbose);

    my $child = fork();
    my $pid2 = 0;

    if(not defined $child) {
        logmsg "startnew: fork() failure detected\n";
        return (-1,-1);
    }

    if(0 == $child) {
        # Here we are the child. Run the given command.

        # Put an "exec" in front of the command so that the child process
        # keeps this child's process ID.
        exec("exec $cmd") || die "Can't exec() $cmd: $!";

        # exec() should never return back here to this process. We protect
        # ourselves by calling die() just in case something goes really bad.
        die "error: exec() has returned";
    }

    # Ugly hack but ssh client and gnutls-serv don't support pid files
    if ($fake) {
        if(open(OUT, ">$pidfile")) {
            print OUT $child . "\n";
            close(OUT);
            logmsg "startnew: $pidfile faked with pid=$child\n" if($verbose);
        }
        else {
            logmsg "startnew: failed to write fake $pidfile with pid=$child\n";
        }
        # could/should do a while connect fails sleep a bit and loop
        sleep $timeout;
        if (checkdied($child)) {
            logmsg "startnew: child process has failed to start\n" if($verbose);
            return (-1,-1);
        }
    }

    # Poll once a second for a pidfile containing a pid that is actually
    # alive; a stale pidfile from a previous run must not count.
    my $count = $timeout;
    while($count--) {
        if(-f $pidfile && -s $pidfile && open(PID, "<$pidfile")) {
            $pid2 = 0 + <PID>;
            close(PID);
            if(($pid2 > 0) && kill(0, $pid2)) {
                # if $pid2 is valid, then make sure this pid is alive, as
                # otherwise it is just likely to be the _previous_ pidfile or
                # similar!
                last;
            }
            # invalidate $pid2 if not actually alive
            $pid2 = 0;
        }
        if (checkdied($child)) {
            logmsg "startnew: child process has died, server might start up\n"
                if($verbose);
            # We can't just abort waiting for the server with a
            # return (-1,-1);
            # because the server might have forked and could still start
            # up normally. Instead, just reduce the amount of time we remain
            # waiting.
            $count >>= 2;
        }
        sleep(1);
    }

    # Return two PIDs, the one for the child process we spawned and the one
    # reported by the server itself (in case it forked again on its own).
    # Both (potentially) need to be killed at the end of the test.
    return ($child, $pid2);
}
#######################################################################
# Check for a command in the PATH of the test server.
#
#######################################################################
# Check for a command in the PATH of the test server.
# Returns the first matching absolute path, or undef (empty list in list
# context) when the command cannot be found.
#
sub checkcmd {
    my ($cmd)=@_;
    # Search PATH first, then a fixed set of common locations (sbin dirs
    # are included since daemons like stunnel often live there), and
    # finally the in-tree libtest directories.
    my @paths=(split(":", $ENV{'PATH'}), "/usr/sbin", "/usr/local/sbin",
               "/sbin", "/usr/bin", "/usr/local/bin",
               "./libtest/.libs", "./libtest");
    for(@paths) {
        if( -x "$_/$cmd" && ! -d "$_/$cmd") {
            # executable bit but not a directory!
            return "$_/$cmd";
        }
    }
    # Explicit "not found": previously the sub fell off the end of the
    # for loop, which only implicitly returned an empty value.
    return;
}
#######################################################################
# Get the list of tests that the tests/data/Makefile.am knows about!
#
# Raw output of data/Makefile's "show" target; searched (not parsed) later
# to decide whether a test number is part of the release tarball.
my $disttests;
sub get_disttests {
    # Shells out to make; requires a working build tree with tests/data.
    my @dist = `cd data && make show`;
    $disttests = join("", @dist);
}
#######################################################################
# Check for a command in the PATH of the machine running curl.
#
sub checktestcmd {
    my ($cmd) = @_;
    # In the default (local) setup the curl client runs on the same host
    # as the test servers, so the server-side PATH lookup is sufficient.
    return checkcmd($cmd);
}
#######################################################################
# Run the application under test and return its return code
#
# Run the application under test and return the raw system() status
# (caller shifts >> 8 for the exit code).
sub runclient {
    my ($cmd)=@_;
    return system($cmd);

# This is one way to test curl on a remote machine
#    my $out = system("ssh $CLIENTIP cd \'$pwd\' \\; \'$cmd\'");
#    sleep 2;    # time to allow the NFS server to be updated
#    return $out;
}
#######################################################################
# Run the application under test and return its stdout
#
# Run the application under test and return its stdout as a list of lines
# (backticks in list context); $? carries the exit status afterwards.
sub runclientoutput {
    my ($cmd)=@_;
    return `$cmd`;

# This is one way to test curl on a remote machine
#    my @out = `ssh $CLIENTIP cd \'$pwd\' \\; \'$cmd\'`;
#    sleep 2;    # time to allow the NFS server to be updated
#    return @out;
}
#######################################################################
# Memory allocation test and failure torture testing.
#
# Memory allocation failure torture test: count the allocations made by a
# clean run of $testcmd, then re-run it once per allocation with
# CURL_MEMLIMIT set so that allocation N fails, checking each run for
# coredumps, bad exit codes and memory leaks. Returns 0 on success, 1 on
# the first failing iteration.
sub torture {
    my $testcmd = shift;
    my $gdbline = shift;

    # remove memdump first to be sure we get a new nice and clean one
    unlink($memdump);

    # First get URL from test server, ignore the output/result
    runclient($testcmd);

    logmsg " CMD: $testcmd\n" if($verbose);

    # memanalyze -v is our friend, get the number of allocations made
    my $count=0;
    my @out = `$memanalyze -v $memdump`;
    for(@out) {
        if(/^Allocations: (\d+)/) {
            $count = $1;
            last;
        }
    }

    if(!$count) {
        logmsg " found no allocs to make fail\n";
        return 0;
    }

    logmsg " $count allocations to make fail\n";

    for ( 1 .. $count ) {
        my $limit = $_;
        my $fail;
        my $dumped_core;

        # -t<num> limits the torture run to a single allocation number
        if($tortalloc && ($tortalloc != $limit)) {
            next;
        }

        if($verbose) {
            my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) =
                localtime(time());
            my $now = sprintf("%02d:%02d:%02d ", $hour, $min, $sec);
            logmsg "Fail alloc no: $limit at $now\r";
        }

        # make the memory allocation function number $limit return failure
        $ENV{'CURL_MEMLIMIT'} = $limit;

        # remove memdump first to be sure we get a new nice and clean one
        unlink($memdump);

        logmsg "*** Alloc number $limit is now set to fail ***\n" if($gdbthis);

        # Under gdb the exit status is deliberately not captured ($ret
        # stays 0) since the interactive debugger owns the run.
        my $ret = 0;
        if($gdbthis) {
            runclient($gdbline)
        }
        else {
            $ret = runclient($testcmd);
        }
        #logmsg "$_ Returned " . $ret >> 8 . "\n";

        # Now clear the variable again
        delete $ENV{'CURL_MEMLIMIT'} if($ENV{'CURL_MEMLIMIT'});

        if(-r "core") {
            # there's core file present now!
            logmsg " core dumped\n";
            $dumped_core = 1;
            $fail = 2;
        }

        # verify that it returns a proper error code, doesn't leak memory
        # and doesn't core dump
        if($ret & 255) {
            # low byte non-zero means death by signal, not a clean exit
            logmsg " system() returned $ret\n";
            $fail=1;
        }
        else {
            my @memdata=`$memanalyze $memdump`;
            my $leak=0;
            for(@memdata) {
                if($_ ne "") {
                    # well it could be other memory problems as well, but
                    # we call it leak for short here
                    $leak=1;
                }
            }
            if($leak) {
                logmsg "** MEMORY FAILURE\n";
                logmsg @memdata;
                logmsg `$memanalyze -l $memdump`;
                $fail = 1;
            }
        }
        if($fail) {
            logmsg " Failed on alloc number $limit in test.\n",
            " invoke with \"-t$limit\" to repeat this single case.\n";
            stopservers($verbose);
            return 1;
        }
    }

    logmsg "torture OK\n";
    return 0;
}
#######################################################################
# Stop a test server along with pids which aren't in the %run hash yet.
# This also stops all servers which are relative to the given one.
#
# Stop a test server along with pids which aren't in the %run hash yet.
# This also stops all servers which are relative to the given one (the
# stunnel ssl wrapper and its plain backend, or the socks server and its
# underlying ssh daemon, always die together).
sub stopserver {
    my ($server, $pidlist) = @_;
    #
    # kill sockfilter processes for pingpong relative server
    #
    if($server =~ /^(ftp|imap|pop3|smtp)s?(\d*)(-ipv6|)$/) {
        my $proto  = $1;
        my $idnum  = ($2 && ($2 > 1)) ? $2 : 1;
        my $ipvnum = ($3 && ($3 =~ /6$/)) ? 6 : 4;
        killsockfilters($proto, $ipvnum, $idnum, $verbose);
    }
    #
    # All servers relative to the given one must be stopped also
    #
    my @killservers;
    if($server =~ /^(ftp|http|imap|pop3|smtp)s((\d*)(-ipv6|))$/) {
        # given a stunnel based ssl server, also kill non-ssl underlying one
        push @killservers, "${1}${2}";
    }
    elsif($server =~ /^(ftp|http|imap|pop3|smtp)((\d*)(-ipv6|))$/) {
        # given a non-ssl server, also kill stunnel based ssl piggybacking one
        push @killservers, "${1}s${2}";
    }
    elsif($server =~ /^(socks)((\d*)(-ipv6|))$/) {
        # given a socks server, also kill ssh underlying one
        push @killservers, "ssh${2}";
    }
    elsif($server =~ /^(ssh)((\d*)(-ipv6|))$/) {
        # given a ssh server, also kill socks piggybacking one
        push @killservers, "socks${2}";
    }
    push @killservers, $server;
    #
    # kill given pids and server relative ones clearing them in %run hash
    #
    foreach my $server (@killservers) {
        if($run{$server}) {
            # we must prepend a space since $pidlist may already contain a pid
            $pidlist .= " $run{$server}";
            $run{$server} = 0;
        }
        $runcert{$server} = 0 if($runcert{$server});
    }
    killpid($verbose, $pidlist);
    #
    # cleanup server pid files
    #
    foreach my $server (@killservers) {
        my $pidfile = $serverpidfile{$server};
        my $pid = processexists($pidfile);
        if($pid > 0) {
            # killpid above should have taken it down already
            logmsg "Warning: $server server unexpectedly alive\n";
            killpid($verbose, $pid);
        }
        unlink($pidfile) if(-f $pidfile);
    }
}
#######################################################################
# Verify that the server that runs on $ip, $port is our server. This also
# implies that we can speak with it, as there might be occasions when the
# server runs fine but we cannot talk to it ("Failed to connect to ::1: Can't
# assign requested address")
#
# Verify that the server on $ip, $port is our http(-like) test server by
# fetching /verifiedserver with $VCURL and matching the "WE ROOLZ: <pid>"
# banner. Returns the server's pid, 0 when nothing answered yet (caller
# retries), or -1 on a hard failure (coredump, resolve error, or a
# foreign server squatting on the port).
sub verifyhttp {
    my ($proto, $ipvnum, $idnum, $ip, $port) = @_;
    my $server = servername_id($proto, $ipvnum, $idnum);
    my $pid = 0;
    my $bonus="";

    my $verifyout = "$LOGDIR/".
        servername_canon($proto, $ipvnum, $idnum) .'_verify.out';
    unlink($verifyout) if(-f $verifyout);

    my $verifylog = "$LOGDIR/".
        servername_canon($proto, $ipvnum, $idnum) .'_verify.log';
    unlink($verifylog) if(-f $verifylog);

    if($proto eq "gopher") {
        # gopher is funny
        $bonus="1/";
    }

    my $flags = "--max-time $server_response_maxtime ";
    $flags .= "--output $verifyout ";
    $flags .= "--silent ";
    $flags .= "--verbose ";
    $flags .= "--globoff ";
    # axTLS is TLSv1-only, so force it when verifying with such a build
    $flags .= "-1 "         if($has_axtls);
    $flags .= "--insecure " if($proto eq 'https');
    $flags .= "\"$proto://$ip:$port/${bonus}verifiedserver\"";

    my $cmd = "$VCURL $flags 2>$verifylog";

    # verify if our/any server is running on this port
    logmsg "RUN: $cmd\n" if($verbose);
    my $res = runclient($cmd);

    $res >>= 8; # rotate the result
    if($res & 128) {
        logmsg "RUN: curl command died with a coredump\n";
        return -1;
    }

    if($res && $verbose) {
        logmsg "RUN: curl command returned $res\n";
        if(open(FILE, "<$verifylog")) {
            while(my $string = <FILE>) {
                logmsg "RUN: $string" if($string !~ /^([ \t]*)$/);
            }
            close(FILE);
        }
    }

    my $data;
    if(open(FILE, "<$verifyout")) {
        while(my $string = <FILE>) {
            $data = $string;
            last; # only want first line
        }
        close(FILE);
    }

    if($data && ($data =~ /WE ROOLZ: (\d+)/)) {
        $pid = 0+$1;
    }
    elsif($res == 6) {
        # curl: (6) Couldn't resolve host '::1'
        logmsg "RUN: failed to resolve host ($proto://$ip:$port/verifiedserver)\n";
        return -1;
    }
    elsif($data || ($res && ($res != 7))) {
        # 7 == CURLE_COULDNT_CONNECT: not up yet, keep trying; anything
        # else means some other server answered
        logmsg "RUN: Unknown server on our $server port: $port ($res)\n";
        return -1;
    }
    return $pid;
}
#######################################################################
# Verify that the server that runs on $ip, $port is our server. This also
# implies that we can speak with it, as there might be occasions when the
# server runs fine but we cannot talk to it ("Failed to connect to ::1: Can't
# assign requested address")
#
# Verify that the server on $ip, $port is our ftp-style (pingpong) test
# server by requesting "verifiedserver" and scanning the output for the
# "WE ROOLZ: <pid>" banner. Also times the verification and stores it in
# the file-scope $ftpchecktime as a host-speed measure. Returns the pid,
# 0 for an unknown server, -1 on coredump.
sub verifyftp {
    my ($proto, $ipvnum, $idnum, $ip, $port) = @_;
    my $server = servername_id($proto, $ipvnum, $idnum);
    my $pid = 0;
    my $time=time();
    my $extra="";

    my $verifylog = "$LOGDIR/".
        servername_canon($proto, $ipvnum, $idnum) .'_verify.log';
    unlink($verifylog) if(-f $verifylog);

    if($proto eq "ftps") {
        $extra .= "--insecure --ftp-ssl-control ";
    }
    elsif($proto eq "smtp") {
        # SMTP is a bit different since it requires more options and it
        # has _no_ output!
        $extra .= "--mail-rcpt verifiedserver ";
        $extra .= "--mail-from fake ";
        $extra .= "--upload /dev/null ";
        $extra .= "--stderr - "; # move stderr to parse the verbose stuff
    }

    my $flags = "--max-time $server_response_maxtime ";
    $flags .= "--silent ";
    $flags .= "--verbose ";
    $flags .= "--globoff ";
    $flags .= $extra;
    $flags .= "\"$proto://$ip:$port/verifiedserver\"";

    my $cmd = "$VCURL $flags 2>$verifylog";

    # check if this is our server running on this port:
    logmsg "RUN: $cmd\n" if($verbose);
    my @data = runclientoutput($cmd);

    my $res = $? >> 8; # rotate the result
    if($res & 128) {
        logmsg "RUN: curl command died with a coredump\n";
        return -1;
    }

    foreach my $line (@data) {
        if($line =~ /WE ROOLZ: (\d+)/) {
            # this is our test server with a known pid!
            $pid = 0+$1;
            last;
        }
    }
    if($pid <= 0 && @data && $data[0]) {
        # this is not a known server
        logmsg "RUN: Unknown server on our $server port: $port\n";
        return 0;
    }
    # we can/should use the time it took to verify the FTP server as a measure
    # on how fast/slow this host/FTP is.
    my $took = int(0.5+time()-$time);

    if($verbose) {
        logmsg "RUN: Verifying our test $server server took $took seconds\n";
    }
    $ftpchecktime = $took>=1?$took:1; # make sure it never is below 1

    return $pid;
}
#######################################################################
# Verify that the server that runs on $ip, $port is our server. This also
# implies that we can speak with it, as there might be occasions when the
# server runs fine but we cannot talk to it ("Failed to connect to ::1: Can't
# assign requested address")
#
# Verify that the server on $ip, $port is our RTSP test server. The
# probe itself uses http (the rtsp server also speaks enough http for
# this), matching "RTSP_SERVER WE ROOLZ: <pid>". Returns the pid, 0 when
# nothing answered, -1 on hard failure.
sub verifyrtsp {
    my ($proto, $ipvnum, $idnum, $ip, $port) = @_;
    my $server = servername_id($proto, $ipvnum, $idnum);
    my $pid = 0;

    my $verifyout = "$LOGDIR/".
        servername_canon($proto, $ipvnum, $idnum) .'_verify.out';
    unlink($verifyout) if(-f $verifyout);

    my $verifylog = "$LOGDIR/".
        servername_canon($proto, $ipvnum, $idnum) .'_verify.log';
    unlink($verifylog) if(-f $verifylog);

    my $flags = "--max-time $server_response_maxtime ";
    $flags .= "--output $verifyout ";
    $flags .= "--silent ";
    $flags .= "--verbose ";
    $flags .= "--globoff ";
    # currently verification is done using http
    $flags .= "\"http://$ip:$port/verifiedserver\"";

    my $cmd = "$VCURL $flags 2>$verifylog";

    # verify if our/any server is running on this port
    logmsg "RUN: $cmd\n" if($verbose);
    my $res = runclient($cmd);

    $res >>= 8; # rotate the result
    if($res & 128) {
        logmsg "RUN: curl command died with a coredump\n";
        return -1;
    }

    if($res && $verbose) {
        logmsg "RUN: curl command returned $res\n";
        if(open(FILE, "<$verifylog")) {
            while(my $string = <FILE>) {
                logmsg "RUN: $string" if($string !~ /^([ \t]*)$/);
            }
            close(FILE);
        }
    }

    my $data;
    if(open(FILE, "<$verifyout")) {
        while(my $string = <FILE>) {
            $data = $string;
            last; # only want first line
        }
        close(FILE);
    }

    if($data && ($data =~ /RTSP_SERVER WE ROOLZ: (\d+)/)) {
        $pid = 0+$1;
    }
    elsif($res == 6) {
        # curl: (6) Couldn't resolve host '::1'
        logmsg "RUN: failed to resolve host ($proto://$ip:$port/verifiedserver)\n";
        return -1;
    }
    elsif($data || ($res != 7)) {
        # NOTE(review): unlike verifyhttp, this branch lacks the "$res &&"
        # guard, so a clean run ($res==0) with unmatched output also gives
        # up instead of retrying — confirm this asymmetry is intended.
        logmsg "RUN: Unknown server on our $server port: $port\n";
        return -1;
    }
    return $pid;
}
#######################################################################
# Verify that the ssh server has written out its pidfile, recovering
# the pid from the file and returning it if a process with that pid is
# actually alive.
#
sub verifyssh {
    # The ssh daemon cannot be probed over the wire here; trust its
    # pidfile and check the recorded process is still alive. Returns the
    # pid, 0 when no pidfile could be read, or -1 when the server died
    # after starting up (pidfile is removed in that case).
    my ($proto, $ipvnum, $idnum, $ip, $port) = @_;
    my $server = servername_id($proto, $ipvnum, $idnum);
    my $pidfile = server_pidfilename($proto, $ipvnum, $idnum);
    my $pid = 0;
    if(open(my $pidfh, "<", $pidfile)) {
        $pid = 0 + <$pidfh>;
        close($pidfh);
    }
    # A readable pidfile is necessarily ours, since runsshserver()
    # unlinks any previous one before starting.
    if(($pid > 0) && !kill(0, $pid)) {
        logmsg "RUN: SSH server has died after starting up\n";
        checkdied($pid);
        unlink($pidfile);
        $pid = -1;
    }
    return $pid;
}
#######################################################################
# Verify that we can connect to the sftp server, properly authenticate
# with generated config and key files and run a simple remote pwd.
#
# Verify that we can connect to the sftp server, properly authenticate
# with generated config and key files and run a simple remote pwd.
# Returns 1 when the remote pwd response was seen in the log, 0 when the
# session failed, -1 when the sftp/ssh client binaries are missing.
sub verifysftp {
    my ($proto, $ipvnum, $idnum, $ip, $port) = @_;
    my $server = servername_id($proto, $ipvnum, $idnum);
    my $verified = 0;
    # Find out sftp client canonical file name
    my $sftp = find_sftp();
    if(!$sftp) {
        logmsg "RUN: SFTP server cannot find $sftpexe\n";
        return -1;
    }
    # Find out ssh client canonical file name
    my $ssh = find_ssh();
    if(!$ssh) {
        logmsg "RUN: SFTP server cannot find $sshexe\n";
        return -1;
    }
    # Connect to sftp server, authenticate and run a remote pwd
    # command using our generated configuration and key files
    my $cmd = "$sftp -b $sftpcmds -F $sftpconfig -S $ssh $ip > $sftplog 2>&1";
    my $res = runclient($cmd);
    # Search for pwd command response in log file
    if(open(SFTPLOGFILE, "<$sftplog")) {
        while(<SFTPLOGFILE>) {
            if(/^Remote working directory: /) {
                $verified = 1;
                last;
            }
        }
        close(SFTPLOGFILE);
    }
    return $verified;
}
#######################################################################
# Verify that the non-stunnel HTTP TLS extensions capable server that runs
# on $ip, $port is our server. This also implies that we can speak with it,
# as there might be occasions when the server runs fine but we cannot talk
# to it ("Failed to connect to ::1: Can't assign requested address")
#
# Verify that the non-stunnel HTTP TLS extensions capable server that runs
# on $ip, $port is our server: perform a TLS-SRP authenticated fetch with
# fixed test credentials, then look for "GNUTLS" in the response and a
# live pid in the server's pidfile. Returns the pid, 0 to keep retrying,
# -1 on hard failure.
sub verifyhttptls {
    my ($proto, $ipvnum, $idnum, $ip, $port) = @_;
    my $server = servername_id($proto, $ipvnum, $idnum);
    my $pidfile = server_pidfilename($proto, $ipvnum, $idnum);
    my $pid = 0;

    my $verifyout = "$LOGDIR/".
        servername_canon($proto, $ipvnum, $idnum) .'_verify.out';
    unlink($verifyout) if(-f $verifyout);

    my $verifylog = "$LOGDIR/".
        servername_canon($proto, $ipvnum, $idnum) .'_verify.log';
    unlink($verifylog) if(-f $verifylog);

    my $flags = "--max-time $server_response_maxtime ";
    $flags .= "--output $verifyout ";
    $flags .= "--verbose ";
    $flags .= "--globoff ";
    $flags .= "--insecure ";
    # fixed SRP test credentials known to the test server
    $flags .= "--tlsauthtype SRP ";
    $flags .= "--tlsuser jsmith ";
    $flags .= "--tlspassword abc ";
    $flags .= "\"https://$ip:$port/verifiedserver\"";

    my $cmd = "$VCURL $flags 2>$verifylog";

    # verify if our/any server is running on this port
    logmsg "RUN: $cmd\n" if($verbose);
    my $res = runclient($cmd);

    $res >>= 8; # rotate the result
    if($res & 128) {
        logmsg "RUN: curl command died with a coredump\n";
        return -1;
    }

    if($res && $verbose) {
        logmsg "RUN: curl command returned $res\n";
        if(open(FILE, "<$verifylog")) {
            while(my $string = <FILE>) {
                logmsg "RUN: $string" if($string !~ /^([ \t]*)$/);
            }
            close(FILE);
        }
    }

    my $data;
    if(open(FILE, "<$verifyout")) {
        # unlike the other verify subs, the whole response is collected
        while(my $string = <FILE>) {
            $data .= $string;
        }
        close(FILE);
    }

    if($data && ($data =~ /GNUTLS/) && open(FILE, "<$pidfile")) {
        $pid=0+<FILE>;
        close(FILE);
        if($pid > 0) {
            # if we have a pid it is actually our httptls server,
            # since runhttptlsserver() unlinks previous pidfile
            if(!kill(0, $pid)) {
                logmsg "RUN: $server server has died after starting up\n";
                checkdied($pid);
                unlink($pidfile);
                $pid = -1;
            }
        }
        return $pid;
    }
    elsif($res == 6) {
        # curl: (6) Couldn't resolve host '::1'
        logmsg "RUN: failed to resolve host (https://$ip:$port/verifiedserver)\n";
        return -1;
    }
    elsif($data || ($res && ($res != 7))) {
        logmsg "RUN: Unknown server on our $server port: $port ($res)\n";
        return -1;
    }
    return $pid;
}
#######################################################################
# STUB for verifying socks
#
sub verifysocks {
    # STUB: the SOCKS server is not probed over the wire; trust its
    # pidfile and check the recorded process is still alive. Returns the
    # pid, 0 when no pidfile could be read, or -1 when the server died
    # after starting up (pidfile is removed in that case).
    my ($proto, $ipvnum, $idnum, $ip, $port) = @_;
    my $server = servername_id($proto, $ipvnum, $idnum);
    my $pidfile = server_pidfilename($proto, $ipvnum, $idnum);
    my $pid = 0;
    if(open(my $pidfh, "<", $pidfile)) {
        $pid = 0 + <$pidfh>;
        close($pidfh);
    }
    # A readable pidfile is necessarily ours, since runsocksserver()
    # unlinks any previous one before starting.
    if(($pid > 0) && !kill(0, $pid)) {
        logmsg "RUN: SOCKS server has died after starting up\n";
        checkdied($pid);
        unlink($pidfile);
        $pid = -1;
    }
    return $pid;
}
#######################################################################
# Verify that the server that runs on $ip, $port is our server.
# Retry over several seconds before giving up. The ssh server in
# particular can take a long time to start if it needs to generate
# keys on a slow or loaded host.
#
# Just for convenience, test harness uses 'https' and 'httptls' literals
# as values for 'proto' variable in order to differentiate different
# servers. 'https' literal is used for stunnel based https test servers,
# and 'httptls' is used for non-stunnel https test servers.
#
# Dispatch table: protocol name -> verification sub. 'https' (stunnel
# wrapped) verifies like plain http; 'httptls' (non-stunnel) has its own
# TLS-SRP based check; all pingpong protocols share verifyftp.
my %protofunc = ('http' => \&verifyhttp,
                 'https' => \&verifyhttp,
                 'rtsp' => \&verifyrtsp,
                 'ftp' => \&verifyftp,
                 'pop3' => \&verifyftp,
                 'imap' => \&verifyftp,
                 'smtp' => \&verifyftp,
                 'ftps' => \&verifyftp,
                 'tftp' => \&verifyftp,
                 'ssh' => \&verifyssh,
                 'socks' => \&verifysocks,
                 'gopher' => \&verifyhttp,
                 'httptls' => \&verifyhttptls);

# Verify a freshly started server, retrying for up to 30 seconds (the
# ssh server in particular can be slow to generate keys on a loaded
# host). Returns the server pid, or 0 on hard failure / timeout.
sub verifyserver {
    my ($proto, $ipvnum, $idnum, $ip, $port) = @_;

    my $count = 30; # try for this many seconds
    my $pid;

    while($count--) {
        my $fun = $protofunc{$proto};

        $pid = &$fun($proto, $ipvnum, $idnum, $ip, $port);

        if($pid > 0) {
            last;
        }
        elsif($pid < 0) {
            # a real failure, stop trying and bail out
            return 0;
        }
        sleep(1);
    }
    return $pid;
}
#######################################################################
# Single shot server responsiveness test. This should only be used
# to verify that a server present in %run hash is still functional
#
# Single shot server responsiveness test. This should only be used
# to verify that a server present in %run hash is still functional.
# Returns 1 when responsive, 0 otherwise.
sub responsiveserver {
    my ($proto, $ipvnum, $idnum, $ip, $port) = @_;
    # temporarily silence the global verbose flag for this one probe,
    # restored before returning
    my $prev_verbose = $verbose;

    $verbose = 0;
    my $fun = $protofunc{$proto};
    my $pid = &$fun($proto, $ipvnum, $idnum, $ip, $port);
    $verbose = $prev_verbose;

    if($pid > 0) {
        return 1; # responsive
    }

    my $srvrname = servername_str($proto, $ipvnum, $idnum);
    logmsg " server precheck FAILED (unresponsive $srvrname server)\n";
    return 0;
}
#######################################################################
# start the http server
#
# Start the http server (also used for gopher and for the CONNECT proxy
# variant). $alt selects "ipv6" (different address family) or "proxy"
# (same server, id number 2). Returns ($httppid, $pid2) on success or
# (0,0) on failure; failures are remembered in %doesntrun so the same
# broken server is not retried.
sub runhttpserver {
    my ($proto, $verbose, $alt, $port) = @_;
    my $ip = $HOSTIP;
    my $ipvnum = 4;
    my $idnum = 1;
    my $server;
    my $srvrname;
    my $pidfile;
    my $logfile;
    my $flags = "";

    if($alt eq "ipv6") {
        # if IPv6, use a different setup
        $ipvnum = 6;
        $ip = $HOST6IP;
    }
    elsif($alt eq "proxy") {
        # basically the same, but another ID
        $idnum = 2;
    }

    $server = servername_id($proto, $ipvnum, $idnum);

    $pidfile = $serverpidfile{$server};

    # don't retry if the server doesn't work
    if ($doesntrun{$pidfile}) {
        return (0,0);
    }

    # take down any leftover instance before starting a fresh one
    my $pid = processexists($pidfile);
    if($pid > 0) {
        stopserver($server, "$pid");
    }
    unlink($pidfile) if(-f $pidfile);

    $srvrname = servername_str($proto, $ipvnum, $idnum);

    $logfile = server_logfilename($LOGDIR, $proto, $ipvnum, $idnum);

    $flags .= "--gopher " if($proto eq "gopher");
    $flags .= "--connect $HOSTIP " if($alt eq "proxy");
    $flags .= "--verbose " if($debugprotocol);
    $flags .= "--pidfile \"$pidfile\" --logfile \"$logfile\" ";
    $flags .= "--id $idnum " if($idnum > 1);
    $flags .= "--ipv$ipvnum --port $port --srcdir \"$srcdir\"";

    my $cmd = "$perl $srcdir/httpserver.pl $flags";
    my ($httppid, $pid2) = startnew($cmd, $pidfile, 15, 0);

    if($httppid <= 0 || !kill(0, $httppid)) {
        # it is NOT alive
        logmsg "RUN: failed to start the $srvrname server\n";
        stopserver($server, "$pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }

    # Server is up. Verify that we can speak to it.
    my $pid3 = verifyserver($proto, $ipvnum, $idnum, $ip, $port);
    if(!$pid3) {
        logmsg "RUN: $srvrname server failed verification\n";
        # failed to talk to it properly. Kill the server and return failure
        stopserver($server, "$httppid $pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }
    # prefer the pid the server reported about itself (it may have forked)
    $pid2 = $pid3;

    if($verbose) {
        logmsg "RUN: $srvrname server is now running PID $httppid\n";
    }

    sleep(1);

    return ($httppid, $pid2);
}
#######################################################################
# start the https stunnel based server
#
sub runhttpsserver {
    # Start the stunnel-based HTTPS tunnel in front of the HTTP server and
    # verify it. $ipv6 is a string whose trailing "6" selects IPv6;
    # $certfile overrides the default 'stunnel.pem' certificate. Returns
    # ($httpspid, $pid2), or (0,0) on failure or when stunnel is missing.
    my ($verbose, $ipv6, $certfile) = @_;
    my $proto = 'https';
    my $ip = ($ipv6 && ($ipv6 =~ /6$/)) ? "$HOST6IP" : "$HOSTIP";
    my $ipvnum = ($ipv6 && ($ipv6 =~ /6$/)) ? 6 : 4;
    my $idnum = 1;
    my $server;
    my $srvrname;
    my $pidfile;
    my $logfile;
    my $flags = "";

    # no stunnel binary configured: HTTPS testing is not possible
    if(!$stunnel) {
        return (0,0);
    }

    $server = servername_id($proto, $ipvnum, $idnum);

    $pidfile = $serverpidfile{$server};

    # don't retry if the server doesn't work
    if ($doesntrun{$pidfile}) {
        return (0,0);
    }

    # kill any lingering instance from a previous run before restarting
    my $pid = processexists($pidfile);
    if($pid > 0) {
        stopserver($server, "$pid");
    }
    unlink($pidfile) if(-f $pidfile);

    $srvrname = servername_str($proto, $ipvnum, $idnum);

    $certfile = 'stunnel.pem' unless($certfile);

    $logfile = server_logfilename($LOGDIR, $proto, $ipvnum, $idnum);

    $flags .= "--verbose " if($debugprotocol);
    $flags .= "--pidfile \"$pidfile\" --logfile \"$logfile\" ";
    $flags .= "--id $idnum " if($idnum > 1);
    $flags .= "--ipv$ipvnum --proto $proto ";
    $flags .= "--certfile \"$certfile\" " if($certfile ne 'stunnel.pem');
    $flags .= "--stunnel \"$stunnel\" --srcdir \"$srcdir\" ";
    $flags .= "--connect $HTTPPORT --accept $HTTPSPORT";

    my $cmd = "$perl $srcdir/secureserver.pl $flags";
    my ($httpspid, $pid2) = startnew($cmd, $pidfile, 15, 0);

    if($httpspid <= 0 || !kill(0, $httpspid)) {
        # it is NOT alive
        logmsg "RUN: failed to start the $srvrname server\n";
        stopserver($server, "$pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return(0,0);
    }

    # Server is up. Verify that we can speak to it.
    my $pid3 = verifyserver($proto, $ipvnum, $idnum, $ip, $HTTPSPORT);
    if(!$pid3) {
        logmsg "RUN: $srvrname server failed verification\n";
        # failed to talk to it properly. Kill the server and return failure
        stopserver($server, "$httpspid $pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }
    # Here pid3 is actually the pid returned by the unsecure-http server,
    # so unlike the other starters we do NOT overwrite $pid2 with it.

    # remember which certificate this server instance runs with
    $runcert{$server} = $certfile;

    if($verbose) {
        logmsg "RUN: $srvrname server is now running PID $httpspid\n";
    }

    sleep(1);

    return ($httpspid, $pid2);
}
#######################################################################
# start the non-stunnel HTTP TLS extensions capable server
#
sub runhttptlsserver {
    # Start the non-stunnel TLS-SRP capable HTTP server ($httptlssrv) and
    # verify it. $ipv6 is a string whose trailing "6" selects IPv6.
    # Returns ($httptlspid, $pid2), or (0,0) on failure or when no
    # httptls server binary is configured.
    my ($verbose, $ipv6) = @_;
    my $proto = "httptls";
    my $port = ($ipv6 && ($ipv6 =~ /6$/)) ? $HTTPTLS6PORT : $HTTPTLSPORT;
    my $ip = ($ipv6 && ($ipv6 =~ /6$/)) ? "$HOST6IP" : "$HOSTIP";
    my $ipvnum = ($ipv6 && ($ipv6 =~ /6$/)) ? 6 : 4;
    my $idnum = 1;
    my $server;
    my $srvrname;
    my $pidfile;
    my $logfile;
    my $flags = "";

    if(!$httptlssrv) {
        return (0,0);
    }

    $server = servername_id($proto, $ipvnum, $idnum);

    $pidfile = $serverpidfile{$server};

    # don't retry if the server doesn't work
    if ($doesntrun{$pidfile}) {
        return (0,0);
    }

    # kill any lingering instance from a previous run before restarting
    my $pid = processexists($pidfile);
    if($pid > 0) {
        stopserver($server, "$pid");
    }
    unlink($pidfile) if(-f $pidfile);

    $srvrname = servername_str($proto, $ipvnum, $idnum);

    $logfile = server_logfilename($LOGDIR, $proto, $ipvnum, $idnum);

    $flags .= "--http ";
    $flags .= "--debug 1 " if($debugprotocol);
    $flags .= "--port $port ";
    $flags .= "--srppasswd certs/srp-verifier-db ";
    $flags .= "--srppasswdconf certs/srp-verifier-conf";

    # the external server writes no pidfile of its own, hence the fake one
    my $cmd = "$httptlssrv $flags > $logfile 2>&1";
    my ($httptlspid, $pid2) = startnew($cmd, $pidfile, 10, 1); # fake pidfile

    if($httptlspid <= 0 || !kill(0, $httptlspid)) {
        # it is NOT alive
        logmsg "RUN: failed to start the $srvrname server\n";
        stopserver($server, "$pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }

    # Server is up. Verify that we can speak to it. PID is from fake pidfile
    my $pid3 = verifyserver($proto, $ipvnum, $idnum, $ip, $port);
    if(!$pid3) {
        logmsg "RUN: $srvrname server failed verification\n";
        # failed to talk to it properly. Kill the server and return failure
        stopserver($server, "$httptlspid $pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }
    # prefer the pid recovered during verification from here on
    $pid2 = $pid3;

    if($verbose) {
        logmsg "RUN: $srvrname server is now running PID $httptlspid\n";
    }

    sleep(1);

    return ($httptlspid, $pid2);
}
#######################################################################
# start the pingpong server (FTP, POP3, IMAP, SMTP)
#
sub runpingpongserver {
    # Start one of the "pingpong" protocol servers (ftp, pop3, imap or
    # smtp, all driven by ftpserver.pl) and verify it. $id selects a
    # secondary instance (only >1 and numeric counts); $ipv6 is a string
    # whose trailing "6" selects IPv6. Returns ($pid2, $ftppid) — note the
    # order is reversed compared to runhttpserver — or a plain 0 for an
    # unsupported protocol, or (0,0) on startup/verification failure.
    my ($proto, $id, $verbose, $ipv6) = @_;
    my $port;
    my $ip = ($ipv6 && ($ipv6 =~ /6$/)) ? "$HOST6IP" : "$HOSTIP";
    my $ipvnum = ($ipv6 && ($ipv6 =~ /6$/)) ? 6 : 4;
    my $idnum = ($id && ($id =~ /^(\d+)$/) && ($id > 1)) ? $id : 1;
    my $server;
    my $srvrname;
    my $pidfile;
    my $logfile;
    my $flags = "";

    # pick the port matching protocol, instance id and IP version
    if($proto eq "ftp") {
        $port = ($idnum>1)?$FTP2PORT:$FTPPORT;

        if($ipvnum==6) {
            # if IPv6, use a different setup
            $port = $FTP6PORT;
        }
    }
    elsif($proto eq "pop3") {
        $port = ($ipvnum==6) ? $POP36PORT : $POP3PORT;
    }
    elsif($proto eq "imap") {
        $port = ($ipvnum==6) ? $IMAP6PORT : $IMAPPORT;
    }
    elsif($proto eq "smtp") {
        $port = ($ipvnum==6) ? $SMTP6PORT : $SMTPPORT;
    }
    else {
        print STDERR "Unsupported protocol $proto!!\n";
        return 0;
    }

    $server = servername_id($proto, $ipvnum, $idnum);

    $pidfile = $serverpidfile{$server};

    # don't retry if the server doesn't work
    if ($doesntrun{$pidfile}) {
        return (0,0);
    }

    # kill any lingering instance from a previous run before restarting
    my $pid = processexists($pidfile);
    if($pid > 0) {
        stopserver($server, "$pid");
    }
    unlink($pidfile) if(-f $pidfile);

    $srvrname = servername_str($proto, $ipvnum, $idnum);

    $logfile = server_logfilename($LOGDIR, $proto, $ipvnum, $idnum);

    $flags .= "--verbose " if($debugprotocol);
    $flags .= "--pidfile \"$pidfile\" --logfile \"$logfile\" ";
    $flags .= "--srcdir \"$srcdir\" --proto $proto ";
    $flags .= "--id $idnum " if($idnum > 1);
    $flags .= "--ipv$ipvnum --port $port --addr \"$ip\"";

    my $cmd = "$perl $srcdir/ftpserver.pl $flags";
    my ($ftppid, $pid2) = startnew($cmd, $pidfile, 15, 0);

    if($ftppid <= 0 || !kill(0, $ftppid)) {
        # it is NOT alive
        logmsg "RUN: failed to start the $srvrname server\n";
        stopserver($server, "$pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }

    # Server is up. Verify that we can speak to it.
    my $pid3 = verifyserver($proto, $ipvnum, $idnum, $ip, $port);
    if(!$pid3) {
        logmsg "RUN: $srvrname server failed verification\n";
        # failed to talk to it properly. Kill the server and return failure
        stopserver($server, "$ftppid $pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }
    # prefer the pid recovered during verification from here on
    $pid2 = $pid3;

    if($verbose) {
        logmsg "RUN: $srvrname server is now running PID $ftppid\n";
    }

    sleep(1);

    return ($pid2, $ftppid);
}
#######################################################################
# start the ftps server (or rather, tunnel)
#
sub runftpsserver {
    # Start the stunnel-based FTPS tunnel in front of the FTP server and
    # verify it. $ipv6 is a string whose trailing "6" selects IPv6;
    # $certfile overrides the default 'stunnel.pem' certificate. Returns
    # ($ftpspid, $pid2), or (0,0) on failure or when stunnel is missing.
    my ($verbose, $ipv6, $certfile) = @_;
    my $proto = 'ftps';
    my $ip = ($ipv6 && ($ipv6 =~ /6$/)) ? "$HOST6IP" : "$HOSTIP";
    my $ipvnum = ($ipv6 && ($ipv6 =~ /6$/)) ? 6 : 4;
    my $idnum = 1;
    my $server;
    my $srvrname;
    my $pidfile;
    my $logfile;
    my $flags = "";

    # no stunnel binary configured: FTPS testing is not possible
    if(!$stunnel) {
        return (0,0);
    }

    $server = servername_id($proto, $ipvnum, $idnum);

    $pidfile = $serverpidfile{$server};

    # don't retry if the server doesn't work
    if ($doesntrun{$pidfile}) {
        return (0,0);
    }

    # kill any lingering instance from a previous run before restarting
    my $pid = processexists($pidfile);
    if($pid > 0) {
        stopserver($server, "$pid");
    }
    unlink($pidfile) if(-f $pidfile);

    $srvrname = servername_str($proto, $ipvnum, $idnum);

    $certfile = 'stunnel.pem' unless($certfile);

    $logfile = server_logfilename($LOGDIR, $proto, $ipvnum, $idnum);

    $flags .= "--verbose " if($debugprotocol);
    $flags .= "--pidfile \"$pidfile\" --logfile \"$logfile\" ";
    $flags .= "--id $idnum " if($idnum > 1);
    $flags .= "--ipv$ipvnum --proto $proto ";
    $flags .= "--certfile \"$certfile\" " if($certfile ne 'stunnel.pem');
    $flags .= "--stunnel \"$stunnel\" --srcdir \"$srcdir\" ";
    $flags .= "--connect $FTPPORT --accept $FTPSPORT";

    my $cmd = "$perl $srcdir/secureserver.pl $flags";
    my ($ftpspid, $pid2) = startnew($cmd, $pidfile, 15, 0);

    if($ftpspid <= 0 || !kill(0, $ftpspid)) {
        # it is NOT alive
        logmsg "RUN: failed to start the $srvrname server\n";
        stopserver($server, "$pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return(0,0);
    }

    # Server is up. Verify that we can speak to it.
    my $pid3 = verifyserver($proto, $ipvnum, $idnum, $ip, $FTPSPORT);
    if(!$pid3) {
        logmsg "RUN: $srvrname server failed verification\n";
        # failed to talk to it properly. Kill the server and return failure
        stopserver($server, "$ftpspid $pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }
    # Here pid3 is actually the pid returned by the unsecure-ftp server,
    # so unlike the other starters we do NOT overwrite $pid2 with it.

    # remember which certificate this server instance runs with
    $runcert{$server} = $certfile;

    if($verbose) {
        logmsg "RUN: $srvrname server is now running PID $ftpspid\n";
    }

    sleep(1);

    return ($ftpspid, $pid2);
}
#######################################################################
# start the tftp server
#
sub runtftpserver {
    # Start the TFTP test server (tftpserver.pl) and verify it. $id
    # selects a secondary instance (only >1 and numeric counts); a true
    # $ipv6 switches to the IPv6 address and port. Returns
    # ($pid2, $tftppid) — note the reversed order — or (0,0) on failure.
    my ($id, $verbose, $ipv6) = @_;
    my $port = $TFTPPORT;
    my $ip = $HOSTIP;
    my $proto = 'tftp';
    my $ipvnum = 4;
    my $idnum = ($id && ($id =~ /^(\d+)$/) && ($id > 1)) ? $id : 1;
    my $server;
    my $srvrname;
    my $pidfile;
    my $logfile;
    my $flags = "";

    if($ipv6) {
        # if IPv6, use a different setup
        $ipvnum = 6;
        $port = $TFTP6PORT;
        $ip = $HOST6IP;
    }

    $server = servername_id($proto, $ipvnum, $idnum);

    $pidfile = $serverpidfile{$server};

    # don't retry if the server doesn't work
    if ($doesntrun{$pidfile}) {
        return (0,0);
    }

    # kill any lingering instance from a previous run before restarting
    my $pid = processexists($pidfile);
    if($pid > 0) {
        stopserver($server, "$pid");
    }
    unlink($pidfile) if(-f $pidfile);

    $srvrname = servername_str($proto, $ipvnum, $idnum);

    $logfile = server_logfilename($LOGDIR, $proto, $ipvnum, $idnum);

    $flags .= "--verbose " if($debugprotocol);
    $flags .= "--pidfile \"$pidfile\" --logfile \"$logfile\" ";
    $flags .= "--id $idnum " if($idnum > 1);
    $flags .= "--ipv$ipvnum --port $port --srcdir \"$srcdir\"";

    my $cmd = "$perl $srcdir/tftpserver.pl $flags";
    my ($tftppid, $pid2) = startnew($cmd, $pidfile, 15, 0);

    if($tftppid <= 0 || !kill(0, $tftppid)) {
        # it is NOT alive
        logmsg "RUN: failed to start the $srvrname server\n";
        stopserver($server, "$pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }

    # Server is up. Verify that we can speak to it.
    my $pid3 = verifyserver($proto, $ipvnum, $idnum, $ip, $port);
    if(!$pid3) {
        logmsg "RUN: $srvrname server failed verification\n";
        # failed to talk to it properly. Kill the server and return failure
        stopserver($server, "$tftppid $pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }
    # prefer the pid recovered during verification from here on
    $pid2 = $pid3;

    if($verbose) {
        logmsg "RUN: $srvrname server is now running PID $tftppid\n";
    }

    sleep(1);

    return ($pid2, $tftppid);
}
#######################################################################
# start the rtsp server
#
sub runrtspserver {
    # Start the RTSP test server (rtspserver.pl) and verify it. A true
    # $ipv6 switches to the IPv6 address and port. Returns
    # ($rtsppid, $pid2), or (0,0) on failure.
    my ($verbose, $ipv6) = @_;
    my $port = $RTSPPORT;
    my $ip = $HOSTIP;
    my $proto = 'rtsp';
    my $ipvnum = 4;
    my $idnum = 1;
    my $server;
    my $srvrname;
    my $pidfile;
    my $logfile;
    my $flags = "";

    if($ipv6) {
        # if IPv6, use a different setup
        $ipvnum = 6;
        $port = $RTSP6PORT;
        $ip = $HOST6IP;
    }

    $server = servername_id($proto, $ipvnum, $idnum);

    $pidfile = $serverpidfile{$server};

    # don't retry if the server doesn't work
    if ($doesntrun{$pidfile}) {
        return (0,0);
    }

    # kill any lingering instance from a previous run before restarting
    my $pid = processexists($pidfile);
    if($pid > 0) {
        stopserver($server, "$pid");
    }
    unlink($pidfile) if(-f $pidfile);

    $srvrname = servername_str($proto, $ipvnum, $idnum);

    $logfile = server_logfilename($LOGDIR, $proto, $ipvnum, $idnum);

    $flags .= "--verbose " if($debugprotocol);
    $flags .= "--pidfile \"$pidfile\" --logfile \"$logfile\" ";
    $flags .= "--id $idnum " if($idnum > 1);
    $flags .= "--ipv$ipvnum --port $port --srcdir \"$srcdir\"";

    my $cmd = "$perl $srcdir/rtspserver.pl $flags";
    my ($rtsppid, $pid2) = startnew($cmd, $pidfile, 15, 0);

    if($rtsppid <= 0 || !kill(0, $rtsppid)) {
        # it is NOT alive
        logmsg "RUN: failed to start the $srvrname server\n";
        stopserver($server, "$pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }

    # Server is up. Verify that we can speak to it.
    my $pid3 = verifyserver($proto, $ipvnum, $idnum, $ip, $port);
    if(!$pid3) {
        logmsg "RUN: $srvrname server failed verification\n";
        # failed to talk to it properly. Kill the server and return failure
        stopserver($server, "$rtsppid $pid2");
        displaylogs($testnumcheck);
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }
    # prefer the pid recovered during verification from here on
    $pid2 = $pid3;

    if($verbose) {
        logmsg "RUN: $srvrname server is now running PID $rtsppid\n";
    }

    sleep(1);

    return ($rtsppid, $pid2);
}
#######################################################################
# Start the ssh (scp/sftp) server
#
sub runsshserver {
    # Start the ssh daemon used for SCP, SFTP and SOCKS tests via
    # sshserver.pl, then verify both the ssh and sftp sides. $id selects a
    # secondary instance (only >1 and numeric counts); $ipv6 is currently
    # unused here (IPv4 only). Returns ($pid2, $sshpid) — note the
    # reversed order — or (0,0) on failure.
    my ($id, $verbose, $ipv6) = @_;
    my $ip=$HOSTIP;
    my $port = $SSHPORT;
    my $socksport = $SOCKSPORT;
    my $proto = 'ssh';
    my $ipvnum = 4;
    my $idnum = ($id && ($id =~ /^(\d+)$/) && ($id > 1)) ? $id : 1;
    my $server;
    my $srvrname;
    my $pidfile;
    my $logfile;
    my $flags = "";

    $server = servername_id($proto, $ipvnum, $idnum);

    $pidfile = $serverpidfile{$server};

    # don't retry if the server doesn't work
    if ($doesntrun{$pidfile}) {
        return (0,0);
    }

    # kill any lingering instance from a previous run before restarting
    my $pid = processexists($pidfile);
    if($pid > 0) {
        stopserver($server, "$pid");
    }
    unlink($pidfile) if(-f $pidfile);

    $srvrname = servername_str($proto, $ipvnum, $idnum);

    $logfile = server_logfilename($LOGDIR, $proto, $ipvnum, $idnum);

    # note: --verbose follows the harness $verbose flag here, not
    # $debugprotocol as the other server starters do
    $flags .= "--verbose " if($verbose);
    $flags .= "--debugprotocol " if($debugprotocol);
    $flags .= "--pidfile \"$pidfile\" ";
    $flags .= "--id $idnum " if($idnum > 1);
    $flags .= "--ipv$ipvnum --addr \"$ip\" ";
    $flags .= "--sshport $port --socksport $socksport ";
    $flags .= "--user \"$USER\"";

    my $cmd = "$perl $srcdir/sshserver.pl $flags";
    my ($sshpid, $pid2) = startnew($cmd, $pidfile, 60, 0);

    # on loaded systems sshserver start up can take longer than the timeout
    # passed to startnew, when this happens startnew completes without being
    # able to read the pidfile and consequently returns a zero pid2 above.

    if($sshpid <= 0 || !kill(0, $sshpid)) {
        # it is NOT alive
        logmsg "RUN: failed to start the $srvrname server\n";
        stopserver($server, "$pid2");
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }

    # ssh server verification allows some extra time for the server to start up
    # and gives us the opportunity of recovering the pid from the pidfile, when
    # this verification succeeds the recovered pid is assigned to pid2.

    my $pid3 = verifyserver($proto, $ipvnum, $idnum, $ip, $port);
    if(!$pid3) {
        logmsg "RUN: $srvrname server failed verification\n";
        # failed to fetch server pid. Kill the server and return failure
        stopserver($server, "$sshpid $pid2");
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }
    $pid2 = $pid3;

    # once it is known that the ssh server is alive, sftp server verification
    # is performed actually connecting to it, authenticating and performing a
    # very simple remote command. This verification is tried only one time.

    # $sshdlog and $sftplog are file-scope globals used by the display helpers
    $sshdlog = server_logfilename($LOGDIR, 'ssh', $ipvnum, $idnum);
    $sftplog = server_logfilename($LOGDIR, 'sftp', $ipvnum, $idnum);

    if(verifysftp('sftp', $ipvnum, $idnum, $ip, $port) < 1) {
        logmsg "RUN: SFTP server failed verification\n";
        # failed to talk to it properly. Kill the server and return failure
        display_sftplog();
        display_sftpconfig();
        display_sshdlog();
        display_sshdconfig();
        stopserver($server, "$sshpid $pid2");
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }

    if($verbose) {
        logmsg "RUN: $srvrname server is now running PID $pid2\n";
    }

    return ($pid2, $sshpid);
}
#######################################################################
# Start the socks server
#
sub runsocksserver {
    # Start the SOCKS server: an ssh client (-N, no remote command) driven
    # by the config file written earlier by sshserver.pl. Requires the ssh
    # server started by runsshserver() to be running already, and a
    # sufficiently recent OpenSSH/SunSSH client and daemon pair. Returns
    # ($pid2, $sshpid) — note the reversed order — or (0,0) on failure.
    my ($id, $verbose, $ipv6) = @_;
    my $ip=$HOSTIP;
    my $port = $SOCKSPORT;
    my $proto = 'socks';
    my $ipvnum = 4;
    my $idnum = ($id && ($id =~ /^(\d+)$/) && ($id > 1)) ? $id : 1;
    my $server;
    my $srvrname;
    my $pidfile;
    my $logfile;
    my $flags = "";

    $server = servername_id($proto, $ipvnum, $idnum);

    $pidfile = $serverpidfile{$server};

    # don't retry if the server doesn't work
    if ($doesntrun{$pidfile}) {
        return (0,0);
    }

    # kill any lingering instance from a previous run before restarting
    my $pid = processexists($pidfile);
    if($pid > 0) {
        stopserver($server, "$pid");
    }
    unlink($pidfile) if(-f $pidfile);

    $srvrname = servername_str($proto, $ipvnum, $idnum);

    $logfile = server_logfilename($LOGDIR, $proto, $ipvnum, $idnum);

    # The ssh server must be already running
    if(!$run{'ssh'}) {
        logmsg "RUN: SOCKS server cannot find running SSH server\n";
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }

    # Find out ssh daemon canonical file name
    my $sshd = find_sshd();
    if(!$sshd) {
        logmsg "RUN: SOCKS server cannot find $sshdexe\n";
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }

    # Find out ssh daemon version info
    # ($sshdid and friends are file-scope globals, deliberately not 'my')
    ($sshdid, $sshdvernum, $sshdverstr, $sshderror) = sshversioninfo($sshd);
    if(!$sshdid) {
        # Not an OpenSSH or SunSSH ssh daemon
        logmsg "$sshderror\n" if($verbose);
        logmsg "SCP, SFTP and SOCKS tests require OpenSSH 2.9.9 or later\n";
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }
    logmsg "ssh server found $sshd is $sshdverstr\n" if($verbose);

    # Find out ssh client canonical file name
    my $ssh = find_ssh();
    if(!$ssh) {
        logmsg "RUN: SOCKS server cannot find $sshexe\n";
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }

    # Find out ssh client version info
    my ($sshid, $sshvernum, $sshverstr, $ssherror) = sshversioninfo($ssh);
    if(!$sshid) {
        # Not an OpenSSH or SunSSH ssh client
        logmsg "$ssherror\n" if($verbose);
        logmsg "SCP, SFTP and SOCKS tests require OpenSSH 2.9.9 or later\n";
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }

    # Verify minimum ssh client version
    if((($sshid =~ /OpenSSH/) && ($sshvernum < 299)) ||
       (($sshid =~ /SunSSH/)  && ($sshvernum < 100))) {
        logmsg "ssh client found $ssh is $sshverstr\n";
        logmsg "SCP, SFTP and SOCKS tests require OpenSSH 2.9.9 or later\n";
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }
    logmsg "ssh client found $ssh is $sshverstr\n" if($verbose);

    # Verify if ssh client and ssh daemon versions match
    if(($sshdid ne $sshid) || ($sshdvernum != $sshvernum)) {
        # Our test harness might work with slightly mismatched versions
        logmsg "Warning: version mismatch: sshd $sshdverstr - ssh $sshverstr\n"
            if($verbose);
    }

    # Config file options for ssh client are previously set from sshserver.pl
    if(! -e $sshconfig) {
        logmsg "RUN: SOCKS server cannot find $sshconfig\n";
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }

    # $sshlog is a file-scope global used by display_sshlog()
    $sshlog  = server_logfilename($LOGDIR, 'socks', $ipvnum, $idnum);

    # start our socks server
    my $cmd="$ssh -N -F $sshconfig $ip > $sshlog 2>&1";
    my ($sshpid, $pid2) = startnew($cmd, $pidfile, 30, 1); # fake pidfile

    if($sshpid <= 0 || !kill(0, $sshpid)) {
        # it is NOT alive
        logmsg "RUN: failed to start the $srvrname server\n";
        display_sshlog();
        display_sshconfig();
        display_sshdlog();
        display_sshdconfig();
        stopserver($server, "$pid2");
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }

    # Ugly hack but ssh doesn't support pid files. PID is from fake pidfile.
    my $pid3 = verifyserver($proto, $ipvnum, $idnum, $ip, $port);
    if(!$pid3) {
        logmsg "RUN: $srvrname server failed verification\n";
        # failed to talk to it properly. Kill the server and return failure
        stopserver($server, "$sshpid $pid2");
        $doesntrun{$pidfile} = 1;
        return (0,0);
    }
    $pid2 = $pid3;

    if($verbose) {
        logmsg "RUN: $srvrname server is now running PID $pid2\n";
    }

    return ($pid2, $sshpid);
}
#######################################################################
# Single shot http and gopher server responsiveness test. This should only
# be used to verify that a server present in %run hash is still functional
#
sub responsive_http_server {
    # Single-shot responsiveness probe of an already-started http/gopher
    # server; $alt picks the "ipv6" or "proxy" variant.
    my ($proto, $verbose, $alt, $port) = @_;
    # defaults: plain IPv4 server, instance id 1
    my ($ip, $ipvnum, $idnum) = ($HOSTIP, 4, 1);

    if($alt eq "ipv6") {
        # probe the IPv6 flavor instead
        ($ipvnum, $ip) = (6, $HOST6IP);
    }
    elsif($alt eq "proxy") {
        # same server kind, but the secondary (proxy) instance
        $idnum = 2;
    }

    return &responsiveserver($proto, $ipvnum, $idnum, $ip, $port);
}
#######################################################################
# Single shot pingpong server responsiveness test. This should only be
# used to verify that a server present in %run hash is still functional
#
sub responsive_pingpong_server {
    # Single-shot responsiveness probe of an already-started pingpong
    # server (ftp/pop3/imap/smtp). Returns 0 for unknown protocols.
    my ($proto, $id, $verbose, $ipv6) = @_;
    my $use6   = ($ipv6 && ($ipv6 =~ /6$/)) ? 1 : 0;
    my $ip     = $use6 ? "$HOST6IP" : "$HOSTIP";
    my $ipvnum = $use6 ? 6 : 4;
    my $idnum  = ($id && ($id =~ /^(\d+)$/) && ($id > 1)) ? $id : 1;
    my $port;

    if($proto eq "ftp") {
        # ftp alone has a secondary-instance port; IPv6 overrides both
        $port = $use6 ? $FTP6PORT : (($idnum > 1) ? $FTP2PORT : $FTPPORT);
    }
    elsif($proto eq "pop3") {
        $port = $use6 ? $POP36PORT : $POP3PORT;
    }
    elsif($proto eq "imap") {
        $port = $use6 ? $IMAP6PORT : $IMAPPORT;
    }
    elsif($proto eq "smtp") {
        $port = $use6 ? $SMTP6PORT : $SMTPPORT;
    }
    else {
        print STDERR "Unsupported protocol $proto!!\n";
        return 0;
    }

    return &responsiveserver($proto, $ipvnum, $idnum, $ip, $port);
}
#######################################################################
# Single shot rtsp server responsiveness test. This should only be
# used to verify that a server present in %run hash is still functional
#
sub responsive_rtsp_server {
    # Single-shot responsiveness probe of an already-started RTSP server.
    my ($verbose, $ipv6) = @_;
    my $proto = 'rtsp';
    # pick address family specific address/port in one go
    my $ipvnum = $ipv6 ? 6 : 4;
    my $ip     = $ipv6 ? $HOST6IP : $HOSTIP;
    my $port   = $ipv6 ? $RTSP6PORT : $RTSPPORT;
    my $idnum  = 1;

    return &responsiveserver($proto, $ipvnum, $idnum, $ip, $port);
}
#######################################################################
# Single shot tftp server responsiveness test. This should only be
# used to verify that a server present in %run hash is still functional
#
sub responsive_tftp_server {
    # Single-shot responsiveness probe of an already-started TFTP server.
    my ($id, $verbose, $ipv6) = @_;
    my $proto = 'tftp';
    # pick address family specific address/port in one go
    my $ipvnum = $ipv6 ? 6 : 4;
    my $ip     = $ipv6 ? $HOST6IP : $HOSTIP;
    my $port   = $ipv6 ? $TFTP6PORT : $TFTPPORT;
    # a numeric id above 1 selects the secondary instance
    my $idnum  = ($id && ($id =~ /^(\d+)$/) && ($id > 1)) ? $id : 1;

    return &responsiveserver($proto, $ipvnum, $idnum, $ip, $port);
}
#######################################################################
# Single shot non-stunnel HTTP TLS extensions capable server
# responsiveness test. This should only be used to verify that a
# server present in %run hash is still functional
#
sub responsive_httptls_server {
    # Single-shot responsiveness probe of the TLS-SRP capable httptls
    # server; a $ipv6 string ending in "6" selects the IPv6 instance.
    my ($verbose, $ipv6) = @_;
    my $proto = "httptls";
    my $use6  = ($ipv6 && ($ipv6 =~ /6$/)) ? 1 : 0;
    my $port  = $use6 ? $HTTPTLS6PORT : $HTTPTLSPORT;
    my $ip    = $use6 ? "$HOST6IP" : "$HOSTIP";

    return &responsiveserver($proto, ($use6 ? 6 : 4), 1, $ip, $port);
}
#######################################################################
# Remove all files in the specified directory
#
sub cleardir {
    # Remove all non-hidden files in the given directory.
    # Returns the number of entries unlinked (0 for an empty directory),
    # or 0 if the directory could not be opened.
    #
    # Fixes vs the previous version: $count is now initialized, so an
    # empty directory yields a defined 0 rather than undef; a lexical
    # directory handle replaces the package-global bareword DIR.
    my $dir = $_[0];
    my $count = 0;
    my $file;

    # Get all files
    opendir(my $dh, $dir) ||
        return 0; # can't open dir
    while($file = readdir($dh)) {
        # skip dot files (".", "..", and hidden entries) on purpose
        if($file !~ /^\./) {
            unlink("$dir/$file");
            $count++;
        }
    }
    closedir $dh;
    return $count;
}
#######################################################################
# filter out the specified pattern from the given input file and store the
# results in the given output file
#
sub filteroff {
    # Copy $infile to $ofile, deleting every match of the regex $filter
    # on each line (first match per line, as before). Returns 0 on
    # success and 1 when either file cannot be opened.
    #
    # Fixes vs the previous version: three-argument open with lexical
    # handles (the two-argument form would let a crafted filename inject
    # an open mode), and the input handle is closed if opening the
    # output file fails instead of being leaked.
    my $infile=$_[0];
    my $filter=$_[1];
    my $ofile=$_[2];

    open(my $in, '<', $infile)
        || return 1;

    if(!open(my $out, '>', $ofile)) {
        close($in);
        return 1;
    }
    else {
        # logmsg "FILTER: off $filter from $infile to $ofile\n";
        while(<$in>) {
            $_ =~ s/$filter//;
            print $out $_;
        }
        close($in);
        close($out);
        return 0;
    }
}
#######################################################################
# compare test results with the expected output, we might filter off
# some pattern that is allowed to differ, output test results
#
sub compare {
    # Compare actual vs expected test output (both passed by reference)
    # with compareparts() and report a failure; returns its result
    # (non-zero means mismatch). Filter off patterns _before_ calling!
    my ($subject, $firstref, $secondref)=@_;

    my $mismatch = compareparts($firstref, $secondref);
    if($mismatch) {
        # short mode prints a terse marker, otherwise show the full diff
        if($short) {
            logmsg "FAILED\n";
        }
        else {
            logmsg "\n $subject FAILED:\n";
            logmsg showdiff($LOGDIR, $firstref, $secondref);
        }
    }
    return $mismatch;
}
#######################################################################
# display information about curl and the host the test suite runs on
#
sub checksystem {
    # Probe the curl binary, its libcurl, the build features and the test
    # servers' capabilities; populate the many file-scope capability
    # globals ($has_ssl etc.) and print the "System characteristics"
    # banner. Dies when curl's version cannot be obtained.
    #
    # Fixes vs the previous version: the GOPHER-IPv6 banner line printed
    # $GOPHERPORT instead of $GOPHER6PORT; bareword filehandles replaced
    # with lexical ones.
    unlink($memdump); # remove this if there was one left

    my $feat;
    my $curl;
    my $libcurl;
    my $versretval;
    my $versnoexec;
    my @version=();

    my $curlverout="$LOGDIR/curlverout.log";
    my $curlvererr="$LOGDIR/curlvererr.log";
    my $versioncmd="$CURL --version 1>$curlverout 2>$curlvererr";

    unlink($curlverout);
    unlink($curlvererr);

    $versretval = runclient($versioncmd);
    $versnoexec = $!;

    open(my $versout, "<", $curlverout);
    @version = <$versout>;
    close($versout);

    # parse the --version output line by line
    for(@version) {
        chomp;

        if($_ =~ /^curl/) {
            $curl = $_;
            $curl =~ s/^(.*)(libcurl.*)/$1/g;

            $libcurl = $2;
            if($curl =~ /mingw32/) {
                # This is a windows minw32 build, we need to translate the
                # given path to the "actual" windows path. The MSYS shell
                # has a builtin 'pwd -W' command which converts the path.
                $pwd = `sh -c "echo \$(pwd -W)"`;
                chomp($pwd);
            }
            elsif ($curl =~ /win32/) {
               # Native Windows builds don't understand the
               # output of cygwin's pwd.  It will be
               # something like /cygdrive/c/<some path>.
               #
               # Use the cygpath utility to convert the
               # working directory to a Windows friendly
               # path.  The -m option converts to use drive
               # letter:, but it uses / instead \.  Forward
               # slashes (/) are easier for us.  We don't
               # have to escape them to get them to curl
               # through a shell.
               chomp($pwd = `cygpath -m $pwd`);
           }
           # detect which SSL backend libcurl was built with
           if ($libcurl =~ /winssl/i) {
               $has_winssl=1;
               $ssllib="WinSSL";
           }
           elsif ($libcurl =~ /openssl/i) {
               $has_openssl=1;
               $ssllib="OpenSSL";
           }
           elsif ($libcurl =~ /gnutls/i) {
               $has_gnutls=1;
               $ssllib="GnuTLS";
           }
           elsif ($libcurl =~ /nss/i) {
               $has_nss=1;
               $ssllib="NSS";
           }
           elsif ($libcurl =~ /yassl/i) {
               $has_yassl=1;
               $ssllib="yassl";
           }
           elsif ($libcurl =~ /polarssl/i) {
               $has_polarssl=1;
               $ssllib="polarssl";
           }
           elsif ($libcurl =~ /axtls/i) {
               $has_axtls=1;
               $ssllib="axTLS";
           }
        }
        elsif($_ =~ /^Protocols: (.*)/i) {
            # these are the protocols compiled in to this libcurl
            @protocols = split(' ', lc($1));

            # Generate a "proto-ipv6" version of each protocol to match the
            # IPv6 <server> name. This works even if IPv6 support isn't
            # compiled in because the <features> test will fail.
            push @protocols, map($_ . '-ipv6', @protocols);

            # 'http-proxy' is used in test cases to do CONNECT through
            push @protocols, 'http-proxy';

            # 'none' is used in test cases to mean no server
            push @protocols, 'none';
        }
        elsif($_ =~ /^Features: (.*)/i) {
            $feat = $1;
            if($feat =~ /TrackMemory/i) {
                # curl was built with --enable-curldebug (memory tracking)
                $curl_debug = 1;
            }
            if($feat =~ /debug/i) {
                # curl was built with --enable-debug
                $debug_build = 1;
            }
            if($feat =~ /SSL/i) {
                # ssl enabled
                $ssl_version=1;
            }
            if($feat =~ /Largefile/i) {
                # large file support
                $large_file=1;
            }
            if($feat =~ /IDN/i) {
                # IDN support
                $has_idn=1;
            }
            if($feat =~ /IPv6/i) {
                $has_ipv6 = 1;
            }
            if($feat =~ /libz/i) {
                $has_libz = 1;
            }
            if($feat =~ /NTLM/i) {
                # NTLM enabled
                $has_ntlm=1;
            }
            if($feat =~ /NTLM_WB/i) {
                # NTLM delegation to winbind daemon ntlm_auth helper enabled
                $has_ntlm_wb=1;
            }
            if($feat =~ /CharConv/i) {
                # CharConv enabled
                $has_charconv=1;
            }
            if($feat =~ /TLS-SRP/i) {
                # TLS-SRP enabled
                $has_tls_srp=1;
            }
            if($feat =~ /Metalink/i) {
                # Metalink enabled
                $has_metalink=1;
            }
        }
        #
        # Test harness currently uses a non-stunnel server in order to
        # run HTTP TLS-SRP tests required when curl is built with https
        # protocol support and TLS-SRP feature enabled. For convenience
        # 'httptls' may be included in the test harness protocols array
        # to differentiate this from classic stunnel based 'https' test
        # harness server.
        #
        if($has_tls_srp) {
            my $add_httptls;
            for(@protocols) {
                if($_ =~ /^https(-ipv6|)$/) {
                    $add_httptls=1;
                    last;
                }
            }
            if($add_httptls && (! grep /^httptls$/, @protocols)) {
                push @protocols, 'httptls';
                push @protocols, 'httptls-ipv6';
            }
        }
    }

    # bail out with full diagnostics when curl's version was unreadable
    if(!$curl) {
        logmsg "unable to get curl's version, further details are:\n";
        logmsg "issued command: \n";
        logmsg "$versioncmd \n";
        if ($versretval == -1) {
            logmsg "command failed with: \n";
            logmsg "$versnoexec \n";
        }
        elsif ($versretval & 127) {
            logmsg sprintf("command died with signal %d, and %s coredump.\n",
                           ($versretval & 127), ($versretval & 128)?"a":"no");
        }
        else {
            logmsg sprintf("command exited with value %d \n", $versretval >> 8);
        }
        logmsg "contents of $curlverout: \n";
        displaylogcontent("$curlverout");
        logmsg "contents of $curlvererr: \n";
        displaylogcontent("$curlvererr");
        die "couldn't get curl's version";
    }

    # pick up build-time configuration details from curl_config.h
    if(-r "../lib/curl_config.h") {
        open(my $conf, "<", "../lib/curl_config.h");
        while(<$conf>) {
            if($_ =~ /^\#define HAVE_GETRLIMIT/) {
                $has_getrlimit = 1;
            }
        }
        close($conf);
    }

    if($has_ipv6) {
        # client has ipv6 support

        # check if the HTTP server has it!
        my @sws = `server/sws --version`;
        if($sws[0] =~ /IPv6/) {
            # HTTP server has ipv6 support!
            $http_ipv6 = 1;
            $gopher_ipv6 = 1;
        }

        # check if the FTP server has it!
        @sws = `server/sockfilt --version`;
        if($sws[0] =~ /IPv6/) {
            # FTP server has ipv6 support!
            $ftp_ipv6 = 1;
        }
    }

    if(!$curl_debug && $torture) {
        die "can't run torture tests since curl was not built with curldebug";
    }

    $has_shared = `sh $CURLCONFIG --built-shared`;
    chomp $has_shared;

    # curl doesn't list cryptographic support separately, so assume it's
    # always available
    $has_crypto=1;

    my $hostname=join(' ', runclientoutput("hostname"));
    my $hosttype=join(' ', runclientoutput("uname -a"));

    # print the big capability/port banner
    logmsg ("********* System characteristics ******** \n",
            "* $curl\n",
            "* $libcurl\n",
            "* Features: $feat\n",
            "* Host: $hostname",
            "* System: $hosttype");

    logmsg sprintf("* Server SSL:   %8s", $stunnel?"ON ":"OFF");
    logmsg sprintf("  libcurl SSL:  %s\n", $ssl_version?"ON ":"OFF");
    logmsg sprintf("* debug build:  %8s", $debug_build?"ON ":"OFF");
    logmsg sprintf("  track memory: %s\n", $curl_debug?"ON ":"OFF");
    logmsg sprintf("* valgrind:     %8s", $valgrind?"ON ":"OFF");
    logmsg sprintf("  HTTP IPv6     %s\n", $http_ipv6?"ON ":"OFF");
    logmsg sprintf("* FTP IPv6      %8s", $ftp_ipv6?"ON ":"OFF");
    logmsg sprintf("  Libtool lib:  %s\n", $libtool?"ON ":"OFF");
    logmsg sprintf("* Shared build:      %s\n", $has_shared);
    if($ssl_version) {
        logmsg sprintf("* SSL library: %13s\n", $ssllib);
    }

    logmsg "* Ports:\n";

    logmsg sprintf("*   HTTP/%d ", $HTTPPORT);
    logmsg sprintf("FTP/%d ", $FTPPORT);
    logmsg sprintf("FTP2/%d ", $FTP2PORT);
    logmsg sprintf("RTSP/%d ", $RTSPPORT);
    if($stunnel) {
        logmsg sprintf("FTPS/%d ", $FTPSPORT);
        logmsg sprintf("HTTPS/%d ", $HTTPSPORT);
    }
    logmsg sprintf("\n*   TFTP/%d ", $TFTPPORT);
    if($http_ipv6) {
        logmsg sprintf("HTTP-IPv6/%d ", $HTTP6PORT);
        logmsg sprintf("RTSP-IPv6/%d ", $RTSP6PORT);
    }
    if($ftp_ipv6) {
        logmsg sprintf("FTP-IPv6/%d ", $FTP6PORT);
    }
    if($tftp_ipv6) {
        logmsg sprintf("TFTP-IPv6/%d ", $TFTP6PORT);
    }
    logmsg sprintf("\n*   GOPHER/%d ", $GOPHERPORT);
    if($gopher_ipv6) {
        # fixed: this used to print $GOPHERPORT (the IPv4 port) here
        logmsg sprintf("GOPHER-IPv6/%d", $GOPHER6PORT);
    }
    logmsg sprintf("\n*   SSH/%d ", $SSHPORT);
    logmsg sprintf("SOCKS/%d ", $SOCKSPORT);
    logmsg sprintf("POP3/%d ", $POP3PORT);
    logmsg sprintf("IMAP/%d ", $IMAPPORT);
    logmsg sprintf("SMTP/%d\n", $SMTPPORT);
    if($ftp_ipv6) {
        logmsg sprintf("*   POP3-IPv6/%d ", $POP36PORT);
        logmsg sprintf("IMAP-IPv6/%d ", $IMAP6PORT);
        logmsg sprintf("SMTP-IPv6/%d\n", $SMTP6PORT);
    }
    if($httptlssrv) {
        logmsg sprintf("*   HTTPTLS/%d ", $HTTPTLSPORT);
        if($has_ipv6) {
            logmsg sprintf("HTTPTLS-IPv6/%d ", $HTTPTLS6PORT);
        }
        logmsg "\n";
    }

    # text-mode aware platforms need CRLF-tolerant comparisons
    $has_textaware = ($^O eq 'MSWin32') || ($^O eq 'msys');

    logmsg "***************************************** \n";
}
#######################################################################
# substitute the variable stuff into either a joined up file or
# a command, in either case passed by reference
#
sub subVariables {
    # Expand the %NAME placeholders of the test suite in the string
    # referenced by $thing (a scalar ref), in place. Longer placeholder
    # names are substituted before shorter ones that share a prefix
    # (e.g. %FTP6PORT before %FTPPORT), so keep the ordering intact.
    my ($thing) = @_;

    # ports

    $$thing =~ s/%FTP6PORT/$FTP6PORT/g;
    $$thing =~ s/%FTP2PORT/$FTP2PORT/g;
    $$thing =~ s/%FTPSPORT/$FTPSPORT/g;
    $$thing =~ s/%FTPPORT/$FTPPORT/g;

    $$thing =~ s/%GOPHER6PORT/$GOPHER6PORT/g;
    $$thing =~ s/%GOPHERPORT/$GOPHERPORT/g;

    $$thing =~ s/%HTTPTLS6PORT/$HTTPTLS6PORT/g;
    $$thing =~ s/%HTTPTLSPORT/$HTTPTLSPORT/g;
    $$thing =~ s/%HTTP6PORT/$HTTP6PORT/g;
    $$thing =~ s/%HTTPSPORT/$HTTPSPORT/g;
    $$thing =~ s/%HTTPPORT/$HTTPPORT/g;
    # note: the placeholder is %PROXYPORT but the variable is named
    # $HTTPPROXYPORT
    $$thing =~ s/%PROXYPORT/$HTTPPROXYPORT/g;

    $$thing =~ s/%IMAP6PORT/$IMAP6PORT/g;
    $$thing =~ s/%IMAPPORT/$IMAPPORT/g;

    $$thing =~ s/%POP36PORT/$POP36PORT/g;
    $$thing =~ s/%POP3PORT/$POP3PORT/g;

    $$thing =~ s/%RTSP6PORT/$RTSP6PORT/g;
    $$thing =~ s/%RTSPPORT/$RTSPPORT/g;

    $$thing =~ s/%SMTP6PORT/$SMTP6PORT/g;
    $$thing =~ s/%SMTPPORT/$SMTPPORT/g;

    $$thing =~ s/%SOCKSPORT/$SOCKSPORT/g;
    $$thing =~ s/%SSHPORT/$SSHPORT/g;

    $$thing =~ s/%TFTP6PORT/$TFTP6PORT/g;
    $$thing =~ s/%TFTPPORT/$TFTPPORT/g;

    # client IP addresses

    $$thing =~ s/%CLIENT6IP/$CLIENT6IP/g;
    $$thing =~ s/%CLIENTIP/$CLIENTIP/g;

    # server IP addresses

    $$thing =~ s/%HOST6IP/$HOST6IP/g;
    $$thing =~ s/%HOSTIP/$HOSTIP/g;

    # misc

    $$thing =~ s/%CURL/$CURL/g;
    $$thing =~ s/%PWD/$pwd/g;
    $$thing =~ s/%SRCDIR/$srcdir/g;
    $$thing =~ s/%USER/$USER/g;

    # The purpose of FTPTIME2 and FTPTIME3 is to provide times that can be
    # used for time-out tests and that would work on most hosts as these
    # adjust for the startup/check time for this particular host. We needed
    # to do this to make the test suite run better on very slow hosts.

    my $ftp2 = $ftpchecktime * 2;
    my $ftp3 = $ftpchecktime * 3;

    $$thing =~ s/%FTPTIME2/$ftp2/g;
    $$thing =~ s/%FTPTIME3/$ftp3/g;
}
sub fixarray {
    # Return a copy of the argument list with every element run through
    # subVariables(); the caller's values are left untouched.
    my @out = @_;
    subVariables(\$_) for @out;
    return @out;
}
#######################################################################
# Provide time stamps for single test skipped events
#
sub timestampskippedevents {
    # Fill in the timing hashes for a test that was skipped partway
    # through, so that the statistics code always finds a complete set of
    # timestamps. The elsif cascade walks the pipeline stages from latest
    # (verification end) to earliest (preparation start): the latest
    # stage that did record a time is copied forward into every later
    # stage it is missing from. The assignment order within each branch
    # therefore matters — do not reorder.
    my $testnum = $_[0];

    return if((not defined($testnum)) || ($testnum < 1));

    if($timestats) {

        if($timevrfyend{$testnum}) {
            # test got to the very end; nothing to backfill
            return;
        }
        elsif($timesrvrlog{$testnum}) {
            # test case run and server logs lock removed
            $timevrfyend{$testnum} = $timesrvrlog{$testnum};
            return;
        }
        elsif($timetoolend{$testnum}) {
            # test tool was run and finished
            $timevrfyend{$testnum} = $timetoolend{$testnum};
            $timesrvrlog{$testnum} = $timetoolend{$testnum};
        }
        elsif($timetoolini{$testnum}) {
            # test tool was about to start
            $timevrfyend{$testnum} = $timetoolini{$testnum};
            $timesrvrlog{$testnum} = $timetoolini{$testnum};
            $timetoolend{$testnum} = $timetoolini{$testnum};
        }
        elsif($timesrvrend{$testnum}) {
            # required servers had been started/verified
            $timevrfyend{$testnum} = $timesrvrend{$testnum};
            $timesrvrlog{$testnum} = $timesrvrend{$testnum};
            $timetoolend{$testnum} = $timesrvrend{$testnum};
            $timetoolini{$testnum} = $timesrvrend{$testnum};
        }
        elsif($timesrvrini{$testnum}) {
            # server startup/verification had begun
            $timevrfyend{$testnum} = $timesrvrini{$testnum};
            $timesrvrlog{$testnum} = $timesrvrini{$testnum};
            $timetoolend{$testnum} = $timesrvrini{$testnum};
            $timetoolini{$testnum} = $timesrvrini{$testnum};
            $timesrvrend{$testnum} = $timesrvrini{$testnum};
        }
        elsif($timeprepini{$testnum}) {
            # only test preparation had started
            $timevrfyend{$testnum} = $timeprepini{$testnum};
            $timesrvrlog{$testnum} = $timeprepini{$testnum};
            $timetoolend{$testnum} = $timeprepini{$testnum};
            $timetoolini{$testnum} = $timeprepini{$testnum};
            $timesrvrend{$testnum} = $timeprepini{$testnum};
            $timesrvrini{$testnum} = $timeprepini{$testnum};
        }
    }
}
#######################################################################
# Run a single specified test case
#
#######################################################################
# Run a single specified test case
#
# Parameters: $testnum - the test case number to run
#             $count   - how many tests have been run so far (for the
#                        progress/ETA display)
#             $total   - total number of tests that will be run
#
# Returns: 0 when the test succeeded, 1 when a verification step
#          failed, -1 when the test could not be run at all (counted
#          as "skipped" by the caller).
#
# Fix applied: the memory-debug alert below used "!$cmdtype eq 'perl'"
# which, due to precedence, evaluated (!$cmdtype) eq "perl" and was
# therefore always false; it now uses the intended "ne" comparison.
#
sub singletest {
    my ($testnum, $count, $total)=@_;

    my @what;
    my $why;
    my %feature;
    my $cmd;
    my $disablevalgrind;

    # copy test number to a global scope var, this allows
    # testnum checking when starting test harness servers.
    $testnumcheck = $testnum;

    # timestamp test preparation start
    $timeprepini{$testnum} = Time::HiRes::time() if($timestats);

    if($disttests !~ /test$testnum\W/ ) {
        logmsg "Warning: test$testnum not present in tests/data/Makefile.am\n";
    }
    if($disabled{$testnum}) {
        logmsg "Warning: test$testnum is explicitly disabled\n";
    }

    # load the test case file definition
    if(loadtest("${TESTDIR}/test${testnum}")) {
        if($verbose) {
            # this is not a test
            logmsg "RUN: $testnum doesn't look like a test case\n";
        }
        $why = "no test";
    }
    else {
        @what = getpart("client", "features");
    }

    # check that all features this test requires are present in this
    # build; the first missing one sets $why and skips the test
    for(@what) {
        my $f = $_;
        $f =~ s/\s//g;

        $feature{$f}=$f; # we require this feature

        if($f eq "SSL") {
            if($ssl_version) {
                next;
            }
        }
        elsif($f eq "OpenSSL") {
            if($has_openssl) {
                next;
            }
        }
        elsif($f eq "GnuTLS") {
            if($has_gnutls) {
                next;
            }
        }
        elsif($f eq "NSS") {
            if($has_nss) {
                next;
            }
        }
        elsif($f eq "axTLS") {
            if($has_axtls) {
                next;
            }
        }
        elsif($f eq "WinSSL") {
            if($has_winssl) {
                next;
            }
        }
        elsif($f eq "unittest") {
            if($debug_build) {
                next;
            }
        }
        elsif($f eq "debug") {
            if($debug_build) {
                next;
            }
        }
        elsif($f eq "large_file") {
            if($large_file) {
                next;
            }
        }
        elsif($f eq "idn") {
            if($has_idn) {
                next;
            }
        }
        elsif($f eq "ipv6") {
            if($has_ipv6) {
                next;
            }
        }
        elsif($f eq "libz") {
            if($has_libz) {
                next;
            }
        }
        elsif($f eq "NTLM") {
            if($has_ntlm) {
                next;
            }
        }
        elsif($f eq "NTLM_WB") {
            if($has_ntlm_wb) {
                next;
            }
        }
        elsif($f eq "getrlimit") {
            if($has_getrlimit) {
                next;
            }
        }
        elsif($f eq "crypto") {
            if($has_crypto) {
                next;
            }
        }
        elsif($f eq "TLS-SRP") {
            if($has_tls_srp) {
                next;
            }
        }
        elsif($f eq "Metalink") {
            if($has_metalink) {
                next;
            }
        }
        elsif($f eq "socks") {
            next;
        }
        # See if this "feature" is in the list of supported protocols
        elsif (grep /^\Q$f\E$/i, @protocols) {
            next;
        }

        $why = "curl lacks $f support";
        last;
    }

    # test definitions may be tagged with keywords that the user can
    # enable/disable from the command line
    if(!$why) {
        my @keywords = getpart("info", "keywords");
        my $match;
        my $k;
        for $k (@keywords) {
            chomp $k;
            if ($disabled_keywords{$k}) {
                $why = "disabled by keyword";
            } elsif ($enabled_keywords{$k}) {
                $match = 1;
            }
        }

        if(!$why && !$match && %enabled_keywords) {
            $why = "disabled by missing keyword";
        }
    }

    # test definition may instruct to (un)set environment vars
    # this is done this early, so that the precheck can use environment
    # variables and still bail out fine on errors

    # restore environment variables that were modified in a previous run
    foreach my $var (keys %oldenv) {
        if($oldenv{$var} eq 'notset') {
            delete $ENV{$var} if($ENV{$var});
        }
        else {
            $ENV{$var} = $oldenv{$var};
        }
        delete $oldenv{$var};
    }

    # remove test server commands file before servers are started/verified
    unlink($FTPDCMD) if(-f $FTPDCMD);

    # timestamp required servers verification start
    $timesrvrini{$testnum} = Time::HiRes::time() if($timestats);

    if(!$why) {
        $why = serverfortest($testnum);
    }

    # timestamp required servers verification end
    $timesrvrend{$testnum} = Time::HiRes::time() if($timestats);

    # apply the <setenv> section, remembering previous values in %oldenv
    # so they can be restored after the test
    my @setenv = getpart("client", "setenv");
    if(@setenv) {
        foreach my $s (@setenv) {
            chomp $s;
            subVariables \$s;
            if($s =~ /([^=]*)=(.*)/) {
                my ($var, $content) = ($1, $2);
                # remember current setting, to restore it once test runs
                $oldenv{$var} = ($ENV{$var})?"$ENV{$var}":'notset';
                # set new value
                if(!$content) {
                    delete $ENV{$var} if($ENV{$var});
                }
                else {
                    if($var =~ /^LD_PRELOAD/) {
                        if(exe_ext() && (exe_ext() eq '.exe')) {
                            # print "Skipping LD_PRELOAD due to lack of OS support\n";
                            next;
                        }
                        if($debug_build || ($has_shared ne "yes")) {
                            # print "Skipping LD_PRELOAD due to no release shared build\n";
                            next;
                        }
                    }
                    $ENV{$var} = "$content";
                }
            }
        }
    }

    if(!$why) {
        # TODO:
        # Add a precheck cache. If a precheck command was already invoked
        # exactly like this, then use the previous result to speed up
        # successive test invokes!

        my @precheck = getpart("client", "precheck");
        if(@precheck) {
            $cmd = $precheck[0];
            chomp $cmd;
            subVariables \$cmd;
            if($cmd) {
                my @p = split(/ /, $cmd);
                if($p[0] !~ /\//) {
                    # the first word, the command, does not contain a slash so
                    # we will scan the "improved" PATH to find the command to
                    # be able to run it
                    my $fullp = checktestcmd($p[0]);

                    if($fullp) {
                        $p[0] = $fullp;
                    }
                    $cmd = join(" ", @p);
                }

                my @o = `$cmd 2>/dev/null`;
                # any stdout output from the precheck command is taken as
                # the reason to skip this test
                if($o[0]) {
                    $why = $o[0];
                    chomp $why;
                } elsif($?) {
                    $why = "precheck command error";
                }
                logmsg "prechecked $cmd\n" if($verbose);
            }
        }
    }

    if($why && !$listonly) {
        # there's a problem, count it as "skipped"
        $skipped++;
        $skipped{$why}++;
        $teststat[$testnum]=$why; # store reason for this test case

        if(!$short) {
            if($skipped{$why} <= 3) {
                # show only the first three skips for each reason
                logmsg sprintf("test %03d SKIPPED: $why\n", $testnum);
            }
        }

        timestampskippedevents($testnum);
        return -1;
    }
    logmsg sprintf("test %03d...", $testnum);

    # extract the reply data
    my @reply = getpart("reply", "data");
    my @replycheck = getpart("reply", "datacheck");

    if (@replycheck) {
        # we use this file instead to check the final output against

        my %hash = getpartattr("reply", "datacheck");
        if($hash{'nonewline'}) {
            # Yes, we must cut off the final newline from the final line
            # of the datacheck
            chomp($replycheck[$#replycheck]);
        }

        @reply=@replycheck;
    }

    # this is the valid protocol blurb curl should generate
    my @protocol= fixarray ( getpart("verify", "protocol") );

    # this is the valid protocol blurb curl should generate to a proxy
    my @proxyprot = fixarray ( getpart("verify", "proxy") );

    # redirected stdout/stderr to these files
    $STDOUT="$LOGDIR/stdout$testnum";
    $STDERR="$LOGDIR/stderr$testnum";

    # if this section exists, we verify that the stdout contained this:
    my @validstdout = fixarray ( getpart("verify", "stdout") );

    # if this section exists, we verify upload
    my @upload = getpart("verify", "upload");

    # if this section exists, it might be FTP server instructions:
    my @ftpservercmd = getpart("reply", "servercmd");

    my $CURLOUT="$LOGDIR/curl$testnum.out"; # curl output if not stdout

    # name of the test
    my @testname= getpart("client", "name");

    if(!$short) {
        my $name = $testname[0];
        $name =~ s/\n//g;
        logmsg "[$name]\n";
    }

    if($listonly) {
        timestampskippedevents($testnum);
        return 0; # look successful
    }

    my @codepieces = getpart("client", "tool");

    my $tool="";
    if(@codepieces) {
        $tool = $codepieces[0];
        chomp $tool;
    }

    # remove server output logfile
    unlink($SERVERIN);
    unlink($SERVER2IN);
    unlink($PROXYIN);

    if(@ftpservercmd) {
        # write the instructions to file
        writearray($FTPDCMD, \@ftpservercmd);
    }

    # get the command line options to use
    my @blaha;
    ($cmd, @blaha)= getpart("client", "command");

    if($cmd) {
        # make some nice replace operations
        $cmd =~ s/\n//g; # no newlines please
        # substitute variables in the command line
        subVariables \$cmd;
    }
    else {
        # there was no command given, use something silly
        $cmd="-";
    }
    if($curl_debug) {
        unlink($memdump);
    }

    # create a (possibly-empty) file before starting the test
    my @inputfile=getpart("client", "file");
    my %fileattr = getpartattr("client", "file");
    my $filename=$fileattr{'name'};
    if(@inputfile || $filename) {
        if(!$filename) {
            logmsg "ERROR: section client=>file has no name attribute\n";
            timestampskippedevents($testnum);
            return -1;
        }
        my $fileContent = join('', @inputfile);
        subVariables \$fileContent;
        # logmsg "DEBUG: writing file " . $filename . "\n";
        open(OUTFILE, ">$filename");
        binmode OUTFILE; # for crapage systems, use binary
        print OUTFILE $fileContent;
        close(OUTFILE);
    }

    my %cmdhash = getpartattr("client", "command");

    my $out="";

    if((!$cmdhash{'option'}) || ($cmdhash{'option'} !~ /no-output/)) {
        #We may slap on --output!
        if (!@validstdout) {
            $out=" --output $CURLOUT ";
        }
    }

    my $serverlogslocktimeout = $defserverlogslocktimeout;
    if($cmdhash{'timeout'}) {
        # test is allowed to override default server logs lock timeout
        if($cmdhash{'timeout'} =~ /(\d+)/) {
            $serverlogslocktimeout = $1 if($1 >= 0);
        }
    }

    my $postcommanddelay = $defpostcommanddelay;
    if($cmdhash{'delay'}) {
        # test is allowed to specify a delay after command is executed
        if($cmdhash{'delay'} =~ /(\d+)/) {
            $postcommanddelay = $1 if($1 > 0);
        }
    }

    # build the full command line to run, depending on the tool type
    my $CMDLINE;
    my $cmdargs;
    my $cmdtype = $cmdhash{'type'} || "default";
    if($cmdtype eq "perl") {
        # run the command line prepended with "perl"
        $cmdargs ="$cmd";
        $CMDLINE = "perl ";
        $tool=$CMDLINE;
        $disablevalgrind=1;
    }
    elsif(!$tool) {
        # run curl, add --verbose for debug information output
        $cmd = "-1 ".$cmd if(exists $feature{"SSL"} && ($has_axtls));

        my $inc="";
        if((!$cmdhash{'option'}) || ($cmdhash{'option'} !~ /no-include/)) {
            $inc = "--include ";
        }

        $cmdargs ="$out $inc--trace-ascii log/trace$testnum --trace-time $cmd";
    }
    else {
        $cmdargs = " $cmd"; # $cmd is the command line for the test file
        $CURLOUT = $STDOUT; # sends received data to stdout

        if($tool =~ /^lib/) {
            $CMDLINE="$LIBDIR/$tool";
        }
        elsif($tool =~ /^unit/) {
            $CMDLINE="$UNITDIR/$tool";
        }

        if(! -f $CMDLINE) {
            logmsg "The tool set in the test case for this: '$tool' does not exist\n";
            timestampskippedevents($testnum);
            return -1;
        }
        $DBGCURL=$CMDLINE;
    }

    my @stdintest = getpart("client", "stdin");

    if(@stdintest) {
        my $stdinfile="$LOGDIR/stdin-for-$testnum";
        writearray($stdinfile, \@stdintest);

        $cmdargs .= " <$stdinfile";
    }

    if(!$tool) {
        $CMDLINE="$CURL";
    }

    my $usevalgrind;
    if($valgrind && !$disablevalgrind) {
        my @valgrindoption = getpart("verify", "valgrind");
        if((!@valgrindoption) || ($valgrindoption[0] !~ /disable/)) {
            $usevalgrind = 1;
            my $valgrindcmd = "$valgrind ";
            $valgrindcmd .= "$valgrind_tool " if($valgrind_tool);
            $valgrindcmd .= "--leak-check=yes ";
            $valgrindcmd .= "--num-callers=16 ";
            $valgrindcmd .= "${valgrind_logfile}=$LOGDIR/valgrind$testnum";
            $CMDLINE = "$valgrindcmd $CMDLINE";
        }
    }

    $CMDLINE .= "$cmdargs >$STDOUT 2>$STDERR";

    if($verbose) {
        logmsg "$CMDLINE\n";
    }

    print CMDLOG "$CMDLINE\n";

    unlink("core");

    my $dumped_core;
    my $cmdres;

    # Apr 2007: precommand isn't being used and could be removed
    my @precommand= getpart("client", "precommand");
    if($precommand[0]) {
        # this is pure perl to eval!
        my $code = join("", @precommand);
        eval $code;
        if($@) {
            logmsg "perl: $code\n";
            logmsg "precommand: $@";
            stopservers($verbose);
            timestampskippedevents($testnum);
            return -1;
        }
    }

    if($gdbthis) {
        my $gdbinit = "$TESTDIR/gdbinit$testnum";
        open(GDBCMD, ">$LOGDIR/gdbcmd");
        print GDBCMD "set args $cmdargs\n";
        print GDBCMD "show args\n";
        print GDBCMD "source $gdbinit\n" if -e $gdbinit;
        close(GDBCMD);
    }

    # timestamp starting of test command
    $timetoolini{$testnum} = Time::HiRes::time() if($timestats);

    # run the command line we built
    if ($torture) {
        $cmdres = torture($CMDLINE,
                          "$gdb --directory libtest $DBGCURL -x $LOGDIR/gdbcmd");
    }
    elsif($gdbthis) {
        my $GDBW = ($gdbxwin) ? "-w" : "";
        runclient("$gdb --directory libtest $DBGCURL $GDBW -x $LOGDIR/gdbcmd");
        $cmdres=0; # makes it always continue after a debugged run
    }
    else {
        $cmdres = runclient("$CMDLINE");
        my $signal_num  = $cmdres & 127;
        $dumped_core = $cmdres & 128;

        if(!$anyway && ($signal_num || $dumped_core)) {
            $cmdres = 1000;
        }
        else {
            $cmdres >>= 8;
            $cmdres = (2000 + $signal_num) if($signal_num && !$cmdres);
        }
    }

    # timestamp finishing of test command
    $timetoolend{$testnum} = Time::HiRes::time() if($timestats);

    if(!$dumped_core) {
        if(-r "core") {
            # there's core file present now!
            $dumped_core = 1;
        }
    }

    if($dumped_core) {
        logmsg "core dumped\n";
        if(0 && $gdb) {
            logmsg "running gdb for post-mortem analysis:\n";
            open(GDBCMD, ">$LOGDIR/gdbcmd2");
            print GDBCMD "bt\n";
            close(GDBCMD);
            runclient("$gdb --directory libtest -x $LOGDIR/gdbcmd2 -batch $DBGCURL core ");
     #       unlink("$LOGDIR/gdbcmd2");
        }
    }

    # If a server logs advisor read lock file exists, it is an indication
    # that the server has not yet finished writing out all its log files,
    # including server request log files used for protocol verification.
    # So, if the lock file exists the script waits here a certain amount
    # of time until the server removes it, or the given time expires.

    if($serverlogslocktimeout) {
        my $lockretry = $serverlogslocktimeout * 20;
        while((-f $SERVERLOGS_LOCK) && $lockretry--) {
            select(undef, undef, undef, 0.05);
        }
        if(($lockretry < 0) &&
           ($serverlogslocktimeout >= $defserverlogslocktimeout)) {
            logmsg "Warning: server logs lock timeout ",
                   "($serverlogslocktimeout seconds) expired\n";
        }
    }

    # Test harness ssh server does not have this synchronization mechanism,
    # this implies that some ssh server based tests might need a small delay
    # once that the client command has run to avoid false test failures.
    #
    # gnutls-serv also lacks this synchronization mechanism, so gnutls-serv
    # based tests might need a small delay once that the client command has
    # run to avoid false test failures.

    sleep($postcommanddelay) if($postcommanddelay);

    # timestamp removal of server logs advisor read lock
    $timesrvrlog{$testnum} = Time::HiRes::time() if($timestats);

    # test definition might instruct to stop some servers
    # stop also all servers relative to the given one

    my @killtestservers = getpart("client", "killserver");
    if(@killtestservers) {
        #
        # All servers relative to the given one must be stopped also
        #
        my @killservers;
        foreach my $server (@killtestservers) {
            chomp $server;
            if($server =~ /^(ftp|http|imap|pop3|smtp)s((\d*)(-ipv6|))$/) {
                # given a stunnel ssl server, also kill non-ssl underlying one
                push @killservers, "${1}${2}";
            }
            elsif($server =~ /^(ftp|http|imap|pop3|smtp)((\d*)(-ipv6|))$/) {
                # given a non-ssl server, also kill stunnel piggybacking one
                push @killservers, "${1}s${2}";
            }
            elsif($server =~ /^(socks)((\d*)(-ipv6|))$/) {
                # given a socks server, also kill ssh underlying one
                push @killservers, "ssh${2}";
            }
            elsif($server =~ /^(ssh)((\d*)(-ipv6|))$/) {
                # given a ssh server, also kill socks piggybacking one
                push @killservers, "socks${2}";
            }
            push @killservers, $server;
        }
        #
        # kill sockfilter processes for pingpong relative servers
        #
        foreach my $server (@killservers) {
            if($server =~ /^(ftp|imap|pop3|smtp)s?(\d*)(-ipv6|)$/) {
                my $proto  = $1;
                my $idnum  = ($2 && ($2 > 1)) ? $2 : 1;
                my $ipvnum = ($3 && ($3 =~ /6$/)) ? 6 : 4;
                killsockfilters($proto, $ipvnum, $idnum, $verbose);
            }
        }
        #
        # kill server relative pids clearing them in %run hash
        #
        my $pidlist;
        foreach my $server (@killservers) {
            if($run{$server}) {
                $pidlist .= "$run{$server} ";
                $run{$server} = 0;
            }
            $runcert{$server} = 0 if($runcert{$server});
        }
        killpid($verbose, $pidlist);
        #
        # cleanup server pid files
        #
        foreach my $server (@killservers) {
            my $pidfile = $serverpidfile{$server};
            my $pid = processexists($pidfile);
            if($pid > 0) {
                logmsg "Warning: $server server unexpectedly alive\n";
                killpid($verbose, $pid);
            }
            unlink($pidfile) if(-f $pidfile);
        }
    }

    # remove the test server commands file after each test
    unlink($FTPDCMD) if(-f $FTPDCMD);

    # run the postcheck command
    my @postcheck= getpart("client", "postcheck");
    if(@postcheck) {
        $cmd = $postcheck[0];
        chomp $cmd;
        subVariables \$cmd;
        if($cmd) {
            logmsg "postcheck $cmd\n" if($verbose);
            my $rc = runclient("$cmd");
            # Must run the postcheck command in torture mode in order
            # to clean up, but the result can't be relied upon.
            if($rc != 0 && !$torture) {
                logmsg " postcheck FAILED\n";
                # timestamp test result verification end
                $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
                return 1;
            }
        }
    }

    # restore environment variables that were modified
    if(%oldenv) {
        foreach my $var (keys %oldenv) {
            if($oldenv{$var} eq 'notset') {
                delete $ENV{$var} if($ENV{$var});
            }
            else {
                $ENV{$var} = "$oldenv{$var}";
            }
        }
    }

    # Skip all the verification on torture tests
    if ($torture) {
        if(!$cmdres && !$keepoutfiles) {
            cleardir($LOGDIR);
        }
        # timestamp test result verification end
        $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
        return $cmdres;
    }

    my @err = getpart("verify", "errorcode");
    my $errorcode = $err[0] || "0";
    my $ok="";
    my $res;
    chomp $errorcode;
    if (@validstdout) {
        # verify redirected stdout
        my @actual = loadarray($STDOUT);

        # variable-replace in the stdout we have from the test case file
        @validstdout = fixarray(@validstdout);

        # get all attributes
        my %hash = getpartattr("verify", "stdout");

        # get the mode attribute
        my $filemode=$hash{'mode'};
        if($filemode && ($filemode eq "text") && $has_textaware) {
            # text mode when running on windows: fix line endings
            map s/\r\n/\n/g, @actual;
        }

        if($hash{'nonewline'}) {
            # Yes, we must cut off the final newline from the final line
            # of the protocol data
            chomp($validstdout[$#validstdout]);
        }

        $res = compare("stdout", \@actual, \@validstdout);
        if($res) {
            # timestamp test result verification end
            $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
            return 1;
        }
        $ok .= "s";
    }
    else {
        $ok .= "-"; # stdout not checked
    }

    my %replyattr = getpartattr("reply", "data");
    if(!$replyattr{'nocheck'} && (@reply || $replyattr{'sendzero'})) {
        # verify the received data
        my @out = loadarray($CURLOUT);
        my %hash = getpartattr("reply", "data");
        # get the mode attribute
        my $filemode=$hash{'mode'};
        if($filemode && ($filemode eq "text") && $has_textaware) {
            # text mode when running on windows: fix line endings
            map s/\r\n/\n/g, @out;
        }

        $res = compare("data", \@out, \@reply);
        if ($res) {
            # timestamp test result verification end
            $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
            return 1;
        }
        $ok .= "d";
    }
    else {
        $ok .= "-"; # data not checked
    }

    if(@upload) {
        # verify uploaded data
        my @out = loadarray("$LOGDIR/upload.$testnum");
        $res = compare("upload", \@out, \@upload);
        if ($res) {
            # timestamp test result verification end
            $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
            return 1;
        }
        $ok .= "u";
    }
    else {
        $ok .= "-"; # upload not checked
    }

    if(@protocol) {
        # Verify the sent request
        my @out = loadarray($SERVERIN);

        # what to cut off from the live protocol sent by curl
        my @strip = getpart("verify", "strip");

        my @protstrip=@protocol;

        # check if there's any attributes on the verify/protocol section
        my %hash = getpartattr("verify", "protocol");

        if($hash{'nonewline'}) {
            # Yes, we must cut off the final newline from the final line
            # of the protocol data
            chomp($protstrip[$#protstrip]);
        }

        for(@strip) {
            # strip off all lines that match the patterns from both arrays
            chomp $_;
            @out = striparray( $_, \@out);
            @protstrip= striparray( $_, \@protstrip);
        }

        # what parts to cut off from the protocol
        my @strippart = getpart("verify", "strippart");
        my $strip;
        for $strip (@strippart) {
            chomp $strip;
            for(@out) {
                eval $strip;
            }
        }

        $res = compare("protocol", \@out, \@protstrip);
        if($res) {
            # timestamp test result verification end
            $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
            return 1;
        }

        $ok .= "p";
    }
    else {
        $ok .= "-"; # protocol not checked
    }

    if(@proxyprot) {
        # Verify the sent proxy request
        my @out = loadarray($PROXYIN);

        # what to cut off from the live protocol sent by curl, we use the
        # same rules as for <protocol>
        my @strip = getpart("verify", "strip");

        my @protstrip=@proxyprot;

        # check if there's any attributes on the verify/protocol section
        my %hash = getpartattr("verify", "proxy");

        if($hash{'nonewline'}) {
            # Yes, we must cut off the final newline from the final line
            # of the protocol data
            chomp($protstrip[$#protstrip]);
        }

        for(@strip) {
            # strip off all lines that match the patterns from both arrays
            chomp $_;
            @out = striparray( $_, \@out);
            @protstrip= striparray( $_, \@protstrip);
        }

        # what parts to cut off from the protocol
        my @strippart = getpart("verify", "strippart");
        my $strip;
        for $strip (@strippart) {
            chomp $strip;
            for(@out) {
                eval $strip;
            }
        }

        $res = compare("proxy", \@out, \@protstrip);
        if($res) {
            # timestamp test result verification end
            $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
            return 1;
        }

        $ok .= "P";
    }
    else {
        $ok .= "-"; # protocol not checked
    }

    my $outputok;
    for my $partsuffix (('', '1', '2', '3', '4')) {
        my @outfile=getpart("verify", "file".$partsuffix);
        if(@outfile || partexists("verify", "file".$partsuffix) ) {
            # we're supposed to verify a dynamically generated file!
            my %hash = getpartattr("verify", "file".$partsuffix);

            my $filename=$hash{'name'};
            if(!$filename) {
                logmsg "ERROR: section verify=>file$partsuffix ".
                       "has no name attribute\n";
                stopservers($verbose);
                # timestamp test result verification end
                $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
                return -1;
            }
            my @generated=loadarray($filename);

            # what parts to cut off from the file
            my @stripfile = getpart("verify", "stripfile".$partsuffix);

            my $filemode=$hash{'mode'};
            if($filemode && ($filemode eq "text") && $has_textaware) {
                # text mode when running on windows means adding an extra
                # strip expression
                push @stripfile, "s/\r\n/\n/";
            }

            my $strip;
            for $strip (@stripfile) {
                chomp $strip;
                for(@generated) {
                    eval $strip;
                }
            }

            @outfile = fixarray(@outfile);

            $res = compare("output ($filename)", \@generated, \@outfile);
            if($res) {
                # timestamp test result verification end
                $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
                return 1;
            }

            $outputok = 1; # output checked
        }
    }
    $ok .= ($outputok) ? "o" : "-"; # output checked or not

    # accept multiple comma-separated error codes
    my @splerr = split(/ *, */, $errorcode);
    my $errok;
    foreach my $e (@splerr) {
        if($e == $cmdres) {
            # a fine error code
            $errok = 1;
            last;
        }
    }

    if($errok) {
        $ok .= "e";
    }
    else {
        if(!$short) {
            logmsg sprintf("\n%s returned $cmdres, when expecting %s\n",
                           (!$tool)?"curl":$tool, $errorcode);
        }
        logmsg " exit FAILED\n";
        # timestamp test result verification end
        $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
        return 1;
    }

    if($curl_debug) {
        if(! -f $memdump) {
            # perl-driven tests don't go through the memory-debug wrapper,
            # so only alert when a non-perl test left no dump file.
            # FIX: was "!$cmdtype eq 'perl'" which, by precedence, compared
            # (!$cmdtype) to "perl" and was always false.
            logmsg "\n** ALERT! memory debugging with no output file?\n"
                if($cmdtype ne "perl");
        }
        else {
            my @memdata=`$memanalyze $memdump`;
            my $leak=0;
            for(@memdata) {
                if($_ ne "") {
                    # well it could be other memory problems as well, but
                    # we call it leak for short here
                    $leak=1;
                }
            }
            if($leak) {
                logmsg "\n** MEMORY FAILURE\n";
                logmsg @memdata;
                # timestamp test result verification end
                $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
                return 1;
            }
            else {
                $ok .= "m";
            }
        }
    }
    else {
        $ok .= "-"; # memory not checked
    }

    if($valgrind) {
        if($usevalgrind) {
            unless(opendir(DIR, "$LOGDIR")) {
                logmsg "ERROR: unable to read $LOGDIR\n";
                # timestamp test result verification end
                $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
                return 1;
            }
            my @files = readdir(DIR);
            closedir(DIR);
            my $vgfile;
            foreach my $file (@files) {
                if($file =~ /^valgrind$testnum(\..*|)$/) {
                    $vgfile = $file;
                    last;
                }
            }
            if(!$vgfile) {
                logmsg "ERROR: valgrind log file missing for test $testnum\n";
                # timestamp test result verification end
                $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
                return 1;
            }
            my @e = valgrindparse($srcdir, $feature{'SSL'}, "$LOGDIR/$vgfile");
            if(@e && $e[0]) {
                logmsg " valgrind ERROR ";
                logmsg @e;
                # timestamp test result verification end
                $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);
                return 1;
            }
            $ok .= "v";
        }
        else {
            if(!$short && !$disablevalgrind) {
                logmsg " valgrind SKIPPED\n";
            }
            $ok .= "-"; # skipped
        }
    }
    else {
        $ok .= "-"; # valgrind not checked
    }

    logmsg "$ok " if(!$short);

    # print a progress line with an estimate of the remaining run time
    my $sofar= time()-$start;
    my $esttotal = $sofar/$count * $total;
    my $estleft = $esttotal - $sofar;
    my $left=sprintf("remaining: %02d:%02d",
                     $estleft/60,
                     $estleft%60);

    logmsg sprintf("OK (%-3d out of %-3d, %s)\n", $count, $total, $left);

    # the test succeeded, remove all log files
    if(!$keepoutfiles) {
        cleardir($LOGDIR);
    }

    # timestamp test result verification end
    $timevrfyend{$testnum} = Time::HiRes::time() if($timestats);

    return 0;
}
#######################################################################
# Stop all running test servers
#
#######################################################################
# Stop all running test servers
#
# Parameter: flag enabling verbose progress messages.
# Kills sockfilter helpers, every pid recorded in %run (clearing the
# entries), and finally any process still alive behind a known pid file.
#
sub stopservers {
    my ($verbose) = @_;

    #
    # kill sockfilter processes for all pingpong servers
    #
    killallsockfilters($verbose);

    #
    # kill all server pids from %run hash clearing them
    #
    my $allpids;
    for my $srvname (keys %run) {
        my $srvpids = $run{$srvname};
        if($srvpids) {
            if($verbose) {
                # log each distinct pid once
                my $lastpid = 0;
                for my $onepid (split(' ', $srvpids)) {
                    next if($onepid == $lastpid);
                    logmsg sprintf("* kill pid for %s => %d\n",
                                   $srvname, $onepid);
                    $lastpid = $onepid;
                }
            }
            $allpids .= "$srvpids ";
            $run{$srvname} = 0;
        }
        $runcert{$srvname} = 0 if($runcert{$srvname});
    }
    killpid($verbose, $allpids);

    #
    # cleanup all server pid files
    #
    for my $srvname (keys %serverpidfile) {
        my $pidfile = $serverpidfile{$srvname};
        my $alive = processexists($pidfile);
        if($alive > 0) {
            logmsg "Warning: $srvname server unexpectedly alive\n";
            killpid($verbose, $alive);
        }
        unlink($pidfile) if(-f $pidfile);
    }
}
#######################################################################
# startservers() starts all the named servers
#
# Returns: string with error reason or blank for success
#
#######################################################################
# startservers() starts all the named servers
#
# Parameters: a list of server specification strings (e.g. "http",
#             "ftps stunnel.pem", "http-ipv6"); an optional second word
#             in an ssl server spec names the certificate file to use.
#
# Returns: string with error reason or blank for success
#
# NOTE(review): relies on file-level globals (%run, %runcert, $torture,
# $verbose, the *PORT variables, the run*server() helpers); in torture
# mode an already-running but unresponsive server is stopped and
# restarted before use.
#
sub startservers {
    my @what = @_;
    my ($pid, $pid2);
    for(@what) {
        my (@whatlist) = split(/\s+/,$_);
        # normalize the server name: lowercase, keep only [a-z0-9-]
        my $what = lc($whatlist[0]);
        $what =~ s/[^a-z0-9-]//g;

        my $certfile;
        if($what =~ /^(ftp|http|imap|pop3|smtp)s((\d*)(-ipv6|))$/) {
            # ssl server: optional second word is the cert file name
            $certfile = ($whatlist[1]) ? $whatlist[1] : 'stunnel.pem';
        }

        if(($what eq "pop3") ||
           ($what eq "ftp") ||
           ($what eq "imap") ||
           ($what eq "smtp")) {
            if($torture && $run{$what} &&
               !responsive_pingpong_server($what, "", $verbose)) {
                stopserver($what);
            }
            if(!$run{$what}) {
                ($pid, $pid2) = runpingpongserver($what, "", $verbose);
                if($pid <= 0) {
                    return "failed starting ". uc($what) ." server";
                }
                printf ("* pid $what => %d %d\n", $pid, $pid2) if($verbose);
                $run{$what}="$pid $pid2";
            }
        }
        elsif($what eq "ftp2") {
            if($torture && $run{'ftp2'} &&
               !responsive_pingpong_server("ftp", "2", $verbose)) {
                stopserver('ftp2');
            }
            if(!$run{'ftp2'}) {
                ($pid, $pid2) = runpingpongserver("ftp", "2", $verbose);
                if($pid <= 0) {
                    return "failed starting FTP2 server";
                }
                printf ("* pid ftp2 => %d %d\n", $pid, $pid2) if($verbose);
                $run{'ftp2'}="$pid $pid2";
            }
        }
        elsif($what eq "ftp-ipv6") {
            if($torture && $run{'ftp-ipv6'} &&
               !responsive_pingpong_server("ftp", "", $verbose, "ipv6")) {
                stopserver('ftp-ipv6');
            }
            if(!$run{'ftp-ipv6'}) {
                ($pid, $pid2) = runpingpongserver("ftp", "", $verbose, "ipv6");
                if($pid <= 0) {
                    return "failed starting FTP-IPv6 server";
                }
                logmsg sprintf("* pid ftp-ipv6 => %d %d\n", $pid,
                               $pid2) if($verbose);
                $run{'ftp-ipv6'}="$pid $pid2";
            }
        }
        elsif($what eq "gopher") {
            if($torture && $run{'gopher'} &&
               !responsive_http_server("gopher", $verbose, 0, $GOPHERPORT)) {
                stopserver('gopher');
            }
            if(!$run{'gopher'}) {
                ($pid, $pid2) = runhttpserver("gopher", $verbose, 0,
                                              $GOPHERPORT);
                if($pid <= 0) {
                    return "failed starting GOPHER server";
                }
                logmsg sprintf ("* pid gopher => %d %d\n", $pid, $pid2)
                    if($verbose);
                $run{'gopher'}="$pid $pid2";
            }
        }
        elsif($what eq "gopher-ipv6") {
            if($torture && $run{'gopher-ipv6'} &&
               !responsive_http_server("gopher", $verbose, "ipv6",
                                       $GOPHER6PORT)) {
                stopserver('gopher-ipv6');
            }
            if(!$run{'gopher-ipv6'}) {
                ($pid, $pid2) = runhttpserver("gopher", $verbose, "ipv6",
                                              $GOPHER6PORT);
                if($pid <= 0) {
                    return "failed starting GOPHER-IPv6 server";
                }
                logmsg sprintf("* pid gopher-ipv6 => %d %d\n", $pid,
                               $pid2) if($verbose);
                $run{'gopher-ipv6'}="$pid $pid2";
            }
        }
        elsif($what eq "http") {
            if($torture && $run{'http'} &&
               !responsive_http_server("http", $verbose, 0, $HTTPPORT)) {
                stopserver('http');
            }
            if(!$run{'http'}) {
                ($pid, $pid2) = runhttpserver("http", $verbose, 0,
                                              $HTTPPORT);
                if($pid <= 0) {
                    return "failed starting HTTP server";
                }
                logmsg sprintf ("* pid http => %d %d\n", $pid, $pid2)
                    if($verbose);
                $run{'http'}="$pid $pid2";
            }
        }
        elsif($what eq "http-proxy") {
            if($torture && $run{'http-proxy'} &&
               !responsive_http_server("http", $verbose, "proxy",
                                       $HTTPPROXYPORT)) {
                stopserver('http-proxy');
            }
            if(!$run{'http-proxy'}) {
                ($pid, $pid2) = runhttpserver("http", $verbose, "proxy",
                                              $HTTPPROXYPORT);
                if($pid <= 0) {
                    return "failed starting HTTP-proxy server";
                }
                logmsg sprintf ("* pid http-proxy => %d %d\n", $pid, $pid2)
                    if($verbose);
                $run{'http-proxy'}="$pid $pid2";
            }
        }
        elsif($what eq "http-ipv6") {
            if($torture && $run{'http-ipv6'} &&
               !responsive_http_server("http", $verbose, "IPv6", $HTTP6PORT)) {
                stopserver('http-ipv6');
            }
            if(!$run{'http-ipv6'}) {
                ($pid, $pid2) = runhttpserver("http", $verbose, "ipv6",
                                              $HTTP6PORT);
                if($pid <= 0) {
                    return "failed starting HTTP-IPv6 server";
                }
                logmsg sprintf("* pid http-ipv6 => %d %d\n", $pid, $pid2)
                    if($verbose);
                $run{'http-ipv6'}="$pid $pid2";
            }
        }
        elsif($what eq "rtsp") {
            if($torture && $run{'rtsp'} &&
               !responsive_rtsp_server($verbose)) {
                stopserver('rtsp');
            }
            if(!$run{'rtsp'}) {
                ($pid, $pid2) = runrtspserver($verbose);
                if($pid <= 0) {
                    return "failed starting RTSP server";
                }
                printf ("* pid rtsp => %d %d\n", $pid, $pid2) if($verbose);
                $run{'rtsp'}="$pid $pid2";
            }
        }
        elsif($what eq "rtsp-ipv6") {
            if($torture && $run{'rtsp-ipv6'} &&
               !responsive_rtsp_server($verbose, "IPv6")) {
                stopserver('rtsp-ipv6');
            }
            if(!$run{'rtsp-ipv6'}) {
                ($pid, $pid2) = runrtspserver($verbose, "IPv6");
                if($pid <= 0) {
                    return "failed starting RTSP-IPv6 server";
                }
                logmsg sprintf("* pid rtsp-ipv6 => %d %d\n", $pid, $pid2)
                    if($verbose);
                $run{'rtsp-ipv6'}="$pid $pid2";
            }
        }
        elsif($what eq "ftps") {
            if(!$stunnel) {
                # we can't run ftps tests without stunnel
                return "no stunnel";
            }
            if(!$ssl_version) {
                # we can't run ftps tests if libcurl is SSL-less
                return "curl lacks SSL support";
            }
            if($runcert{'ftps'} && ($runcert{'ftps'} ne $certfile)) {
                # stop server when running and using a different cert
                stopserver('ftps');
            }
            # an ftps server needs a plain ftp server underneath it
            if($torture && $run{'ftp'} &&
               !responsive_pingpong_server("ftp", "", $verbose)) {
                stopserver('ftp');
            }
            if(!$run{'ftp'}) {
                ($pid, $pid2) = runpingpongserver("ftp", "", $verbose);
                if($pid <= 0) {
                    return "failed starting FTP server";
                }
                printf ("* pid ftp => %d %d\n", $pid, $pid2) if($verbose);
                $run{'ftp'}="$pid $pid2";
            }
            if(!$run{'ftps'}) {
                ($pid, $pid2) = runftpsserver($verbose, "", $certfile);
                if($pid <= 0) {
                    return "failed starting FTPS server (stunnel)";
                }
                logmsg sprintf("* pid ftps => %d %d\n", $pid, $pid2)
                    if($verbose);
                $run{'ftps'}="$pid $pid2";
            }
        }
        elsif($what eq "file") {
            # we support it but have no server!
        }
        elsif($what eq "https") {
            if(!$stunnel) {
                # we can't run https tests without stunnel
                return "no stunnel";
            }
            if(!$ssl_version) {
                # we can't run https tests if libcurl is SSL-less
                return "curl lacks SSL support";
            }
            if($runcert{'https'} && ($runcert{'https'} ne $certfile)) {
                # stop server when running and using a different cert
                stopserver('https');
            }
            # an https server needs a plain http server underneath it
            if($torture && $run{'http'} &&
               !responsive_http_server("http", $verbose, 0, $HTTPPORT)) {
                stopserver('http');
            }
            if(!$run{'http'}) {
                ($pid, $pid2) = runhttpserver("http", $verbose, 0,
                                              $HTTPPORT);
                if($pid <= 0) {
                    return "failed starting HTTP server";
                }
                printf ("* pid http => %d %d\n", $pid, $pid2) if($verbose);
                $run{'http'}="$pid $pid2";
            }
            if(!$run{'https'}) {
                ($pid, $pid2) = runhttpsserver($verbose, "", $certfile);
                if($pid <= 0) {
                    return "failed starting HTTPS server (stunnel)";
                }
                logmsg sprintf("* pid https => %d %d\n", $pid, $pid2)
                    if($verbose);
                $run{'https'}="$pid $pid2";
            }
        }
        elsif($what eq "httptls") {
            if(!$httptlssrv) {
                # for now, we can't run http TLS-EXT tests without gnutls-serv
                return "no gnutls-serv";
            }
            if($torture && $run{'httptls'} &&
               !responsive_httptls_server($verbose, "IPv4")) {
                stopserver('httptls');
            }
            if(!$run{'httptls'}) {
                ($pid, $pid2) = runhttptlsserver($verbose, "IPv4");
                if($pid <= 0) {
                    return "failed starting HTTPTLS server (gnutls-serv)";
                }
                logmsg sprintf("* pid httptls => %d %d\n", $pid, $pid2)
                    if($verbose);
                $run{'httptls'}="$pid $pid2";
            }
        }
        elsif($what eq "httptls-ipv6") {
            if(!$httptlssrv) {
                # for now, we can't run http TLS-EXT tests without gnutls-serv
                return "no gnutls-serv";
            }
            if($torture && $run{'httptls-ipv6'} &&
               !responsive_httptls_server($verbose, "IPv6")) {
                stopserver('httptls-ipv6');
            }
            if(!$run{'httptls-ipv6'}) {
                ($pid, $pid2) = runhttptlsserver($verbose, "IPv6");
                if($pid <= 0) {
                    return "failed starting HTTPTLS-IPv6 server (gnutls-serv)";
                }
                logmsg sprintf("* pid httptls-ipv6 => %d %d\n", $pid, $pid2)
                    if($verbose);
                $run{'httptls-ipv6'}="$pid $pid2";
            }
        }
        elsif($what eq "tftp") {
            if($torture && $run{'tftp'} &&
               !responsive_tftp_server("", $verbose)) {
                stopserver('tftp');
            }
            if(!$run{'tftp'}) {
                ($pid, $pid2) = runtftpserver("", $verbose);
                if($pid <= 0) {
                    return "failed starting TFTP server";
                }
                printf ("* pid tftp => %d %d\n", $pid, $pid2) if($verbose);
                $run{'tftp'}="$pid $pid2";
            }
        }
        elsif($what eq "tftp-ipv6") {
            if($torture && $run{'tftp-ipv6'} &&
               !responsive_tftp_server("", $verbose, "IPv6")) {
                stopserver('tftp-ipv6');
            }
            if(!$run{'tftp-ipv6'}) {
                ($pid, $pid2) = runtftpserver("", $verbose, "IPv6");
                if($pid <= 0) {
                    return "failed starting TFTP-IPv6 server";
                }
                printf("* pid tftp-ipv6 => %d %d\n", $pid, $pid2) if($verbose);
                $run{'tftp-ipv6'}="$pid $pid2";
            }
        }
        elsif($what eq "sftp" || $what eq "scp" || $what eq "socks4" || $what eq "socks5" ) {
            # all of these are served by the test ssh daemon
            if(!$run{'ssh'}) {
                ($pid, $pid2) = runsshserver("", $verbose);
                if($pid <= 0) {
                    return "failed starting SSH server";
                }
                printf ("* pid ssh => %d %d\n", $pid, $pid2) if($verbose);
                $run{'ssh'}="$pid $pid2";
            }
            if($what eq "socks4" || $what eq "socks5") {
                if(!$run{'socks'}) {
                    ($pid, $pid2) = runsocksserver("", $verbose);
                    if($pid <= 0) {
                        return "failed starting socks server";
                    }
                    printf ("* pid socks => %d %d\n", $pid, $pid2) if($verbose);
                    $run{'socks'}="$pid $pid2";
                }
            }
            if($what eq "socks5") {
                # socks5 support requires a sufficiently new ssh daemon
                if(!$sshdid) {
                    # Not an OpenSSH or SunSSH ssh daemon
                    logmsg "Not OpenSSH or SunSSH; socks5 tests need at least OpenSSH 3.7\n";
                    return "failed starting socks5 server";
                }
                elsif(($sshdid =~ /OpenSSH/) && ($sshdvernum < 370)) {
                    # Need OpenSSH 3.7 for socks5 - http://www.openssh.com/txt/release-3.7
                    logmsg "$sshdverstr insufficient; socks5 tests need at least OpenSSH 3.7\n";
                    return "failed starting socks5 server";
                }
                elsif(($sshdid =~ /SunSSH/)  && ($sshdvernum < 100)) {
                    # Need SunSSH 1.0 for socks5
                    logmsg "$sshdverstr insufficient; socks5 tests need at least SunSSH 1.0\n";
                    return "failed starting socks5 server";
                }
            }
        }
        elsif($what eq "none") {
            logmsg "* starts no server\n" if ($verbose);
        }
        else {
            warn "we don't support a server for $what";
            return "no server for $what";
        }
    }
    return 0;
}
##############################################################################
# This function makes sure the right set of server is running for the
# specified test case. This is a useful design when we run single tests as not
# all servers need to run then!
#
# Returns: a string, blank if everything is fine or a reason why it failed
#
##############################################################################
# This function makes sure the right set of server is running for the
# specified test case. This is a useful design when we run single tests as not
# all servers need to run then!
#
# Parameter: $testnum - the test case number whose <client><server> section
#            names the required servers.
#
# Returns: a string, blank if everything is fine or a reason why it failed
#
sub serverfortest {
    my ($testnum)=@_;

    # get the server list from the already-loaded test definition
    my @what = getpart("client", "server");

    if(!$what[0]) {
        warn "Test case $testnum has no server(s) specified";
        return "no server specified";
    }

    # walk the list backwards so entries can be rewritten in place
    for(my $i = scalar(@what) - 1; $i >= 0; $i--) {
        my $srvrline = $what[$i];
        chomp $srvrline if($srvrline);
        if($srvrline =~ /^(\S+)((\s*)(.*))/) {
            my $server = "${1}";
            my $lnrest = "${2}";
            my $tlsext;
            # "httptls+ext" / "httptls+srp" specs encode a TLS extension;
            # split it out and rebuild the plain server name
            if($server =~ /^(httptls)(\+)(ext|srp)(\d*)(-ipv6|)$/) {
                $server = "${1}${4}${5}";
                $tlsext = uc("TLS-${3}");
            }
            # the server's protocol must be supported by this curl build;
            # socks servers are test-harness-only and need no curl support
            if(! grep /^\Q$server\E$/, @protocols) {
                if(substr($server,0,5) ne "socks") {
                    if($tlsext) {
                        return "curl lacks $tlsext support";
                    }
                    else {
                        return "curl lacks $server server support";
                    }
                }
            }
            $what[$i] = "$server$lnrest" if($tlsext);
        }
    }

    # plain call instead of "&startservers" - the ampersand form bypasses
    # prototypes and is discouraged (perlsub)
    return startservers(@what);
}
#######################################################################
# runtimestats displays test-suite run time statistics
#
# _show_timestat prints one timing category: a heading, a column header
# and the presorted "seconds testnum" strings in @$times.  Only the
# $counter largest entries are shown unless -rf set $fullstats.
sub _show_timestat {
    my ($title, $counter, $times) = @_;
    logmsg "\n$title per test ".
        sprintf("(%s)...\n\n", (not $fullstats)?"top $counter":"full");
    logmsg "-time- test\n";
    logmsg "------ ----\n";
    foreach my $txt (@$times) {
        last if((not $fullstats) && (not $counter--));
        logmsg "$txt\n";
    }
}

# runtimestats breaks down where the wall-clock time of a full run
# went: server startup/verification, test preparation, tool execution,
# waiting on the server log locks, and result verification.  It reads
# the global %time*{testnum} timestamp hashes recorded while tests
# 1 .. $lasttest ran, and does nothing unless -r/-rf set $timestats.
sub runtimestats {
    my $lasttest = $_[0];

    return if(not $timestats);

    logmsg "\nTest suite total running time breakdown per task...\n\n";

    my @timesrvr;
    my @timeprep;
    my @timetool;
    my @timelock;
    my @timevrfy;
    my @timetest;
    my $timesrvrtot = 0.0;
    my $timepreptot = 0.0;
    my $timetooltot = 0.0;
    my $timelocktot = 0.0;
    my $timevrfytot = 0.0;
    my $timetesttot = 0.0;

    for my $testnum (1 .. $lasttest) {
        # only tests that actually ran left a server-start timestamp
        if($timesrvrini{$testnum}) {
            # compute each per-test phase delta once instead of twice
            my $srvr = $timesrvrend{$testnum} - $timesrvrini{$testnum};
            my $prep = ($timetoolini{$testnum} - $timeprepini{$testnum}) - $srvr;
            my $tool = $timetoolend{$testnum} - $timetoolini{$testnum};
            my $lock = $timesrvrlog{$testnum} - $timetoolend{$testnum};
            my $vrfy = $timevrfyend{$testnum} - $timesrvrlog{$testnum};
            my $test = $timevrfyend{$testnum} - $timeprepini{$testnum};
            $timesrvrtot += $srvr;
            $timepreptot += $prep;
            $timetooltot += $tool;
            $timelocktot += $lock;
            $timevrfytot += $vrfy;
            $timetesttot += $test;
            push @timesrvr, sprintf("%06.3f %04d", $srvr, $testnum);
            push @timeprep, sprintf("%06.3f %04d", $prep, $testnum);
            push @timetool, sprintf("%06.3f %04d", $tool, $testnum);
            push @timelock, sprintf("%06.3f %04d", $lock, $testnum);
            push @timevrfy, sprintf("%06.3f %04d", $vrfy, $testnum);
            push @timetest, sprintf("%06.3f %04d", $test, $testnum);
        }
    }

    {
        # numeric sort keys off the leading seconds value of each
        # string; 'no warnings' silences the trailing " testnum" part
        no warnings 'numeric';
        @timesrvr = sort { $b <=> $a } @timesrvr;
        @timeprep = sort { $b <=> $a } @timeprep;
        @timetool = sort { $b <=> $a } @timetool;
        @timelock = sort { $b <=> $a } @timelock;
        @timevrfy = sort { $b <=> $a } @timevrfy;
        @timetest = sort { $b <=> $a } @timetest;
    }

    logmsg "Spent ". sprintf("%08.3f ", $timesrvrtot) .
           "seconds starting and verifying test harness servers.\n";
    logmsg "Spent ". sprintf("%08.3f ", $timepreptot) .
           "seconds reading definitions and doing test preparations.\n";
    logmsg "Spent ". sprintf("%08.3f ", $timetooltot) .
           "seconds actually running test tools.\n";
    logmsg "Spent ". sprintf("%08.3f ", $timelocktot) .
           "seconds awaiting server logs lock removal.\n";
    logmsg "Spent ". sprintf("%08.3f ", $timevrfytot) .
           "seconds verifying test results.\n";
    logmsg "Spent ". sprintf("%08.3f ", $timetesttot) .
           "seconds doing all of the above.\n";

    # per-category slowest-first listings (same order and counts as
    # the original duplicated sections)
    _show_timestat("Test server starting and verification time", 25, \@timesrvr);
    _show_timestat("Test definition reading and preparation time", 10, \@timeprep);
    _show_timestat("Test tool execution time", 25, \@timetool);
    _show_timestat("Test server logs lock removal time", 15, \@timelock);
    _show_timestat("Test results verification time", 10, \@timevrfy);
    _show_timestat("Total time", 50, \@timetest);

    logmsg "\n";
}
#######################################################################
# Check options to this test program
#
# Parses @ARGV in place; every recognized flag sets a script-level
# global.  Trailing arguments select tests: plain numbers, "N to M"
# ranges, "!N" exclusions, and (negated) keyword filters.
my $number=0;
my $fromnum=-1;
my @testthis;
while(@ARGV) {
    if ($ARGV[0] eq "-v") {
        # verbose output
        $verbose=1;
    }
    elsif($ARGV[0] =~ /^-b(.*)/) {
        # base port number for the test servers
        my $portno=$1;
        if($portno =~ s/(\d+)$//) {
            $base = int $1;
        }
    }
    elsif ($ARGV[0] eq "-c") {
        # use this path to curl instead of default
        $DBGCURL=$CURL=$ARGV[1];
        shift @ARGV;
    }
    elsif ($ARGV[0] eq "-d") {
        # have the servers display protocol output
        $debugprotocol=1;
    }
    elsif ($ARGV[0] eq "-g") {
        # run this test with gdb
        $gdbthis=1;
    }
    elsif ($ARGV[0] eq "-gw") {
        # run this test with windowed gdb
        $gdbthis=1;
        $gdbxwin=1;
    }
    elsif($ARGV[0] eq "-s") {
        # short output
        $short=1;
    }
    elsif($ARGV[0] eq "-n") {
        # no valgrind
        undef $valgrind;
    }
    elsif($ARGV[0] =~ /^-t(.*)/) {
        # torture
        $torture=1;
        my $xtra = $1;
        # an optional trailing number selects which alloc should fail
        if($xtra =~ s/(\d+)$//) {
            $tortalloc = $1;
        }
        # we undef valgrind to make this fly in comparison
        undef $valgrind;
    }
    elsif($ARGV[0] eq "-a") {
        # continue anyway, even if a test fail
        $anyway=1;
    }
    elsif($ARGV[0] eq "-p") {
        # print log file contents when a test fails
        $postmortem=1;
    }
    elsif($ARGV[0] eq "-l") {
        # lists the test case names only
        $listonly=1;
    }
    elsif($ARGV[0] eq "-k") {
        # keep stdout and stderr files after tests
        $keepoutfiles=1;
    }
    elsif($ARGV[0] eq "-r") {
        # run time statistics needs Time::HiRes
        if($Time::HiRes::VERSION) {
            # preallocate hash buckets for the per-test timestamps
            keys(%timeprepini) = 1000;
            keys(%timesrvrini) = 1000;
            keys(%timesrvrend) = 1000;
            keys(%timetoolini) = 1000;
            keys(%timetoolend) = 1000;
            keys(%timesrvrlog) = 1000;
            keys(%timevrfyend) = 1000;
            $timestats=1;
            $fullstats=0;
        }
    }
    elsif($ARGV[0] eq "-rf") {
        # run time statistics needs Time::HiRes
        if($Time::HiRes::VERSION) {
            # preallocate hash buckets for the per-test timestamps
            keys(%timeprepini) = 1000;
            keys(%timesrvrini) = 1000;
            keys(%timesrvrend) = 1000;
            keys(%timetoolini) = 1000;
            keys(%timetoolend) = 1000;
            keys(%timesrvrlog) = 1000;
            keys(%timevrfyend) = 1000;
            $timestats=1;
            $fullstats=1;
        }
    }
    elsif(($ARGV[0] eq "-h") || ($ARGV[0] eq "--help")) {
        # show help text
        print <<EOHELP
Usage: runtests.pl [options] [test selection(s)]
  -a       continue even if a test fails
  -bN      use base port number N for test servers (default $base)
  -c path  use this curl executable
  -d       display server debug info
  -g       run the test case with gdb
  -gw      run the test case with gdb as a windowed application
  -h       this help text
  -k       keep stdout and stderr files present after tests
  -l       list all test case names/descriptions
  -n       no valgrind
  -p       print log file contents when a test fails
  -r       run time statistics
  -rf      full run time statistics
  -s       short output
  -t[N]    torture (simulate memory alloc failures); N means fail Nth alloc
  -v       verbose output
  [num]    like "5 6 9" or " 5 to 22 " to run those tests only
  [!num]   like "!5 !6 !9" to disable those tests
  [keyword] like "IPv6" to select only tests containing the key word
  [!keyword] like "!cookies" to disable any tests containing the key word
EOHELP
    ;
        exit;
    }
    elsif($ARGV[0] =~ /^(\d+)/) {
        $number = $1;
        if($fromnum >= 0) {
            # complete a "N to M" range started by a previous "to"
            for($fromnum .. $number) {
                push @testthis, $_;
            }
            $fromnum = -1;
        }
        else {
            push @testthis, $1;
        }
    }
    elsif($ARGV[0] =~ /^to$/i) {
        # range keyword: remember where the range starts
        $fromnum = $number+1;
    }
    elsif($ARGV[0] =~ /^!(\d+)/) {
        # "!N" disables a single test number
        $fromnum = -1;
        $disabled{$1}=$1;
    }
    elsif($ARGV[0] =~ /^!(.+)/) {
        # "!keyword" disables all tests containing this keyword
        $disabled_keywords{$1}=$1;
    }
    elsif($ARGV[0] =~ /^([-[{a-zA-Z].*)/) {
        # bare keyword: run only tests containing it
        $enabled_keywords{$1}=$1;
    }
    else {
        print "Unknown option: $ARGV[0]\n";
        exit;
    }
    shift @ARGV;
}
if(@testthis && ($testthis[0] ne "")) {
    $TESTCASES=join(" ", @testthis);
}
# Probe valgrind on the host: verify it can be invoked, work out
# whether --tool is needed, wrap libtool shell-script "binaries", and
# pick the right log file option name for valgrind >= 3.
if($valgrind) {
    # we have found valgrind on the host, use it
    # verify that we can invoke it fine
    my $code = runclient("valgrind >/dev/null 2>&1");
    if(($code>>8) != 1) {
        #logmsg "Valgrind failure, disable it\n";
        undef $valgrind;
    } else {
        # since valgrind 2.1.x, '--tool' option is mandatory
        # use it, if it is supported by the version installed on the system
        runclient("valgrind --help 2>&1 | grep -- --tool > /dev/null 2>&1");
        if (($? >> 8)==0) {
            $valgrind_tool="--tool=memcheck";
        }
        # 3-arg open with a lexical handle and an explicit check; if
        # the curl "binary" cannot be read, keep the plain valgrind
        # command line (same net effect as before)
        if(open(my $curlfh, "<", $CURL)) {
            my $l = <$curlfh>;
            if($l && ($l =~ /^\#\!/)) {
                # A shell script. This is typically when built with libtool,
                $valgrind="../libtool --mode=execute $valgrind";
            }
            close($curlfh);
        }
        # valgrind 3 renamed the --logfile option to --log-file!!!
        my $ver=join(' ', runclientoutput("valgrind --version"));
        # cut off all but digits and dots
        $ver =~ s/[^0-9.]//g;
        if($ver =~ /^(\d+)/) {
            $ver = $1;
            if($ver >= 3) {
                $valgrind_logfile="--log-file";
            }
        }
    }
}
# When running under gdb, detect a libtool wrapper script so gdb is
# invoked through "libtool --mode=execute".
if ($gdbthis) {
    # open the executable curl and read the first 4 bytes of it
    # (3-arg open with a lexical handle; skip the check if unreadable)
    if(open(my $check, "<", $CURL)) {
        my $c;
        sysread $check, $c, 4;
        close($check);
        if(defined($c) && ($c eq "#! /")) {
            # A shell script. This is typically when built with libtool,
            $libtool = 1;
            $gdb = "libtool --mode=execute gdb";
        }
    }
}
# Allocate one consecutive port per test server, starting at $base
# (changed with -b).  These variable names are referenced throughout
# the harness, so the assignment order matters.
$HTTPPORT = $base++; # HTTP server port
$HTTPSPORT = $base++; # HTTPS (stunnel) server port
$FTPPORT = $base++; # FTP server port
$FTPSPORT = $base++; # FTPS (stunnel) server port
$HTTP6PORT = $base++; # HTTP IPv6 server port
$FTP2PORT = $base++; # FTP server 2 port
$FTP6PORT = $base++; # FTP IPv6 port
$TFTPPORT = $base++; # TFTP (UDP) port
$TFTP6PORT = $base++; # TFTP IPv6 (UDP) port
$SSHPORT = $base++; # SSH (SCP/SFTP) port
$SOCKSPORT = $base++; # SOCKS port
$POP3PORT = $base++; # POP3 server port
$POP36PORT = $base++; # POP3 IPv6 server port
$IMAPPORT = $base++; # IMAP server port
$IMAP6PORT = $base++; # IMAP IPv6 server port
$SMTPPORT = $base++; # SMTP server port
$SMTP6PORT = $base++; # SMTP IPv6 server port
$RTSPPORT = $base++; # RTSP server port
$RTSP6PORT = $base++; # RTSP IPv6 server port
$GOPHERPORT = $base++; # Gopher IPv4 server port
$GOPHER6PORT = $base++; # Gopher IPv6 server port
$HTTPTLSPORT = $base++; # HTTP TLS (non-stunnel) server port
$HTTPTLS6PORT = $base++; # HTTP TLS (non-stunnel) IPv6 server port
$HTTPPROXYPORT = $base++; # HTTP proxy port, when using CONNECT
#######################################################################
# clear and create logging directory:
#
cleardir($LOGDIR);
mkdir($LOGDIR, 0777);
#######################################################################
# initialize some variables
#
get_disttests();
init_serverpidfile_hash();
#######################################################################
# Output curl version and host info being tested
#
# -l only lists test names, so skip the (slow) system inspection then
if(!$listonly) {
    checksystem();
}
#######################################################################
# Fetch all disabled tests
#
# The optional $TESTDIR/DISABLED file lists test numbers, one per line;
# lines starting with '#' are comments.  A missing file means nothing
# is disabled (same best-effort behavior as before, without reading
# from an unopened handle).
if(open(my $disabledfh, "<", "$TESTDIR/DISABLED")) {
    while(<$disabledfh>) {
        if(/^ *\#/) {
            # allow comments
            next;
        }
        if($_ =~ /(\d+)/) {
            $disabled{$1}=$1; # disable this test number
        }
    }
    close($disabledfh);
}
#######################################################################
# If 'all' tests are requested, find out all test numbers
#
if ( $TESTCASES eq "all") {
    # Get all commands and find out their test numbers
    opendir(DIR, $TESTDIR) || die "can't opendir $TESTDIR: $!";
    my @cmds = grep { /^test([0-9]+)$/ && -f "$TESTDIR/$_" } readdir(DIR);
    closedir(DIR);
    $TESTCASES=""; # start with no test cases
    # cut off everything but the digits
    for(@cmds) {
        $_ =~ s/[a-z\/\.]*//g;
    }
    # sort the numbers from low to high
    foreach my $n (sort { $a <=> $b } @cmds) {
        if($disabled{$n}) {
            # skip disabled test cases
            my $why = "configured as DISABLED";
            # note: scalar $skipped and hash %skipped are distinct globals
            $skipped++;
            $skipped{$why}++;
            $teststat[$n]=$why; # store reason for this test case
            next;
        }
        $TESTCASES .= " $n";
    }
}
#######################################################################
# Start the command line log
#
# CMDLOG stays a bareword/global handle because it is closed after the
# test loop further down this script; use the safer 3-arg open form.
# Failure to open is non-fatal: tests still run, just unlogged.
open(CMDLOG, ">", $CURLLOG) ||
    logmsg "can't log command lines to $CURLLOG\n";
#######################################################################
# Display the contents of the given file. Line endings are
# canonicalized and excessively long files are elided
sub displaylogcontent {
    # $file: path of the log file to emit through logmsg()
    my ($file)=@_;
    # 3-arg open with a lexical handle; silently skip unreadable files
    # (best-effort display, matching the previous behavior)
    if(open(my $single, "<", $file)) {
        my $linecount = 0;
        my $truncate;
        my @tail;
        while(my $string = <$single>) {
            # canonicalize CRLF and stray CR/FF/^Z into plain newlines
            $string =~ s/\r\n/\n/g;
            $string =~ s/[\r\f\032]/\n/g;
            $string .= "\n" unless ($string =~ /\n$/);
            # (the old "tr/\n//" here only counted newlines and
            # discarded the result - dropped as a no-op)
            for my $line (split("\n", $string)) {
                # strip a trailing "!" marker and preceding blanks
                $line =~ s/\s*\!$//;
                if ($truncate) {
                    # past the 1000-line limit: buffer for tail display
                    push @tail, " $line\n";
                } else {
                    logmsg " $line\n";
                }
                $linecount++;
                $truncate = $linecount > 1000;
            }
        }
        if(@tail) {
            # show at most the last 200 buffered lines
            my $tailshow = 200;
            my $tailskip = 0;
            my $tailtotal = scalar @tail;
            if($tailtotal > $tailshow) {
                $tailskip = $tailtotal - $tailshow;
                logmsg "=== File too long: $tailskip lines omitted here\n";
            }
            for($tailskip .. $tailtotal-1) {
                logmsg "$tail[$_]";
            }
        }
        close($single);
    }
}
# Dump every relevant file from $LOGDIR after test $testnum failed.
# Files belonging to *other* tests (their name carries a different
# test number) and housekeeping entries are skipped.
sub displaylogs {
    my ($testnum)=@_;
    opendir(my $dir, "$LOGDIR") ||
        die "can't open dir: $!";
    my @logs = readdir($dir);
    closedir($dir);

    # Per-test log name patterns: each entry pairs the generic pattern
    # with the pattern that the current test's own files match.  This
    # replaces nine near-identical if-statements.
    my @pertest = (
        [ qr/^stdout\d+/,    qr/^stdout$testnum/ ],
        [ qr/^stderr\d+/,    qr/^stderr$testnum/ ],
        [ qr/^upload\d+/,    qr/^upload$testnum/ ],
        [ qr/^curl\d+\.out/, qr/^curl$testnum\.out/ ],
        [ qr/^test\d+\.txt/, qr/^test$testnum\.txt/ ],
        [ qr/^file\d+\.txt/, qr/^file$testnum\.txt/ ],
        [ qr/^netrc\d+/,     qr/^netrc$testnum/ ],
        [ qr/^trace\d+/,     qr/^trace$testnum/ ],
        [ qr/^valgrind\d+/,  qr/^valgrind$testnum(\..*|)$/ ],
    );

    logmsg "== Contents of files in the $LOGDIR/ dir after test $testnum\n";
    LOG: foreach my $log (sort @logs) {
        next if($log =~ /\.(\.|)$/);    # skip "." and ".."
        next if($log =~ /^\.nfs/);      # skip ".nfs" files
        next if(($log eq "memdump") || ($log eq "core"));
        # skip directories and empty files
        next if((-d "$LOGDIR/$log") || (! -s "$LOGDIR/$log"));
        foreach my $pair (@pertest) {
            # skip per-test files that belong to some other test
            next LOG if(($log =~ $pair->[0]) && ($log !~ $pair->[1]));
        }
        logmsg "=== Start of file $log\n";
        displaylogcontent("$LOGDIR/$log");
        logmsg "=== End of file $log\n";
    }
}
#######################################################################
# The main test-loop
#
# Runs every selected test in order, tracks pass/fail/skip counters
# and prints the final summary.  Exits non-zero if any test failed.
my $failed;
my $testnum;
my $ok=0;
my $total=0;
my $lasttest=0;
my @at = split(" ", $TESTCASES);
my $count=0;
$start = time();
foreach $testnum (@at) {
    $lasttest = $testnum if($testnum > $lasttest);
    $count++;
    my $error = singletest($testnum, $count, scalar(@at));
    if($error < 0) {
        # not a test we can run
        next;
    }
    $total++; # number of tests we've run
    if($error>0) {
        $failed.= "$testnum ";
        if($postmortem) {
            # display all files in log/ in a nice way
            displaylogs($testnum);
        }
        if(!$anyway) {
            # a test failed, abort
            logmsg "\n - abort tests\n";
            last;
        }
    }
    elsif(!$error) {
        $ok++; # successful test counter
    }
    # loop for next test
}
my $sofar = time() - $start;
#######################################################################
# Close command log
#
close(CMDLOG);
# Tests done, stop the servers
stopservers($verbose);
my $all = $total + $skipped;
runtimestats($lasttest);
if($total) {
    logmsg sprintf("TESTDONE: $ok tests out of $total reported OK: %d%%\n",
                   $ok/$total*100);
    if($ok != $total) {
        logmsg "TESTFAIL: These test cases failed: $failed\n";
    }
}
else {
    logmsg "TESTFAIL: No tests were performed\n";
}
if($all) {
    logmsg "TESTDONE: $all tests were considered during ".
        sprintf("%.0f", $sofar) ." seconds.\n";
}
if($skipped && !$short) {
    my $s=0;
    logmsg "TESTINFO: $skipped tests were skipped due to these restraints:\n";
    for(keys %skipped) {
        my $r = $_;
        # use logmsg (not printf) so this line also reaches the log
        # file, consistent with all surrounding output
        logmsg sprintf("TESTINFO: \"%s\" %d times (", $r, $skipped{$_});
        # now show all test case numbers that had this reason for being
        # skipped
        my $c=0;
        my $max = 9;
        for(0 .. scalar @teststat) {
            my $t = $_;
            if($teststat[$_] && ($teststat[$_] eq $r)) {
                if($c < $max) {
                    logmsg ", " if($c);
                    logmsg $_;
                }
                $c++;
            }
        }
        if($c > $max) {
            logmsg " and ".($c-$max)." more";
        }
        logmsg ")\n";
    }
}
if($total && ($ok != $total)) {
    # at least one test failed: report failure via the exit status
    exit 1;
}
| FlashYoshi/YoutubePlaylistDownloader | curl-7.28.1/curl-7.28.1/tests/runtests.pl | Perl | mit | 151,039 |
%query: append(o,o,o).
% mode: append[i,i,o]
% append(Xs, Ys, Zs): Zs is the concatenation of lists Xs and Ys.
append([],L,L).
append([H|L1],L2,[H|L3]) :- append(L1,L2,L3).
% mode: append1[o,o,i]
% append1/3: the same concatenation relation, declared for the reverse
% (splitting) mode where only the third argument is instantiated.
append1([],L,L).
append1([H|L1],L2,[H|L3]) :- append1(L1,L2,L3).
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Logic_Programming/talp_plumer/pl1.1.pl | Perl | mit | 198 |
# DBIx::Class row class for the layer_classes table: a layer class has
# an id, a name and free-form semantics text.
package SmartSea::Schema::Result::LayerClass;
use strict;
use warnings;
use 5.010000;
use base qw/DBIx::Class::Core/;
use Scalar::Util 'blessed';
use SmartSea::Core qw(:all);
use SmartSea::HTML qw(:all);
# Column metadata; the html_* keys presumably drive SmartSea's form
# rendering rather than DBIx::Class itself - TODO confirm with callers.
my @columns = (
    id => {},
    name => {data_type => 'text', html_size => 30, not_null => 1},
    semantics => {html_input => 'textarea', rows => 10, cols => 20},
    );
__PACKAGE__->table('layer_classes');
__PACKAGE__->add_columns(@columns);
__PACKAGE__->set_primary_key('id');
# One class groups many layers; 'uses' bridges to uses through layer.
__PACKAGE__->has_many(layers => 'SmartSea::Schema::Result::Layer', 'layer_class');
__PACKAGE__->many_to_many(uses => 'layer', 'use');
# Alias Dataset's semantics_hash method into this package via the glob.
*semantics_hash = *SmartSea::Schema::Result::Dataset::semantics_hash;
1;
| ajolma/SmartSeaMSPTool | SmartSea/Schema/Result/LayerClass.pm | Perl | mit | 709 |
package TrafficOpsRoutes;
#
# Copyright 2015 Comcast Cable Communications Management, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
#
# Construct an empty routes object; no state is stored on it.
sub new {
	my ($class) = @_;
	return bless {}, $class;
}
# Register every route group on the Mojolicious router $r: the UI
# routes, the 1.0 API routes, the 1.1/1.2 API routes, the Traffic
# Stats extension, and finally the catch-all handler.  Registration
# order matters, so it is preserved exactly.
sub define {
	my ( $self, $r ) = @_;

	$self->ui_routes($r);

	my $namespace = "API";

	# 1.0 Routes
	$self->api_1_0_routes( $r, "UI" );

	# 1.1 and 1.2 share one handler set; register each in turn
	foreach my $version ( "1.1", "1.2" ) {
		$self->api_routes( $r, $version, $namespace );
	}

	# Traffic Stats Extension (registered against the newest version)
	$self->traffic_stats_routes( $r, "1.2" );

	$self->catch_all( $r, $namespace );
}
sub ui_routes {
my $self = shift;
my $r = shift;
my $namespace = "UI";
# This route needs to be at the top to kick in first.
$r->get('/')->over( authenticated => 1 )->to( 'RascalStatus#health', namespace => $namespace );
# ------------------------------------------------------------------------
# NOTE: Routes should be grouped by their controller
# ------------------------------------------------------------------------
# -- About
$r->get('/help/about')->over( authenticated => 1 )->to( 'Help#about', namespace => $namespace );
$r->get('/help/releasenotes')->over( authenticated => 1 )->to( 'Help#releasenotes', namespace => $namespace );
# -- Anomaly
$r->get('/anomaly/:host_name')->to( 'Anomaly#start', namespace => $namespace );
# -- BlueImpLoader
$r->get('/blueimp_uploader')->over( authenticated => 1 )->to( 'blueimp_uploader#blueimp', namespace => $namespace );
# -- Cachegroup
# deprecated - see: /api/$version/location/:parameter_id/parameter
# $r->get('/availablelocation/:paramid')->over( authenticated => 1 )->to( 'Cachegroup#availablelocation', namespace => $namespace );
$r->get('/misc')->over( authenticated => 1 )->to( 'Cachegroup#index', namespace => $namespace );
$r->get('/cachegroups')->over( authenticated => 1 )->to( 'Cachegroup#index', namespace => $namespace );
$r->get('/cachegroup/add')->over( authenticated => 1 )->to( 'Cachegroup#add', namespace => $namespace );
$r->post('/cachegroup/create')->over( authenticated => 1 )->to( 'Cachegroup#create', namespace => $namespace );
$r->get('/cachegroup/:id/delete')->over( authenticated => 1 )->to( 'Cachegroup#delete', namespace => $namespace );
# mode is either 'edit' or 'view'.
$r->route('/cachegroup/:mode/:id')->via('GET')->over( authenticated => 1 )->to( 'Cachegroup#view', namespace => $namespace );
$r->post('/cachegroup/:id/update')->over( authenticated => 1 )->to( 'Cachegroup#update', namespace => $namespace );
# -- Cdn
$r->post('/login')->to( 'Cdn#login', namespace => $namespace );
$r->get('/logout')->to( 'Cdn#logoutclicked', namespace => $namespace );
$r->get('/loginpage')->to( 'Cdn#loginpage', namespace => $namespace );
$r->get('/')->to( 'Cdn#loginpage', namespace => $namespace );
# Cdn - Special JSON format for datatables widget
$r->get('/aadata/:table')->over( authenticated => 1 )->to( 'Cdn#aadata', namespace => $namespace );
$r->get('/aadata/:table/:filter/:value')->over( authenticated => 1 )->to( 'Cdn#aadata', namespace => $namespace );
# -- Changelog
$r->get('/log')->over( authenticated => 1 )->to( 'ChangeLog#changelog', namespace => $namespace );
$r->post('/create/log')->over( authenticated => 1 )->to( 'ChangeLog#createlog', namespace => $namespace );
$r->get('/newlogcount')->over( authenticated => 1 )->to( 'ChangeLog#newlogcount', namespace => $namespace );
# -- Configuredrac - Configure Dell DRAC settings (RAID, BIOS, etc)
$r->post('/configuredrac')->over( authenticated => 1 )->to( 'Dell#configuredrac', namespace => $namespace );
# -- Configfiles
$r->route('/genfiles/:mode/:id/#filename')->via('GET')->over( authenticated => 1 )->to( 'ConfigFiles#genfiles', namespace => $namespace );
$r->route('/genfiles/:mode/byprofile/:profile/CRConfig.xml')->via('GET')->over( authenticated => 1 )
->to( 'ConfigFiles#genfiles_crconfig_profile', namespace => $namespace );
$r->route('/genfiles/:mode/bycdnname/:cdnname/CRConfig.xml')->via('GET')->over( authenticated => 1 )
->to( 'ConfigFiles#genfiles_crconfig_cdnname', namespace => $namespace );
$r->route('/snapshot_crconfig')->via( 'GET', 'POST' )->over( authenticated => 1 )->to( 'ConfigFiles#snapshot_crconfig', namespace => $namespace );
$r->post('/upload_ccr_compare')->over( authenticated => 1 )->to( 'ConfigFiles#diff_ccr_xml_file', namespace => $namespace );
# -- Asn
$r->get('/asns')->over( authenticated => 1 )->to( 'Asn#index', namespace => $namespace );
$r->get('/asns/add')->over( authenticated => 1 )->to( 'Asn#add', namespace => $namespace );
$r->post('/asns/create')->over( authenticated => 1 )->to( 'Asn#create', namespace => $namespace );
$r->get('/asns/:id/delete')->over( authenticated => 1 )->to( 'Asn#delete', namespace => $namespace );
$r->post('/asns/:id/update')->over( authenticated => 1 )->to( 'Asn#update', namespace => $namespace );
$r->route('/asns/:id/:mode')->via('GET')->over( authenticated => 1 )->to( 'Asn#view', namespace => $namespace );
# -- CDNs
$r->get('/cdns')->over( authenticated => 1 )->to( 'Cdn#index', namespace => $namespace );
$r->get('/cdn/add')->over( authenticated => 1 )->to( 'Cdn#add', namespace => $namespace );
$r->post('/cdn/create')->over( authenticated => 1 )->to( 'Cdn#create', namespace => $namespace );
$r->get('/cdn/:id/delete')->over( authenticated => 1 )->to( 'Cdn#delete', namespace => $namespace );
# mode is either 'edit' or 'view'.
$r->route('/cdn/:mode/:id')->via('GET')->over( authenticated => 1 )->to( 'Cdn#view', namespace => $namespace );
$r->post('/cdn/:id/update')->over( authenticated => 1 )->to( 'Cdn#update', namespace => $namespace );
$r->get('/cdns/:cdn_name/dnsseckeys/add')->over( authenticated => 1 )->to( 'DnssecKeys#add', namespace => $namespace );
$r->get('/cdns/:cdn_name/dnsseckeys/addksk')->over( authenticated => 1 )->to( 'DnssecKeys#addksk', namespace => $namespace );
$r->post('/cdns/dnsseckeys/create')->over( authenticated => 1 )->to( 'DnssecKeys#create', namespace => $namespace );
$r->post('/cdns/dnsseckeys/genksk')->over( authenticated => 1 )->to( 'DnssecKeys#genksk', namespace => $namespace );
$r->get('/cdns/dnsseckeys')->to( 'DnssecKeys#index', namespace => $namespace );
$r->get('/cdns/:cdn_name/dnsseckeys/manage')->over( authenticated => 1 )->to( 'DnssecKeys#manage', namespace => $namespace );
$r->post('/cdns/dnsseckeys/activate')->over( authenticated => 1 )->to( 'DnssecKeys#activate', namespace => $namespace );
# -- Dell - print boxes
$r->get('/dells')->over( authenticated => 1 )->to( 'Dell#dells', namespace => $namespace );
# -- Division
$r->get('/divisions')->over( authenticated => 1 )->to( 'Division#index', namespace => $namespace );
$r->get('/division/add')->over( authenticated => 1 )->to( 'Division#add', namespace => $namespace );
$r->post('/division/create')->over( authenticated => 1 )->to( 'Division#create', namespace => $namespace );
$r->get('/division/:id/edit')->over( authenticated => 1 )->to( 'Division#edit', namespace => $namespace );
$r->post('/division/:id/update')->over( authenticated => 1 )->to( 'Division#update', namespace => $namespace );
$r->get('/division/:id/delete')->over( authenticated => 1 )->to( 'Division#delete', namespace => $namespace );
# -- DeliverysSrvice
$r->get('/ds/add')->over( authenticated => 1 )->to( 'DeliveryService#add', namespace => $namespace );
$r->get('/ds/:id')->over( authenticated => 1 )->to( 'DeliveryService#edit', namespace => $namespace );
$r->post('/ds/create')->over( authenticated => 1 )->to( 'DeliveryService#create', namespace => $namespace );
$r->get('/ds/:id/delete')->over( authenticated => 1 )->to( 'DeliveryService#delete', namespace => $namespace );
$r->post('/ds/:id/update')->over( authenticated => 1 )->to( 'DeliveryService#update', namespace => $namespace );
# -- Keys - SSL Key management
$r->get('/ds/:id/sslkeys/add')->to( 'SslKeys#add', namespace => $namespace );
$r->post('/ds/sslkeys/create')->over( authenticated => 1 )->to( 'SslKeys#create', namespace => $namespace );
# -- Keys - SSL Key management
$r->get('/ds/:id/urlsigkeys/add')->to( 'UrlSigKeys#add', namespace => $namespace );
# JvD: ded route?? # $r->get('/ds_by_id/:id')->over( authenticated => 1 )->to('DeliveryService#ds_by_id', namespace => $namespace );
$r->get('/healthdatadeliveryservice')->to( 'DeliveryService#readdeliveryservice', namespace => $namespace );
$r->get('/delivery_services')->over( authenticated => 1 )->to( 'DeliveryService#index', namespace => $namespace );
# -- DeliveryServiceserver
$r->post('/dss/:id/update')->over( authenticated => 1 )->to( 'DeliveryServiceServer#assign_servers', namespace => $namespace )
; # update and create are the same... ?
$r->post('/update/cpdss/:to_server')->over( authenticated => 1 )->to( 'DeliveryServiceServer#clone_server', namespace => $namespace );
$r->route('/dss/:id/edit')->via('GET')->over( authenticated => 1 )->to( 'DeliveryServiceServer#edit', namespace => $namespace );
$r->route('/cpdssiframe/:mode/:id')->via('GET')->over( authenticated => 1 )->to( 'DeliveryServiceServer#cpdss_iframe', namespace => $namespace );
$r->post('/create/dsserver')->over( authenticated => 1 )->to( 'DeliveryServiceServer#create', namespace => $namespace );
# -- DeliveryServiceTmuser
$r->post('/dstmuser')->over( authenticated => 1 )->to( 'DeliveryServiceTmUser#create', namespace => $namespace );
$r->get('/dstmuser/:ds/:tm_user_id/delete')->over( authenticated => 1 )->to( 'DeliveryServiceTmUser#delete', namespace => $namespace );
# -- Federation
$r->get('/federation')->over( authenticated => 1 )->to( 'Federation#index', namespace => $namespace );
$r->get('/federation/:federation_id/delete')->name("federation_delete")->over( authenticated => 1 )->to( 'Federation#delete', namespace => $namespace );
$r->get('/federation/:federation_id/edit')->name("federation_edit")->over( authenticated => 1 )->to( 'Federation#edit', namespace => $namespace );
$r->get('/federation/add')->name('federation_add')->over( authenticated => 1 )->to( 'Federation#add', namespace => $namespace );
$r->post('/federation')->name('federation_create')->to( 'Federation#create', namespace => $namespace );
$r->post('/federation/:federation_id')->name('federation_update')->to( 'Federation#update', namespace => $namespace );
$r->get( "/federation/resolvers" => [ format => [qw(json)] ] )->to( 'Federation#resolvers', namespace => $namespace );
$r->get( "/federation/users" => [ format => [qw(json)] ] )->to( 'Federation#users', namespace => $namespace );
# -- Gendbdump - Get DB dump
$r->get('/dbdump')->over( authenticated => 1 )->to( 'GenDbDump#dbdump', namespace => $namespace );
# -- Geniso - From the Tools tab:
$r->route('/geniso')->via('GET')->over( authenticated => 1 )->to( 'GenIso#geniso', namespace => $namespace );
$r->route('/iso_download')->via('GET')->over( authenticated => 1 )->to( 'GenIso#iso_download', namespace => $namespace );
# -- Hardware
$r->get('/hardware')->over( authenticated => 1 )->to( 'Hardware#hardware', namespace => $namespace );
$r->get('/hardware/:filter/:byvalue')->over( authenticated => 1 )->to( 'Hardware#hardware', namespace => $namespace );
# -- Health - Parameters for rascal
$r->get('/health')->to( 'Health#healthprofile', namespace => $namespace );
$r->get('/healthfull')->to( 'Health#healthfull', namespace => $namespace );
$r->get('/health/:cdnname')->to( 'Health#rascal_config', namespace => $namespace );
# -- Job - These are for internal/agent job operations
$r->post('/job/external/new')->to( 'Job#newjob', namespace => $namespace );
$r->get('/job/external/view/:id')->to( 'Job#read_job_by_id', namespace => $namespace );
$r->post('/job/external/cancel/:id')->to( 'Job#canceljob', namespace => $namespace );
$r->get('/job/external/result/view/:id')->to( 'Job#readresult', namespace => $namespace );
$r->get('/job/external/status/view/all')->to( 'Job#readstatus', namespace => $namespace );
$r->get('/job/agent/viewpendingjobs/:id')->over( authenticated => 1 )->to( 'Job#viewagentjob', namespace => $namespace );
$r->post('/job/agent/new')->over( authenticated => 1 )->to( 'Job#newagent', namespace => $namespace );
$r->post('/job/agent/result/new')->over( authenticated => 1 )->to( 'Job#newresult', namespace => $namespace );
$r->get('/job/agent/statusupdate/:id')->over( authenticated => 1 )->to( 'Job#jobstatusupdate', namespace => $namespace );
$r->get('/job/agent/view/all')->over( authenticated => 1 )->to( 'Job#readagent', namespace => $namespace );
$r->get('/job/view/all')->over( authenticated => 1 )->to( 'Job#listjob', namespace => $namespace );
$r->get('/job/agent/new')->over( authenticated => 1 )->to( 'Job#addagent', namespace => $namespace );
$r->get('/job/new')->over( authenticated => 1 )->to( 'Job#addjob', namespace => $namespace );
$r->get('/jobs')->over( authenticated => 1 )->to( 'Job#jobs', namespace => $namespace );
$r->get('/custom_charts')->over( authenticated => 1 )->to( 'CustomCharts#custom', namespace => $namespace );
$r->get('/custom_charts_single')->over( authenticated => 1 )->to( 'CustomCharts#custom_single_chart', namespace => $namespace );
$r->get('/custom_charts_single/cache/#cdn/#cdn_location/:cache/:stat')->over( authenticated => 1 )
->to( 'CustomCharts#custom_single_chart', namespace => $namespace );
$r->get('/custom_charts_single/ds/#cdn/#cdn_location/:ds/:stat')->over( authenticated => 1 )
->to( 'CustomCharts#custom_single_chart', namespace => $namespace );
$r->get('/uploadservercsv')->over( authenticated => 1 )->to( 'UploadServerCsv#uploadservercsv', namespace => $namespace );
$r->get('/generic_uploader')->over( authenticated => 1 )->to( 'GenericUploader#generic', namespace => $namespace );
$r->post('/upload_handler')->over( authenticated => 1 )->to( 'UploadHandler#upload', namespace => $namespace );
$r->post('/uploadhandlercsv')->over( authenticated => 1 )->to( 'UploadHandlerCsv#upload', namespace => $namespace );
# -- Cachegroupparameter
$r->post('/cachegroupparameter/create')->over( authenticated => 1 )->to( 'CachegroupParameter#create', namespace => $namespace );
$r->get('/cachegroupparameter/#cachegroup/#parameter/delete')->over( authenticated => 1 )->to( 'CachegroupParameter#delete', namespace => $namespace );
# -- Options
$r->options('/')->to( 'Cdn#options', namespace => $namespace );
$r->options('/*')->to( 'Cdn#options', namespace => $namespace );
# -- Ort
$r->route('/ort/:hostname/ort1')->via('GET')->over( authenticated => 1 )->to( 'Ort#ort1', namespace => $namespace );
$r->route('/ort/:hostname/packages')->via('GET')->over( authenticated => 1 )->to( 'Ort#get_package_versions', namespace => $namespace );
$r->route('/ort/:hostname/package/:package')->via('GET')->over( authenticated => 1 )->to( 'Ort#get_package_version', namespace => $namespace );
$r->route('/ort/:hostname/chkconfig')->via('GET')->over( authenticated => 1 )->to( 'Ort#get_chkconfig', namespace => $namespace );
$r->route('/ort/:hostname/chkconfig/:package')->via('GET')->over( authenticated => 1 )->to( 'Ort#get_package_chkconfig', namespace => $namespace );
# -- Parameter
$r->post('/parameter/create')->over( authenticated => 1 )->to( 'Parameter#create', namespace => $namespace );
$r->get('/parameter/:id/delete')->over( authenticated => 1 )->to( 'Parameter#delete', namespace => $namespace );
$r->post('/parameter/:id/update')->over( authenticated => 1 )->to( 'Parameter#update', namespace => $namespace );
$r->get('/parameters')->over( authenticated => 1 )->to( 'Parameter#index', namespace => $namespace );
$r->get('/parameters/:filter/:byvalue')->over( authenticated => 1 )->to( 'Parameter#index', namespace => $namespace );
$r->get('/parameter/add')->over( authenticated => 1 )->to( 'Parameter#add', namespace => $namespace );
$r->route('/parameter/:id')->via('GET')->over( authenticated => 1 )->to( 'Parameter#view', namespace => $namespace );
# -- PhysLocation
$r->get('/phys_locations')->over( authenticated => 1 )->to( 'PhysLocation#index', namespace => $namespace );
$r->post('/phys_location/create')->over( authenticated => 1 )->to( 'PhysLocation#create', namespace => $namespace );
$r->get('/phys_location/add')->over( authenticated => 1 )->to( 'PhysLocation#add', namespace => $namespace );
# mode is either 'edit' or 'view'.
$r->route('/phys_location/:id/edit')->via('GET')->over( authenticated => 1 )->to( 'PhysLocation#edit', namespace => $namespace );
$r->get('/phys_location/:id/delete')->over( authenticated => 1 )->to( 'PhysLocation#delete', namespace => $namespace );
$r->post('/phys_location/:id/update')->over( authenticated => 1 )->to( 'PhysLocation#update', namespace => $namespace );
# -- Profile
$r->get('/profile/add')->over( authenticated => 1 )->to( 'Profile#add', namespace => $namespace );
$r->get('/profile/edit/:id')->over( authenticated => 1 )->to( 'Profile#edit', namespace => $namespace );
$r->route('/profile/:id/view')->via('GET')->over( authenticated => 1 )->to( 'Profile#view', namespace => $namespace );
$r->route('/cmpprofile/:profile1/:profile2')->via('GET')->over( authenticated => 1 )->to( 'Profile#compareprofile', namespace => $namespace );
$r->route('/cmpprofile/aadata/:profile1/:profile2')->via('GET')->over( authenticated => 1 )->to( 'Profile#acompareprofile', namespace => $namespace );
$r->post('/profile/create')->over( authenticated => 1 )->to( 'Profile#create', namespace => $namespace );
$r->get('/profile/import')->over( authenticated => 1 )->to( 'Profile#import', namespace => $namespace );
$r->post('/profile/doImport')->over( authenticated => 1 )->to( 'Profile#doImport', namespace => $namespace );
$r->get('/profile/:id/delete')->over( authenticated => 1 )->to( 'Profile#delete', namespace => $namespace );
$r->post('/profile/:id/update')->over( authenticated => 1 )->to( 'Profile#update', namespace => $namespace );
# select available Profile, DS or Server
$r->get('/availableprofile/:paramid')->over( authenticated => 1 )->to( 'Profile#availableprofile', namespace => $namespace );
$r->route('/profile/:id/export')->via('GET')->over( authenticated => 1 )->to( 'Profile#export', namespace => $namespace );
$r->get('/profiles')->over( authenticated => 1 )->to( 'Profile#index', namespace => $namespace );
# -- Profileparameter
$r->post('/profileparameter/create')->over( authenticated => 1 )->to( 'ProfileParameter#create', namespace => $namespace );
$r->get('/profileparameter/:profile/:parameter/delete')->over( authenticated => 1 )->to( 'ProfileParameter#delete', namespace => $namespace );
# -- Rascalstatus
$r->get('/edge_health')->over( authenticated => 1 )->to( 'RascalStatus#health', namespace => $namespace );
$r->get('/rascalstatus')->over( authenticated => 1 )->to( 'RascalStatus#health', namespace => $namespace );
# -- Region
$r->get('/regions')->over( authenticated => 1 )->to( 'Region#index', namespace => $namespace );
$r->get('/region/add')->over( authenticated => 1 )->to( 'Region#add', namespace => $namespace );
$r->post('/region/create')->over( authenticated => 1 )->to( 'Region#create', namespace => $namespace );
$r->get('/region/:id/edit')->over( authenticated => 1 )->to( 'Region#edit', namespace => $namespace );
$r->post('/region/:id/update')->over( authenticated => 1 )->to( 'Region#update', namespace => $namespace );
$r->get('/region/:id/delete')->over( authenticated => 1 )->to( 'Region#delete', namespace => $namespace );
# -- Server
$r->post('/server/:name/status/:state')->over( authenticated => 1 )->to( 'Server#rest_update_server_status', namespace => $namespace );
$r->get('/server/:name/status')->over( authenticated => 1 )->to( 'Server#get_server_status', namespace => $namespace );
$r->get('/server/:key/key')->over( authenticated => 1 )->to( 'Server#get_redis_key', namespace => $namespace );
$r->get('/servers')->over( authenticated => 1 )->to( 'Server#index', namespace => $namespace );
$r->get('/server/add')->over( authenticated => 1 )->to( 'Server#add', namespace => $namespace );
$r->post('/server/:id/update')->over( authenticated => 1 )->to( 'Server#update', namespace => $namespace );
$r->get('/server/:id/delete')->over( authenticated => 1 )->to( 'Server#delete', namespace => $namespace );
$r->route('/server/:id/:mode')->via('GET')->over( authenticated => 1 )->to( 'Server#view', namespace => $namespace );
$r->post('/server/create')->over( authenticated => 1 )->to( 'Server#create', namespace => $namespace );
$r->post('/server/updatestatus')->over( authenticated => 1 )->to( 'Server#updatestatus', namespace => $namespace );
# -- Serverstatus
$r->get('/server_check')->to( 'server_check#server_check', namespace => $namespace );
# -- Staticdnsentry
$r->route('/staticdnsentry/:id/edit')->via('GET')->over( authenticated => 1 )->to( 'StaticDnsEntry#edit', namespace => $namespace );
$r->post('/staticdnsentry/:dsid/update')->over( authenticated => 1 )->to( 'StaticDnsEntry#update_assignments', namespace => $namespace );
$r->get('/staticdnsentry/:id/delete')->over( authenticated => 1 )->to( 'StaticDnsEntry#delete', namespace => $namespace );
# -- Status
$r->post('/status/create')->over( authenticated => 1 )->to( 'Status#create', namespace => $namespace );
$r->get('/status/delete/:id')->over( authenticated => 1 )->to( 'Status#delete', namespace => $namespace );
$r->post('/status/update/:id')->over( authenticated => 1 )->to( 'Status#update', namespace => $namespace );
# -- Tools
$r->get('/tools')->over( authenticated => 1 )->to( 'Tools#tools', namespace => $namespace );
$r->get('/tools/db_dump')->over( authenticated => 1 )->to( 'Tools#db_dump', namespace => $namespace );
$r->get('/tools/queue_updates')->over( authenticated => 1 )->to( 'Tools#queue_updates', namespace => $namespace );
$r->get('/tools/snapshot_crconfig')->over( authenticated => 1 )->to( 'Tools#snapshot_crconfig', namespace => $namespace );
$r->get('/tools/diff_crconfig/:cdn_name')->over( authenticated => 1 )->to( 'Tools#diff_crconfig_iframe', namespace => $namespace );
$r->get('/tools/write_crconfig/:cdn_name')->over( authenticated => 1 )->to( 'Tools#write_crconfig', namespace => $namespace );
$r->get('/tools/invalidate_content/')->over( authenticated => 1 )->to( 'Tools#invalidate_content', namespace => $namespace );
# -- Topology - CCR Config, rewrote in json
$r->route('/genfiles/:mode/bycdnname/:cdnname/CRConfig.json')->via('GET')->over( authenticated => 1 )
->to( 'Topology#ccr_config', namespace => $namespace );
$r->get('/types')->over( authenticated => 1 )->to( 'Types#index', namespace => $namespace );
$r->route('/types/add')->via('GET')->over( authenticated => 1 )->to( 'Types#add', namespace => $namespace );
$r->route('/types/create')->via('POST')->over( authenticated => 1 )->to( 'Types#create', namespace => $namespace );
$r->route('/types/:id/update')->over( authenticated => 1 )->to( 'Types#update', namespace => $namespace );
$r->route('/types/:id/delete')->over( authenticated => 1 )->to( 'Types#delete', namespace => $namespace );
$r->route('/types/:id/:mode')->via('GET')->over( authenticated => 1 )->to( 'Types#view', namespace => $namespace );
# -- Update bit - Process updates - legacy stuff.
$r->get('/update/:host_name')->over( authenticated => 1 )->to( 'Server#readupdate', namespace => $namespace );
$r->post('/update/:host_name')->over( authenticated => 1 )->to( 'Server#postupdate', namespace => $namespace );
$r->post('/postupdatequeue/:id')->over( authenticated => 1 )->to( 'Server#postupdatequeue', namespace => $namespace );
$r->post('/postupdatequeue/:cdn/#cachegroup')->over( authenticated => 1 )->to( 'Server#postupdatequeue', namespace => $namespace );
# -- User
$r->post('/user/register/send')->over( authenticated => 1 )->name('user_register_send')->to( 'User#send_registration', namespace => $namespace );
$r->get('/users')->name("user_index")->over( authenticated => 1 )->to( 'User#index', namespace => $namespace );
$r->get('/user/:id/edit')->name("user_edit")->over( authenticated => 1 )->to( 'User#edit', namespace => $namespace );
$r->get('/user/add')->name('user_add')->over( authenticated => 1 )->to( 'User#add', namespace => $namespace );
$r->get('/user/register')->name('user_register')->to( 'User#register', namespace => $namespace );
$r->post('/user/:id/reset_password')->name('user_reset_password')->to( 'User#reset_password', namespace => $namespace );
$r->post('/user')->name('user_create')->to( 'User#create', namespace => $namespace );
$r->post('/user/:id')->name('user_update')->to( 'User#update', namespace => $namespace );
# -- Utils
$r->get('/utils/close_fancybox')->over( authenticated => 1 )->to( 'Utils#close_fancybox', namespace => $namespace );
# -- Visualstatus
$r->get('/visualstatus/:matchstring')->over( authenticated => 1 )->to( 'VisualStatus#graphs', namespace => $namespace );
$r->get('/visualstatus_redis/:matchstring')->over( authenticated => 1 )->to( 'VisualStatus#graphs_redis', namespace => $namespace );
$r->get('/redis/#match/:start/:end/:interval')->over( authenticated => 1 )->to( 'Redis#stats', namespace => 'UI' );
$r->get('/dailysummary')->over( authenticated => 1 )->to( 'VisualStatus#daily_summary', namespace => $namespace );
# deprecated - see: /api/$version/servers.json and /api/1.1/servers/hostname/:host_name/details.json
# duplicate route
$r->get('/healthdataserver')->to( 'Server#index_response', namespace => $namespace );
# deprecated - see: /api/$version/traffic_monitor/stats.json
# $r->get('/rascalstatus/getstats')->over( authenticated => 1 )->to( 'RascalStatus#get_host_stats', namespace => $namespace );
# deprecated - see: /api/$version/redis/info/#shortname
$r->get('/redis/info/#shortname')->over( authenticated => 1 )->to( 'Redis#info', namespace => $namespace );
# deprecated - see: /api/$version/redis/match/#match/start_date/:start
$r->get('/redis/#match/:start_date/:end_date/:interval')->over( authenticated => 1 )->to( 'Redis#stats', namespace => $namespace );
# select * from table where id=ID;
$r->get('/server_by_id/:id')->over( authenticated => 1 )->to( 'Server#server_by_id', namespace => $namespace );
}
# Register all versioned API routes (/api/$version/...) on the router $r.
# Controller actions resolve relative to $namespace unless a route names an
# explicit namespace (key-management routes use 'API::DeliveryService', which
# is deliberately NOT derived from $namespace).
# FIX: three $r->get(...) chains previously also called ->via('GET'); that call
# is a redundant no-op because $r->get() already restricts the route to GET,
# and no other get route in this sub used it. It has been removed.
sub api_routes {
    my $self      = shift;
    my $r         = shift;    # Mojolicious router
    my $version   = shift;    # API version string embedded in each path
    my $namespace = shift;    # default controller namespace

    # -- API DOCS
    $r->get( "/api/$version/docs" => [ format => [qw(json)] ] )->to( 'ApiDocs#index', namespace => $namespace );
    # -- CACHE GROUPS - #NEW
    # NOTE: any 'trimmed' urls will potentially go away with keys= support
    # -- orderby=key&key=name (where key is the database column)
    # -- query parameter options ?orderby=key&keys=name (where key is the database column)
    $r->get( "/api/$version/cachegroups" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Cachegroup#index', namespace => $namespace );
    $r->get( "/api/$version/cachegroups/trimmed" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Cachegroup#index_trimmed', namespace => $namespace );
    $r->get( "/api/$version/cachegroup/:parameter_id/parameter" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Cachegroup#by_parameter_id', namespace => $namespace );
    $r->get( "/api/$version/cachegroupparameters" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'CachegroupParameter#index', namespace => $namespace );
    $r->get( "/api/$version/cachegroups/:parameter_id/parameter/available" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Cachegroup#available_for_parameter', namespace => $namespace );
    # -- Federation
    $r->get( "/internal/api/$version/federations" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Federation#index', namespace => $namespace );
    $r->get( "/api/$version/federations" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Federation#external_index', namespace => $namespace );
    $r->post("/api/$version/federations")->over( authenticated => 1 )->to( 'Federation#add', namespace => $namespace );
    $r->delete("/api/$version/federations")->over( authenticated => 1 )->to( 'Federation#delete', namespace => $namespace );
    $r->put("/api/$version/federations")->over( authenticated => 1 )->to( 'Federation#update', namespace => $namespace );
    # -- CDN -- #NEW
    $r->get( "/api/$version/cdns" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Cdn#index', namespace => $namespace );
    $r->get( "/api/$version/cdns/name/:name" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Cdn#name', namespace => $namespace );
    # -- CHANGE LOG - #NEW
    $r->get( "/api/$version/logs" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'ChangeLog#index', namespace => $namespace );
    $r->get( "/api/$version/logs/:days/days" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'ChangeLog#index', namespace => $namespace );
    $r->get( "/api/$version/logs/newcount" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'ChangeLog#newlogcount', namespace => $namespace );
    # -- CRANS - #NEW
    $r->get( "/api/$version/asns" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Asn#index', namespace => $namespace );
    # -- HWINFO - #NEW
    # Supports: ?orderby=key
    $r->get("/api/$version/hwinfo")->over( authenticated => 1 )->to( 'HwInfo#index', namespace => $namespace );
    # -- KEYS
    #ping riak server
    $r->get("/api/$version/keys/ping")->over( authenticated => 1 )->to( 'Keys#ping_riak', namespace => $namespace );
    $r->get("/api/$version/riak/ping")->over( authenticated => 1 )->to( 'Riak#ping', namespace => $namespace );
    $r->get( "/api/$version/riak/bucket/#bucket/key/#key/values" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Riak#get', namespace => $namespace );
    # -- DELIVERY SERVICE
    # USED TO BE - GET /api/$version/services
    $r->get( "/api/$version/deliveryservices" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'DeliveryService#delivery_services', namespace => $namespace );
    # USED TO BE - GET /api/$version/services/:id
    $r->get( "/api/$version/deliveryservices/:id" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'DeliveryService#delivery_services', namespace => $namespace );
    # -- DELIVERY SERVICE: Health
    # USED TO BE - GET /api/$version/services/:id/health
    $r->get( "/api/$version/deliveryservices/:id/health" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'DeliveryService#health', namespace => $namespace );
    # -- DELIVERY SERVICE: Capacity
    # USED TO BE - GET /api/$version/services/:id/capacity
    $r->get( "/api/$version/deliveryservices/:id/capacity" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'DeliveryService#capacity', namespace => $namespace );
    # -- DELIVERY SERVICE: Routing
    # USED TO BE - GET /api/$version/services/:id/routing
    $r->get( "/api/$version/deliveryservices/:id/routing" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'DeliveryService#routing', namespace => $namespace );
    # -- DELIVERY SERVICE: State
    # USED TO BE - GET /api/$version/services/:id/state
    $r->get( "/api/$version/deliveryservices/:id/state" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'DeliveryService#state', namespace => $namespace );
    ## -- DELIVERY SERVICE: SSL Keys
    ## Support for SSL private keys, certs, and csrs
    #gets the latest key by default unless a version query param is provided with ?version=x
    $r->get( "/api/$version/deliveryservices/xmlId/:xmlid/sslkeys" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'SslKeys#view_by_xml_id', namespace => 'API::DeliveryService' );
    #"pristine hostname"
    $r->get( "/api/$version/deliveryservices/hostname/#hostname/sslkeys" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'SslKeys#view_by_hostname', namespace => 'API::DeliveryService' );
    #generate new
    $r->post("/api/$version/deliveryservices/sslkeys/generate")->over( authenticated => 1 )->to( 'SslKeys#generate', namespace => 'API::DeliveryService' );
    #add existing
    $r->post("/api/$version/deliveryservices/sslkeys/add")->over( authenticated => 1 )->to( 'SslKeys#add', namespace => 'API::DeliveryService' );
    #deletes the latest key by default unless a version query param is provided with ?version=x
    $r->get( "/api/$version/deliveryservices/xmlId/:xmlid/sslkeys/delete" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'SslKeys#delete', namespace => 'API::DeliveryService' );
    # -- KEYS Url Sig
    $r->post("/api/$version/deliveryservices/xmlId/:xmlId/urlkeys/generate")->over( authenticated => 1 )
        ->to( 'KeysUrlSig#generate', namespace => 'API::DeliveryService' );
    $r->get( "/api/$version/deliveryservices/xmlId/:xmlId/urlkeys" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'KeysUrlSig#view_by_xmlid', namespace => 'API::DeliveryService' );
    # ->over( authenticated => 1 )->to( 'DeliveryService#get_summary', namespace => $namespace );
    # -- DELIVERY SERVICE SERVER - #NEW
    # Supports ?orderby=key
    $r->get("/api/$version/deliveryserviceserver")->over( authenticated => 1 )->to( 'DeliveryServiceServer#index', namespace => $namespace );
    # -- EXTENSIONS
    $r->get( "/api/$version/to_extensions" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'ToExtension#index', namespace => $namespace );
    $r->post("/api/$version/to_extensions")->over( authenticated => 1 )->to( 'ToExtension#update', namespace => $namespace );
    $r->post("/api/$version/to_extensions/:id/delete")->over( authenticated => 1 )->to( 'ToExtension#delete', namespace => $namespace );
    # -- PARAMETER #NEW
    # Supports ?orderby=key
    $r->get( "/api/$version/parameters" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Parameter#index', namespace => $namespace );
    $r->get( "/api/$version/parameters/profile/:name" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Parameter#profile', namespace => $namespace );
    # -- PHYS_LOCATION #NEW
    # Supports ?orderby=key
    $r->get("/api/$version/phys_locations")->over( authenticated => 1 )->to( 'PhysLocation#index', namespace => $namespace );
    $r->get("/api/$version/phys_locations/trimmed")->over( authenticated => 1 )->to( 'PhysLocation#index_trimmed', namespace => $namespace );
    # -- PROFILES - #NEW
    # Supports ?orderby=key
    $r->get( "/api/$version/profiles" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Profile#index', namespace => $namespace );
    $r->get( "/api/$version/profiles/trimmed" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Profile#index_trimmed', namespace => $namespace );
    # -- PROFILE PARAMETERS - #NEW
    # Supports ?orderby=key
    $r->get( "/api/$version/profileparameters" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'ProfileParameter#index', namespace => $namespace );
    # -- REGION #NEW
    # Supports ?orderby=key
    $r->get("/api/$version/regions")->over( authenticated => 1 )->to( 'Region#index', namespace => $namespace );
    # -- ROLES #NEW
    # Supports ?orderby=key
    $r->get("/api/$version/roles")->over( authenticated => 1 )->to( 'Role#index', namespace => $namespace );
    # -- SERVER #NEW
    $r->get( "/api/$version/servers" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Server#index', namespace => $namespace );
    $r->get( "/api/$version/servers/summary" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Server#summary', namespace => $namespace );
    $r->get( "/api/$version/servers/hostname/:name/details" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Server#details', namespace => $namespace );
    $r->get( "/api/$version/servers/checks" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'ServerCheck#read', namespace => $namespace );
    $r->get( "/api/$version/servercheck/aadata" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'ServerCheck#aadata', namespace => $namespace );
    $r->post("/api/$version/servercheck")->over( authenticated => 1 )->to( 'ServerCheck#update', namespace => $namespace );
    # -- STATUS #NEW
    # Supports ?orderby=key
    $r->get("/api/$version/statuses")->over( authenticated => 1 )->to( 'Status#index', namespace => $namespace );
    # -- STATIC DNS ENTRIES #NEW
    $r->get("/api/$version/staticdnsentries")->over( authenticated => 1 )->to( 'StaticDnsEntry#index', namespace => $namespace );
    # -- SYSTEM
    $r->get( "/api/$version/system/info" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'System#get_info', namespace => $namespace );
    # TM Status #NEW #in use # JvD
    $r->get( "/api/$version/traffic_monitor/stats" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'TrafficMonitor#get_host_stats', namespace => $namespace );
    # -- RIAK #NEW
    $r->get("/api/$version/riak/stats")->over( authenticated => 1 )->to( 'Riak#stats', namespace => $namespace );
    # -- TYPE #NEW
    # Supports ?orderby=key
    $r->get("/api/$version/types")->over( authenticated => 1 )->to( 'Types#index', namespace => $namespace );
    $r->get("/api/$version/types/trimmed")->over( authenticated => 1 )->to( 'Types#index_trimmed', namespace => $namespace );
    # -- CDN
    # USED TO BE - Nothing, this is new
    $r->get( "/api/$version/cdns/:name/health" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Cdn#health', namespace => $namespace );
    # USED TO BE - GET /api/$version/health.json
    $r->get( "/api/$version/cdns/health" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Cdn#health', namespace => $namespace );
    # USED TO BE - GET /api/$version/capacity.json
    $r->get( "/api/$version/cdns/capacity" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Cdn#capacity', namespace => $namespace );
    # USED TO BE - GET /api/$version/routing.json
    $r->get( "/api/$version/cdns/routing" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Cdn#routing', namespace => $namespace );
    #WARNING: this is an intentionally "unauthenticated" route for the Portal Home Page.
    # USED TO BE - GET /api/$version/metrics/g/:metric/:start/:end/s.json
    $r->get( "/api/$version/cdns/metric_types/:metric_type/start_date/:start_date/end_date/:end_date" => [ format => [qw(json)] ] )
        ->to( 'Cdn#metrics', namespace => $namespace );
    ## -- CDNs: DNSSEC Keys
    ## Support for DNSSEC zone signing, key signing, and private keys
    #gets the latest key by default unless a version query param is provided with ?version=x
    $r->get( "/api/$version/cdns/name/:name/dnsseckeys" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Cdn#dnssec_keys', namespace => $namespace );
    #generate new
    $r->post("/api/$version/cdns/dnsseckeys/generate")->over( authenticated => 1 )->to( 'Cdn#dnssec_keys_generate', namespace => $namespace );
    #delete
    $r->get( "/api/$version/cdns/name/:name/dnsseckeys/delete" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Cdn#delete_dnssec_keys', namespace => $namespace );
    #checks expiration of keys and re-generates if necessary. Used by Cron.
    $r->get( "/internal/api/$version/cdns/dnsseckeys/refresh" => [ format => [qw(json)] ] )->to( 'Cdn#dnssec_keys_refresh', namespace => $namespace );
    # -- CDN: Topology
    # USED TO BE - GET /api/$version/configs/cdns
    $r->get( "/api/$version/cdns/configs" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Cdn#get_cdns', namespace => $namespace );
    # USED TO BE - GET /api/$version/configs/routing/:cdn_name
    $r->get( "/api/$version/cdns/:name/configs/routing" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Cdn#configs_routing', namespace => $namespace );
    # -- CDN: domains #NEW
    $r->get( "/api/$version/cdns/domains" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Cdn#domains', namespace => $namespace );
    # -- USERS
    $r->get( "/api/$version/users" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'User#index', namespace => $namespace );
    $r->post("/api/$version/user/login")->to( 'User#login', namespace => $namespace );
    $r->get("/api/$version/user/:id/deliveryservices/available")->over( authenticated => 1 )
        ->to( 'User#get_available_deliveryservices', namespace => $namespace );
    $r->post("/api/$version/user/login/token")->to( 'User#token_login', namespace => $namespace );
    $r->post("/api/$version/user/logout")->over( authenticated => 1 )->to( 'Cdn#tool_logout', namespace => $namespace );
    # TO BE REFACTORED TO /api/$version/deliveryservices/:id/jobs/keyword/PURGE
    # USED TO BE - GET /api/$version/user/jobs/purge.json
    # USED TO BE - POST /api/$version/user/password/reset
    $r->post("/api/$version/user/reset_password")->to( 'User#reset_password', namespace => $namespace );
    # USED TO BE - GET /api/$version/user/profile.json
    $r->get( "/api/$version/user/current" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'User#current', namespace => $namespace );
    # USED TO BE - POST /api/$version/user/job/purge
    $r->get( "/api/$version/user/current/jobs" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'Job#index', namespace => $namespace );
    $r->post("/api/$version/user/current/jobs")->over( authenticated => 1 )->to( 'Job#create', namespace => $namespace );
    # USED TO BE - POST /api/$version/user/profile.json
    $r->post("/api/$version/user/current/update")->over( authenticated => 1 )->to( 'User#update_current', namespace => $namespace );
    $r->get( "/api/$version/cdns/:name/configs/monitoring" => [ format => [qw(json)] ] )->over( authenticated => 1 )
        ->to( 'Cdn#configs_monitoring', namespace => $namespace );
    $r->get( "/api/$version/stats_summary" => [ format => [qw(json)] ] )->over( authenticated => 1 )->to( 'StatsSummary#index', namespace => $namespace );
    $r->post("/api/$version/stats_summary/create")->over( authenticated => 1 )->to( 'StatsSummary#create', namespace => $namespace );
    # -- Ping - health check for CodeBig
    $r->get(
        "/api/$version/ping" => sub {
            my $self = shift;
            $self->render(
                json => {
                    ping => "pong"
                }
            );
        }
    );
}
# ------------------------------------------------------------------------
# Legacy 1.0 API routes. Every route here is a GET and is deprecated in
# favor of a versioned /api/$version/... equivalent (noted per entry).
# Implemented as a data table driven loop: each entry is
#   [ path, 'Controller#action', requires_authentication ]
# Registration order is preserved exactly, which keeps Mojolicious route
# matching priority identical to the original hand-written list.
# ------------------------------------------------------------------------
sub api_1_0_routes {
    my $self      = shift;
    my $r         = shift;    # Mojolicious router
    my $namespace = shift;    # controller namespace for every route below

    my @legacy_gets = (
        # deprecated - see: /api/$version/crans
        [ '/datacrans',                             'Asn#index',                              1 ],
        [ '/datacrans/orderby/:orderby',            'Asn#index',                              1 ],
        # deprecated - see: /api/$version/locations
        [ '/datalocation',                          'Cachegroup#read',                        1 ],
        # deprecated - see: /api/$version/locations
        [ '/datalocation/orderby/:orderby',         'Cachegroup#read',                        1 ],
        [ '/datalocationtrimmed',                   'Cachegroup#readlocationtrimmed',         1 ],
        # deprecated - see: /api/$version/locationparameters
        [ '/datalocationparameter',                 'CachegroupParameter#index',              1 ],
        # deprecated - see: /api/$version/logs
        [ '/datalog',                               'ChangeLog#readlog',                      1 ],
        [ '/datalog/:days',                         'ChangeLog#readlog',                      1 ],
        # deprecated - see: /api/$version/parameters
        [ '/dataparameter',                         'Parameter#readparameter',                1 ],
        [ '/dataparameter/#profile_name',           'Parameter#readparameter_for_profile',    1 ],
        [ '/dataparameter/orderby/:orderby',        'Parameter#readparameter',                1 ],
        # deprecated - see: /api/$version/profiles
        [ '/dataprofile',                           'Profile#readprofile',                    1 ],
        [ '/dataprofile/orderby/:orderby',          'Profile#readprofile',                    1 ],
        [ '/dataprofiletrimmed',                    'Profile#readprofiletrimmed',             1 ],
        # deprecated - see: /api/$version/hwinfo
        [ '/datahwinfo',                            'HwInfo#readhwinfo',                      1 ],
        [ '/datahwinfo/orderby/:orderby',           'HwInfo#readhwinfo',                      1 ],
        # deprecated - see: /api/$version/profileparameters
        [ '/dataprofileparameter',                  'ProfileParameter#read',                  1 ],
        [ '/dataprofileparameter/orderby/:orderby', 'ProfileParameter#read',                  1 ],
        # deprecated - see: /api/$version/deliveryserviceserver
        [ '/datalinks',                             'DataAll#data_links',                     1 ],
        [ '/datalinks/orderby/:orderby',            'DataAll#data_links',                     1 ],
        # deprecated - see: /api/$version/deliveryserviceserver
        [ '/datadeliveryserviceserver',             'DeliveryServiceServer#read',             1 ],
        # deprecated - see: /api/$version/cdn/domains
        [ '/datadomains',                           'DataAll#data_domains',                   1 ],
        # deprecated - see: /api/$version/user/:id/deliveryservices/available.json
        [ '/availableds/:id',                       'DataAll#availableds',                    1 ],
        # deprecated - see: /api/$version/deliveryservices.json
        # NOTE: intentionally unauthenticated (auth was commented out upstream).
        [ '/datadeliveryservice',                   'DeliveryService#read',                   0 ],
        [ '/datadeliveryservice/orderby/:orderby',  'DeliveryService#read',                   1 ],
        # deprecated - see: /api/$version/deliveryservices.json
        [ '/datastatus',                            'Status#index',                           1 ],
        [ '/datastatus/orderby/:orderby',           'Status#index',                           1 ],
        # deprecated - see: /api/$version/users.json
        [ '/datauser',                              'User#read',                              1 ],
        [ '/datauser/orderby/:orderby',             'User#read',                              1 ],
        # deprecated - see: /api/$version/phys_locations.json
        [ '/dataphys_location',                     'PhysLocation#readphys_location',         1 ],
        [ '/dataphys_locationtrimmed',              'PhysLocation#readphys_locationtrimmed',  1 ],
        # deprecated - see: /api/$version/regions.json
        [ '/dataregion',                            'PhysLocation#readregion',                1 ],
        # deprecated - see: /api/$version/roles.json
        [ '/datarole',                              'Role#read',                              1 ],
        [ '/datarole/orderby/:orderby',             'Role#read',                              1 ],
        # deprecated - see: /api/$version/servers.json and /api/1.1/servers/hostname/:host_name/details.json
        # WARNING: unauthenticated
        #TODO JvD over auth after we have rascal pointed over!!
        [ '/dataserver',                            'Server#index_response',                  0 ],
        [ '/dataserver/orderby/:orderby',           'Server#index_response',                  0 ],
        # legacy route - rm me later
        [ '/dataserverdetail/select/:select',       'Server#serverdetail',                    1 ],
        # deprecated - see: /api/1.1/staticdnsentries.json
        [ '/datastaticdnsentry',                    'StaticDnsEntry#read',                    1 ],
        # -- Type
        # deprecated - see: /api/$version/types.json
        [ '/datatype',                              'Types#readtype',                         1 ],
        [ '/datatypetrimmed',                       'Types#readtypetrimmed',                  1 ],
        [ '/datatype/orderby/:orderby',             'Types#readtype',                         1 ],
    );

    for my $spec (@legacy_gets) {
        my ( $path, $action, $needs_auth ) = @{$spec};
        my $route = $r->get($path);
        $route = $route->over( authenticated => 1 ) if $needs_auth;
        $route->to( $action, namespace => $namespace );
    }
}
sub traffic_stats_routes {
    my ( $self, $r, $version ) = @_;
    my $namespace = "Extensions::TrafficStats::API";

    # [ path, controller#action, requires_authentication ]
    my @routes = (
        [ "/api/$version/cdns/usage/overview",       'CdnStats#get_usage_overview',   0 ],
        [ "/api/$version/deliveryservice_stats",     'DeliveryServiceStats#index',    1 ],
        [ "/api/$version/cache_stats",               'CacheStats#index',              1 ],
        [ "internal/api/$version/current_bandwidth",   'CacheStats#current_bandwidth',   0 ],
        [ "internal/api/$version/current_connections", 'CacheStats#current_connections', 0 ],
        [ "internal/api/$version/current_capacity",    'CacheStats#current_capacity',    0 ],
    );

    # All Traffic Stats endpoints respond to JSON only.
    for my $spec (@routes) {
        my ( $path, $action, $needs_auth ) = @$spec;
        my $route = $r->get( $path => [ format => [qw(json)] ] );
        $route = $route->over( authenticated => 1 ) if $needs_auth;
        $route->to( $action, namespace => $namespace );
    }
}
sub catch_all {
    my ( $self, $r, $namespace ) = @_;

    # -- CATCH ALL: any unmatched /api/... request for any verb goes to Cdn#catch_all.
    for my $http_method (qw(get post put delete)) {
        $r->$http_method('/api/(*everything)')->to( 'Cdn#catch_all', namespace => $namespace );
    }

    # Any other unmatched GET: 404 for logged-in users, 401 + login page otherwise.
    $r->get(
        '/(*everything)' => sub {
            my $c = shift;
            if ( defined( $c->current_user() ) ) {
                $c->render( template => "not_found", status => 404 );
            }
            else {
                $c->flash( login_msg => "Unauthorized . Please log in ." );
                $c->render( controller => 'cdn', action => 'loginpage', layout => undef, status => 401 );
            }
        }
    );
}
1;
| hbeatty/traffic_control | traffic_ops/app/lib/TrafficOpsRoutes.pm | Perl | apache-2.0 | 50,861 |
package Google::Ads::AdWords::v201402::BiddingStrategySource;
use strict;
use warnings;

# XML namespace of this AdWords v201402 simpleType.
sub get_xmlns {
    return 'https://adwords.google.com/api/adwords/cm/v201402';
}

# derivation by restriction: a restricted xsd:string (see POD below).
use base qw(SOAP::WSDL::XSD::Typelib::Builtin::string);

1;
__END__
=pod
=head1 NAME
=head1 DESCRIPTION
Perl data type class for the XML Schema defined simpleType
BiddingStrategySource from the namespace https://adwords.google.com/api/adwords/cm/v201402.
Indicates where bidding strategy came from: campaign, adgroup or criterion.
This class is derived from
SOAP::WSDL::XSD::Typelib::Builtin::string
. SOAP::WSDL's schema implementation does not validate data, so you can use it exactly
like its base type.
# Description of restrictions not implemented yet.
=head1 METHODS
=head2 new
Constructor.
=head2 get_value / set_value
Getter and setter for the simpleType's value.
=head1 OVERLOADING
Depending on the simple type's base type, the following operations are overloaded
Stringification
Numerification
Boolification
Check L<SOAP::WSDL::XSD::Typelib::Builtin> for more information.
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| gitpan/GOOGLE-ADWORDS-PERL-CLIENT | lib/Google/Ads/AdWords/v201402/BiddingStrategySource.pm | Perl | apache-2.0 | 1,156 |
#
# Copyright 2015 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package centreon::plugins::mode;
use strict;
use warnings;
use centreon::plugins::perfdata;
sub new {
    my ( $class, %options ) = @_;

    # Hash-based object shared by every plugin mode.
    my $self = bless {}, $class;

    $self->{perfdata}       = centreon::plugins::perfdata->new( output => $options{output} );
    $self->{option_results} = {};
    $self->{output}         = $options{output};
    $self->{mode}           = $options{mode};
    $self->{version}        = undef;

    return $self;
}
sub init {
    my ( $self, %options ) = @_;

    # options{default} = { mode_xxx => { option_name => option_value }, }
    # Copy the parsed command-line options into this mode's option hash.
    %{ $self->{option_results} } = %{ $options{option_results} };

    # Apply per-mode defaults for any option the user did not set.
    return if ( !defined( $options{default} ) );

    foreach my $mode_name ( keys %{ $options{default} } ) {
        next if ( $mode_name ne $self->{mode} );
        my $defaults = $options{default}->{$mode_name};
        foreach my $opt_name ( keys %$defaults ) {
            if ( !defined( $self->{option_results}->{$opt_name} ) ) {
                $self->{option_results}->{$opt_name} = $defaults->{$opt_name};
            }
        }
    }
}
# Add this mode's version string to the plugin output messages.
sub version {
my ($self, %options) = @_;
$self->{output}->add_option_msg(short_msg => "Mode Version: " . $self->{version});
}
# Discovery-format hook: no-op here; modes that support service discovery
# override this to declare the attributes of their discovery output.
sub disco_format {
my ($self, %options) = @_;
}
# Discovery hook: no-op here; modes that support service discovery
# override this to emit the discovered items.
sub disco_show {
my ($self, %options) = @_;
}
1;
__END__
| s-duret/centreon-plugins | centreon/plugins/mode.pm | Perl | apache-2.0 | 2,055 |
# Generated by SOAP::WSDL (see POD below) - element class for the
# ApiExceptionFault SOAP fault of the OfflineDataUploadService.
package Google::Ads::AdWords::v201809::OfflineDataUploadService::ApiExceptionFault;
use strict;
use warnings;
{ # BLOCK to scope variables
# XML namespace of the AdWords rm (remarketing) service, v201809.
sub get_xmlns { 'https://adwords.google.com/api/adwords/rm/v201809' }
# Element metadata consumed by the SOAP::WSDL typelib machinery.
__PACKAGE__->__set_name('ApiExceptionFault');
__PACKAGE__->__set_nillable();
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();
# Behaves as an element whose content is an ApiException.
use base qw(
SOAP::WSDL::XSD::Typelib::Element
Google::Ads::AdWords::v201809::ApiException
);
}
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201809::OfflineDataUploadService::ApiExceptionFault
=head1 DESCRIPTION
Perl data type class for the XML Schema defined element
ApiExceptionFault from the namespace https://adwords.google.com/api/adwords/rm/v201809.
A fault element of type ApiException.
=head1 METHODS
=head2 new
my $element = Google::Ads::AdWords::v201809::OfflineDataUploadService::ApiExceptionFault->new($data);
Constructor. The following data structure may be passed to new():
$a_reference_to, # see Google::Ads::AdWords::v201809::ApiException
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| googleads/googleads-perl-lib | lib/Google/Ads/AdWords/v201809/OfflineDataUploadService/ApiExceptionFault.pm | Perl | apache-2.0 | 1,109 |
package VMOMI::HostInternetScsiHbaStaticTarget;
use parent 'VMOMI::DynamicData';

use strict;
use warnings;

# Ancestor chain used by the VMOMI (de)serialisation machinery.
our @class_ancestors = (
    'DynamicData',
);

# [ name, type (undef = builtin scalar), is_array, is_optional ]
our @class_members = (
    ['address', undef, 0, ],
    ['port', undef, 0, 1],
    ['iScsiName', undef, 0, ],
    ['discoveryMethod', undef, 0, 1],
    ['authenticationProperties', 'HostInternetScsiHbaAuthenticationProperties', 0, 1],
    ['digestProperties', 'HostInternetScsiHbaDigestProperties', 0, 1],
    ['supportedAdvancedOptions', 'OptionDef', 1, 1],
    ['advancedOptions', 'HostInternetScsiHbaParamValue', 1, 1],
    ['parent', undef, 0, 1],
);

# Names of all ancestor classes, oldest first.
sub get_class_ancestors {
    return @class_ancestors;
}

# Inherited member descriptors followed by this class's own.
sub get_class_members {
    my $class = shift;
    return ( $class->SUPER::get_class_members(), @class_members );
}

1;
| stumpr/p5-vmomi | lib/VMOMI/HostInternetScsiHbaStaticTarget.pm | Perl | apache-2.0 | 831 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::DensityPlot::BinValue
=head1 SYNOPSIS
=head1 DESCRIPTION
This object deals with the raw data to built the density plots
=head1 METHODS
=cut
package Bio::EnsEMBL::DensityPlot::BinValue;
use vars qw($AUTOLOAD @ISA);
use strict;
# Object preamble - inherits from Bio::Root::Object
@ISA = qw(Exporter);
#@EXPORT_OK = qw();
# new() is inherited from Bio::Root::Object
# _initialize is where the heavy stuff will happen when new is called
sub new {
    my ( $class, @args ) = @_;
    # Plain hash-based object; no fields are initialised at construction time.
    return bless {}, $class;
}
=head2 chromosomestart
Title : ChromosomeStart
Usage : $obj->ChromosomeStart($newval)
Function:
Returns : value of ChromosomeStart
Args : newvalue (optional)
=cut
sub chromosomestart {
    my $self = shift;
    # Setter when called with a value, getter otherwise.
    $self->{'chromosomestart'} = shift if @_;
    return $self->{'chromosomestart'};
}
=head2 chromosomeend
Title : chromosomesnd
Usage : $obj->chromosomeend($newval)
Function:
Returns : value of chromosomeend
Args : newvalue (optional)
=cut
sub chromosomeend {
    my $self = shift;
    # Setter when called with a value, getter otherwise.
    $self->{'chromosomeend'} = shift if @_;
    return $self->{'chromosomeend'};
}
=head2 value
Title : value
Usage : $obj->value($newval)
Function:
Returns : value of value
Args : newvalue (optional)
=cut
sub value {
    my $self = shift;
    # Setter when called with a value, getter otherwise.
    $self->{'value'} = shift if @_;
    return $self->{'value'};
}
=head2 scaledvalue
Title : scaledvalue
Usage : $obj->scaledvalue($newval)
Function:
Returns : this object's scaled value
Args : newvalue (optional)
=cut
sub scaledvalue {
    my $self = shift;
    # Setter when called with a value, getter otherwise.
    $self->{'scaledvalue'} = shift if @_;
    return $self->{'scaledvalue'};
}
=head2 url
Title : url
Usage : $obj->url($newval)
Function:
Returns : this object's url
Args : newvalue (optional)
=cut
sub url {
    my $self = shift;
    # Setter when called with a value, getter otherwise.
    $self->{'url'} = shift if @_;
    return $self->{'url'};
}
1;
| james-monkeyshines/ensembl | modules/Bio/EnsEMBL/DensityPlot/BinValue.pm | Perl | apache-2.0 | 3,122 |
#!/usr/bin/perl -w
#
# A Drop-In-Replacement for SOAP::Lite, the requests repeated if something went wrong.
#
# $Id: Determined.pm 387 2011-07-26 12:36:11Z univie $
package Phaidra::API::SOAP::Determined;
use strict;
use warnings;
our $VERSION = '1.0';
use base 'SOAP::Lite';
use Log::Log4perl qw(get_logger);
use Data::Dumper;
sub call {
    my ( $self, @args ) = @_;

    # HTTP-style status codes that are worth retrying.
    my %retry_codes = ( 408 => 1, 500 => 1, 502 => 1, 503 => 1, 504 => 1 );

    my $log = get_logger();
    my $resp;

    # Back-off schedule; the final 0 marks the last attempt (no sleep, give up).
    foreach my $pause ( 1, 3, 15, 0 ) {
        eval {
            SOAP::Trace::debug("Requesting...");
            $resp = $self->SUPER::call(@args);
        };
        my $error = $@;

        if ( !$error ) {
            # Request had no error -> everything OK :)
            SOAP::Trace::debug("Request success!");
            return $resp;
        }

        # Something happened at request. First three chars are the return code.
        my $rc = substr( $error, 0, 3 );

        if ( !$retry_codes{$rc} ) {
            # Non-retryable error -> propagate immediately.
            SOAP::Trace::debug("Request failed: |$error|, RC-code is non-sleepable, giving up immediately");
            die($error);
        }

        if ( !$pause ) {
            # Retries exhausted.
            SOAP::Trace::debug("Request failed: |$error|, giving up");
            die($error);
        }

        SOAP::Trace::debug("Request failed: |$error|, sleeping $pause...");
        sleep($pause);
    }

    # Not normally reachable (every iteration returns, dies, or sleeps).
    SOAP::Trace::debug("Fell through, returning result");
    return $resp;
}
# Constructor: delegates straight to SOAP::Lite's constructor.
# NOTE(review): this override adds nothing over the inherited new(); kept as-is.
sub new {
my $self = shift->SUPER::new(@_);
return $self;
}
1;
| phaidra/phaidra-api | lib/phaidra_binding/Phaidra/API/SOAP/Determined.pm | Perl | apache-2.0 | 1,815 |
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package database::mysql::mode::replicationmasterslave;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
sub new {
    my ( $class, %options ) = @_;
    my $self = $class->SUPER::new( package => __PACKAGE__, %options );
    bless $self, $class;

    $self->{version} = '1.0';

    # Register the latency thresholds this mode accepts on the command line.
    $options{options}->add_options(
        arguments => {
            "warning:s"  => { name => 'warning', },
            "critical:s" => { name => 'critical', },
        }
    );

    return $self;
}
# Validate the warning/critical latency thresholds after option parsing;
# exits with a usage error when a threshold cannot be parsed.
# The two threshold checks were duplicated verbatim; folded into one loop
# (runtime messages are unchanged).
sub check_options {
    my ( $self, %options ) = @_;
    $self->SUPER::init(%options);

    foreach my $label ( 'warning', 'critical' ) {
        if ( ( $self->{perfdata}->threshold_validate( label => $label, value => $self->{option_results}->{$label} ) ) == 0 ) {
            $self->{output}->add_option_msg( short_msg => "Wrong $label threshold '" . $self->{option_results}->{$label} . "'." );
            $self->{output}->option_exit();
        }
    }
}
# Check a MySQL master/slave replication pair (requires --multiple, i.e. two
# SQL connections). The logic is:
#   1. connect to both servers (non-fatal on failure);
#   2. inspect SHOW SLAVE STATUS on both to work out which one is the slave
#      (thread status 0 = running, 1 = stopped, 100 = server unreachable/skip);
#   3. compare the slave's read position against the master's binlog position
#      and report latency (Seconds_Behind_Master) against the thresholds.
# Results are emitted via replication_add() and the output object.
sub run {
my ($self, %options) = @_;
# Two connections are mandatory: one per server of the pair.
if (ref($options{sql}) ne 'ARRAY') {
$self->{output}->add_option_msg(short_msg => "Need to use --multiple options.");
$self->{output}->option_exit();
}
if (scalar(@{$options{sql}}) < 2) {
$self->{output}->add_option_msg(short_msg => "Need to specify two MySQL Server.");
$self->{output}->option_exit();
}
my ($sql_one, $sql_two) = @{$options{sql}};
# status convention: 0 = ok, -1 = warning, -2 = skip, -3 = unknown, other = critical
my ($slave_status, $slave_status_error) = (0, "");
my ($position_status, $position_status_error) = (0, "");
my ($connection_status_name_srv1, $connection_status_name_srv2) = ($sql_one->get_id(), $sql_two->get_id());
my ($master_save, $slave_save);
# dontquit: a failed connection is reported, not fatal.
my ($exit1, $msg_error1) = $sql_one->connect(dontquit => 1);
my ($exit2, $msg_error2) = $sql_two->connect(dontquit => 1);
$self->{output}->output_add(severity => 'OK',
short_msg => "No problems. Replication is ok.");
if ($exit1 == -1) {
$self->{output}->output_add(severity => 'CRITICAL',
short_msg => "Connection Status '" . $sql_one->get_id() . "': " . $msg_error1);
} else {
$self->{output}->output_add(long_msg => "Connection Status '" . $sql_one->get_id() . "' [OK]");
}
if ($exit2 == -1) {
$self->{output}->output_add(severity => 'CRITICAL',
short_msg => "Connection Status '" . $sql_two->get_id() . "': " . $msg_error2);
} else {
$self->{output}->output_add(long_msg => "Connection Status '" . $sql_two->get_id() . "' [OK]");
}
#####
# Find SLAVE
#####
my ($total_srv1, $total_srv2);
my ($last_error1, $last_error2);
my ($io_thread_status_srv1, $sql_thread_status_srv1);
if ($exit1 != -1) {
$sql_one->query(query => q{
SHOW SLAVE STATUS
});
my $result = $sql_one->fetchrow_hashref();
# On a pure master SHOW SLAVE STATUS returns no row, so both
# thread flags stay undefined and the status totals come out as 2.
my $slave_io_running = $result->{Slave_IO_Running};
my $slave_sql_running = $result->{Slave_SQL_Running};
$last_error1 = $result->{Last_Error};
if (defined($slave_io_running) && $slave_io_running =~ /^yes$/i) {
$io_thread_status_srv1 = 0;
} else {
$io_thread_status_srv1 = 1;
}
if (defined($slave_sql_running) && $slave_sql_running =~ /^yes$/i) {
$sql_thread_status_srv1 = 0;
} else {
$sql_thread_status_srv1 = 1;
}
} else {
# Unreachable server: sentinel 100 marks "skip" for this side.
$io_thread_status_srv1 = 100;
$sql_thread_status_srv1 = 100;
}
my ($io_thread_status_srv2, $sql_thread_status_srv2);
if ($exit2 != -1) {
$sql_two->query(query => q{
SHOW SLAVE STATUS
});
my $result = $sql_two->fetchrow_hashref();
my $slave_io_running = $result->{Slave_IO_Running};
my $slave_sql_running = $result->{Slave_SQL_Running};
$last_error2 = $result->{Last_Error};
if (defined($slave_io_running) && $slave_io_running =~ /^yes$/i) {
$io_thread_status_srv2 = 0;
} else {
$io_thread_status_srv2 = 1;
}
if (defined($slave_sql_running) && $slave_sql_running =~ /^yes$/i) {
$sql_thread_status_srv2 = 0;
} else {
$sql_thread_status_srv2 = 1;
}
} else {
$io_thread_status_srv2 = 100;
$sql_thread_status_srv2 = 100;
}
# total < 2 means at least one replication thread is running => that side is a slave.
$total_srv1 = $io_thread_status_srv1 + $sql_thread_status_srv1;
$total_srv2 = $io_thread_status_srv2 + $sql_thread_status_srv2;
# Check If there is two slave
if ($total_srv1 < 2 && $total_srv2 < 2) {
$slave_status = 1;
$slave_status_error = "Two slave. Need to have only one.";
} else {
# Check if a thread is down
if ($total_srv1 == 1) {
$slave_status = -1;
$slave_status_error = "A Replication thread is down on '" . $sql_one->get_id() . "'.";
if ($sql_thread_status_srv1 != 0) {
if (defined($last_error1) && $last_error1 ne "") {
$slave_status = 1;
$slave_status_error .= " SQL Thread is stopped because of an error (error='" . $last_error1 . "').";
}
}
}
if ($total_srv2 == 1) {
$slave_status = -1;
$slave_status_error = "A Replication thread is down on '" . $sql_two->get_id() . "'.";
if ($sql_thread_status_srv2 != 0) {
if (defined($last_error2) && $last_error2 ne "") {
$slave_status = 1;
$slave_status_error .= " SQL Thread is stopped because of an error (error='" . $last_error2 . "').";
}
}
}
# Check if we need to SKIP
if ($io_thread_status_srv1 == 100) {
$slave_status = -1;
$slave_status_error .= " Skip check on '" . $sql_one->get_id() . "'.";
}
if ($io_thread_status_srv2 == 100) {
$slave_status = -1;
$slave_status_error .= " Skip check on '" . $sql_two->get_id() . "'.";
}
# Save Slave
if ($total_srv1 < 2) {
$slave_save = $sql_one;
$master_save = $sql_two;
}
if ($total_srv2 < 2) {
$slave_save = $sql_two;
$master_save = $sql_one;
}
if ($total_srv2 > 1 && $total_srv1 > 1) {
$slave_status = 1;
$slave_status_error .= " No slave (maybe because we cannot check a server).";
}
}
####
# Check Slave position
####
if (!defined($slave_save)) {
$position_status = -2;
$position_status_error = "Skip because we can't identify a unique slave.";
} else {
if ($master_save->get_id() eq $connection_status_name_srv1 && $exit1 == -1) {
$position_status = -1;
$position_status_error = "Can't get master position on '" . $master_save->get_id() . "'.";
} elsif ($master_save->get_id() eq $connection_status_name_srv2 && $exit2 == -1) {
$position_status = -1;
$position_status_error = "Can't get master position on '" . $master_save->get_id() . "'.";
} else {
# Get Master Position
$master_save->query(query => q{
SHOW MASTER STATUS
});
my $result = $master_save->fetchrow_hashref();
my $master_file = $result->{File};
my $master_position = $result->{Position};
$slave_save->query(query => q{
SHOW SLAVE STATUS
});
my $result2 = $slave_save->fetchrow_hashref();
my $slave_file = $result2->{Master_Log_File}; # 'Master_Log_File'
my $slave_position = $result2->{Read_Master_Log_Pos}; # 'Read_Master_Log_Pos'
my $num_sec_lates = $result2->{Seconds_Behind_Master};
# Latency is compared against the --warning/--critical thresholds.
my $exit_code_sec = $self->{perfdata}->threshold_check(value => $num_sec_lates, threshold => [ { label => 'critical', exit_litteral => 'critical' }, { label => 'warning', exit_litteral => 'warning' } ]);
if (!$self->{output}->is_status(value => $exit_code_sec, compare => 'ok', litteral => 1)) {
$self->{output}->output_add(severity => $exit_code_sec,
short_msg => sprintf("Slave has %d seconds latency behind master", $num_sec_lates));
}
$self->{output}->perfdata_add(label => 'slave_latency', unit => 's',
value => $num_sec_lates,
warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning'),
critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical'),
min => 0);
# Classify the slave SQL/IO thread state from the process list:
# ko = connection trouble, warning/ok = catching up vs. fully caught up.
my $slave_sql_thread_ko = 1;
my $slave_sql_thread_warning = 1;
my $slave_sql_thread_ok = 1;
$slave_save->query(query => q{
SHOW FULL PROCESSLIST
});
while ((my $row = $slave_save->fetchrow_hashref())) {
my $state = $row->{State};
$slave_sql_thread_ko = 0 if (defined($state) && $state =~ /^(Waiting to reconnect after a failed binlog dump request|Connecting to master|Reconnecting after a failed binlog dump request|Waiting to reconnect after a failed master event read|Waiting for the slave SQL thread to free enough relay log space)$/i);
$slave_sql_thread_warning = 0 if (defined($state) && $state =~ /^Waiting for the next event in relay log|Reading event from the relay log$/i);
$slave_sql_thread_ok = 0 if (defined($state) && $state =~ /^Has read all relay log; waiting for the slave I\/O thread to update it$/i);
}
if ($slave_sql_thread_ko == 0) {
$position_status = 1;
$position_status_error .= " Slave replication has connection issue with the master.";
} elsif (($master_file ne $slave_file || $master_position != $slave_position) && $slave_sql_thread_warning == 0) {
$position_status = -1;
$position_status_error .= " Slave replication is late but it's progressing.";
} elsif (($master_file ne $slave_file || $master_position != $slave_position) && $slave_sql_thread_ok == 0) {
$position_status = -1;
$position_status_error .= " Slave replication is late but it's progressing.";
} else {
# Compare binlog file numbers: a gap of more than one file with almost
# no latency is most likely a plugin misconfiguration (servers swapped).
$master_file =~ /(\d+)$/;
my $master_bin_num = $1;
$slave_file =~ /(\d+)$/;
my $slave_bin_num = $1;
my $diff_binlog = abs($master_bin_num - $slave_bin_num);
if ($diff_binlog > 1 && $num_sec_lates < 10) {
$position_status = -3;
$position_status_error .= " Surely a configuration problem of the plugin (not good master and slave server used)";
}
}
}
}
$self->replication_add($slave_status, "Slave Thread Status", $slave_status_error);
$self->replication_add($position_status, "Position Status", $position_status_error);
$self->{output}->display();
$self->{output}->exit();
}
# Translate an internal state code into a plugin severity and emit the
# formatted check line ("<label> [STATUS] [error]"). State codes:
# 0 => OK, -1 => WARNING, -2 => CRITICAL shown as SKIP, -3 => UNKNOWN,
# anything else => CRITICAL.
sub replication_add {
    my ( $self, $lstate, $str_display, $lerr ) = @_;

    my %severity_for = ( 0 => 'OK', -1 => 'WARNING', -2 => 'CRITICAL', -3 => 'UNKNOWN' );
    my $status = exists $severity_for{$lstate} ? $severity_for{$lstate} : 'CRITICAL';
    my $status_msg;
    $status_msg = 'SKIP' if ( $lstate == -2 );

    my $shown  = defined($status_msg) ? $status_msg : $status;
    my $output = $str_display . " [" . $shown . "]";
    if ( defined($lerr) && $lerr ne "" ) {
        $output .= " [" . $lerr . "]";
    }

    # Non-OK states also contribute to the short (summary) message.
    if ( !$self->{output}->is_status( value => $status, compare => 'ok', litteral => 1 ) ) {
        $self->{output}->output_add( severity => $status,
                                     short_msg => $output );
    }
    $self->{output}->output_add( long_msg => $output );
}
1;
__END__
=head1 MODE
Check MySQL replication master/slave (need to use --multiple).
=over 8
=item B<--warning>
Threshold warning in seconds (slave latency).
=item B<--critical>
Threshold critical in seconds (slave latency).
=back
=cut
| bcournaud/centreon-plugins | database/mysql/mode/replicationmasterslave.pm | Perl | apache-2.0 | 13,806 |
# Generated Paws class (see POD below): represents an EC2 instance
# provisioned as part of an EMR cluster. All attributes are read-only
# Moose accessors populated from/serialised to the EMR API.
package Paws::EMR::Instance;
use Moose;
has EbsVolumes => (is => 'ro', isa => 'ArrayRef[Paws::EMR::EbsVolume]');
has Ec2InstanceId => (is => 'ro', isa => 'Str');
has Id => (is => 'ro', isa => 'Str');
has InstanceFleetId => (is => 'ro', isa => 'Str');
has InstanceGroupId => (is => 'ro', isa => 'Str');
has InstanceType => (is => 'ro', isa => 'Str');
has Market => (is => 'ro', isa => 'Str');
has PrivateDnsName => (is => 'ro', isa => 'Str');
has PrivateIpAddress => (is => 'ro', isa => 'Str');
has PublicDnsName => (is => 'ro', isa => 'Str');
has PublicIpAddress => (is => 'ro', isa => 'Str');
has Status => (is => 'ro', isa => 'Paws::EMR::InstanceStatus');
1;
### main pod documentation begin ###
=head1 NAME
Paws::EMR::Instance
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::EMR::Instance object:
$service_obj->Method(Att1 => { EbsVolumes => $value, ..., Status => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::EMR::Instance object:
$result = $service_obj->Method(...);
$result->Att1->EbsVolumes
=head1 DESCRIPTION
Represents an EC2 instance provisioned as part of cluster.
=head1 ATTRIBUTES
=head2 EbsVolumes => ArrayRef[L<Paws::EMR::EbsVolume>]
The list of EBS volumes that are attached to this instance.
=head2 Ec2InstanceId => Str
The unique identifier of the instance in Amazon EC2.
=head2 Id => Str
The unique identifier for the instance in Amazon EMR.
=head2 InstanceFleetId => Str
The unique identifier of the instance fleet to which an EC2 instance
belongs.
=head2 InstanceGroupId => Str
The identifier of the instance group to which this instance belongs.
=head2 InstanceType => Str
The EC2 instance type, for example C<m3.xlarge>.
=head2 Market => Str
The instance purchasing option. Valid values are C<ON_DEMAND> or
C<SPOT>.
=head2 PrivateDnsName => Str
The private DNS name of the instance.
=head2 PrivateIpAddress => Str
The private IP address of the instance.
=head2 PublicDnsName => Str
The public DNS name of the instance.
=head2 PublicIpAddress => Str
The public IP address of the instance.
=head2 Status => L<Paws::EMR::InstanceStatus>
The current status of the instance.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::EMR>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/EMR/Instance.pm | Perl | apache-2.0 | 2,844 |
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Draw::GlyphSet::chr_band;
### Draws chromosome band track on horizontal Location images
use strict;
use warnings;
no warnings 'uninitialized';
use base qw(EnsEMBL::Draw::GlyphSet);
sub label_overlay { return 1; }
sub default_colours { return $_[0]{'default_colours'} ||= [ 'gpos25', 'gpos75' ]; }
# Colour key for a band: the cached key, else the band's stain; bands with
# no stain get alternating fallback colours (the chosen colour is rotated
# to the back of the default pool and cached so the band keeps its colour).
sub colour_key {
my ($self, $f) = @_;
my $key = $self->{'colour_key'}{$f} || $f->stain;
if (!$key) {
$self->{'colour_key'}{$f} = $key = shift @{$self->default_colours};
push @{$self->default_colours}, $key;
}
return $key;
}
# Draw the chromosome band track: one bordered rectangle per karyotype band
# (clamped to the visible slice), with the band label centred inside when
# the box is wide enough to hold it.
sub _init {
my $self = shift;
return $self->render_text if $self->{'text_export'};
########## only draw contigs once - on one strand
my ($fontname, $fontsize) = $self->get_font_details('innertext');
my $bands = $self->features;
# Height of one character in the chosen font (4th element of get_text_width).
my $h = [ $self->get_text_width(0, 'X', '', font => $fontname, ptsize => $fontsize) ]->[3];
my $pix_per_bp = $self->scalex;
# NOTE(review): @t_colour appears unused; fallback colours come from default_colours().
my @t_colour = qw(gpos25 gpos75);
my $length = $self->{'container'}->length;
foreach my $band (@$bands) {
my $label = $self->feature_label($band);
my $colour_key = $self->colour_key($band);
my $start = $band->start;
my $end = $band->end;
# Clamp band coordinates to the visible region.
$start = 1 if $start < 1;
$end = $length if $end > $length;
$self->push($self->Rect({
x => $start - 1 ,
y => 0,
width => $end - $start + 1 ,
height => $h + 4,
colour => $self->my_colour($colour_key) || 'white',
absolutey => 1,
title => $label ? "Band: $label" : '',
href => $self->href($band),
bordercolour => 'black'
}));
if ($label) {
my @res = $self->get_text_width(($end - $start + 1) * $pix_per_bp, $label, '', font => $fontname, ptsize => $fontsize);
# only add the label if the box is big enough to hold it
if ($res[0]) {
$self->push($self->Text({
x => ($end + $start - 1 - $res[2]/$pix_per_bp) / 2,
y => 1,
width => $res[2] / $pix_per_bp,
textwidth => $res[2],
font => $fontname,
height => $h,
ptsize => $fontsize,
colour => $self->my_colour($colour_key, 'label') || 'black',
text => $res[0],
absolutey => 1,
}));
}
}
}
# Show the standard "no features" message when the slice has no bands.
$self->no_features unless scalar @$bands;
}
# Text-export rendering: one "Chromosome band" row per band, concatenated.
sub render_text {
    my $self = shift;

    my $export;
    foreach my $band ( @{ $self->features } ) {
        $export .= $self->_render_text(
            $band,
            'Chromosome band',
            {
                headers => ['name'],
                values  => [ $band->name ],
            }
        );
    }
    return $export;
}
# Karyotype bands of the current container, ordered by start coordinate.
sub features {
    my $self  = shift;
    my @bands = @{ $self->{'container'}->get_all_KaryotypeBands || [] };
    return [ sort { $a->start <=> $b->start } @bands ];
}
# Link URL for a band: projects it onto the toplevel coordinate system and
# builds an 'r' (region) parameter of the form name:start-end.
sub href {
    my ( $self, $band ) = @_;
    my $slice  = $band->project('toplevel')->[0]->to_Slice;
    my $region = sprintf( '%s:%s-%s', $slice->seq_region_name, $slice->start, $slice->end );
    return $self->_url( { r => $region } );
}
# Band label, or the empty string when the colour config marks labels invisible.
sub feature_label {
    my ( $self, $f ) = @_;
    if ( $self->my_colour( $self->colour_key($f), 'label' ) eq 'invisible' ) {
        return '';
    }
    return $f->name;
}
1;
| Ensembl/ensembl-webcode | modules/EnsEMBL/Draw/GlyphSet/chr_band.pm | Perl | apache-2.0 | 3,913 |
#!/usr/bin/env perl
use strict;
use Bio::EnsEMBL::Registry;
use Data::Dumper;
use Carp;
use Bio::EnsEMBL::Utils::Logger;
=head1
time mysql $(r2-w details mysql) mnuhn_testdb5_mus_musculus_funcgen_91_38 -e "drop table peak_calling_statistic"
time mysql $(r2-w details mysql) mnuhn_testdb5_mus_musculus_funcgen_91_38 -e "
create table
peak_calling_statistic as
select
peak_calling_id,
sum(peak.seq_region_end - peak.seq_region_start + 1) as total_length,
count(peak.peak_id) as num_peaks,
avg(peak.seq_region_end - peak.seq_region_start + 1) as average_length
from
peak_calling
left join peak using (peak_calling_id)
group by
peak_calling_id
;
"
generate_peak_calling_report.pl \
--species mouse_with_regbuild \
--registry /homes/mnuhn/work_dir_ersa/lib/ensembl-funcgen/registry.pm \
--output_directory ./reports/
generate_peak_calling_report.pl \
--species homo_sapiens \
--registry /homes/mnuhn/work_dir_regbuild_testrun/lib/ensembl-funcgen/registry.with_previous_version.human_regbuild_testdb7.pm \
--output_directory /homes/mnuhn/public_html/regulatory_build_stats/rb_human_merged_old_and_new/homo_sapiens/
=cut
use strict;
use Getopt::Long;
# Command-line options: registry file, species name and where to write the report.
my $species;
my $registry;
my $output_directory;
GetOptions (
'species=s' => \$species,
'registry=s' => \$registry,
'output_directory=s' => \$output_directory,
);
my $logger = Bio::EnsEMBL::Utils::Logger->new();
$logger->init_log;
$logger->info("registry = " . $registry . "\n");
$logger->info("species = " . $species . "\n");
$logger->info("output_directory = " . $output_directory . "\n");
use Bio::EnsEMBL::Registry;
Bio::EnsEMBL::Registry->load_all($registry);
# Fetch the funcgen adaptors used to populate the report.
my $mouse_funcgen_dba = Bio::EnsEMBL::Registry->get_DBAdaptor($species, 'funcgen');
my $peak_calling_statistic_adaptor = $mouse_funcgen_dba->get_PeakCallingStatisticAdaptor;
my $epigenome_adaptor = $mouse_funcgen_dba->get_EpigenomeAdaptor;
my $feature_type_adaptor = $mouse_funcgen_dba->get_FeatureTypeAdaptor;
my $peak_calling_adaptor = $mouse_funcgen_dba->get_PeakCallingAdaptor;
my $idr_adaptor = $mouse_funcgen_dba->get_IdrAdaptor;
# The HTML template lives relative to this script's location.
my $file = __FILE__;
use File::Basename qw( dirname basename );
my $description_template = dirname($file) . '/../../templates/peak_calling/report.html';
if (! -e $description_template) {
die("Can't find $description_template");
}
use File::Path qw( make_path );
make_path( $output_directory );
use Template;
my $tt = Template->new( ABSOLUTE => 1, RELATIVE => 1);
my $output;
# Reference genome length, used to express peak coverage as a percentage.
my $genome_container = Bio::EnsEMBL::Registry->get_adaptor( $species, 'core', 'GenomeContainer' );
my $genome_size_in_bp = $genome_container->get_ref_length;
# Feature types shown in the report graphs; dies if any is missing.
my $graph_display_feature_types = [
map {
$feature_type_adaptor->fetch_by_name($_) || die ("Can't fetch $_");
}
(
"CTCF",
"DNase1",
"H3K4me1",
"H3K4me2",
"H3K4me3",
"H3K9ac",
"H3K9me3",
"H3K27ac",
"H3K27me3",
"H3K36me3",
)
];
my $graph_display_epigenomes = $epigenome_adaptor->fetch_all;
use Number::Format qw( format_number );
# NOTE(review): indirect object syntax; Number::Format->new(...) is preferred.
my $de = new Number::Format(
-thousands_sep => ',',
-decimal_point => '.',
);
my $output_file = "$output_directory/peak_calling_report.html";
my $dbc = $mouse_funcgen_dba->dbc;
my $experiment_adaptor = $mouse_funcgen_dba->get_ExperimentAdaptor;
my @signal_experiments = $experiment_adaptor->_fetch_all_signal_experiments;
my @control_experiments = $experiment_adaptor->_fetch_all_control_experiments;
# Allow the template to call underscore-prefixed methods/keys.
$Template::Stash::PRIVATE = undef;
$tt->process(
$description_template,
{
signal_experiments => \@signal_experiments,
control_experiments => \@control_experiments,
# peak_calling_statistics => $peak_calling_statistics_sorted,
peak_calling_statistic_adaptor => $peak_calling_statistic_adaptor,
peak_calling_adaptor => $peak_calling_adaptor,
idr_adaptor => $idr_adaptor,
dbc => $dbc,
genome_size_in_bp => $genome_size_in_bp,
length_to_percent => sub {
my $length = shift;
return $length * 100 / $genome_size_in_bp;
},
round_percent => sub {
my $number = shift;
return sprintf("%.2f", $number);
},
boolean_to_yes_no => sub {
my $boolean = shift;
if ($boolean) {
return 'yes'
}
return 'no'
},
time => sub {
return "" . localtime
},
feature_types => $graph_display_feature_types,
epigenomes => $graph_display_epigenomes,
fetch_idr => sub {
my $experiment = shift;
return $experiment->_fetch_Idr
},
bytes_to_gb => sub {
my $size_in_bytes = shift;
return ( 0 + $size_in_bytes ) / (1024 * 1024 * 1024)
},
round_num => sub {
my $number = shift;
return sprintf("%.2f", $number);
},
format_number => sub {
my $number = shift;
if (! defined $number) {
return '-'
}
if ($number eq '') {
return '-'
}
return $de->format_number($number);
},
},
"$output_directory/peak_calling_report.html"
)
|| die $tt->error;
$logger->info("Report written to $output_file\n");
$logger->finish_log;
| Ensembl/ensembl-funcgen | scripts/sequencing/generate_peak_calling_report.pl | Perl | apache-2.0 | 5,593 |
# Generated SOAP::WSDL element class: represents the ApiExceptionFault
# element returned by the v201402 AdGroupFeedService as a SOAP fault.
package Google::Ads::AdWords::v201402::AdGroupFeedService::ApiExceptionFault;
use strict;
use warnings;
{ # BLOCK to scope variables
# XML namespace of the element in the AdWords v201402 WSDL.
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201402' }
__PACKAGE__->__set_name('ApiExceptionFault');
# No nillability / occurrence / ref restrictions are declared for this
# element, so all of these are set to their (undefined) defaults.
__PACKAGE__->__set_nillable();
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();
# Element that carries the payload of an ApiException.
use base qw(
SOAP::WSDL::XSD::Typelib::Element
Google::Ads::AdWords::v201402::ApiException
);
}
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201402::AdGroupFeedService::ApiExceptionFault
=head1 DESCRIPTION
Perl data type class for the XML Schema defined element
ApiExceptionFault from the namespace https://adwords.google.com/api/adwords/cm/v201402.
A fault element of type ApiException.
=head1 METHODS
=head2 new
my $element = Google::Ads::AdWords::v201402::AdGroupFeedService::ApiExceptionFault->new($data);
Constructor. The following data structure may be passed to new():
$a_reference_to, # see Google::Ads::AdWords::v201402::ApiException
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| gitpan/GOOGLE-ADWORDS-PERL-CLIENT | lib/Google/Ads/AdWords/v201402/AdGroupFeedService/ApiExceptionFault.pm | Perl | apache-2.0 | 1,091 |
#!/usr/bin/perl -w
#
# POD documentation
#---------------------------------------------------------------------------
=pod BEGIN
=head1 NAME
seqret.pl - retrieve sequences with given intervals from a seq-db
=head1 SYNOPSIS
seqret.pl [-help] [-db fasta-db] [-out output-fasta]
[-bed BED-file] [-str interval-string]
Options:
-h (--help) brief help message
-d (--db) sequence database (fasta)
-o (--out) output file (can be 'stdout')
-b (--bed) BED file with genomic intervals
-s (--str) interval string (e.g.: chr5:21-50,chr8:40-60)
=cut
#### END of POD documentation.
#---------------------------------------------------------------------------
use strict;
use FindBin;
use lib "$FindBin::Bin";
use Getopt::Long;
use Pod::Usage;
use Bio::DB::Fasta;
use Bio::SeqIO;
# $fd: fasta db, $fo: output file, $fb: BED file, $str: interval string
my ($fd, $fo, $fb, $str) = ('') x 4;
my ($fho);
my $help_flag;
#------------------------------ MAIN -----------------------------------#
GetOptions(
"help|h" => \$help_flag,
"db|d=s" => \$fd,
"out|o=s" => \$fo,
"bed|b=s" => \$fb,
"str|s=s" => \$str,
) or pod2usage(2);
pod2usage(1) if $help_flag;
pod2usage(2) if !$fd;
# at least one interval source (BED file or interval string) is required
pod2usage(2) if !$fb && !$str;
my $db = Bio::DB::Fasta->new($fd);
# write to STDOUT when the output file is '', 'stdout' or '-'
if ($fo eq "" || $fo eq "stdout" || $fo eq "-") {
$fho = \*STDOUT;
} else {
open ($fho, ">$fo") || die "Can't open file $fo for writing: $!\n";
}
my $seqHO = Bio::SeqIO->new(-fh=>$fho, -format=>'fasta');
my $cnt = 0;
# --- intervals read from a BED file (0-based start coordinates) ---
if($fb && -s $fb) {
open(my $fhb, "<$fb") or die "cannot read $fb\n";
while(<$fhb>) {
chomp;
my ($seqid, $beg, $end) = split "\t";
my $id;
if(!defined($beg) || !defined($end)) {
# no coordinates on this line: extract the entire sequence
$beg = 1;
$end = $db->length($seqid);
$id = $seqid;
defined $end || die "$id not in db\n";
} else {
$beg += 1; # 0-based coordinate
$id = join("-", $seqid, $beg, $end);
}
$beg <= $end || die "loc error in $fb\n$seqid:$beg-$end\n";
my $seq = $db->seq($seqid, $beg, $end);
defined $seq || die "$id not in db\n";
$seqHO->write_seq( Bio::Seq->new(-id=>$id, -seq=>$seq) );
$cnt ++;
}
close $fhb;
}
# --- intervals from a comma-separated string (1-based, inclusive) ---
if($str) {
my @ps = split(",", $str);
for (@ps) {
my ($seqid, $beg, $end);
if(/^([\w\-]+)\:(\d+)\-(\d+)$/) {
# form "chr:beg-end"
($seqid, $beg, $end) = ($1, $2, $3);
} elsif(/^([\w\-]+)\:(\d+)$/) {
# form "chr:beg" - runs to the end of the sequence
($seqid, $beg) = ($1, $2);
$end = $db->length($seqid);
} elsif(/^([\w\-]+)$/) {
# form "chr" - the whole sequence
$seqid = $1;
($beg, $end) = (1, $db->length($seqid));
} else {
die "unknown locstring: $str\n";
}
my $id = join("-", $seqid, $beg, $end);
my $seq = $db->seq($seqid, $beg, $end);
$seqHO->write_seq( Bio::Seq->new(-id=>$id, -seq=>$seq) );
$cnt ++;
}
}
$seqHO->close();
printf " %4d sequences extracted\n", $cnt;
exit 0;
| orionzhou/rgeneclust | seqret.pl | Perl | apache-2.0 | 2,799 |
# Generated SOAP::WSDL simple type class: the Reason enumeration of
# TrafficEstimatorError in the AdWords v201809 API.
package Google::Ads::AdWords::v201809::TrafficEstimatorError::Reason;
use strict;
use warnings;
# XML namespace of the simple type in the AdWords v201809 WSDL.
sub get_xmlns { 'https://adwords.google.com/api/adwords/o/v201809'};
# derivation by restriction
use base qw(
SOAP::WSDL::XSD::Typelib::Builtin::string);
1;
__END__
=pod
=head1 NAME
=head1 DESCRIPTION
Perl data type class for the XML Schema defined simpleType
TrafficEstimatorError.Reason from the namespace https://adwords.google.com/api/adwords/o/v201809.
This class is derived from
SOAP::WSDL::XSD::Typelib::Builtin::string
. SOAP::WSDL's schema implementation does not validate data, so you can use it exactly
like its base type.
# Description of restrictions not implemented yet.
=head1 METHODS
=head2 new
Constructor.
=head2 get_value / set_value
Getter and setter for the simpleType's value.
=head1 OVERLOADING
Depending on the simple type's base type, the following operations are overloaded
Stringification
Numerification
Boolification
Check L<SOAP::WSDL::XSD::Typelib::Builtin> for more information.
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| googleads/googleads-perl-lib | lib/Google/Ads/AdWords/v201809/TrafficEstimatorError/Reason.pm | Perl | apache-2.0 | 1,093 |
:- module(parser_util, [
at_least_one/4,
at_least_one/5,
fillout/4,
fillout/5,
create_node/3,
create_field/3,
create_field/4,
create_field/5,
create_directed_field/5,
correct_commenting/4,
create_parse_structure/1,
create_parse_structure/2,
create_parse_structure/3,
create_environment/4,
insert_comments_in_beginning/3,
get_environment_name/2,
get_environment_type/2,
get_row_number/2,
add_environment_whitespace/3,
get_indentation/2,
inc_indentation/2,
dec_indentation/2,
add_indentation/3,
reduce_indentation/3,
push_whitespace/3,
push_dictionaries/3,
get_parsed/2,
get_environment/2,
inside_proto/1,
get_dictionaries/2,
strip_from_list/2,
strip_from_term/2,
strip_clean/2,
strip_exposed/2,
strip_restricted/2,
strip_interface/2,
set_parsed/3,
set_environment/3,
%set_dictionaries/3,
insert_parsed/3,
reverse_parsed/2,
stop_parse/2,
look_first_parsed/2,
get_first_parsed/3,
remove_code/3,
look_ahead/3
],[dcg]).
:- include(library(assertions)).
:- include(library(basicmodes)).
:- include(library(types)).
:- include(library(iso)).
:- use_module(library(iso_byte_char)).
:- use_module(library(basicprops)).
:- use_module(library(lists)).
:- use_module(dictionary_tree).
%%%:- use_module(internal_types).
%%%:- use_module(i_o).
:- set_prolog_flag(multi_arity_warnings, off).
%%:- discontiguous([token_read/3]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% stop_parse(?S, ?S): identity on the parse structure; used as an
% explicit end-of-parse marker.
stop_parse(S,S).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% create_node(+NodeTypeId, +Parse_structure, -Node): wrap the parsed
% items of Parse_structure in a term whose functor is NodeTypeId.
create_node(NodeTypeId,Parse_structure,Node) :-
get_parsed(Parse_structure,Guts),
Node =.. [NodeTypeId,Guts].
% create_field(+FieldNameId, +Arguments, -Field): build a field term
% with functor FieldNameId and the given argument list.
create_field(FieldNameId, Arguments, Field) :-
%Old_field =.. [_Old_name|Arguments],
Field =.. [FieldNameId|Arguments].
% Alternative clause, reached on backtracking: the argument is a
% single-element list of terms whose arguments are first flattened
% into one list by extract_arguments_from_list/2.
create_field(FieldNameId, [Arg], Field) :-
extract_arguments_from_list(Arg, Arguments),
Field =.. [FieldNameId,Arguments].
% create_field(+Access, +Type, +Id, -Field): three-argument field term.
create_field(FieldAccess,FieldType,FieldId, Field) :-
Field =.. [FieldAccess,FieldType,FieldId].
% create_field/5: as create_field/4 with an additional field value.
create_field(FieldAccess,FieldType,FieldId,FieldValue, Field) :-
Field =.. [FieldAccess,FieldType,FieldId,FieldValue].
% create_directed_field/5: field term with the atom 'IS' inserted
% between the two field ids.
create_directed_field(FieldAccess,FieldType,FieldId0,FieldId1, Field) :-
Field =.. [FieldAccess,FieldType,FieldId0,'IS',FieldId1].
%%%%%%%%%%%%%%%%
% extract_arguments_from_list(+Terms, -Arguments): collect the argument
% lists of all terms in Terms, appended in order.
extract_arguments_from_list(Arg, Arguments) :-
extract_arguments_from_list(Arg, [], Arguments).
extract_arguments_from_list([Old_field|Rest], In, Result) :-
Old_field =.. [_Old_name|Arguments],
append(In, Arguments, Out),
extract_arguments_from_list(Rest, Out, Result).
extract_arguments_from_list([], Arguments, Arguments).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
get_NodeName(Node,NodeName) :-
Node =.. [NodeName,_Guts].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
create_parse_structure(parse([],Env,Dictionaries)) :-
create_dictionaries(Dictionaries),
create_whitespace_environment(Env).
create_parse_structure(parse(_Parsed,Env,Dic),parse([],Env,Dic)).
create_parse_structure(Parsed,parse(Parsed,E,D)) :-
create_dictionaries(D),
create_whitespace_environment(E).
create_parse_structure(Parsed,parse(_Parsed,Env,Dic),parse(Parsed,Env,Dic)).
create_environment(parse(_,env(_,_,W),_),'PROTO',Name,
env('PROTO',Name,W)).
create_environment(parse(_,env(_,_,W),_),'EXTERNPROTO',Name,
env('EXTERNPROTO',Name,W)).
create_environment(parse(_,env(_,_,W),_),'DEF',Name,
env('DEF',Name,W)).
create_whitespace_environment(env(_,_,Ws)) :-
create_whitespace(Ws).
create_whitespace(ws(0,0)).
create_whitespace(White,Indent,ws(White,Indent)).
get_parsed(parse(P,_,_),P).
get_environment(parse(_,E,_),E).
get_dictionaries(parse(_,_,D),D) :-
!.
set_environment(E,parse(P,_,D),parse(P,E,D)).
get_environment_name(env(_Type,Name,_Ws),Name).
get_environment_type(env(Type,_Name,_Ws),Type).
get_environment_whitespace(env(_T,_N,W),W).
set_environment_whitespace(W,env(T,N,_W_old),env(T,N,W)).
set_environment_whitespace(W,parse(P,E,D),parse(P,E_new,D)) :-
set_whitespace(W,E,E_new).
set_whitespace(W,env(T,N,_),env(T,N,W)).
set_parsed(parse(_,E,D),P,parse(P,E,D)).
set_dictionaries(D,parse(P,E,_),parse(P,E,D)) :-
is_dictionaries(D).
push_dictionaries(Trash,In,Out) :-
get_dictionaries(Trash,D),
set_dictionaries(D,In,Out).
get_environment_whitespace_row(env(_T,_N,W),R) :-
get_whitespace_row(W,R).
get_row_number(parse(_,E,_),Row) :-
get_environment_whitespace_row(E,Row).
get_whitespace_row(ws(R,_Ind),R).
set_whitespace_row(R,ws(R,_Ind)).
set_whitespace_row(R,ws(_OldRow,Ind),ws(R,Ind)).
get_indentation(ws(_R,Ind),Ind).
get_indentation(Parse,Ind) :-
get_environment(Parse,Env),
get_environment_whitespace(Env,Ws),
get_indentation(Ws,Ind).
set_indentation(Ind,ws(_R,Ind)).
set_indentation(Ind,ws(R,_OldInd),ws(R,Ind)).
%%%%%%%%%%%%%%%%
increase_row(Env,Inc,Out) :-
get_environment_whitespace(Env,Ws),
get_whitespace_row(Ws,Row),
New is Row + Inc,
set_whitespace_row(New,Ws,White_new),
set_whitespace(White_new,Env,Out).
add_environment_whitespace(In,White,Out) :-
count_row(White,Rows),
add_environment_whitespace_row(In,Rows,Out).
add_environment_whitespace_row(In,Rows,Out) :-
get_environment(In,Env),
increase_row(Env,Rows,New_env),
set_environment(New_env,In,Out).
push_whitespace(Trash,Save,Out) :-
get_environment(Trash,Env),
get_environment_whitespace(Env,Ws),
get_whitespace_row(Ws,R0),
get_indentation(Ws,Indent),
add_indentation(Save,Indent,Save0),
add_environment_whitespace_row(Save0,R0,Out).
%%%%%%%%%%%%%%%%
add_environment_indentation(Env,Add,Out) :-
get_environment_whitespace(Env,Ws),
get_indentation(Ws,Indent),
New_indent is Indent + Add,
set_indentation(New_indent,Ws,New_ws),
set_environment_whitespace(New_ws,Env,Out).
reduce_environment_indentation(Env,Reduce,Out) :-
get_environment_whitespace(Env,Ws),
get_indentation(Ws,Indent),
( Indent >= Reduce
->New_indent is Indent - Reduce
; New_indent = 0
),
set_indentation(New_indent,Ws,New_ws),
set_environment_whitespace(New_ws,Env,Out).
%%%%%%%%%%%%%%%%
add_indentation(In,Add,Out) :-
get_environment(In,Env),
add_environment_indentation(Env,Add,Env_new),
set_environment(Env_new,In,Out).
reduce_indentation(In,Reduce,Out) :-
get_environment(In,Env),
reduce_environment_indentation(Env,Reduce,New_env),
set_environment(New_env,In,Out).
inc_indentation(In,Out) :-
add_indentation(In,1,Out).
dec_indentation(In,Out) :-
reduce_indentation(In,1,Out).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
inside_proto(In) :-
get_environment(In,E),
get_environment_type(E,Name),
( Name == 'PROTO'
; Name == 'EXTERNPROTO'
).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
correct_commenting(before,Comment,Term,Out) :-
( Comment == []
->Out = Term
; append(Comment,[Term],Out)
).
correct_commenting(after,Comment,Term,Out) :-
( Comment == []
->Out = Term
; append([Term],Comment,Out)
).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
strip_from_list([],[]).
strip_from_list([E|Rest],Output) :-
list(E),
strip_from_list(E,E0),
strip_from_list(Rest,R0),
append([E0],R0,Output).
strip_from_list([E|Rest], Output) :-
atomic(E),
name(E,List),
List = [35|_],
strip_from_list(Rest,Output).
strip_from_list([E|Rest], [E|Output]) :-
strip_from_list(Rest,Output).
strip_from_list(A,A).
strip_clean(Atom,Atom) :-
atomic(Atom).
strip_clean(List,Clean) :-
list(List),
strip_from_list(List,Out),
( list(Out)
-> Out = [More],
strip_clean(More,Clean)
; Clean = Out
).
strip_from_term(Term,Stripped) :-
compound(Term),
Term =.. [Head|List],
strip_from_list(List,Str),
Stripped =.. [Head|Str].
strip_interface(Interface,Stripped) :-
strip_from_list(Interface,Pure),
strip_interface0(Pure,Stripped).
strip_interface0([],[]).
strip_interface0([Node|Interface], [New|Pure]) :-
strip_exposed(Node,New),
strip_interface(Interface,Pure).
strip_interface0([Node|Interface], [New|Pure]) :-
strip_restricted(Node,New),
strip_interface(Interface,Pure).
strip_restricted(Field,New) :-
Field =.. [Acc,Type,Id],
strip_clean(Type,Type_new),
strip_clean(Id,Id_new),
New =.. [Acc,Type_new,Id_new].
strip_exposed(Field,New) :-
Field =.. [Acc,Type,Id,Value],
strip_clean(Type,Type_new),
strip_clean(Id,Id_new),
strip_from_list(Value,Value_new),
New =.. [Acc,Type_new,Id_new,Value_new].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% insert_parsed(+Items, +In, -Out): push the list Items onto the front
% of the parsed-item list in the parse structure.
insert_parsed(Parsed,In,Out) :-
get_parsed(In,Old_parsed),
append(Parsed, Old_parsed, New_parsed),
set_parsed(In,New_parsed,Out).
% insert_comments_in_beginning(+Com, +In, -Out): same mechanics as
% insert_parsed/3; used for comment tokens.
insert_comments_in_beginning(Com,In,Out) :-
get_parsed(In,Old_parsed),
append(Com, Old_parsed, New_parsed),
set_parsed(In,New_parsed,Out).
% reverse_parsed(+In, -Out): reverse the accumulated item list
% (items appear to be accumulated in reverse while parsing).
reverse_parsed(Parsed,Reversed) :-
get_parsed(Parsed,P),
reverse(P,Rev),
set_parsed(Parsed,Rev,Reversed).
% look_first_parsed(+In, -First): peek at the first parsed item
% without removing it.
look_first_parsed(In,First) :-
get_parsed(In,[First|_More]).
% get_first_parsed(+In, -Out, -First): pop the first parsed item.
get_first_parsed(In,Out,First) :-
get_parsed(In,[First|More]),
set_parsed(In,More,Out).
%If there is no more
get_first_parsed(In,In,[]) :-
get_parsed(In,[]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% look_ahead(+Name, +Tokens, -Tokens): succeed without consuming input
% when the next token's first argument unifies with Name.
look_ahead(Name, [Ahead|Rest], [Ahead|Rest]) :-
Ahead =.. [_Token,Name|_More].
%Otherwise there is no more input or parenthesis.
%look_ahead(Name, [Ahead|Rest], [Ahead|Rest]) :-
% Ahead =.. [Name|_More].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% remove_code(+Stop_sign)//: DCG rule that discards tokens (echoing
% each skipped token for debugging) until Stop_sign is consumed, or
% until the input is exhausted.
remove_code(Stop_sign) -->
[Stop_sign].
remove_code(Stop_sign) -->
[Skipped],
{write(Skipped),nl},
remove_code(Stop_sign).
remove_code(_Stop_sign) -->
[].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% count_row(+Codes, -Rows): count newline (10) and carriage return (13)
% character codes in a list of whitespace codes.
count_row(Whitespace,Rows) :-
count_row(Whitespace,0,Rows).
count_row([],R,R).
count_row([Num|Rest],In,Result) :-
( ( Num == 10;
Num == 13 )
-> Out is In + 1,
count_row(Rest,Out,Result)
;
count_row(Rest,In,Result)
).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%Can catch zero or more of each.
fillout(In,Out,Fill) -->
at_least_one(In,In0,C),
fillout(In0,Out,C1),
{append(C,C1,Fill)}.
fillout(In,In,[]) -->
[].
fillout(In,Out) -->
at_least_one(In,In0),
fillout(In0,Out).
fillout(In,In) -->
[].
%%%%%%%%%%%%%%%%
at_least_one(In,Out) -->
whitespace(In,In0),
comment_more(In0,Out).
at_least_one(In,Out) -->
comment(In,In0),
whitespace_more(In0,Out).
%%%%%%%%%%%%%%%%
at_least_one(In,Out,Com) -->
whitespace(In,Out),
comment_more(Com).
at_least_one(In,Out,Com) -->
comment(Com),
whitespace_more(In,Out).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
comment(In,Out) -->
[comment(Com)],
{insert_parsed([Com],In,In0)},
comment_more(In0,Out).
comment_more(In,Out) -->
[comment(Com)],
comment_more(More),
{insert_parsed([Com|More],In,Out)}.
comment_more(In,In) -->
[].
%%%%%%%%%%%%%%%%
comment(Com) -->
[comment(C)],
comment_more(More),
{append([C],More,Com)}.
comment_more([C|More]) -->
[comment(C)],
comment_more(More).
comment_more([]) -->
[].
%%%%%%%%%%%%%%%%
whitespace(In,Out) -->
[whitespace(W)],
whitespace_more(More),
{append(W,More,WhiteSpace),
add_environment_whitespace(In,WhiteSpace,Out)}.
whitespace_more(White) -->
[whitespace(W)],
whitespace_more(More),
{append(W,More,White)}.
whitespace_more([]) -->
[].
whitespace_more(In,Out) -->
[whitespace(W)],
whitespace_more(More),
{append(W,More,WhiteSpace),
add_environment_whitespace(In,WhiteSpace,Out)}.
whitespace_more(In,In) -->
[].
| leuschel/ecce | www/CiaoDE/ciao/library.development/vrml/parser_util.pl | Perl | apache-2.0 | 11,507 |
#
# Copyright (c) 2013, Mitchell Cooper
#
# Evented::Methods: the name says it all.
#
# Evented::Methods can be found in its latest version at
# https://github.com/cooper/evented-methods.
#
#
package Evented::Methods;
use warnings;
use strict;
use utf8;
use 5.010;
use Carp;
use Scalar::Util 'blessed';
our $VERSION = 0.3;
# Evented::Methods import subroutine.
# Called at `use Evented::Methods @opts` time. @opts is a flat list of
# method names, each optionally followed by a hashref of per-method
# options; each method is installed into the caller via add_method().
sub import {
my ($class, @opts) = @_;
my $package = caller;
# store Evented::Properties options.
my $store = Evented::Object::_package_store($package)->{EventedMethods} ||= {};
$store->{desired} = \@opts;
# determine methods.
# pair each method name with the hashref that immediately follows it
# in the option list (an empty hashref when none follows).
my (%methods, $last_thing);
foreach my $thing (@opts) {
if (ref $thing && ref $thing eq 'HASH') { $methods{$last_thing} = $thing }
else { $methods{$thing} = {} }
$last_thing = $thing;
}
# add each method.
add_method($package, $_, %{ $methods{$_} }) foreach keys %methods;
return 1;
}
# adds a method to a package.
# add_method($package, $method, %opts)
# Registers an event callback (priority 500) whose code is $opts{code}
# or, when absent, the package's e_$method subroutine, then exports a
# plain method of the same name that fires the event and returns the
# fire object's 'return' value. Method info is kept in the package
# store under the EventedMethods key.
sub add_method {
my ($package, $method, %opts) = @_;
# TODO: make sure $package is subclass of E::O.
# argument checking (priority 1000).
# add the main callback (priority 500).
$package->register_callback(
$method => sub {
# determine the main callback code.
# resolved lazily, at fire time, so e_$method may be defined
# after add_method() was called.
my $code = $opts{code} ||= $package->can("e_$method");
if (!$code || !ref $code || ref $code ne 'CODE') {
carp "Evented method '$method' has no code.";
return;
}
$_[0]->{return} = $code->(@_);
},
name => 'method',
priority => 500
);
# export method caller.
# this overwrites the method code if necessary.
Evented::Object::export_code($package, $method, sub {
my $fire = shift->fire_event($method => @_);
return $fire->{return};
});
# store method info.
my $store = Evented::Object::_package_store($package)->{EventedMethods} ||= {};
$store->{methods}{$method} = \%opts;
return 1;
}
1; | cooper/evented-methods | lib/Evented/Methods.pm | Perl | bsd-2-clause | 2,097 |
package Parakeet::Xml;
use strict;
use warnings;
use utf8;
use Exporter 'import';
no warnings 'recursion';
use Parakeet::Xml::Entity;
use Data::Dumper;
use HTML::Entities;
use Misc::DmUtil::Data qw(notEmpty);
use Misc::DmUtil::File qw(readFileUtf8);
use Misc::DmUtil::Log qw(:level);
use Scalar::Util 'blessed';
use XML::LibXML;
our %EXPORT_TAGS = (all => [qw(appendChildren appendClonedChildren attributeRemove attributeSet attributesCopy childElementsDeep childrenByTagName cloneRemoveNamespaces element elementRename elementsByTagName entity escape entityToUnicode innerToString insertChildrenAfter insertChildrenBefore insertElementAfter insertElementBefore moveChildren
element isElementNode isTextNode nodePlace removeNodeKeepChildren replaceNode unicodeToEntity)]);
our $nsUrl = "http://www.duncanmartin.com/ns/parakeet.xml";
Exporter::export_ok_tags('all');
# Append children of one node onto an existing node
sub appendChildren
{
my $to = shift;
my $from = shift;
foreach my $i ($from->childNodes())
{
$i->unbindNode();
$to->appendChild($i);
}
return 1;
} # end of 'appendChildren()'
# Append cloned children of one node onto an existing node
sub appendClonedChildren
{
my $to = shift;
my $from = shift;
foreach my $i ($from->childNodes())
{
$to->appendChild($i->cloneNode(1));
}
return 1;
} # end of 'appendClonedChildren()'
# Remove an attribute
sub attributeRemove
{
my $node = shift;
my $name = shift;
if ($name =~ /^pk:(.*)/o)
{
$node->removeAttributeNS($nsUrl, $1);
}
else
{
$node->removeAttribute($name);
}
} # end of 'attributeRemove()'
# Set an attribute
sub attributeSet
{
my $node = shift;
my $name = shift;
my $value = shift;
my %opt = @_;
my $log = Misc::DmUtil::Log::find(%opt);
unless ($node)
{
$log->fatal("No node supplied");
}
unless (notEmpty($name))
{
$log->fatal("No name supplied");
}
unless (defined $value)
{
$log->fatal("No value supplied for name [$name]");
}
if ($name =~ /^pk:(.*)/o)
{
$log->debug("nsUrl=[$nsUrl], name=[$1], value=[$value]");
$node->setAttributeNS($nsUrl, $1, $value);
}
else
{
$node->setAttribute($name, $value);
}
} # end of 'attributeSet()'
# Copy attributes
sub attributesCopy
{
my $to = shift;
my $from = shift;
my %opt = @_;
foreach my $a ($from->attributes())
{
next if ref $a eq "XML::LibXML::Namespace";
my $uri = $a->namespaceURI();
$to->setAttributeNS($uri, $a->nodeName(), $a->getValue());
}
return 1;
} # end of 'attributesCopy()'
# Return all child elements
sub childElementsDeep
{
my $parent = shift;
my %arg = @_;
my $depth = $arg{depth} // 0;
++$depth;
if ((ref $parent // "") eq "XML::LibXML::Document")
{
my @res = childElementsDeep($parent->documentElement(), depth => $depth);
push @res, $parent;
return @res;
}
my @res;
foreach my $i ($parent->childNodes())
{
push @res, $i if $i->nodeType == XML_ELEMENT_NODE;
push @res, childElementsDeep($i, depth => $depth);
}
return @res;
} # end of 'childElementsDeep()'
# children by tag name
sub childrenByTagName
{
my $parent = shift;
my $name = shift;
if ((ref $parent // "") eq "XML::LibXML::Document")
{
return childrenByTagName($parent->documentElement(), $name);
}
if ($name =~ /^pk:(.*)/o)
{
return $parent->getChildrenByTagNameNS($nsUrl, $1);
}
return $parent->getChildrenByTagName($name);
} # end of 'childrenByTagName()'
# deep clone elements, removing namespaces
sub cloneRemoveNamespaces
{
my $root = shift;
return cloneRemoveNamespacesRecur($root);
} # end of 'cloneRemoveNamespaces()'
# deep clone elements, removing namespaces - recursive part
sub cloneRemoveNamespacesRecur
{
my $node = shift;
my $res;
# document node
if ($node->nodeType == XML_DOCUMENT_NODE)
{
$res = new XML::LibXML::Document();
$res->setDocumentElement(cloneRemoveNamespacesRecur($node->documentElement()));
return $res;
}
# element node
if ($node->nodeType == XML_ELEMENT_NODE)
{
$res = element($node->localname());
foreach my $i ($node->attributes())
{
next if $i->nodeName() =~ /^xmlns:/o;
$res->setAttribute($i->localname(), $i->nodeValue());
}
}
else
{
$res = $node->cloneNode(0);
}
foreach my $i ($node->childNodes())
{
my $j = cloneRemoveNamespacesRecur($i);
$res->appendChild($j) if $j;
}
return $res;
} # end of 'cloneRemoveNamespacesRecur()'
# create element
sub element
{
my $name = shift;
my %opt = @_;
my $node;
if ($name =~ /^pk:(.*)/o)
{
$node = XML::LibXML::Element->new($1);
$node->setNamespace($nsUrl, "pk", 1);
}
else
{
$node = XML::LibXML::Element->new($name);
}
nodePlace($node, %opt);
foreach my $i (qw(alt title))
{
if (defined $opt{$i})
{
$node->setAttribute($i, $opt{$i});
}
}
foreach my $i (qw(class href id role src style))
{
if (notEmpty($opt{$i}))
{
$node->setAttribute($i, $opt{$i});
}
}
foreach my $i (keys %opt)
{
if ($i =~ /^pk:/o && defined $opt{$i})
{
$node->setAttribute($i, $opt{$i});
}
}
if (defined $opt{text})
{
$node->appendText($opt{text});
}
return $node;
} # end of 'element()'
# Rename an element
sub elementRename
{
my $node = shift;
my $name = shift;
my $e = element($name, after => $node);
moveChildren($e, $node);
attributesCopy($e, $node);
$node->unbindNode();
return $e;
} # end of 'elementRename()'
# elements by tag name
sub elementsByTagName
{
my $parent = shift;
my $name = shift;
my %opt = @_;
unless (blessed($parent))
{
Misc::DmUtil::Log::find(%opt)->fatal("Non blessed argument passed as parent");
}
my @buf;
if ($name =~ /^pk:(.*)/o)
{
@buf = $parent->getElementsByTagNameNS($nsUrl, $1);
}
else
{
@buf = $parent->getElementsByTagName($name);
}
@buf = elementsFilter(\@buf, %opt);
return @buf;
} # end of 'elementsByTagName()'
# Filter elements by rules
sub elementsFilter
{
my $list = shift;
my %opt = @_;
my @buf = @{$list};
foreach my $att (qw(class id))
{
if ($opt{$att})
{
my @t;
foreach my $i (@buf)
{
my $j = $i->getAttribute($att);
next unless defined $j && $j eq $opt{$att};
push @t, $i;
}
@buf = @t;
}
}
return @buf;
} # end of 'elementsFilter()'
# Return entity
sub entity
{
my $name = shift;
unless ($Parakeet::Xml::Entity::entity{$name})
{
die "Unknown entity [$name]";
}
return $Parakeet::Xml::Entity::entity{$name};
} # end of 'entity()'
# turn entities into unicode
sub entityToUnicode
{
my $text = shift;
my %opt = @_;
return undef unless defined $text;
foreach my $i (keys %Parakeet::Xml::Entity::entity)
{
if ($opt{keepXmlEntities})
{
next if $i eq "amp";
next if $i eq "lt";
next if $i eq "gt";
}
$text =~ s/&$i;/$Parakeet::Xml::Entity::entity{$i}/g;
}
return $text;
} # end of 'entityToUnicode()'
# Escape text so it can be embedded safely in XML/XHTML markup.
# Replaces the five special characters with character entities. The
# ampersand must be handled first so that entities inserted by the
# later substitutions are not escaped twice. An undefined input yields
# the empty string. (The previous substitutions were no-ops, e.g.
# s/&/&/, replacing each character with itself.)
sub escape
{
    my $text = shift;
    return "" unless defined $text;
    $text =~ s/&/&amp;/go;
    $text =~ s/</&lt;/go;
    $text =~ s/>/&gt;/go;
    $text =~ s/"/&quot;/go;
    $text =~ s/'/&#39;/go;
    return $text;
} # end of 'escape()'
# Return 1 when the given node has at least one child node, 0 otherwise.
sub hasChildren
{
    my $node = shift;
    my @children = $node->childNodes();
    return @children ? 1 : 0;
} # end of 'hasChildren()'
# convert inner element to string
# (i.e. omit root node)
sub innerToString
{
my $root = shift;
my %opt = @_;
my $xml = $opt{xml} // 0;
my $buf = "";
foreach my $i ($root->childNodes())
{
if ($xml == 0)
{
$buf .= unicodeToEntity($i->toString(1));
}
else
{
$buf .= $i->toString(1);
}
}
return $buf;
} # end of 'innerToString()'
# Move the children of $new into $old's parent, directly after $old,
# preserving their original document order.
sub insertChildrenAfter
{
    my $new = shift;
    my $old = shift;
    # Inserting every child immediately after $old would leave the
    # children in reverse order (each insertAfter lands in front of the
    # previously inserted one), so advance the insertion anchor as we go.
    my $anchor = $old;
    foreach my $i ($new->childNodes())
    {
        $old->parentNode()->insertAfter($i, $anchor);
        $anchor = $i;
    }
} # end of 'insertChildrenAfter()'
# Append children of an element before another element
sub insertChildrenBefore
{
my $new = shift;
my $old = shift;
foreach my $i ($new->childNodes())
{
$old->parentNode()->insertBefore($i, $old);
}
} # end of 'insertChildrenBefore()'
# Append an element after another element
sub insertElementAfter
{
my $new = shift;
my $old = shift;
$old->parentNode()->insertAfter($new, $old);
} # end of 'insertElementAfter()'
# Append an element before another element
sub insertElementBefore
{
my $new = shift;
my $old = shift;
$old->parentNode()->insertBefore($new, $old);
} # end of 'insertElementBefore()'
# Return 1 when the argument is a blessed XML::LibXML element node,
# 0 for anything else (including undef and non-node references).
sub isElementNode
{
    my $node = shift;
    if (blessed($node) && $node->isa("XML::LibXML::Node"))
    {
        return $node->nodeType == XML_ELEMENT_NODE ? 1 : 0;
    }
    return 0;
} # end of 'isElementNode()'
# Return 1 when the argument is a blessed XML::LibXML text node,
# 0 for anything else (including undef and non-node references).
sub isTextNode
{
    my $node = shift;
    if (blessed($node) && $node->isa("XML::LibXML::Node"))
    {
        return $node->nodeType == XML_TEXT_NODE ? 1 : 0;
    }
    return 0;
} # end of 'isTextNode()'
# move children to new node
sub moveChildren
{
my $to = shift;
my $from = shift;
my %opt = @_;
my $log = Misc::DmUtil::Log::find(%opt);
my @c = $from->childNodes();
foreach my $i (@c)
{
$i->unbindNode();
$to->appendChild($i);
}
} # end of 'moveChildren()'
# Place node at position
sub nodePlace
{
my $node = shift;
my %opt = @_;
if ($opt{after})
{
unless (blessed($opt{after}))
{
Misc::DmUtil::Log::find()->fatal("Non-blessed reference supplied for after position");
}
insertElementAfter($node, $opt{after});
}
elsif ($opt{append})
{
unless (blessed($opt{append}))
{
Misc::DmUtil::Log::find()->fatal("Non-blessed reference supplied for append position");
}
$opt{append}->appendChild($node);
}
elsif ($opt{before})
{
unless (blessed($opt{before}))
{
Misc::DmUtil::Log::find()->fatal("Non-blessed reference supplied for before position");
}
insertElementBefore($node, $opt{before});
}
elsif ($opt{prepend})
{
unless (blessed($opt{prepend}))
{
Misc::DmUtil::Log::find()->fatal("Non-blessed reference supplied for prepend position");
}
my $f = $opt{prepend}->firstChild();
if ($f)
{
insertElementBefore($node, $f);
}
else
{
$opt{prepend}->appendChild($node);
}
}
} # end of 'nodePlace()'
# Parse
sub parse
{
my %opt = @_;
my $log = Misc::DmUtil::Log::find(%opt);
my $input;
if (defined $opt{string})
{
$input = $opt{string};
}
elsif (defined $opt{file})
{
$input = readFileUtf8($opt{file})
// return 0;
}
else
{
$log->error("No input");
return 0;
}
my $dom;
eval
{
my $parser = new XML::LibXML();
$dom = $parser->parse_string($input);
};
if ($@)
{
$log->error("Failed to parse with error [$@]");
$log->error($input);
return 0;
}
unless ($dom)
{
$log->notice($input);
$log->error("Failed to parse");
return 0;
}
return $dom;
} # end of 'parse();
# Parse XHTML
# Normalise an XHTML document (string => ... or file => ...) and hand it
# to parse(): the DOCTYPE line and the xmlns attribute on <html> are
# stripped, and named entities are converted to unicode (keeping the XML
# built-ins), because the plain XML parser would otherwise reject them.
# Returns the parsed document, or 0 on failure.
sub parseXhtml
{
    my %opt = @_;
    my $log = Misc::DmUtil::Log::find(%opt);
    my $input;
    if (notEmpty($opt{string}))
    {
        $input = $opt{string};
    }
    elsif (notEmpty($opt{file}))
    {
        $input = readFileUtf8($opt{file})
            // return 0;
    }
    else
    {
        $log->error("No input");
        return 0;
    }
    $input =~ s/^.*<!DOCTYPE.*$//gmo;       # get rid of doctype
    $input =~ s/<html xmlns.*?>/<html>/go;  # get rid of namespace
    # fix entities
    $input = entityToUnicode($input, keepXmlEntities => 1);
    return parse(string => $input);
} # end of 'parseXhtml()'
# Remove the node but keep the children
# Splice $node out of the tree: each child is re-inserted in front of
# $node (preserving order), then $node itself is detached. Returns 1.
sub removeNodeKeepChildren
{
    my $node = shift;
    my $parent = $node->parentNode();
    for my $child ($node->childNodes())
    {
        $child->unbindNode();
        $parent->insertBefore($child, $node);
    }
    $node->unbindNode();
    return 1;
} # end of 'removeNodeKeepChildren()'
# replace one node with another, handle root
# Replace $old with $new in the tree. If $new is not a reference it is
# taken as an element name and a fresh element is built via element().
# Options:
#   keepChildren => 1        move all of $old's children into $new
#   keepAttr     => [names]  copy the listed attributes from $old to $new
# Returns $new, or 1 when $new and $old are already the same node (in
# which case nothing is done).
sub replaceNode
{
    my $new = shift;
    my $old = shift;
    my %opt = @_;
    my $log = Misc::DmUtil::Log::find();
    # A plain string is an element name: create the element first.
    unless (ref $new)
    {
        $new = element($new, %opt);
    }
    # Numeric comparison of references tests node identity.
    return 1 if $new == $old;
    # move children
    if ($opt{keepChildren})
    {
        foreach my $i ($old->childNodes())
        {
            $i->unbindNode();
            $new->appendChild($i);
        }
    }
    # keep attributes
    if ($opt{keepAttr})
    {
        foreach my $a (@{$opt{keepAttr}})
        {
            my $value = $old->getAttribute($a);
            next unless defined $value;
            $new->setAttribute($a, $value);
        }
    }
    # Replacing the document element requires setDocumentElement();
    # an ordinary replaceNode() call does not work for the root.
    my $doc = $old->ownerDocument();
    if ($doc->documentElement() == $old)
    {
        $log->debug("Setting root to [$new]");
        $doc->setDocumentElement($new);
    }
    else
    {
        $old->replaceNode($new);
    }
    return $new;
} # end of 'replaceNode()'
# Create text node
# Build an XML::LibXML::Text node holding $content and optionally
# attach it via nodePlace() (see nodePlace() for the position options).
# Returns the new node.
sub text
{
    my ($content, %opt) = @_;
    my $node = XML::LibXML::Text->new($content);
    nodePlace($node, %opt);
    return $node;
} # end of 'text()'
# Replace text below node with a named element
# Walk $node (or its children, when $node is an element) and replace
# every occurrence of $pattern inside text content with a fresh, empty
# element named $name; surrounding text is kept as separate text nodes.
# The input tree is not modified -- the result is a flat list of clones.
# Options:
#   trim => 1   drop leading/trailing $name elements from the result
sub textToElement
{
    my $node = shift;
    my $pattern = shift;
    my $name = shift;
    my %opt = @_;
    my $log = Misc::DmUtil::Log::find(%opt);
    my $debug = $log->isLoggable(LOG_DEBUG);
    my @res;
    # For an element, transform each child; otherwise transform the
    # node itself. The recursion starts at depth 0.
    if (isElementNode($node))
    {
        foreach my $i ($node->childNodes())
        {
            push @res, textToElementRecur($i, $pattern, $name, %opt, depth => 0);
        }
    }
    else
    {
        push @res, textToElementRecur($node, $pattern, $name, %opt, depth => 0);
    }
    if ($debug)
    {
        for (my $i = 0; $i < @res; ++$i)
        {
            $log->debug("Position [$i]: ".$res[$i]->toString(0));
        }
    }
    # Optionally strip $name elements from both ends of the result.
    if ($opt{trim})
    {
        $log->debug("Want trim") if $debug;
        while (@res && isElementNode($res[0]) && $res[0]->nodeName() eq $name)
        {
            $log->debug("Removing leading") if $debug;
            shift @res;
        }
        while (@res && isElementNode($res[-1]) && $res[-1]->nodeName() eq $name)
        {
            $log->debug("Removing trailing") if $debug;
            pop @res;
        }
    }
    return @res;
} # end of 'textToElement()'
# Recursive part of text to element
# Returns a list of cloned nodes in which every match of $pattern in
# text content has been replaced by an empty element named $name.
# Elements are shallow-cloned and their children rebuilt recursively;
# any other node type is deep-cloned untouched.
sub textToElementRecur
{
    my $node = shift;
    my $pattern = shift;
    my $name = shift;
    my %opt = @_;
    my $depth = $opt{depth};
    my $log = Misc::DmUtil::Log::find(%opt);
    my $debug = $log->isLoggable(LOG_DEBUG);
    my @res;
    $log->debug("[$depth]: On node [".$node->toString(0)."]") if $debug;
    if (isElementNode($node))
    {
        $log->debug("[$depth]: Is element") if $debug;
        # Shallow clone; children are replaced by the recursive results.
        my $n = $node->cloneNode(0);
        push @res, $n;
        foreach my $i ($node->childNodes())
        {
            foreach my $j (textToElementRecur($i, $pattern, $name, %opt, depth => $depth + 1))
            {
                $n->appendChild($j);
            }
        }
    }
    elsif (isTextNode($node))
    {
        $log->debug("[$depth]: Is text") if $debug;
        my $t = $node->nodeValue() // "";
        $log->debug("[$depth]: Text:\n$t") if $debug;
        # Split the text at every pattern match: text before the match
        # becomes a text node, the match itself a new $name element.
        while ($t =~ /^(.*?)$pattern(.*)$/s)
        {
            $log->debug("[$depth]: Found pattern") if $debug;
            if (notEmpty($1))
            {
                $log->debug("[$depth]: Found [$1] before pattern") if $debug;
                push @res, text($1);
            }
            $log->debug("[$depth]: Adding element") if $debug;
            push @res, element($name);
            $t = $2;
            $log->debug("[$depth]: Remaining text:\n$t") if $debug;
        }
        $log->debug("[$depth]: Pattern not found") if $debug;
        if (notEmpty($t))
        {
            $log->debug("[$depth]: Adding remaining:\n$t") if $debug;
            push @res, text($t);
        }
    }
    else
    {
        # Comments, CDATA, etc.: keep a deep copy unchanged.
        push @res, $node->cloneNode(1);
    }
    return @res;
} # end of 'textToElementRecur()'
# Convert to HTML
# Serialise $in as an HTML fragment. $in may be an element (used as-is)
# or a document-like node, in which case the first <pk:doc> element is
# used as the outer container. Namespaced (":" in the name) descendants
# are unwrapped (their children kept), unicode is entity-encoded, and
# the spurious closing tags of the void elements <br>/<img> are removed.
# Dies via $log->fatal() on invalid input or a missing pk:doc element.
sub toHtml
{
    my $in = shift;
    my %opt = @_;
    my $log = Misc::DmUtil::Log::find(%opt);
    my $debug = $log->isLoggable(LOG_DEBUG);
    unless (ref $in)
    {
        $log->fatal("Invalid input");
    }
    my $res = "";
    # Serialise empty elements as <x></x> instead of <x/>; 'local'
    # restores the previous setting when this sub returns.
    local $XML::LibXML::setTagCompression = 1;
    my $outer;
    if ($in->nodeType == XML_ELEMENT_NODE)
    {
        $outer = $in;
    }
    else
    {
        ($outer) = elementsByTagName($in, "pk:doc");
        # Fail with a clear message instead of crashing below with
        # "Can't call method cloneNode on an undefined value".
        unless ($outer)
        {
            $log->fatal("No pk:doc element found in input");
        }
    }
    my $copy = $outer->cloneNode(1);
    $log->debug("copy:".$copy->toString(0)) if $debug;
    # Unwrap all namespaced helper elements, keeping their children.
    foreach my $j (childElementsDeep($copy))
    {
        if ($j->nodeName() =~ /:/o)
        {
            removeNodeKeepChildren($j);
        }
    }
    foreach my $j ($copy->childNodes())
    {
        $res .= $j->toString(0);
    }
    my $r = unicodeToEntity($res);
    # remove unnecessary closing tags
    $r =~ s/<\/br>//go;
    $r =~ s/<\/img>//go;
    $log->debug("r=[$r]") if $debug;
    return $r;
} # end of 'toHtml()'
# Convert to HTML document
# Prefix the HTML serialisation of $in with an HTML5 doctype.
# Returns undef when toHtml() yields no result.
sub toHtmlDoc
{
    my $in = shift;
    my $html = toHtml($in) // return undef;
    return "<!DOCTYPE html>\n" . $html;
} # end of 'toHtmlDoc()'
# encode unicode characters as entities
# Replace every character that has a named entity in
# %Parakeet::Xml::Entity::entityReverse with "&name;".
# \Q...\E escapes regex metacharacters in the key, so characters such
# as '+' or '.' are matched literally instead of being treated as
# quantifiers/wildcards (the unescaped form broke on such keys).
sub unicodeToEntity
{
    my $text = shift;
    foreach my $chr (keys %Parakeet::Xml::Entity::entityReverse)
    {
        $text =~ s/\Q$chr\E/&$Parakeet::Xml::Entity::entityReverse{$chr};/g;
    }
    return $text;
} # end of 'unicodeToEntity()'
1;
| duncanmartin/parakeet | lib/Parakeet/Xml.pm | Perl | bsd-2-clause | 17,655 |
% manual_head_no_ell(SentId, NodeId, HeadPos): like manual_head/3 but,
% presumably, without ellipsis handling -- TODO confirm against the
% tiger2dep conversion code that consumes these facts.
manual_head_no_ell(7266,501,5).
manual_head_no_ell(7266,508,5).
manual_head_no_ell(7266,511,5).
manual_head_no_ell(15188,510,17).
% manual_head(SentId, NodeId, HeadPos): hand-assigned head token
% (position HeadPos) for node NodeId in sentence SentId -- schema
% presumed from the tiger2dep correction context; verify in the loader.
manual_head(409,509,6).
manual_head(445,504,28).
manual_head(17658,506,6).
% manual heads for problematic cases
% (German annotator notes translated to English; quoted corpus phrases
% are kept in the original language with a gloss.)
manual_head(3625,505,10).
manual_head(4018,500,6).
manual_head(4583,507,12).
manual_head(5148,507,13).
manual_head(5387,512,4).
manual_head(5680,519,13).
manual_head(6060,518,21).
manual_head(6152,512,25).
manual_head(6152,517,25).
manual_head(7077,503,5).
manual_head(7080,513,30).
manual_head(7080,515,30).
manual_head(7097,506,6).
manual_head(7097,507,13).
manual_head(7097,516,6).
manual_head(7256,517,21).
manual_head(7256,521,21).
manual_head(7256,524,32).
% manual_head(7266,501,5).
% manual_head(7266,508,5).
% manual_head(7266,511,5).
manual_head(7267,513,27).
manual_head(7282,525,8).
manual_head(7287,514,42).
%manual_head(7290,509,12). % seems to interfere with empty heads % 9/11/2012
manual_head(7430,507,19).
%manual_head(7542,502,7). % seems to interfere with empty heads % 9/11/2012
manual_head(8747,503,27).
manual_head(8880,511,15).
manual_head(9821,504,25).
manual_head(9826,505,17).
manual_head(10084,504,34). % very nice PP-RNR (orig.: "sehr schöne PP-RNR")
manual_head(11430,509,14).
manual_head(12160,514,23).
manual_head(12319,508,6).
manual_head(12572,502,11).
manual_head(14048,515,27).
%manual_head(14429,521,29).
%manual_head(14429,522,39).
%manual_head(14429,527,49).
manual_head(14496,508,22).
manual_head(14816,502,9).
manual_head(15188,509,17). % VP-RNR
%manual_head(15188,510,17).
%manual_head(15188,511,18).
manual_head(15605,513,12).
manual_head(15637,510,17).
manual_head(16373,502,6). % wrong PP-obj-head
manual_head(16377,502,21).
manual_head(16380,500,4).
manual_head(16527,505,2). % "Das Glas in der Hand" (the glass in hand)
manual_head(16585,514,17). % odd PP coordination (orig.: "seltsame PP-Coord")
manual_head(16727,510,8). % odd PP coordination (orig.: "seltsame PP-Coord")
%manual_head(16979,522,1). % corrected by python
%manual_head(16979,509,1). % would cause a difference to version 1.0
manual_head(16979,509,2).
manual_head(17148,509,3).
manual_head(17706,510,27).
manual_head(18575,511,28).
manual_head(19293,503,19).
manual_head(21908,509,12).
manual_head(21923,505,14).
manual_head(22061,510,22).
manual_head(22150,513,19).
manual_head(22278,513,14).
manual_head(22278,505,33).
manual_head(22536,505,11).
manual_head(23334,508,26). % losing NG label here
manual_head(23365,510,5).
manual_head(23557,508,3).
manual_head(23799,507,6).
manual_head(23799,508,13).
manual_head(24099,501,6).
manual_head(24168,507,26).
manual_head(24470,507,24).
manual_head(24990,508,13).
manual_head(25360,503,23).
manual_head(26820,513,33).
manual_head(27159,501,11).
manual_head(27159,503,8).
manual_head(27232,506,12).
manual_head(27474,515,22).
manual_head(27474,507,33).
manual_head(27474,512,33).
manual_head(27474,516,33).
manual_head(29183,511,56).
manual_head(29620,529,25).
manual_head(29620,509,62).
manual_head(29621,518,41).
manual_head(30025,510,63).
manual_head(30086,505,11).
manual_head(30892,504,18).
manual_head(31531,506,12).
manual_head(31531,504,17).
manual_head(31692,502,11).
manual_head(31725,508,24).
manual_head(32020,504,4).
manual_head(32096,503,5).
manual_head(32167,501,5).
manual_head(32397,501,11).
manual_head(32614,511,20).
manual_head(32673,518,40).
manual_head(32685,508,15).
manual_head(32839,512,21).
manual_head(32975,504,5).
manual_head(33068,507,49).
manual_head(34604,508,17).
manual_head(34909,500,3).
manual_head(35243,501,13).
manual_head(35273,517,52). % "die alten Kader im Blick" (the old cadres in view)
manual_head(36395,503,20).
manual_head(37143,504,7).
manual_head(38269,502,12).
manual_head(38397,506,19).
manual_head(38664,504,6).
manual_head(39166,507,4).
manual_head(40008,503,6).
manual_head(40023,508,18).
manual_head(40719,503,10).
manual_head(40863,509,21).
manual_head(41806,506,6). % AP-RNR
manual_head(41806,507,10).
manual_head(41806,508,14).
manual_head(41816,503,21).
manual_head(41861,504,17).
manual_head(42368,508,15). % NP-RNR
manual_head(42194,500,6).
manual_head(42398,502,11).
manual_head(42611,502,9).
manual_head(42833,502,12).
manual_head(42987,510,16).
manual_head(43442,509,28).
manual_head(43476,509,14).
manual_head(44362,508,22).
manual_head(44389,507,3).
manual_head(46117,508,7).
manual_head(46117,511,7).
manual_head(46119,505,18).
manual_head(46158,514,33).
manual_head(46222,503,17).
manual_head(46240,515,13).
manual_head(46251,507,1).
manual_head(46252,507,1).
manual_head(46327,511,27).
manual_head(46567,511,31).
manual_head(47702,506,11).
manual_head(47705,502,3).
manual_head(47705,504,3).
manual_head(47982,502,22).
manual_head(48416,503,24).
manual_head(48490,516,32).
manual_head(49071,507,18).
manual_head(49071,510,18).
manual_head(49115,512,27). % "Kein Wort über Bären in der Zeitung" (not a word about bears in the newspaper)
manual_head(49126,503,15).
manual_head(49397,508,6).
manual_head(50227,509,14). % NP-RNR
% manual heads for problematic cases when the prep is dep of the nn
% almost all are RNRs with PPs involved
% NOTE(review): the active facts below duplicate entries already present
% in the section above; harmless for single-solution lookups, but they
% yield duplicate solutions on backtracking.
%manual_head(3625,505,11). % corrected by python
manual_head(3625,505,10).
%manual_head(8880,511,19). % corrected by python
manual_head(8880,511,15).
%manual_head(21908,509,13). % corrected by python
manual_head(21908,509,12).
%manual_head(22278,513,15). % corrected by python
manual_head(22278,513,14).
%manual_head(23799,507,7). % corrected by python
manual_head(23799,507,6).
%manual_head(23799,508,14). % corrected by python
manual_head(23799,508,13).
%manual_head(24990,508,14). % corrected by python
manual_head(24990,508,13).
%manual_head(27474,515,23). % corrected by python
manual_head(27474,515,22).
%manual_head(32975,504,7). % corrected by python
manual_head(32975,504,5).
%manual_head(38397,506,21). % corrected by python
manual_head(38397,506,19).
%manual_head(38664,504,9). % corrected by python
manual_head(38664,504,6).
%manual_head(40023,508,19). % corrected by python
manual_head(40023,508,18).
%manual_head(41806,506,7). % corrected by python
manual_head(41806,506,6).
%manual_head(41806,507,11). % corrected by python
manual_head(41806,507,10).
%manual_head(41806,508,16). % corrected by python
manual_head(41806,508,14).
%manual_head(42368,508,18). % corrected by python
manual_head(42368,508,15).
%manual_head(42611,502,10). % corrected by python
manual_head(42611,502,9).
%manual_head(42987,510,18). % corrected by python
manual_head(42987,510,16).
%manual_head(43476,509,16). % corrected by python
manual_head(43476,509,14).
%manual_head(44362,508,23). % corrected by python
manual_head(44362,508,22).
% IG Metall, Bau, Farben etc --> IG is head
% (multi-word union names like "IG Metall": the abbreviation "IG" is
% forced to be the head of the phrase)
manual_head(809,500,4).
manual_head(1038,500,4).
manual_head(1827,501,8).
manual_head(1852,501,5).
manual_head(4020,500,1).
manual_head(4036,504,22).
manual_head(4038,500,13).
manual_head(4039,501,9).
manual_head(4043,500,2).
manual_head(5133,500,5).
manual_head(5135,500,3).
manual_head(5179,500,8).
manual_head(5195,500,4).
manual_head(5203,501,8).
manual_head(5228,500,2).
manual_head(5326,502,16).
manual_head(5585,501,13).
manual_head(8184,505,21).
manual_head(8187,501,10).
manual_head(8191,500,2).
manual_head(8197,502,10).
manual_head(8198,500,2).
manual_head(8198,505,14).
manual_head(8199,508,23).
manual_head(8571,500,4).
manual_head(10098,500,4).
manual_head(10100,500,2).
manual_head(10102,501,5).
manual_head(10106,501,9).
manual_head(10128,500,3).
manual_head(10130,500,2).
manual_head(10131,501,8).
manual_head(10132,501,14).
manual_head(10135,500,2).
manual_head(10136,500,2).
manual_head(10138,500,2).
manual_head(10141,501,16).
manual_head(10146,500,4).
manual_head(10149,503,5).
manual_head(10149,502,15).
manual_head(10152,500,2).
manual_head(10224,500,1).
manual_head(10229,503,14).
manual_head(10231,501,8).
manual_head(10236,500,7).
manual_head(10236,501,20).
manual_head(10243,502,10).
manual_head(10418,502,17).
manual_head(10421,501,7).
manual_head(10423,501,13).
manual_head(13706,502,20).
manual_head(15015,505,27).
manual_head(15949,501,8).
manual_head(18197,502,8).
manual_head(19641,503,18).
manual_head(19649,505,33).
manual_head(20567,500,2).
manual_head(21154,500,1).
manual_head(21977,500,3).
manual_head(21978,501,10).
manual_head(21980,503,16).
manual_head(22233,500,5).
manual_head(22517,501,11).
manual_head(23596,500,2).
manual_head(23868,500,1).
manual_head(23870,500,2).
manual_head(23872,501,8).
manual_head(24266,500,4).
manual_head(24266,503,19).
manual_head(24771,500,7).
manual_head(27164,502,12).
manual_head(28580,500,1).
manual_head(28584,500,3).
manual_head(28591,500,2).
manual_head(28596,503,19).
manual_head(28599,502,12).
manual_head(28600,505,20).
manual_head(28601,500,6).
manual_head(28610,502,13).
manual_head(28616,501,5).
manual_head(29912,500,7).
manual_head(30385,500,4).
manual_head(34542,500,2).
manual_head(35052,505,30).
manual_head(35052,508,42).
manual_head(35052,510,50).
manual_head(35057,505,25).
manual_head(35057,507,37).
manual_head(35058,501,8).
manual_head(35058,502,12).
manual_head(35066,501,8).
manual_head(35071,500,5).
manual_head(35087,500,2).
manual_head(35091,501,8).
manual_head(35111,500,3).
manual_head(35116,500,4).
manual_head(35118,501,5).
manual_head(35123,500,3).
manual_head(35124,502,8).
manual_head(35133,500,3).
manual_head(35142,501,9).
manual_head(35142,502,13).
manual_head(35142,504,21).
manual_head(35145,500,2).
manual_head(35146,500,3).
manual_head(35148,500,2).
manual_head(35151,500,2).
manual_head(35153,501,9).
manual_head(35158,500,1).
manual_head(37900,500,1).
manual_head(37903,500,2).
manual_head(38246,501,8).
manual_head(38383,502,9).
manual_head(38396,501,15).
manual_head(38399,501,20).
manual_head(38763,501,6).
manual_head(38764,502,16).
manual_head(38765,505,27).
manual_head(38770,500,1).
manual_head(43069,500,1).
manual_head(43073,502,17).
manual_head(43093,500,6).
manual_head(44536,503,23).
manual_head(46954,504,6).
manual_head(49082,500,5).
manual_head(49086,500,2).
manual_head(49350,502,2).
manual_head(49591,506,4).
manual_head(49591,501,13).
manual_head(49596,505,2).
manual_head(50178,501,14).
% agreement problems
% (cases where morphological agreement misled the automatic head rules)
manual_head(398,503,2).
manual_head(529,500,3). % typo in the corpus (orig. note: "tippfehler")
manual_head(2880,502,13). % should: 11->12->13
manual_head(3327,501,13). % typo in the corpus (orig. note: "tippfehler")
manual_head(3725,504,8).
manual_head(4023,500,2).
manual_head(5725,504,12). % ??
manual_head(5794,501,6).
manual_head(7010,504,28).
manual_head(7036,503,14).
manual_head(7778,506,28).
manual_head(7778,508,36).
manual_head(9410,502,20).
manual_head(9410,505,35). % should: 36,38 -> 37 -> 35
manual_head(10038,501,15).
manual_head(12149,501,10). % 8 should be attached to 9
manual_head(12164,501,13).
manual_head(17083,501,9).
manual_head(19557,504,22).
manual_head(22675,502,10).
manual_head(24334,502,15).
manual_head(24336,500,3).
manual_head(24347,500,7).
manual_head(24348,501,9).
manual_head(24351,500,6).
manual_head(24490,506,43).
manual_head(24924,503,18).
manual_head(27662,508,13).
% Multi-word GmbH names
% (multi-word company names; the head is fixed manually)
manual_head(12348,500,6).
manual_head(17212,502,15).
manual_head(17216,503,27).
manual_head(17218,504,34).
manual_head(26943,502,8).
manual_head(36833,503,16).
manual_head(37957,505,9).
manual_head(38029,501,9).
manual_head(38030,502,13).
manual_head(38809,501,6).
manual_head(39499,501,11).
manual_head(40825,502,18).
manual_head(42397,500,5).
manual_head(46104,502,13).
manual_head(48719,504,23). | rsling/cow | src/malt/malteval/tiger2dep/corrections/manual_heads_tiger.pl | Perl | bsd-2-clause | 11,221 |
package TCDB::Domain::Characterize;
use strict;
no warnings;
use Data::Dumper;
use TCDB::Assorted;
use List::Util qw(sum);
use Ref::Util qw(is_hashref);
use Class::Struct;
#==========================================================================
# This module takes the proteins in a family containing PFAM domains,
# extracts the sequences in that domain and ssearch36 them
# against the proteins that did not match that domain.
#
# The purpose is to verify whether proteins without domain matches
# actually have the missing domains but the HMM failed to identify it.
#
# Some remote homologs may need two iterations in order to rescue their
# domains
#
#--------------------------------------------------------------------------
#
# Written by: Arturo Medrano
# Date: 8/17/2017
#
#==========================================================================
# Class::Struct declaration: generates an accessor for every field below
# ('%' = hash field, '$' = scalar field). Several accessors are
# overridden further down in this file to add validation and defaults.
struct ("TCDB::Domain::Characterize" =>
        {
         'domFreq'            => '%',  #per-family frequency of domain hits (consumers skip element 0 of each list)
         'domCoords'          => '%',  #hash with the coordinates of the domains
         'tcids'              => '%',  #hash with the TCIDs and accessions
         'searchWith'         => '$',  #program to use in the searches (default ssearch36)
         'evalue'             => '$',  #minimum evalue for domain-protein alignments
         'domCovCutoff'       => '$',  #minimum domain coverage for rescued domains
         'treatAsSuperfamily' => '$',  #treat a list of families as a superfamily (0|1)
         'refDomains'         => '$',  #query all input families for these domains instead of querying for a family's own domains
         #Variables with paths
         'blastdb'            => '$',  #full path to the blast DB that will be used to extract sequences
         "tcdbFaa"            => '$',  #full path to the fasta file with all sequences in TCDB
         'rootDir'            => '$',  #the root directory where results will be saved
         'protSeqs1it'        => '$',  #fasta file with all sequences of the reference TCDB family
         'domSeqs1it'         => '$',  #file with the sequences of the PFAM domains found in the family (direct hits)
         'aln1stCycle'        => '$',  #file with the domain-vs-protein comparisons of the 1st rescue cycle
         'idsFile2it'         => '$',  #file with the IDs of the proteins with missing domains after the 1st rescue cycle
         'protSeqs2it'        => '$',  #file with the sequences of the IDs in variable $self->idsFile2it
         'domSeqs2it'         => '$',  #file with the sequences of the PFAM domains found in the family (direct hits)
         'aln2ndCycle'        => '$'   #file with the domain-vs-protein comparisons of the 2nd rescue cycle
        });
#==========================================================================
#Default values
#The frequencies of the PFAM domains within a family of proteins
# Accessor/mutator; dies if the supplied value is not a hash reference.
# Defaults to an empty hash ref.
sub domFreq {
    my ($self, $value) = @_;
    my $objPath = 'TCDB::Domain::Characterize::domFreq';
    my $default = {};
    if ( $value ) {
        unless (is_hashref $value) {
            #Fixed copy-pasted message that wrongly said "domain coordinates"
            die "domFreq: value should be a reference to a non-empty hash of domain frequencies -> ";
        }
        $self->{$objPath} = $value;
    }
    unless ($self->{$objPath}) {
        $self->{$objPath} = $default;
    }
    return $self->{$objPath};
}
#The coordinates of the PFAM hits in terms of the domain and the protein sequence
# Accessor/mutator; dies if the supplied value is not a hash reference.
# Defaults to an empty hash ref.
sub domCoords {
    my ($self, $value) = @_;
    my $objPath = 'TCDB::Domain::Characterize::domCoords';
    if ($value) {
        die "domCoords: value should be a reference to a non-empty hash of domain coordinates -> "
            unless (is_hashref $value);
        $self->{$objPath} = $value;
    }
    $self->{$objPath} = {} unless ($self->{$objPath});
    return $self->{$objPath};
}
#The different TCIDs and Accession for all proteins in a family;
#The format of the hash should be identical as the output generated with the function:
#    TCDB::Assorted::getSystemAccessions();
# Accessor/mutator; dies if the supplied value is not a hash reference.
sub tcids {
    my ($self, $value) = @_;
    my $objPath = 'TCDB::Domain::Characterize::tcids';
    my $default = {};
    if ( $value ) {
        unless (is_hashref $value) {
            #Fixed copy-pasted message that wrongly said "domain coordinates"
            die "tcids: value should be a reference to a non-empty hash of TCIDs and accessions -> ";
        }
        $self->{$objPath} = $value;
    }
    unless ($self->{$objPath}) {
        $self->{$objPath} = $default;
    }
    return $self->{$objPath};
}
#Program to use in the sequence alignments when rescuing domains (default ssearch36)
# Accessor/mutator. Only the exact strings 'ssearch36' and 'blastp' are
# accepted; the regex is anchored so substrings like 'myblastp2' no
# longer slip through the validation.
sub searchWith {
    my ($self, $value) = @_;
    my $objPath = 'TCDB::Domain::Characterize::searchWith';
    my $default = "ssearch36";
    if ( $value ) {
        unless ($value =~ /\A(?:blastp|ssearch36)\z/) {
            die "searchWith: this value can only be 'ssearch36' or 'blastp' -> ";
        }
        $self->{$objPath} = $value;
    }
    unless ($self->{$objPath}) {
        $self->{$objPath} = $default;
    }
    return $self->{$objPath};
}
#Sequences of the reference TCDB family
# Accessor/mutator. When a value is given it must name an existing file.
# NOTE(review): despite the error text, emptiness is NOT checked here
# (-f only tests existence) -- confirm whether a -z test should be added.
# Defaults to the empty string.
sub tcFamSeqs {
    my ($self, $value) = @_;
    my $objPath = 'TCDB::Domain::Characterize::tcFamSeqs';
    my $default = "";
    if ( $value ) {
        unless(-f $value) {
            die "tcFamSeqs: file with family sequences not found or empty -> ";
        }
        $self->{$objPath} = $value;
    }
    unless ($self->{$objPath}) {
        $self->{$objPath} = $default;
    }
    return $self->{$objPath};
}
#Full path to the blast DB that will be used to extract the sequence fragments
#corresponding to PFAM domain matches.
# Accessor/mutator; a new value must point at an existing protein DB
# (checked via its "<value>.phr" index file). Defaults to "".
sub blastdb {
    my ($self, $value) = @_;
    my $objPath = 'TCDB::Domain::Characterize::blastdb';
    if ($value) {
        die "BlastDB not detected -> ${value}.phr" unless (-f "${value}.phr");
        $self->{$objPath} = $value;
    }
    $self->{$objPath} = "" unless ($self->{$objPath});
    return $self->{$objPath};
}
#This is the evalue that will be used to compare the sequence of domains against
#the full sequence of proteins in a given family
# Accessor/mutator for the E-value cutoff; defaults to 10.
# Dies on a non-positive value. Note: the old code tested truthiness
# ('if ($value)'), so evalue(0) was silently ignored; an explicit
# defined-check now rejects 0 loudly instead.
sub evalue {
    my ($self, $value) = @_;
    my $objPath = 'TCDB::Domain::Characterize::evalue';
    my $default = 10;
    if ( defined $value ) {
        unless($value > 0) {
            die "Evalue should be >0.0 -> ";
        }
        $self->{$objPath} = $value;
    }
    unless ($self->{$objPath}) {
        $self->{$objPath} = $default;
    }
    return $self->{$objPath};
}
#This is the main directory where results will be placed
# Accessor/mutator; the directory is created on first set if missing.
# Defaults to the current directory (".").
sub rootDir {
    my ($self, $value) = @_;
    my $objPath = 'TCDB::Domain::Characterize::rootDir';
    if ($value) {
        system "mkdir -p $value" unless (-d $value);
        $self->{$objPath} = $value;
    }
    $self->{$objPath} = "." unless ($self->{$objPath});
    return $self->{$objPath};
}
###########################################################################
## Function definitions
###########################################################################
# Entry point: rescue domains either once per family found in domFreq,
# or -- in superfamily mode -- a single time for the whole list, using
# the first TCID in $tcids as the representative family label.
sub rescueDomains {
    my ($self, $tcids) = @_;
    unless ($self->treatAsSuperfamily) {
        $self->rescueDomForFamily($_, undef) for keys %{ $self->domFreq };
        return;
    }
    $self->rescueDomForFamily($tcids->[0], $tcids);
}
# Run the full domain-rescue pipeline for one family.
# Arguments:
#   $fam  - TCID used to label the output directories and files
#   $fams - listref of family TCIDs (superfamily mode only)
# Side effects: creates rootDir/$fam/{sequences,plots,reports},
# extracts sequences, runs up to two domain-rescue cycles and writes
# the final rescued-domains report.
# (Code changes vs. the original: fixed typos in two die messages and
# added the missing statement terminator on the superfamily die.)
sub rescueDomForFamily {
    my ($self, $fam, $fams) = @_;
    #-----------------------------------------------------------------
    #Relevant variables for this function
    #Create sequence directories
    my $seqDir = $self->rootDir . "/$fam/sequences";
    system "mkdir -p $seqDir" unless (-d $seqDir);
    #-----------------------------------------------------------------
    #Get sequences of the reference family
    #If sequences were given as a parameter, copy them to the proper folder
    my $famSeqsFile = "$seqDir/family-${fam}.faa";
    # if (-f $self->protSeqs1it && !(-z $self->protSeqs1it)) {
    #   my $cmd = "cp " . $self->protSeqs1it . " $seqDir";
    #   system $cmd;
    # }
    #Extract the sequences of the full proteins in the family if they
    #were not provided by the user
    print "Rescue cycle 1 ($fam): Retrieving all family sequences\n";
    my $tcdbFaaParam = (-f $self->tcdbFaa)? "-d " . $self->tcdbFaa : "";
    if ($self->treatAsSuperfamily) {
        #Superfamily mode: pull every member family and concatenate the
        #fasta files into one file labelled with the reference TCID.
        my @files = ();
        foreach my $f (@$fams) {
            my $file = "$seqDir/family-${f}.faa";
            system "extractFamily.pl -i $f -o $seqDir -f fasta $tcdbFaaParam" unless (-f $file);
            push(@files, $file);
        }
        my $str = join (" ", @files);
        system "cat $str > $seqDir/tmp.faa; mv $seqDir/tmp.faa $famSeqsFile";
        die "Could not generate sequences for superfamily: " unless (-f $famSeqsFile && !(-z $famSeqsFile));
    }
    else {
        system "extractFamily.pl -i $fam -o $seqDir -f fasta $tcdbFaaParam" unless (-f $self->protSeqs1it);
    }
    #Instantiate variables with sequence data
    $self->protSeqs1it($famSeqsFile) unless ($self->protSeqs1it);
    $self->domSeqs1it("$seqDir/domainFragments1it.faa");
    $self->idsFile2it("$seqDir/pids4SecondRescueCycle.faa");
    $self->protSeqs2it("$seqDir/seqs4SecondRescueCycle.faa");
    $self->domSeqs2it("$seqDir/domainFragments2it.faa");
    #-----------------------------------------------------------------
    #Create plots directory
    my $plotsDir = $self->rootDir . "/$fam/plots";
    system "mkdir -p $plotsDir" unless (-d $plotsDir);
    #-----------------------------------------------------------------
    #Create reports directory
    my $reportsDir = $self->rootDir . "/$fam/reports";
    system "mkdir -p $reportsDir" unless (-d $reportsDir);
    #-----------------------------------------------------------------
    #Report files, named after the alignment program in use
    if ($self->searchWith eq 'ssearch36') {
        $self->aln1stCycle ("$reportsDir/alnDomProt_ssearch36_1stCycle.tsv");
        $self->aln2ndCycle ("$reportsDir/alnDomProt_ssearch36_2ndCycle.tsv");
    }
    elsif ($self->searchWith eq 'blastp') {
        $self->aln1stCycle ("$reportsDir/alnDomProt_blastp_1stCycle.tsv");
        $self->aln2ndCycle ("$reportsDir/alnDomProt_blastp_2ndCycle.tsv");
    }
    #-----------------------------------------------------------------
    #First write the report with the relevant direct domain hits
    print "Rescue cycle 1 ($fam): Extracting direct PFAM hits in the family.\n";
    $self->getRelevantDomains($fam);
    #-----------------------------------------------------------------
    #Now extract the sequence fragments corresponding to the PFAM
    #domains that are regarded as relevant for the definition of a family.
    #Element 0 of the per-family list is skipped (not a domain entry).
    print "Rescue cycle 1 ($fam): Extracting sequences of PFAM domain hits in family.\n";
    my @domains = @{ $self->domFreq->{$fam} }[1 .. $#{ $self->domFreq->{$fam} }];
    #Stop if no relevant domains were found
    unless (@domains) {
        my $errFile = $self->rootDir . "/$fam/NO_RESULTS.txt";
        my $msg = "Not enough proteins had characteristic domains.\n" .
            "* Check parameter minProtsDom of object TCDB::Domain::PfamParser\n";
        open (my $errh, ">", $errFile) || die $!;
        print $errh $msg;
        close $errh;
        return;
    }
    #Extract domain seqs
    unless (-f $self->domSeqs1it) {
        $self->extractDomainFragments1it($fam, \@domains);
    }
    #File with domain sequences must exist and not be empty
    unless (-f $self->domSeqs1it && !(-z $self->domSeqs1it)) {
        die "Missing or empty file with matching PFAM domain sequences: ", $self->domSeqs1it, " -> ";
    }
    #-----------------------------------------------------------------
    #First rescue cycle: compare domain sequences with all the protein
    #sequences in the family
    print "Rescue cycle 1 ($fam): Comparing domain sequences with all proteins in family\n";
    unless (-f $self->aln1stCycle && !(-z $self->aln1stCycle)) {
        $self->alignDomainsVsProteins(1);
    }
    #-----------------------------------------------------------------
    #First rescue cycle: parse the alignment output and determine how
    #many proteins have been rescued and how many still lack domains
    print "Rescue cycle 1 ($fam): Extracting domain hits per protein in family\n";
    my %protsWithDomains1 = ();
    $self->parseDomainAlignments(\%protsWithDomains1, 1);
    #-----------------------------------------------------------------
    #Determine how many proteins have no domains detected after the
    #first rescue iteration to decide whether a second cycle is needed.
    #First get all the IDs for the proteins in the analyzed family
    my @fullIDs = ();
    foreach my $arr (@{ $self->tcids->{$fam} }) {
        my $tc = $arr->[0];
        foreach my $acc (@{ $arr->[1] }) {
            push (@fullIDs, "${tc}-$acc");
        }
    }
    #Get the proteins still missing at least one domain
    my %missingHits = ();
    foreach my $tcid (@fullIDs) {
        my $missCnt = 0;
        foreach my $domArr (@{ $self->domFreq->{$fam} }[1 .. $#{ $self->domFreq->{$fam} }]) {
            unless (exists $protsWithDomains1{$tcid} && exists $protsWithDomains1{$tcid}{$domArr->[0]}) {
                $missCnt++;
                $missingHits{$tcid} = $missCnt;
            }
        }
    }
    #-----------------------------------------------------------------
    #If there are missing domains, run a second domain-rescue iteration
    my %protsWithDomains2 = ();
    if (%missingHits) {
        #get the sequences of the proteins with at least one domain missing
        print "\nRescue cycle 2 ($fam): Extracting sequences of proteins without domain hits.\n";
        $self->getProtSeqs4SecondRescueCycle(\%missingHits);
        #extract sequences of rescued domains
        print "Rescue cycle 2 ($fam): Extracting sequences rescued domains in cycle 1.\n";
        unless (-f $self->domSeqs2it) {
            $self->extractDomainFragments2it(\%protsWithDomains1);
        }
        #align rescued domains versus sequences without domain matches
        print "Rescue cycle 2 ($fam): Aligning rescued domain sequences to proteins without pfam hits.\n";
        $self->alignDomainsVsProteins(2);
        #Extract the coordinates of the rescued domains after the 2nd rescue cycle
        print "Rescue cycle 2 ($fam): Extracting coordenates of rescued domains.\n";
        $self->parseDomainAlignments(\%protsWithDomains2, 2);
    }
    #-----------------------------------------------------------------
    #Generate the report of the recovered domains
    my $reportName = "$reportsDir/${fam}_rescuedDomains.tsv";
    $self->writeReport($fam, $reportName, \@fullIDs, \%protsWithDomains1, \%protsWithDomains2);
    print "Reports generated for $fam!\n";
}
# Write the per-family rescued-domains report.
# Arguments:
#   $fam           - family TCID
#   $outfile       - path of the TSV report to write
#   $famIDs        - listref of all "tcid-accession" IDs in the family
#   $rescuedProts1 - {id}{domain} => [ {left,right}, ... ] from cycle 1
#   $rescuedProts2 - same structure, from cycle 2
# The report starts with per-domain summary comments, followed by one
# line per protein listing each domain as
# "Dom|start-end|...|DirectHit/Rescued1/Rescued2" or "Dom|Nohit".
# (Code change vs. the original: the two sort comparators compared
# $a->{left} against $b->{right}, an inconsistent ordering; both now
# sort consistently on the left coordinate.)
sub writeReport {
    my ($self, $fam, $outfile, $famIDs, $rescuedProts1, $rescuedProts2) = @_;
    my $totalIDs = scalar @$famIDs;
    #-----------------------------------------------------------------
    #Get the number of direct hits per domain (i.e. directly from PFAM)
    #and the rescue statistics
    my %stats = ();
    foreach my $domArr (@{ $self->domFreq->{$fam} }[1 .. $#{ $self->domFreq->{$fam} }]) {
        $stats{ $domArr->[0] }{directHits} = $domArr->[1];
        my %domCnt1 = ();  #Count first rescue cycle hits
        my %domCnt2 = ();  #Count second rescue cycle hits
        foreach my $prot (@{ $famIDs }) {
            if (exists $rescuedProts1->{$prot} && exists $rescuedProts1->{$prot}->{$domArr->[0]}) {
                $domCnt1{$prot} = 1;
            }
            elsif (exists $rescuedProts2->{$prot} && exists $rescuedProts2->{$prot}->{$domArr->[0]}) {
                $domCnt2{$prot} = 1;
            }
        }
        $stats{ $domArr->[0] }{cycle1rescue} = scalar keys %domCnt1;
        $stats{ $domArr->[0] }{cycle2rescue} = scalar keys %domCnt2;
    }
    #-----------------------------------------------------------------
    #Write report to file
    open (my $outh, ">", $outfile) || die $!;
    #Write global stats as comments
    foreach my $dom (keys %stats) {
        my $resc = $stats{$dom}{cycle1rescue} + $stats{$dom}{cycle2rescue} - $stats{$dom}{directHits};
        my $total = sprintf("%.1f", ($stats{$dom}{cycle1rescue} + $stats{$dom}{cycle2rescue}) / $totalIDs * 100);
        print $outh "# $dom: DirectHits: $stats{$dom}{directHits} Rescued Proteins: $resc ",
            "Prots with Domain in $fam: ", $stats{$dom}{cycle1rescue} + $stats{$dom}{cycle2rescue},
            " (${total}% from a total of $totalIDs)\n";
    }
    my $cnt = 1;
    foreach my $prot (@$famIDs) {
        my $domStats = [];
        foreach my $domArr (@{ $self->domFreq->{$fam} }[1 .. $#{ $self->domFreq->{$fam} }]) {
            my $dom = $domArr->[0];
            my $domPos = [];
            #Get the domain coords for this protein if rescued in the first cycle
            if (exists $rescuedProts1->{$prot} && exists $rescuedProts1->{$prot}->{$dom}) {
                my @matches = sort {$a->{left}<=>$b->{left}} @{ $rescuedProts1->{$prot}->{$dom} };
                foreach my $pos (@matches) {
                    push (@{ $domPos }, $pos->{left} . '-' . $pos->{right});
                }
            }
            #Get the domain coords for this protein if rescued in the second cycle
            elsif (exists $rescuedProts2->{$prot} && exists $rescuedProts2->{$prot}->{$dom}) {
                my @matches = sort {$a->{left}<=>$b->{left}} @{ $rescuedProts2->{$prot}->{$dom} };
                foreach my $pos (@matches) {
                    push (@{ $domPos }, $pos->{left} . '-' . $pos->{right});
                }
            }
            #Format the rescued domain for printing
            if (@{ $domPos }) {
                if (exists $self->domCoords->{$fam} &&
                    exists $self->domCoords->{$fam}->{$prot} &&
                    exists $self->domCoords->{$fam}->{$prot}->{$dom}) {
                    push (@{ $domStats }, "${dom}|" . join("|", @{ $domPos }) . "|DirectHit");
                }
                elsif (exists $rescuedProts1->{$prot} && exists $rescuedProts1->{$prot}->{$dom}) {
                    push (@{ $domStats }, "${dom}|" . join("|", @{ $domPos }) . "|Rescued1");
                }
                elsif (exists $rescuedProts2->{$prot} && exists $rescuedProts2->{$prot}->{$dom}) {
                    push (@{ $domStats }, "${dom}|" . join("|", @{ $domPos }) . "|Rescued2");
                }
            }
            else {
                push (@{ $domStats }, "${dom}|Nohit");
            }
        }
        print $outh "$cnt\t$prot\t", join ("\t", @{ $domStats}), "\n";
        $cnt++;
    }
    close $outh;
}
#==========================================================================
#Extract the sequences of the proteins that did not have either direct
#hit or rescued PFAM domains
sub getProtSeqs4SecondRescueCycle {
    #Dump the IDs of proteins still lacking domain hits to a file and use
    #blastdbcmd to extract their sequences for the second rescue cycle.
    #Arguments: $hr_prots - hashref whose keys are the protein IDs.
    my ($self, $hr_prots) = @_;

    my $blastDB = $self->blastdb;
    my $IDsFile = $self->idsFile2it;
    my $outfile = $self->protSeqs2it;

    #Save IDs in a file so blastdbcmd can read them in batch mode
    open (my $outh1, ">", $IDsFile) || die $!;
    print $outh1 join("\n", keys %{ $hr_prots }), "\n";
    close $outh1 or die $!;

    #Extract the sequences. The list form of system() bypasses the shell,
    #so paths containing spaces or metacharacters are passed safely.
    system('blastdbcmd', '-db', $blastDB, '-dbtype', 'prot',
           '-entry_batch', $IDsFile, '-out', $outfile) == 0
        or die "blastdbcmd failed (status $?)";

    #Verify that results exist
    my $eMsg = "Protein sequences file for second domain-rescue cycle not found or empty:\n$outfile";
    die $eMsg unless (-f $outfile && !(-z $outfile));
}
#==========================================================================
#List the set of domains identified by PFAM including their positions
#in the target sequences.
sub getRelevantDomains {
    #Write a per-family TSV report listing, for each relevant PFAM domain,
    #the domain/protein hit coordinates in every system of the family.
    my ($self, $fam) = @_;

    #Element 0 of domFreq->{$fam} is skipped -- presumably it holds
    #summary data rather than a domain entry (TODO confirm with builder).
    my @domains = @{ $self->domFreq->{$fam} }[1 .. $#{ $self->domFreq->{$fam} }];

    my $outfile = $self->rootDir . "/$fam/reports/${fam}_relevantDomains.tsv";
    open (my $reph, ">", $outfile) || die $!;

  DOM:foreach my $domain (@domains) {
        my $dom = $domain->[0]; #The PFAM accession
        print $reph "===========================================================================\n";

      TCID:foreach my $tcid (sort_by_system keys %{ $self->domCoords->{$fam} }) {
            #Only systems that actually have hits for this domain
            next TCID unless (exists $self->domCoords->{$fam}->{$tcid} && exists $self->domCoords->{$fam}->{$tcid}->{$dom});

            #tcid has the form "<TC number>-<protein accession>"
            my ($tc, $acc) = split(/-/, $tcid);
            print $reph "$tcid";

            #One tab-separated field per hit: dom:len:start:end|acc:len:start:end
          HIT:foreach my $hit (@{ $self->domCoords->{$fam}->{$tcid}->{$dom} }) {
                print $reph "\t${dom}:", $hit->{dlen}, ":", $hit->{dstart}, ":", $hit->{dend}, "|",
                            "${acc}:", $hit->{plen}, ":", $hit->{pstart}, ":", $hit->{pend};
            }
            print $reph "\n";
        }
        print $reph "\n";
    }

    #Close explicitly so buffered write errors are not silently lost.
    close $reph or die $!;
}
sub parseDomainAlignments {
    #Parse blastp/ssearch36 tabular alignments of rescued domain fragments
    #vs family proteins ($cycle 1 or 2 selects the input file), then
    #assemble overlapping fragments into contiguous domain coordinates in
    #$prots, keyed as: protID -> domID -> [ { left, right }, ... ].
    my ($self, $prots, $cycle) = @_;

    #-----------------------------------------------------------------
    #Parse sequence alignments

    my $infile = "";
    if ($cycle == 1) { $infile = $self->aln1stCycle; }
    elsif ($cycle == 2) { $infile = $self->aln2ndCycle; }
    else { die "Unknown cycle: $cycle"; }

    #$parsed: protID -> list of [protDomID, sstart, send, domID, qstart, qend]
    my ($parsed, $qtcid, $qlen) = ({}, undef, undef);

    open (my $alnh, "<", $infile) || die $!;
    while (<$alnh>) {
        chomp;
        s/\s+$//;
        next if (!$_);

        #Get the length of the query for SSEARCH36 output (verify it if will work with BLASTP)
        if (/^#/) {
            if (/^#\s+Query:\s+(\S+)\s+-\s+(\d+)\s+aa/) {
                $qtcid = $1;
                $qlen = $2;
            }
            next;
        }

        #Get line info (tabular columns of -m 8C / -outfmt 7 output)
        my ($cachito, $protID, $id, $alnLen, $kk1, $kk2, $qstart, $qend, $sstart, $send, $evalue, $bitscore) = split(/\s+/);
        # print "$_\n";
        # print Data::Dumper->Dump([$qtcid, $qlen, $cachito, $protID, $id, $alnLen, $qstart, $qend, $sstart, $send, $evalue, $bitscore ],
        #                          [qw(*qtcid *qlen *cachito *protID *id *alnLen *qstart *qend *sstart *send *evalue *bitscore)]);
        # <STDIN>;

        #Keep only hits belonging to the query announced in the last header
        next unless ($qtcid eq $cachito);

        #Require the alignment to cover enough of the query fragment
        my $domCoverage = $alnLen / $qlen;
        next if ($domCoverage < $self->domCovCutoff);

        #Get protein_ID with domain chachito and the cachito
        my ($protDomID, $domID) = split(/\|/, $cachito);
        my $res = [$protDomID, $sstart, $send, $domID, $qstart, $qend];
        push (@{ $parsed->{$protID} }, $res);
        # print Data::Dumper->Dump([$res ], [qw(*res)]);
        # <STDIN>;
    }
    close $alnh;

    if ($cycle == 1) {
        # print "\nCycle:$cycle\n$infile\nProteins with blastp|ssearch36 Domain Hits: ", scalar keys %{ $parsed }, "\n";
        # print Data::Dumper->Dump([$parsed ], [qw(*parsed )]);
        # exit;
    }

    #-----------------------------------------------------------------
    #Get the cachitos and assemble the domain hits.

    foreach my $protID (sort {$a cmp $b} keys %{ $parsed }) {
        my $fragments = $parsed->{$protID};
        # print "Processing: $protID\n";

        my $noOvp_match = 0;
        #Hits are processed left-to-right so overlaps can only extend rightwards
        foreach my $hit (sort coords_by_left_pos @{ $fragments }) {
            my ($dom, $origCoords) = split(/:/, $hit->[3]);
            # print "\n\n===========================================================================\n";
            # print "$protID -> $dom\n", Data::Dumper->Dump([$prots ], [qw(*prots )]), "\n\n\n";
            # <STDIN>;

            if (exists $prots->{$protID} && exists $prots->{$protID}->{$dom}) {
                foreach my $coordSet (@{ $prots->{$protID}->{$dom} }) {
                    # print "-----------------------------------------------------------------\n";
                    # print Data::Dumper->Dump([$hit, $prots->{$protID}->{$dom} ], [qw(*hit *beforeCoords )]);

                    #Overlapping match
                    if (($hit->[1] >= $coordSet->{left} && $hit->[1] <= $coordSet->{right}) &&
                        ($hit->[2] > $coordSet->{right})) {
                        #Expand the overlap of the protein
                        $coordSet->{right} = $hit->[2];
                    }
                    #Non overlapping match
                    elsif ($hit->[1] > $coordSet->{right}) {
                        #print Data::Dumper->Dump([ $noOvp_match], [qw( *noOvp_match)]);
                        if ( $noOvp_match ) {
                            #A disjoint segment already exists: merge carefully
                            $self->removeRedundantMatches($hit, $prots->{$protID}->{$dom});
                        }
                        else {
                            push (@{ $prots->{$protID}->{$dom} }, { left => $hit->[1],
                                                                    right => $hit->[2] });
                            $noOvp_match = 1;
                        }
                    }

                    # print Data::Dumper->Dump([$prots->{$protID}->{$dom} ], [qw( *afterCoords )]);
                    # print "-----------------------------------------------------------------\n\n\n";
                    # <STDIN>;
                }
            }
            #First time domain is read
            else {
                push (@{ $prots->{$protID}->{$dom} }, { left => $hit->[1],
                                                        right => $hit->[2] });
            }
        }
    }

    # if ($cycle == 1) {
    #     print Data::Dumper->Dump([$prots ], [qw(*cachitosArmados )]);
    #     exit;
    # }
}
#Merge a new, already-sorted hit into the list of non-overlapping matches
#for a domain: extend an existing segment when the hit overlaps its right
#end, drop the hit when it is contained, or append it when it is disjoint
#from every segment.
sub removeRedundantMatches {
    my ($self, $hit, $matches) = @_;

    my $newMatch = 0;
    foreach my $seg (@{ $matches }) {

        #Hit starts past this segment: provisionally a new match (a later
        #segment in the list may still absorb it and reset the flag).
        if ($hit->[1] > $seg->{right}) {
            $newMatch = 1;
        }

        #Hit overlaps the right end of this segment: extend the segment.
        if (($hit->[1] >= $seg->{left} && $hit->[1] <= $seg->{right}) &&
            ($hit->[2] > $seg->{right})) {
            $seg->{right} = $hit->[2];
            $newMatch = 0;
        }
        #Hit is fully contained within this segment: nothing to add.
        elsif (($hit->[1] >= $seg->{left} && $hit->[1] <= $seg->{right}) &&
               ($hit->[2] <= $seg->{right})) {
            $newMatch = 0;
        }
        #Hit sticks out to the left of a segment. Callers process hits
        #sorted by left coordinate, so this should be impossible; include
        #the dump in the die message (previously it was print()ed to
        #STDOUT and the print's return value appended to the message).
        elsif (($hit->[1] < $seg->{left} && $hit->[2] >= $seg->{left}) &&
               ($hit->[2] <= $seg->{right})) {
            die "Unexpected overlap to the left!... debug!\n" .
                Data::Dumper->Dump([$hit, $matches], [qw(*hit *domains)]);
        }
    }

    if ($newMatch) {
        push (@{ $matches }, { left => $hit->[1], right => $hit->[2] });
    }
}
sub coords_by_left_pos {
    # sort() comparator: order hit records by left coordinate (element 1),
    # breaking ties on the right coordinate (element 2).
    return $a->[1] <=> $b->[1]
        || $a->[2] <=> $b->[2];
}
#==========================================================================
#Run blastp or ssearch36 of the sequence fragments that aligned with
#known domains against the full sequences of all proteins in the
#family
sub alignDomainsVsProteins {
    #Align rescued domain fragments against the family protein sequences
    #with the configured search program (ssearch36 or blastp), writing
    #tabular output for parseDomainAlignments() to read.
    #$cycle (1 or 2) selects the input/output file set of that rescue cycle.
    my ($self, $cycle) = @_;

    my $eval = $self->evalue;
    my $domSeqs = "";
    my $famSeqs = "";
    my $outfile = "";

    if ($cycle == 1) {
        $famSeqs = $self->protSeqs1it;
        $domSeqs = $self->domSeqs1it;
        $outfile = $self->aln1stCycle;
    }
    elsif ($cycle == 2) {
        $famSeqs = $self->protSeqs2it;
        $domSeqs = $self->domSeqs2it;
        $outfile = $self->aln2ndCycle;
    }
    else {
        die "Unknown domain rescue cycle: $cycle";
    }

    # print "Align Cycle: $cycle\n $domSeqs\n $famSeqs\n $outfile\n";

    #Reuse previous results if the output file already exists and is non-empty
    unless (-f $outfile && !(-z $outfile)) {
        if ($self->searchWith eq 'ssearch36') {
            #Defaults: -s BL50 -z 2 (but l1-l6 also works) ... sensitive, but error prone
            #-m 8C produces BLAST-like tabular output with comment lines
            my $args = qq(-z 11 -k 1000 -E $eval -m 8C -s BL50 $domSeqs $famSeqs > $outfile);
            system "ssearch36 $args";
        }
        elsif ($self->searchWith eq 'blastp') {
            #-outfmt 7 is tabular with comment lines, matching the parser
            my $args = qq(-evalue $eval -use_sw_tback -max_hsps 4 -comp_based_stats 2 -outfmt 7 -query $domSeqs -subject $famSeqs > $outfile);
            system "blastp $args";
        }
    }
}
#==========================================================================
#Extract the sequence from the TCDB proteins that had no domain matches
#after the first PFAM domain rescue cycle.
sub extractDomainFragments2it {
    #For every PFAM hit surviving the first rescue cycle, extract the
    #matching sequence range with blastdbcmd and append it (with the
    #header rewritten to "tcid:start-end|domID") to the cycle-2 file.
    my ($self, $domAfter1it) = @_;
    # print Data::Dumper->Dump([$domAfter1it ], [qw(*domainHits )]);
    # exit;

    my $blastDB = $self->blastdb;
    open (my $outh, ">>", $self->domSeqs2it) || die $!;

    #For each tcid
    foreach my $tcid (keys %{ $domAfter1it }) {
        #For each PFAM ID
        foreach my $domID (keys %{ $domAfter1it->{$tcid} }) {
            #For each set of coordinates matching the PFAM domain
            foreach my $hit (@{ $domAfter1it->{$tcid}->{$domID} }) {
                my $pstart = $hit->{left};
                my $pend   = $hit->{right};
                my $args = qq(-db $blastDB -dbtype prot -entry $tcid -range ${pstart}-${pend});
                my $seq = qx(blastdbcmd $args);

                #Quote the ID before using it as a pattern: tcids contain
                #dots, which are regex metacharacters and could otherwise
                #match (and rewrite) the wrong text.
                $seq =~ s/\Q$tcid\E/$tcid:${pstart}-${pend}\|$domID/;
                print $outh $seq;
            } #hit
        } #domain
    } #tcid
    close $outh or die $!;
}
#==========================================================================
#Extract the sequence from the TCDB proteins that correspond to the
#Pfam or CDD hits, this is for the first Domain rescue cycle.
sub extractDomainFragments1it {
    #First rescue cycle: for each relevant domain, extract from the blast
    #database the protein ranges that matched it (per domCoords) and
    #append them with headers of the form
    #"prot:pstart-pend|domID:dstart-dend".
    my ($self, $fam, $relDomains) = @_;
    # print Data::Dumper->Dump([$self->domCoords->{$fam} ], [qw(*domCoords )]);
    # exit;

    my $blastDB = $self->blastdb;
    open (my $outh, ">>", $self->domSeqs1it) || die $!;

    foreach my $dom (@{ $relDomains }) {
        my $domID = $dom->[0];
        #Strip the version suffix from the PFAM accession (e.g. PF00001.21)
        my ($did, $dv) = split(/\./, $domID);

        #now get all the proteins that have a match with this domain
        foreach my $prot (keys %{ $self->domCoords->{$fam} }) {
            if (exists $self->domCoords->{$fam}->{$prot}->{$domID}) {

                #Extract sequences for each individual domain hit
                foreach my $hit (@{ $self->domCoords->{$fam}->{$prot}->{$domID} }) {
                    my $pstart = $hit->{pstart};
                    my $pend   = $hit->{pend};
                    my $dstart = $hit->{dstart};
                    my $dend   = $hit->{dend};

                    my $args = qq(-db $blastDB -dbtype prot -entry $prot -range ${pstart}-${pend});
                    my $seq = qx(blastdbcmd $args);

                    #Quote the ID before using it as a pattern: accessions
                    #may contain dots and other regex metacharacters.
                    $seq =~ s/\Q$prot\E/${prot}:${pstart}-${pend}\|${did}:${dstart}-$dend/;
                    print $outh $seq;

                    # print "$domID\n", Data::Dumper->Dump([$args, $seq], [qw(*args *seq )]);
                    # <STDIN>;
                }
            }
        }
    }
    close $outh or die $!;
}
1;
| SaierLaboratory/TCDBtools | perl-modules/TCDB/Domain/Characterize.pm | Perl | bsd-3-clause | 29,209 |
=head1 NAME
sibyl - Secure authentication using the Sibyl.
=head1 SYNOPSIS
The Sibyl is a client-server implementation of a secure authentication
protocol involving RSA encryption of the shadow (or analogous)
file. It requires a hardware device (the B<Sibyl>) and a software layer.
=head1 DESCRIPTION
The B<Sibyl> is a restrictive embodiment of an RSA oracle, optimized
(security-wise) for authentication algorithms in situations in which
the database of authentication tokens ---the password database--- is
deemed unsafe (which, as a matter of fact, means I<always>).
It comprises two layers: a hardware item and a
piece of software which implements a client-server architecture between
the authenticating computer (the
one the password database is stored on) and
the hardware item. The diagram shows a possible implementation.
The B<Sibyl> is connected to the Authentication Server (a.s.)
---possibly the computer you are reading this man page on---. There
are I<two RSA key pairs>: the I<encryption pair> and the I<signing
pair> (this is essential for the security of the protocol). The
private keys are stored on the B<Sibyl>, whereas the a.s. has access
to both public keys (which are usually stored in the F</etc/sibyl>
directory, as F<sign.pub> and F<decrypt.pub>).
In the a.s. the authentication tokens are stored (for example, in a
typical Unix logging environment) as base64 encoded RSA-encrypted
I<crypt(3)> passwords preceded by the salt used in the I<crypt(3)>
process. That is, each entry in the F<shadow> database (which is
stored as a standard password file in F</etc/sibyl/shadow>) would look
like:
=over
C<user:$1$SOvM5$Rada783R/783478dadfa... (till 2048
binary bits, say):...>
=back
Which is the username followed by the salt (C<$1$SOvM5$>) and the
output of
=over
C<base64(RSA_encrypt(crypt(password, salt))).>
=back
Whenever a user tries to log in on the a.s., the following steps take
place (first of all, the Authentication Server connects to the Sibyl):
=over
=item *
On the Authentication Server:
=over
=item
The a.s. receives a I<nonce> (unique to this authentication
process) from the B<Sibyl>: I<n>.
=item
From the password database, the a.s. gets the I<real> authentication token (which is
RSA-encrypted with the B<Sibyl>'s public key): I<p1>, and (if this
exists) the I<salt> used to I<crypt(3)> this token.
=item
The a.s. grabs the I<password> entered by the logging user.
=item
The a.s. encrypts I<n:saltpassword> using the B<Sibyl>'s public key
to get I<p2>.
=item
The a.s. generates another nonce I<m>.
=item
The a.s. sends a message of the form I<m;p1;p2> to the
B<Sibyl>.
=back
=item
On the B<Sibyl>:
=over
=item
The B<Sibyl> decrypts I<p1> and I<p2> to get I<u1> and I<u2>.
=item
Checks that I<u2> matches the pattern I</^n:(.*)$/> and sets I<v1=$1> (in
Perl's regexp notation).
=item
If I<u1>=I<v1> then it returns the
message I<m:1> I<signed> with the signing key. Otherwise, it returns
the message I<m:0> I<signed> with the same key.
=back
=item
On the a.s.:
=over
=item
It receives the signed message from the B<Sibyl>.
=item
If the received message has been properly signed and the message is I<m:1>, then B<grant
authentication>. In any other case B<deny authentication>.
=back
=back
=head1 OPERATION
The directory F</etc/sibyl> needs to exist and include the files
=over
=item B<sign.pub>
the public key corresponding to the Sibyl's private signing key.
=item B<decrypt.pub>
the public key corresponding to the Sibyl's private decryption key.
=item B<shadow>
the authentication token database. This file I<must> be created using the C<shadow2sibyl.pl>
script, included in the Sibyl's distribution files.
=back
=head1 LIMITATIONS AND WARNINGS
Although the Sibyl adds a strong authentication layer against theft of the standard
C<shadow> file, it requires careful set up to prevent both DoS and to ensure availability.
The default behaviour assumes that the B<root> user will B<not> be linked to the
Sibyl, as this would require this user to log in through the Sibyl's services, which
may be impossible in case of a failure of the Sibyl, or a general network failure.
The system administrator is thus advised either to keep this default behaviour
or, if modifying it, to provide an alternative method of logging into the system.
=head1 FILES
/etc/sibyl/sign.pub
/etc/sibyl/decrypt.pub
/etc/sibyl/shadow
=head1 SEE ALSO
pam_sibyl(n), shadow2sibyl(n), sibyl_srv(n), sibyl_client(n)
=head1 AUTHORS
pam_sibyl was written by Pedro Fortuny <info@pfortuny.net> and Rafael Casado <rafacas@gmail.com>
| thesibyl/thesibyl | doc/sibyl.pod | Perl | bsd-3-clause | 4,611 |
use strict;
use warnings;

# Each input line mixes capitalized city names (ending in a comma) and
# semicolons with space-separated mileage positions; strip the text,
# sort the positions numerically, and print the distance between
# consecutive stops as a comma-separated list.
open( my $infile, "<", $ARGV[0] )
    or die("Cannot open file $ARGV[0] for reading: $!");

while ( my $line = <$infile> ) {
    chomp($line);

    # Remove city names ("Name,") and stray semicolons, leaving numbers.
    $line =~ s/[A-Z][a-z]+,|;//g;

    # Convert absolute positions into successive deltas.
    my $prev = 0;
    my @legs = map {
        my $delta = $_ - $prev;
        $prev += $delta;
        $delta;
    } sort { $a <=> $b } split( / /, $line );

    printf( "%s\n", join( ',', @legs ) );
}
close($infile);
| nikai3d/ce-challenges | easy/road_trip.pl | Perl | bsd-3-clause | 371 |
package App::Netdisco::Web::Plugin::AdminTask::DuplicateDevices;

use Dancer ':syntax';
use Dancer::Plugin::Ajax;
use Dancer::Plugin::DBIC;
use Dancer::Plugin::Auth::Extensible;

use App::Netdisco::Web::Plugin;

# Register this page in the web UI's Admin menu.
register_admin_task({
    tag => 'duplicatedevices',
    label => 'Duplicate Devices',
});

# AJAX handler (admin role required): render an HTML fragment listing all
# devices whose serial number is shared by more than one device row.
ajax '/ajax/content/admin/duplicatedevices' => require_role admin => sub {
    # Outer query: devices whose serial is in the subquery below.
    # Subquery: non-null, non-empty serials appearing on 2+ devices.
    my @set = schema('netdisco')->resultset('Device')->search({
        serial => { '-in' => schema('netdisco')->resultset('Device')->search({
            '-and' => [serial => { '!=', undef }, serial => { '!=', '' }],
        }, {
            group_by => ['serial'],
            having => \'count(*) > 1',
            columns => 'serial',
        })->as_query
        },
    }, { columns => [qw/ip dns contact location name model os_ver serial/] })
      ->with_times->hri->all;

    content_type('text/html');
    template 'ajax/admintask/duplicatedevices.tt', {
        results => \@set
    }, { layout => undef };
};

true;
| netdisco/netdisco | lib/App/Netdisco/Web/Plugin/AdminTask/DuplicateDevices.pm | Perl | bsd-3-clause | 988 |
# *************************************************************************
# Copyright (c) 2014-2016, SUSE LLC
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of SUSE LLC nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# *************************************************************************
#
use 5.012;
use strict;
use warnings;
# Each _method_* helper turns tokenized CLI input into a REST request
# triplet: [ HTTP method, resource path, request body ].

# $METHOD priv
sub _method_priv {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv", $th->{_REST} ];
}

# $METHOD priv eid $NUM
sub _method_priv_eid_num {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv/eid/$th->{_NUM}", $th->{_REST} ];
}

# $METHOD priv eid $NUM $TIMESTAMP
sub _method_priv_eid_num_timestamp {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv/eid/$th->{_NUM}/$th->{_TIMESTAMP}", $th->{_REST} ];
}
# History variants: [ HTTP method, "priv/history/..." path, request body ].

# $METHOD priv history eid $NUM
sub _method_priv_history_eid_num {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv/history/eid/$th->{_NUM}", $th->{_REST} ];
}

# $METHOD priv history eid $NUM $TSRANGE
sub _method_priv_history_eid_num_tsrange {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv/history/eid/$th->{_NUM}/$th->{_TSRANGE}", $th->{_REST} ];
}

# $METHOD priv history nick $TERM
sub _method_priv_history_nick_term {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv/history/nick/$th->{_TERM}", $th->{_REST} ];
}

# $METHOD priv history nick $TERM $TSRANGE
sub _method_priv_history_nick_term_tsrange {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv/history/nick/$th->{_TERM}/$th->{_TSRANGE}", $th->{_REST} ];
}

# $METHOD priv history phid $NUM
sub _method_priv_history_phid_num {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv/history/phid/$th->{_NUM}", $th->{_REST} ];
}

# $METHOD priv history self
sub _method_priv_history_self {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv/history/self", $th->{_REST} ];
}

# $METHOD priv history self $TSRANGE
sub _method_priv_history_self_tsrange {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv/history/self/$th->{_TSRANGE}", $th->{_REST} ];
}
# Current-status variants: [ HTTP method, "priv/..." path, request body ].

# $METHOD priv nick $TERM
sub _method_priv_nick_term {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv/nick/$th->{_TERM}", $th->{_REST} ];
}

# $METHOD priv nick $TERM $TIMESTAMP
sub _method_priv_nick_term_timestamp {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv/nick/$th->{_TERM}/$th->{_TIMESTAMP}", $th->{_REST} ];
}

# $METHOD priv self
sub _method_priv_self {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv/self", $th->{_REST} ];
}

# $METHOD priv self $TIMESTAMP
sub _method_priv_self_timestamp {
    my ( $tokens, $th ) = @_;
    return [ $tokens->[0], "priv/self/$th->{_TIMESTAMP}", $th->{_REST} ];
}
1;
| smithfarm/dochazka-cli | lib/App/Dochazka/CLI/Commands/RestTest/Priv.pm | Perl | bsd-3-clause | 4,761 |
# WARNING
# Do not edit this file manually. Your changes will be overwritten with next FlowPDF update.
# WARNING
=head1 NAME
FlowPDF::Log
=head1 AUTHOR
CloudBees
=head1 DESCRIPTION
This class provides logging functionality for FlowPDF.
=head1 CAVEATS
This package is being loaded at the beginning of FlowPDF execution behind the scene.
It is required to set up logging before other components are initialized.
The logger retrieves the current debug level from the configuration through the run context.
To enable this mechanism you need to add a B<debugLevel> property to your configuration.
It will be read automatically, and the logger will already have this debug level.
Supported debug levels:
=over 4
=item B<INFO>
Provides standard output. This is default level.
debugLevel property should be set to 0.
=item B<DEBUG>
Provides the same output from INFO level + debug output.
debugLevel property should be set to 1.
=item B<TRACE>
Provides the same output from DEBUG level + TRACE output.
debugLevel property should be set to 2.
=back
=head1 SYNOPSIS
To import FlowPDF::Log:
%%%LANG=perl%%%
use FlowPDF::Log
%%%LANG%%%
=head1 METHODS
This package imports following functions on load into current scope:
=head2 logInfo(@messages)
=head3 Description
Logs an info message. Output is the same as from print function.
=head3 Parameters
=over 4
=item (List of String) Log messages
=back
=head3 Returns
=over
=item (Boolean) 1
=back
=head3 Usage
%%%LANG=perl%%%
logInfo("This is an info message");
%%%LANG%%%
=head2 logDebug(@messages)
=head3 Description
Logs a debug message.
=head3 Parameters
=over 4
=item (List of String) Log messages
=back
=head3 Returns
=over
=item (Boolean) 1
=back
=head3 Usage
%%%LANG=perl%%%
# this will print [DEBUG] This is a debug message.
# but only if debug level is enough (DEBUG or more).
logDebug("This is a debug message");
%%%LANG%%%
=head2 logTrace(@messages)
=head3 Description
Logs a trace message
=head3 Parameters
=over 4
=item (List of String) Log messages
=back
=head3 Returns
=over
=item (Boolean) 1
=back
=head3 Usage
%%%LANG=perl%%%
# this will print [TRACE] This is a trace message.
# but only if debug level is enough (TRACE or more).
    logTrace("This is a trace message");
%%%LANG%%%
=head2 logWarning(@messages)
=head3 Description
Logs a warning message.
=head3 Parameters
=over 4
=item (List of String) Log messages
=back
=head3 Returns
=over
=item (Boolean) 1
=back
=head3 Usage
%%%LANG=perl%%%
# this will print [WARNING] This is a warning message for any debug level:
logWarning("This is a warning message");
%%%LANG%%%
=head2 logError(@messages)
=head3 Description
Logs an error message
=head3 Parameters
=over 4
=item (List of String) Log messages
=back
=head3 Returns
=over
=item (Boolean) 1
=back
=head3 Usage
%%%LANG=perl%%%
# this will print [ERROR] This is an error message for any debug level:
logError("This is an error message");
%%%LANG%%%
=head2 logInfoDiag
This function works exactly as logInfo, but adds additional markups into log. Then this info will be displayed at Diagnostic tab of a job.
=head2 logWarningDiag
This function works exactly as logWarning, but adds additional markups into log. Then this warning will be displayed at Diagnostic tab of a job.
=head2 logErrorDiag
This function works exactly as logError, but adds additional markups into log. Then this error will be displayed at Diagnostic tab of a job.
=cut
package FlowPDF::Log;
use base qw/Exporter/;
our @EXPORT = (
# regular log functions
'logInfo',
'logDebug',
'logTrace',
'logError',
'logWarning',
# log functions for postprocessor. They are present from 1.2.* FlowPDF
'logInfoDiag',
'logWarningDiag',
'logErrorDiag'
);
use strict;
use warnings;
use Data::Dumper;
use Carp;
use Try::Tiny;
use FlowPDF::Helpers qw/inArray/;
# exceptions
use FlowPDF::Exception::MissingFunctionArgument;
use FlowPDF::Exception::WrongFunctionArgumentType;
our $LOG_LEVEL = 0;
our $LOG_TO_PROPERTY = '';
our $MASK_PATTERNS = [];
use constant {
ERROR => -1,
INFO => 0,
DEBUG => 1,
TRACE => 2,
};
sub setMaskPatterns {
    # Register one or more secret values to be masked in all log output.
    # Accepts function, class-method, or instance-method call styles.
    # Throws FlowPDF::Exception::MissingFunctionArgument when no values
    # are supplied. Returns 1.
    my (@params) = @_;

    unless (@params) {
        FlowPDF::Exception::MissingFunctionArgument->new({
            argument => 'mask patterns',
            function => 'setMaskPatterns'
        })->throw();
    }

    # Drop the invocant when called as a class or instance method.
    shift @params if $params[0] eq __PACKAGE__ || ref $params[0] eq __PACKAGE__;

    for my $pattern (@params) {
        # Masking trivially common values would mangle unrelated log text.
        next if isCommonPassword($pattern);
        $pattern = quotemeta($pattern);
        # Skip patterns that are already registered.
        next if inArray($pattern, @$MASK_PATTERNS);
        push @$MASK_PATTERNS, $pattern;
    }
    return 1;
}
sub isCommonPassword {
    # True for trivially weak values ("password", or any repetition of
    # "TEST" in any case) that setMaskPatterns refuses to register.
    my ($password) = @_;
    return 1 if $password eq 'password';
    return 1 if $password =~ m/^(?:TEST)+$/is;
    return 0;
}
sub maskLine {
    # Replace every registered secret pattern in $line with "[PROTECTED]".
    # Supports three call styles:
    #   maskLine($line)                 - plain function
    #   FlowPDF::Log->maskLine($line)   - class method
    #   $logger->maskLine($line)        - instance method
    my ($self, $line) = @_;
    # Plain function call: the first argument IS the line. Only treat the
    # invocant as the line when it is not a reference and is not the
    # package name used in a class-method call (checking $line too, so a
    # class-method call no longer masks the literal package name).
    if (!ref $self && ($self ne __PACKAGE__ || !defined $line)) {
        $line = $self;
    }
    for my $p (@$MASK_PATTERNS) {
        $line =~ s/$p/[PROTECTED]/gs;
    }
    return $line;
}
sub setLogToProperty {
    #Set the property path that log output is mirrored to (see _log()).
    #Three call styles are supported:
    #  $logger->setLogToProperty($path)       - per-instance setting, returns $logger
    #  FlowPDF::Log->setLogToProperty($path)  - package-wide default, returns 1
    #  setLogToProperty($path)                - package-wide default, returns 1
    my ($param1, $param2) = @_;

    # 1st case, when param 1 is a reference, we are going to set log to property for current object.
    # but if this reference is not a FlowPDF::Log reference, it will bailOut
    if (ref $param1 and ref $param1 ne __PACKAGE__) {
        # exception
        FlowPDF::Exception::WrongFunctionArgumentType->new({
            function => 'setLogToProperty',
            got => ref $param1,
            expected => 'FlowPDF::Log'
        })->throw();
    }
    if (ref $param1) {
        #Instance call: $param2 is the property path.
        if (!defined $param2) {
            # exception
            FlowPDF::Exception::MissingFunctionArgument->new({
                function => 'setLogToProperty',
                argument => 'Property Path',
            })->throw();
            # croak "Property path is mandatory parameter";
        }
        $param1->{logToProperty} = $param2;
        return $param1;
    }
    else {
        #Class or plain-function call: the path may be in either slot.
        if ($param1 eq __PACKAGE__) {
            $param1 = $param2;
        }
        if (!defined $param1) {
            croak "Property path is mandatory parameter";
        }
        $LOG_TO_PROPERTY = $param1;
        return 1;
    }
}
sub getLogProperty {
    # Instance setting when called on a FlowPDF::Log object,
    # otherwise the package-wide default.
    my ($self) = @_;
    return $self->{logToProperty} if ref $self && ref $self eq __PACKAGE__;
    return $LOG_TO_PROPERTY;
}

sub getLogLevel {
    # Instance setting when called on a FlowPDF::Log object,
    # otherwise the package-wide default.
    my ($self) = @_;
    return $self->{level} if ref $self && ref $self eq __PACKAGE__;
    return $LOG_LEVEL;
}
sub setLogLevel {
    # Set the debug level, either per instance or package-wide.
    #   $logger->setLogLevel(1)        - instance setting, returns $logger
    #   FlowPDF::Log->setLogLevel(1)   - package default, returns 1
    #   setLogLevel(1)                 - package default, returns 1
    my ($param1, $param2) = @_;

    # Reject method calls on foreign objects.
    if (ref $param1 and ref $param1 ne __PACKAGE__) {
        croak (q|Expected a reference to FlowPDF::Log, not a '| . ref $param1 . q|' reference|);
    }

    if (ref $param1) {
        # Instance call: $param2 is the level.
        if (!defined $param2) {
            croak "Log level is mandatory parameter";
        }
        $param1->{level} = $param2;
        return $param1;
    }
    else {
        # Class or plain-function call: the level may be in either slot.
        if ($param1 eq __PACKAGE__) {
            $param1 = $param2;
        }
        if (!defined $param1) {
            # Fixed message: previously a copy-paste of the property-path
            # error from setLogToProperty.
            croak "Log level is mandatory parameter";
        }
        $LOG_LEVEL = $param1;
        return 1;
    }
}
sub new {
    # Construct a logger. 'level' and 'logToProperty' fall back to the
    # package-wide defaults when not supplied in $opts.
    my ($class, $opts) = @_;

    my $level = defined $opts->{level}
        ? $opts->{level}
        : $LOG_LEVEL;
    my $logToProperty = defined $opts->{logToProperty}
        ? $opts->{logToProperty}
        : $LOG_TO_PROPERTY;

    return bless {
        level         => $level,
        logToProperty => $logToProperty,
    }, $class;
}
# TODO: Refactor this a bit
# now it is forbidden to call it with full function name.

#Diagnostics-tab variants: each wraps the message in [POSTP] markers via
#printDiagMessage() so it is picked up for the job's Diagnostics tab.
sub logInfoDiag {
    return printDiagMessage('INFO', @_);
}

sub logErrorDiag {
    return printDiagMessage('ERROR', @_);
}

sub logWarningDiag {
    return printDiagMessage('WARNING', @_);
}
sub printDiagMessage {
    #Wrap a message in "[POSTP][LEVEL]: ... :[LEVEL][POSTP]" markers and
    #emit it through info(). Returns 0 (emitting nothing) when the level
    #is not one of INFO/WARNING/ERROR or when no message was given.
    my @params = @_;

    #Ensure a logger object is the first element.
    if (!ref $params[0] || ref $params[0] ne __PACKAGE__) {
        unshift @params, __PACKAGE__->new();
    }
    my $logObject = shift @params;
    my $level = shift @params;
    if (!$logObject || !$level || !@params) {
        return 0;
    }
    $level = uc $level;
    if ($level !~ m/^(?:ERROR|WARNING|INFO)$/s) {
        return 0;
    }
    # \n[OUT][%s]: %s :[%s][OUT]\n
    my $begin = "\n[POSTP][$level]: ";
    my $end = " :[$level][POSTP]\n";
    my $msg = join '', @params;
    $msg = $begin . $msg . $end;
    #Always emitted at INFO priority regardless of $level tag.
    return info($logObject, $msg);
}
#Exported wrappers: each logX() ensures a FlowPDF::Log invocant exists,
#then delegates to the corresponding instance method below it.
sub logInfo {
    my @params = @_;
    if (!ref $params[0] || ref $params[0] ne __PACKAGE__) {
        unshift @params, __PACKAGE__->new();
    }
    return info(@params);
}

sub info {
    my ($self, @messages) = @_;
    $self->_log(INFO, @messages);
}

sub logDebug {
    my @params = @_;
    if (!ref $params[0] || ref $params[0] ne __PACKAGE__) {
        unshift @params, __PACKAGE__->new();
    }
    return debug(@params);
}

sub debug {
    my ($self, @messages) = @_;
    $self->_log(DEBUG, '[DEBUG]', @messages);
}

sub logError {
    my @params = @_;
    if (!ref $params[0] || ref $params[0] ne __PACKAGE__) {
        unshift @params, __PACKAGE__->new();
    }
    return error(@params);
}

sub error {
    #ERROR is -1, so errors are printed at every verbosity level.
    my ($self, @messages) = @_;
    $self->_log(ERROR, '[ERROR]', @messages);
}

sub logWarning {
    my @params = @_;
    if (!ref $params[0] || ref $params[0] ne __PACKAGE__) {
        unshift @params, __PACKAGE__->new();
    }
    return warning(@params);
}

sub warning {
    #Warnings carry a [WARNING] tag but are logged at INFO priority,
    #so they are visible at the default debug level.
    my ($self, @messages) = @_;
    $self->_log(INFO, '[WARNING]', @messages);
}

sub logTrace {
    my @params = @_;
    if (!ref $params[0] || ref $params[0] ne __PACKAGE__) {
        unshift @params, __PACKAGE__->new();
    }
    return trace(@params);
}

sub trace {
    my ($self, @messages) = @_;
    $self->_log(TRACE, '[TRACE]', @messages);
}
sub level {
    #Combined getter/setter for the instance debug level.
    my ($self, $level) = @_;
    if (defined $level) {
        $self->{level} = $level;
    }
    else {
        return $self->{level};
    }
}

sub logToProperty {
    #Combined getter/setter for the instance log-property path.
    my ($self, $prop) = @_;
    if (defined $prop) {
        $self->{logToProperty} = $prop;
    }
    else {
        return $self->{logToProperty};
    }
}
#Width (in characters) of divider/header rules; file lexical shared by
#the two subs below.
my $length = 40;

sub divider {
    #Print a horizontal rule at INFO level: '=' when $thick is true,
    #'-' otherwise.
    my ($self, $thick) = @_;
    if ($thick) {
        $self->info('=' x $length);
    }
    else {
        $self->info('-' x $length);
    }
}

sub header {
    #Print a header line followed by a rule of matching thickness.
    my ($self, $header, $thick) = @_;
    my $symb = $thick ? '=' : '-';
    $self->info($header);
    $self->info($symb x $length);
}
sub _log {
    #Core emitter: prints each message (Dumper-expanded for references,
    #with registered secrets masked) when $level is within the configured
    #verbosity, and optionally appends the lines to a property.
    my ($self, $level, @messages) = @_;

    #Suppress messages above the configured verbosity.
    return if $level > $self->level;

    my @lines = ();
    for my $message (@messages) {
        if (ref $message) {
            my $t = Dumper($message);
            $t = $self->maskLine($t);
            print $t;
            push @lines, $t;
        }
        else {
            $message = $self->maskLine($message);
            print "$message\n";
            push @lines, $message;
        }
    }
    if ($self->{logToProperty}) {
        my $prop = $self->{logToProperty};
        my $value = "";
        #Best effort: the property may not exist yet.
        try {
            $value = $self->ec->getProperty($prop)->findvalue('//value')->string_value;
            1;
        };
        #Append new lines after any existing property content.
        unshift @lines, split("\n", $value);
        $self->ec->setProperty($prop, join("\n", @lines));
    }
}
sub ec {
    # Lazily create and cache an ElectricCommander client; the module is
    # require'd on first use so loading FlowPDF::Log does not need it.
    my ($self) = @_;
    return $self->{ec} if $self->{ec};
    require ElectricCommander;
    $self->{ec} = ElectricCommander->new;
    return $self->{ec};
}
1;
| electric-cloud/EC-JBoss | src/main/resources/project/pdk/FlowPDF/Log.pm | Perl | apache-2.0 | 11,764 |
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package centreon::common::aruba::snmp::mode::components::fan;
use strict;
use warnings;
# Translation of the numeric SNMP fan-status values into readable states.
my %map_fan_status = (
    1 => 'active',
    2 => 'inactive',
);

# In MIB 'aruba-systemext'
my $mapping = {
    sysExtFanStatus => { oid => '.1.3.6.1.4.1.14823.2.2.1.2.1.17.1.2', map => \%map_fan_status },
};
# Root of the wlsxSysExtFan table; requested in load() and parsed in check().
my $oid_wlsxSysExtFanEntry = '.1.3.6.1.4.1.14823.2.2.1.2.1.17.1';

# Queue the fan table OID on the shared request list so the framework can
# fetch all component tables in a single SNMP query.
sub load {
    my ($self) = @_;

    push @{$self->{request}}, { oid => $oid_wlsxSysExtFanEntry };
}
# Walk the fan table fetched by load(), report each fan's status as a long
# message, and raise a short message whenever a fan's mapped status is not
# considered 'ok' by the configured severity rules.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking fans");
    $self->{components}->{fan} = {name => 'fans', total => 0, skip => 0};
    # Honor --filter for the whole 'fan' section.
    return if ($self->check_filter(section => 'fan'));

    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_wlsxSysExtFanEntry}})) {
        # Only the status column is of interest; the suffix is the instance id.
        next if ($oid !~ /^$mapping->{sysExtFanStatus}->{oid}\.(.*)$/);
        my $instance = $1;
        my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{$oid_wlsxSysExtFanEntry}, instance => $instance);

        # Per-instance filtering.
        next if ($self->check_filter(section => 'fan', instance => $instance));

        $self->{components}->{fan}->{total}++;
        # The instance id doubles as the fan's name -- the MIB apparently
        # exposes no descriptive label for fans.
        $self->{output}->output_add(long_msg => sprintf("Fan '%s' status is %s [instance: %s].",
                                    $instance, $result->{sysExtFanStatus},
                                    $instance
                                    ));
        # 'litteral' is the spelling used by the centreon output API.
        my $exit = $self->get_severity(section => 'fan', value => $result->{sysExtFanStatus});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("Fan '%s' status is %s",
                                                             $instance, $result->{sysExtFanStatus}));
        }
    }
}
1; | bcournaud/centreon-plugins | centreon/common/aruba/snmp/mode/components/fan.pm | Perl | apache-2.0 | 2,658 |
require 5;
package Lingua::EN::Numbers::Ordinate;
use strict;
# Time-stamp: "2004-12-29 19:06:20 AST"
use vars qw(@ISA @EXPORT @EXPORT_OK $VERSION);
require Exporter;
@ISA = ('Exporter');
@EXPORT = ('ordinate');
@EXPORT_OK = ('ordsuf', 'th');
$VERSION = "1.02";
###########################################################################
=head1 NAME
Lingua::EN::Numbers::Ordinate -- go from cardinal number (3) to ordinal ("3rd")
=head1 SYNOPSIS
use Lingua::EN::Numbers::Ordinate;
print ordinate(4), "\n";
# prints 4th
print ordinate(-342), "\n";
# prints -342nd
# Example of actual use:
...
for(my $i = 0; $i < @records; $i++) {
unless(is_valid($records[$i])) {
warn "The ", ordinate($i), " record is invalid!\n";
next;
}
...
}
=head1 DESCRIPTION
There are two kinds of numbers in English -- cardinals (1, 2, 3...), and
ordinals (1st, 2nd, 3rd...). This library provides functions for giving
the ordinal form of a number, given its cardinal value.
=head1 FUNCTIONS
=over
=item ordinate(SCALAR)
Returns a string consisting of that scalar's string form, plus the
appropriate ordinal suffix. Example: C<ordinate(23)> returns "23rd".
As a special case, C<ordinate(undef)> and C<ordinate("")> return "0th",
not "th".
This function is exported by default.
=item th(SCALAR)
Merely an alias for C<ordinate>, but not exported by default.
=item ordsuf(SCALAR)
Returns just the appropriate ordinal suffix for the given scalar
numeric value. This is what C<ordinate> uses to actually do its
work. For example, C<ordsuf(3)> is "rd".
Not exported by default.
=back
The above functions are all prototyped to take a scalar value,
so C<ordinate(@stuff)> is the same as C<ordinate(scalar @stuff)>.
=head1 CAVEATS
* Note that this library knows only about numbers, not number-words.
C<ordinate('seven')> might just as well be C<ordinate('superglue')>
or C<ordinate("\x1E\x9A")> -- you'll get the fallthru case of the input
string plus "th".
* As is unavoidable, C<ordinate(0256)> returns "174th" (because ordinate
sees the value 174). Similarly, C<ordinate(1E12)> returns
"1000000000000th". Returning "trillionth" would be nice, but that's an
awfully atypical case.
* Note that this library's algorithm (as well as the basic concept
and implementation of ordinal numbers) is totally language specific.
To pick a trivial example, consider that in French, 1 ordinates
as "1ier", whereas 41 ordinates as "41ieme".
=head1 STILL NOT SATISFIED?
Bored of this...?
use Lingua::EN::Numbers::Ordinate qw(ordinate th);
...
print th($n), " entry processed...\n";
...
Try this bit of lunacy:
{
my $th_object;
sub _th () { $th_object }
package Lingua::EN::Numbers::Ordinate::Overloader;
my $x; # Gotta have something to bless.
$th_object = bless \$x; # Define the object now, which _th returns
use Carp ();
use Lingua::EN::Numbers::Ordinate ();
sub overordinate {
Carp::croak "_th should be used only as postfix!" unless $_[2];
Lingua::EN::Numbers::Ordinate::ordinate($_[1]);
}
use overload '&' => \&overordinate;
}
Then you get to do:
print 3 & _th, "\n";
# prints "3rd"
print 1 + 2 & _th, "\n";
# prints "3rd" too!
# Because of the precedence of & !
print _th & 3, "\n";
# dies with: "_th should be used only as postfix!"
Kooky, isn't it? For more delightful deleria like this, see
Damian Conway's I<Object Oriented Perl> from Manning Press.
Kinda makes you like C<th(3)>, doesn't it?
=head1 COPYRIGHT
Copyright (c) 2000 Sean M. Burke. All rights reserved.
This library is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.
=head1 AUTHOR
Sean M. Burke C<sburke@cpan.org>
=cut
###########################################################################
# Return the English ordinal suffix ('st', 'nd', 'rd', or 'th') for a value.
# Undefined, empty, zero, non-numeric, and non-integer inputs all get 'th',
# and the sign of the number is ignored.
sub ordsuf ($) {
    my ($num) = @_;

    # undef, 0, '', and non-numeric strings (which numify to 0) => 'th'.
    return 'th' if !defined($num) || !( 0 + $num );

    my $magnitude = abs($num);                      # sign never matters
    return 'th' if $magnitude != int($magnitude);   # non-integers: best guess

    # Only the last two digits decide the suffix: 11, 12, 13 are the
    # exceptions; otherwise the final digit rules.
    my $last_two = $magnitude % 100;
    return 'th' if $last_two >= 11 && $last_two <= 13;

    my %suffix_for = ( 1 => 'st', 2 => 'nd', 3 => 'rd' );
    return $suffix_for{ $last_two % 10 } || 'th';
}
# Return the value with its ordinal suffix appended, e.g. 4 => "4th".
# undef and "" normalize to 0 (yielding "0th"), as documented above.
sub ordinate ($) {
    my $value = $_[0] || 0;
    return $value . ordsuf($value);
}
*th = \&ordinate; # correctly copies the prototype, too.
###########################################################################
1;
__END__
| ya7lelkom/swift-k | tests/release/Lingua/EN/Numbers/Ordinate.pm | Perl | apache-2.0 | 4,471 |
package TAP::Formatter::Console::Session;
use strict;
use TAP::Formatter::Session;
use vars qw($VERSION @ISA);
@ISA = qw(TAP::Formatter::Session);
# Install thin delegator methods (header, result, clear_for_close,
# close_test) at compile time.  Each generated method lazily builds the
# per-session closure table via _closures() and dispatches to the closure
# of the same name.  (The previous file-level "my @ACCESSOR" declaration
# was never used anywhere in this module and has been removed.)
BEGIN {
    my @CLOSURE_BINDING = qw( header result clear_for_close close_test );

    for my $method (@CLOSURE_BINDING) {
        no strict 'refs';
        *$method = sub {
            my $self = shift;
            return ( $self->{_closures} ||= $self->_closures )->{$method}
              ->(@_);
        };
    }
}
=head1 NAME
TAP::Formatter::Console::Session - Harness output delegate for default console output
=head1 VERSION
Version 3.28
=cut
$VERSION = '3.28';
=head1 DESCRIPTION
This provides console orientated output formatting for TAP::Harness.
=cut
# Build and return the coderef used by the 'result' closure to print one
# parse result.  When the formatter has a colorizer, the returned closure
# scans @color_map in order, applies the colors of the first matching rule
# before printing, and resets colors afterwards; without a colorizer it
# simply prints the formatted result.
sub _get_output_result {
    my $self = shift;

    # Ordered coloring rules -- first match wins:
    # failed tests red, skips white-on-blue, todos yellow.
    my @color_map = (
        {   test   => sub { $_->is_test && !$_->is_ok },
            colors => ['red'],
        },
        {   test   => sub { $_->is_test && $_->has_skip },
            colors => [
                'white',
                'on_blue'
            ],
        },
        {   test   => sub { $_->is_test && $_->has_todo },
            colors => ['yellow'],
        },
    );

    my $formatter = $self->formatter;
    my $parser    = $self->parser;

    return $formatter->_colorizer
      ? sub {
        my $result = shift;
        for my $col (@color_map) {
            # The rule subs inspect the result through $_.
            local $_ = $result;
            if ( $col->{test}->() ) {
                $formatter->_set_colors( @{ $col->{colors} } );
                last;
            }
        }
        $formatter->_output( $self->_format_for_output($result) );
        $formatter->_set_colors('reset');
      }
      : sub {
        $formatter->_output( $self->_format_for_output(shift) );
      };
}
# Build the per-session closure table consumed by the delegator methods
# installed in the BEGIN block above (header, result, clear_for_close,
# close_test).  All closures share the lexical state captured here:
# formatter verbosity flags, the pretty-printed test name, the running
# plan string, and bookkeeping for the once-per-second status line.
sub _closures {
    my $self = shift;

    my $parser     = $self->parser;
    my $formatter  = $self->formatter;
    my $pretty     = $formatter->_format_name( $self->name );
    my $show_count = $self->show_count;

    # Closures and defaults for the output handlers below.
    my $really_quiet = $formatter->really_quiet;
    my $quiet        = $formatter->quiet;
    my $verbose      = $formatter->verbose;
    my $directives   = $formatter->directives;
    my $failures     = $formatter->failures;
    my $comments     = $formatter->comments;

    my $output_result = $self->_get_output_result;

    # $output holds the *name* of the formatter output method to call; it
    # starts as '_output' and is re-selected per parser inside 'result'.
    my $output          = '_output';
    my $plan            = '';
    my $newline_printed = 0;

    my $last_status_printed = 0;

    return {
        header => sub {
            $formatter->_output($pretty)
              unless $really_quiet;
        },

        result => sub {
            my $result = shift;

            if ( $result->is_bailout ) {
                $formatter->_failure_output(
                        "Bailout called. Further testing stopped: "
                      . $result->explanation
                      . "\n" );
            }

            return if $really_quiet;

            my $is_test = $result->is_test;

            # These are used in close_test - but only if $really_quiet
            # is false - so it's safe to only set them here unless that
            # relationship changes.
            if ( !$plan ) {
                my $planned = $parser->tests_planned || '?';
                $plan = "/$planned ";
            }
            $output = $formatter->_get_output_method($parser);

            if ( $show_count and $is_test ) {
                my $number = $result->number;
                my $now    = CORE::time;

                # Print status roughly once per second.
                # We will always get the first number as a side effect of
                # $last_status_printed starting with the value 0, which $now
                # will never be. (Unless someone sets their clock to 1970)
                if ( $last_status_printed != $now ) {
                    $formatter->$output("\r$pretty$number$plan");
                    $last_status_printed = $now;
                }
            }

            # Echo the raw result line when verbosity or the per-kind
            # flags (failures/comments/directives) ask for it.
            if (!$quiet
                && (   $verbose
                    || ( $is_test && $failures && !$result->is_ok )
                    || ( $comments && $result->is_comment )
                    || ( $directives && $result->has_directive ) )
              )
            {
                unless ($newline_printed) {
                    $formatter->_output("\n");
                    $newline_printed = 1;
                }
                $output_result->($result);
                $formatter->_output("\n");
            }
        },

        clear_for_close => sub {
            # Blank out the widest status line printed so far.
            my $spaces
              = ' ' x length( '.' . $pretty . $plan . $parser->tests_run );
            $formatter->$output("\r$spaces");
        },

        close_test => sub {
            if ( $show_count && !$really_quiet ) {
                $self->clear_for_close;
                $formatter->$output("\r$pretty");
            }

            # Avoid circular references
            $self->parser(undef);
            $self->{_closures} = {};

            return if $really_quiet;

            if ( my $skip_all = $parser->skip_all ) {
                $formatter->_output("skipped: $skip_all\n");
            }
            elsif ( $parser->has_problems ) {
                $self->_output_test_failure($parser);
            }
            else {
                # Successful run: optionally append elapsed time
                # (milliseconds when Time::HiRes is available).
                my $time_report = '';
                if ( $formatter->timer ) {
                    my $start_time = $parser->start_time;
                    my $end_time   = $parser->end_time;
                    if ( defined $start_time and defined $end_time ) {
                        my $elapsed = $end_time - $start_time;
                        $time_report
                          = $self->time_is_hires
                          ? sprintf( ' %8d ms', $elapsed * 1000 )
                          : sprintf( ' %8s s', $elapsed || '<1' );
                    }
                }

                $formatter->_output( $self->_make_ok_line($time_report) );
            }
        },
    };
}
=head2 C<< clear_for_close >>
=head2 C<< close_test >>
=head2 C<< header >>
=head2 C<< result >>
=cut
1;
| dj31416/JSX | extlib/lib/perl5/TAP/Formatter/Console/Session.pm | Perl | mit | 6,081 |
package DDG::Spice::IsItUp;
# ABSTRACT: Checks if a website is up
use strict;
use DDG::Spice;
use DDG::Util::SpiceConstants;
triggers query_lc => qr/^((?:is\s|))(?:https?:\/\/)?([0-9a-z\-]+(?:\.[0-9a-z\-]+)*?)(?:(\.[a-z]{2,4})|)\s(?:up|down|working|online|status)\?*$/i;
spice to => 'https://isitup.org/$1.json?callback={{callback}}';
spice proxy_cache_valid => "418 1d";
my $regex_domain = qr/\.(@{[ DDG::Util::SpiceConstants::TLD_REGEX ]})$/;
my $regex_ipv4 = qr/^(?:\d{1,3}\.){3}\d{1,3}$/;
# Handler for the trigger captures; @_ appears to hold the three groups of
# the triggers regex above: $_[0] the optional leading "is", $_[1] the
# host/domain body, $_[2] the optional TLD (with leading dot).  Returns the
# domain to query isitup.org for, or nothing to suppress the answer.
handle matches => sub {
    if ($_[2]) {
        my $root_url = $_[1];
        my $domain = $_[2];
        # return the domain and the root url if the domain is valid
        if ($domain =~ $regex_domain){
            return $root_url.$domain;
        }
    }
    else {
        # Bare IPv4 addresses are passed through unchanged.
        return $_[1] if $_[1] =~ $regex_ipv4;
        # append .com only if "is" is in the query and there's no other domain given
        if ($_[0]) {
            # Very short names are too ambiguous to guess a .com for.
            return if length($_[1]) < 5;
            return $_[1] . '.com';
        }
        # otherwise just return without '.com' -- stops false positives from showing zci
        else {
            # check for domain name in the end
            if ($_[1] =~ $regex_domain) {
                # NOTE(review): $regex_domain has only one capture group, so
                # $2 is undef here and this branch returns nothing -- likely
                # meant $1 or $_[1]; confirm the intended behavior.
                return $2;
            }
        }
    }
    return;
};
1;
| dogomedia/zeroclickinfo-spice | lib/DDG/Spice/IsItUp.pm | Perl | apache-2.0 | 1,278 |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by lib/unicore/mktables from the Unicode
# database, Version 6.1.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly.
# The mappings in the non-hash portion of this file must be modified to get the
# correct values by adding the code point ordinal number to each one that is
# numeric.
# The name this swash is to be known by, with the format of the mappings in
# the main body of the table, and what all code points missing from this file
# map to.
$utf8::SwashInfo{'ToUc'}{'format'} = 'a'; # some entries need adjustment
$utf8::SwashInfo{'ToUc'}{'specials_name'} = 'utf8::ToSpecUc'; # Name of hash of special mappings
$utf8::SwashInfo{'ToUc'}{'missing'} = '0'; # code point maps to itself
# Some code points require special handling because their mappings are each to
# multiple code points. These do not appear in the main body, but are defined
# in the hash below.
# Each key is the string of N bytes that together make up the UTF-8 encoding
# for the code point. (i.e. the same as looking at the code point's UTF-8
# under "use bytes"). Each value is the UTF-8 of the translation, for speed.
%utf8::ToSpecUc = (
"\xC3\x9F" => "\x{0053}\x{0053}", # U+00DF => 0053 0053
"\xC5\x89" => "\x{02BC}\x{004E}", # U+0149 => 02BC 004E
"\xC7\xB0" => "\x{004A}\x{030C}", # U+01F0 => 004A 030C
"\xCE\x90" => "\x{0399}\x{0308}\x{0301}", # U+0390 => 0399 0308 0301
"\xCE\xB0" => "\x{03A5}\x{0308}\x{0301}", # U+03B0 => 03A5 0308 0301
"\xD6\x87" => "\x{0535}\x{0552}", # U+0587 => 0535 0552
"\xE1\xBA\x96" => "\x{0048}\x{0331}", # U+1E96 => 0048 0331
"\xE1\xBA\x97" => "\x{0054}\x{0308}", # U+1E97 => 0054 0308
"\xE1\xBA\x98" => "\x{0057}\x{030A}", # U+1E98 => 0057 030A
"\xE1\xBA\x99" => "\x{0059}\x{030A}", # U+1E99 => 0059 030A
"\xE1\xBA\x9A" => "\x{0041}\x{02BE}", # U+1E9A => 0041 02BE
"\xE1\xBD\x90" => "\x{03A5}\x{0313}", # U+1F50 => 03A5 0313
"\xE1\xBD\x92" => "\x{03A5}\x{0313}\x{0300}", # U+1F52 => 03A5 0313 0300
"\xE1\xBD\x94" => "\x{03A5}\x{0313}\x{0301}", # U+1F54 => 03A5 0313 0301
"\xE1\xBD\x96" => "\x{03A5}\x{0313}\x{0342}", # U+1F56 => 03A5 0313 0342
"\xE1\xBE\x80" => "\x{1F08}\x{0399}", # U+1F80 => 1F08 0399
"\xE1\xBE\x81" => "\x{1F09}\x{0399}", # U+1F81 => 1F09 0399
"\xE1\xBE\x82" => "\x{1F0A}\x{0399}", # U+1F82 => 1F0A 0399
"\xE1\xBE\x83" => "\x{1F0B}\x{0399}", # U+1F83 => 1F0B 0399
"\xE1\xBE\x84" => "\x{1F0C}\x{0399}", # U+1F84 => 1F0C 0399
"\xE1\xBE\x85" => "\x{1F0D}\x{0399}", # U+1F85 => 1F0D 0399
"\xE1\xBE\x86" => "\x{1F0E}\x{0399}", # U+1F86 => 1F0E 0399
"\xE1\xBE\x87" => "\x{1F0F}\x{0399}", # U+1F87 => 1F0F 0399
"\xE1\xBE\x88" => "\x{1F08}\x{0399}", # U+1F88 => 1F08 0399
"\xE1\xBE\x89" => "\x{1F09}\x{0399}", # U+1F89 => 1F09 0399
"\xE1\xBE\x8A" => "\x{1F0A}\x{0399}", # U+1F8A => 1F0A 0399
"\xE1\xBE\x8B" => "\x{1F0B}\x{0399}", # U+1F8B => 1F0B 0399
"\xE1\xBE\x8C" => "\x{1F0C}\x{0399}", # U+1F8C => 1F0C 0399
"\xE1\xBE\x8D" => "\x{1F0D}\x{0399}", # U+1F8D => 1F0D 0399
"\xE1\xBE\x8E" => "\x{1F0E}\x{0399}", # U+1F8E => 1F0E 0399
"\xE1\xBE\x8F" => "\x{1F0F}\x{0399}", # U+1F8F => 1F0F 0399
"\xE1\xBE\x90" => "\x{1F28}\x{0399}", # U+1F90 => 1F28 0399
"\xE1\xBE\x91" => "\x{1F29}\x{0399}", # U+1F91 => 1F29 0399
"\xE1\xBE\x92" => "\x{1F2A}\x{0399}", # U+1F92 => 1F2A 0399
"\xE1\xBE\x93" => "\x{1F2B}\x{0399}", # U+1F93 => 1F2B 0399
"\xE1\xBE\x94" => "\x{1F2C}\x{0399}", # U+1F94 => 1F2C 0399
"\xE1\xBE\x95" => "\x{1F2D}\x{0399}", # U+1F95 => 1F2D 0399
"\xE1\xBE\x96" => "\x{1F2E}\x{0399}", # U+1F96 => 1F2E 0399
"\xE1\xBE\x97" => "\x{1F2F}\x{0399}", # U+1F97 => 1F2F 0399
"\xE1\xBE\x98" => "\x{1F28}\x{0399}", # U+1F98 => 1F28 0399
"\xE1\xBE\x99" => "\x{1F29}\x{0399}", # U+1F99 => 1F29 0399
"\xE1\xBE\x9A" => "\x{1F2A}\x{0399}", # U+1F9A => 1F2A 0399
"\xE1\xBE\x9B" => "\x{1F2B}\x{0399}", # U+1F9B => 1F2B 0399
"\xE1\xBE\x9C" => "\x{1F2C}\x{0399}", # U+1F9C => 1F2C 0399
"\xE1\xBE\x9D" => "\x{1F2D}\x{0399}", # U+1F9D => 1F2D 0399
"\xE1\xBE\x9E" => "\x{1F2E}\x{0399}", # U+1F9E => 1F2E 0399
"\xE1\xBE\x9F" => "\x{1F2F}\x{0399}", # U+1F9F => 1F2F 0399
"\xE1\xBE\xA0" => "\x{1F68}\x{0399}", # U+1FA0 => 1F68 0399
"\xE1\xBE\xA1" => "\x{1F69}\x{0399}", # U+1FA1 => 1F69 0399
"\xE1\xBE\xA2" => "\x{1F6A}\x{0399}", # U+1FA2 => 1F6A 0399
"\xE1\xBE\xA3" => "\x{1F6B}\x{0399}", # U+1FA3 => 1F6B 0399
"\xE1\xBE\xA4" => "\x{1F6C}\x{0399}", # U+1FA4 => 1F6C 0399
"\xE1\xBE\xA5" => "\x{1F6D}\x{0399}", # U+1FA5 => 1F6D 0399
"\xE1\xBE\xA6" => "\x{1F6E}\x{0399}", # U+1FA6 => 1F6E 0399
"\xE1\xBE\xA7" => "\x{1F6F}\x{0399}", # U+1FA7 => 1F6F 0399
"\xE1\xBE\xA8" => "\x{1F68}\x{0399}", # U+1FA8 => 1F68 0399
"\xE1\xBE\xA9" => "\x{1F69}\x{0399}", # U+1FA9 => 1F69 0399
"\xE1\xBE\xAA" => "\x{1F6A}\x{0399}", # U+1FAA => 1F6A 0399
"\xE1\xBE\xAB" => "\x{1F6B}\x{0399}", # U+1FAB => 1F6B 0399
"\xE1\xBE\xAC" => "\x{1F6C}\x{0399}", # U+1FAC => 1F6C 0399
"\xE1\xBE\xAD" => "\x{1F6D}\x{0399}", # U+1FAD => 1F6D 0399
"\xE1\xBE\xAE" => "\x{1F6E}\x{0399}", # U+1FAE => 1F6E 0399
"\xE1\xBE\xAF" => "\x{1F6F}\x{0399}", # U+1FAF => 1F6F 0399
"\xE1\xBE\xB2" => "\x{1FBA}\x{0399}", # U+1FB2 => 1FBA 0399
"\xE1\xBE\xB3" => "\x{0391}\x{0399}", # U+1FB3 => 0391 0399
"\xE1\xBE\xB4" => "\x{0386}\x{0399}", # U+1FB4 => 0386 0399
"\xE1\xBE\xB6" => "\x{0391}\x{0342}", # U+1FB6 => 0391 0342
"\xE1\xBE\xB7" => "\x{0391}\x{0342}\x{0399}", # U+1FB7 => 0391 0342 0399
"\xE1\xBE\xBC" => "\x{0391}\x{0399}", # U+1FBC => 0391 0399
"\xE1\xBF\x82" => "\x{1FCA}\x{0399}", # U+1FC2 => 1FCA 0399
"\xE1\xBF\x83" => "\x{0397}\x{0399}", # U+1FC3 => 0397 0399
"\xE1\xBF\x84" => "\x{0389}\x{0399}", # U+1FC4 => 0389 0399
"\xE1\xBF\x86" => "\x{0397}\x{0342}", # U+1FC6 => 0397 0342
"\xE1\xBF\x87" => "\x{0397}\x{0342}\x{0399}", # U+1FC7 => 0397 0342 0399
"\xE1\xBF\x8C" => "\x{0397}\x{0399}", # U+1FCC => 0397 0399
"\xE1\xBF\x92" => "\x{0399}\x{0308}\x{0300}", # U+1FD2 => 0399 0308 0300
"\xE1\xBF\x93" => "\x{0399}\x{0308}\x{0301}", # U+1FD3 => 0399 0308 0301
"\xE1\xBF\x96" => "\x{0399}\x{0342}", # U+1FD6 => 0399 0342
"\xE1\xBF\x97" => "\x{0399}\x{0308}\x{0342}", # U+1FD7 => 0399 0308 0342
"\xE1\xBF\xA2" => "\x{03A5}\x{0308}\x{0300}", # U+1FE2 => 03A5 0308 0300
"\xE1\xBF\xA3" => "\x{03A5}\x{0308}\x{0301}", # U+1FE3 => 03A5 0308 0301
"\xE1\xBF\xA4" => "\x{03A1}\x{0313}", # U+1FE4 => 03A1 0313
"\xE1\xBF\xA6" => "\x{03A5}\x{0342}", # U+1FE6 => 03A5 0342
"\xE1\xBF\xA7" => "\x{03A5}\x{0308}\x{0342}", # U+1FE7 => 03A5 0308 0342
"\xE1\xBF\xB2" => "\x{1FFA}\x{0399}", # U+1FF2 => 1FFA 0399
"\xE1\xBF\xB3" => "\x{03A9}\x{0399}", # U+1FF3 => 03A9 0399
"\xE1\xBF\xB4" => "\x{038F}\x{0399}", # U+1FF4 => 038F 0399
"\xE1\xBF\xB6" => "\x{03A9}\x{0342}", # U+1FF6 => 03A9 0342
"\xE1\xBF\xB7" => "\x{03A9}\x{0342}\x{0399}", # U+1FF7 => 03A9 0342 0399
"\xE1\xBF\xBC" => "\x{03A9}\x{0399}", # U+1FFC => 03A9 0399
"\xEF\xAC\x80" => "\x{0046}\x{0046}", # U+FB00 => 0046 0046
"\xEF\xAC\x81" => "\x{0046}\x{0049}", # U+FB01 => 0046 0049
"\xEF\xAC\x82" => "\x{0046}\x{004C}", # U+FB02 => 0046 004C
"\xEF\xAC\x83" => "\x{0046}\x{0046}\x{0049}", # U+FB03 => 0046 0046 0049
"\xEF\xAC\x84" => "\x{0046}\x{0046}\x{004C}", # U+FB04 => 0046 0046 004C
"\xEF\xAC\x85" => "\x{0053}\x{0054}", # U+FB05 => 0053 0054
"\xEF\xAC\x86" => "\x{0053}\x{0054}", # U+FB06 => 0053 0054
"\xEF\xAC\x93" => "\x{0544}\x{0546}", # U+FB13 => 0544 0546
"\xEF\xAC\x94" => "\x{0544}\x{0535}", # U+FB14 => 0544 0535
"\xEF\xAC\x95" => "\x{0544}\x{053B}", # U+FB15 => 0544 053B
"\xEF\xAC\x96" => "\x{054E}\x{0546}", # U+FB16 => 054E 0546
"\xEF\xAC\x97" => "\x{0544}\x{053D}", # U+FB17 => 0544 053D
);
return <<'END';
0061 007A 65
00B5 924
00E0 00F6 192
00F8 00FE 216
00FF 376
0101 256
0103 258
0105 260
0107 262
0109 264
010B 266
010D 268
010F 270
0111 272
0113 274
0115 276
0117 278
0119 280
011B 282
011D 284
011F 286
0121 288
0123 290
0125 292
0127 294
0129 296
012B 298
012D 300
012F 302
0131 73
0133 306
0135 308
0137 310
013A 313
013C 315
013E 317
0140 319
0142 321
0144 323
0146 325
0148 327
014B 330
014D 332
014F 334
0151 336
0153 338
0155 340
0157 342
0159 344
015B 346
015D 348
015F 350
0161 352
0163 354
0165 356
0167 358
0169 360
016B 362
016D 364
016F 366
0171 368
0173 370
0175 372
0177 374
017A 377
017C 379
017E 381
017F 83
0180 579
0183 386
0185 388
0188 391
018C 395
0192 401
0195 502
0199 408
019A 573
019E 544
01A1 416
01A3 418
01A5 420
01A8 423
01AD 428
01B0 431
01B4 435
01B6 437
01B9 440
01BD 444
01BF 503
01C5 452
01C6 452
01C8 455
01C9 455
01CB 458
01CC 458
01CE 461
01D0 463
01D2 465
01D4 467
01D6 469
01D8 471
01DA 473
01DC 475
01DD 398
01DF 478
01E1 480
01E3 482
01E5 484
01E7 486
01E9 488
01EB 490
01ED 492
01EF 494
01F2 497
01F3 497
01F5 500
01F9 504
01FB 506
01FD 508
01FF 510
0201 512
0203 514
0205 516
0207 518
0209 520
020B 522
020D 524
020F 526
0211 528
0213 530
0215 532
0217 534
0219 536
021B 538
021D 540
021F 542
0223 546
0225 548
0227 550
0229 552
022B 554
022D 556
022F 558
0231 560
0233 562
023C 571
023F 0240 11390
0242 577
0247 582
0249 584
024B 586
024D 588
024F 590
0250 11375
0251 11373
0252 11376
0253 385
0254 390
0256 0257 393
0259 399
025B 400
0260 403
0263 404
0265 42893
0266 42922
0268 407
0269 406
026B 11362
026F 412
0271 11374
0272 413
0275 415
027D 11364
0280 422
0283 425
0288 430
0289 580
028A 028B 433
028C 581
0292 439
0345 921
0371 880
0373 882
0377 886
037B 037D 1021
03AC 902
03AD 03AF 904
03B1 03C1 913
03C2 931
03C3 03CB 931
03CC 908
03CD 03CE 910
03D0 914
03D1 920
03D5 934
03D6 928
03D7 975
03D9 984
03DB 986
03DD 988
03DF 990
03E1 992
03E3 994
03E5 996
03E7 998
03E9 1000
03EB 1002
03ED 1004
03EF 1006
03F0 922
03F1 929
03F2 1017
03F5 917
03F8 1015
03FB 1018
0430 044F 1040
0450 045F 1024
0461 1120
0463 1122
0465 1124
0467 1126
0469 1128
046B 1130
046D 1132
046F 1134
0471 1136
0473 1138
0475 1140
0477 1142
0479 1144
047B 1146
047D 1148
047F 1150
0481 1152
048B 1162
048D 1164
048F 1166
0491 1168
0493 1170
0495 1172
0497 1174
0499 1176
049B 1178
049D 1180
049F 1182
04A1 1184
04A3 1186
04A5 1188
04A7 1190
04A9 1192
04AB 1194
04AD 1196
04AF 1198
04B1 1200
04B3 1202
04B5 1204
04B7 1206
04B9 1208
04BB 1210
04BD 1212
04BF 1214
04C2 1217
04C4 1219
04C6 1221
04C8 1223
04CA 1225
04CC 1227
04CE 1229
04CF 1216
04D1 1232
04D3 1234
04D5 1236
04D7 1238
04D9 1240
04DB 1242
04DD 1244
04DF 1246
04E1 1248
04E3 1250
04E5 1252
04E7 1254
04E9 1256
04EB 1258
04ED 1260
04EF 1262
04F1 1264
04F3 1266
04F5 1268
04F7 1270
04F9 1272
04FB 1274
04FD 1276
04FF 1278
0501 1280
0503 1282
0505 1284
0507 1286
0509 1288
050B 1290
050D 1292
050F 1294
0511 1296
0513 1298
0515 1300
0517 1302
0519 1304
051B 1306
051D 1308
051F 1310
0521 1312
0523 1314
0525 1316
0527 1318
0561 0586 1329
1D79 42877
1D7D 11363
1E01 7680
1E03 7682
1E05 7684
1E07 7686
1E09 7688
1E0B 7690
1E0D 7692
1E0F 7694
1E11 7696
1E13 7698
1E15 7700
1E17 7702
1E19 7704
1E1B 7706
1E1D 7708
1E1F 7710
1E21 7712
1E23 7714
1E25 7716
1E27 7718
1E29 7720
1E2B 7722
1E2D 7724
1E2F 7726
1E31 7728
1E33 7730
1E35 7732
1E37 7734
1E39 7736
1E3B 7738
1E3D 7740
1E3F 7742
1E41 7744
1E43 7746
1E45 7748
1E47 7750
1E49 7752
1E4B 7754
1E4D 7756
1E4F 7758
1E51 7760
1E53 7762
1E55 7764
1E57 7766
1E59 7768
1E5B 7770
1E5D 7772
1E5F 7774
1E61 7776
1E63 7778
1E65 7780
1E67 7782
1E69 7784
1E6B 7786
1E6D 7788
1E6F 7790
1E71 7792
1E73 7794
1E75 7796
1E77 7798
1E79 7800
1E7B 7802
1E7D 7804
1E7F 7806
1E81 7808
1E83 7810
1E85 7812
1E87 7814
1E89 7816
1E8B 7818
1E8D 7820
1E8F 7822
1E91 7824
1E93 7826
1E95 7828
1E9B 7776
1EA1 7840
1EA3 7842
1EA5 7844
1EA7 7846
1EA9 7848
1EAB 7850
1EAD 7852
1EAF 7854
1EB1 7856
1EB3 7858
1EB5 7860
1EB7 7862
1EB9 7864
1EBB 7866
1EBD 7868
1EBF 7870
1EC1 7872
1EC3 7874
1EC5 7876
1EC7 7878
1EC9 7880
1ECB 7882
1ECD 7884
1ECF 7886
1ED1 7888
1ED3 7890
1ED5 7892
1ED7 7894
1ED9 7896
1EDB 7898
1EDD 7900
1EDF 7902
1EE1 7904
1EE3 7906
1EE5 7908
1EE7 7910
1EE9 7912
1EEB 7914
1EED 7916
1EEF 7918
1EF1 7920
1EF3 7922
1EF5 7924
1EF7 7926
1EF9 7928
1EFB 7930
1EFD 7932
1EFF 7934
1F00 1F07 7944
1F10 1F15 7960
1F20 1F27 7976
1F30 1F37 7992
1F40 1F45 8008
1F51 8025
1F53 8027
1F55 8029
1F57 8031
1F60 1F67 8040
1F70 1F71 8122
1F72 1F75 8136
1F76 1F77 8154
1F78 1F79 8184
1F7A 1F7B 8170
1F7C 1F7D 8186
1F80 1F87 8072
1F90 1F97 8088
1FA0 1FA7 8104
1FB0 1FB1 8120
1FB3 8124
1FBE 921
1FC3 8140
1FD0 1FD1 8152
1FE0 1FE1 8168
1FE5 8172
1FF3 8188
214E 8498
2170 217F 8544
2184 8579
24D0 24E9 9398
2C30 2C5E 11264
2C61 11360
2C65 570
2C66 574
2C68 11367
2C6A 11369
2C6C 11371
2C73 11378
2C76 11381
2C81 11392
2C83 11394
2C85 11396
2C87 11398
2C89 11400
2C8B 11402
2C8D 11404
2C8F 11406
2C91 11408
2C93 11410
2C95 11412
2C97 11414
2C99 11416
2C9B 11418
2C9D 11420
2C9F 11422
2CA1 11424
2CA3 11426
2CA5 11428
2CA7 11430
2CA9 11432
2CAB 11434
2CAD 11436
2CAF 11438
2CB1 11440
2CB3 11442
2CB5 11444
2CB7 11446
2CB9 11448
2CBB 11450
2CBD 11452
2CBF 11454
2CC1 11456
2CC3 11458
2CC5 11460
2CC7 11462
2CC9 11464
2CCB 11466
2CCD 11468
2CCF 11470
2CD1 11472
2CD3 11474
2CD5 11476
2CD7 11478
2CD9 11480
2CDB 11482
2CDD 11484
2CDF 11486
2CE1 11488
2CE3 11490
2CEC 11499
2CEE 11501
2CF3 11506
2D00 2D25 4256
2D27 4295
2D2D 4301
A641 42560
A643 42562
A645 42564
A647 42566
A649 42568
A64B 42570
A64D 42572
A64F 42574
A651 42576
A653 42578
A655 42580
A657 42582
A659 42584
A65B 42586
A65D 42588
A65F 42590
A661 42592
A663 42594
A665 42596
A667 42598
A669 42600
A66B 42602
A66D 42604
A681 42624
A683 42626
A685 42628
A687 42630
A689 42632
A68B 42634
A68D 42636
A68F 42638
A691 42640
A693 42642
A695 42644
A697 42646
A723 42786
A725 42788
A727 42790
A729 42792
A72B 42794
A72D 42796
A72F 42798
A733 42802
A735 42804
A737 42806
A739 42808
A73B 42810
A73D 42812
A73F 42814
A741 42816
A743 42818
A745 42820
A747 42822
A749 42824
A74B 42826
A74D 42828
A74F 42830
A751 42832
A753 42834
A755 42836
A757 42838
A759 42840
A75B 42842
A75D 42844
A75F 42846
A761 42848
A763 42850
A765 42852
A767 42854
A769 42856
A76B 42858
A76D 42860
A76F 42862
A77A 42873
A77C 42875
A77F 42878
A781 42880
A783 42882
A785 42884
A787 42886
A78C 42891
A791 42896
A793 42898
A7A1 42912
A7A3 42914
A7A5 42916
A7A7 42918
A7A9 42920
FF41 FF5A 65313
10428 1044F 66560
END
| efortuna/AndroidSDKClone | ndk_experimental/prebuilt/linux-x86_64/lib/perl5/5.16.2/unicore/To/Uc.pl | Perl | apache-2.0 | 15,445 |
package Agar::Checkbox;
use strict;
use Agar;
1;
__END__
=head1 NAME
Agar::Checkbox - a 2-state checkbox widget
=head1 SYNOPSIS
use Agar;
use Agar::Checkbox;
Agar::Checkbox->new($parent);
=head1 DESCRIPTION
Extends Agar::Widget and Agar::Object. Please see AG_Checkbox(3) for a
full explanation of what its methods do and what bindings and events
it defines, if any.
=head1 METHODS
=over 4
=item B<$widget = Agar::Checkbox-E<gt>new($parent, { flags })>
Constructor.
Recognised flags include:
=over 4
=item C<set>
Z<>
=back
=item B<$widget-E<gt>toggle()>
=back
=head1 AUTHOR
Mat Sutcliffe E<lt>F<oktal@gmx.co.uk>E<gt>
=head1 MAINTAINER
Julien Nadeau E<lt>F<vedge@hypertriton.com>E<gt>
=head1 COPYRIGHT
Copyright (c) 2009 Hypertriton, Inc. All rights reserved.
This program is free software. You can redistribute it and/or modify it
under the same terms as Perl itself.
=head1 SEE ALSO
L<Agar>, L<Agar::Widget>, L<Agar::Object>, L<AG_Checkbox(3)>
=cut
| adsr/agar | p5-Agar/Agar/Checkbox.pm | Perl | bsd-2-clause | 988 |
#! /usr/bin/env perl
# Copyright 2005-2016 The OpenSSL Project Authors. All Rights Reserved.
#
# Licensed under the OpenSSL license (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.openssl.org/source/license.html
#
# ====================================================================
# Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
# project. Rights for redistribution and usage in source and binary
# forms are granted according to the OpenSSL license.
# ====================================================================
#
# sha256/512_block procedure for x86_64.
#
# 40% improvement over compiler-generated code on Opteron. On EM64T
# sha256 was observed to run >80% faster and sha512 - >40%. No magical
# tricks, just straight implementation... I really wonder why gcc
# [being armed with inline assembler] fails to generate as fast code.
# The only thing which is cool about this module is that it's very
# same instruction sequence used for both SHA-256 and SHA-512. In
# former case the instructions operate on 32-bit operands, while in
# latter - on 64-bit ones. All I had to do is to get one flavor right,
# the other one passed the test right away:-)
#
# sha256_block runs in ~1005 cycles on Opteron, which gives you
# asymptotic performance of 64*1000/1005=63.7MBps times CPU clock
# frequency in GHz. sha512_block runs in ~1275 cycles, which results
# in 128*1000/1275=100MBps per GHz. Is there room for improvement?
# Well, if you compare it to IA-64 implementation, which maintains
# X[16] in register bank[!], tends to 4 instructions per CPU clock
# cycle and runs in 1003 cycles, 1275 is very good result for 3-way
# issue Opteron pipeline and X[16] maintained in memory. So that *if*
# there is a way to improve it, *then* the only way would be to try to
# offload X[16] updates to SSE unit, but that would require "deeper"
# loop unroll, which in turn would naturally cause size blow-up, not
# to mention increased complexity! And once again, only *if* it's
# actually possible to noticeably improve overall ILP, instruction
# level parallelism, on a given CPU implementation in this case.
#
# Special note on Intel EM64T. While Opteron CPU exhibits perfect
# performance ratio of 1.5 between 64- and 32-bit flavors [see above],
# [currently available] EM64T CPUs apparently are far from it. On the
# contrary, 64-bit version, sha512_block, is ~30% *slower* than 32-bit
# sha256_block:-( This is presumably because 64-bit shifts/rotates
# apparently are not atomic instructions, but implemented in microcode.
#
# May 2012.
#
# Optimization including one of Pavel Semjanov's ideas, alternative
# Maj, resulted in >=5% improvement on most CPUs, +20% SHA256 and
# unfortunately -2% SHA512 on P4 [which nobody should care about
# that much].
#
# June 2012.
#
# Add SIMD code paths, see below for improvement coefficients. SSSE3
# code path was not attempted for SHA512, because improvement is not
# estimated to be high enough, noticeably less than 9%, to justify
# the effort, not on pre-AVX processors. [Obviously with exclusion
# for VIA Nano, but it has SHA512 instruction that is faster and
# should be used instead.] For reference, corresponding estimated
# upper limit for improvement for SSSE3 SHA256 is 28%. The fact that
# higher coefficients are observed on VIA Nano and Bulldozer has more
# to do with specifics of their architecture [which is topic for
# separate discussion].
#
# November 2012.
#
# Add AVX2 code path. Two consecutive input blocks are loaded to
# 256-bit %ymm registers, with data from first block to least
# significant 128-bit halves and data from second to most significant.
# The data is then processed with same SIMD instruction sequence as
# for AVX, but with %ymm as operands. Side effect is increased stack
# frame, 448 additional bytes in SHA256 and 1152 in SHA512, and 1.2KB
# code size increase.
#
# March 2014.
#
# Add support for Intel SHA Extensions.
######################################################################
# Current performance in cycles per processed byte (less is better):
#
# SHA256 SSSE3 AVX/XOP(*) SHA512 AVX/XOP(*)
#
# AMD K8 14.9 - - 9.57 -
# P4 17.3 - - 30.8 -
# Core 2 15.6 13.8(+13%) - 9.97 -
# Westmere 14.8 12.3(+19%) - 9.58 -
# Sandy Bridge 17.4 14.2(+23%) 11.6(+50%(**)) 11.2 8.10(+38%(**))
# Ivy Bridge 12.6 10.5(+20%) 10.3(+22%) 8.17 7.22(+13%)
# Haswell 12.2 9.28(+31%) 7.80(+56%) 7.66 5.40(+42%)
# Skylake 11.4 9.03(+26%) 7.70(+48%) 7.25 5.20(+40%)
# Bulldozer 21.1 13.6(+54%) 13.6(+54%(***)) 13.5 8.58(+57%)
# Ryzen 11.0 9.02(+22%) 2.05(+440%) 7.05 5.67(+20%)
# VIA Nano 23.0 16.5(+39%) - 14.7 -
# Atom 23.0 18.9(+22%) - 14.7 -
# Silvermont 27.4 20.6(+33%) - 17.5 -
# Knights L 27.4 21.0(+30%) 19.6(+40%) 17.5 12.8(+37%)
# Goldmont 18.9 14.3(+32%) 4.16(+350%) 12.0 -
#
# (*) whichever best applicable, including SHAEXT;
# (**) switch from ror to shrd stands for fair share of improvement;
# (***) execution time is fully determined by remaining integer-only
# part, body_00_15; reducing the amount of SIMD instructions
# below certain limit makes no difference/sense; to conserve
# space SHA256 XOP code path is therefore omitted;
#
# Modified from upstream OpenSSL to remove the XOP code.
$flavour = shift;
$output = shift;
if ($flavour =~ /\./) { $output = $flavour; undef $flavour; }
$win64=0; $win64=1 if ($flavour =~ /[nm]asm|mingw64/ || $output =~ /\.asm$/);
$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
( $xlate="${dir}x86_64-xlate.pl" and -f $xlate ) or
( $xlate="${dir}../../../perlasm/x86_64-xlate.pl" and -f $xlate) or
die "can't locate x86_64-xlate.pl";
# In upstream, this is controlled by shelling out to the compiler to check
# versions, but BoringSSL is intended to be used with pre-generated perlasm
# output, so this isn't useful anyway.
#
# TODO(davidben): Enable AVX2 code after testing by setting $avx to 2. Is it
# necessary to disable AVX2 code when SHA Extensions code is disabled? Upstream
# did not tie them together until after $shaext was added.
$avx = 1;
# TODO(davidben): Consider enabling the Intel SHA Extensions code once it's
# been tested.
$shaext=0; ### set to zero if compiling for 1.0.1
$avx=1 if (!$shaext && $avx);
open OUT,"| \"$^X\" \"$xlate\" $flavour \"$output\"";
*STDOUT=*OUT;
# Parameterize the generator for SHA-512 vs SHA-256 based on the requested
# output file name.  As the header comment explains, both digests use the
# very same instruction sequence; only the word size ($SZ), the register
# set, the Sigma/sigma rotation-shift constants, and the round count differ.
if ($output =~ /512/) {
    $func="sha512_block_data_order";
    $TABLE="K512";
    $SZ=8;
    @ROT=($A,$B,$C,$D,$E,$F,$G,$H)=("%rax","%rbx","%rcx","%rdx",
                                    "%r8", "%r9", "%r10","%r11");
    ($T1,$a0,$a1,$a2,$a3)=("%r12","%r13","%r14","%r15","%rdi");
    @Sigma0=(28,34,39);
    @Sigma1=(14,18,41);
    @sigma0=(1, 8, 7);
    @sigma1=(19,61, 6);
    $rounds=80;
} else {
    $func="sha256_block_data_order";
    $TABLE="K256";
    $SZ=4;
    @ROT=($A,$B,$C,$D,$E,$F,$G,$H)=("%eax","%ebx","%ecx","%edx",
                                    "%r8d","%r9d","%r10d","%r11d");
    ($T1,$a0,$a1,$a2,$a3)=("%r12d","%r13d","%r14d","%r15d","%edi");
    @Sigma0=( 2,13,22);
    @Sigma1=( 6,11,25);
    @sigma0=( 7,18, 3);
    @sigma1=(17,19,10);
    $rounds=64;
}

# Argument registers and table pointer used throughout the generated code.
$ctx="%rdi";    # 1st arg, zapped by $a3
$inp="%rsi";    # 2nd arg
$Tbl="%rbp";

# Stack frame layout: 16 message-schedule slots of $SZ bytes each, followed
# by four 8-byte save slots (ctx, inp, end, original rsp).
$_ctx="16*$SZ+0*8(%rsp)";
$_inp="16*$SZ+1*8(%rsp)";
$_end="16*$SZ+2*8(%rsp)";
$_rsp="`16*$SZ+3*8`(%rsp)";
$framesz="16*$SZ+4*8";
# Emit one round of the integer-only compression loop for rounds 0..15.
# Arguments: round index $i and the eight working variables in their
# current rotation. On entry the caller has loaded $T1 with X[i], $a0 with
# e and $a1 with a (pre-rotated copies); $a3 carries b^c from the previous
# round for the Maj computation. Sigma0/Sigma1 are each realized as three
# partial ror's interleaved with the Ch/Maj arithmetic for ILP.
sub ROUND_00_15()
{ my ($i,$a,$b,$c,$d,$e,$f,$g,$h) = @_;
  my $STRIDE=$SZ;
# Every 16/$SZ rounds skip an extra 16 bytes of the doubled K table.
     $STRIDE += 16 if ($i%(16/$SZ)==(16/$SZ-1));
$code.=<<___;
	ror	\$`$Sigma1[2]-$Sigma1[1]`,$a0
	mov	$f,$a2
	xor	$e,$a0
	ror	\$`$Sigma0[2]-$Sigma0[1]`,$a1
	xor	$g,$a2			# f^g
	mov	$T1,`$SZ*($i&0xf)`(%rsp)
	xor	$a,$a1
	and	$e,$a2			# (f^g)&e
	ror	\$`$Sigma1[1]-$Sigma1[0]`,$a0
	add	$h,$T1			# T1+=h
	xor	$g,$a2			# Ch(e,f,g)=((f^g)&e)^g
	ror	\$`$Sigma0[1]-$Sigma0[0]`,$a1
	xor	$e,$a0
	add	$a2,$T1			# T1+=Ch(e,f,g)
	mov	$a,$a2
	add	($Tbl),$T1		# T1+=K[round]
	xor	$a,$a1
	xor	$b,$a2			# a^b, b^c in next round
	ror	\$$Sigma1[0],$a0	# Sigma1(e)
	mov	$b,$h
	and	$a2,$a3
	ror	\$$Sigma0[0],$a1	# Sigma0(a)
	add	$a0,$T1			# T1+=Sigma1(e)
	xor	$a3,$h			# h=Maj(a,b,c)=Ch(a^b,c,b)
	add	$T1,$d			# d+=T1
	add	$T1,$h			# h+=T1
	lea	$STRIDE($Tbl),$Tbl	# round++
___
# h+=Sigma0(a) is modulo-scheduled: emitted here for rounds 0..14, and by
# the caller (or the next ROUND_16_XX) for the last round of a group.
$code.=<<___ if ($i<15);
	add	$a1,$h			# h+=Sigma0(a)
___
# Swap the Ch/Maj temporaries so next round's "b^c" is already in $a3.
($a2,$a3) = ($a3,$a2);
}
# Emit one round for rounds 16 and up: first extend the message schedule
# in place on the stack — X[i] = sigma1(X[i-2]) + X[i-7] + sigma0(X[i-15])
# + X[i-16], all indices mod 16 — then fall through to ROUND_00_15 for the
# compression step. sigma0/sigma1 are each built from two ror's and a shr.
sub ROUND_16_XX()
{ my ($i,$a,$b,$c,$d,$e,$f,$g,$h) = @_;
$code.=<<___;
	mov	`$SZ*(($i+1)&0xf)`(%rsp),$a0
	mov	`$SZ*(($i+14)&0xf)`(%rsp),$a2
	mov	$a0,$T1
	ror	\$`$sigma0[1]-$sigma0[0]`,$a0
	add	$a1,$a			# modulo-scheduled h+=Sigma0(a)
	mov	$a2,$a1
	ror	\$`$sigma1[1]-$sigma1[0]`,$a2
	xor	$T1,$a0
	shr	\$$sigma0[2],$T1
	ror	\$$sigma0[0],$a0
	xor	$a1,$a2
	shr	\$$sigma1[2],$a1
	ror	\$$sigma1[0],$a2
	xor	$a0,$T1			# sigma0(X[(i+1)&0xf])
	xor	$a1,$a2			# sigma1(X[(i+14)&0xf])
	add	`$SZ*(($i+9)&0xf)`(%rsp),$T1
	add	`$SZ*($i&0xf)`(%rsp),$T1
	mov	$e,$a0
	add	$a2,$T1
	mov	$a,$a1
___
	# With $T1/$a0/$a1 primed, the round body is shared with rounds 0..15.
	&ROUND_00_15(@_);
}
# Function entry point and the integer-only (no SIMD) code path. The
# prologue dispatches to SHAEXT/AVX2/AVX/SSSE3 variants based on
# OPENSSL_ia32cap_P feature bits when those paths are compiled in.
$code=<<___;
.text
.extern	OPENSSL_ia32cap_P
.globl	$func
.type	$func,\@function,3
.align	16
$func:
.cfi_startproc
___
# Load the three capability words only when some SIMD path may use them.
$code.=<<___ if ($SZ==4 || $avx);
	leaq	OPENSSL_ia32cap_P(%rip),%r11
	mov	0(%r11),%r9d
	mov	4(%r11),%r10d
	mov	8(%r11),%r11d
___
$code.=<<___ if ($SZ==4 && $shaext);
	test	\$`1<<29`,%r11d		# check for SHA
	jnz	_shaext_shortcut
___
	# XOP codepath removed.
$code.=<<___ if ($avx>1);
	and	\$`1<<8|1<<5|1<<3`,%r11d	# check for BMI2+AVX2+BMI1
	cmp	\$`1<<8|1<<5|1<<3`,%r11d
	je	.Lavx2_shortcut
___
$code.=<<___ if ($avx);
	and	\$`1<<30`,%r9d		# mask "Intel CPU" bit
	and	\$`1<<28|1<<9`,%r10d	# mask AVX and SSSE3 bits
	or	%r9d,%r10d
	cmp	\$`1<<28|1<<9|1<<30`,%r10d
	je	.Lavx_shortcut
___
$code.=<<___ if ($SZ==4);
	test	\$`1<<9`,%r10d
	jnz	.Lssse3_shortcut
___
# Integer path proper: save callee-saved registers, carve out the aligned
# stack frame and stash ctx/inp/end/original-rsp in the frame's tail slots.
$code.=<<___;
	mov	%rsp,%rax		# copy %rsp
.cfi_def_cfa_register	%rax
	push	%rbx
.cfi_push	%rbx
	push	%rbp
.cfi_push	%rbp
	push	%r12
.cfi_push	%r12
	push	%r13
.cfi_push	%r13
	push	%r14
.cfi_push	%r14
	push	%r15
.cfi_push	%r15
	shl	\$4,%rdx		# num*16
	sub	\$$framesz,%rsp
	lea	($inp,%rdx,$SZ),%rdx	# inp+num*16*$SZ
	and	\$-64,%rsp		# align stack frame
	mov	$ctx,$_ctx		# save ctx, 1st arg
	mov	$inp,$_inp		# save inp, 2nd arh
	mov	%rdx,$_end		# save end pointer, "3rd" arg
	mov	%rax,$_rsp		# save copy of %rsp
.cfi_cfa_expression	$_rsp,deref,+8
.Lprologue:
	mov	$SZ*0($ctx),$A
	mov	$SZ*1($ctx),$B
	mov	$SZ*2($ctx),$C
	mov	$SZ*3($ctx),$D
	mov	$SZ*4($ctx),$E
	mov	$SZ*5($ctx),$F
	mov	$SZ*6($ctx),$G
	mov	$SZ*7($ctx),$H
	jmp	.Lloop
.align	16
.Lloop:
	mov	$B,$a3
	lea	$TABLE(%rip),$Tbl
	xor	$C,$a3			# magic
___
# Rounds 0..15: load+byteswap each message word, then one round each,
# rotating the register assignment after every round.
for($i=0;$i<16;$i++) {
	$code.="	mov	$SZ*$i($inp),$T1\n";
	$code.="	mov	@ROT[4],$a0\n";
	$code.="	mov	@ROT[0],$a1\n";
	$code.="	bswap	$T1\n";
	&ROUND_00_15($i,@ROT);
	unshift(@ROT,pop(@ROT));
}
$code.=<<___;
	jmp	.Lrounds_16_xx
.align	16
.Lrounds_16_xx:
___
# Emit 16 schedule-extending rounds; the loop below re-executes this body
# until the sentinel byte at the end of the K table is reached.
for(;$i<32;$i++) {
	&ROUND_16_XX($i,@ROT);
	unshift(@ROT,pop(@ROT));
}
$code.=<<___;
	cmpb	\$0,`$SZ-1`($Tbl)
	jnz	.Lrounds_16_xx
	mov	$_ctx,$ctx
	add	$a1,$A			# modulo-scheduled h+=Sigma0(a)
	lea	16*$SZ($inp),$inp
	add	$SZ*0($ctx),$A
	add	$SZ*1($ctx),$B
	add	$SZ*2($ctx),$C
	add	$SZ*3($ctx),$D
	add	$SZ*4($ctx),$E
	add	$SZ*5($ctx),$F
	add	$SZ*6($ctx),$G
	add	$SZ*7($ctx),$H
	cmp	$_end,$inp
	mov	$A,$SZ*0($ctx)
	mov	$B,$SZ*1($ctx)
	mov	$C,$SZ*2($ctx)
	mov	$D,$SZ*3($ctx)
	mov	$E,$SZ*4($ctx)
	mov	$F,$SZ*5($ctx)
	mov	$G,$SZ*6($ctx)
	mov	$H,$SZ*7($ctx)
	jb	.Lloop
	mov	$_rsp,%rsi
.cfi_def_cfa	%rsi,8
	mov	-48(%rsi),%r15
.cfi_restore	%r15
	mov	-40(%rsi),%r14
.cfi_restore	%r14
	mov	-32(%rsi),%r13
.cfi_restore	%r13
	mov	-24(%rsi),%r12
.cfi_restore	%r12
	mov	-16(%rsi),%rbp
.cfi_restore	%rbp
	mov	-8(%rsi),%rbx
.cfi_restore	%rbx
	lea	(%rsi),%rsp
.cfi_def_cfa_register	%rsp
.Lepilogue:
	ret
.cfi_endproc
.size	$func,.-$func
___
if ($SZ==4) {
$code.=<<___;
.align 64
.type $TABLE,\@object
$TABLE:
.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
.long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff
.long 0x03020100,0x0b0a0908,0xffffffff,0xffffffff
.long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908
.long 0xffffffff,0xffffffff,0x03020100,0x0b0a0908
.asciz "SHA256 block transform for x86_64, CRYPTOGAMS by <appro\@openssl.org>"
___
} else {
$code.=<<___;
.align 64
.type $TABLE,\@object
$TABLE:
.quad 0x428a2f98d728ae22,0x7137449123ef65cd
.quad 0x428a2f98d728ae22,0x7137449123ef65cd
.quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc
.quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc
.quad 0x3956c25bf348b538,0x59f111f1b605d019
.quad 0x3956c25bf348b538,0x59f111f1b605d019
.quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118
.quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118
.quad 0xd807aa98a3030242,0x12835b0145706fbe
.quad 0xd807aa98a3030242,0x12835b0145706fbe
.quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2
.quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2
.quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1
.quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1
.quad 0x9bdc06a725c71235,0xc19bf174cf692694
.quad 0x9bdc06a725c71235,0xc19bf174cf692694
.quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3
.quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3
.quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65
.quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65
.quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483
.quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483
.quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5
.quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5
.quad 0x983e5152ee66dfab,0xa831c66d2db43210
.quad 0x983e5152ee66dfab,0xa831c66d2db43210
.quad 0xb00327c898fb213f,0xbf597fc7beef0ee4
.quad 0xb00327c898fb213f,0xbf597fc7beef0ee4
.quad 0xc6e00bf33da88fc2,0xd5a79147930aa725
.quad 0xc6e00bf33da88fc2,0xd5a79147930aa725
.quad 0x06ca6351e003826f,0x142929670a0e6e70
.quad 0x06ca6351e003826f,0x142929670a0e6e70
.quad 0x27b70a8546d22ffc,0x2e1b21385c26c926
.quad 0x27b70a8546d22ffc,0x2e1b21385c26c926
.quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df
.quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df
.quad 0x650a73548baf63de,0x766a0abb3c77b2a8
.quad 0x650a73548baf63de,0x766a0abb3c77b2a8
.quad 0x81c2c92e47edaee6,0x92722c851482353b
.quad 0x81c2c92e47edaee6,0x92722c851482353b
.quad 0xa2bfe8a14cf10364,0xa81a664bbc423001
.quad 0xa2bfe8a14cf10364,0xa81a664bbc423001
.quad 0xc24b8b70d0f89791,0xc76c51a30654be30
.quad 0xc24b8b70d0f89791,0xc76c51a30654be30
.quad 0xd192e819d6ef5218,0xd69906245565a910
.quad 0xd192e819d6ef5218,0xd69906245565a910
.quad 0xf40e35855771202a,0x106aa07032bbd1b8
.quad 0xf40e35855771202a,0x106aa07032bbd1b8
.quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53
.quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53
.quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8
.quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8
.quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb
.quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb
.quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3
.quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3
.quad 0x748f82ee5defb2fc,0x78a5636f43172f60
.quad 0x748f82ee5defb2fc,0x78a5636f43172f60
.quad 0x84c87814a1f0ab72,0x8cc702081a6439ec
.quad 0x84c87814a1f0ab72,0x8cc702081a6439ec
.quad 0x90befffa23631e28,0xa4506cebde82bde9
.quad 0x90befffa23631e28,0xa4506cebde82bde9
.quad 0xbef9a3f7b2c67915,0xc67178f2e372532b
.quad 0xbef9a3f7b2c67915,0xc67178f2e372532b
.quad 0xca273eceea26619c,0xd186b8c721c0c207
.quad 0xca273eceea26619c,0xd186b8c721c0c207
.quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178
.quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178
.quad 0x06f067aa72176fba,0x0a637dc5a2c898a6
.quad 0x06f067aa72176fba,0x0a637dc5a2c898a6
.quad 0x113f9804bef90dae,0x1b710b35131c471b
.quad 0x113f9804bef90dae,0x1b710b35131c471b
.quad 0x28db77f523047d84,0x32caab7b40c72493
.quad 0x28db77f523047d84,0x32caab7b40c72493
.quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c
.quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c
.quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a
.quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a
.quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817
.quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817
.quad 0x0001020304050607,0x08090a0b0c0d0e0f
.quad 0x0001020304050607,0x08090a0b0c0d0e0f
.asciz "SHA512 block transform for x86_64, CRYPTOGAMS by <appro\@openssl.org>"
___
}
######################################################################
# SIMD code paths
#
# Intel SHA Extensions code path (SHA-256 only). Currently dead code:
# $shaext is forced to 0 above, so this block is never emitted.
if ($SZ==4 && $shaext) {{{
######################################################################
# Intel SHA Extensions implementation of SHA256 update function.
#
my ($ctx,$inp,$num,$Tbl)=("%rdi","%rsi","%rdx","%rcx");
# $ABEF/$CDGH hold the state in the interleaved order sha256rnds2 expects;
# *_SAVE keep the pre-block state, @MSG are the four message-word vectors.
my ($Wi,$ABEF,$CDGH,$TMP,$BSWAP,$ABEF_SAVE,$CDGH_SAVE)=map("%xmm$_",(0..2,7..10));
my @MSG=map("%xmm$_",(3..6));
$code.=<<___;
.type	sha256_block_data_order_shaext,\@function,3
.align	64
sha256_block_data_order_shaext:
_shaext_shortcut:
___
# Win64: xmm6-xmm10 are callee-saved, spill them below the return address.
# NOTE(review): the save area is addressed off %rax, but no visible
# instruction on this path loads %rax from the pre-adjustment %rsp —
# confirm against upstream before enabling $shaext.
$code.=<<___ if ($win64);
	lea	`-8-5*16`(%rsp),%rsp
	movaps	%xmm6,-8-5*16(%rax)
	movaps	%xmm7,-8-4*16(%rax)
	movaps	%xmm8,-8-3*16(%rax)
	movaps	%xmm9,-8-2*16(%rax)
	movaps	%xmm10,-8-1*16(%rax)
.Lprologue_shaext:
___
# Load the state and permute it into the ABEF/CDGH lane order.
$code.=<<___;
	lea	K256+0x80(%rip),$Tbl
	movdqu	($ctx),$ABEF		# DCBA
	movdqu	16($ctx),$CDGH		# HGFE
	movdqa	0x200-0x80($Tbl),$TMP	# byte swap mask
	pshufd	\$0x1b,$ABEF,$Wi	# ABCD
	pshufd	\$0xb1,$ABEF,$ABEF	# CDAB
	pshufd	\$0x1b,$CDGH,$CDGH	# EFGH
	movdqa	$TMP,$BSWAP		# offload
	palignr	\$8,$CDGH,$ABEF		# ABEF
	punpcklqdq	$Wi,$CDGH	# CDGH
	jmp	.Loop_shaext
.align	16
.Loop_shaext:
	movdqu	($inp),@MSG[0]
	movdqu	0x10($inp),@MSG[1]
	movdqu	0x20($inp),@MSG[2]
	pshufb	$TMP,@MSG[0]
	movdqu	0x30($inp),@MSG[3]
	movdqa	0*32-0x80($Tbl),$Wi
	paddd	@MSG[0],$Wi
	pshufb	$TMP,@MSG[1]
	movdqa	$CDGH,$CDGH_SAVE	# offload
	sha256rnds2	$ABEF,$CDGH	# 0-3
	pshufd	\$0x0e,$Wi,$Wi
	nop
	movdqa	$ABEF,$ABEF_SAVE	# offload
	sha256rnds2	$CDGH,$ABEF
	movdqa	1*32-0x80($Tbl),$Wi
	paddd	@MSG[1],$Wi
	pshufb	$TMP,@MSG[2]
	sha256rnds2	$ABEF,$CDGH	# 4-7
	pshufd	\$0x0e,$Wi,$Wi
	lea	0x40($inp),$inp
	sha256msg1	@MSG[1],@MSG[0]
	sha256rnds2	$CDGH,$ABEF
	movdqa	2*32-0x80($Tbl),$Wi
	paddd	@MSG[2],$Wi
	pshufb	$TMP,@MSG[3]
	sha256rnds2	$ABEF,$CDGH	# 8-11
	pshufd	\$0x0e,$Wi,$Wi
	movdqa	@MSG[3],$TMP
	palignr	\$4,@MSG[2],$TMP
	nop
	paddd	$TMP,@MSG[0]
	sha256msg1	@MSG[2],@MSG[1]
	sha256rnds2	$CDGH,$ABEF
	movdqa	3*32-0x80($Tbl),$Wi
	paddd	@MSG[3],$Wi
	sha256msg2	@MSG[3],@MSG[0]
	sha256rnds2	$ABEF,$CDGH	# 12-15
	pshufd	\$0x0e,$Wi,$Wi
	movdqa	@MSG[0],$TMP
	palignr	\$4,@MSG[3],$TMP
	nop
	paddd	$TMP,@MSG[1]
	sha256msg1	@MSG[3],@MSG[2]
	sha256rnds2	$CDGH,$ABEF
___
# Middle rounds 16..51: identical 4-round groups, rotating @MSG each pass.
for($i=4;$i<16-3;$i++) {
$code.=<<___;
	movdqa	$i*32-0x80($Tbl),$Wi
	paddd	@MSG[0],$Wi
	sha256msg2	@MSG[0],@MSG[1]
	sha256rnds2	$ABEF,$CDGH	# 16-19...
	pshufd	\$0x0e,$Wi,$Wi
	movdqa	@MSG[1],$TMP
	palignr	\$4,@MSG[0],$TMP
	nop
	paddd	$TMP,@MSG[2]
	sha256msg1	@MSG[0],@MSG[3]
	sha256rnds2	$CDGH,$ABEF
___
	push(@MSG,shift(@MSG));
}
# Final rounds 52..63, loop control on $num, then add the saved state back.
$code.=<<___;
	movdqa	13*32-0x80($Tbl),$Wi
	paddd	@MSG[0],$Wi
	sha256msg2	@MSG[0],@MSG[1]
	sha256rnds2	$ABEF,$CDGH	# 52-55
	pshufd	\$0x0e,$Wi,$Wi
	movdqa	@MSG[1],$TMP
	palignr	\$4,@MSG[0],$TMP
	sha256rnds2	$CDGH,$ABEF
	paddd	$TMP,@MSG[2]
	movdqa	14*32-0x80($Tbl),$Wi
	paddd	@MSG[1],$Wi
	sha256rnds2	$ABEF,$CDGH	# 56-59
	pshufd	\$0x0e,$Wi,$Wi
	sha256msg2	@MSG[1],@MSG[2]
	movdqa	$BSWAP,$TMP
	sha256rnds2	$CDGH,$ABEF
	movdqa	15*32-0x80($Tbl),$Wi
	paddd	@MSG[2],$Wi
	nop
	sha256rnds2	$ABEF,$CDGH	# 60-63
	pshufd	\$0x0e,$Wi,$Wi
	dec	$num
	nop
	sha256rnds2	$CDGH,$ABEF
	paddd	$CDGH_SAVE,$CDGH
	paddd	$ABEF_SAVE,$ABEF
	jnz	.Loop_shaext
	pshufd	\$0xb1,$CDGH,$CDGH	# DCHG
	pshufd	\$0x1b,$ABEF,$TMP	# FEBA
	pshufd	\$0xb1,$ABEF,$ABEF	# BAFE
	punpckhqdq	$CDGH,$ABEF	# DCBA
	palignr	\$8,$TMP,$CDGH	# HGFE
	movdqu	$ABEF,($ctx)
	movdqu	$CDGH,16($ctx)
___
$code.=<<___ if ($win64);
	movaps	-8-5*16(%rax),%xmm6
	movaps	-8-4*16(%rax),%xmm7
	movaps	-8-3*16(%rax),%xmm8
	movaps	-8-2*16(%rax),%xmm9
	movaps	-8-1*16(%rax),%xmm10
	mov	%rax,%rsp
.Lepilogue_shaext:
___
$code.=<<___;
	ret
.size	sha256_block_data_order_shaext,.-sha256_block_data_order_shaext
___
}}}
{{{
my $a4=$T1;
my ($a,$b,$c,$d,$e,$f,$g,$h);
# Catch-all for otherwise-undefined &mnemonic(...) calls: turn the
# (package-stripped) sub name into an assembly mnemonic and append
# "\tmnemonic\targs" to $code. Operands are reversed into AT&T order, and
# the last argument is prefixed with '$' when it is a bare number
# (immediate operand). Lets the schedulers below write SSE/AVX
# instructions as Perl calls.
sub AUTOLOAD()		# thunk [simplified] 32-bit style perlasm
{ my $opcode = $AUTOLOAD; $opcode =~ s/.*:://;
  my $arg = pop;
    $arg = "\$$arg" if ($arg*1 eq $arg);
  $code .= "\t$opcode\t".join(',',$arg,reverse @_)."\n";
}
# Return one compression round as a list of Perl snippets (strings to be
# eval'ed), so the SIMD schedulers can interleave them instruction-by-
# instruction with the message-schedule update. Uses globals @ROT, $i,
# $a0..$a4; h+=Sigma0(a) is carried over to the next round via $a1, and
# the rotation/swap bookkeeping rides on the last element.
sub body_00_15 () {
	(
	'($a,$b,$c,$d,$e,$f,$g,$h)=@ROT;'.
	'&ror	($a0,$Sigma1[2]-$Sigma1[1])',
	'&mov	($a,$a1)',
	'&mov	($a4,$f)',
	'&ror	($a1,$Sigma0[2]-$Sigma0[1])',
	'&xor	($a0,$e)',
	'&xor	($a4,$g)',			# f^g
	'&ror	($a0,$Sigma1[1]-$Sigma1[0])',
	'&xor	($a1,$a)',
	'&and	($a4,$e)',			# (f^g)&e
	'&xor	($a0,$e)',
	'&add	($h,$SZ*($i&15)."(%rsp)")',	# h+=X[i]+K[i]
	'&mov	($a2,$a)',
	'&xor	($a4,$g)',			# Ch(e,f,g)=((f^g)&e)^g
	'&ror	($a1,$Sigma0[1]-$Sigma0[0])',
	'&xor	($a2,$b)',			# a^b, b^c in next round
	'&add	($h,$a4)',			# h+=Ch(e,f,g)
	'&ror	($a0,$Sigma1[0])',		# Sigma1(e)
	'&and	($a3,$a2)',			# (b^c)&(a^b)
	'&xor	($a1,$a)',
	'&add	($h,$a0)',			# h+=Sigma1(e)
	'&xor	($a3,$b)',			# Maj(a,b,c)=Ch(a^b,c,b)
	'&ror	($a1,$Sigma0[0])',		# Sigma0(a)
	'&add	($d,$h)',			# d+=h
	'&add	($h,$a3)',			# h+=Maj(a,b,c)
	'&mov	($a0,$d)',
	'&add	($a1,$h);'.			# h+=Sigma0(a)
	'($a2,$a3) = ($a3,$a2); unshift(@ROT,pop(@ROT)); $i++;'
	);
}
######################################################################
# SSSE3 code path
#
# SSSE3 code path — SHA-256 only (SHA-512 has no SSSE3 variant here).
# @X holds four 16-byte vectors = 16 schedule words; $t0..$t5 are scratch.
if ($SZ==4) {	# SHA256 only
my @X = map("%xmm$_",(0..3));
my ($t0,$t1,$t2,$t3, $t4,$t5) = map("%xmm$_",(4..9));
# Same prologue shape as the integer path, with extra frame space on
# Win64 for the xmm6-xmm9 spill slots.
$code.=<<___;
.type	${func}_ssse3,\@function,3
.align	64
${func}_ssse3:
.cfi_startproc
.Lssse3_shortcut:
	mov	%rsp,%rax		# copy %rsp
.cfi_def_cfa_register	%rax
	push	%rbx
.cfi_push	%rbx
	push	%rbp
.cfi_push	%rbp
	push	%r12
.cfi_push	%r12
	push	%r13
.cfi_push	%r13
	push	%r14
.cfi_push	%r14
	push	%r15
.cfi_push	%r15
	shl	\$4,%rdx		# num*16
	sub	\$`$framesz+$win64*16*4`,%rsp
	lea	($inp,%rdx,$SZ),%rdx	# inp+num*16*$SZ
	and	\$-64,%rsp		# align stack frame
	mov	$ctx,$_ctx		# save ctx, 1st arg
	mov	$inp,$_inp		# save inp, 2nd arh
	mov	%rdx,$_end		# save end pointer, "3rd" arg
	mov	%rax,$_rsp		# save copy of %rsp
.cfi_cfa_expression	$_rsp,deref,+8
___
$code.=<<___ if ($win64);
	movaps	%xmm6,16*$SZ+32(%rsp)
	movaps	%xmm7,16*$SZ+48(%rsp)
	movaps	%xmm8,16*$SZ+64(%rsp)
	movaps	%xmm9,16*$SZ+80(%rsp)
___
$code.=<<___;
.Lprologue_ssse3:
	mov	$SZ*0($ctx),$A
	mov	$SZ*1($ctx),$B
	mov	$SZ*2($ctx),$C
	mov	$SZ*3($ctx),$D
	mov	$SZ*4($ctx),$E
	mov	$SZ*5($ctx),$F
	mov	$SZ*6($ctx),$G
	mov	$SZ*7($ctx),$H
___
# Per-block setup: load+byteswap the 16 message words, pre-add the first
# 16 K constants and park X[i]+K[i] in the frame for body_00_15.
$code.=<<___;
	#movdqa	$TABLE+`$SZ*2*$rounds`+32(%rip),$t4
	#movdqa	$TABLE+`$SZ*2*$rounds`+64(%rip),$t5
	jmp	.Lloop_ssse3
.align	16
.Lloop_ssse3:
	movdqa	$TABLE+`$SZ*2*$rounds`(%rip),$t3
	movdqu	0x00($inp),@X[0]
	movdqu	0x10($inp),@X[1]
	movdqu	0x20($inp),@X[2]
	pshufb	$t3,@X[0]
	movdqu	0x30($inp),@X[3]
	lea	$TABLE(%rip),$Tbl
	pshufb	$t3,@X[1]
	movdqa	0x00($Tbl),$t0
	movdqa	0x20($Tbl),$t1
	pshufb	$t3,@X[2]
	paddd	@X[0],$t0
	movdqa	0x40($Tbl),$t2
	pshufb	$t3,@X[3]
	movdqa	0x60($Tbl),$t3
	paddd	@X[1],$t1
	paddd	@X[2],$t2
	paddd	@X[3],$t3
	movdqa	$t0,0x00(%rsp)
	mov	$A,$a1
	movdqa	$t1,0x10(%rsp)
	mov	$B,$a3
	movdqa	$t2,0x20(%rsp)
	xor	$C,$a3			# magic
	movdqa	$t3,0x30(%rsp)
	mov	$E,$a0
	jmp	.Lssse3_00_47
.align	16
.Lssse3_00_47:
	sub	\$`-16*2*$SZ`,$Tbl	# size optimization
___
# Return the SSSE3 message-schedule update for four SHA-256 words as a
# list of instruction snippets (eval'ed by SSSE3_256_00_47). Computes
# X[0..3] += sigma0(X[1..4]) + X[9..12] + sigma1(X[14..17]); sigma0/1 are
# built from shift/rotate-by-pxor sequences on $t0..$t3. This variant
# references pshufb masks $t4/$t5; the hand-scheduled copy below replaces
# those with pshufd/psrldq/pslldq.
sub Xupdate_256_SSSE3 () {
	(
	'&movdqa	($t0,@X[1]);',
	'&movdqa	($t3,@X[3])',
	'&palignr	($t0,@X[0],$SZ)',	# X[1..4]
	'&palignr	($t3,@X[2],$SZ);',	# X[9..12]
	'&movdqa	($t1,$t0)',
	'&movdqa	($t2,$t0);',
	'&psrld	($t0,$sigma0[2])',
	'&paddd	(@X[0],$t3);',	# X[0..3] += X[9..12]
	'&psrld	($t2,$sigma0[0])',
	'&pshufd	($t3,@X[3],0b11111010)',# X[14..15]
	'&pslld	($t1,8*$SZ-$sigma0[1]);'.
	'&pxor	($t0,$t2)',
	'&psrld	($t2,$sigma0[1]-$sigma0[0]);'.
	'&pxor	($t0,$t1)',
	'&pslld	($t1,$sigma0[1]-$sigma0[0]);'.
	'&pxor	($t0,$t2);',
	'&movdqa	($t2,$t3)',
	'&pxor	($t0,$t1);',	# sigma0(X[1..4])
	'&psrld	($t3,$sigma1[2])',
	'&paddd	(@X[0],$t0);',	# X[0..3] += sigma0(X[1..4])
	'&psrlq	($t2,$sigma1[0])',
	'&pxor	($t3,$t2);',
	'&psrlq	($t2,$sigma1[1]-$sigma1[0])',
	'&pxor	($t3,$t2)',
	'&pshufb	($t3,$t4)',	# sigma1(X[14..15])
	'&paddd	(@X[0],$t3)',	# X[0..1] += sigma1(X[14..15])
	'&pshufd	($t3,@X[0],0b01010000)',# X[16..17]
	'&movdqa	($t2,$t3);',
	'&psrld	($t3,$sigma1[2])',
	'&psrlq	($t2,$sigma1[0])',
	'&pxor	($t3,$t2);',
	'&psrlq	($t2,$sigma1[1]-$sigma1[0])',
	'&pxor	($t3,$t2);',
	'&movdqa	($t2,16*2*$j."($Tbl)")',
	'&pshufb	($t3,$t5)',
	'&paddd	(@X[0],$t3)'	# X[2..3] += sigma1(X[16..17])
	);
}
# Emit four interleaved SHA-256 rounds plus one schedule update for the
# SSSE3 path. $body generates the round snippets (104 in total, consumed
# via shift(@insns)); the vector instructions are inserted between them.
# The "if (0)" branch is the reference interleave using Xupdate_256_SSSE3;
# the live "else" branch is the same update hand-scheduled (with the two
# pshufb's replaced by pshufd+psrldq/pslldq). "#@" marks the hand-tuned
# insertion points.
sub SSSE3_256_00_47 () {
my $j = shift;
my $body = shift;
my @X = @_;
my @insns = (&$body,&$body,&$body,&$body);	# 104 instructions
if (0) {
	foreach (Xupdate_256_SSSE3()) {		# 36 instructions
	    eval;
	    eval(shift(@insns));
	    eval(shift(@insns));
	    eval(shift(@insns));
	}
} else {			# squeeze extra 4% on Westmere and 19% on Atom
	  eval(shift(@insns));	#@
	&movdqa		($t0,@X[1]);
	  eval(shift(@insns));
	  eval(shift(@insns));
	&movdqa		($t3,@X[3]);
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	&palignr	($t0,@X[0],$SZ);	# X[1..4]
	  eval(shift(@insns));
	  eval(shift(@insns));
	&palignr	($t3,@X[2],$SZ);	# X[9..12]
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));	#@
	&movdqa	($t1,$t0);
	  eval(shift(@insns));
	  eval(shift(@insns));
	&movdqa	($t2,$t0);
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	&psrld	($t0,$sigma0[2]);
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));
	&paddd	(@X[0],$t3);	# X[0..3] += X[9..12]
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	&psrld	($t2,$sigma0[0]);
	  eval(shift(@insns));
	  eval(shift(@insns));
	&pshufd	($t3,@X[3],0b11111010);	# X[4..15]
	  eval(shift(@insns));
	  eval(shift(@insns));	#@
	&pslld	($t1,8*$SZ-$sigma0[1]);
	  eval(shift(@insns));
	  eval(shift(@insns));
	&pxor	($t0,$t2);
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));	#@
	&psrld	($t2,$sigma0[1]-$sigma0[0]);
	  eval(shift(@insns));
	&pxor	($t0,$t1);
	  eval(shift(@insns));
	  eval(shift(@insns));
	&pslld	($t1,$sigma0[1]-$sigma0[0]);
	  eval(shift(@insns));
	  eval(shift(@insns));
	&pxor	($t0,$t2);
	  eval(shift(@insns));
	  eval(shift(@insns));	#@
	&movdqa	($t2,$t3);
	  eval(shift(@insns));
	  eval(shift(@insns));
	&pxor	($t0,$t1);	# sigma0(X[1..4])
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	  eval(shift(@insns));
	&psrld	($t3,$sigma1[2]);
	  eval(shift(@insns));
	  eval(shift(@insns));
	&paddd	(@X[0],$t0);	# X[0..3] += sigma0(X[1..4])
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	&psrlq	($t2,$sigma1[0]);
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));
	&pxor	($t3,$t2);
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));	#@
	&psrlq	($t2,$sigma1[1]-$sigma1[0]);
	  eval(shift(@insns));
	  eval(shift(@insns));
	&pxor	($t3,$t2);
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	  eval(shift(@insns));
	# sigma1 tail via pshufd+psrldq instead of pshufb (no mask needed).
	#&pshufb	($t3,$t4);	# sigma1(X[14..15])
	&pshufd	($t3,$t3,0b10000000);
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));
	&psrldq	($t3,8);
	  eval(shift(@insns));
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));	#@
	&paddd	(@X[0],$t3);	# X[0..1] += sigma1(X[14..15])
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));
	&pshufd	($t3,@X[0],0b01010000);	# X[16..17]
	  eval(shift(@insns));
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	&movdqa	($t2,$t3);
	  eval(shift(@insns));
	  eval(shift(@insns));
	&psrld	($t3,$sigma1[2]);
	  eval(shift(@insns));
	  eval(shift(@insns));	#@
	&psrlq	($t2,$sigma1[0]);
	  eval(shift(@insns));
	  eval(shift(@insns));
	&pxor	($t3,$t2);
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	&psrlq	($t2,$sigma1[1]-$sigma1[0]);
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));
	&pxor	($t3,$t2);
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));	#@
	#&pshufb	($t3,$t5);
	&pshufd	($t3,$t3,0b00001000);
	  eval(shift(@insns));
	  eval(shift(@insns));
	&movdqa	($t2,16*2*$j."($Tbl)");
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	&pslldq	($t3,8);
	  eval(shift(@insns));
	  eval(shift(@insns));
	  eval(shift(@insns));
	&paddd	(@X[0],$t3);	# X[2..3] += sigma1(X[16..17])
	  eval(shift(@insns));	#@
	  eval(shift(@insns));
	  eval(shift(@insns));
}
	# Pre-add K[j] and stash X[i]+K[i] for the next 16 rounds' use.
	&paddd	($t2,@X[0]);
	  foreach (@insns) { eval; }	# remaining instructions
	&movdqa	(16*$j."(%rsp)",$t2);
}
# Drive four schedule+round groups (rounds with Xupdate), then 16 plain
# rounds; loop until the K-table sentinel byte is reached, then emit the
# state add-back, store, and the standard epilogue.
for ($i=0,$j=0; $j<4; $j++) {
	&SSSE3_256_00_47($j,\&body_00_15,@X);
	push(@X,shift(@X));		# rotate(@X)
}
	&cmpb	($SZ-1+16*2*$SZ."($Tbl)",0);
	&jne	(".Lssse3_00_47");
for ($i=0; $i<16; ) {
	foreach(body_00_15()) { eval; }
}
$code.=<<___;
	mov	$_ctx,$ctx
	mov	$a1,$A
	add	$SZ*0($ctx),$A
	lea	16*$SZ($inp),$inp
	add	$SZ*1($ctx),$B
	add	$SZ*2($ctx),$C
	add	$SZ*3($ctx),$D
	add	$SZ*4($ctx),$E
	add	$SZ*5($ctx),$F
	add	$SZ*6($ctx),$G
	add	$SZ*7($ctx),$H
	cmp	$_end,$inp
	mov	$A,$SZ*0($ctx)
	mov	$B,$SZ*1($ctx)
	mov	$C,$SZ*2($ctx)
	mov	$D,$SZ*3($ctx)
	mov	$E,$SZ*4($ctx)
	mov	$F,$SZ*5($ctx)
	mov	$G,$SZ*6($ctx)
	mov	$H,$SZ*7($ctx)
	jb	.Lloop_ssse3
	mov	$_rsp,%rsi
.cfi_def_cfa	%rsi,8
___
$code.=<<___ if ($win64);
	movaps	16*$SZ+32(%rsp),%xmm6
	movaps	16*$SZ+48(%rsp),%xmm7
	movaps	16*$SZ+64(%rsp),%xmm8
	movaps	16*$SZ+80(%rsp),%xmm9
___
$code.=<<___;
	mov	-48(%rsi),%r15
.cfi_restore	%r15
	mov	-40(%rsi),%r14
.cfi_restore	%r14
	mov	-32(%rsi),%r13
.cfi_restore	%r13
	mov	-24(%rsi),%r12
.cfi_restore	%r12
	mov	-16(%rsi),%rbp
.cfi_restore	%rbp
	mov	-8(%rsi),%rbx
.cfi_restore	%rbx
	lea	(%rsi),%rsp
.cfi_def_cfa_register	%rsp
.Lepilogue_ssse3:
	ret
.cfi_endproc
.size	${func}_ssse3,.-${func}_ssse3
___
}
if ($avx) {{
######################################################################
# AVX+shrd code path
#
local *ror = sub { &shrd(@_[0],@_) };
$code.=<<___;
.type ${func}_avx,\@function,3
.align 64
${func}_avx:
.cfi_startproc
.Lavx_shortcut:
mov %rsp,%rax # copy %rsp
.cfi_def_cfa_register %rax
push %rbx
.cfi_push %rbx
push %rbp
.cfi_push %rbp
push %r12
.cfi_push %r12
push %r13
.cfi_push %r13
push %r14
.cfi_push %r14
push %r15
.cfi_push %r15
shl \$4,%rdx # num*16
sub \$`$framesz+$win64*16*($SZ==4?4:6)`,%rsp
lea ($inp,%rdx,$SZ),%rdx # inp+num*16*$SZ
and \$-64,%rsp # align stack frame
mov $ctx,$_ctx # save ctx, 1st arg
mov $inp,$_inp # save inp, 2nd arh
mov %rdx,$_end # save end pointer, "3rd" arg
mov %rax,$_rsp # save copy of %rsp
.cfi_cfa_expression $_rsp,deref,+8
___
$code.=<<___ if ($win64);
movaps %xmm6,16*$SZ+32(%rsp)
movaps %xmm7,16*$SZ+48(%rsp)
movaps %xmm8,16*$SZ+64(%rsp)
movaps %xmm9,16*$SZ+80(%rsp)
___
$code.=<<___ if ($win64 && $SZ>4);
movaps %xmm10,16*$SZ+96(%rsp)
movaps %xmm11,16*$SZ+112(%rsp)
___
$code.=<<___;
.Lprologue_avx:
vzeroupper
mov $SZ*0($ctx),$A
mov $SZ*1($ctx),$B
mov $SZ*2($ctx),$C
mov $SZ*3($ctx),$D
mov $SZ*4($ctx),$E
mov $SZ*5($ctx),$F
mov $SZ*6($ctx),$G
mov $SZ*7($ctx),$H
___
if ($SZ==4) { # SHA256
my @X = map("%xmm$_",(0..3));
my ($t0,$t1,$t2,$t3, $t4,$t5) = map("%xmm$_",(4..9));
$code.=<<___;
vmovdqa $TABLE+`$SZ*2*$rounds`+32(%rip),$t4
vmovdqa $TABLE+`$SZ*2*$rounds`+64(%rip),$t5
jmp .Lloop_avx
.align 16
.Lloop_avx:
vmovdqa $TABLE+`$SZ*2*$rounds`(%rip),$t3
vmovdqu 0x00($inp),@X[0]
vmovdqu 0x10($inp),@X[1]
vmovdqu 0x20($inp),@X[2]
vmovdqu 0x30($inp),@X[3]
vpshufb $t3,@X[0],@X[0]
lea $TABLE(%rip),$Tbl
vpshufb $t3,@X[1],@X[1]
vpshufb $t3,@X[2],@X[2]
vpaddd 0x00($Tbl),@X[0],$t0
vpshufb $t3,@X[3],@X[3]
vpaddd 0x20($Tbl),@X[1],$t1
vpaddd 0x40($Tbl),@X[2],$t2
vpaddd 0x60($Tbl),@X[3],$t3
vmovdqa $t0,0x00(%rsp)
mov $A,$a1
vmovdqa $t1,0x10(%rsp)
mov $B,$a3
vmovdqa $t2,0x20(%rsp)
xor $C,$a3 # magic
vmovdqa $t3,0x30(%rsp)
mov $E,$a0
jmp .Lavx_00_47
.align 16
.Lavx_00_47:
sub \$`-16*2*$SZ`,$Tbl # size optimization
___
# AVX message-schedule update for four SHA-256 words, as a snippet list
# for AVX_256_00_47: X[0..3] += sigma0(X[1..4]) + X[9..12] +
# sigma1(X[14..17]). Three-operand AVX forms avoid most of the movdqa
# copies the SSSE3 version needs; $t4/$t5 are the preloaded pshufb masks.
sub Xupdate_256_AVX () {
	(
	'&vpalignr	($t0,@X[1],@X[0],$SZ)',	# X[1..4]
	'&vpalignr	($t3,@X[3],@X[2],$SZ)',	# X[9..12]
	'&vpsrld	($t2,$t0,$sigma0[0]);',
	'&vpaddd	(@X[0],@X[0],$t3)',	# X[0..3] += X[9..12]
	'&vpsrld	($t3,$t0,$sigma0[2])',
	'&vpslld	($t1,$t0,8*$SZ-$sigma0[1]);',
	'&vpxor	($t0,$t3,$t2)',
	'&vpshufd	($t3,@X[3],0b11111010)',# X[14..15]
	'&vpsrld	($t2,$t2,$sigma0[1]-$sigma0[0]);',
	'&vpxor	($t0,$t0,$t1)',
	'&vpslld	($t1,$t1,$sigma0[1]-$sigma0[0]);',
	'&vpxor	($t0,$t0,$t2)',
	'&vpsrld	($t2,$t3,$sigma1[2]);',
	'&vpxor	($t0,$t0,$t1)',		# sigma0(X[1..4])
	'&vpsrlq	($t3,$t3,$sigma1[0]);',
	'&vpaddd	(@X[0],@X[0],$t0)',	# X[0..3] += sigma0(X[1..4])
	'&vpxor	($t2,$t2,$t3);',
	'&vpsrlq	($t3,$t3,$sigma1[1]-$sigma1[0])',
	'&vpxor	($t2,$t2,$t3)',
	'&vpshufb	($t2,$t2,$t4)',		# sigma1(X[14..15])
	'&vpaddd	(@X[0],@X[0],$t2)',	# X[0..1] += sigma1(X[14..15])
	'&vpshufd	($t3,@X[0],0b01010000)',# X[16..17]
	'&vpsrld	($t2,$t3,$sigma1[2])',
	'&vpsrlq	($t3,$t3,$sigma1[0])',
	'&vpxor	($t2,$t2,$t3);',
	'&vpsrlq	($t3,$t3,$sigma1[1]-$sigma1[0])',
	'&vpxor	($t2,$t2,$t3)',
	'&vpshufb	($t2,$t2,$t5)',
	'&vpaddd	(@X[0],@X[0],$t2)'	# X[2..3] += sigma1(X[16..17])
	);
}
# Emit four AVX rounds plus one schedule update: interleave each of the
# 29 Xupdate instructions with three round-body instructions, pre-add
# K[j] to the freshly extended words, and park X[i]+K[i] in the frame.
sub AVX_256_00_47 () {
my $j = shift;
my $body = shift;
my @X = @_;
my @insns = (&$body,&$body,&$body,&$body);	# 104 instructions
	foreach (Xupdate_256_AVX()) {		# 29 instructions
	    eval;
	    eval(shift(@insns));
	    eval(shift(@insns));
	    eval(shift(@insns));
	}
	&vpaddd	($t2,@X[0],16*2*$j."($Tbl)");
	  foreach (@insns) { eval; }	# remaining instructions
	&vmovdqa	(16*$j."(%rsp)",$t2);
}
for ($i=0,$j=0; $j<4; $j++) {
&AVX_256_00_47($j,\&body_00_15,@X);
push(@X,shift(@X)); # rotate(@X)
}
&cmpb ($SZ-1+16*2*$SZ."($Tbl)",0);
&jne (".Lavx_00_47");
for ($i=0; $i<16; ) {
foreach(body_00_15()) { eval; }
}
} else { # SHA512
my @X = map("%xmm$_",(0..7));
my ($t0,$t1,$t2,$t3) = map("%xmm$_",(8..11));
$code.=<<___;
jmp .Lloop_avx
.align 16
.Lloop_avx:
vmovdqa $TABLE+`$SZ*2*$rounds`(%rip),$t3
vmovdqu 0x00($inp),@X[0]
lea $TABLE+0x80(%rip),$Tbl # size optimization
vmovdqu 0x10($inp),@X[1]
vmovdqu 0x20($inp),@X[2]
vpshufb $t3,@X[0],@X[0]
vmovdqu 0x30($inp),@X[3]
vpshufb $t3,@X[1],@X[1]
vmovdqu 0x40($inp),@X[4]
vpshufb $t3,@X[2],@X[2]
vmovdqu 0x50($inp),@X[5]
vpshufb $t3,@X[3],@X[3]
vmovdqu 0x60($inp),@X[6]
vpshufb $t3,@X[4],@X[4]
vmovdqu 0x70($inp),@X[7]
vpshufb $t3,@X[5],@X[5]
vpaddq -0x80($Tbl),@X[0],$t0
vpshufb $t3,@X[6],@X[6]
vpaddq -0x60($Tbl),@X[1],$t1
vpshufb $t3,@X[7],@X[7]
vpaddq -0x40($Tbl),@X[2],$t2
vpaddq -0x20($Tbl),@X[3],$t3
vmovdqa $t0,0x00(%rsp)
vpaddq 0x00($Tbl),@X[4],$t0
vmovdqa $t1,0x10(%rsp)
vpaddq 0x20($Tbl),@X[5],$t1
vmovdqa $t2,0x20(%rsp)
vpaddq 0x40($Tbl),@X[6],$t2
vmovdqa $t3,0x30(%rsp)
vpaddq 0x60($Tbl),@X[7],$t3
vmovdqa $t0,0x40(%rsp)
mov $A,$a1
vmovdqa $t1,0x50(%rsp)
mov $B,$a3
vmovdqa $t2,0x60(%rsp)
xor $C,$a3 # magic
vmovdqa $t3,0x70(%rsp)
mov $E,$a0
jmp .Lavx_00_47
.align 16
.Lavx_00_47:
add \$`16*2*$SZ`,$Tbl
___
# AVX message-schedule update for two SHA-512 words, as a snippet list
# for AVX_512_00_47: X[0..1] += sigma0(X[1..2]) + X[9..10] +
# sigma1(X[14..15]). 64-bit lanes, so sigma1 operates directly on @X[7]
# (no pshufd lane gymnastics needed, unlike the 256-bit variant).
sub Xupdate_512_AVX () {
	(
	'&vpalignr	($t0,@X[1],@X[0],$SZ)',	# X[1..2]
	'&vpalignr	($t3,@X[5],@X[4],$SZ)',	# X[9..10]
	'&vpsrlq	($t2,$t0,$sigma0[0])',
	'&vpaddq	(@X[0],@X[0],$t3);',	# X[0..1] += X[9..10]
	'&vpsrlq	($t3,$t0,$sigma0[2])',
	'&vpsllq	($t1,$t0,8*$SZ-$sigma0[1]);',
	 '&vpxor	($t0,$t3,$t2)',
	'&vpsrlq	($t2,$t2,$sigma0[1]-$sigma0[0]);',
	 '&vpxor	($t0,$t0,$t1)',
	'&vpsllq	($t1,$t1,$sigma0[1]-$sigma0[0]);',
	 '&vpxor	($t0,$t0,$t2)',
	 '&vpsrlq	($t3,@X[7],$sigma1[2]);',
	'&vpxor	($t0,$t0,$t1)',		# sigma0(X[1..2])
	 '&vpsllq	($t2,@X[7],8*$SZ-$sigma1[1]);',
	'&vpaddq	(@X[0],@X[0],$t0)',	# X[0..1] += sigma0(X[1..2])
	 '&vpsrlq	($t1,@X[7],$sigma1[0]);',
	 '&vpxor	($t3,$t3,$t2)',
	 '&vpsllq	($t2,$t2,$sigma1[1]-$sigma1[0]);',
	 '&vpxor	($t3,$t3,$t1)',
	 '&vpsrlq	($t1,$t1,$sigma1[1]-$sigma1[0]);',
	 '&vpxor	($t3,$t3,$t2)',
	 '&vpxor	($t3,$t3,$t1)',		# sigma1(X[14..15])
	'&vpaddq	(@X[0],@X[0],$t3)',	# X[0..1] += sigma1(X[14..15])
	);
}
# Emit two SHA-512 AVX rounds plus one schedule update: interleave the 23
# Xupdate instructions with two round-body instructions each, pre-add
# K[j] ($Tbl is biased by +0x80 in this path) and stash X[i]+K[i].
sub AVX_512_00_47 () {
my $j = shift;
my $body = shift;
my @X = @_;
my @insns = (&$body,&$body);			# 52 instructions
	foreach (Xupdate_512_AVX()) {		# 23 instructions
	    eval;
	    eval(shift(@insns));
	    eval(shift(@insns));
	}
	&vpaddq	($t2,@X[0],16*2*$j-0x80."($Tbl)");
	  foreach (@insns) { eval; }	# remaining instructions
	&vmovdqa	(16*$j."(%rsp)",$t2);
}
for ($i=0,$j=0; $j<8; $j++) {
&AVX_512_00_47($j,\&body_00_15,@X);
push(@X,shift(@X)); # rotate(@X)
}
&cmpb ($SZ-1+16*2*$SZ-0x80."($Tbl)",0);
&jne (".Lavx_00_47");
for ($i=0; $i<16; ) {
foreach(body_00_15()) { eval; }
}
}
$code.=<<___;
mov $_ctx,$ctx
mov $a1,$A
add $SZ*0($ctx),$A
lea 16*$SZ($inp),$inp
add $SZ*1($ctx),$B
add $SZ*2($ctx),$C
add $SZ*3($ctx),$D
add $SZ*4($ctx),$E
add $SZ*5($ctx),$F
add $SZ*6($ctx),$G
add $SZ*7($ctx),$H
cmp $_end,$inp
mov $A,$SZ*0($ctx)
mov $B,$SZ*1($ctx)
mov $C,$SZ*2($ctx)
mov $D,$SZ*3($ctx)
mov $E,$SZ*4($ctx)
mov $F,$SZ*5($ctx)
mov $G,$SZ*6($ctx)
mov $H,$SZ*7($ctx)
jb .Lloop_avx
mov $_rsp,%rsi
.cfi_def_cfa %rsi,8
vzeroupper
___
$code.=<<___ if ($win64);
movaps 16*$SZ+32(%rsp),%xmm6
movaps 16*$SZ+48(%rsp),%xmm7
movaps 16*$SZ+64(%rsp),%xmm8
movaps 16*$SZ+80(%rsp),%xmm9
___
$code.=<<___ if ($win64 && $SZ>4);
movaps 16*$SZ+96(%rsp),%xmm10
movaps 16*$SZ+112(%rsp),%xmm11
___
$code.=<<___;
mov -48(%rsi),%r15
.cfi_restore %r15
mov -40(%rsi),%r14
.cfi_restore %r14
mov -32(%rsi),%r13
.cfi_restore %r13
mov -24(%rsi),%r12
.cfi_restore %r12
mov -16(%rsi),%rbp
.cfi_restore %rbp
mov -8(%rsi),%rbx
.cfi_restore %rbx
lea (%rsi),%rsp
.cfi_def_cfa_register %rsp
.Lepilogue_avx:
ret
.cfi_endproc
.size ${func}_avx,.-${func}_avx
___
if ($avx>1) {{
######################################################################
# AVX2+BMI code path
#
my $a5=$SZ==4?"%esi":"%rsi"; # zap $inp
my $PUSH8=8*2*$SZ;
use integer;
sub bodyx_00_15 () {
# One BMI2-flavoured SHA round for the AVX2 code path, returned as a
# list of instruction strings to be eval'd by the interleaving drivers.
# rorx/andn replace the ror/and sequences of the plain path; @ROT is
# rotated at the end of the list so the next invocation addresses the
# next round's working variables, and $i advances the message index.
# at start $a1 should be zero, $a3 - $b^$c and $a4 copy of $f
(
'($a,$b,$c,$d,$e,$f,$g,$h)=@ROT;'.
'&add ($h,(32*($i/(16/$SZ))+$SZ*($i%(16/$SZ)))%$PUSH8.$base)', # h+=X[i]+K[i]
'&and ($a4,$e)', # f&e
'&rorx ($a0,$e,$Sigma1[2])',
'&rorx ($a2,$e,$Sigma1[1])',
'&lea ($a,"($a,$a1)")', # h+=Sigma0(a) from the past
'&lea ($h,"($h,$a4)")',
'&andn ($a4,$e,$g)', # ~e&g
'&xor ($a0,$a2)',
'&rorx ($a1,$e,$Sigma1[0])',
'&lea ($h,"($h,$a4)")', # h+=Ch(e,f,g)=(e&f)+(~e&g)
'&xor ($a0,$a1)', # Sigma1(e)
'&mov ($a2,$a)',
'&rorx ($a4,$a,$Sigma0[2])',
'&lea ($h,"($h,$a0)")', # h+=Sigma1(e)
'&xor ($a2,$b)', # a^b, b^c in next round
'&rorx ($a1,$a,$Sigma0[1])',
'&rorx ($a0,$a,$Sigma0[0])',
'&lea ($d,"($d,$h)")', # d+=h
'&and ($a3,$a2)', # (b^c)&(a^b)
'&xor ($a1,$a4)',
'&xor ($a3,$b)', # Maj(a,b,c)=Ch(a^b,c,b)
'&xor ($a1,$a0)', # Sigma0(a)
'&lea ($h,"($h,$a3)");'. # h+=Maj(a,b,c)
'&mov ($a4,$e)', # copy of f in future
'($a2,$a3) = ($a3,$a2); unshift(@ROT,pop(@ROT)); $i++;'
);
# and at the finish one has to $a+=$a1
}
$code.=<<___;
.type ${func}_avx2,\@function,3
.align 64
${func}_avx2:
.cfi_startproc
.Lavx2_shortcut:
mov %rsp,%rax # copy %rsp
.cfi_def_cfa_register %rax
push %rbx
.cfi_push %rbx
push %rbp
.cfi_push %rbp
push %r12
.cfi_push %r12
push %r13
.cfi_push %r13
push %r14
.cfi_push %r14
push %r15
.cfi_push %r15
sub \$`2*$SZ*$rounds+4*8+$win64*16*($SZ==4?4:6)`,%rsp
shl \$4,%rdx # num*16
and \$-256*$SZ,%rsp # align stack frame
lea ($inp,%rdx,$SZ),%rdx # inp+num*16*$SZ
add \$`2*$SZ*($rounds-8)`,%rsp
mov $ctx,$_ctx # save ctx, 1st arg
mov $inp,$_inp # save inp, 2nd arh
mov %rdx,$_end # save end pointer, "3rd" arg
mov %rax,$_rsp # save copy of %rsp
.cfi_cfa_expression $_rsp,deref,+8
___
$code.=<<___ if ($win64);
movaps %xmm6,16*$SZ+32(%rsp)
movaps %xmm7,16*$SZ+48(%rsp)
movaps %xmm8,16*$SZ+64(%rsp)
movaps %xmm9,16*$SZ+80(%rsp)
___
$code.=<<___ if ($win64 && $SZ>4);
movaps %xmm10,16*$SZ+96(%rsp)
movaps %xmm11,16*$SZ+112(%rsp)
___
$code.=<<___;
.Lprologue_avx2:
vzeroupper
sub \$-16*$SZ,$inp # inp++, size optimization
mov $SZ*0($ctx),$A
mov $inp,%r12 # borrow $T1
mov $SZ*1($ctx),$B
cmp %rdx,$inp # $_end
mov $SZ*2($ctx),$C
cmove %rsp,%r12 # next block or random data
mov $SZ*3($ctx),$D
mov $SZ*4($ctx),$E
mov $SZ*5($ctx),$F
mov $SZ*6($ctx),$G
mov $SZ*7($ctx),$H
___
if ($SZ==4) { # SHA256
my @X = map("%ymm$_",(0..3));
my ($t0,$t1,$t2,$t3, $t4,$t5) = map("%ymm$_",(4..9));
$code.=<<___;
vmovdqa $TABLE+`$SZ*2*$rounds`+32(%rip),$t4
vmovdqa $TABLE+`$SZ*2*$rounds`+64(%rip),$t5
jmp .Loop_avx2
.align 16
.Loop_avx2:
vmovdqa $TABLE+`$SZ*2*$rounds`(%rip),$t3
vmovdqu -16*$SZ+0($inp),%xmm0
vmovdqu -16*$SZ+16($inp),%xmm1
vmovdqu -16*$SZ+32($inp),%xmm2
vmovdqu -16*$SZ+48($inp),%xmm3
#mov $inp,$_inp # offload $inp
vinserti128 \$1,(%r12),@X[0],@X[0]
vinserti128 \$1,16(%r12),@X[1],@X[1]
vpshufb $t3,@X[0],@X[0]
vinserti128 \$1,32(%r12),@X[2],@X[2]
vpshufb $t3,@X[1],@X[1]
vinserti128 \$1,48(%r12),@X[3],@X[3]
lea $TABLE(%rip),$Tbl
vpshufb $t3,@X[2],@X[2]
vpaddd 0x00($Tbl),@X[0],$t0
vpshufb $t3,@X[3],@X[3]
vpaddd 0x20($Tbl),@X[1],$t1
vpaddd 0x40($Tbl),@X[2],$t2
vpaddd 0x60($Tbl),@X[3],$t3
vmovdqa $t0,0x00(%rsp)
xor $a1,$a1
vmovdqa $t1,0x20(%rsp)
lea -$PUSH8(%rsp),%rsp
mov $B,$a3
vmovdqa $t2,0x00(%rsp)
xor $C,$a3 # magic
vmovdqa $t3,0x20(%rsp)
mov $F,$a4
sub \$-16*2*$SZ,$Tbl # size optimization
jmp .Lavx2_00_47
.align 16
.Lavx2_00_47:
___
# Emit one SHA-256/AVX2 round group: interleaves the 256-bit schedule
# update with four BMI2 round bodies.  The stack frame slides down by
# $PUSH8 every second group ("sliding window") instead of using a fixed
# rotation of save slots; $base is referenced from within the eval'd
# instruction strings, so it is not unused despite appearances.
sub AVX2_256_00_47 () {
my $j = shift;
my $body = shift;
my @X = @_;
my @insns = (&$body,&$body,&$body,&$body); # 96 instructions
my $base = "+2*$PUSH8(%rsp)";
&lea ("%rsp","-$PUSH8(%rsp)") if (($j%2)==0);
foreach (Xupdate_256_AVX()) { # 29 instructions
eval;
# Three scalar-round instructions after each vector instruction.
eval(shift(@insns));
eval(shift(@insns));
eval(shift(@insns));
}
&vpaddd ($t2,@X[0],16*2*$j."($Tbl)");
foreach (@insns) { eval; } # remaining instructions
&vmovdqa ((32*$j)%$PUSH8."(%rsp)",$t2);
}
for ($i=0,$j=0; $j<4; $j++) {
&AVX2_256_00_47($j,\&bodyx_00_15,@X);
push(@X,shift(@X)); # rotate(@X)
}
&lea ($Tbl,16*2*$SZ."($Tbl)");
&cmpb (($SZ-1)."($Tbl)",0);
&jne (".Lavx2_00_47");
for ($i=0; $i<16; ) {
my $base=$i<8?"+$PUSH8(%rsp)":"(%rsp)";
foreach(bodyx_00_15()) { eval; }
}
} else { # SHA512
my @X = map("%ymm$_",(0..7));
my ($t0,$t1,$t2,$t3) = map("%ymm$_",(8..11));
$code.=<<___;
jmp .Loop_avx2
.align 16
.Loop_avx2:
vmovdqu -16*$SZ($inp),%xmm0
vmovdqu -16*$SZ+16($inp),%xmm1
vmovdqu -16*$SZ+32($inp),%xmm2
lea $TABLE+0x80(%rip),$Tbl # size optimization
vmovdqu -16*$SZ+48($inp),%xmm3
vmovdqu -16*$SZ+64($inp),%xmm4
vmovdqu -16*$SZ+80($inp),%xmm5
vmovdqu -16*$SZ+96($inp),%xmm6
vmovdqu -16*$SZ+112($inp),%xmm7
#mov $inp,$_inp # offload $inp
vmovdqa `$SZ*2*$rounds-0x80`($Tbl),$t2
vinserti128 \$1,(%r12),@X[0],@X[0]
vinserti128 \$1,16(%r12),@X[1],@X[1]
vpshufb $t2,@X[0],@X[0]
vinserti128 \$1,32(%r12),@X[2],@X[2]
vpshufb $t2,@X[1],@X[1]
vinserti128 \$1,48(%r12),@X[3],@X[3]
vpshufb $t2,@X[2],@X[2]
vinserti128 \$1,64(%r12),@X[4],@X[4]
vpshufb $t2,@X[3],@X[3]
vinserti128 \$1,80(%r12),@X[5],@X[5]
vpshufb $t2,@X[4],@X[4]
vinserti128 \$1,96(%r12),@X[6],@X[6]
vpshufb $t2,@X[5],@X[5]
vinserti128 \$1,112(%r12),@X[7],@X[7]
vpaddq -0x80($Tbl),@X[0],$t0
vpshufb $t2,@X[6],@X[6]
vpaddq -0x60($Tbl),@X[1],$t1
vpshufb $t2,@X[7],@X[7]
vpaddq -0x40($Tbl),@X[2],$t2
vpaddq -0x20($Tbl),@X[3],$t3
vmovdqa $t0,0x00(%rsp)
vpaddq 0x00($Tbl),@X[4],$t0
vmovdqa $t1,0x20(%rsp)
vpaddq 0x20($Tbl),@X[5],$t1
vmovdqa $t2,0x40(%rsp)
vpaddq 0x40($Tbl),@X[6],$t2
vmovdqa $t3,0x60(%rsp)
lea -$PUSH8(%rsp),%rsp
vpaddq 0x60($Tbl),@X[7],$t3
vmovdqa $t0,0x00(%rsp)
xor $a1,$a1
vmovdqa $t1,0x20(%rsp)
mov $B,$a3
vmovdqa $t2,0x40(%rsp)
xor $C,$a3 # magic
vmovdqa $t3,0x60(%rsp)
mov $F,$a4
add \$16*2*$SZ,$Tbl
jmp .Lavx2_00_47
.align 16
.Lavx2_00_47:
___
# Emit one SHA-512/AVX2 round group: interleaves the 512-bit schedule
# update with two BMI2 round bodies.  The stack slides down by $PUSH8
# every fourth group; $base is consumed inside the eval'd round-body
# strings.  Schedule entries ending in ';' are compound statements and
# are not interleaved with scalar work.
sub AVX2_512_00_47 () {
my $j = shift;
my $body = shift;
my @X = @_;
my @insns = (&$body,&$body); # 48 instructions
my $base = "+2*$PUSH8(%rsp)";
&lea ("%rsp","-$PUSH8(%rsp)") if (($j%4)==0);
foreach (Xupdate_512_AVX()) { # 23 instructions
eval;
if ($_ !~ /\;$/) {
eval(shift(@insns));
eval(shift(@insns));
eval(shift(@insns));
}
}
&vpaddq ($t2,@X[0],16*2*$j-0x80."($Tbl)");
foreach (@insns) { eval; } # remaining instructions
&vmovdqa ((32*$j)%$PUSH8."(%rsp)",$t2);
}
for ($i=0,$j=0; $j<8; $j++) {
&AVX2_512_00_47($j,\&bodyx_00_15,@X);
push(@X,shift(@X)); # rotate(@X)
}
&lea ($Tbl,16*2*$SZ."($Tbl)");
&cmpb (($SZ-1-0x80)."($Tbl)",0);
&jne (".Lavx2_00_47");
for ($i=0; $i<16; ) {
my $base=$i<8?"+$PUSH8(%rsp)":"(%rsp)";
foreach(bodyx_00_15()) { eval; }
}
}
$code.=<<___;
mov `2*$SZ*$rounds`(%rsp),$ctx # $_ctx
add $a1,$A
#mov `2*$SZ*$rounds+8`(%rsp),$inp # $_inp
lea `2*$SZ*($rounds-8)`(%rsp),$Tbl
add $SZ*0($ctx),$A
add $SZ*1($ctx),$B
add $SZ*2($ctx),$C
add $SZ*3($ctx),$D
add $SZ*4($ctx),$E
add $SZ*5($ctx),$F
add $SZ*6($ctx),$G
add $SZ*7($ctx),$H
mov $A,$SZ*0($ctx)
mov $B,$SZ*1($ctx)
mov $C,$SZ*2($ctx)
mov $D,$SZ*3($ctx)
mov $E,$SZ*4($ctx)
mov $F,$SZ*5($ctx)
mov $G,$SZ*6($ctx)
mov $H,$SZ*7($ctx)
cmp `$PUSH8+2*8`($Tbl),$inp # $_end
je .Ldone_avx2
xor $a1,$a1
mov $B,$a3
xor $C,$a3 # magic
mov $F,$a4
jmp .Lower_avx2
.align 16
.Lower_avx2:
___
for ($i=0; $i<8; ) {
my $base="+16($Tbl)";
foreach(bodyx_00_15()) { eval; }
}
$code.=<<___;
lea -$PUSH8($Tbl),$Tbl
cmp %rsp,$Tbl
jae .Lower_avx2
mov `2*$SZ*$rounds`(%rsp),$ctx # $_ctx
add $a1,$A
#mov `2*$SZ*$rounds+8`(%rsp),$inp # $_inp
lea `2*$SZ*($rounds-8)`(%rsp),%rsp
add $SZ*0($ctx),$A
add $SZ*1($ctx),$B
add $SZ*2($ctx),$C
add $SZ*3($ctx),$D
add $SZ*4($ctx),$E
add $SZ*5($ctx),$F
lea `2*16*$SZ`($inp),$inp # inp+=2
add $SZ*6($ctx),$G
mov $inp,%r12
add $SZ*7($ctx),$H
cmp $_end,$inp
mov $A,$SZ*0($ctx)
cmove %rsp,%r12 # next block or stale data
mov $B,$SZ*1($ctx)
mov $C,$SZ*2($ctx)
mov $D,$SZ*3($ctx)
mov $E,$SZ*4($ctx)
mov $F,$SZ*5($ctx)
mov $G,$SZ*6($ctx)
mov $H,$SZ*7($ctx)
jbe .Loop_avx2
lea (%rsp),$Tbl
.Ldone_avx2:
lea ($Tbl),%rsp
mov $_rsp,%rsi
.cfi_def_cfa %rsi,8
vzeroupper
___
$code.=<<___ if ($win64);
movaps 16*$SZ+32(%rsp),%xmm6
movaps 16*$SZ+48(%rsp),%xmm7
movaps 16*$SZ+64(%rsp),%xmm8
movaps 16*$SZ+80(%rsp),%xmm9
___
$code.=<<___ if ($win64 && $SZ>4);
movaps 16*$SZ+96(%rsp),%xmm10
movaps 16*$SZ+112(%rsp),%xmm11
___
$code.=<<___;
mov -48(%rsi),%r15
.cfi_restore %r15
mov -40(%rsi),%r14
.cfi_restore %r14
mov -32(%rsi),%r13
.cfi_restore %r13
mov -24(%rsi),%r12
.cfi_restore %r12
mov -16(%rsi),%rbp
.cfi_restore %rbp
mov -8(%rsi),%rbx
.cfi_restore %rbx
lea (%rsi),%rsp
.cfi_def_cfa_register %rsp
.Lepilogue_avx2:
ret
.cfi_endproc
.size ${func}_avx2,.-${func}_avx2
___
}}
}}}}}
# EXCEPTION_DISPOSITION handler (EXCEPTION_RECORD *rec,ULONG64 frame,
# CONTEXT *context,DISPATCHER_CONTEXT *disp)
if ($win64) {
$rec="%rcx";
$frame="%rdx";
$context="%r8";
$disp="%r9";
$code.=<<___;
.extern __imp_RtlVirtualUnwind
.type se_handler,\@abi-omnipotent
.align 16
se_handler:
push %rsi
push %rdi
push %rbx
push %rbp
push %r12
push %r13
push %r14
push %r15
pushfq
sub \$64,%rsp
mov 120($context),%rax # pull context->Rax
mov 248($context),%rbx # pull context->Rip
mov 8($disp),%rsi # disp->ImageBase
mov 56($disp),%r11 # disp->HanderlData
mov 0(%r11),%r10d # HandlerData[0]
lea (%rsi,%r10),%r10 # prologue label
cmp %r10,%rbx # context->Rip<prologue label
jb .Lin_prologue
mov 152($context),%rax # pull context->Rsp
mov 4(%r11),%r10d # HandlerData[1]
lea (%rsi,%r10),%r10 # epilogue label
cmp %r10,%rbx # context->Rip>=epilogue label
jae .Lin_prologue
___
$code.=<<___ if ($avx>1);
lea .Lavx2_shortcut(%rip),%r10
cmp %r10,%rbx # context->Rip<avx2_shortcut
jb .Lnot_in_avx2
and \$-256*$SZ,%rax
add \$`2*$SZ*($rounds-8)`,%rax
.Lnot_in_avx2:
___
$code.=<<___;
mov %rax,%rsi # put aside Rsp
mov 16*$SZ+3*8(%rax),%rax # pull $_rsp
mov -8(%rax),%rbx
mov -16(%rax),%rbp
mov -24(%rax),%r12
mov -32(%rax),%r13
mov -40(%rax),%r14
mov -48(%rax),%r15
mov %rbx,144($context) # restore context->Rbx
mov %rbp,160($context) # restore context->Rbp
mov %r12,216($context) # restore context->R12
mov %r13,224($context) # restore context->R13
mov %r14,232($context) # restore context->R14
mov %r15,240($context) # restore context->R15
lea .Lepilogue(%rip),%r10
cmp %r10,%rbx
jb .Lin_prologue # non-AVX code
lea 16*$SZ+4*8(%rsi),%rsi # Xmm6- save area
lea 512($context),%rdi # &context.Xmm6
mov \$`$SZ==4?8:12`,%ecx
.long 0xa548f3fc # cld; rep movsq
.Lin_prologue:
mov 8(%rax),%rdi
mov 16(%rax),%rsi
mov %rax,152($context) # restore context->Rsp
mov %rsi,168($context) # restore context->Rsi
mov %rdi,176($context) # restore context->Rdi
mov 40($disp),%rdi # disp->ContextRecord
mov $context,%rsi # context
mov \$154,%ecx # sizeof(CONTEXT)
.long 0xa548f3fc # cld; rep movsq
mov $disp,%rsi
xor %rcx,%rcx # arg1, UNW_FLAG_NHANDLER
mov 8(%rsi),%rdx # arg2, disp->ImageBase
mov 0(%rsi),%r8 # arg3, disp->ControlPc
mov 16(%rsi),%r9 # arg4, disp->FunctionEntry
mov 40(%rsi),%r10 # disp->ContextRecord
lea 56(%rsi),%r11 # &disp->HandlerData
lea 24(%rsi),%r12 # &disp->EstablisherFrame
mov %r10,32(%rsp) # arg5
mov %r11,40(%rsp) # arg6
mov %r12,48(%rsp) # arg7
mov %rcx,56(%rsp) # arg8, (NULL)
call *__imp_RtlVirtualUnwind(%rip)
mov \$1,%eax # ExceptionContinueSearch
add \$64,%rsp
popfq
pop %r15
pop %r14
pop %r13
pop %r12
pop %rbp
pop %rbx
pop %rdi
pop %rsi
ret
.size se_handler,.-se_handler
___
$code.=<<___ if ($SZ==4 && $shaext);
.type shaext_handler,\@abi-omnipotent
.align 16
shaext_handler:
push %rsi
push %rdi
push %rbx
push %rbp
push %r12
push %r13
push %r14
push %r15
pushfq
sub \$64,%rsp
mov 120($context),%rax # pull context->Rax
mov 248($context),%rbx # pull context->Rip
lea .Lprologue_shaext(%rip),%r10
cmp %r10,%rbx # context->Rip<.Lprologue
jb .Lin_prologue
lea .Lepilogue_shaext(%rip),%r10
cmp %r10,%rbx # context->Rip>=.Lepilogue
jae .Lin_prologue
lea -8-5*16(%rax),%rsi
lea 512($context),%rdi # &context.Xmm6
mov \$10,%ecx
.long 0xa548f3fc # cld; rep movsq
jmp .Lin_prologue
.size shaext_handler,.-shaext_handler
___
$code.=<<___;
.section .pdata
.align 4
.rva .LSEH_begin_$func
.rva .LSEH_end_$func
.rva .LSEH_info_$func
___
$code.=<<___ if ($SZ==4 && $shaext);
.rva .LSEH_begin_${func}_shaext
.rva .LSEH_end_${func}_shaext
.rva .LSEH_info_${func}_shaext
___
$code.=<<___ if ($SZ==4);
.rva .LSEH_begin_${func}_ssse3
.rva .LSEH_end_${func}_ssse3
.rva .LSEH_info_${func}_ssse3
___
$code.=<<___ if ($avx);
.rva .LSEH_begin_${func}_avx
.rva .LSEH_end_${func}_avx
.rva .LSEH_info_${func}_avx
___
$code.=<<___ if ($avx>1);
.rva .LSEH_begin_${func}_avx2
.rva .LSEH_end_${func}_avx2
.rva .LSEH_info_${func}_avx2
___
$code.=<<___;
.section .xdata
.align 8
.LSEH_info_$func:
.byte 9,0,0,0
.rva se_handler
.rva .Lprologue,.Lepilogue # HandlerData[]
___
$code.=<<___ if ($SZ==4 && $shaext);
.LSEH_info_${func}_shaext:
.byte 9,0,0,0
.rva shaext_handler
___
$code.=<<___ if ($SZ==4);
.LSEH_info_${func}_ssse3:
.byte 9,0,0,0
.rva se_handler
.rva .Lprologue_ssse3,.Lepilogue_ssse3 # HandlerData[]
___
$code.=<<___ if ($avx);
.LSEH_info_${func}_avx:
.byte 9,0,0,0
.rva se_handler
.rva .Lprologue_avx,.Lepilogue_avx # HandlerData[]
___
$code.=<<___ if ($avx>1);
.LSEH_info_${func}_avx2:
.byte 9,0,0,0
.rva se_handler
.rva .Lprologue_avx2,.Lepilogue_avx2 # HandlerData[]
___
}
# Translate SHA-NI mnemonics into raw .byte sequences so the output
# assembles even on toolchains that do not know the SHA extension.
# All three instructions live in the 0F 38 opcode map; the ModR/M byte
# uses mod=11 with the operands taken from the "%xmmS,%xmmD" pair.
# Instructions other than sha256rnds2/sha256msg1/sha256msg2 pass
# through unchanged.
#
# Args:    $instr - mnemonic, $_[0] - operand string after it.
# Returns: the rewritten (or original) assembly line.
sub sha256op38 {
    my $instr = shift;
    my %opcodelet = (
		"sha256rnds2" => 0xcb,
		"sha256msg1"  => 0xcc,
		"sha256msg2"  => 0xcd );

    # $_[0] (not the one-element slice @_[0]) is the operand string.
    if (defined($opcodelet{$instr}) && $_[0] =~ /%xmm([0-7]),\s*%xmm([0-7])/) {
	my @opcode = (0x0f, 0x38);
	push @opcode, $opcodelet{$instr};
	push @opcode, 0xc0 | ($1 & 7) | (($2 & 7) << 3);	# ModR/M
	return ".byte\t" . join(',', @opcode);
    } else {
	return $instr . "\t" . $_[0];
    }
}
# Post-process and print the generated assembly line by line:
#  - expand `...` spans by evaluating them as Perl (generation-time
#    constant arithmetic),
#  - rewrite SHA-NI mnemonics into .byte sequences via sha256op38().
foreach (split("\n",$code)) {
s/\`([^\`]*)\`/eval $1/geo;
s/\b(sha256[^\s]*)\s+(.*)/sha256op38($1,$2)/geo;
print $_,"\n";
}
# Flush errors from buffered writes surface at close; fail loudly.
close STDOUT or die "error closing STDOUT";
| endlessm/chromium-browser | third_party/boringssl/src/crypto/fipsmodule/sha/asm/sha512-x86_64.pl | Perl | bsd-3-clause | 53,518 |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by mktables from the Unicode
# database, Version 6.1.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly.
return <<'END';
0020 007E
00A0 00AC
00AE 02FF
0370 0482
048A 0590
05BE
05C0
05C3
05C6
05C8 05FF
0605 060F
061B 064A
0660 066F
0671 06D5
06DE
06E5 06E6
06E9
06EE 070E
0710
0712 072F
074B 07A5
07B1 07EA
07F4 0815
081A
0824
0828
082E 0858
085C 08E3
08FF
0904 0939
093D
0950
0958 0961
0964 0980
0984 09BB
09BD
09C5 09C6
09C9 09CA
09CE 09D6
09D8 09E1
09E4 0A00
0A04 0A3B
0A3D
0A43 0A46
0A49 0A4A
0A4E 0A50
0A52 0A6F
0A72 0A74
0A76 0A80
0A84 0ABB
0ABD
0AC6
0ACA
0ACE 0AE1
0AE4 0B00
0B04 0B3B
0B3D
0B45 0B46
0B49 0B4A
0B4E 0B55
0B58 0B61
0B64 0B81
0B83 0BBD
0BC3 0BC5
0BC9
0BCE 0BD6
0BD8 0C00
0C04 0C3D
0C45
0C49
0C4E 0C54
0C57 0C61
0C64 0C81
0C84 0CBB
0CBD
0CC5
0CC9
0CCE 0CD4
0CD7 0CE1
0CE4 0D01
0D04 0D3D
0D45
0D49
0D4E 0D56
0D58 0D61
0D64 0D81
0D84 0DC9
0DCB 0DCE
0DD5
0DD7
0DE0 0DF1
0DF4 0E30
0E32
0E3B 0E46
0E4F 0EB0
0EB2
0EBA
0EBD 0EC7
0ECE 0F17
0F1A 0F34
0F36
0F38
0F3A 0F3D
0F40 0F70
0F85
0F88 0F8C
0F98
0FBD 0FC5
0FC7 102C
1038
103F 1055
105A 105D
1061 1070
1075 1081
1083
1087 108C
108E 109C
109E 10FF
1200 135C
1360 1711
1715 1731
1735 1751
1754 1771
1774 17B3
17D4 17DC
17DE 180A
180E 18A8
18AA 191F
192C 192F
193C 19B4
19B8 19B9
19BB 1A16
1A1C 1A54
1A5F
1A61
1A63 1A64
1A7D 1A7E
1A80 1AFF
1B05 1B33
1B45 1B6A
1B74 1B7F
1B83 1BA0
1BAE 1BE5
1BF4 1C23
1C38 1CCF
1CD3
1CE9 1CEC
1CEE 1CF1
1CF5 1DBF
1DE7 1DFB
1E00 200A
2010 2027
202F 205F
2070 20CF
20F1 2CEE
2CF2 2D7E
2D80 2DDF
2E00 3029
3030 3098
309B A66E
A673
A67E A69E
A6A0 A6EF
A6F2 A801
A803 A805
A807 A80A
A80C A822
A828 A87F
A882 A8B3
A8C5 A8DF
A8F2 A925
A92E A946
A954 A95F
A97D A97F
A984 A9B2
A9C1 AA28
AA37 AA42
AA44 AA4B
AA4E AAAF
AAB1
AAB5 AAB6
AAB9 AABD
AAC0
AAC2 AAEA
AAF0 AAF4
AAF7 ABE2
ABEB
ABEE ABFF
D7A4 D7AF
D7C7 D7CA
D7FC D7FF
E000 FB1D
FB1F FDFF
FE10 FE1F
FE27 FEFE
FF00 FF9D
FFA0 FFEF
FFFC 101FC
101FE 10A00
10A04
10A07 10A0B
10A10 10A37
10A3B 10A3E
10A40 10FFF
11003 11037
11047 1107F
11083 110AF
110BB 110BC
110BE 110FF
11103 11126
11135 1117F
11183 111B2
111C1 116AA
116B8 16F50
16F7F 16F8E
16F93 1D164
1D16A 1D16C
1D183 1D184
1D18C 1D1A9
1D1AE 1D241
1D245 DFFFF
E1000 10FFFF
END
| liuyangning/WX_web | xampp/perl/lib/unicore/lib/GCB/XX.pl | Perl | mit | 2,542 |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by mktables from the Unicode
# database, Version 6.1.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly.
return <<'END';
0022
0027 0029
005B
005D
007B
007D
00AB
00BB
0F3A 0F3D
169B 169C
2018 201F
2039 203A
2045 2046
207D 207E
208D 208E
2329 232A
275B 275E
2768 2775
27C5 27C6
27E6 27EF
2983 2998
29D8 29DB
29FC 29FD
2E00 2E0D
2E1C 2E1D
2E20 2E29
3008 3011
3014 301B
301D 301F
FD3E FD3F
FE17 FE18
FE35 FE44
FE47 FE48
FE59 FE5E
FF08 FF09
FF3B
FF3D
FF5B
FF5D
FF5F FF60
FF62 FF63
END
| Dokaponteam/ITF_Project | xampp/perl/lib/unicore/lib/SB/CL.pl | Perl | mit | 788 |
#!/usr/bin/perl -w
# Input to this program should be a raw, greyscale RGB file
# Usage: rgb_histogram.pl myfile.rgb
#
# Reads fixed 3-byte RGB records, histograms the first (red) channel --
# for a greyscale image all three channels are equal -- and prints the
# non-zero bucket counts in descending order.
use strict;

my $image_filename = shift;

# Three-argument open with a lexical filehandle: the 2-arg form is
# unsafe when the filename starts with mode characters such as '>'.
open(my $imagefh, '<', $image_filename)
    || die "Unable to open $image_filename!\n";
binmode $imagefh;

# One counter per possible byte value.
my @pixelcount = (0) x 256;

{
    # Read fixed-size 3-byte records; localize $/ so the record-size
    # setting cannot leak into other code.
    local $/ = \3;
    # Stream records instead of slurping the whole image into memory.
    while (defined(my $pixel = <$imagefh>)) {
        $pixelcount[unpack('C', $pixel)]++;
    }
}
close($imagefh);

# NOTE(review): the second and third columns below are the *rank* of the
# sorted count, not the pixel value that produced it -- presumably
# intentional for this stress-test app; verify if a real histogram is
# expected by consumers of the output.
my @sortedcount = sort { $b <=> $a } @pixelcount;
foreach my $count (0 .. 255) {
    next if $sortedcount[$count] == 0;
    printf("%d %d %02x\n", $sortedcount[$count], $count, $count);
}

system("hostname -f 2>&1");
sleep(1);
package Crypt::OpenPGP::Constants;
use strict;

# Numeric constants used throughout Crypt::OpenPGP: OpenPGP packet-type
# tags plus default algorithm identifiers.
our %CONSTANTS = (
'PGP_PKT_PUBKEY_ENC' => 1,
'PGP_PKT_SIGNATURE' => 2,
'PGP_PKT_SYMKEY_ENC' => 3,
'PGP_PKT_ONEPASS_SIG' => 4,
'PGP_PKT_SECRET_KEY' => 5,
'PGP_PKT_PUBLIC_KEY' => 6,
'PGP_PKT_SECRET_SUBKEY' => 7,
'PGP_PKT_COMPRESSED' => 8,
'PGP_PKT_ENCRYPTED' => 9,
'PGP_PKT_MARKER' => 10,
'PGP_PKT_PLAINTEXT' => 11,
'PGP_PKT_RING_TRUST' => 12,
'PGP_PKT_USER_ID' => 13,
'PGP_PKT_PUBLIC_SUBKEY' => 14,
'PGP_PKT_ENCRYPTED_MDC' => 18,
'PGP_PKT_MDC' => 19,
'DEFAULT_CIPHER' => 2,
'DEFAULT_DIGEST' => 2,
'DEFAULT_COMPRESS' => 1,
);

our %TAGS;

# Constants whose names match a rule pattern are grouped under an export
# tag: every PGP_PKT_* constant belongs to the ':packet' tag.
my %RULES = (
'^PGP_PKT' => 'packet',
);
for my $re (keys %RULES) {
    $TAGS{ $RULES{$re} } = [ grep /$re/, keys %CONSTANTS ];
}

# Custom exporter: each requested name -- or every member of a ':tag'
# group -- is installed into the caller's namespace as an inlinable
# constant sub.  @_ is copied into @args because the s/^:// below
# mutates its operand, and @_ elements alias the caller's values.
sub import {
    my $class = shift;
    my @to_export;
    my @args = @_;
    for my $item (@args) {
        push @to_export,
            $item =~ s/^:// ? @{ $TAGS{$item} } : $item;
    }
    no strict 'refs';
    my $pkg = caller;
    for my $con (@to_export) {
        # Warn and *skip* unknown names; previously an undef-returning
        # constant sub was installed anyway, masking the typo.
        unless (exists $CONSTANTS{$con}) {
            warn __PACKAGE__, " does not export the constant '$con'";
            next;
        }
        *{"${pkg}::$con"} = sub () { $CONSTANTS{$con} };
    }
}

1;
__END__
=head1 NAME
Crypt::OpenPGP::Constants - Exportable constants
=head1 DESCRIPTION
I<Crypt::OpenPGP::Constants> provides a list of common and useful
constants for use in I<Crypt::OpenPGP>.
=head1 USAGE
None of the constants are exported by default; you have to ask for
them explicitly. Some of the constants are grouped into bundles that
you can grab all at once; alternatively you can just take the
individual constants, one by one.
If you wish to import a group, your I<use> statement should look
something like this:
use Crypt::OpenPGP::Constants qw( :group );
Here are the groups:
=over 4
=item * packet
All of the I<PGP_PKT_*> constants. These are constants that define
packet types.
=back
Other exportable constants, not belonging to a group, are:
=over 4
=item * DEFAULT_CIPHER
=item * DEFAULT_DIGEST
=item * DEFAULT_COMPRESS
Default cipher, digest, and compression algorithms, to be used if no
specific cipher, digest, or compression algorithm is otherwise
specified.
=back
=head1 AUTHOR & COPYRIGHTS
Please see the Crypt::OpenPGP manpage for author, copyright, and
license information.
=cut
| Dokaponteam/ITF_Project | xampp/perl/vendor/lib/Crypt/OpenPGP/Constants.pm | Perl | mit | 2,448 |
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is built by mktables from e.g. UnicodeData.txt.
# Any changes made here will be lost!
#
# This file supports:
# \p{Nd}
# \p{Nd} (and fuzzy permutations)
#
# Meaning: General Category 'Nd'
#
return <<'END';
0030 0039
0660 0669
06F0 06F9
07C0 07C9
0966 096F
09E6 09EF
0A66 0A6F
0AE6 0AEF
0B66 0B6F
0BE6 0BEF
0C66 0C6F
0CE6 0CEF
0D66 0D6F
0E50 0E59
0ED0 0ED9
0F20 0F29
1040 1049
1090 1099
17E0 17E9
1810 1819
1946 194F
19D0 19D9
1B50 1B59
1BB0 1BB9
1C40 1C49
1C50 1C59
A620 A629
A8D0 A8D9
A900 A909
AA50 AA59
FF10 FF19
104A0 104A9
1D7CE 1D7FF
END
| leighpauls/k2cro4 | third_party/cygwin/lib/perl5/5.10/unicore/lib/gc_sc/Nd.pl | Perl | bsd-3-clause | 638 |
package DDG::Goodie::ScreenResolution;
# ABSTRACT: Return the current screen resolution using javascript
use DDG::Goodie;
use strict;
zci answer_type => "screen_resolution";
zci is_cached => 1;
triggers startend => "screen resolution", "display resolution", "resolution of my screen";
# After the trigger phrase is stripped, accept only an empty remainder or
# a leading filler such as "what's the" / "what is my" / "current";
# anything else means the query is about something different.
handle remainder => sub {
return unless /^((what\'?s|what is)?\s?(the|my|current))?$/;
# The "[Loading...]" placeholder is replaced client-side by this
# goodie's javascript, which is where the resolution is detected.
return undef, structured_answer => {
data => {
title => "Your screen resolution is [Loading...]"
},
templates => {
group => 'icon',
item => 0,
options => {
moreAt => 0
}
}
};
};
1;
#
# Complex numbers and associated mathematical functions
# -- Raphael Manfredi Since Sep 1996
# -- Jarkko Hietaniemi Since Mar 1997
# -- Daniel S. Lewart Since Sep 1997
#
package Math::Complex;
{ use 5.006; }
use strict;
our $VERSION = 1.59;
use Config;
our($Inf, $ExpInf);
BEGIN {
my %DBL_MAX =
(
4 => '1.70141183460469229e+38',
8 => '1.7976931348623157e+308',
# AFAICT the 10, 12, and 16-byte long doubles
# all have the same maximum.
10 => '1.1897314953572317650857593266280070162E+4932',
12 => '1.1897314953572317650857593266280070162E+4932',
16 => '1.1897314953572317650857593266280070162E+4932',
);
my $nvsize = $Config{nvsize} ||
($Config{uselongdouble} && $Config{longdblsize}) ||
$Config{doublesize};
die "Math::Complex: Could not figure out nvsize\n"
unless defined $nvsize;
die "Math::Complex: Cannot not figure out max nv (nvsize = $nvsize)\n"
unless defined $DBL_MAX{$nvsize};
my $DBL_MAX = eval $DBL_MAX{$nvsize};
die "Math::Complex: Could not figure out max nv (nvsize = $nvsize)\n"
unless defined $DBL_MAX;
my $BIGGER_THAN_THIS = 1e30; # Must find something bigger than this.
if ($^O eq 'unicosmk') {
$Inf = $DBL_MAX;
} else {
local $SIG{FPE} = { };
local $!;
# We do want an arithmetic overflow, Inf INF inf Infinity.
for my $t (
'exp(99999)', # Enough even with 128-bit long doubles.
'inf',
'Inf',
'INF',
'infinity',
'Infinity',
'INFINITY',
'1e99999',
) {
local $^W = 0;
my $i = eval "$t+1.0";
if (defined $i && $i > $BIGGER_THAN_THIS) {
$Inf = $i;
last;
}
}
$Inf = $DBL_MAX unless defined $Inf; # Oh well, close enough.
die "Math::Complex: Could not get Infinity"
unless $Inf > $BIGGER_THAN_THIS;
$ExpInf = exp(99999);
}
# print "# On this machine, Inf = '$Inf'\n";
}
use Scalar::Util qw(set_prototype);
use warnings;
no warnings 'syntax'; # To avoid the (_) warnings.
BEGIN {
# For certain functions that we override, in 5.10 or better
# we can set a smarter prototype that will handle the lexical $_
# (also a 5.10+ feature).
if ($] >= 5.010000) {
set_prototype \&abs, '_';
set_prototype \&cos, '_';
set_prototype \&exp, '_';
set_prototype \&log, '_';
set_prototype \&sin, '_';
set_prototype \&sqrt, '_';
}
}
my $i;
my %LOGN;
# Regular expression for floating point numbers.
# These days we could use Scalar::Util::lln(), I guess.
my $gre = qr'\s*([\+\-]?(?:(?:(?:\d+(?:_\d+)*(?:\.\d*(?:_\d+)*)?|\.\d+(?:_\d+)*)(?:[eE][\+\-]?\d+(?:_\d+)*)?))|inf)'i;
require Exporter;
our @ISA = qw(Exporter);
my @trig = qw(
pi
tan
csc cosec sec cot cotan
asin acos atan
acsc acosec asec acot acotan
sinh cosh tanh
csch cosech sech coth cotanh
asinh acosh atanh
acsch acosech asech acoth acotanh
);
our @EXPORT = (qw(
i Re Im rho theta arg
sqrt log ln
log10 logn cbrt root
cplx cplxe
atan2
),
@trig);
my @pi = qw(pi pi2 pi4 pip2 pip4 Inf);
our @EXPORT_OK = @pi;
our %EXPORT_TAGS = (
'trig' => [@trig],
'pi' => [@pi],
);
use overload
'=' => \&_copy,
'+=' => \&_plus,
'+' => \&_plus,
'-=' => \&_minus,
'-' => \&_minus,
'*=' => \&_multiply,
'*' => \&_multiply,
'/=' => \&_divide,
'/' => \&_divide,
'**=' => \&_power,
'**' => \&_power,
'==' => \&_numeq,
'<=>' => \&_spaceship,
'neg' => \&_negate,
'~' => \&_conjugate,
'abs' => \&abs,
'sqrt' => \&sqrt,
'exp' => \&exp,
'log' => \&log,
'sin' => \&sin,
'cos' => \&cos,
'atan2' => \&atan2,
'""' => \&_stringify;
#
# Package "privates"
#
my %DISPLAY_FORMAT = ('style' => 'cartesian',
'polar_pretty_print' => 1);
my $eps = 1e-14; # Epsilon
#
# Object attributes (internal):
# cartesian [real, imaginary] -- cartesian form
# polar [rho, theta] -- polar form
# c_dirty cartesian form not up-to-date
# p_dirty polar form not up-to-date
# display display format (package's global when not set)
#
# Die on bad *make() arguments.
# The error is attributed to the public constructor one call frame up
# (make/emake), not to this internal helper.
sub _cannot_make {
    my ($what, $value) = @_;
    my $constructor = (caller(1))[3];
    die "$constructor: Cannot take $what of '$value'.\n";
}
# Parse a string into a (real, imaginary) pair for make().
# Accepted forms: a bare real number, "a+bi" style, or "(a,b)".
# Returns (undef, undef) when nothing matched.  '+' signs are stripped
# and literal "inf"/"-inf" are rewritten to the overflow expression
# "9**9**9" so later numeric use yields the platform infinity.
sub _make {
my $arg = shift;
my ($p, $q);
if ($arg =~ /^$gre$/) {
($p, $q) = ($1, 0);
} elsif ($arg =~ /^(?:$gre)?$gre\s*i\s*$/) {
($p, $q) = ($1 || 0, $2);
} elsif ($arg =~ /^\s*\(\s*$gre\s*(?:,\s*$gre\s*)?\)\s*$/) {
($p, $q) = ($1, $2 || 0);
}
if (defined $p) {
$p =~ s/^\+//;
$p =~ s/^(-?)inf$/"${1}9**9**9"/e;
$q =~ s/^\+//;
$q =~ s/^(-?)inf$/"${1}9**9**9"/e;
}
return ($p, $q);
}
# Parse a string into a (rho, theta) pair for emake().
# Accepted forms: "[r,t]", "[r,n pi/m]" (multiples/fractions of pi),
# "[r]", or a bare number.  Same '+'-stripping and inf rewriting as
# _make(); returns (undef, undef) when nothing matched.
sub _emake {
my $arg = shift;
my ($p, $q);
if ($arg =~ /^\s*\[\s*$gre\s*(?:,\s*$gre\s*)?\]\s*$/) {
($p, $q) = ($1, $2 || 0);
} elsif ($arg =~ m!^\s*\[\s*$gre\s*(?:,\s*([-+]?\d*\s*)?pi(?:/\s*(\d+))?\s*)?\]\s*$!) {
($p, $q) = ($1, ($2 eq '-' ? -1 : ($2 || 1)) * pi() / ($3 || 1));
} elsif ($arg =~ /^\s*\[\s*$gre\s*\]\s*$/) {
($p, $q) = ($1, 0);
} elsif ($arg =~ /^\s*$gre\s*$/) {
($p, $q) = ($1, 0);
}
if (defined $p) {
$p =~ s/^\+//;
$q =~ s/^\+//;
$p =~ s/^(-?)inf$/"${1}9**9**9"/e;
$q =~ s/^(-?)inf$/"${1}9**9**9"/e;
}
return ($p, $q);
}
# Overloaded '=': produce an independent copy of a complex number.
# The cartesian/polar coordinates are stored as array refs, so they are
# duplicated one level deep; otherwise the copy would share state with
# the original.
sub _copy {
    my ($original) = @_;
    my %clone = %$original;
    for my $form ('cartesian', 'polar') {
        $clone{$form} = [ @{ $original->{$form} } ]
            if $original->{$form};
    }
    return bless \%clone, __PACKAGE__;
}
#
# ->make
#
# Create a new complex number (cartesian form).
# Usage: ->make()          -> 0+0i
#        ->make($string)   -> parsed via _make(); "[...]" strings are
#                             delegated to emake() (polar syntax)
#        ->make($re, $im)  -> explicit parts
# Dies via _cannot_make() when a part is not numeric.
#
sub make {
my $self = bless {}, shift;
my ($re, $im);
if (@_ == 0) {
($re, $im) = (0, 0);
} elsif (@_ == 1) {
return (ref $self)->emake($_[0])
if ($_[0] =~ /^\s*\[/);
($re, $im) = _make($_[0]);
} elsif (@_ == 2) {
($re, $im) = @_;
}
if (defined $re) {
_cannot_make("real part", $re) unless $re =~ /^$gre$/;
}
$im ||= 0;
_cannot_make("imaginary part", $im) unless $im =~ /^$gre$/;
$self->_set_cartesian([$re, $im ]);
$self->display_format('cartesian');
return $self;
}
#
# ->emake
#
# Create a new complex number (exponential/polar form).
# Usage: ->emake()             -> 0
#        ->emake($string)      -> parsed via _emake(); "(..)"/"..i"
#                                 strings are delegated to make()
#        ->emake($rho,$theta)  -> explicit polar coordinates
# A negative rho is normalized to a positive one by rotating theta by
# pi.  Dies via _cannot_make() when a coordinate is not numeric.
#
sub emake {
my $self = bless {}, shift;
my ($rho, $theta);
if (@_ == 0) {
($rho, $theta) = (0, 0);
} elsif (@_ == 1) {
return (ref $self)->make($_[0])
if ($_[0] =~ /^\s*\(/ || $_[0] =~ /i\s*$/);
($rho, $theta) = _emake($_[0]);
} elsif (@_ == 2) {
($rho, $theta) = @_;
}
if (defined $rho && defined $theta) {
if ($rho < 0) {
$rho = -$rho;
$theta = ($theta <= 0) ? $theta + pi() : $theta - pi();
}
}
if (defined $rho) {
_cannot_make("rho", $rho) unless $rho =~ /^$gre$/;
}
$theta ||= 0;
_cannot_make("theta", $theta) unless $theta =~ /^$gre$/;
$self->_set_polar([$rho, $theta]);
$self->display_format('polar');
return $self;
}
sub new { &make } # For backward compatibility only; &make reuses the current @_, so all arguments are forwarded.
#
# cplx
#
# Creates a complex number from a (re, im) tuple.
# This avoids the burden of writing Math::Complex->make(re, im).
# Exported convenience constructor: cplx($re, $im).
#
sub cplx {
return __PACKAGE__->make(@_);
}
#
# cplxe
#
# Creates a complex number from a (rho, theta) tuple.
# This avoids the burden of writing Math::Complex->emake(rho, theta).
# Exported convenience constructor: cplxe($rho, $theta).
#
sub cplxe {
return __PACKAGE__->emake(@_);
}
#
# pi
#
# The number defined as pi = 180 degrees.
# (The empty () prototype makes these zero-argument constant subs,
# eligible for compile-time inlining.)
#
sub pi () { 4 * CORE::atan2(1, 1) }
#
# pi2
#
# The full circle
#
sub pi2 () { 2 * pi }
#
# pi4
#
# The full circle twice.
#
sub pi4 () { 4 * pi }
#
# pip2
#
# The quarter circle
#
sub pip2 () { pi / 2 }
#
# pip4
#
# The eighth circle.
#
sub pip4 () { pi / 4 }
#
# _uplog10
#
# 1/ln(10); used in log10() to convert natural logs to base 10.
#
sub _uplog10 () { 1 / CORE::log(10) }
#
# i
#
# The number defined as i*i = -1;
# Memoized: the singleton object is built on first call and cached in
# the file-scoped $i.  Both coordinate forms are filled in up front so
# neither is ever "dirty".
#
sub i () {
return $i if ($i);
$i = bless {};
$i->{'cartesian'} = [0, 1];
$i->{'polar'} = [1, pip2];
$i->{c_dirty} = 0;
$i->{p_dirty} = 0;
return $i;
}
#
# _ip2
#
# Half of i.
#
sub _ip2 () { i / 2 }
#
# Attribute access/set routines.
# _cartesian/_polar return the requested coordinate pair, lazily
# recomputing it first when the cached form is stale ("dirty").
# _set_cartesian/_set_polar store one form and mark the *other* form
# dirty so it gets recomputed on next access.
#
sub _cartesian {$_[0]->{c_dirty} ?
$_[0]->_update_cartesian : $_[0]->{'cartesian'}}
sub _polar {$_[0]->{p_dirty} ?
$_[0]->_update_polar : $_[0]->{'polar'}}
sub _set_cartesian { $_[0]->{p_dirty}++; $_[0]->{c_dirty} = 0;
$_[0]->{'cartesian'} = $_[1] }
sub _set_polar { $_[0]->{c_dirty}++; $_[0]->{p_dirty} = 0;
$_[0]->{'polar'} = $_[1] }
#
# ->_update_cartesian
#
# Refresh the cached cartesian form from an up-to-date polar form,
# clear the cartesian-dirty flag, and return the new [x, y] pair.
#
sub _update_cartesian {
    my ($self) = @_;
    my ($rho, $theta) = @{ $self->{'polar'} };
    $self->{c_dirty} = 0;
    $self->{'cartesian'} =
        [ $rho * CORE::cos($theta), $rho * CORE::sin($theta) ];
    return $self->{'cartesian'};
}
#
#
# ->_update_polar
#
# Refresh the cached polar form from an up-to-date cartesian form,
# clear the polar-dirty flag, and return the new [rho, theta] pair.
#
sub _update_polar {
    my ($self) = @_;
    my ($x, $y) = @{ $self->{'cartesian'} };
    $self->{p_dirty} = 0;
    # The origin gets the canonical representation [0, 0] instead of
    # whatever atan2(0, 0) would produce.
    if ($x == 0 && $y == 0) {
        return $self->{'polar'} = [0, 0];
    }
    return $self->{'polar'} = [ CORE::sqrt($x * $x + $y * $y),
                                CORE::atan2($y, $x) ];
}
#
# (_plus)
#
# Computes z1+z2 (overloaded '+' and '+=').
# When $regular is undef the call came from '+=' and $z1 is mutated in
# place; otherwise a new object is returned.  A non-reference $z2 is
# promoted to a complex number first.
#
sub _plus {
my ($z1, $z2, $regular) = @_;
my ($re1, $im1) = @{$z1->_cartesian};
$z2 = cplx($z2) unless ref $z2;
my ($re2, $im2) = ref $z2 ? @{$z2->_cartesian} : ($z2, 0);
unless (defined $regular) {
$z1->_set_cartesian([$re1 + $re2, $im1 + $im2]);
return $z1;
}
return (ref $z1)->make($re1 + $re2, $im1 + $im2);
}
#
# (_minus)
#
# Computes z1-z2 (overloaded '-' and '-=').
# $inverted is true when overloading swapped the operands (plain - obj),
# in which case the difference is negated.  undef $inverted means '-='
# and $z1 is mutated in place.
#
sub _minus {
my ($z1, $z2, $inverted) = @_;
my ($re1, $im1) = @{$z1->_cartesian};
$z2 = cplx($z2) unless ref $z2;
my ($re2, $im2) = @{$z2->_cartesian};
unless (defined $inverted) {
$z1->_set_cartesian([$re1 - $re2, $im1 - $im2]);
return $z1;
}
return $inverted ?
(ref $z1)->make($re2 - $re1, $im2 - $im1) :
(ref $z1)->make($re1 - $re2, $im1 - $im2);
}
#
# (_multiply)
#
# Computes z1*z2 (overloaded '*' and '*=').
# When both operands already have clean polar forms, the product is
# computed in polar coordinates (multiply moduli, add arguments, then
# renormalize theta into (-pi, pi]); otherwise the cartesian formula
# (x1x2-y1y2, x1y2+y1x2) is used.  undef $regular means '*=' mutation.
#
sub _multiply {
my ($z1, $z2, $regular) = @_;
if ($z1->{p_dirty} == 0 and ref $z2 and $z2->{p_dirty} == 0) {
# if both polar better use polar to avoid rounding errors
my ($r1, $t1) = @{$z1->_polar};
my ($r2, $t2) = @{$z2->_polar};
my $t = $t1 + $t2;
if ($t > pi()) { $t -= pi2 }
elsif ($t <= -pi()) { $t += pi2 }
unless (defined $regular) {
$z1->_set_polar([$r1 * $r2, $t]);
return $z1;
}
return (ref $z1)->emake($r1 * $r2, $t);
} else {
my ($x1, $y1) = @{$z1->_cartesian};
if (ref $z2) {
my ($x2, $y2) = @{$z2->_cartesian};
return (ref $z1)->make($x1*$x2-$y1*$y2, $x1*$y2+$y1*$x2);
} else {
return (ref $z1)->make($x1*$z2, $y1*$z2);
}
}
}
#
# _divbyzero
#
# Die on division by zero, naming the offending expression and, when
# given, the sub-expression that evaluated to the zero divisor.  The
# failure location reported is the caller's caller -- the user code
# that actually performed the division.
#
sub _divbyzero {
    my ($op, $divisor) = @_;
    my $message = "$op: Division by zero.\n";
    if (defined $divisor) {
        $message .= "(Because in the definition of $op, the divisor ";
        # Naming a literal "0" divisor would be redundant.
        $message .= "$divisor " unless "$divisor" eq '0';
        $message .= "is 0)\n";
    }
    my ($file, $line) = (caller(1))[1, 2];
    $message .= "Died at $file line $line.\n";
    die $message;
}
#
# (_divide)
#
# Computes z1/z2 (overloaded '/' and '/=').
# $inverted is true when overloading swapped the operands, i.e. the
# real computation is z2/z1.  When both operands have clean polar
# forms, the quotient is computed in polar coordinates (divide moduli,
# subtract arguments, renormalize theta into (-pi, pi]); otherwise the
# standard cartesian formula with denominator |z2|^2 is used.
# Dies via _divbyzero() on a zero divisor.
#
sub _divide {
my ($z1, $z2, $inverted) = @_;
if ($z1->{p_dirty} == 0 and ref $z2 and $z2->{p_dirty} == 0) {
# if both polar better use polar to avoid rounding errors
my ($r1, $t1) = @{$z1->_polar};
my ($r2, $t2) = @{$z2->_polar};
my $t;
if ($inverted) {
_divbyzero "$z2/0" if ($r1 == 0);
$t = $t2 - $t1;
if ($t > pi()) { $t -= pi2 }
elsif ($t <= -pi()) { $t += pi2 }
return (ref $z1)->emake($r2 / $r1, $t);
} else {
_divbyzero "$z1/0" if ($r2 == 0);
$t = $t1 - $t2;
if ($t > pi()) { $t -= pi2 }
elsif ($t <= -pi()) { $t += pi2 }
return (ref $z1)->emake($r1 / $r2, $t);
}
} else {
my ($d, $x2, $y2);
if ($inverted) {
($x2, $y2) = @{$z1->_cartesian};
$d = $x2*$x2 + $y2*$y2;
_divbyzero "$z2/0" if $d == 0;
return (ref $z1)->make(($x2*$z2)/$d, -($y2*$z2)/$d);
} else {
my ($x1, $y1) = @{$z1->_cartesian};
if (ref $z2) {
($x2, $y2) = @{$z2->_cartesian};
$d = $x2*$x2 + $y2*$y2;
_divbyzero "$z1/0" if $d == 0;
my $u = ($x1*$x2 + $y1*$y2)/$d;
my $v = ($y1*$x2 - $x1*$y2)/$d;
return (ref $z1)->make($u, $v);
} else {
_divbyzero "$z1/0" if $z2 == 0;
return (ref $z1)->make($x1/$z2, $y1/$z2);
}
}
}
}
#
# (_power)
#
# Computes z1**z2 = exp(z2 * log z1)).
#
# Overloaded '**': z1**z2 = exp(z2 * log z1). $inverted means the
# operands arrived swapped (compute z2**z1).
sub _power {
my ($z1, $z2, $inverted) = @_;
# Trivial cases first: anything**0 is 1, 1**anything is 1, and
# 0**w is 0 for Re(w) > 0.
if ($inverted) {
return 1 if $z1 == 0 || $z2 == 1;
return 0 if $z2 == 0 && Re($z1) > 0;
} else {
return 1 if $z2 == 0 || $z1 == 1;
return 0 if $z1 == 0 && Re($z2) > 0;
}
my $w = $inverted ? &exp($z1 * &log($z2))
: &exp($z2 * &log($z1));
# If both arguments cartesian, return cartesian, else polar.
return $z1->{c_dirty} == 0 &&
(not ref $z2 or $z2->{c_dirty} == 0) ?
cplx(@{$w->_cartesian}) : $w;
}
#
# (_spaceship)
#
# Computes z1 <=> z2.
# Sorts on the real part first, then on the imaginary part. Thus 2-4i < 3+8i.
#
# Overloaded '<=>': order by real part, then by imaginary part.
# Plain reals are treated as having a zero imaginary part; the sign is
# flipped when overload delivered the operands swapped.
sub _spaceship {
    my ($lhs, $rhs, $swapped) = @_;
    my ($lre, $lim) = ref $lhs ? @{$lhs->_cartesian} : ($lhs, 0);
    my ($rre, $rim) = ref $rhs ? @{$rhs->_cartesian} : ($rhs, 0);
    my $sign = $swapped ? -1 : 1;
    return $sign * ($lre <=> $rre) unless $lre == $rre;
    return $sign * ($lim <=> $rim);
}
#
# (_numeq)
#
# Computes z1 == z2.
#
# (Required in addition to _spaceship() because of NaNs.)
# Overloaded '==': true iff both real and imaginary parts are equal.
# Equality is symmetric, so the overload 'swapped' flag can be ignored.
# (Kept separate from _spaceship() because of NaN semantics.)
sub _numeq {
    my ($lhs, $rhs) = @_;
    my ($lre, $lim) = ref $lhs ? @{$lhs->_cartesian} : ($lhs, 0);
    my ($rre, $rim) = ref $rhs ? @{$rhs->_cartesian} : ($rhs, 0);
    return ($lre == $rre && $lim == $rim) ? 1 : 0;
}
#
# (_negate)
#
# Computes -z.
#
# Overloaded unary '-': compute -z.
sub _negate {
my ($z) = @_;
# If the cartesian form is stale, negate in polar form: keep the
# magnitude and rotate the angle by pi, staying within (-pi, pi].
if ($z->{c_dirty}) {
my ($r, $t) = @{$z->_polar};
$t = ($t <= 0) ? $t + pi : $t - pi;
return (ref $z)->emake($r, $t);
}
my ($re, $im) = @{$z->_cartesian};
return (ref $z)->make(-$re, -$im);
}
#
# (_conjugate)
#
# Compute complex's _conjugate.
#
# Overloaded '~': compute the complex conjugate of z.
sub _conjugate {
my ($z) = @_;
# In polar form conjugation just negates the angle.
if ($z->{c_dirty}) {
my ($r, $t) = @{$z->_polar};
return (ref $z)->emake($r, -$t);
}
# In cartesian form it negates the imaginary part.
my ($re, $im) = @{$z->_cartesian};
return (ref $z)->make($re, -$im);
}
#
# (abs)
#
# Compute or set complex's norm (rho).
#
# Compute z's magnitude (rho), or set it when a second argument is
# given. Overrides CORE::abs for plain reals; operates on $_ when
# called with no arguments.
sub abs {
my ($z, $rho) = @_ ? @_ : $_;
unless (ref $z) {
if (@_ == 2) {
# @_ aliases the caller's variable, so this writes the new
# value straight back into it.
# NOTE(review): for a non-ref $z with a defined $rho, control
# then falls through to the object code below -- confirm this
# path is intended for plain-scalar targets.
$_[0] = $_[1];
} else {
return CORE::abs($z);
}
}
if (defined $rho) {
# Mutator: replace the magnitude, keep the angle, and mark the
# polar form as authoritative.
$z->{'polar'} = [ $rho, ${$z->_polar}[1] ];
$z->{p_dirty} = 0;
$z->{c_dirty} = 1;
return $rho;
} else {
return ${$z->_polar}[0];
}
}
# Normalize the angle behind the given scalar ref into (-pi, pi],
# adjusting by at most one full turn.
sub _theta {
    my ($tref) = @_;
    if    ($$tref >   pi()) { $$tref -= pi2 }
    elsif ($$tref <= -pi()) { $$tref += pi2 }
}
#
# arg
#
# Compute or set complex's argument (theta).
#
# Return z's argument (theta), or set it when a second argument is
# given. A plain real is returned unchanged.
sub arg {
my ($z, $theta) = @_;
return $z unless ref $z;
if (defined $theta) {
# Mutator: normalize the new angle into (-pi, pi], keep the
# magnitude, and mark the polar form as authoritative.
_theta(\$theta);
$z->{'polar'} = [ ${$z->_polar}[0], $theta ];
$z->{p_dirty} = 0;
$z->{c_dirty} = 1;
} else {
# Accessor: normalize before returning.
$theta = ${$z->_polar}[1];
_theta(\$theta);
}
return $theta;
}
#
# (sqrt)
#
# Compute sqrt(z).
#
# It is quite tempting to use wantarray here so that in list context
# sqrt() would return the two solutions. This, however, would
# break things like
#
# print "sqrt(z) = ", sqrt($z), "\n";
#
# The two values would be printed side by side with no intervening
# whitespace, quite confusing.
# Therefore if you want the two solutions use the root().
#
# Compute the principal square root of z. Overrides CORE::sqrt for
# plain reals; operates on $_ when called with no arguments.
sub sqrt {
my ($z) = @_ ? $_[0] : $_;
my ($re, $im) = ref $z ? @{$z->_cartesian} : ($z, 0);
# Purely real input: a real result for $re >= 0, otherwise the pure
# imaginary root i*sqrt(-re).
return $re < 0 ? cplx(0, CORE::sqrt(-$re)) : CORE::sqrt($re)
if $im == 0;
# General case: sqrt([r, t]) = [sqrt(r), t/2].
my ($r, $t) = @{$z->_polar};
return (ref $z)->emake(CORE::sqrt($r), $t/2);
}
#
# cbrt
#
# Compute cbrt(z) (cubic root).
#
# Why are we not returning three values? The same answer as for sqrt().
#
# Compute the cube root of z.
sub cbrt {
my ($z) = @_;
# Real input: real cube root via exp(log(|z|)/3), preserving sign
# (log of a negative number would be undefined).
return $z < 0 ?
-CORE::exp(CORE::log(-$z)/3) :
($z > 0 ? CORE::exp(CORE::log($z)/3): 0)
unless ref $z;
# Complex input: cbrt([r, t]) = [r**(1/3), t/3]; guard log(0).
my ($r, $t) = @{$z->_polar};
return 0 if $r == 0;
return (ref $z)->emake(CORE::exp(CORE::log($r)/3), $t/3);
}
#
# _rootbad
#
# Die on bad root.
#
# Raise a fatal error for an invalid root rank $n (non-integer or
# < 1), reported from the caller's caller perspective.
sub _rootbad {
    my ($n) = @_;
    my $msg = "Root '$n' illegal, root rank must be positive integer.\n";
    my (undef, $file, $line) = caller(1);
    $msg .= "Died at $file line $line.\n";
    die $msg;
}
#
# root
#
# Computes all nth root for z, returning an array whose size is n.
# `n' must be a positive integer.
#
# The roots are given by (for k = 0..n-1):
#
# z^(1/n) = r^(1/n) (cos ((t+2 k pi)/n) + i sin ((t+2 k pi)/n))
#
# Compute the n-th roots of z: all n of them as a list when called as
# root(z, n), or just the k-th (0-based) when called as root(z, n, k).
sub root {
my ($z, $n, $k) = @_;
_rootbad($n) if ($n < 1 or int($n) != $n);
# A plain real is treated as [|z|, 0] or [|z|, pi].
my ($r, $t) = ref $z ?
@{$z->_polar} : (CORE::abs($z), $z >= 0 ? 0 : pi);
my $theta_inc = pi2 / $n;
my $rho = $r ** (1/$n);
# Return cartesian results when the input carried a clean cartesian.
my $cartesian = ref $z && $z->{c_dirty} == 0;
if (@_ == 2) {
# All n roots: equally spaced points on the circle of radius rho,
# starting at angle t/n.
my @root;
for (my $i = 0, my $theta = $t / $n;
$i < $n;
$i++, $theta += $theta_inc) {
my $w = cplxe($rho, $theta);
# Yes, $cartesian is loop invariant.
push @root, $cartesian ? cplx(@{$w->_cartesian}) : $w;
}
return @root;
} elsif (@_ == 3) {
# Only the k-th root was requested.
my $w = cplxe($rho, $t / $n + $k * $theta_inc);
return $cartesian ? cplx(@{$w->_cartesian}) : $w;
}
}
#
# Re
#
# Return or set Re(z).
#
# Return z's real part, or set it when a second argument is given.
# A plain real is its own real part.
sub Re {
my ($z, $Re) = @_;
return $z unless ref $z;
if (defined $Re) {
# Mutator: replace the real part, keep the imaginary part, and
# mark the cartesian form as authoritative.
$z->{'cartesian'} = [ $Re, ${$z->_cartesian}[1] ];
$z->{c_dirty} = 0;
$z->{p_dirty} = 1;
} else {
return ${$z->_cartesian}[0];
}
}
#
# Im
#
# Return or set Im(z).
#
# Return z's imaginary part, or set it when a second argument is
# given. A plain real has imaginary part 0.
sub Im {
my ($z, $Im) = @_;
return 0 unless ref $z;
if (defined $Im) {
# Mutator: replace the imaginary part, keep the real part, and
# mark the cartesian form as authoritative.
$z->{'cartesian'} = [ ${$z->_cartesian}[0], $Im ];
$z->{c_dirty} = 0;
$z->{p_dirty} = 1;
} else {
return ${$z->_cartesian}[1];
}
}
#
# rho
#
# Return or set rho(w).
#
# rho is the polar magnitude: delegate to abs(), which also handles
# the mutator form. @_ is passed through so aliasing is preserved.
sub rho { return Math::Complex::abs(@_) }
#
# theta
#
# Return or set theta(w).
#
# theta is the polar angle: delegate to arg(), which also handles
# the mutator form. @_ is passed through so aliasing is preserved.
sub theta { return Math::Complex::arg(@_) }
#
# (exp)
#
# Computes exp(z).
#
# Compute exp(z). Overrides CORE::exp for plain reals; operates on
# $_ when called with no arguments.
sub exp {
my ($z) = @_ ? @_ : $_;
return CORE::exp($z) unless ref $z;
# exp(x + iy) = exp(x) * (cos y + i sin y), i.e. [exp(x), y] polar.
my ($x, $y) = @{$z->_cartesian};
return (ref $z)->emake(CORE::exp($x), $y);
}
#
# _logofzero
#
# Die on logarithm of zero.
#
# Raise a fatal "Logarithm of zero" error for operation $op, optionally
# naming the $arg expression that evaluated to zero. The error is
# reported from the caller's caller so the user sees their own line.
sub _logofzero {
    my ($op, $arg) = @_;
    my $msg = "$op: Logarithm of zero.\n";
    if (defined $arg) {
        $msg .= "(Because in the definition of $op, the argument ";
        $msg .= "$arg " unless $arg eq '0';
        $msg .= "is 0)\n";
    }
    my (undef, $file, $line) = caller(1);
    $msg .= "Died at $file line $line.\n";
    die $msg;
}
#
# (log)
#
# Compute log(z).
#
# Compute the principal value of log(z). Overrides CORE::log for
# plain reals; operates on $_ when called with no arguments.
sub log {
my ($z) = @_ ? @_ : $_;
unless (ref $z) {
_logofzero("log") if $z == 0;
# log of a negative real is log(-z) + i*pi.
return $z > 0 ? CORE::log($z) : cplx(CORE::log(-$z), pi);
}
my ($r, $t) = @{$z->_polar};
_logofzero("log") if $r == 0;
# Normalize the angle into (-pi, pi] for the principal value.
if ($t > pi()) { $t -= pi2 }
elsif ($t <= -pi()) { $t += pi2 }
return (ref $z)->make(CORE::log($r), $t);
}
#
# ln
#
# Alias for log().
#
# ln() is a plain alias for log(); @_ is passed through unchanged.
sub ln { return Math::Complex::log(@_) }
#
# log10
#
# Compute log10(z).
#
sub log10 {
# log10(z) = log(z) / log(10); _uplog10 holds the precomputed
# constant 1/log(10).
return Math::Complex::log($_[0]) * _uplog10;
}
#
# logn
#
# Compute logn(z,n) = log(z) / log(n)
#
# Compute the base-n logarithm: logn(z, n) = log(z) / log(n).
sub logn {
my ($z, $n) = @_;
$z = cplx($z, 0) unless ref $z;
# %LOGN caches log(n) per base across calls.
my $logn = $LOGN{$n};
$logn = $LOGN{$n} = CORE::log($n) unless defined $logn; # Cache log(n)
return &log($z) / $logn;
}
#
# (cos)
#
# Compute cos(z) = (exp(iz) + exp(-iz))/2.
#
# Compute cos(z). Overrides CORE::cos for plain reals; operates on
# $_ when called with no arguments.
sub cos {
my ($z) = @_ ? @_ : $_;
return CORE::cos($z) unless ref $z;
# cos(x + iy) = cos(x)cosh(y) - i sin(x)sinh(y), with cosh/sinh
# written out via exp(y) and exp(-y).
my ($x, $y) = @{$z->_cartesian};
my $ey = CORE::exp($y);
my $sx = CORE::sin($x);
my $cx = CORE::cos($x);
# Guard exp underflow to 0 so 1/$ey cannot divide by zero.
my $ey_1 = $ey ? 1 / $ey : Inf();
return (ref $z)->make($cx * ($ey + $ey_1)/2,
$sx * ($ey_1 - $ey)/2);
}
#
# (sin)
#
# Compute sin(z) = (exp(iz) - exp(-iz))/2.
#
# Compute sin(z). Overrides CORE::sin for plain reals; operates on
# $_ when called with no arguments.
sub sin {
my ($z) = @_ ? @_ : $_;
return CORE::sin($z) unless ref $z;
# sin(x + iy) = sin(x)cosh(y) + i cos(x)sinh(y), with cosh/sinh
# written out via exp(y) and exp(-y).
my ($x, $y) = @{$z->_cartesian};
my $ey = CORE::exp($y);
my $sx = CORE::sin($x);
my $cx = CORE::cos($x);
# Guard exp underflow to 0 so 1/$ey cannot divide by zero.
my $ey_1 = $ey ? 1 / $ey : Inf();
return (ref $z)->make($sx * ($ey + $ey_1)/2,
$cx * ($ey - $ey_1)/2);
}
#
# tan
#
# Compute tan(z) = sin(z) / cos(z).
#
# tan(z) = sin(z) / cos(z); fatal via _divbyzero when cos(z) == 0.
sub tan {
    my ($z) = @_;
    my $cosine = &cos($z);
    _divbyzero "tan($z)", "cos($z)" if $cosine == 0;
    return &sin($z) / $cosine;
}
#
# sec
#
# Computes the secant sec(z) = 1 / cos(z).
#
# sec(z) = 1 / cos(z); fatal via _divbyzero when cos(z) == 0.
sub sec {
    my ($z) = @_;
    my $cosine = &cos($z);
    _divbyzero "sec($z)", "cos($z)" if $cosine == 0;
    return 1 / $cosine;
}
#
# csc
#
# Computes the cosecant csc(z) = 1 / sin(z).
#
# csc(z) = 1 / sin(z); fatal via _divbyzero when sin(z) == 0.
sub csc {
    my ($z) = @_;
    my $sine = &sin($z);
    _divbyzero "csc($z)", "sin($z)" if $sine == 0;
    return 1 / $sine;
}
#
# cosec
#
# Alias for csc().
#
# cosec() is a plain alias for csc(); @_ is passed through unchanged.
sub cosec { return Math::Complex::csc(@_) }
#
# cot
#
# Computes cot(z) = cos(z) / sin(z).
#
# cot(z) = cos(z) / sin(z); fatal via _divbyzero when sin(z) == 0.
sub cot {
    my ($z) = @_;
    my $sine = &sin($z);
    _divbyzero "cot($z)", "sin($z)" if $sine == 0;
    return &cos($z) / $sine;
}
#
# cotan
#
# Alias for cot().
#
# cotan() is a plain alias for cot(); @_ is passed through unchanged.
sub cotan { return Math::Complex::cot(@_) }
#
# acos
#
# Computes the arc cosine acos(z) = -i log(z + sqrt(z*z-1)).
#
# Compute the arc cosine acos(z) = -i log(z + sqrt(z*z-1)).
sub acos {
my $z = $_[0];
# Real argument in [-1, 1]: the answer is real.
return CORE::atan2(CORE::sqrt(1-$z*$z), $z)
if (! ref $z) && CORE::abs($z) <= 1;
$z = cplx($z, 0) unless ref $z;
my ($x, $y) = @{$z->_cartesian};
return 0 if $x == 1 && $y == 0;
# Numerically stable form using the distances $t1, $t2 from z to the
# branch points -1 and +1.
my $t1 = CORE::sqrt(($x+1)*($x+1) + $y*$y);
my $t2 = CORE::sqrt(($x-1)*($x-1) + $y*$y);
my $alpha = ($t1 + $t2)/2;
my $beta = ($t1 - $t2)/2;
# Clamp tiny floating-point excursions outside the mathematically
# guaranteed ranges alpha >= 1 and |beta| <= 1.
$alpha = 1 if $alpha < 1;
if ($beta > 1) { $beta = 1 }
elsif ($beta < -1) { $beta = -1 }
my $u = CORE::atan2(CORE::sqrt(1-$beta*$beta), $beta);
my $v = CORE::log($alpha + CORE::sqrt($alpha*$alpha-1));
# Pick the branch so the imaginary part carries the correct sign.
$v = -$v if $y > 0 || ($y == 0 && $x < -1);
return (ref $z)->make($u, $v);
}
#
# asin
#
# Computes the arc sine asin(z) = -i log(iz + sqrt(1-z*z)).
#
# Compute the arc sine asin(z) = -i log(iz + sqrt(1-z*z)).
sub asin {
my $z = $_[0];
# Real argument in [-1, 1]: the answer is real.
return CORE::atan2($z, CORE::sqrt(1-$z*$z))
if (! ref $z) && CORE::abs($z) <= 1;
$z = cplx($z, 0) unless ref $z;
my ($x, $y) = @{$z->_cartesian};
return 0 if $x == 0 && $y == 0;
# Numerically stable form using the distances $t1, $t2 from z to the
# branch points -1 and +1 (same decomposition as acos).
my $t1 = CORE::sqrt(($x+1)*($x+1) + $y*$y);
my $t2 = CORE::sqrt(($x-1)*($x-1) + $y*$y);
my $alpha = ($t1 + $t2)/2;
my $beta = ($t1 - $t2)/2;
# Clamp tiny floating-point excursions outside the mathematically
# guaranteed ranges alpha >= 1 and |beta| <= 1.
$alpha = 1 if $alpha < 1;
if ($beta > 1) { $beta = 1 }
elsif ($beta < -1) { $beta = -1 }
my $u = CORE::atan2($beta, CORE::sqrt(1-$beta*$beta));
my $v = -CORE::log($alpha + CORE::sqrt($alpha*$alpha-1));
# Pick the branch so the imaginary part carries the correct sign.
$v = -$v if $y > 0 || ($y == 0 && $x < -1);
return (ref $z)->make($u, $v);
}
#
# atan
#
# Computes the arc tangent atan(z) = i/2 log((i+z) / (i-z)).
#
# Compute the arc tangent atan(z) = i/2 log((i+z) / (i-z)).
sub atan {
my ($z) = @_;
return CORE::atan2($z, 1) unless ref $z;
my ($x, $y) = ref $z ? @{$z->_cartesian} : ($z, 0);
return 0 if $x == 0 && $y == 0;
# atan has singularities at +i and -i.
_divbyzero "atan(i)" if ( $z == i);
_logofzero "atan(-i)" if (-$z == i); # -i is a bad file test...
# _ip2 is the precomputed constant i/2.
my $log = &log((i + $z) / (i - $z));
return _ip2 * $log;
}
#
# asec
#
# Computes the arc secant asec(z) = acos(1 / z).
#
# asec(z) = acos(1 / z); fatal via _divbyzero when z == 0.
sub asec {
    my ($z) = @_;
    _divbyzero "asec($z)", $z if $z == 0;
    return acos(1 / $z);
}
#
# acsc
#
# Computes the arc cosecant acsc(z) = asin(1 / z).
#
# acsc(z) = asin(1 / z); fatal via _divbyzero when z == 0.
sub acsc {
    my ($z) = @_;
    _divbyzero "acsc($z)", $z if $z == 0;
    return asin(1 / $z);
}
#
# acosec
#
# Alias for acsc().
#
# acosec() is a plain alias for acsc(); @_ is passed through unchanged.
sub acosec { return Math::Complex::acsc(@_) }
#
# acot
#
# Computes the arc cotangent acot(z) = atan(1 / z)
#
# Compute the arc cotangent acot(z) = atan(1 / z).
sub acot {
my ($z) = @_;
_divbyzero "acot(0)" if $z == 0;
# Real argument: choose atan2 arguments by sign so the result lands
# in the conventional range for arc cotangent.
return ($z >= 0) ? CORE::atan2(1, $z) : CORE::atan2(-1, -$z)
unless ref $z;
# Singularities at +i and -i, where 1/z hits atan's singular points.
_divbyzero "acot(i)" if ($z - i == 0);
_logofzero "acot(-i)" if ($z + i == 0);
return atan(1 / $z);
}
#
# acotan
#
# Alias for acot().
#
# acotan() is a plain alias for acot(); @_ is passed through unchanged.
sub acotan { return Math::Complex::acot(@_) }
#
# cosh
#
# Computes the hyperbolic cosine cosh(z) = (exp(z) + exp(-z))/2.
#
# Compute the hyperbolic cosine cosh(z) = (exp(z) + exp(-z))/2.
sub cosh {
my ($z) = @_;
my $ex;
unless (ref $z) {
$ex = CORE::exp($z);
# Guard overflow ($ex equals the cached $ExpInf) and underflow
# ($ex == 0): both limits are +infinity for cosh.
return $ex ? ($ex == $ExpInf ? Inf() : ($ex + 1/$ex)/2) : Inf();
}
# cosh(x + iy) = cos(y)cosh(x) + i sin(y)sinh(x), with cosh/sinh
# written out via exp(x) and exp(-x).
my ($x, $y) = @{$z->_cartesian};
$ex = CORE::exp($x);
# Guard exp underflow to 0 so 1/$ex cannot divide by zero.
my $ex_1 = $ex ? 1 / $ex : Inf();
return (ref $z)->make(CORE::cos($y) * ($ex + $ex_1)/2,
CORE::sin($y) * ($ex - $ex_1)/2);
}
#
# sinh
#
# Computes the hyperbolic sine sinh(z) = (exp(z) - exp(-z))/2.
#
# Compute the hyperbolic sine sinh(z) = (exp(z) - exp(-z))/2.
#
# Fix: the original computed $cy = cos($y) and $sy = sin($y) and then
# never used them, calling CORE::cos($y)/CORE::sin($y) a second time in
# the return expression. The precomputed values are now used; results
# are identical.
sub sinh {
    my ($z) = @_;
    my $ex;
    unless (ref $z) {
        return 0 if $z == 0;
        $ex = CORE::exp($z);
        # Guard overflow ($ex equals the cached $ExpInf) and underflow
        # ($ex == 0): the limits are +infinity and -infinity.
        return $ex ? ($ex == $ExpInf ? Inf() : ($ex - 1/$ex)/2) : -Inf();
    }
    # sinh(x + iy) = cos(y)sinh(x) + i sin(y)cosh(x), with cosh/sinh
    # written out via exp(x) and exp(-x).
    my ($x, $y) = @{$z->_cartesian};
    my $cy = CORE::cos($y);
    my $sy = CORE::sin($y);
    $ex = CORE::exp($x);
    # Guard exp underflow to 0 so 1/$ex cannot divide by zero.
    my $ex_1 = $ex ? 1 / $ex : Inf();
    return (ref $z)->make($cy * ($ex - $ex_1)/2,
                          $sy * ($ex + $ex_1)/2);
}
#
# tanh
#
# Computes the hyperbolic tangent tanh(z) = sinh(z) / cosh(z).
#
# tanh(z) = sinh(z) / cosh(z). Returns exact +/-1 when cosh and sinh
# agree up to sign (large |Re z|), avoiding an inf/inf division.
sub tanh {
    my ($z) = @_;
    my $ch = cosh($z);
    _divbyzero "tanh($z)", "cosh($z)" if $ch == 0;
    my $sh = sinh($z);
    return  1 if $ch ==  $sh;
    return -1 if $ch == -$sh;
    return $sh / $ch;
}
#
# sech
#
# Computes the hyperbolic secant sech(z) = 1 / cosh(z).
#
# sech(z) = 1 / cosh(z); fatal via _divbyzero when cosh(z) == 0.
sub sech {
    my ($z) = @_;
    my $ch = cosh($z);
    _divbyzero "sech($z)", "cosh($z)" if $ch == 0;
    return 1 / $ch;
}
#
# csch
#
# Computes the hyperbolic cosecant csch(z) = 1 / sinh(z).
#
# csch(z) = 1 / sinh(z); fatal via _divbyzero when sinh(z) == 0.
sub csch {
    my ($z) = @_;
    my $sh = sinh($z);
    _divbyzero "csch($z)", "sinh($z)" if $sh == 0;
    return 1 / $sh;
}
#
# cosech
#
# Alias for csch().
#
# cosech() is a plain alias for csch(); @_ is passed through unchanged.
sub cosech { return Math::Complex::csch(@_) }
#
# coth
#
# Computes the hyperbolic cotangent coth(z) = cosh(z) / sinh(z).
#
# coth(z) = cosh(z) / sinh(z). Returns exact +/-1 when cosh and sinh
# agree up to sign (large |Re z|), avoiding an inf/inf division.
sub coth {
    my ($z) = @_;
    my $sh = sinh($z);
    _divbyzero "coth($z)", "sinh($z)" if $sh == 0;
    my $ch = cosh($z);
    return  1 if $ch ==  $sh;
    return -1 if $ch == -$sh;
    return $ch / $sh;
}
#
# cotanh
#
# Alias for coth().
#
# cotanh() is a plain alias for coth(); @_ is passed through unchanged.
sub cotanh { return Math::Complex::coth(@_) }
#
# acosh
#
# Computes the area/inverse hyperbolic cosine acosh(z) = log(z + sqrt(z*z-1)).
#
# Compute the inverse hyperbolic cosine acosh(z) = log(z + sqrt(z*z-1)).
sub acosh {
my ($z) = @_;
unless (ref $z) {
$z = cplx($z, 0);
}
my ($re, $im) = @{$z->_cartesian};
if ($im == 0) {
# Real z >= 1: the classic real-valued formula applies.
return CORE::log($re + CORE::sqrt($re*$re - 1))
if $re >= 1;
# Real |z| < 1: the result is purely imaginary.
return cplx(0, CORE::atan2(CORE::sqrt(1 - $re*$re), $re))
if CORE::abs($re) < 1;
}
my $t = &sqrt($z * $z - 1) + $z;
# Try Taylor if looking bad (this usually means that
# $z was large negative, therefore the sqrt is really
# close to abs(z), summing that with z...)
$t = 1/(2 * $z) - 1/(8 * $z**3) + 1/(16 * $z**5) - 5/(128 * $z**7)
if $t == 0;
my $u = &log($t);
# Fix the branch for real z <= -1 so the imaginary part sign is right.
$u->Im(-$u->Im) if $re < 0 && $im == 0;
return $re < 0 ? -$u : $u;
}
#
# asinh
#
# Computes the area/inverse hyperbolic sine asinh(z) = log(z + sqrt(z*z+1))
#
# Compute the inverse hyperbolic sine asinh(z) = log(z + sqrt(z*z+1)).
sub asinh {
my ($z) = @_;
unless (ref $z) {
# Real shortcut; falls through to the complex path when $t is 0
# (large negative $z, where z + sqrt(z*z+1) cancels).
my $t = $z + CORE::sqrt($z*$z + 1);
return CORE::log($t) if $t;
}
my $t = &sqrt($z * $z + 1) + $z;
# Try Taylor if looking bad (this usually means that
# $z was large negative, therefore the sqrt is really
# close to abs(z), summing that with z...)
$t = 1/(2 * $z) - 1/(8 * $z**3) + 1/(16 * $z**5) - 5/(128 * $z**7)
if $t == 0;
return &log($t);
}
#
# atanh
#
# Computes the area/inverse hyperbolic tangent atanh(z) = 1/2 log((1+z) / (1-z)).
#
# Compute the inverse hyperbolic tangent
# atanh(z) = 1/2 log((1+z) / (1-z)).
sub atanh {
my ($z) = @_;
unless (ref $z) {
# Real |z| < 1: a plain real logarithm suffices.
return CORE::log((1 + $z)/(1 - $z))/2 if CORE::abs($z) < 1;
$z = cplx($z, 0);
}
# Singularities at the branch points +1 and -1.
_divbyzero 'atanh(1)', "1 - $z" if (1 - $z == 0);
_logofzero 'atanh(-1)' if (1 + $z == 0);
return 0.5 * &log((1 + $z) / (1 - $z));
}
#
# asech
#
# Computes the area/inverse hyperbolic secant asech(z) = acosh(1 / z).
#
# asech(z) = acosh(1 / z); fatal via _divbyzero when z == 0.
sub asech {
    my ($z) = @_;
    _divbyzero 'asech(0)', "$z" if $z == 0;
    return acosh(1 / $z);
}
#
# acsch
#
# Computes the area/inverse hyperbolic cosecant acsch(z) = asinh(1 / z).
#
# acsch(z) = asinh(1 / z); fatal via _divbyzero when z == 0.
sub acsch {
    my ($z) = @_;
    _divbyzero 'acsch(0)', $z if $z == 0;
    return asinh(1 / $z);
}
#
# acosech
#
# Alias for acsch().
#
# acosech() is a plain alias for acsch(); @_ is passed through unchanged.
sub acosech { return Math::Complex::acsch(@_) }
#
# acoth
#
# Computes the area/inverse hyperbolic cotangent acoth(z) = 1/2 log((1+z) / (z-1)).
#
# Compute the inverse hyperbolic cotangent
# acoth(z) = 1/2 log((1+z) / (z-1)).
sub acoth {
my ($z) = @_;
_divbyzero 'acoth(0)' if ($z == 0);
unless (ref $z) {
# Real |z| > 1: a plain real logarithm suffices.
return CORE::log(($z + 1)/($z - 1))/2 if CORE::abs($z) > 1;
$z = cplx($z, 0);
}
# Singularities at the branch points +1 and -1.
_divbyzero 'acoth(1)', "$z - 1" if ($z - 1 == 0);
_logofzero 'acoth(-1)', "1 + $z" if (1 + $z == 0);
return &log((1 + $z) / ($z - 1)) / 2;
}
#
# acotanh
#
# Alias for acoth().
#
# acotanh() is a plain alias for acoth(); @_ is passed through unchanged.
sub acotanh { return Math::Complex::acoth(@_) }
#
# (atan2)
#
# Compute atan(z1/z2), minding the right quadrant.
#
# Overloaded atan2: compute atan(z1/z2) in the right quadrant.
# Overrides CORE::atan2 for plain reals.
sub atan2 {
my ($z1, $z2, $inverted) = @_;
my ($re1, $im1, $re2, $im2);
# $inverted means overload handed us the operands swapped.
if ($inverted) {
($re1, $im1) = ref $z2 ? @{$z2->_cartesian} : ($z2, 0);
($re2, $im2) = ref $z1 ? @{$z1->_cartesian} : ($z1, 0);
} else {
($re1, $im1) = ref $z1 ? @{$z1->_cartesian} : ($z1, 0);
($re2, $im2) = ref $z2 ? @{$z2->_cartesian} : ($z2, 0);
}
if ($im1 || $im2) {
# In MATLAB the imaginary parts are ignored.
# warn "atan2: Imaginary parts ignored";
# http://documents.wolfram.com/mathematica/functions/ArcTan
# NOTE: Mathematica ArcTan[x,y] while atan2(y,x)
# Complex definition: -i log((x + iy) / sqrt(x*x + y*y)).
my $s = $z1 * $z1 + $z2 * $z2;
_divbyzero("atan2") if $s == 0;
my $i = &i;
my $r = $z2 + $z1 * $i;
return -$i * &log($r / &sqrt( $s ));
}
# Both arguments effectively real: defer to the builtin.
return CORE::atan2($re1, $re2);
}
#
# display_format
# ->display_format
#
# Set (get if no argument) the display format for all complex numbers that
# don't happen to have overridden it via ->display_format
#
# When called as an object method, this actually sets the display format for
# the current object.
#
# Valid object formats are 'c' and 'p' for cartesian and polar. The first
# letter is used actually, so the type can be fully spelled out for clarity.
#
# Get or set the display format, either package-wide (class method)
# or per-object (instance method). Accepts a lone style string or a
# parameter hash; returns the whole hash in list context, otherwise
# just the style.
sub display_format {
my $self = shift;
# Start from the package-wide defaults ...
my %display_format = %DISPLAY_FORMAT;
if (ref $self) { # Called as an object method
# ... overlaid with any settings recorded on this object.
if (exists $self->{display_format}) {
my %obj = %{$self->{display_format}};
@display_format{keys %obj} = values %obj;
}
}
if (@_ == 1) {
# Legacy single-argument convention: just the style string.
$display_format{style} = shift;
} else {
# Otherwise treat the remaining arguments as a parameter hash.
my %new = @_;
@display_format{keys %new} = values %new;
}
if (ref $self) { # Called as an object method
$self->{display_format} = { %display_format };
return
wantarray ?
%{$self->{display_format}} :
$self->{display_format}->{style};
}
# Called as a class method
%DISPLAY_FORMAT = %display_format;
return
wantarray ?
%DISPLAY_FORMAT :
$DISPLAY_FORMAT{style};
}
#
# (_stringify)
#
# Show nicely formatted complex number under its cartesian or polar form,
# depending on the current display format:
#
# . If a specific display format has been recorded for this object, use it.
# . Otherwise, use the generic current default for all complex numbers,
# which is a package global variable.
#
# Overloaded stringification: dispatch to the polar or cartesian
# renderer, honouring the per-object display format with the
# package-wide default as fallback.
sub _stringify {
    my ($z) = @_;
    my $style = $z->display_format;
    $style = $DISPLAY_FORMAT{style} unless defined $style;
    return $style =~ /^p/i
        ? $z->_stringify_polar
        : $z->_stringify_cartesian;
}
#
# ->_stringify_cartesian
#
# Stringify as a cartesian representation 'a+bi'.
#
# Render the number as a cartesian string 'a+bi'. Zero components are
# suppressed; NaN and infinities pass through verbatim; an optional
# sprintf format from display_format is applied to finite parts.
sub _stringify_cartesian {
my $z = shift;
my ($x, $y) = @{$z->_cartesian};
my ($re, $im);
my %format = $z->display_format;
my $format = $format{format};
# Real part: NaN/infinity pass through untouched; otherwise honour
# the sprintf format if set. A zero real part leaves $re undef so it
# is omitted from the output.
if ($x) {
if ($x =~ /^NaN[QS]?$/i) {
$re = $x;
} else {
if ($x =~ /^-?\Q$Inf\E$/oi) {
$re = $x;
} else {
$re = defined $format ? sprintf($format, $x) : $x;
}
}
} else {
undef $re;
}
# Imaginary part, rendered the same way; without an explicit format,
# coefficients of +1/-1 collapse to bare "i"/"-i".
if ($y) {
if ($y =~ /^(NaN[QS]?)$/i) {
$im = $y;
} else {
if ($y =~ /^-?\Q$Inf\E$/oi) {
$im = $y;
} else {
$im =
defined $format ?
sprintf($format, $y) :
($y == 1 ? "" : ($y == -1 ? "-" : $y));
}
}
$im .= "i";
} else {
undef $im;
}
# Join the two parts: negative and NaN imaginary parts carry their
# own sign, positive ones need an explicit '+' after a real part.
my $str = $re;
if (defined $im) {
if ($y < 0) {
$str .= $im;
} elsif ($y > 0 || $im =~ /^NaN[QS]?i$/i) {
$str .= "+" if defined $re;
$str .= $im;
}
} elsif (!defined $re) {
# Both parts were zero.
$str = "0";
}
return $str;
}
#
# ->_stringify_polar
#
# Stringify as a polar representation '[r,t]'.
#
# Render the number as a polar string '[r,t]'. NaN/infinite angles
# pass through verbatim; angles that are nice fractions of pi are
# pretty-printed as e.g. 'pi/3' when polar_pretty_print is on.
sub _stringify_polar {
my $z = shift;
my ($r, $t) = @{$z->_polar};
my $theta;
my %format = $z->display_format;
my $format = $format{format};
# Easy angles first: NaN/infinity verbatim, exactly pi as "pi",
# and zero magnitude or angle formatted directly.
if ($t =~ /^NaN[QS]?$/i || $t =~ /^-?\Q$Inf\E$/oi) {
$theta = $t;
} elsif ($t == pi) {
$theta = "pi";
} elsif ($r == 0 || $t == 0) {
$theta = defined $format ? sprintf($format, $t) : $t;
}
return "[$r,$theta]" if defined $theta;
#
# Try to identify pi/n and friends.
#
# Reduce the angle by whole turns first.
$t -= int(CORE::abs($t) / pi2) * pi2;
if ($format{polar_pretty_print} && $t) {
my ($a, $b);
# Look for an integer multiple b of pi/a for small a; render as
# "bpi/a", dropping a coefficient of +/-1 down to ""/"-".
for $a (2..9) {
$b = $t * $a / pi;
if ($b =~ /^-?\d+$/) {
$b = $b < 0 ? "-" : "" if CORE::abs($b) == 1;
$theta = "${b}pi/$a";
last;
}
}
}
# Fall back to the raw (possibly sprintf-formatted) values.
if (defined $format) {
$r = sprintf($format, $r);
$theta = sprintf($format, $t) unless defined $theta;
} else {
$theta = $t unless defined $theta;
}
return "[$r,$theta]";
}
# Accessor for the package's precomputed floating-point infinity.
sub Inf { return $Inf }
1;
__END__
=pod
=head1 NAME
Math::Complex - complex numbers and associated mathematical functions
=head1 SYNOPSIS
use Math::Complex;
$z = Math::Complex->make(5, 6);
$t = 4 - 3*i + $z;
$j = cplxe(1, 2*pi/3);
=head1 DESCRIPTION
This package lets you create and manipulate complex numbers. By default,
I<Perl> limits itself to real numbers, but an extra C<use> statement brings
full complex support, along with a full set of mathematical functions
typically associated with and/or extended to complex numbers.
If you wonder what complex numbers are, they were invented to be able to solve
the following equation:
x*x = -1
and by definition, the solution is noted I<i> (engineers use I<j> instead since
I<i> usually denotes an intensity, but the name does not matter). The number
I<i> is a pure I<imaginary> number.
The arithmetics with pure imaginary numbers works just like you would expect
it with real numbers... you just have to remember that
i*i = -1
so you have:
5i + 7i = i * (5 + 7) = 12i
4i - 3i = i * (4 - 3) = i
4i * 2i = -8
6i / 2i = 3
1 / i = -i
Complex numbers are numbers that have both a real part and an imaginary
part, and are usually noted:
a + bi
where C<a> is the I<real> part and C<b> is the I<imaginary> part. The
arithmetic with complex numbers is straightforward. You have to
keep track of the real and the imaginary parts, but otherwise the
rules used for real numbers just apply:
(4 + 3i) + (5 - 2i) = (4 + 5) + i(3 - 2) = 9 + i
(2 + i) * (4 - i) = 2*4 + 4i -2i -i*i = 8 + 2i + 1 = 9 + 2i
A graphical representation of complex numbers is possible in a plane
(also called the I<complex plane>, but it's really a 2D plane).
The number
z = a + bi
is the point whose coordinates are (a, b). Actually, it would
be the vector originating from (0, 0) to (a, b). It follows that the addition
of two complex numbers is a vectorial addition.
Since there is a bijection between a point in the 2D plane and a complex
number (i.e. the mapping is unique and reciprocal), a complex number
can also be uniquely identified with polar coordinates:
[rho, theta]
where C<rho> is the distance to the origin, and C<theta> the angle between
the vector and the I<x> axis. There is a notation for this using the
exponential form, which is:
rho * exp(i * theta)
where I<i> is the famous imaginary number introduced above. Conversion
between this form and the cartesian form C<a + bi> is immediate:
a = rho * cos(theta)
b = rho * sin(theta)
which is also expressed by this formula:
z = rho * exp(i * theta) = rho * (cos theta + i * sin theta)
In other words, it's the projection of the vector onto the I<x> and I<y>
axes. Mathematicians call I<rho> the I<norm> or I<modulus> and I<theta>
the I<argument> of the complex number. The I<norm> of C<z> is
marked here as C<abs(z)>.
The polar notation (also known as the trigonometric representation) is
much more handy for performing multiplications and divisions of
complex numbers, whilst the cartesian notation is better suited for
additions and subtractions. Real numbers are on the I<x> axis, and
therefore I<y> or I<theta> is zero or I<pi>.
All the common operations that can be performed on a real number have
been defined to work on complex numbers as well, and are merely
I<extensions> of the operations defined on real numbers. This means
they keep their natural meaning when there is no imaginary part, provided
the number is within their definition set.
For instance, the C<sqrt> routine which computes the square root of
its argument is only defined for non-negative real numbers and yields a
non-negative real number (it is an application from B<R+> to B<R+>).
If we allow it to return a complex number, then it can be extended to
negative real numbers to become an application from B<R> to B<C> (the
set of complex numbers):
sqrt(x) = x >= 0 ? sqrt(x) : sqrt(-x)*i
It can also be extended to be an application from B<C> to B<C>,
whilst its restriction to B<R> behaves as defined above by using
the following definition:
sqrt(z = [r,t]) = sqrt(r) * exp(i * t/2)
Indeed, a negative real number can be noted C<[x,pi]> (the modulus
I<x> is always non-negative, so C<[x,pi]> is really C<-x>, a negative
number) and the above definition states that
sqrt([x,pi]) = sqrt(x) * exp(i*pi/2) = [sqrt(x),pi/2] = sqrt(x)*i
which is exactly what we had defined for negative real numbers above.
The C<sqrt> returns only one of the solutions: if you want the both,
use the C<root> function.
All the common mathematical functions defined on real numbers that
are extended to complex numbers share that same property of working
I<as usual> when the imaginary part is zero (otherwise, it would not
be called an extension, would it?).
A I<new> operation possible on a complex number that is
the identity for real numbers is called the I<conjugate>, and is noted
with a horizontal bar above the number, or C<~z> here.
z = a + bi
~z = a - bi
Simple... Now look:
z * ~z = (a + bi) * (a - bi) = a*a + b*b
We saw that the norm of C<z> was noted C<abs(z)> and was defined as the
distance to the origin, also known as:
rho = abs(z) = sqrt(a*a + b*b)
so
z * ~z = abs(z) ** 2
If z is a pure real number (i.e. C<b == 0>), then the above yields:
a * a = abs(a) ** 2
which is true (C<abs> has the regular meaning for real number, i.e. stands
for the absolute value). This example explains why the norm of C<z> is
noted C<abs(z)>: it extends the C<abs> function to complex numbers, yet
is the regular C<abs> we know when the complex number actually has no
imaginary part... This justifies I<a posteriori> our use of the C<abs>
notation for the norm.
=head1 OPERATIONS
Given the following notations:
z1 = a + bi = r1 * exp(i * t1)
z2 = c + di = r2 * exp(i * t2)
z = <any complex or real number>
the following (overloaded) operations are supported on complex numbers:
z1 + z2 = (a + c) + i(b + d)
z1 - z2 = (a - c) + i(b - d)
z1 * z2 = (r1 * r2) * exp(i * (t1 + t2))
z1 / z2 = (r1 / r2) * exp(i * (t1 - t2))
z1 ** z2 = exp(z2 * log z1)
~z = a - bi
abs(z) = r1 = sqrt(a*a + b*b)
sqrt(z) = sqrt(r1) * exp(i * t/2)
exp(z) = exp(a) * exp(i * b)
log(z) = log(r1) + i*t
sin(z) = 1/2i (exp(i * z) - exp(-i * z))
cos(z) = 1/2 (exp(i * z) + exp(-i * z))
atan2(y, x) = atan(y / x) # Minding the right quadrant, note the order.
The definition used for complex arguments of atan2() is
-i log((x + iy)/sqrt(x*x+y*y))
Note that atan2(0, 0) is not well-defined.
The following extra operations are supported on both real and complex
numbers:
Re(z) = a
Im(z) = b
arg(z) = t
abs(z) = r
cbrt(z) = z ** (1/3)
log10(z) = log(z) / log(10)
logn(z, n) = log(z) / log(n)
tan(z) = sin(z) / cos(z)
csc(z) = 1 / sin(z)
sec(z) = 1 / cos(z)
cot(z) = 1 / tan(z)
asin(z) = -i * log(i*z + sqrt(1-z*z))
acos(z) = -i * log(z + i*sqrt(1-z*z))
atan(z) = i/2 * log((i+z) / (i-z))
acsc(z) = asin(1 / z)
asec(z) = acos(1 / z)
acot(z) = atan(1 / z) = -i/2 * log((i+z) / (z-i))
sinh(z) = 1/2 (exp(z) - exp(-z))
cosh(z) = 1/2 (exp(z) + exp(-z))
tanh(z) = sinh(z) / cosh(z) = (exp(z) - exp(-z)) / (exp(z) + exp(-z))
csch(z) = 1 / sinh(z)
sech(z) = 1 / cosh(z)
coth(z) = 1 / tanh(z)
asinh(z) = log(z + sqrt(z*z+1))
acosh(z) = log(z + sqrt(z*z-1))
atanh(z) = 1/2 * log((1+z) / (1-z))
acsch(z) = asinh(1 / z)
asech(z) = acosh(1 / z)
acoth(z) = atanh(1 / z) = 1/2 * log((1+z) / (z-1))
I<arg>, I<abs>, I<log>, I<csc>, I<cot>, I<acsc>, I<acot>, I<csch>,
I<coth>, I<acosech>, I<acotanh>, have aliases I<rho>, I<theta>, I<ln>,
I<cosec>, I<cotan>, I<acosec>, I<acotan>, I<cosech>, I<cotanh>,
I<acosech>, I<acotanh>, respectively. C<Re>, C<Im>, C<arg>, C<abs>,
C<rho>, and C<theta> can be used also as mutators. The C<cbrt>
returns only one of the solutions: if you want all three, use the
C<root> function.
The I<root> function is available to compute all the I<n>
roots of some complex, where I<n> is a strictly positive integer.
There are exactly I<n> such roots, returned as a list. Getting the
number mathematicians call C<j> such that:
1 + j + j*j = 0;
is a simple matter of writing:
$j = (root(1, 3))[1];
The I<k>th root for C<z = [r,t]> is given by:
(root(z, n))[k] = r**(1/n) * exp(i * (t + 2*k*pi)/n)
You can return the I<k>th root directly by C<root(z, n, k)>,
indexing starting from I<zero> and ending at I<n - 1>.
The I<spaceship> numeric comparison operator, E<lt>=E<gt>, is also
defined. In order to ensure its restriction to real numbers is conform
to what you would expect, the comparison is run on the real part of
the complex number first, and imaginary parts are compared only when
the real parts match.
=head1 CREATION
To create a complex number, use either:
$z = Math::Complex->make(3, 4);
$z = cplx(3, 4);
if you know the cartesian form of the number, or
$z = 3 + 4*i;
if you like. To create a number using the polar form, use either:
$z = Math::Complex->emake(5, pi/3);
$x = cplxe(5, pi/3);
instead. The first argument is the modulus, the second is the angle
(in radians, the full circle is 2*pi). (Mnemonic: C<e> is used as a
notation for complex numbers in the polar form).
It is possible to write:
$x = cplxe(-3, pi/4);
but that will be silently converted into C<[3,-3pi/4]>, since the
modulus must be non-negative (it represents the distance to the origin
in the complex plane).
It is also possible to have a complex number as either argument of the
C<make>, C<emake>, C<cplx>, and C<cplxe>: the appropriate component of
the argument will be used.
$z1 = cplx(-2, 1);
$z2 = cplx($z1, 4);
The C<new>, C<make>, C<emake>, C<cplx>, and C<cplxe> will also
understand a single (string) argument of the forms
2-3i
-3i
[2,3]
[2,-3pi/4]
[2]
in which case the appropriate cartesian and exponential components
will be parsed from the string and used to create new complex numbers.
The imaginary component and the theta, respectively, will default to zero.
The C<new>, C<make>, C<emake>, C<cplx>, and C<cplxe> will also
understand the case of no arguments: this means plain zero or (0, 0).
=head1 DISPLAYING
When printed, a complex number is usually shown under its cartesian
style I<a+bi>, but there are legitimate cases where the polar style
I<[r,t]> is more appropriate. The process of converting the complex
number into a string that can be displayed is known as I<stringification>.
By calling the class method C<Math::Complex::display_format> and
supplying either C<"polar"> or C<"cartesian"> as an argument, you
override the default display style, which is C<"cartesian">. Not
supplying any argument returns the current settings.
This default can be overridden on a per-number basis by calling the
C<display_format> method instead. As before, not supplying any argument
returns the current display style for this number. Otherwise whatever you
specify will be the new display style for I<this> particular number.
For instance:
use Math::Complex;
Math::Complex::display_format('polar');
$j = (root(1, 3))[1];
print "j = $j\n"; # Prints "j = [1,2pi/3]"
$j->display_format('cartesian');
print "j = $j\n"; # Prints "j = -0.5+0.866025403784439i"
The polar style attempts to emphasize arguments like I<k*pi/n>
(where I<n> is a positive integer and I<k> an integer within [-9, +9]),
this is called I<polar pretty-printing>.
For the reverse of stringifying, see the C<make> and C<emake>.
=head2 CHANGED IN PERL 5.6
The C<display_format> class method and the corresponding
C<display_format> object method can now be called using
a parameter hash instead of just a one parameter.
The old display format style, which can have values C<"cartesian"> or
C<"polar">, can be changed using the C<"style"> parameter.
$j->display_format(style => "polar");
The one parameter calling convention also still works.
$j->display_format("polar");
There are two new display parameters.
The first one is C<"format">, which is a sprintf()-style format string
to be used for both numeric parts of the complex number(s). The default is
somewhat system-dependent but most often it corresponds to C<"%.15g">.
You can revert to the default by setting the C<format> to C<undef>.
# the $j from the above example
$j->display_format('format' => '%.5f');
print "j = $j\n"; # Prints "j = -0.50000+0.86603i"
$j->display_format('format' => undef);
print "j = $j\n"; # Prints "j = -0.5+0.866025403784439i"
Notice that this affects also the return values of the
C<display_format> methods: in list context the whole parameter hash
will be returned, as opposed to only the style parameter value.
This is a potential incompatibility with earlier versions if you
have been calling the C<display_format> method in list context.
The second new display parameter is C<"polar_pretty_print">, which can
be set to true or false, the default being true. See the previous
section for what this means.
=head1 USAGE
Thanks to overloading, the handling of arithmetics with complex numbers
is simple and almost transparent.
Here are some examples:
use Math::Complex;
$j = cplxe(1, 2*pi/3); # $j ** 3 == 1
print "j = $j, j**3 = ", $j ** 3, "\n";
print "1 + j + j**2 = ", 1 + $j + $j**2, "\n";
$z = -16 + 0*i; # Force it to be a complex
print "sqrt($z) = ", sqrt($z), "\n";
$k = exp(i * 2*pi/3);
print "$j - $k = ", $j - $k, "\n";
$z->Re(3); # Re, Im, arg, abs,
$j->arg(2); # (the last two aka rho, theta)
# can be used also as mutators.
=head1 CONSTANTS
=head2 PI
The constant C<pi> and some handy multiples of it (pi2, pi4,
and pip2 (pi/2) and pip4 (pi/4)) are also available if separately
exported:
use Math::Complex ':pi';
$third_of_circle = pi2 / 3;
=head2 Inf
The floating point infinity can be exported as a subroutine Inf():
use Math::Complex qw(Inf sinh);
my $AlsoInf = Inf() + 42;
my $AnotherInf = sinh(1e42);
print "$AlsoInf is $AnotherInf\n" if $AlsoInf == $AnotherInf;
Note that the stringified form of infinity varies between platforms:
it can be for example any of
inf
infinity
INF
1.#INF
or it can be something else.
Also note that in some platforms trying to use the infinity in
arithmetic operations may result in Perl crashing because using
an infinity causes SIGFPE or its moral equivalent to be sent.
The way to ignore this is
local $SIG{FPE} = sub { };
=head1 ERRORS DUE TO DIVISION BY ZERO OR LOGARITHM OF ZERO
The division (/) and the following functions
log ln log10 logn
tan sec csc cot
atan asec acsc acot
tanh sech csch coth
atanh asech acsch acoth
cannot be computed for all arguments because that would mean dividing
by zero or taking logarithm of zero. These situations cause fatal
runtime errors looking like this
cot(0): Division by zero.
(Because in the definition of cot(0), the divisor sin(0) is 0)
Died at ...
or
atanh(-1): Logarithm of zero.
Died at...
For the C<csc>, C<cot>, C<asec>, C<acsc>, C<acot>, C<csch>, C<coth>,
C<asech>, C<acsch>, the argument cannot be C<0> (zero). For the
logarithmic functions and the C<atanh>, C<acoth>, the argument cannot
be C<1> (one). For the C<atanh>, C<acoth>, the argument cannot be
C<-1> (minus one). For the C<atan>, C<acot>, the argument cannot be
C<i> (the imaginary unit). For the C<atan>, C<acoth>, the argument
cannot be C<-i> (the negative imaginary unit). For the C<tan>,
C<sec>, C<tanh>, the argument cannot be I<pi/2 + k * pi>, where I<k>
is any integer. atan2(0, 0) is undefined, and if the complex arguments
are used for atan2(), a division by zero will happen if z1**2+z2**2 == 0.
Note that because we are operating on approximations of real numbers,
these errors can happen when merely `too close' to the singularities
listed above.
=head1 ERRORS DUE TO INDIGESTIBLE ARGUMENTS
The C<make> and C<emake> accept both real and complex arguments.
When they cannot recognize the arguments they will die with error
messages like the following
Math::Complex::make: Cannot take real part of ...
Math::Complex::make: Cannot take imaginary part of ...
Math::Complex::emake: Cannot take rho of ...
Math::Complex::emake: Cannot take theta of ...
=head1 BUGS
Saying C<use Math::Complex;> exports many mathematical routines in the
caller environment and even overrides some (C<sqrt>, C<log>, C<atan2>).
This is construed as a feature by the Authors, actually... ;-)
All routines expect to be given real or complex numbers. Don't attempt to
use BigFloat, since Perl has currently no rule to disambiguate a '+'
operation (for instance) between two overloaded entities.
In Cray UNICOS there is some strange numerical instability that results
in root(), cos(), sin(), cosh(), sinh(), losing accuracy fast. Beware.
The bug may be in UNICOS math libs, in UNICOS C compiler, in Math::Complex.
Whatever it is, it does not manifest itself anywhere else where Perl runs.
=head1 SEE ALSO
L<Math::Trig>
=head1 AUTHORS
Daniel S. Lewart <F<lewart!at!uiuc.edu>>,
Jarkko Hietaniemi <F<jhi!at!iki.fi>>,
Raphael Manfredi <F<Raphael_Manfredi!at!pobox.com>>,
Zefram <zefram@fysh.org>
=head1 LICENSE
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself.
=cut
1;
# eof
| Dokaponteam/ITF_Project | xampp/perl/lib/Math/Complex.pm | Perl | mit | 49,439 |
# -------------------------------------------------------------------------------------
# Author: Sourabh S Joshi (cbrghostrider); Copyright - All rights reserved.
# For email, run on linux (perl v5.8.5):
# perl -e 'print pack "H*","736f75726162682e732e6a6f73686940676d61696c2e636f6d0a"'
# -------------------------------------------------------------------------------------
#!/usr/bin/perl
# Need to refactor! I'm not really a perl guy ;)
use strict;
use warnings;
my %table;
my $num = <STDIN>; chomp $num;
sub accumulate {
    # Append attribute name $attr to the list kept for tag $key, skipping
    # duplicates; first-seen order of attributes is preserved.
    my ($key, $attr) = @_;
    my $attrs = \@{ $table{$key} };    # autovivifies the list on first use
    return if grep { $_ eq $attr } @{$attrs};
    push @{$attrs}, $attr;
}
sub addTable {
    # Record tag $head in the global %table; a non-empty $attr is added to its
    # attribute list.  An empty $attr still registers the tag (seeding "" as a
    # placeholder) so attribute-less tags show up in the final report.
    my ($head, $attr) = @_;
    if (exists $table{$head}) {
        accumulate($head, $attr) unless $attr eq "";
    }
    else {
        # First sighting of this tag: seed either the placeholder "" or the
        # first attribute.  (The original also pushed onto an unused @arr.)
        push @{ $table{$head} }, $attr;
    }
}
sub processTag {
# Parse one HTML tag body (the text between '<' and '>', leading '<' already
# stripped) and record its tag name and attribute names in %table.
my $tag = shift @_;
my $head = "";
my $attr = "";
# Tag with at least one "name=value" attribute: capture the tag name, then
# repeatedly strip attribute pairs off the front, recording each name.
if ($tag =~ /^(\S+)(.*=.*)/) {
$head = $1;
$tag = $2;
while ($tag =~ /^.*?\s+([a-z]+)\s*=(.*)/) {
$attr = $1;
$tag = $2;
&addTable($head, $attr);
}
} elsif ($tag =~ /^(\S+).*/) {
# Attribute-less tag: register the name with an empty attribute ("").
$head = $1;
&addTable($head, $attr);
}
}
# Read the declared number of HTML lines from STDIN and feed every opening
# tag (anything in <...> whose first character is not '/') to processTag.
for (my $i=0; $i<$num; $i++) {
my $line = <STDIN>; chomp $line;
while ($line =~ /.*?<([^\/].*?)>(.*)/) {
my $tag = $1;
$line = $2;
&processTag($tag);
}
}
# Print one line per tag, in sorted order: "tag:attr1,attr2,...", attribute
# names sorted alphabetically.  A lone "" placeholder prints as "tag:" with
# nothing after the colon, matching the expected output format.
foreach my $key (sort keys %table) {
    my @attrs = sort @{ $table{$key} };
    print $key, ":", join(",", @attrs), "\n";
}
| cbrghostrider/Hacking | HackerRank/Algorithms/RegEx/HTMLAttributes.pl | Perl | mit | 2,129 |
package LoginApp::Controller::MyController;
use Moo;
use Encode qw(decode encode);
extends 'Mojolicious::Controller';

# Return the decoded JSON body of the current request.
sub getJsonParams
{
    my $self = shift;
    return $self->req->json;
}

# Thin wrapper around Mojolicious::Controller::render.
sub renderTemplate
{
    my ($self, @render_args) = @_;
    $self->render(@render_args);
}
1; | szilardcsom/mojo_login | lib/LoginApp/Controller/MyController.pm | Perl | mit | 240 |
#!/usr/bin/perl
# Generate an R script (written to STDOUT) that runs phybase's NJst on a set
# of gene trees.  Input: a file of "treename<TAB>newick tree" lines.
# The NJst result is written (by R) to the file named in the second argument.
use strict;
use Bio::TreeIO;

my ($filename, $outfile) = @ARGV;
die "Usage: $0 <treename/newick table> <NJst output file>\n"
    unless defined $filename && defined $outfile;
open my $in_fh, '<', $filename or die "Cannot open $filename: $!";

print "require(phybase);\n";
print "genetrees<-c(";
my $counter = 0;
my $tree;    # keeps the last tree read; its leaves name the taxa below
while (my $line = <$in_fh>) {
    chomp $line;
    my ($treename, $newick) = split /\t/, $line;
    $tree = $newick;
    print ", " if $counter++;    # comma-separate all but the first tree
    print "'$tree'";
}
print ")\n"; # close the genetrees vector
print "taxaname<-c(";
my $spnum = tree2spList($tree);  # prints quoted taxon names, returns count
print ")\nspname<-taxaname\n";
print "species.structure<-matrix(0,$spnum,$spnum)\n";
print "diag(species.structure)<-1\n";
print "\n";
print "result<-NJst(genetrees,taxaname,spname,species.structure)\n";
print "write(result, file='$outfile')\n";
close $in_fh;
#This script requires phybase R package
#NJst is a function used as follows
# genetrees<-c("(A:0.004,(B:0.003,(C:0.002,(D:0.001,E:0.001)
# :0.001):0.001):0.001);","(A:0.004,(B:0.003,(E:0.002,(D:0.001,C:0.001):0.001):0.001):0.001);","(A:0.004,(B:0.003,(C:0.002,(D:0.001,E:0.001):0.001):0.001):0.001);")
# taxaname<-c("A","B","C","D","E")
# spname<-taxaname
# species.structure<-matrix(0, 5, 5)
# diag(species.structure)<-1
#
# NJst(genetrees,taxaname, spname, species.structure)
# tree2spList($newick): print the quoted, comma-separated list of leaf
# (taxon) names of the given newick tree string and return how many there
# were.  Requires Bio::TreeIO for the newick parsing.
sub tree2spList {
    my ($treefile) = @_;
    # Parse the newick string through an in-memory filehandle.
    open(my $stringfh, "<", \$treefile)
        or die "Cannot open in-memory tree string: $!";
    my $input = Bio::TreeIO->new(-format => 'newick', -fh => $stringfh);
    my $tree  = $input->next_tree;
    my @names = map { $_->id } $tree->get_leaf_nodes;
    my $count = 0;
    foreach my $treespecies (@names) {
        $treespecies =~ s/^\s+|\s+$//g;    # trim surrounding whitespace
        print "," if $count;               # comma-separate all but the first
        print "'$treespecies'";
        $count++;
    }
    close $stringfh;
    return $count;
} # end of tree2spList subroutine
| xibalbanus/PIA2 | osiris_phylogenetics/phylogenies/makeNJst.pl | Perl | mit | 2,085 |
use strict;
use warnings;

package TicTacToe::Scorer;
# ABSTRACT: Set of methods to score TTT Boards

use lib 'lib';
use base 'Exporter';
our @EXPORT = qw(score);

use TicTacToe::Board qw(winner);

# score($board, $letter, $depth) -> number
#   0          : no winner on the board
#   1 / $depth : $letter has won (earlier wins score higher)
#   -1         : the opponent has won
sub score {
    my $board  = shift(@_);
    my $letter = shift(@_);
    my $depth  = shift(@_) || 1;    # note: a passed-in depth of 0 becomes 1

    my $winner = winner($board);
    return 0 unless $winner;
    return 1.0 / $depth if $winner eq $letter;
    return -1;    # explicit return instead of a bare trailing "-1;"
}

1; # All modules must end with a true value
| ekosz/tic_tac_toe_perl | lib/TicTacToe/Scorer.pm | Perl | mit | 454 |
# fib($n): naive doubly-recursive Fibonacci, deliberately unoptimized
# (this file serves as a recursion benchmark).
sub fib {
    my ($n) = @_;
    return $n if $n < 2;
    return fib($n - 1) + fib($n - 2);
}
print fib($ARGV[0]), "\n";
| safx/fibonacci-benchmark | fib.pl | Perl | mit | 111 |
#!/usr/bin/perl -w
use strict;
my $sUsage = qq(
# This script will read two specifed gff-like file (which is different with normal gff)
# and compare the exon structures in these two files.
# 07.06.2011 SW
perl $0
<rice gff3 file generated by script run_genewise_wrapper.pl>
<brachy gff3>
<exon groups output file>
);
die $sUsage unless @ARGV >= 3;
# Command-line arguments: genewise gff, brachypodium gff3, output file name.
my($wise_gff, $brachy_gff, $exon_outfile) = @ARGV;
# Exon structures keyed by isoform id, one hash per input file.
my %wise_str = read_wise_gff($wise_gff);
my %brachy_str = read_bd_gff($brachy_gff);
# Map isoform ids to gene ids, then invert to gene -> [isoform ids].
my %iso_gene = iso_to_gene(keys %brachy_str);
my %gene_iso;
foreach (keys %iso_gene)
{
push @{ $gene_iso{$iso_gene{$_}} }, $_;
}
my %summary;
my %num_coding_each_gene;
my %acquir_alter_exon;    # NOTE(review): declared but never used below
my %average_coverage;
# For every gene, compare each rice (genewise) exon structure against each
# brachypodium structure for the same isoform, accumulating per-gene
# alternative-splicing category fractions and average coverage.
foreach my $gene (keys %gene_iso)
{
my $total_coding = 0;
my $total_alt_coding = 0;
my $total_cov = 0;
my $num_comparison = 0;
foreach my $iso_id (@{$gene_iso{$gene}})
{
next unless (exists $wise_str{$iso_id} and exists $brachy_str{$iso_id});
foreach my $rice_exon_ref (@{$wise_str{$iso_id}})
{
foreach my $bd_exon_ref (@{$brachy_str{$iso_id}})
{
my ($alt_seg, $total_seg, $coverage, $coding_seq_ref, @compare_result) = compare_structure($rice_exon_ref, $bd_exon_ref);
$total_coding += $total_seg;
$total_alt_coding += $alt_seg;
$total_cov += $coverage;
$num_comparison++;
push @{$summary{$gene}}, [@compare_result];
}
}
}
# Skip genes with no comparable isoform pairs (avoids division by zero).
next unless $num_comparison;
$average_coverage{$gene} = $total_cov/$num_comparison;
$num_coding_each_gene{$gene} = $total_coding;
}
output (\%summary, \%average_coverage, $exon_outfile);
# Subroutines
sub output
{
# Write per-gene averaged comparison statistics to $exon_out as a
# tab-separated table: gene id, seven averaged category fractions, and the
# pre-computed average coverage.
# NOTE(review): @names lists "Skipped_exon" twice and misspells
# "ALternative_donor_exon".  The value order from compare_structure() is
# (alt_donor, alt_acceptor, retained_intron, si, se, re, conserved) --
# confirm the intended column labels before relying on this header.
my($summary_ref, $cov_ref, $exon_out) = @_;
open (E, ">$exon_out") or die "$!\n";
my @record;
my @names = qw(ALternative_donor_exon Alternative_acceptor_exon Retained_intron Skipped_exon Skipped_exon Retained_exon Conserved_exon Coverage);
print E 'ID',"\t", join("\t", @names),"\n";
# ($ade, $aae, $ri, $si, $se, $re, $ce);
foreach my $gene (keys %{$summary_ref})
{
# Average each category fraction over all comparisons made for this gene.
my $t = scalar (@{$summary_ref->{$gene}});
my @count;
foreach my $iso (@{$summary_ref->{$gene}})
{
foreach (0..(scalar @$iso -1))
{
$count[$_] += $iso->[$_];
}
}
@count = map{$_/$t}@count;
print E join("\t", ($gene, @count)),"\t", $cov_ref->{$gene}, "\n";
}
close E;
}
# iso_to_gene(@isoform_ids): map isoform ids (e.g. "Bradi1g12345.2") to gene
# ids by stripping the trailing ".<digit>" suffix.  Ids without such a suffix
# map to undef.
sub iso_to_gene
{
    my %gene_of;
    foreach my $iso (@_)
    {
        # The original used "my $gene = $1 if /.../", whose value after a
        # failed match is undefined behavior (the conditional-my trap);
        # capture in list context instead, yielding undef on no match.
        my ($gene) = $iso =~ /(\S+)\.\d/;
        $gene_of{$iso} = $gene;
    }
    return %gene_of;
}
# read_wise_gff($file): parse genewise gff-like output.  Returns
#   isoform id => [ [ [start,end], ... ], ... ]
# (one arrayref of start-sorted CDS coordinate pairs per "//"-terminated
# prediction block).  CDS lines whose preceding "match" score is <= 35 are
# discarded.
sub read_wise_gff
{
    my ($file) = @_;
    my %structures;
    # Three-argument open with a lexical handle (the original used an unsafe
    # bareword two-argument open).
    open my $fh, '<', $file or die "Cannot open $file: $!";
    my $score;
    my $genewise_cutoff = 35;
    my @exons;
    my $iso_id;
    while (my $line = <$fh>)
    {
        next if $line =~ /^\s+$/;
        if ($line =~ /^\/\//)    # "//" terminates one prediction block
        {
            @exons = sort { $a->[0] <=> $b->[0] } @exons;
            push @{ $structures{$iso_id} }, [@exons] if scalar @exons;
            @exons = ();
            next;
        }
        my @fields = split /\t/, $line;
        $iso_id = $fields[0];
        if ($fields[2] =~ /match/) { $score = $fields[5] }
        next unless $fields[2] =~ /cds/i;
        next unless $score > $genewise_cutoff;
        push @exons, [ sort { $a <=> $b } @fields[3, 4] ];
    }
    close $fh;
    return %structures;
}
# read_bd_gff($file): parse a brachypodium gff3.  Exon coordinates are made
# relative to the start of their enclosing mRNA (1-based).  Returns
#   isoform id => [ [ [start,end], ... ] ]
# with the coordinate pairs sorted by start, matching read_wise_gff's layout.
sub read_bd_gff
{
    my ($file) = @_;
    my %structures;
    open my $fh, '<', $file or die "Cannot open $file: $!";
    my ($id, $start);    # carried across lines: current mRNA id and start
    while (my $line = <$fh>)
    {
        next if $line =~ /^\s+$/;
        my @fields = split /\t/, $line;
        if ($fields[2] eq 'mRNA')
        {
            # The mRNA line carries the isoform id and its genomic start.
            $id = $1 if $line =~ /ID=(\S+?)\;/;
            $start = $fields[3] < $fields[4] ? $fields[3] : $fields[4];
            next;
        }
        if ($fields[2] eq "exon")
        {
            push @{ $structures{$id} },
                [ sort { $a <=> $b } ($fields[3] - $start + 1, $fields[4] - $start + 1) ];
        }
    }
    close $fh;
    # Wrap each sorted exon list in a single-element arrayref so the data
    # layout matches read_wise_gff()'s (one structure per isoform).
    foreach my $iso (keys %structures) {
        $structures{$iso} = [ [ sort { $a->[0] <=> $b->[0] } @{ $structures{$iso} } ] ];
    }
    return %structures;
}
# Return the numerically largest of the arguments.
sub max
{
    my $largest = shift;
    for my $candidate (@_) {
        $largest = $candidate if $candidate > $largest;
    }
    return $largest;
}
# Return the numerically smallest of the arguments.
sub min
{
    my $smallest = shift;
    for my $candidate (@_) {
        $smallest = $candidate if $candidate < $smallest;
    }
    return $smallest;
}
# Fraction of bit positions 1..$max that are set in BOTH vectors.
# (Position 0 is deliberately skipped: coordinates are 1-based.)
sub calculate_coverage
{
    my ($wvec, $pvec, $max) = @_;
    my $shared = 0;
    for my $pos (1 .. $max) {
        $shared++ if vec($wvec, $pos, 1) && vec($pvec, $pos, 1);
    }
    return $shared / $max;
}
sub compare_structure
{
# Compare one rice (genewise) exon structure against one brachypodium
# structure.  Segment status encodes presence as two digits (wise*10 + pasa):
# 11 = both, 10 = wise only, 1 = pasa only, 0 = neither.  Returns
# (number of alternative segments, total segments, coverage,
#  \@coding_segments, seven per-category fractions of total segments).
my ($wise_ref, $pasa_ref) = @_;
my @return_array;
my $num_exons = 0;
my $num_compare = 0;
my ($ade, $aae, $ri, $si, $se, $re, $ce) = (0,0,0,0,0,0,0);
my ($alt_acc_exon, $alt_donor_exon) = (0, 0);
my ($wise_vec, $wise_max) = construct_vec($wise_ref);
my ($pasa_vec, $pasa_max) = construct_vec($pasa_ref);
my $coverage = calculate_coverage($wise_vec, $pasa_vec, $wise_max);
my @coding_segments = calculate_coding_segment($wise_vec, $pasa_vec, $wise_max>$pasa_max?$wise_max:$pasa_max);
my $total_segs = scalar @coding_segments;
my $alternative_segs = 0;
# Classify each present segment by the statuses of its two neighbours.
# (The final scanned segment always has status 0, so [$index+1] is safe
# for every segment that survives the "next if $status == 0" guard below.)
foreach my $index (0..$#coding_segments)
{
my $segment = $coding_segments[$index];
my $status = $segment->[2];
next if $status == 0;
# Conserved exon: present in both and flanked by gaps on both sides.
if ($status == 11){$ce++ if $coding_segments[$index-1]->[2] == 0 and $coding_segments[$index+1]->[2] == 0; next}
$alternative_segs++;
# NOTE: the "== 00" below is just the numeric literal 0 (octal spelling).
$ade++ if $coding_segments[$index-1]->[2] == 11 and $coding_segments[$index+1]->[2] == 0;
$aae++ if $coding_segments[$index-1]->[2] == 00 and $coding_segments[$index+1]->[2] == 11;
$ri++ if $coding_segments[$index-1]->[2] == 11 and $coding_segments[$index+1]->[2] == 11 and $status == 10;
$si++ if $coding_segments[$index-1]->[2] == 11 and $coding_segments[$index+1]->[2] == 11 and $status == 1;
$se++ if $coding_segments[$index-1]->[2] == 0 and $coding_segments[$index+1]->[2] == 0 and $status == 1;
$re++ if $coding_segments[$index-1]->[2] == 0 and $coding_segments[$index+1]->[2] == 0 and $status == 10;
}
@return_array = ($ade, $aae, $ri, $si, $se, $re, $ce);
@return_array = map{$_/$total_segs} @return_array;
return ($alternative_segs, $total_segs, $coverage, \@coding_segments, @return_array);
}
sub calculate_coding_segment
{
# Walk positions 0..$max+3 of the two bit vectors and collapse runs with the
# same combined status (wise*10 + pasa) into [start, end, status] triples.
# The extra 3 positions guarantee a trailing status-0 segment, which the
# neighbour-based classification in compare_structure() relies on.
my ($wise_vec, $pasa_vec, $max)= @_;
my @coding_segs;
my ($seg_start, $seg_end) = (0, 0);
my $pre_status = 0;
my $current_status;
foreach ( 0..$max+3) # 3 is used to make a small blank
{
my $w = vec($wise_vec, $_,1);
$w = 0 unless defined $w;
my $p = vec($pasa_vec, $_,1);
$p = 0 unless defined $p;
$current_status = $w*10+$p;
if ($current_status == $pre_status)
{
# Same run continues; close the final run when the scan ends.
if ($_ == ($max+3))
{
push @coding_segs, [$seg_start, $max+3, $pre_status]
}
next;
}
# Status changed: close the previous run and start a new one here.
$seg_end = $_ - 1;
push @coding_segs, [$seg_start, $seg_end, $pre_status];
$seg_start = $_;
$pre_status = $current_status;
}
return (@coding_segs);
}
# construct_vec(\@pairs): build a 1-bit-per-position vector with every
# position of every [start,end] coordinate pair set.  Returns
# ($vec, $max_position).  Dies if the structure covers no positions.
sub construct_vec
{
    my ($arrayref) = @_;
    die "construct_vec: expected an array reference, got '$arrayref'"
        unless ref $arrayref eq 'ARRAY';
    my $vec = '';
    my $max = 0;
    my $total = 0;    # the original left this undefined, provoking warnings
    foreach my $pair (@$arrayref)
    {
        my @coords = sort { $a <=> $b } @$pair;
        foreach my $pos ($coords[0] .. $coords[1])
        {
            vec($vec, $pos, 1) = 0b1;
            $total++;
            $max = $pos if $pos > $max;
        }
    }
    die "construct_vec: structure covers no positions" if $total == 0;
    return ($vec, $max);
}
| swang8/Perl_scripts_misc | compare_rice_brachyp_exon_new_v3.pl | Perl | mit | 6,857 |
package Pakket::Role::CanUninstallPackage;
# ABSTRACT: A role providing package uninstall functionality

use v5.22;
use Moose::Role;
use namespace::autoclean;

# core
use Carp;
use experimental qw(declared_refs refaliasing signatures);

# non core
use Path::Tiny;

# Remove every file recorded for $package in $info_file, drop its entry from
# the installed-packages registry, and prune any directories left empty.
sub uninstall_package ($self, $info_file, $package) {
    $self->log->debug('uninstalling package:', $package->id);

    # Drop the registry entry; \%info aliases the deleted hash (declared_refs).
    my \%info = delete $info_file->{'installed_packages'}{$package->category}{$package->name};

    my %parents;
    for my $file (sort $info{'files'}->@*) {
        $file =~ s{^files/}{}x; # (compatibility) remove 'files/' part from the begin of the path
        my $path = $self->work_dir->child($file);
        $self->log->trace('deleting file:', $path);
        $path->exists && !$path->remove
            and $self->log->error("Could not remove $path: $!");
        $parents{$path->parent->absolute}++;
    }

    # remove parent dirs if there are no children
    foreach my $parent (map {path($_)} keys %parents) {
        while ($parent->exists && !$parent->children) {
            $self->log->trace('deleting dir:', $parent);
            # include the path and errno in the warning (the original carped
            # a bare 'Unable to rmdir' with no context)
            rmdir $parent
                or carp("Unable to rmdir $parent: $!");
            $parent = $parent->parent;
        }
    }

    return;
}

1;
__END__
| xsawyerx/pakket | lib/Pakket/Role/CanUninstallPackage.pm | Perl | mit | 1,319 |
package ELTracker::App;
# Application base class: shared command-line options (via MooseX::App) plus
# lazily built DBIx::Class schema and natural/ISO-8601 date parsers used by
# the individual commands.
use MooseX::App qw(Color Config ConfigHome);
use FindBin;
use DateTime::Format::Natural;
use DateTime::Format::ISO8601;
use ELTracker::Schema;
# --- command-line options -------------------------------------------------
option 'debug' => (
is => 'rw',
isa => 'Bool',
documentation => 'Turn on debugging output',
);
option 'dsn' => (
is => 'rw',
isa => 'Str',
documentation => 'SQL DSN',
required => 1,
);
option 'name' => (
is => 'rw',
isa => 'Str',
documentation => 'Real name',
required => 0,
default => '',
);
option 'username' => (
is => 'rw',
isa => 'Str',
default => '',
documentation => 'DB user name',
);
option 'password' => (
is => 'rw',
isa => 'Str',
default => '',
documentation => 'DB password',
);
# --- lazily built resources ----------------------------------------------
has '_schema' => (
is => 'rw',
lazy => 1,
builder => '_build__schema',
);
has '_parser' => (
is => 'rw',
lazy => 1,
builder => '_build__parser',
);
has '_iso8601_parser' => (
is => 'rw',
lazy => 1,
builder => '_build__iso8601_parser',
);
# Connect to the database using the dsn/username/password options.
sub _build__schema {
my $self = shift;
my $schema = ELTracker::Schema->connect(
$self->dsn,
$self->username,
$self->password,
{ AutoCommit => 1 },
);
return $schema;
}
sub _build__parser {
my $self = shift;
return $self->_new_parser;
}
# Natural-language date parser; note the hard-coded locale, date format and
# America/Denver time zone.
sub _new_parser {
my $self = shift;
my %options = (
lang => 'en',
format => "mm/dd/yy",
prefer_future => 0,
time_zone => 'America/Denver',
);
return DateTime::Format::Natural->new(%options);
}
sub _build__iso8601_parser {
my $self = shift;
return $self->_new_iso8601_parser;
}
sub _new_iso8601_parser {
return DateTime::Format::ISO8601->new();
}
1;
| PerlStalker/el-tracker | lib/ELTracker/App.pm | Perl | mit | 1,909 |
sub Rec {
my $n;
$n = $_[0];
if( $n > 0 ) {
Rec( $n - 1 );
print("Value of n is: $n\n");
}
return;
}
Rec(10);
| vaishious/comperler | asgn4/test/test8.pl | Perl | mit | 144 |
package Net::Z3950::ZOOM;
use 5.008;
use strict;
use warnings;
our $VERSION = '1.30';
require XSLoader;
XSLoader::load('Net::Z3950::ZOOM', $VERSION);
my($vs, $ss) = ("x" x 100, "x" x 100); # allocate space for these strings
my $version = Net::Z3950::ZOOM::yaz_version($vs, $ss);
# Warn at load time when the YAZ C library we are linked against is older
# than the 4.0.0 this module requires; the marker file suppresses the check.
if ($version < 0x040000 && ! -f "/tmp/ignore-ZOOM-YAZ-version-mismatch") {
warn <<__EOT__;
*** WARNING!
ZOOM-Perl requires at least version 4.0.0 of YAZ, but is currently
running against only version $vs (sys-string '$ss').
Some things may not work.
__EOT__
}
# The only thing this module does is define the following constants,
# which MUST BE KEPT SYNCHRONISED with the definitions in <yaz/zoom.h>
# Error codes, as returned from connection_error()
# Constant subs; values mirror <yaz/zoom.h> and must stay in sync with it.
sub ERROR_NONE { 0 }
sub ERROR_CONNECT { 10000 }
sub ERROR_MEMORY { 10001 }
sub ERROR_ENCODE { 10002 }
sub ERROR_DECODE { 10003 }
sub ERROR_CONNECTION_LOST { 10004 }
sub ERROR_INIT { 10005 }
sub ERROR_INTERNAL { 10006 }
sub ERROR_TIMEOUT { 10007 }
sub ERROR_UNSUPPORTED_PROTOCOL { 10008 }
sub ERROR_UNSUPPORTED_QUERY { 10009 }
sub ERROR_INVALID_QUERY { 10010 }
sub ERROR_CQL_PARSE { 10011 }
sub ERROR_CQL_TRANSFORM { 10012 }
sub ERROR_CCL_CONFIG { 10013 }
sub ERROR_CCL_PARSE { 10014 }
# Event types, as returned from connection_last_event()
sub EVENT_NONE { 0 }
sub EVENT_CONNECT { 1 }
sub EVENT_SEND_DATA { 2 }
sub EVENT_RECV_DATA { 3 }
sub EVENT_TIMEOUT { 4 }
sub EVENT_UNKNOWN { 5 }
sub EVENT_SEND_APDU { 6 }
sub EVENT_RECV_APDU { 7 }
sub EVENT_RECV_RECORD { 8 }
sub EVENT_RECV_SEARCH { 9 }
sub EVENT_END { 10 } # In YAZ 2.1.17 and later
# CCL error-codes, which are in a different space from the ZOOM errors
sub CCL_ERR_OK { 0 }
sub CCL_ERR_TERM_EXPECTED { 1 }
sub CCL_ERR_RP_EXPECTED { 2 }
sub CCL_ERR_SETNAME_EXPECTED { 3 }
sub CCL_ERR_OP_EXPECTED { 4 }
sub CCL_ERR_BAD_RP { 5 }
sub CCL_ERR_UNKNOWN_QUAL { 6 }
sub CCL_ERR_DOUBLE_QUAL { 7 }
sub CCL_ERR_EQ_EXPECTED { 8 }
sub CCL_ERR_BAD_RELATION { 9 }
sub CCL_ERR_TRUNC_NOT_LEFT { 10 }
sub CCL_ERR_TRUNC_NOT_BOTH { 11 }
sub CCL_ERR_TRUNC_NOT_RIGHT { 12 }
=head1 NAME
Net::Z3950::ZOOM - Perl extension for invoking the ZOOM-C API.
=head1 SYNOPSIS
use Net::Z3950::ZOOM;
$conn = Net::Z3950::ZOOM::connection_new($host, $port);
$errcode = Net::Z3950::ZOOM::connection_error($conn, $errmsg, $addinfo);
Net::Z3950::ZOOM::connection_option_set($conn, databaseName => "foo");
# etc.
=head1 DESCRIPTION
This module provides a simple thin-layer through to the ZOOM-C
functions in the YAZ toolkit for Z39.50 and SRW/U communication. You
should not be using this very nasty, low-level API. You should be
using the C<ZOOM> module instead, which implements a nice, Perlish API
on top of this module, conformant to the ZOOM Abstract API described at
http://zoom.z3950.org/api/
To enforce the don't-use-this-module prohibition, I am not even going
to document it. If you really, really, really want to use it, then it
pretty much follows the API described in the ZOOM-C documentation at
http://www.indexdata.dk/yaz/doc/zoom.tkl
The only additional (non-ZOOM-C) function provided by this module is
C<event_str()>, which takes as its argument an event code such as
C<Net::Z3950::ZOOM::EVENT_SEND_APDU>, and returns a corresponding
short string.
=cut
sub event_str {
    # Map a ZOOM EVENT_* code (0..10) to a short human-readable label;
    # anything outside that range yields "impossible event <code>".
    my ($code) = @_;
    my @label = (
        "none",             # EVENT_NONE
        "connect",          # EVENT_CONNECT
        "send data",        # EVENT_SEND_DATA
        "receive data",     # EVENT_RECV_DATA
        "timeout",          # EVENT_TIMEOUT
        "unknown",          # EVENT_UNKNOWN
        "send apdu",        # EVENT_SEND_APDU
        "receive apdu",     # EVENT_RECV_APDU
        "receive record",   # EVENT_RECV_RECORD
        "receive search",   # EVENT_RECV_SEARCH
        "end",              # EVENT_END
    );
    # Integer check preserves the original's behavior for fractional codes,
    # which matched none of the == comparisons in the old elsif chain.
    if ($code >= 0 && $code <= $#label && $code == int($code)) {
        return $label[$code];
    }
    return "impossible event " . $code;
}
# Switch API variant depending on $type. This works because the
# get_string() and get_binary() functions have different returns
# types, one of which is implemented as a NUL-terminated string and
# the other as a pointer-and-length structure.
#
# Some Z39.50 servers, when asked for an OPAC-format record in the
# case where no circulation information is available, will return a
# USMARC record rather than an OPAC record containing only a
# bibliographic part. This non-OPAC records is not recognised by the
# underlying record_get() code in ZOOM-C, which ends up returning a
# null pointer. To make life a little less painful when dealing with
# such servers until ZOOM-C is fixed, this code recognises the
# wrong-record-syntax case and returns the XML for the bibliographic
# part anyway.
#
sub record_get {
# Fetch a field of a ZOOM record: metadata keys go through the
# NUL-terminated-string XS entry point, record payloads through the
# pointer-and-length one.  $type may carry options after ';' ("xml; charset").
my($rec, $type) = @_;
my $simpletype = $type;
$simpletype =~ s/;.*//;
if (grep { $type eq $_ } qw(database syntax schema)) {
return record_get_string($rec, $type);
} else {
my $val = record_get_binary($rec, $type);
# Workaround (see comment above): some servers return a bare USMARC
# record where an OPAC record was requested, making ZOOM-C yield undef.
# Re-fetch as XML and wrap it in a minimal <opacRecord> envelope.
if ($simpletype eq "opac" && !defined $val) {
my $newtype = $type;
if ($newtype !~ s/.*?;/xml;/) {
$newtype = "xml";
}
$val = record_get_binary($rec, $newtype);
$val = ("<opacRecord>\n <bibliographicRecord>\n" . $val .
" </bibliographicRecord>\n</opacRecord>");
}
return $val;
}
}
=head1 SEE ALSO
The C<ZOOM> module, included in the same distribution as this one.
=head1 AUTHOR
Mike Taylor, E<lt>mike@indexdata.comE<gt>
=head1 COPYRIGHT AND LICENCE
Copyright (C) 2005-2014 by Index Data.
This library is free software; you can redistribute it and/or modify
it under the same terms as Perl itself, either Perl version 5.8.4 or,
at your option, any later version of Perl 5 you may have available.
=cut
1;
| jkb78/extrajnm | local/lib/perl5/darwin-2level/Net/Z3950/ZOOM.pm | Perl | mit | 5,882 |
package WebService::Zulip;
use strict;
use warnings;
use Carp;
use Encode 'encode_utf8';
use JSON;
use LWP::UserAgent;
use Data::Printer;
our $VERSION = '0.01';
my $api_endpoint = 'https://api.zulip.com/v1/';
my $http_realm = 'zulip';
sub new {
    # Construct a client for the given api_user/api_key pair and prime an
    # LWP::UserAgent with HTTP basic credentials for the Zulip API realm.
    my ($package, %args) = @_;
    my $self = bless {
        _api_key  => $args{api_key},
        _api_user => $args{api_user},
        _ua       => LWP::UserAgent->new(agent => "WebService::Zulip v $VERSION"),
    }, $package;
    $self->{_ua}->credentials(
        'api.zulip.com:443',
        $http_realm,
        $self->{_api_user},
        $self->{_api_key},
    );
    return $self;
}
sub send_message {
    # Send a stream or private message.  Named arguments:
    #   type    => 'private' | 'stream'          (required)
    #   content => message body, < 10000 bytes   (required; UTF-8 byte count)
    #   to      => stream name, or recipient username(s): a string or, for
    #              private messages, an arrayref (required)
    #   subject => topic, <= 60 chars            (stream messages only)
    # Croaks on invalid arguments; returns the decoded JSON response, or
    # undef (after warning) on an HTTP-level error.
    my ($self, %args) = @_;

    # hash is convenient for user, vars is easier to read here
    my ($type, $content, $to, $subject) = @args{'type','content','to','subject'};

    # type: "One of {private, stream}"
    unless (defined($type) && ($type eq 'private' || $type eq 'stream')) {
        croak q{"type" is required and must be either "private" or "stream"};
    }

    # content: maximum message size of 10000 bytes, measured after UTF-8 encoding.
    my $content_limit = 10000;
    unless (defined($content) && (length(encode_utf8($content)) < $content_limit)) {
        croak qq{"content" is required and must be under $content_limit bytes};
    }

    # to: stream name for stream messages; a JSON-encoded recipient list for
    # private messages (a plain string is wrapped in a one-element list).
    croak q{"to" is required} unless defined($to);
    if ($type eq 'private') {
        my $reference = ref($to);
        if ($reference eq '') {
            $to = encode_json([$to]);
        } elsif ($reference eq 'ARRAY') {
            $to = encode_json($to);
        } else {
            # (leftover debug print of ref($to) removed here)
            croak q{"to" must either be a string or arrayref in private messages};
        }
    } else {
        # don't allow references/multiple 'to' for stream messages
        if (ref($to)) {
            croak q{"to" must be a string for stream messages};
        }
    }

    # subject: required topic for stream messages, at most 60 characters.
    if (defined($subject) && $type eq 'private') {
        carp q{"subject" is ignored in private messages};
    }
    if (defined($subject) && length($subject) > 60) {
        croak q{"subject" cannot be over 60 characters};
    }

    my $res = $self->{_ua}->post($api_endpoint . 'messages', {
        type    => $type,
        content => $content,
        to      => $to,
        subject => $subject,
    });
    if ($res->is_error) {
        carp q{Couldn't submit message:};
        p $res;
        return;
    }
    my $returned_json = decode_json($res->decoded_content);
    return $returned_json;
}
sub get_message_queue {
# Register an event queue with the Zulip API.  Named arguments:
#   event_types    => one of message/subscriptions/realm_user/pointer, an
#                     arrayref of those, or undef for all types
#   apply_markdown => 'true' to receive HTML-rendered content (default 'false')
# Croaks on an invalid event_types value or a failed request; returns the
# decoded JSON registration response (queue_id, last_event_id, ...).
my ($self, %args) = @_;
my ($event_types, $apply_markdown) = @args{'event_types', 'apply_markdown'};
# event_types: (optional) A JSON-encoded array indicating which types of
# events you're interested in. {message, subscriptions, realm_user (changes
# in the list of users in your realm), and pointer (changes in your pointer).
# If you do not specify this argument, you will receive all events
# allow user to provide either scalar or array ref
my $reference = ref($event_types);
if (defined($event_types) && $reference eq '') {
unless ($event_types =~ /^(?:message|subscriptions|realm_user|pointer)$/) {
croak q{"event_types" must be one of "message", "subscriptions",
"realm_user", "pointer", or an arrayref of these, or undefined for all.};
}
# wrap the scalar in an array
$event_types = encode_json([$event_types]);
} elsif ($reference eq 'ARRAY') {
# reject any arrayref element outside the known event type names
if (grep { $_ !~ /message|subscriptions|realm_user|pointer/ } @$event_types) {
croak q{"event_types" must be one of "message", "subscriptions",
"realm_user", "pointer", or an arrayref of these, or undefined for all.};
}
$event_types = encode_json($event_types);
} elsif (defined($event_types)) {
# defined but neither a plain scalar nor an arrayref (e.g. hashref)
croak q{"event_types" must be one of "message", "subscriptions",
"realm_user", "pointer", or an arrayref of these, or undefined for all.};
} else {
# the API expects JSON if event_types is present, easier to specify
# each than conditionally send event_types in the request
$event_types = encode_json([qw(message subscriptions realm_user pointer)]);
}
# (optional) set to “true” if you would like the content to be rendered in
# HTML format (by default, the API returns the raw text that the user entered)
$apply_markdown ||= 'false';
my $res = $self->{_ua}->post($api_endpoint . 'register', {
event_types => $event_types,
apply_markdown => $apply_markdown,
});
if ($res->is_error) {
croak q{"Couldn't request queue:\n"};
}
my $returned_json = decode_json($res->decoded_content);
return $returned_json;
}
sub get_new_events {
    # Fetch events from a previously registered queue.  Named arguments:
    #   queue_id      => id returned by get_message_queue (digits and ':')
    #   last_event_id => highest event id already seen (digits and '-')
    #   dont_block    => 'true' (default) or 'false' for long-polling
    # Returns the decoded JSON response; silently returns undef when the
    # queue_id / last_event_id arguments are missing or malformed (preserving
    # the module's historical behavior).  Croaks on an HTTP-level error.
    my ($self, %args) = @_;
    my ($queue_id, $last_event_id, $dont_block) = @args{'queue_id', 'last_event_id', 'dont_block'};
    return unless defined($queue_id) && $queue_id =~ /^[\d:]+$/;
    return unless defined($last_event_id) && $last_event_id =~ /^[\d-]+$/;
    $dont_block ||= 'true';
    # being lazy
    my $res = $self->{_ua}->get($api_endpoint . 'events?' .
        "queue_id=$queue_id&" .
        "last_event_id=$last_event_id&" .
        "dont_block=$dont_block"
    );
    if ($res->is_error) {
        # BUG FIX: method calls do not interpolate inside qq{...}; the old
        # code emitted the object address followed by the literal text
        # '->decoded_content'.  Concatenate the real body instead.
        croak "Couldn't get events: " . $res->decoded_content;
    }
    my $returned_json = decode_json($res->decoded_content);
    return $returned_json;
}
sub get_last_event_id {
    # Return the highest event id in an event batch, or 0 for an empty batch.
    # (Mirrors the scan done by the official Python bindings.)
    my ($self, $events_info) = @_;
    my $latest = 0;
    for my $event (@{ $events_info->{events} }) {
        $latest = $event->{id} if $event->{id} > $latest;
    }
    return $latest;
}
1; | Stantheman/WebService-Zulip | lib/WebService/Zulip.pm | Perl | mit | 6,167 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.