code stringlengths 2 1.05M | repo_name stringlengths 5 101 | path stringlengths 4 991 | language stringclasses 3 values | license stringclasses 5 values | size int64 2 1.05M |
|---|---|---|---|---|---|
=pod
=encoding utf8
=head1 Test
A minimal testing framework
=head1 SYNOPSIS
Private Sub test_all()
gStart "basic tests"
test_arithmetics
test_moreStuff
gStop
End Sub
Private Sub test_arithmetics()
gStart "arithmetics"
Dim i As Integer
i = 5
ok IsNumeric(i), "Integer is numeric"
equals 1 + 1, 2, "Integral addition works"
On Error Resume Next
i = 1 / 0
checkError 11, "Dividing by zero throws error code 11"
On Error GoTo 0
gStop
End Sub
Private Sub test_moreStuff()
gStart "moreStuff"
...
gStop
End Sub
=head1 DESCRIPTION
This C<Test> module provides a rather minimalistic framework for testing VBA
stuff.
It provides a set of functions to perform tests. Tests must be wrapped in a
C<gStart> and C<gStop> pair. The last call to C<gStop> causes the test results
to be printed to the debug output.
The intended usage pattern is to create a module for each unit to test and group
tests into functions. Each function starts with a C<gStart> and ends with a
C<gStop>. One method called C<test_all()> calls all the other methods. To run
the tests one simply puts the cursor in one of the methods and clicks the C<Run>
button.
=head2 Grouping
Tests can be grouped and groups nested using the two functions C<gStart> and
C<gStop>. The grouping affects the test result reporting and is also helpful
to create aggregator functions that run many tests at once.
gStart "outer group"
gStart "sub-group 1"
ok True
gStop
gStart "sub-group 2"
ok False
gStop
gStop
=head1 FUNCTIONS
=head2 ok
ok obj.isCool, "obj should be cool"
Checks whether the first parameter is True.
An optional second parameter, a message, can be provided, which will be included
in the test results.
=head2 equals
equals 1 + 1, 2, "one and one should be two"
Checks whether the first two parameters are equal.
Arrays and C<List>s are compared element by element but do not recurse into
deeper levels.
An optional third parameter, a message, can be provided, which will be included
in the test results.
=head2 checkError
On Error Resume Next
i = 1 / 0
checkError 11, "Dividing by zero throws error code 11"
On Error GoTo 0
Checks whether the Err object has a given error number set.
An optional second parameter, a message, can be provided, which will be included
in the test results.
=head2 checkNoError
On Error Resume Next
i = 1 / 2
checkNoError "Dividing with remainder does not throw"
On Error GoTo 0
Makes sure the Err object contains no error.
An optional second parameter, a message, can be provided, which will be included
in the test results.
=head2 pass / fail
If False Then
fail "If works"
Else
pass "If works"
End If
Unconditionally passes / fails.
An optional parameter, a message, can be provided, which will be included
in the test results.
C<fail> and C<pass> should always be used in combination with another test.
Otherwise the test count changes depending on whether the test succeeds or
fails.
=cut
| HALTEC/VBA-commons-lib | doc/Test.pod | Perl | apache-2.0 | 3,273 |
#!/usr/bin/perl -w
# Creation Date: Dec 5th PST 2007
use strict;
use warnings;
($#ARGV == 0) or die "Usage: $0 <plot_data>\n";
my $infile = shift;
# Some Gnuplot notes:
#set terminal postscript eps color "Helvetica" 22
#set output "average_degree.eps"
#1 + 7 (f) circle
#2 cross 8 (o) triangle
#3 * 9 (f) triangle
#4 (o) square 10 (o) inverse-triangle
#5 (f) square 11 (f) inverse-triangle
#6 (o) circle 12 (o) diamond
#13 (f) diamond
# type 'test' to see the colors and point types available
# lt is for color of the points: -1=black 1=red 2=grn 3=blue 4=purple 5=aqua 6=brn 7=orange 8=light-brn
# pt gives a particular point type: 1=diamond 2=+ 3=square 4=X 5=triangle 6=*
# postscipt: 1=+, 2=X, 3=*, 4=square, 5=filled square, 6=circle,
# 7=filled circle, 8=triangle, 9=filled triangle, etc.
# Parse Input Variables
my @fields;
my $line;
my %Data = ();
my $USE_NICE_COLORS_FLAG = 0;
# Commands for figure format
my $plot_name = "default";
my $xaxis_label = "x-default"; #"Clusters";
my $yaxis_label = "y-default"; #"Percentage (%)";
my $title = "# default-title";
my $point_size = "0.5";
my $line_size = 1;
my $mutiply_data = 1;
my $plot_with = "points";
#my $title = "";
my $use_titels = "notitle"; #"title \"%s\" ";
my $mode = "color enhanced"; # or color
my $log_usage = "set yrange [0:]";
# -------------
my $tics = "default";
my $range = "default";
my $injected_lines = ""; # add any fancy line here
# Few colors that I know are nice
my @Colors = ( "n/a", 9, 1, 3, 7, -1, 0, 10, 11, 12, 13, 14, 15 );
open(IN_FILE, "$infile" ) || die("Could not open file: $!");
while ($line=<IN_FILE>)
{
if ( $line =~ m/#/)
{
next;
}
elsif ( $line =~ m/^\n/ )
{
next;
}
elsif ( $line =~ m/Opt=/i )
{
$_ = $';
if ( m/name\s+(.*)/ )
{
$plot_name = $1;
}
elsif ( m/x-label\s+(.*)/ )
{
$xaxis_label = $1;
}
elsif ( m/use-nice-colors/)
{
$USE_NICE_COLORS_FLAG=1;
}
elsif ( m/mode\s+(.*)/ ) # Color or Black & White
{
$mode = $1;
}
elsif ( m/y-label\s+(.*)/ )
{
$yaxis_label = $1;
}
elsif ( m/timeseries\s+(.*)/ )
{
$xaxis_label = "Time";
$line_size = 2;
$plot_with = "linespoints";
$point_size = 0.2;
$title = "Time series presentation defalut parameters";
$injected_lines = "set xdata time\n";
$injected_lines .= "set timefmt \"%s\"\n";
$injected_lines .= "set format x \" %d\\n%H:%M\"\n";
}
elsif ( m/add\s+(.*)/ )
{
$injected_lines .= "$1\n";
}
elsif ( m/psize\s+(.*)/ ) # Point size
{
$point_size = $1;
}
elsif ( m/lsize\s+(.*)/ ) # Line width
{
$line_size = $1;
}
elsif ( m/type\s+(.*)/ ) # Plot with
{
$plot_with = $1;
}
elsif ( m/title\s+(.*)/ ) # Plot with
{
$title = $1;
}
elsif ( m/tics\s+(.*)/ )
{
$tics = $1;
}
elsif ( m/range\s+(.*)/ )
{
$range = $1;
}
elsif ( m/use-legent(.*)/ )
{
$use_titels = "title \"%s\" ";
}
elsif ( m/use-log/ )
{
$log_usage = "set log x\nset log y\n";
}
}
else
{
(@fields) = split(/\s+/, $line);
push @{$Data{$fields[0]}} , [ @fields[1..$#fields] ];
}
}
close(IN_FILE);
#TODO: test violation e.g. using lines with point sizes
my $plot_out_name = "$plot_name.eps";
my $date = `date +%Y-%m-%d-%H_%M_%S`; chomp($date);
#my @list_nice_lins ?
#my @luist_nice_points ?
open (OUT_FILE, ">$plot_name.gnu") || die("Could not open file!");
my $gnu = `which gnuplot`;
chomp $gnu;
print OUT_FILE "#!$gnu\n";
print OUT_FILE "# $title $date\n";
print OUT_FILE "set terminal postscript eps $mode \"Helvetica\" 22\n";
print OUT_FILE "set output \"$plot_out_name\" \n";
print OUT_FILE "set autoscale \n";
if ( $title ne "# default-title" ) {
print OUT_FILE "set title \"$title\" \n";
}
print OUT_FILE "$log_usage\n";
print OUT_FILE "#set key 60,6 \n";
print OUT_FILE "set xlabel \"$xaxis_label\" \n";
print OUT_FILE "set ylabel \"$yaxis_label\" \n";
if ( $tics !~ m/default/ )
{
print OUT_FILE "set xtics $tics \n";
}
if ( $range !~ m/default/ )
{
print OUT_FILE "set xrange $range \n";
}
print OUT_FILE "#set grid \n";
print OUT_FILE "$injected_lines";
print OUT_FILE "# set label 1 \"Y=AX^2+BX+C\" at 11,1225\n";
print OUT_FILE "plot ";
my $first_flag = 1;
my $last = "";
my $counter = 4 ;
my @Data_Sequence_Order = ( sort keys %Data );
foreach my $app ( @Data_Sequence_Order )
{
my $added_color_line = "";
if ( $USE_NICE_COLORS_FLAG > 0 )
{
$added_color_line = "lt $Colors[$USE_NICE_COLORS_FLAG]";
$USE_NICE_COLORS_FLAG++;
}
if ( $first_flag == 1)
{
my $line_command = "";
if ( $line_size eq "" ) {
} else {
$line_command = "lw $line_size"
}
printf OUT_FILE "\"-\" using 1:2 $use_titels with $plot_with $added_color_line pt $counter ps $point_size $line_command", $app;
$first_flag = 0;
}
else
{
printf OUT_FILE ", \"-\" using 1:2 $use_titels with $plot_with $added_color_line pt $counter ps $point_size lw $line_size", $app;
}
$last = $app;
$counter++;
}
print OUT_FILE "\n";
# Write data values to gnuplot file
foreach my $app ( @Data_Sequence_Order )
{
foreach (sort { $b->[0] <=> $a->[0] } @{$Data{$app}})
{
my @line = @{$_};
print OUT_FILE "@line\n";
}
print OUT_FILE "e\n" unless $app eq $last;
}
print OUT_FILE "end\n";
print OUT_FILE "!epstopdf $plot_out_name\n";
close(OUT_FILE);
`gnuplot $plot_name.gnu; chmod 700 $plot_name.gnu`;
exit 0;
| imarios/gnuplot-helper | plot_data.pl | Perl | apache-2.0 | 6,216 |
package CorpusCheck;
# Test::Roo test class that validates a word-corpus file: the file must be
# alphabetically sorted and must contain words starting with every letter a-z.
use Test::Roo;
use MooX::Types::MooseLike::Base qw/ArrayRef/;
use Path::Tiny;

has corpus => (
    is       => 'ro',
    # Moo 'isa' validators reject a value by dying.  The previous
    # `sub { -f shift }` returned false for a missing file, which Moo
    # silently ignores — so a bad path was never rejected.
    isa      => sub { -f $_[0] or die "corpus file '$_[0]' does not exist" },
    required => 1,
);

# Lazily-built, lower-cased lines of the corpus file (newlines retained;
# the comparisons below are unaffected by them).
has lines => (
    is  => 'lazy',
    isa => ArrayRef,
);

sub _build_lines {
    my ($self) = @_;
    return [ map { lc } path( $self->corpus )->lines ];
}

# The corpus must already be in (case-insensitive) alphabetical order.
test 'sorted' => sub {
    my $self = shift;
    is_deeply( $self->lines, [ sort @{$self->lines} ], "alphabetized");
};

# Every letter of the alphabet must start at least one word.
test 'a to z' => sub {
    my $self = shift;
    my %letters = map { substr($_,0,1) => 1 } @{ $self->lines };
    is_deeply( [sort keys %letters], ["a" .. "z"], "all letters found" );
};

1;
| gitpan/Test-Roo | examples/cookbook/lib/CorpusCheck.pm | Perl | apache-2.0 | 666 |
#!/usr/bin/perl
use strict;
my $port_prefix = $ENV{C4PORT_PREFIX} || 8000;
my $http_port = $port_prefix+67;
my $sse_port = $port_prefix+68;
my $zoo_port = $port_prefix+81;
my $plain_kafka_port = $port_prefix+92;
my $ssl_kafka_port = $port_prefix+93;
my $build_dir = "./client/build/test";
my $inbox_prefix = '';
my $kafka_version = "2.2.0";
my $kafka = "kafka_2.12-$kafka_version";
my $curl_test = "curl http://127.0.0.1:$http_port/abc";
#my $plain_bootstrap_server = "127.0.0.1:$plain_kafka_port";
my $ssl_bootstrap_server = "localhost:$ssl_kafka_port";
my $http_server = "127.0.0.1:$http_port";
my $gen_dir = "."; #"target/c4gen/res";
$ENV{PATH}.=":tmp/$kafka/bin";
sub syn{ print join(" ",@_),"\n"; system @_; }
# Echo PATH and the command line, run the command, and die with the raw
# wait status ($?) when it fails.
sub sy {
    my @cmd = @_;
    print "$ENV{PATH}\n";
    print "@cmd\n";
    my $status = system @cmd;
    die $? if $status;
}
sub syf{ my $res = scalar `$_[0]`; print "$_[0]\n$res"; $res }
# Write $content to $fn as UTF-8, replacing any existing file.  Dies with
# the failing filename on open/write/close errors.  Uses a lexical
# filehandle (the old bareword FF was a package global that could clobber
# any other handle named FF), and checks close separately because buffered
# write errors only surface at close time.
my $put_text = sub{
    my ($fn, $content) = @_;
    open my $fh, ">:encoding(UTF-8)", $fn or die "put_text($!)($fn)";
    print {$fh} $content           or die "put_text($!)($fn)";
    close $fh                      or die "put_text($!)($fn)";
};
my $need_tmp = sub{ -e $_ or mkdir $_ or die for "tmp" };
my @tasks;
push @tasks, ["setup_sbt", sub{
&$need_tmp();
my $sbta = "sbt-0.13.13.tgz";
if(!-e $sbta){
sy("cd tmp && curl -LO https://dl.bintray.com/sbt/native-packages/sbt/0.13.13/$sbta");
sy("cd tmp && tar -xzf $sbta");
sy("tmp/sbt-launcher-packaging-0.13.13/bin/sbt update")
}
#my $nodea = "node-v6.10.0-linux-x64.tar.xz";
#if(!-e $nodea){
# sy("wget https://nodejs.org/dist/v6.10.0/$nodea");
# sy("tar -xJf $nodea");
#}
#print qq{export PATH=tmp/sbt-launcher-packaging-0.13.13/bin:tmp/node-v6.10.0-linux-x64/bin:\$PATH\n};
print qq{add to .bashrc or so:\nexport PATH=tmp/sbt-launcher-packaging-0.13.13/bin:\$PATH\n};
}];
push @tasks, ["setup_kafka", sub{
&$need_tmp();
if (!-e $kafka) {
sy("cd tmp && curl -LO http://www-eu.apache.org/dist/kafka/$kafka_version/$kafka.tgz");
sy("cd tmp && tar -xzf $kafka.tgz")
}
}];
push @tasks, ["es_examples", sub{
sy("cd $gen_dir && sbt 'c4actor-base-examples/run-main ee.cone.c4actor.ProtoAdapterTest' ");
sy("cd $gen_dir && sbt 'c4actor-base-examples/run-main ee.cone.c4actor.AssemblerTest' ");
sy("C4STATE_TOPIC_PREFIX=ee.cone.c4actor.ConnTestApp cd $gen_dir && sbt 'c4actor-base-examples/run-main ee.cone.c4actor.ServerMain' ");
}];
push @tasks, ["not_effective_join_bench", sub{
sy("cd $gen_dir && sbt 'c4actor-base-examples/run-main ee.cone.c4actor.NotEffectiveAssemblerTest' ");
}];
my $inbox_configure = sub{
my $kafka_topics = "kafka-topics.sh --zookeeper 127.0.0.1:$zoo_port --topic .inbox";
sy("$kafka_topics --create --partitions 1 --replication-factor 1")
if 0 > index syf("$kafka_topics --list"), ".inbox";
};
my $stop_kafka = sub{
syn("kafka-server-stop.sh");
syn("zookeeper-server-stop.sh");
};
push @tasks, ["restart_kafka", sub{
my $data_dir = $ENV{C4DATA_DIR} || die "no C4DATA_DIR";
&$stop_kafka();
&$need_tmp();
&$put_text("tmp/zookeeper.properties","dataDir=$data_dir/zookeeper\nclientPort=$zoo_port\n");
sy("perl prod.pl need_certs $data_dir/ca cu.broker $data_dir $data_dir");
sy("perl prod.pl need_certs $data_dir/ca cu.def $data_dir");
&$put_text("tmp/server.properties", join '', map{"$_\n"}
"log.dirs=$data_dir/kafka-logs",
"zookeeper.connect=127.0.0.1:$zoo_port",
"message.max.bytes=250000000", #seems to be compressed
#"listeners=PLAINTEXT://$plain_bootstrap_server,SSL://$ssl_bootstrap_server",
"listeners=SSL://$ssl_bootstrap_server",
"inter.broker.listener.name=SSL",
"socket.request.max.bytes=250000000",
);
sy("cat $data_dir/cu.broker.properties >> tmp/server.properties");
sy("zookeeper-server-start.sh -daemon tmp/zookeeper.properties");
sleep 5;
sy("kafka-server-start.sh -daemon tmp/server.properties");
sy("jps");
#&$inbox_configure();
}];
push @tasks, ["stop_kafka", sub{&$stop_kafka()}];
#push @tasks, ["inbox_configure", sub{&$inbox_configure()}];
push @tasks, ["inbox_log_tail", sub{
sy("kafka-console-consumer.sh --bootstrap-server $ssl_bootstrap_server --topic $inbox_prefix.inbox.log")
}];
push @tasks, ["inbox_test", sub{
sy("kafka-verifiable-consumer.sh --broker-list $ssl_bootstrap_server --topic $inbox_prefix.inbox --group-id dummy-".rand())
}];
=sk
push @tasks, ["inbox_copy", sub{
my $from = $ENV{C4COPY_FROM} || die "C4COPY_FROM required";
&$need_tmp();
&$put_text("tmp/copy.consumer.properties",join "\n",
"group.id=dummy-".rand(),
"bootstrap.servers=$from",
#"enable.auto.commit=false"
);
&$put_text("tmp/copy.producer.properties",join "\n",
"bootstrap.servers=$bootstrap_server",
"compression.type=lz4",
"max.request.size=10000000",
#"linger.ms=1000",
"batch.size=1000",
);
sy("kafka-mirror-maker.sh"
." --consumer.config tmp/copy.consumer.properties"
." --producer.config tmp/copy.producer.properties"
.qq[ --whitelist="$inbox_prefix\\.inbox"]
." --num.streams 40"
#." --queue.size 2000"
#." --whitelist='.*'"
);
}];
=cut
my $client = sub{
my($inst)=@_;
unlink or die $! for <$build_dir/*>;
sy("cd client && npm install") if $inst;
sy("cd client && ./node_modules/webpack/bin/webpack.js");# -d
$build_dir
};
my $get_env = sub{
my $data_dir = $ENV{C4DATA_DIR} || die "no C4DATA_DIR";
my %env = (
C4BOOTSTRAP_SERVERS => $ssl_bootstrap_server,
C4INBOX_TOPIC_PREFIX => "",
C4MAX_REQUEST_SIZE => 250000000,
C4HTTP_SERVER => "http://$http_server",
C4AUTH_KEY_FILE => "$data_dir/simple.auth",
C4KEYSTORE_PATH => "$data_dir/cu.def.keystore.jks",
C4TRUSTSTORE_PATH => "$data_dir/cu.def.truststore.jks",
C4HTTP_PORT => $http_port,
C4SSE_PORT => $sse_port,
C4LOGBACK_XML => "$data_dir/logback.xml",
);
my $env = join " ", map{"$_=$env{$_}"} sort keys %env;
($env,%env);
};
# Build the shell command that launches a staged sbt-native-packager app
# under the given actor class name (exported via C4STATE_TOPIC_PREFIX and
# also passed as the first program argument).
sub staged {
    my ($app, $actor) = @_;
    return "C4STATE_TOPIC_PREFIX=$actor $gen_dir/$app/target/universal/stage/bin/$app $actor";
}
push @tasks, ["gate_publish", sub{
my($env,%env) = &$get_env();
my $build_dir = &$client(0);
$build_dir eq readlink $_ or symlink $build_dir, $_ or die $! for "htdocs";
sy("$env C4PUBLISH_DIR=$build_dir C4PUBLISH_THEN_EXIT=1 ".staged("c4gate-server","ee.cone.c4gate.PublishApp"))
}];
push @tasks, ["gate_server_run", sub{
my($env,%env) = &$get_env();
&$inbox_configure();
sy("$env C4STATE_REFRESH_SECONDS=100 ".staged("c4gate-server","ee.cone.c4gate.HttpGatewayApp"));
}];
push @tasks, ["env", sub{
my ($cmd,@exec) = @ARGV;
my($env,%env) = &$get_env();
$ENV{$_} = $env{$_} for keys %env;
$ENV{C4STATE_TOPIC_PREFIX} || die "no actor name";
sy(@exec);
#perl $ENV{C4PROTO_DIR}/prod.pl
}];
#push @tasks, ["snapshot_maker_run", sub{
# sy("$env ".staged("c4gate-server","ee.cone.c4gate.SnapshotMakerApp"));
#}];
push @tasks, ["test", sub{
my ($arg_a,$arg_b) = @_;
my($env,%env) = &$get_env();
sy("$env ".staged(($ARGV[1]||die),($ARGV[2]||die)));
}];
push @tasks, ["test_post_get_tcp_service_run", sub{
my($env,%env) = &$get_env();
sy("$env ".staged("c4gate-consumer-example","ee.cone.c4gate.TestConsumerApp"))
}];
push @tasks, ["test_post_get_check", sub{
my $v = int(rand()*10);
sy("$curl_test -X POST -d $v");
sleep 1;
sy("$curl_test -v");
sleep 4;
sy("$curl_test -v");
print " -- should be posted * 3\n";
}];
#push @tasks, ["test_tcp_check", sub{
# sy("nc 127.0.0.1 $sse_port");
#}];
push @tasks, ["test_actor_serial_service_run", sub{
my($env,%env) = &$get_env();
sy("$env ".staged("c4gate-consumer-example","ee.cone.c4gate.TestSerialApp"))
}];
push @tasks, ["test_actor_parallel_service_run", sub{
my($env,%env) = &$get_env();
sy("$env ".staged("c4gate-consumer-example","ee.cone.c4gate.TestParallelApp"))
}];
push @tasks, ["test_actor_check", sub{
sy("$curl_test -X POST") for 0..11;
}];
push @tasks, ["test_big_message_check", sub{
&$need_tmp();
sy("dd if=/dev/zero of=tmp/test.bin bs=1M count=4 && $curl_test -v -XPOST -T tmp/test.bin")
}];
push @tasks, ["test_ui_timer_service_run", sub{ # http://localhost:8067/sse.html#
my($env,%env) = &$get_env();
sy("$env ".staged("c4gate-sse-example","ee.cone.c4gate.TestSSEApp"))
}];
push @tasks, ["test_ui_todo_service_run", sub{
my($env,%env) = &$get_env();
sy("$env ".staged("c4gate-sse-example","ee.cone.c4gate.TestTodoApp"))
}];
push @tasks, ["test_ui_cowork_service_run", sub{
my($env,%env) = &$get_env();
sy("$env ".staged("c4gate-sse-example","ee.cone.c4gate.TestCoWorkApp"))
}];
push @tasks, ["test_ui_canvas_service_run", sub{
my($env,%env) = &$get_env();
sy("$env C4PUBLISH_DIR=$build_dir C4PUBLISH_THEN_EXIT='' ".staged("c4gate-sse-example","ee.cone.c4gate.TestCanvasApp"))
}];
push @tasks, ["test_ui_password_service_run", sub{
my($env,%env) = &$get_env();
sy("$env ".staged("c4gate-sse-example","ee.cone.c4gate.TestPasswordApp"))
}];
if($ARGV[0]) {
$ARGV[0] eq $$_[0] and $$_[1]->() for @tasks;
} else {
print "usage:\n";
print "\t$0 $$_[0]\n" for @tasks;
}
#export PATH=$HOME/tools/jdk/bin:$HOME/tools/sbt/bin:$PATH
#sbt show compile:dependencyClasspath
#... ScalaCheck, Specs2, and ScalaTest
#http://localhost:8067/react-app.html#todo
#http://localhost:8067/react-app.html#rectangle
#http://localhost:8067/react-app.html#leader
#tmp/kafka_2.11-0.10.1.0/bin/kafka-topics.sh --zookeeper 127.0.0.1:8081 --list
#force compaction:?
#min.cleanable.dirty.ratio=0.01
#segment.ms=100
#delete.retention.ms=100
#tar cvf - db4 | lz4 - db.tar.lz4
#lz4 -d db.tar.lz4 | tar xf -
=topic integrity
use strict;
use JSON::XS;
my $e = JSON::XS->new;
my $n = 0;
my $c = 0;
while(<>){
/records_consumed/ or next;
my $j = $e->decode($_);
$$j{name} eq "records_consumed" or next;
my($count,$min,$max) = @{$$j{partitions}[0]}{qw(count minOffset maxOffset)};
$count-1 == $max-$min or die $_;
$n == $min or die $_;
$n = $max + 1;
$c += $count;
}
print "count:$c\n";
=cut
| wregs/c4proto | do.pl | Perl | apache-2.0 | 10,203 |
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package database::mysql::mode::myisamkeycachehitrate;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
use centreon::plugins::statefile;
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options);
bless $self, $class;
$options{options}->add_options(arguments =>
{
"warning:s" => { name => 'warning', },
"critical:s" => { name => 'critical', },
"lookback" => { name => 'lookback', },
});
$self->{statefile_cache} = centreon::plugins::statefile->new(%options);
return $self;
}
# Validate the parsed options: both thresholds must be well-formed, and the
# statefile must be usable.  Exits via option_exit() on the first failure.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::init(%options);

    # Same validation for both thresholds; the message embeds the label.
    foreach my $label ('warning', 'critical') {
        if ($self->{perfdata}->threshold_validate(label => $label, value => $self->{option_results}->{$label}) == 0) {
            $self->{output}->add_option_msg(short_msg => "Wrong $label threshold '" . $self->{option_results}->{$label} . "'.");
            $self->{output}->option_exit();
        }
    }

    $self->{statefile_cache}->check_options(%options);
}
sub run {
my ($self, %options) = @_;
# $options{sql} = sqlmode object
$self->{sql} = $options{sql};
$self->{sql}->connect();
if (!($self->{sql}->is_version_minimum(version => '5'))) {
$self->{output}->add_option_msg(short_msg => "MySQL version '" . $self->{sql}->{version} . "' is not supported (need version >= '5.x').");
$self->{output}->option_exit();
}
$self->{sql}->query(query => q{SHOW /*!50000 global */ STATUS WHERE Variable_name IN ('Key_read_requests', 'Key_reads')});
my $new_datas = {Key_read_requests => undef, Key_reads => undef};
my $result = $self->{sql}->fetchall_arrayref();
foreach my $row (@{$result}) {
$new_datas->{$$row[0]} = $$row[1];
}
foreach (keys %$new_datas) {
if (!defined($new_datas->{$_})) {
$self->{output}->add_option_msg(short_msg => "Cannot get '$_' variable.");
$self->{output}->option_exit();
}
}
$self->{statefile_cache}->read(statefile => 'mysql_' . $self->{mode} . '_' . $self->{sql}->get_unique_id4save());
my $old_timestamp = $self->{statefile_cache}->get(name => 'last_timestamp');
$new_datas->{last_timestamp} = time();
my $old_read_request = $self->{statefile_cache}->get(name => 'Key_read_requests');
my $old_read = $self->{statefile_cache}->get(name => 'Key_reads');
if (defined($old_read_request) && defined($old_read) &&
$new_datas->{Key_read_requests} >= $old_read_request &&
$new_datas->{Key_reads} >= $old_read) {
my %prcts = ();
my $total_read_requests = $new_datas->{Key_read_requests} - $old_read_request;
my $total_read_disk = $new_datas->{Key_reads} - $old_read;
$prcts{keycache_hitrate_now} = ($total_read_requests == 0) ? 100 : ($total_read_requests - $total_read_disk) * 100 / $total_read_requests;
$prcts{keycache_hitrate} = ($new_datas->{Key_read_requests} == 0) ? 100 : ($new_datas->{Key_read_requests} - $new_datas->{Key_reads}) * 100 / $new_datas->{Key_read_requests};
my $exit_code = $self->{perfdata}->threshold_check(value => $prcts{'keycache_hitrate' . ((defined($self->{option_results}->{lookback})) ? '' : '_now' )}, threshold => [ { label => 'critical', exit_litteral => 'critical' }, { label => 'warning', exit_litteral => 'warning' } ]);
$self->{output}->output_add(severity => $exit_code,
short_msg => sprintf("myisam keycache hitrate at %.2f%%", $prcts{'keycache_hitrate' . ((defined($self->{option_results}->{lookback})) ? '' : '_now')})
);
$self->{output}->perfdata_add(label => 'keycache_hitrate' . ((defined($self->{option_results}->{lookback})) ? '' : '_now'),
nlabel => 'database.keycache.hitrate' . ((defined($self->{option_results}->{lookback})) ? '.average' : '.delta') . '.percentage',
unit => '%',
value => sprintf("%.2f", $prcts{'keycache_hitrate' . ((defined($self->{option_results}->{lookback})) ? '' : '_now')}),
warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning'),
critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical'),
min => 0);
$self->{output}->perfdata_add(label => 'keycache_hitrate' . ((defined($self->{option_results}->{lookback})) ? '_now' : ''),
nlabel => 'database.keycache.hitrate' . ((defined($self->{option_results}->{lookback})) ? '.delta' : '.average') . '.percentage',
unit => '%',
value => sprintf("%.2f", $prcts{'keycache_hitrate' . ((defined($self->{option_results}->{lookback})) ? '_now' : '')}),
min => 0);
}
$self->{statefile_cache}->write(data => $new_datas);
if (!defined($old_timestamp)) {
$self->{output}->output_add(severity => 'OK',
short_msg => "Buffer creation...");
}
$self->{output}->display();
$self->{output}->exit();
}
1;
__END__
=head1 MODE
Check hitrate in the Myisam Key Cache.
=over 8
=item B<--warning>
Threshold warning.
=item B<--critical>
Threshold critical.
=item B<--lookback>
Threshold isn't on the percent calculated from the difference ('keycache_hitrate_now').
=back
=cut
| centreon/centreon-plugins | database/mysql/mode/myisamkeycachehitrate.pm | Perl | apache-2.0 | 6,846 |
package VMOMI::ArrayOfServiceProfile;
use parent 'VMOMI::ComplexType';
use strict;
use warnings;
our @class_ancestors = ( );
our @class_members = (
['ServiceProfile', 'ServiceProfile', 1, 1],
);
sub get_class_ancestors {
return @class_ancestors;
}
sub get_class_members {
my $class = shift;
my @super_members = $class->SUPER::get_class_members();
return (@super_members, @class_members);
}
1;
| stumpr/p5-vmomi | lib/VMOMI/ArrayOfServiceProfile.pm | Perl | apache-2.0 | 420 |
%%---------------------------------------------------------------------
%%
%% TCL CANVAS WIDGET
%%
%%---------------------------------------------------------------------
:- module(canvas_class_doc,[],[objects,assertions,isomodes,regtypes]).
:- use_class(library('tcltk/examples/class/shape_class')).
:- use_module(library(system)).
:- use_module(library(strings)).
:- use_module(library(lists),[append/3]).
:- use_module(library('tcltk/tcltk')).
:- use_module(library('tcltk/tcltk_low_level')).
%%----------------------------------------------------------------------
:- comment(title, "The Tcl/Tk Geometry Class Interface").
:- comment(author,"Montse Iglesias").
:- comment(copyright,"@include{Copyright.Manuals}").
:- comment(summary,
"This section will explain how to use the geometry class using the
tcl/tk library ").
:- comment(module,"The @lib{canvas_class} permits create a geometry class using
tcltk library. The constructor class is canvas_class. ").
:- regtype shape(S) # "@var{S} is a reference to one type of the items
supported by canvas widgets.".
:- comment(shape/1,"Each item type is characterized by two things:
first, the form of the create command
used to create instances of the type; and second, a set
of configuration options for items of that type, which may
be used in the create and itemconfigure widget commands.").
shape(_).
:- regtype option(O) # "@var{O} is @em{hidden} if the Shape is not visible or
@em{shown} if its visible.".
option(_).
%%----------------------------------------------------------------------
:- export(item/2).
:- export(interp/1).
:- pred item(+Shape,+Option) :: shape * option
# "Indicates if the shape is visible or not.".
item(_,_).
:- pred interp(-TclInterpreter) :: tclInterpreter
# "Creates the TclInterpreter.".
interp(_).
%%----------------------------------------------------------------------
%%---------------------------------------------------------------------
%% CONSTRUCTOR/DESTRUCTOR
%%---------------------------------------------------------------------
:- set_prolog_flag(multi_arity_warnings,off).
%%------------------------------------------------------------------------
:- export(canvas_class/0).
:- pred canvas_class :: list
#" Creates a new interpreter, asserting the predicate
@em{interp(I)}, and the canvas object.".
:- export(canvas_class/1).
:- pred canvas_class(+ItemsList) :: list
# "Adds items of the list to the canvas object.".
%%------------------------------------------------------------------------
canvas_class :-
tcl_new(I),
asserta_fact(interp(I)),
self_codes(ID),
append(".",ID,Var),
% atom_concat('.canvas',ID,Var),
atom_codes(Var1,Var),
tcl_eval(I,[canvas, Var1],_),
tcl_eval(I,[pack, Var],_).
canvas_class([]) :-
canvas_class.
canvas_class([Item|Next]) :-
( add_item(Item) ; true ),
!,
canvas_class(Next).
:- set_prolog_flag(multi_arity_warnings,on).
%%------------------------------------------------------------------------
:- export(destructor/0).
:- pred destructor
# "Deletes the shapes of the canvas object and the object.".
%%------------------------------------------------------------------------
destructor :-
self(Canvas),
% command(["destroy .canvas",Canvas),
retract_fact(item(Shape,_)),
Shape:remove_owner(Canvas),
fail.
destructor :-
interp(I),
tcl_delete(I),
retract_fact(interp(_)).
%%---------------------------------------------------------------------
%% ADD/REMOVE ITEMS
%%---------------------------------------------------------------------
:- export(add_item/1).
:- export(remove_item/1).
:- export(item_changed/1).
%%------------------------------------------------------------------------
:- pred add_item(+Shape) :: shape
# "Adds shapes to the canvas object.".
%%------------------------------------------------------------------------
add_item(Shape) :-
\+ item(Shape,_),
Shape instance_of shape_class,
assertz_fact(item(Shape,hidden)),
self(Canvas),
Shape:add_owner(Canvas),
!.
add_item(_).
%%------------------------------------------------------------------------
:- pred remove_item(+Shape) :: shape
# "Removes shape from the canvas object.".
%%------------------------------------------------------------------------
remove_item(Shape) :-
hide_item(Shape),
retract_fact(item(Shape,_)),
Shape instance_of shape_class,
self(Canvas),
Shape:remove_owner(Canvas),
!.
remove_item(_).
%%------------------------------------------------------------------------
:- pred item_changed(+Shape) :: shape
# "Removes shape from the canvas object and creates a new one.".
%%------------------------------------------------------------------------
item_changed(Shape) :-
hide_item(Shape),
show_item(Shape).
%%---------------------------------------------------------------------
%% SHOW / HIDE ENTIRE CANVAS
%%---------------------------------------------------------------------
:- export(show/0).
%%------------------------------------------------------------------------
:- pred show
# "Adds shapes to the canvas object.".
%%------------------------------------------------------------------------
show :-
item(Shape,hidden),
display('Principio show'),nl,
show_item(Shape),
fail.
show.
:- export(hide/0).
%%------------------------------------------------------------------------
:- pred hide
# "Removes shapes from the canvas object.".
%%------------------------------------------------------------------------
hide :-
item(Shape,shown),
hide_item(Shape),
fail.
hide.
%%---------------------------------------------------------------------
%% SHOW / HIDE SPECIFIC ITEMS
%%---------------------------------------------------------------------
:- export(show_item/1).
%%------------------------------------------------------------------------
:- pred show_item(+Shape) :: shape
# "Shows Shape into the canvas object.".
%%------------------------------------------------------------------------
show_item(Shape) :-
display('en el show item de '),display(Shape),nl,
% self(Canvas),
self_codes(Canvas),
item(Shape,hidden),
Shape instance_of shape_class,
Shape:tcl_name(ItemName),
Shape:creation_options(Opts),
interp(I),
append(".",Canvas,Var),
atom_codes(Var1,Var),
append(Opts,[min(tags)|write(Shape)],Opts1),
tcl_eval(I,[Var1,' create ',ItemName|Opts1],_),
% codify([Command," ",Options," -tags tag",Shape],Aux),
retract_fact(item(Shape,hidden)),
asserta_fact(item(Shape,shown)).
:- export(hide_item/1).
%%------------------------------------------------------------------------
:- pred hide_item(+Shape) :: shape
# "Removes Shape from the canvas object.".
%%------------------------------------------------------------------------
hide_item(Shape) :-
self_codes(Canvas),
retract_fact(item(Shape,shown)),
interp(I),
append(".",Canvas,Var),
atom_codes(Var1,Var),
% tcl_eval(I,[Var,'delete tag',Shape],_),
tcl_eval(I,[Var1,'delete ',write(Shape)],_),
asserta_fact(item(Shape,hidden)).
%%---------------------------------------------------------------------
%% MACROS
%%---------------------------------------------------------------------
codify([],"").
codify([''|Next],[C,C|CNext]) :-
!,
atom_codes('\"',[C]),
codify(Next,CNext).
codify([[]|Next],[C,C|CNext]) :-
!,
atom_codes('\"',[C]),
codify(Next,CNext).
codify([X|Next],Str) :-
atom(X),
!,
atom_codes(X,XCodes),
codify(Next,CNext),
append(XCodes,CNext,Str).
codify([X|Next],Str) :-
number(X),
number_codes(X,XCodes),
!,
codify(Next,CNext),
append(XCodes,CNext,Str).
codify([X|Next],Str) :-
!,
codify(Next,CNext),
append(X,CNext,Str).
self_codes(S) :-
self(Me),
instance_codes(Me,S).
:- inheritable(self_codes/1).
| leuschel/ecce | www/CiaoDE/ciao/library/tcltk/examples_with_problems/class/canvas_class_doc.pl | Perl | apache-2.0 | 8,403 |
package VMOMI::VirtualMachineFileLayoutExSnapshotLayout;
# Data-description class for the vSphere SDK type
# VirtualMachineFileLayoutExSnapshotLayout; the (de)serialization layer
# drives itself off @class_ancestors and @class_members.
use parent 'VMOMI::DynamicData';

use strict;
use warnings;

# Ancestor chain, nearest parent first.
our @class_ancestors = ( 
    'DynamicData',
);

# One row per member: [ name, type, is-array, is-optional ].
# NOTE(review): column meaning inferred from sibling VMOMI classes
# (undef type = plain scalar/xsd string) -- confirm against the
# serializer before relying on it.
our @class_members = ( 
    ['key', 'ManagedObjectReference', 0, ],
    ['dataKey', undef, 0, ],
    ['memoryKey', undef, 0, 1],
    ['disk', 'VirtualMachineFileLayoutExDiskLayout', 1, 1],
);

# Return this class's ancestor list.
sub get_class_ancestors {
    return @class_ancestors;
}

# Return inherited member specs followed by this class's own.
sub get_class_members {
    my $class = shift;
    my @super_members = $class->SUPER::get_class_members();
    return (@super_members, @class_members);
}

1;
| stumpr/p5-vmomi | lib/VMOMI/VirtualMachineFileLayoutExSnapshotLayout.pm | Perl | apache-2.0 | 576 |
package Tapper::TestSuite::AutoTest;
# ABSTRACT: Tapper - Complete OS testing in a box via autotest
use warnings;
use strict;
use 5.010;
use Cwd;
use Moose;
use Getopt::Long qw/GetOptions/;
use Sys::Hostname qw/hostname/;
use YAML::Syck;
use Archive::Tar;
use IO::Socket::INET;
use File::Slurp qw/slurp/;
use File::Spec::Functions 'tmpdir';
use Digest::MD5 'md5_hex';
with 'MooseX::Log::Log4perl';
=head1 SYNOPSIS
You most likely want to run the frontend cmdline tool like this
=over 4
=item * Run an autotest subtest and report results to Tapper:
$ tapper-testsuite-autotest -t hackbench
=item * Run multiple autotest subtests and report results to Tapper:
$ tapper-testsuite-autotest -t hackbench -t hwclock
=back
=head1 DESCRIPTION
This module wraps autotest to make its (sub) tests available for Tapper.
The commandline tool simply calls the single steps like this:
use Tapper::TestSuite::AutoTest;
my $wrapper = Tapper::TestSuite::AutoTest->new();
my $args = $wrapper->parse_args();
$args = $wrapper->install($args);
$args = $wrapper->run($args);
The reporting evaluates several environment variables:
TAPPER_REPORT_SERVER
TAPPER_REPORT_API_PORT
TAPPER_REPORT_PORT
TAPPER_TESTRUN
TAPPER_REPORT_GROUP
with some sensible defaults. They are automatically provided when
using Tapper automation.
In case you run it manually the most important variable is
C<TAPPER_REPORT_SERVER> pointing to your central Tapper server.
See the Tapper manual for more details.
=head1 FUNCTIONS
=head2 makedir
Checks whether a given directory exists and creates it if not.
@param string - directory to create
@return success - 0
@return error - error string
=cut
sub makedir
{
    # Ensure directory $dir exists; a plain file squatting on the name
    # would make "mkdir -p" fail, so it is removed first.
    #
    # @param  string - directory to create
    # @return 0 on success, an error string otherwise
    my ($self, $dir) = @_;

    return 0 if -d $dir;
    if (-e $dir and not -d $dir) {
        unlink $dir;
    }
    # system() does not set $! on a non-zero exit; report the child's
    # exit status ($?) instead of an unrelated errno.
    system("mkdir", "-p", $dir) == 0 or return "Can't create $dir: exit status $?";
    return 0;
}
=head2 $self->log_and_system(@args)
Log and do a multi arg C<system()>.
=cut
# Log the command at debug level, then run it with the multi-argument
# (shell-free) form of system().  Returns system()'s raw status.
sub log_and_system {
    my $self = shift;
    my @cmd  = @_;
    $self->log->debug("@cmd");
    system(@cmd);
}
=head2 $self->log_and_system_shell(@args)
Log and do a single arg C<system()>.
=cut
# Log the command at debug level, then run it as ONE shell command line
# (so globs and redirections are interpreted).  Returns system()'s raw
# status.
sub log_and_system_shell {
    my $self    = shift;
    my $cmdline = join(' ', @_);
    $self->log->debug($cmdline);
    system($cmdline);
}
=head2 copy_client
Move the client to where it belongs.
@param string - download directory
@param string - target directory
@return die() in case of error
=cut
sub copy_client
{
    # Copy the unpacked autotest client tree from $downloaddir into
    # $target, preferring rsync and falling back to cp -r.
    #
    # @param string - download directory
    # @param string - target directory
    # dies in case of error
    #
    # Bug fix: log_and_system_shell() returns a single system() exit
    # status, but the old code unpacked it into ($error, $output) and
    # then died with the always-undef $output.  Use the status directly
    # and die with a meaningful message.
    my ($self, $downloaddir, $target) = @_;

    `which rsync`;
    if ( $? == 0) {
        # shell form on purpose: the "*autotest*" glob must be expanded
        my $status = $self->log_and_system_shell("rsync",
                                                 "-a",
                                                 "$downloaddir/*autotest*/",
                                                 "$target/");
        die "Can't rsync autotest client into '$target': exit status $status\n" if $status;
    } else {
        die "Target dir '$target' does not exist\n" if not -d $target;
        my $status = $self->log_and_system_shell("cp","-r","$downloaddir/*autotest*/*","$target/");
        die "Can't copy autotest client into '$target': exit status $status\n" if $status;
    }
    return;
}
=head2 install
Install the autotest framework from a given source into a given target
@param hash ref - args
@return hash ref - args
=cut
sub install
{
    # Fetch (http/ftp/file/local path) and unpack the autotest tarball
    # into a per-user, per-source cache directory, then copy the client
    # into $args->{target}.  Skipped entirely when $target/tests already
    # exists.  Returns $args with {target} filled in.
    my ($self, $args) = @_;
    my $error;
    my $output;
    my $tmp         = tmpdir;
    my $source      = $args->{source};
    my $user        = $ENV{USER} || 'unknown';
    # short hash of the source URL keeps caches of different sources apart
    my $checksum    = substr(md5_hex($source), 0,7);
    my $target      = $args->{target} || "$tmp/tapper-testsuite-autotest-client-$user-$checksum";
    my $downloaddir = "$tmp/tapper-testsuite-autotest-mirror-$user-$checksum";
    $self->makedir($target);
    $self->makedir($downloaddir);

    my $downloadfile;
    if (! -d "$target/tests") {
        if ($source =~ m,^(http|ftp)://, ) {
            $downloadfile = "$downloaddir/autotest-download-$checksum.tgz";
            if (! -e $downloadfile) {
                $self->log->debug( "Download autotest from $source to $downloadfile");
                # NOTE(review): log_and_system() returns one scalar (the
                # system() status), so $output stays undef here and the
                # "die $output if $error" below dies without a message --
                # same pattern twice more further down; verify intent.
                ($error, $output) = $self->log_and_system('wget', "--no-check-certificate",
                                                          $source, "-O", $downloadfile);
                die $output if $error;
            }
        } elsif ($source =~ m,^file://,) {
            # file:// URL -> strip the scheme, use as local path
            $downloadfile = $source;
            $downloadfile =~ s,^file://,,;
        } else {
            # anything else is taken to be a local file name
            $downloadfile = $source;
        }
        $self->log->debug( "Unpack autotest from file $downloadfile to subdir $downloaddir");
        ($error, $output) = $self->log_and_system("tar",
                                                  "-xzf", $downloadfile,
                                                  "-C", $downloaddir);
        die $output if $error;
        $self->copy_client($downloaddir, $target);
        die $output if $error;
    }
    $args->{target} = $target;
    return $args;
}
=head2 report_away
Send the actual report to reports receiver framework.
@param hash ref - args
@return success - int - report id
@return error - die()
=cut
sub report_away
{
    # Send the gzipped TAP archive in $args->{result_dir}/tap.tar.gz to
    # the Tapper reports receiver over a plain TCP connection.
    #
    # Protocol (as implemented here): the server greets with a line
    # containing the assigned report id, then the client streams the
    # archive and closes.
    #
    # @return success - int - report id
    # @return error   - die()
    my ($self, $args) = @_;
    my $result_dir      = $args->{result_dir};
    my $gzipped_content = slurp("$result_dir/tap.tar.gz");

    my $sock = IO::Socket::INET->new(PeerAddr => $args->{report_server},
                                     PeerPort => $args->{report_port},
                                     Proto    => 'tcp');
    $self->log->debug("Report to ".($args->{report_server} // "report_server=UNDEF").":".($args->{report_port} // "report_port=UNDEF"));
    unless ($sock) {
        $self->log->error( "Result TAP in $result_dir/tap.tar.gz can not be sent to Tapper server.");
        die "Can't open connection to ", ($args->{report_server} // "report_server=UNDEF"), ":", ($args->{report_port} // "report_port=UNDEF"), ":$!"
    }

    # read the server's greeting first; it carries the report id
    my $report_id = <$sock>;
    ($report_id) = $report_id =~ /(\d+)$/;
    $sock->print($gzipped_content);
    $sock->close();
    $self->log->debug( "Report $report_id (http://".$args->{report_server}."/tapper/reports/id/$report_id)");
    return $report_id;
}
=head2 upload_files
Upload the stats file to reports framework.
@param int - report id
@param hash ref - args
=cut
sub upload_files
{
    # Upload a fixed set of autotest status/sysinfo files (plus the
    # per-iteration before/after snapshots) as attachments to an
    # existing report, via the reports API port.
    #
    # @param int      - report id
    # @param string   - test name
    # @param hash ref - args
    #
    # Changes: removed two unused variables that read the misspelled
    # keys "reportserver"/"reportport" (real keys: report_server /
    # report_api_port, used below); made the gunzip success check
    # explicit and shell-safe.
    my ($self, $report_id, $test, $args) = @_;

    my $result_dir = $args->{result_dir};

    # Currently no upload for these (personal taste, privacy, too big):
    #
    #     sysinfo/installed_packages
    #
    my @files = ();
    push @files, (qw( status
                      control
                      sysinfo/cmdline
                      sysinfo/cpuinfo
                      sysinfo/df
                      sysinfo/dmesg.gz
                      sysinfo/gcc_--version
                      sysinfo/hostname
                      sysinfo/interrupts
                      sysinfo/ld_--version
                      sysinfo/lspci_-vvn
                      sysinfo/meminfo
                      sysinfo/modules
                      sysinfo/mount
                      sysinfo/partitions
                      sysinfo/proc_mounts
                      sysinfo/slabinfo
                      sysinfo/uname
                      sysinfo/uptime
                      sysinfo/version
                   ));
    my @iterations = map { chomp; $_ } `cd $result_dir ; find $test/sysinfo -name 'iteration.*'`;
    foreach my $iteration (@iterations) {
        push @files, map { "$iteration/$_" } (qw( interrupts.before
                                                  interrupts.after
                                                  meminfo.before
                                                  meminfo.after
                                                  schedstat.before
                                                  schedstat.after
                                                  slabinfo.before
                                                  slabinfo.after
                                               ));
    }
    foreach my $shortfile (@files) {
        my $file = "$result_dir/$shortfile";
        next unless -e $file;

        # upload uncompressed dmesg for easier inline reading.
        # system() returns 0 on success, so check the status explicitly
        # (the old "system(...) or do {...}" looked like an error branch
        # although it actually ran on success); the list form of
        # system() also avoids shell interpolation of the file name.
        if ($file =~ m/dmesg.gz$/) {
            if (system('gunzip', $file) == 0) {
                $file      =~ s/\.gz$//;
                $shortfile =~ s/\.gz$//;
            }
        }

        my $cmdline = "#! upload $report_id $shortfile plain\n";
        my $content = slurp($file);
        my $sock = IO::Socket::INET->new(PeerAddr => $args->{report_server},
                                         PeerPort => $args->{report_api_port},
                                         Proto    => 'tcp');
        $self->log->debug("Upload '$shortfile' to ".($args->{report_server} // "report_server=UNDEF").":".($args->{report_api_port} // "report_api_port=UNDEF"));
        unless ($sock) {
            $self->log->error( "Result file '$file' can not be sent to Tapper server.");
            die "Can't open connection to ", ($args->{report_server} // "report_server=UNDEF"), ":", ($args->{report_api_port} // "report_api_port=UNDEF"), ":$!"
        }
        $sock->print($cmdline);
        $sock->print($content);
        $sock->close();
    }
    return;
}
=head2 get_machine_name
Return hostname for metainfo in typical Tapper notation, i.e., just
the hostname (without FQDN) in host context or C<host:guest> (colon
separated) in guest context.
=cut
sub get_machine_name
{
    # Return the hostname in Tapper notation: bare short hostname in
    # host context, "host:guest" (colon separated) in guest context.
    # Guest context is detected via $ENV{TAPPER_HOSTNAME} or a
    # "hostname:..." entry in /etc/tapper.
    #
    # Bug fix: the old code slurped /etc/tapper as ONE list element
    # (local $/ = undef), so the "hostname:" lookup matched the whole
    # file and kept everything after the first colon -- newlines and all
    # following lines included.  Read it line by line and chomp instead.
    my $etc_tapper = "/etc/tapper";
    my $hostname   = hostname();
    $hostname =~ s/\..*$//;                       # no FQDN

    # combined machine name in Tapper automation guest environment
    if ($ENV{TAPPER_HOSTNAME}) {
        $hostname = "$ENV{TAPPER_HOSTNAME}:$hostname"
    } elsif ( -r $etc_tapper ) {
        my @tapper_config = ();
        if (open my $TAPPERCFG, "<", $etc_tapper) {
            @tapper_config = <$TAPPERCFG>;
            close $TAPPERCFG;
        }
        my ($machinename) =
            map  { my $m = $_; chomp $m; $m =~ s/^[^:]*://; $m }
            grep { /hostname:/ } @tapper_config;
        # only prefix when a hostname entry was actually found
        $hostname = "${machinename}:$hostname" if defined $machinename;
    }
    return $hostname;
}
=head2 send_results
Send the test results to Tapper.
@param hash ref - args
@return hash ref - args
=cut
sub send_results
{
    # Wrap the autotest TAP archive with Tapper meta information (suite
    # name, machine name, testrun/report group, autotest sysinfo) and
    # ship it via report_away(); optionally upload raw result files.
    # Returns $args (with {result_dir} filled in as a side effect).
    my ($self, $test, $args) = @_;
    my $report;                 # NOTE(review): unused -- candidate for removal
    my $tar = Archive::Tar->new;

    $args->{result_dir} = $args->{target}."/client/results/default";
    my $result_dir   = $args->{result_dir};
    my $hostname     = get_machine_name;          # plain function, not a method
    my $testrun_id   = $args->{testrun_id};
    my $report_group = $args->{report_group};

    # minimal TAP report carrying the Tapper headers
    my $report_meta = "Version 13
1..1
# Tapper-Suite-Name: Autotest-$test
# Tapper-Machine-Name: $hostname
# Tapper-Suite-Version: ".$Tapper::TestSuite::AutoTest::VERSION."
ok 1 - Tapper metainfo
";
    $report_meta .= $testrun_id   ? "# Tapper-Reportgroup-Testrun: $testrun_id\n"     : '';
    $report_meta .= $report_group ? "# Tapper-Reportgroup-Arbitrary: $report_group\n" : '';
    $report_meta .= $self->autotest_meta($test, $args);

    my $meta;
    eval { $meta = YAML::Syck::LoadFile("$result_dir/meta.yml") };
    if ($@) {
        # meta.yml missing/broken: report the failure inside the TAP
        # headers instead of aborting the whole report
        $meta = {};
        $report_meta .= "# Error loading $result_dir/meta.yml: $@\n";
        $report_meta .= "# Files in $result_dir\n";
        $report_meta .= $_ foreach map { "# ".$_ } `find $result_dir`;
    }
    push @{$meta->{file_order}}, 'tapper-suite-meta.tap';

    # splice our meta TAP file into the existing archive
    $tar->read("$result_dir/tap.tar.gz");
    $tar->replace_content( 'meta.yml', YAML::Syck::Dump($meta) );
    $tar->add_data('tapper-suite-meta.tap',$report_meta);
    $tar->write("$result_dir/tap.tar.gz", COMPRESS_GZIP);

    my $report_id = $self->report_away($args);
    $self->upload_files($report_id, $test, $args) if $args->{uploadfiles};
    return $args;
}
=head2 autotest_meta
Add meta information from files generated by autotest.
@param hash ref - args
@return string - Tapper TAP metainfo headers
=cut
sub autotest_meta
{
    # Build additional "# Tapper-*" TAP headers from the sysinfo files
    # autotest wrote into the result directory.
    #
    # @param  hash ref - args
    # @return string   - Tapper TAP metainfo headers
    my ($self, $test, $args) = @_;
    my $result_dir = $args->{result_dir};
    my $meta = '';

    # --- generic entries: header name -> sysinfo file ---
    my %metamapping = ( "uname"        => "uname",
                        "flags"        => "cmdline",
                        "machine-name" => "hostname",
                      );
    foreach my $header (keys %metamapping) {
        my $file = "$result_dir/sysinfo/".$metamapping{$header};
        next unless -e $file;
        my ($value) = slurp($file);
        chomp $value;
        $meta .= "# Tapper-$header: $value\n";
    }

    # --- cpu info: "N cores [model]" ---
    my $cpuinfofile = "$result_dir/sysinfo/cpuinfo";
    if (-e $cpuinfofile) {
        my @lines = slurp($cpuinfofile);
        # ARM kernels label the model line "Processor", x86 "model name"
        my $is_arm_cpu = grep { /Processor.*:.*ARM/ } @lines;
        my $entry = $is_arm_cpu ? "Processor" : "model name";
        my @cpuinfo = map { chomp ; s/^$entry.*: *//; $_ } grep { /$entry.*:/ } @lines;
        $meta .= "# Tapper-cpuinfo: ".@cpuinfo." cores [".$cpuinfo[0]."]\n" if @cpuinfo;
    }
    return $meta;
}
=head2 print_help
Print help and die.
=cut
sub print_help
{
    # Print usage to stdout and exit the program.
    # (Fixed the "multple" typo in the --test description.)
    my ($self) = @_;
    say "$0 --test=s@ [ --directory=s ] [--remote-name]";
    say "\t--test|t\t\tName of a subtest, REQUIRED, may be given multiple times";
    say "\t--directory|d\t\tDirectory to copy autotest to";
    say "\t--source_url|s\t\tURL to get autotest from";
    say "\t--remote-name|O\t\tPrint out the name of result files";
    say "\t--help|h\t\tPrint this help text and exit";
    exit;
}
=head2 parse_args
Parse command line arguments and Tapper ENV variables.
@return hash ref - args
=cut
sub parse_args
{
    # Parse command line options and Tapper environment variables into
    # one args hash; prints help and exits when -h is given or no
    # subtest was named.
    #
    # @return hash ref - args
    my ($self) = @_;
    my @tests;
    my ($dir, $remote_name, $help, $source, $uploadfiles);
    $uploadfiles = 1;                    # default: upload raw result files
    GetOptions ("test|t=s"      => \@tests,
                "directory|d=s" => \$dir,
                "remote-name|O" => \$remote_name,
                "source_url|s=s"=> \$source,
                "help|h"        => \$help,
                "uploadfiles!"  => \$uploadfiles,
               );
    $self->print_help() if $help;
    if (not @tests) {
        print "Please name at least one subtest you want to run (--test=...).\n\n.";
        $self->print_help();
    }

    # environment variables come pre-set by Tapper automation; the
    # defaults make manual runs work against a local server
    my $args = {subtests        => \@tests,
                target          => $dir,
                source          => $source || 'http://github.com/autotest/autotest/tarball/0.14.3',
                report_server   => $ENV{TAPPER_REPORT_SERVER},
                report_api_port => $ENV{TAPPER_REPORT_API_PORT} || '7358',
                report_port     => $ENV{TAPPER_REPORT_PORT}     || '7357',
                testrun_id      => $ENV{TAPPER_TESTRUN}         || '',
                report_group    => $ENV{TAPPER_REPORT_GROUP}    || '',
                remote_name     => $remote_name,
                uploadfiles     => $uploadfiles,
               };
    return $args;
}
=head2 run
Run the requested autotest test(s), collect their results and report
them.
@param hash ref - args
@return hash ref - args
=cut
sub run
{
    # Run each requested autotest subtest via "autotest-local run --tap"
    # from inside $target/client and report its results; restores the
    # original working directory afterwards.
    #
    # @param  hash ref - args
    # @return hash ref - args
    my ($self, $args) = @_;
    my $target   = $args->{target};
    my $autotest = "./autotest-local";   # relative: we chdir into client/ first
    my $olddir   = cwd();

    foreach my $test (@{$args->{subtests} || [] }) {
        $self->log->debug("chdir $target/client");
        # NOTE(review): chdir return value is not checked; a failed
        # chdir would run autotest from the previous directory.
        chdir "$target/client";
        $self->log_and_system($autotest, "run", "--tap", $test);
        $self->send_results($test, $args);
    }
    chdir $olddir;
    return $args;
}
1; # End of Tapper::TestSuite::AutoTest
| tapper/Tapper-TestSuite-AutoTest | lib/Tapper/TestSuite/AutoTest.pm | Perl | bsd-2-clause | 17,301 |
% count_correct(+Sudoku, +Field, -Count)
% outputs Count of correct possibilities for Field in scope of Sudoku

% count_correct1(+RowValues, +ColumnValues, +AreaValues, +MaxPossible, -Candidates)
% walks MaxPossible down to 0 and collects every value that does not yet
% occur in the field's row, column or area.
count_correct1(_, _, _, 0, []).
count_correct1(RowValues, ColumnValues, AreaValues, MaxPossible, [MaxPossible|Out]) :-
	% MaxPossible is still legal for this field -> keep it
	\+ member(MaxPossible, RowValues),
	\+ member(MaxPossible, ColumnValues),
	\+ member(MaxPossible, AreaValues),
	Max2 is MaxPossible - 1,
	count_correct1(RowValues, ColumnValues, AreaValues, Max2, Out), !.
count_correct1(RowValues, ColumnValues, AreaValues, MaxPossible, Out) :-
	% MaxPossible already taken somewhere -> skip it
	Max2 is MaxPossible - 1,
	count_correct1(RowValues, ColumnValues, AreaValues, Max2, Out), !.
% count_correct(+Sudoku, +Field, -Count-Field)
% rates Field with the number of values that are still legal for it and
% returns the pair Count-Field (ready for keysort/2, so the most
% constrained field sorts first).
count_correct(Sudoku, Field, Count-Field) :-
	Field = [X, Y, Value],
	get_row_values(Sudoku, [X, Y, Value], RowValues),
	get_column_values(Sudoku, [X, Y, Value], ColumnValues),
	get_area_values(Sudoku, [X, Y, Value], AreaValues),
	Sudoku = sudoku(M, N, _),
	MaxPossible is M * N,        % values range over 1..M*N
	count_correct1(RowValues, ColumnValues, AreaValues, MaxPossible, Out), !,
	length(Out, Count).
% not_done_yet(+In, -Out)
% filters yet not set fields (value 0) in In to Out
not_done_yet([], []).
not_done_yet([[X, Y, 0]|In],[[X, Y, 0]|Out]) :- not_done_yet(In, Out), !.
not_done_yet([_|In], Out) :- not_done_yet(In, Out), !.
% adhoc(+Sudoku, -SolvedSudoku)
% dynamic solving of sudoku: repeatedly picks the most constrained empty
% field (fewest remaining candidates), fills it, and recurses until no
% empty field is left.
adhoc(S1, S2) :-
	S1 = sudoku(_, _, Fields),
	not_done_yet(Fields, NotDone),
	length(NotDone, NotDoneLength),
	adhoc1(S1, S2, NotDoneLength).

% adhoc1(+Sudoku, -Solved, +NotDoneCount) -- counter reaches 0 when done
adhoc1(sudoku(M, N, Fields), sudoku(M, N, Fields), 0).
adhoc1(sudoku(M, N, Fields), Solved, NotDoneCount) :-
	not_done_yet(Fields, NotDone),
	% rate every open field by its number of candidates ...
	maplist(count_correct(sudoku(M, N, Fields)), NotDone, Rated),
	% ... and take the most constrained one first
	keysort(Rated, SortedRated),
	[_-Field|_] = SortedRated,
	correct(sudoku(M, N, Fields), Field, CorrectField),
	set_value(sudoku(M, N, Fields), Field, CorrectField, NewSudoku),
	NewNotDoneCount is NotDoneCount - 1,
	adhoc1(NewSudoku, Solved, NewNotDoneCount).
| lovasko/NoveZamky | src/adhoc.pl | Perl | bsd-2-clause | 1,867 |
#!/usr/bin/env perl
use strict;
use warnings;

# Per-user accumulators; finalize_user() flushes and empties them
# whenever the user id changes on input.
my $previus_uid=-1;   # NOTE(review): unused (typo'd) -- main() keeps its own $previous_uid
my @positives=();
my @negatives=();
our $keepall=0;       # set to 1 by the "keepall" command-line flag
my $usage = "echo \"DV DV_UID|namespace feature\" | $0 [keepall]";
# finalize_user(\@positives, \@negatives)
#
# Emit the cross product of the collected positive and negative example
# lines for one user (one positive line followed by one negative line
# per pair).  When only one class was seen, the examples are printed
# unpaired iff the global $keepall flag is set.  Both arrays are
# emptied afterwards.
#
# (The old ($$) prototype was dropped: Perl prototypes do not validate
# arguments, they only alter parsing, and both call sites already pass
# exactly two array refs.)
sub finalize_user
{
    my ($positives, $negatives) = @_;

    # only if both +/- examples for this user
    if (0 < (scalar @$positives) * (scalar @$negatives)) {
        foreach my $p (@$positives) {
            foreach my $n (@$negatives) {
                print "$p\n$n\n";
            }
        }
    }
    elsif ($keepall == 1) {
        foreach my $e (@$positives, @$negatives) {
            print "$e\n";
        }
    }

    @$positives = ();
    @$negatives = ();
}
# main()
#
# Reads "DV DV_UID|namespace feature" lines from stdin, groups them by
# user id, and hands each user's positive/negative examples to
# finalize_user().  With the single argument "keepall", unpaired
# examples are kept; any other argument prints usage and exits.
sub main()
{
    my $arg = shift @ARGV;
    if (defined $arg) {
        if ($arg eq "keepall") {
            $keepall = 1;
        }
        else {
            print "$usage\n";
            exit();
        }
    }
    else {
        $keepall = 0;
    }

    my $previous_uid = -1;
    while (my $l = <>) {
        chomp $l;
        # Split the first three of space/underscore/pipe separators.
        # Bug fix: "my $_" (lexical $_) is a fatal error on Perl >= 5.24;
        # use undef to discard the second field instead.
        my ($dv, undef, $uid, $rest) = split(/ |_|\|/, $l, 4);
        #print "#uid=$uid\n";
        if (($uid ne $previous_uid) and ($uid ne "-1")) {
            # new user started: flush the previous user's examples
            finalize_user(\@positives, \@negatives);
        }
        if ($dv eq "1") {
            push @positives, $l;
        }
        else {
            push @negatives, $l;
        }
        $previous_uid = $uid;
    }
    # flush the last user
    finalize_user(\@positives, \@negatives);
}

main();
| petricek/datatools | bin/twin.pl | Perl | bsd-2-clause | 1,198 |
# Copyright (c) 2009, 2010 Oleksandr Tymoshenko <gonzo@bluezbox.com>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
package EBook::EPUB::Lite::Metadata::DCItem;
use Moo;
use Types::Standard qw/ArrayRef Object Str/;

#
# Helper class for DC metadata items. Just contains name, value and attributes.
# values goes as CDATA.
# End-user should not use this module directly
#

# Dublin Core element name (e.g. "dc:title").
has name => (isa => Str, is => 'rw');

# Element text content.
has value => (isa => Str, is => 'rw');

# Flat key/value list of XML attributes, in the form XML::Writer's
# dataElement() expects.
has attributes => (
    is      => 'ro',
    isa     => ArrayRef[Str],
    default => sub { [] },
);

# Serialize this item through the given XML::Writer-compatible writer.
# (Removed a dead "my %attr = @{$self->attributes()};" whose result was
# never used.)
sub encode
{
    my ($self, $writer) = @_;
    $writer->dataElement($self->name, $self->value,
        @{$self->attributes},
    );
}

# Override default - copy the referenced contents into our own array
# rather than adopting the caller's reference.
sub copy_attributes
{
    my ($self, $ref) = @_;
    @{$self->attributes()} = @{$ref};
}

1;
| melmothx/EBook-EPUB-Lite | lib/EBook/EPUB/Lite/Metadata/DCItem.pm | Perl | bsd-2-clause | 2,201 |
package Sisimai::Bite::Email::AmazonSES;
use parent 'Sisimai::Bite::Email';
use feature ':5.10';
use strict;
use warnings;
# http://aws.amazon.com/ses/
# Bit flags marking parser positions (inherited from Sisimai::Bite::Email).
my $Indicators = __PACKAGE__->INDICATORS;
# Phrases that mark the start of the bounce report resp. of the embedded
# original message (message/rfc822 part) in the mail body.
my $StartingOf = {
    'message' => ['The following message to <', 'An error occurred while trying to deliver the mail '],
    'rfc822'  => ['content-type: message/rfc822'],
};
# Error phrases mapped to Sisimai bounce reasons.
my $MessagesOf = { 'expired' => ['Delivery expired'] };
# Example headers Amazon SES stamps on outgoing mail:
# X-SenderID: Sendmail Sender-ID Filter v1.0.0 nijo.example.jp p7V3i843003008
# X-Original-To: 000001321defbd2a-788e31c8-2be1-422f-a8d4-cf7765cc9ed7-000000@email-bounces.amazonses.com
# X-AWS-Outgoing: 199.255.192.156
# X-SES-Outgoing: 2016.10.12-54.240.27.6

# Headers scan() needs from the message in addition to the defaults.
sub headerlist { return [qw(x-aws-outgoing x-ses-outgoing x-amz-sns-message-id)] }

# Human-readable description of this parser module.
sub description { return 'Amazon SES(Sending): http://aws.amazon.com/ses/' }
sub scan {
    # Detect an error from Amazon SES
    # @param         [Hash] mhead       Message headers of a bounce email
    # @options mhead [String] from      From header
    # @options mhead [String] date      Date header
    # @options mhead [String] subject   Subject header
    # @options mhead [Array]  received  Received headers
    # @options mhead [String] others    Other required headers
    # @param         [String] mbody     Message body of a bounce email
    # @return        [Hash, Undef]      Bounce data list and message/rfc822 part
    #                                   or Undef if it failed to parse or the
    #                                   arguments are missing
    # @since v4.0.2
    my $class = shift;
    my $mhead = shift // return undef;
    my $mbody = shift // return undef;
    my $match = 0;
    my $xmail = $mhead->{'x-mailer'} || '';

    # 'from'    => qr/\AMAILER-DAEMON[@]email[-]bounces[.]amazonses[.]com\z/,
    # 'subject' => qr/\ADelivery Status Notification [(]Failure[)]\z/,
    # Amazon WorkMail bounces are handled by a different parser module.
    return undef if index($xmail, 'Amazon WorkMail') > -1;
    $match ||= 1 if $mhead->{'x-aws-outgoing'};
    $match ||= 1 if $mhead->{'x-ses-outgoing'};
    return undef unless $match;

    require Sisimai::RFC1894;
    my $fieldtable = Sisimai::RFC1894->FIELDTABLE;
    my $permessage = {};    # (Hash) Store values of each Per-Message field
    my $dscontents = [__PACKAGE__->DELIVERYSTATUS];
    my $rfc822part = '';    # (String) message/rfc822-headers part
    my $rfc822list = [];    # (Array) Each line in message/rfc822 part string
    my $blanklines = 0;     # (Integer) The number of blank lines
    my $readcursor = 0;     # (Integer) Points the current cursor position
    my $recipients = 0;     # (Integer) The number of 'Final-Recipient' header
    my $v = undef;
    my $p = '';             # previous line, kept via the continue block

    for my $e ( split("\n", $$mbody) ) {
        # Read each line between the start of the message and the start of rfc822 part.
        unless( $readcursor ) {
            # Beginning of the bounce message or message/delivery-status part
            if( index($e, $StartingOf->{'message'}->[0]) == 0 ||
                index($e, $StartingOf->{'message'}->[1]) == 0 ) {
                $readcursor |= $Indicators->{'deliverystatus'};
                next;
            }
        }

        unless( $readcursor & $Indicators->{'message-rfc822'} ) {
            # Beginning of the original message part(message/rfc822)
            if( index($e, $StartingOf->{'rfc822'}->[0]) == 0 ) {
                $readcursor |= $Indicators->{'message-rfc822'};
                next;
            }
        }

        if( $readcursor & $Indicators->{'message-rfc822'} ) {
            # message/rfc822 or text/rfc822-headers part
            unless( length $e ) {
                # two consecutive blank lines end the rfc822 part
                last if ++$blanklines > 1;
                next;
            }
            push @$rfc822list, $e;
        } else {
            # message/delivery-status part
            next unless $readcursor & $Indicators->{'deliverystatus'};
            next unless length $e;

            if( my $f = Sisimai::RFC1894->match($e) ) {
                # $e matched with any field defined in RFC3464
                next unless my $o = Sisimai::RFC1894->field($e);
                $v = $dscontents->[-1];

                if( $o->[-1] eq 'addr' ) {
                    # Final-Recipient: rfc822; kijitora@example.jp
                    # X-Actual-Recipient: rfc822; kijitora@example.co.jp
                    if( $o->[0] eq 'final-recipient' ) {
                        # Final-Recipient: rfc822; kijitora@example.jp
                        if( $v->{'recipient'} ) {
                            # There are multiple recipient addresses in the message body.
                            push @$dscontents, __PACKAGE__->DELIVERYSTATUS;
                            $v = $dscontents->[-1];
                        }
                        $v->{'recipient'} = $o->[2];
                        $recipients++;
                    } else {
                        # X-Actual-Recipient: rfc822; kijitora@example.co.jp
                        $v->{'alias'} = $o->[2];
                    }
                } elsif( $o->[-1] eq 'code' ) {
                    # Diagnostic-Code: SMTP; 550 5.1.1 <userunknown@example.jp>... User Unknown
                    $v->{'spec'} = $o->[1];
                    $v->{'diagnosis'} = $o->[2];
                } else {
                    # Other DSN fields defined in RFC3464
                    next unless exists $fieldtable->{ $o->[0] };
                    $v->{ $fieldtable->{ $o->[0] } } = $o->[2];

                    next unless $f == 1;
                    $permessage->{ $fieldtable->{ $o->[0] } } = $o->[2];
                }
            } else {
                # Continued line of the value of Diagnostic-Code field
                next unless index($p, 'Diagnostic-Code:') == 0;
                next unless $e =~ /\A[ \t]+(.+)\z/;
                $v->{'diagnosis'} .= ' '.$1;
            }
        } # End of message/delivery-status
    } continue {
        # Save the current line for the next loop
        $p = $e;
    }

    if( $recipients == 0 && index($$mbody, 'notificationType') > -1 ) {
        # Try to parse with Sisimai::Bite::JSON::AmazonSES module
        # (the body is an SNS JSON notification, not a classic DSN)
        require Sisimai::Bite::JSON::AmazonSES;
        my $j = Sisimai::Bite::JSON::AmazonSES->scan($mhead, $mbody);

        if( ref $j->{'ds'} eq 'ARRAY' ) {
            # Update $dscontents
            $dscontents = $j->{'ds'};
            $recipients = scalar @{ $j->{'ds'} };
        }
    }
    return undef unless $recipients;

    for my $e ( @$dscontents ) {
        # Set default values if each value is empty.
        $e->{'lhost'} ||= $permessage->{'rhost'};
        map { $e->{ $_ } ||= $permessage->{ $_ } || '' } keys %$permessage;

        $e->{'agent'} = __PACKAGE__->smtpagent;
        $e->{'diagnosis'} =~ y/\n/ /;
        $e->{'diagnosis'} = Sisimai::String->sweep($e->{'diagnosis'});

        if( $e->{'status'} =~ /\A[45][.][01][.]0\z/ ) {
            # Get other D.S.N. value from the error message
            # 5.1.0 - Unknown address error 550-'5.7.1 ...
            my $errormessage = $e->{'diagnosis'};
            $errormessage = $1 if $e->{'diagnosis'} =~ /["'](\d[.]\d[.]\d.+)['"]/;
            $e->{'status'} = Sisimai::SMTP::Status->find($errormessage) || $e->{'status'};
        }

        SESSION: for my $r ( keys %$MessagesOf ) {
            # Verify each regular expression of session errors
            next unless grep { index($e->{'diagnosis'}, $_) > -1 } @{ $MessagesOf->{ $r } };
            $e->{'reason'} = $r;
            last;
        }
    }
    $rfc822part = Sisimai::RFC5322->weedout($rfc822list);
    return { 'ds' => $dscontents, 'rfc822' => $$rfc822part };
}
1;
__END__
=encoding utf-8
=head1 NAME
Sisimai::Bite::Email::AmazonSES - bounce mail parser class for C<Amazon SES>.
=head1 SYNOPSIS
use Sisimai::Bite::Email::AmazonSES;
=head1 DESCRIPTION
Sisimai::Bite::Email::AmazonSES parses a bounce email which created by C<Amazon Simple Email Service>.
Methods in the module are called from only Sisimai::Message.
=head1 CLASS METHODS
=head2 C<B<description()>>
C<description()> returns description string of this module.
print Sisimai::Bite::Email::AmazonSES->description;
=head2 C<B<smtpagent()>>
C<smtpagent()> returns MTA name.
print Sisimai::Bite::Email::AmazonSES->smtpagent;
=head2 C<B<scan(I<header data>, I<reference to body string>)>>
C<scan()> method parses a bounced email and returns the results as an array reference.
See Sisimai::Message for more details.
=head1 AUTHOR
azumakuniyuki
=head1 COPYRIGHT
Copyright (C) 2014-2018 azumakuniyuki, All rights reserved.
=head1 LICENSE
This software is distributed under The BSD 2-Clause License.
=cut
| azumakuniyuki/p5-Sisimai | lib/Sisimai/Bite/Email/AmazonSES.pm | Perl | bsd-2-clause | 8,672 |
package AiMdle::Config;
# Static configuration for AiMdleBot; all symbols listed in @EXPORT are
# pushed into the caller's namespace on "use AiMdle::Config".
# NOTE(review): no "use strict" here -- @ISA/@EXPORT are package globals
# by design; adding strict would require declaring them with "our".
require Exporter;
BEGIN { push @ISA, "Exporter"; }

@EXPORT = qw(
    $SN
    $PWD
    $VER
    $ADMIN_PWD
    $WELCOME_MSG
    @LVL_TIMES
);

# Screen name and password used to sign on.
our $SN = 'AiMdleBot';
our $PWD = '';

our $VER = 0.07;

# NOTE(review): hard-coded admin password shipped in source control --
# replace/externalize before deploying.
our $ADMIN_PWD = 'bad_pass';

# Greeting sent to users on first contact.
our $WELCOME_MSG = "Hello there! I'm AiMdleBot, and if you type " . 
    "'help' to me, I'll let you know what you can do!";

# Seconds of play time required to reach each level (index = level).
our @LVL_TIMES = (0, 60, 120, 240, 480, 1000, 2500, 9000000);

1;
| rpj/aimdle | AiMdle/Config.pm | Perl | bsd-3-clause | 482 |
<?php
// Polish translations for the RequireSession exception messages; the
// %s placeholder is filled via sprintf at runtime.  Shared strings come
// from CommonSaveInit.lang.pl.
include 'CommonSaveInit.lang.pl';

$SESSION_REFERENCE_IS_WRONG = 'Brak referencji do modułu [app::$session].';
$PASS_VALID_REFERENCE = 'Przekaż prawidłową referencje do modułu [app::$session]. Aktualnie przekazujesz <b>%s</b>.';
$HERE_CONFIG_IS_WRONG = 'Popraw konfiguracje w tym miejscu.';
#!/usr/bin/perl
# The exclusive file contains class III protein-ORF pairs, and we need to
# separate class IV and V pairs from them.
#
# Reads BLASTp_exclusive.txt and BLASTp_consecutive.txt from the current
# directory and writes the exclusive pairs that have no consecutive
# counterpart to BLASTp_nonmatching.txt.
#
# Changes: the bareword, two-argument, unchecked open() calls were
# replaced by checked three-argument opens with lexical filehandles.
use strict;
use warnings;

my %pairs;     # query id -> full line from the exclusive report
my %cpairs;    # query id -> full line from the consecutive report

open my $exc, '<', 'BLASTp_exclusive.txt'
    or die "Can't open BLASTp_exclusive.txt: $!\n";
while (<$exc>) {
    chomp;
    my $line  = $_;
    my @block = split /\s/, $line;
    # skip the header row, which starts with "Query"
    if ($block[0] ne 'Query') {
        $pairs{$block[0]} = $line;
    }
}
close $exc;

open my $cons, '<', 'BLASTp_consecutive.txt'
    or die "Can't open BLASTp_consecutive.txt: $!\n";
while (<$cons>) {
    chomp;
    my $line = $_;
    # the first whitespace-delimited token is the query id
    if ($line =~ /^(?<pi25611>[^\s]+)\s+/) {
        $cpairs{$+{pi25611}} = $line;
    }
}
close $cons;

open my $out, '>', 'BLASTp_nonmatching.txt'
    or die "Can't open BLASTp_nonmatching.txt: $!\n";
print {$out} "Query\t\t\tDatabase\tScore\tE Value\n";
foreach my $key (sort keys %pairs) {
    # keep only pairs absent from the consecutive report
    if (!(defined $cpairs{$key})) {
        print {$out} "$pairs{$key}\n";
    }
}
close $out or die "Can't close BLASTp_nonmatching.txt: $!\n";
| gtblack/DNA.analysis | scripts/non_match.pl | Perl | mit | 795 |
%% SIPC - Scheme In Prolog with Continuation Semantics
%% a simple implementation of Scheme in Prolog (w. continuation semantics)
%% Tim Finin, University of Pennsylvania, Mon Oct 6 10:11:18 1986
%% note: in order to handle side effects in a reasonable way,
%% environments are stored in the database. The result is that old
%% environments never die. They don't even fade away!
% operators
:- set_prolog_flag(backquoted_string,false).
:- dynamic(((( ==> ))/2)).
%:- op(1100,xfx, (==)).
:- op(900,xfy,user:'==>'). % used for macros.
:- op(500,fx,user:'`'). % `x = quote(x).
:- thread_local(t_l:bound/3).
%:- use_module(library(sexpr_reader)).
% sip - creates a virgin initial environment and enters a
% read-eval-print loop for SIP. Exit by typing "prolog(`abort)."
% ssip - creates a virgin initial environment and enters a
% read-eval-print loop for SIP.  Exit by typing "prolog(`abort)."
ssip :- create0Env, sipREP2.

% sipREP2 - enters a read-eval-print loop for SIP; each term read is
% evaluated with a print(...) wrapper in the global environment (0).
% The repeat/fail pair keeps the loop alive across failures.
sipREP2 :-
	repeat,
	writeln('SIP> '),
	read(E),
	once(eval3(print(E),0,k(nil))),
	fail.
% create0Env sets up E as the inital Scheme environment.
% create0Env sets up E as the inital Scheme environment: wipes all
% variable bindings, resets the environment counter, and loads the
% bootstrap definitions from sipcore.pl.
create0Env :-
	% flush all old environments.
	% abolish(t_l:bound,3),
	retractall(t_l:bound(_,_,_)),
	% (re)set the environment counter.
	abolish(envNum,1),
	assert(envNum(1)),
	% define the initial variables.
	load('sipcore.pl').
% abbreviations for eval/3.
% Abbreviations for eval/3:
%   eval(+Exp)          evaluate Exp in the global env, discard the value.
%   eval(+Exp,-Val)     evaluate Exp in the global env, return its value.
%   eval(+Exp,+Env,+C)  evaluate Exp in Env with continuation C.
eval(E) :- eval3(E,0,k(nil)),!.
% BUG FIX: the old clause used continuation k(nil), so V was never
% bound; use the k(return(V)) continuation exactly as evalReturn/3 does.
eval(E,V) :- eval3(E,0,k(return(V))),!.
eval(E,Env,C) :- eval3(E,Env,C),!.

% like eval3, but "returns" its value to caller.
evalReturn(Exp,Env,Val) :- eval3(Exp,Env,k(return(Val))).
% eval3(expression,environment,continuation)
% Core evaluator: numbers are self-evaluating, other atoms are looked
% up in the environment, special forms are dispatched explicitly, and
% anything else is treated as a function application.
eval3(X,_,C) :- number(X),applyC(C,X).
eval3(X,E,C) :- atomic(X),!,lookUp(X,V,E),applyC(C,V).

% syntax kludge - call a fn F of no arguments as [F].
% bug - doesn't check for special forms!!!
% BUG FIX: the environment variable was misspelled (unbound Env instead
% of E), so the evalArgs continuation captured an unbound environment.
eval3([F|Args],E,C) :- !,eval3(F,E,k(evalArgs(Args,[]),E,C)).

% special forms ...
eval3(quote(E),_,C) :- !,applyC(C,E).
eval3(lambda(Vars,Body),E,C) :- !, applyC(C,closure(Vars,Body,E)).
eval3(set(Sym,Val),E,C) :- !, eval3(Val,E,k(set(Sym),E,C)).
eval3(define(Sym,Exp),Env,C) :- !, eval3(Exp,Env,k(define(Sym),Env,C)).
eval3(if(E1,E2,E3),Env,C) :- !, eval3(E1,Env,k(if(E2,E3),Env,C)).
eval3(and(X,Y),Env,C) :- !, eval3(X,Env,k(and(Y),Env,C)).
eval3(or(X,Y),Env,C) :- !, eval3(X,Env,k(or(Y),Env,C)).
eval3(delay(X),Env,C) :- applyC(C,promise(X,Env,_)).
eval3(F1==>F2,_,C) :- !, assert(F1==>F2), applyC(C,F2).

% this should be done via apply.
eval3(eval(X),Env,C) :- !, eval3(X,Env,k(eval,Env,C)).

% check for a "macro" application.
eval3(X,E,C) :- (X==>NewX), !, eval3(NewX,E,C).

% non-special form: evaluate the functor, then the arguments.
eval3(E,Env,C) :-
	!,
	E =.. [F|Args],
	eval3(F,Env,k(evalArgs(Args,[]),Env,C)).
% applyC(continuation,value) ...
% a continuation has one of the forms:
%    k(operation,expression,next continuation)
%    k(nil)
%    k(return(Value))
% Each clause consumes a value V and drives the computation one step.
applyC(k(if(Then,Else),E,C),false) :- !,eval3(Else,E,C).
applyC(k(if(Then,Else),E,C),_) :- eval3(Then,E,C).
applyC(k(set(Sym),E,C),V) :- set(Sym,V,E,OldV),applyC(C,OldV).
applyC(k(define(Sym),E,C),V) :- define(Sym,V,E),applyC(C,Sym).
% and/or short-circuit: only false is treated as boolean false
applyC(k(and(X),E,C),V) :- V=false->applyC(C,false);eval3(X,E,C).
applyC(k(or(X),E,C),V) :- V=false->eval3(X,E,C);applyC(C,V).
applyC(k(eval,E,C),V) :- eval3(V,E,C).
% evaluate remaining arguments, accumulating values in reverse order
applyC(k(evalArgs([In1|InRest],Out),E,C),V) :-
	eval3(In1,E,k(evalArgs(InRest,[V|Out]),E,C)).
% all arguments evaluated: reverse the accumulator and apply
applyC(k(evalArgs([],RCall),E,C),V) :-
	reverse([V|RCall],Call),
	Call = [F|Args],
	apply(F,Args,C).
% we're done
applyC(k(return(V)),V) :- !.
applyC(k(nil),V) :- !.
% apply(Function,Args,Continuation) - apply a primitive function.
% call/cc: the current continuation itself becomes the argument
apply(pf(callCC),[F],C) :- !, apply(F,[C],C).
apply(pf(X),Args,C) :-
	!,
	applyPrim(X,Args,V),
	applyC(C,V).

% apply a compound function.
% NOTE(review): the third argument is the continuation despite being
% named Value -- it is passed straight through to eval3/3.
apply(closure(Parameters,Body,Env),Args,Value) :- !,
	makeEnv(Parameters,Args,New/Env),
	eval3(Body,New/Env,Value),
	!.

% this should never happen.
apply(X,Args,_) :- err('I dont know how to apply:', [X|Args]).
% applyPrim(function,arguments,value)
% Table of primitive functions; falls through to an error report when
% the name/arity does not match any primitive.
applyPrim('+',[A1,A2],V) :- V is A1+A2.
applyPrim('-',[A1,A2],V) :- V is A1-A2.
applyPrim('*',[A1,A2],V) :- V is A1*A2.
applyPrim('=',[A1,A2],V) :- A1=A2->V=true;V=false.
applyPrim(cons,[A1,A2],[A1|A2]).
applyPrim(car,[[H|T]],H).
applyPrim(cdr,[[H|T]],T).
applyPrim(begin,[A1,A2],A2).
% force a promise: evaluate its body once, cache the value in V
applyPrim(force,[promise(B,E,V)],V) :- var(V)->evalReturn(B,E,V);true.
applyPrim(load,[File],File) :- !,load(File).
% escape hatch into Prolog; the goal itself is returned on success
applyPrim(prolog,[X],V) :- !,call(X) -> V=X;V=false.
applyPrim(print,[X],true) :- !,writeln(X).
applyPrim(F,Args,error) :-
	Call =.. [F|Args],
	err('bad call to a primitive function',Call).
% evalList(listOfArguments,listOfValues,environment)
% evals a list of expressions and returns a list of the results.
evalList([],[],_).
evalList([A1|Arest],[V1|Vrest],E) :-
	eval3(A1,E,V1),
	evalList(Arest,Vrest,E).
% makeEnv(+Parameters,+Arguments,-Environment) - creates a new environment
% in which the variables in the 1st arg are t_l:bound to the values in the
% 2nd.  The new environment is returned in the 3rd as New/Old, where New
% is a fresh number from the envNum/1 counter and Old is the parent.
makeEnv(Ps,As,New/Old) :-
	% determine the next environment number to use.
	retract(envNum(N)),
	New is N+1,
	assert(envNum(New)),
	!,
	% add the binding to the new environment.
	addBindings(Ps,As,New/Old).

% addBindings(Variables,Values,Environment) binds variables to
% corresponding values in the specified Environment; arity mismatch is
% reported as too few/too many arguments.
addBindings([],[],_) :- !.
addBindings([P|Ps],[V|Vs],Env) :-
	!,
	define(P,V,Env),
	addBindings(Ps,Vs,Env).
addBindings([_|_],[],_) :- !, err('too few arguments').
addBindings([],[_|_],_) :- !, err('too many arguments').
% looks up the values associated with a symbol in an environment. It's
% an error if there is no binding.
lookUp(Symbol,Value,Env) :- value(Symbol,Value,Env,_),!.
lookUp(S,_,Env) :- err('unbound symbol: ',S/Env).

% value(+symbol,-value,+frameSought,-frameFound) like lookUp but also
% returns the frame in which the variable was t_l:bound.  Environments
% are chained as Child/Parent; the second clause walks up the chain
% when the symbol is not bound in the current frame.
value(S,V,Env,Env) :- t_l:bound(S,V,Env),!.
value(S,V,SIN,E) :- compound(SIN),SIN = (E1/E2),
	\+ (t_l:bound(S,V,SS),compound(SS),SS = E1/E2 ),
	value(S,V,E2,E).

% change the value associated with symbol S to V, returning the old value.
% The update happens in the frame where S is actually bound.
set(S,V,Env,OldV) :-
	value(S,OldV,Env,BindingEnv),
	!,
	retract(t_l:bound(S,OldV,BindingEnv)),
	assert(t_l:bound(S,V,BindingEnv)).
set(S,_,E,_) :- err('symbol not t_l:bound in environment:',(S/E)).

% add an initial binding for symbol S to value V in environment Env;
% warns (and replaces) when S was already defined in Env.
define(S,V,Env) :-
	sip_when(retract(t_l:bound(S,_,Env)),
		warn('symbol already defined in environment: ',(S,Env))),
	assert(t_l:bound(S,V,Env)).
% load(F) reads and evals all expressions in file F.
/*
load(File):- see(File),!,
   repeat,
   call_cleanup((read(X), ((X = end_if_file ) -> true;((once(loadProcess(X)),fail)))),seen),!.
*/
% Stream-based variant: read terms one by one and evaluate each with
% loadProcess/1 until end_of_file; backquoted_string(false) keeps `x
% readable as quote syntax.
load(File) :-
	open(File,read,S),
	repeat,read_term(S,X,[module(user),backquoted_string(false)]),
	(end_of_file == X -> close(S) ; (loadProcess(X),fail)),
	!.

loadProcess(end_of_file).
loadProcess(X) :- eval(X),fail.
%%% misc. utilities ...
err(Msg) :- warn(Msg),!,fail.
err(Msg1,Msg2) :- warn(Msg1,Msg2),!,fail.
warn(Msg) :- writeln(Msg).
warn(Msg1,Msg2) :-writeln(Msg1),write(' '),write(Msg2).
% once(X) executes X only once.
%once(X) :- X,!.
%writeln(X) :- nl,write(X).
sip_when(Test,Then) :- Test->Then;true.
| TeamSPoon/MUD_ScriptEngines | t/reference/sip2.pl | Perl | mit | 7,255 |
#!/usr/bin/perl -w
# usage: downsample.pl <configure file> <down-sample size> <output configure file>
#
# Runs the QRF down-sampling R script against the tables named in the
# configure file, then writes a copy of the configure file (unchanged
# contents) as the down-sampled variant.
#
# Changes: bareword, two-argument open() calls were replaced by checked
# three-argument opens with lexical filehandles.
use strict;

my $configure_file = $ARGV[0] or die "no input configure files!\n";
my $sample_size    = $ARGV[1] or die "no down sample size!\n";
my $configure_file_down_sampled = $ARGV[2] or die "no output downsampled configure file name!\n";

# root of the QRF installation, taken from the $QRF environment variable
my $qrf_path = `echo \$QRF`;
chomp($qrf_path);

# get configure parameters
my ($dir, $snp_tab, $gene_tab, $cov_tab) = check_conf_file($configure_file);

# downsample and get new SNP info, gene info and cov info
my $cmd = "R --no-save --no-restore --args wd=$dir snpInfo=$snp_tab exprInfo=$gene_tab covarInfo=$cov_tab sampleSize=$sample_size < $qrf_path/R/downsample.R\n";
run_cmd($cmd);

# generate new configure file
open my $in, '<', $configure_file
    or die "can't open configure file $configure_file:$!\n";
open my $out, '>', $configure_file_down_sampled
    or die "can't open new configure file $configure_file_down_sampled:$!\n";
while (<$in>) {
    chomp;
    my @f = split "=";
    # remember the down-sampled table names (suffix ".SampleSize-N.txt")
    if ($f[0] eq "SNP_tab") {
        $snp_tab = $f[1] . ".SampleSize-${sample_size}.txt";
    } elsif ($f[0] eq "gene_tab") {
        $gene_tab = $f[1] . ".SampleSize-${sample_size}.txt";
    } elsif (($f[0] eq "covar_tab") and ($f[1] ne "NULL") and ($f[1] ne "null") and ($f[1] ne "")) {
        $cov_tab = $f[1] . ".SampleSize-${sample_size}.txt";
    }
    print {$out} join("=", @f) . "\n";
}
close $in;
close $out or die "can't close $configure_file_down_sampled:$!\n";
# check_conf_file($path)
# Parse a QRF configure file (lines of "key=value") and return
# ($dir, $snp_tab, $gene_tab, $cov_tab).
# An absent/empty/NULL directory defaults to "./"; dies if the SNP table or
# the gene table entry is missing or NULL.
sub check_conf_file{
    my ($input) = @_;

    my %conf = (
        directory => "",
        SNP_tab   => "",
        gene_tab  => "",
        covar_tab => "",
    );

    # Lexical filehandle + 3-arg open (the original's open(FH,"<$input")
    # was a bareword handle with 2-arg open).
    open(my $fh, '<', $input) or die "can't open configure file $input:$!\n";
    while (my $line = <$fh>) {
        chomp $line;
        my ($key, $value) = split "=", $line;
        # Only keep keys we know about; a "key=" line with no value is
        # treated as absent rather than producing an undef warning later.
        if (defined $key and exists $conf{$key} and defined $value) {
            $conf{$key} = $value;
        }
    }
    close($fh);

    my ($dir, $snp_tab, $gene_tab, $cov_tab) =
        @conf{qw(directory SNP_tab gene_tab covar_tab)};

    ## check parameter read
    if (($dir eq "") or ($dir eq "NULL")) {
        $dir = "./";
    }
    if (($snp_tab eq "") or ($snp_tab eq "NULL") or ($gene_tab eq "") or ($gene_tab eq "NULL")) {
        die "SNP information and Gene/CpG information could not be NULL!!\n\n";
    }

    return ($dir, $snp_tab, $gene_tab, $cov_tab);
}
# run_cmd($command)
# Echo a shell command to STDERR, execute it, and die unless it exits zero.
sub run_cmd{
    my ($command) = @_;
    print STDERR "$command\n";
    system($command) == 0
        or die "can't execute command $command:$!\n";
}
| dnaase/QRF_spark | src/main/perl/down_sample.for_matrixEQTL.pl | Perl | mit | 2,134 |
=head1 NAME
memcached_get, memcached_mget, memcached_fetch, memcached_mget_execute,
memcached_mget_execute_by_key - Get a value
=head1 LIBRARY
C Client Library for memcached (libmemcached, -lmemcached)
=head1 SYNOPSIS
#include <memcached.h>
memcached_result_st *
memcached_fetch_result (memcached_st *ptr,
memcached_result_st *result,
memcached_return_t *error);
char *
memcached_get (memcached_st *ptr,
const char *key, size_t key_length,
size_t *value_length,
uint32_t *flags,
memcached_return_t *error);
memcached_return_t
memcached_mget (memcached_st *ptr,
const char * const *keys,
const size_t *key_length,
size_t number_of_keys);
char *
memcached_get_by_key (memcached_st *ptr,
const char *master_key, size_t master_key_length,
const char *key, size_t key_length,
size_t *value_length,
uint32_t *flags,
memcached_return_t *error);
memcached_return_t
memcached_mget_by_key (memcached_st *ptr,
const char *master_key, size_t master_key_length,
const char * const *keys,
const size_t *key_length,
size_t number_of_keys);
char *
memcached_fetch (memcached_st *ptr,
char *key, size_t *key_length,
size_t *value_length,
uint32_t *flags,
memcached_return_t *error);
memcached_return_t
memcached_fetch_execute (memcached_st *ptr,
memcached_execute_fn *callback,
void *context,
uint32_t number_of_callbacks);
memcached_return_t
memcached_mget_execute (memcached_st *ptr,
const char * const *keys,
const size_t *key_length,
size_t number_of_keys,
memcached_execute_fn *callback,
void *context,
uint32_t number_of_callbacks);
memcached_return_t
memcached_mget_execute_by_key (memcached_st *ptr,
const char *master_key,
size_t master_key_length,
const char * const *keys,
const size_t *key_length,
size_t number_of_keys,
memcached_execute_fn *callback,
void *context,
uint32_t number_of_callbacks);
=head1 DESCRIPTION
memcached_get() is used to fetch an individual value from the server. You
must pass in a key and its length to fetch the object. You must supply
three pointer variables which will give you the state of the returned
object. A uint32_t pointer to contain whatever flags you stored with the value,
a size_t pointer which will be filled with the size of the object, and a
memcached_return_t pointer to hold any error. The object will be returned
upon success and NULL will be returned on failure. Any object returned by
memcached_get() must be released by the caller application.
memcached_mget() is used to select multiple keys at once. For multiple key
operations it is always faster to use this function. This function always
works asynchronously. memcached_fetch() is then used to retrieve any keys
found. No error is given on keys that are not found. You must call either
memcached_fetch() or memcached_fetch_result() after a successful call to
memcached_mget(). You should continue to call these functions until they
return NULL (aka no more values). If you need to quit in the middle of a
memcached_get() call, execute a memcached_quit(). After you do this, you can
issue new queries against the server.
memcached_fetch() is used to fetch an individual value from the server.
memcached_mget() must always be called before using this method. You
must pass in a key and its length to fetch the object. You must supply
three pointer variables which will give you the state of the returned
object. A uint32_t pointer to contain whatever flags you stored with the value,
a size_t pointer which will be filled with the size of the object, and a
memcached_return_t pointer to hold any error. The object will be returned
upon success and NULL will be returned on failure. MEMCACHED_END is returned
by the *error value when all objects that have been found are returned.
The final value upon MEMCACHED_END is null. Values returned by
memcached_fetch() must be freed by the caller. memcached_fetch() will
be DEPRECATED in the near future, memcached_fetch_result() should be used
instead.
memcached_fetch_result() is used to return a memcached_result_st(3) structure
from a memcached server. The result object is forward compatible with changes
to the server. For more information please refer to the memcached_result_st(3)
help. This function will dynamically allocate a result structure for you
if you do not pass one to the function.
memcached_fetch_execute() is a callback function for result sets. Instead
of returning the results to you for processing, it passes each of the
result sets to the list of functions you provide. It passes to the function
a memcached_st that can be cloned for use in the called function (it can not
be used directly). It also passes a result set which does not need to be freed.
Finally it passes a "context". This is just a pointer to a memory reference
you supply to the calling function. Currently only one value is being passed
to each function call. In the future there will be an option to allow this
to be an array.
memcached_mget_execute() and memcached_mget_execute_by_key() is
similar to memcached_mget(), but it may trigger the supplied callbacks
with result sets while sending out the queries. If you try to perform
a really large multiget with memcached_mget() you may encounter a
deadlock in the OS kernel (we fail to write data to the socket because
the input buffer is full). memcached_mget_execute() solves this
problem by processing some of the results before continuing sending
out requests. Please note that this function is only available in the
binary protocol.
memcached_get_by_key() and memcached_mget_by_key() behave in a similar nature
as memcached_get() and memcached_mget(). The difference is that they take
a master key that is used for determining which server an object was stored
if key partitioning was used for storage.
All of the above functions are not supported when the C<MEMCACHED_BEHAVIOR_USE_UDP>
has been set. Executing any of these functions with this behavior on will result in
C<MEMCACHED_NOT_SUPPORTED> being returned or, for those functions which do not return
a C<memcached_return_t>, the error function parameter will be set to
C<MEMCACHED_NOT_SUPPORTED>.
=head1 RETURN
All objects returned must be freed by the calling application.
memcached_get() and memcached_fetch() will return NULL on error. You must
look at the value of error to determine what the actual error was.
MEMCACHED_KEY_TOO_BIG is set to error whenever memcached_fetch() was used
and the key was set larger than MEMCACHED_MAX_KEY, which was the largest
key allowed for the original memcached ascii server.
=head1 HOME
To find out more information please check:
L<https://launchpad.net/libmemcached>
=head1 AUTHOR
Brian Aker, E<lt>brian@tangent.orgE<gt>
=head1 SEE ALSO
memcached(1) libmemcached(3) memcached_strerror(3)
=cut
| simplegeo/libmemcached | docs/memcached_get.pod | Perl | bsd-3-clause | 7,817 |
=pod
=for comment openssl_manual_section:7
=head1 NAME
x509 - X.509 certificate handling
=head1 SYNOPSIS
#include <openssl/x509.h>
=head1 DESCRIPTION
A X.509 certificate is a structured grouping of information about
an individual, a device, or anything one can imagine. A X.509 CRL
(certificate revocation list) is a tool to help determine if a
certificate is still valid. The exact definition of those can be
found in the X.509 document from ITU-T, or in RFC3280 from PKIX.
In OpenSSL, the type X509 is used to express such a certificate, and
the type X509_CRL is used to express a CRL.
A related structure is a certificate request, defined in PKCS#10 from
RSA Security, Inc., also reflected in RFC2986. In OpenSSL, the type
X509_REQ is used to express such a certificate request.
To handle some complex parts of a certificate, there are the types
X509_NAME (to express a certificate name), X509_ATTRIBUTE (to express
certificate attributes), X509_EXTENSION (to express a certificate
extension) and a few more.
Finally, there's the supertype X509_INFO, which can contain a CRL, a
certificate and a corresponding private key.
B<X509_>I<...>, B<d2i_X509_>I<...> and B<i2d_X509_>I<...> handle X.509
certificates, with some exceptions, shown below.
B<X509_CRL_>I<...>, B<d2i_X509_CRL_>I<...> and B<i2d_X509_CRL_>I<...>
handle X.509 CRLs.
B<X509_REQ_>I<...>, B<d2i_X509_REQ_>I<...> and B<i2d_X509_REQ_>I<...>
handle PKCS#10 certificate requests.
B<X509_NAME_>I<...> handle certificate names.
B<X509_ATTRIBUTE_>I<...> handle certificate attributes.
B<X509_EXTENSION_>I<...> handle certificate extensions.
=head1 SEE ALSO
L<X509_NAME_ENTRY_get_object(3)>,
L<X509_NAME_add_entry_by_txt(3)>,
L<X509_NAME_add_entry_by_NID(3)>,
L<X509_NAME_print_ex(3)>,
L<X509_NAME_new(3)>,
L<d2i_X509(3)>,
L<d2i_X509_ALGOR(3)>,
L<d2i_X509_CRL(3)>,
L<d2i_X509_NAME(3)>,
L<d2i_X509_REQ(3)>,
L<d2i_X509_SIG(3)>,
L<crypto(3)>,
L<x509v3(3)>
=head1 COPYRIGHT
Copyright 2003-2016 The OpenSSL Project Authors. All Rights Reserved.
Licensed under the OpenSSL license (the "License"). You may not use
this file except in compliance with the License. You can obtain a copy
in the file LICENSE in the source distribution or at
L<https://www.openssl.org/source/license.html>.
=cut
| openweave/openweave-core | third_party/openssl/openssl/doc/crypto/x509.pod | Perl | apache-2.0 | 2,272 |
# B::Debug - walk the Perl syntax tree, printing debug info about ops.
package B::Debug;

our $VERSION = '1.18';

use strict;
require 5.006;
use B qw(peekop class walkoptree walkoptree_exec
    main_start main_root cstring sv_undef SVf_NOK SVf_IOK);
use Config;

# @optype/@specialsv_name moved from B::Asmdata into B itself in 5.9.
my (@optype, @specialsv_name);
require B;
if ($] < 5.009) {
    require B::Asmdata;
    B::Asmdata->import (qw(@optype @specialsv_name));
} else {
    B->import (qw(@optype @specialsv_name));
}

# Perls before 5.6.2 lack B::GV::SAFENAME; provide a compatible fallback.
if ($] < 5.006002) {
    eval q|sub B::GV::SAFENAME {
my $name = (shift())->NAME;
# The regex below corresponds to the isCONTROLVAR macro from toke.c
$name =~ s/^([\cA-\cZ\c\\c[\c]\c?\c_\c^])/"^".chr(64 ^ ord($1))/e;
return $name;
}|;
}

# Optionally load B::Flags for symbolic flag names (skipped under PERL_CORE).
my ($have_B_Flags, $have_B_Flags_extra);
if (!$ENV{PERL_CORE}){ # avoid CORE test crashes
    eval { require B::Flags and $have_B_Flags++ };
    $have_B_Flags_extra++ if $have_B_Flags and $B::Flags::VERSION gt '0.03';
}

# Tracks GVs already printed so symbol-table cycles are not re-dumped.
my %done_gv;

# Format one op as "address class ppaddr" for cross-reference lines.
sub _printop {
    my $op = shift;
    my $addr = ${$op} ? $op->ppaddr : '';
    $addr =~ s/^PL_ppaddr// if $addr;
    return sprintf "0x%08x %6s %s", ${$op}, ${$op} ? class($op) : '', $addr;
}
# Dump the fields common to every op: class, address, next/sibling links,
# targ, type/name, the version-dependent opt/seq field, and flags/private
# (with symbolic names when B::Flags is available).
sub B::OP::debug {
    my ($op) = @_;
    printf <<'EOT', class($op), $$op, _printop($op), _printop($op->next), _printop($op->sibling), $op->targ, $op->type, $op->name;
%s (0x%lx)
op_ppaddr %s
op_next %s
op_sibling %s
op_targ %d
op_type %d %s
EOT
    # op_seq was replaced by op_opt in 5.10-era perls.
    if ($] > 5.009) {
        printf <<'EOT', $op->opt;
op_opt %d
EOT
    } else {
        printf <<'EOT', $op->seq;
op_seq %d
EOT
    }
    if ($have_B_Flags) {
        printf <<'EOT', $op->flags, $op->flagspv, $op->private, $op->privatepv;
op_flags %d %s
op_private %d %s
EOT
    } else {
        printf <<'EOT', $op->flags, $op->private;
op_flags %d
op_private %d
EOT
    }
}
# Each op-class dump method prints its own extra fields after delegating to
# its parent class's dump.

sub B::UNOP::debug {
    my ($op) = @_;
    $op->B::OP::debug();
    printf "\top_first\t%s\n", _printop($op->first);
}

sub B::BINOP::debug {
    my ($op) = @_;
    $op->B::UNOP::debug();
    printf "\top_last \t%s\n", _printop($op->last);
}

sub B::LOOP::debug {
    my ($op) = @_;
    $op->B::BINOP::debug();
    printf <<'EOT', _printop($op->redoop), _printop($op->nextop), _printop($op->lastop);
op_redoop %s
op_nextop %s
op_lastop %s
EOT
}

sub B::LOGOP::debug {
    my ($op) = @_;
    $op->B::UNOP::debug();
    printf "\top_other\t%s\n", _printop($op->other);
}

sub B::LISTOP::debug {
    my ($op) = @_;
    $op->B::BINOP::debug();
    printf "\top_children\t%d\n", $op->children;
}
# PMOP (pattern-match op): dump the replacement op tree, the pattern stash
# (name + pad offset under ithreads, pointer otherwise), the precompiled
# pattern and the version-dependent flag words.
sub B::PMOP::debug {
    my ($op) = @_;
    $op->B::LISTOP::debug();
    # pmreplroot is a B object before 5.8 and a plain value afterwards.
    printf "\top_pmreplroot\t0x%x\n", $] < 5.008 ? ${$op->pmreplroot} : $op->pmreplroot;
    printf "\top_pmreplstart\t0x%x\n", ${$op->pmreplstart};
    printf "\top_pmnext\t0x%x\n", ${$op->pmnext} if $] < 5.009005;
    if ($Config{'useithreads'}) {
        printf "\top_pmstashpv\t%s\n", cstring($op->pmstashpv);
        printf "\top_pmoffset\t%d\n", $op->pmoffset;
    } else {
        printf "\top_pmstash\t%s\n", cstring($op->pmstash);
    }
    printf "\top_precomp\t%s\n", cstring($op->precomp);
    printf "\top_pmflags\t0x%x\n", $op->pmflags;
    printf "\top_reflags\t0x%x\n", $op->reflags if $] >= 5.009;
    printf "\top_pmpermflags\t0x%x\n", $op->pmpermflags if $] < 5.009;
    printf "\top_pmdynflags\t0x%x\n", $op->pmdynflags if $] < 5.009;
    $op->pmreplroot->debug if $] < 5.008;
}
# COP (control op / statement boundary): dump the source-location fields
# (label, stash, file, line) and the warnings/io state.
sub B::COP::debug {
    my ($op) = @_;
    $op->B::OP::debug();
    # cop_warnings may be a reference (dereference it) or a plain bitfield.
    my $warnings = ref $op->warnings ? ${$op->warnings} : 0;
    printf <<'EOT', $op->label, $op->stashpv, $op->file, $op->cop_seq, $op->arybase, $op->line, $warnings;
cop_label "%s"
cop_stashpv "%s"
cop_file "%s"
cop_seq %d
cop_arybase %d
cop_line %d
cop_warnings 0x%x
EOT
    # cop_io only exists between 5.8 and 5.10.
    if ($] > 5.008 and $] < 5.011) {
        my $cop_io = class($op->io) eq 'SPECIAL' ? '' : $op->io->as_string;
        printf(" cop_io %s\n", cstring($cop_io));
    }
}
# SVOP: op holding an SV; dump its address, then recurse into the SV.
sub B::SVOP::debug {
    my ($op) = @_;
    $op->B::OP::debug();
    printf "\top_sv\t\t0x%x\n", ${$op->sv};
    $op->sv->debug;
}

# PVOP: op holding a string value.
sub B::PVOP::debug {
    my ($op) = @_;
    $op->B::OP::debug();
    printf "\top_pv\t\t%s\n", cstring($op->pv);
}

# PADOP: op holding a pad index (threaded perls).
sub B::PADOP::debug {
    my ($op) = @_;
    $op->B::OP::debug();
    printf "\top_padix\t%ld\n", $op->padix;
}

# NULL SV: distinguish the shared &sv_undef from a genuine null pointer.
sub B::NULL::debug {
    my ($sv) = @_;
    if ($$sv == ${sv_undef()}) {
        print "&sv_undef\n";
    } else {
        printf "NULL (0x%x)\n", $$sv;
    }
}
# Base SV dump: class, address, refcount and flags (symbolic with B::Flags).
# NOTE(review): the heredoc contains four %-escapes but only three arguments
# are supplied, so its "FLAGS" line always prints 0x0; the real flags are
# printed by the explicit printf below — confirm against upstream B::Debug.
sub B::SV::debug {
    my ($sv) = @_;
    if (!$$sv) {
        print class($sv), " = NULL\n";
        return;
    }
    printf <<'EOT', class($sv), $$sv, $sv->REFCNT;
%s (0x%x)
REFCNT %d
FLAGS 0x%x
EOT
    printf "\tFLAGS\t\t0x%x", $sv->FLAGS;
    if ($have_B_Flags) {
        printf "\t%s", $have_B_Flags_extra ? $sv->flagspv(0) : $sv->flagspv;
    }
    print "\n";
}

# RV: dump the reference itself, then recurse into the referent.
sub B::RV::debug {
    my ($rv) = @_;
    B::SV::debug($rv);
    printf <<'EOT', ${$rv->RV};
RV 0x%x
EOT
    $rv->RV->debug;
}
# PV: string slot (pv contents and current length).
sub B::PV::debug {
    my ($sv) = @_;
    $sv->B::SV::debug();
    my $pv = $sv->PV();
    printf <<'EOT', cstring($pv), length($pv);
xpv_pv %s
xpv_cur %d
EOT
}

# IV: integer slot, printed only when the IOK flag marks it valid.
sub B::IV::debug {
    my ($sv) = @_;
    $sv->B::SV::debug();
    printf "\txiv_iv\t\t%d\n", $sv->IV if $sv->FLAGS & SVf_IOK;
}

# NV: float slot, printed only when NOK is set.
sub B::NV::debug {
    my ($sv) = @_;
    $sv->B::IV::debug();
    printf "\txnv_nv\t\t%s\n", $sv->NV if $sv->FLAGS & SVf_NOK;
}

sub B::PVIV::debug {
    my ($sv) = @_;
    $sv->B::PV::debug();
    printf "\txiv_iv\t\t%d\n", $sv->IV if $sv->FLAGS & SVf_IOK;
}

sub B::PVNV::debug {
    my ($sv) = @_;
    $sv->B::PVIV::debug();
    printf "\txnv_nv\t\t%s\n", $sv->NV if $sv->FLAGS & SVf_NOK;
}

# PVLV: lvalue SV with target offset/length and a one-character type code.
sub B::PVLV::debug {
    my ($sv) = @_;
    $sv->B::PVNV::debug();
    printf "\txlv_targoff\t%d\n", $sv->TARGOFF;
    printf "\txlv_targlen\t%u\n", $sv->TARGLEN;
    printf "\txlv_type\t%s\n", cstring(chr($sv->TYPE));
}

# BM: Boyer-Moore search-string fields.
sub B::BM::debug {
    my ($sv) = @_;
    $sv->B::PVNV::debug();
    printf "\txbm_useful\t%d\n", $sv->USEFUL;
    printf "\txbm_previous\t%u\n", $sv->PREVIOUS;
    printf "\txbm_rare\t%s\n", cstring(chr($sv->RARE));
}
# CV (code value): dump the stash/start/root/GV pointers, file, depth,
# padlist and lexical-outside chain, then recurse into start, root, GV
# and padlist.
sub B::CV::debug {
    my ($sv) = @_;
    $sv->B::PVNV::debug();
    my ($stash) = $sv->STASH;
    my ($start) = $sv->START;
    my ($root) = $sv->ROOT;
    my ($padlist) = $sv->PADLIST;
    my ($file) = $sv->FILE;
    my ($gv) = $sv->GV;
    printf <<'EOT', $$stash, $$start, $$root, $$gv, $file, $sv->DEPTH, $padlist, ${$sv->OUTSIDE};
STASH 0x%x
START 0x%x
ROOT 0x%x
GV 0x%x
FILE %s
DEPTH %d
PADLIST 0x%x
OUTSIDE 0x%x
EOT
    printf("\tOUTSIDE_SEQ\t%d\n", , $sv->OUTSIDE_SEQ) if $] > 5.007;
    # SVt_PVCV's numeric value changed in 5.10; flagspv() needs the right one.
    if ($have_B_Flags) {
        my $SVt_PVCV = $] < 5.010 ? 12 : 13;
        printf("\tCvFLAGS\t0x%x\t%s\n", $sv->CvFLAGS,
            $have_B_Flags_extra ? $sv->flagspv($SVt_PVCV) : $sv->flagspv);
    } else {
        printf("\tCvFLAGS\t0x%x\n", $sv->CvFLAGS);
    }
    $start->debug if $start;
    $root->debug if $root;
    $gv->debug if $gv;
    $padlist->debug if $padlist;
}
# AV: dump the base SV info plus the array contents/geometry.
sub B::AV::debug {
    my ($av) = @_;
    $av->B::SV::debug;
    _array_debug($av);
}

# Shared by the AV and PADLIST dumps: element addresses, fill/max (plus OFF
# on threaded perls) and pre-5.9 AvFLAGS.
sub _array_debug {
    my ($av) = @_;
    # tied arrays may leave out FETCHSIZE
    my (@array) = eval { $av->ARRAY; };
    print "\tARRAY\t\t(", join(", ", map("0x" . $$_, @array)), ")\n";
    my $fill = eval { scalar(@array) };
    if ($Config{'useithreads'} && class($av) ne 'PADLIST') {
        printf <<'EOT', $fill, $av->MAX, $av->OFF;
FILL %d
MAX %d
OFF %d
EOT
    } else {
        printf <<'EOT', $fill, $av->MAX;
FILL %d
MAX %d
EOT
    }
    if ($] < 5.009) {
        if ($have_B_Flags) {
            printf("\tAvFLAGS\t0x%x\t%s\n", $av->AvFLAGS,
                $have_B_Flags_extra ? $av->flagspv(10) : $av->flagspv);
        } else {
            printf("\tAvFLAGS\t0x%x\n", $av->AvFLAGS);
        }
    }
}
# GV (glob): already-dumped GVs (tracked in %done_gv) are printed as a
# one-line reference to avoid infinite recursion through symbol tables.
# NOTE(review): the heredoc has 13 %-escapes but 14 arguments; the trailing
# GvFLAGS argument is unused there (it is printed separately below) —
# confirm against upstream B::Debug.
sub B::GV::debug {
    my ($gv) = @_;
    if ($done_gv{$$gv}++) {
        printf "GV %s::%s\n", $gv->STASH->NAME, $gv->SAFENAME;
        return;
    }
    my $sv = $gv->SV;
    my $av = $gv->AV;
    my $cv = $gv->CV;
    $gv->B::SV::debug;
    printf <<'EOT', $gv->SAFENAME, $gv->STASH->NAME, $gv->STASH, $$sv, $gv->GvREFCNT, $gv->FORM, $$av, ${$gv->HV}, ${$gv->EGV}, $$cv, $gv->CVGEN, $gv->LINE, $gv->FILE, $gv->GvFLAGS;
NAME %s
STASH %s (0x%x)
SV 0x%x
GvREFCNT %d
FORM 0x%x
AV 0x%x
HV 0x%x
EGV 0x%x
CV 0x%x
CVGEN %d
LINE %d
FILE %s
EOT
    # SVt_PVGV's numeric value changed in 5.10.
    if ($have_B_Flags) {
        my $SVt_PVGV = $] < 5.010 ? 13 : 9;
        printf("\tGvFLAGS\t0x%x\t%s\n", $gv->GvFLAGS,
            $have_B_Flags_extra ? $gv->flagspv($SVt_PVGV) : $gv->flagspv);
    } else {
        printf("\tGvFLAGS\t0x%x\n", $gv->GvFLAGS);
    }
    $sv->debug if $sv;
    $av->debug if $av;
    $cv->debug if $cv;
}

# SPECIAL: print the symbolic name of a special SV (e.g. &PL_sv_undef).
sub B::SPECIAL::debug {
    my $sv = shift;
    my $i = ref $sv ? $$sv : 0;
    print exists $specialsv_name[$i] ? $specialsv_name[$i] : "", "\n";
}
# PADLIST (5.17.4+): refcounted array-like structure holding the pads.
sub B::PADLIST::debug {
    my ($padlist) = @_;
    printf <<'EOT', class($padlist), $$padlist, $padlist->REFCNT;
%s (0x%x)
REFCNT %d
EOT
    _array_debug($padlist);
}

# O-compiler entry point: return a closure that walks the op tree calling
# ->debug on each op — execution order with "-exec", basic order otherwise.
sub compile {
    my $order = shift;
    B::clearsym();
    if ($order && $order eq "exec") {
        return sub { walkoptree_exec(main_start, "debug") }
    } else {
        return sub { walkoptree(main_root, "debug") }
    }
}

1;
__END__
=head1 NAME
B::Debug - Walk Perl syntax tree, printing debug info about ops
=head1 SYNOPSIS
perl -MO=Debug foo.pl
perl -MO=Debug,-exec foo.pl
=head1 DESCRIPTION
See F<ext/B/README> and the newer L<B::Concise>, L<B::Terse>.
=head1 OPTIONS
With option -exec, walks tree in execute order,
otherwise in basic order.
=head1 AUTHOR
Malcolm Beattie, C<mbeattie@sable.ox.ac.uk>
Reini Urban C<rurban@cpan.org>
=head1 LICENSE
Copyright (c) 1996, 1997 Malcolm Beattie
Copyright (c) 2008, 2010 Reini Urban
This program is free software; you can redistribute it and/or modify
it under the terms of either:
a) the GNU General Public License as published by the Free
Software Foundation; either version 1, or (at your option) any
later version, or
b) the "Artistic License" which comes with this kit.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See either
the GNU General Public License or the Artistic License for more details.
You should have received a copy of the Artistic License with this kit,
in the file named "Artistic". If not, you can get one from the Perl
distribution. You should also have received a copy of the GNU General
Public License, in the file named "Copying". If not, you can get one
from the Perl distribution or else write to the Free Software Foundation,
Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
=cut
| Dokaponteam/ITF_Project | xampp/perl/lib/B/Debug.pm | Perl | mit | 10,532 |
package DDG::Spice::Songkick::Artists;
# ABSTRACT: Get similar artists for artist id of Songkick

use strict;
use warnings;
use DDG::Spice;

# NOTE(review): this trigger string can never match a real user query —
# presumably the spice is only invoked programmatically (e.g. from another
# Songkick spice); confirm with the callers.
triggers any => "///***never_trigger***///";

# $1 is the Songkick artist id; the API key is injected from the environment
# at serve time and the response is wrapped in the JSONP callback.
spice to => 'http://api.songkick.com/api/3.0/artists/$1/similar_artists.json?apikey={{ENV{DDG_SPICE_SONGKICK_APIKEY}}}&per_page=10&jsoncallback={{callback}}';

# Pass the query remainder through unchanged; return nothing when empty.
handle remainder => sub {
    return $_ if $_;
    return;
};

1;
| ppant/zeroclickinfo-spice | lib/DDG/Spice/Songkick/Artists.pm | Perl | apache-2.0 | 410 |
% Package manifest for the SWI-Prolog pack manager (pack_install/1).
name(arouter).
version('1.0.0').
title('Alternative HTTP path router').
author('Raivo Laanemets', 'http://rlaanemets.com/').
home('https://github.com/rla/alternative-router').
| segmond/alternative-router | pack.pl | Perl | mit | 176 |
package Statistics::Descriptive;

use strict;
use warnings;

##This module draws heavily from perltoot v0.4 from Tom Christiansen.

use 5.006;

use vars (qw($VERSION $Tolerance $Min_samples_number));

$VERSION = '3.0607';

# $Tolerance: package-level tuning knob; not referenced within this chunk —
# see the module's consumers.
$Tolerance = 0.0;

# Minimum number of data points required by the outlier-filtering methods.
$Min_samples_number = 4;

package Statistics::Descriptive::Sparse;

use vars qw($VERSION);
$VERSION = '3.0607';

use vars qw(%fields);
use Carp;
use Statistics::Descriptive::Smoother;
# _make_accessors($pkg, \@methods)
# Generates a combined get/set accessor in package $pkg for each name in
# @$methods: called with an argument it stores the value; it always returns
# the current value.
sub _make_accessors
{
    my ($pkg, $methods) = @_;

    no strict 'refs';
    foreach my $method (@$methods)
    {
        *{$pkg."::".$method} =
            do {
                # Copy the loop variable so each closure captures its own name.
                my $m = $method;
                sub {
                    my $self = shift;

                    if (@_)
                    {
                        $self->{$m} = shift;
                    }

                    return $self->{$m};
                };
            };
    }

    return;
}

# _make_private_accessors($pkg, \@methods)
# Same as _make_accessors() but the generated method name is prefixed with
# an underscore (e.g. "variance" => "_variance"); the hash slot keeps the
# unprefixed name.
sub _make_private_accessors
{
    my ($pkg, $methods) = @_;

    no strict 'refs';
    foreach my $method (@$methods)
    {
        *{$pkg."::_".$method} =
            do {
                my $m = $method;
                sub {
                    my $self = shift;

                    if (@_)
                    {
                        $self->{$m} = shift;
                    }

                    return $self->{$m};
                };
            };
    }

    return;
}
##Define the fields to be used as methods
%fields = (
count => 0,
mean => undef,
sum => undef,
sumsq => undef,
min => undef,
max => undef,
mindex => undef,
maxdex => undef,
sample_range => undef,
variance => undef,
);
__PACKAGE__->_make_accessors( [ grep { $_ ne "variance" } keys(%fields) ] );
__PACKAGE__->_make_accessors( ["_permitted"] );
__PACKAGE__->_make_private_accessors(["variance"]);
sub new {
my $proto = shift;
my $class = ref($proto) || $proto;
my $self = {
%fields,
};
bless ($self, $class);
$self->_permitted(\%fields);
return $self;
}
# True when $field is one of the accessor-backed fields for this object.
sub _is_permitted
{
    my ($self, $field) = @_;
    my $allowed = $self->_permitted();
    return exists $allowed->{$field};
}
# add_data(@values) or add_data(\@values)
# Append one or more data points to the running statistics.  Updates
# count/sum/sumsq/mean plus the min/max values and their insertion-order
# indices, and invalidates the cached variance.  Returns 1 (an empty input
# is a no-op).
# (Removed the original's unused "my $oldmean;" declaration.)
sub add_data {
    my $self = shift; ##Myself
    my ($min,$mindex,$max,$maxdex,$sum,$sumsq,$count);
    my $aref;

    if (ref $_[0] eq 'ARRAY') {
        $aref = $_[0];
    }
    else {
        $aref = \@_;
    }

    ##If we were given no data, we do nothing.
    return 1 if (!@{ $aref });

    ##Take care of appending to an existing data set
    if (!defined($min = $self->min()))
    {
        $min = $aref->[$mindex = 0];
    }
    else
    {
        $mindex = $self->mindex();
    }

    if (!defined($max = $self->max()))
    {
        $max = $aref->[$maxdex = 0];
    }
    else
    {
        $maxdex = $self->maxdex();
    }

    $sum = $self->sum();
    $sumsq = $self->sumsq();
    $count = $self->count();

    ##Calculate new sum, sumsq, count, min and max.
    ##(">="/"<=" keep the *last* occurrence's index for ties.)
    foreach ( @{ $aref } ) {
        $sum += $_;
        $sumsq += $_**2;
        $count++;
        if ($_ >= $max) {
            $max = $_;
            $maxdex = $count-1;
        }
        if ($_ <= $min) {
            $min = $_;
            $mindex = $count-1;
        }
    }

    $self->min($min);
    $self->mindex($mindex);
    $self->max($max);
    $self->maxdex($maxdex);
    $self->sample_range($max - $min);
    $self->sum($sum);
    $self->sumsq($sumsq);
    $self->mean($sum / $count);
    $self->count($count);

    ##Variance isn't used commonly enough to recompute on every add, so
    ##just invalidate the cached value.
    $self->_variance(undef);

    return 1;
}
# Sample standard deviation (sqrt of variance); undef when there is no data.
sub standard_deviation {
    my $self = shift; ##Myself
    return undef if (!$self->count());
    return sqrt($self->variance());
}

##Return variance; if needed, compute and cache it.
# Uses the computational formula (sumsq - n*mean^2) / (n - 1).
# Returns undef with no data and 0 for a single sample.
sub variance {
    my $self = shift; ##Myself

    my $count = $self->count();

    return undef if !$count;

    return 0 if $count == 1;

    if (!defined($self->_variance())) {
        my $variance = ($self->sumsq()- $count * $self->mean()**2);

        # Sometimes due to rounding errors we get a number below 0.
        # This makes sure this is handled as gracefully as possible.
        #
        # See:
        #
        # https://rt.cpan.org/Public/Bug/Display.html?id=46026
        $variance = $variance < 0 ? 0 : $variance / ($count - 1);

        $self->_variance($variance);

        # Return now to avoid re-entering this sub
        # (and therefore save time when many objects are used).
        return $variance;
    }

    return $self->_variance();
}
##Clear a stat. More efficient than destroying an object and calling
##new.
# Resets every field back to its %fields default; a no-op on an object
# that holds no data.
# (Removed the original's unused "my $key;" declaration.)
sub clear {
    my $self = shift; ##Myself

    return if (!$self->count());

    while (my($field, $value) = each %fields) { # could use a slice assignment here
        $self->{$field} = $value;
    }
}
1;
package Statistics::Descriptive::Full;

use vars qw($VERSION);
$VERSION = '3.0607';

use Carp;
use POSIX ();

use Statistics::Descriptive::Smoother;

# $a/$b declared so the sort blocks below don't trip "used only once".
use vars qw(@ISA $a $b %fields);
@ISA = qw(Statistics::Descriptive::Sparse);

use List::MoreUtils ();
use List::Util ();

##Create a list of fields not to remove when data is updated
%fields = (
    _permitted => undef,  ##Place holder for the inherited key hash
    data       => undef,  ##Our data
    samples    => undef,  ##Number of samples for each value of the data set
    presorted  => undef,  ##Flag to indicate the data is already sorted
    _reserved  => undef,  ##Place holder for this lookup hash
);

# Private accessors for the raw storage and the lazily cached statistics;
# the caches are wiped wholesale by _delete_all_cached_keys().
__PACKAGE__->_make_private_accessors(
    [qw(data samples frequency geometric_mean harmonic_mean
        least_squares_fit median mode
        skewness kurtosis median_absolute_deviation
       )
    ]
);
__PACKAGE__->_make_accessors([qw(presorted _reserved _trimmed_mean_cache)]);

# Reset the per-object storage: data, samples, caches, presorted flag.
sub _clear_fields
{
    my $self = shift;

    # Empty array ref for holding data later!
    $self->_data([]);
    $self->_samples([]);
    $self->_reserved(\%fields);
    $self->presorted(0);
    $self->_trimmed_mean_cache(+{});

    return;
}

##Have to override the base method to add the data to the object
##The proxy method from above is still valid
sub new {
    my $proto = shift;
    my $class = ref($proto) || $proto;
    # Create my self re SUPER
    my $self = $class->SUPER::new();
    bless ($self, $class); #Re-anneal the object
    $self->_clear_fields();
    return $self;
}
# True when $field is one of this class's reserved bookkeeping fields.
sub _is_reserved
{
    my $self = shift;
    my $field = shift;

    return exists($self->_reserved->{$field});
}

# Drop every cached statistic from the object, keeping only the reserved
# and permitted (accessor-backed) keys; the trimmed-mean cache is emptied
# in place rather than deleted.
sub _delete_all_cached_keys
{
    my $self = shift;

    my %keys = %{ $self };

    # Remove reserved keys for this class from the deletion list
    delete @keys{keys %{$self->_reserved}};
    delete @keys{keys %{$self->_permitted}};
    delete $keys{_trimmed_mean_cache};

    KEYS_LOOP:
    foreach my $key (keys %keys) { # Check each key in the object
        delete $self->{$key}; # Delete any out of date cached key
    }
    $self->{_trimmed_mean_cache} = {}; # just reset this one

    return;
}
##Clear a stat. More efficient than destroying an object and calling
##new.
# Full-object reset: wipes the cached statistics, resets the inherited
# Sparse counters and re-initialises the data/samples storage.
# A no-op on an empty object.
# (Removed the original's unused "my $key;" declaration.)
sub clear {
    my $self = shift; ##Myself

    if (!$self->count())
    {
        return;
    }

    $self->_delete_all_cached_keys();
    $self->SUPER::clear();
    $self->_clear_fields();
}
# add_data(@values) or add_data(\@values)
# Append data points to the stored data set.  Updates the running
# count/sum/sumsq/mean/min/max, appends the raw values, and — only when the
# object already held data — clears the presorted flag and every cached
# statistic (freshly populated objects skip the cache sweep for speed).
# Returns 1; an empty input is a no-op.
# (Removed the original's unused "my $oldmean;" declaration.)
sub add_data {
    my $self = shift; ##Myself

    my $aref;

    if (ref $_[0] eq 'ARRAY') {
        $aref = $_[0];
    }
    else {
        $aref = \@_;
    }

    ##If we were given no data, we do nothing.
    return 1 if (!@{ $aref });

    my ($min, $max, $sum, $sumsq);
    my $count = $self->count;

    # $count is modified lower down, but we need this flag after that
    my $has_existing_data = $count;

    # Take care of appending to an existing data set
    if ($has_existing_data) {
        $min = $self->min();
        $max = $self->max();
        $sum = $self->sum();
        $sumsq = $self->sumsq();
    }
    else {
        $min = $aref->[0];
        $max = $aref->[0];
        $sum = 0;
        $sumsq = 0;
    }

    # need to allow for already having data
    $sum += List::Util::sum (@$aref);
    $sumsq += List::Util::sum (map {$_ ** 2} @$aref);
    $max = List::Util::max ($max, @$aref);
    $min = List::Util::min ($min, @$aref);
    $count += scalar @$aref;
    my $mean = $sum / $count;

    $self->min($min);
    $self->max($max);
    $self->sample_range($max - $min);
    $self->sum($sum);
    $self->sumsq($sumsq);
    $self->mean($mean);
    $self->count($count);

    ##Variance isn't commonly enough
    ##used to recompute every single data add, so just clear its cache.
    $self->_variance(undef);

    push @{ $self->_data() }, @{ $aref };

    # no need to clear keys if we are a newly populated object,
    # and profiling shows it takes a long time when creating
    # and populating many stats objects
    if ($has_existing_data) {
        ##Clear the presorted flag
        $self->presorted(0);
        $self->_delete_all_cached_keys();
    }

    return 1;
}
# add_data_with_samples([ { value => sample_count }, ... ])
# The hash keys become data values (fed to add_data) and the hash values
# are appended to the parallel samples array.
# NOTE(review): a hash with more than one entry would contribute its
# keys/values in perl's unspecified hash order — callers appear to pass
# single-entry hashes; confirm.
sub add_data_with_samples {
    my ($self,$aref_values) = @_;

    return 1 if (!@{ $aref_values });

    my $aref_data = [map { keys %$_ } @{ $aref_values }];
    my $aref_samples = [map { values %$_ } @{ $aref_values }];

    $self->add_data($aref_data);
    push @{ $self->_samples() }, @{ $aref_samples };

    return 1;
}

# Return a copy of the stored data as a list.
sub get_data {
    my $self = shift;
    return @{ $self->_data() };
}
# Return the data with at most one outlier removed, as judged by the
# user-supplied filter (see set_outlier_filter).  Requires at least
# $Statistics::Descriptive::Min_samples_number samples and a configured
# filter; otherwise warns and returns nothing.
sub get_data_without_outliers {
    my $self = shift;

    if ($self->count() < $Statistics::Descriptive::Min_samples_number) {
        carp("Need at least $Statistics::Descriptive::Min_samples_number samples\n");
        return;
    }

    if (!defined $self->{_outlier_filter}) {
        carp("Outliers filter not defined\n");
        return;
    }

    # The sole candidate is the value furthest from the mean; the filter
    # decides whether it really is an outlier.
    my $outlier_candidate_index = $self->_outlier_candidate_index;
    my $possible_outlier = ($self->_data())->[$outlier_candidate_index];

    my $is_outlier = $self->{_outlier_filter}->($self, $possible_outlier);

    return $self->get_data unless $is_outlier;

    # Removing the outlier from the dataset
    my @good_indexes = grep { $_ != $outlier_candidate_index } (0 .. $self->count() - 1);

    my @data = $self->get_data;
    my @filtered_data = @data[@good_indexes];
    return @filtered_data;
}

# Install a code ref that decides whether a candidate value is an outlier.
# It is invoked as $code->($stats_object, $candidate_value).  Returns 1 on
# success; warns and returns nothing for a non-CODE argument.
sub set_outlier_filter {
    my ($self, $code_ref) = @_;

    if (!$code_ref || ref($code_ref) ne "CODE") {
        carp("Need to pass a code reference");
        return;
    }

    $self->{_outlier_filter} = $code_ref;
    return 1;
}

# Index of the value with the largest absolute deviation from the mean.
sub _outlier_candidate_index {
    my $self = shift;

    my $mean = $self->mean();
    my $outlier_candidate_index = 0;
    my $max_std_deviation = abs(($self->_data())->[0] - $mean);

    foreach my $idx (1 .. ($self->count() - 1) ) {
        my $curr_value = ($self->_data())->[$idx];
        if ($max_std_deviation < abs($curr_value - $mean) ) {
            $outlier_candidate_index = $idx;
            $max_std_deviation = abs($curr_value - $mean);
        }
    }

    return $outlier_candidate_index;
}
# set_smoother(\%args)
# Configure the smoothing method.  %$args carries the parameters understood
# by Statistics::Descriptive::Smoother->instantiate(); the current data and
# sample counts are attached automatically.
sub set_smoother {
    my ($self, $args) = @_;

    $args->{data} = $self->_data();
    $args->{samples} = $self->_samples();

    $self->{_smoother} = Statistics::Descriptive::Smoother->instantiate($args);
}

# get_smoothed_data()
# Return the smoothed data series; warns and returns nothing when no
# smoother has been configured via set_smoother().
# ($args is accepted for call compatibility but is currently unused.)
sub get_smoothed_data {
    my ($self, $args) = @_;

    if (!defined $self->{_smoother}) {
        carp("Smoother object not defined\n");
        return;
    }

    # Explicit return; the original relied on the implicit return of the
    # last evaluated expression.
    return $self->{_smoother}->get_smoothed_data();
}
# Index of the maximum value — the last index when the data is presorted,
# otherwise the first matching position.  Caches the answer in
# $self->{maxdex} (wiped by _delete_all_cached_keys).  Undef with no data.
sub maxdex {
    my $self = shift;

    return undef if !$self->count;
    my $maxdex;

    if ($self->presorted) {
        $maxdex = $self->count - 1;
    }
    else {
        my $max = $self->max;
        $maxdex = List::MoreUtils::first_index {$_ == $max} $self->get_data;
    }

    $self->{maxdex} = $maxdex;

    return $maxdex;
}

# Index of the minimum value — 0 when presorted, otherwise the first
# matching position.  Caches in $self->{mindex}.  Undef with no data.
sub mindex {
    my $self = shift;

    return undef if !$self->count;

    #my $maxdex = $self->{maxdex};
    #return $maxdex if defined $maxdex;

    my $mindex;

    if ($self->presorted) {
        $mindex = 0;
    }
    else {
        my $min = $self->min;
        $mindex = List::MoreUtils::first_index {$_ == $min} $self->get_data;
    }

    $self->{mindex} = $mindex;

    return $mindex;
}
# Numerically sort the stored data in ascending order, unless the
# presorted flag says it is already sorted.  Always returns 1.
# (The original comment claimed "descending"; the comparator has always
# been ascending.)
sub sort_data {
    my $self = shift;

    unless ( $self->presorted() ) {
        $self->_data( [ sort { $a <=> $b } @{ $self->_data() } ] );
        $self->presorted(1);
    }

    return 1;
}
# Return the RFC 2330 percentile: the smallest stored value whose
# empirical distribution function reaches the requested percentage.
# In list context also returns the index of that value.  Returns
# nothing when the set is empty or the requested percentile is finer
# than the sample resolution (100 / count) -- see the POD.
sub percentile {
    my ( $self, $percentile ) = @_;
    $percentile ||= 0;

    my $count = $self->count();
    return if !$count || $percentile < 100 / $count;

    $self->sort_data();

    # Smallest index whose cumulative fraction reaches the percentile.
    my $index = POSIX::ceil( $count * $percentile / 100 ) - 1;
    my $value = $self->_data->[$index];

    return wantarray ? ( $value, $index ) : $value;
}
# Compute the median assuming the data is already sorted: the middle
# element for an odd count, the mean of the two middle elements for an
# even count.
sub _calc_new_median
{
    my $self  = shift;
    my $count = $self->count();
    my $data  = $self->_data;

    return $count % 2
        ? $data->[ ( $count - 1 ) / 2 ]
        : ( $data->[ $count / 2 ] + $data->[ $count / 2 - 1 ] ) / 2;
}
# Return the median of the data (undef for an empty set).  The value is
# computed lazily -- sorting first -- and cached via _median().
sub median {
    my $self = shift;

    return undef if !$self->count;

    unless ( defined $self->_median() ) {
        $self->sort_data();
        $self->_median( $self->_calc_new_median() );
    }

    return $self->_median();
}
# Return the type-7 quantile (same algorithm as R and Excel) for
# quartile number 0..4: 0 = minimum, 4 = maximum, 1/2/3 interpolate
# between the surrounding order statistics.  Warns and returns nothing
# for an invalid quartile number; returns undef for an empty set.
sub quantile {
    my ( $self, $QuantileNumber ) = @_;

    # Validate first.  The original pattern m/^0|1|2|3|4$/ was broken by
    # alternation precedence: it accepted any argument merely containing
    # "1", "2" or "3" (e.g. 10, -1, 0.5).  Anchor the whole string.
    unless ( defined $QuantileNumber and $QuantileNumber =~ m/\A[01234]\z/ ) {
        carp("Bad quartile type, must be 0, 1, 2, 3 or 4\n");
        return;
    }

    # check data count after the args are checked - should help debugging
    return undef if !$self->count;

    $self->sort_data();

    return $self->_data->[0] if $QuantileNumber == 0;

    my $count = $self->count();

    return $self->_data->[ $count - 1 ] if $QuantileNumber == 4;

    # Target position (1-based) and its fractional part.
    my $K_quantile = ( $QuantileNumber / 4 ) * ( $count - 1 ) + 1;
    my $F_quantile = $K_quantile - POSIX::floor($K_quantile);
    $K_quantile = POSIX::floor($K_quantile);

    my $aK_quantile = $self->_data->[ $K_quantile - 1 ];

    return $aK_quantile if $F_quantile == 0;

    # Linear interpolation between the two neighbouring order statistics.
    my $aKPlus_quantile = $self->_data->[$K_quantile];

    return $aK_quantile
        + $F_quantile * ( $aKPlus_quantile - $aK_quantile );
}
# Compute the mean after dropping a fraction $lower of elements from the
# low end and $upper from the high end (counts truncated with int).
sub _real_calc_trimmed_mean
{
    my ( $self, $lower, $upper ) = @_;

    # Number of elements to drop from each end.
    my $lower_trim = int( $self->count() * $lower );
    my $upper_trim = int( $self->count() * $upper );

    $self->sort_data();

    # Incremental (running) mean over the retained middle slice, which
    # avoids accumulating one large sum.
    my $tm_mean  = 0;
    my $tm_count = 0;

    foreach my $index ( $lower_trim .. $self->count() - $upper_trim - 1 )
    {
        my $val = $self->_data()->[$index];
        $tm_count++;
        $tm_mean += ( $val - $tm_mean ) / $tm_count;
    }

    return $tm_mean;
}
# Public trimmed-mean entry point.  One argument trims the same fraction
# from both ends; two arguments trim lower and upper independently.
# Results are cached per (ltrim, utrim) pair.  Undef for an empty set.
sub trimmed_mean
{
    my ( $self, @bounds ) = @_;

    # A single bound applies to both ends.
    my ( $lower, $upper ) =
        @bounds == 1 ? ( $bounds[0], $bounds[0] ) : @bounds[ 0, 1 ];

    # check data count after the args
    return undef if !$self->count;

    my $cache_key = join ':', $lower, $upper;
    my $cache     = $self->_trimmed_mean_cache();

    $cache->{$cache_key} = $self->_real_calc_trimmed_mean( $lower, $upper )
        unless exists $cache->{$cache_key};

    return $cache->{$cache_key};
}
# True when |$val| is within the configurable zero tolerance, i.e. too
# close to zero to be used safely as a divisor.
sub _test_for_too_small_val
{
    my ( $self, $val ) = @_;

    return abs($val) <= $Statistics::Descriptive::Tolerance;
}
# Compute the harmonic mean: count / sum(1/x).  Returns nothing when any
# datum -- or the reciprocal sum itself -- is too close to zero, since
# the mean is undefined in both cases.
sub _calc_harmonic_mean
{
    my $self = shift;

    my $reciprocal_sum = 0;

    foreach my $item ( @{ $self->_data() } )
    {
        # A (near-)zero datum would mean dividing by zero.
        return if $self->_test_for_too_small_val($item);

        $reciprocal_sum += 1 / $item;
    }

    # The sum of reciprocals may also be (nearly) zero.
    return if $self->_test_for_too_small_val($reciprocal_sum);

    return $self->count() / $reciprocal_sum;
}
# Public harmonic-mean accessor with lazy caching.  scalar() collapses
# the helper's empty-list error signal into undef for storage.
sub harmonic_mean
{
    my $self = shift;

    if ( !defined $self->_harmonic_mean() )
    {
        $self->_harmonic_mean( scalar $self->_calc_harmonic_mean() );
    }

    return $self->_harmonic_mean();
}
# Return the most frequently occurring datum, or undef when every value
# occurs only once.  The result is cached as a small record so that
# "no mode" is distinguishable from a legitimate mode of 0.
sub mode
{
    my $self = shift;

    if ( !defined $self->_mode() )
    {
        my %frequency_of;
        my ( $mode, $highest_count ) = ( 0, 0 );

        foreach my $item ( @{ $self->_data() } )
        {
            my $seen = ++$frequency_of{$item};
            if ( $seen > $highest_count )
            {
                ( $mode, $highest_count ) = ( $item, $seen );
            }
        }

        # A value seen only once is not a mode.
        $self->_mode(
            $highest_count > 1
            ? { exists => 1, mode => $mode }
            : { exists => 0 }
        );
    }

    my $record = $self->_mode;

    return $record->{'exists'} ? $record->{mode} : undef;
}
# Return the geometric mean of the data: product(x_i ^ (1/n)).  Undef
# for an empty set or when any datum is negative.  Raising each factor
# to 1/n before multiplying keeps the running product from overflowing.
sub geometric_mean {
    my $self = shift;

    return undef if !$self->count;

    if ( !defined $self->_geometric_mean() )
    {
        my $exponent = 1 / $self->count();
        my $product  = 1;

        foreach my $val ( @{ $self->_data() } )
        {
            # Undefined for negative data.
            return undef if $val < 0;

            $product *= $val**$exponent;
        }

        $self->_geometric_mean($product);
    }

    return $self->_geometric_mean();
}
# Return the bias-corrected sample skewness (consistent with Excel):
# n/((n-1)(n-2)) * sum(((x - mean)/sd)^3).  Undef when there are fewer
# than 3 records or the standard deviation is zero.
sub skewness {
    my $self = shift;

    if ( !defined $self->_skewness() )
    {
        my $n  = $self->count();
        my $sd = $self->standard_deviation();

        my $skew;

        if ( $sd && $n > 2 ) {
            my $mean = $self->mean();

            # Sum of cubed standardized deviations.
            my $sum_pow3 = 0;
            foreach my $rec ( $self->get_data ) {
                $sum_pow3 += ( ( $rec - $mean ) / $sd )**3;
            }

            $skew = $sum_pow3 * $n / ( ( $n - 1 ) * ( $n - 2 ) );
        }

        $self->_skewness($skew);
    }

    return $self->_skewness();
}
# Return the bias-corrected excess kurtosis (consistent with Excel).
# Undef when there are fewer than 4 records or the standard deviation
# is zero.
sub kurtosis {
    my $self = shift;

    if ( !defined $self->_kurtosis() )
    {
        my $n  = $self->count();
        my $sd = $self->standard_deviation();

        my $kurt;

        if ( $sd && $n > 3 ) {
            my $mean = $self->mean();

            # Sum of fourth powers of standardized deviations.
            my $sum_pow4 = 0;
            foreach my $rec ( $self->get_data ) {
                $sum_pow4 += ( ( $rec - $mean ) / $sd )**4;
            }

            # Standard sample-kurtosis bias corrections.
            my $scale      = $n * ( $n + 1 ) / ( ( $n - 1 ) * ( $n - 2 ) * ( $n - 3 ) );
            my $subtrahend = 3 * ( $n - 1 )**2 / ( ( $n - 2 ) * ( $n - 3 ) );

            $kurt = $scale * $sum_pow4 - $subtrahend;
        }

        $self->_kurtosis($kurt);
    }

    return $self->_kurtosis();
}
# Slice the data into bins and count how many items fall in each bin.
# Accepts either a bin count (>= 1), an array ref of monotonic bin upper
# bounds, or no argument (returns the last cached distribution, if any).
# Returns a hash ref mapping each bin's upper bound to its item count,
# or undef on invalid input.  See the POD for the exact bin semantics.
sub frequency_distribution_ref
{
my $self = shift;
my @k = ();
# Must have at least two elements
if ($self->count() < 2)
{
return undef;
}
# No arguments: hand back the last computed distribution, when cached.
if ((!@_) && (defined $self->_frequency()))
{
return $self->_frequency()
}
my %bins;
my $partitions = shift;
if (ref($partitions) eq 'ARRAY')
{
# Caller supplied explicit bin upper bounds.
@k = @{ $partitions };
return undef unless @k; ##Empty array
if (@k > 1) {
##Check for monotonicity
my $element = $k[0];
for my $next_elem (@k[1..$#k]) {
if ($element > $next_elem) {
carp "Non monotonic array cannot be used as frequency bins!\n";
return undef;
}
$element = $next_elem;
}
}
%bins = map { $_ => 0 } @k;
}
else
{
# Caller supplied a partition count: build equal-width bins spanning
# the sample range; the maximum is always the last bin's upper bound.
return undef unless $partitions >= 1;
my $interval = $self->sample_range() / $partitions;
foreach my $idx (1 .. ($partitions-1))
{
push @k, ($self->min() + $idx * $interval);
}
$bins{$self->max()} = 0;
push @k, $self->max();
}
# Count each datum in the first bin whose upper bound contains it.
ELEMENT:
foreach my $element (@{$self->_data()})
{
foreach my $limit (@k)
{
if ($element <= $limit)
{
$bins{$limit}++;
next ELEMENT;
}
}
}
# Cache the result and return it.
return $self->_frequency(\%bins);
}
# List-returning wrapper around frequency_distribution_ref(); kept for
# backwards compatibility (see POD -- new code should use the _ref form).
sub frequency_distribution {
    my $self = shift;

    my $dist_ref = $self->frequency_distribution_ref(@_);

    return defined $dist_ref ? %{$dist_ref} : undef;
}
# Perform a linear least-squares fit y = m*x + q over the stored data,
# using @_ as the x domain or 1..count() by default.  Returns the list
# ($q, $m, $r, $rms) -- intercept, slope, Pearson correlation
# coefficient, root-mean-square error -- or the empty list when there
# are fewer than 2 points or a denominator falls within the tolerance.
sub least_squares_fit {
my $self = shift;
return () if $self->count() < 2;
##Sigma sums
my ($sigmaxy, $sigmax, $sigmaxx, $sigmayy, $sigmay) = (0,0,0,0,$self->sum);
my ($xvar, $yvar, $err);
##Work variables
my ($iter,$y,$x,$denom) = (0,0,0,0);
my $count = $self->count();
my @x;
##Outputs
my ($m, $q, $r, $rms);
# NOTE(review): this tests the SECOND argument, so a one-element domain
# is silently replaced by the default 1..count -- looks unintended;
# confirm before changing.
if (!defined $_[1]) {
@x = 1..$self->count();
}
else {
@x = @_;
if ( $self->count() != scalar @x) {
carp "Range and domain are of unequal length.";
return ();
}
}
# Accumulate the sums needed for the normal equations.
foreach $x (@x) {
$y = $self->_data->[$iter];
$sigmayy += $y * $y;
$sigmaxx += $x * $x;
$sigmaxy += $x * $y;
$sigmax += $x;
$iter++;
}
# Degenerate x variance -> no unique fit.
$denom = $count * $sigmaxx - $sigmax*$sigmax;
return ()
unless abs( $denom ) > $Statistics::Descriptive::Tolerance;
$m = ($count*$sigmaxy - $sigmax*$sigmay) / $denom;
$q = ($sigmaxx*$sigmay - $sigmax*$sigmaxy ) / $denom;
# Pearson correlation coefficient from the co-/variances.
$xvar = $sigmaxx - $sigmax*$sigmax / $count;
$yvar = $sigmayy - $sigmay*$sigmay / $count;
$denom = sqrt( $xvar * $yvar );
return () unless (abs( $denom ) > $Statistics::Descriptive::Tolerance);
$r = ($sigmaxy - $sigmax*$sigmay / $count )/ $denom;
# Root-mean-square of the fit residuals.
$iter = 0;
$rms = 0.0;
foreach (@x) {
##Error = Real y - calculated y
$err = $self->_data->[$iter] - ( $m * $_ + $q );
$rms += $err*$err;
$iter++;
}
$rms = sqrt($rms / $count);
# Cache and return ($q, $m, $r, $rms).
$self->_least_squares_fit([$q, $m, $r, $rms]);
return @{ $self->_least_squares_fit() };
}
# Return the median absolute deviation (MAD): the median of the
# absolute deviations of each datum from the data's median.  Computed
# lazily via a helper statistics object and cached.
sub median_absolute_deviation {
    my ($self) = @_;

    if ( !defined $self->_median_absolute_deviation() )
    {
        # Build a sibling object over the absolute deviations, then
        # take its median.
        my $deviations = $self->new;
        $deviations->add_data( map { abs( $_ - $self->median ) } $self->get_data );

        $self->_median_absolute_deviation( $deviations->median );
    }

    return $self->_median_absolute_deviation();
}
1;
package Statistics::Descriptive;
##All modules return true.
1;
__END__
=head1 NAME
Statistics::Descriptive - Module of basic descriptive statistical functions.
=head1 SYNOPSIS
use Statistics::Descriptive;
$stat = Statistics::Descriptive::Full->new();
$stat->add_data(1,2,3,4); $mean = $stat->mean();
$var = $stat->variance();
$tm = $stat->trimmed_mean(.25);
$Statistics::Descriptive::Tolerance = 1e-10;
=head1 DESCRIPTION
This module provides basic functions used in descriptive statistics.
It has an object oriented design and supports two different types of
data storage and calculation objects: sparse and full. With the sparse
method, none of the data is stored and only a few statistical measures
are available. Using the full method, the entire data set is retained
and additional functions are available.
Whenever a division by zero may occur, the denominator is checked to be
greater than the value C<$Statistics::Descriptive::Tolerance>, which
defaults to 0.0. You may want to change this value to some small
positive value such as 1e-24 in order to obtain error messages in case
of very small denominators.
Many of the methods (both Sparse and Full) cache values so that subsequent
calls with the same arguments are faster.
=head1 METHODS
=head2 Sparse Methods
=over 5
=item $stat = Statistics::Descriptive::Sparse->new();
Create a new sparse statistics object.
=item $stat->clear();
Effectively the same as
my $class = ref($stat);
undef $stat;
$stat = new $class;
except more efficient.
=item $stat->add_data(1,2,3);
Adds data to the statistics variable. The cached statistical values are
updated automatically.
=item $stat->count();
Returns the number of data items.
=item $stat->mean();
Returns the mean of the data.
=item $stat->sum();
Returns the sum of the data.
=item $stat->variance();
Returns the variance of the data. Division by n-1 is used.
=item $stat->standard_deviation();
Returns the standard deviation of the data. Division by n-1 is used.
=item $stat->min();
Returns the minimum value of the data set.
=item $stat->mindex();
Returns the index of the minimum value of the data set.
=item $stat->max();
Returns the maximum value of the data set.
=item $stat->maxdex();
Returns the index of the maximum value of the data set.
=item $stat->sample_range();
Returns the sample range (max - min) of the data set.
=back
=head2 Full Methods
Similar to the Sparse Methods above, any Full Method that is called caches
the current result so that it doesn't have to be recalculated. In some
cases, several values can be cached at the same time.
=over 5
=item $stat = Statistics::Descriptive::Full->new();
Create a new statistics object that inherits from
Statistics::Descriptive::Sparse so that it contains all the methods
described above.
=item $stat->add_data(1,2,4,5);
Adds data to the statistics variable. All of the sparse statistical
values are updated and cached. Cached values from Full methods are
deleted since they are no longer valid.
I<Note: Calling add_data with an empty array will delete all of your
Full method cached values! Cached values for the sparse methods are
not changed>
=item $stat->add_data_with_samples([{1 => 10}, {2 => 20}, {3 => 30},]);
Add data to the statistics variable and set the number of samples each value has been
built with. The data is the key of each element of the input array ref, while
the value is the number of samples: [{data1 => samples1}, {data2 => samples2}, ...]
=item $stat->get_data();
Returns a copy of the data array.
=item $stat->get_data_without_outliers();
Returns a copy of the data array without outliers. The minimum number of
samples required to apply the outlier filtering is C<$Statistics::Descriptive::Min_samples_number>,
4 by default.
A function to detect outliers needs to be defined (see C<set_outlier_filter>),
otherwise the function will return an undef value.
The filtering will act only on the most extreme value of the data set
(i.e.: value with the highest absolute standard deviation from the mean).
If there is a need to remove more than one outlier, the filtering
needs to be re-run for the next most extreme value with the initial outlier removed.
This is not always needed since the test (for example Grubbs' test) usually can only detect
the most extreme value. If there is more than one extreme case in a set,
then the standard deviation will be high enough to make neither case an outlier.
=item $stat->set_outlier_filter($code_ref);
Set the function to filter out the outlier.
C<$code_ref> is the reference to the subroutine implementing the filtering function.
Returns C<undef> for invalid values of C<$code_ref> (i.e.: not defined or not a
code reference), C<1> otherwise.
=over 4
=item
Example #1: Undefined code reference
my $stat = Statistics::Descriptive::Full->new();
$stat->add_data(1, 2, 3, 4, 5);
print $stat->set_outlier_filter(); # => undef
=item
Example #2: Valid code reference
sub outlier_filter { return $_[1] > 1; }
my $stat = Statistics::Descriptive::Full->new();
$stat->add_data( 1, 1, 1, 100, 1, );
print $stat->set_outlier_filter( \&outlier_filter ); # => 1
my @filtered_data = $stat->get_data_without_outliers();
# @filtered_data is (1, 1, 1, 1)
In this example the series is really simple and the outlier filter function as well.
For more complex series the outlier filter function might be more complex
(see Grubbs' test for outliers).
The outlier filter function will receive as first parameter the Statistics::Descriptive::Full object,
as second the value of the candidate outlier. Having the object in the function
might be useful for complex filters where statistics property are needed (again see Grubbs' test for outlier).
=back
=item $stat->set_smoother({ method => 'exponential', coeff => 0, });
Set the method used to smooth the data and the smoothing coefficient.
See C<Statistics::Smoother> for more details.
=item $stat->get_smoothed_data();
Returns a copy of the smoothed data array.
The smoothing method and coefficient need to be defined (see C<set_smoother>),
otherwise the function will return an undef value.
=item $stat->sort_data();
Sort the stored data and update the mindex and maxdex methods. This
method uses perl's internal sort.
=item $stat->presorted(1);
=item $stat->presorted();
If called with a non-zero argument, this method sets a flag that says
the data is already sorted and need not be sorted again. Since some of
the methods in this class require sorted data, this saves some time.
If you supply sorted data to the object, call this method to prevent
the data from being sorted again. The flag is cleared whenever add_data
is called. Calling the method without an argument returns the value of
the flag.
=item $stat->skewness();
Returns the skewness of the data.
A value of zero is no skew, negative is a left skewed tail,
positive is a right skewed tail.
This is consistent with Excel.
=item $stat->kurtosis();
Returns the kurtosis of the data.
Positive is peaked, negative is flattened.
=item $x = $stat->percentile(25);
=item ($x, $index) = $stat->percentile(25);
Sorts the data and returns the value that corresponds to the
percentile as defined in RFC2330:
=over 4
=item
For example, given the 6 measurements:
-2, 7, 7, 4, 18, -5
Then F(-8) = 0, F(-5) = 1/6, F(-5.0001) = 0, F(-4.999) = 1/6, F(7) =
5/6, F(18) = 1, F(239) = 1.
Note that we can recover the different measured values and how many
times each occurred from F(x) -- no information regarding the range
in values is lost. Summarizing measurements using histograms, on the
other hand, in general loses information about the different values
observed, so the EDF is preferred.
Using either the EDF or a histogram, however, we do lose information
regarding the order in which the values were observed. Whether this
loss is potentially significant will depend on the metric being
measured.
We will use the term "percentile" to refer to the smallest value of x
for which F(x) >= a given percentage. So the 50th percentile of the
example above is 4, since F(4) = 3/6 = 50%; the 25th percentile is
-2, since F(-5) = 1/6 < 25%, and F(-2) = 2/6 >= 25%; the 100th
percentile is 18; and the 0th percentile is -infinity, as is the 15th
percentile, which for ease of handling and backward compatibility is returned
as undef() by the function.
Care must be taken when using percentiles to summarize a sample,
because they can lend an unwarranted appearance of more precision
than is really available. Any such summary must include the sample
size N, because any percentile difference finer than 1/N is below the
resolution of the sample.
=back
(Taken from:
I<RFC2330 - Framework for IP Performance Metrics>,
Section 11.3. Defining Statistical Distributions.
RFC2330 is available from:
L<http://www.ietf.org/rfc/rfc2330.txt> .)
If the percentile method is called in a list context then it will
also return the index of the percentile.
=item $x = $stat->quantile($Type);
Sorts the data and returns estimates of underlying distribution quantiles based on one
or two order statistics from the supplied elements.
This method use the same algorithm as Excel and R language (quantile B<type 7>).
The generic function quantile produces sample quantiles corresponding to the given probabilities.
B<$Type> is an integer value between 0 to 4 :
0 => zero quartile (Q0) : minimal value
1 => first quartile (Q1) : lower quartile = lowest cut off (25%) of data = 25th percentile
2 => second quartile (Q2) : median = it cuts data set in half = 50th percentile
3 => third quartile (Q3) : upper quartile = highest cut off (25%) of data, or lowest 75% = 75th percentile
4 => fourth quartile (Q4) : maximal value
Example:
my @data = (1..10);
my $stat = Statistics::Descriptive::Full->new();
$stat->add_data(@data);
print $stat->quantile(0); # => 1
print $stat->quantile(1); # => 3.25
print $stat->quantile(2); # => 5.5
print $stat->quantile(3); # => 7.75
print $stat->quantile(4); # => 10
=item $stat->median();
Sorts the data and returns the median value of the data.
=item $stat->harmonic_mean();
Returns the harmonic mean of the data. Since the mean is undefined
if any of the data are zero or if the sum of the reciprocals is zero,
it will return undef for both of those cases.
=item $stat->geometric_mean();
Returns the geometric mean of the data.
=item my $mode = $stat->mode();
Returns the mode of the data. The mode is the most commonly occurring datum.
See L<http://en.wikipedia.org/wiki/Mode_%28statistics%29> . If all values
occur only once, then mode() will return undef.
=item $stat->trimmed_mean(ltrim[,utrim]);
C<trimmed_mean(ltrim)> returns the mean with a fraction C<ltrim>
of entries at each end dropped. C<trimmed_mean(ltrim,utrim)>
returns the mean after a fraction C<ltrim> has been removed from the
lower end of the data and a fraction C<utrim> has been removed from the
upper end of the data. This method sorts the data before beginning
to analyze it.
All calls to trimmed_mean() are cached so that they don't have to be
calculated a second time.
=item $stat->frequency_distribution_ref($partitions);
=item $stat->frequency_distribution_ref(\@bins);
=item $stat->frequency_distribution_ref();
C<frequency_distribution_ref($partitions)> slices the data into
C<$partition> sets (where $partition is greater than 1) and counts the
number of items that fall into each partition. It returns a reference to
a hash where the keys are the numerical values of the
partitions used. The minimum value of the data set is not a key and the
maximum value of the data set is always a key. The number of entries
for a particular partition key are the number of items which are
greater than the previous partition key and less then or equal to the
current partition key. As an example,
$stat->add_data(1,1.5,2,2.5,3,3.5,4);
$f = $stat->frequency_distribution_ref(2);
for (sort {$a <=> $b} keys %$f) {
print "key = $_, count = $f->{$_}\n";
}
prints
key = 2.5, count = 4
key = 4, count = 3
since there are four items less than or equal to 2.5, and 3 items
greater than 2.5 and less than 4.
C<frequency_distribution_refs(\@bins)> provides the bins that are to be used
for the distribution. This allows for non-uniform distributions as
well as trimmed or sample distributions to be found. C<@bins> must
be monotonic and contain at least one element. Note that unless the
set of bins contains the range that the total counts returned will
be less than the sample size.
Calling C<frequency_distribution_ref()> with no arguments returns the last
distribution calculated, if such exists.
=item my %hash = $stat->frequency_distribution($partitions);
=item my %hash = $stat->frequency_distribution(\@bins);
=item my %hash = $stat->frequency_distribution();
Same as C<frequency_distribution_ref()> except that returns the hash clobbered
into the return list. Kept for compatibility reasons with previous
versions of Statistics::Descriptive and using it is discouraged.
=item $stat->least_squares_fit();
=item $stat->least_squares_fit(@x);
C<least_squares_fit()> performs a least squares fit on the data,
assuming a domain of C<@x> or a default of 1..$stat->count(). It
returns an array of four elements C<($q, $m, $r, $rms)> where
=over 4
=item C<$q and $m>
satisfy the equation C<$y = $m*$x + $q>.
=item C<$r>
is the Pearson linear correlation coefficient.
=item C<$rms>
is the root-mean-square error.
=back
In case of error or division by zero, the empty list is returned.
The array that is returned can be "coerced" into a hash structure
by doing the following:
my %hash = ();
@hash{'q', 'm', 'r', 'err'} = $stat->least_squares_fit();
Because calling C<least_squares_fit()> with no arguments defaults
to using the current range, there is no caching of the results.
=back
=head1 REPORTING ERRORS
I read my email frequently, but since adopting this module I've added 2
children and 1 dog to my family, so please be patient about my response
times. When reporting errors, please include the following to help
me out:
=over 4
=item *
Your version of perl. This can be obtained by typing perl C<-v> at
the command line.
=item *
Which version of Statistics::Descriptive you're using. As you can
see below, I do make mistakes. Unfortunately for me, right now
there are thousands of CD's with the version of this module with
the bugs in it. Fortunately for you, I'm a very patient module
maintainer.
=item *
Details about what the error is. Try to narrow down the scope
of the problem and send me code that I can run to verify and
track it down.
=back
=head1 AUTHOR
Current maintainer:
Shlomi Fish, L<http://www.shlomifish.org/> , C<shlomif@cpan.org>
Previously:
Colin Kuskie
My email address can be found at http://www.perl.com under Who's Who
or at: http://search.cpan.org/author/COLINK/.
=head1 CONTRIBUTORS
Fabio Ponciroli & Adzuna Ltd. team (outliers handling)
=head1 REFERENCES
RFC2330, Framework for IP Performance Metrics
The Art of Computer Programming, Volume 2, Donald Knuth.
Handbook of Mathematical Functions, Milton Abramowitz and Irene Stegun.
Probability and Statistics for Engineering and the Sciences, Jay Devore.
=head1 COPYRIGHT
Copyright (c) 1997,1998 Colin Kuskie. All rights reserved. This
program is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
Copyright (c) 1998 Andrea Spinelli. All rights reserved. This program
is free software; you can redistribute it and/or modify it under the
same terms as Perl itself.
Copyright (c) 1994,1995 Jason Kastner. All rights
reserved. This program is free software; you can redistribute it
and/or modify it under the same terms as Perl itself.
=head1 LICENSE
This program is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
=cut
| lskatz/lskScripts | lib/Statistics/Descriptive.pm | Perl | mit | 38,498 |
# This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.07) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
#
# Generated from /tmp/Q713JNUf8G/africa. Olson data version 2016a
#
# Do not edit this file directly.
#
package DateTime::TimeZone::Africa::Accra;
$DateTime::TimeZone::Africa::Accra::VERSION = '1.95';
use strict;
use Class::Singleton 1.03;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;
@DateTime::TimeZone::Africa::Accra::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );
my $spans =
[
[
DateTime::TimeZone::NEG_INFINITY, # utc_start
60494688052, # utc_end 1918-01-01 00:00:52 (Tue)
DateTime::TimeZone::NEG_INFINITY, # local_start
60494688000, # local_end 1918-01-01 00:00:00 (Tue)
-52,
0,
'LMT',
],
[
60494688052, # utc_start 1918-01-01 00:00:52 (Tue)
60578841600, # utc_end 1920-09-01 00:00:00 (Wed)
60494688052, # local_start 1918-01-01 00:00:52 (Tue)
60578841600, # local_end 1920-09-01 00:00:00 (Wed)
0,
0,
'GMT',
],
[
60578841600, # utc_start 1920-09-01 00:00:00 (Wed)
60589294800, # utc_end 1920-12-30 23:40:00 (Thu)
60578842800, # local_start 1920-09-01 00:20:00 (Wed)
60589296000, # local_end 1920-12-31 00:00:00 (Fri)
1200,
1,
'GHST',
],
[
60589294800, # utc_start 1920-12-30 23:40:00 (Thu)
60610377600, # utc_end 1921-09-01 00:00:00 (Thu)
60589294800, # local_start 1920-12-30 23:40:00 (Thu)
60610377600, # local_end 1921-09-01 00:00:00 (Thu)
0,
0,
'GMT',
],
[
60610377600, # utc_start 1921-09-01 00:00:00 (Thu)
60620830800, # utc_end 1921-12-30 23:40:00 (Fri)
60610378800, # local_start 1921-09-01 00:20:00 (Thu)
60620832000, # local_end 1921-12-31 00:00:00 (Sat)
1200,
1,
'GHST',
],
[
60620830800, # utc_start 1921-12-30 23:40:00 (Fri)
60641913600, # utc_end 1922-09-01 00:00:00 (Fri)
60620830800, # local_start 1921-12-30 23:40:00 (Fri)
60641913600, # local_end 1922-09-01 00:00:00 (Fri)
0,
0,
'GMT',
],
[
60641913600, # utc_start 1922-09-01 00:00:00 (Fri)
60652366800, # utc_end 1922-12-30 23:40:00 (Sat)
60641914800, # local_start 1922-09-01 00:20:00 (Fri)
60652368000, # local_end 1922-12-31 00:00:00 (Sun)
1200,
1,
'GHST',
],
[
60652366800, # utc_start 1922-12-30 23:40:00 (Sat)
60673449600, # utc_end 1923-09-01 00:00:00 (Sat)
60652366800, # local_start 1922-12-30 23:40:00 (Sat)
60673449600, # local_end 1923-09-01 00:00:00 (Sat)
0,
0,
'GMT',
],
[
60673449600, # utc_start 1923-09-01 00:00:00 (Sat)
60683902800, # utc_end 1923-12-30 23:40:00 (Sun)
60673450800, # local_start 1923-09-01 00:20:00 (Sat)
60683904000, # local_end 1923-12-31 00:00:00 (Mon)
1200,
1,
'GHST',
],
[
60683902800, # utc_start 1923-12-30 23:40:00 (Sun)
60705072000, # utc_end 1924-09-01 00:00:00 (Mon)
60683902800, # local_start 1923-12-30 23:40:00 (Sun)
60705072000, # local_end 1924-09-01 00:00:00 (Mon)
0,
0,
'GMT',
],
[
60705072000, # utc_start 1924-09-01 00:00:00 (Mon)
60715525200, # utc_end 1924-12-30 23:40:00 (Tue)
60705073200, # local_start 1924-09-01 00:20:00 (Mon)
60715526400, # local_end 1924-12-31 00:00:00 (Wed)
1200,
1,
'GHST',
],
[
60715525200, # utc_start 1924-12-30 23:40:00 (Tue)
60736608000, # utc_end 1925-09-01 00:00:00 (Tue)
60715525200, # local_start 1924-12-30 23:40:00 (Tue)
60736608000, # local_end 1925-09-01 00:00:00 (Tue)
0,
0,
'GMT',
],
[
60736608000, # utc_start 1925-09-01 00:00:00 (Tue)
60747061200, # utc_end 1925-12-30 23:40:00 (Wed)
60736609200, # local_start 1925-09-01 00:20:00 (Tue)
60747062400, # local_end 1925-12-31 00:00:00 (Thu)
1200,
1,
'GHST',
],
[
60747061200, # utc_start 1925-12-30 23:40:00 (Wed)
60768144000, # utc_end 1926-09-01 00:00:00 (Wed)
60747061200, # local_start 1925-12-30 23:40:00 (Wed)
60768144000, # local_end 1926-09-01 00:00:00 (Wed)
0,
0,
'GMT',
],
[
60768144000, # utc_start 1926-09-01 00:00:00 (Wed)
60778597200, # utc_end 1926-12-30 23:40:00 (Thu)
60768145200, # local_start 1926-09-01 00:20:00 (Wed)
60778598400, # local_end 1926-12-31 00:00:00 (Fri)
1200,
1,
'GHST',
],
[
60778597200, # utc_start 1926-12-30 23:40:00 (Thu)
60799680000, # utc_end 1927-09-01 00:00:00 (Thu)
60778597200, # local_start 1926-12-30 23:40:00 (Thu)
60799680000, # local_end 1927-09-01 00:00:00 (Thu)
0,
0,
'GMT',
],
[
60799680000, # utc_start 1927-09-01 00:00:00 (Thu)
60810133200, # utc_end 1927-12-30 23:40:00 (Fri)
60799681200, # local_start 1927-09-01 00:20:00 (Thu)
60810134400, # local_end 1927-12-31 00:00:00 (Sat)
1200,
1,
'GHST',
],
[
60810133200, # utc_start 1927-12-30 23:40:00 (Fri)
60831302400, # utc_end 1928-09-01 00:00:00 (Sat)
60810133200, # local_start 1927-12-30 23:40:00 (Fri)
60831302400, # local_end 1928-09-01 00:00:00 (Sat)
0,
0,
'GMT',
],
[
60831302400, # utc_start 1928-09-01 00:00:00 (Sat)
60841755600, # utc_end 1928-12-30 23:40:00 (Sun)
60831303600, # local_start 1928-09-01 00:20:00 (Sat)
60841756800, # local_end 1928-12-31 00:00:00 (Mon)
1200,
1,
'GHST',
],
[
60841755600, # utc_start 1928-12-30 23:40:00 (Sun)
60862838400, # utc_end 1929-09-01 00:00:00 (Sun)
60841755600, # local_start 1928-12-30 23:40:00 (Sun)
60862838400, # local_end 1929-09-01 00:00:00 (Sun)
0,
0,
'GMT',
],
[
60862838400, # utc_start 1929-09-01 00:00:00 (Sun)
60873291600, # utc_end 1929-12-30 23:40:00 (Mon)
60862839600, # local_start 1929-09-01 00:20:00 (Sun)
60873292800, # local_end 1929-12-31 00:00:00 (Tue)
1200,
1,
'GHST',
],
[
60873291600, # utc_start 1929-12-30 23:40:00 (Mon)
60894374400, # utc_end 1930-09-01 00:00:00 (Mon)
60873291600, # local_start 1929-12-30 23:40:00 (Mon)
60894374400, # local_end 1930-09-01 00:00:00 (Mon)
0,
0,
'GMT',
],
[
60894374400, # utc_start 1930-09-01 00:00:00 (Mon)
60904827600, # utc_end 1930-12-30 23:40:00 (Tue)
60894375600, # local_start 1930-09-01 00:20:00 (Mon)
60904828800, # local_end 1930-12-31 00:00:00 (Wed)
1200,
1,
'GHST',
],
[
60904827600, # utc_start 1930-12-30 23:40:00 (Tue)
60925910400, # utc_end 1931-09-01 00:00:00 (Tue)
60904827600, # local_start 1930-12-30 23:40:00 (Tue)
60925910400, # local_end 1931-09-01 00:00:00 (Tue)
0,
0,
'GMT',
],
[
60925910400, # utc_start 1931-09-01 00:00:00 (Tue)
60936363600, # utc_end 1931-12-30 23:40:00 (Wed)
60925911600, # local_start 1931-09-01 00:20:00 (Tue)
60936364800, # local_end 1931-12-31 00:00:00 (Thu)
1200,
1,
'GHST',
],
[
60936363600, # utc_start 1931-12-30 23:40:00 (Wed)
60957532800, # utc_end 1932-09-01 00:00:00 (Thu)
60936363600, # local_start 1931-12-30 23:40:00 (Wed)
60957532800, # local_end 1932-09-01 00:00:00 (Thu)
0,
0,
'GMT',
],
[
60957532800, # utc_start 1932-09-01 00:00:00 (Thu)
60967986000, # utc_end 1932-12-30 23:40:00 (Fri)
60957534000, # local_start 1932-09-01 00:20:00 (Thu)
60967987200, # local_end 1932-12-31 00:00:00 (Sat)
1200,
1,
'GHST',
],
[
60967986000, # utc_start 1932-12-30 23:40:00 (Fri)
60989068800, # utc_end 1933-09-01 00:00:00 (Fri)
60967986000, # local_start 1932-12-30 23:40:00 (Fri)
60989068800, # local_end 1933-09-01 00:00:00 (Fri)
0,
0,
'GMT',
],
[
60989068800, # utc_start 1933-09-01 00:00:00 (Fri)
60999522000, # utc_end 1933-12-30 23:40:00 (Sat)
60989070000, # local_start 1933-09-01 00:20:00 (Fri)
60999523200, # local_end 1933-12-31 00:00:00 (Sun)
1200,
1,
'GHST',
],
[
60999522000, # utc_start 1933-12-30 23:40:00 (Sat)
61020604800, # utc_end 1934-09-01 00:00:00 (Sat)
60999522000, # local_start 1933-12-30 23:40:00 (Sat)
61020604800, # local_end 1934-09-01 00:00:00 (Sat)
0,
0,
'GMT',
],
[
61020604800, # utc_start 1934-09-01 00:00:00 (Sat)
61031058000, # utc_end 1934-12-30 23:40:00 (Sun)
61020606000, # local_start 1934-09-01 00:20:00 (Sat)
61031059200, # local_end 1934-12-31 00:00:00 (Mon)
1200,
1,
'GHST',
],
[
61031058000, # utc_start 1934-12-30 23:40:00 (Sun)
61052140800, # utc_end 1935-09-01 00:00:00 (Sun)
61031058000, # local_start 1934-12-30 23:40:00 (Sun)
61052140800, # local_end 1935-09-01 00:00:00 (Sun)
0,
0,
'GMT',
],
[
61052140800, # utc_start 1935-09-01 00:00:00 (Sun)
61062594000, # utc_end 1935-12-30 23:40:00 (Mon)
61052142000, # local_start 1935-09-01 00:20:00 (Sun)
61062595200, # local_end 1935-12-31 00:00:00 (Tue)
1200,
1,
'GHST',
],
[
61062594000, # utc_start 1935-12-30 23:40:00 (Mon)
61083763200, # utc_end 1936-09-01 00:00:00 (Tue)
61062594000, # local_start 1935-12-30 23:40:00 (Mon)
61083763200, # local_end 1936-09-01 00:00:00 (Tue)
0,
0,
'GMT',
],
[
61083763200, # utc_start 1936-09-01 00:00:00 (Tue)
61094216400, # utc_end 1936-12-30 23:40:00 (Wed)
61083764400, # local_start 1936-09-01 00:20:00 (Tue)
61094217600, # local_end 1936-12-31 00:00:00 (Thu)
1200,
1,
'GHST',
],
[
61094216400, # utc_start 1936-12-30 23:40:00 (Wed)
61115299200, # utc_end 1937-09-01 00:00:00 (Wed)
61094216400, # local_start 1936-12-30 23:40:00 (Wed)
61115299200, # local_end 1937-09-01 00:00:00 (Wed)
0,
0,
'GMT',
],
[
61115299200, # utc_start 1937-09-01 00:00:00 (Wed)
61125752400, # utc_end 1937-12-30 23:40:00 (Thu)
61115300400, # local_start 1937-09-01 00:20:00 (Wed)
61125753600, # local_end 1937-12-31 00:00:00 (Fri)
1200,
1,
'GHST',
],
[
61125752400, # utc_start 1937-12-30 23:40:00 (Thu)
61146835200, # utc_end 1938-09-01 00:00:00 (Thu)
61125752400, # local_start 1937-12-30 23:40:00 (Thu)
61146835200, # local_end 1938-09-01 00:00:00 (Thu)
0,
0,
'GMT',
],
[
61146835200, # utc_start 1938-09-01 00:00:00 (Thu)
61157288400, # utc_end 1938-12-30 23:40:00 (Fri)
61146836400, # local_start 1938-09-01 00:20:00 (Thu)
61157289600, # local_end 1938-12-31 00:00:00 (Sat)
1200,
1,
'GHST',
],
[
61157288400, # utc_start 1938-12-30 23:40:00 (Fri)
61178371200, # utc_end 1939-09-01 00:00:00 (Fri)
61157288400, # local_start 1938-12-30 23:40:00 (Fri)
61178371200, # local_end 1939-09-01 00:00:00 (Fri)
0,
0,
'GMT',
],
[
61178371200, # utc_start 1939-09-01 00:00:00 (Fri)
61188824400, # utc_end 1939-12-30 23:40:00 (Sat)
61178372400, # local_start 1939-09-01 00:20:00 (Fri)
61188825600, # local_end 1939-12-31 00:00:00 (Sun)
1200,
1,
'GHST',
],
[
61188824400, # utc_start 1939-12-30 23:40:00 (Sat)
61209993600, # utc_end 1940-09-01 00:00:00 (Sun)
61188824400, # local_start 1939-12-30 23:40:00 (Sat)
61209993600, # local_end 1940-09-01 00:00:00 (Sun)
0,
0,
'GMT',
],
[
61209993600, # utc_start 1940-09-01 00:00:00 (Sun)
61220446800, # utc_end 1940-12-30 23:40:00 (Mon)
61209994800, # local_start 1940-09-01 00:20:00 (Sun)
61220448000, # local_end 1940-12-31 00:00:00 (Tue)
1200,
1,
'GHST',
],
[
61220446800, # utc_start 1940-12-30 23:40:00 (Mon)
61241529600, # utc_end 1941-09-01 00:00:00 (Mon)
61220446800, # local_start 1940-12-30 23:40:00 (Mon)
61241529600, # local_end 1941-09-01 00:00:00 (Mon)
0,
0,
'GMT',
],
[
61241529600, # utc_start 1941-09-01 00:00:00 (Mon)
61251982800, # utc_end 1941-12-30 23:40:00 (Tue)
61241530800, # local_start 1941-09-01 00:20:00 (Mon)
61251984000, # local_end 1941-12-31 00:00:00 (Wed)
1200,
1,
'GHST',
],
[
61251982800, # utc_start 1941-12-30 23:40:00 (Tue)
61273065600, # utc_end 1942-09-01 00:00:00 (Tue)
61251982800, # local_start 1941-12-30 23:40:00 (Tue)
61273065600, # local_end 1942-09-01 00:00:00 (Tue)
0,
0,
'GMT',
],
[
61273065600, # utc_start 1942-09-01 00:00:00 (Tue)
61283518800, # utc_end 1942-12-30 23:40:00 (Wed)
61273066800, # local_start 1942-09-01 00:20:00 (Tue)
61283520000, # local_end 1942-12-31 00:00:00 (Thu)
1200,
1,
'GHST',
],
[
61283518800, # utc_start 1942-12-30 23:40:00 (Wed)
DateTime::TimeZone::INFINITY, # utc_end
61283518800, # local_start 1942-12-30 23:40:00 (Wed)
DateTime::TimeZone::INFINITY, # local_end
0,
0,
'GMT',
],
];
# Version of the Olson/IANA tz database this file was generated from.
sub olson_version {'2016a'}
# Non-zero when the zone has DST transitions; the generator emitted this
# constant (presumably the transition count in the span table above).
sub has_dst_changes {23}
# Generator-emitted horizon year for the precomputed spans -- TODO
# confirm semantics against DateTime::TimeZone internals.
sub _max_year {2026}
# Class::Singleton hook: construct the singleton instance, passing the
# generated span table to the DateTime::TimeZone initialiser.
sub _new_instance {
return shift->_init( @_, spans => $spans );
}
1;
| jkb78/extrajnm | local/lib/perl5/DateTime/TimeZone/Africa/Accra.pm | Perl | mit | 12,437 |
use strict;
use Data::Dumper;
use Carp;
#
# This is a SAS Component
#
=head1 NAME
get_relationship_IsRepresentedBy
=head1 SYNOPSIS
get_relationship_IsRepresentedBy [-c N] [-a] [--fields field-list] < ids > table.with.fields.added
=head1 DESCRIPTION
This relationship associates observational units with a genus,
species, strain, and/or variety that was the source material.
Example:
get_relationship_IsRepresentedBy -a < ids > table.with.fields.added
would read in a file of ids and add a column for each field in the relationship.
The standard input should be a tab-separated table (i.e., each line
is a tab-separated set of fields). Normally, the last field in each
line would contain the id. If some other column contains the id,
use
-c N
where N is the column (from 1) that contains the id.
This is a pipe command. The input is taken from the standard input, and the
output is to the standard output.
=head1 COMMAND-LINE OPTIONS
Usage: get_relationship_IsRepresentedBy [arguments] < ids > table.with.fields.added
=over 4
=item -c num
Select the identifier from column num
=item -from field-list
Choose a set of fields from the TaxonomicGrouping
entity to return. Field-list is a comma-separated list of strings. The
following fields are available:
=over 4
=item id
=item domain
=item hidden
=item scientific_name
=item alias
=back
=item -rel field-list
Choose a set of fields from the relationship to return. Field-list is a comma-separated list of
strings. The following fields are available:
=over 4
=item from_link
=item to_link
=back
=item -to field-list
Choose a set of fields from the ObservationalUnit entity to return. Field-list is a comma-separated list of
strings. The following fields are available:
=over 4
=item id
=item source_name
=item source_name2
=item plant_id
=back
=back
=head1 AUTHORS
L<The SEED Project|http://www.theseed.org>
=cut
use Bio::KBase::Utilities::ScriptThing;
use Bio::KBase::CDMI::CDMIClient;
use Getopt::Long;
#Default fields
# Complete field lists for the "from" entity (TaxonomicGrouping), the
# relationship itself, and the "to" entity (ObservationalUnit). The
# parallel hashes index the same names for O(1) validation of
# user-supplied --from/--rel/--to selections in check_fields() below.
my @all_from_fields = ( 'id', 'domain', 'hidden', 'scientific_name', 'alias' );
my @all_rel_fields = ( 'from_link', 'to_link', );
my @all_to_fields = ( 'id', 'source_name', 'source_name2', 'plant_id' );
my %all_from_fields = map { $_ => 1 } @all_from_fields;
my %all_rel_fields = map { $_ => 1 } @all_rel_fields;
my %all_to_fields = map { $_ => 1 } @all_to_fields;
# Used when none of -a/--from/--rel/--to is given.
my @default_fields = ('from-link', 'to-link');
# Field selections actually requested for this run.
my @from_fields;
my @rel_fields;
my @to_fields;
our $usage = <<'END';
Usage: get_relationship_IsRepresentedBy [arguments] < ids > table.with.fields.added
--show-fields
List the available fields.
-c num
Select the identifier from column num
--from field-list
Choose a set of fields from the TaxonomicGrouping
entity to return. Field-list is a comma-separated list of strings. The
following fields are available:
id
domain
hidden
scientific_name
alias
--rel field-list
Choose a set of fields from the relationship to return. Field-list is a comma-separated list of
strings. The following fields are available:
from_link
to_link
--to field-list
Choose a set of fields from the ObservationalUnit entity to
return. Field-list is a comma-separated list of strings. The following fields are available:
id
source_name
source_name2
plant_id
END
# Command-line option holders.
my $column;
my $input_file;
my $a;
my $f;
my $r;
my $t;
my $help;
my $show_fields;
my $i = "-";
# Parse options and construct a CDMI client configured for scripts.
my $geO = Bio::KBase::CDMI::CDMIClient->new_get_entity_for_script("c=i" => \$column,
								  "h" => \$help,
								  "show-fields" => \$show_fields,
								  "a" => \$a,
								  "from=s" => \$f,
								  "rel=s" => \$r,
								  "to=s" => \$t,
								  'i=s' => \$i);
if ($help) {
    print $usage;
    exit 0;
}
# --show-fields: list every selectable field and exit.
if ($show_fields)
{
    print "from fields:\n";
    print " $_\n" foreach @all_from_fields;
    print "relation fields:\n";
    print " $_\n" foreach @all_rel_fields;
    print "to fields:\n";
    print " $_\n" foreach @all_to_fields;
    exit 0;
}
# -a (all fields) is mutually exclusive with explicit field selections.
if ($a && ($f || $r || $t)) {die $usage};
if ($a) {
    @from_fields = @all_from_fields;
    @rel_fields = @all_rel_fields;
    @to_fields = @all_to_fields;
} elsif ($f || $t || $r) {
    # Validate each comma-separated selection; abort if any is unknown.
    my $err = 0;
    if ($f) {
	@from_fields = split(",", $f);
	$err += check_fields(\@from_fields, %all_from_fields);
    }
    if ($r) {
	@rel_fields = split(",", $r);
	$err += check_fields(\@rel_fields, %all_rel_fields);
    }
    if ($t) {
	@to_fields = split(",", $t);
	$err += check_fields(\@to_fields, %all_to_fields);
    }
    if ($err) {exit 1;}
} else {
    @rel_fields = @default_fields;
}
# Input comes from a named file when provided, otherwise from STDIN.
# NOTE(review): $input_file is never assigned by the options above, so
# the STDIN branch is always taken — presumably the 'i=s' option was
# meant to populate it; confirm against sibling scripts in this family.
my $ih;
if ($input_file)
{
    open $ih, "<", $input_file or die "Cannot open input file $input_file: $!";
}
else
{
    $ih = \*STDIN;
}
# Main loop: read ids in batches from the input, call the CDMI
# get_relationship_IsRepresentedBy API, and append the selected fields
# to each input line whose id produced at least one result.
while (my @tuples = Bio::KBase::Utilities::ScriptThing::GetBatch($ih, undef, $column)) {
    my @h = map { $_->[0] } @tuples;
    my $h = $geO->get_relationship_IsRepresentedBy(\@h, \@from_fields, \@rel_fields, \@to_fields);
    # Group result rows by their "from" id so they can be matched back
    # to the corresponding input lines below.
    my %results;
    for my $result (@$h) {
	my @from;
	my @rel;
	my @to;
	my $from_id;
	# Each $result is a triple: [ from-entity, relationship, to-entity ].
	my $res = $result->[0];
	for my $key (@from_fields) {
	    push (@from,$res->{$key});
	}
	$res = $result->[1];
	$from_id = $res->{'from_link'};
	for my $key (@rel_fields) {
	    push (@rel,$res->{$key});
	}
	$res = $result->[2];
	for my $key (@to_fields) {
	    push (@to,$res->{$key});
	}
	if ($from_id) {
	    push @{$results{$from_id}}, [@from, @rel, @to];
	}
    }
    # Emit one output line per (input line, result) pair; input lines
    # with no results are silently dropped.
    for my $tuple (@tuples)
    {
	my($id, $line) = @$tuple;
	my $resultsForId = $results{$id};
	if ($resultsForId) {
	    for my $result (@$resultsForId) {
		print join("\t", $line, @$result) . "\n";
	    }
	}
    }
}
# Validate a list of requested field names against the set of fields
# known for an entity or relationship.
#   $fields     - array ref of field names the user asked for
#   %all_fields - hash whose keys are the valid field names
# Prints a diagnostic to STDERR and returns 1 when any requested field
# is unknown; returns 0 when everything checks out.
sub check_fields {
    my ($fields, %all_fields) = @_;
    my @err = grep { !$all_fields{$_} } @$fields;
    return 0 unless @err;
    my @f = keys %all_fields;
    print STDERR "get_relationship_IsRepresentedBy: unknown fields @err. Valid fields are @f\n";
    return 1;
}
| kbase/kb_seed | scripts/get_relationship_IsRepresentedBy.pl | Perl | mit | 6,111 |
#!/usr/bin/env perl
# SuperRead pipeline
# Copyright (C) 2012 Genome group at University of Maryland.
#
# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Cat the k-unitig fasta file through
# Parse command-line arguments: "-output-prefix PREFIX" sets the prefix
# for the three report files (a trailing dot is appended when missing);
# any other argument is taken as the working directory.
$workingDir   = ".";
$outputPrefix = "";
$i = 0;
while ($i <= $#ARGV) {
    $arg = $ARGV[$i];
    if ($arg eq "-output-prefix") {
	$outputPrefix = $ARGV[++$i];
	$outputPrefix .= "." unless ($outputPrefix =~ /\.$/);
    }
    else {
	$workingDir = $arg;
    }
    ++$i;
}
# Names of the three one-line summary files written at the end.
$numKUnitigsFile = $outputPrefix . "numKUnitigs.txt";
$maxKUnitigNumberFile = $outputPrefix . "maxKUnitigNumber.txt";
$totBasesInKUnitigsFile = $outputPrefix . "totBasesInKUnitigs.txt";
# Scan the k-unitig FASTA on STDIN, recording each k-unitig's total
# sequence length in @kUnitigLengths, indexed by the numeric id parsed
# from its header line.
$isFirstRead = 1;
while ($line = <STDIN>) {
    if ($line =~ /^>/) {
	# Header line: flush the previous record's length (if any) and
	# start accumulating the next one.
	if (! $isFirstRead) { $kUnitigLengths[$kUnitig] = $kUnitigLength; }
	$kUnitigLength = 0;
	$isFirstRead = 0;
	# Header format is ">ID ...": skip '>' and capture up to whitespace.
	($kUnitig) = ($line =~ /^.(\S+)\s/);
    }
    else {
	# Sequence line: count bases, excluding the trailing newline.
	# NOTE(review): assumes every line ends in a newline; a final
	# unterminated line would be under-counted by one.
	$len = length ($line)-1;
	$kUnitigLength += $len;
    }
}
# Flush the final record.
if (! $isFirstRead) { $kUnitigLengths[$kUnitig] = $kUnitigLength; }
# Emit one "<id> <length>" line per k-unitig slot (ids absent from the
# input are reported with length 0 so the output covers the full id
# range) while accumulating the totals used by the summary files below.
# Initialize the counters explicitly: previously they were left undef
# when the input was empty, producing blank summary files.
$numKUnitigs = 0;
$totBasesInKUnitigs = 0;
for ($i=0; $i<=$#kUnitigLengths; $i++) {
    $length = $kUnitigLengths[$i];
    $totBasesInKUnitigs += $length;
    if (! $length) {
	# Hole in the id range (or zero length): normalize undef to 0.
	$length = 0; }
    else {
	# Only ids actually present in the input count as k-unitigs.
	++$numKUnitigs; }
    print "$i $length\n";
}
# Write the three one-line summary files into the working directory.
# Each open is now error-checked: the originals failed silently when
# the directory was missing or unwritable.
open (OUTFILE, ">", "$workingDir/$numKUnitigsFile")
    or die "Cannot open $workingDir/$numKUnitigsFile for writing: $!";
print OUTFILE "$numKUnitigs\n";
close (OUTFILE);
open (OUTFILE, ">", "$workingDir/$maxKUnitigNumberFile")
    or die "Cannot open $workingDir/$maxKUnitigNumberFile for writing: $!";
# The "max k-unitig number" file actually holds the array size needed
# to hold all ids, i.e. highest id + 1.
$arraySizeForKUnitigData = $#kUnitigLengths+1;
print OUTFILE "$arraySizeForKUnitigData\n";
close (OUTFILE);
open (OUTFILE, ">", "$workingDir/$totBasesInKUnitigsFile")
    or die "Cannot open $workingDir/$totBasesInKUnitigsFile for writing: $!";
print OUTFILE "$totBasesInKUnitigs\n";
close (OUTFILE);
| gpertea/stringtie | SuperReads_RNA/global-1/SuperReadsR/src2/getLengthStatisticsForKUnitigsFile.perl | Perl | mit | 2,171 |
package CSN::Controller::Account;
use Mojo::Base 'Mojolicious::Controller';
use String::Random;
use Crypt::PBKDF2;
use Text::Trim qw(trim);
use HTML::Escape;
use JSON;
use Try::Tiny;
sub dashboard {
    # Render the logged-in user's dashboard: a paginated feed of recent
    # activities addressed to this account, plus recent profile visitors
    # and the profiles this account follows.
    my $c = shift;
    my $page = $c->param('page') || 1;
    # Activities where the current account is the observer, newest
    # first, 50 per page.
    my $activities_rs = $c->model('Recentactivityperobserver')->search({
        observer_id => $c->session('account_id')
    }, {
        page => $page,
        rows => 50,
        order_by => { -desc => [ 'timestamp_activity', 'activity_id' ] }
    });
    my @activities = $activities_rs->all;
    my @recent_visitors = $c->stash('account')->visitors->all;
    my @followeds = $c->stash('account')->followed->all;
    $c->render(activities => \@activities, pager => $activities_rs->pager, recent_visitors => \@recent_visitors, followeds => \@followeds);
}
# Username autocomplete: find accounts whose username starts with the
# supplied "term" and return them as select2-compatible JSON results
# ({ id, text } pairs).
sub search_username {
    my $c    = shift;
    my $term = $c->param('term') || "";
    my @accounts = $c->model('Account')->search(
        { username => { -like => $term . "%" } },
        {
            select   => [ 'account_id', 'username' ],
            order_by => { -asc => [ 'username', 'account_id' ] },
        }
    )->all;
    my @results = map { { id => $_->account_id, text => $_->username } } @accounts;
    $c->render( json => { results => \@results } );
}
# Mark the session's account as a follower of the profile given by the
# "account_id" parameter. Responds with JSON: {result => 'following'}
# on success, or an error key for unknown profiles / failed queries.
sub follow_profile {
    my $c      = shift;
    my $target = $c->model('Account')->find($c->param('account_id'));
    unless (defined $target) {
        $c->render(json => { error => 'unknown profile' });
        return;
    }
    try {
        # Idempotent: re-following an already-followed profile is a no-op.
        $c->model('Profilefollow')->update_or_create({
            account_id => $c->session('account_id'),
            followed   => $c->param('account_id'),
        });
        $c->render(json => { result => 'following' });
    }
    catch {
        $c->render(json => { error => 'query failed' });
    };
}
# Remove the session account's follow of the profile given by the
# "account_id" parameter. Responds with JSON: {result => 'unfollowed'}
# whether or not a follow row existed, or an error key for unknown
# profiles / failed queries.
sub unfollow_profile {
    my $c = shift;
    my $account = $c->model('Account')->find($c->param('account_id'));
    if(not defined $account) {
        $c->render(json => {
            error => 'unknown profile'
        });
        return;
    }
    my $follow_rs = $c->model('Profilefollow')->find({account_id => $c->session('account_id'), followed => $c->param('account_id')});
    if (defined $follow_rs) {
        try {
            $follow_rs->delete;
            $c->render(json => {
                result => 'unfollowed'
            });
        }catch {
            $c->render(json => {
                error => 'query failed'
            });
        };
        return;
    }
    # No follow row exists: report success anyway so the operation is
    # idempotent. (Fixes the former 'unfollowd' typo so clients see the
    # same result string as the deletion branch above.)
    $c->render(json => {
        result => 'unfollowed'
    });
}
# Autocomplete for the values of one profile field. For select-type
# fields the options are defined by language keys and matched against
# their localized text in Perl; for free-form fields the distinct stored
# values are matched with a SQL prefix LIKE. Responds with select2-style
# JSON ({ id, text } pairs).
sub search_field {
    my $c = shift;
    my $field_id = $c->param('field');
    my $search_term = $c->param('term') || "";
    my $field = $c->model('Profilefield')->find($field_id);
    if (!defined $field) {
        $c->render(json => { results => [] });
        # BUG FIX: without this return, execution fell through and
        # crashed below on $field->data_type for unknown field ids.
        return;
    }
    my @json;
    # grep replaces the deprecated smartmatch (~~) operator.
    if (grep { $field->data_type eq $_ } qw(select select_multi)) {
        my @matching_values = $c->model('Profilefieldoption')->search({
            field_id => $field_id,
        }, {
            select => 'language_key',
            order_by => { -asc => 'language_key' }
        })->all;
        # because we use i18n, we need to search in perl and not in psql;
        # \Q...\E quotes regex metacharacters in the user-supplied term.
        @json = map { $c->loc($_->language_key) =~ /^\Q$search_term\E/i ? ( {id => $_->language_key, text => $c->loc($_->language_key)} ) : () } @matching_values;
    }else {
        my @matching_values = $c->model('Profilefieldvalue')->search({
            field_id => $field_id,
            value => {-like => $search_term."%"}
        }, {
            select => 'value',
            group_by => 'value',
            order_by => { -asc => 'value' }
        })->all;
        @json = map { {id => $_->value, text => $_->value} } @matching_values;
    }
    $c->render(json => { results => \@json });
}
# Paginated listing of all accounts, most recently active first
# (50 per page).
sub list_profiles {
    my $c    = shift;
    my $page = $c->param('page') || 1;
    my $rs = $c->model('Account')->search(undef, {
        page     => $page,
        rows     => 50,
        order_by => { -desc => [ 'date_last_login', 'last_heartbeat', 'account_id' ] },
    });
    $c->render(accounts => [ $rs->all ], pager => $rs->pager);
}
# Show the profile editor for the current account, with the profile
# field definitions grouped by display column (left/right).
sub edit_profile {
    my $c = shift;
    my %fields_by_block;
    for my $block (qw(left right)) {
        my @defs = $c->model('Profilefield')->search(
            { block => $block },
            { order_by => { -asc => 'position' } },
        );
        $fields_by_block{$block} = \@defs;
    }
    $c->render(profile => $c->stash('account'), profilefields => \%fields_by_block);
}
# Persist an inline-edited profile field value for the current account.
# Expects: pk (field id), value and/or value[] (multi-select), format.
# All previously stored values for the field are replaced; submitting an
# empty value deletes the field. Responds with a JSON result string.
sub save_profile {
    my $c = shift;
    if (!defined $c->param('pk') || trim($c->param('pk')) eq "") {
        $c->render(json => {result => 'invalid request'});
        return;
    }
    my $field = $c->model('Profilefield')->find($c->param('pk'));
    if (!defined $field) {
        $c->render(json => {result => 'not found'});
        return;
    }
    my $value_clean = trim($c->param('value'));
    my $values = $c->every_param('value[]');    # always an array ref
    my $format_clean = trim($c->param('format'));
    if (defined $format_clean && $format_clean eq "") {
        $format_clean = undef;
    }
    # Replace semantics: drop whatever was stored for this field before.
    my $currentfieldvalue = $c->model('Profilefieldvalue')->search({field_id => $field->field_id, account_id => $c->session('account_id')});
    if (defined $currentfieldvalue) {
        $currentfieldvalue->delete;
    }
    # BUG FIX: $values is an array reference, so the old "$values < 2"
    # compared its memory address rather than its element count and the
    # delete-on-empty path was never taken (empty strings got stored).
    if (scalar(@$values) < 2 && (!defined $value_clean || $value_clean eq "")) {
        $c->render(json => {result => 'deleted'});
        return;
    }
    my $saved = 0;
    foreach my $value (@{$values}, $value_clean) {
        if (!defined $value) {
            next;
        }
        my $newfieldvalue = $c->model('Profilefieldvalue')->create({
            value => trim($value),
            format => $format_clean,
            field => {
                field_id => $field->field_id
            },
            account => {
                account_id => $c->session('account_id')
            }
        });
        if (defined $newfieldvalue) {
            $saved = 1;
        }
    }
    if ($saved) {
        $c->render(json => {result => 'saved'});
        return;
    }
    $c->render(json => {result => 'error'});
}
sub show_profile {
    # Display a user's profile page. Viewing a profile other than one's
    # own also records the visit: an activity event is created for the
    # profile owner, the visitor history is updated, and a websocket
    # notification is published via Redis.
    my $c = shift;
    my $account = $c->model('Account')->find($c->param('account_id'));
    if(not defined $account) {
        $c->flash_message('warning', $c->loc('flash_warning_user_unknown'));
        $c->redirect_to($c->url_for('/profiles'));
        return;
    }
    # Profile field definitions, grouped by display column.
    my @pleft = $c->model('Profilefield')->search({ block => 'left' }, { order_by => { -asc => 'position' }});
    my @pright = $c->model('Profilefield')->search({ block => 'right' }, { order_by => { -asc => 'position' }});
    my $profilefields = { left => \@pleft, right => \@pright };
    if($c->session('account_id') ne $c->param('account_id')) {
        # inform profile owner about visitor
        my $activity_id = $c->model('Activityevent')->create({
            account_id => $c->session('account_id'),
            event => 'event_visited_you',
            data => encode_json [ $c->session('account_id'), $c->stash('account')->username ]
        })->activity_id;
        $c->model('Activityrecipient')->create({activity_id => $activity_id, recipient_id => $c->param('account_id')});
        # update visitor history (the bare try{} deliberately swallows
        # errors, e.g. concurrent duplicate visits)
        try {
            $c->model('Profilevisitor')->update_or_create({account_id => $c->param('account_id'), visitor => $c->session('account_id')});
        };
        # publish websocket notification to the visited user's channel
        my $ws_notification = encode_json {
            'event' => 'visitor_notification',
            'data' => {
                loc => 'ws_event_visited_you',
                loc_params => [ $c->session('account_id'), $c->stash('account')->username ]
            }};
        $c->redis->publish($c->redis_uid . ":global:user:" . $c->param('account_id') => $ws_notification);
    }
    $c->render(profile => $account, profilefields => $profilefields);
}
# Render the account-settings page; the template pulls everything it
# needs from the stash, so no extra data is passed.
sub list_settings {
    my ($c) = @_;
    $c->render;
}
sub list_vouchers {
    # Show the invitation-voucher page: who invited this account, the
    # account's own vouchers (created lazily once the account qualifies)
    # and a paginated list of accounts it has invited.
    my $c = shift;
    my $page = $c->param('page') || 1;
    my $account_rs = $c->stash('account');
    # do we need to create vouchers ?
    if (not $account_rs->vouchers_created) {
        # User must be registered more days than users where active in the last 7 days
        my $active_users = $c->model('Account')->search({
            date_last_login => { '>=' => \"NOW() - INTERVAL '7' DAY" }
        })->count;
        if (int($account_rs->date_registered->delta_days(DateTime->now())->in_units('days')) > $active_users) {
            # TODO: Better use md5 on microseconds + rand to avoid collisions ?
            my $rand_gen = String::Random->new;
            # Hand out five vouchers with random 33-hex-digit codes.
            map {
                $c->model('Voucher')->create({
                    account_id => $account_rs->account_id,
                    code => $rand_gen->randregex('[0-9a-f]{33}')
                });
            } 0..4;
            # Flag so voucher creation runs at most once per account.
            $account_rs->update({vouchers_created => 'true'});
        }
    }
    # Invitees paginated 50 per page, most recently active first.
    my $invitees_rs = $account_rs->invitees(undef, {
        page => $page,
        rows => 50,
        order_by => { -desc => [ 'date_last_login', 'last_heartbeat' ] }
    });
    my @invitees = $invitees_rs->all;
    my $inviter = $account_rs->invited_by;
    my @vouchers = $account_rs->vouchers->all;
    $c->render(inviter => $inviter, vouchers => \@vouchers, invitees => \@invitees, pager => $invitees_rs->pager);
}
sub change_password {
    # Change the current account's password. Validates, in order: the
    # two new-password fields match, the supplied current password is
    # correct, and the new password satisfies the length policy. On
    # success redirects to the settings page; on any failure re-renders
    # the settings template with a warning message.
    my $c = shift;
    if ($c->param('password') ne $c->param('password_retyped')) {
        $c->stash_message('warning', $c->loc('flash_warning_passwords_missmatch'));
    }elsif (! $c->_check_current_password($c->session->{account_id}, $c->param('password_current'))) {
        $c->stash_message('warning', $c->loc('flash_warning_current_password_wrong'));
    }elsif (! $c->_check_new_password($c->param('password'))) {
        $c->stash_message('warning', $c->loc('flash_warning_password_invalid'));
    }else {
        if($c->_change_password($c->param('password'))) {
            $c->redirect_to($c->url_for('/settings'));
            return 1;
        }else {
            $c->stash_message('warning', $c->loc('flash_warning_unknown_error'));
        }
    }
    $c->render('account/list_settings');
}
# Validate and persist the user's settings (theme, notification flags,
# language) as a JSON blob on the account row, then redirect back to
# the settings page. Unknown themes/languages fall back to defaults.
sub save_settings {
    my $c = shift;
    # validate theme key — grep replaces the deprecated smartmatch (~~);
    # an unknown or missing theme falls back to the configured default.
    my $theme = escape_html($c->param('theme'));
    if (!defined $theme || !grep { $_ eq $theme } @{ $c->app->config->{available_themes} }) {
        $theme = $c->app->config->{default_settings}->{theme};
    }
    # validate language
    my $language = escape_html($c->param('language'));
    if(!defined $c->validate_lang($language)) {
        $language = $c->config->{$c->app->mode}->{languages}->[0]->{key};
    }
    my $new_settings = encode_json {
        theme => $theme,
        notify_boards => escape_html($c->param('notify_boards')),
        notify_messages => escape_html($c->param('notify_messages')),
        notify_visits => escape_html($c->param('notify_visits')),
        language => $language
    };
    # reset the session language if it changed so it is re-derived from
    # the saved settings on the next request
    if($c->session('language') ne $language) {
        delete $c->session->{language};
    }
    $c->stash('account')->update({settings => $new_settings});
    $c->redirect_to($c->url_for('/settings'));
}
# A new password is acceptable when it is at least 8 characters long.
# The first argument (the controller invocant) is unused.
sub _check_new_password {
    my (undef, $pass) = @_;
    return length($pass) > 7;
}
sub _check_current_password {
    # Verify $pass against the stored PBKDF2 hash of the given account.
    # Returns 1 on success; returns empty (false) when the account does
    # not exist or the password does not validate.
    my ($c, $account_id, $pass) = @_;
    my $pbkdf2 = Crypt::PBKDF2->new(
        hash_class => 'HMACSHA1', # this is the default
        iterations => 1000, # so is this
        output_len => 20, # and this
        salt_len => 4, # and this.
    );
    my $account_rs = $c->model('Account')->find($account_id);
    if(! defined $account_rs) {
        return;
    };
    if(!$pbkdf2->validate($account_rs->password, $pass)) {
        return;
    }
    return 1;
}
# Hash the given plaintext with PBKDF2 (HMAC-SHA1, 1000 iterations,
# 20-byte output, 4-byte salt — the same parameters used for
# verification) and store it on the current account. Returns the
# result of the update call.
sub _change_password {
    my ($c, $new_pass) = @_;
    my $kdf = Crypt::PBKDF2->new(
        hash_class => 'HMACSHA1',
        iterations => 1000,
        output_len => 20,
        salt_len   => 4,
    );
    return $c->stash('account')->update({ password => $kdf->generate($new_pass) });
}
1;
| CodingMinds/CSN | lib/CSN/Controller/Account.pm | Perl | mit | 11,584 |
package MarpaX::Languages::PowerBuilder;
use 5.006;
use strict;
use warnings FATAL => 'all';
=encoding utf8
=head1 NAME
MarpaX::Languages::PowerBuilder - PowerBuilder sources parsers based on Marpa::R2
=head1 VERSION
Version 0.02
=cut
our $VERSION = '0.02';
=head1 SYNOPSIS
Dummy module, used to maintain version number.
=cut
=head1 AUTHOR
Sebastien Kirche, C<< <sebastien.kirche at free.fr> >>
=head1 BUGS
Please report any bugs or feature requests through the web interface at
L<http://github.com/sebkirche/MarpaX-Languages-PowerBuilder/issues>.
I will be notified, and then you'll automatically be notified of progress on
your bug as I make changes.
=head1 SUPPORT
You can find documentation for this module with the perldoc command.
perldoc MarpaX::Languages::PowerBuilder
You can also look for information at:
=over 4
=item * Git repository
L<http://github.com/sebkirche/MarpaX-Languages-PowerBuilder>
=back
=head1 ACKNOWLEDGEMENTS
=head1 LICENSE AND COPYRIGHT
Copyright 2014 Sébastien Kirche.
This program is free software; you can redistribute it and/or modify it
under the terms of the MIT license.
L<The MIT License (MIT)>
Copyright (c) 2014 Sébastien Kirche, SRD, SRQ, PBSelect parsers and others contributions by Nicolas Georges.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=cut
1; # End of MarpaX::Languages::PowerBuilder
| sebkirche/MarpaX-Languages-PowerBuilder | lib/MarpaX/Languages/PowerBuilder.pm | Perl | mit | 2,363 |
package selenoprotein_ref_seq;
#
# This is a SAS component
#
# The proteins in this package are a representative subset of selenocysteine-
# containing proteins.
#
# Updates:
#
# 2010-01-05
# 2012-05-10 Add Hypothetical protein HNE_2485
#
use strict;
use warnings;

use gjoseqlib;

# Reference sequences parsed from the FASTA records in the __DATA__ section
# below, as returned by gjoseqlib::read_fasta — presumably a list of
# [ id, definition, sequence ] triples (TODO: confirm against gjoseqlib).
# Selenocysteine residues appear as the one-letter code 'U' in the sequences.
our $ref_seqs = [ gjoseqlib::read_fasta( \*DATA ) ];

1;
__DATA__
>fig|83333.1.peg.3814 Formate dehydrogenase O alpha subunit (EC 1.2.1.2) @ selenocysteine-containing
MQVSRRQFFKICAGGMAGTTAAALGFAPSVALAETRQYKLLRTRETRNTCTYCSVGCGLL
MYSLGDGAKNAKASIFHIEGDPDHPVNRGALCPKGAGLVDFIHSESRLKFPEYRAPGSDK
WQQISWEEAFDRIAKLMKEDRDANYIAQNAEGVTVNRWLSTGMLCASASSNETGYLTQKF
SRALGMLAVDNQARVUHGPTVASLAPTFGRGAMTNHWVDIKNANLVVVMGGNAAEAHPVG
FRWAMEAKIHNGAKLIVIDPRFTRTAAVADYYAPIRSGTDIAFLSGVLLYLLNNEKFNRE
YTEAYTNASLIVREDYGFEDGLFTGYDAEKRKYDKSSWTYELDENGFAKRDTTLQHPRCV
WNLLKQHVSRYTPDVVENICGTPKDAFLKVCEYIAETSAHDKTASFLYALGWTQHSVGAQ
NIRTMAMIQLLLGNMGMAGGGVNALRGHSNIQGLTDLGLLSQSLPGYMTLPSEKQTDLQT
YLTANTPKPLLEGQVNYWGNYPKFFVSMMKAFFGDKATAENSWGFDWLPKWDKGYDVLQY
FEMMKEGKVNGYICQGFNPVASFPNKNKVIGCLSKLKFLVTIDPLNTETSNFWQNHGELN
EVDSSKIQTEVFRLPSTCFAEENGSIVNSGRWLQWHWKGADAPGIALTDGEILSGIFLRL
RKMYAEQGGANPDQVLNMTWNYAIPHEPSSEEVAMESNGKALADITDPATGAVIVKKGQQ
LSSFAQLRDDGTTSCGCWIFAGSWTPEGNQMARRDNADPSGLGNTLGWAWAWPLNRRILY
NRASADPQGNPWDPKRQLLKWDGTKWTGWDIPDYSAAPPGSGVGPFIMQQEGMGRLFALD
KMAEGPFPEHYEPFETPLGTNPLHPNVISNPAARIFKDDAEALGKADKFPYVGTTYRLTE
HFHYWTKHALLNAILQPEQFVEIGESLANKLGIAQGDTVKVSSNRGYIKAKAVVTKRIRT
LKANGKDIDTIGIPIHWGYEGVAKKGFIANTLTPFVGDANTQTPEFKSFLVNVEKV
>fig|266940.1.peg.7369 Formate dehydrogenase O alpha subunit (EC 1.2.1.2) @ selenocysteine-containing
MGIKTFLEGWPVYRQLTGDDPLGRGAATQSARSKSLTPRTATADRVAKSVCPYCAVGCGQ
RVYVKDEKVVQIEGDPDSPHSRGRLCPKGSASEQLVNGDSRATRIRYRAPYATEWSDLDL
DTAMDMITDRVIDARRRGWQDHDELGRRLNRTMGIASLGGATLDNEENYLIKKLFTAMGA
IQVENQARIUHSATVPGLGASFGRGGATGDLQDLSNADVIVIQGSNMAECHPVGFQWVME
AKARGAKVFHVDPRFTRTSALADTHVPIRTGTDIVLLGAIINRVLTQERDFREYVVAYTN
ASHLISEDFQDTDDLDGLFSGFDHETNSYDTKSWQYAGTEQHGDEEEETAESSDSESGHS
LGAGGAAVEHRSIQRDETLQDPRCVYQVLKRHFARYTPEMVADVCGIPTDLFEELAEAWI
SNSGREKTGALVYSVGWTQRGTGVQYIRAGAILQLLLGNMGRPGGGVMALRGHASIQGST
DIPTLYNLLPGYLQMPHVERHPDLAGYIDSIKGPAQKGFWANADAYTVSLLKAYFGDEAT
AENDYLFDELPKISGDHGTYSQVMDMIEGGKIFGYFLLGQNPAVGSANGRAQRLGMANLD
WMVVRDLVMIESATFWKDAPEVETGEIVPEQCRTEVFFLPAASHVEKEGTFTQTQRLLQW
REKAVNPPGDATSELWFFYHLGRRMREKLAGSTDPRDQLLLKLAWDYPLHGEEREPSSED
VLKEINGYGPEGLLPGYTALKADGSTSSGCWIYSGVYADDVNQAARRKPGREQSLHAPEW
GWAWPMNRRMLYNRASADPEGRPWSERKALVWWDEEAQRWVGDDIADFEATKPPSYRPPE
GATGVAAIAGDDPFIMQSDGKGWLYAPKGVVDGPLPAHYEPAESPFRNAVYTQQANPARE
VYTNHLNEVHPSPPELGSQVFPYVWITSRLTEHHTAGGMSRYLPYLAELQPALFMEVSPE
LARMRGLEHLGWAHVVTARAIVEARVLVTDRLAPLKVQNQLVHQIWLPYHWGQGGLTTGD
VVNDLMHLTLDPNVHIQEFKAGTCDVLPGRRPRGAAMLALIDEYRVRGGVTMASGAVAVT
ADLIARDGQERTDLAQGGDDHVGDGHDDDEIDRVLSVRHDVRSSPEHPQQDVEPAVLDAH
PARGLDRRAD
>fig|266834.1.peg.1 Formate dehydrogenase O alpha subunit (EC 1.2.1.2) @ selenocysteine-containing
MEAVPMNVDLSRRSFLKLAGAGAAATSLGAMGFGEAEAAVVAHVRPHKLTTTTETRNTCP
YCSVACGVIIYSKGDLRKGEAADIIHIEGDADHPTNRGTLCPKGAALKDFVKSPTRLQYP
MHRKPGSDKFERISWEDAFDRIARLMKDDRDANFIAANAAGVPVNRWTTVGMLAASATTN
ETAWATFKFAKALGIVGFDNQARVUHGPTVSSLGPTFGRGAMTNSWTDIKNTDLVVVMGG
NAAEAHPCGFKWVTEAKATRGAKLIVVDPRYTRTASVSDYYAPIRQGTDIAFLNGVMKYC
IDNDKVQWDYMKAFTNASYLVKDGFGYQDGLFTGYDAEKRDYDKSTWDYVLGDDGFVVTD
PALQHPRCVWNLLKAHLAPYTPEMVERICGTPKDKFLKVAEMISECSSPTKTMTSMYALG
WTQHSSGSQNIRAMAMLQLILGNIGVRGGGMNALRGHSNIQGLTDLGLMSHLLTGYLTMP
TEKDVDFTTYMSTRQFKPLRPGQTSYWQNYRKFMVSFQKAMWGDAARIDNDWAFNYLSKL
DVPAYDVLRVFELMYAGKVNGYICQGFNPLLAFPNRDKNTKALSNLKWLVTMDPLDTETA
RFWENHGDFNPVDTASIQTEVFQLPTTCFAEEEGSLTNSGRWLQWHWAGGTPPGEAKHDT
YIVAQIFLRMKEMYRNEGGAFPDPILNLSWDYADPNEPTPEELAKEINGRALTDLMDPAN
PMKVQVAAGKQILNFSQLRDDGSTMCGCWIYSGNFNEQGNNMARRDNHDPDDTGAYLGWS
FAWPLNRRTLYNRASADLQGKPWDPSRKLLEWDGTKWAGYDVPDIAPTAKPDEIGPFIMN
QEGTARLFSRGLMRDGPFPAHMEPFESPVANVFNPKMRGNPVSRVFQTDVAQMGLSDEFP
YAATSYRLTEHFHYWTKHNRVNSALQPEFFVEISEELAEEKNIENGGWVRVWSKRGSVKA
KAVVTKRIRPLMCDGKPVHVVGIPLHWGFTGSAKKGLGPNSLAPFVGDANIETPEYKAFL
VNIEPSTAPEEATV
>fig|292459.1.peg.3209 Formate dehydrogenase O alpha subunit (EC 1.2.1.2) @ selenocysteine-containing
MNLNRREFLRLSGVSAAGAALLLFAEEREALARDLKESRIARAKEVPSVCPHCSVGCGLI
GYVREGQLLQVEGNPDSPVSEGSLCPKGAATMQFAYDGVGRPNPLRQLTAKIRRPGSDRW
EEIGIEEALDRIARRIKETRDASFVERNADGLVVNRTESIAHIGSACIDNEECYLVTKLM
RALGVVYLEHHARIUHSSTVPGLGTSFGRGAMTTALWDIPNADVVVFMGSNAAENHPISF
KWFLRAKQKGATVICVDPRYNRTASKADWWIPFRSGTDTAVLGGLINYAITHNKFHREYV
VKYTNASLLVHPDFRFDDGLFSGWDPEAKKYDQSTWAFQTGPDGQPLSDPTLEHPQCVFQ
HIKRIYAEYTPEKVSEISGIPVDDFIKFAETVCATGTAGKSAVFVYAMGWTQHTKGVQNI
RTATILQMLLGNIGVAGGGIAALRGHANVQGATDLAVLYHDLPGYLGQPTEAHKSLSDFL
AKTTTKNSYWENKPKFLISLLKAWWGEHATAENDFCFDYLPKMAAGETYSHYDIFQSILD
GRVKGLIVVGQNPAVGSANARKVQAALAKLDWLVVSDLFLNETSEFWKLEGMNPAEIQTE
VILLPAAGPLEKEGSFTNTHRLIQWKHKMIEPMGESHSDGWYMVQIGKRLKQLYADSTAA
KDLPIKHLVWDYDDPDNPNEFDHLKVLKEINGYDVATGKPVSGFGALQDDGSTACGCWIY
SGIYPEEGVNKADARLRAAPNRPDGWTEAKADGSADYLHLGWGFAWPANRRVIYNRASAD
PSGKPWSKVPLVWWDEAEGKWTGVDVPDMLPVAPGQVHAVGVPGDTPFIMKAWGLGGIWG
PLPDGPLPVHYEPMESPTPNLLYRKQGTIPTMKIYDSEFDLFGDPERFPYVATTYRLTEH
LTSGVMTRTLPYLAEAFARHFCEIPRELAQREGIRNGDWVEVESARGKVRVQAMVTNRLR
PLRIGGRETFLIGLPIHWAPNSGHVQGDITNTLTPQAVDVNVQIQESKVFLANLRKVSVG
G
>fig|224324.1.peg.724 Formate dehydrogenase O alpha subunit (EC 1.2.1.2) @ selenocysteine-containing
MNYMDISRRGFLKLSVGSVGAGILGGLGFDLTPAYARVRDLKITKAKVTKSICPYCSVSC
GILAYSLSDGAMNVKERIIHVEGNPDDPINRGTLCPKGATLRDFVNAPDRLTKPLYRPAG
STEWKEISWDEAIEKFARWVKDTRDRTFIHKDKAGRVVNRCDSIVWAVGSPLGNEEGWLM
VKIGIALGLSARETQATIUHAPTVASLAPTFGRGAMTNNWVDISNSDLVFVMGGNPAENH
PCGFKWAIKAREKRGAKIICIDPRFNRTAAVADIFVQIRPGTDIAFLGGLINYVLQNEKY
QKEYVRLHTTGPFIVREDFGFKDGLFTGYDPKTRSYDTTTWDYEFDPATGYPKMDPEMKH
PRCVLNILKEHYSRYTPEVVSQICGCSKEDFLRVAEEVAKCGAPNKFMTILYALGWTHHS
YGTQLIRTACMLQLLLGNIGCPGGGINALRGHSNVQGMTDLAGQNKNLPTYIKPPKPEEQ
TLAQHLKNRTPRKLHPTSLNYWANYPKFFISFLKCMWGDAATPENDFAYDYLYKPEGGYN
SWDKFIDDMYKGKIEGVVTAALNFLNNTPNAKKTVRALKNLKWMVVMDPFMIETAQFWKA
EGLDPKEVKTEILVLPTAVFLEKEGSFTNSARWVKWKYKATDPPGDAKDEFWIFGRFFMK
LKEFYEKEGGAFPEPILNLVWPYKNPYYPTAEEILTEINGYYTRDVDGHKKGERVRLFTD
LRDDGSTACGGWLYCGVFPPEGNLAKRTDLSDPLGLGTYPNYAWNWPANRRVLYNRASCD
EKGRPWDPERPLLRWDPERDMWVGDIPDYPATAPPEKGIGAFIMLPEGKGRLFAAKSYVT
FKDGPLPEHYEPYESPVTNILHPNVPHNPVAKVYKSDLDLLGTPDKFPHVATTYRLTEHY
HFWTKHLYGPSLLAPVMFIEIPEELAKEKGIQNGDLVRVSTARASIEAIALVTKRIKPLK
VAGKTVYTIGIPIHWGFEGLVKGAITNFITPNVWDPNSRTPEFKGFLANIEKVKT
>fig|207559.3.peg.3778 Formate dehydrogenase O alpha subunit (EC 1.2.1.2) @ selenocysteine-containing
MNCTRRGFLKLAGAGAACISLAQLGFSLKEARAFAASLKIEGAKEVITVCPFCSVSCHII
AYVKNGKLISTEGDPDYPINEGSLCAKGAALLTMSTSHHRLLKPKYRAPFSDKWEEKDWG
WTLEQIARRVKDTRDKEIILENNKGQRVNRLESMFLLGTSHADNEECALVHQAMRGLGVV
HMDHQARIUHSATVAALGESFGRGAMTNHWIDIKNADAILIMGSNAAEHHPISFKWVLQA
KDKGAVVMHVDPKFSRTSARADFHVPLRSGTDIAFMGGMIHYILETESYFRQYVLDYTNA
SFIVGESYGFKDGLFSGYDIHTRSYDRSKWNFETDAQGTPKRDPSLKHPRCVFRLMQEHY
SRYTLDNVSSVTGVTKENLLKVYKAFAATGKPDKAGTMMYALGWTQHTVGVQNIRSAAIV
QLLLGNIGVAGGGINALRGEPNVQGSTDHCLLYHIIPGYMTMPMADWQTYADYNKANTPV
SADPQSANWWQHKPKYLTSLLKAWFGDAATAENGYCYGLLPKIEKGADHSYMFLFDRMYS
GKITGGFIIGLNPMNSVPNTNKVRKALDNLDWLVTAELHHSETTDNWRRPGVDPATVKTE
VFLLPSAHRVEKAGSVSNSGRWLVWHHKAVEPEGEARSFGDMFVPLINVVRDLYRKEGGT
MPEAVLNLDWPQQYDPEEWARRINGFFLKDTTVNGKEYKKGQLVPSFTALADDGSTSSLN
WLYSGSYTEEDGNKAQRRDPSQTPMQAAIGLYPKWSWCWPVNRRILYNRASVDAQGKPWN
PAKAVISWNGKKWEGDVPDGGWPPMATGKGRHPFIMSKHGFGQLYGTGRMDGPFSEHYEP
VETPIDAHPFSKQLSSPVYKFVSSDMDKLARPADPRFPYVLTTYNVTEHWCGGGETRNVP
NLLEAEPQLYVELSPELAEEKGIANGDGVILESARGRVEAIAMVTVRIRPFTVQGKTVHL
VGMPFCFGWTTPGTGDSTNRLTPSVGDPNTSIPEYKASCVNIRKADTLTEIDR
>fig|292459.1.peg.3210 Formate dehydrogenase O alpha subunit (EC 1.2.1.2) @ selenocysteine-containing
MALTRRDLLKSSLAAGGALSLLGLGGKAEAKERDDLKIEGAKDYRTICCYCSVGCGIQLW
VKDGEVVHVNGDPDHPINEGTLCPKGASIANLRIVADKNNQYQPNPRRLTKVLYRAPGAT
DWEEKDWDWAIERIARNVKAARDATFERVDENGVTVNRTFAMAHIGSAALDNEENYVLAK
LMRGLGIVRLEHHARLUHSSTVAGLAPTFGRGAMTNHWTDYQNTDVFMVLGTNPAENHPI
SMRWIDRARETRGAKLIVVDPKFNRTAAKADLYVQIRPGTDIAFLGGLMNYAMTHGRYFH
EYVAKYTNASYLIHPDFKFEEGLFSGAQVGEDGQVKYDTATWQYQVDEDGNIKKDPTLQH
PQCVFQLMKKHYARYTPEMVAETCGMTVEEFLEVAELFTSTGRPDKAGNIMYAMGITQSS
HGSQNVRAVAMLQLLLGNIGIPGGGVNAHRGESNVQGSTDMAMLWNNLPGYMPMPSAAQH
PTLAAYQASTPKSGYWTNRPKFMVSLLKAWWGENATAENDFAYDYLPKLDHRDHSHMSIF
EAMGRGELKGLFAWGQNFAVGGPNVTKERSALANLDWLVVVDLFETETAAFWKGPGMNPA
EIQTEVFLLPAAASYEKCGTVTNSGRWIQWRDKAVEPMGDSRDDLWIADRLFKKLRELYA
TEGGAFPDPILQLHWDYDDGDHPSAEKVAFEINGYTWADRKGLTTFGNLQDDGSTACGCW
IYAGYFDNFETPKCRSRVKDEPGTTLGTHLGWAWAWPVNRRILYNRCSMDEHGQPWDPER
PLFRWDGEKFIAQDVPDFVATNPPEVSAQNPFIMMQEGVGALWSPSGMKDGPIPEHYEPV
ESPVSNRFNRRQFNPVAVISGKGEFGALTEAENPEFPYICTTYRVTEHWQSGAMTRSLPW
LGEMMPDMFVEISPTLAAKLGVQSGDRVEVTTVRGSLVAPAMVTPRMRPVRVHGREIEIV
GMPWHWGFMGKFTGASANVLTPHVGDANTQIPEYKAFLCNVKKAGGSGPVRG
>fig|477974.3.peg.2160 Formate dehydrogenase O alpha subunit (EC 1.2.1.2) @ selenocysteine-containing
MKKLSRRAFLKSVGLGAAGLTALDIIGGGKSAEAATLLLKSRTDMPEFKLQYAEETYTIC
GWCSGGCGAIIYTKDGKVIDSIGDPDHPINEGAMCPKGRALSDLRHVVGQSERRHSMWPN
PRIDNPRRLTKPLYRAPYSTEWQEISWDTAIAEIVKRVKATRDATFETTDANGVTVNRTQ
AIAQFGSATINNEENYLMQKWARALGLINICFHARLUHSVTVAGLANTYGRGVMTNHWTD
YQFADAWLVIGGNPAENHPIAFKWITKARESRGAKLIVVDPRVSKTASLADIYAPLRPGT
DIAFFNGFIKYILDNNLYHEEYVVNYTNASYIINPDFKLEDGVFSGFDGGKYDKATWQYT
EEKDPTLEHPNCVFQLLKKHVSRYDLKTVSRICGTPENKLVEVWKAFAETGKPGKAGSIL
YAMGMTQHTHGAQNVRALAILQLLLGNMGMPGGGVNAQRGEANVQGATDQGMLFHITTGY
NPMPSAAAHPTWADYVEKTTPKAGYWTNRPKFVAAMLKAWYGDKATAENDYCFDWLPKLA
TGDHSHMAIFRDIALGKIKGLFAWGQNLVVCGPSQVQARAGMANLEWLVCMDLFETETAA
FWKAPDLDAARIGTEVFLLPAAGPYEKEGTVTNSGRWIQWRYKAVDPPGEAKSDLWIVDR
LFKTLRAEYKAKGGAFPDAIINMVWDYGDPPDPNKVAMEMNGFSLETGELLPNFTAIANS
DLGAVSSGCWIYSGFYNNVNDPACKRRNREDKSGLAMYHNWSWAWPLNRRIVYNRCSCDP
QGRPWDPKRAPVEWDGAQWVLRDVPDFNAKVPPEETATKPFIMLAEGQALLFSIPMADGP
FPEHYEPVESPVKNIMSKQQINPCTRFYGPHFEKFAAVGSEEFPYVCTTLRLVEHYQSGG
LTRNLPFLVELQPEMFVMISKSLGEKLGIKTYDWVKVSTARGAIECKVGVTPLVKPLEVD
GKPVEIVYMPWHWGFMGLSAGAIANDLTPAAGDPNTMIPEYKAFLCNIEKVKAGAPPAPR
TRARAIYDL
>fig|360106.3.peg.1723 Formate dehydrogenase-O, major subunit (EC 1.2.1.2) @ selenocysteine-containing
MSEAHIGRRSFLKLAALGAGSTMAFGKENETLRTATNEEIKNPFPGSKKVRTICTICSAG
CGIEAEVKDGVWIRQDMAIDHPISQGSHCCKGIDQIDLTKSKQRVKYPMKKVNGKWERIS
WETAINEIGDKLLEIRKEDGPDCVEFLGSAKFSNEQAFYFRKFAAFWGTNNIDHVARIUH
SATVAGVANTWGYGAMTNHFGDVTANSKAILLIGANSAVANPIGFKHMLQAKDRNNCKLI
VVDPVYTKSAAKADYFVRLRPGTDIAFAYGMLHLIFKNGWEDKEFIKTRSYAVDEIRKEA
EHWTPQETENVTGIPAEQLIEITRVFATTKPATLAWALGVTQHSVGSSNTRIYALLQLIL
GNVGKAGGGCNIIRGHDNVQGATDMNNLADSLPGYYGLGDAAWKHFCKGWGQDYDKFIKR
FATSVKEPREKLGDDVEGTKFKEYFYHDPKTPEDRNWRNEKGYSLSKWWQGVLKEENTFS
SGNLRVLWVQGTGITSMAHLTKIKEAIDKLDMLVVVEPFVNEVAILSDRKDGVYILPAAT
QFESSGYVSATNRSAQWRSQVIPPIYESKEDQEVMMLFAKKFGFYDEYVKGMMMDVVDGE
LKKVKDSFVWPDDATNEIARTTQSIGNNGRTADRLKKHQENWINFDPDTLMGKPGSPVAG
EYYGLPWPCWNKNHPGSPVLYDVTKPVWKGGMGFRNRFGLEHNGVSQLADDSITIPGSKV
KGGYPQITKDNIEKVLGITLSEEEKAKMGSGWNMDYSGIIAKKCEEAGVSPCGNAKARAI
VWEFIDQYPKHREPIHSPRKDLVEKYPTFGDQAKNFRVSTKFESEQKAQDWSKDFPTIIS
SMRLVNLSGAGMIERTSKYLAHITPEMFAHVNPELALKYGIQDGEMMWIHSPQGTKIKVK
CIHSHSVTPDRICLPYNFAGVMQGVDLSARYPEGTKPYTIGESSNTITNYGFDIVTQISE
FNAGLCRLERASDQSTFKTAFFDEK
>fig|192222.1.peg.1426 Formate dehydrogenase-O, major subunit (EC 1.2.1.2) @ selenocysteine-containing
MSSVGENIKLTRRSFLKMAALSSLATPLLARSETLREASADELKEAYEGSKKVKTVCTAC
SVGCGIIAEVQNGVWVRQEIAQDHPVSSGGHCCKGSDMIDMVRSHVRLKYPMKKENGEWK
RISYEQALSEIGEKLAAYRKENPESVMFLGSAKLNNEQAYYIRKFAAFFGTNNVDHQARI
UHSATVAGVANTFGYGAMTNHLGDIQRSKCIIIIGANPAVNHPVGFRHFLKAKEKGAKLI
VVDPRFTKSAAKADIYARIRPGTDIAFMYGMLKIIFDEGLEDTKYLDERVFGIDKIREEA
AKWTVEEVENVTGISKELLVQITHEVAKNKPTTLIWAMGLTQHTVGTSNTRLAPIVQMVL
GNIGKFGGGVNILRGHDNVQGASDMACLSENLPGYYPLNEATWRYYAKIWGVDYEWLLGN
FVSKDWMHKTGLSLARWWAAALNGKDGNDAIDNAGTPLKALVVMGNGITSTAQQVKVKEG
LEALELLVLADPFVNEAGIIAERKDGIYLLPAATQFETSGSVTATNRSGQWRFKVVDPLY
ESMEDQEILFELAKKLGFYEDFTKTLRDEKGEIVWPENATREIAKAVRSIGLNGWSPERL
KKHTLYWDKFDEVTLEGKDEVAGEYYGLPWPCWSDKHPGSPVLYNTDIEVAKGGMGFRNN
FGLEYEGESLLAKNAPLNSPIDTGYPQITKDNIEKVLGITLSAQEKEKMGSTWSYDDSNI
IATKCIEKGIVPYGNAKARAVVWTFKDKIPLHREPLHSPRNDLVQKYPSFEDQKALYRVD
TKFVSVQQAKDYSKEFPLNLVTARLVNLNGAGMENRASMYLTRLTPEMFCEINPELAKEQ
DIKAGDMIWVHSPEGTKIHVRVKVNPGVAKDMIFLPFHFTGVMQGVDLTHNFPEGTKPYA
SGESANTVTNYGYDIMCQIPETKGGLCRISKDGK
>fig|292459.1.peg.3216 Formate dehydrogenase-O, major subunit (EC 1.2.1.2) @ selenocysteine-containing
MAELSRRDFLKLGGVAAGALAVGLPQRPARAAQADVRKVPTLCEMCTSRCGVFAVVENGR
VTRIEGNPAHPVNLGRPCARGNAGASALYDPDRLKEPMKRGEDGKLYPITWEQAVEEIGA
KLNEIRKRHGPEALVFAEYNNLNSTLTKRWTEAFGSPNHVGHAANCFANRNVGYSAVFGA
LPSVDYENVKFYLSPGRNLLGGIKVSEVAALAKAKANGARIVVLDPRHSELAGWGEWIPI
KSAGDLAFLLAVANVLITEGIYNKAWVEAHCNGFEQLAEGIREYTPEWQEQHTDIPAEKV
RQLAREMAAAAPATVVDPGWHGGNGMYWNGYEAARAGAIVNALLGNLGAKGGLKLSPKVA
LGTIDNPPEGAVLEAAGGGUAATGTSAGAEVWTPYPKPTAVRFDGMGTKYPLSLGSIQAI
PLVVESGKPYPLKAAIFFRVNPVKSSGDQRRWIEALKRLDLVVAIDTQMSETAMLAHYVL
PEHHYLERMDAISVVGDTVSIRQPVVEPMYNTRSCLEILQGLAKVVGIEQYFNFTMEQWN
NALLGPTGWTVKHLKEKGVIKVSATPPDYSKLPTSSGKAELVHKGFALSGGHEVVTWVPP
KTQPEGDRLRLLHGHMAVHTNGYTQNVPALYARMPENDLWIHPTAAAARGIQDGDLVEVA
NEYGVQRIRARVTEGIRPDCVWMCHGFGTMAPEQRLAYGKGAADGWFYPILVTPVSAALG
QGDATVTVRKVGEKV
>fig|83333.1.peg.1460 Formate dehydrogenase N alpha subunit (EC 1.2.1.2) @ selenocysteine-containing
MDVSRRQFFKICAGGMAGTTVAALGFAPKQALAQARNYKLLRAKEIRNTCTYCSVGCGLL
MYSLGDGAKNAREAIYHIEGDPDHPVSRGALCPKGAGLLDYVNSENRLRYPEYRAPGSDK
WQRISWEEAFSRIAKLMKADRDANFIEKNEQGVTVNRWLSTGMLCASGASNETGMLTQKF
ARSLGMLAVDNQARVUHGPTVASLAPTFGRGAMTNHWVDIKNANVVMVMGGNAAEAHPVG
FRWAMEAKNNNDATLIVVDPRFTRTASVADIYAPIRSGTDITFLSGVLRYLIENNKINAE
YVKHYTNASLLVRDDFAFEDGLFSGYDAEKRQYDKSSWNYQLDENGYAKRDETLTHPRCV
WNLLKEHVSRYTPDVVENICGTPKADFLKVCEVLASTSAPDRTTTFLYALGWTQHTVGAQ
NIRTMAMIQLLLGNMGMAGGGVNALRGHSNIQGLTDLGLLSTSLPGYLTLPSEKQVDLQS
YLEANTPKATLADQVNYWSNYPKFFVSLMKSFYGDAAQKENNWGYDWLPKWDQTYDVIKY
FNMMDEGKVTGYFCQGFNPVASFPDKNKVVSCLSKLKYMVVIDPLVTETSTFWQNHGESN
DVDPASIQTEVFRLPSTCFAEEDGSIANSGRWLQWHWKGQDAPGEARNDGEILAGIYHHL
RELYQSEGGKGVEPLMKMSWNYKQPHEPQSDEVAKENNGYALEDLYDANGVLIAKKGQLL
SSFAHLRDDGTTASSCWIYTGSWTEQGNQMANRDNSDPSGLGNTLGWAWAWPLNRRVLYN
RASADINGKPWDPKRMLIQWNGSKWTGNDIPDFGNAAPGTPTGPFIMQPEGMGRLFAINK
MAEGPFPEHYEPIETPLGTNPLHPNVVSNPVVRLYEQDALRMGKKEQFPYVGTTYRLTEH
FHTWTKHALLNAIAQPEQFVEISETLAAAKGINNGDRVTVSSKRGFIRAVAVVTRRLKPL
NVNGQQVETVGIPIHWGFEGVARKGYIANTLTPNVGDANSQTPEYKAFLVNIEKA
>fig|138119.3.peg.4655 Formate dehydrogenase N alpha subunit (EC 1.2.1.2) @ selenocysteine-containing
MEVTRRGFLKLSGASLFALASGLGFDPQIAQAQGFSLRIEGTTKIPSICHFCSGGCGLLL
HIKDEKLVYLDGDPDNPVNSGALCPKGASLGHVANAKDRVTKPRYRAPGSSDWQDISWDE
AINKIASKIKEVRDTTWMATEEIGGTAYNVNRADGIAVLGSAEVDNEESYLIKKLSELIG
TPYNEHQARIUHAPTVASLSPSFGRGAMTNSWTDMQNTKCFLIAGSNCAENHPIAMRWIN
KAKENGAKVIVVDPRFTRTASQADIFAQVRPGADIAYLNAIINYILENKLYDQDYVLNHT
NGLYKISKDFKFADGLFSGFDPETKKYNFDSWAYQLDAENKPVKAESLDDPDCVFGKLKE
HFSRYTLEVGADISGIPAEKIKEIADTFCNTRPGSILYALGMTQHTTGVQGIRSYAIIQL
LLGNVGKAGSGIQALRGEPNVQGSTDMANLFNNLPGYLPAPVHTDKDLRSYLVRSGSAFE
RHIVSQLKAWFGENATKENDYCFNYLPKYNSGKNYSMVKLWEAANNGQFKMLLNFGSNSM
VSIPNRQIVREGLAKLDMLVIADVYEVETAQFWREKDPTTGELLVDPAKINTEVILLPAA
FVYEKGGTLSNSGRWIQWKDAALKPPGEAKPDLDILDHIYHKLKELYAGSTDPKDEPILK
ARWDYGHEPDPLKVLQEISGYDETTGKVLPTLADYLKAPIGSASSGCWIYAGVTGNGNLA
ARRDNSDPSGLGLYRNWSFSWPGNIRILYNRGSCDMNGQPLDENRKLIWWDAAKNSWEGN
DGADVPDKTKGPDTPEGKQVFRMNPEGVGRLFTAKYFSGIPATPAADGLPHIGVRPAGQC
NDGPLPEFYEPVESPTVNSLHPDVSSNPTVPIPNFLPGVTNHGSKEDFPYVLTTYALVEH
FCAGGITRNIPMLNELMPQPFAEISKNLAQKIGVKEGDMVEVSSARGKVQVVALVTDRIQ
TLKINGQDSETIGMPWSWGFASLSPGPTTNNLTISAIDPTAGTPEYKCCLVNIRRA
>fig|246194.3.peg.767 NAD-dependent formate dehydrogenase alpha subunit @ selenocysteine-containing
MADVTLKINGIEVTVPEGTTILEAAQIAGFKIPTLCYDPELSKPGACRICVVEVKGARNL
VASCVTPVAPGMEVLTHSEPVINARREILDLLLSNHPEDCLTCDKMGECALADYAYEYGV
RRGSYQGEKNVYPIEDNNPFIVRDMNKCILCGKCVRVCDEIVGYSVIDFINRGFKTKVAP
PYDGTLAESNCVFCGSCVTVCPTGALTVKDTRGKFRPWEVKRVKTTCPYCGVGCNFDLLV
KDDKVVGVAPNPTSEVNGRFMCVKGRFGYKFIHSDDRLTKPLIKKNGELTEASWDEALTL
VAEKLGEIKAKYGSDSIGVLASAKCTNEENYLLSKFTRAVLGTNNIDHCARLUHSPSVAG
LAAAFGSGAMTNSFEDIMETELIMLIGSNTTESHPVAGYKIKQAVKKGAKLIVIDPRNIE
LASYADLHLQLKPGTNLPLLNAMAHVIIEEGLYNEQFIRERTEGFDSYKDTVKDYSPEAV
AEICGVPAEKIREAARMYAKAGKAMILYTMGVTQFSFGTYGVLAVANLAMLTGHVGKDGA
GVNPLRGQNNVQGACDMGGLPNVFTGYQRVDDPAANEKFSKAWGVKLPEKPGLTLGEMFD
GALKGNIKAMYIMGENPVLSDPDARHIEEALKKLDFLVVQDIFLTETAKFADVVLPAATF
AEKDGTFTNTERRVQRVRKAIPCVGEAKPDWEIIALVATKMGYPMSYSSAEEIFDEMRTL
TPSYAGITYERLEKKSLQWPCPAPDHPGTKFLHSDKFVRGKGLFHPIPFKEPEEKPDSEY
PFTLITGRRLYHYHTGTMSRRSMMGEHVPEDHLEMNPADAKKLNIKDGDRVRVTTRRGSV
EIKVALTDKVPEGTVFASFHFFESPVNQVTNPARDPMAKTPELKYSAAKIEKL
>fig|177439.1.peg.682 NAD-dependent formate dehydrogenase alpha subunit @ selenocysteine-containing
MKKVTLTINGRSVTAPEGSYVLQAALQKGIYIPHLCYHNDLEPVGACRLCTVRVNGRWDA
LSCKTPVEEGMVIETETETIAKTRKIAIELLIADHDMDCLACARNTTCKLQQAADYIGVN
KKHLARLKKPEVLLPIDDSNPFFDFDPNRCVLCGICIRTCDELQNINAIDFAYRGLTTKV
AGFGDKARILSNCESCGECVARCPVGALTEKDYVKPSHWVKTVCAYCGCGCGLDLGVRDN
KIVSARGEHGNPANRGRLCVKGRFGFNYVNSKERLTTPMVRRPEGFVPVSWDEALDLIAD
NFAKNKGDSFAAVSSAKCTNEENYLLQKFTRTVMKTNNIDHCARLUHAPTVAGLATAFGS
GAMTNSIAEIGDAQVILAIGTNTTAAHPIIALQIKDAVRRGAKLIVINPKRIDLCKHAHI
FLQNTPGSDIPLLMTMMRIIVEEDLIDKEFIEERCENYPAFLASLQQFELEDVAELTGIP
AETIREAALCYATESPATILYCMGITQHVHGTDNVLATSNLALLTGNVGKESTGVNPLRG
QNNVQGSCDMGALPDVYPGYQKVVNNTAAFEEKWQTSLSAVPGKTHTEIFDGLYDGSIKS
VYLIGENPVLSEANASHAIEAMNRAKFLVVQDIFFSETAELADVILPASSFAEKDGTFTN
TERRVQRVRQAIEPVGDSRPDWWIIKEIATRMGAKGFDHEGPEQIMEEISATIPNYAGIN
YDRLQHGTLQWPCPSTDHPGTKFLHAERFATANGRARFMPLKTRKTTEQVDKKYPLLLNT
DRSLFHFHGTMTRRVFGLNVLDRGENLIIHPEDAEKFSLTDGEIVEVRSRRGAVKVAVEI
NDSCRPGSVSMSFHFSETPTNMLTSGALDPVAKTPGTKICAVQINKL
>fig|714.2.peg.2081 Formate dehydrogenase H (EC 1.2.1.2) @ selenocysteine-containing
MALDNNNNLSISCCSSNKDITMAIKKVITVCPYCASGCKINLLVENNKIVGAEGANGKTN
EGELCLKGYYGWDFVHDTKILTPRLTQPMIRYKRGEPFTPVSWEEAISYSANRLKEICEK
YGNESIMVTGSSRGPGNEVNFVMQKFARAVLGNNNVDCCARVUHGPSVTGLLKSVGNGAM
SNSIVEIEDTKCVFIFGYNASTSHPIVARRINHAKEKGAKIIVCDPPKIETARIADIYAP
LANGSNVAFLNAMMNVILEEGLQDQKFIDEHTENFDAFYETVKAYTPESTQHITGIDPEM
LREIARTYAKAETATILWGMGVCQFRQGVETVRALASLAMLTGNLGKPNVGVNPVRGQNN
VQGACDMGALFNTLPGYQSFADPETNAKFAKAWGVPSIPTKPGVPLSEVPHAVKEGKLKA
FYIMGEDTLQTEPDINAMKQTFKDLEFIIVQDIFMTQTAAEADVILPATSCAEHEGVYSA
ADRGFQRFYKAVEPVGNVKDDWEIISLIAQALGYPMHYNNTKEIWDELRELCPIYKGATY
EKMEGLGYIQWPCTDEGPEDQGTQYLYEGQIFDRKGGKAEFFACDWEPPMEDLSEEFPLV
LSTVREVGHYSCRSMTGNCRALAALADEPGFVQMNDQDAKALGIKNNDLVWIASSRGKVI
SHADVSTRTNKGACYMTYQWWIGKCNELTAEHLNPGSRTPEYKYSAVRIDKIEDQAWAEH
YVVAEYTKLKNRLKQTALVA
>fig|190192.1.peg.319 Formate dehydrogenase alpha subunit (EC 1.2.1.2) @ selenocysteine-containing
MARMRFVPQVCPFCGCGCGILVGTDGEEIKLLEPWRRHPVNEGRQCVKLWELPEAVQKDR
LERPVRMTESGEPRELSWNRALEEVAEVLSTHEPEEVYFVTSAKATNEDNYVAQKLARTL
GTNNVDHCARLUHAPTVVALSELLGSGAMTNSIPDLVEADCYLVAGSNTAEQHPIVYRRI
LQGLEENDADLIVLDPRRTQIAELADIHLQVRPRTDLIVFLYMAKVIVEEGLHDGTFIEE
RTTGFESFEEYVREAVSEGDVRRIAGVDPEDVRKAAVRYAEAERGCILYCMGLTHHDIAT
RTVRALCALALLTGNVGRPGTGVNPLRGQNNVQGACDVGALATHFPGYRPINTETANEMS
KIWSFEVPDEPGLKLTEAFDADEITVMYVVGENPAVSEPNTRHAVEKLESLEFLVVQDLY
LTETGELADLVLPAAGWAERTGTFTATDRRVQLAEKAVEPPGEARPDWWILEAVARRLGL
KGFGHRSPREVFEEIRRVVPQYRGITYERLRRRPGGIHWPCPSEDHPGTPILHTEEFATE
DGKARFPKPEDVEYREPERDVDEEYPLILTTGRVYAHYHTRTITRRSRLLSEEVPESFVE
IHPKDAERYGVRDGELVVVETPYGEWRCRARVTDRVREGTIFTPFHFGENVLTPHDVRDP
ESGIPEYKYVPARVRPDSRGSASRG
>fig|6239.3.peg.13805 Thioredoxin reductase (EC 1.8.1.9), metazoan type @ selenocysteine-containing
MKSLTELFGCFKRQPRQQEASSPANPHVSDTLSMGVAASGMPPPKRPAPAESPTLPGETL
VDAPGIPLKEALKEAANSKIVIFYNSSDEEKQLVEFETYLNSLKEPADAEKPLEIPEIKK
LQVSRASQKVIQYLTLHTSWPLMYIKGNAVGGLKELKALKQDYLKEWLRDHTYDLIVIGG
GSGGLAAAKEASRLGKKVACLDFVKPSPQGTSWGLGGTCVNVGCIPKKLMHQASLLGHSI
HDAKKYGWKLPEGKVEHQWNHLRDSVQDHIASLNWGYRVQLREKTVTYINSYGEFTGPFE
ISATNKKKKVEKLTADRFLISTGLRPKYPEIPGVKEYTITSDDLFQLPYSPGKTLCVGAS
YVSLECAGFLHGFGFDVTVMVRSILLRGFDQDMAERIRKHMIAYGMKFEAGVPTRIEQID
EKTDEKAGKYRVFWPKKNEETGEMQEVSEEYNTILMAIGREAVTDDVGLTTIGVERAKSK
KVLGRREQSTTIPWVYAIGDVLEGTPELTPVAIQAGRVLMRRIFDGANELTEYDQIPTTV
FTPLEYGCCGLSEEDAMMKYGKDNIIIYHNVFNPLEYTISERMDKDHCYLKMICLRNEEE
KVVGFHILTPNAGEVTQGFGIALKLAAKKADFDRLIGIHPTVAENFTTLTLEKKEGDEEL
QASGCUG
>fig|99883.3.peg.26915 Thioredoxin reductase (EC 1.8.1.9), metazoan type @ selenocysteine-containing
MPPIDNESGKNDLKSRIQQLIDSNQVMVFSKSYCPFCVQVKDLFRELQVECNVVELDLME
DGTNYQEMLLEMTGQKSVPNVFINKTHVGGCDKTMQAHKDGSLQQLLNGQNEAYDYDLIV
IGGGSGGLACSKEAALLGKKVMVLDYVVPTPKGTSWGLGGTCVNVGCIPKKLMHQTALLR
TAIQDARKFGWEFDEAVTHNWETMKTAINDYIGSLNWGYRVSLRDKNVNYVNAYAEFVDP
HKIKATNKRGKETFYTAARFVLATGERPRYLGVPGDKEYCITSDDLFSLPYCPGKTLVIG
ASYVALECGGFLAGLGLDVTVMVRSILLRGFDQDMANRAGQYMEEHGVKFLRKYVPVQVG
GPRKRAGVGARLVLHVQKSSSLTSVWQIEELEAGTPGRLKVTAKSTESDEIIEEEYNTVL
IAVGRNACTDKIGLDKVGVKVNPKNGKIPVNDEEQTSVPHIYAIGDILEEKWELTPVAIQ
AGRLLARRLYGGSKVKCDYVNVPTTVFTPMEYGACGLSEERAVGLYGQENIEVFHTQFWP
LEFTVPGRDNNKCYAKIICNKLDSGRVVGFHYLGPNAGEVTQGFSAAMKCGATKEQLDGT
IGIHPTCAEIFTTLEVTKSSGKSIIQTGC
>fig|177439.1.peg.3237 CoB--CoM-reducing hydrogenase (Sec) alpha subunit @ selenocysteine-containing
MGKVLTIAPVTRIEGHAKIAIHLDDTGNVENAFLHIQSLRGFEKFIEGRPAEEVPRLVNR
ICGICPWMHHIASNKAVDRCFDVTPTATGYKLRELCQVLAHVNDKILHFFFLAAPDFVLG
PDADHSVRNVMGVVKAAPELASKVVKMRQLGQMMLEKFAGKAIHPIAGVVGGFSKPMVEE
ERKELLEGARTLLDFSTYALDFAINNVFSKYMDVIGELGVIKTGFLGTVDRQDGSLRLFD
GNLRMMKADGSFEEFQNRDYADHIGEHTVPWGYSKMPYAKFWNEGFDMSLGAPKGIYRSN
TLARINVCDKISTPKAQAALEAFRELFGRPAQQTLLYHYARLIEMVYACERTIELLEWDG
ITDPHVRAEVTPRAGRGVGVVEAPRGTLIHDYTTDDDGCVTSANLIVGTTHNIAPMNMSV
RQAASSLIKDGKYDEGILNRVEMAVRAYDPUMSCATHRLDGGLPVTVSIMDKEGVEIDRI
VK
>fig|267377.1.peg.1724 CoB--CoM-reducing hydrogenase (Sec) alpha subunit @ selenocysteine-containing
MGKVTIEPLSRLEGHGKVSITLDDAGKPTDVKLHITALRGFEQFVVGRPAEEVPRIVPRI
CGICQTPHHLASVKAVDAAWGAEVPSAAEKLRELMHLGNMMHSHALHFYYLAAPDFVLGP
DSDPAARNIIGVIGAAPEVAKKAIAMRRVGQSIVETIGGRAIHPVTGVPGGVSKALSEEK
RDELLNEIDEMIAYGQDGIALIKSLNEKYMDLAKTLGVIDTWYLGMVKDGKHNFYGDTLR
FMSPDGKQTTDFKPSEYLDNIGEHVVGHNYVKYPYNKKVGYPDGLYRVGPLAMLNVCDSM
PTPLAEEARKDFADTFGKPANQSLAYNHARLIELLASCERVKELLEDSEITSTDIKAEVE
PKAGNGVGAIYAPRGTLFHNYETDDKGIVVKANMIVASTHNVPTMEKAIQQAAEVVFKUA
TVVKMVDEAKLNLIEIVLRAYDPUYSCAAHMIVKDEQGKVLLEVKKEE
>fig|243275.1.peg.78 Betaine reductase component B beta subunit (EC 1.21.4.4) @ selenocysteine-containing
MIYKAIHYINQFYAGIGGESSADSGFVVLRDKKGPAIGLEGLWHGKMIVTKVICCGDNYI
NLDRNFEEVCRQLKKIVEEEKPDVLIAGPAFNAGRYGMACAKICDYVRSNLNLPSVTAMW
HENPAVKIYVRNNYIISSTETAAGMHKTLQDLADLALKLAKKEKIGPARIEGYLPTGHRY
NEYHKKTGAERVVDMLLDKLNGRPYQTEVPLRGFERVPPAPPIHKMNKTTIALFTTGGLV
PIGNPDKLKQAFAEDFKVYDISNRDTLPQGVYESIHGGYDTTAASAEPNRLIPLDALRQC
EAEGIIGGIYPYFGTTCGVGTNVAVSESMGKAWARKIKEEGVGAVILTSTUGTCTRCGAT
ICKELDRVGIPNVHINAFTSISESVGANRIVFGGGFTAPVGNPMLPLERETAYRRKIVDK
ALEALQTEVLSPTVFTVDHDKEG
>fig|891.1.peg.7228 Betaine reductase component B beta subunit (EC 1.21.4.4) @ selenocysteine-containing
MKKAVLYINQFFAGVGGETEADFEPEIREGLVGPALELDKQLKNAEVTHTVICGDNFIGS
NQEEAVKRILEGLEGIEFDIFFAGPAFQAGRYGSACGFICRAVKEKFGVETVSSMHVENP
GVLMFKKELYIAEGGHIAAKMRKDIKKMANIGDKLLAGETMLRAAAEEFFPRGIRHQFWP
EEARPAAIRAVEMLQKKLSGENYQTELPIPKSDRVEIAKPVTDLSKARIALMNTGGIVPV
DNPDKIQSASATRWGKYDISGMDRLESGVFKTIHAGFDPAAADADPNVIMPVDVMRAFEK
EGKIGELYKDFYSTVGTGTTEAEARRMAQEIVVHMKEAQVDAVLMVSTUGTCTRCGATMV
KEVEKAGFPVVILCNLIPVAKSVGANKIVPTISIPYPLGDPATSQEEQLKLRRHRVEVAL
DALAQDIEEQTVFKVKI
>fig|1496.1.peg.3938 Glycine reductase component B gamma subunit (EC 1.21.4.2) @ selenocysteine-containing
MGKLKAVHYINQFFAGIGGEEKADTKPHVAETLPPISLQLDKLLGEDIEIVGTVVCGDSY
FNENIDSASEEVLSMVKGFEPQLFIAGPAFNAGRYGVAAGTITKVVKDALNIPALTGMYV
ENPGADMFKKDVYVVETSDSAAGMRKALPKIAKLAVKLANGEEIGTPKDEGYIARGIRVN
YFHEDRGSKRAVDMLVKKIKGEPFETEYPMPNFDRVDPSKAVKDLSKCKIALVTSGGIVP
KGNPDRIESSSASKYGTYSIAGVMDLTEETYETAHGGYDPVYANLDADRVLPVDVLRDLE
KEGVIGKLHETFYTTVGNGTSVANSKKYASEIGAALVADGVDAVILTSTUGTCTRCGATM
VKEIEKTGLPVVHMCTVVPISLTVGANRIVPTIAIPHPLGNPALDPTEEKALRRGLVEKA
LNALTTEVDGQTVFEK
>fig|243232.1.peg.1226 Formylmethanofuran dehydrogenase (tungsten) subunit B (EC 1.2.99.5) @ selenocysteine-containing
MVKVVRNVVCPFCGTLCDDLEILVEDNHIVGTRHACRIGNAKFMHFEGAVRYTEPLMREN
KKDDFKKVDYETAIEETARLLTEATLPLIYGWSATECHAHMYGVELAELVGAVIDNTASV
UHGPSLLAVQDVGYPVCTLGEVKNRADVIIFWGSNPMHAHPRHMSRYSVFARGFFRERGR
EDRTLIVVDPRETDTAKLADIHLQVEPHKDYELVSAMRAVLKGFELQVDKVAGVPADLIY
EAVEVCKNAQFGELFFAMGVTMTRGKHRNIDNAIQLVIDLNAYTKFGLMPMRGHYNVNGF
NQVLTWVTGYPFGVDFSRGYPRYNPGETTANDLLQRGETDMMLNIASDPGAHFPQKAVQH
MAKIPLVCIDPHETPTTQLANIIIPPAIAGVEVEGTAYRMDGVPIQLRKVIDPPEGVLPD
REILKILIKKVKEML
>fig|190192.1.peg.927 Coenzyme F420 hydrogenase alpha subunit (FruA) (EC 1.12.98.1) @ selenocysteine-containing
MAEGAVEIQPTTRHEGHAKLVLYVDDEGYVERAFYLNTSAVRGFEALAKGRPAEFVQVAV
MRICGICQATHGTASAEAFERAMGIEPPKDGKLLRELCALGNRIQSHVLHQLLVLDDFVE
DESEKVEAVKRIQQIRRIGQYVVDVVGGEGIHPPNIRIGGMAENISEAARRKLYRRLREA
RELMMEQHEFMVNIVERFGDENDLDIDEFGRHDQPFLATHPTYGDPDRLDMDRVVELLPI
EYYGEEHKEVAYQHRGQIPLYDGVPVEVGPRARYILFDGVDPRGVLYIHVLRSQETLAAI
DRAMTILDELNTSGKTLAEWEPKAGVGIGVHEAPRGTNVHIAKVNEKGIVEDYRIIAAST
WNFPVVEKAIEGENEEYAEVIMRCYDIUASCAAHVVKEVRDADSREKIRESVVKLA
>fig|9913.1.peg.588 Selenoprotein P precursor @ selenocysteine-containing
MWRGLGLALALCLLLTGGTESQGQSSYCKQPPPWSIKDQDPMLNSYGSVTVVALLQASUY
LCILQASRLEDLRVKLEKEGYSNISYVVVNHQGISSRLKYVHLKNKVSEHIPVYQQEENQ
PDVWTLLNGNKDDFLIYDRCGRLVYHLGLPYSFLTFTYVEDSIKTVYCEDKCGNCSLSRP
QDEDVCKNVFLATKEKTAEASQRHHHPHPHSHPHPHPHPHPHPHPHPHHGHQLHENAHLS
ESPKPDTPDTPENPPTSGLHHHHHRHKGPQRQGHSDNCDTPLGSESLQPSLPQKKLURKR
CINQLLUQFPKYSESALSSCCCHCRHLVFEKTGSAITUQCTEKLPSLCSUQGLLAEENVI
ESUQURLPPAAUQAAGQQLNPTEASTKUSUKNKAKMUKUPSN
>fig|221988.1.peg.2243 Selenide,water dikinase (EC 2.7.9.3) @ selenocysteine-containing
MQNDANEVRPALSEAEQIRLTQYSHGAGUGCKISPKVLGTILHSQLEQFVDPHLLVGNDT
NDDAAVYDIGNGTCIISTTDFFMPIVDDPFDFGRIAATNAISDIFAMGGKPIMAIAILGF
PINVLPAEVAQKIVDGGRFACREAGIALAGGHSIDAPEPIFGLAVTGIVPTEKVKRNASA
EAGSKLYLTKPLGIGILTTAEKRGKLKPEHKGLATEVMCQMNLIGSQFSQLESVTAMTDV
TGFGLLGHLAEICEGSNLVADVHFNKIKMLDGVPYYIEQGCLAGGVTRNYESYGIKIGAI
TEFQKAVLCDPQTSGGLLVAVKPEGETQLLELAAQAGIELIEVGELRRRVDNSDPVIIRI
LD
>fig|1491.1.peg.3667 Selenide,water dikinase (EC 2.7.9.3) @ selenocysteine-containing
MSNNINIKEGEACSIEKKEVRLTELSRTSGUAAKIAPGVLSKILSKLPKMESENLIVGIE
TADDAAVYKINDDTAMIQTLDFFTPIVDDPYMFGQIAAANSLSDVYAMGGKPVVALNIVC
FPSCLPIEVLGEILKGGADKVMESGAVVIGGHTVDDNEPKYGLSVTGIVHPDKVLKNYGC
KEGEVLITTKPLGIGIINTAIKGDMASKEAKDAAVKSMSTLNKYAGEIITKYNISACTDI
TGFGFLGHAYEMASASNVTLKFNYKRISYIKEAEEYAEFGLVPAGAYANRDHIEGNFEFK
NVPVFMQDILFDPQTSGGLLISCSKEESEKIMKELNKLELKSSIVGEVIRKQDKYIIVE
>fig|882.1.peg.1326 Selenide,water dikinase (EC 2.7.9.3) @ selenocysteine-containing
MTVDRLTSRSRAAGUAAKIAPGDLERILATLPRDPREGERVVVGTRDNEDAAIVRVPGGK
AIVQTLDFFTPIVDDPYLFGQIAAANALSDVYAMGGEPWCALNIVCFPVKELPEDILADI
LRGGADKVREAGAVLVGGHSIEDESIKYGLSVTGIIDPDCYATNTGLRPGDVLLLTKPLG
SGVLATAVKAGWDGFEAHEQELGRWGAMLNRAGGRVIRELGLAAATDVTGFGLGGHLLEM
ANASNMSVHVDVSTLPLMPAVLDLVATGLLPAGSHANRHFCSGNVSVHPEVDSLLVDIVF
DAQTSGGLILAVPPHLVDDACSILRAEDAPFWRIGHVEEMGEGVPRLVLQP
>fig|243232.1.peg.1637 Selenide,water dikinase (EC 2.7.9.3) @ selenocysteine-containing
MERGNEKIKLTELVKLHGUACKLPSTELEFLVKGIVTDDDLLDKNILVGLGDDASIIKRN
GLVIAKTVDVFTPIVDDPYIQGKIAACNSTSDIYAMGLLDIVGVLAIVGIPEKLPIHVVR
EMLKGFQDFCRENKTTIVGGHTILNPWPLIGGAVTGVGREEEVLTKAGVKVGDVLILTKP
LGTQTAMALSRIPEEFKDLISITEEERDYIINKAIEIMTTSNRYALKALRKAEERVGDKI
ANALTDITGFGILGHSNEMAKNSNVLIEINLLPCIKRTPELSRLFGHALLDGYGAETAGG
LLISAKEEYKDNLIDELEKAKCYAFEVGRVVKKGEGKAVLSKDVKVIEI
>fig|335283.3.peg.2205 Selenide,water dikinase (EC 2.7.9.3) @ selenocysteine-containing
MHPEKIALTQTVQKGGCAAKVAATTLHRILQQVRFPAAHSALMVDGRYFDDAAIYKINEQ
TALVQTLDFFTPIVDTPRLFGEIAAANAISDVYAMGGRPVTAMGILAFPLATLSEHIIVD
VLQGASDKIAEAGANFVGGHSIDDDTLKFGLSVTGLVNPQQVWTNANAQSGDHLVLTKAL
GTGTLTAGIKRQQLQEKDIMDALESMAAINNVIDYLSPDLLAAIHAATDITGFGFSGHAM
QLANASNVTLSIATGNLPRFDKAFYCLKNSFLTKAHRTNAEYTTPHIDDAKLDALYKLLI
HDPQTSGGLLLSVVPEASQLVLQALRTYFKPAAIVGTVHPRQDKAVQFE
>fig|235279.1.peg.1876 Selenide,water dikinase (EC 2.7.9.3) @ selenocysteine-containing
MADYQLTKHIQCAGUAAKVGLSDLAQISSQLTQKPNSLLIAGFESNEDCGAMLYAPNDEY
AMLSSVDFITPVVDDPYLYGQIAAANALSDIFAMGGEVKSALNLLMWDNTHFDSAVANAI
LKGGLNKITESNALLLGGHTIKDKEQKYGLAVNGIAHKNRLWRNHTGYIGDMLVLTKPLG
SGILTTAIKAQMFSQTTEVTQSMAMLNLYAARIAQNYEIHACTDITGFGLIGHAFEMCGG
IKNQNEKSILFYTKQIPLFDKTESFSQMGIVPGGSYENKKALQSQVQIQCTLEDDIFYYD
AQTSGGLLFALPFNQAKLFVDELHKAGIIHANVIGEIIPKTETSIVLG
>fig|326298.3.peg.2098 Selenide,water dikinase (EC 2.7.9.3) @ selenocysteine-containing
MNNESKLTKFVQAAGUAAKMGPGDLKHTICSLISDDKNVLVGFENSDDAAVYQINENEAL
VQTVDFITPVVDDPYIYGKIAAANSLSDIFAMGADVKTALNIVGFDKTNHTYEVLSEILR
GGNEKIKECGGVLVGGHTIESPEMYYGLSVTGMIHPKKVLRNNTPKIGHVIVLTKPIGMG
ILTTAIKRDLLSKETTLEAINVMQSLNYLPSKLLREYEVSACTDITGFGLLGHALESTNE
RVSISIDAKNVPVMADAFDLADKNIVPGGTKRNMKYLEDKVIFAGDASKYALMFSDAQTS
GGLLISMSEKDALEYVKKVQDLTYGYACVIGSIIPRGDRAIIVY
>fig|306263.1.peg.1683 Selenide,water dikinase (EC 2.7.9.3) @ selenocysteine-containing
MIYKDQKLTQYVKAAGUAAKLDSVGLDKILGILKPHENILSGIGNNEDASVYKLNEDLAL
VQTLDFITPVVDSAYHFGAIAAANALSDVFAMGAEVINALNIVGFDTCHFNNEILLEVLE
GARVKVEEAGAVLVGGHTIENDEFIFGLSVTGVVHPKKFIANNSAKDGDVILLTKPIGSG
IISTAIKASLLEKEKILKAVEQMSFLNLYASRILREFKSLSALSDVTGFGLLGHLKEMLN
KEIMIEVYKNEIPLMDGVLSMANMGIIPAGAYKNKDSLKIWVENLNEKDEDIMYFDPQTS
GGLLASMSENEAVEALKILKDHNIEAKIIARCVRNTHNYLLLC
>fig|217.1.peg.1890 Selenide,water dikinase (EC 2.7.9.3) @ selenocysteine-containing
MNEEIKLTHFVQCAGUAGKSSPADLSQIFSSLPSQSDPKLIAGFEHHEDCGIYALENGDF
LLQSIDFITPIVDDPYFYGKIAAANALSDVFAKGAEVCTALNVLMWDREHVSKSCLQEIL
RGGMEKIKECGALLLGGHTINDREQKYGLSVSGITRSFWPNRGAKIGDVLLLCKPIGSGI
ITTALKRENLALSAAKECVESMALLNVYAMRIALDFPIHASTDITGFGLIGHLLEMCAEE
QSFALEVDKIPLFEPVKELLAQNISLGSKNNRAYFSHAVRDTRERGEDGFLYDAQTSGGI
VFALPKNKAWALLDALKKSQYERASIIGEVIPRAQGAILLG
>fig|224324.1.peg.1561 Selenide,water dikinase (EC 2.7.9.3) @ selenocysteine-containing
MVELLKLVRSSGUAAKVGPGDLQEILKGFNIYTDESTLVSIGDDAGVYEHNGIIWVYTVD
IITPVVNDPYLWGAISTANALSDVYAMGGIPVNALAISCFNNCELDIEIFREVIRGALDK
LREAKTVLLGGHTIDDKEPKFGLSVAGICPEGKYITQSGAQVGQLLILTKPIGTGILIKG
LKEGILKEEDINEAIENMLALNDKARNLMLSLDATACTDVTGFGLLGHAWNICKNSNIGA
RIFFEKVPYYQLSENLVKKKIYPKGAIENLNFVKNYLKSNLDNWKLILLSDPVTSGGLLF
TINKEKLEKIDETAKELEVNYWIIGETIAENVLEVL
>fig|190192.1.peg.1365 Selenide,water dikinase (EC 2.7.9.3) @ selenocysteine-containing
MSRKKSLVEMADLHGUACKLPQGDLEDLLKGVELPEEGGRVEVGVGDDAAVIRVDGGYVI
QSVDFFTPIHPDPYTQGRIAANNSINDVFAMGATEVLSVLVVSGFPRELPEEDAREMLQG
FADQCREVDALIVGGHTIMNPWPILGGCVTGFAERYVTVGGAEPGDVLYLTKPLGTQPAM
AALRLPEDVRKQFLTDSELEEAVDLAVEVMTEPLKDAAEAALEVGVHAMTDVTGFGLKGH
AGEMAEASGVRVVIERLPVIPGTTELSRALGYGLERGESAETAGGLLVAVPEEHAEDLED
AFERRDVWYRRIGRVEEGSGVEVRGDVEEVEDYP
>fig|316057.3.peg.1843 D-proline reductase, 26 kDa subunit @ selenocysteine-containing
MAETRAPTGFASEDDVPIPYMARTRAYYQAIGYDVPYRWAHDVAAPFRPLTKPLAQARVA
LITTAAPFDPAKGDQGPGAAYNGGAKFYQVYDGDTSQDHDLRISHIAYDRTHTTATDSGS
WFPLPQLRRLAREGAIGDVAPRFFGAPTNRSHRVTTDTDAPEILARCRADNVDAAVLVPN
CPVCHQTVSLVARHLEAHGVSTVVMGCAKDIVEHAAVPRFLFSDFPLGNSAGKPHDDASQ
AFTLELALRVLERAPGPQTTVQSPLRWRTDAAWKRDYNNVAAMSADELERRRCDFEAQKD
RARAIRDSVA
>fig|1491.1.peg.3665 D-proline reductase, 26 kDa subunit @ selenocysteine-containing
MSLTITKGLQSEIFVPITPEPVWTPVTKELKDMTVAIATAAGVHLKKDKRFNLAGDFTYR
LIPGDAKTSDMMVSHGGYDNSDVNKDINAMFPIDRLRELVDVGFIKAVAPTHIGFMGGGG
NQQKFREETGPEVAKILKEEGVDAVLMTAGUGTCHRSAVLVQRAIEKAGIPTIIIAALPP
VVRQTGTPRAVAPRVPMGANAGEPNNVEMQTAIVKDTLEQLIKIPSAGKIVPLPYEYIAK
V
>fig|99883.3.peg.21500 Selenoprotein P precursor @ selenocysteine-containing
MDSLRQKLENQGLRDVVYMVVNHQGAQARGLHAMLAQRLSEHISLHRQDEALADVWQTLG
GNKDDFFIYDRCGRLTHRISLPYAVIGHGHVEKAVKDTYCSSLCGECTHETAETQQECTP
KTDTQPQEDTRHECHHRHHQGHQHHGDGHDHGHGHHGGQGHHQGHDQAGGVAQRPADADT
DAGCSAASLSRRREPDPRVPTEDESRCKLALIVPVTPVAKSSLYQQVLL
>fig|9913.1.peg.211 Glutathione peroxidase (EC 1.11.1.9) @ selenocysteine-containing
MARLFRASCLLSLLLAGFIPPSQGQEKSKTDCHAGVGGTIYEYGALTIDGEEYIPFKQYA
GKYILFVNVASYUGLTGQYVELNALQEELEPFGLVILGFPCNQFGKQEPGENSEILATLK
YVRPGGGFTPNFQLFEKGDVNGEKEQKFYTFLKNSCPPTSELLGSPDRLFWEPMKVHDIR
WNFEKFLVGPDGIPIMRWYHRTTVNSVKMDILTYMRRRAVWEAKGK
>fig|10279.1.peg.66 Glutathione peroxidase (EC 1.11.1.9) @ selenocysteine-containing
MADGSGARFPRFSELCAKYAAQLAAAETRSVYAFSARPITGGEPVSLGFLRGRVLLIENV
ASLUGSTVREYTQMNELQRRLGARGLVVLGFPCNQFGHQENAQNAEILPSLKHVRPGNGF
EPNFMLFEKCEVNGARAHPLFAFLREALPAPSDDMSTLVSDPQLIAWSPVCRNDVAWNFE
KFLVGADGTPVRRYSHRCQTLAVEPDIEALLPPPARGYYA
>fig|351605.3.peg.3641 D-proline reductase, 26 kDa subunit @ selenocysteine-containing
MPFARLKNRLLARIFTSFPSLAARWGRQLDANISDIPWTEPQKPLNEAILALVTTGGVHL
KTQEPFNMADPNGDPTFREIPADTPADLLTITHDYYDHRDAERDLNLVLPLQRLREMVEV
GALKALHPIAYGFMGHIDGPHLHTLQEETAPAIARKLADAGVDYALLVPA
>fig|298386.1.peg.5793 Glycine/sarcosine/betaine reductase protein A @ selenocysteine-containing
MLKDKKVIILGDRDGIPGQAIEACIKSAGAHVLFSTTECFVUTSAGAMDLENQKRIKGFA
EEFGAENILIVLGGAEAEASGLACETVTNGDPTFAGPLAGVQLGLSCYHVVEPEIKNNVD
ADVYDEQIGMMEMVLDVDAIIAEIKGYREQFGKYVLAEAEV
>fig|243275.1.peg.1716 Glutathione peroxidase (EC 1.11.1.9) @ selenocysteine-containing
MGIYNYTVKDSLGNDFSFNDYKDYVILIVNTACEUGLTPHFQGLEALYKEYRDKKFLVAA
FPCNQFGGQDPGTNEEIRNFAQSKYGVSFPIMAKIEVNGENTEPIFSFLKKASNGEDIKW
NFAKFLVDKTGERVTAYAPTVAPEDLKKDIEKLLN
>fig|243232.1.peg.1222 CoB--CoM-reducing hydrogenase (Sec) delta subunit @ selenocysteine-containing
MDPVIIAFCCYQUGYGAADLAGTSRMQYPATVRIVRLPCTGKFDITYALRAFQKGADAVM
VVGUKKGECAYETGNLKAEERVRFAKQLLDELGIGGDRIDMFFMSAAEADKFVSAVNEMT
ARVEKLGPNPLKAQ
>fig|882.1.peg.2275 Carbon monoxide-induced hydrogenase proton translocating subunit CooK @ selenocysteine-containing
MQDILVALFHMIVFPGGAFALTLGLLLKGLDRKVEARLQRRVGPPIVQPFIDLVKLTTKE
TLIPATANRTFFLAAPLIGFTGMAVCAAFIPVPGVYDGMPGMGDMLVLFYLLPLPAIALM
VGGSASSSPFGAIGFSREMTMMLAYEIPLLAVLLAVALKVGHATGVGAELSLASVVAYQA
STGMLGLDPVMLPALLAYLLFLPGTAGVPPFDIPEAETEIIEGPLLEYSGPALGFFHLGA
ALKTVVVLGLGVAMFFPGTVPGGIVPNVLWFAAKUAGLMLLSLTLVKAATGRFRIDQAFT
FYLKLPTPLAMASLALAWLGF
>fig|882.1.peg.2458 DsrK-like protein @ selenocysteine-containing
MSDDTLKPHQEPGRTFKDRVMEVLPDGGNLNLCLTCGACSAGCPATGLEDMDPRKFLRMA
ALGMDEEVTTTPWVWMCTMCMRCMYVCPMQINIPQLVYHARASWPREKRPRGIVNSCDAA
LKTESNSAMGASPDDFAYVVEDVLEEVRSTQPGQEKLTAPVDKHGAMYFLNQNSREPVTE
PDEMVPLWKILDMAGADWTYGSVGWAAENYCMFAADDEAWETIVRNKVKAVEDLGCKVWL
NTEUGHELYAIRSGLQKFNIKPKFEIESIIRLYARWIREGKLPVSSEWNRERKVKFTVQD
PCQLVRKSFGDPVADDLRFVAKAVCGEENVIEMWPNRSNNYCCGGGGGFLQSGYPEARRY
YGRLKNEQIVATGAPYVIAPCHNCHSQISDLSDHYGAGYRVVHLWTLIALSLGILGENER
EYLGQDLCDCGL
>fig|243231.1.peg.66 AhpC/Tsa family protein @ selenocysteine-containing
MAIDEQKPVYELQKELDALREDYLAGMSPEHAATLQRTATELVLSGIVGHAATIGDRAQD
FTLPNAVGRQIRLSEVTAQSTAVVTFYRGAWUPYCSLQLRAYQAVLPRLRELGGELLAIS
PQTPDKSQATLLKNFLQYEVLSDVGNLVARSFGLVYPLGEEMRRIYLGFGVNLADYNGDE
SWELPLPGTFVIDGTMTIRYSFVDADYTRRLEPATILDVLERIREERGRDDNQAS
>fig|228405.5.peg.3424 Hypothetical protein HNE_2485 @ selenocysteine-containing
MTIILDPTDERKPVSRQVTARAGSLSGTVGLLDIRKPRGNVLLDELERQLKAAAPAITVK
RFAKPTFTKPCPDDLRRDIAGQVDYLVEALADUGSCTTCSLHDTVWFEIQGIPSVSIASS
EFEEAANFQRSALGMPDARYVLVPHPIQDATDDEMRAKAKQALSAIIDALTR
| kbase/kb_seed | lib/selenoprotein_ref_seq.pm | Perl | mit | 33,460 |
use bigint;
use LWP::UserAgent;
use JSON -support_by_pp;
use Config::IniFiles;
use POSIX qw(strftime);
use IO::Socket::INET;
# required for --log-master
STDOUT->autoflush(1);
# The internal API is reached by IP/internal name, so the certificate
# hostname will not match; disable hostname verification for LWP.
$ENV{PERL_LWP_SSL_VERIFY_HOSTNAME}=0;
# Read the uWSGI Emperor configuration: API endpoint and the client TLS
# credentials used to authenticate against the private API.
my $cfg = Config::IniFiles->new( -file => "/etc/uwsgi/local.ini" );
my $base_url = 'https://'.$cfg->val('uwsgi', 'api_domain').'/api/private';
my $ssl_key = $cfg->val('uwsgi', 'api_client_key_file');
my $ssl_cert = $cfg->val('uwsgi', 'api_client_cert_file');
# HTTP timeout (seconds) for each API request.
my $timeout = 30;
# Main supervision loop: every 30 seconds reconcile the loopboxes reported
# by the private API with the loop devices actually mounted inside each
# container (vassal), mounting missing ones and unmounting stale ones.
for (;;) {
    my $ua = LWP::UserAgent->new;
    $ua->ssl_opts(
        SSL_key_file  => $ssl_key,
        SSL_cert_file => $ssl_cert,
    );
    $ua->timeout($timeout);
    my $response = $ua->get($base_url.'/loopboxes/');
    if ($response->is_error or $response->code != 200) {
        print date().' oops: '.$response->code.' '.$response->message."\n";
        exit;
    }
    my $loopboxes = decode_json($response->decoded_content);
    my $containers_json = undef;
    # get json stats from the Emperor stats server
    my $s = IO::Socket::INET->new(PeerAddr => '127.0.0.1:5001');
    if ($s) {
        my $json = '';
        for (;;) {
            $s->recv(my $buf, 8192);
            last unless $buf;
            $json .= $buf;
        }
        $containers_json = decode_json($json);
    }
    foreach my $lb (@{$loopboxes}) {
        my $pid = get_container_pid($lb->{uid}, $containers_json->{vassals});
        next unless $pid;
        # NOTE(review): the source had a corrupted token here ($lb->(unknown));
        # {filename} matches check_mountpoint()'s 4th parameter and the
        # /containers/<uid>/<filename> path it stats — confirm against the API.
        if (check_mountpoint($pid, $lb->{uid}, $lb->{id}, $lb->{filename}, $lb->{mountpoint}, $loopboxes)) {
            my $cmd = '/etc/uwsgi/loopbox mount /containers/'.$lb->{uid}.'/run/ns.socket /dev/loop'.$lb->{id}.' /containers/'.$lb->{uid}.'/'.$lb->{filename}.' /containers/'.$lb->{uid}.'/'.$lb->{mountpoint}.' '.$lb->{ro};
            print date().' running '.$cmd."\n";
            system($cmd.' >> /containers/'.$lb->{uid}.'/logs/emperor.log');
        }
    }
    # Vassals that were never matched by get_container_pid() no longer have a
    # loopbox: unmount everything they still hold.
    foreach my $vassal (@{$containers_json->{vassals}}) {
        if (!$vassal->{checked}) {
            unmount_all($vassal);
        }
    }
    sleep(30);
}
# Timestamp used as a prefix for every log line, e.g. "2024-01-31 12:00:00".
sub date {
    my $stamp = strftime('%Y-%m-%d %H:%M:%S', localtime);
    return $stamp;
}
# Look up the vassal whose id is "<uid>.ini" and return its pid.
# Side effect: marks the matched vassal as {checked} so the caller can later
# detect vassals that no longer correspond to any loopbox.
# Returns undef when no vassal matches.
sub get_container_pid {
    my ($uid, $vassals) = @_;
    my $wanted = $uid.'.ini';
    foreach my $vassal (@{$vassals}) {
        next unless $vassal->{id} eq $wanted;
        $vassal->{checked} = 1;
        return $vassal->{pid};
    }
    return;
}
# Unmount every /dev/loopN device still mounted inside the given vassal's
# mount namespace, as listed by /proc/<pid>/mounts.
# Fixes: 3-arg open on a lexical filehandle with an error check (the original
# used an unchecked 2-arg open on a bareword handle).
sub unmount_all {
    my ($vassal) = @_;
    my $mounts_file = '/proc/'.$vassal->{pid}.'/mounts';
    # If the process is already gone there is nothing to unmount.
    open my $mounts, '<', $mounts_file or return;
    while (my $line = <$mounts>) {
        my ($device, $dir) = split /\s+/, $line;
        if ($device =~ /\/dev\/loop\d+/) {
            # vassal id is "<uid>.ini"; strip the suffix to get the uid
            my $uid = $vassal->{id};
            $uid =~ s/\.ini$//;
            umount($uid, $dir);
        }
    }
    close($mounts);
}
# Decide whether a loopbox still needs to be mounted inside the container.
# Arguments: container pid, container uid, loop device id, backing file name,
# mount point, and the full list of loopboxes from the API.
# Returns 1 when the caller should run the mount command, 0 otherwise.
# Side effects: unmounts loop devices that are stale (no matching loopbox,
# deleted/too-small/shrunk backing file, or a pending resize).
# Fixes: 3-arg open on lexical filehandles (originals were unchecked 2-arg
# opens on bareword handles) and a lexical $file_sectors (was an implicit
# package global).
sub check_mountpoint {
    my ($pid, $uid, $id, $filename, $mountpoint, $loopboxes) = @_;
    my $ret = 1;
    # first pass: check if we need to umount devices whose loopbox is gone
    if (open my $mounts, '<', '/proc/'.$pid.'/mounts') {
        while (my $line = <$mounts>) {
            my ($device, $dir) = split /\s+/, $line;
            if ($device =~ /\/dev\/loop(\d+)/) {
                my $loop = $1;
                my $found = 0;
                foreach my $lb (@{$loopboxes}) {
                    if ($loop eq $lb->{id}) {
                        $found = 1;
                        last;
                    }
                }
                unless ($found) {
                    umount($uid, $dir);
                }
            }
        }
        close($mounts);
    }
    # second pass: validate the device backing this loopbox
    if (open my $mounts, '<', '/proc/'.$pid.'/mounts') {
        while (my $line = <$mounts>) {
            my ($device, $dir) = split /\s+/, $line;
            if ($device =~ /\/dev\/loop(\d+)/) {
                my $loop = $1;
                my $found = 0;
                foreach my $lb (@{$loopboxes}) {
                    if ($loop eq $lb->{id}) {
                        $found = 1;
                        last;
                    }
                }
                # exit in case of incongruence
                last unless ($found);
                if ($device eq '/dev/loop'.$id) {
                    # current size of the loop device, in 512-byte sectors
                    my $sectors = 0;
                    if (open my $sysfs, '<', '/sys/class/block/loop'.$id.'/size') {
                        $sectors = <$sysfs>;
                        close $sysfs;
                    }
                    my @st = stat('/containers/'.$uid.'/'.$filename);
                    # deleted file ?
                    unless (@st) {
                        umount($uid, $dir);
                        $ret = 0;
                        last;
                    }
                    # invalid file size ? (require at least 1 MB)
                    my $size = $st[7];
                    if ($size < (1024*1024)) {
                        umount($uid, $dir);
                        $ret = 0;
                        last;
                    }
                    # file decreased in size ?
                    my $file_sectors = $size/512;
                    if ($file_sectors < $sectors) {
                        umount($uid, $dir);
                        $ret = 0;
                        last;
                    }
                    # grown by at least 1 megabyte (2048 sectors) more ?
                    if ($file_sectors > $sectors + (2048)) {
                        umount($uid, $dir);
                        # calling resize2fs on a loopback device under a namespace
                        # make the kernel hang ... :( disable it for now
                        #my $cmd = '/etc/uwsgi/loopbox resize /containers/'.$uid.'/run/ns.socket /dev/loop'.$id.' /containers/'.$uid.'/'.$filename;
                        #print date().' running '.$cmd."\n";
                        #system($cmd);
                    }
                    # the loopbox is already mounted (or being remounted):
                    # tell the caller not to mount it again
                    $ret = 0;
                    last;
                }
            }
        }
        close($mounts);
    }
    return $ret;
}
# Run the loopbox helper to unmount $dir inside the container's mount
# namespace, appending the helper's output to the container's emperor log.
sub umount {
    my ($uid, $dir) = @_;
    my $ns_socket = '/containers/'.$uid.'/run/ns.socket';
    my $cmd = '/etc/uwsgi/loopbox umount '.$ns_socket.' '.$dir;
    print date().' running '.$cmd."\n";
    my $logfile = '/containers/'.$uid.'/logs/emperor.log';
    system($cmd.' >> '.$logfile);
}
| unbit/uwsgi.it | loopboxer.pl | Perl | mit | 5,724 |
%%% automatically generated by src/prolog/lib/verbnet2boxer.pl
%%%
:- module(verbnet,[verbnet/3,verbnet/4]).
:- use_module(boxer(slashes)).
%%% wrapper
%%% verbnet(?Verb, ?CCGCat, ?Roles): verbnet/4 with the VerbNet class id ignored.
%%%
verbnet(A,B,C):- verbnet(A,B,C,_).
%%% File: ext/VerbNet/accept-77.xml
%%% Primary: NP V how S (accept-77)
%%% Syntax: [np:Agent,v,s:Theme]
%%% CCG: (s:_\np)/s:_
%%% Roles: [Theme,Agent]
%%% Example: I accept how you do it.
%%%
verbnet(accept, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(understand, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(encourage, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(discourage, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(disprefer, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(reject, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(repent, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(rue, (s:_\np)/s:_, ['Theme','Agent'], [77]).
%%% File: ext/VerbNet/accept-77.xml
%%% Primary: NP V NP (accept-77)
%%% Syntax: [np:Agent,v,np:Theme]
%%% CCG: (s:_\np)/np
%%% Roles: [Theme,Agent]
%%% Example: I accepted it.
%%%
verbnet(accept, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(understand, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(encourage, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(discourage, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(disprefer, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(reject, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(repent, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(rue, (s:_\np)/np, ['Theme','Agent'], [77]).
%%% File: ext/VerbNet/accept-77.xml
%%% Primary: NP V S_ING (accept-77)
%%% Syntax: [np:Agent,v,np:Theme]
%%% CCG: (s:_\np)/np
%%% Roles: [Theme,Agent]
%%% Example: I accepted their writing novels.
%%%
verbnet(accept, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(understand, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(encourage, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(discourage, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(disprefer, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(reject, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(repent, (s:_\np)/np, ['Theme','Agent'], [77]).
verbnet(rue, (s:_\np)/np, ['Theme','Agent'], [77]).
%%% File: ext/VerbNet/accept-77.xml
%%% Primary: NP V that S (accept-77)
%%% Syntax: [np:Agent,v,s:Theme]
%%% CCG: (s:_\np)/s:_
%%% Roles: [Theme,Agent]
%%% Example: I accepted that they wrote novels.
%%%
verbnet(accept, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(understand, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(encourage, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(discourage, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(disprefer, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(reject, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(repent, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(rue, (s:_\np)/s:_, ['Theme','Agent'], [77]).
%%% File: ext/VerbNet/accept-77.xml
%%% Primary: NP V what S (accept-77)
%%% Syntax: [np:Agent,v,s:Theme]
%%% CCG: (s:_\np)/s:_
%%% Roles: [Theme,Agent]
%%% Example: I accepted what they were doing.
%%%
verbnet(accept, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(understand, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(encourage, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(discourage, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(disprefer, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(reject, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(repent, (s:_\np)/s:_, ['Theme','Agent'], [77]).
verbnet(rue, (s:_\np)/s:_, ['Theme','Agent'], [77]).
%%% File: ext/VerbNet/accompany-51.7.xml
%%% Primary: NP V NP (accompany-51.7)
%%% Syntax: [np:Agent,v,np:Theme]
%%% CCG: (s:_\np)/np
%%% Roles: [Theme,Agent]
%%% Example: Jackie accompanied Rose.
%%%
verbnet(accompany, (s:_\np)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(conduct, (s:_\np)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(escort, (s:_\np)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(guide, (s:_\np)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(lead, (s:_\np)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(misdirect, (s:_\np)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(shepherd, (s:_\np)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(steer, (s:_\np)/np, ['Theme','Agent'], [51,'.',7]).
%%% File: ext/VerbNet/accompany-51.7.xml
%%% Primary: NP V NP PP.destination (accompany-51.7)
%%% Syntax: [np:Agent,v,np:Theme,pp,np:Destination]
%%% CCG: ((s:_\np)/pp)/np
%%% Roles: [Theme,Agent]
%%% Example: Jackie accompanied Rose to the store.
%%%
verbnet(accompany, ((s:_\np)/pp)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(conduct, ((s:_\np)/pp)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(escort, ((s:_\np)/pp)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(guide, ((s:_\np)/pp)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(lead, ((s:_\np)/pp)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(misdirect, ((s:_\np)/pp)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(shepherd, ((s:_\np)/pp)/np, ['Theme','Agent'], [51,'.',7]).
verbnet(steer, ((s:_\np)/pp)/np, ['Theme','Agent'], [51,'.',7]).
%%% File: ext/VerbNet/acquiesce-95.xml
%%% Primary: NP V PP.co-agent (acquiesce-95)
%%% Syntax: [np:Agent,v,prep:to,np:Co-Agent]
%%% CCG: (s:_\np)/pp
%%% Roles: [Agent]
%%% Example: The enemy soldiers submitted to us.
%%%
verbnet(acquiesce, (s:_\np)/pp, ['Agent'], [95]).
verbnet(submit, (s:_\np)/pp, ['Agent'], [95]).
verbnet(give_in, (s:_\np)/pp, ['Agent'], [95]).
verbnet(bow, (s:_\np)/pp, ['Agent'], [95]).
verbnet(defer, (s:_\np)/pp, ['Agent'], [95]).
verbnet(accede, (s:_\np)/pp, ['Agent'], [95]).
verbnet(succumb, (s:_\np)/pp, ['Agent'], [95]).
verbnet(yield, (s:_\np)/pp, ['Agent'], [95]).
verbnet(capitulate, (s:_\np)/pp, ['Agent'], [95]).
verbnet(to, pp/np, ['Agent'], [95]).
%%% File: ext/VerbNet/acquiesce-95.xml
%%% Primary: NP V S_INF (acquiesce-95)
%%% Syntax: [np:Agent,v,vp_to:Theme]
%%% CCG: (s:_\np)/ (s:ng\np)
%%% Roles: [Theme,Agent]
%%% Example: The king acquiesced to be in the same room with the paupers
%%%
verbnet(acquiesce, (s:_\np)/ (s:ng\np), ['Theme','Agent'], [95]).
verbnet(submit, (s:_\np)/ (s:ng\np), ['Theme','Agent'], [95]).
verbnet(give_in, (s:_\np)/ (s:ng\np), ['Theme','Agent'], [95]).
verbnet(bow, (s:_\np)/ (s:ng\np), ['Theme','Agent'], [95]).
verbnet(defer, (s:_\np)/ (s:ng\np), ['Theme','Agent'], [95]).
verbnet(accede, (s:_\np)/ (s:ng\np), ['Theme','Agent'], [95]).
verbnet(succumb, (s:_\np)/ (s:ng\np), ['Theme','Agent'], [95]).
verbnet(yield, (s:_\np)/ (s:ng\np), ['Theme','Agent'], [95]).
verbnet(capitulate, (s:_\np)/ (s:ng\np), ['Theme','Agent'], [95]).
%%% File: ext/VerbNet/acquiesce-95.xml
%%% Primary: NP V PP.theme (acquiesce-95)
%%% Syntax: [np:Agent,v,prep:to,np:Theme]
%%% CCG: (s:_\np)/pp
%%% Roles: [Agent]
%%% Example: The enemy soldiers submitted to demands.
%%%
verbnet(acquiesce, (s:_\np)/pp, ['Agent'], [95]).
verbnet(submit, (s:_\np)/pp, ['Agent'], [95]).
verbnet(give_in, (s:_\np)/pp, ['Agent'], [95]).
verbnet(bow, (s:_\np)/pp, ['Agent'], [95]).
verbnet(defer, (s:_\np)/pp, ['Agent'], [95]).
verbnet(accede, (s:_\np)/pp, ['Agent'], [95]).
verbnet(succumb, (s:_\np)/pp, ['Agent'], [95]).
verbnet(yield, (s:_\np)/pp, ['Agent'], [95]).
verbnet(capitulate, (s:_\np)/pp, ['Agent'], [95]).
verbnet(to, pp/np, ['Theme'], [95]).
%%% File: ext/VerbNet/addict-96.xml
%%% Primary: NP V NP PP.stimulus (addict-96)
%%% Syntax: [np:Agent,v,np:Patient,prep:to,np:Stimulus]
%%% CCG: ((s:_\np)/pp)/np
%%% Roles: [Patient,Agent]
%%% Example: I addicted him to Douglas Adams.
%%%
verbnet(addict, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(bias, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(dispose, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(incline, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(predispose, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(woo, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(slant, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(to, pp/np, ['Stimulus'], [96]).
%%% File: ext/VerbNet/addict-96.xml
%%% Primary: NP V NP S_ING (addict-96)
%%% Syntax: [np:Agent,v,np:Patient,prep:to,np:Stimulus]
%%% CCG: ((s:_\np)/pp)/np
%%% Roles: [Patient,Agent]
%%% Example: Mary addicted him to going on long journeys.
%%%
verbnet(addict, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(bias, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(dispose, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(incline, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(predispose, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(woo, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(slant, ((s:_\np)/pp)/np, ['Patient','Agent'], [96]).
verbnet(to, pp/np, ['Stimulus'], [96]).
%%% File: ext/VerbNet/adjust-26.9.xml
%%% Primary: NP V NP (adjust-26.9)
%%% Syntax: [np:Agent,v,np:Patient]
%%% CCG: (s:_\np)/np
%%% Roles: [Patient,Agent]
%%% Example: He adapted himself.
%%%
verbnet(adapt, (s:_\np)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(accommodate, (s:_\np)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(conform, (s:_\np)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(adjust, (s:_\np)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(gear, (s:_\np)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(fit, (s:_\np)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(readjust, (s:_\np)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(readapt, (s:_\np)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(assimilate, (s:_\np)/np, ['Patient','Agent'], [26,'.',9]).
%%% File: ext/VerbNet/adjust-26.9.xml
%%% Primary: NP V NP PP (adjust-26.9)
%%% Syntax: [np:Agent,v,np:Patient,prep:to,np:Goal]
%%% CCG: ((s:_\np)/pp)/np
%%% Roles: [Patient,Agent]
%%% Example: He adapted himself to the situation.
%%%
verbnet(adapt, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(accommodate, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(conform, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(adjust, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(gear, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(fit, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(readjust, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(readapt, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(assimilate, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
%%% fix: class id list used an unquoted '.' (syntax error; every other clause quotes it)
verbnet(to, pp/np, ['Goal'], [26,'.',9]).
%%% File: ext/VerbNet/adjust-26.9.xml
%%% Primary: NP V NP S_ING (adjust-26.9)
%%% Syntax: [np:Agent,v,np:Patient,prep:to,vp_ng:Goal]
%%% CCG: ((s:_\np)/pp)/np
%%% Roles: [Patient,Agent]
%%% Example: He adapted himself to waking up early.
%%%
verbnet(adapt, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(accommodate, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(conform, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(adjust, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(gear, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(fit, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(readjust, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(readapt, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
verbnet(assimilate, ((s:_\np)/pp)/np, ['Patient','Agent'], [26,'.',9]).
%%% fix: class id list used an unquoted '.' (syntax error; every other clause quotes it)
verbnet(to, pp/(s:ng\np), ['Goal'], [26,'.',9]).
%%% File: ext/VerbNet/adjust-26.9.xml
%%% Primary: NP V S_ING (adjust-26.9)
%%% Syntax: [np:Agent,v,prep:to,np:Goal]
%%% CCG: (s:_\np)/pp
%%% Roles: [Agent]
%%% Example: He adapted to waking up early.
%%%
verbnet(adapt, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(accommodate, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(conform, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(adjust, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(gear, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(fit, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(readjust, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(readapt, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(assimilate, (s:_\np)/pp, ['Agent'], [26,'.',9]).
%%% fix: class id list used an unquoted '.' (syntax error; every other clause quotes it)
verbnet(to, pp/np, ['Goal'], [26,'.',9]).
%%% File: ext/VerbNet/adjust-26.9.xml
%%% Primary: NP V PP (adjust-26.9)
%%% Syntax: [np:Agent,v,prep:to,np:Goal]
%%% CCG: (s:_\np)/pp
%%% Roles: [Agent]
%%% Example: He adapted to the situation.
%%%
verbnet(adapt, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(accommodate, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(conform, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(adjust, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(gear, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(fit, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(readjust, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(readapt, (s:_\np)/pp, ['Agent'], [26,'.',9]).
verbnet(assimilate, (s:_\np)/pp, ['Agent'], [26,'.',9]).
%%% fix: class id list used an unquoted '.' (syntax error; every other clause quotes it)
verbnet(to, pp/np, ['Goal'], [26,'.',9]).
%%% File: ext/VerbNet/admire-31.2.xml
%%% Primary: NP V S_INF (admire-31.2-1)
%%% Syntax: [np:Experiencer,v,vp_to:Stimulus]
%%% CCG: (s:_\np)/ (s:ng\np)
%%% Roles: [Stimulus,Experiencer]
%%% Example: I loved to write.
%%%
verbnet(despise, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2,-,1]).
verbnet(detest, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2,-,1]).
verbnet(disdain, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2,-,1]).
verbnet(dislike, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2,-,1]).
verbnet(regret, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2,-,1]).
verbnet(like, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2,-,1]).
verbnet(love, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2,-,1]).
verbnet(enjoy, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2,-,1]).
verbnet(fear, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2,-,1]).
verbnet(hate, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2,-,1]).
%%% File: ext/VerbNet/admire-31.2.xml
%%% Primary: NP V NP (admire-31.2)
%%% Syntax: [np:Experiencer,v,np:Stimulus]
%%% CCG: (s:_\np)/np
%%% Roles: [Stimulus,Experiencer]
%%% Example: The tourists admired the paintings.
%%%
verbnet(despise, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(detest, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(disdain, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(dislike, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(regret, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(like, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(love, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(enjoy, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(fear, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(hate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(admire, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(adore, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(appreciate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(applaud, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(bear, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(bewail, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(cherish, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(deify, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(disbelieve, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(esteem, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(exalt, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(fancy, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(favor, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(grudge, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(idolize, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(miss, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(prefer, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(affirm, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(reaffirm, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(prize, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(respect, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(relish, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(revere, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(savor, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(stand, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(support, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(tolerate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(treasure, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(trust, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(value, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(venerate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(worship, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(abhor, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(deplore, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(distrust, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(dread, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(envy, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(execrate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(lament, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(loathe, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(mourn, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(pity, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(resent, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(rue, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(believe, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(suffer, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
%%% File: ext/VerbNet/admire-31.2.xml
%%% Primary: NP V NP PP.attribute (admire-31.2)
%%% Syntax: [np:Experiencer,v,np:Stimulus,prep:for,np:Attribute]
%%% CCG: ((s:_\np)/pp)/np
%%% Roles: [Stimulus,Experiencer]
%%% Example: I admired him for his honesty.
%%%
verbnet(despise, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(detest, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(disdain, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(dislike, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(regret, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(like, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(love, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(enjoy, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(fear, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(hate, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(admire, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(adore, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(appreciate, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(applaud, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(bear, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(bewail, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(cherish, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(deify, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(disbelieve, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(esteem, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(exalt, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(fancy, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(favor, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(grudge, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(idolize, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(miss, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(prefer, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(affirm, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(reaffirm, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(prize, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(respect, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(relish, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(revere, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(savor, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(stand, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(support, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(tolerate, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(treasure, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(trust, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(value, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(venerate, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(worship, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(abhor, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(deplore, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(distrust, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(dread, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(envy, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(execrate, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(lament, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(loathe, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(mourn, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(pity, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(resent, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(rue, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(believe, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(suffer, ((s:_\np)/pp)/np, ['Stimulus','Experiencer'], [31,'.',2]).
%%% fix: class id list used an unquoted '.' (syntax error; every other clause quotes it)
verbnet(for, pp/np, ['Attribute'], [31,'.',2]).
%%% File: ext/VerbNet/admire-31.2.xml
%%% Primary: NP V NP.stimulus (admire-31.2)
%%% Syntax: [np:Experiencer,v,np:Stimulus]
%%% CCG: (s:_\np)/np
%%% Roles: [Stimulus,Experiencer]
%%% Example: I admired the honesty in him.
%%%
verbnet(despise, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(detest, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(disdain, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(dislike, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(regret, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(like, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(love, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(enjoy, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(fear, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(hate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(admire, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(adore, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(appreciate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(applaud, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(bear, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(bewail, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(cherish, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(deify, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(disbelieve, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(esteem, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(exalt, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(fancy, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(favor, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(grudge, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(idolize, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(miss, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(prefer, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(affirm, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(reaffirm, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(prize, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(respect, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(relish, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(revere, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(savor, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(stand, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(support, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(tolerate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(treasure, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(trust, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(value, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(venerate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(worship, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(abhor, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(deplore, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(distrust, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(dread, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(envy, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(execrate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(lament, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(loathe, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(mourn, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(pity, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(resent, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(rue, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(believe, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(suffer, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
%%% File: ext/VerbNet/admire-31.2.xml
%%% Primary: NP V that S (admire-31.2)
%%% Syntax: [np:Experiencer,v,s:Stimulus]
%%% CCG: (s:_\np)/s:_
%%% Roles: [Stimulus,Experiencer]
%%% Example: The children liked that the clown had a red nose.
%%%
verbnet(despise, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(detest, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(disdain, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(dislike, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(regret, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(like, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(love, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(enjoy, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(fear, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(hate, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(admire, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(adore, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(appreciate, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(applaud, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(bear, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(bewail, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(cherish, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(deify, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(disbelieve, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(esteem, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(exalt, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(fancy, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(favor, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(grudge, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(idolize, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(miss, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(prefer, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(affirm, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(reaffirm, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(prize, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(respect, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(relish, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(revere, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(savor, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(stand, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(support, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(tolerate, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(treasure, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(trust, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(value, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(venerate, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(worship, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(abhor, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(deplore, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(distrust, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(dread, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(envy, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(execrate, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(lament, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(loathe, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(mourn, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(pity, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(resent, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(rue, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(believe, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(suffer, (s:_\np)/s:_, ['Stimulus','Experiencer'], [31,'.',2]).
%%% File: ext/VerbNet/admire-31.2.xml
%%% Primary: NP V S_ING (admire-31.2)
%%% Syntax: [np:Experiencer,v,vp_ng:Stimulus]
%%% CCG: (s:_\np)/ (s:ng\np)
%%% Roles: [Stimulus,Experiencer]
%%% Example: I loved writing.
%%%
verbnet(despise, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(detest, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(disdain, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(dislike, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(regret, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(like, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(love, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(enjoy, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(fear, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(hate, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(admire, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(adore, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(appreciate, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(applaud, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(bear, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(bewail, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(cherish, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(deify, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(disbelieve, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(esteem, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(exalt, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(fancy, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(favor, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(grudge, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(idolize, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(miss, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(prefer, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(affirm, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(reaffirm, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(prize, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(respect, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(relish, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(revere, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(savor, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(stand, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(support, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(tolerate, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(treasure, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(trust, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(value, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(venerate, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(worship, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(abhor, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(deplore, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(distrust, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(dread, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(envy, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(execrate, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(lament, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(loathe, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(mourn, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(pity, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(resent, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(rue, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(believe, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(suffer, (s:_\np)/ (s:ng\np), ['Stimulus','Experiencer'], [31,'.',2]).
%%% File: ext/VerbNet/admire-31.2.xml
%%% Primary: NP V NP S_ING (admire-31.2)
%%% Syntax: [np:Experiencer,v,np:Stimulus]
%%% CCG: (s:_\np)/np
%%% Roles: [Stimulus,Experiencer]
%%% Example: I loved him writing novels.
%%%
verbnet(despise, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(detest, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(disdain, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(dislike, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(regret, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(like, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(love, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(enjoy, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(fear, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(hate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(admire, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(adore, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(appreciate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(applaud, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(bear, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(bewail, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(cherish, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(deify, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(disbelieve, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(esteem, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(exalt, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(fancy, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(favor, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(grudge, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(idolize, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(miss, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(prefer, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(affirm, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(reaffirm, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(prize, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(respect, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(relish, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(revere, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(savor, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(stand, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(support, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(tolerate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(treasure, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(trust, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(value, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(venerate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(worship, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(abhor, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(deplore, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(distrust, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(dread, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(envy, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(execrate, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(lament, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(loathe, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(mourn, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(pity, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(resent, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(rue, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(believe, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
verbnet(suffer, (s:_\np)/np, ['Stimulus','Experiencer'], [31,'.',2]).
%%% File: ext/VerbNet/admit-65.xml
%%% Primary: NP V NP (admit-65)
%%% Syntax: [np:Agent,v,np:Theme]
%%% CCG: (s:_\np)/np
%%% Roles: [Theme,Agent]
%%% Example: She admitted us.
%%%
verbnet(admit, (s:_\np)/np, ['Theme','Agent'], [65]).
verbnet(allow, (s:_\np)/np, ['Theme','Agent'], [65]).
verbnet(exclude, (s:_\np)/np, ['Theme','Agent'], [65]).
verbnet(include, (s:_\np)/np, ['Theme','Agent'], [65]).
verbnet(permit, (s:_\np)/np, ['Theme','Agent'], [65]).
verbnet(welcome, (s:_\np)/np, ['Theme','Agent'], [65]).
%%% File: ext/VerbNet/admit-65.xml
%%% Primary: NP V NP ADV (admit-65)
%%% Syntax: [np:Agent,v,np:Theme,np:Location]
%%% CCG: ((s:_\np)/np)/np
%%% Roles: [Theme,Location,Agent]
%%% Example: She admitted us here.
%%%
verbnet(admit, ((s:_\np)/np)/np, ['Theme','Location','Agent'], [65]).
verbnet(allow, ((s:_\np)/np)/np, ['Theme','Location','Agent'], [65]).
verbnet(exclude, ((s:_\np)/np)/np, ['Theme','Location','Agent'], [65]).
verbnet(include, ((s:_\np)/np)/np, ['Theme','Location','Agent'], [65]).
verbnet(permit, ((s:_\np)/np)/np, ['Theme','Location','Agent'], [65]).
verbnet(welcome, ((s:_\np)/np)/np, ['Theme','Location','Agent'], [65]).
%%% File: ext/VerbNet/admit-65.xml
%%% Primary: NP V NP PP.location (admit-65)
%%% Syntax: [np:Agent,v,np:Theme,pp,np:Location]
%%% CCG: ((s:_\np)/pp)/np
%%% Roles: [Theme,Agent]
%%% Example: She allowed us near the house.
%%%
verbnet(admit, ((s:_\np)/pp)/np, ['Theme','Agent'], [65]).
verbnet(allow, ((s:_\np)/pp)/np, ['Theme','Agent'], [65]).
verbnet(exclude, ((s:_\np)/pp)/np, ['Theme','Agent'], [65]).
verbnet(include, ((s:_\np)/pp)/np, ['Theme','Agent'], [65]).
verbnet(permit, ((s:_\np)/pp)/np, ['Theme','Agent'], [65]).
verbnet(welcome, ((s:_\np)/pp)/np, ['Theme','Agent'], [65]).
%%% File: ext/VerbNet/adopt-93.xml
%%% Primary: NP V NP (adopt-93)
%%% Syntax: [np:Agent,v,np:Theme]
%%% CCG: (s:_\np)/np
%%% Roles: [Theme,Agent]
%%% Example: Soon, the new President will assume office.
%%%
verbnet(assume, (s:_\np)/np, ['Theme','Agent'], [93]).
verbnet(adopt, (s:_\np)/np, ['Theme','Agent'], [93]).
verbnet(take_over, (s:_\np)/np, ['Theme','Agent'], [93]).
verbnet(take_on, (s:_\np)/np, ['Theme','Agent'], [93]).
%%% File: ext/VerbNet/advise-37.9.xml
%%% Primary: NP V PP.topic (advise-37.9-1)
%%% Syntax: [np:Agent,v,prep:against about concerning on regarding respecting,np:Topic]
%%% CCG: (s:_\np)/pp
%%% Roles: [Agent]
%%% Example: Ellen warned against skating on thin ice.
%%%
verbnet(admonish, (s:_\np)/pp, ['Agent'], [37,'.',9,-,1]).
verbnet(advise, (s:_\np)/pp, ['Agent'], [37,'.',9,-,1]).
verbnet(caution, (s:_\np)/pp, ['Agent'], [37,'.',9,-,1]).
verbnet(counsel, (s:_\np)/pp, ['Agent'], [37,'.',9,-,1]).
verbnet(instruct, (s:_\np)/pp, ['Agent'], [37,'.',9,-,1]).
verbnet(warn, (s:_\np)/pp, ['Agent'], [37,'.',9,-,1]).
% Preposition entries for the advise-37.9-1 PP.topic frame.
% The class-id atom '.' must be quoted: a bare `.` inside a list is
% unportable Prolog (it can be tokenized as an end-of-clause marker)
% and is inconsistent with every other [37,'.',9,...] list in this file.
verbnet(against, pp/np, ['Topic'], [37,'.',9,-,1]).
verbnet(about, pp/np, ['Topic'], [37,'.',9,-,1]).
verbnet(concerning, pp/np, ['Topic'], [37,'.',9,-,1]).
verbnet(on, pp/np, ['Topic'], [37,'.',9,-,1]).
verbnet(regarding, pp/np, ['Topic'], [37,'.',9,-,1]).
verbnet(respecting, pp/np, ['Topic'], [37,'.',9,-,1]).
%%% File: ext/VerbNet/advise-37.9.xml
%%% Primary: NP V that S (advise-37.9-1)
%%% Syntax: [np:Agent,v,s:Topic]
%%% CCG: (s:_\np)/s:_
%%% Roles: [Topic,Agent]
%%% Example: Ellen warned that the party would be tonight.
%%%
verbnet(admonish, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(advise, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(caution, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(counsel, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(instruct, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(warn, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
%%% File: ext/VerbNet/advise-37.9.xml
%%% Primary: NP V how S_INF (advise-37.9-1)
%%% Syntax: [np:Agent,v,s:Topic]
%%% CCG: (s:_\np)/s:_
%%% Roles: [Topic,Agent]
%%% Example: Ellen warned how to avoid the crowd.
%%%
verbnet(admonish, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(advise, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(caution, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(counsel, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(instruct, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(warn, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
%%% File: ext/VerbNet/advise-37.9.xml
%%% Primary: NP V S-Quote (advise-37.9-1)
%%% Syntax: [np:Agent,v,s:Topic]
%%% CCG: (s:_\np)/s:_
%%% Roles: [Topic,Agent]
%%% Example: Ellen warned, 'Avoid that hole in the sidewalk.'
%%%
verbnet(admonish, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(advise, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(caution, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(counsel, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(instruct, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
verbnet(warn, (s:_\np)/s:_, ['Topic','Agent'], [37,'.',9,-,1]).
%%% File: ext/VerbNet/advise-37.9.xml
%%% Primary: NP V NP.recipient (advise-37.9)
%%% Syntax: [np:Agent,v,np:Recipient]
%%% CCG: (s:_\np)/np
%%% Roles: [Recipient,Agent]
%%% Example: Ellen alerted Helen.
%%%
verbnet(admonish, (s:_\np)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(advise, (s:_\np)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(caution, (s:_\np)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(counsel, (s:_\np)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(instruct, (s:_\np)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(warn, (s:_\np)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(alert, (s:_\np)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(assure, (s:_\np)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(brief, (s:_\np)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(encourage, (s:_\np)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(inform, (s:_\np)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(notify, (s:_\np)/np, ['Recipient','Agent'], [37,'.',9]).
%%% File: ext/VerbNet/advise-37.9.xml
%%% Primary: NP V NP PP.topic (advise-37.9)
%%% Syntax: [np:Agent,v,np:Recipient,prep:against about concerning on regarding respecting,np:Topic]
%%% CCG: ((s:_\np)/pp)/np
%%% Roles: [Recipient,Agent]
%%% Example: Ellen warned Helen against skating on thin ice.
%%%
verbnet(admonish, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(advise, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(caution, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(counsel, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(instruct, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(warn, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(alert, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(assure, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(brief, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(encourage, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(inform, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(notify, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
% Preposition entries for the advise-37.9 NP V NP PP.topic frame.
% Quote '.' in the class-id list: bare `.` is unportable and inconsistent
% with the [37,'.',9] form used by the verb facts above.
verbnet(against, pp/np, ['Topic'], [37,'.',9]).
verbnet(about, pp/np, ['Topic'], [37,'.',9]).
verbnet(concerning, pp/np, ['Topic'], [37,'.',9]).
verbnet(on, pp/np, ['Topic'], [37,'.',9]).
verbnet(regarding, pp/np, ['Topic'], [37,'.',9]).
verbnet(respecting, pp/np, ['Topic'], [37,'.',9]).
%%% File: ext/VerbNet/advise-37.9.xml
%%% Primary: NP V NP.recipient that S (advise-37.9)
%%% Syntax: [np:Agent,v,np:Recipient,s:Topic]
%%% CCG: ((s:_\np)/s:_)/np
%%% Roles: [Recipient,Topic,Agent]
%%% Example: Ellen warned Helen that the party would be tonight.
%%%
verbnet(admonish, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(advise, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(caution, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(counsel, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(instruct, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(warn, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(alert, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(assure, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(brief, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(encourage, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(inform, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(notify, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
%%% File: ext/VerbNet/advise-37.9.xml
%%% Primary: NP V NP how S_INF (advise-37.9)
%%% Syntax: [np:Agent,v,np:Recipient,s:Topic]
%%% CCG: ((s:_\np)/s:_)/np
%%% Roles: [Recipient,Topic,Agent]
%%% Example: Ellen warned Helen how to avoid the crowd.
%%%
verbnet(admonish, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(advise, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(caution, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(counsel, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(instruct, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(warn, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(alert, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(assure, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(brief, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(encourage, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(inform, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(notify, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
%%% File: ext/VerbNet/advise-37.9.xml
%%% Primary: NP V NP S-Quote (advise-37.9)
%%% Syntax: [np:Agent,v,np:Recipient,s:Topic]
%%% CCG: ((s:_\np)/s:_)/np
%%% Roles: [Recipient,Topic,Agent]
%%% Example: Ellen warned Helen, 'Avoid that hole in the sidewalk.'
%%%
verbnet(admonish, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(advise, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(caution, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(counsel, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(instruct, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(warn, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(alert, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(assure, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(brief, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(encourage, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(inform, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
verbnet(notify, ((s:_\np)/s:_)/np, ['Recipient','Topic','Agent'], [37,'.',9]).
%%% File: ext/VerbNet/advise-37.9.xml
%%% Primary: NP V NP PP.topic (advise-37.9)
%%% Syntax: [np:Agent,v,np:Recipient,prep:of,np:Topic]
%%% CCG: ((s:_\np)/pp)/np
%%% Roles: [Recipient,Agent]
%%% Example: My accountant warned me of the new loopholes in the tax code.
%%%
verbnet(admonish, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(advise, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(caution, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(counsel, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(instruct, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(warn, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(alert, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(assure, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(brief, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(encourage, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(inform, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
verbnet(notify, ((s:_\np)/pp)/np, ['Recipient','Agent'], [37,'.',9]).
% Quote '.' in the class-id list (consistency/portability; see the
% quoted [37,'.',9] lists used throughout this file).
verbnet(of, pp/np, ['Topic'], [37,'.',9]).
:- include('verbnet_candc_boxer.nldata').
%%% File: ext/VerbNet/allow-64.xml
| TeamSPoon/logicmoo_workspace | packs_sys/logicmoo_nlu/ext/pldata/verbnet_candc_boxer.pl | Perl | mit | 52,014 |
#!/usr/bin/perl -w
# Split an MSTmap genotype input file into per-chromosome input files
# (chromosomes 1A..7D -- presumably wheat A/B/D subgenomes; confirm).
# A marker is assigned to a chromosome either via the linkage group it
# fell into in a previous MSTmap run, or -- if ungrouped -- via a
# precomputed SNP-to-chromosome mapping (path suggests BLAT-derived).
use strict;
my $sUsage = qq(
perl $0
</home/swang/90K_assay_design/final/blat_suvey_sequence/90K_SNP_mapped_chromosomes.out>
<synop_linkgrp_chr_summary_checked>
<mstmap_input_file>
<mstmap_output_file>
);
die $sUsage unless @ARGV == 4;
my ($snp_mapped_chr_file, $linkgrp_summary_file, $mstmap_input_file, $mstmap_output_file) = @ARGV;
# Chromosome names 1A,1B,1D, 2A,... 7D.
my @chrs = map{$_."A", $_."B", $_."D"}1..7;
my %snp_chr = read_snp_mapped_chr_file($snp_mapped_chr_file); # SNP id -> chromosome
my %linkgrp_chr = read_linkgrp_summary_file($linkgrp_summary_file); # linkage group -> chromosome
my %snp_linkgrp = read_mstmap_output($mstmap_output_file); # SNP id -> linkage group
my ($header, $genotype_ref) = read_mstmap_input($mstmap_input_file);
# output
foreach my $chr (@chrs){
my $output = $mstmap_input_file . "_chr_" . $chr;
my @snps_belong_to_chr = ();
my %extracted;
# Pass 1: markers whose linkage group maps to this chromosome.
foreach my $c (keys %snp_linkgrp){
my $grp = $snp_linkgrp{$c};
next unless exists $linkgrp_chr{$grp} ;
if ($linkgrp_chr{$grp} eq $chr){
push @snps_belong_to_chr, $c;
$extracted{$c} = 1;
}
}
# Pass 2: ungrouped markers placed on this chromosome by the SNP map,
# kept only if they actually have genotype data in the MSTmap input.
foreach my $c (keys %snp_chr){
next if exists $extracted{$c};
next if exists $snp_linkgrp{$c};
push @snps_belong_to_chr, $c if $snp_chr{$c} eq $chr and exists $genotype_ref->{$c};
}
open (OUT, ">$output") or die $!;
my $num_loci = scalar @snps_belong_to_chr;
# Patch the copied MSTmap header: disable the p-value cutoff and
# rewrite the locus count to match the markers selected above.
$header =~ s/cut_off_p_value\s\S+/cut_off_p_value 1/;
$header =~ s/number_of_loci \d+/number_of_loci $num_loci/;
print STDERR "$chr\t$num_loci\t$1\n" if $header =~ /(number_of_loci \d+)/;
print OUT $header, "\n";
my %snps = map{$_, 1}@snps_belong_to_chr;
foreach my $c (keys %$genotype_ref){
print OUT $genotype_ref->{$c}, "\n" if exists $snps{$c};
}
close OUT;
}
# Subroutines
# Read a whitespace-separated table and map column 1 (SNP id) to
# column 2 (chromosome name).  Later columns, if any, are ignored.
# Returns the mapping as a flat hash.
sub read_snp_mapped_chr_file
{
    my $file = shift or die "read_snp_mapped_chr_file: no file name given\n";
    my %return;
    # Lexical handle + three-arg open (the original bareword/2-arg form
    # is unsafe if the file name ever starts with a mode character).
    open (my $fh, '<', $file) or die "Cannot open $file: $!";
    while (<$fh>){
        chomp;
        my @t = split /\s+/, $_;
        $return{$t[0]} = $t[1];
    }
    close $fh;
    return %return;
}
# Read the linkage-group summary table: the first whitespace-separated
# field of each line is a linkage-group id, the last field is the
# chromosome it was assigned to.  Returns group -> chromosome as a hash.
sub read_linkgrp_summary_file
{
    my $file = shift or die;
    my %group_chr;
    open (my $in, '<', $file) or die $!;
    while (my $line = <$in>) {
        chomp $line;
        my @fields = split /\s+/, $line;
        $group_chr{ $fields[0] } = $fields[-1];
    }
    close $in;
    return %group_chr;
}
# Parse an MSTmap result file into a marker -> linkage-group hash.
# Lines containing ';' are MSTmap comments and are skipped; a line
# matching "group <name>" starts a new linkage group; any other
# non-blank line contributes its first field as a marker id.
# NOTE(review): markers appearing before any "group" header are stored
# with an undef group, exactly as in the original code.
sub read_mstmap_output
{
    my $file = shift or die "read_mstmap_output: no file name given\n";
    my %return;
    # Lexical handle + three-arg open instead of bareword/2-arg open.
    open (my $fh, '<', $file) or die "Cannot open $file: $!";
    my $group;
    while (<$fh>){
        chomp;
        next if /\;/;       # MSTmap comment line
        next unless /\S/;   # blank line
        if (/group (\S+)/){$group = $1; next}
        if(/^(\S+)/){$return{$1} = $group}
    }
    close $fh;
    return %return;
}
# Parse an MSTmap input file.  Header lines (everything up to and
# including the line containing "locus_name") are collected verbatim;
# each following line is a genotype row keyed by its first field
# (the marker name).  Returns (header-as-one-string, \%genotype).
sub read_mstmap_input
{
    my $file = shift or die "read_mstmap_input: no file name given\n";
    my(@header, %genotype);
    open (my $fh, '<', $file) or die "Cannot open $file: $!";
    my $in_header = 1;
    while (my $line = <$fh>){
        chomp $line;
        if ($in_header){
            push @header, $line;
            $in_header = 0 if $line =~ /locus_name/;
        }
        else {
            # The original `my $marker = $1 if /^(\S+)/;` is the
            # `my`-with-statement-modifier construct, whose behavior is
            # undefined and which leaves $1 stale on non-matching lines.
            # Match explicitly and skip lines with no marker field.
            next unless $line =~ /^(\S+)/;
            $genotype{$1} = $line;
        }
    }
    close $fh;
    return (join("\n", @header), \%genotype);
}
| swang8/Perl_scripts_misc | get_snps_for_linkage_groups.pl | Perl | mit | 2,727 |
#!/usr/bin/perl
# Advent of Code 2015, day 13 part 2: maximise total happiness around a
# circular table after seating yourself ("KenMGJ") with zero happiness
# in both directions next to every guest.
use strict;
use warnings;
use List::Permutor;
my $happiness = {};
# Parse "A would gain|lose N happiness units by sitting next to B."
while (<>) {
    chomp;
    if ($_ =~ /^(.+) would (gain|lose) (\d+) happiness units by sitting next to (.+)\.$/) {
        $happiness->{$1}->{$4} = ($2 eq 'lose') ? -1 * $3 : int $3;
    }
}
# Add yourself with zero happiness both ways.  `keys` snapshots the key
# list first, so inserting the KenMGJ key inside the loop is safe.
for my $guest (keys %{$happiness}) {
    $happiness->{KenMGJ}->{$guest} = 0;
    $happiness->{$guest}->{KenMGJ} = 0;
}
my $best_score = 0;
my @best_arr;
# Direct method call; the original `new List::Permutor ...` used the
# fragile indirect-object syntax, which modern Perl deprecates.
my $perm = List::Permutor->new(keys %{$happiness});
while (my @set = $perm->next) {
    my $scores = 0;
    # Each seat contributes happiness toward both circular neighbours.
    for my $i (0..scalar(@set) - 1) {
        my $h = ($i == 0) ? scalar(@set) - 1 : $i - 1;
        my $j = ($i == scalar(@set) - 1) ? 0 : $i + 1;
        $scores += $happiness->{$set[$i]}->{$set[$h]} + $happiness->{$set[$i]}->{$set[$j]};
    }
    if ($scores > $best_score) {
        $best_score = $scores;
        @best_arr = @set;
    }
}
print join(', ', @best_arr), ' : ', $best_score, "\n";
| KenMGJ/advent-of-code | 2015/13/day13-2.pl | Perl | mit | 939 |
###########################################################################
#
# This file is auto-generated by the Perl DateTime Suite time locale
# generator (0.04). This code generator comes with the
# DateTime::Locale distribution in the tools/ directory, and is called
# generate_from_cldr.
#
# This file as generated from the CLDR XML locale data. See the
# LICENSE.cldr file included in this distribution for license details.
#
# This file was generated from the source file es_ES.xml.
# The source file version number was 1.50, generated on
# 2007/07/21 21:12:28.
#
# Do not edit this file directly.
#
###########################################################################
# Locale data for Spanish as used in Spain (es_ES).  Subclasses the base
# Spanish locale and overrides only the three time-format patterns.
# Auto-generated from CLDR es_ES.xml -- per the header above, this file
# must not be edited by hand.
package DateTime::Locale::es_ES;
use strict;
BEGIN
{
if ( $] >= 5.006 )
{
# utf8.pm only exists on perl >= 5.6; enable UTF-8 source semantics there.
require utf8; utf8->import;
}
}
use DateTime::Locale::es;
@DateTime::Locale::es_ES::ISA = qw(DateTime::Locale::es);
# strftime-like patterns.  NOTE(review): \%\{hour\} looks like a
# DateTime::Locale-specific pattern token -- confirm against the
# DateTime::Locale documentation before relying on it.
sub full_time_format { "\%HH\%M\'\%S\"\ v" }
sub medium_time_format { "\%\{hour\}\:\%M\:\%S" }
sub short_time_format { "\%\{hour\}\:\%M" }
1;
| carlgao/lenga | images/lenny64-peon/usr/share/perl5/DateTime/Locale/es_ES.pm | Perl | mit | 1,090 |
#!/usr/local/bin/perl
#Title: make_RNA-seq_diff
#Auther: Naoto Imamachi
#ver: 1.0.0
#Date: 2014-08-28
=pod
=cut
use warnings;
use strict;
my $input = $ARGV[0];
#REF#################################
# Build %ref_list: gene_id -> { trx_id, type, name } from the GENCODE
# vM4 mRNA/lncRNA annotation table (tab-separated; col 1 = gene id,
# col 2 = type, col 4 = transcript id, col 5 = gene name).
# NOTE(review): the hard-coded absolute path ties this script to one
# machine; kept as-is to preserve behavior.
open(my $ref_fh, '<', "/home/akimitsu/custom_command/cuffnorm_Gencode/mRNA_lncRNA_list_gencode_vM4_mm10.txt")
    or die "Cannot open annotation table: $!";
my %ref_list;
while(my $line = <$ref_fh>){
    chomp $line;
    my @data = split/\t/,$line;
    my $gene_id = $data[0];
    my $trx_id = $data[3];
    my $type = $data[1];
    my $name = $data[4];
    $ref_list{$gene_id}{'trx_id'} = $trx_id;
    $ref_list{$gene_id}{'type'} = $type;
    $ref_list{$gene_id}{'name'} = $name;
}
close($ref_fh);
#REF#################################
# Collect readable sample names (in file order) from cuffnorm's
# samples.table: take column 2 and strip the
# "./cuffquant_out_<name>/abundances.cxb" wrapping around the name.
open(REF, "<$input/samples.table") || die;
my @REF_SAMPLE;
while(my $line = <REF>){
chomp $line;
my @data = split/\t/,$line;
if($data[0] eq "sample_id"){next;} # skip the header row
my $name = $data[1];
$name =~ s/\.\/cuffquant_out_//;
$name =~ s/\/abundances.cxb//;
push(@REF_SAMPLE,$name);
}
close(REF);
#REF#################################
# Build %REF_ATTR: tracking id (col 1) -> locus string (col 7) from
# cuffnorm's genes.attr_table, skipping the header row.
open(REF, "<$input/genes.attr_table") || die;
my %REF_ATTR;
while(my $line = <REF>){
chomp $line;
my @data = split/\t/,$line;
if($data[0] eq "tracking_id"){next;} # skip the header row
my $symbol = $data[0];
my $locus = $data[6];
$REF_ATTR{$symbol} = $locus;
}
close(REF);
#MAIN################################
# Split genes.fpkm_table into two annotated tables, one for mRNAs and
# one for ncRNAs, joining each gene against %ref_list (annotation) and
# %REF_ATTR (locus).  Genes absent from %ref_list are dropped.
open(IN,"<$input/genes.fpkm_table") || die;
open(OUT_mRNA,">$input/genes_Gencode_vM4_result_mRNA.fpkm_table") || die;
open(OUT_ncRNA,">$input/genes_Gencode_vM4_result_ncRNA.fpkm_table") || die;
# Independent running row ids for each output file.
my $gr_id_mRNA = 1;
my $gr_id_ncRNA = 1;
while(my $line = <IN>){
chomp $line;
my @data = split/\t/,$line;
# The header row triggers the output headers for both files.
if($data[0] eq "tracking_id"){
print OUT_mRNA "gr_id\tsymbol\taccession_id\tlocus\t",join("\t",@REF_SAMPLE),"\n";
print OUT_ncRNA "gr_id\tsymbol\taccession_id\tlocus\t",join("\t",@REF_SAMPLE),"\n";
next;
}
my $gene_id = shift(@data);
my @result = @data; # remaining columns are per-sample FPKM values
# NOTE(review): this defined() probe autovivifies $ref_list{$gene_id}
# for unknown genes; harmless here since %ref_list is not reused after
# this loop, but worth knowing.
unless(defined($ref_list{$gene_id}{'trx_id'})){next;}
my $trx_id = $ref_list{$gene_id}{'trx_id'};
my $gene_name = $ref_list{$gene_id}{'name'};
my $type = $ref_list{$gene_id}{'type'};
my $locus = $REF_ATTR{$gene_id};
#print "$type\n";
# Route the row to the matching output by annotation type.
if($type eq "mRNA"){
print OUT_mRNA "$gr_id_mRNA\t$gene_name\t$trx_id\t$locus\t",join("\t",@result),"\n";
$gr_id_mRNA++;
}elsif($type eq "ncRNA"){
print OUT_ncRNA "$gr_id_ncRNA\t$gene_name\t$trx_id\t$locus\t",join("\t",@result),"\n";
$gr_id_ncRNA++;
}
}
close(IN);
close(OUT_ncRNA);
close(OUT_mRNA);
| Naoto-Imamachi/NGS_data_analysis_pipeline | cuffnorm_Gencode/B_make_RNA-seq_diff_data_Gencode_vM4_mm10.pl | Perl | mit | 2,410 |
% zip(?Xs, ?Ys, ?Pairs): Pairs is the element-wise pairing of Xs and Ys,
% each pair represented as a two-element list [X,Y].  All three lists
% have the same length; fails if Xs and Ys differ in length.
zip([],[],[]).
zip([H1|T1],[H2|T2],[[H1,H2]|T3]) :- zip(T1,T2,T3).
| itsWeller/CSE130FinalPractice | sp13/mk2/3-zip.pl | Perl | mit | 67 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V8::Services::CustomerUserAccessService::MutateCustomerUserAccessResponse;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);
use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;
sub new {
    my ($class, $args) = @_;
    # Copy the only recognized field out of the constructor arguments.
    my $self = { result => $args->{result} };
    # Drop unset fields so the serialized JSON payload stays concise.
    remove_unassigned_fields($self, $args);
    return bless $self, $class;
}
1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V8/Services/CustomerUserAccessService/MutateCustomerUserAccessResponse.pm | Perl | apache-2.0 | 1,061 |
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package database::postgres::mode::querytime;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
# Constructor: registers the mode-specific command line options.
# All thresholds are expressed in seconds; the two filters are regexps.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $options{options}->add_options(arguments => {
        'warning:s'      => { name => 'warning' },      # warning threshold (seconds)
        'critical:s'     => { name => 'critical' },     # critical threshold (seconds)
        'exclude:s'      => { name => 'exclude' },      # database name filter (regexp)
        'exclude-user:s' => { name => 'exclude_user' }  # user name filter (regexp)
    });

    return $self;
}
# Validates the parsed command line options. Exits with a usage error when
# either threshold range is malformed.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::init(%options);

    # Validate both thresholds with one loop instead of two duplicated
    # blocks; threshold_validate() returns 0 for a malformed range, and the
    # interpolated messages are identical to the previous hard-coded ones.
    foreach my $label ('warning', 'critical') {
        if (($self->{perfdata}->threshold_validate(label => $label, value => $self->{option_results}->{$label})) == 0) {
            $self->{output}->add_option_msg(short_msg => "Wrong $label threshold '" . $self->{option_results}->{$label} . "'.");
            $self->{output}->option_exit();
        }
    }
}
# Executes the check: lists currently running queries per database, flags
# those whose runtime exceeds the warning/critical thresholds, and emits one
# perfdata counter per database plus a summary line per raised severity.
sub run {
    my ($self, %options) = @_;

    $options{sql}->connect();

    # Pick the right catalog query: pg_stat_activity column names changed in
    # PostgreSQL 9.2 (procpid -> pid, current_query -> query + state).
    my $query;
    if ($options{sql}->is_version_minimum(version => '9.2')) {
        $query = q{
SELECT pg_database.datname, pgsa.datid, pgsa.pid, pgsa.usename, pgsa.client_addr, pgsa.query AS current_query, pgsa.state AS state,
CASE WHEN pgsa.client_port < 0 THEN 0 ELSE pgsa.client_port END AS client_port,
COALESCE(ROUND(EXTRACT(epoch FROM now()-query_start)),0) AS seconds
FROM pg_database LEFT JOIN pg_stat_activity pgsa ON pg_database.datname = pgsa.datname AND (pgsa.query_start IS NOT NULL AND (pgsa.state NOT LIKE 'idle%' OR pgsa.state IS NULL))
ORDER BY pgsa.query_start, pgsa.pid DESC
};
    } else {
        $query = q{
SELECT pg_database.datname, pgsa.datid, pgsa.procpid, pgsa.usename, pgsa.client_addr, pgsa.current_query AS current_query, '' AS state,
CASE WHEN pgsa.client_port < 0 THEN 0 ELSE pgsa.client_port END AS client_port,
COALESCE(ROUND(EXTRACT(epoch FROM now()-query_start)),0) AS seconds
FROM pg_database LEFT JOIN pg_stat_activity pgsa ON pg_database.datname = pgsa.datname AND (pgsa.query_start IS NOT NULL AND current_query NOT LIKE '<IDLE>%')
ORDER BY pgsa.query_start, pgsa.procpid DESC
};
    }
    $options{sql}->query(query => $query);

    $self->{output}->output_add(
        severity => 'OK',
        short_msg => 'All databases queries time are ok.'
    );

    my $dbquery = {};
    while ((my $row = $options{sql}->fetchrow_hashref())) {
        # NOTE(review): despite its name, --exclude acts as an include filter
        # here — only database names matching the pattern are kept. Confirm
        # this is the intended semantics.
        if (defined($self->{option_results}->{exclude}) && $row->{datname} !~ /$self->{option_results}->{exclude}/) {
            next;
        }
        if (!defined($dbquery->{$row->{datname}})) {
            $dbquery->{ $row->{datname} } = { total => 0, code => {} };
        }
        # LEFT JOIN produced no pg_stat_activity row for this database.
        next if (!defined($row->{datid}) || $row->{datid} eq ''); # No joint
        # NOTE(review): --exclude-user likewise keeps only matching user names.
        if (defined($self->{option_results}->{exclude_user}) && $row->{usename} !~ /$self->{option_results}->{exclude_user}/) {
            next;
        }

        # Compare the query runtime (seconds) against both thresholds.
        my $exit_code = $self->{perfdata}->threshold_check(value => $row->{seconds}, threshold => [ { label => 'critical', exit_litteral => 'critical' }, { label => 'warning', exit_litteral => 'warning' } ]);
        if (!$self->{output}->is_status(value => $exit_code, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(
                long_msg => sprintf(
                    "Request from client '%s' too long (%d sec) on database '%s': %s",
                    $row->{client_addr}, $row->{seconds}, $row->{datname}, $row->{current_query}
                )
            );
            # Count offending queries per database and per severity level.
            $dbquery->{ $row->{datname} }->{total}++;
            $dbquery->{ $row->{datname} }->{code}->{$exit_code}++;
        }
    }

    # One perfdata entry per database plus a short message per raised severity.
    foreach my $dbname (keys %$dbquery) {
        $self->{output}->perfdata_add(
            label => $dbname . '_qtime_num',
            value => $dbquery->{$dbname}->{total},
            min => 0
        );
        foreach my $exit_code (keys %{$dbquery->{$dbname}->{code}}) {
            $self->{output}->output_add(
                severity => $exit_code,
                short_msg => sprintf(
                    "%d request exceed " . lc($exit_code) . " threshold on database '%s'",
                    $dbquery->{$dbname}->{code}->{$exit_code}, $dbname
                )
            );
        }
    }

    $self->{output}->display();
    $self->{output}->exit();
}
1;
__END__
=head1 MODE
Checks the time of running queries for one or more databases
=over 8
=item B<--warning>
Threshold warning in seconds.
=item B<--critical>
Threshold critical in seconds.
=item B<--exclude>
Database name filter (regexp); only databases whose name matches the pattern are checked.
=item B<--exclude-user>
User name filter (regexp); only queries from users whose name matches the pattern are checked.
=back
=cut
| centreon/centreon-plugins | database/postgres/mode/querytime.pm | Perl | apache-2.0 | 5,679 |
%------------------------------------------------------------------------------
%
%                   * Parser combinator library *
%
%          Loader of the parser combinator library & support files
%                             LPA Prolog
%
%                            Martin Dvorak
%                                1999
%------------------------------------------------------------------------------
/** Module: Parser constructor library
    Text:   The constructor library consists of a set of combinators,
            mutators, generators and generally useful predicates intended
            for rapid construction of parsers. In addition it contains
            parsers for common syntactic constructs and debugging support,
            including a meta-interpreter that makes it easier to locate
            and remove errors from the parsers being developed.
        p   The file 'loadLPA-1.pl' is the library loader for the
            LPA Prolog interpreter.
*/

% The library itself
:-consult('../pc/pcLPA-1').

% Implementation-dependent predicates (LPA Prolog)
:-consult('../specific/lpa').

% Support utilities
:-consult('../support/file').
:-consult('../support/setlist').

%- EOF ------------------------------------------------------------------------
| dvorka/parser-combinators | src/prolog/loadLPA-1.pl | Perl | apache-2.0 | 1,181 |
# Binding definition file: the __XS__/__AUTO_XS__/__COPYRIGHT__ sections after
# __END__ below are consumed by the KinoSearch build system to generate XS
# glue and POD — they are data, not executable Perl.
use Search::Kino03::KinoSearch;

1;
__END__
__XS__
MODULE = Search::Kino03::KinoSearch PACKAGE = Search::Kino03::KinoSearch::Index::IndexReader
void
set_race_condition_debug1(val_sv)
SV *val_sv;
PPCODE:
KINO_DECREF(kino_PolyReader_race_condition_debug1);
kino_PolyReader_race_condition_debug1 =
(kino_CharBuf*)MAYBE_SV_TO_KOBJ(val_sv, &KINO_CHARBUF);
if (kino_PolyReader_race_condition_debug1)
(void)KINO_INCREF(kino_PolyReader_race_condition_debug1);
chy_i32_t
debug1_num_passes()
CODE:
RETVAL = kino_PolyReader_debug1_num_passes;
OUTPUT: RETVAL
__AUTO_XS__
my $synopsis = <<'END_SYNOPSIS';
my $reader = Search::Kino03::KinoSearch::Index::IndexReader->open(
index => '/path/to/index',
);
my $seg_readers = $reader->seg_readers;
for my $seg_reader (@$seg_readers) {
my $seg_name = $seg_reader->get_segment->get_name;
my $num_docs = $seg_reader->doc_max;
print "Segment $seg_name ($num_docs documents):\n";
my $doc_reader = $seg_reader->obtain("Search::Kino03::KinoSearch::Index::DocReader");
for my $doc_id ( 1 .. $num_docs ) {
my $doc = $doc_reader->fetch($doc_id);
print " $doc_id: $doc->{title}\n";
}
}
END_SYNOPSIS
my $constructor = <<'END_CONSTRUCTOR';
my $reader = Search::Kino03::KinoSearch::Index::IndexReader->open(
index => '/path/to/index', # required
snapshot => $snapshot,
lock_factory => $lock_factory,
);
END_CONSTRUCTOR
{ "Search::Kino03::KinoSearch::Index::IndexReader" => {
bind_methods => [
qw( Doc_Max
Doc_Count
Del_Count
Fetch
Obtain
Seg_Readers
_offsets|Offsets
Get_Lock_Factory
Get_Components
)
],
make_constructors => ['open|do_open'],
make_pod => {
synopsis => $synopsis,
constructor => {
name => 'open',
func => 'do_open',
sample => $constructor,
},
methods => [qw(
doc_max
doc_count
del_count
seg_readers
offsets
fetch
obtain
)]
},
}
}
__COPYRIGHT__
Copyright 2005-2009 Marvin Humphrey
This program is free software; you can redistribute it and/or modify
under the same terms as Perl itself.
| robertkrimen/Search-Kino03 | lib/Search/Kino03/KinoSearch/Index/IndexReader.pm | Perl | apache-2.0 | 2,544 |
package Scrabblicious::Plugin::Hooks;
use Mojo::Base 'Mojolicious::Plugin';
# Registers the plugin: installs a before_render hook that decorates JSON
# responses with a self link, pagination links and pager metadata.
sub register {
  my ($self, $app, $config) = @_;
  $config //= {};

  # Intercept JSON requests and add more useful information to the output.
  $app->hook(
    before_render => sub {
      my ($c, $args) = @_;

      # If client is requesting JSON, customise the output to suit our needs.
      if ( exists $args->{json}
        && exists $c->stash->{'format'}
        && $c->stash->{'format'} eq 'json')
      {
        # Absolute URL of the resource being rendered.
        $args->{json}->{links}->{self} = $c->url_for->to_abs;

        # Don't expose information if there's an error in the JSON output (the
        # errors hash key is not a Mojo convention).
        if (!$args->{json}->{errors}) {

          # Pager objects have to be explicitly stashed in your controllers.
          if ((my $pager = $c->api->pager)
            && ref $c->api->pager eq 'DBIx::Class::ResultSet::Pager')
          {
            # http://jsonapi.org/format/#fetching-resources
            # NOTE(review): the 'last' link is built from previous_page; per
            # the JSON API spec this looks like it should be named 'prev'
            # ('last' conventionally points at the final page) — confirm.
            $args->{json}->{links}->{last}
              = $c->url_for->query(page => $pager->previous_page)->to_abs
              if $pager->previous_page;
            $args->{json}->{links}->{next}
              = $c->url_for->query(page => $pager->next_page)->to_abs
              if $pager->next_page;
            # Expose the full pager state under meta.pager for API clients.
            $args->{json}->{meta}->{pager} = {
              current_page => $pager->current_page,
              entries_per_page => $pager->entries_per_page,
              total_entries => $pager->total_entries,
              first_page => $pager->first_page,
              last_page => $pager->last_page,
              previous_page => $pager->previous_page,
              next_page => $pager->next_page,
              first => $pager->first,
              last => $pager->last,
            };
          }
        }
      }

      # End hook
    }
  );
}
1;
=head1 NAME
Scrabblicious::Plugin::Hooks - Hook into stuff, yo!
=head1 SYNOPSIS
use Mojolicious::Lite;
plugin 'Scrabblicious::Plugin::Hooks';
get '/players' => sub {
my $c = shift;
my $players = $c->db->resultset('Player')->search(
{'me.status' => {'=' => [qw/Active/]}},
{
order_by => {-asc => [qw/me.nickname/]},
page => $c->api->page,
rows => $c->api->per_page,
}
);
$c->api->pager($players->pager);
$c->respond_to(
json => {json => {data => [$players->all]}},
any => {
players => [$players->all],
format => 'html',
template => 'players/list'
}
);
};
=head1 DESCRIPTION
L<Scrabblicious::Plugin::Hooks> is home to project-specific hooks.
The primary reason this has been created is to hook into C<before_render> to
mangle to the JSON before it is being rendered. Using this technique means we
can also add some extra data to the response to keep it more in line with
L<http://jsonapi.org/>.
=head1 METHODS
L<Scrabblicious::Plugin::Hooks> inherits all methods from L<Mojolicious::Plugin>
and implements the following new ones.
=head2 register
$plugin->register(Mojolicious->new);
Register plugin in L<Mojolicious> application.
=head1 HOOKS
=head2 before_render
Intercept JSON requests and add more helpful information to the output.
Specifically checks for the presence of L<Scrabblicious::Plugin::API/pager>
and adds the pager data to the meta property in the JSON output.
=head1 SEE ALSO
L<Mojolicious>, L<Mojolicious::Guides>, L<http://mojolicio.us>.
=cut
| kwakwaversal/mojolicious-plugin-restify-example | lib/Scrabblicious/Plugin/Hooks.pm | Perl | apache-2.0 | 3,524 |
#
# Copyright 2015 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package centreon::plugins::statefile;
use strict;
use warnings;
use Data::Dumper;
use vars qw($datas);
use centreon::plugins::misc;
my $default_dir = '/var/lib/centreon/centplugins';
# Constructor: registers the retention (statefile) command line options when
# an option parser is supplied, and initializes the cache state.
# Expects an 'output' object in %options for error reporting.
sub new {
    my ($class, %options) = @_;
    my $self = {};
    bless $self, $class;

    if (defined($options{options})) {
        $options{options}->add_options(arguments =>
            {
                "memcached:s" => { name => 'memcached' },
                "statefile-dir:s" => { name => 'statefile_dir', default => $default_dir },
                "statefile-suffix:s" => { name => 'statefile_suffix', default => '' },
                "statefile-concat-cwd" => { name => 'statefile_concat_cwd' },
                "statefile-storable" => { name => 'statefile_storable' },
            });
        $options{options}->add_help(package => __PACKAGE__, sections => 'RETENTION OPTIONS', once => 1);
    }

    $self->{error} = 0;                  # last read/write error flag
    $self->{output} = $options{output};
    $self->{datas} = {};                 # cached key/value data
    $self->{storable} = 0;               # 1 = serialize with Storable instead of Data::Dumper
    $self->{memcached_ok} = 0;           # 1 = memcached backend answered successfully
    $self->{memcached} = undef;
    $self->{statefile_dir} = undef;
    $self->{statefile_suffix} = undef;

    return $self;
}
# Processes the parsed command line options: sets up the optional memcached
# backend, resolves the statefile directory and loads Storable on demand.
sub check_options {
    my ($self, %options) = @_;

    if (defined($options{option_results}) && defined($options{option_results}->{memcached})) {
        # Lazily load the memcached client only when --memcached is used.
        centreon::plugins::misc::mymodule_load(output => $self->{output}, module => 'Memcached::libmemcached',
                                               error_msg => "Cannot load module 'Memcached::libmemcached'.");
        $self->{memcached} = Memcached::libmemcached->new();
        Memcached::libmemcached::memcached_server_add($self->{memcached}, $options{option_results}->{memcached});
    }

    $self->{statefile_dir} = $options{option_results}->{statefile_dir};
    if ($self->{statefile_dir} ne $default_dir && defined($options{option_results}->{statefile_concat_cwd})) {
        # Make a non-default, relative --statefile-dir absolute against cwd
        # (useful for compiled plugins on Windows, per the POD below).
        centreon::plugins::misc::mymodule_load(output => $self->{output}, module => 'Cwd',
                                               error_msg => "Cannot load module 'Cwd'.");
        $self->{statefile_dir} = Cwd::cwd() . '/' . $self->{statefile_dir};
    }

    if (defined($options{option_results}->{statefile_storable})) {
        centreon::plugins::misc::mymodule_load(output => $self->{output}, module => 'Storable',
                                               error_msg => "Cannot load module 'Storable'.");
        $self->{storable} = 1;
    }

    $self->{statefile_suffix} = $options{option_results}->{statefile_suffix};
}
# Combined accessor/mutator for the error flag: with an argument it stores
# the new value, and it always returns the current value.
sub error {
    my $self = shift;
    $self->{error} = $_[0] if @_;
    return $self->{error};
}
# Loads the cached data into $self->{datas}. Tries memcached first (when
# configured), then falls back to the on-disk statefile, deserializing with
# Storable or by eval'ing a Data::Dumper dump. Returns 1 when data was
# loaded, 0 otherwise (missing, empty or corrupted cache). Exits via the
# output object on permission problems unless no_quit is set.
sub read {
    my ($self, %options) = @_;
    # Per-call overrides for suffix/dir/file name; the suffix is appended to
    # the supplied statefile name.
    $self->{statefile_suffix} = defined($options{statefile_suffix}) ? $options{statefile_suffix} : $self->{statefile_suffix};
    $self->{statefile_dir} = defined($options{statefile_dir}) ? $options{statefile_dir} : $self->{statefile_dir};
    $self->{statefile} = defined($options{statefile}) ? $options{statefile} . $self->{statefile_suffix} :
                            $self->{statefile};
    $self->{no_quit} = defined($options{no_quit}) && $options{no_quit} == 1 ? 1 : 0;

    if (defined($self->{memcached})) {
        # "SUCCESS" or "NOT FOUND" means memcached is usable; any other
        # status falls through to the on-disk statefile below.
        my $val = Memcached::libmemcached::memcached_get($self->{memcached}, $self->{statefile_dir} . "/" . $self->{statefile});
        if (defined($self->{memcached}->errstr) && $self->{memcached}->errstr =~ /^SUCCESS|NOT FOUND$/i) {
            $self->{memcached_ok} = 1;
            if (defined($val)) {
                # The cached value is a Data::Dumper dump assigning to the
                # package variable $datas.
                eval( $val );
                $self->{datas} = $datas;
                $datas = {};
                return 1;
            }
            return 0;
        }
    }

    if (! -e $self->{statefile_dir} . "/" . $self->{statefile}) {
        # File absent: only an error if we could not create it later.
        if (! -w $self->{statefile_dir}) {
            $self->error(1);
            $self->{output}->add_option_msg(short_msg => "Cannot write statefile '" . $self->{statefile_dir} . "/" . $self->{statefile} . "'. Need write permissions on directory.");
            if ($self->{no_quit} == 0) {
                $self->{output}->option_exit();
            }
        }
        return 0;
    } elsif (! -w $self->{statefile_dir} . "/" . $self->{statefile}) {
        $self->error(1);
        $self->{output}->add_option_msg(short_msg => "Cannot write statefile '" . $self->{statefile_dir} . "/" . $self->{statefile} . "'. Need write permissions on file.");
        if ($self->{no_quit} == 0) {
            $self->{output}->option_exit();
        }
        return 1;
    } elsif (! -s $self->{statefile_dir} . "/" . $self->{statefile}) {
        # Empty file. Not a problem. Maybe plugin not manage not values
        return 0;
    }

    if ($self->{storable} == 1) {
        # NOTE(review): bareword handle and 2-arg open without an error
        # check; a 3-arg lexical open would be safer — confirm before changing.
        open FILE, $self->{statefile_dir} . "/" . $self->{statefile};
        eval {
            $self->{datas} = Storable::fd_retrieve(*FILE);
        };
        # File is corrupted surely. We'll reset it
        if ($@) {
            close FILE;
            return 0;
        }
        close FILE;
    } else {
        # "do FILE" executes the Data::Dumper dump, which assigns to $datas.
        unless (my $return = do $self->{statefile_dir} . "/" . $self->{statefile}) {
            # File is corrupted surely. We'll reset it
            return 0;
            #if ($@) {
            #    $self->{output}->add_option_msg(short_msg => "Couldn't parse '" . $self->{statefile_dir} . "/" . $self->{statefile} . "': $@");
            #    $self->{output}->option_exit();
            #}
            #unless (defined($return)) {
            #    $self->{output}->add_option_msg(short_msg => "Couldn't do '" . $self->{statefile_dir} . "/" . $self->{statefile} . "': $!");
            #    $self->{output}->option_exit();
            #}
            #unless ($return) {
            #    $self->{output}->add_option_msg(short_msg => "Couldn't run '" . $self->{statefile_dir} . "/" . $self->{statefile} . "': $!");
            #    $self->{output}->option_exit();
            #}
        }
        $self->{datas} = $datas;
        $datas = {};
    }

    return 1;
}
# Returns the cached data serialized with Data::Dumper (for display or
# debugging purposes).
sub get_string_content {
    my ($self, %options) = @_;
    my $dump = Data::Dumper::Dumper($self->{datas});
    return $dump;
}
# Looks up a single value from the cached data by its 'name' option.
# Returns undef when the key is absent or holds an undefined value.
sub get {
    my ($self, %options) = @_;
    my $value = $self->{datas}->{$options{name}};
    return defined($value) ? $value : undef;
}
# Persists $options{data} to the cache: via memcached when available, else
# to the statefile on disk (Storable binary format or a Data::Dumper dump
# assigning to $datas, matching what read() expects).
sub write {
    my ($self, %options) = @_;

    if ($self->{memcached_ok} == 1) {
        Memcached::libmemcached::memcached_set($self->{memcached}, $self->{statefile_dir} . "/" . $self->{statefile},
                                               Data::Dumper->Dump([$options{data}], ["datas"]));
        if (defined($self->{memcached}->errstr) && $self->{memcached}->errstr =~ /^SUCCESS$/i) {
            return ;
        }
    }

    my $file = $self->{statefile_dir} . "/" . $self->{statefile};
    # Lexical filehandle with an open check (was an unchecked bareword FILE
    # handle). On failure we keep best-effort semantics and simply skip the
    # cache update instead of printing to an unopened handle.
    open(my $fh, '>', $file) or return;
    if ($self->{storable} == 1) {
        Storable::store_fd($options{data}, $fh);
    } else {
        print {$fh} Data::Dumper->Dump([$options{data}], ["datas"]);
    }
    close($fh);
}
1;
__END__
=head1 NAME
Statefile class
=head1 SYNOPSIS
-
=head1 RETENTION OPTIONS
=over 8
=item B<--memcached>
Memcached server to use (only one server).
=item B<--statefile-dir>
Directory for statefile (Default: '/var/lib/centreon/centplugins').
=item B<--statefile-suffix>
Add a suffix for the statefile name (Default: '').
=item B<--statefile-concat-cwd>
Concat current working directory with option '--statefile-dir'.
Useful on Windows when plugin is compiled.
=item B<--statefile-storable>
Use the Perl module 'Storable' (instead of Data::Dumper) to serialize the data.
=back
=head1 DESCRIPTION
B<statefile>.
=cut
| s-duret/centreon-plugins | centreon/plugins/statefile.pm | Perl | apache-2.0 | 8,619 |
#!/usr/bin/env perl
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
# Copyright [2016-2020] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#The script generates a FreePlane mind map file of xrefs for a given species.
#To view the generated file, download free mind mapping software Freeplane from http://freeplane.sourceforge.net
use strict;
use warnings;
use DBI qw( :sql_types );
use Getopt::Long;
use Bio::EnsEMBL::Registry;
use Bio::EnsEMBL::ApiVersion;
use HTML::Entities;
use FindBin qw($Bin);
use vars qw($SERVERROOT);
# Remember the directory this script lives in so the companion SQL files
# (sql/tables.sql, sql/update.sql) can be located relative to it.
BEGIN {
    $SERVERROOT = "$Bin";
}
my $xhost;
my $xport;
my $xuser;
my $xpass;
my $db_version;
my $host;
my $user;
my $port;
my $species;
my $dontdrop;

# x* options: server where the temporary xref_mindmap working database is
# created; host/user/port: server holding the Ensembl core database to read.
GetOptions( "xhost|xh=s" => \$xhost,
            "xport=i" => \$xport,
            "xuser|xu=s" => \$xuser,
            "xpass|xp=s" => \$xpass,
            "host|h=s",\$host,
            "user|u=s",\$user,
            "port=i",\$port,
            "species|s=s",\$species,
            "db_version=i",\$db_version,
            "dontdrop|d", \$dontdrop,
            "help" , \&usage,
);

usage() if (!defined $xhost || !defined $xuser || !defined $xpass || !defined $host || !defined $user || !defined $species);

# Defaults: standard MySQL port and the installed API's software version.
$port ||= 3306;
$xport ||= 3306;
$db_version ||= software_version();

my $registry = "Bio::EnsEMBL::Registry";
$registry->load_registry_from_db( -host => $host, -port => $port,-user => $user, -db_version => $db_version, -species => $species);

my $dba = $registry->get_DBAdaptor($species, "core");
if (!$dba) {
    die("$species database version $db_version not found on host $host port $port");
}

my $dbh;

#create and connect to the xref_mindmap database
my $dsn = "DBI:mysql:host=$xhost;";
if ($xport) {
    $dsn .= "port=$xport;";
}

my $xdbname = 'xref_mindmap_'.$species.'_'.$db_version;
$dbh = DBI->connect( $dsn, $xuser, $xpass,
                     { 'PrintError' => 1, 'RaiseError' => 1 } );
# Ensembl object types that can carry xrefs, and how each is labelled in
# the generated mind map.
my @objects_with_xrefs = ('Gene', 'Transcript', 'Translation');
my %object_display_names = ('Gene' => 'Gene', 'Transcript' => 'Transcript', 'Translation' => 'Protein');

print STDOUT "Creating database $xdbname\n";

# (Re)create the working database and load its schema from sql/tables.sql
# via the mysql command line client.
eval {
    $dbh->do("drop database if exists $xdbname");
    $dbh->do("create database $xdbname");
    my $cmd = "mysql -h $xhost";
    if ($xport) {
        $cmd .= " -P $xport";
    }
    $cmd .= " -u $xuser --password=$xpass $xdbname < $SERVERROOT/sql/tables.sql";
    system($cmd) == 0 or die("error encountered when creating schema for database $xdbname\n");
    $dbh->do("use $xdbname");
};
if ($@) {
    die("An SQL error occured while creating database $xdbname:\n$@");
}

#copy the external_db information
my $external_db_info_sth = $dba->dbc()->prepare("select external_db_id, db_name, db_display_name from external_db");
my $insert_external_db_info_sth = $dbh->prepare("insert into external_db_type (external_db_id, db_name, db_display_name) values (?,?,?)");
$external_db_info_sth->execute();
while ( my ($external_db_id, $db_name, $db_display_name) = $external_db_info_sth->fetchrow_array() ) {
    $insert_external_db_info_sth->execute($external_db_id, $db_name, $db_display_name);
}
$external_db_info_sth->finish();
$insert_external_db_info_sth->finish();

#populate db types in external_db_type
my $cmd = "mysql -h $xhost";
if ($xport) {
    $cmd .= " -P $xport";
}
$cmd .= " -u $xuser --password=$xpass $xdbname < $SERVERROOT/sql/update.sql";
system($cmd) == 0 or die("error encountered when updating database $xdbname\n");

# Statements against the Ensembl core database.
#get distinct db_names for an object type
my $external_db_sth = $dba->dbc()->prepare("select distinct external_db_id from object_xref join xref using(xref_id) join external_db using(external_db_id) where ensembl_object_type = ? and info_type != 'UNMAPPED' order by db_name");

#get xref types for an object type and external db
my $xref_types_sth = $dba->dbc()->prepare("select distinct info_type from object_xref join xref using(xref_id) where ensembl_object_type = ? and external_db_id = ? and info_type != 'UNMAPPED'");

#get master xref db_names
my $master_db_sth = $dba->dbc()->prepare("select distinct e.external_db_id from object_xref ox join xref x1 on (ox.xref_id = x1.xref_id) join dependent_xref on (x1.xref_id = dependent_xref_id) join xref x2 on (master_xref_id = x2.xref_id) join external_db e on (x2.external_db_id = e.external_db_id) where ensembl_object_type = ? and x1.external_db_id = ? and x1.info_type != 'UNMAPPED'");

#get species from which xrefs were projected
my $projected_from_species_sth = $dba->dbc()->prepare("select distinct substr(info_text,1,locate(' ', info_text, locate(' ', info_text)+1 )) from object_xref join xref using(xref_id) where info_text like 'from%' and ensembl_object_type = ? and external_db_id = ? and info_type = 'PROJECTION'");

#xref_mindmap db statements
my $insert_object_xref_linkage = $dbh->prepare("insert into object_xref_linkage values(?,?,?,?,?)");
my $insert_object_external_db_node = $dbh->prepare("insert into object_external_db_node values (?,?,?)");
my $link_type_sth = $dbh->prepare("select link_type_id from link_type where link_type = ?");
# For each Ensembl object type, record which external databases annotate it
# and through which linkage type (direct, dependent, projection, ...).
foreach my $object (@objects_with_xrefs) {
    $external_db_sth->execute($object);
    my @db_ids;
    while (my ($db_id) = $external_db_sth->fetchrow_array() ) {
        push @db_ids, $db_id;
    }
    foreach my $db_id (@db_ids) {
        $xref_types_sth->execute($object, $db_id);
        #store the xref db node
        $insert_object_external_db_node->execute($object,$db_id, $object.$db_id);
        while (my ($link_type) = $xref_types_sth->fetchrow_array() ) {
            #get link_type_id
            $link_type_sth->execute($link_type);
            my ($link_type_id) = $link_type_sth->fetchrow_array();
            if ($link_type_id) {
                if ($link_type eq 'DEPENDENT') {
                    # Dependent xrefs: also record the master db they hang off.
                    $master_db_sth->execute($object,$db_id);
                    while (my ($master_db_id) = $master_db_sth->fetchrow_array() ) {
                        $insert_object_xref_linkage->execute($db_id,$object,$link_type_id, $master_db_id, undef);
                    }
                } elsif ($link_type eq 'PROJECTION') {
                    # Projected xrefs: record the source species as free text.
                    $projected_from_species_sth->execute($object, $db_id);
                    while (my ($linked_node_text) = $projected_from_species_sth->fetchrow_array()) {
                        $insert_object_xref_linkage->execute($db_id,$object,$link_type_id, undef, $linked_node_text);
                    }
                } else {
                    $insert_object_xref_linkage->execute($db_id,$object,$link_type_id, undef, undef);
                }
            }
        }
    }
}

#populate 'generated_from' link types in object_xref_linkage - this is for xrefs to do with gene and transcript naming
my $naming_dbs_sth = $dbh->prepare("select external_db_id, db_name from external_db_type where db_name like ?");
my $db_id_sth = $dbh->prepare("select external_db_id from external_db_type where db_name = ?");
$link_type_sth->execute('GENERATED_FROM');
my ($link_type_id) = $link_type_sth->fetchrow_array();
foreach my $object (@objects_with_xrefs) {
    # Naming dbs follow the '<SourceDb>_<object>_name' convention; link each
    # back to its source db when that db exists in external_db_type.
    $naming_dbs_sth->execute('%'.$object.'\_name');
    while (my ($db_id, $db_name) = $naming_dbs_sth->fetchrow_array() ) {
        if ($db_name =~ m/(.*)\_$object\_name/i) {
            my $linked_db_name = $1;
            if ($linked_db_name) {
                #find the linked db id
                $db_id_sth->execute($linked_db_name);
                my ($linked_db_id) = $db_id_sth->fetchrow_array();
                if ($linked_db_id) {
                    $insert_object_xref_linkage->execute($db_id,$object,$link_type_id, $linked_db_id, undef);
                }
            }
        }
    }
    # Clone-based names have no source db row; record free text instead.
    foreach my $name ( ('Vega','Ensembl')) {
        $naming_dbs_sth->execute('Clone\_based\_'.$name.'\_'.$object);
        my ($db_id, $db_name) = $naming_dbs_sth->fetchrow_array();
        if ($db_id) {
            $insert_object_xref_linkage->execute($db_id,$object,$link_type_id, undef, "$name clone name");
        }
    }
}
$naming_dbs_sth->finish();

#check if there are any rows in the interpro table
my $interpro_sth = $dba->dbc()->prepare("select count(1) from interpro");
$interpro_sth->execute();
my ($interpro_count) = $interpro_sth->fetchrow_array();
if ($interpro_count > 0) {
    # Protein features present: add an Interpro node linked to Translations.
    my $interpro = 'Interpro';
    $db_id_sth->execute($interpro);
    my ($interpro_db_id) = $db_id_sth->fetchrow_array();
    $link_type_sth->execute('PROTEIN_FEATURES');
    my ($link_type_id) = $link_type_sth->fetchrow_array();
    my $object = 'Translation';
    $insert_object_external_db_node->execute($object,$interpro_db_id, $object.$interpro_db_id);
    $insert_object_xref_linkage->execute($interpro_db_id,$object,$link_type_id, undef, undef);
}
$interpro_sth->finish();
$db_id_sth->finish();
$external_db_sth->finish();
$xref_types_sth->finish();
$master_db_sth->finish();
$projected_from_species_sth->finish();
$insert_object_xref_linkage->finish();
$insert_object_external_db_node->finish();
$link_type_sth->finish();
#create the .mm file based on info from xref_mindmap db
my $file_name = $species .'_xrefs_'. $db_version .'.mm';
# NOTE(review): bareword handle and 2-arg open; a 3-arg lexical open would
# be safer here.
open( FH, ">$file_name" ) or die("Can't open $file_name\n");
print STDOUT "Writing to file $file_name\n";

# Static FreePlane map header with the root 'External References' node.
my $header = <<HEADER;
<map version='0.9.0'>
<!--To view this file, download free mind mapping software Freeplane from http://freeplane.sourceforge.net -->
<node TEXT='External References' ID='ID_000000001' COLOR='#18898b' STYLE='fork'>
<font NAME='Liberation Sans' SIZE='12' BOLD='true'/>
<hook NAME='MapStyle' max_node_width='600'/>
<edge STYLE='bezier' COLOR='#808080' WIDTH='thin'/>
HEADER

print FH $header;

# Statements against the populated xref_mindmap working database.
my $category_and_db_name_nodes_sth = $dbh->prepare("select db_type, external_db_id, db_display_name, mindmap_tag_id from object_external_db_node join external_db_type using(external_db_id) join db_type using(db_type_id) where ensembl_object_type = ? order by db_type, db_display_name;");
my $db_linkage_types_sth = $dbh->prepare("select link_description, linked_external_db_id, linked_node_text from object_xref_linkage join link_type using(link_type_id) where ensembl_object_type = ? and external_db_id = ? and link_description is not null order by link_description, linked_external_db_id, linked_node_text");

#this is used to link to dbs from the closest level (e.g. if Orphanet xrefs for a Gene are dependent on HGNC xrefs
#we will try to link to the Gene HGNC node then transcript etc..
my $object_distance_sth = $dbh->prepare("select to_object from object_distance where from_object = ? order by distance");
my $node_id_sth = $dbh->prepare("select mindmap_tag_id from object_external_db_node where ensembl_object_type = ? and external_db_id = ?");

# Monotonic counter for freshly minted FreePlane node IDs.
my $node_id_count = 1;
foreach my $object (@objects_with_xrefs) {
    $node_id_count++;
    my $node_id = 'ID_'.$node_id_count;
    #write out the object node
    my $object_node = "<node TEXT='".$object_display_names{$object}."' POSITION='right' ID='".$node_id."' COLOR='#cc3300' STYLE='fork'>
<font NAME='Liberation Sans' SIZE='12' BOLD='true'/>
<edge STYLE='bezier' COLOR='#808080' WIDTH='thin'/>
";
    print FH $object_node;
    $category_and_db_name_nodes_sth->execute($object);
    # Rows arrive ordered by db_type, so a change of type closes the previous
    # type node and opens a new one.
    my $last_type = 'none';
    while (my ($db_type, $db_id, $db_display_name, $tag_id) = $category_and_db_name_nodes_sth->fetchrow_array() ) {
        if ($db_type ne $last_type) {
            if ($last_type ne 'none') {
                print FH "</node>\n"; #db type node end tag
            }
            $node_id_count++;
            $node_id = 'ID_'.$node_id_count;
            #write out new db type node
            my $db_type_node = "<node TEXT='".encode_entities($db_type)."' ID='".$node_id."' COLOR='#669900'>
<font NAME='Liberation Sans' SIZE='12' BOLD='true'/>
";
            print FH $db_type_node;
        }
        #write out the db name node
        $node_id_sth->execute($object,$db_id);
        ($node_id) = $node_id_sth->fetchrow_array();
        my $db_name_node = "<node TEXT='".encode_entities($db_display_name)."' ID='".$node_id."'>
";
        print FH $db_name_node;
        $db_linkage_types_sth->execute($object,$db_id);
        my $last_link_desc = 'none';
        while (my ($link_description, $linked_db_id, $linked_node_text)
                = $db_linkage_types_sth->fetchrow_array() ){
            if ($link_description ne $last_link_desc) {
                if ($last_link_desc ne 'none') {
                    print FH "</node>\n"; #link type node end tag
                }
                #write out the link node
                $node_id_count++;
                $node_id = 'ID_'.$node_id_count;
                my $link_node = "<node TEXT='".encode_entities($link_description)."' ID='".$node_id."'>
";
                print FH $link_node;
            }
            if ($linked_db_id) {
                #find the node id to link to
                my @closest_level;
                $object_distance_sth->execute($object);
                while (my ($object_type) = $object_distance_sth->fetchrow_array() ) {
                    push @closest_level, $object_type;
                }
                my $linked_node_id;
                foreach my $object_type (@closest_level) {
                    $node_id_sth->execute($object_type,$linked_db_id);
                    ($linked_node_id) = $node_id_sth->fetchrow_array();
                    last if ($linked_node_id);
                }
                if ($linked_node_id) {
                    #write out arrow link to the node
                    my $arrow_link_node = "<arrowlink DESTINATION='".$linked_node_id."' STARTARROW='NONE' ENDARROW='DEFAULT'/>
";
                    print FH $arrow_link_node;
                }
            } elsif ($linked_node_text) {
                #write out the node
                $node_id_count++;
                $node_id = 'ID_'.$node_id_count;
                my $node = "<node TEXT='".encode_entities($linked_node_text)."' ID='".$node_id."'/>
";
                print FH $node;
            }
            $last_link_desc = $link_description;
        }
        if ($last_link_desc ne 'none') {
            print FH "</node>\n"; #link type node end tag
        }
        print FH "</node>\n"; #db name node end tag
        $last_type = $db_type;
    }
    if ($last_type ne 'none') {
        print FH "</node>\n"; #db type node end tag
    }
    print FH "</node>\n" #object node end tag;
}
$category_and_db_name_nodes_sth->finish();
$db_linkage_types_sth->finish();
$object_distance_sth->finish();
$node_id_sth->finish();

#external references end tag and map end tag
my $footer = <<FOOTER;
</node>
</map>
FOOTER

print FH $footer;
close(FH);

# Drop the temporary working database unless --dontdrop was given.
if (!$dontdrop) {
    print STDOUT "Dropping database $xdbname\n";
    eval {
        $dbh->do("drop database if exists $xdbname");
    };
    if ($@) {
        die("An SQL error occured while dropping database $xdbname:\n$@");
    }
}

$dbh->disconnect();
# Print usage information and exit successfully.
#
# Fix: the descriptive paragraphs previously documented a different script
# (populate_stable_id_lookup.pl, options -lhost/-luser/-lpass and stable id
# copying) — text copied from that script which did not match the options
# actually listed below. Rewritten to describe this script.
sub usage {
    my $indent = ' ' x length($0);
    print <<EOF; exit(0);

This script generates a FreeMind mindmap (.mm) file of the external
reference (xref) databases attached to a species core database. Xref
databases are grouped by database type and by the linkage type used to
attach them, and cross-references between xref databases are drawn as
arrow links.

Options -h|-host, -u|-user (and optionally -port) specify the credentials
of the server where the species core database lives.

Options -xh|-xhost, -xu|-xuser, -xp|-xpass (and optionally -xport) specify
the credentials of the server on which the temporary xref mindmap database
is created. Unless -dontdrop is given, that database is dropped again when
the script finishes.

Usage:

  $0 -xhost host_name -xuser user_name -xpass password
  $indent [-xport port_number]
  $indent -host host_name -user user_name
  $indent [-port port_number] [-db_version]
  $indent -species species_name
  $indent [-dontdrop]
  $indent [-help]

  -h|host      Database host where the species db lives
  -u|user      Database user where the species db lives
  -port        Database port the species db lives
  -xh|xhost    Database host where the xref mindmap db is to be created
  -xu|xuser    Database user where the xref mindmap db is to be created
  -xp|xpass    Database password where the xref mindmap db is to be created
  -xport       Database port where the xref mindmap db is to be created
  -s|species   Species name to generate xref mindmap for
  -d|dontdrop  Don't drop the xref_mindmap db at the end of the script
  -db_version  If not specified, software_version() returned by the ApiVersion module will be used
  -help        This message

EOF
}
| james-monkeyshines/ensembl | misc-scripts/xref_mapping/xref_mindmap/generate_xref_mindmap.pl | Perl | apache-2.0 | 16,874 |
# Auto-generated SOAP::WSDL element class for the AdWords API's
# BiddingStrategyService.get request. Do NOT edit by hand — regenerate
# with the swagger/WSDL code generator instead.
package Google::Ads::AdWords::v201809::BiddingStrategyService::get;
use strict;
use warnings;
{ # BLOCK to scope variables
# XML namespace of the 'get' element.
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201809' }
__PACKAGE__->__set_name('get');
__PACKAGE__->__set_nillable();
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();
use base qw(
SOAP::WSDL::XSD::Typelib::Element
Google::Ads::SOAP::Typelib::ComplexType
);
# This element carries no XML attributes, so the attribute class is undef.
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;
sub __get_attr_class {
return $XML_ATTRIBUTE_CLASS;
}
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);
{ # BLOCK to scope variables
# Inside-out storage for the single 'selector' property.
my %selector_of :ATTR(:get<selector>);
# Register the property list, its storage, its type and its XML name.
__PACKAGE__->_factory(
[ qw( selector
) ],
{
'selector' => \%selector_of,
},
{
'selector' => 'Google::Ads::AdWords::v201809::Selector',
},
{
'selector' => 'selector',
}
);
} # end BLOCK
} # end of BLOCK
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201809::BiddingStrategyService::get
=head1 DESCRIPTION
Perl data type class for the XML Schema defined element
get from the namespace https://adwords.google.com/api/adwords/cm/v201809.
Returns a list of bidding strategies that match the selector. @return list of bidding strategies specified by the selector. @throws com.google.ads.api.services.common.error.ApiException if problems occurred while retrieving results.
=head1 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * selector
$element->set_selector($data);
$element->get_selector();
=back
=head1 METHODS
=head2 new
my $element = Google::Ads::AdWords::v201809::BiddingStrategyService::get->new($data);
Constructor. The following data structure may be passed to new():
{
selector => $a_reference_to, # see Google::Ads::AdWords::v201809::Selector
},
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
| googleads/googleads-perl-lib | lib/Google/Ads/AdWords/v201809/BiddingStrategyService/get.pm | Perl | apache-2.0 | 2,012 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package hardware::devices::video::axis::snmp::mode::components::fan;

use strict;
use warnings;

# Numeric fan state (AXIS MIB) mapped to a human readable status.
my %map_fan_status = (
    0 => 'ok',
    1 => 'failed',
);

# SNMP mapping: axisFanState table lives under .1.3.6.1.4.1.368.4.1.2.1.3.
my $mapping = {
    axisFanState => { oid => '.1.3.6.1.4.1.368.4.1.2.1.3', map => \%map_fan_status },
};

# Queue the fan state table for the bulk SNMP request.
sub load {
    my ($self) = @_;

    my $fan_oid = $mapping->{axisFanState}->{oid};
    push @{$self->{request}}, { oid => $fan_oid };
}

# Walk the collected fan state table and raise an alert for every fan whose
# status maps to a non-OK severity.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking fans");
    $self->{components}->{fan} = {name => 'fans', total => 0, skip => 0};
    return if ($self->check_filter(section => 'fan'));

    my $fan_oid     = $mapping->{axisFanState}->{oid};
    my $snmp_result = $self->{results}->{$fan_oid};
    foreach my $leaf_oid ($self->{snmp}->oid_lex_sort(keys %$snmp_result)) {
        next if ($leaf_oid !~ /^$fan_oid\.(.*)$/);
        my $instance = $1;

        my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $snmp_result, instance => $instance);
        next if ($self->check_filter(section => 'fan', instance => $instance));

        $self->{components}->{fan}->{total}++;
        $self->{output}->output_add(long_msg => sprintf("fan '%s' state is %s [instance: %s].",
                                                        $instance, $result->{axisFanState}, $instance
                                    ));

        my $exit = $self->get_severity(section => 'fan', value => $result->{axisFanState});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("fan '%s' state is %s",
                                                             $instance, $result->{axisFanState}));
        }
    }
}

1;
| Tpo76/centreon-plugins | hardware/devices/video/axis/snmp/mode/components/fan.pm | Perl | apache-2.0 | 2,540 |
#
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package os::solaris::local::mode::lomv120components::voltage;

use strict;
use warnings;
use centreon::plugins::misc;

# Parse the "Supply voltages" section of the lom(1M) output and raise an
# alert for every supply whose reported status is not considered OK.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking supply voltages");
    $self->{components}->{voltage} = {name => 'voltages', total => 0, skip => 0};
    return if ($self->check_filter(section => 'voltage'));

    # Expected lom output:
    #   Supply voltages:
    #   1 5V status=ok
    #   2 3V3 status=ok
    #   3 +12V status=ok
    return if ($self->{stdout} !~ /^Supply voltages:(((.*?)(?::|Status flag))|(.*))/ims);

    my @section_lines = split(/\n/, $1);
    shift @section_lines;    # drop the remainder of the header line

    foreach my $raw_line (@section_lines) {
        $raw_line = centreon::plugins::misc::trim($raw_line);
        next if ($raw_line !~ /^\s*(\S+).*?status=(.*)/);
        my ($instance, $status) = ($1, $2);

        next if ($self->check_filter(section => 'voltage', instance => $instance));
        $self->{components}->{voltage}->{total}++;

        $self->{output}->output_add(
            long_msg => sprintf(
                "Supply voltage '%s' status is %s.",
                $instance, $status
            )
        );
        my $exit = $self->get_severity(label => 'default', section => 'voltage', value => $status);
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("supply voltage '%s' status is %s",
                                                             $instance, $status));
        }
    }
}

1;
| centreon/centreon-plugins | os/solaris/local/mode/lomv120components/voltage.pm | Perl | apache-2.0 | 2,380 |
package Bio::EnsEMBL::Funcgen::RunnableDB::PeakCalling::RemoveDuplicates;

use strict;
use base 'Bio::EnsEMBL::Hive::Process';
use Data::Dumper;
use File::Basename qw( dirname );
use File::Path     qw( rmtree );
# 'use' statements are compile time, so keeping them inside the eval/loop
# (as before) offered no protection; they are hoisted here for clarity.
use Bio::EnsEMBL::Registry;
use Bio::EnsEMBL::Funcgen::Sequencing::SeqTools;

# eHive runnable: remove duplicate reads from a merged bam file, verify the
# result has an EOF marker, index it, then delete the per-chunk temporary
# directories.
#
# Params (all required): merged_bam, deduplicated_bam, chunks (arrayref of
# chunk file names whose parent directories are removed on success).
sub run {
  my $self = shift;

  my $bam_file_with_duplicates      = $self->param_required('merged_bam');
  my $full_path_to_deduplicated_bam = $self->param_required('deduplicated_bam');
  my $chunks                        = $self->param_required('chunks');

  # Avoid holding idle database connections during the long deduplication.
  Bio::EnsEMBL::Registry->set_disconnect_when_inactive;

  eval {
    remove_duplicates_from_bam({
      input_bam  => $bam_file_with_duplicates,
      output_bam => $full_path_to_deduplicated_bam,
      debug      => $self->debug,
    });
  };
  if ($@) {
    $self->throw($@);
  }

  # Give shared filesystems time to make the new file visible everywhere.
  sleep(20);

  my $eof_check_cmd = "check_bam_file_has_EOF_marker.pl --bam_file $full_path_to_deduplicated_bam";
  my $has_failed = $self->run_system_command($eof_check_cmd);
  if ($has_failed) {
    $self->throw("End of file marker check failed:\n" . $eof_check_cmd)
  }

  # Fix: was a second "my $cmd" in the same scope, masking the earlier one.
  my $index_cmd = qq(samtools index $full_path_to_deduplicated_bam);
  $has_failed = $self->run_system_command($index_cmd);
  if ($has_failed) {
    $self->throw("Can't index $full_path_to_deduplicated_bam")
  }

  my $expected_index_file = "${full_path_to_deduplicated_bam}.bai";
  # Fix: the original tested the bam file itself here, so a missing .bai
  # index was never detected.
  if (! -e $expected_index_file) {
    $self->throw("Can't find index file ${expected_index_file}!")
  }

  # Clean up the temporary per-chunk directories.
  foreach my $current_chunk (@$chunks) {
    my $temp_dir = dirname($current_chunk);
    $self->say_with_header("Deleting $temp_dir", 1);
    rmtree($temp_dir);
  }
  return;
}

1;
| Ensembl/ensembl-funcgen | modules/Bio/EnsEMBL/Funcgen/RunnableDB/PeakCalling/RemoveDuplicates.pm | Perl | apache-2.0 | 1,702 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package apps::hddtemp::local::mode::temperature;
use base qw(centreon::plugins::mode);
use strict;
use warnings;
use centreon::plugins::misc;
# Constructor: registers this mode's command-line options (connection /
# transport, the hddtemp command to run, thresholds and the disk list).
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;
    $options{options}->add_options(arguments =>
                                {
                                  "hostname:s" => { name => 'hostname' },
                                  "remote" => { name => 'remote' },
                                  "ssh-option:s@" => { name => 'ssh_option' },
                                  "ssh-path:s" => { name => 'ssh_path' },
                                  "ssh-command:s" => { name => 'ssh_command', default => 'ssh' },
                                  "timeout:s" => { name => 'timeout', default => 30 },
                                  "sudo" => { name => 'sudo' },
                                  # hddtemp is normally in /usr/sbin; '-u' selects the unit
                                  "command:s" => { name => 'command', default => 'hddtemp' },
                                  "command-path:s" => { name => 'command_path', default => '/usr/sbin' },
                                  "command-options:s" => { name => 'command_options', default => '-u' },
                                  "warning:s" => { name => 'warning' },
                                  "critical:s" => { name => 'critical' },
                                  "disks:s" => { name => 'disks' },
                                  "unit:s" => { name => 'unit', default => 'C' }
                                });
    return $self;
}
# Validate options and assemble the final hddtemp command line.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::init(%options);

    # Fix: the failure messages previously read $self->{warning} and
    # $self->{critical}, which are never populated (the parsed values live
    # in $self->{option_results}), so the offending value printed as ''.
    if (($self->{perfdata}->threshold_validate(label => 'warning', value => $self->{option_results}->{warning})) == 0) {
       $self->{output}->add_option_msg(short_msg => "Wrong warning threshold '" . $self->{option_results}->{warning} . "'.");
       $self->{output}->option_exit();
    }
    if (($self->{perfdata}->threshold_validate(label => 'critical', value => $self->{option_results}->{critical})) == 0) {
       $self->{output}->add_option_msg(short_msg => "Wrong critical threshold '" . $self->{option_results}->{critical} . "'.");
       $self->{output}->option_exit();
    }
    # A disk list is mandatory: it is passed verbatim to hddtemp.
    if (!defined($self->{option_results}->{disks}) || $self->{option_results}->{disks} eq '') {
       $self->{output}->add_option_msg(short_msg => "Need to specify hdd (ex. /dev/sda).");
       $self->{output}->option_exit();
    }

    #### Create command_options: "-u<unit> <disks> 2>&1"
    if (defined($self->{option_results}->{unit})) {
        $self->{option_results}->{command_options} .= $self->{option_results}->{unit};
    }
    $self->{option_results}->{command_options} .= ' ' . $self->{option_results}->{disks};
    $self->{option_results}->{command_options} .= ' 2>&1';
}
# Execute hddtemp (locally or over ssh) and turn each output line of the
# form "/dev/sda: MODEL: 34" into a status check plus perfdata.
# (Removed the unused "$total_size" local from the original.)
sub run {
    my ($self, %options) = @_;

    my $stdout = centreon::plugins::misc::execute(output => $self->{output},
                                                  options => $self->{option_results},
                                                  sudo => $self->{option_results}->{sudo},
                                                  command => $self->{option_results}->{command},
                                                  command_path => $self->{option_results}->{command_path},
                                                  command_options => $self->{option_results}->{command_options});

    # Default status; individual disks may raise it below.
    $self->{output}->output_add(severity => 'OK',
                                short_msg => "All temperatures are ok.");
    foreach (split(/\n/, $stdout)) {
        next if (!/(.*): (.*): ([0-9]*)/);
        my ($disk, $model, $temp) = ($1, $2, $3);

        my $exit_code = $self->{perfdata}->threshold_check(value => $temp,
                                                           threshold => [ { label => 'critical', exit_litteral => 'critical' }, { label => 'warning', exit_litteral => 'warning' } ]);
        $self->{output}->output_add(long_msg => sprintf("%s: %s", $disk, $temp . '°' . $self->{option_results}->{unit}));
        if (!$self->{output}->is_status(litteral => 1, value => $exit_code, compare => 'ok')) {
            $self->{output}->output_add(severity => $exit_code,
                                        short_msg => sprintf("'%s' temp is %s", $disk, $temp . '°' . $self->{option_results}->{unit}));
        }
        $self->{output}->perfdata_add(label => $disk, unit => $self->{option_results}->{unit},
                                      value => $temp,
                                      warning => $self->{perfdata}->get_perfdata_for_output(label => 'warning'),
                                      critical => $self->{perfdata}->get_perfdata_for_output(label => 'critical'),
                                      min => 0);
    }

    $self->{output}->display();
    $self->{output}->exit();
}
1;
__END__
=head1 MODE
Check hdd temperature with hddtemp.
=over 8
=item B<--disks>
Disks to check (ex: /dev/sda)
=item B<--unit>
Temperature unit (default: C).
=item B<--warning>
Threshold warning in °.
=item B<--critical>
Threshold critical in °.
=item B<--remote>
Execute command remotely in 'ssh'.
=item B<--hostname>
Hostname to query (need --remote).
=item B<--ssh-option>
Specify multiple options like the user (example: --ssh-option='-l=centreon-engine' --ssh-option='-p=52').
=item B<--ssh-path>
Specify ssh command path (default: none)
=item B<--ssh-command>
Specify ssh command (default: 'ssh'). Useful to use 'plink'.
=item B<--timeout>
Timeout in seconds for the command (Default: 30).
=item B<--sudo>
Use 'sudo' to execute the command.
=item B<--command>
Command to get information (Default: 'hddtemp').
Can be changed if you have output in a file.
=item B<--command-path>
Command path (Default: '/usr/sbin').
=item B<--command-options>
Command options (Default: '-u').
=back
=cut
| Sims24/centreon-plugins | apps/hddtemp/local/mode/temperature.pm | Perl | apache-2.0 | 6,751 |
package BS;
use strict;
use warnings;
use v5.10;
use base 'Exporter';
use Path::Tiny;
use JSON;
use YAML::XS;
use Mojo::URL;
use Template;
use Template::Stash;
use Format::Util::Numbers;
our @EXPORT_OK = qw/
root_path is_dev set_is_dev branch set_branch
localize set_lang all_languages lang_display_name
get_static_hash set_static_hash
root_url
tt2
css_files js_config menu
/;
# Absolute filesystem path of the repository root (this file lives three
# directory levels below it).
sub root_path {
    my $this_file = path(__FILE__);
    return $this_file->parent->parent->parent->absolute->stringify;
}
# for developer
our $IS_DEV = 0;

# True when the site is running in local developer mode.
sub is_dev { return $IS_DEV }

# Switch developer mode on (there is deliberately no way to switch it off).
sub set_is_dev { $IS_DEV = 1; }

our $BRANCH = '';

# Current branch tag ('' in production, 'br_<name>' in dev asset URLs).
sub branch { return $BRANCH }

# Record the branch name, prefixed with 'br_'.
sub set_branch {
    my ($name) = @_;
    $BRANCH = 'br_' . $name;
}
our $LANG = 'en';

# Set the active language code used by localize(), url_for() and friends.
sub set_lang {
    my ($new_lang) = @_;
    $LANG = $new_lang;
}
my %__lh;

# Translate @texts for the current $LANG. A BS::I18N language handle is
# built lazily on first use per language and cached in %__lh.
sub localize {
    my @texts = @_;

    require BS::I18N;
    my $handle = $__lh{$LANG};
    if (!defined $handle) {
        $handle = BS::I18N::handle_for($LANG)
            || die("could not build locale for language $LANG");
        $__lh{$LANG} = $handle;
    }
    return $handle->maketext(@texts);
}
# Every language code the site is translated into (upper case).
sub all_languages {
    return qw(EN AR DE ES FR ID IT PL PT RU VI JA ZH_CN ZH_TW);
}

# Subset of languages rendered right-to-left.
sub rtl_languages {
    return qw(AR);
}
# Human readable (native) name for a language code. Unknown codes are
# returned upper-cased; undef yields ''.
sub lang_display_name {
    my ($iso_code) = @_;

    my %name_for = (
        AR => 'Arabic',
        DE => 'Deutsch',
        ES => 'Español',
        FR => 'Français',
        EN => 'English',
        ID => 'Bahasa Indonesia',
        JA => '日本語',
        PL => 'Polish',
        PT => 'Português',
        RU => 'Русский',
        VI => 'Vietnamese',
        ZH_CN => '简体中文',
        ZH_TW => '繁體中文',
        IT => 'Italiano'
    );

    my $key = defined($iso_code) ? uc $iso_code : '';
    return $name_for{$key} || $key;
}
## url_for

# Root of all site URLs: '/' in production, '/binary-static/[br_<name>/]'
# when running in developer mode.
sub root_url {
    return '/' unless is_dev();
    return '/binary-static/' . ($BRANCH ? $BRANCH . '/' : '');
}
my %__request;

# Delegate URL construction to a BS::Request object cached per language.
sub url_for {
    require BS::Request;
    my $request = ($__request{$LANG} //= BS::Request->new(language => $LANG));
    return $request->url_for(@_);
}
## tt2/haml

# Build a Template Toolkit processor bound to the current language.
#
# Fixes:
#  - BS::Request was only require'd inside url_for(), so calling tt2()
#    before any url_for() call crashed; load it here as well.
#  - 'state $request' cached the request built for whichever language was
#    active on the FIRST call, for ever; reuse the per-language cache
#    (%__request, shared with url_for) so later languages are correct.
sub tt2 {
    my @include_path = (root_path() . '/src/templates/toolkit');

    require BS::Request;
    my $request = ($__request{$LANG} //= BS::Request->new(language => $LANG));

    my $stash = Template::Stash->new({
        language => $request->language,
        broker => $request->broker,
        request => $request,
        broker_name => $request->website->display_name,
        website => $request->website,
        # 'is_pjax_request' => $request->is_pjax,
        l => \&localize,
        to_monetary_number_format => \&Format::Util::Numbers::to_monetary_number_format,
    });
    my $template_toolkit = Template->new({
        ENCODING => 'utf8',
        INCLUDE_PATH => join(':', @include_path),
        INTERPOLATE => 1,
        PRE_CHOMP => $Template::CHOMP_GREEDY,
        POST_CHOMP => $Template::CHOMP_GREEDY,
        TRIM => 1,
        STASH => $stash,
    }) || die "$Template::ERROR\n";
    return $template_toolkit;
}
# Cache-busting token appended to asset URLs: eight random [a-z0-9] chars.
our $static_hash = join '', map { ('a'..'z', 0..9)[rand 36] } 1 .. 8;

sub get_static_hash { return $static_hash }

sub set_static_hash { $static_hash = shift }
## css/js/menu

# Stylesheet URL list for the current language; right-to-left languages
# get the dedicated RTL build. (The dev-mode un-minified variant was
# already disabled in the original.)
sub css_files {
    my $is_rtl = grep { $_ eq uc $LANG } rtl_languages();
    my $sheet  = $is_rtl ? 'css/binary_rtl.min.css' : 'css/binary.min.css';
    return (root_url() . $sheet . "?$static_hash");
}
# Javascript page configuration: the list of JS bundles to load plus a
# JSON-encoded settings blob consumed by the front end.
sub js_config {
    my @libs;
    # un-minified bundle in developer mode, minified one in production
    if (is_dev()) {
        push @libs, root_url . "js/binary.js?$static_hash";
    } else {
        push @libs, root_url . "js/binary.min.js?$static_hash";
    }
    my %setting = (
        enable_relative_barrier => 'true',
        image_link => {
            hourglass => url_for('images/common/hourglass_1.gif')->to_string,
            up => url_for('images/javascript/up_arrow_1.gif')->to_string,
            down => url_for('images/javascript/down_arrow_1.gif')->to_string,
            calendar_icon => url_for('images/common/calendar_icon_1.png')->to_string,
            livechaticon => url_for('images/pages/contact/chat-icon.svg')->to_string,
        },
        broker => 'CR',
        countries_list => YAML::XS::LoadFile(root_path() . '/scripts/config/countries.yml'),
        valid_loginids => 'MX|MF|VRTC|MLT|CR|FOG|VRTJ|JP',
        streaming_server => 'www.binary.com',
    );
    # hardcode, require a fix?
    $setting{arr_all_currencies} = ["USD", "EUR", "GBP", "AUD"];
    return {
        libs => \@libs,
        settings => JSON::to_json(\%setting),
    };
}
# Build the top navigation menu as a list of hash refs (id/url/text plus
# optional CSS classes and nested sub_items). All labels go through
# localize() for translation; 'ja-hide' entries are hidden on the Japanese
# site, 'by_client_type ...' entries only show for the matching client.
sub menu {
    my @menu;
    # beta interface
    push @menu,
      {
        id => 'topMenuBetaInterface',
        url => url_for($LANG eq 'ja' ? '/jptrading' : '/trading'),
        text => localize('Start Trading'),
        link_class => 'pjaxload'
      };
    # myaccount
    my $my_account_ref = {
        id => 'topMenuMyAccount',
        url => url_for('/user/my_accountws'),
        text => localize('My Account'),
        class => 'by_client_type client_real client_virtual',
        link_class => 'with_login_cookies pjaxload',
        sub_items => [],
    };
    # Portfolio
    push @{$my_account_ref->{sub_items}},
      {
        id => 'topMenuPortfolio',
        url => url_for('/user/portfoliows'),
        text => localize('Portfolio'),
        link_class => 'with_login_cookies pjaxload',
      };
    push @{$my_account_ref->{sub_items}},
      {
        id => 'topMenuProfitTable',
        url => url_for('/user/profit_tablews'),
        text => localize('Profit Table'),
        link_class => 'with_login_cookies pjaxload',
      };
    push @{$my_account_ref->{sub_items}},
      {
        id => 'topMenuStatement',
        url => url_for('/user/statementws'),
        text => localize('Statement'),
        link_class => 'with_login_cookies pjaxload',
      };
    push @{$my_account_ref->{sub_items}},
      {
        id => 'topMenuChangePassword',
        url => url_for('/user/change_passwordws'),
        text => localize('Password'),
        link_class => 'with_login_cookies pjaxload',
      };
    push @{$my_account_ref->{sub_items}},
      {
        id => 'topMenuAccountSettings',
        url => url_for('/user/settingsws'),
        text => localize('Settings'),
        link_class => 'with_login_cookies pjaxload',
      };
    push @{$my_account_ref->{sub_items}},
      {
        id => 'topMenuBecomeAffiliate',
        class => 'ja-hide',
        url => url_for('/affiliate/signup'),
        link_class => 'pjaxload',
        text => localize('Affiliate'),
      };
    push @{$my_account_ref->{sub_items}},
      {
        id => 'topMenuAuthenticateAccount',
        url => url_for('/user/authenticatews'),
        text => localize('Authenticate'),
        class => 'by_client_type client_real',
        link_class => 'with_login_cookies pjaxload',
      };
    push @menu, $my_account_ref;
    # cashier
    push @menu,
      {
        id => 'topMenuCashier',
        url => url_for('/cashier'),
        text => localize('Cashier'),
        link_class => 'pjaxload',
      };
    # resources
    my $resources_items_ref = {
        id => 'topMenuResources',
        url => url_for('/resources'),
        text => localize('Resources'),
        link_class => 'pjaxload',
    };
    my $asset_index_ref = {
        id => 'topMenuAssetIndex',
        class => 'ja-hide',
        url => url_for('/resources/asset_indexws'),
        text => localize('Asset Index'),
        link_class => 'pjaxload',
    };
    my $trading_times_ref = {
        id => 'topMenuTradingTimes',
        url => url_for('/resources/market_timesws'),
        text => localize('Trading Times'),
        link_class => 'pjaxload',
    };
    $resources_items_ref->{'sub_items'} = [$asset_index_ref, $trading_times_ref];
    push @menu, $resources_items_ref;
    # applications
    push @menu,
      {
        id => 'topMenuApplications',
        url => url_for('/applications'),
        text => localize('Applications'),
        link_class => 'ja-hide pjaxload',
      };
    # push @{$menu}, $self->_main_menu_trading();
    return \@menu;
}
1;
| shuwnyuantee/binary-static | scripts/lib/BS.pm | Perl | apache-2.0 | 8,943 |
#!/usr/bin/env perl
BEGIN {
    # Advertise the catalyst.pl script-generator version this launcher was
    # created with (checked by Catalyst's script framework).
    $ENV{CATALYST_SCRIPT_GEN} = 40;
}
use Catalyst::ScriptRunner;
# Launch the development test server for the ISC::BIND::Stats::UI app.
Catalyst::ScriptRunner->run('ISC::BIND::Stats::UI', 'Server');
1;
=head1 NAME
isc_bind_stats_ui_server.pl - Catalyst Test Server
=head1 SYNOPSIS
isc_bind_stats_ui_server.pl [options]
-d --debug force debug mode
-f --fork handle each request in a new process
(defaults to false)
-? --help display this help and exits
-h --host host (defaults to all)
-p --port port (defaults to 3000)
-k --keepalive enable keep-alive connections
-r --restart restart when files get modified
(defaults to false)
-rd --restart_delay delay between file checks
(ignored if you have Linux::Inotify2 installed)
-rr --restart_regex regex match files that trigger
a restart when modified
(defaults to '\.yml$|\.yaml$|\.conf|\.pm$')
--restart_directory the directory to search for
modified files, can be set multiple times
(defaults to '[SCRIPT_DIR]/..')
--follow_symlinks follow symlinks in search directories
(defaults to false. this is a no-op on Win32)
--background run the process in the background
--pidfile specify filename for pid file
See also:
perldoc Catalyst::Manual
perldoc Catalyst::Manual::Intro
=head1 DESCRIPTION
Run a Catalyst Testserver for this application.
=head1 AUTHORS
Catalyst Contributors, see Catalyst.pm
=head1 COPYRIGHT
This library is free software. You can redistribute it and/or modify
it under the same terms as Perl itself.
=cut
| isc-projects/bind9-stats | ISC-BIND-Stats-UI/script/isc_bind_stats_ui_server.pl | Perl | bsd-2-clause | 1,778 |
package Task::Tags;

# Empty "Task" distribution module: installing it pulls in the Tags::
# family of modules via the distribution's prerequisites (see POD below).
# There is no runtime code here.

# Pragmas.
use strict;
use warnings;

# Version.
our $VERSION = 0.07;

1;
__END__
=pod
=encoding utf8
=head1 NAME
Task::Tags - Install the Tags modules.
=head1 SYNOPSIS
cpanm Task::Tags
=head1 SEE ALSO
=over
=item L<Tags>
Structure oriented SGML/XML/HTML/etc. elements manipulation.
=item L<Tags::Element>
Element utilities for 'Tags'.
=item L<Tags::Output>
Base class for Tags::Output::*.
=item L<Tags::Output::Indent>
Indent class for Tags.
=item L<Tags::Output::LibXML>
LibXML output for Tags.
=item L<Tags::Output::PYX>
PYX class for line oriented output for 'Tags'.
=item L<Tags::Output::Raw>
Raw printing 'Tags' structure to tags code.
=item L<Tags::Output::Structure>
Structure class for 'Tags' output.
=item L<Tags::Output::LibXML>
Printing 'Tags' structure by LibXML library.
=item L<Tags::Utils>
Utils module for Tags.
=item L<Tags::Utils::Preserve>
Class to check if content in element is preserved?
=back
=head1 REPOSITORY
L<https://github.com/tupinek/Task-Tags>
=head1 AUTHOR
Michal Špaček L<mailto:skim@cpan.org>
L<http://skim.cz>
=head1 LICENSE AND COPYRIGHT
© 2015 Michal Špaček
Artistic License
BSD 2-Clause License
=head1 VERSION
0.07
=cut
| tupinek/Task-Tags | Tags.pm | Perl | bsd-2-clause | 1,230 |
package AsposeTasksCloud::Object::OrdinalNumber;

require 5.6.0;
use strict;
use warnings;
use utf8;
use JSON qw(decode_json);
use Data::Dumper;
use Module::Runtime qw(use_module);
use Log::Any qw($log);
use Date::Parse;
use DateTime;

use base "AsposeTasksCloud::Object::BaseObject";

#
#
#
#NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
#
# The OrdinalNumber schema declares no properties, so both maps are empty.
my $swagger_types = {
};
my $attribute_map = {
};

# new object: blesses an (empty) attribute hash into this class.
sub new {
    my ($class, %args) = @_;
    my $self = {
    };
    return bless $self, $class;
}

# get swagger type of the attribute
sub get_swagger_types {
    return $swagger_types;
}

# get attribute mapping (JSON field name -> accessor name)
sub get_attribute_map {
    return $attribute_map;
}

1;
| asposetasks/Aspose_Tasks_Cloud | SDKs/Aspose.Tasks-Cloud-SDK-for-Perl/lib/AsposeTasksCloud/Object/OrdinalNumber.pm | Perl | mit | 764 |
#!/usr/bin/env perl
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
=head1 CONTACT
Please email comments or questions to the Ensembl help desk
<http://www.ensembl.org/Help/Contact>.
=head1 NAME
ping_ensembl.pl
=head1 SYNOPSIS
# print usage
$ ping_ensembl.pl -h
# ping Ensembl with default species (Human)
$ ping_ensembl.pl
# ping Ensembl with user provided species
$ ping_ensembl.pl -s "dog"
# ping Ensembl with a different version (Human)
$ ping_ensembl.pl -db_version 70
# ping the US Ensembl mirror
$ ping_ensembl.pl -ue
# ping Ensembl Genomes with default species (arabidopsis thaliana)
$ ping_ensembl.pl -eg
# ping Ensembl Genomes with user provided species
$ ping_ensembl.pl -eg -s "oryza sativa japonica"
=head1 DESCRIPTION
This script is used to detect if you can contact the Ensembl database
server with your current setup. The program will attempt to print out
helpful hints about how to resolve your problems. If they still persist
then please contact http://www.ensembl.org/Help/Contact.
=head1 SUBROUTINES
=cut
use strict;
use warnings;
use File::Temp qw/tempfile/;
use Net::FTP;
use Getopt::Long;
#
# Default option values
#
my $help = 0;
my $host = 'ensembldb.ensembl.org';
my $user = 'anonymous';
my $port = 3306;
my $verbose = 0;
my $db_version = -1;
my $grch37;
my $useast = 0;
my $ensembl_genomes = 0;
my $species = undef;
my $api_version = -1;
#
# Parse command-line arguments
#
my $options_ok =
  GetOptions(
    "ue" => \$useast,
    "eg" => \$ensembl_genomes,
    "species=s" => \$species,
    "db_version=i" => \$db_version,
    "verbose" => \$verbose,
    "grch37" => \$grch37,
    "help" => \$help);
($help or !$options_ok) && usage();
# -ue and -eg are mutually exclusive: there is no Ensembl Genomes US mirror.
$useast and $ensembl_genomes and
  die "Cannot test Ensembl Genomes on the US mirror.\n" .
      "Options \"ue\" and \"eg\" are mutually exclusive\n";
$useast and $host = "useastdb.ensembl.org";
$verbose and $verbose = 1;
# GRCh37 databases live on a dedicated port of the same server.
$grch37 and $port = 3337;
if ($ensembl_genomes) {
  $host = "mysql-eg-publicsql.ebi.ac.uk";
  $port = 4157;
  $species = "arabidopsis thaliana"
    unless defined $species;
}
# Main connectivity test: load required modules, register all databases
# for the requested release and fetch a core DBAdaptor for the species.
# Any failure is captured in $@ and diagnosed below.
eval {
  require DBI;
  require DBD::mysql;
  require Bio::Perl;
  require Bio::EnsEMBL::Registry;
  require Bio::EnsEMBL::ApiVersion;
  require Bio::EnsEMBL::LookUp if $ensembl_genomes;
  $api_version = Bio::EnsEMBL::ApiVersion::software_version();
  $db_version = $api_version if $db_version == -1; #if it was still -1 then it wasn't set. Default is current API version
  Bio::EnsEMBL::Registry->load_registry_from_db(
    -host => $host,
    -port => $port,
    -user => $user,
    -db_version => $db_version,
    -verbose => $verbose,
  );
  $species = "human" unless defined $species;
  # Fetching the adaptor IS the test; the variable itself is unused.
  my $species_adaptor = Bio::EnsEMBL::Registry->get_DBAdaptor("$species", 'core');
  print "Installation is good. Connection to Ensembl works and you can query the $species core database\n";
};
my $error = $@;
# If no error found then see if we've got all of our external modules available
if(!$error) {
  $error = '';
  eval {
    foreach my $module (qw/Compara Variation Funcgen/) {
      my $full_module = "Bio::EnsEMBL::${module}::DBSQL::DBAdaptor";
      eval "require $full_module;";
      if($@) {
        $error .= "\tMissing the checkout $module\n";
      }
    }
  };
}
# Check the current release of datafiles from the FTP site
my $ftp_version = -1;
eval {
  my $ftp = Net::FTP->new('ftp.ensembl.org', Debug => 0);
  $ftp->login("anonymous",'-anonymous@');
  $ftp->cwd('/pub');
  my ($fh, $filename) = tempfile();
  close($fh);
  $ftp->get('current_README', $filename);
  $ftp->quit();
  open($fh, '<', $filename);
  local $/ = undef;
  my $ftp_readme = <$fh>;
  close($fh);
  ($ftp_version) = $ftp_readme =~ /Ensembl Release (\d+) Databases/;
};
# Print all the errors which could have occured
if($error) {
  print "ERROR: Error detected when connecting to Ensembl!\n";
  if($error =~ /DBI/) {
    print "\tCannot find the DBI perl module. Please install this using your package management system, cpan or cpanm. Please consult http://www.ensembl.org/info/docs/api/api_installation.html\n";
  }
  if($error =~ /mysql/) {
    print "\tCannot find the DBD::mysql perl module. Please install this using your package management system, cpan or cpanm. Also install the mysql libs. Please consult http://www.ensembl.org/info/docs/api/api_installation.html\n";
  }
  if($error =~ /Can't locate Bio\/E/) {
    print "\tLooks like you need to setup your PERL5LIB with the Ensembl API. Please consult http://www.ensembl.org/info/docs/api/api_installation.html\n";
  }
  if($error =~ /Can't locate Bio\/Perl/) {
    print "\tLooks like you need to setup your PERL5LIB with BioPerl. Please consult http://www.ensembl.org/info/docs/api/api_installation.html\n";
  }
  if($error =~ /Cannot connect to/) {
    print "\tCannot seem to contact EnsemblDB at '$host' with the username '$user'. Try running 'ping $host' or asking your systems about firewalls against port $port\n";
  }
  if($error =~ /internal name/) {
    print "\tSpecies was not found. You may have accidentally download the HEAD API version (found API release $db_version & public FTP release is $ftp_version). Please consult http://www.ensembl.org/info/docs/api/api_installation.html\n";
  }
  if($error =~ /Species not defined/) {
    print "\tSpecies was not found. You may have accidentally download the HEAD API version (told to load release $db_version, API version is $api_version & public FTP release is $ftp_version). Please consult http://www.ensembl.org/info/docs/api/api_installation.html\n";
  }
  if($error =~ /Missing the checkout/) {
    print "\tYour core installation was good but supplementary modules cannot be found. If you wish to access these other Ensembl resources add the libraries to your PERL5LIB:\n";
    print $error;
    #bail early
    exit 0;
  }
  print '='x80, "\n";
  print "If the problem persists please send the following error message to helpdesk\@ensembl.org\n";
  print $error;
  print '='x80, "\n";
  exit 1;
}
=head2 usage
Arg [] : None
Returntype : None
Example :
Description : Print script usage string
Exceptions : None
Caller : General
=cut
# Print the script usage string to STDOUT and exit(1).
# Takes no arguments and never returns.
sub usage {
# Derive the program name with File::Basename (core module) instead of
# shelling out to `basename` -- portable and avoids spawning a process.
require File::Basename;
my $prog = File::Basename::basename($0);
print "Usage: $prog [OPTIONS]\n\n";
print "Options:\n";
print " -ue Ping Ensembl US mirror\n";
print " -eg Ping Ensembl Genomes (can't be used together with \"ue\")\n";
print " -species <species> Use species <species> (use double quotes if species name contains spaces)\n";
print " -db_version <version> Use the specified version of Ensembl not the API version\n";
print " -grch37 Use human assembly GRCh37 rather than the default GRCh38 version\n";
print " -verbose Ping output is more verbose. Not recommended for Ensembl genomes\n";
print " -help Print this message\n";
print "\n\n";
exit 1;
}
| mjg17/ensembl | misc-scripts/ping_ensembl.pl | Perl | apache-2.0 | 7,678 |
## OpenXPKI::Config
##
## Written 2011 by Scott Hardin for the OpenXPKI project
## Copyright (C) 2010, 2011 by The OpenXPKI Project
##
## Based on the CPAN module App::Options
##
## vim: syntax=perl
use Config::Merge;
use OpenXPKI::Exception;
use OpenXPKI::XML::Cache;
package OpenXPKI::Config::Merge;
use base qw( Config::Versioned );
# override new() to prepend with our config bootstrap
# Constructor override: bootstraps the default OpenXPKI config locations
# (honouring OPENXPKI_CONF_DB / OPENXPKI_CONF_PATH) before delegating to
# Config::Versioned::new().
sub new {
    my $proto  = shift;
    my $class  = ref($proto) || $proto;
    my $params = shift;

    # Environment variables win over caller-supplied values
    $params->{dbpath} = $ENV{OPENXPKI_CONF_DB}
        if $ENV{OPENXPKI_CONF_DB};
    $params->{path} = [ split /:/, $ENV{OPENXPKI_CONF_PATH} ]
        if $ENV{OPENXPKI_CONF_PATH};

    # Fall back to the packaged defaults when nothing was configured
    $params->{dbpath} ||= '/etc/openxpki/config.git';
    $params->{path} = [qw( /etc/openxpki/config.d )]
        unless exists $params->{path};

    $params->{autocreate} = 1;

    return $proto->SUPER::new($params);
}
# parser overrides the method in Config::Versioned to use Config::Merge
# instead of Config::Std
# TODO: Parse multiple directories
# Reads the configuration directory with Config::Merge (instead of the
# Config::Std used by the parent Config::Versioned), splices in the
# serialized workflow XML for every realm, and commits the resulting tree.
# Returns nothing when no usable config directory is available.
sub parser {
my $self = shift;
my $params = shift;
my $dir;
# Prefer an explicitly passed path; otherwise use the object's own path.
# NOTE(review): only the first directory is consulted (see TODO above).
if ( exists $params->{'path'} ) {
$dir = $params->{path}->[0];
} else {
$dir = $self->path()->[0];
}
# If the directory was not set or doesn't exist, don't bother
# trying to import any configuration
if ( not $dir or not -d $dir ) {
return;
}
# Skip the workflow directories
my $cm = Config::Merge->new( path => $dir, skip => qr/realm\.\w+\._workflow/ );
my $cmref = $cm->();
my $tree = $self->cm2tree($cmref);
# Incorporate the Workflow XML definitions
# List the realms from the system.realms tree
foreach my $realm (keys %{$tree->{system}->{realms}}) {
my $xml_cache = OpenXPKI::XML::Cache->new (CONFIG => "$dir/realm/$realm/_workflow/workflow.xml");
$tree->{realm}->{$realm}->{workflow} = $xml_cache->get_serialized();
}
$params->{comment} = 'import from ' . $dir . ' using Config::Merge';
$self->commit( $tree, @_, $params );
}
# cm2tree is just a helper routine for recursively traversing the data
# structure returned by Config::Merge and massaging it into something
# we can use with Config::Versioned
# Recursively massage the structure returned by Config::Merge into a plain
# nested-hash tree suitable for Config::Versioned:
#   - hashes are copied; keys of the form "@name@", "@name" or "name@" are
#     stored as a reference to the raw value (a scalar ref marks a symlink)
#   - arrays become hashes keyed by element index (0, 1, 2, ...)
#   - any other value (a scalar leaf) is returned unchanged
sub cm2tree
{
    my $self = shift;
    my $cm   = shift;
    # (fixed: removed an unused "my $tree = {};" local that was never read)
    if ( ref($cm) eq 'HASH' ) {
        my $ret = {};
        foreach my $key ( keys %{$cm} ) {
            if ( $key =~ m{ (?: \A @ (.*?) @ \z | \A @ (.*) | (.*?) @ \z ) }xms ) {
                my $match = $1 || $2 || $3;
                # make it a ref to an anonymous scalar so we know it's a symlink
                $ret->{$match} = \$cm->{$key};
            }
            else {
                $ret->{$key} = $self->cm2tree( $cm->{$key} );
            }
        }
        return $ret;
    }
    elsif ( ref($cm) eq 'ARRAY' ) {
        my $ret = {};
        my $i   = 0;
        foreach my $entry ( @{$cm} ) {
            $ret->{ $i++ } = $self->cm2tree($entry);
        }
        return $ret;
    }
    else {
        # scalar leaf
        return $cm;
    }
}
1; # End of OpenXPKI::Config
__DATA__
=head1 NAME
OpenXPKI::Config - Simplified access to the configuration data
=head1 SYNOPSIS
use OpenXPKI::Config;
my $cfg = OpenXPKI::Config->new();
print "Param1=", $cfg->get('subsystem1.group1.param1'), "\n";
=head1 DESCRIPTION
OpenXPKI::Config uses Config::Versioned to access versioned configuration
parameters. It overrides the default behavior of Config::Versioned to use
the CPAN C<Config::Merge> module instead of C<Config::Std>. In addition,
the following parameters are also modified:
=head2 dbpath
The C<dbpath> (the storage location for the internal git repository) is
located in C</etc/openxpki/config.git> by default, but may be overridden
with the ENV variable C<OPENXPKI_CONF_DB>.
=head2 path
The C<path> is where the configuration files to be read are located and
is set to C</etc/openxpki/config.d> by default, but may be overridden
with the ENV variable C<OPENXPKI_CONF_PATH>.
Note: for C<Config::Merge>, only one directory name should be supplied
and not a colon-separated list.
=head1 METHODS
=head2 new()
This overrides the parent class, adding the default locations for the
configuration files needed by OpenXPKI.
=head1 MORE INFO
See L<Config::Versioned> for more details on the configuration backend.
See L<Config::Merge> for more details on the configuration file format.
=cut
1;
| mrscotty/openxpki | core/server/OpenXPKI/Config/Merge.pm | Perl | apache-2.0 | 4,630 |
# Auto-generated Paws response class for the Elasticsearch ListTags call.
# Pure data holder: TagList carries the Paws::ES::Tag objects returned by
# the service, _request_id the AWS request identifier.
package Paws::ES::ListTagsResponse;
use Moose;
has TagList => (is => 'ro', isa => 'ArrayRef[Paws::ES::Tag]');
has _request_id => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::ES::ListTagsResponse
=head1 ATTRIBUTES
=head2 TagList => ArrayRef[L<Paws::ES::Tag>]
List of C<Tag> for the requested Elasticsearch domain.
=head2 _request_id => Str
=cut
| ioanrogers/aws-sdk-perl | auto-lib/Paws/ES/ListTagsResponse.pm | Perl | apache-2.0 | 405 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
package AI::MXNet::Base;
use strict;
use warnings;
use PDL;
use PDL::Types ();
use PDL::CCS::Nd;
use AI::MXNetCAPI 1.5;
use AI::NNVMCAPI 1.4;
use AI::MXNet::Types;
use Time::HiRes;
use Scalar::Util qw(blessed);
use Carp;
use Exporter;
use base qw(Exporter);
use List::Util qw(shuffle);
use Data::Dumper;
our @EXPORT = qw(product enumerate assert zip check_call build_param_doc
pdl cat dog svd bisect_left pdl_shuffle as_array ascsr rand_sparse
DTYPE_STR_TO_MX DTYPE_MX_TO_STR DTYPE_MX_TO_PDL
DTYPE_PDL_TO_MX DTYPE_MX_TO_PERL GRAD_REQ_MAP
STORAGE_TYPE_UNDEFINED STORAGE_TYPE_DEFAULT
STORAGE_TYPE_ROW_SPARSE STORAGE_TYPE_CSR
STORAGE_TYPE_STR_TO_ID STORAGE_TYPE_ID_TO_STR STORAGE_AUX_TYPES);
our @EXPORT_OK = qw(pzeros pceil pones digitize hash array_index range);
# Lookup tables translating between MXNet dtype codes, dtype name strings,
# PDL type codes and perl pack() templates. Several of the maps accept both
# the numeric MX code and the dtype name as keys so either form can be used.
use constant DTYPE_STR_TO_MX => {
float32 => 0,
float64 => 1,
float16 => 2,
uint8 => 3,
int32 => 4,
int8 => 5,
int64 => 6
};
use constant DTYPE_MX_TO_STR => {
0 => 'float32',
1 => 'float64',
2 => 'float16',
3 => 'uint8',
4 => 'int32',
5 => 'int8',
6 => 'int64'
};
# NOTE(review): float16 maps to PDL type 6 and int8 to PDL type 0 -- presumably
# because PDL lacks native half/int8 types, so the nearest wider/compatible
# storage is used; confirm before relying on exact element width.
use constant DTYPE_MX_TO_PDL => {
0 => 6,
1 => 7,
2 => 6,
3 => 0,
4 => 3,
5 => 0,
6 => 5,
float32 => 6,
float64 => 7,
float16 => 6,
uint8 => 0,
int32 => 3,
int8 => 0,
int64 => 5
};
# Inverse direction; only the PDL types actually produced above are listed.
use constant DTYPE_PDL_TO_MX => {
6 => 0,
7 => 1,
0 => 3,
3 => 4,
5 => 6
};
# pack()/unpack() template letter for each dtype (again keyed both ways).
use constant DTYPE_MX_TO_PERL => {
0 => 'f',
1 => 'd',
2 => 'S',
3 => 'C',
4 => 'l',
5 => 'c',
6 => 'q',
float32 => 'f',
float64 => 'd',
float16 => 'S',
uint8 => 'C',
int32 => 'l',
int8 => 'c',
int64 => 'q'
};
# Gradient request modes accepted by the executor (note: 2 is unused).
use constant GRAD_REQ_MAP => {
null => 0,
write => 1,
add => 3
};
# Sparse storage type ids as used by the MXNet C API.
use constant {
STORAGE_TYPE_UNDEFINED => -1,
STORAGE_TYPE_DEFAULT => 0,
STORAGE_TYPE_ROW_SPARSE => 1,
STORAGE_TYPE_CSR => 2
};
use constant STORAGE_TYPE_STR_TO_ID => {
undefined => STORAGE_TYPE_UNDEFINED,
default => STORAGE_TYPE_DEFAULT,
row_sparse => STORAGE_TYPE_ROW_SPARSE,
csr => STORAGE_TYPE_CSR
};
use constant STORAGE_TYPE_ID_TO_STR => {
STORAGE_TYPE_UNDEFINED() => 'undefined',
STORAGE_TYPE_DEFAULT() => 'default',
STORAGE_TYPE_ROW_SPARSE() => 'row_sparse',
STORAGE_TYPE_CSR() => 'csr'
};
# Dtypes of the auxiliary index arrays each sparse format carries.
use constant STORAGE_AUX_TYPES => {
row_sparse => ['int64'],
csr => ['int64', 'int64']
};
=head1 NAME
AI::MXNet::Base - Helper functions
=head1 DEFINITION
Helper functions
=head2 zip
Perl version of for x,y,z in zip (arr_x, arr_y, arr_z)
Parameters
----------
$sub_ref, called with @_ filled with $arr_x->[$i], $arr_y->[$i], $arr_z->[$i]
for each loop iteration.
@array_refs
=cut
# Perl analogue of Python's zip(): given array refs, returns a list of
# [$x, $y, ...] tuples built by the low-level AI::MXNetCAPI::py_zip helper.
# Legacy form: when the first argument is a code ref it is invoked once per
# tuple instead of returning the list.
sub zip
{
if('CODE' eq ref $_[0])
{
# continue supporting the callback style
my $code = shift;
$code->(@$_) for AI::MXNetCAPI::py_zip(map { \@$_ } @_);
return;
}
# the map() here may seem like a no-op, but triggers overloading or
# whatever else is needed to make array-ish things actually arrays
# before entering the low level list builder.
return AI::MXNetCAPI::py_zip(map { \@$_ } @_);
}
=head2 enumerate
Same as zip, but the argument list in the anonymous sub is prepended
by the iteration count.
=cut
# Like zip(), but prepends the 0-based iteration index to each tuple.
# The index range is taken from the length of the FIRST array ref, so all
# arguments are expected to be equally long.
sub enumerate
{
if('CODE' eq ref $_[0])
{
# continue supporting the callback style
my $code = shift;
my $len = @{ $_[0] };
$code->(@$_) for AI::MXNetCAPI::py_zip([0..$len-1], map { \@$_ } @_);
return;
}
my $len = @{ $_[0] };
return AI::MXNetCAPI::py_zip([0..$len-1], map { \@$_ } @_);
}
=head2 product
Calculates the product of the input arguments.
=cut
# Multiply all arguments together and return the result.
# Returns 1 for an empty argument list (the multiplicative identity).
# (fixed: the original used map in void context purely for its side
# effect; a plain postfix for-loop expresses the intent directly)
sub product
{
    my $p = 1;
    $p *= $_ for @_;
    return $p;
}
=head2 bisect_left
https://hg.python.org/cpython/file/2.7/Lib/bisect.py
=cut
# Binary search, ported from Python's bisect.bisect_left: return the
# leftmost index in the sorted array ref $aref at which $target could be
# inserted while keeping the array sorted. Optional $low/$high restrict
# the search window (defaults: 0 and the array length). Confesses when a
# negative lower bound is supplied.
sub bisect_left
{
    my ($aref, $target, $low, $high) = @_;
    $low  //= 0;
    $high //= scalar @{ $aref };
    Carp::confess('lo must be non-negative') if $low < 0;
    while ($low < $high)
    {
        my $middle = int(($low + $high) / 2);
        if ($aref->[$middle] < $target)
        {
            $low = $middle + 1;
        }
        else
        {
            $high = $middle;
        }
    }
    return $low;
}
=head2 pdl_shuffle
Shuffle the pdl by the last dimension
Parameters
-----------
PDL $pdl
$preshuffle Maybe[ArrayRef[Index]], if defined the array elements are used
as shuffled last dimension's indexes
=cut
# Shuffle a piddle along its last dimension. When $preshuffle (an array
# ref of indexes) is given it is used as the permutation instead of a
# random one, which makes the shuffle reproducible.
sub pdl_shuffle
{
my ($pdl, $preshuffle) = @_;
my @shuffle = $preshuffle ? @{ $preshuffle } : shuffle(0..$pdl->dim(-1)-1);
# dice_axis(-1, ...) reindexes the last dimension by the permutation
return $pdl->dice_axis(-1, pdl(\@shuffle));
}
=head2 assert
Parameters
-----------
Bool $input
Str $error_str
Calls Carp::confess with $error_str//"AssertionError" if the $input is false
=cut
# Minimal assertion helper: does nothing when $condition is true,
# otherwise confesses with $message (default: 'AssertionError').
# $Carp::CarpLevel is localized so the stack trace points at the
# assert() caller rather than at assert() itself.
sub assert
{
    my ($condition, $message) = @_;
    return if $condition;
    local $Carp::CarpLevel = 1;
    Carp::confess($message // 'AssertionError');
}
=head2 check_call
Checks the return value of C API call
This function will raise an exception when error occurs.
Every API call is wrapped with this function.
Returns the C API call return values stripped of first return value,
checks for return context and returns first element in
the values list when called in scalar context.
=cut
# Wrap an MXNet C API call: the first value is the status code (non-zero
# means failure, in which case the last C error message is raised via
# confess). The remaining values are returned -- the whole list in list
# context, just the first one in scalar context.
sub check_call
{
Carp::confess(AI::MXNetCAPI::GetLastError()) if shift;
return wantarray ? @_ : $_[0];
}
=head2 build_param_doc
Builds argument docs in python style.
arg_names : array ref of str
Argument names.
arg_types : array ref of str
Argument type information.
arg_descs : array ref of str
Argument description information.
remove_dup : boolean, optional
Whether to remove duplication or not.
Returns
-------
docstr : str
Python docstring of parameter sections.
=cut
# Build a Python-style "Parameters" docstring section from three parallel
# array refs (names, type strings, descriptions). When $remove_dup is true
# (the default) only the first occurrence of each parameter name is kept.
sub build_param_doc
{
my ($arg_names, $arg_types, $arg_descs, $remove_dup) = @_;
$remove_dup //= 1;
my %param_keys;
my @param_str;
for(zip($arg_names, $arg_types, $arg_descs)) {
my ($key, $type_info, $desc) = @$_;
next if exists $param_keys{$key} and $remove_dup;
$param_keys{$key} = 1;
# "name : type" line, followed by an indented description if present
my $ret = sprintf("%s : %s", $key, $type_info);
$ret .= "\n ".$desc if length($desc);
push @param_str, $ret;
}
return sprintf("Parameters\n----------\n%s\n", join("\n", @param_str));
}
=head2 _notify_shutdown
Notify MXNet about shutdown.
=cut
# Tell the MXNet engine that the process is shutting down (called from the
# END block below) so it can stop its worker threads cleanly.
sub _notify_shutdown
{
check_call(AI::MXNetCAPI::NotifyShutdown());
}
# Indent every line of a multi-line string except the first by
# $num_spaces spaces. Single-line input is returned untouched.
sub _indent
{
    my ($text, $num_spaces) = @_;
    my @lines = split /\n/, $text;
    return $text if @lines == 1;
    my $pad  = ' ' x $num_spaces;
    my $head = shift @lines;
    return join "\n", $head, map { $pad . $_ } @lines;
}
# Normalize a value to an array ref: pass array refs through unchanged,
# wrap anything else in a one-element anonymous array.
sub as_array
{
    my ($val) = @_;
    return ref($val) eq 'ARRAY' ? $val : [$val];
}
# Arguments that are accepted by the constructor machinery itself rather
# than being declared Moose attributes.
my %internal_arguments = (prefix => 1, params => 1, shared => 1);
# Per-class cache of { attribute_name => 1 } built from Moose metadata.
my %attributes_per_class;
# BUILDARGS-style helper mirroring the Python MXNet API: constructors may
# be called positionally (in the order declared by the class's
# python_constructor_arguments()) and/or with trailing name => value
# pairs. Classes without python_constructor_arguments() get their
# arguments passed straight through to $orig.
sub process_arguments
{
    my $orig = shift;
    my $class = shift;
    if($class->can('python_constructor_arguments'))
    {
        # Build (once per class) the set of known attribute names.
        if(not exists $attributes_per_class{$class})
        {
            %{ $attributes_per_class{$class} } = map { $_->name => 1 } $class->meta->get_all_attributes;
        }
        my %kwargs;
        # Peel name => value pairs off the END of the list while the name
        # is a known attribute (or an internal argument).
        while(@_ >= 2 and defined $_[-2] and not ref $_[-2] and (exists $attributes_per_class{$class}{ $_[-2] } or exists $internal_arguments{ $_[-2] }))
        {
            my $v = pop(@_);
            my $k = pop(@_);
            $kwargs{ $k } = $v;
        }
        if(@_)
        {
            # Whatever remains is positional; map onto the declared order.
            # (fixed: error message typo "Paramers" -> "Parameters")
            my @named_params = @{ $class->python_constructor_arguments };
            Carp::confess("Parameters mismatch expected ".Dumper(\@named_params).", but got ".Dumper(\@_))
                if @_ > @named_params;
            @kwargs{ @named_params[0..@_-1] } = @_;
        }
        return $class->$orig(%kwargs);
    }
    return $class->$orig(@_);
}
# Let the MXNet engine shut down cleanly at interpreter exit; the short
# sleep gives its background threads a moment to finish.
END {
_notify_shutdown();
Time::HiRes::sleep(0.01);
}
# Aliases for PDL's zeros/ones/ceil under names that do not clash with the
# similarly named MXNet NDArray functions exported elsewhere.
*pzeros = \&zeros;
*pones = \&ones;
*pceil = \&ceil;
## making sure that we can stringify arbitrarily large piddles
$PDL::toolongtoprint = 1000_000_000;
## convenience subs
# Build a PDL::CCS::Nd sparse matrix from CSR components: non-zero values
# ($data), row pointer ($indptr), column indices ($indices) and the matrix
# $shape (piddle or array ref). The loop expands the CSR row pointer into
# explicit [row, col] coordinate pairs for newFromWhich(); the final
# xchg(0,1) restores row-major orientation.
sub ascsr
{
my ($data, $indptr, $indices, $shape) = @_;
my @which;
my $i = 0;
my $j = 0;
while($i < $indices->nelem)
{
# entries for row $j live in $indices[$indptr[$j] .. $indptr[$j+1]-1]
for($i = $indptr->at($j); $i < $indptr->at($j+1); $i++)
{
push @which, [$j, $indices->at($i)];
}
$j++;
}
return PDL::CCS::Nd->newFromWhich(
pdl(\@which), $data, pdims => blessed $shape ? $shape : pdl($shape)
)->xchg(0, 1);
}
# Tiny record class holding a sparse matrix in COO form: parallel piddles
# of values (data) and their row/column coordinates. Built by tocoo() below.
package AI::MXNet::COO::Nd;
use Mouse;
has ['data', 'row', 'col'] => (is => 'rw');
no Mouse;
package AI::MXNet::Base;
# Convert a PDL::CCS::Nd sparse matrix into the COO record above.
# NOTE(review): relies on the PDL::CCS::Nd internal _whichND layout
# (slice(0) = rows, slice(1) = cols) -- verify against the installed
# PDL::CCS version if behaviour looks off.
sub tocoo
{
my $csr = shift;
return AI::MXNet::COO::Nd->new(
data => $csr->data,
row => $csr->_whichND->slice(0)->flat,
col => $csr->_whichND->slice(1)->flat
);
}
# Generate a random sparse matrix of the given size/density. Elements are
# uniform in [0,1); roughly (1-$density) of them are zeroed out. Returns a
# CSR (PDL::CCS::Nd) matrix for $format 'csr' (the default) and the dense
# piddle itself for any other format value.
sub rand_sparse
{
my ($num_rows, $num_cols, $density, $dtype, $format) = @_;
$dtype //= 'float32';
$format //= 'csr';
my $pdl_type = PDL::Type->new(DTYPE_MX_TO_PDL->{ $dtype });
my $dense = random($pdl_type, $num_cols, $num_rows);
my $missing = 0;
# knock out elements with probability (1 - $density)
$dense->where(random($num_cols, $num_rows)<=1-$density) .= $missing;
if($format eq 'csr')
{
return $dense->tocsr;
}
return $dense;
}
# Monkey-patch PDL::CCS::Nd and PDL with convenience accessors so sparse
# and dense piddles expose a numpy/scipy-like interface (data/indptr/
# indices/tocoo/shape/dtype, tocsr, rand_sparse).
{
no warnings 'once';
*PDL::CCS::Nd::data = sub { shift->_nzvals };
*PDL::CCS::Nd::indptr = sub { my $self = shift; ($self->hasptr ? $self->getptr : $self->ptr)[0] };
*PDL::CCS::Nd::indices = sub { shift->_whichND->slice(1)->flat };
*PDL::CCS::Nd::tocoo = sub { tocoo(shift) };
*PDL::CCS::Nd::shape = sub { shift->pdims };
*PDL::CCS::Nd::dtype = sub { DTYPE_MX_TO_STR->{ DTYPE_PDL_TO_MX->{ shift->type->numval } } };
*PDL::tocsr = sub { shift->xchg(0, 1)->toccs->xchg(0, 1) };
*PDL::rand_sparse = sub { shift; rand_sparse(@_) };
}
# Redefine PDL::at so that a single-index call on a multi-dim piddle
# returns the slice along the last dimension (python-style row lookup)
# instead of requiring a full coordinate list.
{
my $orig_at = PDL->can('at');
no warnings 'redefine';
*PDL::at = sub {
my ($self, @args) = @_;
return $orig_at->($self, @args) if @args != 1;
return $orig_at->($self, @args) if $self->ndims == 1;
return $self->slice(('X')x($self->ndims-1), $args[0])->squeeze;
};
*PDL::len = sub { shift->dim(-1) };
*PDL::dtype = sub { DTYPE_MX_TO_STR->{ DTYPE_PDL_TO_MX->{ shift->type->numval } } };
}
# Return the index of the first bin edge in $bins (ascending) that is
# strictly greater than $value; returns scalar(@$bins) when $value is
# >= every edge. Mirrors numpy.digitize for a single value.
sub digitize
{
    my ($value, $bins) = @_;
    my $idx = 0;
    for my $edge (@{ $bins })
    {
        return $idx if $value < $edge;
        $idx++;
    }
    return $idx;
}
use B;
# Integer hash of a string via perl's internal hash function.
# NOTE(review): B::hash depends on the per-process hash seed, so values are
# presumably NOT stable across processes/perl builds -- do not persist them.
sub hash { hex(B::hash(shift)) }
use List::Util ();
# Return the index of the first element of @$haystack that string-equals
# $needle, or undef when it is not present.
sub array_index {
    my ($needle, $haystack) = @_;
    my $i = 0;
    for my $elem (@{ $haystack }) {
        return $i if $elem eq $needle;
        $i++;
    }
    return undef;
}
# Python-style range: the integers $begin, $begin+$step, ... up to but
# excluding $end. $step defaults to 1.
sub range {
    my ($begin, $end, $step) = @_;
    $step //= 1;
    return grep { (($_ - $begin) % $step) == 0 } $begin .. $end - 1;
}
1;
| eric-haibin-lin/mxnet | perl-package/AI-MXNet/lib/AI/MXNet/Base.pm | Perl | apache-2.0 | 11,885 |
#!/usr/bin/perl -w
# This perl script removes duplicate include paths left to the right
use strict;
# Main driver: normalize each include path from @ARGV (collapsing ".."
# components), then emit them space-separated with duplicate -I entries
# removed, keeping the leftmost occurrence. Non -I arguments are always
# kept, even when repeated.
my @all_incl_paths = @ARGV;
my @cleaned_up_incl_paths;
foreach( @all_incl_paths ) {
$_ = remove_rel_paths($_);
if( !($_=~/-I/) ) {
push @cleaned_up_incl_paths, $_;
}
elsif( !entry_exists($_,\@cleaned_up_incl_paths) ) {
push @cleaned_up_incl_paths, $_;
}
}
print join( " ", @cleaned_up_incl_paths );
#
# Subroutines
#
# Return 1 when string $entry is present (string equality) in the array
# referenced by $list_ref, 0 otherwise.
sub entry_exists {
    my ($entry, $list_ref) = @_;
    foreach my $item (@$list_ref) {
        return 1 if $item eq $entry;
    }
    return 0;
}
#
# Collapse ".." components in a path by dropping the preceding component.
# Paths containing a literal "-I.." are returned verbatim (the caller's
# dedup logic relies on that).
sub remove_rel_paths {
    my ($path) = @_;
    return $path if $path =~ /-I\.\./;
    my @kept;
    foreach my $component (split "/", $path) {
        if ($component =~ /\.\./) {
            # ".." cancels the most recently kept component
            pop @kept;
        }
        else {
            push @kept, $component;
        }
    }
    return join("/", @kept);
}
| nschloe/seacas | packages/zoltan/config/strip_dup_incl_paths.pl | Perl | bsd-3-clause | 879 |
package Moose::Meta::Class::Immutable::Trait;
BEGIN {
$Moose::Meta::Class::Immutable::Trait::AUTHORITY = 'cpan:STEVAN';
}
{
$Moose::Meta::Class::Immutable::Trait::VERSION = '2.0604';
}
use strict;
use warnings;
use Class::MOP;
use Scalar::Util qw( blessed );
use base 'Class::MOP::Class::Immutable::Trait';
# Mutating a metaclass is forbidden once it is immutable.
sub add_role { $_[1]->_immutable_cannot_call }
# The following subs are applied as around-style wrappers: they receive the
# original method in $orig and memoize its result in the {__immutable}
# cache slot, since role composition cannot change after make_immutable.
sub calculate_all_roles {
my $orig = shift;
my $self = shift;
@{ $self->{__immutable}{calculate_all_roles} ||= [ $self->$orig ] };
}
sub calculate_all_roles_with_inheritance {
my $orig = shift;
my $self = shift;
@{ $self->{__immutable}{calculate_all_roles_with_inheritance} ||= [ $self->$orig ] };
}
# Memoized does_role(): builds a name => 1 lookup of every composed role
# (including inherited ones) on first use, then answers from the cache.
# Accepts either a role name or a role metaobject.
sub does_role {
shift;
my $self = shift;
my $role = shift;
(defined $role)
|| $self->throw_error("You must supply a role name to look for");
$self->{__immutable}{does_role} ||= { map { $_->name => 1 } $self->calculate_all_roles_with_inheritance };
my $name = blessed $role ? $role->name : $role;
return $self->{__immutable}{does_role}{$name};
}
1;
# ABSTRACT: Implements immutability for metaclass objects
=pod
=head1 NAME
Moose::Meta::Class::Immutable::Trait - Implements immutability for metaclass objects
=head1 VERSION
version 2.0604
=head1 DESCRIPTION
This class makes some Moose-specific metaclass methods immutable. This
is deep guts.
=head1 BUGS
See L<Moose/BUGS> for details on reporting bugs.
=head1 AUTHOR
Moose is maintained by the Moose Cabal, along with the help of many contributors. See L<Moose/CABAL> and L<Moose/CONTRIBUTORS> for details.
=head1 COPYRIGHT AND LICENSE
This software is copyright (c) 2012 by Infinity Interactive, Inc..
This is free software; you can redistribute it and/or modify it under
the same terms as the Perl 5 programming language system itself.
=cut
__END__
| Dokaponteam/ITF_Project | xampp/perl/vendor/lib/Moose/Meta/Class/Immutable/Trait.pm | Perl | mit | 1,853 |
do 'iscsi-client-lib.pl';
# backup_config_files()
# Returns files and directories that can be backed up
# backup_config_files()
# Returns files and directories that can be backed up
# (paths come from this module's Webmin %config settings)
sub backup_config_files
{
return ( $config{'config_file'}, $config{'initiator_file'},
$config{'ifaces_dir'} );
}
# pre_backup(&files)
# Called before the files are actually read
# No-op for this module.
sub pre_backup
{
return undef;
}
# post_backup(&files)
# Called after the files are actually read
# No-op for this module.
sub post_backup
{
return undef;
}
# pre_restore(&files)
# Called before the files are restored from a backup
# No-op for this module.
sub pre_restore
{
return undef;
}
# post_restore(&files)
# Called after the files are restored from a backup
# No-op for this module.
sub post_restore
{
return undef;
}
1;
| BangL/webmin | iscsi-client/backup_config.pl | Perl | bsd-3-clause | 646 |
=head1 This Week on perl5-porters (7-13 April 2003)
This week : MacOS X problems, big and small patch proposals,
cross-compilation and unknown errors.
=head2 use base and import
Casey West proposed a patch to call the import() method of a module
referenced through the C<use base> pragma. Graham Barr disagrees,
saying that C<use base> should only be used to set up inheritance, and
Michael Schwern threatens to call the backward compatibility police.
http://archive.develooper.com/perl5-porters@perl.org/msg94142.html
=head2 Cross-compilation
Redvers Davies complains that there's too little information about the
cross-compilation of perl. Nevertheless, he successfully produced perl
packages for the OpenZaurus OS (L<http://www.openzaurus.org/>), including
dynamically loaded extensions, and the ability to cross-compile XS modules !
(Perl 5.8.0 has the beginning of a minimal support for cross-compilation.
Let's hope it can be improved by this impressive work.)
http://archive.develooper.com/perl5-porters@perl.org/msg94160.html
=head2 Unknown error
Tels manages to have perl to emit an actual I<Unknown error>. This obscure
message is actually produced by perl when it fails to load a module, and
fails again to get an error message for this first failure.
Enache Adrian provided a fix.
http://archive.develooper.com/perl5-porters@perl.org/msg94096.html
http://archive.develooper.com/perl5-porters@perl.org/msg94183.html
=head2 OS X peculiarities
Michael Schwern notices that C<nobody>, the traditional Unix
low-privileged user, has user id -2 on MacOS X (according to
F</etc/passwd>), but the C<uid_t> C type is actually an unsigned integer.
Consequently, getpwnam() says that C<nobody>'s UID is 4294967294. Looks
logical. We've seen weirder things.
This doesn't seem to be related to the fact that you can't run perldoc as
root on MacOS X. That's what Dan Kogai explains after a few tests of
changing the UID and EUID on various BSD flavors.
http://archive.develooper.com/perl5-porters@perl.org/msg94194.html
Michael also criticizes the choice of the default I<vendorlib> location on
OS X. In fact, he doesn't think that I<vendorlib> should be set at all in
a hints file. Wilfredo SE<aacute>nchez asks for more info, and is working
on a patch, both for the Darwin and Rhapsody hints files. (By the way,
Wilfredo doesn't have a Mac OS X server, so he'll patch blindly unless
someone wants to test bleadperl and/or maintperl on Rhapsody.)
http://archive.develooper.com/perl5-porters@perl.org/msg94104.html
=head2 C<h2xs> and enums
Tassilo von Parseval notices (bug #21887) that C<h2xs> doesn't generate
constants from the enumerated types found in C header files. Nicholas
Clark explains how to use ExtUtils::Constant to achieve the desired
effect.
http://archive.develooper.com/perl5-porters@perl.org/msg94212.html
=head2 Big Bug Fixes
Pradeep Hodigere proposed a patch to speed up some of the perl built-ins
when handling UTF8 data (and he provided also benchmark results).
Basically his solution is to add a field C<sv_length> to the internal SV
structure, to hold the length, in chars, of the scalar string value.
Nobody commented yet.
http://archive.develooper.com/perl5-porters@perl.org/msg94230.html
Dave Mitchell sent a patch to allow I<FETCH to access arrays and hashes
which are themselves tied without fear of crashing> (a.k.a. nested
FETCHes). I personally don't understand fully the solution. Nobody
commented yet. No, wait, Arthur Bergman said it was nice ! and asked for
benchmarks.
http://archive.develooper.com/perl5-porters@perl.org/msg94130.html
=head2 Beware the precedence police
Dan Kogai complains that this code :
print chr(0xE3).chr(0x81).chr(0x82) =~ /^\x{3042}$/ ? 'true' : 'false';
prints I<true>. Then, he ceases to complain when he realizes that the
precedence of C<.> is actually lower than the precedence of C<=~>. Tom
Horsley comments that I<APL was the only language that ever got precedence
and associativity correct.>
http://archive.develooper.com/perl5-porters@perl.org/msg94127.html
=head2 In brief
Jarkko Hietaniemi reports that Devel::Coverage doesn't work with perl
5.8.0, while it works with perl 5.6.1 (bug #21890). Randy J. Ray says
he'll look at it. Stas Bekman suggests that the problem might be similar
to one he found on Apache::DB.
Long doubles are now correctly recognized by the Configure script on AIX,
thanks to John Allen and Merijn Brand. They also improved the compiler
detection mechanism, if I understood correctly.
Pierre Denis reports a parsing bug (#21875) involving a hash key which is
a bareword beginning by C<q>, and a pair of braces that can be seen as a
block or as a literal hashref. Dave Mitchell sends a fix.
Alberto SimE<otilde>es says that it'd be nice to have a function in
Data::Dumper to dump the structures directly to a chosen filehandle.
Patches welcome !
Nathan Torkington asks a bunch of I/O layer questions, and gets some
answers.
http://archive.develooper.com/perl5-porters@perl.org/msg94285.html
=head2 About this summary
This summary was brought to you by Rafael Garcia-Suarez. Weekly summaries
are available on L<http://use.perl.org/> and/or via a mailing list, which
subscription address is L<perl5-summary-subscribe@perl.org>. Comments,
corrections, additions, and suggestions are welcome.
| rjbs/perlweb | docs/dev/perl5/list-summaries/2003/p5p-200304-2.pod | Perl | apache-2.0 | 5,369 |
package UI::Status;
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
#
# JvD Note: you always want to put Utils as the first use. Sh*t don't work if it's after the Mojo lines.
use UI::Utils;
use Mojo::Base 'Mojolicious::Controller';
use Data::Dumper;
# Mojolicious action: render all Status rows as JSON, sorted by the
# 'orderby' query parameter (default "name").
# NOTE(review): 'orderby' is passed straight from user input into the
# DBIx::Class order_by clause -- consider whitelisting column names.
sub index {
my $self = shift;
my @data;
my $orderby = "name";
$orderby = $self->param('orderby') if ( defined $self->param('orderby') );
my $rs_data = $self->db->resultset("Status")->search( undef, { order_by => $orderby } );
while ( my $row = $rs_data->next ) {
push(
@data, {
"id" => $row->id,
"name" => $row->name,
"description" => $row->description,
"last_updated" => $row->last_updated,
}
);
}
$self->render( json => \@data );
}
# Mojolicious action: delete the Status row identified by the 'id'
# parameter (admins only), log the change, and redirect back to /misc.
# Non-admins get an error flash and no database change.
sub delete {
my $self = shift;
my $id = $self->param('id');
if ( !&is_admin($self) ) {
$self->flash( alertmsg => "You must be an ADMIN to perform this operation!" );
}
else {
# look up the name first, purely for the audit log entry
my $p_name = $self->db->resultset('Status')->search( { id => $id } )->get_column('name')->single();
my $delete = $self->db->resultset('Status')->search( { id => $id } );
$delete->delete();
&log( $self, "Delete status " . $p_name, "UICHANGE" );
}
return $self->redirect_to('/misc');
}
# Validate a status create/update request. Returns undef when everything
# is fine, otherwise a string of error messages joined by "__NEWLINE__"
# (which the .ep template's javascript turns back into line breaks).
# Currently the only check is the admin-permission one.
sub check_status_input {
my $self = shift;
my $sep = "__NEWLINE__"; # the line separator sub that with \n in the .ep javascript
my $err = undef;
# First, check permissions
if ( !&is_admin($self) ) {
$err .= "You must be an ADMIN to perform this operation!" . $sep;
return $err;
}
return $err;
}
# Mojolicious action: update the Status row named by the 'id' parameter
# from the posted 'name'/'description' fields, then redirect to /misc.
# Flashes the validation error when input checking fails, and "Success!"
# only when the update actually ran.
sub update {
    my $self = shift;
    my $id   = $self->param('id');

    my $err = &check_status_input($self);
    if ( defined($err) ) {
        # BUG FIX: the success flash below used to run unconditionally,
        # overwriting this error message before the redirect.
        $self->flash( alertmsg => $err );
    }
    else {
        my $update = $self->db->resultset('Status')->find( { id => $self->param('id') } );
        $update->name( $self->param('name') );
        $update->description( $self->param('description') );
        $update->update();

        # if the update has failed, we don't even get here, we go to the exception page.
        &log( $self, "Update status with name " . $self->param('name') . " and id " . $self->param('id'), "UICHANGE" );
        $self->flash( alertmsg => "Success!" );
    }
    return $self->redirect_to('/misc');
}
# Mojolicious action: create a new Status row from the posted
# 'name'/'description' fields, then redirect to /misc. Flashes the
# validation error when input checking fails, and "Success!" only when
# the row was created.
sub create {
    my $self = shift;

    my $err = &check_status_input($self);
    if ( defined($err) ) {
        # BUG FIX: the success flash below used to run unconditionally,
        # overwriting this error message before the redirect.
        $self->flash( alertmsg => $err );
    }
    else {
        my $insert = $self->db->resultset('Status')->create(
            {
                name        => $self->param('name'),
                description => $self->param('description'),
            }
        );
        # NOTE(review): DBIC's create() already inserts the row; this extra
        # insert() looks like a no-op for in-storage rows -- confirm.
        $insert->insert();
        $self->flash( alertmsg => "Success!" );
    }
    return $self->redirect_to('/misc');
}
# Look up a status by name. Despite the boolean-sounding name, this
# returns the row's id when the status exists and undef otherwise.
# (fixed: removed the unused "$valid" local)
sub is_valid_status {
    my $self   = shift;
    my $status = shift;

    my $row = $self->db->resultset("Status")->search( { name => $status } )->single;
    if ( defined($row) ) {
        return ( $row->id );
    }
    else {
        return (undef);
    }
}
1;
| alficles/incubator-trafficcontrol | traffic_ops/app/lib/UI/Status.pm | Perl | apache-2.0 | 3,355 |
package MAP::API;
use Dancer ':syntax';
use MAP::auth::Auth;
use MAP::dataStores::ClientListing;
use MAP::MASTERDATA::RELIGIONS;
use MAP::LibraryFields::Fields;
use MAP::LibraryFields::Category;
use MAP::LibraryFields::SubCategory;
use MAP::LibraryFields::Options;
use MAP::LibraryFields::Groups;
use MAP::LibraryFields::Tags;
use MAP::Forms::Forms;
use MAP::EmailMessages::EmailMessages;
use MAP::Agency::Agency;
use MAP::Clients;
use MAP::Agencies::Agencies;
use MAP::LoadingAverage;
use MAP::Socket;
use MAP::DHTMLX;
our $VERSION = '0.1';
set 'session' => 'Simple';
set logger => 'console';
set 'log' => 'debug';
set 'show_errors' => 1;
set 'access_log ' => 1;
set 'warnings' => 0;
# Emit the CORS preflight (OPTIONS) response headers.
# NOTE(review): the request's Origin and Access-Control-Request-Headers
# values are reflected back verbatim -- effectively allowing any origin;
# confirm that is intended given the token check elsewhere.
sub options_header{
header('Access-Control-Allow-Origin' => request->header("Origin"));
header('Access-Control-Allow-Methods' => 'GET, POST, PUT, DELETE, OPTIONS');
header('Access-Control-Allow-Headers' => request->header("Access-Control-Request-Headers"));
header('Vary' => 'Accept-Encoding');
header('Keep-Alive' => 'timeout=2, max=100');
header('Connection' => 'Keep-Alive');
header('X-Server' => 'Starman');
header('X-Server-Time' => time);
}
# Emit the standard response headers (CORS, no-cache, security headers)
# and select the Dancer serializer from the 'format' parameter
# (json/xml/yaml, defaulting to JSON for anything else or when absent).
# NOTE(review): as in options_header, the Origin header is reflected
# verbatim into Access-Control-Allow-Origin.
sub normal_header{
header('Access-Control-Allow-Origin' => request->header("Origin"));
header('Access-Control-Allow-Methods' => 'GET, POST, PUT, DELETE, OPTIONS');
header('Keep-Alive' => 'timeout=2, max=100');
header('Connection' => 'Keep-Alive');
header('Cache-Control' => 'max-age=0, must-revalidate, no-cache, no-store');
header('Vary' => 'Accept-Encoding');
header('X-Server-Time' => time);
header('X-Server' => 'Starman');
header('Expires' => 'Thu, 01 Jan 1970 00:00:00');
header('X-FRAME-OPTIONS' => 'DENY');
header('X-XSS-Protection' => '1; mode=block');
#header('Date' => 'Wed, 27 Nov 2013 03:29:25 GMT');
#header('Expires' => 'Thu, 01 Jan 1970 00:00:00');
#header('Strict-Transport-Security' => 'max-age=15768000');
#header('Access-Control-Allow-Headers' => 'X-Requested-With');
#header('Access-Control-Max-Age' => '1728000');
#header('X-FRAME-OPTIONS' => 'DENY');
#header('X-XSS-Protection' => '1; mode=block');
# Pick the serializer matching the requested format; default is JSON.
if ( defined(params->{format}) ) {
if ( params->{format} eq "json" ) {
set serializer => 'JSON';
}
elsif ( params->{format} eq "xml" ) {
set serializer => 'XML';
}
elsif ( params->{format} eq "yaml" ) {
set serializer => 'YAML';
}
else
{
set serializer => 'JSON';
}
}
else
{
set serializer => 'JSON';
}
}
# Open a Sybase DBI connection to the database named by the 'database'
# request parameter and switch to it ("use <db>").
# SECURITY(review): credentials are hard-coded in source, the database
# name is interpolated unquoted into the "use" statement, and on failure
# an error STRING is returned instead of a handle (callers that call
# methods on the return value will then die) -- all worth revisiting.
sub dbh{
my $database = params->{database} || "";
#debug $database;
$ENV{DSQUERY} = '192.168.1.19';
my $dbh = DBI->connect('DBI:Sybase:database='.$database.';scriptName=MAP_API;', "ESCairs", "FishB8", {
PrintError => 0#,
#syb_enable_utf8 => 1
}) or return "Can't connect to sql server: $DBI::errstr";
#$dbh->{syb_enable_utf8} = 1 ;
$dbh->do('use '. $database);
return $dbh;
}
# Abort the current route with an error payload.  Sends the standard
# headers, then halts request processing with a
# {status => 'err', response => $message} body.
# Invoked as a class method: MAP::API->fail($message).
sub fail{
    my ($class, $message) = @_;
    normal_header();
    halt({ status => 'err', response => $message });
}
# Abort the current route when authorization fails.  Currently
# behaves identically to fail() (standard headers plus a halted
# {status => 'err'} payload); kept as a separate sub so call sites
# read as intent.
sub unauthorized{
    my ($class, $message) = @_;
    normal_header();
    halt({ status => 'err', response => $message });
}
# Validate the caller: the request Origin must appear in
# tbl_api_allowed_origin and the supplied token must exist in
# tbl_api_access_token, be active, and not yet be expired.  Any
# failure halts the request via fail()/unauthorized(), which never
# return; falling off the end means the caller is authorized.
sub check_authorization{
my($self, $token, $Origin) = @_;
my $dbh = dbh();
my $token_status = "";
# fail() halts the request, so these || chains act as guard clauses.
$Origin = $Origin || MAP::API->fail( "you can't fetch without a browser" );
$token = $token || MAP::API->fail( "token can not be empty" );
my $origin_status = "";
my $strSQLcheckOrigin = "SELECT origin FROM tbl_api_allowed_origin WHERE origin = ?";
my $sth = $dbh->prepare( $strSQLcheckOrigin, );
$sth->execute( $Origin ) or MAP::API->fail( $sth->errstr );
# Any matching row marks the origin as allowed.
while ( my $record = $sth->fetchrow_hashref())
{
$origin_status = "ok";
}
if ( $origin_status eq "" )
{
MAP::API->unauthorized("Origin not allowed");
}
# NOTE(review): the expiry bound interpolates the server epoch time
# directly into the SQL text; time is server-controlled so this is
# not injectable, but a bind placeholder would be cleaner.
my $strSQLtoken = 'SELECT * FROM tbl_api_access_token WHERE token = ? AND active_status = 1 AND date_expiration > '.time.'';
$sth = $dbh->prepare( $strSQLtoken, );
$sth->execute( $token ) or MAP::API->fail( $sth->errstr );
while ( my $record = $sth->fetchrow_hashref())
{
$token_status = "ok";
}
if ( $token_status eq "" ) {
MAP::API->unauthorized("token not authorized");
}
}
# Filter a comma-separated column list against the columns a package
# exposes, returning the surviving names bracketed and comma-joined,
# e.g. "[id],[name]".
#
# A column is kept only when its bare name occurs in $packageColumns
# while its bracketed form "[name]" does not (bracketed occurrences
# were skipped by the original via an empty if-branch; that behavior
# is preserved, just expressed directly).  Also fixes a stray ";;".
# NOTE(review): matching is plain substring matching, so e.g. "id"
# also matches inside "uuid" -- confirm that is acceptable upstream.
sub normalizeColumnNames
{
    my ($self, $strColumns, $packageColumns) = @_;
    my @kept;
    for my $column (split(/,/, $strColumns)) {
        next if index($packageColumns, '['.$column.']') != -1;  # already bracketed: skip
        next if index($packageColumns, $column) == -1;          # unknown column: drop
        push @kept, '['.$column.']';
    }
    return join(',', @kept);
}
# Run a non-SELECT statement.  First argument is the SQL (with ?
# placeholders); remaining arguments are bind values.  Dies with the
# DBI error text on failure.  Invoked as a class method.
sub Exec
{
my $self = shift;
my $dbh = dbh();
$dbh->do(shift,undef,@_) || die"Can't exec:\n".$dbh->errstr;
}
# Run a SELECT and return the first column of the first row, or undef
# when the query matches no rows.  First argument is the SQL; the
# remaining arguments are bind values.  Dies with the DBI error on
# statement failure.
sub SelectOne
{
    my $self = shift;
    my $dbh = dbh();
    my $res = $dbh->selectrow_arrayref(shift, undef, @_);
    die "Can't execute select:\n".$dbh->errstr if $dbh->err;
    # selectrow_arrayref returns undef on an empty result set; the
    # original dereferenced it anyway, relying on autovivification.
    return defined $res ? $res->[0] : undef;
}
# Run a SELECT and return the first row as a hashref (column name =>
# value), or undef when no row matched.  Dies with the DBI error on
# statement failure.  Invoked as a class method.
sub SelectRow
{
my $self = shift;
my $dbh = dbh();
my $res = $dbh->selectrow_hashref(shift,undef,@_);
die"Can't execute select:\n".$dbh->errstr if $dbh->err;
return $res;
}
# Run a SELECT and return all rows as an arrayref of hashrefs, or
# undef when the result set is empty.  Each row gets a synthetic
# row_cid field alternating 1/0 (zebra-striping helper for clients).
# NOTE(review): unlike Exec/SelectOne/SelectRow this sub does NOT
# shift off an invocant -- SelectARef calls it as a plain function.
# Calling it as a method would pass the class name as the SQL text.
sub Select
{
my $dbh = dbh();
my $res = $dbh->selectall_arrayref( shift, { Slice=>{} }, @_ );
die"Can't execute select:\n".$dbh->errstr if $dbh->err;
return undef if $#$res == -1;
my $cidxor = 0;
for(@$res)
{
$cidxor = $cidxor ^ 1;
$_->{row_cid} = $cidxor;
}
return $res;
}
# Class-method wrapper around Select() that always yields an arrayref:
# [] when no rows matched, otherwise the arrayref of row hashes.
# The ref() check is defensive; Select() currently returns only an
# arrayref or undef.
sub SelectARef
{
my $self = shift;
my $data = Select(@_);
return [] unless $data;
return [$data] unless ref($data) eq 'ARRAY';
return $data;
}
# Sanitize a value into a word-character-only identifier: spaces
# become underscores, every other non-word character is removed.
sub regex_alnum
{
    my ($class, $raw) = @_;
    (my $clean = $raw) =~ tr/ /_/;
    $clean =~ s/\W//g;
    return $clean;
}
dance;
| dimpu/AravindCom | MAP-API/lib/MAP/API.pm | Perl | mit | 5,514 |
#!/usr/bin/perl
# I used this to convert my data from http://www.runningahead.com/ into
# the plaintext format used by the activity log code. (This reads the
# tab-separated RA format, not the XML format.)
use strict;
use warnings;
use diagnostics;
use File::Basename qw(dirname basename);
use File::Path qw(mkpath);
use Getopt::Long;
use Text::Wrap;
$Text::Wrap::columns = 72;
my $activities_dir = "$ENV{HOME}/Documents/Activities";
my $garmin_dir = "$ENV{HOME}/Documents/Garmin";
my $opt_overwrite = 0;
GetOptions ("overwrite" => \$opt_overwrite);
my $ra_log_file = shift;
my %ignored_types = (
"Strength" => 1,
);
my %subtype_map = (
"Easy + Strides" => "easy-strides",
"Day hike" => "day-hike",
"Race DNF" => "race-DNF",
);
my %equipment_map = (
# adidas
"Adizero Mana 5 (blue)" => "mana-5-blue",
"Adizero Mana 5 (white)" => "mana-5-white",
"Adizero Mana 5 (yellow)" => "mana-5-yellow",
# asics
"Gel-Tarther" => "tarther-1",
"Gel-Tarther (2)" => "tarther-2",
"GT-2140" => "asics-gt2140",
# brooks
"Mach 11 Spikeless" => "mach11",
# new balance
"M730" => "nb730",
"MR890" => "nb890",
"MR890V2 (grey/green)" => "nb890v2-green",
"MR890V2 (grey/blue)" => "nb890v2-blue",
# nike
"Free Run 3.0 v2" => "free-3.0",
"Lunar Montreal (grey)" => "lunar-montreal-grey",
"Lunar Montreal (black)" => "lunar-montreal-black",
"Zoom Streak 3" => "streak3-1",
"Zoom Streak 3 (2)" => "streak3-2",
"Zoom Streak XC 2" => "streak-xc2",
# saucony
"ProGrid Guide 3" => "guide-3",
"ProGrid Kinvara" => "kinvara",
);
my %distance_unit_map = (
"Mile" => "miles",
"Kilometer" => "km",
"Meter" => "m",
);
my %weight_unit_map = (
);
my %temperature_unit_map = (
);
# Locate the Garmin GPS file recorded at the given timestamp by
# searching $garmin_dir for files named "YYYY-MM-DD-HH-MM-*".
# Returns the single matching .fit/.tcx path; returns "" when none
# match, or when several do (with a warning listing them).
sub find_garmin_file {
    my ($year, $month, $day, $hour, $min) = @_;
    my $pattern = sprintf("%04d-%02d-%02d-%02d-%02d-*",
                          $year, $month, $day, $hour, $min);
    # List-form pipe open runs find(1) without a shell, so a
    # $garmin_dir containing spaces or glob metacharacters can no
    # longer be word-split or expanded (the old backticks version
    # interpolated both values into a shell command line).
    open(my $find_fh, '-|', '/usr/bin/find', $garmin_dir, '-name', $pattern, '-print')
        or die "can't run find: $!";
    my @files;
    while (my $path = <$find_fh>) {
        chomp $path;
        # Keep only Garmin activity files (FIT or TCX).
        push @files, $path if $path =~ /.(fit|FIT|tcx|TCX)$/;
    }
    close $find_fh;
    my $file_count = scalar @files;
    if ($file_count == 1) {
        return $files[0];
    }
    if ($file_count > 1) {
        print STDOUT "warning multiple GPS files found for $pattern: @files\n";
    }
    return "";
}
open INPUT, '<', $ra_log_file
or die "can't open $ra_log_file";
# skip first line
$_ = <INPUT>;
while (<INPUT>) {
chomp;
my ($Date, $TimeOfDay, $Type, $SubType, $Distance, $DistanceUnit, $Duration, $Weight, $WeightUnit, $RestHR, $AvgHR, $MaxHR, $Sleep, $Calories, $Quality, $Effort, $Weather, $Temperature, $TempUnit, $Notes, $Course, $CourseSurface, $CourseNotes, $ShoeMake, $ShoeModel, $Size, $System, $ShoeSerial, $ShoePrice, $OverallPlace, $FieldSize, $GroupMinAge, $GroupMaxAge, $GroupPlace, $GroupSize, $GenderPlace, $GenderSize) = split("\t");
if ($ignored_types{$Type}) {
next;
}
my ($year, $month, $day, $hour, $minute);
if ($TimeOfDay) {
my $date_time = "$Date $TimeOfDay";
if ($date_time =~ /(\d+)-(\d+)-(\d+) (\d+):(\d+)/) {
$year = $1; $month = $2; $day = $3; $hour = $4; $minute = $5;
} else {
die "Weird date format: $date_time";
}
} else {
if ($Date =~ /(\d+)-(\d+)-(\d+)/) {
$year = $1; $month = $2; $day = $3; $hour = 0; $minute = 0;
} else {
die "Weird date format: $Date";
}
}
my $output_file = sprintf("%04d/%02d/%02d-%02d%02d.txt",
$year, $month, $day, $hour, $minute);
my $output_dir = dirname($output_file);
if (!$opt_overwrite && -f "$activities_dir/$output_file") {
next;
}
if (! -d "$activities_dir/$output_dir") {
mkpath("$activities_dir/$output_dir")
or die "can't create output directory";
}
open OUTPUT, '>', "$activities_dir/$output_file"
or die "can't create output file";
if ($TimeOfDay) {
print OUTPUT "Date: $Date $TimeOfDay\n";
} else {
print OUTPUT "Date: $Date\n";
}
my $gps_file = find_garmin_file($year, $month, $day, $hour, $minute);
if ($gps_file) {
my $file = basename($gps_file);
print OUTPUT "GPS-File: $file\n";
}
$Type = lc($Type);
print OUTPUT "Activity: $Type\n";
if ($SubType) {
my $subtype = $subtype_map{$SubType};
if (!$subtype) {
$subtype = lc($SubType);
}
print OUTPUT "Type: $subtype\n";
}
if ($Course) {
print OUTPUT "Course: $Course\n";
}
if ($Duration) {
my $dur = $Duration;
$dur =~ s/^0://;
print OUTPUT "Duration: $dur\n";
}
if ($Distance) {
my $distance = "$Distance";
if ($DistanceUnit) {
my $unit = $distance_unit_map{$DistanceUnit};
if (!$unit) {
$unit = lc($DistanceUnit);
}
$distance = "$distance $unit";
}
print OUTPUT "Distance: $distance\n";
}
if ($Weight) {
my $weight = "$Weight";
if ($WeightUnit) {
my $unit = $weight_unit_map{$WeightUnit};
if (!$unit) {
$unit = lc($WeightUnit);
}
$weight = "$weight $unit";
}
print OUTPUT "Weight: $weight\n";
}
if ($RestHR) {
print OUTPUT "Resting-HR: $RestHR\n";
}
if ($AvgHR) {
print OUTPUT "Average-HR: $AvgHR\n";
}
if ($MaxHR) {
print OUTPUT "Max-HR: $MaxHR\n";
}
if ($Calories) {
print OUTPUT "Calories: $Calories\n";
}
if ($Quality) {
print OUTPUT "Quality: $Quality\n";
}
if ($Effort) {
print OUTPUT "Effort: $Effort\n";
}
if ($ShoeModel) {
my $shoe = $ShoeModel;
if ($ShoeSerial) {
$shoe = "$shoe ($ShoeSerial)";
}
if ($equipment_map{$shoe}) {
$shoe = $equipment_map{$shoe};
}
print OUTPUT "Equipment: $shoe\n";
}
if ($Temperature) {
my $temperature = "$Temperature";
if ($TempUnit) {
my $unit = $temperature_unit_map{$TempUnit};
if (!$unit) {
$unit = $TempUnit;
}
$temperature = "$temperature $unit";
}
print OUTPUT "Temperature: $temperature\n";
}
if ($Weather) {
my $weather = lc($Weather);
$weather =~ s/,/ /g;
$weather =~ s/partlycloudy/partly-cloudy/g;
print OUTPUT "Weather: $weather\n";
}
if ($OverallPlace) {
print OUTPUT "Overall-Place: $OverallPlace\n";
}
if ($FieldSize) {
print OUTPUT "Field-Size: $FieldSize\n";
}
if ($GroupPlace) {
print OUTPUT "Group-Place: $GroupPlace\n";
}
if ($GroupSize) {
print OUTPUT "Group-Size: $GroupSize\n";
}
if ($GroupMinAge) {
print OUTPUT "Group-Min-Age: $GroupMinAge\n";
}
if ($GroupMaxAge) {
print OUTPUT "Group-Max-Age: $GroupMaxAge\n";
}
if ($GenderPlace) {
print OUTPUT "Gender-Place: $GenderPlace\n";
}
if ($GenderSize) {
print OUTPUT "Gender-Size: $GenderSize\n";
}
if ($Notes) {
print OUTPUT "\n";
my $notes = $Notes;
$notes =~ s/<br>/\n/g;
$notes =~ s/<tab>/\t/g;
$notes =~ s/— /-- /g;
$notes =~ s/—/--/g;
$notes =~ s/…/.../g;
$notes = wrap("", "", $notes);
print OUTPUT "$notes\n";
}
close OUTPUT;
print STDOUT "Created $output_file\n";
}
close INPUT;
| jsh1/activity_log | misc/import-ra-log.pl | Perl | mit | 7,048 |
########################################################################
# Bio::KBase::ObjectAPI::BaseObject - This is a base object that serves as a foundation for all other objects
# Author: Christopher Henry
# Author email: chenry@mcs.anl.gov
# Author affiliation: Mathematics and Computer Science Division, Argonne National Lab
# Date of module creation: 3/11/2012
########################################################################
use Bio::KBase::ObjectAPI::Types;
use DateTime;
use Data::UUID;
use JSON::XS;
use Module::Load;
use Bio::KBase::ObjectAPI::Attribute::Typed;
use Bio::KBase::ObjectAPI::Exceptions;
use Bio::KBase::ObjectAPI::utilities;
package Bio::KBase::ObjectAPI::BaseObject;
=head1 Bio::KBase::ObjectAPI::BaseObject
=head2 SYNOPSIS
=head2 METHODS
=head3 Initialization
=head4 new
my $obj = Bio::KBase::ObjectAPI::Object->new(); # Initialize object with default parameters
my $obj = Bio::KBase::ObjectAPI::Object->new(\%); # Initialize object with hashref of parameters
my $obj = Bio::KBase::ObjectAPI::Object->new(%); # Initialize object with hash of parameters
=head3 Serialization
=head4 serializeToDB
Return a simple perl hash that can be passed to a JSON serializer.
my $data = $object->serializeToDB();
=head4 toJSON
my $string = $object->toJSON(\%);
Serialize object to JSON. A hash reference of options may be passed
as the first argument. Currently only one option is available C<pp>
which will pretty-print the output.
=head4 createHTML
my $string = $object->createHTML();
Returns an HTML document for the object.
=head4 toReadableString
my $string = $object->toReadableString();
=head3 Object Traversal
=head4 defaultNameSpace
=head4 getLinkedObject
=head4 getLinkedObjectArray
=head4 store
=head4 biochemisry
=head4 annotation
=head4 mapping
=head4 fbaproblem
=head3 Object Manipulation
=head4 add
=head4 remove
=head3 Helper Functions
=head4 interpretReference
=head4 parseReferenceList
=head3 Schema Versioning
=head4 __version__
=head4 __upgrade__
=cut
use Moose;
use namespace::autoclean;
use Scalar::Util qw(weaken);
our $VERSION = undef;
my $htmlheader = <<HEADER;
<!doctype HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>\${TITLE}</title>
<link rel="stylesheet" href="http://netdna.bootstrapcdn.com/twitter-bootstrap/2.3.0/css/bootstrap-combined.min.css">
<link rel="stylesheet" href="http://ajax.aspnetcdn.com/ajax/jquery.dataTables/1.9.4/css/jquery.dataTables.css">
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script>
<script src="http://netdna.bootstrapcdn.com/twitter-bootstrap/2.3.0/js/bootstrap.min.js"></script>
<script type="text/javascript" charset="utf8" src="http://ajax.aspnetcdn.com/ajax/jquery.dataTables/1.9.4/jquery.dataTables.js"></script>
<script type="text/javascript">
\$(document).ready(function() {
\$('#tab-header a').click(function (e) {
e.preventDefault();
\$(this).tab('show');
});
\$('.data-table').dataTable();
});
</script>
<style type="text/css">
#tabs {
margin: 20px 50px;
}
</style>
</head>
HEADER
my $htmlbody = <<BODY;
<body>
<div id="tabs">
<ul class="nav nav-tabs" id="tab-header">
<li class="active"><a href="#tab-1">Overview</a></li>
\${TABS}
</ul>
<div class="tab-content">
<div class="tab-pane active" id="tab-1">
\${MAINTAB}
</div>
\${TABDIVS}
</div>
</div>
</body>
BODY
my $htmltail = <<TAIL;
</html>
TAIL
# Moose BUILDARGS wrapper: normalizes constructor arguments (hashref
# or key/value list), applies a schema upgrade when the serialized
# __VERSION__ differs from the class version, and boxes "singleton"
# subobjects into one-element arrays so BUILD can treat every
# subobject uniformly.
around BUILDARGS => sub {
my $orig = shift;
my $class = shift;
my $hash = {};
if ( ref $_[0] eq 'HASH' ) {
$hash = shift;
} elsif ( scalar @_ % 2 == 0 ) {
my %h = @_;
$hash = \%h;
}
my $objVersion = $hash->{__VERSION__};
my $classVersion = $class->__version__;
# Data attached to a parent but lacking a version stamp is treated
# as schema version 1.
if (!defined($objVersion) && defined($hash->{parent})) {
$objVersion = 1;
}
if (defined $objVersion && defined($classVersion) && $objVersion != $classVersion) {
if (defined(my $fn = $class->__upgrade__($objVersion))) {
$hash = $fn->($hash);
} else {
die "Invalid Object\n";
}
}
my $sos = $class->_subobjects();
foreach my $subobj (@{$sos}) {
if (defined($subobj->{singleton}) && $subobj->{singleton} == 1) {
if (defined $hash->{$subobj->{name}}) {
$hash->{$subobj->{name}} = [$hash->{$subobj->{name}}];
}
else {
$hash->{$subobj->{name}} = [];
}
}
}
return $class->$orig($hash);
};
# Moose BUILD hook: replaces each raw subobject data hash with a
# lazy-construction info record ({created => 0, class, data}); the
# real object is only instantiated on first access (_build_object).
# The parent back-reference is weakened to break reference cycles.
sub BUILD {
my ($self,$params) = @_;
# replace subobject data with info hash
foreach my $subobj (@{$self->_subobjects}) {
my $name = $subobj->{name};
my $class = $subobj->{class};
my $method = "_$name";
my $subobjs = $self->$method();
for (my $i=0; $i<scalar @$subobjs; $i++) {
my $data = $subobjs->[$i];
# create the info hash
my $info = {
created => 0,
class => $class,
data => $data
};
$data->{parent} = $self; # set the parent
weaken($data->{parent}); # and make it weak
$subobjs->[$i] = $info; # reset the subobject with info hash
}
}
}
# Canonicalize a single reference string before saving:
#  - "~..." (self-relative) references pass through unchanged;
#  - a leading UUID becomes "~" when it names this object's top-level
#    parent, otherwise it is replaced with the store's saved
#    reference for that UUID (error if the target was never saved);
#  - a leading ws/obj/ver path is replaced with the store's updated
#    reference, or "~" when it now names the top-level parent.
# Anything unrecognized is returned untouched.
sub fix_reference {
my ($self,$ref) = @_;
if ($ref =~ m/^~/) {
return $ref;
} elsif ($ref =~ m/^([A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12})/) {
my $uuid = $1;
if ($self->topparent()->uuid() eq $uuid) {
$ref =~ s/$uuid/~/;
return $ref;
}
my $newref = $self->store()->uuid_to_ref($uuid);
if (!defined($newref)) {
Bio::KBase::ObjectAPI::utilities::error("Attempting to save object with references to unsaved object:".$uuid);
}
$ref =~ s/$uuid/$newref/;
return $ref;
} elsif ($ref =~ m/^(\w+\/\w+\/\w+)\//) {
my $oldref = $1;
my $newref = $self->store()->updated_reference($oldref);
if (!defined($newref)) {
$newref = $oldref;
}
if ($self->topparent()->_reference() eq $newref) {
$ref =~ s/$oldref/~/;
} elsif ($newref ne $oldref) {
$ref =~ s/$oldref/$newref/;
}
return $ref;
}
return $ref;
}
# Canonicalize every link attribute on this object (both scalar and
# array-valued links) via fix_reference(), updating them in place.
sub fix_references {
    my ($self) = @_;
    foreach my $linkspec (@{ $self->_links() }) {
        my $attribute = $linkspec->{attribute};
        my $current = $self->$attribute();
        next unless defined $current;
        if (defined $linkspec->{array}) {
            # Array link: rewrite each element of the arrayref.
            for my $idx (0 .. $#{$current}) {
                $current->[$idx] = $self->fix_reference($current->[$idx]);
            }
        } else {
            # Scalar link: replace the attribute value wholesale.
            $self->$attribute($self->fix_reference($current));
        }
    }
}
# Flatten this object (and, recursively, its subobjects) into a plain
# Perl hash suitable for JSON serialization.  References are
# canonicalized first; numeric/boolean attributes are coerced with +0
# and stringly parameters with ."" so the JSON encoder emits the
# right types; several attributes (cues, *_objterms, uptakeLimits,
# minimize_reaction_costs, annotations, location) get bespoke
# per-field coercion; singleton subobjects are unboxed from their
# internal one-element arrays.
sub serializeToDB {
my ($self) = @_;
my $data = {};
$self->fix_references();
$data = { __VERSION__ => $self->__version__() } if defined $self->__version__();
my $attributes = $self->_attributes();
foreach my $item (@{$attributes}) {
my $name = $item->{name};
# Repair out-of-range isCofactor flags before serializing.
if ($name eq "isCofactor") {
if ($self->$name() != 0 && $self->$name() != 1) {
$self->$name(0);
}
}
if (defined($self->$name())) {
if ($item->{type} eq "Int" || $item->{type} eq "Num" || $item->{type} eq "Bool") {
$data->{$name} = $self->$name()+0;
} elsif ($name eq "cues") {
$data->{$name} = $self->$name();
foreach my $cue (keys(%{$data->{$name}})) {
$data->{$name}->{$cue} = $data->{$name}->{$cue}+0;
}
} elsif ($name eq "fba_ref" || $name eq "gapfill_ref") {
if (defined($self->$name()) && length($self->$name()) > 0) {
$data->{$name} = $self->$name();
}
} elsif ($name =~ m/_objterms$/) {
$data->{$name} = {};
foreach my $key (keys(%{$self->$name()})) {
$data->{$name}->{$key} = $self->$name()->{$key}+0;
}
} elsif ($name =~ m/uptakeLimits/) {
$data->{$name} = {};
foreach my $key (keys(%{$self->$name()})) {
$data->{$name}->{$key} = $self->$name()->{$key}+0;
}
} elsif ($name =~ m/minimize_reaction_costs/) {
$data->{$name} = {};
foreach my $key (keys(%{$self->$name()})) {
$data->{$name}->{$key} = $self->$name()->{$key}+0;
}
} elsif ($name =~ m/^parameters$/) {
$data->{$name} = {};
foreach my $key (keys(%{$self->$name()})) {
$data->{$name}->{$key} = $self->$name()->{$key}."";
}
} elsif ($name eq "annotations") {
# Annotation tuples: slots 0/1 default to "", slot 2 is numeric.
my $dataitem = $self->$name();
for (my $i=0; $i < @{$dataitem}; $i++) {
if (!defined($dataitem->[$i]->[1])) {
$data->{$name}->[$i]->[1] = "";
} else {
$data->{$name}->[$i]->[1] = $dataitem->[$i]->[1];
}
if (!defined($dataitem->[$i]->[0])) {
$data->{$name}->[$i]->[0] = "";
} else {
$data->{$name}->[$i]->[0] = $dataitem->[$i]->[0];
}
$data->{$name}->[$i]->[2] = $dataitem->[$i]->[2]+0;
}
} elsif ($name eq "location") {
# Location tuples: slots 1 and 3 are numeric, 0 and 2 pass through.
my $dataitem = $self->$name();
for (my $i=0; $i < @{$dataitem}; $i++) {
$data->{$name}->[$i]->[0] = $dataitem->[$i]->[0];
$data->{$name}->[$i]->[1] = $dataitem->[$i]->[1]+0;
$data->{$name}->[$i]->[3] = $dataitem->[$i]->[3]+0;
$data->{$name}->[$i]->[2] = $dataitem->[$i]->[2];
}
} else {
$data->{$name} = $self->$name();
}
}
}
my $subobjects = $self->_subobjects();
foreach my $item (@{$subobjects}) {
my $name = "_".$item->{name};
my $arrayRef = $self->$name();
$data->{$item->{name}} = [];
foreach my $subobject (@{$arrayRef}) {
# Built subobjects recurse; unbuilt ones are copied raw minus the
# parent back-reference.
if ($subobject->{created} == 1) {
push(@{$data->{$item->{name}}},$subobject->{object}->serializeToDB());
} else {
my $newData;
foreach my $key (keys(%{$subobject->{data}})) {
if ($key ne "parent") {
$newData->{$key} = $subobject->{data}->{$key};
}
}
push(@{$data->{$item->{name}}},$newData);
}
}
if (defined $item->{"singleton"} && $item->{"singleton"} == 1) {
if (scalar @{$data->{$item->{name}}} > 0) {
$data->{$item->{name}} = $data->{$item->{name}}->[0];
}
else {
delete $data->{$item->{name}};
}
}
}
return $data;
}
# Deep-copy this object by round-tripping it through its serialized
# form; returns a new, parentless instance of the same class.
sub cloneObject {
    my ($self) = @_;
    my $class = join('::', 'Bio::KBase::ObjectAPI', $self->_module(), $self->_class());
    return $class->new($self->serializeToDB());
}
# Serialize this object to a UTF-8 JSON string.  Accepts an optional
# {pp => 1} argument to pretty-print the output.
sub toJSON {
    my $self = shift;
    my $args = Bio::KBase::ObjectAPI::utilities::args([], {pp => 0}, @_);
    my $encoder = JSON::XS->new->utf8(1);
    $encoder->allow_blessed(1);
    $encoder->pretty(1) if $args->{pp} == 1;
    return $encoder->encode($self->serializeToDB());
}
=head3 export
Definition:
string = Bio::KBase::ObjectAPI::BaseObject->export({
format => readable/html/json
});
Description:
Exports media data to the specified format.
=cut
# Dispatch to a print_<format> method (readable/html/json).  Errors
# out when no printer exists for the requested format.
sub export {
    my $self = shift;
    my $args = Bio::KBase::ObjectAPI::utilities::args(["format"], {}, @_);
    my $printer = "print_" . $args->{format};
    Bio::KBase::ObjectAPI::utilities::error("Unrecognized type for export: " . $args->{format})
        unless $self->can($printer);
    return $self->$printer();
}
=head3 print_html
Definition:
Description:
Exports data to html format
=cut
# Export hook: render this object as an HTML document.
sub print_html {
    my ($self) = @_;
    return $self->createHTML();
}
=head3 print_readable
Definition:
Description:
Exports data to readable format
=cut
# Export hook: render this object as plain readable text.
sub print_readable {
    my ($self) = @_;
    return $self->toReadableString();
}
=head3 print_json
Definition:
Description:
Exports data to json format
=cut
# Export hook: render this object as JSON.
sub print_json {
    my ($self) = @_;
    return $self->toJSON();
}
######################################################################
#Output functions
######################################################################
# Assemble the pieces of the HTML view: a title, an ordered tab id
# list, and per-tab content.  The "main" tab is a table of readable
# attributes; each subobject type gets its own tab (ids "tab-2"
# onward) rendered as a sortable data table.
sub htmlComponents {
my $self = shift;
my $args = Bio::KBase::ObjectAPI::utilities::args([],{}, @_);
my $data = $self->_createReadableData();
my $output = {
title => $self->_type()." Viewer",
tablist => [],
tabs => {
main => {
content => "",
name => "Overview"
}
}
};
$output->{tabs}->{main}->{content} .= "<table>\n";
for (my $i=0; $i < @{$data->{attributes}->{headings}}; $i++) {
$output->{tabs}->{main}->{content} .= "<tr><th>".$data->{attributes}->{headings}->[$i]."</th><td style='font-size:16px;border: 1px solid black;'>".$data->{attributes}->{data}->[0]->[$i]."</td></tr>\n";
}
$output->{tabs}->{main}->{content} .= "</table>\n";
# Tab ids start at 2 because the template hard-codes "tab-1" for the
# overview tab.
my $count = 2;
foreach my $subobject (@{$data->{subobjects}}) {
my $name = $self->_type()." ".$subobject->{name};
my $id = "tab-".$count;
push(@{$output->{tablist}},$id);
$output->{tabs}->{$id} = {
content => Bio::KBase::ObjectAPI::utilities::PRINTHTMLTABLE( $subobject->{headings}, $subobject->{data}, 'data-table' ),
name => $name
};
$count++;
}
return $output;
}
# Render the full HTML document for this object by substituting the
# htmlComponents() output into the ${TITLE}/${TABS}/${MAINTAB}/
# ${TABDIVS} placeholders of the file-level header/body/tail
# templates.  With internal => 1 the <head> and closing </html> are
# omitted so the fragment can be embedded in another page.
sub createHTML {
my $self = shift;
my $args = Bio::KBase::ObjectAPI::utilities::args([],{internal => 0}, @_);
my $document = "";
if ($args->{internal} == 0) {
$document .= $htmlheader."\n";
}
$document .= $htmlbody."\n";
if ($args->{internal} == 0) {
$document .= $htmltail;
}
my $htmlData = $self->htmlComponents();
my $title = $htmlData->{title};
$document =~ s/\$\{TITLE\}/$title/;
my $tablist = "";
foreach my $id (@{$htmlData->{tablist}}) {
$tablist .= '<li><a href="#'.$id.'">'.$htmlData->{tabs}->{$id}->{name}."</a></li>\n";
}
$document =~ s/\$\{TABS\}/$tablist/;
my $maintab = $htmlData->{tabs}->{main}->{content};
$document =~ s/\$\{MAINTAB\}/$maintab/;
my $divdata = "";
for (my $i=0; $i < @{$htmlData->{tablist}}; $i++) {
$divdata .= '<div class="tab-pane" id="'.$htmlData->{tablist}->[$i].'">'."\n";
$divdata .= $htmlData->{tabs}->{$htmlData->{tablist}->[$i]}->{content};
$divdata .= "</div>\n";
}
$document =~ s/\$\{TABDIVS\}/$divdata/;
return $document;
}
# Render the object as indented plain text: an "Attributes {...}"
# section, one section per subobject type, and one per result
# attribute (recursing into result objects).  Returns a single joined
# string by default, or the raw line arrayref when $asArray is passed
# (used by the recursive calls).
sub toReadableString {
my ($self, $asArray) = @_;
my $output = ["Attributes {"];
my $data = $self->_createReadableData();
for (my $i=0; $i < @{$data->{attributes}->{headings}}; $i++) {
push(@{$output},"\t".$data->{attributes}->{headings}->[$i].":".$data->{attributes}->{data}->[0]->[$i])
}
push(@{$output},"}");
if (defined($data->{subobjects})) {
for (my $i=0; $i < @{$data->{subobjects}}; $i++) {
push(@{$output},$data->{subobjects}->[$i]->{name}." (".join("\t",@{$data->{subobjects}->[$i]->{headings}}).") {");
for (my $j=0; $j < @{$data->{subobjects}->[$i]->{data}}; $j++) {
push(@{$output},join("\t",@{$data->{subobjects}->[$i]->{data}->[$j]}));
}
push(@{$output},"}");
}
}
if (defined($data->{results})) {
for (my $i=0; $i < @{$data->{results}}; $i++) {
my $rs = $data->{results}->[$i];
my $objects = $self->$rs();
if (defined($objects->[0])) {
push(@{$output},$rs." objects {");
for (my $j=0; $j < @{$objects}; $j++) {
push(@{$output},@{$objects->[$j]->toReadableString("asArray")});
}
push(@{$output},"}");
}
}
}
return join("\n", @$output) unless defined $asArray;
return $output;
}
# Build the intermediate structure consumed by toReadableString() and
# htmlComponents(): printable attribute headings/values, one table
# per subobject type (headings taken from the first subobject), and
# the list of result-attribute names.
sub _createReadableData {
my ($self) = @_;
my $data;
my ($sortedAtt,$sortedSO,$sortedRS) = $self->_getReadableAttributes();
$data->{attributes}->{headings} = $sortedAtt;
for (my $i=0; $i < @{$data->{attributes}->{headings}}; $i++) {
my $att = $data->{attributes}->{headings}->[$i];
push(@{$data->{attributes}->{data}->[0]},$self->$att());
}
for (my $i=0; $i < @{$sortedSO}; $i++) {
my $so = $sortedSO->[$i];
my $soData = {name => $so};
my $objects = $self->$so();
if (defined($objects->[0])) {
# Headings come from the first subobject; undef cell values are
# rendered as "".
my ($sortedAtt,$sortedSO) = $objects->[0]->_getReadableAttributes();
$soData->{headings} = $sortedAtt;
for (my $j=0; $j < @{$objects}; $j++) {
for (my $k=0; $k < @{$sortedAtt}; $k++) {
my $att = $sortedAtt->[$k];
$soData->{data}->[$j]->[$k] = ($objects->[$j]->$att() || "");
}
}
push(@{$data->{subobjects}},$soData);
}
}
for (my $i=0; $i < @{$sortedRS}; $i++) {
push(@{$data->{results}},$sortedRS->[$i]);
}
return $data;
}
# Inspect the class's Moose metadata and partition its printable
# attributes (printOrder != -1) into three lists sorted by
# printOrder: plain attributes ("attribute"/"msdata" types), result
# attributes (type matching ^result), and subobject attributes
# (everything else).  Returns ($attributes, $subobjects, $results).
sub _getReadableAttributes {
my ($self) = @_;
my $priority = {};
my $attributes = [];
my $prioritySO = {};
my $attributesSO = [];
my $priorityRS = {};
my $attributesRS = [];
my $class = 'Bio::KBase::ObjectAPI::'.$self->_module().'::'.$self->_class();
foreach my $attr ( $class->meta->get_all_attributes ) {
if ($attr->isa('Bio::KBase::ObjectAPI::Attribute::Typed') && $attr->printOrder() != -1 && ($attr->type() eq "attribute" || $attr->type() eq "msdata")) {
push(@{$attributes},$attr->name());
$priority->{$attr->name()} = $attr->printOrder();
} elsif ($attr->isa('Bio::KBase::ObjectAPI::Attribute::Typed') && $attr->printOrder() != -1 && $attr->type() =~ m/^result/) {
push(@{$attributesRS},$attr->name());
$priorityRS->{$attr->name()} = $attr->printOrder();
} elsif ($attr->isa('Bio::KBase::ObjectAPI::Attribute::Typed') && $attr->printOrder() != -1) {
push(@{$attributesSO},$attr->name());
$prioritySO->{$attr->name()} = $attr->printOrder();
}
}
my $sortedAtt = [sort { $priority->{$a} <=> $priority->{$b} } @{$attributes}];
my $sortedSO = [sort { $prioritySO->{$a} <=> $prioritySO->{$b} } @{$attributesSO}];
my $sortedRS = [sort { $priorityRS->{$a} <=> $priorityRS->{$b} } @{$attributesRS}];
return ($sortedAtt,$sortedSO,$sortedRS);
}
######################################################################
#SubObject manipulation functions
######################################################################
# Attach a subobject to the named subobject array.  Accepts either a
# raw data hashref (the object is constructed immediately) or an
# already-built Bio::KBase::ObjectAPI object.  The new child's parent
# is set to $self.  Returns the attached object.
sub add {
my ($self, $attribute, $data_or_object) = @_;
my $attr_info = $self->_subobjects($attribute);
if (!defined($attr_info)) {
Bio::KBase::ObjectAPI::utilities::error("Object doesn't have subobject with name: $attribute");
}
my $obj_info = {
created => 0,
class => $attr_info->{class}
};
my $ref = ref($data_or_object);
if ($ref eq "HASH") {
# need to create object first
$obj_info->{data} = $data_or_object;
$self->_build_object($attribute, $obj_info);
} elsif ($ref =~ m/Bio::KBase::ObjectAPI/) {
$obj_info->{object} = $data_or_object;
$obj_info->{created} = 1;
} else {
Bio::KBase::ObjectAPI::utilities::error("Neither data nor object passed into " . ref($self) . "->add");
}
$obj_info->{object}->parent($self);
my $method = "_$attribute";
push(@{$self->$method}, $obj_info);
return $obj_info->{object};
}
# Detach a constructed subobject from the named subobject array.
# Matching is by object identity; entries that were never built
# cannot match and are left alone.  Returns the number of entries
# removed.
sub remove {
    my ($self, $attribute, $object) = @_;
    my $attr_info = $self->_subobjects($attribute);
    if (!defined($attr_info)) {
        Bio::KBase::ObjectAPI::utilities::error("Object doesn't have attribute with name: $attribute");
    }
    my $removedCount = 0;
    my $method = "_$attribute";
    my $array = $self->$method;
    # BUG FIX: iterate backwards so splicing an entry out cannot skip
    # the element that slides into its slot (the old forward loop
    # incremented $i past it, missing adjacent duplicates).
    for (my $i = $#{$array}; $i >= 0; $i--) {
        my $obj_info = $array->[$i];
        next unless $obj_info->{created};
        if ($object eq $obj_info->{object}) {
            splice(@$array, $i, 1);
            $removedCount += 1;
        }
    }
    return $removedCount;
}
# Resolve a reference string to an object, dispatching on its shape:
#   "~"                             -> this object's top-level parent
#   "~/a/b/type/attr/val"           -> nested query under two accessors
#   "~/a/type/attr/val"             -> query under one accessor
#   "~/type/attr/val"               -> query on the top parent itself
#   bare UUID                       -> store lookup by UUID
#   UUID/type/attr/val              -> dead path (errors before lookup)
#   ws/obj[/ver] path               -> store lookup, optionally + query
# Errors on undef input or an unrecognized format.
sub getLinkedObject {
my ($self, $ref) = @_;
if (! defined $ref) {
Bio::KBase::ObjectAPI::utilities::error("Undefined reference in BaseObject::getLinkedObject");
}
if ($ref =~ m/^~$/) {
return $self->topparent();
} elsif ($ref =~ m/^~\/(\w+)\/(\w+)\/(\w+)\/(\w+)\/([\w\.\|\-:]+)$/) {
my $linkedobject = $1;
my $otherlinkedobject = $2;
return $self->topparent()->$linkedobject()->$otherlinkedobject()->queryObject($3,{$4 => $5});
} elsif ($ref =~ m/^~\/(\w+)\/(\w+)\/(\w+)\/([\w\.\|\-:]+)$/) {
my $linkedobject = $1;
return $self->topparent()->$linkedobject()->queryObject($2,{$3 => $4});
} elsif ($ref =~ m/^~\/(\w+)\/(\w+)\/([\w\.\|\-:]+)$/) {
return $self->topparent()->queryObject($1,{$2 => $3});
} elsif ($ref =~ m/^[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}$/) {
return $self->store()->getObjectByUUID($ref);
} elsif ($ref =~ m/^([A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12})\/(\w+)\/(\w+)\/([\w\.\|\-]+)$/) {
# NOTE(review): this branch always errors before the return below,
# so UUID-plus-query references are effectively unsupported.
Bio::KBase::ObjectAPI::utilities::error("FAILED!");
return $self->store()->getObjectByUUID($1)->queryObject($2,{$3 => $4});
} elsif ($ref =~ m/^[:\w]+\/[\w\.\|\-]+\/[\w\.\|\-]+$/) {
return $self->store()->get_object($ref);
} elsif ($ref =~ m/^([:\w]+\/\w+\/\w+)\/(\w+)\/(\w+)\/([\w\.\|\-:]+)$/) {
return $self->store()->get_object($1)->queryObject($2,{$3 => $4});
} elsif ($ref =~ m/^[:\w]+\/[\w\.\|\-]+$/) {
return $self->store()->get_object($ref);
}
Bio::KBase::ObjectAPI::utilities::error("Unrecognized reference format:".$ref);
}
# Resolve a list of reference strings into objects, preserving order.
sub getLinkedObjectArray {
    my ($self, $refs) = @_;
    return [ map { $self->getLinkedObject($_) } @{$refs} ];
}
# Remove $object's reference from the named array link (entries are
# matched by the object's id as a suffix) and clear the link's cached
# objects so they are rebuilt on next access.
sub removeLinkArrayItem {
    my ($self, $link, $object) = @_;
    my $linkdata = $self->_links($link);
    return unless defined($linkdata) && $linkdata->{array} == 1;
    my $method = $linkdata->{attribute};
    my $data = $self->$method();
    my $id = $object->id();
    # BUG FIXES vs. the original: the id is now \Q...\E-quoted so
    # regex metacharacters in ids cannot misfire (consistent with
    # addLinkArrayItem), and the loop runs backwards so splice cannot
    # skip the element that slides into the removed slot.
    for (my $i = $#{$data}; $i >= 0; $i--) {
        next unless $data->[$i] =~ m/\Q$id\E$/;
        Bio::KBase::ObjectAPI::utilities::verbose("Removing object from link array.");
        if (@{$data} == 1) {
            $self->$method([]);
        } else {
            splice(@{$data}, $i, 1);
            $self->$method($data);
        }
        my $clearer = "clear_".$link;
        $self->$clearer();
    }
}
# Append $object's reference to the named array link unless an entry
# ending in the object's id is already present; clears the link's
# object cache before appending so it is rebuilt on next access.
sub addLinkArrayItem {
    my ($self, $link, $object) = @_;
    my $linkdata = $self->_links($link);
    return unless defined($linkdata) && $linkdata->{array} == 1;
    my $method = $linkdata->{attribute};
    my $data = $self->$method();
    my $id = $object->id();
    # grep in scalar context counts matching entries.
    my $already_present = grep { $_ =~ m/\Q$id\E$/ } @{$data};
    if (!$already_present) {
        Bio::KBase::ObjectAPI::utilities::verbose("Adding object to link array.");
        my $clearer = "clear_".$link;
        $self->$clearer();
        push(@{$data}, $object->_reference());
    }
}
# Empty the named array link and drop its cached objects.
sub clearLinkArray {
    my ($self, $link) = @_;
    my $linkdata = $self->_links($link);
    return unless defined($linkdata) && $linkdata->{array} == 1;
    my $attribute = $linkdata->{attribute};
    $self->$attribute([]);
    Bio::KBase::ObjectAPI::utilities::verbose("Clearing link array.");
    my $clearer = "clear_".$link;
    $self->$clearer();
}
# Walk up the parent chain until the owning KBaseStore is reached.
# Dies when the object is detached (no parent).
sub store {
    my ($self) = @_;
    my $parent = $self->parent();
    if (!defined($parent)) {
        Bio::KBase::ObjectAPI::utilities::error("Attempted to get object with no store!");
    }
    return $parent->store() if ref($parent) ne "Bio::KBase::ObjectAPI::KBaseStore";
    return $parent;
}
# Return the root of the parent chain: the ancestor whose _top flag
# is set (which may be $self itself).
sub topparent {
    my ($self) = @_;
    return $self if $self->_top() == 1;
    return $self->parent()->topparent();
}
# Lazily construct the real subobject from an info record.  Loads the
# target class on demand, instantiates it from the stashed data hash,
# marks the record as created, and drops the raw data.  Idempotent:
# already-built records return their cached object.
sub _build_object {
my ($self, $attribute, $obj_info) = @_;
if ($obj_info->{created}) {
return $obj_info->{object};
}
my $attInfo = $self->_subobjects($attribute);
if (!defined($attInfo->{class})) {
Bio::KBase::ObjectAPI::utilities::error("No class for attribute ".$attribute);
}
my $class = 'Bio::KBase::ObjectAPI::' . $attInfo->{module} . '::' . $attInfo->{class};
Module::Load::load $class;
my $obj = $class->new($obj_info->{data});
$obj_info->{created} = 1;
$obj_info->{object} = $obj;
delete $obj_info->{data};
return $obj;
}
# Force construction of every subobject in the named array and return
# the built objects in their stored order.
sub _build_all_objects {
    my ($self, $attribute) = @_;
    my $accessor = "_$attribute";
    return [ map { $self->_build_object($attribute, $_) } @{ $self->$accessor() } ];
}
# Schema version of this class; undef means unversioned (see BUILDARGS).
sub __version__ { $VERSION }
# Return an upgrade coderef for data at the given version; the base
# implementation is the identity transform.
sub __upgrade__ { return sub { return $_[0] } }
__PACKAGE__->meta->make_immutable;
1;
| kbase/KBaseFBAModeling | lib/Bio/KBase/ObjectAPI/BaseObject.pm | Perl | mit | 23,666 |
#
# The contents of this file are subject to the Mozilla Public
# License Version 1.1 (the "License"); you may not use this file
# except in compliance with the License. You may obtain a copy of
# the License at http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS
# IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
# implied. See the License for the specific language governing
# rights and limitations under the License.
#
# The Original Code is the RDF::Core module
#
# The Initial Developer of the Original Code is Ginger Alliance Ltd.
# Portions created by Ginger Alliance are
# Copyright (C) 2001 Ginger Alliance Ltd.
# All Rights Reserved.
#
# Contributor(s):
#
# Alternatively, the contents of this file may be used under the
# terms of the GNU General Public License Version 2 or later (the
# "GPL"), in which case the provisions of the GPL are applicable
# instead of those above. If you wish to allow use of your
# version of this file only under the terms of the GPL and not to
# allow others to use your version of this file under the MPL,
# indicate your decision by deleting the provisions above and
# replace them with the notice and other provisions required by
# the GPL. If you do not delete the provisions above, a recipient
# may use your version of this file under either the MPL or the
# GPL.
#
package RDF::Core::NodeFactory;
use strict;
require Exporter;
require RDF::Core::Resource;
require RDF::Core::Literal;
use Carp;
use URI;
# Construct a node factory.
#
# Accepts an options hash; recognised keys include BaseURI (defaults
# to 'uri:'), GenPrefix and GenCounter.  May be called as a class or
# an instance method; an instance call produces a fresh factory of the
# same class.
sub new {
    my ($pkg, %options) = @_;
    $pkg = ref $pkg || $pkg;
    # @_ always holds at least the invocant, so the original
    # "if (@_ > 0)" guard was dead code: options are stored
    # unconditionally (an empty hash when none were supplied).
    my $self = { _options => \%options };
    $self->{_options}->{BaseURI} ||= 'uri:';
    bless $self, $pkg;
}
# Replace the factory's option hash reference wholesale.
sub setOptions {
    my $self = shift;
    my $options = shift;
    $self->{_options} = $options;
    return $options;
}
# Return the factory's option hash reference (not a copy).
sub getOptions {
    my ($self) = @_;
    return $self->{_options};
}
# Create a literal node with optional language tag and datatype URI.
sub newLiteral {
    my ($self, $value, $lang, $datatype) = @_;
    # Direct method-call syntax; the original used indirect object
    # notation ("new RDF::Core::Literal ..."), which is ambiguous to
    # the parser and discouraged.
    return RDF::Core::Literal->new($value, $lang, $datatype);
}
# Create a resource node.
#
# With two or more arguments the call is treated as a ($namespace,
# $localValue) pair; the namespace is absolutized against BaseURI.
# With a single defined argument the value is either a bNode label
# (starts with "_:", used verbatim) or a URI (absolutized against
# BaseURI when one is configured).  With no argument, or undef, a
# fresh bNode label is generated.
sub newResource {
    my $self = shift;
    my $resource;
    # Numeric comparison; the original used the string operator "gt",
    # which only worked by accident for these operand values.
    if (@_ > 1) {
        # More than one parameter is interpreted as a
        # ($namespace, $localValue) pair, unless both are undef.
        my ($namespace, $localValue) = @_;
        return $self->newResource
            if !defined $namespace && !defined $localValue;
        if (!defined $namespace) {
            croak "Resource's namespace must be defined"
        }
        $localValue = ''
            unless defined $localValue;
        # Direct class-method syntax replaces the original indirect
        # object notation ("new_abs URI(...)", "new RDF::Core::...").
        my $absoluteURI = URI->new_abs($namespace, $self->getOptions->{BaseURI});
        $resource = RDF::Core::Resource->new($absoluteURI->as_string, $localValue);
    } else {
        # One parameter is a URI or a bNode label.
        my ($URI) = @_;
        if (defined $URI) {
            if ($URI !~ /^_:/ && defined $self->getOptions->{BaseURI}) {
                my $absoluteURI = URI->new_abs($URI, $self->getOptions->{BaseURI});
                $resource = RDF::Core::Resource->new($absoluteURI->as_string);
            } else {
                $resource = RDF::Core::Resource->new($URI);
            }
        } else {
            # No parameter (or undef): generate a bNode label.
            $resource = RDF::Core::Resource->new($self->_generateURI);
        }
    }
    return $resource;
}
# Produce the next anonymous-node (bNode) label: GenPrefix followed by
# the current GenCounter value, post-incrementing the counter.  The
# defaults (GenPrefix '_:a', GenCounter 0) are installed on first use.
sub _generateURI {
    my ($self) = @_;
    my $opts = $self->getOptions;
    $opts->{GenPrefix} = '_:a' unless defined $opts->{GenPrefix};
    $opts->{GenCounter} = 0 unless defined $opts->{GenCounter};
    return $opts->{GenPrefix} . $opts->{GenCounter}++;
}
1;
__END__
=head1 NAME
RDF::Core::NodeFactory - produces literals and resources, generates labels for anonymous resources
=head1 SYNOPSIS
require RDF::Core::NodeFactory;
my $factory = new RDF::Core::NodeFactory(BaseURI=>'http://www.foo.org/');
my $resource = $factory->newResource('http://www.foo.org/pages');
#get the same uri:
my $absolutizedResource = $factory->newResource('/pages');
#anonymous resource
my $generatedResource = $factory->newResource;
=head1 DESCRIPTION
NodeFactory generates RDF graph nodes - literals and resources. The resources' URIs are expanded against base uri (BaseURI option) to their absolute forms using URI module. NodeFactory can generate unique 'anonymous' resources.
=head2 Interface
=over 4
=item * new(%options)
Available options are:
=over 4
=item * BaseURI
When NodeFactory generates a resource from relative URI, BaseURI is used to obtain absolute URI. BaseURI must be absolute. Default value is 'uri:'.
=item * GenPrefix, GenCounter
Is used to generate bNode label (an anonymous resource). Default values are '_:a' for GenPrefix and 0 for GenCounter. Resulting label is concatenation of GenPrefix and GenCounter.
=back
=item * getOptions
=item * setOptions(\%options)
=item * newLiteral($value)
=item * newResource($namespace, $localValue)
=item * newResource($uri)
=item * newResource
=back
=head1 LICENSE
This package is subject to the MPL (or the GPL alternatively).
=head1 AUTHOR
Ginger Alliance, rdf@gingerall.cz
=head1 SEE ALSO
URI, RDF::Core::Resource, RDF::Core::Literal
=cut
| carlgao/lenga | images/lenny64-peon/usr/share/perl5/RDF/Core/NodeFactory.pm | Perl | mit | 5,013 |
:- ensure_loaded('$REGULUS/PrologLib/compatibility').
%---------------------------------------------------------------
:- module(random_subcorpus,
[random_subcorpus/4,
random_sub_bicorpus/7,
subcorpus/3,
test_random_subcorpus/1
]
).
%---------------------------------------------------------------
:- use_module('$REGULUS/PrologLib/CorpusTools/match_patterns').
:- use_module('$REGULUS/PrologLib/utilities').
:- use_module(library(lists)).
%---------------------------------------------------------------
%% test_random_subcorpus(+TestId)
%
% Canned entry points for corpus-extraction experiments.  Each clause
% bundles hard-coded input/output paths and a match pattern for one
% experiment; paths use environment-style prefixes ($ACCEPT) that the
% file-name utilities expand.

% 10 random French lines containing "tu"/"te"/"t'" forms (small corpus).
test_random_subcorpus(small_tu) :-
random_subcorpus('$ACCEPT/MT/GTFeb2012/CleanFrenchVersions/all_files_small.txt',
or("Tu ", " tu ", "-tu ", " te ", " t'"),
10,
'$ACCEPT/MT/GTFeb2012/CleanFrenchVersions/all_files_tu_small.txt').
% 200 random lines with "tu"/"te"/"t'" forms from the full corpus.
test_random_subcorpus(all_tu) :-
random_subcorpus('$ACCEPT/MT/GTFeb2012/CleanFrenchVersions/all_files_v2.txt',
or("Tu ", " tu ", "-tu ", " te ", " t'"),
200,
'$ACCEPT/MT/GTFeb2012/CleanFrenchVersions/tu_200_v2.txt').
% 200 random lines containing an "est-ce que" variant.
test_random_subcorpus(all_est_ce_que) :-
random_subcorpus('$ACCEPT/MT/GTFeb2012/CleanFrenchVersions/all_files_v2.txt',
or("est-ce que", "Est-ce que", "est ce que", "Est ce que"),
200,
'$ACCEPT/MT/GTFeb2012/CleanFrenchVersions/est_ce_que_200.txt').
% 250 random forum lines containing a question mark.
test_random_subcorpus(forum_questions) :-
random_subcorpus('$ACCEPT/MT/GTFeb2012/CleanFrenchVersions/all_files_v2.txt',
"?",
250,
'$ACCEPT/MT/GTFeb2012/CleanFrenchVersions/questions_250.txt').
% All question lines, no sampling (uses subcorpus/3).
test_random_subcorpus(all_forum_questions) :-
subcorpus('$ACCEPT/MT/GTFeb2012/CleanFrenchVersions/all_files_v2.txt',
"?",
'$ACCEPT/MT/GTFeb2012/CleanFrenchVersions/all_questions.txt').
% Split an aligned FR/EN question bicorpus into 5000-line dev set plus
% test-set remainder.
test_random_subcorpus(questions) :-
random_sub_bicorpus('$ACCEPT/MT/Europarl/Generated/europarl_questions_fr.txt',
'$ACCEPT/MT/Europarl/Generated/europarl_questions_en.txt',
5000,
'$ACCEPT/MT/Europarl/Generated/europarl_questions_fr_dev.txt',
'$ACCEPT/MT/Europarl/Generated/europarl_questions_en_dev.txt',
'$ACCEPT/MT/Europarl/Generated/europarl_questions_fr_test.txt',
'$ACCEPT/MT/Europarl/Generated/europarl_questions_en_test.txt').
%---------------------------------------------------------------
%% random_subcorpus(+InFile, +Pattern, +N, +OutFile)
%
% Read InFile (unicode, one string per line), keep only the lines that
% match Pattern (see match_patterns:match/2), seed the RNG from the
% clock, draw N of the surviving lines at random, and write them to
% OutFile.  Progress is reported on each step; the final cut makes the
% predicate deterministic.
random_subcorpus(InFile, Pattern, N, OutFile) :-
safe_absolute_file_name(InFile, AbsInFile),
safe_absolute_file_name(OutFile, AbsOutFile),
read_unicode_file_to_string_list(AbsInFile, InList),
length(InList, NIn),
format('~N--- Read file (~d lines) ~w~n', [NIn, AbsInFile]),
extract_elements_matching_pattern(InList, Pattern, FilteredList),
length(FilteredList, NFiltered),
format('~N--- Filtered elements (~d left)~n', [NFiltered]),
set_random_generator_state_from_time,
pick_n_random_members_from_list(N, FilteredList, OutList),
length(OutList, NOut),
write_string_list_to_unicode_file(OutList, AbsOutFile),
format('~N--- Written file (~d lines) ~w~n', [NOut, AbsOutFile]),
!.
%% subcorpus(+InFile, +Pattern, +OutFile)
%
% Write to OutFile every line of InFile that matches Pattern -- the
% non-sampling counterpart of random_subcorpus/4.
subcorpus(InFile, Pattern, OutFile) :-
safe_absolute_file_name(InFile, AbsInFile),
safe_absolute_file_name(OutFile, AbsOutFile),
read_unicode_file_to_string_list(AbsInFile, InList),
length(InList, NIn),
format('~N--- Read file (~d lines) ~w~n', [NIn, AbsInFile]),
extract_elements_matching_pattern(InList, Pattern, FilteredList),
length(FilteredList, NFiltered),
write_string_list_to_unicode_file(FilteredList, AbsOutFile),
format('~N--- Written file (~d lines) ~w~n', [NFiltered, AbsOutFile]),
!.
%% extract_elements_matching_pattern(+Strings, +Pattern, -Matches)
%
% Matches is the subsequence of Strings whose elements satisfy
% match(Pattern, Element), in their original order.
extract_elements_matching_pattern([], _Pattern, []).
extract_elements_matching_pattern([String | Strings], Pattern, Matches) :-
    (   match(Pattern, String) ->
        Matches = [String | Matches1]
    ;   Matches = Matches1
    ),
    !,
    extract_elements_matching_pattern(Strings, Pattern, Matches1).
%---------------------------------------------------------------
%% random_sub_bicorpus(+InSource, +InTarget, +N,
%%                     +OutSource1, +OutTarget1, +OutSource2, +OutTarget2)
%
% Split an aligned bicorpus into a random sample and its remainder.
% The source and target files must have the same number of lines;
% otherwise an error is printed and the predicate fails.  Lines are
% paired element-wise, N pairs are drawn at random (time-seeded RNG)
% and written to the first output file pair; the remaining pairs go to
% the second pair of files.
random_sub_bicorpus(InSource, InTarget, N, OutSource1, OutTarget1, OutSource2, OutTarget2) :-
safe_absolute_file_name(InSource, AbsInSource),
safe_absolute_file_name(InTarget, AbsInTarget),
safe_absolute_file_name(OutSource1, AbsOutSource1),
safe_absolute_file_name(OutTarget1, AbsOutTarget1),
safe_absolute_file_name(OutSource2, AbsOutSource2),
safe_absolute_file_name(OutTarget2, AbsOutTarget2),
read_unicode_file_to_string_list(AbsInSource, InSourceList),
length(InSourceList, NInSource),
format('~N--- Read file (~d lines) ~w~n', [NInSource, AbsInSource]),
read_unicode_file_to_string_list(AbsInTarget, InTargetList),
length(InTargetList, NInTarget),
format('~N--- Read file (~d lines) ~w~n', [NInTarget, AbsInTarget]),
( NInSource \== NInTarget ->
format('~N*** Error: different numbers of records in source and target files~n', []),
fail
;
otherwise ->
set_random_generator_state_from_time,
lists_paired_list(InSourceList, InTargetList, InList),
pick_n_random_members_from_list(N, InList, OutList1, OutList2),
% lists_paired_list/3 is run "backwards" here to unzip the pairs.
lists_paired_list(OutSourceList1, OutTargetList1, OutList1),
lists_paired_list(OutSourceList2, OutTargetList2, OutList2),
length(OutList1, NOut1),
length(OutList2, NOut2),
write_string_list_to_unicode_file(OutSourceList1, AbsOutSource1),
format('~N--- Written file (~d lines) ~w~n', [NOut1, AbsOutSource1]),
write_string_list_to_unicode_file(OutTargetList1, AbsOutTarget1),
format('~N--- Written file (~d lines) ~w~n', [NOut1, AbsOutTarget1]),
write_string_list_to_unicode_file(OutSourceList2, AbsOutSource2),
format('~N--- Written file (~d lines) ~w~n', [NOut2, AbsOutSource2]),
write_string_list_to_unicode_file(OutTargetList2, AbsOutTarget2),
format('~N--- Written file (~d lines) ~w~n', [NOut2, AbsOutTarget2])
).
%% lists_paired_list(?Lefts, ?Rights, ?Pairs)
%
% Pairs is the element-wise zip of Lefts and Rights as Left-Right
% terms.  Works in either direction: zip two lists, or unzip a list
% of pairs.
lists_paired_list([], [], []) :-
    !.
lists_paired_list([L | Ls], [R | Rs], [L-R | Pairs]) :-
    !,
    lists_paired_list(Ls, Rs, Pairs).
| TeamSPoon/logicmoo_workspace | packs_sys/logicmoo_nlu/ext/regulus/PrologLib/CorpusTools/random_subcorpus.pl | Perl | mit | 5,770 |
# pod source for openjazz man page
# pod2man -s6 -r160214 --stderr -cSlackBuilds.org openjazz.pod > openjazz.6
=pod
=head1 NAME
B<openjazz> - free/open-source game engine for Jazz Jackrabbit
=head1 SYNOPSIS
B<openjazz> I<[-f]> I<[dir] ...>
=head1 DESCRIPTION
OpenJazz is a free, open-source version of the classic Jazz Jackrabbit(tm)
games. OpenJazz can be compiled on a wide range of operating systems,
including Windows 98/Me/XP and Linux. OpenJazz requires the files
from any of the following games:
=over
=item Jazz Jackrabbit
=item Jazz Jackrabbit CD
=item Jazz Jackrabbit Shareware Edition
=item Jazz Jackrabbit Christmas Edition
=item Jazz Jackrabbit: Holiday Hare 1995
=back
OpenJazz will not work with any of the Jazz Jackrabbit 2 games. Don't
get confused: there's a "Holiday Hare 1998", which isn't supported
(only the 1995 version is).
There is no support for saving/loading the game. Multiplayer
is supported, but not well tested.
=head1 OPTIONS
B<-f> starts the game fullscreen. Any other option beginning with - will
be ignored. Anything else is treated as a directory to be added to the
file search path.
=head1 FILES
Game data files must be named in uppercase (e.g. B<BLOCKS.000>).
The search path for game data files includes, in order:
=over
=item the current directory
=item $HOME/.openjazz/
=item all paths listed on the command line I<in reverse order>
=back
In addition, the file B<openjazz.000> (containing the OpenJazz logo,
note the lowercase name) will be searched for in B</usr/share/openjazz>
after all other directories (so each separate game doesn't need its
own copy, when you've got multiple games installed).
B<$HOME/.openjazz/> stores user preferences and any custom levels
downloaded from multiplayer servers. Will be created if it doesn't
exist.
=head1 SEE ALSO
B<http://www.alister.eu/jazz/oj/faq.php>
=head1 AUTHORS
B<openjazz> is copyright (c) 2005-2013 Alister Thomson.
This man page written by B. Watson for the SlackBuilds.org project, and may
be used by anyone.
| panosmdma/SlackOnly-SlackBuilds | games/openjazz/openjazz.pod | Perl | mit | 2,039 |
package Yogafire::Regions;
use strict;
use warnings;
use Mouse;
# Columns shown by output(); defaults to region id + name.
has 'out_columns' => (is => 'rw', default => sub { [qw/region_id region_name/] }, );
# Output format label handed through to Yogafire::Output.
has 'out_format' => (is => 'rw');
# Region objects returned by ec2->describe_regions, populated in BUILD.
has 'regions' => (is => 'rw');
no Mouse;
# Static display metadata for AWS regions: id/oid are the API region
# names, name is the human-readable location shown to the user.
my @meta_regions = (
    { id => 'us-east-1',      oid => 'us-east-1',      name => 'US East (Northern Virginia)' },
    { id => 'us-east-2',      oid => 'us-east-2',      name => 'US East (Ohio)' },
    { id => 'us-west-1',      oid => 'us-west-1',      name => 'US West (Northern California)' },
    { id => 'us-west-2',      oid => 'us-west-2',      name => 'US West (Oregon)' },
    { id => 'eu-west-1',      oid => 'eu-west-1',      name => 'EU (Ireland)' },
    { id => 'eu-west-2',      oid => 'eu-west-2',      name => 'EU (London)' },
    { id => 'eu-central-1',   oid => 'eu-central-1',   name => 'EU (Frankfurt)' },
    { id => 'ca-central-1',   oid => 'ca-central-1',   name => 'Canada (Central)' },
    { id => 'ap-southeast-1', oid => 'ap-southeast-1', name => 'Asia Pacific (Singapore)' },
    { id => 'ap-southeast-2', oid => 'ap-southeast-2', name => 'Asia Pacific (Sydney)' },
    { id => 'ap-northeast-1', oid => 'ap-northeast-1', name => 'Asia Pacific (Tokyo)' },
    { id => 'ap-northeast-2', oid => 'ap-northeast-2', name => 'Asia Pacific (Seoul)' },
    { id => 'sa-east-1',      oid => 'sa-east-1',      name => 'South America (Sao Paulo)' },
    # Fixed display-name typo: was "Munbai".
    { id => 'ap-south-1',     oid => 'ap-south-1',     name => 'Asia Pacific (Mumbai)' },
);
use Yogafire::Output;
use Term::ANSIColor qw/colored/;
use Yogafire::Declare qw/ec2 config/;
# Mouse constructor hook.
#
# Fetches the live region list from EC2, annotates each region object
# with its human-readable name (full_name) and ordering id (oid) from
# the static @meta_regions table, then stores the list in ->regions.
# Regions unknown to @meta_regions are kept but left unannotated.
sub BUILD {
my ($self) = @_;
my @regions = ec2->describe_regions();
for my $region (@regions) {
for my $meta_region (@meta_regions) {
if($region->regionName eq $meta_region->{id}) {
$region->{data}->{full_name} = $meta_region->{name};
$region->{data}->{oid} = $meta_region->{oid};
last;
}
}
}
$self->regions(\@regions);
}
# Print the region table via Yogafire::Output.
#
# $zones - when true, append a "region_zones" column listing each
#          region's availability zones, each colored by its state
#          (green for 'available', red otherwise).
# Columns come from ->out_columns; output format from ->out_format.
sub output {
my ($self, $zones) = @_;
my @headers = @{$self->out_columns};
push @headers, 'region_zones' if $zones;
my $output = Yogafire::Output->new({ format => $self->out_format });
$output->header(\@headers);
my @rows = @{$self->regions};
# Each row: [region id, display name, (optional) zone list string].
@rows = map {
my @data = ($_->regionName, $_->{data}->{full_name});
if($zones) {
my @zones = map { colored($_, $self->_get_state_color($_->zoneState)) } $_->zones;
push @data, join(', ', @zones);
}
\@data;
} @rows;
$output->output(\@rows);
}
# Map an availability-zone state to an ANSI color name:
# 'available' renders green, anything else red.
sub _get_state_color {
    my ($self, $status) = @_;
    return $status eq 'available' ? 'green' : 'red';
}
# Search the loaded regions for one whose API id occurs in $text.
#
# Returns { id => ..., name => ... } for the first region whose
# regionName appears as a substring of $text, or an empty hash ref
# when nothing matches.
sub find {
    my ($self, $text) = @_;
    for my $region (@{ $self->regions }) {
        my $region_id = $region->regionName;
        # \Q...\E matches the id literally; the original interpolated
        # it into the regex unescaped, which would misbehave if an id
        # ever contained regex metacharacters.
        if ($text =~ /\Q$region_id\E/) {
            return {
                id   => $region_id,
                name => $region->{data}->{full_name},
            };
        }
    }
    return {};
}
1;
| toritori0318/p5-Yogafire | lib/Yogafire/Regions.pm | Perl | mit | 3,080 |
=pod
=head1 NAME
EVP_SignInit, EVP_SignUpdate, EVP_SignFinal - EVP signing functions
=head1 SYNOPSIS
#include <openssl/evp.h>
int EVP_SignInit_ex(EVP_MD_CTX *ctx, const EVP_MD *type, ENGINE *impl);
int EVP_SignUpdate(EVP_MD_CTX *ctx, const void *d, unsigned int cnt);
int EVP_SignFinal(EVP_MD_CTX *ctx,unsigned char *sig,unsigned int *s, EVP_PKEY *pkey);
void EVP_SignInit(EVP_MD_CTX *ctx, const EVP_MD *type);
int EVP_PKEY_size(EVP_PKEY *pkey);
=head1 DESCRIPTION
The EVP signature routines are a high level interface to digital
signatures.
EVP_SignInit_ex() sets up signing context B<ctx> to use digest
B<type> from ENGINE B<impl>. B<ctx> must be initialized with
EVP_MD_CTX_init() before calling this function.
EVP_SignUpdate() hashes B<cnt> bytes of data at B<d> into the
signature context B<ctx>. This function can be called several times on the
same B<ctx> to include additional data.
EVP_SignFinal() signs the data in B<ctx> using the private key B<pkey> and
places the signature in B<sig>. B<sig> must be at least EVP_PKEY_size(pkey)
bytes in size. B<s> is an OUT parameter, and not used as an IN parameter.
The number of bytes of data written (i.e. the length of the signature)
will be written to the integer at B<s>, at most EVP_PKEY_size(pkey) bytes
will be written.
EVP_SignInit() initializes a signing context B<ctx> to use the default
implementation of digest B<type>.
EVP_PKEY_size() returns the maximum size of a signature in bytes. The actual
signature returned by EVP_SignFinal() may be smaller.
=head1 RETURN VALUES
EVP_SignInit_ex(), EVP_SignUpdate() and EVP_SignFinal() return 1
for success and 0 for failure.
EVP_PKEY_size() returns the maximum size of a signature in bytes.
The error codes can be obtained by L<ERR_get_error(3)>.
=head1 NOTES
The B<EVP> interface to digital signatures should almost always be used in
preference to the low level interfaces. This is because the code then becomes
transparent to the algorithm used and much more flexible.
Due to the link between message digests and public key algorithms the correct
digest algorithm must be used with the correct public key type. A list of
algorithms and associated public key algorithms appears in
L<EVP_DigestInit(3)>.
When signing with DSA private keys the random number generator must be seeded
or the operation will fail. The random number generator does not need to be
seeded for RSA signatures.
The call to EVP_SignFinal() internally finalizes a copy of the digest context.
This means that calls to EVP_SignUpdate() and EVP_SignFinal() can be called
later to digest and sign additional data.
Since only a copy of the digest context is ever finalized the context must
be cleaned up after use by calling EVP_MD_CTX_cleanup() or a memory leak
will occur.
=head1 BUGS
Older versions of this documentation wrongly stated that calls to
EVP_SignUpdate() could not be made after calling EVP_SignFinal().
Since the private key is passed in the call to EVP_SignFinal() any error
relating to the private key (for example an unsuitable key and digest
combination) will not be indicated until after potentially large amounts of
data have been passed through EVP_SignUpdate().
It is not possible to change the signing parameters using these functions.
The previous two bugs are fixed in the newer EVP_DigestSign*() functions.
=head1 SEE ALSO
L<EVP_VerifyInit(3)>,
L<EVP_DigestInit(3)>, L<err(3)>,
L<evp(3)>, L<hmac(3)>, L<md2(3)>,
L<md5(3)>, L<mdc2(3)>, L<ripemd(3)>,
L<sha(3)>, L<dgst(1)>
=head1 HISTORY
EVP_SignInit(), EVP_SignUpdate() and EVP_SignFinal() are
available in all versions of SSLeay and OpenSSL.
EVP_SignInit_ex() was added in OpenSSL 0.9.7.
=cut
| vbloodv/blood | extern/openssl.orig/doc/crypto/EVP_SignInit.pod | Perl | mit | 3,685 |
package Module::Build::Platform::aix;
use strict;
use warnings;
our $VERSION = '0.4216';
$VERSION = eval $VERSION;    # normalize underscore dev-release versions to a plain number
use Module::Build::Platform::Unix;
# Inherit all behaviour from the generic Unix builder.
our @ISA = qw(Module::Build::Platform::Unix);
# This class isn't necessary anymore, but we can't delete it, because
# some people might still have the old copy in their @INC, containing
# code we don't want to execute, so we have to make sure an upgrade
# will replace it with this empty subclass.
1;
__END__
=head1 NAME
Module::Build::Platform::aix - Builder class for AIX platform
=head1 DESCRIPTION
This module provides some routines very specific to the AIX
platform.
Please see the L<Module::Build> for the general docs.
=head1 AUTHOR
Ken Williams <kwilliams@cpan.org>
=head1 SEE ALSO
perl(1), Module::Build(3), ExtUtils::MakeMaker(3)
=cut
| jkb78/extrajnm | local/lib/perl5/Module/Build/Platform/aix.pm | Perl | mit | 810 |
#!/usr/bin/perl
use lib "/httpd/modules";
# Account name to provision (hard-coded); the database name is the
# uppercased account name.
my $USERNAME = "heirloom";
my $ucUSERNAME = uc($USERNAME);
# NOTE(review): "create database" is not a shell command -- this looks
# like it was meant to invoke mysqladmin (or mysql -e "CREATE DATABASE
# ...") to create the schema; confirm against the deployment
# environment.  Neither system() call checks its exit status, so
# failures are silent.
system("create database $ucUSERNAME");
system("mysql $ucUSERNAME < /backend/patches/schema.sql");
| CommerceRack/backend | scripts/account/provision.pl | Perl | mit | 207 |
# This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.07) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
#
# Generated from /tmp/BU3Xn7v6Kb/australasia. Olson data version 2015g
#
# Do not edit this file directly.
#
package DateTime::TimeZone::Australia::Lord_Howe;
$DateTime::TimeZone::Australia::Lord_Howe::VERSION = '1.94';
use strict;
use Class::Singleton 1.03;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;
@DateTime::TimeZone::Australia::Lord_Howe::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );
my $spans =
[
[
DateTime::TimeZone::NEG_INFINITY, # utc_start
59771568220, # utc_end 1895-01-31 13:23:40 (Thu)
DateTime::TimeZone::NEG_INFINITY, # local_start
59771606400, # local_end 1895-02-01 00:00:00 (Fri)
38180,
0,
'LMT',
],
[
59771568220, # utc_start 1895-01-31 13:23:40 (Thu)
62487900000, # utc_end 1981-02-28 14:00:00 (Sat)
59771604220, # local_start 1895-01-31 23:23:40 (Thu)
62487936000, # local_end 1981-03-01 00:00:00 (Sun)
36000,
0,
'AEST',
],
[
62487900000, # utc_start 1981-02-28 14:00:00 (Sat)
62508468600, # utc_end 1981-10-24 15:30:00 (Sat)
62487937800, # local_start 1981-03-01 00:30:00 (Sun)
62508506400, # local_end 1981-10-25 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62508468600, # utc_start 1981-10-24 15:30:00 (Sat)
62519956200, # utc_end 1982-03-06 14:30:00 (Sat)
62508510000, # local_start 1981-10-25 03:00:00 (Sun)
62519997600, # local_end 1982-03-07 02:00:00 (Sun)
41400,
1,
'LHDT',
],
[
62519956200, # utc_start 1982-03-06 14:30:00 (Sat)
62540523000, # utc_end 1982-10-30 15:30:00 (Sat)
62519994000, # local_start 1982-03-07 01:00:00 (Sun)
62540560800, # local_end 1982-10-31 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62540523000, # utc_start 1982-10-30 15:30:00 (Sat)
62551405800, # utc_end 1983-03-05 14:30:00 (Sat)
62540564400, # local_start 1982-10-31 03:00:00 (Sun)
62551447200, # local_end 1983-03-06 02:00:00 (Sun)
41400,
1,
'LHDT',
],
[
62551405800, # utc_start 1983-03-05 14:30:00 (Sat)
62571972600, # utc_end 1983-10-29 15:30:00 (Sat)
62551443600, # local_start 1983-03-06 01:00:00 (Sun)
62572010400, # local_end 1983-10-30 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62571972600, # utc_start 1983-10-29 15:30:00 (Sat)
62582855400, # utc_end 1984-03-03 14:30:00 (Sat)
62572014000, # local_start 1983-10-30 03:00:00 (Sun)
62582896800, # local_end 1984-03-04 02:00:00 (Sun)
41400,
1,
'LHDT',
],
[
62582855400, # utc_start 1984-03-03 14:30:00 (Sat)
62603422200, # utc_end 1984-10-27 15:30:00 (Sat)
62582893200, # local_start 1984-03-04 01:00:00 (Sun)
62603460000, # local_end 1984-10-28 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62603422200, # utc_start 1984-10-27 15:30:00 (Sat)
62614305000, # utc_end 1985-03-02 14:30:00 (Sat)
62603463600, # local_start 1984-10-28 03:00:00 (Sun)
62614346400, # local_end 1985-03-03 02:00:00 (Sun)
41400,
1,
'LHDT',
],
[
62614305000, # utc_start 1985-03-02 14:30:00 (Sat)
62634871800, # utc_end 1985-10-26 15:30:00 (Sat)
62614342800, # local_start 1985-03-03 01:00:00 (Sun)
62634909600, # local_end 1985-10-27 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62634871800, # utc_start 1985-10-26 15:30:00 (Sat)
62646966000, # utc_end 1986-03-15 15:00:00 (Sat)
62634911400, # local_start 1985-10-27 02:30:00 (Sun)
62647005600, # local_end 1986-03-16 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
62646966000, # utc_start 1986-03-15 15:00:00 (Sat)
62665716600, # utc_end 1986-10-18 15:30:00 (Sat)
62647003800, # local_start 1986-03-16 01:30:00 (Sun)
62665754400, # local_end 1986-10-19 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62665716600, # utc_start 1986-10-18 15:30:00 (Sat)
62678415600, # utc_end 1987-03-14 15:00:00 (Sat)
62665756200, # local_start 1986-10-19 02:30:00 (Sun)
62678455200, # local_end 1987-03-15 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
62678415600, # utc_start 1987-03-14 15:00:00 (Sat)
62697771000, # utc_end 1987-10-24 15:30:00 (Sat)
62678453400, # local_start 1987-03-15 01:30:00 (Sun)
62697808800, # local_end 1987-10-25 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62697771000, # utc_start 1987-10-24 15:30:00 (Sat)
62710470000, # utc_end 1988-03-19 15:00:00 (Sat)
62697810600, # local_start 1987-10-25 02:30:00 (Sun)
62710509600, # local_end 1988-03-20 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
62710470000, # utc_start 1988-03-19 15:00:00 (Sat)
62729825400, # utc_end 1988-10-29 15:30:00 (Sat)
62710507800, # local_start 1988-03-20 01:30:00 (Sun)
62729863200, # local_end 1988-10-30 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62729825400, # utc_start 1988-10-29 15:30:00 (Sat)
62741919600, # utc_end 1989-03-18 15:00:00 (Sat)
62729865000, # local_start 1988-10-30 02:30:00 (Sun)
62741959200, # local_end 1989-03-19 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
62741919600, # utc_start 1989-03-18 15:00:00 (Sat)
62761275000, # utc_end 1989-10-28 15:30:00 (Sat)
62741957400, # local_start 1989-03-19 01:30:00 (Sun)
62761312800, # local_end 1989-10-29 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62761275000, # utc_start 1989-10-28 15:30:00 (Sat)
62772159600, # utc_end 1990-03-03 15:00:00 (Sat)
62761314600, # local_start 1989-10-29 02:30:00 (Sun)
62772199200, # local_end 1990-03-04 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
62772159600, # utc_start 1990-03-03 15:00:00 (Sat)
62792724600, # utc_end 1990-10-27 15:30:00 (Sat)
62772197400, # local_start 1990-03-04 01:30:00 (Sun)
62792762400, # local_end 1990-10-28 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62792724600, # utc_start 1990-10-27 15:30:00 (Sat)
62803609200, # utc_end 1991-03-02 15:00:00 (Sat)
62792764200, # local_start 1990-10-28 02:30:00 (Sun)
62803648800, # local_end 1991-03-03 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
62803609200, # utc_start 1991-03-02 15:00:00 (Sat)
62824174200, # utc_end 1991-10-26 15:30:00 (Sat)
62803647000, # local_start 1991-03-03 01:30:00 (Sun)
62824212000, # local_end 1991-10-27 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62824174200, # utc_start 1991-10-26 15:30:00 (Sat)
62835058800, # utc_end 1992-02-29 15:00:00 (Sat)
62824213800, # local_start 1991-10-27 02:30:00 (Sun)
62835098400, # local_end 1992-03-01 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
62835058800, # utc_start 1992-02-29 15:00:00 (Sat)
62855623800, # utc_end 1992-10-24 15:30:00 (Sat)
62835096600, # local_start 1992-03-01 01:30:00 (Sun)
62855661600, # local_end 1992-10-25 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62855623800, # utc_start 1992-10-24 15:30:00 (Sat)
62867113200, # utc_end 1993-03-06 15:00:00 (Sat)
62855663400, # local_start 1992-10-25 02:30:00 (Sun)
62867152800, # local_end 1993-03-07 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
62867113200, # utc_start 1993-03-06 15:00:00 (Sat)
62887678200, # utc_end 1993-10-30 15:30:00 (Sat)
62867151000, # local_start 1993-03-07 01:30:00 (Sun)
62887716000, # local_end 1993-10-31 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62887678200, # utc_start 1993-10-30 15:30:00 (Sat)
62898562800, # utc_end 1994-03-05 15:00:00 (Sat)
62887717800, # local_start 1993-10-31 02:30:00 (Sun)
62898602400, # local_end 1994-03-06 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
62898562800, # utc_start 1994-03-05 15:00:00 (Sat)
62919127800, # utc_end 1994-10-29 15:30:00 (Sat)
62898600600, # local_start 1994-03-06 01:30:00 (Sun)
62919165600, # local_end 1994-10-30 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62919127800, # utc_start 1994-10-29 15:30:00 (Sat)
62930012400, # utc_end 1995-03-04 15:00:00 (Sat)
62919167400, # local_start 1994-10-30 02:30:00 (Sun)
62930052000, # local_end 1995-03-05 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
62930012400, # utc_start 1995-03-04 15:00:00 (Sat)
62950577400, # utc_end 1995-10-28 15:30:00 (Sat)
62930050200, # local_start 1995-03-05 01:30:00 (Sun)
62950615200, # local_end 1995-10-29 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62950577400, # utc_start 1995-10-28 15:30:00 (Sat)
62963881200, # utc_end 1996-03-30 15:00:00 (Sat)
62950617000, # local_start 1995-10-29 02:30:00 (Sun)
62963920800, # local_end 1996-03-31 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
62963881200, # utc_start 1996-03-30 15:00:00 (Sat)
62982027000, # utc_end 1996-10-26 15:30:00 (Sat)
62963919000, # local_start 1996-03-31 01:30:00 (Sun)
62982064800, # local_end 1996-10-27 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
62982027000, # utc_start 1996-10-26 15:30:00 (Sat)
62995330800, # utc_end 1997-03-29 15:00:00 (Sat)
62982066600, # local_start 1996-10-27 02:30:00 (Sun)
62995370400, # local_end 1997-03-30 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
62995330800, # utc_start 1997-03-29 15:00:00 (Sat)
63013476600, # utc_end 1997-10-25 15:30:00 (Sat)
62995368600, # local_start 1997-03-30 01:30:00 (Sun)
63013514400, # local_end 1997-10-26 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63013476600, # utc_start 1997-10-25 15:30:00 (Sat)
63026780400, # utc_end 1998-03-28 15:00:00 (Sat)
63013516200, # local_start 1997-10-26 02:30:00 (Sun)
63026820000, # local_end 1998-03-29 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63026780400, # utc_start 1998-03-28 15:00:00 (Sat)
63044926200, # utc_end 1998-10-24 15:30:00 (Sat)
63026818200, # local_start 1998-03-29 01:30:00 (Sun)
63044964000, # local_end 1998-10-25 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63044926200, # utc_start 1998-10-24 15:30:00 (Sat)
63058230000, # utc_end 1999-03-27 15:00:00 (Sat)
63044965800, # local_start 1998-10-25 02:30:00 (Sun)
63058269600, # local_end 1999-03-28 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63058230000, # utc_start 1999-03-27 15:00:00 (Sat)
63076980600, # utc_end 1999-10-30 15:30:00 (Sat)
63058267800, # local_start 1999-03-28 01:30:00 (Sun)
63077018400, # local_end 1999-10-31 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63076980600, # utc_start 1999-10-30 15:30:00 (Sat)
63089679600, # utc_end 2000-03-25 15:00:00 (Sat)
63077020200, # local_start 1999-10-31 02:30:00 (Sun)
63089719200, # local_end 2000-03-26 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63089679600, # utc_start 2000-03-25 15:00:00 (Sat)
63102987000, # utc_end 2000-08-26 15:30:00 (Sat)
63089717400, # local_start 2000-03-26 01:30:00 (Sun)
63103024800, # local_end 2000-08-27 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63102987000, # utc_start 2000-08-26 15:30:00 (Sat)
63121129200, # utc_end 2001-03-24 15:00:00 (Sat)
63103026600, # local_start 2000-08-27 02:30:00 (Sun)
63121168800, # local_end 2001-03-25 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63121129200, # utc_start 2001-03-24 15:00:00 (Sat)
63139879800, # utc_end 2001-10-27 15:30:00 (Sat)
63121167000, # local_start 2001-03-25 01:30:00 (Sun)
63139917600, # local_end 2001-10-28 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63139879800, # utc_start 2001-10-27 15:30:00 (Sat)
63153183600, # utc_end 2002-03-30 15:00:00 (Sat)
63139919400, # local_start 2001-10-28 02:30:00 (Sun)
63153223200, # local_end 2002-03-31 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63153183600, # utc_start 2002-03-30 15:00:00 (Sat)
63171329400, # utc_end 2002-10-26 15:30:00 (Sat)
63153221400, # local_start 2002-03-31 01:30:00 (Sun)
63171367200, # local_end 2002-10-27 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63171329400, # utc_start 2002-10-26 15:30:00 (Sat)
63184633200, # utc_end 2003-03-29 15:00:00 (Sat)
63171369000, # local_start 2002-10-27 02:30:00 (Sun)
63184672800, # local_end 2003-03-30 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63184633200, # utc_start 2003-03-29 15:00:00 (Sat)
63202779000, # utc_end 2003-10-25 15:30:00 (Sat)
63184671000, # local_start 2003-03-30 01:30:00 (Sun)
63202816800, # local_end 2003-10-26 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63202779000, # utc_start 2003-10-25 15:30:00 (Sat)
63216082800, # utc_end 2004-03-27 15:00:00 (Sat)
63202818600, # local_start 2003-10-26 02:30:00 (Sun)
63216122400, # local_end 2004-03-28 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63216082800, # utc_start 2004-03-27 15:00:00 (Sat)
63234833400, # utc_end 2004-10-30 15:30:00 (Sat)
63216120600, # local_start 2004-03-28 01:30:00 (Sun)
63234871200, # local_end 2004-10-31 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63234833400, # utc_start 2004-10-30 15:30:00 (Sat)
63247532400, # utc_end 2005-03-26 15:00:00 (Sat)
63234873000, # local_start 2004-10-31 02:30:00 (Sun)
63247572000, # local_end 2005-03-27 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63247532400, # utc_start 2005-03-26 15:00:00 (Sat)
63266283000, # utc_end 2005-10-29 15:30:00 (Sat)
63247570200, # local_start 2005-03-27 01:30:00 (Sun)
63266320800, # local_end 2005-10-30 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63266283000, # utc_start 2005-10-29 15:30:00 (Sat)
63279586800, # utc_end 2006-04-01 15:00:00 (Sat)
63266322600, # local_start 2005-10-30 02:30:00 (Sun)
63279626400, # local_end 2006-04-02 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63279586800, # utc_start 2006-04-01 15:00:00 (Sat)
63297732600, # utc_end 2006-10-28 15:30:00 (Sat)
63279624600, # local_start 2006-04-02 01:30:00 (Sun)
63297770400, # local_end 2006-10-29 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63297732600, # utc_start 2006-10-28 15:30:00 (Sat)
63310431600, # utc_end 2007-03-24 15:00:00 (Sat)
63297772200, # local_start 2006-10-29 02:30:00 (Sun)
63310471200, # local_end 2007-03-25 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63310431600, # utc_start 2007-03-24 15:00:00 (Sat)
63329182200, # utc_end 2007-10-27 15:30:00 (Sat)
63310469400, # local_start 2007-03-25 01:30:00 (Sun)
63329220000, # local_end 2007-10-28 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63329182200, # utc_start 2007-10-27 15:30:00 (Sat)
63343090800, # utc_end 2008-04-05 15:00:00 (Sat)
63329221800, # local_start 2007-10-28 02:30:00 (Sun)
63343130400, # local_end 2008-04-06 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63343090800, # utc_start 2008-04-05 15:00:00 (Sat)
63358817400, # utc_end 2008-10-04 15:30:00 (Sat)
63343128600, # local_start 2008-04-06 01:30:00 (Sun)
63358855200, # local_end 2008-10-05 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63358817400, # utc_start 2008-10-04 15:30:00 (Sat)
63374540400, # utc_end 2009-04-04 15:00:00 (Sat)
63358857000, # local_start 2008-10-05 02:30:00 (Sun)
63374580000, # local_end 2009-04-05 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63374540400, # utc_start 2009-04-04 15:00:00 (Sat)
63390267000, # utc_end 2009-10-03 15:30:00 (Sat)
63374578200, # local_start 2009-04-05 01:30:00 (Sun)
63390304800, # local_end 2009-10-04 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63390267000, # utc_start 2009-10-03 15:30:00 (Sat)
63405990000, # utc_end 2010-04-03 15:00:00 (Sat)
63390306600, # local_start 2009-10-04 02:30:00 (Sun)
63406029600, # local_end 2010-04-04 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63405990000, # utc_start 2010-04-03 15:00:00 (Sat)
63421716600, # utc_end 2010-10-02 15:30:00 (Sat)
63406027800, # local_start 2010-04-04 01:30:00 (Sun)
63421754400, # local_end 2010-10-03 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63421716600, # utc_start 2010-10-02 15:30:00 (Sat)
63437439600, # utc_end 2011-04-02 15:00:00 (Sat)
63421756200, # local_start 2010-10-03 02:30:00 (Sun)
63437479200, # local_end 2011-04-03 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63437439600, # utc_start 2011-04-02 15:00:00 (Sat)
63453166200, # utc_end 2011-10-01 15:30:00 (Sat)
63437477400, # local_start 2011-04-03 01:30:00 (Sun)
63453204000, # local_end 2011-10-02 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63453166200, # utc_start 2011-10-01 15:30:00 (Sat)
63468889200, # utc_end 2012-03-31 15:00:00 (Sat)
63453205800, # local_start 2011-10-02 02:30:00 (Sun)
63468928800, # local_end 2012-04-01 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63468889200, # utc_start 2012-03-31 15:00:00 (Sat)
63485220600, # utc_end 2012-10-06 15:30:00 (Sat)
63468927000, # local_start 2012-04-01 01:30:00 (Sun)
63485258400, # local_end 2012-10-07 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63485220600, # utc_start 2012-10-06 15:30:00 (Sat)
63500943600, # utc_end 2013-04-06 15:00:00 (Sat)
63485260200, # local_start 2012-10-07 02:30:00 (Sun)
63500983200, # local_end 2013-04-07 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63500943600, # utc_start 2013-04-06 15:00:00 (Sat)
63516670200, # utc_end 2013-10-05 15:30:00 (Sat)
63500981400, # local_start 2013-04-07 01:30:00 (Sun)
63516708000, # local_end 2013-10-06 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63516670200, # utc_start 2013-10-05 15:30:00 (Sat)
63532393200, # utc_end 2014-04-05 15:00:00 (Sat)
63516709800, # local_start 2013-10-06 02:30:00 (Sun)
63532432800, # local_end 2014-04-06 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63532393200, # utc_start 2014-04-05 15:00:00 (Sat)
63548119800, # utc_end 2014-10-04 15:30:00 (Sat)
63532431000, # local_start 2014-04-06 01:30:00 (Sun)
63548157600, # local_end 2014-10-05 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63548119800, # utc_start 2014-10-04 15:30:00 (Sat)
63563842800, # utc_end 2015-04-04 15:00:00 (Sat)
63548159400, # local_start 2014-10-05 02:30:00 (Sun)
63563882400, # local_end 2015-04-05 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63563842800, # utc_start 2015-04-04 15:00:00 (Sat)
63579569400, # utc_end 2015-10-03 15:30:00 (Sat)
63563880600, # local_start 2015-04-05 01:30:00 (Sun)
63579607200, # local_end 2015-10-04 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63579569400, # utc_start 2015-10-03 15:30:00 (Sat)
63595292400, # utc_end 2016-04-02 15:00:00 (Sat)
63579609000, # local_start 2015-10-04 02:30:00 (Sun)
63595332000, # local_end 2016-04-03 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63595292400, # utc_start 2016-04-02 15:00:00 (Sat)
63611019000, # utc_end 2016-10-01 15:30:00 (Sat)
63595330200, # local_start 2016-04-03 01:30:00 (Sun)
63611056800, # local_end 2016-10-02 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63611019000, # utc_start 2016-10-01 15:30:00 (Sat)
63626742000, # utc_end 2017-04-01 15:00:00 (Sat)
63611058600, # local_start 2016-10-02 02:30:00 (Sun)
63626781600, # local_end 2017-04-02 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63626742000, # utc_start 2017-04-01 15:00:00 (Sat)
63642468600, # utc_end 2017-09-30 15:30:00 (Sat)
63626779800, # local_start 2017-04-02 01:30:00 (Sun)
63642506400, # local_end 2017-10-01 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63642468600, # utc_start 2017-09-30 15:30:00 (Sat)
63658191600, # utc_end 2018-03-31 15:00:00 (Sat)
63642508200, # local_start 2017-10-01 02:30:00 (Sun)
63658231200, # local_end 2018-04-01 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63658191600, # utc_start 2018-03-31 15:00:00 (Sat)
63674523000, # utc_end 2018-10-06 15:30:00 (Sat)
63658229400, # local_start 2018-04-01 01:30:00 (Sun)
63674560800, # local_end 2018-10-07 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63674523000, # utc_start 2018-10-06 15:30:00 (Sat)
63690246000, # utc_end 2019-04-06 15:00:00 (Sat)
63674562600, # local_start 2018-10-07 02:30:00 (Sun)
63690285600, # local_end 2019-04-07 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63690246000, # utc_start 2019-04-06 15:00:00 (Sat)
63705972600, # utc_end 2019-10-05 15:30:00 (Sat)
63690283800, # local_start 2019-04-07 01:30:00 (Sun)
63706010400, # local_end 2019-10-06 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63705972600, # utc_start 2019-10-05 15:30:00 (Sat)
63721695600, # utc_end 2020-04-04 15:00:00 (Sat)
63706012200, # local_start 2019-10-06 02:30:00 (Sun)
63721735200, # local_end 2020-04-05 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63721695600, # utc_start 2020-04-04 15:00:00 (Sat)
63737422200, # utc_end 2020-10-03 15:30:00 (Sat)
63721733400, # local_start 2020-04-05 01:30:00 (Sun)
63737460000, # local_end 2020-10-04 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63737422200, # utc_start 2020-10-03 15:30:00 (Sat)
63753145200, # utc_end 2021-04-03 15:00:00 (Sat)
63737461800, # local_start 2020-10-04 02:30:00 (Sun)
63753184800, # local_end 2021-04-04 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63753145200, # utc_start 2021-04-03 15:00:00 (Sat)
63768871800, # utc_end 2021-10-02 15:30:00 (Sat)
63753183000, # local_start 2021-04-04 01:30:00 (Sun)
63768909600, # local_end 2021-10-03 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63768871800, # utc_start 2021-10-02 15:30:00 (Sat)
63784594800, # utc_end 2022-04-02 15:00:00 (Sat)
63768911400, # local_start 2021-10-03 02:30:00 (Sun)
63784634400, # local_end 2022-04-03 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63784594800, # utc_start 2022-04-02 15:00:00 (Sat)
63800321400, # utc_end 2022-10-01 15:30:00 (Sat)
63784632600, # local_start 2022-04-03 01:30:00 (Sun)
63800359200, # local_end 2022-10-02 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63800321400, # utc_start 2022-10-01 15:30:00 (Sat)
63816044400, # utc_end 2023-04-01 15:00:00 (Sat)
63800361000, # local_start 2022-10-02 02:30:00 (Sun)
63816084000, # local_end 2023-04-02 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63816044400, # utc_start 2023-04-01 15:00:00 (Sat)
63831771000, # utc_end 2023-09-30 15:30:00 (Sat)
63816082200, # local_start 2023-04-02 01:30:00 (Sun)
63831808800, # local_end 2023-10-01 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63831771000, # utc_start 2023-09-30 15:30:00 (Sat)
63848098800, # utc_end 2024-04-06 15:00:00 (Sat)
63831810600, # local_start 2023-10-01 02:30:00 (Sun)
63848138400, # local_end 2024-04-07 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63848098800, # utc_start 2024-04-06 15:00:00 (Sat)
63863825400, # utc_end 2024-10-05 15:30:00 (Sat)
63848136600, # local_start 2024-04-07 01:30:00 (Sun)
63863863200, # local_end 2024-10-06 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63863825400, # utc_start 2024-10-05 15:30:00 (Sat)
63879548400, # utc_end 2025-04-05 15:00:00 (Sat)
63863865000, # local_start 2024-10-06 02:30:00 (Sun)
63879588000, # local_end 2025-04-06 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63879548400, # utc_start 2025-04-05 15:00:00 (Sat)
63895275000, # utc_end 2025-10-04 15:30:00 (Sat)
63879586200, # local_start 2025-04-06 01:30:00 (Sun)
63895312800, # local_end 2025-10-05 02:00:00 (Sun)
37800,
0,
'LHST',
],
[
63895275000, # utc_start 2025-10-04 15:30:00 (Sat)
63910998000, # utc_end 2026-04-04 15:00:00 (Sat)
63895314600, # local_start 2025-10-05 02:30:00 (Sun)
63911037600, # local_end 2026-04-05 02:00:00 (Sun)
39600,
1,
'LHDT',
],
[
63910998000, # utc_start 2026-04-04 15:00:00 (Sat)
63926724600, # utc_end 2026-10-03 15:30:00 (Sat)
63911035800, # local_start 2026-04-05 01:30:00 (Sun)
63926762400, # local_end 2026-10-04 02:00:00 (Sun)
37800,
0,
'LHST',
],
];
# Version of the Olson/IANA tz database these spans were generated from.
sub olson_version { return '2015g' }
# Number of DST transitions encoded in the generated span table.
sub has_dst_changes { return 46 }
# Last year covered by pre-generated spans; later dates are extrapolated.
sub _max_year { return 2025 }
# Construct a zone instance, seeding it with the pre-generated spans.
sub _new_instance {
    my $class = shift;
    return $class->_init( @_, spans => $spans );
}
# Standard-time UTC offset (seconds) of the final observance: +10:30.
sub _last_offset { return 37800 }
# Pre-built final observance (Lord Howe time, standard offset +10:30,
# abbreviation template "LH%sT") used by DateTime::TimeZone to extrapolate
# transitions beyond the generated span table.  The embedded DateTime
# objects mark when this observance began (utc_year 1982 per generator).
my $last_observance = bless( {
  'format' => 'LH%sT',
  'gmtoff' => '10:30',
  'local_start_datetime' => bless( {
  'formatter' => undef,
  'local_rd_days' => 723240,
  'local_rd_secs' => 1800,
  'offset_modifier' => 0,
  'rd_nanosecs' => 0,
  'tz' => bless( {
  'name' => 'floating',
  'offset' => 0
  }, 'DateTime::TimeZone::Floating' ),
  'utc_rd_days' => 723240,
  'utc_rd_secs' => 1800,
  'utc_year' => 1982
  }, 'DateTime' ),
  'offset_from_std' => 0,
  'offset_from_utc' => 37800,
  'until' => [],
  'utc_start_datetime' => bless( {
  'formatter' => undef,
  'local_rd_days' => 723239,
  'local_rd_secs' => 50400,
  'offset_modifier' => 0,
  'rd_nanosecs' => 0,
  'tz' => bless( {
  'name' => 'floating',
  'offset' => 0
  }, 'DateTime::TimeZone::Floating' ),
  'utc_rd_days' => 723239,
  'utc_rd_secs' => 50400,
  'utc_year' => 1982
  }, 'DateTime' )
  }, 'DateTime::TimeZone::OlsonDB::Observance' )
;
# Accessor for the pre-built final observance defined above.
sub _last_observance { return $last_observance }
# Ongoing DST rules (Rule "LH", from 2008 to max): DST ends on the first
# Sunday on/after Apr 1 (save 0) and begins on the first Sunday on/after
# Oct 1 with a 30-minute saving ("0:30", offset_from_std 1800 seconds).
my $rules = [
  bless( {
  'at' => '2:00',
  'from' => '2008',
  'in' => 'Apr',
  'letter' => 'S',
  'name' => 'LH',
  'offset_from_std' => 0,
  'on' => 'Sun>=1',
  'save' => '0',
  'to' => 'max',
  'type' => undef
  }, 'DateTime::TimeZone::OlsonDB::Rule' ),
  bless( {
  'at' => '2:00',
  'from' => '2008',
  'in' => 'Oct',
  'letter' => 'D',
  'name' => 'LH',
  'offset_from_std' => 1800,
  'save' => '0:30',
  'on' => 'Sun>=1',
  'to' => 'max',
  'type' => undef
  }, 'DateTime::TimeZone::OlsonDB::Rule' )
]
;
# Accessor for the ongoing DST rules defined above.
sub _rules { return $rules }
1;
| rosiro/wasarabi | local/lib/perl5/DateTime/TimeZone/Australia/Lord_Howe.pm | Perl | mit | 25,351 |
# TTT::UserAgent
#
# $Id$ $Revision$
# $Source$ $Author$ $Date$
#
# A derivative of LWP::UserAgent to be used for fetching stuff.
#
#
# LWP::UserAgent subclass used by TTT for all HTTP fetching.  Currently
# adds no behaviour of its own; it exists as a central extension point.
package App::TTT::UserAgent;
use base qw(LWP::UserAgent);
# Pragma
use warnings;
use strict;
# Modules
use autodie;
## no critic
# Derive $VERSION from the RCS "$Revision$" keyword embedded in the
# single-quoted literal.  NOTE(review): because the literal is fixed at
# '\$Revision: 1 \$', this always yields "1" -- confirm that is intended.
our ($VERSION) = ( '\$Revision: 1 \$' =~ m{ \$Revision: \s+ (\S+) }msx );
## use critic
1;
| hissohathair/mumpsimus | exp/perl/lib/App/TTT/UserAgent.pm | Perl | mit | 360 |
:- module(bc_mail, [
bc_mail_send/4, % +Goal, +From, +Subject, +To
bc_mail_send_text/4, % +Text, +From, +Subject, +To
bc_mail_test/3 % +User, +Params, -Result
]).
/** <module> Helper module to send mail notifications */
:- use_module(library(smtp)).
:- use_module(library(error)).
:- use_module(library(debug)).
:- use_module(bc_data_config).
%! bc_mail_test(+User, +Params, -Result) is det.
%
% Tries to send a test email. Result is one
% of: error(E), fail, ok.
% Send a test mail using ad-hoc SMTP parameters (not the stored config).
% User and Params are dicts; Result is unified with error(E), fail or ok
% by wrap_smtp/4.
bc_mail_test(User, Params, Result):-
    must_be(dict, User),
    must_be(dict, Params),
    % Assemble the SMTP options common to every auth method.
    BaseConfig = _{
        smtp: Params.host,
        from: Params.from,
        subject: Params.subject,
        auth_method: Params.auth,
        security: Params.security
    },
    % Only the "login" auth method needs explicit credentials.
    ( Params.auth = login
    -> Config = BaseConfig.put(auth, Params.user - Params.password)
    ; Config = BaseConfig),
    % The test message is addressed to the current user's username,
    % which is assumed to be a mail address -- TODO confirm.
    wrap_smtp(User.username,
        text_body(Params.body), Config, Result).
:- meta_predicate(wrap_smtp(+, 1, +, -)).
% Wrapper around smtp_send_mail to catch
% and report the mail error.
% Wrapper around smtp_send_mail/3: catches exceptions and maps the
% outcome to Result = error(E) (exception thrown), ok (success) or
% fail (goal failed without exception).  Errors are also reported on
% the user_error stream.
wrap_smtp(To, Goal, Config, Result):-
    ( catch(smtp_send_mail(To, Goal, Config), E, true)
    -> ( nonvar(E)
        -> format(user_error, 'Mail sending error: ~w~n', [E]),
            Result = error(E)
        ; Result = ok)
    ; writeln(user_error, 'Mail sending failed.'),
        Result = fail).
% Helper to write given
% text to output.
% Write the prepared message body to the mail output stream,
% terminated by a newline.
text_body(Body, Stream):-
    writeln(Stream, Body).
:- meta_predicate(bc_mail_send(1, +, +, +)).
%! bc_mail_send(:Goal, +From, +Subject, +To) is det.
%
% Sends mail using the current SMTP configuration.
% Takes Goal argument that must produce the mail
% body. Goal must accept argument for output Stream.
% Send a mail using the stored SMTP configuration.  Goal must write the
% message body when called with an output stream.  When smtp_enabled is
% not true the predicate silently succeeds (only a debug message).
% The wrap_smtp result is deliberately ignored (best effort).
bc_mail_send(Goal, From, Subject, To):-
    must_be(atomic, From),
    must_be(atomic, Subject),
    must_be(atomic, To),
    must_be(callable, Goal),
    ( bc_config_get(smtp_enabled, true)
    -> debug(bc_mail, 'smtp is enabled', []),
        smtp_config(Config),
        % Merge per-message headers into the stored transport options.
        put_dict(_{ from: From, subject: Subject },
            Config, Options),
        wrap_smtp(To, Goal, Options, _)
    ; debug(bc_mail, 'smtp is not enabled', [])).
%! bc_mail_send_text(+Text, +From, +Subject, +To) is det.
%
% Same as bc_mail_send/4 but takes prepared
% text instead of closure.
% Convenience wrapper over bc_mail_send/4 that takes the message body
% as a ready-made atomic Text instead of a body-writing closure.
bc_mail_send_text(Text, From, Subject, To):-
    must_be(atomic, Text),
    bc_mail_send(text_body(Text), From, Subject, To).
% Builds dict from the current
% SMTP config options.
% Build the SMTP options dict from the stored configuration keys.
% With "plain" auth no credentials are included; any other auth method
% pulls smtp_user/smtp_password and passes them as auth: User-Password.
smtp_config(Config):-
    bc_config_get(smtp_host, Host),
    bc_config_get(smtp_auth, Auth),
    bc_config_get(smtp_security, Security),
    ( Auth = plain
    -> Config = _{ smtp: Host, auth_method: Auth, security: Security }
    ; bc_config_get(smtp_user, User),
        bc_config_get(smtp_password, Password),
        Config = _{ smtp: Host, auth_method: Auth, auth: User-Password, security: Security }).
| rla/blog-core | prolog/bc/bc_mail.pl | Perl | mit | 2,916 |
package wasarabi::Web::Plugin::Session;
use strict;
use warnings;
use utf8;
use Amon2::Util;
use HTTP::Session2::ClientStore2;
use Crypt::CBC;
# Plugin entry point: wires session support into the Amon2 context class.
# Installs (1) a BEFORE_DISPATCH trigger that rejects mutating requests
# lacking a valid XSRF token, (2) a $c->session accessor, and (3) an
# AFTER_DISPATCH trigger that writes the session cookie to the response.
sub init {
    my ($class, $c) = @_;

    # Validate XSRF Token.
    $c->add_trigger(
        BEFORE_DISPATCH => sub {
            my ( $c ) = @_;
            # GET/HEAD are read-only and exempt from the XSRF check.
            if ($c->req->method ne 'GET' && $c->req->method ne 'HEAD') {
                # Token may arrive as a header or a form parameter.
                my $token = $c->req->header('X-XSRF-TOKEN') || $c->req->param('XSRF-TOKEN');
                unless ($c->session->validate_xsrf_token($token)) {
                    return $c->create_simple_status_page(
                        403, 'XSRF detected.'
                    );
                }
            }
            return;
        },
    );

    Amon2::Util::add_method($c, 'session', \&_session);

    # Inject cookie header after dispatching.
    $c->add_trigger(
        AFTER_DISPATCH => sub {
            my ( $c, $res ) = @_;
            # Only serialise if a session was actually touched and the
            # response object supports cookies.
            if ($c->{session} && $res->can('cookies')) {
                $c->{session}->finalize_plack_response($res);
            }
            return;
        },
    );
}
# $c->session() accessor.
# AES (Rijndael) CBC cipher used to encrypt the client-stored session.
# SECURITY(review): the key is hard-coded in source and therefore part
# of the repository history -- move it to configuration and rotate it.
my $cipher = Crypt::CBC->new({
    key => 'Cj7efJhz-5qgZPIM0B2JzjEkPy6X9IVm',
    cipher => 'Rijndael',
});
# Lazily build and memoise the client-store session object for this
# request context.  SECURITY(review): the HMAC secret is hard-coded in
# source -- consider loading it from configuration.
sub _session {
    my ($self) = @_;
    unless ( exists $self->{session} ) {
        $self->{session} = HTTP::Session2::ClientStore2->new(
            env    => $self->req->env,
            secret => 'rH8GrqhhK8Urj01XkLPSFKsliq9Ltt1G',
            cipher => $cipher,
        );
    }
    return $self->{session};
}
1;
__END__
=head1 DESCRIPTION
This module manages session for wasarabi.
| rosiro/wasarabi | lib/wasarabi/Web/Plugin/Session.pm | Perl | mit | 1,607 |
package Weather::PWS::DataType::Coordinates;
use strict;
use warnings;

use parent qw( Weather::PWS::DataType::Base );

use Math::BigFloat;
# lat / long cordinates
# Combined getter/setter for the latitude in decimal degrees.
# A defined argument stores a new value; the current value is returned.
sub decimal_latitude
{
    my ( $self, $new_value ) = @_;
    $self->{_latitude} = $new_value if defined $new_value;
    return $self->{_latitude};
}
# Combined getter/setter for the longitude in decimal degrees.
# A defined argument stores a new value; the current value is returned.
sub decimal_longitude
{
    my ( $self, $new_value ) = @_;
    $self->{_longitude} = $new_value if defined $new_value;
    return $self->{_longitude};
}
# Combined getter/setter for the altitude in metres.
# A defined argument stores a new value; the current value is returned.
sub altitude_meters
{
    my ( $self, $new_value ) = @_;
    $self->{_altitude} = $new_value if defined $new_value;
    return $self->{_altitude};
}
# once set, we can return the parts e/w/n/s or whatever
# so we dont have to worry about future integrations making
# life hard here (see CWOP LORAN retch formatting)
# as i find the need to get just fragments of data out ill
# hack in the methods
# Return the stored altitude (metres) converted to feet as a
# Math::BigFloat; 1 metre == 3.28084 feet.
sub altitude_feet
{
    my ($self) = @_;
    my $feet = Math::BigFloat->new( $self->{_altitude} );
    return $feet->bmul(3.28084);
}
# The coordinates_* methods below render the previously stored decimal
# lat/long pair in various textual formats.  They are read-only: passing
# any (truthy) argument merely emits a warning reminding the caller that
# values must be set in decimal via the accessors above.

# "DD MM' SS"H, DD MM' SS"H" rendering of the stored pair.
sub coordinates_degrees_minutes_seconds
{
    my ($self, $ignore) = @_;
    if ($ignore)
    {
        warn "You must set the coordinate in decimal first.";
    }
    return $self->latitude_dms . ", " . $self->longitude_dms;
}

# "DD MM'H, DD MM'H" rendering of the stored pair.
sub coordinates_degrees_minutes
{
    my ($self, $ignore) = @_;
    if ($ignore)
    {
        warn "You must set the coordinate in decimal first.";
    }
    return $self->latitude_dm . ", " . $self->longitude_dm;
}

# Raw decimal rendering, "lat, long".
sub coordinates_decimal
{
    my ($self, $ignore) = @_;
    if ($ignore)
    {
        warn "You must set the coordinates in decimal first.";
    }
    return $self->{_latitude} . ", " . $self->{_longitude};
}

# APRS/CWOP-style rendering of the stored pair.
sub coordinates_aprswxnet
{
    my ($self, $ignore) = @_;
    if ($ignore)
    {
        warn "You must set the coordinates in decimal first.";
    }
    return $self->latitude_aprswxnet . ", " . $self->longitude_aprswxnet;
}
# The latitude_*/longitude_* methods below apply the hemisphere suffix:
# negative latitudes are South, negative longitudes West; the magnitude
# is formatted by the decimal_coordinate_to_* helpers further down.
# The aprswxnet longitude is zero-prefixed to keep a 3-digit degree field.

sub latitude_dms
{
    my $self = shift;
    if ($self->{_latitude} < 0)
    {
        return $self->decimal_coordinate_to_degrees_minutes_seconds($self->{_latitude} * -1) . 'S';
    }
    return $self->decimal_coordinate_to_degrees_minutes_seconds($self->{_latitude}) . 'N';
}

sub latitude_dm
{
    my $self = shift;
    if ($self->{_latitude} < 0)
    {
        return $self->decimal_coordinate_to_degrees_minutes($self->{_latitude} * -1) . 'S';
    }
    return $self->decimal_coordinate_to_degrees_minutes($self->{_latitude}) . 'N';
}

sub latitude_aprswxnet
{
    my $self = shift;
    if ($self->{_latitude} < 0)
    {
        return $self->decimal_coordinate_to_degrees_minutes_aprswxnet($self->{_latitude} * -1) . 'S';
    }
    return $self->decimal_coordinate_to_degrees_minutes_aprswxnet($self->{_latitude}) . 'N';
}

sub longitude_dms
{
    my $self = shift;
    if ($self->{_longitude} < 0)
    {
        return $self->decimal_coordinate_to_degrees_minutes_seconds($self->{_longitude} * -1) . 'W';
    }
    return $self->decimal_coordinate_to_degrees_minutes_seconds($self->{_longitude}) . 'E';
}

sub longitude_dm
{
    my $self = shift;
    if ($self->{_longitude} < 0)
    {
        return $self->decimal_coordinate_to_degrees_minutes($self->{_longitude} * -1) . 'W';
    }
    return $self->decimal_coordinate_to_degrees_minutes($self->{_longitude}) . 'E';
}

sub longitude_aprswxnet
{
    my $self = shift;
    if ($self->{_longitude} < 0)
    {
        return '0' . $self->decimal_coordinate_to_degrees_minutes_aprswxnet($self->{_longitude} * -1) . 'W';
    }
    return '0' . $self->decimal_coordinate_to_degrees_minutes_aprswxnet($self->{_longitude}) . 'E';
}
# Format a positive decimal coordinate as degrees, minutes and seconds
# (e.g. 33.5 -> q{33 30' 0"}).  All arithmetic is done with
# Math::BigFloat to avoid binary-float artefacts in the output.
sub decimal_coordinate_to_degrees_minutes_seconds
{
    my ( $self, $coordinate ) = @_;
    my $degrees  = Math::BigFloat->new($coordinate);
    my $fraction = $degrees->copy->bmod( $degrees->copy->as_int );
    my $minutes  = $fraction->copy->bmul(60);
    my $min_frac = $minutes->copy->bmod( $minutes->copy->as_int );
    my $seconds  = $min_frac->copy->bmul(60);
    return $degrees->as_int . " " . $minutes->as_int . "' " . $seconds . '"';
}
# Format a positive decimal coordinate as degrees and decimal minutes
# (e.g. 33.5 -> "33 30'"), using Math::BigFloat throughout.
sub decimal_coordinate_to_degrees_minutes
{
    my ( $self, $coordinate ) = @_;
    my $degrees  = Math::BigFloat->new($coordinate);
    my $fraction = $degrees->copy->bmod( $degrees->copy->as_int );
    my $minutes  = $fraction->copy->bmul(60);
    return $degrees->as_int . ' ' . $minutes . "'";
}
# Format a positive decimal coordinate for APRS/CWOP ("ddmm.xx" style):
# integer degrees, integer minutes, then the seconds after a dot.
# NOTE(review): neither degrees nor minutes are zero-padded, and
# bround(2) rounds to two *significant* digits (bfround(-2) would give
# two decimal places) -- confirm this matches the APRS spec the caller
# expects, which normally wants hundredths of minutes.
sub decimal_coordinate_to_degrees_minutes_aprswxnet
{
    my ($self, $lat) = @_;
    my $dec_lat = Math::BigFloat->new($lat);
    my $dec = $dec_lat->copy->bmod($dec_lat->copy->as_int);
    my $min = $dec->copy->bmul(60);
    my $hmin = $min->copy->bmod($min->copy->as_int);
    my $sec = $hmin->copy->bmul(60);
    return $dec_lat->as_int . $min->as_int . "." . $sec->bround(2);
}
1;
1;
| console0/pi-weather-station | Weather-PWS/lib/Weather/PWS/DataType/Coordinates.pm | Perl | mit | 4,863 |
#!/usr/bin/perl
# CGI endpoint that deletes the posted user accounts from a planet:
# removes each user's directory tree and purges the account records
# from the DBM databases under "User Information".
require 'quickies.pl';    # BUG FIX: the original line lacked a semicolon (syntax error)
print "Content-type: text/html\n\n";

$user_information = $MasterPath . "/User Information";

# SECURITY(review): $Planet comes straight from the query string and is
# interpolated into filesystem paths below; a value such as "../.."
# allows directory traversal.  Validate against a whitelist before use.
$Planet = $ENV{QUERY_STRING};

&parse_form;

use File::Find;

foreach $key(keys(%data)){
    # BUG FIX: decode the percent-encoded apostrophe in user names.
    # The original tr/"%27"/\'/ transliterated the individual characters
    # ", %, 2 and 7 -- not the three-character sequence "%27".
    $key =~ s/%27/'/g;
    print $key;
    $userdir = $MasterPath . "/se/Planets/$Planet/users/$key";
    chdir('../../');
    finddepth(\&deltree,"$userdir");
    rmdir("$userdir");
    &RemoveData;
}
#print"<SCRIPT>history.back()</SCRIPT>";
# File::Find callback for finddepth(): removes the current entry.
# unlink() handles files; when that fails (e.g. for a directory),
# rmdir() is tried instead.  Failures are silently ignored.
# NOTE(review): the global $file is assigned but never read elsewhere
# in this script -- presumably leftover debugging state.
sub deltree {
    $file = "$File::Find::dir/$_";
    unlink("$File::Find::dir/$_") or rmdir("$File::Find::dir/$_")
}
# Purge every DBM record belonging to the user named in the global $key.
# Each account attribute (password, access code, e-mail, planet, ip,
# registration date) lives in its own DBM file under $user_information;
# iterate over them instead of repeating the open/delete/close
# boilerplate six times (idiom: data-driven loop, identical behaviour).
sub RemoveData {
    foreach my $db (qw(password accesscode emailaddress planet ip date)) {
        my %records;
        dbmopen(%records, "$user_information/$db", 0777);
        delete($records{$key});
        dbmclose(%records);
    }
}
# Decode an application/x-www-form-urlencoded POST body from STDIN into
# the global hash %data (field name => value).
# NOTE(review): only values are percent-decoded -- field names are used
# verbatim, so encoded characters in names will not round-trip.  The
# final two substitutions crudely strip HTML comments and tags from
# values as a minimal injection defence.
sub parse_form {
    # Get the input
    read(STDIN, $buffer, $ENV{'CONTENT_LENGTH'});
    # Split the name-value pairs
    @pairs = split(/&/, $buffer);
    foreach $pair (@pairs) {
        ($name, $value) = split(/=/, $pair);
        # Un-Webify plus signs and %-encoding
        $value =~ tr/+/ /;
        $value =~ s/%([a-fA-F0-9][a-fA-F0-9])/pack("C", hex($1))/eg;
        $value =~ s/<!--(.|\n)*-->//g;
        $value =~ s/<([^>]|\n)*>//g;
        $data{$name} = $value;
    }
}
| cpraught/shattered-empires | deleteusr.pl | Perl | mit | 1,651 |
#!/usr/bin/perl
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# A script to allow Bash or Z-Shell to complete an Ant command-line.
#
# To install for Bash 2.0 or better, add the following to ~/.bashrc:
#
# $ complete -C complete-ant-cmd ant build.sh
#
# To install for Z-Shell 2.5 or better, add the following to ~/.zshrc:
#
# function ant_complete () {
# local args_line args
# read -l args_line
# set -A args $args_line
# set -A reply $(COMP_LINE=$args_line complete-ant-cmd ${args[1]} $1)
# }
# compctl -K ant_complete ant build.sh
#
# @author Mike Williams <mikew@cortexebusiness.com.au>
# Driver: Bash/zsh invoke this script with the command name and the word
# being completed; the full command line arrives in COMP_LINE.
my $cmdLine = $ENV{'COMP_LINE'};
my $antCmd  = $ARGV[0];
my $word    = $ARGV[1];

# NOTE(review): @completions is declared but never used below.
my @completions;
# Words starting with "-" complete against Ant's option list; after an
# -f/-buildfile option complete file names; otherwise complete targets.
if ($word =~ /^-/) {
    list( restrict( $word, getArguments() ));
} elsif ($cmdLine =~ /-(f|buildfile)\s+\S*$/) {
    list( getBuildFiles($word) );
} else {
    list( restrict( $word, getTargets() ));
}
exit(0);
# Print each completion candidate on its own line (the protocol the
# shell completion machinery expects).
sub list {
    print "$_\n" for @_;
}
# Keep only those completions that start with $word (matched literally,
# so regex metacharacters in the word are harmless).
sub restrict {
    my ( $prefix, @candidates ) = @_;
    return grep { /^\Q$prefix\E/ } @candidates;
}
# The full set of command-line options Ant understands.
sub getArguments {
    return qw(
        -buildfile -debug -emacs -f -find -help -listener -logfile
        -logger -projecthelp -quiet -verbose -version
    );
}
# Glob the filesystem for entries starting with $word and keep only
# those ending in ".xml" (candidate Ant build files).
sub getBuildFiles {
    my ($prefix) = @_;
    return grep { /\.xml$/ } glob("$prefix*");
}
# Determine the available targets for the build file implied by the
# command line (default: build.xml).  Results are memoised in a dot-file
# next to the build file and regenerated whenever the build file is
# newer than the cache.
sub getTargets {
    # Honour an explicit -f/-buildfile argument on the command line.
    my $buildFile = 'build.xml';
    if ($cmdLine =~ /-(f|buildfile)\s+(\S+)/) {
        $buildFile = $2;
    }
    return () unless (-f $buildFile);

    # Cache file name: "build.xml" -> ".ant-targets-build.xml" (same dir).
    my $cacheFile = $buildFile;
    $cacheFile =~ s|(.*/)?(.*)|${1}.ant-targets-${2}|;

    if ((!-e $cacheFile) || (-M $buildFile) < (-M $cacheFile)) {
        # Cache missing or stale: ask Ant itself for the target list.
        # SECURITY(review): $buildFile is interpolated into a shell
        # command; a hostile file name could inject shell syntax.
        open( my $cache_fh, '>', $cacheFile )
            or die "can't write $cacheFile: $!\n";
        open( my $help_fh, "$antCmd -projecthelp -f '$buildFile'|" )
            or return ();
        my %targets;
        while (<$help_fh>) {
            if (/^\s+(\S+)/) {
                $targets{$1}++;
            }
        }
        close($help_fh);
        my @targets = sort keys %targets;
        for (@targets) { print {$cache_fh} "$_\n"; }
        # BUG FIX: close the write handle before returning so the
        # buffered target names actually reach the cache file (the
        # original left the handle open across the early return).
        close($cache_fh) or die "can't close $cacheFile: $!\n";
        return @targets;
    }

    # Serve the target list from the cache.
    open( my $cache_fh, '<', $cacheFile )
        or die "can't read $cacheFile: $!\n";
    my @targets;
    while (<$cache_fh>) {
        chop;
        s/\r$//;    # tolerate CRLF caches (e.g. written under Cygwin)
        push( @targets, $_ );
    }
    close($cache_fh);
    return @targets;
}
| apache/lenya | tools/bin/complete-ant-cmd.pl | Perl | apache-2.0 | 3,199 |
# Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V8::Common::MobileApplicationInfo;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);
use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;
# Construct a MobileApplicationInfo payload from the argument hashref.
# Only appId and name are recognised; fields the caller did not supply
# are stripped so the serialised JSON stays concise.
sub new {
  my ( $class, $args ) = @_;
  my $self = {
    appId => $args->{appId},
    name  => $args->{name},
  };

  # Drop fields that were never assigned for a more concise JSON payload.
  remove_unassigned_fields( $self, $args );

  return bless $self, $class;
}
1;
| googleads/google-ads-perl | lib/Google/Ads/GoogleAds/V8/Common/MobileApplicationInfo.pm | Perl | apache-2.0 | 1,052 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::nokia::timos::snmp::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_snmp);
# Constructor: builds on the generic SNMP script plugin, then registers
# the plugin version and every monitoring mode this plugin provides.
sub new {
    my ($class, %options) = @_;
    my $self = $class->SUPER::new(package => __PACKAGE__, %options);
    bless $self, $class;

    $self->{version} = '1.0';
    $self->{modes} = {
        'bgp-usage'       => 'network::nokia::timos::snmp::mode::bgpusage',
        'cpu'             => 'network::nokia::timos::snmp::mode::cpu',
        'hardware'        => 'network::nokia::timos::snmp::mode::hardware',
        'l2tp-usage'      => 'network::nokia::timos::snmp::mode::l2tpusage',
        'ldp-usage'       => 'network::nokia::timos::snmp::mode::ldpusage',
        'interfaces'      => 'snmp_standard::mode::interfaces',
        'isis-usage'      => 'network::nokia::timos::snmp::mode::isisusage',
        'list-bgp'        => 'network::nokia::timos::snmp::mode::listbgp',
        'list-interfaces' => 'snmp_standard::mode::listinterfaces',
        'list-isis'       => 'network::nokia::timos::snmp::mode::listisis',
        'list-ldp'        => 'network::nokia::timos::snmp::mode::listldp',
        'list-sap'        => 'network::nokia::timos::snmp::mode::listsap',
        'list-vrtr'       => 'network::nokia::timos::snmp::mode::listvrtr',
        'memory'          => 'network::nokia::timos::snmp::mode::memory',
        'sap-usage'       => 'network::nokia::timos::snmp::mode::sapusage',
        'uptime'          => 'snmp_standard::mode::uptime',
    };

    return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check Nokia TiMOS (SR OS) equipments (7750SR, 7210SAS) in SNMP.
=cut
| Sims24/centreon-plugins | network/nokia/timos/snmp/plugin.pm | Perl | apache-2.0 | 2,691 |
## OpenXPKI::Crypto::Tool::PKCS7::API
## Written 2006 by Alexander Klink for the OpenXPKI project
## (C) Copyright 2006 by The OpenXPKI Project
use strict;
use warnings;
package OpenXPKI::Crypto::Tool::PKCS7::API;
use base qw(OpenXPKI::Crypto::API);
use Class::Std;
use English;
use OpenXPKI::Debug;
## scalar value:
## - 0 means the parameter is optional
## - 1 means the parameter is required
## array values:
## - an array represent the allowed parameters
## - element "__undef" in the array means that the parameter is optional
## hash values:
## - "" => {...} (these are the default parameters
## - "TYPE:EC" => {...} means parameters if TYPE => "EC" is used
# Register the commands this tool API accepts and their parameter
# constraints (Class::Std :PRIVATE method, called from START).
# Per the table convention above: 1 = required parameter, 0 = optional.
sub __init_command_params : PRIVATE {
    ##! 16: 'start'
    my $self = shift;
    $self->set_command_params({
        'verify' => { 'PKCS7' => 1,
                      'DATA' => 0,
        }, # + CHAIN => 0?
    });
}
# Class::Std constructor hook: initialise the command parameter table
# when an API object is built.  The argument hashref is currently unused.
sub START {
    ##! 16: 'start'
    my $self = shift;
    my $arg_ref = shift;
    $self->__init_command_params();
}
1;
__END__
=head1 Name
OpenXPKI::Crypto::Tool::PKCS7::API - API for the PKCS7 functions.
=head1 Description
This is the basic class for the PKCS7 tool API. It inherits from
OpenXPKI::Crypto::API. It defines a hash of valid commands.
| durko/openxpki | core/server/OpenXPKI/Crypto/Tool/PKCS7/API.pm | Perl | apache-2.0 | 1,305 |
#
# Copyright 2021 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package cloud::azure::network::frontdoor::mode::discovery;
use base qw(cloud::azure::management::monitor::mode::discovery);
use strict;
use warnings;
# Run the generic Azure discovery option checks, then scope discovery
# to the Front Door resource type in the Microsoft.Network namespace.
sub check_options {
    my ($self, %options) = @_;
    $self->SUPER::check_options(%options);

    $self->{namespace} = 'Microsoft.Network';
    $self->{type} = 'frontdoors';
}
1;
__END__
=head1 MODE
Azure FrontDoor discovery.
=over 8
=item B<--resource-group>
Specify resource group.
=item B<--location>
Specify location.
=item B<--prettify>
Prettify JSON output.
=back
=cut
| Tpo76/centreon-plugins | cloud/azure/network/frontdoor/mode/discovery.pm | Perl | apache-2.0 | 1,302 |
## OpenXPKI::Service::Default.pm
##
## Written 2005-2006 by Michael Bell and Martin Bartosch for the OpenXPKI project
## Polished to use a state-machine like interface 2007 by Alexander Klink
## for the OpenXPKI project
## (C) Copyright 2005-2007 by The OpenXPKI Project
package OpenXPKI::Service::Default;
use base qw( OpenXPKI::Service );
use strict;
use warnings;
use utf8;
use English;
use List::Util qw( first );
use Class::Std;
use Sys::SigAction qw( sig_alarm set_sig_handler );
use Data::Dumper;
## used modules
use OpenXPKI::i18n qw(set_language);
use OpenXPKI::Debug;
use OpenXPKI::Exception;
use OpenXPKI::Server;
use OpenXPKI::Server::Session;
use OpenXPKI::Server::Context qw( CTX );
use OpenXPKI::Service::Default::CommandApi2;
use Log::Log4perl::MDC;
# Inside-out (Class::Std) object attributes, keyed by object ident:
my %state_of :ATTR; # the current state of the service
# maximum seconds a single COMMAND may run (0 = no alarm is armed);
# Class::Std generates the set_max_execution_time() mutator via :set<...>.
my %max_execution_time : ATTR( :set<max_execution_time> );
# Service bootstrap: negotiates session creation/continuation, PKI realm
# selection and authentication with the client until the state machine
# reaches MAIN_LOOP. Errors are reported to the client as ERROR replies.
# Returns 1 when the handshake is complete.
sub init {
    my $self  = shift;
    my $ident = ident $self;
    my $args  = shift;

    ##! 1: "start"

    # timeout idle clients
    my $timeout = CTX('config')->get("system.server.service.Default.idle_timeout") || 120;
    $self->set_timeout($timeout);

    # cap for a single command's runtime (seconds); 0 means no alarm.
    my $max_execution_time = CTX('config')->get("system.server.service.Default.max_execution_time") || 0;
    $self->set_max_execution_time($max_execution_time);

    $state_of{$ident} = 'NEW';

    # in case we reuse a child in PreFork mode make sure there is
    # no session left in context
    OpenXPKI::Server::Context::killsession();

    # TODO - this should be handled by the run method after some cleanup
    # do session init, PKI realm selection and authentication
    # Handshake loop: keep exchanging messages until login is complete.
    while ($state_of{$ident} ne 'MAIN_LOOP') {
        my $msg = $self->collect();
        my $is_valid = $self->__is_valid_message({
            MESSAGE => $msg,
        });
        if (! $is_valid) {
            $self->__send_error({
                ERROR => "I18N_OPENXPKI_SERVICE_DEFAULT_RUN_UNRECOGNIZED_SERVICE_MESSAGE",
            });
        }
        else { # valid message received
            my $result;
            eval { # try to handle it
                $result = $self->__handle_message({ MESSAGE => $msg });
                # persist session unless it was killed (we assume someone saved it before)
                CTX('session')->persist if OpenXPKI::Server::Context::hascontext('session');
            };
            if (my $exc = OpenXPKI::Exception->caught()) {
                # forward OpenXPKI exceptions to the client as error replies
                $self->__send_error({ EXCEPTION => $exc });
            }
            elsif ($EVAL_ERROR) {
                # any other die() is wrapped and sent as well
                $self->__send_error({ EXCEPTION => $EVAL_ERROR });
            }
            else { # if everything was fine, send the result to the client
                $self->talk($result);
            }
        }
    }
    return 1;
}
# Checks whether the message named in $arg_ref->{MESSAGE}->{SERVICE_MSG}
# is permitted in the service's current state. Returns 1 if allowed,
# undef otherwise (the rejection is logged as a warning).
sub __is_valid_message : PRIVATE {
    my $self    = shift;
    my $ident   = ident $self;
    my $arg_ref = shift;
    my $message      = $arg_ref->{'MESSAGE'};
    my $message_name = $message->{'SERVICE_MSG'};
    ##! 32: 'message_name: ' . $message_name
    ##! 32: 'state: ' . $state_of{$ident}

    # this is a table of valid messages that may be received from the
    # client in the different states
    my $valid_messages = {
        'NEW' => [
            'PING',
            'CONTINUE_SESSION',
            'NEW_SESSION',
            'DETACH_SESSION',
        ],
        'SESSION_ID_SENT' => [
            'PING',
            'SESSION_ID_ACCEPTED',
            'CONTINUE_SESSION',
            'DETACH_SESSION',
        ],
        'SESSION_ID_SENT_FROM_CONTINUE' => [
            'PING',
            'SESSION_ID_ACCEPTED',
            'CONTINUE_SESSION',
            'DETACH_SESSION',
        ],
        'SESSION_ID_SENT_FROM_RESET' => [
            'PING',
            'SESSION_ID_ACCEPTED',
            'CONTINUE_SESSION',
            'DETACH_SESSION',
        ],
        'WAITING_FOR_PKI_REALM' => [
            'PING',
            'LOGOUT',
            'GET_PKI_REALM',
            'NEW_SESSION',
            'CONTINUE_SESSION',
            'DETACH_SESSION',
        ],
        'WAITING_FOR_AUTHENTICATION_STACK' => [
            'PING',
            'LOGOUT',
            'GET_AUTHENTICATION_STACK',
            'NEW_SESSION',
            'CONTINUE_SESSION',
            'DETACH_SESSION',
        ],
        'WAITING_FOR_LOGIN' => [
            'PING',
            'LOGOUT',
            'GET_PASSWD_LOGIN',
            'GET_CLIENT_SSO_LOGIN',
            'GET_CLIENT_X509_LOGIN',
            'GET_X509_LOGIN',
            'NEW_SESSION',
            'CONTINUE_SESSION',
            'DETACH_SESSION',
        ],
        'MAIN_LOOP' => [
            'PING',
            'LOGOUT',
            'STATUS',
            'COMMAND',
            'NEW_SESSION',
            'CONTINUE_SESSION',
            'DETACH_SESSION',
            'RESET_SESSIONID',
        ],
    };

    # Guard against an unknown/corrupted state: treat it as "nothing is
    # allowed" instead of dying while dereferencing undef.
    my @valid_msgs_now = @{ $valid_messages->{$state_of{$ident}} // [] };

    if (defined first { $_ eq $message_name } @valid_msgs_now) {
        # TODO - once could possibly check the content of the message
        # here, too
        ##! 16: 'message is valid'
        return 1;
    }

    # fixed typo: "recevied" -> "received"
    CTX('log')->system()->warn('Invalid message '.$message_name.' received in state ' . $state_of{$ident});

    ##! 16: 'message is NOT valid'
    return;
}
# Dispatches a client message to the matching __handle_<SERVICE_MSG>
# method and returns that handler's reply. OpenXPKI exceptions are
# rethrown unchanged; any other die() is wrapped into
# I18N_OPENXPKI_SERVICE_DEFAULT_HANDLE_MESSAGE_FAILED.
sub __handle_message : PRIVATE {
    ##! 1: 'start'
    my $self = shift;
    my $ident = ident $self;
    my $arg_ref = shift;
    my $message = $arg_ref->{'MESSAGE'};
    my $message_name = $message->{'SERVICE_MSG'};
    ##! 64: 'message: ' . Dumper $message

    my $result;
    # get the result from a method specific to the message name
    eval {
        # the method name is built dynamically; __is_valid_message() has
        # already restricted $message_name to a known whitelist
        my $method = '__handle_' . $message_name;
        CTX('log')->system->trace("<< $message_name (message from client)");
        $result = $self->$method($message);
    };
    if (my $exc = OpenXPKI::Exception->caught()) {
        $exc->rethrow();
    }
    elsif ($EVAL_ERROR) {
        OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPKI_SERVICE_DEFAULT_HANDLE_MESSAGE_FAILED',
            params => {
                'MESSAGE_NAME' => $message_name,
                'EVAL_ERROR' => $EVAL_ERROR,
            },
        );
    }
    return $result;
}
# Creates a brand new server session (optionally applying the client's
# LANGUAGE), installs it in the context and moves the state machine to
# SESSION_ID_SENT. The reply carries the new SESSION_ID.
sub __handle_NEW_SESSION : PRIVATE {
    ##! 1: 'start'
    my $self  = shift;
    my $ident = ident $self;
    my $msg   = shift;

    # clear the session id in the logging context until the new one is known
    Log::Log4perl::MDC->put('sid', undef);

    ##! 4: "new session"
    my $session = OpenXPKI::Server::Session->new(load_config => 1)->create;

    if (exists $msg->{LANGUAGE}) {
        ##! 8: "set language"
        set_language($msg->{LANGUAGE});
        $session->data->language($msg->{LANGUAGE});
    } else {
        ##! 8: "no language specified"
    }

    # force => 1: overwrite any session left over from a previous client
    OpenXPKI::Server::Context::setcontext({'session' => $session, force => 1});

    # only the first 4 characters of the id are put into the log context
    Log::Log4perl::MDC->put('sid', substr($session->data->id,0,4));

    CTX('log')->system->info('New session created');

    $self->__change_state({ STATE => 'SESSION_ID_SENT', });

    return {
        SESSION_ID => $session->data->id,
    };
}
# Resumes an existing session by id and installs it in the context.
# Throws ..._SESSION_CONTINUE_FAILED if the session cannot be resumed.
# The reply carries the (unchanged) SESSION_ID.
sub __handle_CONTINUE_SESSION {
    ##! 1: 'start'
    my $self  = shift;
    my $ident = ident $self;
    my $msg   = shift;

    my $session;

    # for whatever reason prior to the Client rewrite continue_session
    # has the session id not in params
    my $sess_id = exists $msg->{SESSION_ID} ? $msg->{SESSION_ID} : $msg->{PARAMS}->{SESSION_ID};

    ##! 4: "try to continue session " . $sess_id
    $session = OpenXPKI::Server::Session->new(load_config => 1);
    $session->resume($sess_id)
        or OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPKI_SERVICE_DEFAULT_HANDLE_CONTINUE_SESSION_SESSION_CONTINUE_FAILED',
            params => {ID => $sess_id}
        );

    # There might be an existing session if the child did some work before;
    # we therefore use force to overwrite existing entries
    OpenXPKI::Server::Context::setcontext({'session' => $session, force => 1});

    Log::Log4perl::MDC->put('sid', substr($sess_id,0,4));

    CTX('log')->system->debug('Session resumed');

    # do not use __change_state here, as we want to have access
    # to the old session in __handle_SESSION_ID_ACCEPTED
    $state_of{$ident} = 'SESSION_ID_SENT_FROM_CONTINUE';

    return {
        SESSION_ID => $session->data->id,
    };
}
# Assigns a fresh identifier to the current session (session data is
# kept) and moves the state machine to SESSION_ID_SENT_FROM_RESET.
sub __handle_RESET_SESSIONID: PRIVATE {
    ##! 1: 'start'
    my $self  = shift;
    my $ident = ident $self;
    my $msg   = shift;

    my $new_id   = CTX('session')->new_id;
    my $short_id = substr($new_id, 0, 4);

    CTX('log')->system->debug("Changing session ID to " . $short_id);
    Log::Log4perl::MDC->put('sid', $short_id);

    ##! 4: 'new session id ' . $new_id
    $self->__change_state({ STATE => 'SESSION_ID_SENT_FROM_RESET' });

    return { SESSION_ID => $new_id };
}
# Detaches the current session from this worker without deleting it from
# the backend, wipes the logging context and returns to state NEW.
sub __handle_DETACH_SESSION: PRIVATE {
    ##! 1: 'start'
    my $self  = shift;
    my $ident = ident $self;
    my $msg   = shift;

    # read the id before the session is dropped from the context
    my $session_id = CTX('session')->data->id;
    ##! 4: "detach session " . $session_id

    OpenXPKI::Server::Context::killsession();

    # Cleanup ALL items from the MDC!
    Log::Log4perl::MDC->remove();

    $self->__change_state({ STATE => 'NEW' });

    return { 'SERVICE_MSG' => 'DETACH' };
}
# Answers a PING according to the current state: in MAIN_LOOP it reports
# SERVICE_READY; during the handshake it re-sends the pending question
# (realm list, auth stack list or login prompt); otherwise it asks the
# client to start a session.
sub __handle_PING : PRIVATE {
    ##! 1: 'start'
    my $self    = shift;
    my $ident   = ident $self;
    my $message = shift;

    if ($state_of{$ident} eq 'MAIN_LOOP') {
        return {
            SERVICE_MSG => 'SERVICE_READY',
        };
    }
    elsif ($state_of{$ident} eq 'WAITING_FOR_PKI_REALM') {
        # rebuild the realm list from the configuration on every ping
        my @realm_names = CTX('config')->get_keys("system.realms");
        my %realms =();
        foreach my $realm (sort @realm_names) {
            my $label = CTX('config')->get("system.realms.$realm.label");
            $realms{$realm}->{NAME} = $realm;
            $realms{$realm}->{LABEL} = $label;
            # fall back to the label if no description is configured
            $realms{$realm}->{DESCRIPTION} = CTX('config')->get("system.realms.$realm.description") || $label;
        }
        return {
            SERVICE_MSG => 'GET_PKI_REALM',
            PARAMS => {
                'PKI_REALMS' => \%realms,
            },
        };
    }
    elsif ($state_of{$ident} eq 'WAITING_FOR_AUTHENTICATION_STACK') {
        return $self->__list_authentication_stacks();
    }
    elsif ($state_of{$ident} eq 'WAITING_FOR_LOGIN') {
        ##! 16: 'we are in state WAITING_FOR_LOGIN'
        ##! 16: 'auth stack: ' . CTX('session')->data->authentication_stack
        ##! 16: 'pki realm: ' . CTX('session')->data->pki_realm
        # re-run the current login step so the client gets the prompt again
        my ($user, $role, $reply) = CTX('authentication')->login_step({
            STACK => CTX('session')->data->authentication_stack,
            MESSAGE => $message,
        });
        return $reply;
    }
    return { SERVICE_MSG => 'START_SESSION' };
}
# Advances the handshake after the client accepted a session id.
# Depending on where we came from (reset vs. continue) and on what the
# session already contains, the reply asks for a PKI realm, an
# authentication stack, login credentials - or reports SERVICE_READY.
sub __handle_SESSION_ID_ACCEPTED : PRIVATE {
    ##! 1: 'start'
    my $self    = shift;
    my $ident   = ident $self;
    my $message = shift;

    if ($state_of{$ident} eq 'SESSION_ID_SENT_FROM_RESET') {
        ##! 4: 'existing session detected'
        my $session = CTX('session');
        ##! 8: 'Session ' . Dumper $session
        # a reset keeps the logged-in session: go straight to the main loop
        $self->__change_state({
            STATE => 'MAIN_LOOP',
        });
    }
    if ($state_of{$ident} eq 'SESSION_ID_SENT_FROM_CONTINUE') {
        ##! 4: 'existing session detected'
        my $session = CTX('session');
        ##! 8: 'Session ' . Dumper $session
        # resume exactly where the continued session left off
        $self->__change_state({
            STATE => CTX('session')->data->status,
        });
    }
    ##! 16: 'state: ' . $state_of{$ident}

    my $pki_realm_choice = $self->__pki_realm_choice_available();
    ##! 16: 'pki_realm_choice: ' . $pki_realm_choice
    # if there is more than one PKI realm, send an appropriate
    # message for the user and set the state to
    # 'WAITING_FOR_PKI_REALM'
    # we only do this if we are in a 'SESSION_ID_SENT.*' state
    if ($pki_realm_choice
        && $state_of{$ident} =~ m{\A SESSION_ID_SENT.* \z}xms) {
        ##! 2: "build hash with ID, name and description"
        my @realm_names = CTX('config')->get_keys("system.realms");
        my %realms =();
        foreach my $realm (sort @realm_names) {
            $realms{$realm}->{NAME} = $realm;
            $realms{$realm}->{DESCRIPTION} = CTX('config')->get("system.realms.$realm.label");
        }
        $self->__change_state({
            STATE => 'WAITING_FOR_PKI_REALM',
        });
        return {
            SERVICE_MSG => 'GET_PKI_REALM',
            PARAMS => {
                'PKI_REALMS' => \%realms,
            },
        };
    }
    # if we do not have an authentication stack in the session,
    # send all available stacks to the user and set the state to
    # 'WAITING_FOR_AUTHENTICATION_STACK'
    if ($state_of{$ident} =~ m{\A SESSION_ID_SENT.* \z}xms
        && (! defined CTX('session')->data->authentication_stack) ) {
        ##! 4: 'sending authentication stacks'
        $self->__change_state({
            STATE => 'WAITING_FOR_AUTHENTICATION_STACK',
        });
        return $self->__list_authentication_stacks();
    }
    if ($state_of{$ident} eq 'WAITING_FOR_AUTHENTICATION_STACK') {
        return $self->__list_authentication_stacks();
    }
    if ($state_of{$ident} eq 'WAITING_FOR_LOGIN') {
        ##! 16: 'we are in state WAITING_FOR_LOGIN'
        ##! 16: 'auth stack: ' . CTX('session')->data->authentication_stack
        ##! 16: 'pki realm: ' . CTX('session')->data->pki_realm
        # replay the pending login step so the client receives the prompt
        my ($user, $role, $reply) = CTX('authentication')->login_step({
            STACK => CTX('session')->data->authentication_stack,
            MESSAGE => $message,
        });
        return $reply;
    }
    if ($state_of{$ident} eq 'MAIN_LOOP') {
        return {
            SERVICE_MSG => 'SERVICE_READY',
        };
    }
    ##! 16: 'end'
    # no transition matched: return nothing
    return;
}
# Validates and stores the realm requested by the client. If no auth
# stack is set yet, the reply asks for one (or processes a stack the
# client bundled into the same message). Throws on an unknown realm.
sub __handle_GET_PKI_REALM : PRIVATE {
    ##! 1: 'start'
    my $self    = shift;
    my $ident   = ident $self;
    my $message = shift;

    my $requested_realm = $message->{PARAMS}->{'PKI_REALM'};

    if ($self->__is_valid_pki_realm($requested_realm)) {
        ##! 2: "update session with PKI realm"
        CTX('session')->data->pki_realm($requested_realm);
        Log::Log4perl::MDC->put('pki_realm', $requested_realm);
    }
    else {
        OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPKI_SERVICE_DEFAULT_GET_PKI_REALM_INVALID_PKI_REALM_REQUESTED',
        );
    }

    if (! defined CTX('session')->data->authentication_stack ) {
        $self->__change_state({
            STATE => 'WAITING_FOR_AUTHENTICATION_STACK',
        });
        # proceed if stack is already set
        # (the client may send realm and stack in one message)
        if (defined $message->{PARAMS}->{'AUTHENTICATION_STACK'}) {
            delete $message->{PARAMS}->{'PKI_REALM'};
            return $self->__handle_GET_AUTHENTICATION_STACK($message);
        }
        return $self->__list_authentication_stacks();
    }
    # check for next step, change state and prepare response
    return;
}
# Validates the requested authentication stack, stores it in the session
# and runs the first login step. On immediate success (e.g. anonymous
# stacks) the session is marked valid and we enter MAIN_LOOP; otherwise
# the authentication layer's reply (a credential prompt) is returned.
sub __handle_GET_AUTHENTICATION_STACK : PRIVATE {
    ##! 1: 'start'
    my $self    = shift;
    my $ident   = ident $self;
    my $message = shift;

    my $requested_stack = $message->{PARAMS}->{'AUTHENTICATION_STACK'};
    if (! $self->__is_valid_auth_stack($requested_stack)) {
        OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPKI_SERVICE_DEFAULT_GET_AUTHENTICATION_STACK_INVALID_AUTH_STACK_REQUESTED',
        );
    }
    else { # valid authentication stack
        # switch to the login state *before* the first login step so a
        # PING during authentication replays the prompt
        $self->__change_state({
            STATE => 'WAITING_FOR_LOGIN',
        });
        CTX('session')->data->authentication_stack($requested_stack);
        my ($user, $role, $reply, $userinfo) = CTX('authentication')->login_step({
            STACK => $requested_stack,
            MESSAGE => $message,
        });
        if (defined $user && defined $role) {
            ##! 4: 'login successful'
            # successful login, save it in the session
            # and make the session valid
            CTX('session')->data->user($user);
            CTX('session')->data->role($role);
            CTX('session')->data->userinfo($userinfo) if ($userinfo);
            CTX('session')->is_valid(1); # mark session as "valid"
            Log::Log4perl::MDC->put('user', $user);
            Log::Log4perl::MDC->put('role', $role);
            $self->__change_state({ STATE => 'MAIN_LOOP', });
        }
        else {
            ##! 4: 'login unsuccessful'
            # multi-step stacks need more round trips; $reply says what
        }
        return $reply;
    }
    return;
}
# Performs one password-login round trip: passes the credentials to the
# authentication layer and, on success, stores user/role in the session,
# marks it valid and enters MAIN_LOOP. Otherwise returns the next prompt.
sub __handle_GET_PASSWD_LOGIN : PRIVATE {
    ##! 1: 'start'
    my $self    = shift;
    my $ident   = ident $self;
    my $message = shift;

    ## do not let users with non-ASCII characters in their username
    ## log in, as this will cause a crash on the web interface. This
    ## is a known bug (#1909037), and this code is here as a workaround
    ## until it is fixed.
    if (exists $message->{PARAMS}->{LOGIN}) {
        if (! defined $message->{PARAMS}->{LOGIN}) {
            OpenXPKI::Exception->throw(
                message => 'I18N_OPENXPKI_SERVICE_DEFAULT_GET_PASSWD_USERNAME_UNDEFINED',
            );
        }
        if ($message->{PARAMS}->{LOGIN} !~ m{ \A \p{IsASCII}+ \z }xms) {
            OpenXPKI::Exception->throw(
                message => 'I18N_OPENXPKI_SERVICE_DEFAULT_GET_PASSWD_LOGIN_NON_ASCII_USERNAME_BUG',
            );
        }
    }

    my ($user, $role, $reply, $userinfo) = CTX('authentication')->login_step({
        STACK => CTX('session')->data->authentication_stack,
        MESSAGE => $message,
    });
    ##! 16: 'user: ' . $user
    ##! 16: 'role: ' . $role
    ##! 16: 'reply: ' . Dumper $reply

    if (defined $user && defined $role) {
        CTX('log')->system->debug("Successful login from user $user, role $role");
        ##! 4: 'login successful'
        # successful login, save it in the session and mark session as valid
        CTX('session')->data->user($user);
        CTX('session')->data->role($role);
        CTX('session')->data->userinfo($userinfo) if ($userinfo);
        CTX('session')->is_valid(1);
        Log::Log4perl::MDC->put('user', $user);
        Log::Log4perl::MDC->put('role', $role);
        $self->__change_state({ STATE => 'MAIN_LOOP', });
    }
    else {
        ##! 4: 'login unsuccessful'
        # multi-step stacks may need further round trips
    }
    return $reply;
}
# SSO logins share the password-login flow; simply delegate.
sub __handle_GET_CLIENT_SSO_LOGIN : PRIVATE {
    ##! 1: 'start'
    my ($self, $msg) = @_;
    return $self->__handle_GET_PASSWD_LOGIN($msg);
}
# Client-certificate logins share the password-login flow; delegate.
sub __handle_GET_CLIENT_X509_LOGIN : PRIVATE {
    ##! 1: 'start'
    my ($self, $msg) = @_;
    return $self->__handle_GET_PASSWD_LOGIN($msg);
}
# X509 logins share the password-login flow; delegate.
sub __handle_GET_X509_LOGIN : PRIVATE {
    ##! 1: 'start'
    my ($self, $msg) = @_;
    return $self->__handle_GET_PASSWD_LOGIN($msg);
}
# Terminates the current session: detaches it from the context, clears
# the logging context and deletes the session from the backend.
sub __handle_LOGOUT : PRIVATE {
    ##! 1: 'start'
    my $self    = shift;
    my $ident   = ident $self;
    my $message = shift;

    # keep a handle on the session: it must be deleted from the backend
    # after it has been detached from the context
    my $session = CTX('session');
    ##! 8: "logout received - terminate session " . $session->id,
    CTX('log')->system->debug('Terminating session ' . $session->id);

    $self->__change_state({ STATE => 'NEW' });
    OpenXPKI::Server::Context::killsession();
    Log::Log4perl::MDC->remove();

    CTX('log')->system->warn('Error terminating session!')
        unless $session->delete();

    return { 'SERVICE_MSG' => 'LOGOUT' };
}
# Reports the current session's user and role; both are undef when no
# session is attached to the context.
sub __handle_STATUS : PRIVATE {
    ##! 1: 'start'
    my $self    = shift;
    my $ident   = ident $self;
    my $message = shift;

    # only touch session attributes when a session actually exists
    my $have_session = OpenXPKI::Server::Context::hascontext('session');

    # SERVICE_MSG ?
    return {
        SESSION => {
            ROLE => $have_session ? CTX('session')->data->role : undef,
            USER => $have_session ? CTX('session')->data->user : undef,
        },
    };
}
# Executes one API command on behalf of an authenticated client. The
# command runs inside a DBI transaction and, if a max execution time is
# configured, under an ALRM-based watchdog. Returns the command's HASH
# result; throws on unknown API version, instantiation failure, timeout
# or execution error.
sub __handle_COMMAND : PRIVATE {
    ##! 1: 'start'
    my $self  = shift;
    my $ident = ident $self;
    my $data  = shift;

    OpenXPKI::Exception->throw(
        message => 'I18N_OPENXPKI_SERVICE_DEFAULT_COMMAND_COMMAND_MISSING',
    ) unless exists $data->{PARAMS}->{COMMAND};

    ##! 16: "executing access control before doing anything else"
    #eval {
    # FIXME - ACL
    #CTX('acl')->authorize ({
    # ACTIVITY => "Service::".$data->{PARAMS}->{COMMAND},
    # AFFECTED_ROLE => "",
    #});
    #};
    ##! 32: 'Callstack ' . Dumper $data
    # NOTE(review): the ACL eval above is commented out, so $EVAL_ERROR here
    # may still carry a *stale* value from an earlier eval elsewhere; the
    # leading "0 ||" does not short-circuit the check. Confirm the intent
    # before relying on this branch.
    if (0 || $EVAL_ERROR) {
        ##! 1: "Permission denied for Service::".$data->{PARAMS}->{COMMAND}."."
        if (my $exc = OpenXPKI::Exception->caught()) {
            OpenXPKI::Exception->throw(
                message => 'I18N_OPENXPKI_SERVICE_DEFAULT_COMMAND_PERMISSION_DENIED',
                params => {
                    EXCEPTION => $exc,
                },
            );
        } else {
            OpenXPKI::Exception->throw(
                message => 'I18N_OPENXPKI_SERVICE_DEFAULT_COMMAND_PERMISSION_DENIED',
                params => {
                    ERROR => $EVAL_ERROR,
                },
            );
        }
        return;
    }
    ##! 16: "access to command granted"

    my $command;
    # only API version 2 is supported (also the default if none is sent)
    my $api = $data->{PARAMS}->{API} || 2;
    if ($api !~ /^2$/) {
        OpenXPKI::Exception->throw (
            message => "I18N_OPENXPKI_SERVICE_DEFAULT_COMMAND_UNKNWON_COMMAND_API_VERSION",
            params => $data->{PARAMS},
        );
    }

    eval {
        $command = OpenXPKI::Service::Default::CommandApi2->new(
            command => $data->{PARAMS}->{COMMAND},
            params => $data->{PARAMS}->{PARAMS},
        );
    };
    if (my $exc = OpenXPKI::Exception->caught()) {
        if ($exc->message() =~ m{ I18N_OPENXPKI_SERVICE_DEFAULT_COMMAND_INVALID_COMMAND }xms) {
            ##! 16: "Invalid command $data->{PARAMS}->{COMMAND}"
            # fall-through intended
        } else {
            $exc->rethrow();
        }
    }
    elsif ($EVAL_ERROR) {
        OpenXPKI::Exception->throw (
            message => "I18N_OPENXPKI_SERVICE_DEFAULT_COMMAND_COULD_NOT_INSTANTIATE_COMMAND",
            params => { EVAL_ERROR => $EVAL_ERROR },
        );
    }
    # unknown command: reply with an empty result instead of dying
    return unless defined $command;

    ##! 16: 'command class instantiated successfully'

    my $result;
    eval {
        CTX('log')->system->debug("Executing command ".$data->{PARAMS}->{COMMAND});
        my $sh;
        if ($max_execution_time{$ident}) {
            # watchdog: abort long-running commands via SIGALRM; the
            # handler throws, which unwinds back into this eval
            $sh = set_sig_handler( 'ALRM' ,sub {
                CTX('log')->system->error("Service command ".$data->{PARAMS}->{COMMAND}." was aborted after " . $max_execution_time{$ident});
                CTX('log')->system->trace("Call was " . Dumper $data->{PARAMS} );
                OpenXPKI::Exception->throw(
                    message => "Server took too long to respond to your request - aborted!",
                    params => {
                        COMMAND => $data->{PARAMS}->{COMMAND}
                    }
                );
            });
            sig_alarm( $max_execution_time{$ident} );
        }

        # enclose command with DBI transaction
        CTX('dbi')->start_txn();
        $result = $command->execute();
        CTX('dbi')->commit();

        # disarm the watchdog on success
        if ($sh) {
            sig_alarm(0);
        }
    };
    if (my $error = $EVAL_ERROR) {
        # rollback DBI (should not matter as we throw exception anyway)
        CTX('dbi')->rollback();

        # just rethrow if we have an exception
        if (my $exc = OpenXPKI::Exception->caught()) {
            ##! 16: 'exception caught during execute'
            $exc->rethrow();
        }

        ##! 16: "Exception caught during command execution"
        OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPKI_SERVICE_DEFAULT_COMMAND_EXECUTION_ERROR',
            params => { ERROR => $error },
        );
    }

    ##! 16: 'command executed successfully'

    # sanity checks on command reply
    if (! defined $result || ref $result ne 'HASH') {
        OpenXPKI::Exception->throw(
            message => 'I18N_OPENXPKI_SERVICE_DEFAULT_COMMAND_ILLEGAL_COMMAND_RETURN_VALUE',
        );
        return;
    }
    ##! 16: 'returning result'
    return $result;
}
# Decides whether the client must be asked to choose a PKI realm.
# Returns the (truthy) realm name if the session already carries one,
# 1 when more than one realm is configured (a choice is needed),
# 0 when there is exactly one realm (which is selected implicitly),
# and throws when no realm is configured at all.
sub __pki_realm_choice_available : PRIVATE {
    ##! 1: 'start'
    my $self = shift;
    my $ident = ident $self;

    ##! 2: "check if PKI realm is already known"
    my $realm = OpenXPKI::Server::Context::hascontext('session')
        ? CTX('session')->data->pki_realm
        : undef;
    return $realm if defined $realm;

    ##! 2: "check if there is more than one realm"
    my @list = CTX('config')->get_keys('system.realms');
    if (scalar @list < 1) {
        ##! 4: "no PKI realm configured"
        OpenXPKI::Exception->throw(
            message => "I18N_OPENXPKI_SERVICE_DEFAULT_NO_PKI_REALM_CONFIGURED",
        );
    }
    elsif (scalar @list == 1) {
        # exactly one realm: select it implicitly, no choice needed
        ##! 4: "update session with PKI realm"
        ##! 16: 'PKI realm: ' . $list[0]
        CTX('session')->data->pki_realm($list[0]);
        return 0;
    }
    else { # more than one PKI realm available
        return 1;
    }
    # NB: the former trailing "return 0;" was unreachable (every branch
    # above either throws or returns) and has been removed.
}
# Builds the reply that presents all configured authentication stacks
# to the client so it can pick one.
sub __list_authentication_stacks : PRIVATE {
    my $self = shift;

    return {
        SERVICE_MSG => 'GET_AUTHENTICATION_STACK',
        PARAMS => {
            'AUTHENTICATION_STACKS' => CTX('authentication')->list_authentication_stacks(),
        },
    };
}
# True iff the authentication layer knows a stack with this name.
sub __is_valid_auth_stack : PRIVATE {
    ##! 1: 'start'
    my ($self, $stack) = @_;
    my $ident = ident $self;

    return exists CTX('authentication')->list_authentication_stacks()->{$stack};
}
# True iff a configuration node exists for the given realm name.
sub __is_valid_pki_realm : PRIVATE {
    ##! 1: 'start'
    my ($self, $realm) = @_;
    my $ident = ident $self;

    return CTX('config')->exists("system.realms.$realm");
}
# Moves the service state machine to $arg_ref->{STATE}, mirrors the new
# state into the session (if one is attached) and updates the worker's
# process name for MAIN_LOOP/NEW transitions. Always returns 1.
sub __change_state : PRIVATE {
    ##! 1: 'start'
    my $self    = shift;
    my $ident   = ident $self;
    my $arg_ref = shift;
    my $new_state = $arg_ref->{STATE};

    ##! 4: 'changing state from ' . $state_of{$ident} . ' to ' . $new_state
    CTX('log')->system()->debug('Changing session state from ' . $state_of{$ident} . ' to ' . $new_state);

    $state_of{$ident} = $new_state;

    # save the new state in the session
    if (OpenXPKI::Server::Context::hascontext('session')) {
        CTX('session')->data->status($new_state);
    }

    # Set the daemon name after entering MAIN_LOOP
    if ($new_state eq "MAIN_LOOP") {
        OpenXPKI::Server::__set_process_name("worker: %s (%s)", CTX('session')->data->user, CTX('session')->data->role);
    } elsif ($new_state eq "NEW") {
        OpenXPKI::Server::__set_process_name("worker: connected");
    }

    return 1;
}
# Main service loop: reads messages from the client, validates them for
# the current state, dispatches them and sends back results or errors.
# Terminates (returning 1) when the client closes the connection.
sub run
{
    my $self  = shift;
    my $ident = ident $self;
    my $args  = shift;

    # install the server's signal handlers for clean shutdown/reload
    $SIG{'TERM'} = \&OpenXPKI::Server::sig_term;
    $SIG{'HUP'} = \&OpenXPKI::Server::sig_hup;

  MESSAGE:
    while (1) {
        my $msg;
        eval {
            $msg = $self->collect();
        };
        if (my $exc = OpenXPKI::Exception->caught()) {
            if ($exc->message() =~ m{I18N_OPENXPKI_TRANSPORT.*CLOSED_CONNECTION}xms) {
                # client closed socket
                last MESSAGE;
            } else {
                $exc->rethrow();
            }
        } elsif ($EVAL_ERROR) {
            OpenXPKI::Exception->throw (
                message => "I18N_OPENXPKI_SERVICE_DEFAULT_RUN_READ_EXCEPTION",
                params => {
                    EVAL_ERROR => $EVAL_ERROR,
                });
        }

        # an undefined message also means the connection is gone
        last MESSAGE unless defined $msg;

        my $is_valid = $self->__is_valid_message({ MESSAGE => $msg });
        if (! $is_valid) {
            CTX('log')->system->debug("Invalid message received from client: ".($msg->{SERVICE_MSG} // "(empty)"));
            $self->__send_error({
                ERROR => "I18N_OPENXPKI_SERVICE_DEFAULT_RUN_UNRECOGNIZED_SERVICE_MESSAGE",
            });
        }
        else { # valid message received
            my $result;

            # we dont need a valid session when we are not in main loop state
            if ($state_of{$ident} eq 'MAIN_LOOP' && ! CTX('session')->is_valid) {
                # check whether we still have a valid session (someone
                # might have logged out on a different forked server)
                CTX('log')->system->debug("Can't process client message: session is not valid (login incomplete)");
                $self->__send_error({
                    ERROR => 'I18N_OPENXPKI_SERVICE_DEFAULT_RUN_SESSION_INVALID',
                });
            }
            else {
                # our session is just fine
                eval { # try to handle it
                    $result = $self->__handle_message({ MESSAGE => $msg });
                    # persist session unless it was killed (we assume someone saved it before)
                    CTX('session')->persist if OpenXPKI::Server::Context::hascontext('session');
                };
                if (my $exc = OpenXPKI::Exception->caught()) {
                    $self->__send_error({ EXCEPTION => $exc, });
                }
                elsif ($EVAL_ERROR) {
                    $self->__send_error({ EXCEPTION => $EVAL_ERROR, });
                }
                else { # if everything was fine, send the result to the client
                    $self->talk($result);
                }
            }
        }
    }
    return 1;
}
##################################
## begin error handling ##
##################################
# Sends an error reply { SERVICE_MSG => 'ERROR', LIST => [ $error ] } to
# the client. Accepts either a plain ERROR label or an EXCEPTION, which
# may be a string or a (possibly nested) exception object. For
# OpenXPKI::Exception objects, plain scalar/hash/array params are
# forwarded as PARAMS (used e.g. by validators for extra details).
sub __send_error
{
    my $self   = shift;
    my $params = shift;

    my $error;
    if ($params->{ERROR}) {
        $error = { LABEL => $params->{ERROR} };
    } elsif (ref $params->{EXCEPTION} eq '') {
        # got exception with already stringified error
        $error = { LABEL => $params->{EXCEPTION} };
    } else {
        # blessed exception object - there are some bubble ups where message
        # is an exception again => enforce stringification on message
        $error = { LABEL => "".$params->{EXCEPTION}->message() };

        # get all scalar/hash/array parameters from OXI::Exceptions
        # this is used to transport some extra infos for validators, etc
        if (ref $params->{EXCEPTION} eq 'OpenXPKI::Exception' &&
            defined $params->{EXCEPTION}->params) {
            my $p = $params->{EXCEPTION}->params;
            # strip anything that is not a defined scalar/hash/array value
            map {
                my $key = $_;
                my $val = $p->{$_};
                my $ref = ref $val;
                delete $p->{$_} unless(defined $val && $ref =~ /^(|HASH|ARRAY)$/);
            } keys %{$p};

            # NOTE(review): $p is a hashref and therefore always true here,
            # so PARAMS is attached even when the hash was emptied above -
            # presumably "if (%$p)" was intended; confirm before changing.
            if($p) {
                $error->{PARAMS} = $p;
            }
        }
    }

    CTX('log')->system->debug('Sending error ' . Dumper $error);

    return $self->talk({
        SERVICE_MSG => "ERROR",
        LIST => [ $error ]
    });
}
################################
## end error handling ##
################################
1;
__END__
=head1 Name
OpenXPKI::Service::Default - basic service implementation
=head1 Description
This is the common Service implementation to be used by most interactive
clients. It supports PKI realm selection, user authentication and session
handling.
=head1 Protocol Definition
=head2 Connection startup
You can send two messages at the beginning of a connection. You can
ask to continue an old session or you start a new session. The answer
is always the same - the session ID or an error message.
=head3 Session init
--> {SERVICE_MSG => "NEW_SESSION",
LANGUAGE => $lang}
<-- {SESSION_ID => $ID}
--> {SERVICE_MSG => "SESSION_ID_ACCEPTED"}
<-- {SERVICE_MSG => "GET_PKI_REALM",
PARAMS => {
PKI_REALM => {
"0" => {
NAME => "Root Realm",
DESCRIPTION => "This is an example root realm."
}
}
}
}
--> {SERVICE_MSG => "GET_PKI_REALM",
PARAMS => {
PKI_REALM => $realm,
}
}
<-- {SERVICE_MSG => "GET_AUTHENTICATION_STACK",
PARAMS => {
AUTHENTICATION_STACKS => {
"0" => {
NAME => "Basic Root Auth Stack",
DESCRIPTION => "This is the basic root authentication stack."
}
}
}
}
--> {SERVICE_MSG => "GET_AUTHENTICATION_STACK",
PARAMS => {
AUTHENTICATION_STACK => "0"
}
}
Example 1: Anonymous Login
<-- {SERVICE_MSG => "SERVICE_READY"}
Answer is the first command.
Example 2: Password Login
<-- {SERVICE_MSG => "GET_PASSWD_LOGIN",
PARAMS => {
NAME => "XYZ",
DESCRIPTION => "bla bla ..."
}
}
--> {LOGIN => "John Doe",
PASSWD => "12345678"}
on success ...
<-- {SERVICE_MSG => "SERVICE_READY"}
on failure ...
<-- {ERROR => "some already translated message"}
=head3 Session continue
--> {SERVICE_MSG => "CONTINUE_SESSION",
SESSION_ID => $ID}
<-- {SESSION_ID => $ID}
--> {SERVICE_MSG => "SESSION_ID_ACCEPTED"}
<-- {SERVICE_MSG => "SERVICE_READY"}
=head1 Functions
These functions do nothing more than support the test code with a
working user interface dummy.
=over
=item * START
=item * init
Receives messages, checks them for validity in the given state
and passes them of to __handle_message if they are valid. Runs
until it reaches the state 'MAIN_LOOP', which means that session
initialization, PKI realm selection and login are done.
=item * run
Receives messages, checks them for validity in the given state
(MAIN_LOOP) and passes them to __handle_message if they are valid.
Runs until a LOGOUT command is received.
=item * __is_valid_message
Checks whether a given message is a valid message in the current
state. Currently, this checks the message name ('SERVICE_MSG')
only, could be used to validate the input as well later.
=item * __handle_message
Handles a message by passing it off to a handler named using the
service message name.
=item * __handle_NEW_SESSION
Handles the NEW_SESSION message by creating a new session, saving it
in the context and sending back the session ID. Changes the state to
'SESSION_ID_SENT'
=item * __handle_CONTINUE_SESSION
Handles the CONTINUE_SESSION message.
=item * __handle_PING
Handles the PING message by sending back an empty response.
=item * __handle_SESSION_ID_ACCEPTED
Handles the 'SESSION_ID_ACCEPTED' message. It looks whether there
are multiple PKI realms defined. If so, it sends back the list
and changes to state 'WAITING_FOR_PKI_REALM'. If not, it looks
whether an authentication stack is present. If not, it sends the
list of possible stacks and changes the state to
'WAITING_FOR_AUTHENTICATION_STACK'.
=item * __handle_GET_PKI_REALM
Handles the GET_PKI_REALM message by checking whether the received
realm is valid and setting it in the context if so.
=item * __handle_GET_AUTHENTICATION_STACK
Handles the GET_AUTHENTICATION_STACK message by checking whether
the received stack is valid and setting the corresponding attribute
if it is
=item * __handle_GET_PASSWD_LOGIN
Handles the GET_PASSWD_LOGIN message by passing on the credentials
to the Authentication modules 'login_step' method.
=item * __handle_DETACH_SESSION
Removes the current session from this worker but does not delete
the session. The worker is now free to handle requests for other
sessions.
=item * __handle_LOGOUT
Handles the LOGOUT message by deleting the session from the backend.
=item * __handle_STATUS
Handles the STATUS message by sending back role and user information.
=item * __handle_COMMAND
Handles the COMMAND message by calling the corresponding command if
the user is authorized.
=item * __pki_realm_choice_available
Checks whether more than one PKI realm is configured.
=item * __list_authentication_stacks
Returns a list of configured authentication stacks.
=item * __is_valid_auth_stack
Checks whether a given stack is a valid one.
=item * __is_valid_pki_realm
Checks whether a given realm is a valid one.
=item * __change_state
Changes the internal state.
=item * __send_error
Sends an error message to the user.
=back
| oliwel/openxpki | core/server/OpenXPKI/Service/Default.pm | Perl | apache-2.0 | 36,525 |
package VMOMI::ToolsConfigInfoToolsLastInstallInfo;
use parent 'VMOMI::DynamicData';

use strict;
use warnings;

# Ancestor chain, consumed by the (de)serialization machinery.
our @class_ancestors = ('DynamicData');

# Own members as [name, type, is_array, is_optional] tuples.
our @class_members = (
    ['counter', undef, 0, ],
    ['fault', 'LocalizedMethodFault', 0, 1],
);

sub get_class_ancestors {
    return @class_ancestors;
}

sub get_class_members {
    my $class = shift;
    # inherited members first, then this class's own
    return ($class->SUPER::get_class_members(), @class_members);
}

1;
| stumpr/p5-vmomi | lib/VMOMI/ToolsConfigInfoToolsLastInstallInfo.pm | Perl | apache-2.0 | 480 |
:- module(dictionary, [dictionary/6],
[assertions,basicmodes,regtypes]).
:- include(library(iso)).
:- use_module(library(iso_byte_char)).
:- use_module(library(basicprops)).
:- use_module(library(lists)).
:- use_module(internal_types).
:- comment(module,"This module contains the fixed dictionary. All the nodes
in VRML with their associated fields.").
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
:- pred dictionary(?NodeTypeId,?AccessType,?FieldTypeId,?FieldId,-Init_value,
-Boundary)
:: atm * atm * atm * atm * list(atm) * bound
# "To lookup information about the nodes, getting their properties.
Note that the type returned for the bound can be of two different types
bound or bound_double. The rotation type have one bound for the
directions and one for the degree of rotation.".
:- pred dictionary(?NodeTypeId,?AccessType,?FieldTypeId,?FieldId,-Init_value,
-Boundary)
:: atm * atm * atm * atm * list(atm) * bound_double
# "To lookup information about the nodes, getting their properties.
Note that the type returned for the bound can be of two different types
bound or bound_double. The rotation type have one bound for the
directions and one for the degree of rotation.".
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
dictionary('Anchor',eventIn,'MFNode',addChildren,[],[]).
dictionary('Anchor',eventIn,'MFNode',removeChildren,[],[]).
dictionary('Anchor',exposedField,'MFNode',children,[[]],
[children_nodes]).
dictionary('Anchor',exposedField,'SFString',description,[''],[]).
dictionary('Anchor',exposedField,'MFString',parameter,[[]],[]).
dictionary('Anchor',exposedField,'MFString',url,[[]],[]).
dictionary('Anchor',field,'SFVec3f',bboxCenter,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('Anchor',field,'SFVec3f',bboxSize,[-1,-1,-1],
bound(excl(0),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('Appearance',exposedField,'SFNode',material,['NULL'],
['Material']).
dictionary('Appearance',exposedField,'SFNode',texture,['NULL'],
['ImageTexture','MovieTexture','PixelTexture']).
dictionary('Appearance',exposedField,'SFNode',textureTransform,['NULL'],
['TextureTransform']).
%%%%%%%%%%%%%%%%
%% AudioClip node fields.
dictionary('AudioClip',exposedField,'SFString',description,[''],[]).
%% Fixed: init value was the bare atom 'FALSE'; the :- pred declaration
%% types Init_value as list(atm), and the matching MovieTexture 'loop'
%% fact uses ['FALSE'].
dictionary('AudioClip',exposedField,'SFBool',loop,['FALSE'],[]).
dictionary('AudioClip',exposedField,'SFFloat',pitch,[1.0],
	bound(excl(0),excl(inf))).
dictionary('AudioClip',exposedField,'SFTime',startTime,[0],
	bound(excl(inf_neg),excl(inf))).
dictionary('AudioClip',exposedField,'SFTime',stopTime,[0],
	bound(excl(inf_neg),excl(inf))).
dictionary('AudioClip',exposedField,'MFString',url,[[]],[]).
dictionary('AudioClip',eventOut,'SFTime',duration_changed,[],[]).
dictionary('AudioClip',eventOut,'SFBool',isActive,[],[]).
%%%%%%%%%%%%%%%%
dictionary('Background',eventIn,'SFBool',set_bind,[],[]).
dictionary('Background',exposedField,'MFFloat',groundAngle,[[]],
bound(incl(0),incl(pi_div2))).
dictionary('Background',exposedField,'MFColor',groundColor,[[]],
bound(incl(0),incl(1))).
dictionary('Background',exposedField,'MFString',backUrl,[[]],[]).
dictionary('Background',exposedField,'MFString',bottomUrl,[[]],[]).
dictionary('Background',exposedField,'MFString',frontUrl,[[]],[]).
dictionary('Background',exposedField,'MFString',leftUrl,[[]],[]).
dictionary('Background',exposedField,'MFString',rightUrl,[[]],[]).
dictionary('Background',exposedField,'MFString',topUrl,[[]],[]).
dictionary('Background',exposedField,'MFFloat',skyAngle,[[]],
bound(incl(0),incl(pi))).
dictionary('Background',exposedField,'MFColor',skyColor,[0,0,0],
bound(incl(0),incl(1))).
dictionary('Background',eventOut,'SFBool',isBound,[],[]).
%%%%%%%%%%%%%%%%
dictionary('Billboard',eventIn,'MFNode',addChildren,[],[]).
dictionary('Billboard',eventIn,'MFNode',removeChildren,[],[]).
dictionary('Billboard',exposedField,'SFVec3f',axisOfRotation,[0,1,0],
bound(excl(inf_neg),excl(inf))).
dictionary('Billboard',exposedField,'MFNode',children,[[]],
[children_nodes]).
dictionary('Billboard',field,'SFVec3f',bboxCenter,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('Billboard',field,'SFVec3f',bboxSize,[-1,-1,-1],
bound(excl(0),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('Box',field,'SFVec3f',size,[2,2,2],
bound(excl(0),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('Collision',eventIn,'MFNode',addChildren,[],[]).
dictionary('Collision',eventIn,'MFNode',removeChildren,[],[]).
dictionary('Collision',exposedField,'MFNode',children,[[]],
[children_nodes]).
dictionary('Collision',exposedField,'SFBool',collide,['TRUE'],[]).
dictionary('Collision',field,'SFVec3f',bboxCenter,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('Collision',field,'SFVec3f',bboxSize,[-1,-1,-1],
bound(excl(0),excl(inf))).
dictionary('Collision',field,'SFNode',proxy,['NULL'],
[children_nodes]).
dictionary('Collision',eventOut,'SFTime',collideTime,[],[]).
%%%%%%%%%%%%%%%%
dictionary('Color',exposedField,'MFColor',color,[[]],
bound(incl(0),incl(1))).
%%%%%%%%%%%%%%%%
dictionary('ColorInterpolator',eventIn,'SFFloat',set_fraction,[],
bound(excl(inf_neg),excl(inf))).
dictionary('ColorInterpolator',exposedField,'MFFloat',key,[[]],
bound(excl(inf_neg),excl(inf))).
dictionary('ColorInterpolator',exposedField,'MFColor',keyValue,[[]],
bound(incl(0),incl(1))).
dictionary('ColorInterpolator',eventOut,'SFColor',value_changed,[],[]).
%%%%%%%%%%%%%%%%
%% Cone node fields.
dictionary('Cone',field,'SFFloat',bottomRadius,[1],
	bound(excl(0),excl(inf))).
dictionary('Cone',field,'SFFloat',height,[2],
	bound(excl(0),excl(inf))).
%% Fixed: init values were bare atoms; Init_value is declared list(atm)
%% and every other SFBool fact in this table uses ['TRUE'].
dictionary('Cone',field,'SFBool',side,['TRUE'],[]).
dictionary('Cone',field,'SFBool',bottom,['TRUE'],[]).
%%%%%%%%%%%%%%%%
dictionary('Coordinate',exposedField,'MFVec3f',point,[],
bound(excl(inf_neg),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('CoordinateInterpolator',eventIn,'SFFloat',set_fraction,[],
bound(excl(inf_neg),excl(inf))).
dictionary('CoordinateInterpolator',exposedField,'MFFloat',key,[],
bound(excl(inf_neg),excl(inf))).
dictionary('CoordinateInterpolator',exposedField,'MFVec3f',keyValue,[],
bound(excl(inf_neg),excl(inf))).
dictionary('CoordinateInterpolator',eventOut,'MFVec3f',value_changed,[],[]).
%%%%%%%%%%%%%%%%
%% Cylinder node fields.
%% Fixed: SFBool init values were bare atoms; Init_value is declared
%% list(atm) and every other SFBool fact in this table uses ['TRUE'].
dictionary('Cylinder',field,'SFBool',bottom,['TRUE'],[]).
dictionary('Cylinder',field,'SFFloat',height,[2],
	bound(excl(0),excl(inf))).
dictionary('Cylinder',field,'SFFloat',radius,[1],
	bound(excl(0),excl(inf))).
dictionary('Cylinder',field,'SFBool',side,['TRUE'],[]).
dictionary('Cylinder',field,'SFBool',top,['TRUE'],[]).
%%%%%%%%%%%%%%%%
%% CylinderSensor node fields.
%% Fixed: SFBool init values were bare atoms; Init_value is declared
%% list(atm) and the other sensors (PlaneSensor, SphereSensor) use ['TRUE'].
dictionary('CylinderSensor',exposedField,'SFBool',autoOffset,['TRUE'],[]).
dictionary('CylinderSensor',exposedField,'SFFloat',diskAngle,[0.262],
	bound(excl(0),excl(pi_div2))).
dictionary('CylinderSensor',exposedField,'SFBool',enabled,['TRUE'],[]).
dictionary('CylinderSensor',exposedField,'SFFloat',maxAngle,[-1],
	bound(excl(pi_mult2_neg),excl(pi_mult2))).
dictionary('CylinderSensor',exposedField,'SFFloat',minAngle,[0],
	bound(excl(pi_mult2_neg),excl(pi_mult2))).
dictionary('CylinderSensor',exposedField,'SFFloat',offset,[0],
	bound(excl(inf_neg),excl(inf))).
dictionary('CylinderSensor',eventOut,'SFBool',isActive,[],[]).
dictionary('CylinderSensor',eventOut,'SFRotation',rotation_changed,[],[]).
dictionary('CylinderSensor',eventOut,'SFVec3f',trackPoint_changed,[],[]).
%%%%%%%%%%%%%%%%
%% DirectionalLight node fields.
dictionary('DirectionalLight',exposedField,'SFFloat',ambientIntensity,[0],
	bound(incl(0),incl(1))).
dictionary('DirectionalLight',exposedField,'SFColor',color,[1,1,1],
	bound(incl(0),incl(1))).
dictionary('DirectionalLight',exposedField,'SFVec3f',direction,[0,0,-1],
	bound(excl(inf_neg),excl(inf))).
%% Fixed typo: 'boound' -> 'bound' so the intensity bound unifies with
%% the bound/2 term used by every other fact in this table.
dictionary('DirectionalLight',exposedField,'SFFloat',intensity,[1],
	bound(incl(0),incl(1))).
%% Fixed: bare atom 'TRUE' -> ['TRUE'] (Init_value is declared list(atm);
%% PointLight/SpotLight 'on' facts use ['TRUE']).
dictionary('DirectionalLight',exposedField,'SFBool',on,['TRUE'],[]).
%%%%%%%%%%%%%%%%
dictionary('ElevationGrid',eventIn,'MFFloat',set_height,[],[]).
dictionary('ElevationGrid',exposedField,'SFNode',color,['NULL'],
['Color']).
dictionary('ElevationGrid',exposedField,'SFNode',normal,['NULL'],
['Normal']).
dictionary('ElevationGrid',exposedField,'SFNode',texCoord,['NULL'],
['TextureCoordinate']).
dictionary('ElevationGrid',field,'MFFloat',height,[[]],
bound(excl(inf_neg),excl(inf))).
dictionary('ElevationGrid',field,'SFBool',ccw,['TRUE'],[]).
dictionary('ElevationGrid',field,'SFBool',colorPerVertex,['TRUE'],[]).
dictionary('ElevationGrid',field,'SFFloat',creaseAngle,[0],
bound(incl(0),excl(inf))).
dictionary('ElevationGrid',field,'SFBool',normalPerVertex,['TRUE'],[]).
dictionary('ElevationGrid',field,'SFBool',solid,['TRUE'],[]).
dictionary('ElevationGrid',field,'SFInt32',xDimension,[0],
bound(incl(0),excl(inf))).
dictionary('ElevationGrid',field,'SFFloat',xSpacing,[1.0],
bound(excl(0),excl(inf))).
dictionary('ElevationGrid',field,'SFInt32',zDimension,[0],
bound(incl(0),excl(inf))).
dictionary('ElevationGrid',field,'SFFloat',zSpacing,[1.0],
bound(excl(0),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('Extrusion',eventIn,'MFVec2f',set_crossSection,[],[]).
dictionary('Extrusion',eventIn,'MFRotation',set_orientation,[],[]).
dictionary('Extrusion',eventIn,'MFVec2f',set_scale,[],[]).
dictionary('Extrusion',eventIn,'MFVec3f',set_spine,[],[]).
dictionary('Extrusion',field,'SFBool',beginCap,['TRUE'],[]).
dictionary('Extrusion',field,'SFBool',ccw,['TRUE'],[]).
dictionary('Extrusion',field,'SFBool',convex,['TRUE'],[]).
dictionary('Extrusion',field,'SFFloat',creaseAngle,[0],
bound(incl(0),excl(inf))).
dictionary('Extrusion',field,'MFVec2f',crossSection,[1,1,1,-1,-1,-1,-1,1,1,1],
bound(excl(inf_neg),excl(inf))).
dictionary('Extrusion',field,'SFBool',endCap,['TRUE'],[]).
dictionary('Extrusion',field,'MFRotation',orientation,[0,0,1,0],
bound(incl(-1),incl(1),excl(inf_neg),excl(inf))).
dictionary('Extrusion',field,'MFVec2f',scale,[1,1],
bound(excl(0),excl(inf))).
dictionary('Extrusion',field,'SFBool',solid,['TRUE'],[]).
dictionary('Extrusion',field,'MFVec3f',spine,[0,0,0,0,1,0],
bound(excl(inf_neg),excl(inf))).
%%%%%%%%%%%%%%%%
%% Fog node fields.
dictionary('Fog',exposedField,'SFColor',color,[1,1,1],
	bound(incl(0),incl(1))).
dictionary('Fog',exposedField,'SFString',fogType,['LINEAR'],[]).
%% Fixed field name: the VRML97 specification (ISO/IEC 14772-1) calls
%% this field 'visibilityRange'; it was misspelled 'visabilityRange'.
dictionary('Fog',exposedField,'SFFloat',visibilityRange,[0],
	bound(incl(0),excl(inf))).
dictionary('Fog',eventIn,'SFBool',set_bind,[],[]).
dictionary('Fog',eventOut,'SFBool',isBound,[],[]).
%%%%%%%%%%%%%%%%
dictionary('FontStyle',field,'MFString',family,[['SERIF']],[]).
dictionary('FontStyle',field,'SFBool',horizontal,['TRUE'],[]).
dictionary('FontStyle',field,'MFString',justify,['BEGIN'],[]).
dictionary('FontStyle',field,'SFString',language,[''],[]).
dictionary('FontStyle',field,'SFBool',leftToRight,['TRUE'],[]).
dictionary('FontStyle',field,'SFFloat',size,[1.0],
bound(excl(0),excl(inf))).
dictionary('FontStyle',field,'SFFloat',spacing,[1.0],
bound(incl(0),excl(inf))).
dictionary('FontStyle',field,'SFString',style,['PLAIN'],[]).
dictionary('FontStyle',field,'SFBool',topToBottom,['TRUE'],[]).
%%%%%%%%%%%%%%%%
dictionary('Group',eventIn,'MFNode',addChildren,[],[]).
dictionary('Group',eventIn,'MFNode',removeChildren,[],[]).
dictionary('Group',exposedField,'MFNode',children,[[]],
[children_nodes]).
dictionary('Group',field,'SFVec3f',bboxCenter,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('Group',field,'SFVec3f',bboxSize,[-1,-1,-1],
bound(excl(0),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('ImageTexture',exposedField,'MFString',url,[[]],[]).
dictionary('ImageTexture',field,'SFBool',repeatS,['TRUE'],[]).
dictionary('ImageTexture',field,'SFBool',repeatT,['TRUE'],[]).
%%%%%%%%%%%%%%%%
dictionary('IndexedFaceSet',eventIn,'MFInt32',set_colorIndex,[],[]).
dictionary('IndexedFaceSet',eventIn,'MFInt32',set_coordIndex,[],[]).
dictionary('IndexedFaceSet',eventIn,'MFInt32',set_normalIndex,[],[]).
dictionary('IndexedFaceSet',eventIn,'MFInt32',set_texCoordIndex,[],[]).
dictionary('IndexedFaceSet',exposedField,'SFNode',color,['NULL'],
['Color']).
dictionary('IndexedFaceSet',exposedField,'SFNode',coord,['NULL'],
['Coordinate']).
dictionary('IndexedFaceSet',exposedField,'SFNode',normal,['NULL'],
['Normal']).
dictionary('IndexedFaceSet',exposedField,'SFNode',texCoord,['NULL'],
['TextureCoordinate']).
dictionary('IndexedFaceSet',field,'SFBool',ccw,['TRUE'],[]).
dictionary('IndexedFaceSet',field,'MFInt32',colorIndex,[[]],
bound(incl(-1),excl(inf))).
dictionary('IndexedFaceSet',field,'SFBool',colorPerVertex,['TRUE'],[]).
dictionary('IndexedFaceSet',field,'SFBool',convex,['TRUE'],[]).
dictionary('IndexedFaceSet',field,'MFInt32',coordIndex,[[]],
bound(incl(-1),excl(inf))).
dictionary('IndexedFaceSet',field,'SFFloat',creaseAngle,[0],
bound(incl(0),excl(inf))).
dictionary('IndexedFaceSet',field,'MFInt32',normalIndex,[[]],
bound(incl(-1),excl(inf))).
dictionary('IndexedFaceSet',field,'SFBool',normalPerVertex,['TRUE'],[]).
dictionary('IndexedFaceSet',field,'SFBool',solid,['TRUE'],[]).
dictionary('IndexedFaceSet',field,'MFInt32',texCoordIndex,[[]],
bound(incl(-1),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('IndexedLineSet',eventIn,'MFInt32',set_colorIndex,[],[]).
dictionary('IndexedLineSet',eventIn,'MFInt32',set_coordIndex,[],[]).
dictionary('IndexedLineSet',exposedField,'SFNode',color,['NULL'],
['Color']).
dictionary('IndexedLineSet',exposedField,'SFNode',coord,['NULL'],
['Coordinate']).
dictionary('IndexedLineSet',field,'MFInt32',colorIndex,[[]],
bound(incl(-1),excl(inf))).
dictionary('IndexedLineSet',field,'SFBool',colorPerVertex,['TRUE'],[]).
dictionary('IndexedLineSet',field,'MFInt32',coordIndex,[[]],
bound(incl(-1),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('Inline',exposedField,'MFString',url,[[]],[]).
dictionary('Inline',field,'SFVec3f',bboxCenter,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('Inline',field,'SFVec3f',bboxSize,[-1,-1,-1],
bound(excl(0),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('LOD',exposedField,'MFNode',level,[[]],
[children_nodes]).
dictionary('LOD',field,'SFVec3f',center,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('LOD',field,'MFFloat',range,[[]],
bound(excl(0),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('Material',exposedField,'SFFloat',ambientIntensity,[0.2],
bound(incl(0),incl(1))).
dictionary('Material',exposedField,'SFColor',diffuseColor,[0.8,0.8,0.8],
bound(incl(0),incl(1))).
dictionary('Material',exposedField,'SFColor',emissiveColor,[0,0,0],
bound(incl(0),incl(1))).
dictionary('Material',exposedField,'SFFloat',shininess,[0.2],
bound(incl(0),incl(1))).
dictionary('Material',exposedField,'SFColor',specularColor,[0,0,0],
bound(incl(0),incl(1))).
dictionary('Material',exposedField,'SFFloat',transparency,[0],
bound(incl(0),incl(1))).
%%%%%%%%%%%%%%%%
dictionary('MovieTexture',exposedField,'SFBool',loop,['FALSE'],[]).
dictionary('MovieTexture',exposedField,'SFFloat',speed,[1.0],
bound(excl(inf_neg),excl(inf))).
dictionary('MovieTexture',exposedField,'SFTime',startTime,[0],
bound(excl(inf_neg),excl(inf))).
dictionary('MovieTexture',exposedField,'SFTime',stopTime,[0],
bound(excl(inf_neg),excl(inf))).
dictionary('MovieTexture',exposedField,'MFString',url,[[]],[]).
dictionary('MovieTexture',field,'SFBool',repeatS,['TRUE'],[]).
dictionary('MovieTexture',field,'SFBool',repeatT,['TRUE'],[]).
dictionary('MovieTexture',eventOut,'SFTime',duration_changed,[],[]).
dictionary('MovieTexture',eventOut,'SFBool',isActive,[],[]).
%%%%%%%%%%%%%%%%
%% NavigationInfo node fields.
dictionary('NavigationInfo',eventIn,'SFBool',set_bind,[],[]).
dictionary('NavigationInfo',exposedField,'MFFloat',avatarSize,[0.25,1.6,0.75],
	bound(incl(0),excl(inf))).
dictionary('NavigationInfo',exposedField,'SFBool',headlight,['TRUE'],[]).
dictionary('NavigationInfo',exposedField,'SFFloat',speed,[1.0],
	bound(incl(0),excl(inf))).
dictionary('NavigationInfo',exposedField,'MFString',type,['WALK','ANY'],[]).
%% Fixed field name: the VRML97 specification (ISO/IEC 14772-1) calls
%% this field 'visibilityLimit'; it was misspelled 'visabilityLimit'.
dictionary('NavigationInfo',exposedField,'SFFloat',visibilityLimit,[0.0],
	bound(incl(0),excl(inf))).
dictionary('NavigationInfo',eventOut,'SFBool',isBound,[],[]).
%%%%%%%%%%%%%%%%
dictionary('Normal',exposedField,'MFVec3f',vector,[[]],
bound(excl(inf_neg),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('NormalInterpolator',eventIn,'SFFloat',set_fraction,[],
bound(excl(inf_neg),excl(inf))).
dictionary('NormalInterpolator',exposedField,'MFFloat',key,[],
bound(excl(inf_neg),excl(inf))).
dictionary('NormalInterpolator',exposedField,'MFVec3f',keyValue,[],
bound(excl(inf_neg),excl(inf))).
dictionary('NormalInterpolator',eventOut,'MFVec3f',value_changed,[],[]).
%%%%%%%%%%%%%%%%
dictionary('OrientationInterpolator',eventIn,'SFFloat',set_fraction,[],
bound(excl(inf_neg),excl(inf))).
dictionary('OrientationInterpolator',exposedField,'MFFloat',key,[],
bound(excl(inf_neg),excl(inf))).
dictionary('OrientationInterpolator',exposedField,'MFRotation',keyValue,[],
bound(incl(-1),incl(1),excl(inf_neg),excl(inf))).
dictionary('OrientationInterpolator',eventOut,'SFRotation',value_changed,[],[]).
%%%%%%%%%%%%%%%%
dictionary('PixelTexture',exposedField,'SFImage',image,[0,0,0],[]).
dictionary('PixelTexture',field,'SFBool',repeatS,['TRUE'],[]).
dictionary('PixelTexture',field,'SFBool',repeatT,['TRUE'],[]).
%%%%%%%%%%%%%%%%
dictionary('PlaneSensor',exposedField,'SFBool',autoOffset,['TRUE'],[]).
dictionary('PlaneSensor',exposedField,'SFBool',enabled,['TRUE'],[]).
dictionary('PlaneSensor',exposedField,'SFVec2f',maxPosition,[-1, -1],
bound(excl(inf_neg),excl(inf))).
dictionary('PlaneSensor',exposedField,'SFVec2f',minPosition,[0, 0],
bound(excl(inf_neg),excl(inf))).
dictionary('PlaneSensor',exposedField,'SFVec3f',offset,[0, 0, 0],
bound(excl(inf_neg),excl(inf))).
dictionary('PlaneSensor',eventOut,'SFBool',isActive,[],[]).
dictionary('PlaneSensor',eventOut,'SFVec3f',trackPoint_changed,[],[]).
dictionary('PlaneSensor',eventOut,'SFVec3f',translation_changed,[],[]).
%%%%%%%%%%%%%%%%
dictionary('PointLight',exposedField,'SFFloat',ambientIntensity,[0],
bound(incl(0),incl(1))).
dictionary('PointLight',exposedField,'SFVec3f',attenuation,[1,0,0],
bound(incl(0),excl(inf))).
dictionary('PointLight',exposedField,'SFColor',color,[1,1,1],
bound(incl(0),incl(1))).
dictionary('PointLight',exposedField,'SFFloat',intensity,[1],
bound(incl(0),incl(1))).
dictionary('PointLight',exposedField,'SFVec3f',location,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('PointLight',exposedField,'SFBool',on,['TRUE'],[]).
dictionary('PointLight',exposedField,'SFFloat',radius,[100],
bound(incl(0),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('PointSet',exposedField,'SFNode',color,['NULL'],
['Color']).
dictionary('PointSet',exposedField,'SFNode',coord,['NULL'],
['Coordinate']).
%%%%%%%%%%%%%%%%
dictionary('PositionInterpolator',eventIn,'SFFloat',set_fraction,[],
bound(excl(inf_neg),excl(inf))).
dictionary('PositionInterpolator',exposedField,'MFFloat',key,[[]],
bound(excl(inf_neg),excl(inf))).
dictionary('PositionInterpolator',exposedField,'MFVec3f',keyValue,[[]],
bound(excl(inf_neg),excl(inf))).
dictionary('PositionInterpolator',eventOut,'SFVec3f',value_changed,[],[]).
%%%%%%%%%%%%%%%%
dictionary('ProximitySensor',exposedField,'SFVec3f',center,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('ProximitySensor',exposedField,'SFVec3f',size,[0,0,0],
bound(incl(0),excl(inf))).
dictionary('ProximitySensor',exposedField,'SFBool',enabled,['TRUE'],[]).
dictionary('ProximitySensor',eventOut,'SFBool',isActive,[],[]).
dictionary('ProximitySensor',eventOut,'SFVec3f',position_changed,[],[]).
dictionary('ProximitySensor',eventOut,'SFRotation',orientation_changed,[],[]).
dictionary('ProximitySensor',eventOut,'SFTime',enterTime,[],[]).
dictionary('ProximitySensor',eventOut,'SFTime',exitTime,[],[]).
%%%%%%%%%%%%%%%%
%% ScalarInterpolator node fields.
%% Fixed: set_fraction init value was [[]]; every other interpolator's
%% eventIn fact (e.g. ColorInterpolator, PositionInterpolator) uses [].
dictionary('ScalarInterpolator',eventIn,'SFFloat',set_fraction,[],
	bound(excl(inf_neg),excl(inf))).
dictionary('ScalarInterpolator',exposedField,'MFFloat',key,[[]],
	bound(excl(inf_neg),excl(inf))).
dictionary('ScalarInterpolator',exposedField,'MFFloat',keyValue,[[]],
	bound(excl(inf_neg),excl(inf))).
dictionary('ScalarInterpolator',eventOut,'SFFloat',value_changed,[],[]).
%%%%%%%%%%%%%%%%
dictionary('Script',exposedField,'MFString',url,[[]],[]).
dictionary('Script',field,'SFBool',directOutput,['FALSE'],[]).
dictionary('Script',field,'SFBool',mustEvaluate,['FALSE'],[]).
%dictionary('Script',eventIn,_eventTypeName,_eventName,[],_Bound).
%dictionary('Script',field,_fieldTypeName,_fieldName,_initialValue,_Bound).
%dictionary('Script',eventOut,_eventTypeName,_eventName,[],[]).
%%%%%%%%%%%%%%%%
dictionary('Shape',exposedField,'SFNode',appearance,['NULL'],
['Appearance']).
dictionary('Shape',exposedField,'SFNode',geometry,['NULL'],[]).
%%%%%%%%%%%%%%%%
dictionary('Sound',exposedField,'SFVec3f',direction,[0,0,1],
bound(excl(inf_neg),excl(inf))).
dictionary('Sound',exposedField,'SFFloat',intensity,[1],
bound(incl(0),incl(1))).
dictionary('Sound',exposedField,'SFVec3f',location,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('Sound',exposedField,'SFFloat',maxBack,[10],
bound(incl(0),excl(inf))).
dictionary('Sound',exposedField,'SFFloat',maxFront,[10],
bound(incl(0),excl(inf))).
dictionary('Sound',exposedField,'SFFloat',minBack,[1],
bound(incl(0),excl(inf))).
dictionary('Sound',exposedField,'SFFloat',minFront,[1],
bound(incl(0),excl(inf))).
dictionary('Sound',exposedField,'SFFloat',priority,[0],
bound(incl(0),incl(1))).
dictionary('Sound',exposedField,'SFNode',source,['NULL'],
['AudioClip','MovieTexture']).
dictionary('Sound',field,'SFBool',spatialize,['TRUE'],[]).
%%%%%%%%%%%%%%%%
dictionary('Sphere',field,'SFFloat',radius,[1],
bound(excl(0),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('SphereSensor',exposedField,'SFBool',autoOffset,['TRUE'],[]).
dictionary('SphereSensor',exposedField,'SFBool',enabled,['TRUE'],[]).
dictionary('SphereSensor',exposedField,'SFRotation',offset,[0,1,0,0],
bound(incl(-1),incl(1),excl(inf_neg),excl(inf))).
dictionary('SphereSensor',eventOut,'SFBool',isActive,[],[]).
dictionary('SphereSensor',eventOut,'SFRotation',rotation_changed,[],[]).
dictionary('SphereSensor',eventOut,'SFVec3f',trackPoint_changed,[],[]).
%%%%%%%%%%%%%%%
dictionary('SpotLight',exposedField,'SFFloat',ambientIntensity,[0],
bound(incl(0),incl(1))).
dictionary('SpotLight',exposedField,'SFVec3f',attenuation,[1,0,0],
bound(incl(0),excl(inf))).
dictionary('SpotLight',exposedField,'SFFloat',beamWidth,[1.570796],
bound(excl(0),incl(pi_div2))).
dictionary('SpotLight',exposedField,'SFColor',color,[1,1,1],
bound(incl(0),incl(1))).
dictionary('SpotLight',exposedField,'SFFloat',cutOffAngle,[0.785398],
bound(excl(0),incl(pi_div2))).
dictionary('SpotLight',exposedField,'SFVec3f',direction,[0,0,-1],
bound(excl(inf_neg),excl(inf))).
dictionary('SpotLight',exposedField,'SFFloat',intensity,[1],
bound(incl(0),incl(1))).
dictionary('SpotLight',exposedField,'SFVec3f',location,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('SpotLight',exposedField,'SFBool',on,['TRUE'],[]).
dictionary('SpotLight',exposedField,'SFFloat',radius,[100],
bound(incl(0),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('Switch',exposedField,'MFNode',choice,[[]],
[children_nodes]).
dictionary('Switch',exposedField,'SFInt32',whichChoice,[-1],
bound(incl(-1),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('Text',exposedField,'MFString',string,[[]],[]).
dictionary('Text',exposedField,'SFNode',fontStyle,['NULL'],
['FontStyle']).
dictionary('Text',exposedField,'MFFloat',length,[[]],
bound(incl(0),excl(inf))).
dictionary('Text',exposedField,'SFFloat',maxExtent,[0.0],
bound(incl(0),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('TextureCoordinate',exposedField,'MFVec2f',point,[[]],
bound(excl(inf_neg),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('TextureTransform',exposedField,'SFVec2f',center,[0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('TextureTransform',exposedField,'SFFloat',rotation,[0],
bound(excl(inf_neg),excl(inf))).
dictionary('TextureTransform',exposedField,'SFVec2f',scale,[1,1],
bound(excl(inf_neg),excl(inf))).
dictionary('TextureTransform',exposedField,'SFVec2f',translation,[0,0],
bound(excl(inf_neg),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('TimeSensor',exposedField,'SFTime',cycleInterval,[1],
bound(excl(0),excl(inf))).
dictionary('TimeSensor',exposedField,'SFBool',enabled,['TRUE'],[]).
dictionary('TimeSensor',exposedField,'SFBool',loop,['FALSE'],[]).
dictionary('TimeSensor',exposedField,'SFTime',startTime,[0],
bound(excl(inf_neg),excl(inf))).
dictionary('TimeSensor',exposedField,'SFTime',stopTime,[0],
bound(excl(inf_neg),excl(inf))).
dictionary('TimeSensor',eventOut,'SFTime',cycleTime,[],[]).
dictionary('TimeSensor',eventOut,'SFFloat',fraction_changed,[],[]).
dictionary('TimeSensor',eventOut,'SFBool',isActive,[],[]).
dictionary('TimeSensor',eventOut,'SFTime',time,[],[]).
%%%%%%%%%%%%%%%%
dictionary('TouchSensor',exposedField,'SFBool',enabled,['TRUE'],[]).
dictionary('TouchSensor',eventOut,'SFVec3f',hitNormal_changed,[],[]).
dictionary('TouchSensor',eventOut,'SFVec3f',hitPoint_changed,[],[]).
dictionary('TouchSensor',eventOut,'SFVec2f',hitTexCoord_changed,[],[]).
dictionary('TouchSensor',eventOut,'SFBool',isActive,[],[]).
dictionary('TouchSensor',eventOut,'SFBool',isOver,[],[]).
dictionary('TouchSensor',eventOut,'SFTime',touchTime,[],[]).
%%%%%%%%%%%%%%%%
dictionary('Transform',eventIn,'MFNode',addChildren,[],[]).
dictionary('Transform',eventIn,'MFNode',removeChildren,[],[]).
dictionary('Transform',exposedField,'SFVec3f',center,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('Transform',exposedField,'MFNode',children,[[]],
[children_nodes]).
dictionary('Transform',exposedField,'SFRotation',rotation,[0,0,1,0],
bound(incl(-1),incl(1),excl(inf_neg),excl(inf))).
dictionary('Transform',exposedField,'SFVec3f',scale,[1,1,1],
bound(excl(0),excl(inf))).
dictionary('Transform',exposedField,'SFRotation',scaleOrientation,[0,0,1,0],
bound(incl(-1),incl(1),excl(inf_neg),excl(inf))).
dictionary('Transform',exposedField,'SFVec3f',translation,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('Transform',field,'SFVec3f',bboxCenter,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('Transform',field,'SFVec3f',bboxSize,[-1,-1,-1],
bound(excl(0),excl(inf))).
%%%%%%%%%%%%%%%%
dictionary('Viewpoint',eventIn,'SFBool',set_bind,[],[]).
dictionary('Viewpoint',exposedField,'SFFloat',fieldOfView,[0.785398],
bound(excl(0),excl(pi))).
dictionary('Viewpoint',exposedField,'SFBool',jump,['TRUE'],[]).
dictionary('Viewpoint',exposedField,'SFRotation',orientation,[0,0,1,0],
bound(incl(-1),incl(1),excl(inf_neg),excl(inf))).
dictionary('Viewpoint',exposedField,'SFVec3f',position,[0,0,10],
bound(excl(inf_neg),excl(inf))).
dictionary('Viewpoint',field,'SFString',description,[''],[]).
dictionary('Viewpoint',eventOut,'SFTime',bindTime,[],[]).
dictionary('Viewpoint',eventOut,'SFBool',isBound,[],[]).
%%%%%%%%%%%%%%%%
dictionary('VisibilitySensor',exposedField,'SFVec3f',center,[0,0,0],
bound(excl(inf_neg),excl(inf))).
dictionary('VisibilitySensor',exposedField,'SFBool',enabled,['TRUE'],[]).
dictionary('VisibilitySensor',exposedField,'SFVec3f',size,[0,0,0],
bound(incl(0),excl(inf))).
dictionary('VisibilitySensor',eventOut,'SFTime',enterTime,[],[]).
dictionary('VisibilitySensor',eventOut,'SFTime',exitTime,[],[]).
dictionary('VisibilitySensor',eventOut,'SFBool',isActive,[],[]).
%%%%%%%%%%%%%%%%
dictionary('WorldInfo',field,'MFString',info,[[]],[]).
dictionary('WorldInfo',field,'SFString',title,[''],[]).
| leuschel/ecce | www/CiaoDE/ciao/library.development/vrml/dictionary.pl | Perl | apache-2.0 | 27,273 |
###########################################$
# Copyright 2008-2010 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"). You may not
# use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions
# and limitations under the License.
###########################################$
# __ _ _ ___
# ( )( \/\/ )/ __)
# /__\ \ / \__ \
# (_)(_) \/\/ (___/
#
# Amazon EC2 Perl Library
# API Version: 2010-06-15
# Generated: Wed Jul 21 13:37:54 PDT 2010
#
package Amazon::EC2::Model::RequestSpotInstancesRequest;

use strict;
use warnings;

use base qw (Amazon::EC2::Model);

#
# Amazon::EC2::Model::RequestSpotInstancesRequest
#
# Model object for the EC2 RequestSpotInstances operation.
#
# Properties:
#
#   SpotPrice: string
#   InstanceCount: int
#   Type: string   (presumably 'one-time' or 'persistent' per the EC2 API — confirm)
#   ValidFrom: string
#   ValidUntil: string
#   LaunchGroup: string
#   AvailabilityZoneGroup: string
#   LaunchSpecification: Amazon::EC2::Model::LaunchSpecification
#
# Each field has four accessors: getX, setX, withX (chainable setter)
# and isSetX (defined-ness check). All set*/with* methods return $self
# so calls can be chained.
#

# new(CLASS, [HASHREF])
# Creates an empty request object; when a hash ref is supplied, the
# fields are populated from it via the inherited _fromHashRef.
sub new {
    my ($class, $data) = @_;
    my $self = {};
    $self->{_fields} = {

        SpotPrice => { FieldValue => undef, FieldType => "string"},
        InstanceCount => { FieldValue => undef, FieldType => "int"},
        Type => { FieldValue => undef, FieldType => "string"},
        ValidFrom => { FieldValue => undef, FieldType => "string"},
        ValidUntil => { FieldValue => undef, FieldType => "string"},
        LaunchGroup => { FieldValue => undef, FieldType => "string"},
        AvailabilityZoneGroup => { FieldValue => undef, FieldType => "string"},
        LaunchSpecification => {FieldValue => undef, FieldType => "Amazon::EC2::Model::LaunchSpecification"},

    };

    bless ($self, $class);
    if (defined $data) {
        $self->_fromHashRef($data);
    }

    return $self;
}

# --- SpotPrice accessors ---

sub getSpotPrice {
    return shift->{_fields}->{SpotPrice}->{FieldValue};
}

sub setSpotPrice {
    my ($self, $value) = @_;
    $self->{_fields}->{SpotPrice}->{FieldValue} = $value;
    return $self;
}

sub withSpotPrice {
    my ($self, $value) = @_;
    $self->setSpotPrice($value);
    return $self;
}

sub isSetSpotPrice {
    return defined (shift->{_fields}->{SpotPrice}->{FieldValue});
}

# --- InstanceCount accessors ---

sub getInstanceCount {
    return shift->{_fields}->{InstanceCount}->{FieldValue};
}

sub setInstanceCount {
    my ($self, $value) = @_;
    $self->{_fields}->{InstanceCount}->{FieldValue} = $value;
    return $self;
}

sub withInstanceCount {
    my ($self, $value) = @_;
    $self->setInstanceCount($value);
    return $self;
}

sub isSetInstanceCount {
    return defined (shift->{_fields}->{InstanceCount}->{FieldValue});
}

# --- Type accessors ---

sub getType {
    return shift->{_fields}->{Type}->{FieldValue};
}

sub setType {
    my ($self, $value) = @_;
    $self->{_fields}->{Type}->{FieldValue} = $value;
    return $self;
}

sub withType {
    my ($self, $value) = @_;
    $self->setType($value);
    return $self;
}

sub isSetType {
    return defined (shift->{_fields}->{Type}->{FieldValue});
}

# --- ValidFrom accessors ---

sub getValidFrom {
    return shift->{_fields}->{ValidFrom}->{FieldValue};
}

sub setValidFrom {
    my ($self, $value) = @_;
    $self->{_fields}->{ValidFrom}->{FieldValue} = $value;
    return $self;
}

sub withValidFrom {
    my ($self, $value) = @_;
    $self->setValidFrom($value);
    return $self;
}

sub isSetValidFrom {
    return defined (shift->{_fields}->{ValidFrom}->{FieldValue});
}

# --- ValidUntil accessors ---

sub getValidUntil {
    return shift->{_fields}->{ValidUntil}->{FieldValue};
}

sub setValidUntil {
    my ($self, $value) = @_;
    $self->{_fields}->{ValidUntil}->{FieldValue} = $value;
    return $self;
}

sub withValidUntil {
    my ($self, $value) = @_;
    $self->setValidUntil($value);
    return $self;
}

sub isSetValidUntil {
    return defined (shift->{_fields}->{ValidUntil}->{FieldValue});
}

# --- LaunchGroup accessors ---

sub getLaunchGroup {
    return shift->{_fields}->{LaunchGroup}->{FieldValue};
}

sub setLaunchGroup {
    my ($self, $value) = @_;
    $self->{_fields}->{LaunchGroup}->{FieldValue} = $value;
    return $self;
}

sub withLaunchGroup {
    my ($self, $value) = @_;
    $self->setLaunchGroup($value);
    return $self;
}

sub isSetLaunchGroup {
    return defined (shift->{_fields}->{LaunchGroup}->{FieldValue});
}

# --- AvailabilityZoneGroup accessors ---

sub getAvailabilityZoneGroup {
    return shift->{_fields}->{AvailabilityZoneGroup}->{FieldValue};
}

sub setAvailabilityZoneGroup {
    my ($self, $value) = @_;
    $self->{_fields}->{AvailabilityZoneGroup}->{FieldValue} = $value;
    return $self;
}

sub withAvailabilityZoneGroup {
    my ($self, $value) = @_;
    $self->setAvailabilityZoneGroup($value);
    return $self;
}

sub isSetAvailabilityZoneGroup {
    return defined (shift->{_fields}->{AvailabilityZoneGroup}->{FieldValue});
}

# --- LaunchSpecification accessors ---

sub getLaunchSpecification {
    return shift->{_fields}->{LaunchSpecification}->{FieldValue};
}

sub setLaunchSpecification {
    my ($self, $value) = @_;
    $self->{_fields}->{LaunchSpecification}->{FieldValue} = $value;
    # Fixed: 'return $self' was missing here, unlike every other setter
    # in this class; restores consistency and setter chainability.
    return $self;
}

sub withLaunchSpecification {
    my ($self, $value) = @_;
    $self->setLaunchSpecification($value);
    return $self;
}

sub isSetLaunchSpecification {
    return defined (shift->{_fields}->{LaunchSpecification}->{FieldValue});
}

1;
| electric-cloud/EC-EC2 | src/main/resources/project/lib/Amazon/EC2/Model/RequestSpotInstancesRequest.pm | Perl | apache-2.0 | 6,136 |
package VMOMI::MultipleCertificatesVerifyFaultThumbprintData;
use parent 'VMOMI::DynamicData';

use strict;
use warnings;

# Generated vSphere API data object: a (port, thumbprint) pair reported by
# the MultipleCertificatesVerifyFault. Class metadata below drives the
# generic VMOMI (de)serialization machinery.
our @class_ancestors = ('DynamicData');

# Each member: [ name, type (undef = simple), is_array flag ].
our @class_members = (
    [ 'port',       undef, 0, ],
    [ 'thumbprint', undef, 0, ],
);

# Ancestor class names, nearest first.
sub get_class_ancestors {
    return @class_ancestors;
}

# This class's members appended to everything inherited from the parent.
sub get_class_members {
    my $class = shift;
    return ( $class->SUPER::get_class_members(), @class_members );
}

1;
| stumpr/p5-vmomi | lib/VMOMI/MultipleCertificatesVerifyFaultThumbprintData.pm | Perl | apache-2.0 | 474 |
# Copyright (c) 2014 Timm Murray
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
package UAV::Pilot::WumpusRover::Video;
use v5.14;
use warnings;
use Moose;
use namespace::autoclean;
use UAV::Pilot::Exceptions;
use UAV::Pilot::Video::H264Handler;
# Bytes pulled off the video socket per read.
use constant BUF_READ_SIZE => 4096;
# 16-bit magic value that opens every Wumpus ("Wump") video packet header,
# plus the same value as a byte pair for resync scanning.
use constant WUMP_VIDEO_MAGIC_NUMBER => 0xFB42;
use constant WUMP_VIDEO_MAGIC_NUMBER_ARRAY => [ 0xFB, 0x42 ];
# Header is 32 bytes on the wire; only the first 22 carry parsed fields,
# the remainder is reserved (see _read_wump_header).
use constant WUMP_HEADER_SIZE => 32;
# Highest protocol version this parser understands.
use constant WUMP_VERSION => 0x0000;
# Codec identifiers carried in the header; only H.264 is handled.
use constant {
CODEC_TYPE_NULL => 0,
CODEC_TYPE_H264 => 1,
CODEC_TYPE_MJPEG => 2,
};
# Parser states: expecting a header, expecting a frame payload, or
# scanning for the next header after a corrupt/unsupported packet.
use constant {
_MODE_WUMP_HEADER => 0,
_MODE_FRAME => 1,
_MODE_NEXT_WUMP => 2,
};
# Bit positions within the header's 32-bit flags field.
use constant {
FLAG_HEARTBEAT => 0,
};
with 'UAV::Pilot::Logger';
# Registered H264Handler objects; each decoded frame is passed to all of them.
has 'handlers' => (
traits => ['Array'],
is => 'rw',
isa => 'ArrayRef[UAV::Pilot::Video::H264Handler]',
default => sub {[]},
handles => {
'add_handler' => 'push',
},
);
has 'condvar' => (
is => 'ro',
isa => 'AnyEvent::CondVar',
);
# Driver supplies the host to connect to (see _build_io).
has 'driver' => (
is => 'ro',
isa => 'UAV::Pilot::WumpusRover::Driver',
);
# Running count of video frames handled so far.
has 'frames_processed' => (
traits => ['Number'],
is => 'ro',
isa => 'Int',
default => 0,
handles => {
'_add_frames_processed' => 'add',
},
);
# Connected, non-blocking video socket (set up in BUILDARGS/_build_io).
has '_io' => (
is => 'ro',
isa => 'Item',
writer => '_set_io',
);
# Raw bytes received but not yet consumed by the header/frame parser.
has '_byte_buffer' => (
traits => ['Array'],
is => 'rw',
isa => 'ArrayRef[Int]',
default => sub {[]},
handles => {
'_byte_buffer_splice' => 'splice',
'_byte_buffer_size' => 'count',
'_byte_buffer_push' => 'push',
},
);
# Current parser state (_MODE_* constant).
has '_mode' => (
is => 'rw',
isa => 'Int',
default => sub {
my ($class) = @_;
return $class->_MODE_WUMP_HEADER;
},
);
# Parsed fields of the most recent header; _read_frame uses its 'length',
# 'width' and 'height'.
has '_last_wump_header' => (
is => 'rw',
isa => 'HashRef[Item]',
default => sub {{}},
);
sub BUILDARGS
{
# Moose constructor hook: opens the video socket up front and stores it as
# '_io', dropping the host/port args it was built from.
# NOTE(review): expects a single hashref of constructor args rather than
# Moose's usual flat list — confirm all callers pass a hashref.
my ($class, $args) = @_;
my $io = $class->_build_io( $args );
$$args{'_io'} = $io;
delete $$args{'host'};
delete $$args{'port'};
return $args;
}
sub init_event_loop
{
# Register an AnyEvent read watcher on the video socket; each readable
# event feeds _process_io. Always returns 1.
my ($self) = @_;
my $io_event; $io_event = AnyEvent->io(
fh => $self->_io,
poll => 'r',
cb => sub {
$self->_process_io;
# Referencing $io_event here makes the closure hold the watcher guard;
# without it the watcher would be garbage-collected immediately.
$io_event;
},
);
return 1;
}
sub _read_wump_header
{
# Parse one 32-byte Wump packet header off the front of the byte buffer.
# Returns early (1) if a full header is not yet buffered. On a bad magic
# number, unsupported version, or non-H.264 codec it logs and switches to
# resync mode; otherwise it stores the header and goes on to read the frame.
my ($self) = @_;
return 1 if $self->_byte_buffer_size < $self->WUMP_HEADER_SIZE;
my @bytes = $self->_byte_buffer_splice( 0, $self->WUMP_HEADER_SIZE );
# All multi-byte fields are big-endian.
my %packet;
$packet{magic_number} = UAV::Pilot->convert_16bit_BE( @bytes[0,1] );
$packet{version} = UAV::Pilot->convert_16bit_BE( @bytes[2,3] );
$packet{codec_id} = UAV::Pilot->convert_16bit_BE( @bytes[4,5] );
$packet{flags} = UAV::Pilot->convert_32bit_BE( @bytes[6..9] );
$packet{length} = UAV::Pilot->convert_32bit_BE( @bytes[10..13] );
$packet{width} = UAV::Pilot->convert_16bit_BE( @bytes[14,15] );
$packet{height} = UAV::Pilot->convert_16bit_BE( @bytes[16,17] );
$packet{checksum} = UAV::Pilot->convert_32bit_BE( @bytes[18..21] );
# 10 bytes reserved
if( $packet{magic_number} != $self->WUMP_VIDEO_MAGIC_NUMBER ) {
$self->_logger->error( "Bad Wump header. Got [$packet{magic_number}],"
. " expected " . $self->WUMP_VIDEO_MAGIC_NUMBER );
$self->_mode( $self->_MODE_NEXT_WUMP );
return $self->_read_to_next_wump_header;
}
if( $packet{version} > $self->WUMP_VERSION ) {
$self->_logger->error( "Got Wumpus Video version [$packet{version}]"
. ", but only support up to version [" . $self->WUMP_VERSION . "]"
);
$self->_mode( $self->_MODE_NEXT_WUMP );
return $self->_read_to_next_wump_header;
}
if( $packet{codec_id} != $self->CODEC_TYPE_H264 ) {
$self->_logger->error( "Can only handle encoding h264 packets" );
$self->_mode( $self->_MODE_NEXT_WUMP );
return $self->_read_to_next_wump_header;
}
# Sender asked for a keep-alive ack; echo the frame checksum back.
if( $packet{flags} & (1 << $self->FLAG_HEARTBEAT) ) {
$self->_send_heartbeat( $packet{checksum} );
}
$self->_logger->info( "Received frame " . $self->frames_processed
. ", size $packet{length}, checksum "
. sprintf( '%x', $packet{checksum} ) );
$self->_add_frames_processed( 1 );
$self->_last_wump_header( \%packet );
$self->_mode( $self->_MODE_FRAME );
return $self->_read_frame;
}
sub _read_to_next_wump_header
{
    # Resync after a corrupt/unsupported packet: scan the byte buffer for
    # the next 0xFB 0x42 magic pair, discard everything before it, and
    # resume normal header parsing there. Returns 1 (and keeps the buffer)
    # when no magic pair is buffered yet.
    my ($self) = @_;
    my @byte_buf = @{ $self->_byte_buffer };
    my @expect_signature = @{ $self->WUMP_VIDEO_MAGIC_NUMBER_ARRAY };

    # BUGFIX: stop one byte early — the signature is two bytes long, so the
    # original loop compared $byte_buf[$i + 1] past the end of the buffer on
    # the last index (uninitialized-value warning). A trailing lone 0xFB is
    # left in the buffer and rechecked once more data arrives, as before.
    foreach my $i (0 .. $#byte_buf - 1) {
        next unless ($expect_signature[0] == $byte_buf[$i])
            && ($expect_signature[1] == $byte_buf[$i + 1]);
        my @new_byte_buffer = @byte_buf[$i .. $#byte_buf];
        $self->_byte_buffer( \@new_byte_buffer );
        $self->_mode( $self->_MODE_WUMP_HEADER );
        return $self->_read_wump_header;
    }

    return 1;
}
sub _read_frame
{
# Consume one frame payload (size taken from the last parsed header) and
# hand it to every registered handler. Returns 1 without consuming when
# the full frame has not arrived yet; afterwards loops back into header
# parsing for any remaining buffered bytes.
my ($self) = @_;
my %header = %{ $self->_last_wump_header };
my $frame_size = $header{length};
if( $self->_byte_buffer_size < $frame_size ) {
$self->_logger->info( "Need $frame_size bytes to read next frame"
. ", but only " . $self->_byte_buffer_size . " available"
. ", waiting for next read" );
return 1;
}
# TODO verify checksum (Adler32)
my @frame = $self->_byte_buffer_splice( 0, $frame_size );
foreach my $handler (@{ $self->handlers }) {
$handler->process_h264_frame(
\@frame,
# Redundant width/height in order to fill both width/height
# and encoded width/height params
@header{qw{
width
height
width
height
}}
);
}
$self->_mode( $self->_MODE_WUMP_HEADER );
return $self->_read_wump_header;
}
sub _process_io
{
# Read-event callback: pull a chunk off the socket, append its bytes to
# the parse buffer, then run the parser step matching the current mode.
my ($self) = @_;
my $buf;
# NOTE(review): $read_count is never checked — EOF and read errors are
# silently ignored here; confirm that is intended.
my $read_count = $self->_io->read( $buf, $self->BUF_READ_SIZE );
my @bytes = unpack 'C*', $buf;
$self->_byte_buffer_push( @bytes );
if( $self->_mode == $self->_MODE_WUMP_HEADER ) {
$self->_read_wump_header;
}
elsif( $self->_mode == $self->_MODE_FRAME ) {
$self->_read_frame;
}
elsif( $self->_mode == $self->_MODE_NEXT_WUMP ) {
$self->_read_to_next_wump_header;
}
return 1;
}
sub _build_io
{
# Open a non-blocking TCP connection to the driver's host on the default
# video port; throws UAV::Pilot::IOException on connection failure.
my ($class, $args) = @_;
my $driver = $$args{driver};
my $host = $driver->host;
my $port = UAV::Pilot::WumpusRover->DEFAULT_VIDEO_PORT;
my $io = IO::Socket::INET->new(
PeerAddr => $host,
PeerPort => $port,
ReuseAddr => 1,
Blocking => 0,
) or UAV::Pilot::IOException->throw(
error => "Could not connect to $host:$port for video: $@",
);
return $io;
}
sub _send_heartbeat
{
# Ack a heartbeat-flagged packet: write the 16-bit magic number followed
# by the 32-bit frame checksum (both big-endian) back to the sender.
my ($self, $checksum) = @_;
$self->_logger->info( "Sending heartbeat packet for checksum [$checksum]" );
my $output = pack 'nN'
,$self->WUMP_VIDEO_MAGIC_NUMBER
,$checksum;
$self->_io->write( $output );
return 1;
}
no Moose;
# Freeze the metaclass for faster method dispatch.
__PACKAGE__->meta->make_immutable;
1;
__END__
| gitpan/UAV-Pilot-WumpusRover | lib/UAV/Pilot/WumpusRover/Video.pm | Perl | bsd-2-clause | 8,643 |
package Zonemaster::Logger::Entry v1.0.0;

use 5.14.2;
use Time::HiRes qw[time];
use JSON;
use Moose;
use Zonemaster;
use overload '""' => \&string;

# Numeric rank for each severity level; higher means more severe.
our %numeric = (
    DEBUG3   => -2,
    DEBUG2   => -1,
    DEBUG    => 0,
    INFO     => 1,
    NOTICE   => 2,
    WARNING  => 3,
    ERROR    => 4,
    CRITICAL => 5,
);

# Reference point for entry timestamps (seconds since this was last set;
# see start_time_now()).
our $start_time = time();

# Shared serializer used by string() for reference-valued arguments.
# BUGFIX: this called JSON::XS->new, but only the JSON front-end is loaded
# above, so it broke when JSON::XS was not installed. JSON->new exposes the
# same OO interface and works with whichever backend (XS or PP) is present.
my $json = JSON->new->allow_blessed->convert_blessed->canonical;

# Module identifier (e.g. "BASIC" or "SYSTEM"), derived lazily from trace.
has 'module' => ( is => 'ro', isa => 'Str', lazy_build => 1 );
# Message tag; the only required attribute.
has 'tag' => ( is => 'ro', isa => 'Str', required => 1 );
# Optional named arguments attached to the entry.
has 'args' => ( is => 'ro', isa => 'Maybe[HashRef]', required => 0 );
# Seconds elapsed since $start_time when the entry was created.
has 'timestamp' => ( is => 'ro', isa => 'Num', default => sub { time() - $start_time } );
# Zonemaster-internal call-stack frames captured at creation time.
has 'trace' => ( is => 'ro', isa => 'ArrayRef[ArrayRef]', builder => '_build_trace' );
# Severity level name, resolved lazily from the policy configuration.
has 'level' => ( is => 'ro', isa => 'Str', lazy_build => 1, writer => '_set_level' );
sub _build_trace {
# Capture the Zonemaster-internal part of the current call stack as
# [package, fully-qualified subroutine] pairs; non-Zonemaster frames are
# skipped.
my ( $self ) = @_;
my @trace;
my $i = 0;
# caller() fields, for reference:
# 0 1 2 3 4 5 6 7 8 9 10
# $package, $filename, $line, $subroutine, $hasargs, $wantarray, $evaltext, $is_require, $hints, $bitmask, $hinthash
while ( my @line = caller( $i++ ) ) {
next unless index( $line[3], 'Zonemaster' ) == 0;
push @trace, [ @line[ 0, 3 ] ];
}
return \@trace;
}
sub _build_module {
    # Derive the module identifier from the captured trace: a frame created
    # via Zonemaster::Util::info inside Zonemaster::Test::Foo yields "FOO";
    # anything else is attributed to "SYSTEM".
    my ( $self ) = @_;

    foreach my $frame ( @{ $self->trace } ) {
        my ( $package, $subname ) = @{$frame};
        next unless $subname eq 'Zonemaster::Util::info';
        if ( $package =~ /^Zonemaster::Test::(.*)$/ ) {
            return uc $1;
        }
    }

    return 'SYSTEM';
}
sub _build_level {
# Resolve this entry's severity from the policy configuration for its
# (module, tag) pair; entries without a policy override default to DEBUG.
# Dies when the configured string is not a known severity level.
my ( $self ) = @_;
my $string;
if ( Zonemaster->config->policy->{ $self->module }{ $self->tag } ) {
$string = uc Zonemaster->config->policy->{ $self->module }{ $self->tag };
}
else {
$string = 'DEBUG';
}
if ( defined $numeric{$string} ) {
return $string;
}
else {
die "Unknown level string: $string";
}
}
sub numeric_level {
    # Numeric rank of this entry's severity level (see %numeric).
    my $self = shift;
    return $numeric{ $self->level };
}
sub levels {
    # Class method: map of severity name => numeric rank.
    return %numeric;
}
sub string {
# Stringification (wired to "" via overload): "MODULE:TAG k=v, k=v".
# Reference-valued arguments are JSON-encoded; plain scalars interpolate
# as-is.
my ( $self ) = @_;
my $argstr = '';
## no critic (TestingAndDebugging::ProhibitNoWarnings)
# Argument values may legitimately be undef; silence interpolation warnings.
no warnings 'uninitialized';
$argstr = join( ', ',
map { $_ . '=' . ( ref( $self->args->{$_} ) ? $json->encode( $self->args->{$_} ) : $self->args->{$_} ) }
sort keys %{ $self->args } )
if $self->args;
return sprintf( '%s:%s %s', $self->module, $self->tag, $argstr );
}
###
### Class method
###
sub start_time_now {
    # Class method: reset the reference point used for entry timestamps to
    # the current (high-resolution) time.
    $start_time = time();
}
no Moose;
__PACKAGE__->meta->make_immutable;
1;
=head1 NAME
Zonemaster::Logger::Entry - module for single log entries
=head1 SYNOPSIS
Zonemaster->logger->add( TAG => { some => 'arguments' });
There should never be a need to create a log entry object in isolation. They should always be associated with and created via a logger object.
=head1 CLASS METHODS
=over
=item levels
Returns a hash where the keys are log levels as strings and the corresponding values their numeric value.
=item start_time_now()
Set the logger's start time to the current time.
=back
=head1 ATTRIBUTES
=over
=item module
An auto-generated identifier of the module that created the log entry. If it was generated from a module under Zonemaster::Test, it will be an
uppercased version of the part of the name after "Zonemaster::Test". For example, "Zonemaster::Test::Basic" gets the module identifier "BASIC". If the
entry was generated from anywhere else, it will get the module identifier "SYSTEM".
=item tag
The tag that was set when the entry was created.
=item args
The argument hash reference that was provided when the entry was created.
=item timestamp
The time after the current program started running when this entry was created. This is a floating-point value with the precision provided by
L<Time::HiRes>.
=item trace
A partial stack trace for the call that created the entry. Used to create the module tag. Almost certainly not useful for anything else.
=back
=head1 METHODS
=over
=item string
Simple method to generate a string representation of the log entry. Overloaded to the stringification operator.
=item numeric_level
Returns the log level of the entry in numeric form.
=back
=cut
| dolmen/p5-Zonemaster | lib/Zonemaster/Logger/Entry.pm | Perl | bsd-2-clause | 4,464 |
#! /usr/bin/env perl
# Copyright 1995-2016 The OpenSSL Project Authors. All Rights Reserved.
#
# Licensed under the OpenSSL license (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.openssl.org/source/license.html
# Locate this script's directory so the shared x86 assembler-generation
# helpers (perlasm) can be loaded from the source tree.
$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
push(@INC,"${dir}","${dir}../../../perlasm");
require "x86asm.pl";
# Last CLI argument is the output file; the first selects the assembler
# flavour understood by x86asm.pl.
$output = pop;
open STDOUT,">$output";
&asm_init($ARGV[0]);
# Emit unrolled Comba multiply/square routines for 4- and 8-limb bignums.
&bn_mul_comba("bn_mul_comba8",8);
&bn_mul_comba("bn_mul_comba4",4);
&bn_sqr_comba("bn_sqr_comba8",8);
&bn_sqr_comba("bn_sqr_comba4",4);
&asm_finish();
close STDOUT;
sub mul_add_c
{
# Emit one multiply-accumulate step of the Comba multiply:
# (c2,c1,c0) += a[ai] * b[bi], interleaving the loads for the next step
# ($na/$nb) according to $pos (see comment below).
local($a,$ai,$b,$bi,$c0,$c1,$c2,$pos,$i,$na,$nb)=@_;
# pos == -1 if eax and edx are pre-loaded, 0 to load from next
# words, and 1 if load return value
&comment("mul a[$ai]*b[$bi]");
# "eax" and "edx" will always be pre-loaded.
# &mov("eax",&DWP($ai*4,$a,"",0)) ;
# &mov("edx",&DWP($bi*4,$b,"",0));
&mul("edx");
&add($c0,"eax");
&mov("eax",&DWP(($na)*4,$a,"",0)) if $pos == 0; # load next a
&mov("eax",&wparam(0)) if $pos > 0; # load r[]
###
&adc($c1,"edx");
&mov("edx",&DWP(($nb)*4,$b,"",0)) if $pos == 0; # load next b
&mov("edx",&DWP(($nb)*4,$b,"",0)) if $pos == 1; # load next b
###
&adc($c2,0);
# is pos > 1, it means it is the last loop
&mov(&DWP($i*4,"eax","",0),$c0) if $pos > 0; # save r[];
&mov("eax",&DWP(($na)*4,$a,"",0)) if $pos == 1; # load next a
}
sub sqr_add_c
{
# Emit one diagonal step of the Comba square:
# (c2,c1,c0) += a[ai] * a[bi] (with ai == bi squaring eax directly).
local($r,$a,$ai,$bi,$c0,$c1,$c2,$pos,$i,$na,$nb)=@_;
# pos == -1 if eax and edx are pre-loaded, 0 to load from next
# words, and 1 if load return value
&comment("sqr a[$ai]*a[$bi]");
# "eax" and "edx" will always be pre-loaded.
# &mov("eax",&DWP($ai*4,$a,"",0)) ;
# &mov("edx",&DWP($bi*4,$b,"",0));
if ($ai == $bi)
{ &mul("eax");}
else
{ &mul("edx");}
&add($c0,"eax");
&mov("eax",&DWP(($na)*4,$a,"",0)) if $pos == 0; # load next a
###
&adc($c1,"edx");
&mov("edx",&DWP(($nb)*4,$a,"",0)) if ($pos == 1) && ($na != $nb);
###
&adc($c2,0);
# is pos > 1, it means it is the last loop
&mov(&DWP($i*4,$r,"",0),$c0) if $pos > 0; # save r[];
&mov("eax",&DWP(($na)*4,$a,"",0)) if $pos == 1; # load next b
}
sub sqr_add_c2
{
# Emit one off-diagonal step of the Comba square; the cross product is
# doubled (eax:edx shifted left via add/adc) before being accumulated:
# (c2,c1,c0) += 2 * a[ai] * a[bi].
local($r,$a,$ai,$bi,$c0,$c1,$c2,$pos,$i,$na,$nb)=@_;
# pos == -1 if eax and edx are pre-loaded, 0 to load from next
# words, and 1 if load return value
&comment("sqr a[$ai]*a[$bi]");
# "eax" and "edx" will always be pre-loaded.
# &mov("eax",&DWP($ai*4,$a,"",0)) ;
# &mov("edx",&DWP($bi*4,$a,"",0));
if ($ai == $bi)
{ &mul("eax");}
else
{ &mul("edx");}
&add("eax","eax");
###
&adc("edx","edx");
###
&adc($c2,0);
&add($c0,"eax");
&adc($c1,"edx");
&mov("eax",&DWP(($na)*4,$a,"",0)) if $pos == 0; # load next a
&mov("eax",&DWP(($na)*4,$a,"",0)) if $pos == 1; # load next b
&adc($c2,0);
&mov(&DWP($i*4,$r,"",0),$c0) if $pos > 0; # save r[];
&mov("edx",&DWP(($nb)*4,$a,"",0)) if ($pos <= 1) && ($na != $nb);
###
}
sub bn_mul_comba
{
# Generate a fully unrolled Comba multiplication routine named $name:
# r = a * b for $num-word little-endian bignums. Digit pairs are walked in
# anti-diagonal order; each result column accumulates into a rotating
# triple of carry registers (c0,c1,c2).
local($name,$num)=@_;
local($a,$b,$c0,$c1,$c2);
local($i,$as,$ae,$bs,$be,$ai,$bi);
local($tot,$end);
&function_begin_B($name,"");
$c0="ebx";
$c1="ecx";
$c2="ebp";
$a="esi";
$b="edi";
# ($as,$bs) track the starting digit pair of each anti-diagonal.
$as=0;
$ae=0;
$bs=0;
$be=0;
$tot=$num+$num-1;
&push("esi");
&mov($a,&wparam(1));
&push("edi");
&mov($b,&wparam(2));
&push("ebp");
&push("ebx");
&xor($c0,$c0);
&mov("eax",&DWP(0,$a,"",0)); # load the first word
&xor($c1,$c1);
&mov("edx",&DWP(0,$b,"",0)); # load the first second
for ($i=0; $i<$tot; $i++)
{
$ai=$as;
$bi=$bs;
$end=$be+1;
&comment("################## Calculate word $i");
for ($j=$bs; $j<$end; $j++)
{
&xor($c2,$c2) if ($j == $bs);
# $v flags the last step of a column (1) or of the whole routine (2),
# which changes the load/store scheduling inside mul_add_c.
if (($j+1) == $end)
{
$v=1;
$v=2 if (($i+1) == $tot);
}
else
{ $v=0; }
if (($j+1) != $end)
{
$na=($ai-1);
$nb=($bi+1);
}
else
{
$na=$as+($i < ($num-1));
$nb=$bs+($i >= ($num-1));
}
#printf STDERR "[$ai,$bi] -> [$na,$nb]\n";
&mul_add_c($a,$ai,$b,$bi,$c0,$c1,$c2,$v,$i,$na,$nb);
if ($v)
{
&comment("saved r[$i]");
# &mov("eax",&wparam(0));
# &mov(&DWP($i*4,"eax","",0),$c0);
# Rotate the carry registers for the next column.
($c0,$c1,$c2)=($c1,$c2,$c0);
}
$ai--;
$bi++;
}
$as++ if ($i < ($num-1));
$ae++ if ($i >= ($num-1));
$bs++ if ($i >= ($num-1));
$be++ if ($i < ($num-1));
}
&comment("save r[$i]");
# &mov("eax",&wparam(0));
&mov(&DWP($i*4,"eax","",0),$c0);
&pop("ebx");
&pop("ebp");
&pop("edi");
&pop("esi");
&ret();
&function_end_B($name);
}
sub bn_sqr_comba
{
# Generate a fully unrolled Comba squaring routine named $name:
# r = a * a for a $num-word bignum. Off-diagonal products are emitted via
# sqr_add_c2 (which doubles them); diagonal squares via sqr_add_c.
local($name,$num)=@_;
# NOTE(review): this re-unpacks @_ over the same two arguments; $c0..$c2
# start out undef and are assigned just below.
local($r,$a,$c0,$c1,$c2)=@_;
local($i,$as,$ae,$bs,$be,$ai,$bi);
local($b,$tot,$end,$half);
&function_begin_B($name,"");
$c0="ebx";
$c1="ecx";
$c2="ebp";
$a="esi";
$r="edi";
&push("esi");
&push("edi");
&push("ebp");
&push("ebx");
&mov($r,&wparam(0));
&mov($a,&wparam(1));
&xor($c0,$c0);
&xor($c1,$c1);
&mov("eax",&DWP(0,$a,"",0)); # load the first word
$as=0;
$ae=0;
$bs=0;
$be=0;
$tot=$num+$num-1;
for ($i=0; $i<$tot; $i++)
{
$ai=$as;
$bi=$bs;
$end=$be+1;
&comment("############### Calculate word $i");
for ($j=$bs; $j<$end; $j++)
{
&xor($c2,$c2) if ($j == $bs);
# Only the lower half of each anti-diagonal is walked; once the
# indices cross, the column is finished ($v != 0).
if (($ai-1) < ($bi+1))
{
$v=1;
$v=2 if ($i+1) == $tot;
}
else
{ $v=0; }
if (!$v)
{
$na=$ai-1;
$nb=$bi+1;
}
else
{
$na=$as+($i < ($num-1));
$nb=$bs+($i >= ($num-1));
}
if ($ai == $bi)
{
&sqr_add_c($r,$a,$ai,$bi,
$c0,$c1,$c2,$v,$i,$na,$nb);
}
else
{
&sqr_add_c2($r,$a,$ai,$bi,
$c0,$c1,$c2,$v,$i,$na,$nb);
}
if ($v)
{
&comment("saved r[$i]");
#&mov(&DWP($i*4,$r,"",0),$c0);
# Rotate the carry registers for the next column.
($c0,$c1,$c2)=($c1,$c2,$c0);
last;
}
$ai--;
$bi++;
}
$as++ if ($i < ($num-1));
$ae++ if ($i >= ($num-1));
$bs++ if ($i >= ($num-1));
$be++ if ($i < ($num-1));
}
&mov(&DWP($i*4,$r,"",0),$c0);
&pop("ebx");
&pop("ebp");
&pop("edi");
&pop("esi");
&ret();
&function_end_B($name);
}
| youtube/cobalt | third_party/boringssl/src/crypto/fipsmodule/bn/asm/co-586.pl | Perl | bsd-3-clause | 6,008 |
#!/usr/bin/perl -w
use strict;

# Classpath for the Java front end: bundled jars plus the Maple JNI bridge.
my $classpath = "$ENV{CLASSPATH}:antlr-2.7.5.jar:stringtemplate.jar:jung-1.7.1.jar:commons-collections-3.1.jar:colt.jar:$ENV{MAPLE}/java/jopenmaple.jar:$ENV{MAPLE}/java/externalcall.jar";

# Default input file when none are given on the command line.
unless (@ARGV) {
    @ARGV = ('../../src/LAPACK/BLAS/SRC/caxpy.f');
}

# Run the Java tool (list form avoids the shell). BUGFIX: the original
# ignored the return value of system(), silently swallowing failures.
system($ENV{JAVARUN}, "-cp", $classpath,
       "-Djava.library.path=$ENV{MAPLE}/bin.IBM_INTEL_LINUX",
       "Main", @ARGV) == 0
    or die "Failed to run $ENV{JAVARUN} Main (exit status $?)\n";
| dacmot/RevEngTools | rev.pl | Perl | bsd-3-clause | 402 |
package Lingua::YALI::LanguageIdentifier;
# ABSTRACT: Module for language identification.
use strict;
use warnings;
use File::ShareDir;
use File::Glob;
use Carp;
use Moose;
our $VERSION = '0.015'; # VERSION
extends 'Lingua::YALI::Identifier';
# Lazily populated list of available ISO 639-3 language codes (see
# get_available_languages).
has '_languages' => (is => 'rw', isa => 'ArrayRef');
# Map: language code => path to its bundled *.yali.gz model file.
has '_language_model' => (is => 'rw', isa => 'HashRef');
sub add_language
{
    # Register the given ISO 639-3 language codes for identification.
    # Returns the number of languages that were newly added; croaks on an
    # unknown code.
    my ($self, @languages) = @_;

    # Make sure the language => model-file map has been populated.
    $self->get_available_languages() if !defined $self->_languages;

    my $newly_added = 0;
    foreach my $language (@languages) {
        my $model = $self->{_language_model}->{$language};
        croak("Unknown language $language") if !defined $model;
        $newly_added += $self->add_class($language, $model);
    }

    return $newly_added;
}
sub remove_language
{
    # Unregister the given ISO 639-3 language codes. Returns the number of
    # languages actually removed; croaks on an unknown code.
    my ($self, @languages) = @_;

    # Make sure the language => model-file map has been populated.
    $self->get_available_languages() if !defined $self->_languages;

    my $removed = 0;
    foreach my $language (@languages) {
        croak("Unknown language $language")
            if !defined $self->{_language_model}->{$language};
        $removed += $self->remove_class($language);
    }

    return $removed;
}
sub get_languages
{
    # Languages currently registered for identification.
    my ($self) = @_;
    return $self->get_classes();
}
sub get_available_languages
{
# Return (and lazily cache) all language codes that ship with a model.
# Scans the dist's share directory for *.yali.gz files; each file stem is
# an ISO 639-3 code, recorded in _language_model as code => model path.
my $self = shift;
# Get a module's shared files directory
if ( ! defined($self->_languages) ) {
my $dir = "share/";
# Fall back to ./share when the dist is not installed (e.g. a source
# checkout); File::ShareDir errors are deliberately ignored.
eval { $dir = File::ShareDir::dist_dir('Lingua-YALI'); };
my @languages = ();
for my $file (File::Glob::bsd_glob($dir . "/*.yali.gz")) {
my $language = $file;
# Strip the directory prefix (plus path separator) and the model
# extension, leaving just the language code.
$language =~ s/\Q$dir\E.//;
$language =~ s/.yali.gz//;
push(@languages, $language);
$self->{_language_model}->{$language} = $file;
}
$self->_languages(\@languages);
# print STDERR join("\t", @languages), "\n";
}
return $self->_languages;
}
# for lang in `ls lib/auto/Lingua/YALI/ | cut -f1 -d.`; do name=`webAPI.sh GET $lang name | cut -f3-`; echo -e "=item * $lang - $name\n"; done
1;
__END__
=pod
=encoding UTF-8
=head1 NAME
Lingua::YALI::LanguageIdentifier - Module for language identification.
=head1 VERSION
version 0.015
=head1 SYNOPSIS
This module is for language identification and can identify 122 languages.
use Lingua::YALI::LanguageIdentifier;
# create identifier and register languages
my $identifier = Lingua::YALI::LanguageIdentifier->new();
$identifier->add_language("ces", "eng")
# identify string
my $result = $identifier->identify_string("CPAN, the Comprehensive Perl Archive Network, is an archive of modules written in Perl.");
print "The most probable language is " . $result->[0]->[0] . ".\n";
# prints out The most probable language is eng.
More examples is presented in L<Lingua::YALI::Examples|Lingua::YALI::Examples>.
=head1 METHODS
=head2 add_language
my $added_languages = $identifier->add_language(@languages)
Registers new languages C<@languages> for identification and returns
the amount of newly added languages. Languages are identified by their
ISO 639-3 code.
It croaks when unsupported language is used.
print $identifier->add_language("ces", "deu", "eng") . "\n";
# prints out 3
print $identifier->add_language("ces", "slk") . "\n";
# prints out 1
=head2 remove_language
my $removed_languages = $identifier->remove_language(@languages)
Removes languages C<@languages> and returns the amount of removed languages.
It croaks when unsupported language is used.
print $identifier->add_language("ces", "deu", "eng")
# prints out 3
print $identifier->remove_language("ces", "slk") . "\n";
# prints out 1
print $identifier->remove_language("ces", "slk") . "\n";
# prints out 0
=head2 get_languages
my \@languages = $identifier->get_languages();
Returns all registered languages.
=head2 get_available_languages
my \@languages = $identifier->get_available_languages();
Returns all available languages. Currently there is 122 languages (L</LANGUAGES>).
=head2 identify_file
my $result = $identifier->identify_file($file)
Identifies language for file C<$file>.
For more details look at method L<Lingua::YALI::Identifier/identify_file>.
=head2 identify_string
my $result = $identifier->identify_string($string)
Identifies language for string C<$string>.
For more details look at method L<Lingua::YALI::Identifier/identify_string>.
=head2 identify_handle
my $result = $identifier->identify_handle($fh)
Identifies language for handle C<$fh>.
For more details look at method L<Lingua::YALI::Identifier/identify_handle>.
=head1 LANGUAGES
More details about supported languages may be found at L<http://ufal.mff.cuni.cz/~majlis/w2c/download.html>.
=over
=item * afr - Afrikaans
=item * als - Tosk Albanian
=item * amh - Amharic
=item * ara - Arabic
=item * arg - Aragonese
=item * arz - Egyptian Arabic
=item * ast - Asturian
=item * aze - Azerbaijani
=item * bcl - Central Bicolano
=item * bel - Belarusian
=item * ben - Bengali
=item * bos - Bosnian
=item * bpy - Bishnupriya
=item * bre - Breton
=item * bug - Buginese
=item * bul - Bulgarian
=item * cat - Catalan
=item * ceb - Cebuano
=item * ces - Czech
=item * chv - Chuvash
=item * cos - Corsican
=item * cym - Welsh
=item * dan - Danish
=item * deu - German
=item * diq - Dimli (individual language)
=item * ell - Modern Greek (1453-)
=item * eng - English
=item * epo - Esperanto
=item * est - Estonian
=item * eus - Basque
=item * fao - Faroese
=item * fas - Persian
=item * fin - Finnish
=item * fra - French
=item * fry - Western Frisian
=item * gan - Gan Chinese
=item * gla - Scottish Gaelic
=item * gle - Irish
=item * glg - Galician
=item * glk - Gilaki
=item * guj - Gujarati
=item * hat - Haitian
=item * hbs - Serbo-Croatian
=item * heb - Hebrew
=item * hif - Fiji Hindi
=item * hin - Hindi
=item * hrv - Croatian
=item * hsb - Upper Sorbian
=item * hun - Hungarian
=item * hye - Armenian
=item * ido - Ido
=item * ina - Interlingua (International Auxiliary Language Association)
=item * ind - Indonesian
=item * isl - Icelandic
=item * ita - Italian
=item * jav - Javanese
=item * jpn - Japanese
=item * kan - Kannada
=item * kat - Georgian
=item * kaz - Kazakh
=item * kor - Korean
=item * kur - Kurdish
=item * lat - Latin
=item * lav - Latvian
=item * lim - Limburgan
=item * lit - Lithuanian
=item * lmo - Lombard
=item * ltz - Luxembourgish
=item * mal - Malayalam
=item * mar - Marathi
=item * mkd - Macedonian
=item * mlg - Malagasy
=item * mon - Mongolian
=item * mri - Maori
=item * msa - Malay (macrolanguage)
=item * mya - Burmese
=item * nap - Neapolitan
=item * nds - Low German
=item * nep - Nepali
=item * new - Newari
=item * nld - Dutch
=item * nno - Norwegian Nynorsk
=item * nor - Norwegian
=item * oci - Occitan (post 1500)
=item * oss - Ossetian
=item * pam - Pampanga
=item * pms - Piemontese
=item * pnb - Western Panjabi
=item * pol - Polish
=item * por - Portuguese
=item * que - Quechua
=item * ron - Romanian
=item * rus - Russian
=item * sah - Yakut
=item * scn - Sicilian
=item * sco - Scots
=item * slk - Slovak
=item * slv - Slovenian
=item * spa - Spanish
=item * sqi - Albanian
=item * srp - Serbian
=item * sun - Sundanese
=item * swa - Swahili (macrolanguage)
=item * swe - Swedish
=item * tam - Tamil
=item * tat - Tatar
=item * tel - Telugu
=item * tgk - Tajik
=item * tgl - Tagalog
=item * tha - Thai
=item * tur - Turkish
=item * ukr - Ukrainian
=item * urd - Urdu
=item * uzb - Uzbek
=item * vec - Venetian
=item * vie - Vietnamese
=item * vol - Volapük
=item * war - Waray (Philippines)
=item * wln - Walloon
=item * yid - Yiddish
=item * yor - Yoruba
=item * zho - Chinese
=back
=head1 SEE ALSO
=over
=item * Identifier for own models is L<Lingua::YALI::Identifier|Lingua::YALI::Identifier>.
=item * There is also command line tool L<yali-language-identifier|Lingua::YALI::yali-language-identifier> with similar functionality.
=item * Source codes are available at L<https://github.com/martin-majlis/YALI>.
=back
=head1 AUTHOR
Martin Majlis <martin@majlis.cz>
=head1 COPYRIGHT AND LICENSE
This software is Copyright (c) 2012 by Martin Majlis.
This is free software, licensed under:
The (three-clause) BSD License
=cut
| gitpan/Lingua-YALI | lib/Lingua/YALI/LanguageIdentifier.pm | Perl | bsd-3-clause | 8,599 |
use strict;
use warnings;
package Net::Amazon::SignatureVersion4;
{
$Net::Amazon::SignatureVersion4::VERSION = '0.006';
}
use MooseX::App qw(Config);
use Digest::SHA qw(sha256_hex hmac_sha256_hex hmac_sha256 hmac_sha256_base64);
use POSIX qw(strftime);
use URI::Encode;
use HTTP::Date;
use 5.010;
# ABSTRACT: Signs requests using Amazon's Signature Version 4.
# AWS access key identifier used in the credential scope.
option 'Access_Key_Id' => (
is => 'rw',
isa => 'Str',
reader => 'get_Access_Key_ID',
predicate => 'has_Access_Key_ID',
writer => 'set_Access_Key_ID',
);
# AWS secret key; seed of the derived signing key chain.
option 'Secret_Access_Key' => (
is => 'rw',
isa => 'Str',
reader => 'get_Secret_Access_Key',
predicate => 'has_Secret_Access_Key',
writer => 'set_Secret_Access_Key',
);
# Region component of the credential scope.
option 'region' => (
is => 'rw',
isa => 'Str',
writer => 'set_region',
reader => 'get_region',
default => 'us-east-1',
);
# The HTTP::Request to be signed.
option 'request' => (
is => 'rw',
isa => 'Object',
writer => 'set_request',
reader => 'get_request',
);
# Service component of the credential scope (e.g. 'glacier').
option 'service' => (
is => 'rw',
isa => 'Str',
writer => 'set_service',
reader => 'get_service',
);
# Request timestamp (YYYYMMDDTHHMMSSZ), derived from the Date header.
option 'time' => (
is => 'rw',
isa => 'Str',
writer => 'set_time',
reader => 'get_time',
);
# Request date (YYYYMMDD), derived from the Date header.
option 'date_stamp' => (
is => 'rw',
isa => 'Str',
writer => 'set_date_stamp',
reader => 'get_date_stamp',
);
# Semicolon-joined list of headers included in the signature, recorded by
# get_canonical_request().
option 'signed_headers' => (
is => 'rw',
isa => 'Str',
writer => 'set_signed_headers',
reader => 'get_signed_headers',
);
sub get_authorized_request{
    # Return the stored HTTP::Request with the computed SigV4 Authorization
    # header attached (the stored request object is modified in place).
    my ($self) = @_;
    my $signed_request = $self->get_request();
    $signed_request->header( Authorization => $self->get_authorization() );
    return $signed_request;
}
sub get_authorization{
# Compute the SigV4 Authorization header value: derive the signing key,
# sign the string-to-sign, and assemble the credential scope, signed
# header list, and hex signature.
my $self=shift;
my %dk=$self->get_derived_signing_key();
my $sts=$self->get_string_to_sign();
# Strip CRs so the signed text is LF-separated (see get_string_to_sign).
$sts=~tr/\r//d;
my $signature=hmac_sha256_hex($sts,$dk{'kSigning'});
return "AWS4-HMAC-SHA256 Credential=".$self->get_Access_Key_ID()."/".$self->get_date_stamp()."/".$self->get_region()."/".$self->get_service()."/aws4_request, SignedHeaders=".$self->get_signed_headers().", Signature=$signature";
}
sub get_derived_signing_key{
# Derive the SigV4 signing key chain
# kSecret -> kDate -> kRegion -> kService -> kSigning
# and return every intermediate key, keyed by name.
my $self=shift;
$self->get_canonical_request(); # This is a hack to get the date set before using it to derive the signing key.
my %rv=();
$rv{'kSecret'}="AWS4".$self->get_Secret_Access_Key();
#say("kSecret: ".unpack('H*',$rv{'kSecret'}));
$rv{'kDate'}=hmac_sha256($self->get_date_stamp(),$rv{'kSecret'});
#say("kDate: ".unpack('H*',$rv{'kDate'}));
$rv{'kRegion'}=hmac_sha256($self->get_region(),$rv{'kDate'});
#say("kRegion: ".unpack('H*',$rv{'kRegion'}));
$rv{'kService'}=hmac_sha256($self->get_service(),$rv{'kRegion'});
#say("kService: ".unpack('H*',$rv{'kService'}));
$rv{'kSigning'}=hmac_sha256("aws4_request",$rv{'kService'});
#say("kSigning: ".unpack('H*',$rv{'kSigning'}));
return %rv;
}
sub get_string_to_sign{
# Assemble the SigV4 "string to sign": algorithm, request timestamp,
# credential scope, and the hex SHA-256 of the canonical request.
# NOTE(review): lines are joined with \r\n here; the CRs are stripped in
# get_authorization() before signing, so the signed text ends up
# LF-separated as AWS expects — keep the two in sync when modifying.
my $self=shift;
my $creq=$self->get_canonical_request();
$creq=~tr/\r//d;
# Implicit return: the sub's value is this assignment's value.
my $StringToSign="AWS4-HMAC-SHA256\r\n".
$self->get_time()."\r\n".
$self->get_date_stamp()."/".
$self->get_region()."/".
$self->get_service()."/aws4_request\r\n".
sha256_hex($creq);
}
sub get_canonical_request{
# Build the SigV4 canonical request from the stored HTTP::Request:
# METHOD, normalized path, sorted+encoded query string, canonical headers,
# signed-header list, and hex SHA-256 of the body, CRLF-joined.
# Side effects: records the request date/time (from the Date header) and
# the signed-header list on $self.
my $self=shift;
use Data::Dumper;
my $method;
my $full_uri="";
my $version;
my $canonical_query_string="";
my %headers=();
# Collect headers keyed by lowercased name, keeping all values of
# multi-valued headers; the Date header also sets the signing timestamp.
foreach my $name ( $self->get_request()->header_field_names() ){
my @value=$self->get_request()->header($name);
# BUGFIX: was a bitwise '&' between the two defined() results; '&&' is
# what was meant (same result for these boolean values, but correct now).
next unless (defined $name && defined $value[0]);
if (lc($name) eq 'date'){
my $time=str2time($value[0]);
$self->set_date_stamp(strftime("%Y%m%d", gmtime($time)));
$self->set_time(strftime("%Y%m%dT%H%M%SZ",gmtime($time)));
}
foreach my $value (@value){
# Trim a single trailing space from the header value.
local $/ = " ";
chomp($value);
if (defined $headers{lc($name)}){
push @{$headers{lc($name)}}, $value;
}else{
$headers{lc($name)}=[$value ];
}
}
}
# Reduce the URI to its absolute path, splitting off the query string.
$full_uri=$self->get_request()->uri();
$full_uri =~ s@^(http|https)://.*?/@/@;
if ($full_uri=~m/(.*?)\?(.*)/){
$full_uri=$1;
$canonical_query_string=$2;
}
my @canonical_query_list;
if ( defined $canonical_query_string){
if ($canonical_query_string=~m/(.*?)\s.*/){
$canonical_query_string=$1
}
@canonical_query_list=split(/\&/,$canonical_query_string);
}
# Rebuild the query string with parameters sorted and AWS-style encoded.
$canonical_query_string="";
foreach my $param (sort @canonical_query_list){
(my $name, my $value)=split(/=/, $param);
$name="" unless (defined $name);
$value="" unless (defined $value);
$canonical_query_string=$canonical_query_string._encode($name)."="._encode($value)."&";
}
$canonical_query_string=substr($canonical_query_string, 0, -1) unless ($canonical_query_string eq "");
# Normalize the path: collapse duplicate slashes, then resolve "." and
# ".." segments with a stack, preserving a trailing slash if present.
$full_uri=~tr/\///s;
my $ends_in_slash=0;
if ($full_uri=~m/\w\/$/){
$ends_in_slash=1;
}
my @uri_source=split /\//, $full_uri;
my @uri_stack;
foreach my $path_component (@uri_source){
if ($path_component =~ m/^\.$/){
sleep 0;
}elsif ($path_component =~ m/^\.\.$/){
# BUGFIX: this pattern was m/^..$/, where the unescaped dots match ANY
# two characters — every two-character path segment (e.g. "v1") was
# treated as ".." and silently popped, corrupting the canonical path.
pop @uri_stack;
}else{
push @uri_stack, $path_component;
}
}
$full_uri="/";
foreach my $path_component (@uri_stack){
$full_uri=$full_uri."$path_component/";
}
$full_uri=~tr/\///s;
chop $full_uri unless ( $full_uri eq "/" );
if ($ends_in_slash){
$full_uri=$full_uri."/";
}
# Canonical headers: lowercased names in sorted order, each with its
# sorted values comma-joined; also accumulate the signed-header list.
my $CanonicalHeaders="";
my $SignedHeaders="";
foreach my $header ( sort keys %headers ){
$CanonicalHeaders=$CanonicalHeaders.lc($header).':';
foreach my $element(sort @{$headers{$header}}){
$CanonicalHeaders=$CanonicalHeaders.($element).",";
}
$CanonicalHeaders=substr($CanonicalHeaders, 0, -1);
$CanonicalHeaders=$CanonicalHeaders."\r\n";
$SignedHeaders=$SignedHeaders.lc($header).";";
}
$SignedHeaders=substr($SignedHeaders, 0, -1);
$self->set_signed_headers($SignedHeaders);
my $CanonicalRequest =
$self->get_request()->method() . "\r\n" .
$full_uri . "\r\n" .
$canonical_query_string . "\r\n" .
$CanonicalHeaders . "\r\n" .
$SignedHeaders . "\r\n" .
sha256_hex($self->get_request()->content());
return $CanonicalRequest;
}
sub _encode{
    # Percent-encode a string the way AWS SigV4 expects: run it through
    # URI::Encode (no double-encoding), then force-escape the extra
    # characters AWS requires that URI::Encode leaves alone.
    my ($raw) = @_;
    my $encoder = URI::Encode->new({ double_encode => 0 });
    my $encoded = $encoder->encode($raw);

    # Characters AWS wants escaped even though URI::Encode passes them through.
    my %aws_escape = (
        '+' => '%20', '/' => '%2F', ',' => '%2C', '?' => '%3F',
        ';' => '%3B', ':' => '%3A', ']' => '%5D', '[' => '%5B',
        '@' => '%40', '#' => '%23', '$' => '%24',
    );
    $encoded =~ s/([+\/,?;:\]\[\@#\$])/$aws_escape{$1}/g;

    return $encoded;
}
1;
__END__
=pod
=head1 NAME
Net::Amazon::SignatureVersion4 - Signs requests using Amazon's Signature Version 4.
=head1 VERSION
version 0.006
=head1 SYNOPSIS
use Net::Amazon::SignatureVersion4;
my $sig=new Net::Amazon::SignatureVersion4();
my $hr=HTTP::Request->new('GET','http://glacier.us-west-2.amazonaws.com/-/vaults', [
'Host', 'glacier.us-west-2.amazonaws.com',
'Date', strftime("%Y%m%dT%H%M%SZ",gmtime(time())) ,
'X-Amz-Date', strftime("%Y%m%dT%H%M%SZ",gmtime(time())) ,
'x-amz-glacier-version', '2012-06-01',
]);
$hr->protocol('HTTP/1.1');
$sig->set_request($hr); # $hr is the HTTP::Request built above
$sig->set_region('us-west-2');
$sig->set_service('glacier'); # Must be service you are accessing
$sig->set_Access_Key_ID('AKIDEXAMPLE'); # Replace with your ACCESS_KEY_ID
$sig->set_Secret_Access_Key('wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY'); # Replace with your SECRET_KEY
my $authorized_request=$sig->get_authorized_request();
my $agent = LWP::UserAgent->new( agent => 'perl-Net::Amazon::SignatureVersion4-Testing');
my $response = $agent->request($authorized_request);
if ($response->is_success) {
say("List of vaults");
say($response->decoded_content); # or whatever
say("Connected to live server");
}else {
say($response->status_line);
use Data::Dumper;
say("Failed Response");
say(Data::Dumper->Dump([ $response ]));
}
=head1 DESCRIPTION
This module implements Amazon's Signature Version 4 as documented at
http://docs.amazonwebservices.com/general/latest/gr/signature-version-4.html
The tests for this module are taken from the test suite provided by
Amazon. This implementation does not yet pass all the tests. The
following test is failing:
get-header-value-multiline: Amazon did not supply enough files for
this test. The test may be run, but the results can not be validated.
=head1 METHODS
=head2 get_authorized_request
This method does most of the work for the user. After setting the
request, region, service, access key, and secret access key, this
method will return a copy of the request headers with
authorization.
=head2 get_authorization
This method gets the authorization line that should be added to
the headers. It is likely never to be used by the end user. It
is here as a convenient test.
=head2 get_derived_signing_key
This method implements the derived signing key required for
version 4. It is likely never to be used by the end user. It is
here as a convenient test.
=head2 get_string_to_sign
This method returns the string to sign. It is likely never to be
used by the end user. It is here as a convenient test.
=head2 get_canonical_request
This method returns the canonical request. It is likely never to
be used by the end user. It is here as a convenient test.
=head1 AUTHOR
Charles A. Wimmer <charles@wimmer.net>
=head1 COPYRIGHT AND LICENSE
This software is Copyright (c) 2012 by Charles A. Wimmer.
This is free software, licensed under:
The (three-clause) BSD License
=cut
| gitpan/Net-Amazon-SignatureVersion4 | lib/Net/Amazon/SignatureVersion4.pm | Perl | bsd-3-clause | 10,122 |
# SNMP::Info::MAU - Media Access Unit - RFC 2668
# $Id$
#
# Copyright (c) 2008 Max Baker changes from version 0.8 and beyond.
#
# Copyright (c) 2002,2003 Regents of the University of California
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the University of California, Santa Cruz nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
package SNMP::Info::MAU;
use strict;
use Exporter;
use SNMP::Info;
# Classic (pre-'our') package-variable setup, consistent with the rest
# of the SNMP::Info distribution.
@SNMP::Info::MAU::ISA = qw/SNMP::Info Exporter/;
@SNMP::Info::MAU::EXPORT_OK = qw//;
use vars qw/$VERSION %MIBS %FUNCS %GLOBALS %MUNGE/;
$VERSION = '3.29';
# MIBs this class needs; the value is a leaf used to verify the MIB loaded.
%MIBS = ( 'MAU-MIB' => 'mauMod', 'IANA-MAU-MIB' => 'dot3MauType' );
%GLOBALS = ();
# Method-name => MIB-leaf table mappings; SNMP::Info presumably turns
# each entry into an autoloaded table method (standard for its subclasses).
%FUNCS = (
# Interface MAU Table
'mau_index' => 'ifMauIfIndex',
'mau_link' => 'ifMauType',
'mau_status' => 'ifMauStatus',
'mau_up' => 'ifMauMediaAvailable',
'mau_type' => 'ifMauTypeList',
'mau_type_admin' => 'ifMauDefaultType',
# Interface Auto-Negotiation Table
'mau_auto' => 'ifMauAutoNegSupported',
'mau_autostat' => 'ifMauAutoNegAdminStatus',
'mau_autosent' => 'ifMauAutoNegCapAdvertised',
'mau_autorec' => 'ifMauAutoNegCapReceived',
);
# Post-fetch value filters: capability lists arrive as integers and are
# rendered as 32-bit bit strings by munge_int2bin() below.
%MUNGE = (
# Inherit all the built in munging
%SNMP::Info::MUNGE,
# Add ones for our class
'mau_type' => \&munge_int2bin,
'mau_autosent' => \&munge_int2bin,
'mau_autorec' => \&munge_int2bin,
);
sub munge_int2bin {
    # Render an integer as a 32-character big-endian bit string
    # (e.g. 5 -> "00000000000000000000000000000101").
    # Returns nothing when given undef.
    my ($value) = @_;
    return unless defined $value;
    my $packed = pack( 'N', $value );
    return unpack( 'B32', $packed );
}
sub _isfullduplex {
    # True (1) when any full-duplex capability bit is set in a 32-char
    # bit string produced by munge_int2bin(); see the mau_type() POD for
    # the bit assignments.  Bits 11,13,16,18,20 are the full-duplex types.
    my ( $mau, $bit_string ) = @_;
    my @full_bits = qw/11 13 16 18 20/;
    return ( grep { substr( $bit_string, 32 - $_, 1 ) eq '1' } @full_bits )
        ? 1
        : 0;
}
sub _ishalfduplex {
    # True (1) when any half-duplex capability bit is set in a 32-char
    # bit string produced by munge_int2bin(); see the mau_type() POD for
    # the bit assignments.  Bits 10,12,15,17,19 are the half-duplex types.
    my ( $mau, $bit_string ) = @_;
    my @half_bits = qw/10 12 15 17 19/;
    return ( grep { substr( $bit_string, 32 - $_, 1 ) eq '1' } @half_bits )
        ? 1
        : 0;
}
# Map the speed token captured from a dot3MauType<speed>Base... type name
# to a human-readable speed string (used by mau_i_speed_admin() and
# mau_set_i_duplex_admin()).
my %_mau_i_speed_map = (
'10' => '10 Mbps',
'100' => '100 Mbps',
'1000' => '1.0 Gbps',
'10Gig' => '10 Gbps',
);
sub mau_i_speed_admin {
    # Return a hashref of ifIndex => admin speed string for every port in
    # the MAU table.  A default type OID of .0.0 means auto-negotiation;
    # otherwise the speed token in the translated dot3MauType...Base name
    # is mapped through %_mau_i_speed_map.  Ports whose type cannot be
    # translated or mapped are omitted.
    my $mau = shift;

    my $index_by_port = $mau->mau_index();
    my $admin_by_port = $mau->mau_type_admin();

    my %speed_admin;
    while ( my ( $port, $type_oid ) = each %$admin_by_port ) {
        my $ifindex = $index_by_port->{$port};
        next unless defined $ifindex;

        my $type_name = &SNMP::translateObj($type_oid);
        next unless defined $type_name;

        if ( $type_oid eq '.0.0' ) {
            $speed_admin{$ifindex} = 'auto';
        }
        elsif ($type_name =~ /^dot3MauType(.*)Base/
            && $_mau_i_speed_map{$1} )
        {
            $speed_admin{$ifindex} = $_mau_i_speed_map{$1};
        }
    }
    return \%speed_admin;
}
sub mau_i_duplex {
    # Return a hashref of ifIndex => operational duplex ('full'/'half'),
    # derived from the in-use MAU type name: names ending in FD are full
    # duplex, HD are half duplex.  Ports with untranslatable or
    # duplex-less types are omitted.
    my $mau = shift;

    my $index_by_port = $mau->mau_index();
    my $link_by_port  = $mau->mau_link();

    my %duplex_of;
    while ( my ( $port, $link_oid ) = each %$link_by_port ) {
        my $ifindex = $index_by_port->{$port};
        next unless defined $ifindex;

        my $link_name = &SNMP::translateObj($link_oid);
        next unless defined $link_name;

        if ( $link_name =~ /fd$/i ) {
            $duplex_of{$ifindex} = 'full';
        }
        elsif ( $link_name =~ /hd$/i ) {
            $duplex_of{$ifindex} = 'half';
        }
    }
    return \%duplex_of;
}
sub mau_i_duplex_admin {
    # Return a hashref of ifIndex => admin duplex setting
    # ('auto'/'full'/'half'), from ifMauAutoNegAdminStatus and
    # ifMauDefaultType.  Accepts an optional $partial ifIndex to limit
    # the walk.
    my $mau     = shift;
    my $partial = shift;

    my $mau_index = $mau->mau_index() || {};
    if ($partial) {
        # $partial arrives as an ifIndex; the MAU tables are keyed by MAU
        # index, so translate it before the partial fetches below.
        my %rev_mau_index = reverse %$mau_index;
        $partial = $rev_mau_index{$partial};
    }
    my $mau_autostat   = $mau->mau_autostat($partial)   || {};
    my $mau_type_admin = $mau->mau_type_admin($partial) || {};

    # Older HP4000's don't implement ifMauDefaultType, but we can
    # figure out from ifMauAutoNegCapAdvertised what we'd like.
    # BUGFIX: both tables are defaulted to {} above, so the previous
    # !defined()/defined() checks were dead code and the fallback never
    # ran; test for empty hashes instead.
    if ( !%$mau_type_admin ) {
        if (%$mau_index) {
            return mau_i_duplex_admin_old( $mau, $mau_index, $mau_autostat );
        }
        return;
    }

    my %i_duplex_admin;
    foreach my $mau_port ( keys %$mau_type_admin ) {
        my $iid = $mau_index->{$mau_port};
        next unless defined $iid;

        # An enabled auto-negotiation admin status wins over any
        # configured default type.
        my $autostat = $mau_autostat->{$mau_port};
        if ( defined $autostat and $autostat =~ /enabled/i ) {
            $i_duplex_admin{$iid} = 'auto';
            next;
        }

        my $type_adminoid = $mau_type_admin->{$mau_port};
        my $type_admin    = &SNMP::translateObj($type_adminoid);
        next unless defined $type_admin;

        my $duplex;
        if ( $type_admin =~ /fd$/i ) {
            $duplex = 'full';
        }
        elsif ( $type_admin =~ /hd$/i ) {
            $duplex = 'half';
        }
        elsif ( $type_admin eq 'zeroDotZero' ) {
            $duplex = 'auto';
        }
        $i_duplex_admin{$iid} = $duplex if defined $duplex;
    }
    return \%i_duplex_admin;
}
sub mau_i_duplex_admin_old {
    # Fallback admin-duplex derivation for devices that lack
    # ifMauDefaultType: infer the setting per interface from the
    # advertised auto-negotiation capability bits.
    # Returns ifIndex => 'auto' | 'none' | 'full' | 'half'.
    my ( $mau, $mau_index, $mau_autostat ) = @_;

    my $interfaces   = $mau->interfaces();
    my $mau_autosent = $mau->mau_autosent();
    my %mau_reverse  = reverse %$mau_index;

    my %i_duplex_admin;
    foreach my $iid ( keys %$interfaces ) {
        my $mau_idx = $mau_reverse{$iid};
        next unless defined $mau_idx;

        # HP25xx has this value
        my $autostat = $mau_autostat->{$mau_idx};
        if ( defined $autostat and $autostat =~ /enabled/i ) {
            $i_duplex_admin{$iid} = 'auto';
            next;
        }

        my $advertised = $mau_autosent->{$mau_idx};
        next unless defined $advertised;

        if ( $advertised == 0 ) {
            # nothing advertised at all
            $i_duplex_admin{$iid} = 'none';
            next;
        }

        my $full = $mau->_isfullduplex($advertised);
        my $half = $mau->_ishalfduplex($advertised);
        if ( $full && !$half ) {
            $i_duplex_admin{$iid} = 'full';
        }
        elsif ($half) {
            $i_duplex_admin{$iid} = 'half';
        }
    }
    return \%i_duplex_admin;
}
# Set the administrative port speed ('10'/'100'/'1000'/'auto') for the
# interface with the given ifIndex, preserving the port's current duplex
# setting.  'auto' enables auto-negotiation; otherwise auto-negotiation
# is disabled and ifMauDefaultType is written with the matching
# dot3MauType OID.  Returns the SNMP set result, 1 if already in auto,
# or nothing on bad arguments.
sub mau_set_i_speed_admin {
my $mau = shift;
my $speed = shift;
my $iid = shift;
my $rv;
$speed = lc($speed);
# NOTE(review): this is a substring match, so e.g. '10000' would pass
# validation as '1000' -- confirm callers only pass the listed values.
if ( !( $speed =~ /(10|100|1000|auto)/io and $iid =~ /\d+/o ) ) {
return;
}
# map a speed value to an integer the switch understands based on duplex
my %speeds;
# 10 = dot3MauType10BaseTHD, 15 = dot3MauType100BaseTXHD
# 29 = dot3MauType1000BaseTHD from IANA-MAU-MIB
%{ $speeds{'HD'} } = qw/10 10 100 15 1000 29/; # half duplex settings
# 11 = dot3MauType10BaseTFD, 16 = dot3MauType100BaseTXFD
# 30 = dot3MauType1000BaseTFD from IANA-MAU-MIB
%{ $speeds{'FD'} } = qw/10 11 100 16 1000 30/; # full duplex settings
# current auto-negotiation state for MAU unit 1 of this interface.
my $myhash = $mau->mau_autostat;
my $key = $iid . '.1';
my $i_autoneg = $myhash->{$key};
my $myduplex;
# translate the current default type OID to a name to recover the
# configured duplex (trailing 'HD'/'FD' of the type name).
my $i_mau_def_type
= &SNMP::translateObj( $mau->mau_type_admin($iid)->{ $iid . '.1' } );
if ( $i_mau_def_type =~ /^dot3MauType.*Base.*(..)$/
&& ( $1 eq "HD" or $1 eq "FD" ) )
{
$myduplex = $1;
}
else {
# this is not a valid speed known, assuming auto
# NOTE(review): %speeds has no 'auto' key, so the OID built below ends
# with an undef suffix in this case -- confirm intended behavior.
$myduplex = "auto";
}
if ( $speed eq "auto" && $i_autoneg eq "enabled" ) {
return (1);
}
elsif ( $speed eq "auto" ) {
$rv = $mau->set_mau_autostat( 'enabled', $iid . '.1' );
return ($rv);
}
else {
# fixed speed requested: turn auto-negotiation off first, then write
# the explicit MAU type (suffix indexes the ifMauDefaultType OID).
if ( $i_autoneg eq "enabled" ) {
$mau->set_mau_autostat( 'disabled', $iid . '.1' );
}
$rv
= $mau->set_mau_type_admin(
'.1.3.6.1.2.1.26.4.' . $speeds{$myduplex}{$speed},
$iid . '.1' );
return ($rv);
}
}
# Set the administrative duplex ('full'/'half'/'auto') for the interface
# with the given ifIndex, preserving the port's current speed.  'auto'
# enables auto-negotiation; otherwise ifMauDefaultType is written with
# the dot3MauType OID matching the current speed and requested duplex.
# Returns the SNMP set result, 1 if already in auto, or nothing on bad
# arguments.
sub mau_set_i_duplex_admin {
my $mau = shift;
my $duplex = shift;
my $iid = shift;
my $rv;
$duplex = lc($duplex);
if ( !( $duplex =~ /(full|half|auto)/i and $iid =~ /\d+/ ) ) {
return;
}
# map a textual duplex setting to an integer value the switch will understand
my %duplexes;
%{ $duplexes{'10'} } = qw/full 11 half 10/;
%{ $duplexes{'100'} } = qw/full 16 half 15/;
%{ $duplexes{'1000'} } = qw/full 30 half 29/;
# current port values:
my $myhash = $mau->mau_autostat;
my $key = $iid . '.1';
my $i_autoneg = $myhash->{$key};
# recover the current speed token from the translated default type name.
my $i_speed
= &SNMP::translateObj( $mau->mau_type_admin($iid)->{ $iid . '.1' } );
if ( $i_speed =~ /^dot3MauType(.*)Base/ && $_mau_i_speed_map{$1} ) {
$i_speed = $1;
}
else {
# this is not a valid speed setting, assuming auto
# NOTE(review): $i_speed keeps its untranslated value here, so the
# %duplexes lookup below would be undef -- only reachable if callers
# pass non-auto duplex on a port with an unknown type; confirm.
$duplex = "auto";
}
if ( $duplex eq "auto" && $i_autoneg eq "enabled" ) {
return (1);
}
elsif ( $duplex eq "auto" ) {
$rv = $mau->set_mau_autostat( 'enabled', $iid . '.1' );
return ($rv);
}
else {
# Can't always do it here, if not...
if ( $i_autoneg eq "enabled"
&& defined( $duplexes{$i_speed}{$duplex} ) )
{
$mau->set_mau_autostat( 'disabled', $iid . '.1' );
}
$rv
= $mau->set_mau_type_admin(
'.1.3.6.1.2.1.26.4.' . $duplexes{$i_speed}{$duplex},
$iid . '.1' );
return ($rv);
}
}
#
# mau_set_i_speed_duplex_admin() accepts the following values for speed/duplex
#
# auto/auto (special case)
# 10/half
# 10/full
# 100/half
# 100/full
# 1000/half
# 1000/full
# Set both the administrative speed and duplex for the interface with
# the given ifIndex in one call.  If either argument is 'auto',
# auto-negotiation is enabled and nothing else is written; otherwise
# ifMauDefaultType is set to the matching dot3MauType OID and
# auto-negotiation is disabled.  Returns the last SNMP set result, or
# the string "bad arguments" on validation failure.
sub mau_set_i_speed_duplex_admin {
my $mau = shift;
my $speed = shift;
my $duplex = shift;
my $iid = shift;
my $rv;
$speed = lc($speed);
$duplex = lc($duplex);
# NOTE(review): unanchored substring matches -- e.g. '10000' would pass
# the speed check; confirm callers only pass the documented values.
if ( ( $speed !~ m/auto|10|100|1000/io )
or ( $duplex !~ m/full|half|auto/io )
or ( $iid !~ /\d+/ ) )
{
return ("bad arguments");
}
# map input speed and duplex parameters to 'mau_type_admin' settings
# From IANA-MAU-MIB
# 11 = dot3MauType10BaseTFD, 10 = dot3MauType10BaseTHD,
# 16 = dot3MauType100BaseTXFD, 15 = dot3MauType100BaseTXHD
# 30 = dot3MauType1000BaseTFD, 29 = dot3MauType1000BaseTHD
my %params;
%{ $params{'10'} } = qw/full 11 half 10/;
%{ $params{'100'} } = qw/full 16 half 15/;
%{ $params{'1000'} } = qw/full 30 half 29/;
# if given "auto/auto", set 'mau_autostat' to "enable" and exit
if ( ( $speed eq "auto" ) or ( $duplex eq "auto" ) ) {
$rv = $mau->set_mau_autostat( 'enabled', $iid . '.1' );
return ($rv);
}
# NOTE(review): the explicit type is written before auto-negotiation is
# disabled; ordering may matter on some hardware -- confirm.
$rv
= $mau->set_mau_type_admin(
'.1.3.6.1.2.1.26.4.' . $params{$speed}{$duplex},
$iid . '.1' );
$rv = $mau->set_mau_autostat( 'disabled', $iid . '.1' );
return ($rv);
}
1;
__END__
=head1 NAME
SNMP::Info::MAU - SNMP Interface to Medium Access Unit (MAU) MIB (RFC 2668)
via SNMP
=head1 AUTHOR
Max Baker
=head1 SYNOPSIS
my $mau = new SNMP::Info (
AutoSpecify => 1,
Debug => 1,
DestHost => 'hpswitch',
Community => 'public',
Version => 2
);
my $class = $mau->class();
print " Using device sub class : $class\n";
=head1 DESCRIPTION
SNMP::Info::MAU is a subclass of SNMP::Info that supplies access to the
F<MAU-MIB> (RFC 2668). This MIB is sometimes implemented on Layer 2 network
devices like HP Switches. MAU = Media Access Unit.
The MAU table contains link and duplex info for the port itself and the device
connected to that port.
Normally you use or create a subclass of SNMP::Info that inherits this one.
Do not use directly.
For debugging purposes call the class directly as you would SNMP::Info
my $mau = new SNMP::Info::MAU(...);
=head2 Inherited Classes
None.
=head2 Required MIBs
=over
=item F<MAU-MIB>

=item F<IANA-MAU-MIB>
=back
=head1 GLOBALS
These are methods that return scalar value from SNMP
=over
=item None
=back
=head1 TABLE METHODS
These are methods that return tables of information in the form
of a reference to a hash.
=over
=item $mau->mau_i_duplex()
Parses mau_index and mau_link to return the duplex information for
interfaces.
=item $mau->mau_i_duplex_admin()
Parses C<mau_index>,C<mau_autostat>,C<mau_type_admin> in
order to find the admin duplex setting for all the interfaces.
Returns either (auto,full,half).
=item $mau->mau_i_duplex_admin_old()
Called by mau_i_duplex_admin() if C<mau_type_admin> is empty.
Parses C<mau_index>,C<mau_autostat>,C<mau_autosent> in
order to find the admin duplex setting for all the interfaces.
Returns either (auto,none,full,half).
=item $mau->mau_i_speed_admin()
Returns admin speed setting for all the interfaces.
=back
=head2 MAU Interface Table Methods
=over
=item $mau->mau_index() - Returns a list of interfaces
and their index in the MAU IF Table.
(C<ifMauIfIndex>)
=item $mau->mau_link() - Returns the type of Media Access used.
This is essentially the type of link in use.
eg. dot3MauType100BaseTXFD - 100BaseT at Full Duplex
(C<ifMauType>)
=item $mau->mau_status() - Returns the admin link condition as
1 - other
2 - unknown
3 - operational
4 - standby
5 - shutdown
6 - reset
Use 5 and !5 to see if the link is up or down on the admin side.
(C<ifMauStatus>)
=item $mau->mau_up() - Returns the current link condition
(C<ifMauMediaAvailable>)
=item $mau->mau_type() - Returns a 32bit string reporting the capabilities
of the port from a MAU POV.
Directly from F<MAU-MIB> :
Bit Capability
0 other or unknown
1 AUI
2 10BASE-5
3 FOIRL
4 10BASE-2
5 10BASE-T duplex mode unknown
6 10BASE-FP
7 10BASE-FB
8 10BASE-FL duplex mode unknown
9 10BROAD36
10 10BASE-T half duplex mode
11 10BASE-T full duplex mode
12 10BASE-FL half duplex mode
13 10BASE-FL full duplex mode
14 100BASE-T4
15 100BASE-TX half duplex mode
16 100BASE-TX full duplex mode
17 100BASE-FX half duplex mode
18 100BASE-FX full duplex mode
19 100BASE-T2 half duplex mode
20 100BASE-T2 full duplex mode
(C<ifMauTypeList>)
=item $mau->mau_type_admin()
(C<ifMauDefaultType>)
=item $mau->mau_auto() - Indicates whether or not auto-negotiation is
supported.
(C<ifMauAutoNegSupported>)
=item $mau->mau_autostat() - Returns status of auto-negotiation mode for
ports.
(C<ifMauAutoNegAdminStatus>)
=item $mau->mau_autosent() - Returns a 32 bit bit-string representing the
capabilities we are broadcasting on that port
Uses the same decoder as $mau->mau_type().
(C<ifMauAutoNegCapAdvertised>)
=item $mau->mau_autorec() - Returns a 32 bit bit-string representing the
capabilities of the device on the other end.
Uses the same decoder as $mau->mau_type().
(C<ifMauAutoNegCapReceived>)
=back
=head1 SET METHODS
These are methods that provide SNMP set functionality for overridden methods
or provide a simpler interface to complex set operations. See
L<SNMP::Info/"SETTING DATA VIA SNMP"> for general information on set
operations.
=over
=item $mau->mau_set_i_speed_admin(speed, ifIndex)
Sets port speed, must be supplied with speed and port C<ifIndex>.
Note that this method has some limitations since there is no way
to reliably set the port speed independently of the port duplex
setting on certain devices, notably the Cisco Cat4k series.
Speed choices are '10', '100', '1000', 'auto'.
=item $mau->mau_set_i_duplex_admin(duplex, ifIndex)
Sets port duplex, must be supplied with duplex and port C<ifIndex>.
Note that this method has some limitations since there is no way
to reliably set the port duplex independently of the port speed
setting on certain devices, notably the Cisco Cat4k series.
Duplex choices are 'auto', 'half', 'full'.
=item $mau->mau_set_i_speed_duplex_admin(speed, duplex, ifIndex)
Sets port speed and duplex settings, must be supplied with speed,
duplex and port C<ifIndex>.
Accepts the following values for speed and duplex:
Speed/Duplex
------------
auto/auto (this is a special case)
10/half
10/full
100/half
100/full
1000/half
1000/full
=back
=head1 Utility Functions
=over
=item munge_int2bin() - Unpacks an integer into a 32bit bit string.
=item $mau->_isfullduplex(bitstring)
Boolean. Checks to see if any of the full_duplex types from mau_type()
are high. Currently bits 11,13,16,18,20.
=item $mau->_ishalfduplex(bitstring)
Boolean. Checks to see if any of the half_duplex types from mau_type()
are high. Currently bits 10,12,15,17,19.
=back
=cut
| lucwillems/SNMP-INFO | Info/MAU.pm | Perl | bsd-3-clause | 18,528 |
#ExStart:1
# Example: upload a PDF to Aspose cloud storage and set a single custom
# document property on it via the Aspose.Pdf Cloud SDK.
use lib 'lib';
use strict;
use warnings;
use utf8;
use File::Slurp; # From CPAN
use JSON;
use AsposeStorageCloud::StorageApi;
use AsposeStorageCloud::ApiClient;
use AsposeStorageCloud::Configuration;
use AsposePdfCloud::PdfApi;
use AsposePdfCloud::ApiClient;
use AsposePdfCloud::Configuration;
use AsposePdfCloud::Object::DocumentProperty;

# Read app_sid / api_key from the shared JSON config file.
my $configFile = '../config/config.json';
my $configPropsText = read_file($configFile);
my $configProps = decode_json($configPropsText);

my $data_path = '../../../Data/';

$AsposePdfCloud::Configuration::app_sid = $configProps->{'app_sid'};
$AsposePdfCloud::Configuration::api_key = $configProps->{'api_key'};
$AsposePdfCloud::Configuration::debug = 1;

$AsposeStorageCloud::Configuration::app_sid = $configProps->{'app_sid'};
$AsposeStorageCloud::Configuration::api_key = $configProps->{'api_key'};

# Instantiate Aspose.Storage and Aspose.Pdf API SDK
my $storageApi = AsposeStorageCloud::StorageApi->new();
my $pdfApi = AsposePdfCloud::PdfApi->new();

# Set input file name and the property to create/update
my $name = 'Sample-Annotation.pdf';
my $propertyName = 'AsposeDev';

# BUGFIX: the DocumentProperty object was stored in an array and
# flattened into the argument list of PutSetProperty; hold the single
# object in a scalar instead.
my $documentPropertyBody = AsposePdfCloud::Object::DocumentProperty->new(
    'Name' => 'AsposeDev', 'Value' => 'Farooq Sheikh', 'BuiltIn' => 'False' );

# Upload file to aspose cloud storage
my $response = $storageApi->PutCreate(Path => $name, file => $data_path.$name);

# Invoke Aspose.Pdf Cloud SDK API to set a PDF document property
$response = $pdfApi->PutSetProperty(name=>$name, propertyName=>$propertyName, body=>$documentPropertyBody);

if($response->{'Status'} eq 'OK'){
    my $docProp = $response->{'DocumentProperty'};
    print "\n $docProp->{'Name'} :: $docProp->{'Value'}";
}
#ExEnd:1 | asposepdf/Aspose_Pdf_Cloud | Examples/Perl/Document-Properties/SetSingleDocumentProperty.pl | Perl | mit | 1,758 |
#!/usr/bin/perl -w
######################################################################
#
# process LNB data files, u01, u03, mpr and write data
# out as csv files.
#
# NOTES:
#
# z_cass or feeder table no = FADD/10000
#
######################################################################
#
use strict;
#
use Getopt::Std;
use File::Find;
use File::Path qw(mkpath);
#
######################################################################
#
# logical constants
#
use constant TRUE => 1;
use constant FALSE => 0;
#
# output types
#
use constant PROD_COMPLETE => 3;
use constant PROD_COMPLETE_LATER => 4;
use constant DETECT_CHANGE => 5;
use constant MANUAL_CLEAR => 11;
use constant TIMER_NOT_RUNNING => 12;
use constant AUTO_CLEAR => 13;
#
# processing states
#
use constant RESET => 'reset';
use constant BASELINE => 'baseline';
use constant DELTA => 'delta';
#
# common sections for all files types: u01, u03, mpr
#
use constant INDEX => '[Index]';
use constant INFORMATION => '[Information]';
#
# sections specific to u01
#
use constant TIME => '[Time]';
use constant CYCLETIME => '[CycleTime]';
use constant COUNT => '[Count]';
use constant DISPENSER => '[Dispenser]';
use constant MOUNTPICKUPFEEDER => '[MountPickupFeeder]';
use constant MOUNTPICKUPNOZZLE => '[MountPickupNozzle]';
use constant INSPECTIONDATA => '[InspectionData]';
#
# sections specific to u03
#
use constant BRECG => '[BRecg]';
use constant BRECGCALC => '[BRecgCalc]';
use constant ELAPSETIMERECOG => '[ElapseTimeRecog]';
use constant SBOARD => '[SBoard]';
use constant HEIGHTCORRECT => '[HeightCorrect]';
use constant MOUNTQUALITYTRACE => '[MountQualityTrace]';
use constant MOUNTLATESTREEL => '[MountLatestReel]';
use constant MOUNTEXCHANGEREEL => '[MountExchangeReel]';
#
# sections specfic to mpr
#
use constant TIMEDATASP => '[TimeDataSP]';
use constant COUNTDATASP => '[CountDataSP]';
use constant COUNTDATASP2 => '[CountDataSP2]';
use constant TRACEDATASP => '[TraceDataSP]';
use constant TRACEDATASP_2 => '[TraceDataSP_2]';
use constant ISPINFODATA => '[ISPInfoData]';
use constant MASKISPINFODATA => '[MaskISPInfoData]';
#
# files types
#
use constant LNB_U01_FILE_TYPE => 'u01';
use constant LNB_U03_FILE_TYPE => 'u03';
use constant LNB_MPR_FILE_TYPE => 'mpr';
#
# verbose levels
#
use constant NOVERBOSE => 0;
use constant MINVERBOSE => 1;
use constant MIDVERBOSE => 2;
use constant MAXVERBOSE => 3;
#
# processing options
#
use constant PROC_OPT_NONE => 0;
use constant PROC_OPT_IGNRESET12 => 1;
use constant PROC_OPT_IGNALL12 => 2;
use constant PROC_OPT_USENEGDELTS => 4;
use constant PROC_OPT_USENEWNZ => 8;
#
# nozzle key names
#
use constant NZ_KEY_HEAD => 'Head';
use constant NZ_KEY_NHADD => 'NHAdd';
use constant NZ_KEY_NCADD => 'NCAdd';
#
use constant NZ_LABEL_NHADD_NCADD => 'nhadd_ncadd';
use constant NZ_LABEL_HEAD_NHADD => 'head_nhadd';
use constant NZ_LABEL_HEAD_NCADD => 'head_ncadd';
#
######################################################################
#
# globals
#
my $cmd = $0;
# all diagnostic output goes through this handle; presumably re-pointed
# at $logfile when -l is given (handling is outside this chunk).
my $log_fh = *STDOUT;
#
# cmd line options
#
my $logfile = '';
my $verbose = NOVERBOSE;
my $file_type = "all";
my $export_csv = FALSE;
my $export_dir = '/tmp/';
my $proc_options = PROC_OPT_NONE;
my $audit_only = FALSE;
#
# -v argument keyword => numeric verbosity level.
my %verbose_levels =
(
off => NOVERBOSE(),
min => MINVERBOSE(),
mid => MIDVERBOSE(),
max => MAXVERBOSE()
);
#
# legal -o keywords; values are powers of two, so they look like bit
# flags combined into $proc_options (combination logic is outside this
# chunk -- confirm).
my %allowed_proc_options =
(
NONE => PROC_OPT_NONE(),
IGNRESET12 => PROC_OPT_IGNRESET12(),
IGNALL12 => PROC_OPT_IGNALL12(),
USENEGDELTS => PROC_OPT_USENEGDELTS(),
USENEWNZ => PROC_OPT_USENEWNZ()
);
#
# fields to ignore for output=12 files if enabled.
#
my %ignored_output12_fields =
(
'TPICKUP' => 1,
'TPMISS' => 1,
'TRMISS' => 1,
'TDMISS' => 1,
'TMMISS' => 1,
'THMISS' => 1,
'CPERR' => 1,
'CRERR' => 1,
'CDERR' => 1,
'CMERR' => 1,
'CTERR' => 1
);
#
#
# summary tables.
#
my %totals = ();
#
# Export-column tables.  Every list has the same shape: hashrefs of
# { name, format } where the first column is printed with '%s' and each
# later column with ',%s' (CSV).  Rather than repeating the hashrefs,
# build each list from a plain list of column names.
#
my @count_col_names =
    qw(Pickup PMiss RMiss DMiss MMiss HMiss TRSMiss Mount);
#
my $make_col_list = sub {
    my @cols = map { { name => $_, format => ',%s' } } @_;
    $cols[0]{format} = '%s' if (scalar(@cols) > 0);
    return @cols;
};
#
# feeder export layouts
#
my @feeder_export_cols =
    $make_col_list->(qw(Machine Lane Stage FAdd FSAdd ReelID), @count_col_names);
my @feeder_export_cols2 =
    $make_col_list->(qw(Machine Lane Stage FAdd FSAdd), @count_col_names);
my @feeder_export_cols3 =
    $make_col_list->(qw(Machine Lane Stage TableNo), @count_col_names);
#
my @feeder_count_cols = @count_col_names;
#
# nozzle export layouts
#
my @nozzle_export_cols =
    $make_col_list->(qw(Machine Lane Stage NHAdd NCAdd Blkserial), @count_col_names);
my @nozzle_export_cols2 =
    $make_col_list->(qw(Machine Lane Stage NHAdd NCAdd), @count_col_names);
#
# "new" nozzle layouts: one entry per label, differing only in which
# pair of nozzle key columns appears after Machine/Lane/Stage.
#
my %nz_label_keys =
(
    NZ_LABEL_NHADD_NCADD() => [ NZ_KEY_NHADD(), NZ_KEY_NCADD() ],
    NZ_LABEL_HEAD_NHADD()  => [ NZ_KEY_HEAD(),  NZ_KEY_NHADD() ],
    NZ_LABEL_HEAD_NCADD()  => [ NZ_KEY_HEAD(),  NZ_KEY_NCADD() ]
);
#
my %nozzle_export_cols_new = map {
    $_ => [ $make_col_list->( 'Machine', 'Lane', 'Stage',
                              @{ $nz_label_keys{$_} },
                              'Blkserial', @count_col_names ) ]
} keys %nz_label_keys;
#
my %nozzle_export_cols2_new = map {
    $_ => [ $make_col_list->( 'Machine', 'Lane', 'Stage',
                              @{ $nz_label_keys{$_} },
                              @count_col_names ) ]
} keys %nz_label_keys;
#
my @nozzle_count_cols = @count_col_names;
#
########################################################################
########################################################################
#
# miscellaneous functions
#
sub usage
{
    # Print command-line help for this script to the log handle.
    # BUGFIX: the -t option previously advertised "u10", but the valid
    # file type (matching the -t handling and file suffixes) is "u01";
    # also corrected the "procesing" typo.
    my ($arg0) = @_;
    print $log_fh <<EOF;
usage: $arg0 [-?] [-h] \\
[-w | -W |-v level] \\
[-t u01|u03|mpr] \\
[-l logfile] \\
[-o option] \\
[-x] [-d path] \\
directory ...
where:
-? or -h - print usage.
-w - enable warning (level=min=1)
-W - enable warning and trace (level=mid=2)
-v - verbose level: 0=off,1=min,2=mid,3=max
-t file-type = type of file to process: u01, u03, mpr.
default is all files.
-l logfile - log file path
-o option - enable a processing option:
ignreset12 - ignore resetable output=12 fields.
ignall12 - ignore all output=12 files.
usenegdelts - use negative deltas in calculations.
usenewnz - use new nozzle processing
-a - run audit only.
-d path - export directory, defaults to '/tmp'.
EOF
}
#
########################################################################
########################################################################
#
# current product functions
#
sub get_product_info
{
    # Extract product identifiers from a parsed data-file hash: MJSID
    # from the [Index] section, LotName and LotNumber from [Information].
    # Results are written through the supplied scalar refs; values
    # wrapped in double quotes have the quotes stripped.
    my ($pdata, $pmjsid, $plotname, $plotnumber) = @_;

    my $dequote = sub {
        my ($text) = @_;
        $text = $1 if ($text =~ m/"([^"]*)"/);
        return $text;
    };

    ${$pmjsid}     = $dequote->($pdata->{INDEX()}->{data}->{MJSID});
    ${$plotname}   = $dequote->($pdata->{INFORMATION()}->{data}->{LotName});
    ${$plotnumber} = $pdata->{INFORMATION()}->{data}->{LotNumber};
}
#
sub set_product_info
{
    # Track the current product ("MJSID_LotName_LotNumber") per
    # file-type/machine/lane/stage in $pdb and set the change_over flag.
    #
    # A change-over is recorded only when a production-complete file
    # (output 3 or 4) reports a product different from the one last
    # seen, ignoring the initial UNKNOWN placeholder.
    my ($pdb, $pfile, $ftype) = @_;

    my $machine   = $pfile->{mach_no};
    my $lane      = $pfile->{lane};
    my $stage     = $pfile->{stage};
    my $output_no = $pfile->{output_no};

    my $mjsid     = 'UNKNOWN';
    my $lotname   = 'UNKNOWN';
    my $lotnumber = 0;

    if ( ! exists($pdb->{product}{$ftype}{$machine}{$lane}{$stage}))
    {
        # first file seen for this slot: seed with the UNKNOWN placeholder.
        $pdb->{product}{$ftype}{$machine}{$lane}{$stage} = "${mjsid}_${lotname}_${lotnumber}";
        $pdb->{change_over}{$ftype}{$machine}{$lane}{$stage} = FALSE;
    }
    elsif (($output_no == PROD_COMPLETE) ||
           ($output_no == PROD_COMPLETE_LATER))
    {
        get_product_info($pfile, \$mjsid, \$lotname, \$lotnumber);

        if (($pdb->{product}{$ftype}{$machine}{$lane}{$stage} ne "${mjsid}_${lotname}_${lotnumber}") &&
            ($pdb->{product}{$ftype}{$machine}{$lane}{$stage} ne "UNKNOWN_UNKNOWN_0"))
        {
            $pdb->{change_over}{$ftype}{$machine}{$lane}{$stage} = TRUE;
        }
        else
        {
            $pdb->{change_over}{$ftype}{$machine}{$lane}{$stage} = FALSE;
        }

        $pdb->{product}{$ftype}{$machine}{$lane}{$stage} = "${mjsid}_${lotname}_${lotnumber}";
    }
    else
    {
        # clear this flag.
        $pdb->{change_over}{$ftype}{$machine}{$lane}{$stage} = FALSE;
    }

    # BUGFIX: the change-over flag in this trace line was always read
    # from the 'u01' slot regardless of $ftype; use the current file type.
    printf $log_fh "Product %s: %s, Change Over: %d\n",
        $ftype,
        $pdb->{product}{$ftype}{$machine}{$lane}{$stage},
        $pdb->{change_over}{$ftype}{$machine}{$lane}{$stage}
        if ($verbose >= MIDVERBOSE);
}
#
########################################################################
########################################################################
#
# scan directories for U01, U03 and MPR files.
#
# collected files grouped by type (u01/u03/mpr); each entry is a list of
# hashrefs describing one found file (see the want_* callbacks below).
my %all_list = ();
# suffix targeted by the want_one_type() File::Find callback.
my $one_type = '';
#
sub want_one_type
{
    # File::Find wanted() callback: record every file whose name ends in
    # ".$one_type".  The machine/stage/lane/... fields are parsed out of
    # the file name and a descriptor hashref is pushed onto
    # $all_list{$one_type}.
    return unless ($_ =~ m/^.*\.${one_type}$/);

    printf $log_fh "FOUND %s FILE: %s\n", $one_type, $File::Find::name
        if ($verbose >= MAXVERBOSE);

    my $file_name = $_;

    my ($date, $mach_no, $stage, $lane,
        $pcb_serial, $pcb_id, $output_no, $pcb_id_lot_no) = ('') x 8;

    # File names use either '+-+' or plain '-' as the field separator;
    # try the more specific separator first.  Names with fewer than nine
    # fields are still recorded, just with empty field values.
    for my $separator ('\+-\+', '-') {
        my @fields = split($separator, $file_name);
        next if (scalar(@fields) < 9);
        ($date, $mach_no, $stage, $lane,
         $pcb_serial, $pcb_id, $output_no, $pcb_id_lot_no) = @fields[0 .. 7];
        last;
    }

    unshift @{$all_list{$one_type}},
    {
        'file_name'     => $file_name,
        'full_path'     => $File::Find::name,
        'directory'     => $File::Find::dir,
        'date'          => $date,
        'mach_no'       => $mach_no,
        'stage'         => $stage,
        'lane'          => $lane,
        'pcb_serial'    => $pcb_serial,
        'pcb_id'        => $pcb_id,
        'output_no'     => $output_no,
        'pcb_id_lot_no' => $pcb_id_lot_no
    };
}
#
sub want_all_types
{
    #
    # File::Find "wanted" callback that accepts any of the three known
    # file types; each matching file is parsed and prepended to the list
    # for its type in %all_list.
    #
    my $dt = '';
    #
    TYPE: foreach my $type (qw(u01 u03 mpr))
    {
        next TYPE unless ($_ =~ m/^.*\.${type}$/);
        printf $log_fh "FOUND %s FILE: %s\n", $type, $File::Find::name
            if ($verbose >= MAXVERBOSE);
        $dt = $type;
        last TYPE;
    }
    #
    # not one of the file types we care about.
    return if ($dt eq '');
    #
    my $file_name = $_;
    #
    # every parsed field defaults to the empty string when the file name
    # cannot be split into enough parts with either separator.
    #
    my @field_names = qw(date mach_no stage lane pcb_serial pcb_id output_no pcb_id_lot_no);
    my %record = map { $_ => '' } @field_names;
    #
    # try the two known separators in order of preference; the first one
    # that yields at least 9 parts supplies the first 8 fields.
    #
    SEPARATOR: foreach my $separator ('\+-\+', '-')
    {
        my @parts = split($separator, $file_name);
        next SEPARATOR if (scalar(@parts) < 9);
        @record{@field_names} = @parts[0..7];
        last SEPARATOR;
    }
    #
    $record{file_name} = $file_name;
    $record{full_path} = $File::Find::name;
    $record{directory} = $File::Find::dir;
    #
    unshift @{$all_list{$dt}}, \%record;
}
#
#
# scan the directories in @{$pargv} for files of the requested type
# ($ftype: 'u01', 'u03' or 'mpr'), or for all three types when $ftype
# is anything else.  results are appended to the caller-supplied list
# references and each list is sorted by file name.
#
sub get_all_files
{
    my ($ftype, $pargv, $pu01, $pu03, $pmpr) = @_;
    #
    # map each known file type to its caller-supplied output list; this
    # replaces three byte-identical if/elsif branches.
    #
    my %list_for = (
        'u01' => $pu01,
        'u03' => $pu03,
        'mpr' => $pmpr
    );
    #
    if (exists($list_for{$ftype}))
    {
        #
        # optimize for file type: scan for that single type only.
        #
        $one_type = $ftype;
        $all_list{$one_type} = $list_for{$ftype};
        #
        find(\&want_one_type, @{$pargv});
        #
        @{$list_for{$ftype}} = sort { $a->{file_name} cmp $b->{file_name} } @{$list_for{$ftype}};
    }
    else
    {
        #
        # scan once, collecting all three types in a single pass.
        #
        $all_list{u01} = $pu01;
        $all_list{u03} = $pu03;
        $all_list{mpr} = $pmpr;
        #
        find(\&want_all_types, @{$pargv});
        #
        foreach my $plist ($pu01, $pu03, $pmpr)
        {
            @{$plist} = sort { $a->{file_name} cmp $b->{file_name} } @{$plist};
        }
    }
}
#
######################################################################
######################################################################
#
# read in data file and load all sections
#
#
# read the file named by $pdata->{full_path} into @{$pdata->{data}},
# one chomped line per element.
#
# returns 1 on success, 0 on failure (unreadable or unopenable file).
#
sub load
{
    my ($pdata) = @_;
    #
    my $path = $pdata->{full_path};
    #
    if ( ! -r $path )
    {
        printf $log_fh "\nERROR: file $path is NOT readable\n\n";
        return 0;
    }
    #
    # three-argument open with a lexical filehandle; the previous
    # two-argument bareword form (open(INFD, $path)) allowed mode
    # injection via the file name and shared a global handle.
    #
    my $infd;
    unless (open($infd, '<', $path))
    {
        printf $log_fh "\nERROR: unable to open $path.\n\n";
        return 0;
    }
    @{$pdata->{data}} = <$infd>;
    close($infd);
    #
    # remove newlines
    #
    chomp(@{$pdata->{data}});
    printf $log_fh "Lines read: %d\n", scalar(@{$pdata->{data}})
        if ($verbose >= MAXVERBOSE);
    #
    return 1;
}
#
#
# extract a "name = value" section from the already-loaded file lines in
# @{$pdata->{data}} and store the parsed pairs as a hash in
# $pdata->{$section}->{data}.  always returns 1, even when the section
# is empty.
#
sub load_name_value
{
    my ($pdata, $section) = @_;
    #
    printf $log_fh "\nLoading Name-Value Section: %s\n", $section
        if ($verbose >= MAXVERBOSE);
    #
    # backslash-escape the section name's first character so it is not
    # treated as a regex metacharacter (section names presumably start
    # with one, e.g. "[SECTION]" -- TODO confirm against the data files).
    my $re_section = '\\' . $section;
    # scalar range (flip-flop) inside grep: keep the run of lines from
    # the section-header line through the first blank line, inclusive.
    @{$pdata->{raw}->{$section}} =
        grep /^${re_section}\s*$/ .. /^\s*$/, @{$pdata->{data}};
    #
    # printf $log_fh "<%s>\n", join("\n", @{$pdata->{raw}->{$section}});
    #
    # 2 lines or fewer means only the header and the terminating blank
    # line matched: no payload.
    if (scalar(@{$pdata->{raw}->{$section}}) <= 2)
    {
        $pdata->{$section} = {};
        printf $log_fh "No data found.\n"
            if ($verbose >= MAXVERBOSE);
        return 1;
    }
    #
    # strip the section header (first line) and trailing blank (last line).
    shift @{$pdata->{raw}->{$section}};
    pop @{$pdata->{raw}->{$section}};
    #
    printf $log_fh "Section Lines: %d\n", scalar(@{$pdata->{raw}->{$section}})
        if ($verbose >= MAXVERBOSE);
    #
    # split each remaining "name = value" line on the first '=' only
    # (limit 2), so values containing '=' survive intact.
    %{$pdata->{$section}->{data}} =
        map { split /\s*=\s*/, $_, 2 } @{$pdata->{raw}->{$section}};
    printf $log_fh "Number of Keys: %d\n", scalar(keys %{$pdata->{$section}->{data}})
        if ($verbose >= MAXVERBOSE);
    #
    return 1;
}
#
#
# split a record into space-separated tokens, where double quotes group
# characters (including spaces) into a single token.  the quote
# characters themselves are not part of the token.  consecutive unquoted
# spaces produce empty tokens; a trailing empty token is dropped.
#
# returns the list of tokens.
#
sub split_quoted_string
{
    my $rec = shift;
    #
    my $rec_len = length($rec);
    #
    my $in_string = 0;    # true while inside a double-quoted span
    #
    my @tokens = ();
    my $token = '';
    #
    for (my $i = 0; $i < $rec_len; $i++)
    {
        my $c = substr($rec, $i, 1);
        #
        if ($in_string == 1)
        {
            # inside quotes: everything except the closing quote is literal.
            if ($c eq '"')
            {
                $in_string = 0;
            }
            else
            {
                $token .= $c;
            }
        }
        elsif ($c eq '"')
        {
            $in_string = 1;
        }
        elsif ($c eq ' ')
        {
            # an unquoted space terminates the current token, even if it
            # is empty.
            push (@tokens, $token);
            $token = '';
        }
        else
        {
            $token .= $c;
        }
    }
    #
    # flush the final token, if any.
    #
    if (length($token) > 0)
    {
        push (@tokens, $token);
    }
    #
    return @tokens;
}
#
#
# extract a columnar list section from the already-loaded file lines in
# @{$pdata->{data}}.  the first payload line is a space-separated column
# header; every following record is tokenized (honoring double quotes)
# and stored as a column-name => value hash in
# @{$pdata->{$section}->{data}}.  always returns 1.
#
sub load_list
{
    my ($pdata, $section) = @_;
    #
    printf $log_fh "\nLoading List Section: %s\n", $section
        if ($verbose >= MAXVERBOSE);
    #
    # backslash-escape the section name's first character so it is not
    # treated as a regex metacharacter (section names presumably start
    # with one, e.g. "[SECTION]" -- TODO confirm against the data files).
    my $re_section = '\\' . $section;
    # scalar range (flip-flop) inside grep: keep the run of lines from
    # the section-header line through the first blank line, inclusive.
    @{$pdata->{raw}->{$section}} =
        grep /^${re_section}\s*$/ .. /^\s*$/, @{$pdata->{data}};
    #
    # printf $log_fh "<%s>\n", join("\n", @{$pdata->{raw}->{$section}});
    #
    # 3 lines or fewer means only the section header, the column header
    # and the terminating blank line matched: no records.
    if (scalar(@{$pdata->{raw}->{$section}}) <= 3)
    {
        $pdata->{$section} = {};
        printf $log_fh "No data found.\n"
            if ($verbose >= MAXVERBOSE);
        return 1;
    }
    # strip the section header (first line) and trailing blank (last line),
    # then peel off the column-header line.
    shift @{$pdata->{raw}->{$section}};
    pop @{$pdata->{raw}->{$section}};
    $pdata->{$section}->{header} = shift @{$pdata->{raw}->{$section}};
    @{$pdata->{$section}->{column_names}} =
        split / /, $pdata->{$section}->{header};
    my $number_columns = scalar(@{$pdata->{$section}->{column_names}});
    #
    @{$pdata->{$section}->{data}} = ();
    #
    printf $log_fh "Section Lines: %d\n", scalar(@{$pdata->{raw}->{$section}})
        if ($verbose >= MAXVERBOSE);
    # printf $log_fh "Column Names: %d\n", $number_columns;
    foreach my $record (@{$pdata->{raw}->{$section}})
    {
        # printf $log_fh "\nRECORD: %s\n", $record;
        #
        # printf $log_fh "\nRECORD (original): %s\n", $record;
        # $record =~ s/"\s+"\s/"" /g;
        # $record =~ s/"\s+"\s*$/""/g;
        # printf $log_fh "\nRECORD (final): %s\n", $record;
        # my @tokens = split / /, $record;
        #
        # quote-aware tokenization; see split_quoted_string().
        my @tokens = split_quoted_string($record);
        my $number_tokens = scalar(@tokens);
        printf $log_fh "Number of tokens in record: %d\n", $number_tokens
            if ($verbose >= MAXVERBOSE);
        #
        # only records whose token count matches the header are kept;
        # malformed records are reported and skipped.
        if ($number_tokens == $number_columns)
        {
            my %data = ();
            # hash slice pairs each column name with its token.
            @data{@{$pdata->{$section}->{column_names}}} = @tokens;
            my $data_size = scalar(keys %data);
            # printf $log_fh "Current Data Size: %d\n", $data_size;
            unshift @{$pdata->{$section}->{data}}, \%data;
            printf $log_fh "Current Number of Records: %d\n", scalar(@{$pdata->{$section}->{data}})
                if ($verbose >= MAXVERBOSE);
        }
        else
        {
            printf $log_fh "SKIPPING RECORD - NUMBER TOKENS (%d) != NUMBER COLUMNS (%d)\n", $number_tokens, $number_columns;
        }
    }
    #
    return 1;
}
#
########################################################################
########################################################################
#
# process U01 files.
#
#
# export the COUNT section totals from %totals to CSV files under
# ${export_dir}: overall totals, by machine, by machine+lane, and the
# same groupings broken down by product.
#
# the redundant "$section = COUNT;" reassignments between sections were
# dead stores ($section is never changed) and have been removed.
#
sub export_u01_count_data
{
    my ($pdb) = @_;
    #
    ###############################################################
    #
    my $section = COUNT;
    #
    printf $log_fh "\nExport Total Data For %s:\n", $section;
    #
    # overall totals: one row of sorted key names, one row of counts.
    #
    my $first_time = TRUE;
    #
    open(my $outfh, ">" , "${export_dir}/COUNT_TOTALS.csv") || die $!;
    #
    foreach my $key (sort keys %{$totals{$section}{totals}})
    {
        if ($first_time == TRUE)
        {
            printf $outfh "%s", $key;
            $first_time = FALSE;
        }
        else
        {
            printf $outfh ",%s", $key;
        }
    }
    printf $outfh "\n";
    #
    $first_time = TRUE;
    foreach my $key (sort keys %{$totals{$section}{totals}})
    {
        if ($first_time == TRUE)
        {
            printf $outfh "%d", $totals{$section}{totals}{$key};
            $first_time = FALSE;
        }
        else
        {
            printf $outfh ",%d", $totals{$section}{totals}{$key};
        }
    }
    printf $outfh "\n";
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Machine:\n", $section;
    #
    # by machine: the header row is written on the first iteration using
    # the keys of the first machine (assumes all machines share the same
    # key set -- TODO confirm).
    #
    $first_time = TRUE;
    #
    open($outfh, ">" , "${export_dir}/COUNT_BY_MACHINE.csv") || die $!;
    foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine}})
    {
        if ($first_time == TRUE)
        {
            printf $outfh "machine";
            foreach my $key (sort keys %{$totals{$section}{by_machine}{$machine}})
            {
                printf $outfh ",%s", $key;
            }
            printf $outfh "\n";
            $first_time = FALSE;
        }
        #
        printf $outfh "%s", $machine;
        foreach my $key (sort keys %{$totals{$section}{by_machine}{$machine}})
        {
            printf $outfh ",%d", $totals{$section}{by_machine}{$machine}{$key};
        }
        printf $outfh "\n";
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Machine and Lane:\n", $section;
    #
    $first_time = TRUE;
    open($outfh, ">" , "${export_dir}/COUNT_BY_MACHINE_LANE.csv") || die $!;
    foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane}})
    {
        foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane}{$machine}})
        {
            if ($first_time == TRUE)
            {
                printf $outfh "machine,lane";
                foreach my $key (sort keys %{$totals{$section}{by_machine_lane}{$machine}{$lane}})
                {
                    printf $outfh ",%s", $key;
                }
                printf $outfh "\n";
                $first_time = FALSE;
            }
            #
            printf $outfh "%s,%s", $machine, $lane;
            foreach my $key (sort keys %{$totals{$section}{by_machine_lane}{$machine}{$lane}})
            {
                printf $outfh ",%d", $totals{$section}{by_machine_lane}{$machine}{$lane}{$key};
            }
            printf $outfh "\n";
        }
    }
    close($outfh);
    #
    ###############################################################
    #
    # same three groupings, broken down by product.
    #
    printf $log_fh "\nExport Total Data For %s by Product:\n", $section;
    #
    $first_time = TRUE;
    open($outfh, ">" , "${export_dir}/COUNT_TOTALS_BY_PRODUCT.csv") || die $!;
    foreach my $product (sort keys %{$totals{by_product}})
    {
        if ($first_time == TRUE)
        {
            printf $outfh "product";
            foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{totals}})
            {
                printf $outfh ",%s", $key;
            }
            printf $outfh "\n";
            $first_time = FALSE;
        }
        #
        printf $outfh "%s", $product;
        foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{totals}})
        {
            printf $outfh ",%d", $totals{by_product}{$product}{$section}{totals}{$key};
        }
        printf $outfh "\n";
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Product and Machine:\n", $section;
    #
    $first_time = TRUE;
    open($outfh, ">" , "${export_dir}/COUNT_BY_PRODUCT_MACHINE.csv") || die $!;
    foreach my $product (sort keys %{$totals{by_product}})
    {
        foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine}})
        {
            if ($first_time == TRUE)
            {
                printf $outfh "product,machine";
                foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{by_machine}{$machine}})
                {
                    printf $outfh ",%s", $key;
                }
                printf $outfh "\n";
                $first_time = FALSE;
            }
            #
            printf $outfh "%s,%s", $product, $machine;
            foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{by_machine}{$machine}})
            {
                printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine}{$machine}{$key};
            }
            printf $outfh "\n";
        }
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Product, Machine and Lane:\n", $section;
    #
    $first_time = TRUE;
    open($outfh, ">" , "${export_dir}/COUNT_BY_PRODUCT_MACHINE_LANE.csv") || die $!;
    foreach my $product (sort keys %{$totals{by_product}})
    {
        foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane}})
        {
            foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$machine}})
            {
                if ($first_time == TRUE)
                {
                    printf $outfh "product,machine,lane";
                    foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$machine}{$lane}})
                    {
                        printf $outfh ",%s", $key;
                    }
                    printf $outfh "\n";
                    $first_time = FALSE;
                }
                #
                printf $outfh "%s,%s,%s", $product, $machine, $lane;
                foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$machine}{$lane}})
                {
                    printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine_lane}{$machine}{$lane}{$key};
                }
                printf $outfh "\n";
            }
        }
    }
    close($outfh);
}
#
#
# export the TIME section totals from %totals to CSV files under
# ${export_dir}: by machine, by machine+lane, by product+machine and by
# product+machine+lane.
#
# NOTE(review): unlike the COUNT exports, these files are written with
# no header row -- confirm that is intended.
#
# fixes: the product/machine/lane row used format "%s,s,%s" (two
# conversions for three arguments), which printed a literal "s" and
# dropped the lane value; and one statement was terminated with a comma
# instead of a semicolon.  redundant "$section = TIME;" dead stores
# removed.
#
sub export_u01_time_data
{
    my ($pdb) = @_;
    #
    ###############################################################
    #
    my $section = TIME;
    #
    printf $log_fh "\nExport Data For %s by Machine:\n", $section;
    #
    open(my $outfh, ">" , "${export_dir}/TIME_BY_MACHINE.csv") || die $!;
    foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine}})
    {
        printf $outfh "%s", $machine;
        foreach my $key (sort keys %{$totals{$section}{by_machine}{$machine}})
        {
            printf $outfh ",%d", $totals{$section}{by_machine}{$machine}{$key};
        }
        printf $outfh "\n";
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Machine and Lane:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/TIME_BY_MACHINE_LANE.csv") || die $!;
    foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane}})
    {
        foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane}{$machine}})
        {
            printf $outfh "%s,%s", $machine, $lane;
            foreach my $key (sort keys %{$totals{$section}{by_machine_lane}{$machine}{$lane}})
            {
                printf $outfh ",%d", $totals{$section}{by_machine_lane}{$machine}{$lane}{$key};
            }
            printf $outfh "\n";
        }
    }
    close($outfh);
    #
    ###############################################################
    #
    printf $log_fh "\nExport Data For %s by Product and Machine:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/TIME_BY_PRODUCT_MACHINE.csv") || die $!;
    foreach my $product (sort keys %{$totals{by_product}})
    {
        foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine}})
        {
            printf $outfh "%s,%s", $product, $machine;
            foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{by_machine}{$machine}})
            {
                printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine}{$machine}{$key};
            }
            printf $outfh "\n";
        }
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Product, Machine and Lane:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/TIME_BY_PRODUCT_MACHINE_LANE.csv") || die $!;
    foreach my $product (sort keys %{$totals{by_product}})
    {
        foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane}})
        {
            foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$machine}})
            {
                # was "%s,s,%s" -- see header comment.
                printf $outfh "%s,%s,%s", $product, $machine, $lane;
                foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$machine}{$lane}})
                {
                    printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine_lane}{$machine}{$lane}{$key};
                }
                printf $outfh "\n";
            }
        }
    }
    close($outfh);
}
#
#
# export the MOUNTPICKUPFEEDER section totals from %totals to CSV files
# under ${export_dir}, grouped by machine/lane/stage plus feeder address
# keys, with and without a per-product breakdown.
#
# fix: the non-product TABLE_NO export was wrapped in a
# "foreach my $product" loop whose loop variable was never used, so
# every row was written once per product (duplicated output); that loop
# has been removed.  a redundant "$section = MOUNTPICKUPFEEDER;" dead
# store was also removed.
#
sub export_u01_feeder_data
{
    my ($pdb) = @_;
    #
    ###############################################################
    #
    my $section = MOUNTPICKUPFEEDER;
    #
    printf $log_fh "\nExport Data For %s by Machine, Lane, Stage, FAdd, FSAdd, ReelID:\n", $section;
    #
    open(my $outfh, ">" , "${export_dir}/FEEDER_BY_MACHINE_LANE_STAGE_FADD_FSADD_REELID.csv") || die $!;
    # header: each column descriptor carries its own printf format.
    foreach my $pcol (@feeder_export_cols)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}})
    {
        foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}})
        {
            foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}})
            {
                foreach my $fadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}})
                {
                    foreach my $fsadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}})
                    {
                        foreach my $reelid (sort keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}})
                        {
                            printf $outfh "%s,%s,%s,%s,%s,%s", $machine, $lane, $stage, $fadd, $fsadd, $reelid;
                            foreach my $col (@feeder_count_cols)
                            {
                                printf $outfh ",%d", $totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$reelid}{$col};
                            }
                            printf $outfh "\n";
                        }
                    }
                }
            }
        }
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Machine, Lane, Stage, FAdd, FSAdd:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/FEEDER_BY_MACHINE_LANE_STAGE_FADD_FSADD.csv") || die $!;
    foreach my $pcol (@feeder_export_cols2)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}})
    {
        foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}})
        {
            foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}})
            {
                foreach my $fadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}})
                {
                    foreach my $fsadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}})
                    {
                        printf $outfh "%s,%s,%s,%s,%s", $machine, $lane, $stage, $fadd, $fsadd;
                        foreach my $col (@feeder_count_cols)
                        {
                            printf $outfh ",%d", $totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$col};
                        }
                        printf $outfh "\n";
                    }
                }
            }
        }
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Machine, Lane, Stage, TableNo:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/FEEDER_BY_MACHINE_LANE_STAGE_TABLE_NO.csv") || die $!;
    foreach my $pcol (@feeder_export_cols3)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    # previously wrapped in an unused per-product loop -- see header comment.
    foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_table_no}})
    {
        foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_table_no}{$machine}})
        {
            foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_table_no}{$machine}{$lane}})
            {
                foreach my $table_no (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_table_no}{$machine}{$lane}{$stage}})
                {
                    printf $outfh "%s,%s,%s,%s", $machine, $lane, $stage, $table_no;
                    foreach my $col (@feeder_count_cols)
                    {
                        printf $outfh ",%d", $totals{$section}{by_machine_lane_stage_table_no}{$machine}{$lane}{$stage}{$table_no}{$col};
                    }
                    printf $outfh "\n";
                }
            }
        }
    }
    close($outfh);
    #
    ###############################################################
    #
    # same groupings, broken down by product.
    #
    printf $log_fh "\nExport Data For %s by Product, Machine, Lane, Stage, FAdd, FSAdd, ReelID:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/FEEDER_BY_PRODUCT_MACHINE_LANE_STAGE_FADD_FSADD_REELID.csv") || die $!;
    printf $outfh "product,";
    foreach my $pcol (@feeder_export_cols)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    foreach my $product (sort keys %{$totals{by_product}})
    {
        foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}})
        {
            foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}})
            {
                foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}})
                {
                    foreach my $fadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}})
                    {
                        foreach my $fsadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}})
                        {
                            foreach my $reelid (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}})
                            {
                                printf $outfh "%s,%s,%s,%s,%s,%s,%s", $product, $machine, $lane, $stage, $fadd, $fsadd, $reelid;
                                foreach my $col (@feeder_count_cols)
                                {
                                    printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$reelid}{$col};
                                }
                                printf $outfh "\n";
                            }
                        }
                    }
                }
            }
        }
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Product, Machine, Lane, Stage, FAdd, FSAdd:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/FEEDER_BY_PRODUCT_MACHINE_LANE_STAGE_FADD_FSADD.csv") || die $!;
    printf $outfh "product,";
    foreach my $pcol (@feeder_export_cols2)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    foreach my $product (sort keys %{$totals{by_product}})
    {
        foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}})
        {
            foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}})
            {
                foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}})
                {
                    foreach my $fadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}})
                    {
                        foreach my $fsadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}})
                        {
                            printf $outfh "%s,%s,%s,%s,%s,%s", $product, $machine, $lane, $stage, $fadd, $fsadd;
                            foreach my $col (@feeder_count_cols)
                            {
                                printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$col};
                            }
                            printf $outfh "\n";
                        }
                    }
                }
            }
        }
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Product, Machine, Lane, Stage, TableNo:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/FEEDER_BY_PRODUCT_MACHINE_LANE_STAGE_TABLE_NO.csv") || die $!;
    printf $outfh "product,";
    foreach my $pcol (@feeder_export_cols3)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    foreach my $product (sort keys %{$totals{by_product}})
    {
        foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_table_no}})
        {
            foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_table_no}{$machine}})
            {
                foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_table_no}{$machine}{$lane}})
                {
                    foreach my $table_no (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_table_no}{$machine}{$lane}{$stage}})
                    {
                        printf $outfh "%s,%s,%s,%s,%s", $product, $machine, $lane, $stage, $table_no;
                        foreach my $col (@feeder_count_cols)
                        {
                            printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine_lane_stage_table_no}{$machine}{$lane}{$stage}{$table_no}{$col};
                        }
                        printf $outfh "\n";
                    }
                }
            }
        }
    }
    close($outfh);
}
#
#
# export the MOUNTPICKUPNOZZLE section totals from %totals to CSV files
# under ${export_dir}, grouped by machine/lane/stage plus nozzle address
# keys (NHAdd/NCAdd, optionally Blkserial), with and without a
# per-product breakdown.
#
# a redundant "$section = MOUNTPICKUPNOZZLE;" dead store was removed.
#
sub export_u01_nozzle_data
{
    my ($pdb) = @_;
    #
    ###############################################################
    #
    my $section = MOUNTPICKUPNOZZLE;
    #
    printf $log_fh "\nExport Data For %s by Machine, Lane, Stage, NHAdd, NCAdd, Blkserial:\n", $section;
    #
    open(my $outfh, ">" , "${export_dir}/NOZZLE_BY_MACHINE_LANE_STAGE_NHADD_NCADD_BLKSERIAL.csv") || die $!;
    # header: each column descriptor carries its own printf format.
    foreach my $pcol (@nozzle_export_cols)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}})
    {
        foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}})
        {
            foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}})
            {
                foreach my $nhadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}})
                {
                    foreach my $ncadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}})
                    {
                        foreach my $blkserial (sort keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}})
                        {
                            printf $outfh "%s,%s,%s,%s,%s,%s", $machine, $lane, $stage, $nhadd, $ncadd, $blkserial;
                            foreach my $col (@nozzle_count_cols)
                            {
                                printf $outfh ",%d", $totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$blkserial}{$col};
                            }
                            printf $outfh "\n";
                        }
                    }
                }
            }
        }
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Machine, Lane, Stage, NHAdd, NCAdd:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/NOZZLE_BY_MACHINE_LANE_STAGE_NHADD_NCADD.csv") || die $!;
    foreach my $pcol (@nozzle_export_cols2)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}})
    {
        foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}})
        {
            foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}})
            {
                foreach my $nhadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}})
                {
                    foreach my $ncadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}})
                    {
                        printf $outfh "%s,%s,%s,%s,%s", $machine, $lane, $stage, $nhadd, $ncadd;
                        foreach my $col (@nozzle_count_cols)
                        {
                            printf $outfh ",%d", $totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$col};
                        }
                        printf $outfh "\n";
                    }
                }
            }
        }
    }
    close($outfh);
    #
    ###############################################################
    #
    # same groupings, broken down by product.
    #
    printf $log_fh "\nExport Data For %s by Product, Machine, Lane, Stage, NHAdd, NCAdd, Blkserial:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/NOZZLE_BY_PRODUCT_MACHINE_LANE_STAGE_NHADD_NCADD_BLKSERIAL.csv") || die $!;
    printf $outfh "product,";
    foreach my $pcol (@nozzle_export_cols)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    foreach my $product (sort keys %{$totals{by_product}})
    {
        foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}})
        {
            foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}})
            {
                foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}})
                {
                    foreach my $nhadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}})
                    {
                        foreach my $ncadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}})
                        {
                            foreach my $blkserial (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}})
                            {
                                printf $outfh "%s,%s,%s,%s,%s,%s,%s", $product, $machine, $lane, $stage, $nhadd, $ncadd, $blkserial;
                                foreach my $col (@nozzle_count_cols)
                                {
                                    printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$blkserial}{$col};
                                }
                                printf $outfh "\n";
                            }
                        }
                    }
                }
            }
        }
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Product, Machine, Lane, Stage, NHAdd, NCAdd:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/NOZZLE_BY_PRODUCT_MACHINE_LANE_STAGE_NHADD_NCADD.csv") || die $!;
    printf $outfh "product,";
    foreach my $pcol (@nozzle_export_cols2)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    foreach my $product (sort keys %{$totals{by_product}})
    {
        foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}})
        {
            foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}})
            {
                foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}})
                {
                    foreach my $nhadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}})
                    {
                        foreach my $ncadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}})
                        {
                            printf $outfh "%s,%s,%s,%s,%s,%s", $product, $machine, $lane, $stage, $nhadd, $ncadd;
                            foreach my $col (@nozzle_count_cols)
                            {
                                printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$col};
                            }
                            printf $outfh "\n";
                        }
                    }
                }
            }
        }
    }
    close($outfh);
}
#
sub export_u01_nozzle_data_keys
{
my ($pdb, $nmkey1, $nmkey2, $label) = @_;
#
my $NMKEY1 = $nmkey1;
$NMKEY1 =~ tr/[a-z]/[A-Z]/;
my $NMKEY2 = $nmkey2;
$NMKEY2 =~ tr/[a-z]/[A-Z]/;
my $LABEL = $label;
$LABEL =~ tr/[a-z]/[A-Z]/;
#
###############################################################
#
my $section = MOUNTPICKUPNOZZLE;
#
printf $log_fh "\nExport Data For %s by Machine, Lane, Stage, $nmkey1, $nmkey2, Blkserial:\n", $section;
#
open(my $outfh, ">" , "${export_dir}/NOZZLE_BY_MACHINE_LANE_STAGE_${NMKEY1}_${NMKEY2}_BLKSERIAL.csv") || die $!;
foreach my $pcol (@{$nozzle_export_cols_new{$label}})
{
printf $outfh $pcol->{format}, $pcol->{name};
}
printf $outfh "\n";
#
foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}{$lane}})
{
foreach my $key1 (sort { $a <=> $b } keys %{$totals{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}{$lane}{$stage}})
{
foreach my $key2 (sort { $a <=> $b } keys %{$totals{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}{$lane}{$stage}{$key1}})
{
foreach my $blkserial (sort keys %{$totals{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}{$lane}{$stage}{$key1}{$key2}})
{
printf $outfh "%s,%s,%s,%s,%s,%s",
$machine, $lane, $stage, $key1, $key2, $blkserial;
foreach my $col (@nozzle_count_cols)
{
printf $outfh ",%d",
$totals{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}{$lane}{$stage}{$key1}{$key2}{$blkserial}{$col};
}
printf $outfh "\n";
}
}
}
}
}
}
close($outfh);
#
printf $log_fh "\nExport Data For %s by Machine, Lane, Stage, $nmkey1, $nmkey2:\n", $section;
#
open($outfh, ">" , "${export_dir}/NOZZLE_BY_MACHINE_LANE_STAGE_${NMKEY1}_${NMKEY2}.csv") || die $!;
foreach my $pcol (@{$nozzle_export_cols2_new{$label}})
{
printf $outfh $pcol->{format}, $pcol->{name};
}
printf $outfh "\n";
#
foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{$label}{by_machine_lane_stage_key1_key2}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{$label}{by_machine_lane_stage_key1_key2}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{$label}{by_machine_lane_stage_key1_key2}{$machine}{$lane}})
{
foreach my $key1 (sort { $a <=> $b } keys %{$totals{$section}{$label}{by_machine_lane_stage_key1_key2}{$machine}{$lane}{$stage}})
{
foreach my $key2 (sort { $a <=> $b } keys %{$totals{$section}{$label}{by_machine_lane_stage_key1_key2}{$machine}{$lane}{$stage}{$key1}})
{
printf $outfh "%s,%s,%s,%s,%s", $machine, $lane, $stage, $key1, $key2;
foreach my $col (@nozzle_count_cols)
{
printf $outfh ",%d", $totals{$section}{$label}{by_machine_lane_stage_key1_key2}{$machine}{$lane}{$stage}{$key1}{$key2}{$col};
}
printf $outfh "\n";
}
}
}
}
}
close($outfh);
#
###############################################################
#
$section = MOUNTPICKUPNOZZLE;
#
printf $log_fh "\nExport Data For %s by Product, Machine, Lane, Stage, $nmkey1, $nmkey2, Blkserial:\n", $section;
#
open($outfh, ">" , "${export_dir}/NOZZLE_BY_PRODUCT_MACHINE_LANE_STAGE_${NMKEY1}_${NMKEY2}_BLKSERIAL.csv") || die $!;
printf $outfh "product,";
foreach my $pcol (@{$nozzle_export_cols_new{$label}})
{
printf $outfh $pcol->{format}, $pcol->{name};
}
printf $outfh "\n";
#
foreach my $product (sort keys %{$totals{by_product}})
{
foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}{$lane}})
{
foreach my $key1 (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}{$lane}{$stage}})
{
foreach my $key2 (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}{$lane}{$stage}{$key1}})
{
foreach my $blkserial (sort keys %{$totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}{$lane}{$stage}{$key1}{$key2}})
{
printf $outfh "%s,%s,%s,%s,%s,%s,%s", $product, $machine, $lane, $stage, $key1, $key2, $blkserial;
foreach my $col (@nozzle_count_cols)
{
printf $outfh ",%d", $totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}{$lane}{$stage}{$key1}{$key2}{$blkserial}{$col};
}
printf $outfh "\n";
}
}
}
}
}
}
}
close($outfh);
#
printf $log_fh "\nExport Data For %s by Product, Machine, Lane, Stage, ${nmkey1}, ${nmkey2}:\n", $section;
#
open($outfh, ">" , "${export_dir}/NOZZLE_BY_PRODUCT_MACHINE_LANE_STAGE_${NMKEY1}_${NMKEY2}.csv") || die $!;
printf $outfh "product,";
foreach my $pcol (@{$nozzle_export_cols2_new{$label}})
{
printf $outfh $pcol->{format}, $pcol->{name};
}
printf $outfh "\n";
#
foreach my $product (sort keys %{$totals{by_product}})
{
foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2}{$machine}{$lane}})
{
foreach my $key1 (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2}{$machine}{$lane}{$stage}})
{
foreach my $key2 (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2}{$machine}{$lane}{$stage}{$key1}})
{
printf $outfh "%s,%s,%s,%s,%s,%s",
$product, $machine, $lane, $stage, $key1, $key2;
foreach my $col (@nozzle_count_cols)
{
printf $outfh ",%d", $totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2}{$machine}{$lane}{$stage}{$key1}{$key2}{$col};
}
printf $outfh "\n";
}
}
}
}
}
}
close($outfh);
}
#
sub export_u01_nozzle_data_new
{
    # Export the nozzle-data rollups once per supported key-pair:
    # (NHAdd,NCAdd), (Head,NHAdd) and (Head,NCAdd). Each triple supplies
    # key1, key2 and the label used to select the export column sets.
    my ($pdb) = @_;
    #
    my @key_triples = (
        [ NZ_KEY_NHADD, NZ_KEY_NCADD, NZ_LABEL_NHADD_NCADD ],
        [ NZ_KEY_HEAD,  NZ_KEY_NHADD, NZ_LABEL_HEAD_NHADD ],
        [ NZ_KEY_HEAD,  NZ_KEY_NCADD, NZ_LABEL_HEAD_NCADD ],
    );
    #
    foreach my $ptriple (@key_triples)
    {
        export_u01_nozzle_data_keys($pdb, @{$ptriple});
    }
}
#
sub export_u01_data
{
    # Top-level U01 export driver: write the count, time, feeder and
    # nozzle data sets. The nozzle exporter used depends on whether the
    # PROC_OPT_USENEWNZ processing option is enabled.
    my ($pdb) = @_;
    #
    export_u01_count_data($pdb);
    export_u01_time_data($pdb);
    export_u01_feeder_data($pdb);
    #
    my $use_new_nozzle_export = (($proc_options & PROC_OPT_USENEWNZ) != 0);
    if ($use_new_nozzle_export)
    {
        export_u01_nozzle_data_new($pdb);
    }
    else
    {
        export_u01_nozzle_data($pdb);
    }
}
#
######################################################################
#
# high-level u01 file audit functions
#
sub calculate_u01_name_value_delta
{
    # Compute per-key deltas for a name-value section (count/time) by
    # subtracting the cached values from the current U01 file's values,
    # storing the results under {delta} for this machine/lane/stage.
    #
    # Negative deltas are kept when PROC_OPT_USENEGDELTS is set, otherwise
    # they are clamped to zero; a warning is logged either way. A key
    # missing from the cache is a fatal error.
    my ($pdb, $pu01, $section) = @_;
    #
    my $filename = $pu01->{file_name};
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    foreach my $key (keys %{$pu01->{$section}->{data}})
    {
        my $delta = 0;
        #
        if (exists($pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$key}))
        {
            $delta =
                $pu01->{$section}->{data}->{$key} -
                $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$key};
            #
            if ($delta >= 0)
            {
                $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$key} = $delta;
            }
            elsif (($proc_options & PROC_OPT_USENEGDELTS) != 0)
            {
                printf $log_fh "%d WARNING: [%s] using NEGATIVE delta for %s key %s: %d\n", __LINE__, $filename, $section, $key, $delta if ($verbose >= MINVERBOSE);
                $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$key} = $delta;
            }
            else
            {
                $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$key} = 0;
                printf $log_fh "%d WARNING: [%s] setting NEGATIVE delta (%d) for %s key %s to ZERO\n", __LINE__, $filename, $delta, $section, $key if ($verbose >= MINVERBOSE);
            }
            #
            printf $log_fh "%d %s: %s = %d\n", __LINE__, $section, $key, $delta if ($verbose >= MAXVERBOSE);
        }
        else
        {
            printf $log_fh "ERROR: [%s] %s key %s NOT found in cache. Ignoring counts (%d).\n", $filename, $section, $key, $pu01->{$section}->{data}->{$key};
            # typo fix: message previously read "NOT found it cache".
            die "ERROR: [$filename] $section key $key NOT found in cache. Stopped";
        }
    }
}
#
sub copy_u01_name_value_cache
{
    # Snapshot the current U01 file's name-value section data into the
    # per-machine/lane/stage cache, overwriting any previously cached
    # values for the same keys (keys absent from this file are kept).
    my ($pdb, $pu01, $section) = @_;
    #
    my $filename = $pu01->{file_name};
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $pdata = $pu01->{$section}->{data};
    #
    foreach my $key (keys %{$pdata})
    {
        $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$key} = $pdata->{$key};
    }
}
#
sub copy_u01_name_value_delta
{
    # Replace the per-machine/lane/stage {delta} for this section with the
    # raw values from the current U01 file. Used by the audit state machine
    # when the file's counts already are deltas (after a reset or a
    # product change-over).
    my ($pdb, $pu01, $section) = @_;
    #
    my $filename = $pu01->{file_name};
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $pdata = $pu01->{$section}->{data};
    #
    # drop any stale deltas before copying the new ones.
    delete $pdb->{$section}->{$machine}{$lane}{$stage}{delta};
    #
    foreach my $key (keys %{$pdata})
    {
        my $value = $pdata->{$key};
        $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$key} = $value;
        printf $log_fh "%s: %s = %d\n", $section, $key, $value
            if ($verbose >= MAXVERBOSE);
    }
}
#
sub tabulate_u01_name_value_delta
{
    # Fold this file's per-key deltas for $section into the global %totals
    # rollups, both product-dependent and product-independent, at every
    # granularity: overall, by machine, by machine+lane, and by
    # machine+lane+stage. Only the product-dependent rollups are logged,
    # matching the original behavior.
    my ($pdb, $pu01, $section) = @_;
    #
    my $filename = $pu01->{file_name};
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $product = $pdb->{product}{u01}{$machine}{$lane}{$stage};
    #
    foreach my $key (keys %{$pu01->{$section}->{data}})
    {
        my $delta = $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$key};
        #
        # add this key's delta into one rollup hash; returns the new total.
        # (initializes the slot on first sight to avoid uninitialized math.)
        my $accumulate = sub {
            my ($pslot) = @_;
            if (exists($pslot->{$key}))
            {
                $pslot->{$key} += $delta;
            }
            else
            {
                $pslot->{$key} = $delta;
            }
            return $pslot->{$key};
        };
        #
        # product dependent totals
        #
        my $pprod = \%{$totals{by_product}{$product}{$section}};
        #
        my $total = $accumulate->(\%{$pprod->{totals}});
        printf $log_fh "%d %s %s %s total=%d\n", __LINE__, $product, $section, $key, $total if ($verbose >= MAXVERBOSE);
        #
        $total = $accumulate->(\%{$pprod->{by_machine}{$machine}});
        printf $log_fh "%d %s %s %s %s total=%d\n", __LINE__, $product, $section, $machine, $key, $total if ($verbose >= MAXVERBOSE);
        #
        $total = $accumulate->(\%{$pprod->{by_machine_lane}{$machine}{$lane}});
        printf $log_fh "%d %s %s %s %s %s total=%d\n", __LINE__, $product, $section, $machine, $lane, $key, $total if ($verbose >= MAXVERBOSE);
        #
        $total = $accumulate->(\%{$pprod->{by_machine_lane_stage}{$machine}{$lane}{$stage}});
        printf $log_fh "%d %s %s %s %s %s %s total=%d\n", __LINE__, $product, $section, $machine, $lane, $stage, $key, $total if ($verbose >= MAXVERBOSE);
        #
        # product independent totals (not logged)
        #
        $accumulate->(\%{$totals{$section}{totals}});
        $accumulate->(\%{$totals{$section}{by_machine}{$machine}});
        $accumulate->(\%{$totals{$section}{by_machine_lane}{$machine}{$lane}});
        $accumulate->(\%{$totals{$section}{by_machine_lane_stage}{$machine}{$lane}{$stage}});
    }
}
#
sub audit_u01_name_value
{
    # Audit one U01 file's name-value section (count or time) and drive the
    # per-machine/lane/stage delta state machine:
    #
    #   no state - first file seen: baseline the cache, enter DELTA.
    #   DELTA    - delta = current - cached; tabulate; re-cache.
    #   RESET    - counters were cleared; current values ARE the deltas.
    #   BASELINE - re-baseline the cache without tabulating.
    #
    # Clear-type files (manual/auto) carry no data: they drop the cache and
    # move the state machine to RESET.
    my ($pdb, $pu01, $section) = @_;
    #
    my $filename = $pu01->{file_name};
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    # optionally ignore timer-not-running files for the count section.
    return if ((($proc_options & PROC_OPT_IGNRESET12) != 0) &&
               ($output_no == TIMER_NOT_RUNNING) &&
               ($section eq COUNT));
    #
    my $mjsid = '';
    my $lotname = '';
    my $lotnumber = 0;
    #
    my $change_over = $pdb->{change_over}{u01}{$machine}{$lane}{$stage};
    printf $log_fh "Change Over: %s\n", $change_over if ($verbose >= MAXVERBOSE);
    #
    get_product_info($pu01, \$mjsid, \$lotname, \$lotnumber);
    #
    printf $log_fh "\nSECTION : %s\n", $section
        if ($verbose >= MAXVERBOSE);
    #
    if ($verbose >= MAXVERBOSE)
    {
        printf $log_fh "MACHINE : %s\n", $machine;
        printf $log_fh "LANE : %d\n", $lane;
        printf $log_fh "STAGE : %d\n", $stage;
        printf $log_fh "OUTPUT NO: %s\n", $output_no;
        printf $log_fh "FILE RECS : %d\n", scalar(@{$pu01->{data}});
        printf $log_fh "%s RECS: %d\n", $section, scalar(keys %{$pu01->{$section}->{data}});
    }
    #
    # output 3,4,5,12 U01 files have both Time and Count sections.
    # these output types can all be treated the same.
    #
    if (($output_no == PROD_COMPLETE) ||
        ($output_no == PROD_COMPLETE_LATER) ||
        ($output_no == DETECT_CHANGE) ||
        ($output_no == TIMER_NOT_RUNNING))
    {
        if ( ! exists($pdb->{$section}->{$machine}{$lane}{$stage}{state}))
        {
            #
            # first file of any of these types to be processed.
            #
            printf $log_fh "ENTRY STATE: UNKNOWN\n"
                if ($verbose >= MAXVERBOSE);
            #
            delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
            copy_u01_name_value_cache($pdb, $pu01, $section);
            #
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
        }
        else
        {
            my $state = $pdb->{$section}->{$machine}{$lane}{$stage}{state};
            #
            printf $log_fh "ENTRY STATE: %s\n", $state if ($verbose >= MAXVERBOSE);
            #
            if ($change_over == TRUE)
            {
                # product change-over: the file's counts are fresh deltas.
                copy_u01_name_value_delta($pdb, $pu01, $section);
                tabulate_u01_name_value_delta($pdb, $pu01, $section);
                copy_u01_name_value_cache($pdb, $pu01, $section);
                #
                $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
            }
            elsif ($state eq DELTA)
            {
                # normal case: delta = current - cached.
                calculate_u01_name_value_delta($pdb, $pu01, $section);
                tabulate_u01_name_value_delta($pdb, $pu01, $section);
                copy_u01_name_value_cache($pdb, $pu01, $section);
                #
                $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
            }
            elsif ($state eq RESET)
            {
                # counters were cleared: the file's counts are the deltas.
                copy_u01_name_value_delta($pdb, $pu01, $section);
                tabulate_u01_name_value_delta($pdb, $pu01, $section);
                copy_u01_name_value_cache($pdb, $pu01, $section);
                #
                $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
            }
            elsif ($state eq BASELINE)
            {
                # re-baseline only; nothing to tabulate.
                delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
                copy_u01_name_value_cache($pdb, $pu01, $section);
                #
                $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
            }
            else
            {
                die "ERROR: unknown $section state: $state. Stopped";
            }
            # bug fix: report the updated (post-transition) state, as
            # audit_u01_feeders does, instead of the stale entry state.
            printf $log_fh "EXIT STATE: %s\n", $pdb->{$section}->{$machine}{$lane}{$stage}{state} if ($verbose >= MAXVERBOSE);
        }
    }
    elsif (($output_no == MANUAL_CLEAR) ||
           ($output_no == AUTO_CLEAR))
    {
        #
        # reset files have no data. they indicate the machine
        # and counters were all reset to zero.
        #
        my $state = $pdb->{$section}->{$machine}{$lane}{$stage}{state};
        printf $log_fh "ENTRY STATE: %s\n", $state if ($verbose >= MAXVERBOSE);
        #
        delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
        #
        $pdb->{$section}->{$machine}{$lane}{$stage}{state} = RESET;
        # bug fix: report the updated (post-transition) state.
        printf $log_fh "EXIT STATE: %s\n", $pdb->{$section}->{$machine}{$lane}{$stage}{state} if ($verbose >= MAXVERBOSE);
    }
    else
    {
        die "ERROR: unknown output type: $output_no. Stopped";
    }
    #
    return;
}
#
######################################################################
#
# routines for feeder section
#
sub calculate_u01_feeder_delta
{
    # Compute per-feeder deltas (current U01 values minus cached values),
    # keyed by FAdd/FSAdd, into {delta} for this machine/lane/stage.
    #
    # A feeder not present in the cache has its counts taken as-is. A
    # changed ReelID also restarts the counts as-is -- unless the feeder
    # address flags a tray part, where reel changes are expected and
    # deltas are still computed. Negative deltas are kept when
    # PROC_OPT_USENEGDELTS is set, otherwise clamped to zero.
    my ($pdb, $pu01) = @_;
    #
    my $section = MOUNTPICKUPFEEDER;
    #
    my $filename = $pu01->{file_name};
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $pcols = $pu01->{$section}->{column_names};
    #
    delete $pdb->{$section}->{$machine}{$lane}{$stage}{delta};
    #
    foreach my $prow (@{$pu01->{$section}->{data}})
    {
        my $fadd = $prow->{FAdd};
        my $fsadd = $prow->{FSAdd};
        my $reelid = $prow->{ReelID};
        #
        # tray parts are flagged by two digits embedded in the feeder
        # address; assumes this FAdd encoding -- TODO confirm against spec.
        my $is_tray = substr($fadd, -4, 2);
        if ($is_tray > 0)
        {
            $is_tray = TRUE;
            printf $log_fh "%d: [%s] %s IS tray part (%s) fadd: %s, fsadd: %s\n", __LINE__, $filename, $section, $is_tray, $fadd, $fsadd
                if ($verbose >= MAXVERBOSE);
        }
        else
        {
            $is_tray = FALSE;
            printf $log_fh "%d: [%s] %s IS NOT tray part (%s) fadd: %s, fsadd: %s\n", __LINE__, $filename, $section, $is_tray, $fadd, $fsadd
                if ($verbose >= MAXVERBOSE);
        }
        #
        if ( ! exists($pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{data}))
        {
            #
            # unlike name-value (count,time) sections, it is possible
            # to get new entries which have not been seen before. for
            # example, new reelids or new feeders may not be in the
            # previous u01 file, but appear as new. in those cases,
            # take the counts as is.
            #
            printf $log_fh "%d WARNING: [%s] %s FAdd %s, FSAdd %s NOT found in cache. Taking all counts as is.\n", __LINE__, $filename, $section, $fadd, $fsadd if ($verbose >= MINVERBOSE);
            foreach my $col (@{$pcols})
            {
                $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data}{$col} = $prow->{$col};
            }
        }
        else
        {
            my $cache_reelid = $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{data}{ReelID};
            # file that last populated this cache slot, for warning context
            # (repairs a corrupted "(unknown)" subscript; key must match
            # the write in copy_u01_feeder_cache).
            my $cache_filename = $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{file_name};
            if (($reelid eq $cache_reelid) || ($is_tray == TRUE))
            {
                $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data}{ReelID} = $reelid;
                #
                foreach my $col (@feeder_count_cols)
                {
                    my $u01_value = $prow->{$col};
                    my $cache_value = $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{data}{$col};
                    #
                    my $delta = $u01_value - $cache_value;
                    #
                    if ($delta >= 0)
                    {
                        $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data}{$col} = $delta;
                    }
                    elsif (($proc_options & PROC_OPT_USENEGDELTS) != 0)
                    {
                        printf $log_fh "%d WARNING: [%s] [%s] %s FAdd %s, FSAdd %s using NEGATIVE delta for key %s: %d\n", __LINE__, $filename, $cache_filename, $section, $fadd, $fsadd, $col, $delta if ($verbose >= MINVERBOSE);
                        $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data}{$col} = $delta;
                    }
                    else
                    {
                        $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data}{$col} = 0;
                        printf $log_fh "%d WARNING: [%s] [%s] %s FAdd %s, FSAdd %s setting NEGATIVE delta (%d) for key %s to ZERO; current value %d, cache value %d\n", __LINE__, $filename, $cache_filename, $section, $fadd, $fsadd, $delta, $col, $u01_value, $cache_value if ($verbose >= MINVERBOSE);
                    }
                }
            }
            else
            {
                # reel changed on a non-tray feeder: restart counts as-is.
                printf $log_fh "%d WARNING: [%s] %s FAdd %s, FSAdd %s REELID CHANGED: CACHED %s, CURRENT U01 %s\n", __LINE__, $filename, $section, $fadd, $fsadd, $cache_reelid, $reelid if ($verbose >= MINVERBOSE);
                #
                delete $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data};
                #
                foreach my $col (@{$pcols})
                {
                    $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data}{$col} = $prow->{$col};
                }
            }
        }
    }
}
#
sub copy_u01_feeder_cache
{
    # Snapshot every feeder row of the current U01 file into the
    # per-machine/lane/stage cache, tagging each FAdd/FSAdd slot with the
    # supplied state and the source filename (read back by
    # calculate_u01_feeder_delta for warning context).
    my ($pdb, $pu01, $state) = @_;
    #
    my $section = MOUNTPICKUPFEEDER;
    #
    my $filename = $pu01->{file_name};
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $pcols = $pu01->{$section}->{column_names};
    #
    foreach my $prow (@{$pu01->{$section}->{data}})
    {
        my $fadd = $prow->{FAdd};
        my $fsadd = $prow->{FSAdd};
        #
        foreach my $col (@{$pcols})
        {
            $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{data}{$col} = $prow->{$col};
        }
        #
        $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{state} = $state;
        # repairs a corrupted "(unknown)" subscript; key must match the
        # read in calculate_u01_feeder_delta.
        $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{file_name} = $filename;
    }
}
#
sub copy_u01_feeder_delta
{
    # Replace this machine/lane/stage's feeder {delta} with the raw values
    # from the current U01 file (used when the file's counts already are
    # deltas, e.g. the first file after a reset).
    my ($pdb, $pu01) = @_;
    #
    my $section = MOUNTPICKUPFEEDER;
    #
    my $filename = $pu01->{file_name};
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $pcols = $pu01->{$section}->{column_names};
    my $pstage = \%{$pdb->{$section}->{$machine}{$lane}{$stage}};
    #
    # drop stale deltas before copying the new ones.
    delete $pstage->{delta};
    #
    foreach my $prow (@{$pu01->{$section}->{data}})
    {
        my $fadd = $prow->{FAdd};
        my $fsadd = $prow->{FSAdd};
        #
        $pstage->{delta}{$fadd}{$fsadd}{data}{$_} = $prow->{$_} foreach (@{$pcols});
    }
}
#
sub tabulate_u01_feeder_delta
{
    # Fold this file's per-feeder deltas into the global %totals rollups:
    # product-independent by (machine,lane,stage,fadd,fsadd,reelid),
    # (machine,lane,stage,fadd,fsadd) and (machine,lane,stage,table_no),
    # plus product-dependent reelid and table_no rollups.
    my ($pdb, $pu01) = @_;
    #
    my $filename = $pu01->{file_name};
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    my $section = MOUNTPICKUPFEEDER;
    #
    my $product = $pdb->{product}{u01}{$machine}{$lane}{$stage};
    my $pdeltas = $pdb->{$section}{$machine}{$lane}{$stage}{delta};
    #
    foreach my $fadd (sort { $a <=> $b } keys %{$pdeltas})
    {
        my $table_no = int($fadd/10000); # truncate
        #
        foreach my $fsadd (sort { $a <=> $b } keys %{$pdeltas->{$fadd}})
        {
            my $pdata = $pdeltas->{$fadd}{$fsadd}{data};
            my $reelid = $pdata->{ReelID};
            #
            # add each count column's delta into one rollup hash,
            # initializing columns on first sight.
            my $accumulate = sub {
                my ($pslot) = @_;
                foreach my $col (@feeder_count_cols)
                {
                    if (exists($pslot->{$col}))
                    {
                        $pslot->{$col} += $pdata->{$col};
                    }
                    else
                    {
                        $pslot->{$col} = $pdata->{$col};
                    }
                }
            };
            #
            # product-independent totals
            #
            $accumulate->(\%{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$reelid}});
            $accumulate->(\%{$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}{$fsadd}});
            $accumulate->(\%{$totals{$section}{by_machine_lane_stage_table_no}{$machine}{$lane}{$stage}{$table_no}});
            #
            # product-dependent totals
            #
            $accumulate->(\%{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$reelid}});
            $accumulate->(\%{$totals{by_product}{$product}{$section}{by_machine_lane_stage_table_no}{$machine}{$lane}{$stage}{$table_no}});
        }
    }
}
#
sub audit_u01_feeders
{
    # Audit one U01 file's feeder section and drive the per-machine/lane/
    # stage feeder state machine:
    #
    #   no state - baseline the cache; prod-complete files enter DELTA,
    #              detect-change files enter BASELINE.
    #   DELTA    - delta = current - cached; tabulate; re-cache.
    #   RESET    - counters were cleared; current values ARE the deltas.
    #   BASELINE - re-baseline the cache without tabulating.
    #
    # Timer-not-running files carry no feeder section and are skipped;
    # clear-type files drop the cache and enter RESET.
    my ($pdb, $pu01) = @_;
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    my $section = MOUNTPICKUPFEEDER;
    my $filename = $pu01->{file_name};
    #
    printf $log_fh "\nSECTION : %s\n", $section
        if ($verbose >= MAXVERBOSE);
    #
    if ($verbose >= MAXVERBOSE)
    {
        printf $log_fh "MACHINE : %s\n", $machine;
        printf $log_fh "LANE : %d\n", $lane;
        printf $log_fh "STAGE : %d\n", $stage;
        printf $log_fh "OUTPUT NO: %s\n", $output_no;
        printf $log_fh "FILE RECS : %d\n", scalar(@{$pu01->{data}});
        # bug fix: defined(@array) is a fatal error on perl >= 5.22; test
        # the hash slot itself before dereferencing the array.
        printf $log_fh "%s RECS: %d\n", $section, scalar(@{$pu01->{$section}->{data}}) if (defined($pu01->{$section}->{data}));
    }
    #
    # check if the file has a feeder data section.
    #
    if ($output_no == TIMER_NOT_RUNNING)
    {
        printf $log_fh "No Feeder data in Output=%d U01 files. Skipping.\n", $output_no if ($verbose >= MAXVERBOSE);
        return;
    }
    elsif (($output_no == PROD_COMPLETE) ||
           ($output_no == PROD_COMPLETE_LATER))
    {
        if ( ! exists($pdb->{$section}->{$machine}{$lane}{$stage}{state}))
        {
            # first file seen: baseline the cache.
            printf $log_fh "ENTRY STATE: UNKNOWN\n"
                if ($verbose >= MAXVERBOSE);
            #
            delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
            #
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq RESET)
        {
            printf $log_fh "ENTRY STATE: %s\n",
                   $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            # counters were cleared: the file's counts are the deltas.
            copy_u01_feeder_delta($pdb, $pu01);
            tabulate_u01_feeder_delta($pdb, $pu01);
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
            #
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq DELTA)
        {
            printf $log_fh "ENTRY STATE: %s\n",
                   $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            # normal case: delta = current - cached.
            calculate_u01_feeder_delta($pdb, $pu01);
            tabulate_u01_feeder_delta($pdb, $pu01);
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
            #
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq BASELINE)
        {
            printf $log_fh "ENTRY STATE: %s\n",
                   $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            # re-baseline only; nothing to tabulate.
            delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
            #
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
            #
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
        }
        else
        {
            my $state = $pdb->{$section}->{$machine}{$lane}{$stage}{state};
            die "ERROR: unknown $section state: $state. Stopped";
        }
    }
    elsif ($output_no == DETECT_CHANGE)
    {
        if ( ! exists($pdb->{$section}->{$machine}{$lane}{$stage}{state}))
        {
            # unlike prod-complete files, the first detect-change file
            # only establishes a baseline.
            printf $log_fh "ENTRY STATE: UNKNOWN\n"
                if ($verbose >= MAXVERBOSE);
            #
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = BASELINE;
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq RESET)
        {
            printf $log_fh "ENTRY STATE: %s\n",
                   $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            # counters were cleared: the file's counts are the deltas.
            copy_u01_feeder_delta($pdb, $pu01);
            tabulate_u01_feeder_delta($pdb, $pu01);
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
            #
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq DELTA)
        {
            printf $log_fh "ENTRY STATE: %s\n",
                   $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            # normal case: delta = current - cached.
            calculate_u01_feeder_delta($pdb, $pu01);
            tabulate_u01_feeder_delta($pdb, $pu01);
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
            #
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq BASELINE)
        {
            printf $log_fh "ENTRY STATE: %s\n",
                   $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            # re-cache only (cache is NOT deleted first here).
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
        }
        else
        {
            my $state = $pdb->{$section}->{$machine}{$lane}{$stage}{state};
            die "ERROR: unknown $section state: $state. Stopped";
        }
    }
    elsif (($output_no == MANUAL_CLEAR) ||
           ($output_no == AUTO_CLEAR))
    {
        # clear files: drop the cache and enter RESET.
        printf $log_fh "ENTRY STATE: %s\n",
               $pdb->{$section}->{$machine}{$lane}{$stage}{state}
            if ($verbose >= MAXVERBOSE);
        $pdb->{$section}->{$machine}{$lane}{$stage}{state} = RESET;
        delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
    }
    else
    {
        die "ERROR: unknown $section output type: $output_no. Stopped";
    }
    #
    printf $log_fh "EXIT STATE: %s\n",
           $pdb->{$section}->{$machine}{$lane}{$stage}{state}
        if ($verbose >= MAXVERBOSE);
    #
    return;
}
#
######################################################################
#
# routines for nozzle section
#
sub calculate_u01_nozzle_delta
{
    # Compute per-nozzle deltas (current U01 values minus cached values),
    # keyed by NHAdd/NCAdd, into {delta} for this machine/lane/stage.
    #
    # A nozzle not present in the cache has its counts taken as-is; a
    # changed BLKSerial also restarts the counts as-is. Negative deltas
    # are kept when PROC_OPT_USENEGDELTS is set, otherwise clamped to zero.
    my ($pdb, $pu01) = @_;
    #
    my $section = MOUNTPICKUPNOZZLE;
    #
    my $filename = $pu01->{file_name};
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $pcols = $pu01->{$section}->{column_names};
    #
    # all cache/delta access below is relative to this machine/lane/stage.
    my $pstage = \%{$pdb->{$section}->{$machine}{$lane}{$stage}};
    #
    delete $pstage->{delta};
    #
    foreach my $row (@{$pu01->{$section}->{data}})
    {
        my $nhadd = $row->{NHAdd};
        my $ncadd = $row->{NCAdd};
        my $blkserial = $row->{BLKSerial};
        #
        if ( ! exists($pstage->{cache}{$nhadd}{$ncadd}{data}))
        {
            # new nozzle position: take the counts as-is.
            printf $log_fh "%d WARNING: [%s] %s NHAdd %s, NCAdd %s NOT found in cache. Taking all counts as is.\n", __LINE__, $filename, $section, $nhadd, $ncadd if ($verbose >= MINVERBOSE);
            foreach my $col (@{$pcols})
            {
                $pstage->{delta}{$nhadd}{$ncadd}{data}{$col} = $row->{$col};
            }
            next;
        }
        #
        my $cache_blkserial = $pstage->{cache}{$nhadd}{$ncadd}{data}{BLKSerial};
        if ($blkserial eq $cache_blkserial)
        {
            $pstage->{delta}{$nhadd}{$ncadd}{data}{BLKSerial} = $blkserial;
            #
            foreach my $col (@nozzle_count_cols)
            {
                my $new_value = $row->{$col};
                my $old_value = $pstage->{cache}{$nhadd}{$ncadd}{data}{$col};
                #
                my $diff = $new_value - $old_value;
                #
                if ($diff >= 0)
                {
                    $pstage->{delta}{$nhadd}{$ncadd}{data}{$col} = $diff;
                }
                elsif (($proc_options & PROC_OPT_USENEGDELTS) != 0)
                {
                    printf $log_fh "%d WARNING: [%s] %s NHAdd %s, NCAdd %s using NEGATIVE delta for key %s: %d\n", __LINE__, $filename, $section, $nhadd, $ncadd, $col, $diff if ($verbose >= MINVERBOSE);
                    $pstage->{delta}{$nhadd}{$ncadd}{data}{$col} = $diff;
                }
                else
                {
                    $pstage->{delta}{$nhadd}{$ncadd}{data}{$col} = 0;
                    printf $log_fh "%d WARNING: [%s] %s NHAdd %s, NCAdd %s setting NEGATIVE delta (%d) for key %s to ZERO\n", __LINE__, $filename, $section, $nhadd, $ncadd, $diff, $col if ($verbose >= MINVERBOSE);
                }
            }
        }
        else
        {
            # nozzle block was swapped: restart the counts as-is.
            printf $log_fh "%d WARNING: [%s] %s NHAdd %s, NCAdd %s BLKSERIAL CHANGED: CACHED %s, CURRENT U01 %s\n", __LINE__, $filename, $section, $nhadd, $ncadd, $cache_blkserial, $blkserial if ($verbose >= MINVERBOSE);
            #
            delete $pstage->{delta}{$nhadd}{$ncadd}{data};
            #
            foreach my $col (@{$pcols})
            {
                $pstage->{delta}{$nhadd}{$ncadd}{data}{$col} = $row->{$col};
            }
        }
    }
}
#
sub copy_u01_nozzle_cache
{
    # Snapshot every nozzle row of the current U01 file into the
    # per-machine/lane/stage cache, tagging each NHAdd/NCAdd slot with the
    # supplied state.
    my ($pdb, $pu01, $state) = @_;
    #
    my $section = MOUNTPICKUPNOZZLE;
    #
    my $filename = $pu01->{file_name};
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $pcols = $pu01->{$section}->{column_names};
    #
    foreach my $row (@{$pu01->{$section}->{data}})
    {
        my $pslot = \%{$pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$row->{NHAdd}}{$row->{NCAdd}}};
        #
        $pslot->{data}{$_} = $row->{$_} foreach (@{$pcols});
        $pslot->{state} = $state;
    }
}
#
sub copy_u01_nozzle_delta
{
    # Replace this machine/lane/stage's nozzle {delta} with the raw values
    # from the current U01 file (used when the file's counts already are
    # deltas, e.g. the first file after a reset).
    my ($pdb, $pu01) = @_;
    #
    my $section = MOUNTPICKUPNOZZLE;
    #
    my $filename = $pu01->{file_name};
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $pcols = $pu01->{$section}->{column_names};
    my $pstage = \%{$pdb->{$section}->{$machine}{$lane}{$stage}};
    #
    # drop stale deltas before copying the new ones.
    delete $pstage->{delta};
    #
    foreach my $row (@{$pu01->{$section}->{data}})
    {
        my $nhadd = $row->{NHAdd};
        my $ncadd = $row->{NCAdd};
        #
        $pstage->{delta}{$nhadd}{$ncadd}{data}{$_} = $row->{$_} foreach (@{$pcols});
    }
}
#
sub tabulate_u01_nozzle_delta
{
    # Fold this file's per-nozzle deltas into the global %totals rollups,
    # both product-independent and product-dependent, keyed by
    # (machine,lane,stage,nhadd,ncadd) with and without BLKSerial.
    my ($pdb, $pu01) = @_;
    #
    my $filename = $pu01->{file_name};
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    my $section = MOUNTPICKUPNOZZLE;
    #
    my $product = $pdb->{product}{u01}{$machine}{$lane}{$stage};
    my $pdeltas = $pdb->{$section}{$machine}{$lane}{$stage}{delta};
    #
    foreach my $nhadd (sort { $a <=> $b } keys %{$pdeltas})
    {
        foreach my $ncadd (sort { $a <=> $b } keys %{$pdeltas->{$nhadd}})
        {
            my $pdata = $pdeltas->{$nhadd}{$ncadd}{data};
            my $blkserial = $pdata->{BLKSerial};
            #
            # add each count column's delta into one rollup hash,
            # initializing columns on first sight.
            my $accumulate = sub {
                my ($pslot) = @_;
                foreach my $col (@nozzle_count_cols)
                {
                    if (exists($pslot->{$col}))
                    {
                        $pslot->{$col} += $pdata->{$col};
                    }
                    else
                    {
                        $pslot->{$col} = $pdata->{$col};
                    }
                }
            };
            #
            # product-independent totals
            #
            $accumulate->(\%{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$blkserial}});
            $accumulate->(\%{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}});
            #
            # product-dependent totals
            #
            $accumulate->(\%{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$blkserial}});
            $accumulate->(\%{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}});
        }
    }
}
#
sub audit_u01_nozzles
{
    my ($pdb, $pu01) = @_;
    #
    # Drive the nozzle-count state machine for one U01 file.
    #
    # States (per machine/lane/stage):
    #   <none>   - first file seen: prime the cache from the file.
    #   RESET    - counters were cleared: the file's counts ARE the delta.
    #   DELTA    - steady state: delta current counts against the cache.
    #   BASELINE - re-prime the cache, discarding any previous one.
    #
    # Output types 5/12 carry no nozzle data and are skipped; output
    # types 11/13 (manual/auto clear) drop the cache and go to RESET.
    #
    my $machine   = $pu01->{mach_no};
    my $lane      = $pu01->{lane};
    my $stage     = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    my $section   = MOUNTPICKUPNOZZLE;
    my $filename  = $pu01->{file_name};
    #
    if ($verbose >= MAXVERBOSE)
    {
        printf $log_fh "\nSECTION : %s\n", $section;
        printf $log_fh "MACHINE : %s\n", $machine;
        printf $log_fh "LANE : %d\n", $lane;
        printf $log_fh "STAGE : %d\n", $stage;
        printf $log_fh "OUTPUT NO: %s\n", $output_no;
        printf $log_fh "FILE RECS : %d\n", scalar(@{$pu01->{data}});
        #
        # BUG FIX: defined(@{...}) is a fatal error on modern perls
        # (>= 5.22); test the data slot itself instead.
        #
        printf $log_fh "%s RECS: %d\n", $section, scalar(@{$pu01->{$section}->{data}})
            if (defined($pu01->{$section}) && defined($pu01->{$section}->{data}));
    }
    #
    # check if the file has a nozzle data section.
    #
    if (($output_no == DETECT_CHANGE) ||
        ($output_no == TIMER_NOT_RUNNING))
    {
        printf $log_fh "No Nozzle data in Output=%d U01 files. Skipping.\n", $output_no if ($verbose >= MAXVERBOSE);
        return;
    }
    elsif (($output_no == PROD_COMPLETE) ||
           ($output_no == PROD_COMPLETE_LATER))
    {
        if ( ! exists($pdb->{$section}->{$machine}{$lane}{$stage}{state}))
        {
            printf $log_fh "ENTRY STATE: UNKNOWN\n"
                if ($verbose >= MAXVERBOSE);
            #
            # first file for this machine/lane/stage: prime the cache.
            #
            delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
            copy_u01_nozzle_cache($pdb, $pu01, DELTA);
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq RESET)
        {
            printf $log_fh "ENTRY STATE: %s\n",
                $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            #
            # counters were cleared: take the file's counts as-is.
            #
            copy_u01_nozzle_delta($pdb, $pu01);
            tabulate_u01_nozzle_delta($pdb, $pu01);
            copy_u01_nozzle_cache($pdb, $pu01, DELTA);
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq DELTA)
        {
            printf $log_fh "ENTRY STATE: %s\n",
                $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            #
            # steady state: delta the current counts against the cache.
            #
            calculate_u01_nozzle_delta($pdb, $pu01);
            tabulate_u01_nozzle_delta($pdb, $pu01);
            copy_u01_nozzle_cache($pdb, $pu01, DELTA);
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq BASELINE)
        {
            printf $log_fh "ENTRY STATE: %s\n",
                $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            #
            # re-prime the cache, discarding the previous one.
            #
            delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
            copy_u01_nozzle_cache($pdb, $pu01, DELTA);
        }
        else
        {
            my $state = $pdb->{$section}->{$machine}{$lane}{$stage}{state};
            die "ERROR: unknown $section state: $state. Stopped";
        }
        #
        # every successful path above lands in DELTA state.
        #
        $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
    }
    elsif (($output_no == MANUAL_CLEAR) ||
           ($output_no == AUTO_CLEAR))
    {
        printf $log_fh "ENTRY STATE: %s\n",
            $pdb->{$section}->{$machine}{$lane}{$stage}{state}
            if ($verbose >= MAXVERBOSE);
        $pdb->{$section}->{$machine}{$lane}{$stage}{state} = RESET;
        delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
    }
    else
    {
        die "ERROR: unknown $section output type: $output_no. Stopped";
    }
    #
    printf $log_fh "EXIT STATE: %s\n",
        $pdb->{$section}->{$machine}{$lane}{$stage}{state}
        if ($verbose >= MAXVERBOSE);
    #
    return;
}
#
######################################################################
#
# routines for nozzle section
#
sub calculate_u01_nozzle_delta_keys
{
    my ($pdb, $pu01, $nmkey1, $nmkey2, $label) = @_;
    #
    # Compute per-row count deltas for one U01 file against the cached
    # counts, keyed by the two columns named in $nmkey1/$nmkey2 and
    # stored under the view $label.
    #
    # Rules per row:
    #   - no cache entry     -> take the file's counts as-is.
    #   - BLKSerial changed  -> nozzle swapped, counters restarted:
    #                           take the file's counts as-is.
    #   - otherwise          -> delta = current - cached; negative deltas
    #                           are kept or zeroed per PROC_OPT_USENEGDELTS.
    #
    my $section = MOUNTPICKUPNOZZLE;
    #
    my $filename = $pu01->{file_name};
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $pcols = $pu01->{$section}->{column_names};
    #
    delete $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{delta};
    #
    foreach my $prow (@{$pu01->{$section}->{data}})
    {
        my $key1 = $prow->{$nmkey1};
        my $key2 = $prow->{$nmkey2};
        my $blkserial = $prow->{BLKSerial};
        #
        if ( ! exists($pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{cache}{$key1}{$key2}{data}))
        {
            # BUG FIX: the first key used to be (wrongly) labeled with
            # $nmkey2 as well; it is the $nmkey1 value.
            printf $log_fh "%d WARNING: [%s] %s $nmkey1 %s, $nmkey2 %s NOT found in cache. Taking all counts as is.\n", __LINE__, $filename, $section, $key1, $key2 if ($verbose >= MINVERBOSE);
            foreach my $col (@{$pcols})
            {
                $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{delta}{$key1}{$key2}{data}{$col} = $prow->{$col};
            }
        }
        else
        {
            my $cache_blkserial = $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{cache}{$key1}{$key2}{data}{BLKSerial};
            if ($blkserial eq $cache_blkserial)
            {
                $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{delta}{$key1}{$key2}{data}{BLKSerial} = $blkserial;
                #
                foreach my $col (@nozzle_count_cols)
                {
                    my $u01_value = $prow->{$col};
                    my $cache_value = $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{cache}{$key1}{$key2}{data}{$col};
                    #
                    my $delta = $u01_value - $cache_value;
                    #
                    if ($delta >= 0)
                    {
                        $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{delta}{$key1}{$key2}{data}{$col} = $delta;
                    }
                    elsif (($proc_options & PROC_OPT_USENEGDELTS) != 0)
                    {
                        printf $log_fh "%d WARNING: [%s] %s $nmkey1 %s, $nmkey2 %s using NEGATIVE delta for key %s: %d\n", __LINE__, $filename, $section, $key1, $key2, $col, $delta if ($verbose >= MINVERBOSE);
                        $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{delta}{$key1}{$key2}{data}{$col} = $delta;
                    }
                    else
                    {
                        # negative delta without the opt-in: clamp to zero.
                        $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{delta}{$key1}{$key2}{data}{$col} = 0;
                        printf $log_fh "%d WARNING: [%s] %s $nmkey1 %s, $nmkey2 %s setting NEGATIVE delta (%d) for key %s to ZERO\n", __LINE__, $filename, $section, $key1, $key2, $delta, $col if ($verbose >= MINVERBOSE);
                    }
                }
            }
            else
            {
                printf $log_fh "%d WARNING: [%s] %s $nmkey1 %s, $nmkey2 %s BLKSERIAL CHANGED: CACHED %s, CURRENT U01 %s\n", __LINE__, $filename, $section, $key1, $key2, $cache_blkserial, $blkserial if ($verbose >= MINVERBOSE);
                #
                delete $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{delta}{$key1}{$key2}{data};
                #
                foreach my $col (@{$pcols})
                {
                    $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{delta}{$key1}{$key2}{data}{$col} = $prow->{$col};
                }
            }
        }
    }
}
#
sub copy_u01_nozzle_cache_keys
{
    my ($pdb, $pu01, $state, $nmkey1, $nmkey2, $label) = @_;
    #
    # Snapshot every nozzle row of this U01 file into the cache for the
    # view $label, keyed by the two caller-supplied key columns, and tag
    # each cache entry with $state.
    #
    my $section = MOUNTPICKUPNOZZLE;
    #
    my $machine = $pu01->{mach_no};
    my $lane    = $pu01->{lane};
    my $stage   = $pu01->{stage};
    #
    my $pcols = $pu01->{$section}->{column_names};
    #
    foreach my $row (@{$pu01->{$section}->{data}})
    {
        # autovivify the cache slot for this row's key pair.
        my $pslot = \%{ $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{cache}{$row->{$nmkey1}}{$row->{$nmkey2}} };
        #
        $pslot->{data}{$_} = $row->{$_} foreach (@{$pcols});
        $pslot->{state} = $state;
    }
}
#
sub copy_u01_nozzle_delta_keys
{
    my ($pdb, $pu01, $nmkey1, $nmkey2, $label) = @_;
    #
    # Rebuild the delta table for the view $label as a verbatim copy of
    # this U01 file's rows (used when counters were just reset, so the
    # file's counts ARE the delta).
    #
    my $section = MOUNTPICKUPNOZZLE;
    #
    my $machine = $pu01->{mach_no};
    my $lane    = $pu01->{lane};
    my $stage   = $pu01->{stage};
    #
    my $pcols = $pu01->{$section}->{column_names};
    #
    # discard any previous delta before copying.
    #
    delete $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{delta};
    #
    foreach my $row (@{$pu01->{$section}->{data}})
    {
        my $pslot = \%{ $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{delta}{$row->{$nmkey1}}{$row->{$nmkey2}}{data} };
        #
        $pslot->{$_} = $row->{$_} foreach (@{$pcols});
    }
}
#
sub tabulate_u01_nozzle_delta_keys
{
    my ($pdb, $pu01, $nmkey1, $nmkey2, $label) = @_;
    #
    # Fold the keyed delta counts of one U01 file into the global %totals
    # summaries for the view $label.  Each (key1, key2) pair contributes
    # to four buckets: product-independent and product-dependent, each
    # with and without the nozzle BLKSerial in the key.
    #
    my $section = MOUNTPICKUPNOZZLE;
    #
    my $machine = $pu01->{mach_no};
    my $lane    = $pu01->{lane};
    my $stage   = $pu01->{stage};
    #
    my $product = $pdb->{product}{u01}{$machine}{$lane}{$stage};
    #
    # \%{ ... } autovivifies the path, matching the old behavior of
    # iterating keys of the deep expression.
    #
    my $pdelta = \%{ $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{delta} };
    #
    foreach my $key1 (sort { $a <=> $b } keys %{$pdelta})
    {
        foreach my $key2 (sort { $a <=> $b } keys %{$pdelta->{$key1}})
        {
            my $pdata     = $pdelta->{$key1}{$key2}{data};
            my $blkserial = $pdata->{BLKSerial};
            #
            # the four summary buckets this pair contributes to, in the
            # same order the original code updated them.
            #
            foreach my $pbucket (
                \%{ $totals{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}{$lane}{$stage}{$key1}{$key2}{$blkserial} },
                \%{ $totals{$section}{$label}{by_machine_lane_stage_key1_key2}{$machine}{$lane}{$stage}{$key1}{$key2} },
                \%{ $totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2_blkserial}{$machine}{$lane}{$stage}{$key1}{$key2}{$blkserial} },
                \%{ $totals{by_product}{$product}{$section}{$label}{by_machine_lane_stage_key1_key2}{$machine}{$lane}{$stage}{$key1}{$key2} })
            {
                if (keys %{$pbucket})
                {
                    # bucket already seeded: accumulate the counts.
                    $pbucket->{$_} += $pdata->{$_} foreach (@nozzle_count_cols);
                }
                else
                {
                    # first contribution: copy the counts verbatim.
                    $pbucket->{$_} = $pdata->{$_} foreach (@nozzle_count_cols);
                }
            }
        }
    }
}
#
sub audit_u01_nozzles_keys
{
    my ($pdb, $pu01, $nmkey1, $nmkey2, $label) = @_;
    #
    # Drive the keyed nozzle-count state machine for one U01 file, using
    # the $nmkey1/$nmkey2 columns as row keys and storing all state under
    # the view $label.  See audit_u01_nozzles() for the state diagram.
    #
    my $machine   = $pu01->{mach_no};
    my $lane      = $pu01->{lane};
    my $stage     = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    my $section   = MOUNTPICKUPNOZZLE;
    my $filename  = $pu01->{file_name};
    #
    if ($verbose >= MAXVERBOSE)
    {
        printf $log_fh "\nSECTION : %s\n", $section;
        printf $log_fh "MACHINE : %s\n", $machine;
        printf $log_fh "LANE : %d\n", $lane;
        printf $log_fh "STAGE : %d\n", $stage;
        printf $log_fh "OUTPUT NO: %s\n", $output_no;
        printf $log_fh "FILE RECS : %d\n", scalar(@{$pu01->{data}});
        #
        # BUG FIX: defined(@{...}) is a fatal error on modern perls
        # (>= 5.22); test the data slot itself instead.
        #
        printf $log_fh "%s RECS: %d\n", $section, scalar(@{$pu01->{$section}->{data}})
            if (defined($pu01->{$section}) && defined($pu01->{$section}->{data}));
    }
    #
    # check if the file has a nozzle data section.
    #
    if (($output_no == DETECT_CHANGE) ||
        ($output_no == TIMER_NOT_RUNNING))
    {
        printf $log_fh "No Nozzle data in Output=%d U01 files. Skipping.\n", $output_no if ($verbose >= MAXVERBOSE);
        return;
    }
    elsif (($output_no == PROD_COMPLETE) ||
           ($output_no == PROD_COMPLETE_LATER))
    {
        if ( ! exists($pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{state}))
        {
            printf $log_fh "ENTRY STATE: UNKNOWN\n"
                if ($verbose >= MAXVERBOSE);
            #
            # first file for this view: prime the cache.
            #
            delete $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{cache};
            copy_u01_nozzle_cache_keys(
                $pdb, $pu01, DELTA, $nmkey1, $nmkey2, $label);
        }
        elsif ($pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{state} eq RESET)
        {
            printf $log_fh "ENTRY STATE: %s\n",
                $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            #
            # counters were cleared: the file's counts ARE the delta.
            #
            copy_u01_nozzle_delta_keys(
                $pdb, $pu01, $nmkey1, $nmkey2, $label);
            tabulate_u01_nozzle_delta_keys(
                $pdb, $pu01, $nmkey1, $nmkey2, $label);
            copy_u01_nozzle_cache_keys(
                $pdb, $pu01, DELTA, $nmkey1, $nmkey2, $label);
        }
        elsif ($pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{state} eq DELTA)
        {
            printf $log_fh "ENTRY STATE: %s\n",
                $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            #
            # steady state: delta the current counts against the cache.
            #
            calculate_u01_nozzle_delta_keys(
                $pdb, $pu01, $nmkey1, $nmkey2, $label);
            tabulate_u01_nozzle_delta_keys(
                $pdb, $pu01, $nmkey1, $nmkey2, $label);
            copy_u01_nozzle_cache_keys(
                $pdb, $pu01, DELTA, $nmkey1, $nmkey2, $label);
        }
        elsif ($pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{state} eq BASELINE)
        {
            #
            # BUG FIX: this used to print the state of the non-keyed
            # table (it was missing the {$label} level).
            #
            printf $log_fh "ENTRY STATE: %s\n",
                $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            #
            # re-prime the cache, discarding the previous one.
            #
            delete $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{cache};
            copy_u01_nozzle_cache_keys(
                $pdb, $pu01, DELTA, $nmkey1, $nmkey2, $label);
        }
        else
        {
            my $state = $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{state};
            die "ERROR: unknown $section state: $state. Stopped";
        }
        #
        # every successful path above lands in DELTA state.
        #
        $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{state} = DELTA;
    }
    elsif (($output_no == MANUAL_CLEAR) ||
           ($output_no == AUTO_CLEAR))
    {
        printf $log_fh "ENTRY STATE: %s\n",
            $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{state}
            if ($verbose >= MAXVERBOSE);
        $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{state} = RESET;
        delete $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{cache};
    }
    else
    {
        die "ERROR: unknown $section output type: $output_no. Stopped";
    }
    #
    printf $log_fh "EXIT STATE: %s\n",
        $pdb->{$section}->{$label}->{$machine}{$lane}{$stage}{state}
        if ($verbose >= MAXVERBOSE);
    #
    return;
}
#
sub audit_u01_nozzles_new
{
    my ($pdb, $pu01) = @_;
    #
    # Run the keyed nozzle audit once per key-pair view, in this order:
    # (NHAdd, NCAdd), (Head, NHAdd), (Head, NCAdd).
    #
    my @views =
    (
        [ NZ_KEY_NHADD, NZ_KEY_NCADD, NZ_LABEL_NHADD_NCADD ],
        [ NZ_KEY_HEAD,  NZ_KEY_NHADD, NZ_LABEL_HEAD_NHADD ],
        [ NZ_KEY_HEAD,  NZ_KEY_NCADD, NZ_LABEL_HEAD_NCADD ],
    );
    #
    audit_u01_nozzles_keys($pdb, $pu01, @{$_}) foreach (@views);
    #
    return;
}
#
#####################################################################
#
# high-level audit functions for u01 files.
#
sub audit_u01_file
{
    my ($pdb, $pu01) = @_;
    #
    # Audit one loaded U01 file end-to-end: product info, counts, times,
    # feeders and nozzles.  Output=12 (timer-not-running) files are
    # skipped entirely when the ignall12 processing option is enabled.
    #
    if (($pu01->{output_no} == TIMER_NOT_RUNNING) &&
        (($proc_options & PROC_OPT_IGNALL12) != 0))
    {
        return;
    }
    #
    set_product_info($pdb, $pu01, LNB_U01_FILE_TYPE);
    #
    audit_u01_name_value($pdb, $pu01, $_) foreach (COUNT, TIME);
    audit_u01_feeders($pdb, $pu01);
    #
    # dispatch to the new (multi-key) or the old nozzle audit.
    #
    my $nozzle_audit = (($proc_options & PROC_OPT_USENEWNZ) != 0)
        ? \&audit_u01_nozzles_new
        : \&audit_u01_nozzles;
    $nozzle_audit->($pdb, $pu01);
    #
    return;
}
#
sub load_u01_sections
{
    my ($pu01) = @_;
    #
    # Parse every U01 section of interest out of the raw file record.
    # Call order matches the original one-call-per-line version.
    #
    load_name_value($pu01, $_)
        foreach (INDEX, INFORMATION, TIME, CYCLETIME, COUNT);
    #
    load_list($pu01, $_)
        foreach (DISPENSER, MOUNTPICKUPFEEDER, MOUNTPICKUPNOZZLE);
    #
    load_name_value($pu01, INSPECTIONDATA);
}
#
sub audit_u01_files
{
    my ($pu01s, $pdb) = @_;
    #
    # Load and audit each U01 file in turn; files that fail to load are
    # skipped silently.
    #
    printf $log_fh "\nAudit U01 files:\n";
    #
    foreach my $pu01 (@{$pu01s})
    {
        if ($verbose >= MIDVERBOSE)
        {
            printf $log_fh "\nAudit U01: %s\n", $pu01->{file_name};
        }
        #
        next if (load($pu01) == 0);     # unreadable or empty file
        #
        load_u01_sections($pu01);
        audit_u01_file($pdb, $pu01);
    }
    #
    return;
}
#
sub process_u01_files
{
    my ($pu01s) = @_;
    #
    # Top-level driver for U01 files: audit them all into a fresh
    # database hash, then export unless running audit-only.
    #
    unless (scalar(@{$pu01s}) > 0)
    {
        printf $log_fh "No U01 files to process. Returning.\n\n";
        return;
    }
    #
    my %db = ();
    audit_u01_files($pu01s, \%db);
    #
    if ($audit_only != TRUE)
    {
        export_u01_data(\%db);
    }
    #
    return;
}
#
########################################################################
########################################################################
#
# process U03 files.
#
sub export_u03_data
{
    # TODO: placeholder - U03 export is not implemented yet.
    my ($pdb) = @_;
}
#
sub audit_u03_mount_quality_trace
{
    # TODO: placeholder - [MountQualityTrace] audit is not implemented yet.
    my ($pdb, $pu03) = @_;
}
#
sub audit_u03_mount_latest_reel
{
    # TODO: placeholder - [MountLatestReel] audit is not implemented yet.
    my ($pdb, $pu03) = @_;
}
#
sub audit_u03_mount_exchange_reel
{
    # TODO: placeholder - [MountExchangeReel] audit is not implemented yet.
    my ($pdb, $pu03) = @_;
}
#
sub audit_u03_file
{
    my ($pdb, $pu03) = @_;
    #
    # Audit one loaded U03 file: record product info, then run each
    # section auditor (all currently placeholders).
    #
    set_product_info($pdb, $pu03, LNB_U03_FILE_TYPE);
    #
    foreach my $auditor (\&audit_u03_mount_quality_trace,
                         \&audit_u03_mount_latest_reel,
                         \&audit_u03_mount_exchange_reel)
    {
        $auditor->($pdb, $pu03);
    }
    #
    return;
}
#
sub load_u03_sections
{
    my ($pu03) = @_;
    #
    # Parse every U03 section of interest out of the raw file record.
    #
    load_name_value($pu03, $_) foreach (INDEX, INFORMATION);
    #
    load_list($pu03, $_)
        foreach (BRECG, BRECGCALC, ELAPSETIMERECOG, SBOARD,
                 HEIGHTCORRECT, MOUNTQUALITYTRACE,
                 MOUNTLATESTREEL, MOUNTEXCHANGEREEL);
}
#
sub audit_u03_files
{
    my ($pu03s, $pdb) = @_;
    #
    # Load and audit each U03 file; files that fail to load are skipped.
    #
    printf $log_fh "\nAudit U03 files:\n";
    #
    foreach my $pu03 (@{$pu03s})
    {
        printf $log_fh "\nAudit u03: %s\n", $pu03->{file_name}
            if ($verbose >= MIDVERBOSE);
        #
        next unless (load($pu03) != 0);
        #
        # NOTE: load_u03_sections() takes only the file record; the
        # stray INDEX argument previously passed here was ignored.
        #
        load_u03_sections($pu03);
        #
        audit_u03_file($pdb, $pu03);
    }
    #
    return;
}
#
sub process_u03_files
{
    my ($pu03s) = @_;
    #
    # Top-level driver for U03 files: audit into a fresh database hash,
    # then export unless running audit-only.
    #
    unless (scalar(@{$pu03s}) > 0)
    {
        printf $log_fh "\nNo U03 files to process. Returning.\n\n";
        return;
    }
    #
    my %db = ();
    audit_u03_files($pu03s, \%db);
    #
    if ($audit_only != TRUE)
    {
        export_u03_data(\%db);
    }
    #
    return;
}
#
########################################################################
########################################################################
#
# process MPR files.
#
sub export_mpr_data
{
    # TODO: placeholder - MPR export is not implemented yet.
    my ($pdb) = @_;
}
#
sub audit_mpr_timedatasp
{
    # TODO: placeholder - [TimeDataSP] audit is not implemented yet.
    my ($pdb, $pmpr) = @_;
}
sub audit_mpr_countdatasp
{
    # TODO: placeholder - [CountDataSP] audit is not implemented yet.
    my ($pdb, $pmpr) = @_;
}
sub audit_mpr_countdatasp2
{
    # TODO: placeholder - [CountDataSP2] audit is not implemented yet.
    my ($pdb, $pmpr) = @_;
}
sub audit_mpr_tracedatasp
{
    # TODO: placeholder - [TraceDataSP] audit is not implemented yet.
    my ($pdb, $pmpr) = @_;
}
sub audit_mpr_tracedatasp_2
{
    # TODO: placeholder - [TraceDataSP_2] audit is not implemented yet.
    my ($pdb, $pmpr) = @_;
}
sub audit_mpr_ispinfodata
{
    # TODO: placeholder - [ISPInfoData] audit is not implemented yet.
    my ($pdb, $pmpr) = @_;
}
sub audit_mpr_maskispinfodata
{
    # TODO: placeholder - [MaskISPInfoData] audit is not implemented yet.
    my ($pdb, $pmpr) = @_;
}
#
sub audit_mpr_file
{
    my ($pdb, $pmpr) = @_;
    #
    # Audit one loaded MPR file by running every section auditor
    # (all currently placeholders), in the original order.
    #
    foreach my $auditor (\&audit_mpr_timedatasp,
                         \&audit_mpr_countdatasp,
                         \&audit_mpr_countdatasp2,
                         \&audit_mpr_tracedatasp,
                         \&audit_mpr_tracedatasp_2,
                         \&audit_mpr_ispinfodata,
                         \&audit_mpr_maskispinfodata)
    {
        $auditor->($pdb, $pmpr);
    }
    #
    return;
}
#
sub load_mpr_sections
{
    my ($pmpr) = @_;
    #
    # Parse every MPR section of interest out of the raw file record.
    #
    load_name_value($pmpr, $_) foreach (INDEX, INFORMATION);
    #
    load_list($pmpr, $_)
        foreach (TIMEDATASP, COUNTDATASP, COUNTDATASP2,
                 TRACEDATASP, TRACEDATASP_2,
                 ISPINFODATA, MASKISPINFODATA);
}
#
sub audit_mpr_files
{
    my ($pmprs, $pdb) = @_;
    #
    # Load and audit each MPR file; files that fail to load are skipped.
    #
    printf $log_fh "\nAudit MPR files:\n";
    #
    foreach my $pmpr (@{$pmprs})
    {
        if ($verbose >= MIDVERBOSE)
        {
            printf $log_fh "\nAudit mpr: %s\n", $pmpr->{file_name};
        }
        #
        next if (load($pmpr) == 0);     # unreadable or empty file
        #
        load_mpr_sections($pmpr);
        audit_mpr_file($pdb, $pmpr);
    }
    #
    return;
}
#
sub process_mpr_files
{
    my ($pmprs) = @_;
    #
    # Top-level driver for MPR files: audit into a fresh database hash,
    # then export unless running audit-only.
    #
    unless (scalar(@{$pmprs}) > 0)
    {
        printf $log_fh "\nNo MPR files to process. Returning.\n\n";
        return;
    }
    #
    my %db = ();
    audit_mpr_files($pmprs, \%db);
    #
    if ($audit_only != TRUE)
    {
        export_mpr_data(\%db);
    }
    #
    return;
}
#
########################################################################
########################################################################
#
# start main execution.
#
my %opts;
if (getopts('?hwWv:t:l:o:xd:a', \%opts) != 1)
{
    usage($cmd);
    exit 2;
}
#
# getopts() fills %opts keyed by switch letter.  Iterate over the KEYS
# only: the previous "foreach my $opt (%opts)" also visited the option
# VALUES, so e.g. "-d a" would spuriously enable audit-only mode.
# keys are sorted so runs with several switches behave deterministically.
#
foreach my $opt (sort keys %opts)
{
    if (($opt eq "h") or ($opt eq "?"))
    {
        usage($cmd);
        exit 0;
    }
    elsif ($opt eq "w")
    {
        $verbose = MINVERBOSE;
    }
    elsif ($opt eq "W")
    {
        $verbose = MIDVERBOSE;
    }
    elsif ($opt eq "v")
    {
        # accept either a numeric level (0..3) or a symbolic name.
        if ($opts{$opt} =~ m/^[0123]$/)
        {
            $verbose = $opts{$opt};
        }
        elsif (exists($verbose_levels{$opts{$opt}}))
        {
            $verbose = $verbose_levels{$opts{$opt}};
        }
        else
        {
            printf $log_fh "\nInvalid verbose level: $opts{$opt}\n";
            usage($cmd);
            exit 2;
        }
    }
    elsif ($opt eq "t")
    {
        # restrict processing to one file type: u01, u03 or mpr.
        # lc() replaces the old tr/[A-Z]/[a-z]/, whose bracket
        # characters were pointless members of the translation sets.
        $file_type = lc($opts{$opt});
        if ($file_type !~ m/^(u01|u03|mpr)$/i)
        {
            printf $log_fh "\nInvalid file type: $opts{$opt}\n";
            usage($cmd);
            exit 2;
        }
    }
    elsif ($opt eq "l")
    {
        # redirect all logging to the given file.  a lexical handle
        # replaces the old "local *FH" glob juggling; the handle stays
        # open for as long as $log_fh references it.
        $logfile = $opts{$opt};
        open(my $fh, '>', $logfile) or die "open $logfile: $!";
        $log_fh = $fh;
        printf $log_fh "\nLog File: %s\n", $logfile;
    }
    elsif ($opt eq "o")
    {
        # enable one named processing option (case-insensitive).
        my $option = uc($opts{$opt});
        if (exists($allowed_proc_options{$option}))
        {
            $proc_options |= $allowed_proc_options{$option};
        }
        else
        {
            printf $log_fh "\nInvalid option type: $opts{$opt}\n";
            usage($cmd);
            exit 2;
        }
    }
    elsif ($opt eq "x")
    {
        $export_csv = TRUE;
    }
    elsif ($opt eq "d")
    {
        $export_dir = $opts{$opt};
        mkpath($export_dir) unless ( -d $export_dir );
        printf $log_fh "\nExport directory: %s\n", $export_dir;
    }
    elsif ($opt eq "a")
    {
        $audit_only = TRUE;
    }
}
#
if (scalar(@ARGV) == 0)
{
    printf $log_fh "No directories given.\n";
    usage($cmd);
    exit 2;
}
#
printf $log_fh "\nScan directories for U01, U03 and MPR files: \n\n";
#
my @u01_files = ();
my @u03_files = ();
my @mpr_files = ();
#
get_all_files($file_type,
              \@ARGV,
              \@u01_files,
              \@u03_files,
              \@mpr_files);
#
printf $log_fh "Number of U01 files: %d\n", scalar(@u01_files);
printf $log_fh "Number of U03 files: %d\n", scalar(@u03_files);
printf $log_fh "Number of MPR files: %d\n\n", scalar(@mpr_files);
#
process_u01_files(\@u01_files);
process_u03_files(\@u03_files);
process_mpr_files(\@mpr_files);
#
printf $log_fh "\nAll Done\n";
#
exit 0;
__DATA__
######################################################################
#
# constants
#
# logical constants
#
use constant TRUE => 1;
use constant FALSE => 0;
#
# output types
#
use constant PROD_COMPLETE => 3;
use constant PROD_COMPLETE_LATER => 4;
use constant DETECT_CHANGE => 5;
use constant MANUAL_CLEAR => 11;
use constant TIMER_NOT_RUNNING => 12;
use constant AUTO_CLEAR => 13;
#
# processing states
#
use constant RESET => 'reset';
use constant BASELINE => 'baseline';
use constant DELTA => 'delta';
#
# common sections for all files types: u01, u03, mpr
#
use constant INDEX => '[Index]';
use constant INFORMATION => '[Information]';
#
# sections specific to u01
#
use constant TIME => '[Time]';
use constant CYCLETIME => '[CycleTime]';
use constant COUNT => '[Count]';
use constant DISPENSER => '[Dispenser]';
use constant MOUNTPICKUPFEEDER => '[MountPickupFeeder]';
use constant MOUNTPICKUPNOZZLE => '[MountPickupNozzle]';
use constant INSPECTIONDATA => '[InspectionData]';
#
# sections specific to u03
#
use constant BRECG => '[BRecg]';
use constant BRECGCALC => '[BRecgCalc]';
use constant ELAPSETIMERECOG => '[ElapseTimeRecog]';
use constant SBOARD => '[SBoard]';
use constant HEIGHTCORRECT => '[HeightCorrect]';
use constant MOUNTQUALITYTRACE => '[MountQualityTrace]';
use constant MOUNTLATESTREEL => '[MountLatestReel]';
use constant MOUNTEXCHANGEREEL => '[MountExchangeReel]';
#
# sections specific to mpr
#
use constant TIMEDATASP => '[TimeDataSP]';
use constant COUNTDATASP => '[CountDataSP]';
use constant COUNTDATASP2 => '[CountDataSP2]';
use constant TRACEDATASP => '[TraceDataSP]';
use constant TRACEDATASP_2 => '[TraceDataSP_2]';
use constant ISPINFODATA => '[ISPInfoData]';
use constant MASKISPINFODATA => '[MaskISPInfoData]';
#
# verbose levels
#
use constant NOVERBOSE => 0;
use constant MINVERBOSE => 1;
use constant MIDVERBOSE => 2;
use constant MAXVERBOSE => 3;
#
# processing options
#
use constant PROC_OPT_NONE => 0;
use constant PROC_OPT_IGN12 => 1; # bit positions
use constant PROC_OPT_NMVAL2 => 2;
use constant PROC_OPT_IGNALL12 => 4; # bit positions
#
######################################################################
#
# globals
#
my $cmd = $0;
my $log_fh = *STDOUT;
#
# cmd line options
#
my $logfile = '';
my $verbose = 0;
my $file_type = ""; # default is all files: u01, u03, mpr
my $use_neg_delta = FALSE;
my $audit_only = FALSE;
my $red_flag_trigger = 0; # off by default
my $export_csv = FALSE;
my $export_dir = '/tmp/';
my $proc_option = PROC_OPT_NONE;
#
my %red_flags =();
#
my %verbose_levels =
(
off => 0,
min => 1,
mid => 2,
max => 3
);
#
# report formats
#
my @nozzle_print_cols =
(
{ name => 'Machine', format => '%-8s ' },
{ name => 'Lane', format => '%-8s ' },
{ name => 'Stage', format => '%-8s ' },
{ name => 'NHAdd', format => '%-8s ' },
{ name => 'NCAdd', format => '%-8s ' },
{ name => 'Blkserial', format => '%-30s ' },
{ name => 'Pickup', format => '%-8s ' },
{ name => 'PMiss', format => '%-8s ' },
{ name => 'RMiss', format => '%-8s ' },
{ name => 'DMiss', format => '%-8s ' },
{ name => 'MMiss', format => '%-8s ' },
{ name => 'HMiss', format => '%-8s ' },
{ name => 'TRSMiss', format => '%-8s ' },
{ name => 'Mount', format => '%-8s ' }
);
#
my @nozzle_print_cols2 =
(
{ name => 'Machine', format => '%-8s ' },
{ name => 'Lane', format => '%-8s ' },
{ name => 'Stage', format => '%-8s ' },
{ name => 'NHAdd', format => '%-8s ' },
{ name => 'NCAdd', format => '%-8s ' },
{ name => 'Pickup', format => '%-8s ' },
{ name => 'PMiss', format => '%-8s ' },
{ name => 'RMiss', format => '%-8s ' },
{ name => 'DMiss', format => '%-8s ' },
{ name => 'MMiss', format => '%-8s ' },
{ name => 'HMiss', format => '%-8s ' },
{ name => 'TRSMiss', format => '%-8s ' },
{ name => 'Mount', format => '%-8s ' }
);
#
my @nozzle_export_cols =
(
{ name => 'Machine', format => '%s' },
{ name => 'Lane', format => ',%s ' },
{ name => 'Stage', format => ',%s' },
{ name => 'NHAdd', format => ',%s' },
{ name => 'NCAdd', format => ',%s' },
{ name => 'Blkserial', format => ',%s' },
{ name => 'Pickup', format => ',%s' },
{ name => 'PMiss', format => ',%s' },
{ name => 'RMiss', format => ',%s' },
{ name => 'DMiss', format => ',%s' },
{ name => 'MMiss', format => ',%s' },
{ name => 'HMiss', format => ',%s' },
{ name => 'TRSMiss', format => ',%s' },
{ name => 'Mount', format => ',%s' }
);
#
my @nozzle_export_cols2 =
(
{ name => 'Machine', format => '%s' },
{ name => 'Lane', format => ',%s' },
{ name => 'Stage', format => ',%s' },
{ name => 'NHAdd', format => ',%s' },
{ name => 'NCAdd', format => ',%s' },
{ name => 'Pickup', format => ',%s' },
{ name => 'PMiss', format => ',%s' },
{ name => 'RMiss', format => ',%s' },
{ name => 'DMiss', format => ',%s' },
{ name => 'MMiss', format => ',%s' },
{ name => 'HMiss', format => ',%s' },
{ name => 'TRSMiss', format => ',%s' },
{ name => 'Mount', format => ',%s' }
);
#
my @nozzle_count_cols =
(
'Pickup',
'PMiss',
'RMiss',
'DMiss',
'MMiss',
'HMiss',
'TRSMiss',
'Mount'
);
#
my @feeder_print_cols =
(
{ name => 'Machine', format => '%-8s ' },
{ name => 'Lane', format => '%-8s ' },
{ name => 'Stage', format => '%-8s ' },
{ name => 'FAdd', format => '%-8s ' },
{ name => 'FSAdd', format => '%-8s ' },
# { name => 'Blkserial', format => '%-30s ' },
{ name => 'ReelID', format => '%-30s ' },
{ name => 'Pickup', format => '%-8s ' },
{ name => 'PMiss', format => '%-8s ' },
{ name => 'RMiss', format => '%-8s ' },
{ name => 'DMiss', format => '%-8s ' },
{ name => 'MMiss', format => '%-8s ' },
{ name => 'HMiss', format => '%-8s ' },
{ name => 'TRSMiss', format => '%-8s ' },
{ name => 'Mount', format => '%-8s ' }
);
#
my @feeder_print_cols2 =
(
{ name => 'Machine', format => '%-8s ' },
{ name => 'Lane', format => '%-8s ' },
{ name => 'Stage', format => '%-8s ' },
{ name => 'FAdd', format => '%-8s ' },
{ name => 'FSAdd', format => '%-8s ' },
{ name => 'Pickup', format => '%-8s ' },
{ name => 'PMiss', format => '%-8s ' },
{ name => 'RMiss', format => '%-8s ' },
{ name => 'DMiss', format => '%-8s ' },
{ name => 'MMiss', format => '%-8s ' },
{ name => 'HMiss', format => '%-8s ' },
{ name => 'TRSMiss', format => '%-8s ' },
{ name => 'Mount', format => '%-8s ' }
);
#
my @feeder_export_cols =
(
{ name => 'Machine', format => '%s' },
{ name => 'Lane', format => ',%s' },
{ name => 'Stage', format => ',%s' },
{ name => 'FAdd', format => ',%s' },
{ name => 'FSAdd', format => ',%s' },
# { name => 'Blkserial', format => ',%s' },
{ name => 'ReelID', format => ',%s' },
{ name => 'Pickup', format => ',%s' },
{ name => 'PMiss', format => ',%s' },
{ name => 'RMiss', format => ',%s' },
{ name => 'DMiss', format => ',%s' },
{ name => 'MMiss', format => ',%s' },
{ name => 'HMiss', format => ',%s' },
{ name => 'TRSMiss', format => ',%s' },
{ name => 'Mount', format => ',%s' }
);
#
my @feeder_export_cols2 =
(
{ name => 'Machine', format => '%s' },
{ name => 'Lane', format => ',%s' },
{ name => 'Stage', format => ',%s' },
{ name => 'FAdd', format => ',%s' },
{ name => 'FSAdd', format => ',%s' },
{ name => 'Pickup', format => ',%s' },
{ name => 'PMiss', format => ',%s' },
{ name => 'RMiss', format => ',%s' },
{ name => 'DMiss', format => ',%s' },
{ name => 'MMiss', format => ',%s' },
{ name => 'HMiss', format => ',%s' },
{ name => 'TRSMiss', format => ',%s' },
{ name => 'Mount', format => ',%s' }
);
#
my @feeder_count_cols =
(
'Pickup',
'PMiss',
'RMiss',
'DMiss',
'MMiss',
'HMiss',
'TRSMiss',
'Mount'
);
#
my @mount_quality_trace_cols =
(
'B',
'IDNUM',
'TURN',
'MS',
'TS',
'FAdd',
'FSAdd',
'FBLKCode',
'FBLKSerial',
'NHAdd',
'NCAdd',
'NBLKCode',
'NBLKSerial',
'ReelID',
'F',
'RCGX',
'RCGY',
'RCGA',
'TCX',
'TCY',
'MPosiRecX',
'MPosiRecY',
'MPosiRecA',
'MPosiRecZ',
'THMAX',
'THAVE',
'MNTCX',
'MNTCY',
'MNTCA',
'TLX',
'TLY',
'InspectArea',
'DIDNUM',
'DS',
'DispenseID',
'PARTS',
'WarpZ'
);
#
my @mount_latest_reel_cols =
(
'BLKCode',
'BLKSerial',
'Ftype',
'FAdd',
'FSAdd',
'Use',
'PEStatus',
'PCStatus',
'Remain',
'Init',
'PartsName',
'Custom1',
'Custom2',
'Custom3',
'Custom4',
'ReelID',
'PartsEmp'
);
#
my @mount_exchange_reel_cols =
(
'BLKCode',
'BLKSerial',
'Ftype',
'FAdd',
'FSAdd',
'Use',
'PEStatus',
'PCStatus',
'Remain',
'Init',
'PartsName',
'Custom1',
'Custom2',
'Custom3',
'Custom4',
'ReelID',
'PartsEmp'
);
#
# fields to ignore for output=12 files if enabled.
#
my %ignored_output12_fields =
(
'TPICKUP' => 1,
'TPMISS' => 1,
'TRMISS' => 1,
'TDMISS' => 1,
'TMMISS' => 1,
'THMISS' => 1,
'CPERR' => 1,
'CRERR' => 1,
'CDERR' => 1,
'CMERR' => 1,
'CTERR' => 1
);
#
# summary tables.
#
my %totals = ();
my %report_precision = ();
#
######################################################################
#
# miscellaneous routines
#
#
# print a usage summary for the script.
#
# param: $arg0 - the program name to show in the synopsis.
#
sub usage
{
    my ($arg0) = @_;
    # FIX: the synopsis said "-t u10|..." while the description (and the
    # rest of the program) use "u01"; also corrected "procesing" typo.
    print <<EOF;
usage: $arg0 [-?] [-h] \\
        [-w | -W |-v level] \\
        [-t u01|u03|mpr] \\
        [-r value] \\
        [-L logfile] \\
        [-O option] \\
        [-n] \\
        [-a] \\
        [-x] [-d path] \\
        directory ...
where:
        -? or -h - print usage.
        -w - enable warning (level=min=1)
        -W - enable warning and trace (level=mid=2)
        -v - verbose level: 0=off,1=min,2=mid,3=max
        -t file-type = type of file to process: u01, u03, mpr.
             default is all files.
        -r value - red flag if counts decrease by more than this amount.
        -L logfile - log file path
        -O option - enable a processing option:
             ign12 - ignore resetable output=12 fields.
             nmval2 - calculate Count/Time data using 2nd algo.
             ignall12 - ignore all output=12 files.
        -n - use negative deltas (default is NOT to use)
        -a - only audit data, do not generate any report.
        -x - export reports as CSV files.
        -d path - export directory, defaults to '/tmp'.
EOF
}
#
#
# extract the product identity (MJS id, lot name, lot number) from a
# parsed data file into the supplied scalar references.  values wrapped
# in double quotes are unquoted.
#
sub get_product_info
{
    my ($pdata, $pmjsid, $plotname, $plotnumber) = @_;
    #
    # the MJS id lives in the INDEX section.
    $$pmjsid = $pdata->{INDEX()}->{data}->{MJSID};
    $$pmjsid = $1 if ($$pmjsid =~ m/"([^"]*)"/);
    #
    # lot name and number live in the INFORMATION section.
    $$plotname = $pdata->{INFORMATION()}->{data}->{LotName};
    $$plotname = $1 if ($$plotname =~ m/"([^"]*)"/);
    $$plotnumber = $pdata->{INFORMATION()}->{data}->{LotNumber};
}
#
#
# track the current product per machine/lane/stage for U01 files and
# raise the change-over flag when a completion record reports a product
# that differs from the previously known (non-placeholder) one.
#
sub set_product_info_u01
{
    my ($pdb, $pu01) = @_;
    #
    my $filename = $pu01->{file_name};
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $mjsid = 'UNKNOWN';
    my $lotname = 'UNKNOWN';
    my $lotnumber = 0;
    #
    if ( ! exists($pdb->{product}{u01}{$machine}{$lane}{$stage}))
    {
        # first file for this machine/lane/stage: seed the placeholder
        # product id with no change-over pending.
        $pdb->{product}{u01}{$machine}{$lane}{$stage} =
            join('_', $mjsid, $lotname, $lotnumber);
        $pdb->{change_over}{u01}{$machine}{$lane}{$stage} = FALSE;
    }
    elsif (($output_no == PROD_COMPLETE) ||
           ($output_no == PROD_COMPLETE_LATER))
    {
        # completion record: read the real product identity and flag a
        # change-over when it differs from a known previous product.
        get_product_info($pu01, \$mjsid, \$lotname, \$lotnumber);
        #
        my $product = join('_', $mjsid, $lotname, $lotnumber);
        my $previous = $pdb->{product}{u01}{$machine}{$lane}{$stage};
        #
        $pdb->{change_over}{u01}{$machine}{$lane}{$stage} =
            (($previous ne $product) &&
             ($previous ne 'UNKNOWN_UNKNOWN_0')) ? TRUE : FALSE;
        #
        $pdb->{product}{u01}{$machine}{$lane}{$stage} = $product;
    }
    else
    {
        # any other record type clears the change-over flag.
        $pdb->{change_over}{u01}{$machine}{$lane}{$stage} = FALSE;
    }
    #
    printf $log_fh "Product U01: %s, Change Over: %d\n", $pdb->{product}{u01}{$machine}{$lane}{$stage}, $pdb->{change_over}{u01}{$machine}{$lane}{$stage} if ($verbose >= MIDVERBOSE);
}
#
#
# track the current product per machine/lane/stage for U03 files.
# completion records overwrite the product id; any other record only
# seeds the UNKNOWN placeholder on first sighting.
#
sub set_product_info_u03
{
    my ($pdb, $pu03) = @_;
    #
    my $filename = $pu03->{file_name};
    #
    my $machine = $pu03->{mach_no};
    my $lane = $pu03->{lane};
    my $stage = $pu03->{stage};
    my $output_no = $pu03->{output_no};
    #
    my $mjsid = 'UNKNOWN';
    my $lotname = 'UNKNOWN';
    my $lotnumber = 0;
    #
    if (($output_no == PROD_COMPLETE) ||
        ($output_no == PROD_COMPLETE_LATER))
    {
        # completion record: take the product identity from the file.
        get_product_info($pu03, \$mjsid, \$lotname, \$lotnumber);
        $pdb->{product}{u03}{$machine}{$lane}{$stage} =
            join('_', $mjsid, $lotname, $lotnumber);
    }
    elsif ( ! exists($pdb->{product}{u03}{$machine}{$lane}{$stage}))
    {
        # first sighting: record the UNKNOWN placeholder.
        $pdb->{product}{u03}{$machine}{$lane}{$stage} =
            join('_', $mjsid, $lotname, $lotnumber);
    }
    #
    printf $log_fh "Product U03: %s\n", $pdb->{product}{u03}{$machine}{$lane}{$stage} if ($verbose >= MIDVERBOSE);
}
#
#
# remember the file whose counter delta exceeded the red-flag trigger,
# keyed by machine/lane/stage.  a no-op when the trigger is disabled.
#
sub set_red_flag
{
    my ($machine, $lane, $stage, $filename, $delta) = @_;
    #
    # red-flag tracking is off unless a positive trigger was configured.
    return unless ($red_flag_trigger > 0);
    #
    # compare magnitudes: a large drop is as suspicious as a large rise.
    $red_flags{$machine}{$lane}{$stage} = $filename
        if (abs($delta) >= $red_flag_trigger);
}
#
#
# if a red flag is pending for this file's machine/lane/stage, report
# the offending pair of files to the log and clear the flag.
#
sub check_red_flag
{
    my ($pu01) = @_;
    #
    return unless ($red_flag_trigger > 0);
    #
    my $filename = $pu01->{file_name};
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    #
    # nothing pending for this machine/lane/stage.
    return unless (exists($red_flags{$machine}{$lane}{$stage}));
    #
    # delete returns the value being removed: the previous file name.
    my $previous_filename = delete $red_flags{$machine}{$lane}{$stage};
    #
    printf $log_fh "\nRED FLAG FILES FOR MACHINE: %s, Lane: %s, Stage: %s:\n",
        $machine, $lane, $stage;
    printf $log_fh "==>> Previous File: %s\n", $previous_filename;
    printf $log_fh "==>> Current File : %s\n", $filename;
}
#
######################################################################
#
# report routines
#
#
# reset the precision bookkeeping for every report section so the next
# file processed re-derives each section's column width.
#
# FIX: the original repeated the same two assignments seven times, once
# per section constant; collapsed into a single loop over the list.
#
sub init_report_precision
{
    foreach my $section (TIME, COUNT,
                         MOUNTPICKUPFEEDER, MOUNTPICKUPNOZZLE,
                         MOUNTQUALITYTRACE, MOUNTLATESTREEL,
                         MOUNTEXCHANGEREEL)
    {
        $report_precision{$section}{set} = FALSE;
        $report_precision{$section}{precision} = 0;
    }
}
#
#
# fix the report column width for a name-value section the first time it
# is seen: the width of the longest key name, falling back to 20 when
# the section has no keys.  later calls are no-ops.
#
sub set_report_name_value_precision
{
    my ($pu01, $section) = @_;
    #
    return unless ($report_precision{$section}{set} == FALSE);
    #
    my $width = 0;
    foreach my $name (keys %{$pu01->{$section}->{data}})
    {
        my $len = length($name);
        $width = $len if ($len > $width);
    }
    $width = 20 if ($width <= 0);
    #
    $report_precision{$section}{precision} = $width;
    $report_precision{$section}{set} = TRUE;
}
#
#
# fix the report column width for the nozzle section on first use:
# longest name in @nozzle_count_cols, falling back to 20.
#
sub set_report_nozzle_precision
{
    my ($pu01) = @_;
    #
    my $section = MOUNTPICKUPNOZZLE;
    #
    return unless ($report_precision{$section}{set} == FALSE);
    #
    my $width = 0;
    foreach my $name (@nozzle_count_cols)
    {
        my $len = length($name);
        $width = $len if ($len > $width);
    }
    $width = 20 if ($width <= 0);
    #
    $report_precision{$section}{precision} = $width;
    $report_precision{$section}{set} = TRUE;
}
#
#
# fix the report column width for the feeder section on first use:
# longest name in @feeder_count_cols, falling back to 20.
#
sub set_report_feeder_precision
{
    my ($pu01) = @_;
    #
    my $section = MOUNTPICKUPFEEDER;
    #
    return unless ($report_precision{$section}{set} == FALSE);
    #
    my $width = 0;
    foreach my $name (@feeder_count_cols)
    {
        my $len = length($name);
        $width = $len if ($len > $width);
    }
    $width = 20 if ($width <= 0);
    #
    $report_precision{$section}{precision} = $width;
    $report_precision{$section}{set} = TRUE;
}
#
#
# fix the report column width for the quality-trace section on first
# use: longest name in @mount_quality_trace_cols, falling back to 20.
#
sub set_report_quality_trace_precision
{
    my ($pu03) = @_;
    #
    my $section = MOUNTQUALITYTRACE;
    #
    return unless ($report_precision{$section}{set} == FALSE);
    #
    my $width = 0;
    foreach my $name (@mount_quality_trace_cols)
    {
        my $len = length($name);
        $width = $len if ($len > $width);
    }
    $width = 20 if ($width <= 0);
    #
    $report_precision{$section}{precision} = $width;
    $report_precision{$section}{set} = TRUE;
}
#
#
# fix the report column width for the latest-reel section on first use:
# longest name in @mount_latest_reel_cols, falling back to 20.
#
sub set_report_latest_reel_precision
{
    my ($pu03) = @_;
    #
    my $section = MOUNTLATESTREEL;
    #
    return unless ($report_precision{$section}{set} == FALSE);
    #
    my $width = 0;
    foreach my $name (@mount_latest_reel_cols)
    {
        my $len = length($name);
        $width = $len if ($len > $width);
    }
    $width = 20 if ($width <= 0);
    #
    $report_precision{$section}{precision} = $width;
    $report_precision{$section}{set} = TRUE;
}
#
#
# fix the report column width for the exchange-reel section on first
# use: longest name in @mount_exchange_reel_cols, falling back to 20.
#
sub set_report_exchange_reel_precision
{
    my ($pu03) = @_;
    #
    my $section = MOUNTEXCHANGEREEL;
    #
    return unless ($report_precision{$section}{set} == FALSE);
    #
    my $width = 0;
    foreach my $name (@mount_exchange_reel_cols)
    {
        my $len = length($name);
        $width = $len if ($len > $width);
    }
    $width = 20 if ($width <= 0);
    #
    $report_precision{$section}{precision} = $width;
    $report_precision{$section}{set} = TRUE;
}
#
######################################################################
#
# read in data file and load all sections
#
#
# slurp the data file named by $pdata->{full_path} into
# @{$pdata->{data}} with newlines removed.
#
# returns: 1 on success, 0 when the file is unreadable or cannot be
#          opened (an ERROR line is written to the log in both cases).
#
# FIX: replaced the global bareword filehandle INFD and two-argument
# open with a lexical handle and three-argument open (avoids handle
# clashes and mode injection via the file name).
#
sub load
{
    my ($pdata) = @_;
    #
    my $path = $pdata->{full_path};
    #
    if ( ! -r $path )
    {
        printf $log_fh "\nERROR: file $path is NOT readable\n\n";
        return 0;
    }
    #
    my $infd;
    unless (open($infd, '<', $path))
    {
        printf $log_fh "\nERROR: unable to open $path.\n\n";
        return 0;
    }
    @{$pdata->{data}} = <$infd>;
    close($infd);
    #
    # remove newlines
    #
    chomp(@{$pdata->{data}});
    printf $log_fh "Lines read: %d\n", scalar(@{$pdata->{data}})
        if ($verbose >= MAXVERBOSE);
    #
    return 1;
}
#
#
# parse one "name = value" section out of the already-loaded file lines.
#
# params : $pdata   - file hash (reads {data}, writes {raw}{$section}
#                     and {$section}{data})
#          $section - section header tag; presumably begins with a
#                     literal '[' (the '\\' prefix below escapes it for
#                     the regex) -- TODO confirm against the constants.
# returns: always 1 (even when the section is empty).
#
sub load_name_value
{
my ($pdata, $section) = @_;
#
printf $log_fh "\nLoading Name-Value Section: %s\n", $section
if ($verbose >= MAXVERBOSE);
#
# scalar flip-flop: grep keeps the run of lines from the section header
# through the first blank line, inclusive of both delimiters.
my $re_section = '\\' . $section;
@{$pdata->{raw}->{$section}} =
grep /^${re_section}\s*$/ .. /^\s*$/, @{$pdata->{data}};
#
# printf $log_fh "<%s>\n", join("\n", @{$pdata->{raw}->{$section}});
#
# <= 2 lines means only the header and trailing blank were captured:
# the section carries no data.
if (scalar(@{$pdata->{raw}->{$section}}) <= 2)
{
$pdata->{$section} = {};
printf $log_fh "No data found.\n"
if ($verbose >= MAXVERBOSE);
return 1;
}
#
# drop the section header (first) and the blank terminator (last).
shift @{$pdata->{raw}->{$section}};
pop @{$pdata->{raw}->{$section}};
#
printf $log_fh "Section Lines: %d\n", scalar(@{$pdata->{raw}->{$section}})
if ($verbose >= MAXVERBOSE);
#
# each remaining line is "name = value"; split on the first '=' only
# (limit 2) so values containing '=' stay intact.
%{$pdata->{$section}->{data}} =
map { split /\s*=\s*/, $_, 2 } @{$pdata->{raw}->{$section}};
printf $log_fh "Number of Keys: %d\n", scalar(keys %{$pdata->{$section}->{data}})
if ($verbose >= MAXVERBOSE);
#
return 1;
}
#
#
# tokenize a record on single spaces while honoring double-quoted runs:
# characters between a pair of " are kept verbatim (spaces included) and
# the quotes themselves are dropped.  note two behaviors callers rely
# on: consecutive separators yield empty tokens, and a trailing empty
# token is discarded.  escaped quotes are not supported.
#
# param  : $rec - the raw record line.
# returns: the list of tokens.
#
# FIX: removed the unused locals $istart, $iend and $rec_len and
# iterate over characters directly instead of substr-by-index.
#
sub split_quoted_string
{
    my $rec = shift;
    #
    my $in_string = 0;
    my @tokens = ();
    my $token = '';
    #
    foreach my $c (split(//, $rec))
    {
        if ($in_string)
        {
            # inside quotes: only a closing quote is special.
            if ($c eq '"')
            {
                $in_string = 0;
            }
            else
            {
                $token .= $c;
            }
        }
        elsif ($c eq '"')
        {
            $in_string = 1;
        }
        elsif ($c eq ' ')
        {
            # separator: emit the token collected so far (may be empty).
            push(@tokens, $token);
            $token = '';
        }
        else
        {
            $token .= $c;
        }
    }
    #
    # flush the final token; an empty trailing token is dropped.
    if (length($token) > 0)
    {
        push(@tokens, $token);
    }
    #
    return @tokens;
}
#
#
# parse one tabular (list) section out of the already-loaded file lines:
# a header row of column names followed by data rows, terminated by a
# blank line.  rows whose token count does not match the header are
# skipped with a log message.
#
# params : $pdata   - file hash (reads {data}; writes {raw}{$section},
#                     {$section}{header}, {$section}{column_names} and
#                     {$section}{data} = list of row hashrefs)
#          $section - section header tag ('\\' escapes its first char,
#                     presumably a literal '[' -- TODO confirm).
# returns: always 1.
#
sub load_list
{
my ($pdata, $section) = @_;
#
printf $log_fh "\nLoading List Section: %s\n", $section
if ($verbose >= MAXVERBOSE);
#
# scalar flip-flop: keep the lines from the section header through the
# first blank line, inclusive.
my $re_section = '\\' . $section;
@{$pdata->{raw}->{$section}} =
grep /^${re_section}\s*$/ .. /^\s*$/, @{$pdata->{data}};
#
# printf $log_fh "<%s>\n", join("\n", @{$pdata->{raw}->{$section}});
#
# <= 3 lines means at most header tag + column row + blank terminator:
# no data rows are present.
if (scalar(@{$pdata->{raw}->{$section}}) <= 3)
{
$pdata->{$section} = {};
printf $log_fh "No data found.\n"
if ($verbose >= MAXVERBOSE);
return 1;
}
# drop the section tag and the blank terminator, then peel off the
# column-name row.
shift @{$pdata->{raw}->{$section}};
pop @{$pdata->{raw}->{$section}};
$pdata->{$section}->{header} = shift @{$pdata->{raw}->{$section}};
@{$pdata->{$section}->{column_names}} =
split / /, $pdata->{$section}->{header};
my $number_columns = scalar(@{$pdata->{$section}->{column_names}});
#
@{$pdata->{$section}->{data}} = ();
#
printf $log_fh "Section Lines: %d\n", scalar(@{$pdata->{raw}->{$section}})
if ($verbose >= MAXVERBOSE);
# printf $log_fh "Column Names: %d\n", $number_columns;
foreach my $record (@{$pdata->{raw}->{$section}})
{
# printf $log_fh "\nRECORD: %s\n", $record;
#
# printf $log_fh "\nRECORD (original): %s\n", $record;
# $record =~ s/"\s+"\s/"" /g;
# $record =~ s/"\s+"\s*$/""/g;
# printf $log_fh "\nRECORD (final): %s\n", $record;
# my @tokens = split / /, $record;
#
# quote-aware tokenizer (quoted fields may contain spaces).
my @tokens = split_quoted_string($record);
my $number_tokens = scalar(@tokens);
printf $log_fh "Number of tokens in record: %d\n", $number_tokens
if ($verbose >= MAXVERBOSE);
#
if ($number_tokens == $number_columns)
{
# build a column-name => value hash for this row; note unshift
# puts rows in REVERSE file order -- intentional or not, callers
# see newest-first.
my %data = ();
@data{@{$pdata->{$section}->{column_names}}} = @tokens;
my $data_size = scalar(keys %data);
# printf $log_fh "Current Data Size: %d\n", $data_size;
unshift @{$pdata->{$section}->{data}}, \%data;
printf $log_fh "Current Number of Records: %d\n", scalar(@{$pdata->{$section}->{data}})
if ($verbose >= MAXVERBOSE);
}
else
{
printf $log_fh "SKIPPING RECORD - NUMBER TOKENS (%d) != NUMBER COLUMNS (%d)\n", $number_tokens, $number_columns;
}
}
#
return 1;
}
#
######################################################################
#
# audit U01 files
#
######################################################################
#
# routines for Count and Time sections
#
#
# compute per-key deltas for a name-value section (Count/Time) against
# the cached baseline and store them under {delta}.  keys missing from
# the cache contribute their raw value; negative deltas are kept,
# clamped to zero, or red-flagged depending on $use_neg_delta.
#
# FIX: the two original branches were ~40 duplicated lines that only
# differed by the ignored-field test; unified behind an $ignore12 flag.
# the bracketed transliteration tr/[a-z]/[A-Z]/ was replaced by uc()
# (equivalent for these ASCII field names; the brackets mapped to
# themselves).
#
sub calculate_u01_name_value_delta
{
    my ($pdb, $pu01, $section) = @_;
    #
    my $filename = $pu01->{file_name};
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    # resetable output=12 counters are zeroed only when the ign12 option
    # is on AND this is a timer-not-running COUNT section.
    my $ignore12 =
        ((($proc_option & PROC_OPT_IGN12) != 0) &&
         ($output_no == TIMER_NOT_RUNNING) &&
         ($section eq COUNT)) ? 1 : 0;
    #
    foreach my $key (keys %{$pu01->{$section}->{data}})
    {
        my $delta = 0;
        #
        if ($ignore12 &&
            exists($ignored_output12_fields{uc($key)}))
        {
            # resetable counter on an output=12 file: contribute nothing.
            $delta = 0;
        }
        elsif (exists($pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$key}))
        {
            # normal case: delta against the cached baseline.
            $delta =
                $pu01->{$section}->{data}->{$key} -
                $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$key};
        }
        else
        {
            # no baseline yet: take the raw count as the delta.
            $delta = $pu01->{$section}->{data}->{$key};
            printf $log_fh "ERROR: [%s] %s key %s NOT found in cache. Taking counts (%d) as is.\n",
                $filename, $section, $key, $delta;
        }
        #
        if ($delta >= 0)
        {
            $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$key} = $delta;
        }
        elsif ($use_neg_delta == TRUE)
        {
            printf $log_fh "%d WARNING: [%s] using NEGATIVE delta for %s key %s: %d\n", __LINE__, $filename, $section, $key, $delta if ($verbose >= MINVERBOSE);
            $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$key} = $delta;
        }
        else
        {
            # clamp the negative delta and remember the offending file.
            $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$key} = 0;
            printf $log_fh "%d WARNING: [%s] setting NEGATIVE delta (%d) for %s key %s to ZERO\n", __LINE__, $filename, $delta, $section, $key if ($verbose >= MINVERBOSE);
            set_red_flag($machine, $lane, $stage, $filename, $delta);
        }
        #
        printf $log_fh "%s: %s = %d\n", $section, $key, $delta
            if ($verbose >= MAXVERBOSE);
    }
}
#
#
# take the raw counts of a name-value section as the deltas, replacing
# any previous deltas (used right after a counter reset, when there is
# no baseline to subtract).
#
sub copy_u01_name_value_delta
{
    my ($pdb, $pu01, $section) = @_;
    #
    my $filename = $pu01->{file_name};
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    delete $pdb->{$section}->{$machine}{$lane}{$stage}{delta};
    #
    my $psrc = $pu01->{$section}->{data};
    foreach my $key (keys %{$psrc})
    {
        $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$key} = $psrc->{$key};
        printf $log_fh "%s: %s = %d\n", $section, $key, $psrc->{$key}
            if ($verbose >= MAXVERBOSE);
    }
}
#
#
# snapshot the current counters of a name-value section into the cache,
# making them the baseline for the next delta calculation.
#
sub copy_u01_name_value_cache
{
    my ($pdb, $pu01, $section) = @_;
    #
    my $filename = $pu01->{file_name};
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $psrc = $pu01->{$section}->{data};
    foreach my $name (keys %{$psrc})
    {
        $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$name} = $psrc->{$name};
    }
}
#
#
# fold this file's per-key deltas into every summary rollup: overall,
# by machine, by machine/lane, by machine/lane/stage -- each both
# product-dependent and product-independent.
#
# FIX: the original repeated the same exists/else accumulate block
# eight times per key; collapsed into one loop over references to the
# eight accumulator slots (an undefined slot starts at the delta value,
# exactly as the old exists/else code did).
#
sub tabulate_u01_name_value_delta
{
    my ($pdb, $pu01, $section) = @_;
    #
    my $filename = $pu01->{file_name};
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $product = $pdb->{product}{u01}{$machine}{$lane}{$stage};
    #
    foreach my $key (keys %{$pu01->{$section}->{data}})
    {
        my $delta = $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$key};
        #
        foreach my $pslot (
            # product dependent totals
            \$totals{by_product}{$product}{$section}{totals}{$key},
            \$totals{by_product}{$product}{$section}{by_machine}{$machine}{$key},
            \$totals{by_product}{$product}{$section}{by_machine_lane}{$machine}{$lane}{$key},
            \$totals{by_product}{$product}{$section}{by_machine_lane_stage}{$machine}{$lane}{$stage}{$key},
            # product independent totals
            \$totals{$section}{totals}{$key},
            \$totals{$section}{by_machine}{$machine}{$key},
            \$totals{$section}{by_machine_lane}{$machine}{$lane}{$key},
            \$totals{$section}{by_machine_lane_stage}{$machine}{$lane}{$stage}{$key})
        {
            $$pslot = defined($$pslot) ? ($$pslot + $delta) : $delta;
        }
    }
}
#
#
# audit one name-value section (Count/Time) of a U01 file: drive the
# per-machine/lane/stage state machine (states: DELTA, RESET, BASELINE)
# and, when in a delta-able state, compute/tabulate deltas and refresh
# the baseline cache.  clear-type records (MANUAL_CLEAR/AUTO_CLEAR)
# drop the cache and move to RESET.
#
sub audit_u01_name_value
{
my ($pdb, $pu01, $section) = @_;
#
set_report_name_value_precision($pu01, $section);
#
my $filename = $pu01->{file_name};
my $machine = $pu01->{mach_no};
my $lane = $pu01->{lane};
my $stage = $pu01->{stage};
my $output_no = $pu01->{output_no};
#
my $mjsid = '';
my $lotname = '';
my $lotnumber = 0;
#
# change-over flag was decided earlier by set_product_info_u01.
my $change_over = $pdb->{change_over}{u01}{$machine}{$lane}{$stage};
printf $log_fh "Change Over: %s\n", $change_over if ($verbose >= MAXVERBOSE);
#
get_product_info($pu01, \$mjsid, \$lotname, \$lotnumber);
#
printf $log_fh "\nSECTION : %s\n", $section
if ($verbose >= MAXVERBOSE);
#
if ($verbose >= MAXVERBOSE)
{
printf $log_fh "MACHINE : %s\n", $machine;
printf $log_fh "LANE : %d\n", $lane;
printf $log_fh "STAGE : %d\n", $stage;
printf $log_fh "OUTPUT NO: %s\n", $output_no;
printf $log_fh "FILE RECS : %d\n", scalar(@{$pu01->{data}});
printf $log_fh "%s RECS: %d\n", $section, scalar(keys %{$pu01->{$section}->{data}});
}
#
# first file ever seen for this machine/lane/stage: seed the state and
# (unless it is a clear record) take the counts as the baseline cache.
# no deltas are produced from the very first file.
if ( ! exists($pdb->{$section}->{$machine}{$lane}{$stage}{state}))
{
printf $log_fh "ENTRY STATE: UNKNOWN\n"
if ($verbose >= MAXVERBOSE);
#
if (($output_no == MANUAL_CLEAR) ||
($output_no == AUTO_CLEAR))
{
$pdb->{$section}->{$machine}{$lane}{$stage}{state} = RESET;
delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
}
else
{
$pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
#
copy_u01_name_value_cache($pdb, $pu01, $section);
}
printf $log_fh "EXIT STATE: %s\n",
$pdb->{$section}->{$machine}{$lane}{$stage}{state}
if ($verbose >= MAXVERBOSE);
#
return;
}
#
my $state = $pdb->{$section}->{$machine}{$lane}{$stage}{state};
#
# a product change-over invalidates the baseline: treat as RESET.
$state = RESET if ($change_over == TRUE);
#
printf $log_fh "ENTRY STATE: %s\n", $state if ($verbose >= MAXVERBOSE);
#
if (($output_no == MANUAL_CLEAR) ||
($output_no == AUTO_CLEAR))
{
# counters were cleared on the machine: drop the baseline.
$pdb->{$section}->{$machine}{$lane}{$stage}{state} = RESET;
delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
}
elsif ($state eq DELTA)
{
# normal flow: delta against cache, tabulate, refresh cache.
calculate_u01_name_value_delta($pdb, $pu01, $section);
tabulate_u01_name_value_delta($pdb, $pu01, $section);
copy_u01_name_value_cache($pdb, $pu01, $section);
#
$pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
}
elsif ($state eq RESET)
{
# first file after a reset: raw counts ARE the deltas.
copy_u01_name_value_delta($pdb, $pu01, $section);
tabulate_u01_name_value_delta($pdb, $pu01, $section);
copy_u01_name_value_cache($pdb, $pu01, $section);
#
$pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
}
elsif ($state eq BASELINE)
{
# re-baseline: rebuild the cache, produce no deltas.
# (the state is assigned DELTA twice here; harmless redundancy.)
$pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
#
copy_u01_name_value_cache($pdb, $pu01, $section);
#
$pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
}
else
{
die "ERROR: unknown $section state: $state. Stopped";
}
printf $log_fh "EXIT STATE: %s\n",
$pdb->{$section}->{$machine}{$lane}{$stage}{state}
if ($verbose >= MAXVERBOSE);
#
return;
}
#
# another way to calculate ...
#
#
# alternate (nmval2) audit of a name-value section: only completion /
# detect-change / timer-not-running records participate in delta
# calculation; clear records just drop the cache and move to RESET.
#
# FIX: the two "EXIT STATE" log lines printed the stale entry $state
# instead of the state actually stored (cf. audit_u01_name_value, which
# prints the stored state); they now read the state back from $pdb.
#
sub audit_u01_name_value_2
{
my ($pdb, $pu01, $section) = @_;
#
set_report_name_value_precision($pu01, $section);
#
my $filename = $pu01->{file_name};
my $machine = $pu01->{mach_no};
my $lane = $pu01->{lane};
my $stage = $pu01->{stage};
my $output_no = $pu01->{output_no};
#
my $mjsid = '';
my $lotname = '';
my $lotnumber = 0;
#
my $change_over = $pdb->{change_over}{u01}{$machine}{$lane}{$stage};
printf $log_fh "Change Over: %s\n", $change_over if ($verbose >= MAXVERBOSE);
#
get_product_info($pu01, \$mjsid, \$lotname, \$lotnumber);
#
printf $log_fh "\nSECTION : %s\n", $section
if ($verbose >= MAXVERBOSE);
#
if ($verbose >= MAXVERBOSE)
{
printf $log_fh "MACHINE : %s\n", $machine;
printf $log_fh "LANE : %d\n", $lane;
printf $log_fh "STAGE : %d\n", $stage;
printf $log_fh "OUTPUT NO: %s\n", $output_no;
printf $log_fh "FILE RECS : %d\n", scalar(@{$pu01->{data}});
printf $log_fh "%s RECS: %d\n", $section, scalar(keys %{$pu01->{$section}->{data}});
}
#
if (($output_no == PROD_COMPLETE) ||
($output_no == PROD_COMPLETE_LATER) ||
($output_no == DETECT_CHANGE) ||
($output_no == TIMER_NOT_RUNNING))
{
# first sighting: baseline the cache, produce no deltas.
if ( ! exists($pdb->{$section}->{$machine}{$lane}{$stage}{state}))
{
printf $log_fh "ENTRY STATE: UNKNOWN\n"
if ($verbose >= MAXVERBOSE);
#
delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
copy_u01_name_value_cache($pdb, $pu01, $section);
#
$pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
}
else
{
my $state = $pdb->{$section}->{$machine}{$lane}{$stage}{state};
# a product change-over invalidates the baseline.
$state = RESET if ($change_over == TRUE);
printf $log_fh "ENTRY STATE: %s\n", $state if ($verbose >= MAXVERBOSE);
#
if ($state eq DELTA)
{
calculate_u01_name_value_delta($pdb, $pu01, $section);
tabulate_u01_name_value_delta($pdb, $pu01, $section);
copy_u01_name_value_cache($pdb, $pu01, $section);
#
$pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
}
elsif ($state eq RESET)
{
# first file after a reset: raw counts ARE the deltas.
copy_u01_name_value_delta($pdb, $pu01, $section);
tabulate_u01_name_value_delta($pdb, $pu01, $section);
copy_u01_name_value_cache($pdb, $pu01, $section);
#
$pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
}
elsif ($state eq BASELINE)
{
delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
copy_u01_name_value_cache($pdb, $pu01, $section);
#
$pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
}
else
{
die "ERROR: unknown $section state: $state. Stopped";
}
# FIX: report the stored state, not the stale entry state.
printf $log_fh "EXIT STATE: %s\n", $pdb->{$section}->{$machine}{$lane}{$stage}{state} if ($verbose >= MAXVERBOSE);
}
}
elsif (($output_no == MANUAL_CLEAR) ||
($output_no == AUTO_CLEAR))
{
my $state = $pdb->{$section}->{$machine}{$lane}{$stage}{state};
printf $log_fh "ENTRY STATE: %s\n", $state if ($verbose >= MAXVERBOSE);
#
# counters were cleared on the machine: drop the baseline.
delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
#
$pdb->{$section}->{$machine}{$lane}{$stage}{state} = RESET;
# FIX: report the stored state (RESET), not the stale entry state.
printf $log_fh "EXIT STATE: %s\n", $pdb->{$section}->{$machine}{$lane}{$stage}{state} if ($verbose >= MAXVERBOSE);
}
else
{
die "ERROR: unknown output type: $output_no. Stopped";
}
#
return;
}
#
######################################################################
#
# routines for feeder section
#
#
# compute per-feeder (FAdd/FSAdd) counter deltas against the cached
# baseline.  a row missing from the cache, or whose reel id changed (and
# is not a tray part), takes its raw counts as the deltas; otherwise
# each counter in @feeder_count_cols is delta'd with the usual
# negative-delta handling (keep / clamp+red-flag).
#
# FIX: the cache-filename lookup ended in "(unknown)" -- a corrupted
# hash subscript that is a syntax/strict error.  restored as the
# {file_name} key (the same key copy_u01_feeder_cache records).
#
sub calculate_u01_feeder_delta
{
my ($pdb, $pu01) = @_;
#
my $section = MOUNTPICKUPFEEDER;
#
my $filename = $pu01->{file_name};
my $machine = $pu01->{mach_no};
my $lane = $pu01->{lane};
my $stage = $pu01->{stage};
my $output_no = $pu01->{output_no};
#
my $pcols = $pu01->{$section}->{column_names};
#
delete $pdb->{$section}->{$machine}{$lane}{$stage}{delta};
#
foreach my $prow (@{$pu01->{$section}->{data}})
{
my $fadd = $prow->{FAdd};
my $fsadd = $prow->{FSAdd};
my $reelid = $prow->{ReelID};
#
# tray detection reads two digits inside FAdd; assumes FAdd is a
# fixed-width numeric address -- TODO confirm the format.
my $is_tray = substr($fadd, -4, 2);
if ($is_tray > 0)
{
$is_tray = TRUE;
printf $log_fh "%d: [%s] %s IS tray part (%s) fadd: %s, fsadd: %s\n", __LINE__, $filename, $section, $is_tray, $fadd, $fsadd
if ($verbose >= MAXVERBOSE);
}
else
{
$is_tray = FALSE;
printf $log_fh "%d: [%s] %s IS NOT tray part (%s) fadd: %s, fsadd: %s\n", __LINE__, $filename, $section, $is_tray, $fadd, $fsadd
if ($verbose >= MAXVERBOSE);
}
#
if ( ! exists($pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{data}))
{
# no baseline for this feeder: take raw counts as-is.
printf $log_fh "%d WARNING: [%s] %s FAdd %s, FSAdd %s NOT found in cache. Taking all counts as is.\n", __LINE__, $filename, $section, $fadd, $fsadd if ($verbose >= MINVERBOSE);
foreach my $col (@{$pcols})
{
$pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data}{$col} = $prow->{$col};
}
}
else
{
my $cache_reelid = $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{data}{ReelID};
# FIX: restored hash key (was the corrupted "(unknown)" subscript).
my $cache_filename = $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{file_name};
if (($reelid eq $cache_reelid) || ($is_tray == TRUE))
{
$pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data}{ReelID} = $reelid;
#
foreach my $col (@feeder_count_cols)
{
my $u01_value = $prow->{$col};
my $cache_value = $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{data}{$col};
#
my $delta = $u01_value - $cache_value;
#
if ($delta >= 0)
{
$pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data}{$col} = $delta;
}
elsif ($use_neg_delta == TRUE)
{
printf $log_fh "%d WARNING: [%s] [%s] %s FAdd %s, FSAdd %s using NEGATIVE delta for key %s: %d\n", __LINE__, $filename, $cache_filename, $section, $fadd, $fsadd, $col, $delta if ($verbose >= MINVERBOSE);
$pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data}{$col} = $delta;
}
else
{
# clamp the negative delta and remember the file pair.
$pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data}{$col} = 0;
printf $log_fh "%d WARNING: [%s] [%s] %s FAdd %s, FSAdd %s setting NEGATIVE delta (%d) for key %s to ZERO; current value %d, cache value %d\n", __LINE__, $filename, $cache_filename, $section, $fadd, $fsadd, $delta, $col, $u01_value, $cache_value if ($verbose >= MINVERBOSE);
set_red_flag($machine, $lane, $stage, $filename, $delta);
}
}
}
else
{
# reel changed (non-tray): restart from the raw counts.
printf $log_fh "%d WARNING: [%s] %s FAdd %s, FSAdd %s REELID CHANGED: CACHED %s, CURRENT U01 %s\n", __LINE__, $filename, $section, $fadd, $fsadd, $cache_reelid, $reelid if ($verbose >= MINVERBOSE);
#
delete $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data};
#
foreach my $col (@{$pcols})
{
$pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data}{$col} = $prow->{$col};
}
}
}
}
}
#
#
# snapshot every feeder row (keyed by FAdd/FSAdd) into the cache as the
# baseline for the next delta pass, recording the state and the source
# file name alongside the data.
#
# FIX: the file-name assignment ended in "(unknown)" -- a corrupted
# hash subscript that is a syntax/strict error.  restored as the
# {file_name} key (read back in calculate_u01_feeder_delta).
#
sub copy_u01_feeder_cache
{
    my ($pdb, $pu01, $state) = @_;
    #
    my $section = MOUNTPICKUPFEEDER;
    #
    my $filename = $pu01->{file_name};
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $pcols = $pu01->{$section}->{column_names};
    #
    foreach my $prow (@{$pu01->{$section}->{data}})
    {
        my $fadd = $prow->{FAdd};
        my $fsadd = $prow->{FSAdd};
        #
        foreach my $col (@{$pcols})
        {
            $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{data}{$col} = $prow->{$col};
        }
        #
        $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{state} = $state;
        $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$fadd}{$fsadd}{file_name} = $filename;
    }
}
#
#
# take every feeder row's raw counts as the deltas (used when there is
# no usable baseline), replacing any previous deltas.  rows are keyed
# by feeder address and sub-address.
#
sub copy_u01_feeder_delta
{
    my ($pdb, $pu01) = @_;
    #
    my $section = MOUNTPICKUPFEEDER;
    #
    my $filename = $pu01->{file_name};
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    #
    my $pcols = $pu01->{$section}->{column_names};
    #
    delete $pdb->{$section}->{$machine}{$lane}{$stage}{delta};
    #
    foreach my $prow (@{$pu01->{$section}->{data}})
    {
        my $fadd = $prow->{FAdd};
        my $fsadd = $prow->{FSAdd};
        #
        # copy all columns at once via a hash slice.
        @{$pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data}}{@{$pcols}} =
            @{$prow}{@{$pcols}};
    }
}
#
sub tabulate_u01_feeder_delta
{
my ($pdb, $pu01) = @_;
#
my $filename = $pu01->{file_name};
#
my $machine = $pu01->{mach_no};
my $lane = $pu01->{lane};
my $stage = $pu01->{stage};
my $output_no = $pu01->{output_no};
my $section = MOUNTPICKUPFEEDER;
#
my $product = $pdb->{product}{u01}{$machine}{$lane}{$stage};
#
#
# Fold the per-feeder delta records into the running totals, keyed both
# product-independently and by the product currently on the line.
#
# BUG FIX: the original code contained the per-ReelID accumulation
# if/else block TWICE in a row (for both the product-independent and the
# product-dependent totals), so every by_..._reelid feeder count was
# added twice per file. The parallel nozzle tabulation has no such
# duplication; the duplicate block is removed here.
#
# A bare "+=" replaces the exists()/initialize split: Perl autovivifies
# the intermediate hash levels, and an undefined value as the target of
# a compound assignment is exempt from "uninitialized" warnings.
foreach my $fadd (sort { $a <=> $b } keys %{$pdb->{$section}{$machine}{$lane}{$stage}{delta}})
{
    foreach my $fsadd (sort { $a <=> $b } keys %{$pdb->{$section}{$machine}{$lane}{$stage}{delta}{$fadd}})
    {
        # delta record for this feeder position
        my $pdelta = $pdb->{$section}{$machine}{$lane}{$stage}{delta}{$fadd}{$fsadd}{data};
        my $reelid = $pdelta->{ReelID};
        #
        foreach my $col (@feeder_count_cols)
        {
            #
            # product-independent totals
            #
            $totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$reelid}{$col} += $pdelta->{$col};
            $totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$col} += $pdelta->{$col};
            #
            # product-dependent totals
            #
            $totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$reelid}{$col} += $pdelta->{$col};
            $totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$col} += $pdelta->{$col};
        }
    }
}
}
#
#
# audit_u01_feeders($pdb, $pu01)
#
# Drive the feeder-section (MOUNTPICKUPFEEDER) state machine for one
# loaded U01 file. Depending on the file's output type the per-feeder
# counters are copied or diffed against the cached counters for this
# machine/lane/stage, folded into the running totals, and the cache and
# state are updated.
#
# FIX: defined(@{...}) is a fatal error as of Perl 5.22; the data
# reference itself is tested instead. Stray trailing commas before
# statement modifiers were also removed.
#
sub audit_u01_feeders
{
    my ($pdb, $pu01) = @_;
    #
    set_report_feeder_precision($pu01);
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    my $section = MOUNTPICKUPFEEDER;
    my $filename = $pu01->{file_name};
    #
    printf $log_fh "\nSECTION : %s\n", $section
        if ($verbose >= MAXVERBOSE);
    #
    if ($verbose >= MAXVERBOSE)
    {
        printf $log_fh "MACHINE : %s\n", $machine;
        printf $log_fh "LANE : %d\n", $lane;
        printf $log_fh "STAGE : %d\n", $stage;
        printf $log_fh "OUTPUT NO: %s\n", $output_no;
        printf $log_fh "FILE RECS : %d\n", scalar(@{$pu01->{data}});
        # test the reference, not defined(@{...}) (fatal since 5.22)
        printf $log_fh "%s RECS: %d\n", $section, scalar(@{$pu01->{$section}->{data}}) if (defined($pu01->{$section}->{data}));
    }
    #
    # check if the file has a feeder data section.
    #
    if ($output_no == TIMER_NOT_RUNNING)
    {
        printf $log_fh "No Feeder data in Output=%d U01 files. Skipping.\n", $output_no
            if ($verbose >= MAXVERBOSE);
        return;
    }
    elsif (($output_no == PROD_COMPLETE) ||
           ($output_no == PROD_COMPLETE_LATER))
    {
        if ( ! exists($pdb->{$section}->{$machine}{$lane}{$stage}{state}))
        {
            # first file seen for this machine/lane/stage: seed the
            # cache, do not tabulate (no baseline to diff against)
            printf $log_fh "ENTRY STATE: UNKNOWN\n"
                if ($verbose >= MAXVERBOSE);
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
            delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
            #
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq RESET)
        {
            # counters were cleared: take the file's counts as the delta
            printf $log_fh "ENTRY STATE: %s\n",
                $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            copy_u01_feeder_delta($pdb, $pu01);
            tabulate_u01_feeder_delta($pdb, $pu01);
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
            #
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq DELTA)
        {
            # normal case: diff against the cache, then tabulate
            printf $log_fh "ENTRY STATE: %s\n",
                $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            calculate_u01_feeder_delta($pdb, $pu01);
            tabulate_u01_feeder_delta($pdb, $pu01);
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
            #
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
        }
        else
        {
            my $state = $pdb->{$section}->{$machine}{$lane}{$stage}{state};
            die "ERROR: unknown $section state: $state. Stopped";
        }
    }
    elsif ($output_no == DETECT_CHANGE)
    {
        if ( ! exists($pdb->{$section}->{$machine}{$lane}{$stage}{state}))
        {
            # NOTE(review): unlike the PROD_COMPLETE branch, this path
            # seeds the cache without setting {state} — confirm intended
            printf $log_fh "ENTRY STATE: UNKNOWN\n"
                if ($verbose >= MAXVERBOSE);
            #
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq RESET)
        {
            printf $log_fh "ENTRY STATE: %s\n",
                $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            copy_u01_feeder_delta($pdb, $pu01);
            tabulate_u01_feeder_delta($pdb, $pu01);
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq DELTA)
        {
            printf $log_fh "ENTRY STATE: %s\n",
                $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            calculate_u01_feeder_delta($pdb, $pu01);
            tabulate_u01_feeder_delta($pdb, $pu01);
            copy_u01_feeder_cache($pdb, $pu01, DELTA);
            #
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
        }
        else
        {
            my $state = $pdb->{$section}->{$machine}{$lane}{$stage}{state};
            die "ERROR: unknown $section state: $state. Stopped";
        }
    }
    elsif (($output_no == MANUAL_CLEAR) ||
           ($output_no == AUTO_CLEAR))
    {
        # counters cleared on the machine: drop the cache and wait for
        # the next file to re-seed it
        # NOTE(review): {state} is printed before being set — undef on
        # the very first file for this machine/lane/stage
        printf $log_fh "ENTRY STATE: %s\n",
            $pdb->{$section}->{$machine}{$lane}{$stage}{state}
            if ($verbose >= MAXVERBOSE);
        $pdb->{$section}->{$machine}{$lane}{$stage}{state} = RESET;
        delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
    }
    else
    {
        die "ERROR: unknown $section output type: $output_no. Stopped";
    }
    #
    printf $log_fh "EXIT STATE: %s\n",
        $pdb->{$section}->{$machine}{$lane}{$stage}{state}
        if ($verbose >= MAXVERBOSE);
    #
    return;
}
#
######################################################################
#
# routines for nozzle section
#
#
# calculate_u01_nozzle_delta($pdb, $pu01)
#
# Compute the per-nozzle count deltas for one U01 file by diffing each
# row against the cached counts for the same machine/lane/stage and
# NHAdd/NCAdd position. The results replace any previous {delta} tree.
# Rows with no cache entry, or whose BLKSerial changed (nozzle swapped),
# are taken at face value. Negative deltas are kept, zeroed, or flagged
# depending on the $use_neg_delta setting.
#
sub calculate_u01_nozzle_delta
{
my ($pdb, $pu01) = @_;
#
my $section = MOUNTPICKUPNOZZLE;
#
my $filename = $pu01->{file_name};
my $machine = $pu01->{mach_no};
my $lane = $pu01->{lane};
my $stage = $pu01->{stage};
my $output_no = $pu01->{output_no};
#
# column names as parsed from this file's nozzle section
my $pcols = $pu01->{$section}->{column_names};
#
# discard any delta left over from the previous file
delete $pdb->{$section}->{$machine}{$lane}{$stage}{delta};
#
foreach my $prow (@{$pu01->{$section}->{data}})
{
my $nhadd = $prow->{NHAdd};
my $ncadd = $prow->{NCAdd};
my $blkserial = $prow->{BLKSerial};
#
if ( ! exists($pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$nhadd}{$ncadd}{data}))
{
# no baseline for this nozzle position: copy all columns as-is
printf $log_fh "%d WARNING: [%s] %s NHAdd %s, NCAdd %s NOT found in cache. Taking all counts as is.\n", __LINE__, $filename, $section, $nhadd, $ncadd if ($verbose >= MINVERBOSE);
foreach my $col (@{$pcols})
{
$pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$nhadd}{$ncadd}{data}{$col} = $prow->{$col};
}
}
else
{
my $cache_blkserial = $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$nhadd}{$ncadd}{data}{BLKSerial};
if ($blkserial eq $cache_blkserial)
{
# same physical nozzle: delta each count column against the cache
$pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$nhadd}{$ncadd}{data}{BLKSerial} = $blkserial;
#
foreach my $col (@nozzle_count_cols)
{
my $u01_value = $prow->{$col};
my $cache_value = $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$nhadd}{$ncadd}{data}{$col};
#
my $delta = $u01_value - $cache_value;
#
if ($delta >= 0)
{
$pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$nhadd}{$ncadd}{data}{$col} = $delta;
}
elsif ($use_neg_delta == TRUE)
{
# counter went backwards but negative deltas are allowed
printf $log_fh "%d WARNING: [%s] %s NHAdd %s, NCAdd %s using NEGATIVE delta for key %s: %d\n", __LINE__, $filename, $section, $nhadd, $ncadd, $col, $delta if ($verbose >= MINVERBOSE);
$pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$nhadd}{$ncadd}{data}{$col} = $delta;
}
else
{
# counter went backwards: clamp to zero and raise a red flag
$pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$nhadd}{$ncadd}{data}{$col} = 0;
printf $log_fh "%d WARNING: [%s] %s NHAdd %s, NCAdd %s setting NEGATIVE delta (%d) for key %s to ZERO\n", __LINE__, $filename, $section, $nhadd, $ncadd, $delta, $col if ($verbose >= MINVERBOSE);
set_red_flag($machine, $lane, $stage, $filename, $delta);
}
}
}
else
{
# nozzle was replaced (BLKSerial changed): cached counts are for a
# different nozzle, so restart from this file's raw counts
printf $log_fh "%d WARNING: [%s] %s NHAdd %s, NCAdd %s BLKSERIAL CHANGED: CACHED %s, CURRENT U01 %s\n", __LINE__, $filename, $section, $nhadd, $ncadd, $cache_blkserial, $blkserial if ($verbose >= MINVERBOSE);
#
delete $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$nhadd}{$ncadd}{data};
#
foreach my $col (@{$pcols})
{
$pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$nhadd}{$ncadd}{data}{$col} = $prow->{$col};
}
}
}
}
}
#
#
# copy_u01_nozzle_cache($pdb, $pu01, $state)
#
# Snapshot the current U01 nozzle rows into the per-machine/lane/stage
# cache so the next file's deltas can be computed against them. Each
# cached position is tagged with the supplied state.
#
sub copy_u01_nozzle_cache
{
    my ($pdb, $pu01, $state) = @_;
    #
    my $section = MOUNTPICKUPNOZZLE;
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    #
    my $pcols = $pu01->{$section}->{column_names};
    #
    foreach my $prow (@{$pu01->{$section}->{data}})
    {
        # nozzle position key
        my ($nh, $nc) = @{$prow}{qw(NHAdd NCAdd)};
        #
        $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$nh}{$nc}{data}{$_} = $prow->{$_}
            for @{$pcols};
        #
        $pdb->{$section}->{$machine}{$lane}{$stage}{cache}{$nh}{$nc}{state} = $state;
    }
}
#
#
# copy_u01_nozzle_delta($pdb, $pu01)
#
# Take the current U01 nozzle rows verbatim as this interval's delta
# (used when there is no valid cache to diff against). Any previous
# delta tree for this machine/lane/stage is discarded first.
#
sub copy_u01_nozzle_delta
{
    my ($pdb, $pu01) = @_;
    #
    my $section = MOUNTPICKUPNOZZLE;
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    #
    my $pcols = $pu01->{$section}->{column_names};
    #
    # start from a clean slate
    delete $pdb->{$section}->{$machine}{$lane}{$stage}{delta};
    #
    foreach my $prow (@{$pu01->{$section}->{data}})
    {
        my ($nh, $nc) = @{$prow}{qw(NHAdd NCAdd)};
        #
        $pdb->{$section}->{$machine}{$lane}{$stage}{delta}{$nh}{$nc}{data}{$_} = $prow->{$_}
            for @{$pcols};
    }
}
#
#
# tabulate_u01_nozzle_delta($pdb, $pu01)
#
# Fold the per-nozzle delta records for this machine/lane/stage into the
# global running totals, keyed both product-independently and by the
# product currently associated with the line.
#
# A bare "+=" replaces the original exists()/initialize split: Perl
# autovivifies the intermediate hash levels, and an undefined value as
# the target of a compound assignment is exempt from "uninitialized"
# warnings, so first-time and subsequent accumulation are identical.
#
sub tabulate_u01_nozzle_delta
{
    my ($pdb, $pu01) = @_;
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $section = MOUNTPICKUPNOZZLE;
    #
    # product currently recorded for this machine/lane/stage
    my $product = $pdb->{product}{u01}{$machine}{$lane}{$stage};
    #
    foreach my $nhadd (sort { $a <=> $b } keys %{$pdb->{$section}{$machine}{$lane}{$stage}{delta}})
    {
        foreach my $ncadd (sort { $a <=> $b } keys %{$pdb->{$section}{$machine}{$lane}{$stage}{delta}{$nhadd}})
        {
            # delta record for this nozzle position
            my $pdelta = $pdb->{$section}{$machine}{$lane}{$stage}{delta}{$nhadd}{$ncadd}{data};
            my $blkserial = $pdelta->{BLKSerial};
            #
            foreach my $col (@nozzle_count_cols)
            {
                #
                # product-independent totals
                #
                $totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$blkserial}{$col} += $pdelta->{$col};
                $totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$col} += $pdelta->{$col};
                #
                # product-dependent totals
                #
                $totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$blkserial}{$col} += $pdelta->{$col};
                $totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$col} += $pdelta->{$col};
            }
        }
    }
}
#
#
# audit_u01_nozzles($pdb, $pu01)
#
# Drive the nozzle-section (MOUNTPICKUPNOZZLE) state machine for one
# loaded U01 file: copy or diff the per-nozzle counters against the
# cache for this machine/lane/stage, tabulate the deltas, and update the
# cache and state according to the file's output type.
#
# FIX: defined(@{...}) is a fatal error as of Perl 5.22; the data
# reference itself is tested instead. A stray trailing comma before a
# statement modifier was also removed.
#
sub audit_u01_nozzles
{
    my ($pdb, $pu01) = @_;
    #
    set_report_nozzle_precision($pu01);
    #
    my $machine = $pu01->{mach_no};
    my $lane = $pu01->{lane};
    my $stage = $pu01->{stage};
    my $output_no = $pu01->{output_no};
    my $section = MOUNTPICKUPNOZZLE;
    my $filename = $pu01->{file_name};
    #
    printf $log_fh "\nSECTION : %s\n", $section
        if ($verbose >= MAXVERBOSE);
    #
    if ($verbose >= MAXVERBOSE)
    {
        printf $log_fh "MACHINE : %s\n", $machine;
        printf $log_fh "LANE : %d\n", $lane;
        printf $log_fh "STAGE : %d\n", $stage;
        printf $log_fh "OUTPUT NO: %s\n", $output_no;
        printf $log_fh "FILE RECS : %d\n", scalar(@{$pu01->{data}});
        # test the reference, not defined(@{...}) (fatal since 5.22)
        printf $log_fh "%s RECS: %d\n", $section, scalar(@{$pu01->{$section}->{data}}) if (defined($pu01->{$section}->{data}));
    }
    #
    # check if the file has a nozzle data section.
    #
    if (($output_no == DETECT_CHANGE) ||
        ($output_no == TIMER_NOT_RUNNING))
    {
        printf $log_fh "No Nozzle data in Output=%d U01 files. Skipping.\n", $output_no
            if ($verbose >= MAXVERBOSE);
        return;
    }
    elsif (($output_no == PROD_COMPLETE) ||
           ($output_no == PROD_COMPLETE_LATER))
    {
        if ( ! exists($pdb->{$section}->{$machine}{$lane}{$stage}{state}))
        {
            # first file seen for this machine/lane/stage: seed the
            # cache, do not tabulate (no baseline to diff against)
            printf $log_fh "ENTRY STATE: UNKNOWN\n"
                if ($verbose >= MAXVERBOSE);
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
            delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
            #
            copy_u01_nozzle_cache($pdb, $pu01, DELTA);
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq RESET)
        {
            # counters were cleared: take the file's counts as the delta
            printf $log_fh "ENTRY STATE: %s\n",
                $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            copy_u01_nozzle_delta($pdb, $pu01);
            tabulate_u01_nozzle_delta($pdb, $pu01);
            copy_u01_nozzle_cache($pdb, $pu01, DELTA);
            #
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
        }
        elsif ($pdb->{$section}->{$machine}{$lane}{$stage}{state} eq DELTA)
        {
            # normal case: diff against the cache, then tabulate
            printf $log_fh "ENTRY STATE: %s\n",
                $pdb->{$section}->{$machine}{$lane}{$stage}{state}
                if ($verbose >= MAXVERBOSE);
            calculate_u01_nozzle_delta($pdb, $pu01);
            tabulate_u01_nozzle_delta($pdb, $pu01);
            copy_u01_nozzle_cache($pdb, $pu01, DELTA);
            #
            $pdb->{$section}->{$machine}{$lane}{$stage}{state} = DELTA;
        }
        else
        {
            my $state = $pdb->{$section}->{$machine}{$lane}{$stage}{state};
            die "ERROR: unknown $section state: $state. Stopped";
        }
    }
    elsif (($output_no == MANUAL_CLEAR) ||
           ($output_no == AUTO_CLEAR))
    {
        # counters cleared on the machine: drop the cache and wait for
        # the next file to re-seed it
        # NOTE(review): {state} is printed before being set — undef on
        # the very first file for this machine/lane/stage
        printf $log_fh "ENTRY STATE: %s\n",
            $pdb->{$section}->{$machine}{$lane}{$stage}{state}
            if ($verbose >= MAXVERBOSE);
        $pdb->{$section}->{$machine}{$lane}{$stage}{state} = RESET;
        delete $pdb->{$section}->{$machine}{$lane}{$stage}{cache};
    }
    else
    {
        die "ERROR: unknown $section output type: $output_no. Stopped";
    }
    #
    printf $log_fh "EXIT STATE: %s\n",
        $pdb->{$section}->{$machine}{$lane}{$stage}{state}
        if ($verbose >= MAXVERBOSE);
    #
    return;
}
#
######################################################################
#
# high-level u01 file audit functions
#
#
# audit_u01_file($pdb, $pu01)
#
# Audit a single loaded U01 file: record its red-flag and product info,
# run the name/value audits for COUNT and TIME, then the feeder and
# nozzle section audits. Output-type-12 (timer-not-running) files are
# skipped entirely when PROC_OPT_IGNALL12 is set.
#
sub audit_u01_file
{
    my ($pdb, $pu01) = @_;
    #
    my $output_no = $pu01->{output_no};
    #
    if (($output_no == TIMER_NOT_RUNNING) &&
        (($proc_option & PROC_OPT_IGNALL12) != 0))
    {
        return;
    }
    #
    check_red_flag($pu01);
    set_product_info_u01($pdb, $pu01);
    #
    # choose between the two name/value audit implementations
    my $audit_nv = (($proc_option & PROC_OPT_NMVAL2) != 0)
        ? \&audit_u01_name_value_2
        : \&audit_u01_name_value;
    #
    $audit_nv->($pdb, $pu01, COUNT);
    $audit_nv->($pdb, $pu01, TIME);
    #
    audit_u01_feeders($pdb, $pu01);
    audit_u01_nozzles($pdb, $pu01);
    #
    return;
}
#
#
# load_u01_sections($pu01)
#
# Parse all sections of interest out of an already-loaded U01 file:
# the name/value sections, the list (tabular) sections, and finally the
# inspection data.
#
sub load_u01_sections
{
    my ($pu01) = @_;
    #
    # name/value sections
    load_name_value($pu01, $_)
        for (INDEX, INFORMATION, TIME, CYCLETIME, COUNT);
    #
    # list (tabular) sections
    load_list($pu01, $_)
        for (DISPENSER, MOUNTPICKUPFEEDER, MOUNTPICKUPNOZZLE);
    #
    load_name_value($pu01, INSPECTIONDATA);
}
#
#
# audit_u01_files($pu01s, $pdb)
#
# Load and audit every U01 file in the given list, accumulating results
# into the product database. Files that fail to load are skipped.
#
sub audit_u01_files
{
    my ($pu01s, $pdb) = @_;
    #
    printf $log_fh "\nAudit U01 files:\n";
    #
    foreach my $pu01 (@{$pu01s})
    {
        printf $log_fh "\nAudit U01: %s\n", $pu01->{file_name}
            if ($verbose >= MIDVERBOSE);
        #
        # skip files that fail to load
        next if (load($pu01) == 0);
        #
        load_u01_sections($pu01);
        audit_u01_file($pdb, $pu01);
    }
    #
    return;
}
#
######################################################################
#
# print u01 file report functions
#
#
# print_u01_count_report($pdb)
#
# Print the COUNT-section totals to the log: product-independent totals
# by machine and by machine/lane, followed by the same two breakdowns
# for each product.
#
sub print_u01_count_report
{
    my ($pdb) = @_;
    #
    my $section = COUNT;
    my $width = $report_precision{$section}{precision};
    #
    ###############################################################
    #
    # product-independent totals
    #
    printf $log_fh "\nData For %s by Machine:\n", $section;
    #
    foreach my $mach (sort { $a <=> $b } keys %{$totals{$section}{by_machine}})
    {
        printf $log_fh "Machine: %s\n", $mach;
        printf $log_fh "\t%-${width}s: %d\n", $_, $totals{$section}{by_machine}{$mach}{$_}
            foreach (sort keys %{$totals{$section}{by_machine}{$mach}});
    }
    #
    printf $log_fh "\nData For %s by Machine and Lane:\n", $section;
    #
    foreach my $mach (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane}})
    {
        foreach my $ln (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane}{$mach}})
        {
            printf $log_fh "Machine: %s, Lane: %s\n", $mach, $ln;
            printf $log_fh "\t%-${width}s: %d\n", $_, $totals{$section}{by_machine_lane}{$mach}{$ln}{$_}
                foreach (sort keys %{$totals{$section}{by_machine_lane}{$mach}{$ln}});
        }
    }
    #
    ###############################################################
    #
    # per-product totals
    #
    foreach my $product (sort keys %{$totals{by_product}})
    {
        printf $log_fh "\nData For Product %s %s by Machine:\n", $product, $section;
        #
        foreach my $mach (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine}})
        {
            printf $log_fh "Machine: %s\n", $mach;
            printf $log_fh "\t%-${width}s: %d\n", $_, $totals{by_product}{$product}{$section}{by_machine}{$mach}{$_}
                foreach (sort keys %{$totals{by_product}{$product}{$section}{by_machine}{$mach}});
        }
    }
    #
    foreach my $product (sort keys %{$totals{by_product}})
    {
        printf $log_fh "\nData For Product %s %s by Machine and Lane:\n", $product, $section;
        #
        foreach my $mach (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane}})
        {
            foreach my $ln (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$mach}})
            {
                printf $log_fh "Machine: %s, Lane: %s\n", $mach, $ln;
                printf $log_fh "\t%-${width}s: %d\n", $_, $totals{by_product}{$product}{$section}{by_machine_lane}{$mach}{$ln}{$_}
                    foreach (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$mach}{$ln}});
            }
        }
    }
}
#
#
# print_u01_time_report($pdb)
#
# Print the TIME-section totals to the log: product-independent totals
# by machine and by machine/lane, followed by the same two breakdowns
# for each product. Mirrors print_u01_count_report.
#
sub print_u01_time_report
{
    my ($pdb) = @_;
    #
    my $section = TIME;
    my $width = $report_precision{$section}{precision};
    #
    ###############################################################
    #
    # product-independent totals
    #
    printf $log_fh "\nData For %s by Machine:\n", $section;
    #
    foreach my $mach (sort { $a <=> $b } keys %{$totals{$section}{by_machine}})
    {
        printf $log_fh "Machine: %s\n", $mach;
        printf $log_fh "\t%-${width}s: %d\n", $_, $totals{$section}{by_machine}{$mach}{$_}
            foreach (sort keys %{$totals{$section}{by_machine}{$mach}});
    }
    #
    printf $log_fh "\nData For %s by Machine and Lane:\n", $section;
    #
    foreach my $mach (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane}})
    {
        foreach my $ln (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane}{$mach}})
        {
            printf $log_fh "Machine: %s, Lane: %s\n", $mach, $ln;
            printf $log_fh "\t%-${width}s: %d\n", $_, $totals{$section}{by_machine_lane}{$mach}{$ln}{$_}
                foreach (sort keys %{$totals{$section}{by_machine_lane}{$mach}{$ln}});
        }
    }
    #
    ###############################################################
    #
    # per-product totals
    #
    foreach my $product (sort keys %{$totals{by_product}})
    {
        printf $log_fh "\nData For Product %s %s by Machine:\n", $product, $section;
        #
        foreach my $mach (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine}})
        {
            printf $log_fh "Machine: %s\n", $mach;
            printf $log_fh "\t%-${width}s: %d\n", $_, $totals{by_product}{$product}{$section}{by_machine}{$mach}{$_}
                foreach (sort keys %{$totals{by_product}{$product}{$section}{by_machine}{$mach}});
        }
    }
    #
    foreach my $product (sort keys %{$totals{by_product}})
    {
        printf $log_fh "\nData For Product %s %s by Machine and Lane:\n", $product, $section;
        #
        foreach my $mach (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane}})
        {
            foreach my $ln (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$mach}})
            {
                printf $log_fh "Machine: %s, Lane: %s\n", $mach, $ln;
                printf $log_fh "\t%-${width}s: %d\n", $_, $totals{by_product}{$product}{$section}{by_machine_lane}{$mach}{$ln}{$_}
                    foreach (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$mach}{$ln}});
            }
        }
    }
}
#
#
# print_u01_nozzle_report($pdb)
#
# Print the nozzle (MOUNTPICKUPNOZZLE) totals to the log as fixed-width
# tables: first the product-independent totals keyed by machine, lane,
# stage, NHAdd, NCAdd and BLKSerial, then the same without BLKSerial,
# then both tables again per product.
#
sub print_u01_nozzle_report
{
my ($pdb) = @_;
#
###############################################################
#
my $section = MOUNTPICKUPNOZZLE;
# NOTE(review): $section_precision is never used in this report —
# confirm before removing
my $section_precision = $report_precision{$section}{precision};
#
printf $log_fh "\nData For %s by Machine, Lane, Stage, NHAdd, NCAdd, Blkserial:\n", $section;
#
# table header row
foreach my $pcol (@nozzle_print_cols)
{
printf $log_fh $pcol->{format}, $pcol->{name};
}
printf $log_fh "\n";
#
foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}})
{
foreach my $nhadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}})
{
foreach my $ncadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}})
{
# BLKSerial values are strings: sorted lexically
foreach my $blkserial (sort keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}})
{
printf $log_fh "%-8s %-8s %-8s %-8s %-8s %-30s ",
$machine, $lane, $stage, $nhadd, $ncadd, $blkserial;
foreach my $col (@nozzle_count_cols)
{
printf $log_fh "%-8d ",
$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$blkserial}{$col};
}
printf $log_fh "\n";
}
}
}
}
}
}
#
# same totals aggregated across BLKSerial
printf $log_fh "\nData For %s by Machine, Lane, Stage, NHAdd, NCAdd:\n", $section;
#
foreach my $pcol (@nozzle_print_cols2)
{
printf $log_fh $pcol->{format}, $pcol->{name};
}
printf $log_fh "\n";
#
foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}})
{
foreach my $nhadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}})
{
foreach my $ncadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}})
{
printf $log_fh "%-8s %-8s %-8s %-8s %-8s ",
$machine, $lane, $stage, $nhadd, $ncadd;
foreach my $col (@nozzle_count_cols)
{
printf $log_fh "%-8d ",
$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$col};
}
printf $log_fh "\n";
}
}
}
}
}
#
###############################################################
#
# per-product versions of the same two tables
#
foreach my $product (sort keys %{$totals{by_product}})
{
my $section = MOUNTPICKUPNOZZLE;
my $section_precision = $report_precision{$section}{precision};
#
printf $log_fh "\nData For %s %s by Machine, Lane, Stage, NHAdd, NCAdd, Blkserial:\n", $product, $section;
#
foreach my $pcol (@nozzle_print_cols)
{
printf $log_fh $pcol->{format}, $pcol->{name};
}
printf $log_fh "\n";
#
foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}})
{
foreach my $nhadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}})
{
foreach my $ncadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}})
{
foreach my $blkserial (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}})
{
printf $log_fh "%-8s %-8s %-8s %-8s %-8s %-30s ",
$machine, $lane, $stage, $nhadd, $ncadd, $blkserial;
foreach my $col (@nozzle_count_cols)
{
printf $log_fh "%-8d ",
$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$blkserial}{$col};
}
printf $log_fh "\n";
}
}
}
}
}
}
#
printf $log_fh "\nData For %s %s by Machine, Lane, Stage, NHAdd, NCAdd:\n", $product, $section;
#
foreach my $pcol (@nozzle_print_cols2)
{
printf $log_fh $pcol->{format}, $pcol->{name};
}
printf $log_fh "\n";
#
foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}})
{
foreach my $nhadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}})
{
foreach my $ncadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}})
{
printf $log_fh "%-8s %-8s %-8s %-8s %-8s ",
$machine, $lane, $stage, $nhadd, $ncadd;
foreach my $col (@nozzle_count_cols)
{
printf $log_fh "%-8d ",
$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$col};
}
printf $log_fh "\n";
}
}
}
}
}
}
}
#
#
# print_u01_feeder_report($pdb)
#
# Print the feeder (MOUNTPICKUPFEEDER) totals to the log as fixed-width
# tables: first the product-independent totals keyed by machine, lane,
# stage, FAdd, FSAdd and ReelID, then the same without ReelID, then both
# tables again per product. Mirrors print_u01_nozzle_report.
#
sub print_u01_feeder_report
{
my ($pdb) = @_;
#
###############################################################
#
my $section = MOUNTPICKUPFEEDER;
# NOTE(review): $section_precision is never used in this report —
# confirm before removing
my $section_precision = $report_precision{$section}{precision};
#
printf $log_fh "\nData For %s by Machine, Lane, Stage, FAdd, FSAdd, ReelID:\n", $section;
#
# table header row
foreach my $pcol (@feeder_print_cols)
{
printf $log_fh $pcol->{format}, $pcol->{name};
}
printf $log_fh "\n";
#
foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}})
{
foreach my $fadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}})
{
foreach my $fsadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}})
{
# ReelID values are strings: sorted lexically
foreach my $reelid (sort keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}})
{
printf $log_fh "%-8s %-8s %-8s %-8s %-8s %-30s ",
$machine, $lane, $stage, $fadd, $fsadd, $reelid;
foreach my $col (@feeder_count_cols)
{
printf $log_fh "%-8d ",
$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$reelid}{$col};
}
printf $log_fh "\n";
}
}
}
}
}
}
#
# same totals aggregated across ReelID
printf $log_fh "\nData For %s by Machine, Lane, Stage, FAdd, FSAdd:\n", $section;
#
foreach my $pcol (@feeder_print_cols2)
{
printf $log_fh $pcol->{format}, $pcol->{name};
}
printf $log_fh "\n";
#
foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}})
{
foreach my $fadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}})
{
foreach my $fsadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}})
{
printf $log_fh "%-8s %-8s %-8s %-8s %-8s ",
$machine, $lane, $stage, $fadd, $fsadd;
foreach my $col (@feeder_count_cols)
{
printf $log_fh "%-8d ",
$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$col};
}
printf $log_fh "\n";
}
}
}
}
}
#
###############################################################
#
# per-product versions of the same two tables
#
foreach my $product (sort keys %{$totals{by_product}})
{
my $section = MOUNTPICKUPFEEDER;
my $section_precision = $report_precision{$section}{precision};
#
printf $log_fh "\nData For %s %s by Machine, Lane, Stage, FAdd, FSAdd, ReelID:\n", $product, $section;
#
foreach my $pcol (@feeder_print_cols)
{
printf $log_fh $pcol->{format}, $pcol->{name};
}
printf $log_fh "\n";
#
foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}})
{
foreach my $fadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}})
{
foreach my $fsadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}})
{
foreach my $reelid (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}})
{
printf $log_fh "%-8s %-8s %-8s %-8s %-8s %-30s ",
$machine, $lane, $stage, $fadd, $fsadd, $reelid;
foreach my $col (@feeder_count_cols)
{
printf $log_fh "%-8d ",
$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$reelid}{$col};
}
printf $log_fh "\n";
}
}
}
}
}
}
#
printf $log_fh "\nData For %s %s by Machine, Lane, Stage, FAdd, FSAdd:\n", $product, $section;
#
foreach my $pcol (@feeder_print_cols2)
{
printf $log_fh $pcol->{format}, $pcol->{name};
}
printf $log_fh "\n";
#
foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}})
{
foreach my $fadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}})
{
foreach my $fsadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}})
{
printf $log_fh "%-8s %-8s %-8s %-8s %-8s ",
$machine, $lane, $stage, $fadd, $fsadd;
foreach my $col (@feeder_count_cols)
{
printf $log_fh "%-8d ",
$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$col};
}
printf $log_fh "\n";
}
}
}
}
}
}
}
#
#
# Emit the full set of U01 plain-text reports (counts, times,
# nozzles, feeders) to the log, in that order.
#
sub print_u01_report
{
    my $pdb = shift;
    #
    # Dispatch each section report in sequence.
    #
    foreach my $report_sub (\&print_u01_count_report,
                            \&print_u01_time_report,
                            \&print_u01_nozzle_report,
                            \&print_u01_feeder_report)
    {
        $report_sub->($pdb);
    }
}
#
######################################################################
#
# export u01 file report functions
#
#
# Export the COUNT section totals as CSV files under $export_dir.
# Writes, in order:
#   COUNT_TOTALS.csv                  - grand totals (one header row, one data row)
#   COUNT_BY_MACHINE.csv              - totals per machine
#   COUNT_BY_MACHINE_LANE.csv         - totals per machine/lane
#   COUNT_TOTALS_BY_PRODUCT.csv       - totals per product
#   COUNT_BY_PRODUCT_MACHINE.csv      - totals per product/machine
#   COUNT_BY_PRODUCT_MACHINE_LANE.csv - totals per product/machine/lane
# Progress messages go to $log_fh.  Data is read from the file-level
# %totals structure; column order in every file is the sorted key order.
# NOTE(review): $pdb and $section_precision are unused here — confirm
# whether they are kept for interface symmetry with the print_* subs.
#
sub export_u01_count_report
{
my ($pdb) = @_;
#
###############################################################
#
my $section = COUNT;
my $section_precision = $report_precision{$section}{precision};
#
printf $log_fh "\nExport Total Data For %s:\n", $section;
#
my $first_time = TRUE;
#
open(my $outfh, ">" , "${export_dir}/COUNT_TOTALS.csv") || die $!;
#
# Header row: the sorted counter names.  $first_time suppresses the
# leading comma on the first column.
#
foreach my $key (sort keys %{$totals{$section}{totals}})
{
if ($first_time == TRUE)
{
printf $outfh "%s", $key;
$first_time = FALSE;
}
else
{
printf $outfh ",%s", $key;
}
}
printf $outfh "\n";
#
# Data row: counter values in the same sorted key order as the header.
#
$first_time = TRUE;
foreach my $key (sort keys %{$totals{$section}{totals}})
{
if ($first_time == TRUE)
{
printf $outfh "%d", $totals{$section}{totals}{$key};
$first_time = FALSE;
}
else
{
printf $outfh ",%d", $totals{$section}{totals}{$key};
}
}
printf $outfh "\n";
close($outfh);
#
$section = COUNT;
$section_precision = $report_precision{$section}{precision};
#
printf $log_fh "\nExport Data For %s by Machine:\n", $section;
#
$first_time = TRUE;
#
# Per-machine totals.  The header row is emitted once, using the key
# set of the first machine record (assumes all machines share keys —
# TODO confirm).
#
open($outfh, ">" , "${export_dir}/COUNT_BY_MACHINE.csv") || die $!;
foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine}})
{
if ($first_time == TRUE)
{
printf $outfh "machine";
foreach my $key (sort keys %{$totals{$section}{by_machine}{$machine}})
{
printf $outfh ",%s", $key;
}
printf $outfh "\n";
$first_time = FALSE;
}
#
printf $outfh "%s", $machine;
foreach my $key (sort keys %{$totals{$section}{by_machine}{$machine}})
{
printf $outfh ",%d", $totals{$section}{by_machine}{$machine}{$key};
}
printf $outfh "\n";
}
close($outfh);
#
$section = COUNT;
$section_precision = $report_precision{$section}{precision};
#
printf $log_fh "\nExport Data For %s by Machine and Lane:\n", $section;
#
# Per machine/lane totals; same one-shot header pattern as above.
#
$first_time = TRUE;
open($outfh, ">" , "${export_dir}/COUNT_BY_MACHINE_LANE.csv") || die $!;
foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane}{$machine}})
{
if ($first_time == TRUE)
{
printf $outfh "machine,lane";
foreach my $key (sort keys %{$totals{$section}{by_machine_lane}{$machine}{$lane}})
{
printf $outfh ",%s", $key;
}
printf $outfh "\n";
$first_time = FALSE;
}
#
printf $outfh "%s,%s", $machine, $lane;
foreach my $key (sort keys %{$totals{$section}{by_machine_lane}{$machine}{$lane}})
{
printf $outfh ",%d", $totals{$section}{by_machine_lane}{$machine}{$lane}{$key};
}
printf $outfh "\n";
}
}
close($outfh);
#
###############################################################
#
# Per-product variants: same three reports, read from the
# $totals{by_product}{$product} branch.
#
$section = COUNT;
$section_precision = $report_precision{$section}{precision};
#
printf $log_fh "\nExport Total Data For %s by Product:\n", $section;
#
$first_time = TRUE;
open($outfh, ">" , "${export_dir}/COUNT_TOTALS_BY_PRODUCT.csv") || die $!;
foreach my $product (sort keys %{$totals{by_product}})
{
if ($first_time == TRUE)
{
printf $outfh "product";
foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{totals}})
{
printf $outfh ",%s", $key;
}
printf $outfh "\n";
$first_time = FALSE;
}
#
printf $outfh "%s", $product;
foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{totals}})
{
printf $outfh ",%d", $totals{by_product}{$product}{$section}{totals}{$key};
}
printf $outfh "\n";
}
close($outfh);
#
$section = COUNT;
$section_precision = $report_precision{$section}{precision};
#
printf $log_fh "\nExport Data For %s by Product and Machine:\n", $section;
#
$first_time = TRUE;
open($outfh, ">" , "${export_dir}/COUNT_BY_PRODUCT_MACHINE.csv") || die $!;
foreach my $product (sort keys %{$totals{by_product}})
{
foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine}})
{
if ($first_time == TRUE)
{
printf $outfh "product,machine";
foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{by_machine}{$machine}})
{
printf $outfh ",%s", $key;
}
printf $outfh "\n";
$first_time = FALSE;
}
#
printf $outfh "%s,%s", $product, $machine;
foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{by_machine}{$machine}})
{
printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine}{$machine}{$key};
}
printf $outfh "\n";
}
}
close($outfh);
#
$section = COUNT;
$section_precision = $report_precision{$section}{precision};
#
printf $log_fh "\nExport Data For %s by Product, Machine and Lane:\n", $section;
#
$first_time = TRUE;
open($outfh, ">" , "${export_dir}/COUNT_BY_PRODUCT_MACHINE_LANE.csv") || die $!;
foreach my $product (sort keys %{$totals{by_product}})
{
foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$machine}})
{
if ($first_time == TRUE)
{
printf $outfh "product,machine,lane";
foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$machine}{$lane}})
{
printf $outfh ",%s", $key;
}
printf $outfh "\n";
$first_time = FALSE;
}
#
printf $outfh "%s,%s,%s", $product, $machine, $lane;
foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$machine}{$lane}})
{
printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine_lane}{$machine}{$lane}{$key};
}
printf $outfh "\n";
}
}
}
close($outfh);
}
#
#
# Export the TIME section totals as CSV files under $export_dir:
#   TIME_BY_MACHINE.csv, TIME_BY_MACHINE_LANE.csv,
#   TIME_BY_PRODUCT_MACHINE.csv, TIME_BY_PRODUCT_MACHINE_LANE.csv.
# Unlike the COUNT export, these files carry no header row; each row
# is the key columns followed by the values in sorted key order.
# Reads the file-level %totals, $report_precision, $export_dir and
# $log_fh globals.
# NOTE(review): $pdb and $section_precision are unused here — kept
# for interface symmetry with the other export_* subs.
#
sub export_u01_time_report
{
    my ($pdb) = @_;
    #
    ###############################################################
    #
    my $section = TIME;
    my $section_precision = $report_precision{$section}{precision};
    #
    printf $log_fh "\nExport Data For %s by Machine:\n", $section;
    #
    open(my $outfh, ">" , "${export_dir}/TIME_BY_MACHINE.csv") || die $!;
    foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine}})
    {
        printf $outfh "%s", $machine;
        foreach my $key (sort keys %{$totals{$section}{by_machine}{$machine}})
        {
            printf $outfh ",%d", $totals{$section}{by_machine}{$machine}{$key};
        }
        # FIX: this statement used to end in a stray trailing comma
        # instead of a semicolon (harmless to perl, but a typo).
        printf $outfh "\n";
    }
    close($outfh);
    #
    $section = TIME;
    $section_precision = $report_precision{$section}{precision};
    #
    printf $log_fh "\nExport Data For %s by Machine and Lane:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/TIME_BY_MACHINE_LANE.csv") || die $!;
    foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane}})
    {
        foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane}{$machine}})
        {
            printf $outfh "%s,%s", $machine, $lane;
            foreach my $key (sort keys %{$totals{$section}{by_machine_lane}{$machine}{$lane}})
            {
                printf $outfh ",%d", $totals{$section}{by_machine_lane}{$machine}{$lane}{$key};
            }
            printf $outfh "\n";
        }
    }
    close($outfh);
    #
    ###############################################################
    #
    # Per-product variants.
    #
    $section = TIME;
    $section_precision = $report_precision{$section}{precision};
    #
    printf $log_fh "\nExport Data For %s by Product and Machine:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/TIME_BY_PRODUCT_MACHINE.csv") || die $!;
    foreach my $product (sort keys %{$totals{by_product}})
    {
        foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine}})
        {
            printf $outfh "%s,%s", $product, $machine;
            foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{by_machine}{$machine}})
            {
                printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine}{$machine}{$key};
            }
            printf $outfh "\n";
        }
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Product, Machine and Lane:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/TIME_BY_PRODUCT_MACHINE_LANE.csv") || die $!;
    foreach my $product (sort keys %{$totals{by_product}})
    {
        foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane}})
        {
            foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$machine}})
            {
                # FIX: format string was "%s,s,%s", which printed a
                # literal ",s," and silently dropped $lane.
                printf $outfh "%s,%s,%s", $product, $machine, $lane;
                foreach my $key (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane}{$machine}{$lane}})
                {
                    printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine_lane}{$machine}{$lane}{$key};
                }
                printf $outfh "\n";
            }
        }
    }
    close($outfh);
}
#
#
# Export the MOUNTPICKUPNOZZLE section totals as CSV files under
# $export_dir, at two aggregation levels (with and without the
# nozzle block serial), both overall and per product.  Column
# headers come from @nozzle_export_cols / @nozzle_export_cols2;
# data columns from @nozzle_count_cols.
# NOTE(review): $pdb and $section_precision are unused here.
#
sub export_u01_nozzle_report
{
    my ($pdb) = @_;
    #
    ###############################################################
    #
    my $section = MOUNTPICKUPNOZZLE;
    my $section_precision = $report_precision{$section}{precision};
    #
    printf $log_fh "\nExport Data For %s by Machine, Lane, Stage, NHAdd, NCAdd, Blkserial:\n", $section;
    #
    open(my $outfh, ">" , "${export_dir}/NOZZLE_BY_MACHINE_LANE_STAGE_NHADD_NCADD_BLKSERIAL.csv") || die $!;
    foreach my $pcol (@nozzle_export_cols)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}})
    {
        foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}})
        {
            foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}})
            {
                foreach my $nhadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}})
                {
                    foreach my $ncadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}})
                    {
                        foreach my $blkserial (sort keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}})
                        {
                            printf $outfh "%s,%s,%s,%s,%s,%s",
                                $machine, $lane, $stage, $nhadd, $ncadd, $blkserial;
                            foreach my $col (@nozzle_count_cols)
                            {
                                printf $outfh ",%d",
                                    $totals{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$blkserial}{$col};
                            }
                            printf $outfh "\n";
                        }
                    }
                }
            }
        }
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Machine, Lane, Stage, NHAdd, NCAdd:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/NOZZLE_BY_MACHINE_LANE_STAGE_NHADD_NCADD.csv") || die $!;
    foreach my $pcol (@nozzle_export_cols2)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}})
    {
        foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}})
        {
            foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}})
            {
                foreach my $nhadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}})
                {
                    foreach my $ncadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}})
                    {
                        printf $outfh "%s,%s,%s,%s,%s", $machine, $lane, $stage, $nhadd, $ncadd;
                        foreach my $col (@nozzle_count_cols)
                        {
                            printf $outfh ",%d", $totals{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$col};
                        }
                        printf $outfh "\n";
                    }
                }
            }
        }
    }
    close($outfh);
    #
    ###############################################################
    #
    # Per-product variants.
    #
    $section = MOUNTPICKUPNOZZLE;
    $section_precision = $report_precision{$section}{precision};
    #
    printf $log_fh "\nExport Data For %s by Product, Machine, Lane, Stage, NHAdd, NCAdd, Blkserial:\n", $section;
    #
    # FIX: this export used to reuse the overall file name
    # NOZZLE_BY_MACHINE_LANE_STAGE_NHADD_NCADD_BLKSERIAL.csv,
    # clobbering the file written above.  Use the by-product name,
    # matching the feeder report's naming convention.
    #
    open($outfh, ">" , "${export_dir}/NOZZLE_BY_PRODUCT_MACHINE_LANE_STAGE_NHADD_NCADD_BLKSERIAL.csv") || die $!;
    printf $outfh "product,";
    foreach my $pcol (@nozzle_export_cols)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    foreach my $product (sort keys %{$totals{by_product}})
    {
        foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}})
        {
            foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}})
            {
                foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}})
                {
                    foreach my $nhadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}})
                    {
                        foreach my $ncadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}})
                        {
                            foreach my $blkserial (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}})
                            {
                                printf $outfh "%s,%s,%s,%s,%s,%s,%s", $product, $machine, $lane, $stage, $nhadd, $ncadd, $blkserial;
                                foreach my $col (@nozzle_count_cols)
                                {
                                    printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd_blkserial}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$blkserial}{$col};
                                }
                                printf $outfh "\n";
                            }
                        }
                    }
                }
            }
        }
    }
    close($outfh);
    #
    printf $log_fh "\nExport Data For %s by Product, Machine, Lane, Stage, NHAdd, NCAdd:\n", $section;
    #
    open($outfh, ">" , "${export_dir}/NOZZLE_BY_PRODUCT_MACHINE_LANE_STAGE_NHADD_NCADD.csv") || die $!;
    printf $outfh "product,";
    foreach my $pcol (@nozzle_export_cols2)
    {
        printf $outfh $pcol->{format}, $pcol->{name};
    }
    printf $outfh "\n";
    #
    foreach my $product (sort keys %{$totals{by_product}})
    {
        foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}})
        {
            foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}})
            {
                foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}})
                {
                    foreach my $nhadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}})
                    {
                        foreach my $ncadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}})
                        {
                            printf $outfh "%s,%s,%s,%s,%s,%s",
                                $product, $machine, $lane, $stage, $nhadd, $ncadd;
                            foreach my $col (@nozzle_count_cols)
                            {
                                printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine_lane_stage_nhadd_ncadd}{$machine}{$lane}{$stage}{$nhadd}{$ncadd}{$col};
                            }
                            printf $outfh "\n";
                        }
                    }
                }
            }
        }
    }
    close($outfh);
}
#
#
# Export the MOUNTPICKUPFEEDER section totals as CSV files under
# $export_dir, at two aggregation levels (with and without the reel
# id), both overall and per product:
#   FEEDER_BY_MACHINE_LANE_STAGE_FADD_FSADD_REELID.csv
#   FEEDER_BY_MACHINE_LANE_STAGE_FADD_FSADD.csv
#   FEEDER_BY_PRODUCT_MACHINE_LANE_STAGE_FADD_FSADD_REELID.csv
#   FEEDER_BY_PRODUCT_MACHINE_LANE_STAGE_FADD_FSADD.csv
# Column headers come from @feeder_export_cols / @feeder_export_cols2;
# data columns from @feeder_count_cols.
# NOTE(review): $pdb and $section_precision are unused here.
#
sub export_u01_feeder_report
{
my ($pdb) = @_;
#
###############################################################
#
my $section = MOUNTPICKUPFEEDER;
my $section_precision = $report_precision{$section}{precision};
#
printf $log_fh "\nExport Data For %s by Machine, Lane, Stage, FAdd, FSAdd, ReelID:\n", $section;
#
# Finest granularity: machine/lane/stage/fadd/fsadd/reelid.
#
open(my $outfh, ">" , "${export_dir}/FEEDER_BY_MACHINE_LANE_STAGE_FADD_FSADD_REELID.csv") || die $!;
foreach my $pcol (@feeder_export_cols)
{
printf $outfh $pcol->{format}, $pcol->{name};
}
printf $outfh "\n";
#
foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}})
{
foreach my $fadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}})
{
foreach my $fsadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}})
{
foreach my $reelid (sort keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}})
{
printf $outfh "%s,%s,%s,%s,%s,%s", $machine, $lane, $stage, $fadd, $fsadd, $reelid;
foreach my $col (@feeder_count_cols)
{
printf $outfh ",%d", $totals{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$reelid}{$col};
}
printf $outfh "\n";
}
}
}
}
}
}
close($outfh);
#
printf $log_fh "\nExport Data For %s by Machine, Lane, Stage, FAdd, FSAdd:\n", $section;
#
# Same data aggregated without the reel id.
#
open($outfh, ">" , "${export_dir}/FEEDER_BY_MACHINE_LANE_STAGE_FADD_FSADD.csv") || die $!;
foreach my $pcol (@feeder_export_cols2)
{
printf $outfh $pcol->{format}, $pcol->{name};
}
printf $outfh "\n";
#
foreach my $machine (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}})
{
foreach my $fadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}})
{
foreach my $fsadd (sort { $a <=> $b } keys %{$totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}})
{
printf $outfh "%s,%s,%s,%s,%s", $machine, $lane, $stage, $fadd, $fsadd;
foreach my $col (@feeder_count_cols)
{
printf $outfh ",%d", $totals{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$col};
}
printf $outfh "\n";
}
}
}
}
}
close($outfh);
#
###############################################################
#
# Per-product variants of the two exports above.
#
$section = MOUNTPICKUPFEEDER;
$section_precision = $report_precision{$section}{precision};
#
printf $log_fh "\nExport Data For %s by Product, Machine, Lane, Stage, FAdd, FSAdd, ReelID:\n", $section;
#
open($outfh, ">" , "${export_dir}/FEEDER_BY_PRODUCT_MACHINE_LANE_STAGE_FADD_FSADD_REELID.csv") || die $!;
printf $outfh "product,";
foreach my $pcol (@feeder_export_cols)
{
printf $outfh $pcol->{format}, $pcol->{name};
}
printf $outfh "\n";
#
foreach my $product (sort keys %{$totals{by_product}})
{
foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}})
{
foreach my $fadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}})
{
foreach my $fsadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}})
{
foreach my $reelid (sort keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}})
{
printf $outfh "%s,%s,%s,%s,%s,%s,%s", $product, $machine, $lane, $stage, $fadd, $fsadd, $reelid;
foreach my $col (@feeder_count_cols)
{
printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd_reelid}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$reelid}{$col};
}
printf $outfh "\n";
}
}
}
}
}
}
}
close($outfh);
#
printf $log_fh "\nExport Data For %s by Product, Machine, Lane, Stage, FAdd, FSAdd:\n", $section;
#
open($outfh, ">" , "${export_dir}/FEEDER_BY_PRODUCT_MACHINE_LANE_STAGE_FADD_FSADD.csv") || die $!;
printf $outfh "product,";
foreach my $pcol (@feeder_export_cols2)
{
printf $outfh $pcol->{format}, $pcol->{name};
}
printf $outfh "\n";
#
foreach my $product (sort keys %{$totals{by_product}})
{
foreach my $machine (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}})
{
foreach my $lane (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}})
{
foreach my $stage (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}})
{
foreach my $fadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}})
{
foreach my $fsadd (sort { $a <=> $b } keys %{$totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}})
{
printf $outfh "%s,%s,%s,%s,%s,%s", $product, $machine, $lane, $stage, $fadd, $fsadd;
foreach my $col (@feeder_count_cols)
{
printf $outfh ",%d", $totals{by_product}{$product}{$section}{by_machine_lane_stage_fadd_fsadd}{$machine}{$lane}{$stage}{$fadd}{$fsadd}{$col};
}
printf $outfh "\n";
}
}
}
}
}
}
close($outfh);
}
#
#
# Emit the full set of U01 CSV exports (counts, times, nozzles,
# feeders), in that order.
#
sub export_u01_report
{
    my $pdb = shift;
    #
    foreach my $export_sub (\&export_u01_count_report,
                            \&export_u01_time_report,
                            \&export_u01_nozzle_report,
                            \&export_u01_feeder_report)
    {
        $export_sub->($pdb);
    }
}
#
#
# Top-level driver for U01 files: audit them, then (unless we are in
# audit-only mode) either export CSVs or print the plain-text report.
#
sub process_u01_files
{
    my $pu01s = shift;
    #
    # Nothing to do when the scan found no U01 files.
    #
    unless (scalar(@{$pu01s}) > 0)
    {
        printf $log_fh "No U01 files to process. Returning.\n\n";
        return;
    }
    #
    my %db = ();
    audit_u01_files($pu01s, \%db);
    #
    return if ($audit_only == TRUE);
    #
    if ($export_csv == TRUE)
    {
        export_u01_report(\%db);
    }
    else
    {
        print_u01_report(\%db);
    }
    #
    return;
}
#
######################################################################
#
# audit U03 files
#
#
# Parse the sections of one U03 file record: the two name/value
# sections, then each tabular (list) section.
#
sub load_u03_sections
{
    my $pu03 = shift;
    #
    # name/value sections
    #
    load_name_value($pu03, $_) for (INDEX, INFORMATION);
    #
    # list (tabular) sections
    #
    foreach my $section (BRECG, BRECGCALC, ELAPSETIMERECOG, SBOARD,
                         HEIGHTCORRECT, MOUNTQUALITYTRACE,
                         MOUNTLATESTREEL, MOUNTEXCHANGEREEL)
    {
        load_list($pu03, $section);
    }
}
#
# don't need this. leave it for now in case that changes.
#
#
# Copy one parsed section of a U03 file into the database, keyed by
# machine/lane/stage.  Any previously stored rows for that slot are
# discarded first; rows are prepended, so stored order is the reverse
# of file order.
#
sub copy_u03_data
{
    my ($pdb, $pu03, $section) = @_;
    #
    my $fname   = $pu03->{file_name};
    my $mach    = $pu03->{mach_no};
    my $lane_no = $pu03->{lane};
    my $stg     = $pu03->{stage};
    my $out_no  = $pu03->{output_no};
    #
    $pdb->{$section}->{$mach}{$lane_no}{$stg}{column_names} =
        $pu03->{$section}->{column_names};
    #
    # start from a clean slate before copying the rows over.
    #
    delete $pdb->{$section}->{$mach}{$lane_no}{$stg}{data};
    #
    unshift(@{$pdb->{$section}->{$mach}{$lane_no}{$stg}{data}}, $_)
        for (@{$pu03->{$section}->{data}});
}
#
#
# Accumulate the MOUNTQUALITYTRACE rows of one U03 file into the
# global %totals, both overall and per product.  Rows are prepended,
# so stored order is the reverse of file order.
#
sub tabulate_u03_quality_trace
{
    my ($pdb, $pu03) = @_;
    #
    my $filename  = $pu03->{file_name};
    my $machine   = $pu03->{mach_no};
    my $lane      = $pu03->{lane};
    my $stage     = $pu03->{stage};
    my $output_no = $pu03->{output_no};
    #
    my $section = MOUNTQUALITYTRACE;
    my $product = $pdb->{product}{u03}{$machine}{$lane}{$stage};
    #
    # overall accumulation, keyed by machine/lane/stage/filename.
    #
    my $pcols = $pu03->{$section}->{column_names};
    $totals{$section}{by_machine_lane_stage_filename}{$machine}{$lane}{$stage}{$filename}{column_names} = $pcols;
    #
    foreach my $prow (@{$pu03->{$section}->{data}})
    {
        unshift @{$totals{$section}{by_machine_lane_stage_filename}{$machine}{$lane}{$stage}{$filename}{data}}, $prow;
    }
    #
    # per-product accumulation.  FIX: column_names used to be stored
    # under $totals{$section}{by_product_machine_lane_stage_filename},
    # a different branch than the one the per-product data rows go to;
    # store it alongside those rows instead.
    #
    $totals{by_product}{$product}{$section}{by_machine_lane_stage_filename}{$machine}{$lane}{$stage}{$filename}{column_names} = $pcols;
    #
    foreach my $prow (@{$pu03->{$section}->{data}})
    {
        unshift @{$totals{by_product}{$product}{$section}{by_machine_lane_stage_filename}{$machine}{$lane}{$stage}{$filename}{data}}, $prow;
    }
}
#
#
# Accumulate the MOUNTLATESTREEL rows of one U03 file into the global
# %totals, both overall and per product.  Rows are prepended, so
# stored order is the reverse of file order.
#
sub tabulate_u03_latest_reel
{
    my ($pdb, $pu03) = @_;
    #
    my $filename  = $pu03->{file_name};
    my $machine   = $pu03->{mach_no};
    my $lane      = $pu03->{lane};
    my $stage     = $pu03->{stage};
    my $output_no = $pu03->{output_no};
    #
    my $section = MOUNTLATESTREEL;
    my $product = $pdb->{product}{u03}{$machine}{$lane}{$stage};
    #
    # overall accumulation, keyed by machine/lane/stage/filename.
    #
    my $pcols = $pu03->{$section}->{column_names};
    $totals{$section}{by_machine_lane_stage_filename}{$machine}{$lane}{$stage}{$filename}{column_names} = $pcols;
    #
    foreach my $prow (@{$pu03->{$section}->{data}})
    {
        unshift @{$totals{$section}{by_machine_lane_stage_filename}{$machine}{$lane}{$stage}{$filename}{data}}, $prow;
    }
    #
    # per-product accumulation.  FIX: column_names used to be stored
    # under $totals{$section}{by_product_machine_lane_stage_filename},
    # a different branch than the one the per-product data rows go to;
    # store it alongside those rows instead.
    #
    $totals{by_product}{$product}{$section}{by_machine_lane_stage_filename}{$machine}{$lane}{$stage}{$filename}{column_names} = $pcols;
    #
    foreach my $prow (@{$pu03->{$section}->{data}})
    {
        unshift @{$totals{by_product}{$product}{$section}{by_machine_lane_stage_filename}{$machine}{$lane}{$stage}{$filename}{data}}, $prow;
    }
}
#
#
# Accumulate the MOUNTEXCHANGEREEL rows of one U03 file into the
# global %totals, both overall and per product.  Rows are prepended,
# so stored order is the reverse of file order.
#
sub tabulate_u03_exchange_reel
{
    my ($pdb, $pu03) = @_;
    #
    my $filename  = $pu03->{file_name};
    my $machine   = $pu03->{mach_no};
    my $lane      = $pu03->{lane};
    my $stage     = $pu03->{stage};
    my $output_no = $pu03->{output_no};
    #
    my $section = MOUNTEXCHANGEREEL;
    my $product = $pdb->{product}{u03}{$machine}{$lane}{$stage};
    #
    # overall accumulation, keyed by machine/lane/stage/filename.
    #
    my $pcols = $pu03->{$section}->{column_names};
    $totals{$section}{by_machine_lane_stage_filename}{$machine}{$lane}{$stage}{$filename}{column_names} = $pcols;
    #
    foreach my $prow (@{$pu03->{$section}->{data}})
    {
        unshift @{$totals{$section}{by_machine_lane_stage_filename}{$machine}{$lane}{$stage}{$filename}{data}}, $prow;
    }
    #
    # per-product accumulation.  FIX: column_names used to be stored
    # under $totals{$section}{by_product_machine_lane_stage_filename},
    # a different branch than the one the per-product data rows go to;
    # store it alongside those rows instead.
    #
    $totals{by_product}{$product}{$section}{by_machine_lane_stage_filename}{$machine}{$lane}{$stage}{$filename}{column_names} = $pcols;
    #
    foreach my $prow (@{$pu03->{$section}->{data}})
    {
        unshift @{$totals{by_product}{$product}{$section}{by_machine_lane_stage_filename}{$machine}{$lane}{$stage}{$filename}{data}}, $prow;
    }
}
#
#
# Audit the MOUNTQUALITYTRACE section of one U03 file: record its
# report precision, log the file details at maximum verbosity, and
# fold its rows into the running totals for completed-production
# files.
#
sub audit_u03_mount_quality_trace
{
    my ($pdb, $pu03) = @_;
    #
    set_report_quality_trace_precision($pu03);
    #
    my $machine   = $pu03->{mach_no};
    my $lane      = $pu03->{lane};
    my $stage     = $pu03->{stage};
    my $output_no = $pu03->{output_no};
    my $section   = MOUNTQUALITYTRACE;
    my $filename  = $pu03->{file_name};
    #
    if ($verbose >= MAXVERBOSE)
    {
        printf $log_fh "\nSECTION : %s\n", $section;
        printf $log_fh "MACHINE : %s\n", $machine;
        printf $log_fh "LANE : %d\n", $lane;
        printf $log_fh "STAGE : %d\n", $stage;
        printf $log_fh "OUTPUT NO: %s\n", $output_no;
        printf $log_fh "FILE RECS : %d\n", scalar(@{$pu03->{data}});
        # FIX: defined(@{...}) is deprecated (fatal on modern perls);
        # test the references themselves instead.
        printf $log_fh "%s RECS: %d\n", $section, scalar(@{$pu03->{$section}->{data}})
            if (defined($pu03->{$section}) && defined($pu03->{$section}->{data}));
    }
    #
    # only completed-production files carry a quality trace section.
    #
    return unless (($output_no == PROD_COMPLETE) ||
                   ($output_no == PROD_COMPLETE_LATER));
    #
    tabulate_u03_quality_trace($pdb, $pu03);
    #
    return;
}
#
#
# Audit the MOUNTLATESTREEL section of one U03 file: record its
# report precision, log the file details at maximum verbosity, and
# fold its rows into the running totals for completed-production
# files.
#
sub audit_u03_mount_latest_reel
{
    my ($pdb, $pu03) = @_;
    #
    set_report_latest_reel_precision($pu03);
    #
    my $machine   = $pu03->{mach_no};
    my $lane      = $pu03->{lane};
    my $stage     = $pu03->{stage};
    my $output_no = $pu03->{output_no};
    my $section   = MOUNTLATESTREEL;
    my $filename  = $pu03->{file_name};
    #
    if ($verbose >= MAXVERBOSE)
    {
        printf $log_fh "\nSECTION : %s\n", $section;
        printf $log_fh "MACHINE : %s\n", $machine;
        printf $log_fh "LANE : %d\n", $lane;
        printf $log_fh "STAGE : %d\n", $stage;
        printf $log_fh "OUTPUT NO: %s\n", $output_no;
        printf $log_fh "FILE RECS : %d\n", scalar(@{$pu03->{data}});
        # FIX: defined(@{...}) is deprecated (fatal on modern perls);
        # test the references themselves instead.
        printf $log_fh "%s RECS: %d\n", $section, scalar(@{$pu03->{$section}->{data}})
            if (defined($pu03->{$section}) && defined($pu03->{$section}->{data}));
    }
    #
    # only completed-production files carry a latest_reel section.
    #
    return unless (($output_no == PROD_COMPLETE) ||
                   ($output_no == PROD_COMPLETE_LATER));
    #
    tabulate_u03_latest_reel($pdb, $pu03);
    #
    return;
}
#
#
# Audit the MOUNTEXCHANGEREEL section of one U03 file: record its
# report precision, log the file details at maximum verbosity, and
# fold its rows into the running totals for completed-production
# files.
#
sub audit_u03_mount_exchange_reel
{
    my ($pdb, $pu03) = @_;
    #
    set_report_exchange_reel_precision($pu03);
    #
    my $machine   = $pu03->{mach_no};
    my $lane      = $pu03->{lane};
    my $stage     = $pu03->{stage};
    my $output_no = $pu03->{output_no};
    my $section   = MOUNTEXCHANGEREEL;
    my $filename  = $pu03->{file_name};
    #
    if ($verbose >= MAXVERBOSE)
    {
        printf $log_fh "\nSECTION : %s\n", $section;
        printf $log_fh "MACHINE : %s\n", $machine;
        printf $log_fh "LANE : %d\n", $lane;
        printf $log_fh "STAGE : %d\n", $stage;
        printf $log_fh "OUTPUT NO: %s\n", $output_no;
        printf $log_fh "FILE RECS : %d\n", scalar(@{$pu03->{data}});
        # FIX: defined(@{...}) is deprecated (fatal on modern perls);
        # test the references themselves instead.
        printf $log_fh "%s RECS: %d\n", $section, scalar(@{$pu03->{$section}->{data}})
            if (defined($pu03->{$section}) && defined($pu03->{$section}->{data}));
    }
    #
    # only completed-production files carry an exchange reel section.
    #
    return unless (($output_no == PROD_COMPLETE) ||
                   ($output_no == PROD_COMPLETE_LATER));
    #
    tabulate_u03_exchange_reel($pdb, $pu03);
    #
    return;
}
#
#
# Audit one loaded U03 file: record its product info, then run each
# section auditor in turn.
#
sub audit_u03_file
{
    my ($pdb, $pu03) = @_;
    #
    set_product_info_u03($pdb, $pu03);
    #
    foreach my $auditor (\&audit_u03_mount_quality_trace,
                         \&audit_u03_mount_latest_reel,
                         \&audit_u03_mount_exchange_reel)
    {
        $auditor->($pdb, $pu03);
    }
    #
    return;
}
#
#
# Audit every U03 file in the list: load each file, parse its
# sections, and audit it into $pdb.  Files that fail to load are
# skipped.
#
sub audit_u03_files
{
    my ($pu03s, $pdb) = @_;
    #
    printf $log_fh "\nAudit U03 files:\n";
    #
    foreach my $pu03 (@{$pu03s})
    {
        printf $log_fh "\nAudit u03: %s\n", $pu03->{file_name}
            if ($verbose >= MIDVERBOSE);
        #
        # skip files that fail to load.
        #
        next unless (load($pu03) != 0);
        #
        # FIX: load_u03_sections takes only the file record; the
        # stray INDEX argument previously passed here was silently
        # ignored (cf. the load_mpr_sections call site).
        #
        load_u03_sections($pu03);
        #
        audit_u03_file($pdb, $pu03);
    }
    #
    return;
}
#
#
# Export stub for the quality trace data -- not yet implemented.
# NOTE(review): named "u01" but invoked from export_u03_report;
# consider renaming when implemented.
#
sub export_u01_mount_quality_trace_report
{
my ($pdb) = @_;
}
#
#
# Export stub for the latest reel data -- not yet implemented.
# NOTE(review): named "u01" but invoked from export_u03_report;
# consider renaming when implemented.
#
sub export_u01_mount_latest_reel_report
{
my ($pdb) = @_;
}
#
#
# Export stub for the exchange reel data -- not yet implemented.
# NOTE(review): named "u01" but invoked from export_u03_report;
# consider renaming when implemented.
#
sub export_u01_mount_exchange_reel_report
{
my ($pdb) = @_;
}
#
#
# Emit the full set of U03 CSV exports (quality trace, latest reel,
# exchange reel).  The called subs are currently stubs.
#
sub export_u03_report
{
    my $pdb = shift;
    #
    foreach my $export_sub (\&export_u01_mount_quality_trace_report,
                            \&export_u01_mount_latest_reel_report,
                            \&export_u01_mount_exchange_reel_report)
    {
        $export_sub->($pdb);
    }
}
#
#
# Print stub for the U03 report -- not yet implemented.
#
sub print_u03_report
{
my ($pdb) = @_;
#
}
#
#
# Top-level driver for U03 files: audit them, then (unless we are in
# audit-only mode) either export CSVs or print the plain-text report.
#
sub process_u03_files
{
    my $pu03s = shift;
    #
    # Nothing to do when the scan found no U03 files.
    #
    unless (scalar(@{$pu03s}) > 0)
    {
        printf $log_fh "\nNo U03 files to process. Returning.\n\n";
        return;
    }
    #
    my %db = ();
    audit_u03_files($pu03s, \%db);
    #
    return if ($audit_only == TRUE);
    #
    if ($export_csv == TRUE)
    {
        export_u03_report(\%db);
    }
    else
    {
        print_u03_report(\%db);
    }
    #
    return;
}
#
######################################################################
#
# audit MPR files
#
#
# Parse the sections of one MPR file record: the two name/value
# sections, then each tabular (list) section.
#
sub load_mpr_sections
{
    my $pmpr = shift;
    #
    # name/value sections
    #
    load_name_value($pmpr, $_) for (INDEX, INFORMATION);
    #
    # list (tabular) sections
    #
    foreach my $section (TIMEDATASP, COUNTDATASP, COUNTDATASP2,
                         TRACEDATASP, TRACEDATASP_2,
                         ISPINFODATA, MASKISPINFODATA)
    {
        load_list($pmpr, $section);
    }
}
#
#
# Audit every MPR file in the list: load each file and parse its
# sections.  Files that fail to load are skipped.  $pdb is accepted
# for interface symmetry but not yet used.
#
sub audit_mpr_files
{
    my ($pmprs, $pdb) = @_;
    #
    printf $log_fh "\nAudit MPR files:\n";
    #
    foreach my $pmpr (@{$pmprs})
    {
        printf $log_fh "\nAudit mpr: %s\n", $pmpr->{file_name}
            if ($verbose >= MIDVERBOSE);
        #
        next if (load($pmpr) == 0);   # skip files that fail to load
        #
        load_mpr_sections($pmpr);
    }
    #
    return;
}
#
#
# Top-level driver for MPR files: audit them if any were found.
# No report/export stage exists for MPR data yet.
#
sub process_mpr_files
{
    my $pmprs = shift;
    #
    unless (scalar(@{$pmprs}) > 0)
    {
        printf $log_fh "\nNo MPR files to process. Returning.\n\n";
        return;
    }
    #
    my %db = ();
    audit_mpr_files($pmprs, \%db);
    #
    return;
}
#
######################################################################
#
# scan directories for U01, U03 and MPR files.
#
my %all_list = (); # files found by the directory scan, keyed by extension ('u01', 'u03', 'mpr')
my $one_type = ''; # extension filter consulted by the want_one_type() callback
#
#
# File::Find wanted() callback that collects files whose extension
# matches $one_type.  Metadata fields are parsed from the file name:
# names split on the '+-+' separator are preferred, falling back to
# plain '-'; a name must yield at least 9 parts for the fields to be
# filled in, otherwise they stay empty strings.
#
sub want_one_type
{
    return unless ($_ =~ m/^.*\.${one_type}$/);
    #
    printf $log_fh "FOUND %s FILE: %s\n", $one_type, $File::Find::name
        if ($verbose >= MAXVERBOSE);
    #
    my $file_name = $_;
    #
    my @fields = qw(date mach_no stage lane pcb_serial pcb_id output_no pcb_id_lot_no);
    my %meta = map { $_ => '' } @fields;
    #
    # try each separator in preference order; first one that yields
    # enough parts wins.
    #
    foreach my $sep ('\+-\+', '-')
    {
        my @parts = split($sep, $file_name);
        next if (scalar(@parts) < 9);
        @meta{@fields} = @parts[0 .. 7];
        last;
    }
    #
    unshift @{$all_list{$one_type}},
    {
        'file_name' => $file_name,
        'full_path' => $File::Find::name,
        'directory' => $File::Find::dir,
        %meta
    };
}
#
# File::Find "wanted" callback used when scanning for all three file
# types at once.  Classifies the file by extension, parses the metadata
# embedded in its name, and prepends a record to the matching type list.
sub want_all_types
{
    # $dt stays empty for anything that is not a U01, U03 or MPR file.
    my $dt = '';

    if ($_ =~ m/^.*\.u01$/)
    {
        printf $log_fh "FOUND u01 FILE: %s\n", $File::Find::name
            if ($verbose >= MAXVERBOSE);
        $dt = 'u01';
    }
    elsif ($_ =~ m/^.*\.u03$/)
    {
        printf $log_fh "FOUND u03 FILE: %s\n", $File::Find::name
            if ($verbose >= MAXVERBOSE);
        $dt = 'u03';
    }
    elsif ($_ =~ m/^.*\.mpr$/)
    {
        printf $log_fh "FOUND mpr FILE: %s\n", $File::Find::name
            if ($verbose >= MAXVERBOSE);
        $dt = 'mpr';
    }

    return if ($dt eq '');

    my $file_name = $_;

    # File names are '+-+'-separated or, failing that, '-'-separated.
    # NOTE(review): the code requires at least 9 fields but only uses
    # fields 0..7 -- confirm the naming convention really has 9+ parts.
    my @parts = split('\+-\+', $file_name);
    @parts = split('-', $file_name) if (scalar(@parts) < 9);

    # Fewer than nine fields leaves every parsed value empty.
    my @fields = ('') x 8;
    @fields = @parts[0 .. 7] if (scalar(@parts) >= 9);

    my ($date, $mach_no, $stage, $lane,
        $pcb_serial, $pcb_id, $output_no, $pcb_id_lot_no) = @fields;

    unshift @{$all_list{$dt}},
        {
            'file_name'     => $file_name,
            'full_path'     => $File::Find::name,
            'directory'     => $File::Find::dir,
            'date'          => $date,
            'mach_no'       => $mach_no,
            'stage'         => $stage,
            'lane'          => $lane,
            'pcb_serial'    => $pcb_serial,
            'pcb_id'        => $pcb_id,
            'output_no'     => $output_no,
            'pcb_id_lot_no' => $pcb_id_lot_no
        };
}
#
# Recursively scan the given directories for data files.  When $ftype is
# one of 'u01'/'u03'/'mpr' only that type is collected (via the faster
# single-type callback); anything else collects all three types.  Each
# result list is sorted by file name before returning.
sub get_all_files
{
    my ($ftype, $pargv, $pu01, $pu03, $pmpr) = @_;

    # Map each known file type to its caller-supplied result list.
    my %dest = (
        'u01' => $pu01,
        'u03' => $pu03,
        'mpr' => $pmpr,
    );

    if (exists $dest{$ftype})
    {
        # Optimized single-type scan.
        $one_type = $ftype;
        $all_list{$one_type} = $dest{$ftype};

        find(\&want_one_type, @{$pargv});

        @{$dest{$ftype}} =
            sort { $a->{file_name} cmp $b->{file_name} } @{$dest{$ftype}};
    }
    else
    {
        # Scan for every known type in a single pass.
        $all_list{$_} = $dest{$_} for (keys %dest);

        find(\&want_all_types, @{$pargv});

        foreach my $type (keys %dest)
        {
            @{$dest{$type}} =
                sort { $a->{file_name} cmp $b->{file_name} } @{$dest{$type}};
        }
    }
}
#
#########################################################################
#
# start of script
#
#
# Parse and apply the command-line options.
#
my %opts;
if (getopts('?L:O:anxhwWv:d:t:r:', \%opts) != 1)
{
    usage($cmd);
    exit 2;
}
#
# BUG FIX: iterate over the option *names* only.  The original
# "foreach my $opt (%opts)" flattened the hash into an alternating
# key/value list, so an option *value* (e.g. a -L log file named "h")
# could be misinterpreted as an option letter.
#
foreach my $opt (keys %opts)
{
    if ($opt eq "h")
    {
        usage($cmd);
        exit 0;
    }
    elsif ($opt eq "w")
    {
        $verbose = MINVERBOSE;
    }
    elsif ($opt eq "W")
    {
        $verbose = MIDVERBOSE;
    }
    elsif ($opt eq "a")
    {
        # audit files only; skip report generation
        $audit_only = TRUE;
    }
    elsif ($opt eq "x")
    {
        # export reports as CSV instead of printing them
        $export_csv = TRUE;
    }
    elsif ($opt eq "n")
    {
        $use_neg_delta = 1;
        printf $log_fh "Will USE negative delta values.\n";
    }
    elsif ($opt eq "t")
    {
        # restrict the directory scan to a single file type
        $file_type = $opts{$opt};
        # tr/[A-Z]/[a-z]/ also (harmlessly) mapped the literal bracket
        # characters; the plain ranges are the intended transliteration.
        $file_type =~ tr/A-Z/a-z/;
        if ($file_type !~ m/^(u01|u03|mpr)$/i)
        {
            printf $log_fh "\nInvalid file type: $opts{$opt}\n";
            usage($cmd);
            exit 2;
        }
    }
    elsif ($opt eq "O")
    {
        # processing options, OR-ed into the $proc_option bit mask
        my $option = $opts{$opt};
        $option =~ tr/a-z/A-Z/;
        if ($option eq "IGN12")
        {
            $proc_option |= PROC_OPT_IGN12;
        }
        elsif ($option eq "NMVAL2")
        {
            $proc_option |= PROC_OPT_NMVAL2;
        }
        elsif ($option eq "IGNALL12")
        {
            $proc_option |= PROC_OPT_IGNALL12;
        }
        else
        {
            printf $log_fh "\nInvalid option type: $opts{$opt}\n";
            usage($cmd);
            exit 2;
        }
    }
    elsif ($opt eq "L")
    {
        # redirect all subsequent logging to the given file
        local *FH;
        $logfile = $opts{$opt};
        open(FH, '>', $logfile) or die $!;
        $log_fh = *FH;
        printf $log_fh "\nLog File: %s\n", $logfile;
    }
    elsif ($opt eq "r")
    {
        $red_flag_trigger = $opts{$opt};
        # NOTE(review): the regex accepts 0 although the message demands
        # an integer > 0, and "< 0" can never be true after a digits-only
        # match -- confirm the intended lower bound before tightening.
        if (($red_flag_trigger !~ m/^[0-9][0-9]*$/) ||
            ($red_flag_trigger < 0))
        {
            printf $log_fh "\nInvalid Red Flag value; must be integer > 0.\n";
            usage($cmd);
            exit 2;
        }
        printf $log_fh "\nRed Flag trigger value: %d\n", $red_flag_trigger;
    }
    elsif ($opt eq "d")
    {
        # directory for CSV exports; created on demand
        $export_dir = $opts{$opt};
        mkpath($export_dir) unless ( -d $export_dir );
        printf $log_fh "\nExport directory: %s\n", $export_dir;
    }
    elsif ($opt eq "v")
    {
        # verbosity: a numeric level 0-3 or a named level
        if ($opts{$opt} =~ m/^[0123]$/)
        {
            $verbose = $opts{$opt};
        }
        # BUG FIX: "exist" is not a Perl builtin and died at runtime with
        # an undefined-subroutine error; "exists" is what was intended.
        elsif (exists($verbose_levels{$opts{$opt}}))
        {
            $verbose = $verbose_levels{$opts{$opt}};
        }
        else
        {
            printf $log_fh "\nInvalid verbose level: $opts{$opt}\n";
            usage($cmd);
            exit 2;
        }
    }
}
#
# A scan needs at least one directory argument.
if (scalar(@ARGV) == 0)
{
    printf $log_fh "No directories given.\n";
    usage($cmd);
    exit 2;
}
#
printf $log_fh "\nScan directories for U01, U03 and MPR files: \n\n";
#
# Records collected by the directory scan, one list per file type.
my @u01_files = ();
my @u03_files = ();
my @mpr_files = ();
#
init_report_precision();
#
# Recursively scan the given directories; $file_type (set via -t)
# restricts the scan to a single type when non-empty.
get_all_files($file_type,
    \@ARGV,
    \@u01_files,
    \@u03_files,
    \@mpr_files);
printf $log_fh "Number of U01 files: %d\n", scalar(@u01_files);
printf $log_fh "Number of U03 files: %d\n", scalar(@u03_files);
printf $log_fh "Number of MPR files: %d\n\n", scalar(@mpr_files);
#
# Audit each file type and (unless -a) produce its report.
process_u01_files(\@u01_files);
process_u03_files(\@u03_files);
process_mpr_files(\@mpr_files);
#
printf $log_fh "\nAll Done\n";
#
exit 0;
| ombt/analytics | sql/1504241859-lnb-file-audit/lnb2csv.pl | Perl | mit | 272,002 |
# please insert nothing before this line: -*- mode: cperl; cperl-indent-level: 4; cperl-continued-statement-offset: 4; indent-tabs-mode: nil -*-
package TestFilter::out_str_req_eos;
# here we test how EOS is passed from one streaming filter to another,
# making sure that it's not lost
use strict;
use warnings FATAL => 'all';
use Apache2::RequestRec ();
use Apache2::RequestIO ();
use Apache2::Filter ();
use TestCommon::Utils ();
use Apache2::Const -compile => qw(OK M_POST);
my $prefix = 'PREFIX_';
my $suffix = '_SUFFIX';

# Streaming output filter: writes $prefix once, before the first chunk of
# response data, then passes everything it reads straight through.  The
# filter context ($filter->ctx) serves as a "prefix already sent" flag,
# since a streaming filter may be invoked more than once per response.
sub add_prefix {
    my $filter = shift;
    #warn "add_prefix called\n";
    unless ($filter->ctx) {
        $filter->print($prefix);
        $filter->ctx(1);
    }
    while ($filter->read(my $buffer, 1024)){
        #warn "add_prefix read: [$buffer]\n";
        $filter->print($buffer);
    }
    Apache2::Const::OK;
}

# Streaming output filter: passes data through and appends $suffix once
# end-of-stream is reached.  seen_eos is checked only after the read loop
# has drained the current brigade -- this relies on EOS surviving the
# upstream add_prefix filter, which is exactly what this test verifies.
sub add_suffix {
    my $filter = shift;
    #warn "add_suffix called\n";
    while ($filter->read(my $buffer, 1024)){
        #warn "add_suffix read: [$buffer]\n";
        $filter->print($buffer);
    }
    if ($filter->seen_eos) {
        $filter->print($suffix);
        $filter->ctx(1);
    }
    Apache2::Const::OK;
}

# Response handler: echoes the POST body back as text/plain (an empty
# body for other methods) so the client can inspect the filtered output.
sub handler {
    my $r = shift;
    $r->content_type('text/plain');
    if ($r->method_number == Apache2::Const::M_POST) {
        $r->print(TestCommon::Utils::read_post($r));
    }
    return Apache2::Const::OK;
}
1;
__DATA__
<NoAutoConfig>
PerlModule TestFilter::out_str_req_eos
<Location /TestFilter__out_str_req_eos>
SetHandler modperl
PerlResponseHandler TestFilter::out_str_req_eos
PerlOutputFilterHandler TestFilter::out_str_req_eos::add_prefix
PerlOutputFilterHandler TestFilter::out_str_req_eos::add_suffix
</Location>
</NoAutoConfig>
| dreamhost/dpkg-ndn-perl-mod-perl | t/filter/TestFilter/out_str_req_eos.pm | Perl | apache-2.0 | 1,789 |
package NP::Model::VendorZone;
use strict;
use NP::Util qw();

# Zone names reserved for the pool itself; never available to vendors.
my %reserved_zone_names = map { $_ => 1 } qw(
  europe
  north-america
  south-america
  america
  asia
  africa
  root
  ntppool
  vendor
);

# Validate this vendor zone, stashing any problems (field => message)
# for later retrieval via validation_errors().  Returns 1 when the zone
# is valid, 0 otherwise.
sub validate {
    my $vz = shift;
    my %errors;

    my $name = $vz->zone_name;

    if (!$name) {
        $errors{zone_name} = 'A zone name is required.';
    }
    elsif (length($name) < 4) {
        $errors{zone_name} = 'The zone name must be 4 or more characters.';
    }

    # Reserved names trump the length/presence messages above.
    if ($reserved_zone_names{$name}) {
        $errors{zone_name} = 'That zone name is in use or reserved.';
    }

    # Reject a name already taken by a different vendor zone record.
    {
        my $existing =
            NP::Model->vendor_zone->get_objects(query => [zone_name => $name]);
        if (@$existing) {
            unless ($vz and grep { $vz->id == $_->id } @$existing) {
                $errors{zone_name} = 'That zone name is already used.';
            }
        }
    }

    # These fields must be present and contain at least one non-space.
    foreach my $field (qw(contact_information device_count organization_name request_information)) {
        $errors{$field} = 'Required field!'
            unless $vz->$field and $vz->$field =~ m/\S/;
    }

    $vz->{_validation_errors} = {%errors};

    return %errors ? 0 : 1;
}

# Hash ref of field => message populated by the last validate() call.
sub validation_errors {
    my $self = shift;
    return $self->{_validation_errors} || {};
}

# Vendor admins may always edit; the owning user only while the zone is
# still in the 'New' state.
sub can_edit {
    my ($self, $user) = @_;
    return 0 unless $user;
    return 1 if $user->privileges->vendor_admin;
    return 1
        if $self->status eq 'New'
        and $user->id == $self->user_id;    # TODO: many<->many
    return 0;
}

# Vendor admins and the owning user may view the zone.
sub can_view {
    my ($self, $user) = @_;
    return 0 unless $user;
    return 1 if $user->privileges->vendor_admin;
    return 1 if $user->id == $self->user_id;    # TODO: many<->many
    return 0;
}

# HTML-escaped/formatted versions of the free-text fields.
sub contact_information_html { NP::Util::convert_to_html($_[0]->contact_information) }
sub request_information_html { NP::Util::convert_to_html($_[0]->request_information) }

1;
| punitvara/ntppool | lib/NP/Model/VendorZone.pm | Perl | apache-2.0 | 1,868 |
#
# Copyright 2016 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package hardware::server::cisco::ucs::mode::components::localdisk;
use strict;
use warnings;
use hardware::server::cisco::ucs::mode::components::resources qw(%mapping_presence %mapping_operability);
# In MIB 'CISCO-UNIFIED-COMPUTING-STORAGE-MIB'
# Columns of cucsStorageLocalDiskTable audited here; raw SNMP values are
# decoded into labels through the shared maps from the resources module.
my $mapping1 = {
    cucsStorageLocalDiskPresence => { oid => '.1.3.6.1.4.1.9.9.719.1.45.4.1.10', map => \%mapping_presence },
};
my $mapping2 = {
    cucsStorageLocalDiskOperability => { oid => '.1.3.6.1.4.1.9.9.719.1.45.4.1.9', map => \%mapping_operability },
};
# Distinguished-name column: enumerates the disks and labels the output.
my $oid_cucsStorageLocalDiskDn = '.1.3.6.1.4.1.9.9.719.1.45.4.1.2';
# Queue the three columns above for the plugin's bulk SNMP request.
sub load {
    my ($self) = @_;

    push @{$self->{request}}, { oid => $mapping1->{cucsStorageLocalDiskPresence}->{oid} },
        { oid => $mapping2->{cucsStorageLocalDiskOperability}->{oid} }, { oid => $oid_cucsStorageLocalDiskDn };
}
# Walk every local disk in the SNMP results, log its presence and
# operability, and raise the configured severity for any non-OK state.
# Disks excluded by filters are skipped; disks whose presence maps to a
# problem state are reported but not counted in the component total.
sub check {
    my ($self) = @_;

    $self->{output}->output_add(long_msg => "Checking local disks");
    $self->{components}->{localdisk} = {name => 'local disks', total => 0, skip => 0};
    return if ($self->check_filter(section => 'localdisk'));

    foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_cucsStorageLocalDiskDn}})) {
        # the trailing numeric sub-identifier of the DN OID is the
        # table instance shared by all columns of this disk
        $oid =~ /\.(\d+)$/;
        my $instance = $1;
        my $localdisk_dn = $self->{results}->{$oid_cucsStorageLocalDiskDn}->{$oid};
        my $result = $self->{snmp}->map_instance(mapping => $mapping1, results => $self->{results}->{$mapping1->{cucsStorageLocalDiskPresence}->{oid}}, instance => $instance);
        my $result2 = $self->{snmp}->map_instance(mapping => $mapping2, results => $self->{results}->{$mapping2->{cucsStorageLocalDiskOperability}->{oid}}, instance => $instance);

        next if ($self->absent_problem(section => 'localdisk', instance => $localdisk_dn));
        next if ($self->check_filter(section => 'localdisk', instance => $localdisk_dn));

        $self->{output}->output_add(long_msg => sprintf("local disk '%s' state is '%s' [presence: %s].",
                                    $localdisk_dn, $result2->{cucsStorageLocalDiskOperability},
                                    $result->{cucsStorageLocalDiskPresence})
                                    );
        # a problem-level presence value short-circuits the operability
        # check and keeps the disk out of the total
        my $exit = $self->get_severity(section => 'localdisk.presence', label => 'default.presence', value => $result->{cucsStorageLocalDiskPresence});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("local disk '%s' presence is: '%s'",
                                                             $localdisk_dn, $result->{cucsStorageLocalDiskPresence})
                                        );
            next;
        }

        $self->{components}->{localdisk}->{total}++;
        $exit = $self->get_severity(section => 'localdisk.operability', label => 'default.operability', value => $result2->{cucsStorageLocalDiskOperability});
        if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
            $self->{output}->output_add(severity => $exit,
                                        short_msg => sprintf("local disk '%s' state is '%s'",
                                                             $localdisk_dn, $result2->{cucsStorageLocalDiskOperability}
                                                             )
                                        );
        }
    }
}
1; | bcournaud/centreon-plugins | hardware/server/cisco/ucs/mode/components/localdisk.pm | Perl | apache-2.0 | 4,292 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.